repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
stringlengths 5-92 | stringlengths 4-232 | stringclasses 19 values | stringlengths 4-7 | stringlengths 721-1.04M | stringclasses 15 values | int64 -9,223,277,421,539,062,000 to 9,223,102,107B | float64 6.51-99.9 | int64 15-997 | float64 0.25-0.97 | bool 1 class
---|---|---|---|---|---|---|---|---|---|---
ningirsu/stepmania-server | smserver/smutils/smpacket/smpacket.py | 1 | 34459 | """
The ``smpacket`` module
========================
Provides convenient helpers for working with the StepMania protocol.
:Example:
>>> from smserver.smutils.smpacket import smcommand
>>> from smserver.smutils.smpacket import smpacket
>>> # Create a new packet instance
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="test")
>>> print(packet)
<SMPacketServerNSCCM message="test">
>>> # Binary encode your packet
>>> packet.binary
b'\\x00\\x00\\x00\\x06\\x87test\\x00'
>>> # Decode binary data
>>> packet2 = SMPacket.from_("binary", packet.binary)
>>> print(packet2)
<SMPacketServerNSCCM message="test">
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCPing)
>>> # JSON encode your packet
>>> packet.json
'{"_command": 128}'
>>> # Decode JSON data
>>> packet2 = SMPacket.from_("json", packet.json)
>>> print(packet2)
<SMPacketServerNSCPing >
"""
import json
from smserver.smutils.smpacket import smcommand
from smserver.smutils.smpacket import smencoder
class _SMPacketMetaclass(type):
"""Metaclass that implements PEP 487 protocol"""
def __init__(cls, name, bases, attrs, **kw):
super().__init__(name, bases, attrs, **kw)
parent_class = super(cls, cls)
if hasattr(parent_class, '__init_subclass_custom__'):
parent_class.__init_subclass_custom__(cls, **kw) #pylint: disable=no-member
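# The metaclass above emulates PEP 487 for pre-3.6 Pythons: whenever a
# subclass of SMPacket is defined, the parent's __init_subclass_custom__ hook
# runs and records the subclass in SMPacket._subclasses, keyed by its
# ``command`` attribute.  Declaring a packet class is enough to register it
# (illustrative sketch -- the command value must not already be registered):
#   class SMPacketServerExample(SMPacket):
#       command = ...  # a unique smcommand value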
class SMPacket(metaclass=_SMPacketMetaclass):
""" Main class for declare/parse packet """
_command_type = smcommand.SMCommand
_payload = []
_subclasses = {}
command = None
def __init__(self, **kwargs):
        self.command = self.command  # pin the class-level command onto the instance
if "_command" in kwargs:
kwargs.pop("_command")
self.opts = kwargs
def __init_subclass_custom__(cls, **_kwargs): #pylint: disable=no-self-argument
command = cls.command
if not command:
return
if command in cls._subclasses:
raise ValueError("Command already defined")
cls._subclasses[command] = cls
def __len__(self):
return 1 + len(self.payload)
def __str__(self):
return "<%s %s>" % (
self.__class__.__name__,
" ".join(['%s="%s"' % (k, v) for k, v in self.opts.items()]))
def __repr__(self):
return "<%s %s>" % (
self.__class__.__name__,
" ".join(['%s="%s"' % (k, v) for k, v in self.opts.items()]))
def __getitem__(self, value):
return self.opts[value]
def __setitem__(self, key, value):
self.opts[key] = value
def get(self, value, default=None):
return self.opts.get(value, default)
@classmethod
def new(cls, command, **kwargs):
"""
Return an instance with the corresponding command.
If no command is found, return None
:Example:
>>> from smserver.smutils.smpacket import *
>>> print(SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg"))
<SMPacketServerNSCCM message="msg">
"""
if command not in cls._subclasses:
return None
return cls._subclasses[command](**kwargs)
@classmethod
def get_class(cls, command):
"""
        Get the class with the corresponding command
:Example:
>>> from smserver.smutils.smpacket import *
>>> print(SMPacket.get_class(smcommand.SMServerCommand.NSCCM))
<class 'smserver.smutils.smpacket.smpacket.SMPacketServerNSCCM'>
"""
return cls._subclasses.get(command, None)
@property
def binarycommand(self):
"""
Return the command in a binary string
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binarycommand)
b'\\x87'
"""
return self.command.value.to_bytes(1, byteorder='big')
@property
def binarysize(self):
"""
Return the size of the packet in a 4 bytes string.
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binarysize)
b'\\x00\\x00\\x00\\x05'
"""
return len(self).to_bytes(4, byteorder='big')
@property
def data(self):
"""
Return the command + payload in a binary string
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.data)
b'\\x87msg\\x00'
"""
return self.binarycommand + self.payload
@property
def binary(self):
"""
Return the full binary encoded packet (size + command + payload)
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x05\\x87msg\\x00'
"""
return self.binarysize + self.data
@property
def payload(self):
"""
        Return the payload encoded in binary
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="msg")
>>> print(packet.payload)
b'msg\\x00'
"""
return smencoder.BinaryEncoder.encode(self.opts, self._payload)
@property
def json(self):
"""
Return the JSON encoded packet
:Example:
>>> from smserver.smutils.smpacket import *
>>> packet = SMPacket.new(smcommand.SMServerCommand.NSCPing)
>>> print(packet.json)
{"_command": 128}
"""
return smencoder.JSONEncoder.encode(self.opts, self._payload, command=self.command.value)
@classmethod
def from_payload(cls, payload):
"""
Decode the given binary payload
:Example:
>>> from smserver.smutils.smpacket import *
>>> payload_data = b'msg\\x00'
>>> print(SMPacketServerNSCCM.from_payload(payload_data))
<SMPacketServerNSCCM message="msg">
"""
return cls(
**smencoder.BinaryEncoder.decode(payload, cls._payload)[1]
)
@classmethod
def from_json(cls, payload):
"""
Decode a JSON encoded packet
:Example:
>>> from smserver.smutils.smpacket import *
>>> json_data = '{"message": "msg"}'
>>> print(SMPacketServerNSCCM.from_json(json_data))
<SMPacketServerNSCCM message="msg">
"""
return cls(
**smencoder.JSONEncoder.decode(payload, cls._payload)
)
def to_(self, encoding):
"""
Encode the packet to the specified format (json or binary)
"""
return {
"json": self.json,
"binary": self.binary
}[encoding]
@classmethod
def from_(cls, encoding, data):
"""
Decode the packet from the specified format (json or binary)
"""
return {
"json": cls.parse_json,
"binary": cls.parse_binary
}[encoding](data)
@classmethod
def parse_json(cls, data):
""" Parse a JSON packet """
try:
opts = json.loads(data)
except ValueError:
return None
command = cls._command_type.get(opts.get("_command", -1))
if not command:
return None
return cls.get_class(command).from_json(data)
@classmethod
def parse_data(cls, data):
""" Parse a binary packet """
if not data:
return None
command = cls._command_type.get(data[0])
if not command:
return None
return cls.get_class(command).from_payload(data[1:])
@classmethod
def parse_binary(cls, binary):
""" Parse a binary payload """
if len(binary) < 4:
return None
return cls.parse_data(binary[4:])
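# Wire format recap: a packet is <size: 4 bytes big-endian><command: 1 byte>
# <payload>, where ``size`` counts the command byte plus the payload (see
# __len__ and binarysize above); parse_binary therefore skips the four size
# bytes and hands the remainder to parse_data.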
class SMOPacketClient(SMPacket):
_command_type = smcommand.SMOClientCommand
class SMOPacketServer(SMPacket):
_command_type = smcommand.SMOServerCommand
class SMOPacketClientLogin(SMOPacketClient):
command = smcommand.SMOClientCommand.LOGIN
_payload = [
(smencoder.SMPayloadType.INT, "player_number", None),
(smencoder.SMPayloadType.INT, "encryption", None),
(smencoder.SMPayloadType.NT, "username", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientEnterRoom(SMOPacketClient):
command = smcommand.SMOClientCommand.ENTERROOM
_payload = [
(smencoder.SMPayloadType.INT, "enter", None),
(smencoder.SMPayloadType.NT, "room", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientCreateRoom(SMOPacketClient):
command = smcommand.SMOClientCommand.CREATEROOM
_payload = [
(smencoder.SMPayloadType.INT, "type", None),
(smencoder.SMPayloadType.NT, "title", None),
(smencoder.SMPayloadType.NT, "description", None),
(smencoder.SMPayloadType.NT, "password", None)
]
class SMOPacketClientRoomInfo(SMOPacketClient):
command = smcommand.SMOClientCommand.ROOMINFO
_payload = [
(smencoder.SMPayloadType.NT, "room", None)
]
class SMOPacketServerLogin(SMOPacketServer):
command = smcommand.SMOServerCommand.LOGIN
_payload = [
(smencoder.SMPayloadType.INT, "approval", None),
(smencoder.SMPayloadType.NT, "text", None)
]
class SMOPacketServerRoomUpdate(SMOPacketServer):
command = smcommand.SMOServerCommand.ROOMUPDATE
_payload = [
(smencoder.SMPayloadType.INT, "type", None),
(smencoder.SMPayloadType.MAP, "room_title", ("type", {
0: (smencoder.SMPayloadType.NT, None, None),
})),
(smencoder.SMPayloadType.MAP, "room_description", ("type", {
0: (smencoder.SMPayloadType.NT, None, None),
})),
(smencoder.SMPayloadType.MAP, "room_type", ("type", {
0: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "subroom", ("type", {
0: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "nb_rooms", ("type", {
1: (smencoder.SMPayloadType.INT, None, 1),
})),
(smencoder.SMPayloadType.MAP, "rooms", ("type", {
1: (smencoder.SMPayloadType.LIST, None, ("nb_rooms", [
(smencoder.SMPayloadType.NT, "title", None),
(smencoder.SMPayloadType.NT, "description", None),
])),
})),
(smencoder.SMPayloadType.MAP, "room_status", ("type", {
1: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_rooms")),
})),
(smencoder.SMPayloadType.MAP, "room_flags", ("type", {
1: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_rooms")),
})),
]
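# In the payload above, each MAP entry dispatches on the current value of the
# "type" field: the single-room metadata entries are encoded only when
# type == 0, while the room-list entries are encoded only when type == 1
# (an interpretation read off the payload declaration; the dispatch logic
# itself lives in smencoder).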
class SMOPacketServerGeneralInfo(SMOPacketServer):
command = smcommand.SMOServerCommand.GENERALINFO
_payload = [
(smencoder.SMPayloadType.INT, "format", None),
]
class SMOPacketServerRoomInfo(SMOPacketServer):
command = smcommand.SMOServerCommand.ROOMINFO
_payload = [
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.INT, "num_players", None),
(smencoder.SMPayloadType.INT, "max_players", None),
(smencoder.SMPayloadType.NTLIST, "players", "num_players"),
]
class SMPacketClientNSCPing(SMPacket):
"""
Client command 000. (Ping)
    This command will cause the server to respond with a PingR command
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCPing()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x00'
"""
command = smcommand.SMClientCommand.NSCPing
_payload = []
class SMPacketClientNSCPingR(SMPacket):
"""
Client command 001. (Ping response)
    This command is used to respond to a Ping command.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCPingR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x01'
"""
command = smcommand.SMClientCommand.NSCPingR
_payload = []
class SMPacketClientNSCHello(SMPacket):
"""
Client command 002. (Hello)
This is the first packet from a client to server.
:param int version: Client protocol version
:param str name: Name of the stepmania build
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCHello(
... name="stepmania",
... version=128
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x02\\x80stepmania\\x00'
"""
command = smcommand.SMClientCommand.NSCHello
_payload = [
(smencoder.SMPayloadType.INT, "version", None),
(smencoder.SMPayloadType.NT, "name", None)
]
class SMPacketClientNSCGSR(SMPacket):
"""
Client command 003 (Game Start Request)
This command is called once after most loading is done, and again
immediately before the sound starts.
The server has to respond with a SMPacketServerNSCGSR, if not the
client will freeze.
:param int first_player_feet: Primary player feet (0 for no player)
:param int second_player_feet: Secondary player feet (0 for no player)
:param int first_player_difficulty: Primary player difficulty (0=Beginner, 1=easy, etc.)
:param int second_player_difficulty: Secondary player difficulty (0=Beginner, 1=easy, etc.)
:param int start_position: (0 is pre-sync, 1 is for sync)
:param int reserved: ignored
:param str song_title: Title of the song to play
:param str song_subtitle: Subtitle of the song to play
:param str song_artist: Artist of the song to play
:param str course_title: Course Title
:param str song_options: Song option in string format
:param str first_player_options: Primary player's option
:param str second_player_options: Secondary player's option
"""
command = smcommand.SMClientCommand.NSCGSR
_payload = [
(smencoder.SMPayloadType.MSN, "first_player_feet", None),
(smencoder.SMPayloadType.LSN, "second_player_feet", None),
(smencoder.SMPayloadType.MSN, "first_player_difficulty", None),
(smencoder.SMPayloadType.LSN, "second_player_difficulty", None),
(smencoder.SMPayloadType.MSN, "start_position", None),
(smencoder.SMPayloadType.LSN, "reserved", None),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "course_title", None),
(smencoder.SMPayloadType.NT, "song_options", None),
(smencoder.SMPayloadType.NT, "first_player_options", None),
(smencoder.SMPayloadType.NT, "second_player_options", None),
]
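# The MSN/LSN pairs above stand for the most and least significant nibble of
# a shared byte, so each feet/difficulty pair and the start_position/reserved
# pair travel packed two-to-a-byte; NT entries are null-terminated strings.
# (Interpretation based on the payload type names; the packing itself is
# implemented in smencoder.)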
class SMPacketClientNSCGON(SMPacket):
"""
Client command 004 (Game Over Notice)
    This command is sent when the end of the game is reached.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCGON()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x04'
"""
command = smcommand.SMClientCommand.NSCGON
class SMPacketClientNSCGSU(SMPacket):
"""
Client command 005 (Game Status update)
Update game info for each step in the game
:param int player_id: player # (0 or 1)
:param int step_id: (1: hitMine, 2: AvoidMine, ...)
:param int grade: Projected Grade (0: AAAA, 1: AAA, ...)
:param int reserved: ignored
    :param int score: Current score
    :param int combo: Current combo
    :param int health: Current health
:param int offset: Offset from the note (32767=miss)
"""
command = smcommand.SMClientCommand.NSCGSU
_payload = [
(smencoder.SMPayloadType.MSN, "player_id", None),
(smencoder.SMPayloadType.LSN, "step_id", None),
(smencoder.SMPayloadType.MSN, "grade", None),
(smencoder.SMPayloadType.LSN, "reserved", None),
(smencoder.SMPayloadType.INT, "score", 4),
(smencoder.SMPayloadType.INT, "combo", 2),
(smencoder.SMPayloadType.INT, "health", 2),
(smencoder.SMPayloadType.INT, "offset", 2)
]
class SMPacketClientNSCSU(SMPacket):
"""
Client command 006 (Style Update)
    This is sent when a profile is chosen. It also indicates the number
    of players in the local client (1 or 2).
:param int nb_players: Number of players in the client (1 or 2)
:param int player_id: Player ID (0 or 1)
:param str player_name: Player name
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCSU(
... nb_players=2,
... player_id=0,
... player_name="profile1",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x06\\x02\\x00profile1\\x00'
"""
command = smcommand.SMClientCommand.NSCSU
_payload = [
(smencoder.SMPayloadType.INT, "nb_players", None),
(smencoder.SMPayloadType.INT, "player_id", None),
(smencoder.SMPayloadType.NT, "player_name", None),
]
class SMPacketClientNSCCM(SMPacket):
"""
Client command 007 (Chat Message)
The user typed a message for general chat.
:param str message: The message sent by the client.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCCM(message="Client message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x07Client message\\x00'
"""
command = smcommand.SMClientCommand.NSCCM
_payload = [
(smencoder.SMPayloadType.NT, "message", None),
]
class SMPacketClientNSCRSG(SMPacket):
"""
Client command 008 (Request Start Game)
    Request Start Game and tell the server of the existence/non-existence
    of the song: the user selected a song on a Net-enabled selection screen
:param int usage: Usage for this message
:param str song_title: Song title
    :param str song_subtitle: Song subtitle
    :param str song_artist: Song artist
:Example:
>>> # Client select the song ('Title', by 'Artist').
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketClientNSCRSG(
... usage=2,
... song_title="Title",
... song_artist="Artist",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x08\\x02Title\\x00Artist\\x00\\x00'
"""
command = smcommand.SMClientCommand.NSCRSG
_payload = [
(smencoder.SMPayloadType.INT, "usage", 1),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
]
class SMPacketClientNSCCUUL(SMPacket):
"""
Client command 009 (reserved)
"""
command = smcommand.SMClientCommand.NSCCUUL
class SMPacketClientNSSCSMS(SMPacket):
"""
Client command 010 (User status)
Indicate where the user is
:param int action: Int enum indicating where the user is
Action available:
* 0: exited ScreenNetSelectMusic
* 1: entered ScreenNetSelectMusic
* 2: Not Sent
* 3: entered options screen
* 4: exited the evaluation screen
* 5: entered evaluation screen
* 6: exited ScreenNetRoom
* 7: entered ScreenNetRoom
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> # Client enter in room selection
>>> packet = smpacket.SMPacketClientNSSCSMS(
... action=7,
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x02\\n\\x07'
"""
command = smcommand.SMClientCommand.NSSCSMS
_payload = [
(smencoder.SMPayloadType.INT, "action", None),
]
class SMPacketClientNSCUOpts(SMPacket):
"""
Client command 011 (User options)
User has changed player's options
:param str player_0: Player 0 options
:param str player_1: Player 1 options
"""
command = smcommand.SMClientCommand.NSCUOpts
_payload = [
(smencoder.SMPayloadType.NT, "player_0", None),
(smencoder.SMPayloadType.NT, "player_1", None),
]
class SMPacketClientNSSMONL(SMPacket):
"""
Client command 012 (SMOnline Packet)
The SMLan packet 12 is a wrapper for the SMOnline packet.
:param packet: The SMOPacket to include
:type packet: SMOPacketClient
"""
command = smcommand.SMClientCommand.NSSMONL
_payload = [
(smencoder.SMPayloadType.PACKET, "packet", SMOPacketClient)
]
class SMPacketClientNSCFormatted(SMPacket):
"""
Client command 013 (reserved)
"""
command = smcommand.SMClientCommand.NSCFormatted
class SMPacketClientNSCAttack(SMPacket):
"""
Client command 014 (reserved)
"""
command = smcommand.SMClientCommand.NSCAttack
class SMPacketClientXMLPacket(SMPacket):
"""
Client command 15 (XMLPacket)
This packet contains data in XML format.
:param str xml: XML string
"""
command = smcommand.SMClientCommand.XMLPacket
_payload = [
(smencoder.SMPayloadType.NT, "xml", None),
]
class SMPacketServerNSCPing(SMPacket):
"""
Server command 128 (Ping)
This command will cause client to respond with a PingR command
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCPing()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x80'
"""
command = smcommand.SMServerCommand.NSCPing
class SMPacketServerNSCPingR(SMPacket):
"""
Server command 129 (PingR)
This command is used to respond to a Ping command.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCPingR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x81'
"""
command = smcommand.SMServerCommand.NSCPingR
class SMPacketServerNSCHello(SMPacket):
"""
Server command 130 (Hello)
This command introduces the server. (In response of Client Hello
command)
    :param int version: The server protocol version (always 128)
:param str name: Name of the server
:param int key: Random key, used for hash password
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCHello(
... version=128,
... name="MyServer",
... key=999999999
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0f\\x82\\x80MyServer\\x00;\\x9a\\xc9\\xff'
"""
command = smcommand.SMServerCommand.NSCHello
_payload = [
(smencoder.SMPayloadType.INT, "version", None),
(smencoder.SMPayloadType.NT, "name", None),
(smencoder.SMPayloadType.INT, "key", 4)
]
class SMPacketServerNSCGSR(SMPacket):
"""
Server command 131 (Allow Start)
This will cause the client to start the game
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCGSR()
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x01\\x83'
"""
command = smcommand.SMServerCommand.NSCGSR
class SMPacketServerNSCGON(SMPacket):
"""
Server command 132 (Game over stats)
    This packet is sent in response to the game over packet. It
contains information regarding how well each player did.
:param int nb_players: NB of players stats in this packet (size of the next list)
:param list ids: Player's ID (calculate from the SMPacketServerNSCUUL)
:param list score: Player's score
:param list grade: Player's grade
:param list difficulty: Player's difficulty
:param list flawless: NB of flawless note
:param list perfect: NB of perfect note
:param list great: NB of great note
:param list good: NB of good note
:param list bad: NB of bad note
:param list miss: NB of miss note
:param list held: NB of held note
:param list max_combo: Player's max combo
:param list options: Player's options
"""
command = smcommand.SMServerCommand.NSCGON
_payload = [
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.INTLIST, "ids", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "score", (4, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "grade", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "difficulty", (1, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "flawless", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "perfect", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "great", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "good", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "bad", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "miss", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "held", (2, "nb_players")),
(smencoder.SMPayloadType.INTLIST, "max_combo", (2, "nb_players")),
(smencoder.SMPayloadType.NTLIST, "options", "nb_players"),
]
class SMPacketServerNSCGSU(SMPacket):
"""
Server command 133 (Scoreboard update)
This will update the client's scoreboard.
:param int section: Which section to update (0: names, 1:combos, 2: grades)
    :param int nb_players: Nb of players in this packet
    :param list options: Int list containing names, combos or grades
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCGSU(
        ... section=1, # Update the current combo
... nb_players=2, # 2 users in this packet
... options=[12, 5] # List containing the combos
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x07\\x85\\x01\\x02\\x00\\x0c\\x00\\x05'
"""
command = smcommand.SMServerCommand.NSCGSU
_payload = [
(smencoder.SMPayloadType.INT, "section", 1),
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.MAP, "options", ("section", {
0: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")),
1: (smencoder.SMPayloadType.INTLIST, None, (2, "nb_players")),
2: (smencoder.SMPayloadType.INTLIST, None, (1, "nb_players")),
}))
]
class SMPacketServerNSCSU(SMPacket):
"""
Server command 134 (System Message)
Send a system message to user
:param str message: The message to send
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCSU(message="System message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x86System message\\x00'
"""
command = smcommand.SMServerCommand.NSCSU
_payload = [
(smencoder.SMPayloadType.NT, "message", None)
]
class SMPacketServerNSCCM(SMPacket):
"""
Server command 135 (Chat Message)
Add a chat message to the chat window on some StepMania screens.
:param str message: The message to add
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCSU(message="Client message")
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x10\\x86Client message\\x00'
"""
command = smcommand.SMServerCommand.NSCCM
_payload = [
(smencoder.SMPayloadType.NT, "message", None)
]
class SMPacketServerNSCRSG(SMPacket):
"""
Server command 136 (Request Start Game)
Tell client to start song/ask if client has song
:param int usage: Usage of this message
:param str song_title: Song title
:param str song_artist: Song artist
:param str song_subtitle: Song subtitle
Usage available:
* 0: See if client has song
* 1: See if client has song, if so, scroll to song
* 2: See if client has song, if so, scroll to song, and play that song
* 3: Blindly start song
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> packet = smpacket.SMPacketServerNSCRSG(
... usage=0, # Check song presence
... song_title="title",
... song_artist="artist",
... song_subtitle="subtitle",
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x18\\x88\\x00title\\x00artist\\x00subtitle\\x00'
"""
command = smcommand.SMServerCommand.NSCRSG
_payload = [
(smencoder.SMPayloadType.INT, "usage", 1),
(smencoder.SMPayloadType.NT, "song_title", None),
(smencoder.SMPayloadType.NT, "song_artist", None),
(smencoder.SMPayloadType.NT, "song_subtitle", None),
]
class SMPacketServerNSCCUUL(SMPacket):
"""
Server command 137 (Update user list)
This sends all the users currently connected
:param int max_players: NB max of players (max 255)
    :param int nb_players: NB of players in this packet
:param list players: List containing status and name for each user
"""
command = smcommand.SMServerCommand.NSCCUUL
_payload = [
(smencoder.SMPayloadType.INT, "max_players", 1),
(smencoder.SMPayloadType.INT, "nb_players", 1),
(smencoder.SMPayloadType.LIST, "players", ("nb_players", [
(smencoder.SMPayloadType.INT, "status", 1),
(smencoder.SMPayloadType.NT, "name", None),
])
)
]
class SMPacketServerNSSCSMS(SMPacket):
"""
Server command 138
Force change to Networking select music screen.
:param str gametype: Set specified gametype
:param str style: Set specified style
"""
command = smcommand.SMServerCommand.NSSCSMS
_payload = [
(smencoder.SMPayloadType.NT, "gametype", None),
(smencoder.SMPayloadType.NT, "style", None),
]
class SMPacketServerNSCUOpts(SMPacket):
"""
Server command 139 (reserved)
"""
command = smcommand.SMServerCommand.NSCUOpts
class SMPacketServerNSSMONL(SMPacket):
"""
Server command 140 (SMOnline Packet)
The SMLan packet 140 is a wrapper for the SMOnline packet.
:param packet: The SMOPacket to include
:type packet: SMOPacketServer
"""
command = smcommand.SMServerCommand.NSSMONL
_payload = [
(smencoder.SMPayloadType.PACKET, "packet", SMOPacketServer)
]
class SMPacketServerNSCFormatted(SMPacket):
"""
Server command 141 (Formatted information packet)
Send formatted information regarding the server back to the player.
:param str server_name: Server name
:param int server_port: Port the server is listening on
:param int nb_players: Number of players connected
"""
command = smcommand.SMServerCommand.NSCFormatted
_payload = [
(smencoder.SMPayloadType.NT, "server_name", None),
(smencoder.SMPayloadType.INT, "server_port", 2),
(smencoder.SMPayloadType.INT, "nb_players", 2),
]
class SMPacketServerNSCAttack(SMPacket):
"""
Server command 142 (Attack Client)
:param int player: Player number (0 or 1)
:param int time: Duration of the attack (in ms)
:param attack: Text describing modifiers
:type attack: str or smserver.smutils.smattack.SMAttack
List of attack available are in smattack module.
:Example:
>>> from smserver.smutils.smpacket import smpacket
>>> from smserver.smutils import smattack
>>> packet = smpacket.SMPacketServerNSCAttack(
... player=0, # Send the attack to the player 0
... time=1000, # The attack will last 1 second
... attack='drunk', #Send a drunk attack
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x8e\\x00\\x00\\x00\\x03\\xe8drunk\\x00'
>>> packet = smpacket.SMPacketServerNSCAttack(
... player=0,
... time=1000,
... attack=smattack.SMAttack.Drunk, # Use an Enum value
... )
>>> print(packet.binary)
b'\\x00\\x00\\x00\\x0c\\x8e\\x00\\x00\\x00\\x03\\xe8drunk\\x00'
"""
def __init__(self, player=0, time=1000, attack=None):
        if attack is not None and not isinstance(attack, str):
            attack = attack.value  # accept smattack enum members as well as strings
SMPacket.__init__(self, player=player, time=time, attack=attack)
command = smcommand.SMServerCommand.NSCAttack
_payload = [
(smencoder.SMPayloadType.INT, "player", 1),
(smencoder.SMPayloadType.INT, "time", 4),
(smencoder.SMPayloadType.NT, "attack", None),
]
class SMPacketServerXMLPacket(SMPacket):
"""
Server command 143 (XMLPacket)
This packet contains data in XML format.
:param str xml: XML string
"""
command = smcommand.SMServerCommand.XMLPacket
_payload = [
(smencoder.SMPayloadType.NT, "xml", None),
]
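# Minimal round-trip sketch, runnable only where the smserver package is
# importable; it exercises the binary and JSON codecs defined above.
if __name__ == '__main__':
    _pkt = SMPacket.new(smcommand.SMServerCommand.NSCCM, message="demo")
    assert SMPacket.from_("binary", _pkt.binary).opts == _pkt.opts
    assert SMPacket.from_("json", _pkt.json).opts == _pkt.opts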
| mit | -3,107,368,309,541,007,000 | 29.712121 | 99 | 0.605212 | false |
maloL/nao-fsm | tracking_and_storing.py | 1 | 11706 | # object tracking algorithm that plots trajectory points on the
# video stream window; after collecting 30 points it stores them in txt files
from naoqi import ALProxy, ALBroker, ALModule
import time
from vision_definitions import kVGA, kBGRColorSpace
import cv2 as opencv
import numpy as np
import random
from ghmm import *
import ConfigParser, argparse
import training
global ObjectTracker
# object tracking module
class ObjectTrackerModule(ALModule):
def __init__(self, name):
ALModule.__init__(self, name)
self.data = 0
self.behaviors = []
self.exists = []
self.kindNames = []
self.waiting = []
self.tts = ALProxy("ALTextToSpeech")
self.gestureProxy = ALProxy("NAOObjectGesture", myBroker)
self.motionProxy = ALProxy("ALMotion", myBroker)
self.memProxy = ALProxy("ALMemory", myBroker)
self.motionProxy.setStiffnesses("Head", 1.0)
self.gestureProxy.startTracker(15, 0)
#self.log = open("temp.txt", "w") ############################################################
def startTracker(self, camId):
self.gestureProxy.startTracker(15, camId)
#self.gestureProxy.focusObject(-1)
def stopTracker(self):
self.gestureProxy.stopTracker()
self.gestureProxy.stopFocus()
def load(self, path, name):
self.gestureProxy.loadDataset(path)
self.kindNames.append(name)
self.exists.append(False)
self.behaviors.append([])
self.waiting.append(None)
self.gestureProxy.trackObject(name, -len(self.kindNames))
self.memProxy.subscribeToMicroEvent(name, "ObjectTracker", name, "storeData")
def getIdx(self, name):
if (name in self.kindNames):
return self.kindNames.index(name)
else:
return None
def getBehaviors(self, name):
idx = self.getIdx(name)
if idx!=None:
return self.behaviors[idx]
else:
return None
def getExist(self, name):
idx = self.getIdx(name)
if idx!=None:
return self.exists[idx]
else:
return None
def getWaiting(self, name):
idx = self.getIdx(name)
if idx!=None:
return self.waiting[idx]
else:
return None
def clearWaiting(self):
for i in range(len(self.waiting)):
self.waiting[i] = None
def waitForBehavior(self, name, behavior):
idx = self.getIdx(name)
self.gestureProxy.clearEventTraj(name)
print('Waiting for behavior: ' + str(behavior))
if idx!=None:
if behavior == "Frog":
self.waiting[idx] = ["FrogL", "FrogR"]
else:
if behavior == "Plane":
self.waiting[idx] = ["PlaneL", "PlaneR"]
else:
self.waiting[idx] = [behavior]
else:
return None
def onObjGet(self, key, value, message):
id = -1
if (key in self.kindNames):
id = self.kindNames.index(key)
else:
return
if (value != None):
if (value[0] != 0):
self.exists[id]=True
if (value[5]!=None):
print (value[5])
self.behaviors[id] = value[5]
if (self.waiting[id]!= None):
for tmp in self.waiting[id]:
if tmp in value[5]:
self.waiting[id] = None
break
else:
self.exists[id]=False
if (value[1]!=None):
print (value[1])
self.behaviors[id] = value[1]
if (self.waiting[id]!= None):
for tmp in self.waiting[id]:
if tmp in value[1]:
self.waiting[id] = None
break
def storeData(self, key, value, message):
if value:
if value[0]:
print("I see the cup")
#self.log.write(str(value[3][0])+", "+str(value[3][1])+"\n") ########################################
self.data = value[3]
else:
self.data = 0
print("I don't see the cup")
def unload(self):
self.gestureProxy.stopTracker()
#self.log.close()
for i in range(0, len(self.exists)):
self.gestureProxy.removeObjectKind(0)
self.gestureProxy.removeEvent(self.kindNames[i])
# class with functions for Kalman filter
class KalmanFilter(object):
def __init__(self, process_variance, estimated_measurement_variance):
self.process_variance = process_variance
self.estimated_measurement_variance = estimated_measurement_variance
self.posteri_estimate = 0.0
self.posteri_error_estimate = 1.0
def input_latest_noisy_measurement(self, measurement):
priori_estimate = self.posteri_estimate
priori_error_estimate = self.posteri_error_estimate + self.process_variance
blending_factor = priori_error_estimate / (priori_error_estimate + self.estimated_measurement_variance)
self.posteri_estimate = priori_estimate + blending_factor * (measurement - priori_estimate)
self.posteri_error_estimate = (1 - blending_factor) * priori_error_estimate
def get_latest_estimated_measurement(self):
return self.posteri_estimate
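# Usage sketch for the filter above (illustrative; ``measurements`` stands in
# for any iterable of scalar or numpy-array observations):
#   kf = KalmanFilter(process_variance=1e-1,
#                     estimated_measurement_variance=0.05 ** 2)
#   for measurement in measurements:
#       kf.input_latest_noisy_measurement(measurement)
#       smoothed = kf.get_latest_estimated_measurement()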
# function for getting video stream from nao camera
def nao_image_getter(alvideoproxy, video):
alimg = alvideoproxy.getImageRemote(video)
imgheader = opencv.cv.CreateImageHeader((alimg[0], alimg[1]), opencv.cv.IPL_DEPTH_8U, 3)
opencv.cv.SetData(imgheader, alimg[6])
img = np.asarray(imgheader[:, :])
return img
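# Per the ALVideoDevice API, getImageRemote returns a container whose first
# two fields are the image width and height and whose seventh field (index 6)
# is the raw pixel buffer; the helper above wraps that buffer in an IplImage
# header and exposes it to OpenCV as a numpy array.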
if __name__ == '__main__':
# initializing proxies and other required parameters
IP = "192.168.1.105"
PORT = 9559
myBroker = ALBroker("myBroker", "0.0.0.0", 0, IP, PORT)
#opencv.namedWindow("Robot camera feed")
# get sample image to detect size
alvideoproxy = ALProxy("ALVideoDevice", IP, PORT)
video = alvideoproxy.subscribeCamera("video", 0, kVGA, kBGRColorSpace, 30)
motionproxy=ALProxy('ALMotion', myBroker)
motionproxy.killAll()
tts = ALProxy('ALTextToSpeech', myBroker)
behaveproxy = ALProxy('ALBehaviorManager', myBroker)
postureproxy = ALProxy('ALRobotPosture', myBroker)
navigationProxy = ALProxy('ALNavigation', myBroker)
sound = ALProxy('ALAudioDevice', myBroker)
memory = ALProxy('ALMemory', myBroker)
memory.insertData('ObjectGrabber', int(0))
camProxy = ALProxy("ALVideoDevice", IP, PORT)
postureproxy.goToPosture("StandInit", 0.8)
motionproxy.setAngles('HeadPitch', 0, 0.5)
time.sleep(0.5)
motionproxy.setAngles('HeadYaw', 0, 0.5)
time.sleep(0.5)
motionproxy.setStiffnesses("Head", 1.0)
cfile = "Config.ini"
config = ConfigParser.ConfigParser()
config.read(cfile)
set_num = config.get("Grab settings", "dataset")
volume = config.get('Grab settings', 'Volume')
volume = int(float(volume))
sound.setOutputVolume(volume)
new_set = int(set_num) + 1
filename = '/home/luka/Documents/FER_projekt/Diplomski_rad/Temp_set/Pos/gest' + str(new_set)
config.set("Grab settings", "Dataset", str(new_set))
with open(cfile, 'wb') as configfile:
config.write(configfile)
# try object tracking
try:
# kalman filter preparations
iteration_count = 500
measurement_standard_deviation = np.std([random.random() * 2.0 - 1.0 for j in xrange(iteration_count)])
process_variance = 1e-1 # greater = faster, worse estimation, lower = slower, better estimation
estimated_measurement_variance = measurement_standard_deviation ** 2 # 0.05 ** 2
kalman_filter = KalmanFilter(process_variance, estimated_measurement_variance)
posteri_estimate_graph = []
        # initializing tracking
ObjectTracker = ObjectTrackerModule("ObjectTracker")
ObjectTracker.load("/home/nao/ImageSets/cup", 'Cup')
ObjectTracker.gestureProxy.stopTracker()
time.sleep(2)
#tts.say("Now you repeat the gesture")
time.sleep(2)
print ('Starting tracker...')
ObjectTracker.startTracker(0)
image_position = np.zeros(shape=2)
pos_vec = np.zeros(shape=2)
i = 0
log = open(filename + ".txt", "w") ####################################################################################
estimation = np.zeros(shape=(1, 2))
# while loop where tracking is executed
tts.say("Now you repeat the gesture")
time.sleep(0.5)
while len(estimation) < 30:
# if object is detected do data analysis
image = nao_image_getter(alvideoproxy, video)
if ObjectTracker.data:
# angular position data from micro event
pos_data = np.asarray(ObjectTracker.data)
print "data: "
print ObjectTracker.data
# calculating image position based on angular position of object
image_position = camProxy.getImagePositionFromAngularPosition(0, [pos_data[0], pos_data[1]])
image_position = np.asarray(image_position)
print image_position
# applying kalman filter on image position data
kalman_filter.input_latest_noisy_measurement(image_position)
posteri_estimate_graph.append(kalman_filter.get_latest_estimated_measurement())
# separating estimated values for easier plotting
estimation = np.zeros(shape=(len(posteri_estimate_graph), 2))
for i in range(0, len(posteri_estimate_graph)):
temp2 = posteri_estimate_graph[i]
estimation[i, 0] = temp2[0]
estimation[i, 1] = temp2[1]
# video frame size
height, width = image.shape[:2]
opencv.ellipse(image, (int(estimation[-1, 0] * width), int(estimation[-1, 1] * height + 15)),
(70, 90), -180, 0, 360, (255, 0, 0), 2)
# plotting trajectory points
for j in range(2, len(estimation)):
opencv.circle(image, (int(estimation[j, 0] * width), int(estimation[j, 1] * height + 15)), 5, (0, 0, 255), -1)
opencv.putText(image, "Object", (10, 70), opencv.FONT_HERSHEY_SIMPLEX, 3, (0, 255, 0), 5)
opencv.putText(image, "tracking", (10, 140), opencv.FONT_HERSHEY_SIMPLEX, 3, (0, 255, 0), 5)
#opencv.putText(image, "Object tracking", (100, 100), opencv.FONT_HERSHEY_DUPLEX, 2.0, (0, 0, 255))
opencv.imshow("Robot camera feed", image)
#opencv.imwrite("Slike/Tracking/image" + str(len(estimation)) + ".png", image)
if opencv.waitKey(10) == 27:
break
    # if anything in the try block fails, the program stops after running
    # the cleanup below (unsubscribe from video, stop behaviours, shut down)
finally:
n = len(estimation)
for i in range(0, n):
log.write(str(estimation[i, 0])+", "+str(estimation[i, 1])+"\n")
log.close()
ObjectTracker.gestureProxy.stopTracker()
print('Ending tracking...')
time.sleep(1)
alvideoproxy.unsubscribe(video)
opencv.destroyAllWindows()
ObjectTracker.unload()
behaveproxy.stopAllBehaviors()
time.sleep(1.0)
motionproxy.killAll()
        myBroker.shutdown()
| lgpl-3.0 | -1,398,684,198,154,065,200 | 36.764516 | 130 | 0.579874 | false |
isazi/Transpose | analysis/manage.py | 1 | 2009 | #!/usr/bin/env python
# Copyright 2014 Alessio Sclocco <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def get_tables(queue):
"""Get a list of the tables"""
queue.execute("SHOW TABLES")
return queue.fetchall()
def create_table(queue, table):
"""Create a table to store auto-tuning results for transpose."""
queue.execute("CREATE table " + table + "(id INTEGER NOT NULL PRIMARY KEY AUTO_INCREMENT, M INTEGER NOT NULL, N INTEGER NOT NULL, itemsPerBlock INTEGER NOT NULL, GBs FLOAT UNSIGNED NOT NULL, time FLOAT UNSIGNED NOT NULL, time_err FLOAT UNSIGNED NOT NULL, cov FLOAT UNSIGNED NOT NULL)")
def delete_table(queue, table):
"""Delete table."""
queue.execute("DROP table " + table)
def load_file(queue, table, input_file):
"""Load input_file into a table in the database."""
for line in input_file:
if (line[0] != "#") and (line[0] != "\n"):
items = line.split(sep=" ")
queue.execute("INSERT INTO " + table + " VALUES (NULL, " + items[0] + ", " + items[1] + ", " + items[2] + ", " + items[3] + ", " + items[4] + ", " + items[5] + ", " + items[6].rstrip("\n") + ")")
def print_results(confs):
"""Print the result tuples."""
for conf in confs:
for item in conf:
print(item, end=" ")
print()
def get_M_range(queue, table, N):
"""Return the M in the scenario."""
queue.execute("SELECT DISTINCT M FROM " + table + " WHERE (N = " + N + ") ORDER BY M")
return queue.fetchall()
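# Illustrative wiring (hypothetical connection settings; ``queue`` is simply
# a DB-API cursor, matching the .execute/.fetchall calls above):
#   import MySQLdb
#   queue = MySQLdb.connect(host="localhost", db="tuning").cursor()
#   create_table(queue, "transpose_results")
#   with open("results.txt") as input_file:
#       load_file(queue, "transpose_results", input_file)
#   print_results(get_M_range(queue, "transpose_results", "1024"))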
| apache-2.0 | 2,591,675,039,704,562,000 | 41.744681 | 289 | 0.649079 | false |
Emergen/zivios-agent | modules/ntp.py | 1 | 2447 | """
* Copyright (c) 2008 Zivios, LLC.
*
* This file is part of Zivios.
*
* Zivios is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Zivios is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Zivios. If not, see <http://www.gnu.org/licenses/>.
*
* @package ZiviosAgent
* @copyright Copyright (c) 2008 Zivios, LLC. (http://www.zivios.org)
* @license http://www.zivios.org/legal/license
* @version $Id: Exception.php 908 2008-08-25 11:03:00Z fkhan $
* @subpackage Core
"""
from twisted.web import xmlrpc
import logging
import os
import popen2
import re
import string
import time
import datetime
from twisted.python import log
import ZiviosAgent
class ntp(ZiviosAgent.ZiviosAgent):
def xmlrpc_addService(self):
print 'ntp addService function called'
def xmlrpc_serviceStatus(self):
response,regexcode,exitcode = self.command("statusntpcommand")
return (exitcode==0)
def xmlrpc_stopService(self):
response,regexcode,exitcode = self.command("stopntpcommand")
return (exitcode==0)
def xmlrpc_startService(self):
response,regexcode,exitcode = self.command("startntpcommand")
return (exitcode==0)
def xmlrpc_currentTime(self):
now = datetime.datetime.now()
return now.ctime()
def xmlrpc_getTimezone(self):
tz,tzm = time.tzname
return tzm;
    def xmlrpc_getsyncstatus(self):
        # sanitize the raw ntpq output into a list of peer-status rows
        response,regexcode,exitcode = self.command("ntpq")
        resp = response.split('\n')
        if (len(resp) <= 2):
            return -1
        del resp[0:2]  # drop the two ntpq header lines
        length = len(resp)
        del resp[length-1]  # drop the trailing empty line
        retarray = []
        for a in resp:
            a = a.lstrip()
            a = a.rstrip()
            joinarray = re.split('\s+',a)
            retarray.append(joinarray)
        return retarray
def xmlrpc_getGmtOffset(self):
        return time.timezone / 3600
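# Hypothetical client-side sketch: once this module is exposed through a
# Twisted XML-RPC resource, the xmlrpc_-prefixed methods above are callable
# remotely without the prefix.  Host, port and mount point are assumptions.
#   import xmlrpclib
#   agent = xmlrpclib.ServerProxy('http://agent.example.org:8080/ntp')
#   if not agent.serviceStatus():
#       agent.startService()
#   print agent.getsyncstatus()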
| gpl-3.0 | -4,665,748,381,308,129,000 | 28.841463 | 71 | 0.644054 | false |
ancafarcas/superdesk-core | superdesk/datalayer.py | 1 | 4241 | # -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import superdesk
from eve.io.base import DataLayer
from eve.io.mongo import Mongo
from eve.utils import config, ParsedRequest
from eve_elastic import Elastic, InvalidSearchString # noqa
from flask import current_app
from superdesk.lock import lock, unlock
from superdesk.json_utils import SuperdeskJSONEncoder
class SuperdeskDataLayer(DataLayer):
"""Superdesk Data Layer.
Implements eve data layer interface, is used to make eve work with superdesk service layer.
It handles app initialization and later it forwards eve calls to respective service.
"""
serializers = {}
serializers.update(Mongo.serializers)
serializers.update({'datetime': Elastic.serializers['datetime']})
def init_app(self, app):
app.data = self # app.data must be set for locks to work
self.mongo = Mongo(app)
self.driver = self.mongo.driver
self.storage = self.driver
self.elastic = Elastic(app, serializer=SuperdeskJSONEncoder(), skip_index_init=True, retry_on_timeout=True)
def pymongo(self, resource=None, prefix=None):
return self.mongo.pymongo(resource, prefix)
def init_elastic(self, app):
"""Init elastic index.
It will create index and put mapping. It should run only once so locks are in place.
Thus mongo must be already setup before running this.
"""
with app.app_context():
if lock('elastic', expire=10):
try:
self.elastic.init_index(app)
finally:
unlock('elastic')
def find(self, resource, req, lookup):
return superdesk.get_resource_service(resource).get(req=req, lookup=lookup)
def find_all(self, resource, max_results=1000):
req = ParsedRequest()
req.max_results = max_results
return self._backend(resource).find(resource, req, None)
def find_one(self, resource, req, **lookup):
return superdesk.get_resource_service(resource).find_one(req=req, **lookup)
def find_one_raw(self, resource, _id):
return self._backend(resource).find_one_raw(resource, _id)
def find_list_of_ids(self, resource, ids, client_projection=None):
return self._backend(resource).find_list_of_ids(resource, ids, client_projection)
def insert(self, resource, docs, **kwargs):
return superdesk.get_resource_service(resource).create(docs, **kwargs)
def update(self, resource, id_, updates, original):
return superdesk.get_resource_service(resource).update(id=id_, updates=updates, original=original)
def update_all(self, resource, query, updates):
datasource = self.datasource(resource)
driver = self._backend(resource).driver
collection = driver.db[datasource[0]]
return collection.update(query, {'$set': updates}, multi=True)
def replace(self, resource, id_, document, original):
return superdesk.get_resource_service(resource).replace(id=id_, document=document, original=original)
def remove(self, resource, lookup=None):
if lookup is None:
lookup = {}
return superdesk.get_resource_service(resource).delete(lookup=lookup)
def is_empty(self, resource):
return self._backend(resource).is_empty(resource)
def _search_backend(self, resource):
if resource.endswith(current_app.config['VERSIONS']):
return
datasource = self.datasource(resource)
backend = config.SOURCES.get(datasource[0], {}).get('search_backend', None)
return getattr(self, backend) if backend is not None else None
def _backend(self, resource):
datasource = self.datasource(resource)
backend = config.SOURCES.get(datasource[0], {'backend': 'mongo'}).get('backend', 'mongo')
return getattr(self, backend)
def get_mongo_collection(self, resource):
return self.mongo.pymongo('users').db[resource]
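# Wiring sketch (hypothetical ``app``): Eve's DataLayer.__init__ accepts an
# app object and calls init_app with it, after which the elastic index can be
# prepared once per deployment:
#   data_layer = SuperdeskDataLayer(app)
#   data_layer.init_elastic(app)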
| agpl-3.0 | 5,146,778,410,217,942,000 | 37.908257 | 115 | 0.677906 | false |
gatkin/declxml | tests/test_hooks_validation.py | 1 | 14857 | """Tests using hooks for validation"""
from collections import namedtuple
import pytest
import declxml as xml
from .helpers import strip_xml
_UserTuple = namedtuple('_UserTuple', [
'name',
'age',
])
class _UserClass(object):
def __init__(self, name=None, age=None):
self.name = name
self.age = age
def __eq__(self, other):
return isinstance(other, _UserClass) and\
other.name == self.name and\
other.age == self.age
def __repr__(self):
return '_UserClass(name=\'{}\', age={})'.format(
self.name, self.age
)
class _ValidationError(Exception):
"""Custom validation error class"""
class TestCustomErrorMessage(object):
"""Provide custom validation error messages."""
def test_array_non_root(self):
"""Custom error message for array values."""
processor = xml.dictionary('data', [
xml.array(xml.integer('value'), nested='values', hooks=self._hooks)
])
xml_string = strip_xml("""
<data>
<values>
<value>1</value>
</values>
</data>
""")
value = {
'values': [1],
}
location = 'data/values'
self._assert_error_message(processor, value, xml_string, location)
def test_array_root(self):
"""Custom error message for array values."""
processor = xml.array(xml.integer('value'), nested='data', hooks=self._hooks)
xml_string = strip_xml("""
<data>
<value>1</value>
</data>
""")
value = [1]
location = 'data'
self._assert_error_message(processor, value, xml_string, location)
def test_dictionary_non_root(self):
"""Custom error message for dictionary values."""
processor = xml.dictionary('data', [
xml.dictionary('user', [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
])
xml_string = strip_xml("""
<data>
<user>
<name>Bob</name>
<age>24</age>
</user>
</data>
""")
value = {
'user': {
'name': 'Bob',
'age': 24,
}
}
location = 'data/user'
self._assert_error_message(processor, value, xml_string, location)
def test_dictionary_root(self):
"""Custom error message for dictionary values."""
processor = xml.dictionary('data', [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
xml_string = strip_xml("""
<data>
<name>Bob</name>
<age>24</age>
</data>
""")
value = {
'name': 'Bob',
'age': 24,
}
location = 'data'
self._assert_error_message(processor, value, xml_string, location)
def test_named_tuple_non_root(self):
"""Custom error message for namedtuple values."""
processor = xml.dictionary('data', [
xml.named_tuple('user', _UserTuple, [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
])
xml_string = strip_xml("""
<data>
<user>
<name>Bob</name>
<age>24</age>
</user>
</data>
""")
value = {'user': _UserTuple(name='Bob', age=24)}
location = 'data/user'
self._assert_error_message(processor, value, xml_string, location)
def test_named_tuple_root(self):
"""Custom error message for namedtuple values."""
processor = xml.named_tuple('data', _UserTuple, [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
xml_string = strip_xml("""
<data>
<name>Bob</name>
<age>24</age>
</data>
""")
value = _UserTuple(name='Bob', age=24)
location = 'data'
self._assert_error_message(processor, value, xml_string, location)
def test_primitive(self):
"""Custom error message for primitive values."""
processor = xml.dictionary('data', [
xml.integer('value', hooks=self._hooks)
])
xml_string = strip_xml("""
<data>
<value>1</value>
</data>
""")
value = {'value': 1}
location = 'data/value'
self._assert_error_message(processor, value, xml_string, location)
def test_user_object_non_root(self):
"""Custom error message for user object values."""
processor = xml.dictionary('data', [
xml.user_object('user', _UserClass, [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
])
xml_string = strip_xml("""
<data>
<user>
<name>Bob</name>
<age>24</age>
</user>
</data>
""")
value = {'user': _UserClass(name='Bob', age=24)}
location = 'data/user'
self._assert_error_message(processor, value, xml_string, location)
def test_user_object_root(self):
"""Custom error message for user object values."""
processor = xml.user_object('data', _UserClass, [
xml.string('name'),
xml.integer('age'),
], hooks=self._hooks)
xml_string = strip_xml("""
<data>
<name>Bob</name>
<age>24</age>
</data>
""")
value = _UserClass(name='Bob', age=24)
location = 'data'
self._assert_error_message(processor, value, xml_string, location)
@staticmethod
def _assert_error_message(processor, value, xml_string, expected_location):
with pytest.raises(_ValidationError) as parse_exception:
xml.parse_from_string(processor, xml_string)
actual_parse_message = str(parse_exception.value)
print(actual_parse_message)
assert actual_parse_message.endswith(expected_location)
with pytest.raises(_ValidationError) as serialize_exception:
xml.serialize_to_string(processor, value)
actual_serialize_message = str(serialize_exception.value)
assert actual_serialize_message.endswith(expected_location)
@property
def _hooks(self):
def validate(state, _):
state.raise_error(_ValidationError, 'Invalid value')
return xml.Hooks(
after_parse=validate,
before_serialize=validate,
)
class TestValidateArray(object):
"""Use hooks to validate array values."""
def test_invalid(self):
"""Invalid array value."""
xml_string = strip_xml("""
<data>
<value>1</value>
<value>3</value>
<value>3</value>
</data>
""")
value = [1, 3, 3]
_assert_invalid(self._processor, value, xml_string)
def test_valid(self):
"""Valid array value."""
xml_string = strip_xml("""
<data>
<value>1</value>
<value>2</value>
<value>3</value>
</data>
""")
value = [1, 2, 3]
_assert_valid(self._processor, value, xml_string)
@property
def _processor(self):
def validate(state, value):
if len(value) != len(set(value)):
state.raise_error(_ValidationError)
return value
hooks = xml.Hooks(
after_parse=validate,
before_serialize=validate,
)
processor = xml.array(xml.integer('value'), hooks=hooks, nested='data')
return processor
class TestValidateDictionary(object):
"""Use hooks to validate dictionary values."""
def test_invalid(self):
"""Invalid dictionary value."""
xml_string = strip_xml("""
<data>
<a>5</a>
<b>6</b>
</data>
""")
value = {
'a': 5,
'b': 6,
}
_assert_invalid(self._processor, value, xml_string)
def test_valid(self):
"""Valid dictionary value."""
xml_string = strip_xml("""
<data>
<a>32</a>
<b>67</b>
</data>
""")
value = {
'a': 32,
'b': 67,
}
_assert_valid(self._processor, value, xml_string)
@property
def _processor(self):
def validate(state, value):
if value['a'] == 5 and value['b'] == 6:
state.raise_error(_ValidationError)
return value
hooks = xml.Hooks(
after_parse=validate,
before_serialize=validate,
)
processor = xml.dictionary('data', [
xml.integer('a'),
xml.integer('b'),
], hooks=hooks)
return processor
class TestValidateNamedTuple(object):
"""Use hooks for validating namedtuple values."""
def test_invalid(self):
"""Invalid namedtuple value"""
xml_string = strip_xml("""
<user>
<name>Bob</name>
<age>24</age>
</user>
""")
value = _UserTuple(name='Bob', age=24)
_assert_invalid(self._processor, value, xml_string)
def test_valid(self):
"""Valid namedtuple value"""
xml_string = strip_xml("""
<user>
<name>Jill</name>
<age>28</age>
</user>
""")
value = _UserTuple(name='Jill', age=28)
_assert_valid(self._processor, value, xml_string)
@property
def _processor(self):
def validate(state, value):
if value.name == 'Bob' and value.age == 24:
state.raise_error(_ValidationError)
return value
hooks = xml.Hooks(
after_parse=validate,
before_serialize=validate,
)
processor = xml.named_tuple('user', _UserTuple, [
xml.string('name'),
xml.integer('age')
], hooks=hooks)
return processor
class TestValidatePrimitive(object):
"""Use hooks for validating primitive values."""
def test_invalid(self):
"""Invalid primitive value"""
xml_string = strip_xml("""
<data>
<value>-91</value>
</data>
""")
value = {'value': -91}
_assert_invalid(self._processor, value, xml_string)
def test_valid(self):
"""Valid primitive value"""
xml_string = strip_xml("""
<data>
<value>32</value>
</data>
""")
value = {'value': 32}
_assert_valid(self._processor, value, xml_string)
@property
def _processor(self):
def validate(state, value):
if value < 0:
state.raise_error(_ValidationError)
return value
hooks = xml.Hooks(
after_parse=validate,
before_serialize=validate
)
processor = xml.dictionary('data', [
xml.integer('value', hooks=hooks)
])
return processor
class TestValidateUserObject(object):
"""Use hooks for validating user object values."""
def test_invalid(self):
"""Invalid namedtuple value"""
xml_string = strip_xml("""
<user>
<name>Bob</name>
<age>24</age>
</user>
""")
value = _UserClass(name='Bob', age=24)
_assert_invalid(self._processor, value, xml_string)
def test_valid(self):
"""Valid namedtuple value"""
xml_string = strip_xml("""
<user>
<name>Jill</name>
<age>28</age>
</user>
""")
value = _UserClass(name='Jill', age=28)
_assert_valid(self._processor, value, xml_string)
@property
def _processor(self):
def validate(state, value):
if value.name == 'Bob' and value.age == 24:
state.raise_error(_ValidationError)
return value
hooks = xml.Hooks(
after_parse=validate,
before_serialize=validate,
)
processor = xml.user_object('user', _UserClass, [
xml.string('name'),
xml.integer('age')
], hooks=hooks)
return processor
def test_aggregate_missing_hooks():
"""Process with missing aggregate hooks."""
hooks = xml.Hooks(
after_parse=None,
before_serialize=None
)
processor = xml.dictionary('data', [
xml.integer('a'),
xml.integer('b')
], hooks=hooks)
xml_string = strip_xml("""
<data>
<a>1</a>
<b>2</b>
</data>
""")
value = {
'a': 1,
'b': 2,
}
_assert_valid(processor, value, xml_string)
def test_primitive_missing_hooks():
"""Process primitive value with missing hooks."""
hooks = xml.Hooks(
after_parse=None,
before_serialize=None
)
processor = xml.dictionary('data', [
xml.integer('value', hooks=hooks)
])
xml_string = strip_xml("""
<data>
<value>1</value>
</data>
""")
value = {'value': 1}
_assert_valid(processor, value, xml_string)
def test_processor_locations_parsing():
"""Get processor location in hooks callback."""
expected_locations = [
xml.ProcessorLocation(element_path='data', array_index=None),
xml.ProcessorLocation(element_path='value', array_index=None)
]
def trace(state, _):
assert isinstance(state, xml.ProcessorStateView)
assert expected_locations == list(state.locations)
hooks = xml.Hooks(
after_parse=trace,
before_serialize=trace,
)
processor = xml.dictionary('data', [
xml.integer('value', hooks=hooks),
])
xml_string = strip_xml("""
<data>
<value>1</value>
</data>
""")
value = {'value': 1}
xml.parse_from_string(processor, xml_string)
xml.serialize_to_string(processor, value)
def _assert_invalid(processor, value, xml_string):
"""Assert the processor rejects the XML and value as invalid."""
with pytest.raises(_ValidationError):
xml.parse_from_string(processor, xml_string)
with pytest.raises(_ValidationError):
xml.serialize_to_string(processor, value)
def _assert_valid(processor, value, xml_string):
"""Assert the processor accepts the XML and value as valid."""
actual_value = xml.parse_from_string(processor, xml_string)
assert value == actual_value
actual_xml_string = xml.serialize_to_string(processor, value)
assert xml_string == actual_xml_string
| mit | 1,754,847,672,226,905,900 | 23.516502 | 85 | 0.522515 | false |
openaid-IATI/OIPA | OIPA/iati_synchroniser/tests/test_create_publisher_organisation.py | 1 | 1867 | import unittest
from django.test import TestCase
from iati.factory import iati_factory
from iati_organisation.models import Organisation
from iati_synchroniser.create_publisher_organisation import (
create_publisher_organisation
)
from iati_synchroniser.factory import synchroniser_factory
class CreatePublisherOrganisationTestCase(TestCase):
"""
Test creation of a organisation on adding a publisher
"""
def setUp(self):
iati_factory.LanguageFactory.create(code='en', name='English')
iati_factory.VersionFactory.create(code='2.02', name='2.02')
iati_factory.OrganisationTypeFactory.create(
code='22', name='Multilateral')
@unittest.skip("Not implemented")
def test_update_or_create_publisher_organisation(self):
"""
check if dataset is saved as expected
"""
# setup
publisher = synchroniser_factory.PublisherFactory.create(
organisation=None)
publisher_organization_type = "22"
# call
create_publisher_organisation(publisher, publisher_organization_type)
# prepare
publisher.refresh_from_db()
organisation = Organisation.objects.get(
organisation_identifier=publisher.publisher_iati_id)
# assert
self.assertEqual(publisher.publisher_iati_id,
organisation.organisation_identifier)
self.assertEqual(publisher.display_name,
organisation.name.narratives.first().content)
self.assertEqual(publisher_organization_type, organisation.type.code)
self.assertEqual(publisher.publisher_iati_id,
organisation.reporting_org.reporting_org_identifier)
self.assertEqual(publisher.display_name,
organisation.reporting_org.narratives.first().content)
| agpl-3.0 | 7,065,102,109,405,925,000 | 34.903846 | 79 | 0.677022 | false |
AusTac/parma | b3/parsers/et.py | 1 | 7934 | # BigBrotherBot(B3) (www.bigbrotherbot.net)
# Copyright (C) 2005 Michael "ThorN" Thornton
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
__author__ = 'ThorN'
__version__ = '0.0.1'
import re, string
import b3
from b3.parsers.q3a.abstractParser import AbstractParser
import PunkBuster
class EtParser(AbstractParser):
gameName = 'et'
privateMsg = False
_settings = {}
_settings['line_length'] = 65
_settings['min_wrap_length'] = 90
_commands = {}
_commands['message'] = 'qsay %s %s ^8[pm]^7 %s'
_commands['say'] = 'qsay %s %s'
_commands['set'] = 'set %s %s'
_commands['kick'] = 'clientkick %s %s'
_commands['ban'] = 'banid %s %s'
_commands['tempban'] = 'clientkick %s %s'
_eventMap = {
'warmup' : b3.events.EVT_GAME_WARMUP,
'restartgame' : b3.events.EVT_GAME_ROUND_END
}
    # strip the timestamp prefix from the line
_lineClear = re.compile(r'^(?:[0-9:.]+\s?)?')
_lineFormats = (
#1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<pbid>[0-9A-Z]{32}):\s*(?P<name>[^:]+):\s*(?P<num1>[0-9]+):\s*(?P<num2>[0-9]+):\s*(?P<ip>[0-9.]+):(?P<port>[0-9]+))$', re.IGNORECASE),
#1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
#1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<name>.+):\s+(?P<text>.*))$', re.IGNORECASE),
#1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<acid>[0-9]+)\s(?P<aweap>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+):\s*(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>(?P<cid>[0-9]+)\s(?P<text>.*))$', re.IGNORECASE),
re.compile(r'^(?P<action>[a-z]+):\s*(?P<data>.*)$', re.IGNORECASE)
)
PunkBuster = None
def startup(self):
# add the world client
client = self.clients.newBaseClient()
client.name = 'World'
client.cid = -1
client.guid = self.gameName + ':WORLD'
client.maxLevel = -1
client.hide = True
self.clients.update(client)
self.PunkBuster = PunkBuster.PunkBuster(self)
def message(self, client, text):
try:
if client == None:
self.say(text)
elif client.cid == None:
pass
else:
lines = []
for line in self.getWrap(text, self._settings['line_length'], self._settings['min_wrap_length']):
lines.append('qsay %s ^8[%s^8]^7 %s' % (self.msgPrefix, client.exactName, line))
self.writelines(lines)
except:
pass
# join
#1579:03ConnectInfo: 0: E24F9B2702B9E4A1223E905BF597FA92: ^w[^2AS^w]^2Lead: 3: 3: 24.153.180.106:2794
def OnConnectinfo(self, action, data, match=None):
guid = match.group('pbid')
client = self.clients.getByCID(match.group('cid'))
if client:
if client.guid == guid:
# this is the same player
if client.exactName != match.group('name'):
client.exactName = match.group('name')
client.setName(self.stripColors(client.exactName))
return b3.events.Event(b3.events.EVT_CLIENT_JOIN, None, client)
else:
# disconnect the existing client
self.verbose('disconnect the existing client %s %s => %s %s', match.group('cid'), guid, client.cid, client)
client.disconnect()
client = self.clients.newBaseClient()
client.cid = match.group('cid')
#if match.group('guid') == '0':
# client.guid = None
#else:
client.pbid = client.guid = self.gameName + ':' + guid
client.ip = match.group('ip')
client.exactName = match.group('name')
client.name = self.stripColors(client.exactName)
self.clients.update(client)
#1579:03ClientUserinfoChangedGUID: 0 E24F9B2702B9E4A1223E905BF597FA92 n\^w[^2AS^w]^2Lead\t\3\c\3\r\0\m\0000000\s\0000000\dn\\dr\0\w\3\lw\3\sw\7\mu\0\ref\0
def OnClientuserinfochangedguid(self, action, data, match=None):
client = self.clients.getByCID(match.group('cid'))
cid, pbid, data = string.split(data, ' ', 2)
bclient = self.parseUserInfo(cid + ' ' + data)
if bclient:
self.clients.update(bclient, client)
def OnGib(self, action, data, match=None):
#1538:42Gib: 5 10 1: ^0Apache Death gibbed ^,^t^9^8that ^2guy by MOD_MACHINEGUN
victim = self.clients.getByCID(match.group('cid'))
if not victim:
self.debug('No victim')
#self.OnJ(action, data, match)
return None
attacker = self.clients.getByCID(match.group('acid'))
if not attacker:
self.debug('No attacker')
return None
event = b3.events.EVT_CLIENT_GIB
if attacker.cid == victim.cid:
event = b3.events.EVT_CLIENT_GIB_SELF
elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
event = b3.events.EVT_CLIENT_GIB_TEAM
return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)
def OnKill(self, action, data, match=None):
#1536:37Kill: 1 18 9: ^1klaus killed ^1[pura]fox.nl by MOD_MP40
victim = self.clients.getByCID(match.group('cid'))
if not victim:
self.debug('No victim')
#self.OnJ(action, data, match)
return None
attacker = self.clients.getByCID(match.group('acid'))
if not attacker:
self.debug('No attacker')
return None
event = b3.events.EVT_CLIENT_KILL
if attacker.cid == victim.cid:
event = b3.events.EVT_CLIENT_SUICIDE
elif attacker.team != b3.TEAM_UNKNOWN and attacker.team == victim.team:
event = b3.events.EVT_CLIENT_KILL_TEAM
return b3.events.Event(event, (100, match.group('aweap'), ''), attacker, victim)
def OnSayteamc(self, action, data, match=None):
#1536:34sayteamc: 17: ^1[^7DP^1]^4Timekiller: ^4ammo ^2here !!!!!
client = self.clients.getByCID(match.group('cid'))
if not client:
self.debug('No client - attempt join')
#self.OnJ(action, data, match)
#client = self.clients.getByCID(match.group('cid'))
#if not client:
return None
return b3.events.Event(b3.events.EVT_CLIENT_TEAM_SAY, match.group('text'), client)
def OnSayc(self, action, data, match=None):
#1536:17sayc: 0: ^w[^2AS^w]^2Lead: sorry...
client = self.clients.getByCID(match.group('cid'))
if not client:
self.debug('No client - attempt join')
#self.OnJ(action, data, match)
#client = self.clients.getByCID(match.group('cid'))
#if not client:
return None
return b3.events.Event(b3.events.EVT_CLIENT_SAY, match.group('text'), client) | gpl-2.0 | -8,859,111,680,739,127,000 | 38.874372 | 209 | 0.584069 | false |
skakri/django-unstructured | wiki/core/permissions.py | 1 | 3004 | from wiki.conf import settings
###############################
# TARGET PERMISSION HANDLING #
###############################
#
# All functions are:
# can_something(target, user)
# => True/False
#
# All functions can be replaced by pointing their relevant
# settings variable in wiki.conf.settings to a callable(target, user)
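#
# Example (hypothetical override; the exact mechanism depends on how
# wiki.conf.settings is populated) - pointing CAN_READ at a callable
# short-circuits the default logic below:
#
#     settings.CAN_READ = lambda target, user: user.is_superuser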
def can_read(target, user):
if callable(settings.CAN_READ):
return settings.CAN_READ(target, user)
else:
# Deny reading access to deleted entities if user has no delete access
is_deleted = target.current_revision and target.deleted
if is_deleted and not target.can_delete(user):
return False
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS:
return False
elif target.other_read:
return True
elif user.is_anonymous():
return False
if user == target.owner:
return True
if target.group_read:
if target.group and user.groups.filter(id=target.group.id).exists():
return True
if target.can_moderate(user):
return True
return False
def can_write(target, user):
if callable(settings.CAN_WRITE):
return settings.CAN_WRITE(target, user)
# Check access for other users...
if user.is_anonymous() and not settings.ANONYMOUS_WRITE:
return False
elif target.other_write:
return True
elif user.is_anonymous():
return False
if user == target.owner:
return True
if target.group_write:
if target.group and user and user.groups.filter(id=target.group.id).exists():
return True
if target.can_moderate(user):
return True
return False
def can_assign(target, user):
if callable(settings.CAN_ASSIGN):
return settings.CAN_ASSIGN(target, user)
return not user.is_anonymous() and user.has_perm('wiki.assign')
def can_assign_owner(target, user):
if callable(settings.CAN_ASSIGN_OWNER):
return settings.CAN_ASSIGN_OWNER(target, user)
return False
def can_change_permissions(target, user):
if callable(settings.CAN_CHANGE_PERMISSIONS):
return settings.CAN_CHANGE_PERMISSIONS(target, user)
return (
not user.is_anonymous() and (
target.owner == user or
user.has_perm('wiki.assign')
)
)
def can_delete(target, user):
if callable(settings.CAN_DELETE):
return settings.CAN_DELETE(target, user)
return not user.is_anonymous() and target.can_write(user)
def can_moderate(target, user):
if callable(settings.CAN_MODERATE):
return settings.CAN_MODERATE(target, user)
return not user.is_anonymous() and user.has_perm('wiki.moderate')
def can_admin(target, user):
if callable(settings.CAN_ADMIN):
return settings.CAN_ADMIN(target, user)
return not user.is_anonymous() and user.has_perm('wiki.admin')
| gpl-3.0 | 412,424,334,553,834,300 | 31.652174 | 85 | 0.632823 | false |
mobify/iterstuff | iterstuff/recipes.py | 1 | 4131 | from __future__ import absolute_import
from iterstuff.lookahead import Lookahead
def repeatable_takewhile(predicate, iterable):
"""
Return successive entries from an iterable as long as the
predicate evaluates to true for each entry.
Like itertools.takewhile, but does not consume the first
element of the iterable that fails the predicate test.
:param predicate: a single-element callable that returns True
for elements that satisfy a condition, False for those that
do not.
:param iterable: must be a Lookahead
"""
# Assert that the iterable is a Lookahead. The act of wrapping
# an iterable in a Lookahead consumes the first element, so we
# cannot do the wrapping inside this function.
if not isinstance(iterable, Lookahead):
raise TypeError("The iterable parameter must be a Lookahead")
# Use 'peek' to check if the next element will satisfy the
# predicate, and yield while this is True, or until we reach
# the end of the iterable.
while (not iterable.atend) and predicate(iterable.peek):
yield iterable.next()
def batch(iterable, size):
"""
Yield iterables for successive slices of `iterable`, each containing
up to `size` items, with the last being less than `size` if there are
not sufficient items in `iterable`. Pass over the input iterable once
only. Yield iterables, not lists.
@note: each output iterable must be consumed in full before the next
one is yielded. So list(batch(xrange(10), 3)) won't work as expected,
because the iterables are not consumed.
@param iterable: an input iterable.
@param size: the maximum number of items yielded by any output iterable.
"""
# Wrap an enumeration of the iterable in a Lookahead so that it
# yields (count, element) tuples
it = Lookahead(enumerate(iterable))
while not it.atend:
# Set the end_count using the count value
# of the next element.
end_count = it.peek[0] + size
# Yield a generator that will then yield up to
# 'size' elements from 'it'.
yield (
element
for counter, element in repeatable_takewhile(
# t[0] is the count part of each element
lambda t: t[0] < end_count,
it
)
)
def chunked(i, f=lambda _x: _x):
"""
Given an iterable i, apply f over it to extract a value from
each element and yield successive iterables where the result
of f for all elements is the same.
In simpler language, if i is an iterable sorted on some key, yield
chunks of that list where the key value is the same, each chunk being
a separate iterable.
Note that this function yields B{iterators}, not lists, and they refer
back to the iterator passed in, so each B{must} be consumed completely
before the next one is requested.
@param i: an iterable.
@param f: a function to be applied to each element of the iterable to
extract the key.
"""
# Build a generator that return tuples of (element, key-of-element),
# so that we only apply the key method to each element once.
it = Lookahead((_x, f(_x)) for _x in i)
def takechunk():
"""
A generator closure that will yield values while the keys remain
the same. Note that we cannot use L{itertools.takewhile} for this,
because that takes elements and B{then} checks the predicate, so
successive calls to itertools.takewhile for the same generator will
skip elements.
"""
while True:
# Always yield the first element: if we're at the end of the
# generator, this will raise StopIteration and we're done.
(_x, key) = it.next()
yield _x
# Check the lookahead's peek value to see if we should break now.
# We also break when we're at the end of the generator.
if it.atend or key != it.peek[1]:
break
# Yield successive instances of takechunk.
while not it.atend:
yield takechunk()
| mit | -8,346,311,089,307,318,000 | 36.554545 | 77 | 0.658678 | false |
mvpoland/django-smsgateway | smsgateway/views.py | 1 | 1834 | from django import forms
from django.http import Http404
from django.conf import settings
from django.shortcuts import render
from django.contrib.admin.views.decorators import staff_member_required
from smsgateway import send, __version__
from smsgateway.backends import get_backend
accounts = getattr(settings, 'SMSGATEWAY_ACCOUNTS', {})
class BackendDebugForm(forms.Form):
account = forms.ChoiceField(choices=[(k, k) for k in list(accounts.keys()) if k != '__default__'])
recipients = forms.CharField(help_text='Separate multiple recipients with a semicolon (;).')
message = forms.CharField(widget=forms.widgets.Textarea())
signature = forms.CharField()
@staff_member_required
def backend_debug(request):
"""
A form to let you send an SMS for debugging purposes.
"""
context = {}
if request.method == 'POST':
form = BackendDebugForm(request.POST)
if form.is_valid():
success = send(
form.cleaned_data['recipients'].split(';'),
form.cleaned_data['message'],
form.cleaned_data['signature'],
form.cleaned_data['account']
)
if success:
context.update({'message': 'Text message sent'})
else:
context.update({'message': 'Sending failed'})
else:
form = BackendDebugForm()
context.update({
'form': form,
'version': __version__,
})
return render(request, 'smsgateway/backend_debug.html', context)
def backend_handle_incoming(request, backend_name):
"""
Call the backend's handle_incoming method.
"""
if backend_name == 'debug':
return backend_debug(request)
b = get_backend(backend_name)
if b is None:
raise Http404
return b.handle_incoming(request)
| bsd-3-clause | 7,037,312,430,688,589,000 | 29.566667 | 102 | 0.632497 | false |
wcota/dynSIS-py | dynamics.py | 1 | 8344 | #!/usr/bin/env python
# ! ## File: dynamics.py
# ! ## See README.md for more information and use
# !-----------------------------------------------------------------------------
# ! SIS epidemic model algorithm based on the article
# ! Computer Physics Communications 219C (2017) pp. 303-312
# ! "Optimized Gillespie algorithms for the simulation of
# ! Markovian epidemic processes on large and heterogeneous networks"
# ! Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira
# !
# ! Please cite the above cited paper (available at <http://dx.doi.org/10.1016/j.cpc.2017.06.007> )
# ! as reference to our code.
# !
# ! This program is free software: you can redistribute it and/or modify
# ! it under the terms of the GNU General Public License as published by
# ! the Free Software Foundation, either version 3 of the License, or
# ! (at your option) any later version.
# !
# ! This program is distributed in the hope that it will be useful,
# ! but WITHOUT ANY WARRANTY; without even the implied warranty of
# ! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# ! GNU General Public License for more details.
# !
# ! You should have received a copy of the GNU General Public License
# ! along with this program. If not, see <http://www.gnu.org/licenses/>.
# !-----------------------------------------------------------------------------
# ! Author : Wesley Cota
# ! Email : [email protected]
# ! Date : 27 Mar 2017
# ! Version : 1.0
# !-----------------------------------------------------------------------------
# ! See README.md for more details
# ! This code is available at <https://github.com/wcota/dynSIS-py>
# ! For performance, see <https://github.com/wcota/dynSIS> (Fortran implementation)
# ! For NetworkX library, see <https://github.com/wcota/dynSIS-networkx> (NetworkX implementation)
from network import *
from tools import *
from math import log
import sys
print( '################################################################################',
'######### Optimized Gillespie algorithms for the simulation of Markovian ######',
'####### epidemic processes on large and heterogeneous networks: SIS-OGA. #######',
'##============ Copyright (C) 2017 Wesley Cota, Silvio C. Ferreira ============##',
'##===== Paper available at <http://dx.doi.org/10.1016/j.cpc.2017.06.007> =====##',
'##======= The codes are available at <https://github.com/wcota/dynSIS> =======##',
'##======== Please cite the above cited paper as reference to our code ========##',
'##=== This code is under GNU General Public License. Please see README.md. ===##',
'################################################################################',
'',
sep='\n')
# READING PARAMETERS
if len(sys.argv) < 3:
print_error('You must enter input and output names as arguments!')
fnInput = sys.argv[1]
fnOutput = sys.argv[2]
print_info('Reading dynamical parameters...')
dynp_sam = int(input('How many dynamics samples? '))
dynp_lb = float(input('Value of infection rate lambda (mu is defined as equal to 1): '))
dynp_tmax = int(input('Maximum time steps (it stops if the absorbing state is reached): '))
dynp_pINI = float(input('Fraction of infected vertices on the network as initial condition (randomized \
for each sample): '))
# / READING PARAMETERS
# LOADING NETWORK
print_info('Loading network to memory...')
netw = readEdges(fnInput)
print_info('Everything ok!')
# / LOADING NETWORK
# PREPARING THE NECESSARY THINGS
net_kmax = max(netw.k) # Used in the rejection probability
avg_rho = np.zeros(dynp_tmax, np.float64) # accumulator for rho at each time t (averaged over samples at output)
avg_t = np.zeros(dynp_tmax, np.float64)
avg_sam = np.zeros(dynp_tmax, np.int) # number of samples for each time t
avg_samSurv = np.zeros(dynp_tmax, np.int) # and of survivng ones
dyn_VI = np.zeros(netw.size, np.int) # list V^I
dyn_sig = np.zeros(netw.size, np.int) # sigma
# / PREPARING THE NECESSARY THINGS
# RUNNING DYNAMICS
print_info('Running dynamics...', True)
dyn_dt_pos_max = 0 # Auxiliar
for sam in range(1,dynp_sam+1):
print_info('Sample #'+str(sam), True)
# Initial conditions
print_info('Initial condition...')
dyn_sig[:] = 0.0
dyn_VI[:] = 0.0
dyn_NI = 0 # N_I
dyn_Nk = 0 # N_k
    # Randomly select vertices and apply the initial condition
for i in range(0, int(netw.size*dynp_pINI)):
while True:
ver = np.random.randint(0,netw.size)
if dyn_sig[ver] == 0:
dyn_VI[dyn_NI] = ver
dyn_NI += 1
dyn_sig[ver] = 1
dyn_Nk += netw.k[ver]
break
# Run dynamics
dyn_t = 0
dyn_dt = 0.0
dyn_dt_pos = 1
print_info('Running...')
while dyn_t <= dynp_tmax and dyn_NI > 0:
# SIS-OGA ALGORITHM
# Calculate the total rate
dyn_R = (dyn_NI + 1.0*dynp_lb * dyn_Nk)
# Select the time step
rnd = max(np.random.uniform(),1e-12) # Avoid u = 0
dyn_dt = -log(rnd) / dyn_R
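        # (dt is an exponential waiting time with rate dyn_R, i.e. dt ~ Exp(R),
        # as in the standard Gillespie algorithm)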
# Update the time
dyn_t += dyn_dt
# Probability m to heal
dyn_m = 1.0*dyn_NI / dyn_R
if np.random.uniform() < dyn_m: # Select a random occupied vertex and heal.
pos_inf = np.random.randint(0,dyn_NI)
ver = dyn_VI[pos_inf]
# Then, heal it
dyn_sig[ver] = 0
dyn_Nk -= netw.k[ver]
dyn_NI -= 1
dyn_VI[pos_inf] = dyn_VI[dyn_NI]
else: # If not, try to infect: w = 1 - m
# Select the infected vertex i with prob. proportional to k_i
while True:
pos_inf = np.random.randint(0,dyn_NI)
ver = dyn_VI[pos_inf]
if np.random.uniform() < 1.0*netw.k[ver] / (1.0*net_kmax):
break
# Select one of its neighbors
pos_nei = np.random.randint(netw.ini[ver], netw.ini[ver] + netw.k[ver])
ver = netw.con[pos_nei]
if dyn_sig[ver] == 0: # if not a phantom process, infect
dyn_sig[ver] = 1
dyn_Nk += netw.k[ver]
dyn_VI[dyn_NI] = ver # Add one element to list
dyn_NI += 1 # Increase by 1 the list
# Try to save the dynamics by time unit
while (dyn_t >= dyn_dt_pos): # Save data
avg_rho[dyn_dt_pos - 1] += 1.0*dyn_NI/netw.size
avg_t[dyn_dt_pos - 1] += dyn_t
avg_sam[dyn_dt_pos - 1] += 1
if dyn_NI != 0:
avg_samSurv[dyn_dt_pos - 1] += 1
dyn_dt_pos_max = max(dyn_dt_pos,dyn_dt_pos_max) # The maximum t with non-null rho
dyn_dt_pos += 1
    # if an absorbing state is reached, the dynamics loop above exits
# Write output file
flOutput = open(fnOutput, 'wt')
print( '## ***** Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python) *****',
'#@ Network file: '+fnInput,
'#@ Number of nodes: '+str(netw.size),
'#@ Number of edges: '+str(netw.skk),
'#@ Samples: '+str(dynp_sam),
'#! Infection rate (lambda): '+str(dynp_lb),
'#! Maximum time steps: '+str(dynp_tmax),
'#! Fraction of infected vertices (initial condition): '+str(dynp_pINI),
sep='\n',
file=flOutput)
for dt_pos in range(0,dyn_dt_pos_max):
print(1.0*avg_t[dt_pos]/avg_sam[dt_pos], 1.0*avg_rho[dt_pos]/(1.0*sam),
file=flOutput)
# If you use /avg_samSurv[dt_pos] instead of /(1.0*sam) to write avg_rho (2nd column), you have
# QS analysis :)
flOutput.close()
# / RUNNING DYNAMICS
print_info('')
print_info('Everything ok!',True)
print_info('Input file (edges list): '+ fnInput)
print_info('Output file: '+ fnOutput)
print_info('')
print_info('*****Algorithm used: Optimized Gillespie Algorithm for SIS (SIS-OGA, Python)*****')
print_info('Codes available at <https://github.com/wcota/dynSIS>.')
| gpl-3.0 | 8,997,114,562,609,873,000 | 40.103448 | 104 | 0.539909 | false |
NMTHydro/Recharge | utils/TAW_optimization_subroutine/create_geo_info_file.py | 1 | 2526 | # ===============================================================================
# Copyright 2018 gabe-parrish
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= standard library imports ========================
import os
import gdal
import sys
import yaml
# ============= local library imports ===========================
def extract_geo_info(geotiff_path):
""""""
gdal.AllRegister()
# open the raster datasource
datasource_obj = gdal.Open(geotiff_path)
if datasource_obj is None:
print "Can't open the datasource from {}".format(geotiff_path)
sys.exit(1)
# get the size of image (for reading)
rows = datasource_obj.RasterYSize
cols = datasource_obj.RasterXSize
# x - cols, y - rows
dimensions = (cols, rows)
# get the projection
proj = datasource_obj.GetProjection()
# get georefference info to eventually calculate the offset:
transform = datasource_obj.GetGeoTransform()
geo_dict = {'geotransform': transform, 'dimensions': dimensions, 'projection': proj}
return geo_dict
def main(sample_file, output_path, filename):
"""
    Take an ETRM domain and save the pertinent geo information to a YAML file
    :param sample_file: filepath to a geotiff representing the ETRM model domain for the TAW optimization
:return:
"""
geo_dict = extract_geo_info(sample_file)
# write_raster(array, geotransform, output_path, output_filename, dimensions, projection)
yml_file = os.path.join(output_path, filename)
with open(yml_file, 'w') as w_file:
yaml.dump(geo_dict, w_file)
if __name__ == "__main__":
sample_geotiff_file_path = '/Volumes/Seagate_Expansion_Drive/ETRM_espanola_aoi_inputs/statics/taw_reduced.tif'
output_path = '/Volumes/Seagate_Expansion_Drive/taw_optimization_work_folder'
main(sample_file=sample_geotiff_file_path, output_path=output_path, filename='geo_info_espanola.yml') | apache-2.0 | -6,306,104,769,306,550,000 | 33.148649 | 114 | 0.644101 | false |
gurneyalex/odoo | addons/mrp/wizard/mrp_product_produce.py | 3 | 8943 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import datetime
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_compare
class MrpProductProduce(models.TransientModel):
_name = "mrp.product.produce"
_description = "Record Production"
_inherit = ["mrp.abstract.workorder"]
@api.model
def default_get(self, fields):
res = super(MrpProductProduce, self).default_get(fields)
production = self.env['mrp.production']
production_id = self.env.context.get('default_production_id') or self.env.context.get('active_id')
if production_id:
production = self.env['mrp.production'].browse(production_id)
if production.exists():
serial_finished = (production.product_id.tracking == 'serial')
todo_uom = production.product_uom_id.id
todo_quantity = self._get_todo(production)
if serial_finished:
todo_quantity = 1.0
if production.product_uom_id.uom_type != 'reference':
todo_uom = self.env['uom.uom'].search([('category_id', '=', production.product_uom_id.category_id.id), ('uom_type', '=', 'reference')]).id
if 'production_id' in fields:
res['production_id'] = production.id
if 'product_id' in fields:
res['product_id'] = production.product_id.id
if 'product_uom_id' in fields:
res['product_uom_id'] = todo_uom
if 'serial' in fields:
res['serial'] = bool(serial_finished)
if 'qty_producing' in fields:
res['qty_producing'] = todo_quantity
if 'consumption' in fields:
res['consumption'] = production.bom_id.consumption
return res
serial = fields.Boolean('Requires Serial')
product_tracking = fields.Selection(related="product_id.tracking")
is_pending_production = fields.Boolean(compute='_compute_pending_production')
move_raw_ids = fields.One2many(related='production_id.move_raw_ids', string="PO Components")
move_finished_ids = fields.One2many(related='production_id.move_finished_ids')
raw_workorder_line_ids = fields.One2many('mrp.product.produce.line',
'raw_product_produce_id', string='Components')
finished_workorder_line_ids = fields.One2many('mrp.product.produce.line',
'finished_product_produce_id', string='By-products')
production_id = fields.Many2one('mrp.production', 'Manufacturing Order',
required=True, ondelete='cascade')
@api.depends('qty_producing')
def _compute_pending_production(self):
""" Compute if it exits remaining quantity once the quantity on the
current wizard will be processed. The purpose is to display or not
button 'continue'.
"""
for product_produce in self:
remaining_qty = product_produce._get_todo(product_produce.production_id)
product_produce.is_pending_production = remaining_qty - product_produce.qty_producing > 0.0
def continue_production(self):
""" Save current wizard and directly opens a new. """
self.ensure_one()
self._record_production()
action = self.production_id.open_produce_product()
action['context'] = {'default_production_id': self.production_id.id}
return action
def action_generate_serial(self):
self.ensure_one()
product_produce_wiz = self.env.ref('mrp.view_mrp_product_produce_wizard', False)
self.finished_lot_id = self.env['stock.production.lot'].create({
'product_id': self.product_id.id,
'company_id': self.production_id.company_id.id
})
return {
'name': _('Produce'),
'type': 'ir.actions.act_window',
'view_mode': 'form',
'res_model': 'mrp.product.produce',
'res_id': self.id,
'view_id': product_produce_wiz.id,
'target': 'new',
}
def do_produce(self):
""" Save the current wizard and go back to the MO. """
self.ensure_one()
self._record_production()
self._check_company()
return {'type': 'ir.actions.act_window_close'}
def _get_todo(self, production):
""" This method will return remaining todo quantity of production. """
main_product_moves = production.move_finished_ids.filtered(lambda x: x.product_id.id == production.product_id.id)
todo_quantity = production.product_qty - sum(main_product_moves.mapped('quantity_done'))
todo_quantity = todo_quantity if (todo_quantity > 0) else 0
return todo_quantity
def _record_production(self):
        # Check that every product_produce line has a move_id (the user can add
        # products to consume directly in the wizard)
for line in self._workorder_line_ids():
if not line.move_id:
# Find move_id that would match
if line.raw_product_produce_id:
moves = line.raw_product_produce_id.move_raw_ids
else:
moves = line.finished_product_produce_id.move_finished_ids
move_id = moves.filtered(lambda m: m.product_id == line.product_id and m.state not in ('done', 'cancel'))
if not move_id:
# create a move to assign it to the line
production = line._get_production()
if line.raw_product_produce_id:
values = {
'name': production.name,
'reference': production.name,
'product_id': line.product_id.id,
'product_uom': line.product_uom_id.id,
'location_id': production.location_src_id.id,
'location_dest_id': self.product_id.property_stock_production.id,
'raw_material_production_id': production.id,
'group_id': production.procurement_group_id.id,
'origin': production.name,
'state': 'confirmed',
'company_id': production.company_id.id,
}
else:
values = production._get_finished_move_value(line.product_id.id, 0, line.product_uom_id.id)
move_id = self.env['stock.move'].create(values)
line.move_id = move_id.id
# because of an ORM limitation (fields on transient models are not
# recomputed by updates in non-transient models), the related fields on
# this model are not recomputed by the creations above
self.invalidate_cache(['move_raw_ids', 'move_finished_ids'])
# Save product produce lines data into stock moves/move lines
for wizard in self:
quantity = wizard.qty_producing
if float_compare(quantity, 0, precision_rounding=self.product_uom_id.rounding) <= 0:
raise UserError(_("The production order for '%s' has no quantity specified.") % self.product_id.display_name)
self._update_finished_move()
self._update_moves()
self.production_id.filtered(lambda mo: mo.state == 'confirmed').write({
'date_start': datetime.now(),
})
class MrpProductProduceLine(models.TransientModel):
_name = 'mrp.product.produce.line'
_inherit = ["mrp.abstract.workorder.line"]
_description = "Record production line"
raw_product_produce_id = fields.Many2one('mrp.product.produce', 'Component in Produce wizard')
finished_product_produce_id = fields.Many2one('mrp.product.produce', 'Finished Product in Produce wizard')
@api.model
def _get_raw_workorder_inverse_name(self):
return 'raw_product_produce_id'
@api.model
def _get_finished_workoder_inverse_name(self):
return 'finished_product_produce_id'
def _get_final_lots(self):
product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id
return product_produce_id.finished_lot_id | product_produce_id.finished_workorder_line_ids.mapped('lot_id')
def _get_production(self):
product_produce_id = self.raw_product_produce_id or self.finished_product_produce_id
return product_produce_id.production_id
@api.onchange('lot_id')
def _onchange_lot_id(self):
""" When the user is encoding a produce line for a tracked product, we apply some logic to
help him. This onchange will automatically switch `qty_done` to 1.0.
"""
if self.product_id.tracking == 'serial':
if self.lot_id:
self.qty_done = 1
else:
self.qty_done = 0
| agpl-3.0 | -4,611,204,496,198,219,300 | 45.82199 | 158 | 0.603377 | false |
makinacorpus/reportlab-ecomobile | src/reportlab/graphics/charts/doughnut.py | 1 | 13260 | #Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/doughnut.py
# doughnut chart
__version__=''' $Id$ '''
__doc__="""Doughnut chart
Produces a circular chart like the doughnut charts produced by Excel.
Can handle multiple series (which produce concentric 'rings' in the chart).
"""
import copy
from math import sin, cos, pi
from types import ListType, TupleType
from reportlab.lib import colors
from reportlab.lib.validators import isColor, isNumber, isListOfNumbersOrNone,\
isListOfNumbers, isColorOrNone, isString,\
isListOfStringsOrNone, OneOf, SequenceOf,\
isBoolean, isListOfColors,\
isNoneOrListOfNoneOrStrings,\
isNoneOrListOfNoneOrNumbers,\
isNumberOrNone
from reportlab.lib.attrmap import *
from reportlab.pdfgen.canvas import Canvas
from reportlab.graphics.shapes import Group, Drawing, Line, Rect, Polygon, Ellipse, \
Wedge, String, SolidShape, UserNode, STATE_DEFAULTS
from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection, PropHolder
from reportlab.graphics.charts.piecharts import AbstractPieChart, WedgeProperties, _addWedgeLabel
from reportlab.graphics.charts.textlabels import Label
from reportlab.graphics.widgets.markers import Marker
class SectorProperties(WedgeProperties):
"""This holds descriptive information about the sectors in a doughnut chart.
It is not to be confused with the 'sector itself'; this just holds
a recipe for how to format one, and does not allow you to hack the
angles. It can format a genuine Sector object for you with its
format method.
"""
_attrMap = AttrMap(BASE=WedgeProperties,
)
class Doughnut(AbstractPieChart):
_attrMap = AttrMap(
x = AttrMapValue(isNumber, desc='X position of the chart within its container.'),
y = AttrMapValue(isNumber, desc='Y position of the chart within its container.'),
width = AttrMapValue(isNumber, desc='width of doughnut bounding box. Need not be same as width.'),
height = AttrMapValue(isNumber, desc='height of doughnut bounding box. Need not be same as height.'),
data = AttrMapValue(None, desc='list of numbers defining sector sizes; need not sum to 1'),
labels = AttrMapValue(isListOfStringsOrNone, desc="optional list of labels to use for each data point"),
startAngle = AttrMapValue(isNumber, desc="angle of first slice; like the compass, 0 is due North"),
direction = AttrMapValue(OneOf('clockwise', 'anticlockwise'), desc="'clockwise' or 'anticlockwise'"),
slices = AttrMapValue(None, desc="collection of sector descriptor objects"),
simpleLabels = AttrMapValue(isBoolean, desc="If true(default) use String not super duper WedgeLabel"),
)
def __init__(self):
self.x = 0
self.y = 0
self.width = 100
self.height = 100
self.data = [1,1]
self.labels = None # or list of strings
self.startAngle = 90
self.direction = "clockwise"
self.simpleLabels = 1
self.slices = TypedPropertyCollection(SectorProperties)
self.slices[0].fillColor = colors.darkcyan
self.slices[1].fillColor = colors.blueviolet
self.slices[2].fillColor = colors.blue
self.slices[3].fillColor = colors.cyan
def demo(self):
d = Drawing(200, 100)
dn = Doughnut()
dn.x = 50
dn.y = 10
dn.width = 100
dn.height = 80
dn.data = [10,20,30,40,50,60]
dn.labels = ['a','b','c','d','e','f']
dn.slices.strokeWidth=0.5
dn.slices[3].popout = 10
dn.slices[3].strokeWidth = 2
dn.slices[3].strokeDashArray = [2,2]
dn.slices[3].labelRadius = 1.75
dn.slices[3].fontColor = colors.red
dn.slices[0].fillColor = colors.darkcyan
dn.slices[1].fillColor = colors.blueviolet
dn.slices[2].fillColor = colors.blue
dn.slices[3].fillColor = colors.cyan
dn.slices[4].fillColor = colors.aquamarine
dn.slices[5].fillColor = colors.cadetblue
dn.slices[6].fillColor = colors.lightcoral
d.add(dn)
return d
def normalizeData(self, data=None):
from operator import add
sum = float(reduce(add,data,0))
return abs(sum)>=1e-8 and map(lambda x,f=360./sum: f*x, data) or len(data)*[0]
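    # normalizeData rescales the raw values so the slice angles sum to 360
    # degrees; an all-zero series maps every sector to zero size.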
def makeSectors(self):
# normalize slice data
if type(self.data) in (ListType, TupleType) and type(self.data[0]) in (ListType, TupleType):
#it's a nested list, more than one sequence
normData = []
n = []
for l in self.data:
t = self.normalizeData(l)
normData.append(t)
n.append(len(t))
self._seriesCount = max(n)
else:
normData = self.normalizeData(self.data)
n = len(normData)
self._seriesCount = n
#labels
if self.labels is None:
labels = []
if type(n) not in (ListType,TupleType):
labels = [''] * n
else:
for m in n:
labels = list(labels) + [''] * m
else:
labels = self.labels
        #if labels were omitted entirely we silently create them all, so there is
        #no point in raising errors when too few labels are supplied.
if type(n) not in (ListType,TupleType):
i = n-len(labels)
if i>0:
labels = list(labels) + [''] * i
else:
tlab = 0
for m in n:
tlab += m
i = tlab-len(labels)
if i>0:
labels = list(labels) + [''] * i
xradius = self.width/2.0
yradius = self.height/2.0
centerx = self.x + xradius
centery = self.y + yradius
if self.direction == "anticlockwise":
whichWay = 1
else:
whichWay = -1
g = Group()
sn = 0
startAngle = self.startAngle #% 360
styleCount = len(self.slices)
if type(self.data[0]) in (ListType, TupleType):
#multi-series doughnut
iradius = (self.height/5.0)/len(self.data)
for series in normData:
i = 0
for angle in series:
endAngle = (startAngle + (angle * whichWay)) #% 360
if abs(startAngle-endAngle)>=1e-5:
if startAngle < endAngle:
a1 = startAngle
a2 = endAngle
else:
a1 = endAngle
a2 = startAngle
#if we didn't use %stylecount here we'd end up with the later sectors
#all having the default style
sectorStyle = self.slices[i%styleCount]
# is it a popout?
cx, cy = centerx, centery
if sectorStyle.popout != 0:
# pop out the sector
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle * pi/180.0
popdistance = sectorStyle.popout
cx = centerx + popdistance * cos(aveAngleRadians)
cy = centery + popdistance * sin(aveAngleRadians)
if type(n) in (ListType,TupleType):
theSector = Wedge(cx, cy, xradius+(sn*iradius)-iradius, a1, a2, yradius=yradius+(sn*iradius)-iradius, radius1=yradius+(sn*iradius)-(2*iradius))
else:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius)
theSector.fillColor = sectorStyle.fillColor
theSector.strokeColor = sectorStyle.strokeColor
theSector.strokeWidth = sectorStyle.strokeWidth
theSector.strokeDashArray = sectorStyle.strokeDashArray
g.add(theSector)
startAngle = endAngle
text = self.getSeriesName(i,'')
if text:
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle*pi/180.0
labelRadius = sectorStyle.labelRadius
labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius)
labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius)
g.add(_addWedgeLabel(self,text,averageAngle,labelX,labelY,sectorStyle))
i += 1
sn += 1
else:
i = 0
#single series doughnut
iradius = self.height/5.0
for angle in normData:
endAngle = (startAngle + (angle * whichWay)) #% 360
if abs(startAngle-endAngle)>=1e-5:
if startAngle < endAngle:
a1 = startAngle
a2 = endAngle
else:
a1 = endAngle
a2 = startAngle
#if we didn't use %stylecount here we'd end up with the later sectors
#all having the default style
sectorStyle = self.slices[i%styleCount]
# is it a popout?
cx, cy = centerx, centery
if sectorStyle.popout != 0:
# pop out the sector
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle * pi/180.0
popdistance = sectorStyle.popout
cx = centerx + popdistance * cos(aveAngleRadians)
cy = centery + popdistance * sin(aveAngleRadians)
if n > 1:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, radius1=iradius)
elif n==1:
theSector = Wedge(cx, cy, xradius, a1, a2, yradius=yradius, iradius=iradius)
theSector.fillColor = sectorStyle.fillColor
theSector.strokeColor = sectorStyle.strokeColor
theSector.strokeWidth = sectorStyle.strokeWidth
theSector.strokeDashArray = sectorStyle.strokeDashArray
g.add(theSector)
# now draw a label
if labels[i] != "":
averageAngle = (a1+a2)/2.0
aveAngleRadians = averageAngle*pi/180.0
labelRadius = sectorStyle.labelRadius
labelX = centerx + (0.5 * self.width * cos(aveAngleRadians) * labelRadius)
labelY = centery + (0.5 * self.height * sin(aveAngleRadians) * labelRadius)
theLabel = String(labelX, labelY, labels[i])
theLabel.textAnchor = "middle"
theLabel.fontSize = sectorStyle.fontSize
theLabel.fontName = sectorStyle.fontName
theLabel.fillColor = sectorStyle.fontColor
g.add(theLabel)
startAngle = endAngle
i += 1
return g
def draw(self):
g = Group()
g.add(self.makeSectors())
return g
def sample1():
"Make up something from the individual Sectors"
d = Drawing(400, 400)
g = Group()
s1 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=0, endangledegrees=120, radius1=100)
s1.fillColor=colors.red
s1.strokeColor=None
d.add(s1)
s2 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=120, endangledegrees=240, radius1=100)
s2.fillColor=colors.green
s2.strokeColor=None
d.add(s2)
s3 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=240, endangledegrees=260, radius1=100)
s3.fillColor=colors.blue
s3.strokeColor=None
d.add(s3)
s4 = Wedge(centerx=200, centery=200, radius=150, startangledegrees=260, endangledegrees=360, radius1=100)
s4.fillColor=colors.gray
s4.strokeColor=None
d.add(s4)
return d
def sample2():
"Make a simple demo"
d = Drawing(400, 400)
dn = Doughnut()
dn.x = 50
dn.y = 50
dn.width = 300
dn.height = 300
dn.data = [10,20,30,40,50,60]
d.add(dn)
return d
def sample3():
"Make a more complex demo"
d = Drawing(400, 400)
dn = Doughnut()
dn.x = 50
dn.y = 50
dn.width = 300
dn.height = 300
dn.data = [[10,20,30,40,50,60], [10,20,30,40]]
dn.labels = ['a','b','c','d','e','f']
d.add(dn)
return d
if __name__=='__main__':
from reportlab.graphics.renderPDF import drawToFile
d = sample1()
drawToFile(d, 'doughnut1.pdf')
d = sample2()
drawToFile(d, 'doughnut2.pdf')
d = sample3()
drawToFile(d, 'doughnut3.pdf')
| bsd-3-clause | 7,873,950,994,067,001,000 | 36.994269 | 167 | 0.556259 | false |
zentralopensource/zentral | zentral/utils/rison.py | 1 | 2938 | # from https://github.com/pifantastic/python-rison
# encode a json payload in rison
# used in kibana urls
import re
IDCHAR_PUNCTUATION = '_-./~'
NOT_IDCHAR = ''.join([c for c in (chr(i) for i in range(127))
if not (c.isalnum() or c in IDCHAR_PUNCTUATION)])
# Additionally, we need to distinguish ids and numbers by first char.
NOT_IDSTART = '-0123456789'
# Regexp string matching a valid id.
IDRX = ('[^' + NOT_IDSTART + NOT_IDCHAR + '][^' + NOT_IDCHAR + ']*')
# Regexp to check for valid rison ids.
ID_OK_RE = re.compile('^' + IDRX + '$', re.M)
class Encoder(object):
def __init__(self):
pass
@staticmethod
def encoder(v):
if isinstance(v, list):
return Encoder.list
elif isinstance(v, str):
return Encoder.string
elif isinstance(v, bool):
return Encoder.bool
elif isinstance(v, (float, int)):
return Encoder.number
elif isinstance(v, type(None)):
return Encoder.none
elif isinstance(v, dict):
return Encoder.dict
else:
raise AssertionError('Unable to encode type: {0}'.format(type(v)))
@staticmethod
def encode(v):
encoder = Encoder.encoder(v)
return encoder(v)
@staticmethod
def list(x):
a = ['!(']
b = None
for i in range(len(x)):
v = x[i]
f = Encoder.encoder(v)
if f:
v = f(v)
if isinstance(v, str):
if b:
a.append(',')
a.append(v)
b = True
a.append(')')
return ''.join(a)
@staticmethod
def number(v):
return str(v).replace('+', '')
@staticmethod
def none(_):
return '!n'
@staticmethod
def bool(v):
return '!t' if v else '!f'
@staticmethod
def string(v):
if v == '':
return "''"
if ID_OK_RE.match(v):
return v
def replace(match):
if match.group(0) in ["'", '!']:
return '!' + match.group(0)
return match.group(0)
v = re.sub(r'([\'!])', replace, v)
return "'" + v + "'"
@staticmethod
def dict(x):
a = ['(']
b = None
ks = sorted(x.keys())
for i in ks:
v = x[i]
f = Encoder.encoder(v)
if f:
v = f(v)
if isinstance(v, str):
if b:
a.append(',')
a.append(Encoder.string(i))
a.append(':')
a.append(v)
b = True
a.append(')')
return ''.join(a)
def dumps(o):
if not isinstance(o, (dict, list)) or o is None:
raise TypeError('object must be a dict a list or None')
return Encoder.encode(o)
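# Example (sketch): dumps({'a': [1, 'b']}) -> '(a:!(1,b))'
# (dict keys are sorted, id-safe strings stay unquoted, lists use the !(...) form)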
| apache-2.0 | 1,428,220,077,565,753,000 | 23.483333 | 78 | 0.469367 | false |
quodlibetor/dedupe | setup.py | 1 | 1174 | from __future__ import with_statement
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup
import os
with open(os.path.join(os.path.dirname(__file__),"README.rst"), 'r') as fh:
long_desc = fh.read()
VERSION = "0.1.2"
setup(name="dedupe",
version=VERSION,
description="A thing to detect duplicate music",
long_description=long_desc,
author="Brandon W Maister",
author_email="[email protected]",
url="http://bitbucket.org/quodlibetor/dedupe",
py_modules=['dedupe', 'distribute_setup', 'setup'],
entry_points= {'console_scripts': [
'dedupe = dedupe:main'
]},
install_requires=['mutagen', 'argparse'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5", # min
"Operating System :: OS Independent", # I think?
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3)",
]
)
| gpl-3.0 | 8,757,693,658,711,555,000 | 31.611111 | 83 | 0.61414 | false |
RITct/Rita | app/secret_sauce/seqtoseq_model.py | 1 | 11243 | import random
import torch
import torch.nn as nn
from torch.autograd import Variable
from torch import optim
import torch.nn.functional as F
import pickle as pk
use_cuda = torch.cuda.is_available()
SOS_token = 0
EOS_token = 1
MAX_LENGTH = 80
class Dataset:
def __init__(self, name):
self.name = name
self.word2index = {}
self.word2count = {}
self.index2word = {0: "SOS", 1: "EOS"}
self.n_words = 2 # Count SOS and EOS
def addSentence(self, sentence):
for word in sentence.split(' '):
self.addWord(word)
def addWord(self, word):
if word not in self.word2index:
self.word2index[word] = self.n_words
self.word2count[word] = 1
self.index2word[self.n_words] = word
self.n_words += 1
else:
self.word2count[word] += 1
def dataclean(training_data):
input_data = Dataset('input')
output_data = Dataset('ouput')
for pair in training_data:
input_data.addSentence(pair[0])
output_data.addSentence(pair[1])
return input_data, output_data, input_data.n_words, output_data.n_words
class EncoderRNN(nn.Module):
def __init__(self, input_size, hidden_size, n_layers=1):
super(EncoderRNN, self).__init__()
self.n_layers = n_layers
self.hidden_size = hidden_size
if use_cuda:
self.embedding = nn.Embedding(input_size, hidden_size).cuda()
self.gru = nn.GRU(hidden_size, hidden_size).cuda()
else:
self.embedding = nn.Embedding(input_size, hidden_size)
self.gru = nn.GRU(hidden_size, hidden_size)
def forward(self, input, hidden):
embedded = self.embedding(input).view(1, 1, -1)
output = embedded
for i in range(self.n_layers):
output, hidden = self.gru(output, hidden)
output = output.cuda() if use_cuda else output
            hidden = hidden.cuda() if use_cuda else hidden
return output, hidden
def initHidden(self):
result = Variable(torch.zeros(1, 1, self.hidden_size))
if use_cuda:
return result.cuda()
return result
class AttnDecoderRNN(nn.Module):
def __init__(self, hidden_size, output_size, n_layers=1, dropout_p=0.1, max_length=MAX_LENGTH):
super(AttnDecoderRNN, self).__init__()
self.hidden_size = hidden_size
self.output_size = output_size
self.n_layers = n_layers
self.dropout_p = dropout_p
self.max_length = max_length
if use_cuda:
self.embedding = nn.Embedding(self.output_size, self.hidden_size).cuda()
self.attn = nn.Linear(self.hidden_size * 2, self.max_length).cuda()
self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size).cuda()
self.dropout = nn.Dropout(self.dropout_p).cuda()
self.gru = nn.GRU(self.hidden_size, self.hidden_size).cuda()
self.out = nn.Linear(self.hidden_size, self.output_size).cuda()
else:
self.embedding = nn.Embedding(self.output_size, self.hidden_size)
self.attn = nn.Linear(self.hidden_size * 2, self.max_length)
self.attn_combine = nn.Linear(self.hidden_size * 2, self.hidden_size)
self.dropout = nn.Dropout(self.dropout_p)
self.gru = nn.GRU(self.hidden_size, self.hidden_size)
self.out = nn.Linear(self.hidden_size, self.output_size)
def forward(self, input, hidden, encoder_output, encoder_outputs):
embedded = self.embedding(input).view(1, 1, -1)
embedded = self.dropout(embedded)
attn_weights = F.softmax(
self.attn(torch.cat((embedded[0], hidden[0]), 1)))
attn_weights = attn_weights.cuda() if use_cuda else attn_weights
attn_applied = torch.bmm(attn_weights.unsqueeze(0),
encoder_outputs.unsqueeze(0))
attn_applied = attn_applied.cuda() if use_cuda else attn_applied
output = torch.cat((embedded[0], attn_applied[0]), 1)
output = output.cuda() if use_cuda else output
output = self.attn_combine(output).unsqueeze(0)
for i in range(self.n_layers):
output = F.relu(output)
output = output.cuda() if use_cuda else output
output, hidden = self.gru(output, hidden)
output = F.log_softmax(self.out(output[0]))
output = output.cuda() if use_cuda else output
return output, hidden, attn_weights
def initHidden(self):
result = Variable(torch.zeros(1, 1, self.hidden_size))
if use_cuda:
return result.cuda()
return result
def indexesFromSentence(lang, sentence):
out = []
for word in sentence.split(' '):
if word not in lang.word2index:
continue
k = lang.word2index[word]
out.append(k)
return out
def variableFromSentence(lang, sentence):
indexes = indexesFromSentence(lang, sentence)
indexes.append(EOS_token)
result = Variable(torch.LongTensor(indexes).view(-1, 1))
if use_cuda:
return result.cuda()
return result
def variablesFromPair(pair, input_lang, output_lang):
input_variable = variableFromSentence(input_lang, pair[0])
target_variable = variableFromSentence(output_lang, pair[1])
return (input_variable, target_variable)
teacher_forcing_ratio = 0.5
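# With probability teacher_forcing_ratio the decoder is fed the ground-truth
# token as its next input (teacher forcing); otherwise it feeds back its own
# prediction, which converges more slowly but matches inference conditions.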
def train(input_variable, target_variable, encoder, decoder, encoder_optimizer, decoder_optimizer, criterion, max_length=MAX_LENGTH):
encoder_hidden = encoder.initHidden()
encoder_optimizer.zero_grad()
decoder_optimizer.zero_grad()
input_length = input_variable.size()[0]
target_length = target_variable.size()[0]
encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size))
encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs
loss = 0
for ei in range(input_length):
encoder_output, encoder_hidden = encoder(
input_variable[ei], encoder_hidden)
encoder_outputs[ei] = encoder_output[0][0]
decoder_input = Variable(torch.LongTensor([[SOS_token]]))
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
decoder_hidden = encoder_hidden
use_teacher_forcing = True if random.random() < teacher_forcing_ratio else False
if use_teacher_forcing:
# Teacher forcing: Feed the target as the next input
for di in range(target_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
loss += criterion(decoder_output[0], target_variable[di])
decoder_input = target_variable[di] # Teacher forcing
else:
# Without teacher forcing: use its own predictions as the next input
for di in range(target_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
topv, topi = decoder_output.data.topk(1)
ni = topi[0][0]
decoder_input = Variable(torch.LongTensor([[ni]]))
loss += criterion(decoder_output[0], target_variable[di])
if ni == EOS_token:
break
loss.backward()
encoder_optimizer.step()
decoder_optimizer.step()
return loss.data[0] / target_length
def seqtoseq_train(n_iters, training_data,print_every=1000, learning_rate=0.01, tfl=False):
print_loss_total = 0
hidden_size = 256
in_lang, out_lang, inwords, outwords = dataclean(training_data)
metadata = open('app/brain/seqtoseq_meta.pkl', 'wb')
pk.dump([in_lang, out_lang], metadata)
if tfl == False:
encoder = EncoderRNN(inwords, hidden_size)
decoder = AttnDecoderRNN(hidden_size, outwords, dropout_p=0.1)
else:
encoder = torch.load('app/brain/encoder.pt')
decoder = torch.load('app/brain/decoder.pt')
if use_cuda:
encoder = encoder.cuda()
decoder = decoder.cuda()
encoder_optimizer = optim.SGD(encoder.parameters(), lr=learning_rate)
decoder_optimizer = optim.SGD(decoder.parameters(), lr=learning_rate)
training_data = [variablesFromPair(random.choice(training_data),in_lang,out_lang)
for i in range(n_iters)]
criterion = nn.NLLLoss()
if use_cuda:
criterion = criterion.cuda()
for iter in range(1, n_iters + 1):
training_pair = training_data[iter - 1]
input_variable = training_pair[0]
target_variable = training_pair[1]
loss = train(input_variable, target_variable, encoder,
decoder, encoder_optimizer, decoder_optimizer, criterion)
print_loss_total += loss
accuracy = 100-(loss*100)
if accuracy < 0:
accuracy = 0
if iter%1000 == 0:
print(accuracy,"%")
torch.save(encoder, 'app/brain/encoder.pt')
torch.save(decoder, 'app/brain/decoder.pt')
def evaluate(encoder, decoder, input_lang, output_lang, sentence, max_length=MAX_LENGTH):
input_variable = variableFromSentence(input_lang, sentence)
input_length = input_variable.size()[0]
encoder_hidden = encoder.initHidden()
encoder_outputs = Variable(torch.zeros(max_length, encoder.hidden_size))
encoder_outputs = encoder_outputs.cuda() if use_cuda else encoder_outputs
for ei in range(input_length):
encoder_output, encoder_hidden = encoder(input_variable[ei],
encoder_hidden)
encoder_outputs[ei] = encoder_outputs[ei] + encoder_output[0][0]
decoder_input = Variable(torch.LongTensor([[SOS_token]])) # SOS
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
decoder_hidden = encoder_hidden
decoded_words = []
decoder_attentions = torch.zeros(max_length, max_length)
for di in range(max_length):
decoder_output, decoder_hidden, decoder_attention = decoder(
decoder_input, decoder_hidden, encoder_output, encoder_outputs)
decoder_attentions[di] = decoder_attention.data
topv, topi = decoder_output.data.topk(1)
ni = topi[0][0]
if ni == EOS_token:
decoded_words.append('<EOS>')
break
else:
decoded_words.append(output_lang.index2word[ni])
decoder_input = Variable(torch.LongTensor([[ni]]))
decoder_input = decoder_input.cuda() if use_cuda else decoder_input
return decoded_words, decoder_attentions[:di + 1]
def reply_predict(sentence):
try:
encoder = torch.load('app/brain/encoder.pt')
decoder = torch.load('app/brain/decoder.pt')
with open('app/brain/seqtoseq_meta.pkl','rb') as pickle_file:
meta = pk.load(pickle_file)
input_lang = meta[0]
output_lang = meta[1]
output_words, attentions = evaluate(encoder, decoder, input_lang, output_lang, sentence)
output_sentence = ' '.join(output_words)
return output_sentence.split("<EOS>")[0]
except(KeyError):
return random.choice(["sorry i didnt get that","no idea", "i may be malfunctioning", "sorry this is a prototype"])
| bsd-3-clause | -6,247,866,893,398,659,000 | 35.385113 | 133 | 0.631771 | false |
nonZero/demos-python | src/examples/short/object_oriented/static_method_6.py | 1 | 1045 | #!/usr/bin/python2
'''
An example for using class methods to keep per class properties.
Once set, subclass properties shadows properties on the base class.
'''
from __future__ import print_function
class Book(object):
num = 0
def __init__(self, title):
self.title = title
self.id = self.increment_num()
print('Created:', self)
@classmethod
def increment_num(cls):
cls.num += 1
return cls.num
def __str__(self):
return '<{} #{}: {}>'.format(self.__class__.__name__, self.id, self.title)
b1 = Book('Guinness Book of Records')
b2 = Book('The Bible')
print('Book.num:', Book.num)
print('b1.num:', b1.num)
print()
class FictionBook(Book):
num = 0 # Removing me voids warranty
print('Book.num:', Book.num)
print('FictionBook.num:', FictionBook.num)
print()
b3 = FictionBook('Sherlock Holmes')
b4 = FictionBook('Danny Din')
b5 = FictionBook('Kofiko')
print()
print('Book.num:', Book.num)
print('FictionBook.num:', FictionBook.num)
print()
b6 = Book('Britannica')
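# Illustrative final counts (added comment): b6 is created on the base class,
# so increment_num receives cls=Book and bumps only Book.num:
#   Book.num        -> 3  (b1, b2, b6)
#   FictionBook.num -> 3  (b3, b4, b5; its own `num` shadows Book's)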
| gpl-3.0 | -7,061,124,976,053,368,000 | 19.096154 | 82 | 0.636364 | false |
YtvwlD/yarfi | etc/yarfi/services/console_setup.py | 1 | 1300 | # YARFI - Yet Another Replacement For Init
# Copyright (C) 2014 Niklas Sombert
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen
from yarfi.ServicesAndTargets import Service as Srv
class Service(Srv):
def __init__(self):
self.description = "set the console font and keyboard layout"
self.depends = ["system", "udev"]
self.conflicts = []
self.respawn = True
self.status_ = ""
self.process = None
def start(self):
self.process = Popen(["/bin/setupcon"]) #use --force? (and --save?)
def status(self):
if self.status_ == "stopped":
return ("stopped")
if self.process:
if self.process.poll() is not None:
self.status_ = "running"
return ("running") | gpl-3.0 | -6,666,950,791,183,914,000 | 32.358974 | 71 | 0.717692 | false |
cydenix/OpenGLCffi | OpenGLCffi/EGL/EXT/EXT/output_base.py | 1 | 1086 | from OpenGLCffi.EGL import params
@params(api='egl', prms=['dpy', 'attrib_list', 'layers', 'max_layers', 'num_layers'])
def eglGetOutputLayersEXT(dpy, attrib_list, layers, max_layers, num_layers):
pass
@params(api='egl', prms=['dpy', 'attrib_list', 'ports', 'max_ports', 'num_ports'])
def eglGetOutputPortsEXT(dpy, attrib_list, ports, max_ports, num_ports):
pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglOutputLayerAttribEXT(dpy, layer, attribute):
pass
@params(api='egl', prms=['dpy', 'layer', 'attribute', 'value'])
def eglQueryOutputLayerAttribEXT(dpy, layer, attribute):
pass
@params(api='egl', prms=['dpy', 'layer', 'name'])
def eglQueryOutputLayerStringEXT(dpy, layer, name):
pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglOutputPortAttribEXT(dpy, port, attribute):
pass
@params(api='egl', prms=['dpy', 'port', 'attribute', 'value'])
def eglQueryOutputPortAttribEXT(dpy, port, attribute):
pass
@params(api='egl', prms=['dpy', 'port', 'name'])
def eglQueryOutputPortStringEXT(dpy, port, name):
pass
| mit | 6,723,218,600,509,176,000 | 25.487805 | 85 | 0.689687 | false |
Edraak/circleci-edx-platform | common/djangoapps/edraak_i18n/management/commands/i18n_edraak_theme_push.py | 1 | 2382 | # * Handling merge/forks of UserProfile.meta
from django.core.management.base import BaseCommand
from django.conf import settings
import os
from subprocess import call
import polib
class Command(BaseCommand):
help = '''Run theme's ./scripts/edraak_i18n_theme_push.sh'''
@staticmethod
def remove_ignored_messages(theme_root):
theme_pofile = theme_root / 'conf/locale/en/LC_MESSAGES/edraak-platform-2015-theme.po'
theme_po = polib.pofile(theme_pofile)
# `reversed()` is used to allow removing from the bottom
# instead of changing the index and introducing bugs
for entry in reversed(theme_po):
if 'edraak-ignore' in entry.comment.lower():
theme_po.remove(entry)
print 'Removed ignored translation: ', entry.msgid, '=>', entry.msgstr
theme_po.save()
@staticmethod
def generate_pofile(theme_root):
mako_pofile_relative = 'conf/locale/en/LC_MESSAGES/mako.po'
mako_pofile = theme_root / mako_pofile_relative
if not mako_pofile.dirname().exists():
os.makedirs(mako_pofile.dirname())
open(mako_pofile, 'w').close() # Make sure the file exists and empty
call([
'pybabel',
'-q', 'extract',
'--mapping=conf/locale/babel_mako.cfg',
'--add-comments', 'Translators:',
'--keyword', 'interpolate',
'.',
'--output={}'.format(mako_pofile_relative),
], cwd=theme_root)
call(['i18n_tool', 'segment', '--config', 'conf/locale/config.yaml', 'en'], cwd=theme_root)
if mako_pofile.exists():
mako_pofile.unlink()
@staticmethod
def transifex_push(theme_root):
call(['tx', 'push', '-l', 'en', '-s', '-r', 'edraak.edraak-platform-2015-theme'], cwd=theme_root)
def handle(self, *args, **options):
if settings.FEATURES.get('USE_CUSTOM_THEME', False) and settings.THEME_NAME:
theme_root = settings.ENV_ROOT / "themes" / settings.THEME_NAME
self.generate_pofile(theme_root)
self.remove_ignored_messages(theme_root)
self.transifex_push(theme_root)
else:
print "Error: theme files not found."
print "Are you sure the config is correct? Press <Enter> to continue without theme i18n..."
raw_input()
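# --- usage note (added for illustration) ---
# As a Django management command this would typically be invoked as
#     python manage.py i18n_edraak_theme_push
# with FEATURES['USE_CUSTOM_THEME'] = True and THEME_NAME set in the settings.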
| agpl-3.0 | -8,983,956,020,593,395,000 | 36.21875 | 105 | 0.607473 | false |
richard-willowit/odoo | addons/stock/__manifest__.py | 2 | 2878 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Inventory Management',
'version': '1.1',
'summary': 'Inventory, Logistics, Warehousing',
'description': "",
'website': 'https://www.odoo.com/page/warehouse',
'depends': ['product', 'barcodes', 'web_planner'],
'category': 'Warehouse',
'sequence': 13,
'demo': [
'data/stock_demo_pre.yml',
'data/procurement_demo.xml',
'data/stock_demo.xml',
'data/stock_orderpoint_demo.xml',
'data/stock_orderpoint_demo.yml',
'data/stock_demo.yml',
'data/stock_location_demo_cpu1.xml',
'data/stock_location_demo_cpu3.yml',
'data/stock_quant_demo.xml',
],
'data': [
'security/stock_security.xml',
'security/ir.model.access.csv',
'views/stock_menu_views.xml',
'data/stock_traceability_report_data.xml',
'data/procurement_data.xml',
'report/report_stock_forecast.xml',
'report/stock_report_views.xml',
'report/report_package_barcode.xml',
'report/report_lot_barcode.xml',
'report/report_location_barcode.xml',
'report/report_stockpicking_operations.xml',
'report/report_deliveryslip.xml',
'report/report_stockinventory.xml',
'wizard/stock_change_product_qty_views.xml',
'wizard/stock_picking_return_views.xml',
'wizard/stock_scheduler_compute_views.xml',
'wizard/stock_immediate_transfer_views.xml',
'wizard/stock_backorder_confirmation_views.xml',
'views/res_partner_views.xml',
'views/product_strategy_views.xml',
'views/stock_incoterms_views.xml',
'views/stock_production_lot_views.xml',
'views/stock_picking_views.xml',
'views/stock_scrap_views.xml',
'views/stock_inventory_views.xml',
'views/stock_quant_views.xml',
'views/stock_location_views.xml',
'views/stock_warehouse_views.xml',
'views/stock_move_line_views.xml',
'views/stock_move_views.xml',
'views/product_views.xml',
'views/res_config_settings_views.xml',
'views/report_stock_traceability.xml',
'views/stock_template.xml',
'views/procurement_views.xml',
'data/default_barcode_patterns.xml',
'data/stock_data.xml',
'data/stock_data.yml',
'data/stock_incoterms_data.xml',
'data/stock_sequence_data.xml',
'data/web_planner_data.xml',
],
'qweb': [
'static/src/xml/stock_traceability_report_backend.xml',
],
'test': [
'test/stock_users.yml',
'test/packing.yml',
'test/packingneg.yml',
'test/procrule.yml',
'test/wiseoperator.yml',
],
'installable': True,
'application': True,
'auto_install': False,
}
| gpl-3.0 | 1,153,926,068,468,263,200 | 33.261905 | 74 | 0.601807 | false |
erijo/py-svndump | svndump/record.py | 1 | 5100 | # Copyright (c) 2012 Erik Johansson <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from .section import *
class Record(object):
def __init__(self, headers):
super(Record, self).__init__()
self.headers = headers
def discard(self):
pass
def write(self, stream):
self.headers.write(stream)
@staticmethod
def read(stream):
headers = HeaderSection.read(stream)
if headers is None:
return None
if NodeRecord.NODE_PATH_HEADER in headers:
return NodeRecord.read(headers, stream)
elif RevisionRecord.REVISION_NUMBER_HEADER in headers:
return RevisionRecord.read(headers, stream)
elif VersionStampRecord.VERSION_HEADER in headers:
return VersionStampRecord.read(headers, stream)
elif UuidRecord.UUID_HEADER in headers:
return UuidRecord.read(headers, stream)
stream.error("unknown record");
class VersionStampRecord(Record):
VERSION_HEADER = "SVN-fs-dump-format-version"
def __init__(self, headers):
super(VersionStampRecord, self).__init__(headers)
@staticmethod
def read(headers, stream):
return VersionStampRecord(headers)
class UuidRecord(Record):
UUID_HEADER = "UUID"
def __init__(self, headers):
super(UuidRecord, self).__init__(headers)
@staticmethod
def read(headers, stream):
return UuidRecord(headers)
class RevisionRecord(Record):
REVISION_NUMBER_HEADER = "Revision-number"
PROP_CONTENT_LENGTH = "Prop-content-length"
CONTENT_LENGTH = "Content-length"
def __init__(self, headers, properties):
super(RevisionRecord, self).__init__(headers)
self.properties = properties
def write(self, stream):
prop_length = self.properties.dump_length()
self.headers[self.PROP_CONTENT_LENGTH] = prop_length
self.headers[self.CONTENT_LENGTH] = prop_length
super(RevisionRecord, self).write(stream)
self.properties.write(stream)
stream.writeline()
@staticmethod
def read(headers, stream):
properties = PropertySection.read(stream)
return RevisionRecord(headers, properties)
class NodeRecord(Record):
NODE_PATH_HEADER = "Node-path"
NODE_KIND = "Node-kind"
NODE_ACTION = "Node-action"
NODE_COPYFROM_REV = "Node-copyfrom-rev"
NODE_COPYFROM_PATH = "Node-copyfrom-path"
TEXT_COPY_SOURCE_MD5 = "Text-copy-source-md5"
TEXT_CONTENT_MD5 = "Text-content-md5"
TEXT_CONTENT_LENGTH = "Text-content-length"
PROP_CONTENT_LENGTH = "Prop-content-length"
CONTENT_LENGTH = "Content-length"
# New in version 3
TEXT_DELTA = "Text-delta"
PROP_DELTA = "Prop-delta"
TEXT_DELTA_BASE_MD5 = "Text-delta-base-md5"
TEXT_DELTA_BASE_SHA1 = "Text-delta-base-sha1"
TEXT_COPY_SOURCE_SHA1 = "Text-copy-source-sha1"
TEXT_CONTENT_SHA1 = "Text-content-sha1"
def __init__(self, headers, properties, content):
super(NodeRecord, self).__init__(headers)
self.properties = properties
self.content = content
def discard(self):
if self.content is not None:
self.content.discard()
def write(self, stream):
prop_length = 0
if self.properties is not None:
prop_length = self.properties.dump_length()
self.headers[self.PROP_CONTENT_LENGTH] = prop_length
text_length = 0
if self.content is not None:
text_length = self.content.dump_length()
self.headers[self.TEXT_CONTENT_LENGTH] = text_length
if self.properties is not None or self.content is not None:
self.headers[self.CONTENT_LENGTH] = prop_length + text_length
super(NodeRecord, self).write(stream)
if self.properties is not None:
self.properties.write(stream)
if self.content is not None:
self.content.write(stream)
stream.writeline()
stream.writeline()
@staticmethod
def read(headers, stream):
properties = None
if NodeRecord.PROP_CONTENT_LENGTH in headers:
properties = PropertySection.read(stream)
content = None
if NodeRecord.TEXT_CONTENT_LENGTH in headers:
content = Content.read(
stream, headers[NodeRecord.TEXT_CONTENT_LENGTH])
return NodeRecord(headers, properties, content)
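# --- usage sketch (added for illustration; not part of the original module) ---
# Records are read polymorphically until the stream is exhausted. `stream` is
# assumed to be this package's dump-stream wrapper (see the .section imports).
def iter_records(stream):
    """Yield every record found in a Subversion dump stream."""
    while True:
        record = Record.read(stream)
        if record is None:
            break
        yield record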
| gpl-3.0 | -8,833,452,493,659,131,000 | 30.875 | 73 | 0.65902 | false |
izapolsk/integration_tests | cfme/utils/appliance/services.py | 1 | 3411 | import attr
from cfme.utils.appliance.plugin import AppliancePlugin
from cfme.utils.appliance.plugin import AppliancePluginException
from cfme.utils.log import logger_wrap
from cfme.utils.quote import quote
from cfme.utils.wait import wait_for
class SystemdException(AppliancePluginException):
pass
@attr.s
class SystemdService(AppliancePlugin):
unit_name = attr.ib(type=str)
@logger_wrap('SystemdService command runner: {}')
def _run_service_command(
self,
command,
expected_exit_code=None,
unit_name=None,
log_callback=None
):
"""Wrapper around running the command and raising exception on unexpected code
Args:
command: string command for systemd (stop, start, restart, etc)
expected_exit_code: the exit code to expect, otherwise raise
unit_name: optional unit name, defaults to self.unit_name attribute
log_callback: logger to log against
Raises:
SystemdException: When expected_exit_code is not matched
"""
unit = self.unit_name if unit_name is None else unit_name
with self.appliance.ssh_client as ssh:
cmd = 'systemctl {} {}'.format(quote(command), quote(unit))
log_callback('Running {}'.format(cmd))
result = ssh.run_command(cmd,
container=self.appliance.ansible_pod_name)
if expected_exit_code is not None and result.rc != expected_exit_code:
# TODO: Bring back address
msg = 'Failed to {} {}\nError: {}'.format(
command, self.unit_name, result.output)
if log_callback:
log_callback(msg)
else:
self.logger.error(msg)
raise SystemdException(msg)
return result
def stop(self, log_callback=None):
return self._run_service_command(
'stop',
expected_exit_code=0,
log_callback=log_callback
)
def start(self, log_callback=None):
return self._run_service_command(
'start',
expected_exit_code=0,
log_callback=log_callback
)
def restart(self, log_callback=None):
return self._run_service_command(
'restart',
expected_exit_code=0,
log_callback=log_callback
)
def enable(self, log_callback=None):
return self._run_service_command(
'enable',
expected_exit_code=0,
log_callback=log_callback
)
@property
def enabled(self):
return self._run_service_command('is-enabled').rc == 0
@property
def is_active(self):
return self._run_service_command('is-active').rc == 0
@property
def running(self):
return self._run_service_command("status").rc == 0
def wait_for_running(self, timeout=600):
result, wait = wait_for(
lambda: self.running,
num_sec=timeout,
fail_condition=False,
delay=5,
)
return result
def daemon_reload(self, log_callback=None):
"""Call daemon-reload, no unit name for this"""
return self._run_service_command(
command='daemon-reload',
expected_exit_code=0,
unit_name='',
log_callback=log_callback
)
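# --- usage sketch (added for illustration; the names below are hypothetical) ---
# The plugin is meant to hang off an appliance object, e.g.:
#     service = SystemdService(appliance, unit_name='evmserverd')
#     service.restart()
#     service.wait_for_running(timeout=300)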
| gpl-2.0 | 2,346,904,480,859,109,000 | 29.72973 | 86 | 0.586045 | false |
ttrifonov/horizon | horizon/horizon/dashboards/syspanel/users/tables.py | 1 | 4566 | import logging
from django import shortcuts
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from horizon import api
from horizon import tables
LOG = logging.getLogger(__name__)
class CreateUserLink(tables.LinkAction):
name = "create"
verbose_name = _("Create User")
url = "horizon:syspanel:users:create"
attrs = {
"class": "ajax-modal btn small",
}
class EditUserLink(tables.LinkAction):
name = "edit"
verbose_name = _("Edit")
url = "horizon:syspanel:users:update"
attrs = {
"class": "ajax-modal",
}
class EnableUsersAction(tables.Action):
name = "enable"
verbose_name = _("Enable")
verbose_name_plural = _("Enable Users")
def allowed(self, request, user):
return not user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
enabled = []
for obj_id in object_ids:
try:
api.keystone.user_update_enabled(request, obj_id, True)
enabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error enabling user: %s") % e)
LOG.exception("Error enabling user.")
if failures:
messages.info(request, _("Enabled the following users: %s")
% ", ".join(enabled))
else:
messages.success(request, _("Successfully enabled users: %s")
% ", ".join(enabled))
return shortcuts.redirect('horizon:syspanel:users:index')
class DisableUsersAction(tables.Action):
name = "disable"
verbose_name = _("Disable")
verbose_name_plural = _("Disable Users")
def allowed(self, request, user):
return user.enabled
def handle(self, data_table, request, object_ids):
failures = 0
disabled = []
for obj_id in object_ids:
if obj_id == request.user.id:
messages.info(request, _('You cannot disable the user you are '
'currently logged in as.'))
continue
try:
api.keystone.user_update_enabled(request, obj_id, False)
disabled.append(obj_id)
except Exception, e:
failures += 1
messages.error(request, _("Error disabling user: %s") % e)
LOG.exception("Error disabling user.")
if failures:
messages.info(request, _("Disabled the following users: %s")
% ", ".join(disabled))
else:
if disabled:
messages.success(request, _("Successfully disabled users: %s")
% ", ".join(disabled))
return shortcuts.redirect('horizon:syspanel:users:index')
class DeleteUsersAction(tables.DeleteAction):
data_type_singular = _("User")
data_type_plural = _("Users")
def allowed(self, request, datum):
if datum and datum.id == request.user.id:
return False
return True
def delete(self, request, obj_id):
api.keystone.user_delete(request, obj_id)
class UserFilterAction(tables.FilterAction):
def filter(self, table, users, filter_string):
""" Really naive case-insensitive search. """
# FIXME(gabriel): This should be smarter. Written for demo purposes.
q = filter_string.lower()
def comp(user):
if q in user.name.lower() or q in user.email.lower():
return True
return False
return filter(comp, users)
class UsersTable(tables.DataTable):
STATUS_CHOICES = (
("true", True),
("false", False)
)
id = tables.Column(_('id'))
name = tables.Column(_('name'))
email = tables.Column(_('email'))
# Default tenant is not returned from Keystone currently.
#default_tenant = tables.Column(_('default_tenant'),
# verbose_name="Default Project")
enabled = tables.Column(_('enabled'),
status=True,
status_choices=STATUS_CHOICES)
class Meta:
name = "users"
verbose_name = _("Users")
row_actions = (EditUserLink, EnableUsersAction, DisableUsersAction,
DeleteUsersAction)
table_actions = (UserFilterAction, CreateUserLink, DeleteUsersAction)
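# --- usage sketch (added for illustration) ---
# A Horizon view would typically feed this table keystone data, e.g.:
#     users = api.keystone.user_list(request)
#     table = UsersTable(request, data=users)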
| apache-2.0 | -7,072,450,925,408,468,000 | 31.382979 | 79 | 0.56417 | false |
OliverWalter/amdtk | amdtk/models/mixture.py | 1 | 3497 |
"""Mixture of distributions/densities."""
import numpy as np
from scipy.misc import logsumexp
from .discrete_latent_model import DiscreteLatentModel
from .dirichlet import Dirichlet
class MixtureStats(object):
"""Sufficient statistics for :class:BayesianMixture`.
Methods
-------
__getitem__(key)
Index operator.
__add__(stats)
Addition operator.
__iadd__(stats)
In-place addition operator.
"""
def __init__(self, P_Z):
self.__stats = P_Z.sum(axis=0)
def __getitem__(self, key):
if type(key) is not int:
raise TypeError()
if key < 0 or key > 2:
raise IndexError
return self.__stats
def __add__(self, other):
        # seed with zero statistics of matching dimension; the constructor
        # expects a responsibility matrix, not a length
        new_stats = MixtureStats(np.zeros((1, len(self.__stats))))
new_stats += self
new_stats += other
return new_stats
def __iadd__(self, other):
self.__stats += other.__stats
return self
class BayesianMixture(DiscreteLatentModel):
"""Bayesian mixture of probability distributions (or densities).
The prior is a Dirichlet density.
Attributes
----------
prior : :class:`Dirichlet`
Prior density.
posterior : :class:`Dirichlet`
Posterior density.
Methods
-------
expLogLikelihood(X)
Expected value of the log-likelihood of the data given the
model.
KLPosteriorPrior()
KL divergence between the posterior and the prior densities.
updatePosterior(mixture_stats, pdf_stats)
Update the parameters of the posterior distribution according to
the accumulated statistics.
"""
def __init__(self, alphas, components):
super().__init__(components)
self.prior = Dirichlet(alphas)
self.posterior = Dirichlet(alphas.copy())
def expLogLikelihood(self, X, weight=1.0):
"""Expected value of the log-likelihood of the data given the
model.
Parameters
----------
X : numpy.ndarray
Data matrix of N frames with D dimensions.
weight : float
Scaling weight for the log-likelihood
Returns
-------
E_llh : numpy.ndarray
The expected value of the log-likelihood for each frame.
E_log_P_Z: numpy.ndarray
Probability distribution of the latent states given the
data.
"""
E_log_weights = self.posterior.expLogPi()
E_log_p_X = np.zeros((X.shape[0], self.k))
for i, pdf in enumerate(self.components):
E_log_p_X[:, i] += E_log_weights[i]
E_log_p_X[:, i] += pdf.expLogLikelihood(X)
E_log_p_X[:, i] *= weight
log_norm = logsumexp(E_log_p_X, axis=1)
E_log_P_Z = (E_log_p_X.T - log_norm).T
return log_norm, E_log_P_Z
def KLPosteriorPrior(self):
"""KL divergence between the posterior and the prior densities.
Returns
-------
KL : float
KL divergence.
"""
KL = 0
for component in self.components:
KL += component.KLPosteriorPrior()
return KL + self.posterior.KL(self.prior)
def updatePosterior(self, mixture_stats):
"""Update the parameters of the posterior distribution.
Parameters
----------
mixture_stats : :class:MixtureStats
Statistics of the mixture weights.
"""
self.posterior = self.prior.newPosterior(mixture_stats)
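# --- usage sketch (added for illustration; not part of the original module) ---
# One variational update given a (N, D) data matrix X and a BayesianMixture
# instance `mixture`; exp() turns the log-responsibilities into P(Z|X):
#     log_norm, E_log_P_Z = mixture.expLogLikelihood(X)
#     mixture.updatePosterior(MixtureStats(np.exp(E_log_P_Z)))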
| bsd-2-clause | 1,982,866,318,469,026,800 | 26.535433 | 72 | 0.582213 | false |
davidbgk/udata | udata/search/commands.py | 1 | 8335 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import sys
import signal
from contextlib import contextmanager
from datetime import datetime
from flask import current_app
from flask_script import prompt_bool
from udata.commands import submanager, IS_INTERACTIVE
from udata.search import es, adapter_catalog
from elasticsearch.helpers import reindex as es_reindex, streaming_bulk
log = logging.getLogger(__name__)
m = submanager(
'search',
help='Search/Indexation related operations',
description='Handle search and indexation related operations'
)
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M'
DEPRECATION_MSG = '{cmd} command will be removed in udata 1.4, use index command instead'
def default_index_name():
'''Build a time based index name'''
return '-'.join([es.index_name, datetime.now().strftime(TIMESTAMP_FORMAT)])
def iter_adapters():
'''Iter over adapter in predictable way'''
adapters = adapter_catalog.values()
return sorted(adapters, key=lambda a: a.model.__name__)
def iter_qs(qs, adapter):
'''Safely iterate over a DB QuerySet yielding ES documents'''
for obj in qs.no_dereference().timeout(False):
if adapter.is_indexable(obj):
try:
doc = adapter.from_model(obj).to_dict(include_meta=True)
yield doc
except Exception as e:
model = adapter.model.__name__
log.error('Unable to index %s "%s": %s', model, str(obj.id),
str(e), exc_info=True)
def iter_for_index(docs, index_name):
'''Iterate over ES documents ensuring a given index'''
for doc in docs:
doc['_index'] = index_name
yield doc
def index_model(index_name, adapter):
''' Indel all objects given a model'''
model = adapter.model
log.info('Indexing {0} objects'.format(model.__name__))
qs = model.objects
if hasattr(model.objects, 'visible'):
qs = qs.visible()
if adapter.exclude_fields:
qs = qs.exclude(*adapter.exclude_fields)
docs = iter_qs(qs, adapter)
docs = iter_for_index(docs, index_name)
for ok, info in streaming_bulk(es.client, docs, raise_on_error=False):
if not ok:
log.error('Unable to index %s "%s": %s', model.__name__,
info['index']['_id'], info['index']['error'])
def disable_refresh(index_name):
'''
Disable refresh to optimize indexing
See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
''' # noqa
es.indices.put_settings(index=index_name, body={
'index': {
'refresh_interval': '-1'
}
})
def enable_refresh(index_name):
'''
Enable refresh and force merge. To be used after indexing.
See: https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html#bulk
''' # noqa
refresh_interval = current_app.config['ELASTICSEARCH_REFRESH_INTERVAL']
es.indices.put_settings(index=index_name, body={
'index': {'refresh_interval': refresh_interval}
})
es.indices.forcemerge(index=index_name)
def set_alias(index_name, delete=True):
'''
Properly end an indexation by creating an alias.
Previous alias is deleted if needed.
'''
log.info('Creating alias "{0}" on index "{1}"'.format(
es.index_name, index_name))
if es.indices.exists_alias(name=es.index_name):
alias = es.indices.get_alias(name=es.index_name)
previous_indices = alias.keys()
if index_name not in previous_indices:
es.indices.put_alias(index=index_name, name=es.index_name)
for index in previous_indices:
if index != index_name:
es.indices.delete_alias(index=index, name=es.index_name)
if delete:
es.indices.delete(index=index)
else:
es.indices.put_alias(index=index_name, name=es.index_name)
@contextmanager
def handle_error(index_name, keep=False):
'''
Handle errors while indexing.
In case of error, properly log it, remove the index and exit.
If `keep` is `True`, index is not deleted.
'''
# Handle keyboard interrupt
signal.signal(signal.SIGINT, signal.default_int_handler)
signal.signal(signal.SIGTERM, signal.default_int_handler)
has_error = False
try:
yield
except KeyboardInterrupt:
print('') # Proper warning message under the "^C" display
log.warning('Interrupted by signal')
has_error = True
except Exception as e:
log.error(e)
has_error = True
if has_error:
if not keep:
log.info('Removing index %s', index_name)
es.indices.delete(index=index_name)
sys.exit(-1)
@m.option('-t', '--type', dest='doc_type', required=True,
help='Only reindex a given type')
def reindex(doc_type):
'''[DEPRECATED] Reindex models'''
log.warn(DEPRECATION_MSG.format(cmd='reindex'))
index([doc_type], force=True, keep=False)
@m.option('-n', '--name', default=None, help='Optionnal index name')
@m.option('-d', '--delete', default=False, action='store_true',
help='Delete previously aliased indices')
@m.option('-f', '--force', default=False, action='store_true',
help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
help='Keep index in case of error')
def init(name=None, delete=False, force=False, keep=False):
'''[DEPRECATED] Initialize or rebuild the search index'''
log.warn(DEPRECATION_MSG.format(cmd='init'))
index(name=name, force=force, keep=not delete)
@m.option(dest='models', nargs='*', metavar='model',
help='Model to reindex')
@m.option('-n', '--name', default=None, help='Optionnal index name')
@m.option('-f', '--force', default=False, action='store_true',
help='Do not prompt on deletion')
@m.option('-k', '--keep', default=False, action='store_true',
help='Do not delete indexes')
def index(models=None, name=None, force=False, keep=False):
'''Initialize or rebuild the search index'''
index_name = name or default_index_name()
doc_types_names = [m.__name__.lower() for m in adapter_catalog.keys()]
models = [model.lower().rstrip('s') for model in (models or [])]
for model in models:
if model not in doc_types_names:
log.error('Unknown model %s', model)
sys.exit(-1)
    log.info('Initializing index "{0}"'.format(index_name))
if es.indices.exists(index_name):
if IS_INTERACTIVE and not force:
msg = 'Index {0} will be deleted, are you sure?'
delete = prompt_bool(msg.format(index_name))
else:
delete = True
if delete:
es.indices.delete(index_name)
else:
sys.exit(-1)
es.initialize(index_name)
with handle_error(index_name, keep):
disable_refresh(index_name)
for adapter in iter_adapters():
if not models or adapter.doc_type().lower() in models:
index_model(index_name, adapter)
else:
log.info('Copying {0} objects to the new index'.format(
adapter.model.__name__))
# Need upgrade to Elasticsearch-py 5.0.0 to write:
# es.reindex({
# 'source': {'index': es.index_name, 'type': adapter.doc_type()},
# 'dest': {'index': index_name}
# })
#
# http://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.Elasticsearch.reindex
# This method (introduced in Elasticsearch 2.3 but only in Elasticsearch-py 5.0.0)
# triggers a server-side documents copy.
# Instead we use this helper for meant for backward compatibility
# but with poor performance as copy is client-side (scan+bulk)
es_reindex(es.client, es.index_name, index_name, scan_kwargs={
'doc_type': adapter.doc_type()
})
enable_refresh(index_name)
# At this step, we don't want error handler to delete the index
# in case of error
set_alias(index_name, delete=not keep)
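# --- usage note (added for illustration) ---
# Through the `search` submanager the command is invoked along the lines of:
#     <manager> search index dataset reuse --name my-index --keep
# where <manager> stands for the project's flask_script entry point.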
| agpl-3.0 | -8,890,075,423,520,776,000 | 34.168776 | 111 | 0.616557 | false |
DISBi/django-disbi | disbi/disbimodels.py | 1 | 7146 | """
Normal Django models with a few custom options for configuration.
If you have custom model classes that need these options, add them here and
create a child class of the appropriate options class and your custom model class.
"""
# Django
from django.db import models
class Options():
def __init__(self, di_show=False, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
"""
Custom options for DISBi fields.
Args:
di_show (bool): Determines whether the column should be
included in the result table.
di_display_name (str): Will be used as column header in the result table.
di_hr_primary_key (bool): Determines whether the column should
be used for identifying rows. If true column must be unique
and may not be `null` or `blank`. Only one di_hr_primary_key
is allowed per model.
TODO: enforce this
"""
self.di_show = di_show
self.di_display_name = di_display_name
self.di_hr_primary_key = di_hr_primary_key
self.di_choose = di_choose
self.di_combinable = di_combinable
super().__init__(*args, **kwargs)
class RelationshipOptions():
def __init__(self, to, di_show=False, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
"""
Custom options for DISBi relationship fields, which have a different
signature than normal fields.
Args:
di_show (bool): Determines whether the column should be
included in the result table.
di_display_name (str): Will be used as column header in the result table.
di_hr_primary_key (bool): Determines whether the column should
be used for identifying rows. If true column must be unique
and may not be `null` or `blank`. Only one di_hr_primary_key
is allowed per model.
TODO: enforce this
"""
self.di_show = di_show
        self.di_display_name = di_display_name  # keep the attribute name consistent with Options
self.di_hr_primary_key = di_hr_primary_key
self.di_choose = di_choose
self.di_combinable = di_combinable
super().__init__(to, *args, **kwargs)
class ExcludeOptions(Options):
"""
Adds the `exclude` option, to exclude rows where this field
evaluates to `False`. Should be only used on Bool fields.
"""
def __init__(self, di_exclude=False, di_show=False, di_display_name=None,
di_hr_primary_key=False, di_choose=False, di_combinable=False,
*args, **kwargs):
self.di_exclude = di_exclude
        super().__init__(di_show, di_display_name, di_hr_primary_key, di_choose,
                         di_combinable,
                         *args, **kwargs)
class FloatField(Options, models.FloatField):
"""
FloatField with custom DISBi options.
"""
pass
class BigIntegerField(Options, models.BigIntegerField):
"""
BigIntegerField with custom DISBi options.
"""
pass
class BinaryField(Options, models.BinaryField):
"""
BinaryField with custom DISBi options.
"""
pass
class CommaSeparatedIntegerField(Options, models.CommaSeparatedIntegerField):
"""
CommaSeparatedIntegerField with custom DISBi options.
"""
pass
class CharField(Options, models.CharField):
"""
CharField with custom DISBi options.
"""
pass
class DateField(Options, models.DateField):
"""
DateField with custom DISBi options.
"""
pass
class DateTimeField(Options, models.DateTimeField):
"""
DateTimeField with custom DISBi options.
"""
pass
class DecimalField(Options, models.DecimalField):
"""
DecimalField with custom DISBi options.
"""
pass
class DurationField(Options, models.DurationField):
"""
DurationField with custom DISBi options.
"""
pass
class EmailField(Options, models.EmailField):
"""
EmailField with custom DISBi options.
"""
pass
class FileField(Options, models.FileField):
"""
FileField with custom DISBi options.
"""
pass
class FilePathField(Options, models.FilePathField):
"""
FilePathField with custom DISBi options.
"""
pass
class ImageField(Options, models.ImageField):
"""
ImageField with custom DISBi options.
"""
pass
class IntegerField(Options, models.IntegerField):
"""
IntegerField with custom DISBi options.
"""
pass
class GenericIPAddressField(Options, models.GenericIPAddressField):
"""
GenericIPAddressField with custom DISBi options.
"""
pass
class PositiveIntegerField(Options, models.PositiveIntegerField):
"""
PositiveIntegerField with custom DISBi options.
"""
pass
class PositiveSmallIntegerField(Options, models.PositiveSmallIntegerField):
"""
PositiveSmallIntegerField with custom DISBi options.
"""
pass
class SlugField(Options, models.SlugField):
"""
SlugField with custom DISBi options.
"""
pass
class SmallIntegerField(Options, models.SmallIntegerField):
"""
SmallIntegerField with custom DISBi options.
"""
pass
class TextField(Options, models.TextField):
"""
TextField with custom DISBi options.
"""
pass
class TimeField(Options, models.TimeField):
"""
TimeField with custom DISBi options.
"""
pass
class URLField(Options, models.URLField):
"""
URLField with custom DISBi options.
"""
pass
class UUIDField(Options, models.UUIDField):
"""
UUIDField with custom DISBi options.
"""
pass
class ForeignKey(RelationshipOptions, models.ForeignKey):
"""
ForeignKey with custom DISBi options.
"""
pass
class ManyToManyField(RelationshipOptions, models.ManyToManyField):
"""
ManyToManyField with custom DISBi options.
"""
pass
class OneToOneField(RelationshipOptions, models.OneToOneField):
"""
OneToOneField with custom DISBi options.
"""
pass
class NullBooleanField(ExcludeOptions, models.NullBooleanField):
"""
NullBooleanField with custom DISBi and exclude options.
"""
pass
class BooleanField(ExcludeOptions, models.BooleanField):
"""
BooleanField with custom DISBi and exclude options.
"""
pass
class EmptyCharField(Options, models.CharField):
"""
FloatField with custom DISBi options and the option to add an
empty value displayer.
"""
def __init__(self, di_empty=None, di_show=True, di_display_name=None, di_hr_primary_key=False,
di_choose=False, di_combinable=False,
*args, **kwargs):
self.di_empty = di_empty
super().__init__(di_show, di_display_name, di_hr_primary_key, di_choose, di_combinable,
*args, **kwargs)
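# --- usage sketch (added for illustration; the model below is hypothetical) ---
# These classes are drop-in replacements for Django's fields, so a DISBi app
# model can declare result-table behaviour inline:
#     class Gene(models.Model):
#         locus_tag = CharField(max_length=50, unique=True,
#                               di_hr_primary_key=True, di_show=True)
#         name = CharField(max_length=100, di_show=True,
#                          di_display_name='Gene name')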
| mit | 7,004,359,213,430,258,000 | 23.898955 | 99 | 0.626784 | false |
openhumanoids/oh-distro | software/ihmc/ihmc_step/translator_ihmc.py | 1 | 14123 | import lcm
import drc
import atlas
import bot_core
import time
import numpy as np
import py_drake_utils as ut
from bdi_step.footsteps import decode_footstep_plan, decode_deprecated_footstep_plan, encode_footstep_plan, FootGoal
from bdi_step.plotting import draw_swing
from bdi_step.utils import Behavior, gl, now_utime
NUM_REQUIRED_WALK_STEPS = 4
PLAN_UPDATE_TIMEOUT = 20 # maximum time allowed between a footstep plan and an 'update' which appends more steps to that plan
# Experimentally determined vector relating BDI's frame for foot position to ours. This is the xyz vector from the position of the foot origin (from drake forwardKin) to the BDI Atlas foot pos estimate, expressed in the frame of the foot.
ATLAS_FRAME_OFFSET = np.array([0.0400, 0.000, -0.0850])
def blank_step_spec():
msg = atlas.behavior_step_spec_t()
msg.foot = atlas.behavior_foot_data_t()
msg.action = atlas.behavior_step_action_t()
return msg
def blank_walk_spec():
msg = atlas.behavior_walk_spec_t()
msg.foot = atlas.behavior_foot_data_t()
msg.action = atlas.behavior_walk_action_t()
return msg
class Mode:
translating = 0
plotting = 1
class IHMCStepTranslator(object):
def __init__(self, mode=Mode.translating, safe=True):
self.mode = mode
self.safe = safe # Don't send atlas behavior commands (to ensure that the robot never starts walking accidentally when running tests)
self.lc = lcm.LCM()
if self.mode == Mode.plotting:
self.gl = gl
else:
self.gl = None
self.bdi_step_queue_in = []
self.delivered_index = None
self.use_spec = True
self.drift_from_plan = np.zeros((3,1))
self.behavior = Behavior.BDI_STEPPING
self.T_local_to_localbdi = bot_core.rigid_transform_t()
self.T_local_to_localbdi.trans = np.zeros(3)
self.T_local_to_localbdi.quat = ut.rpy2quat([0,0,0])
self.last_params = None
self.executing = False
self.last_footstep_plan_time = -np.inf
def handle_bdi_transform(self, channel, msg):
if isinstance(msg, str):
msg = bot_core.rigid_transform_t.decode(msg)
self.T_local_to_localbdi = msg
def handle_footstep_plan(self, channel, msg):
if isinstance(msg, str):
msg = drc.footstep_plan_t.decode(msg)
footsteps, opts = decode_footstep_plan(msg)
self.last_params = msg.params
if len(footsteps) <= 2:
# the first two footsteps are always just the positions of the robot's feet, so a plan of two or fewer footsteps is a no-op
print 'BDI step translator: Empty plan recieved. Not executing.'
return
behavior = opts['behavior']
#if behavior == Behavior.BDI_WALKING:
# # duration = 0.6
# if len(footsteps) < NUM_REQUIRED_WALK_STEPS+2:
# print 'ERROR: Footstep plan must be at least 4 steps for BDI walking translation'
# return
#elif behavior != Behavior.BDI_STEPPING:
# print "BDI step translator: Ignoring footstep plan without BDI_WALKING or BDI_STEPPING behavior"
# return
self.behavior = behavior
now = time.time()
if now - self.last_footstep_plan_time > PLAN_UPDATE_TIMEOUT:
self.executing = False
self.last_footstep_plan_time = now
if self.mode == Mode.plotting:
self.draw(footsteps)
else:
#if not self.executing:
print "Starting new footstep plan"
self.bdi_step_queue_in = footsteps
self.send_params(1)
if not self.safe:
m = "BDI step translator: Steps received; transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
print m
ut.send_status(6,0,0,m)
time.sleep(1)
self.executing = True
self.send_behavior()
else:
m = "BDI step translator: Steps received; in SAFE mode; not transitioning to {:s}".format("BDI_STEP" if self.behavior == Behavior.BDI_STEPPING else "BDI_WALK")
print m
ut.send_status(6,0,0,m)
#else:
# print "Got updated footstep plan"
# if self.bdi_step_queue_in[self.delivered_index-1].is_right_foot == footsteps[0].is_right_foot:
# print "Re-aligning new footsteps to current plan"
# self.bdi_step_queue_in = self.bdi_step_queue_in[:self.delivered_index-1] + footsteps
# else:
# print "Can't align the updated plan to the current plan"
# return
@property
def bdi_step_queue_out(self):
bdi_step_queue_out = [s.copy() for s in self.bdi_step_queue_in]
for step in bdi_step_queue_out:
# Transform to BDI coordinate frame
T1 = ut.mk_transform(step.pos[:3], step.pos[3:])
T2 = ut.mk_transform(self.T_local_to_localbdi.trans, ut.quat2rpy(self.T_local_to_localbdi.quat))
T = T2.dot(T1)
step.pos[:3] = T[:3,3]
step.pos[3:] = ut.rotmat2rpy(T[:3,:3])
self.lc.publish('BDI_ADJUSTED_FOOTSTEP_PLAN', encode_footstep_plan(bdi_step_queue_out, self.last_params).encode())
for step in bdi_step_queue_out:
# Express pos of the center of the foot, as expected by BDI
R = ut.rpy2rotmat(step.pos[3:])
offs = R.dot(ATLAS_FRAME_OFFSET)
# import pdb; pdb.set_trace()
step.pos[:3] += offs
for i in reversed(range(2, len(bdi_step_queue_out))):
bdi_step_queue_out[i].pos[2] -= bdi_step_queue_out[i-1].pos[2]
return [s.to_bdi_spec(self.behavior, j+1) for j, s in enumerate(bdi_step_queue_out[2:])]
def handle_atlas_status(self, channel, msg):
if (not self.executing) or self.mode != Mode.translating:
return
if isinstance(msg, str):
msg = atlas.status_t.decode(msg)
if self.behavior == Behavior.BDI_WALKING:
index_needed = msg.walk_feedback.next_step_index_needed
# if (self.delivered_index + 1) < index_needed <= len(self.bdi_step_queue_in) - 4:
if index_needed <= len(self.bdi_step_queue_in) - 4:
#print "Handling request for next step: {:d}".format(index_needed)
self.send_params(index_needed-1)
else:
self.executing = False
else:
index_needed = msg.step_feedback.next_step_index_needed
if index_needed > 1 and index_needed > self.delivered_index:
# we're starting a new step, so publish the expected double support configuration
self.send_expected_double_support()
# if self.delivered_index < index_needed <= len(self.bdi_step_queue_in) - 2:
if index_needed <= len(self.bdi_step_queue_in) - 2:
# print "Handling request for next step: {:d}".format(index_needed)
self.send_params(index_needed)
else:
print "done executing"
self.executing = False
# Report progress through the footstep plan execution (only when stepping)
progress_msg = drc.footstep_plan_progress_t()
progress_msg.utime = msg.utime
progress_msg.num_steps = len(self.bdi_step_queue_in) - 2
progress_msg.current_step = index_needed - 1
self.lc.publish('FOOTSTEP_PLAN_PROGRESS', progress_msg.encode())
def send_params(self,step_index,force_stop_walking=False):
"""
Publish the next steppping footstep or up to the next 4 walking footsteps as needed.
"""
assert self.mode == Mode.translating, "Translator in Mode.plotting mode is not allowed to send step/walk params"
if self.behavior == Behavior.BDI_WALKING:
walk_param_msg = atlas.behavior_walk_params_t()
walk_param_msg.num_required_walk_steps = NUM_REQUIRED_WALK_STEPS
walk_param_msg.walk_spec_queue = self.bdi_step_queue_out[step_index-1:step_index+3]
walk_param_msg.step_queue = [atlas.step_data_t() for j in range(NUM_REQUIRED_WALK_STEPS)] # Unused
walk_param_msg.use_spec = True
walk_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
walk_param_msg.use_demo_walk = 0
if force_stop_walking:
for step in walk_param_msg.walk_spec_queue:
step.step_index = -1
self.lc.publish('ATLAS_WALK_PARAMS', walk_param_msg.encode())
self.delivered_index = walk_param_msg.walk_spec_queue[0].step_index
#print "Sent walk params for step indices {:d} through {:d}".format(walk_param_msg.walk_spec_queue[0].step_index, walk_param_msg.walk_spec_queue[-1].step_index)
elif self.behavior == Behavior.BDI_STEPPING:
step_param_msg = atlas.behavior_step_params_t()
step_param_msg.desired_step = atlas.step_data_t() # Unused
step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
step_param_msg.use_relative_step_height = 1 # as of Atlas 2.5.0 this flag is disabled and always acts as if it's set to 1
step_param_msg.use_demo_walk = 0
step_param_msg.use_spec = True
step_param_msg.desired_step = atlas.step_data_t() # Unused
step_param_msg.desired_step_spec = self.bdi_step_queue_out[step_index-1]
if force_stop_walking:
step_param_msg.desired_step_spec.step_index = -1
self.lc.publish('ATLAS_STEP_PARAMS', step_param_msg.encode())
self.delivered_index = step_param_msg.desired_step_spec.step_index
#print "Sent step params for step index {:d}".format(step_param_msg.desired_step_spec.step_index)
else:
raise ValueError("Bad behavior value: {:s}".format(self.behavior))
def send_expected_double_support(self):
"""
Publish the next expected double support configuration as a two-element footstep plan to support continuous replanning mode.
"""
self.lc.publish('NEXT_EXPECTED_DOUBLE_SUPPORT', encode_footstep_plan(self.bdi_step_queue_in[self.delivered_index:self.delivered_index+2], self.last_params).encode())
def send_behavior(self):
command_msg = atlas.behavior_command_t()
command_msg.utime = now_utime()
if self.behavior == Behavior.BDI_STEPPING:
command_msg.command = "step"
elif self.behavior == Behavior.BDI_WALKING:
command_msg.command = "walk"
else:
raise ValueError("Tried to send invalid behavior to Atlas: {:s}".format(self.behavior))
self.lc.publish("ATLAS_BEHAVIOR_COMMAND", command_msg.encode())
def handle_stop_walking(self, channel, msg_data):
"""
Generate a set of footsteps with -1 step indices, which will cause the BDI controller to switch to standing instead of continuing to walk
"""
if self.behavior == Behavior.BDI_WALKING:
n_steps = 6
else:
n_steps = 3
footsteps = [FootGoal(pos=np.zeros((6)),
step_speed=0,
step_height=0,
step_id=0,
pos_fixed=np.zeros((6,1)),
is_right_foot=0,
is_in_contact=0,
bdi_step_duration=0,
bdi_sway_duration=0,
bdi_lift_height=0,
bdi_toe_off=0,
bdi_knee_nominal=0,
bdi_max_body_accel=0,
bdi_max_foot_vel=0,
bdi_sway_end_dist=-1,
bdi_step_end_dist=-1,
terrain_pts=np.matrix([]))] * n_steps
self.bdi_step_queue_in = footsteps
self.send_params(1, force_stop_walking=True)
self.bdi_step_queue_in = [] # to prevent infinite spewing of -1 step indices
self.delivered_index = None
self.executing = False
def run(self):
if self.mode == Mode.translating:
print "IHMCStepTranslator running in robot-side translator mode"
self.lc.subscribe('COMMITTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('STOP_WALKING', self.handle_stop_walking)
else:
print "IHMCStepTranslator running in base-side plotter mode"
self.lc.subscribe('FOOTSTEP_PLAN_RESPONSE', self.handle_footstep_plan)
#self.lc.subscribe('CANDIDATE_BDI_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('BDI_ADJUSTED_FOOTSTEP_PLAN', self.handle_footstep_plan)
#self.lc.subscribe('ATLAS_STATUS', self.handle_atlas_status)
self.lc.subscribe('LOCAL_TO_LOCAL_BDI', self.handle_bdi_transform)
while True:
self.lc.handle()
def draw(self, footsteps):
"""
Plot a rough guess of each swing foot trajectory, based on the BDI software manual's description of how swing_height and lift_height behave.
"""
for j in range(len(footsteps)-2):
st0 = footsteps[j].to_bdi_spec(self.behavior, 0)
st1 = footsteps[j+2].to_bdi_spec(self.behavior, 0)
is_stepping = self.behavior==Behavior.BDI_STEPPING
if is_stepping:
lift_height = st1.action.lift_height
else:
lift_height = None
draw_swing(self.gl,
st0.foot.position,
st1.foot.position,
st1.action.swing_height,
is_stepping=is_stepping,
lift_height=lift_height)
self.gl.switch_buffer()
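# --- usage sketch (added for illustration) ---
# On the robot side the translator is typically started as:
#     translator = IHMCStepTranslator(mode=Mode.translating, safe=True)
#     translator.run()   # blocks, dispatching LCM messages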
| bsd-3-clause | -3,496,072,450,974,365,000 | 45.920266 | 238 | 0.591517 | false |
albert12132/templar | templar/api/rules/core.py | 1 | 4373 | """
The public API for Templar pre/post-processor rules.
Users can use this module with the following import statement:
from templar.api.rules import core
"""
from templar.exceptions import TemplarError
import re
class Rule:
"""Represents a preprocessor or postprocessor rule. Rules are applied in the order that they
are listed in the Config.
When constructing a rule, the arguments `src` and `dst` are regular expressions; Templar will
only apply a rule if the source and destination of the publishing pipeline match the regexes.
"""
def __init__(self, src=None, dst=None):
if src is not None and not isinstance(src, str):
raise InvalidRule(
"Rule's source pattern must be a string or None, "
"but was type '{}'".format(type(src).__name__))
if dst is not None and not isinstance(dst, str):
raise InvalidRule(
"Rule's destination pattern must be a string or None, "
"but was type '{}'".format(type(src).__name__))
self._src_pattern = src
self._dst_pattern = dst
def applies(self, src, dst):
"""Checks if this rule applies to the given src and dst paths, based on the src pattern and
dst pattern given in the constructor.
If src pattern was None, this rule will apply to any given src path (same for dst).
"""
if self._src_pattern and (src is None or re.search(self._src_pattern, src) is None):
return False
elif self._dst_pattern and (dst is None or re.search(self._dst_pattern, dst) is None):
return False
return True
def apply(self, content):
"""Applies this rule to the given content. A rule can do one or more of the following:
- Return a string; this is taken to be the transformed version of content, and will be used
as the new content after applying this rule.
- Modify variables (a dict). Usually, Rules that modify this dictionary will add new
variables. However, a Rule can also delete or update key/value pairs in the dictionary.
"""
raise NotImplementedError
class SubstitutionRule(Rule):
"""An abstract class that represents a rule that transforms the content that is being processed,
based on a regex pattern and a substitution function. The substitution behaves exactly like
re.sub.
"""
pattern = None # Subclasses should override this variable with a string or compiled regex.
def substitute(self, match):
"""A substitution function that returns the text with which to replace the given match.
Subclasses should implement this method.
"""
raise InvalidRule(
'{} must implement the substitute method to be '
'a valid SubstitutionRule'.format(type(self).__name__))
def apply(self, content):
if isinstance(self.pattern, str):
return re.sub(self.pattern, self.substitute, content)
elif hasattr(self.pattern, 'sub') and callable(self.pattern.sub):
return self.pattern.sub(self.substitute, content)
raise InvalidRule(
"{}'s pattern has type '{}', but expected a string or "
"compiled regex.".format(type(self).__name__, type(self.pattern).__name__))
class VariableRule(Rule):
"""An abstract class that represents a rule that constructs variables given the content. For
VariableRules, the apply method returns a dictionary mapping str -> str instead of returning
transformed content (a string).
"""
def extract(self, content):
"""A substitution function that returns the text with which to replace the given match.
Subclasses should implement this method.
"""
raise InvalidRule(
'{} must implement the extract method to be '
'a valid VariableRule'.format(type(self).__name__))
def apply(self, content):
variables = self.extract(content)
if not isinstance(variables, dict):
raise InvalidRule(
"{} is a VariableRule, so its extract method should return a dict. Instead, it "
"returned type '{}'".format(type(self).__name__, type(variables).__name__))
return variables
class InvalidRule(TemplarError):
pass
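# --- example rule (added for illustration; not part of the original module) ---
# A minimal SubstitutionRule that wraps TODO markers, applied only when
# publishing Markdown sources to HTML destinations.
class TodoRule(SubstitutionRule):
    pattern = r'\bTODO\b'

    def substitute(self, match):
        return '<strong>{}</strong>'.format(match.group(0))

# todo_rule = TodoRule(src=r'\.md$', dst=r'\.html$')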
| mit | 298,352,553,187,532,500 | 40.647619 | 100 | 0.645095 | false |
katakumpo/niceredis | niceredis/client/server.py | 1 | 6771 | # -*- coding: utf-8 *-*
import warnings
from redis.connection import Token
from redis.exceptions import ConnectionError, RedisError
from .base import RedisBase
class ServerCommands(RedisBase):
# SERVER INFORMATION
def bgrewriteaof(self):
"Tell the Redis server to rewrite the AOF file from data in memory."
return self.execute_command('BGREWRITEAOF')
def bgsave(self):
"""
Tell the Redis server to save its data to disk. Unlike save(),
this method is asynchronous and returns immediately.
"""
return self.execute_command('BGSAVE')
def client_kill(self, address):
"Disconnects the client at ``address`` (ip:port)"
return self.execute_command('CLIENT KILL', address)
def client_list(self):
"Returns a list of currently connected clients"
return self.execute_command('CLIENT LIST')
def client_getname(self):
"Returns the current connection name"
return self.execute_command('CLIENT GETNAME')
def client_setname(self, name):
"Sets the current connection name"
return self.execute_command('CLIENT SETNAME', name)
def config_get(self, pattern="*"):
"Return a dictionary of configuration based on the ``pattern``"
return self.execute_command('CONFIG GET', pattern)
def config_set(self, name, value):
"Set config item ``name`` with ``value``"
return self.execute_command('CONFIG SET', name, value)
def config_resetstat(self):
"Reset runtime statistics"
return self.execute_command('CONFIG RESETSTAT')
def config_rewrite(self):
"Rewrite config file with the minimal change to reflect running config"
return self.execute_command('CONFIG REWRITE')
def dbsize(self):
"Returns the number of keys in the current database"
return self.execute_command('DBSIZE')
def debug_object(self, key):
"Returns version specific meta information about a given key"
return self.execute_command('DEBUG OBJECT', key)
def echo(self, value):
"Echo the string back from the server"
return self.execute_command('ECHO', value)
def flushall(self):
"Delete all keys in all databases on the current host"
return self.execute_command('FLUSHALL')
def flushdb(self):
"Delete all keys in the current database"
return self.execute_command('FLUSHDB')
def info(self, section=None):
"""
Returns a dictionary containing information about the Redis server
The ``section`` option can be used to select a specific section
of information
The section option is not supported by older versions of Redis Server,
and will generate ResponseError
"""
if section is None:
return self.execute_command('INFO')
else:
return self.execute_command('INFO', section)
def lastsave(self):
"""
Return a Python datetime object representing the last time the
Redis database was saved to disk
"""
return self.execute_command('LASTSAVE')
def object(self, infotype, key):
"Return the encoding, idletime, or refcount about the key"
return self.execute_command('OBJECT', infotype, key, infotype=infotype)
def ping(self):
"Ping the Redis server"
return self.execute_command('PING')
def save(self):
"""
Tell the Redis server to save its data to disk,
blocking until the save is complete
"""
return self.execute_command('SAVE')
def sentinel(self, *args):
"Redis Sentinel's SENTINEL command."
warnings.warn(
DeprecationWarning('Use the individual sentinel_* methods'))
def sentinel_get_master_addr_by_name(self, service_name):
"Returns a (host, port) pair for the given ``service_name``"
return self.execute_command('SENTINEL GET-MASTER-ADDR-BY-NAME',
service_name)
def sentinel_master(self, service_name):
"Returns a dictionary containing the specified masters state."
return self.execute_command('SENTINEL MASTER', service_name)
def sentinel_masters(self):
"Returns a list of dictionaries containing each master's state."
return self.execute_command('SENTINEL MASTERS')
def sentinel_monitor(self, name, ip, port, quorum):
"Add a new master to Sentinel to be monitored"
return self.execute_command('SENTINEL MONITOR', name, ip, port, quorum)
def sentinel_remove(self, name):
"Remove a master from Sentinel's monitoring"
return self.execute_command('SENTINEL REMOVE', name)
def sentinel_sentinels(self, service_name):
"Returns a list of sentinels for ``service_name``"
return self.execute_command('SENTINEL SENTINELS', service_name)
def sentinel_set(self, name, option, value):
"Set Sentinel monitoring parameters for a given master"
return self.execute_command('SENTINEL SET', name, option, value)
def sentinel_slaves(self, service_name):
"Returns a list of slaves for ``service_name``"
return self.execute_command('SENTINEL SLAVES', service_name)
def shutdown(self):
"Shutdown the server"
try:
self.execute_command('SHUTDOWN')
except ConnectionError:
# a ConnectionError here is expected
return
raise RedisError("SHUTDOWN seems to have failed.")
def slaveof(self, host=None, port=None):
"""
Set the server to be a replicated slave of the instance identified
by the ``host`` and ``port``. If called without arguments, the
instance is promoted to a master instead.
"""
if host is None and port is None:
return self.execute_command('SLAVEOF', Token('NO'), Token('ONE'))
return self.execute_command('SLAVEOF', host, port)
def slowlog_get(self, num=None):
"""
Get the entries from the slowlog. If ``num`` is specified, get the
most recent ``num`` items.
"""
args = ['SLOWLOG GET']
if num is not None:
args.append(num)
return self.execute_command(*args)
def slowlog_len(self):
"Get the number of items in the slowlog"
return self.execute_command('SLOWLOG LEN')
def slowlog_reset(self):
"Remove all items in the slowlog"
return self.execute_command('SLOWLOG RESET')
def time(self):
"""
Returns the server time as a 2-item tuple of ints:
(seconds since epoch, microseconds into this second).
"""
return self.execute_command('TIME')
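# --- usage note (added for illustration) ---
# ServerCommands is a mixin over RedisBase; a composed client exposes these
# methods directly, e.g.:
#     client.config_set('maxmemory', '100mb')
#     client.slowlog_get(10)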
| mit | 1,597,255,999,482,688,300 | 34.082902 | 79 | 0.640526 | false |
asdil12/pywikibase | things.py | 1 | 4025 | #!/usr/bin/python2
import re
class BaseValue(object):
def __init__(self, value):
self.value = value
def __str__(self):
        return self.value
def __repr__(self):
return "<%s object: %s>" % (self.__class__.__name__, self.__str__())
def to_value(self):
return self.__str__()
# Note:
# to_value: generates value as expected by set_claim (py obj)
# from_value: expects datavalue.value as provided by get_claims (py obj)
class Property(BaseValue):
def __init__(self, id):
if isinstance(id, str):
self.id = int(id.upper().replace("P", ""))
else:
self.id = id
def __str__(self):
return "P%i" % self.id
def to_value(self):
return {"entity-type": "property", "numeric-id": self.id}
@classmethod
def from_value(cls, value):
assert value["entity-type"] == "property"
return cls(value["numeric-id"])
class Item(BaseValue):
def __init__(self, id):
if isinstance(id, str):
self.id = int(id.upper().replace("Q", ""))
else:
self.id = id
def __str__(self):
return "Q%i" % self.id
def to_value(self):
return {"entity-type": "item", "numeric-id": self.id}
@classmethod
def from_value(cls, value):
# ok this is ugly...
if value["entity-type"] == "property":
return Property.from_value(value)
assert value["entity-type"] == "item"
return cls(value["numeric-id"])
class String(BaseValue):
def __str__(self):
return self.value
def to_value(self):
return self.value
@classmethod
def from_value(cls, value):
return cls(value)
class Time(BaseValue):
# wikibase uses a datetime format based on ISO8601
# eg: +00000002013-01-01T00:00:00Z
iso8601_re = re.compile(r"(?P<ysign>[\+\-])(?P<year>\d+)-(?P<month>\d+)-(?P<day>\d+)T(?P<hour>\d+):(?P<minute>\d+):(?P<second>\d+)Z")
def __init__(self, time, timezone=0, before=0, after=0, precision=11, calendarmodel="http://www.wikidata.org/entity/Q1985727"):
self.time = time
self.timezone = timezone
self.before = before
self.after = after
self.precision = precision # FIXME: allow string input
self.calendarmodel = calendarmodel
def __str__(self):
return self.to_value()["time"]
def to_value(self):
ysign = '+' if self.time["year"] >= 0 else '-'
value_out = {
"time": ysign + "%(year)011i-%(month)02i-%(day)02iT%(hour)02i:%(minute)02i:%(second)02iZ" % self.time,
"timezone": self.timezone,
"before": self.before,
"after": self.after,
"precision": self.precision,
"calendarmodel": self.calendarmodel,
}
return value_out
@classmethod
def from_value(cls, value):
#FIXME: catch error exception when match is empty - raise proper error
time_raw = Time.iso8601_re.match(value["time"]).groupdict()
value_in = {
"time": {
"year": int("%(ysign)s%(year)s" % time_raw),
"month": int(time_raw["month"]),
"day": int(time_raw["day"]),
"hour": int(time_raw["hour"]),
"minute": int(time_raw["minute"]),
"second": int(time_raw["second"]),
},
"timezone": value["timezone"],
"before": value["before"],
"after": value["after"],
"precision": value["precision"],
"calendarmodel": value["calendarmodel"],
}
return cls(**value_in)
class GlobeCoordinate(BaseValue):
def __init__(self, latitude, longitude, precision=0.000001, globe="http://www.wikidata.org/entity/Q2"):
self.latitude = latitude
self.longitude = longitude
self.precision = precision # in degrees (or fractions of)
self.globe = globe
def __str__(self):
return "%f, %f" % (self.latitude, self.longitude)
def to_value(self):
value_out = {
"latitude": self.latitude,
"longitude": self.longitude,
"precision": self.precision,
"globe": self.globe,
}
return value_out
@classmethod
def from_value(cls, value):
try:
del value['altitude']
except KeyError:
pass
return cls(**value)
# datavalue.type -> type class
types = {
"wikibase-entityid": Item, # or Property
"string": String,
"time": Time,
"globecoordinate": GlobeCoordinate,
}
def thing_from_datavalue(datavalue):
return types[datavalue["type"]].from_value(datavalue["value"])
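

if __name__ == "__main__":
    # Minimal round-trip sketch; the datavalue dict below is a hypothetical
    # wikibase payload, not fetched from a live API.
    dv = {"type": "wikibase-entityid",
          "value": {"entity-type": "item", "numeric-id": 42}}
    thing = thing_from_datavalue(dv)
    print repr(thing)        # <Item object: Q42>
    print thing.to_value()   # {'entity-type': 'item', 'numeric-id': 42}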
| gpl-3.0 | -1,003,805,324,486,820,000 | 24.636943 | 134 | 0.643478 | false |
adrgerez/ardublockly | package/build_pyinstaller.py | 1 | 9203 | #!/usr/bin/env python2
# -*- coding: utf-8 -*- #
#
# Builds the Ardublockly Python portion of the app for Linux or OS X.
#
# Copyright (c) 2015 carlosperate https://github.com/carlosperate/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# IMPORTANT: This script is designed to be located one directory level under the
# project root folder.
#
# This script file uses PyInstaller to create a self contained executable
# build of the Ardublockly application.
# It will remove the build folders left from PyInstaller and move the folder
# with the executable application into the project root folder.
#
# Due to all the debugging steps required to get a CI server running properly
# this script ended up being quite verbose. It might be updated in the future
# to include a -v flag to select a verbose mode.
from __future__ import unicode_literals, print_function
import os
import sys
import shutil
import platform
import subprocess
from glob import glob
spec_coll_name = "server"
if platform.system() == "Darwin":
exec_folder = "arduexec.app"
else:
exec_folder = "arduexec"
py_exec_folder = os.path.join(exec_folder, "server")
script_tag = "[Ardublockly build] "
script_tab = " "
# The project_root_dir depends on the location of this file, so it cannot be
# moved without updating this line
project_root_dir = \
os.path.dirname( # going up 1 level
os.path.dirname(os.path.realpath(__file__))) # folder dir of this
# verbose_print = print if verbose else lambda *a, **k: None
def remove_directory(dir_to_remove):
""" Removes the a given directory. """
if os.path.exists(dir_to_remove):
print(script_tab + "Removing directory %s" % dir_to_remove)
shutil.rmtree(dir_to_remove)
else:
print(script_tab + "Directory %s was not found." % dir_to_remove)
def get_os():
"""
Gets the OS to based on the command line argument of the platform info.
Only possibilities are: "windows", "mac", "linux"
"""
valid_os = ["windows", "linux", "mac"]
print(script_tab + "Checking for command line argument indicated OS:")
if len(sys.argv) > 1:
if sys.argv[1] in valid_os:
# Take the first argument and use it as the os
print(script_tab + "Valid command line argument found: %s" %
sys.argv[1])
return "%s" % sys.argv[1]
else:
print(script_tab + "Invalid command line argument found: %s\n" %
sys.argv[1] + script_tab + "Options available: %s" % valid_os)
print(script_tab + "Valid command line arg not found, checking system.")
os_found = platform.system()
if os_found == "Windows":
raise SystemExit(script_tab + "OS found is: %s\n" % valid_os[0] +
"Exit: This script is not design to run on Windows.")
elif os_found == "Linux":
print(script_tab + "OS found is: %s" % valid_os[1])
return valid_os[1]
elif os_found == "Darwin":
print(script_tab + "OS found is: %s" % valid_os[2])
return valid_os[2]
else:
raise SystemExit("Exit: OS data found is invalid '%s'" % os_found)
def remove_pyinstaller_temps():
"""
Removes the temporary folders created by PyInstaller (dist and build).
"""
remove_directory(os.path.join(os.getcwd(), "dist"))
remove_directory(os.path.join(os.getcwd(), "build"))
def pyinstaller_build():
"""
Launches a subprocess running Python PyInstaller with the spec file from the
package folder. Captures the output streams and checks for errors.
:return: Boolean indicating the success state of the operation.
"""
process_args = [
"python",
"%s" % os.path.join("package", "pyinstaller", "pyinstaller.py"),
"%s" % os.path.join("package", "pyinstaller.spec")]
print(script_tab + "Command: %s" % process_args)
pyinstaller_process = subprocess.Popen(process_args)
std_op, std_err_op = pyinstaller_process.communicate()
if pyinstaller_process.returncode != 0:
print(script_tab + "ERROR: PyInstaller returned with exit code: %s" %
pyinstaller_process.returncode)
return False
return True
def move_executable_folder():
"""
Moves the PyInstaller executable folder from dist to project root.
:return: Boolean indicating the success state of the operation.
"""
original_exec_dir = os.path.join(project_root_dir, "dist", spec_coll_name)
final_exec_dir = os.path.join(project_root_dir, py_exec_folder)
if os.path.exists(original_exec_dir):
print(script_tab + "Moving exec files from %s \n" % original_exec_dir +
script_tab + "to %s" % final_exec_dir)
shutil.move(original_exec_dir, final_exec_dir)
else:
print(script_tab + "ERROR: PyInstaller executable output folder '%s' " %
original_exec_dir + "not found!")
return False
return True
def copy_data_files(os_type):
""" At the moment there are no additional data files required to copy """
pass
def create_shell_file(os_type):
"""
    Creates a shell script file in the project root to easily launch
    the Ardublockly application.
The Mac OS X build runs directly from clicking the .app folder, so it no
longer needs a shell script.
"""
shell_text = ""
shell_location = ""
# The script depends on platform
if os_type == "mac":
# There is no need for a shell file in Mac OS X
print(script_tab + "There is no need to create shell file in Mac OS X.")
return
elif os_type == "linux":
shell_text = '#!/bin/bash\n' \
'DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )\n' \
'echo "[Shell Launch Script] Executing from: $DIR"\n' \
'./%s' % os.path.join(exec_folder, "ardublockly")
shell_location = os.path.join(
project_root_dir, "ardublockly_run.sh")
else:
# No other OS expected, so just return. This should never happen
return
try:
print(script_tab + "Creating shell file into %s" % shell_location)
bash_file = open(shell_location, "w")
bash_file.write(shell_text)
bash_file.close()
except Exception as e:
print(script_tab + "%s" % e)
print(script_tab + "ERROR: Shell file to launch the Ardublockly "
"application could not be created.")
# Make shell script executable by launching a subprocess
process_args = ["chmod", "+x", "%s" % shell_location]
print(script_tab + "Command to make executable: %s" % process_args)
try:
pyinstaller_process = subprocess.Popen(process_args)
std_op, std_err_op = pyinstaller_process.communicate()
except Exception as e:
print(script_tab + "%s" % e)
print(script_tab + "ERROR: Could not make Shell file executable.")
def build_ardublockly():
print(script_tag + "Build procedure started.")
print(script_tag + "Checking for OS.")
os_type = get_os()
print(script_tag + "Building Ardublockly for %s." % os_type)
print(script_tag + "Project directory is: %s" % project_root_dir)
print(script_tag + "Script working directory: %s" % os.getcwd())
print(script_tag + "Removing PyInstaller old temp directories.")
remove_pyinstaller_temps()
print(script_tag + "Running PyInstaller process.")
success = pyinstaller_build()
if not success:
print(script_tab + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
raise SystemExit(script_tab + "Exiting as there was an error in the "
"PyInstaller execution.")
print(script_tag + "Removing old ardublockly executable directory.")
remove_directory(os.path.join(project_root_dir, py_exec_folder))
print(script_tag + "Moving executable folder to project root.")
success = move_executable_folder()
if not success:
print(script_tab + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
raise SystemExit(script_tab + "Exiting now as there was an error in "
"the PyInstaller execution.")
print(script_tag + "Coping data files into executable directory.")
copy_data_files(os_type)
print(script_tag + "Removing PyInstaller recent temp directories.")
remove_pyinstaller_temps()
print(script_tag + "Creating shell file to easily execute Ardublockly.")
create_shell_file(os_type)
if __name__ == "__main__":
build_ardublockly()
| apache-2.0 | 2,858,567,297,311,880,700 | 36.563265 | 80 | 0.641856 | false |
iulian787/spack | var/spack/repos/builtin/packages/cctools/package.py | 2 | 4409 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cctools(AutotoolsPackage):
"""The Cooperative Computing Tools (cctools) enable large scale
distributed computations to harness hundreds to thousands of
machines from clusters, clouds, and grids.
"""
homepage = "https://cctools.readthedocs.io"
url = "https://ccl.cse.nd.edu/software/files/cctools-7.1.5-source.tar.gz"
version('7.1.7', sha256='63cbfabe52591d41a1b27040bf27700d2a11b2f30cb2e25132e0016fb1aade03')
version('7.1.5', sha256='c01415fd47a1d9626b6c556e0dc0a6b0d3cd67224fa060cabd44ff78eede1d8a')
version('7.1.3', sha256='b937878ab429dda31bc692e5d9ffb402b9eb44bb674c07a934bb769cee4165ba')
version('7.1.2', sha256='ca871e9fe245d047d4c701271cf2b868e6e3a170e8834c1887157ed855985131')
version('7.1.0', sha256='84748245db10ff26c0c0a7b9fd3ec20fbbb849dd4aadc5e8531fd1671abe7a81')
version('7.0.18', sha256='5b6f3c87ae68dd247534a5c073eb68cb1a60176a7f04d82699fbc05e649a91c2')
version('6.1.1', sha256='97f073350c970d6157f80891b3bf6d4f3eedb5f031fea386dc33e22f22b8af9d')
depends_on('openssl')
depends_on('perl+shared', type=('build', 'run'))
depends_on('python', type=('build', 'run'))
depends_on('readline')
depends_on('gettext') # Corrects python linking of -lintl flag.
depends_on('swig')
# depends_on('xrootd')
depends_on('zlib')
patch('arm.patch', when='target=aarch64:')
patch('cctools_7.0.18.python.patch', when='@7.0.18')
patch('cctools_6.1.1.python.patch', when='@6.1.1')
# Generally SYS_foo is defined to __NR_foo (sys/syscall.h) which
# is then defined to a syscall number (asm/unistd_64.h). Certain
# CentOS systems have SYS_memfd_create defined to
# __NR_memfd_create but are missing the second definition.
# This is a belt and suspenders solution to the problem.
def patch(self):
before = '#if defined(__linux__) && defined(SYS_memfd_create)'
after = '#if defined(__linux__) && defined(SYS_memfd_create) && defined(__NR_memfd_create)' # noqa: E501
f = 'dttools/src/memfdexe.c'
kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
filter_file(before, after, f, **kwargs)
if self.spec.satisfies('%fj'):
makefiles = ['chirp/src/Makefile', 'grow/src/Makefile']
for m in makefiles:
filter_file('-fstack-protector-all', '', m)
def configure_args(self):
args = []
# make sure we do not pick a python outside spack:
if self.spec.satisfies('@6.1.1'):
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python3-path', self.spec['python'].prefix,
'--with-python-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python-path', 'no',
'--with-python3-path', 'no'
])
else:
# versions 7 and above, where --with-python-path recognized the
# python version:
if self.spec.satisfies('^python@3:'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python2-path', 'no'
])
elif self.spec.satisfies('^python@:2.9'):
args.extend([
'--with-python-path', self.spec['python'].prefix,
'--with-python3-path', 'no'
])
else:
args.extend([
'--with-python2-path', 'no',
'--with-python3-path', 'no'
])
# disable these bits
for p in ['mysql', 'xrootd']:
args.append('--with-{0}-path=no'.format(p))
# point these bits at the Spack installations
for p in ['openssl', 'perl', 'readline', 'swig', 'zlib']:
args.append('--with-{0}-path={1}'.format(p, self.spec[p].prefix))
return args
| lgpl-2.1 | -6,209,909,144,982,183,000 | 42.653465 | 113 | 0.585847 | false |
ThomasWu/PersonalNewsWeb | backend/common/configuration_service_client_test.py | 1 | 1367 | import configuration_service_client as conf_client
def test_basic_get():
amqp_settings = conf_client.getSystemSettings('amqp')
print amqp_settings
assert amqp_settings is not None
assert len(amqp_settings) > 0
print 'test basic get passed'
def test_setAndDrop():
system = 'test'
settings = {'url': 'test'}
conf_client.setSystemSettings(system, settings)
received_settings = conf_client.getSystemSettings(system)
assert received_settings == settings
conf_client.dropSystemSettings(system)
received_settings = conf_client.getSystemSettings(system)
assert received_settings is None
print 'test set and drop passed'
def test_invalidSet():
valid_system_name = 'test'
invalid_system_name = 1
valid_settings = {'url': 'test'}
invalid_settings = None
# test invalid system name
conf_client.setSystemSettings(invalid_system_name, valid_settings)
received_settings = conf_client.getSystemSettings(invalid_system_name)
assert received_settings is None
# test invalid settings
conf_client.setSystemSettings(valid_system_name, invalid_settings)
received_settings = conf_client.getSystemSettings(valid_system_name)
assert received_settings is None
print 'test invalid set passed'
if __name__=='__main__':
test_basic_get()
test_setAndDrop()
test_invalidSet() | mit | -1,052,612,304,301,058,700 | 34.076923 | 74 | 0.720556 | false |
marifersahin/pyfolio | venv/bin/player.py | 1 | 2205 | #!/Users/polizom/Django/pyfolio/venv/bin/python2.7
#
# The Python Imaging Library
# $Id$
#
from __future__ import print_function
try:
from tkinter import *
except ImportError:
from Tkinter import *
from PIL import Image, ImageTk
import sys
# --------------------------------------------------------------------
# an image animation player
class UI(Label):
def __init__(self, master, im):
if isinstance(im, list):
# list of images
self.im = im[1:]
im = self.im[0]
else:
# sequence
self.im = im
if im.mode == "1":
self.image = ImageTk.BitmapImage(im, foreground="white")
else:
self.image = ImageTk.PhotoImage(im)
Label.__init__(self, master, image=self.image, bg="black", bd=0)
self.update()
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
def next(self):
if isinstance(self.im, list):
try:
im = self.im[0]
del self.im[0]
self.image.paste(im)
except IndexError:
return # end of list
else:
try:
im = self.im
im.seek(im.tell() + 1)
self.image.paste(im)
except EOFError:
return # end of file
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
self.update_idletasks()
# --------------------------------------------------------------------
# script interface
if __name__ == "__main__":
if not sys.argv[1:]:
print("Syntax: python player.py imagefile(s)")
sys.exit(1)
filename = sys.argv[1]
root = Tk()
root.title(filename)
if len(sys.argv) > 2:
# list of images
print("loading...")
im = []
for filename in sys.argv[1:]:
im.append(Image.open(filename))
else:
# sequence
im = Image.open(filename)
UI(root, im).pack()
root.mainloop()
| mit | 354,823,484,656,687,040 | 20.617647 | 72 | 0.473923 | false |
a25kk/tam | src/tam.sitecontent/tam/sitecontent/testing.py | 1 | 1960 | # -*- coding: utf-8 -*-
"""Base module for unittesting."""
from plone.app.testing import applyProfile
from plone.app.testing import FunctionalTesting
from plone.app.testing import IntegrationTesting
from plone.app.testing import login
from plone.app.testing import PLONE_FIXTURE
from plone.app.testing import PloneSandboxLayer
from plone.app.testing import setRoles
from plone.app.testing import TEST_USER_ID
from plone.app.testing import TEST_USER_NAME
from plone.testing import z2
import unittest2 as unittest
class tamSitecontentLayer(PloneSandboxLayer):
defaultBases = (PLONE_FIXTURE,)
def setUpZope(self, app, configurationContext):
"""Set up Zope."""
# Load ZCML
import tam.sitecontent
self.loadZCML(package=tam.sitecontent)
z2.installProduct(app, 'tam.sitecontent')
def setUpPloneSite(self, portal):
"""Set up Plone."""
# Install into Plone site using portal_setup
applyProfile(portal, 'tam.sitecontent:default')
# Login and create some test content
setRoles(portal, TEST_USER_ID, ['Manager'])
login(portal, TEST_USER_NAME)
portal.invokeFactory('Folder', 'folder')
# Commit so that the test browser sees these objects
portal.portal_catalog.clearFindAndRebuild()
import transaction
transaction.commit()
def tearDownZope(self, app):
"""Tear down Zope."""
z2.uninstallProduct(app, 'tam.sitecontent')
FIXTURE = tamSitecontentLayer()
INTEGRATION_TESTING = IntegrationTesting(
bases=(FIXTURE,), name="tamSitecontentLayer:Integration")
FUNCTIONAL_TESTING = FunctionalTesting(
bases=(FIXTURE,), name="tamSitecontentLayer:Functional")
class IntegrationTestCase(unittest.TestCase):
"""Base class for integration tests."""
layer = INTEGRATION_TESTING
class FunctionalTestCase(unittest.TestCase):
"""Base class for functional tests."""
layer = FUNCTIONAL_TESTING
| mit | -5,577,918,858,943,459,000 | 29.153846 | 61 | 0.714796 | false |
fauskanger/Pretreat | app/classes/graph/path.py | 1 | 1741 | from app.config import config
from app.pythomas import shapes as shapelib
from app.pythomas import pythomas as lib
class Path:
def __init__(self, path_nodes):
path_nodes = None if path_nodes == [None] else path_nodes
self.nodes = [] if not path_nodes else path_nodes
self.complete = False
if path_nodes:
self.complete = True
def __add__(self, other):
if self.last() is other.first():
if len(other.nodes) > 1:
return Path(self.nodes + other.nodes[1:])
return self.copy()
else:
return Path(self.nodes + other.nodes)
def add_node(self, node, index=None):
if node in self.nodes:
return False
if index is None:
self.nodes.append(node)
else:
self.nodes.insert(index, node)
return True
def remove_node(self, node):
return lib.try_remove(self.nodes, node)
def update(self, dt):
pass
def draw(self, batch=None):
pass
def delete(self):
self.nodes.clear()
def get_edge_list(self):
nodes = self.get_node_list()
edges = []
for i in range(1, self.get_count()):
edges.append((nodes[i-1], nodes[i]))
return edges
def first(self):
if not self.nodes:
return None
return self.nodes[0]
def last(self):
if not self.nodes:
return None
return self.nodes[-1]
def has_node(self, node):
return node in self.get_node_list()
def get_node_list(self):
return self.nodes
def get_count(self):
return len(self.nodes)
def copy(self):
return Path(self.nodes)
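

if __name__ == "__main__":
    # Behavioural sketch (run inside the app so the imports above resolve);
    # plain strings stand in for real node objects here.
    b = "b"
    joined = Path(["a", b]) + Path([b, "c"])
    print(joined.get_node_list())   # ['a', 'b', 'c']
    print(joined.get_edge_list())   # [('a', 'b'), ('b', 'c')]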
| gpl-2.0 | 7,555,444,032,393,306,000 | 23.180556 | 65 | 0.556577 | false |
DNX/django-e1337cms | docs/conf.py | 1 | 7732 | # -*- coding: utf-8 -*-
#
# e1337cms documentation build configuration file, created by
# sphinx-quickstart on Fri Jun 1 09:47:07 2012.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'e1337cms'
copyright = u'2012, Denis Darii'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.3'
# The full version, including alpha/beta/rc tags.
release = '0.0.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'e1337cmsdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'e1337cms.tex', u'e1337cms Documentation',
u'Denis Darii', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'e1337cms', u'e1337cms Documentation',
[u'Denis Darii'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'e1337cms', u'e1337cms Documentation',
u'Denis Darii', 'e1337cms', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| bsd-3-clause | 5,366,876,038,217,494,000 | 30.950413 | 80 | 0.70357 | false |
emi420/sotsiaal | app/urls.py | 1 | 2985 | from django.conf.urls import patterns, include, url
import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
(r'^static/(.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_URL})
)
urlpatterns += patterns('app.views',
(r'^$','index'),
(r'^legal/$', 'legal'),
(r'^contact/$', 'contact'),
(r'^search/$', 'search'),
(r'^tag/$', 'tag'),
(r'^cache_flush/$', 'cache_flush'),
(r'^new_story/$', 'new_story'),
(r'^new_story$', 'new_story'),
(r'^login/$', 'login'),
(r'^signup/$', 'signup'),
(r'^edit_profile/$', 'edit_profile'),
(r'^view_profile/(.*)/$', 'view_profile'),
(r'^pass_recovery/$', 'pass_recovery'),
(r'^pass_recovery$', 'pass_recovery'),
(r'^add_user/$', 'add_user'),
(r'^do_login/$', 'do_login'),
(r'^logout/$', 'logout'),
(r'^save_profile/$', 'save_profile'),
(r'^save_pass/$', 'save_pass'),
(r'^add_story/$', 'add_story'),
(r'^delete_story/(.*)/$', 'delete_story'),
(r'^add_message/$', 'add_message'),
(r'^send_contact_msg/$', 'send_contact_msg'),
(r'^send_recovery_pass/$', 'send_recovery_pass'),
(r'^send_activation_mail/(.*)/$', 'send_activation_mail'),
(r'^send_deletion_mail/(.*)/$', 'send_deletion_mail'),
(r'^activate_account/(.*)/$', 'activate_account'),
(r'^delete_account/(.*)/$', 'delete_account'),
(r'^account_message/$', 'account_message'),
(r'^story_img/(.*)/$', 'story_img'),
(r'^story_original_img/(.*)/$', 'story_original_img'),
(r'^msg_img/(.*)/$', 'msg_img'),
(r'^msg_original_img/(.*)/$', 'msg_img'),
(r'^msg_original_img_old/(.*)/$', 'msg_img_old'),
(r'^user_img/(.*)/(.*)/$', 'user_img'),
(r'^user_img/(.*)/$', 'user_img'),
(r'^update_karma/$', 'update_karma'),
(r'^ajax/add_friend/$', 'add_friend'),
(r'^ajax/remove_friend/$', 'remove_friend'),
#(r'^ajax/story_wall/$', 'story_wall'),
(r'^ajax/story_followers/$', 'story_followers'),
(r'^ajax/more_story_messages/$', 'more_story_messages'),
(r'^ajax/delete_avatar/$', 'delete_avatar'),
(r'^ajax/delete_bg/$', 'delete_bg'),
(r'^ajax/delete_banner/$', 'delete_banner'),
(r'^ajax/delete_message/$', 'delete_message'),
(r'^ajax/delete_reply/$', 'delete_reply'),
(r'^ajax/vote_msg/$', 'vote_msg'),
(r'^ajax/vote_reply/$', 'vote_reply'),
(r'^ajax/vote_story/$', 'vote_story'),
(r'^print_story/(.*)/$', 'print_story'),
(r'^popular_users/$', 'popular_users'),
(r'^invalid_story/$', 'invalid_story'),
('^(.*)/(.*)/', 'story'), # category/story
('^(.*)/', 'index'), # category
('^(.*)', 'view_profile'), # user profile
)
| gpl-3.0 | -7,142,724,653,885,100,000 | 38.8 | 99 | 0.550084 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-authorization/azure/mgmt/authorization/models/role_assignment_create_parameters.py | 1 | 1625 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class RoleAssignmentCreateParameters(Model):
"""Role assignment create parameters.
:param role_definition_id: The role definition ID used in the role
assignment.
:type role_definition_id: str
:param principal_id: The principal ID assigned to the role. This maps to
the ID inside the Active Directory. It can point to a user, service
principal, or security group.
:type principal_id: str
    :param can_delegate: The delegation flag used for creating a role
     assignment
:type can_delegate: bool
"""
_attribute_map = {
'role_definition_id': {'key': 'properties.roleDefinitionId', 'type': 'str'},
'principal_id': {'key': 'properties.principalId', 'type': 'str'},
'can_delegate': {'key': 'properties.canDelegate', 'type': 'bool'},
}
def __init__(self, **kwargs):
super(RoleAssignmentCreateParameters, self).__init__(**kwargs)
self.role_definition_id = kwargs.get('role_definition_id', None)
self.principal_id = kwargs.get('principal_id', None)
self.can_delegate = kwargs.get('can_delegate', None)
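

# A minimal construction sketch (the IDs below are placeholders, not real
# Azure resource identifiers):
#
#   params = RoleAssignmentCreateParameters(
#       role_definition_id='/subscriptions/<sub>/providers/'
#                          'Microsoft.Authorization/roleDefinitions/<id>',
#       principal_id='<object-id>',
#       can_delegate=False)
#
# The serializer maps these onto the nested ``properties.*`` keys declared
# in ``_attribute_map`` above.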
| mit | -6,886,032,258,312,350,000 | 39.625 | 84 | 0.622154 | false |
lucidmotifs/newtopia | newtopia/ntgame/models/effect.py | 1 | 3504 | # python modules
from enum import Enum
# django modules
from django.db import models
# nt modules
from .province import Province
from .infrastructure import Building
# meta
from ntmeta.models import Entity
class Effect(models.Model):
""" The core component of province change """
""" e.g. Peasant Growth - would signify that applying this effect,
with a given magnitude would impact how fast peasants grow per turn."""
name = models.CharField(max_length=40, unique=False)
""" The entity that generated the effect """
entity = models.ForeignKey(Entity,
on_delete=models.CASCADE,
null=False,
blank=False)
""" Code used to identify the effect, like a key. HASH? """
tag = models.CharField(max_length=40, unique=True)
def __str__(self):
return self.name
class Instance(models.Model):
""" An instance of an effect that can be applied to a building or spell. """
class EffectType(Enum):
DELAYED = 1
IMMEDIATE = 2
OVER_TIME = 3
NEXT_TURN = 4
""" The related effect """
effect = models.ForeignKey(Effect,
on_delete=models.CASCADE,
null=False,
blank=False)
""" Determines the type of application produced """
effect_type = models.IntegerField(
choices=EffectType.__members__.items(),
default=EffectType.IMMEDIATE)
""" How long effect persists. Ignore when `effect_type` is immediate and
determines when the delayed effect pops when `effect_type` is
DELAYED. Measured in ntdays """
duration = models.IntegerField(default=1)
""" Size of the effect. Set to 100 if using raw value. """
magnitude = models.FloatField(default=0.0)
""" Raw value increase/decrease will be converted to a percentage
if used with a subentity, such as a growth rate.
When Provided, magnitude will only be applied to the raw_value.
Exception: can be used as minimum value if base_is_min == True """
base_value = models.IntegerField(default=None)
""" When True, magnitude works as usual, and base_value is only applied if
the resulting Application value would be less than the base_value """
base_is_min = models.BooleanField(default=False)
""" Denotes negative or positive version of effect """
is_negative = models.BooleanField(default=False)
    def apply(self, province):
        # Builds (but does not save) an Application targeting the given
        # province; the caller is expected to fill in applied_by and the
        # tick fields before saving.
        app = Application()
        app.instance = self
        app.applied_to = province
        return app
def __str__(self):
return "{} with mag. {}".format(self.effect.name, self.magnitude)
EffectType = Instance.EffectType
class Application(models.Model):
""" Used to apply effects to provinces """
instance = models.ForeignKey(Instance,
on_delete=models.CASCADE,
null=True,
blank=True)
applied_to = models.ForeignKey(
Province, on_delete=models.CASCADE, null=False, blank=False,
related_name='to')
applied_by = models.ForeignKey(
Province, on_delete=models.CASCADE, null=False, blank=False,
related_name='by')
""" Type of effect; alters how the effect is applied. """
# Round the effect was applied (ntdate)
applied_on = models.IntegerField()
# Round the effect expires (ntdate) (NULL permanent, immediate)
expires_at = models.IntegerField(default=None)
# Round the effect is applied (ntdate)
# (NULL immediate, 0 every tick till expires)
applies_at = models.IntegerField(default=None)
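

# Rough usage sketch (assumes saved Province rows and a saved Instance;
# ``tick`` is an illustrative placeholder for the current ntdate):
#
#   app = instance.apply(target_province)
#   app.applied_by = acting_province
#   app.applied_on = tick
#   app.applies_at = tick
#   app.expires_at = tick + instance.duration
#   app.save()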
| gpl-3.0 | 7,361,919,509,951,882,000 | 30.854545 | 80 | 0.664669 | false |
pidydx/grr | grr/lib/log_test.py | 1 | 2267 | #!/usr/bin/env python
"""Tests for logging classes."""
import logging
import time
from werkzeug import wrappers as werkzeug_wrappers
from grr.gui import wsgiapp
from grr.lib import flags
from grr.lib import log
from grr.lib import stats
from grr.lib import test_lib
from grr.lib import utils
from grr.proto import jobs_pb2
class ApplicationLoggerTests(test_lib.GRRBaseTest):
"""Store tests."""
def Log(self, msg, *args):
if args:
self.log += msg % (args)
else:
self.log += msg
def setUp(self):
super(ApplicationLoggerTests, self).setUp()
self.l = log.GrrApplicationLogger()
self.log = ""
self.log_stubber = utils.Stubber(logging, "info", self.Log)
self.log_stubber.Start()
def tearDown(self):
super(ApplicationLoggerTests, self).tearDown()
self.log_stubber.Stop()
def testGetEventId(self):
self.assertGreater(
len(self.l.GetNewEventId()), 20, "Invalid event ID generated")
self.assertGreater(
len(self.l.GetNewEventId(int(time.time() * 1e6))), 20,
"Invalid event ID generated")
def testLogHttpAdminUIAccess(self):
stats.STATS.RegisterCounterMetric("grr_gin_request_count")
request = wsgiapp.HttpRequest({
"wsgi.url_scheme": "http",
"SERVER_NAME": "foo.bar",
"SERVER_PORT": "1234"
})
request.user = "testuser"
response = werkzeug_wrappers.Response(
status=202,
headers={"X-GRR-Reason": "foo/test1234",
"X-API-Method": "TestMethod"})
self.l.LogHttpAdminUIAccess(request, response)
self.assertIn("foo/test1234", self.log)
def testLogHttpFrontendAccess(self):
request = self._GenHttpRequestProto()
self.l.LogHttpFrontendAccess(request)
self.assertIn("/test?omg=11%45x%20%20", self.log)
def _GenHttpRequestProto(self):
"""Create a valid request object."""
request = jobs_pb2.HttpRequest()
request.source_ip = "127.0.0.1"
request.user_agent = "Firefox or something"
request.url = "http://test.com/test?omg=11%45x%20%20"
request.user = "anonymous"
request.timestamp = int(time.time() * 1e6)
request.size = 1000
return request
def main(argv):
test_lib.main(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
| apache-2.0 | 3,389,963,987,248,417,000 | 23.912088 | 70 | 0.664314 | false |
suutari/shoop | shuup/simple_supplier/models.py | 1 | 3252 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from django.conf import settings
from django.db import models
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from enumfields import EnumIntegerField
from shuup.core.fields import MoneyValueField, QuantityField
from shuup.core.suppliers.enums import StockAdjustmentType
from shuup.utils.properties import PriceProperty
def _get_currency():
from shuup.core.models import Shop
if not settings.SHUUP_ENABLE_MULTIPLE_SHOPS:
return Shop.objects.first().currency
return settings.SHUUP_HOME_CURRENCY
def _get_prices_include_tax():
from shuup.core.models import Shop
if not settings.SHUUP_ENABLE_MULTIPLE_SHOPS:
return Shop.objects.first().prices_include_tax
return False
class StockAdjustment(models.Model):
product = models.ForeignKey("shuup.Product", related_name="+", on_delete=models.CASCADE, verbose_name=_("product"))
supplier = models.ForeignKey("shuup.Supplier", on_delete=models.CASCADE, verbose_name=_("supplier"))
created_on = models.DateTimeField(auto_now_add=True, editable=False, db_index=True, verbose_name=_("created on"))
created_by = models.ForeignKey(
settings.AUTH_USER_MODEL, blank=True, null=True, on_delete=models.PROTECT, verbose_name=_("created by"))
delta = QuantityField(default=0, verbose_name=_("delta"))
purchase_price_value = MoneyValueField(default=0)
purchase_price = PriceProperty("purchase_price_value", "currency", "includes_tax")
type = EnumIntegerField(
StockAdjustmentType, db_index=True, default=StockAdjustmentType.INVENTORY, verbose_name=_("type"))
@cached_property
def currency(self):
return _get_currency()
@cached_property
def includes_tax(self):
return _get_prices_include_tax()
class StockCount(models.Model):
alert_limit = QuantityField(default=0, editable=False, verbose_name=_("alert limit"))
product = models.ForeignKey(
"shuup.Product", related_name="+", editable=False, on_delete=models.CASCADE, verbose_name=_("product"))
supplier = models.ForeignKey(
"shuup.Supplier", editable=False, on_delete=models.CASCADE, verbose_name=_("supplier"))
logical_count = QuantityField(default=0, editable=False, verbose_name=_("logical count"))
physical_count = QuantityField(default=0, editable=False, verbose_name=_("physical count"))
stock_value_value = MoneyValueField(default=0)
stock_value = PriceProperty("stock_value_value", "currency", "includes_tax")
stock_unit_price = PriceProperty("stock_unit_price_value", "currency", "includes_tax")
class Meta:
unique_together = [("product", "supplier")]
@cached_property
def currency(self):
return _get_currency()
@cached_property
def includes_tax(self):
return _get_prices_include_tax()
@property
def stock_unit_price_value(self):
return (self.stock_value_value / self.logical_count if self.logical_count else 0)
| agpl-3.0 | 9,174,615,948,482,939,000 | 40.164557 | 119 | 0.718635 | false |
pitunti/alfaPitunti | plugin.video.alfa/channels/seriesblanco.py | 1 | 13145 | # -*- coding: utf-8 -*-
import re
import urlparse
from channels import filtertools
from channelselector import get_thumb
from core import httptools
from core import scrapertoolsV2
from core import servertools
from core.item import Item
from platformcode import config, logger
from channels import autoplay
HOST = "https://seriesblanco.com/"
IDIOMAS = {'es': 'Español', 'en': 'Inglés', 'la': 'Latino', 'vo': 'VO', 'vos': 'VOS', 'vosi': 'VOSI', 'otro': 'OVOS'}
list_idiomas = IDIOMAS.values()
list_language = ['default']
CALIDADES = ['SD', 'HDiTunes', 'Micro-HD-720p', 'Micro-HD-1080p', '1080p', '720p']
list_quality = CALIDADES
list_servers = ['streamix',
'powvideo',
'streamcloud',
'openload',
'flashx',
'streamplay',
'nowvideo',
'gamovideo',
'kingvid',
'vidabc'
]
def mainlist(item):
logger.info()
thumb_series = get_thumb("channels_tvshow.png")
thumb_series_az = get_thumb("channels_tvshow_az.png")
thumb_buscar = get_thumb("search.png")
itemlist = list()
autoplay.init(item.channel, list_servers, list_quality)
itemlist.append(Item(channel=item.channel, title="Listado alfabético", action="series_listado_alfabetico",
thumbnail=thumb_series_az))
itemlist.append(Item(channel=item.channel, title="Todas las series", action="series",
url=urlparse.urljoin(HOST, "listado/"), thumbnail=thumb_series))
itemlist.append(
Item(channel=item.channel, title="Capítulos estrenados recientemente", action="home_section",
extra="Series Online : Capítulos estrenados recientemente",
url=HOST, thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Series más vistas", action="series", extra="Series Más vistas",
url=urlparse.urljoin(HOST, "listado-visto/"), thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Últimas fichas creadas", action="series",
url=urlparse.urljoin(HOST, "fichas_creadas/"), thumbnail=thumb_series))
itemlist.append(Item(channel=item.channel, title="Series por género", action="generos",
url=HOST, thumbnail=thumb_series))
itemlist.append(
Item(channel=item.channel, title="Buscar...", action="search", url=urlparse.urljoin(HOST, "finder.php"),
thumbnail=thumb_buscar))
itemlist = filtertools.show_option(itemlist, item.channel, list_idiomas, CALIDADES)
autoplay.show_option(item.channel, itemlist)
return itemlist
def home_section(item):
logger.info("section = %s" % item.extra)
pattern = "['\"]panel-title['\"]>[^/]*%s(.*?)(?:panel-title|\Z)" % item.extra
# logger.debug("pattern = %s" % pattern)
data = httptools.downloadpage(item.url).data
result = re.search(pattern, data, re.MULTILINE | re.DOTALL)
if result:
# logger.debug("found section: {0}".format(result.group(1)))
item.extra = 1
return extract_series_from_data(item, result.group(1))
logger.debug("No match")
return []
def extract_series_from_data(item, data):
itemlist = []
episode_pattern = re.compile('/capitulo-([0-9]+)/')
shows = re.findall("<a.+?href=['\"](?P<url>/serie[^'\"]+)[^<]*<img[^>]*src=['\"](?P<img>http[^'\"]+).*?"
"(?:alt|title)=['\"](?P<name>[^'\"]+)", data)
for url, img, name in shows:
try:
name.decode('utf-8')
except UnicodeError:
name = unicode(name, "iso-8859-1", errors="replace").encode("utf-8")
# logger.debug("Show found: %s -> %s (%s)" % (name, url, img))
if not episode_pattern.search(url):
action = "episodios"
else:
action = "findvideos"
context1=[filtertools.context(item, list_idiomas, CALIDADES), autoplay.context]
itemlist.append(item.clone(title=name, url=urlparse.urljoin(HOST, url),
action=action, show=name,
thumbnail=img,
context=context1))
more_pages = re.search('pagina=([0-9]+)">>>', data)
if more_pages:
# logger.debug("Adding next page item")
itemlist.append(item.clone(title="Siguiente >>", extra=item.extra + 1))
if item.extra > 1:
# logger.debug("Adding previous page item")
itemlist.append(item.clone(title="<< Anterior", extra=item.extra - 1))
return itemlist
def series(item):
logger.info()
if not hasattr(item, 'extra') or not isinstance(item.extra, int):
item.extra = 1
if '?' in item.url:
merger = '&'
else:
merger = '?'
page_url = "%s%spagina=%s" % (item.url, merger, item.extra)
logger.info("url = %s" % page_url)
data = scrapertoolsV2.decodeHtmlentities(httptools.downloadpage(page_url).data)
return extract_series_from_data(item, data)
def series_listado_alfabetico(item):
logger.info()
return [item.clone(action="series", title=letra, url=urlparse.urljoin(HOST, "listado-%s/" % letra))
for letra in "ABCDEFGHIJKLMNOPQRSTUVWXYZ"]
def generos(item):
logger.info()
data = httptools.downloadpage(item.url).data
result = re.findall("href=['\"](?P<url>/listado/[^'\"]+)['\"][^/]+/i>\s*(?P<genero>[^<]+)", data)
return [item.clone(action="series", title=genero, url=urlparse.urljoin(item.url, url)) for url, genero in result]
def newest(categoria):
logger.info("categoria: %s" % categoria)
itemlist = []
try:
if categoria == 'series':
            itemlist = home_section(Item(url=HOST,
                extra="Series Online : Capítulos estrenados recientemente"))
    # Catch the exception so the "novedades" channel is not interrupted if one channel fails
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
return itemlist
def search(item, texto):
logger.info("%s" % texto)
texto = texto.replace(" ", "+")
itemlist = []
try:
post = "query=%s" % texto
data = httptools.downloadpage(item.url, post=post).data
data = re.sub(r"\n|\r|\t|\s{2}", "", data)
shows = re.findall("<a href=['\"](?P<url>/serie[^'\"]+)['\"].*?<img src=['\"](?P<img>[^'\"]+)['\"].*?"
"id=['\"]q2[1\"] name=['\"]q2['\"] value=['\"](?P<title>.*?)['\"]", data)
for url, img, title in shows:
itemlist.append(item.clone(title=title, url=urlparse.urljoin(HOST, url), action="episodios", show=title,
thumbnail=img, context=filtertools.context(item, list_idiomas, CALIDADES)))
    # Catch the exception so the global search is not interrupted if a channel fails
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return itemlist
def episodios(item):
logger.info("%s - %s" % (item.title, item.url))
itemlist = []
    # Download the page
data = httptools.downloadpage(item.url).data
fanart = scrapertoolsV2.find_single_match(data, "background-image[^'\"]+['\"]([^'\"]+)")
plot = scrapertoolsV2.find_single_match(data, "id=['\"]profile2['\"]>\s*(.*?)\s*</div>")
# logger.debug("fanart: %s" % fanart)
# logger.debug("plot: %s" % plot)
episodes = re.findall("<tr.*?href=['\"](?P<url>[^'\"]+).+?>(?P<title>.+?)</a>.*?<td>(?P<flags>.*?)</td>", data,
re.MULTILINE | re.DOTALL)
for url, title, flags in episodes:
title = re.sub("<span[^>]+>", "", title).replace("</span>", "")
idiomas = " ".join(["[%s]" % IDIOMAS.get(language, "OVOS") for language in
re.findall("banderas/([^\.]+)", flags, re.MULTILINE)])
filter_lang = idiomas.replace("[", "").replace("]", "").split(" ")
display_title = "%s - %s %s" % (item.show, title, idiomas)
# logger.debug("Episode found %s: %s" % (display_title, urlparse.urljoin(HOST, url)))
itemlist.append(item.clone(title=display_title, url=urlparse.urljoin(HOST, url),
action="findvideos", plot=plot, fanart=fanart, language=filter_lang))
itemlist = filtertools.get_links(itemlist, item, list_idiomas, CALIDADES)
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(
item.clone(title="Añadir esta serie a la videoteca", action="add_serie_to_library", extra="episodios"))
return itemlist
def parse_videos(item, type_str, data):
video_patterns_str = [
'<tr.+?<span>(?P<date>.+?)</span>.*?banderas/(?P<language>[^\.]+).+?href="(?P<link>[^"]+).+?servidores/'
'(?P<server>[^\.]+).*?</td>.*?<td>.*?<span>(?P<uploader>.+?)</span>.*?<span>(?P<quality>.*?)</span>',
'<tr.+?banderas/(?P<language>[^\.]+).+?<td[^>]*>(?P<date>.+?)</td>.+?href=[\'"](?P<link>[^\'"]+)'
'.+?servidores/(?P<server>[^\.]+).*?</td>.*?<td[^>]*>.*?<a[^>]+>(?P<uploader>.+?)</a>.*?</td>.*?<td[^>]*>'
'(?P<quality>.*?)</td>.*?</tr>'
]
for v_pat_str in video_patterns_str:
v_patt_iter = re.compile(v_pat_str, re.MULTILINE | re.DOTALL).finditer(data)
itemlist = []
for vMatch in v_patt_iter:
v_fields = vMatch.groupdict()
quality = v_fields.get("quality")
            # FIX for cases where the language gets appended in the comments
regex = re.compile('sub-inglés-?', re.I)
quality = regex.sub("", quality)
# quality = re.sub(r"sub-inglés-?", "", quality, flags=re.IGNORECASE)
if not quality:
quality = "SD"
            # FIX for the hyphens in the quality string, so no extra option has to be added to the quality list
if quality.startswith("MicroHD"):
regex = re.compile('microhd', re.I)
quality = regex.sub("Micro-HD-", quality)
# quality = re.sub(r"microhd", "Micro-HD-", quality, flags=re.IGNORECASE)
server = v_fields.get("server")
title = "%s en %s [%s] [%s] (%s: %s)" % (type_str, v_fields.get("server"),
IDIOMAS.get(v_fields.get("language"), "OVOS"), quality,
v_fields.get("uploader"), v_fields.get("date"))
itemlist.append(
item.clone(title=title, fulltitle=item.title, url=urlparse.urljoin(HOST, v_fields.get("link")),
action="play", language=IDIOMAS.get(v_fields.get("language"), "OVOS"),
quality=quality, server= server))
if len(itemlist) > 0:
return itemlist
return []
def extract_videos_section(data):
return re.findall("panel-title[^>]*>\s*([VvDd].+?)</div>[^<]*</div>[^<]*</div>", data, re.MULTILINE | re.DOTALL)
def findvideos(item):
logger.info("%s = %s" % (item.show, item.url))
    # Download the page
data = httptools.downloadpage(item.url).data
# logger.info(data)
online = extract_videos_section(data)
try:
filtro_enlaces = config.get_setting("filterlinks", item.channel)
except:
filtro_enlaces = 2
list_links = []
if filtro_enlaces != 0:
list_links.extend(parse_videos(item, "Ver", online[-2]))
if filtro_enlaces != 1:
list_links.extend(parse_videos(item, "Descargar", online[-1]))
list_links = filtertools.get_links(list_links, item, list_idiomas, CALIDADES)
    for link in list_links:
        # The server name sits between the "Ver en " prefix and the first
        # bracket, e.g. 'Ver en openload [es] [720p] (...)' -> 'openload'.
        name = link.title.split('[')[0]
        if name.startswith('Ver en'):
            name = name[len('Ver en'):]
        link.server = name.strip()
autoplay.start(list_links, item)
return list_links
def play(item):
logger.info("%s - %s = %s" % (item.show, item.title, item.url))
if item.url.startswith(HOST):
data = httptools.downloadpage(item.url).data
ajax_link = re.findall("loadEnlace\((\d+),(\d+),(\d+),(\d+)\)", data)
ajax_data = ""
for serie, temp, cap, linkID in ajax_link:
# logger.debug(
# "Ajax link request: Serie = %s - Temp = %s - Cap = %s - Link = %s" % (serie, temp, cap, linkID))
ajax_data += httptools.downloadpage(
HOST + '/ajax/load_enlace.php?serie=' + serie + '&temp=' + temp + '&cap=' + cap + '&id=' + linkID).data
if ajax_data:
data = ajax_data
patron = "window.location.href\s*=\s*[\"']([^\"']+)'"
url = scrapertoolsV2.find_single_match(data, patron)
else:
url = item.url
itemlist = servertools.find_video_items(data=url)
titulo = scrapertoolsV2.find_single_match(item.fulltitle, "^(.*?)\s\[.+?$")
if titulo:
titulo += " [%s]" % item.language
for videoitem in itemlist:
if titulo:
videoitem.title = titulo
else:
videoitem.title = item.title
videoitem.channel = item.channel
return itemlist
| gpl-3.0 | -4,026,489,724,223,350,000 | 35.870787 | 119 | 0.564909 | false |
TechAtNYU/feedback-service | feedback.py | 1 | 3796 | import requests
import secrets
import smtplib
headers = {
'content-type': 'application/vnd.api+json',
'accept': 'application/*, text/*',
'authorization': 'Bearer ' + secrets.tnyu_api_key
}
def get_emails(event_id, event_data, eboard_members, attendees):
res = requests.get('https://api.tnyu.org/v3/events/' + event_id +
'?include=attendees', headers=headers, verify=False)
    if res.status_code != 200:
return
r = res.json()
event_data.append(r['data'])
for post in r['included']:
if post['attributes'].get('contact'):
if post['attributes']['roles']:
eboard_members.append(post)
else:
attendees.append(post)
def send_emails(event_data, survey_link, eboard_members, attendees):
server = smtplib.SMTP('smtp.gmail.com', 587)
server.starttls()
server.login(secrets.tnyu_email, secrets.tnyu_email_password)
    for member in eboard_members:
        msg = "\r\n".join([
            "From: " + secrets.tnyu_email,
            "To: " + member['attributes']['contact']['email'],
            "Subject: Thank you for coming to Tech@NYU's " +
            event_data[0]['attributes']['title'],
            '',
            'Hi ' + member['attributes']['name'] + '!\n\n' +
            'Thanks for coming out! We are constantly looking to improve ' +
            'on our events, and we would really appreciate it if you ' +
            'could take two minutes out of your day to fill out our ' +
            'feedback form. We\'d love to know how we could do better: ' +
            survey_link + '?rsvpId=' + member['id'],
            '',
            'Filling the form out will give us an idea of how everything ' +
            'went and if there was something you really liked about the ' +
            'event or something you did not like.\n',
            'Feel free to email [email protected] if you have ' +
            'other questions or concerns.',
            '',
            'Thank you,',
            'Tech@NYU team'
        ])
        try:
            server.sendmail(secrets.tnyu_email,
                            member['attributes']['contact']['email'], msg)
        except UnicodeEncodeError:
            continue
    for attendee in attendees:
        msg = "\r\n".join([
            "From: " + secrets.tnyu_email,
            "To: " + attendee['attributes']['contact']['email'],
            "Subject: Thank you for coming to Tech@NYU's " +
            event_data[0]['attributes']['title'],
            '',
            'Hi ' + attendee['attributes']['name'] + '!\n\n' +
            'Thanks for coming out! We are constantly looking to improve ' +
            'on our events, and we would really appreciate it if you could ' +
            'take two minutes out of your day to fill out our feedback ' +
            'form. We\'d love to know how we could do better: ' +
            survey_link + '?rsvpId=' + attendee['id'],
            '',
            'Filling the form out will give us an idea of how everything ' +
            'went and if there was something you really liked about the ' +
            'event or something you did not like.\n',
            'Feel free to email [email protected] if you have other ' +
            'questions or concerns.',
            '',
            'Thank you,',
            'Tech@NYU team'
        ])
        try:
            server.sendmail(secrets.tnyu_email,
                            attendee['attributes']['contact']['email'], msg)
        except UnicodeEncodeError:
            continue
server.quit()
def main():
event_id = '5644e5e37af46de029dfb9f9'
eboard_members = []
attendees = []
event_data = []
survey_link = 'https://techatnyu.typeform.com/to/ElE6F5'
get_emails(event_id, event_data, eboard_members, attendees)
send_emails(event_data, survey_link, eboard_members, attendees)
main()
| mit | 9,174,346,333,781,503,000 | 35.5 | 78 | 0.560327 | false |
bobbyrward/fr0st | fr0stlib/gui/utils.py | 1 | 8708 | import wx, os
from functools import partial
from fr0stlib.decorators import *
def LoadIcon(*path):
# Check for an icons dir in app base path first for development
filename = os.path.join(wx.GetApp().AppBaseDir, 'icons', *path) + '.png'
if not os.path.exists(filename):
# Not there, check install path
filename = os.path.join(wx.GetApp().IconsDir, *path) + '.png'
img = wx.Image(filename, type=wx.BITMAP_TYPE_PNG)
img.Rescale(16,16)
return wx.BitmapFromImage(img)
def Box(self, name, *a, **k):
box = wx.StaticBoxSizer(wx.StaticBox(self, -1, name),
k.get('orient', wx.VERTICAL))
box.AddMany(a)
return box
def MakeTCs(self, *a, **k):
fgs = wx.FlexGridSizer(99, 2, 1, 1)
tcs = {}
for i, default in a:
tc = NumberTextCtrl(self, **k)
tc.SetFloat(default)
tcs[i] = tc
fgs.Add(wx.StaticText(self, -1, i.replace("_", " ").title()),
0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
fgs.Add(tc, 0, wx.ALIGN_RIGHT, 5)
return fgs, tcs
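# A minimal sketch of how Box and MakeTCs compose (the panel variable is
# illustrative): build the labelled NumberTextCtrls, then wrap them in a
# captioned static-box sizer, much as SizePanel.__init__ does below.
#
#     fgs, tcs = MakeTCs(panel, ("width", 512.), ("height", 384.), low=0)
#     panel.SetSizer(Box(panel, "Size", fgs))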
class MyChoice(wx.Choice):
def __init__(self, parent, name, d, initial):
self.d = d
choices = sorted(d.iteritems())
wx.Choice.__init__(self, parent, -1, choices=[k for k,_ in choices])
self.SetSelection([v for _,v in choices].index(initial))
def GetFloat(self):
return self.d[self.GetStringSelection()]
class SizePanel(wx.Panel):
def __init__(self, parent, callback=lambda: None):
self.parent = parent
self.keepratio = True
self.callback = callback
wx.Panel.__init__(self, parent, -1)
fgs, tcs = MakeTCs(self, ("width", 512.), ("height", 384.), low=0,
callback=self.SizeCallback)
self.__dict__.update(tcs)
for i in (self.width, self.height):
i.MakeIntOnly()
i.low = 1
ratio = wx.CheckBox(self, -1, "Keep Ratio")
ratio.SetValue(True)
ratio.Bind(wx.EVT_CHECKBOX, self.OnRatio)
box = Box(self, "Size", fgs, ratio)
self.SetSizer(box)
box.Fit(self)
def GetInts(self):
return [int(tc.GetFloat()) for tc in (self.width, self.height)]
def UpdateSize(self, size):
width, height = (float(i) for i in size)
self.width.SetFloat(width)
self.height.SetFloat(height)
self.ratio = width / height
def OnRatio(self, e):
self.keepratio = e.GetInt()
def SizeCallback(self, tc, tempsave=None):
if self.keepratio:
v = tc.GetFloat()
tc.SetInt(v)
if tc == self.width:
w, h = v, v / self.ratio
self.height.SetInt(h)
else:
w, h = v * self.ratio, v
self.width.SetInt(w)
else:
self.ratio = float(self.width.GetFloat()) / self.height.GetFloat()
self.callback()
class NumberTextCtrl(wx.TextCtrl):
low = None
high = None
@BindEvents
def __init__(self, parent, low=None, high=None, callback=None):
self.parent = parent
# Size is set to ubuntu default (75,27), maybe make it 75x21 in win
wx.TextCtrl.__init__(self,parent,-1, size=(75,27))
if (low,high) != (None,None):
self.SetAllowedRange(low, high)
if callback:
self.callback = partial(callback, self)
else:
self.callback = lambda tempsave=None: None
self.HasChanged = False
self.SetFloat(0.0)
def GetFloat(self):
return float(self.GetValue() or "0")
def SetFloat(self, v):
v = self.Checkrange(float(v))
self._value = v
string = ("%.6f" %v).rstrip("0")
if string.endswith("."):
string += "0" # Avoid values like '0.' or '1.'
self.SetValue(string)
def GetInt(self):
return int(self.GetValue() or "0")
def SetInt(self, v):
v = self.Checkrange(int(v))
self._value = v
self.SetValue(str(v))
def MakeIntOnly(self):
self.SetInt(self.GetFloat())
self.SetFloat, self.GetFloat = self.SetInt, self.GetInt
def SetAllowedRange(self, low=None, high=None):
self.low = low
self.high = high
def Checkrange(self, v):
if self.low is not None and v < self.low:
return self.low
elif self.high is not None and v > self.high:
return self.high
return v
@Bind(wx.EVT_MOUSEWHEEL)
def OnMouseWheel(self, evt):
if self.SetFloat == self.SetInt:
return
if evt.CmdDown():
if evt.AltDown():
delta = 0.01
else:
delta = 0.1
elif evt.AltDown():
delta = 0.001
else:
evt.Skip()
return
self.SetFocus() # Makes sure OnKeyUp gets called.
v = self._value + delta * evt.GetWheelRotation() / evt.GetWheelDelta()
self.SetFloat(v)
self.callback(tempsave=False)
self.HasChanged = True
@Bind(wx.EVT_KEY_UP)
def OnKeyUp(self, e):
# TODO: This code is duplicated with the one found in xformeditor.
key = e.GetKeyCode()
if (key == wx.WXK_CONTROL and not e.AltDown()) or (
key == wx.WXK_ALT and not e.ControlDown()):
if self.HasChanged:
if hasattr(self.parent, 'parent') and hasattr(self.parent.parent, 'TreePanel'):
self.parent.parent.TreePanel.TempSave()
self.HasChanged = False
@Bind(wx.EVT_CHAR)
def OnChar(self, event):
key = event.GetKeyCode()
if key in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
self.OnKillFocus(None)
elif key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or key == wx.WXK_TAB:
event.Skip()
elif chr(key) in "0123456789.-":
event.Skip()
else:
# not calling Skip() eats the event
pass #wx.Bell()
@Bind(wx.EVT_KILL_FOCUS)
def OnKillFocus(self,event):
# cmp done with strings because equal floats can compare differently.
if str(self._value) != self.GetValue():
try:
v = self.GetFloat() # Can raise ValueError
except ValueError:
self.SetFloat(self._value)
return
self.SetFloat(v)
self.callback()
class MultiSliderMixin(object):
"""Class to dynamically create and control sliders."""
_new = None
_changed = False
def __init__(self, *a, **k):
super(MultiSliderMixin, self).__init__(*a, **k)
self.sliders = {}
self.Bind(wx.EVT_IDLE, self.OnIdle)
def MakeSlider(self, name, init, low, high, strictrange=True):
"""Programatically builds stuff."""
tc = NumberTextCtrl(self, callback=self.__callback)
if strictrange:
tc.SetAllowedRange(low, high)
slider = wx.Slider(self, -1, init*100, low*100, high*100,
style=wx.SL_HORIZONTAL
| wx.SL_SELRANGE
)
self.sliders[name] = slider, tc
slider.Bind(wx.EVT_SLIDER, partial(self.OnSlider, tc=tc))
## slider.Bind(wx.EVT_LEFT_DOWN, self.OnSliderDown)
slider.Bind(wx.EVT_LEFT_UP, self.OnSliderUp)
name = name.replace("_", " ").title()
return Box(self, name, tc, (slider, wx.EXPAND), orient=wx.HORIZONTAL)
def UpdateSlider(self, name, val):
slider, tc = self.sliders[name]
slider.SetValue(int(val*100))
tc.SetFloat(val)
def IterSliders(self):
for name, (_, tc) in self.sliders.iteritems():
yield name, tc.GetFloat()
def OnSlider(self, e, tc):
val = e.GetInt()/100.
# Make sure _new is only set when there are actual changes.
if val != tc._value:
self._new = True
tc.SetFloat(str(val))
e.Skip()
## def OnSliderDown(self, e):
## e.Skip()
def OnSliderUp(self, e):
if self._changed:
self.parent.TreePanel.TempSave()
self._changed = False
e.Skip()
def OnIdle(self, e):
if self._new is not None:
self.UpdateFlame()
self._new = None
self._changed = True
def __callback(self, tc, tempsave=True):
self.UpdateFlame()
if tempsave:
self.parent.TreePanel.TempSave()
def UpdateFlame(self):
Abstract
def UpdateView(self):
Abstract
| gpl-3.0 | 4,896,840,407,771,886,000 | 27.090323 | 95 | 0.544442 | false |
eLRuLL/scrapy | scrapy/http/response/text.py | 1 | 9259 | """
This module implements the TextResponse class which adds encoding handling and
discovering (through HTTP headers) to base Response class.
See documentation in docs/topics/request-response.rst
"""
from contextlib import suppress
from typing import Generator
from urllib.parse import urljoin
import parsel
from w3lib.encoding import (html_body_declared_encoding, html_to_unicode,
http_content_type_encoding, resolve_encoding)
from w3lib.html import strip_html5_whitespace
from scrapy.http import Request
from scrapy.http.response import Response
from scrapy.utils.python import memoizemethod_noargs, to_unicode
from scrapy.utils.response import get_base_url
class TextResponse(Response):
_DEFAULT_ENCODING = 'ascii'
def __init__(self, *args, **kwargs):
self._encoding = kwargs.pop('encoding', None)
self._cached_benc = None
self._cached_ubody = None
self._cached_selector = None
super(TextResponse, self).__init__(*args, **kwargs)
def _set_url(self, url):
if isinstance(url, str):
self._url = to_unicode(url, self.encoding)
else:
super(TextResponse, self)._set_url(url)
def _set_body(self, body):
self._body = b'' # used by encoding detection
if isinstance(body, str):
if self._encoding is None:
raise TypeError('Cannot convert unicode body - %s has no encoding' %
type(self).__name__)
self._body = body.encode(self._encoding)
else:
super(TextResponse, self)._set_body(body)
def replace(self, *args, **kwargs):
kwargs.setdefault('encoding', self.encoding)
return Response.replace(self, *args, **kwargs)
@property
def encoding(self):
return self._declared_encoding() or self._body_inferred_encoding()
def _declared_encoding(self):
return self._encoding or self._headers_encoding() \
or self._body_declared_encoding()
def body_as_unicode(self):
"""Return body as unicode"""
return self.text
@property
def text(self):
""" Body as unicode """
# access self.encoding before _cached_ubody to make sure
# _body_inferred_encoding is called
benc = self.encoding
if self._cached_ubody is None:
charset = 'charset=%s' % benc
self._cached_ubody = html_to_unicode(charset, self.body)[1]
return self._cached_ubody
def urljoin(self, url):
"""Join this Response's url with a possible relative url to form an
absolute interpretation of the latter."""
return urljoin(get_base_url(self), url)
@memoizemethod_noargs
def _headers_encoding(self):
content_type = self.headers.get(b'Content-Type', b'')
return http_content_type_encoding(to_unicode(content_type))
def _body_inferred_encoding(self):
if self._cached_benc is None:
content_type = to_unicode(self.headers.get(b'Content-Type', b''))
benc, ubody = html_to_unicode(content_type, self.body,
auto_detect_fun=self._auto_detect_fun,
default_encoding=self._DEFAULT_ENCODING)
self._cached_benc = benc
self._cached_ubody = ubody
return self._cached_benc
def _auto_detect_fun(self, text):
for enc in (self._DEFAULT_ENCODING, 'utf-8', 'cp1252'):
try:
text.decode(enc)
except UnicodeError:
continue
return resolve_encoding(enc)
@memoizemethod_noargs
def _body_declared_encoding(self):
return html_body_declared_encoding(self.body)
@property
def selector(self):
from scrapy.selector import Selector
if self._cached_selector is None:
self._cached_selector = Selector(self)
return self._cached_selector
def xpath(self, query, **kwargs):
return self.selector.xpath(query, **kwargs)
def css(self, query):
return self.selector.css(query)
def follow(self, url, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None):
# type: (...) -> Request
"""
Return a :class:`~.Request` instance to follow a link ``url``.
It accepts the same arguments as ``Request.__init__`` method,
but ``url`` can be not only an absolute URL, but also
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
See :ref:`response-follow-example` for usage examples.
"""
if isinstance(url, parsel.Selector):
url = _url_from_selector(url)
elif isinstance(url, parsel.SelectorList):
raise ValueError("SelectorList is not supported")
encoding = self.encoding if encoding is None else encoding
return super(TextResponse, self).follow(
url=url,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
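    # A minimal usage sketch for follow() inside a spider callback; the CSS
    # selector and URLs are hypothetical, not part of this module:
    #
    #     def parse(self, response):
    #         yield response.follow('page2.html', callback=self.parse)
    #         for a in response.css('a.next-page'):
    #             yield response.follow(a, callback=self.parse)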
def follow_all(self, urls=None, callback=None, method='GET', headers=None, body=None,
cookies=None, meta=None, encoding=None, priority=0,
dont_filter=False, errback=None, cb_kwargs=None, flags=None,
css=None, xpath=None):
# type: (...) -> Generator[Request, None, None]
"""
A generator that produces :class:`~.Request` instances to follow all
links in ``urls``. It accepts the same arguments as the :class:`~.Request`'s
``__init__`` method, except that each ``urls`` element does not need to be
an absolute URL, it can be any of the following:
* a relative URL
* a :class:`~scrapy.link.Link` object, e.g. the result of
:ref:`topics-link-extractors`
* a :class:`~scrapy.selector.Selector` object for a ``<link>`` or ``<a>`` element, e.g.
``response.css('a.my_link')[0]``
* an attribute :class:`~scrapy.selector.Selector` (not SelectorList), e.g.
``response.css('a::attr(href)')[0]`` or
``response.xpath('//img/@src')[0]``
In addition, ``css`` and ``xpath`` arguments are accepted to perform the link extraction
within the ``follow_all`` method (only one of ``urls``, ``css`` and ``xpath`` is accepted).
Note that when passing a ``SelectorList`` as argument for the ``urls`` parameter or
using the ``css`` or ``xpath`` parameters, this method will not produce requests for
selectors from which links cannot be obtained (for instance, anchor tags without an
``href`` attribute)
"""
arg_count = len(list(filter(None, (urls, css, xpath))))
if arg_count != 1:
raise ValueError('Please supply exactly one of the following arguments: urls, css, xpath')
if not urls:
if css:
urls = self.css(css)
if xpath:
urls = self.xpath(xpath)
if isinstance(urls, parsel.SelectorList):
selectors = urls
urls = []
for sel in selectors:
with suppress(_InvalidSelector):
urls.append(_url_from_selector(sel))
return super(TextResponse, self).follow_all(
urls=urls,
callback=callback,
method=method,
headers=headers,
body=body,
cookies=cookies,
meta=meta,
encoding=encoding,
priority=priority,
dont_filter=dont_filter,
errback=errback,
cb_kwargs=cb_kwargs,
flags=flags,
)
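    # Sketch of equivalent ways to invoke follow_all(), per the docstring
    # above (the selectors are hypothetical):
    #
    #     yield from response.follow_all(response.css('ul.pager a'))
    #     yield from response.follow_all(css='ul.pager a')
    #     yield from response.follow_all(xpath='//ul[@class="pager"]//a')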
class _InvalidSelector(ValueError):
"""
Raised when a URL cannot be obtained from a Selector
"""
def _url_from_selector(sel):
# type: (parsel.Selector) -> str
if isinstance(sel.root, str):
# e.g. ::attr(href) result
return strip_html5_whitespace(sel.root)
if not hasattr(sel.root, 'tag'):
raise _InvalidSelector("Unsupported selector: %s" % sel)
if sel.root.tag not in ('a', 'link'):
raise _InvalidSelector("Only <a> and <link> elements are supported; got <%s>" %
sel.root.tag)
href = sel.root.get('href')
if href is None:
raise _InvalidSelector("<%s> element has no href attribute: %s" %
(sel.root.tag, sel))
return strip_html5_whitespace(href)
| bsd-3-clause | -5,294,121,437,932,270,000 | 37.260331 | 102 | 0.587212 | false |
dianchen96/gym | gym/envs/mujoco/mujoco_env.py | 1 | 9674 | import os
from gym import error, spaces
from gym.utils import seeding
import numpy as np
from os import path
import gym
import six
try:
import mujoco_py
from mujoco_py.mjlib import mjlib
except ImportError as e:
raise error.DependencyNotInstalled("{}. (HINT: you need to install mujoco_py, and also perform the setup instructions here: https://github.com/openai/mujoco-py/.)".format(e))
class MujocoEnv(gym.Env):
"""Superclass for all MuJoCo environments.
"""
def __init__(self, model_path, frame_skip):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.viewer = None
# self.camera2 = None
# #import pdb; pdb.set_trace()
# self.camera2 = mujoco_py.MjViewer(init_width=500, init_height=500)
# self.camera2.start()
# self.camera2.set_model(self.model)
# self.camera2_setup()
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def _seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
# methods to override:
# ----------------------------
def reset_model(self):
"""
Reset the robot degrees of freedom (qpos and qvel).
Implement this in each subclass.
"""
raise NotImplementedError
def viewer_setup(self):
"""
This method is called when the viewer is initialized and after every reset
Optionally implement this method, if you need to tinker with camera position
and so forth.
"""
pass
# -----------------------------
def _reset(self):
mjlib.mj_resetData(self.model.ptr, self.data.ptr)
ob = self.reset_model()
if self.viewer is not None:
self.viewer.autoscale()
self.viewer_setup()
return ob
def set_state(self, qpos, qvel):
assert qpos.shape == (self.model.nq,) and qvel.shape == (self.model.nv,)
self.model.data.qpos = qpos
self.model.data.qvel = qvel
self.model._compute_subtree() # pylint: disable=W0212
# import pdb; pdb.set_trace()
self.model.forward()
@property
def dt(self):
return self.model.opt.timestep * self.frame_skip
def do_simulation(self, ctrl, n_frames):
self.model.data.ctrl = ctrl
for _ in range(n_frames):
self.model.step()
def _render(self, mode='human', close=False):
if close:
if self.viewer is not None:
self._get_viewer().finish()
self.viewer = None
return
if mode == 'rgb_array':
self._get_viewer().render()
data, width, height = self._get_viewer().get_image()
return np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
elif mode == 'human':
self._get_viewer().loop_once()
def _get_viewer(self):
if self.viewer is None:
self.viewer = mujoco_py.MjViewer()
self.viewer.start()
self.viewer.set_model(self.model)
self.viewer_setup()
return self.viewer
def get_body_com(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.data.com_subtree[idx]
def get_body_comvel(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.body_comvels[idx]
def get_body_xmat(self, body_name):
idx = self.model.body_names.index(six.b(body_name))
return self.model.data.xmat[idx].reshape((3, 3))
def state_vector(self):
return np.concatenate([
self.model.data.qpos.flat,
self.model.data.qvel.flat
])
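# A minimal subclass sketch (the model file and trivial reward are
# hypothetical), showing the hooks MujocoEnv expects: _step is exercised from
# __init__, and reset_model/viewer_setup are the documented overrides.
#
#     class PointEnv(MujocoEnv):
#         def __init__(self):
#             MujocoEnv.__init__(self, 'point.xml', frame_skip=2)
#         def _step(self, action):
#             self.do_simulation(action, self.frame_skip)
#             return self.state_vector(), 0.0, False, {}
#         def reset_model(self):
#             self.set_state(self.init_qpos, self.init_qvel)
#             return self.state_vector()
#         def viewer_setup(self):
#             self.viewer.cam.distance = 4.0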
class MujocoPixelEnv(MujocoEnv):
def __init__(
self,
model_path,
frame_skip,
width=42,
height=42,
mode="rgb"
):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.width = width
self.height = height
self.mode = mode
self.viewer = None
self.camera2 = None
self.camera2 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera2.start()
self.camera2.set_model(self.model)
self.camera2_setup()
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def camera2_setup(self):
raise NotImplementedError
def _get_obs(self):
camera2_output = None
self.camera2.render()
data, width, height = self.camera2.get_image()
camera2_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera2_output = np.mean(camera2_output, axis=2)[:, :, np.newaxis]
return camera2_output
class MujocoPixel2CamEnv(MujocoEnv):
def __init__(
self,
model_path,
frame_skip,
width=42,
height=42,
mode="rgb"
):
if model_path.startswith("/"):
fullpath = model_path
else:
fullpath = os.path.join(os.path.dirname(__file__), "assets", model_path)
if not path.exists(fullpath):
raise IOError("File %s does not exist" % fullpath)
self.frame_skip = frame_skip
self.model = mujoco_py.MjModel(fullpath)
self.data = self.model.data
self.width = width
self.height = height
self.mode = mode
self.viewer = None
self.camera2 = None
self.camera2 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera2.start()
self.camera2.set_model(self.model)
self.camera2_setup()
self.camera3 = None
self.camera3 = mujoco_py.MjViewer(init_width=self.width, init_height=self.height)
self.camera3.start()
self.camera3.set_model(self.model)
self.camera3_setup()
azimuth = self.camera2.cam.azimuth
self.camera3.cam.azimuth = azimuth + 180
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.init_qpos = self.model.data.qpos.ravel().copy()
self.init_qvel = self.model.data.qvel.ravel().copy()
observation, _reward, done, _info = self._step(np.zeros(self.model.nu))
assert not done
self.obs_dim = observation.size
bounds = self.model.actuator_ctrlrange.copy()
low = bounds[:, 0]
high = bounds[:, 1]
self.action_space = spaces.Box(low, high)
high = np.inf*np.ones(self.obs_dim)
low = -high
self.observation_space = spaces.Box(low, high)
self._seed()
def camera2_setup(self):
raise NotImplementedError
def camera3_setup(self):
raise NotImplementedError
def _get_obs(self):
camera2_output = None
self.camera2.render()
data, width, height = self.camera2.get_image()
camera2_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera2_output = np.mean(camera2_output, axis=2)[:, :, np.newaxis]
camera3_output = None
self.camera3.render()
data, width, height = self.camera3.get_image()
camera3_output = np.fromstring(data, dtype='uint8').reshape(height, width, 3)[::-1, :, :]
if self.mode == "grey":
camera3_output = np.mean(camera3_output, axis=2)[:, :, np.newaxis]
return np.concatenate([camera2_output, camera3_output], axis=2)
| mit | -2,237,362,221,986,834,000 | 31.354515 | 178 | 0.572256 | false |
chenlian2015/skia_from_google | tools/skp/page_sets/skia_jsfiddlebigcar_desktop.py | 2 | 1282 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path='data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_jsfiddlebigcar_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(5)
class SkiaJsfiddlebigcarDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaJsfiddlebigcarDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_jsfiddlebigcar_desktop.json')
urls_list = [
# Why: Page from Chromium's silk test cases
'http://jsfiddle.net/vBQHH/3/embedded/result/',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
| bsd-3-clause | 5,301,158,227,787,102,000 | 30.268293 | 74 | 0.710608 | false |
soma0sd/pyNuc | ensdf/dbgen.py | 1 | 2763 | # -*- coding: utf-8 -*-
"""Inner Module Import"""
from ensdf.genlib import files
from ensdf.genlib import regexp
"""Python Packages"""
import pickle
def get_card(ident=''):
data = []
file_list = files.get_all_files()
prog = lambda i: (i+1)*100/len(file_list)
for ix, f in enumerate(file_list):
card = []
for l in f.readlines():
l = l.replace('\n', '')
if l.strip() == '':
if ident in card[0]:
data.append(card)
card = []
else:
card.append(l)
print("\rGet Card... [{:6.2f}%]".format(prog(ix)), end='')
print()
return data
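# Example (illustrative): get_card("ADOPTED LEVELS") returns one list of raw
# ENSDF card lines per matching dataset, which get_ground_level() below walks
# record by record.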
uq = []
def get_ground_level():
global uq
card = get_card("ADOPTED LEVELS")
prog = lambda i: (i+1)*100/len(card)
data = {}
for ic, c in enumerate(card):
for ixl, l1 in enumerate(c):
lv = regexp.re_level_rec(l1)
if lv:
key = regexp.nucid2nucpy(lv['NUCID'])
if key in data.keys():
break
data[key] = {}
data[key]['E'] = lv['E']
data[key]['J'] = lv['J']
data[key]['T'] = lv['T']
data[key]['MODE'] = []
mods = ''
for l2 in c[ixl+1:]:
de = regexp.re_level_decay(l2)
if regexp.re_level_rec(l2):
break
elif de:
mods += de
mode = regexp.mode_parsing(mods, key)
data[key]['MODE'] = mode
print("\rGet Ground level...[{:6.2f}%]".format(prog(ic)), end='')
print()
return data
def get_nist():
import re
data = {}
iso = []
card = []
re_C = re.compile('^[_]+$')
re_i = re.compile('^(.{3}) (.{3}) (.{3}) (.{1,18})[ ]*(.{0,13})')
re_f = re.compile('[\d\.]+')
f = files.get_nist_file()
for l in f.readlines()[3:]:
l = l.replace('\n', '')
if re_C.match(l):
iso.append(card)
card = []
else:
card.append(l)
for c in iso:
m1 = re_i.match(c[0])
main = m1.groups()
Z = int(main[0])
symbol = main[1].strip()
mass = float(re_f.match(main[3]).group(0))
if re_f.match(main[4]):
na = float(re_f.match(main[4]).group(0))
else:
na = 0.0
code = "{:03d}{:03d}".format(Z, int(main[2]))
data[code] = {'SYM': symbol, 'M': mass, 'IS': na}
for cs in c[1:]:
m2 = re_i.match(cs)
sub = m2.groups()
mass = float(re_f.match(sub[3]).group(0))
if re_f.match(sub[4]):
na = float(re_f.match(sub[4]).group(0))
else:
na = 0.0
code = "{:03d}{:03d}".format(Z, int(sub[2]))
data[code] = {'SYM': symbol, 'M': mass, 'IS': na}
data['000001'] = {'SYM': 'n', 'M': 1.008664916, 'IS': 0.0}
return data
data = get_ground_level()
nist = get_nist()
f = open('nucinfo.pkl', 'wb')
pickle.dump(data, f)
f = open('nist.pkl', 'wb')
pickle.dump(nist, f)
| mit | -3,026,741,318,854,007,300 | 25.066038 | 69 | 0.500181 | false |
jbloom/epitopefinder | scripts/epitopefinder_plotdistributioncomparison.py | 1 | 3447 | #!python
"""Script for plotting distributions of epitopes per site for two sets of sites.
Uses matplotlib. Designed to analyze output of epitopefinder_getepitopes.py.
Written by Jesse Bloom."""
import os
import sys
import random
import epitopefinder.io
import epitopefinder.plot
def main():
"""Main body of script."""
random.seed(1) # seed random number generator in case P values are being computed
if not epitopefinder.plot.PylabAvailable():
raise ImportError("Cannot import matplotlib / pylab, which are required by this script.")
# output is written to out, currently set to standard out
out = sys.stdout
out.write("Beginning execution of epitopefinder_plotdistributioncomparison.py\n")
# read input file and parse arguments
args = sys.argv[1 : ]
if len(args) != 1:
raise IOError("Script must be called with exactly one argument specifying the input file")
infilename = sys.argv[1]
if not os.path.isfile(infilename):
raise IOError("Failed to find infile %s" % infilename)
d = epitopefinder.io.ParseInfile(open(infilename))
out.write("\nRead input arguments from %s\n" % infilename)
out.write('Read the following key / value pairs:\n')
for (key, value) in d.iteritems():
out.write("%s %s\n" % (key, value))
plotfile = epitopefinder.io.ParseStringValue(d, 'plotfile').strip()
epitopesbysite1_list = []
epitopesbysite2_list = []
for (xlist, xf) in [(epitopesbysite1_list, 'epitopesfile1'), (epitopesbysite2_list, 'epitopesfile2')]:
epitopesfile = epitopefinder.io.ParseFileList(d, xf)
if len(epitopesfile) != 1:
raise ValueError("%s specifies more than one file" % xf)
epitopesfile = epitopesfile[0]
for line in open(epitopesfile).readlines()[1 : ]:
if not (line.isspace() or line[0] == '#'):
(site, n) = line.split(',')
(site, n) = (int(site), int(n))
xlist.append(n)
if not xlist:
raise ValueError("%s failed to specify information for any sites" % xf)
set1name = epitopefinder.io.ParseStringValue(d, 'set1name')
set2name = epitopefinder.io.ParseStringValue(d, 'set2name')
title = epitopefinder.io.ParseStringValue(d, 'title').strip()
if title.upper() in ['NONE', 'FALSE']:
title = None
pvalue = epitopefinder.io.ParseStringValue(d, 'pvalue')
if pvalue.upper() in ['NONE', 'FALSE']:
pvalue = None
pvaluewithreplacement = None
else:
pvalue = int(pvalue)
pvaluewithreplacement = epitopefinder.io.ParseBoolValue(d, 'pvaluewithreplacement')
if pvalue < 1:
raise ValueError("pvalue must be >= 1")
if len(epitopesbysite2_list) >= len(epitopesbysite1_list):
raise ValueError("You cannot use pvalue since epitopesbysite2_list is not a subset of epitopesbysite1_list -- it does not contain fewer sites with specified epitope counts.")
ymax = None
if 'ymax' in d:
ymax = epitopefinder.io.ParseFloatValue(d, 'ymax')
out.write('\nNow creating the plot file %s\n' % plotfile)
epitopefinder.plot.PlotDistributionComparison(epitopesbysite1_list, epitopesbysite2_list, set1name, set2name, plotfile, 'number of epitopes', 'fraction of sites', title, pvalue, pvaluewithreplacement, ymax=ymax)
out.write("\nScript is complete.\n")
if __name__ == '__main__':
main() # run the script
| gpl-3.0 | -4,142,234,527,212,145,000 | 43.766234 | 215 | 0.668407 | false |
dazult/EPA-2012-Residential-Exposure-SOPs | sop_calcs/forms.py | 1 | 76603 | from __future__ import absolute_import
import copy
import datetime
from itertools import chain
from urlparse import urljoin
from django.conf import settings
from django.forms.util import flatatt, to_current_timezone
from django.utils.datastructures import MultiValueDict, MergeDict
from django.utils.html import escape, conditional_escape
from django.utils.translation import ugettext, ugettext_lazy
from django.utils.encoding import StrAndUnicode, force_unicode
from django.utils.safestring import mark_safe
from django.utils import datetime_safe, formats
from django import forms
import json
from collections import defaultdict
import operator
class CheckboxSelectMultipleBootstrap(forms.SelectMultiple):
def __init__(self,attrs=None, choices=()):
super(CheckboxSelectMultipleBootstrap, self).__init__(attrs, choices)
self.choices_attrs = {}
def render(self, name, value, attrs=None, choices=()):
if value is None: value = []
has_id = attrs and 'id' in attrs
final_attrs = self.build_attrs(attrs, name=name)
output = [u'<div>']
# Normalize to strings
str_values = set([force_unicode(v) for v in value])
for i, (option_value, option_label) in enumerate(chain(self.choices, choices)):
# If an ID attribute was given, add a numeric index as a suffix,
# so that the checkboxes don't all have the same ID attribute.
if has_id:
final_attrs = dict(final_attrs, id='%s_%s' % (attrs['id'], i))
label_for = u' for="%s"' % final_attrs['id']
else:
label_for = ''
choice_attrs = copy.copy(final_attrs)
if option_value in self.choices_attrs:
choice_attrs.update(self.choices_attrs[option_value])
cb = forms.CheckboxInput(choice_attrs, check_test=lambda value: value in str_values)
option_value = force_unicode(option_value)
rendered_cb = cb.render(name, option_value)
option_label = conditional_escape(force_unicode(option_label))
output.append(u'<div><label%s class="checkbox inline">%s %s</label></div>' % (label_for, rendered_cb, option_label))
output.append(u'</div>')
return mark_safe(u'\n'.join(output))
def id_for_label(self, id_):
# See the comment for RadioSelect.id_for_label()
if id_:
id_ += '_0'
return id_
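# Usage sketch (field name and choices are illustrative): the widget drops
# into a standard Django MultipleChoiceField, as the forms later in this
# module do.
#
#     scenarios = forms.MultipleChoiceField(
#         choices=[('lawn', 'Lawns/Turf')],
#         widget=CheckboxSelectMultipleBootstrap())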
class RadioFieldBootstrapRenderer(forms.widgets.RadioSelect.renderer):
def render(self):
"""
        Outputs a <div> for this set of choice fields.
        If an id was given to the field, it is applied to the outer <div>
        (each item will get an id of `$id_$i`).
"""
id_ = self.attrs.get('id', None)
start_tag = '<div id="%s" class="radio inline">'% id_ if id_ else '<div>'
output = [start_tag]
for widget in self:
output.append(force_unicode(widget))
output.append('</div>')
return mark_safe('\n'.join(output))
class RadioSelectBootstrap(forms.widgets.RadioSelect):
renderer = RadioFieldBootstrapRenderer
from sop_calcs.gardensandtrees import gardensandtrees
from sop_calcs.treated_pets import treated_pets
from sop_calcs.insect_repellent import insect_repellent
from sop_calcs.lawnturfsop import lawnturfsop
from sop_calcs.general_handler_sop import general_handler_sop
from sop_calcs.paintsop import paintsop
from sop_calcs.impregnated_materials import impregnated_materials
from sop_calcs.outdoor_misting import outdoor_misting, outdoor_misting_handler
from sop_calcs.indoor_envirnoments import indoor
from sop_calcs.exposure_profile import RiskProfile
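# RiskProfile usage pattern, as exercised in ResultsForm.results() below: one
# profile per assessment, each SOP's results fed in per duration, and the
# accumulated risk indices read back from .results (the keys shown are
# illustrative):
#
#     risk_profile = RiskProfile(exposure_routes)
#     risk_profile.update(results, "Lawns and Turf", "short")
#     rows = risk_profile.results["short"]["adult"]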
class ResultsForm(forms.Form):
title = "Assessment Background Information"
def __init__(self,*args,**kwargs):
self.input_data = kwargs.pop('_input_data',None)
super(ResultsForm,self).__init__(*args,**kwargs)
def inputs(self):
return self.input_data
def lifestage_displays(self):
lifestages = {}
lifestages['adult'] = "Adult (All)"
lifestages['adult_general'] = "Adult (All)"
lifestages['adult_female'] = "Adult Female"
lifestages['adult_male'] = "Adult Male"
lifestages['1_to_2'] = "1 < 2 year old"
lifestages['3_to_6'] = "3 < 6 year old"
lifestages['6_to_11'] = "6 < 11 year old"
lifestages['11_to_16'] = "11 < 16 year old"
return lifestages
def results(self):
try:
s = json.loads(self.input_data)
ss = ""
RIs = defaultdict(lambda : defaultdict(list))
exposure_routes = set(s['0']['exposure_routes'])
exposure_scenarios = set(s['0']['exposure_scenarios'])
body_weights_adults_options = [80., 69., 86.] # kg
bodyweight = {}
bodyweight['adult'] = 80.#body_weights_adults_options[0]
bodyweight['adult_general'] = 80.
bodyweight['adult_female'] = 69.
bodyweight['adult_male'] = 86.
pop_selection = "gen"
amended_RIs = {}
for duration in s['0']['exposure_durations']:
amended_RIs[duration] = {}
for target in s['0']['target_population']:
if target == 'adult_female':
pop_selection = "adult_female"
bodyweight['adult'] = bodyweight['adult_female']
elif target == 'adult_male':
pop_selection = "adult_male"
bodyweight['adult'] = bodyweight['adult_male']
else:
pop_selection = "gen"
bodyweight['adult'] = bodyweight['adult_general']
bodyweight['1_to_2'] = 11.
bodyweight['3_to_6'] = 19.
bodyweight['6_to_11'] = 32.
bodyweight['11_to_16'] = 57.
inhalation_rate = {}
inhalation_rate['adult'] = 0.64
inhalation_rate['1_to_2'] = 0.33
inhalation_rate['3_to_6'] = 0.42
SA_BW_ratio = {'1_to_2':640., 'adult':280.}
risk_profile = RiskProfile(exposure_routes)
for duration in s['0']['exposure_durations']:
ss += "<br>%s<br>" % duration
POD = {}
LOC = {}
absorption = {}
try:
POD['dermal'] = s['1']['dermal_%s_%s_POD'%(duration,target)]
LOC['dermal'] = s['1']['dermal_%s_%s_LOC'%(duration,target)]
absorption['dermal'] = s['1']['dermal_absorption']
except:
absorption['dermal'] = 1
try:
POD['inhalation'] = s['1']['inhalation_%s_%s_POD'%(duration,target)]
LOC['inhalation'] = s['1']['inhalation_%s_%s_LOC'%(duration,target)]
absorption['inhalation'] = s['1']['inhalation_absorption']
except:
absorption['inhalation'] = 1
try:
POD['oral'] = s['1']['oral_%s_%s_POD'%(duration,target)]
LOC['oral'] = s['1']['oral_%s_%s_LOC'%(duration,target)]
except:
pass
try:
POD['dietary'] = s['1']['dietary_POD']
LOC['dietary'] = s['1']['dietary_LOC']
except:
pass
if s['3'] != None and 'generalhandler' in exposure_scenarios: #generalhandler
SOP = "General Handler"
combining_dermal_inhalation = []
#application_rate[formulation][scenario][application_method][application_type]
application_rate = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
for formulation in GeneralHandlerForm.application_rate_form_map:
for scenario in GeneralHandlerForm.application_rate_form_map[formulation]:
for application_method in GeneralHandlerForm.application_rate_form_map[formulation][scenario]:
for application_type in GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method]:
if GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type] in s['3']:
application_rate[formulation][scenario][application_method][application_type] = s['3'][GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type]]
else:
application_rate[formulation][scenario][application_method][application_type] = 0
results = general_handler_sop(POD, LOC, bodyweight, absorption, application_rate)
risk_profile.update(results, SOP, duration)
                    if s['4'] != None and 'generalhandler' in exposure_scenarios: #misting - handler
SOP = "General Handler"
OASS_fraction_ai = s['4']['OASS_fraction_ai']
OASS_amount_of_product_in_can = s['4']['OASS_amount_of_product_in_can']
ORMS_drum_size = s['4']['ORMS_drum_size']
ORMS_dilution_rate = s['4']['ORMS_dilution_rate']
ORMS_fraction_ai = s['4']['ORMS_fraction_ai']
AB_drum_size = s['4']['AB_drum_size']
AB_dilution_rate = s['4']['AB_dilution_rate']
AB_fraction_ai = s['4']['AB_fraction_ai']
results = outdoor_misting_handler(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], OASS_fraction_ai, OASS_amount_of_product_in_can, ORMS_drum_size, ORMS_dilution_rate, ORMS_fraction_ai, AB_drum_size, AB_dilution_rate, AB_fraction_ai)
risk_profile.update(results, SOP, duration)
if s['5'] != None and 'treatedpet' in exposure_scenarios: #treatedpet
SOP = "Treated Pets"
ai_amounts = {}
amount_applied_form_map = TreatedPetForm.amount_applied_form_map
for animal in ['cat','dog']:
ai_amounts[animal] = {}
for size in ['small','medium','large']:
ai_amounts[animal][size] = s['5'][TreatedPetForm.amount_applied_form_map[animal][size]]*s['5']['fraction_ai']*1000.
results = treated_pets(POD, LOC, bodyweight, absorption['dermal'], ai_amounts)
risk_profile.update(results, SOP, duration)
if s['6'] != None and 'lawn' in exposure_scenarios: #lawn
SOP = "Lawns and Turf"
fraction_active_ingredient = s['6']['fraction_ai_in_pellets']
ttr = {'liquid':s['6']['liquid_ttr_conc'], 'solid':s['6']['solid_ttr_conc']}
application_rate = {'liquid':s['6']['liquid_application_rate'],'solid':s['6']['solid_application_rate']} # lb ai / acre
results = lawnturfsop(POD, LOC, bodyweight, absorption['dermal'], application_rate, ttr, fraction_active_ingredient)
risk_profile.update(results, SOP, duration)
if s['7'] != None and 'garden' in exposure_scenarios: #gardensandtrees
SOP = "Gardens and Trees"
dfr = {'liquid':s['7']['liquid_dfr_conc'], 'solid':s['7']['solid_dfr_conc']}
application_rate = {'liquid':s['7']['liquid_application_rate'],'solid':s['7']['solid_application_rate']} # lb ai / acre
results = gardensandtrees(POD, LOC, bodyweight, absorption['dermal'], application_rate, dfr)
#return "Here1"
risk_profile.update(results, SOP, duration)
#return exposure_scenarios
if s['8'] != None and 'insect' in exposure_scenarios: #insect
SOP = "Insect Repellents"
amount_ai = defaultdict(lambda : defaultdict(dict))
for sunscreen_status in ['without','with']:
for formulation in InsectRepellentsForm.formulations:
amount_ai[sunscreen_status][formulation] = s['8'][InsectRepellentsForm.amount_ai_formulations_form_map[sunscreen_status][formulation]]
results = insect_repellent(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, amount_ai )
risk_profile.update(results, SOP, duration)
if s['9'] != None and 'paint' in exposure_scenarios: #paint
SOP = "Paint and Preservatives"
surface_residue_concentration = s['9']['surface_residue_concentration']
fraction_of_body_exposed = PaintsAndPreservativesForm.DEFAULT_FRACTION_OF_BODY_EXPOSED#s['9']['fraction_of_body_exposed']
daily_material_to_skin_transfer_efficency = PaintsAndPreservativesForm.DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY#s['9']['daily_material_to_skin_transfer_efficency']
exposure_time = PaintsAndPreservativesForm.EXPOSURE_TIME[s['9']['indoor_or_outdoor']]#s['9']['exposure_time']
hand_to_mouth_event_freqency = PaintsAndPreservativesForm.HAND_TO_MOUTH_EVENTS_PER_HOUR[s['9']['indoor_or_outdoor']]#s['9']['hand_to_mouth_event_frequency']
results = paintsop(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, surface_residue_concentration, fraction_of_body_exposed, daily_material_to_skin_transfer_efficency, exposure_time, hand_to_mouth_event_freqency )
risk_profile.update(results, SOP, duration)
if s['10'] != None and 'impregnated_materials' in exposure_scenarios: #impregnated_materials
SOP = "Impregnated Materials"
surface_residue_concentration = s['10']['surface_residue_concentration']
weight_fraction = s['10']['weight_fraction_of_active_ingredient']
material_type = s['10']['material_type']
if surface_residue_concentration is None or surface_residue_concentration == 0:
surface_residue_concentration = weight_fraction*ImpregnatedMaterialsForm.MATERIAL_WEIGHT_TO_SURFACE_AREA_DENSITY[material_type]
body_fraction_exposed_type = s['10']['body_fraction_exposed_type']
fraction_of_body_exposed = ImpregnatedMaterialsForm.BODY_FRACTION_EXPOSED[body_fraction_exposed_type]#s['10']['fraction_of_body_exposed']
protective_barrier_present = s['10']['protective_barrier_present']
protection_factor = ImpregnatedMaterialsForm.PROTECTION_FACTOR[protective_barrier_present]
#HtM
type_of_flooring = s['10']['type_of_flooring']
fraction_of_ai_transferred_to_hands = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
hand_exposure_time = ImpregnatedMaterialsForm.FLOOR_EXPOSURE_TIME[type_of_flooring]
daily_material_to_skin_transfer_efficency = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
#daily_material_to_skin_transfer_efficency = ImpregnatedMaterialsForm.DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY
indoor_or_outdoor = s['10']['indoor_or_outdoor']
object_exposure_time = ImpregnatedMaterialsForm.EXPOSURE_TIME[indoor_or_outdoor]
hand_to_mouth_event_freqency = ImpregnatedMaterialsForm.HAND_TO_MOUTH_EVENTS_PER_HOUR[indoor_or_outdoor]
#daily_material_to_skin_transfer_efficency = forms.FloatField(required=False,initial=0.14)
#OtM
FRACTION_AI_HAND_TRANSFER = {'':0., 'carpet':0.06,'hard':0.08}
fraction_of_residue_on_object = ImpregnatedMaterialsForm.FRACTION_AI_HAND_TRANSFER[type_of_flooring]
object_to_mouth_event_frequency = ImpregnatedMaterialsForm.OBJECT_TO_MOUTH_EVENTS_PER_HOUR[indoor_or_outdoor]
results = impregnated_materials(POD, LOC, bodyweight, absorption['dermal'], SA_BW_ratio, surface_residue_concentration, fraction_of_body_exposed, daily_material_to_skin_transfer_efficency, protection_factor, fraction_of_ai_transferred_to_hands, hand_exposure_time, hand_to_mouth_event_freqency, fraction_of_residue_on_object, object_exposure_time, object_to_mouth_event_frequency)
risk_profile.update(results, SOP, duration)
if s['11'] != None and 'indoor' in exposure_scenarios: #indoor
SOP = "Indoor"
space_spray_fraction_ai = s['11']['space_spray_fraction_ai']
space_spray_amount_of_product = s['11']['space_spray_amount_of_product']
space_spray_restriction = s['11']['space_spray_restriction']
molecular_weight = s['11']['molecular_weight']
vapor_pressure = s['11']['vapor_pressure']
residues = {}
residues['broadcast'] = s['11']['broadcast_residue']
residues['perimeter/spot/bedbug (coarse)'] = s['11']['coarse_residue']
residues['perimeter/spot/bedbug (pin stream)'] = s['11']['pin_stream_residue']
residues['cracks and crevices'] = s['11']['crack_and_crevice_residue']
residues['foggers'] = s['11']['foggers_residue']
residues['space sprays'] = s['11']['space_sprays_residue']
matress_residue = s['11']['matress_residue']
results = indoor(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], space_spray_fraction_ai, space_spray_amount_of_product, space_spray_restriction, molecular_weight, vapor_pressure,residues,matress_residue)
risk_profile.update(results, SOP, duration)
if s['12'] != None and 'misting' in exposure_scenarios: #misting
SOP = "Misting"
OASS_fraction_ai = s['12']['OASS_fraction_ai']
OASS_amount_of_product_in_can = s['12']['OASS_amount_of_product_in_can']
CCTM_amount_ai_in_product= s['12']['CCTM_amount_ai_in_product']
ORMS_application_rate= s['12']['ORMS_application_rate']
ORMS_dilution_rate= s['12']['ORMS_dilution_rate']
ORMS_fraction_ai= s['12']['ORMS_fraction_ai']
AB_application_rate= s['12']['AB_application_rate']
AB_dilution_rate = s['12']['AB_dilution_rate']
AB_fraction_ai = s['12']['AB_fraction_ai']
results = outdoor_misting(POD, LOC, bodyweight, absorption['dermal'], absorption['inhalation'], OASS_fraction_ai, OASS_amount_of_product_in_can, CCTM_amount_ai_in_product, ORMS_application_rate, ORMS_dilution_rate, ORMS_fraction_ai, AB_application_rate, AB_dilution_rate, AB_fraction_ai)
risk_profile.update(results, SOP, duration)
sorted_RIs = {}
ri_id=0
for duration in risk_profile.results:
sorted_RIs[duration] = {}
for lifestage in risk_profile.results[duration]:
lifestage_final = lifestage
if pop_selection != "gen" and lifestage != 'adult':
continue
elif pop_selection != "gen":
lifestage_final = pop_selection
sorted_RIs[duration][lifestage_final] = risk_profile.results[duration][lifestage]
sorted_RIs[duration][lifestage_final].sort()
amended_RIs[duration][lifestage_final] = []
for l in sorted_RIs[duration][lifestage_final]:
n = list(l)
n.append(ri_id)
ri_id+=1
amended_RIs[duration][lifestage_final].append(n)
return amended_RIs
except Exception as e:
return e, str(e)
class IngredientOverviewForm(forms.Form):
calls = 0
title = "Assessment Background Information"
active_ingredient = forms.CharField(required=False)
    # GardenAndTreesForm, InsectRepellentsForm, PaintsAndPreservativesForm
SCENARIOS = [('generalhandler','Handler/Applicator (all scenarios)'),('insect','Insect Repellents'),('treatedpet','Treated Pets'),('lawn','Lawns/Turf'),('garden','Gardens And Trees'),('paint','Paints And Preservatives'),('impregnated_materials','Impregnated Materials'), ('indoor','Indoor'),('misting','Outdoor Misting ')]
exposure_scenarios = forms.MultipleChoiceField(choices=SCENARIOS, widget=CheckboxSelectMultipleBootstrap())
ROUTES = [('oral', 'Incidental Oral'), ('dermal', 'Dermal'), ('inhalation', 'Inhalation') , ('dietary', 'Granule/Pellet Ingestion')]
exposure_routes = forms.MultipleChoiceField(choices=ROUTES, widget=CheckboxSelectMultipleBootstrap(), initial = ['oral','dermal','inhalation','dietary'])
DURATIONS = [('short','Short-Term'),('intermediate','Intermediate-Term'),('long','Long-Term')]
exposure_durations = forms.MultipleChoiceField(choices=DURATIONS , widget=CheckboxSelectMultipleBootstrap())
TARGET_POP_CHOICES = [('gen','General Population (Adults + Children)'),('adult_female','Adult (Female Only)'),('adult_male','Adult (Male Only)')]
TARGET_POP_CHOICES_DICT = {}
for choice in TARGET_POP_CHOICES:
TARGET_POP_CHOICES_DICT[choice[0]] = choice[1]
target_population = forms.MultipleChoiceField(choices=TARGET_POP_CHOICES , widget=CheckboxSelectMultipleBootstrap(),initial=['gen'])
def __init__(self,*args,**kwargs):
super(IngredientOverviewForm,self).__init__(*args,**kwargs)
IngredientOverviewForm.calls += 1
def clean(self):
cleaned_data = super(IngredientOverviewForm, self).clean()
exposure_scenarios = cleaned_data.get("exposure_scenarios")
exposure_routes = cleaned_data.get("exposure_routes")
if exposure_routes and exposure_scenarios:
if 'dermal' in exposure_routes:
return cleaned_data
if 'oral' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['lawn','insect','paint','treatedpet','indoor','impregnated_materials', 'misting']]:
return cleaned_data
if 'inhalation' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['indoor','misting','generalhandler']]:
return cleaned_data
if 'dietary' in exposure_routes:
if True in [scenario in exposure_scenarios for scenario in ['lawn']]:
return cleaned_data
raise forms.ValidationError("No combinations of these routes and scenarios exist.")
return cleaned_data
class ToxForm(forms.Form):
calls = 0
title = "Toxicological Information"
POD_STUDY_CHOICES = [('',''),('route-specific','Route-specific'),('oral','Oral')]
ABS_STUDY_CHOICES = [('',''), ('human-study', 'Human Study'), ('animal-study', 'Animal Study'), ('POD or LOAEL/NOAEL comparison','Estimated by POD or LOAEL/NOAEL comparison'),('in vitro study','In vitro study'),('other','Other')]
TARGET_POP_CHOICES = [('gen','General Population (Adults + Children)'),('adult_female','Adult (Female Only)'),('adult_male','Adult (Male Only)')]
TARGET_POP_CHOICES_DICT = {}
for choice in TARGET_POP_CHOICES:
TARGET_POP_CHOICES_DICT[choice[0]] = choice[1]
def __init__(self,*args,**kwargs):
data = kwargs.pop('data_from_step_1',None)
self.data_from_step_1 = data
super(ToxForm,self).__init__(*args, **kwargs)
self.data_from_step_1 = self.initial['data_from_step_1']
ToxForm.calls += 1
logger.error("ToxForm __init__ calls: %s "%ToxForm.calls)
if self.data_from_step_1:
if 'dermal' in self.data_from_step_1['exposure_routes']:
self.fields['dermal_absorption'] = forms.FloatField(required=False, initial=1, label="Dermal Absorption (0-1)",min_value=0., max_value=1.)
self.fields['dermal_absorption_study'] = forms.ChoiceField(choices=ToxForm.ABS_STUDY_CHOICES,required=False,label="Dermal Absorption Study")
self.fields['dermal_POD_study'] = forms.ChoiceField(choices=ToxForm.POD_STUDY_CHOICES,required=False,label="Dermal POD Study" )
for duration in self.data_from_step_1['exposure_durations']:
if 'dermal' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['dermal_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Dermal POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]) )
self.fields['dermal_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0.,label="%s Term Dermal LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]) )
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['lawn','insect','paint','treatedpet','indoor','impregnated_materials','misting']] and 'oral' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['oral_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Oral POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
self.fields['oral_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0., label="%s Term Oral LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['indoor','misting','generalhandler']] and 'inhalation' in self.data_from_step_1['exposure_routes']:
self.fields['inhalation_absorption'] = forms.FloatField(required=False, initial=1, label="Inhalation Absorption (0-1)",min_value=0., max_value=1.)
self.fields['inhalation_absorption_study'] = forms.ChoiceField(choices=ToxForm.ABS_STUDY_CHOICES,required=False,label="Inhalation Absorption Study")
self.fields['inhalation_POD_study'] = forms.ChoiceField(choices=ToxForm.POD_STUDY_CHOICES,required=False, label="Inhalation POD Study")
for duration in self.data_from_step_1['exposure_durations']:
if True in [scenario in self.data_from_step_1['exposure_scenarios'] for scenario in ['indoor','misting','generalhandler']] and 'inhalation' in self.data_from_step_1['exposure_routes']:
for target in self.data_from_step_1['target_population']:
self.fields['inhalation_%s_%s_POD'%(duration,target)] = forms.FloatField(required=False, min_value=0.,label="%s Term Inhalation POD (mg/kg/day) (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
self.fields['inhalation_%s_%s_LOC'%(duration,target)] = forms.FloatField(required=False, initial=100, min_value=0.,label="%s Term Inhalation LOC (%s)"%(duration.capitalize(), ToxForm.TARGET_POP_CHOICES_DICT[target]))
if 'lawn' in self.data_from_step_1['exposure_scenarios'] and 'dietary' in self.data_from_step_1['exposure_routes']:
if 'gen' in self.data_from_step_1['target_population']:
self.fields['dietary_POD'] = forms.FloatField(required=False, min_value=0.,label="Dietary POD (mg/kg/day) (Children)")
self.fields['dietary_LOC'] = forms.FloatField(required=False, initial=100,min_value=0., label="Dietary LOC (Children)")
#assert(self.data_from_step_1, Exception(self.data_from_step_1))
#raise Exception(self.data_from_step_1)
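        # Field names generated above follow the pattern
        # '<route>_<duration>_<target>_POD' / '<route>_<duration>_<target>_LOC'
        # (e.g. 'dermal_short_gen_POD'); ResultsForm.results() reads them back
        # from the wizard data under the same keys.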
def clean(self, *args, **kwargs):
cleaned_data = super(ToxForm, self).clean()
for route in self.data_from_step_1['exposure_routes']:
if '%s_absorption'%(route) in self.fields:
absorption = cleaned_data.get('%s_absorption'%(route))
pod_study = cleaned_data.get('%s_POD_study'%(route))
if pod_study == 'route-specific' and absorption != 1:
msg = u"Absorption must be 1 for route specific POD studies."
self._errors['%s_absorption'%(route)] = self.error_class([msg])
self._errors['%s_POD_study'%(route)] = self.error_class([msg])
del cleaned_data['%s_POD_study'%(route)]
if '%s_absorption'%(route) in cleaned_data:
del cleaned_data['%s_absorption'%(route)]
# Always return the full collection of cleaned data.
return cleaned_data
class GeneralHandlerForm(forms.Form):
title = "General Handler Data Entry Form"
application_rate = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
application_rate_units = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
application_rate_form_map = defaultdict(lambda : defaultdict(lambda : defaultdict(dict)))
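    # All three nested maps above share the same four-level keying:
    #   [formulation][scenario][application_method][application_type]
    # e.g. application_rate['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast']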
application_rate['Dust/Powder']['Indoor Environment']['Plunger Duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Plunger Duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Bulb duster']['Perimeter/Spot/Bedbug; Crack and Crevice'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Bulb duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Electric/power duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Electric/power duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Hand crank duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Hand crank duster'][''] = 0
application_rate['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast'] = 0
application_rate['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Shaker can']['can'] = 0
application_rate['Dust/Powder']['Gardens / Trees']['Shaker can']['ft2'] = 0
application_rate['Liquid concentrate']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Broadcast Surface Spray'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Perimeter/ Spot/ Bedbug (course application)'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can with pin stream nozzle']['Perimeter/ Spot/ Bedbug (pin stream application); Crack and Crevice'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Aerosol can']['Space spray'] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Broadcast'] = 0
application_rate['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Perimeter/ Spot/ Bedbug (course application)'] = 0
application_rate['Ready-to-use']['Insect Repellent']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Insect Repellent']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Indoor Environment']['Bait (granular, hand dispersal)'][''] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Ready-to-use']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Liquid concentrate']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Wettable powders']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Wettable powders']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 0
application_rate['Wettable powders']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Wettable powders in water-soluble packaging']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Granule']['Gardens / Trees']['Push-type rotary spreader'][''] = 0
application_rate['Granule']['Lawns / Turf']['Push-type rotary spreader'][''] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Granule']['Lawns / Turf']['Belly grinder'][''] = 0
application_rate['Granule']['Gardens / Trees']['Spoon'][''] = 0
application_rate['Granule']['Lawns / Turf']['Spoon'][''] = 0
application_rate['Granule']['Gardens / Trees']['Cup'][''] = 0
application_rate['Granule']['Lawns / Turf']['Cup'][''] = 0
application_rate['Granule']['Gardens / Trees']['Hand dispersal'][''] = 0
application_rate['Granule']['Lawns / Turf']['Hand dispersal'][''] = 0
application_rate['Granule']['Gardens / Trees']['Shaker can']['can'] = 0
application_rate['Granule']['Gardens / Trees']['Shaker can']['ft2'] = 0
application_rate['Granule']['Lawns / Turf']['Shaker can'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Hose-end Sprayer'][''] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Manually-pressurized handwand'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Backpack']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Backpack']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Backpack'][''] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Sprinkler can']['ft2'] = 0
application_rate['Microencapsulated']['Gardens / Trees']['Sprinkler can']['gallons'] = 0
application_rate['Microencapsulated']['Lawns / Turf']['Sprinkler can'][''] = 0
application_rate['Ready-to-use']['Paints / Preservatives']['Aerosol can'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Airless Sprayer'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Brush'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Manually-pressurized handwand'][''] = 0
application_rate['Paints / Preservatives/ Stains']['Paints / Preservatives']['Roller'][''] = 0
application_rate['Liquid concentrate']['Treated Pets']['Dip'][''] = 0
application_rate['Liquid concentrate']['Treated Pets']['Sponge'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Trigger-spray bottle'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Aerosol can'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Shampoo'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Spot-on'][''] = 0
application_rate['Ready-to-use']['Treated Pets']['Collar'][''] = 0
application_rate['Dust/Powder']['Treated Pets']['Shaker can'][''] = 0
application_rate_units['Dust/Powder']['Indoor Environment']['Plunger Duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Plunger Duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Bulb duster']['Perimeter/Spot/Bedbug; Crack and Crevice'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Bulb duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Electric/power duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Electric/power duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Hand crank duster']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/lb dust'
application_rate_units['Dust/Powder']['Gardens / Trees']['Hand crank duster'][''] = 'lb ai/ft2'
application_rate_units['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Indoor Environment']['Shaker can']['Broadcast; Perimeter/Spot/ Bedbug (course application)'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Gardens / Trees']['Shaker can']['can'] = 'lb ai/can'
application_rate_units['Dust/Powder']['Gardens / Trees']['Shaker can']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Liquid concentrate']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Broadcast Surface Spray'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Perimeter/ Spot/ Bedbug (course application)'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can with pin stream nozzle']['Perimeter/ Spot/ Bedbug (pin stream application); Crack and Crevice'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Indoor Environment']['Aerosol can']['Space spray'] = 'lb ai/16-oz can'
application_rate_units['Ready-to-use']['Insect Repellent']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Insect Repellent']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Gardens / Trees']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Lawns / Turf']['Aerosol can'][''] = 'lb ai/can'
application_rate_units['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Broadcast'] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Indoor Environment']['Trigger-spray bottle']['Perimeter/ Spot/ Bedbug (course application)'] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Gardens / Trees']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Lawns / Turf']['Trigger-spray bottle'][''] = 'lb ai/bottle'
application_rate_units['Ready-to-use']['Indoor Environment']['Bait (granular, hand dispersal)'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Ready-to-use']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Liquid concentrate']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Indoor Environment']['Manually-pressurized handwand (w/ or w/o pin stream nozzle)']['Broadcast, Perimeter/Spot/ Bedbug (course application); Perimeter /Spot/ Bedbug (pinstream application); Crack and Crevice'] = 'lb ai/gallon'
application_rate_units['Wettable powders']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Wettable powders in water-soluble packaging']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Wettable powders in water-soluble packaging']['Gardens / Trees']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Water-disersible Granule / Dry Flowable']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Push-type rotary spreader'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Push-type rotary spreader'][''] = 'lb ai/acre'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Water-disersible Granule / Dry Flowable']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Granule']['Lawns / Turf']['Belly grinder'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Spoon'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Spoon'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Cup'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Cup'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Hand dispersal'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Hand dispersal'][''] = 'lb ai/ft2'
application_rate_units['Granule']['Gardens / Trees']['Shaker can']['can'] = 'lb ai/can'
application_rate_units['Granule']['Gardens / Trees']['Shaker can']['ft2'] = 'lb ai/ft2'
application_rate_units['Granule']['Lawns / Turf']['Shaker can'][''] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Manually-pressurized handwand']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Hose-end Sprayer']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Hose-end Sprayer'][''] = 'lb ai/acre'
application_rate_units['Microencapsulated']['Lawns / Turf']['Manually-pressurized handwand'][''] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Backpack']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Backpack']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Backpack'][''] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Gardens / Trees']['Sprinkler can']['ft2'] = 'lb ai/ft2'
application_rate_units['Microencapsulated']['Gardens / Trees']['Sprinkler can']['gallons'] = 'lb ai/gallon'
application_rate_units['Microencapsulated']['Lawns / Turf']['Sprinkler can'][''] = 'lb ai/ft2'
application_rate_units['Ready-to-use']['Paints / Preservatives']['Aerosol can'][''] = 'lb ai/12-oz can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Airless Sprayer'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Brush'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Manually-pressurized handwand'][''] = 'lb ai/1-gal can'
application_rate_units['Paints / Preservatives/ Stains']['Paints / Preservatives']['Roller'][''] = 'lb ai/1-gal can'
application_rate_units['Liquid concentrate']['Treated Pets']['Dip'][''] = 'lb ai/pet'
application_rate_units['Liquid concentrate']['Treated Pets']['Sponge'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Trigger-spray bottle'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Aerosol can'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Shampoo'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Spot-on'][''] = 'lb ai/pet'
application_rate_units['Ready-to-use']['Treated Pets']['Collar'][''] = 'lb ai/pet'
application_rate_units['Dust/Powder']['Treated Pets']['Shaker can'][''] = 'lb ai/pet'
for formulation in application_rate:
for scenario in application_rate[formulation]:
for application_method in application_rate[formulation][scenario]:
for application_type in application_rate[formulation][scenario][application_method]:
application_rate_form_map[formulation][scenario][application_method][application_type] = "%s, %s, %s, %s" %(formulation, scenario, application_method, application_type )
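    # Added note: the loops above flatten the four-level key hierarchy into a
    # single string used as both the form-field key and the label prefix, e.g.
    # application_rate_form_map['Granule']['Lawns / Turf']['Spoon'][''] becomes
    # "Granule, Lawns / Turf, Spoon, " (note the trailing ", " when the
    # application type is empty).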
def __init__(self,*args,**kwargs):
self.data_from_general_handler_sub_scenario_step = kwargs.pop('data_from_general_handler_sub_scenario_step',None)
super(GeneralHandlerForm,self).__init__(*args,**kwargs)
application_rates = []
for formulation in GeneralHandlerForm.application_rate:
if self.data_from_general_handler_sub_scenario_step:
if formulation in self.data_from_general_handler_sub_scenario_step['formulations']:
for scenario in GeneralHandlerForm.application_rate[formulation]:
if scenario in self.data_from_general_handler_sub_scenario_step['sub_scenarios']:
for application_method in GeneralHandlerForm.application_rate[formulation][scenario]:
if application_method in self.data_from_general_handler_sub_scenario_step['equipment']:
application_rates.append((formulation, scenario, application_method, GeneralHandlerForm.application_rate[formulation][scenario][application_method]))
application_rates = sorted(application_rates, key=operator.itemgetter(1))
for formulation, scenario, application_method, application_rate in application_rates:
for application_type in application_rate:
                field_key = GeneralHandlerForm.application_rate_form_map[formulation][scenario][application_method][application_type]
                units = GeneralHandlerForm.application_rate_units[formulation][scenario][application_method][application_type]
                self.fields[field_key] = forms.FloatField(
                    required=False,
                    initial=0,
                    min_value=0.,
                    label="%s [Application Rate (%s)]" % (field_key, units))
class GeneralHandlerSubScenariosForm(forms.Form):
title = "General Handler Sub Scenario Selection"
SUB_SCENARIOS_CHOICES = [('Insect Repellent','Insect Repellent'),('Treated Pets','Treated Pets'),('Lawns / Turf','Lawns / Turf'),('Gardens / Trees','Gardens / Trees'),('Paints / Preservatives','Paints / Preservatives'), ('Indoor Environment','Indoor Environment'),('Misting','Misting')]
sub_scenarios = forms.MultipleChoiceField(choices=SUB_SCENARIOS_CHOICES , widget=CheckboxSelectMultipleBootstrap())
FORMULATION_CHOICES = [('Dust/Powder','Dust/Powder'), ('Granule', 'Granule'),('Liquid concentrate','Liquid concentrate'), ('Microencapsulated','Microencapsulated'), ('Paints / Preservatives/ Stains','Paints / Preservatives/ Stains'), ('Ready-to-use','Ready-to-use'), ('Water-disersible Granule / Dry Flowable','Water-disersible Granule / Dry Flowable'), ('Wettable powders','Wettable powders'), ('Wettable powders in water-soluble packaging','Wettable powders in water-soluble packaging')]
formulations = forms.MultipleChoiceField(choices=FORMULATION_CHOICES , widget=CheckboxSelectMultipleBootstrap(), required=False)
EQUIPMENT_CHOICES = [('Aerosol can with pin stream nozzle','Aerosol can with pin stream nozzle'),('Aerosol can','Aerosol can'),('Airless Sprayer','Airless Sprayer'),('Backpack','Backpack'),('Bait (granular, hand dispersal)','Bait (granular, hand dispersal)'),('Belly grinder','Belly grinder'),('Brush','Brush'),('Bulb duster','Bulb duster'),('Collar','Collar'),('Cup','Cup'),('Dip','Dip'),('Electric/power duster','Electric/power duster'),('Hand crank duster','Hand crank duster'),('Hand dispersal','Hand dispersal'),('Hose-end Sprayer','Hose-end Sprayer'),('Manually-pressurized handwand','Manually-pressurized handwand'),('Manually-pressurized handwand (w/ or w/o pin stream nozzle)', 'Manually-pressurized handwand (w/ or w/o pin stream nozzle)'),('Plunger Duster','Plunger Duster'), ('Push-type rotary spreader', 'Push-type rotary spreader'),('Roller','Roller'),('Shaker can','Shaker can'),('Shampoo','Shampoo'),('Sponge','Sponge'),('Spot-on','Spot-on'),('Sprinkler can','Sprinkler can'
),('Trigger-spray bottle','Trigger-spray bottle')]
equipment = forms.MultipleChoiceField(choices=EQUIPMENT_CHOICES , widget=CheckboxSelectMultipleBootstrap(), required=False)
n_inputs_equipment = defaultdict(lambda : defaultdict(lambda : defaultdict(int)))
n_inputs_formulation = defaultdict(lambda : defaultdict(int))
n_inputs_scenarios = defaultdict(int)
for i in xrange(0, len(SUB_SCENARIOS_CHOICES)):
for j in xrange(0, len(FORMULATION_CHOICES)):
for k in xrange(0, len(EQUIPMENT_CHOICES)):
formulation = FORMULATION_CHOICES[j][0]
scenario = SUB_SCENARIOS_CHOICES[i][0]
application_method = EQUIPMENT_CHOICES[k][0]
try:
size = len(GeneralHandlerForm.application_rate[formulation][scenario][application_method])
n_inputs_equipment[i][j][k] += size
n_inputs_formulation[i][j] += size
n_inputs_scenarios[i] += size
                except KeyError:
                    # No rate data exists for this combination; it contributes nothing.
                    pass
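    # Added sketch of what the tallies above hold: n_inputs_equipment[i][j][k]
    # counts the application-rate inputs for one scenario/formulation/equipment
    # triple, while n_inputs_formulation[i][j] and n_inputs_scenarios[i] are the
    # running sums over equipment and over formulation+equipment respectively;
    # these counts presumably drive the "(n)" hints referred to by the
    # validation message in clean() below (assumption, not confirmed here).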
def __init__(self,*args,**kwargs):
super(GeneralHandlerSubScenariosForm,self).__init__(*args,**kwargs)
def clean(self):
cleaned_data = super(GeneralHandlerSubScenariosForm, self).clean()
equipment = cleaned_data.get("equipment")
formulations = cleaned_data.get("formulations")
sub_scenarios = cleaned_data.get("sub_scenarios")
if sub_scenarios == ['Misting']:
return cleaned_data
elif sub_scenarios:
            if not formulations or not equipment:
                raise forms.ValidationError("Both formulations and equipment need to be selected for %s." % ", ".join(sub_scenarios))
count = 0
for scenario in sub_scenarios:
for formulation in formulations:
for application_method in equipment:
count += len(GeneralHandlerForm.application_rate[formulation][scenario][application_method])
if count == 0:
raise forms.ValidationError("No scenarios available for this selection of formulations and equipment. Ensure at least one of the equipment choices has greater than 1 in brackets.")
return cleaned_data
class TreatedPetForm(forms.Form):
title = "Treated Pet Data Entry Form"
amount_applied_form_map = defaultdict(dict)
for animal in ['cat','dog']:
for size in ['small','medium','large']:
amount_applied_form_map[animal][size] = "%s %s" %(size, animal)
# amount_applied['Other Pet'][''] = 0
    fraction_ai = forms.FloatField(required=False, initial=0, min_value=0., max_value=1., label="Fraction of ai in product (0-1)")
default_pet_weights = {'cat':{},'dog':{}} #lb
default_pet_weights['dog'] = {'small':10.36535946,'medium':38.16827225,'large':76.50578234} #lb
default_pet_weights['cat'] = {'small':3.568299485,'medium':7.8300955,'large':16.13607146}
pet_weight = default_pet_weights['dog']['medium']
    # Surface Area (cm2) = 12.3 * (BW(lb) * 454)**0.65
    @staticmethod
    def pet_surface_area(lb):
        return 12.3 * ((lb * 454) ** 0.65)
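    # Worked example (added): for the default medium dog weight of ~38.17 lb,
    # pet_surface_area(38.17) = 12.3 * (38.17 * 454) ** 0.65 ~= 7,000 cm2.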
def __init__(self,*args,**kwargs):
super(TreatedPetForm,self).__init__(*args,**kwargs)
for animal in TreatedPetForm.amount_applied_form_map:
for size in TreatedPetForm.amount_applied_form_map[animal]:
TreatedPetForm.amount_applied_form_map[animal][size] = "%s %s" %(size, animal)
self.fields[TreatedPetForm.amount_applied_form_map[animal][size]] = forms.FloatField(required=False,initial=0,min_value=0.,label = "Amount of product applied to a %s %s (g)" %(size, animal))
class LawnTurfForm(forms.Form):
title = "Lawn and Turf Data Entry Form"
liquid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid Application Rate (lb ai/acre)")
solid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Solid Application Rate (lb ai/acre)")
liquid_ttr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid TTR (calculated from application rate if not available) (ug/cm2)")#ORt = TTRt
solid_ttr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Solid TTR (calculated from application rate if not available) (ug/cm2)")#ORt = TTRt
fraction_ai_in_pellets = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in pellets/granules (0-1)")
class GardenAndTreesForm(forms.Form):
title = "Garden and Trees Data Entry Form"
liquid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid Application Rate (lb ai/acre)")
solid_application_rate = forms.FloatField(required=False,initial=0,min_value=0., label="Solid Application Rate (lb ai/acre)")
liquid_dfr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Liquid DFR (calculated from application rate if not available) (ug/cm2)")
solid_dfr_conc = forms.FloatField(required=False,initial=0,min_value=0., label="Solid DFR (calculated from application rate if not available) (ug/cm2)")
class InsectRepellentsForm(forms.Form):
title = "Insect Repellent Data Entry Form"
formulations = ['Aerosol', 'Pump spray', 'Lotion','Towelette']
amount_ai_formulations_form_map = defaultdict(dict)
for sunscreen_status in ['without','with']:
for formulation in formulations:
amount_ai_formulations_form_map[sunscreen_status][formulation] = "%s repellent %s sunscreen" %(formulation, sunscreen_status)
def __init__(self,*args,**kwargs):
super(InsectRepellentsForm,self).__init__(*args,**kwargs)
for sunscreen_status in ['without','with']:
for formulation in InsectRepellentsForm.formulations:
self.fields[InsectRepellentsForm.amount_ai_formulations_form_map[sunscreen_status][formulation]] = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1., label = "Fraction of ai in %s repellent %s sunscreen (mg ai / mg product)"%(formulation,sunscreen_status))
class PaintsAndPreservativesForm(forms.Form):
title = "Paints and Preservatives Data Entry Form"
surface_residue_concentration = forms.FloatField(required=False,initial=0, min_value=0., label="Surface Residue Concentration (mg ai/cm^2)")
DEFAULT_FRACTION_OF_BODY_EXPOSED = 0.31
DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY = 0.14
EXPOSURE_TIME = {'indoor':4., 'outdoor':1.5}
HAND_TO_MOUTH_EVENTS_PER_HOUR = {'indoor':20., 'outdoor':13.9}
indoor_or_outdoor = forms.ChoiceField(choices=[('indoor','Indoor'),('outdoor','Outdoor')], initial='indoor', label="Location of interest (indoor/outdoor)")
class ImpregnatedMaterialsForm(forms.Form):
title = "Impregnated Materials Data Entry Form"
surface_residue_concentration = forms.FloatField(required=False)
weight_fraction_of_active_ingredient = forms.FloatField(required=False)
MATERIAL_CHOICES = [('cotton', 'Cotton'), ('light_cotton_synthetic_mix', 'Light Cotton/Synthetic Mix'), ('heavy_cotton_synthetic_mix','Heavy Cotton/Synthetic Mix'),('all_synthetics','All Synthetics'),('household_carpets','Household Carpets'),('plastic_polymers','Plastic Polymers'), ('vinyl_flooring','Vinyl Flooring')]
material_type = forms.ChoiceField(choices=MATERIAL_CHOICES,required=False)
MATERIAL_CHOICES_DICT = {}
for choice in MATERIAL_CHOICES:
MATERIAL_CHOICES_DICT[choice[0]]=choice[1]
MATERIAL_WEIGHT_TO_SURFACE_AREA_DENSITY = {'cotton': 20., 'light_cotton_synthetic_mix': 10., 'heavy_cotton_synthetic_mix':24.,'all_synthetics':1.,'household_carpets':120.,'plastic_polymers':100., 'vinyl_flooring':40.}
#DERMAL
BODY_FRACTION_CHOICES = [('pants_jacket_shirt','Pants, Jacket, or Shirts'), ('total', 'Total Body Coverage'), ('floor', 'Mattresses, Carpets or Flooring'), ('handlers','Handlers')]
BODY_FRACTION_CHOICES_DICT = {}
for choice in BODY_FRACTION_CHOICES:
BODY_FRACTION_CHOICES_DICT[choice[0]]=choice[1]
body_fraction_exposed_type = forms.ChoiceField(choices=BODY_FRACTION_CHOICES,required=True)
BODY_FRACTION_EXPOSED = {'pants_jacket_shirt':0.5, 'total':1, 'floor':0.5, 'handlers':0.11}
    protective_barrier_present = forms.ChoiceField(choices=[('no','No'),('yes','Yes')],required=True,initial='no', label = "Is there a potential protective barrier present (such as bed sheets or other fabrics)?")
PROTECTION_FACTOR = {'no':1,'yes':0.5}
#HtM
TYPE_OF_FLOORING_CHOICES = [('',''), ('carpet','Carpet or Textiles'), ('hard', 'Hard Surface or Flooring')]
TYPE_OF_FLOORING_CHOICES_DICT = {}
for choice in TYPE_OF_FLOORING_CHOICES:
TYPE_OF_FLOORING_CHOICES_DICT[choice[0]]=choice[1]
type_of_flooring = forms.ChoiceField(choices=TYPE_OF_FLOORING_CHOICES ,required=False)
FRACTION_AI_HAND_TRANSFER = {'':0., 'carpet':0.06,'hard':0.08}
FLOOR_EXPOSURE_TIME = {'':0., 'carpet':4.,'hard':2.}
DEFAULT_FRACTION_OF_BODY_EXPOSED = 0.31
DEFAULT_DAILY_MATERIAL_TO_SKIN_TRANSFER_EFFICENCY = 0.14
EXPOSURE_TIME = {'indoor':4., 'outdoor':1.5}
HAND_TO_MOUTH_EVENTS_PER_HOUR = {'indoor':20., 'outdoor':13.9}
indoor_or_outdoor = forms.ChoiceField(choices=[('indoor','Indoor'),('outdoor','Outdoor')], initial='indoor', label="Location of interest (indoor/outdoor)")
#daily_material_to_skin_transfer_efficency = forms.FloatField(required=False,initial=0.14)
#OtM
OBJECT_TO_MOUTH_EVENTS_PER_HOUR = {'':14.,'indoor':14., 'outdoor':8.8}
class IndoorEnvironmentsForm(forms.Form):
title = "Indoor Environments Data Entry Form"
space_spray_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Aerosol Space Sprays (0-1)")
space_spray_amount_of_product = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Aerosol Space Spray can (g/can)")
SPACE_SPRAY_RESTRICTION_CHOICES = [('NA','Not Applicable')] + [ (t/60., "%s minutes"%t) for t in [0,5,10,15,20,30,40,60,120]]
space_spray_restriction = forms.ChoiceField(choices=SPACE_SPRAY_RESTRICTION_CHOICES)
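    # Added note: the restriction choice values are stored in hours via the
    # t/60. conversion above, e.g. selecting "30 minutes" submits 0.5, so any
    # downstream exposure-time arithmetic can work in hours (assumption based
    # on the conversion, not on code visible in this file).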
molecular_weight = forms.FloatField(required=False,initial=0, min_value=0.,label="Molecular weight (g/mol)")
vapor_pressure = forms.FloatField(required=False,initial=0, min_value=0.,label="Vapor pressure (mmHg)")
broadcast_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on broadcast (ug/cm^2)")
coarse_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on perimeter/spot/bedbug (coarse) (ug/cm^2)")
pin_stream_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on perimeter/spot/bedbug (pin stream) (ug/cm^2)")
crack_and_crevice_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on cracks and crevices (ug/cm^2)")
foggers_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited by foggers (ug/cm^2)")
space_sprays_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited by space sprays (ug/cm^2)")
matress_residue = forms.FloatField(required=False,initial=0, min_value=0.,label="Residue deposited on mattress (ug/cm^2)")
class OutdoorMistingForm(forms.Form):
title = "Outdoor Misting Data Entry Form"
#OASS
OASS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Aerosol Space Sprays (0-1)")
OASS_amount_of_product_in_can = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Outdoor Aerosol Space Spray can (g/can)")
# CCTM
CCTM_amount_ai_in_product = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount ai in Candles, Coils, Torches, and/or Mats (mg ai/product)")
# ORMS
#product app rate on label:
    ORMS_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Outdoor Residential Misting System (oz/1000 cu. ft.)")
#else
ORMS_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Dilution rate in Outdoor Residential Misting System (vol product/vol total solution) (0-1)")
ORMS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Residential Misting System (0-1)")
# AB
#product app rate on label:
    AB_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Animal Barns (oz/1000 cu. ft.)")
#else
AB_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Dilution rate in Animal Barns (vol product/vol total solution) (0-1)")
AB_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Animal Barns (0-1)")
class OutdoorMistingGeneralHandlerForm(forms.Form):
title = "Outdoor Misting General Handler Data Entry Form"
OASS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Aerosol Space Sprays (0-1)")
OASS_amount_of_product_in_can = forms.FloatField(required=False,initial=0, min_value=0.,label="Amount of product in Outdoor Aerosol Space Spray can (g/can)")
# ORMS
#product app rate on label:
ORMS_DRUM_CHOICES = [(30,'30 gallons'), (55, '55 gallons')]
ORMS_drum_size = forms.ChoiceField(choices=ORMS_DRUM_CHOICES,required=False, initial=55, label="Outdoor Residential Misting System Drum Size")
    ORMS_application_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Application rate in Outdoor Residential Misting System (oz/1000 cu. ft.)")
#else
ORMS_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Dilution rate in Outdoor Residential Misting System (vol product/vol total solution)")
ORMS_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Outdoor Residential Misting System (0-1)")
# AB
#product app rate on label:
AB_DRUM_CHOICES = [(30,'30 gallons'), (55, '55 gallons'), (125, '125 gallons')]
AB_drum_size = forms.ChoiceField(choices=AB_DRUM_CHOICES,required=False, initial=55, label="Animal Barn Drum Size" )
#else
AB_dilution_rate = forms.FloatField(required=False,initial=0, min_value=0.,label="Dilution rate in Animal Barns (vol product/vol total solution)")
AB_fraction_ai = forms.FloatField(required=False,initial=0, min_value=0.,max_value=1.,label="Fraction of ai in Animal Barns (0-1)")
| agpl-3.0 | -3,916,772,724,174,924,000 | 73.444121 | 994 | 0.63773 | false |
jmennen/group5 | Code/buzzit/buzzit_messaging/urls.py | 1 | 2558 | __author__ = 'User'
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^myfollowers/$', views.beingFollowedByView, name="my_followers"),
url(r'^circle/new/$', views.CreateCircleView.as_view(), name="new_circle"),
url(r'^circle/(?P<slug>[0-9]+)/$', views.circleDetails, name="circle_details"),
url(r'^circle/(?P<circle_id>[0-9]+)/addusers/$', views.add_users_to_circle, name="add_users_to_circle"),
url(r'^circle/(?P<user_id>[0-9]+)/adduser/$', views.add_user_to_circles, name="add_user_to_circles"),
url(r'^circle/(?P<user_id>[0-9]+)/(?P<circle_id>[0-9]+)/removeuser/$', views.remove_user_from_circle, name="remove_user_from_circle"),
url(r'^circle/(?P<slug>[0-9]+)/delete/$', views.RemoveCircleView, name="delete_circle"),
url(r'^circles/$', views.CircleOverviewView.as_view(), name="circle_overview"),
url(r'^follows/$', views.listfollows, name="list_follows"),
url(r'^circlemessage/new/$', views.postCirclemessage, name="new_circlemessage"),
url(r'^circlemessage/(?P<message_id>[0-9]+)/delete/$', views.delete_circle_message, name="delete_circlemessage"),
url(r'^follow/(?P<user_id>[0-9]+)/$', views.follow, name="follow"),
url(r'^unfollow/(?P<user_id>[0-9]+)/$', views.unfollow, name="unfollow"),
# new since sprint 4
url(r'^are_there_new_notifications/$', views.information_about_new_directmessages, name="notification_polling"),
url(r'^circle/message/(?P<slug>[0-9]+)/$', views.PostDetailsView.as_view(), name="one_circlemessage"),
#url(r'^circle/message/(?P<message_id>[0-9]+)/answer$', views.answer_to_circlemessage, name="answer_circlemessage"),
url(r'^circle/message/(?P<message_id>[0-9]+)/repost$', views.repost, name="repost_circlemessage"),
url(r'^chat/(?P<sender_id>[a-zA-Z0-9]+)/$', views.direct_messages_details, name="chat"),
url(r'^chat/(?P<username>[a-zA-Z0-9]+)/poll/json$', views.chat_polling, name="chat_polling"),
url(r'^chats/$', views.direct_messages_overview, name="all_chats"),
url(r'^search/user/(?P<query>[a-zA-Z0-9]+)/json$', views.search_user_json, name="search_user_json"),
url(r'^search/theme/(?P<query>[a-zA-Z0-9]+)/json$', views.search_theme_json, name="search_theme_json"),
    url(r'^search/theme/(?P<theme>[a-zA-Z0-9]+)$', views.showPostsToTheTheme, name="search_theme"),
#new sind sprint 5
url(r'^circlemessage/new/json$', views.postCirclemessage_json, name="new_circlemessage_json"),
url(r'^circlemessage/getall/json$', views.get_all_circlemessages_json, name="get_circlemessages_json"),
]
| bsd-2-clause | -8,634,068,687,650,702,000 | 76.515152 | 138 | 0.666145 | false |
mathieulavoie/Bitcluster | web/web.py | 1 | 5465 | #from web import app
from web.dao import getNodeFromAddress, getNodeInformation, getTransations, groupByAllDistribution, groupbyNode, \
groupbyAmount, groupbyDate
from flask import *
import re
import csv
import io
from datetime import datetime, timedelta
app = Flask(__name__)
@app.route('/',methods=['POST', 'GET'])
def web_root():
if request.method == 'POST':
address = request.form['q']
if address.isnumeric():
return redirect(url_for('get_node_request',node_id=address))
else:
pattern = re.compile("^([1-9ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz])+$")
if pattern.match(address):
node_id = getNodeFromAddress(address)
if node_id is not None:
return redirect(url_for('get_node_request',node_id=node_id))
            return render_template('index.html', message="Invalid or nonexistent address")
return render_template('index.html')
@app.route('/nodes/<int:node_id>')
def get_node_request(node_id):
infos = getNodeInformation(node_id)
    limit = 100
truncated_trx_in,trx_in = trim_collection(infos['transactions']['in'],limit)
truncated_trx_out,trx_out = trim_collection(infos['transactions']['out'],limit)
truncated_by_node_in,infos['incomes_grouped']['by_node'] = trim_collection(infos['incomes_grouped']['by_node'],limit)
truncated_by_node_out,infos['outcomes_grouped']['by_node'] = trim_collection(infos['outcomes_grouped']['by_node'],limit)
truncated_by_amount_in,infos['incomes_grouped']['by_amount']['amount_usd'] = trim_collection(infos['incomes_grouped']['by_amount']['amount_usd'],limit)
truncated_by_amount_out,infos['outcomes_grouped']['by_amount']['amount_usd'] = trim_collection(infos['outcomes_grouped']['by_amount']['amount_usd'],limit)
infos['transactions'] = {'in': trx_in, 'out':trx_out}
return render_template('node_details.html',informations=infos, truncated=(truncated_trx_in or truncated_trx_out or truncated_by_node_in or truncated_by_node_out or truncated_by_amount_in or truncated_by_amount_out))
def trim_collection(collection, limit):
if len(collection) > limit:
return True, collection[0:limit]
return False, collection
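# Example (added): trim_collection(range(150), 100) returns (True, <first 100
# items>), while an input at or under the limit comes back as (False, items).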
@app.route('/nodes/<int:node_id>/download/json/<direction>')
def download_transations_json(node_id,direction):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
transactions = getTransations(node_id,direction)
grouped = groupByAllDistribution(transactions,direction)
response = jsonify({"transactions":transactions, "groups":grouped})
response.headers['Content-disposition'] = "attachment;filename=transactions_%d_%s.json"% (node_id, direction)
return response
@app.route('/nodes/<int:node_id>/download/csv/<direction>')
def download_transations_csv(node_id,direction):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
output = io.StringIO()
fieldnames = ['trx_date','block_id','source_n_id','destination_n_id','amount', 'amount_usd','source','destination']
writer = csv.DictWriter(output, fieldnames=fieldnames)
writer.writeheader()
for trx in getTransations(node_id,direction):
writer.writerow(trx)
return Response(
output.getvalue(),
mimetype="text/csv",
headers={"Content-disposition":"attachment; filename=transactions_%d_%s.csv"% (node_id, direction)})
@app.route('/nodes/<int:node_id>/download/csv/<direction>/<grouping>')
def download_grouped_transactions(node_id,direction,grouping):
if direction not in ["in","out"]:
return Response(response="Invalid direction",status=500)
output = io.StringIO()
transactions = getTransations(node_id,direction)
writer = csv.writer(output)
if grouping == "by_node":
writer.writerow(['node_id','amount_usd','amount_btc','transaction_count'])
for k,v in groupbyNode(transactions,direction):
writer.writerow([k,v['amount_usd'],v['amount_btc'],len(v['transactions'])])
elif grouping == "by_amount":
writer.writerow(['amount_usd','frequency'])
for k,v in groupbyAmount(transactions)['amount_usd']:
writer.writerow([k,v])
elif grouping == "by_date":
date_format = '%Y-%m-%d'
sorted_by_date = groupbyDate(transactions)
min_date = datetime.strptime(sorted_by_date[0][0],date_format)
max_date = datetime.strptime(sorted_by_date[-1][0],date_format)
delta = max_date - min_date
index = 0
writer.writerow(['date','amount_usd','amount_btc','transaction_count'])
for date in [min_date + timedelta(days=x) for x in range(0,delta.days+1)]:
strdate = date.strftime(date_format)
k,v = sorted_by_date[index]
if k == strdate:
writer.writerow([k,v['amount_usd'],v['amount_btc'],len(v['transactions'])])
index +=1
else:
writer.writerow([strdate,0,0,0])
else:
return Response(response="Invalid grouping. Possible options : by_node , by_amount , by_date",status=500)
return Response(
output.getvalue(),
mimetype="text/csv",
headers={"Content-disposition":"attachment; filename=transactions_%d_%s_%s.csv"% (node_id, direction,grouping)})
| mit | -8,318,828,850,818,100,000 | 37.485915 | 219 | 0.654163 | false |
NCI-Cloud/horizon | openstack_dashboard/local/dashboards/project_nci/instances/workflows/create_instance.py | 1 | 41960 | # openstack_dashboard.local.dashboards.project_nci.instances.workflows.create_instance
#
# Copyright (c) 2015, NCI, Australian National University.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
import itertools
import json
import logging
import netaddr
import operator
import os.path
#import pdb ## DEBUG
import re
import socket
import time
import types
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables
from django.template.defaultfilters import filesizeformat
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances.workflows import create_instance as base_mod
from openstack_dashboard.local.nci import crypto as ncicrypto
from openstack_dashboard.local.nci import utils as nciutils
from openstack_dashboard.local.nci.constants import *
LOG = logging.getLogger(__name__)
class SetInstanceDetailsAction(base_mod.SetInstanceDetailsAction):
Meta = nciutils.subclass_meta_type(base_mod.SetInstanceDetailsAction)
def populate_image_id_choices(self, request, context):
choices = super(SetInstanceDetailsAction, self).populate_image_id_choices(request, context)
# Find the latest VL image for each unique series tag and add an
# alias item to the top of the images list with a more friendly name
# so that the user doesn't have to hunt through the entire list
# looking for the correct image to use.
self.vl_tags = {}
for id, image in choices:
if not id:
continue
parts = image.name.split("-")
if parts[0] == "vl":
if not image.is_public:
LOG.debug("Ignoring non-public VL image: {0}".format(image.name))
continue
# VL images have the following name format:
# vl-<tag_base>[-<tag_variant>-...]-<timestamp>
if len(parts) < 3:
LOG.warning("Invalid VL image name format: {0}".format(image.name))
continue
tag = "-".join(parts[1:-1])
if re.match(r"2[0-9]{7}", parts[-1]):
image._vl_ts = parts[-1]
else:
LOG.warning("Invalid or missing timestamp in VL image name: {0}".format(image.name))
continue
if (tag not in self.vl_tags) or (image._vl_ts > self.vl_tags[tag]._vl_ts):
self.vl_tags[tag] = image
def clone_image(tag):
if "-" in tag:
(base, variant) = tag.split("-", 1)
else:
base = tag
variant = ""
if base.startswith("centos"):
title = "CentOS"
base = base[6:]
elif base.startswith("ubuntu"):
title = "Ubuntu"
base = base[6:]
else:
title = tag
base = ""
variant = ""
if base:
title += " " + base
if variant:
title += " " + variant
image = copy.copy(self.vl_tags[tag])
image._real_id = image.id
image.id = "vltag:" + tag
image.name = title
self.vl_tags[tag] = image
return image
if self.vl_tags:
choices.insert(1, ("---all", "----- All Images -----"))
for tag in reversed(sorted(self.vl_tags.keys())):
image = clone_image(tag)
choices.insert(1, (image.id, image))
choices.insert(1, ("---vl", "----- VL Images -----"))
return choices
def clean_name(self):
if hasattr(super(SetInstanceDetailsAction, self), "clean_name"):
val = super(SetInstanceDetailsAction, self).clean_name()
else:
val = self.cleaned_data.get("name")
val = val.strip()
if val and ("." in val):
valid_fqdn = r"^([a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?\.)*[a-zA-Z0-9]([a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])?$"
if not re.search(valid_fqdn, val):
msg = _("The specified FQDN doesn't satisfy the requirements of a valid DNS hostname.")
raise forms.ValidationError(msg)
return val
def clean_image_id(self):
if hasattr(super(SetInstanceDetailsAction, self), "clean_image_id"):
val = super(SetInstanceDetailsAction, self).clean_image_id()
else:
val = self.cleaned_data.get("image_id")
if val:
if val.startswith("---"):
val = ""
elif val.startswith("vltag:"):
# Convert the VL image tag back into the real image ID.
tag = val[6:]
if tag not in self.vl_tags:
msg = _("Image tag doesn't exist")
raise forms.ValidationError(msg)
val = self.vl_tags[tag]._real_id
return val
def get_help_text(self):
saved = self._images_cache
try:
# Add our VL image aliases to the image cache temporarily so
# that they are included in the list passed to "initWithImages()"
# in "horizon/static/horizon/js/horizon.quota.js" (via the
# "_flavors_and_quotas.html" template). The result will be
# that any flavours which are too small will be disabled when
# a given image alias is selected in the drop down.
self._images_cache["public_images"].extend(self.vl_tags.values())
return super(SetInstanceDetailsAction, self).get_help_text()
finally:
self._images_cache = saved
class SetInstanceDetails(base_mod.SetInstanceDetails):
action_class = SetInstanceDetailsAction
class SetAccessControlsAction(base_mod.SetAccessControlsAction):
Meta = nciutils.subclass_meta_type(base_mod.SetAccessControlsAction)
def __init__(self, request, context, *args, **kwargs):
super(SetAccessControlsAction, self).__init__(request, context, *args, **kwargs)
# Remove the security groups field since they aren't functional on
# our new cloud.
del self.fields["groups"]
def populate_groups_choices(self, request, context):
return []
class SetAccessControls(base_mod.SetAccessControls):
action_class = SetAccessControlsAction
class FixedIPMultiWidget(forms.MultiWidget):
def __init__(self, choices, attrs=None):
sub_widgets = (
forms.Select(choices=choices, attrs=attrs),
forms.TextInput(attrs=attrs),
)
super(FixedIPMultiWidget, self).__init__(sub_widgets, attrs)
def has_choice(self, value):
for x in self.widgets[0].choices:
if isinstance(x[1], (list, tuple)):
for y in x[1]:
if y[0] == value:
return True
elif x[0] == value:
return True
return False
def decompress(self, value):
if value is not None:
if self.has_choice(value):
return [value, None]
else:
return ["manual", value]
else:
return [None, None]
def value_from_datadict(self, data, files, name):
v = super(FixedIPMultiWidget, self).value_from_datadict(data, files, name)
if v[0] == "manual":
return v[1].strip()
else:
return v[0]
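# Added sketch of the widget's round trip (using only names from this module):
# with choices containing "auto" and "manual", decompress("10.0.0.5") returns
# ["manual", "10.0.0.5"] because a literal address is not among the choices,
# while decompress("auto") returns ["auto", None]; value_from_datadict then
# folds the submitted pair back into a single fixed-IP value, taking the text
# input only when the select box is on "manual".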
# NB: We aren't subclassing the upstream implementation of this action.
class SetNetworkAction(workflows.Action):
Meta = nciutils.subclass_meta_type(base_mod.SetNetworkAction)
@staticmethod
def user_has_ext_net_priv(request):
return (request.user.is_superuser
or request.user.has_perms([settings.NCI_EXTERNAL_NET_PERM]))
def __init__(self, request, context, *args, **kwargs):
super(SetNetworkAction, self).__init__(request, context, *args, **kwargs)
# If the user has access to the external network then retrieve any
# fixed public IP allocations defined for this tenant.
all_fixed_pub_ips = netaddr.IPSet()
self.fixed_pub_ips_pool = False
if self.user_has_ext_net_priv(request):
try:
if request.user.project_name in settings.NCI_FIXED_PUBLIC_IPS:
for cidr in settings.NCI_FIXED_PUBLIC_IPS[request.user.project_name]:
if cidr == "pool":
self.fixed_pub_ips_pool = True
else:
all_fixed_pub_ips.add(netaddr.IPNetwork(cidr))
elif request.user.project_name == "admin":
self.fixed_pub_ips_pool = True
except (netaddr.AddrFormatError, ValueError) as e:
LOG.exception("Error parsing fixed public IP list: {0}".format(e))
messages.error(request, str(e))
msg = _("Failed to load fixed public IP configuration.")
messages.warning(request, msg)
all_fixed_pub_ips = netaddr.IPSet()
self.fixed_pub_ips_pool = False
self.fixed_pub_ips_enabled = (bool(all_fixed_pub_ips) or self.fixed_pub_ips_pool)
# Build the list of network choices.
networks_list = self.get_networks(request)
self.networks = dict([(x.id, x) for x in networks_list])
network_choices = [(x.id, x.name) for x in sorted(networks_list, key=operator.attrgetter('name'))]
network_choices.insert(0, ("", "-- Unassigned --"))
# Build the fixed and floating IP choice lists.
self.pub_ips = self.get_public_ips(request, all_fixed_pub_ips)
fixed_ip_choices = [
("auto", "Automatic"),
("manual", "Manual"),
]
if self.fixed_pub_ips_enabled:
ext_fixed_ip_choices = [(str(x), str(x)) for x in self.pub_ips["fixed"]]
if self.fixed_pub_ips_pool:
ext_fixed_ip_choices.append(["ext_pool", "Global Allocation Pool"])
grp_title = "External"
if not ext_fixed_ip_choices:
grp_title += " (none available)"
fixed_ip_choices.append((grp_title, ext_fixed_ip_choices))
else:
ext_fixed_ip_choices = []
floating_ip_choices = [(x.id, x.ip) for x in sorted(self.pub_ips["float"].itervalues(), key=lambda x: netaddr.IPAddress(x.ip))]
floating_ip_choices.insert(0, ("", "-- None --"))
# Create the form fields for each network interface.
self.intf_limit = settings.NCI_VM_NETWORK_INTF_LIMIT
if not settings.NCI_DUPLICATE_VM_NETWORK_INTF:
self.intf_limit = max(1, min(self.intf_limit, len(networks_list)))
for i in range(0, self.intf_limit):
self.fields["eth{0:d}_network".format(i)] = forms.ChoiceField(
label=_("Network"),
required=(i == 0),
choices=network_choices,
initial="",
help_text=_("The network that this interface should be attached to."))
self.fields["eth{0:d}_fixed_ip".format(i)] = forms.CharField(
widget=FixedIPMultiWidget(fixed_ip_choices),
label=_("Fixed IP"),
required=True,
initial="auto",
help_text=_("The fixed IP address to assign to this interface."))
self.fields["eth{0:d}_floating_ip".format(i)] = forms.ChoiceField(
label=_("Floating Public IP"),
required=False,
choices=floating_ip_choices,
initial="",
help_text=_("A floating IP address to associate with this interface."))
# Select reasonable defaults if there is an obvious choice. We only
# consider external networks as an option if there aren't any floating
# IPs available.
external_net_ids = set([x for x, y in self.networks.iteritems() if y.get("router:external", False)])
private_net_ids = set(self.networks.keys()) - external_net_ids
default_priv_net = None
if len(private_net_ids) == 1:
default_priv_net = iter(private_net_ids).next()
elif private_net_ids:
# As a convention, when we setup a new tenant we create a network
# with the same name as the tenant.
search = [request.user.project_name]
if request.user.project_name in ["admin", "z00"]:
search.append("internal")
matches = [x for x in private_net_ids if self.networks[x].name in search]
if len(matches) == 1:
default_priv_net = matches[0]
if len(floating_ip_choices) > 1:
if default_priv_net:
self.fields["eth0_network"].initial = default_priv_net
self.fields["eth0_floating_ip"].initial = floating_ip_choices[1][0]
elif ext_fixed_ip_choices:
if len(external_net_ids) == 1:
self.fields["eth0_network"].initial = iter(external_net_ids).next()
self.fields["eth0_fixed_ip"].initial = ext_fixed_ip_choices[0][0]
if default_priv_net:
assert self.intf_limit > 1
self.fields["eth1_network"].initial = default_priv_net
elif default_priv_net:
self.fields["eth0_network"].initial = default_priv_net
# A list of external network IDs is needed for the client side code.
self.external_nets = ";".join(external_net_ids)
def get_networks(self, request):
networks = []
try:
networks = api.neutron.network_list_for_tenant(request, request.user.project_id)
except:
exceptions.handle(request)
msg = _("Unable to retrieve available networks.")
messages.warning(request, msg)
if not self.fixed_pub_ips_enabled:
LOG.debug("Excluding external networks")
networks = filter(lambda x: not x.get("router:external", False), networks)
# TODO: Workaround until we can unshare the "internal" network.
if request.user.project_name not in ["admin", "z00"]:
networks = filter(lambda x: x.get("router:external", False) or not x.shared, networks)
any_ext_nets = False
for net in networks:
# Make sure the "name" attribute is defined.
net.set_id_as_name_if_empty()
any_ext_nets = any_ext_nets or net.get("router:external", False)
if self.fixed_pub_ips_enabled and not any_ext_nets:
LOG.debug("No external networks found - disabling fixed public IPs")
self.fixed_pub_ips_enabled = False
return networks
def get_public_ips(self, request, all_fixed_pub_ips):
ips = {}
try:
# Select any unassigned floating IPs.
floats = api.network.tenant_floating_ip_list(request)
ips["float"] = dict([(x.id, x) for x in floats if not x.port_id])
if self.fixed_pub_ips_enabled and all_fixed_pub_ips:
# Take note of all floating IPs (including assigned) since they
# can't be used as a fixed IP given that a port already exists.
used_ips = [x.ip for x in floats]
# Locate any fixed IPs already assigned to an external network
# port so that we can exclude them from the list.
for net_id, net in self.networks.iteritems():
if not net.get("router:external", False):
continue
LOG.debug("Getting all ports for network: {0}".format(net_id))
ports = api.neutron.port_list(request,
tenant_id=request.user.project_id,
network_id=net_id)
for port in ports:
for fip in port.fixed_ips:
if fip.get("ip_address"):
used_ips.append(fip["ip_address"])
# Select fixed IPs allocated to the tenant that aren't in use.
ips["fixed"] = all_fixed_pub_ips - netaddr.IPSet(used_ips)
else:
ips["fixed"] = []
except:
exceptions.handle(request)
msg = _("Failed to determine available public IPs.")
messages.warning(request, msg)
ips["float"] = {}
ips["fixed"] = []
return ips
def clean(self):
data = super(SetNetworkAction, self).clean()
nics = []
used_ips = {"_float_": set()}
try:
for i in range(0, self.intf_limit):
nic = {}
field_id = "eth{0:d}_network".format(i)
net_id = data.get(field_id)
if net_id:
used_ips.setdefault(net_id, set())
nic["network_id"] = net_id
if i != len(nics):
msg = _("Network interfaces must be assigned consecutively.")
self._errors[field_id] = self.error_class([msg])
elif (not settings.NCI_DUPLICATE_VM_NETWORK_INTF) and (net_id in [n["network_id"] for n in nics]):
msg = _("Network is assigned to another interface.")
self._errors[field_id] = self.error_class([msg])
# Field level validation will have already checked that the
# network ID exists by virtue of being a valid choice.
assert net_id in self.networks
external = self.networks[net_id].get("router:external", False)
else:
external = False
fixed_subnet_id = None
field_id = "eth{0:d}_fixed_ip".format(i)
fixed_ip = data.get(field_id)
if not fixed_ip:
# Value could only be undefined if field level validation
# failed since "required=True" for this field.
assert self._errors.get(field_id)
elif fixed_ip == "auto":
if external:
msg = _("Selected option is not valid on this network.")
self._errors[field_id] = self.error_class([msg])
elif not net_id:
msg = _("No network selected.")
self._errors[field_id] = self.error_class([msg])
elif fixed_ip == "ext_pool":
if external:
# Choice won't be available unless global allocation pool
# is enabled.
assert self.fixed_pub_ips_pool
else:
msg = _("Selected option is not available on this network.")
self._errors[field_id] = self.error_class([msg])
else:
try:
fixed_ip = netaddr.IPAddress(fixed_ip)
except (netaddr.AddrFormatError, ValueError) as e:
msg = _("Not a valid IP address format.")
self._errors[field_id] = self.error_class([msg])
else:
if external:
assert self.fixed_pub_ips_enabled
if fixed_ip not in self.pub_ips["fixed"]:
msg = _("\"{0}\" is not available on this network.".format(fixed_ip))
self._errors[field_id] = self.error_class([msg])
elif fixed_ip in used_ips[net_id]:
msg = _("IP address is assigned to another interface.")
self._errors[field_id] = self.error_class([msg])
else:
nic["fixed_ip"] = fixed_ip
used_ips[net_id].add(fixed_ip)
else:
# Verify that there is a subnet for the selected network
# which contains the fixed IP address.
subnet_cidr = None
for subnet in self.networks[net_id].subnets:
subnet_cidr = netaddr.IPNetwork(subnet.cidr)
if fixed_ip in subnet_cidr:
break
else:
subnet_cidr = None
if not subnet_cidr:
msg = _("IP address must be in a subnet range for the selected network.")
self._errors[field_id] = self.error_class([msg])
elif fixed_ip == subnet_cidr.network:
msg = _("Network address is reserved.")
self._errors[field_id] = self.error_class([msg])
elif fixed_ip == subnet_cidr.broadcast:
msg = _("Broadcast address is reserved.")
self._errors[field_id] = self.error_class([msg])
elif subnet.get("gateway_ip") and (fixed_ip == netaddr.IPAddress(subnet.gateway_ip)):
msg = _("IP address is reserved for the subnet gateway.")
self._errors[field_id] = self.error_class([msg])
else:
fixed_subnet_id = subnet.id
# Is the IP address already assigned to a port on
# this network?
LOG.debug("Getting all ports for network: {0}".format(net_id))
ports = api.neutron.port_list(self.request,
tenant_id=self.request.user.project_id,
network_id=net_id)
found = False
for port in ports:
for fip in port.fixed_ips:
if fip.get("ip_address") and (fixed_ip == netaddr.IPAddress(fip["ip_address"])):
found = True
break
if found:
msg = _("IP address is already in use.")
self._errors[field_id] = self.error_class([msg])
elif fixed_ip in used_ips[net_id]:
msg = _("IP address is assigned to another interface.")
self._errors[field_id] = self.error_class([msg])
else:
nic["fixed_ip"] = fixed_ip
used_ips[net_id].add(fixed_ip)
field_id = "eth{0:d}_floating_ip".format(i)
floating_ip = data.get(field_id)
if floating_ip:
assert floating_ip in self.pub_ips["float"]
if not net_id:
msg = _("No network selected.")
self._errors[field_id] = self.error_class([msg])
elif external:
msg = _("Floating IPs cannot be used on an external network.")
self._errors[field_id] = self.error_class([msg])
elif floating_ip in used_ips["_float_"]:
msg = _("IP address is assigned to another interface.")
self._errors[field_id] = self.error_class([msg])
else:
float_net_id = self.pub_ips["float"][floating_ip].floating_network_id
LOG.debug("Looking for a route between the networks {0} and {1}".format(net_id, float_net_id))
ports = api.neutron.port_list(self.request,
network_id=net_id,
device_owner="network:router_interface")
found = False
for port in ports:
if fixed_subnet_id and (fixed_subnet_id not in [x.get("subnet_id") for x in port.fixed_ips]):
LOG.debug("Ignoring port {0} due to subnet mismatch".format(port.id))
continue
router = api.neutron.router_get(self.request, port.device_id)
if router.get("external_gateway_info", {}).get("network_id") == float_net_id:
LOG.debug("Found path to floating IP network via router: {0}".format(router.id))
found = True
break
if not found:
if self.networks[net_id].shared:
# The Neutron API doesn't return interface ports for routers
# owned by another tenant, even if that network is shared
# with us. So we just have to accept the user's request.
LOG.warning("Unable to locate router for floating IP on shared network: {0}".format(net_id))
else:
msg = _("No router interface found that connects the selected network with the floating IP.")
self._errors[field_id] = self.error_class([msg])
else:
nic["floating_ip"] = floating_ip
used_ips["_float_"].add(floating_ip)
if "network_id" in nic:
nics.append(nic)
except:
exceptions.handle(self.request)
msg = _("Validation failed with an unexpected error.")
raise forms.ValidationError(msg)
if not nics:
msg = _("At least one network interface must be assigned.")
raise forms.ValidationError(msg)
if settings.NCI_DUPLICATE_VM_NETWORK_INTF:
# See "server_create_hook_func()" for why this check is made.
float_nets = set([n["network_id"] for n in nics if "floating_ip" in n])
for net_id in float_nets:
if len(filter(lambda x: x["network_id"] == net_id, nics)) > 1:
msg = _("Networks with a floating IP specified can only be assigned to one interface.")
raise forms.ValidationError(msg)
data["nics"] = nics
return data
# NB: We aren't subclassing the upstream implementation of this step.
class SetNetwork(workflows.Step):
action_class = SetNetworkAction
contributes = ("nics", "network_id")
template_name = "project/instances/../instances_nci/_update_networks.html"
def contribute(self, data, context):
context = super(SetNetwork, self).contribute(data, context)
if context["nics"]:
# Emulate the network list set in the upstream implementation.
context["network_id"] = [n["network_id"] for n in context["nics"]]
return context
class BootstrapConfigAction(workflows.Action):
puppet_action = forms.ChoiceField(
label=_("Puppet Action"),
required=True,
choices=[x for x in PUPPET_ACTION_CHOICES if x[0] == "none"],
initial="none",
help_text=_("Puppet command to execute."))
puppet_env = forms.RegexField(
label=_("Puppet Environment"),
required=False,
regex=REPO_BRANCH_REGEX,
help_text=_("Puppet configuration environment (or equivalent branch name) to deploy."))
install_updates = forms.ChoiceField(
label=_("Install Updates"),
required=True,
choices=[
("reboot", _("Yes (reboot if required)")),
("yes", _("Yes (don't reboot)")),
("no", _("No")),
],
initial="reboot",
help_text=_("Whether to install system updates. (Recommended)"))
class Meta(object):
name = _("Initial Boot")
help_text_template = ("project/instances/../instances_nci/_bootstrap_help.html")
def __init__(self, request, context, *args, **kwargs):
super(BootstrapConfigAction, self).__init__(request, context, *args, **kwargs)
# Check if the project's VL config exists. We only assign a default
# Puppet action if it does. This will allow projects not using the
# VL environment to still be able to launch VMs without having to
# change the Puppet action first.
is_vl = False
try:
container = nci_private_container_name(request)
config_obj_name = nci_vl_project_config_name()
is_vl = api.swift.swift_object_exists(request,
container,
config_obj_name)
except:
exceptions.handle(request)
if is_vl:
obj = None
try:
obj = api.swift.swift_get_object(request,
container,
config_obj_name,
resp_chunk_size=None)
except:
exceptions.handle(request)
msg = _("VL project configuration not found.")
messages.warning(request, msg)
if obj:
project_cfg = None
try:
project_cfg = json.loads(obj.data)
except ValueError as e:
LOG.exception("Error parsing project configuration: {0}".format(e))
messages.error(request, str(e))
msg = _("VL project configuration is corrupt.")
messages.warning(request, msg)
if project_cfg:
self.fields["puppet_env"].initial = project_cfg.get("puppet_env", "")
if project_cfg.get("repo_key") and project_cfg.get("eyaml_key") and project_cfg.get("eyaml_cert"):
self.fields["puppet_action"].choices = PUPPET_ACTION_CHOICES
self.fields["puppet_action"].initial = "apply"
default_action = project_cfg.get("puppet_action", "auto")
if default_action != "auto":
avail_actions = [x[0] for x in self.fields["puppet_action"].choices]
if default_action in avail_actions:
self.fields["puppet_action"].initial = default_action
def clean(self):
data = super(BootstrapConfigAction, self).clean()
if (data.get("puppet_action", "none") != "none") and not data.get("puppet_env"):
msg = _("An environment name is required for the selected Puppet action.")
raise forms.ValidationError(msg)
return data
class BootstrapConfig(workflows.Step):
action_class = BootstrapConfigAction
contributes = ("puppet_action", "puppet_env", "install_updates")
template_name = "project/instances/../instances_nci/_bootstrap_step.html"
def server_create_hook_func(request, context, floats):
def _impl(*args, **kwargs):
float_nets = {}
kwargs["nics"] = []
nics = context["nics"] or []
for n in nics:
# https://github.com/openstack/python-novaclient/blob/2.20.0/novaclient/v1_1/servers.py#L528
nic = {"net-id": n["network_id"]}
ip = n.get("fixed_ip")
if ip:
if ip.version == 6:
nic["v6-fixed-ip"] = str(ip)
else:
assert ip.version == 4
nic["v4-fixed-ip"] = str(ip)
kwargs["nics"].append(nic)
if "floating_ip" in n:
assert n["network_id"] not in float_nets
float_nets[n["network_id"]] = n["floating_ip"]
srv = api.nova.server_create(*args, **kwargs)
if float_nets:
# Find the ports created for the new instance which we need to
# associate each floating IP with. We have to wait until the
# ports are created by Neutron. Note that the only unique
# information we have to identify which port should be paired
# with each floating IP is the network ID. Hence we don't
# support more than one interface connected to the same network
# when floating IPs are specified.
try:
max_attempts = 15
attempt = 0
while attempt < max_attempts:
attempt += 1
LOG.debug("Fetching network ports for instance: {0}".format(srv.id))
ports = api.neutron.port_list(request, device_id=srv.id)
for p in ports:
LOG.debug("Found port: id={0}; owner={1}; network={2}".format(*[p.get(x) for x in ["id", "device_owner", "network_id"]]))
if p.get("device_owner", "").startswith("compute:") and (p.get("network_id") in float_nets):
for t in api.network.floating_ip_target_list_by_instance(request, srv.id):
LOG.debug("Got floating IP target: {0}".format(t))
if t.startswith(p.id):
float_id = float_nets[p.network_id]
api.network.floating_ip_associate(request, float_id, t)
del float_nets[p.network_id]
msg = _("Floating IP {0} associated with new instance.".format(floats[float_id].ip))
messages.info(request, msg)
break
if not float_nets:
# All floating IPs have now been assigned.
srv = api.nova.server_get(request, srv.id)
break
status = api.nova.server_get(request, srv.id).status.lower()
if status == "active":
if max_attempts != 2:
LOG.debug("VM state has become active")
max_attempts = 2
attempt = 0
elif status != "build":
LOG.debug("Aborting wait loop due to server status: {0}".format(status))
break
LOG.debug("Waiting for network port allocation")
time.sleep(2)
except:
exceptions.handle(request)
for f in float_nets.itervalues():
msg = _("Failed to associate floating IP {0} with new instance.".format(floats[f].ip))
messages.warning(request, msg)
return srv
return _impl
def step_generator():
for step in base_mod.LaunchInstance.default_steps:
if step == base_mod.SetInstanceDetails:
yield SetInstanceDetails
elif step == base_mod.SetAccessControls:
yield SetAccessControls
elif step == base_mod.SetNetwork:
yield SetNetwork
elif step == base_mod.PostCreationStep:
# Replace the "Post-Creation" tab with our bootstrap parameters.
yield BootstrapConfig
else:
yield step
class NCILaunchInstance(base_mod.LaunchInstance):
default_steps = [x for x in step_generator()]
@sensitive_variables("context")
def validate(self, context):
if context["count"] > 1:
keys = set(itertools.chain.from_iterable(context["nics"]))
if filter(lambda k: k.endswith("_ip"), keys):
msg = _("Multiple instances cannot be launched with the same IP address.")
self.add_error_to_step(msg, SetNetworkAction.slug)
# Missing from "add_error_to_step()"...
self.get_step(SetNetworkAction.slug).has_errors = True
return False
return True
@sensitive_variables("context")
def handle(self, request, context):
cloud_cfg = {}
if context["puppet_action"] != "none":
# Load the project's VL configuration.
try:
obj = api.swift.swift_get_object(request,
nci_private_container_name(request),
nci_vl_project_config_name(),
resp_chunk_size=None)
except:
exceptions.handle(request)
msg = _("VL project configuration not found.")
messages.error(request, msg)
return False
try:
project_cfg = json.loads(obj.data)
except ValueError as e:
LOG.exception("Error parsing project configuration: {0}".format(e))
messages.error(request, str(e))
msg = _("VL project configuration is corrupt.")
messages.error(request, msg)
return False
# Add the cloud-config parameters for the "nci.puppet" module.
puppet_cfg = cloud_cfg.setdefault("nci", {}).setdefault("puppet", {})
puppet_cfg["action"] = context["puppet_action"]
puppet_cfg["environment"] = context["puppet_env"]
repo_cfg = puppet_cfg.setdefault("repo", {})
repo_cfg["path"] = project_cfg.get("repo_path", "")
eyaml_cfg = puppet_cfg.setdefault("eyaml", {})
try:
msg = _("Failed to initialise crypto stash.")
stash = ncicrypto.CryptoStash(request,
project_cfg.get("stash") or {})
msg = _("Failed to load deployment key.")
key = stash.load_private_key(project_cfg.get("repo_key"))
repo_cfg["key"] = key.cloud_config_dict()
msg = _("Failed to load eyaml key.")
key = stash.load_private_key(project_cfg.get("eyaml_key"))
eyaml_cfg["key"] = key.cloud_config_dict()
msg = _("Failed to load eyaml certificate.")
cert = stash.load_x509_cert(project_cfg.get("eyaml_cert"))
eyaml_cfg["cert"] = cert.cloud_config_dict()
except:
exceptions.handle(request)
messages.error(request, msg)
return False
cloud_cfg["package_upgrade"] = (context["install_updates"] != "no")
cloud_cfg["package_reboot_if_required"] = (context["install_updates"] == "reboot")
if "." in context["name"]:
cloud_cfg["fqdn"] = context["name"]
# Construct the "user data" to inject into the VM for "cloud-init".
user_data = MIMEMultipart()
try:
# Note that JSON is also valid YAML:
# http://yaml.org/spec/1.2/spec.html#id2759572
part = MIMEText(json.dumps(cloud_cfg), "cloud-config")
user_data.attach(part)
except (ValueError, TypeError) as e:
LOG.exception("Error serialising userdata: {0}".format(e))
messages.error(request, str(e))
msg = _("Failed to construct userdata for VM instance.")
messages.error(request, msg)
return False
context["script_data"] = user_data.as_string()
# We could copy the contents of the base class function here and make
# the changes that we need. But that would create a maintenance
# headache since for each OpenStack update we'd have to check whether
# anything in the original implementation changed and replicate it
# here. Instead, we'll rebind the "api.nova.server_create()" function
# in the namespace of the base class function to call our hook closure
# instead.
api_proxy = nciutils.AttributeProxy(base_mod.api)
api_proxy.nova = nciutils.AttributeProxy(base_mod.api.nova)
floats = self.get_step(SetNetworkAction.slug).action.pub_ips["float"]
api_proxy.nova.server_create = server_create_hook_func(request, context, floats)
# We have to strip off any function decorators, otherwise the rebind
# won't be visible inside the function. Whilst this does rely on some
        # Python internals, the chances of those changing are significantly
        # lower, especially since RedHat doesn't change the Python version
# in a major release series.
base_func = nciutils.undecorate(super(NCILaunchInstance, self).handle.__func__, "handle")
g_dict = base_func.__globals__
g_dict.update({"api": api_proxy})
return types.FunctionType(base_func.__code__, g_dict)(self, request, context)
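# Editor's note: a minimal, self-contained sketch of the globals-rebinding
# technique used in handle() above. The function and helper names here are
# hypothetical, purely for illustration; this is not part of the upstream module.
def _rebind_globals_example():
    import types
    def caller():
        return _example_helper()  # resolved through caller.__globals__ at call time
    # Rebuild the function with a globals dict that redirects the lookup.
    patched = types.FunctionType(
        caller.__code__,
        dict(caller.__globals__, _example_helper=lambda: "patched"))
    return patched()  # -> "patched"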
# vim:ts=4 et sw=4 sts=4:
| apache-2.0 | -2,602,691,679,412,135,400 | 42.937173 | 145 | 0.53153 | false |
CodingVault/LeetCodeInPython | sorted_array_to_binary_tree.py | 1 | 1250 | #!/usr/bin/env python
# encoding: utf-8
"""
sorted_array_to_binary_tree.py
Created by Shengwei on 2014-07-03.
"""
# https://oj.leetcode.com/problems/convert-sorted-array-to-binary-search-tree/
# tags: easy, tree, array, sorted, convert, D&C
"""
Given an array where elements are sorted in ascending order, convert it to a height balanced BST.
"""
# Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param num, a list of integers
# @return a tree node
def sortedArrayToBST(self, num):
def convert_array(left, right):
"""Convert num[left:right] to a (sub)tree."""
# num[x:x] is an empty list (x can be any number)
if left >= right:
return None
# mid point at the very middle of num[left:right]
# or the right one of the middle two
mid = (left + right) / 2
root = TreeNode(num[mid])
root.left = convert_array(left, mid)
root.right = convert_array(mid + 1, right)
return root
return convert_array(0, len(num))
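# Editor's usage sketch (hedged; assumes a concrete TreeNode like the commented
# definition above):
#
#   root = Solution().sortedArrayToBST([1, 2, 3, 4, 5])
#   assert root.val == 3  # the middle element becomes the root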
| apache-2.0 | -8,530,960,155,454,743,000 | 26.777778 | 97 | 0.5672 | false |
cwilkes/event_store_meta | tests/test_functional.py | 1 | 3668 | # -*- coding: utf-8 -*-
"""Functional tests using WebTest.
See: http://webtest.readthedocs.org/
"""
import pytest
from flask import url_for
from event_store_meta.user.models import User
from .factories import UserFactory
class TestLoggingIn:
def test_can_log_in_returns_200(self, user, testapp):
# Goes to homepage
res = testapp.get("/")
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'myprecious'
# Submits
res = form.submit().follow()
assert res.status_code == 200
def test_sees_alert_on_log_out(self, user, testapp):
res = testapp.get("/")
# Fills out login form in navbar
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'myprecious'
# Submits
res = form.submit().follow()
res = testapp.get(url_for('public.logout')).follow()
# sees alert
assert 'You are logged out.' in res
def test_sees_error_message_if_password_is_incorrect(self, user, testapp):
# Goes to homepage
res = testapp.get("/")
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = user.username
form['password'] = 'wrong'
# Submits
res = form.submit()
# sees error
assert "Invalid password" in res
def test_sees_error_message_if_username_doesnt_exist(self, user, testapp):
# Goes to homepage
res = testapp.get("/")
# Fills out login form, password incorrect
form = res.forms['loginForm']
form['username'] = 'unknown'
form['password'] = 'myprecious'
# Submits
res = form.submit()
# sees error
assert "Unknown user" in res
class TestRegistering:
def test_can_register(self, user, testapp):
old_count = len(User.query.all())
# Goes to homepage
res = testapp.get("/")
# Clicks Create Account button
res = res.click("Create account")
# Fills out the form
form = res.forms["registerForm"]
form['username'] = 'foobar'
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secret'
# Submits
res = form.submit().follow()
assert res.status_code == 200
# A new user was created
assert len(User.query.all()) == old_count + 1
def test_sees_error_message_if_passwords_dont_match(self, user, testapp):
# Goes to registration page
res = testapp.get(url_for("public.register"))
# Fills out form, but passwords don't match
form = res.forms["registerForm"]
form['username'] = 'foobar'
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secrets'
# Submits
res = form.submit()
# sees error message
assert "Passwords must match" in res
def test_sees_error_message_if_user_already_registered(self, user, testapp):
user = UserFactory(active=True) # A registered user
user.save()
# Goes to registration page
res = testapp.get(url_for("public.register"))
# Fills out form, but username is already registered
form = res.forms["registerForm"]
form['username'] = user.username
form['email'] = '[email protected]'
form['password'] = 'secret'
form['confirm'] = 'secret'
# Submits
res = form.submit()
# sees error
assert "Username already registered" in res
| bsd-3-clause | 1,142,515,972,903,867,800 | 31.75 | 80 | 0.583424 | false |
balazssimon/ml-playground | udemy/lazyprogrammer/deep-reinforcement-learning-python/mountaincar/q_learning.py | 1 | 6102 | # This takes 4min 30s to run in Python 2.7
# But only 1min 30s to run in Python 3.5!
#
# Note: gym changed from version 0.7.3 to 0.8.0
# MountainCar episode length is capped at 200 in later versions.
# This means your agent can't learn as much in the earlier episodes
# since they are no longer as long.
from __future__ import print_function
import gym
import os
import sys
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from gym import wrappers
from datetime import datetime
from sklearn.pipeline import FeatureUnion
from sklearn.preprocessing import StandardScaler
from sklearn.kernel_approximation import RBFSampler
from sklearn.linear_model import SGDRegressor
# SGDRegressor defaults:
# loss='squared_loss', penalty='l2', alpha=0.0001,
# l1_ratio=0.15, fit_intercept=True, n_iter=5, shuffle=True,
# verbose=0, epsilon=0.1, random_state=None, learning_rate='invscaling',
# eta0=0.01, power_t=0.25, warm_start=False, average=False
# Inspired by https://github.com/dennybritz/reinforcement-learning
class FeatureTransformer:
def __init__(self, env, n_components=500):
observation_examples = np.array([env.observation_space.sample() for x in range(10000)])
scaler = StandardScaler()
scaler.fit(observation_examples)
# Used to converte a state to a featurizes represenation.
# We use RBF kernels with different variances to cover different parts of the space
featurizer = FeatureUnion([
("rbf1", RBFSampler(gamma=5.0, n_components=n_components)),
("rbf2", RBFSampler(gamma=2.0, n_components=n_components)),
("rbf3", RBFSampler(gamma=1.0, n_components=n_components)),
("rbf4", RBFSampler(gamma=0.5, n_components=n_components))
])
example_features = featurizer.fit_transform(scaler.transform(observation_examples))
self.dimensions = example_features.shape[1]
self.scaler = scaler
self.featurizer = featurizer
def transform(self, observations):
# print "observations:", observations
scaled = self.scaler.transform(observations)
# assert(len(scaled.shape) == 2)
return self.featurizer.transform(scaled)
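# Editor's usage sketch (hedged): with the defaults above, a raw 2-dim
# MountainCar observation becomes a 2000-dim RBF feature vector
# (4 samplers x 500 components each).
#
#   env = gym.make('MountainCar-v0')
#   ft = FeatureTransformer(env)
#   phi = ft.transform([env.reset()])  # phi.shape == (1, ft.dimensions)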
# Holds one SGDRegressor for each action
class Model:
def __init__(self, env, feature_transformer, learning_rate):
self.env = env
self.models = []
self.feature_transformer = feature_transformer
for i in range(env.action_space.n):
model = SGDRegressor(learning_rate=learning_rate)
model.partial_fit(feature_transformer.transform( [env.reset()] ), [0])
self.models.append(model)
def predict(self, s):
X = self.feature_transformer.transform([s])
result = np.stack([m.predict(X) for m in self.models]).T
assert(len(result.shape) == 2)
return result
def update(self, s, a, G):
X = self.feature_transformer.transform([s])
assert(len(X.shape) == 2)
self.models[a].partial_fit(X, [G])
def sample_action(self, s, eps):
# eps = 0
# Technically, we don't need to do epsilon-greedy
# because SGDRegressor predicts 0 for all states
# until they are updated. This works as the
# "Optimistic Initial Values" method, since all
# the rewards for Mountain Car are -1.
if np.random.random() < eps:
return self.env.action_space.sample()
else:
return np.argmax(self.predict(s))
# plays one episode and returns the total reward
def play_one(model, env, eps, gamma):
observation = env.reset()
done = False
totalreward = 0
iters = 0
while not done and iters < 10000:
action = model.sample_action(observation, eps)
prev_observation = observation
observation, reward, done, info = env.step(action)
# update the model
next = model.predict(observation)
# assert(next.shape == (1, env.action_space.n))
G = reward + gamma*np.max(next[0])
model.update(prev_observation, action, G)
totalreward += reward
iters += 1
return totalreward
def plot_cost_to_go(env, estimator, num_tiles=20):
x = np.linspace(env.observation_space.low[0], env.observation_space.high[0], num=num_tiles)
y = np.linspace(env.observation_space.low[1], env.observation_space.high[1], num=num_tiles)
X, Y = np.meshgrid(x, y)
# both X and Y will be of shape (num_tiles, num_tiles)
Z = np.apply_along_axis(lambda _: -np.max(estimator.predict(_)), 2, np.dstack([X, Y]))
# Z will also be of shape (num_tiles, num_tiles)
fig = plt.figure(figsize=(10, 5))
ax = fig.add_subplot(111, projection='3d')
surf = ax.plot_surface(X, Y, Z,
rstride=1, cstride=1, cmap=matplotlib.cm.coolwarm, vmin=-1.0, vmax=1.0)
ax.set_xlabel('Position')
ax.set_ylabel('Velocity')
ax.set_zlabel('Cost-To-Go == -V(s)')
ax.set_title("Cost-To-Go Function")
fig.colorbar(surf)
plt.show()
def plot_running_avg(totalrewards):
N = len(totalrewards)
running_avg = np.empty(N)
for t in range(N):
running_avg[t] = totalrewards[max(0, t-100):(t+1)].mean()
plt.plot(running_avg)
plt.title("Running Average")
plt.show()
def main(show_plots=True):
env = gym.make('MountainCar-v0')
ft = FeatureTransformer(env)
model = Model(env, ft, "constant")
gamma = 0.99
if 'monitor' in sys.argv:
filename = os.path.basename(__file__).split('.')[0]
monitor_dir = './' + filename + '_' + str(datetime.now())
env = wrappers.Monitor(env, monitor_dir)
N = 300
totalrewards = np.empty(N)
for n in range(N):
# eps = 1.0/(0.1*n+1)
eps = 0.1*(0.97**n)
if n == 199:
print("eps:", eps)
# eps = 1.0/np.sqrt(n+1)
totalreward = play_one(model, env, eps, gamma)
totalrewards[n] = totalreward
if (n + 1) % 100 == 0:
print("episode:", n, "total reward:", totalreward)
print("avg reward for last 100 episodes:", totalrewards[-100:].mean())
print("total steps:", -totalrewards.sum())
if show_plots:
plt.plot(totalrewards)
plt.title("Rewards")
plt.show()
plot_running_avg(totalrewards)
# plot the optimal state-value function
plot_cost_to_go(env, model)
if __name__ == '__main__':
# for i in range(10):
# main(show_plots=False)
main() | apache-2.0 | 6,356,171,879,016,205,000 | 31.636364 | 93 | 0.674861 | false |
dimas-lex/osbb | osb/osb/billing/testing/AccountsServiceTest.py | 1 | 2043 | # -*- coding: utf-8 -*-
from django.test import TestCase
from osb.billing.models import Accounts, Services
from osb.billing.Services.AccountsService import AccountsService
from osb.billing.Services.ServiceService import *
class AccountsServiceTest(TestCase):
def setUp(self):
self.accountServ = AccountsService()
self.accountServ.create(uid="1", name="lion", address="pr")
self.accountServ.create(uid="2", name="cat", address="pr2")
self.accountServ.create(uid="3", name="cat", address="pr2", porch=3)
def test_01_get_all(self):
""" Test 'get_all' method """
print self.test_01_get_all.__doc__
self.assertEqual(len(self.accountServ.get_all()), 3)
def test_02_get_by_porch(self):
""" Test 'get_by_porch' method """
print self.test_02_get_by_porch.__doc__
self.assertEqual(len(self.accountServ.get_by_porch(porch=3)), 1)
def test_03_create(self):
""" Test 'create' method """
print self.test_03_create.__doc__
self.assertTrue(
isinstance(
self.accountServ.create(uid="4", name="dog", address="pr"),
Accounts
)
)
def test_04_update(self):
""" Test 'update' method """
print self.test_04_update.__doc__
self.assertTrue( self.accountServ.update(name="dog", uid="3", address="prr") )
def test_05_delete(self):
""" Test 'delete' method """
print self.test_05_delete.__doc__
self.assertTrue( self.accountServ.delete(uid="3") )
# def test_06_print(self):
# """ Just #print out results """
# print self.test_06_print.__doc__
# accounts = self.accountServ.get_all()
# for acc in accounts:
# print ( " ".join(("uid", acc.uid, "name", acc.name, "address", acc.address, "porch", str(acc.porch), "deleted", str(acc.deleted) )) )
# self.assertTrue(True) | gpl-2.0 | -9,115,395,383,184,236,000 | 33.644068 | 148 | 0.594224 | false |
FAForever/faftools | parseudp/FAPacket.py | 1 | 10060 | #!/usr/bin/env python
from __future__ import print_function
import sys
import struct
import binascii
import string
import argparse
import zlib
from curses import ascii
# expects tshark on stdin as in:
# tshark -r game.pcap -R 'ip.addr==192.168.0.101' -T fields -d udp.port==6112,echo -e ip.src -e udp.srcport -e ip.dst -e udp.dstport -e frame.time_relative -e echo.data | python FAPacket.py -e -p
# any non-FA packets will crash the parser
# running:
# FAPacket.py [ -p ] [ -e ] [ -t ]
# -p  print command stream packets
# -e  print encapsulation packets
# -t  print a tick summary line per encapsulation packet
def hexdump_hash(data):
res = ''
for i in range(0, len(data)):
res += '{0:02x}'.format(ord(data[i]))
return res
def hexdump(data, indent):
res = ''
for i in range(0, len(data), 16):
if i:
for k in range(0, indent):
res += ' '
for j in range(i, min(i + 16, len(data))):
res += '{0:02x} '.format(ord(data[j]))
for k in range(min(i + 16, len(data)), i + 16):
res += ' '
for j in range(i, min(i + 16, len(data))):
if ascii.isprint(data[j]):
res += data[j]
else:
res += '.'
res += '\n'
return res
class FAPacket:
def __init__(self, data):
self.type = ord(data[0])
self.len = ord(data[1]) | ord(data[2]) << 8;
self.data = data[3:]
d = { }
d[0] = 1
d[0x32] = 1
d[0x33] = 1
d[0x34] = 1
d[1] = 1
d[3] = 1
self.decodable = d
def is_advance(self):
return self.type == 0
def is_ack(self):
return self.type == 0x32
def is_set_cmdsrc(self):
return self.type == 1
def cmdsrc(self):
return ord(self.data[0])
def ack_cmdsource(self):
return ord(self.data[0])
def pp_data(self, indent):
return hexdump(self.data, indent)
def can_decode(self):
return self.type in self.decodable
def simtick(self):
if self.type == 0x32:
return struct.unpack("<bL", self.data)[1]
if self.type == 0x33 or self.type == 0x34 or self.type == 0:
return struct.unpack("<L", self.data)[0]
def decode(self):
if self.type == 0:
return "ADV {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 0x32:
return "ACK {0} {1}".format(self.ack_cmdsource(), self.simtick())
elif self.type == 0x33:
return "SIM {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 0x34:
return "FIN {0}".format(struct.unpack("<L", self.data)[0])
elif self.type == 1:
return "CMDSOURCE {0}".format(ord(self.data[0]))
elif self.type == 3:
(h, s) = struct.unpack("<16sL", self.data)
return "HASH {0} {1}".format(s, hexdump_hash(h))
else:
return "(error)"
class FAEncap(object):
def __init__(self, src, srcport, dst, dstport, time, packet):
self.offset = 0
self.src = src
self.dst = dst
self.srcport = srcport
self.dstport = dstport
self.time = time
if ord(packet[0]) == 8:
self.type = 8
self.data = packet[1:]
self.len = len(packet) - 1
elif ord(packet[0]) == 0:
self.type = 0
self.data = packet[1:]
self.len = len(packet) - 1
elif ord(packet[0]) == 255:
self.type=255
self.data=''
self.len = len(packet) - 1
else:
(self.type, self.mask, self.seq, self.ack, self.seq2, self.ack2, self.len) = struct.unpack("<bLHHHHH", packet[0:15])
self.data = packet[15:]
    def src_full(self):
        return self.src + ":" + self.srcport
    def dst_full(self):
        return self.dst + ":" + self.dstport
def connection(self):
return self.src_full() + "->" + self.dst_full()
def pp_data(self, indent):
if self.type == 8:
return self.data
else:
return hexdump(self.data, indent)
def packets(self):
ret = []
while True:
p = self.next_packet()
if p == None:
return ret
ret.append(p)
def next_packet(self):
if self.type != 4:
return None
if self.offset + 3 > len(self.data):
return None
len_ = ord(self.data[self.offset + 1]) | ord(self.data[self.offset + 2]) << 8
if self.offset + len_ > len(self.data):
return None
offset = self.offset;
self.offset += len_
if offset == self.offset:
sys.stdout.write("waarg {0} {1} {2}".format(offset, self.offset, binascii.hexlify(self.data)))
return FAPacket(self.data[offset : self.offset])
def prepend_remaining(self, r):
self.data = str(r) + str(self.data)
def remaining(self):
if self.type == 4:
return self.data[self.offset:]
return ''
class FAPeerState(object):
def __init__(self):
self.addr_to_cmdsrc = { }
self.cmdsrc_to_addr = [ ]
self.simtick = [ ]
self.ack_simtick = [ ]
def process_egress(self, addr, packet):
if packet.is_set_cmdsrc():
self.cmdsource = packet.cmdsrc()
if packet.is_advance():
self.simtick[self.addr_to_cmdsrc[addr] ] += packet.simtick()
elif packet.is_ack():
s1 = self.addr_to_cmdsrc[addr]
s2 = packet.ack_cmdsource()
self.ack_simtick[s1][s2] = packet.simtick()
def process_ingress(self, addr, packet):
if packet.is_set_cmdsrc():
s = packet.cmdsrc()
self.addr_to_cmdsrc[addr] = s
while len(self.cmdsrc_to_addr) <= s:
self.cmdsrc_to_addr.append('')
self.simtick.append(0)
self.ack_simtick.append(0)
self.cmdsrc_to_addr[s] = addr
argp = argparse.ArgumentParser(prog = "PROG")
argp.add_argument("-e", action="store_true")
argp.add_argument("-t", action="store_true")
argp.add_argument("-p", action="store_true")
args = argp.parse_args()
remain = { }
inflate = { }
inflate_remain = { }
cmdpackets_seen = { }
future = { }
c32 = [ 0, 0, 0 ]
c33 = 0
c34 = 0
tick = 0
seq_seen = { }
for line in sys.stdin:
(src, srcport, dst, dstport, time, data) = line.split();
#print "*{0}*{1}*{2}*{3}*{4}*{5}".format(src, srcport, dst, dstport, time, data);
e = FAEncap(src, srcport, dst, dstport, time, binascii.unhexlify(data.translate(None, ':')))
if not e.connection() in seq_seen:
seq_seen[e.connection()] = {}
if not e.connection() in remain:
remain[e.connection()] = ''
if not e.connection() in future:
future[e.connection()] = { }
s = '{0} {1} type={2} len={3: 4d}'.format(e.time, e.connection(), e.type, e.len)
if e.type != 4:
print(s)
if e.len:
print(' ' * 7, hexdump(e.data, 8))
elif e.type == 4:
if e.seq2 in seq_seen[e.connection()]:
continue
if len(seq_seen[e.connection()]) and not e.seq2 - 1 in seq_seen[e.connection()]:
print("!! packet received out of sequence !! {0} cseq={1}".format(e.connection(), e.seq2))
future[e.connection()][e.seq2] = e
continue
future[e.connection()][e.seq2] = e
seq_ix = e.seq2
while seq_ix in future[e.connection()]:
e = future[e.connection()][seq_ix]
seq_seen[e.connection()][e.seq2] = 1
seq_ix += 1
s = '{0} {1} type={2} len={3: 4d}'.format(e.time, e.connection(), e.type, e.len)
s += ' cseq={0} cack={1} mask={2} eseq={3} eack={4}'.format(e.seq2, e.ack2, e.mask, e.seq, e.ack)
if args.e:
print(s)
if not e.connection() in inflate:
print(' ' * 7, e.pp_data(8))
if args.p:
if not e.connection() in cmdpackets_seen:
if e.data == "\x02\x00\x00\x00\xff\xff":
print(" !!deflate detected!! on " + e.connection())
inflate[e.connection()] = zlib.decompressobj()
if e.connection() in inflate:
if not e.connection() in cmdpackets_seen:
data = "\x78\x9c"
data += e.data
cmdpackets_seen[e.connection()] = 1
inflate_remain[e.connection()] = ''
else:
data = inflate_remain[e.connection()] + e.data
inflated = inflate[e.connection()].decompress(data)
print(' ' * 7, hexdump(inflated, 8))
e.data = inflated
inflate_remain[e.connection()] = inflate[e.connection()].unconsumed_tail
e.prepend_remaining(remain[e.connection()])
#print e.pp_data(16);
for p in e.packets():
if p.type == 0x32:
c32[p.ack_cmdsource()] = p.simtick()
elif p.type == 0x33:
c33 = p.simtick()
elif p.type == 0x34:
c34 = p.simtick()
elif p.type == 0:
tick += p.simtick()
if p.can_decode():
print(' ', p.decode())
else:
s=' {0:02x} {1: 4d} '.format(p.type, p.len - 3)
print(s, p.pp_data(len(s) + 1))
foo = ""
foo = ''
if c33 < c34:
foo += '<'
elif c33 > c34:
foo += '>'
else:
foo += ' '
if args.t:
print("TICK", ''.join([ str(c32[i]) + ' ' for i in range(0, len(c32)) ]), c33, c34, tick, foo)
remain[e.connection()] = e.remaining()
| gpl-3.0 | 1,441,050,445,854,061,800 | 32.872054 | 195 | 0.48827 | false |
mpercich/Calendarize | ios/dateparser/lib/python2.7/site-packages/ruamel/yaml/loader.py | 1 | 2063 | # coding: utf-8
from __future__ import absolute_import
from ruamel.yaml.reader import Reader
from ruamel.yaml.scanner import Scanner, RoundTripScanner
from ruamel.yaml.parser import Parser, RoundTripParser
from ruamel.yaml.composer import Composer
from ruamel.yaml.constructor import BaseConstructor, SafeConstructor, Constructor, \
RoundTripConstructor
from ruamel.yaml.resolver import VersionedResolver
__all__ = ['BaseLoader', 'SafeLoader', 'Loader', 'RoundTripLoader']
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
BaseConstructor.__init__(self)
VersionedResolver.__init__(self)
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
SafeConstructor.__init__(self)
VersionedResolver.__init__(self)
class Loader(Reader, Scanner, Parser, Composer, Constructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
Scanner.__init__(self)
Parser.__init__(self)
Composer.__init__(self)
Constructor.__init__(self)
VersionedResolver.__init__(self)
class RoundTripLoader(Reader, RoundTripScanner, RoundTripParser, Composer,
RoundTripConstructor, VersionedResolver):
def __init__(self, stream, version=None, preserve_quotes=None):
Reader.__init__(self, stream)
RoundTripScanner.__init__(self)
RoundTripParser.__init__(self)
Composer.__init__(self)
RoundTripConstructor.__init__(self, preserve_quotes=preserve_quotes)
VersionedResolver.__init__(self, version)
| mit | 5,460,430,173,511,384,000 | 37.203704 | 88 | 0.674746 | false |
uraxy/qiidly | qiidly/command_line.py | 1 | 2094 | # -*- coding: utf-8 -*-
"""dummy docstring."""
import argparse
from qiidly import __version__, __description__
from qiidly.main import Qiidly
def _arg_parser():
parser = argparse.ArgumentParser(
prog='qiidly',
description=__description__)
parser.add_argument('-V', '--version',
action='version',
version='%(prog)s version {}'.format(__version__))
parser.add_argument('-q', '--qiita-token',
required=True,
help='Qiita access token')
parser.add_argument('-f', '--feedly-token',
required=True,
help='Feedly developer access token')
return parser
# http://stackoverflow.com/questions/3041986/apt-command-line-interface-like-yes-no-input
def _query_yes_no(question, default=None):
valid = {'yes': True, 'y': True,
'no': False, 'n': False}
if default is None:
prompt = ' [y/n] '
elif default == "yes":
prompt = ' [Y/n] '
elif default == 'no':
prompt = ' [y/N] '
else:
raise ValueError("Invalid default answer: '{default}'".format(default=default))
while True:
print(question + prompt, end='')
choice = input().lower()
if choice == '' and default is not None:
return valid[default]
elif choice in valid:
return valid[choice]
else:
print("Please respond with 'y' or 'n'.")
def main():
"""dummy."""
args = _arg_parser().parse_args()
for target in ['tags', 'followees']:
q = Qiidly(args.qiita_token, args.feedly_token, target=target)
have_to_sync = q.have_to_sync()
q.print_todo()
if not have_to_sync:
print('Already up-to-date.')
print()
continue
# sync to Feedly
print('')
if not _query_yes_no('Sync to Feedly?', default=None):
print('Did nothing.')
continue
q.sync()
print('Done!')
if __name__ == '__main__':
main()
| mit | -3,202,998,623,235,872,300 | 28.083333 | 89 | 0.523878 | false |
oneklc/dimod | dimod/views/bqm.py | 1 | 6577 | # Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# =============================================================================
try:
import collections.abc as abc
except ImportError:
import collections as abc
class BQMView(object):
__slots__ = '_adj',
def __init__(self, bqm):
self._adj = bqm._adj
# support python2 pickle
def __getstate__(self):
return {'_adj': self._adj}
# support python2 pickle
def __setstate__(self, state):
self._adj = state['_adj']
class LinearView(BQMView, abc.MutableMapping):
"""Acts as a dictionary `{v: bias, ...}` for the linear biases.
The linear biases are stored in a dict-of-dicts format, where 'self loops'
store the linear biases.
So `{v: bias}` is stored `._adj = {v: {v: Bias(bias)}}`.
If v is not in ._adj[v] then the bias is treated as 0.
"""
def __delitem__(self, v):
if v not in self:
raise KeyError
adj = self._adj
if len(adj[v]) - (v in adj[v]) > 0:
raise ValueError("there are interactions associated with {} that must be deleted first".format(v))
del adj[v]
def __getitem__(self, v):
# developer note: we could try to match the type with other biases in
# the bqm, but I think it is better to just use python int 0 as it
# is most likely to be compatible with other numeric types.
return self._adj[v].get(v, 0)
def __iter__(self):
return iter(self._adj)
def __len__(self):
return len(self._adj)
def __setitem__(self, v, bias):
adj = self._adj
if v in adj:
adj[v][v] = bias
else:
adj[v] = {v: bias}
def __str__(self):
return str(dict(self))
def items(self):
return LinearItemsView(self)
class LinearItemsView(abc.ItemsView):
"""Faster items iteration for LinearView."""
__slots__ = ()
def __iter__(self):
for v, neighbours in self._mapping._adj.items():
# see note in LinearView.__getitem__
yield v, neighbours.get(v, 0)
class QuadraticView(BQMView, abc.MutableMapping):
"""Acts as a dictionary `{(u, v): bias, ...}` for the quadratic biases.
The quadratic biases are stored in a dict-of-dicts format. So `{(u, v): bias}` is stored as
`._adj = {u: {v: Bias(bias)}, v: {u: Bias(bias)}}`.
"""
def __delitem__(self, interaction):
u, v = interaction
if u == v:
raise KeyError('{} is not an interaction'.format(interaction))
adj = self._adj
del adj[v][u]
del adj[u][v]
def __getitem__(self, interaction):
u, v = interaction
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
return self._adj[u][v]
def __iter__(self):
seen = set()
adj = self._adj
for u, neigh in adj.items():
for v in neigh:
if u == v:
# not adjacent to itself
continue
if v not in seen:
yield (u, v)
seen.add(u)
def __len__(self):
# remove the self-loops
return sum(len(neighbours) - (v in neighbours)
for v, neighbours in self._adj.items()) // 2
def __setitem__(self, interaction, bias):
u, v = interaction
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
adj = self._adj
# we don't know what type we want the biases, so we require that the variables already
# exist before we can add an interaction between them
if u not in adj:
raise KeyError('{} is not already a variable in the binary quadratic model'.format(u))
if v not in adj:
raise KeyError('{} is not already a variable in the binary quadratic model'.format(v))
adj[u][v] = adj[v][u] = bias
def __str__(self):
return str(dict(self))
def items(self):
return QuadraticItemsView(self)
class QuadraticItemsView(abc.ItemsView):
"""Faster items iteration"""
__slots__ = ()
def __iter__(self):
adj = self._mapping._adj
for u, v in self._mapping:
yield (u, v), adj[u][v]
class NeighbourView(abc.Mapping):
"""Acts as a dictionary `{u: bias, ...}` for the neighbours of a variable `v`.
See Also:
:class:`AdjacencyView`
"""
__slots__ = '_adj', '_var'
def __init__(self, adj, v):
self._adj = adj
self._var = v
def __getitem__(self, v):
u = self._var
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
return self._adj[u][v]
def __setitem__(self, u, bias):
v = self._var
if u == v:
raise KeyError('{} cannot have an interaction with itself'.format(u))
adj = self._adj
if u not in adj:
raise KeyError('{} is not an interaction'.format((u, v)))
adj[v][u] = adj[u][v] = bias
def __iter__(self):
v = self._var
for u in self._adj[v]:
if u != v:
yield u
def __len__(self):
v = self._var
neighbours = self._adj[v]
return len(neighbours) - (v in neighbours) # ignore self
def __str__(self):
return str(dict(self))
class AdjacencyView(BQMView, abc.Mapping):
"""Acts as a dict-of-dicts `{u: {v: bias}, v: {u: bias}}` for the quadratic biases.
The quadratic biases are stored in a dict-of-dicts format. So `{u: {v: bias}, v: {u: bias}}`
is stored as `._adj = {u: {v: Bias(bias)}, v: {u: Bias(bias)}}`.
"""
def __getitem__(self, v):
if v not in self._adj:
raise KeyError('{} is not a variable'.format(v))
return NeighbourView(self._adj, v)
def __iter__(self):
return iter(self._adj)
def __len__(self):
return len(self._adj)
| apache-2.0 | -1,572,494,099,592,310,500 | 27.846491 | 110 | 0.550403 | false |
51reboot/actual_09_homework | 08/zhaoyong/cmdb/userdb.py | 1 | 5201 | #encoding: utf-8
import json
import gconf
import MySQLdb
from dbutils import execute_fetch_sql
from dbutils import execute_commit_sql
'''
Fetch all user records as a list of dicts.
'''
def get_users():
_columns = ('id','username','password','age')
_sql = 'select * from user'
_count,_rt_list = execute_fetch_sql(_sql)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
return _rt
'''
Persist the given users to the JSON user file.
'''
def save_users(users):
fhandler = open(gconf.USER_FILE, 'wb')
fhandler.write(json.dumps(users))
fhandler.close()
'''
Validate a login attempt.
Returns True if some user's username & password match the input,
False if no user matches.
'''
def validate_login(username, password):
#_sql = 'select * from user where username="{username}" and password=md5("{password}")'.format(username=username,password=password)
_sql = 'select * from user where username=%s and password=md5(%s)'
_count,_rt_list = execute_fetch_sql(_sql,(username,password))
return _count != 0
'''
Validate the info for a new user.
Returns (True, '') on success or (False, message) on failure.
'''
def validate_add_user(username, password, age):
users = get_users()
    for user in users:
        if user.get('username') == username:
            return False, u'username already exists'
    if len(password) < 6:
        return False, u'password must be at least 6 characters'
    if not str(age).isdigit() or int(age) < 0 or int(age) > 100:
        return False, u'invalid age'
return True, ''
'''
Insert a new user record.
'''
def add_user(username, password, age):
    _sql = 'insert into user(username,password,age) values (%s,md5(%s),%s)'
    _args = (username, password, age)
    _count = execute_commit_sql(_sql, _args)
'''
获取用户信息
'''
def get_user(username):
users = get_users()
for user in users:
if user.get('username') == username:
return user
return None
def get_user_id(id,fetch=True):
_columns = ('id','username','password','age')
_sql = 'select * from user where id=%s'
    _args = (id,)
_count, _rt_list = execute_fetch_sql(_sql,_args)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
return _rt
#get_user_id(19)
'''
Validate a full user update.
'''
def validate_update_user(username, password, age,*args):
    if get_user(username) is None:
        return False, u'user does not exist'
    if len(password) < 6:
        return False, u'password must be at least 6 characters'
    if not str(age).isdigit() or int(age) < 0 or int(age) > 100:
        return False, u'invalid age'
return True, ''
'''
Validate an age-only update.
'''
def validate_update_user_age(uid, user_age,*args):
    if not get_user_id(uid):
        return False, u'user does not exist'
    if not str(user_age).isdigit() or int(user_age) <= 0 or int(user_age) > 100:
        return False, u'invalid age'
return True, ''
'''
Update a user's age.
'''
def update_user(user_age,uid):
_sql = 'update user set age=%s where id=%s'
_args = (user_age,uid)
_count = execute_commit_sql(_sql,_args)
'''
Validate deletion: the user must exist.
'''
def validate_delete_user(uid):
    if not get_user_id(uid):
        return False, u'user does not exist'
return True, ''
'''
Delete a user record.
'''
def delete_user(uid):
_sql = 'delete from user where id=%s '
    _args = (uid,)
_count = execute_commit_sql(_sql,_args)
'''
Validate a password-change request.
'''
def validate_charge_user_password(uid,user_password,username,manager_password):
    if not validate_login(username, manager_password):
        return False, u'wrong admin password'
    if get_user(username) is None:
        return False, u'user does not exist'
    if len(user_password) < 6:
        return False, u'password must be at least 6 characters'
return True,''
'''
Change a user's password.
'''
def charge_user_password(uid,user_password):
_sql = 'update user set password=md5(%s) where id=%s'
_args = (user_password,uid)
_count = execute_commit_sql(_sql, _args)
'''
Return the first rows of the access log.
'''
def accesslog(topn):
_columns = ('count','url','ip','code')
_sql = 'select * from accesslog limit %s'
    _args = (topn,)
_count, _rt_list = execute_fetch_sql(_sql,_args)
_rt = []
for _line in _rt_list:
_rt.append(dict(zip(_columns, _line)))
return _rt
if __name__ == '__main__':
print accesslog(1)
# update_user('aa','123456',88,18)
#get_userid("aa")
#print get_userid()
#print validate_login('kk', '123456')
#print validate_login('kk', '1234567')
#print validate_login('woniu', '123456')
#username = 'woniu1'
#password = '123456'
#age = '28'
#_is_ok, _error = validate_add_user(username, password, age)
#if _is_ok:
# add_user(username, password, age)
#else:
# print _error
#
#print delete_user('woniu2')
#print validate_update_user('woniu2', password, age)[1]
#print validate_update_user('kk', password, 'ac')[1]
#_is_ok, _error = validate_update_user('kk', password, 30)
#if _is_ok:
# update_user('kk', 'abcdef', 31)
| mit | -8,930,152,261,406,002,000 | 22.043689 | 135 | 0.610912 | false |
annoviko/pyclustering | pyclustering/core/tests/ut_package.py | 1 | 4514 | """!
@brief Unit-tests for pyclustering package that is used for exchange between ccore library and python code.
@authors Andrei Novikov ([email protected])
@date 2014-2020
@copyright BSD-3-Clause
"""
import unittest
import numpy
from pyclustering.core.pyclustering_package import package_builder, package_extractor
from ctypes import c_ulong, c_size_t, c_double, c_uint, c_float, c_char_p
class Test(unittest.TestCase):
def templatePackUnpack(self, dataset, c_type_data=None):
package_pointer = package_builder(dataset, c_type_data).create()
unpacked_package = package_extractor(package_pointer).extract()
packing_data = dataset
if isinstance(packing_data, numpy.ndarray):
packing_data = dataset.tolist()
if isinstance(packing_data, str):
self.assertEqual(dataset, unpacked_package)
else:
self.assertTrue(self.compare_containers(packing_data, unpacked_package))
def compare_containers(self, container1, container2):
def is_container(container):
return isinstance(container, list) or isinstance(container, tuple)
if len(container1) == 0 and len(container2) == 0:
return True
if len(container1) != len(container2):
return False
        for index in range(len(container1)):
            if is_container(container1[index]) and is_container(container2[index]):
                if not self.compare_containers(container1[index], container2[index]):
                    return False
elif is_container(container1[index]) == is_container(container2[index]):
if container1[index] != container2[index]:
return False
else:
return False
return True
def testListInteger(self):
self.templatePackUnpack([1, 2, 3, 4, 5])
def testListIntegerSingle(self):
self.templatePackUnpack([2])
def testListIntegerNegative(self):
self.templatePackUnpack([-1, -2, -10, -20])
def testListIntegerNegativeAndPositive(self):
self.templatePackUnpack([-1, 26, -10, -20, 13])
def testListFloat(self):
self.templatePackUnpack([1.1, 1.2, 1.3, 1.4, 1.5, 1.6])
def testListFloatNegativeAndPositive(self):
self.templatePackUnpack([1.1, -1.2, -1.3, -1.4, 1.5, -1.6])
def testListLong(self):
self.templatePackUnpack([100000000, 2000000000])
def testListEmpty(self):
self.templatePackUnpack([])
def testListOfListInteger(self):
self.templatePackUnpack([ [1, 2, 3], [4, 5, 6], [7, 8, 9] ])
def testListOfListDouble(self):
self.templatePackUnpack([ [1.1, 5.4], [1.3], [1.4, -9.4] ])
def testListOfListWithGaps(self):
self.templatePackUnpack([ [], [1, 2, 3], [], [4], [], [5, 6, 7] ])
def testListSpecifyUnsignedLong(self):
self.templatePackUnpack([1, 2, 3, 4, 5], c_ulong)
def testListSpecifyUnsignedSizeT(self):
self.templatePackUnpack([1, 2, 3, 4, 5], c_size_t)
def testListSpecifyDouble(self):
self.templatePackUnpack([1.1, 1.6, -7.8], c_double)
def testListOfListSpecifySizeT(self):
self.templatePackUnpack([ [1, 2, 3], [4, 5] ], c_size_t)
def testListOfListSpecifyUnsignedIntWithGaps(self):
self.templatePackUnpack([ [1, 2, 3], [], [4, 5], [], [] ], c_uint)
def testListOfListEmpty(self):
self.templatePackUnpack([ [], [], [] ])
def testListOfListOfListInteger(self):
self.templatePackUnpack([ [ [1], [2] ], [ [3], [4] ], [ [5, 6], [7, 8] ] ])
def testTupleInterger(self):
self.templatePackUnpack([ (1, 2, 3), (4, 5), (6, 7, 8, 9) ], c_uint)
def testTupleFloat(self):
self.templatePackUnpack([ (1.0, 2.0, 3.8), (4.6, 5.0), (6.8, 7.4, 8.5, 9.6) ], c_float)
def testTupleEmpty(self):
self.templatePackUnpack([(), (), ()])
def testNumpyMatrixOneColumn(self):
self.templatePackUnpack(numpy.array([[1.0], [2.0], [3.0]]), c_double)
def testNumpyMatrixTwoColumns(self):
self.templatePackUnpack(numpy.array([[1.0, 1.0], [2.0, 2.0]]), c_double)
def testNumpyMatrixThreeColumns(self):
self.templatePackUnpack(numpy.array([[1.1, 2.2, 3.3], [2.2, 3.3, 4.4], [3.3, 4.4, 5.5]]), c_double)
def testString(self):
self.templatePackUnpack("Test message number one".encode('utf-8'))
def testEmptyString(self):
self.templatePackUnpack("".encode('utf-8'))
| gpl-3.0 | -6,396,613,595,020,719,000 | 32.191176 | 107 | 0.617634 | false |
boudewijnrempt/kura | doc/script5.py | 1 | 3804 | import os.path, sys, string, codecs
from kuralib import kuraapp
from kuragui.guiconfig import guiConf
from kuragui import guiconfig
False = 0
True = 1
def splitCSVLine(line):
"""Splits a CSV-formatted line into a list.
See: http://www.colorstudy.com/software/webware/
"""
list = []
position = 0
fieldStart = 0
while 1:
if position >= len(line):
# This only happens when we have a trailing comma
list.append('')
return list
if line[position] == '"':
field = ""
position = position + 1
while 1:
end = string.find(line, '"', position)
if end == -1:
# This indicates a badly-formed CSV file, but
# we'll accept it anyway.
field = line[position:]
position = len(line)
break
if end + 1 < len(line) and line[end + 1] == '"':
field = "%s%s" % (field, line[position:end + 1])
position = end + 2
else:
field = "%s%s" % (field, line[position:end])
position = end + 2
break
else:
end = string.find(line, ",", position)
if end == -1:
                # end == -1 means no further comma; take the rest of the line
                list.append(line[position:])
return list
field = line[position:end]
position = end + 1
list.append(field)
return list
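# Illustrative behaviour of splitCSVLine (assumed examples, not from the
# original module):
#
#   splitCSVLine('a,b,c')            -> ['a', 'b', 'c']
#   splitCSVLine('a,"b,c",d')        -> ['a', 'b,c', 'd']
#   splitCSVLine('a,"say ""hi""",b') -> ['a', 'say "hi"', 'b']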
def init():
if guiConf.backend == guiconfig.FILE:
kuraapp.initApp(guiConf.backend,
dbfile = os.path.join(guiConf.filepath,
guiConf.datastore))
elif guiConf.backend == guiconfig.SQL:
if guiConf.username != "":
try:
kuraapp.initApp(guiConf.backend,
username = str(guiConf.username),
database = str(guiConf.database),
password = str(guiConf.password),
hostname = str(guiConf.hostname))
except Exception, e:
print "Error connecting to database: %s" % e
sys.exit(1)
kuraapp.initCurrentEnvironment(guiConf.usernr,
guiConf.languagenr,
guiConf.projectnr)
def main(args):
if len(args) < 2:
print "Usage: python script5.py f1...fn"
sys.exit(1)
init()
for line in codecs.open(args[1], "r", "UTF-8"):
line = splitCSVLine(line)
print "Inserting %s" % line[0]
lexeme = kuraapp.app.createObject("lng_lex", fields={},
form = line[0],
glosse = line[1],
languagenr = guiConf.languagenr,
phonetic_form = line[3],
usernr = guiConf.usernr)
lexeme.insert()
tag = kuraapp.app.createObject("lng_lex_tag", fields={},
lexnr = lexeme.lexnr,
tag = "POS",
value = line[2],
usernr = guiConf.usernr)
tag.insert()
tag = kuraapp.app.createObject("lng_lex_tag",
lexnr = lexeme.lexnr,
tag = "FILE",
value = args[1],
usernr = guiConf.usernr)
tag.insert()
kuraapp.app.saveFile()
if __name__ == "__main__":
main(sys.argv)
| bsd-2-clause | 1,558,941,842,772,906,000 | 36.294118 | 74 | 0.431651 | false |
elric/virtaal-debian | virtaal/plugins/spellchecker.py | 1 | 13888 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010-2011 Zuza Software Foundation
#
# This file is part of Virtaal.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
import logging
import os
import os.path
import re
from gettext import dgettext
import gobject
from virtaal.common import pan_app
from virtaal.controllers.baseplugin import BasePlugin
if not pan_app.DEBUG:
try:
import psyco
except:
psyco = None
else:
psyco = None
_dict_add_re = re.compile('Add "(.*)" to Dictionary')
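# For reference (assumed behaviour of the pattern above):
#   _dict_add_re.match('Add "colour" to Dictionary').group(1) == 'colour'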
class Plugin(BasePlugin):
"""A plugin to control spell checking.
It can also download spell checkers on Windows."""
display_name = _('Spell Checker')
description = _('Check spelling and provide suggestions')
version = 0.1
_base_URL = 'http://dictionary.locamotion.org/hunspell/'
_dict_URL = _base_URL + '%s.tar.bz2'
_lang_list = 'languages.txt'
# INITIALIZERS #
def __init__(self, internal_name, main_controller):
self.internal_name = internal_name
# If these imports fail, the plugin is automatically disabled
import gtkspell
import enchant
self.gtkspell = gtkspell
self.enchant = enchant
# languages that we've handled before:
self._seen_languages = {}
# languages supported by enchant:
self._enchant_languages = self.enchant.list_languages()
# HTTP clients (Windows only)
self.clients = {}
# downloadable languages (Windows only)
self.languages = set()
unit_view = main_controller.unit_controller.view
self.unit_view = unit_view
self._connect_id = self.unit_view.connect('textview-language-changed', self._on_unit_lang_changed)
self._textbox_ids = []
self._unitview_ids = []
# For some reason the i18n of gtkspell doesn't work on Windows, so we
# intervene. We also don't want the Languages submenu, so we remove it.
if unit_view.sources:
self._connect_to_textboxes(unit_view, unit_view.sources)
srclang = main_controller.lang_controller.source_lang.code
for textview in unit_view.sources:
self._on_unit_lang_changed(unit_view, textview, srclang)
else:
self._unitview_ids.append(unit_view.connect('sources-created', self._connect_to_textboxes))
if unit_view.targets:
self._connect_to_textboxes(unit_view, unit_view.targets)
tgtlang = main_controller.lang_controller.target_lang.code
for textview in unit_view.targets:
self._on_unit_lang_changed(unit_view, textview, tgtlang)
else:
self._unitview_ids.append(unit_view.connect('targets-created', self._connect_to_textboxes))
def destroy(self):
"""Remove signal connections and disable spell checking."""
for id in self._unitview_ids:
self.unit_view.disconnect(id)
for textbox, id in self._textbox_ids:
textbox.disconnect(id)
if getattr(self, '_connect_id', None):
self.unit_view.disconnect(self._connect_id)
for text_view in self.unit_view.sources + self.unit_view.targets:
self._disable_checking(text_view)
def _connect_to_textboxes(self, unitview, textboxes):
for textbox in textboxes:
self._textbox_ids.append((
textbox,
textbox.connect('populate-popup', self._on_populate_popup)
))
# METHODS #
def _build_client(self, url, clients_id, callback, error_callback=None):
from virtaal.support.httpclient import HTTPClient
client = HTTPClient()
client.set_virtaal_useragent()
self.clients[clients_id] = client
if logging.root.level != logging.DEBUG:
client.get(url, callback)
else:
def error_log(request, result):
logging.debug('Could not get %s: status %d' % (url, request.status))
client.get(url, callback, error_callback=error_log)
def _download_checker(self, language):
"""A Windows-only way to obtain new dictionaries."""
if 'APPDATA' not in os.environ:
# We won't have an idea of where to save it, so let's give up now
return
if language in self.clients:
# We already tried earlier, or started the process
return
if not self.languages:
if self._lang_list not in self.clients:
# We don't yet have a list of available languages
url = self._base_URL + self._lang_list #index page listing all the dictionaries
callback = lambda *args: self._process_index(language=language, *args)
self._build_client(url, self._lang_list, callback)
# self._process_index will call this again, so we can exit
return
language_to_download = None
# People almost definitely want 'en_US' for 'en', so let's ensure
# that we get that right:
if language == 'en':
language_to_download = 'en_US'
self.clients[language] = None
else:
# Let's see if a dictionary is available for this language:
for l in self.languages:
if l == language or l.startswith(language+'_'):
self.clients[language] = None
logging.debug("Will use %s to spell check %s", l, language)
language_to_download = l
break
else:
# No dictionary available
# Indicate that we never need to try this language:
logging.debug("Found no suitable language for spell checking")
self.clients[language] = None
return
# Now download the actual files after we have determined that it is
# available
callback = lambda *args: self._process_tarball(language=language, *args)
url = self._dict_URL % language_to_download
self._build_client(url, language, callback)
def _tar_ok(self, tar):
# TODO: Verify that the tarball is ok:
# - only two files
# - must be .aff and .dic
# - language codes should be sane
# - file sizes should be ok
# - no directory structure
return True
def _ensure_dir(self, dir):
if not os.path.isdir(dir):
os.makedirs(dir)
def _process_index(self, request, result, language=None):
"""Process the list of languages."""
if request.status == 200 and not self.languages:
self.languages = set(result.split())
self._download_checker(language)
else:
logging.debug("Couldn't get list of spell checkers")
#TODO: disable plugin
def _process_tarball(self, request, result, language=None):
# Indicate that we already tried and shouldn't try again later:
self.clients[language] = None
if request.status == 200:
logging.debug('Got a dictionary')
from cStringIO import StringIO
import tarfile
file_obj = StringIO(result)
tar = tarfile.open(fileobj=file_obj)
if not self._tar_ok(tar):
return
DICTDIR = os.path.join(os.environ['APPDATA'], 'enchant', 'myspell')
self._ensure_dir(DICTDIR)
tar.extractall(DICTDIR)
self._seen_languages.pop(language, None)
self._enchant_languages = self.enchant.list_languages()
self.unit_view.update_languages()
else:
logging.debug("Couldn't get a dictionary. Status code: %d" % (request.status))
def _disable_checking(self, text_view):
"""Disable checking on the given text_view."""
if getattr(text_view, 'spell_lang', 'xxxx') is None:
# No change necessary - already disabled
return
spell = None
try:
spell = self.gtkspell.get_from_text_view(text_view)
except SystemError, e:
# At least on Mandriva .get_from_text_view() sometimes returns
# a SystemError without a description. Things seem to work fine
# anyway, so let's ignore it and hope for the best.
pass
if not spell is None:
spell.detach()
text_view.spell_lang = None
if psyco:
psyco.cannotcompile(_disable_checking)
# SIGNAL HANDLERS #
def _on_unit_lang_changed(self, unit_view, text_view, language):
if not self.gtkspell:
return
if language == 'en':
language = 'en_US'
if not language in self._seen_languages and not self.enchant.dict_exists(language):
            # Sometimes enchant *wants* a country code, other times it does not.
# For the cases where it requires one, we look for the first language
# code that enchant supports and use that one.
for code in self._enchant_languages:
if code.startswith(language+'_'):
self._seen_languages[language] = code
language = code
break
else:
#logging.debug('No code in enchant.list_languages() that starts with "%s"' % (language))
# If we are on Windows, let's try to download a spell checker:
if os.name == 'nt':
self._download_checker(language)
# If we get it, it will only be activated asynchronously
# later
#TODO: packagekit on Linux?
# We couldn't find a dictionary for "language", so we should make sure that we don't
# have a spell checker for a different language on the text view. See bug 717.
self._disable_checking(text_view)
self._seen_languages[language] = None
return
language = self._seen_languages.get(language, language)
if language is None:
self._disable_checking(text_view)
return
if getattr(text_view, 'spell_lang', None) == language:
# No change necessary - already enabled
return
gobject.idle_add(self._activate_checker, text_view, language, priority=gobject.PRIORITY_LOW)
def _activate_checker(self, text_view, language):
        # All the expensive stuff in here is called on idle. We must also
        # isolate this away from psyco
try:
spell = None
try:
spell = self.gtkspell.get_from_text_view(text_view)
except SystemError, e:
# At least on Mandriva .get_from_text_view() sometimes returns
# a SystemError without a description. Things seem to work fine
# anyway, so let's ignore it and hope for the best.
pass
if spell is None:
spell = self.gtkspell.Spell(text_view, language)
else:
spell.set_language(language)
spell.recheck_all()
text_view.spell_lang = language
except Exception, e:
logging.exception("Could not initialize spell checking", e)
self.gtkspell = None
#TODO: unload plugin
if psyco:
# Some of the gtkspell stuff can't work with psyco and will dump core
# if we don't avoid psyco compilation
psyco.cannotcompile(_activate_checker)
def _on_populate_popup(self, textbox, menu):
# We can't work with the menu immediately, since gtkspell only adds its
# entries in the event handler.
gobject.idle_add(self._fix_menu, menu)
def _fix_menu(self, menu):
_entries_above_separator = False
_now_remove_separator = False
for item in menu:
if item.get_name() == 'GtkSeparatorMenuItem':
if not _entries_above_separator:
menu.remove(item)
break
label = item.get_property('label')
# For some reason the i18n of gtkspell doesn't work on Windows, so
# we intervene.
if label == "<i>(no suggestions)</i>":
#l10n: This refers to spell checking
item.set_property('label', _("<i>(no suggestions)</i>"))
if label == "Ignore All":
#l10n: This refers to spell checking
item.set_property('label', _("Ignore All"))
if label == "More...":
#l10n: This refers to spelling suggestions
item.set_property('label', _("More..."))
m = _dict_add_re.match(label)
if m:
word = m.group(1)
#l10n: This refers to the spell checking dictionary
item.set_property('label', _('Add "%s" to Dictionary') % word)
# We don't want a language selector - we have our own
if label in dgettext('gtkspell', 'Languages'):
menu.remove(item)
if not _entries_above_separator:
_now_remove_separator = True
continue
_entries_above_separator = True
| gpl-2.0 | -2,843,363,457,551,456,000 | 38.566952 | 106 | 0.589502 | false |
endlessm/chromium-browser | third_party/catapult/dashboard/dashboard/common/request_handler.py | 1 | 4410 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Simple Request handler using Jinja2 templates."""
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import logging
import os
import jinja2
import webapp2
from google.appengine.api import users
from dashboard.common import utils
from dashboard.common import xsrf
_DASHBOARD_PYTHON_DIR = os.path.dirname(os.path.dirname(__file__))
JINJA2_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(
[os.path.join(_DASHBOARD_PYTHON_DIR, 'templates')]),
# Security team suggests that autoescaping be enabled.
autoescape=True,
extensions=['jinja2.ext.autoescape'])
class RequestHandler(webapp2.RequestHandler):
"""Base class for requests. Does common template and error handling tasks."""
def RenderHtml(self, template_file, template_values, status=200):
"""Renders HTML given template and values.
Args:
template_file: string. File name under templates directory.
template_values: dict. Mapping of template variables to corresponding.
values.
status: int. HTTP status code.
"""
self.response.set_status(status)
template = JINJA2_ENVIRONMENT.get_template(template_file)
self.GetDynamicVariables(template_values)
self.response.out.write(template.render(template_values))
def RenderStaticHtml(self, filename):
filename = os.path.join(_DASHBOARD_PYTHON_DIR, 'static', filename)
    with open(filename, 'r') as contents:
      self.response.out.write(contents.read())
def GetDynamicVariables(self, template_values, request_path=None):
"""Gets the values that vary for every page.
Args:
template_values: dict of name/value pairs.
request_path: path for login urls, None if using the current path.
"""
user_info = ''
xsrf_token = ''
user = users.get_current_user()
display_username = 'Sign in'
title = 'Sign in to an account'
is_admin = False
if user:
display_username = user.email()
title = 'Switch user'
xsrf_token = xsrf.GenerateToken(user)
is_admin = users.is_current_user_admin()
try:
login_url = users.create_login_url(request_path or self.request.path_qs)
except users.RedirectTooLongError:
# On the bug filing pages, the full login URL can be too long. Drop
# the correct redirect URL, since the user should already be logged in at
# this point anyway.
login_url = users.create_login_url('/')
user_info = '<a href="%s" title="%s">%s</a>' % (
login_url, title, display_username)
# Force out of passive login, as it creates multilogin issues.
login_url = login_url.replace('passive=true', 'passive=false')
template_values['login_url'] = login_url
template_values['display_username'] = display_username
template_values['user_info'] = user_info
template_values['is_admin'] = is_admin
template_values['is_internal_user'] = utils.IsInternalUser()
template_values['xsrf_token'] = xsrf_token
template_values['xsrf_input'] = (
'<input type="hidden" name="xsrf_token" value="%s">' % xsrf_token)
template_values['login_url'] = login_url
return template_values
def ReportError(self, error_message, status=500):
"""Reports the given error to the client and logs the error.
Args:
error_message: The message to log and send to the client.
status: The HTTP response code to use.
"""
logging.error('Reporting error: %r', error_message)
self.response.set_status(status)
self.response.out.write('%s\nrequest_id:%s\n' %
(error_message, utils.GetRequestId()))
def ReportWarning(self, warning_message, status=200):
"""Reports a warning to the client and logs the warning.
Args:
warning_message: The warning message to log (as an error).
status: The http response code to use.
"""
logging.warning('Reporting warning: %r', warning_message)
self.response.set_status(status)
self.response.out.write('%s\nrequest_id:%s\n' %
(warning_message, utils.GetRequestId()))
class InvalidInputError(Exception):
"""An error class for invalid user input query parameter values."""
pass
| bsd-3-clause | -7,051,379,980,682,448,000 | 35.446281 | 79 | 0.685488 | false |
simbs/edx-platform | common/djangoapps/util/views.py | 1 | 13657 | import json
import logging
import sys
from functools import wraps
from django.conf import settings
from django.core.cache import caches
from django.core.validators import ValidationError, validate_email
from django.views.decorators.csrf import requires_csrf_token
from django.views.defaults import server_error
from django.http import (Http404, HttpResponse, HttpResponseNotAllowed,
HttpResponseServerError)
import dogstats_wrapper as dog_stats_api
from edxmako.shortcuts import render_to_response
import zendesk
from microsite_configuration import microsite
import calc
import track.views
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
log = logging.getLogger(__name__)
def ensure_valid_course_key(view_func):
"""
This decorator should only be used with views which have argument course_key_string (studio) or course_id (lms).
If course_key_string (studio) or course_id (lms) is not valid raise 404.
"""
@wraps(view_func)
def inner(request, *args, **kwargs):
course_key = kwargs.get('course_key_string') or kwargs.get('course_id')
if course_key is not None:
try:
CourseKey.from_string(course_key)
except InvalidKeyError:
raise Http404
response = view_func(request, *args, **kwargs)
return response
return inner
@requires_csrf_token
def jsonable_server_error(request, template_name='500.html'):
"""
500 error handler that serves JSON on an AJAX request, and proxies
to the Django default `server_error` view otherwise.
"""
if request.is_ajax():
msg = {"error": "The edX servers encountered an error"}
return HttpResponseServerError(json.dumps(msg))
else:
return server_error(request, template_name=template_name)
def handle_500(template_path, context=None, test_func=None):
"""
Decorator for view specific 500 error handling.
Custom handling will be skipped only if test_func is passed and it returns False
Usage:
@handle_500(
template_path='certificates/server-error.html',
context={'error-info': 'Internal Server Error'},
test_func=lambda request: request.GET.get('preview', None)
)
def my_view(request):
# Any unhandled exception in this view would be handled by the handle_500 decorator
# ...
"""
def decorator(func):
"""
Decorator to render custom html template in case of uncaught exception in wrapped function
"""
@wraps(func)
def inner(request, *args, **kwargs):
"""
Execute the function in try..except block and return custom server-error page in case of unhandled exception
"""
try:
return func(request, *args, **kwargs)
except Exception: # pylint: disable=broad-except
if settings.DEBUG:
# In debug mode let django process the 500 errors and display debug info for the developer
raise
elif test_func is None or test_func(request):
# Display custom 500 page if either
# 1. test_func is None (meaning nothing to test)
# 2. or test_func(request) returns True
log.exception("Error in django view.")
return render_to_response(template_path, context)
else:
# Do not show custom 500 error when test fails
raise
return inner
return decorator
def calculate(request):
''' Calculator in footer of every page. '''
equation = request.GET['equation']
try:
result = calc.evaluator({}, {}, equation)
except:
event = {'error': map(str, sys.exc_info()),
'equation': equation}
track.views.server_track(request, 'error:calc', event, page='calc')
return HttpResponse(json.dumps({'result': 'Invalid syntax'}))
return HttpResponse(json.dumps({'result': str(result)}))
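# Hypothetical usage of the view above (the URL routing and exact result
# formatting are assumptions; the JSON shape follows the code):
#
#   GET /calculate?equation=2%2B2  ->  {"result": "4"}   (or "4.0",
#                                       depending on calc.evaluator)
#   GET /calculate?equation=2*)3   ->  {"result": "Invalid syntax"}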
class _ZendeskApi(object):
CACHE_PREFIX = 'ZENDESK_API_CACHE'
CACHE_TIMEOUT = 60 * 60
def __init__(self):
"""
Instantiate the Zendesk API.
All of `ZENDESK_URL`, `ZENDESK_USER`, and `ZENDESK_API_KEY` must be set
in `django.conf.settings`.
"""
self._zendesk_instance = zendesk.Zendesk(
settings.ZENDESK_URL,
settings.ZENDESK_USER,
settings.ZENDESK_API_KEY,
use_api_token=True,
api_version=2,
# As of 2012-05-08, Zendesk is using a CA that is not
# installed on our servers
client_args={"disable_ssl_certificate_validation": True}
)
def create_ticket(self, ticket):
"""
Create the given `ticket` in Zendesk.
The ticket should have the format specified by the zendesk package.
"""
ticket_url = self._zendesk_instance.create_ticket(data=ticket)
return zendesk.get_id_from_url(ticket_url)
def update_ticket(self, ticket_id, update):
"""
Update the Zendesk ticket with id `ticket_id` using the given `update`.
The update should have the format specified by the zendesk package.
"""
self._zendesk_instance.update_ticket(ticket_id=ticket_id, data=update)
def get_group(self, name):
"""
Find the Zendesk group named `name`. Groups are cached for
CACHE_TIMEOUT seconds.
If a matching group exists, it is returned as a dictionary
with the format specifed by the zendesk package.
Otherwise, returns None.
"""
cache = caches['default']
cache_key = '{prefix}_group_{name}'.format(prefix=self.CACHE_PREFIX, name=name)
cached = cache.get(cache_key)
if cached:
return cached
groups = self._zendesk_instance.list_groups()['groups']
for group in groups:
if group['name'] == name:
cache.set(cache_key, group, self.CACHE_TIMEOUT)
return group
return None
def _record_feedback_in_zendesk(
realname,
email,
subject,
details,
tags,
additional_info,
group_name=None,
require_update=False
):
"""
Create a new user-requested Zendesk ticket.
Once created, the ticket will be updated with a private comment containing
additional information from the browser and server, such as HTTP headers
and user state. Returns a boolean value indicating whether ticket creation
was successful, regardless of whether the private comment update succeeded.
If `group_name` is provided, attaches the ticket to the matching Zendesk group.
If `require_update` is provided, returns False when the update does not
succeed. This allows using the private comment to add necessary information
which the user will not see in followup emails from support.
"""
zendesk_api = _ZendeskApi()
additional_info_string = (
u"Additional information:\n\n" +
u"\n".join(u"%s: %s" % (key, value) for (key, value) in additional_info.items() if value is not None)
)
# Tag all issues with LMS to distinguish channel in Zendesk; requested by student support team
zendesk_tags = list(tags.values()) + ["LMS"]
# Per edX support, we would like to be able to route white label feedback items
# via tagging
white_label_org = microsite.get_value('course_org_filter')
if white_label_org:
zendesk_tags = zendesk_tags + ["whitelabel_{org}".format(org=white_label_org)]
new_ticket = {
"ticket": {
"requester": {"name": realname, "email": email},
"subject": subject,
"comment": {"body": details},
"tags": zendesk_tags
}
}
group = None
if group_name is not None:
group = zendesk_api.get_group(group_name)
if group is not None:
new_ticket['ticket']['group_id'] = group['id']
try:
ticket_id = zendesk_api.create_ticket(new_ticket)
if group is None:
# Support uses Zendesk groups to track tickets. In case we
# haven't been able to correctly group this ticket, log its ID
# so it can be found later.
log.warning('Unable to find group named %s for Zendesk ticket with ID %s.', group_name, ticket_id)
except zendesk.ZendeskError:
log.exception("Error creating Zendesk ticket")
return False
# Additional information is provided as a private update so the information
# is not visible to the user.
ticket_update = {"ticket": {"comment": {"public": False, "body": additional_info_string}}}
try:
zendesk_api.update_ticket(ticket_id, ticket_update)
except zendesk.ZendeskError:
log.exception("Error updating Zendesk ticket with ID %s.", ticket_id)
# The update is not strictly necessary, so do not indicate
# failure to the user unless it has been requested with
# `require_update`.
if require_update:
return False
return True
DATADOG_FEEDBACK_METRIC = "lms_feedback_submissions"
def _record_feedback_in_datadog(tags):
datadog_tags = [u"{k}:{v}".format(k=k, v=v) for k, v in tags.items()]
dog_stats_api.increment(DATADOG_FEEDBACK_METRIC, tags=datadog_tags)
def submit_feedback(request):
"""
Create a new user-requested ticket, currently implemented with Zendesk.
If feedback submission is not enabled, any request will raise `Http404`.
If any configuration parameter (`ZENDESK_URL`, `ZENDESK_USER`, or
`ZENDESK_API_KEY`) is missing, any request will raise an `Exception`.
The request must be a POST request specifying `subject` and `details`.
If the user is not authenticated, the request must also specify `name` and
`email`. If the user is authenticated, the `name` and `email` will be
populated from the user's information. If any required parameter is
missing, a 400 error will be returned indicating which field is missing and
providing an error message. If Zendesk ticket creation fails, 500 error
will be returned with no body; if ticket creation succeeds, an empty
successful response (200) will be returned.
"""
if not settings.FEATURES.get('ENABLE_FEEDBACK_SUBMISSION', False):
raise Http404()
if request.method != "POST":
return HttpResponseNotAllowed(["POST"])
if (
not settings.ZENDESK_URL or
not settings.ZENDESK_USER or
not settings.ZENDESK_API_KEY
):
raise Exception("Zendesk enabled but not configured")
def build_error_response(status_code, field, err_msg):
return HttpResponse(json.dumps({"field": field, "error": err_msg}), status=status_code)
additional_info = {}
required_fields = ["subject", "details"]
if not request.user.is_authenticated():
required_fields += ["name", "email"]
required_field_errs = {
"subject": "Please provide a subject.",
"details": "Please provide details.",
"name": "Please provide your name.",
"email": "Please provide a valid e-mail.",
}
for field in required_fields:
if field not in request.POST or not request.POST[field]:
return build_error_response(400, field, required_field_errs[field])
subject = request.POST["subject"]
details = request.POST["details"]
tags = dict(
[(tag, request.POST[tag]) for tag in ["issue_type", "course_id"] if tag in request.POST]
)
if request.user.is_authenticated():
realname = request.user.profile.name
email = request.user.email
additional_info["username"] = request.user.username
else:
realname = request.POST["name"]
email = request.POST["email"]
try:
validate_email(email)
except ValidationError:
return build_error_response(400, "email", required_field_errs["email"])
for header, pretty in [
("HTTP_REFERER", "Page"),
("HTTP_USER_AGENT", "Browser"),
("REMOTE_ADDR", "Client IP"),
("SERVER_NAME", "Host")
]:
additional_info[pretty] = request.META.get(header)
success = _record_feedback_in_zendesk(realname, email, subject, details, tags, additional_info)
_record_feedback_in_datadog(tags)
return HttpResponse(status=(200 if success else 500))
def info(request):
''' Info page (link from main header) '''
return render_to_response("info.html", {})
# From http://djangosnippets.org/snippets/1042/
def parse_accept_header(accept):
"""Parse the Accept header *accept*, returning a list with pairs of
(media_type, q_value), ordered by q values.
"""
result = []
for media_range in accept.split(","):
parts = media_range.split(";")
media_type = parts.pop(0)
media_params = []
q = 1.0
for part in parts:
(key, value) = part.lstrip().split("=", 1)
if key == "q":
q = float(value)
else:
media_params.append((key, value))
result.append((media_type, tuple(media_params), q))
result.sort(lambda x, y: -cmp(x[2], y[2]))
return result
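# Example (assumed, matching the docstring above):
#
#   parse_accept_header("text/html;q=0.9,application/json")
#   -> [("application/json", (), 1.0), ("text/html", (), 0.9)]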
def accepts(request, media_type):
"""Return whether this request has an Accept header that matches type"""
accept = parse_accept_header(request.META.get("HTTP_ACCEPT", ""))
return media_type in [t for (t, p, q) in accept]
| agpl-3.0 | 4,153,469,840,877,619,700 | 35.321809 | 120 | 0.63008 | false |
duguhaotian/superscp | src/superscp_tool.py | 1 | 2572 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import os
import sys
import subprocess
from pathmanager import node
from pathmanager import link
from pathmanager import paths
from pathmanager import tool
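# NOTE: usage() is referenced below but missing from this file; this is a
# minimal stand-in (the exact original message is unknown).
def usage():
    print("usage: superscp_tool.py <command> <src_file> <target_ip> <target_dir>")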
def superscp(argv):
if len(argv) != 5:
usage()
return
src = argv[2]
tip = argv[3]
tdir = argv[4]
srcnid = None
ips = tool.get_ips()
if len(ips) > 1:
print("---------------------------------------")
keys = ips.keys()
i = 0
for key in keys:
print("%d. %s" % (i, ips[key]))
i += 1
print("---------------------------------------")
select = input("which ip use to scp, select the index: ")
print("you select ip is : %s" % ips[keys[select]] )
srcnid = keys[select]
elif len(ips) < 1:
print("no ether for scp")
return
else:
srcnid = ips.keys()[0]
srcnid = tool.get_mac(srcnid)
srcnode = node.get_node(srcnid)
if srcnode == None:
print("current host is not register")
return
print(srcnode.show())
tnodes = node.find_by_ip(tip)
tnode = None
if len(tnodes) > 1:
i = 0
print("***********************************")
for tmp in tnodes:
print("%d. %s" % (i, tmp.show()))
i += 1
print("***********************************")
select = input("which target ip use to scp, select the index: ")
tnode = tnodes[select]
elif len(tnodes) < 1:
print("can not find target node by target ip : %s" % tip)
return
else:
tnode = tnodes[0]
print(tnode.show())
idxs = paths.search_by_target(srcnode, tnode)
path = None
if len(idxs) > 1:
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
i = 0
for idx in idxs:
print("%d. %s" % (i, paths.get(idx).show()))
i += 1
print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@")
select = input("select one path to scp, which index you chose: ")
path = paths.get(idxs[i])
elif len(idxs) < 1:
print("cannot find sourceip: %s to targetip: %s path" % (srcnode.nip, tnode.nip))
return
else:
path = paths.get(idxs[0])
rdir=os.path.split(os.path.realpath(__file__))[0]
scpfname = rdir + "/scptool/.data.superscp"
paths.generate_scp_data(path, scpfname)
cmdstr = rdir+"/scptool/magic.sh " + src + " " + tdir
rts = subprocess.check_output(cmdstr, shell=True).decode().strip()
print("magic return: %s", rts)
| apache-2.0 | -9,122,752,600,971,258,000 | 27.577778 | 89 | 0.485226 | false |
appsembler/edx-platform | openedx/core/djangoapps/appsembler/sites/api.py | 1 | 10738 | import logging
import requests
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from rest_framework import generics, views, viewsets
from rest_framework import status
from rest_framework.generics import CreateAPIView
from rest_framework.parsers import JSONParser, MultiPartParser
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from organizations.models import Organization, UserOrganizationMapping
from branding.api import get_base_url
from openedx.core.djangoapps.site_configuration.models import SiteConfiguration
from rest_framework.views import APIView
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission
from openedx.core.lib.api.authentication import (
OAuth2AuthenticationAllowInactiveUser,
)
from openedx.core.djangoapps.appsembler.sites.models import AlternativeDomain
from openedx.core.djangoapps.appsembler.sites.permissions import AMCAdminPermission
from openedx.core.djangoapps.appsembler.sites.serializers import (
SiteConfigurationSerializer,
SiteConfigurationListSerializer,
SiteSerializer,
RegistrationSerializer,
AlternativeDomainSerializer,
)
from openedx.core.djangoapps.appsembler.sites.utils import (
delete_site,
get_customer_files_storage,
to_safe_file_name,
)
log = logging.getLogger(__name__)
class SiteViewSet(viewsets.ReadOnlyModelViewSet):
queryset = Site.objects.all()
serializer_class = SiteSerializer
authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated, AMCAdminPermission)
def get_queryset(self):
queryset = Site.objects.exclude(id=settings.SITE_ID)
user = self.request.user
if not user.is_superuser:
            queryset = queryset.filter(organizations__in=user.organizations.all())
return queryset
class SiteConfigurationViewSet(viewsets.ModelViewSet):
queryset = SiteConfiguration.objects.all()
serializer_class = SiteConfigurationSerializer
list_serializer_class = SiteConfigurationListSerializer
create_serializer_class = SiteSerializer
authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
permission_classes = (IsAuthenticated, AMCAdminPermission)
def get_serializer_class(self):
if self.action == 'list':
return self.list_serializer_class
if self.action == 'create':
return self.create_serializer_class
return super(SiteConfigurationViewSet, self).get_serializer_class()
def perform_destroy(self, instance):
delete_site(instance.site)
class FileUploadView(views.APIView):
parser_classes = (MultiPartParser,)
# TODO: oauth token isn't present after step 3 in signup, fix later
#permission_classes = (AMCAdminPermission,)
def post(self, request, format=None):
file_obj = request.data['file']
file_path = self.handle_uploaded_file(file_obj, request.GET.get('filename'))
return Response({'file_path': file_path}, status=201)
def handle_uploaded_file(self, content, filename):
storage = get_customer_files_storage()
name = storage.save(filename, content)
return storage.url(name)
class HostFilesView(views.APIView):
"""
Host remote static files internally.
This view hosts files on a Django Storage Backend (e.g. S3 or FileSystem).
This view is stupid and doesn't try to fix errors, thus it will fail
if any of the files it will give up and throw an error.
Usage:
POST /appsembler/api/host_files
{
"urls": [
"https://openclipart.org/download/292749/abstract-icon1.png",
"https://openclipart.org/download/292749/abstract-icon2.png",
"https://openclipart.org/download/292749/abstract-icon3.png",
]
}
Response on Success:
Code = 200
{
"success": true,
"urls": [{
"source": "https://openclipart.org/download/292749/abstract-icon1.png",
"dest": "https://tahoe.appsembler.com/customer_files/c334d1943576/abstract.png"
}, {
"source": "https://openclipart.org/download/292749/abstract-icon2.png",
"dest": "https://tahoe.appsembler.com/customer_files/a12bc334fd/abstract.png"
}, {
"source": "https://openclipart.org/download/292749/abstract-icon3.png",
"dest": "https://tahoe.appsembler.com/customer_files/c334d1334df/abstract.png"
}]
}
Response on Error:
Code = 400 or 500
{
"success": false,
"value": "Error processing the provided file",
"url": "https://openclipart.org/download/292749/abstract-icon3.png"
}
"""
parser_classes = (JSONParser,)
def _logged_response(self, json, status):
logging.info('Error in processing a file for "HostFilesView", "%s". http_status=%s', json, status)
return Response(json, status=status)
def post(self, request):
storage = get_customer_files_storage()
urls = request.data.get('urls')
if not (isinstance(urls, list) and urls):
return self._logged_response({
'success': False,
'value': 'No files were provided.',
}, status=status.HTTP_400_BAD_REQUEST)
maximum_files = settings.APPSEMBLER_FEATURES.get('FILE_HOST_MAXIMUM_FILES', 10)
timeout_secs = settings.APPSEMBLER_FEATURES.get('FILE_HOST_TIMEOUT', 1)
max_download_size_bytes = settings.APPSEMBLER_FEATURES.get('FILE_HOST_MAX_DOWNLOAD_SIZE', 512 * 1024)
if len(urls) > maximum_files:
return self._logged_response({
'success': False,
'value': 'Too many files were provided.',
'maximum_files': maximum_files
}, status=status.HTTP_400_BAD_REQUEST)
stored_urls = []
for source_url in urls:
try:
response = requests.get(source_url, timeout=timeout_secs)
except requests.exceptions.Timeout:
return self._logged_response({
'success': False,
'value': 'Request to the needed URL timed out.',
'url': source_url,
'timeout_seconds': timeout_secs,
}, status=status.HTTP_400_BAD_REQUEST)
except requests.exceptions.RequestException:
return self._logged_response({
'success': False,
'value': 'Error processing the provided URL.',
'url': source_url,
}, status=status.HTTP_400_BAD_REQUEST)
if len(response.content) > max_download_size_bytes:
# TODO: Use a more streamed limit, but probably the timeout would protect against 1TB downloads
# as most servers can't really download anything over than 12MBytes in a single second
# But if you're willing see: https://stackoverflow.com/a/23514616/161278
return self._logged_response({
'success': False,
'value': 'The file is too large to download.',
'url': source_url,
'max_size_bytes': max_download_size_bytes,
}, status=status.HTTP_400_BAD_REQUEST)
cleaned_up = to_safe_file_name(source_url)
new_file_name = storage.get_available_name(cleaned_up, max_length=100)
with storage.open(new_file_name, 'wb') as f:
f.write(response.content)
dest_url = get_base_url(request.is_secure()) + storage.url(new_file_name)
stored_urls.append({
'source': source_url,
'dest': dest_url,
})
return Response({
'success': True,
'urls': stored_urls,
}, status=status.HTTP_200_OK)
class SiteCreateView(generics.CreateAPIView):
serializer_class = RegistrationSerializer
permission_classes = (ApiKeyHeaderPermission,)
class UsernameAvailabilityView(APIView):
def get(self, request, username, format=None):
try:
User.objects.get(username=username)
return Response(None, status=status.HTTP_200_OK)
except User.DoesNotExist:
return Response(None, status=status.HTTP_404_NOT_FOUND)
class FindUsernameByEmailView(APIView):
"""
View to find username by email to be used in AMC signup workflow.
"""
permission_classes = [ApiKeyHeaderPermission]
def get(self, request):
user_email = request.GET.get('email')
organization_name = request.GET.get('organization_name')
if user_email and organization_name:
try:
organization = Organization.objects.get(name=organization_name)
mapping = UserOrganizationMapping.objects.get(user__email=user_email, organization=organization)
return Response({'username': mapping.user.username}, status=status.HTTP_200_OK)
except (Organization.DoesNotExist, UserOrganizationMapping.DoesNotExist):
pass
return Response({}, status=status.HTTP_404_NOT_FOUND)
class DomainAvailabilityView(APIView):
def get(self, request, subdomain, format=None):
try:
Site.objects.get(name=subdomain)
return Response(None, status=status.HTTP_200_OK)
except Site.DoesNotExist:
return Response(None, status=status.HTTP_404_NOT_FOUND)
class DomainSwitchView(APIView):
def post(self, request, format=None):
site_id = request.data.get('site')
if not site_id:
return Response("Site ID needed", status=status.HTTP_400_BAD_REQUEST)
try:
site = Site.objects.get(id=site_id)
if not site.alternative_domain:
return Response("Site {} does not have a custom domain".format(site.domain),
status=status.HTTP_404_NOT_FOUND)
site.alternative_domain.switch_with_active()
return Response(status=status.HTTP_200_OK)
except Site.DoesNotExist:
return Response("The site with ID {} does not exist".format(site_id),
status=status.HTTP_404_NOT_FOUND)
class CustomDomainView(CreateAPIView):
queryset = AlternativeDomain.objects.all()
serializer_class = AlternativeDomainSerializer
| agpl-3.0 | 3,052,088,719,551,813,000 | 38.477941 | 112 | 0.635128 | false |
birocorneliu/conference | lib/to_delete.py | 1 | 2829 | from datetime import datetime
import endpoints
from google.appengine.ext import ndb
from google.appengine.api import taskqueue, memcache
from lib.db import Profile, Conference
from lib.models import ConflictException, ProfileForm, BooleanMessage, ConferenceForm, TeeShirtSize
MEMCACHE_ANNOUNCEMENTS_KEY = "RECENT_ANNOUNCEMENTS"
@ndb.transactional()
def _updateConferenceObject(self, request):
user = endpoints.get_current_user()
if not user:
raise endpoints.UnauthorizedException('Authorization required')
user_id = getUserId(user)
# copy ConferenceForm/ProtoRPC Message into dict
data = {field.name: getattr(request, field.name) for field in request.all_fields()}
# update existing conference
conf = ndb.Key(urlsafe=request.websafeConferenceKey).get()
# check that conference exists
if not conf:
raise endpoints.NotFoundException(
'No conference found with key: %s' % request.websafeConferenceKey)
# check that user is owner
if user_id != conf.organizerUserId:
raise endpoints.ForbiddenException(
'Only the owner can update the conference.')
# Not getting all the fields, so don't create a new object; just
# copy relevant fields from ConferenceForm to Conference object
for field in request.all_fields():
data = getattr(request, field.name)
# only copy fields where we get data
if data not in (None, []):
# special handling for dates (convert string to Date)
if field.name in ('startDate', 'endDate'):
data = datetime.strptime(data, "%Y-%m-%d").date()
if field.name == 'startDate':
conf.month = data.month
# write to Conference object
setattr(conf, field.name, data)
conf.put()
prof = ndb.Key(Profile, user_id).get()
return self._copyConferenceToForm(conf, getattr(prof, 'displayName'))
@staticmethod
def _cacheAnnouncement():
"""Create Announcement & assign to memcache; used by
memcache cron job & putAnnouncement().
"""
confs = Conference.query(ndb.AND(
Conference.seatsAvailable <= 5,
Conference.seatsAvailable > 0)
).fetch(projection=[Conference.name])
if confs:
# If there are almost sold out conferences,
# format announcement and set it in memcache
announcement = '%s %s' % (
'Last chance to attend! The following conferences '
'are nearly sold out:',
', '.join(conf.name for conf in confs))
memcache.set(MEMCACHE_ANNOUNCEMENTS_KEY, announcement)
else:
# If there are no sold out conferences,
# delete the memcache announcements entry
announcement = ""
memcache.delete(MEMCACHE_ANNOUNCEMENTS_KEY)
return announcement
| apache-2.0 | 8,938,905,317,155,928,000 | 35.269231 | 99 | 0.666313 | false |
aWhereAPI/API-Code-Samples | python/header.py | 1 | 7933 | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import str
from builtins import bytes
from future import standard_library
standard_library.install_aliases()
from builtins import object
import requests as rq
import base64
import pprint
import json
import random
from menus import Menus
class AWhereAPI(object):
def __init__(self, api_key, api_secret):
"""
Initializes the AWhereAPI class, which is used to perform HTTP requests
to the aWhere V2 API.
Docs:
http://developer.awhere.com/api/reference
"""
self._fields_url = 'https://api.awhere.com/v2/fields'
self._weather_url = 'https://api.awhere.com/v2/weather/fields'
self.api_key = api_key
self.api_secret = api_secret
self.base_64_encoded_secret_key = self.encode_secret_and_key(
self.api_key, self.api_secret)
self.auth_token = self.get_oauth_token(self.base_64_encoded_secret_key)
self._menu = Menus()
def create_test_field(self):
"""
        Performs a HTTP POST request to create and add a Field to your aWhere App.
Docs:
http://developer.awhere.com/api/reference/fields/create-field
"""
# Each Field requires a unique ID
testField = 'TestField-'
testField += str(random.randint(1, 999))
# Next, we build the request body. Please refer to the docs above for
# more info.
fieldBody = {'id': testField,
'name': testField,
'farmId': 'Farm1Test',
'centerPoint': {'latitude': 39.82,
'longitude': -98.56},
'acres': 100}
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
"Content-Type": 'application/json'
}
# Perform the POST request to create your Field
print('Attempting to create new field....\n')
response = rq.post(self._fields_url,
headers=auth_headers,
json=fieldBody)
# A successful request will return a 201 status code
print('The server responded with a status code of %d \n' %
response.status_code)
pprint.pprint(response.json())
print('\n\n\n')
if response.status_code == 201:
print(
'Your field "{0}" was successfully created!'.format(testField))
else:
            print('An error occurred. Please review the above response and try again.')
def delete_field_by_id(self, field_id):
"""
Performs a HTTP DELETE request to delete a Field from your aWhere App.
Docs: http://developer.awhere.com/api/reference/fields/delete-field
Args:
field_id: The field to be deleted
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
"Content-Type": 'application/json'
}
# Perform the POST request to Delete your Field
response = rq.delete(self._fields_url + '/{0}'.format(field_id),
headers=auth_headers)
print('The server responded with a status code of %d' %
response.status_code)
def encode_secret_and_key(self, key, secret):
"""
Docs:
http://developer.awhere.com/api/authentication
Returns:
Returns the base64-encoded {key}:{secret} combination, seperated by a colon.
"""
# Base64 Encode the Secret and Key
key_secret = '%s:%s' % (key, secret)
#print('\nKey and Secret before Base64 Encoding: %s' % key_secret)
encoded_key_secret = base64.b64encode(
bytes(key_secret, 'utf-8')).decode('ascii')
#print('Key and Secret after Base64 Encoding: %s' % encoded_key_secret)
return encoded_key_secret
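    # For example (hypothetical credentials, illustration only):
    #   encode_secret_and_key('my-key', 'my-secret')
    #   -> base64('my-key:my-secret') == 'bXkta2V5Om15LXNlY3JldA=='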
def get_fields(self):
"""
Performs a HTTP GET request to obtain all Fields you've created on your aWhere App.
Docs:
http://developer.awhere.com/api/reference/fields/get-fields
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
}
# Perform the HTTP request to obtain a list of all Fields
fields_response = rq.get(self._fields_url,
headers=auth_headers)
responseJSON = fields_response.json()
# Display the count of Fields the user has on their account
print('You have %s fields registered on your account' %
len(responseJSON["fields"]))
# Iterate over the fields and display their name and ID
print('{0} {1} \t\t {2}'.format('#', 'Field Name', 'Field ID'))
print('-------------------------------------------')
count = 0
for field in responseJSON["fields"]:
count += 1
print('{0}. {1} \t {2}\r'.format(
count, field["name"], field["id"]))
def get_weather_by_id(self, field_id):
"""
Performs a HTTP GET request to obtain Forecast, Historical Norms and Forecasts
Docs:
1. Forecast: http://developer.awhere.com/api/forecast-weather-api
2. Historical Norms: http://developer.awhere.com/api/reference/weather/norms
3. Observations: http://developer.awhere.com/api/reference/weather/observations
"""
# Setup the HTTP request headers
auth_headers = {
"Authorization": "Bearer %s" % self.auth_token,
}
# Perform the HTTP request to obtain the Forecast for the Field
response = rq.get(self._weather_url + '/{0}/forecasts?blockSize=24'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
print('\nThe above response from the Forecast API endpoint shows the forecast for your field location ({0}).'.format(field_id))
self._menu.os_pause()
# Next, let's obtain the historic norms for a Field
response = rq.get(self._weather_url + '/{0}/norms/04-04'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
print('\nThe above response from the Norms API endpoint shows the averages of the last 10 for an arbitrary date, April 4th.')
self._menu.os_pause()
# Finally, display the observed weather. Returns the last 7 days of data for the provided Field.
response = rq.get(self._weather_url + '/{0}/observations'.format(field_id),
headers=auth_headers)
pprint.pprint(response.json())
print('\nThe above response from the Observed Weather API endpoint shows the last 7 days of data for the provided field ({0})'.format(field_id))
def get_oauth_token(self, encoded_key_secret):
"""
Demonstrates how to make a HTTP POST request to obtain an OAuth Token
Docs:
http://developer.awhere.com/api/authentication
Returns:
The access token provided by the aWhere API
"""
auth_url = 'https://api.awhere.com/oauth/token'
auth_headers = {
"Authorization": "Basic %s" % encoded_key_secret,
'Content-Type': 'application/x-www-form-urlencoded'
}
body = "grant_type=client_credentials"
response = rq.post(auth_url,
headers=auth_headers,
data=body)
# .json method is a requests lib method that decodes the response
return response.json()['access_token']
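# Minimal usage sketch (assumes valid aWhere credentials; 'KEY' and 'SECRET'
# are placeholders, not real values):
#
#   api = AWhereAPI('KEY', 'SECRET')
#   api.get_fields()
#   api.create_test_field()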
| mit | -6,602,144,733,192,833,000 | 38.272277 | 152 | 0.581117 | false |
bmya/tkobr-addons | tko_l10n_br_point_of_sale_print_cupom_fiscal/account_journal.py | 1 | 1240 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# Thinkopen Brasil
# Copyright (C) Thinkopen Solutions Brasil (<http://www.tkobr.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api, fields, _
class account_journal(models.Model):
_inherit = 'account.journal'
fiscal_code = fields.Integer('Fiscal Code')
| agpl-3.0 | 3,911,694,348,782,817,300 | 40.333333 | 78 | 0.618548 | false |
clucas111/delineating-linear-elements | Code/clf_preprocessing.py | 1 | 1569 | # -*- coding: utf-8 -*-
"""
@author: Chris Lucas
"""
import numpy as np
import pandas as pd
def merge_dataframes(dfs, key_field_name):
"""
Merges dataframes containing data of one class into one dataframe with
the class in a column.
Parameters
----------
dfs : dict of DataFrames
        Dictionary mapping each class to the dataframe to be merged.
Returns
-------
df : DataFrame
The merged dataframe.
"""
df = pd.DataFrame()
for k, v in dfs.iteritems():
v[key_field_name] = k
df = df.append(v)
return df
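# Example (hypothetical data): merging two single-class frames into one
# labelled frame:
#
#   dfs = {'tree': pd.DataFrame({'h': [1.0]}),
#          'wall': pd.DataFrame({'h': [2.0]})}
#   df = merge_dataframes(dfs, 'class')
#   # df now has an 'h' column plus a 'class' column with 'tree'/'wall'.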
def correlated_features(df, features, corr_th=0.98):
"""
Determines highly correlated features which can consequently be dropped.
Parameters
----------
df : DataFrame
The feature values.
features : list of strings
The names of the features (column names in the dataframe)
to be checked.
corr_th : float
The correlation coefficient threshold to determine what is highly
correlated.
Returns
-------
    drops : list of strings
The names of the features which can be dropped.
"""
df_corr = df[features].astype(np.float64).corr(method='pearson')
mask = np.ones(df_corr.columns.size) - np.eye(df_corr.columns.size)
df_corr = mask * df_corr
drops = []
for col in df_corr.columns.values:
if not np.in1d([col], drops):
corr = df_corr[abs(df_corr[col]) > corr_th].index
drops = np.union1d(drops, corr)
return drops
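# Example (hypothetical): with two perfectly correlated columns, one of
# them is reported as droppable:
#
#   df = pd.DataFrame({'a': [1, 2, 3], 'b': [2, 4, 6], 'c': [1, 0, 1]})
#   correlated_features(df, ['a', 'b', 'c'])  # -> ['b']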
| apache-2.0 | -1,480,821,867,940,271,600 | 23.515625 | 76 | 0.605481 | false |
jeffery-do/Vizdoombot | examples/python/scenarios.py | 1 | 2862 | #!/usr/bin/env python
#####################################################################
# This script presents how to run some scenarios.
# Configuration is loaded from "../../examples/config/<SCENARIO_NAME>.cfg" file.
# <episodes> number of episodes are played.
# Random combination of buttons is chosen for every action.
# Game variables from state and last reward are printed.
#
# To see the scenario description go to "../../scenarios/README.md"
#####################################################################
from __future__ import print_function
import itertools as it
from random import choice
from time import sleep
from vizdoom import DoomGame, ScreenResolution
game = DoomGame()
# Choose scenario config file you wish to watch.
# Don't load two configs because the second will overwrite the first one.
# Multiple config files are ok but combining these ones doesn't make much sense.
game.load_config("../../examples/config/basic.cfg")
# game.load_config("../../examples/config/simpler_basic.cfg")
# game.load_config("../../examples/config/rocket_basic.cfg")
# game.load_config("../../examples/config/deadly_corridor.cfg")
# game.load_config("../../examples/config/deathmatch.cfg")
# game.load_config("../../examples/config/defend_the_center.cfg")
# game.load_config("../../examples/config/defend_the_line.cfg")
# game.load_config("../../examples/config/health_gathering.cfg")
# game.load_config("../../examples/config/my_way_home.cfg")
# game.load_config("../../examples/config/predict_position.cfg")
# game.load_config("../../examples/config/take_cover.cfg")
# Makes the screen bigger to see more details.
game.set_screen_resolution(ScreenResolution.RES_640X480)
game.set_window_visible(True)
game.init()
# Creates all possible actions depending on how many buttons there are.
actions_num = game.get_available_buttons_size()
actions = []
for perm in it.product([False, True], repeat=actions_num):
actions.append(list(perm))
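# For example, with 3 available buttons the loop above yields 2**3 = 8
# actions: [False, False, False], [False, False, True], ..., [True, True, True].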
episodes = 10
sleep_time = 0.028
for i in range(episodes):
print("Episode #" + str(i + 1))
# Not needed for the first episode but the loop is nicer.
game.new_episode()
while not game.is_episode_finished():
        # Gets the state and possibly do something with it
state = game.get_state()
# Makes a random action and save the reward.
reward = game.make_action(choice(actions))
print("State #" + str(state.number))
print("Game Variables:", state.game_variables)
print("Performed action:", game.get_last_action())
print("Last Reward:", reward)
print("=====================")
# Sleep some time because processing is too fast to watch.
if sleep_time > 0:
sleep(sleep_time)
print("Episode finished!")
print("total reward:", game.get_total_reward())
print("************************")
| mit | 2,760,447,843,961,944,000 | 36.168831 | 80 | 0.650245 | false |
dannyperry571/theapprentice | script.module.nanscrapers/lib/nanscrapers/scraperplugins/sezonlukdizi.py | 1 | 4411 | import json
import re
import urlparse
import requests
from BeautifulSoup import BeautifulSoup
from nanscrapers.common import random_agent, replaceHTMLCodes
from ..scraper import Scraper
import xbmc
class Sezonluldizi(Scraper):
domains = ['sezonlukdizi.com']
name = "sezonlukdizi"
def __init__(self):
self.base_link = 'http://sezonlukdizi.com'
def scrape_episode(self, title, show_year, year, season, episode, imdb, tvdb):
url_title = title.replace(' ', '-').replace('.', '-').replace(":","").replace("!","").replace("?","").lower()
episode_url = '/%s/%01d-sezon-%01d-bolum.html' % (url_title, int(season), int(episode))
return self.sources(replaceHTMLCodes(episode_url))
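    # Hypothetical example of the URL built above: title 'Breaking Bad',
    # season 1, episode 2 -> '/breaking-bad/1-sezon-2-bolum.html'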
def sources(self, url):
sources = []
try:
if url == None: return sources
absolute_url = urlparse.urljoin(self.base_link, url)
headers = {'User-Agent': random_agent()}
html = BeautifulSoup(requests.get(absolute_url, headers=headers, timeout=30).content)
pages = []
embed = html.findAll('div', attrs={'id': 'embed'})[0]
pages.append(embed.findAll('iframe')[0]["src"])
for page in pages:
try:
if not page.startswith('http'):
page = 'http:%s' % page
html = BeautifulSoup(requests.get(page, headers=headers, timeout=30).content)
# captions = html.findAll(text=re.compile('kind\s*:\s*(?:\'|\")captions(?:\'|\")'))
# if not captions: break
try:
link_text = html.findAll(text=re.compile('url\s*:\s*\'(http(?:s|)://api.pcloud.com/.+?)\''))[0]
link = re.findall('url\s*:\s*\'(http(?:s|)://api.pcloud.com/.+?)\'', link_text)[0]
variants = json.loads(requests.get(link, headers=headers, timeout=30).content)['variants']
for variant in variants:
if 'hosts' in variant and 'path' in variant and 'height' in variant:
video_url = '%s%s' % (variant['hosts'][0], variant['path'])
heigth = variant['height']
if not video_url.startswith('http'):
video_url = 'http://%s' % video_url
sources.append(
{'source': 'cdn', 'quality': str(heigth), 'scraper': self.name, 'url': video_url,
'direct': False})
except:
pass
try:
links_text = html.findAll(
text=re.compile('"?file"?\s*:\s*"(.+?)"\s*,\s*"?label"?\s*:\s*"(.+?)"'))
if len(links_text) > 0:
for link_text in links_text:
try:
links = re.findall('"?file"?\s*:\s*"([^"]+)"\s*,\s*"?label"?\s*:\s*"(\d+)p?[^"]*"',
link_text)
for link in links:
video_url = link[0]
if not video_url.startswith('http'):
video_url = 'http:%s' % video_url
try:
req = requests.head(video_url, headers=headers)
if req.headers['Location'] != "":
video_url = req.headers['Location']
except:
pass
quality = link[1]
sources.append(
{'source': 'google video', 'quality': quality, 'scraper': self.name,
'url': video_url, 'direct': True})
except:
continue
except:
pass
except:
pass
except:
pass
return sources
| gpl-2.0 | -8,988,593,595,415,021,000 | 45.431579 | 119 | 0.407844 | false |
hortonworks/hortonworks-sandbox | desktop/core/ext-py/Twisted/doc/web/howto/listings/PicturePile/picturepile.py | 1 | 1917 | """Run this with twistd -y."""
import os
from twisted.application import service, internet
from twisted.web.woven import page
from twisted.web import server, static
rootDirectory = os.path.expanduser("~/Pictures")
class DirectoryListing(page.Page):
templateFile = "directory-listing.html"
templateDirectory = os.path.split(os.path.abspath(__file__))[0]
def initialize(self, *args, **kwargs):
self.directory = kwargs['directory']
def wmfactory_title(self, request):
return self.directory
def wmfactory_directory(self, request):
files = os.listdir(self.directory)
for i in xrange(len(files)):
if os.path.isdir(os.path.join(self.directory,files[i])):
files[i] = files[i] + '/'
return files
def getDynamicChild(self, name, request):
# Protect against malicious URLs like '..'
if static.isDangerous(name):
return static.dangerousPathError
# Return a DirectoryListing or an ImageDisplay resource, depending on
# whether the path corresponds to a directory or to a file
path = os.path.join(self.directory,name)
if os.path.exists(path):
if os.path.isdir(path):
return DirectoryListing(directory=path)
else:
return ImageDisplay(image=path)
class ImageDisplay(page.Page):
templateFile="image-display.html"
templateDirectory = os.path.split(os.path.abspath(__file__))[0]
def initialize(self, *args, **kwargs):
self.image = kwargs['image']
def wmfactory_image(self, request):
return self.image
def wchild_preview(self, request):
return static.File(self.image)
site = server.Site(DirectoryListing(directory=rootDirectory))
application = service.Application("ImagePool")
parent = service.IServiceCollection(application)
internet.TCPServer(8088, site).setServiceParent(parent)
| apache-2.0 | 396,252,148,935,259,800 | 30.95 | 75 | 0.674491 | false |
davidwilson-85/easymap | graphic_output/Pillow-4.2.1/Tests/test_file_icns.py | 1 | 2781 | from helper import unittest, PillowTestCase
from PIL import Image
import sys
# sample icon file
TEST_FILE = "Tests/images/pillow.icns"
enable_jpeg2k = hasattr(Image.core, 'jp2klib_version')
class TestFileIcns(PillowTestCase):
def test_sanity(self):
# Loading this icon by default should result in the largest size
# (512x512@2x) being loaded
im = Image.open(TEST_FILE)
im.load()
self.assertEqual(im.mode, "RGBA")
self.assertEqual(im.size, (1024, 1024))
self.assertEqual(im.format, "ICNS")
@unittest.skipIf(sys.platform != 'darwin',
"requires MacOS")
def test_save(self):
im = Image.open(TEST_FILE)
temp_file = self.tempfile("temp.icns")
im.save(temp_file)
reread = Image.open(temp_file)
self.assertEqual(reread.mode, "RGBA")
self.assertEqual(reread.size, (1024, 1024))
self.assertEqual(reread.format, "ICNS")
def test_sizes(self):
# Check that we can load all of the sizes, and that the final pixel
# dimensions are as expected
im = Image.open(TEST_FILE)
for w, h, r in im.info['sizes']:
wr = w * r
hr = h * r
im2 = Image.open(TEST_FILE)
im2.size = (w, h, r)
im2.load()
self.assertEqual(im2.mode, 'RGBA')
self.assertEqual(im2.size, (wr, hr))
def test_older_icon(self):
# This icon was made with Icon Composer rather than iconutil; it still
# uses PNG rather than JP2, however (since it was made on 10.9).
im = Image.open('Tests/images/pillow2.icns')
for w, h, r in im.info['sizes']:
wr = w * r
hr = h * r
im2 = Image.open('Tests/images/pillow2.icns')
im2.size = (w, h, r)
im2.load()
self.assertEqual(im2.mode, 'RGBA')
self.assertEqual(im2.size, (wr, hr))
def test_jp2_icon(self):
# This icon was made by using Uli Kusterer's oldiconutil to replace
# the PNG images with JPEG 2000 ones. The advantage of doing this is
# that OS X 10.5 supports JPEG 2000 but not PNG; some commercial
# software therefore does just this.
# (oldiconutil is here: https://github.com/uliwitness/oldiconutil)
if not enable_jpeg2k:
return
im = Image.open('Tests/images/pillow3.icns')
for w, h, r in im.info['sizes']:
wr = w * r
hr = h * r
im2 = Image.open('Tests/images/pillow3.icns')
im2.size = (w, h, r)
im2.load()
self.assertEqual(im2.mode, 'RGBA')
self.assertEqual(im2.size, (wr, hr))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -9,077,484,952,851,127,000 | 30.965517 | 78 | 0.569939 | false |
MeteorKepler/RICGA | ricga/ops/image_processing.py | 1 | 8197 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Helper functions for image preprocessing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from ricga.reference.tf2keras_image_process import tf2keras_image
def distort_image(image, thread_id):
"""Perform random distortions on an image.
Args:
image: A float32 Tensor of shape [height, width, 3] with values in [0, 1).
thread_id: Preprocessing thread id used to select the ordering of color
distortions. There should be a multiple of 2 preprocessing threads.
Returns:
distorted_image: A float32 Tensor of shape [height, width, 3] with values in
[0, 1].
"""
# Randomly flip horizontally.
with tf.name_scope("flip_horizontal", values=[image]):
image = tf.image.random_flip_left_right(image)
# Randomly distort the colors based on thread id.
color_ordering = thread_id % 2
with tf.name_scope("distort_color", values=[image]):
if color_ordering == 0:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.032)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
elif color_ordering == 1:
image = tf.image.random_brightness(image, max_delta=32. / 255.)
image = tf.image.random_contrast(image, lower=0.5, upper=1.5)
image = tf.image.random_saturation(image, lower=0.5, upper=1.5)
image = tf.image.random_hue(image, max_delta=0.032)
# The random_* ops do not necessarily clamp.
image = tf.clip_by_value(image, 0.0, 1.0)
return image
def process_image(encoded_image,
is_training,
height,
width,
ssd_model,
resize_height=346,
resize_width=346,
thread_id=0,
image_format="jpeg"):
"""Decode an image, resize and apply random distortions.
In training, images are distorted slightly differently depending on thread_id.
Args:
encoded_image: String Tensor containing the image.
is_training: Boolean; whether preprocessing for training or eval.
height: Height of the output image.
width: Width of the output image.
ssd_model: SSD300 model.
resize_height: If > 0, resize height before crop to final dimensions.
resize_width: If > 0, resize width before crop to final dimensions.
thread_id: Preprocessing thread id used to select the ordering of color
distortions. There should be a multiple of 2 preprocessing threads.
image_format: "jpeg" or "png".
Returns:
A float32 Tensor of shape [height, width, 3] with values in [-1, 1].
Raises:
ValueError: If image_format is invalid.
"""
# Helper function to log an image summary to the visualizer. Summaries are
# only logged in half of the thread.
def image_summary(name, image_to_sum):
if thread_id % 2 == 0:
tf.summary.image(name, tf.expand_dims(image_to_sum, 0))
# Decode image into a float32 Tensor of shape [?, ?, 3] with values in [0, 1).
with tf.name_scope("decode", values=[encoded_image]):
if image_format == "jpeg":
image = tf.image.decode_jpeg(encoded_image, channels=3)
elif image_format == "png":
image = tf.image.decode_png(encoded_image, channels=3)
else:
raise ValueError("Invalid image format: %s" % image_format)
image = tf.image.convert_image_dtype(image, dtype=tf.float32)
original_image = image
image_summary("original_image", image)
# Resize image.
assert (resize_height > 0) == (resize_width > 0)
if resize_height:
image = tf.image.resize_images(image,
size=[resize_height, resize_width],
method=tf.image.ResizeMethod.BILINEAR)
# Crop to final dimensions.
if is_training:
image = tf.random_crop(image, [height, width, 3])
else:
# Central crop, assuming resize_height > height, resize_width > width.
image = tf.image.resize_image_with_crop_or_pad(image, height, width)
image_summary("resized_image", image)
# Randomly distort the image.
if is_training:
image = distort_image(image, thread_id)
image_summary("final_image", image)
# Rescale to [-1,1] instead of [0, 1]
image = tf.subtract(image, 0.5)
image = tf.multiply(image, 2.0)
# ssd process
image_300x300 = tf.image.resize_images(original_image, [300, 300])
image_300x300_ssd_input = tf2keras_image(image_300x300)
# with tf.variable_scope("ssd"):
ssd_output = ssd_model(tf.expand_dims(image_300x300_ssd_input, 0))[0]
with tf.variable_scope("ssd_out_processing"):
mbox_loc = ssd_output[:, :4]
variances = ssd_output[:, -4:]
mbox_priorbox = ssd_output[:, -8:-4]
mbox_conf = ssd_output[:, 4:-8]
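        # Standard SSD box decoding, matching the arithmetic below:
        #   decoded_center = prior_center + loc * prior_size * variance
        #   decoded_size   = prior_size * exp(loc * variance)
        # then the center/size pair is converted to corner coordinates.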
prior_width = mbox_priorbox[:, 2] - mbox_priorbox[:, 0]
prior_height = mbox_priorbox[:, 3] - mbox_priorbox[:, 1]
prior_center_x = 0.5 * (mbox_priorbox[:, 2] + mbox_priorbox[:, 0])
prior_center_y = 0.5 * (mbox_priorbox[:, 3] + mbox_priorbox[:, 1])
decode_bbox_center_x = mbox_loc[:, 0] * prior_width * variances[:, 0]
decode_bbox_center_x += prior_center_x
decode_bbox_center_y = mbox_loc[:, 1] * prior_width * variances[:, 1]
decode_bbox_center_y += prior_center_y
decode_bbox_width = tf.exp(mbox_loc[:, 2] * variances[:, 2])
decode_bbox_width *= prior_width
decode_bbox_height = tf.exp(mbox_loc[:, 3] * variances[:, 3])
decode_bbox_height *= prior_height
decode_bbox_xmin = tf.expand_dims(decode_bbox_center_x - 0.5 * decode_bbox_width, -1)
decode_bbox_ymin = tf.expand_dims(decode_bbox_center_y - 0.5 * decode_bbox_height, -1)
decode_bbox_xmax = tf.expand_dims(decode_bbox_center_x + 0.5 * decode_bbox_width, -1)
decode_bbox_ymax = tf.expand_dims(decode_bbox_center_y + 0.5 * decode_bbox_height, -1)
decode_bbox = tf.concat((decode_bbox_ymin,
decode_bbox_xmax,
decode_bbox_ymax,
decode_bbox_xmin), axis=-1)
decode_bbox = tf.minimum(tf.maximum(decode_bbox, 0.0), 1.0)
mbox_conf_without_background = tf.slice(mbox_conf, [0, 1], [-1, -1])
mbox_conf_max = tf.reduce_max(mbox_conf_without_background, 1)
idx = tf.image.non_max_suppression(decode_bbox, mbox_conf_max, max_output_size=1)
idx = tf.reshape(idx, [1])
good_box = decode_bbox[idx[0]]
region_image = tf.image.crop_and_resize(tf.expand_dims(image_300x300, 0),
boxes=tf.expand_dims(good_box, 0),
box_ind=tf.constant([0], dtype=tf.int32),
crop_size=[height, width],
name="region_images")[0]
image_summary("region_image", region_image)
# Rescale to [-1,1] instead of [0, 1]
region_image = tf.subtract(region_image, 0.5)
region_image = tf.multiply(region_image, 2.0)
return image, region_image
# return ssd, region_image
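# Illustrative usage sketch (assumptions: `encoded_jpeg` is a scalar string
# tensor and `ssd300` is a built SSD300 Keras model; both names are made up):
#
#   image, region = process_image(encoded_jpeg, is_training=True,
#                                 height=299, width=299, ssd_model=ssd300)
#
# `image` is the distorted full frame scaled to [-1, 1]; `region` is the crop
# around the highest-confidence SSD detection, rescaled the same way.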
| apache-2.0 | -7,595,954,824,494,623,000 | 41.91623 | 94 | 0.605831 | false |
MaximeBiset/care4care | main/migrations/0040_auto_20141205_1736.py | 1 | 12414 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import multiselectfield.db.fields
import django.core.validators
import re
class Migration(migrations.Migration):
dependencies = [
('main', '0039_merge'),
]
operations = [
migrations.AlterField(
model_name='contact',
name='comments',
field=models.CharField(max_length=255, verbose_name='Additional comments', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='email',
field=models.EmailField(max_length=75, verbose_name='Email address', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='contact',
name='relationship',
field=models.CharField(max_length=255, verbose_name='Your relationship with that person', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='order',
field=models.IntegerField(choices=[(1, 'First contact'), (2, 'Contact'), (3, 'Last contact')], default=0, verbose_name='Priority'),
preserve_default=True,
),
migrations.AlterField(
model_name='emergencycontact',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='birth_date',
field=models.DateField(verbose_name='Birthday', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='credit',
field=models.IntegerField(default=0, verbose_name='Remaining credit'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='email',
field=models.EmailField(max_length=75, verbose_name='Email address'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='first_name',
field=models.CharField(max_length=30, verbose_name='First name'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='how_found',
field=multiselectfield.db.fields.MultiSelectField(max_length=41, verbose_name='How did you hear about care4care ?', choices=[('internet', 'The Internet'), ('show', 'A presentation, brochure, flyer,... '), ('branch', 'The local branch'), ('member', 'Another member'), ('friends', 'Friends or family'), ('other', 'Other')]),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='languages',
field=multiselectfield.db.fields.MultiSelectField(max_length=8, verbose_name='Spoken languages', choices=[('fr', 'French'), ('en', 'English'), ('nl', 'Dutch')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='last_name',
field=models.CharField(max_length=30, verbose_name='Name'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='location',
field=models.CharField(max_length=256, verbose_name='Address', null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='mobile_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (mobile)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='phone_number',
field=models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message="Your phone number must be in format '+99999999'. Up to 15 digits.", regex='^\\+?1?\\d{9,15}$')], verbose_name='Phone number (home)', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='status',
field=models.IntegerField(choices=[(1, 'Active'), (2, 'On vacation'), (3, 'Disabled')], default=1),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='user_type',
field=models.IntegerField(choices=[(1, 'Member'), (2, 'Non-member'), (3, 'Verified member')], verbose_name='Account type', default=1, help_text='A member can help or be helped while a non-member is a professional who registers to access patient data. Please choose the one that suits you'),
preserve_default=True,
),
migrations.AlterField(
model_name='user',
name='username',
            field=models.CharField(max_length=30, unique=True, verbose_name='Username', validators=[django.core.validators.RegexValidator(re.compile('^[\\w.@+-]+$', 32), 'Enter a valid username. No more than 30 characters. There may be numbers and characters @/./+/-/_', 'invalid')]),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='criminal_record',
field=models.FileField(upload_to='documents/', null=True, verbose_name='Criminal record'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='recomendation_letter_1',
field=models.FileField(upload_to='documents/', null=True, verbose_name='Letter of recommendation n°1'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifiedinformation',
name='recomendation_letter_2',
            field=models.FileField(upload_to='documents/', null=True, verbose_name='Letter of recommendation n°2'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='additional_info',
field=models.TextField(max_length=300, verbose_name='Additional information', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='can_wheelchair',
field=models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], verbose_name='Can you carry a wheelchair in your car?', default=False),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='drive_license',
field=multiselectfield.db.fields.MultiSelectField(max_length=11, verbose_name='Type of driving license', choices=[(1, 'Moped'), (2, 'Motorcycle'), (3, 'Car'), (4, 'Truck'), (5, 'Bus'), (6, 'Tractor')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='have_car',
field=models.BooleanField(choices=[(True, 'Yes'), (False, 'No')], verbose_name='Do you have a car?', default=False),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='hobbies',
field=models.TextField(max_length=200, verbose_name='Your hobby', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='mail_preferences',
field=models.IntegerField(choices=[(1, 'Message box'), (2, 'Mail')], default=1, verbose_name='Receive my messages'),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='offered_job',
field=multiselectfield.db.fields.MultiSelectField(max_length=21, verbose_name='What jobs you want to do?', choices=[('1', 'Visit home'), ('2', 'Companionship'), ('3', 'Transport by car'), ('4', 'Shopping'), ('5', 'House sitting'), ('6', 'Manual jobs'), ('7', 'Gardening'), ('8', 'Pet sitting'), ('9', 'Personal care'), ('a', 'Administrative'), ('b', 'Other')], blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verifieduser',
name='receive_help_from_who',
field=models.IntegerField(choices=[(5, 'All'), (3, 'Verified member'), (6, 'My favorite members')], default=5, verbose_name='Receive offers and demands'),
preserve_default=True,
),
]
| agpl-3.0 | -4,895,663,956,325,399,000 | 47.29572 | 385 | 0.584757 | false |
martinbalsam/timing-rp | timingrp/sanit.py | 1 | 2984 | import re
import argparse
"""
This script sanitizes the raw data exported from "Timing.app":
it adds the proper double quotes around the "Path" attribute,
and it removes unwanted double quotes inside the "Path" attribute
that may yield an unwanted escape of the field.
--- TODO ---
speedup:
    as of now I'm iterating twice over the whole list.
    Maybe the sanitizing can be done in a single regex, but I spent way too
    much time coming up with these. I'll leave it to somebody else (no one)
cleanup:
    delete unused files
"""
parser = argparse.ArgumentParser(description="writes the input data into a sanitized .csv file")
parser.add_argument('path', nargs=1, type = str, help='the path of the input raw .csv file to parse')
args = parser.parse_args()
"""paths of the input and output data
each file has the same name structure:
file.csv
file_tmp_.csv
file_san_.csv (THIS IS THE OUTPUT WE WANT, SANITIZED AND SHIT)
file_err_.log
"""
input_data = args.path[0]
temp_data = args.path[0][:-4]+"_tmp_.csv"
output_data = args.path[0][:-4]+"_san_.csv"
errors_log = args.path[0][:-4]+"_err_.log"
# NOTE: this only works if there are no unexpected line breaks in the input
errors = open(errors_log,'w')
with open(input_data,'r') as original:
with open(temp_data,'w') as new:
#writes the csv header
new.write(original.readline())
for line in original:
#regex to isolate the 'path' attribute
matches = re.search('(^[^,]+,)(.*)(,\d{2}/\d{2}/\d{2} \d{2}:\d{2},\d{2}/\d{2}/\d{2} \d{2}:\d{2},.*$)', line)
try:
#add quotation around the path attribute and writes it in a new file
new.write(matches.group(1)+'"'+matches.group(2)+'"'+matches.group(3)+'\n')
#catches lines that don't match the regex and writes them in an errors.log file
except AttributeError:
errors.write(line)
continue
# Now recheck the whole list to catch any extra double quotation marks (")
# in the path attribute; if found, delete them.
with open(temp_data,'r') as old:
with open(output_data,'w') as new:
new.write(old.readline())
for line in old:
#regex that catches any path that contains one or more double quotation sign (")
matches = re.search('(^[^,]+,")(.*".*)(",\d{2}/\d{2}/\d{2} \d{2}:\d{2},\d{2}/\d{2}/\d{2} \d{2}:\d{2},.*$)', line)
if matches is not None:
#deletes any double quotation mark (") and writes tha sanitized line in a new file
new.write(matches.group(1)+matches.group(2).replace('"','')+matches.group(3)+'\n')
#if the line is ok, it just writes the line in the new file
else:
new.write(line)
# populate a pandas DataFrame object with the data and proper datetime objects
"""dateparse = lambda x: pd.datetime.strptime(x, '%d/%m/%y %H:%M')
a = pd.read_csv('bin/fine.csv', parse_dates=[2,3], date_parser=dateparse)
"""
| gpl-2.0 | -1,916,926,826,659,169,300 | 39.324324 | 125 | 0.629692 | false |
chromium/chromium | build/android/gyp/native_libraries_template.py | 7 | 1781 | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
NATIVE_LIBRARIES_TEMPLATE = """\
// This file is autogenerated by
// build/android/gyp/write_native_libraries_java.py
// Please do not change its content.
package org.chromium.build;
public class NativeLibraries {{
public static final int CPU_FAMILY_UNKNOWN = 0;
public static final int CPU_FAMILY_ARM = 1;
public static final int CPU_FAMILY_MIPS = 2;
public static final int CPU_FAMILY_X86 = 3;
// Set to true to enable the use of the Chromium Linker.
public static {MAYBE_FINAL}boolean sUseLinker{USE_LINKER};
public static {MAYBE_FINAL}boolean sUseLibraryInZipFile{USE_LIBRARY_IN_ZIP_FILE};
public static {MAYBE_FINAL}boolean sUseModernLinker{USE_MODERN_LINKER};
// This is the list of native libraries to be loaded (in the correct order)
// by LibraryLoader.java.
// TODO(cjhopman): This is public since it is referenced by NativeTestActivity.java
// directly. The two ways of library loading should be refactored into one.
public static {MAYBE_FINAL}String[] LIBRARIES = {{{LIBRARIES}}};
// This is the expected version of the 'main' native library, which is the one that
// implements the initial set of base JNI functions including
// base::android::nativeGetVersionName()
// TODO(torne): This is public to work around classloader issues in Trichrome
// where NativeLibraries is not in the same dex as LibraryLoader.
// We should instead split up Java code along package boundaries.
public static {MAYBE_FINAL}String sVersionNumber = {VERSION_NUMBER};
public static {MAYBE_FINAL}int sCpuFamily = {CPU_FAMILY};
}}
"""
| bsd-3-clause | -5,940,693,984,739,795,000 | 44.666667 | 87 | 0.730488 | false |
shobhitmishra/CodingProblems | LeetCode/Session3/BinaryTreePaths.py | 1 | 1149 | from typing import List
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def binaryTreePaths(self, root: TreeNode) -> List[str]:
result = []
if not root:
return result
self.binaryTreePathsHelper(root, [], result)
return result
def binaryTreePathsHelper(self, root, pathSoFar, result):
if root:
pathSoFar.append(str(root.val))
if not root.left and not root.right:
path = "->".join(pathSoFar)
result.append(path)
self.binaryTreePathsHelper(root.left, pathSoFar, result)
self.binaryTreePathsHelper(root.right, pathSoFar, result)
pathSoFar.pop()
root = TreeNode(10)
root.left = TreeNode(3)
root.left.left = TreeNode(2)
root.left.right = TreeNode(8)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(9)
root.right = TreeNode(15)
root.right.left = TreeNode(13)
root.right.right = TreeNode(17)
root.right.right.right = TreeNode(19)
ob = Solution()
print(ob.binaryTreePaths(root))
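# Expected output for the tree built above (pre-order, left subtree first):
# ['10->3->2', '10->3->8->7', '10->3->8->9', '10->15->13', '10->15->17->19']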
| mit | -7,620,993,149,577,762,000 | 26.357143 | 69 | 0.619669 | false |
ksmit799/Toontown-Source | toontown/building/DistributedKnockKnockDoor.py | 1 | 6420 | from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from direct.distributed.ClockDelta import *
from KnockKnockJokes import *
from toontown.toonbase import ToontownGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM
import DistributedAnimatedProp
from toontown.distributed import DelayDelete
from toontown.toonbase import TTLocalizer
from toontown.hood import ZoneUtil
class DistributedKnockKnockDoor(DistributedAnimatedProp.DistributedAnimatedProp):
def __init__(self, cr):
DistributedAnimatedProp.DistributedAnimatedProp.__init__(self, cr)
self.fsm.setName('DistributedKnockKnockDoor')
self.rimshot = None
self.knockSfx = None
return
def generate(self):
DistributedAnimatedProp.DistributedAnimatedProp.generate(self)
self.avatarTracks = []
self.avatarId = 0
def announceGenerate(self):
DistributedAnimatedProp.DistributedAnimatedProp.announceGenerate(self)
self.accept('exitKnockKnockDoorSphere_' + str(self.propId), self.exitTrigger)
self.acceptAvatar()
def disable(self):
self.ignore('exitKnockKnockDoorSphere_' + str(self.propId))
self.ignore('enterKnockKnockDoorSphere_' + str(self.propId))
DistributedAnimatedProp.DistributedAnimatedProp.disable(self)
def delete(self):
DistributedAnimatedProp.DistributedAnimatedProp.delete(self)
if self.rimshot:
self.rimshot = None
if self.knockSfx:
self.knockSfx = None
return
def acceptAvatar(self):
self.acceptOnce('enterKnockKnockDoorSphere_' + str(self.propId), self.enterTrigger)
def setAvatarInteract(self, avatarId):
DistributedAnimatedProp.DistributedAnimatedProp.setAvatarInteract(self, avatarId)
def avatarExit(self, avatarId):
if avatarId == self.avatarId:
for track in self.avatarTracks:
track.finish()
DelayDelete.cleanupDelayDeletes(track)
self.avatarTracks = []
def knockKnockTrack(self, avatar, duration):
        if avatar is None:
return
self.rimshot = base.loadSfx('phase_5/audio/sfx/AA_heal_telljoke.mp3')
self.knockSfx = base.loadSfx('phase_5/audio/sfx/GUI_knock_3.mp3')
joke = KnockKnockJokes[self.propId % len(KnockKnockJokes)]
place = base.cr.playGame.getPlace()
if place:
zone = place.getZoneId()
branch = ZoneUtil.getBranchZone(zone)
if branch == ToontownGlobals.SillyStreet:
if self.propId == 44:
joke = KnockKnockContestJokes[ToontownGlobals.SillyStreet]
elif branch == ToontownGlobals.LoopyLane:
if self.propId in KnockKnockContestJokes[ToontownGlobals.LoopyLane].keys():
joke = KnockKnockContestJokes[ToontownGlobals.LoopyLane][self.propId]
elif branch == ToontownGlobals.PunchlinePlace:
if self.propId == 1:
joke = KnockKnockContestJokes[ToontownGlobals.PunchlinePlace]
elif branch == ToontownGlobals.PolarPlace:
if self.propId in KnockKnockContestJokes[ToontownGlobals.PolarPlace].keys():
joke = KnockKnockContestJokes[ToontownGlobals.PolarPlace][self.propId]
self.nametag = None
self.nametagNP = None
doorNP = render.find('**/KnockKnockDoorSphere_' + str(self.propId) + ';+s')
if doorNP.isEmpty():
self.notify.warning('Could not find KnockKnockDoorSphere_%s' % self.propId)
return
self.nametag = NametagGroup()
self.nametag.setAvatar(doorNP)
self.nametag.setFont(ToontownGlobals.getToonFont())
self.nametag.setName(TTLocalizer.DoorNametag)
self.nametag.setActive(0)
self.nametag.manage(base.marginManager)
self.nametag.getNametag3d().setBillboardOffset(4)
nametagNode = self.nametag.getNametag3d().upcastToPandaNode()
self.nametagNP = render.attachNewNode(nametagNode)
self.nametagNP.setName('knockKnockDoor_nt_' + str(self.propId))
pos = doorNP.node().getSolid(0).getCenter()
self.nametagNP.setPos(pos + Vec3(0, 0, avatar.getHeight() + 2))
d = duration * 0.125
        track = Sequence(
            Parallel(Sequence(Wait(d * 0.5), SoundInterval(self.knockSfx)),
                     Func(self.nametag.setChat, TTLocalizer.DoorKnockKnock,
                          CFSpeech),
                     Wait(d)),
            Func(avatar.setChatAbsolute, TTLocalizer.DoorWhosThere,
                 CFSpeech | CFTimeout, openEnded=0),
            Wait(d),
            Func(self.nametag.setChat, joke[0], CFSpeech),
            Wait(d),
            Func(avatar.setChatAbsolute, joke[0] + TTLocalizer.DoorWhoAppendix,
                 CFSpeech | CFTimeout, openEnded=0),
            Wait(d),
            Func(self.nametag.setChat, joke[1], CFSpeech),
            Parallel(SoundInterval(self.rimshot, startTime=2.0), Wait(d * 4)),
            Func(self.cleanupTrack))
track.delayDelete = DelayDelete.DelayDelete(avatar, 'knockKnockTrack')
return track
def cleanupTrack(self):
avatar = self.cr.doId2do.get(self.avatarId, None)
if avatar:
avatar.clearChat()
if self.nametag:
self.nametag.unmanage(base.marginManager)
self.nametagNP.removeNode()
self.nametag = None
self.nametagNP = None
return
def enterOff(self):
DistributedAnimatedProp.DistributedAnimatedProp.enterOff(self)
def exitOff(self):
DistributedAnimatedProp.DistributedAnimatedProp.exitOff(self)
def enterAttract(self, ts):
DistributedAnimatedProp.DistributedAnimatedProp.enterAttract(self, ts)
self.acceptAvatar()
def exitAttract(self):
DistributedAnimatedProp.DistributedAnimatedProp.exitAttract(self)
def enterPlaying(self, ts):
DistributedAnimatedProp.DistributedAnimatedProp.enterPlaying(self, ts)
if self.avatarId:
avatar = self.cr.doId2do.get(self.avatarId, None)
track = self.knockKnockTrack(avatar, 8)
if track != None:
track.start(ts)
self.avatarTracks.append(track)
return
def exitPlaying(self):
DistributedAnimatedProp.DistributedAnimatedProp.exitPlaying(self)
for track in self.avatarTracks:
track.finish()
DelayDelete.cleanupDelayDeletes(track)
self.avatarTracks = []
self.avatarId = 0
| mit | 8,960,783,725,099,481,000 | 43.275862 | 573 | 0.678505 | false |
rhinstaller/blivet | blivet/autopart.py | 1 | 20576 | #
# Copyright (C) 2009-2015 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
# Red Hat Author(s): Dave Lehman <[email protected]>
#
"""This module provides functions related to automatic partitioning."""
import parted
from decimal import Decimal
from . import util
from .size import Size
from .devices.partition import PartitionDevice, FALLBACK_DEFAULT_PART_SIZE
from .devices.luks import LUKSDevice
from .devices.lvm import ThPoolReserveSpec
from .errors import NoDisksError, NotEnoughFreeSpaceError
from .formats import get_format
from .partitioning import do_partitioning, get_free_regions, grow_lvm
from .i18n import _
from .static_data import luks_data
from pykickstart.constants import AUTOPART_TYPE_BTRFS, AUTOPART_TYPE_LVM, AUTOPART_TYPE_LVM_THINP, AUTOPART_TYPE_PLAIN
import logging
log = logging.getLogger("anaconda.blivet.autopart")
# maximum ratio of swap size to disk size (10 %)
MAX_SWAP_DISK_RATIO = Decimal('0.1')
AUTOPART_THPOOL_RESERVE = ThPoolReserveSpec(20, Size("1 GiB"), Size("100 GiB"))
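# Assumption, inferred from how the spec is used below: reserve 20 % of the
# thin pool's size for growth, clamped to the range [1 GiB, 100 GiB].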
def swap_suggestion(quiet=False, hibernation=False, disk_space=None):
"""
Suggest the size of the swap partition that will be created.
:param quiet: whether to log size information or not
:type quiet: bool
:param hibernation: calculate swap size big enough for hibernation
:type hibernation: bool
:param disk_space: how much disk space is available
:type disk_space: :class:`~.size.Size`
:return: calculated swap size
"""
mem = util.total_memory()
mem = ((mem / 16) + 1) * 16
if not quiet:
log.info("Detected %s of memory", mem)
sixtyfour_GiB = Size("64 GiB")
# the succeeding if-statement implements the following formula for
# suggested swap size.
#
# swap(mem) = 2 * mem, if mem < 2 GiB
# = mem, if 2 GiB <= mem < 8 GiB
# = mem / 2, if 8 GIB <= mem < 64 GiB
# = 4 GiB, if mem >= 64 GiB
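    #
    # Worked examples (illustrative):
    #   mem = 1 GiB   -> swap = 2 GiB
    #   mem = 4 GiB   -> swap = 4 GiB
    #   mem = 16 GiB  -> swap = 8 GiB
    #   mem = 128 GiB -> swap = 4 GiB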
if mem < Size("2 GiB"):
swap = 2 * mem
elif mem < Size("8 GiB"):
swap = mem
elif mem < sixtyfour_GiB:
swap = mem / 2
else:
swap = Size("4 GiB")
if hibernation:
if mem <= sixtyfour_GiB:
swap = mem + swap
else:
log.info("Ignoring --hibernation option on systems with %s of RAM or more", sixtyfour_GiB)
if disk_space is not None and not hibernation:
max_swap = disk_space * MAX_SWAP_DISK_RATIO
if swap > max_swap:
log.info("Suggested swap size (%(swap)s) exceeds %(percent)d %% of "
"disk space, using %(percent)d %% of disk space (%(size)s) "
"instead.", {"percent": MAX_SWAP_DISK_RATIO * 100,
"swap": swap,
"size": max_swap})
swap = max_swap
if not quiet:
log.info("Swap attempt of %s", swap)
return swap
def _get_candidate_disks(storage):
""" Return a list of disks to be used for autopart/reqpart.
Disks must be partitioned and have a single free region large enough
for a default-sized (500MiB) partition. They must also be in
:attr:`StorageDiscoveryConfig.clear_part_disks` if it is non-empty.
:param storage: a Blivet instance
:type storage: :class:`~.Blivet`
:returns: a list of partitioned disks with at least 500MiB of free space
:rtype: list of :class:`~.devices.StorageDevice`
"""
disks = []
for disk in storage.partitioned:
if not disk.format.supported or disk.protected:
continue
if storage.config.clear_part_disks and \
(disk.name not in storage.config.clear_part_disks):
continue
part = disk.format.first_partition
while part:
if not part.type & parted.PARTITION_FREESPACE:
part = part.nextPartition()
continue
if Size(part.getLength(unit="B")) > PartitionDevice.default_size:
disks.append(disk)
break
part = part.nextPartition()
return disks
def _schedule_implicit_partitions(storage, disks):
""" Schedule creation of a lvm/btrfs member partitions for autopart.
We create one such partition on each disk. They are not allocated until
later (in :func:`doPartitioning`).
:param storage: a :class:`~.Blivet` instance
:type storage: :class:`~.Blivet`
:param disks: list of partitioned disks with free space
:type disks: list of :class:`~.devices.StorageDevice`
:returns: list of newly created (unallocated) partitions
:rtype: list of :class:`~.devices.PartitionDevice`
"""
# create a separate pv or btrfs partition for each disk with free space
devs = []
# only schedule the partitions if either lvm or btrfs autopart was chosen
if storage.autopart_type == AUTOPART_TYPE_PLAIN:
return devs
for disk in disks:
if storage.encrypted_autopart:
fmt_type = "luks"
fmt_args = {"passphrase": luks_data.encryption_passphrase,
"cipher": storage.encryption_cipher,
"escrow_cert": storage.autopart_escrow_cert,
"add_backup_passphrase": storage.autopart_add_backup_passphrase,
"min_luks_entropy": luks_data.min_entropy}
else:
if storage.autopart_type in (AUTOPART_TYPE_LVM, AUTOPART_TYPE_LVM_THINP):
fmt_type = "lvmpv"
else:
fmt_type = "btrfs"
fmt_args = {}
part = storage.new_partition(fmt_type=fmt_type,
fmt_args=fmt_args,
grow=True,
parents=[disk])
storage.create_device(part)
devs.append(part)
return devs
def _schedule_partitions(storage, disks, implicit_devices, requests=None):
""" Schedule creation of autopart/reqpart partitions.
This only schedules the requests for actual partitions.
:param storage: a :class:`~.Blivet` instance
:type storage: :class:`~.Blivet`
:param disks: list of partitioned disks with free space
:type disks: list of :class:`~.devices.StorageDevice`
:param requests: list of partitioning requests to operate on,
or `~.storage.autopart_requests` by default
:type requests: list of :class:`~.partspec.PartSpec` instances
:returns: None
:rtype: None
"""
if not requests:
requests = storage.autopart_requests
# basis for requests with required_space is the sum of the sizes of the
# two largest free regions
all_free = (Size(reg.getLength(unit="B")) for reg in get_free_regions(disks))
all_free = sorted(all_free, reverse=True)
if not all_free:
# this should never happen since we've already filtered the disks
# to those with at least 500MiB free
log.error("no free space on disks %s", [d.name for d in disks])
return
free = all_free[0]
if len(all_free) > 1:
free += all_free[1]
# The boot disk must be set at this point. See if any platform-specific
# stage1 device we might allocate already exists on the boot disk.
stage1_device = None
for device in storage.devices:
if storage.bootloader.stage1_disk not in device.disks:
continue
if storage.bootloader.is_valid_stage1_device(device, early=True):
stage1_device = device
break
#
# First pass is for partitions only. We'll do LVs later.
#
for request in requests:
if ((request.lv and storage.do_autopart and
storage.autopart_type in (AUTOPART_TYPE_LVM,
AUTOPART_TYPE_LVM_THINP)) or
(request.btr and storage.autopart_type == AUTOPART_TYPE_BTRFS)):
continue
if request.required_space and request.required_space > free:
continue
elif request.fstype in ("prepboot", "efi", "macefi", "hfs+") and \
(storage.bootloader.skip_bootloader or stage1_device):
# there should never be a need for more than one of these
# partitions, so skip them.
log.info("skipping unneeded stage1 %s request", request.fstype)
log.debug("%s", request)
if request.fstype in ["efi", "macefi"] and stage1_device:
# Set the mountpoint for the existing EFI boot partition
stage1_device.format.mountpoint = "/boot/efi"
log.debug("%s", stage1_device)
continue
elif request.fstype == "biosboot":
is_gpt = (stage1_device and
getattr(stage1_device.format, "label_type", None) == "gpt")
has_bios_boot = (stage1_device and
any([p.format.type == "biosboot"
for p in storage.partitions
if p.disk == stage1_device]))
if (storage.bootloader.skip_bootloader or
not (stage1_device and stage1_device.is_disk and
is_gpt and not has_bios_boot)):
# there should never be a need for more than one of these
# partitions, so skip them.
log.info("skipping unneeded stage1 %s request", request.fstype)
log.debug("%s", request)
log.debug("%s", stage1_device)
continue
if request.size > all_free[0]:
# no big enough free space for the requested partition
raise NotEnoughFreeSpaceError(_("No big enough free space on disks for "
"automatic partitioning"))
if request.encrypted and storage.encrypted_autopart:
fmt_type = "luks"
fmt_args = {"passphrase": luks_data.encryption_passphrase,
"cipher": storage.encryption_cipher,
"escrow_cert": storage.autopart_escrow_cert,
"add_backup_passphrase": storage.autopart_add_backup_passphrase,
"min_luks_entropy": luks_data.min_entropy}
else:
fmt_type = request.fstype
fmt_args = {}
dev = storage.new_partition(fmt_type=fmt_type,
fmt_args=fmt_args,
size=request.size,
grow=request.grow,
maxsize=request.max_size,
mountpoint=request.mountpoint,
parents=disks,
weight=request.weight)
# schedule the device for creation
storage.create_device(dev)
if request.encrypted and storage.encrypted_autopart:
luks_fmt = get_format(request.fstype,
device=dev.path,
mountpoint=request.mountpoint)
luks_dev = LUKSDevice("luks-%s" % dev.name,
fmt=luks_fmt,
size=dev.size,
parents=dev)
storage.create_device(luks_dev)
if storage.do_autopart and \
storage.autopart_type in (AUTOPART_TYPE_LVM, AUTOPART_TYPE_LVM_THINP,
AUTOPART_TYPE_BTRFS):
# doing LVM/BTRFS -- make sure the newly created partition fits in some
# free space together with one of the implicitly requested partitions
smallest_implicit = sorted(implicit_devices, key=lambda d: d.size)[0]
if (request.size + smallest_implicit.size) > all_free[0]:
# not enough space to allocate the smallest implicit partition
# and the request, make the implicit partitions smaller in
# attempt to make space for the request
for implicit_req in implicit_devices:
implicit_req.size = FALLBACK_DEFAULT_PART_SIZE
return implicit_devices
def _schedule_volumes(storage, devs):
""" Schedule creation of autopart lvm/btrfs volumes.
Schedules encryption of member devices if requested, schedules creation
of the container (:class:`~.devices.LVMVolumeGroupDevice` or
:class:`~.devices.BTRFSVolumeDevice`) then schedules creation of the
autopart volume requests.
:param storage: a :class:`~.Blivet` instance
:type storage: :class:`~.Blivet`
:param devs: list of member partitions
:type devs: list of :class:`~.devices.PartitionDevice`
:returns: None
:rtype: None
If an appropriate bootloader stage1 device exists on the boot drive, any
autopart request to create another one will be skipped/discarded.
"""
if not devs:
return
if storage.autopart_type in (AUTOPART_TYPE_LVM, AUTOPART_TYPE_LVM_THINP):
new_container = storage.new_vg
new_volume = storage.new_lv
format_name = "lvmpv"
else:
new_container = storage.new_btrfs
new_volume = storage.new_btrfs
format_name = "btrfs"
if storage.encrypted_autopart:
pvs = []
for dev in devs:
pv = LUKSDevice("luks-%s" % dev.name,
fmt=get_format(format_name, device=dev.path),
size=dev.size,
parents=dev)
pvs.append(pv)
storage.create_device(pv)
else:
pvs = devs
# create a vg containing all of the autopart pvs
container = new_container(parents=pvs)
storage.create_device(container)
#
# Convert storage.autopart_requests into Device instances and
# schedule them for creation.
#
# Second pass, for LVs only.
pool = None
for request in storage.autopart_requests:
btr = storage.autopart_type == AUTOPART_TYPE_BTRFS and request.btr
lv = (storage.autopart_type in (AUTOPART_TYPE_LVM,
AUTOPART_TYPE_LVM_THINP) and request.lv)
thinlv = (storage.autopart_type == AUTOPART_TYPE_LVM_THINP and
request.lv and request.thin)
if thinlv and pool is None:
# create a single thin pool in the vg
pool = storage.new_lv(parents=[container], thin_pool=True, grow=True)
storage.create_device(pool)
# make sure VG reserves space for the pool to grow if needed
container.thpool_reserve = AUTOPART_THPOOL_RESERVE
if not btr and not lv and not thinlv:
continue
# required space isn't relevant on btrfs
if (lv or thinlv) and \
request.required_space and request.required_space > container.size:
continue
if request.fstype is None:
if btr:
# btrfs volumes can only contain btrfs filesystems
request.fstype = "btrfs"
else:
request.fstype = storage.default_fstype
kwargs = {"mountpoint": request.mountpoint,
"fmt_type": request.fstype}
if lv or thinlv:
if thinlv:
parents = [pool]
else:
parents = [container]
kwargs.update({"parents": parents,
"grow": request.grow,
"maxsize": request.max_size,
"size": request.size,
"thin_volume": thinlv})
else:
kwargs.update({"parents": [container],
"size": request.size,
"subvol": True})
dev = new_volume(**kwargs)
# schedule the device for creation
storage.create_device(dev)
def do_reqpart(storage, requests):
"""Perform automatic partitioning of just required platform-specific
partitions. This is incompatible with do_autopart.
:param storage: a :class:`~.Blivet` instance
:type storage: :class:`~.Blivet`
:param requests: list of partitioning requests to operate on,
or `~.storage.autopart_requests` by default
:type requests: list of :class:`~.partspec.PartSpec` instances
"""
if not any(d.format.supported for d in storage.partitioned):
raise NoDisksError(_("No usable disks selected"))
disks = _get_candidate_disks(storage)
if disks == []:
raise NotEnoughFreeSpaceError(_("Not enough free space on disks for "
"automatic partitioning"))
_schedule_partitions(storage, disks, [], requests=requests)
def do_autopart(storage, data, min_luks_entropy=None):
""" Perform automatic partitioning.
:param storage: a :class:`~.Blivet` instance
:type storage: :class:`~.Blivet`
:param data: kickstart data
:type data: :class:`pykickstart.BaseHandler`
:param min_luks_entropy: minimum entropy in bits required for
luks format creation; uses default when None
:type min_luks_entropy: int
:attr:`Blivet.do_autopart` controls whether this method creates the
automatic partitioning layout. :attr:`Blivet.autopart_type` controls
which variant of autopart used. It uses one of the pykickstart
AUTOPART_TYPE_* constants. The set of eligible disks is defined in
:attr:`StorageDiscoveryConfig.clear_part_disks`.
.. note::
Clearing of partitions is handled separately, in
:meth:`~.Blivet.clear_partitions`.
"""
# pylint: disable=unused-argument
log.debug("do_autopart: %s", storage.do_autopart)
log.debug("encrypted_autopart: %s", storage.encrypted_autopart)
log.debug("autopart_type: %s", storage.autopart_type)
log.debug("clear_part_type: %s", storage.config.clear_part_type)
log.debug("clear_part_disks: %s", storage.config.clear_part_disks)
log.debug("autopart_requests:\n%s", "".join([str(p) for p in storage.autopart_requests]))
log.debug("storage.disks: %s", [d.name for d in storage.disks])
log.debug("storage.partitioned: %s", [d.name for d in storage.partitioned if d.format.supported])
log.debug("all names: %s", [d.name for d in storage.devices])
log.debug("boot disk: %s", getattr(storage.boot_disk, "name", None))
disks = []
devs = []
if not storage.do_autopart:
return
if not any(d.format.supported for d in storage.partitioned):
raise NoDisksError(_("No usable disks selected"))
if min_luks_entropy is not None:
luks_data.min_entropy = min_luks_entropy
disks = _get_candidate_disks(storage)
devs = _schedule_implicit_partitions(storage, disks)
log.debug("candidate disks: %s", disks)
log.debug("devs: %s", devs)
if disks == []:
raise NotEnoughFreeSpaceError(_("Not enough free space on disks for "
"automatic partitioning"))
devs = _schedule_partitions(storage, disks, devs)
# run the autopart function to allocate and grow partitions
do_partitioning(storage)
_schedule_volumes(storage, devs)
# grow LVs
grow_lvm(storage)
storage.set_up_bootloader()
# only newly added swaps should appear in the fstab
new_swaps = (dev for dev in storage.swaps if not dev.format.exists)
storage.set_fstab_swaps(new_swaps)
| lgpl-2.1 | 1,190,404,045,290,055,400 | 38.267176 | 118 | 0.597492 | false |
wavelets/ThinkStats2 | code/moments.py | 1 | 1149 | """This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2012 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
import math
import thinkstats2
def RawMoment(xs, k):
return sum(x**k for x in xs) / float(len(xs))
def CentralMoment(xs, k):
xbar = RawMoment(xs, 1)
return sum((x - xbar)**k for x in xs) / len(xs)
def StandardizedMoment(xs, k):
var = CentralMoment(xs, 2)
sigma = math.sqrt(var)
return CentralMoment(xs, k) / sigma**k
def Skewness(xs):
return StandardizedMoment(xs, 3)
def Median(xs):
cdf = thinkstats2.MakeCdfFromList(xs)
return cdf.Value(0.5)
def PearsonMedianSkewness(xs):
median = Median(xs)
mean = RawMoment(xs, 1)
var = CentralMoment(xs, 2)
std = math.sqrt(var)
gp = 3 * (mean - median) / std
return gp
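# Note: |mean - median| <= std for any distribution, so gp always lies in
# the interval [-3, 3].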
def main():
xs = range(10)
print 'mean', RawMoment(xs, 1)
print 'median', Median(xs)
print 'var', CentralMoment(xs, 2)
print 'skewness', Skewness(xs)
print 'Pearson skewness', PearsonMedianSkewness(xs)
if __name__ == '__main__':
main()
| gpl-3.0 | -3,409,726,838,714,081,000 | 19.890909 | 55 | 0.646649 | false |
shinigota/dagpi_splendor | SplendorCode/src/mvc/Display.py | 1 | 32173 | from tkinter import *
from src.element.Card import Card
from src.element.ResourceType import ResourceType
from src.game.GameState import GameState
from src.mvc.EventType import EventType
from src.mvc.GameBoard import GameBoard
from src.mvc.GameRules import GameRules
from src.player.AI import AI
import time
class Display:
window = None
text = "test"
game_rules = None
game_board = None
w = None
h = None
nb_players = 1
false_position = None
def __init__(self):
self.window = Tk()
self.create_image()
def create_window(self):
print('Display -- create_window')
self.window.title(GameRules.game_name)
self.w, self.h = 1900, 1000
self.window.geometry("%dx%d+0+0" % (self.w, self.h))
self.window.config(bg=None)
def make_entry(self, parent, caption, var):
print('Display -- make_entry')
Label(parent, text=caption).pack()
entry = Entry(parent, textvariable=var)
entry.pack()
return entry
def popup_start_click_action(self):
print('Display -- popup_start_click_action')
popup = Toplevel(height=250, width=280)
# popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(popup, text="Sélectionnez vos parametres", height=1,
width=30).pack()
self.user_name = StringVar()
self.user_name.set("Player")
Label(popup, text="Name:").pack()
entry = Entry(popup, textvariable=self.user_name)
entry.pack()
        self.nb_players = 1
self.players_level = dict()
self.players_canvas = dict()
self.players_position = dict()
self.players_position[self.nb_players] = Variable()
self.players_position[self.nb_players].set(int(self.nb_players))
Label(popup, text="Position:").pack()
entry = Entry(popup,
textvariable=self.players_position[self.nb_players])
entry.pack()
canvas = Canvas(popup, height=20,
width=160, background="grey")
        canvas.create_text(82, 10, text="Add an opponent", fill="black")
canvas.bind("<Button-1>", lambda event,
p=popup,
c=canvas:
self.add_player_click_action(p, c))
canvas.pack()
# self.canvas_validate = Canvas(popup, height=20,
# width=60, background="grey")
# self.canvas_validate.create_text(30, 10, text="Valider",
# fill="black")
# self.canvas_validate.bind("<Button-1>", lambda event,
# p=popup:
# self.validate_popup_action(p))
# self.canvas_validate.pack()
def add_player_click_action(self, popup, canvas):
print('Display -- add_player_click_action')
        try:
            self.false_position.pack_forget()
        except Exception:
            # the warning label may not have been created yet
            pass
if self.nb_players >= 2:
self.canvas_validate.pack_forget()
text_nb_players = self.nb_players
Label(popup, text="%s %s" % ("Adversaire n°", text_nb_players)).pack()
self.nb_players = self.nb_players + 1
self.players_level[self.nb_players] = Variable()
self.players_level[self.nb_players].set(0)
self.players_position[self.nb_players] = Variable()
        c = Checkbutton(popup, text="Hard difficulty",
variable=self.players_level[self.nb_players])
c.pack()
self.players_position[self.nb_players].set(int(self.nb_players))
Label(popup, text="Position:").pack()
entry = Entry(popup,
textvariable=self.players_position[self.nb_players])
entry.pack()
self.players_canvas[self.nb_players] = c
if self.nb_players == 4:
self.canvas_validate.pack()
canvas.pack_forget()
elif self.nb_players == 2:
self.canvas_validate = Canvas(popup, height=20,
width=60, background="grey")
            self.canvas_validate.create_text(30, 10, text="Confirm",
fill="black")
self.canvas_validate.bind("<Button-1>", lambda event,
p=popup:
self.validate_popup_action(p))
self.canvas_validate.pack()
else:
self.canvas_validate.pack()
def validate_popup_action(self, popup):
print('Display -- validate_popup_action')
        try:
            self.false_position.pack_forget()
        except Exception:
            # the warning label may not have been created yet
            pass
accept = True
temp_positions = []
for item in self.players_position:
try:
temp_positions.append(int(self.players_position[item].get()))
except:
accept = False
if accept:
for item in range(1, self.nb_players + 1):
if item not in temp_positions:
accept = False
if accept:
popup.destroy()
final_players = []
for key in range(1, self.nb_players+1):
players_dict = dict()
players_dict["position"] = self.players_position[key].get()
if key == 1:
players_dict["name"] = self.user_name.get()
players_dict["difficulty"] = 0
else:
players_dict["name"] = "IA %d" % key
players_dict["difficulty"] = self.players_level[key].get()
final_players.append(players_dict)
self.game_rules.event(EventType.START, final_players)
else:
            self.false_position = Label(popup, text="Invalid positions",
fg="red")
self.false_position.pack()
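        # Illustrative shape of `final_players` handed to the START event
        # (values made up):
        #   [{"position": 1, "name": "Player", "difficulty": 0},
        #    {"position": 2, "name": "IA 2", "difficulty": 1}]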
def display_tiles(self):
print('Display -- display_tiles')
i = 1
y = 100
for tile in self.game_board.displayed_tiles:
x = 170 + 120 * (i - 1)
self.display_tile(self.window, x, y, tile, None)
i += 1
def display_tile(self, canvas, x, y, tile, event):
print('Display -- display_tile')
canvas = Canvas(canvas, width=100, height=100,
background='#725202')
canvas.create_image(50, 50, image=self.img_tile)
canvas.create_image(13, 13, image=self.get_image_points(tile.points))
i = 1
for key in tile.gems_conditions:
number = tile.gems_conditions[key]
if number > 0:
textcolor = "white"
if ResourceType.get_color(key) == "white":
textcolor = "black"
if i == 1:
canvas.create_image(27, 40, image=self.get_image_rect_gem(
key))
txtBuy1 = canvas.create_text(25, 40, text=number,
fill=textcolor)
i = 2
elif i == 2:
canvas.create_image(27, 80, image=self.get_image_rect_gem(
key))
txtBuy2 = canvas.create_text(25, 80, text=number,
fill=textcolor)
i = 3
elif i == 3:
canvas.create_image(77, 80, image=self.get_image_rect_gem(
key))
txtBuy3 = canvas.create_text(75, 80, text=number,
fill=textcolor)
i = 0
canvas.place(x=x, y=y)
if event is not None:
canvas.bind("<Button-1>", lambda event, e=event,
t=tile: self.game_rules.event(e,
t))
def display_cards(self):
print('Display -- display_cards')
for lvl in range(1, int(self.game_rules.nb_lvl_card) + 1):
i = 1
for card in self.game_board.displayed_cards[lvl]:
x = 170 + 120 * (i - 1)
y = 490 - (130 * (lvl - 1))
self.display_card(self.window, x, y, card,
EventType.CLICK_DISPLAYED_CARD)
i += 1
def display_card(self, canvas, x, y, card, event):
print('Display -- display_card')
canvas = Canvas(canvas, width=100, height=120,
background=self.get_color(int(card.level)))
canvas.create_image(50, 75, image=self.get_image_card_gem(
card.income_gem))
canvas.create_image(15, 20, image=self.get_image_points(card.points))
i = 1
for key in card.purchase_gems:
number = card.purchase_gems[key]
if number > 0:
textcolor = "white"
if ResourceType.get_color(key) == "white":
textcolor = "black"
if i == 1:
canvas.create_image(25, 100,
image=self.get_image_circle_gem(key))
txtBuy1 = canvas.create_text(25, 100, text=number,
fill=textcolor)
i = 2
elif i == 2:
canvas.create_image(75, 100,
image=self.get_image_circle_gem(key))
txtBuy2 = canvas.create_text(75, 100, text=number,
fill=textcolor)
i = 3
elif i == 3:
canvas.create_image(25, 60,
image=self.get_image_circle_gem(key))
txtBuy3 = canvas.create_text(25, 60, text=number,
fill=textcolor)
i = 4
elif i == 4:
canvas.create_image(75, 60,
image=self.get_image_circle_gem(key))
txtBuy4 = canvas.create_text(75, 60, text=number,
fill=textcolor)
i = 0
canvas.place(x=x,
y=y)
if event is not None:
canvas.bind("<Button-1>",
lambda event, e=event,
c=card: self.game_rules.event(e, c))
def display_stacks(self):
print('Display -- display_stacks')
for i in range(1, int(self.game_rules.nb_lvl_card) + 1):
self.display_stack(i, self.game_board.is_deck_empty(i))
def display_stack(self, level, empty):
print('Display -- display_stack')
color = Display.get_color(level)
if empty:
color = "grey"
canvas = Canvas(self.window, width=100, height=120)
canvas.create_image(50, 60, image=self.get_image_deck(level, empty))
canvas.place(x=50, y=490 - (130 * (level - 1)))
canvas.bind("<Button-1>", lambda event, e=EventType.CLICK_DECK_CARD,
l=level: self.game_rules.event(e, l))
def display_bank(self, bank):
print('Display -- display_bank')
x = 70
y = 650
for token in ResourceType.get_sorted_resources():
if token == "Gold":
self.display_gold(self.window, 70, 115, bank[token])
else:
self.display_gem(self.window, x, y, bank[token], token)
x += 100
def display_gold(self, canvas, x, y, nb):
print('Display -- display_gold')
canvas = Canvas(canvas, width=80, height=80)
canvas.create_image(40, 40, image=self.get_image_token_gem("Gold"))
canvas.create_image(40, 40, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
def display_gem(self, canvas, x, y, nb, gem):
print('Display -- display_gem')
canvas = Canvas(canvas, width=80, height=80)
canvas.create_image(40, 40, image=self.get_image_token_gem(gem))
canvas.create_image(40, 40, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_TAKE_TOKEN_GAMEBOARD,
g=gem: self.game_rules.event(e, g))
###################### Display hand of player #################################
def display_players(self):
print('Display -- display_players')
x = 1300
y = 40
for player in self.game_board.players:
if type(player) == AI:
self.display_player_ia(x, y, player)
y += 280
else:
self.display_player_human(player)
def display_player_human(self, player):
print('Display -- display_player_human')
color = "grey"
if self.game_board.get_current_player() == player:
color = "orange"
canvas = Canvas(self.window, width=500, height=270,
highlightbackground=color)
self.display_player_bank(canvas, 100, 10, player)
canvas.create_text(50, 45, text=player.nickname, fill="black")
canvas.create_text(50, 65, text=str(player.calculate_total_points()) +
" / "
"%d" %
self.game_rules.nb_points_end,
fill="black")
y = 130
i = 1
for card in player.reserved_cards:
x = 10 + 120 * (i - 1)
self.display_card(canvas, x, y, card, EventType.RESERVE_PURCHASE)
i += 1
self.display_player_tile(canvas, 370, 140, player)
canvas.place(x=750, y=320)
def display_player_ia(self, x, y, player):
print('Display -- display_player_ia')
color = "grey"
if self.game_board.get_current_player() == player:
color = "orange"
canvas = Canvas(self.window, width=500, height=270,
highlightbackground=color)
canvas.place(x=x, y=y)
self.display_player_bank(canvas, 100, 10, player)
canvas.create_text(50, 45, text=player.nickname, fill="black")
canvas.create_text(50, 65, text=str(player.calculate_total_points()) +
" / "
"%d" %
self.game_rules.nb_points_end,
fill="black")
y = 130
i = 1
for card in player.reserved_cards:
x = 10 + 120 * (i - 1)
self.display_card_ia(canvas, x, y, card.level)
i += 1
self.display_player_tile(canvas, 370, 140, player)
def display_card_ia(self, canvas, x, y, level):
print('Display -- display_card_ia')
color = Display.get_color(level)
canvas = Canvas(canvas, width=100, height=120)
canvas.create_image(50, 60, image=self.get_image_deck(level, False))
canvas.place(x=x, y=y)
def display_player_tile(self, canvas, x, y, player):
print('Display -- display_player_tile')
canvas = Canvas(canvas, width=100, height=100,
background='#725202')
canvas.create_image(50, 50, image=self.img_tile)
canvas.create_image(50, 50, image=self.get_image_points(
len(player.owned_tiles)))
canvas.place(x=x, y=y)
def display_player_bank(self, canvas, x, y, player):
print('Display -- display_player_bank')
canvas = Canvas(canvas, width=390, height=120)
canvas.place(x=x, y=y)
x = 0
y = 60
for token in ResourceType.get_sorted_resources():
if token == "Gold":
self.display_player_gold(canvas, 320, 30, player.bank[token])
else:
self.display_player_gem(canvas, x, y, player.bank[token],
token)
x += 60
x = 0
y = 0
for token in ResourceType.get_sorted_resources():
if token == "Gold":
pass
else:
self.display_player_income_card(canvas, x, y,
player.get_card_income()[
token],
token)
x += 60
def display_player_gold(self, canvas, x, y, nb):
print('Display -- display_player_gold')
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(30, 30,
image=self.get_image_token_bank_gem("Gold"))
canvas.create_image(30, 30, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_GIVE_BACK_PLAYER_TOKEN,
g="Gold": self.game_rules.event(e, g))
def display_player_gem(self, canvas, x, y, nb, gem):
print('Display -- display_player_gem')
color = "white"
if ResourceType.get_color(gem) == "white":
color = "black"
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(30, 30, image=self.get_image_token_bank_gem(gem))
canvas.create_image(30, 30, image=self.get_image_points(nb))
canvas.place(x=x, y=y)
canvas.bind("<Button-1>",
lambda event, e=EventType.CLICK_GIVE_BACK_PLAYER_TOKEN,
g=gem: self.game_rules.event(e, g))
def display_player_income_card(self, canvas, x, y, nb, gem):
print('Display -- display_player_income_card')
color = "white"
if ResourceType.get_color(gem) == "white":
color = "black"
canvas = Canvas(canvas, width=60, height=60)
canvas.create_image(35, 30, image=self.get_image_rect_bank_gem(gem))
canvas.create_text(30, 30, text=nb, fill=color)
canvas.place(x=x, y=y)
def display_text_help(self):
print('Display -- display_text_help')
canvas = Canvas(self.window, width=500, height=70)
canvas.create_text(100, 30, text=self.game_board.help_text)
canvas.place(x=0, y=0)
def popup_select_card_action(self, isreserved, ispurchase, card):
        print('Display -- popup_select_card_action')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=250, width=280)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(self.popup, text="Selectionnez votre action :", height=1,
width=30).place(x=40, y=10)
self.display_card(self.popup, 90, 50, card, None)
if isreserved:
canvas = Canvas(self.popup, height=20,
width=60, background="grey")
canvas.create_text(30, 10, text="Reserver", fill="black")
canvas.bind("<Button-1>", lambda event,
e=EventType.POPUP_RESERVE,
c=card:
self.click_on_popup(e, c))
canvas.place(x=60, y=200)
if ispurchase:
canvas = Canvas(self.popup, height=20,
width=60, background="grey")
canvas.create_text(30, 10, text="Acheter", fill="black")
canvas.bind("<Button-1>", lambda event,
e=EventType.POPUP_PURCHASE,
c=card:
self.click_on_popup(e, c))
canvas.place(x=160, y=200)
def popup_select_tile_action(self, tiles):
print('Display -- popup_select_tile_action')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=170, width=565)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
Label(self.popup, text="Selectionnez votre Noble:", height=1,
width=30).place(x=180, y=10)
x = 10
y = 50
for tile in tiles:
self.display_tile(self.popup, x, y, tile, EventType.CLICK_TILE)
x += 110
def popup_txt(self, txt):
print('Display -- popup_txt')
# GameState.toggle_modal(True)
self.popup = Toplevel(height=300, width=260)
self.popup.protocol("WM_DELETE_WINDOW", self.on_exit)
label = Label(self.popup,
text=txt, height=7,
width=30)
label.place(x=20, y=50)
def click_on_popup(self, event, objet):
print('Display -- click_on_popup')
self.popup.destroy()
# GameState.toggle_modal(False)
self.game_rules.event(event, objet)
def on_exit(self):
print('Display -- on_exit')
self.game_rules.event(EventType.CLOSE_POPUP, None)
self.popup.destroy()
def create_image(self):
print('Display -- create_image')
self.img_bg = PhotoImage(file='../res/bakground.gif')
self.img_button = PhotoImage(file='../res/Button.gif')
self.img0 = PhotoImage(file='../res/0.gif')
self.img0 = self.img0.subsample(3, 3)
self.img1 = PhotoImage(file='../res/1.gif')
self.img1 = self.img1.subsample(3, 3)
self.img2 = PhotoImage(file='../res/2.gif')
self.img2 = self.img2.subsample(3, 3)
self.img3 = PhotoImage(file='../res/3.gif')
self.img3 = self.img3.subsample(3, 3)
self.img4 = PhotoImage(file='../res/4.gif')
self.img4 = self.img4.subsample(3, 3)
self.img5 = PhotoImage(file='../res/5.gif')
self.img5 = self.img5.subsample(3, 3)
self.img6 = PhotoImage(file='../res/6.gif')
self.img6 = self.img6.subsample(3, 3)
self.img7 = PhotoImage(file='../res/7.gif')
self.img7 = self.img7.subsample(3, 3)
self.img_card_D = PhotoImage(file='../res/card_diamant.gif')
self.img_card_D = self.img_card_D.subsample(5, 5)
self.img_card_E = PhotoImage(file='../res/card_emeraude.gif')
self.img_card_E = self.img_card_E.subsample(5, 5)
self.img_card_O = PhotoImage(file='../res/card_onyx.gif')
self.img_card_O = self.img_card_O.subsample(5, 5)
self.img_card_R = PhotoImage(file='../res/card_rubis.gif')
self.img_card_R = self.img_card_R.subsample(5, 5)
self.img_card_S = PhotoImage(file='../res/card_saphir.gif')
self.img_card_S = self.img_card_S.subsample(5, 5)
self.img_circle_D = PhotoImage(file='../res/white_circle.gif')
self.img_circle_D = self.img_circle_D.subsample(2, 2)
self.img_circle_E = PhotoImage(file='../res/green_circle.gif')
self.img_circle_E = self.img_circle_E.subsample(2, 2)
self.img_circle_O = PhotoImage(file='../res/black_circle.gif')
self.img_circle_O = self.img_circle_O.subsample(2, 2)
self.img_circle_R = PhotoImage(file='../res/red_circle.gif')
self.img_circle_R = self.img_circle_R.subsample(2, 2)
self.img_circle_S = PhotoImage(file='../res/blue_circle.gif')
self.img_circle_S = self.img_circle_S.subsample(2, 2)
self.img_rect_D = PhotoImage(file='../res/white_rect.gif')
self.img_rect_D = self.img_rect_D.subsample(2, 2)
self.img_rect_E = PhotoImage(file='../res/green_rect.gif')
self.img_rect_E = self.img_rect_E.subsample(2, 2)
self.img_rect_O = PhotoImage(file='../res/black_rect.gif')
self.img_rect_O = self.img_rect_O.subsample(2, 2)
self.img_rect_R = PhotoImage(file='../res/red_rect.gif')
self.img_rect_R = self.img_rect_R.subsample(2, 2)
self.img_rect_S = PhotoImage(file='../res/blue_rect.gif')
self.img_rect_S = self.img_rect_S.subsample(2, 2)
self.img_rect_bank_D = PhotoImage(file='../res/white_rect.gif')
self.img_rect_bank_E = PhotoImage(file='../res/green_rect.gif')
self.img_rect_bank_O = PhotoImage(file='../res/black_rect.gif')
self.img_rect_bank_R = PhotoImage(file='../res/red_rect.gif')
self.img_rect_bank_S = PhotoImage(file='../res/blue_rect.gif')
self.img_token_D = PhotoImage(file='../res/token_diamant.gif')
self.img_token_D = self.img_token_D.subsample(3, 3)
self.img_token_E = PhotoImage(file='../res/token_emeraude.gif')
self.img_token_E = self.img_token_E.subsample(3, 3)
self.img_token_R = PhotoImage(file='../res/token_rubis.gif')
self.img_token_R = self.img_token_R.subsample(3, 3)
self.img_token_S = PhotoImage(file='../res/token_saphir.gif')
self.img_token_S = self.img_token_S.subsample(3, 3)
self.img_token_O = PhotoImage(file='../res/token_onyx.gif')
self.img_token_O = self.img_token_O.subsample(3, 3)
self.img_token_G = PhotoImage(file='../res/token_gold.gif')
self.img_token_G = self.img_token_G.subsample(3, 3)
self.img_token_bank_D = PhotoImage(file='../res/token_diamant.gif')
self.img_token_bank_D = self.img_token_bank_D.subsample(4, 4)
self.img_token_bank_E = PhotoImage(file='../res/token_emeraude.gif')
self.img_token_bank_E = self.img_token_bank_E.subsample(4, 4)
self.img_token_bank_R = PhotoImage(file='../res/token_rubis.gif')
self.img_token_bank_R = self.img_token_bank_R.subsample(4, 4)
self.img_token_bank_S = PhotoImage(file='../res/token_saphir.gif')
self.img_token_bank_S = self.img_token_bank_S.subsample(4, 4)
self.img_token_bank_O = PhotoImage(file='../res/token_onyx.gif')
self.img_token_bank_O = self.img_token_bank_O.subsample(4, 4)
self.img_token_bank_G = PhotoImage(file='../res/token_gold.gif')
self.img_token_bank_G = self.img_token_bank_G.subsample(4, 4)
self.img_deck_1 = PhotoImage(file='../res/deck_lvl1.gif')
self.img_deck_1 = self.img_deck_1.subsample(3, 3)
self.img_deck_empty_1 = PhotoImage(file='../res/deck_lvl1_empty.gif')
self.img_deck_empty_1 = self.img_deck_empty_1.subsample(7, 7)
self.img_deck_2 = PhotoImage(file='../res/deck_lvl2.gif')
self.img_deck_2 = self.img_deck_2.subsample(3, 3)
self.img_deck_empty_2 = PhotoImage(file='../res/deck_lvl2_empty.gif')
self.img_deck_empty_2 = self.img_deck_empty_2.subsample(3, 3)
self.img_deck_3 = PhotoImage(file='../res/deck_lvl3.gif')
self.img_deck_3 = self.img_deck_3.subsample(3, 3)
self.img_deck_empty_3 = PhotoImage(file='../res/deck_lvl3_empty.gif')
self.img_deck_empty_3 = self.img_deck_empty_3.subsample(3, 3)
self.img_tile = PhotoImage(file='../res/tuile.gif')
self.img_tile = self.img_tile.subsample(1, 1)
def get_image_points(self, points):
print('Display -- get_image_points')
if points == 0:
return self.img0
elif points == 1:
return self.img1
elif points == 2:
return self.img2
elif points == 3:
return self.img3
elif points == 4:
return self.img4
elif points == 5:
return self.img5
elif points == 6:
return self.img6
elif points == 7:
return self.img7
def get_image_card_gem(self, gem):
print('Display -- get_image_card_gem')
if gem == "Diamond":
return self.img_card_D
elif gem == "Emerald":
return self.img_card_E
elif gem == "Sapphire":
return self.img_card_S
elif gem == "Onyx":
return self.img_card_O
elif gem == "Ruby":
return self.img_card_R
def get_image_deck(self, lvl, empty):
print('Display -- get_image_deck')
if lvl == 1:
if empty:
return self.img_deck_empty_1
else:
return self.img_deck_1
elif lvl == 2:
if empty:
return self.img_deck_empty_2
else:
return self.img_deck_2
elif lvl == 3:
if empty:
return self.img_deck_empty_3
else:
return self.img_deck_3
def get_image_circle_gem(self, gem):
print('Display -- get_image_circle_gem')
if gem == "Diamond":
return self.img_circle_D
elif gem == "Emerald":
return self.img_circle_E
elif gem == "Sapphire":
return self.img_circle_S
elif gem == "Onyx":
return self.img_circle_O
elif gem == "Ruby":
return self.img_circle_R
def get_image_rect_gem(self, gem):
print('Display -- get_image_rect_gem')
if gem == "Diamond":
return self.img_rect_D
elif gem == "Emerald":
return self.img_rect_E
elif gem == "Sapphire":
return self.img_rect_S
elif gem == "Onyx":
return self.img_rect_O
elif gem == "Ruby":
return self.img_rect_R
def get_image_token_gem(self, gem):
print('Display -- get_image_token_gem')
if gem == "Diamond":
return self.img_token_D
elif gem == "Emerald":
return self.img_token_E
elif gem == "Sapphire":
return self.img_token_S
elif gem == "Onyx":
return self.img_token_O
elif gem == "Ruby":
return self.img_token_R
elif gem == "Gold":
return self.img_token_G
def get_image_rect_bank_gem(self, gem):
print('Display -- get_image_rect_bank_gem')
if gem == "Diamond":
return self.img_rect_bank_D
elif gem == "Emerald":
return self.img_rect_bank_E
elif gem == "Sapphire":
return self.img_rect_bank_S
elif gem == "Onyx":
return self.img_rect_bank_O
elif gem == "Ruby":
return self.img_rect_bank_R
def get_image_token_bank_gem(self, gem):
print('Display -- get_image_token_bank_gem')
if gem == "Diamond":
return self.img_token_bank_D
elif gem == "Emerald":
return self.img_token_bank_E
elif gem == "Sapphire":
return self.img_token_bank_S
elif gem == "Onyx":
return self.img_token_bank_O
elif gem == "Ruby":
return self.img_token_bank_R
elif gem == "Gold":
return self.img_token_bank_G
    @staticmethod
    def get_color(level):
        print('Display -- get_color')
        color = 'grey'  # fallback for unexpected levels
        if level == 1:
            color = '#0483f9'
        elif level == 2:
            color = '#05e002'
        elif level == 3:
            color = '#ffac07'
        return color
def refresh(self):
print('Display -- refresh')
canvas = Canvas(self.window, height=self.h, width=self.w)
canvas.place(x=0, y=0)
self.display_bank(self.game_board.bank)
self.display_stacks()
self.display_cards()
self.display_tiles()
self.display_players()
self.display_text_help()
def launch(self):
print('Display -- launch')
canvas = Canvas(self.window, height=self.h, width=self.w)
canvas.create_image(1000, 500, image=self.img_bg)
button_quit = Canvas(self.window, height=100, width=300)
button_start = Canvas(self.window, height=100, width=300)
button_start.create_image(151, 52, image=self.img_button)
button_quit.create_image(151, 52, image=self.img_button)
button_quit.place(x=1400, y=500)
button_start.place(x=300, y=500)
canvas.place(x=0, y=0)
button_start.create_text(150, 50, text='Start', fill='gold')
button_quit.create_text(150, 50, text='Quit', fill='gold')
button_quit.bind("<Button-1>", lambda a, b=None: self.window.destroy())
button_start.bind("<Button-1>",
lambda a, b=None: self.popup_start_click_action())
| gpl-3.0 | 5,044,571,538,574,761,000 | 40.564599 | 83 | 0.527711 | false |
CINPLA/exana | exana/tracking/fields.py | 1 | 32391 | import numpy as np
def spatial_rate_map(x, y, t, spike_train, binsize=0.01, box_xlen=1,
box_ylen=1, mask_unvisited=True, convolve=True,
return_bins=False, smoothing=0.02):
"""Divide a 2D space in bins of size binsize**2, count the number of spikes
in each bin and divide by the time spent in respective bins. The map can
then be convolved with a gaussian kernel of size csize determined by the
smoothing factor, binsize and box_xlen.
Parameters
----------
spike_train : neo.SpikeTrain
x : float
1d vector of x positions
y : float
1d vector of y positions
t : float
1d vector of times at x, y positions
binsize : float
spatial binsize
    box_xlen : float
        side length of quadratic box (same unit as x and y, e.g. m)
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins, ybins
"""
if not all([len(var) == len(var2) for var in [x,y,t] for var2 in [x,y,t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError('box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.append(t, t[-1] + np.median(np.diff(t)))
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
spike_pos = np.zeros((xbins.size, ybins.size))
time_pos = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
spike_pos[ix[n], iy[n]] += spikes_in_bin[n]
time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map
spike_pos = spike_pos[1:, 1:]
time_pos = time_pos[1:, 1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
if convolve:
rate[np.isnan(rate)] = 0. # for convolution
from astropy.convolution import Gaussian2DKernel, convolve_fft
csize = (box_xlen / binsize) * smoothing
kernel = Gaussian2DKernel(csize)
rate = convolve_fft(rate, kernel) # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins, ybins
else:
return rate.T
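

# Editorial example (not part of the original module): a minimal, synthetic
# usage sketch for spatial_rate_map. The path, spike times and bin size are
# arbitrary assumptions chosen only to satisfy the input checks.
def _demo_spatial_rate_map():
    rng = np.random.RandomState(0)
    t = np.linspace(0., 10., 1001)           # 10 s of tracking
    x = 0.5 + 0.4 * np.sin(t)                # smooth path inside the unit box
    y = 0.5 + 0.4 * np.cos(t)
    spike_train = np.sort(rng.uniform(t[0], t[-1], size=200))
    # convolve=False keeps the demo free of the optional astropy dependency
    return spatial_rate_map(x, y, t, spike_train, binsize=0.05,
                            box_xlen=1, box_ylen=1, convolve=False)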
def gridness(rate_map, box_xlen, box_ylen, return_acorr=False,
step_size=0.1, method='iter', return_masked_acorr=False):
'''Calculates gridness of a rate map. Calculates the normalized
autocorrelation (A) of a rate map B where A is given as
    A = 1/n * sum_{x,y} (B - mean(B))^2 / std(B)^2. Further, Pearson's
    product-moment correlation coefficient is calculated between A and A_rot
rotated 30 and 60 degrees. Finally the gridness is calculated as the
difference between the minimum of coefficients at 60 degrees and the
maximum of coefficients at 30 degrees i.e. gridness = min(r60) - max(r30).
    If the method 'iter' is chosen:
    In order to focus the analysis on the symmetry of A, the central and the
    outer parts of A are increasingly masked at steps of ``step_size``, and
    the gridness is maximized over the resulting masks.
If the method 'puncture' is chosen:
This is the standard way of calculating gridness, by masking the central
autocorrelation bump, in addition to rounding the map. See examples.
Parameters
----------
rate_map : numpy.ndarray
box_xlen : float
side length of quadratic box
step_size : float
step size in masking, only applies to the method "iter"
return_acorr : bool
return autocorrelation map or not
return_masked_acorr : bool
return masked autocorrelation map or not
method : 'iter' or 'puncture'
Returns
-------
out : gridness, (autocorrelation map, masked autocorrelation map)
Examples
--------
>>> from exana.tracking.tools import make_test_grid_rate_map
>>> import matplotlib.pyplot as plt
>>> rate_map, pos = make_test_grid_rate_map()
>>> iter_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='iter')
>>> print('%.2f' % iter_score)
1.39
>>> puncture_score = gridness(rate_map, box_xlen=1, box_ylen=1, method='puncture')
>>> print('%.2f' % puncture_score)
0.96
.. plot::
import matplotlib.pyplot as plt
import numpy as np
from exana.tracking.tools import make_test_grid_rate_map
from exana.tracking import gridness
import matplotlib.pyplot as plt
rate_map, _ = make_test_grid_rate_map()
fig, axs = plt.subplots(2, 2)
g1, acorr, m_acorr1 = gridness(rate_map, box_xlen=1,
box_ylen=1, return_acorr=True,
return_masked_acorr=True,
method='iter')
g2, m_acorr2 = gridness(rate_map, box_xlen=1,
box_ylen=1,
return_masked_acorr=True,
method='puncture')
mats = [rate_map, m_acorr1, acorr, m_acorr2]
titles = ['Rate map', 'Masked acorr "iter", gridness = %.2f' % g1,
'Autocorrelation',
'Masked acorr "puncture", gridness = %.2f' % g2]
for ax, mat, title in zip(axs.ravel(), mats, titles):
ax.imshow(mat)
ax.set_title(title)
plt.tight_layout()
plt.show()
'''
import numpy.ma as ma
from exana.misc.tools import fftcorrelate2d
from exana.tracking.tools import gaussian2D
from scipy.optimize import curve_fit
tmp_map = rate_map.copy()
tmp_map[~np.isfinite(tmp_map)] = 0
acorr = fftcorrelate2d(tmp_map, tmp_map, mode='full', normalize=True)
rows, cols = acorr.shape
b_x = np.linspace(- box_xlen / 2., box_xlen / 2., rows)
b_y = np.linspace(- box_ylen / 2., box_ylen / 2., cols)
B_x, B_y = np.meshgrid(b_x, b_y)
if method == 'iter':
if return_masked_acorr: m_acorrs = []
gridscores = []
for outer in np.arange(box_xlen / 4, box_xlen / 2, step_size):
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > outer)
for inner in np.arange(0, box_xlen / 4, step_size):
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < inner)
r30, r60 = rotate_corr(m_acorr)
gridscores.append(np.min(r60) - np.max(r30))
if return_masked_acorr: m_acorrs.append(m_acorr)
gridscore = max(gridscores)
if return_masked_acorr: m_acorr = m_acorrs[gridscores.index(gridscore)]
elif method == 'puncture':
# round picture edges
_gaussian = lambda pos, a, s: gaussian2D(a, pos[0], pos[1], 0, 0, s).ravel()
p0 = (max(acorr.ravel()), min(box_xlen, box_ylen) / 100)
popt, pcov = curve_fit(_gaussian, (B_x, B_y), acorr.ravel(), p0=p0)
m_acorr = ma.masked_array(
acorr, mask=np.sqrt(B_x**2 + B_y**2) > min(box_xlen, box_ylen) / 2)
m_acorr = ma.masked_array(
m_acorr, mask=np.sqrt(B_x**2 + B_y**2) < popt[1])
r30, r60 = rotate_corr(m_acorr)
gridscore = float(np.min(r60) - np.max(r30))
if return_acorr and return_masked_acorr:
return gridscore, acorr, m_acorr
if return_masked_acorr:
return gridscore, m_acorr
if return_acorr:
return gridscore, acorr # acorrs[grids.index(max(grids))]
else:
return gridscore
def rotate_corr(acorr):
from exana.misc.tools import masked_corrcoef2d
from scipy.ndimage.interpolation import rotate
angles = range(30, 180+30, 30)
corr = []
# Rotate and compute correlation coefficient
for angle in angles:
rot_acorr = rotate(acorr, angle, reshape=False)
corr.append(masked_corrcoef2d(rot_acorr, acorr)[0, 1])
r60 = corr[1::2]
r30 = corr[::2]
return r30, r60
def occupancy_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
'''Divide a 2D space in bins of size binsize**2, count the time spent
in each bin. The map can be convolved with a gaussian kernel of size
csize determined by the smoothing factor, binsize and box_xlen.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
occupancy_map : numpy.ndarray
if return_bins = True
out : occupancy_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
time_in_bin = np.diff(t_)
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
time_pos = np.zeros((xbins.size, ybins.size))
    for n in range(len(x)):
        time_pos[ix[n], iy[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
time_pos = time_pos[1:, 1:]
    occupancy = time_pos.copy()
    if convolve:
        occupancy[np.isnan(occupancy)] = 0.  # for convolution
        from astropy.convolution import Gaussian2DKernel, convolve_fft
        csize = (box_xlen / binsize) * smoothing
        kernel = Gaussian2DKernel(csize)
        occupancy = convolve_fft(occupancy, kernel)  # TODO edge correction
    if mask_unvisited:
        was_in_bin = np.asarray(time_pos, dtype=bool)
        occupancy[np.invert(was_in_bin)] = np.nan
    if return_bins:
        return occupancy.T, xbins, ybins
    else:
        return occupancy.T
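

# Editorial example (not part of the original module): occupancy of a short
# diagonal walk across the unit box; all parameter values are assumptions.
def _demo_occupancy_map():
    t = np.linspace(0., 2., 201)
    x = np.linspace(0.05, 0.95, 201)
    y = np.linspace(0.05, 0.95, 201)
    # returns seconds spent per visited bin; unvisited bins are nan
    return occupancy_map(x, y, t, binsize=0.05, convolve=False)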
def nvisits_map(x, y, t,
binsize=0.01,
box_xlen=1,
box_ylen=1,
return_bins=False):
'''Divide a 2D space in bins of size binsize**2, count the
number of visits in each bin. The map can be convolved with
a gaussian kernel of size determined by the smoothing factor,
binsize and box_xlen.
Parameters
----------
x : array
1d vector of x positions
y : array
1d vector of y positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
box_xlen : float
side length of quadratic box
Returns
-------
nvisits_map : numpy.ndarray
if return_bins = True
out : nvisits_map, xbins, ybins
'''
if not all([len(var) == len(var2) for var in [
x, y, t] for var2 in [x, y, t]]):
raise ValueError('x, y, t must have same number of elements')
if box_xlen < x.max() or box_ylen < y.max():
raise ValueError(
'box length must be larger or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(box_xlen)*decimals) % dec(float(binsize)*decimals)
remaindery = dec(float(box_ylen)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0 or remaindery != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
xbins = np.arange(0, box_xlen + binsize, binsize)
ybins = np.arange(0, box_ylen + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
iy = np.digitize(y, ybins, right=True)
nvisits_map = np.zeros((xbins.size, ybins.size))
for n in range(len(x)):
if n == 0:
nvisits_map[ix[n], iy[n]] = 1
else:
if ix[n-1] != ix[n] or iy[n-1] != iy[n]:
nvisits_map[ix[n], iy[n]] += 1
# correct for shifting of map since digitize returns values at right edges
nvisits_map = nvisits_map[1:, 1:]
if return_bins:
return nvisits_map.T, xbins, ybins
else:
return nvisits_map.T
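

# Editorial example (not part of the original module): leaving a bin and
# re-entering it later is counted as two separate visits.
def _demo_nvisits_map():
    t = np.linspace(0., 1., 5)
    x = np.array([0.12, 0.32, 0.12, 0.32, 0.12])
    y = np.full(5, 0.12)
    return nvisits_map(x, y, t, binsize=0.1, box_xlen=1, box_ylen=1)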
def spatial_rate_map_1d(x, t, spike_train,
binsize=0.01,
track_len=1,
mask_unvisited=True,
convolve=True,
return_bins=False,
smoothing=0.02):
"""Take x coordinates of linear track data, divide in bins of binsize,
count the number of spikes in each bin and divide by the time spent in
respective bins. The map can then be convolved with a gaussian kernel of
    size csize determined by the smoothing factor, binsize and track_len.
Parameters
----------
spike_train : array
x : array
1d vector of x positions
t : array
1d vector of times at x, y positions
binsize : float
spatial binsize
    track_len : float
        length of the linear track
mask_unvisited: bool
mask bins which has not been visited by nans
convolve : bool
convolve the rate map with a 2D Gaussian kernel
Returns
-------
out : rate map
if return_bins = True
out : rate map, xbins
"""
if not all([len(var) == len(var2) for var in [x, t] for var2 in [x, t]]):
raise ValueError('x, t must have same number of elements')
if track_len < x.max():
        raise ValueError('track length must be larger '
                         'or equal to max path length')
from decimal import Decimal as dec
decimals = 1e10
remainderx = dec(float(track_len)*decimals) % dec(float(binsize)*decimals)
if remainderx != 0:
raise ValueError('the remainder should be zero i.e. the ' +
'box length should be an exact multiple ' +
'of the binsize')
# interpolate one extra timepoint
t_ = np.array(t.tolist() + [t.max() + np.median(np.diff(t))])
spikes_in_bin, _ = np.histogram(spike_train, t_)
time_in_bin = np.diff(t_)
xbins = np.arange(0, track_len + binsize, binsize)
ix = np.digitize(x, xbins, right=True)
spike_pos = np.zeros(xbins.size)
time_pos = np.zeros(xbins.size)
for n in range(len(x)):
spike_pos[ix[n]] += spikes_in_bin[n]
time_pos[ix[n]] += time_in_bin[n]
# correct for shifting of map since digitize returns values at right edges
spike_pos = spike_pos[1:]
time_pos = time_pos[1:]
with np.errstate(divide='ignore', invalid='ignore'):
rate = np.divide(spike_pos, time_pos)
    if convolve:
        rate[np.isnan(rate)] = 0.  # for convolution
        from astropy.convolution import Gaussian1DKernel, convolve_fft
        csize = (track_len / binsize) * smoothing
        kernel = Gaussian1DKernel(csize)
        rate = convolve_fft(rate, kernel)  # TODO edge correction
if mask_unvisited:
was_in_bin = np.asarray(time_pos, dtype=bool)
rate[np.invert(was_in_bin)] = np.nan
if return_bins:
return rate.T, xbins
else:
return rate.T
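

# Editorial example (not part of the original module): firing rate along a
# 1 m linear track; values are demo assumptions.
def _demo_spatial_rate_map_1d():
    rng = np.random.RandomState(1)
    t = np.linspace(0., 10., 1001)
    x = 0.5 + 0.45 * np.sin(t)               # back-and-forth run on the track
    spike_train = np.sort(rng.uniform(t[0], t[-1], size=100))
    return spatial_rate_map_1d(x, t, spike_train, binsize=0.05,
                               track_len=1, convolve=False)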
def separate_fields(rate_map, laplace_thrsh=0, center_method='maxima',
cutoff_method='none', box_xlen=1, box_ylen=1, index=False):
"""Separates fields using the laplacian to identify fields separated by
a negative second derivative.
Parameters
----------
rate_map : np 2d array
firing rate in each bin
laplace_thrsh : float
value of laplacian to separate fields by relative to the minima. Should be
on the interval 0 to 1, where 0 cuts off at 0 and 1 cuts off at
min(laplace(rate_map)). Default 0.
center_method : string
method to find field centers. Valid options = ['center_of_mass',
'maxima','gaussian_fit']
cutoff_method (optional) : string or function
function to exclude small fields. If local field value of function
is lower than global function value, the field is excluded. Valid
string_options = ['median', 'mean','none'].
index : bool, default False
return bump center values as index or xy-pos
Returns
-------
fields : numpy array, shape like rate_map.
contains areas all filled with same value, corresponding to fields
in rate_map. The values are in range(1,nFields + 1), sorted by size of the
field (sum of all field values). 0 elsewhere.
n_field : int
field count
bump_centers : (n_field x 2) np ndarray
Coordinates of field centers
"""
cutoff_functions = {'mean':np.mean, 'median':np.median, 'none':None}
if not callable(cutoff_method):
try:
cutoff_func = cutoff_functions[cutoff_method]
except KeyError:
msg = "invalid cutoff_method flag '%s'" % cutoff_method
raise ValueError(msg)
else:
cutoff_func = cutoff_method
from scipy import ndimage
l = ndimage.laplace(rate_map)
l[l>laplace_thrsh*np.min(l)] = 0
# Labels areas of the laplacian not connected by values > 0.
fields, n_fields = ndimage.label(l)
# index 0 is the background
indx = np.arange(1,n_fields+1)
# Use cutoff method to remove unwanted fields
if cutoff_method != 'none':
        try:
            total_value = cutoff_func(fields)
        except Exception:
            print('Unexpected error, cutoff_func does not like the input:')
            raise
        field_values = ndimage.labeled_comprehension(rate_map, fields, indx,
                                                     cutoff_func, float, 0)
        try:
            is_field = field_values >= total_value
        except Exception:
            print('cutoff_func return values do not want to compare:')
            raise
if np.sum(is_field) == 0:
return np.zeros(rate_map.shape), 0, np.array([[],[]])
for i in indx:
if not is_field[i-1]:
fields[fields == i] = 0
n_fields = ndimage.label(fields, output=fields)
indx = np.arange(1,n_fields + 1)
# Sort by largest mean
sizes = ndimage.labeled_comprehension(rate_map, fields, indx,
np.mean, float, 0)
size_sort = np.argsort(sizes)[::-1]
new = np.zeros_like(fields)
for i in np.arange(n_fields):
new[fields == size_sort[i]+1] = i+1
fields = new
    bc = get_bump_centers(rate_map, labels=fields, ret_index=index,
                          indices=indx, method=center_method)
# TODO exclude fields where maxima is on the edge of the field?
return fields, n_fields, bc
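

# Editorial example (not part of the original module): two well-separated
# synthetic bumps; like separate_fields itself, this needs scipy.
def _demo_separate_fields():
    rate_map = np.zeros((20, 20))
    rate_map[3:6, 3:6] = 1.
    rate_map[12:16, 12:16] = 2.
    # expected n_fields == 2 for this synthetic input
    fields, n_fields, centers = separate_fields(rate_map, index=True)
    return fields, n_fields, centers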
def get_bump_centers(rate_map, labels, ret_index=False, indices=None, method='maxima',
units=1):
"""Finds center of fields at labels."""
from scipy import ndimage
if method not in ['maxima','center_of_mass','gaussian_fit']:
msg = "invalid center_method flag '%s'" % method
raise ValueError(msg)
if indices is None:
indices = np.arange(1,np.max(labels)+1)
if method == 'maxima':
bc = ndimage.maximum_position(rate_map, labels=labels,
index=indices)
elif method == 'center_of_mass':
bc = ndimage.center_of_mass(rate_map, labels=labels, index=indices)
elif method == 'gaussian_fit':
from exana.tracking.tools import fit_gauss_asym
bc = np.zeros((len(indices),2))
for i in indices:
r = rate_map.copy()
r[labels != i] = 0
popt = fit_gauss_asym(r, return_data=False)
# TODO Find out which axis is x and which is y
bc[i-1] = (popt[2],popt[1])
if ret_index:
msg = 'ret_index not implemented for gaussian fit'
raise NotImplementedError(msg)
if not ret_index and not method=='gaussian_fit':
bc = (bc + np.array((0.5,0.5)))/rate_map.shape
return np.array(bc)*units
def find_avg_dist(rate_map, thrsh = 0, plot=False):
"""Uses autocorrelation and separate_fields to find average distance
between bumps. Is dependent on high gridness to get separate bumps in
the autocorrelation
Parameters
----------
rate_map : np 2d array
firing rate in each bin
thrsh (optional) : float, default 0
cutoff value for the laplacian of the autocorrelation function.
Should be a negative number. Gives better separation if bumps are
connected by "bridges" or saddles where the laplacian is negative.
plot (optional) : bool, default False
plot acorr and the separated acorr, with bump centers
Returns
-------
avg_dist : float
relative units from 0 to 1 of the box size
"""
from scipy.ndimage import maximum_position
from exana.misc.tools import fftcorrelate2d
# autocorrelate. Returns array (2x - 1) the size of rate_map
acorr = fftcorrelate2d(rate_map,rate_map, mode = 'full', normalize = True)
#acorr[acorr<0] = 0 # TODO Fix this
f, nf, bump_centers = separate_fields(acorr,laplace_thrsh=thrsh,
center_method='maxima',cutoff_method='median')
# TODO Find a way to find valid value for
# thrsh, or remove.
bump_centers = np.array(bump_centers)
# find dists from center in (autocorrelation)relative units (from 0 to 1)
distances = np.linalg.norm(bump_centers - (0.5,0.5), axis = 1)
dist_sort = np.argsort(distances)
distances = distances[dist_sort]
# use maximum 6 closest values except center value
avg_dist = np.median(distances[1:7])
# correct for difference in shapes
avg_dist *= acorr.shape[0]/rate_map.shape[0] # = 1.98
# TODO : raise warning if too big difference between points
if plot:
import matplotlib.pyplot as plt
fig,[ax1,ax2] = plt.subplots(1,2)
ax1.imshow(acorr,extent = (0,1,0,1),origin='lower')
ax1.scatter(*(bump_centers[:,::-1].T))
ax2.imshow(f,extent = (0,1,0,1),origin='lower')
ax2.scatter(*(bump_centers[:,::-1].T))
return avg_dist
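

# Editorial example (not part of the original module). find_avg_dist imports
# helpers from exana.misc, so this sketch assumes the exana package and its
# test helper are importable.
def _demo_find_avg_dist():
    from exana.tracking.tools import make_test_grid_rate_map
    rate_map, _ = make_test_grid_rate_map()
    return find_avg_dist(rate_map)  # bump spacing in box-relative units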
def fit_hex(bump_centers, avg_dist=None, plot_bumps=False, method='best'):
    """Fits a hex grid to a given set of bumps. Uses the bumps most likely
    to be in correct hexagonal position around the center bump.
Parameters
----------
bump_centers : Nx2 np.array
x,y positions of bump centers, x,y /in (0,1)
avg_dist (optional): float
average spacing between bumps
plot_bumps (optional): bool
if True, plots at the three bumps most likely to be in
correct hex-position to the current matplotlib axes.
method (optional): string, valid options: ['closest', 'best']
method to find angle from neighboring bumps.
'closest' uses six bumps nearest to center bump
'best' uses the two bumps nearest to avg_dist
Returns
-------
displacement : float
distance of bump closest to the center in meters
orientation : float
orientation of hexagon (in degrees)
"""
valid_methods = ['closest', 'best']
if method not in valid_methods:
msg = "invalid method flag '%s'" % method
raise ValueError(msg)
bump_centers = np.array(bump_centers)
# sort by distance to center
d = np.linalg.norm(bump_centers - (0.5,0.5), axis=1)
d_sort = np.argsort(d)
dist_sorted = bump_centers[d_sort]
center_bump = dist_sorted[0]; others = dist_sorted[1:]
displacement = d[d_sort][0]
# others distances to center bumps
relpos = others - center_bump
reldist = np.linalg.norm(relpos, axis=1)
if method == 'closest':
# get 6 closest bumps
rel_sort = np.argsort(reldist)
closest = others[rel_sort][:6]
relpos = relpos[rel_sort][:6]
elif method == 'best':
# get 2 bumps such that /sum_{i\neqj}(\abs{r_i-r_j}-avg_ist)^2 is minimized
squares = 1e32*np.ones((others.shape[0], others.shape[0]))
for i in range(len(relpos)):
for j in range(i,len(relpos)):
rel1 = (reldist[i] - avg_dist)**2
rel2 = (reldist[j] - avg_dist)**2
rel3 = (np.linalg.norm(relpos[i]-relpos[j]) - avg_dist)**2
squares[i,j] = rel1 + rel2 + rel3
rel_slice = np.unravel_index(np.argmin(squares), squares.shape)
rel_slice = np.array(rel_slice)
#rel_sort = np.argsort(np.abs(reldist-avg_dist))
closest = others[rel_slice]
relpos = relpos[rel_slice]
# sort by angle
a = np.arctan2(relpos[:,1], relpos[:,0])%(2*np.pi)
a_sort = np.argsort(a)
# extract lowest angle and convert to degrees
orientation = a[a_sort][0] *180/np.pi
# hex grid is symmetric under rotations of 60deg
orientation %= 60
if plot_bumps:
import matplotlib.pyplot as plt
ax=plt.gca()
i = 1
xmin, xmax = ax.get_xlim()
ymin, ymax = ax.get_ylim()
dx = xmax-xmin; dy = ymax - ymin
closest = closest[a_sort]
edges = [center_bump] if method == 'best' else []
edges += [c for c in closest]
edges = np.array(edges)*(dx,dy) + (xmin, ymin)
poly = plt.Polygon(edges, alpha=0.5,color='r')
ax.add_artist(poly)
return displacement, orientation
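

# Editorial example (not part of the original module): a perfect hexagonal
# ring of bumps around the box centre recovers its orientation (15 degrees
# here) and a zero displacement.
def _demo_fit_hex():
    angles = np.deg2rad(np.arange(0., 360., 60.) + 15.)
    ring = 0.5 + 0.3 * np.stack([np.cos(angles), np.sin(angles)], axis=1)
    bumps = np.vstack([[0.5, 0.5], ring])
    return fit_hex(bumps, avg_dist=0.3, method='best')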
def calculate_grid_geometry(rate_map, plot_fields=False, **kwargs):
"""Calculates quantitative information about grid field.
Find bump centers, bump spacing, center diplacement and hexagon
orientation
Parameters
----------
rate_map : np 2d array
firing rate in each bin
plot_fields : if True, plots the field labels with field centers to the
current matplotlib ax. Default False
thrsh : float, default 0
see find_avg_dist()
center_method : string, valid options: ['maxima', 'center_of_mass']
default: 'center_of_mass'
see separate_fields()
method : string, valid options: ['closest', 'best']
see fit_hex()
Returns
-------
bump_centers : 2d np.array
x,y positions of bump centers
avg_dist : float
average spacing between bumps, \in [0,1]
displacement : float
distance of bump closest to the center
orientation : float
orientation of hexagon (in degrees)
Examples
--------
>>> import numpy as np
>>> rate_map = np.zeros((5,5))
>>> pos = np.array([ [0,2],
... [1,0],[1,4],
... [2,2],
... [3,0],[3,4],
... [4,2]])
>>> for(i,j) in pos:
... rate_map[i,j] = 1
...
>>> result = calculate_grid_geometry(rate_map)
"""
# TODO add back the following when it is correct
# (array([[0.5, 0.9],
# [0.9, 0.7],
# [0.1, 0.7],
# [0.5, 0.5],
# [0.9, 0.3],
# [0.1, 0.3],
# [0.5, 0.1]]) * m, 0.4472135954999579, 0.0, 26.565051177077983)
from scipy.ndimage import mean, center_of_mass
# TODO: smooth data?
# smooth_rate_map = lambda x:x
# rate_map = smooth_rate_map(rate_map)
center_method = kwargs.pop('center_method',None)
if center_method:
fields, nfields, bump_centers = separate_fields(rate_map,
center_method=center_method)
else:
fields, nfields, bump_centers = separate_fields(rate_map)
if bump_centers.size == 0:
import warnings
msg = 'couldnt find bump centers, returning None'
warnings.warn(msg, RuntimeWarning, stacklevel=2)
        return None, None, None, None
sh = np.array(rate_map.shape)
if plot_fields:
print(fields)
import matplotlib.pyplot as plt
x=np.linspace(0,1,sh[0]+1)
y=np.linspace(0,1,sh[1]+1)
x,y = np.meshgrid(x,y)
ax = plt.gca()
print('nfields: ',nfields)
plt.pcolormesh(x,y, fields)
# switch from row-column to x-y
bump_centers = bump_centers[:,::-1]
thrsh = kwargs.pop('thrsh', None)
if thrsh:
avg_dist = find_avg_dist(rate_map, thrsh)
else:
avg_dist = find_avg_dist(rate_map)
displacement, orientation = fit_hex(bump_centers, avg_dist,
plot_bumps=plot_fields, **kwargs)
return bump_centers, avg_dist, displacement, orientation
class RandomDisplacementBounds(object):
"""random displacement with bounds"""
def __init__(self, xmin, xmax, stepsize=0.5):
self.xmin = np.array(xmin)
self.xmax = np.array(xmax)
self.stepsize = stepsize
def __call__(self, x):
"""take a random step but ensure the new position is within the bounds"""
while True:
# this could be done in a much more clever way, but it will work for example purposes
xnew = x + (self.xmax-self.xmin)*np.random.uniform(-self.stepsize,
self.stepsize, np.shape(x))
if np.all(xnew < self.xmax) and np.all(xnew > self.xmin):
break
return xnew
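

# Editorial example (not part of the original module): bounded random steps
# of the kind passed as `take_step` to basinhopping below.
def _demo_random_step():
    take_step = RandomDisplacementBounds([0.025, 0.], [0.2, 1.], stepsize=0.5)
    x_new = take_step(np.array([0.1, 0.5]))
    assert np.all(x_new > take_step.xmin) and np.all(x_new < take_step.xmax)
    return x_new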
def optimize_sep_fields(rate_map,step = 0.04, niter=40, T = 1.0, method = 'SLSQP',
glob=True, x0 = [0.065,0.1],callback=None):
"""Optimizes the separation of the fields by minimizing an error
function
Parameters
----------
rate_map :
method :
valid methods=['L-BFGS-B', 'TNC', 'SLSQP']
x0 : list
initial values for smoothing smoothing and laplace_thrsh
Returns
--------
res :
Result of the optimization. Contains smoothing and laplace_thrsh in
attribute res.x"""
from scipy import optimize
from exana.tracking.tools import separation_error_func as err_func
valid_methods = ['L-BFGS-B', 'TNC', 'SLSQP']
if method not in valid_methods:
raise ValueError('invalid method flag %s' %method)
    rate_map[np.isnan(rate_map)] = 0.
xmin = [0.025, 0]
xmax = [0.2, 1]
bounds = [(low,high) for low,high in zip(xmin,xmax)]
obj_func = lambda args: err_func(args[0], args[1], rate_map)
if glob:
take_step = RandomDisplacementBounds(xmin, xmax,stepsize=step)
minimizer_kwargs = dict(method=method, bounds=bounds)
res = optimize.basinhopping(obj_func, x0, niter=niter, T = T,
minimizer_kwargs=minimizer_kwargs,
take_step=take_step,callback=callback)
else:
res = optimize.minimize(obj_func, x0, method=method, bounds = bounds, options={'disp': True})
return res
if __name__ == "__main__":
import doctest
doctest.testmod()
| gpl-3.0 | 8,174,192,474,725,353,000 | 34.9102 | 101 | 0.591244 | false |
btovar/cctools | apps/wq_hypersweep/test.py | 1 | 1690 | from work_queue import *
import sys
def compose_task(i,j):
id = (i-1)*20+j
d_rate = i*0.05
r_blok = j
outfile = "results%d.csv" % id
command = "./script.sh results%d.csv %f %d" % (id,d_rate,r_blok)
t = Task(command)
t.specify_file("env.tar.gz", "env.tar.gz", WORK_QUEUE_INPUT, cache=True)
t.specify_file("datasets/cifar-10-batches-py", "datasets/cifar-10-batches-py", WORK_QUEUE_INPUT, cache=True)
t.specify_file("resnet.py", "resnet.py", WORK_QUEUE_INPUT, cache=True)
t.specify_file("script.sh", "script.sh", WORK_QUEUE_INPUT, cache=True)
t.specify_file(outfile, outfile, WORK_QUEUE_OUTPUT, cache=False)
return t
def main():
try:
q = WorkQueue(port = WORK_QUEUE_DEFAULT_PORT)
except:
print("Instantiation of Work Queue failed.")
sys.exit(1)
print("Listening on port %d..." % q.port)
for i in range(1,21):
for j in range (1,21):
t = compose_task(i,j)
taskid = q.submit(t)
print("Submitted task (id# %d): %s" % (taskid, t.command))
print("waiting for tasks to complete...")
whitelist = []
blacklist = []
while not q.empty():
t = q.wait(5)
if t:
print("task (id# %d) complete: %s (return code %d)" % (t.id, t.command, t.return_status))
if t.return_status == 0:
if t.hostname not in whitelist:
whitelist.append(t.hostname)
if t.return_status != 0:
print("stdout:\n{}".format(t.output))
print("Blacklisting host: %s" % t.hostname)
q.blacklist(t.hostname)
blacklist.append(t.hostname)
q.submit(t)
print("Resubmitted task (id# %s): %s" % (t.id, t.command))
print("All tasks complete.")
print("Whitelist:", whitelist)
print("Blacklist:", blacklist)
sys.exit(0)
if __name__ == '__main__':
main()
| gpl-2.0 | 2,644,520,837,213,823,500 | 25.40625 | 109 | 0.640828 | false |
jhseu/tensorflow | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/basic.py | 1 | 2547 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/basic | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
# Verify that the tf.versions attribute exists. It is difficult to enforce
# contents, since the version numbers change over time. The conversion logic
# itself is verified in the common graphdef converter, so here just assert
# it is being invoked.
# CHECK: module
# CHECK-SAME: tf.versions
# CHECK-SAME: bad_consumers
# CHECK-SAME: min_consumer
# CHECK-SAME: producer
class TestModule(tf.Module):
def __init__(self):
super(TestModule, self).__init__()
self.v42 = tf.Variable(42.0)
self.c43 = tf.constant(43.0)
# CHECK: "tf_saved_model.global_tensor"() {is_mutable, sym_name = "[[VAR:[a-zA-Z_0-9]+]]", tf_saved_model.exported_names = ["v42"], type = tensor<f32>, value = dense<4.200000e+01> : tensor<f32>} : () -> ()
# CHECK: "tf_saved_model.global_tensor"() {sym_name = "[[CONST:[a-zA-Z_0-9]+]]", tf_saved_model.exported_names = [], type = tensor<f32>, value = dense<4.300000e+01> : tensor<f32>} : () -> ()
# CHECK: func {{@[a-zA-Z_0-9]+}}(
# CHECK-SAME: %arg0: tensor<f32> {tf_saved_model.index_path = [0]},
# CHECK-SAME: %arg1: tensor<*x!tf.resource> {tf_saved_model.bound_input = @[[VAR]]},
# CHECK-SAME: %arg2: tensor<f32> {tf_saved_model.bound_input = @[[CONST]]}) -> (
# CHECK-SAME: tensor<f32> {tf_saved_model.index_path = []})
# CHECK-SAME: attributes {{.*}} tf_saved_model.exported_names = ["some_function"]
@tf.function(input_signature=[tf.TensorSpec([], tf.float32)])
def some_function(self, x):
return x + self.v42 + self.c43
if __name__ == '__main__':
common.do_test(TestModule)
| apache-2.0 | -4,427,873,526,579,521,500 | 42.169492 | 207 | 0.666667 | false |
anak10thn/graphics-dojo-qt5 | dragmove/dragmovecharm.py | 1 | 3229 | #############################################################################
##
## Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies).
## Contact: Qt Software Information ([email protected])
##
## This file is part of the Graphics Dojo project on Qt Labs.
##
## This file may be used under the terms of the GNU General Public
## License version 2.0 or 3.0 as published by the Free Software Foundation
## and appearing in the file LICENSE.GPL included in the packaging of
## this file. Please review the following information to ensure GNU
## General Public Licensing requirements will be met:
## http://www.fsf.org/licensing/licenses/info/GPLv2.html and
## http://www.gnu.org/copyleft/gpl.html.
##
## If you are unsure which license is appropriate for your use, please
## contact the sales department at [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
from PyQt4.QtCore import QEvent, QObject, QPoint, Qt
from PyQt4.QtGui import QMouseEvent, QWidget
class DragMoveData:
def __init__(self):
self.isMoving = False
self.startDrag = QPoint()
class DragMoveCharm(QObject):
def __init__(self, parent = None):
QObject.__init__(self, parent)
self.dragMoveData = {}
def activateOn(self, widget):
if widget in self.dragMoveData:
return
data = DragMoveData()
data.startDrag = QPoint(0, 0)
data.isMoving = False
self.dragMoveData[widget] = data
widget.installEventFilter(self)
    def deactivateFrom(self, widget):
        del self.dragMoveData[widget]
        widget.removeEventFilter(self)
def eventFilter(self, object, event):
if not isinstance(object, QWidget):
return False
widget = object
type = event.type()
if type != QEvent.MouseButtonPress and \
type != QEvent.MouseButtonRelease and \
type != QEvent.MouseMove:
return False
if isinstance(event, QMouseEvent):
if event.modifiers() != Qt.NoModifier:
return False
button = event.button()
mouseEvent = event
try:
data = self.dragMoveData[widget]
except KeyError:
return False
consumed = False
if type == QEvent.MouseButtonPress and button == Qt.LeftButton:
data.startDrag = QPoint(mouseEvent.globalPos())
data.isMoving = True
event.accept()
consumed = True
if type == QEvent.MouseButtonRelease:
data.startDrag = QPoint(0, 0)
data.isMoving = False
if type == QEvent.MouseMove and data.isMoving:
pos = mouseEvent.globalPos()
widget.move(widget.pos() + pos - data.startDrag)
data.startDrag = QPoint(pos)
consumed = True
return consumed
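

# Editorial usage sketch (not part of the original file); assumes a normal
# PyQt4 application. Activating the charm on a widget makes it follow
# left-button drags:
#
#     import sys
#     from PyQt4.QtGui import QApplication, QLabel
#
#     app = QApplication(sys.argv)
#     label = QLabel("Drag me")
#     charm = DragMoveCharm()
#     charm.activateOn(label)
#     label.show()
#     sys.exit(app.exec_())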
| gpl-2.0 | -7,986,007,107,172,308,000 | 31.616162 | 77 | 0.580056 | false |
JackMorris/CaiusHallHelper | main.py | 1 | 1768 | import sys
from datetime import date, timedelta
from configuration import Configuration
from service import raven_service, email_service
def main():
configuration_file_path = sys.argv[1]
configuration = Configuration(configuration_file_path)
_authenticate_services(configuration)
_make_user_bookings(configuration.users, 3)
_send_user_reports(configuration.users, 0)
def _authenticate_services(configuration):
""" Use `configuration` to authenticate raven_service and email_service.
:param configuration: Configuration instance for system configuration
"""
raven_service.set_default_credentials(configuration.default_crsid,
configuration.default_password)
email_service.set_email_credentials(configuration.gmail_username,
configuration.gmail_password)
def _make_user_bookings(users, days_in_advance):
""" Create bookings for each user in `users`.
:param users: list of Users to create bookings for
:param days_in_advance: how far in advance to book
:return: list of Booking instances containing all booked events
"""
date_to_book = date.today() + timedelta(days=days_in_advance)
bookings = []
for user in users:
bookings.append(user.create_booking(date_to_book))
return bookings
def _send_user_reports(users, days_in_advance):
""" Send reports to each user in `users`.
:param users: list of User instances to send reports to
:param days_in_advance: how many days in advance the reports should be for
"""
date_for_report = date.today() + timedelta(days=days_in_advance)
for user in users:
user.email_report(date_for_report)
if __name__ == '__main__':
main() | mit | 4,981,933,257,555,847,000 | 35.102041 | 78 | 0.68948 | false |
pombredanne/pytype | pytype/tests/test_attributes.py | 1 | 2408 | """Test instance and class attributes."""
from pytype.tests import test_inference
class TestAttributes(test_inference.InferenceTest):
"""Tests for attributes."""
def testSimpleAttribute(self):
with self.Infer("""
class A(object):
def method1(self):
self.a = 3
def method2(self):
self.a = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
a: complex or int
def method1(self) -> NoneType
def method2(self) -> NoneType
""")
def testOutsideAttributeAccess(self):
with self.Infer("""
class A(object):
pass
def f1():
A().a = 3
def f2():
A().a = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
a: complex or int
def f1() -> NoneType
def f2() -> NoneType
""")
def testPrivate(self):
with self.Infer("""
class C(object):
def __init__(self):
self._x = 3
def foo(self):
return self._x
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class C:
_x: int
def foo(self) -> int
""")
def testPublic(self):
with self.Infer("""
class C(object):
def __init__(self):
self.x = 3
def foo(self):
return self.x
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class C:
x: int
def foo(self) -> int
""")
def testCrosswise(self):
with self.Infer("""
class A(object):
def __init__(self):
if id(self):
self.b = B()
def set_on_b(self):
self.b.x = 3
class B(object):
def __init__(self):
if id(self):
self.a = A()
def set_on_a(self):
self.a.x = 3j
""", deep=True, solve_unknowns=False, extract_locals=True) as ty:
self.assertTypesMatchPytd(ty, """
class A:
b: B
x: complex
def set_on_b(self) -> NoneType
class B:
a: A
x: int
def set_on_a(self) -> NoneType
""")
if __name__ == "__main__":
test_inference.main()
| apache-2.0 | 7,313,925,799,556,199,000 | 24.083333 | 69 | 0.50789 | false |
DolphinDream/sverchok | nodes/curve/nearest_point.py | 1 | 6929 | import numpy as np
import bpy
from bpy.props import FloatProperty, EnumProperty, BoolProperty, IntProperty
from mathutils import Matrix
from mathutils.kdtree import KDTree
import sverchok
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.data_structure import updateNode, zip_long_repeat, ensure_nesting_level, get_data_nesting_level
from sverchok.utils.logging import info, exception
from sverchok.utils.curve import SvCurve
from sverchok.utils.dummy_nodes import add_dummy
from sverchok.dependencies import scipy
if scipy is None:
add_dummy('SvExNearestPointOnCurveNode', "Nearest Point on Curve", 'scipy')
else:
from scipy.optimize import minimize_scalar
def init_guess(curve, points_from, samples=50):
u_min, u_max = curve.get_u_bounds()
us = np.linspace(u_min, u_max, num=samples)
points = curve.evaluate_array(us).tolist()
#print("P:", points)
kdt = KDTree(len(us))
for i, v in enumerate(points):
kdt.insert(v, i)
kdt.balance()
us_out = []
nearest_out = []
for point_from in points_from:
nearest, i, distance = kdt.find(point_from)
us_out.append(us[i])
nearest_out.append(tuple(nearest))
return us_out, nearest_out
def goal(curve, point_from):
def distance(t):
dv = curve.evaluate(t) - np.array(point_from)
return np.linalg.norm(dv)
return distance
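
    # Editorial note (not part of the original node): init_guess() and goal()
    # combine into the coarse-to-fine search performed in process() below,
    # roughly:
    #
    #     us0, pts0 = init_guess(curve, points, samples=50)  # KD-tree seed
    #     res = minimize_scalar(goal(curve, points[0]),
    #                           bracket=(t_min, us0[0], t_max))
    #     t_nearest = res.x
    #
    # where t_min, t_max = curve.get_u_bounds().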
class SvExNearestPointOnCurveNode(bpy.types.Node, SverchCustomTreeNode):
"""
Triggers: Nearest Point on Curve
    Tooltip: Find the point on the curve nearest to the given point
"""
bl_idname = 'SvExNearestPointOnCurveNode'
bl_label = 'Nearest Point on Curve'
bl_icon = 'OUTLINER_OB_EMPTY'
sv_icon = 'SV_NEAREST_CURVE'
samples : IntProperty(
name = "Init Resolution",
default = 50,
min = 3,
update = updateNode)
precise : BoolProperty(
name = "Precise",
default = True,
update = updateNode)
solvers = [
('Brent', "Brent", "Uses inverse parabolic interpolation when possible to speed up convergence of golden section method", 0),
('Bounded', "Bounded", "Uses the Brent method to find a local minimum in the interval", 1),
('Golden', 'Golden Section', "Uses the golden section search technique", 2)
]
method : EnumProperty(
name = "Method",
description = "Solver method to use; select the one which works for your case",
items = solvers,
default = 'Brent',
update = updateNode)
def draw_buttons(self, context, layout):
layout.prop(self, 'samples')
layout.prop(self, 'precise', toggle=True)
def draw_buttons_ext(self, context, layout):
layout.prop(self, 'method')
def sv_init(self, context):
self.inputs.new('SvCurveSocket', "Curve")
p = self.inputs.new('SvVerticesSocket', "Point")
p.use_prop = True
p.default_property = (0.0, 0.0, 0.0)
self.outputs.new('SvVerticesSocket', "Point")
self.outputs.new('SvStringsSocket', "T")
def process(self):
if not any(socket.is_linked for socket in self.outputs):
return
curves_s = self.inputs['Curve'].sv_get()
curves_s = ensure_nesting_level(curves_s, 2, data_types=(SvCurve,))
src_point_s = self.inputs['Point'].sv_get()
src_point_s = ensure_nesting_level(src_point_s, 4)
points_out = []
t_out = []
for curves, src_points_i in zip_long_repeat(curves_s, src_point_s):
for curve, src_points in zip_long_repeat(curves, src_points_i):
t_min, t_max = curve.get_u_bounds()
new_t = []
new_points = []
                init_ts, init_points = init_guess(curve, src_points, samples=self.samples)
#self.info("I: %s", init_points)
for src_point, init_t, init_point in zip(src_points, init_ts, init_points):
if self.precise:
delta_t = (t_max - t_min) / self.samples
self.debug("T_min %s, T_max %s, init_t %s, delta_t %s", t_min, t_max, init_t, delta_t)
if init_t <= t_min:
if init_t - delta_t >= t_min:
bracket = (init_t - delta_t, init_t, t_max)
else:
bracket = None # (t_min, t_min + delta_t, t_min + 2*delta_t)
elif init_t >= t_max:
if init_t + delta_t <= t_max:
bracket = (t_min, init_t, init_t + delta_t)
else:
bracket = None # (t_max - 2*delta_t, t_max - delta_t, t_max)
else:
bracket = (t_min, init_t, t_max)
result = minimize_scalar(goal(curve, src_point),
bounds = (t_min, t_max),
bracket = bracket,
method = self.method
)
if not result.success:
if hasattr(result, 'message'):
message = result.message
else:
message = repr(result)
raise Exception("Can't find the nearest point for {}: {}".format(src_point, message))
t0 = result.x
if t0 < t_min:
t0 = t_min
elif t0 > t_max:
t0 = t_max
else:
t0 = init_t
new_points.append(init_point)
new_t.append(t0)
if self.precise and self.outputs['Point'].is_linked:
new_points = curve.evaluate_array(np.array(new_t)).tolist()
points_out.append(new_points)
t_out.append(new_t)
self.outputs['Point'].sv_set(points_out)
self.outputs['T'].sv_set(t_out)
def register():
if scipy is not None:
bpy.utils.register_class(SvExNearestPointOnCurveNode)
def unregister():
if scipy is not None:
bpy.utils.unregister_class(SvExNearestPointOnCurveNode)
| gpl-3.0 | -8,756,187,733,861,882,000 | 39.758824 | 141 | 0.499928 | false |
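A minimal standalone sketch of the sample-then-refine search used by the node above, assuming scipy is installed; the parametric circle and the query point are invented for illustration, and the coarse argmin stands in for the KDTree-based init_guess.
import numpy as np
from scipy.optimize import minimize_scalar

def circle(t):
    # hypothetical curve: unit circle in the XY plane
    return np.array([np.cos(t), np.sin(t), 0.0])

def distance_to(point):
    p = np.asarray(point, dtype=float)
    return lambda t: np.linalg.norm(circle(t) - p)

t_min, t_max = 0.0, 2.0 * np.pi
src = (0.5, 0.9, 0.0)
ts = np.linspace(t_min, t_max, 50)
init_t = ts[np.argmin([distance_to(src)(t) for t in ts])]  # coarse initial guess
res = minimize_scalar(distance_to(src), bracket=(t_min, init_t, t_max))
print(res.x, circle(res.x))  # parameter value and curve point nearest to src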
lituan/tools | pisa/ccp4_pisa.py | 1 | 2552 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Run PISA to parse the interfaces in a set of PDB files.
First install CCP4, then adjust the environment variables below to match your installation.
"""
import os
import sys
import subprocess
import cPickle as pickle
from multiprocessing import Pool
os.environ['CCP4_SCR'] = 'C:\\ccp4temp'
os.environ['CCP4I_TCLTK'] = 'C:\\CCP4-7\\TclTk84\\bin'
os.environ['CBIN'] = 'C:\\CCP4-7\\7.0\\bin'
os.environ['CLIB'] = 'C:\\CCP4-7\\lib'
os.environ['CLIBD'] = 'C:\\CCP4-7\\lib\\data'
os.environ['CEXAM'] = 'C:\\CCP4-7\\examples'
os.environ['CHTML'] = 'C:\\CCP4-7\\html'
os.environ['CINCL'] = 'C:\\CCP4-7\\include'
os.environ['CCP4I_TOP'] = 'C:\\CCP4-7\\share\\ccp4i'
os.environ['CLIBD_MON'] = 'C:\\CCP4-7\\lib\\data\\monomers\\'
os.environ['MMCIFDIC'] = 'C:\\CCP4-7\\lib\\ccp4\\cif_mmdic.lib'
os.environ['CRANK'] = 'C:\\CCP4-7\\share\\ccp4i\\crank'
os.environ['CCP4_OPEN'] = 'unknown'
os.environ['GFORTRAN_UNBUFFERED_PRECONNECTED'] = 'Y'
os.environ['PATH'] = 'C:\\CCP4-7\\7.0\\bin'
os.environ['PISA_CONF_FILE'] = 'C:\\CCP4-7\\7.0\\share\\pisa\\pisa.cfg'
def pisa(f):
if not os.path.exists('detail'):
os.makedirs('detail')
if not os.path.exists('interface_xml'):
os.makedirs('interface_xml')
if not os.path.exists('assemble_xml'):
os.makedirs('assemble_xml')
pdbid = f[-8:-4].lower()
subprocess.call(['pisa',pdbid,'-analyse',f])
interface_xml_fname = os.path.join('interface_xml',pdbid+'_inteface.xml')
assemble_xml_fname = os.path.join('assemble_xml',pdbid+'_assemble.xml')
subprocess.call(['pisa',pdbid,'-xml','interfaces','>',interface_xml_fname],shell=True)
subprocess.call(['pisa',pdbid,'-xml','assemblies','>',assemble_xml_fname],shell=True)
# output = subprocess.check_output(['pisa',pdbid,'-detail','interfaces',str(interface_num)],shell=True)
for interface_num in range(100,200):
try:
output = subprocess.check_output(['pisa',pdbid,'-detail','interfaces',str(interface_num)],shell=True)
detail_fname = os.path.join('detail',pdbid+'_'+str(interface_num)+'_detail.txt')
subprocess.call(['pisa',pdbid,'-detail','interfaces',str(interface_num),'>',detail_fname],shell=True)
except:
continue
def main():
parameters = []
for root,dirs,files in os.walk(sys.argv[-1]):
for f in files:
if f[-4:] == '.pdb' and len(f) == 8:
f = os.path.join(root,f)
parameters.append(f)
p = Pool(8)
p.map(pisa,parameters)
p.close()
if __name__ == "__main__":
main()
| cc0-1.0 | 6,509,973,869,992,154,000 | 34.943662 | 113 | 0.617947 | false |
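The '>' redirection above relies on shell=True and happens to work through the Windows shell; a sketch of a more portable pattern, with a hypothetical command and output name:
import subprocess

def run_to_file(cmd, out_path):
    # capture the command's stdout directly instead of using shell redirection
    with open(out_path, 'w') as out:
        subprocess.call(cmd, stdout=out)

# run_to_file(['pisa', '1abc', '-xml', 'interfaces'], '1abc_interface.xml')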
scowcron/ImagesOfNetwork | images_of/entrypoints/audit_mods.py | 1 | 1450 | import click
from images_of import command, settings, Reddit
@command
@click.option('--print-mods', is_flag=True, help='List the non-default moderators for all subreddits')
def main(print_mods):
"""Find subs without mods and disenfranchised mods"""
mods = settings.DEFAULT_MODS
r = Reddit('Moderator Auditor v0.1')
r.oauth()
subs = sorted([sub['name'] for sub in settings.CHILD_SUBS])
empty_subs = list()
orphan_mods = dict()
s = r.get_subreddit(settings.PARENT_SUB)
main_sub_mods = [u.name for u in s.get_moderators()]
for sub in subs:
s = r.get_subreddit(sub)
cur_mods = [u.name for u in s.get_moderators()]
real_mods = [m for m in cur_mods if m not in mods]
if not real_mods:
empty_subs.append(sub)
else:
if print_mods:
print('{} : {}'.format(sub, real_mods))
for m in [i for i in real_mods if i not in main_sub_mods]:
orphan_mods[m] = orphan_mods.get(m, []) + [sub]
print()
print('Unmoderated Subreddits: {}'.format(len(empty_subs)))
print('-----------------------')
for sub in sorted(empty_subs):
print(sub)
print()
print('Orphaned Moderators: {}'.format(len(orphan_mods)))
print('-------------------------')
for m, s in orphan_mods.items():
print('{} : {}'.format(m, s))
if __name__ == '__main__':
main()
| mit | 2,044,205,861,254,028,000 | 26.884615 | 102 | 0.551034 | false |
ArcherSys/ArcherSys | node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 1 | 9950 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'"&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
| mit | -8,514,130,946,126,359,000 | 30.587302 | 77 | 0.554874 | false |
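For comparison, a sketch of building one of the simple cases above with the standard library; easy_xml's nested-list convention corresponds roughly to this ElementTree construction (output formatting differs slightly between the two):
import xml.etree.ElementTree as ET

elem = ET.Element('test2', {'a': 'value1', 'b': 'value2'})
print(ET.tostring(elem, encoding='unicode'))  # <test2 a="value1" b="value2" />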