| column | dtype | range / classes |
|---|---|---|
| repo_name | string | length 5–92 |
| path | string | length 4–232 |
| copies | string | 19 classes |
| size | string | length 4–7 |
| content | string | length 721–1.04M |
| license | string | 15 classes |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51–99.9 |
| line_max | int64 | 15–997 |
| alpha_frac | float64 | 0.25–0.97 |
| autogenerated | bool | 1 class |
gnmiller/craig-bot | craig-bot/lib/python3.6/site-packages/discord/ext/commands/context.py | 1 | 10694 |
# -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2019 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
import discord.abc
import discord.utils
class Context(discord.abc.Messageable):
r"""Represents the context in which a command is being invoked under.
This class contains a lot of meta data to help you understand more about
the invocation context. This class is not created manually and is instead
passed around to commands as the first parameter.
This class implements the :class:`abc.Messageable` ABC.
Attributes
-----------
message: :class:`.Message`
The message that triggered the command being executed.
bot: :class:`.Bot`
The bot that contains the command being executed.
args: :class:`list`
The list of transformed arguments that were passed into the command.
If this is accessed during the :func:`on_command_error` event
then this list could be incomplete.
kwargs: :class:`dict`
A dictionary of transformed arguments that were passed into the command.
Similar to :attr:`args`\, if this is accessed in the
:func:`on_command_error` event then this dict could be incomplete.
prefix: :class:`str`
The prefix that was used to invoke the command.
command
The command (i.e. :class:`.Command` or its subclasses) that is being
invoked currently.
invoked_with: :class:`str`
The command name that triggered this invocation. Useful for finding out
which alias called the command.
invoked_subcommand
The subcommand (i.e. :class:`.Command` or its subclasses) that was
invoked. If no valid subcommand was invoked then this is equal to
`None`.
subcommand_passed: Optional[:class:`str`]
The string with which a subcommand call was attempted. This does not have
to point to a valid registered subcommand and could just point to a
nonsense string. If nothing was passed to attempt a call to a
subcommand then this is set to `None`.
command_failed: :class:`bool`
A boolean that indicates if the command failed to be parsed, checked,
or invoked.
"""
def __init__(self, **attrs):
self.message = attrs.pop('message', None)
self.bot = attrs.pop('bot', None)
self.args = attrs.pop('args', [])
self.kwargs = attrs.pop('kwargs', {})
self.prefix = attrs.pop('prefix')
self.command = attrs.pop('command', None)
self.view = attrs.pop('view', None)
self.invoked_with = attrs.pop('invoked_with', None)
self.invoked_subcommand = attrs.pop('invoked_subcommand', None)
self.subcommand_passed = attrs.pop('subcommand_passed', None)
self.command_failed = attrs.pop('command_failed', False)
self._state = self.message._state
async def invoke(self, *args, **kwargs):
r"""|coro|
Calls a command with the arguments given.
This is useful if you want to just call the callback that a
:class:`.Command` holds internally.
.. note::
This does not handle converters, checks, cooldowns, pre-invoke,
or after-invoke hooks in any way. It calls the internal callback
directly as if it were a regular function.
You must take care in passing the proper arguments when
using this function.
.. warning::
The first parameter passed **must** be the command being invoked.
Parameters
-----------
command: :class:`.Command`
A command or subclass of a command that is going to be called.
\*args
The arguments to use.
\*\*kwargs
The keyword arguments to use.
"""
try:
command = args[0]
except IndexError:
raise TypeError('Missing command to invoke.') from None
arguments = []
if command.cog is not None:
arguments.append(command.cog)
arguments.append(self)
arguments.extend(args[1:])
ret = await command.callback(*arguments, **kwargs)
return ret
async def reinvoke(self, *, call_hooks=False, restart=True):
"""|coro|
Calls the command again.
This is similar to :meth:`~.Context.invoke` except that it bypasses
checks, cooldowns, and error handlers.
.. note::
If you want to bypass :exc:`.UserInputError` derived exceptions,
it is recommended to use the regular :meth:`~.Context.invoke`
as it will work more naturally. After all, this will end up
using the old arguments the user has used and will thus just
fail again.
Parameters
------------
call_hooks: :class:`bool`
Whether to call the before and after invoke hooks.
restart: :class:`bool`
Whether to start the call chain from the very beginning
or where we left off (i.e. the command that caused the error).
The default is to start where we left off.
"""
cmd = self.command
view = self.view
if cmd is None:
raise ValueError('This context is not valid.')
# some state to revert to when we're done
index, previous = view.index, view.previous
invoked_with = self.invoked_with
invoked_subcommand = self.invoked_subcommand
subcommand_passed = self.subcommand_passed
if restart:
to_call = cmd.root_parent or cmd
view.index = len(self.prefix)
view.previous = 0
view.get_word() # advance to get the root command
else:
to_call = cmd
try:
await to_call.reinvoke(self, call_hooks=call_hooks)
finally:
self.command = cmd
view.index = index
view.previous = previous
self.invoked_with = invoked_with
self.invoked_subcommand = invoked_subcommand
self.subcommand_passed = subcommand_passed
@property
def valid(self):
"""Checks if the invocation context is valid to be invoked with."""
return self.prefix is not None and self.command is not None
async def _get_channel(self):
return self.channel
@property
def cog(self):
"""Returns the cog associated with this context's command. None if it does not exist."""
if self.command is None:
return None
return self.command.cog
@discord.utils.cached_property
def guild(self):
"""Returns the guild associated with this context's command. None if not available."""
return self.message.guild
@discord.utils.cached_property
def channel(self):
"""Returns the channel associated with this context's command. Shorthand for :attr:`.Message.channel`."""
return self.message.channel
@discord.utils.cached_property
def author(self):
"""Returns the author associated with this context's command. Shorthand for :attr:`.Message.author`"""
return self.message.author
@discord.utils.cached_property
def me(self):
"""Similar to :attr:`.Guild.me` except it may return the :class:`.ClientUser` in private message contexts."""
return self.guild.me if self.guild is not None else self.bot.user
@property
def voice_client(self):
r"""Optional[:class:`.VoiceClient`]: A shortcut to :attr:`.Guild.voice_client`\, if applicable."""
g = self.guild
return g.voice_client if g else None
async def send_help(self, *args):
"""send_help(entity=<bot>)
|coro|
Shows the help command for the specified entity if given.
The entity can be a command or a cog.
If no entity is given, then it'll show help for the
entire bot.
If the entity is a string, then it looks up whether it's a
:class:`Cog` or a :class:`Command`.
.. note::
Due to the way this function works, instead of returning
something similar to :meth:`~.commands.HelpCommand.command_not_found`,
this returns :class:`None` on bad input or when no help command is set.
Parameters
------------
entity: Optional[Union[:class:`Command`, :class:`Cog`, :class:`str`]]
The entity to show help for.
Returns
--------
Any
The result of the help command, if any.
"""
from .core import Group, Command
bot = self.bot
cmd = bot.help_command
if cmd is None:
return None
cmd = cmd.copy()
cmd.context = self
if len(args) == 0:
await cmd.prepare_help_command(self, None)
mapping = cmd.get_bot_mapping()
return await cmd.send_bot_help(mapping)
entity = args[0]
if entity is None:
return None
if isinstance(entity, str):
entity = bot.get_cog(entity) or bot.get_command(entity)
try:
qualified_name = entity.qualified_name
except AttributeError:
# if we're here then it's not a cog, group, or command.
return None
await cmd.prepare_help_command(self, entity.qualified_name)
if hasattr(entity, '__cog_commands__'):
return await cmd.send_cog_help(entity)
elif isinstance(entity, Group):
return await cmd.send_group_help(entity)
elif isinstance(entity, Command):
return await cmd.send_command_help(entity)
else:
return None
| mit | -5,694,569,240,403,202,000 | 34.885906 | 117 | 0.628016 | false |
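For orientation, `Context.invoke` above calls another command's callback directly, skipping its checks, converters, and cooldowns. A minimal sketch of chaining commands this way (the `ping` and `diag` commands are hypothetical; assumes a standard discord.py bot):

```python
from discord.ext import commands

bot = commands.Bot(command_prefix='!')

@bot.command()
async def ping(ctx):
    await ctx.send('pong')

@bot.command()
async def diag(ctx):
    # Calls ping's callback directly; no checks, converters, or
    # cooldowns run for the invoked command.
    await ctx.invoke(ping)
```

Similarly, `await ctx.send_help()` with no arguments renders help for the entire bot, per `send_help` above.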
mihaipopa12/PythonTrees | RBTree.py | 1 | 11132 |
#!/usr/bin/python3
from Dictionary import Dictionary
import unittest
import string
import random
class RBNode(object):
def __init__(self, colour, key, value=None, parent=None, left_son=None, right_son=None):
self.colour = colour
self.key = key
self.value = value
self.parent = parent
self.left_son = left_son
self.right_son = right_son
def set_black(self):
self.colour = True
def set_red(self):
self.colour = False
def grand_parent(self):
return self.parent.parent if self.parent is not None else None
def uncle(self):
grand_parent = self.grand_parent()
if grand_parent is None:
return None
if grand_parent.left_son == self.parent:
return grand_parent.right_son
else:
return grand_parent.left_son
def sibling(self):
if self.parent is None:
return None
return self.parent.right_son if self.parent.left_son == self else self.parent.left_son
def rotate_right(self, root = None):
left_son = self.left_son
self.left_son = left_son.right_son
if self.left_son is not None:
self.left_son.parent = self
left_son.right_son = self
if self.parent is not None:
if self.parent.left_son == self:
self.parent.left_son = left_son
else:
self.parent.right_son = left_son
else:
root = left_son
left_son.parent = self.parent
self.parent = left_son
return root
def rotate_left(self, root=None):
right_son = self.right_son
self.right_son = right_son.left_son
if self.right_son is not None:
self.right_son.parent = self
right_son.left_son = self
if self.parent is not None:
if self.parent.left_son == self:
self.parent.left_son = right_son
else:
self.parent.right_son = right_son
else:
root = right_son
right_son.parent = self.parent
self.parent = right_son
return root
def detach(self):
parent = self.parent
if parent is not None:
if parent.left_son == self:
parent.left_son = None
else:
parent.right_son = None
@staticmethod
def is_black(node):
return node is None or node.colour
@staticmethod
def is_red(node):
return not RBNode.is_black(node)
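Rotations are the only structural primitive the fixup code relies on. A minimal standalone sketch of a left rotation on plain binary nodes (no colours or parent pointers; the `Node` class here is illustrative, not part of this module):

```python
class Node:
    def __init__(self, key, left=None, right=None):
        self.key, self.left, self.right = key, left, right

def rotate_left(x):
    """Return the new subtree root after rotating x left."""
    y = x.right          # y moves up ...
    x.right = y.left     # ... its left subtree becomes x's right subtree
    y.left = x           # ... and x becomes y's left child
    return y

# 1              2
#  \      ->    / \
#   2          1   3
#    \
#     3
root = rotate_left(Node(1, right=Node(2, right=Node(3))))
assert (root.key, root.left.key, root.right.key) == (2, 1, 3)
```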
class RBTree(Dictionary):
def __init__(self, root=None):
self._root = root
self._size = 0
def get_root(self):
return self._root
def _insert(self, node, parent, key, value):
if node is None:
new_node = RBNode(False, key, value, parent, None, None)
return new_node, new_node
if key == node.key:
node.value = value
return node, None
if key < node.key:
new_son, inserted_node = self._insert(node.left_son, node, key, value)
node.left_son = new_son
else:
new_son, inserted_node = self._insert(node.right_son, node, key, value)
node.right_son = new_son
return node, inserted_node
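# Insertion fixup: the five classic red-black cases. Case 1 blackens
# the root; case 3 recolours on a red uncle and recurses from the
# grandparent; cases 4 and 5 rotate when the uncle is black (inner
# then outer configuration).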
def _checkCase1(self, node):
if node.parent is None:
node.set_black()
else:
self._checkCase2(node)
def _checkCase2(self, node):
if RBNode.is_red(node.parent):
self._checkCase3(node)
def _checkCase3(self, node):
uncle = node.uncle()
if RBNode.is_red(uncle):
node.grand_parent().set_red()
node.parent.set_black()
node.uncle().set_black()
self._checkCase1(node.grand_parent())
else:
self._checkCase4(node)
def _checkCase4(self, node):
grand_parent = node.grand_parent()
if grand_parent.right_son == node.parent and node == node.parent.left_son:
self._root = node.parent.rotate_right(self._root)
self._checkCase5(node.right_son)
elif grand_parent.left_son == node.parent and node == node.parent.right_son:
self._root = node.parent.rotate_left(self._root)
self._checkCase5(node.left_son)
else:
self._checkCase5(node)
def _checkCase5(self, node):
parent = node.parent
grand_parent = node.grand_parent()
if grand_parent.left_son == parent and parent.left_son == node:
parent.set_black()
grand_parent.set_red()
self._root = grand_parent.rotate_right(self._root)
elif grand_parent.right_son == parent and parent.right_son == node:
parent.set_black()
grand_parent.set_red()
self._root = grand_parent.rotate_left(self._root)
else:
raise AssertionError
def insert(self, key, value=None):
self._root, inserted_node = self._insert(self._root, None, key, value)
if inserted_node is not None:
self._size += 1
self._checkCase1(inserted_node)
def _erase(self, node):
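# Deletion fixup. `node` has at most one child here: a red child can
# simply replace it, but removing a black node with no red child leaves
# a "double black" deficit that the loop below pushes up the tree,
# mirroring the classic CLRS delete-fixup cases on each side.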
if node.left_son is not None and node.right_son is not None:
raise AssertionError
existing_son = node.left_son if node.left_son is not None else node.right_son
if existing_son is not None:
if RBNode.is_black(existing_son):
raise AssertionError
node.key, node.value = existing_son.key, existing_son.value
node.left_son = node.right_son = None
existing_son.detach()
else:
double_black_node = node
while double_black_node != self._root and RBNode.is_black(double_black_node):
parent = double_black_node.parent
sibling = double_black_node.sibling()
if double_black_node == double_black_node.parent.left_son:
if RBNode.is_red(sibling):
self._root = parent.rotate_left(self._root)
parent.set_red()
sibling.set_black()
elif RBNode.is_black(sibling.left_son) and RBNode.is_black(sibling.right_son):
sibling.set_red()
if RBNode.is_black(parent):
double_black_node = parent
else:
parent.set_black()
double_black_node = self._root
else:
if RBNode.is_black(sibling.right_son):
self._root = sibling.rotate_right(self._root)
parent.right_son.set_black()
parent.right_son.right_son.set_red()
parent = double_black_node.parent
sibling = double_black_node.sibling()
sibling.colour = parent.colour
parent.set_black()
if sibling.right_son is not None:
sibling.right_son.set_black()
self._root = parent.rotate_left(self._root)
double_black_node = self._root
else:
if RBNode.is_red(sibling):
self._root = parent.rotate_right(self._root)
parent.set_red()
sibling.set_black()
elif RBNode.is_black(sibling.right_son) and RBNode.is_black(sibling.left_son):
sibling.set_red()
if RBNode.is_black(parent):
double_black_node = parent
else:
parent.set_black()
double_black_node = self._root
else:
if RBNode.is_black(sibling.left_son):
self._root = sibling.rotate_left(self._root)
parent.left_son.set_black()
parent.left_son.left_son.set_red()
parent = double_black_node.parent
sibling = double_black_node.sibling()
sibling.colour = parent.colour
parent.set_black()
if sibling.left_son is not None:
sibling.left_son.set_black()
self._root = parent.rotate_right(self._root)
double_black_node = self._root
node.detach()
def erase(self, key):
node_to_be_erased = Dictionary._find(self._root, key)[0]
if node_to_be_erased is None:
return
self._size -= 1
replacing_node = Dictionary._get_left_most(node_to_be_erased.right_son)
if replacing_node is None:
replacing_node = node_to_be_erased
node_to_be_erased.key, node_to_be_erased.value = replacing_node.key, replacing_node.value
if replacing_node != self._root:
self._erase(replacing_node)
else:
# Delete the root
self._root = None
# Queries
def size(self):
return self._size
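A quick usage sketch of the tree above (assumes the `Dictionary` base class from the sibling Dictionary.py module, which supplies the lookup helpers that `erase` relies on):

```python
t = RBTree()
for key, value in [(5, 'a'), (1, 'b'), (9, 'c'), (7, 'd')]:
    t.insert(key, value)          # rebalances via the fixup cases above
assert t.size() == 4
t.erase(5)                        # splices in the in-order successor
assert t.size() == 3
```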
########################## Testing
class TestRBTreeOperations(unittest.TestCase):
def setUp(self):
self.rbtree = RBTree()
# populate the rbtree
self.number_of_insertions = 10000
for i in range(self.number_of_insertions):
key = random.randint(1, 1000000)
value = ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(50))
self.rbtree.insert(key, value)
@staticmethod
def is_sorted(L):
return all(L[i] <= L[i+1] for i in range(len(L)-1))
@staticmethod
def check_rbtree(node):
if node is None:
return True, 0
check_left_son, height_left_son = TestRBTreeOperations.check_rbtree(node.left_son)
check_right_son, height_right_son = TestRBTreeOperations.check_rbtree(node.right_son)
height_left_son += RBNode.is_black(node.left_son)
height_right_son += RBNode.is_black(node.right_son)
return (check_left_son and check_right_son and
(not RBNode.is_red(node) or not RBNode.is_red(node.parent)) and
height_left_son == height_right_son), height_left_son
def test_insert(self):
# the items list must be sorted by keys
self.assertTrue(TestRBTreeOperations.is_sorted(self.rbtree.keys()))
# the red-black tree must satisfy the red-black invariants
self.assertTrue(TestRBTreeOperations.check_rbtree(self.rbtree._root)[0])
def test_erase(self):
number_of_deletions = 50
for i in range(number_of_deletions):
random_key = random.choice(self.rbtree.keys())
self.rbtree.erase(random_key)
self.assertTrue(TestRBTreeOperations.check_rbtree(self.rbtree._root)[0])
if __name__ == "__main__":
unittest.main()
| mit | 5,675,649,446,607,458,000 | 32.035608 | 129 | 0.537819 | false |
zfrenchee/pandas | pandas/tests/io/json/test_ujson.py | 1 | 56098 |
# -*- coding: utf-8 -*-
try:
import json
except ImportError:
import simplejson as json
import math
import pytz
import pytest
import time
import datetime
import calendar
import re
import decimal
import dateutil
from functools import partial
from pandas.compat import range, zip, StringIO, u
import pandas._libs.json as ujson
import pandas.compat as compat
import numpy as np
from pandas import DataFrame, Series, Index, NaT, DatetimeIndex
import pandas.util.testing as tm
json_unicode = (json.dumps if compat.PY3
else partial(json.dumps, encoding="utf-8"))
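The suite below exercises pandas' vendored ujson fork (`pandas._libs.json`). The core contract it keeps probing is a lossless encode/decode round trip; a one-line illustration:

```python
# Illustrative round trip through pandas' private ujson bindings.
assert ujson.decode(ujson.encode({'a': [1, 2.5, None]})) == {'a': [1, 2.5, None]}
```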
class TestUltraJSONTests(object):
@pytest.mark.skipif(compat.is_platform_32bit(),
reason="not compliant on 32-bit, xref #15865")
def test_encodeDecimal(self):
sut = decimal.Decimal("1337.1337")
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert decoded == 1337.1337
sut = decimal.Decimal("0.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.94")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "0.9"
decoded = ujson.decode(encoded)
assert decoded == 0.9
sut = decimal.Decimal("1.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "2.0"
decoded = ujson.decode(encoded)
assert decoded == 2.0
sut = decimal.Decimal("-1.95")
encoded = ujson.encode(sut, double_precision=1)
assert encoded == "-2.0"
decoded = ujson.decode(encoded)
assert decoded == -2.0
sut = decimal.Decimal("0.995")
encoded = ujson.encode(sut, double_precision=2)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.9995")
encoded = ujson.encode(sut, double_precision=3)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
sut = decimal.Decimal("0.99999999999999944")
encoded = ujson.encode(sut, double_precision=15)
assert encoded == "1.0"
decoded = ujson.decode(encoded)
assert decoded == 1.0
def test_encodeStringConversion(self):
input = "A string \\ / \b \f \n \r \t </script> &"
not_html_encoded = ('"A string \\\\ \\/ \\b \\f \\n '
'\\r \\t <\\/script> &"')
html_encoded = ('"A string \\\\ \\/ \\b \\f \\n \\r \\t '
'\\u003c\\/script\\u003e \\u0026"')
def helper(expected_output, **encode_kwargs):
output = ujson.encode(input, **encode_kwargs)
assert input == json.loads(output)
assert output == expected_output
assert input == ujson.decode(output)
# Default behavior assumes encode_html_chars=False.
helper(not_html_encoded, ensure_ascii=True)
helper(not_html_encoded, ensure_ascii=False)
# Make sure explicit encode_html_chars=False works.
helper(not_html_encoded, ensure_ascii=True, encode_html_chars=False)
helper(not_html_encoded, ensure_ascii=False, encode_html_chars=False)
# Make sure explicit encode_html_chars=True does the encoding.
helper(html_encoded, ensure_ascii=True, encode_html_chars=True)
helper(html_encoded, ensure_ascii=False, encode_html_chars=True)
def test_doubleLongIssue(self):
sut = {u('a'): -4342969734183514}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
assert sut == decoded
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert sut == decoded
def test_doubleLongDecimalIssue(self):
sut = {u('a'): -12345678901234.56789012}
encoded = json.dumps(sut)
decoded = json.loads(encoded)
assert sut == decoded
encoded = ujson.encode(sut, double_precision=15)
decoded = ujson.decode(encoded)
assert sut == decoded
def test_encodeNonCLocale(self):
import locale
savedlocale = locale.getlocale(locale.LC_NUMERIC)
try:
locale.setlocale(locale.LC_NUMERIC, 'it_IT.UTF-8')
except locale.Error:
try:
locale.setlocale(locale.LC_NUMERIC, 'Italian_Italy')
except locale.Error:
pytest.skip('Could not set locale for testing')
assert ujson.loads(ujson.dumps(4.78e60)) == 4.78e60
assert ujson.loads('4.78', precise_float=True) == 4.78
locale.setlocale(locale.LC_NUMERIC, savedlocale)
def test_encodeDecodeLongDecimal(self):
sut = {u('a'): -528656961.4399388}
encoded = ujson.dumps(sut, double_precision=15)
ujson.decode(encoded)
def test_decimalDecodeTestPrecise(self):
sut = {u('a'): 4.56}
encoded = ujson.encode(sut)
decoded = ujson.decode(encoded, precise_float=True)
assert sut == decoded
@pytest.mark.skipif(compat.is_platform_windows() and not compat.PY3,
reason="buggy on win-64 for py2")
def test_encodeDoubleTinyExponential(self):
num = 1e-40
assert num == ujson.decode(ujson.encode(num))
num = 1e-100
assert num == ujson.decode(ujson.encode(num))
num = -1e-45
assert num == ujson.decode(ujson.encode(num))
num = -1e-145
assert np.allclose(num, ujson.decode(ujson.encode(num)))
def test_encodeDictWithUnicodeKeys(self):
input = {u("key1"): u("value1"), u("key1"):
u("value1"), u("key1"): u("value1"),
u("key1"): u("value1"), u("key1"):
u("value1"), u("key1"): u("value1")}
output = ujson.encode(input)
input = {u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1"), u("بن"): u("value1"),
u("بن"): u("value1")}
output = ujson.encode(input) # noqa
def test_encodeDoubleConversion(self):
input = math.pi
output = ujson.encode(input)
assert round(input, 5) == round(json.loads(output), 5)
assert round(input, 5) == round(ujson.decode(output), 5)
def test_encodeWithDecimal(self):
input = 1.0
output = ujson.encode(input)
assert output == "1.0"
def test_encodeDoubleNegConversion(self):
input = -math.pi
output = ujson.encode(input)
assert round(input, 5) == round(json.loads(output), 5)
assert round(input, 5) == round(ujson.decode(output), 5)
def test_encodeArrayOfNestedArrays(self):
input = [[[[]]]] * 20
output = ujson.encode(input)
assert input == json.loads(output)
# assert output == json.dumps(input)
assert input == ujson.decode(output)
input = np.array(input)
tm.assert_numpy_array_equal(input, ujson.decode(
output, numpy=True, dtype=input.dtype))
def test_encodeArrayOfDoubles(self):
input = [31337.31337, 31337.31337, 31337.31337, 31337.31337] * 10
output = ujson.encode(input)
assert input == json.loads(output)
# assert output == json.dumps(input)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
def test_doublePrecisionTest(self):
input = 30.012345678901234
output = ujson.encode(input, double_precision=15)
assert input == json.loads(output)
assert input == ujson.decode(output)
output = ujson.encode(input, double_precision=9)
assert round(input, 9) == json.loads(output)
assert round(input, 9) == ujson.decode(output)
output = ujson.encode(input, double_precision=3)
assert round(input, 3) == json.loads(output)
assert round(input, 3) == ujson.decode(output)
def test_invalidDoublePrecision(self):
input = 30.12345678901234567890
pytest.raises(ValueError, ujson.encode, input, double_precision=20)
pytest.raises(ValueError, ujson.encode, input, double_precision=-1)
# will throw typeError
pytest.raises(TypeError, ujson.encode, input, double_precision='9')
# will throw typeError
pytest.raises(TypeError, ujson.encode,
input, double_precision=None)
def test_encodeStringConversion2(self):
input = "A string \\ / \b \f \n \r \t"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == '"A string \\\\ \\/ \\b \\f \\n \\r \\t"'
assert input == ujson.decode(output)
def test_decodeUnicodeConversion(self):
pass
def test_encodeUnicodeConversion1(self):
input = "Räksmörgås اسامة بن محمد بن عوض بن لادن"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeControlEscaping(self):
input = "\x19"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert input == dec
assert enc == json_unicode(input)
def test_encodeUnicodeConversion2(self):
input = "\xe6\x97\xa5\xd1\x88"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicodeSurrogatePair(self):
input = "\xf0\x90\x8d\x86"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicode4BytesUTF8(self):
input = "\xf0\x91\x80\xb0TRAILINGNORMAL"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeUnicode4BytesUTF8Highest(self):
input = "\xf3\xbf\xbf\xbfTRAILINGNORMAL"
enc = ujson.encode(input)
dec = ujson.decode(enc)
assert enc == json_unicode(input)
assert dec == json.loads(enc)
def test_encodeArrayInArray(self):
input = [[[[]]]]
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
def test_encodeIntConversion(self):
input = 31337
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeIntNegConversion(self):
input = -31337
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeLongNegConversion(self):
input = -9223372036854775808
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeListConversion(self):
input = [1, 2, 3, 4]
output = ujson.encode(input)
assert input == json.loads(output)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(
np.array(input), ujson.decode(output, numpy=True))
def test_encodeDictConversion(self):
input = {"k1": 1, "k2": 2, "k3": 3, "k4": 4}
output = ujson.encode(input) # noqa
assert input == json.loads(output)
assert input == ujson.decode(output)
def test_encodeNoneConversion(self):
input = None
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeTrueConversion(self):
input = True
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeFalseConversion(self):
input = False
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_encodeDatetimeConversion(self):
ts = time.time()
input = datetime.datetime.fromtimestamp(ts)
output = ujson.encode(input, date_unit='s')
expected = calendar.timegm(input.utctimetuple())
assert int(expected) == json.loads(output)
assert int(expected) == ujson.decode(output)
def test_encodeDateConversion(self):
ts = time.time()
input = datetime.date.fromtimestamp(ts)
output = ujson.encode(input, date_unit='s')
tup = (input.year, input.month, input.day, 0, 0, 0)
expected = calendar.timegm(tup)
assert int(expected) == json.loads(output)
assert int(expected) == ujson.decode(output)
def test_encodeTimeConversion(self):
tests = [
datetime.time(),
datetime.time(1, 2, 3),
datetime.time(10, 12, 15, 343243),
]
for test in tests:
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_encodeTimeConversion_pytz(self):
# see gh-11473: to_json segfaults with timezone-aware datetimes
test = datetime.time(10, 12, 15, 343243, pytz.utc)
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_encodeTimeConversion_dateutil(self):
# see gh-11473: to_json segfaults with timezone-aware datetimes
test = datetime.time(10, 12, 15, 343243, dateutil.tz.tzutc())
output = ujson.encode(test)
expected = '"{iso}"'.format(iso=test.isoformat())
assert expected == output
def test_nat(self):
input = NaT
assert ujson.encode(input) == 'null', "Expected null"
def test_npy_nat(self):
from distutils.version import LooseVersion
if LooseVersion(np.__version__) < LooseVersion('1.7.0'):
pytest.skip("numpy version < 1.7.0, is "
"{0}".format(np.__version__))
input = np.datetime64('NaT')
assert ujson.encode(input) == 'null', "Expected null"
def test_datetime_units(self):
from pandas._libs.lib import Timestamp
val = datetime.datetime(2013, 8, 17, 21, 17, 12, 215504)
stamp = Timestamp(val)
roundtrip = ujson.decode(ujson.encode(val, date_unit='s'))
assert roundtrip == stamp.value // 10**9
roundtrip = ujson.decode(ujson.encode(val, date_unit='ms'))
assert roundtrip == stamp.value // 10**6
roundtrip = ujson.decode(ujson.encode(val, date_unit='us'))
assert roundtrip == stamp.value // 10**3
roundtrip = ujson.decode(ujson.encode(val, date_unit='ns'))
assert roundtrip == stamp.value
pytest.raises(ValueError, ujson.encode, val, date_unit='foo')
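A compact restatement of the unit mapping the test above exercises: `date_unit` selects the divisor applied to the nanosecond epoch timestamp (illustrative):

```python
import datetime

val = datetime.datetime(2013, 8, 17, 21, 17, 12, 215504)
# 's' -> value // 10**9, 'ms' -> 10**6, 'us' -> 10**3, 'ns' -> 1
ujson.encode(val, date_unit='ms')  # integer milliseconds since the epoch
```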
def test_encodeToUTF8(self):
input = "\xe6\x97\xa5\xd1\x88"
enc = ujson.encode(input, ensure_ascii=False)
dec = ujson.decode(enc)
assert enc == json_unicode(input, ensure_ascii=False)
assert dec == json.loads(enc)
def test_decodeFromUnicode(self):
input = u("{\"obj\": 31337}")
dec1 = ujson.decode(input)
dec2 = ujson.decode(str(input))
assert dec1 == dec2
def test_encodeRecursionMax(self):
# 8 is the max recursion depth
class O2:
member = 0
class O1:
member = 0
input = O1()
input.member = O2()
input.member.member = input
try:
output = ujson.encode(input) # noqa
assert False, "Expected overflow exception"
except(OverflowError):
pass
def test_encodeDoubleNan(self):
input = np.nan
assert ujson.encode(input) == 'null', "Expected null"
def test_encodeDoubleInf(self):
input = np.inf
assert ujson.encode(input) == 'null', "Expected null"
def test_encodeDoubleNegInf(self):
input = -np.inf
assert ujson.encode(input) == 'null', "Expected null"
def test_decodeJibberish(self):
input = "fdsa sda v9sa fdsa"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenArrayStart(self):
input = "["
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenObjectStart(self):
input = "{"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenArrayEnd(self):
input = "]"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeArrayDepthTooBig(self):
input = '[' * (1024 * 1024)
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenObjectEnd(self):
input = "}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeObjectDepthTooBig(self):
input = '{' * (1024 * 1024)
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringUnterminated(self):
input = "\"TESTING"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringUntermEscapeSequence(self):
input = "\"TESTING\\\""
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeStringBadEscape(self):
input = "\"TESTING\\\""
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeTrueBroken(self):
input = "tru"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeFalseBroken(self):
input = "fa"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeNullBroken(self):
input = "n"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeBrokenDictKeyTypeLeakTest(self):
input = '{{1337:""}}'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except ValueError:
continue
assert False, "Wrong exception"
def test_decodeBrokenDictLeakTest(self):
input = '{{"key":"}'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
continue
assert False, "Wrong exception"
def test_decodeBrokenListLeakTest(self):
input = '[[[true'
for x in range(1000):
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
continue
assert False, "Wrong exception"
def test_decodeDictWithNoKey(self):
input = "{{{{31337}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeDictWithNoColonOrValue(self):
input = "{{{{\"key\"}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeDictWithNoValue(self):
input = "{{{{\"key\":}}}}"
try:
ujson.decode(input)
assert False, "Expected exception!"
except(ValueError):
return
assert False, "Wrong exception"
def test_decodeNumericIntPos(self):
input = "31337"
assert 31337 == ujson.decode(input)
def test_decodeNumericIntNeg(self):
input = "-31337"
assert -31337 == ujson.decode(input)
@pytest.mark.skipif(compat.PY3, reason="only PY2")
def test_encodeUnicode4BytesUTF8Fail(self):
input = "\xfd\xbf\xbf\xbf\xbf\xbf"
try:
enc = ujson.encode(input) # noqa
assert False, "Expected exception"
except OverflowError:
pass
def test_encodeNullCharacter(self):
input = "31337 \x00 1337"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
input = "\x00"
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
assert '" \\u0000\\r\\n "' == ujson.dumps(u(" \u0000\r\n "))
def test_decodeNullCharacter(self):
input = "\"31337 \\u0000 31337\""
assert ujson.decode(input) == json.loads(input)
def test_encodeListLongConversion(self):
input = [9223372036854775807, 9223372036854775807, 9223372036854775807,
9223372036854775807, 9223372036854775807, 9223372036854775807]
output = ujson.encode(input)
assert input == json.loads(output)
assert input == ujson.decode(output)
tm.assert_numpy_array_equal(np.array(input),
ujson.decode(output, numpy=True,
dtype=np.int64))
def test_encodeLongConversion(self):
input = 9223372036854775807
output = ujson.encode(input)
assert input == json.loads(output)
assert output == json.dumps(input)
assert input == ujson.decode(output)
def test_numericIntExp(self):
input = "1337E40"
output = ujson.decode(input)
assert output == json.loads(input)
def test_numericIntFrcExp(self):
input = "1.337E40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpEPLUS(self):
input = "1337E+9"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpePLUS(self):
input = "1.337e+40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpE(self):
input = "1337E40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpe(self):
input = "1337e40"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpEMinus(self):
input = "1.337E-4"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_decodeNumericIntExpeMinus(self):
input = "1.337e-4"
output = ujson.decode(input)
tm.assert_almost_equal(output, json.loads(input))
def test_dumpToFile(self):
f = StringIO()
ujson.dump([1, 2, 3], f)
assert "[1,2,3]" == f.getvalue()
def test_dumpToFileLikeObject(self):
class filelike:
def __init__(self):
self.bytes = ''
def write(self, bytes):
self.bytes += bytes
f = filelike()
ujson.dump([1, 2, 3], f)
assert "[1,2,3]" == f.bytes
def test_dumpFileArgsError(self):
try:
ujson.dump([], '')
except TypeError:
pass
else:
assert False, 'expected TypeError'
def test_loadFile(self):
f = StringIO("[1,2,3,4]")
assert [1, 2, 3, 4] == ujson.load(f)
f = StringIO("[1,2,3,4]")
tm.assert_numpy_array_equal(
np.array([1, 2, 3, 4]), ujson.load(f, numpy=True))
def test_loadFileLikeObject(self):
class filelike:
def read(self):
try:
self.end
except AttributeError:
self.end = True
return "[1,2,3,4]"
f = filelike()
assert [1, 2, 3, 4] == ujson.load(f)
f = filelike()
tm.assert_numpy_array_equal(
np.array([1, 2, 3, 4]), ujson.load(f, numpy=True))
def test_loadFileArgsError(self):
try:
ujson.load("[]")
except TypeError:
pass
else:
assert False, "expected TypeError"
def test_version(self):
assert re.match(r'^\d+\.\d+(\.\d+)?$', ujson.__version__), \
"ujson.__version__ must be a string like '1.4.0'"
def test_encodeNumericOverflow(self):
try:
ujson.encode(12839128391289382193812939)
except OverflowError:
pass
else:
assert False, "expected OverflowError"
def test_encodeNumericOverflowNested(self):
for n in range(0, 100):
class Nested:
x = 12839128391289382193812939
nested = Nested()
try:
ujson.encode(nested)
except OverflowError:
pass
else:
assert False, "expected OverflowError"
def test_decodeNumberWith32bitSignBit(self):
# Test that numbers that fit within 32 bits but would have the
# sign bit set (2**31 <= x < 2**32) are decoded properly.
boundary1 = 2**31 # noqa
boundary2 = 2**32 # noqa
docs = (
'{"id": 3590016419}',
'{{"id": {low}}}'.format(low=2**31),
'{{"id": {high}}}'.format(high=2**32),
'{{"id": {one_less}}}'.format(one_less=(2**32) - 1),
)
results = (3590016419, 2**31, 2**32, 2**32 - 1)
for doc, result in zip(docs, results):
assert ujson.decode(doc)['id'] == result
def test_encodeBigEscape(self):
for x in range(10):
if compat.PY3:
base = '\u00e5'.encode('utf-8')
else:
base = "\xc3\xa5"
input = base * 1024 * 1024 * 2
output = ujson.encode(input) # noqa
def test_decodeBigEscape(self):
for x in range(10):
if compat.PY3:
base = '\u00e5'.encode('utf-8')
else:
base = "\xc3\xa5"
quote = compat.str_to_bytes("\"")
input = quote + (base * 1024 * 1024 * 2) + quote
output = ujson.decode(input) # noqa
def test_toDict(self):
d = {u("key"): 31337}
class DictTest:
def toDict(self):
return d
o = DictTest()
output = ujson.encode(o)
dec = ujson.decode(output)
assert dec == d
def test_defaultHandler(self):
class _TestObject(object):
def __init__(self, val):
self.val = val
@property
def recursive_attr(self):
return _TestObject("recursive_attr")
def __str__(self):
return str(self.val)
pytest.raises(OverflowError, ujson.encode, _TestObject("foo"))
assert '"foo"' == ujson.encode(_TestObject("foo"),
default_handler=str)
def my_handler(obj):
return "foobar"
assert '"foobar"' == ujson.encode(_TestObject("foo"),
default_handler=my_handler)
def my_handler_raises(obj):
raise TypeError("I raise for anything")
with tm.assert_raises_regex(TypeError, "I raise for anything"):
ujson.encode(_TestObject("foo"), default_handler=my_handler_raises)
def my_int_handler(obj):
return 42
assert ujson.decode(ujson.encode(
_TestObject("foo"), default_handler=my_int_handler)) == 42
def my_obj_handler(obj):
return datetime.datetime(2013, 2, 3)
assert (ujson.decode(ujson.encode(datetime.datetime(2013, 2, 3))) ==
ujson.decode(ujson.encode(_TestObject("foo"),
default_handler=my_obj_handler)))
l = [_TestObject("foo"), _TestObject("bar")]
assert (json.loads(json.dumps(l, default=str)) ==
ujson.decode(ujson.encode(l, default_handler=str)))
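The `default_handler` tests above reduce to one rule: an object ujson cannot encode natively raises `OverflowError` unless a handler maps it to something encodable. A distilled sketch (the `Point` class is illustrative):

```python
class Point:
    def __init__(self, x, y):
        self.x, self.y = x, y

# Without default_handler, ujson.encode(Point(1, 2)) raises OverflowError.
encoded = ujson.encode(Point(1, 2), default_handler=lambda o: [o.x, o.y])
assert ujson.decode(encoded) == [1, 2]
```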
class TestNumpyJSONTests(object):
def test_Bool(self):
b = np.bool(True)
assert ujson.decode(ujson.encode(b)) == b
def test_BoolArray(self):
inpt = np.array([True, False, True, True, False, True, False, False],
dtype=np.bool)
outp = np.array(ujson.decode(ujson.encode(inpt)), dtype=np.bool)
tm.assert_numpy_array_equal(inpt, outp)
def test_Int(self):
num = np.int(2562010)
assert np.int(ujson.decode(ujson.encode(num))) == num
num = np.int8(127)
assert np.int8(ujson.decode(ujson.encode(num))) == num
num = np.int16(2562010)
assert np.int16(ujson.decode(ujson.encode(num))) == num
num = np.int32(2562010)
assert np.int32(ujson.decode(ujson.encode(num))) == num
num = np.int64(2562010)
assert np.int64(ujson.decode(ujson.encode(num))) == num
num = np.uint8(255)
assert np.uint8(ujson.decode(ujson.encode(num))) == num
num = np.uint16(2562010)
assert np.uint16(ujson.decode(ujson.encode(num))) == num
num = np.uint32(2562010)
assert np.uint32(ujson.decode(ujson.encode(num))) == num
num = np.uint64(2562010)
assert np.uint64(ujson.decode(ujson.encode(num))) == num
def test_IntArray(self):
arr = np.arange(100, dtype=np.int)
dtypes = (np.int, np.int8, np.int16, np.int32, np.int64,
np.uint, np.uint8, np.uint16, np.uint32, np.uint64)
for dtype in dtypes:
inpt = arr.astype(dtype)
outp = np.array(ujson.decode(ujson.encode(inpt)), dtype=dtype)
tm.assert_numpy_array_equal(inpt, outp)
def test_IntMax(self):
num = np.int(np.iinfo(np.int).max)
assert np.int(ujson.decode(ujson.encode(num))) == num
num = np.int8(np.iinfo(np.int8).max)
assert np.int8(ujson.decode(ujson.encode(num))) == num
num = np.int16(np.iinfo(np.int16).max)
assert np.int16(ujson.decode(ujson.encode(num))) == num
num = np.int32(np.iinfo(np.int32).max)
assert np.int32(ujson.decode(ujson.encode(num))) == num
num = np.uint8(np.iinfo(np.uint8).max)
assert np.uint8(ujson.decode(ujson.encode(num))) == num
num = np.uint16(np.iinfo(np.uint16).max)
assert np.uint16(ujson.decode(ujson.encode(num))) == num
num = np.uint32(np.iinfo(np.uint32).max)
assert np.uint32(ujson.decode(ujson.encode(num))) == num
if not compat.is_platform_32bit():
num = np.int64(np.iinfo(np.int64).max)
assert np.int64(ujson.decode(ujson.encode(num))) == num
# uint64 max will always overflow as it's encoded to signed
num = np.uint64(np.iinfo(np.int64).max)
assert np.uint64(ujson.decode(ujson.encode(num))) == num
def test_Float(self):
num = np.float(256.2013)
assert np.float(ujson.decode(ujson.encode(num))) == num
num = np.float32(256.2013)
assert np.float32(ujson.decode(ujson.encode(num))) == num
num = np.float64(256.2013)
assert np.float64(ujson.decode(ujson.encode(num))) == num
def test_FloatArray(self):
arr = np.arange(12.5, 185.72, 1.7322, dtype=np.float)
dtypes = (np.float, np.float32, np.float64)
for dtype in dtypes:
inpt = arr.astype(dtype)
outp = np.array(ujson.decode(ujson.encode(
inpt, double_precision=15)), dtype=dtype)
tm.assert_almost_equal(inpt, outp)
def test_FloatMax(self):
num = np.float(np.finfo(np.float).max / 10)
tm.assert_almost_equal(np.float(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
num = np.float32(np.finfo(np.float32).max / 10)
tm.assert_almost_equal(np.float32(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
num = np.float64(np.finfo(np.float64).max / 10)
tm.assert_almost_equal(np.float64(ujson.decode(
ujson.encode(num, double_precision=15))), num, 15)
def test_Arrays(self):
arr = np.arange(100)
arr = arr.reshape((10, 10))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = arr.reshape((5, 5, 4))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = arr.reshape((100, 1))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
arr = np.arange(96)
arr = arr.reshape((2, 2, 2, 2, 3, 2))
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
tm.assert_numpy_array_equal(ujson.decode(
ujson.encode(arr), numpy=True), arr)
l = ['a', list(), dict(), dict(), list(),
42, 97.8, ['a', 'b'], {'key': 'val'}]
arr = np.array(l)
tm.assert_numpy_array_equal(
np.array(ujson.decode(ujson.encode(arr))), arr)
arr = np.arange(100.202, 200.202, 1, dtype=np.float32)
arr = arr.reshape((5, 5, 4))
outp = np.array(ujson.decode(ujson.encode(arr)), dtype=np.float32)
tm.assert_almost_equal(arr, outp)
outp = ujson.decode(ujson.encode(arr), numpy=True, dtype=np.float32)
tm.assert_almost_equal(arr, outp)
def test_OdArray(self):
def will_raise():
ujson.encode(np.array(1))
pytest.raises(TypeError, will_raise)
def test_ArrayNumpyExcept(self):
input = ujson.dumps([42, {}, 'a'])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(TypeError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps(['a', 'b', [], 'c'])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([['a'], 42])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([42, ['a'], 42])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{}, []])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([42, None])
try:
ujson.decode(input, numpy=True)
assert False, "Expected exception!"
except(TypeError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{'a': 'b'}])
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps({'a': {'b': {'c': 42}}})
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
input = ujson.dumps([{'a': 42, 'b': 23}, {'c': 17}])
try:
ujson.decode(input, numpy=True, labelled=True)
assert False, "Expected exception!"
except(ValueError):
pass
except:
assert False, "Wrong exception"
def test_ArrayNumpyLabelled(self):
input = {'a': []}
output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
assert (np.empty((1, 0)) == output[0]).all()
assert (np.array(['a']) == output[1]).all()
assert output[2] is None
input = [{'a': 42}]
output = ujson.loads(ujson.dumps(input), numpy=True, labelled=True)
assert (np.array([42]) == output[0]).all()
assert output[1] is None
assert (np.array([u('a')]) == output[2]).all()
# Write out the dump explicitly so there is no dependency on iteration
# order GH10837
input_dumps = ('[{"a": 42, "b":31}, {"a": 24, "c": 99}, '
'{"a": 2.4, "b": 78}]')
output = ujson.loads(input_dumps, numpy=True, labelled=True)
expectedvals = np.array(
[42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
assert (expectedvals == output[0]).all()
assert output[1] is None
assert (np.array([u('a'), 'b']) == output[2]).all()
input_dumps = ('{"1": {"a": 42, "b":31}, "2": {"a": 24, "c": 99}, '
'"3": {"a": 2.4, "b": 78}}')
output = ujson.loads(input_dumps, numpy=True, labelled=True)
expectedvals = np.array(
[42, 31, 24, 99, 2.4, 78], dtype=int).reshape((3, 2))
assert (expectedvals == output[0]).all()
assert (np.array(['1', '2', '3']) == output[1]).all()
assert (np.array(['a', 'b']) == output[2]).all()
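For reference, the `numpy=True, labelled=True` path decodes a list of records into a `(values, row_labels, column_labels)` triple, which is exactly what the DataFrame tests below unpack (illustrative, mirroring `test_ArrayNumpyLabelled`):

```python
values, rows, cols = ujson.loads('[{"a": 1, "b": 2}]', numpy=True, labelled=True)
# values ~ np.array([[1, 2]]); rows is None; cols ~ np.array(['a', 'b'])
```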
class TestPandasJSONTests(object):
def test_DataFrame(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(ujson.decode(ujson.encode(df)))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
dec = _clean_dict(ujson.decode(ujson.encode(df, orient="split")))
outp = DataFrame(**dec)
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="records")))
outp.index = df.index
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="values")))
outp.index = df.index
assert (df.values == outp.values).all()
outp = DataFrame(ujson.decode(ujson.encode(df, orient="index")))
assert (df.transpose() == outp).values.all()
tm.assert_index_equal(df.transpose().columns, outp.columns)
tm.assert_index_equal(df.transpose().index, outp.index)
def test_DataFrameNumpy(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(ujson.decode(ujson.encode(df), numpy=True))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
dec = _clean_dict(ujson.decode(ujson.encode(df, orient="split"),
numpy=True))
outp = DataFrame(**dec)
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
outp = DataFrame(ujson.decode(ujson.encode(df, orient="index"),
numpy=True))
assert (df.transpose() == outp).values.all()
tm.assert_index_equal(df.transpose().columns, outp.columns)
tm.assert_index_equal(df.transpose().index, outp.index)
def test_DataFrameNested(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
nested = {'df1': df, 'df2': df.copy()}
exp = {'df1': ujson.decode(ujson.encode(df)),
'df2': ujson.decode(ujson.encode(df))}
assert ujson.decode(ujson.encode(nested)) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="index")),
'df2': ujson.decode(ujson.encode(df, orient="index"))}
assert ujson.decode(ujson.encode(nested, orient="index")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="records")),
'df2': ujson.decode(ujson.encode(df, orient="records"))}
assert ujson.decode(ujson.encode(nested, orient="records")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="values")),
'df2': ujson.decode(ujson.encode(df, orient="values"))}
assert ujson.decode(ujson.encode(nested, orient="values")) == exp
exp = {'df1': ujson.decode(ujson.encode(df, orient="split")),
'df2': ujson.decode(ujson.encode(df, orient="split"))}
assert ujson.decode(ujson.encode(nested, orient="split")) == exp
def test_DataFrameNumpyLabelled(self):
df = DataFrame([[1, 2, 3], [4, 5, 6]], index=[
'a', 'b'], columns=['x', 'y', 'z'])
# column indexed
outp = DataFrame(*ujson.decode(ujson.encode(df),
numpy=True, labelled=True))
assert (df.T == outp).values.all()
tm.assert_index_equal(df.T.columns, outp.columns)
tm.assert_index_equal(df.T.index, outp.index)
outp = DataFrame(*ujson.decode(ujson.encode(df, orient="records"),
numpy=True, labelled=True))
outp.index = df.index
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
outp = DataFrame(*ujson.decode(ujson.encode(df, orient="index"),
numpy=True, labelled=True))
assert (df == outp).values.all()
tm.assert_index_equal(df.columns, outp.columns)
tm.assert_index_equal(df.index, outp.index)
def test_Series(self):
s = Series([10, 20, 30, 40, 50, 60], name="series",
index=[6, 7, 8, 9, 10, 15]).sort_values()
# column indexed
outp = Series(ujson.decode(ujson.encode(s))).sort_values()
exp = Series([10, 20, 30, 40, 50, 60],
index=['6', '7', '8', '9', '10', '15'])
tm.assert_series_equal(outp, exp)
outp = Series(ujson.decode(ujson.encode(s), numpy=True)).sort_values()
tm.assert_series_equal(outp, exp)
dec = _clean_dict(ujson.decode(ujson.encode(s, orient="split")))
outp = Series(**dec)
tm.assert_series_equal(outp, s)
dec = _clean_dict(ujson.decode(ujson.encode(s, orient="split"),
numpy=True))
outp = Series(**dec)
tm.assert_series_equal(outp, s)
exp_np = Series(np.array([10, 20, 30, 40, 50, 60]))
exp_pd = Series([10, 20, 30, 40, 50, 60])
outp = Series(ujson.decode(ujson.encode(s, orient="records"),
numpy=True))
tm.assert_series_equal(outp, exp_np)
outp = Series(ujson.decode(ujson.encode(s, orient="records")))
exp = Series([10, 20, 30, 40, 50, 60])
tm.assert_series_equal(outp, exp_pd)
outp = Series(ujson.decode(ujson.encode(s, orient="values"),
numpy=True))
tm.assert_series_equal(outp, exp_np)
outp = Series(ujson.decode(ujson.encode(s, orient="values")))
tm.assert_series_equal(outp, exp_pd)
outp = Series(ujson.decode(ujson.encode(
s, orient="index"))).sort_values()
exp = Series([10, 20, 30, 40, 50, 60],
index=['6', '7', '8', '9', '10', '15'])
tm.assert_series_equal(outp, exp)
outp = Series(ujson.decode(ujson.encode(
s, orient="index"), numpy=True)).sort_values()
tm.assert_series_equal(outp, exp)
def test_SeriesNested(self):
s = Series([10, 20, 30, 40, 50, 60], name="series",
index=[6, 7, 8, 9, 10, 15]).sort_values()
nested = {'s1': s, 's2': s.copy()}
exp = {'s1': ujson.decode(ujson.encode(s)),
's2': ujson.decode(ujson.encode(s))}
assert ujson.decode(ujson.encode(nested)) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="split")),
's2': ujson.decode(ujson.encode(s, orient="split"))}
assert ujson.decode(ujson.encode(nested, orient="split")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="records")),
's2': ujson.decode(ujson.encode(s, orient="records"))}
assert ujson.decode(ujson.encode(nested, orient="records")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="values")),
's2': ujson.decode(ujson.encode(s, orient="values"))}
assert ujson.decode(ujson.encode(nested, orient="values")) == exp
exp = {'s1': ujson.decode(ujson.encode(s, orient="index")),
's2': ujson.decode(ujson.encode(s, orient="index"))}
assert ujson.decode(ujson.encode(nested, orient="index")) == exp
def test_Index(self):
i = Index([23, 45, 18, 98, 43, 11], name="index")
# column indexed
outp = Index(ujson.decode(ujson.encode(i)), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i), numpy=True), name='index')
tm.assert_index_equal(i, outp)
dec = _clean_dict(ujson.decode(ujson.encode(i, orient="split")))
outp = Index(**dec)
tm.assert_index_equal(i, outp)
assert i.name == outp.name
dec = _clean_dict(ujson.decode(ujson.encode(i, orient="split"),
numpy=True))
outp = Index(**dec)
tm.assert_index_equal(i, outp)
assert i.name == outp.name
outp = Index(ujson.decode(ujson.encode(i, orient="values")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="values"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="records")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="records"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="index")),
name='index')
tm.assert_index_equal(i, outp)
outp = Index(ujson.decode(ujson.encode(i, orient="index"),
numpy=True), name='index')
tm.assert_index_equal(i, outp)
def test_datetimeindex(self):
from pandas.core.indexes.datetimes import date_range
rng = date_range('1/1/2000', periods=20)
encoded = ujson.encode(rng, date_unit='ns')
decoded = DatetimeIndex(np.array(ujson.decode(encoded)))
tm.assert_index_equal(rng, decoded)
ts = Series(np.random.randn(len(rng)), index=rng)
decoded = Series(ujson.decode(ujson.encode(ts, date_unit='ns')))
idx_values = decoded.index.values.astype(np.int64)
decoded.index = DatetimeIndex(idx_values)
tm.assert_series_equal(ts, decoded)
def test_decodeArrayTrailingCommaFail(self):
input = "[31337,]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayLeadingCommaFail(self):
input = "[,31337]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayOnlyCommaFail(self):
input = "[,]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayUnmatchedBracketFail(self):
input = "[]]"
try:
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayEmpty(self):
input = "[]"
ujson.decode(input)
def test_decodeArrayOneItem(self):
input = "[31337]"
ujson.decode(input)
def test_decodeBigValue(self):
input = "9223372036854775807"
ujson.decode(input)
def test_decodeSmallValue(self):
input = "-9223372036854775808"
ujson.decode(input)
    def test_decodeTooBigValue(self):
        try:
            input = "9223372036854775808"  # 2 ** 63, one past the int64 maximum
            ujson.decode(input)
        except ValueError:
            pass
        else:
            assert False, "expected ValueError"
    def test_decodeTooSmallValue(self):
        try:
            input = "-9223372036854775809"  # one past the int64 minimum of -2 ** 63
            ujson.decode(input)
        except ValueError:
            pass
        else:
            assert False, "expected ValueError"
    def test_decodeVeryTooBigValue(self):
        try:
            input = "90223372036854775808"  # far beyond the int64 range
            ujson.decode(input)
        except ValueError:
            pass
        else:
            assert False, "expected ValueError"
    def test_decodeVeryTooSmallValue(self):
        try:
            input = "-90223372036854775809"  # far below the int64 range
            ujson.decode(input)
        except ValueError:
            pass
        else:
            assert False, "expected ValueError"
def test_decodeWithTrailingWhitespaces(self):
input = "{}\n\t "
ujson.decode(input)
def test_decodeWithTrailingNonWhitespaces(self):
try:
input = "{}\n\t a"
ujson.decode(input)
except ValueError:
pass
else:
assert False, "expected ValueError"
def test_decodeArrayWithBigInt(self):
try:
ujson.loads('[18446098363113800555]')
except ValueError:
pass
else:
assert False, "expected ValueError"
    def test_decodeArrayFaultyUnicode(self):
        # assumed intent: a byte string that is not valid UTF-8 must fail to
        # decode ('\xff' can never start a UTF-8 sequence)
        try:
            ujson.loads('["\xff"]')
        except ValueError:
            pass
        else:
            assert False, "expected ValueError"
    def test_decodeFloatingPointAdditionalTests(self):
        # each literal is parsed by ujson and checked against Python's own
        # float conversion to 15 digits
        places = 15
        for value in ("-1.1234567893", "-1.234567893", "-1.34567893",
                      "-1.4567893", "-1.567893", "-1.67893", "-1.7893",
                      "-1.893", "-1.3", "1.1234567893", "1.234567893",
                      "1.34567893", "1.4567893", "1.567893", "1.67893",
                      "1.7893", "1.893", "1.3"):
            tm.assert_almost_equal(float(value), ujson.loads(value),
                                   check_less_precise=places)
def test_encodeBigSet(self):
s = set()
for x in range(0, 100000):
s.add(x)
ujson.encode(s)
def test_encodeEmptySet(self):
s = set()
assert "[]" == ujson.encode(s)
def test_encodeSet(self):
s = set([1, 2, 3, 4, 5, 6, 7, 8, 9])
enc = ujson.encode(s)
dec = ujson.decode(enc)
for v in dec:
assert v in s
def _clean_dict(d):
    # ujson decodes keys as unicode, but **kwargs must be plain str in
    # Python 2; normalise so the dict can be splatted into Series/Index
    return {str(k): v for k, v in compat.iteritems(d)}
| bsd-3-clause | 3,455,317,893,202,532,000 | 33.057716 | 79 | 0.557984 | false |
seanbell/opensurfaces | server/bsdfs/experiments.py | 1 | 9415 | import json
from decimal import Decimal
from collections import Counter
from django.conf import settings
from django.db.models import Q
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User
from accounts.models import UserProfile
from shapes.models import Shape, MaterialShape
from bsdfs.models import EnvironmentMap, ShapeBsdfLabel_wd, ShapeBsdfQuality
def configure_experiments():
""" This function is automatically called by
the command ./manage.py mtconfigure """
# must be imported locally to avoid a circular import
from mturk.utils import configure_experiment
# aliases
sandbox = settings.MTURK_SANDBOX
production = not sandbox
# set up envmaps
envmap = EnvironmentMap.objects.get_or_create(
user=User.objects.get_or_create(
username='admin')[0].get_profile(),
name='ennis')
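    # get_or_create returns an (object, created) tuple; the call above only
    # needs its side effect of ensuring the 'ennis' environment map exists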
for envmap in EnvironmentMap.objects.all():
configure_experiment(
slug='bsdf_wd',
template_dir='bsdfs/experiments',
module='bsdfs.experiments',
examples_group_attr='shape',
variant={'envmap_id': envmap.id},
version=1, # 2: intrinsic images, 1: original opensurfaces
reward=Decimal('0.10'),
num_outputs_max=1,
contents_per_hit=10,
max_active_hits=2000,
content_type_model=MaterialShape,
out_content_type_model=ShapeBsdfLabel_wd,
out_content_attr='shape',
content_filter={
#'synthetic': True,
#'synthetic_slug__in': ['teapot', 'teacup', 'spoon', 'coyote'],
'invalid': False,
'pixel_area__gt': Shape.MIN_PIXEL_AREA,
'num_vertices__gte': 10,
'correct': True,
'substance__isnull': False,
'substance__fail': False,
'photo__whitebalanced': True,
'photo__scene_category_correct': True,
},
title='Adjust a blob to match an image',
description='Looking at an image, your goal is to adjust the appearance '
'of a blob so that it matches a target photograph. A modern '
'browser is required.',
keywords='material,appearance,image,picture,classify,BRDF,microfacet,blob,appearance',
frame_height=1150,
requirements={},
#qualifications='{ "bsdf_match": 1 }',
auto_add_hits=False, # settings.MTURK_SANDBOX,
)
for attr in ('color', 'gloss'):
content_filter = {
'invalid': False,
'shape__invalid': False,
'give_up': False,
#'shape__pixel_area__gt': Shape.MIN_PIXEL_AREA,
#'shape__correct': True,
#'shape__substance__isnull': False,
#'shape__substance__fail': False,
#'shape__photo__whitebalanced': True,
#'shape__photo__scene_category_correct': True,
}
if production and attr == 'gloss':
content_filter['color_correct'] = True
configure_experiment(
slug='quality_bsdf_%s' % attr,
template_dir='bsdfs/experiments',
module='bsdfs.experiments',
examples_group_attr='shape',
variant={'bsdf_version': 'wd'},
version=1, # 2: intrinsic images, 1: original opensurfaces
reward=Decimal('0.04'),
num_outputs_max=5,
contents_per_hit=40,
content_type_model=ShapeBsdfLabel_wd,
out_content_type_model=ShapeBsdfQuality,
out_content_attr='shapebsdflabel_wd',
content_filter=content_filter,
title='Click on blobs that match an image (%s)' % attr,
description='This task involves clicking on images that match a blob next to the image.',
keywords='material,substance,shape,image,picture,classify,label,blob,match,appearance',
#frame_height=7500,
requirements={},
auto_add_hits=False, # settings.MTURK_SANDBOX,
)
def update_votes_cubam(show_progress=False):
""" This function is automatically called by
mturk.tasks.mturk_update_votes_cubam_task """
from mturk.cubam import update_votes_cubam
changed_objects = []
for bsdf_version in ('wd',):
bsdf_ct = ContentType.objects.get(
app_label="bsdfs", model="shapebsdflabel_%s" % bsdf_version)
bsdf_model = bsdf_ct.model_class()
# gloss
changed_objects += update_votes_cubam(
bsdf_model, ShapeBsdfQuality.objects.filter(
invalid=False, content_type=bsdf_ct,
gloss_correct__isnull=False),
'object_id', 'gloss_correct', 'gloss_correct',
score_threshold=0, min_votes=5,
show_progress=show_progress,
return_changed_objects=True,
experiment_filter={
'slug': 'quality_bsdf_gloss',
'variant': json.dumps({'bsdf_version': bsdf_version}),
}
)
# color
changed_objects += update_votes_cubam(
bsdf_model, ShapeBsdfQuality.objects.filter(
invalid=False, content_type=bsdf_ct,
color_correct__isnull=False),
'object_id', 'color_correct', 'color_correct',
score_threshold=0, min_votes=5,
show_progress=show_progress,
return_changed_objects=True,
experiment_filter={
'slug': 'quality_bsdf_color',
'variant': json.dumps({'bsdf_version': bsdf_version}),
}
)
return changed_objects
def update_changed_objects(changed_objects):
""" This function is automatically called by
mturk.tasks.mturk_update_votes_cubam_task
with all objects that were changed by new votes. """
pass
def external_task_extra_context(slug, context):
""" Add extra context for each task (called by
``mturk.views.external.external_task_GET``) """
if slug.startswith('bsdf'):
context['html_yes'] = 'blob matches'
context['html_no'] = 'blob does not match'
elif slug.startswith('quality_bsdf_color'):
context['html_yes'] = 'color matches'
context['html_no'] = 'color does not match'
elif slug.startswith('quality_bsdf_gloss'):
context['html_yes'] = 'gloss matches'
context['html_no'] = 'gloss does not match'
def configure_qualifications():
from mturk.models import MtQualification, MtQualificationAssignment
from mturk.utils import get_or_create_mturk_worker
#
# BSDF matching
bsdfmatch = MtQualification.objects.get_or_create(
slug="bsdf_match",
defaults={
'name': "Appearance Matching Master",
'keywords': "appearance,matching,blob,graphics,BRDF",
'description': "You are an expert at matching the appearance of a synthetic blob and a shape in an image."
}
)[0]
good_users = dict(Counter(
ShapeBsdfLabel_wd.objects
.filter(color_correct=True, gloss_correct=True)
.values_list('user__mturk_worker_id', flat=True)
).most_common())
bad_users = dict(Counter(
ShapeBsdfLabel_wd.objects
.filter(Q(color_correct=False) | Q(gloss_correct=False))
.values_list('user__mturk_worker_id', flat=True)
).most_common())
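    # policy encoded below: at least 30 good labels with >= 75% accuracy grants
    # the qualification, < 50% accuracy revokes it, and < 10% would block the
    # worker (the block() calls themselves are commented out and only logged)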
for (id, ngood) in good_users.iteritems():
nbad = bad_users[id] if id in bad_users else 0
if ngood + nbad > 0:
perc = float(ngood) / float(ngood + nbad)
if ngood >= 30:
worker = UserProfile.objects.get(mturk_worker_id=id)
if perc >= 0.75:
bsdfmatch_asst, created = bsdfmatch.assignments.get_or_create(
worker=worker)
print 'Granting bsdf_match to %s (%s good, %s bad)' % (id, ngood, nbad)
bsdfmatch_asst.set_value(1)
elif perc < 0.1 and not worker.always_approve:
# worker.block(reason=("For blob matching tasks, your accuracy is %s%%, which is too low. " +
#"Most workers have an accuracy above 75%%.") % int(perc * 100))
print 'WOULD block user %s (%s good, %s bad, %s%%)' % (
worker.mturk_worker_id, ngood, nbad, perc * 100)
elif perc < 0.5:
try:
bsdfmatch.assignments.get(worker=worker).set_value(0)
print 'Revoking bsdf_match from %s (%s good, %s bad)' % (id, ngood, nbad)
except MtQualificationAssignment.DoesNotExist:
pass
elif nbad >= 30 and perc < 0.1 and not worker.always_approve:
# worker.block(reason=("For blob matching tasks, your accuracy is %s%%, which is too low. " +
#"Most workers have an accuracy above 75%%.") % int(perc * 100))
print 'WOULD block user %s (%s good, %s bad, %s%%)' % (
worker.mturk_worker_id, ngood, nbad, perc * 100)
#
# Grant quals to admin
if settings.MTURK_ADMIN_WORKER_ID:
admin_user = get_or_create_mturk_worker(settings.MTURK_ADMIN_WORKER_ID)
bsdfmatch.assignments.get_or_create(worker=admin_user)[0].set_value(1)
| mit | 1,125,210,961,934,694,400 | 38.229167 | 118 | 0.581413 | false |
N9dZ/LearnCodeTheHardWay | ex24.py | 1 | 1131 | print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
# here's a variable holding a real poem
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explanation
\n\t\twhere there is none.
"""
# print content of the poem
print "----------------"
print poem
print "----------------"
five = 10 - 2 + 3 - 6
print "This should be five: %s" % five
# a function to make some calculations
def secret_formula(started):
jelly_beans = started * 500
jars = jelly_beans / 1000
crates = jars / 100
return jelly_beans, jars, crates
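# note: with Python 2 integers, / floors the result, so jars and crates
# come out as whole numbers (e.g. 5000000 / 1000 == 5000)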
# some calculations
start_point = 10000
beans, jars, crates = secret_formula(start_point)
# one variable for one value
print "With a starting point of: %d" % start_point
print "We'd have %d beans, %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
# use the function's return to value the results directly
print "We can also do that this way:"
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point) | gpl-2.0 | 2,550,332,171,832,219,600 | 31.342857 | 83 | 0.697613 | false |
tanghaibao/goatools | goatools/godag/relationship_str.py | 1 | 3956 | """Create strings representing relationships on GO Terms.
+------- has 'part_of' relationship(s)
| +-- pointed to by a GO ID with a 'part_of' relationship
| |
V V
GO:0008150 L00 D00 .... .rdu biological_process
GO:0050896 L01 D01 .... .rdu response to stimulus
GO:0042221 L02 D02 .... p... response to chemical
GO:0032501 L01 D01 .... .rdu multicellular organismal process
GO:0003008 L02 D02 .... .r.. system process
GO:0051606 L02 D02 .... .... detection of stimulus
GO:0050877 L03 D03 .... .rdu nervous system process
GO:0009593 L03 D03 P... .... detection of chemical stimulus
GO:0007600 L04 D04 .... pr.. sensory perception
GO:0050906 L03 D03 P... .... detection of stimulus involved in sensory perception
GO:0050890 L04 D04 .... .... cognition
GO:0050907 L04 D04 P... .... detection of chemical stimulus involved in sensory perception
GO:0007606 L05 D05 .... p... sensory perception of chemical stimulus
GO:0050893 L05 D05 P... .... sensory processing
GO:0050911 L05 D05 P... .... detection of chemical stimulus involved in sensory perception of smell
GO:0007608 L06 D06 .... p... sensory perception of smell
"""
__copyright__ = "Copyright (C) 2010-2019, DV Klopfenstein, H Tang, All rights reserved."
__author__ = "DV Klopfenstein"
from collections import OrderedDict
from goatools.godag.consts import RELATIONSHIP_LIST
from goatools.godag.consts import RELATIONSHIP_SET
# pylint: disable=too-few-public-methods,bad-whitespace
class RelationshipStr(object):
"""Create strings representing relationships on GO Terms."""
# go-basic.obo: fmt(1.2) rel(2019-02-20) 47,177 GO Terms; optional_attrs(relationship)
# relationship:
# 6,882 part_of
# 3,230 regulates
# 2,804 negatively_regulates
# 2,785 positively_regulates
rel2chr = OrderedDict([
('part_of', 'P'),
('regulates', 'R'),
('negatively_regulates', 'D'),
('positively_regulates', 'U')])
rev2chr = OrderedDict([
('part_of', 'p'),
('regulates', 'r'),
('negatively_regulates', 'd'),
('positively_regulates', 'u')])
def __init__(self, relationships=None):
assert set(self.rel2chr.keys()) == RELATIONSHIP_SET
# Ordered relationships
_rels = relationships if relationships else set()
self.rels = [r for r in RELATIONSHIP_LIST if r in _rels]
def str_relationships(self, goobj):
"""Get a string representing the presence of absence of relationships. Ex: P..."""
rel_cur = goobj.relationship
return "".join([self.rel2chr.get(r, '?') if r in rel_cur else '.' for r in self.rels])
def str_rel_short(self, goobj):
"""Get a string representing the presence of absence of relationships. Ex: P"""
if not goobj.relationship:
return ''
rel_cur = goobj.relationship
return "".join([self.rel2chr.get(r, '?') for r in self.rels if r in rel_cur])
def str_relationships_rev(self, goobj):
"""Get a string representing the presence of absence of relationships. Ex: pr.."""
rel_cur = goobj.relationship_rev
return "".join([self.rev2chr[r] if r in rel_cur else '.' for r in self.rels])
def prt_keys(self, prt, pre):
"""Print the alias for a relationship and its alias."""
prt.write('{PRE}Relationship to parent: {ABC}\n'.format(
PRE=pre, ABC=''.join(self.rel2chr.values())))
for rel, alias in self.rel2chr.items():
prt.write('{PRE} {A} {DESC}\n'.format(PRE=pre, A=alias, DESC=rel))
prt.write('\n{PRE}Relationship to child: {ABC}\n'.format(
PRE=pre, ABC=''.join(self.rev2chr.values())))
for rel, alias in self.rev2chr.items():
prt.write('{PRE} {A} {DESC}\n'.format(PRE=pre, A=alias, DESC=rel))
# Copyright (C) 2010-2019, DV Klopfenstein, H Tang, All rights reserved.
| bsd-2-clause | 6,686,008,549,504,692,000 | 42.472527 | 99 | 0.63094 | false |
jclgoodwin/bustimes.org.uk | vehicles/test_service_map_consumer.py | 1 | 3587 | import vcr
from freezegun import freeze_time
from channels.testing import WebsocketCommunicator
from django.test import TestCase, override_settings
from django.core.cache import cache
from django.utils import timezone
from busstops.models import Region, Service, ServiceCode, StopPoint, DataSource, SIRISource, Operator
from bustimes.models import Route, Calendar, Trip
from buses.routing import application
from .siri_one_shot import siri_one_shot
@override_settings(CACHES={'default': {'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'}})
class WebsocketConsumerTest(TestCase):
@classmethod
def setUpTestData(cls):
source = DataSource.objects.create(name='Icarus')
destination = StopPoint.objects.create(common_name='Plymouth Aerodrome', active=True)
region = Region.objects.create(id='SW', name='South West')
operator = Operator.objects.create(id='SDVN', region=region, name='Stagecoach Devonshire')
cls.service = Service.objects.create(service_code='swe_33-FLC-_-y10', date='2019-06-08')
cls.service.operator.add(operator)
route = Route.objects.create(service=cls.service, source=source)
calendar = Calendar.objects.create(start_date='2019-06-08', mon=True, tue=True, wed=True, thu=True, fri=True,
sat=True, sun=True)
Trip.objects.create(route=route, start='20:40', end='20:50', calendar=calendar, destination=destination)
cls.code_1 = ServiceCode.objects.create(service=cls.service, code='FLCN', scheme='Devon SIRI')
cls.code_2 = ServiceCode.objects.create(service=cls.service, code='FLC', scheme='Bucks SIRI')
cls.siri_source = SIRISource.objects.create(name='Devon', requestor_ref='torbaydevon_siri_traveline',
url='http://data.icarus.cloudamber.com/StopMonitoringRequest.ashx')
async def test_service_map_consumer(self):
with vcr.use_cassette('data/vcr/icarus.yaml'):
with freeze_time('2019-06-08'):
url = f"/ws/vehicle_positions/services/{self.service.id}"
communicator = WebsocketCommunicator(application, url)
connected, subprotocol = await communicator.connect()
self.assertTrue(connected)
message = await communicator.receive_json_from()
self.assertEqual(message, [])
def test_siri_one_shot(self):
# url = f'/vehicles.json?service={self.service.id}'
with vcr.use_cassette('data/vcr/icarus.yaml'):
with freeze_time('2019-06-08'):
now = timezone.now()
with self.assertNumQueries(2):
self.assertEqual('nothing scheduled', siri_one_shot(self.code_1, now, False))
with self.assertNumQueries(1):
self.assertEqual('cached (nothing scheduled)', siri_one_shot(self.code_1, now, False))
self.assertEqual('nothing scheduled', cache.get(f'{self.service.id}:Icarus'))
with freeze_time('2019-06-08 20:37+01:00'):
now = timezone.now()
with self.assertNumQueries(49):
self.assertIsNone(siri_one_shot(self.code_1, now, True))
with self.assertNumQueries(1):
self.assertEqual('cached (line name)', siri_one_shot(self.code_1, now, True))
key = 'http://data.icarus.cloudamber.com/StopMonitoringRequest.ashx:torbaydevon_siri_traveline:FLCN'
self.assertEqual('line name', cache.get(key))
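                # the one-shot cache key is "<source url>:<requestor_ref>:<service code>"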
| mpl-2.0 | -4,007,397,036,994,494,000 | 51.75 | 119 | 0.647338 | false |
hugolm84/tomahawk-charts | scraper/tomahawk/spiders/rdiospider.py | 1 | 2964 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) Hugo Lindström <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from scrapy.http.request import Request
import json
from tomahawkspider import TomahawkCrawlSpider, TomahawkSpiderHelper
from tomahawk.itemloaders import TomahawkItemLoader
class RdioSpider(TomahawkCrawlSpider):
name = "Rdio"
base_url = "http://api.rdio.com/1/"
oauth_key = 'gk8zmyzj5xztt8aj48csaart'
oauth_consumer_secret = 'yt35kakDyW'
# Regions, might change http://www.rdio.com/availability/
regions = [ "US"]#, "SE", "CA", "DE", "GB", "AU",
#"BE", "BR", "DK", "EE", "FI", "FR",
#"IS", "IE","IT", "LV", "LT", "NL",
#"NZ", "NO", "PT", "ES"]
default_region = "US"
default_type = "Track"
base_types = ["Artist", "Album", "Track"]
def __init__(self, name=None, **kwargs):
super(RdioSpider, self).__init__()
def start_requests(self):
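        # queue one POST per (chart type, region) pair; the RPC method and its
        # arguments ride along in request meta, presumably signed and parsed by
        # the TomahawkCrawlSpider base class via __parse_as_chart__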
for base_type in self.base_types:
for region in self.regions:
yield Request(url=self.base_url, method='POST', dont_filter=True,
meta={'oauth_method_args': {'method': 'getTopCharts','type': base_type,'_region': region}},
callback=self.__parse_as_chart__)
def do_create_chart(self, chart, response):
meta = response.meta['oauth_method_args']
name = "Top Overall"
type = meta['type']
region = meta['_region']
chart.add_value("name", name)
chart.add_value("id", name+type+region)
chart.add_value("type", type)
chart.add_value("geo", region)
chart.add_value("description", "%s %s's in %s" % (name, type, region))
return chart
def do_parse(self, chart, response):
response = json.loads(response.body)
item_type = self.do_get_type(chart)
for rank, items in enumerate(response['result']):
entry = TomahawkItemLoader()
entry.add_value(item_type, items.pop('name'))
if item_type != TomahawkSpiderHelper.ArtistType.lower():
entry.add_value("artist",items.pop("artist"))
entry.add_value("rank", rank)
chart.add_value("list", entry.load_item())
return self.do_process_item(chart)
| gpl-2.0 | -8,073,873,393,976,325,000 | 36.987179 | 121 | 0.619305 | false |
kakunbsc/enigma2.2 | lib/python/Plugins/SystemPlugins/NFIFlash/flasher.py | 2 | 10609 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Screens.Standby import TryQuitMainloop
from Components.ActionMap import ActionMap
from Components.Sources.StaticText import StaticText
from Components.Sources.Progress import Progress
from Components.Sources.Boolean import Boolean
from Components.Label import Label
from Components.FileList import FileList
from Components.Task import Task, Job, JobManager
from Tools.Directories import fileExists
from Tools.HardwareInfo import HardwareInfo
from os import system
from enigma import eConsoleAppContainer
import re
class writeNAND(Task):
def __init__(self,job,param,box):
Task.__init__(self,job, ("Writing image file to NAND Flash"))
self.setTool("/usr/lib/enigma2/python/Plugins/SystemPlugins/NFIFlash/mywritenand")
if box == "dm7025":
self.end = 256
elif box[:5] == "dm800":
self.end = 512
if box == "dm8000":
self.setTool("/usr/lib/enigma2/python/Plugins/SystemPlugins/NFIFlash/dm8000_writenand")
self.args += param
self.weighting = 1
def processOutput(self, data):
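        # the external flash tool prints one '.' per block written; each dot
        # advances the progress bar, and '*** done!' snaps it to self.end
        # (256 or 512 steps depending on the receiver model)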
print "[writeNand] " + data
if data == "." or data.endswith(" ."):
self.progress += 1
elif data.find("*** done!") > 0:
print "data.found done"
self.setProgress(self.end)
else:
self.output_line = data
class NFISummary(Screen):
skin = """
<screen position="0,0" size="132,64">
<widget source="title" render="Label" position="2,0" size="120,14" valign="center" font="Regular;12" />
<widget source="content" render="Label" position="2,14" size="120,34" font="Regular;12" transparent="1" zPosition="1" />
<widget source="job_progresslabel" render="Label" position="66,50" size="60,14" font="Regular;12" transparent="1" halign="right" zPosition="0" />
<widget source="job_progressbar" render="Progress" position="2,50" size="66,14" borderWidth="1" />
</screen>"""
def __init__(self, session, parent):
Screen.__init__(self, session, parent)
self["title"] = StaticText(_("Image flash utility"))
self["content"] = StaticText(_("Please select .NFI flash image file from medium"))
self["job_progressbar"] = Progress()
self["job_progresslabel"] = StaticText("")
def setText(self, text):
self["content"].setText(text)
class NFIFlash(Screen):
skin = """
<screen name="NFIFlash" position="90,95" size="560,420" title="Image flash utility">
<ePixmap pixmap="750S/buttons/green.png" position="140,0" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="750S/buttons/yellow.png" position="280,0" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<ePixmap pixmap="750S/buttons/blue.png" position="420,0" zPosition="0" size="140,40" transparent="1" alphatest="on" />
<widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#1f771f" transparent="1" />
<widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#a08500" transparent="1" />
<widget source="key_blue" render="Label" position="420,0" zPosition="1" size="140,40" font="Regular;20" valign="center" halign="center" backgroundColor="#18188b" transparent="1" />
<widget source="listlabel" render="Label" position="16,44" size="200,21" valign="center" font="Regular;18" />
<widget name="filelist" position="0,68" size="260,260" scrollbarMode="showOnDemand" />
<widget source="infolabel" render="Label" position="270,44" size="280,284" font="Regular;16" />
<widget source="job_progressbar" render="Progress" position="10,374" size="540,26" borderWidth="1" backgroundColor="#254f7497" />
<widget source="job_progresslabel" render="Label" position="180,378" zPosition="2" font="Regular;18" halign="center" transparent="1" size="200,22" foregroundColor="#000000" />
<widget source="statusbar" render="Label" position="10,404" size="540,16" font="Regular;16" foregroundColor="#cccccc" />
</screen>"""
def __init__(self, session, cancelable = True, close_on_finish = False):
self.skin = NFIFlash.skin
Screen.__init__(self, session)
self["job_progressbar"] = Progress()
self["job_progresslabel"] = StaticText("")
self["finished"] = Boolean()
self["infolabel"] = StaticText("")
self["statusbar"] = StaticText(_("Please select .NFI flash image file from medium"))
self["listlabel"] = StaticText(_("select .NFI flash file")+":")
self["key_green"] = StaticText()
self["key_yellow"] = StaticText()
self["key_blue"] = StaticText()
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions"],
{
"green": self.ok,
"yellow": self.reboot,
"ok": self.ok,
"left": self.left,
"right": self.right,
"up": self.up,
"down": self.down
}, -1)
currDir = "/media/usb/"
self.filelist = FileList(currDir, matchingPattern = "^.*\.(nfi|NFI)")
self["filelist"] = self.filelist
self.nfifile = ""
self.md5sum = ""
self.job = None
self.box = HardwareInfo().get_device_name()
def closeCB(self):
if ( self.job is None or self.job.status is not self.job.IN_PROGRESS ) and not self.no_autostart:
self.close()
#else:
#if self.cancelable:
#self.cancel()
def up(self):
self["filelist"].up()
self.check_for_NFO()
def down(self):
self["filelist"].down()
self.check_for_NFO()
def right(self):
self["filelist"].pageDown()
self.check_for_NFO()
def left(self):
self["filelist"].pageUp()
self.check_for_NFO()
def check_for_NFO(self):
self.session.summary.setText(self["filelist"].getFilename())
if self["filelist"].getFilename() is None:
return
if self["filelist"].getCurrentDirectory() is not None:
self.nfifile = self["filelist"].getCurrentDirectory()+self["filelist"].getFilename()
if self.nfifile.upper().endswith(".NFI"):
self["key_green"].text = _("Flash")
nfofilename = self.nfifile[0:-3]+"nfo"
if fileExists(nfofilename):
nfocontent = open(nfofilename, "r").read()
self["infolabel"].text = nfocontent
pos = nfocontent.find("MD5:")
if pos > 0:
self.md5sum = nfocontent[pos+5:pos+5+32] + " " + self.nfifile
else:
self.md5sum = ""
else:
self["infolabel"].text = _("No details for this image file") + ":\n" + self["filelist"].getFilename()
self.md5sum = ""
else:
self["infolabel"].text = ""
self["key_green"].text = ""
def ok(self):
if self.job is None or self.job.status is not self.job.IN_PROGRESS:
if self["filelist"].canDescent(): # isDir
self["filelist"].descent()
self.session.summary.setText(self["filelist"].getFilename())
self.check_for_NFO()
else:
self.queryFlash()
def queryFlash(self):
fd = open(self.nfifile, 'r')
print fd
sign = fd.read(11)
print sign
if sign.find("NFI1" + self.box + "\0") == 0:
if self.md5sum != "":
self["statusbar"].text = ("Please wait for md5 signature verification...")
self.session.summary.setText(("Please wait for md5 signature verification..."))
self.container = eConsoleAppContainer()
self.container.setCWD(self["filelist"].getCurrentDirectory())
self.container.appClosed.append(self.md5finished)
self.container.dataSent.append(self.md5ready)
self.container.execute("md5sum -cw -")
self.container.write(self.md5sum)
else:
self.session.openWithCallback(self.queryCB, MessageBox, _("This .NFI file does not have a md5sum signature and is not guaranteed to work. Do you really want to burn this image to flash memory?"), MessageBox.TYPE_YESNO)
else:
self.session.open(MessageBox, (_("This .NFI file does not contain a valid %s image!") % (self.box.upper())), MessageBox.TYPE_ERROR)
def md5ready(self, retval):
self.container.sendEOF()
def md5finished(self, retval):
if retval==0:
self.session.openWithCallback(self.queryCB, MessageBox, _("This .NFI file has a valid md5 signature. Continue programming this image to flash memory?"), MessageBox.TYPE_YESNO)
else:
self.session.openWithCallback(self.queryCB, MessageBox, _("The md5sum validation failed, the file may be corrupted! Are you sure that you want to burn this image to flash memory? You are doing this at your own risk!"), MessageBox.TYPE_YESNO)
def queryCB(self, answer):
if answer == True:
self.createJob()
else:
self["statusbar"].text = _("Please select .NFI flash image file from medium")
def createJob(self):
self.job = Job("Image flashing job")
param = [self.nfifile]
writeNAND(self.job,param,self.box)
#writeNAND2(self.job,param)
#writeNAND3(self.job,param)
self.job.state_changed.append(self.update_job)
self.job.end = 540
self.cwd = self["filelist"].getCurrentDirectory()
self["job_progressbar"].range = self.job.end
self.startJob()
def startJob(self):
self["key_blue"].text = ""
self["key_yellow"].text = ""
self["key_green"].text = ""
#self["progress0"].show()
#self["progress1"].show()
self.job.start(self.jobcb)
def update_job(self):
j = self.job
#print "[job state_changed]"
if j.status == j.IN_PROGRESS:
self.session.summary["job_progressbar"].value = j.progress
self.session.summary["job_progressbar"].range = j.end
self.session.summary["job_progresslabel"].text = "%.2f%%" % (100*j.progress/float(j.end))
self["job_progressbar"].range = j.end
self["job_progressbar"].value = j.progress
#print "[update_job] j.progress=%f, j.getProgress()=%f, j.end=%d, text=%f" % (j.progress, j.getProgress(), j.end, (100*j.progress/float(j.end)))
self["job_progresslabel"].text = "%.2f%%" % (100*j.progress/float(j.end))
self.session.summary.setText(j.tasks[j.current_task].name)
self["statusbar"].text = (j.tasks[j.current_task].name)
elif j.status == j.FINISHED:
self["statusbar"].text = _("Writing NFI image file to flash completed")
self.session.summary.setText(_("NFI image flashing completed. Press Yellow to Reboot!"))
self["key_yellow"].text = _("Reboot")
elif j.status == j.FAILED:
self["statusbar"].text = j.tasks[j.current_task].name + " " + _("failed")
self.session.open(MessageBox, (_("Flashing failed") + ":\n" + j.tasks[j.current_task].name + ":\n" + j.tasks[j.current_task].output_line), MessageBox.TYPE_ERROR)
def jobcb(self, jobref, fasel, blubber):
print "[jobcb] %s %s %s" % (jobref, fasel, blubber)
self["key_green"].text = _("Flash")
	def reboot(self):
		if self.job.status == self.job.FINISHED:
			self["statusbar"].text = ("rebooting...")
			# open the quit screen through the session so the restart actually runs
			self.session.open(TryQuitMainloop, 2)
def createSummary(self):
return NFISummary
| gpl-2.0 | 8,285,512,115,970,571,000 | 39.96139 | 244 | 0.685456 | false |
mardiros/pyshop | pyshop/config.py | 1 | 8175 | #-*- coding: utf-8 -*-
"""
PyShop Pyramid configuration helpers.
"""
from pyramid.interfaces import IBeforeRender
from pyramid.url import static_path, route_path
from pyramid.httpexceptions import HTTPNotFound
from pyramid_jinja2 import renderer_factory
from pyramid_rpc.xmlrpc import XMLRPCRenderer
from pyshop.helpers import pypi
from pyshop.helpers.restxt import parse_rest
from pyshop.helpers.download import renderer_factory as dl_renderer_factory
def notfound(request):
return HTTPNotFound('Not found.')
def add_urlhelpers(event):
"""
Add helpers to the template engine.
"""
event['static_url'] = lambda x: static_path(x, event['request'])
event['route_url'] = lambda name, *args, **kwargs: \
route_path(name, event['request'], *args, **kwargs)
event['parse_rest'] = parse_rest
event['has_permission'] = event['request'].has_permission
def includeme(config):
"""
Pyramid includeme file for the :class:`pyramid.config.Configurator`
"""
settings = config.registry.settings
# config.add_renderer('json', JSONP())
# release file download
config.add_renderer('repository', dl_renderer_factory)
    # Jinja2 configuration: templates are registered under the .html
    # extension rather than the default .jinja2 suffix
    config.add_renderer('.html', renderer_factory)
# helpers
config.add_subscriber(add_urlhelpers, IBeforeRender)
# i18n
config.add_translation_dirs('locale/')
pypi_url = settings.get('pyshop.pypi.url', 'https://pypi.python.org/pypi')
# PyPI url for XML RPC service consume
pypi.set_proxy(pypi_url, settings.get('pyshop.pypi.transport_proxy'))
# Javascript + Media
config.add_static_view('static', 'static', cache_max_age=3600)
# config.add_static_view('repository', 'repository', cache_max_age=3600)
config.add_route(u'login', u'/login',)
config.add_view(u'pyshop.views.credentials.Login',
route_name=u'login',
renderer=u'shared/login.html')
config.add_route(u'logout', u'/logout')
config.add_view(u'pyshop.views.credentials.Logout',
route_name=u'logout',
permission=u'user_view')
# Home page
config.add_route(u'index', u'/')
config.add_view(u'pyshop.views.Index',
route_name=u'index',
permission=u'user_view')
# Archive downloads
config.add_route(u'show_external_release_file',
u'/repository/ext/{release_id}/{filename:.*}',
request_method=u'GET')
config.add_view(u'pyshop.views.repository.show_external_release_file',
route_name=u'show_external_release_file',
renderer=u'repository',
permission=u'download_releasefile')
config.add_route(u'show_release_file',
u'/repository/{file_id}/{filename:.*}',
request_method=u'GET')
config.add_view(u'pyshop.views.repository.show_release_file',
route_name=u'show_release_file',
renderer=u'repository',
permission=u'download_releasefile')
# Simple views used by pip
config.add_route(u'list_simple', u'/simple/', request_method=u'GET')
config.add_view(u'pyshop.views.simple.List',
route_name=u'list_simple',
renderer=u'pyshop/simple/list.html',
permission=u'download_releasefile')
config.add_route(u'show_simple', u'/simple/{package_name}/')
config.add_view(u'pyshop.views.simple.Show',
route_name=u'show_simple',
renderer=u'pyshop/simple/show.html',
permission=u'download_releasefile')
try:
config.add_notfound_view(notfound, append_slash=True)
except AttributeError:
# Pyramid < 1.4
pass
# Used by setup.py sdist upload
config.add_route(u'upload_releasefile', u'/simple/',
request_method=u'POST')
config.add_view(u'pyshop.views.simple.UploadReleaseFile',
renderer=u'pyshop/simple/create.html',
route_name=u'upload_releasefile',
permission=u'upload_releasefile')
# Web Services
config.add_renderer('pyshopxmlrpc', XMLRPCRenderer(allow_none=True))
config.add_xmlrpc_endpoint(
'api', '/pypi/xmlrpc', default_renderer='pyshopxmlrpc')
config.scan('pyshop.views.xmlrpc')
# Backoffice Views
config.add_route(u'list_package', u'/pyshop/package')
config.add_view(u'pyshop.views.package.List',
route_name='list_package',
renderer=u'pyshop/package/list.html',
permission=u'user_view')
config.add_route(u'list_package_page', u'/pyshop/package/p/{page_no}')
config.add_view(u'pyshop.views.package.List',
route_name='list_package_page',
renderer=u'pyshop/package/list.html',
permission=u'user_view')
config.add_route(u'show_package',
u'/pyshop/package/{package_name}')
config.add_route(u'show_package_version',
u'/pyshop/package/{package_name}/{release_version}')
config.add_view(u'pyshop.views.package.Show',
route_name=u'show_package',
renderer=u'pyshop/package/show.html',
permission=u'user_view')
config.add_view(u'pyshop.views.package.Show',
route_name=u'show_package_version',
renderer=u'pyshop/package/show.html',
permission=u'user_view')
# Admin view
config.add_route(u'list_account', u'/pyshop/account')
config.add_view(u'pyshop.views.account.List',
route_name=u'list_account',
renderer=u'pyshop/account/list.html',
permission=u'admin_view')
config.add_route(u'create_account', u'/pyshop/account/new')
config.add_view(u'pyshop.views.account.Create',
route_name=u'create_account',
renderer=u'pyshop/account/create.html',
permission=u'admin_view')
config.add_route(u'edit_account', u'/pyshop/account/{user_id}')
config.add_view(u'pyshop.views.account.Edit',
route_name=u'edit_account',
renderer=u'pyshop/account/edit.html',
permission=u'admin_view')
config.add_route(u'delete_account', u'/pyshop/delete/account/{user_id}')
config.add_view(u'pyshop.views.account.Delete',
route_name=u'delete_account',
renderer=u'pyshop/account/delete.html',
permission=u'admin_view')
config.add_route(u'purge_package', u'/pyshop/purge/package/{package_id}')
config.add_view(u'pyshop.views.package.Purge',
route_name=u'purge_package',
renderer=u'pyshop/package/purge.html',
permission=u'admin_view')
# Current user can update it's information
config.add_route(u'edit_user', u'/pyshop/user')
config.add_view(u'pyshop.views.user.Edit',
route_name=u'edit_user',
renderer=u'pyshop/user/edit.html',
permission=u'user_view')
config.add_route(u'change_password', u'/pyshop/user/password')
config.add_view(u'pyshop.views.user.ChangePassword',
route_name=u'change_password',
renderer=u'pyshop/user/change_password.html',
permission=u'user_view')
# Credentials
for route in ('list_simple', 'show_simple',
'show_release_file', 'show_external_release_file',
'upload_releasefile'):
config.add_view('pyshop.views.credentials.authbasic',
route_name=route,
context='pyramid.exceptions.Forbidden'
)
config.add_view('pyshop.views.credentials.Login',
renderer=u'shared/login.html',
context=u'pyramid.exceptions.Forbidden')
| bsd-3-clause | -596,734,129,651,891,200 | 37.200935 | 78 | 0.596942 | false |
feilaoda/FlickBoard | project/lib/filter.py | 1 | 7291 | # -*- coding: utf-8 -*-
from datetime import datetime
import urllib2
import re
import urllib, hashlib
import string
from itertools import imap
def none2string(value):
if value is None:
return ''
return value
def video(value):
if value is None:
return None
videos = re.findall('(http://v.youku.com/v_show/id_[a-zA-Z0-9\=]+.html)\s?', value)
if (len(videos) > 0):
for video in videos:
video_id = re.findall('http://v.youku.com/v_show/id_([a-zA-Z0-9\=]+).html', video)
value = value.replace('http://v.youku.com/v_show/id_' + video_id[0] + '.html',
'<div class="mediaVideo"><embed src="http://player.youku.com/player.php/sid/' + video_id[0] + '/v.swf" allowFullScreen="true" quality="high" width="480" height="400" align="middle" allowScriptAccess="always" type="application/x-shockwave-flash"></embed></div>')
return value
else:
return urlink(value)
def download_urlize(value):
if value is None:
return None
links = re.findall('(\[dl\]http://[a-zA-Z0-9\:\/\?=\-\_\.\&]+\[\/dl\])\s?', value)
if (len(links) > 0):
for link in links:
url = re.findall('(http://[a-zA-Z0-9\/\?=\-\_\.\&]+)', link)
if len(url) > 0:
value = value.replace(link, '<a href="%s" target="_blank">Download</a>' % (url[0]))
return value
return None
def mentions(value):
if value is None:
return None
ms = re.findall('(@[\w\_]+\.?)\s?', value)
if (len(ms) > 0):
for m in ms:
m_id = re.findall('@([a-zA-Z0-9\_\x80-\xff]+\.?)', m)
if (len(m_id) > 0):
if (m_id[0].endswith('.') != True and len(m_id[0])<32):
value = value.replace('@' + m_id[0], '<a href="/member/info/' + m_id[0] + '" rel="external">@' + m_id[0] + '</a>')
return value
else:
return value
# gravatar filter
def gravatar(value, arg):
    default = "http://v2ex.appspot.com/static/img/avatar_" + str(arg) + ".png"
    if type(value).__name__ != 'Member':
        return '<img src="' + default + '" border="0" align="absmiddle" />'
    if arg == 'large':
        number_size = 73
        member_avatar_url = value.avatar_large_url
    elif arg == 'normal':
        number_size = 48
        member_avatar_url = value.avatar_normal_url
    elif arg == 'mini':
        number_size = 24
        member_avatar_url = value.avatar_mini_url
    else:
        # unknown size keyword: fall back to the default placeholder instead
        # of hitting a NameError on the unset locals below
        return '<img src="' + default + '" border="0" align="absmiddle" />'
    if member_avatar_url:
        return '<img src="' + member_avatar_url + '" border="0" alt="' + value.username + '" />'
    else:
        gravatar_url = "http://www.gravatar.com/avatar/" + hashlib.md5(value.email.lower()).hexdigest() + "?"
        gravatar_url += urllib.urlencode({'s': str(number_size), 'd': default})
        return '<img src="' + gravatar_url + '" border="0" alt="' + value.username + '" align="absmiddle" />'
# avatar filter
def avatar(value, arg):
    default = "/static/img/avatar_" + str(arg) + ".png"
    if type(value).__name__ not in ['Member', 'Node']:
        return '<img src="' + default + '" border="0" />'
    if arg == 'large':
        member_avatar_url = value.avatar_large_url
    elif arg == 'normal':
        member_avatar_url = value.avatar_normal_url
    elif arg == 'mini':
        member_avatar_url = value.avatar_mini_url
    else:
        # unknown size keyword: fall back to the default placeholder
        return '<img src="' + default + '" border="0" />'
    # test the avatar URL for the size actually requested
    if member_avatar_url:
        return '<img src="' + member_avatar_url + '" border="0" />'
    else:
        return '<img src="' + default + '" border="0" />'
# github gist script support
def gist(value):
return re.sub(r'(http://gist.github.com/[\d]+)', r'<script src="\1.js"></script>', value)
_base_js_escapes = (
('\\', r'\u005C'),
('\'', r'\u0027'),
('"', r'\u0022'),
('>', r'\u003E'),
('<', r'\u003C'),
('&', r'\u0026'),
('=', r'\u003D'),
('-', r'\u002D'),
(';', r'\u003B'),
(u'\u2028', r'\u2028'),
(u'\u2029', r'\u2029')
)
# Escape every ASCII character with a value less than 32.
_js_escapes = (_base_js_escapes +
tuple([('%c' % z, '\\u%04X' % z) for z in range(32)]))
def escapejs(value):
"""Hex encodes characters for use in JavaScript strings."""
for bad, good in _js_escapes:
value = value.replace(bad, good)
return value
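# e.g. escapejs(u'</script>') -> u'\u003C/script\u003E', which is safe to
# embed inside an inline <script> block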
_word_split_re = re.compile(r'(\s+)')
_punctuation_re = re.compile(
'^(?P<lead>(?:%s)*)(?P<middle>.*?)(?P<trail>(?:%s)*)$' % (
'|'.join(imap(re.escape, ('(', '<', '<'))),
'|'.join(imap(re.escape, ('.', ',', ')', '>', '\n', '>')))
)
)
_simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$')
_striptags_re = re.compile(r'(<!--.*?-->|<[^>]*>)')
_entity_re = re.compile(r'&([^;]+);')
_letters = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
_digits = '0123456789'
# special singleton representing missing values for the runtime
missing = type('MissingType', (), {'__repr__': lambda x: 'missing'})()
# internal code
internal_code = set()
def urlink(text, trim_url_limit=None, nofollow=False, external=True):
    """Converts any URLs in text into clickable links. Works on http://,
    https:// and www. links. Links can have trailing punctuation (periods,
    commas, close-parens) and leading punctuation (opening parens) and
    it'll still do the right thing.
    If trim_url_limit is not None, the URLs in link text will be limited
    to trim_url_limit characters.
    If nofollow is True, the URLs in link text will get a rel="nofollow"
    attribute.
    """
    if text is None:
        return None
trim_url = lambda x, limit=trim_url_limit: limit is not None \
and (x[:limit] + (len(x) >=limit and '...'
or '')) or x
words = _word_split_re.split(unicode(text))
nofollow_attr = nofollow and ' rel="nofollow" ' or ''
external_attr = external and ' target="_blank" ' or ''
for i, word in enumerate(words):
match = _punctuation_re.match(word)
if match:
lead, middle, trail = match.groups()
if middle.startswith('www.') or (
'@' not in middle and
not middle.startswith('http://') and
len(middle) > 0 and
middle[0] in _letters + _digits and (
middle.endswith('.org') or
middle.endswith('.net') or
middle.endswith('.com')
)):
middle = '<a href="http://%s"%s%s>%s</a>' % (middle,
nofollow_attr, external_attr, trim_url(middle))
if middle.startswith('http://') or \
middle.startswith('https://'):
middle = '<a href="%s"%s%s>%s</a>' % (middle,
nofollow_attr, external_attr, trim_url(middle))
if '@' in middle and not middle.startswith('www.') and \
not ':' in middle and _simple_email_re.match(middle):
middle = '<a href="mailto:%s">%s</a>' % (middle, middle)
if lead + middle + trail != word:
words[i] = lead + middle + trail
return u''.join(words)
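# Example (illustrative):
#   urlink(u'docs at www.example.org')
#   -> u'docs at <a href="http://www.example.org" target="_blank" >www.example.org</a>'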
| mit | -399,325,461,132,370,750 | 35.09901 | 295 | 0.535592 | false |
lidaobing/itcc | itcc/molecule/utils.py | 1 | 5019 | # $Id$
import sys
import os.path
import math
from itcc.molecule import read, write
from itcc.molecule.tools import neighbours, is_pyramid
from itcc.molecule import relalist
try:
    sorted
except NameError:
    # Python < 2.4 lacks the sorted() builtin; fall back to itcc's backport
    from itcc.core.tools import sorted_ as sorted
def mirrormol():
if len(sys.argv) != 2:
sys.stderr.write('Usage: %s <xyzfname>\n' % os.path.basename(sys.argv[0]))
sys.exit(1)
mol = read.readxyz(file(sys.argv[1]))
mol.coords = -mol.coords
write.writexyz(mol)
def printbonds():
if len(sys.argv) != 2:
sys.stderr.write('Usage: %s <xyzfname>\n' % os.path.basename(sys.argv[0]))
sys.exit(1)
mol = read.readxyz(file(sys.argv[1]))
a = relalist.Relalist(mol)
print a
def detailcmp():
from optparse import OptionParser
usage = '%prog [options] <xyzfname1> <xyzfname2>'
parser = OptionParser(usage=usage)
parser.add_option('-a', "--atoms", dest="atoms",
help="only compare selected atoms, 1-based",
metavar="STRING")
parser.add_option('-A', "--atomsfile", dest="atomsfile",
help="read the selected atoms from file",
metavar="FILE")
(options, args) = parser.parse_args()
if len(args) != 2:
parser.error("incorrect number of arguments")
if options.atoms and options.atomsfile:
parser.error("options conflict")
if options.atomsfile:
options.atoms = file(options.atomsfile).read()
atoms = None
if options.atoms:
atoms = [int(x)-1 for x in options.atoms.split()]
mol1 = read.readxyz(file(args[0]))
mol2 = read.readxyz(file(args[1]))
r1 = relalist.Relalist(mol1)
bonds_data = []
for i,j in r1.bonds:
if atoms is not None and (i not in atoms or j not in atoms): continue
l1 = mol1.calclen(i,j)
l2 = mol2.calclen(i,j)
bonds_data.append((abs(l1-l2), (i+1,j+1), l1, l2))
angles_data = []
for i,j,k in r1.angles:
if atoms is not None \
and (i not in atoms \
or j not in atoms \
or k not in atoms):
continue
a1 = math.degrees(mol1.calcang(i,j,k))
a2 = math.degrees(mol2.calcang(i,j,k))
angles_data.append((abs(a1-a2), (i+1,j+1,k+1), a1, a2))
torsions_data = []
for i,j,k,l in r1.torsions:
if atoms is not None \
and (i not in atoms \
or j not in atoms \
or k not in atoms
or l not in atoms):
continue
t1 = math.degrees(mol1.calctor(i,j,k,l))
t2 = math.degrees(mol2.calctor(i,j,k,l))
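        # torsions are periodic, so 180 - ||t1 - t2| - 180| measures their
        # distance on a circle: -179 and +179 degrees differ by 2, not 358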
torsions_data.append((180-abs(abs(t1-t2)-180), (i+1,j+1,k+1,l+1), t1, t2))
print 'bonds:'
for x in sorted(bonds_data):
print x
print
print 'angles:'
for x in sorted(angles_data):
print x
print
print 'torsions:'
for x in sorted(torsions_data):
print x[1][0], x[1][1], x[1][2], x[1][3], x[2], x[3], x[0]
def rg():
if len(sys.argv) < 2:
sys.stderr.write('Usage: %s XYZFNAME...\n' % os.path.basename(sys.argv[0]))
sys.exit(1)
from itcc.molecule import radius_of_gyration
for fname in sys.argv[1:]:
ifile = sys.stdin
if fname != '-':
ifile = file(fname)
mol = read.readxyz(ifile)
print ifile.name, radius_of_gyration(mol)
def sub_pyramid_check(fname, atoms):
mol = read.readxyz(file(fname))
if atoms is None:
atoms = range(len(mol))
res = []
for atom in atoms:
neis = neighbours(mol, atom)
if len(neis) != 4:
continue
if is_pyramid(mol.coords[atom],
mol.coords[neis[0]],
mol.coords[neis[1]],
mol.coords[neis[2]],
mol.coords[neis[3]]):
res.append(atom)
return res
def pyramid_check():
from optparse import OptionParser
usage = '%prog [options] <xyzfname>...'
parser = OptionParser(usage=usage)
parser.add_option('-a', "--atoms", dest="atoms",
help="only compare selected atoms, 1-based",
metavar="STRING")
parser.add_option('-A', "--atomsfile", dest="atomsfile",
help="read the selected atoms from file",
metavar="FILE")
(options, args) = parser.parse_args()
if len(args) < 1:
parser.error("incorrect number of arguments")
if options.atoms and options.atomsfile:
parser.error("options conflict")
if options.atomsfile:
options.atoms = file(options.atomsfile).read()
atoms = None
if options.atoms:
atoms = [int(x)-1 for x in options.atoms.split()]
for fname in args:
res = sub_pyramid_check(fname, atoms)
if res:
print fname, ' '.join(str(x+1) for x in res)
| gpl-3.0 | 6,977,058,385,700,711,000 | 28.350877 | 83 | 0.546523 | false |
cs-chan/Deep-Plant | GRU-CFA/Codes/visualClef.py | 1 | 18504 | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 2 15:24:59 2018
@author: root
"""
import cPickle as pkl
import numpy
import cv2
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import skimage
import skimage.transform
import skimage.io
from PIL import Image, ImageEnhance
import scipy.misc
import tensorflow as tf
import numpy as np
import os
import struct
import scipy.io as sio
from array import array as pyarray
from numpy import array, int8, uint8, zeros
import collections
import pickle
import functools
import sets
from tensorflow.python.ops import rnn, array_ops
from tensorflow.contrib.rnn import GRUCell, DropoutWrapper, MultiRNNCell
from tensorflow.python import debug as tf_debug
from attn_7_1_ex import VariableSequenceClassification
from temp_createStruct5 import ConstructLookupTable
from time import gmtime, strftime
from logging_util import makelog
logfile=makelog()
class DataSet(object):
def __init__(self, layername, numMap):
"""Construct a DataSet."""
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/train_obs_list.mat')
self._trainList = mat_contents['train_obs_list']
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/train_obs_class.mat')
self._trainLabels = mat_contents['train_obs_class']
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/test_obs_list.mat')
self._testList = mat_contents['test_obs_list']
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/test_obs_class.mat')
self._testLabels = mat_contents['test_obs_class']
self.layerextract = layername
self.numMap = numMap
self._num_examples = self._trainLabels.shape[0]
self._perm_list = np.arange(self._num_examples)
np.random.shuffle(self._perm_list)
self._trainLabelsPerm = self._trainLabels[self._perm_list]
self._num_testexamples = self._testLabels.shape[0]
self._perm_list_test = np.arange(self._num_testexamples)
self._batch_seq = 0
self._epochs_completed = 0
self._index_in_epoch = 0
self._index_in_epoch_test = 0
self._max_seq = 0
self.Batch_Up_model = ConstructLookupTable()
        self.mydict2_test256 = self.Batch_Up_model.main(self._testList,2) # for train_testID != 1
self.feature_size_conv = self.numMap*14*14
self.feature_size_fc = 4096
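        # conv5_3 yields 512 feature maps of 14x14 per image (flattened below);
        # fc7 yields a single 4096-d descriptor per image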
def trainList(self):
return self._trainList
def trainLabels(self):
return self._trainLabels
def trainLabelsPerm(self):
return self._trainLabelsPerm
def testList(self):
return self._testList
def testLabels(self):
return self._testLabels
def num_examples(self):
return self._num_examples
def num_testexamples(self):
return self._num_testexamples
def epochs_completed(self):
return self._epochs_completed
def index_in_epoch(self):
return self._index_in_epoch
def max_seq(self):
return self._max_seq
def batch_seq(self):
return self._batch_seq
def PrepareTrainingBatch(self,Newpermbatch, batch_size, indicator):
if indicator == 1:
mydictG = self.Batch_Up_model.main(self._trainList,1) # for train_testID == 1
else:
mydictG = self.mydict2_test256
i = 0
temp = np.zeros(batch_size)
while i < batch_size:
temp[i] = len(mydictG[Newpermbatch[i]][1])
i = i + 1
self._max_seq = int(np.amax(temp))
self._batch_seq = temp
batch_conv = np.zeros([batch_size,self._max_seq,self.feature_size_conv])
batch_fc = np.zeros([batch_size,self._max_seq,self.feature_size_fc])
i = 0
while i < batch_size:
media_length = len(mydictG[Newpermbatch[i]][1])
j = 0
while j < media_length:
### for 256 image size for testing
# pkl_file1 = open(mydictG[Newpermbatch[i]][1][j][0], 'rb')
# output = pickle.load(pkl_file1)
# pkl_file1.close()
#
# pkl_file2 = open(mydictG[Newpermbatch[i]][1][j][1], 'rb')
# output2 = pickle.load(pkl_file2)
# pkl_file2.close()
#
# pkl_file3 = open(mydictG[Newpermbatch[i]][1][j][2], 'rb')
# output3 = pickle.load(pkl_file3)
# pkl_file3.close()
#
# output.update(output2)
# output.update(output3)
# mat_contents = output[self.layerextract[0]]
# batch_conv[i][j][:] = mat_contents.reshape(self.feature_size_conv) #'conv5_3'
#
# mat_contents = output[self.layerextract[1]]
## batch_fc[i][j][:] = mat_contents.reshape(self.feature_size_conv) #'conv5_3_O'
# batch_fc[i][j][:] = mat_contents #'convfc7'
#
# j = j + 1
######################################################################
## for 384,512 image size for testing
if indicator == 1: # training ###################
pkl_file1 = open(mydictG[Newpermbatch[i]][1][j][0], 'rb')
output = pickle.load(pkl_file1)
pkl_file1.close()
pkl_file2 = open(mydictG[Newpermbatch[i]][1][j][1], 'rb')
output2 = pickle.load(pkl_file2)
pkl_file2.close()
pkl_file3 = open(mydictG[Newpermbatch[i]][1][j][2], 'rb')
output3 = pickle.load(pkl_file3)
pkl_file3.close()
output.update(output2)
output.update(output3)
mat_contents = output[self.layerextract[0]]
batch_conv[i][j][:] = mat_contents.reshape(self.feature_size_conv) #'conv5_3'
mat_contents = output[self.layerextract[1]]
batch_fc[i][j][:] = mat_contents.reshape(self.feature_size_fc) #'conv5_3_O'
j = j + 1
else: # testing
pkl_file1 = open(mydictG[Newpermbatch[i]][1][j][0], 'rb')
output = pickle.load(pkl_file1)
pkl_file1.close()
pkl_file2 = open(mydictG[Newpermbatch[i]][1][j][1], 'rb')
output2 = pickle.load(pkl_file2)
pkl_file2.close()
output.update(output2)
mat_contents = output[self.layerextract[0]]
batch_conv[i][j][:] = mat_contents.reshape(self.feature_size_conv) #'conv5_3'
mat_contents = output[self.layerextract[1]]
batch_fc[i][j][:] = mat_contents.reshape(self.feature_size_fc) #'conv5_3_O'
j = j + 1
#########################################################
if indicator == 1:
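                # training only: shuffle the media order within an observation,
                # applying the same permutation J to the conv and fc sequences
                # so the two feature streams stay aligned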
J = np.arange(media_length)
np.random.shuffle(J)
temp_arr = batch_conv[i,:media_length,:]
temp_arr = temp_arr[J,:]
batch_conv[i,:media_length,:] = temp_arr
temp_arr = batch_fc[i,:media_length,:]
temp_arr = temp_arr[J,:]
batch_fc[i,:media_length,:] = temp_arr
i = i + 1
return batch_fc, batch_conv
def dense_to_one_hot(self,labels_dense, num_classes=1000):
labels_dense = labels_dense.astype(int)
num_labels = labels_dense.shape[0]
index_offset = np.arange(num_labels) * num_classes
labels_one_hot = np.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
labels_one_hot = labels_one_hot.astype(np.float32)
temp = zeros((labels_one_hot.shape[0],self._max_seq,num_classes))
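        # replicate each observation's one-hot label across its media sequence:
        # row i carries the same label at timesteps 0..batch_seq[i]-1, zeros after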
i=0
while i < labels_one_hot.shape[0]:
temp[i][0:int(self._batch_seq[i])] = labels_one_hot[i]
i=i+1
return temp
def next_batch(self,batch_size):
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
self._perm_list = np.arange(self._num_examples)
np.random.shuffle(self._perm_list)
self._trainLabelsPerm = self._trainLabels[self._perm_list]
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self.PrepareTrainingBatch(self._perm_list[start:end], batch_size, 1), self.dense_to_one_hot(self._trainLabelsPerm[start:end])
def PrepareTestingBatch(self,test_total):
start = self._index_in_epoch_test
self._index_in_epoch_test += test_total
if self._index_in_epoch_test > self._num_testexamples:
start = 0
self._index_in_epoch_test = test_total
assert test_total <= self._num_testexamples
end = self._index_in_epoch_test
return self.PrepareTrainingBatch(self._perm_list_test[start:end], test_total, 0), self.dense_to_one_hot(self._testLabels[start:end])
## Testing
def Reset_index_in_epoch_test(self, init_v = 0):
self._index_in_epoch_test = init_v
def crop_image(self, x, target_height=224, target_width=224):
image = skimage.img_as_float(skimage.io.imread(x)).astype(np.float32)
if len(image.shape) == 2:
image = np.tile(image[:,:,None], 3)
elif len(image.shape) == 4:
image = image[:,:,:,0]
height, width, rgb = image.shape
if width == height:
resized_image = cv2.resize(image, (target_height,target_width))
elif height < width:
resized_image = cv2.resize(image, (int(width * float(target_height)/height), target_width))
cropping_length = int((resized_image.shape[1] - target_height) / 2)
resized_image = resized_image[:,cropping_length:resized_image.shape[1] - cropping_length]
else:
resized_image = cv2.resize(image, (target_height, int(height * float(target_width) / width)))
cropping_length = int((resized_image.shape[0] - target_width) / 2)
resized_image = resized_image[cropping_length:resized_image.shape[0] - cropping_length,:]
return cv2.resize(resized_image, (target_height, target_width))
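# Note on crop_image above: cv2.resize takes (width, height); with the square
# 224x224 default the mixed argument order is harmless. A hypothetical 300x400
# input is scaled so its short side is 224, then center-cropped to 224x224.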
####### Network Parameters ########
training_iters = 10000000 # run 10000 epoch
batch_size = 15
display_step = 280 #280
test_num_total = 15
layername_conv = 'conv5_3'
layername_fc = 'fc7_final'
layername = [layername_conv, layername_fc]
numMap = 512
featMap = 14*14
num_classes = 1000
dropTrain = 0.5
dropTest = 1
prob_path = '/media/titanz/Data3TB/tensorboard_log/model_20180211/prob_256/'
savefigfile ='/media/titanz/Data3TB/tensorboard_log/model_20180309/attn_visual_imsz384/'
#################################
plantclefdata = DataSet(layername,numMap)
# tf Graph input
x = tf.placeholder("float", [None, None, 4096])
data = tf.placeholder("float", [None, None, numMap*14*14])
target = tf.placeholder("float", [None, None, num_classes])
dropout = tf.placeholder(tf.float32)
batch_size2 = tf.placeholder(tf.int32)
model = VariableSequenceClassification(x, data, target, dropout, batch_size2)
sess = tf.InteractiveSession()
#sess = tf.Session()
#sess = tf_debug.LocalCLIDebugWrapperSession(sess)
saver = tf.train.Saver(max_to_keep = None)
saver.restore(sess, "/media/titanz/Data3TB/tensorboard_log/model_20180309/model_55160")
#################################################################################
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/test_obs_list.mat')
testList = mat_contents['test_obs_list']
mat_contents = sio.loadmat('/media/titanz/Data3TB/tensorboard_log/model_20180309/obs_tp_media_re.mat')
obs_tp_media_re = mat_contents['obs_tp_media_re']
imagesz = 1 # choose image size 1 = 256 , 2 = 384, 3 = 512
if imagesz == 1: #256
path_folder = '/media/titanz/Data3TB/PlantclefVGA2/PlantClefImageTest/resize_species/species/'
elif imagesz == 2: #384
path_folder = '/media/titanz/data3TB_02/PlantClefolddata_augmented/PlantClefImageTest_SR/resize_species_384/'
else: #512
path_folder = '/media/titanz/data3TB_02/PlantClefolddata_augmented/PlantClefImageTest_SR/resize_species_512/'
smooth = True
# read python dict back from the testing_256 file
pkl_file_test = open('/home/titanz/tensor_flow/tensorflow-master/tensorflow/examples/RNN/myfile_test_256.pkl', 'rb')
mydict2_test = pickle.load(pkl_file_test)
pkl_file_test.close()
mat_contents = sio.loadmat('/media/titanz/Data3TB/tensorflowlist/species/ClassID_CNNID.mat')
classIDList = mat_contents['ClassID_CNNID']
mediafolderTest_content = '/media/titanz/Data3TB/tensorflowlist/VGG_multipath_res_bn_lastconv/test_obs_media_content_256/'
mat_contents = sio.loadmat('/home/titanz/Documents/SueHan/matlab/PlantClefVGG_net/RNN_plantclef/test_obs_class.mat')
testLabels = mat_contents['test_obs_class']
################################################################################
for count in xrange(obs_tp_media_re.shape[0]):
print('Obs num {:7.0f}'.format(count))
    ObsIDchosen = obs_tp_media_re[count][0].astype(int) - 1  # e.g. 8258, 12; choose 0 <= ObsIDchosen < 13887
Obs_name = testList[ObsIDchosen].astype(int)
Obs_name = str(Obs_name).split('[')
Obs_name = Obs_name[1].split(']')
directory = savefigfile + str(Obs_name[0])
if not os.path.exists(directory):
os.makedirs(directory)
plantclefdata.Reset_index_in_epoch_test(init_v = ObsIDchosen)
(test_data_x, test_data_conv), test_label = plantclefdata.PrepareTestingBatch(test_num_total)
pred, alpha_forward1, alpha_forward2, alpha_forward3, alpha_backward1, alpha_backward2, alpha_backward3 = sess.run(model.alpha_list_com, feed_dict={x: test_data_x, data: test_data_conv, batch_size2: test_num_total, target: test_label, dropout: dropTest})#, batch_size: batch_size})
pred_re = pred[0,:,:]
B = np.argmax(pred_re,axis=1)
alpha_forward1 = np.array(alpha_forward1).swapaxes(1,0) # alpha(max_seq, batch, 196)
alpha_forward2 = np.array(alpha_forward2).swapaxes(1,0) # alpha(max_seq, batch, 196)
alpha_forward3 = np.array(alpha_forward3).swapaxes(1,0) # alpha(max_seq, batch, 196)
mat_contents2 = sio.loadmat(mediafolderTest_content + str(mydict2_test[ObsIDchosen][0]) + '.mat',mat_dtype=True)
used = mat_contents2['v']
alphas1 = np.array(alpha_forward1).swapaxes(1,0) # alpha(max_seq, batch, 196)
alphas1 = alphas1[0:used.shape[1],:,:]
alphas2 = np.array(alpha_forward2).swapaxes(1,0) # alpha(max_seq, batch, 196)
alphas2 = alphas2[0:used.shape[1],:,:]
alphas3 = np.array(alpha_forward3).swapaxes(1,0) # alpha(max_seq, batch, 196)
alphas3 = alphas3[0:used.shape[1],:,:]
pred_re = pred[0,:,:]
pred_re = pred_re[0:used.shape[1],:]
B = np.argmax(pred_re,axis=1)
B = B[0:used.shape[1]]
class_picken = testLabels[ObsIDchosen]
class_picken = class_picken.astype(int)
    index_plot = 1
    index_plotnc = 1
for ii in xrange(alphas1.shape[0]): # eg: 0,1,2 #list(range(0,alphas.shape[0]*2,2)):
organlabel = int(used[0,ii])
if organlabel == 0:
organlabelD = 'Branch'
elif organlabel == 1:
organlabelD = 'Entire'
elif organlabel == 2:
organlabelD = 'Flower'
elif organlabel == 3:
organlabelD = 'Fruit'
elif organlabel == 4:
organlabelD = 'Leaf'
elif organlabel == 5:
organlabelD = 'LeafScan'
else:
organlabelD = 'Stem'
plt.figure(1)
L = mydict2_test[ObsIDchosen][1][ii].split('/')
name_str = L[len(L)-1]
name_str2 = name_str.split('.mat')
name_str3 = name_str2[0]
path = path_folder + '{:04d}'.format(class_picken[0]) + '-' + str(classIDList[class_picken[0]][0]) +'/' + name_str3
img = plantclefdata.crop_image(path)
plt.imshow(img)
if smooth:
alpha_img = skimage.transform.pyramid_expand(alphas1[ii,0,:].reshape(14,14), upscale=16, sigma=20)
else:
alpha_img = skimage.transform.resize(alphas1[ii,0,:].reshape(14,14), [img.shape[0], img.shape[1]])
plt.imshow(alpha_img, alpha=0.7)
plt.set_cmap(cm.Greys_r)
plt.axis('off')
plt.savefig(directory + '/' + "course" + str(ii) + ".png")
plt.imshow(img)
plt.axis('off')
lab2 = organlabelD + '_'+ str(class_picken[0]) + '-' + str(B[ii])
plt.savefig(directory + '/' + lab2 + '_' + str(ii) + ".png")
plt.figure(2)
plt.imshow(img)
if smooth:
alpha_img = skimage.transform.pyramid_expand(alphas2[ii,0,:].reshape(14,14), upscale=16, sigma=20)
else:
alpha_img = skimage.transform.resize(alphas2[ii,0,:].reshape(14,14), [img.shape[0], img.shape[1]])
plt.imshow(alpha_img, alpha=0.7) # show attn
plt.set_cmap(cm.Greys_r)
plt.axis('off')
plt.savefig(directory + '/' + "fine" + str(ii) + ".png")
| bsd-3-clause | 4,293,111,319,491,686,000 | 36.609756 | 285 | 0.565013 | false |
molden/hanythingondemand | test/unit/subcommands/test_subcommands_listcmd.py | 1 | 2890 | #!/usr/bin/env python
# #
# Copyright 2009-2015 Ghent University
#
# This file is part of hanythingondemand
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/hanythingondemand
#
# hanythingondemand is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# hanythingondemand is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hanythingondemand. If not, see <http://www.gnu.org/licenses/>.
# #
"""
@author: Ewan Higgs (Universiteit Gent)
"""
from mock import patch, Mock
from vsc.utils.testing import EnhancedTestCase
from ..util import capture
import hod.subcommands.listcmd as hsl
class TestListSubCommand(EnhancedTestCase):
def test_run_no_jobs(self):
with patch('hod.rmscheduler.rm_pbs.Pbs', return_value=Mock(state=lambda: [])):
with patch('hod.rmscheduler.rm_pbs.master_hostname', return_value='good-host'):
with patch('hod.cluster.cluster_jobid', return_value='good-jobid'):
with patch('hod.cluster.known_cluster_labels', return_value=['mylabel']):
app = hsl.ListSubCommand()
self.assertErrorRegex(SystemExit, '0', app.run, [])
def test_run_one_job(self):
expected = "Cluster label\tjob ID\nmylabel \tJobid good-jobid.good-master state good-state ehosts good-host\n"
import hod.rmscheduler.rm_pbs as rm_pbs
job = rm_pbs.PbsJob('good-jobid.good-master', 'good-state', 'good-host')
with patch('hod.rmscheduler.rm_pbs.Pbs', return_value=Mock(state=lambda: [job])):
with patch('hod.rmscheduler.rm_pbs.master_hostname', return_value='good-master'):
with patch('hod.cluster.cluster_jobid', return_value='good-jobid.good-master'):
with patch('hod.cluster.known_cluster_labels', return_value=['mylabel']):
app = hsl.ListSubCommand()
app.run([])
with capture(app.run, []) as (out, err):
self.assertEqual(out, expected)
def test_usage(self):
app = hsl.ListSubCommand()
usage = app.usage()
self.assertTrue(isinstance(usage, basestring))
| gpl-2.0 | -6,087,086,475,910,849,000 | 44.873016 | 123 | 0.66955 | false |
martinkirch/tofbot | bot.py | 1 | 14539 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of tofbot, a friendly IRC bot.
# You may redistribute it under the Simplified BSD License.
# If we meet some day, and you think this stuff is worth it,
# you can buy us a beer in return.
#
# Copyright (c) 2011,2015 Etienne Millon <[email protected]>
# Martin Kirchgessner <[email protected]>
# Nicolas Dumazet <[email protected]>
# Quentin Sabah <[email protected]>
# Christophe-Marie Duquesne <[email protected]>
"""
./bot.py [options] [legacy-arguments]
Legacy-arguments:
NICK CHANNEL [CHANNEL...]
Don't prepend a # to chan names
Tofbot will connect to freenode.net
"""
from datetime import datetime
from irc import Bot
import time
import random
import sys
import os
import plugins
import types
from toflib import *
from toflib import _simple_dispatch, _simple_conf_dispatch, urls_in
import re
from optparse import OptionParser
import json
import atexit
import socket
import traceback
import plugins.euler
import plugins.lolrate
import plugins.donnezmoi
import plugins.jokes
import plugins.twitter
import plugins.dassin
import plugins.eightball
import plugins.sed
import plugins.rick
import plugins.expand
import plugins.like
import plugins.ponce
import plugins.lag
random.seed()
class AutosaveEvent(CronEvent):
def __init__(self, bot, filename):
CronEvent.__init__(self, None)
self.filename = filename
self.bot = bot
def fire(self):
self.bot.save(self.filename)
class Tofbot(Bot):
# Those attributes are published and can be changed by irc users
# value is a str to object converter. It could do sanitization:
# if value is incorrect, raise ValueError
_mutable_attributes = {
"TGtime":int,
"memoryDepth":int
}
def __init__(self, nick=None, name=None, channels=None, password=None, debug=True):
Bot.__init__(self, nick, name, channels, password)
self.joined = False
self.autoTofadeThreshold = 98
self.riddleMaxDist = 2
self.debug = debug
self.TGtime = 5
self.pings = {}
self.memoryDepth = 20
self.lolRateDepth = 8
self.msgMemory = []
self.cron = Cron()
self.plugins = self.load_plugins()
self.startMsgs = []
self.msgHandled = False
def run(self, host=None):
if host == None and not hasattr(self,'host'):
raise Exception("run: no host set or given")
if self.nick == None:
raise Exception("run: no nick set")
if self.name == None:
raise Exception("run: no name set")
self.host = host or self.host
Bot.run(self, self.host)
def load_plugins(self):
d = os.path.dirname(__file__)
plugindir = os.path.join(d, 'plugins')
plugin_instances = {}
for m in dir(plugins):
if type(getattr(plugins,m)) != types.ModuleType:
continue
plugin = getattr(plugins, m)
for n in dir(plugin):
c = getattr(plugin, n)
if type(c) not in [types.ClassType, types.TypeType]:
continue
name = c.__name__
if name.startswith('Plugin'):
instance = c(self)
plugin_name = name[6:].lower()
plugin_instances[plugin_name] = instance
return plugin_instances
# line-feed-safe
def msg(self, chan, msg):
self.msgHandled = True
for m in msg.split("\n"):
Bot.msg(self, chan, m)
def log(self, msg):
if self.debug:
print(msg)
def try_join(self, args):
if (args[0] in ['End of /MOTD command.',
"This server was created ... I don't know"]
):
for chan in self.channels:
self.write(('JOIN', chan))
self.joined = True
def dispatch(self, origin, args):
self.log("o=%s n=%s a=%s" % (origin.sender, origin.nick, args))
is_config = False
senderNick = origin.nick
commandType = args[1]
# if command type is 'BOTCONFIG', bypass the try_join
# because we are configuring the bot before any
# connection.
if commandType != 'BOTCONFIG':
if not self.joined:
self.try_join(args)
return
else:
is_config = 1
args.remove('BOTCONFIG')
commandType = args[1]
if commandType == 'JOIN':
for m in self.startMsgs:
self.msg(self.channels[0], m)
self.startMsgs = []
for p in self.plugins.values():
p.on_join(args[0], senderNick)
elif commandType == 'KICK' and args[3] == self.nick:
reason = args[0]
chan = args[2]
self.write(('JOIN', chan))
for p in self.plugins.values():
p.on_kick(chan, reason)
elif commandType == 'PRIVMSG':
msg_text = args[0]
msg = msg_text.split(" ")
cmd = msg[0]
chan = args[2]
self.pings[senderNick] = datetime.now()
if is_config == False:
self.cron.tick()
if len(cmd) == 0:
return
urls = urls_in(msg_text)
self.msgHandled = False
# We only allow one plugin to answer, so we trigger them
# in random order
for p in self.plugins.values():
if not self.msgHandled:
p.handle_msg(msg_text, chan, senderNick)
for url in urls:
p.on_url(url)
if chan == self.channels[0] and cmd[0] != '!':
self.msgMemory.append("<" + senderNick + "> " + msg_text)
if len(self.msgMemory) > self.memoryDepth:
del self.msgMemory[0]
if len(cmd) == 0 or cmd[0] != '!':
return
cmd = cmd[1:]
chan = None
if len(self.channels) == 0:
chan = 'config'
else:
chan = self.channels[0]
if cmd in _simple_dispatch:
act = self.find_cmd_action("cmd_" + cmd)
act(chan, msg[1:], senderNick)
elif is_config and (cmd in _simple_conf_dispatch):
act = self.find_cmd_action("confcmd_" + cmd)
act(chan, msg[1:], senderNick)
elif cmd == 'context':
self.send_context(senderNick)
elif cmd == 'help':
self.send_help(senderNick)
elif commandType == 'PING':
self.log('PING received in bot.py')
elif commandType == 'ERROR':
traceback.print_exc(file=sys.stdout)
else: # Unknown command type
self.log('Unknown command type : %s' % commandType)
def find_cmd_action(self, cmd_name):
targets = self.plugins.values()
targets.insert(0, self)
for t in targets:
if (hasattr(t, cmd_name)):
action = getattr(t, cmd_name)
return action
def nop(self, chan, args):
pass
return nop
def safe_getattr(self, key):
if key not in self._mutable_attributes:
return None
if not hasattr(self, key):
return "(None)"
else:
return str(getattr(self, key))
def safe_setattr(self, key, value):
try:
converter = self._mutable_attributes.get(key)
if converter is None:
return False
value = converter(value)
setattr(self, key, value)
return True
except ValueError:
pass
@confcmd(1)
def confcmd_chan(self, chan, args):
new_chan = args[0]
if self.channels.count(new_chan) == 0:
self.channels.append(new_chan)
@confcmd(1)
def confcmd_server(self, chan, args):
host = args[0].strip()
self.host = host
@confcmd(1)
def confcmd_port(self, chan, args):
port = int(args[0].strip())
self.port = port
@confcmd(1)
def confcmd_nick(self, chan, args):
nick = args[0].strip()
self.nick = nick
self.user = nick
@confcmd(1)
def confcmd_name(self, chan, args):
name = args[0].strip()
self.name = name
@confcmd(1)
def confcmd_loadchanges(self, chan, args):
filename = args[0].strip()
if not os.path.exists(filename):
return
with open(filename) as f:
changes = f.readlines()
self.startMsgs += changes
@cmd(1)
def cmd_ping(self, chan, args):
"Find when X was last online"
who = args[0]
if who in self.pings:
self.msg(chan,
"Last message from %s was on %s (btw my local time is %s)" %
(who, self.pings[who].__str__(), datetime.now().__str__() ))
else:
self.msg(chan, "I havn't seen any message from " + who)
@cmd(1)
def cmd_get(self, chan, args):
"Retrieve a configuration variable's value"
key = args[0]
value = self.safe_getattr(key)
if value is None:
self.msg(chan, "Ne touche pas à mes parties privées !")
else:
self.msg(chan, "%s = %s" % (key, value))
@cmd(2)
def cmd_set(self, chan, args):
"Set a configuration variable's value"
key = args[0]
value = args[1]
ok = self.safe_setattr(key, value)
if not ok:
self.msg(chan, "N'écris pas sur mes parties privées !")
def send_context(self, to):
"Gives you last messages from the channel"
intro = "Last " + str(len(self.msgMemory)) + " messages sent on " + self.channels[0] + " :"
self.msg(to, intro)
for msg in self.msgMemory:
self.msg(to, msg)
def send_help(self, to):
"Show this help message"
maxlen = 1 + max(map(len, _simple_dispatch))
self.msg(to, "Commands should be entered in the channel or by private message")
self.msg(to, '%*s - %s' % (maxlen, "!help", self.send_help.__doc__))
self.msg(to, '%*s - %s' % (maxlen, "!context", self.send_context.__doc__))
for cmd in _simple_dispatch:
f = self.find_cmd_action("cmd_" + cmd)
self.msg(to, '%*s - %s' % (maxlen, "!"+cmd, f.__doc__))
self.msg(to, "you can also !get or !set " + ", ".join(self._mutable_attributes.keys()))
self.msg(to, "If random-tofades are boring you, enter 'TG " + self.nick + "' (but can be cancelled by GG " + self.nick + ")")
def load(self, filename):
try:
with open(filename) as f:
state = json.load(f)
if state['version'] != 1:
return False
for name, plugin_state in state['plugins'].items():
try:
plugin = self.plugins[name]
plugin.load(plugin_state)
except KeyError:
pass
except IOError as e:
print "Can't load state. Error: ", e
def save(self, filename):
try:
with open(filename, 'w') as f:
state = { 'version': 1
, 'plugins': {}
}
for name, plugin in self.plugins.items():
plugin_state = plugin.save()
state['plugins'][name] = plugin_state
json.dump(state, indent=4, fp=f)
except IOError as e:
print "Can't save state. Error: ", e
def __main():
class FakeOrigin:
pass
def bot_config(b, cmd):
o = FakeOrigin
o.sender = 'bot_config'
o.nick = 'bot_config'
b.dispatch(o, [cmd.strip(), 'BOTCONFIG','PRIVMSG','#bot_config'])
# default timeout for urllib2, in seconds
socket.setdefaulttimeout(15)
# option parser
parser = OptionParser(__doc__)
parser.add_option("-x","--execute", dest="cmds",action="append",help="File to execute prior connection. Can be used several times.")
parser.add_option("-s","--host", dest="host",help="IRC server hostname")
parser.add_option("-p","--port", dest="port",help="IRC server port")
parser.add_option("-k","--nick", dest="nick",help="Bot nickname",default='Tofbot')
parser.add_option("-n","--name", dest="name",help="Bot name",default='Tofbot')
parser.add_option("-c","--channel",dest="channel",action="append",help="Channel to join (without # prefix). Can be used several times.")
parser.add_option("--password", dest="password")
parser.add_option("-d","--debug", action="store_true", dest="debug", default=False)
    (options, args) = parser.parse_args()
# legacy arguments handled first
# (new-style arguments prevail)
if len(args) > 0:
options.nick = options.nick or args[0]
options.channel = options.channel or []
for chan in args[1:]:
if options.channel.count(chan) == 0:
options.channel.append(chan)
# initialize Tofbot
# using command-line arguments
b = Tofbot(options.nick, options.name, options.channel, options.password, options.debug)
# execute command files
# these commands may override command-line arguments
options.cmds = options.cmds or []
for filename in options.cmds:
cmdsfile = open(filename,'r')
for line in cmdsfile:
bot_config(b, line)
# Restore serialized data
state_file = "state.json"
b.load(state_file)
# Perform auto-save periodically
autosaveEvent = AutosaveEvent(b, state_file)
b.cron.schedule(autosaveEvent)
# ... and save at exit
@atexit.register
def save_atexit():
print("Exiting, saving state...")
b.save(state_file)
print("Done !")
# default host when legacy-mode
if options.host == None and len(options.cmds) == 0 and len(args) > 0:
options.host = 'irc.freenode.net'
b.run(options.host)
if __name__ == "__main__":
try:
__main()
except Exception, ex:
import traceback
dumpFile = open("_TOFDUMP.txt","w")
traceback.print_exc(None, dumpFile)
dumpFile.close()
raise ex
| bsd-2-clause | -2,886,930,569,378,294,000 | 30.325431 | 140 | 0.547093 | false |
wmfs/chimp | src/calc/TimestampColumn.py | 1 | 3205 | '''
Created on 4 Mar 2012
@author: Tim.Needham
'''
import cs
import chimpsql
import chimpspec
class TimestampColumn:
'''
classdocs
'''
def __init__(self, timestampColumnTag):
self.type = "timestampColumn"
self.taskOrder = 2
self.outputColumn = cs.grabAttribute(timestampColumnTag,"outputColumn")
self.triggeringColumns=[]
triggeringColumnsTag = timestampColumnTag.getElementsByTagName("triggeringColumns")
if len(triggeringColumnsTag)>0:
for column in triggeringColumnsTag[0].getElementsByTagName("column"):
columnName = cs.grabAttribute(column, "name")
self.triggeringColumns.append(columnName)
def debug(self, appLogger):
appLogger.debug(" timestampColumn")
appLogger.debug(" outputColumn : {0}".format(self.outputColumn))
def getExtraSystemFields(self):
extraSystemFields = []
field = chimpspec.SpecificationRecordField(None, None, column=self.outputColumn, type="datetime", mandatory=True, default="now()")
extraSystemFields.append(field)
return(extraSystemFields)
def requiresFile(self):
return(False)
def getTriggeringColumns(self):
return(self.triggeringColumns)
def getComputedTimestampFunction(self, sourceName, schemaName):
self.name = "computed_{0}_{1}_timestamp_update".format(sourceName, self.outputColumn)
dml = ("CREATE OR REPLACE FUNCTION {0}.{1}()\n"
" RETURNS trigger AS\n"
"$BODY$\n"
" BEGIN\n"
" new.{2} = now();\n"
" RETURN new;\n"
" END;\n"
"$BODY$\n"
"LANGUAGE plpgsql;\n\n".format(schemaName, self.name, self.outputColumn))
return chimpsql.Function(self.name, schemaName, [], dml)
def getComputedTimestampTrigger(self, sourceName, schemaName, tableName, triggerFunction):
triggerName = "h_computed_{0}_{1}_timestamp_update".format(sourceName, self.outputColumn)
when = " OR ".join(map(lambda column: "old.{0} IS DISTINCT FROM new.{0}".format(column), self.triggeringColumns))
return chimpsql.Trigger(triggerName, tableName, triggerFunction.name, triggerFunction.schema,
("CREATE TRIGGER {0}\n"
"BEFORE UPDATE OF {1}\n"
"ON {2}.{3}\n"
"FOR EACH ROW\n"
"WHEN ({4})\n"
"EXECUTE PROCEDURE {5}.{6}();\n\n").format(triggerName, ", ".join(self.triggeringColumns), schemaName, tableName, when, schemaName, triggerFunction.name))
def getComputedTimestampIndex(self, sourceName, schemaName, storageTableName):
indexName = "{0}_{1}_{2}_timestamp".format(schemaName, sourceName, self.outputColumn)
return chimpsql.Index(indexName, storageTableName, schemaName,
"CREATE INDEX {0} ON {1}.{2} ({3});\n".format(indexName, schemaName, storageTableName, self.outputColumn))
| gpl-3.0 | 434,596,775,635,696,060 | 41.746667 | 178 | 0.5922 | false |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/route_filter.py | 1 | 2621 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class RouteFilter(Resource):
"""Route Filter Resource.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict[str, str]
:param rules: Collection of RouteFilterRules contained within a route
filter.
:type rules: list[~azure.mgmt.network.v2017_08_01.models.RouteFilterRule]
:param peerings: A collection of references to express route circuit
peerings.
:type peerings:
list[~azure.mgmt.network.v2017_08_01.models.ExpressRouteCircuitPeering]
:ivar provisioning_state: The provisioning state of the resource. Possible
values are: 'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str
:ivar etag: Gets a unique read-only string that changes whenever the
resource is updated.
:vartype etag: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
'provisioning_state': {'readonly': True},
'etag': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'rules': {'key': 'properties.rules', 'type': '[RouteFilterRule]'},
'peerings': {'key': 'properties.peerings', 'type': '[ExpressRouteCircuitPeering]'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'etag': {'key': 'etag', 'type': 'str'},
}
def __init__(self, **kwargs):
super(RouteFilter, self).__init__(**kwargs)
self.rules = kwargs.get('rules', None)
self.peerings = kwargs.get('peerings', None)
self.provisioning_state = None
self.etag = None
| mit | -4,254,058,854,959,345,700 | 36.442857 | 91 | 0.592522 | false |
lukechurch/coda | csv_conversion/internal2csv.py | 1 | 5252 | '''
Copyright (c) 2017 Coda authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
import argparse
import unicodecsv
import os
nonDecoOutput = ["id", "owner", "data", "timestamp"]
parser = argparse.ArgumentParser()
parser.add_argument("file", help="filepath of the dataset file to convert")
parser.add_argument("--senderIdCol", help="name of column header containing sender ID")
parser.add_argument("--dataCol", help="name of column header containing message text")
parser.add_argument("--messageIdCol", help="name of column header containing unique message ID")
parser.add_argument("--timestamp", help="name of column header containing message timestamps")
args = parser.parse_args()
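# Example invocation (hypothetical file and column names):
#   python internal2csv.py export.csv --senderIdCol sender --dataCol message
# The converter expects ';'-delimited Coda output and writes
# '<name>-converted.csv' next to the input file.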
with open(args.file, "rb") as raw_file:
hs = [h.strip() for h in raw_file.next().split(';')]
header = dict([(h.strip(), True) for h in hs])
missingHeaders = []
for h in nonDecoOutput:
if h not in header:
missingHeaders.append(h)
if len(missingHeaders) > 0:
print "ERROR: Wrong format, missing columns: " + ", ".join(missingHeaders)
else:
reader = unicodecsv.DictReader(raw_file, delimiter=";", fieldnames=hs)
headerStringsForNewFile = {}
schemeIds = {}
schemes = []
dir_path = os.path.dirname(os.path.realpath(args.file))
if args.senderIdCol:
headerStringsForNewFile["owner"] = args.senderIdCol
else:
headerStringsForNewFile["owner"] = "sender"
if args.dataCol:
headerStringsForNewFile["data"] = args.dataCol
else:
headerStringsForNewFile["data"] = "message"
if args.messageIdCol:
headerStringsForNewFile["id"] = args.messageIdCol
else:
headerStringsForNewFile["id"] = "msgId"
if args.timestamp:
headerStringsForNewFile["timestamp"] = args.timestamp
else:
headerStringsForNewFile["timestamp"] = "timestamp"
rowCount = 0
events = {}
eventOrder = []
try:
for row in reader:
if len(row["data"]) == 0 or len(row["id"]) == 0 or len(row["owner"]) == 0:
continue
if row["schemeId"] not in schemeIds:
schemes.append(row["schemeName"])
schemeIds[row["schemeId"]] = 1
if row["id"] not in events:
eventObj = {headerStringsForNewFile["id"]: row["id"],
headerStringsForNewFile["owner"]: row["owner"],
headerStringsForNewFile["timestamp"]: row["timestamp"],
headerStringsForNewFile["data"]: row["data"],
row["schemeName"]: row["deco_codeValue"]}
eventOrder.append(row["id"])
events[row["id"]] = eventObj
else:
events[row["id"]][row["schemeName"]] = row["deco_codeValue"]
rowCount += 1
except UnicodeDecodeError as dec:
print "Can't decode line #%d as unicode!" % rowCount
if len(events) == 0:
print "ERROR: No line read from file has been correctly filled in."
else:
fileName = os.path.splitext(args.file)[0]
with open(os.path.join(dir_path, fileName + "-converted.csv"), "wb") as out:
header = nonDecoOutput + schemes
dialect = unicodecsv.excel
dialect.delimiter = ";"
writer = unicodecsv.DictWriter(out, fieldnames=[headerStringsForNewFile[h] for h in nonDecoOutput] + schemes, dialect=dialect)
writer.writeheader()
for eventId in eventOrder:
writer.writerow(events[eventId])
with open(os.path.join(dir_path, fileName + "-converted.csv"), "r") as myFile:
lines = myFile.readlines()
with open(os.path.join(dir_path, fileName + "-converted.csv"), "w") as myFile:
lines[-1] = lines[-1].strip()
myFile.writelines([item for item in lines if len(item) > 0])
print "SUCCESS: Converted the CSV, stored at \"%s\"" % os.path.join(dir_path, fileName + "-converted.csv") | mit | 1,804,412,583,267,640,300 | 40.039063 | 142 | 0.614813 | false |
hammerlab/isovar | isovar/variant_orf_helpers.py | 1 | 5017 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division, absolute_import
from .logging import get_logger
from .variant_orf import VariantORF
logger = get_logger(__name__)
def match_variant_sequence_to_reference_context(
variant_sequence,
reference_context,
min_transcript_prefix_length,
max_transcript_mismatches,
count_mismatches_after_variant=False,
max_trimming_attempts=2):
"""
Iteratively trim low-coverage subsequences of a variant sequence
until it either matches the given reference context or there
are too few nucleotides left in the variant sequence.
Parameters
----------
variant_sequence : VariantSequence
Assembled sequence from RNA reads, will need to be to be reverse
complemented if matching against a reference transcript on the
negative strand.
reference_context : ReferenceContext
Sequence of reference transcript before the variant and associated
metadata.
min_transcript_prefix_length : int
Minimum number of nucleotides we try to match against a reference
transcript.
max_transcript_mismatches : int
Maximum number of nucleotide differences between reference transcript
sequence and the variant sequence.
count_mismatches_after_variant : bool
Set to true if the number of mismatches after the variant locus should
count toward the total max_transcript_mismatches, which by default
only counts mismatches before the variant locus.
max_trimming_attempts : int
How many times do we try trimming the VariantSequence to higher
levels of coverage before giving up?
Returns VariantORF or None
"""
# if we can't get the variant sequence to match this reference
# context then keep trimming it by coverage until either
for i in range(max_trimming_attempts + 1):
# check the reverse-complemented prefix if the reference context is
# on the negative strand since variant sequence is aligned to
# genomic DNA (positive strand)
variant_sequence_too_short = (
(reference_context.strand == "+" and
len(variant_sequence.prefix) < min_transcript_prefix_length) or
(reference_context.strand == "-" and
len(variant_sequence.suffix) < min_transcript_prefix_length)
)
if variant_sequence_too_short:
logger.info(
"Prefix of variant sequence %s shorter than min allowed %d (iter=%d)",
variant_sequence,
min_transcript_prefix_length,
i + 1)
return None
variant_orf = \
VariantORF.from_variant_sequence_and_reference_context(
variant_sequence=variant_sequence,
reference_context=reference_context)
if variant_orf is None:
return None
n_mismatch_before_variant = (
variant_orf.num_mismatches_before_variant)
n_mismatch_after_variant = (
variant_orf.num_mismatches_after_variant)
logger.info("Iter #%d/%d: %s (len=%d)" % (
i + 1,
max_trimming_attempts + 1,
variant_orf,
len(variant_orf.cdna_sequence)))
total_mismatches = n_mismatch_before_variant
if count_mismatches_after_variant:
total_mismatches += n_mismatch_after_variant
if total_mismatches <= max_transcript_mismatches:
# if we got a variant sequence + reading frame with sufficiently
# few mismatches then call it a day
return variant_orf
logger.info(
("Too many mismatches (%d) between variant sequence %s and "
"reference context %s (attempt=%d/%d)"),
n_mismatch_before_variant,
variant_sequence,
reference_context,
i + 1,
max_trimming_attempts + 1)
# if portions of the sequence are supported by only 1 read
# then try trimming to 2 to see if the better supported
# subsequence can be better matched against the reference
current_min_coverage = variant_sequence.min_coverage()
logger.info(
"Trimming to subsequence covered by at least %d reads",
current_min_coverage + 1)
variant_sequence = variant_sequence.trim_by_coverage(
current_min_coverage + 1)
return None
| apache-2.0 | -7,773,328,974,950,547,000 | 38.503937 | 86 | 0.651983 | false |
vfine/webplatform | pmModules/users.py | 1 | 4422 | # users.py
# Display user info
# $Id: users.py 13454 2012-11-08 17:54:19Z fine $
#
import re, os
from datetime import datetime, timedelta
import pmConfig.pmConfig as config
import pmUtils.pmUtils as utils
from pmCore.pmModule import pmModule
from pmTaskBuffer.pmTaskBuffer import pmtaskbuffer as pmt
from operator import itemgetter, attrgetter
class users(pmModule):
#______________________________________________________________________________________
def __init__(self,name=None,parent=None,obj=None):
pmModule.__init__(self,name,parent,obj)
self.publishUI(self.doJson)
#______________________________________________________________________________________
def makeTop(self,users,top):
return sorted(users, key=itemgetter(top), reverse=True)
#______________________________________________________________________________________
def doJson(self,hours=None,days=180,tstart=None,tend=None,PRODUSERNAME=None,top='nJobsA', topsize=0):
""" Get the list of the users
<ul>
<li> hours = use the last hours
<li> days = use the last days
<li> topsize - the size of the top list
<br> = 0 - all users are shown
<li> top - select top list using 'top' column
<ul>
<li> 'nJobsA' - the number of the jobs
<li> 'CPUA1' - Personal the CPU used for the last 24 hours
<li> 'CPUA7' - Personal Cpu used for the last 7 days
<li> 'CPUP1' - Group Cpu for the last 24 hours
<li> 'CPUP7' - Group Cpu for the last 7 days
</ul>
</ul>
"""
if days == None: days = 0
if hours == None: hours = 0
main = {"buffer":{ "params" : {'hours' : days*24+hours }
, "method" : 'getUsers'
, "type" : False
}
}
columns="name,njobsa,latestjob,cpua1,cpua7,cpup1,cpup7"
if topsize==0 or topsize==None: columns+=",scriptcache"
q = pmt.getUsers(PRODUSERNAME,days*24+hours,columns=columns)
header = q['header']
users = q['rows']
if PRODUSERNAME == None:
if topsize > 0:
title = "Recent %(topsize)d Top Panda Analysis Users" % { 'topsize' : topsize }
else:
title = "Recent Panda Analysis Users"
else:
title = "PanDA jobs for %s" % PRODUSERNAME
main["buffer"]["params"]['user'] = PRODUSERNAME,
iNJobs = utils.name2Index(header,"njobsa")
iLatest = utils.name2Index(header,"latestjob")
jobpertime = {"anajobs" : 0, "n1000" : 0, "n10k" : 0 }
recent = { "d3" :0, "d7" :0 , "d30" : 0, "d90" : 0, "d180" :0 }
for u in users:
nxtp = u[iNJobs]
if nxtp == None: continue
nxtp = int(nxtp)
if nxtp > 0 : jobpertime["anajobs"] += nxtp
if nxtp > 1000:
jobpertime["n1000"] += 1;
if nxtp > 10000: jobpertime["n10k"] += 1
nxtp = u[iLatest]
if nxtp != None:
                diffdays = (datetime.utcnow() - nxtp).days
if diffdays < 4: recent["d3"] += 1
if diffdays < 8: recent["d7"] += 1
if diffdays < 31: recent["d30"] += 1
if diffdays < 91: recent["d90"] += 1
if diffdays < 181: recent["d180"] += 1
if topsize > 0 and top != None:
iTop = utils.name2Index(header,top)
users = self.makeTop(users,iTop)[:topsize]
main["buffer"]["top"] = { 'top' : top, 'size' : topsize }
# remove thr group
main["buffer"]["data"] = {'header' : header,'rows' : users }
main["buffer"]["totaljobs"] = jobpertime
main["buffer"]["recent"] = recent
self.publishTitle(title)
self.publish(main)
self.publish( "%s/%s" % (utils.fileScriptURL(),"taskBuffer/%s.js" % "getUsers"),role="script")
return
def leftMenu(self):
""" Return html for inclusion in left menu """
txt = "<a href='%s/users'>Users</a>" % this.server().script()
return txt
    def topMenu(self):
        """ Return html for inclusion in top menu """
        txt = "<a href='%s/users'>Users</a>" % self.config().pandamon['url']
        return txt
| lgpl-3.0 | 3,956,176,911,021,721,000 | 39.2 | 104 | 0.504749 | false |
vulogov/zap_proxy | etc/python/CacheDriver.py | 1 | 1608 | __version__ = 'v0.1.0'
import time
class CacheDriver:
def __init__(self, creator):
self.creator = creator
self.ready = False
self.name = ""
def set_cache_args(self, args):
self.args = args
def set(self, name, key, value):
if not self.ready or self.name != name:
self._open(name)
if not self.ready:
return False
self._set(key, time.time(), value)
def get(self, name, key):
if not self.ready or self.name != name:
self._open(name)
if not self.ready:
raise KeyError, key
return self._get(key)
def age(self, name, key):
if not self.ready or self.name != name:
self._open(name)
if not self.ready:
raise KeyError, key
return self._age(key)
def acquire(self, name, key):
self.set(name, "lock:%s"%key, 1)
def release(self, name, key):
self.set(name, "lock:%s"%key, 0)
def lock(self, name, key):
res = self.get("lock:%s"%name, key)
if res == None or res == 0:
return False
return True
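    # Usage sketch (hypothetical subclass implementing _open/_set/_get/_age/_close):
    #   cache = MyDriver(creator)
    #   cache.acquire("jobs", "task-1")   # stores lock:task-1 = 1 under "jobs"
    #   cache.lock("jobs", "task-1")      # -> True while the lock is held
    #   cache.release("jobs", "task-1")   # stores lock:task-1 = 0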
def close(self):
if not self.ready:
return
self._close()
class CacheDriverCreator:
def __init__(self, name, env, logger, cls, args, argv):
self.cls = cls
self.name = name
self.env = env
self.logger = logger
self.args = args
self.argv = argv
self.init_cache()
def init_cache(self):
pass
def driver(self):
return self.cls(self) | gpl-3.0 | -9,193,560,970,287,625,000 | 26.271186 | 59 | 0.521144 | false |
blueshed/blueshed-micro | blueshed/micro/web/rpc_handler.py | 1 | 4761 | from pkg_resources import resource_filename # @UnresolvedImport
from tornado import web
from tornado.escape import json_decode
from tornado.web import asynchronous, RequestHandler
import tornado.concurrent
from blueshed.micro.utils.json_utils import dumps
from blueshed.micro.web.context_mixin import ContextMixin
from blueshed.micro.web.cors_mixin import CorsMixin, cors
import functools
import logging
acceptable_form_mime_types = [
"application/x-www-form-urlencoded; charset=UTF-8",
"application/x-www-form-urlencoded"
]
acceptable_json_mime_types = [
"application/json; charset=UTF-8",
"application/json;"
]
class RpcHandler(ContextMixin, CorsMixin, RequestHandler):
'''
Calls services in application.settings['services']
get:
returns the meta data about a service
or all services
suffix .js returns a client control
javascript object for websocket support
suffix <service name>.html returns
an html form to run the service
post:
form-encoded or json-encoded input
result is always json
'''
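    # Request sketch (hypothetical service name and mount point):
    #   GET  /rpc/echo.js            -> javascript client stub for the services
    #   POST /rpc/echo   {"msg": 1}  -> runs the 'echo' service, returns JSON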
def initialize(self,
html_template=None,
js_template=None,
http_origins=None,
ws_url=None):
RequestHandler.initialize(self)
self.set_cors_methods("OPTIONS,GET,POST")
if http_origins:
self.set_cors_whitelist(http_origins)
self._html_template = html_template
self._js_template = js_template
self._ws_url = ws_url if ws_url else ''
def get_template_path(self):
''' overrides the template path to use this module '''
if self._html_template is None and self._js_template is None:
return resource_filename('blueshed.micro.web', "templates")
return RequestHandler.get_template_path(self)
def write_error(self, *args, **kwargs):
''' Must override base write error to stop uncaught HTTP errors from clearing CORS headers '''
self.write_cors_headers()
RequestHandler.write_error(self, *args, **kwargs)
def options(self, *args, **kwargs):
self.cors_options()
@cors
def get(self, path=None):
services = self.get_service(path)
if services is None:
services = self.settings['services']
if path is not None and path.endswith(".js"):
self.set_header('content-type', 'text/javascript')
self.render(self._js_template or "api-tmpl.js",
services=services.values(),
ws_url=self._ws_url)
return
elif path is not None and path.endswith(".html"):
self.render(self._html_template or "service.html",
service=services,
error=None,
result=None)
return
self.set_header('content-type', 'application/json; charset=UTF-8')
self.write(dumps(services, indent=4))
@asynchronous
@cors
def post(self, path):
content_type = self.request.headers['content-type']
if content_type in acceptable_json_mime_types:
kwargs = json_decode(self.request.body)
elif content_type in acceptable_form_mime_types:
kwargs = dict([(k, self.get_argument(k))
for k in self.request.body_arguments.keys()
if k[0] != "_"])
elif content_type and content_type.startswith("multipart/form-data"):
kwargs = dict([(k, self.get_argument(k))
for k in self.request.body_arguments.keys()
if k[0] != "_"])
else:
raise web.HTTPError(415, 'content type not supported {}'.format(
self.request.headers['content-type']))
service = self.get_service(path)
service.parse_http_kwargs(kwargs)
context = self.settings['micro_context'](
-1, -1, service.name, {"current_user": self.current_user},
self)
try:
logging.info("%s(%r)", service.name, kwargs)
result = service.perform(context, **kwargs)
if tornado.concurrent.is_future(result):
result.add_done_callback(
functools.partial(self.handle_future,
service,
context,
True))
else:
self.handle_result(service, context, result)
self.finish()
except Exception as ex:
self.write_err(context, ex)
self.finish()
| mit | -7,662,630,218,613,734,000 | 37.088 | 102 | 0.572989 | false |
gh4w/some | web/diego/pronostix/views.py | 1 | 2629 | from datetime import datetime
from django.utils import timezone
from django.views import generic
from django.contrib.auth.decorators import login_required
from django.shortcuts import render, get_object_or_404, HttpResponseRedirect
from django.urls import reverse
import pronostix.models as models
import pronostix.forms as forms
def index(request):
return render(request, 'pronostix/index.html')
class ProchainesRencontresView(generic.ListView):
template_name = 'pronostix/prochaines_rencontres.html'
context_object_name = 'rencontres'
def get_queryset(self):
maintenant = datetime.now()
return models.Rencontre.objects.filter(date__gt=maintenant).order_by('date')
class ResultatsView(generic.ListView):
template_name = 'pronostix/resultats.html'
context_object_name = 'resultats'
def get_queryset(self):
return models.Resultat.objects.all().order_by('rencontre__date')
@login_required
def lister_pronostics(request):
liste = []
now = timezone.now()
for rencontre in models.Rencontre.objects.filter(date__gt = now).order_by('date'):
prono = rencontre.pronostic_set.filter(utilisateur = request.user).first()
liste.append((rencontre, prono))
return render(request, 'pronostix/pronostics.html', { 'models': liste })
@login_required
def modifier_pronostic(request, rencontre_id):
prono = get_object_or_404(models.Pronostic, utilisateur = request.user, rencontre_id = rencontre_id)
if request.method == 'POST':
form = forms.PronosticForm(request.POST, instance = prono)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse('pronostix:pronostics'))
else:
form = forms.PronosticForm(instance = prono)
return render(request, 'pronostix/modifier_pronostic.html', {'prono': form})
@login_required
def ajouter_pronostic(request, rencontre_id):
rencontre = get_object_or_404(models.Rencontre, pk = rencontre_id)
if request.method == 'POST':
form = forms.PronosticForm(request.POST)
if form.is_valid():
prono = form.save(commit = False)
prono.utilisateur = request.user
prono.rencontre = rencontre
prono.save()
return HttpResponseRedirect(reverse('pronostix:pronostics'))
else:
form = forms.PronosticForm()
return render(request, 'pronostix/ajouter_pronostic.html', { 'rencontre': rencontre, 'prono': form } )
class HelloView(generic.ListView):
template_name = 'pronostix/hello.html'
def get_queryset(self):
return None
| mit | 533,804,269,170,800,700 | 36.028169 | 106 | 0.692278 | false |
schnitzlein/weatherstation | obsolete/Display.py | 1 | 1636 | import os, syslog
import pygame
import logging
class PyLcd :
    screen = None
colourBlack = (0, 0, 0)
def __init__(self):
"Ininitializes a new pygame screen using the framebuffer"
# Based on "Python GUI in Linux frame buffer"
# http://www.karoltomala.com/blog/?p=679
disp_no = os.getenv("DISPLAY")
if disp_no:
print("I'm running under X Server. With display = {0}\nexit now.".format(disp_no))
exit(0)
os.putenv('SDL_FBDEV', '/dev/fb1')
# Select frame buffer driver
# Make sure that SDL_VIDEODRIVER is set
driver = 'fbcon'
if not os.getenv('SDL_VIDEODRIVER'):
os.putenv('SDL_VIDEODRIVER', driver)
        try:
            pygame.display.init()
        except pygame.error:
            # must precede the generic handler, otherwise it is unreachable
            print('Driver: {0} failed.'.format(driver))
            exit(0)
        except Exception as e:
            print("exception: {}".format(e))
size = (pygame.display.Info().current_w, pygame.display.Info().current_h)
self.screen = pygame.display.set_mode(size, pygame.FULLSCREEN)
if self.screen:
logging.debug("screen Initialized h: {} w: {}".format(pygame.display.Info().current_h, pygame.display.Info().current_w))
# Clear the screen to start
self.screen.fill((0, 0, 0))
# Initialise font support
pygame.font.init()
# Render the screen
pygame.display.update()
def __del__(self):
logging.info("pygame screen destructor called -> QUIT now.")
pygame.display.quit()
#print("Destructor pygame display shuts down.")
| mit | 4,967,346,822,978,412,000 | 34.565217 | 132 | 0.586797 | false |
rectangle-dbmi/Realtime-Port-Authority | scripts/reverseStrings.py | 1 | 1080 | """
Jeremy Jao
2/24/2015
This was introduced because BitmapDescriptorFactory.fromAsset() no longer works...
Must use a hack to use the resource enumeration...
String Reversing....
"""
import os
def renameFile(filename):
"""
renames the file....
"""
routename = filename[:-4]
os.rename(filename, 'bus_' + routename.lower() + '.png')
def reverseStrings():
"""
    Renames every route PNG in the current directory:
    prefixes the name with 'bus_' and lowercases the route,
    skipping files that have already been renamed
"""
for filename in os.listdir("."):
        if not filename.startswith('bus_') and filename.endswith('.png'):  # 'b_' never matched the 'bus_' prefix, causing double renames
renameFile(filename)
def recoverFile(filename):
    # strip the 'bus_' prefix but keep the '.png' extension
    # (the original upper/lower case cannot be restored)
    os.rename(filename, filename[filename.find('_') + 1:])
def recoverStrings():
"""
recovers the filename....
"""
for filename in os.listdir("."):
if filename.startswith('bus_') and filename.endswith('.png'):
recoverFile(filename)
def main():
"""
Main...
"""
reverseStrings()
# recoverStrings()
if __name__ == "__main__":
main() | gpl-3.0 | 4,362,240,186,759,402,500 | 21.520833 | 82 | 0.615741 | false |
dgilland/alchy | alchy/_compat.py | 1 | 3201 | # -*- coding: utf-8 -*-
# flake8: noqa
# pylint: skip-file
"""Python 2/3 compatibility
Some py2/py3 compatibility support based on a stripped down
version of six so we don't have to depend on a specific version
of it.
Borrowed from
https://github.com/mitsuhiko/flask/blob/master/flask/_compat.py
"""
import sys
PY3 = sys.version_info[0] == 3
def _identity(x): return x
if PY3:
text_type = str
string_types = (str,)
integer_types = (int,)
def iterkeys(d): return iter(d.keys())
def itervalues(d): return iter(d.values())
def iteritems(d): return iter(d.items())
from io import StringIO
def reraise(tp, value, tb=None):
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
implements_to_string = _identity
else:
text_type = unicode
string_types = (str, unicode)
integer_types = (int, long)
def iterkeys(d): return d.iterkeys()
def itervalues(d): return d.itervalues()
def iteritems(d): return d.iteritems()
from cStringIO import StringIO
exec('def reraise(tp, value, tb=None):\n raise tp, value, tb')
def implements_to_string(cls):
cls.__unicode__ = cls.__str__
cls.__str__ = lambda x: x.__unicode__().encode('utf-8')
return cls
def with_metaclass(meta, *bases):
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instantiation that replaces
# itself with the actual metaclass. Because of internal type checks
# we also need to make sure that we downgrade the custom metaclass
# for one level to something closer to type (that's why __call__ and
# __init__ comes back from type etc.).
#
# This has the advantage over six.with_metaclass in that it does not
# introduce dummy classes into the final MRO.
class metaclass(meta):
__call__ = type.__call__
__init__ = type.__init__
def __new__(cls, name, this_bases, d):
if this_bases is None:
return type.__new__(cls, name, (), d)
return meta(name, bases, d)
return metaclass('temporary_class', None, {})
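# Usage sketch: with_metaclass avoids the py2/py3 metaclass syntax split, e.g.
#   class Model(with_metaclass(ModelMeta, Base)):
#       pass
# gives Model the metaclass ModelMeta on both interpreters without leaving a
# dummy intermediate class in the final MRO.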
# Certain versions of pypy have a bug where clearing the exception stack
# breaks the __exit__ function in a very peculiar way. This is currently
# true for pypy 2.2.1 for instance. The second level of exception blocks
# is necessary because pypy seems to forget to check if an exception
# happend until the next bytecode instruction?
BROKEN_PYPY_CTXMGR_EXIT = False
if hasattr(sys, 'pypy_version_info'):
class _Mgr(object):
def __enter__(self):
return self
def __exit__(self, *args):
sys.exc_clear()
try:
try:
with _Mgr():
raise AssertionError()
except:
raise
except TypeError:
BROKEN_PYPY_CTXMGR_EXIT = True
except AssertionError:
pass
# Define classmethod_func(f) to retrieve the unbound function of classmethod f
if sys.version_info[:2] >= (2, 7):
def classmethod_func(f): return f.__func__
else:
def classmethod_func(f): return f.__get__(1).im_func
| mit | -7,518,559,914,268,568,000 | 27.327434 | 78 | 0.630116 | false |
stephantul/somber | somber/plsom.py | 1 | 5148 | """The PLSOM."""
import logging
from typing import Callable, Dict, Optional, Tuple
import numpy as np
from tqdm import tqdm
from somber.som import BaseSom
from somber.components.initializers import range_initialization
from somber.components.utilities import Scaler
logger = logging.getLogger(__name__)
class PLSom(BaseSom):
# Static property names
param_names = {"map_dimensions", "weights", "data_dimensionality", "params"}
def __init__(
self,
map_dimensions: Tuple[int],
data_dimensionality: Optional[int] = None,
beta: Optional[float] = None,
initializer: Callable = range_initialization,
scaler: Optional[Scaler] = None,
) -> None:
"""
An implementation of the PLSom.
The ParameterLess Som is a SOM which does not rely on time-induced
plasticity adaptation. Instead, the plasticity of the SOM is adapted
in an online fashion by continuously monitoring the error of each presented
item.
In general, the PLSom is less prone to catastrophic interference, or
"forgetting" than the original SOM. Simultaneously, it is also more suited
to re-adapting to changes in distribution. This is because the SOM loses
its plasticity according to an exponentially decreasing learning rate and
neighborhood size.
:param map_dimensions: A tuple describing the map size. For example, (10, 10)
will create a 10 * 10 map with 100 neurons, while (10, 10, 10) creates a
10 * 10 * 10 map with 1000 neurons.
:param data_dimensionality: The dimensionality of the input data.
:param initializer: A function which takes in the input data and weight matrix
and returns an initialized weight matrix. The initializers are defined in
somber.components.initializers. Can be set to None.
:param scaler: An initialized instance of Scaler() which is used to scale the
data to have mean 0 and stdev 1.
"""
super().__init__(
map_dimensions,
data_dimensionality=data_dimensionality,
argfunc="argmin",
valfunc="min",
params={"r": {"value": 0, "factor": 1, "orig": 0}},
initializer=initializer,
scaler=scaler,
)
self.beta = beta if beta else 2
def _epoch(
self,
X: np.ndarray,
batch_size: int,
updates_epoch: int,
constants: Dict[str, float],
progressbar: tqdm,
) -> None:
"""
Run a single epoch.
This function shuffles the data internally,
as this improves performance.
:param X: The training data.
:param batch_size: The batch size
:param updates_epoch: The number of updates to perform per epoch
:param constants: A dictionary containing the constants with which to update the
parameters in self.parameters.
:param progressbar: The progressbar instance to show and update during training
"""
# Create batches
X_ = self._create_batches(X, batch_size)
X_len = np.prod(X.shape[:-1])
# Initialize the previous activation
prev = self._init_prev(X_)
prev = self.distance_function(X_[0], self.weights)[0]
influences = self._update_params(prev)
# Iterate over the training data
for idx, x in enumerate(X_):
# Our batches are padded, so we need to
# make sure we know when we hit the padding
# so we don't inadvertently learn zeroes.
diff = X_len - (idx * batch_size)
if diff and diff < batch_size:
x = x[:diff]
# Prev_activation may be None
if prev is not None:
prev = prev[:diff]
# if idx > 0 and idx % update_step == 0:
influences = self._update_params(prev)
prev = self._propagate(x, influences, prev_activation=prev)
if progressbar is not None:
progressbar.update(batch_size)
def _update_params(self, constants: np.ndarray) -> np.ndarray:
"""Update the params."""
constants = np.max(np.min(constants, 1))
self.params["r"]["value"] = max([self.params["r"]["value"], constants])
epsilon = constants / self.params["r"]["value"]
influence = self._calculate_influence(epsilon)
# Account for learning rate
return influence * epsilon
def _calculate_influence(self, epsilon: float) -> np.ndarray:
"""
Pre-calculate the influence for a given value of epsilon.
The neighborhood has size num_neurons * num_neurons, so for a
30 * 30 map, the neighborhood will be size (900, 900).
:param epsilon: The neighborhood value.
:param neighborhood: The influence from each neuron to each other neuron.
"""
n = (self.beta - 1) * np.log(1 + epsilon * (np.e - 1)) + 1
grid = np.exp((-self.distance_grid) / n ** 2)
return grid.reshape(self.num_neurons, self.num_neurons)
| mit | 6,037,134,522,918,761,000 | 37.41791 | 88 | 0.612277 | false |
skggm/skggm | examples/trace_plot_example.py | 1 | 3138 | """
Visualize Regularization Path
=============================
Plot the edge level coefficients (inverse covariance entries)
as a function of the regularization parameter.
"""
import sys
import numpy as np
from sklearn.datasets import make_sparse_spd_matrix
sys.path.append("..")
from inverse_covariance import QuicGraphicalLasso
from inverse_covariance.plot_util import trace_plot
from inverse_covariance.profiling import LatticeGraph
def make_data(n_samples, n_features):
prng = np.random.RandomState(1)
prec = make_sparse_spd_matrix(
n_features, alpha=.98, smallest_coef=.4, largest_coef=.7, random_state=prng
)
cov = np.linalg.inv(prec)
d = np.sqrt(np.diag(cov))
cov /= d
cov /= d[:, np.newaxis]
prec *= d
prec *= d[:, np.newaxis]
X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples)
X -= X.mean(axis=0)
X /= X.std(axis=0)
return X, cov, prec
def make_data_banded(n_samples, n_features):
alpha = 0.1
cov, prec, adj = LatticeGraph(
n_blocks=2, random_sign=True, chain_blocks=True, seed=1
).create(n_features, alpha)
prng = np.random.RandomState(2)
X = prng.multivariate_normal(np.zeros(n_features), cov, size=n_samples)
return X, cov, prec
def show_quic_coefficient_trace(X):
path = np.logspace(np.log10(0.01), np.log10(1.0), num=50, endpoint=True)[::-1]
estimator = QuicGraphicalLasso(lam=1.0, path=path, mode="path")
estimator.fit(X)
trace_plot(estimator.precision_, estimator.path_, n_edges=20)
def show_quic_coefficient_trace_truth(X, truth):
path = np.logspace(np.log10(0.01), np.log10(1.0), num=50, endpoint=True)[::-1]
estimator = QuicGraphicalLasso(lam=1.0, path=path, mode="path")
estimator.fit(X)
trace_plot(estimator.precision_, estimator.path_, n_edges=6, ground_truth=truth)
if __name__ == "__main__":
# example 1
n_samples = 10
n_features = 5
X, cov, prec = make_data(n_samples, n_features)
print("Showing basic Erdos-Renyi example with ")
print(" n_samples=10")
print(" n_features=5")
print(" n_edges=20")
show_quic_coefficient_trace(X)
# use ground truth for display
print("Showing basic Erdos-Renyi example with ")
    print("    n_samples=10")
print(" n_features=5")
print(" n_edges=6")
print(" ground_truth (shows only false pos and negatives)")
show_quic_coefficient_trace_truth(X, prec)
# example 2
n_samples = 110
n_features = 100
X, cov, prec = make_data_banded(n_samples, n_features)
print("Showing basic Lattice example with ")
print(" n_samples=110")
print(" n_features=100")
print(" n_blocks=2")
print(" random_sign=True")
print(" n_edges=20")
show_quic_coefficient_trace(X)
# use ground truth for display
print("Showing basic Lattice example with ")
print(" n_samples=110")
print(" n_features=100")
print(" n_blocks=2")
print(" random_sign=True")
print(" n_edges=6")
print(" ground_truth (shows only false pos and negatives)")
show_quic_coefficient_trace_truth(X, prec)
| mit | -7,484,878,322,205,193,000 | 29.764706 | 84 | 0.646272 | false |
takaakiaoki/PyFoam | unittests/Basics/TemplateFile.py | 1 | 6744 |
import unittest
from PyFoam.Basics.TemplateFile import TemplateFile,TemplateFileOldFormat,PyratempPreprocessor
from PyFoam.Error import FatalErrorPyFoamException
from tempfile import mktemp
from PyFoam.ThirdParty.six import PY3
import sys
theSuite=unittest.TestSuite()
template1="""$$ y = 3+x
This should be $x+y$"""
template2="""
$$ xxx=13
$$ xx=34+xxx
$2*x+xx-xxx$
"""
templateFor="""$$ y = 2*x
<!--(for i in range(y))--> @!i!@ <!--(end)-->#!
"""
templateMath="sqrt(x) = $sqrt(x)$"
templateList="""<!--(for e in theList)-->#!
<!--(if e.lower()=="joe")-->#!
Big @!e!@
<!--(else)-->#!
Little @!e!@
<!--(end)-->#!
<!--(end)-->#!
"""
templateMacro="""<!--(macro tabsquare)-->
@!x!@ \t = @!x*x!@
<!--(end)-->
<!--(for i in vals)-->@!tabsquare(x=i)!@<!--(end)-->#!
"""
templateBuiltIn="""
<!--(if True)-->TRUE<!--(end)-->
<!--(if not False)-->FALSE<!--(end)-->
@!min(2,3)!@ @!max(2,3)!@
@!chr(42)!@ @!ord(' ')!@
"""
class TemplateFileTest(unittest.TestCase):
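    # Editor's worked example: rendering template1 with x == -1 first runs the
    # variable line "$$ y = 3+x", binding y = 2, then expands "$x+y$" to
    # -1 + 2 == 1, so the output is "This should be 1" (exactly what
    # testTemplateFileString below asserts).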
def testTemplateFileString(self):
t=TemplateFile(content=template1,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
fName=mktemp()
t.writeToFile(fName,{"x":1+2.})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFile(name=fName,expressionDelimiter="$")
self.assertEqual(t.getString({"x":-1}),"This should be 1")
def testTemplateFileLongVars(self):
t=TemplateFile(content=template2,expressionDelimiter="$")
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileForLoop(self):
t=TemplateFile(content=templateFor)
self.assertEqual(t.getString({"x":2})," 0 1 2 3 ")
def testTemplateFileMacro(self):
t=TemplateFile(content=templateMacro)
if PY3 and sys.version_info.minor>1:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.889999999999999\n-1 \t = 1\n")
else:
self.assertEqual(t.getString({"vals":[2,3.3,-1]}),"2 \t = 4\n3.3 \t = 10.89\n-1 \t = 1\n")
def testTemplateFileListLoop(self):
t=TemplateFile(content=templateList)
self.assertEqual(t.getString({"theList":["Henry","Joe","joe","Tom"]}),"Little Henry\nBig Joe\nBig joe\nLittle Tom\n")
def testTemplateFileLongMath(self):
t=TemplateFile(content=templateMath,expressionDelimiter="$")
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFileMathRealDelim(self):
t=TemplateFile(content=templateMath.replace("$","|"))
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0")
def testTemplateFilePercentDelimiter(self):
t=TemplateFile(content="x=$!x!$")
self.assertEqual(t.getString({"x":4}),"x=4")
def testTemplateFileBuiltinStuff(self):
t=TemplateFile(content=templateBuiltIn)
self.assertEqual(t.getString({}),"\nTRUE\nFALSE\n2 3\n* 32\n")
theSuite.addTest(unittest.makeSuite(TemplateFileTest,"test"))
class TemplateFileOldFormatTest(unittest.TestCase):
def testTemplateFileString(self):
t=TemplateFileOldFormat(content=template1)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
fName=mktemp()
t.writeToFile(fName,{"x":"1+sqrt(4)"})
result=open(fName).read()
self.assertEqual(result,"This should be 9.0\n")
def testTemplateFileFile(self):
fName=mktemp()
open(fName,"w").write(template1)
t=TemplateFileOldFormat(name=fName)
self.assertEqual(t.getString({"x":-1}),"This should be 1\n")
def testTemplateFileLongVars(self):
t=TemplateFileOldFormat(content=template2)
self.assertEqual(int(t.getString({"x":1})),36)
def testTemplateFileLongMath(self):
t=TemplateFileOldFormat(content=templateMath)
self.assertEqual(t.getString({"x":4}),"sqrt(x) = 2.0\n")
theSuite.addTest(unittest.makeSuite(TemplateFileOldFormatTest,"test"))
class PyratempPreprocessorTest(unittest.TestCase):
def testFullPreprocessing(self):
p=PyratempPreprocessor()
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoVarLinePreprocessing(self):
p=PyratempPreprocessor(dovarline=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$$ a=2 ')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertEqual(p("$$ a "),"$$ a ")
self.assertEqual(p("$$ a=2\n"),'$$ a=2\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$$ a=2\n$$ b=3')
self.assertEqual(p(" $foo$ $bar$ ")," $!foo!$ $!bar!$ ")
self.assertEqual(p("$foo$ $bar$"),"$!foo!$ $!bar!$")
self.assertEqual(p("$foo$ $bar$\n"),"$!foo!$ $!bar!$\n")
def testNoExprPreprocessing(self):
p=PyratempPreprocessor(doexpr=False)
self.assertEqual(p("nix\nda"),"nix\nda")
self.assertEqual(p("nix\nda\n"),"nix\nda\n")
self.assertEqual(p(""),"")
self.assertEqual(p("\n"),"\n")
self.assertEqual(p("$$ a=2 "),'$!setvar("a", "2")!$#!')
self.assertEqual(p(" $$ a=2 ")," $$ a=2 ")
self.assertRaises(FatalErrorPyFoamException,p,"$$ a ")
# Does not work with old nose
# with self.assertRaises(FatalErrorPyFoamException):
# p("$$ a ")
self.assertEqual(p("$$ a=2\n"),'$!setvar("a", "2")!$#!\n')
self.assertEqual(p("$$ a=2\n$$ b=3"),'$!setvar("a", "2")!$#!\n$!setvar("b", "3")!$#!')
self.assertEqual(p(" $foo$ $bar$ ")," $foo$ $bar$ ")
self.assertEqual(p("$foo$ $bar$"),"$foo$ $bar$")
self.assertEqual(p("$foo$ $bar$\n"),"$foo$ $bar$\n")
theSuite.addTest(unittest.makeSuite(PyratempPreprocessorTest,"test"))
| gpl-2.0 | -8,537,140,287,998,110,000 | 36.259669 | 125 | 0.586447 | false |
akikoskinen/androidtestxmlgenerator | instrumentationtotestresulttransformer.py | 1 | 1055 | # Copyright (c) 2011 Aki Koskinen
# Licensed under the MIT license. See LICENSE file for details.
from testresult import TestSuite, TestCase
TEST_COMPLETED_OK = 0
TEST_COMPLETED_ERROR = -1
TEST_COMPLETED_FAIL = -2
INTERESTING_CODES = (TEST_COMPLETED_OK, TEST_COMPLETED_ERROR, TEST_COMPLETED_FAIL)
def Transform(instrumentation):
ret = []
suites = {}
for status in instrumentation.statuses():
if status.statusCode in INTERESTING_CODES:
fullClassName = status['class']
(package, dot, className) = fullClassName.rpartition('.')
if fullClassName in suites:
suite = suites[fullClassName]
else:
suite = TestSuite(fullClassName, package, 0.0)
suites[fullClassName] = suite
ret.append(suite)
case = TestCase(status['test'])
suite.addTestCase(case)
if status.statusCode == TEST_COMPLETED_FAIL:
case.setFailing(status['stack'].partition('\n')[0], status['stack'])
if status.statusCode == TEST_COMPLETED_ERROR:
case.setErroring(status['stack'].partition('\n')[0], status['stack'])
return ret
| mit | 8,402,928,193,915,289,000 | 27.513514 | 82 | 0.706161 | false |
danakj/chromium | mojo/public/tools/bindings/pylib/mojom/parse/translate.py | 3 | 9030 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Translates parse tree to Mojom IR."""
import re
from . import ast
def _DuplicateName(values):
"""Returns the 'name' of the first entry in |values| whose 'name' has already
been encountered. If there are no duplicates, returns None."""
names = set()
for value in values:
if value['name'] in names:
return value['name']
names.add(value['name'])
return None
def _MapTreeForType(func, tree, type_to_map, scope):
assert isinstance(type_to_map, type)
if not tree:
return []
result = [func(subtree)
for subtree in tree if isinstance(subtree, type_to_map)]
duplicate_name = _DuplicateName(result)
if duplicate_name:
raise Exception('Names in mojom must be unique within a scope. The name '
'"%s" is used more than once within the scope "%s".' %
(duplicate_name, scope))
return result
def _MapKind(kind):
map_to_kind = {'bool': 'b',
'int8': 'i8',
'int16': 'i16',
'int32': 'i32',
'int64': 'i64',
'uint8': 'u8',
'uint16': 'u16',
'uint32': 'u32',
'uint64': 'u64',
'float': 'f',
'double': 'd',
'string': 's',
'handle': 'h',
'handle<data_pipe_consumer>': 'h:d:c',
'handle<data_pipe_producer>': 'h:d:p',
'handle<message_pipe>': 'h:m',
'handle<shared_buffer>': 'h:s'}
if kind.endswith('?'):
base_kind = _MapKind(kind[0:-1])
# NOTE: This doesn't rule out enum types. Those will be detected later, when
# cross-reference is established.
reference_kinds = ('m', 's', 'h', 'a', 'r', 'x', 'asso')
if re.split('[^a-z]', base_kind, 1)[0] not in reference_kinds:
raise Exception(
'A type (spec "%s") cannot be made nullable' % base_kind)
return '?' + base_kind
if kind.endswith('}'):
lbracket = kind.rfind('{')
value = kind[0:lbracket]
return 'm[' + _MapKind(kind[lbracket+1:-1]) + '][' + _MapKind(value) + ']'
if kind.endswith(']'):
lbracket = kind.rfind('[')
typename = kind[0:lbracket]
return 'a' + kind[lbracket+1:-1] + ':' + _MapKind(typename)
if kind.endswith('&'):
return 'r:' + _MapKind(kind[0:-1])
if kind.startswith('asso<'):
assert kind.endswith('>')
return 'asso:' + _MapKind(kind[5:-1])
if kind in map_to_kind:
return map_to_kind[kind]
return 'x:' + kind
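# Editor's annotation: examples of the mapping above, traced from the code
# (the interface name in the last line is hypothetical):
#   _MapKind('int32')                -> 'i32'
#   _MapKind('string?')              -> '?s'                (nullable)
#   _MapKind('int32[]')              -> 'a:i32'             (unsized array)
#   _MapKind('handle<message_pipe>') -> 'h:m'
#   _MapKind('SomeInterface&')       -> 'r:x:SomeInterface'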
def _AddOptional(dictionary, key, value):
if value is not None:
        dictionary[key] = value
def _AttributeListToDict(attribute_list):
if attribute_list is None:
return None
assert isinstance(attribute_list, ast.AttributeList)
# TODO(vtl): Check for duplicate keys here.
return dict([(attribute.key, attribute.value)
for attribute in attribute_list])
def _EnumToDict(enum):
def EnumValueToDict(enum_value):
assert isinstance(enum_value, ast.EnumValue)
data = {'name': enum_value.name}
_AddOptional(data, 'value', enum_value.value)
_AddOptional(data, 'attributes',
_AttributeListToDict(enum_value.attribute_list))
return data
assert isinstance(enum, ast.Enum)
data = {'name': enum.name,
'native_only': enum.enum_value_list is None }
if not data['native_only']:
data.update({'fields': map(EnumValueToDict, enum.enum_value_list)})
_AddOptional(data, 'attributes', _AttributeListToDict(enum.attribute_list))
return data
def _ConstToDict(const):
assert isinstance(const, ast.Const)
return {'name': const.name,
'kind': _MapKind(const.typename),
'value': const.value}
class _MojomBuilder(object):
def __init__(self):
self.mojom = {}
def Build(self, tree, name):
def StructToDict(struct):
def StructFieldToDict(struct_field):
assert isinstance(struct_field, ast.StructField)
data = {'name': struct_field.name,
'kind': _MapKind(struct_field.typename)}
_AddOptional(data, 'ordinal',
struct_field.ordinal.value
if struct_field.ordinal else None)
_AddOptional(data, 'default', struct_field.default_value)
_AddOptional(data, 'attributes',
_AttributeListToDict(struct_field.attribute_list))
return data
assert isinstance(struct, ast.Struct)
data = {'name': struct.name,
'native_only': struct.body is None}
if not data['native_only']:
data.update({
'fields': _MapTreeForType(StructFieldToDict, struct.body,
ast.StructField, struct.name),
'enums': _MapTreeForType(_EnumToDict, struct.body, ast.Enum,
struct.name),
'constants': _MapTreeForType(_ConstToDict, struct.body,
ast.Const, struct.name)})
_AddOptional(data, 'attributes',
_AttributeListToDict(struct.attribute_list))
return data
def UnionToDict(union):
def UnionFieldToDict(union_field):
assert isinstance(union_field, ast.UnionField)
data = {'name': union_field.name,
'kind': _MapKind(union_field.typename)}
_AddOptional(data, 'ordinal',
union_field.ordinal.value
if union_field.ordinal else None)
_AddOptional(data, 'attributes',
_AttributeListToDict(union_field.attribute_list))
return data
assert isinstance(union, ast.Union)
data = {'name': union.name,
'fields': _MapTreeForType(UnionFieldToDict, union.body,
ast.UnionField, union.name)}
_AddOptional(data, 'attributes',
_AttributeListToDict(union.attribute_list))
return data
def InterfaceToDict(interface):
def MethodToDict(method):
def ParameterToDict(param):
assert isinstance(param, ast.Parameter)
data = {'name': param.name,
'kind': _MapKind(param.typename)}
_AddOptional(data, 'ordinal',
param.ordinal.value if param.ordinal else None)
_AddOptional(data, 'attributes',
_AttributeListToDict(param.attribute_list))
return data
assert isinstance(method, ast.Method)
data = {'name': method.name,
'parameters': map(ParameterToDict, method.parameter_list)}
if method.response_parameter_list is not None:
data['response_parameters'] = map(ParameterToDict,
method.response_parameter_list)
_AddOptional(data, 'ordinal',
method.ordinal.value if method.ordinal else None)
_AddOptional(data, 'attributes',
_AttributeListToDict(method.attribute_list))
return data
assert isinstance(interface, ast.Interface)
data = {'name': interface.name,
'methods': _MapTreeForType(MethodToDict, interface.body,
ast.Method, interface.name),
'enums': _MapTreeForType(_EnumToDict, interface.body, ast.Enum,
interface.name),
'constants': _MapTreeForType(_ConstToDict, interface.body,
ast.Const, interface.name)}
_AddOptional(data, 'attributes',
_AttributeListToDict(interface.attribute_list))
return data
assert isinstance(tree, ast.Mojom)
self.mojom['name'] = name
self.mojom['namespace'] = tree.module.name[1] if tree.module else ''
self.mojom['imports'] = \
[{'filename': imp.import_filename} for imp in tree.import_list]
self.mojom['structs'] = \
_MapTreeForType(StructToDict, tree.definition_list, ast.Struct, name)
self.mojom['unions'] = \
_MapTreeForType(UnionToDict, tree.definition_list, ast.Union, name)
self.mojom['interfaces'] = \
_MapTreeForType(InterfaceToDict, tree.definition_list, ast.Interface,
name)
self.mojom['enums'] = \
_MapTreeForType(_EnumToDict, tree.definition_list, ast.Enum, name)
self.mojom['constants'] = \
_MapTreeForType(_ConstToDict, tree.definition_list, ast.Const, name)
_AddOptional(self.mojom, 'attributes',
_AttributeListToDict(tree.module.attribute_list)
if tree.module else None)
return self.mojom
def Translate(tree, name):
"""Translate AST to Mojom IR.
Args:
tree: The AST as a mojom.parse.ast.Mojom object.
name: The filename as a str.
Returns:
The Mojom IR as a dict.
"""
return _MojomBuilder().Build(tree, name)
| bsd-3-clause | -7,084,876,562,291,309,000 | 37.262712 | 80 | 0.58206 | false |
DavidAndreev/indico | indico/web/forms/jinja_helpers.py | 1 | 4705 | # This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import json
from wtforms.fields import RadioField, BooleanField
from wtforms.widgets.core import Input, Select, TextArea, HiddenInput
from wtforms.validators import Length, NumberRange
from indico.util.struct.enum import TitledEnum
from indico.web.forms.fields import IndicoSelectMultipleCheckboxField, IndicoEnumRadioField
from indico.web.forms.validators import ConfirmPassword, HiddenUnless, IndicoRegexp
from indico.web.forms.widgets import SelectizeWidget
def is_single_line_field(field):
if isinstance(field.widget, SelectizeWidget):
return True
if isinstance(field.widget, Select):
return not field.widget.multiple
if isinstance(field.widget, Input):
return field.widget.input_type not in {'checkbox', 'radio', 'hidden'}
if isinstance(field.widget, TextArea):
return True
return getattr(field.widget, 'single_line', False)
def _attrs_for_validators(field, validators):
attrs = {}
for validator in validators:
if isinstance(validator, Length):
if validator.min >= 0:
attrs['minlength'] = validator.min
if validator.max >= 0:
attrs['maxlength'] = validator.max
elif isinstance(validator, IndicoRegexp) and validator.client_side:
attrs['pattern'] = validator.regex.pattern
elif isinstance(validator, NumberRange):
if validator.min is not None:
attrs['min'] = validator.min
if validator.max is not None:
attrs['max'] = validator.max
elif isinstance(validator, ConfirmPassword):
attrs['data-confirm-password'] = field.get_form()[validator.fieldname].name
elif isinstance(validator, HiddenUnless):
condition_field = field.get_form()[validator.field]
checked_only = isinstance(condition_field, (RadioField, BooleanField, IndicoEnumRadioField))
val = validator.value
attrs['data-hidden-unless'] = json.dumps({'field': condition_field.name,
'value': val if not isinstance(val, TitledEnum) else val.name,
'checked_only': checked_only})
return attrs
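# Editor's sketch: for a hypothetical field validated with
# Length(min=2, max=64) and NumberRange(min=0, max=100), the dict built
# above would be {'minlength': 2, 'maxlength': 64, 'min': 0, 'max': 100},
# which render_field then passes through as HTML attributes.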
def render_field(field, widget_attrs, disabled=None):
"""Renders a WTForms field, taking into account validators"""
if not widget_attrs.get('placeholder'):
widget_attrs = dict(widget_attrs)
widget_attrs.pop('placeholder', None)
args = _attrs_for_validators(field, field.validators)
args['required'] = (field.flags.required and not field.flags.conditional
and not isinstance(field, IndicoSelectMultipleCheckboxField))
args.update(widget_attrs)
if disabled is not None:
args['disabled'] = disabled
return field(**args)
def iter_form_fields(form, fields=None, skip=None, hidden_fields=False):
"""Iterates over the fields in a WTForm
:param fields: If specified only fields that are in this list are
yielded. This also overrides the field order.
:param skip: If specified, only fields NOT in this set/list are
yielded.
:param hidden_fields: How to handle hidden fields. Setting this to
``True`` or ``False`` will yield only hidden
or non-hidden fields. Setting it to ``None``
will yield all fields.
"""
if fields is not None:
field_iter = (form[field_name] for field_name in fields if field_name in form)
else:
field_iter = iter(form)
if skip:
skip = set(skip)
field_iter = (field for field in field_iter if field.short_name not in skip)
if hidden_fields is not None:
field_iter = (field for field in field_iter if isinstance(field.widget, HiddenInput) == hidden_fields)
for field in field_iter:
yield field
| gpl-3.0 | 9,104,690,325,091,572,000 | 43.386792 | 116 | 0.663549 | false |
arraystream/fftoptionlib | tests/test_cosine_pricer.py | 1 | 3955 | import unittest
from fftoptionlib.cosine_pricer import (
cosin_vanilla_call,
interval_a_and_b,
)
from fftoptionlib.moment_generating_funs import (
cumulants_from_mgf,
general_log_moneyness_mgf,
)
from fftoptionlib.process_class import (
BlackScholes,
Heston,
VarianceGamma,
)
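# Editor's note: every test below follows the same COS-method recipe:
# derive the first, second and fourth cumulants from the model's moment
# generating function, turn them into a truncation interval [a, b] via
# interval_a_and_b, and hand the characteristic function to
# cosin_vanilla_call, which prices the option with N cosine terms.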
class TestCosinePricer(unittest.TestCase):
    def test_black_scholes(self):
N = 16
L = 10
S0 = 100
t = 0.1
r = 0.1
q = 0
sigma = 0.25
strike = 80
c1, c2, c4 = cumulants_from_mgf(general_log_moneyness_mgf, strike, BlackScholes(sigma).set_type('mgf'), t=t, r=r, q=q, S0=S0)
intv_a, intv_b = interval_a_and_b(c1, c2, c4, L)
print(intv_a, intv_b)
exp_res = 20.792
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
self.assertAlmostEqual(exp_res, res, 3)
N = 64
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
exp_res = 20.799
self.assertAlmostEqual(exp_res, res, 3)
    def test_black_scholes_2(self):
N = 16
L = 10
S0 = 100
t = 0.1
r = 0.1
q = 0
sigma = 0.25
strike = 100
c1, c2, c4 = cumulants_from_mgf(general_log_moneyness_mgf, strike, BlackScholes(sigma).set_type('mgf'), t=t, r=r, q=q, S0=S0)
intv_a, intv_b = interval_a_and_b(c1, c2, c4, L)
print(intv_a, intv_b)
exp_res = 3.659
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
self.assertAlmostEqual(exp_res, res, 3)
N = 64
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
exp_res = 3.660
self.assertAlmostEqual(exp_res, res, 3)
    def test_black_scholes_3(self):
N = 16
L = 10
S0 = 100
t = 0.1
r = 0.1
q = 0
sigma = 0.25
strike = 120
c1, c2, c4 = cumulants_from_mgf(general_log_moneyness_mgf, strike, BlackScholes(sigma).set_type('mgf'), t=t, r=r, q=q, S0=S0)
intv_a, intv_b = interval_a_and_b(c1, c2, c4, L)
print(intv_a, intv_b)
exp_res = 0.044
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
self.assertAlmostEqual(exp_res, res, 3)
N = 64
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, BlackScholes(sigma).set_type('chf'))
exp_res = 0.045
self.assertAlmostEqual(exp_res, res, 3)
def test_heston(self):
N = 100
L = 10
S0 = 100
t = 0.5
k = 2
r = 0.03
q = 0
sigma = 0.5
theta = 0.04
V0 = 0.04
rho = -0.7
strike = 90
c1, c2, c4 = cumulants_from_mgf(general_log_moneyness_mgf, strike, Heston(V0, theta, k, sigma, rho).set_type('mgf'), t=t, r=r, q=q, S0=S0)
intv_a, intv_b = interval_a_and_b(c1, c2, c4, L)
print(intv_a, intv_b)
exp_res = 13.2023
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, Heston(V0, theta, k, sigma, rho).set_type('chf'))
self.assertAlmostEqual(exp_res, res, 3)
def test_vg(self):
N = 150
L = 10
S0 = 100
t = 1 / 12
r = 0.1
q = 0
sigma = 0.12
theta = -0.14
v = 0.2
strike = 90
c1, c2, c4 = cumulants_from_mgf(general_log_moneyness_mgf, strike, VarianceGamma(theta, v, sigma).set_type('mgf'), t=t, r=r, q=q, S0=S0)
intv_a, intv_b = interval_a_and_b(c1, c2, c4, L)
print(intv_a, intv_b)
res = cosin_vanilla_call(N, strike, intv_a, intv_b, t, r, q, S0, VarianceGamma(theta, v, sigma).set_type('chf'))
exp_res = 10.8289
self.assertAlmostEqual(exp_res, res, 4)
| bsd-3-clause | -8,726,790,436,106,144,000 | 32.235294 | 146 | 0.540076 | false |
dora40323106/2014cpa_final_project | std/a40323142.py | 1 | 8795 | #@+leo-ver=5-thin
#@+node:lee.20141224110313.61: * @file example2.py
#@@language python
#@@tabwidth -4
import cherrypy
import random
from std.asciisymbol import asciiImage
#@+others
#@+node:lee.20141223114246.41: ** class Application
class Application(object):
#@+others
#@+node:lee.20141223114246.42: *3* def init
def __init__(self):
        # Your name
self.name = '陳柏曄'
        # Your student ID
self.number = '40323142'
        # Your class
self.classes = '機械設計一年甲班'
        # Your github repository url
self.github_repo_url = 'https://github.com/hydenbryant/MIDNIGHT.git'
        # Your bitbucket repository url
#self.bitbucket_repo_url = ''
        # Your openshift app
self.openshift_url = 'https://work-40323142.rhcloud.com'
        # Your self-evaluation
self.evaluation = [('Project 7', 75), ('Project 8', 65), ('Project 9', 70)]
        # Your photo url
self.photo_url = 'https://copy.com/U44kSdtBp2yziELR'
        # This is the remark (course reflection)
        self.my_remark = """Although I could not understand much of what the teacher was teaching in class, through my classmates' help and by watching the teacher's videos many times, I gradually came to understand python and built my own web page, which gave me quite a sense of achievement.
        """
#@+node:lee.20141223114246.43: *3* def use_template
def use_template(self, content):
above = """
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Basic Page Needs
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<meta charset="utf-8">
<title>title</title>
<meta name="description" content="">
<meta name="author" content="">
<!-- Mobile Specific Metas
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- FONT
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<style>
@font-face {
font-family: 'Raleway';
font-style: normal;
font-weight: 300;
src: local('Raleway Light'), local('Raleway-Light'), url(/static/font/Raleway300.woff) format('woff');
}
@font-face {
font-family: 'Raleway';
font-style: normal;
font-weight: 400;
src: local('Raleway'), url(/static/font/Raleway400.woff) format('woff');
}
@font-face {
font-family: 'Raleway';
font-style: normal;
font-weight: 600;
src: local('Raleway SemiBold'), local('Raleway-SemiBold'), url(/static/font/Raleway600.woff) format('woff');
}
</style>
<!-- CSS
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<link rel="stylesheet" href="/static/css/normalize.css">
<link rel="stylesheet" href="/static/css/skeleton.css">
<link rel="stylesheet" href="/static/css/custom.css">
<!-- Favicon
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<link rel="icon" type="image/png" href="/static/images/favicon.png" />
</head>
<body>
<!-- Primary Page Layout
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
<!-- .container is main centered wrapper -->
<div class="container">
"""
below = """
</div>
<footer class="center">
2014 Computer Programming
</footer>
<!-- Note: columns can be nested, but it's not recommended since Skeleton's grid has %-based gutters, meaning a nested grid results in variable with gutters (which can end up being *really* small on certain browser/device sizes) -->
<!-- End Document
–––––––––––––––––––––––––––––––––––––––––––––––––– -->
</body>
</html>
"""
return above + self.generate_nav(self.link()) + content + below
#@+node:lee.20141223114246.44: *3* def generate_nav
def generate_nav(self, anchors):
above_side = """
<div class="row">
<div class="nav twelve columns">
<input type="checkbox" id="toggle" />
<div>
<label for="toggle" class="toggle" data-open="Main Menu" data-close="Close Menu" onclick></label>
<ul class="menu">
"""
content = ''
for link, name in anchors:
content += '<li><a href="' + link + '">' + name + '</a></li>'
below_side = """
</ul>
</div>
</div>
</div>
"""
return above_side + content + below_side
#@+node:lee.20141223114246.45: *3* def generate_form_page
def generate_form_page(self, form='', output=''):
content = """
<div class="content">
<div class="row">
<div class="one-half column">
%s
</div>
<div class="one-half column">
<div class="output u-full-width">
<p>Output:</p>
<p>
%s
</p>
</div>
</div>
</div>
</div>
"""%(form, output)
return self.use_template(content)
#@+node:lee.20141223114246.55: *3* def generate_headline_page
def generate_headline_page(self, headline, output):
content = """
<div class="content">
<div class="row">
<div class="headline center">%s</div>
<div class="twelve columns">
<p>%s</p>
</div>
</div>
</div>
""" % (headline, output)
return self.use_template(content)
#@+node:lee.20141223114246.46: *3* def generate_personal_page
def generate_personal_page(self, data=None):
if data is None:
return ''
# check data have all we need, if the key not exist, use empty string
must_have_key = ('photo_url', 'name', 'ID', 'class', 'evaluation')
for key in must_have_key:
data[key] = data.get(key, '')
if 'evaluation' in data:
table_content = ''
for projectName, score in data['evaluation']:
table_content += """<tr><td>%s</td><td>%s</td>"""%(projectName, score)
data['evaluation'] = table_content
content = """
<div class="content">
<div class="row">
<div class="one-half column">
<div class="headline">
About Me
</div>
<div class="photo">
<img src="{photo_url:s}" alt="photo">
</div>
<div class="meta">
<ul>
<li>Name: {name:s}</li>
<li>ID NO. : {ID:s}</li>
<li>Class: {class:s}</li>
</ul>
</div>
</div>
<div class="one-half column">
<div class="headline">
Self Evaluation
</div>
<div>
<table class="u-full-width">
<thead>
<tr>
<th>Project Name</th>
<th>Score</th>
</tr>
</thead>
<tbody>
{evaluation:s}
</tbody>
</table>
</div>
</div>
</div>
</div>
""".format(**data)
return self.use_template(content)
#@+node:lee.20141223114246.47: *3* def link
def link(self):
        available_link = [("index", "HOME"), ("remark", "Remark"), (self.openshift_url, "Personal Openshift APP"), (self.github_repo_url, "Personal github REPO"), ('/', 'back to list')]  # , (self.bitbucket_repo_url, "Personal bitbucket REPO")]
        return available_link
#@+node:lee.20141223114246.54: *3* def remark
@cherrypy.expose
def remark(self):
        # This is the remark (reflection) page
        # generate_headline_page(your title, your content)
return self.generate_headline_page("REMARK", self.my_remark)
#@+node:lee.20141223114246.48: *3* def index
@cherrypy.expose
def index(self):
        # This is the home page
data = {
'name':self.name,
'ID':self.number,
'class':self.classes,
'evaluation': self.evaluation,
'photo_url':self.photo_url,
}
return self.generate_personal_page(data)
#@-others
#@-others
#@-leo
| gpl-3.0 | -2,628,410,771,341,456,400 | 31.325103 | 236 | 0.493698 | false |
FluxIX/pyShellScript | src/pyshell/environment.py | 1 | 7462 | from .tee_output_file import TeeOutputFile
class Environment( object ):
class CloneOptions( object ):
InheritVariables = "inherit_vars"
InheritStreams = "inherit_streams"
MakeParentLink = "parent_link"
def __init__( self, starting_directory = None, parent = None, starting_variables = None, standard_output = None, error_output = None ):
if starting_directory is None:
import os
starting_directory = os.curdir
self.directory_stack = []
self.push_directory( starting_directory )
self.parent = parent
if starting_variables is None:
starting_variables = {}
self.variables = starting_variables
if standard_output is None:
standard_output = TeeOutputFile()
self.__standard_output = standard_output
if error_output is None:
error_output = TeeOutputFile()
self.__error_output = error_output
self._attached = False
def __del__( self ):
if self._detach():
def is_internal_stream( stream ):
import sys
return stream is sys.__stdout__ or stream is sys.__stderr__ or stream is sys.__stdin__
if not is_internal_stream( self.standard_output ):
del self.__standard_output
if not is_internal_stream( self.error_output ):
del self.__error_output
def get_directory_stack( self ):
return self.__directory_stack
def _set_directory_stack( self, value ):
if value is not None:
self.__directory_stack = value
else:
raise ValueError( "Directory stack cannot be None." )
directory_stack = property( get_directory_stack, _set_directory_stack, None, None )
def push_directory( self, directory, suppress_errors = False ):
if directory is not None:
import os
if not os.path.isabs( directory ):
d = os.path.abspath( directory )
else:
d = directory
d = os.path.normpath( d )
if os.path.isdir( d ):
self.directory_stack.append( d )
result = True
elif not suppress_errors:
raise ValueError( "Only directories can be pushed." )
else:
result = False
elif not suppress_errors:
raise ValueError( "Pushed directory cannot be None." )
else:
result = False
return result
def pop_directory( self ):
return self.directory_stack.pop()
@property
def current_directory( self ):
return self.directory_stack[ -1 ]
def get_parent( self ):
return self.__parent
def _set_parent( self, value ):
self.__parent = value
parent = property( get_parent, _set_parent, None, None )
@property
def has_parent( self ):
return self.parent is not None
def get_variables( self ):
return self.__variables
def set_variables( self, value ):
self.__variables = value
variables = property( get_variables, set_variables, None, None )
def clone( self, **kwargs ):
key = Environment.CloneOptions.InheritVariables
if key in kwargs:
inherit_vars = bool( kwargs[ key ] )
else:
inherit_vars = False
key = Environment.CloneOptions.MakeParentLink
if key in kwargs:
parent_link = bool( kwargs[ key ] )
else:
parent_link = False
if parent_link:
parent = self
else:
parent = None
variables = {}
if inherit_vars:
for key in self.variables:
variables[ key ] = self.variables[ key ]
key = Environment.CloneOptions.InheritStreams
if key in kwargs:
inherit_streams = bool( kwargs[ key ] )
else:
inherit_streams = False
if inherit_streams:
standard_output = self.standard_output.clone()
error_output = self.error_output.clone()
else:
standard_output = None
error_output = None
result = Environment( self.current_directory, parent, variables, standard_output, error_output )
return result
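    # Editor's sketch of hypothetical clone usage: a child environment that
    # inherits variables and keeps a link back to its parent:
    #   child = env.clone(**{Environment.CloneOptions.InheritVariables: True,
    #                        Environment.CloneOptions.MakeParentLink: True})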
@property
def standard_output( self ):
return self.__standard_output
@property
def error_output( self ):
return self.__error_output
def _attach( self ):
result = not self._attached
if result:
import os
import sys
self._previous_working_directory = os.getcwd()
self._previous_standard_output = sys.stdout
self._previous_error_output = sys.stderr
self._previous_environment_variables = os.environ
os.chdir( self.current_directory )
sys.stdout = self.standard_output
sys.stderr = self.error_output
os.environ = self.variables
self._attached = True
return result
def _detach( self ):
result = self._attached
if result:
import os
import sys
os.chdir( self._previous_working_directory )
sys.stdout = self._previous_standard_output
sys.stderr = self._previous_error_output
os.environ = self._previous_environment_variables
self._attached = False
return result
class EnvironmentBuilder( object ):
def __init__( self ):
self.starting_directory = None
self.parent = None
self.starting_variables = None
self.standard_output = None
self.error_output = None
def get_starting_directory( self ):
return self.__starting_directory
def set_starting_directory( self, value ):
self.__starting_directory = value
return self
starting_directory = property( get_starting_directory, set_starting_directory, None, None )
def get_parent( self ):
return self.__parent
def set_parent( self, value ):
self.__parent = value
return self
parent = property( get_parent, set_parent, None, None )
def get_starting_variables( self ):
return self.__starting_variables
def set_starting_variables( self, value ):
self.__starting_variables = value
return self
starting_variables = property( get_starting_variables, set_starting_variables, None, None )
def get_standard_output( self ):
return self.__standard_output
def set_standard_output( self, value ):
self.__standard_output = value
return self
standard_output = property( get_standard_output, set_standard_output, None, None )
def get_error_output( self ):
return self.__error_output
def set_error_output( self, value ):
self.__error_output = value
return self
error_output = property( get_error_output, set_error_output, None, None )
def inherit_starting_variables( self ):
starting_variables = {}
import os
for key in os.environ:
starting_variables[ key ] = os.environ[ key ]
self.starting_variables = starting_variables
return self
def build( self ):
return Environment( self.starting_directory, self.parent, self.starting_variables, self.standard_output, self.error_output )
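    # Editor's sketch: the set_* methods return self, so a builder chain is
    # possible (the directory path here is hypothetical):
    #   env = (EnvironmentBuilder().set_starting_directory('/tmp/work')
    #                              .inherit_starting_variables()
    #                              .build())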
| lgpl-3.0 | -8,011,103,368,289,027,000 | 27.922481 | 139 | 0.585902 | false |
Iepoev/vsc-mympirun | lib/vsc/mympirun/mpi/mpi.py | 1 | 41888 | #
# Copyright 2011-2016 Ghent University
#
# This file is part of vsc-mympirun,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/hpcugent/vsc-mympirun
#
# vsc-mympirun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# vsc-mympirun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vsc-mympirun. If not, see <http://www.gnu.org/licenses/>.
#
"""
Base MPI class, all actual classes should inherit from this one
@author: Stijn De Weirdt
@author: Jeroen De Clerck
"""
import os
import pkgutil
import random
import re
import resource
import shutil
import socket
import stat
import string
import subprocess
import time
from IPy import IP
from vsc.utils.fancylogger import getLogger
from vsc.utils.missing import get_subclasses, nub
from vsc.utils.run import run_simple, run_to_file, run_async_to_stdout
# part of the directory that contains the installed fakes
INSTALLATION_SUBDIRECTORY_NAME = '(VSC-tools|(?:vsc-)?mympirun)'
# the fake subdir to contain the fake mpirun symlink
# also hardcoded in setup.py !
FAKE_SUBDIRECTORY_NAME = 'fake'
LOGGER = getLogger()
def what_mpi(name):
"""
Return the path of the selected mpirun and its class.
@param name: The name of the executable used to run mympirun
@return: A triplet containing the following variables:
- The path to the executable used to run mympirun (should be the path to an mpirun implementation)
- The corresponding python class of the MPI variant
- The python classes of the supported MPI flavors (from the various .py files in mympirun/mpi)
"""
# import all modules in this dir: http://stackoverflow.com/a/16853487
for loader, modulename, _ in pkgutil.walk_packages([os.path.dirname(__file__)]):
loader.find_module(modulename).load_module(modulename)
supp_mpi_impl = get_subclasses(MPI) # supported MPI implementations
# remove fake mpirun from $PATH
stripfake()
# get the path of the mpirun executable
mpirun_path = which('mpirun')
if mpirun_path is None:
# no MPI implementation installed
LOGGER.warn("no mpirun command found")
return None, None, supp_mpi_impl
scriptname = os.path.basename(os.path.abspath(name))
# check if mympirun was called by a known mpirun alias (like
# ompirun for OpenMPI or mhmpirun for mpich)
for mpi in supp_mpi_impl:
if mpi._is_mpiscriptname_for(scriptname):
LOGGER.debug("%s was used to call mympirun", scriptname)
return scriptname, mpi, supp_mpi_impl
# mympirun was not called through a known alias, so find out which MPI
# implementation the user has installed
for mpi in supp_mpi_impl:
if mpi._is_mpirun_for(mpirun_path):
return scriptname, mpi, supp_mpi_impl
# no specific flavor found, default to mpirun_path
LOGGER.warn("The executable that called mympirun (%s) isn't supported"
", defaulting to %s", name, mpirun_path)
return mpirun_path, None, supp_mpi_impl
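# Editor's sketch of typical usage (variable names are illustrative):
#   scriptname, mpi_class, supported = what_mpi('mympirun')
#   if mpi_class is None:
#       print("no supported MPI implementation matched %s" % scriptname)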
def stripfake():
"""
If the user loaded the vsc-mympirun module but called mpirun, some $PATH trickery catches the attempt.
This function removes the fake path trickery from $PATH (assumes (VSC-tools|mympirun)/1.0.0/bin/fake).
"""
LOGGER.debug("PATH before stripfake(): %s", os.environ['PATH'])
# compile a regex that matches the faked mpirun
reg_fakepath = re.compile(
r"" + os.sep.join(['.*?',
INSTALLATION_SUBDIRECTORY_NAME + '.*?',
'bin',
'%(fake_subdir)s(%(sep)s[^%(sep)s]*)?$' %
{
'fake_subdir': FAKE_SUBDIRECTORY_NAME,
'sep': os.sep
}
]))
oldpath = os.environ.get('PATH', '').split(os.pathsep)
# remove all $PATH elements that match the fakepath regex
os.environ['PATH'] = os.pathsep.join([x for x in oldpath if not reg_fakepath.match(x)])
LOGGER.debug("PATH after stripfake(): %s", os.environ['PATH'])
return
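# Editor's note: with the constants above, a hypothetical $PATH entry such
# as /apps/tools/vsc-mympirun/1.0.0/bin/fake matches reg_fakepath and is
# dropped, while ordinary bin directories are kept.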
def which(cmd):
"""
Return (first) path in $PATH for specified command, or None if command is not found.
taken from easybuild/tools/filetools.py, 6/7/2016
"""
paths = os.environ.get('PATH', '').split(os.pathsep)
for path in paths:
cmd_path = os.path.join(path, cmd)
# only accept path is command is there, and both readable and executable
if os.access(cmd_path, os.R_OK | os.X_OK):
LOGGER.info("Command %s found at %s", cmd, cmd_path)
return cmd_path
LOGGER.warning("Could not find command '%s' (with permissions to read/execute it) in $PATH (%s)", cmd, paths)
return None
class MPI(object):
"""
Base MPI class to generate the mpirun command line.
To add a new MPI class just create a new class that extends the MPI class, see http://stackoverflow.com/q/456672
"""
RUNTIMEOPTION = None
_mpirun_for = []
_mpiscriptname_for = []
_mpirun_version = None
MPIRUN_LOCALHOSTNAME = 'localhost'
DEFAULT_RSH = None
HYDRA = None
HYDRA_LAUNCHER_NAME = "launcher"
DEVICE_LOCATION_MAP = {'ib': '/dev/infiniband', 'det': '/dev/det', 'shm': '/dev/shm', 'socket': None}
DEVICE_ORDER = ['ib', 'det', 'shm', 'socket']
DEVICE_MPIDEVICE_MAP = {'ib': 'rdma', 'det': 'det', 'shm': 'shm', 'socket': 'socket'}
NETMASK_TYPE_MAP = {'ib': 'ib', 'det': 'eth', 'shm': 'eth', 'socket': 'eth'}
PINNING_OVERRIDE_METHOD = 'numactl'
PINNING_OVERRIDE_TYPE_DEFAULT = None
REMOTE_OPTION_TEMPLATE = "--rsh=%(rsh)s"
MPDBOOT_OPTIONS = []
MPDBOOT_SET_INTERFACE = True
MPIEXEC_TEMPLATE_GLOBAL_OPTION = "-genv %(name)s '%(value)s'"
OPTS_FROM_ENV_TEMPLATE = "-x '%(name)s'"
MPIEXEC_OPTIONS = []
MODULE_ENVIRONMENT_VARIABLES = ['MODULEPATH', 'LOADEDMODULES', 'MODULESHOME']
OPTS_FROM_ENV_BASE = ['LD_LIBRARY_PATH', 'PATH', 'PYTHONPATH', 'CLASSPATH', 'LD_PRELOAD', 'PYTHONUNBUFFERED']
OPTS_FROM_ENV_BASE_PREFIX = ['OMP', 'MKL', 'KMP', 'DAPL', 'PSM', 'IPATH', 'TMI', 'PSC', 'O64', 'VSMP']
OPTS_FROM_ENV_FLAVOR_PREFIX = [] # to be set per flavor
def __init__(self, options, cmdargs, **kwargs):
if not hasattr(self, 'log'):
self.log = getLogger(self.__class__.__name__)
self.options = options
self.cmdargs = cmdargs
self.device = None
self.hydra_info = None
self.has_hydra = self._has_hydra()
self.netmasktype = None
self.netmask = None
self.mympirunbasedir = None
self.mympirundir = None
self.mpdboot_node_filename = None
self.mpdboot_options = None
self.mpdboot_totalnum = None
self.mpdboot_localhost_interface = None
self.mpiexec_node_filename = None
self.mpiexec_options = None
self.mpiexec_global_options = {}
self.mpiexec_opts_from_env = [] # list of variables
self.mpirun_cmd = None
self.pinning_override_type = getattr(self.options, 'overridepin', self.PINNING_OVERRIDE_TYPE_DEFAULT)
super(MPI, self).__init__(**kwargs)
# sanity checks
if getattr(self, 'sched_id', None) is None:
self.log.raiseException("__init__: sched_id is None (should be set by one of the Sched classes)")
if not self.cmdargs:
self.log.raiseException("__init__: no executable or command provided")
# factory methods for MPI
@classmethod
def _is_mpirun_for(cls, mpirun_path):
"""
Check if this class provides support for the mpirun that was called.
@param cls: the class that calls this function
@param mpirun_path: the path to the mpirun aka `which mpirun`
@return: true if $mpirun_path is defined as an mpirun implementation of $cls
"""
# regex matches "cls._mpirun_for/version number"
reg = re.compile(r"(?:%s)%s(\d+(?:(?:\.|-)\d+(?:(?:\.|-)\d+\S+)?)?)" %
("|".join(cls._mpirun_for), os.sep))
reg_match = reg.search(mpirun_path)
        LOGGER.debug("_is_mpirun_for(), reg_match: %s", reg_match)
if reg_match:
if cls._mpirun_version is None:
return True
else:
# do version check (reg_match.group(1) is the version number)
return cls._mpirun_version(reg_match.group(1))
else:
return False
@classmethod
def _is_mpiscriptname_for(cls, scriptname):
"""
Check if this class provides support for scriptname.
@param cls: the class that calls this function
@param scriptname: the executable that called mympirun
@return: true if $scriptname is defined as an mpiscriptname of $cls
"""
return scriptname in cls._mpiscriptname_for
# other general functionality
def _has_hydra(self):
"""Has HYDRA or not"""
return self.HYDRA
### main ###
def main(self):
"""Main method"""
self.prepare()
self.make_mpdboot()
# prepare these separately
self.set_mpiexec_global_options()
self.set_mpiexec_opts_from_env()
self.set_mpiexec_options()
self.make_mpirun()
# actual execution
for runfunc, cmd in self.mpirun_prepare_execution():
self.log.debug("main: going to execute cmd %s", " ".join(cmd))
exitcode, _ = runfunc(cmd)
if exitcode > 0:
self.cleanup()
self.log.raiseException("main: exitcode %s > 0; cmd %s" % (exitcode, cmd))
break
self.cleanup()
### BEGIN prepare ###
def prepare(self):
"""Collect information to create the commands."""
self.check_usable_cpus()
self.check_limit()
self.set_omp_threads()
self.set_netmask()
self.make_node_file()
self.set_pinning()
def check_usable_cpus(self):
"""Check and log if non-standard cpus (eg due to cpusets)."""
        if self.foundppn != len(self.cpus):
self.log.info("check_usable_cpus: non-standard cpus found: requested ppn %s, found cpus %s, usable cpus %s",
self.ppn, self.foundppn, len(self.cpus))
    def check_limit(self):
        """Check that the soft stack size limit is at least 1MB (-1 means unlimited); log an error if it is not."""
soft, _ = resource.getrlimit(resource.RLIMIT_STACK) # in bytes
if soft > -1 and soft < 1024 * 1024:
# non-fatal
            self.log.error("Stack size %s bytes too low? Increase with ulimit -s unlimited", soft)
def set_omp_threads(self):
"""
        Sets ompthreads to the number of threads every MPI process should use.
        For example, with hybrid 2 each node runs 2 MPI processes, and every MPI process
        gets ppn // 2 threads, each on a separate processor (8 threads with ppn 16).
        Defaults to 1 thread per process if hybrid is disabled.
"""
if 'OMP_NUM_THREADS' in os.environ:
threads = os.environ['OMP_NUM_THREADS']
else:
if not self.options.hybrid:
threads = 1
else:
threads = max(self.ppn // self.options.hybrid, 1)
self.log.debug("Set OMP_NUM_THREADS to %s", threads)
os.environ['OMP_NUM_THREADS'] = str(threads)
setattr(self.options, 'ompthreads', threads)
def set_netmask(self):
"""
Set self.netmask to a list containing (ip address/netmask).
Based on the hosts IP address (from ip addr show) and the selected netmasktype from select_device.
"""
if self.netmasktype is None:
self.select_device()
device_ip_reg_map = {
'eth': r"ether.*?\n.*?inet\s+(\d+\.\d+.\d+.\d+/\d+)",
'ib': r"infiniband.*?\n.*?inet\s+(\d+\.\d+.\d+.\d+/\d+)",
}
if self.netmasktype not in device_ip_reg_map:
self.log.raiseException("set_netmask: can't get netmask for %s: unknown mode (device_ip_reg_map %s)" %
(self.netmasktype, device_ip_reg_map))
cmd = "/sbin/ip addr show"
exitcode, out = run_simple(cmd)
if exitcode > 0:
self.log.raiseException("set_netmask: failed to run cmd %s, ec: %s" % (cmd, exitcode))
reg = re.compile(device_ip_reg_map[self.netmasktype])
if not reg.search(out):
self.log.raiseException("set_netmask: can't get netmask for %s: no matches found (reg %s out %s)" %
(self.netmasktype, device_ip_reg_map[self.netmasktype], out))
res = []
for ipaddr_mask in reg.finditer(out):
ip_info = IP(ipaddr_mask.group(1), make_net=True)
network_netmask = "%s/%s" % (ip_info.net(), ip_info.netmask())
res.append(network_netmask)
self.log.debug("set_netmask: convert ipaddr_mask %s into network_netmask %s",
ipaddr_mask.group(1), network_netmask)
self.log.debug("set_netmask: return complete netmask %s", res)
if res:
self.netmask = os.pathsep.join(res)
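    # Editor's note on set_netmask: an "inet 10.1.2.3/24" address on a
    # matching interface yields the entry "10.1.2.0/255.255.255.0";
    # multiple matches are joined with os.pathsep (values illustrative).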
def select_device(self, force=False):
"""
Select a device (such as infiniband), either with command line arguments or the best available.
See DEVICE_ORDER for order of preference.
"""
if self.device is not None and not force:
self.log.debug("select_device: device already set: %s", self.device)
return
founddev = None
if getattr(self.options, 'rdma', None):
founddev = 'ib'
self.set_device(founddev)
elif getattr(self.options, 'socket', None):
founddev = 'socket'
self.set_device(founddev)
else:
for dev in self.DEVICE_ORDER:
if dev in ('shm',):
# only use shm if a single node is used
if self.nruniquenodes > 1:
continue
path = self.DEVICE_LOCATION_MAP[dev]
if path is None or os.path.exists(path):
founddev = dev
self.device = self.DEVICE_MPIDEVICE_MAP[dev]
self.log.debug("select_device: found path %s for device %s", path, self.device)
break
if self.device is None:
self.log.raiseException("select_device: failed to set device.")
self.netmasktype = self.NETMASK_TYPE_MAP[founddev]
self.log.debug("select_device: set netmasktype %s for device %s (founddev %s)",
self.netmasktype, self.device, founddev)
def set_device(self, founddev):
"""Set self.device to founddev, but doublecheck if the path to this device actually exists """
self.device = self.DEVICE_MPIDEVICE_MAP[founddev]
path = self.DEVICE_LOCATION_MAP[founddev]
if path is None or not os.path.exists(path):
self.log.warning("Forcing device %s (founddevice %s), but path %s not found.",
self.device, founddev, path)
def make_node_file(self):
"""
Make a nodefile and mpdbootfile.
Parses the list of nodes that run an MPI process and writes this information to a nodefile.
Also parses the list of unique nodes and writes this information to a mpdbootfile
        (based on hydra and universe options).
"""
self.make_mympirundir()
if self.mpinodes is None:
self.make_node_list()
nodetxt = "\n".join(self.mpinodes + [''])
mpdboottxt = ""
for uniquenode in self.uniquenodes:
txt = uniquenode
if not self.has_hydra:
if self.options.universe is not None and self.options.universe > 0:
txt += ":%s" % self.get_universe_ncpus()
txt += " ifhn=%s" % uniquenode
mpdboottxt += "%s\n" % txt
try:
nodefn = os.path.join(self.mympirundir, 'nodes')
open(nodefn, 'w').write(nodetxt)
self.mpiexec_node_filename = nodefn
self.log.debug("make_node_file: wrote nodefile %s:\n%s", nodefn, nodetxt)
mpdfn = os.path.join(self.mympirundir, 'mpdboot')
open(mpdfn, 'w').write(mpdboottxt)
self.mpdboot_node_filename = mpdfn
self.log.debug("make_node_file: wrote mpdbootfile %s:\n%s", mpdfn, mpdboottxt)
except Exception:
            self.log.raiseException('make_node_file: failed to write nodefile %s mpdboot nodefile %s' % (nodefn, mpdfn))
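    # Editor's sketch: without hydra and without a universe, an mpdboot file
    # for uniquenodes ['node1', 'node2'] would hold the lines
    # "node1 ifhn=node1" and "node2 ifhn=node2" (hostnames illustrative).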
def get_universe_ncpus(self):
"""Return ppn for universe"""
return self.mpiprocesspernode
def make_mympirundir(self):
"""
Make a dir called .mympirun_id_timestamp in either the given basepath or $HOME.
Temporary files such as the nodefile will be written to this directory.
Allows for easy cleanup after finishing the script.
"""
basepath = getattr(self.options, 'basepath', None)
if basepath is None:
basepath = os.environ['HOME']
if not os.path.exists(basepath):
self.log.raiseException("make_mympirun_dir: basepath %s should exist." % basepath)
self.mympirunbasedir = os.path.join(basepath, '.mympirun')
destdir = os.path.join(self.mympirunbasedir, "%s_%s" % (self.sched_id, time.strftime("%Y%m%d_%H%M%S")))
if not os.path.exists(destdir):
try:
os.makedirs(destdir)
except os.error:
self.log.raiseException('make_mympirun_dir: failed to make job dir %s' % destdir)
self.log.debug("make_mympirun_dir: tmp mympirundir %s", destdir)
self.mympirundir = destdir
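    # Editor's note: the resulting directory looks like
    # $HOME/.mympirun/<sched_id>_20160101_120000 (timestamp illustrative).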
### BEGIN pinning ###
def set_pinning(self, mp=None):
if not hasattr(self.options, 'pinmpi'):
setattr(self.options, 'pinmpi', None)
mp = self._pin_flavour(mp)
if isinstance(mp, bool):
self.log.debug("set_pinning: setting pin_flavour %s", mp)
self.options.pinmpi = mp
        if not isinstance(self.options.pinmpi, bool):
            # always pin, whether hybrid mode is set or not
            self.options.pinmpi = True
if self.pinning_override_type is not None:
self.log.debug("set_pinning: previous pinning %s; will be overwritten, pinning_override_type set to %s",
self.options.pinmpi, self.pinning_override_type)
self.options.pinmpi = False
else:
self.log.debug("set_pinning: pinmpi %s", self.options.pinmpi)
def _pin_flavour(self, mp=None):
return mp
### BEGIN mpdboot ###
def make_mpdboot(self):
"""
Make the mpdboot configuration.
Read a password from ~/.mpd.conf (if this does not exist, create it).
"""
# check .mpd.conf existence
mpdconffn = os.path.expanduser('~/.mpd.conf')
if not os.path.exists(mpdconffn):
self.log.warning(("make_mpdboot: mpd.conf file not found at %s. Creating this file "
"(text file with minimal entry 'password=<somesecretpassword>')"), mpdconffn)
mpdconff = open(mpdconffn, 'w')
mpdconff.write("password=%s" % ''.join(random.choice(string.ascii_uppercase + string.digits)
for x in range(10)))
mpdconff.close()
# set correct permissions on this file.
os.chmod(mpdconffn, stat.S_IREAD)
self.set_mpdboot_localhost_interface()
self.make_mpdboot_options()
self.log.debug("make_mpdboot set options %s", self.mpdboot_options)
def set_mpdboot_localhost_interface(self):
"""Sets mpdboot_localhost_interface to the first result of get_localhosts()."""
localhosts = self.get_localhosts()
if len(localhosts) > 1:
            self.log.warning(("set_mpdboot_localhost_interface: more than one match for localhost from unique nodes "
" found %s, using 1st."), localhosts)
nodename, iface = localhosts[0] # take the first one
self.log.debug("set_mpdboot_localhost_interface: mpd localhost interface %s found for %s", iface, nodename)
self.mpdboot_localhost_interface = (nodename, iface)
def get_localhosts(self):
"""
Get the localhost interfaces, based on the hostnames from the nodes in self.uniquenodes.
Raises Exception if no localhost interface was found.
@return: the list of interfaces that correspond to the list of uniquenodes
"""
iface_prefix = ['eth', 'em', 'ib', 'wlan']
reg_iface = re.compile(r'((?:%s)\d+(?:\.\d+)?(?::\d+)?|lo)' % '|'.join(iface_prefix))
# iterate over uniquenodes and get their interfaces
# add the found interface to res if it matches reg_iface
res = []
for idx, nodename in enumerate(self.uniquenodes):
ip = socket.gethostbyname(nodename)
cmd = "/sbin/ip -4 -o addr show to %s/32" % ip
exitcode, out = run_simple(cmd)
if exitcode == 0:
regex = reg_iface.search(out)
if regex:
iface = regex.group(1)
self.log.debug("get_localhost idx %s: localhost interface %s found for %s (ip: %s)",
idx, iface, nodename, ip)
res.append((nodename, iface))
else:
self.log.debug("get_localhost idx %s: no interface match for prefixes %s out %s",
idx, iface_prefix, out)
else:
self.log.error("get_localhost idx %s: cmd %s failed with output %s", idx, cmd, out)
if not res:
self.log.raiseException("get_localhost: can't find localhost from uniq nodes %s" % self.uniquenodes)
return res
def make_mpdboot_options(self):
"""Add various options to mpdboot_options"""
self.mpdboot_options = self.MPDBOOT_OPTIONS[:]
# add the mpd nodefile to mpdboot options
self.mpdboot_options.append("--file=%s" % self.mpdboot_node_filename)
# add the interface to mpdboot options
if self.MPDBOOT_SET_INTERFACE:
if self.has_hydra:
iface = "-iface %s" % self.mpdboot_localhost_interface[1]
else:
iface = "--ifhn=%s" % self.mpdboot_localhost_interface[0]
self.log.debug('Set mpdboot interface option "%s"', iface)
self.mpdboot_options.append(iface)
else:
self.log.debug('No mpdboot interface option')
# add the number of mpi processes (aka mpi universe) to mpdboot options
if self.options.universe is not None and self.options.universe > 0:
self.mpdboot_options.append("--ncpus=%s" % self.get_universe_ncpus())
# add nr of unique nodes as totalnum if defined
if self.mpdboot_totalnum:
self.mpdboot_options.append("--totalnum=%s" % self.mpdboot_totalnum)
# set verbosity
if self.options.mpdbootverbose:
self.mpdboot_options.append("--verbose")
# mpdboot rsh command
if not self.has_hydra:
self.mpdboot_options.append(self.REMOTE_OPTION_TEMPLATE % {'rsh': self.get_rsh()})
### BEGIN mpiexec ###
def set_mpiexec_global_options(self):
"""
Set mpiexec_global_options.
Unless explicitly asked not to, will add all environment variables to mpiexec_global_options.
"""
self.mpiexec_global_options['MKL_NUM_THREADS'] = '1'
if not self.options.noenvmodules:
for env_var in self.MODULE_ENVIRONMENT_VARIABLES:
if env_var in os.environ and env_var not in self.mpiexec_global_options:
self.mpiexec_global_options[env_var] = os.environ[env_var]
def set_mpiexec_opts_from_env(self):
"""
Get relevant environment variables and append them to mpiexec_opts_from_env
Gets the union of OPTS_FROM_ENV_BASE and the environment variables that start with a given prefix.
These will then be parsed and passed to mpiexec as an option
"""
# get all unique variables that are both in os.environ and in OPTS_FROM_ENV_BASE
vars_to_pass = nub(filter(os.environ.has_key, self.OPTS_FROM_ENV_BASE))
for env_prefix in self.OPTS_FROM_ENV_FLAVOR_PREFIX + self.OPTS_FROM_ENV_BASE_PREFIX + self.options.variablesprefix:
for env_var in os.environ.keys():
# add all environment variable keys that are equal to <prefix> or start with <prefix>_
# to mpiexec_opts_from_env, but only if they aren't already in vars_to_pass
if (env_prefix == env_var or env_var.startswith("%s_" % env_prefix)) and env_var not in vars_to_pass:
self.mpiexec_opts_from_env.append(env_var)
def set_mpiexec_options(self):
"""Add various options to mpiexec_options."""
self.mpiexec_options = self.MPIEXEC_OPTIONS[:]
if self.has_hydra:
self.make_mpiexec_hydra_options()
else:
self.mpiexec_options.append("-machinefile %s" % self.mpiexec_node_filename)
# mpdboot global variables
self.mpiexec_options += self.get_mpiexec_global_options()
# number of procs to start
if self.options.universe is not None and self.options.universe > 0:
self.mpiexec_options.append("-np %s" % self.options.universe)
else:
self.mpiexec_options.append("-np %s" % (self.mpiprocesspernode * self.nruniquenodes))
# pass local env variables to mpiexec
self.mpiexec_options += self.get_mpiexec_opts_from_env()
def make_mpiexec_hydra_options(self):
"""Hydra specific mpiexec options."""
self.get_hydra_info()
self.mpiexec_options.append("--hostfile %s" % self.mpiexec_node_filename)
if self.options.branchcount is not None:
self.mpiexec_options.append("--branch-count %d" % self.options.branchcount)
# default launcher seems ssh
if getattr(self, 'HYDRA_RMK', None) is not None:
rmk = [x for x in self.HYDRA_RMK if x in self.hydra_info.get('rmk', [])]
if len(rmk) > 0:
self.log.debug("make_mpiexe_hydra_options: HYDRA: rmk %s, using first", rmk)
self.mpiexec_options.append("-rmk %s" % rmk[0])
else:
self.log.debug("make_mpiexe_hydra_options: no rmk from HYDRA_RMK %s and hydra_info %s",
self.HYDRA_RMK, self.hydra_info)
else:
launcher = None
if getattr(self, 'HYDRA_LAUNCHER', None) is not None:
launcher = [x for x in self.HYDRA_LAUNCHER if x in self.hydra_info.get('launcher', [])]
if launcher:
self.log.debug("make_mpiexec_hydra_options: HYDRA: launcher %s, using first one", launcher)
else:
self.log.debug("make_mpiexe_hydra_options: no launcher from HYDRA_LAUNCHER %s and hydra_info %s",
self.HYDRA_LAUNCHER, self.hydra_info)
launcher_exec = self.HYDRA_LAUNCHER_EXEC
if not launcher:
launcher_exec = self.get_rsh()
else:
self.mpiexec_options.append("-%s %s" % (self.HYDRA_LAUNCHER_NAME, launcher[0]))
if launcher_exec is not None:
self.log.debug("make_mpiexec_hydra_options: HYDRA using launcher exec %s", launcher_exec)
self.mpiexec_options.append("-%s-exec %s" % (self.HYDRA_LAUNCHER_NAME, launcher_exec))
def get_hydra_info(self):
"""Get a dict with hydra info."""
reg_hydra_info = re.compile(r"^\s+(?P<key>\S[^:\n]*)\s*:(?P<value>.*?)\s*$", re.M)
cmd = "mpirun -info"
exitcode, out = run_simple(cmd)
if exitcode > 0:
self.log.raiseException("get_hydra_info: failed to run cmd %s: %s" % (cmd, out))
hydra_info = {}
for regex in reg_hydra_info.finditer(out):
key = regex.groupdict()['key']
if key is None:
self.log.raiseException("get_hydra_info: failed to get hydra info: missing key in %s (out: %s)" %
(regex.groupdict(), out))
key = key.strip().lower()
value = regex.groupdict()['value']
if value is None:
self.log.debug("get_hydra_info: failed to get hydra info: missing value in %s (out: %s)" %
(regex.groupdict(), out))
value = ''
values = [x.strip().strip('"').strip("'") for x in value.split() if x.strip()]
hydra_info[key] = values
self.log.debug("get_hydra_info: found info %s", hydra_info)
keymap = {
"rmk": r'^resource\s+management\s+kernel.*available',
"launcher": r'^%s.*available' % self.HYDRA_LAUNCHER_NAME,
"chkpt": r'^checkpointing.*available',
}
self.hydra_info = {}
for newkey, regtxt in keymap.items():
reg = re.compile(regtxt, re.I)
matches = [v for k, v in hydra_info.items() if reg.search(k)]
if len(matches) == 0:
continue
else:
if len(matches) > 1:
self.log.warning("get_hydra_info: more than one match %s found: newkey %s regtxt %s hydrainfo %s",
matches, newkey, regtxt, hydra_info)
self.hydra_info[newkey] = matches[0]
self.log.debug("get_hydra_info: filtered info %s", self.hydra_info)
def get_mpiexec_global_options(self):
"""
Create the global options to pass to mpiexec.
Iterates over mpiexec_global_options, and picks the options that aren't already in mpiexec_opts_from_env.
This way the options that are set with environment variables get a higher priority.
@return: the final list of options, including the correct command line argument for the mpi flavor
"""
global_options = []
for key, val in self.mpiexec_global_options.items():
if key in self.mpiexec_opts_from_env:
# environment variable is already set
self.log.debug("get_mpiexec_global_options: found global option %s in mpiexec_opts_from_env.", key)
else:
# insert the keyvalue pair into the correct command line argument
# the command for setting the environment variable depends on the mpi flavor
global_options.append(self.MPIEXEC_TEMPLATE_GLOBAL_OPTION % {'name': key, "value": val})
self.log.debug("get_mpiexec_global_options: template %s return options %s",
self.MPIEXEC_TEMPLATE_GLOBAL_OPTION, global_options)
return global_options
def get_mpiexec_opts_from_env(self):
"""
gets the environment variables that should be passed to mpiexec as an option.
Parses mpiexec_opts_from_env so that the chosen mpi flavor can understand it when it is passed to the
command line argument.
"""
self.log.debug("get_mpiexec_opts_from_env: variables (and current value) to pass: %s",
[[x, os.environ[x]] for x in self.mpiexec_opts_from_env])
if '%(commaseparated)s' in self.OPTS_FROM_ENV_TEMPLATE:
self.log.debug("get_mpiexec_opts_from_env: found commaseparated in template.")
environment_options = [self.OPTS_FROM_ENV_TEMPLATE %
{'commaseparated': ','.join(self.mpiexec_opts_from_env)}]
else:
environment_options = [self.OPTS_FROM_ENV_TEMPLATE %
{'name': x, 'value': os.environ[x]} for x in self.mpiexec_opts_from_env]
self.log.debug("get_mpiexec_opts_from_env: template %s return options %s",
self.OPTS_FROM_ENV_TEMPLATE, environment_options)
return environment_options
### BEGIN mpirun ###
def make_mpirun(self):
"""Make the mpirun command (or whatever). It typically consists of a mpdboot and a mpiexec part."""
self.mpirun_cmd = ['mpirun']
self._make_final_mpirun_cmd()
if self.options.mpirunoptions is not None:
self.mpirun_cmd.append(self.options.mpirunoptions)
self.log.debug("make_mpirun: added user provided options %s", self.options.mpirunoptions)
if self.pinning_override_type is not None:
p_o = self.pinning_override()
if p_o is None or not os.path.isfile(p_o):
self.log.raiseException("make_mpirun: no valid pinning_overrride %s (see previous errors)" % p_o)
else:
self.mpirun_cmd += [p_o]
# the executable
# use undocumented subprocess API call to quote whitespace (executed with Popen(shell=True))
# (see http://stackoverflow.com/questions/4748344/whats-the-reverse-of-shlex-split for alternatives if needed)
quoted_args_string = subprocess.list2cmdline(self.cmdargs)
self.log.debug("make_mpirun: adding cmdargs %s (quoted %s)", self.cmdargs, quoted_args_string)
self.mpirun_cmd.append(quoted_args_string)
def _make_final_mpirun_cmd(self):
"""
        Create the actual mpirun command.
Append the mpdboot and mpiexec options to the command.
"""
self.mpirun_cmd += self.mpdboot_options
self.mpirun_cmd += self.mpiexec_options
def pinning_override(self):
"""
Create own pinning
- using taskset or numactl?
- start the real executable with correct pinning
        There are self.mpiprocesspernode processes to start on (self.nruniquenodes * self.ppn) requested slots
Each node has to accept self.mpiprocesspernode/self.ppn processes over self.ppn number of cpu slots
        Do we assume homogeneous nodes (i.e. the same CPU layout as the current node)?
        - We should, but in reality we don't, because of different cpusets!
What do we support?
- packed/compact : all together, ranks close to each other
- spread: as far away as possible from each other
Option:
- threaded (default yes): eg in hybrid, pin on all available cores or just one
When in this mode, one needs to disable default/native pinning
        There seems to be no clean way to simply prefix the variables before the real exe
- some mpirun are binary, others are bash
- no clean way to pass the variable
- a simple bash script also resolves the csh problem?
Simple shell check. This is the login shell of the current user
- not necessarily the current shell
        - but it is when multinode is used, I think (eg startup with ssh)
"""
variableexpression = self.get_pinning_override_variable()
if variableexpression is None:
self.log.raiseException("pinning_override: no variable name found/set.")
self.log.debug("pinning_override: using variable expression %s as local node rank.", variableexpression)
rankname = 'MYMPIRUN_LOCALRANK'
rankmapname = 'MYMPIRUN_LOCALRANK_MAP'
wrappertxt = "#!/bin/bash\n%s=%s\n" % (rankname, variableexpression)
# number of local processors
# - eg numactl -s grep physcpubind
        if self.ppn != self.foundppn:
            self.log.raiseException(("pinning_override: number of found procs %s is different from "
                                     "requested ppn %s. Not yet supported.") % (self.foundppn, self.ppn))
override_type = self.pinning_override_type
multithread = True
if override_type.endswith('pin'):
override_type = override_type[:-3]
multithread = False
self.log.debug("pinning_override: type %s multithread %s", override_type, multithread)
# The whole method is very primitive
# - assume cpu layout on OS has correct numbering
# What about pinned threads of threaded apps?
# - eg use likwid to pin those threads too
# cores per process
        corespp = self.foundppn // self.mpiprocesspernode
        corespp_rest = self.foundppn % self.mpiprocesspernode
        # initialise from the requested mode; disabled below when multi-core domains are impossible
        # (without this, 'multi' could be referenced before assignment further down)
        multi = multithread
        if (corespp < 1) or (self.mpiprocesspernode == self.foundppn):
            multi = False
self.log.debug(("pinning_override: exactly one or more than one process for each core: mpi processes: %s "
"ppn: %s. Multithreading is disabled."), self.mpiprocesspernode, self.foundppn)
if corespp_rest > 0:
self.log.debug(("pinning_override: number of mpiprocesses (%s) is not an exact multiple of "
"number of procs (%s). Ignoring rest."), self.mpiprocesspernode, self.foundppn)
map_func = None
if override_type in ('packed', 'compact',):
if multi:
# consecutive domains
map_func = lambda x: "%s-%s" % (x * corespp, (x + 1) * corespp - 1)
else:
# consecutive cores
map_func = lambda x: x
elif override_type in ('cycle',):
# eg double with GAMESS
if multi:
self.log.raiseException(
"pinning_override: trying to set pin type to 'cycle' with multithreading enabled: not supported")
else:
map_func = lambda x: (x % self.foundppn)
elif override_type in ('spread',):
if multi:
# spread domains
map_func = lambda x: "%s-%s" % (x * corespp, (x + 1) * corespp - 1)
else:
# spread cores
map_func = lambda x: (x * corespp)
else:
self.log.raiseException("pinning_override: unsupported pinning_override_type %s" %
self.pinning_override_type)
rankmap = [map_func(x) for x in range(self.mpiprocesspernode)]
wrappertxt += "%s=(%s)\n" % (rankmapname, ' '.join(rankmap))
pinning_exe = which(self.PINNING_OVERRIDE_METHOD) # default numactl
if not pinning_exe:
self.log.raiseException("pinning_override: can't find executable %s" % self.PINNING_OVERRIDE_METHOD)
if self.PINNING_OVERRIDE_METHOD in ('numactl',):
pinning_exe += ' --physcpubind="${%s[$%s]}"' % (rankmapname, rankname)
wrappertxt += "%s $@" % pinning_exe
wrapperpath = os.path.join(self.jobdir, 'pinning_override_wrapper.sh')
try:
open(wrapperpath, 'w').write(wrappertxt)
os.chmod(wrapperpath, stat.S_IRWXU)
self.log.debug("pinning_override: wrote wrapper file %s:\n%s", wrapperpath, wrappertxt)
except IOError:
            self.log.raiseException('pinning_override: failed to write wrapper file %s' % wrapperpath)
self.log.debug("pinning_override: pinning_exe %s to wrapper %s", pinning_exe, wrapperpath)
return wrapperpath
def get_pinning_override_variable(self):
"""
Key element is that one needs to know the rank or something similar of each process
- preferably in environment
- eg QLogic PSC_MPI_NODE_RANK: this instance is the nth local rank.
- alternative is small c mpi program with bash wrapper
- see also likwid-mpirun for alternative example
- mentions similar OMPI_COMM_WORLD_RANK for OpenMPI and PMI_RANK for IntelMPI
        - local_rank is the remainder of myrank divided by the number of nodes?
This is a bash expression.
- eg $((x/y)) is also fine
"""
self.log.raiseException("get_pinning_override_variable: not implemented.")
def mpirun_prepare_execution(self):
"""
Make a function that runs mpirun with all arguments correctly set
@return: a tuple containing the final function and the final command
"""
def main_runfunc(cmd):
"""The function that will run mpirun"""
if self.options.output is not None:
return run_to_file(cmd, filename=self.options.output)
else:
return run_async_to_stdout(cmd)
return [(main_runfunc, self.mpirun_cmd)]
def cleanup(self):
"""Remove temporary directory (mympirundir)"""
try:
shutil.rmtree(self.mympirundir)
self.log.debug("cleanup: removed mympirundir %s", self.mympirundir)
except OSError:
self.log.raiseException("cleanup: cleaning up mympirundir %s failed" % self.mympirundir)
| gpl-2.0 | -5,106,039,025,082,344,000 | 40.228346 | 123 | 0.597999 | false |
ozgurgunes/django-manifest | tests/test_mixins.py | 1 | 3125 | # -*- coding: utf-8 -*-
""" Manifest View Mixin Tests
"""
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.urls import reverse
from manifest import messages
from manifest.views import EmailChangeView
from tests import data_dicts
from tests.base import ManifestTestCase
class MessageMixinTests(ManifestTestCase):
"""Tests for Manifest view mixins.
"""
form_data = data_dicts.LOGIN_FORM["valid"][0]
def test_message_mixin_success(self):
"""Response should contain ``messages``.
"""
with self.defaults(MANIFEST_USE_MESSAGES=True):
response = self.client.get(reverse("auth_logout"))
response_messages = list(response.context["messages"])
self.assertEqual(len(response_messages), 1)
self.assertEqual(
str(response_messages[0]), messages.AUTH_LOGOUT_SUCCESS
)
def test_message_mixin_error(self):
"""Response should contain ``messages``.
"""
with self.defaults(MANIFEST_USE_MESSAGES=True):
response = self.client.get(
reverse(
"auth_activate",
kwargs={"username": "alice", "token": "fake"},
)
)
response_messages = list(response.context["messages"])
self.assertNotEqual(len(response_messages), 0)
self.assertEqual(
str(response_messages[0]), messages.AUTH_ACTIVATE_ERROR
)
class SendMailMixinTests(ManifestTestCase):
"""Tests for Manifest view mixins.
"""
form_data = data_dicts.LOGIN_FORM["valid"][0]
def test_send_mail_mixin_subject(self):
"""Should raise ``ImproperlyConfigured``.
"""
# pylint: disable=bad-continuation
with self.assertRaisesRegex(
ImproperlyConfigured, "No template name for subject."
):
view = EmailChangeView(
email_subject_template_name=None,
email_message_template_name="dummy",
)
view.create_email(None, None)
def test_send_mail_mixin_message(self):
"""Should raise ``ImproperlyConfigured``.
"""
# pylint: disable=bad-continuation
with self.assertRaisesRegex(
ImproperlyConfigured, "No template name for message."
):
view = EmailChangeView(
email_subject_template_name="dummy",
email_message_template_name=None,
)
view.create_email(None, None)
def test_send_mail_mixin_html(self):
"""Should send 2 emails.
"""
self.client.login(
username=self.form_data["identification"],
password=self.form_data["password"],
)
response = self.client.post(
reverse("test_send_mail_mixin_html"),
data={"email": "[email protected]"},
)
self.assertEqual(len(mail.outbox), 2)
self.assertTemplateUsed(
response, "manifest/emails/confirmation_email_message_new.txt"
)
| mit | 1,813,927,764,464,734,700 | 31.894737 | 74 | 0.58976 | false |
setsulla/owanimo | lib/puyo/util/log.py | 1 | 2199 | import os
import logging
import logging.config
import logging.handlers
import define
from system import FILE as f
from system import DIRECTORY as d
CONF_FILE = "logging.conf"
BASE_NAME = "Puyopuyo.Owanimo"
BASE_LEVEL = logging.DEBUG
BASE_FORMAT = '%(processName)s.%(name)s ( PID %(process)d ) : %(asctime)s - %(levelname)s - %(message)s'
class Log(object):
def __init__(self, name):
d.create(define.APP_LOG)
self.logger = logging.getLogger(name)
self.logger.setLevel(BASE_LEVEL)
self.__addHandler(self.consoleHandler())
self.__addHandler(self.fileHandler(
os.path.normpath(os.path.join(define.APP_LOG,"system.log"))))
def __addHandler(self, handler):
self.logger.addHandler(handler)
def consoleHandler(self):
f = logging.Formatter(BASE_FORMAT)
h = logging.StreamHandler()
h.setLevel(BASE_LEVEL)
h.setFormatter(f)
return h
def fileHandler(self, filename):
f.create(filename)
fo = logging.Formatter(BASE_FORMAT)
h = logging.FileHandler(filename, 'a+')
h.setLevel(BASE_LEVEL)
h.setFormatter(fo)
return h
def rotateFileHandler(self, filename):
if os.path.exists(filename):
f = logging.Formatter(BASE_FORMAT)
h = logging.handlers.TimedRotatingFileHandler(
filename = os.path.normpath(os.path.join(filename)),
when = 'D',
backupCount = 5
)
h.setLevel(BASE_LEVEL)
h.setFormatter(f)
            return h
else:
return None
def debug(self, message):
self.logger.debug(message)
def warning(self, message):
self.logger.warning(message)
def info(self, message):
self.logger.info(message)
def critical(self, message):
self.logger.critical(message)
def error(self, message):
self.logger.error(message)
def __del__(self):
del self
LOG = Log(BASE_NAME)
if __name__ == "__main__":
logger = LOG
logger.debug("debug")
logger.warning("warning")
logger.info("info")
logger.critical("critical")
logger.error("error")
| mit | -1,362,475,733,673,479,400 | 25.493976 | 104 | 0.600728 | false |
CelineBoudier/rapid-router | game/tests/test_scoreboard.py | 1 | 14455 | # -*- coding: utf-8 -*-
# Code for Life
#
# Copyright (C) 2016, Ocado Innovation Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# ADDITIONAL TERMS – Section 7 GNU General Public Licence
#
# This licence does not grant any right, title or interest in any “Ocado” logos,
# trade names or the trademark “Ocado” or any other trademarks or domain names
# owned by Ocado Innovation Limited or the Ocado group of companies or any other
# distinctive brand features of “Ocado” as may be secured from time to time. You
# must not distribute any modification of this program using the trademark
# “Ocado” or claim any affiliation or association with Ocado or its employees.
#
# You are not authorised to use the name Ocado (or any of its trade names) or
# the names of any author or contributor in advertising or for publicity purposes
# pertaining to the distribution of this program, without the prior written
# authorisation of Ocado.
#
# Any propagation, distribution or conveyance of this program must include this
# copyright notice and these terms. You must not misrepresent the origins of this
# program; modified versions of the program must be marked as such and not
# identified as the original program.
from datetime import timedelta, datetime
from django.utils.timezone import utc
from django.test import TestCase
from hamcrest import *
from game.models import Level, Attempt
from game.views.scoreboard import StudentRow, scoreboard_data
from game.views.scoreboard_csv import scoreboard_csv_multiple_levels, scoreboard_csv_single_level
from portal.models import Class
from portal.tests.utils.classes import create_class_directly
from portal.tests.utils.student import create_school_student_directly
from portal.tests.utils.teacher import signup_teacher_directly
Headers = ['Class', 'Name', 'Total Score', 'Total Time', 'Started Levels %', 'Attempted levels %', 'Finished levels %']
class ScoreboardTestCase(TestCase):
def test_teacher_multiple_students_multiple_levels(self):
level_ids = ids_of_levels_named(["1", "2"])
level1 = Level.objects.get(name="1")
level2 = Level.objects.get(name="2")
clas, student, student2 = set_up_data()
create_attempt(student, level1, 10.5)
create_attempt(student2, level1, 2.3)
create_attempt(student2, level2, 16.7)
student_data, headers = scoreboard_data(Teacher(), level_ids, [clas.id])
assert_that(headers, equal_to(['Class', 'Name', 'Total Score', 'Total Time', 'Progress', u'Level 1', u'Level 2']))
assert_that(student_data, has_length(2))
assert_student_row(student_row=student_data[0],
class_name=clas.name,
student_name=student.user.user.first_name,
total_score=10.5,
total_time=timedelta(0),
progress=(0.0, 0.0, 50.0),
scores=[10.5, ''])
assert_student_row(student_row=student_data[1],
class_name=clas.name,
student_name=student2.user.user.first_name,
total_score=19.0,
total_time=timedelta(0),
progress=(0.0, 50.0, 50.0),
scores=[2.3, 16.7])
def test_teacher_multiple_students_single_level(self):
level_ids = ids_of_levels_named(["1"])
level1 = Level.objects.get(name="1")
clas, student, student2 = set_up_data()
create_attempt(student, level1, 10.5)
create_attempt(student2, level1, 2.3)
student_data, headers = scoreboard_data(Teacher(), level_ids, [clas.id])
assert_that(headers, equal_to(['Class', 'Name', 'Score', 'Total Time', 'Start Time', 'Finish Time']))
assert_that(student_data, has_length(2))
assert_student_row_single_level(student_row=student_data[0],
class_name=clas.name,
student_name=student.user.user.first_name,
total_score=10.5,
total_time=timedelta(0))
assert_student_row_single_level(student_row=student_data[1],
class_name=clas.name,
student_name=student2.user.user.first_name,
total_score=2.3,
total_time=timedelta(0))
def test_student_multiple_students_multiple_levels(self):
level_ids = ids_of_levels_named(["1", "2"])
level1 = Level.objects.get(name="1")
level2 = Level.objects.get(name="2")
clas, student, student2 = set_up_data(True)
create_attempt(student, level1, 10.5)
create_attempt(student2, level1, 2.3)
create_attempt(student2, level2, 16.7)
student_data, headers = scoreboard_data(Student(student), level_ids, [clas.id])
assert_that(headers, equal_to(['Class', 'Name', 'Total Score', 'Total Time', 'Progress', u'Level 1', u'Level 2']))
assert_that(student_data, has_length(2))
assert_student_row(student_row=student_data[0],
class_name=clas.name,
student_name=student.user.user.first_name,
total_score=10.5,
total_time=timedelta(0),
progress=(0.0, 0.0, 50.0),
scores=[10.5, ''])
assert_student_row(student_row=student_data[1],
class_name=clas.name,
student_name=student2.user.user.first_name,
total_score=19.0,
total_time=timedelta(0),
progress=(0.0, 50.0, 50.0),
scores=[2.3, 16.7])
def test_student_multiple_students_single_level(self):
level_ids = ids_of_levels_named(["2"])
level2 = Level.objects.get(name="2")
clas, student, student2 = set_up_data(True)
create_attempt(student, level2, 10.5)
create_attempt(student2, level2, 16.7)
student_data, headers = scoreboard_data(Student(student), level_ids, [clas.id])
assert_that(headers, equal_to(['Class', 'Name', 'Score', 'Total Time', 'Start Time', 'Finish Time']))
assert_that(student_data, has_length(2))
assert_student_row_single_level(student_row=student_data[0],
class_name=clas.name,
student_name=student.user.user.first_name,
total_score=10.5,
total_time=timedelta(0))
assert_student_row_single_level(student_row=student_data[1],
class_name=clas.name,
student_name=student2.user.user.first_name,
total_score=16.7,
total_time=timedelta(0))
def test_student_multiple_students_multiple_levels_cannot_see_classmates(self):
level_ids = ids_of_levels_named(["1", "2"])
level1 = Level.objects.get(name="1")
level2 = Level.objects.get(name="2")
clas, student, student2 = set_up_data()
create_attempt(student, level1, 10.5)
create_attempt(student2, level1, 2.3)
create_attempt(student2, level2, 16.7)
student_data, headers = scoreboard_data(Student(student), level_ids, [clas.id])
assert_that(headers, equal_to(['Class', 'Name', 'Total Score', 'Total Time', 'Progress', u'Level 1', u'Level 2']))
assert_that(student_data, has_length(1))
assert_student_row(student_row=student_data[0],
class_name=clas.name,
student_name=student.user.user.first_name,
total_score=10.5,
total_time=timedelta(0),
progress=(0.0, 0.0, 50.0),
scores=[10.5, ''])
class ScoreboardCsvTestCase(TestCase):
def test_multiple_levels(self):
levels = Level.objects.sorted_levels()
student_rows = [(self.student_row()), (self.student_row())]
response = scoreboard_csv_multiple_levels(student_rows, levels)
actual_header, actual_rows = self.actual_data(response.content)
expected_header = self.expected_header(levels)
expected_rows = self.expected_rows_multiple_levels(student_rows)
assert_that(actual_header, equal_to(expected_header))
assert_that(actual_rows, equal_to(expected_rows))
def test_single_level(self):
student_rows = [(self.student_row()), (self.student_row())]
response = scoreboard_csv_single_level(student_rows)
actual_header, actual_rows = self.actual_data(response.content)
expected_header = 'Class,Name,Score,Total Time,Start Time,Finish Time'
expected_rows = self.expected_rows_single_level(student_rows)
assert_that(actual_header, equal_to(expected_header))
assert_that(actual_rows, equal_to(expected_rows))
def expected_rows_single_level(self, student_rows):
return map(self.expected_row_single_level, student_rows) + [""]
def expected_rows_multiple_levels(self, student_rows):
return map(self.expected_row_multiple_levels, student_rows) + [""]
def student_row(self):
email, password = signup_teacher_directly()
_, class_name, access_code = create_class_directly(email)
_, _, student = create_school_student_directly(access_code)
total_time = timedelta(0, 30)
scores = [x for x in range(20)]
total_score = sum(scores)
progress = (0, 0, 0)
all_scores = scores + [""] * 89
row = StudentRow(student=student,
total_time=total_time,
total_score=total_score,
start_time=datetime.fromtimestamp(1435305072, tz=utc),
finish_time=datetime.fromtimestamp(1438305072, tz=utc),
progress=progress,
scores=all_scores,
class_field=Class(name="MyClass"))
return row
def expected_row_multiple_levels(self, student_row):
beginning = "%s,%s,190,0:00:30,0,0,0,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19," \
% (student_row.class_field.name, student_row.name)
padding = ','.join([""] * 89)
return beginning + padding
def expected_row_single_level(self, student_row):
return "%s,%s,190,0:00:30,2015-06-26 07:51:12+00:00,2015-07-31 01:11:12+00:00" % (
student_row.class_field.name, student_row.name)
def expected_header(self, levels):
level_strings = map(str, levels)
all_header_strings = Headers + level_strings
joined = ','.join(all_header_strings)
return joined
def actual_data(self, content):
split = content.split("\r\n")
header = split[0]
rows = split[1:]
return header, rows
class Student:
def __init__(self, student):
self.student = student
def is_student(self): return True
def is_teacher(self): return False
def is_independent_student(self): return False
class Teacher:
def is_student(self): return False
def is_teacher(self): return True
def is_independent_student(self): return False
def assert_student_row(student_row, class_name, student_name, total_score, total_time, progress, scores):
assert_that(student_row.class_field.name, equal_to(class_name))
assert_that(student_row.name, equal_to(student_name))
assert_that(student_row.total_score, equal_to(total_score))
assert_that(student_row.total_time, equal_to(total_time))
assert_that(student_row.progress, equal_to(progress))
assert_that(student_row.scores, equal_to(scores))
def assert_student_row_single_level(student_row, class_name, student_name, total_score, total_time):
assert_that(student_row.class_field.name, equal_to(class_name))
assert_that(student_row.name, equal_to(student_name))
assert_that(student_row.total_score, equal_to(total_score))
assert_that(student_row.total_time, equal_to(total_time))
def create_attempt(student, level, score):
attempt = Attempt.objects.create(finish_time=datetime.fromtimestamp(1435305072),
level=level,
student=student,
score=score,
is_best_attempt=True)
attempt.start_time=datetime.fromtimestamp(1435305072)
attempt.save()
def ids_of_levels_named(names):
levels = Level.objects.filter(name__in=names)
assert_that(len(levels), equal_to(len(names)))
level_ids = map(lambda x: x.id, levels)
return level_ids
def set_up_data(classmates_data_viewable=False):
email, password = signup_teacher_directly()
clas, class_name, access_code = create_class_directly(email)
if classmates_data_viewable:
clas.classmates_data_viewable = True
clas.save()
_, _, student = create_school_student_directly(access_code)
_, _, student2 = create_school_student_directly(access_code)
create_random_school_data()
return clas, student, student2
def create_random_school_data():
email, password = signup_teacher_directly()
clas, class_name, access_code = create_class_directly(email)
create_school_student_directly(access_code)
create_school_student_directly(access_code)
| agpl-3.0 | 6,419,585,294,995,314,000 | 41.090379 | 122 | 0.605805 | false |
openhumanoids/oh-distro | software/models/common_components/multisense_sl/mit_modifications/multisense_sl.py | 1 | 1937 | import os
drc_base_path = os.getenv("DRC_BASE")
import sys
sys.path.append(os.path.join(drc_base_path, "software", "models",
"model_transformation"))
import mitUrdfUtils as mit
from jointNameMap import jointNameMap
from lxml import etree
import tempfile
from glob import glob
os.chdir(os.path.dirname(os.path.realpath(__file__)))
meshesDirectory = '../meshes'
original_urdf_path = "../multisense_sl_original.urdf"
urdf_path = "../multisense_sl.urdf"
no_joint_urdf_path = "../multisense_sl_no_joint.urdf"
convex_hull_urdf_path = "../multisense_sl_convex_hull.urdf"
no_collision_urdf_path = "../multisense_sl_no_collision.urdf"
# Convert meshes
for inFile in glob(os.path.join(meshesDirectory, "*.dae")):
mit.convertMeshTo(inFile, ".obj")
mit.convertMeshTo(inFile, ".wrl")
for inFile in glob(os.path.join(meshesDirectory, "*.obj")):
if "chull" not in inFile:
mit.createConvexHullMesh(inFile)
for inFile in glob(os.path.join(meshesDirectory, "*.wrl")):
if "chull" not in inFile:
mit.createConvexHullMesh(inFile)
# Expand all includes to allow us to appropriately change mesh filenames
tmp = tempfile.NamedTemporaryFile()
mit.xacro(original_urdf_path, tmp.name, includes_only=True,
recursive_includes=True)
# Load urdf
urdf = etree.parse(tmp.name)
# Replace package:// syntax
#mit.replacePackageWithPathInMeshPaths(urdf, "../common_components")
# Use MITDRC joint names and .obj meshes
mit.useObjMeshes(urdf)
mit.renameJoints(urdf, jointNameMap)
urdf.write(urdf_path, pretty_print=True)
# Generate no-joint urdf
mit.weldAllJoints(urdf)
urdf.write(no_joint_urdf_path, pretty_print=True)
# Generate no-joint, no-collision urdf
mit.removeAllCollisions(urdf)
urdf.write(no_collision_urdf_path, pretty_print=True)
# Generate convex hull urdf
mit.addCollisionsFromVisuals(urdf)
mit.useConvexHullMeshes(urdf)
urdf.write(convex_hull_urdf_path, pretty_print=True)
| bsd-3-clause | -8,525,078,679,496,667,000 | 29.265625 | 72 | 0.737223 | false |
asmaps/nsupdate.info | nsupdate/nsupdate/wsgi.py | 1 | 1425 | """
WSGI config for nsupdate project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "nsupdate.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "nsupdate.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause | 6,899,560,263,056,525,000 | 43.53125 | 79 | 0.793684 | false |
b1-systems/kiwi | kiwi/tasks/result_bundle.py | 1 | 7145 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
"""
usage: kiwi result bundle -h | --help
kiwi result bundle --target-dir=<directory> --id=<bundle_id> --bundle-dir=<directory>
[--zsync-source=<download_location>]
kiwi result bundle help
commands:
bundle
create result bundle from the image build results in the
specified target directory. Each result image will contain
the specified bundle identifier as part of its filename.
Uncompressed image files will also become xz compressed
and a sha sum will be created from every result image.
options:
--bundle-dir=<directory>
directory to store the bundle results
--id=<bundle_id>
the bundle id. A free form text appended to the version
information of the result image filename
--target-dir=<directory>
the target directory to expect image build results
--zsync-source=<download_location>
specify the download location from which the bundle file(s)
can be fetched from. The information is effective if zsync is
used to sync the bundle. The zsync control file is only created
for those bundle files which are marked for compression because
in a kiwi build only those are meaningful for a partial binary
file download. It is expected that all files from a bundle
are placed to the same download location
"""
from collections import OrderedDict
import os
# project
from kiwi.tasks.base import CliTask
from kiwi.help import Help
from kiwi.system.result import Result
from kiwi.logger import log
from kiwi.path import Path
from kiwi.utils.compress import Compress
from kiwi.utils.checksum import Checksum
from kiwi.command import Command
from kiwi.exceptions import (
KiwiBundleError
)
class ResultBundleTask(CliTask):
"""
Implements result bundler
Attributes
* :attr:`manual`
Instance of Help
"""
def process(self):
"""
Create result bundle from the image build results in the
specified target directory. Each result image will contain
the specified bundle identifier as part of its filename.
Uncompressed image files will also become xz compressed
and a sha sum will be created from every result image
"""
self.manual = Help()
if self._help():
return
# load serialized result object from target directory
result_directory = os.path.abspath(self.command_args['--target-dir'])
bundle_directory = os.path.abspath(self.command_args['--bundle-dir'])
if result_directory == bundle_directory:
raise KiwiBundleError(
'Bundle directory must be different from target directory'
)
log.info(
'Bundle build results from %s', result_directory
)
result = Result.load(
result_directory + '/kiwi.result'
)
image_version = result.xml_state.get_image_version()
image_name = result.xml_state.xml_data.get_name()
ordered_results = OrderedDict(sorted(result.get_results().items()))
# hard link bundle files, compress and build checksum
if not os.path.exists(bundle_directory):
Path.create(bundle_directory)
for result_file in list(ordered_results.values()):
if result_file.use_for_bundle:
bundle_file_basename = os.path.basename(result_file.filename)
# The bundle id is only taken into account for image results
# which contains the image version appended in its file name
part_name = list(bundle_file_basename.partition(image_name))
bundle_file_basename = ''.join([
part_name[0], part_name[1],
part_name[2].replace(
image_version,
image_version + '-' + self.command_args['--id']
)
])
log.info('Creating %s', bundle_file_basename)
bundle_file = ''.join(
[bundle_directory, '/', bundle_file_basename]
)
Command.run(
[
'cp', result_file.filename, bundle_file
]
)
if self.runtime_config.is_bundle_compression_requested() and \
result_file.compress:
log.info('--> XZ compressing')
compress = Compress(bundle_file)
compress.xz(self.runtime_config.get_xz_options())
bundle_file = compress.compressed_filename
if self.command_args['--zsync-source'] and result_file.shasum:
# Files with a checksum are considered to be image files
# and are therefore eligible to be provided via the
# requested Partial/differential file download based on
# zsync
zsyncmake = Path.which('zsyncmake', access_mode=os.X_OK)
if zsyncmake:
log.info('--> Creating zsync control file')
Command.run(
[
zsyncmake, '-e', '-u', os.sep.join(
[
self.command_args['--zsync-source'],
os.path.basename(bundle_file)
]
), '-o', bundle_file + '.zsync', bundle_file
]
)
else:
log.warning(
'--> zsyncmake missing, zsync setup skipped'
)
if result_file.shasum:
log.info('--> Creating SHA 256 sum')
checksum = Checksum(bundle_file)
with open(bundle_file + '.sha256', 'w') as shasum:
shasum.write(
'{0} {1}'.format(
checksum.sha256(), bundle_file_basename
)
)
def _help(self):
if self.command_args['help']:
self.manual.show('kiwi::result::bundle')
else:
return False
return self.manual
| gpl-3.0 | 1,665,385,439,122,019,300 | 39.596591 | 92 | 0.56795 | false |
jeonghoonkang/BerePi | apps/lcd_berepi/16x2_LCD_RGB.py | 1 | 6266 | #!/usr/bin/python
#-*- coding: utf-8 -*-
# Author : Matt Hawkins
# : ipmstyle, https://github.com/ipmstyle
# : jeonghoonkang, https://github.com/jeonghoonkang
# The wiring for the LCD is as follows:
# 1 : GND
# 2 : 5V
# 3 : Contrast (0-5V)*
# 4 : RS (Register Select)
# 5 : R/W (Read Write) - GROUND THIS PIN
# 6 : Enable or Strobe
# 7 : Data Bit 0 - NOT USED
# 8 : Data Bit 1 - NOT USED
# 9 : Data Bit 2 - NOT USED
# 10: Data Bit 3 - NOT USED
# 11: Data Bit 4
# 12: Data Bit 5
# 13: Data Bit 6
# 14: Data Bit 7
# 15: LCD Backlight +5V**
# 16: RED LCD Backlight (-)
# 17: GREEN LCD Backlight (-)
# 18: BLUE LCD Backlight (-)
import RPi.GPIO as GPIO
import time
# Define GPIO to LCD mapping
LCD_RS = 27
LCD_E = 22
LCD_D4 = 25
LCD_D5 = 24
LCD_D6 = 23
LCD_D7 = 12
#LED_ON = 4
LCD_RED = 4
LCD_GREEN = 17
LCD_BLUE = 7
# Define some device constants
LCD_WIDTH = 16 # Maximum characters per line
LCD_CHR = True
LCD_CMD = False
LCD_LINE_1 = 0x80 # LCD RAM address for the 1st line
LCD_LINE_2 = 0xC0 # LCD RAM address for the 2nd line
# Timing constants
E_PULSE = 0.0005
E_DELAY = 0.0005
GPIO.setmode(GPIO.BCM) # Use BCM GPIO numbers
GPIO.setup(LCD_E, GPIO.OUT) # E
GPIO.setup(LCD_RS, GPIO.OUT) # RS
GPIO.setup(LCD_D4, GPIO.OUT) # DB4
GPIO.setup(LCD_D5, GPIO.OUT) # DB5
GPIO.setup(LCD_D6, GPIO.OUT) # DB6
GPIO.setup(LCD_D7, GPIO.OUT) # DB7
#GPIO.setup(LED_ON, GPIO.OUT) # Backlight enable
GPIO.setup(LCD_RED, GPIO.OUT) # RED Backlight enable
GPIO.setup(LCD_GREEN, GPIO.OUT) # GREEN Backlight enable
GPIO.setup(LCD_BLUE, GPIO.OUT) # BLUEBacklight enable
def main():
# Main program block
# Initialise display
lcd_init()
# Toggle backlight on-off-on
red_backlight(True)
time.sleep(1)
red_backlight(False)
time.sleep(1)
green_backlight(True)
time.sleep(1)
green_backlight(False)
time.sleep(1)
blue_backlight(True)
time.sleep(1)
blue_backlight(False)
time.sleep(1)
while True:
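        # note: the R/G/B pins drive the backlight cathodes (see the wiring table above), so writing
        # True appears to switch a colour off and False to switch it on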
GPIO.output(LCD_RED, True)
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_BLUE, True)
lcd_string("Rasbperry Pi",LCD_LINE_1,2)
lcd_string(": RED",LCD_LINE_2,2)
red_backlight(False)
time.sleep(3) # 3 second delay
GPIO.output(LCD_RED, True)
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_BLUE, True)
lcd_string("Rasbperry Pi",LCD_LINE_1,2)
lcd_string(": GREEN",LCD_LINE_2,2)
green_backlight(False)
time.sleep(3) # 3 second delay
GPIO.output(LCD_RED, True)
GPIO.output(LCD_GREEN, True)
GPIO.output(LCD_BLUE, True)
blue_backlight(True)
lcd_string("Rasbperry Pi",LCD_LINE_1,2)
lcd_string(": BLUE",LCD_LINE_2,2)
blue_backlight(False)
time.sleep(3) # 3 second delay
"""
#- RED + GREEN
red_backlight(True)
green_backlight(True)
lcd_string("RED",LCD_LINE_1,2)
lcd_string("GREEN",LCD_LINE_2,2)
time.sleep(3)
green_backlight(False)
red_backlight(False)
time.sleep(0.5)
#- BLUE + GREEN
blue_backlight(True)
green_backlight(True)
lcd_string("BLUE",LCD_LINE_1,2)
lcd_string("GREEN",LCD_LINE_2,2)
time.sleep(3)
green_backlight(False)
blue_backlight(False)
#- RED + BLUE
red_backlight(True)
blue_backlight(True)
lcd_string("RED",LCD_LINE_1,2)
lcd_string("BLUE",LCD_LINE_2,2)
time.sleep(3)
blue_backlight(False)
red_backlight(False)
#- RED + GREEN + BLUE
red_backlight(True)
blue_backlight(True)
green_backlight(True)
lcd_string("RED, BLUE",LCD_LINE_1,2)
lcd_string("GREEN",LCD_LINE_2,2)
time.sleep(3)
red_backlight(False)
blue_backlight(False)
green_backlight(False)
time.sleep(5)
"""
def lcd_byte(bits, mode):
# Send byte to data pins
# bits = data
# mode = True for character
# False for command
GPIO.output(LCD_RS, mode) # RS
# High bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x10==0x10:
GPIO.output(LCD_D4, True)
if bits&0x20==0x20:
GPIO.output(LCD_D5, True)
if bits&0x40==0x40:
GPIO.output(LCD_D6, True)
if bits&0x80==0x80:
GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
# Low bits
GPIO.output(LCD_D4, False)
GPIO.output(LCD_D5, False)
GPIO.output(LCD_D6, False)
GPIO.output(LCD_D7, False)
if bits&0x01==0x01:
GPIO.output(LCD_D4, True)
if bits&0x02==0x02:
GPIO.output(LCD_D5, True)
if bits&0x04==0x04:
GPIO.output(LCD_D6, True)
if bits&0x08==0x08:
GPIO.output(LCD_D7, True)
# Toggle 'Enable' pin
lcd_toggle_enable()
def lcd_init():
# Initialise display
lcd_byte(0x33,LCD_CMD) # 110011 Initialise
lcd_byte(0x32,LCD_CMD) # 110010 Initialise
lcd_byte(0x06,LCD_CMD) # 000110 Cursor move direction
lcd_byte(0x0C,LCD_CMD) # 001100 Display On,Cursor Off, Blink Off
lcd_byte(0x28,LCD_CMD) # 101000 Data length, number of lines, font size
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def lcd_clear():
lcd_byte(0x01,LCD_CMD) # 000001 Clear display
time.sleep(E_DELAY)
def lcd_toggle_enable():
# Toggle enable
time.sleep(E_DELAY)
GPIO.output(LCD_E, True)
time.sleep(E_PULSE)
GPIO.output(LCD_E, False)
time.sleep(E_DELAY)
def lcd_string(message,line,style):
# Send string to display
# style=1 Left justified
# style=2 Centred
# style=3 Right justified
if style==1:
message = message.ljust(LCD_WIDTH," ")
elif style==2:
message = message.center(LCD_WIDTH," ")
elif style==3:
message = message.rjust(LCD_WIDTH," ")
lcd_byte(line, LCD_CMD)
for i in range(LCD_WIDTH):
lcd_byte(ord(message[i]),LCD_CHR)
#def lcd_backlight(flag):
# # Toggle backlight on-off-on
# GPIO.output(LED_ON, flag)
def red_backlight(flag):
# Toggle red-backlight on-off-on
GPIO.output(LCD_RED, flag)
def green_backlight(flag):
# Toggle green-backlight on-off-on
GPIO.output(LCD_GREEN, flag)
def blue_backlight(flag):
# Toggle blue-backlight on-off-on
GPIO.output(LCD_BLUE, flag)
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
finally:
#lcd_byte(0x01, LCD_CMD)
#lcd_string("Goodbye!",LCD_LINE_1,2)
GPIO.cleanup()
| bsd-2-clause | -9,210,237,829,379,006,000 | 22.207407 | 73 | 0.646345 | false |
BollMose/sensor | pms7003.py | 1 | 1949 | #encoding=utf-8
import os
import time
import serial
import sqlite3
from struct import *
def open_device(dev):
return serial.Serial(dev, baudrate=9600, timeout=2.0)
def close_device(ser):
ser.close()
def read_one_data(ser):
rv = b''
while True:
ch1 = ser.read()
if ch1 == b'\x42':
ch2 = ser.read()
if ch2 == b'\x4d':
rv += ch1 + ch2
rv += ser.read(32)
return rv
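# PMS7003 frame layout (32 bytes, big-endian, per the sensor datasheet):
#   bytes 0-1  : start characters 0x42 0x4d
#   bytes 2-3  : frame length field, 2*13 data shorts + 2 checksum bytes = 28
#   bytes 4-29 : 13 16-bit values (PM1.0/PM2.5/PM10 for CF=1 and STD, particle counts, version/error)
#   bytes 30-31: checksum = 16-bit sum of all preceding bytes, verified in read_native_pms() below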
def read_native_pms(ser):
recv = read_one_data(ser)
length = unpack('>h', recv[2:4])[0]
if length != 28:
return (False, "the length of data is not equal 28.")
pms = unpack('>hhhhhhhhhhhhh', recv[4:30])
# check sum
check = unpack('>h', recv[30:32])[0]
sum = 0x42 + 0x4d + 28
for pm in pms:
sum += (pm & 0x00ff)
sum += ((pm & 0xff00)>>8)
if check != sum:
return (False, "check sum is not right, hope:actual, {}:{}".format(sum, check))
return (True, pms)
if __name__ == '__main__':
with open_device("/dev/ttyUSB0") as ser:
ret, pms = read_native_pms(ser)
ser.flushInput()
        if ret == False:
            print "read error: ", pms
        else:
            print "version: ", (pms[12] & 0xff00) >> 8
            print "error code: ", (pms[12] & 0x00ff)
            print(
                'PM1.0(CF=1): {}\n'
                'PM2.5(CF=1): {}\n'
                'PM10 (CF=1): {}\n'
                'PM1.0 (STD): {}\n'
                'PM2.5 (STD): {}\n'
                'PM10 (STD): {}\n'
                '>0.3um : {}\n'
                '>0.5um : {}\n'
                '>1.0um : {}\n'
                '>2.5um : {}\n'
                '>5.0um : {}\n'
                '>10um : {}\n'
                .format(pms[0], pms[1], pms[2],
                        pms[3], pms[4], pms[5],
                        pms[6], pms[7], pms[8],
                        pms[9], pms[10], pms[11]))
| apache-2.0 | 3,337,298,386,798,929,000 | 27.661765 | 87 | 0.421755 | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/enums/types/search_engine_results_page_type.py | 1 | 1203 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.enums',
marshal='google.ads.googleads.v7',
manifest={
'SearchEngineResultsPageTypeEnum',
},
)
class SearchEngineResultsPageTypeEnum(proto.Message):
r"""The type of the search engine results page. """
class SearchEngineResultsPageType(proto.Enum):
r"""The type of the search engine results page."""
UNSPECIFIED = 0
UNKNOWN = 1
ADS_ONLY = 2
ORGANIC_ONLY = 3
ADS_AND_ORGANIC = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 2,275,997,382,339,844,900 | 29.846154 | 74 | 0.690773 | false |
grbd/GBD.Build.BlackJack | blackjack/cmake/storage/SetList.py | 1 | 1558 | from blackjack.cmake.ScriptBase import ScriptBase
class SetList(ScriptBase):
"""
Represents a collection of source files to be passed to a Target
"""
def __init__(self, name: str, srcs: [] = None, parentscope: bool = False):
super().__init__()
self._Name = None
self.Name = name
"""Name of the Set"""
self.Srcs = srcs
"""List of Sources"""
self.ParentScope = parentscope
"""If to set the list within the parent scope"""
if self.Srcs is None: self.Srcs = []
return
@property
def Name(self):
"""Name of the Set"""
return self._Name
@Name.setter
def Name(self, value):
self._Name = value.replace(" ", "_")
return
def render_body(self):
from blackjack.cmake.cmd.cmake_set import cmake_set
ret = ["## Source Set"]
opts = ""
if self.ParentScope:
opts = "PARENT_SCOPE"
setcmd = cmake_set(self.Name, self.Srcs, opts)
ret += setcmd.render()
return ret
def add(self, items):
"""Add a single item or list of items"""
if isinstance(items, str):
self.Srcs.append(items)
if isinstance(items, list):
self.Srcs += items
if isinstance(items, SetList):
self.Srcs += items.Srcs
return
def add_spacesep(self, items_str):
"""Add a Space seperated list of items"""
tmparr = [str(i) for i in items_str.split()]
self.Srcs += tmparr
return
| apache-2.0 | 333,099,618,787,520,600 | 27.290909 | 78 | 0.548843 | false |
tgerdes/toolbot | toolbot/adapter/shell.py | 1 | 1382 | import asyncio
import sys
from toolbot.adapter import Adapter
from toolbot.message import TextMessage
class ShellAdapter(Adapter):
def __init__(self, bot):
super().__init__(bot)
def send(self, envelope, *strings):
for string in strings:
# TODO: async print?
print("\x1b[01;32m{}\x1b[0m".format(string))
def emote(self, envelope, *strings):
self.send(envelope, *("* {}".format(string) for string in strings))
def reply(self, envelope, *strings):
self.send(envelope, *("{name}: {msg}".format(
name=envelope['user'].name,
msg=string) for string in strings))
@asyncio.coroutine
def input_loop(self, loop):
f = sys.stdin
fno = f.fileno()
q = asyncio.Queue()
def do_read():
q.put_nowait(f.readline())
loop.add_reader(fno, do_read)
while True:
print("{}> ".format(self.bot.name), end="")
sys.stdout.flush()
line = yield from q.get()
if not line:
print()
break
user = self.bot.brain.userForId(1, name="Shell", room="Shell")
self.receive(TextMessage(user, line, "messageId"))
loop.remove_reader(fno)
self.bot.loop.stop()
def run(self, loop):
asyncio.async(self.input_loop(loop))
| mit | -5,638,943,008,233,438,000 | 24.592593 | 75 | 0.552822 | false |
HyperloopTeam/FullOpenMDAO | lib/python2.7/site-packages/Pyevolve-0.6-py2.7.egg/pyevolve/Crossovers.py | 1 | 21075 | """
:mod:`Crossovers` -- crossover methods module
=====================================================================
In this module we have the genetic operators of crossover (or recombination) for each chromosome representation.
"""
from random import randint as rand_randint, choice as rand_choice
from random import random as rand_random
import math
import Util
import Consts
#############################
## 1D Binary String ##
#############################
def G1DBinaryStringXSinglePoint(genome, **args):
""" The crossover of 1D Binary String, Single Point
.. warning:: You can't use this crossover method for binary strings with length of 1.
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
if len(gMom) == 1:
Util.raiseException("The Binary String have one element, can't use the Single Point Crossover method !", TypeError)
cut = rand_randint(1, len(gMom)-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
sister[cut:] = gDad[cut:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
brother[cut:] = gMom[cut:]
return (sister, brother)
def G1DBinaryStringXTwoPoint(genome, **args):
""" The 1D Binary String crossover, Two Point
.. warning:: You can't use this crossover method for binary strings with length of 1.
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
if len(gMom) == 1:
Util.raiseException("The Binary String have one element, can't use the Two Point Crossover method !", TypeError)
cuts = [rand_randint(1, len(gMom)-1), rand_randint(1, len(gMom)-1)]
if cuts[0] > cuts[1]:
Util.listSwapElement(cuts, 0, 1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
sister[cuts[0]:cuts[1]] = gDad[cuts[0]:cuts[1]]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
brother[cuts[0]:cuts[1]] = gMom[cuts[0]:cuts[1]]
return (sister, brother)
def G1DBinaryStringXUniform(genome, **args):
""" The G1DList Uniform Crossover """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
for i in xrange(len(gMom)):
if Util.randomFlipCoin(Consts.CDefG1DBinaryStringUniformProb):
temp = sister[i]
sister[i] = brother[i]
brother[i] = temp
return (sister, brother)
####################
## 1D List ##
####################
def G1DListCrossoverSinglePoint(genome, **args):
""" The crossover of G1DList, Single Point
.. warning:: You can't use this crossover method for lists with just one element.
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
if len(gMom) == 1:
Util.raiseException("The 1D List have one element, can't use the Single Point Crossover method !", TypeError)
cut = rand_randint(1, len(gMom)-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
sister[cut:] = gDad[cut:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
brother[cut:] = gMom[cut:]
return (sister, brother)
def G1DListCrossoverTwoPoint(genome, **args):
""" The G1DList crossover, Two Point
.. warning:: You can't use this crossover method for lists with just one element.
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
if len(gMom) == 1:
Util.raiseException("The 1D List have one element, can't use the Two Point Crossover method !", TypeError)
cuts = [rand_randint(1, len(gMom)-1), rand_randint(1, len(gMom)-1)]
if cuts[0] > cuts[1]:
Util.listSwapElement(cuts, 0, 1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
sister[cuts[0]:cuts[1]] = gDad[cuts[0]:cuts[1]]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
brother[cuts[0]:cuts[1]] = gMom[cuts[0]:cuts[1]]
return (sister, brother)
def G1DListCrossoverUniform(genome, **args):
""" The G1DList Uniform Crossover """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
for i in xrange(len(gMom)):
if Util.randomFlipCoin(Consts.CDefG1DListCrossUniformProb):
temp = sister[i]
sister[i] = brother[i]
brother[i] = temp
return (sister, brother)
def G1DListCrossoverOX(genome, **args):
""" The OX Crossover for G1DList (order crossover) """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
listSize = len(gMom)
c1, c2 = [rand_randint(1, len(gMom)-1), rand_randint(1, len(gMom)-1)]
while c1 == c2:
c2 = rand_randint(1, len(gMom)-1)
if c1 > c2:
h = c1
c1 = c2
c2 = h
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
P1 = [ c for c in gMom[c2:] + gMom[:c2] if c not in gDad[c1:c2] ]
sister.genomeList = P1[listSize - c2:] + gDad[c1:c2] + P1[:listSize-c2]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
P2 = [ c for c in gDad[c2:] + gDad[:c2] if c not in gMom[c1:c2] ]
brother.genomeList = P2[listSize - c2:] + gMom[c1:c2] + P2[:listSize-c2]
   if sister is not None:
      assert listSize == len(sister)
   if brother is not None:
      assert listSize == len(brother)
return (sister, brother)
def G1DListCrossoverEdge(genome, **args):
""" THe Edge Recombination crossover for G1DList (widely used for TSP problem)
See more information in the `Edge Recombination Operator <http://en.wikipedia.org/wiki/Edge_recombination_operator>`_
Wikipedia entry.
"""
gMom, sisterl = args["mom"], []
gDad, brotherl = args["dad"], []
mom_edges, dad_edges, merge_edges = Util.G1DListGetEdgesComposite(gMom, gDad)
for c, u in (sisterl, set(gMom)), (brotherl, set(gDad)):
curr = None
for i in xrange(len(gMom)):
curr = rand_choice(tuple(u)) if not curr else curr
c.append(curr)
u.remove(curr)
d = [v for v in merge_edges.get(curr, []) if v in u]
if d: curr = rand_choice(d)
else:
s = [v for v in mom_edges.get(curr, []) if v in u]
s += [v for v in dad_edges.get(curr, []) if v in u]
curr = rand_choice(s) if s else None
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
sister.genomeList = sisterl
brother.genomeList = brotherl
return (sister, brother)
def G1DListCrossoverCutCrossfill(genome, **args):
""" The crossover of G1DList, Cut and crossfill, for permutations
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
if len(gMom) == 1:
Util.raiseException("The 1D List have one element, can't use the Single Point Crossover method !", TypeError)
cut = rand_randint(1, len(gMom)-1)
if args["count"] >= 1:
sister = gMom.clone()
mother_part = gMom[0:cut]
sister.resetStats()
i = (len(sister) - cut)
x = 0
for v in gDad:
if v in mother_part: continue
if x >= i: break
sister[cut+x] = v
x += 1
if args["count"] == 2:
brother = gDad.clone()
father_part = gDad[0:cut]
brother.resetStats()
i = (len(brother) - cut)
x = 0
for v in gMom:
if v in father_part: continue
if x >= i: break
brother[cut+x] = v
x += 1
return (sister, brother)
def G1DListCrossoverRealSBX(genome, **args):
""" Experimental SBX Implementation - Follows the implementation in NSGA-II (Deb, et.al)
Some implementation `reference <http://vision.ucsd.edu/~sagarwal/icannga.pdf>`_.
.. warning:: This crossover method is Data Type Dependent, which means that
must be used for 1D genome of real values.
"""
EPS = Consts.CDefG1DListSBXEPS
# Crossover distribution index
eta_c = Consts.CDefG1DListSBXEtac
gMom = args["mom"]
gDad = args["dad"]
# Get the variable bounds ('gDad' could have been used; but I love Mom:-))
lb = gMom.getParam("rangemin", Consts.CDefRangeMin)
ub = gMom.getParam("rangemax", Consts.CDefRangeMax)
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
for i in range(0,len(gMom)):
if math.fabs(gMom[i]-gDad[i]) > EPS:
if gMom[i] > gDad[i]:
                # swap so that gMom[i] <= gDad[i]
temp = gMom[i]
gMom[i] = gDad[i]
gDad[i] = temp
            # random number between 0 and 1
u = rand_random()
beta = 1.0 + 2*(gMom[i] - lb)/(1.0*(gDad[i]-gMom[i]))
alpha = 2.0 - beta**(-(eta_c+1.0))
if u <= (1.0/alpha):
beta_q = (u*alpha)**(1.0/((eta_c + 1.0)*1.0))
else:
beta_q = (1.0/(2.0-u*alpha))**(1.0/(1.0*(eta_c + 1.0)))
brother[i] = 0.5*((gMom[i] + gDad[i]) - beta_q*(gDad[i]-gMom[i]))
beta = 1.0 + 2.0*(ub - gDad[i])/(1.0*(gDad[i]-gMom[i]))
alpha = 2.0 - beta**(-(eta_c+1.0))
if u <= (1.0/alpha):
beta_q = (u*alpha)**(1.0/((eta_c + 1)*1.0))
else:
beta_q = (1.0/(2.0-u*alpha))**(1.0/(1.0*(eta_c + 1.0)))
sister[i] = 0.5*((gMom[i] + gDad[i]) + beta_q*(gDad[i]-gMom[i]))
if brother[i] > ub: brother[i] = ub
if brother[i] < lb: brother[i] = lb
if sister[i] > ub: sister[i] = ub
if sister[i] < lb: sister[i] = lb
if rand_random() > 0.5:
# Swap
temp = sister[i]
sister[i] = brother[i]
brother[i] = temp
else:
sister[i] = gMom[i]
brother[i] = gDad[i]
return (sister, brother)
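# Usage sketch (illustrative): SBX reads the real-value bounds from the
# genome parameters, falling back to the Consts defaults when they are unset:
#
#   genome = G1DList.G1DList(5)
#   genome.setParams(rangemin=0.0, rangemax=10.0)
#   genome.crossover.set(G1DListCrossoverRealSBX)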
####################
## 2D List ##
####################
def G2DListCrossoverUniform(genome, **args):
""" The G2DList Uniform Crossover """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
h, w = gMom.getSize()
for i in xrange(h):
for j in xrange(w):
if Util.randomFlipCoin(Consts.CDefG2DListCrossUniformProb):
temp = sister.getItem(i, j)
sister.setItem(i, j, brother.getItem(i, j))
brother.setItem(i, j, temp)
return (sister, brother)
def G2DListCrossoverSingleVPoint(genome, **args):
""" The crossover of G2DList, Single Vertical Point """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
cut = rand_randint(1, gMom.getWidth()-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
for i in xrange(sister.getHeight()):
sister[i][cut:] = gDad[i][cut:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
for i in xrange(brother.getHeight()):
brother[i][cut:] = gMom[i][cut:]
return (sister, brother)
def G2DListCrossoverSingleHPoint(genome, **args):
""" The crossover of G2DList, Single Horizontal Point """
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
cut = rand_randint(1, gMom.getHeight()-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
for i in xrange(cut, sister.getHeight()):
sister[i][:] = gDad[i][:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
        for i in xrange(cut, brother.getHeight()):
brother[i][:] = gMom[i][:]
return (sister, brother)
#############################
## 2D Binary String ##
#############################
def G2DBinaryStringXUniform(genome, **args):
""" The G2DBinaryString Uniform Crossover
.. versionadded:: 0.6
The *G2DBinaryStringXUniform* function
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
sister = gMom.clone()
brother = gDad.clone()
sister.resetStats()
brother.resetStats()
h, w = gMom.getSize()
for i in xrange(h):
for j in xrange(w):
if Util.randomFlipCoin(Consts.CDefG2DBinaryStringUniformProb):
temp = sister.getItem(i, j)
sister.setItem(i, j, brother.getItem(i, j))
brother.setItem(i, j, temp)
return (sister, brother)
def G2DBinaryStringXSingleVPoint(genome, **args):
""" The crossover of G2DBinaryString, Single Vertical Point
.. versionadded:: 0.6
The *G2DBinaryStringXSingleVPoint* function
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
cut = rand_randint(1, gMom.getWidth()-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
for i in xrange(sister.getHeight()):
sister[i][cut:] = gDad[i][cut:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
for i in xrange(brother.getHeight()):
brother[i][cut:] = gMom[i][cut:]
return (sister, brother)
def G2DBinaryStringXSingleHPoint(genome, **args):
""" The crossover of G2DBinaryString, Single Horizontal Point
.. versionadded:: 0.6
The *G2DBinaryStringXSingleHPoint* function
"""
sister = None
brother = None
gMom = args["mom"]
gDad = args["dad"]
cut = rand_randint(1, gMom.getHeight()-1)
if args["count"] >= 1:
sister = gMom.clone()
sister.resetStats()
for i in xrange(cut, sister.getHeight()):
sister[i][:] = gDad[i][:]
if args["count"] == 2:
brother = gDad.clone()
brother.resetStats()
        for i in xrange(cut, brother.getHeight()):
brother[i][:] = gMom[i][:]
return (sister, brother)
#############################
## Tree ##
#############################
def GTreeCrossoverSinglePoint(genome, **args):
""" The crossover for GTree, Single Point """
sister = None
brother = None
gMom = args["mom"].clone()
gDad = args["dad"].clone()
gMom.resetStats()
gDad.resetStats()
node_mom_stack = []
all_mom_nodes = []
node_mom_tmp = None
node_dad_stack = []
all_dad_nodes = []
node_dad_tmp = None
node_mom_stack.append(gMom.getRoot())
node_dad_stack.append(gDad.getRoot())
while (len(node_mom_stack) > 0) and (len(node_dad_stack) > 0):
node_mom_tmp = node_mom_stack.pop()
node_dad_tmp = node_dad_stack.pop()
if node_mom_tmp != gMom.getRoot():
all_mom_nodes.append(node_mom_tmp)
all_dad_nodes.append(node_dad_tmp)
node_mom_stack.extend(node_mom_tmp.getChilds())
node_dad_stack.extend(node_dad_tmp.getChilds())
if len(all_mom_nodes)==0 or len(all_dad_nodes)==0:
return (gMom, gDad)
if len(all_dad_nodes) == 1: nodeDad = all_dad_nodes[0]
else: nodeDad = rand_choice(all_dad_nodes)
if len(all_mom_nodes) == 1: nodeMom = all_mom_nodes[0]
else: nodeMom = rand_choice(all_mom_nodes)
nodeMom_parent = nodeMom.getParent()
nodeDad_parent = nodeDad.getParent()
# Sister
if args["count"] >= 1:
sister = gMom
nodeDad.setParent(nodeMom_parent)
nodeMom_parent.replaceChild(nodeMom, nodeDad)
sister.processNodes()
# Brother
if args["count"] == 2:
brother = gDad
nodeMom.setParent(nodeDad_parent)
nodeDad_parent.replaceChild(nodeDad, nodeMom)
brother.processNodes()
return (sister, brother)
def GTreeCrossoverSinglePointStrict(genome, **args):
""" The crossover of Tree, Strict Single Point
    .. note:: This crossover method creates offspring with restriction of the
    *max_depth* parameter.
    Accepts the *max_attempt* parameter, *max_depth* (required), and
    the *distr_leaf* parameter (>= 0.0 and <= 1.0), which represents the
    probability of leaf selection when finding random nodes for crossover.
"""
sister = None
brother = None
gMom = args["mom"].clone()
gDad = args["dad"].clone()
gMom.resetStats()
gDad.resetStats()
max_depth = gMom.getParam("max_depth", None)
max_attempt = gMom.getParam("max_attempt", 10)
distr_leaf = gMom.getParam("distr_leaf", None)
if max_depth is None:
Util.raiseException("You must specify the max_depth genome parameter !", ValueError)
    if max_depth < 1:
Util.raiseException("The max_depth must be >= 1, if you want to use GTreeCrossoverSinglePointStrict crossover !", ValueError)
momRandom = None
dadRandom = None
for i in xrange(max_attempt):
if distr_leaf is None:
dadRandom = gDad.getRandomNode()
momRandom = gMom.getRandomNode()
else:
if Util.randomFlipCoin(distr_leaf):
momRandom = gMom.getRandomNode(1)
else:
momRandom = gMom.getRandomNode(2)
if Util.randomFlipCoin(distr_leaf):
dadRandom = gDad.getRandomNode(1)
else:
dadRandom = gDad.getRandomNode(2)
assert momRandom is not None
assert dadRandom is not None
# Optimize here
mH = gMom.getNodeHeight(momRandom)
dH = gDad.getNodeHeight(dadRandom)
mD = gMom.getNodeDepth(momRandom)
dD = gDad.getNodeDepth(dadRandom)
        # Accept the pair once the swap keeps both offspring within max_depth
if (dD+mH <= max_depth) and (mD+dH <= max_depth):
break
if i == (max_attempt-1):
assert gMom.getHeight() <= max_depth
return (gMom, gDad)
else:
nodeMom, nodeDad = momRandom, dadRandom
nodeMom_parent = nodeMom.getParent()
nodeDad_parent = nodeDad.getParent()
# Sister
if args["count"] >= 1:
sister = gMom
nodeDad.setParent(nodeMom_parent)
if nodeMom_parent is None:
sister.setRoot(nodeDad)
else:
nodeMom_parent.replaceChild(nodeMom, nodeDad)
sister.processNodes()
assert sister.getHeight() <= max_depth
# Brother
if args["count"] == 2:
brother = gDad
nodeMom.setParent(nodeDad_parent)
if nodeDad_parent is None:
brother.setRoot(nodeMom)
else:
nodeDad_parent.replaceChild(nodeDad, nodeMom)
brother.processNodes()
assert brother.getHeight() <= max_depth
return (sister, brother)
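# Usage sketch (illustrative): the strict variant refuses to grow offspring
# beyond max_depth, so that parameter must be present on the genome:
#
#   genome.setParams(max_depth=5, max_attempt=10, distr_leaf=0.5)
#   genome.crossover.set(GTreeCrossoverSinglePointStrict)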
#############################################################################
################# GTreeGP Crossovers ######################################
#############################################################################
def GTreeGPCrossoverSinglePoint(genome, **args):
""" The crossover of the GTreeGP, Single Point for Genetic Programming
    .. note:: This crossover method creates offspring with restriction of the
*max_depth* parameter.
Accepts the *max_attempt* parameter, *max_depth* (required).
"""
sister = None
brother = None
gMom = args["mom"].clone()
gDad = args["dad"].clone()
gMom.resetStats()
gDad.resetStats()
max_depth = gMom.getParam("max_depth", None)
max_attempt = gMom.getParam("max_attempt", 15)
if max_depth is None:
Util.raiseException("You must specify the max_depth genome parameter !", ValueError)
    if max_depth < 1:
        Util.raiseException("The max_depth must be >= 1, if you want to use GTreeGPCrossoverSinglePoint crossover !", ValueError)
momRandom = None
dadRandom = None
for i in xrange(max_attempt):
dadRandom = gDad.getRandomNode()
if dadRandom.getType() == Consts.nodeType["TERMINAL"]:
momRandom = gMom.getRandomNode(1)
elif dadRandom.getType() == Consts.nodeType["NONTERMINAL"]:
momRandom = gMom.getRandomNode(2)
mD = gMom.getNodeDepth(momRandom)
dD = gDad.getNodeDepth(dadRandom)
# Two nodes are root
if mD==0 and dD==0: continue
mH = gMom.getNodeHeight(momRandom)
if dD+mH > max_depth: continue
dH = gDad.getNodeHeight(dadRandom)
if mD+dH > max_depth: continue
break
if i==(max_attempt-1):
assert gMom.getHeight() <= max_depth
return (gMom, gDad)
else:
nodeMom, nodeDad = momRandom, dadRandom
nodeMom_parent = nodeMom.getParent()
nodeDad_parent = nodeDad.getParent()
# Sister
if args["count"] >= 1:
sister = gMom
nodeDad.setParent(nodeMom_parent)
if nodeMom_parent is None:
sister.setRoot(nodeDad)
else:
nodeMom_parent.replaceChild(nodeMom, nodeDad)
sister.processNodes()
assert sister.getHeight() <= max_depth
# Brother
if args["count"] == 2:
brother = gDad
nodeMom.setParent(nodeDad_parent)
if nodeDad_parent is None:
brother.setRoot(nodeMom)
else:
nodeDad_parent.replaceChild(nodeDad, nodeMom)
brother.processNodes()
assert brother.getHeight() <= max_depth
return (sister, brother)
| gpl-2.0 | 7,506,001,929,453,182,000 | 25.881378 | 131 | 0.584437 | false |
stevearc/stiny | fabfile.py | 1 | 5084 | import os
import fabric.api as fab
import jinja2
import json
from fabric.context_managers import path
from fabric.decorators import roles
from pyramid.settings import aslist
from stiny.gutil import normalize_email
fab.env.roledefs = {
'door': ['[email protected]'],
'web': ['[email protected]'],
}
def _version():
return fab.local('git describe --tags', capture=True)
def _get_ref():
ref = fab.local('git rev-parse HEAD', capture=True)
return ref[:8]
def _get_var(key):
if key not in os.environ:
raise Exception("Missing environment variable %r" % key)
return os.environ[key]
CONSTANTS = {
'venv': '/envs/stiny',
'admins': [normalize_email(e) for e in aslist(_get_var('STINY_ADMINS'))],
'guests': [normalize_email(e) for e in aslist(_get_var('STINY_GUESTS'))],
'phone_access': _get_var('STINY_PHONE_ACCESS'),
'url_prefix': 'gen/' + _get_ref(),
'session': {
'encrypt_key': _get_var('STINY_ENCRYPT_KEY'),
'validate_key': _get_var('STINY_VALIDATE_KEY'),
},
'authtkt': {
'secret': _get_var('STINY_AUTH_SECRET'),
},
'google': {
'client_id': _get_var('STINY_PROD_CLIENT_GOOGLE_CLIENT_ID'),
'server_client_id': _get_var('STINY_SERVER_GOOGLE_CLIENT_ID'),
'server_client_secret': _get_var('STINY_SERVER_GOOGLE_CLIENT_SECRET'),
'calendar_id': _get_var('STINY_CAL_ID'),
},
'twilio': {
'auth_token': _get_var('STINY_TWILIO_AUTH_TOKEN'),
}
}
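# The CONSTANTS dict above is built at import time, so every STINY_* variable
# read by _get_var() must already be exported in the shell. A hypothetical
# minimal environment (placeholder values, not real secrets) would look like:
#
#   export STINY_ADMINS="[email protected]"
#   export STINY_GUESTS="[email protected]"
#   export STINY_ENCRYPT_KEY="..." STINY_VALIDATE_KEY="..."
#   export STINY_AUTH_SECRET="..." STINY_PHONE_ACCESS="..."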
def _render(filename, **context):
with open(filename, 'r') as ifile:
tmpl = jinja2.Template(ifile.read())
basename = os.path.basename(filename)
fab.local('mkdir -p dist')
outfile = os.path.join('dist', basename)
with open(outfile, 'w') as ofile:
ofile.write(tmpl.render(**context))
return outfile
def _render_put(filename, dest, **kwargs):
rendered = _render(filename, **CONSTANTS)
fab.put(rendered, dest, **kwargs)
def write_credentials(filename):
from stiny.gutil import Calendar
google = CONSTANTS['google']
cal = Calendar(google['server_client_id'], google['server_client_secret'],
filename, calendar_id=google['calendar_id'])
cal.login_if_needed()
def build_web():
fab.local('npm install')
fab.local('rm -rf stiny/webpack')
fab.local('npm run flow')
fab.local('npm run build-prod')
version = _version()
fab.local("sed -i -e 's/version=.*/version=\"%s\",/' setup.py" % version)
write_credentials('stiny/credentials.dat')
fab.local('python setup.py sdist')
fab.local("sed -i -e 's/version=.*/version=\"develop\",/' setup.py")
_render('prod.ini.tmpl', **CONSTANTS)
print "Created dist/stiny-%s.tar.gz" % version
return version
@roles('web')
def deploy_web():
version = build_web()
tarball = "stiny-%s.tar.gz" % version
fab.put("dist/" + tarball)
fab.sudo("if [ ! -e {0} ]; then virtualenv {0}; fi"
.format(CONSTANTS['venv']))
with path(CONSTANTS['venv'] + '/bin', behavior='prepend'):
fab.sudo("yes | pip uninstall stiny || true")
fab.sudo("pip install pastescript")
fab.sudo("pip install %s" % tarball)
_render_put('prod.ini.tmpl', '/etc/emperor/stiny.ini', use_sudo=True)
@roles('door')
def build_rpi_gpio_wheel():
gpio_wheel = 'RPi.GPIO-0.6.2-cp27-cp27mu-linux_armv6l.whl'
fab.local('mkdir -p pex_wheels')
    # Generate the RPi.GPIO wheel on the raspberry pi
fab.run('rm -rf /tmp/gpiobuild')
fab.run('mkdir -p /tmp/gpiobuild')
with fab.cd('/tmp/gpiobuild'):
fab.run('virtualenv venv')
with path('/tmp/gpiobuild/venv/bin', behavior='prepend'):
fab.run('pip install wheel')
fab.run('pip wheel RPi.GPIO==0.6.2 --wheel-dir=/tmp/gpiobuild')
fab.get(gpio_wheel, os.path.join('pex_wheels', gpio_wheel))
fab.run('rm -rf /tmp/gpiobuild')
def build_door():
fab.local('rm -f dist/stiny')
constants = ['STINY_SERVER_GOOGLE_CLIENT_ID',
'STINY_SERVER_GOOGLE_CLIENT_SECRET', 'STINY_CAL_ID']
config = {}
for key in constants:
config[key] = _get_var(key)
with open('stiny_worker/stiny_worker/config.json', 'w') as ofile:
json.dump(config, ofile)
write_credentials('stiny_worker/stiny_worker/credentials.dat')
gpio_wheel = 'RPi.GPIO-0.6.2-cp27-cp27mu-linux_armv6l.whl'
if not os.path.exists(os.path.join('pex_wheels', gpio_wheel)):
fab.execute(build_rpi_gpio_wheel)
fab.local('rm -f pex_cache/stiny_worker-develop-py2-none-any.whl')
fab.local('pex -vvvv --platform=linux_armv6l -f pex_wheels '
'--cache-dir=pex_cache '
'stiny_worker -m stiny_worker:main -o dist/stiny')
@roles('door')
def deploy_door():
build_door()
fab.put("dist/stiny")
fab.put("stiny-service", "/etc/init.d/stiny", use_sudo=True, mode=744)
fab.put("stiny-tunnel-service", "/etc/init.d/stiny-tunnel", use_sudo=True,
mode=744)
fab.sudo("service stiny-tunnel restart")
fab.sudo("service stiny restart")
| mit | 5,882,313,111,331,927,000 | 31.8 | 78 | 0.622148 | false |
divyekapoor/grepurls | .ycm_extra_conf.py | 1 | 4740 | # Generated by YCM Generator at 2016-02-07 13:10:00.249398
# This file is NOT licensed under the GPLv3, which is the license for the rest
# of YouCompleteMe.
#
# Here's the license text for this file:
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
import os
import ycm_core
flags = [
'-x',
'c++',
'-IPEGTL/',
'-Igflags/build/include',
'-Wall',
'-std=c++11',
]
# Set this to the absolute path to the folder (NOT the file!) containing the
# compile_commands.json file to use that instead of 'flags'. See here for
# more details: http://clang.llvm.org/docs/JSONCompilationDatabase.html
#
# You can get CMake to generate this file for you by adding:
# set( CMAKE_EXPORT_COMPILE_COMMANDS 1 )
# to your CMakeLists.txt file.
#
# Most projects will NOT need to set this to anything; you can just change the
# 'flags' list of compilation flags. Notice that YCM itself uses that approach.
compilation_database_folder = ''
if os.path.exists( compilation_database_folder ):
database = ycm_core.CompilationDatabase( compilation_database_folder )
else:
database = None
SOURCE_EXTENSIONS = [ '.cpp', '.cxx', '.cc', '.c', '.m', '.mm' ]
def DirectoryOfThisScript():
return os.path.dirname( os.path.abspath( __file__ ) )
def MakeRelativePathsInFlagsAbsolute( flags, working_directory ):
if not working_directory:
return list( flags )
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith( '/' ):
new_flag = os.path.join( working_directory, flag )
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith( path_flag ):
path = flag[ len( path_flag ): ]
new_flag = path_flag + os.path.join( working_directory, path )
break
if new_flag:
new_flags.append( new_flag )
return new_flags
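# Worked example (illustrative): with working_directory = '/home/me/proj',
# MakeRelativePathsInFlagsAbsolute(['-I', 'include', '-Ifoo', '-Wall'], ...)
# returns ['-I', '/home/me/proj/include', '-I/home/me/proj/foo', '-Wall'];
# flags that are not path-related pass through unchanged.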
def IsHeaderFile( filename ):
extension = os.path.splitext( filename )[ 1 ]
return extension in [ '.h', '.hxx', '.hpp', '.hh' ]
def GetCompilationInfoForFile( filename ):
# The compilation_commands.json file generated by CMake does not have entries
# for header files. So we do our best by asking the db for flags for a
# corresponding source file, if any. If one exists, the flags for that file
# should be good enough.
if IsHeaderFile( filename ):
basename = os.path.splitext( filename )[ 0 ]
for extension in SOURCE_EXTENSIONS:
replacement_file = basename + extension
if os.path.exists( replacement_file ):
compilation_info = database.GetCompilationInfoForFile(
replacement_file )
if compilation_info.compiler_flags_:
return compilation_info
return None
return database.GetCompilationInfoForFile( filename )
def FlagsForFile( filename, **kwargs ):
if database:
# Bear in mind that compilation_info.compiler_flags_ does NOT return a
# python list, but a "list-like" StringVec object
compilation_info = GetCompilationInfoForFile( filename )
if not compilation_info:
return None
final_flags = MakeRelativePathsInFlagsAbsolute(
compilation_info.compiler_flags_,
compilation_info.compiler_working_dir_ )
else:
relative_to = DirectoryOfThisScript()
final_flags = MakeRelativePathsInFlagsAbsolute( flags, relative_to )
return {
'flags': final_flags,
'do_cache': True
}
| mit | -842,317,956,061,606,100 | 32.617021 | 79 | 0.704008 | false |
PetterKun/biblioteca | principal/views.py | 1 | 8531 | #encoding:utf-8
from principal.models import Obra, Video
from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.core.mail import EmailMessage
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.contrib.auth.models import User
from principal.forms import RegistroForm, ActivacionForm, ObraForm, VideoForm
from django.contrib.auth import login, authenticate, logout
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from principal.funciones import *
from django.db.models import Q
def inicio(request):
if not request.user.is_anonymous():
return HttpResponseRedirect('/perfil')
else:
return HttpResponseRedirect('/login')
def entrar(request):
estado = " "
if not request.user.is_anonymous():
return HttpResponseRedirect('/perfil')
if request.method == 'POST':
        formulario = AuthenticationForm(data=request.POST)
        # Authentication is handled manually here so that inactive accounts
        # can be routed through the PIN activation flow below.
        usuario = request.POST['username']
        clave = request.POST['password']
        acceso = authenticate(username=usuario, password=clave)
        if acceso is not None:
            if acceso.is_active and acceso.estado == 'a':
                login(request, acceso)
                return HttpResponseRedirect('/perfil')
            else:
                pin = generarPin()
                acceso.pin = pin
                acceso.save()
                #titulo = 'Pin de activación - Akiba-Kei Asociación Juvenil'
                #contenido = 'Tu pin es: ' + pin
                #correo = EmailMessage(titulo, contenido, to=[acceso.email])
                #correo.send()
                return HttpResponseRedirect('/activar')
        else:
            estado = "El usuario y/o la contraseña son incorrectos."
else:
formulario = AuthenticationForm()
return render_to_response('login.html',
{
'formulario':formulario,
'estado':estado
},
context_instance=RequestContext(request)
)
@login_required(login_url='/login')
def salir(request):
logout(request)
return HttpResponseRedirect('/')
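# Illustrative URLconf sketch (hypothetical urls.py, not part of this module)
# showing how these views could be wired; the routes match the redirect
# targets used above:
#
#   from django.conf.urls import patterns, url
#   urlpatterns = patterns('principal.views',
#       url(r'^$', 'inicio'),
#       url(r'^login/?$', 'entrar'),
#       url(r'^logout/?$', 'salir'),
#       url(r'^perfil/?$', 'perfil'),
#       url(r'^activar/?$', 'activacion'),
#   )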
@login_required(login_url='/login')
def perfil(request):
usuario = request.user
return render_to_response('perfil.html',
{'usuario':usuario},
context_instance=RequestContext(request)
)
def registro(request):
if request.method == 'POST':
formulario = RegistroForm(request.POST, request.FILES)
if formulario.is_valid():
usuario = formulario.cleaned_data['username']
email = formulario.cleaned_data['email']
password_one = formulario.cleaned_data['password_one']
password_two = formulario.cleaned_data['password_two']
first_name = formulario.cleaned_data['first_name']
last_name = formulario.cleaned_data['last_name']
sexo = formulario.cleaned_data['sexo']
dni = formulario.cleaned_data['dni']
fecha_nacimiento = formulario.cleaned_data['fecha_nacimiento']
direccion = formulario.cleaned_data['direccion']
cp = formulario.cleaned_data['cp']
poblacion = formulario.cleaned_data['poblacion']
provincia = formulario.cleaned_data['provincia']
telefono = formulario.cleaned_data['telefono']
foto = formulario.cleaned_data['foto']
twitter = formulario.cleaned_data['twitter']
facebook = formulario.cleaned_data['facebook']
u = User.objects.create_user(username=usuario, email=email, password=password_one)
u.first_name = first_name
u.last_name = last_name
u.sexo = sexo
u.dni = dni
u.fecha_nacimiento = fecha_nacimiento
u.direccion = direccion
u.cp = cp
u.poblacion = poblacion
u.provincia = provincia
u.telefono = telefono
u.foto = foto
u.twitter = twitter
u.facebook = facebook
u.save()
return HttpResponseRedirect('/login')
else:
formulario = RegistroForm()
return render_to_response('registro.html',
{'formulario':formulario},
context_instance=RequestContext(request))
def activacion(request):
estado = ""
if not request.user.is_anonymous():
return HttpResponseRedirect('/perfil')
if request.method == 'POST':
formulario = ActivacionForm(request.POST)
if formulario.is_valid():
usuario = formulario.cleaned_data['username']
password = formulario.cleaned_data['password']
pin = formulario.cleaned_data['pin']
acceso = authenticate(username=usuario, password=password)
if acceso is not None:
if acceso.pin == pin:
acceso.is_active = True
acceso.estado = 'a'
acceso.save()
return HttpResponseRedirect('/login')
else:
estado = "El pin introducido es incorrecto, por favor intentelo de nuevo."
pin = generarPin()
acceso.pin = pin
acceso.save()
print pin
#titulo = 'Pin de activación - Akiba-Kei Asociación Juvenil'
#contenido = 'Tu pin es: ' + pin
#correo = EmailMessage(titulo, contenido, to=[acceso.email])
#correo.send()
else:
estado = "El usuario y/o la contraseña son incorrectas."
else:
formulario = ActivacionForm()
return render_to_response('activacion.html',
{
'formulario': formulario,
'estado':estado
},
context_instance=RequestContext(request)
)
@staff_member_required
def insertarObra(request):
if request.method == "POST":
formulario = ObraForm(request.POST, request.FILES)
if formulario.is_valid():
formulario.save()
return HttpResponseRedirect('/agregarObra')
else:
formulario = ObraForm()
return render_to_response('agregarobra.html',
{'formulario':formulario},
context_instance=RequestContext(request)
)
@login_required(login_url='/login')
def buscarObra(request):
query_q = request.GET.get('q', '')
query_s = request.GET.get('s', '')
if query_q and query_s:
if query_s == 'titulo':
qset = Q(titulo__icontains = query_q)
elif query_s == 'autor':
qset = Q(autor__icontains = query_q)
elif query_s == 'editorial':
qset = Q(editorial__icontains = query_q)
elif query_s == 'genero':
qset = Q(genero__icontains = query_q)
        elif query_s == 'palabra_clave':
            qset = Q(palabra_clave__icontains = query_q)
        else:
            # Unknown search field: fall back to a title search so that
            # qset is always defined before filtering.
            qset = Q(titulo__icontains = query_q)
        resultados = Obra.objects.filter(qset)
else:
resultados = []
return render_to_response('busquedaObra.html',
{
'resultados':resultados,
'query_q':query_q,
'query_s':query_s
},
context_instance=RequestContext(request)
)
def detalleObra(request, id_obra):
dato = get_object_or_404(Obra, pk=id_obra)
return render_to_response('obra.html',
{'obra':dato},
context_instance=RequestContext(request)
) | apache-2.0 | -5,421,392,188,126,946,000 | 39.794258 | 94 | 0.535484 | false |
mikacashman/CallingApp | lib/CallingApp/CallingAppServer.py | 1 | 23270 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from wsgiref.simple_server import make_server
import sys
import json
import traceback
import datetime
from multiprocessing import Process
from getopt import getopt, GetoptError
from jsonrpcbase import JSONRPCService, InvalidParamsError, KeywordError,\
JSONRPCError, InvalidRequestError
from jsonrpcbase import ServerError as JSONServerError
from os import environ
from ConfigParser import ConfigParser
from biokbase import log
import requests as _requests
import random as _random
import os
from CallingApp.authclient import KBaseAuth as _KBaseAuth
DEPLOY = 'KB_DEPLOYMENT_CONFIG'
SERVICE = 'KB_SERVICE_NAME'
AUTH = 'auth-service-url'
# Note that the error fields do not match the 2.0 JSONRPC spec
def get_config_file():
return environ.get(DEPLOY, None)
def get_service_name():
return environ.get(SERVICE, None)
def get_config():
if not get_config_file():
return None
retconfig = {}
config = ConfigParser()
config.read(get_config_file())
for nameval in config.items(get_service_name() or 'CallingApp'):
retconfig[nameval[0]] = nameval[1]
return retconfig
config = get_config()
from CallingApp.CallingAppImpl import CallingApp # noqa @IgnorePep8
impl_CallingApp = CallingApp(config)
class JSONObjectEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, set):
return list(obj)
if isinstance(obj, frozenset):
return list(obj)
if hasattr(obj, 'toJSONable'):
return obj.toJSONable()
return json.JSONEncoder.default(self, obj)
class JSONRPCServiceCustom(JSONRPCService):
def call(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in a JSON
string or None if there is none.
Arguments:
jsondata -- remote method call in jsonrpc format
"""
result = self.call_py(ctx, jsondata)
if result is not None:
return json.dumps(result, cls=JSONObjectEncoder)
return None
def _call_method(self, ctx, request):
"""Calls given method with given params and returns it value."""
method = self.method_data[request['method']]['method']
params = request['params']
result = None
try:
if isinstance(params, list):
# Does it have enough arguments?
if len(params) < self._man_args(method) - 1:
raise InvalidParamsError('not enough arguments')
# Does it have too many arguments?
if(not self._vargs(method) and len(params) >
self._max_args(method) - 1):
raise InvalidParamsError('too many arguments')
result = method(ctx, *params)
elif isinstance(params, dict):
# Do not accept keyword arguments if the jsonrpc version is
# not >=1.1.
if request['jsonrpc'] < 11:
raise KeywordError
result = method(ctx, **params)
else: # No params
result = method(ctx)
except JSONRPCError:
raise
except Exception as e:
# log.exception('method %s threw an exception' % request['method'])
# Exception was raised inside the method.
newerr = JSONServerError()
newerr.trace = traceback.format_exc()
if isinstance(e.message, basestring):
newerr.data = e.message
else:
# Some exceptions embed other exceptions as the message
newerr.data = repr(e.message)
raise newerr
return result
def call_py(self, ctx, jsondata):
"""
Calls jsonrpc service's method and returns its return value in python
object format or None if there is none.
This method is same as call() except the return value is a python
object instead of JSON string. This method is mainly only useful for
debugging purposes.
"""
rdata = jsondata
# we already deserialize the json string earlier in the server code, no
# need to do it again
# try:
# rdata = json.loads(jsondata)
# except ValueError:
# raise ParseError
# set some default values for error handling
request = self._get_default_vals()
if isinstance(rdata, dict) and rdata:
# It's a single request.
self._fill_request(request, rdata)
respond = self._handle_request(ctx, request)
# Don't respond to notifications
if respond is None:
return None
return respond
elif isinstance(rdata, list) and rdata:
# It's a batch.
requests = []
responds = []
for rdata_ in rdata:
# set some default values for error handling
request_ = self._get_default_vals()
self._fill_request(request_, rdata_)
requests.append(request_)
for request_ in requests:
respond = self._handle_request(ctx, request_)
# Don't respond to notifications
if respond is not None:
responds.append(respond)
if responds:
return responds
# Nothing to respond.
return None
else:
# empty dict, list or wrong type
raise InvalidRequestError
def _handle_request(self, ctx, request):
"""Handles given request and returns its response."""
if self.method_data[request['method']].has_key('types'): # noqa @IgnorePep8
self._validate_params_types(request['method'], request['params'])
result = self._call_method(ctx, request)
# Do not respond to notifications.
if request['id'] is None:
return None
respond = {}
self._fill_ver(request['jsonrpc'], respond)
respond['result'] = result
respond['id'] = request['id']
return respond
class MethodContext(dict):
def __init__(self, logger):
self['client_ip'] = None
self['user_id'] = None
self['authenticated'] = None
self['token'] = None
self['module'] = None
self['method'] = None
self['call_id'] = None
self['rpc_context'] = None
self['provenance'] = None
self._debug_levels = set([7, 8, 9, 'DEBUG', 'DEBUG2', 'DEBUG3'])
self._logger = logger
def log_err(self, message):
self._log(log.ERR, message)
def log_info(self, message):
self._log(log.INFO, message)
def log_debug(self, message, level=1):
if level in self._debug_levels:
pass
else:
level = int(level)
if level < 1 or level > 3:
raise ValueError("Illegal log level: " + str(level))
level = level + 6
self._log(level, message)
def set_log_level(self, level):
self._logger.set_log_level(level)
def get_log_level(self):
return self._logger.get_log_level()
def clear_log_level(self):
self._logger.clear_user_log_level()
def _log(self, level, message):
self._logger.log_message(level, message, self['client_ip'],
self['user_id'], self['module'],
self['method'], self['call_id'])
def provenance(self):
callbackURL = os.environ.get('SDK_CALLBACK_URL')
if callbackURL:
# OK, there's a callback server from which we can get provenance
arg_hash = {'method': 'CallbackServer.get_provenance',
'params': [],
'version': '1.1',
'id': str(_random.random())[2:]
}
body = json.dumps(arg_hash)
response = _requests.post(callbackURL, data=body,
timeout=60)
response.encoding = 'utf-8'
if response.status_code == 500:
if ('content-type' in response.headers and
response.headers['content-type'] ==
'application/json'):
err = response.json()
if 'error' in err:
raise ServerError(**err['error'])
else:
raise ServerError('Unknown', 0, response.text)
else:
raise ServerError('Unknown', 0, response.text)
if not response.ok:
response.raise_for_status()
resp = response.json()
if 'result' not in resp:
raise ServerError('Unknown', 0,
'An unknown server error occurred')
return resp['result'][0]
else:
return self.get('provenance')
class ServerError(Exception):
'''
The call returned an error. Fields:
name - the name of the error.
code - the error code.
message - a human readable error message.
data - the server side stacktrace.
'''
def __init__(self, name, code, message, data=None, error=None):
super(Exception, self).__init__(message)
self.name = name
self.code = code
self.message = message if message else ''
self.data = data or error or ''
# data = JSON RPC 2.0, error = 1.1
def __str__(self):
return self.name + ': ' + str(self.code) + '. ' + self.message + \
'\n' + self.data
def getIPAddress(environ):
xFF = environ.get('HTTP_X_FORWARDED_FOR')
realIP = environ.get('HTTP_X_REAL_IP')
trustXHeaders = config is None or \
config.get('dont_trust_x_ip_headers') != 'true'
if (trustXHeaders):
if (xFF):
return xFF.split(',')[0].strip()
if (realIP):
return realIP.strip()
return environ.get('REMOTE_ADDR')
class Application(object):
# Wrap the wsgi handler in a class definition so that we can
# do some initialization and avoid regenerating stuff over
# and over
def logcallback(self):
self.serverlog.set_log_file(self.userlog.get_log_file())
def log(self, level, context, message):
self.serverlog.log_message(level, message, context['client_ip'],
context['user_id'], context['module'],
context['method'], context['call_id'])
def __init__(self):
submod = get_service_name() or 'CallingApp'
self.userlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, changecallback=self.logcallback,
config=get_config_file())
self.serverlog = log.log(
submod, ip_address=True, authuser=True, module=True, method=True,
call_id=True, logfile=self.userlog.get_log_file())
self.serverlog.set_log_level(6)
self.rpc_service = JSONRPCServiceCustom()
self.method_authentication = dict()
self.rpc_service.add(impl_CallingApp.CallingFBA,
name='CallingApp.CallingFBA',
types=[dict])
self.method_authentication['CallingApp.CallingFBA'] = 'required' # noqa
self.rpc_service.add(impl_CallingApp.status,
name='CallingApp.status',
types=[dict])
authurl = config.get(AUTH) if config else None
self.auth_client = _KBaseAuth(authurl)
def __call__(self, environ, start_response):
# Context object, equivalent to the perl impl CallContext
ctx = MethodContext(self.userlog)
ctx['client_ip'] = getIPAddress(environ)
status = '500 Internal Server Error'
try:
body_size = int(environ.get('CONTENT_LENGTH', 0))
except (ValueError):
body_size = 0
if environ['REQUEST_METHOD'] == 'OPTIONS':
# we basically do nothing and just return headers
status = '200 OK'
rpc_result = ""
else:
request_body = environ['wsgi.input'].read(body_size)
try:
req = json.loads(request_body)
except ValueError as ve:
err = {'error': {'code': -32700,
'name': "Parse error",
'message': str(ve),
}
}
rpc_result = self.process_error(err, ctx, {'version': '1.1'})
else:
ctx['module'], ctx['method'] = req['method'].split('.')
ctx['call_id'] = req['id']
ctx['rpc_context'] = {
'call_stack': [{'time': self.now_in_utc(),
'method': req['method']}
]
}
prov_action = {'service': ctx['module'],
'method': ctx['method'],
'method_params': req['params']
}
ctx['provenance'] = [prov_action]
try:
token = environ.get('HTTP_AUTHORIZATION')
# parse out the method being requested and check if it
# has an authentication requirement
method_name = req['method']
auth_req = self.method_authentication.get(
method_name, 'none')
if auth_req != 'none':
if token is None and auth_req == 'required':
err = JSONServerError()
err.data = (
'Authentication required for ' +
'CallingApp ' +
'but no authentication header was passed')
raise err
elif token is None and auth_req == 'optional':
pass
else:
try:
user = self.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
except Exception, e:
if auth_req == 'required':
err = JSONServerError()
err.data = \
"Token validation failed: %s" % e
raise err
if (environ.get('HTTP_X_FORWARDED_FOR')):
self.log(log.INFO, ctx, 'X-Forwarded-For: ' +
environ.get('HTTP_X_FORWARDED_FOR'))
self.log(log.INFO, ctx, 'start method')
rpc_result = self.rpc_service.call(ctx, req)
self.log(log.INFO, ctx, 'end method')
status = '200 OK'
except JSONRPCError as jre:
err = {'error': {'code': jre.code,
'name': jre.message,
'message': jre.data
}
}
trace = jre.trace if hasattr(jre, 'trace') else None
rpc_result = self.process_error(err, ctx, req, trace)
except Exception:
err = {'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error ' +
'occurred',
}
}
rpc_result = self.process_error(err, ctx, req,
traceback.format_exc())
# print 'Request method was %s\n' % environ['REQUEST_METHOD']
# print 'Environment dictionary is:\n%s\n' % pprint.pformat(environ)
# print 'Request body was: %s' % request_body
# print 'Result from the method call is:\n%s\n' % \
# pprint.pformat(rpc_result)
if rpc_result:
response_body = rpc_result
else:
response_body = ''
response_headers = [
('Access-Control-Allow-Origin', '*'),
('Access-Control-Allow-Headers', environ.get(
'HTTP_ACCESS_CONTROL_REQUEST_HEADERS', 'authorization')),
('content-type', 'application/json'),
('content-length', str(len(response_body)))]
start_response(status, response_headers)
return [response_body]
def process_error(self, error, context, request, trace=None):
if trace:
self.log(log.ERR, context, trace.split('\n')[0:-1])
if 'id' in request:
error['id'] = request['id']
if 'version' in request:
error['version'] = request['version']
e = error['error'].get('error')
if not e:
error['error']['error'] = trace
elif 'jsonrpc' in request:
error['jsonrpc'] = request['jsonrpc']
error['error']['data'] = trace
else:
error['version'] = '1.0'
error['error']['error'] = trace
return json.dumps(error)
def now_in_utc(self):
# noqa Taken from http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone @IgnorePep8
dtnow = datetime.datetime.now()
dtutcnow = datetime.datetime.utcnow()
delta = dtnow - dtutcnow
hh, mm = divmod((delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60,
60)
return "%s%+02d:%02d" % (dtnow.isoformat(), hh, mm)
application = Application()
# This is the uwsgi application dictionary. On startup uwsgi will look
# for this dict and pull its configuration from here.
# This simply lists where to "mount" the application in the URL path
#
# This uwsgi module "magically" appears when running the app within
# uwsgi and is not available otherwise, so wrap an exception handler
# around it
#
# To run this server in uwsgi with 4 workers listening on port 9999 use:
# uwsgi -M -p 4 --http :9999 --wsgi-file _this_file_
# To run a using the single threaded python BaseHTTP service
# listening on port 9999 by default execute this file
#
try:
import uwsgi
# Before we do anything with the application, see if the
# configs specify patching all std routines to be asynch
# *ONLY* use this if you are going to wrap the service in
# a wsgi container that has enabled gevent, such as
# uwsgi with the --gevent option
if config is not None and config.get('gevent_monkeypatch_all', False):
print "Monkeypatching std libraries for async"
from gevent import monkey
monkey.patch_all()
uwsgi.applications = {'': application}
except ImportError:
# Not available outside of wsgi, ignore
pass
_proc = None
def start_server(host='localhost', port=0, newprocess=False):
'''
By default, will start the server on localhost on a system assigned port
in the main thread. Excecution of the main thread will stay in the server
main loop until interrupted. To run the server in a separate process, and
thus allow the stop_server method to be called, set newprocess = True. This
will also allow returning of the port number.'''
global _proc
if _proc:
raise RuntimeError('server is already running')
httpd = make_server(host, port, application)
port = httpd.server_address[1]
print "Listening on port %s" % port
if newprocess:
_proc = Process(target=httpd.serve_forever)
_proc.daemon = True
_proc.start()
else:
httpd.serve_forever()
return port
def stop_server():
global _proc
_proc.terminate()
_proc = None
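# Usage sketch (illustrative): embedding the service in another process
# instead of running it under uwsgi or from the CLI entry point below:
#
#   port = start_server(host='localhost', port=9999, newprocess=True)
#   # ... POST JSON-RPC requests to http://localhost:<port>/ ...
#   stop_server()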
def process_async_cli(input_file_path, output_file_path, token):
exit_code = 0
with open(input_file_path) as data_file:
req = json.load(data_file)
if 'version' not in req:
req['version'] = '1.1'
if 'id' not in req:
req['id'] = str(_random.random())[2:]
ctx = MethodContext(application.userlog)
if token:
user = application.auth_client.get_user(token)
ctx['user_id'] = user
ctx['authenticated'] = 1
ctx['token'] = token
if 'context' in req:
ctx['rpc_context'] = req['context']
ctx['CLI'] = 1
ctx['module'], ctx['method'] = req['method'].split('.')
prov_action = {'service': ctx['module'], 'method': ctx['method'],
'method_params': req['params']}
ctx['provenance'] = [prov_action]
resp = None
try:
resp = application.rpc_service.call_py(ctx, req)
except JSONRPCError as jre:
trace = jre.trace if hasattr(jre, 'trace') else None
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': jre.code,
'name': jre.message,
'message': jre.data,
'error': trace}
}
except Exception:
trace = traceback.format_exc()
resp = {'id': req['id'],
'version': req['version'],
'error': {'code': 0,
'name': 'Unexpected Server Error',
'message': 'An unexpected server error occurred',
'error': trace}
}
if 'error' in resp:
exit_code = 500
with open(output_file_path, "w") as f:
f.write(json.dumps(resp, cls=JSONObjectEncoder))
return exit_code
if __name__ == "__main__":
if (len(sys.argv) >= 3 and len(sys.argv) <= 4 and
os.path.isfile(sys.argv[1])):
token = None
if len(sys.argv) == 4:
if os.path.isfile(sys.argv[3]):
with open(sys.argv[3]) as token_file:
token = token_file.read()
else:
token = sys.argv[3]
sys.exit(process_async_cli(sys.argv[1], sys.argv[2], token))
try:
opts, args = getopt(sys.argv[1:], "", ["port=", "host="])
except GetoptError as err:
# print help information and exit:
print str(err) # will print something like "option -a not recognized"
sys.exit(2)
port = 9999
host = 'localhost'
for o, a in opts:
if o == '--port':
port = int(a)
elif o == '--host':
host = a
print "Host set to %s" % host
else:
assert False, "unhandled option"
start_server(host=host, port=port)
# print "Listening on port %s" % port
# httpd = make_server( host, port, application)
#
# httpd.serve_forever()
| mit | 7,628,604,344,491,406,000 | 36.05414 | 151 | 0.529824 | false |
ksachs/invenio | modules/bibauthorid/lib/bibauthorid_templates.py | 1 | 142829 | ## -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Bibauthorid HTML templates"""
# pylint: disable=W0105
# pylint: disable=C0301
# from cgi import escape
# from urllib import quote
#
import invenio.bibauthorid_config as bconfig
from invenio.config import CFG_SITE_LANG, CFG_ETCDIR
from invenio.config import CFG_SITE_URL, CFG_SITE_SECURE_URL, CFG_BASE_URL, CFG_INSPIRE_SITE
from invenio.config import CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL, CFG_WEBAUTHORPROFILE_CFG_HEPNAMES_EMAIL
from invenio.bibformat import format_record
from invenio.session import get_session
from invenio.search_engine_utils import get_fieldvalues
from invenio.bibauthorid_config import PERSONID_EXTERNAL_IDENTIFIER_MAP, CREATE_NEW_PERSON, CFG_BIBAUTHORID_ENABLED, \
BIBAUTHORID_CFG_SITE_NAME
from invenio.bibauthorid_webapi import get_person_redirect_link, get_canonical_id_from_person_id, \
get_person_names_from_id, get_person_info_by_pid
from invenio.bibauthorid_frontinterface import get_uid_of_author
from invenio.bibauthorid_frontinterface import get_bibrefrec_name_string
from invenio.bibauthorid_frontinterface import get_canonical_name_of_author
from invenio.messages import gettext_set_language, wash_language
from invenio.webuser import get_email
from invenio.htmlutils import escape_html
from jinja2 import Environment, FileSystemLoader, TemplateNotFound
from invenio.bibauthorid_webutils import group_format_number
from invenio.websearch_templates import tmpl_citesummary_get_link
from invenio.websearch_templates import tmpl_citesummary_get_link_for_rep_breakdown
# from invenio.textutils import encode_for_xml
class WebProfileMenu():
def get_menu_items(self):
return self.menu
def _set_is_owner(self, is_owner):
if isinstance(is_owner, bool):
self.owner = is_owner
def _set_is_admin(self, is_admin):
if isinstance(is_admin, bool):
self.is_admin = is_admin
def _set_canonical_name(self, canonical_name):
if isinstance(canonical_name, str):
self.canonical_name = canonical_name
def _configure_localisation(self, ln):
self.localise = gettext_set_language(ln)
def _set_active_menu_item(self, current_page):
for item in self.menu:
if item['page'] == current_page:
item['active'] = True
def _get_standard_menu_items(self):
personalise = ""
if self.owner:
personalise = "Your "
menu = [
{
'page': "profile",
'text': "%s" % self.localise("View %sProfile" % personalise),
"static": False,
"active": False,
"canonical_name": self.canonical_name,
"disabled": self.canonical_name is ""
},
{
'page': "manage_profile",
'text': "%s" % self.localise("Manage %sProfile" % personalise),
'static': False,
'active': False,
"canonical_name": self.canonical_name,
"disabled": self.canonical_name is ""
},
{
'page': "claim",
'text': "%s" % self.localise("Manage %sPublications" % personalise),
'static': False,
'active': False,
"canonical_name": self.canonical_name,
"disabled": self.canonical_name is ""
},
{
'page': "help",
'text': "%s" % self.localise("Help"),
'static': True,
'active': False,
}
]
return menu
def _get_admin_menu_items(self):
admin_menu_items = self._get_standard_menu_items()
open_tickets_item = {
'page': "claim/tickets_admin",
'text': "%s" % self.localise("Open Tickets"),
'static': True,
'active': False
}
admin_menu_items.append(open_tickets_item)
return list(admin_menu_items)
def _create_menu(self, current_page):
if self.is_admin:
self.menu = self._get_admin_menu_items()
else:
self.menu = self._get_standard_menu_items()
self._set_active_menu_item(current_page)
def __init__(self, canonical_name, current_page, ln, is_owner=False, is_admin=False):
self._configure_localisation(ln)
self._set_canonical_name(canonical_name)
self._set_is_owner(is_owner)
self._set_is_admin(is_admin)
self._create_menu(current_page)
class WebProfilePage():
TEMPLATES_DIR = "%s/bibauthorid/templates" % CFG_ETCDIR
loader = FileSystemLoader(TEMPLATES_DIR)
environment = Environment(loader=loader)
environment.filters['groupformat'] = group_format_number
def __init__(self, page, heading, no_cache=False):
self.css_dir = CFG_BASE_URL + "/css"
self.legacy_css_dir = CFG_BASE_URL + "/img"
self.img_dir = CFG_BASE_URL + "/img"
self.scripts_dir = CFG_BASE_URL + "/js"
self.url = CFG_BASE_URL + "/author"
self.scripts = [
"json3.min.js",
"jquery-ui.min.js",
"jquery.form.js",
"jquery.dataTables.min.js",
"jquery-lightbox/js/jquery.lightbox-0.5.js",
"jquery.omniwindow.js",
"spin.min.js",
"sly.min.js",
"parsley.js",
"bootstrap.min.js?g=c29c5bc",
"underscore-min.js",
"backbone.js",
"handlebars.js",
"author-handlebars-templates.js",
"bibauthorid.js?g=c29c5bc"
]
self.legacy_stylesheets = ["jquery-ui/themes/smoothness/jquery-ui.css",
"datatables_jquery-ui.css"]
self.stylesheets = [
"bootstrap.min.css?g=c29c5bc",
"bibauthorid.css?g=c29c5bc"
]
self.stylesheets = ["%s/%s" % (self.css_dir, item) for item in self.stylesheets]
self.stylesheets = self.stylesheets + \
["%s/%s" % (self.legacy_css_dir, item) for item in self.legacy_stylesheets]
self._initialise_class_variables()
self.no_cache = no_cache
self.heading = heading
self.page = page
self.bootstrap_data = None
def _initialise_class_variables(self):
self.menu = None
self.debug = None
def create_profile_menu(self, canonical_name, ln, is_owner=False, is_admin=False):
menu = WebProfileMenu(canonical_name, self.page, ln, is_owner, is_admin)
self.menu = menu.get_menu_items()
def add_profile_menu(self, menu):
self.menu = menu.get_menu_items()
def add_debug_info(self, debug):
self.debug = debug
def add_bootstrapped_data(self, data):
self.bootstrap_data = data
def get_head(self):
return WebProfilePage.environment.get_template("head.html").render({
'no_cache': self.no_cache,
'scripts': self.scripts,
'stylesheets': self.stylesheets,
'scripts_dir': self.scripts_dir
})
def get_body(self):
return WebProfilePage.environment.get_template("index.html").render({
'title': self.heading,
'menu': self.menu,
'url': self.url,
'debug': self.debug,
'bootstrap': self.bootstrap_data
})
@staticmethod
def _load_named_template(template):
environment = WebProfilePage.environment
        if template != "generic":
loaded_template = environment.get_template("%s.html" % str(template))
else:
loaded_template = environment.get_template("generic_wrapper.html")
return loaded_template
def _get_standard_author_page_parameters(self):
return {
'title': self.heading,
'menu': self.menu,
'url': self.url,
'debug': self.debug,
'bootstrap': self.bootstrap_data,
'search_form_url': "%s/author/search" % CFG_BASE_URL
}
def get_wrapped_body(self, template, content):
parameters = self._get_standard_author_page_parameters()
try:
loaded_template = self._load_named_template(template)
parameters.update(content)
except TemplateNotFound:
loaded_template = self._load_named_template("generic")
parameters.update({
'html': "Unable to load named template.<br>%s" % str(content)
})
return loaded_template.render(parameters)
@staticmethod
def render_template(template, content):
try:
loaded_template = WebProfilePage._load_named_template(template)
except TemplateNotFound:
return "Unable to load named template: %s.<br>%s" % (template, str(content))
return loaded_template.render(content)
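    # Usage sketch (illustrative; these are real members of this class):
    # building a minimal profile page outside the web handlers:
    #
    #   page = WebProfilePage("profile", "John Doe", no_cache=True)
    #   page.create_profile_menu("J.Doe.1", "en", is_owner=True)
    #   html = page.get_head() + page.get_body()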
@staticmethod
def render_citations_summary_content(citations, canonical_name):
        def _get_breakdown_categories_bounds(category):
            """
            An example of a category string would be 'Famous papers (250-499)'.
            This returns (250, 499), which are the lower and upper bound.
            """
            bounds_str = category.split(')')[0].split('(')[1]
            try:
                return (int(bounds_str), 0)
            except ValueError:
                if '+' in bounds_str:
                    return (int(bounds_str.strip('+')), 1000000)
                else:
                    return map(int, bounds_str.split('-'))
citeable_breakdown_queries = dict()
published_breakdown_queries = dict()
for category in citations['breakdown_categories']:
low, high = _get_breakdown_categories_bounds(category)
citeable_breakdown_queries[
category] = tmpl_citesummary_get_link_for_rep_breakdown(
canonical_name,
'author',
'collection:citeable',
'cited',
low,
high)
published_breakdown_queries[
category] = tmpl_citesummary_get_link_for_rep_breakdown(
canonical_name,
'author',
'collection:published',
'cited',
low,
high)
try:
result = WebProfilePage.environment.get_template("citations_summary.html").render({
'papers_num': citations['papers_num'],
'citeable': {'avg_cites': citations['data']['Citeable papers']['avg_cites'],
'num': len(citations['papers']['Citeable papers']),
'citations_num': citations['data']['Citeable papers']['total_cites'],
'h_index': citations['data']['Citeable papers']['h-index'],
'breakdown': citations['data']['Citeable papers']['breakdown'],
'breakdown_queries': citeable_breakdown_queries},
'published': {'avg_cites': citations['data']['Published only']['avg_cites'],
'num': len(citations['papers']['Published only']),
'citations_num': citations['data']['Published only']['total_cites'],
'h_index': citations['data']['Published only']['h-index'],
'breakdown': citations['data']['Published only']['breakdown'],
'breakdown_queries': published_breakdown_queries},
'breakdown_categories': citations['breakdown_categories'],
'hindex_fine_print_link': "%s/help/citation-metrics" % CFG_BASE_URL,
'citation_fine_print_link': "%s/help/citation-metrics" % CFG_BASE_URL,
'citeable_papers_link': tmpl_citesummary_get_link(canonical_name, 'author', 'collection:citeable'),
'selfcite_link': '%s/search?ln=en&p=author:%s&of=hcs2' % (CFG_BASE_URL, canonical_name),
'published_only_papers_link': tmpl_citesummary_get_link(canonical_name, 'author', 'collection:published'),
})
        except Exception:
result = "No citations data."
return result
@staticmethod
def render_publications_box_content(template_vars):
"""
Creates HTML Markup for Publications Box
@param **kwargs: A dictionary with at least the following keys:
internal_pubs
external_pubs
datasets
@return: HTML Markup
@rtype: str
"""
return WebProfilePage.environment.get_template("publications_box.html").render(template_vars)
def get_profile_page_body(self, last_computed, trial="default"):
        if trial is not None and trial != "default":
            file_ext = "_" + str(trial)
        else:
            file_ext = str()
result = str()
try:
template = WebProfilePage.environment.get_template("profile_page%s.html" % file_ext)
except TemplateNotFound:
template = WebProfilePage.environment.get_template("profile_page.html")
result = "<!-- Failed to load template for trial: %s -->" % str(trial)
# The menu should not be visible if BAI is disabled.
if not CFG_BIBAUTHORID_ENABLED:
self.menu = None
        return template.render({
'title': self.heading,
'menu': self.menu,
'url': self.url,
'debug': self.debug,
'bootstrap': self.bootstrap_data,
'last_computed': last_computed,
'citation_fine_print_link': "%s/help/citation-metrics" % CFG_BASE_URL
}) + result
import xml.sax.saxutils
class Template:
"""Templating functions used by aid"""
def __init__(self, language=CFG_SITE_LANG):
"""Set defaults for all aid template output"""
self.language = language
self._ = gettext_set_language(wash_language(language))
def tmpl_person_detail_layout(self, content):
'''
writes HTML content into the person css container
@param content: HTML content
@type content: string
@return: HTML code
@rtype: string
'''
html = []
h = html.append
h('<div id="aid_person">')
h(content)
h('</div>')
return "\n".join(html)
def tmpl_merge_transaction_box(self, teaser_key, messages, show_close_btn=True):
'''
Creates a notification box based on the jQuery UI style
@param teaser_key: key to a dict which returns the teaser
@type teaser_key: string
@param messages: list of keys to a dict which return the message to display in the box
@type messages: list of strings
@param show_close_btn: display close button [x]
@type show_close_btn: boolean
@return: HTML code
@rtype: string
'''
transaction_teaser_dict = {'success': 'Success!',
'failure': 'Failure!'}
transaction_message_dict = {'confirm_success': '%s merge transaction%s successfully executed.',
'confirm_failure':
'%s merge transaction%s failed. This happened because there is at least one profile in the merging list that is either connected to a user or it has claimed papers.'
' Please edit the list accordingly.',
'confirm_operation': '%s merge transaction%s successfully ticketized.'}
teaser = self._(transaction_teaser_dict[teaser_key])
html = []
h = html.append
for key in transaction_message_dict.keys():
same_kind = [mes for mes in messages if mes == key]
trans_no = len(same_kind)
if trans_no == 0:
continue
elif trans_no == 1:
args = [trans_no, '']
else:
args = [trans_no, 's']
color = ''
if teaser_key == 'failure':
color = 'background: #FFC2C2;'
message = self._(transaction_message_dict[key] % tuple(args))
h('<div id="aid_notification_' + key + '" class="ui-widget ui-alert">')
h(' <div style="%s margin-top: 20px; padding: 0pt 0.7em;" class="ui-state-highlight ui-corner-all">' %
(color))
h(' <p><span style="float: left; margin-right: 0.3em;" class="ui-icon ui-icon-info"></span>')
h(' <strong>%s</strong> %s' % (teaser, message))
if show_close_btn:
h(
' <span style="float:right; margin-right: 0.3em;"><a rel="nofollow" href="#" class="aid_close-notify" style="border-style: none;">X</a></span></p>')
h(' </div>')
h('</div>')
return "\n".join(html)
def tmpl_search_ticket_box(self, teaser_key, message_key, bibrefs, show_close_btn=False):
'''
Creates a box informing about a claim in progress for
the search.
@param teaser_key: key to a dict which returns the teaser
@type teaser_key: string
@param message_key: key to a dict which returns the message to display in the box
@type message_key: string
@param bibrefs: bibrefs which are about to be assigned
@type bibrefs: list of strings
@param show_close_btn: display close button [x]
@type show_close_btn: boolean
@return: HTML code
@rtype: string
'''
error_teaser_dict = {'person_search': 'Person search for assignment in progress!'}
error_message_dict = {'assign_papers': 'You are searching for a person to assign the following paper%s:'}
teaser = self._(error_teaser_dict[teaser_key])
arg = ''
if len(bibrefs) > 1:
arg = 's'
        message = self._(error_message_dict[message_key]) % arg
html = []
h = html.append
h('<div id="aid_notification_' + teaser_key + '" class="ui-widget ui-alert">')
h(' <div style="margin-top: 20px; padding: 0pt 0.7em;" class="ui-state-highlight ui-corner-all">')
h(' <p><span style="float: left; margin-right: 0.3em;" class="ui-icon ui-icon-info"></span>')
h(' <strong>%s</strong> %s ' % (teaser, message))
h("<ul>")
for paper in bibrefs:
if ',' in paper:
pbibrec = paper.split(',')[1]
else:
pbibrec = paper
h("<li>%s</li>"
% (format_record(int(pbibrec), "ha")))
h("</ul>")
h('<a rel="nofollow" id="checkout" href="%s/author/claim/action?cancel_search_ticket=True">' %
(CFG_SITE_URL,) + self._('Quit searching.') + '</a>')
if show_close_btn:
h(
' <span style="float:right; margin-right: 0.3em;"><a rel="nofollow" href="#" class="aid_close-notify">X</a></span></p>')
h(' </div>')
h('</div>')
h('<p> </p>')
return "\n".join(html)
def tmpl_merge_ticket_box(self, teaser_key, message_key, primary_cname):
        message = self._('When you merge a set of profiles, all the information stored will be assigned to the primary profile. This includes papers, ids or citations.'
                         ' After merging, only the primary profile will remain in the system; all other profiles will be automatically deleted.<br />')
error_teaser_dict = {'person_search': message}
error_message_dict = {'merge_profiles': 'You are about to merge the following profiles:'}
teaser = self._(error_teaser_dict[teaser_key])
message = self._(error_message_dict[message_key])
html = []
h = html.append
h('<div id="aid_notification_' + teaser_key + '" class="ui-widget ui-alert">')
h(' <div style="margin-top: 20px; padding: 0pt 0.7em;" class="ui-state-highlight ui-corner-all">')
h(' <p><span style="float: left; margin-right: 0.3em;" class="ui-icon ui-icon-info"></span>')
        h('          <strong>%s</strong> <br />%s ' % (teaser, message))
h("<table id=\"mergeList\" >\
<tr></tr>\
<th></th>\
<th></th>\
<th></th>\
<th></th>\
<tr></tr>")
h("<tr><td></td><td><a id=\"primaryProfile\" href='%s/author/profile/%s'target='_blank'>%s</a></td><td id=\"primaryProfileTd\">primary profile</td><td></td></tr>"
% (CFG_SITE_URL, primary_cname, primary_cname))
# for profile in profiles:
# h("<li><a href='%s'target='_blank' class=\"profile\" >%s</a><a class=\"setPrimaryProfile\">Set as primary</a> <a class=\"removeProfile\">Remove</a></li>"
# % (profile, profile))
h("</table>")
h('<div id="mergeListButtonWrapper">')
h('<form action="%s/author/claim/action" method="get"><input type="hidden" name="cancel_merging" value="True" /> <input type="hidden" name="primary_profile" value="%s" /> <input type="submit" id="cancelMergeButton" class="aid_btn_red" value="%s" /></form>' %
(CFG_SITE_URL, primary_cname, self._('Cancel merging')))
h('<form action="%s/author/claim/action" method="get"><input type="hidden" name="merge" value="True" /><input type="submit" id="mergeButton" class="aid_btn_green" value="%s" /></form>' %
(CFG_SITE_URL, self._('Merge profiles')))
h(' </div>')
h(' </div>')
h('</div>')
h('<p> </p>')
return "\n".join(html)
def tmpl_author_confirmed(self, bibref, pid, verbiage_dict={'alt_confirm': 'Confirmed.',
'confirm_text':
'This record assignment has been confirmed.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget assignment decision',
'alt_repeal': 'Repeal!',
'repeal_text': 'Repeal record assignment',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'
},
show_reset_button=True):
'''
Generate play per-paper links for the table for the
status "confirmed"
@param bibref: construct of unique ID for this author on this paper
@type bibref: string
@param pid: the Person ID
@type pid: int
@param verbiage_dict: language for the link descriptions
@type verbiage_dict: dict
'''
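        # The 'forget decision' link is only rendered when resets are allowed.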
stri = ('<!--2!--><span id="aid_status_details"> '
'<img src="%(url)s/img/aid_check.png" alt="%(alt_confirm)s" />'
'%(confirm_text)s <br>')
if show_reset_button:
stri = stri + (
'<a rel="nofollow" id="aid_reset_gr" class="aid_grey op_action" href="%(url)s/author/claim/action?reset=True&selection=%(ref)s&pid=%(pid)s">'
'<img src="%(url)s/img/aid_reset_gray.png" alt="%(alt_forget)s" style="margin-left:22px;" />'
'%(forget_text)s</a><br>')
stri = stri + (
'<a rel="nofollow" id="aid_repeal" class="aid_grey op_action" href="%(url)s/author/claim/action?repeal=True&selection=%(ref)s&pid=%(pid)s">'
'<img src="%(url)s/img/aid_reject_gray.png" alt="%(alt_repeal)s" style="margin-left:22px;"/>'
'%(repeal_text)s</a><br>'
'<a rel="nofollow" id="aid_to_other" class="aid_grey op_action" href="%(url)s/author/claim/action?to_other_person=True&selection=%(ref)s">'
'<img src="%(url)s/img/aid_to_other_gray.png" alt="%(alt_to_other)s" style="margin-left:22px;"/>'
'%(to_other_text)s</a> </span>')
return (stri
% ({'url': CFG_SITE_URL, 'ref': bibref, 'pid': pid,
'alt_confirm': verbiage_dict['alt_confirm'],
'confirm_text': verbiage_dict['confirm_text'],
'alt_forget': verbiage_dict['alt_forget'],
'forget_text': verbiage_dict['forget_text'],
'alt_repeal': verbiage_dict['alt_repeal'],
'repeal_text': verbiage_dict['repeal_text'],
'to_other_text': verbiage_dict['to_other_text'],
'alt_to_other': verbiage_dict['alt_to_other']}))
def tmpl_author_repealed(self, bibref, pid, verbiage_dict={'alt_confirm': 'Confirm!',
'confirm_text': 'Confirm record assignment.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget assignment decision',
'alt_repeal': 'Rejected!',
'repeal_text': 'Repeal this record assignment.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'
}):
'''
Generate play per-paper links for the table for the
status "repealed"
@param bibref: construct of unique ID for this author on this paper
@type bibref: string
@param pid: the Person ID
@type pid: int
@param verbiage_dict: language for the link descriptions
@type verbiage_dict: dict
'''
stri = ('<!---2!--><span id="aid_status_details"> '
'<img src="%(url)s/img/aid_reject.png" alt="%(alt_repeal)s" />'
'%(repeal_text)s <br>'
'<a rel="nofollow" id="aid_confirm" class="aid_grey op_action" href="%(url)s/author/claim/action?confirm=True&selection=%(ref)s&pid=%(pid)s">'
'<img src="%(url)s/img/aid_check_gray.png" alt="%(alt_confirm)s" style="margin-left: 22px;" />'
'%(confirm_text)s</a><br>'
'<a rel="nofollow" id="aid_to_other" class="aid_grey op_action" href="%(url)s/author/claim/action?to_other_person=True&selection=%(ref)s">'
'<img src="%(url)s/img/aid_to_other_gray.png" alt="%(alt_to_other)s" style="margin-left:22px;"/>'
'%(to_other_text)s</a> </span>')
return (stri
% ({'url': CFG_SITE_URL, 'ref': bibref, 'pid': pid,
'alt_confirm': verbiage_dict['alt_confirm'],
'confirm_text': verbiage_dict['confirm_text'],
'alt_forget': verbiage_dict['alt_forget'],
'forget_text': verbiage_dict['forget_text'],
'alt_repeal': verbiage_dict['alt_repeal'],
'repeal_text': verbiage_dict['repeal_text'],
'to_other_text': verbiage_dict['to_other_text'],
'alt_to_other': verbiage_dict['alt_to_other']}))
def tmpl_author_undecided(self, bibref, pid, verbiage_dict={'alt_confirm': 'Confirm!',
'confirm_text': 'Confirm record assignment.',
'alt_repeal': 'Rejected!',
'repeal_text': 'This record has been repealed.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'
}):
'''
Generate play per-paper links for the table for the
status "no decision taken yet"
@param bibref: construct of unique ID for this author on this paper
@type bibref: string
@param pid: the Person ID
@type pid: int
@param verbiage_dict: language for the link descriptions
@type verbiage_dict: dict
'''
# batchprocess?mconfirm=True&bibrefs=['100:17,16']&pid=1
string = ('<!--0!--><span id="aid_status_details"> '
'<a rel="nofollow" id="aid_confirm" class="op_action" href="%(url)s/author/claim/action?confirm=True&selection=%(ref)s&pid=%(pid)s">'
'<img src="%(url)s/img/aid_check.png" alt="%(alt_confirm)s" />'
'%(confirm_text)s</a><br />'
'<a rel="nofollow" id="aid_repeal" class="op_action" href="%(url)s/author/claim/action?repeal=True&selection=%(ref)s&pid=%(pid)s">'
'<img src="%(url)s/img/aid_reject.png" alt="%(alt_repeal)s" />'
'%(repeal_text)s</a> <br />'
'<a rel="nofollow" id="aid_to_other" class="op_action" href="%(url)s/author/claim/action?to_other_person=True&selection=%(ref)s">'
'<img src="%(url)s/img/aid_to_other.png" alt="%(alt_to_other)s" />'
'%(to_other_text)s</a> </span>')
return (string
% ({'url': CFG_SITE_URL, 'ref': bibref, 'pid': pid,
'alt_confirm': verbiage_dict['alt_confirm'],
'confirm_text': verbiage_dict['confirm_text'],
'alt_repeal': verbiage_dict['alt_repeal'],
'repeal_text': verbiage_dict['repeal_text'],
'to_other_text': verbiage_dict['to_other_text'],
'alt_to_other': verbiage_dict['alt_to_other']}))
def __tmpl_admin_records_table(
self, form_id, person_id, bibrecids, verbiage_dict={
'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'Confirm',
'b_repeal': 'Repeal',
'b_to_others':
'Assign to another person',
'b_forget': 'Forget decision'},
buttons_verbiage_dict={
'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm':
'Confirm',
'b_repeal':
'Repeal',
'b_to_others':
'Assign to another person',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Confirm!',
'confirm_text':
'Confirm record assignment.',
'alt_repeal':
'Rejected!',
'repeal_text': 'This record has been repealed.'},
'record_confirmed': {'alt_confirm': 'Confirmed.',
'confirm_text':
'This record assignment has been confirmed.',
'alt_forget':
'Forget decision!',
'forget_text':
'Forget assignment decision',
'alt_repeal':
'Repeal!',
'repeal_text': 'Repeal record assignment'},
'record_repealed': {'alt_confirm': 'Confirm!',
'confirm_text':
'Confirm record assignment.',
'alt_forget':
'Forget decision!',
'forget_text':
'Forget assignment decision',
'alt_repeal':
'Rejected!',
'repeal_text': 'Repeal this record assignment.'}},
show_reset_button=True):
'''
Generate the big tables for the person overview page
@param form_id: name of the form
@type form_id: string
@param person_id: Person ID
@type person_id: int
@param bibrecids: List of records to display
@type bibrecids: list
@param verbiage_dict: language for the elements
@type verbiage_dict: dict
@param buttons_verbiage_dict: language for the buttons
@type buttons_verbiage_dict: dict
'''
no_papers_html = ['<div style="text-align:left;margin-top:1em;"><strong>']
no_papers_html.append('%s' % self._(verbiage_dict['no_doc_string']))
no_papers_html.append('</strong></div>')
if not bibrecids or not person_id:
return "\n".join(no_papers_html)
pp_html = []
h = pp_html.append
h('<form id="%s" action="/author/claim/action" method="post">'
% (form_id))
# +self._(' On all pages: '))
h('<div class="aid_reclist_selector">')
h('<a rel="nofollow" rel="group_1" href="#select_all">' + self._('Select All') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#select_none">' + self._('Select None') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#invert_selection">' + self._('Invert Selection') + '</a> | ')
h('<a rel="nofollow" id="toggle_claimed_rows" href="javascript:toggle_claimed_rows();" '
'alt="hide">' + self._('Hide successful claims') + '</a>')
h('</div>')
h('<div class="aid_reclist_buttons">')
h(('<img src="%s/img/aid_90low_right.png" alt="∟" />')
% (CFG_SITE_URL))
h('<input type="hidden" name="pid" value="%s" />' % (person_id))
h('<input type="submit" name="assign" value="%s" class="aid_btn_blue" />' % self._(verbiage_dict['b_confirm']))
h('<input type="submit" name="reject" value="%s" class="aid_btn_blue" />' % self._(verbiage_dict['b_repeal']))
h('<input type="submit" name="to_other_person" value="%s" class="aid_btn_blue" />' %
self._(verbiage_dict['b_to_others']))
# if show_reset_button:
# h('<input type="submit" name="reset" value="%s" class="aid_btn_blue" />' % verbiage_dict['b_forget'])
h(" </div>")
h('<table class="paperstable" cellpadding="3" width="100%">')
h("<thead>")
h(" <tr>")
h(' <th> </th>')
h(' <th>' + self._('Paper Short Info') + '</th>')
h(' <th>' + self._('Author Name') + '</th>')
h(' <th>' + self._('Affiliation') + '</th>')
h(' <th>' + self._('Date') + '</th>')
h(' <th>' + self._('Experiment') + '</th>')
h(' <th>' + self._('Actions') + '</th>')
h(' </tr>')
h('</thead>')
h('<tbody>')
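        # One row per paper; paper['flag'] encodes the claim status:
        # 2 = confirmed, -2 = repealed, anything else = still undecided.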
for idx, paper in enumerate(bibrecids):
h(' <tr style="padding-top: 6px; padding-bottom: 6px;">')
h(' <td><input type="checkbox" name="selection" '
'value="%s" /> </td>' % (paper['bibref']))
rec_info = format_record(int(paper['recid']), "ha")
rec_info = str(idx + 1) + '. ' + rec_info
h(" <td>%s</td>" % (rec_info))
h(" <td>%s</td>" % (paper['authorname']))
aff = ""
if paper['authoraffiliation']:
aff = paper['authoraffiliation']
else:
aff = self._("Not assigned")
h(" <td>%s</td>" % (aff))
if paper['paperdate']:
pdate = paper['paperdate']
else:
pdate = 'N.A.'
h(" <td>%s</td>" % pdate)
            if paper['paperexperiment']:
                pexp = paper['paperexperiment']
            else:
                pexp = 'N.A.'
            h("  <td>%s</td>" % pexp)
paper_status = self._("No status information found.")
if paper['flag'] == 2:
paper_status = self.tmpl_author_confirmed(paper['bibref'], person_id,
verbiage_dict=buttons_verbiage_dict['record_confirmed'],
show_reset_button=show_reset_button)
elif paper['flag'] == -2:
paper_status = self.tmpl_author_repealed(paper['bibref'], person_id,
verbiage_dict=buttons_verbiage_dict['record_repealed'])
else:
paper_status = self.tmpl_author_undecided(paper['bibref'], person_id,
verbiage_dict=buttons_verbiage_dict['record_undecided'])
h(' <td><div id="bibref%s" style="float:left"><!--%s!-->%s </div>'
% (paper['bibref'], paper['flag'], paper_status))
if 'rt_status' in paper and paper['rt_status']:
h('<img src="%s/img/aid_operator.png" title="%s" '
'alt="actions pending" style="float:right" '
'height="24" width="24" />'
% (CFG_SITE_URL, self._("Operator review of user actions pending")))
h(' </td>')
h(" </tr>")
h(" </tbody>")
h("</table>")
# +self._(' On all pages: '))
h('<div class="aid_reclist_selector">')
h('<a rel="nofollow" rel="group_1" href="#select_all">' + self._('Select All') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#select_none">' + self._('Select None') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#invert_selection">' + self._('Invert Selection') + '</a> | ')
h('<a rel="nofollow" id="toggle_claimed_rows" href="javascript:toggle_claimed_rows();" '
'alt="hide">' + self._('Hide successful claims') + '</a>')
h('</div>')
h('<div class="aid_reclist_buttons">')
h(('<img src="%s/img/aid_90low_right.png" alt="∟" />')
% (CFG_SITE_URL))
h('<input type="hidden" name="pid" value="%s" />' % (person_id))
h('<input type="submit" name="assign" value="%s" class="aid_btn_blue" />' % verbiage_dict['b_confirm'])
h('<input type="submit" name="reject" value="%s" class="aid_btn_blue" />' % verbiage_dict['b_repeal'])
h('<input type="submit" name="to_other_person" value="%s" class="aid_btn_blue" />' %
verbiage_dict['b_to_others'])
# if show_reset_button:
# h('<input type="submit" name="reset" value="%s" class="aid_btn_blue" />' % verbiage_dict['b_forget'])
h(" </div>")
h("</form>")
return "\n".join(pp_html)
def __tmpl_reviews_table(self, person_id, bibrecids, admin=False):
        '''
        Generate the table of records which may need a review.
        @param person_id: Person ID
        @type person_id: int
        @param bibrecids: List of records to display
        @type bibrecids: list
        @param admin: Show admin functions
        @type admin: boolean
        '''
no_papers_html = ['<div style="text-align:left;margin-top:1em;"><strong>']
no_papers_html.append(self._('Sorry, there are currently no records to be found in this category.'))
no_papers_html.append('</strong></div>')
if not bibrecids or not person_id:
return "\n".join(no_papers_html)
pp_html = []
h = pp_html.append
h('<form id="review" action="/author/claim/batchprocess" method="post">')
h('<table class="reviewstable" cellpadding="3" width="100%">')
h(' <thead>')
h(' <tr>')
h(' <th> </th>')
h(' <th>' + self._('Paper Short Info') + '</th>')
h(' <th>' + self._('Actions') + '</th>')
h(' </tr>')
h(' </thead>')
h(' <tbody>')
for paper in bibrecids:
h(' <tr>')
h(' <td><input type="checkbox" name="selected_bibrecs" '
'value="%s" /> </td>' % (paper))
rec_info = format_record(int(paper[0]), "ha")
if not admin:
rec_info = rec_info.replace("person/search?q=", "author/")
h(" <td>%s</td>" % (rec_info))
h(' <td><a rel="nofollow" href="%s/author/claim/batchprocess?selected_bibrecs=%s&mfind_bibref=claim">' % (CFG_SITE_URL, paper) +
self._('Review Transaction') + '</a></td>')
h(" </tr>")
h(" </tbody>")
h("</table>")
h('<div style="text-align:left;"> ' + self._('On all pages') + ': ')
h('<a rel="nofollow" rel="group_1" href="#select_all">' + self._('Select All') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#select_none">' + self._('Select None') + '</a> | ')
h('<a rel="nofollow" rel="group_1" href="#invert_selection">' + self._('Invert Selection') + '</a>')
h('</div>')
h('<div style="vertical-align:middle;">')
h('∟ ' + self._('With selected do') + ': ')
h('<input type="hidden" name="pid" value="%s" />' % (person_id))
h('<input type="hidden" name="mfind_bibref" value="claim" />')
h('<input type="submit" name="submit" value="Review selected transactions" />')
h(" </div>")
h('</form>')
return "\n".join(pp_html)
def tmpl_admin_tabs(self, ln=CFG_SITE_LANG, person_id=-1,
rejected_papers=[],
rest_of_papers=[],
review_needed=[],
rt_tickets=[],
open_rt_tickets=[],
show_tabs=['records', 'repealed', 'review', 'comments', 'tickets', 'data'],
show_reset_button=True,
ticket_links=['delete', 'commit', 'del_entry', 'commit_entry'],
verbiage_dict={'confirmed': 'Records', 'repealed': 'Not this person\'s records',
'review': 'Records in need of review',
'tickets': 'Open Tickets', 'data': 'Data',
'confirmed_ns': 'Papers of this Person',
'repealed_ns': 'Papers _not_ of this Person',
'review_ns': 'Papers in need of review',
'tickets_ns': 'Tickets for this Person',
'data_ns': 'Additional Data for this Person'},
buttons_verbiage_dict={
'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'Confirm',
'b_repeal': 'Repeal',
'b_to_others': 'Assign to another person',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Confirm!',
'confirm_text': 'Confirm record assignment.',
'alt_repeal': 'Rejected!',
'repeal_text': 'This record has been repealed.'},
'record_confirmed': {'alt_confirm': 'Confirmed.',
'confirm_text':
'This record assignment has been confirmed.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget assignment decision',
'alt_repeal': 'Repeal!',
'repeal_text': 'Repeal record assignment'},
'record_repealed': {'alt_confirm': 'Confirm!',
'confirm_text': 'Confirm record assignment.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget assignment decision',
'alt_repeal': 'Rejected!',
'repeal_text': 'Repeal this record assignment.'}}):
'''
Generate the tabs for the person overview page
@param ln: the language to use
@type ln: string
@param person_id: Person ID
@type person_id: int
@param rejected_papers: list of repealed papers
@type rejected_papers: list
@param rest_of_papers: list of attributed of undecided papers
@type rest_of_papers: list
@param review_needed: list of papers that need a review (choose name)
@type review_needed:list
@param rt_tickets: list of tickets for this Person
@type rt_tickets: list
@param open_rt_tickets: list of open request tickets
@type open_rt_tickets: list
@param show_tabs: list of tabs to display
@type show_tabs: list of strings
@param ticket_links: list of links to display
@type ticket_links: list of strings
@param verbiage_dict: language for the elements
@type verbiage_dict: dict
@param buttons_verbiage_dict: language for the buttons
@type buttons_verbiage_dict: dict
'''
html = []
h = html.append
h('<div id="aid_tabbing">')
h(' <ul>')
if 'records' in show_tabs:
r = verbiage_dict['confirmed']
h(' <li><a rel="nofollow" href="#tabRecords"><span>%(r)s (%(l)s)</span></a></li>' %
({'r': r, 'l': len(rest_of_papers)}))
if 'repealed' in show_tabs:
r = verbiage_dict['repealed']
h(' <li><a rel="nofollow" href="#tabNotRecords"><span>%(r)s (%(l)s)</span></a></li>' %
({'r': r, 'l': len(rejected_papers)}))
if 'review' in show_tabs:
r = verbiage_dict['review']
h(' <li><a rel="nofollow" href="#tabReviewNeeded"><span>%(r)s (%(l)s)</span></a></li>' %
({'r': r, 'l': len(review_needed)}))
if 'tickets' in show_tabs:
r = verbiage_dict['tickets']
h(' <li><a rel="nofollow" href="#tabTickets"><span>%(r)s (%(l)s)</span></a></li>' %
({'r': r, 'l': len(open_rt_tickets)}))
if 'data' in show_tabs:
r = verbiage_dict['data']
h(' <li><a rel="nofollow" href="#tabData"><span>%s</span></a></li>' % r)
userid = get_uid_of_author(person_id)
if userid:
h('<img src="%s/img/webbasket_user.png" alt="%s" width="30" height="30" />' %
(CFG_SITE_URL, self._("The author has an internal ID!")))
h(' </ul>')
if 'records' in show_tabs:
h(' <div id="tabRecords">')
r = verbiage_dict['confirmed_ns']
h('<noscript><h5>%s</h5></noscript>' % r)
h(self.__tmpl_admin_records_table("massfunctions",
person_id, rest_of_papers,
verbiage_dict=buttons_verbiage_dict['mass_buttons'],
buttons_verbiage_dict=buttons_verbiage_dict,
show_reset_button=show_reset_button))
h(" </div>")
if 'repealed' in show_tabs:
h(' <div id="tabNotRecords">')
r = verbiage_dict['repealed_ns']
h('<noscript><h5>%s</h5></noscript>' % r)
h(self._('These records have been marked as not being from this person.'))
            h('<br />' + self._('They will be taken into account in the next run of the author '
                                'disambiguation algorithm and might disappear from this listing.'))
h(self.__tmpl_admin_records_table("rmassfunctions",
person_id, rejected_papers,
verbiage_dict=buttons_verbiage_dict['mass_buttons'],
buttons_verbiage_dict=buttons_verbiage_dict,
show_reset_button=show_reset_button))
h(" </div>")
if 'review' in show_tabs:
h(' <div id="tabReviewNeeded">')
r = verbiage_dict['review_ns']
h('<noscript><h5>%s</h5></noscript>' % r)
h(self.__tmpl_reviews_table(person_id, review_needed, True))
h(' </div>')
if 'tickets' in show_tabs:
h(' <div id="tabTickets">')
r = verbiage_dict['tickets']
h('<noscript><h5>%s</h5></noscript>' % r)
r = verbiage_dict['tickets_ns']
h('<p>%s:</p>' % r)
if rt_tickets:
pass
# open_rt_tickets = [a for a in open_rt_tickets if a[1] == rt_tickets]
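            # Each ticket t is a pair (fields, ticket_number), where fields is
            # a list of (key, value) tuples; unpack the known keys below.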
for t in open_rt_tickets:
name = self._('Not provided')
surname = self._('Not provided')
uidip = self._('Not available')
comments = self._('No comments')
email = self._('Not provided')
date = self._('Not Available')
actions = []
rtid = None
for info in t[0]:
if info[0] == 'firstname':
name = info[1]
elif info[0] == 'lastname':
surname = info[1]
elif info[0] == 'uid-ip':
uidip = info[1]
elif info[0] == 'comments':
comments = info[1]
elif info[0] == 'email':
email = info[1]
elif info[0] == 'date':
date = info[1]
elif info[0] in ['assign', 'reject']:
actions.append(info)
elif info[0] == 'rtid':
rtid = info[1]
if 'delete' in ticket_links:
h(('<strong>Ticket number: %(tnum)s </strong> <a rel="nofollow" id="cancel" href=%(url)s/author/claim/action?cancel_rt_ticket=True&selection=%(tnum)s&pid=%(pid)s>' + self._(' Delete this ticket') + ' </a>')
% ({'tnum': t[1], 'url': CFG_SITE_URL, 'pid': str(person_id)}))
if 'commit' in ticket_links:
h((' or <a rel="nofollow" id="commit" href=%(url)s/author/claim/action?commit_rt_ticket=True&selection=%(tnum)s&pid=%(pid)s>' + self._(' Commit this entire ticket') + ' </a> <br>')
% ({'tnum': t[1], 'url': CFG_SITE_URL, 'pid': str(person_id)}))
h('<dd>')
h('Open from: %s, %s <br>' % (surname, name))
h('Date: %s <br>' % date)
h('identified by: %s <br>' % uidip)
h('email: %s <br>' % email)
h('comments: %s <br>' % comments)
h('Suggested actions: <br>')
h('<dd>')
for a in actions:
bibref, bibrec = a[1].split(',')
pname = get_bibrefrec_name_string(bibref)
title = ""
try:
title = get_fieldvalues(int(bibrec), "245__a")[0]
except IndexError:
title = self._("No title available")
title = escape_html(title)
if 'commit_entry' in ticket_links:
h('<a rel="nofollow" id="action" href="%(url)s/author/claim/action?%(action)s=True&pid=%(pid)s&selection=%(bib)s&rt_id=%(rt)s">%(action)s - %(name)s on %(title)s </a>'
% ({'action': a[0], 'url': CFG_SITE_URL,
'pid': str(person_id), 'bib': a[1],
'name': pname, 'title': title, 'rt': t[1]}))
else:
h('%(action)s - %(name)s on %(title)s'
% ({'action': a[0], 'name': pname, 'title': title}))
if 'del_entry' in ticket_links:
h(' - <a rel="nofollow" id="action" href="%(url)s/author/claim/action?cancel_rt_ticket=True&pid=%(pid)s&selection=%(bib)s&rt_id=%(rt)s&rt_action=%(action)s"> Delete this entry </a>'
% ({'action': a[0], 'url': CFG_SITE_URL,
'pid': str(person_id), 'bib': a[1], 'rt': t[1]}))
h(' - <a rel="nofollow" id="show_paper" target="_blank" href="%(url)s/record/%(record)s"> View record </a><br>' %
({'url': CFG_SITE_URL, 'record': str(bibrec)}))
if rtid:
h('<a rel="nofollow" id="closert" href="%(url)s/author/claim/action?close_rt_ticket=True&rtid=%(rtid)s&pid=%(pid)s">Close this ticket in RT</a>'
% ({'url': CFG_SITE_URL, 'rtid': rtid,
'pid': str(person_id)}))
h('</dd>')
h('</dd><br>')
# h(str(open_rt_tickets))
h(" </div>")
if 'data' in show_tabs:
h(' <div id="tabData">')
r = verbiage_dict['data_ns']
h('<noscript><h5>%s</h5></noscript>' % r)
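            # Canonical ids look like '<name>.<number>' (e.g. J.Ellis.1);
            # strip the numeric suffix so the operator can edit the base name.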
full_canonical_name = str(get_canonical_id_from_person_id(person_id))
            if '.' in full_canonical_name:
canonical_name = full_canonical_name[0:full_canonical_name.rindex('.')]
else:
canonical_name = str(person_id)
h('<div> <strong> Person id </strong> <br> %s <br>' % person_id)
h('<strong> <br> Canonical name setup </strong>')
h('<div style="margin-top: 15px;"> Current canonical name: %s' % full_canonical_name)
h('<form method="GET" action="%s/author/claim/action" rel="nofollow">' % CFG_SITE_URL)
h('<input type="hidden" name="set_canonical_name" value="True" />')
h('<input name="canonical_name" id="canonical_name" type="text" style="border:1px solid #333; width:500px;" value="%s" /> ' %
canonical_name)
h('<input type="hidden" name="pid" value="%s" />' % person_id)
h('<input type="submit" value="set canonical name" class="aid_btn_blue" />')
h(
'<br>NOTE: If the canonical ID is without any number (e.g. J.Ellis), it will take the first available number. ')
h('If the canonical ID is complete (e.g. J.Ellis.1) that ID will be assigned to the current person ')
h('and if another person had that ID, he will lose it and get a new one. </form>')
h('</div>')
userid = get_uid_of_author(person_id)
h('<div> <br>')
h('<strong> Internal IDs </strong> <br>')
if userid:
email = get_email(int(userid))
h('UserID: INSPIRE user %s is associated with this profile with email: %s' % (str(userid), str(email)))
else:
h('UserID: There is no INSPIRE user associated to this profile!')
h('<br></div>')
h('</div> </div>')
h('</div>')
return "\n".join(html)
def tmpl_invenio_search_box(self):
        '''
        Generate a little search box for missing papers. Links to the main
        Invenio search on the start page.
        '''
html = []
h = html.append
h('<div style="margin-top: 15px;"> <strong>Search for missing papers:</strong> <form method="GET" action="%s/search">' %
CFG_SITE_URL)
h('<input name="p" id="p" type="text" style="border:1px solid #333; width:500px;" /> ')
h('<input type="submit" name="action_search" value="search" '
'class="aid_btn_blue" />')
h('</form> </div>')
return "\n".join(html)
def tmpl_choose_profile_search_new_person_generator(self, free_id):
def stub():
text = self._("Create new profile")
link = "%s/author/claim/action?associate_profile=True&pid=%s" % (CFG_SITE_URL, free_id)
return text, link
return stub
def tmpl_assigning_search_new_person_generator(self, bibrefs):
def stub():
text = self._("Create a new Person")
link = "%s/author/claim/action?confirm=True&pid=%s" % (CFG_SITE_URL, str(CREATE_NEW_PERSON))
for r in bibrefs:
link = link + '&selection=%s' % str(r)
return text, link
return stub
def tmpl_choose_profile_search_button_generator(self):
def stub(pid, search_param):
text = self._("This is my profile")
parameters = [('associate_profile', True), ('pid', str(pid)), ('search_param', search_param)]
link = "%s/author/claim/action" % (CFG_SITE_URL)
css_class = ""
to_disable = True
return text, link, parameters, css_class, to_disable
return stub
def tmpl_assigning_search_button_generator(self, bibrefs):
def stub(pid, search_param):
text = self._("Assign paper")
parameters = [('confirm', True), ('pid', str(pid)), ('search_param', search_param)]
for r in bibrefs:
parameters.append(('selection', str(r)))
link = "%s/author/claim/action" % (CFG_SITE_URL)
css_class = ""
to_disable = False
return text, link, parameters, css_class, to_disable
return stub
def merge_profiles_button_generator(self):
def stub(pid, search_param):
text = self._("Add to merge list")
parameters = []
link = ""
css_class = "addToMergeButton"
to_disable = False
return text, link, parameters, css_class, to_disable
return stub
def tmpl_choose_profile_search_bar(self):
def stub(search_param):
activated = True
parameters = [('search_param', search_param)]
link = "%s/author/choose_profile" % (CFG_SITE_URL, )
return activated, parameters, link
return stub
def tmpl_general_search_bar(self):
def stub(search_param,):
activated = True
parameters = [('q', search_param)]
link = "%s/author/search" % (CFG_SITE_URL, )
return activated, parameters, link
return stub
def tmpl_merge_profiles_search_bar(self, primary_profile):
def stub(search_param):
activated = True
parameters = [('search_param', search_param), ('primary_profile', primary_profile)]
link = "%s/author/merge_profiles" % (CFG_SITE_URL, )
return activated, parameters, link
return stub
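    # The *_generator methods above return closures that tmpl_author_search()
    # consumes through its shown_element_functions dict. A minimal sketch of
    # the wiring (illustrative only, using keys the method looks up below):
    #
    #     shown_element_functions = {
    #         'show_search_bar': self.tmpl_general_search_bar(),
    #         'button_gen': self.tmpl_choose_profile_search_button_generator(),
    #     }
    #     html = self.tmpl_author_search(query, results, shown_element_functions)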
def tmpl_author_search(self, query, results, shown_element_functions):
'''
Generates the search for Person entities.
@param query: the query a user issued to the search
@type query: string
@param results: list of results
@type results: list
@param search_ticket: search ticket object to inform about pending
claiming procedure
@type search_ticket: dict
'''
if not query:
query = ""
html = []
h = html.append
search_bar_activated = False
if 'show_search_bar' in shown_element_functions.keys():
search_bar_activated, parameters, link = shown_element_functions['show_search_bar'](query)
if search_bar_activated:
h(
'<div class="fg-toolbar ui-toolbar ui-widget-header ui-corner-tl ui-corner-tr ui-helper-clearfix" id="aid_search_bar">')
h('<form id="searchform" action="%s" method="GET">' % (link,))
            h('Find author clusters by name, e.g. <i>Ellis, J</i>: <br>')
for param in parameters[1:]:
h('<input type="hidden" name=%s value=%s>' % (param[0], param[1]))
h('<input placeholder="Search for a name, e.g: Ellis, J" type="text" name=%s style="border:1px solid #333; width:500px;" '
'maxlength="250" value="%s" class="focus" />' % (parameters[0][0], parameters[0][1]))
h('<input type="submit" value="Search" />')
h('</form>')
if 'new_person_gen' in shown_element_functions.keys():
new_person_text, new_person_link = shown_element_functions['new_person_gen']()
h('<a rel="nofollow" href="%s" ><button type="button" id="new_person_link">%s' %
(new_person_link, new_person_text))
h('</button></a>')
h('</div>')
if not results and not query:
h('</div>')
return "\n".join(html)
if query and not results:
authemail = CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL
h(('<strong>' + self._("We do not have a publication list for '%s'." +
" Try using a less specific author name, or check" +
" back in a few days as attributions are updated " +
"frequently. Or you can send us feedback, at ") +
"<a rel='nofollow' href=\"mailto:%s\">%s</a>.</strong>") % (query, authemail, authemail))
h('</div>')
return "\n".join(html)
show_action_button = False
if 'button_gen' in shown_element_functions.keys():
show_action_button = True
show_status = False
if 'show_status' in shown_element_functions.keys():
show_status = True
pass_status = False
if 'pass_status' in shown_element_functions.keys():
pass_status = True
# base_color = 100
# row_color = 0
# html table
h('<table id="personsTable">')
h('<!-- Table header -->\
<thead>\
<tr>\
<th scope="col" id="Number" style="width:75px;">Number</th>\
<th scope="col" id="Identifier">Identifier</th>\
<th scope="col" id="Names">Names</th>\
<th scope="col" id="IDs">IDs</th>\
<th scope="col" id="Papers" style="width:350px">Papers</th>\
<th scope="col" id="Link">Link</th>')
if show_status:
h(' <th scope="col" id="Status" >Status</th>')
if show_action_button:
h(' <th scope="col" id="Action">Action</th>')
h(' </tr>\
</thead>\
<!-- Table body -->\
<tbody>')
for index, result in enumerate(results):
# if len(results) > base_color:
# row_color += 1
# else:
# row_color = base_color - (base_color - index *
# base_color / len(results)))
pid = result['pid']
canonical_id = result['canonical_id']
# person row
h('<tr id="pid' + str(pid) + '">')
h('<td>%s</td>' % (index + 1))
# for nindex, name in enumerate(names):
# color = row_color + nindex * 35
# color = min(color, base_color)
# h('<span style="color:rgb(%d,%d,%d);">%s; </span>'
# % (color, color, color, name[0]))
# Identifier
if canonical_id:
h('<td>%s</td>' % (canonical_id,))
else:
canonical_id = ''
h('<td>%s</td>' % ('No canonical id',))
# Names
h('<td class="emptyName' + str(pid) + '">')
# html.extend(self.tmpl_gen_names(names))
h('</td>')
# IDs
h('<td class="emptyIDs' + str(pid) + '" >') # style="text-align:left;padding-left:35px;"
# html.extend(self.tmpl_gen_ext_ids(external_ids))
h('</td>')
# Recent papers
h('<td>')
h(('<a rel="nofollow" href="#" id="aid_moreinfolink" class="mpid%s">'
'<img src="../img/aid_plus_16.png" '
'alt = "toggle additional information." '
'width="11" height="11"/> '
+ self._('Recent Papers') +
'</a>')
% (pid))
h('<div class="more-mpid%s" id="aid_moreinfo">' % (pid))
h('</div>')
h('</td>')
# Link
h('<td>')
h(('<span>'
'<em><a rel="nofollow" href="%s/author/profile/%s" id="aid_moreinfolink" target="_blank">'
+ self._('Go to Profile ') + '(%s)</a></em></span>')
% (CFG_SITE_URL, get_person_redirect_link(pid),
get_person_redirect_link(pid)))
h('</td>')
hidden_status = ""
if pass_status:
if result["status"]:
status = "Available"
else:
status = "Not available"
hidden_status = '<input type="hidden" name="profile_availability" value="%s"/>' % status
if show_status:
h('<td>%s</td>' % (status))
if show_action_button:
action_button_text, action_button_link, action_button_parameters, action_button_class, action_button_to_disable = shown_element_functions[
'button_gen'](pid, query) # class
# Action link
h('<td class="uncheckedProfile' + str(pid) + '" style="text-align:center; vertical-align:middle;">')
parameters_sublink = ''
if action_button_link:
parameters_sublink = '<input type="hidden" name="%s" value="%s" />' % (
action_button_parameters[0][0], str(action_button_parameters[0][1]))
for (param_type, param_value) in action_button_parameters[1:]:
parameters_sublink += '<input type="hidden" name="%s" value="%s" />' % (
param_type, str(param_value))
disabled = ""
if show_status:
if not result["status"] and action_button_to_disable:
disabled = "disabled"
h('<form action="%s" method="get">%s%s<input type="submit" name="%s" class="%s aid_btn_blue" value="%s" %s/></form>' %
(action_button_link, parameters_sublink, hidden_status, canonical_id, action_button_class, action_button_text, disabled)) # confirmlink check if canonical id
h('</td>')
h('</tr>')
h('</tbody>')
h('</table>')
return "\n".join(html)
def tmpl_gen_papers(self, papers):
"""
Generates the recent papers html code.
Returns a list of strings
"""
html = []
h = html.append
if papers:
            h(self._('Showing the %d most recent documents:') % len(papers))
h("<ul>")
for paper in papers:
h("<li>%s</li>"
% (format_record(int(paper[0]), "ha")))
h("</ul>")
        else:
h("<p>" + self._('Sorry, there are no documents known for this person') + "</p>")
return html
def tmpl_gen_names(self, names):
"""
Generates the names html code.
Returns a list of strings
"""
html = []
h = html.append
delimiter = ";"
if names:
for i, name in enumerate(names):
if i == 0:
h('<span>%s</span>'
% (name[0],))
else:
h('<span">%s  %s</span>'
% (delimiter, name[0]))
else:
h('%s' % ('No names found',))
return html
def tmpl_gen_ext_ids(self, external_ids):
"""
Generates the external ids html code.
Returns a list of strings
"""
html = []
h = html.append
if external_ids:
h('<table id="externalIDsTable">')
for key, value in external_ids.iteritems():
h('<tr>')
h('<td style="margin-top:5px; width:1px; padding-right:2px;">%s:</td>' % key)
h('<td style="padding-left:5px;width:1px;">')
for i, item in enumerate(value):
if i == 0:
h('%s' % item)
else:
h('; %s' % item)
h('</td>')
h('</tr>')
h('</table>')
else:
h('%s' % ('No external ids found',))
return html
def tmpl_choose_profile_footer(self):
        return ('<br><p>In case you don\'t find the correct match or your profile is already taken, please contact us here: <a rel="nofollow" href="mailto:%s">%s</a></p>'
                % (CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL,
                   CFG_BIBAUTHORID_AUTHOR_TICKET_ADMIN_EMAIL))
def tmpl_probable_profile_suggestion(
self,
probable_profile_suggestion_info,
last_viewed_profile_suggestion_info,
search_param):
        '''
        Suggest the profile the user most likely owns, based on the papers
        claimed in the external system (e.g. arXiv) they logged in through.
        '''
html = []
h = html.append
last_viewed_profile_message = self._("The following profile is the one you were viewing before logging in: ")
# if the user has searched then his choice should be remembered in case the chosen profile is not available
param = ''
if search_param:
param = '&search_param=' + search_param
h('<ul>')
if probable_profile_suggestion_info:
probable_profile_message = self._("Out of %s paper(s) claimed to your arXiv account, %s match this profile: " %
(probable_profile_suggestion_info['num_of_arXiv_papers'],
probable_profile_suggestion_info['num_of_recids_intersection']))
h('<li>')
h('%s %s ' % (probable_profile_message, probable_profile_suggestion_info['name_string']))
h('<a href="%s/author/profile/%s" target="_blank"> %s </a>' % (CFG_SITE_URL, probable_profile_suggestion_info['canonical_id'],
probable_profile_suggestion_info['canonical_name_string']))
h('<a rel="nofollow" href="%s/author/claim/action?associate_profile=True&pid=%s%s" class="confirmlink"><button type="button">%s</a>' % (CFG_SITE_URL,
str(probable_profile_suggestion_info['pid']), param, 'This is my profile'))
h('</li>')
if last_viewed_profile_suggestion_info:
h('<li>')
h('%s %s ' % (last_viewed_profile_message, last_viewed_profile_suggestion_info['name_string']))
h('<a href="%s/author/profile/%s" target="_blank"> %s </a>' % (CFG_SITE_URL, last_viewed_profile_suggestion_info['canonical_id'],
last_viewed_profile_suggestion_info['canonical_name_string']))
h('<a rel="nofollow" href="%s/author/claim/action?associate_profile=True&pid=%s%s" class="confirmlink"><button type="button">%s</a>' % (CFG_SITE_URL,
str(last_viewed_profile_suggestion_info['pid']), param, 'This is my profile'))
h('</li>')
h("</ul>")
message = self._(
"If none of the options suggested above apply, you can look for other possible options from the list below:")
h('<p>%s</p>' % (message,))
        h('<br />')
return "\n".join(html)
def tmpl_choose_profile(self, failed):
'''
SSO landing/choose_profile page.
'''
html = []
h = html.append
        if failed:
            h(
                '<p><strong><font color="red">Unfortunately the profile you chose is no longer available.</font></strong></p>')
            h(
                '<p>We apologise for the inconvenience. Please select another one.<br />Keep in mind that you can create an empty profile and then claim all of your papers in it.</p>')
else:
h(
'<p><b>You have now successfully logged in via arXiv.org, please choose your profile among these suggestions: </b></p>')
return "\n".join(html)
def tmpl_tickets_admin(self, tickets=[]):
'''
Open tickets short overview for operators.
'''
html = []
h = html.append
if len(tickets) > 0:
h('List of open tickets: <br><br>')
for t in tickets:
h('<a rel="nofollow" href=%(cname)s#tabTickets> %(longname)s - (%(cname)s - PersonID: %(pid)s): %(num)s open tickets. </a><br>'
% ({'cname': str(t[1]), 'longname': str(t[0]), 'pid': str(t[2]), 'num': str(t[3])}))
else:
h('There are currently no open tickets.')
return "\n".join(html)
def tmpl_update_hep_name_headers(self):
"""
Headers used for the hepnames update form
"""
html = []
html.append(r"""<style type="text/css">
.form1
{
margin-left: auto;
margin-right: auto;
}
#tblGrid {
margin-left: 5%;
}
#tblGrid td {
padding-left: 60px;
}
.form2
{
margin-left: 15%;
margin-right: 30%;
}
.span_float_right
{
float:right;
}
.span_float_left
{
float:left;
}
</style>
<script type="text/javascript" src="/js/hepname_update.js"></script>
""")
return "\n".join(html)
def tmpl_update_hep_name(self, full_name, display_name, email,
status, research_field_list,
institution_list, phd_advisor_list,
experiment_list, web_page):
"""
Create form to update a hep name
"""
# Prepare parameters
try:
phd_advisor = phd_advisor_list[0]
except IndexError:
phd_advisor = ''
try:
phd_advisor2 = phd_advisor_list[1]
except IndexError:
phd_advisor2 = ''
        is_active = is_retired = is_departed = is_deceased = ''
        if status == 'ACTIVE':
            is_active = 'selected'
        elif status == 'RETIRED':
            is_retired = 'selected'
        elif status == 'DEPARTED':
            is_departed = 'selected'
        elif status == 'DECEASED':
            is_deceased = 'selected'
research_field_html = """
<TD><INPUT TYPE=CHECKBOX VALUE=ACC-PHYS name=field>acc-phys</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=ASTRO-PH name=field>astro-ph</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=ATOM-PH name=field>atom-ph</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=CHAO-DYN name=field>chao-dyn</TD></TR>
<tr><TD><INPUT TYPE=CHECKBOX VALUE=CLIMATE name=field>climate</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=COMP name=field>comp</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=COND-MAT name=field>cond-mat</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=GENL-TH name=field>genl-th</TD></TR>
<tr><TD><INPUT TYPE=CHECKBOX VALUE=GR-QC name=field>gr-qc</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=HEP-EX name=field>hep-ex</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=HEP-LAT name=field>hep-lat</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=HEP-PH name=field>hep-ph</TD></TR>
<TR>
<TD><INPUT TYPE=CHECKBOX VALUE=HEP-TH name=field>hep-th</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=INSTR name=field>instr</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=LIBRARIAN name=field>librarian</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=MATH name=field>math</TD></TR>
<TR>
<TD><INPUT TYPE=CHECKBOX VALUE=MATH-PH name=field>math-ph</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=MED-PHYS name=field>med-phys</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=NLIN name=field>nlin</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=NUCL-EX name=field>nucl-ex</TD></TR>
<TR>
<TD><INPUT TYPE=CHECKBOX VALUE=NUCL-TH name=field>nucl-th</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=PHYSICS name=field>physics</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=PLASMA-PHYS name=field>plasma-phys</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=Q-BIO name=field>q-bio</TD></TR>
<TR>
<TD><INPUT TYPE=CHECKBOX VALUE=QUANT-PH name=field>quant-ph</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=SSRL name=field>ssrl</TD>
<TD><INPUT TYPE=CHECKBOX VALUE=OTHER name=field>other</TD>
"""
for research_field in research_field_list:
research_field_html = research_field_html.replace(
'VALUE=' + research_field,
'checked ' + 'VALUE=' + research_field)
institutions_html = ""
institution_num = 1
for institution_entry in institution_list:
institution = """
<tr>
<td> </td>
<td class="cell_padding"><input name="aff.str" type="hidden">
<input type="text" name="inst%(institution_num)s" size="35" value =%(institution_name)s /></td>
<td class="cell_padding"><select name="rank%(institution_num)s">
<option selected value=''> </option>
<option value='SENIOR'>Senior(permanent)</option>
<option value='JUNIOR'>Junior(leads to Senior)</option>
<option value='STAFF'>Staff(non-research)</option>
<option value='VISITOR'>Visitor</option>
<option value='PD'>PostDoc</option>
<option value='PHD'>PhD</option>
<option value='MAS'>Masters</option>
<option value='UG'>Undergrad</option></select></td>
<TD class="cell_padding"><INPUT TYPE="TEXT" value=%(start_year)s name="sy%(institution_num)s" SIZE="4"/>
<INPUT TYPE="TEXT" value=%(end_year)s name="ey%(institution_num)s" SIZE="4"/></TD>
<TD class="cell_padding"> <INPUT TYPE=CHECKBOX VALUE='Y' name="current%(institution_num)s">
<input type="button" value="Delete row" class="formbutton" onclick="removeRow(this);" />
</td>
</tr>
""" % { 'institution_name': xml.sax.saxutils.quoteattr(institution_entry[0]),
'start_year': xml.sax.saxutils.quoteattr(institution_entry[2]),
'end_year': xml.sax.saxutils.quoteattr(institution_entry[3]),
'institution_num': institution_num
}
institution_num += 1
institution = institution.replace(
'value=' + '\'' + institution_entry[1] + '\'',
'selected ' + 'VALUE=' + institution_entry[1])
if institution_entry[4] == 'Current':
institution = institution.replace("VALUE='Y'", 'checked ' + "VALUE='Y'")
institutions_html += institution
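        # Expose the number of pre-filled rows to hepname_update.js (the
        # occcnt global is presumably read by its addRow()/removeRow helpers).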
institutions_html += "<script>occcnt = %s; </script>" % (institution_num - 1)
experiments_html = """
<select name=exp id=exp multiple=yes>
<option value=""> </option>
<option value=AMANDA>AMANDA</option>
<option value=AMS>AMS</option>
<option value=ANTARES>ANTARES</option>
<option value=AUGER>AUGER</option>
<option value=BAIKAL>BAIKAL</option>
<option value=BNL-E-0877>BNL-E-0877</option>
<option value=BNL-LEGS>BNL-LEGS</option>
<option value=BNL-RHIC-BRAHMS>BNL-RHIC-BRAHMS</option>
<option value=BNL-RHIC-PHENIX>BNL-RHIC-PHENIX</option>
<option value=BNL-RHIC-PHOBOS>BNL-RHIC-PHOBOS</option>
<option value=BNL-RHIC-STAR>BNL-RHIC-STAR</option>
<option value=CDMS>CDMS</option>
<option value=CERN-LEP-ALEPH>CERN-LEP-ALEPH</option>
<option value=CERN-LEP-DELPHI>CERN-LEP-DELPHI</option>
<option value=CERN-LEP-L3>CERN-LEP-L3</option>
<option value=CERN-LEP-OPAL>CERN-LEP-OPAL</option>
<option value=CERN-LHC-ALICE>CERN-LHC-ALICE</option>
<option value=CERN-LHC-ATLAS>CERN-LHC-ATLAS</option>
<option value=CERN-LHC-B>CERN-LHC-B</option>
<option value=CERN-LHC-CMS>CERN-LHC-CMS</option>
<option value=CERN-LHC-LHCB>CERN-LHC-LHCB</option>
<option value=CERN-NA-060>CERN-NA-060</option>
<option value=CERN-NA-061>CERN-NA-061</option>
<option value=CERN-NA-062>CERN-NA-062</option>
<option value=CERN-PS-214>CERN-PS-214 (HARP)</option>
<option value=CESR-CLEO>CESR-CLEO</option>
<option value=CESR-CLEO-C>CESR-CLEO-C</option>
<option value=CESR-CLEO-II>CESR-CLEO-II</option>
<option value=CHIMERA>CHIMERA</option>
<option value=COBRA>COBRA</option>
<option value=COSY-ANKE>COSY-ANKE</option>
<option value=CUORE>CUORE</option>
<option value=COUPP>COUPP</option>
<option value=DAYA-BAY>DAYA-BAY</option>
<option value=DESY-DORIS-ARGUS>DESY-DORIS-ARGUS</option>
<option value=DESY-HERA-B>DESY-HERA-B</option>
<option value=DESY-HERA-H1>DESY-HERA-H1</option>
<option value=DESY-HERA-HERMES>DESY-HERA-HERMES</option>
<option value=DESY-HERA-ZEUS>DESY-HERA-ZEUS</option>
<option value=DESY-PETRA-MARK-J>DESY-PETRA-MARK-J</option>
<option value=DESY-PETRA-PLUTO-2>DESY-PETRA-PLUTO-2</option>
<option value=DESY-PETRA-TASSO>DESY-PETRA-TASSO</option>
<option value=DOUBLE-CHOOZ>DOUBLE-CHOOZ</option>
<option value=DRIFT>DRIFT</option>
<option value=EXO>EXO</option>
<option value=FERMI-LAT>FERMI-LAT</option>
<option value=FNAL-E-0687>FNAL-E-0687</option>
<option value=FNAL-E-0690>FNAL-E-0690</option>
<option value=FNAL-E-0706>FNAL-E-0706</option>
<option value=FNAL-E-0740>FNAL-E-0740 (D0 Run I)</option>
<option value=FNAL-E-0741>FNAL-E-0741 (CDF Run I)</option>
<option value=FNAL-E-0799>FNAL-E-0799 (KTeV)</option>
<option value=FNAL-E-0815>FNAL-E-0815 (NuTeV)</option>
<option value=FNAL-E-0823>FNAL-E-0823 (D0 Run II)</option>
<option value=FNAL-E-0830>FNAL-E-0830 (CDF Run II)</option>
<option value=FNAL-E-0831>FNAL-E-0831 (FOCUS)</option>
<option value=FNAL-E-0832>FNAL-E-0832 (KTeV)</option>
<option value=FNAL-E-0872>FNAL-E-0872 (DONUT)</option>
<option value=FNAL-E-0875>FNAL-E-0875 (MINOS)</option>
<option value=FNAL-E-0886>FNAL-E-0886 (FNPL)</option>
<option value=FNAL-E-0892>FNAL-E-0892 (USCMS)</option>
<option value=FNAL-E-0898>FNAL-E-0898 (MiniBooNE)</option>
<option value=FNAL-E-0904>FNAL-E-0904 (MUCOOL)</option>
<option value=FNAL-E-0906>FNAL-E-0906 (NuSea)</option>
<option value=FNAL-E-0907>FNAL-E-0907 (MIPP)</option>
        <option value=FNAL-E-0918>FNAL-E-0918 (BTeV)</option>
        <option value=FNAL-E-0973>FNAL-E-0973 (Mu2e)</option>
<option value=FNAL-E-0937>FNAL-E-0937 (FINeSSE)</option>
<option value=FNAL-E-0938>FNAL-E-0938 (MINERvA)</option>
<option value=FNAL-E-0954>FNAL-E-0954 (SciBooNE)</option>
<option value=FNAL-E-0961>FNAL-E-0961 (COUPP)</option>
<option value=FNAL-E-0974>FNAL-E-0974</option>
<option value=FNAL-LC>FNAL-LC</option>
<option value=FNAL-P-0929>FNAL-P-0929 (NOvA)</option>
<option value=FNAL-T-0962>FNAL-T-0962 (ArgoNeuT)</option>
<option value=FRASCATI-DAFNE-KLOE>FRASCATI-DAFNE-KLOE</option>
<option value=FREJUS-NEMO-3>FREJUS-NEMO-3</option>
<option value=GERDA>GERDA</option>
<option value=GSI-HADES>GSI-HADES</option>
<option value=GSI-SIS-ALADIN>GSI-SIS-ALADIN</option>
<option value=HARP>HARP</option>
<option value=HESS>HESS</option>
<option value=ICECUBE>ICECUBE</option>
<option value=ILC>ILC</option>
<option value=JLAB-E-01-104>JLAB-E-01-104</option>
<option value=KAMLAND>KAMLAND</option>
<option value=KASCADE-GRANDE>KASCADE-GRANDE</option>
<option value=KATRIN>KATRIN</option>
<option value=KEK-BF-BELLE>KEK-BF-BELLE</option>
<option value=KEK-BF-BELLE-II>KEK-BF-BELLE-II</option>
<option value=KEK-T2K>KEK-T2K</option>
<option value=LBNE>LBNE</option>
<option value=LIGO>LIGO</option>
<option value=LISA>LISA</option>
<option value=LSST>LSST</option>
<option value=MAGIC>MAGIC</option>
<option value=MAJORANA>MAJORANA</option>
<option value=MICE>MICE</option>
<option value=PICASSO>PICASSO</option>
<option value=PLANCK>PLANCK</option>
<option value=SDSS>SDSS</option>
<option value=SIMPLE>SIMPLE</option>
<option value=SLAC-PEP2-BABAR>SLAC-PEP2-BABAR</option>
<option value=SNAP>SNAP</option>
<option value=SSCL-GEM>SSCL-GEM</option>
<option value=SUDBURY-SNO>SUDBURY-SNO</option>
<option value=SUDBURY-SNO+>SUDBURY-SNO+</option>
<option value=SUPER-KAMIOKANDE>SUPER-KAMIOKANDE</option>
<option value=VERITAS>VERITAS</option>
<option value=VIRGO>VIRGO</option>
<option value=WASA-COSY>WASA-COSY</option>
<option value=WMAP>WMAP</option>
<option value=XENON>XENON</option>
</select>
"""
for experiment in experiment_list:
experiments_html = experiments_html.replace('value=' + experiment, 'selected ' + 'value=' + experiment)
html = []
html.append("""<H4>Changes to Existing Records</H4>
<P>Send us your details (or someone else's). See our <a href="http://www.slac.stanford.edu/spires/hepnames/help/adding.shtml">help
for additions</A>.<BR>If something doesnt fit in the form, just put it in
the comments section.</P>
<FORM name="hepnames_addition"
onSubmit="return OnSubmitCheck();"
action=http://www.slac.stanford.edu/cgi-bin/form-mail.pl
method=post><INPUT type=hidden value=nowhere name=to id=tofield>
<INPUT type=hidden value="New HEPNames Posting" name=subject> <INPUT
type=hidden value=2bsupplied name=form_contact id=formcont> <INPUT
type=hidden value=/spires/hepnames/hepnames_msgupd.file name=email_msg_file>
<INPUT type=hidden value=/spires/hepnames/hepnames_resp_msg.file
name=response_msg_file><INPUT type=hidden value=0 name=debug>
<INPUT type=hidden value="1095498" name=key>
<INPUT type=hidden value="" name=field>
<INPUT type=hidden value="" name=current1>
<INPUT type=hidden value="" name=inst2><INPUT type=hidden value="" name=rank2>
<INPUT type=hidden value="" name=ey2><INPUT type=hidden value="" name=sy2>
<INPUT type=hidden value="" name=current2>
<INPUT type=hidden value="" name=inst3><INPUT type=hidden value="" name=rank3>
<INPUT type=hidden value="" name=ey3><INPUT type=hidden value="" name=sy3>
<INPUT type=hidden value="" name=current3>
<INPUT type=hidden value="" name=inst4><INPUT type=hidden value="" name=rank4>
<INPUT type=hidden value="" name=ey4><INPUT type=hidden value="" name=sy4>
<INPUT type=hidden value="" name=current4>
<INPUT type=hidden value="" name=inst5><INPUT type=hidden value="" name=rank5>
<INPUT type=hidden value="" name=ey5><INPUT type=hidden value="" name=sy5>
<INPUT type=hidden value="" name=current5>
<INPUT type=hidden value="" name=inst7><INPUT type=hidden value="" name=rank7>
<INPUT type=hidden value="" name=ey7><INPUT type=hidden value="" name=sy7>
<INPUT type=hidden value="" name=current7>
<INPUT type=hidden value="" name=inst6><INPUT type=hidden value="" name=rank6>
<INPUT type=hidden value="" name=ey6><INPUT type=hidden value="" name=sy6>
<INPUT type=hidden value="" name=current6>
<INPUT type=hidden value="" name=inst8><INPUT type=hidden value="" name=rank8>
<INPUT type=hidden value="" name=ey8><INPUT type=hidden value="" name=sy8>
<INPUT type=hidden value="" name=current8>
<INPUT type=hidden value="" name=inst9><INPUT type=hidden value="" name=rank9>
<INPUT type=hidden value="" name=ey9><INPUT type=hidden value="" name=sy9>
<INPUT type=hidden value="" name=current9>
<INPUT type=hidden value="" name=inst10><INPUT type=hidden value="" name=rank10>
<INPUT type=hidden value="" name=ey10><INPUT type=hidden value="" name=sy10>
<INPUT type=hidden value="" name=current10>
<INPUT type=hidden value="" name=inst11><INPUT type=hidden value="" name=rank11>
<INPUT type=hidden value="" name=ey11><INPUT type=hidden value="" name=sy11>
<INPUT type=hidden value="" name=current11>
<INPUT type=hidden value="" name=inst12><INPUT type=hidden value="" name=rank12>
<INPUT type=hidden value="" name=ey12><INPUT type=hidden value="" name=sy12>
<INPUT type=hidden value="" name=current12>
<INPUT type=hidden value="" name=inst13><INPUT type=hidden value="" name=rank13>
<INPUT type=hidden value="" name=ey13><INPUT type=hidden value="" name=sy13>
<INPUT type=hidden value="" name=current13>
<INPUT type=hidden value="" name=inst14><INPUT type=hidden value="" name=rank14>
<INPUT type=hidden value="" name=ey14><INPUT type=hidden value="" name=sy14>
<INPUT type=hidden value="" name=current14>
<INPUT type=hidden value="" name=inst15><INPUT type=hidden value="" name=rank15>
<INPUT type=hidden value="" name=ey15><INPUT type=hidden value="" name=sy15>
<INPUT type=hidden value="" name=current15>
<INPUT type=hidden value="" name=inst17><INPUT type=hidden value="" name=rank17>
<INPUT type=hidden value="" name=ey17><INPUT type=hidden value="" name=sy17>
<INPUT type=hidden value="" name=current17>
<INPUT type=hidden value="" name=inst16><INPUT type=hidden value="" name=rank16>
<INPUT type=hidden value="" name=ey16><INPUT type=hidden value="" name=sy16>
<INPUT type=hidden value="" name=current16>
<INPUT type=hidden value="" name=inst18><INPUT type=hidden value="" name=rank18>
<INPUT type=hidden value="" name=ey18><INPUT type=hidden value="" name=sy18>
<INPUT type=hidden value="" name=current18>
<INPUT type=hidden value="" name=inst19><INPUT type=hidden value="" name=rank19>
<INPUT type=hidden value="" name=ey19><INPUT type=hidden value="" name=sy19>
<INPUT type=hidden value="" name=current19>
<INPUT type=hidden value="" name=inst20><INPUT type=hidden value="" name=rank20>
<INPUT type=hidden value="" name=ey20><INPUT type=hidden value="" name=sy20>
<INPUT type=hidden value="" name=current20>
<INPUT type=hidden value="today" name=DV>
<TABLE class=form1>
<TBODY>
<TR>
<TD><STRONG>Full name</STRONG></TD>
<TD><INPUT SIZE=24 value=%(full_name)s name=authorname> <FONT SIZE=2>E.G.
Lampen, John Francis</FONT> </TD></TR>
<TR>
<TD><STRONG>Display Name</STRONG></TD>
<TD><INPUT SIZE=24 value=%(display_name)s name='dispname'> <FONT SIZE=2>E.G.
        Lampén, John </FONT></TD></TR>
<TR>
<TD><STRONG> Your Email</STRONG></TD>
<TD><INPUT SIZE=24 value=%(email)s name='username' ID='username'><FONT SIZE=2>(<STRONG>REQ'D
</strong> but not displayed - contact only)</font> </TD></TR>
<TR>
<TD><STRONG>Email </STRONG>(Public)</TD>
<TD><INPUT SIZE=24 value=%(email_public)s name='email' id='email'>
<input type='button' value='Same as Above' class='formbutton' onclick='copyem();'/>
</TD></TR><tr><TD><STRONG>Status</STRONG></TD><TD>
<SELECT NAME=status>
<OPTION %(is_active)s value=ACTIVE>Active</OPTION>
<OPTION %(is_retired)s value=RETIRED>Retired</OPTION>
<OPTION %(is_departed)s value=DEPARTED>Departed</OPTION>
<OPTION %(is_deceased)s value=DECEASED>Deceased</OPTION>
</SELECT></TD></TR>
<tr><TD><STRONG>Field of research</STRONG></TD><td> <table><tbody><tr>
%(research_field_html)s
</TR></TBODY></TABLE></TD></TR>
<table id="tblGrid" >
<tr>
<td> </td>
<td class="cell_padding"><strong> Institution History</strong><br>
<FONT size=2>Please take this name from <A href="http://inspirehep.net/Institutions"
target=_TOP>Institutions</A><FONT color=red><SUP>*</SUP></FONT></TD>
<td class="cell_padding"><strong>Rank</td>
<td class="cell_padding"><strong>Start Year End Year</td>
<td class="cell_padding"><strong>Current</strong></td>
</tr>
%(institutions_html)s
</table>
<table><tr>
<a href="javascript:addRow();"> Click to add new Institution field row
<img src="/img/rightarrow.gif" ></a></tr></table>
<hr>
<table class="form2"><tbody><tr>
<TD><span class="span_float_right"><STRONG>Ph.D. Advisor</STRONG></span></TD>
<TD><span class="span_float_left"><INPUT SIZE=24 value=%(phd_advisor)s name=Advisor1> <FONT SIZE=2>E.G.
Beacom, John Francis</FONT> </span></TD></TR>
<tr><TD><span class="span_float_right"><STRONG>2nd Ph.D. Advisor</STRONG></span></TD>
<TD><span class="span_float_left"><INPUT SIZE=24 value=%(phd_advisor2)s name=Advisor2> <FONT SIZE=2>E.G.
Beacom, John Francis</FONT> </span></TD></TR>
<TD><span class="span_float_right"><STRONG>Experiments</STRONG></span>
<br /><span class="span_float_right"><FONT size=2>Hold the Control key to choose multiple current or past experiments <br> Experiments not listed can be added in the Comments field below </font></span></td>
<td><span class="span_float_left">
%(experiments_html)s
</span></td></tr>
<TR>
<TD><span class="span_float_right"><STRONG>Your web page</STRONG></span></TD>
<TD><span class="span_float_left"><INPUT SIZE=50 value=%(web)s name= URL></span></TD></TR>
<TR>
<TD><span class="span_float_right">Please send us your <STRONG>Comments</STRONG></span></td>
<TD><span class="span_float_left"><TEXTAREA NAME=Abstract ROWS=3 COLS=30></textarea><FONT SIZE=2>(not displayed)</FONT></span></TD></TR>
<tr><TD> <span class="span_float_right"><font size="1">SPAM Robots have been sending us submissions via this form, in order to prevent this we ask that you confirm that you are a real person by answering this question, which should be
easy for you, and hard for a SPAM robot. Cutting down on the extraneous submissions we get means that we can handle real requests faster.</font></span></td><td><span class="span_float_left">
<script type="text/javascript" src="https://www.slac.stanford.edu/spires/hepnames/spbeat.js">
</SCRIPT><br /><STRONG> How many people in image</STRONG> <SELECT NAME=beatspam ID=beatspam> <OPTION VALUE=""> </OPTION>
<option value="1"> one person</option>
<option value="2"> two people</option><option value="3"> three people</option>
<option value="4"> more than three</option></select></span></td></tr>
</TBODY></TABLE><INPUT type=submit class="formbutton" value="Send Request"><br /><FONT
color=red><SUP>*</SUP></FONT>Institution name should be in the form given
in the <A href="http://inspirehep.net/Institutions"
target=_TOP>INSTITUTIONS</A> database<BR>(e.g. Harvard U. * Paris U.,
VI-VII * Cambridge U., DAMTP * KEK, Tsukuba). </FORM>
""" % {'full_name': xml.sax.saxutils.quoteattr(full_name),
'display_name': xml.sax.saxutils.quoteattr(display_name),
'email': xml.sax.saxutils.quoteattr(email),
'email_public': xml.sax.saxutils.quoteattr(email),
'phd_advisor': xml.sax.saxutils.quoteattr(phd_advisor),
'phd_advisor2': xml.sax.saxutils.quoteattr(phd_advisor2),
'web': xml.sax.saxutils.quoteattr(web_page),
'is_active': is_active,
'is_retired': is_retired,
'is_departed': is_departed,
'is_deceased': is_deceased,
'research_field_html': research_field_html,
'institutions_html': institutions_html,
'experiments_html': experiments_html
})
return "\n".join(html)
# pylint: enable=C0301
def loading_html(self):
return '<img src=/img/ui-anim_basic_16x16.gif> Loading...'
def tmpl_print_searchresultbox(self, bid, header, body):
""" Print a nicely formatted box for search results. """
# first find total number of hits:
out = ('<table class="searchresultsbox" ><thead><tr><th class="searchresultsboxheader">'
+ header + '</th></tr></thead><tbody><tr><td id ="%s" class="searchresultsboxbody">' % bid
+ body + '</td></tr></tbody></table>')
return out
def tmpl_arxiv_box(self, arxiv_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = _("""<span title="Login through arXiv is needed to verify this is your profile. When you log in your publication list will automatically update with all your arXiv publications.
You may also continue as a guest. In this case your input will be processed by our staff and will take longer to display."><strong> Login with your arXiv.org account </strong></span>""")
if arxiv_data['login']:
if arxiv_data['view_own_profile']:
html_arxiv = _("You have succesfully logged in via arXiv.<br> You can now manage your profile.<br>")
elif arxiv_data['user_has_pid']:
html_arxiv = _(
"You have succesfully logged in via arXiv.<br><font color='red'>However the profile you are viewing is not your profile.<br><br></font>")
own_profile_link = "%s/author/manage_profile/%s" % (CFG_SITE_URL, arxiv_data['user_pid'])
own_profile_text = _("Manage your profile")
html_arxiv += '<a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (
own_profile_link, own_profile_text)
else:
html_arxiv = _(
"You have succesfully logged in, but<font color='red'> you are not associated to a person yet.<br>Please use the button below to choose your profile<br></font>")
login_link = '%s/author/choose_profile' % CFG_SITE_URL
login_text = _("Choose your profile")
html_arxiv += '<br><a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (
login_link, login_text)
else:
html_arxiv = _("Please log in through arXiv to manage your profile.<br>")
login_link = "https://arxiv.org/inspire_login"
login_text = _("Login into INSPIRE through arXiv.org")
html_arxiv += '<br><a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (login_link, login_text)
if loading:
html_arxiv = self.loading_html()
if add_box:
arxiv_box = self.tmpl_print_searchresultbox('arxiv', html_head, html_arxiv)
return arxiv_box
else:
return html_arxiv
def tmpl_orcid_box(self, orcid_data, ln, orcid_info, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = _(""" <span title="ORCiD (Open Researcher and Contributor ID) is a unique researcher identifier that distinguishes you from other researchers.
It holds a record of all your research activities. You can add your ORCiD to all your works to make sure they are associated with you. ">
        <strong> Connect this profile to an ORCiD </strong></span>""")
html_orcid = ""
modal = ""
if orcid_data['orcids']:
html_orcid += _(
                'This profile is already connected to the following ORCiD: <strong>%s</strong><br>' %
                (",".join(['<a rel="nofollow" href="http://www.orcid.org/' + orcidid + '">' + orcidid + '</a>' for orcidid in orcid_data['orcids']]),))
if orcid_data['arxiv_login'] and orcid_data['own_profile']:
html_orcid += '<br><div class="btn-group"><a rel="nofollow" href="%s" class="btn btn-default ' % (
"%s/author/manage_profile/push_orcid_pubs" % CFG_SITE_SECURE_URL )
if orcid_info == 'running':
html_orcid += 'disabled'
html_orcid +='">%s</a>' % (
_("Push my claimed publications to ORCiD"))
html_orcid += '<button class="btn btn-primary btn-default '
            if orcid_info == 'running':
html_orcid += 'disabled'
html_orcid += '" data-toggle="modal" data-target="#orcidPushHelp"><b>?</b></button></div>'
html_orcid += '<br><br><a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (
"%s/author/manage_profile/import_orcid_pubs" % CFG_SITE_SECURE_URL,
_("Import my publications from ORCiD"))
modal += '<div class="modal fade" id="orcidPushHelp"> \
<div class="modal-dialog"><div class="modal-content"> \
<div class="modal-header"> \
<h4 class="modal-title">%s</h4> \
</div> \
<div class="modal-body"> \
<p>%s</p></div> \
<div class="modal-footer"> \
<button type="button" class="btn btn-default" data-dismiss="modal">%s</button> \
<a rel="nofollow" href="%s" class="btn btn-primary">%s</a> \
</div></div></div></div>' % (
_("Pushing your claimed publication list to ORCiD"),
_("By pushing your publications list to ORCiD, \
we will send the details of all the papers and \
datasets you have claimed as yours in INSPIRE. \
Every time you perform this operation, only the \
new additions will be submitted to ORCiD."),
_("Go back"),
"%s/author/manage_profile/push_orcid_pubs" % CFG_SITE_SECURE_URL,
_("Push to ORCiD"))
else:
html_orcid += "This profile has not been connected to an ORCiD account yet. "
if orcid_data['arxiv_login'] and (orcid_data['own_profile'] or orcid_data['add_power']):
add_link = "%s/youraccount/oauth2?provider=%s" % (CFG_SITE_URL, 'orcid')
add_text = _("Connect an ORCiD to this profile")
html_orcid += '<br><br><a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (
add_link, add_text)
else:
suggest_text = _("Suggest an ORCiD for this profile:")
html_orcid += '<br><br> %s <br> <br>' % suggest_text
html_orcid += '<form class="form-inline"><div class="input-append"><input class="input-xlarge" id="suggested_orcid" type="text">'
html_orcid += ' <a id="orcid_suggestion" class="btn btn-default" href="#">'
html_orcid += '<span class="pid hidden">%s</span>%s</a></div></form>' % (
orcid_data['pid'], 'Submit Suggestion')
if loading:
html_orcid = self.loading_html()
if add_box:
orcid_box = self.tmpl_print_searchresultbox('orcid', html_head, html_orcid)
return orcid_box, modal
else:
return html_orcid, modal
def tmpl_claim_paper_box(self, claim_paper_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = _("""<span title="When you add more publications you make sure your publication list and citations appear correctly on your profile.
You can also assign publications to other authors. This will help %s provide more accurate publication and citation statistics. "><strong> Manage publications </strong><span>""" % BIBAUTHORID_CFG_SITE_NAME)
html_claim_paper = ("")
link = "%s/author/claim/%s?open_claim=True" % (CFG_SITE_URL, claim_paper_data['canonical_id'])
text = _("Manage publication list")
        html_claim_paper += 'Assign publications to your %s profile to keep it up to date. <br><br> <a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (
BIBAUTHORID_CFG_SITE_NAME, link, text)
if loading:
html_claim_paper = self.loading_html()
if add_box:
claim_paper_box = self.tmpl_print_searchresultbox('claim_paper', html_head, html_claim_paper)
return claim_paper_box
else:
return html_claim_paper
def tmpl_orcid_message(self, orcid_info, ln):
_ = gettext_set_language(ln)
html = ''
if orcid_info == 'running':
html = ('<div class="alert alert-info" role="alert">%s</div>' % _('Request \
for pushing ORCID data is being processed. \
Your works will be available in ORCID database soon.'))
elif orcid_info == 'finished':
html = ('<div class="alert alert-success" role="alert">%s</div>' % _('Your \
request for pushing ORCID data was processed succesfully. \
Your works are available in ORCID database.'))
elif orcid_info == 'error':
html = ('<div class="alert alert-danger" role="alert">%s</div>' % _('An \
error occurred when INSPIRE was processing your ORCID data push \
request. Our developers were informed of the issue and \
will fix it.'))
elif orcid_info == 'wrong_account':
html = ('<div class="alert alert-danger" role="alert">%s</div>' % _('You \
authenticated correctly to ORCID, but you are using a different \
account than the one that is connected to your profile on INSPIRE. \
                We will not allow you to push your works to a different account. \
If you want to change your ORCID on your INSPIRE profile, \
please contact our staff.'))
return html
def tmpl_ext_ids_box(self, personid, int_ids_data, ext_ids_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = _("<strong> Person identifiers, internal and external </strong>")
html_ext_ids = 'This is personID: %s <br>' % personid
html_ext_ids += '<div> <strong> External ids: </strong><br>'
        # if the user has permission to add/remove ids, i.e. the profile belongs to them or they are an admin
if ext_ids_data['person_id'] == ext_ids_data['user_pid'] or ext_ids_data['ulevel'] == "admin":
add_text = _('add external id')
add_parameter = 'add_external_id'
remove_text = _('delete selected ids')
remove_parameter = 'delete_external_ids'
add_missing_text = _('Harvest missing external ids from claimed papers')
add_missing_parameter = 'add_missing_external_ids'
else:
add_text = _('suggest external id to add')
add_parameter = 'suggest_external_id_to_add'
remove_text = _('suggest selected ids to delete')
remove_parameter = 'suggest_external_ids_to_delete'
add_missing_text = _('suggest missing ids')
add_missing_parameter = 'suggest_missing_external_ids'
html_ext_ids += '<form method="GET" action="%s/author/claim/action" rel="nofollow">' % (CFG_SITE_URL)
html_ext_ids += '<input type="hidden" name="%s" value="True">' % (add_missing_parameter,)
html_ext_ids += '<input type="hidden" name="pid" value="%s">' % ext_ids_data['person_id']
html_ext_ids += '<br> <input type="submit" class="btn btn-default" value="%s"> </form>' % (add_missing_text,)
if 'ext_ids' in ext_ids_data and ext_ids_data['ext_ids']:
html_ext_ids += '<form method="GET" action="%s/author/claim/action" rel="nofollow">' % (CFG_SITE_URL)
html_ext_ids += ' <input type="hidden" name="%s" value="True">' % (remove_parameter,)
html_ext_ids += ' <input type="hidden" name="pid" value="%s">' % ext_ids_data['person_id']
for key in ext_ids_data['ext_ids']:
try:
sys = [
system for system in PERSONID_EXTERNAL_IDENTIFIER_MAP if PERSONID_EXTERNAL_IDENTIFIER_MAP[system] == key][0]
                except IndexError:
sys = ''
for id_value in ext_ids_data['ext_ids'][key]:
html_ext_ids += '<br> <input type="checkbox" name="existing_ext_ids" value="%s||%s"> <strong> %s: </strong> %s' % (
key, id_value, sys, id_value)
html_ext_ids += ' <br> <br> <input type="submit" class="btn btn-default" value="%s"> <br> </form>' % (
remove_text,)
else:
            html_ext_ids += 'UserID: There are no external ids associated with this profile!'
html_ext_ids += '<br> <br>'
html_ext_ids += '<form method="GET" action="%s/author/claim/action" rel="nofollow">' % (CFG_SITE_URL)
html_ext_ids += ' <input type="hidden" name="%s" value="True">' % (add_parameter,)
html_ext_ids += ' <input type="hidden" name="pid" value="%s">' % ext_ids_data['person_id']
html_ext_ids += ' <select name="ext_system">'
html_ext_ids += ' <option value="" selected>-- ' + self._('Choose system') + ' --</option>'
for el in PERSONID_EXTERNAL_IDENTIFIER_MAP:
html_ext_ids += ' <option value="%s"> %s </option>' % (PERSONID_EXTERNAL_IDENTIFIER_MAP[el], el)
html_ext_ids += ' </select>'
html_ext_ids += ' <input type="text" name="ext_id" id="ext_id" style="border:1px solid #333; width:350px;">'
html_ext_ids += ' <input type="submit" class="btn btn-default" value="%s" >' % (add_text,)
# html_ext_ids += '<br>NOTE: please note that if you add an external id it
# will replace the previous one (if any).')
html_ext_ids += '<br> </form> </div>'
html_ext_ids += '<br> <div> <strong> Inspire user ID: </strong> <br>'
html_ext_ids += "Current user id: %s <br>" % repr(int_ids_data['uid'])
html_ext_ids += "Previous user ids: %s <br> " % repr(int_ids_data['old_uids'])
html_ext_ids += '<br>'
html_ext_ids += '<form method="GET" action="%s/author/claim/action" rel="nofollow">' % (CFG_SITE_URL)
html_ext_ids += ' <input type="text" name="uid" id="uid" style="border:1px solid #333; width:350px;">'
html_ext_ids += ' <input type="hidden" name="%s" value="True">' % ('set_uid',)
html_ext_ids += ' <input type="hidden" name="pid" value="%s">' % ext_ids_data['person_id']
html_ext_ids += ' <input type="submit" class="btn btn-default" value="%s"> </form>' % (
'Set (steal!) user id',)
html_ext_ids += '</div>'
if loading:
html_ext_ids += self.loading_html()
if add_box:
ext_ids_box = self.tmpl_print_searchresultbox('external_ids', html_head, html_ext_ids)
return ext_ids_box
else:
return html_ext_ids
# for ajax requests add_box and loading are false
def tmpl_autoclaim_box(self, autoclaim_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = None
if autoclaim_data['hidden']:
return None
html_head = _("""<span title="You don’t need to add all your publications one by one.
This list contains all your publications that were automatically assigned to your INSPIRE profile through arXiv and ORCiD. "><strong> Automatically assigned publications </strong> </span>""")
if loading:
if autoclaim_data['num_of_claims'] == 0:
html_autoclaim = ''
else:
html_autoclaim = _("<span id=\"autoClaimMessage\">Please wait as we are assigning %s papers from external systems to your"
" Inspire profile</span></br>" % (str(autoclaim_data["num_of_claims"])))
html_autoclaim += self.loading_html()
else:
html_autoclaim = ''
if "unsuccessful_recids" in autoclaim_data.keys() and autoclaim_data["unsuccessful_recids"]:
message = ''
if autoclaim_data["num_of_unsuccessful_recids"] > 1:
message = _(
"The following %s publications need your review before they can be assigned to your profile:" %
(str(autoclaim_data["num_of_unsuccessful_recids"]),))
else:
message = _(
"The following publications need your review before they can be assigned to your profile:")
html_autoclaim += "<br><span id=\"autoClaimUnSuccessMessage\">%s</span></br>" % (message,)
html_autoclaim += '<div style="border:2px;height:100px;overflow:scroll;overflow-y:auto;overflow-x:auto;">'
html_autoclaim += '<br><strong>Publication title</strong> <ol type="1"> <br>'
for rec in autoclaim_data['unsuccessful_recids']:
html_autoclaim += '<li> <a href="%s/record/%s"> <b> ' % (
CFG_SITE_URL,
rec) + autoclaim_data['recids_to_external_ids'][rec] + '</b></a></li>\n'
html_autoclaim += '</ol><br>\n</div>'
link = "%s/author/ticket/review_autoclaim" % CFG_SITE_URL
text = _("Review assigning")
html_autoclaim += '<br><span class=\"bsw\"><a rel="nofollow" href="%s" class="btn btn-default">%s</a></span><br><br>' % (
link, text)
if "successful_recids" in autoclaim_data.keys() and autoclaim_data["successful_recids"]:
message = _('The following publications have been successfully assigned to your profile:\n')
html_autoclaim += "<span id=\"autoClaimSuccessMessage\">%s</span><br>" % (message,)
html_autoclaim += '<div style="border:2px;height:300px;overflow:scroll;overflow-y:auto;overflow-x:auto;">'
html_autoclaim += '<br><strong>Publication title</strong> <ol type="1" style="padding-left:20px"> <br>'
for rec in autoclaim_data['successful_recids']:
html_autoclaim += '<li> <a href="%s/record/%s"> <b> ' % (
CFG_SITE_URL,
rec) + autoclaim_data['recids_to_external_ids'][rec] + '</b></a></li>\n'
html_autoclaim += '</ol><br>\n</div>'
if not html_autoclaim:
html_autoclaim = 'There are no publications to be automatically assigned'
if add_box:
autoclaim_box = self.tmpl_print_searchresultbox('autoclaim', html_head, html_autoclaim)
return autoclaim_box
else:
return '<div id="autoclaim"> %s </div>' % html_autoclaim
def tmpl_support_box(self, support_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
help_link = "%s/author/manage_profile/contact-us" % (CFG_SITE_URL)
help_text = _("Contact Form")
html_head = _("<strong> Contact </strong>")
        html_support = _(
            "Please contact our user support if you need help or you just want to suggest new ideas. We will get back to you.<br>")
html_support += '<br><a rel="nofollow" href="%s" class="btn btn-default contactTrigger">%s</a>' % (help_link, help_text)
if loading:
html_support = self.loading_html()
if add_box:
support_box = self.tmpl_print_searchresultbox('support', html_head, html_support)
return support_box
else:
return html_support
def tmpl_merge_box(self, merge_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
html_head = _("""<span title="It sometimes happens that somebody's publications are scattered among two or more profiles for various reasons
(different spelling, change of name, multiple people with the same name). You can merge a set of profiles together.
This will assign all the information (including publications, IDs and citations) to the profile you choose as a primary profile.
        After the merging only the primary profile will exist in the system and all others will be automatically deleted. "><strong> Merge profiles </strong></span>""")
html_merge = _(
"If your or somebody else's publications in %s exist in multiple profiles, you can fix that here. </br>" %
BIBAUTHORID_CFG_SITE_NAME)
merge_link = "%s/author/merge_profiles?search_param=%s&primary_profile=%s" % (
CFG_SITE_URL, merge_data['search_param'], merge_data['canonical_id'])
merge_text = _("Merge profiles")
html_merge += '<br><a rel="nofollow" href="%s" class="btn btn-default">%s</a>' % (merge_link, merge_text)
if loading:
html_merge = self.loading_html()
if add_box:
merge_box = self.tmpl_print_searchresultbox('merge', html_head, html_merge)
return merge_box
else:
return html_merge
def tmpl_hepnames_box(self, hepnames_data, ln, add_box=True, loading=True):
_ = gettext_set_language(ln)
if not loading:
try:
heprec = str(hepnames_data['heprecord'][0])
except (TypeError, KeyError, IndexError):
heprec = ''
if hepnames_data['HaveHep']:
contents = hepnames_data['heprecord']
else:
contents = ''
if not hepnames_data['HaveChoices']:
contents += ("There is no HepNames record associated with this profile. "
"<a href='http://slac.stanford.edu/spires/hepnames/additions.shtml'> Create a new one! </a> <br>"
"The new HepNames record will be visible and associated <br> to this author "
"after manual revision, usually within a few days.")
else:
#<a href="mailto:[email protected]?subject=title&body=something">Mail Me</a>
contents += ("There is no unique HepNames record associated "
"with this profile. <br> Please tell us if you think it is one of "
"the following, or <a href='http://slac.stanford.edu/spires/hepnames/additions.shtml'> Create a new one! </a> <br>"
"<br><br> Possible choices are: ")
# mailbody = ("Hello! Please connect the author profile %s "
# "with the HepNames record %s. Best regards" % (hepnames_data['cid'], '%s'))
# mailstr = '<form method="GET" action="%s/author/manage_profile/connect_author_with_hepname" rel="nofollow">' \
# '<input type="hidden" name="cname" value="%s">' \
# '<input type="hidden" name="hepname" value="%s">' \
# '<input type="submit" class="btn btn-default" value="%s"> </form>' % (CFG_SITE_URL, hepnames_data['cid'], '%s', 'This is the right one!',)
# mailstr = ('''<class="choose_hepname" cname="%s" hepname_rec=%s> This is the right one! </class="choose_hepname">''' % (hepnames_data['cid'], '%s'))
# mailstr = ('''<a href='mailto:%s?subject=HepNames record match: %s %s&body=%s'>'''
# '''This is the right one!</a>''' % ('%s', hepnames_data['cid'], heprec, '%s'))
mailstr = (
'''<a id="hepname_connection" class="btn btn-default" href="#"><span class="cname hidden">%s</span><span class="hepname hidden">%s</span>%s</a>''' %
(hepnames_data['cid'], '%s', 'This is the right one!'))
choices = ['<tr><td>' + x[0] + '</td><td> </td><td align="right">' + mailstr % x[1] + '</td></tr>'
for x in hepnames_data['HepChoices']]
contents += '<table>' + ' '.join(choices) + '</table>'
else:
contents = self.loading_html()
if not add_box:
return contents
else:
return self.tmpl_print_searchresultbox('hepdata', '<strong> HepNames data </strong>', contents)
def tmpl_open_table(self, width_pcnt=False, cell_padding=False, height_pcnt=False):
options = []
if height_pcnt:
options.append('height=%s' % height_pcnt)
if width_pcnt:
options.append('width=%s' % width_pcnt)
else:
options.append('width=100%')
if cell_padding:
options.append('cellpadding=%s' % cell_padding)
else:
options.append('cellpadding=0')
return '<table border=0 %s >' % ' '.join(options)
def tmpl_close_table(self):
return "</table>"
def tmpl_open_row(self):
return "<tr>"
def tmpl_close_row(self):
return "</tr>"
def tmpl_open_col(self):
return "<td valign='top'>"
def tmpl_close_col(self):
return "</td>"
def _grid(self, rows, cols, table_width=False, cell_padding=False):
tmpl = self
def cont(*boxes):
out = []
h = out.append
idx = 0
h(tmpl.tmpl_open_table(width_pcnt=table_width, cell_padding=cell_padding))
for _ in range(rows):
h(tmpl.tmpl_open_row())
for _ in range(cols):
h(tmpl.tmpl_open_col())
h(boxes[idx])
idx += 1
h(tmpl.tmpl_close_col())
h(tmpl.tmpl_close_row())
h(tmpl.tmpl_close_table())
return '\n'.join(out)
return cont
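    # Usage sketch (the box variables below are hypothetical): the returned
    # continuation lays the given boxes into the table row by row, consuming
    # them left to right, e.g.
    #   cont = self._grid(2, 2)
    #   html = cont(arxiv_box, orcid_box, claim_box, support_box)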
verbiage_dict = {'guest': {'confirmed': 'Papers',
'repealed': 'Papers removed from this profile',
'review': 'Papers in need of review',
'tickets': 'Open Tickets', 'data': 'Data',
'confirmed_ns': 'Papers of this Person',
'repealed_ns': 'Papers _not_ of this Person',
'review_ns': 'Papers in need of review',
'tickets_ns': 'Tickets for this Person',
'data_ns': 'Additional Data for this Person'},
'user': {'owner': {'confirmed': 'Your papers',
'repealed': 'Not your papers',
'review': 'Papers in need of review',
'tickets': 'Your tickets', 'data': 'Data',
'confirmed_ns': 'Your papers',
'repealed_ns': 'Not your papers',
'review_ns': 'Papers in need of review',
'tickets_ns': 'Your tickets',
'data_ns': 'Additional Data for this Person'},
'not_owner': {'confirmed': 'Papers',
'repealed': 'Papers removed from this profile',
'review': 'Papers in need of review',
'tickets': 'Your tickets', 'data': 'Data',
'confirmed_ns': 'Papers of this Person',
'repealed_ns': 'Papers _not_ of this Person',
'review_ns': 'Papers in need of review',
'tickets_ns': 'Tickets you created about this person',
'data_ns': 'Additional Data for this Person'}},
'admin': {'confirmed': 'Papers',
'repealed': 'Papers removed from this profile',
'review': 'Papers in need of review',
'tickets': 'Tickets', 'data': 'Data',
'confirmed_ns': 'Papers of this Person',
'repealed_ns': 'Papers _not_ of this Person',
'review_ns': 'Papers in need of review',
'tickets_ns': 'Request Tickets',
'data_ns': 'Additional Data for this Person'}}
buttons_verbiage_dict = {
'guest': {'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'Yes, those papers are by this person.',
'b_repeal': 'No, those papers are not by this person',
'b_to_others': 'Assign to another person',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Confirm!',
'confirm_text': 'Yes, this paper is by this person.',
'alt_repeal': 'Rejected!',
'repeal_text': 'No, this paper is <i>not</i> by this person',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_confirmed': {'alt_confirm': 'Confirmed.',
'confirm_text': 'Marked as this person\'s paper',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repeal!',
'repeal_text': 'But it\'s <i>not</i> this person\'s paper.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_repealed': {'alt_confirm': 'Confirm!',
'confirm_text': 'But it <i>is</i> this person\'s paper.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repealed',
'repeal_text': 'Marked as not this person\'s paper',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'}},
'user': {'owner': {'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'These are mine!',
'b_repeal': 'These are not mine!',
'b_to_others': 'It\'s not mine, but I know whose it is!',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Mine!',
'confirm_text': 'This is my paper!',
'alt_repeal': 'Not mine!',
'repeal_text': 'This is not my paper!',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_confirmed': {'alt_confirm': 'Not Mine.',
'confirm_text': 'Marked as my paper!',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget assignment decision',
'alt_repeal': 'Not Mine!',
'repeal_text': 'But this is not mine!',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_repealed': {'alt_confirm': 'Mine!',
'confirm_text': 'But this is my paper!',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision!',
'alt_repeal': 'Not Mine!',
'repeal_text': 'Marked as not your paper.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'}},
'not_owner': {'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'Yes, those papers are by this person.',
'b_repeal': 'No, those papers are not by this person',
'b_to_others': 'Assign to another person',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Confirm!',
'confirm_text':
'Yes, this paper is by this person.',
'alt_repeal': 'Rejected!',
'repeal_text':
'No, this paper is <i>not</i> by this person',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_confirmed': {'alt_confirm': 'Confirmed.',
'confirm_text': 'Marked as this person\'s paper',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repeal!',
'repeal_text':
'But it\'s <i>not</i> this person\'s paper.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_repealed': {'alt_confirm': 'Confirm!',
'confirm_text':
'But it <i>is</i> this person\'s paper.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repealed',
'repeal_text': 'Marked as not this person\'s paper',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'}}},
'admin': {'mass_buttons': {'no_doc_string': 'Sorry, there are currently no documents to be found in this category.',
'b_confirm': 'Yes, those papers are by this person.',
'b_repeal': 'No, those papers are not by this person',
'b_to_others': 'Assign to another person',
'b_forget': 'Forget decision'},
'record_undecided': {'alt_confirm': 'Confirm!',
'confirm_text': 'Yes, this paper is by this person.',
'alt_repeal': 'Rejected!',
'repeal_text': 'No, this paper is <i>not</i> by this person',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_confirmed': {'alt_confirm': 'Confirmed.',
'confirm_text': 'Marked as this person\'s paper',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repeal!',
'repeal_text': 'But it\'s <i>not</i> this person\'s paper.',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'},
'record_repealed': {'alt_confirm': 'Confirm!',
'confirm_text': 'But it <i>is</i> this person\'s paper.',
'alt_forget': 'Forget decision!',
'forget_text': 'Forget decision.',
'alt_repeal': 'Repealed',
'repeal_text': 'Marked as not this person\'s paper',
'to_other_text': 'Assign to another person',
'alt_to_other': 'To other person!'}}}
| gpl-2.0 | 261,724,183,691,193,570 | 52.27154 | 267 | 0.493982 | false |
iw3hxn/LibrERP | mrp_bom_history/wizard/bom_add_note.py | 1 | 1132 | from mail.mail_message import truncate_text
from openerp.osv import orm, fields
from tools.translate import _
class bom_add_note(orm.TransientModel):
"""Adds a new note to the case."""
_name = 'bom.add.note'
_description = "Add Internal Note"
_columns = {
'body': fields.text('Note Body', required=True),
}
def action_add(self, cr, uid, ids, context=None):
if context is None:
context = {}
if not context.get('active_model'):
            raise orm.except_orm(_('Error'), _('Cannot add note!'))
model = context.get('active_model')
case_pool = self.pool.get(model)
for obj in self.browse(cr, uid, ids, context=context):
case_list = case_pool.browse(cr, uid, context['active_ids'],
context=context)
case = case_list[0]
case_pool.message_append(cr, uid, [case], truncate_text(obj.body),
body_text=obj.body)
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -8,780,630,179,386,807,000 | 32.294118 | 78 | 0.576855 | false |
myd7349/Ongoing-Study | python/fileutil.py | 1 | 9062 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# 2015-03-24T08:33+08:00
# 2015-07-07T11:26+08:00 Use temporary file when running unit test
import contextlib
import functools
import hashlib
import io
import os
import sys
try:
import chardet.universaldetector
_chardet_available = True
except ImportError:
_chardet_available = False
__all__ = ['is_file_obj', 'file_size', 'file_name', 'file_title', 'replace_ext',
           'open_file', 'get_file_encoding', 'get_md5']
def is_file_obj(f):
return all(map(functools.partial(hasattr, f),
['read', 'fileno', 'seek', 'tell']))
def file_size(f):
"""Get the size of given file.
f: File name or an opened file object
"""
if isinstance(f, str):
return os.stat(f).st_size
if not is_file_obj(f):
raise ValueError('Invalid file object')
try:
return os.fstat(f.fileno()).st_size
except io.UnsupportedOperation:
prev_pos = f.tell()
size = f.seek(0, os.SEEK_END)
f.seek(prev_pos, os.SEEK_SET)
return size
def file_name(f):
"""Retrieve the name of given file.
f: File name or an opened file object
"""
if isinstance(f, str):
return f
if is_file_obj(f):
return getattr(f, 'name', '')
else:
return ''
def file_title(f):
"""Get the title of given file: no path, no extension
f: File name or an opened file object
"""
fn = file_name(f)
return os.path.basename(os.path.splitext(fn)[0])
def replace_ext(file, new_ext, prefix='', suffix=''):
"""Produce a new file name based on input file name with its extension
replaced with a new extension.
file: Source file name
new_ext: The new extension to be used. If it is None, then the extension
stays unchange, but the prefix and suffix are inserted.
prefix: Prefix prepended the file name
suffix: Suffix inserted between file name and extension
"""
fn = file_name(file)
root, ext = os.path.splitext(fn)
if new_ext is None:
new_ext = ext
return prefix + root + suffix + new_ext if ext else prefix + fn + suffix + new_ext
@contextlib.contextmanager
def open_file(file, *args, **kwargs):
"""Sometimes when we wrote a function that accepts a file object, we also
want it be able to deal with file name. So some code like this:
def foo(f):
'Some function that handling an input file.'
if isinstance(f, str):
# A file name is passed in, let's open it.
fp = open(f, 'r')
else:
# Suppose that an opened file object is passed in.
assert hasattr(f, 'read')
fp = f
has been written.
When it comes to a file name, it will be nice to wrap it with a `with` statement,
like this:
with open(f, 'r') as fp:
pass
With this class, we can rewrite `foo` like this:
def foo(f):
'Some function that handling an input file.'
with open_file(f, 'r') as fp:
pass
and if you passed a file name to `foo`, the file will be automatically closed at
the end; if you passed an opened file object, however, open_file will do nothing.
Sounds nice, hah?
"""
is_file_name = isinstance(file, str)
fp = open(file, *args, **kwargs) if is_file_name else file
try:
yield fp
finally:
if is_file_name:
fp.close()
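# A minimal usage sketch for open_file (hypothetical file name; kept as comments
# so the module stays import-safe). Both branches share one body, and the file is
# closed automatically only when a file *name* was passed in:
#   with open_file('notes.txt', 'r') as fp:   # opened here, closed on exit
#       print(fp.read())
#   buf = io.BytesIO(b'data')
#   with open_file(buf) as fp:                # caller's object, left open on exit
#       print(fp.read())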
# TODO: unittest
def get_file_encoding(filename, guess=None):
if _chardet_available:
detector = chardet.universaldetector.UniversalDetector()
with open(filename, 'rb') as fp:
for line in fp:
detector.feed(line)
if detector.done:
break
detector.close()
if detector.result['confidence'] > 0.95:
return detector.result['encoding']
return guess if guess else sys.getfilesystemencoding()
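# Usage sketch (hypothetical file): the chardet result is returned only when its
# confidence is above 0.95; otherwise the caller's guess, falling back to the
# filesystem encoding:
#   enc = get_file_encoding('legacy_notes.txt', guess='latin-1')
#   with open('legacy_notes.txt', encoding=enc) as fp:
#       text = fp.read()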
# TODO: unittest
def get_md5(filename, block_size=1024*1024):
"""Calculate the MD5 digest of given file."""
with open(filename, 'rb') as fp:
md5 = hashlib.md5()
# Read a binary file elegantly:
# http://stackoverflow.com/questions/1035340/reading-binary-file-in-python
# or you want to make a binary file object iterable?
# http://stackoverflow.com/questions/4566498/python-file-iterator-over-a-binary-file-with-newer-idiom
data = fp.read(block_size)
while data:
md5.update(data)
data = fp.read(block_size)
        return md5.hexdigest()
if __name__ == '__main__':
import tempfile
import unittest
test_data = b'>>> Hello, world!'
class TestFileSize(unittest.TestCase):
def test_regular_file(self):
with tempfile.TemporaryFile() as fp:
fp.write(test_data)
fp.flush()
self.assertEqual(file_size(fp), len(test_data))
def test_bytes_buffer(self):
pseudo_file = io.BytesIO(test_data)
self.assertRaises(io.UnsupportedOperation, pseudo_file.fileno)
self.assertEqual(pseudo_file.read(5), test_data[:5])
self.assertEqual(pseudo_file.tell(), 5)
self.assertEqual(file_size(pseudo_file), len(test_data))
self.assertEqual(pseudo_file.tell(), 5)
self.assertEqual(pseudo_file.read(), test_data[5:])
class TestFileName(unittest.TestCase):
def setUp(self):
fd, self._filename = tempfile.mkstemp()
os.close(fd)
def tearDown(self):
os.unlink(self._filename)
def test_file_name(self):
self.assertEqual(file_name(self._filename), self._filename)
def test_file_object(self):
with open(self._filename, 'w') as fp:
self.assertEqual(file_name(fp), self._filename)
def test_buffer_object(self):
pseudo_file = io.BytesIO()
self.assertEqual(file_name(pseudo_file), '')
class TestFileTitle(unittest.TestCase):
def test(self):
self.assertEqual(file_title('a.out'), 'a')
self.assertEqual(file_title('/home/me/a.out'), 'a')
self.assertEqual(file_title('.gitconfig'), '.gitconfig')
class TestReplaceExt(unittest.TestCase):
def setUp(self):
self._file = 'a.out'
def test(self):
self.assertEqual(replace_ext(self._file, '.exe'), 'a.exe')
file2 = '.cshrc'
self.assertEqual(replace_ext(file2, '.conf'), file2 + '.conf')
self.assertEqual(replace_ext('foo', '.dcm'), 'foo.dcm')
def test_prefix_suffix(self):
self.assertEqual(replace_ext(self._file, '.exe', 'WOW-', '-2'), 'WOW-a-2.exe')
self.assertEqual(replace_ext('foo', '.dcm', suffix='-0'), 'foo-0.dcm')
self.assertEqual(replace_ext('foo', '.dcm', suffix='-1'), 'foo-1.dcm')
self.assertEqual(replace_ext('foo', '.dcm', suffix='-2'), 'foo-2.dcm')
def test_new_ext_as_None(self):
self.assertEqual(replace_ext('BigFish.jpeg', None, 'Screenshot-', '-2015-07-12'),
'Screenshot-BigFish-2015-07-12.jpeg')
class TestOpenFile(unittest.TestCase):
def setUp(self):
fd, self._filename = tempfile.mkstemp()
os.close(fd)
def tearDown(self):
os.unlink(self._filename)
def test_pass_a_file_name(self):
with open_file(self._filename, 'wb') as fp:
fp.write(test_data)
self.assertTrue(fp.closed)
def test_pass_a_file_object(self):
f = open(self._filename, 'w+b')
with open_file(f, 'w+b') as fp:
fp.write(test_data)
self.assertFalse(f.closed)
f.seek(0, 0)
self.assertEqual(f.read(), test_data)
f.close()
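
    class TestGetMd5(unittest.TestCase):
        """Minimal sketch covering the 'TODO: unittest' for get_md5 above; it
        assumes nothing beyond hashlib's documented behaviour."""
        def setUp(self):
            # Write the shared test payload to a fresh temporary file
            fd, self._filename = tempfile.mkstemp()
            os.write(fd, test_data)
            os.close(fd)

        def tearDown(self):
            os.unlink(self._filename)

        def test_digest_matches_hashlib(self):
            # get_md5 reads the file in blocks; the digest must equal a
            # one-shot hashlib.md5 over the same bytes
            self.assertEqual(get_md5(self._filename),
                             hashlib.md5(test_data).hexdigest())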
unittest.main()
# References:
# [Does filehandle get closed automatically in Python after it goes out of scope?](http://stackoverflow.com/questions/2404430/does-filehandle-get-closed-automatically-in-python-after-it-goes-out-of-scope)
# [Does a File Object Automatically Close when its Reference Count Hits Zero?](http://stackoverflow.com/questions/1834556/does-a-file-object-automatically-close-when-its-reference-count-hits-zero)
# [Explaining Python's '__enter__' and '__exit__'](http://stackoverflow.com/questions/1984325/explaining-pythons-enter-and-exit)
# [Is close() necessary when using iterator on a Python file object](http://stackoverflow.com/questions/1832528/is-close-necessary-when-using-iterator-on-a-python-file-object)
# [Python file with closing automatically](http://www.peterbe.com/plog/python-file-with-closing-automatically)
# [Size of an open file object](http://stackoverflow.com/questions/283707/size-of-an-open-file-object)
# [Understanding Python's "with" statement](http://effbot.org/zone/python-with-statement.htm)
| lgpl-3.0 | 4,613,072,303,637,980,000 | 32.072993 | 204 | 0.603068 | false |
glennguy/plugin.video.9now | resources/lib/menu.py | 1 | 4419 | import xbmcgui
import xbmcplugin
import comm
import config
import sys
import urlparse
import urllib
from aussieaddonscommon import utils
_url = sys.argv[0]
_handle = int(sys.argv[1])
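# The handlers below build and parse plugin:// URLs of this general form
# (hypothetical values):
#   plugin://plugin.video.9now/?action=listcategories&category=genre&genre=Drama
# _url carries the plugin base URL and _handle identifies the directory listing
# being built for Kodi.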
def list_categories():
"""
Make initial list
"""
try:
listing = []
categories = config.CATEGORIES
for category in categories:
li = xbmcgui.ListItem(category)
url_string = '{0}?action=listcategories&category={1}'
url = url_string.format(_url, category)
is_folder = True
listing.append((url, li, is_folder))
genres = comm.list_genres()
for g in genres:
li = xbmcgui.ListItem(g.title, iconImage=g.thumb,
thumbnailImage=g.thumb)
li.setArt({'fanart': g.fanart})
url_string = '{0}?action=listcategories&category=genre&genre={1}'
url = url_string.format(_url, g.title)
is_folder = True
listing.append((url, li, is_folder))
li = xbmcgui.ListItem('Settings')
listing.append(('{0}?action=settings'.format(_url), li, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
except Exception:
utils.handle_error('Unable to list categories')
def make_episodes_list(url):
""" Make list of episode Listitems for Kodi"""
try:
params = dict(urlparse.parse_qsl(url))
episodes = comm.list_episodes(params)
listing = []
for e in episodes:
li = xbmcgui.ListItem(e.title, iconImage=e.thumb,
thumbnailImage=e.thumb)
li.setArt({'fanart': e.fanart})
url = '{0}?action=listepisodes{1}'.format(_url, e.make_kodi_url())
is_folder = False
li.setProperty('IsPlayable', 'true')
if e.drm is True:
li.setProperty('inputstreamaddon', 'inputstream.adaptive')
li.setInfo('video', {'plot': e.desc,
'plotoutline': e.desc,
'duration': e.duration,
'date': e.get_airdate()})
listing.append((url, li, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
except Exception:
utils.handle_error('Unable to list episodes')
def make_live_list(url):
""" Make list of channel Listitems for Kodi"""
try:
params = dict(urlparse.parse_qsl(url))
channels = comm.list_live(params)
listing = []
for c in channels:
li = xbmcgui.ListItem(c.title, iconImage=c.thumb,
thumbnailImage=c.thumb)
li.setArt({'fanart': c.fanart})
url = '{0}?action=listchannels{1}'.format(_url, c.make_kodi_url())
is_folder = False
li.setProperty('IsPlayable', 'true')
li.setInfo('video', {'plot': c.desc,
'plotoutline': c.episode_name})
listing.append((url, li, is_folder))
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
except Exception:
utils.handle_error('Unable to list channels')
def make_series_list(url):
""" Make list of series Listitems for Kodi"""
try:
params = dict(urlparse.parse_qsl(url))
series_list = comm.list_series()
filtered = []
if 'genre' in params:
for s in series_list:
if s.genre == urllib.unquote_plus(params['genre']):
filtered.append(s)
else:
filtered = series_list
listing = []
for s in filtered:
li = xbmcgui.ListItem(s.title, iconImage=s.thumb,
thumbnailImage=s.thumb)
li.setArt({'fanart': s.fanart})
url = '{0}?action=listseries{1}'.format(_url, s.make_kodi_url())
is_folder = True
listing.append((url, li, is_folder))
xbmcplugin.addSortMethod(
_handle, xbmcplugin.SORT_METHOD_LABEL_IGNORE_THE)
xbmcplugin.addDirectoryItems(_handle, listing, len(listing))
xbmcplugin.endOfDirectory(_handle)
except Exception:
utils.handle_error('Unable to list series')
| gpl-3.0 | -3,547,577,061,269,654,000 | 34.926829 | 78 | 0.560308 | false |
vrde/pandora | pandora/migrations/0001_initial.py | 1 | 2086 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Item'
db.create_table(u'pandora_item', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('filename', self.gf('django.db.models.fields.CharField')(unique=True, max_length=1024)),
('dt', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=1024)),
('description', self.gf('django.db.models.fields.TextField')(null=True)),
('mimetype', self.gf('django.db.models.fields.CharField')(max_length=255)),
('ip_address', self.gf('django.db.models.fields.IPAddressField')(max_length=15)),
('size', self.gf('django.db.models.fields.BigIntegerField')()),
))
db.send_create_signal(u'pandora', ['Item'])
def backwards(self, orm):
# Deleting model 'Item'
db.delete_table(u'pandora_item')
models = {
u'pandora.item': {
'Meta': {'ordering': "['-dt']", 'object_name': 'Item'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'dt': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'filename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '1024'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'mimetype': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1024'}),
'size': ('django.db.models.fields.BigIntegerField', [], {})
}
}
complete_apps = ['pandora'] | mit | 2,874,409,250,318,841,300 | 46.431818 | 108 | 0.584372 | false |
acil-bwh/SlicerCIP | Scripted/attic/CIP_GetImage/CIP_GetImage.py | 1 | 28162 | """ACIL_GetImage is a module developed for the internal use of the Applied Chest Imaging Laboratory to download
cases stored in MAD server via ssh.
It works on Unix, Mac, and Windows, and it uses an internal SSH key created specifically for this purpose, so the
user does not need an authorized SSH key installed.
First version: Jorge Onieva (ACIL, [email protected]). Sept 2014"""
import os, sys
from __main__ import vtk, qt, ctk, slicer
from collections import OrderedDict
import subprocess
# Add the CIP common library to the path if it has not been loaded yet
try:
from CIP.logic.SlicerUtil import SlicerUtil
except Exception as ex:
currentpath = os.path.dirname(os.path.realpath(__file__))
# We assume that CIP_Common is in the development structure
path = os.path.normpath(currentpath + '/../../Scripted/CIP_Common')
if not os.path.exists(path):
# We assume that CIP is a subfolder (Slicer behaviour)
path = os.path.normpath(currentpath + '/CIP')
sys.path.append(path)
print(("The following path was manually added to the PythonPath in CIP_GetImage: " + path))
from CIP.logic.SlicerUtil import SlicerUtil
from CIP.logic import Util
import CIP.ui as CIPUI
class CIP_GetImage:
"""Load cases from a SSH server or other device"""
def __init__(self, parent):
"""Constructor for main class"""
self.parent = parent
#ScriptedLoadableModule.__init__(self, parent)
self.parent.title = "CIP GetImage"
self.parent.categories = ["Chest Imaging Platform.Modules"]
self.parent.dependencies = []
self.parent.contributors = ["Jorge Onieva", "Applied Chest Imaging Laboratory", "Brigham and Women's Hospital"]
self.parent.helpText = "This is an internal module to load images from MAD repository via SSH"
self.parent.acknowledgementText = SlicerUtil.ACIL_AcknowledgementText
class CIP_GetImageWidget:
"""Visual object"""
# Study ids. Convention: Descriptive text (key) / Name of the folder in the server
studyIds = OrderedDict()
studyIds["Study 1"] = "Study1"
studyIds["Study 2"] = "Study2"
studyIds["Other"] = "Other"
# Image types. You can add as many as different volume types you have
# Convention:
# Descriptive text (key)
# Files extension (example: "processed").
imageTypes = OrderedDict()
imageTypes["CT"] = "" # Default. No extension
imageTypes["CT Processed"] = "processed" # Default. No extension
# Label maps types. Idem
# Convention:
# Descriptive text (key)
# Checked by default
# Files extension (example: case_partialLungLabelMap.nrrd)
labelMapTypes = OrderedDict()
labelMapTypes["Partial Lung"] = (False, "_partialLungLabelMap")
labelMapTypes["Body Composition"] = (False, "_bodyComposition")
labelMapTypes["Body Composition (interactive)"] = (False, "_interactiveBodyComposition")
def __init__(self, parent = None):
"""Widget constructor (existing module)"""
if not parent:
self.parent = slicer.qMRMLWidget()
self.parent.setLayout(qt.QVBoxLayout())
self.parent.setMRMLScene(slicer.mrmlScene)
else:
self.parent = parent
self.layout = self.parent.layout()
if not parent:
self.setup()
self.parent.show()
def setup(self):
"""Init the widget """
self.modulePath = SlicerUtil.getModuleFolder("CIP_GetImage")
self.resourcesPath = os.path.join(self.modulePath, "CIP_GetImage_Resources")
self.StudyId = ""
self.logic = CIP_GetImageLogic(self.modulePath)
# Widget to load cases faster
self.loadSaveDatabuttonsWidget = CIPUI.LoadSaveDataWidget(parentWidget=self.parent)
self.loadSaveDatabuttonsWidget.setup(moduleName="CIP_GetImage")
#
# Obligatory parameters area
#
parametersCollapsibleButton = ctk.ctkCollapsibleButton()
parametersCollapsibleButton.text = "Image data"
self.layout.addWidget(parametersCollapsibleButton)
parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton)
# Study radio buttons
label = qt.QLabel()
label.text = "Select the study:"
parametersFormLayout.addRow(label)
self.rbgStudy=qt.QButtonGroup()
for key in self.studyIds:
rbStudyid = qt.QRadioButton(key)
self.rbgStudy.addButton(rbStudyid)
parametersFormLayout.addWidget(rbStudyid)
self.txtOtherStudy = qt.QLineEdit()
self.txtOtherStudy.hide()
parametersFormLayout.addWidget(self.txtOtherStudy)
# Case id
self.txtCaseId = qt.QLineEdit()
parametersFormLayout.addRow("Case ID ", self.txtCaseId)
# Image types
label = qt.QLabel()
label.text = "Select the images that you want to load:"
parametersFormLayout.addRow(label)
self.cbsImageTypes = []
for key in self.imageTypes:
check = qt.QCheckBox()
check.checked = True
check.setText(key)
parametersFormLayout.addWidget(check)
self.cbsImageTypes.append(check)
# Label maps
label = qt.QLabel()
label.text = "Select the label maps that you want to load:"
parametersFormLayout.addRow(label)
# Labelmap types checkboxes
self.cbsLabelMapTypes = []
for key in self.labelMapTypes:
check = qt.QCheckBox()
check.setText(key)
check.checked = self.labelMapTypes[key][0]
parametersFormLayout.addWidget(check)
self.cbsLabelMapTypes.append(check)
# Load image Button
self.downloadButton = qt.QPushButton("Download")
self.downloadButton.toolTip = "Load the image"
#self.downloadButton.enabled = False
self.downloadButton.setStyleSheet("background-color: green; font-weight:bold; color:white" )
parametersFormLayout.addRow(self.downloadButton)
self.downloadButton.connect('clicked (bool)', self.onDownloadButton)
# Information message
self.lblDownloading = qt.QLabel()
self.lblDownloading.text = "Downloading images. Please wait..."
self.lblDownloading.hide()
parametersFormLayout.addRow(self.lblDownloading)
#
# Optional Parameters
#
optionalParametersCollapsibleButton = ctk.ctkCollapsibleButton()
optionalParametersCollapsibleButton.text = "Optional parameters"
self.layout.addWidget(optionalParametersCollapsibleButton)
optionalParametersFormLayout = qt.QFormLayout(optionalParametersCollapsibleButton)
# Local storage (Slicer temporary path)
self.localStoragePath = "{0}/CIP".format(slicer.app.temporaryPath)
if not os.path.exists(self.localStoragePath):
os.makedirs(self.localStoragePath)
# Make sure that everybody has write permissions (sometimes there are problems because of umask)
os.chmod(self.localStoragePath, 0o777)
self.storagePathButton = ctk.ctkDirectoryButton()
self.storagePathButton.directory = self.localStoragePath
optionalParametersFormLayout.addRow("Local directory: ", self.storagePathButton)
# Connection type (SSH, "normal")
label = qt.QLabel()
label.text = "Connection type:"
optionalParametersFormLayout.addRow(label)
self.rbgConnectionType=qt.QButtonGroup()
self.rbSSH = qt.QRadioButton("SSH (secure connection)")
self.rbSSH.setChecked(True)
self.rbgConnectionType.addButton(self.rbSSH)
optionalParametersFormLayout.addWidget(self.rbSSH)
self.rbCP = qt.QRadioButton("Common")
self.rbgConnectionType.addButton(self.rbCP)
optionalParametersFormLayout.addWidget(self.rbCP)
# SSH Server login
self.txtServer = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "server", "This is your ssh user and server. Example: [email protected]")
        self.txtServer.text = s  # This is your ssh user and server. Example: [email protected]
optionalParametersFormLayout.addRow("Server:", self.txtServer)
# Server root path
self.txtServerpath = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "serverRootPath", "This is your root path to search for files. Ex: /Cases/Processed")
self.txtServerpath.text = s # This is your root path to search for files. Ex: /Cases/Processed
optionalParametersFormLayout.addRow("Server root path:", self.txtServerpath)
# SSH Private key
self.txtPrivateKeySSH = qt.QLineEdit()
s = SlicerUtil.settingGetOrSetDefault("CIP_GetImage", "sshKey", "")
        self.txtPrivateKeySSH.text = s  # this is the full path to your ssh key if you need it. Be aware of Unix/Windows compatibility (hint: use os.path.join)
        # Please notice that you won't need an SSH key if your computer already has one locally installed
optionalParametersFormLayout.addRow("SSH private key (leave blank for computer's default): ", self.txtPrivateKeySSH)
# Cache mode
self.cbCacheMode = qt.QCheckBox("Cache mode activated")
self.cbCacheMode.setChecked(True) # Cache mode is activated by default
optionalParametersFormLayout.addRow("", self.cbCacheMode)
# Clean cache Button
self.cleanCacheButton = qt.QPushButton("Clean cache")
self.cleanCacheButton.toolTip = "Remove all the local cached files"
optionalParametersFormLayout.addRow(self.cleanCacheButton)
optionalParametersCollapsibleButton.collapsed = True
if SlicerUtil.IsDevelopment:
# reload button
self.reloadButton = qt.QPushButton("Reload (just development)")
self.reloadButton.toolTip = "Reload this module (for development purposes)."
self.reloadButton.name = "Reload"
self.layout.addWidget(self.reloadButton)
self.reloadButton.connect('clicked()', self.onReload)
# Add vertical spacer
self.layout.addStretch(1)
# Connections
self.rbgStudy.connect("buttonClicked (QAbstractButton*)", self.onRbStudyClicked)
self.txtOtherStudy.connect("textEdited (QString)", self.onTxtOtherStudyEdited)
self.rbgConnectionType.connect("buttonClicked (QAbstractButton*)", self.onRbgConnectionType)
self.storagePathButton.connect("directorySelected(QString)", self.onTmpDirChanged)
self.cleanCacheButton.connect('clicked (bool)', self.onCleanCacheButtonClicked)
def saveSettings(self):
"""Save the current values in settings to reuse it in future sessions"""
SlicerUtil.setSetting("CIP_GetImage", "sshKey", self.txtPrivateKeySSH.text)
SlicerUtil.setSetting("CIP_GetImage", "server", self.txtServer.text)
SlicerUtil.setSetting("CIP_GetImage", "serverRootPath", self.txtServerpath.text)
def cleanup(self):
self.saveSettings()
#
# Events handling
#
def onDownloadButton(self):
"""Click in download button"""
# Check if there is a Study and Case introduced
self.CaseId = self.txtCaseId.text.strip()
if self.CaseId and self.StudyId:
self.lblDownloading.show()
slicer.app.processEvents()
# Get the selected image types and label maps
imageTypes = [self.imageTypes[cb.text] for cb in [check for check in self.cbsImageTypes if check.isChecked()]]
labelMapExtensions = [self.labelMapTypes[cb.text] for cb in [check for check in self.cbsLabelMapTypes if check.isChecked()]]
result = self.logic.loadCase(self.txtServer.text, self.txtServerpath.text, self.StudyId, self.txtCaseId.text, imageTypes, labelMapExtensions, self.localStoragePath, self.cbCacheMode.checkState(), self.rbSSH.isChecked(), self.txtPrivateKeySSH.text)
self.lblDownloading.hide()
if (result == Util.ERROR):
                self.msgBox = qt.QMessageBox(qt.QMessageBox.Warning, 'Error', "There was an error when downloading some of the images of this case. It is possible that some of the selected images were not available on the server. Please review the log console for more details.\nSuggested actions:\n-Empty cache\n-Restart Slicer")
self.msgBox.show()
else:
            # Show info message
self.msgBox = qt.QMessageBox(qt.QMessageBox.Information, 'Attention', "Please make sure that you have selected a study and a case")
self.msgBox.show()
def onRbStudyClicked(self, button):
"""Study radio buttons clicked (any of them)"""
self.StudyId = self.studyIds[button.text]
self.txtOtherStudy.visible = (button.text == "Other")
if (self.txtOtherStudy.visible):
self.StudyId = self.txtOtherStudy.text.strip()
#self.checkDownloadButtonEnabled()
def onRbgConnectionType(self, button):
self.txtServer.enabled = self.txtPrivateKeySSH.enabled = self.rbSSH.isChecked()
#self.txtPrivateKeySSH.enabled = self.rbSSH.checked
def onTxtOtherStudyEdited(self, text):
"""Any letter typed in "Other study" text box """
self.StudyId = text
#self.checkDownloadButtonEnabled()
def onCleanCacheButtonClicked(self):
"""Clean cache button clicked. Remove all the files in the current local storage path directory"""
import shutil
# Remove directory
shutil.rmtree(self.localStoragePath, ignore_errors=True)
# Recreate it (this is a safe method for symbolic links)
os.makedirs(self.localStoragePath)
# Make sure that everybody has write permissions (sometimes there are problems because of umask)
os.chmod(self.localStoragePath, 0o777)
def onTmpDirChanged(self, d):
print(("Temp dir changed. New dir: " + d))
self.localStoragePath = d
def onReload(self, moduleName="CIP_GetImage"):
"""Reload the module. Just for development purposes. This is a combination of the old and new style in modules writing"""
try:
slicer.util.reloadScriptedModule(moduleName)
except:
#Generic reload method for any scripted module.
            #ModuleWizard will substitute the correct default moduleName.
import imp, sys
widgetName = moduleName + "Widget"
# reload the source code
# - set source file path
# - load the module to the global space
filePath = eval('slicer.modules.%s.path' % moduleName.lower())
p = os.path.dirname(filePath)
if not sys.path.__contains__(p):
sys.path.insert(0,p)
fp = open(filePath, "r")
globals()[moduleName] = imp.load_module(
moduleName, fp, filePath, ('.py', 'r', imp.PY_SOURCE))
fp.close()
# rebuild the widget
# - find and hide the existing widget
# - create a new widget in the existing parent
# parent = slicer.util.findChildren(name='%s Reload' % moduleName)[0].parent()
parent = self.parent
for child in parent.children():
try:
child.hide()
except AttributeError:
pass
globals()[widgetName.lower()] = eval(
'globals()["%s"].%s(parent)' % (moduleName, widgetName))
globals()[widgetName.lower()].setup()
#
# CIP_GetImageLogic
# This class makes all the operations not related with the user interface (download and handle volumes, etc.)
#
class CIP_GetImageLogic:
def __init__(self, modulePath):
"""Constructor. Adapt the module full path to windows convention when necessary"""
#ScriptedLoadableModuleLogic.__init__(self)
self.modulePath = modulePath
def loadCase(self, server, serverPath, studyId, caseId, imageTypesExtensions, labelMapExtensions, localStoragePath, cacheOn, sshMode, privateKeySSH):
"""Load all the asked images for a case: main images and label maps.
Arguments:
- server -- User and name of the host. Default: [email protected]
- serverPath -- Root path for all the cases. Default: /mad/store-replicated/clients/copd/Processed
- studyId -- Code of the study. Ex: COPDGene
- caseId -- Case id (NOT patient! It will be extracted from here). Example: 12257B_INSP_STD_UIA_COPD
- imageTypesExtensions -- Extensions of the images that must be appended before 'nrrd' in the filename. Default is blank
- labelMapExtensions -- Extensions that must be appended to the file name to find the labelmap. Ex: _partialLungLabelMap
- localStoragePath -- Local folder where all the images will be downloaded
- cacheOn -- When True, the images are not downloaded if they already exist in local
- privateKeySSH -- Full path to the file that contains the private key used to connect with SSH to the server
Returns OK or ERROR
"""
try:
# Extract Patient Id
patientId = caseId.split('_')[0]
for ext in imageTypesExtensions:
locPath = self.downloadNrrdFile(server, serverPath, studyId, patientId, caseId, ext, localStoragePath, cacheOn, sshMode, privateKeySSH)
if (SlicerUtil.IsDevelopment): print("Loading volume stored in " + locPath)
slicer.util.loadVolume(locPath)
for ext in labelMapExtensions:
locPath = self.downloadNrrdFile(server, serverPath, studyId, patientId, caseId, ext[1], localStoragePath, cacheOn, sshMode, privateKeySSH)
if (SlicerUtil.IsDevelopment): print("Loading label map stored in " + locPath)
(code, vtkLabelmapVolumeNode) = slicer.util.loadLabelVolume(locPath, {}, returnNode=True) # Braces are needed for Windows compatibility... No comments...
return Util.OK
except Exception as exception:
print(exception)
return Util.ERROR
def mustSplit(self, labelMapStructure):
return labelMapStructure[3] is not None
def downloadNrrdFile(self, server, serverPath, studyId, patientId, caseId, ext, localStoragePath, cacheOn, sshMode=True, privateKeySSH=None):
"""Download Header and Raw data in a Nrrd file.
        Returns the full local path of the .nhdr file (header)
"""
localFile = "{0}/{1}{2}.nhdr".format(localStoragePath, caseId, ext)
# If cache mode is not activated or the file does not exist locally, proceed to download
if (not cacheOn or not os.path.isfile(localFile)):
error = False
try:
if os.path.isfile(localFile):
# Delete file previously to avoid confirmation messages
print("Remove cached files: " + localFile)
try:
                        os.remove(localFile)
                        os.remove("{0}/{1}{2}.raw.gz".format(localStoragePath, caseId, ext))
except:
print("Error when deleting local files ({0})".format(localFile))
                # Make sure that the ssh key does not have overly open permissions if it is used (otherwise scp will return an error)
if privateKeySSH:
os.chmod(privateKeySSH, 0o600)
# Download header
if (os.sys.platform == "win32"):
localStoragePath = localStoragePath.replace('/', '\\') + '\\'
if sshMode:
if privateKeySSH:
privateKeyCommand = "-privatekey={0}".format(privateKeySSH)
else:
privateKeyCommand = ""
params = [("%s\\CIP_GetImage_Resources\\WinSCP.com" % self.modulePath) ,"/command", 'open {0} {1}'.format(server, privateKeyCommand), \
'get {0}/{1}/{2}/{3}/{3}{4}.nhdr {5}'.format(serverPath, studyId, patientId, caseId, ext, localStoragePath), "exit"]
else:
params = ['copy',"{0}\\{1}\\{2}\\{3}\\{3}{4}.nhdr".format(serverPath, studyId, patientId, caseId, ext), localStoragePath]
else:
# Unix
if sshMode:
keyCommand = ("-i %s " % privateKeySSH) if privateKeySSH else "" # Set a command if privateKeySsh has any value (non empty)
params = ['scp',"{0}{1}:{2}/{3}/{4}/{5}/{5}{6}.nhdr".format(keyCommand, server, serverPath, studyId, patientId, caseId, ext), localStoragePath]
else:
params = ['cp',"{0}/{1}/{2}/{3}/{3}{4}.nhdr".format(serverPath, studyId, patientId, caseId, ext), localStoragePath]
fullStrCommand = " ".join(params)
(result, output, error) = self.executeDownloadCommand(params)
if (result == Util.ERROR):
print("Error when executing download command. Params:")
print(params)
                    if (error == None):
                        error = "Unknown error"
raise Exception(error)
# Download raw data (just update a parameter)
if (os.sys.platform == "win32"):
if sshMode: paramToModify = 3
else: paramToModify = 1
else:
# Unix
paramToModify = 1
# Replace the name of the parameter
params[paramToModify] = params[paramToModify].replace(".nhdr", ".raw.gz")
                # Download the raw data
(result, output, error) = self.executeDownloadCommand(params)
if (result == Util.ERROR):
print ("Error when executing download command. Params:")
print (params)
                    if (error == None):
                        error = "Unknown error"
raise Exception(error)
                # If everything goes well, check the path of the Nrrd file to verify that the files have been correctly downloaded
missingFiles = ""
if not os.path.isfile(localFile):
missingFiles = missingFiles + localFile + ";"
if not os.path.isfile(localFile.replace(".nhdr", ".raw.gz")):
missingFiles = missingFiles + localFile.replace(".nhdr", ".raw.gz") + ";"
if missingFiles:
raise Exception("The download command did not return any error message, but the following files have not been downloaded: " + missingFiles)
except Exception as ex:
                # There was an error in the preferred method. If we are in a Unix system, we will try the backup method
if os.sys.platform != "win32":
print(("There was an error when downloading some of the files: " + error))
print("Trying alternative method...")
self.executeDowloadCommand_Backup(fullStrCommand)
                    # If everything goes well, check the path of the Nrrd file to verify that the files have been correctly downloaded
missingFiles = ""
if not os.path.isfile(localFile): missingFiles = missingFiles + localFile + ";"
if not os.path.isfile(localFile.replace(".nhdr", ".raw.gz")): missingFiles = missingFiles + localFile.replace(".nhdr", ".raw.gz") + ";"
if missingFiles:
raise Exception("After a second attempt, the following files have not been downloaded: " + missingFiles)
print("Apparently it worked!")
else:
raise ex
else:
print("File {0} already cached".format(localFile))
# Return path to the Nrrd header file
return localFile
def executeDownloadCommand(self, params):
"""Execute a command to download fisically the file. It will be different depending on the current platform.
In Unix, we will use the "scp" command.
In Windows, we will use WinSCP tool (attached to the module in "Resources" folder)
It returns a tuple: OK/ERROR, StandardOutput, ErrorMessage"""
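        # Shape of a typical Unix invocation built by downloadNrrdFile
        # (host and paths are hypothetical):
        #   params = ['scp', 'user@host:/root/Study/Patient/Case/Case.nhdr', '/tmp/cache']
        #   (result, out, err) = self.executeDownloadCommand(params)  # -> (Util.OK, stdout, stderr)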
if SlicerUtil.IsDevelopment:
print ("Attempt to download with these params:")
print (params)
try:
out = err = None
if (os.sys.platform == "win32"):
# Hide console window
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
proc = subprocess.Popen(params, stdout=subprocess.PIPE, stderr=subprocess.PIPE, startupinfo=startupinfo)
print ("Launch process")
# Launch the process
(out, err) = proc.communicate()
print("End of process")
else:
# Preferred method.
proc = subprocess.Popen(params, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Launch the process
(out, err) = proc.communicate()
if SlicerUtil.IsDevelopment:
print("Out: " + out)
print("Err:" + err)
if err:
print("Error returned by system process: " + err)
except Exception as ex:
print("FATAL ERROR IN COPY PROCESS:")
print(ex)
# Fatal error
return (Util.ERROR, out, err)
# In Unix sometimes if there is some error, stderr will contain some value
if err:
return (Util.ERROR, out, err) # ERROR!
## Everything ok
return (Util.OK, out, err)
def executeDowloadCommand_Backup(self, command):
"""Backup function that will be used when the preferred method fails"""
subprocess.check_call(command, shell=True)
subprocess.check_call(command.replace(".nhdr", ".raw.gz"), shell=True)
| bsd-3-clause | -6,395,616,928,657,452,000 | 47.471601 | 331 | 0.586642 | false |
mvtuong/mysite | v1/blog/models.py | 1 | 2011 | from django.db import models
from django.db.models import permalink
# Create your models here.
class Blog(models.Model):
title = models.CharField(max_length=200)
slug = models.SlugField(unique=True)
content = models.TextField(default='', blank=True)
description = models.TextField(default='', blank=True)
date = models.DateField(db_index=True, auto_now_add=True)
topic = models.ForeignKey('blog.Topic')
tag = models.ManyToManyField('blog.Tag', blank=True)
images = models.ManyToManyField('blog.BlogImage', blank=True)
files = models.ManyToManyField('blog.BlogFile', blank=True)
hiden = models.BooleanField(default=False)
featured = models.BooleanField(default=False)
    def __str__(self):
        return self.title
@permalink
def get_absolute_url(self):
if (self.topic.name == "Project"):
return ('view_project_post', None, { 'slug': self.slug })
else:
return ('view_blog_post', None, { 'slug': self.slug })
class BlogImage(models.Model):
image = models.ImageField(upload_to="static/user_upload/images/")
    def __str__(self):
        return self.image.url
class BlogFile(models.Model):
file = models.FileField(upload_to="static/user_upload/files/")
    def __str__(self):
        return self.file.url
class Topic(models.Model):
name = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(db_index=True)
description = models.TextField(max_length=500, blank=True)
    def __str__(self):
        return self.name
@permalink
def get_absolute_url(self):
return ('view_blog_topic', None, { 'slug': self.slug })
class Tag(models.Model):
name = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(db_index=True)
description = models.TextField(max_length=500, blank=True)
    def __str__(self):
        return self.name
@permalink
def get_absolute_url(self):
return ('view_blog_tag', None, { 'slug': self.slug }) | apache-2.0 | 2,280,699,659,320,023,600 | 31.451613 | 69 | 0.661363 | false |
MarilyGunnersLab/MCCE | mcce_stable/bin/pdbdict2tpl.py | 1 | 1624 | #!/usr/bin/python2
import sys
if __name__ == '__main__':
if len(sys.argv) < 2:
print 'Usage:'
print ' ', sys.argv[0], 'hicup_pdbdict_file'
sys.exit(0)
fin = open(sys.argv[1]).readlines()
for i_line in range(0, len(fin)):
if (fin[i_line][:7] == 'RESIDUE'):
resname = fin[i_line][10:13]
print '#ONNECT conf atom orbital ires conn ires conn ires conn ires conn ires conn'
print '#ONNECT |-----|----|---------|----|----|----|----|----|----|----|----|----|----|'
for i_line in range(0, len(fin)):
#01234567890123456789012345678901234567890
#RESIDUE RET 49
#CONECT C2 4 C1 C3 1H2 2H2
#ONNECT conf atom orbital ires conn ires conn ires conn ires conn
#ONNECT |-----|----|---------|----|----|----|----|----|----|----|----|----|----|
#CONNECT RSB+1 C19 sp3 0 C9 0 1H19 0 2H19 0 3H19
if (fin[i_line][:6] == 'CONECT'):
atomname = fin[i_line][11:15]
n_connect = int(fin[i_line][18:20])
if (n_connect == 1): orbital = ' s'
elif (n_connect == 2): orbital = 'sp3'
elif (n_connect == 3): orbital = 'sp2'
elif (n_connect == 4): orbital = 'sp3'
else: orbital = 'UNK'
connect = []
for i_connect in range(0, n_connect):
connect.append(fin[i_line][20+i_connect*5:25+i_connect*5])
print 'CONNECT ','%s01' %resname,atomname,' ',orbital, ' 0 %s' * n_connect %tuple(connect)
| mit | -6,502,614,572,368,778,000 | 38.609756 | 108 | 0.454433 | false |
Balandat/cont_no_regret | old_code/testing.py | 1 | 3136 | '''
Created on Feb 24, 2015
@author: balandat
'''
import numpy as np
from scipy.integrate import quad
import matplotlib.pyplot as plt
from ContNoRegret.Domains import S
from ContNoRegret.Distributions import Uniform
from ContNoRegret.utils import create_random_Sigmas
from ContNoRegret.LossFunctions import GaussianLossFunction
from scipy.stats import expon
from scipy.interpolate import SmoothBivariateSpline, LSQBivariateSpline
# def compute_constants(gamma):
# c = (gamma-1)**(-1)
# a2 = gamma*(1+gamma)/2
# a1 = gamma - 2*c*a2
# a0 = 1 - c*a1 - c**2*a2
# return c, np.array([a0, a1, a2])
#
# def phi(u, gamma):
# c,a = compute_constants(gamma)
# return ( (u<c)*(gamma/(gamma-1)-np.minimum(u,c))**(-gamma) +
# (u>=c)*(a[0]+a[1]*np.maximum(u,c)+a[2]*np.maximum(u,c)**2) )
#
# def phi_prime(u, gamma):
# c,a = compute_constants(gamma)
# return (u<c)*gamma*(gamma/(gamma-1)-np.minimum(u,c))**(-(1+gamma)) + (u>=c)*(a[1]+2*a[2]*np.maximum(u,c))
#
# def phi_double_prime(u, gamma):
# c,a = compute_constants(gamma)
# return (u<c)*gamma*(1+gamma)*(gamma/(gamma-1)-np.minimum(u,c))**(-(2+gamma)) + (u>=c)*2*a[2]
#
# def phi_inv(u, gamma):
# c,a = compute_constants(gamma)
# b = phi(c, gamma)
# return ( (u<b)*(gamma/(gamma-1)-np.minimum(u,b)**(-1/gamma)) +
# (u>=b)*(-a[1]/2/a[2]+np.sqrt(a[1]**2/4/a[2]**2 - (a[0]-np.maximum(u,b))/a[2])) )
#
# def phi_inv_prime(u, gamma):
# return 1/phi_prime(phi_inv(u, gamma))
#
#
# # Plot some functions
# gammas = [1.25, 1.5, 1.75, 2, 3]
# u = np.linspace(-1.5,5,10000)
# v = np.linspace(0.001,10,10000)
# f,axs = plt.subplots(3,1)
# axs[0].plot(u, np.exp(u-1))
# axs[1].plot(u, np.exp(u-1))
# axs[2].plot(u, np.exp(u-1))
# for gamma in gammas:
# axs[0].plot(u, phi(u,gamma))
# axs[1].plot(u, phi_prime(u,gamma))
# axs[2].plot(u, phi_double_prime(u,gamma))
# plt.show()
# for gamma in gammas:
# # gamma = 1.5
# ctilde = gamma/(gamma-1)
# a2 = 0.5*gamma*(1+gamma)/((ctilde-1)**(2+gamma))
# a1 = gamma/((ctilde-1)**(1+gamma)) - 2*a2
# a0 = 1/((ctilde-1)**gamma) - a1 - a2
#
# def phi(u):
# return (u<1)*(ctilde-np.minimum(u,1))**(-gamma) + (u>=1)*(a0+a1*np.maximum(u,1)+a2*np.maximum(u,1)**2)
#
# def phiprime(u):
# return (u<1)*gamma*(ctilde-np.minimum(u,1))**(-(1+gamma)) + (u>=1)*(a1+2*a2*np.maximum(u,1))
#
# def phiinv(u):
# return (u<1)*(ctilde-np.minimum(u,1)**(-1/gamma)) + (u>=1)*(-a1/2/a2+np.sqrt(a1**2/4/a2**2 - (a0-np.maximum(u,1))/a2))
#
# def phiinvprime(u):
# return 1/phiprime(phiinv(u))
# # return (u<1)/gamma*u**(-1+1/gamma) + (u>=1)*(a1**2-4*a2*(a0-np.maximum(u,1)))**(-1/2)
#
#
# # fig2, (ax2, ax3) = plt.subplots(2, 1)
# # fig3, ax4 = plt.subplots(1)
#
# ax1.plot(u, phi(u))#, u, np.exp(u-1))
# # v = np.linspace(0.001, 5, 10000)
# # ax2.plot(v, phiinv(v), v, 1+np.log(v))
# # ax3.plot(v, phiinvprime(v), v, 1/v)
# # ax4.plot(v, phiinvprime(v)-1/(3*v))
# # print(np.min(phiinvprime(v)-1/(3+v))
# plt.show()
| mit | -3,819,883,864,521,728,500 | 31.677083 | 132 | 0.552296 | false |
maxprais/psoriassist | psoriassist/models.py | 1 | 4403 | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class AppUser(models.Model):
user = models.OneToOneField(User)
age = models.IntegerField(null=True)
birthday = models.DateField(null=True)
profile_picture = models.ImageField(null=True)
last_doctor_appointment = models.DateField(null=True)
bio = models.CharField(max_length=2000, null=True)
isMentor = models.BooleanField()
mentoree = models.ManyToManyField('AppUser')
doctor = models.ManyToManyField('Doctor')
def __str__(self):
return self.user.username
class Message(models.Model):
user = models.ForeignKey(AppUser, related_name='initialiseConvo')
other_user = models.ForeignKey(AppUser, related_name='answerConvo')
content = models.TextField()
message_date = models.DateTimeField()
delivered = models.BooleanField(default=False)
    def __str__(self):
        return str(self.message_date)
class PASIScore(models.Model):
user = models.ForeignKey(AppUser)
score = models.CharField(max_length=100)
def __str__(self):
return self.user.user.username
class LesionSection(models.Model):
section_name = models.CharField(max_length=100)
PASI = models.ForeignKey(PASIScore)
def __str__(self):
return self.section_name
class Lesion(models.Model):
user = models.ForeignKey(AppUser)
name = models.CharField(null=True, max_length=500)
image = models.CharField(max_length=2000, blank=True)
lesion_location = models.ForeignKey(LesionSection, null=True)
date_taken = models.DateTimeField(null=True)
thickness = models.IntegerField(null=True)
redness = models.IntegerField(null=True)
scale = models.IntegerField(null=True)
def __str__(self):
return "%s- %s %s" % (self.user.user.username, self.name, self.date_taken)
class MentalState(models.Model):
user = models.ForeignKey(AppUser)
stress = models.IntegerField()
anxiety = models.IntegerField()
mood = models.IntegerField()
date_taken = models.DateTimeField()
def __str__(self):
return "%s- %s" % (self.user.user.username, self.date_taken)
class Medication(models.Model):
user = models.ForeignKey(AppUser)
name = models.CharField(max_length=800)
prescribed_by = models.ForeignKey('Doctor')
date_prescribed = models.DateField()
expiration_date = models.DateField()
dosage = models.CharField(max_length=2000)
other_info = models.TextField(max_length=2000)
isCurrent = models.BooleanField()
def __str__(self):
return "%s- %s" % (self.user.user.username, self.name)
class Rating(models.Model):
user = models.ManyToManyField(AppUser)
medication = models.ForeignKey(Medication)
effectiveness = models.SmallIntegerField(default=0)
quality_of_life = models.SmallIntegerField(default=0)
adherence = models.SmallIntegerField(default=0)
def __str__(self):
return "%s" % self.medication.name
class Doctor(models.Model):
name = models.CharField(max_length=100)
work_address = models.CharField(max_length=500)
profile_picture = models.ImageField(null=True)
distance_from_user = models.CharField(max_length=300)
def __str__(self):
return self.name
class Appointment(models.Model):
user = models.ManyToManyField(AppUser)
doctor = models.ManyToManyField(Doctor)
date = models.DateTimeField()
location = models.CharField( max_length=800)
type_of_appointment = models.CharField(max_length=100)
reason_for_appointment = models.TextField(max_length=2000)
duration = models.TimeField()
    def __str__(self):
        return "%s %s %s" % (", ".join(u.user.username for u in self.user.all()), ", ".join(d.name for d in self.doctor.all()), self.date)
class ComputerConversation(models.Model):
user = models.ForeignKey(AppUser)
date_sent = models.DateTimeField(auto_now_add=True)
index = models.IntegerField(default=0)
def __str__(self):
return self.user.user.username
#
# def message_time(self):
# date_sent = datetime.now()
# return date_sent
# computer_message = models.TextField(max_length=2000, null=True)
# user_message = models.CharField(max_length=2000, null=True)
| mit | 8,355,907,875,308,252,000 | 30.858209 | 82 | 0.67454 | false |
ianstalk/Flexget | flexget/plugins/clients/rtorrent.py | 1 | 25708 | import os
import re
import socket
from io import BytesIO
from time import sleep
from urllib.parse import urljoin, urlparse, urlsplit
from xmlrpc import client as xmlrpc_client
from loguru import logger
from requests.auth import HTTPBasicAuth, HTTPDigestAuth
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.bittorrent import Torrent, is_torrent_file
from flexget.utils.pathscrub import pathscrub
from flexget.utils.template import RenderError
logger = logger.bind(name='rtorrent')
class _Method:
# some magic to bind an XML-RPC method to an RPC server.
# supports "nested" methods (e.g. examples.getStateName)
def __init__(self, send, name):
self.__send = send
self.__name = name
def __getattr__(self, name):
return _Method(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
class HTTPDigestTransport(xmlrpc_client.Transport):
"""
Transport that uses requests to support Digest authentication.
"""
def __init__(self, scheme, digest_auth, username, password, session, *args, **kwargs):
self.__scheme = scheme
self.__session = session
self.__digest_auth = digest_auth
self.__username = username
self.__password = password
self.verbose = 0
xmlrpc_client.Transport.__init__(self, *args, **kwargs) # old style class
def request(self, host, handler, request_body, verbose=False):
return self.single_request(host, handler, request_body, verbose)
def single_request(self, host, handler, request_body, verbose=0):
url = urljoin('{0}://{1}'.format(self.__scheme, host), handler)
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
# if status code is 401, it means we used the wrong auth method
if response.status_code == 401:
logger.warning(
'{} auth failed. Retrying with {}. Please change your config.',
'Digest' if self.__digest_auth else 'Basic',
'Basic' if self.__digest_auth else 'Digest',
)
self.__digest_auth = not self.__digest_auth
auth = self.get_auth()
response = self.send_request(url, auth, request_body)
response.raise_for_status()
return self.parse_response(response)
def get_auth(self):
if self.__digest_auth:
return HTTPDigestAuth(self.__username, self.__password)
return HTTPBasicAuth(self.__username, self.__password)
def send_request(self, url, auth, data):
return self.__session.post(url, auth=auth, data=data, raise_status=False)
def parse_response(self, response):
p, u = self.getparser()
if self.verbose:
logger.info('body: {!r}', response)
p.feed(response.content)
p.close()
return u.close()
def encode_netstring(input):
return str(len(input)).encode() + b':' + input + b','
def encode_header(key, value):
return key + b'\x00' + value + b'\x00'
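# Framing sketch: an SCGI request is a netstring of NUL-delimited headers
# followed by the raw body. For example:
#   encode_header(b'SCGI', b'1')        -> b'SCGI\x001\x00'   (7 bytes)
#   encode_netstring(b'SCGI\x001\x00')  -> b'7:SCGI\x001\x00,'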
class SCGITransport(xmlrpc_client.Transport):
"""
    Public domain SCGITransport implementation from:
https://github.com/JohnDoee/autotorrent/blob/develop/autotorrent/scgitransport.py
"""
def __init__(self, *args, **kwargs):
self.socket_path = kwargs.pop('socket_path', '')
xmlrpc_client.Transport.__init__(self, *args, **kwargs)
def single_request(self, host, handler, request_body, verbose=False):
self.verbose = verbose
if self.socket_path:
s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
s.connect(self.socket_path)
else:
host, port = host.split(':')
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, int(port)))
request = encode_header(b'CONTENT_LENGTH', str(len(request_body)).encode())
request += encode_header(b'SCGI', b'1')
request += encode_header(b'REQUEST_METHOD', b'POST')
request += encode_header(b'REQUEST_URI', handler.encode())
request = encode_netstring(request)
request += request_body
s.send(request)
response = b''
while True:
r = s.recv(1024)
if not r:
break
response += r
response_body = BytesIO(b'\r\n\r\n'.join(response.split(b'\r\n\r\n')[1:]))
return self.parse_response(response_body)
if not hasattr(xmlrpc_client.Transport, 'single_request'):
SCGITransport.request = SCGITransport.single_request
def create_proxy(url):
parsed = urlsplit(url)
if not parsed.scheme:
path = parsed.path
return xmlrpc_client.ServerProxy('http://1', transport=SCGITransport(socket_path=path))
if parsed.scheme == 'scgi':
url = 'http://%s' % parsed.netloc
return xmlrpc_client.ServerProxy(url, transport=SCGITransport())
logger.debug('Creating Normal XMLRPC Proxy with url {!r}', url)
return xmlrpc_client.ServerProxy(url)
class RTorrent:
""" rTorrent API client """
default_fields = (
'hash',
'name',
'up_total',
'down_total',
'down_rate',
'is_open',
'is_active',
'custom1',
'custom2',
'custom3',
'custom4',
'custom5',
'state',
'complete',
'bytes_done',
'down.rate',
'left_bytes',
'ratio',
'base_path',
'load_date',
)
required_fields = ('hash', 'name', 'base_path')
def __init__(self, uri, username=None, password=None, digest_auth=None, session=None):
"""
New connection to rTorrent
:param uri: RTorrent URL. Supports both http(s) and scgi
:param username: Username for basic auth over http(s)
:param password: Password for basic auth over http(s)
"""
self.uri = uri
self.username = username
self.password = password
self.digest_auth = digest_auth
self._version = None
parsed_uri = urlparse(uri)
if self.username and self.password and parsed_uri.scheme not in ['http', 'https']:
raise OSError('Username and password only supported on http(s)')
# Determine the proxy server
if parsed_uri.scheme in ['http', 'https']:
sp = xmlrpc_client.ServerProxy
elif parsed_uri.scheme == 'scgi':
sp = create_proxy
elif parsed_uri.scheme == '' and parsed_uri.path:
self.uri = parsed_uri.path
sp = create_proxy
else:
raise OSError('Unsupported scheme %s for uri %s' % (parsed_uri.scheme, self.uri))
# Use a special transport if http(s)
if parsed_uri.scheme in ['http', 'https']:
self._server = sp(
self.uri,
transport=HTTPDigestTransport(
parsed_uri.scheme, self.digest_auth, self.username, self.password, session
),
)
else:
self._server = sp(self.uri)
def _clean_fields(self, fields, reverse=False):
if not fields:
fields = list(self.default_fields)
if reverse:
for field in ['up.total', 'down.total', 'down.rate']:
if field in fields:
fields[fields.index(field)] = field.replace('.', '_')
return fields
for required_field in self.required_fields:
if required_field not in fields:
fields.insert(0, required_field)
for field in ['up_total', 'down_total', 'down_rate']:
if field in fields:
fields[fields.index(field)] = field.replace('_', '.')
return fields
def load(self, raw_torrent, fields=None, start=False, mkdir=True):
if fields is None:
fields = {}
# First param is empty 'target'
params = ['', xmlrpc_client.Binary(raw_torrent)]
# Additional fields to set
for key, val in fields.items():
# Values must be escaped if within params
# TODO: What are the escaping requirements? re.escape works differently on python 3.7+
params.append('d.%s.set=%s' % (key, re.escape(str(val))))
if mkdir and 'directory' in fields:
result = self._server.execute.throw('', 'mkdir', '-p', fields['directory'])
if result != 0:
raise xmlrpc_client.Error('Failed creating directory %s' % fields['directory'])
# by default rtorrent won't allow calls over 512kb in size.
xmlrpc_size = (
len(xmlrpc_client.dumps(tuple(params), 'raw_start')) + 71680
) # Add 70kb for buffer
if xmlrpc_size > 524288:
prev_size = self._server.network.xmlrpc.size_limit()
self._server.network.xmlrpc.size_limit.set('', xmlrpc_size)
# Call load method and return the response
if start:
result = self._server.load.raw_start(*params)
else:
result = self._server.load.raw(*params)
if xmlrpc_size > 524288:
self._server.network.xmlrpc.size_limit.set('', prev_size)
return result
def get_directory(self):
return self._server.get_directory()
def torrent(self, info_hash, fields=None):
""" Get the details of a torrent """
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
multi_call = xmlrpc_client.MultiCall(self._server)
for field in fields:
method_name = 'd.%s' % field
getattr(multi_call, method_name)(info_hash)
resp = multi_call()
# TODO: Maybe we should return a named tuple or a Torrent class?
return dict(list(zip(self._clean_fields(fields, reverse=True), [val for val in resp])))
def torrents(self, view='main', fields=None):
if not fields:
fields = list(self.default_fields)
fields = self._clean_fields(fields)
params = ['d.%s=' % field for field in fields]
params.insert(0, view)
resp = self._server.d.multicall2('', params)
# Response is formatted as a list of lists, with just the values
return [dict(list(zip(self._clean_fields(fields, reverse=True), val))) for val in resp]
def update(self, info_hash, fields):
multi_call = xmlrpc_client.MultiCall(self._server)
for key, val in fields.items():
method_name = 'd.%s.set' % key
getattr(multi_call, method_name)(info_hash, val)
return multi_call()[0]
def delete(self, info_hash):
return self._server.d.erase(info_hash)
def stop(self, info_hash):
self._server.d.stop(info_hash)
return self._server.d.close(info_hash)
def start(self, info_hash):
return self._server.d.start(info_hash)
def move(self, info_hash, dst_path):
self.stop(info_hash)
torrent = self.torrent(info_hash, fields=['base_path'])
try:
logger.verbose('Creating destination directory `{}`', dst_path)
self._server.execute.throw('', 'mkdir', '-p', dst_path)
except xmlrpc_client.Error:
raise xmlrpc_client.Error("unable to create folder %s" % dst_path)
self._server.execute.throw('', 'mv', '-u', torrent['base_path'], dst_path)
self._server.d.set_directory(info_hash, dst_path)
self.start(info_hash)
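# Standalone usage sketch of the client above, outside the plugin flow
# (URI and info hash are hypothetical):
#   client = RTorrent('scgi://localhost:5000')
#   for t in client.torrents(view='main', fields=['hash', 'name', 'ratio']):
#       print(t['hash'], t['name'], t['ratio'])
#   client.stop('0123456789abcdef0123456789abcdef01234567')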
class RTorrentPluginBase:
priority_map = {'high': 3, 'medium': 2, 'low': 1, 'off': 0}
def _build_options(self, config, entry, entry_first=True):
options = {}
for opt_key in (
'path',
'message',
'priority',
'custom1',
'custom2',
'custom3',
'custom4',
'custom5',
):
# Values do not merge config with task
# Task takes priority then config is used
entry_value = entry.get(opt_key)
config_value = config.get(opt_key)
if entry_first:
if entry_value:
options[opt_key] = entry.render(entry_value)
elif config_value:
options[opt_key] = entry.render(config_value)
else:
if config_value:
options[opt_key] = entry.render(config_value)
elif entry_value:
options[opt_key] = entry.render(entry_value)
# Convert priority from string to int
priority = options.get('priority')
if priority and priority in self.priority_map:
options['priority'] = self.priority_map[priority]
# Map Flexget path to directory in rTorrent
if options.get('path'):
options['directory'] = options['path']
del options['path']
if 'directory' in options:
options['directory'] = pathscrub(options['directory'])
return options
class RTorrentOutputPlugin(RTorrentPluginBase):
schema = {
'type': 'object',
'properties': {
# connection info
'uri': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'digest_auth': {'type': 'boolean', 'default': False},
'start': {'type': 'boolean', 'default': True},
'mkdir': {'type': 'boolean', 'default': True},
            'action': {'type': 'string', 'enum': ['update', 'delete', 'add'], 'default': 'add'},
# properties to set on rtorrent download object
'message': {'type': 'string'},
'priority': {'type': 'string'},
'path': {'type': 'string'},
'custom1': {'type': 'string'},
'custom2': {'type': 'string'},
'custom3': {'type': 'string'},
'custom4': {'type': 'string'},
'custom5': {'type': 'string'},
'fast_resume': {'type': 'boolean', 'default': False},
},
'required': ['uri'],
'additionalProperties': False,
}
def _verify_load(self, client, info_hash):
ex = xmlrpc_client.Error()
for _ in range(0, 5):
try:
return client.torrent(info_hash, fields=['hash'])
except xmlrpc_client.Error as e:
ex = e
sleep(0.5)
raise ex
@plugin.priority(120)
def on_task_download(self, task, config):
# If the download plugin is not enabled, we need to call it to get
# our temp .torrent files
if config['action'] == 'add' and 'download' not in task.config:
download = plugin.get('download', self)
download.get_temp_files(task, handle_magnets=True, fail_html=True)
@plugin.priority(135)
def on_task_output(self, task, config):
client = RTorrent(
os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests,
)
try:
for entry in task.accepted:
if config['action'] == 'add':
if task.options.test:
logger.info('Would add {} to rTorrent', entry['url'])
continue
try:
options = self._build_options(config, entry)
except RenderError as e:
entry.fail("failed to render properties %s" % str(e))
continue
# fast_resume is not really an rtorrent option so it's not in _build_options
fast_resume = entry.get('fast_resume', config['fast_resume'])
self.add_entry(
client,
entry,
options,
start=config['start'],
mkdir=config['mkdir'],
fast_resume=fast_resume,
)
info_hash = entry.get('torrent_info_hash')
if not info_hash:
entry.fail('Failed to %s as no info_hash found' % config['action'])
continue
if config['action'] == 'delete':
if task.options.test:
logger.info(
'Would delete {} ({}) from rTorrent',
entry['title'],
entry['torrent_info_hash'],
)
continue
self.delete_entry(client, entry)
if config['action'] == 'update':
if task.options.test:
logger.info(
'Would update {} ({}) in rTorrent',
entry['title'],
entry['torrent_info_hash'],
)
continue
self.update_entry(client, entry, config)
except OSError as e:
raise plugin.PluginError("Couldn't connect to rTorrent: %s" % str(e))
def delete_entry(self, client, entry):
try:
client.delete(entry['torrent_info_hash'])
logger.verbose(
'Deleted {} ({}) in rtorrent ', entry['title'], entry['torrent_info_hash']
)
except xmlrpc_client.Error as e:
entry.fail('Failed to delete: %s' % str(e))
return
def update_entry(self, client, entry, config):
info_hash = entry['torrent_info_hash']
# First check if it already exists
try:
existing = client.torrent(info_hash, fields=['base_path'])
except xmlrpc_client.Error:
existing = False
# Build options but make config values override entry values
try:
options = self._build_options(config, entry, entry_first=False)
except RenderError as e:
entry.fail("failed to render properties %s" % str(e))
return
if existing and 'directory' in options:
# Check if changing to another directory which requires a move
if options['directory'] != existing['base_path'] and options[
'directory'
] != os.path.dirname(existing['base_path']):
try:
logger.verbose(
"Path is changing, moving files from '{}' to '{}'",
existing['base_path'],
options['directory'],
)
client.move(info_hash, options['directory'])
except xmlrpc_client.Error as e:
entry.fail('Failed moving torrent: %s' % str(e))
return
# Remove directory from update otherwise rTorrent will append the title to the directory path
if 'directory' in options:
del options['directory']
try:
client.update(info_hash, options)
logger.verbose('Updated {} ({}) in rtorrent ', entry['title'], info_hash)
except xmlrpc_client.Error as e:
entry.fail('Failed to update: %s' % str(e))
return
def add_entry(self, client, entry, options, start=True, mkdir=False, fast_resume=False):
if 'torrent_info_hash' not in entry:
entry.fail('missing torrent_info_hash')
return
if entry['url'].startswith('magnet:'):
torrent_raw = 'd10:magnet-uri%d:%se' % (len(entry['url']), entry['url'])
torrent_raw = torrent_raw.encode('ascii')
else:
# Check that file is downloaded
if 'file' not in entry:
raise plugin.PluginError('Temporary download file is missing from entry')
# Verify the temp file exists
if not os.path.exists(entry['file']):
raise plugin.PluginError('Temporary download file is missing from disk')
# Verify valid torrent file
if not is_torrent_file(entry['file']):
entry.fail("Downloaded temp file '%s' is not a torrent file" % entry['file'])
return
# Modify the torrent with resume data if needed
if fast_resume:
base = options.get('directory')
if not base:
base = client.get_directory()
piece_size = entry['torrent'].piece_size
chunks = int((entry['torrent'].size + piece_size - 1) / piece_size)
files = []
for f in entry['torrent'].get_filelist():
relative_file_path = os.path.join(f['path'], f['name'])
if entry['torrent'].is_multi_file:
relative_file_path = os.path.join(
entry['torrent'].name, relative_file_path
)
file_path = os.path.join(base, relative_file_path)
# TODO should it simply add the torrent anyway?
if not os.path.exists(file_path) and not os.path.isfile(file_path):
entry.fail('%s does not exist. Cannot add fast resume data.' % file_path)
return
# cannot bencode floats, so we need to coerce to int
mtime = int(os.path.getmtime(file_path))
# priority 0 should be "don't download"
files.append({'priority': 0, 'mtime': mtime})
entry['torrent'].set_libtorrent_resume(chunks, files)
# Since we modified the torrent, we need to write it to entry['file'] again
with open(entry['file'], 'wb+') as f:
f.write(entry['torrent'].encode())
try:
with open(entry['file'], 'rb') as f:
torrent_raw = f.read()
except OSError as e:
entry.fail('Failed to add to rTorrent %s' % str(e))
return
try:
Torrent(torrent_raw)
except SyntaxError as e:
entry.fail('Strange, unable to decode torrent, raise a BUG: %s' % str(e))
return
# First check if it already exists
try:
if client.torrent(entry['torrent_info_hash']):
logger.warning("Torrent {} already exists, won't add", entry['title'])
return
except xmlrpc_client.Error:
# No existing found
pass
try:
resp = client.load(torrent_raw, fields=options, start=start, mkdir=mkdir)
if resp != 0:
entry.fail('Failed to add to rTorrent invalid return value %s' % resp)
except xmlrpc_client.Error as e:
logger.exception(e)
entry.fail('Failed to add to rTorrent %s' % str(e))
return
# Verify the torrent loaded
try:
self._verify_load(client, entry['torrent_info_hash'])
logger.info('{} added to rtorrent', entry['title'])
except xmlrpc_client.Error as e:
logger.warning('Failed to verify torrent {} loaded: {}', entry['title'], str(e))
def on_task_learn(self, task, config):
""" Make sure all temp files are cleaned up when entries are learned """
# If download plugin is enabled, it will handle cleanup.
if 'download' not in task.config:
download = plugin.get('download', self)
download.cleanup_temp_files(task)
on_task_abort = on_task_learn
class RTorrentInputPlugin(RTorrentPluginBase):
schema = {
'type': 'object',
'properties': {
'uri': {'type': 'string'},
'username': {'type': 'string'},
'password': {'type': 'string'},
'digest_auth': {'type': 'boolean', 'default': False},
'view': {'type': 'string', 'default': 'main'},
'fields': one_or_more({'type': 'string', 'enum': list(RTorrent.default_fields)}),
},
'required': ['uri'],
'additionalProperties': False,
}
def on_task_input(self, task, config):
client = RTorrent(
os.path.expanduser(config['uri']),
username=config.get('username'),
password=config.get('password'),
digest_auth=config['digest_auth'],
session=task.requests,
)
fields = config.get('fields')
try:
torrents = client.torrents(config['view'], fields=fields)
except (OSError, xmlrpc_client.Error) as e:
task.abort('Could not get torrents (%s): %s' % (config['view'], e))
return
entries = []
for torrent in torrents:
entry = Entry(
title=torrent['name'],
url='%s/%s' % (os.path.expanduser(config['uri']), torrent['hash']),
path=torrent['base_path'],
torrent_info_hash=torrent['hash'],
)
for attr, value in torrent.items():
entry[attr] = value
entries.append(entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(RTorrentOutputPlugin, 'rtorrent', api_ver=2)
plugin.register(RTorrentInputPlugin, 'from_rtorrent', api_ver=2)
| mit | -316,874,547,476,433,500 | 34.705556 | 101 | 0.543255 | false |
ZhangXinNan/tensorflow | tensorflow/python/ops/math_ops_test.py | 1 | 18593 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
exp = np.exp
log = np.log
class ReduceTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testReduceAllDims(self):
x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
with test_util.device(use_gpu=True):
y_tf = self.evaluate(math_ops.reduce_sum(x))
self.assertEqual(y_tf, 21)
@test_util.run_in_graph_and_eager_modes
def testReduceExplicitAxes(self):
x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
with test_util.device(use_gpu=True):
for axis in (0, -2, (0, 0), (0, -2)):
self.assertAllEqual(self.evaluate(math_ops.reduce_sum(x, axis=axis)),
[5, 7, 9])
for axis in (1, -1, (1, 1), (1, -1)):
self.assertAllEqual(self.evaluate(math_ops.reduce_sum(x, axis=axis)),
[6, 15])
for axis in (None, (0, 1), (-1, -2), (-2, -1, 0, 1)):
self.assertEqual(self.evaluate(math_ops.reduce_sum(x, axis=axis)), 21)
@test_util.run_in_graph_and_eager_modes
def testReduceInvalidAxis(self):
if context.executing_eagerly():
# The shape check is in run a graph construction time. In eager mode,
# it misses the check, magically return result given wrong shape.
return
x = np.array([[1, 2, 3], [4, 5, 6]], dtype=np.int32)
axis = np.array([[0], [1]])
with self.assertRaisesRegexp(ValueError, "must be at most rank 1"):
math_ops.reduce_sum(x, axis)
class LogSumExpTest(test_util.TensorFlowTestCase):
def testReduceLogSumExp(self):
for dtype in [np.float16, np.float32, np.double]:
x_np = np.random.rand(5, 5).astype(dtype)
with self.test_session(use_gpu=True):
y_tf_np = math_ops.reduce_logsumexp(x_np).eval()
y_np = log(np.sum(exp(x_np)))
self.assertAllClose(y_tf_np, y_np)
def testReductionIndices(self):
for dtype in [np.float16, np.float32, np.double]:
x_np = np.random.rand(5, 5).astype(dtype)
with self.test_session(use_gpu=True):
y_tf = math_ops.reduce_logsumexp(x_np, reduction_indices=[0])
y_np = log(np.sum(exp(x_np), axis=0))
self.assertShapeEqual(y_np, y_tf)
y_tf_np = y_tf.eval()
self.assertAllClose(y_tf_np, y_np)
def testReductionIndices2(self):
for dtype in [np.float16, np.float32, np.double]:
x_np = np.random.rand(5, 5).astype(dtype)
with self.test_session(use_gpu=True):
y_tf = math_ops.reduce_logsumexp(x_np, reduction_indices=0)
y_np = log(np.sum(exp(x_np), axis=0))
self.assertShapeEqual(y_np, y_tf)
y_tf_np = y_tf.eval()
self.assertAllClose(y_tf_np, y_np)
def testKeepDims(self):
for dtype in [np.float16, np.float32, np.double]:
x_np = np.random.rand(5, 5).astype(dtype)
with self.test_session(use_gpu=True):
y_tf_np = math_ops.reduce_logsumexp(x_np, keepdims=True).eval()
self.assertEqual(y_tf_np.ndim, x_np.ndim)
y_np = log(np.sum(exp(x_np), keepdims=True))
self.assertAllClose(y_tf_np, y_np)
def testOverflow(self):
x = [1000, 1001, 1002, 1003]
for dtype in [np.float16, np.float32, np.double]:
x_np = np.array(x, dtype=dtype)
max_np = np.max(x_np)
with self.assertRaisesRegexp(RuntimeWarning,
"overflow encountered in exp"):
out = log(np.sum(exp(x_np)))
if out == np.inf:
raise RuntimeWarning("overflow encountered in exp")
with self.test_session(use_gpu=True):
x_tf = constant_op.constant(x_np, shape=x_np.shape)
y_tf_np = math_ops.reduce_logsumexp(x_tf).eval()
y_np = log(np.sum(exp(x_np - max_np))) + max_np
self.assertAllClose(y_tf_np, y_np)
def testUnderflow(self):
x = [-1000, -1001, -1002, -1003]
for dtype in [np.float16, np.float32, np.double]:
x_np = np.array(x, dtype=dtype)
max_np = np.max(x_np)
with self.assertRaisesRegexp(RuntimeWarning,
"divide by zero encountered in log"):
out = log(np.sum(exp(x_np)))
if out == -np.inf:
raise RuntimeWarning("divide by zero encountered in log")
with self.test_session(use_gpu=True):
x_tf = constant_op.constant(x_np, shape=x_np.shape)
y_tf_np = math_ops.reduce_logsumexp(x_tf).eval()
y_np = log(np.sum(exp(x_np - max_np))) + max_np
self.assertAllClose(y_tf_np, y_np)
def testInfinity(self):
with self.test_session(use_gpu=True):
res = math_ops.reduce_logsumexp(-np.inf).eval()
self.assertEqual(-np.inf, res)
class RoundTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testRounding(self):
x = np.arange(-5.0, 5.0, .25)
for dtype in [np.float32, np.double, np.int32]:
x_np = np.array(x, dtype=dtype)
with test_util.device(use_gpu=True):
x_tf = constant_op.constant(x_np, shape=x_np.shape)
y_tf = math_ops.round(x_tf)
y_tf_np = self.evaluate(y_tf)
y_np = np.round(x_np)
self.assertAllClose(y_tf_np, y_np, atol=1e-2)
class ModTest(test_util.TensorFlowTestCase):
def testFloat(self):
x = [0.5, 0.7, 0.3]
for dtype in [np.float32, np.double]:
# Test scalar and vector versions.
for denom in [x[0], [x[0]] * 3]:
x_np = np.array(x, dtype=dtype)
with self.test_session(use_gpu=True):
x_tf = constant_op.constant(x_np, shape=x_np.shape)
y_tf = math_ops.mod(x_tf, denom)
y_tf_np = y_tf.eval()
y_np = np.fmod(x_np, denom)
self.assertAllClose(y_tf_np, y_np, atol=1e-2)
def testFixed(self):
x = [5, 10, 23]
for dtype in [np.int32, np.int64]:
# Test scalar and vector versions.
for denom in [x[0], x]:
x_np = np.array(x, dtype=dtype)
with self.test_session(use_gpu=True):
x_tf = constant_op.constant(x_np, shape=x_np.shape)
y_tf = math_ops.mod(x_tf, denom)
y_tf_np = y_tf.eval()
y_np = np.mod(x_np, denom)
self.assertAllClose(y_tf_np, y_np)
class SquaredDifferenceTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testSquaredDifference(self):
for dtype in [np.int32, np.float16]:
x = np.array([[1, 2, 3], [4, 5, 6]], dtype=dtype)
y = np.array([-3, -2, -1], dtype=dtype)
z = (x - y) * (x - y)
with test_util.device(use_gpu=True):
z_tf = self.evaluate(math_ops.squared_difference(x, y))
self.assertAllClose(z, z_tf)
class ApproximateEqualTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testApproximateEqual(self):
for dtype in [np.float32, np.double]:
x = dtype(1)
y = dtype(1.00009)
z = False
with test_util.device(use_gpu=True):
# Default tolerance is 0.00001
z_tf = self.evaluate(math_ops.approximate_equal(x, y))
self.assertAllEqual(z, z_tf)
for dtype in [np.float32, np.double]:
x = dtype(1)
y = dtype(1.000009)
z = True
with test_util.device(use_gpu=True):
# Default tolerance is 0.00001
z_tf = self.evaluate(math_ops.approximate_equal(x, y))
self.assertAllEqual(z, z_tf)
for dtype in [np.float32, np.double]:
x = np.array([[[[-1, 2.00009999], [-3, 4.01]]]], dtype=dtype)
y = np.array([[[[-1.001, 2], [-3.00009, 4]]]], dtype=dtype)
z = np.array([[[[False, True], [True, False]]]], dtype=np.bool)
with test_util.device(use_gpu=True):
z_tf = self.evaluate(math_ops.approximate_equal(x, y, tolerance=0.0001))
self.assertAllEqual(z, z_tf)
def testApproximateEqualShape(self):
for dtype in [np.float32, np.double]:
x = np.array([1, 2], dtype=dtype)
y = np.array([[1, 2]], dtype=dtype)
# The inputs 'x' and 'y' must have the same shape.
with self.assertRaisesRegexp(
ValueError, "Shapes must be equal rank, but are 1 and 2"):
math_ops.approximate_equal(x, y)
class ScalarMulTest(test_util.TensorFlowTestCase):
@test_util.run_in_graph_and_eager_modes
def testAcceptsRefs(self):
if context.executing_eagerly():
var = resource_variable_ops.ResourceVariable(10, name="var")
else:
var = variables.Variable(10)
result = math_ops.scalar_mul(3, var)
init = variables.global_variables_initializer()
with test_util.device(use_gpu=True):
self.evaluate(init)
self.assertEqual(30, self.evaluate(result))
@test_util.run_in_graph_and_eager_modes
def testAcceptsConstant(self):
const = constant_op.constant(10)
result = math_ops.scalar_mul(3, const)
with test_util.device(use_gpu=True):
self.assertEqual(30, self.evaluate(result))
@test_util.run_in_graph_and_eager_modes
def testAcceptsTensor(self):
tensor = array_ops.ones([10, 10])
result = math_ops.scalar_mul(3, tensor)
expected = array_ops.ones([10, 10]) * 3
with test_util.device(use_gpu=True):
self.assertAllEqual(self.evaluate(expected), self.evaluate(result))
@test_util.run_in_graph_and_eager_modes
def testAcceptsIndexedSlices(self):
values = constant_op.constant([2, 3, 5, 7, 0, -1], shape=[3, 2])
indices = constant_op.constant([0, 2, 5])
x = math_ops.scalar_mul(-3, ops.IndexedSlices(values, indices))
with test_util.device(use_gpu=True):
self.assertAllEqual(self.evaluate(x.values),
[[-6, -9], [-15, -21], [0, 3]])
self.assertAllEqual(self.evaluate(x.indices), [0, 2, 5])
class AccumulateNTest(test_util.TensorFlowTestCase):
def testFloat(self):
np.random.seed(12345)
x = [np.random.random((1, 2, 3, 4, 5)) - 0.5 for _ in range(5)]
tf_x = ops.convert_n_to_tensor(x)
with self.test_session(use_gpu=True):
self.assertAllClose(sum(x), math_ops.accumulate_n(tf_x).eval())
self.assertAllClose(x[0] * 5, math_ops.accumulate_n([tf_x[0]] * 5).eval())
def testInt(self):
np.random.seed(54321)
x = [np.random.randint(-128, 128, (5, 4, 3, 2, 1)) for _ in range(6)]
tf_x = ops.convert_n_to_tensor(x)
with self.test_session(use_gpu=True):
self.assertAllEqual(sum(x), math_ops.accumulate_n(tf_x).eval())
self.assertAllEqual(x[0] * 6, math_ops.accumulate_n([tf_x[0]] * 6).eval())
class AddNTest(test_util.TensorFlowTestCase):
def testPartials(self):
"""Test that previously revealed a bug in buffer forwarding for AddN."""
partials = []
for _ in range(98):
partials.append(math_ops.add_n([constant_op.constant(1)]))
partials.append(
math_ops.add_n([constant_op.constant(1),
constant_op.constant(1)]))
res = math_ops.add_n(partials) + constant_op.constant(0)
with self.test_session(use_gpu=True):
self.assertAllEqual(res.eval(), 100)
def testFloat(self):
np.random.seed(12345)
for num_inputs in range(1, 10):
x = [np.random.random((1, 2, 3, 4, 5)) - 0.5 for _ in range(num_inputs)]
tf_x = ops.convert_n_to_tensor(x)
with self.test_session(use_gpu=True):
self.assertAllClose(sum(x), math_ops.add_n(tf_x).eval())
self.assertAllClose(x[0] * num_inputs,
math_ops.add_n([tf_x[0]] * num_inputs).eval())
def testInt(self):
np.random.seed(54321)
for num_inputs in range(1, 10):
x = [
np.random.randint(-128, 128, (5, 4, 3, 2, 1))
for _ in range(num_inputs)
]
tf_x = ops.convert_n_to_tensor(x)
with self.test_session(use_gpu=True):
self.assertAllEqual(sum(x), math_ops.add_n(tf_x).eval())
self.assertAllEqual(x[0] * num_inputs,
math_ops.add_n([tf_x[0]] * num_inputs).eval())
def testGrad(self):
np.random.seed(42)
for num_inputs in range(1, 10):
with self.test_session(use_gpu=True) as sess:
input_vars = [
variables.Variable(10.0 * np.random.random())
for i in range(0, num_inputs)
]
addn = math_ops.add_n(input_vars)
sess.run(variables.global_variables_initializer())
add_n_grad = gradients.gradients(addn, input_vars)
self.assertAllEqual(np.repeat(1.0, num_inputs), # d/dx (x + y + ...) = 1
[g.eval() for g in add_n_grad])
class DivAndModTest(test_util.TensorFlowTestCase):
# TODO(aselle): Test more types before exposing new division operators.
def intTestData(self):
nums = np.arange(-10, 10, 1).reshape(20, 1)
divs = np.arange(-3, 4, 2).reshape(1, 4)
return nums, divs
def floatTestData(self):
nums = np.arange(-10, 10, .25).reshape(80, 1)
divs = np.arange(-3, 0, .25).reshape(1, 12)
return nums, divs
def testFloorModInt(self):
nums, divs = self.intTestData()
with self.test_session():
# TODO(aselle): Change test to use % after switch
# tf_result = math_ops.floor_mod(nums, divs).eval()
tf_result = math_ops.floormod(nums, divs).eval()
np_result = nums % divs
self.assertAllEqual(tf_result, np_result)
def testFloorModFloat(self):
nums, divs = self.floatTestData()
with self.test_session():
tf_result = math_ops.floormod(nums, divs).eval()
np_result = nums % divs
self.assertAllEqual(tf_result, np_result)
# TODO(aselle): put this test in once % switched to floormod
# tf2_result = (array_ops.constant(nums)
# % array_ops.constant(divs)).eval()
# self.assertAllEqual(tf2_result, tf_result)
def testTruncateModInt(self):
nums, divs = self.intTestData()
with self.test_session():
tf_result = math_ops.truncatemod(nums, divs).eval()
np_result = np.fmod(nums, divs)
self.assertAllEqual(tf_result, np_result)
def testTruncateModFloat(self):
nums, divs = self.floatTestData()
with self.test_session():
tf_result = math_ops.truncatemod(nums, divs).eval()
np_result = np.fmod(nums, divs)
self.assertAllEqual(tf_result, np_result)
def testDivideInt(self):
nums, divs = self.intTestData()
with self.test_session():
tf_result = math_ops.floor_div(nums, divs).eval()
np_result = nums // divs
self.assertAllEqual(tf_result, np_result)
# TODO(aselle): Put this test in once // is switched to floordiv
# tf2_result = (array_ops.constant(nums)
# // array_ops.constant(divs)).eval()
# self.assertAllEqual(tf2_result, tf_result)
def testDivideName(self):
with self.test_session():
op = math_ops.divide(
array_ops.constant(3), array_ops.constant(4), name="my_cool_divide")
self.assertEqual(op.name, "my_cool_divide:0")
def testRealDiv(self):
nums, divs = self.floatTestData()
with self.test_session():
tf_result = math_ops.realdiv(nums, divs).eval()
np_result = np.divide(nums, divs)
self.assertAllEqual(tf_result, np_result)
def testComplexDiv(self):
foo = array_ops.constant([1. + 3.j])
with self.test_session():
_ = math_ops.divide(foo, 1.).eval()
_ = math_ops.div(foo, 2.).eval()
def testFloorDivGrad(self):
with self.test_session():
a = variables.Variable(2.)
b = variables.Variable(4.)
with self.test_session() as sess:
sess.run(variables.global_variables_initializer())
c_grad = gradients.gradients(math_ops.divide(a, b), [a, b])
self.assertAllEqual([x.eval() for x in c_grad], [.25, -.125])
c_grad = gradients.gradients(math_ops.div(a, b), [a, b])
self.assertAllEqual([x.eval() for x in c_grad], [.25, -.125])
c_grad = gradients.gradients(math_ops.floordiv(a, b), [a, b])
self.assertAllEqual([None if x is None else x.eval()
for x in c_grad], [None, None])
def testConsistent(self):
nums, divs = self.intTestData()
with self.test_session():
tf_result = (math_ops.floor_div(nums, divs) * divs + math_ops.floormod(
nums, divs)).eval()
tf_nums = array_ops.constant(nums)
tf_divs = array_ops.constant(divs)
tf2_result = (tf_nums // tf_divs * tf_divs + tf_nums % tf_divs).eval()
np_result = (nums // divs) * divs + (nums % divs)
# Consistent with numpy
self.assertAllEqual(tf_result, np_result)
# Consistent with two forms of divide
self.assertAllEqual(tf_result, tf2_result)
# consistency for truncation form
tf3_result = (math_ops.truncatediv(nums, divs) * divs +
math_ops.truncatemod(nums, divs)).eval()
expanded_nums = np.reshape(
np.tile(nums, divs.shape[1]), (nums.shape[0], divs.shape[1]))
# Consistent with desire to get numerator
self.assertAllEqual(tf3_result, expanded_nums)
# Consistent with desire to get numerator
self.assertAllEqual(tf_result, expanded_nums)
class UnsafeDivTest(test_util.TensorFlowTestCase):
def testBasic(self):
nums = np.arange(-10, 10, .25).reshape(80, 1)
divs = np.arange(-3, 3, .25).reshape(1, 24)
np_result = np.true_divide(nums, divs)
np_result[:, divs[0] == 0] = 0
with self.test_session():
tf_result = math_ops.unsafe_div(nums, divs).eval()
self.assertAllEqual(tf_result, np_result)
if __name__ == "__main__":
googletest.main()
| apache-2.0 | 6,028,724,259,961,330,000 | 36.867617 | 80 | 0.620502 | false |
rec/grit | grit/command/Version.py | 1 | 1631 | from __future__ import absolute_import, division, print_function, unicode_literals
import os
import re
import semver
VERSION = re.compile(r'\d+\.\d+\.\d+(?:-\w\d*)?')  # prerelease suffix is optional
from grit.Args import ARGS
from grit import ChangeLog
from grit import CommandList
from grit import File
from grit import Git
from grit import GitRoot
from grit import Project
HELP = """
grit v[ersion] [<version-number>]
Without an argument, prints the current project version number.
With an argument, replaces the original version number with the argument.
"""
SAFE = True
def get_version():
files = Project.settings('version')['files']
for f in files:
old_version = File.search(f, VERSION)
if old_version:
return old_version
raise Exception('ERROR: no version number found.')
def version_commit(version_number=None, success=None, failure=None):
root = GitRoot.root()
files = Project.settings('version')['files']
old_version = get_version()
if version_number == old_version:
raise Exception('Version number is already %s' % old_version)
if not version_number:
version_number = semver.increment_string(old_version)
if not CommandList.confirm('update version %s to %s' %
(old_version, version_number)):
return
for f in files:
File.subn(os.path.join(root, f), VERSION, version_number)
if success or failure:
ChangeLog.add_status_line(version_number, success, failure)
Git.git('commit', '-am', 'Set version to %s' % version_number)
def version(version_number=None):
version_commit(version_number)
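# Illustrative usage only -- assumes a grit project whose version files contain
# a string matching VERSION (e.g. "1.2.3"); nothing below ships with grit:
#   print(get_version())       # -> '1.2.3'
#   version_commit('1.3.0')    # rewrites the files and commits the bump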
| artistic-2.0 | -4,114,299,311,459,008,000 | 29.203704 | 82 | 0.676272 | false |
lukedeo/cross-domain | data_analysis.py | 1 | 7237 | """
Functions to get insight into the data
"""
import sys
import pickle
#
# Categories analysis of all the amazon data
#
# Number of products: 2498330
# Multilabel elements: 888864
# Percentage of products with a given category
# ============================================
# Collectibles: 0.000273
# Music: 0.024316
# VideoGames: 0.019782
# Electronics: 0.079275
# Beauty: 0.020367
# Automotive: 0.057635
# Movies & TV: 0.000462
# no_category: 0.016674
# Baby: 0.017930
# Books: 0.408854
# Kitchen: 0.083820
# Everything Else: 0.000734
# Grocery: 0.018467
# MobileApps: 0.000008
# Software: 0.004045
# KindleStore: 0.275891
# SportingGoods: 0.090299
# OfficeProducts: 0.032052
# ArtsAndCrafts: 0.017305
# Magazines: 0.009083
# Appliances: 0.007523
# Toys: 0.029429
# LawnAndGarden: 0.026913
# Tools: 0.051303
# MusicalInstruments: 0.022971
# HealthPersonalCare: 0.047808
def categories_distribution(partitions_to_use, total_num_partitions):
"""
Gives information about the frequency of categories or number of elements with more than one category
Gets the data from a list of partitions of data
"""
FILE_NAME_TEMPLATE = "data/amazon-data-%s-of-%s.pkl"
multilabel_count = 0
cat_freq = {'no_category': 0}
num_products = 0
count = 1
for i in partitions_to_use:
sys.stdout.write('Analyzing package %d out of %d \r' % (count, len(partitions_to_use)))
sys.stdout.flush()
file_to_open = FILE_NAME_TEMPLATE % (i, total_num_partitions)
[products, prod_reviews] = pickle.load(open(file_to_open))
for review in prod_reviews:
labels = review['labels']
# Count categories, and number of products with more than one label
if len(labels) == 0:
cat_freq['no_category'] += 1
else:
if len(labels) > 1:
multilabel_count += 1
for cat in labels:
if cat in cat_freq:
cat_freq[cat] += 1
else:
cat_freq[cat] = 1
num_products += 1
# Just in case we need to get the data afterwards
# if len(review['labels']) != 0:
# reviews.append(review['text'])
# labels.append(review['labels'][0])
count += 1
#Normalize data
for cat in cat_freq:
cat_freq[cat] = 1.0 * cat_freq[cat] / num_products
# Show data
sys.stdout.write("\nNumber of products: %d" % num_products)
sys.stdout.write("\nMultilabel elements: %d \n" % multilabel_count)
sys.stdout.write("Percentage of products with a given category\n")
sys.stdout.write("============================================\n")
for cat in cat_freq:
sys.stdout.write("%s: %f\n" % (cat, cat_freq[cat]))
sys.stdout.write("")
return cat_freq
def build_social_data(twitter=True, ebay=True):
"""
Builds the twitter data and gets the labels of each item. Allows retrieving from
    different sources.
Returns the data in the format [social_items, label]
"""
TWITTER_FILE = 'data/twitter.pkl'
EBAY_FILE = 'data/ebay.pkl'
# Holds the social items (tweets, ebay reviews...)
social_items = []
# Holds the labels for each social item
# NOTE: For the moment we take the first label we have in the product!
labels = []
multilabel_count = 0
cat_freq = {'no_category': 0}
num_products = 0
    count = 0
    if twitter:
        tweets = pickle.load(open(TWITTER_FILE))
        for tweet in tweets:
            item_labels = tweet['labels']
            # Count categories, and number of products with more than one label
            if len(item_labels) == 0:
                cat_freq['no_category'] += 1
            else:
                if len(item_labels) > 1:
                    multilabel_count += 1
                for cat in item_labels:
                    if cat in cat_freq:
                        cat_freq[cat] += 1
                    else:
                        cat_freq[cat] = 1
                # Keep the item and its first label for the caller
                social_items.append(tweet)
                labels.append(item_labels[0])
                count += 1
            num_products += 1
    if ebay:
        products = pickle.load(open(EBAY_FILE))
        for product in products:
            item_labels = product['labels']
            # Count categories, and number of products with more than one label
            if len(item_labels) == 0:
                cat_freq['no_category'] += 1
            else:
                if len(item_labels) > 1:
                    multilabel_count += 1
                for cat in item_labels:
                    if cat in cat_freq:
                        cat_freq[cat] += 1
                    else:
                        cat_freq[cat] = 1
                # Keep the item and its first label for the caller
                social_items.append(product)
                labels.append(item_labels[0])
                count += 1
            num_products += 1
    #Normalize data
    for cat in cat_freq:
        cat_freq[cat] = 1.0 * cat_freq[cat] / num_products
    # Show data
    sys.stdout.write("\nNumber of products: %d" % num_products)
    sys.stdout.write("\nMultilabel elements: %d \n" % multilabel_count)
    sys.stdout.write("Percentage of products with a given category\n")
    sys.stdout.write("============================================\n")
    for cat in cat_freq:
        sys.stdout.write("%s: %f\n" % (cat, cat_freq[cat]))
    sys.stdout.write("")
    sys.stdout.write('%d elements loaded\n' % count)
    return [social_items, labels]
def categories_distribution(labels):
"""
Gives information about the frequency of categories or number of elements with more than one category
Gets the data from a list of labels
"""
    multilabel_count = 0
    cat_freq = {'no_category': 0}
    num_products = 0
    for item_labels in labels:
        # Count categories, and number of products with more than one label
        if len(item_labels) == 0:
            cat_freq['no_category'] += 1
        else:
            if len(item_labels) > 1:
                multilabel_count += 1
            for cat in item_labels:
                if cat in cat_freq:
                    cat_freq[cat] += 1
                else:
                    cat_freq[cat] = 1
        num_products += 1
    #Normalize data
    for cat in cat_freq:
        cat_freq[cat] = 1.0 * cat_freq[cat] / num_products
    # Show data
    sys.stdout.write("\nNumber of products: %d" % num_products)
    sys.stdout.write("\nMultilabel elements: %d \n" % multilabel_count)
    sys.stdout.write("Percentage of products with a given category\n")
    sys.stdout.write("============================================\n")
    for cat in cat_freq:
        sys.stdout.write("%s: %f\n" % (cat, cat_freq[cat]))
    sys.stdout.write("")
    return cat_freq
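# Minimal sketch of how these helpers compose (illustrative values only):
#   social_items, labels = build_social_data(twitter=True, ebay=False)
#   categories_distribution([['Books'], ['Books', 'Music'], []])
#   # -> prints counts and per-category frequencies, returns the cat_freq dict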
| gpl-2.0 | 1,212,668,472,962,764,800 | 28.538776 | 105 | 0.551057 | false |
amondot/QGIS | python/plugins/processing/gui/ConfigDialog.py | 1 | 10291 | # -*- coding: utf-8 -*-
"""
***************************************************************************
ConfigDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtCore import Qt, QEvent
from PyQt4.QtGui import (QFileDialog, QDialog, QIcon, QStyle,
QStandardItemModel, QStandardItem, QMessageBox, QStyledItemDelegate,
QLineEdit, QSpinBox, QDoubleSpinBox, QWidget, QToolButton, QHBoxLayout)
from processing.core.ProcessingConfig import ProcessingConfig
from processing.core.Processing import Processing
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgConfig.ui'))
class ConfigDialog(BASE, WIDGET):
def __init__(self, toolbox):
super(ConfigDialog, self).__init__(None)
self.setupUi(self)
self.toolbox = toolbox
self.groupIcon = QIcon()
self.groupIcon.addPixmap(self.style().standardPixmap(
QStyle.SP_DirClosedIcon), QIcon.Normal, QIcon.Off)
self.groupIcon.addPixmap(self.style().standardPixmap(
QStyle.SP_DirOpenIcon), QIcon.Normal, QIcon.On)
if hasattr(self.searchBox, 'setPlaceholderText'):
self.searchBox.setPlaceholderText(self.tr('Search...'))
self.model = QStandardItemModel()
self.tree.setModel(self.model)
self.delegate = SettingDelegate()
self.tree.setItemDelegateForColumn(1, self.delegate)
self.searchBox.textChanged.connect(self.fillTree)
self.fillTree()
self.tree.expanded.connect(self.adjustColumns)
def fillTree(self):
self.items = {}
self.model.clear()
self.model.setHorizontalHeaderLabels([self.tr('Setting'),
self.tr('Value')])
text = unicode(self.searchBox.text())
settings = ProcessingConfig.getSettings()
rootItem = self.model.invisibleRootItem()
priorityKeys = [self.tr('General'), self.tr('Models'), self.tr('Scripts')]
for group in priorityKeys:
groupItem = QStandardItem(group)
icon = ProcessingConfig.getGroupIcon(group)
groupItem.setIcon(icon)
groupItem.setEditable(False)
emptyItem = QStandardItem()
emptyItem.setEditable(False)
rootItem.insertRow(0, [groupItem, emptyItem])
for setting in settings[group]:
if setting.hidden:
continue
if text == '' or text.lower() in setting.description.lower():
labelItem = QStandardItem(setting.description)
labelItem.setIcon(icon)
labelItem.setEditable(False)
self.items[setting] = SettingItem(setting)
groupItem.insertRow(0, [labelItem, self.items[setting]])
if text != '':
self.tree.expand(groupItem.index())
providersItem = QStandardItem(self.tr('Providers'))
icon = QIcon(os.path.join(pluginPath, 'images', 'alg.png'))
providersItem.setIcon(icon)
providersItem.setEditable(False)
emptyItem = QStandardItem()
emptyItem.setEditable(False)
rootItem.insertRow(0, [providersItem, emptyItem])
for group in settings.keys():
if group in priorityKeys:
continue
groupItem = QStandardItem(group)
icon = ProcessingConfig.getGroupIcon(group)
groupItem.setIcon(icon)
groupItem.setEditable(False)
for setting in settings[group]:
if setting.hidden:
continue
if text == '' or text.lower() in setting.description.lower():
labelItem = QStandardItem(setting.description)
labelItem.setIcon(icon)
labelItem.setEditable(False)
self.items[setting] = SettingItem(setting)
groupItem.insertRow(0, [labelItem, self.items[setting]])
emptyItem = QStandardItem()
emptyItem.setEditable(False)
providersItem.appendRow([groupItem, emptyItem])
self.tree.sortByColumn(0, Qt.AscendingOrder)
self.adjustColumns()
def accept(self):
for setting in self.items.keys():
if isinstance(setting.value, bool):
setting.value = self.items[setting].checkState() == Qt.Checked
elif isinstance(setting.value, (float, int, long)):
value = unicode(self.items[setting].text())
try:
value = float(value)
setting.value = value
except ValueError:
QMessageBox.critical(self, self.tr('Wrong value'),
                            self.tr('Wrong parameter value:\n%s') % value)
return
else:
setting.value = unicode(self.items[setting].text())
setting.save()
Processing.updateAlgsList()
QDialog.accept(self)
def adjustColumns(self):
self.tree.resizeColumnToContents(0)
self.tree.resizeColumnToContents(1)
class SettingItem(QStandardItem):
def __init__(self, setting):
QStandardItem.__init__(self)
self.setting = setting
if isinstance(setting.value, bool):
self.setCheckable(True)
self.setEditable(False)
if setting.value:
self.setCheckState(Qt.Checked)
else:
self.setCheckState(Qt.Unchecked)
else:
self.setData(setting.value, Qt.EditRole)
class SettingDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
def createEditor(
self,
parent,
options,
index,
):
value = self.convertValue(index.model().data(index, Qt.EditRole))
if isinstance(value, (int, long)):
spnBox = QSpinBox(parent)
spnBox.setRange(-999999999, 999999999)
return spnBox
elif isinstance(value, float):
spnBox = QDoubleSpinBox(parent)
spnBox.setRange(-999999999.999999, 999999999.999999)
spnBox.setDecimals(6)
return spnBox
elif isinstance(value, (str, unicode)):
if os.path.isdir(value):
return FileDirectorySelector(parent)
elif os.path.isfile(value):
return FileDirectorySelector(parent, True)
else:
return FileDirectorySelector(parent, True)
def setEditorData(self, editor, index):
value = self.convertValue(index.model().data(index, Qt.EditRole))
if isinstance(value, (int, long)):
editor.setValue(value)
elif isinstance(value, float):
editor.setValue(value)
elif isinstance(value, (str, unicode)):
editor.setText(value)
def setModelData(self, editor, model, index):
value = self.convertValue(index.model().data(index, Qt.EditRole))
if isinstance(value, (int, long)):
model.setData(index, editor.value(), Qt.EditRole)
elif isinstance(value, float):
model.setData(index, editor.value(), Qt.EditRole)
elif isinstance(value, (str, unicode)):
model.setData(index, editor.text(), Qt.EditRole)
def sizeHint(self, option, index):
return QSpinBox().sizeHint()
def eventFilter(self, editor, event):
if event.type() == QEvent.FocusOut and hasattr(editor, 'canFocusOut'):
if not editor.canFocusOut:
return False
return QStyledItemDelegate.eventFilter(self, editor, event)
def convertValue(self, value):
if value is None:
return ""
try:
return int(value)
except ValueError:
try:
return float(value)
except ValueError:
return unicode(value)
class FileDirectorySelector(QWidget):
def __init__(self, parent=None, selectFile=False):
QWidget.__init__(self, parent)
# create gui
self.btnSelect = QToolButton()
self.btnSelect.setText(self.tr('...'))
self.lineEdit = QLineEdit()
self.hbl = QHBoxLayout()
self.hbl.setMargin(0)
self.hbl.setSpacing(0)
self.hbl.addWidget(self.lineEdit)
self.hbl.addWidget(self.btnSelect)
self.setLayout(self.hbl)
self.canFocusOut = False
self.selectFile = selectFile
self.setFocusPolicy(Qt.StrongFocus)
self.btnSelect.clicked.connect(self.select)
def select(self):
lastDir = ''
if not self.selectFile:
selectedPath = QFileDialog.getExistingDirectory(None,
self.tr('Select directory'), lastDir,
QFileDialog.ShowDirsOnly)
else:
selectedPath = QFileDialog.getOpenFileName(None,
self.tr('Select file'), lastDir, self.tr('All files (*.*)')
)
if not selectedPath:
return
self.lineEdit.setText(selectedPath)
self.canFocusOut = True
def text(self):
return self.lineEdit.text()
def setText(self, value):
self.lineEdit.setText(value)
| gpl-2.0 | -6,956,270,603,159,187,000 | 34.243151 | 82 | 0.570596 | false |
sygi/deep_q_rl | deep_q_rl/ale_data_set.py | 1 | 9952 | """This class stores all of the samples for training. It is able to
construct randomly selected batches of phi's from the stored history.
"""
import numpy as np
import time
import theano
floatX = theano.config.floatX
class DataSet(object):
"""A replay memory consisting of circular buffers for observed images,
actions, and rewards.
"""
def __init__(self, width, height, rng, max_steps=1000, phi_length=4, ram_size=128):
"""Construct a DataSet.
Arguments:
width, height - image size
max_steps - the number of time steps to store
phi_length - number of images to concatenate into a state
rng - initialized numpy random number generator, used to
choose random minibatches
"""
# TODO: Specify capacity in number of state transitions, not
# number of saved time steps.
# Store arguments.
self.width = width
self.height = height
self.max_steps = max_steps
self.phi_length = phi_length
self.rng = rng
self.ram_size = ram_size
# Allocate the circular buffers and indices.
self.imgs = np.zeros((max_steps, height, width), dtype='uint8')
self.actions = np.zeros(max_steps, dtype='int32')
self.rewards = np.zeros(max_steps, dtype=floatX)
self.terminal = np.zeros(max_steps, dtype='bool')
self.rams = np.zeros((max_steps, ram_size), dtype='uint8')
self.bottom = 0
self.top = 0
self.size = 0
def add_sample(self, img, action, reward, terminal, ram):
"""Add a time step record.
Arguments:
img -- observed image
ram -- observed ram state
action -- action chosen by the agent
reward -- reward received after taking the action
terminal -- boolean indicating whether the episode ended
after this time step
"""
self.imgs[self.top] = img
self.actions[self.top] = action
self.rewards[self.top] = reward
self.terminal[self.top] = terminal
self.rams[self.top] = ram
if self.size == self.max_steps:
self.bottom = (self.bottom + 1) % self.max_steps
else:
self.size += 1
self.top = (self.top + 1) % self.max_steps
def __len__(self):
"""Return an approximate count of stored state transitions."""
# TODO: Properly account for indices which can't be used, as in
# random_batch's check.
return max(0, self.size - self.phi_length)
def last_phi(self):
"""Return the most recent phi (sequence of image frames)."""
indexes = np.arange(self.top - self.phi_length, self.top)
return self.imgs.take(indexes, axis=0, mode='wrap')
def phi(self, img):
"""Return a phi (sequence of image frames), using the last phi_length -
1, plus img.
"""
indexes = np.arange(self.top - self.phi_length + 1, self.top)
phi = np.empty((self.phi_length, self.height, self.width), dtype=floatX)
phi[0:self.phi_length - 1] = self.imgs.take(indexes,
axis=0,
mode='wrap')
phi[-1] = img
return phi
def random_batch(self, batch_size):
"""Return corresponding states, rams, actions, rewards, terminal status, next_states and next_rams for batch_size randomly chosen state transitions.
"""
# Allocate the response.
states = np.zeros((batch_size,
self.phi_length,
self.height,
self.width),
dtype='uint8')
rams = np.zeros((batch_size, self.ram_size), dtype='uint8')
actions = np.zeros((batch_size, 1), dtype='int32')
rewards = np.zeros((batch_size, 1), dtype=floatX)
terminal = np.zeros((batch_size, 1), dtype='bool')
next_states = np.zeros((batch_size,
self.phi_length,
self.height,
self.width),
dtype='uint8')
next_rams = np.zeros((batch_size, self.ram_size), dtype='uint8')
count = 0
while count < batch_size:
# Randomly choose a time step from the replay memory.
index = self.rng.randint(self.bottom,
self.bottom + self.size - self.phi_length)
initial_indices = np.arange(index, index + self.phi_length)
transition_indices = initial_indices + 1
end_index = index + self.phi_length - 1
# Check that the initial state corresponds entirely to a
# single episode, meaning none but the last frame may be
# terminal. If the last frame of the initial state is
# terminal, then the last frame of the transitioned state
# will actually be the first frame of a new episode, which
# the Q learner recognizes and handles correctly during
# training by zeroing the discounted future reward estimate.
if np.any(self.terminal.take(initial_indices[0:-1], mode='wrap')):
continue
# Add the state transition to the response.
states[count] = self.imgs.take(initial_indices, axis=0, mode='wrap')
rams[count] = self.rams.take(end_index, axis=0, mode='wrap')
actions[count] = self.actions.take(end_index, mode='wrap')
rewards[count] = self.rewards.take(end_index, mode='wrap')
terminal[count] = self.terminal.take(end_index, mode='wrap')
next_states[count] = self.imgs.take(transition_indices,
axis=0,
mode='wrap')
next_rams[count] = self.rams.take(end_index+1,
axis=0, mode='wrap')
count += 1
return states, rams, actions, rewards, next_states, next_rams, terminal
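# Sketch of the intended interaction with a training loop (shapes and names
# are illustrative, not part of the API):
#   d = DataSet(width=80, height=80, rng=np.random.RandomState(0),
#               max_steps=10000, phi_length=4)
#   d.add_sample(img, action, reward, terminal, ram)   # once per emulator step
#   batch = d.random_batch(32)
#   # -> 7-tuple: states, rams, actions, rewards, next_states, next_rams, terminal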
# TESTING CODE BELOW THIS POINT... TODO: add ram to this code
def simple_tests():
np.random.seed(222)
dataset = DataSet(width=2, height=3,
rng=np.random.RandomState(42),
max_steps=6, phi_length=4)
for i in range(10):
img = np.random.randint(0, 256, size=(3, 2))
action = np.random.randint(16)
reward = np.random.random()
terminal = False
if np.random.random() < .05:
terminal = True
print 'img', img
dataset.add_sample(img, action, reward, terminal)
print "I", dataset.imgs
print "A", dataset.actions
print "R", dataset.rewards
print "T", dataset.terminal
print "SIZE", dataset.size
print
print "LAST PHI", dataset.last_phi()
print
print 'BATCH', dataset.random_batch(2)
def speed_tests():
dataset = DataSet(width=80, height=80,
rng=np.random.RandomState(42),
max_steps=20000, phi_length=4)
img = np.random.randint(0, 256, size=(80, 80))
action = np.random.randint(16)
reward = np.random.random()
start = time.time()
for i in range(100000):
terminal = False
if np.random.random() < .05:
terminal = True
dataset.add_sample(img, action, reward, terminal)
print "samples per second: ", 100000 / (time.time() - start)
start = time.time()
for i in range(200):
a = dataset.random_batch(32)
print "batches per second: ", 200 / (time.time() - start)
print dataset.last_phi()
def trivial_tests():
dataset = DataSet(width=2, height=1,
rng=np.random.RandomState(42),
max_steps=3, phi_length=2)
img1 = np.array([[1, 1]], dtype='uint8')
img2 = np.array([[2, 2]], dtype='uint8')
img3 = np.array([[3, 3]], dtype='uint8')
dataset.add_sample(img1, 1, 1, False)
dataset.add_sample(img2, 2, 2, False)
dataset.add_sample(img3, 2, 2, True)
print "last", dataset.last_phi()
print "random", dataset.random_batch(1)
def max_size_tests():
dataset1 = DataSet(width=3, height=4,
rng=np.random.RandomState(42),
max_steps=10, phi_length=4)
dataset2 = DataSet(width=3, height=4,
rng=np.random.RandomState(42),
max_steps=1000, phi_length=4)
for i in range(100):
img = np.random.randint(0, 256, size=(4, 3))
action = np.random.randint(16)
reward = np.random.random()
terminal = False
if np.random.random() < .05:
terminal = True
dataset1.add_sample(img, action, reward, terminal)
dataset2.add_sample(img, action, reward, terminal)
np.testing.assert_array_almost_equal(dataset1.last_phi(),
dataset2.last_phi())
print "passed"
def test_memory_usage_ok():
import memory_profiler
dataset = DataSet(width=80, height=80,
rng=np.random.RandomState(42),
max_steps=100000, phi_length=4)
last = time.time()
for i in xrange(1000000000):
if (i % 100000) == 0:
print i
dataset.add_sample(np.random.random((80, 80)), 1, 1, False)
if i > 200000:
states, actions, rewards, next_states, terminals = \
dataset.random_batch(32)
if (i % 10007) == 0:
print time.time() - last
mem_usage = memory_profiler.memory_usage(-1)
print len(dataset), mem_usage
last = time.time()
def main():
speed_tests()
test_memory_usage_ok()
max_size_tests()
simple_tests()
if __name__ == "__main__":
main()
| bsd-3-clause | 7,995,148,214,754,970,000 | 35.588235 | 156 | 0.553658 | false |
ntamas/python-selecta | selecta/renderers.py | 1 | 2075 | """Renderers convert model objects into a visual representation that
can be shown on the UI."""
class Renderer(object):
def attach_to_terminal(self, terminal):
"""Attaches the renderer to the given terminal."""
pass
def render(self, obj, selected=False):
"""Renders the given object into a string that can be printed to
the terminal.
Args:
obj (object): the object to render
selected (bool): whether the object should have a "selected"
appearance
Returns:
str: the string representation of the object, suitable for printing
to the terminal
"""
raise NotImplementedError
class MatchRenderer(Renderer):
"""Converts a ``selecta.matches.Match`` object into a textual
representation that can be printed on the console."""
def attach_to_terminal(self, terminal):
escape_braces = lambda s: s.replace("{", "{{").replace("}", "}}")
self._unselected_templates = {
"match_start": terminal.render("${BG_YELLOW}${FG_BLACK}"),
"match_end": terminal.render("${NORMAL}"),
"start": terminal.render("${NORMAL}"),
"end": terminal.render("${CLEAR_EOL}${NORMAL}")
}
self._selected_templates = {
"match_start": terminal.render("${BG_YELLOW}"),
"match_end": terminal.render("${BG_WHITE}"),
"start": terminal.render("${NORMAL}${BG_WHITE}${FG_BLACK}"),
"end": terminal.render("${CLEAR_EOL}${NORMAL}")
}
def render(self, match, selected=False):
match.canonicalize()
result = list(match.matched_string)
templates = self._selected_templates if selected \
else self._unselected_templates
for start, end in reversed(match.substrings):
result[end:end] = templates["match_end"]
result[start:start] = templates["match_start"]
result[0:0] = templates["start"]
result.extend(templates["end"])
return "".join(result)
| mit | -8,372,224,419,947,170,000 | 36.053571 | 79 | 0.58988 | false |
krishauser/Klampt | Python/klampt/control/blocks/trajectory_tracking.py | 1 | 2744 | from ..controller import ControllerBlock,RobotControllerIO
from klampt.model import trajectory
class TrajectoryPositionController(ControllerBlock):
"""A (robot) controller that takes in a trajectory and outputs the position
along the trajectory. If type is a 2-tuple, this will also output the
derivative of the trajectory"""
def __init__(self,traj,type=('qcmd','dqcmd')):
self.traj = traj
self.outputType = type
self.startTime = None
def inputNames(self):
return ['t']
def outputNames(self):
if isinstance(self.outputType,(tuple,list)):
return self.outputType
else:
return [self.outputType]
def advance(self,**inputs):
t = inputs['t']
if self.startTime == None:
self.startTime = t
t = t - self.startTime
if isinstance(self.outputType,(tuple,list)):
assert len(self.outputType)==2
return {self.outputType[0]:self.traj.eval(t),
self.outputType[1]:self.traj.deriv(t)}
else:
return {self.outputType:self.traj.eval(t)}
def getState(self):
return {'startTime':self.startTime}
def setState(self,state):
self.startTime = state['startTime']
def signal(self,type,**inputs):
if type=='reset':
self.startTime = None
class TrajectoryWithFeedforwardTorqueController(ControllerBlock):
"""A controller that takes in a joint trajectory and a feedforward torque
trajectory."""
def __init__(self,traj,torquetraj):
self.traj = traj
self.torquetraj = torquetraj
self.startTime = None
def inputNames(self):
return ['t']
def outputNames(self):
return ['qcmd','dqcmd','torquecmd']
def advance(self,**inputs):
api = RobotControllerIO(inputs)
t = api.time()
if self.startTime == None:
self.startTime = t
t = t - self.startTime
return api.makeFeedforwardPIDCommand(self.traj.eval(t),self.traj.deriv(t),self.torquetraj.eval(t))
def getState(self):
return {'startTime':self.startTime}
def setState(self,state):
self.startTime = state['startTime']
def signal(self,type,**inputs):
if type=='reset':
self.startTime = None
def make(robot,file="mypath.path",ff_torque_file=None):
if robot == None:
l = trajectory.Trajectory()
else:
l = trajectory.RobotTrajectory(robot)
l.load(file)
if ff_torque_file is not None:
tcmd = trajectory.Trajectory()
tcmd.load(ff_torque_file)
        return TrajectoryWithFeedforwardTorqueController(l,tcmd)
return TrajectoryPositionController(l)
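# Illustrative only -- builds a position-tracking block from a saved path file
# (the file name below is an assumption):
#   controller = make(robot, "motions/wave.path")
#   out = controller.advance(t=0.5)   # -> {'qcmd': ..., 'dqcmd': ...}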
| bsd-3-clause | 4,214,445,663,503,191,600 | 30.906977 | 106 | 0.626093 | false |
ColmFitz369/docker-repo | python-flask/my_application/app.py | 1 | 1686 | from flask import Flask
from flask import request
import os, sys
app = Flask(__name__)
@app.route('/')
def index():
return 'Index Page'
@app.route('/hello')
def hello():
<<<<<<< HEAD
return 'Hello World'
@app.route('/user/<username>')
def show_user_profile(username):
# show the user profile for that user
return 'User %s' % username
@app.route('/post/<int:post_id>')
def show_post(post_id):
# show the post with the given id, the id is an integer
return 'Post %d' % post_id
=======
return "Hello World!"
@app.route("/")
def address():
return "Dublin!"
@app.route("/")
def message():
return "How are you?"
>>>>>>> e3545d81ac7e72b259f0cbf6387101363a955bbe
@app.route('/upload', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
f = request.files['file']
f.save('./uploads/'+f.filename)
return '',201
@app.route('/chkUploads')
def chk_uploads():
path = "./uploads/"
dirs = os.listdir( path )
f_str=""
for name in dirs:
f_str +=(str)(name)+"\n"
return f_str
@app.route('/eu1')
def run_eu1():
i=0
total=0
    while i < 1000: #consider only numbers below 1000
if(i%5==0 or i%3==0): #ie if multiple of 5 or 3
total+=i #add multiple to cumulative tally
i+=1 #next number (will be used only if a valid multiple)
result=" "+(str)(total)+"\n"
return result
@app.route('/eu2')
def run_eu2():
pre,fib,tally=0,1,0 #initialize variables, pre is last term fib is current
MAX=4000000 #4million is maximum value of a term
while fib <= MAX:
        if fib % 2 == 0: tally += fib #add to tally if fib term is even
pre,fib=fib,pre+fib #get new values for pre and fib
result=" "+(str)(tally)+"\n"
return result
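# Sanity checks for the Euler endpoints (both are well-known results):
#   GET /eu1 -> " 233168\n"   (sum of multiples of 3 or 5 below 1000)
#   GET /eu2 -> " 4613732\n"  (sum of even Fibonacci terms up to 4,000,000)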
if __name__ == "__main__":
app.run(host="0.0.0.0", debug=True)
| mit | -7,064,585,944,802,551,000 | 24.545455 | 74 | 0.666074 | false |
Zerknechterer/pyload | module/plugins/internal/XFSAccount.py | 1 | 5969 | # -*- coding: utf-8 -*-
import re
import time
import urlparse
from module.plugins.internal.Account import Account
from module.plugins.internal.SimpleHoster import parseHtmlForm, set_cookies
class XFSAccount(Account):
__name__ = "XFSAccount"
__type__ = "account"
__version__ = "0.38"
__description__ = """XFileSharing account plugin"""
__license__ = "GPLv3"
__authors__ = [("zoidberg" , "[email protected]"),
("Walter Purcaro", "[email protected]" )]
HOSTER_DOMAIN = None
HOSTER_URL = None
LOGIN_URL = None
COOKIES = True
PREMIUM_PATTERN = r'\(Premium only\)'
VALID_UNTIL_PATTERN = r'Premium.[Aa]ccount expire:.*?(\d{1,2} [\w^_]+ \d{4})'
TRAFFIC_LEFT_PATTERN = r'Traffic available today:.*?<b>\s*(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
TRAFFIC_LEFT_UNIT = "MB" #: used only if no group <U> was found
LEECH_TRAFFIC_PATTERN = r'Leech Traffic left:<b>.*?(?P<S>[\d.,]+|[Uu]nlimited)\s*(?:(?P<U>[\w^_]+)\s*)?</b>'
LEECH_TRAFFIC_UNIT = "MB" #: used only if no group <U> was found
LOGIN_FAIL_PATTERN = r'Incorrect Login or Password|account was banned|Error<'
def __init__(self, manager, accounts): #@TODO: remove in 0.4.10
self.init()
return super(XFSAccount, self).__init__(manager, accounts)
def init(self):
if not self.HOSTER_DOMAIN:
self.logError(_("Missing HOSTER_DOMAIN"))
self.COOKIES = False
else:
if not self.HOSTER_URL:
self.HOSTER_URL = "http://www.%s/" % self.HOSTER_DOMAIN
if isinstance(self.COOKIES, list):
            # list.insert() requires an index; append the language cookie and
            # apply it later in login(), where a request object is available.
            self.COOKIES.append((self.HOSTER_DOMAIN, "lang", "english"))
def loadAccountInfo(self, user, req):
validuntil = None
trafficleft = None
leechtraffic = None
premium = None
if not self.HOSTER_URL: #@TODO: Remove in 0.4.10
return {'validuntil' : validuntil,
'trafficleft' : trafficleft,
'leechtraffic': leechtraffic,
'premium' : premium}
html = req.load(self.HOSTER_URL, get={'op': "my_account"}, decode=True)
premium = True if re.search(self.PREMIUM_PATTERN, html) else False
m = re.search(self.VALID_UNTIL_PATTERN, html)
if m:
expiredate = m.group(1).strip()
self.logDebug("Expire date: " + expiredate)
try:
validuntil = time.mktime(time.strptime(expiredate, "%d %B %Y"))
except Exception, e:
self.logError(e)
else:
self.logDebug("Valid until: %s" % validuntil)
if validuntil > time.mktime(time.gmtime()):
premium = True
trafficleft = -1
else:
premium = False
validuntil = None #: registered account type (not premium)
else:
self.logDebug("VALID_UNTIL_PATTERN not found")
m = re.search(self.TRAFFIC_LEFT_PATTERN, html)
if m:
try:
traffic = m.groupdict()
size = traffic['S']
if "nlimited" in size:
trafficleft = -1
if validuntil is None:
validuntil = -1
else:
if 'U' in traffic:
unit = traffic['U']
elif isinstance(self.TRAFFIC_LEFT_UNIT, basestring):
unit = self.TRAFFIC_LEFT_UNIT
else:
unit = ""
trafficleft = self.parseTraffic(size + unit)
except Exception, e:
self.logError(e)
else:
self.logDebug("TRAFFIC_LEFT_PATTERN not found")
leech = [m.groupdict() for m in re.finditer(self.LEECH_TRAFFIC_PATTERN, html)]
if leech:
leechtraffic = 0
try:
for traffic in leech:
size = traffic['S']
if "nlimited" in size:
leechtraffic = -1
if validuntil is None:
validuntil = -1
break
else:
if 'U' in traffic:
unit = traffic['U']
elif isinstance(self.LEECH_TRAFFIC_UNIT, basestring):
unit = self.LEECH_TRAFFIC_UNIT
else:
unit = ""
leechtraffic += self.parseTraffic(size + unit)
except Exception, e:
self.logError(e)
else:
self.logDebug("LEECH_TRAFFIC_PATTERN not found")
return {'validuntil' : validuntil,
'trafficleft' : trafficleft,
'leechtraffic': leechtraffic,
'premium' : premium}
def login(self, user, data, req):
if not self.HOSTER_URL: #@TODO: Remove in 0.4.10
raise Exception(_("Missing HOSTER_DOMAIN"))
if not self.LOGIN_URL:
self.LOGIN_URL = urlparse.urljoin(self.HOSTER_URL, "login.html")
        if isinstance(self.COOKIES, list):
            set_cookies(req.cj, self.COOKIES)
        html = req.load(self.LOGIN_URL, decode=True)
action, inputs = parseHtmlForm('name="FL"', html)
if not inputs:
inputs = {'op' : "login",
'redirect': self.HOSTER_URL}
inputs.update({'login' : user,
'password': data['password']})
if action:
url = urlparse.urljoin("http://", action)
else:
url = self.HOSTER_URL
html = req.load(url, post=inputs, decode=True)
if re.search(self.LOGIN_FAIL_PATTERN, html):
self.wrongPassword()
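# A concrete hoster account would normally just subclass this base and set the
# domain; the class below is illustrative, not a real pyLoad plugin:
#
# class ExampleComAccount(XFSAccount):
#     __name__ = "ExampleComAccount"
#     HOSTER_DOMAIN = "example.com"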
| gpl-3.0 | 6,233,733,894,626,523,000 | 31.977901 | 119 | 0.495895 | false |
arizvisa/syringe | template/local/pif.py | 1 | 13321 | """
Ripped from https://www.smsoft.ru/en/pifdoc.htm
"""
import ptypes
from ptypes import *
class Heading(pstruct.type):
def __Next_section_offset(self):
return dyn.pointer(Section, pint.uint16_t)
def __Section_data_offset(self):
def _object_(_, self=self):
length = self['Length'].li
return SectionData.withdefault(length.int(), length=length.int())
return dyn.pointer(_object_, pint.uint16_t)
_fields_ = [
(dyn.clone(pstr.string, length=0x10), 'Name'),
(__Next_section_offset, 'NextOffset'),
(__Section_data_offset, 'Offset'),
(pint.uint16_t, 'Length'),
]
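# A PIF file is effectively a linked list of sections: each Heading stores the
# absolute offset of the next Heading (0xffff terminates the list) plus the
# offset and length of its own data block, which is decoded via the
# SectionData definition table keyed on that length.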
class SectionData(ptype.definition):
cache = {}
class default(pstr.string):
pass
class Section(pstruct.type):
def __data(self):
res = self['heading'].li
length = res['Length']
return SectionData.withdefault(length.int(), length=length.int())
def __padding_section(self):
res = self['heading'].li
if res['NextOffset'].int() < 0xffff:
length, fields = res['NextOffset'].int() - self.getoffset(), ['heading', 'data']
return dyn.block(max(0, length - sum(self[fld].li.size() for fld in fields)))
return dyn.block(0)
_fields_ = [
(Heading, 'heading'),
(__data, 'data'),
(__padding_section, 'padding(data)'),
]
class MaximumRequired(pstruct.type):
_fields_ = [
(pint.uint16_t, 'maximum'),
(pint.uint16_t, 'required'),
]
def summary(self):
return "required={:#x} maximum={:#x}".format(self['required'].int(), self['maximum'].int())
@SectionData.define
class BasicSection(pstruct.type):
type = 0x171
@pbinary.littleendian
class _Flags(pbinary.flags):
_fields_ = [
(1, 'COM2'),
(1, 'COM1'),
(1, 'Reserved'),
(1, 'Close on exit'),
(1, 'No screen exchange'),
(1, 'Prevent switch'),
(1, 'Graphics mode'),
(1, 'Direct memory'),
]
@pbinary.littleendian
class _Program_flags(pbinary.flags):
_fields_ = [
(1, 'Unused'),
(1, 'Has parameters'),
(1, 'Exchange interrupt vectors'),
(5, 'Reserved'),
(1, 'Direct screen'),
(1, 'Stop in background mode'),
(1, 'Use coprocessor'),
(1, 'Direct keyboard'),
(4, 'Unknown'),
]
_fields_ = [
(pint.uint8_t, 'Reserved'),
(pint.uint8_t, 'Checksum'),
(dyn.clone(pstr.string, length=30), 'Window title'),
(MaximumRequired, 'Reserved memory'),
(dyn.clone(pstr.string, length=63), 'Path'),
(_Flags, 'Flags'),
(pint.uint8_t, 'Drive index'),
(dyn.clone(pstr.string, length=64), 'Directory'),
(dyn.clone(pstr.string, length=64), 'Parameters'),
(pint.uint8_t, 'Video mode'),
(pint.uint8_t, 'Text video pages quantity'),
(pint.uint8_t, 'First used interrupt'),
(pint.uint8_t, 'Last used interrupt'),
(pint.uint8_t, 'Rows'),
(pint.uint8_t, 'Columns'),
(pint.uint8_t, 'X position'),
(pint.uint8_t, 'Y position'),
(pint.uint16_t, 'Number of last video page'),
(dyn.clone(pstr.string, length=64), 'Shared program path'),
(dyn.clone(pstr.string, length=64), 'Shared program data'),
(_Program_flags, 'Program flags'),
]
@SectionData.define
class Windows386Section(pstruct.type):
type = 0x68
@pbinary.littleendian
class _Bit_mask1(pbinary.flags):
_fields_ = [
(3, 'Unused'),
(1, 'No MS-DOS transition warning'),
(1, 'Unused'),
(1, 'No MS-DOS automatic transition'),
(1, 'Unused'),
(1, 'Prevent Windows detection'),
(1, 'MS-DOS mode'),
(1, 'Unused'),
(1, 'Maximized window'),
(1, 'Minimized window'),
(1, 'Memory protection'),
(1, 'Lock application memory'),
(1, 'Fast paste'),
(1, 'XMS memory locked'),
(1, 'EMS memory locked'),
(1, 'Use shortcut key'),
(1, 'Do not use HMA'),
(1, 'Detect idle time'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Enter'),
(1, 'No Alt+Space'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
(1, 'Unused'),
(1, 'Full-screen mode'),
(1, 'Exclusive run mode'),
(1, 'Background continuation'),
(1, 'Permit exit'),
]
@pbinary.littleendian
class _Bit_mask2(pbinary.flags):
_fields_ = [
(8, 'Unused'),
(1, 'Retain video memory'),
(1, 'Memory: High graphics'),
(1, 'Memory: Low graphics'),
(1, 'Memory: Text graphics'),
(1, 'Ports: High graphics'),
(1, 'Ports: Low graphics'),
(1, 'Ports: Text graphics'),
(1, 'Video ROM emulation'),
]
@pbinary.littleendian
class _Shortcut_modifier(pbinary.flags):
_fields_ = [
(12, 'Unused'),
(1, 'Alt'),
(1, 'Ctrl'),
(2, 'Shift'),
]
_fields_ = [
(MaximumRequired, 'Conventional memory'),
(pint.uint16_t, 'Active priority'),
(pint.uint16_t, 'Background priority'),
(MaximumRequired, 'EMS memory'),
(MaximumRequired, 'XMS memory'),
(_Bit_mask1, 'Bit mask 1'),
(_Bit_mask2, 'Bit mask 2'),
(pint.uint16_t, 'Unknown_16'),
(pint.uint16_t, 'Shortcut key scan code'),
(_Shortcut_modifier, 'Shortcut key modifier'),
(pint.uint16_t, 'Use shortcut key'),
(pint.uint16_t, 'Extended shortcut key'),
(pint.uint16_t, 'Unknown_20'),
(pint.uint16_t, 'Unknown_22'),
(pint.uint32_t, 'Unknown_24'),
(dyn.clone(pstr.string, length=64), 'Parameters'),
]
@SectionData.define
class Windows286Section(pstruct.type):
type = 0x6
@pbinary.littleendian
class _Flags(pbinary.flags):
_fields_ = [
(1, 'COM4'),
(1, 'COM3'),
(8, 'Unused'),
(1, 'No screen retain'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
]
_fields_ = [
(MaximumRequired, 'XMS memory'),
(_Flags, 'Flags'),
]
@SectionData.define
class WindowsVMM40Section(pstruct.type):
type = 0x1ac
class _Dimensions(pstruct.type):
_fields_ = [
(pint.uint16_t, 'horizontal size'),
(pint.uint16_t, 'vertical size'),
]
@pbinary.littleendian
class _Bit_mask1(pbinary.flags):
_fields_ = [
(10, 'Unknown'),
(1, 'No screensaver'),
(1, 'No exit warning'),
(2, 'Unused'),
(1, 'Continue in background'),
(1, 'Reserved'),
]
@pbinary.littleendian
class _Bit_mask2(pbinary.flags):
_fields_ = [
(7, 'Unknown'),
(1, 'Full-screen mode'),
(1, 'No dynamic video memory'),
(6, 'Unused'),
(1, 'Video-ROM emulation'),
]
@pbinary.littleendian
class _Bit_mask3(pbinary.flags):
_fields_ = [
(4, 'Unknown'),
(1, 'No Ctrl+Esc'),
(1, 'No PrtSc'),
(1, 'No Alt+PrtSc'),
(1, 'No Alt+Enter'),
(1, 'No Alt+Space'),
(1, 'No Alt+Esc'),
(1, 'No Alt+Tab'),
(4, 'Unused'),
(1, 'Fast paste'),
]
@pbinary.littleendian
class _Mouse_flags(pbinary.flags):
_fields_ = [
(14, 'Unused'),
(1, 'Exclusive'),
(1, 'No selection'),
]
@pbinary.littleendian
class _Font_flags(pbinary.flags):
_fields_ = [
(4, 'Unused'),
(1, 'Current TrueType'),
(1, 'Current Raster'),
(5, 'Unknown'),
(1, 'Automatic size'),
(1, 'Use TrueType'),
(1, 'Use Raster'),
(2, 'Reserved'),
]
@pbinary.littleendian
class _Bit_mask4(pbinary.flags):
_fields_ = [
(14, 'Unused'),
(1, 'Show toolbar'),
(1, 'Unknown'),
]
@pbinary.littleendian
class _Last_maximized_flags(pbinary.flags):
_fields_ = [
(14, 'Unknown'),
(1, 'Last maximized'),
(1, 'Reserved'),
]
class _Last_window_state(pint.enum, pint.uint16_t):
_values_ = [
('Normal', 1),
('Minimized', 2),
('Maximized', 3),
]
class _Border_position(pstruct.type):
_fields_ = [
(pint.uint16_t, 'left'),
(pint.uint16_t, 'top'),
(pint.uint16_t, 'right'),
(pint.uint16_t, 'bottom'),
]
_fields_ = [
(dyn.block(88), 'Unknown_0'),
(dyn.clone(pstr.string, length=80), 'Icon filename'),
(pint.uint16_t, 'Icon number'),
(_Bit_mask1, 'Bit mask 1'),
(dyn.block(10), 'Unknown_ac'),
(pint.uint16_t, 'Priority'),
(_Bit_mask2, 'Bit mask 2'),
(dyn.block(8), 'Unknown_ba'),
(pint.uint16_t, 'Number of lines'),
(_Bit_mask3, 'Bit mask 3'),
(pint.uint16_t, 'Unknown_c6'),
(pint.uint16_t, 'Unknown_c8'),
(pint.uint16_t, 'Unknown_ca'),
(pint.uint16_t, 'Unknown_cc'),
(pint.uint16_t, 'Unknown_ce'),
(pint.uint16_t, 'Unknown_d0'),
(pint.uint16_t, 'Unknown_c2'),
(pint.uint16_t, 'Unknown_c4'),
(_Mouse_flags, 'Mouse flags'),
(dyn.block(6), 'Unknown_d8'),
(_Font_flags, 'Font flags'),
(pint.uint16_t, 'Unknown_e0'),
(_Dimensions, 'Raster font size'),
(_Dimensions, 'Current font size'),
(dyn.clone(pstr.string, length=32), 'Raster font name'),
(dyn.clone(pstr.string, length=32), 'TrueType font name'),
(pint.uint16_t, 'Unknown_12a'),
(_Bit_mask4, 'Bit mask 4'),
(pint.uint16_t, 'No restore settings'),
(_Dimensions, 'Screen symbol size'),
(_Dimensions, 'Client area size'),
(_Dimensions, 'Window size'),
(pint.uint16_t, 'Unknown_13c'),
(_Last_maximized_flags, 'Last maximized'),
(_Last_window_state, 'Last start'),
(_Border_position, 'Maximized border position'),
(_Border_position, 'Normal border position'),
(pint.uint32_t, 'Unknown_152'),
(dyn.clone(pstr.string, length=80), 'BAT file name'),
(pint.uint16_t, 'Environment size'),
(pint.uint16_t, 'DPMI memory volume'),
(pint.uint16_t, 'Unknown_1aa'),
]
@SectionData.define
class WindowsNT31Section(pstruct.type):
type = 0x8c
_fields_ = [
(pint.uint16_t, 'Hardware timer emulation'),
(dyn.block(10), 'Unknown_2'),
(dyn.clone(pstr.string, length=64), 'CONFIG.NT filename'),
(dyn.clone(pstr.string, length=64), 'AUTOEXEC.NT filename'),
]
@SectionData.define
class WindowsNT40Section(pstruct.type):
type = 0x68c
_fields_ = [
(pint.uint32_t, 'Unknown_0'),
(dyn.clone(pstr.wstring, length=128), 'Unicode parameters'),
(dyn.clone(pstr.string, length=128), 'Ascii parameters'),
(dyn.block(240), 'Unknown_184'),
(dyn.clone(pstr.wstring, length=80), 'Unicode PIF filename'),
(dyn.clone(pstr.string, length=80), 'Ascii PIF filename'),
(dyn.clone(pstr.wstring, length=30), 'Unicode window title'),
(dyn.clone(pstr.string, length=30), 'Ascii window title'),
(dyn.clone(pstr.wstring, length=80), 'Unicode icon filename'),
(dyn.clone(pstr.string, length=80), 'Ascii icon filename'),
(dyn.clone(pstr.wstring, length=64), 'Unicode working directory'),
(dyn.clone(pstr.string, length=64), 'Ascii working directory'),
(dyn.block(286), 'Unknown_56e'),
]
class Sections(parray.terminated):
_object_ = Section
def isTerminator(self, item):
res = item['heading']
return res['NextOffset'].int() == 0xffff
class File(pstruct.type):
_fields_ = [
(BasicSection, 'basicSection'),
(Heading, 'basicHeading'),
(Sections, 'sections'),
]
def enumerate(self):
item = self['basicHeading']
yield item['Name'].str(), item['Offset'].d.li
while item['NextOffset'].int() < 0xffff:
res = item['NextOffset'].d.li
item = res['heading']
yield item['Name'].str(), item['Offset'].d.li
return
def iterate(self):
for _, item in self.enumerate():
yield item
return
if __name__ == '__main__':
import ptypes, local.pif as PIF
ptypes.setsource(ptypes.prov.file('/home/user/work/syringe/template/samples/_default.pif','rb'))
z = PIF.File()
z=z.l
for name, item in z.enumerate():
print(name)
print(item)
for item in z.iterate():
print(item)
| bsd-2-clause | 2,997,885,994,163,634,000 | 30.196721 | 100 | 0.507995 | false |
musicbrainz/picard | picard/ui/tagsfromfilenames.py | 1 | 6502 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2006-2007 Lukáš Lalinský
# Copyright (C) 2009, 2014, 2019-2020 Philipp Wolfer
# Copyright (C) 2012-2013 Michael Wiencek
# Copyright (C) 2014, 2017 Sophist-UK
# Copyright (C) 2016-2017 Sambhav Kothari
# Copyright (C) 2017 Ville Skyttä
# Copyright (C) 2018 Laurent Monin
# Copyright (C) 2018 Vishal Choudhary
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from collections import OrderedDict
import os.path
import re
from PyQt5 import QtWidgets
from picard import config
from picard.script.parser import normalize_tagname
from picard.util.tags import display_tag_name
from picard.ui import PicardDialog
from picard.ui.ui_tagsfromfilenames import Ui_TagsFromFileNamesDialog
from picard.ui.util import StandardButton
class TagMatchExpression:
_numeric_tags = ('tracknumber', 'totaltracks', 'discnumber', 'totaldiscs')
def __init__(self, expression, replace_underscores=False):
self.replace_underscores = replace_underscores
self._tag_re = re.compile(r"(%\w+%)")
self._parse(expression)
def _parse(self, expression):
self._group_map = OrderedDict()
format_re = ['(?:^|/)']
for i, part in enumerate(self._tag_re.split(expression)):
if part.startswith('%') and part.endswith('%'):
name = part[1:-1]
group = '%s_%i' % (name, i)
tag = normalize_tagname(name)
self._group_map[group] = tag
if tag in self._numeric_tags:
format_re.append(r'(?P<' + group + r'>\d+)')
elif tag == 'date':
format_re.append(r'(?P<' + group + r'>\d+(?:-\d+(?:-\d+)?)?)')
else:
format_re.append(r'(?P<' + group + r'>[^/]*?)')
else:
format_re.append(re.escape(part))
# Optional extension
format_re.append(r'(?:\.\w+)?$')
self._format_re = re.compile("".join(format_re))
@property
def matched_tags(self):
# Return unique values, but preserve order
return list(OrderedDict.fromkeys(self._group_map.values()))
def match_file(self, filename):
match = self._format_re.search(filename.replace('\\', '/'))
if match:
result = {}
for group, tag in self._group_map.items():
value = match.group(group).strip()
if tag in self._numeric_tags:
value = value.lstrip("0")
if self.replace_underscores:
value = value.replace('_', ' ')
all_values = result.get(tag, [])
all_values.append(value)
result[tag] = all_values
return result
else:
return {}
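# Worked example of the expression syntax (path and tags are illustrative):
#   expr = TagMatchExpression("%artist%/%album%/%tracknumber% %title%")
#   expr.matched_tags
#   # -> ['artist', 'album', 'tracknumber', 'title']
#   expr.match_file("Artist/Album/01 Song.flac")
#   # -> {'artist': ['Artist'], 'album': ['Album'],
#   #     'tracknumber': ['1'], 'title': ['Song']}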
class TagsFromFileNamesDialog(PicardDialog):
autorestore = False
options = [
config.TextOption("persist", "tags_from_filenames_format", ""),
]
def __init__(self, files, parent=None):
super().__init__(parent)
self.ui = Ui_TagsFromFileNamesDialog()
self.ui.setupUi(self)
self.restore_geometry()
items = [
"%artist%/%album%/%title%",
"%artist%/%album%/%tracknumber% %title%",
"%artist%/%album%/%tracknumber% - %title%",
"%artist%/%album% - %tracknumber% - %title%",
"%artist% - %album%/%title%",
"%artist% - %album%/%tracknumber% %title%",
"%artist% - %album%/%tracknumber% - %title%",
]
tff_format = config.persist["tags_from_filenames_format"]
if tff_format not in items:
selected_index = 0
if tff_format:
items.insert(0, tff_format)
else:
selected_index = items.index(tff_format)
self.ui.format.addItems(items)
self.ui.format.setCurrentIndex(selected_index)
self.ui.buttonbox.addButton(StandardButton(StandardButton.OK), QtWidgets.QDialogButtonBox.AcceptRole)
self.ui.buttonbox.addButton(StandardButton(StandardButton.CANCEL), QtWidgets.QDialogButtonBox.RejectRole)
self.ui.buttonbox.accepted.connect(self.accept)
self.ui.buttonbox.rejected.connect(self.reject)
self.ui.preview.clicked.connect(self.preview)
self.ui.files.setHeaderLabels([_("File Name")])
self.files = files
self.items = []
for file in files:
item = QtWidgets.QTreeWidgetItem(self.ui.files)
item.setText(0, os.path.basename(file.filename))
self.items.append(item)
def preview(self):
expression = TagMatchExpression(self.ui.format.currentText(), self.ui.replace_underscores.isChecked())
columns = expression.matched_tags
headers = [_("File Name")] + list(map(display_tag_name, columns))
self.ui.files.setColumnCount(len(headers))
self.ui.files.setHeaderLabels(headers)
for item, file in zip(self.items, self.files):
matches = expression.match_file(file.filename)
for i, column in enumerate(columns):
values = matches.get(column, [])
item.setText(i + 1, '; '.join(values))
self.ui.files.header().resizeSections(QtWidgets.QHeaderView.ResizeToContents)
self.ui.files.header().setStretchLastSection(True)
def accept(self):
expression = TagMatchExpression(self.ui.format.currentText(), self.ui.replace_underscores.isChecked())
for file in self.files:
metadata = expression.match_file(file.filename)
for name, values in metadata.items():
file.metadata[name] = values
file.update()
config.persist["tags_from_filenames_format"] = self.ui.format.currentText()
super().accept()
| gpl-2.0 | 2,771,010,965,814,264,000 | 39.111111 | 113 | 0.612188 | false |
salsita/shishito | shishito/runtime/platform/node_webkit/control_test.py | 1 | 2103 | """
@summary: Common configuration functions supporting test execution.
Various startup and termination procedures, helper functions etc.
Not to be used for directly testing the system under test (must not contain Asserts etc.)
"""
import os
import re
from shishito.runtime.platform.shishito_control_test import ShishitoControlTest
#import pyscreenshot as Screenshotter
class ControlTest(ShishitoControlTest):
""" ControlTest for node-webkit platform """
def start_browser(self):
# call browser from proper environment
config_section = self.shishito_support.get_opt('environment_configuration')
print("*********** config_section =", config_section)
self.driver = self.test_environment.call_browser(config_section)
return self.driver
def test_init(self):
""" Executed only once after browser starts.
        Suitable for general pre-test logic that does not need to run before every individual test-case.
Waiting for given time (setting "default_implicit_wait").
"""
self.driver.implicitly_wait(int(self.shishito_support.get_opt('default_implicit_wait')))
def stop_test(self, test_info):
"""
!!!TEMPORARY METHOD!!! \n
        To be executed after every test-case (test function). If a test failed,
        the function saves screenshots created during the test.
For more information see: https://code.google.com/p/chromedriver/issues/detail?id=816
:param test_info: information about test
:return:
"""
if test_info.test_status not in ('passed', None):
# save screenshot in case test fails
screenshot_folder = os.path.join(self.shishito_support.project_root, 'screenshots')
if not os.path.exists(screenshot_folder):
os.makedirs(screenshot_folder)
file_name = re.sub('[^A-Za-z0-9_. ]+', '', test_info.test_name)
self.driver.save_screenshot(os.path.join(screenshot_folder, file_name + '.png'))
#Screenshotter.grab_to_file(os.path.join(screenshot_folder, file_name + '.png'))
| mit | -6,526,401,787,640,681,000 | 41.06 | 102 | 0.672848 | false |
merc-devel/merc | merc/features/rfc1459/privmsg.py | 1 | 2517 | from merc import channel
from merc import errors
from merc import feature
from merc import message
from merc import mode
class PrivmsgFeature(feature.Feature):
NAME = __name__
install = PrivmsgFeature.install
MAX_TARGETS = 4
class _Privmsg(message.Command):
MIN_ARITY = 2
FORCE_TRAILING = True
def __init__(self, targets, text, *args):
self.targets = targets.split(",")
self.text = text
def as_command_params(self):
return [",".join(self.targets), self.text]
def compute_targets(self, app, user, target_name):
if channel.Channel.is_channel_name(target_name):
chan = app.channels.get(target_name)
if DisallowingExternalMessages(chan).get():
try:
chan.check_has_user(user)
except errors.NoSuchNick:
raise errors.CannotSendToChan(chan.name)
app.run_hooks("channel.message.check", user, chan)
if Moderated(chan).get():
chan.check_is_voiced(user)
app.run_hooks("channel.message", user, chan, self.text)
return (app.users.get_by_uid(uid) for uid in chan.users
if uid != user.uid)
else:
target = app.users.get(target_name)
app.run_hooks("user.message", user, target, self.text)
return [target]
def get_real_target_name(self, app, target_name):
if channel.Channel.is_channel_name(target_name):
return app.channels.get(target_name).name
else:
return app.users.get(target_name).nickname
@message.Command.requires_registration
def handle_for(self, app, user, prefix):
for target_name in self.targets[:MAX_TARGETS]:
real_target_name = self.get_real_target_name(app, target_name)
for target in self.compute_targets(app, user, target_name):
target.send(user.prefix, self.__class__(real_target_name, self.text))
@PrivmsgFeature.register_user_command
class Privmsg(_Privmsg):
NAME = "PRIVMSG"
@PrivmsgFeature.register_user_command
class Notice(_Privmsg):
NAME = "NOTICE"
@PrivmsgFeature.register_channel_mode
class DisallowingExternalMessages(mode.FlagMode, mode.ChanModeMixin):
CHAR = "n"
DEFAULT = True
@PrivmsgFeature.register_channel_mode
class Moderated(mode.FlagMode, mode.ChanModeMixin):
CHAR = "m"
@PrivmsgFeature.hook("server.notify")
def send_server_notice(app, user, text):
user.send_reply(Notice("*", text))
@PrivmsgFeature.hook("server.targmax.modify")
def modify_targmax(app, targmax):
targmax["PRIVMSG"] = MAX_TARGETS
targmax["NOTICE"] = MAX_TARGETS
| mit | -7,296,159,504,572,538,000 | 25.21875 | 77 | 0.684148 | false |
blyap/my2 | app/models.py | 1 | 13818 | from datetime import datetime
import hashlib
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from markdown import markdown
import bleach
from flask import current_app, request, url_for
from flask.ext.login import UserMixin, AnonymousUserMixin
from app.exceptions import ValidationError
from . import db, login_manager
class Permission:
FOLLOW = 0x01
COMMENT = 0x02
WRITE_ARTICLES = 0x04
MODERATE_COMMENTS = 0x08
ADMINISTER = 0x80
class Role(db.Model):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(64), unique=True)
default = db.Column(db.Boolean, default=False, index=True)
permissions = db.Column(db.Integer)
users = db.relationship('User', backref='role', lazy='dynamic')
@staticmethod
def insert_roles():
roles = {
'User': (Permission.FOLLOW |
Permission.COMMENT |
Permission.WRITE_ARTICLES, True),
'Moderator': (Permission.FOLLOW |
Permission.COMMENT |
Permission.WRITE_ARTICLES |
Permission.MODERATE_COMMENTS, False),
'Administrator': (0xff, False)
}
for r in roles:
role = Role.query.filter_by(name=r).first()
if role is None:
role = Role(name=r)
role.permissions = roles[r][0]
role.default = roles[r][1]
db.session.add(role)
db.session.commit()
def __repr__(self):
return '<Role %r>' % self.name
class Follow(db.Model):
__tablename__ = 'follows'
follower_id = db.Column(db.Integer, db.ForeignKey('users.id'),
primary_key=True)
followed_id = db.Column(db.Integer, db.ForeignKey('users.id'),
primary_key=True)
timestamp = db.Column(db.DateTime, default=datetime.utcnow)
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(64), unique=True, index=True)
username = db.Column(db.String(64), unique=True, index=True)
role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
password_hash = db.Column(db.String(128))
confirmed = db.Column(db.Boolean, default=False)
name = db.Column(db.String(64))
location = db.Column(db.String(64))
about_me = db.Column(db.Text())
aim_size = db.Column(db.Float(), default=1.0)
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
avatar_hash = db.Column(db.String(32))
posts = db.relationship('Post', backref='author', lazy='dynamic')
followed = db.relationship('Follow',
foreign_keys=[Follow.follower_id],
backref=db.backref('follower', lazy='joined'),
lazy='dynamic',
cascade='all, delete-orphan')
followers = db.relationship('Follow',
foreign_keys=[Follow.followed_id],
backref=db.backref('followed', lazy='joined'),
lazy='dynamic',
cascade='all, delete-orphan')
comments = db.relationship('Comment', backref='author', lazy='dynamic')
@staticmethod
def generate_fake(count=100):
from sqlalchemy.exc import IntegrityError
from random import seed
import forgery_py
seed()
for i in range(count):
u = User(email=forgery_py.internet.email_address(),
username=forgery_py.internet.user_name(True),
password=forgery_py.lorem_ipsum.word(),
confirmed=True,
name=forgery_py.name.full_name(),
location=forgery_py.address.city(),
about_me=forgery_py.lorem_ipsum.sentence(),
member_since=forgery_py.date.date(True))
db.session.add(u)
try:
db.session.commit()
except IntegrityError:
db.session.rollback()
@staticmethod
def add_self_follows():
for user in User.query.all():
if not user.is_following(user):
user.follow(user)
db.session.add(user)
db.session.commit()
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
if self.role is None:
if self.email == current_app.config['FLASKY_ADMIN']:
self.role = Role.query.filter_by(permissions=0xff).first()
if self.role is None:
self.role = Role.query.filter_by(default=True).first()
if self.email is not None and self.avatar_hash is None:
self.avatar_hash = hashlib.md5(
self.email.encode('utf-8')).hexdigest()
self.followed.append(Follow(followed=self))
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self, password):
self.password_hash = generate_password_hash(password)
def verify_password(self, password):
return check_password_hash(self.password_hash, password)
def generate_confirmation_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def confirm(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
db.session.add(self)
return True
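    # Illustrative round trip for the confirmation token (assumes an app
    # context and a committed user, so user.id is set):
    #
    #     token = user.generate_confirmation_token()
    #     user.confirm(token)   # True; also sets user.confirmed
    #     user.confirm('junk')  # False; the serializer rejects it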
def generate_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def reset_password(self, token, new_password):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('reset') != self.id:
return False
self.password = new_password
db.session.add(self)
return True
def generate_email_change_token(self, new_email, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'change_email': self.id, 'new_email': new_email})
def change_email(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('change_email') != self.id:
return False
new_email = data.get('new_email')
if new_email is None:
return False
if self.query.filter_by(email=new_email).first() is not None:
return False
self.email = new_email
self.avatar_hash = hashlib.md5(
self.email.encode('utf-8')).hexdigest()
db.session.add(self)
return True
def can(self, permissions):
return self.role is not None and \
(self.role.permissions & permissions) == permissions
def is_administrator(self):
return self.can(Permission.ADMINISTER)
def ping(self):
self.last_seen = datetime.utcnow()
db.session.add(self)
def gravatar(self, size=100, default='identicon', rating='g'):
if request.is_secure:
url = 'https://secure.gravatar.com/avatar'
else:
url = 'http://www.gravatar.com/avatar'
hash = self.avatar_hash or hashlib.md5(
self.email.encode('utf-8')).hexdigest()
return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(
url=url, hash=hash, size=size, default=default, rating=rating)
def follow(self, user):
if not self.is_following(user):
f = Follow(follower=self, followed=user)
db.session.add(f)
def unfollow(self, user):
f = self.followed.filter_by(followed_id=user.id).first()
if f:
db.session.delete(f)
def is_following(self, user):
return self.followed.filter_by(
followed_id=user.id).first() is not None
def is_followed_by(self, user):
return self.followers.filter_by(
follower_id=user.id).first() is not None
@property
def followed_posts(self):
return Post.query.join(Follow, Follow.followed_id == Post.author_id)\
.filter(Follow.follower_id == self.id)
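    # Roughly the SQL that the property above emits (table and column names
    # come from the models in this module):
    #
    #     SELECT posts.* FROM posts
    #     JOIN follows ON follows.followed_id = posts.author_id
    #     WHERE follows.follower_id = :id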
def to_json(self):
json_user = {
            'url': url_for('api.get_user', id=self.id, _external=True),
'username': self.username,
'member_since': self.member_since,
'last_seen': self.last_seen,
'posts': url_for('api.get_user_posts', id=self.id, _external=True),
'followed_posts': url_for('api.get_user_followed_posts',
id=self.id, _external=True),
'post_count': self.posts.count()
}
return json_user
def generate_auth_token(self, expiration):
s = Serializer(current_app.config['SECRET_KEY'],
expires_in=expiration)
return s.dumps({'id': self.id}).decode('ascii')
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return None
return User.query.get(data['id'])
def __repr__(self):
return '<User %r>' % self.username
class AnonymousUser(AnonymousUserMixin):
def can(self, permissions):
return False
def is_administrator(self):
return False
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class Post(db.Model):
__tablename__ = 'posts'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
comments = db.relationship('Comment', backref='post', lazy='dynamic')
@staticmethod
def generate_fake(count=100):
from random import seed, randint
import forgery_py
seed()
user_count = User.query.count()
for i in range(count):
u = User.query.offset(randint(0, user_count - 1)).first()
p = Post(body=forgery_py.lorem_ipsum.sentences(randint(1, 5)),
timestamp=forgery_py.date.date(True),
author=u)
db.session.add(p)
db.session.commit()
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
'h1', 'h2', 'h3', 'p']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
def to_json(self):
json_post = {
'url': url_for('api.get_post', id=self.id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id,
_external=True),
'comments': url_for('api.get_post_comments', id=self.id,
_external=True),
'comment_count': self.comments.count()
}
return json_post
@staticmethod
def from_json(json_post):
body = json_post.get('body')
if body is None or body == '':
raise ValidationError('post does not have a body')
return Post(body=body)
db.event.listen(Post.body, 'set', Post.on_changed_body)
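# With the listener registered, assigning to Post.body re-renders body_html
# automatically; a rough sketch of the effect:
#
#     p = Post(body='**bold** text')
#     p.body_html  # '<p><strong>bold</strong> text</p>'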
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
disabled = db.Column(db.Boolean)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
post_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'code', 'em', 'i',
'strong']
target.body_html = bleach.linkify(bleach.clean(
markdown(value, output_format='html'),
tags=allowed_tags, strip=True))
def to_json(self):
json_comment = {
'url': url_for('api.get_comment', id=self.id, _external=True),
'post': url_for('api.get_post', id=self.post_id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id,
_external=True),
}
return json_comment
@staticmethod
def from_json(json_comment):
body = json_comment.get('body')
if body is None or body == '':
raise ValidationError('comment does not have a body')
return Comment(body=body)
db.event.listen(Comment.body, 'set', Comment.on_changed_body)
| mit | -1,825,395,711,501,689,000 | 34.890909 | 79 | 0.574106 | false |
denizs/torchUp | torchup/agents/DoubleDQN.py | 1 | 6576 | import torch
import torch.optim as optim
import torch.nn as nn
from torchup.agents.DQN import DQNAgent
from torchup.utils.utils import Transition
from torchup.base.models import Variable
class DoubleDQNAgent(DQNAgent):
'''
    The DoubleDQNAgent is an implementation of the deep reinforcement learning
    agent outlined in 'Deep Reinforcement Learning with Double Q-Learning'
    by van Hasselt et al.
    Within the scope of this paper, it was shown that the DQN algorithm introduced
    by Mnih et al. suffers from continuous overestimation of the state values,
    which can lead to learning poorer policies.
    As an approach to tackle this overestimation, the Double Q-Learning algorithm,
    initially described by van Hasselt et al., was introduced.
    Similarly to the DQN, this algorithm leverages two separate neural networks
    to estimate and update the state-action values. The main idea is to reduce
    overestimation by dividing the max operation into a step of action selection
    and a step of action evaluation:
    the online network is used for the action selection, while the target network
    estimates the resulting Q-value for this state:
    DQN:
    Q = r + gamma * max_a Q_target(s', a)
    DoubleDQN:
    Q = r + gamma * Q_target(s', argmax_a Q_online(s', a))
    The experiments conducted prior to the paper indicate that DoubleDQN
    significantly reduces overestimation, outperforming the original DQN
    algorithm within the Atari 2600 domain.
'''
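    # Tensor-level sketch of the two targets (illustration only; the real
    # computation lives in `optimize` below):
    #
    #     a_star = online_net(s1).max(1)[1]           # action selection (online net)
    #     q_next = target_net(s1).gather(1, a_star)   # action evaluation (target net)
    #     y      = r + gamma * q_next                 # Double DQN fixed target
    #
    # versus the vanilla DQN target y = r + gamma * target_net(s1).max(1)[0].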
def __init__(self, *args, **kwargs):
super(DoubleDQNAgent, self).__init__(target_model_update_freq=30000,
*args,
**kwargs)
        # What else needs to go here? Probably nothing.
def optimize(self):
'''
The `optimize` method of the `DoubleDQNAgent` performs the batch updates
        described in van Hasselt et al.'s paper 'Deep Reinforcement Learning with Double Q-Learning'
1. Sample a random minibatch of transitions from the agent's replay memory
2. Set our FIXED targets `y_j` to:
2.1 `r_j` for terminal states
2.2 `r_j + GAMMA * Q_target(phi_j+1, argmax Q(s',a))` for non-terminal states
        3. Compute our expected Q-values by a full forward run over our states
4. Perform a gradient descent step on (y_j - Q(phi_j, a_j, theta))^2 with RMSprop
'''
self.n_backprop += 1 # increment the number of performed updates
# let's sample from our experience memory.
# The `sample_batch_size` is set during `__init__`, giving us any-time access.
s0_batch, a_batch, s1_batch, r_batch, t_batch = self.memory.sample(self.sample_batch_size)
        if len(s0_batch) == 0:
self.data.loss = 0
self.logger.add_step(self.data)
return
r_batch_v = Variable(torch.cat(r_batch, dim=0))
s0_batch_v = Variable(torch.from_numpy(s0_batch))
s1_batch_v = Variable(torch.from_numpy(s1_batch), volatile=True)
a_batch_v = Variable(torch.cat(a_batch, dim=0))
        # Before we start building our targets, it's time to get some expectations
# out of our neural net, which we then use to calculate the loss.
# As our model always returns one state-action value for each possible action,
# we need to 'select' the state-action value which corresponds to the action
# we have taken. Luckily, torch provides a method called `gather`, which allows
# us to do just this.
predicted_qs_v = self.model(s0_batch_v).gather(1, a_batch_v)
self.data.q_value = predicted_qs_v.data.mean()
# Now let's start building our targets:
        # First, we need to divide our batch into two categories:
# * Transitions, that lead to a terminal state
# * Transitions, that lead to a non-terminal state
        # As described earlier, we set our targets `y_j` to `r_j` for terminal transitions
# and to `r_j + Q'(s_t+1, argmax Q(s_t+1)` for non-terminal transitions.
# We need to compute both, the expectations of the non-terminal `next_state`s
# and the expectation of all starting states (`state`).
# Also, we need to keep the order of the transitions consistent in order to
        # perform the gradient update appropriately. In order to achieve this,
# we create a bit mask which holds the position of the terminal
# states indicated by a 1. This will allow us to easily update our
# terminal state targets by selecting the respective targets via
# `target[terminal] = ...`:
terminal_mask = torch.ByteTensor(t_batch).type(self.ldt)
# First, let's obtain the actions that we should take according to our
        # online model, which is represented by the argmax of the expected Q-values
        # of our non-terminal next states. We obtain these by calling `.max(1)`
        # on our variable and selecting the second column. Remember the returns of
        # `.max(1)`? Two `torch.autograd.Variable`s or `torch.Tensor`s, the first
        # representing the actual values, the latter the indices (which we want to obtain).
# Also, note that we reassign the data to a new variable, detaching it
# from the computational graph
next_a_v = self.model(s1_batch_v).max(1)[1]
next_a_v = Variable(next_a_v.data)
# Now let's evaluate our policy with respect to our target model's parameters:
next_qs_v = self.target_model(s1_batch_v)
next_qs_v = Variable(next_qs_v.data)
next_max_qs_v = next_qs_v.gather(1, next_a_v)
# Apply terminal mask:
next_max_qs_v[terminal_mask] = 0
# Now let's build our targets:
if next_qs_v.volatile:
next_qs_v.volatile = False
# Perform the update:
# r_j + Q'(s_t+1, argmax Q(s_t+1, a)):
targets_v = (next_max_qs_v * self.gamma) + r_batch_v # <-- fixed Q-target!
# Compute the loss:
loss = self.loss_function(predicted_qs_v, targets_v)
self.data.loss = loss.data[0]
self.logger.add_step(self.data)
# Optimize
self.optimizer.zero_grad()
loss.backward()
for param in self.model.parameters():
param.grad.data.clamp_(-1, 1)
self.optimizer.step()
# Let's check, if we need to update our target network
if self.n_backprop % self.target_model_update_freq is 0:
self.target_model.load_state_dict(self.model.state_dict())
| bsd-2-clause | -2,724,416,356,105,284,600 | 48.074627 | 98 | 0.648723 | false |
otsaloma/gaupol | gaupol/attrdict.py | 1 | 2782 | # -*- coding: utf-8 -*-
# Copyright (C) 2006 Osmo Salomaa
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Observable dictionary with attribute access to keys."""
import aeidon
__all__ = ("AttributeDictionary",)
class AttributeDictionary(aeidon.Observable):
"""
Observable dictionary with attribute access to keys.
:class:`AttributeDictionary` is initialized from a root dictionary,
which is kept in sync with attribute values. This allows convenient
attribute access to dictionary keys and notifications of changes
via the :class:`aeidon.Observable` interface.
"""
def __init__(self, root):
"""Initialize an :class:`AttributeDictionary` instance."""
aeidon.Observable.__init__(self)
self._root = root
self.update(root)
def add_attribute(self, name, value):
"""Add instance attribute and corresponding root dictionary key."""
self._root[name] = value
# In the case of dictionaries, set the original dictionary
# to the root dictionary, but instantiate an AttributeDictionary
# for use as the corresponding attribute.
if isinstance(value, dict):
value = AttributeDictionary(value)
setattr(self, name, value)
self.connect("notify::{}".format(name), self._on_notify, name)
def extend(self, root):
"""Add new values from another root dictionary."""
for name, value in root.items():
if not hasattr(self, name):
self.add_attribute(name, value)
for name, value in root.items():
if isinstance(value, dict):
getattr(self, name).extend(value)
def _on_notify(self, obj, value, name):
"""Synchronize changed attribute value with root dictionary."""
self._root[name] = value
def update(self, root):
"""Update values from another root dictionary."""
self.extend(root)
for name, value in root.items():
if not isinstance(value, dict):
setattr(self, name, value)
for name, value in root.items():
if isinstance(value, dict):
getattr(self, name).update(value)
| gpl-3.0 | 207,437,394,888,405,980 | 36.594595 | 75 | 0.659238 | false |
timsavage/odin | odin/resources.py | 1 | 18168 | # -*- coding: utf-8 -*-
import copy
import six
from odin import exceptions, registration
from odin.exceptions import ValidationError
from odin.fields import NOT_PROVIDED
from odin.utils import cached_property, field_iter_items
DEFAULT_TYPE_FIELD = '$'
META_OPTION_NAMES = (
'name', 'namespace', 'name_space', 'verbose_name', 'verbose_name_plural', 'abstract', 'doc_group', 'type_field'
)
class ResourceOptions(object):
def __init__(self, meta):
self.meta = meta
self.parents = []
self.fields = []
self.virtual_fields = []
self.name = None
self.class_name = None
self.name_space = NOT_PROVIDED
self.verbose_name = None
self.verbose_name_plural = None
self.abstract = False
self.doc_group = None
self.type_field = DEFAULT_TYPE_FIELD
self._cache = {}
def contribute_to_class(self, cls, name):
cls._meta = self
self.name = cls.__name__
self.class_name = "%s.%s" % (cls.__module__, cls.__name__)
if self.meta:
meta_attrs = self.meta.__dict__.copy()
for name in self.meta.__dict__:
if name.startswith('_'):
del meta_attrs[name]
for attr_name in META_OPTION_NAMES:
if attr_name in meta_attrs:
# Allow meta to be defined as namespace
if attr_name == 'namespace':
setattr(self, 'name_space', meta_attrs.pop(attr_name))
else:
setattr(self, attr_name, meta_attrs.pop(attr_name))
elif hasattr(self.meta, attr_name):
setattr(self, attr_name, getattr(self.meta, attr_name))
# Any leftover attributes must be invalid.
if meta_attrs != {}:
raise TypeError("'class Meta' got invalid attribute(s): %s" % ','.join(meta_attrs.keys()))
del self.meta
if not self.verbose_name:
self.verbose_name = self.name.replace('_', ' ').strip('_ ')
if not self.verbose_name_plural:
self.verbose_name_plural = self.verbose_name + 's'
def add_field(self, field):
self.fields.append(field)
cached_property.clear_caches(self)
def add_virtual_field(self, field):
self.virtual_fields.append(field)
cached_property.clear_caches(self)
@property
def resource_name(self):
"""
Full name of resource including namespace (if specified)
"""
if self.name_space:
return "%s.%s" % (self.name_space, self.name)
else:
return self.name
@cached_property
def all_fields(self):
"""
All fields both standard and virtual.
"""
return self.fields + self.virtual_fields
@cached_property
def composite_fields(self):
"""
All composite fields.
"""
# Not the nicest solution but is a fairly safe way of detecting a composite field.
return [f for f in self.fields if (hasattr(f, 'of') and issubclass(f.of, Resource))]
@cached_property
def container_fields(self):
"""
All composite fields with the container flag.
Used by XML like codecs.
"""
return [f for f in self.composite_fields if getattr(f, 'use_container', False)]
@cached_property
def field_map(self):
return {f.attname: f for f in self.fields}
@cached_property
def parent_resource_names(self):
"""
List of parent resource names.
"""
return [p._meta.resource_name for p in self.parents]
@cached_property
def attribute_fields(self):
"""
List of fields where is_attribute is True.
"""
return [f for f in self.fields if f.is_attribute]
@cached_property
def element_fields(self):
"""
List of fields where is_attribute is False.
"""
return [f for f in self.fields if not f.is_attribute]
@cached_property
def element_field_map(self):
return {f.attname: f for f in self.element_fields}
def __repr__(self):
return '<Options for %s>' % self.resource_name
class ResourceBase(type):
"""
Metaclass for all Resources.
"""
def __new__(cls, name, bases, attrs):
super_new = super(ResourceBase, cls).__new__
# attrs will never be empty for classes declared in the standard way
# (ie. with the `class` keyword). This is quite robust.
if name == 'NewBase' and attrs == {}:
return super_new(cls, name, bases, attrs)
parents = [b for b in bases if isinstance(b, ResourceBase) and not (b.__name__ == 'NewBase'
and b.__mro__ == (b, object))]
if not parents:
# If this isn't a subclass of Resource, don't do anything special.
return super_new(cls, name, bases, attrs)
# Create the class.
module = attrs.pop('__module__')
new_class = super_new(cls, name, bases, {'__module__': module})
attr_meta = attrs.pop('Meta', None)
abstract = getattr(attr_meta, 'abstract', False)
if not attr_meta:
meta = getattr(new_class, 'Meta', None)
else:
meta = attr_meta
base_meta = getattr(new_class, '_meta', None)
new_class.add_to_class('_meta', ResourceOptions(meta))
# Generate a namespace if one is not provided
if new_class._meta.name_space is NOT_PROVIDED and base_meta:
# Namespace is inherited
if (not new_class._meta.name_space) or (new_class._meta.name_space is NOT_PROVIDED):
new_class._meta.name_space = base_meta.name_space
if new_class._meta.name_space is NOT_PROVIDED:
new_class._meta.name_space = module
# Bail out early if we have already created this class.
r = registration.get_resource(new_class._meta.resource_name)
if r is not None:
return r
# Add all attributes to the class.
for obj_name, obj in attrs.items():
new_class.add_to_class(obj_name, obj)
# Sort the fields
new_class._meta.fields = sorted(new_class._meta.fields, key=hash)
# All the fields of any type declared on this model
local_field_attnames = set([f.attname for f in new_class._meta.fields])
field_attnames = set(local_field_attnames)
for base in parents:
if not hasattr(base, '_meta'):
# Things without _meta aren't functional models, so they're
# uninteresting parents.
continue
# Check for clashes between locally declared fields and those
# on the base classes (we cannot handle shadowed fields at the
# moment).
for field in base._meta.all_fields:
if field.attname in local_field_attnames:
raise Exception('Local field %r in class %r clashes with field of similar name from '
'base class %r' % (field.attname, name, base.__name__))
for field in base._meta.fields:
if field.attname not in field_attnames:
field_attnames.add(field.attname)
new_class.add_to_class(field.attname, copy.deepcopy(field))
for field in base._meta.virtual_fields:
new_class.add_to_class(field.attname, copy.deepcopy(field))
new_class._meta.parents += base._meta.parents
new_class._meta.parents.append(base)
if abstract:
return new_class
# Register resource
registration.register_resources(new_class)
# Because of the way imports happen (recursively), we may or may not be
# the first time this model tries to register with the framework. There
# should only be one class for each model, so we always return the
# registered version.
return registration.get_resource(new_class._meta.resource_name)
def add_to_class(cls, name, value):
if hasattr(value, 'contribute_to_class'):
value.contribute_to_class(cls, name)
else:
setattr(cls, name, value)
@six.add_metaclass(ResourceBase)
class Resource(object):
def __init__(self, *args, **kwargs):
args_len = len(args)
if args_len > len(self._meta.fields):
            raise TypeError('This resource takes %s positional arguments but %s were given.' % (
                len(self._meta.fields), args_len))
# The ordering of the zip calls matter - zip throws StopIteration
# when an iter throws it. So if the first iter throws it, the second
# is *not* consumed. We rely on this, so don't change the order
# without changing the logic.
fields_iter = iter(self._meta.fields)
if args_len:
if not kwargs:
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
else:
for val, field in zip(args, fields_iter):
setattr(self, field.attname, val)
kwargs.pop(field.name, None)
# Now we're left with the unprocessed fields that *must* come from
# keywords, or default.
for field in fields_iter:
try:
val = kwargs.pop(field.attname)
except KeyError:
val = field.get_default()
setattr(self, field.attname, val)
if kwargs:
raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0])
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self)
def __str__(self):
return '%s resource' % self._meta.resource_name
@classmethod
def create_from_dict(cls, d, full_clean=False):
"""
Create a resource instance from a dictionary.
"""
return create_resource_from_dict(d, cls, full_clean)
def to_dict(self, include_virtual=True):
"""
Convert this resource into a `dict` of field_name/value pairs.
.. note::
            This method is not recursive; it only operates on this single resource, and any sub-resources are
            returned as-is. The use case that prompted the creation of this method is within codecs, where a
            resource must be converted into a type that can be serialised; these codecs then operate recursively
            on the returned `dict`.
:param include_virtual: Include virtual fields when generating `dict`.
"""
fields = self._meta.all_fields if include_virtual else self._meta.fields
return dict((f.name, v) for f, v in field_iter_items(self, fields))
def convert_to(self, to_resource, context=None, ignore_fields=None, **field_values):
"""
Convert this resource into a specified resource.
A mapping must be defined for conversion between this resource and to_resource or an exception will be raised.
"""
mapping = registration.get_mapping(self.__class__, to_resource)
ignore_fields = ignore_fields or []
ignore_fields.extend(mapping.exclude_fields)
self.full_clean(ignore_fields)
return mapping(self, context).convert(**field_values)
def update_existing(self, dest_obj, context=None, ignore_fields=None):
"""
Update the fields on an existing destination object.
A mapping must be defined for conversion between this resource and ``dest_obj`` type or an exception will be
raised.
"""
self.full_clean(ignore_fields)
mapping = registration.get_mapping(self.__class__, dest_obj.__class__)
return mapping(self, context).update(dest_obj, ignore_fields)
def extra_attrs(self, attrs):
"""
Called during de-serialisation of data if there are any extra fields defined in the document.
This allows the resource to decide how to handle these fields. By default they are ignored.
"""
pass
def clean(self):
"""
Chance to do more in depth validation.
"""
pass
def full_clean(self, exclude=None):
"""
Calls clean_fields, clean on the resource and raises ``ValidationError``
for any errors that occurred.
"""
errors = {}
try:
self.clean_fields(exclude)
except ValidationError as e:
errors = e.update_error_dict(errors)
try:
self.clean()
except ValidationError as e:
errors = e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def clean_fields(self, exclude=None):
errors = {}
for f in self._meta.fields:
if exclude and f.name in exclude:
continue
raw_value = f.value_from_object(self)
if f.null and raw_value is None:
continue
try:
raw_value = f.clean(raw_value)
except ValidationError as e:
errors[f.name] = e.messages
# Check for resource level clean methods.
clean_method = getattr(self, "clean_%s" % f.attname, None)
if callable(clean_method):
try:
raw_value = clean_method(raw_value)
except ValidationError as e:
errors.setdefault(f.name, []).extend(e.messages)
setattr(self, f.attname, raw_value)
if errors:
raise ValidationError(errors)
def resolve_resource_type(resource):
if isinstance(resource, type) and issubclass(resource, Resource):
return resource._meta.resource_name, resource._meta.type_field
else:
return resource, DEFAULT_TYPE_FIELD
def create_resource_from_dict(d, resource=None, full_clean=True, copy_dict=True):
"""
Create a resource from a dict.
:param d: dictionary of data.
:param resource: A resource type, resource name or list of resources and names to use as the base for creating a
resource. If a list is supplied the first item will be used if a resource type is not supplied; this could also
be a parent(s) of any resource defined by the dict.
:param full_clean: Do a full clean as part of the creation.
:param copy_dict: Use a copy of the input dictionary rather than destructively processing the input dict.
"""
assert isinstance(d, dict)
if copy_dict:
d = d.copy()
if resource:
resource_type = None
# Convert to single resource then resolve document type
if isinstance(resource, (tuple, list)):
            # Use a list, not a generator: it is iterated again below when
            # building the error message.
            resources = [resolve_resource_type(r) for r in resource]
else:
resources = [resolve_resource_type(resource)]
for resource_name, type_field in resources:
# See if the input includes a type field and check it's registered
document_resource_name = d.get(type_field, None)
if document_resource_name:
resource_type = registration.get_resource(document_resource_name)
else:
resource_type = registration.get_resource(resource_name)
if not resource_type:
raise exceptions.ResourceException("Resource `%s` is not registered." % document_resource_name)
if document_resource_name:
# Check resource types match or are inherited types
if (resource_name == document_resource_name or
resource_name in resource_type._meta.parent_resource_names):
break # We are done
else:
break
if not resource_type:
raise exceptions.ResourceException(
"Incoming resource does not match [%s]" % ', '.join(r for r, t in resources))
else:
        # No resource specified, rely on the type field
document_resource_name = d.pop(DEFAULT_TYPE_FIELD, None)
if not document_resource_name:
raise exceptions.ResourceException("Resource not defined.")
# Get an instance of a resource type
resource_type = registration.get_resource(document_resource_name)
if not resource_type:
raise exceptions.ResourceException("Resource `%s` is not registered." % document_resource_name)
attrs = []
errors = {}
for f in resource_type._meta.fields:
value = d.pop(f.name, NOT_PROVIDED)
if value is NOT_PROVIDED:
value = f.get_default() if f.use_default_if_not_provided else None
else:
try:
value = f.to_python(value)
except ValidationError as ve:
errors[f.name] = ve.error_messages
attrs.append(value)
if errors:
raise ValidationError(errors)
new_resource = resource_type(*attrs)
if d:
new_resource.extra_attrs(d)
if full_clean:
new_resource.full_clean()
return new_resource
def build_object_graph(d, resource=None, full_clean=True, copy_dict=True):
"""
Generate an object graph from a dict
:param resource: A resource type, resource name or list of resources and names to use as the base for creating a
resource. If a list is supplied the first item will be used if a resource type is not supplied.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(d, dict):
return create_resource_from_dict(d, resource, full_clean, copy_dict)
if isinstance(d, list):
return [build_object_graph(o, resource, full_clean, copy_dict) for o in d]
return d
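# Illustrative usage, assuming a resource has been declared and registered
# (module, class and field names here are examples only):
#
#     import odin
#
#     class Book(odin.Resource):
#         title = odin.StringField()
#
#     book = build_object_graph({'$': 'my_module.Book', 'title': 'Odin'})
#     book.title  # 'Odin'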
class ResourceIterable(object):
"""
Iterable that yields resources.
"""
def __init__(self, sequence):
self.sequence = sequence
def __iter__(self):
for item in self.sequence:
yield item
| bsd-3-clause | -4,006,824,706,083,677,700 | 34.83432 | 119 | 0.59269 | false |
swistaq/aoc2016 | src/day10.py | 1 | 2065 | from __future__ import print_function
import re
# id low high
# {botno : ((bot|output, id), (bot|output, id))}
bots = {}
# (value, botno)
vals = []
# {output_id : [values]}
outputs = {}
# {botno : [values]}
states = {}
def parse(line):
global bots, vals, outputs
if line.startswith("bot"):
match = re.search("bot\s(\d+)\sgives low to (bot|output)\s(\d+) and high to (bot|output)\s(\d+)", line)
bots.update(
{int(match.group(1)): ((match.group(2), int(match.group(3))), (match.group(4), int(match.group(5))))})
elif line.startswith("value"):
match = re.search("value\s(\d+)\sgoes to bot\s(\d+)", line)
vals.append((int(match.group(1)), int(match.group(2))))
def update_output(output_id, value):
global outputs
    if output_id in outputs:
outputs.get(output_id).append(value)
else:
outputs.update({output_id: [value]})
def update_bot(bot_no, value):
    global states
    if bot_no in states:
states.get(bot_no).append(value)
        if len(states.get(bot_no)) == 2:
vals = sorted(states.get(bot_no))
if vals[0] == 17 and vals[1] == 61:
print("BOT COMPARING 17 AND 61:", bot_no)
states.update({bot_no: []})
((target_low, targe_low_id), (target_high, target_high_id)) = bots.get(bot_no)
if target_low == "bot":
update_bot(targe_low_id, vals[0])
elif target_low == "output":
update_output(targe_low_id, vals[0])
if target_high == "bot":
update_bot(target_high_id, vals[1])
elif target_high == "output":
update_output(target_high_id, vals[1])
else:
states.update({bot_no: [value]})
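# The two instruction shapes the parser above expects, as found in the puzzle
# input:
#
#     value 5 goes to bot 2
#     bot 2 gives low to bot 1 and high to bot 0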
if __name__ == "__main__":
with open("resources/day10") as infile:
for line in infile:
parse(line)
for val in vals:
update_bot(val[1], val[0])
print(outputs.get(0)[0]*outputs.get(1)[0]*outputs.get(2)[0])
| gpl-3.0 | -7,242,079,772,893,059,000 | 31.777778 | 114 | 0.543826 | false |
tjssmy/CuviewerPy | vtkTests/PyQtInter.py | 1 | 1292 | #!/usr/bin/env python
import sys
import vtk
from PyQt4 import QtCore, QtGui
from vtk.qt4.QVTKRenderWindowInteractor import QVTKRenderWindowInteractor
class MainWindow(QtGui.QMainWindow):
def __init__(self, parent=None):
QtGui.QMainWindow.__init__(self, parent)
self.frame = QtGui.QFrame()
self.vl = QtGui.QVBoxLayout()
self.vtkWidget = QVTKRenderWindowInteractor(self.frame)
self.vl.addWidget(self.vtkWidget)
self.ren = vtk.vtkRenderer()
self.vtkWidget.GetRenderWindow().AddRenderer(self.ren)
self.iren = self.vtkWidget.GetRenderWindow().GetInteractor()
# Create source
source = vtk.vtkSphereSource()
source.SetCenter(0, 0, 0)
source.SetRadius(5.0)
# Create a mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
# Create an actor
actor = vtk.vtkActor()
actor.SetMapper(mapper)
self.ren.AddActor(actor)
self.ren.ResetCamera()
self.frame.setLayout(self.vl)
self.setCentralWidget(self.frame)
self.show()
self.iren.Initialize()
if __name__ == "__main__":
app = QtGui.QApplication(sys.argv)
window = MainWindow()
    sys.exit(app.exec_())
| mit | -2,950,600,264,807,827,000 | 23.865385 | 73 | 0.641641 | false |
HiSPARC/station-software | user/python/Lib/lib-tk/ttk.py | 2 | 56173 | """Ttk wrapper.
This module provides classes to allow using Tk themed widget set.
Ttk is based on a revised and enhanced version of
TIP #48 (http://tip.tcl.tk/48) specified style engine.
Its basic idea is to separate, to the extent possible, the code
implementing a widget's behavior from the code implementing its
appearance. Widget class bindings are primarily responsible for
maintaining the widget state and invoking callbacks, all aspects
of the widgets appearance lies at Themes.
"""
__version__ = "0.3.1"
__author__ = "Guilherme Polo <[email protected]>"
__all__ = ["Button", "Checkbutton", "Combobox", "Entry", "Frame", "Label",
"Labelframe", "LabelFrame", "Menubutton", "Notebook", "Panedwindow",
"PanedWindow", "Progressbar", "Radiobutton", "Scale", "Scrollbar",
"Separator", "Sizegrip", "Style", "Treeview",
# Extensions
"LabeledScale", "OptionMenu",
# functions
"tclobjs_to_py", "setup_master"]
import Tkinter
from Tkinter import _flatten, _join, _stringify, _splitdict
# Verify if Tk is new enough to not need the Tile package
_REQUIRE_TILE = True if Tkinter.TkVersion < 8.5 else False
def _load_tile(master):
if _REQUIRE_TILE:
import os
tilelib = os.environ.get('TILE_LIBRARY')
if tilelib:
# append custom tile path to the list of directories that
# Tcl uses when attempting to resolve packages with the package
# command
master.tk.eval(
'global auto_path; '
'lappend auto_path {%s}' % tilelib)
master.tk.eval('package require tile') # TclError may be raised here
master._tile_loaded = True
def _format_optvalue(value, script=False):
"""Internal function."""
if script:
# if caller passes a Tcl script to tk.call, all the values need to
# be grouped into words (arguments to a command in Tcl dialect)
value = _stringify(value)
elif isinstance(value, (list, tuple)):
value = _join(value)
return value
def _format_optdict(optdict, script=False, ignore=None):
"""Formats optdict to a tuple to pass it to tk.call.
E.g. (script=False):
{'foreground': 'blue', 'padding': [1, 2, 3, 4]} returns:
('-foreground', 'blue', '-padding', '1 2 3 4')"""
opts = []
for opt, value in optdict.iteritems():
if not ignore or opt not in ignore:
opts.append("-%s" % opt)
if value is not None:
opts.append(_format_optvalue(value, script))
return _flatten(opts)
def _mapdict_values(items):
# each value in mapdict is expected to be a sequence, where each item
# is another sequence containing a state (or several) and a value
# E.g. (script=False):
# [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])]
# returns:
# ['active selected', 'grey', 'focus', [1, 2, 3, 4]]
opt_val = []
for item in items:
state = item[:-1]
val = item[-1]
        # hacks for backward compatibility
state[0] # raise IndexError if empty
if len(state) == 1:
# if it is empty (something that evaluates to False), then
# format it to Tcl code to denote the "normal" state
state = state[0] or ''
else:
# group multiple states
state = ' '.join(state) # raise TypeError if not str
opt_val.append(state)
if val is not None:
opt_val.append(val)
return opt_val
def _format_mapdict(mapdict, script=False):
"""Formats mapdict to pass it to tk.call.
E.g. (script=False):
{'expand': [('active', 'selected', 'grey'), ('focus', [1, 2, 3, 4])]}
returns:
('-expand', '{active selected} grey focus {1, 2, 3, 4}')"""
opts = []
for opt, value in mapdict.iteritems():
opts.extend(("-%s" % opt,
_format_optvalue(_mapdict_values(value), script)))
return _flatten(opts)
def _format_elemcreate(etype, script=False, *args, **kw):
"""Formats args and kw according to the given element factory etype."""
spec = None
opts = ()
if etype in ("image", "vsapi"):
if etype == "image": # define an element based on an image
# first arg should be the default image name
iname = args[0]
# next args, if any, are statespec/value pairs which is almost
# a mapdict, but we just need the value
imagespec = _join(_mapdict_values(args[1:]))
spec = "%s %s" % (iname, imagespec)
else:
# define an element whose visual appearance is drawn using the
# Microsoft Visual Styles API which is responsible for the
# themed styles on Windows XP and Vista.
# Availability: Tk 8.6, Windows XP and Vista.
class_name, part_id = args[:2]
statemap = _join(_mapdict_values(args[2:]))
spec = "%s %s %s" % (class_name, part_id, statemap)
opts = _format_optdict(kw, script)
elif etype == "from": # clone an element
# it expects a themename and optionally an element to clone from,
# otherwise it will clone {} (empty element)
spec = args[0] # theme name
if len(args) > 1: # elementfrom specified
opts = (_format_optvalue(args[1], script),)
if script:
spec = '{%s}' % spec
opts = ' '.join(opts)
return spec, opts
def _format_layoutlist(layout, indent=0, indent_size=2):
"""Formats a layout list so we can pass the result to ttk::style
layout and ttk::style settings. Note that the layout doesn't have to
be a list necessarily.
E.g.:
[("Menubutton.background", None),
("Menubutton.button", {"children":
[("Menubutton.focus", {"children":
[("Menubutton.padding", {"children":
[("Menubutton.label", {"side": "left", "expand": 1})]
})]
})]
}),
("Menubutton.indicator", {"side": "right"})
]
returns:
Menubutton.background
Menubutton.button -children {
Menubutton.focus -children {
Menubutton.padding -children {
Menubutton.label -side left -expand 1
}
}
}
Menubutton.indicator -side right"""
script = []
for layout_elem in layout:
elem, opts = layout_elem
opts = opts or {}
fopts = ' '.join(_format_optdict(opts, True, ("children",)))
head = "%s%s%s" % (' ' * indent, elem, (" %s" % fopts) if fopts else '')
if "children" in opts:
script.append(head + " -children {")
indent += indent_size
newscript, indent = _format_layoutlist(opts['children'], indent,
indent_size)
script.append(newscript)
indent -= indent_size
script.append('%s}' % (' ' * indent))
else:
script.append(head)
return '\n'.join(script), indent
def _script_from_settings(settings):
"""Returns an appropriate script, based on settings, according to
theme_settings definition to be used by theme_settings and
theme_create."""
script = []
# a script will be generated according to settings passed, which
# will then be evaluated by Tcl
for name, opts in settings.iteritems():
# will format specific keys according to Tcl code
if opts.get('configure'): # format 'configure'
s = ' '.join(_format_optdict(opts['configure'], True))
script.append("ttk::style configure %s %s;" % (name, s))
if opts.get('map'): # format 'map'
s = ' '.join(_format_mapdict(opts['map'], True))
script.append("ttk::style map %s %s;" % (name, s))
if 'layout' in opts: # format 'layout' which may be empty
if not opts['layout']:
s = 'null' # could be any other word, but this one makes sense
else:
s, _ = _format_layoutlist(opts['layout'])
script.append("ttk::style layout %s {\n%s\n}" % (name, s))
if opts.get('element create'): # format 'element create'
eopts = opts['element create']
etype = eopts[0]
# find where args end, and where kwargs start
argc = 1 # etype was the first one
while argc < len(eopts) and not hasattr(eopts[argc], 'iteritems'):
argc += 1
elemargs = eopts[1:argc]
elemkw = eopts[argc] if argc < len(eopts) and eopts[argc] else {}
spec, opts = _format_elemcreate(etype, True, *elemargs, **elemkw)
script.append("ttk::style element create %s %s %s %s" % (
name, etype, spec, opts))
return '\n'.join(script)
def _list_from_statespec(stuple):
"""Construct a list from the given statespec tuple according to the
    statespec accepted by _format_mapdict."""
nval = []
for val in stuple:
typename = getattr(val, 'typename', None)
if typename is None:
nval.append(val)
else: # this is a Tcl object
val = str(val)
if typename == 'StateSpec':
val = val.split()
nval.append(val)
it = iter(nval)
return [_flatten(spec) for spec in zip(it, it)]
def _list_from_layouttuple(tk, ltuple):
"""Construct a list from the tuple returned by ttk::layout, this is
somewhat the reverse of _format_layoutlist."""
ltuple = tk.splitlist(ltuple)
res = []
indx = 0
while indx < len(ltuple):
name = ltuple[indx]
opts = {}
res.append((name, opts))
indx += 1
while indx < len(ltuple): # grab name's options
opt, val = ltuple[indx:indx + 2]
if not opt.startswith('-'): # found next name
break
opt = opt[1:] # remove the '-' from the option
indx += 2
if opt == 'children':
val = _list_from_layouttuple(tk, val)
opts[opt] = val
return res
def _val_or_dict(tk, options, *args):
"""Format options then call Tk command with args and options and return
the appropriate result.
If no option is specified, a dict is returned. If an option is
specified with the None value, the value for that option is returned.
Otherwise, the function just sets the passed options and the caller
shouldn't be expecting a return value anyway."""
options = _format_optdict(options)
res = tk.call(*(args + options))
if len(options) % 2: # option specified without a value, return its value
return res
return _splitdict(tk, res, conv=_tclobj_to_py)
def _convert_stringval(value):
"""Converts a value to, hopefully, a more appropriate Python object."""
value = unicode(value)
try:
value = int(value)
except (ValueError, TypeError):
pass
return value
def _to_number(x):
if isinstance(x, str):
if '.' in x:
x = float(x)
else:
x = int(x)
return x
def _tclobj_to_py(val):
"""Return value converted from Tcl object to Python object."""
if val and hasattr(val, '__len__') and not isinstance(val, basestring):
if getattr(val[0], 'typename', None) == 'StateSpec':
val = _list_from_statespec(val)
else:
val = map(_convert_stringval, val)
elif hasattr(val, 'typename'): # some other (single) Tcl object
val = _convert_stringval(val)
return val
def tclobjs_to_py(adict):
"""Returns adict with its values converted from Tcl objects to Python
objects."""
for opt, val in adict.items():
adict[opt] = _tclobj_to_py(val)
return adict
def setup_master(master=None):
"""If master is not None, itself is returned. If master is None,
the default master is returned if there is one, otherwise a new
master is created and returned.
If it is not allowed to use the default root and master is None,
RuntimeError is raised."""
if master is None:
if Tkinter._support_default_root:
master = Tkinter._default_root or Tkinter.Tk()
else:
raise RuntimeError(
"No master specified and Tkinter is "
"configured to not support default root")
return master
class Style(object):
"""Manipulate style database."""
_name = "ttk::style"
def __init__(self, master=None):
master = setup_master(master)
if not getattr(master, '_tile_loaded', False):
# Load tile now, if needed
_load_tile(master)
self.master = master
self.tk = self.master.tk
def configure(self, style, query_opt=None, **kw):
"""Query or sets the default value of the specified option(s) in
style.
Each key in kw is an option and each value is either a string or
a sequence identifying the value for that option."""
if query_opt is not None:
kw[query_opt] = None
return _val_or_dict(self.tk, kw, self._name, "configure", style)
def map(self, style, query_opt=None, **kw):
"""Query or sets dynamic values of the specified option(s) in
style.
Each key in kw is an option and each value should be a list or a
tuple (usually) containing statespecs grouped in tuples, or list,
or something else of your preference. A statespec is compound of
one or more states and then a value."""
if query_opt is not None:
return _list_from_statespec(self.tk.splitlist(
self.tk.call(self._name, "map", style, '-%s' % query_opt)))
return _splitdict(
self.tk,
self.tk.call(self._name, "map", style, *_format_mapdict(kw)),
conv=_tclobj_to_py)
def lookup(self, style, option, state=None, default=None):
"""Returns the value specified for option in style.
If state is specified it is expected to be a sequence of one
or more states. If the default argument is set, it is used as
a fallback value in case no specification for option is found."""
state = ' '.join(state) if state else ''
return self.tk.call(self._name, "lookup", style, '-%s' % option,
state, default)
def layout(self, style, layoutspec=None):
"""Define the widget layout for given style. If layoutspec is
omitted, return the layout specification for given style.
layoutspec is expected to be a list or an object different than
None that evaluates to False if you want to "turn off" that style.
If it is a list (or tuple, or something else), each item should be
a tuple where the first item is the layout name and the second item
should have the format described below:
LAYOUTS
A layout can contain the value None, if takes no options, or
a dict of options specifying how to arrange the element.
The layout mechanism uses a simplified version of the pack
geometry manager: given an initial cavity, each element is
allocated a parcel. Valid options/values are:
side: whichside
Specifies which side of the cavity to place the
element; one of top, right, bottom or left. If
omitted, the element occupies the entire cavity.
sticky: nswe
Specifies where the element is placed inside its
allocated parcel.
children: [sublayout... ]
Specifies a list of elements to place inside the
element. Each element is a tuple (or other sequence)
where the first item is the layout name, and the other
is a LAYOUT."""
lspec = None
if layoutspec:
lspec = _format_layoutlist(layoutspec)[0]
elif layoutspec is not None: # will disable the layout ({}, '', etc)
lspec = "null" # could be any other word, but this may make sense
# when calling layout(style) later
return _list_from_layouttuple(self.tk,
self.tk.call(self._name, "layout", style, lspec))
def element_create(self, elementname, etype, *args, **kw):
"""Create a new element in the current theme of given etype."""
spec, opts = _format_elemcreate(etype, False, *args, **kw)
self.tk.call(self._name, "element", "create", elementname, etype,
spec, *opts)
def element_names(self):
"""Returns the list of elements defined in the current theme."""
return self.tk.splitlist(self.tk.call(self._name, "element", "names"))
def element_options(self, elementname):
"""Return the list of elementname's options."""
return self.tk.splitlist(self.tk.call(self._name, "element", "options", elementname))
def theme_create(self, themename, parent=None, settings=None):
"""Creates a new theme.
It is an error if themename already exists. If parent is
specified, the new theme will inherit styles, elements and
layouts from the specified parent theme. If settings are present,
they are expected to have the same syntax used for theme_settings."""
script = _script_from_settings(settings) if settings else ''
if parent:
self.tk.call(self._name, "theme", "create", themename,
"-parent", parent, "-settings", script)
else:
self.tk.call(self._name, "theme", "create", themename,
"-settings", script)
def theme_settings(self, themename, settings):
"""Temporarily sets the current theme to themename, apply specified
settings and then restore the previous theme.
Each key in settings is a style and each value may contain the
keys 'configure', 'map', 'layout' and 'element create' and they
are expected to have the same format as specified by the methods
configure, map, layout and element_create respectively."""
script = _script_from_settings(settings)
self.tk.call(self._name, "theme", "settings", themename, script)
def theme_names(self):
"""Returns a list of all known themes."""
return self.tk.splitlist(self.tk.call(self._name, "theme", "names"))
def theme_use(self, themename=None):
"""If themename is None, returns the theme in use, otherwise, set
the current theme to themename, refreshes all widgets and emits
a <<ThemeChanged>> event."""
if themename is None:
# Starting on Tk 8.6, checking this global is no longer needed
# since it allows doing self.tk.call(self._name, "theme", "use")
return self.tk.eval("return $ttk::currentTheme")
# using "ttk::setTheme" instead of "ttk::style theme use" causes
# the variable currentTheme to be updated, also, ttk::setTheme calls
# "ttk::style theme use" in order to change theme.
self.tk.call("ttk::setTheme", themename)
class Widget(Tkinter.Widget):
"""Base class for Tk themed widgets."""
def __init__(self, master, widgetname, kw=None):
"""Constructs a Ttk Widget with the parent master.
STANDARD OPTIONS
class, cursor, takefocus, style
SCROLLABLE WIDGET OPTIONS
xscrollcommand, yscrollcommand
LABEL WIDGET OPTIONS
text, textvariable, underline, image, compound, width
WIDGET STATES
active, disabled, focus, pressed, selected, background,
readonly, alternate, invalid
"""
master = setup_master(master)
if not getattr(master, '_tile_loaded', False):
# Load tile now, if needed
_load_tile(master)
Tkinter.Widget.__init__(self, master, widgetname, kw=kw)
def identify(self, x, y):
"""Returns the name of the element at position x, y, or the empty
string if the point does not lie within any element.
x and y are pixel coordinates relative to the widget."""
return self.tk.call(self._w, "identify", x, y)
def instate(self, statespec, callback=None, *args, **kw):
"""Test the widget's state.
If callback is not specified, returns True if the widget state
matches statespec and False otherwise. If callback is specified,
then it will be invoked with *args, **kw if the widget state
matches statespec. statespec is expected to be a sequence."""
ret = self.tk.getboolean(
self.tk.call(self._w, "instate", ' '.join(statespec)))
if ret and callback:
return callback(*args, **kw)
return ret
def state(self, statespec=None):
"""Modify or inquire widget state.
Widget state is returned if statespec is None, otherwise it is
set according to the statespec flags and then a new state spec
is returned indicating which flags were changed. statespec is
expected to be a sequence."""
if statespec is not None:
statespec = ' '.join(statespec)
return self.tk.splitlist(str(self.tk.call(self._w, "state", statespec)))
class Button(Widget):
"""Ttk Button widget, displays a textual label and/or image, and
evaluates a command when pressed."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Button widget with the parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, default, width
"""
Widget.__init__(self, master, "ttk::button", kw)
def invoke(self):
"""Invokes the command associated with the button."""
return self.tk.call(self._w, "invoke")
class Checkbutton(Widget):
"""Ttk Checkbutton widget which is either in on- or off-state."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Checkbutton widget with the parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, offvalue, onvalue, variable
"""
Widget.__init__(self, master, "ttk::checkbutton", kw)
def invoke(self):
"""Toggles between the selected and deselected states and
invokes the associated command. If the widget is currently
selected, sets the option variable to the offvalue option
and deselects the widget; otherwise, sets the option variable
to the option onvalue.
Returns the result of the associated command."""
return self.tk.call(self._w, "invoke")
class Entry(Widget, Tkinter.Entry):
"""Ttk Entry widget displays a one-line text string and allows that
string to be edited by the user."""
def __init__(self, master=None, widget=None, **kw):
"""Constructs a Ttk Entry widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus, xscrollcommand
WIDGET-SPECIFIC OPTIONS
exportselection, invalidcommand, justify, show, state,
textvariable, validate, validatecommand, width
VALIDATION MODES
none, key, focus, focusin, focusout, all
"""
Widget.__init__(self, master, widget or "ttk::entry", kw)
def bbox(self, index):
"""Return a tuple of (x, y, width, height) which describes the
bounding box of the character given by index."""
return self._getints(self.tk.call(self._w, "bbox", index))
def identify(self, x, y):
"""Returns the name of the element at position x, y, or the
empty string if the coordinates are outside the window."""
return self.tk.call(self._w, "identify", x, y)
def validate(self):
"""Force revalidation, independent of the conditions specified
by the validate option. Returns False if validation fails, True
if it succeeds. Sets or clears the invalid state accordingly."""
return self.tk.getboolean(self.tk.call(self._w, "validate"))
class Combobox(Entry):
"""Ttk Combobox widget combines a text field with a pop-down list of
values."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Combobox widget with the parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
exportselection, justify, height, postcommand, state,
textvariable, values, width
"""
Entry.__init__(self, master, "ttk::combobox", **kw)
def current(self, newindex=None):
"""If newindex is supplied, sets the combobox value to the
element at position newindex in the list of values. Otherwise,
returns the index of the current value in the list of values
or -1 if the current value does not appear in the list."""
if newindex is None:
return self.tk.getint(self.tk.call(self._w, "current"))
return self.tk.call(self._w, "current", newindex)
def set(self, value):
"""Sets the value of the combobox to value."""
self.tk.call(self._w, "set", value)
class Frame(Widget):
"""Ttk Frame widget is a container, used to group other widgets
together."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Frame with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
borderwidth, relief, padding, width, height
"""
Widget.__init__(self, master, "ttk::frame", kw)
class Label(Widget):
"""Ttk Label widget displays a textual label and/or image."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Label with parent master.
STANDARD OPTIONS
class, compound, cursor, image, style, takefocus, text,
textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
anchor, background, font, foreground, justify, padding,
relief, text, wraplength
"""
Widget.__init__(self, master, "ttk::label", kw)
class Labelframe(Widget):
"""Ttk Labelframe widget is a container used to group other widgets
together. It has an optional label, which may be a plain text string
or another widget."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Labelframe with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
labelanchor, text, underline, padding, labelwidget, width,
height
"""
Widget.__init__(self, master, "ttk::labelframe", kw)
LabelFrame = Labelframe # Tkinter name compatibility
class Menubutton(Widget):
"""Ttk Menubutton widget displays a textual label and/or image, and
displays a menu when pressed."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Menubutton with parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
direction, menu
"""
Widget.__init__(self, master, "ttk::menubutton", kw)
class Notebook(Widget):
"""Ttk Notebook widget manages a collection of windows and displays
a single one at a time. Each child window is associated with a tab,
which the user may select to change the currently-displayed window."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Notebook with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
height, padding, width
TAB OPTIONS
state, sticky, padding, text, image, compound, underline
TAB IDENTIFIERS (tab_id)
The tab_id argument found in several methods may take any of
the following forms:
* An integer between zero and the number of tabs
* The name of a child window
* A positional specification of the form "@x,y", which
defines the tab
* The string "current", which identifies the
currently-selected tab
* The string "end", which returns the number of tabs (only
valid for method index)
"""
Widget.__init__(self, master, "ttk::notebook", kw)
def add(self, child, **kw):
"""Adds a new tab to the notebook.
If window is currently managed by the notebook but hidden, it is
restored to its previous position."""
self.tk.call(self._w, "add", child, *(_format_optdict(kw)))
def forget(self, tab_id):
"""Removes the tab specified by tab_id, unmaps and unmanages the
associated window."""
self.tk.call(self._w, "forget", tab_id)
def hide(self, tab_id):
"""Hides the tab specified by tab_id.
The tab will not be displayed, but the associated window remains
managed by the notebook and its configuration remembered. Hidden
tabs may be restored with the add command."""
self.tk.call(self._w, "hide", tab_id)
def identify(self, x, y):
"""Returns the name of the tab element at position x, y, or the
empty string if none."""
return self.tk.call(self._w, "identify", x, y)
def index(self, tab_id):
"""Returns the numeric index of the tab specified by tab_id, or
the total number of tabs if tab_id is the string "end"."""
return self.tk.getint(self.tk.call(self._w, "index", tab_id))
def insert(self, pos, child, **kw):
"""Inserts a pane at the specified position.
pos is either the string end, an integer index, or the name of
a managed child. If child is already managed by the notebook,
moves it to the specified position."""
self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw)))
def select(self, tab_id=None):
"""Selects the specified tab.
The associated child window will be displayed, and the
previously-selected window (if different) is unmapped. If tab_id
is omitted, returns the widget name of the currently selected
pane."""
return self.tk.call(self._w, "select", tab_id)
def tab(self, tab_id, option=None, **kw):
"""Query or modify the options of the specific tab_id.
If kw is not given, returns a dict of the tab option values. If option
is specified, returns the value of that option. Otherwise, sets the
options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, "tab", tab_id)
def tabs(self):
"""Returns a list of windows managed by the notebook."""
return self.tk.splitlist(self.tk.call(self._w, "tabs") or ())
def enable_traversal(self):
"""Enable keyboard traversal for a toplevel window containing
this notebook.
This will extend the bindings for the toplevel window containing
this notebook as follows:
Control-Tab: selects the tab following the currently selected
one
Shift-Control-Tab: selects the tab preceding the currently
selected one
Alt-K: where K is the mnemonic (underlined) character of any
tab, will select that tab.
Multiple notebooks in a single toplevel may be enabled for
traversal, including nested notebooks. However, notebook traversal
only works properly if all panes are direct children of the
notebook."""
        # The only, and welcome, difference I see is about mnemonics, which
        # work after calling this method. Control-Tab and Shift-Control-Tab
        # always work (here at least).
self.tk.call("ttk::notebook::enableTraversal", self._w)
class Panedwindow(Widget, Tkinter.PanedWindow):
"""Ttk Panedwindow widget displays a number of subwindows, stacked
either vertically or horizontally."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Panedwindow with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient, width, height
PANE OPTIONS
weight
"""
Widget.__init__(self, master, "ttk::panedwindow", kw)
forget = Tkinter.PanedWindow.forget # overrides Pack.forget
def insert(self, pos, child, **kw):
"""Inserts a pane at the specified positions.
pos is either the string end, and integer index, or the name
of a child. If child is already managed by the paned window,
moves it to the specified position."""
self.tk.call(self._w, "insert", pos, child, *(_format_optdict(kw)))
def pane(self, pane, option=None, **kw):
"""Query or modify the options of the specified pane.
pane is either an integer index or the name of a managed subwindow.
If kw is not given, returns a dict of the pane option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, "pane", pane)
def sashpos(self, index, newpos=None):
"""If newpos is specified, sets the position of sash number index.
May adjust the positions of adjacent sashes to ensure that
positions are monotonically increasing. Sash positions are further
constrained to be between 0 and the total size of the widget.
Returns the new position of sash number index."""
return self.tk.getint(self.tk.call(self._w, "sashpos", index, newpos))
PanedWindow = Panedwindow # Tkinter name compatibility
class Progressbar(Widget):
"""Ttk Progressbar widget shows the status of a long-running
operation. They can operate in two modes: determinate mode shows the
amount completed relative to the total amount of work to be done, and
indeterminate mode provides an animated display to let the user know
that something is happening."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Progressbar with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient, length, mode, maximum, value, variable, phase
"""
Widget.__init__(self, master, "ttk::progressbar", kw)
def start(self, interval=None):
"""Begin autoincrement mode: schedules a recurring timer event
that calls method step every interval milliseconds.
interval defaults to 50 milliseconds (20 steps/second) if omitted."""
self.tk.call(self._w, "start", interval)
def step(self, amount=None):
"""Increments the value option by amount.
amount defaults to 1.0 if omitted."""
self.tk.call(self._w, "step", amount)
def stop(self):
"""Stop autoincrement mode: cancels any recurring timer event
initiated by start."""
self.tk.call(self._w, "stop")
class Radiobutton(Widget):
"""Ttk Radiobutton widgets are used in groups to show or change a
set of mutually-exclusive options."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Radiobutton with parent master.
STANDARD OPTIONS
class, compound, cursor, image, state, style, takefocus,
text, textvariable, underline, width
WIDGET-SPECIFIC OPTIONS
command, value, variable
"""
Widget.__init__(self, master, "ttk::radiobutton", kw)
def invoke(self):
"""Sets the option variable to the option value, selects the
widget, and invokes the associated command.
Returns the result of the command, or an empty string if
no command is specified."""
return self.tk.call(self._w, "invoke")
class Scale(Widget, Tkinter.Scale):
"""Ttk Scale widget is typically used to control the numeric value of
a linked variable that varies uniformly over some range."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Scale with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
command, from, length, orient, to, value, variable
"""
Widget.__init__(self, master, "ttk::scale", kw)
def configure(self, cnf=None, **kw):
"""Modify or query scale options.
Setting a value for any of the "from", "from_" or "to" options
generates a <<RangeChanged>> event."""
if cnf:
kw.update(cnf)
Widget.configure(self, **kw)
if any(['from' in kw, 'from_' in kw, 'to' in kw]):
self.event_generate('<<RangeChanged>>')
def get(self, x=None, y=None):
"""Get the current value of the value option, or the value
corresponding to the coordinates x, y if they are specified.
x and y are pixel coordinates relative to the scale widget
origin."""
return self.tk.call(self._w, 'get', x, y)
class Scrollbar(Widget, Tkinter.Scrollbar):
"""Ttk Scrollbar controls the viewport of a scrollable widget."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Scrollbar with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
command, orient
"""
Widget.__init__(self, master, "ttk::scrollbar", kw)
class Separator(Widget):
"""Ttk Separator widget displays a horizontal or vertical separator
bar."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Separator with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus
WIDGET-SPECIFIC OPTIONS
orient
"""
Widget.__init__(self, master, "ttk::separator", kw)
class Sizegrip(Widget):
"""Ttk Sizegrip allows the user to resize the containing toplevel
window by pressing and dragging the grip."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Sizegrip with parent master.
STANDARD OPTIONS
class, cursor, state, style, takefocus
"""
Widget.__init__(self, master, "ttk::sizegrip", kw)
class Treeview(Widget, Tkinter.XView, Tkinter.YView):
"""Ttk Treeview widget displays a hierarchical collection of items.
Each item has a textual label, an optional image, and an optional list
of data values. The data values are displayed in successive columns
after the tree label."""
def __init__(self, master=None, **kw):
"""Construct a Ttk Treeview with parent master.
STANDARD OPTIONS
class, cursor, style, takefocus, xscrollcommand,
yscrollcommand
WIDGET-SPECIFIC OPTIONS
columns, displaycolumns, height, padding, selectmode, show
ITEM OPTIONS
text, image, values, open, tags
TAG OPTIONS
foreground, background, font, image
"""
Widget.__init__(self, master, "ttk::treeview", kw)
def bbox(self, item, column=None):
"""Returns the bounding box (relative to the treeview widget's
window) of the specified item in the form x y width height.
If column is specified, returns the bounding box of that cell.
If the item is not visible (i.e., if it is a descendant of a
closed item or is scrolled offscreen), returns an empty string."""
return self._getints(self.tk.call(self._w, "bbox", item, column)) or ''
def get_children(self, item=None):
"""Returns a tuple of children belonging to item.
If item is not specified, returns root children."""
return self.tk.splitlist(
self.tk.call(self._w, "children", item or '') or ())
def set_children(self, item, *newchildren):
"""Replaces item's child with newchildren.
Children present in item that are not present in newchildren
are detached from tree. No items in newchildren may be an
ancestor of item."""
self.tk.call(self._w, "children", item, newchildren)
def column(self, column, option=None, **kw):
"""Query or modify the options for the specified column.
If kw is not given, returns a dict of the column option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values."""
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, "column", column)
def delete(self, *items):
"""Delete all specified items and all their descendants. The root
item may not be deleted."""
self.tk.call(self._w, "delete", items)
def detach(self, *items):
"""Unlinks all of the specified items from the tree.
The items and all of their descendants are still present, and may
be reinserted at another point in the tree, but will not be
displayed. The root item may not be detached."""
self.tk.call(self._w, "detach", items)
def exists(self, item):
"""Returns True if the specified item is present in the tree,
False otherwise."""
return self.tk.getboolean(self.tk.call(self._w, "exists", item))
def focus(self, item=None):
"""If item is specified, sets the focus item to item. Otherwise,
returns the current focus item, or '' if there is none."""
return self.tk.call(self._w, "focus", item)
def heading(self, column, option=None, **kw):
"""Query or modify the heading options for the specified column.
If kw is not given, returns a dict of the heading option values. If
option is specified then the value for that option is returned.
Otherwise, sets the options to the corresponding values.
Valid options/values are:
text: text
The text to display in the column heading
image: image_name
Specifies an image to display to the right of the column
heading
anchor: anchor
Specifies how the heading text should be aligned. One of
the standard Tk anchor values
command: callback
A callback to be invoked when the heading label is
pressed.
To configure the tree column heading, call this with column = "#0" """
cmd = kw.get('command')
if cmd and not isinstance(cmd, basestring):
# callback not registered yet, do it now
kw['command'] = self.master.register(cmd, self._substitute)
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, 'heading', column)
def identify(self, component, x, y):
"""Returns a description of the specified component under the
point given by x and y, or the empty string if no such component
is present at that position."""
return self.tk.call(self._w, "identify", component, x, y)
def identify_row(self, y):
"""Returns the item ID of the item at position y."""
return self.identify("row", 0, y)
def identify_column(self, x):
"""Returns the data column identifier of the cell at position x.
The tree column has ID #0."""
return self.identify("column", x, 0)
def identify_region(self, x, y):
"""Returns one of:
heading: Tree heading area.
        separator: Space between two column headings.
tree: The tree area.
cell: A data cell.
* Availability: Tk 8.6"""
return self.identify("region", x, y)
def identify_element(self, x, y):
"""Returns the element at position x, y.
* Availability: Tk 8.6"""
return self.identify("element", x, y)
def index(self, item):
"""Returns the integer index of item within its parent's list
of children."""
return self.tk.getint(self.tk.call(self._w, "index", item))
def insert(self, parent, index, iid=None, **kw):
"""Creates a new item and return the item identifier of the newly
created item.
parent is the item ID of the parent item, or the empty string
to create a new top-level item. index is an integer, or the value
end, specifying where in the list of parent's children to insert
the new item. If index is less than or equal to zero, the new node
is inserted at the beginning, if index is greater than or equal to
the current number of children, it is inserted at the end. If iid
is specified, it is used as the item identifier, iid must not
already exist in the tree. Otherwise, a new unique identifier
is generated."""
opts = _format_optdict(kw)
if iid is not None:
res = self.tk.call(self._w, "insert", parent, index,
"-id", iid, *opts)
else:
res = self.tk.call(self._w, "insert", parent, index, *opts)
return res
def item(self, item, option=None, **kw):
"""Query or modify the options for the specified item.
If no options are given, a dict with options/values for the item
is returned. If option is specified then the value for that option
is returned. Otherwise, sets the options to the corresponding
values as given by kw."""
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, "item", item)
def move(self, item, parent, index):
"""Moves item to position index in parent's list of children.
It is illegal to move an item under one of its descendants. If
index is less than or equal to zero, item is moved to the
beginning, if greater than or equal to the number of children,
it is moved to the end. If item was detached it is reattached."""
self.tk.call(self._w, "move", item, parent, index)
reattach = move # A sensible method name for reattaching detached items
def next(self, item):
"""Returns the identifier of item's next sibling, or '' if item
is the last child of its parent."""
return self.tk.call(self._w, "next", item)
def parent(self, item):
"""Returns the ID of the parent of item, or '' if item is at the
top level of the hierarchy."""
return self.tk.call(self._w, "parent", item)
def prev(self, item):
"""Returns the identifier of item's previous sibling, or '' if
item is the first child of its parent."""
return self.tk.call(self._w, "prev", item)
def see(self, item):
"""Ensure that item is visible.
Sets all of item's ancestors open option to True, and scrolls
the widget if necessary so that item is within the visible
portion of the tree."""
self.tk.call(self._w, "see", item)
def selection(self, selop=None, items=None):
"""If selop is not specified, returns selected items."""
if isinstance(items, basestring):
items = (items,)
return self.tk.splitlist(self.tk.call(self._w, "selection", selop, items))
def selection_set(self, items):
"""items becomes the new selection."""
self.selection("set", items)
def selection_add(self, items):
"""Add items to the selection."""
self.selection("add", items)
def selection_remove(self, items):
"""Remove items from the selection."""
self.selection("remove", items)
def selection_toggle(self, items):
"""Toggle the selection state of each item in items."""
self.selection("toggle", items)
def set(self, item, column=None, value=None):
"""Query or set the value of given item.
With one argument, return a dictionary of column/value pairs
for the specified item. With two arguments, return the current
value of the specified column. With three arguments, set the
value of given column in given item to the specified value."""
res = self.tk.call(self._w, "set", item, column, value)
if column is None and value is None:
return _splitdict(self.tk, res,
cut_minus=False, conv=_tclobj_to_py)
else:
return res
def tag_bind(self, tagname, sequence=None, callback=None):
"""Bind a callback for the given event sequence to the tag tagname.
When an event is delivered to an item, the callbacks for each
of the item's tags option are called."""
self._bind((self._w, "tag", "bind", tagname), sequence, callback, add=0)
def tag_configure(self, tagname, option=None, **kw):
"""Query or modify the options for the specified tagname.
If kw is not given, returns a dict of the option settings for tagname.
If option is specified, returns the value for that option for the
specified tagname. Otherwise, sets the options to the corresponding
values for the given tagname."""
if option is not None:
kw[option] = None
return _val_or_dict(self.tk, kw, self._w, "tag", "configure",
tagname)
def tag_has(self, tagname, item=None):
"""If item is specified, returns 1 or 0 depending on whether the
specified item has the given tagname. Otherwise, returns a list of
all items which have the specified tag.
* Availability: Tk 8.6"""
if item is None:
return self.tk.splitlist(
self.tk.call(self._w, "tag", "has", tagname))
else:
return self.tk.getboolean(
self.tk.call(self._w, "tag", "has", tagname, item))
# Extensions
class LabeledScale(Frame, object):
"""A Ttk Scale widget with a Ttk Label widget indicating its
current value.
The Ttk Scale can be accessed through instance.scale, and Ttk Label
can be accessed through instance.label"""
def __init__(self, master=None, variable=None, from_=0, to=10, **kw):
"""Construct a horizontal LabeledScale with parent master, a
variable to be associated with the Ttk Scale widget and its range.
If variable is not specified, a Tkinter.IntVar is created.
WIDGET-SPECIFIC OPTIONS
compound: 'top' or 'bottom'
Specifies how to display the label relative to the scale.
Defaults to 'top'.
"""
self._label_top = kw.pop('compound', 'top') == 'top'
Frame.__init__(self, master, **kw)
self._variable = variable or Tkinter.IntVar(master)
self._variable.set(from_)
self._last_valid = from_
self.label = Label(self)
self.scale = Scale(self, variable=self._variable, from_=from_, to=to)
self.scale.bind('<<RangeChanged>>', self._adjust)
# position scale and label according to the compound option
scale_side = 'bottom' if self._label_top else 'top'
label_side = 'top' if scale_side == 'bottom' else 'bottom'
self.scale.pack(side=scale_side, fill='x')
tmp = Label(self).pack(side=label_side) # place holder
self.label.place(anchor='n' if label_side == 'top' else 's')
# update the label as scale or variable changes
self.__tracecb = self._variable.trace_variable('w', self._adjust)
self.bind('<Configure>', self._adjust)
self.bind('<Map>', self._adjust)
def destroy(self):
"""Destroy this widget and possibly its associated variable."""
try:
self._variable.trace_vdelete('w', self.__tracecb)
except AttributeError:
# widget has been destroyed already
pass
else:
del self._variable
Frame.destroy(self)
self.label = None
self.scale = None
def _adjust(self, *args):
"""Adjust the label position according to the scale."""
def adjust_label():
self.update_idletasks() # "force" scale redraw
x, y = self.scale.coords()
if self._label_top:
y = self.scale.winfo_y() - self.label.winfo_reqheight()
else:
y = self.scale.winfo_reqheight() + self.label.winfo_reqheight()
self.label.place_configure(x=x, y=y)
from_ = _to_number(self.scale['from'])
to = _to_number(self.scale['to'])
if to < from_:
from_, to = to, from_
newval = self._variable.get()
if not from_ <= newval <= to:
# value outside range, set value back to the last valid one
self.value = self._last_valid
return
self._last_valid = newval
self.label['text'] = newval
self.after_idle(adjust_label)
def _get_value(self):
"""Return current scale value."""
return self._variable.get()
def _set_value(self, val):
"""Set new scale value."""
self._variable.set(val)
value = property(_get_value, _set_value)
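# Illustrative sketch, not part of the original module: LabeledScale exposes
# its state through the value property and the scale/label attributes
# described above.
def _labeledscale_example(master):
    ls = LabeledScale(master, from_=0, to=5, compound="bottom")
    ls.value = 3  # drives the shared variable, which also updates the label
    return ls.scale, ls.label, ls.value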
class OptionMenu(Menubutton):
"""Themed OptionMenu, based after Tkinter's OptionMenu, which allows
the user to select a value from a menu."""
def __init__(self, master, variable, default=None, *values, **kwargs):
"""Construct a themed OptionMenu widget with master as the parent,
the resource textvariable set to variable, the initially selected
value specified by the default parameter, the menu values given by
*values and additional keywords.
WIDGET-SPECIFIC OPTIONS
style: stylename
Menubutton style.
direction: 'above', 'below', 'left', 'right', or 'flush'
Menubutton direction.
command: callback
A callback that will be invoked after selecting an item.
"""
kw = {'textvariable': variable, 'style': kwargs.pop('style', None),
'direction': kwargs.pop('direction', None)}
Menubutton.__init__(self, master, **kw)
self['menu'] = Tkinter.Menu(self, tearoff=False)
self._variable = variable
self._callback = kwargs.pop('command', None)
if kwargs:
raise Tkinter.TclError('unknown option -%s' % (
kwargs.iterkeys().next()))
self.set_menu(default, *values)
def __getitem__(self, item):
if item == 'menu':
return self.nametowidget(Menubutton.__getitem__(self, item))
return Menubutton.__getitem__(self, item)
def set_menu(self, default=None, *values):
"""Build a new menu of radiobuttons with *values and optionally
a default value."""
menu = self['menu']
menu.delete(0, 'end')
for val in values:
menu.add_radiobutton(label=val,
command=Tkinter._setit(self._variable, val, self._callback),
variable=self._variable)
if default:
self._variable.set(default)
def destroy(self):
"""Destroy this widget and its associated variable."""
try:
del self._variable
except AttributeError:
pass
Menubutton.destroy(self)
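# Illustrative sketch, not part of the original module: constructing the
# themed OptionMenu described above. The selection callback is a stand-in.
def _optionmenu_example(master, on_select=None):
    var = Tkinter.StringVar(master)
    om = OptionMenu(master, var, "b", "a", "b", "c", command=on_select)
    om.set_menu("z", "x", "y", "z")  # rebuild the menu with a new default
    return om, var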
| gpl-3.0 | 4,497,026,299,902,996,000 | 33.461963 | 93 | 0.607747 | false |
randall-frank/heresy | card_objects.py | 1 | 23728 | #
# T.I.M.E Stories card editor
# Copyright (C) 2017 Randall Frank
# See LICENSE for details
#
import base64
import os
import os.path
from PyQt5 import QtXml
from PyQt5 import QtGui
from PyQt5 import QtCore
from PyQt5 import QtWidgets
# these are the core objects that represent a deck of cards to the editor
class Base(object):
def __init__(self, name, xml_tag):
self.name = name
self.xml_tag = xml_tag
def get_column_info(self, col):
return ""
def set_xml_name(self, name):
self.xml_tag = name
def get_xml_name(self):
return self.xml_tag
def load_attrib_string(self, elem, name, default=None):
QtWidgets.QApplication.processEvents()
tmp = elem.firstChildElement(name)
v = default
if not tmp.isNull():
v = str(tmp.text())
if v is not None:
self.__setattr__(name, v)
def save_attrib_string(self, doc, parent, name):
tmp = doc.createElement(name)
parent.appendChild(tmp)
s = self.__getattribute__(name)
text = doc.createTextNode(str(s))
tmp.appendChild(text)
def load_attrib_int(self, elem, name, default=None):
tmp = elem.firstChildElement(name)
v = default
if not tmp.isNull():
v = int(str(tmp.text()))
if v is not None:
self.__setattr__(name, v)
def save_attrib_int(self, doc, parent, name):
self.save_attrib_string(doc, parent, name)
def load_attrib_obj(self, elem, name, default=None):
tmp = elem.firstChildElement(name)
obj = default
if not tmp.isNull():
obj = eval(str(tmp.text()))
if obj is not None:
self.__setattr__(name, obj)
def save_attrib_obj(self, doc, parent, name):
tmp = doc.createElement(name)
parent.appendChild(tmp)
obj = self.__getattribute__(name)
text = doc.createTextNode(obj.__repr__())
tmp.appendChild(text)
def to_xml(self, doc, parent):
QtWidgets.QApplication.processEvents()
tmp = doc.createElement(self.xml_tag)
tmp.setAttribute('name', self.name)
parent.appendChild(tmp)
return self.to_element(doc, tmp)
def to_element(self, doc, elem):
return True
class Renderable(Base):
def __init__(self, name, xml_tag='renderable'):
super(Renderable, self).__init__(name, xml_tag)
self.order = 0 # Z depth...
self.rotation = 0
self.rectangle = [0, 0, 0, 0]
def render_object(self):
return
class ImageRender(Renderable):
def __init__(self, name="image"):
super(ImageRender, self).__init__(name, 'render_image')
self.image = ""
def get_column_info(self, col):
if col != 1:
return super(ImageRender, self).get_column_info(col)
return "%d,%d - %d,%d" % tuple(self.rectangle)
@classmethod
def from_element(cls, elem):
obj = ImageRender()
obj.load_attrib_string(elem, "image")
obj.load_attrib_obj(elem, "rectangle")
obj.load_attrib_int(elem, "rotation")
obj.load_attrib_int(elem, "order")
return obj
def to_element(self, doc, elem):
self.save_attrib_string(doc, elem, "image")
self.save_attrib_int(doc, elem, "rotation")
self.save_attrib_obj(doc, elem, "rectangle")
self.save_attrib_int(doc, elem, "order")
return True
class RectRender(Renderable):
def __init__(self, name="rect"):
super(RectRender, self).__init__(name, 'render_rect')
self.style = "default"
def get_column_info(self, col):
if col != 1:
return super(RectRender, self).get_column_info(col)
return "%d,%d - %d,%d" % tuple(self.rectangle)
@classmethod
def from_element(cls, elem):
obj = RectRender()
obj.load_attrib_string(elem, "style")
obj.load_attrib_int(elem, "rotation")
obj.load_attrib_obj(elem, "rectangle")
obj.load_attrib_int(elem, "order")
return obj
def to_element(self, doc, elem):
self.save_attrib_string(doc, elem, "style")
self.save_attrib_int(doc, elem, "rotation")
self.save_attrib_obj(doc, elem, "rectangle")
self.save_attrib_int(doc, elem, "order")
return True
class TextRender(Renderable):
def __init__(self, name="text"):
super(TextRender, self).__init__(name, 'render_text')
self.style = "default"
self.text = ""
def get_column_info(self, col):
if col != 1:
return super(TextRender, self).get_column_info(col)
return "%d,%d - %d,%d" % tuple(self.rectangle)
@classmethod
def from_element(cls, elem):
obj = TextRender()
obj.load_attrib_string(elem, "text")
obj.load_attrib_string(elem, "style")
obj.load_attrib_int(elem, "rotation")
obj.load_attrib_obj(elem, "rectangle")
obj.load_attrib_int(elem, "order")
return obj
def to_element(self, doc, elem):
self.save_attrib_string(doc, elem, "text")
self.save_attrib_string(doc, elem, "style")
self.save_attrib_int(doc, elem, "rotation")
self.save_attrib_obj(doc, elem, "rectangle")
self.save_attrib_int(doc, elem, "order")
return True
# Essentially, a Face is a list of renderable items. Right now, text or image items
# that reference styles and images, along with content.
class Face(Base):
def __init__(self, name):
super(Face, self).__init__(name, name)
self.renderables = list() # a face is an array of Renderable instances
@classmethod
def from_element(cls, elem, is_top):
name = "top"
if not is_top:
name = "bottom"
obj = Face(name)
obj.set_xml_name(name)
# walk element children... and map to 'renderables'
obj.renderables = list()
tmp = elem.firstChildElement()
while not tmp.isNull():
tag = str(tmp.tagName())
if tag.endswith('image'):
tmp_obj = ImageRender.from_element(tmp)
elif tag.endswith('text'):
tmp_obj = TextRender.from_element(tmp)
elif tag.endswith('rect'):
tmp_obj = RectRender.from_element(tmp)
else:
tmp_obj = None
if tmp_obj is not None:
obj.renderables.append(tmp_obj)
tmp = tmp.nextSiblingElement()
return obj
def to_element(self, doc, elem):
for r in self.renderables:
r.to_xml(doc, elem)
return True
class Card(Base):
def __init__(self, name, xml_tag='card'):
super(Card, self).__init__(name, xml_tag)
self.top_face = Face('top')
self.bot_face = Face('bottom')
self.card_number = 0
self.local_card_number = 0
@classmethod
def from_element(cls, elem):
name = elem.attribute("name", "Unnamed Card")
obj = Card(str(name))
tmp = elem.firstChildElement("top")
if not tmp.isNull():
obj.top_face = Face.from_element(tmp, True)
tmp = elem.firstChildElement("bottom")
if not tmp.isNull():
obj.bot_face = Face.from_element(tmp, False)
return obj
def to_element(self, doc, elem):
self.top_face.to_xml(doc, elem)
self.bot_face.to_xml(doc, elem)
return True
class Location(Base):
def __init__(self, name):
super(Location, self).__init__(name, 'location')
self.cards = list()
@classmethod
def from_element(cls, elem):
name = elem.attribute("name", "Unnamed Location")
obj = Location(str(name))
tmp = elem.firstChildElement("card")
while not tmp.isNull():
tmp_card = Card.from_element(tmp)
if tmp_card is not None:
obj.cards.append(tmp_card)
tmp = tmp.nextSiblingElement('card')
        return obj
def to_element(self, doc, elem):
for c in self.cards:
c.to_xml(doc, elem)
return True
class Style(Base):
def __init__(self, name):
super(Style, self).__init__(name, 'style')
self.typeface = "Arial"
self.typesize = 12
self.fillcolor = [255, 255, 255, 255]
self.borderthickness = 0
self.bordercolor = [0, 0, 0, 255]
self.textcolor = [0, 0, 0, 255]
self.linestyle = "solid"
@classmethod
def from_element(cls, elem):
name = elem.attribute("name", "Unnamed Image")
obj = Style(str(name))
obj.load_attrib_string(elem, "typeface", "Arial")
obj.load_attrib_string(elem, "linestyle")
obj.load_attrib_obj(elem, "fillcolor")
obj.load_attrib_obj(elem, "bordercolor")
obj.load_attrib_obj(elem, "textcolor")
obj.load_attrib_int(elem, "typesize")
obj.load_attrib_int(elem, "borderthickness")
return obj
def to_element(self, doc, elem):
self.save_attrib_string(doc, elem, "typeface")
self.save_attrib_string(doc, elem, "linestyle")
self.save_attrib_obj(doc, elem, "fillcolor")
self.save_attrib_obj(doc, elem, "bordercolor")
self.save_attrib_obj(doc, elem, "textcolor")
self.save_attrib_int(doc, elem, "typesize")
self.save_attrib_int(doc, elem, "borderthickness")
return True
class Image(Base):
def __init__(self, name):
super(Image, self).__init__(name, 'image')
self.file = ''
self.rectangle = [0, 0, 0, 0] # x,y,dx,dy
self.usage = 'any'
def get_image(self, deck):
f = deck.find_file(self.file)
if f is None:
return None
img = f.image.copy(self.rectangle[0], self.rectangle[1], self.rectangle[2], self.rectangle[3]) # QImage
return img
def get_column_info(self, col):
if col != 1:
return super(Image, self).get_column_info(col)
return "%d,%d - %d,%d" % tuple(self.rectangle)
@classmethod
def from_element(cls, elem):
name = elem.attribute("name", "Unnamed Image")
obj = Image(str(name))
obj.load_attrib_string(elem, "file")
obj.load_attrib_obj(elem, "rectangle")
obj.load_attrib_string(elem, "usage")
return obj
def to_element(self, doc, elem):
self.save_attrib_string(doc, elem, "file")
self.save_attrib_obj(doc, elem, "rectangle")
self.save_attrib_string(doc, elem, "usage")
return True
class File(Base):
def __init__(self, name):
super(File, self).__init__(name, 'file')
self.image = QtGui.QImage()
self.filename = ""
self.store_inline = True
def load_file(self, filename, name=None, store_as_resource=True):
self.image.load(filename)
self.filename = filename
self.store_inline = store_as_resource
if name is not None:
self.name = name
else:
self.name = filename
def get_column_info(self, col):
if col != 1:
return super(File, self).get_column_info(col)
return "%dx%d" % tuple(self.size())
def size(self):
return [self.image.width(), self.image.height()]
@classmethod
def from_element(cls, elem):
QtWidgets.QApplication.processEvents()
name = elem.attribute("name", "Unnamed File")
filename = elem.attribute("filename", None)
obj = File(name)
# two cases: text is the file content or text is empty
# in the latter case, try to read the 'name' as a file
try:
tmp = elem.text() # get unicode string
if len(tmp) == 0:
if not obj.image.load(filename, name):
print("Warning, failed to load file: {}".format(filename))
return None
else:
tmp = bytes(tmp, "UTF-8") # convert to ASCII 8bit bytes
s = base64.b64decode(tmp) # decode to binary
buffer = QtCore.QBuffer() # do the I/O
buffer.setData(s)
buffer.open(QtCore.QIODevice.ReadWrite)
if not obj.image.load(buffer, "png"):
if not obj.image.load(filename, name):
return None
except Exception as e:
print("File from_element Error", str(e))
return None
return obj
def to_element(self, doc, elem):
try:
if self.store_inline:
buffer = QtCore.QBuffer()
buffer.open(QtCore.QIODevice.ReadWrite)
self.image.save(buffer, "png") # Do the I/O
s = base64.b64encode(buffer.data()) # encode binary data as ASCII 8bit bytes
tmp = s.decode(encoding="UTF-8") # convert the ASCII 8bit sequence to Unicode
text = doc.createTextNode(tmp) # Add it to the DOM
elem.appendChild(text)
elem.setAttribute('filename', self.filename)
except Exception as e:
print("File to_element Error", str(e))
return False
return True
class Deck(Base):
def __init__(self, name=""):
super(Deck, self).__init__(name, 'deck')
self.files = list() # of Files
self.images = list() # of Images
self.styles = list() # of Styles
self.default_card = Card("Card Base", xml_tag="defaultcard")
self.default_item_card = Card("Item Card Base", xml_tag="defaultitemcard")
self.default_location_card = Card("Location Card Base", xml_tag="defaultlocationcard")
# Proper order of a deck
self.base = list() # of Cards
self.items = list() # of Cards
self.plan = list() # of Cards
self.misc = list() # of Cards
self.characters = list() # of Cards
self.icon_reference = Card("Icon Reference", xml_tag='iconreference')
self.locations = list() # of Locations
def find_file(self, name, default=None):
for f in self.files:
if f.name == name:
return f
return default
def find_image(self, name, default=None):
for i in self.images:
if i.name == name:
return i
return default
def find_style(self, name, default=Style("default")):
for s in self.styles:
if s.name == name:
return s
return default
def renumber_entities(self):
global_count = 1
# card blocks
for chunk in [self.base, self.items, self.plan, self.misc, self.characters]:
local_count = 1
for card in chunk:
card.card_number = global_count
card.local_card_number = local_count
global_count += 1
local_count += 1
# reference card
self.icon_reference.card_number = global_count
self.icon_reference.local_card_number = local_count
global_count += 1
local_count += 1
# locations
for location in self.locations:
local_count = 1
            for card in location.cards:
card.card_number = global_count
card.local_card_number = local_count
global_count += 1
local_count += 1
def save(self, filename):
QtWidgets.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
doc = QtXml.QDomDocument()
# build the DOM
self.to_xml(doc, doc)
# convert the DOM to a string
s = doc.toString()
success = True
try:
fp = open(filename, "wb")
fp.write(bytes(s, "UTF-8"))
fp.close()
except Exception as e:
success = False
QtWidgets.QApplication.restoreOverrideCursor()
return success
def load(self, filename):
QtWidgets.QApplication.setOverrideCursor(QtCore.Qt.WaitCursor)
try:
fp = open(filename, "rb")
xml = fp.read()
fp.close()
        except Exception:
QtWidgets.QApplication.restoreOverrideCursor()
return False
doc = QtXml.QDomDocument()
ok, msg, line, col = doc.setContent(xml)
if not ok:
QtWidgets.QApplication.restoreOverrideCursor()
return False
deck = doc.firstChildElement("deck")
if not deck.isNull():
assets = deck.firstChildElement("assets") # the <assets> block
if not assets.isNull():
if not self.parse_assets(assets):
QtWidgets.QApplication.restoreOverrideCursor()
return False
cards = deck.firstChildElement("cards") # the <cards> block
if not cards.isNull():
if not self.parse_cards(cards):
QtWidgets.QApplication.restoreOverrideCursor()
return False
QtWidgets.QApplication.restoreOverrideCursor()
return True
def parse_cards(self, root):
# single cards
# default cards (layering) and the reference card
work = dict(defaultcard=[Card, 'default_card'],
defaultitemcard=[Card, 'default_item_card'],
defaultlocationcard=[Card, 'default_location_card'],
iconreference=[Card, 'icon_reference'])
for tag, v in work.items():
tmp = root.firstChildElement(tag)
if not tmp.isNull():
tmp_obj = v[0].from_element(tmp)
if tmp_obj is not None:
tmp_obj.set_xml_name(tag)
self.__setattr__(v[1], tmp_obj)
# Plan, Items, Base, Characters, Locations - simple lists
# [v0, v1, v2] use v0.from_element() to create an object starting at the tag v2
# make a list of objects at self.{v1}
work = dict(base=[Card, 'base', 'card'],
items=[Card, 'items', 'card'],
plan=[Card, 'plan', 'card'],
misc=[Card, 'misc', 'card'],
characters=[Card, 'characters', 'card'],
locations=[Location, 'locations', 'location'])
for tag, v in work.items():
tmp_root = root.firstChildElement(tag)
if not tmp_root.isNull():
self.__setattr__(v[1], list())
tmp = tmp_root.firstChildElement(v[2])
while not tmp.isNull():
tmp_obj = v[0].from_element(tmp)
if tmp_obj is not None:
self.__getattribute__(v[1]).append(tmp_obj)
tmp = tmp.nextSiblingElement(v[2])
return True
def parse_assets(self, root):
work = dict(file=[File, self.files],
image=[Image, self.images],
style=[Style, self.styles])
for tag, v in work.items():
tmp = root.firstChildElement(tag)
while not tmp.isNull():
tmp_obj = v[0].from_element(tmp)
if tmp_obj is not None:
v[1].append(tmp_obj)
tmp = tmp.nextSiblingElement(tag)
return True
def to_element(self, doc, elem): # the deck element
# assets
tmp = doc.createElement("assets")
elem.appendChild(tmp)
# files, styles, images
for f in self.files:
f.to_xml(doc, tmp)
for s in self.styles:
s.to_xml(doc, tmp)
for i in self.images:
i.to_xml(doc, tmp)
# cards
card_root = doc.createElement("cards")
elem.appendChild(card_root)
# singletons
self.default_card.to_xml(doc, card_root)
self.default_item_card.to_xml(doc, card_root)
self.default_location_card.to_xml(doc, card_root)
self.icon_reference.to_xml(doc, card_root)
# lists: base, items, plan, misc, characters, locations
blocks = dict(base=self.base, plan=self.plan, items=self.items, misc=self.misc,
characters=self.characters, locations=self.locations)
for tag, v in blocks.items():
tag_elem = doc.createElement(tag) # make an element inside of <cards>
card_root.appendChild(tag_elem)
for i in v:
i.to_xml(doc, tag_elem) # write all of the cards into the new element
return True
def build_empty_deck(media_dirs=None):
deck = Deck()
if media_dirs is None:
# Load images from resources
d = QtCore.QDir(":/default_files")
for name in d.entryList():
f = File(name)
f.load_file(":/default_files/"+name, name, store_as_resource=True)
deck.files.append(f)
else:
for d in media_dirs:
for root, dirs, files in os.walk(d):
for name in files:
filename = os.path.join(root, name)
basename, ext = os.path.splitext(os.path.basename(filename))
if ext.lower() in [".jpg", ".png"]:
print("Adding image: {} ({})".format(filename, basename))
f = File(basename)
f.load_file(filename, basename, store_as_resource=False)
deck.files.append(f)
# a default style
deck.styles.append(Style("default"))
return deck
# <deck>
# <assets>
# <file name="name">pngcontent</file>
# <image name="name">
# <file>name</file>
# <rect_pix>x0 y0 dx dy</rect_pix>
# <usage>face|badge|token...</usage>
# </locked/>
# </image>
# <style name="name">
# <typeface>name</typeface>
# <typesize>size</typesize>
# <fillcolor></fillcolor>
# <borderthickness></borderthickness>
# <bordercolor></bordercolor>
# <textcolor></textcolor>
# <linestyle></linestyle>
# </style>
# </assets>
# <cards>
# <defaultcard>
# <top>
# <image name="top"></image>
# <textblock name="locationname"></textblock>
# </top>
# <bottom>
# <image name="bottom"></image>
# </bottom>
# </defaultcard>
# <defaultitemcard>
# <top>
# <image name="top"></image>
# <textblock name="locationname"></textblock>
# </top>
# <bottom>
# <image name="bottom"></image>
# </bottom>
# </defaultitemcard>
# <defaultlocationcard>
# <top>
# <image name="top"></image>
# <textblock name="locationname"></textblock>
# </top>
# <bottom>
# <image name="bottom"></image>
# </bottom>
# </defaultlocationcard>
# <base>
# <card></card>
# <card></card>
# <card></card>
# <card></card>
# <card></card>
# </base>
# <iconreference>
# <top></top>
# <bottom></bottom>
# </iconreference>
# <characters>
# <card></card>
# <card></card>
# <card></card>
# </characters>
# <plan>
# <card></card>
# <card></card>
# <card></card>
# <card></card>
# </plan>
# <items>
# <card></card>
# <card></card>
# </items>
# <misc>
# <card></card>
# <card></card>
# </misc>
# <locations>
# <location>
# <card></card>
# <card></card>
# <card></card>
# </location>
# <location>
# <card></card>
# <card></card>
# <card></card>
# </location>
# </locations>
# </cards>
# </deck>
# <card name="name">
# <top>
# <render_text name="">
# <rotation>angle</rotation>
# <style>style</style>
# <rectangle>x y dx dy</rectangle>
# </render_text>
# <render_image name="">
# <image>name</image>
# <rectangle>x y dx dy</rectangle>
# </render_image>
# </top>
# <bottom>
# </bottom>
# </card>
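# Illustrative sketch, not part of the original module: exercising the schema
# above with the classes defined in this file. It assumes a QApplication
# already exists (the editor creates one); the card name, text and output
# filename are invented.
def _deck_example():
    deck = build_empty_deck()
    card = Card("Example Card")
    title = TextRender("title")
    title.text = "Welcome"
    title.rectangle = [10, 10, 200, 40]
    card.top_face.renderables.append(title)
    deck.base.append(card)
    deck.renumber_entities()
    return deck.save("example_deck.xml")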
| mit | 1,640,969,313,959,279,400 | 32.001391 | 112 | 0.553355 | false |
praekelt/nurseconnect | nurseconnect/celery.py | 1 | 1088 | from __future__ import absolute_import
import os
from celery import Celery
from django.conf import settings
from celery.signals import celeryd_init
from django.core.management import call_command
os.environ.setdefault("DJANGO_SETTINGS_MODULE",
"nurseconnect.settings.production")
app = Celery("proj")
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object("django.conf:settings")
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS) # pragma: no cover
@celeryd_init.connect
def ensure_search_index_updated(sender, instance, **kwargs):
'''
Run update_index when celery starts
'''
try:
from wagtail.wagtailsearch.backends.db import DBSearch
backend = DBSearch
except ImportError:
from wagtail.wagtailsearch.backends.db import DatabaseSearchBackend
backend = DatabaseSearchBackend
from wagtail.wagtailsearch.backends import get_search_backend
if not isinstance(get_search_backend(), backend):
call_command('update_index')
| bsd-2-clause | -7,281,622,645,620,145,000 | 27.631579 | 75 | 0.732537 | false |
nmardosz/freedb-to-json-parser | freedbparser.py | 1 | 7260 | import os
import sys
import xml.sax.handler
import xml.sax
import codecs
import re
import time
import json
reload(sys)
sys.setdefaultencoding('utf8')
releaseCounter = 0
if ( __name__ == "__main__"):
trackslength = []
#trackoffsets = []
disclength = []
prevcombinenum = ''
trackcombinenum = ''
partialtracktitle = ''
tracktitle = []
#alltracksnolast = 0
discartist = ''
disctitle = ''
nexttitle = ''
discyear = ''
discgenre = ''
formattedtrackslengths = []
formattedtracktitles = []
indexer = ''
morethanonetitle = 0
aftertrackframeoffset = 0
genreleaseidname = ''
genreleasenametoid = ''
fo = codecs.open('releases.json','w',encoding='utf8')
fl = codecs.open('parselog.txt','w',encoding='utf8')
#fo = open('releases.json','w')
trackframeoffset = re.compile("#[ \t]+Track[ \t]+frame[ \t]+offsets:")
framematch = re.compile("#[ +\t+]+[0-9]+")
framematchnos = re.compile("#[0-9]+")
framedontmatch = re.compile("#[ +\t+]+[0-9]+[ +\t+\-\_+a-z+:+]+")
disclengthmatch = re.compile("# +Disc +length: +[0-9]+")
tracktitlematch = re.compile("TTITLE[0-9]+=.*")
discartisttitle = re.compile("DTITLE=.*")
discyearmatch = re.compile("DYEAR=.*")
discgenrematch = re.compile("DGENRE=.*")
artiststitlematch = re.compile(" \/ ")
indir = 'D:/FreeDB/FreedbDump/'
for root, dirs, filenames in os.walk(indir):
for filename in filenames:
#with open("65078809") as infile:
if (os.stat(os.path.join(root, filename)).st_size == 0):
continue
with open(os.path.join(root, filename)) as infile:
#print(filename)
fl.write(os.path.join(root, filename) + '\n')
genreleaseidname = os.path.basename(os.path.normpath(root))
if (genreleaseidname == "blues"):
genreleasenametoid = "0001"
if (genreleaseidname == "classical"):
genreleasenametoid = "0002"
if (genreleaseidname == "country"):
genreleasenametoid = "0003"
if (genreleaseidname == "data"):
genreleasenametoid = "0004"
if (genreleaseidname == "folk"):
genreleasenametoid = "0005"
if (genreleaseidname == "jazz"):
genreleasenametoid = "0006"
if (genreleaseidname == "misc"):
genreleasenametoid = "0007"
if (genreleaseidname == "newage"):
genreleasenametoid = "0008"
if (genreleaseidname == "reggae"):
genreleasenametoid = "0009"
if (genreleaseidname == "rock"):
genreleasenametoid = "0010"
if (genreleaseidname == "soundtrack"):
genreleasenametoid = "0011"
for line in infile:
if (trackframeoffset.match(line)):
aftertrackframeoffset = 1
if (aftertrackframeoffset == 1):
						if (not framedontmatch.match(line)) and (framematch.match(line) or framematchnos.match(line)):
trackslength.append(map(int, re.findall('\d+', line)))
if (disclengthmatch.match(line)):
disclength.append(map(int, re.findall('\d+', line)))
if (tracktitlematch.match(line)):
trackcombinenum = line.split("=")[0]
if trackcombinenum == prevcombinenum:
prevcombinenum = line.split("=")[0]
partialtracktitle = tracktitle[-1]
partialtracktitle = partialtracktitle.rstrip() + line.split("=")[1].rstrip()
tracktitle[-1] = partialtracktitle
continue
if trackcombinenum != prevcombinenum:
prevcombinenum = line.split("=")[0]
tracktitle.append(line.split("=")[1])
continue
if (discartisttitle.match(line)):
morethanonetitle += 1
if (morethanonetitle == 1):
discartist = line.split(" / ")[0].decode('iso-8859-1').encode("utf-8").rstrip()
discartist = re.sub('DTITLE=', '', discartist)
try:
disctitle = line.split(" / ")[1].decode('iso-8859-1').encode("utf-8").rstrip()
if not disctitle:
disctitle = discartist
except:
disctitle = discartist
if (morethanonetitle > 1):
nexttitle = line.decode('iso-8859-1').encode("utf-8").rstrip()
nexttitle = re.sub('DTITLE=', '', nexttitle)
disctitle += nexttitle.decode('iso-8859-1').encode("utf-8")
nexttitle = ''
if (discyearmatch.match(line)):
discyear = line.split("=")[1]
if (discgenrematch.match(line)):
discgenre = line.split("=")[1]
for idx, item in enumerate(trackslength[:-1]):
currentframe = map(lambda x: float(x)/75, trackslength[idx])
nextframe = map(lambda x: float(x)/75, trackslength[idx + 1])
tracknumlength = [a - b for a, b in zip(nextframe, currentframe)]
m, s = divmod(tracknumlength[0], 60)
h, m = divmod(m, 60)
if(h == 0):
timeconv = "%d:%02d" % (m, s)
else:
timeconv = "%d:%02d:%02d" % (h, m, s)
#currentframe = int(currentframe) / 75
#nextframe = int(nextframe) / 75
#fo.write("tracknumber {0}: length: {1}\n".format(idx + 1, '' .join(map(str, timeconv))))
formattedtrackslengths.append(timeconv)
for item in disclength:
#'' .join(map(str, item))
lasttrackoffset = map(lambda x: float(x)/75, trackslength[-1])
lasttracklength = [a - b for a, b in zip(item, lasttrackoffset)]
m, s = divmod(lasttracklength[0], 60)
h, m = divmod(m, 60)
if(h == 0):
timeconv = "%d:%02d" % (m, s)
else:
timeconv = "%d:%02d:%02d" % (h, m, s)
#fo.write("tracknumber {0}: length: {1}\n".format(len(trackslength), timeconv))
formattedtrackslengths.append(timeconv)
for item in tracktitle:
#fo.write("Title: {0}".format(item))
formattedtracktitles.append(item.decode('iso-8859-1').encode("utf-8").rstrip())
fo.write('{"releaseid": ' + json.dumps(genreleasenametoid + filename.decode('iso-8859-1').encode("utf-8").lower().rstrip()) + ', ')
fo.write('"l_artist_name": ' + json.dumps(discartist.decode('iso-8859-1').encode("utf-8").lower().rstrip()) + ', ')
fo.write('"artist_name": ' + json.dumps(discartist.decode('iso-8859-1').encode("utf-8").rstrip()) + ', ')
fo.write('"l_title": ' + json.dumps(disctitle.decode('iso-8859-1').encode("utf-8").lower().rstrip()) + ', ')
fo.write('"title": ' + json.dumps(disctitle.decode('iso-8859-1').encode("utf-8").rstrip()) + ', ')
fo.write('"year": ' + json.dumps(discyear.decode('iso-8859-1').encode("utf-8").rstrip()) + ', ')
fo.write('"genre": ' + json.dumps(discgenre.decode('iso-8859-1').encode("utf-8").rstrip()) + ', ')
fo.write('"tracklist": [')
if (len(formattedtrackslengths) == 0):
fo.write(']')
if (len(formattedtrackslengths) > 0):
for idx, item in enumerate(formattedtrackslengths):
indexer = idx + 1
fo.write('{"track_position": ' + json.dumps(str(indexer)) + ', "track_title": ' + json.dumps(formattedtracktitles[idx]) + ', "track_duration": ' + json.dumps(formattedtrackslengths[idx]))
if (indexer == len(formattedtrackslengths)):
fo.write('}]')
else:
fo.write('},')
fo.write('}\n')
indexer = ''
trackslength = []
disclength = []
prevcombinenum = ''
trackcombinenum = ''
partialtracktitle = ''
tracktitle = []
discartist = ''
disctitle = ''
discyear = ''
discgenre = ''
formattedtrackslengths = []
formattedtracktitles = []
morethanonetitle = 0
aftertrackframeoffset = 0
infile.close()
fo.close()
fl.close()
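# Illustrative sketch, not part of the original script: the arithmetic used
# above. CD audio runs at 75 frames per second, so a track length is the
# difference between consecutive frame offsets divided by 75.
def _frames_to_mmss(start_frame, end_frame):
	seconds = (end_frame - start_frame) / 75.0
	m, s = divmod(seconds, 60)
	return "%d:%02d" % (m, s)
# e.g. _frames_to_mmss(150, 16280) == "3:35" (16130 frames ~ 215 seconds)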
| mit | -9,202,248,683,548,465,000 | 36.61658 | 193 | 0.612672 | false |
mlperf/training_results_v0.6 | Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/topi/tests/python/test_topi_bnn.py | 1 | 2015 | """Test code for binary neural network operators."""
import numpy as np
import tvm
import topi
from topi.util import get_const_tuple
from tvm.contrib.pickle_memoize import memoize
def verify_binary_dense(batch, in_dim, out_dim):
A = tvm.placeholder((batch, in_dim), name='A')
B = tvm.placeholder((out_dim, in_dim), name='B')
bnn_A = topi.nn.binarize_pack(A)
bnn_B = topi.nn.binarize_pack(B)
# binary dense
bnn_A1 = tvm.placeholder(bnn_A.shape, dtype=bnn_A.dtype)
bnn_B1 = tvm.placeholder(bnn_B.shape, dtype=bnn_B.dtype)
bnn_C = topi.nn.binary_dense(bnn_A1, bnn_B1)
# schedule
with tvm.target.create('llvm'):
s1 = topi.generic.schedule_binarize_pack(bnn_A)
s2 = topi.generic.schedule_binarize_pack(bnn_B)
s3 = topi.generic.schedule_binary_dense(bnn_C)
dtype = A.dtype
@memoize("topi.tests.test_topi_binary_dense")
def get_ref_data():
# generate random matrix of +1 or -1 value
a_np = (np.random.randint(2, size=(batch, in_dim)) * 2 - 1).astype(dtype)
b_np = (np.random.randint(2, size=(out_dim, in_dim)) * 2 - 1).astype(dtype)
c_np = np.dot(a_np, b_np.T)
return a_np, b_np, c_np
a_np, b_np, c_np = get_ref_data()
ctx = tvm.cpu(0)
a = tvm.nd.array(a_np, ctx)
b = tvm.nd.array(b_np, ctx)
bnn_a = tvm.nd.array(np.zeros(get_const_tuple(bnn_A.shape), dtype=bnn_A.dtype), ctx)
bnn_b = tvm.nd.array(np.zeros(get_const_tuple(bnn_B.shape), dtype=bnn_B.dtype), ctx)
bnn_c = tvm.nd.array(np.zeros(get_const_tuple(bnn_C.shape), dtype=bnn_C.dtype), ctx)
f1 = tvm.build(s1, [A, bnn_A], 'llvm')
f2 = tvm.build(s2, [B, bnn_B], 'llvm')
f3 = tvm.build(s3, [bnn_A1, bnn_B1, bnn_C], 'llvm')
f1(a, bnn_a)
f2(b, bnn_b)
f3(bnn_a, bnn_b, bnn_c)
np.testing.assert_allclose(bnn_c.asnumpy(), c_np, rtol=1e-5)
def test_binary_dense():
verify_binary_dense(1, 4096, 1024)
verify_binary_dense(1, 1024, 1000)
if __name__ == "__main__":
test_binary_dense()
| apache-2.0 | -2,668,544,037,635,792,000 | 35.636364 | 88 | 0.619851 | false |
dnil/OGTtoPED | OGTtoPED.py | 1 | 3605 | import argparse
import sys
from openpyxl import load_workbook
import config
from sample import Sample
from family import update_family, family_ped
# CLI
parser = argparse.ArgumentParser(description="Convert OGT xlsx to PED file")
parser.add_argument("orderform",
help="OGT order form with sample ID, status and family groups.")
parser.add_argument("outfile", help="Output PED file", nargs='?')
parser.add_argument("-D", "--debug", help="Enable DEBUG output.",
action="store_true")
args = parser.parse_args()
if args.debug:
    print >> sys.stderr, "DEBUG output turned on."
    config.debug = True
config.outfile = args.outfile
# Truncate output ped file
if config.outfile is not None:
out = open(config.outfile, 'w')
else:
out = sys.stdout
# Open workbook
wb = load_workbook(filename = args.orderform)
ws = wb.active
# Sanity checks
if ws.title != "material form":
print(sys.stderr, "WARNING: Non standard active sheet name ", ws.title)
if (ws['B12'].value != "Customer Sample ID"
or ws['M12'].value != "Additional Experimental Comments"
or ws['C12'].value != "Source (cells, tissue etc)"):
print(sys.stderr, ("Unexpected table / cell layout: check to see"
"that sheet is ok, and ask to have the script updated."))
exit(1)
# Main
# Iterate over all rows, parse row blocks
in_sample_section = False
in_family = False
max_rows = 1024
samples_found = 0
family = []
family_count = 0
for rownum in range(1,max_rows+1):
cell=ws["B" + str(rownum)]
if not in_sample_section:
if cell.value == "Customer Sample ID":
if config.debug:
print(sys.stderr, "Found sample ID tag.")
in_sample_section = True
else:
if cell.value is not None:
# Found a new sample row.
sample_id = cell.value
                sample_id = sample_id.rstrip()
if not in_family:
if config.debug:
print(sys.stderr, ("New family, starting with sample "
"'{}'").format(sample_id))
family_count += 1
info_cell = ws["M" + str(rownum)]
info = info_cell.value
if info is None:
info = "NA"
                info = info.rstrip()
tissue_cell = ws["C" + str(rownum)]
tissue = tissue_cell.value
if tissue is None:
tissue = "NA"
                tissue = tissue.rstrip()
sample = Sample(sample_id, info, tissue)
in_family = True
family.append(sample)
if sample.info.find("singleton") != -1:
# Found a singleton!
sample.affected = True
update_family(family)
print >> out, family_ped(family, family_count).rstrip()
# This ends the current family.
if config.debug:
print(sys.stderr, "Found a singleton. Family complete.")
family = []
in_family = False
# Note that the next row may be a None or a new family member..
samples_found += 1
elif cell.value is None:
# Value None means an empty row.
if in_family:
# This ends the current family.
if config.debug:
print(sys.stderr, "Family complete.")
update_family(family)
print >> out, family_ped(family, family_count).rstrip()
family = []
in_family = False
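# Illustrative sketch, not part of the original script: family_ped() comes
# from the local family module (not shown). Assuming it emits standard
# six-column PED records, one line per individual looks like this:
def _example_ped_line(family_id, sample_id, affected):
    # FamilyID IndividualID FatherID MotherID Sex Phenotype
    # 0 = unknown parent/sex; phenotype 2 = affected, 1 = unaffected
    phenotype = "2" if affected else "1"
    return "\t".join([family_id, sample_id, "0", "0", "0", phenotype])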
| artistic-2.0 | 893,453,471,902,956,800 | 27.84 | 79 | 0.553398 | false |
ric2b/Vivaldi-browser | chromium/third_party/blink/renderer/build/scripts/core/style/make_computed_style_base.py | 1 | 23462 | #!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import math
import json5_generator
import template_expander
import keyword_utils
import bisect
from blinkbuild.name_style_converter import NameStyleConverter
from core.css import css_properties
from core.style.computed_style_fields import DiffGroup, Enum, Group, Field
from itertools import chain
# Heuristic ordering of types from largest to smallest, used to sort fields by
# their alignment sizes.
# Specifying the exact alignment sizes for each type is impossible because it's
# platform specific, so we define an ordering instead.
# The ordering comes from the data obtained in:
# https://codereview.chromium.org/2841413002
# FIXME: Put alignment sizes into code form, rather than linking to a CL
# which may disappear.
ALIGNMENT_ORDER = [
# Aligns like double
'ScaleTransformOperation',
'RotateTransformOperation',
'TranslateTransformOperation',
'double',
# Aligns like a pointer (can be 32 or 64 bits)
'NamedGridLinesMap',
'OrderedNamedGridLines',
'NamedGridAreaMap',
'TransformOperations',
'Vector<CSSPropertyID>',
'Vector<GridTrackSize>',
'Vector<AtomicString>',
'GridPosition',
'GapLength',
'AtomicString',
'scoped_refptr',
'Persistent',
'std::unique_ptr',
'Vector<String>',
'Font',
'FillLayer',
'NinePieceImage',
'ContentSize',
# Aligns like float
'StyleOffsetRotation',
'TransformOrigin',
'ScrollPadding',
'ScrollMargin',
'LengthBox',
'LengthSize',
'FloatSize',
'LengthPoint',
'Length',
'TextSizeAdjust',
'TabSize',
'float',
# Aligns like int
'cc::ScrollSnapType',
'cc::ScrollSnapAlign',
'BorderValue',
'StyleColor',
'Color',
'LayoutUnit',
'LineClampValue',
'OutlineValue',
'unsigned',
'size_t',
'int',
# Aligns like short
'unsigned short',
'short',
# Aligns like char
'StyleSelfAlignmentData',
'StyleContentAlignmentData',
'uint8_t',
'char',
# Aligns like bool
'bool'
]
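# Illustrative sketch, not part of the original script: how this ordering is
# meant to be used. Sorting field types by their position in ALIGNMENT_ORDER
# packs larger-aligned members first, which minimizes struct padding.
def _example_sort_by_alignment(field_types):
    return sorted(field_types, key=ALIGNMENT_ORDER.index)
# _example_sort_by_alignment(['bool', 'double', 'int']) == ['double', 'int', 'bool']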
# FIXME: Improve documentation and add docstrings.
def _flatten_list(x):
"""Flattens a list of lists into a single list."""
return list(chain.from_iterable(x))
def _get_include_paths(properties):
"""
Get a list of paths that need to be included for ComputedStyleBase.
"""
include_paths = set()
for property_ in properties:
include_paths.update(property_['include_paths'])
return list(sorted(include_paths))
def _create_groups(properties):
"""Create a tree of groups from a list of properties.
Returns:
Group: The root group of the tree. The name of the group is set to None.
"""
# We first convert properties into a dictionary structure. Each dictionary
# represents a group. The None key corresponds to the fields directly stored
# on that group. The other keys map from group name to another dictionary.
# For example:
# {
# None: [field1, field2, ...]
# 'groupA': { None: [field3] },
# 'groupB': {
# None: [],
# 'groupC': { None: [field4] },
# },
# }
#
# We then recursively convert this dictionary into a tree of Groups.
# FIXME: Skip the first step by changing Group attributes to methods.
def _dict_to_group(name, group_dict):
fields_in_current_group = group_dict.pop(None)
        subgroups = [
            _dict_to_group(subgroup_name, subgroup_dict)
            for subgroup_name, subgroup_dict in group_dict.items()]
return Group(name, subgroups, _reorder_fields(fields_in_current_group))
root_group_dict = {None: []}
for property_ in properties:
current_group_dict = root_group_dict
if property_['field_group']:
for group_name in property_['field_group'].split('->'):
current_group_dict[group_name] = current_group_dict.get(
group_name, {None: []})
current_group_dict = current_group_dict[group_name]
current_group_dict[None].extend(_create_fields(property_))
return _dict_to_group(None, root_group_dict)
def _create_diff_groups_map(diff_function_inputs, root_group):
diff_functions_map = {}
for entry in diff_function_inputs:
        # Validate that every field referenced by this diff entry exists.
field_names = entry['fields_to_diff'] + _list_field_dependencies(
entry['methods_to_diff'] + entry['predicates_to_test'])
for name in field_names:
assert name in [
field.property_name for field in root_group.all_fields], \
"The field '{}' isn't a defined field on ComputedStyle. " \
"Please check that there's an entry for '{}' in" \
"css_properties.json5 or " \
"computed_style_extra_fields.json5".format(name, name)
diff_functions_map[entry['name'].original] = _create_diff_groups(
entry['fields_to_diff'], entry['methods_to_diff'],
entry['predicates_to_test'], root_group)
return diff_functions_map
def _list_field_dependencies(entries_with_field_dependencies):
field_dependencies = []
for entry in entries_with_field_dependencies:
field_dependencies += entry['field_dependencies']
return field_dependencies
def _create_diff_groups(fields_to_diff,
methods_to_diff,
predicates_to_test,
root_group):
diff_group = DiffGroup(root_group)
field_dependencies = _list_field_dependencies(
methods_to_diff + predicates_to_test)
for subgroup in root_group.subgroups:
if any(field.property_name in (fields_to_diff + field_dependencies)
for field in subgroup.all_fields):
diff_group.subgroups.append(_create_diff_groups(
fields_to_diff, methods_to_diff, predicates_to_test, subgroup))
for entry in fields_to_diff:
for field in root_group.fields:
if not field.is_inherited_flag and entry == field.property_name:
diff_group.fields.append(field)
for entry in methods_to_diff:
for field in root_group.fields:
if (not field.is_inherited_flag and
field.property_name in entry['field_dependencies'] and
entry['method'] not in diff_group.expressions):
diff_group.expressions.append(entry['method'])
for entry in predicates_to_test:
for field in root_group.fields:
if (not field.is_inherited_flag and
field.property_name in entry['field_dependencies']
and entry['predicate'] not in diff_group.predicates):
diff_group.predicates.append(entry['predicate'])
return diff_group
def _create_enums(properties):
"""Returns a list of Enums to be generated"""
enums = {}
for property_ in properties:
# Only generate enums for keyword properties that do not
# require includes.
if (property_['field_template'] in ('keyword', 'multi_keyword') and
len(property_['include_paths']) == 0):
enum = Enum(property_['type_name'], property_['keywords'],
is_set=(property_['field_template'] == 'multi_keyword'))
if property_['field_template'] == 'multi_keyword':
assert property_['keywords'][0] == 'none', \
"First keyword in a 'multi_keyword' field must be " \
"'none' in '{}'.".format(property_['name'])
if enum.type_name in enums:
# There's an enum with the same name, check if the enum
# values are the same
assert set(enums[enum.type_name].values) == set(enum.values), \
"'{}' can't have type_name '{}' because it was used by " \
"a previous property, but with a different set of " \
"keywords. Either give it a different name or ensure " \
"the keywords are the same.".format(
property_['name'], enum.type_name)
else:
enums[enum.type_name] = enum
# Return the enums sorted by type name
return list(sorted(enums.values(), key=lambda e: e.type_name))
def _create_property_field(property_):
"""
Create a property field.
"""
name_for_methods = property_['name_for_methods']
assert property_['default_value'] is not None, \
        'MakeComputedStyleBase requires a default value for all fields, ' \
'none specified for property ' + property_['name']
type_name = property_['type_name']
if property_['field_template'] == 'keyword':
assert property_['field_size'] is None, \
("'" + property_['name'] + "' is a keyword field, "
"so it should not specify a field_size")
size = int(math.ceil(math.log(len(property_['keywords']), 2)))
elif property_['field_template'] == 'multi_keyword':
size = len(property_['keywords']) - 1 # Subtract 1 for 'none' keyword
elif property_['field_template'] == 'external':
size = None
elif property_['field_template'] == 'primitive':
# pack bools with 1 bit.
size = 1 if type_name == 'bool' else property_["field_size"]
elif property_['field_template'] == 'pointer':
size = None
else:
assert property_['field_template'] == 'monotonic_flag', \
"Please use a valid value for field_template"
size = 1
return Field(
'property',
name_for_methods,
property_name=property_['name'].original,
inherited=property_['inherited'],
independent=property_['independent'],
type_name=property_['type_name'],
wrapper_pointer_name=property_['wrapper_pointer_name'],
field_template=property_['field_template'],
size=size,
default_value=property_['default_value'],
custom_copy=property_['custom_copy'],
custom_compare=property_['custom_compare'],
mutable=property_['mutable'],
getter_method_name=property_['getter'],
setter_method_name=property_['setter'],
initial_method_name=property_['initial'],
computed_style_custom_functions=property_[
'computed_style_custom_functions'],
)
def _create_inherited_flag_field(property_):
"""
Create the field used for an inheritance fast path from an independent CSS
property, and return the Field object.
"""
name_for_methods = NameStyleConverter(property_['name_for_methods']).to_function_name(suffix=['is', 'inherited'])
name_source = NameStyleConverter(name_for_methods)
return Field(
'inherited_flag',
name_for_methods,
property_name=property_['name'].original,
type_name='bool',
wrapper_pointer_name=None,
field_template='primitive',
size=1,
default_value='true',
custom_copy=False,
custom_compare=False,
mutable=False,
getter_method_name=name_source.to_function_name(),
setter_method_name=name_source.to_function_name(prefix='set'),
initial_method_name=name_source.to_function_name(prefix='initial'),
computed_style_custom_functions=property_[
"computed_style_custom_functions"],
)
def _create_fields(property_):
"""
Create ComputedStyle fields from a property and return a list of Fields.
"""
fields = []
# Only generate properties that have a field template
if property_['field_template'] is not None:
# If the property is independent, add the single-bit sized isInherited
# flag to the list of Fields as well.
if property_['independent']:
fields.append(_create_inherited_flag_field(property_))
fields.append(_create_property_field(property_))
return fields
def _reorder_bit_fields(bit_fields):
# Since fields cannot cross word boundaries, in order to minimize
# padding, group fields into buckets so that as many buckets as possible
# are exactly 32 bits. Although this greedy approach may not always
# produce the optimal solution, we add a static_assert to the code to
# ensure ComputedStyleBase results in the expected size. If that
# static_assert fails, this code is falling into the small number of
# cases that are suboptimal, and may need to be rethought.
# For more details on packing bit fields to reduce padding, see:
# http://www.catb.org/esr/structure-packing/#_bitfields
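    # Illustrative example (hypothetical sizes): bit fields of sizes
    # 16, 16, 8, 8, 8 and 8 pack greedily into two exact 32-bit buckets,
    # [16, 16] and [8, 8, 8, 8], with no padding.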
field_buckets = []
# Consider fields in descending order of size to reduce fragmentation
# when they are selected. Ties broken in alphabetical order by name.
for field in sorted(bit_fields, key=lambda f: (-f.size, f.name)):
added_to_bucket = False
# Go through each bucket and add this field if it will not increase
# the bucket's size to larger than 32 bits. Otherwise, make a new
# bucket containing only this field.
for bucket in field_buckets:
if sum(f.size for f in bucket) + field.size <= 32:
bucket.append(field)
added_to_bucket = True
break
if not added_to_bucket:
field_buckets.append([field])
return _flatten_list(field_buckets)
def _reorder_non_bit_fields(non_bit_fields):
# A general rule of thumb is to sort members by their alignment requirement
# (from biggest aligned to smallest).
for field in non_bit_fields:
assert field.alignment_type in ALIGNMENT_ORDER, \
"Type {} has unknown alignment. Please update ALIGNMENT_ORDER " \
"to include it.".format(field.name)
return list(sorted(
non_bit_fields, key=lambda f: ALIGNMENT_ORDER.index(f.alignment_type)))
def _reorder_fields(fields):
"""
Returns a list of fields ordered to minimise padding.
"""
# Separate out bit fields from non bit fields
bit_fields = [field for field in fields if field.is_bit_field]
non_bit_fields = [field for field in fields if not field.is_bit_field]
# Non bit fields go first, then the bit fields.
return _reorder_non_bit_fields(
non_bit_fields) + _reorder_bit_fields(bit_fields)
def _get_properties_ranking_using_partition_rule(
properties_ranking, partition_rule):
"""Take the contents of the properties ranking file and produce a dictionary
of css properties with their group number based on the partition_rule
Args:
properties_ranking: rankings map as read from CSSPropertyRanking.json5
partition_rule: cumulative distribution over properties_ranking
Returns:
        a dictionary whose keys are css property names and whose values are
        the group that each property belongs to. A smaller group number
        means higher popularity in the ranking.
"""
return dict(
zip(properties_ranking, [
bisect.bisect_left(
partition_rule, float(i) / len(properties_ranking)) + 1
for i in range(len(properties_ranking))]))
def _evaluate_rare_non_inherited_group(properties, properties_ranking,
num_layers, partition_rule=None):
"""Re-evaluate the grouping of RareNonInherited groups based on each
property's popularity.
Args:
properties: list of all css properties
properties_ranking: map of property rankings
        num_layers: the number of groups to split the properties into
partition_rule: cumulative distribution over properties_ranking
Ex: [0.3, 0.6, 1]
"""
if partition_rule is None:
partition_rule = [
1.0 * (i + 1) / num_layers for i in range(num_layers)]
assert num_layers == len(partition_rule), \
"Length of rule and num_layers mismatch"
layers_name = [
"rare-non-inherited-usage-less-than-{}-percent".format(
int(round(partition_rule[i] * 100)))
for i in range(num_layers)
]
properties_ranking = _get_properties_ranking_using_partition_rule(
properties_ranking, partition_rule)
for property_ in properties:
if (property_["field_group"] is not None and
"*" in property_["field_group"]
and not property_["inherited"] and
property_["name"].original in properties_ranking):
assert property_["field_group"] == "*", \
"The property {} will be automatically assigned a group, " \
"please put '*' as the field_group".format(property_['name'])
property_["field_group"] = "->".join(
layers_name[0:properties_ranking[property_["name"].original]])
elif property_["field_group"] is not None and \
"*" in property_["field_group"] and \
not property_["inherited"] and \
property_["name"].original not in properties_ranking:
group_tree = property_["field_group"].split("->")[1:]
group_tree = [layers_name[0], layers_name[0] + "-sub"] + group_tree
property_["field_group"] = "->".join(group_tree)
def _evaluate_rare_inherit_group(properties, properties_ranking,
num_layers, partition_rule=None):
"""Re-evaluate the grouping of RareInherited groups based on each property's
popularity.
Args:
properties: list of all css properties
properties_ranking: map of property rankings
        num_layers: the number of groups to split the properties into
partition_rule: cumulative distribution over properties_ranking
Ex: [0.4, 1]
"""
if partition_rule is None:
partition_rule = [
1.0 * (i + 1) / num_layers for i in range(num_layers)
]
assert num_layers == len(partition_rule), \
"Length of rule and num_layers mismatch"
layers_name = [
"rare-inherited-usage-less-than-{}-percent".format(
int(round(partition_rule[i] * 100)))
for i in range(num_layers)
]
properties_ranking = _get_properties_ranking_using_partition_rule(
properties_ranking, partition_rule)
for property_ in properties:
if property_["field_group"] is not None and \
"*" in property_["field_group"] \
and property_["inherited"] and \
property_["name"].original in properties_ranking:
property_["field_group"] = "->".join(
layers_name[0:properties_ranking[property_["name"].original]])
elif property_["field_group"] is not None and \
"*" in property_["field_group"] \
and property_["inherited"] and \
property_["name"].original not in properties_ranking:
group_tree = property_["field_group"].split("->")[1:]
group_tree = [layers_name[0], layers_name[0] + "-sub"] + group_tree
property_["field_group"] = "->".join(group_tree)
class ComputedStyleBaseWriter(json5_generator.Writer):
def __init__(self, json5_file_paths, output_dir):
super(ComputedStyleBaseWriter, self).__init__([], output_dir)
self._input_files = json5_file_paths
# Reads css_properties.json5, computed_style_field_aliases.json5 and
# computed_style_extra_fields.json5
self._css_properties = css_properties.CSSProperties(
json5_file_paths[0:3])
        # We sort the enum values based on each value's position in the
        # keywords as listed in css_properties.json5. This ensures that if
        # there is a continuous segment in css_properties.json5 matching the
        # segment in this enum, the generated enum will have the same order
        # and continuity as css_properties.json5, so we can get the longest
        # continuous segment and thereby reduce the switch-case statement to
        # the minimum.
properties = keyword_utils.sort_keyword_properties_by_canonical_order(
self._css_properties.longhands,
json5_file_paths[4],
self.default_parameters)
self._properties = properties + self._css_properties.extra_fields
self._generated_enums = _create_enums(self._properties)
# Organise fields into a tree structure where the root group
# is ComputedStyleBase.
group_parameters = dict([
(conf["name"], conf["cumulative_distribution"]) for conf in
json5_generator.Json5File.load_from_files(
[json5_file_paths[6]]).name_dictionaries])
properties_ranking = [
x["name"].original for x in json5_generator.Json5File.load_from_files(
[json5_file_paths[5]]).name_dictionaries
]
_evaluate_rare_non_inherited_group(
self._properties,
properties_ranking,
len(group_parameters["rare_non_inherited_properties_rule"]),
group_parameters["rare_non_inherited_properties_rule"])
_evaluate_rare_inherit_group(
self._properties,
properties_ranking,
len(group_parameters["rare_inherited_properties_rule"]),
group_parameters["rare_inherited_properties_rule"])
self._root_group = _create_groups(self._properties)
self._diff_functions_map = _create_diff_groups_map(
json5_generator.Json5File.load_from_files(
[json5_file_paths[3]]).name_dictionaries,
self._root_group)
self._include_paths = _get_include_paths(self._properties)
self._outputs = {
'computed_style_base.h': self.generate_base_computed_style_h,
'computed_style_base.cc': self.generate_base_computed_style_cpp,
'computed_style_base_constants.h':
self.generate_base_computed_style_constants,
}
@template_expander.use_jinja(
'core/style/templates/computed_style_base.h.tmpl', tests={'in': lambda a, b: a in b})
def generate_base_computed_style_h(self):
return {
'input_files': self._input_files,
'properties': self._properties,
'enums': self._generated_enums,
'include_paths': self._include_paths,
'computed_style': self._root_group,
'diff_functions_map': self._diff_functions_map,
}
@template_expander.use_jinja(
'core/style/templates/computed_style_base.cc.tmpl',
tests={'in': lambda a, b: a in b})
def generate_base_computed_style_cpp(self):
return {
'input_files': self._input_files,
'properties': self._properties,
'enums': self._generated_enums,
'include_paths': self._include_paths,
'computed_style': self._root_group,
'diff_functions_map': self._diff_functions_map,
}
@template_expander.use_jinja('core/style/templates/computed_style_base_constants.h.tmpl')
def generate_base_computed_style_constants(self):
return {
'input_files': self._input_files,
'properties': self._properties,
'enums': self._generated_enums,
}
if __name__ == '__main__':
json5_generator.Maker(ComputedStyleBaseWriter).main()
| bsd-3-clause | 3,961,905,178,856,200,700 | 38.498316 | 117 | 0.619896 | false |
fmcardoso/aco-scs | pantspath/__init__.py | 1 | 1809 | """A Python3 implementation of the Ant Colony Optimization Meta-Heuristic.
**Pants** provides you with the ability to quickly determine how to
visit a collection of interconnected nodes such that the work done is
minimized. Nodes can be any arbitrary collection of data while the edges
represent the amount of "work" required to travel between two nodes.
Thus, **Pants** is a tool for solving traveling salesman problems.
The world is built from a list of nodes and a function responsible for
returning the length of the edge between any two given nodes. The length
function need not return actual length. Instead, "length" refers to
the amount of "work" involved in moving from the first node to the second
node - whatever that "work" may be. For a silly, random example, it could
even be the number of dishes one must wash before moving to the next
station at a least-dish-washing dish-washer competition.
Solutions are found through an iterative process. In each iteration,
several ants are allowed to find a solution that "visits" every node of
the world. The amount of pheromone on each edge is updated according to
the length of the solutions in which it was used. The ant that traveled the
least distance is considered to be the local best solution. If the local
solution has a shorter distance than the best from any previous
iteration, it then becomes the global best solution. The elite ant(s)
then deposit their pheromone along the path of the global best solution
to strengthen it further, and the process repeats.
You can read more about `Ant Colony Optimization on
Wikipedia <http://en.wikipedia.org/wiki/Ant_colony_optimization_algorithms>`_.
.. moduleauthor:: Robert Grant <[email protected]>
"""
from .ant import Ant
from .world import World, Edge
from .solver import Solver
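# A minimal usage sketch (hypothetical nodes and length function; solver
# parameters are left at their defaults):
#
#     nodes = [(0, 0), (1, 0), (0, 1)]
#     def euclidean(a, b):
#         return ((a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2) ** 0.5
#     world = World(nodes, euclidean)
#     solution = Solver().solve(world)
#     # solution.tour is the node ordering, solution.distance its length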
| mit | 1,317,406,960,457,766,400 | 49.25 | 78 | 0.791598 | false |
ryanmiao/libvirt-test-API | utils/xml_parser.py | 1 | 7336 | #!/usr/bin/env python
#
# xml_parser.py: Parse XML document, the result is a python dict.
#
# Copyright (C) 2010-2012 Red Hat, Inc.
#
# libvirt-test-API is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 2 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranties of
# TITLE, NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from xml.dom import minidom
import StringIO
class xml_parser(object):
"""Class xml_parser. It parses and xml document into a python dictionary.
The elements of the xml documents will be python dictionary keys. For
example, the xml document:
<firstel>firstdata</firstel>
<secondel>
<subinsecond> seconddata </subinsecond>
</secondel>
will be parsed into the python dictionary:
{ "firstel":"firstdata" , "secondel":{"subsinsecond":"seconddata"} }
    Then the data can be retrieved as:
out = xml_parser.xml_parser().parse(xml)
out["firstel"] (this will be firstdata )
out["secondel"]["subinsecond"] (this will be seconddata)
    attributes will be put into an "attr" hash, so say the xml document is:
<source>
<device path = '/dev/mapper/vg_hpdl120g501-lv_home'/>
</source>
It will be parsed into:
out["source"]["device"]["attr"]["path"]
which will be set to:
"/dev/mapper/vg_hpdl120g501-lv_home"
"""
def __init__(self):
pass
def parse(self, arg):
out = None
if type(arg) == file:
out = self.parsefile(arg)
elif os.path.exists(arg):
print "file: %s " % arg
out = self.parsefile(arg)
else:
streamstr = StringIO.StringIO(arg)
out = self.parsefile(streamstr)
if out != None:
return out
def parsefile(self, filepath):
xmldoc = minidom.parse(filepath)
thenode = xmldoc.firstChild
outdic = dict()
self.parseintodict(thenode, 0, outdic)
return outdic
def parseintodict(self, node, level, out, rootkey = None):
for thenode in node.childNodes:
if thenode.nodeType == node.ELEMENT_NODE:
key = thenode.nodeName
value = None
try:
value = thenode.childNodes[0].data
if value.strip() == '':
value = None
except:
value = None
newdict = { key:value }
attrdic = None
if rootkey != None:
self.keyfindandset(out, rootkey, thenode)
else:
if thenode.attributes != None:
tmpattr = dict()
if thenode.attributes.length > 0:
for attrkey in thenode.attributes.keys():
tmpattr.update(
{attrkey:thenode.attributes.get(attrkey).nodeValue})
attrdic = { "attr":tmpattr }
if key in out:
if out[key] == None:
if attrdic != None:
if value == None:
out[key] = attrdic
else:
valdic = { "value":value }
valdic.update(attrdic)
out[key] = valdic
else:
out[key] = value
elif type(out[key]) == list:
if attrdic != None:
newdict.update(attrdic)
out[key].append(newdict)
elif type(out[key]) == dict:
if attrdic != None:
newdict.update(attrdic)
out[key].update(newdict)
else:
tmp = out[key]
out[key] = [tmp, value]
else:
out[key] = value
if attrdic != None:
if value == None:
newdict[key] = attrdic
else:
valdic = { "value":value }
valdic.update(attrdic)
newdict = valdic
out[key] = newdict
self.parseintodict(thenode, level+1, out, key)
return out
def keyfindandset(self, thedict, thekey, thenode):
# get the key/value pair from the node.
newvalkey = thenode.nodeName
value = None
try:
value = thenode.childNodes[0].data
if value.strip() == '':
value = None
except:
value = None
newval = { newvalkey:value }
attrdic = None
if thenode.attributes != None:
tmpattr = dict()
if thenode.attributes.length > 0:
for key in thenode.attributes.keys():
tmpattr.update(
{key:thenode.attributes.get(key).nodeValue})
attrdic = { "attr":tmpattr }
if attrdic != None:
if value == None:
newval.update({newvalkey:attrdic})
else:
valdic = { "value":value }
newval.update(valdic)
newval.update(attrdic)
for key in thedict.keys():
if key == thekey:
if type(thedict[key]) == dict:
if newvalkey in thedict[key]:
if newval[newvalkey] != None:
tmpdic = thedict[key][newvalkey]
thedict[key][newvalkey] = [tmpdic]
thedict[key][newvalkey].append(newval)
else:
if type(thedict[key][newvalkey]) == list:
thedict[key][newvalkey].append(dict())
else:
tmpdic = thedict[key][newvalkey]
thedict[key][newvalkey] = [tmpdic]
thedict[key][newvalkey].append(dict())
else:
thedict[key].update(newval)
elif type(thedict[key]) == list:
if newvalkey in thedict[key][-1]:
thedict[key].append(newval)
else:
thedict[key][-1].update(newval)
else:
thedict[key] = newval
if type(thedict[key]) == dict:
self.keyfindandset(thedict[key], thekey, thenode)
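# A minimal usage sketch (hypothetical XML string, mirroring the class
# docstring above):
#
#     out = xml_parser().parse("<root><firstel>firstdata</firstel></root>")
#     # out["firstel"] == "firstdata"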
| gpl-2.0 | 4,921,790,545,853,446,000 | 39.530387 | 84 | 0.466876 | false |
ksmit799/Toontown-Source | toontown/cogdominium/CogdoMazeGameObjects.py | 1 | 10289 | from pandac.PandaModules import CollisionSphere, CollisionTube, CollisionNode
from pandac.PandaModules import NodePath, BitMask32
from pandac.PandaModules import Point3, Point4, WaitInterval, Vec3, Vec4
from direct.interval.IntervalGlobal import LerpScaleInterval, LerpColorScaleInterval, LerpPosInterval, LerpFunc
from direct.interval.IntervalGlobal import Func, Sequence, Parallel
from direct.showbase.DirectObject import DirectObject
from direct.task.Task import Task
from toontown.toonbase import ToontownGlobals
import CogdoMazeGameGlobals as Globals
from CogdoGameExit import CogdoGameExit
import CogdoUtil
import math
import random
class CogdoMazeSplattable:
def __init__(self, object, name, collisionRadius):
self.object = object
self.splat = CogdoUtil.loadMazeModel('splash')
self.splat.setBillboardPointEye()
self.splat.setBin('fixed', 40)
self.splat.setDepthTest(False)
self.splat.setDepthWrite(False)
self.splatTrack = None
self._splatSfxIval = base.cogdoGameAudioMgr.createSfxIval('splat')
self.initGagCollision(name, collisionRadius)
return
def destroy(self):
self.disableGagCollision()
if self._splatSfxIval.isPlaying():
self._splatSfxIval.finish()
del self._splatSfxIval
def initGagCollision(self, name, radius):
self.gagCollisionName = name
collision = CollisionTube(0, 0, 0, 0, 0, 4, radius)
collision.setTangible(1)
self.gagCollNode = CollisionNode(self.gagCollisionName)
self.gagCollNode.setIntoCollideMask(ToontownGlobals.PieBitmask)
self.gagCollNode.addSolid(collision)
self.gagCollNodePath = self.object.attachNewNode(self.gagCollNode)
def disableGagCollision(self):
self.gagCollNodePath.removeNode()
def doSplat(self):
if self.splatTrack and self.splatTrack.isPlaying():
self.splatTrack.finish()
self.splat.reparentTo(render)
self.splat.setPos(self.object, 0, 0, 3.0)
self.splat.setY(self.splat.getY() - 1.0)
self._splatSfxIval.node = self.splat
self.splatTrack = Parallel(self._splatSfxIval, Sequence(Func(self.splat.showThrough), LerpScaleInterval(self.splat, duration=0.5, scale=6, startScale=1, blendType='easeOut'), Func(self.splat.hide)))
self.splatTrack.start()
class CogdoMazeDrop(NodePath, DirectObject):
def __init__(self, game, id, x, y):
NodePath.__init__(self, 'dropNode%s' % id)
self.game = game
self.id = id
self.reparentTo(hidden)
self.setPos(x, y, 0)
shadow = loader.loadModel('phase_3/models/props/square_drop_shadow')
shadow.setZ(0.2)
shadow.setBin('ground', 10)
shadow.setColor(1, 1, 1, 1)
shadow.reparentTo(self)
self.shadow = shadow
drop = CogdoUtil.loadMazeModel('cabinetSmFalling')
roll = random.randint(-15, 15)
drop.setHpr(0, 0, roll)
drop.setZ(Globals.DropHeight)
self.collTube = CollisionTube(0, 0, 0, 0, 0, 4, Globals.DropCollisionRadius)
self.collTube.setTangible(0)
name = Globals.DropCollisionName
self.collNode = CollisionNode(name)
self.collNode.addSolid(self.collTube)
self.collNodePath = drop.attachNewNode(self.collNode)
self.collNodePath.hide()
self.collNodePath.setTag('isFalling', str('True'))
drop.reparentTo(self)
self.drop = drop
self._dropSfx = base.cogdoGameAudioMgr.createSfxIval('drop', volume=0.6)
def disableCollisionDamage(self):
self.collTube.setTangible(1)
self.collTube.setRadius(Globals.DroppedCollisionRadius)
self.collNode.setIntoCollideMask(ToontownGlobals.WallBitmask)
self.collNodePath.setTag('isFalling', str('False'))
def getDropIval(self):
shadow = self.shadow
drop = self.drop
id = self.id
hangTime = Globals.ShadowTime
dropTime = Globals.DropTime
dropHeight = Globals.DropHeight
targetShadowScale = 0.5
targetShadowAlpha = 0.4
shadowScaleIval = LerpScaleInterval(shadow, dropTime, targetShadowScale, startScale=0)
shadowAlphaIval = LerpColorScaleInterval(shadow, hangTime, Point4(1, 1, 1, targetShadowAlpha), startColorScale=Point4(1, 1, 1, 0))
shadowIval = Parallel(shadowScaleIval, shadowAlphaIval)
startPos = Point3(0, 0, dropHeight)
drop.setPos(startPos)
dropIval = LerpPosInterval(drop, dropTime, Point3(0, 0, 0), startPos=startPos, blendType='easeIn')
dropSoundIval = self._dropSfx
dropSoundIval.node = self
self.drop.setTransparency(1)
def _setRandScale(t):
self.drop.setScale(self, 1 - random.random() / 16, 1 - random.random() / 16, 1 - random.random() / 4)
scaleChange = 0.4 + random.random() / 4
        dropShakeSeq = Sequence(
            LerpScaleInterval(self.drop, 0.25, Vec3(1.0 + scaleChange, 1.0 + scaleChange / 2, 1.0 - scaleChange), blendType='easeInOut'),
            LerpScaleInterval(self.drop, 0.25, Vec3(1.0, 1.0, 1.0), blendType='easeInOut'),
            Func(self.disableCollisionDamage),
            LerpScaleInterval(self.drop, 0.2, Vec3(1.0 + scaleChange / 8, 1.0 + scaleChange / 8, 1.0 - scaleChange / 8), blendType='easeInOut'),
            LerpScaleInterval(self.drop, 0.2, Vec3(1.0, 1.0, 1.0), blendType='easeInOut'),
            LerpScaleInterval(self.drop, 0.15, Vec3(1.0 + scaleChange / 16, 1.0 + scaleChange / 16, 1.0 - scaleChange / 16), blendType='easeInOut'),
            LerpScaleInterval(self.drop, 0.15, Vec3(1.0, 1.0, 1.0), blendType='easeInOut'),
            LerpScaleInterval(self.drop, 0.1, Vec3(1.0 + scaleChange / 16, 1.0 + scaleChange / 8, 1.0 - scaleChange / 16), blendType='easeInOut'),
            LerpColorScaleInterval(self.drop, Globals.DropFadeTime, Vec4(1.0, 1.0, 1.0, 0.0)))
        ival = Sequence(
            Func(self.reparentTo, render),
            Parallel(Sequence(WaitInterval(hangTime), dropIval), shadowIval),
            Parallel(Func(self.game.dropHit, self, id), dropSoundIval, dropShakeSeq),
            Func(self.game.cleanupDrop, id),
            name='drop%s' % id)
self.ival = ival
return ival
def destroy(self):
self.ival.pause()
self.ival = None
self._dropSfx.pause()
self._dropSfx = None
self.collTube = None
self.collNode = None
self.collNodePath.removeNode()
self.collNodePath = None
self.removeNode()
return
class CogdoMazeExit(CogdoGameExit, DirectObject):
EnterEventName = 'CogdoMazeDoor_Enter'
def __init__(self):
CogdoGameExit.__init__(self)
self.revealed = False
self._players = []
self._initCollisions()
def _initCollisions(self):
collSphere = CollisionSphere(0, 0, 0, 3.0)
collSphere.setTangible(0)
self.collNode = CollisionNode(self.getName())
self.collNode.addSolid(collSphere)
self.collNP = self.attachNewNode(self.collNode)
def destroy(self):
self.ignoreAll()
CogdoGameExit.destroy(self)
def enable(self):
self.collNode.setFromCollideMask(ToontownGlobals.WallBitmask)
self.accept('enter' + self.getName(), self._handleEnterCollision)
def disable(self):
self.ignore('enter' + self.getName())
self.collNode.setFromCollideMask(BitMask32(0))
def _handleEnterCollision(self, collEntry):
messenger.send(CogdoMazeExit.EnterEventName, [self])
def onstage(self):
self.unstash()
self.enable()
def offstage(self):
self.stash()
self.disable()
def playerEntersDoor(self, player):
if player not in self._players:
self._players.append(player)
self.toonEnters(player.toon)
def getPlayerCount(self):
return len(self._players)
def hasPlayer(self, player):
return player in self._players
class CogdoMazeWaterCooler(NodePath, DirectObject):
UpdateTaskName = 'CogdoMazeWaterCooler_Update'
def __init__(self, serialNum, model):
NodePath.__init__(self, 'CogdoMazeWaterCooler-%i' % serialNum)
self.serialNum = serialNum
self._model = model
self._model.reparentTo(self)
self._model.setPosHpr(0, 0, 0, 0, 0, 0)
self._initCollisions()
self._initArrow()
self._update = None
self.__startUpdateTask()
return
def destroy(self):
self.ignoreAll()
self.__stopUpdateTask()
self.collNodePath.removeNode()
self.removeNode()
def _initCollisions(self):
offset = Globals.WaterCoolerTriggerOffset
self.collSphere = CollisionSphere(offset[0], offset[1], offset[2], Globals.WaterCoolerTriggerRadius)
self.collSphere.setTangible(0)
name = Globals.WaterCoolerCollisionName
self.collNode = CollisionNode(name)
self.collNode.addSolid(self.collSphere)
self.collNodePath = self.attachNewNode(self.collNode)
def _initArrow(self):
matchingGameGui = loader.loadModel('phase_3.5/models/gui/matching_game_gui')
arrow = matchingGameGui.find('**/minnieArrow')
arrow.setScale(Globals.CoolerArrowScale)
arrow.setColor(*Globals.CoolerArrowColor)
arrow.setPos(0, 0, Globals.CoolerArrowZ)
arrow.setHpr(0, 0, 90)
arrow.setBillboardAxis()
self._arrow = NodePath('Arrow')
arrow.reparentTo(self._arrow)
self._arrow.reparentTo(self)
self._arrowTime = 0
self.accept(Globals.WaterCoolerShowEventName, self.showArrow)
self.accept(Globals.WaterCoolerHideEventName, self.hideArrow)
matchingGameGui.removeNode()
def showArrow(self):
self._arrow.unstash()
def hideArrow(self):
self._arrow.stash()
def update(self, dt):
newZ = math.sin(globalClock.getFrameTime() * Globals.CoolerArrowSpeed) * Globals.CoolerArrowBounce
self._arrow.setZ(newZ)
def __startUpdateTask(self):
self.__stopUpdateTask()
self._update = taskMgr.add(self._updateTask, self.UpdateTaskName, 45)
def __stopUpdateTask(self):
if self._update is not None:
taskMgr.remove(self._update)
return
def _updateTask(self, task):
dt = globalClock.getDt()
self.update(dt)
return Task.cont
| mit | 8,465,987,789,883,547,000 | 39.507874 | 919 | 0.666731 | false |
chromium/chromium | ui/ozone/generate_constructor_list.py | 5 | 5576 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code generator for PlatformObject<> constructor list.
This script takes as arguments a list of platform names as a text file and
a list of types and generates a C++ source file containing a list of
the constructors for that object in platform order.
Example Output: ./ui/ozone/generate_constructor_list.py \
--platform test \
--platform dri \
--export OZONE \
--namespace ui \
--typename OzonePlatform \
--include '"ui/ozone/ozone_platform.h"'
// DO NOT MODIFY. GENERATED BY generate_constructor_list.py
#include "ui/ozone/platform_object_internal.h"
#include "ui/ozone/ozone_platform.h"
namespace ui {
OzonePlatform* CreateOzonePlatformTest();
OzonePlatform* CreateOzonePlatformDri();
} // namespace ui
namespace ui {
typedef ui::OzonePlatform* (*OzonePlatformConstructor)();
template <> const OzonePlatformConstructor
PlatformConstructorList<ui::OzonePlatform>::kConstructors[] = {
&ui::CreateOzonePlatformTest,
&ui::CreateOzonePlatformDri,
};
template class COMPONENT_EXPORT(OZONE) PlatformObject<ui::OzonePlatform>;
} // namespace ui
"""
try:
from StringIO import StringIO # for Python 2
except ImportError:
from io import StringIO # for Python 3
import optparse
import os
import collections
import re
import sys
def GetTypedefName(typename):
"""Determine typedef name of constructor for typename.
This is just typename + "Constructor".
"""
return typename + 'Constructor'
def GetConstructorName(typename, platform):
"""Determine name of static constructor function from platform name.
This is just "Create" + typename + platform.
"""
return 'Create' + typename + platform.capitalize()
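# For example, GetConstructorName('OzonePlatform', 'dri') yields
# 'CreateOzonePlatformDri', matching the sample output in the module
# docstring above.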
def GenerateConstructorList(out, namespace, export, typenames, platforms,
includes, usings):
"""Generate static array containing a list of constructors."""
out.write('// DO NOT MODIFY. GENERATED BY generate_constructor_list.py\n')
out.write('\n')
out.write('#include "ui/ozone/platform_object_internal.h"\n')
out.write('\n')
for include in includes:
out.write('#include %(include)s\n' % {'include': include})
out.write('\n')
for using in usings:
out.write('using %(using)s;\n' % {'using': using})
out.write('\n')
out.write('namespace %(namespace)s {\n' % {'namespace': namespace})
out.write('\n')
# Declarations of constructor functions.
for typename in typenames:
for platform in platforms:
constructor = GetConstructorName(typename, platform)
out.write('%(typename)s* %(constructor)s();\n'
% {'typename': typename,
'constructor': constructor})
out.write('\n')
out.write('} // namespace %(namespace)s\n' % {'namespace': namespace})
out.write('\n')
out.write('namespace ui {\n')
out.write('\n')
# Handy typedefs for constructor types.
for typename in typenames:
out.write('typedef %(typename)s* (*%(typedef)s)();\n'
% {'typename': typename,
'typedef': GetTypedefName(typename)})
out.write('\n')
# The actual constructor lists.
for typename in typenames:
out.write('template <> const %(typedef)s\n'
% {'typedef': GetTypedefName(typename)})
out.write('PlatformConstructorList<%(typename)s>::kConstructors[] = {\n'
% {'typename': typename})
for platform in platforms:
constructor = GetConstructorName(typename, platform)
out.write(' &%(namespace)s::%(constructor)s,\n'
% {'namespace': namespace, 'constructor': constructor})
out.write('};\n')
out.write('\n')
# Exported template instantiation.
for typename in typenames:
out.write('template class COMPONENT_EXPORT(%(export)s)' \
' PlatformObject<%(typename)s>;\n'
% {'export': export, 'typename': typename})
out.write('\n')
out.write('} // namespace ui\n')
out.write('\n')
def main(argv):
parser = optparse.OptionParser()
parser.add_option('--namespace', default='ozone')
parser.add_option('--export', default='OZONE')
parser.add_option('--platform_list')
parser.add_option('--output_cc')
parser.add_option('--include', action='append', default=[])
parser.add_option('--platform', action='append', default=[])
parser.add_option('--typename', action='append', default=[])
parser.add_option('--using', action='append', default=[])
options, _ = parser.parse_args(argv)
platforms = list(options.platform)
typenames = list(options.typename)
includes = list(options.include)
usings = list(options.using)
if options.platform_list:
platforms = open(options.platform_list, 'r').read().strip().split('\n')
if not platforms:
sys.stderr.write('No platforms are selected!')
sys.exit(1)
# Write to standard output or file specified by --output_cc.
out_cc = getattr(sys.stdout, 'buffer', sys.stdout)
if options.output_cc:
out_cc = open(options.output_cc, 'wb')
out_cc_str = StringIO()
GenerateConstructorList(out_cc_str, options.namespace, options.export,
typenames, platforms, includes, usings)
out_cc.write(out_cc_str.getvalue().encode('utf-8'))
if options.output_cc:
out_cc.close()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| bsd-3-clause | -1,612,006,540,855,881,200 | 29.140541 | 76 | 0.65477 | false |
jgibbons-cp/sva_scan_examples | app/sva_scan_examples/config_helper.py | 1 | 1913 | import os
import sys
class ConfigHelper():
"""
Manage all configuration information for the application
"""
def __init__(self):
TRUE = "True"
FALSE = "False"
ERROR = 1
self.halo_key = os.getenv("HALO_API_KEY")
self.halo_secret = os.getenv("HALO_API_SECRET_KEY")
# get the results directory and create it if it does not exist
scan_results_directory = os.environ["SCAN_RESULTS_DIRECTORY"] = \
"/tmp/scan_results/"
path_exists = os.path.exists(scan_results_directory)
if not path_exists:
try:
os.mkdir(scan_results_directory)
path_exists = os.path.exists(scan_results_directory)
except OSError:
pass
days_for_scan_age = os.environ["DAYS_FOR_SCAN_AGE"] = "0"
days_for_scan_age = int(days_for_scan_age)
days_string_is_int_value = isinstance(days_for_scan_age, int)
os.environ["HALO_SERVER_GROUP"] = "Git"
scan_examples = os.environ["SCAN_EXAMPLES"] = "False"
heartbeat_interval = os.environ["HEARTBEAT_INTERVAL"] = "60"
heartbeat_interval = int(heartbeat_interval)
hi_string_is_int_value = isinstance(heartbeat_interval, int)
# for unit tests Travis populates the IP
server_ip = "<server_ip>"
os.environ["SERVER_IP"] = server_ip
unit_tests = os.environ["UNIT_TESTS"] = "no_unit_tests" # NOQA
if self.halo_key is None or self.halo_secret is None \
or not os.path.exists(scan_results_directory) or not path_exists \
or days_string_is_int_value == "False" \
or hi_string_is_int_value == "False" \
or scan_examples != TRUE and scan_examples != FALSE:
print "Configuration validation failed... exiting...\n"
sys.exit(ERROR)
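# A minimal usage sketch (hypothetical key values; HALO_API_KEY and
# HALO_API_SECRET_KEY must be set in the environment before the
# constructor runs its validation):
#
#     os.environ["HALO_API_KEY"] = "example-key"
#     os.environ["HALO_API_SECRET_KEY"] = "example-secret-key"
#     config = ConfigHelper()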
| bsd-2-clause | 6,427,597,855,864,617,000 | 35.788462 | 79 | 0.578672 | false |
emakis/erpnext | erpnext/controllers/taxes_and_totals.py | 1 | 24666 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import json
import frappe, erpnext
from frappe import _, scrub
from frappe.utils import cint, flt, cstr, fmt_money, round_based_on_smallest_currency_fraction
from erpnext.controllers.accounts_controller import validate_conversion_rate, \
validate_taxes_and_charges, validate_inclusive_tax
class calculate_taxes_and_totals(object):
def __init__(self, doc):
self.doc = doc
self.calculate()
def calculate(self):
self.discount_amount_applied = False
self._calculate()
if self.doc.meta.get_field("discount_amount"):
self.set_discount_amount()
self.apply_discount_amount()
if self.doc.doctype in ["Sales Invoice", "Purchase Invoice"]:
self.calculate_total_advance()
if self.doc.meta.get_field("other_charges_calculation"):
self.set_item_wise_tax_breakup()
def _calculate(self):
self.calculate_item_values()
self.initialize_taxes()
self.determine_exclusive_rate()
self.calculate_net_total()
self.calculate_taxes()
self.manipulate_grand_total_for_inclusive_tax()
self.calculate_totals()
self._cleanup()
def validate_conversion_rate(self):
# validate conversion rate
company_currency = erpnext.get_company_currency(self.doc.company)
if not self.doc.currency or self.doc.currency == company_currency:
self.doc.currency = company_currency
self.doc.conversion_rate = 1.0
else:
validate_conversion_rate(self.doc.currency, self.doc.conversion_rate,
self.doc.meta.get_label("conversion_rate"), self.doc.company)
self.doc.conversion_rate = flt(self.doc.conversion_rate)
def calculate_item_values(self):
if not self.discount_amount_applied:
for item in self.doc.get("items"):
self.doc.round_floats_in(item)
if item.discount_percentage == 100:
item.rate = 0.0
elif not item.rate:
item.rate = flt(item.price_list_rate *
(1.0 - (item.discount_percentage / 100.0)), item.precision("rate"))
if item.doctype in ['Quotation Item', 'Sales Order Item', 'Delivery Note Item', 'Sales Invoice Item']:
item.rate_with_margin = self.calculate_margin(item)
item.rate = flt(item.rate_with_margin * (1.0 - (item.discount_percentage / 100.0)), item.precision("rate"))\
if item.rate_with_margin > 0 else item.rate
item.net_rate = item.rate
item.amount = flt(item.rate * item.qty, item.precision("amount"))
item.net_amount = item.amount
self._set_in_company_currency(item, ["price_list_rate", "rate", "net_rate", "amount", "net_amount"])
item.item_tax_amount = 0.0
def _set_in_company_currency(self, doc, fields):
"""set values in base currency"""
for f in fields:
val = flt(flt(doc.get(f), doc.precision(f)) * self.doc.conversion_rate, doc.precision("base_" + f))
doc.set("base_" + f, val)
def initialize_taxes(self):
for tax in self.doc.get("taxes"):
if not self.discount_amount_applied:
validate_taxes_and_charges(tax)
validate_inclusive_tax(tax, self.doc)
tax.item_wise_tax_detail = {}
tax_fields = ["total", "tax_amount_after_discount_amount",
"tax_amount_for_current_item", "grand_total_for_current_item",
"tax_fraction_for_current_item", "grand_total_fraction_for_current_item"]
if tax.charge_type != "Actual" and \
not (self.discount_amount_applied and self.doc.apply_discount_on=="Grand Total"):
tax_fields.append("tax_amount")
for fieldname in tax_fields:
tax.set(fieldname, 0.0)
self.doc.round_floats_in(tax)
def determine_exclusive_rate(self):
if not any((cint(tax.included_in_print_rate) for tax in self.doc.get("taxes"))):
return
for item in self.doc.get("items"):
item_tax_map = self._load_item_tax_rate(item.item_tax_rate)
cumulated_tax_fraction = 0
for i, tax in enumerate(self.doc.get("taxes")):
tax.tax_fraction_for_current_item = self.get_current_tax_fraction(tax, item_tax_map)
if i==0:
tax.grand_total_fraction_for_current_item = 1 + tax.tax_fraction_for_current_item
else:
tax.grand_total_fraction_for_current_item = \
self.doc.get("taxes")[i-1].grand_total_fraction_for_current_item \
+ tax.tax_fraction_for_current_item
cumulated_tax_fraction += tax.tax_fraction_for_current_item
if cumulated_tax_fraction and not self.discount_amount_applied and item.qty:
item.net_amount = flt(item.amount / (1 + cumulated_tax_fraction), item.precision("net_amount"))
item.net_rate = flt(item.net_amount / item.qty, item.precision("net_rate"))
item.discount_percentage = flt(item.discount_percentage, item.precision("discount_percentage"))
self._set_in_company_currency(item, ["net_rate", "net_amount"])
def _load_item_tax_rate(self, item_tax_rate):
return json.loads(item_tax_rate) if item_tax_rate else {}
def get_current_tax_fraction(self, tax, item_tax_map):
"""
Get tax fraction for calculating tax exclusive amount
from tax inclusive amount
"""
current_tax_fraction = 0
if cint(tax.included_in_print_rate):
tax_rate = self._get_tax_rate(tax, item_tax_map)
if tax.charge_type == "On Net Total":
current_tax_fraction = tax_rate / 100.0
elif tax.charge_type == "On Previous Row Amount":
current_tax_fraction = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].tax_fraction_for_current_item
elif tax.charge_type == "On Previous Row Total":
current_tax_fraction = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].grand_total_fraction_for_current_item
if getattr(tax, "add_deduct_tax", None):
current_tax_fraction *= -1.0 if (tax.add_deduct_tax == "Deduct") else 1.0
return current_tax_fraction
def _get_tax_rate(self, tax, item_tax_map):
		if tax.account_head in item_tax_map:
return flt(item_tax_map.get(tax.account_head), self.doc.precision("rate", tax))
else:
return tax.rate
def calculate_net_total(self):
self.doc.total = self.doc.base_total = self.doc.net_total = self.doc.base_net_total = 0.0
for item in self.doc.get("items"):
self.doc.total += item.amount
self.doc.base_total += item.base_amount
self.doc.net_total += item.net_amount
self.doc.base_net_total += item.base_net_amount
self.doc.round_floats_in(self.doc, ["total", "base_total", "net_total", "base_net_total"])
def calculate_taxes(self):
# maintain actual tax rate based on idx
actual_tax_dict = dict([[tax.idx, flt(tax.tax_amount, tax.precision("tax_amount"))]
for tax in self.doc.get("taxes") if tax.charge_type == "Actual"])
for n, item in enumerate(self.doc.get("items")):
item_tax_map = self._load_item_tax_rate(item.item_tax_rate)
for i, tax in enumerate(self.doc.get("taxes")):
# tax_amount represents the amount of tax for the current step
current_tax_amount = self.get_current_tax_amount(item, tax, item_tax_map)
# Adjust divisional loss to the last item
if tax.charge_type == "Actual":
actual_tax_dict[tax.idx] -= current_tax_amount
if n == len(self.doc.get("items")) - 1:
current_tax_amount += actual_tax_dict[tax.idx]
# accumulate tax amount into tax.tax_amount
if tax.charge_type != "Actual" and \
not (self.discount_amount_applied and self.doc.apply_discount_on=="Grand Total"):
tax.tax_amount += current_tax_amount
# store tax_amount for current item as it will be used for
# charge type = 'On Previous Row Amount'
tax.tax_amount_for_current_item = current_tax_amount
# set tax after discount
tax.tax_amount_after_discount_amount += current_tax_amount
if getattr(tax, "category", None):
# if just for valuation, do not add the tax amount in total
# hence, setting it as 0 for further steps
current_tax_amount = 0.0 if (tax.category == "Valuation") \
else current_tax_amount
current_tax_amount *= -1.0 if (tax.add_deduct_tax == "Deduct") else 1.0
# Calculate tax.total viz. grand total till that step
# note: grand_total_for_current_item contains the contribution of
# item's amount, previously applied tax and the current tax on that item
if i==0:
tax.grand_total_for_current_item = flt(item.net_amount + current_tax_amount, tax.precision("total"))
else:
tax.grand_total_for_current_item = \
flt(self.doc.get("taxes")[i-1].grand_total_for_current_item + current_tax_amount, tax.precision("total"))
# in tax.total, accumulate grand total of each item
tax.total += tax.grand_total_for_current_item
# set precision in the last item iteration
if n == len(self.doc.get("items")) - 1:
self.round_off_totals(tax)
# adjust Discount Amount loss in last tax iteration
if i == (len(self.doc.get("taxes")) - 1) and self.discount_amount_applied \
and self.doc.discount_amount and self.doc.apply_discount_on == "Grand Total":
self.adjust_discount_amount_loss(tax)
def get_current_tax_amount(self, item, tax, item_tax_map):
tax_rate = self._get_tax_rate(tax, item_tax_map)
current_tax_amount = 0.0
if tax.charge_type == "Actual":
# distribute the tax amount proportionally to each item row
actual = flt(tax.tax_amount, tax.precision("tax_amount"))
current_tax_amount = item.net_amount*actual / self.doc.net_total if self.doc.net_total else 0.0
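			# e.g. (hypothetical figures) an actual tax of 100 over two items
			# with net amounts 60 and 40 is split as 60 and 40 respectively.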
elif tax.charge_type == "On Net Total":
current_tax_amount = (tax_rate / 100.0) * item.net_amount
elif tax.charge_type == "On Previous Row Amount":
current_tax_amount = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].tax_amount_for_current_item
elif tax.charge_type == "On Previous Row Total":
current_tax_amount = (tax_rate / 100.0) * \
self.doc.get("taxes")[cint(tax.row_id) - 1].grand_total_for_current_item
current_tax_amount = flt(current_tax_amount, tax.precision("tax_amount"))
self.set_item_wise_tax(item, tax, tax_rate, current_tax_amount)
return current_tax_amount
def set_item_wise_tax(self, item, tax, tax_rate, current_tax_amount):
# store tax breakup for each item
key = item.item_code or item.item_name
item_wise_tax_amount = current_tax_amount*self.doc.conversion_rate
if tax.item_wise_tax_detail.get(key):
item_wise_tax_amount += tax.item_wise_tax_detail[key][1]
tax.item_wise_tax_detail[key] = [tax_rate,flt(item_wise_tax_amount, tax.precision("base_tax_amount"))]
def round_off_totals(self, tax):
tax.total = flt(tax.total, tax.precision("total"))
tax.tax_amount = flt(tax.tax_amount, tax.precision("tax_amount"))
tax.tax_amount_after_discount_amount = flt(tax.tax_amount_after_discount_amount, tax.precision("tax_amount"))
self._set_in_company_currency(tax, ["total", "tax_amount", "tax_amount_after_discount_amount"])
def adjust_discount_amount_loss(self, tax):
discount_amount_loss = self.doc.grand_total - flt(self.doc.discount_amount) - tax.total
tax.tax_amount_after_discount_amount = flt(tax.tax_amount_after_discount_amount +
discount_amount_loss, tax.precision("tax_amount"))
tax.total = flt(tax.total + discount_amount_loss, tax.precision("total"))
self._set_in_company_currency(tax, ["total", "tax_amount_after_discount_amount"])
def manipulate_grand_total_for_inclusive_tax(self):
		# If all taxes are inclusive, absorb any remaining rounding
		# difference into the last tax row.
if self.doc.get("taxes") and all(cint(t.included_in_print_rate) for t in self.doc.get("taxes")):
last_tax = self.doc.get("taxes")[-1]
diff = self.doc.total - flt(last_tax.total, self.doc.precision("grand_total"))
if diff and abs(diff) <= (2.0 / 10**last_tax.precision("tax_amount")):
last_tax.tax_amount += diff
last_tax.tax_amount_after_discount_amount += diff
last_tax.total += diff
self._set_in_company_currency(last_tax,
["total", "tax_amount", "tax_amount_after_discount_amount"])
def calculate_totals(self):
self.doc.grand_total = flt(self.doc.get("taxes")[-1].total
if self.doc.get("taxes") else self.doc.net_total)
self.doc.total_taxes_and_charges = flt(self.doc.grand_total - self.doc.net_total,
self.doc.precision("total_taxes_and_charges"))
self._set_in_company_currency(self.doc, ["total_taxes_and_charges"])
if self.doc.doctype in ["Quotation", "Sales Order", "Delivery Note", "Sales Invoice"]:
self.doc.base_grand_total = flt(self.doc.grand_total * self.doc.conversion_rate) \
if self.doc.total_taxes_and_charges else self.doc.base_net_total
else:
self.doc.taxes_and_charges_added = self.doc.taxes_and_charges_deducted = 0.0
for tax in self.doc.get("taxes"):
if tax.category in ["Valuation and Total", "Total"]:
if tax.add_deduct_tax == "Add":
self.doc.taxes_and_charges_added += flt(tax.tax_amount_after_discount_amount)
else:
self.doc.taxes_and_charges_deducted += flt(tax.tax_amount_after_discount_amount)
self.doc.round_floats_in(self.doc, ["taxes_and_charges_added", "taxes_and_charges_deducted"])
self.doc.base_grand_total = flt(self.doc.grand_total * self.doc.conversion_rate) \
if (self.doc.taxes_and_charges_added or self.doc.taxes_and_charges_deducted) \
else self.doc.base_net_total
self._set_in_company_currency(self.doc, ["taxes_and_charges_added", "taxes_and_charges_deducted"])
self.doc.round_floats_in(self.doc, ["grand_total", "base_grand_total"])
if self.doc.meta.get_field("rounded_total"):
self.doc.rounded_total = round_based_on_smallest_currency_fraction(self.doc.grand_total,
self.doc.currency, self.doc.precision("rounded_total"))
if self.doc.meta.get_field("base_rounded_total"):
company_currency = erpnext.get_company_currency(self.doc.company)
self.doc.base_rounded_total = \
round_based_on_smallest_currency_fraction(self.doc.base_grand_total,
company_currency, self.doc.precision("base_rounded_total"))
def _cleanup(self):
for tax in self.doc.get("taxes"):
tax.item_wise_tax_detail = json.dumps(tax.item_wise_tax_detail, separators=(',', ':'))
def set_discount_amount(self):
if self.doc.additional_discount_percentage:
self.doc.discount_amount = flt(flt(self.doc.get(scrub(self.doc.apply_discount_on)))
* self.doc.additional_discount_percentage / 100, self.doc.precision("discount_amount"))
def apply_discount_amount(self):
if self.doc.discount_amount:
if not self.doc.apply_discount_on:
frappe.throw(_("Please select Apply Discount On"))
self.doc.base_discount_amount = flt(self.doc.discount_amount * self.doc.conversion_rate,
self.doc.precision("base_discount_amount"))
total_for_discount_amount = self.get_total_for_discount_amount()
taxes = self.doc.get("taxes")
net_total = 0
if total_for_discount_amount:
# calculate item amount after Discount Amount
for i, item in enumerate(self.doc.get("items")):
distributed_amount = flt(self.doc.discount_amount) * \
item.net_amount / total_for_discount_amount
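				# e.g. (hypothetical figures) a discount of 50 over items with
				# net amounts 300 and 200 distributes 30 and 20 respectively.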
item.net_amount = flt(item.net_amount - distributed_amount, item.precision("net_amount"))
net_total += item.net_amount
# discount amount rounding loss adjustment if no taxes
if (not taxes or self.doc.apply_discount_on == "Net Total") \
and i == len(self.doc.get("items")) - 1:
discount_amount_loss = flt(self.doc.net_total - net_total - self.doc.discount_amount,
self.doc.precision("net_total"))
item.net_amount = flt(item.net_amount + discount_amount_loss,
item.precision("net_amount"))
item.net_rate = flt(item.net_amount / item.qty, item.precision("net_rate")) if item.qty else 0
self._set_in_company_currency(item, ["net_rate", "net_amount"])
self.discount_amount_applied = True
self._calculate()
else:
self.doc.base_discount_amount = 0
def get_total_for_discount_amount(self):
if self.doc.apply_discount_on == "Net Total":
return self.doc.net_total
else:
actual_taxes_dict = {}
for tax in self.doc.get("taxes"):
if tax.charge_type == "Actual":
actual_taxes_dict.setdefault(tax.idx, tax.tax_amount)
elif tax.row_id in actual_taxes_dict:
actual_tax_amount = flt(actual_taxes_dict.get(tax.row_id, 0)) * flt(tax.rate) / 100
actual_taxes_dict.setdefault(tax.idx, actual_tax_amount)
return flt(self.doc.grand_total - sum(actual_taxes_dict.values()), self.doc.precision("grand_total"))
def calculate_total_advance(self):
if self.doc.docstatus < 2:
total_allocated_amount = sum([flt(adv.allocated_amount, adv.precision("allocated_amount"))
for adv in self.doc.get("advances")])
self.doc.total_advance = flt(total_allocated_amount, self.doc.precision("total_advance"))
if self.doc.party_account_currency == self.doc.currency:
invoice_total = flt(self.doc.grand_total - flt(self.doc.write_off_amount),
self.doc.precision("grand_total"))
else:
base_write_off_amount = flt(flt(self.doc.write_off_amount) * self.doc.conversion_rate,
self.doc.precision("base_write_off_amount"))
invoice_total = flt(self.doc.grand_total * self.doc.conversion_rate,
self.doc.precision("grand_total")) - base_write_off_amount
if invoice_total > 0 and self.doc.total_advance > invoice_total:
frappe.throw(_("Advance amount cannot be greater than {0} {1}")
.format(self.doc.party_account_currency, invoice_total))
if self.doc.docstatus == 0:
self.calculate_outstanding_amount()
def calculate_outstanding_amount(self):
# NOTE:
# write_off_amount is only for POS Invoice
# total_advance is only for non POS Invoice
if self.doc.doctype == "Sales Invoice":
self.calculate_paid_amount()
if self.doc.is_return: return
self.doc.round_floats_in(self.doc, ["grand_total", "total_advance", "write_off_amount"])
self._set_in_company_currency(self.doc, ['write_off_amount'])
if self.doc.party_account_currency == self.doc.currency:
total_amount_to_pay = flt(self.doc.grand_total - self.doc.total_advance
- flt(self.doc.write_off_amount), self.doc.precision("grand_total"))
else:
total_amount_to_pay = flt(flt(self.doc.grand_total *
self.doc.conversion_rate, self.doc.precision("grand_total")) - self.doc.total_advance
- flt(self.doc.base_write_off_amount), self.doc.precision("grand_total"))
if self.doc.doctype == "Sales Invoice":
self.doc.round_floats_in(self.doc, ["paid_amount"])
self.calculate_write_off_amount()
self.calculate_change_amount()
paid_amount = self.doc.paid_amount \
if self.doc.party_account_currency == self.doc.currency else self.doc.base_paid_amount
change_amount = self.doc.change_amount \
if self.doc.party_account_currency == self.doc.currency else self.doc.base_change_amount
self.doc.outstanding_amount = flt(total_amount_to_pay - flt(paid_amount) +
flt(change_amount), self.doc.precision("outstanding_amount"))
elif self.doc.doctype == "Purchase Invoice":
self.doc.outstanding_amount = flt(total_amount_to_pay, self.doc.precision("outstanding_amount"))
def calculate_paid_amount(self):
paid_amount = base_paid_amount = 0.0
if self.doc.is_pos:
for payment in self.doc.get('payments'):
payment.amount = flt(payment.amount)
payment.base_amount = payment.amount * flt(self.doc.conversion_rate)
paid_amount += payment.amount
base_paid_amount += payment.base_amount
elif not self.doc.is_return:
self.doc.set('payments', [])
self.doc.paid_amount = flt(paid_amount, self.doc.precision("paid_amount"))
self.doc.base_paid_amount = flt(base_paid_amount, self.doc.precision("base_paid_amount"))
def calculate_change_amount(self):
self.doc.change_amount = 0.0
self.doc.base_change_amount = 0.0
if self.doc.paid_amount > self.doc.grand_total and not self.doc.is_return \
and any([d.type == "Cash" for d in self.doc.payments]):
self.doc.change_amount = flt(self.doc.paid_amount - self.doc.grand_total +
self.doc.write_off_amount, self.doc.precision("change_amount"))
self.doc.base_change_amount = flt(self.doc.base_paid_amount - self.doc.base_grand_total +
self.doc.base_write_off_amount, self.doc.precision("base_change_amount"))
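	# Worked example (illustrative): paid_amount 105, grand_total 100 and
	# write_off_amount 0 with a "Cash" payment row give
	# change_amount = 105 - 100 + 0 = 5; the base_* fields repeat the same
	# arithmetic in company currency.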
def calculate_write_off_amount(self):
if flt(self.doc.change_amount) > 0:
self.doc.write_off_amount = flt(self.doc.grand_total - self.doc.paid_amount + self.doc.change_amount,
self.doc.precision("write_off_amount"))
self.doc.base_write_off_amount = flt(self.doc.write_off_amount * self.doc.conversion_rate,
self.doc.precision("base_write_off_amount"))
def calculate_margin(self, item):
rate_with_margin = 0.0
if item.price_list_rate:
if item.pricing_rule and not self.doc.ignore_pricing_rule:
pricing_rule = frappe.get_doc('Pricing Rule', item.pricing_rule)
item.margin_type = pricing_rule.margin_type
item.margin_rate_or_amount = pricing_rule.margin_rate_or_amount
if item.margin_type and item.margin_rate_or_amount:
margin_value = item.margin_rate_or_amount if item.margin_type == 'Amount' else flt(item.price_list_rate) * flt(item.margin_rate_or_amount) / 100
rate_with_margin = flt(item.price_list_rate) + flt(margin_value)
return rate_with_margin
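	# Worked example (illustrative): price_list_rate 200 with
	# margin_type "Percentage" and margin_rate_or_amount 10 gives
	# rate_with_margin = 200 + 200 * 10 / 100 = 220; with margin_type
	# "Amount" the margin value is added as-is.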
def set_item_wise_tax_breakup(self):
item_tax = {}
tax_accounts = []
company_currency = erpnext.get_company_currency(self.doc.company)
item_tax, tax_accounts = self.get_item_tax(item_tax, tax_accounts, company_currency)
headings = get_table_column_headings(tax_accounts)
distinct_items, taxable_amount = self.get_distinct_items()
rows = get_table_rows(distinct_items, item_tax, tax_accounts, company_currency, taxable_amount)
if not rows:
self.doc.other_charges_calculation = ""
else:
self.doc.other_charges_calculation = '''
<div class="tax-break-up" style="overflow-x: auto;">
<table class="table table-bordered table-hover">
<thead><tr>{headings}</tr></thead>
<tbody>{rows}</tbody>
</table>
</div>'''.format(**{
"headings": "".join(headings),
"rows": "".join(rows)
})
def get_item_tax(self, item_tax, tax_accounts, company_currency):
for tax in self.doc.taxes:
tax_amount_precision = tax.precision("tax_amount")
			tax_rate_precision = tax.precision("rate")
item_tax_map = self._load_item_tax_rate(tax.item_wise_tax_detail)
for item_code, tax_data in item_tax_map.items():
if not item_tax.get(item_code):
item_tax[item_code] = {}
if isinstance(tax_data, list):
tax_rate = ""
if tax_data[0]:
if tax.charge_type == "Actual":
tax_rate = fmt_money(flt(tax_data[0], tax_amount_precision),
tax_amount_precision, company_currency)
else:
tax_rate = cstr(flt(tax_data[0], tax_rate_precision)) + "%"
tax_amount = fmt_money(flt(tax_data[1], tax_amount_precision),
tax_amount_precision, company_currency)
item_tax[item_code][tax.name] = [tax_rate, tax_amount]
else:
item_tax[item_code][tax.name] = [cstr(flt(tax_data, tax_rate_precision)) + "%", ""]
tax_accounts.append([tax.name, tax.account_head])
return item_tax, tax_accounts
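	# Illustrative shape of the parsed data (hypothetical values): for an
	# item_wise_tax_detail of '{"ITEM-001": [5.0, 12.5]}' on a non-"Actual"
	# row, item_tax ends up as {"ITEM-001": {tax.name: ["5%", "12.50"]}}
	# (amount formatted in company currency) and (tax.name, tax.account_head)
	# is appended to tax_accounts.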
def get_distinct_items(self):
distinct_item_names = []
distinct_items = []
taxable_amount = {}
for item in self.doc.items:
item_code = item.item_code or item.item_name
if item_code not in distinct_item_names:
distinct_item_names.append(item_code)
distinct_items.append(item)
taxable_amount[item_code] = item.net_amount
else:
taxable_amount[item_code] = taxable_amount.get(item_code, 0) + item.net_amount
return distinct_items, taxable_amount
def get_table_column_headings(tax_accounts):
headings_name = [_("Item Name"), _("Taxable Amount")] + [d[1] for d in tax_accounts]
headings = []
for head in headings_name:
if head == _("Item Name"):
headings.append('<th style="min-width: 120px;" class="text-left">' + (head or "") + "</th>")
else:
headings.append('<th style="min-width: 80px;" class="text-right">' + (head or "") + "</th>")
return headings
def get_table_rows(distinct_items, item_tax, tax_accounts, company_currency, taxable_amount):
rows = []
for item in distinct_items:
item_tax_record = item_tax.get(item.item_code or item.item_name)
if not item_tax_record:
continue
taxes = []
for head in tax_accounts:
if item_tax_record[head[0]]:
taxes.append("<td class='text-right'>(" + item_tax_record[head[0]][0] + ") "
+ item_tax_record[head[0]][1] + "</td>")
else:
taxes.append("<td></td>")
item_code = item.item_code or item.item_name
rows.append("<tr><td>{item_name}</td><td class='text-right'>{taxable_amount}</td>{taxes}</tr>".format(**{
"item_name": item.item_name,
"taxable_amount": fmt_money(taxable_amount.get(item_code, 0), item.precision("net_amount"), company_currency),
"taxes": "".join(taxes)
}))
return rows | gpl-3.0 | 2,259,971,950,055,743,200 | 39.043831 | 148 | 0.689167 | false |
nismod/energy_demand | energy_demand/plotting/fig_p2_weather_val.py | 1 | 14554 | """Fig 2 figure
"""
import numpy as np
import matplotlib.pyplot as plt
#from scipy.stats import mstats
import pandas as pd
import geopandas as gpd
from scipy import stats
from shapely.geometry import Point
from collections import defaultdict
from matplotlib.colors import Normalize
from energy_demand.plotting import result_mapping
from energy_demand.technologies import tech_related
from energy_demand.plotting import basic_plot_functions
def run(
data_input,
regions,
simulation_yr_to_plot,
population,
fueltype_str,
path_shapefile,
fig_name
):
"""
"""
fueltype_int = tech_related.get_fueltype_int(fueltype_str)
# -----------------------------------------------------------
# Iterate overall weather_yrs and store data in dataframe
# (columns = timestep, rows: value of year)
# -----------------------------------------------------------
# List of selected data for every weather year and region (which is then converted to array)
weather_yrs_data = defaultdict(dict)
print("Weather yrs: " + str(list(data_input.keys())), flush=True)
for weather_yr, data_weather_yr in data_input.items():
# Weather year specific data for every region
regions_fuel = data_weather_yr[simulation_yr_to_plot][fueltype_int] # Select fueltype
for region_nr, region_name in enumerate(regions):
try:
weather_yrs_data[region_name].append(regions_fuel[region_nr])
except (KeyError, AttributeError):
weather_yrs_data[region_name] = [regions_fuel[region_nr]]
regional_statistics_columns = [
'name',
'mean_peak_h',
'diff_av_max',
'mean_peak_h_pp',
'diff_av_max_pp',
'std_dev_average_every_h',
'std_dev_peak_h_norm_pop']
df_stats = pd.DataFrame(columns=regional_statistics_columns)
for region_name, region_data in weather_yrs_data.items():
# Convert regional data to dataframe
region_data_array = np.array(region_data)
df = pd.DataFrame(
region_data_array,
columns=range(8760))
# Calculate regional statistics
mean = df.mean(axis=0)
std_dev = df.std(axis=0) #standard deviation across every hour
        # Get maximum per column
        #max_every_h = df.max()
        #colum_max_h = max_every_h.argmax() #get column (respectively hour) of maximum value
# Average standard deviation across every hour
std_dev_average_every_h = np.std(list(std_dev))
max_entry = df.max(axis=0) #maximum entry for every hour
        min_entry = df.min(axis=0) #minimum entry for every hour
# Get hour number with maximum demand
hour_nr_max = max_entry.argmax()
hour_nr_min = min_entry.argmin()
# standard deviation of peak hour
std_dev_peak_h = std_dev[hour_nr_max]
# Difference between average and max
diff_av_max = max_entry[hour_nr_max] - mean[hour_nr_max]
mean_peak_h = mean[hour_nr_max]
# Convert GW to KW
diff_av_max = diff_av_max * 1000000 #GW to KW
mean_peak_h = mean_peak_h * 1000000 #GW to KW
# Weight with population
for region_nr, n in enumerate(regions):
if region_name == n:
nr_of_reg = region_nr
break
pop = population[nr_of_reg]
# Divide standard deviation of peak hour by population
# which gives measure of weather variability in peak hour
std_dev_peak_h_norm_pop = std_dev_peak_h / pop
diff_av_max_pp = diff_av_max / pop
mean_peak_h_pp = mean_peak_h / pop
line_entry = [[
str(region_name),
mean_peak_h,
diff_av_max,
mean_peak_h_pp,
diff_av_max_pp,
std_dev_average_every_h,
std_dev_peak_h_norm_pop]]
line_df = pd.DataFrame(
line_entry, columns=regional_statistics_columns)
df_stats = df_stats.append(line_df)
print(df_stats['diff_av_max'].max())
print(df_stats['mean_peak_h'].max())
print(df_stats['std_dev_peak_h_norm_pop'].max())
print("-")
print(df_stats['diff_av_max_pp'].max())
print(df_stats['diff_av_max_pp'].min())
print("-")
print(df_stats['mean_peak_h_pp'].max())
print(df_stats['mean_peak_h_pp'].min())
# ---------------
# Create spatial maps
# http://darribas.org/gds15/content/labs/lab_03.html
# http://nbviewer.jupyter.org/gist/jorisvandenbossche/57d392c085901eb4981054402b37b6b1
# ---------------
# Load uk shapefile
uk_shapefile = gpd.read_file(path_shapefile)
# Merge stats to geopanda
shp_gdp_merged = uk_shapefile.merge(
df_stats,
on='name')
# Assign projection
crs = {'init': 'epsg:27700'} #27700: OSGB_1936_British_National_Grid
uk_gdf = gpd.GeoDataFrame(shp_gdp_merged, crs=crs)
ax = uk_gdf.plot()
# Assign bin colors according to defined cmap and whether
# plot with min_max values or only min/max values
#bin_values = [0, 0.0025, 0.005, 0.0075, 0.01]
#bin_values = [0, 0.02, 0.04, 0.06, 0.08, 0.1] #list(np.arange(0.0, 1.0, 0.1))
# Field to plot
field_to_plot = "diff_av_max_pp" # Difference between average and peak per person in KWh
#field_to_plot = "diff_av_max" # Difference between average and peak
field_to_plot = 'std_dev_peak_h_norm_pop'
nr_of_intervals = 6
bin_values = result_mapping.get_reasonable_bin_values_II(
data_to_plot=list(uk_gdf[field_to_plot]),
nr_of_intervals=nr_of_intervals)
    print(list(uk_gdf[field_to_plot]))
print("BINS " + str(bin_values))
uk_gdf, cmap_rgb_colors, color_zero, min_value, max_value = user_defined_bin_classification(
uk_gdf,
field_to_plot,
bin_values=bin_values)
# plot with face color attribute
uk_gdf.plot(ax=ax, facecolor=uk_gdf['bin_color'], edgecolor='black', linewidth=0.5)
#shp_gdp_merged.plot(column='diff_av_max', scheme='QUANTILES', k=5, cmap='OrRd', linewidth=0.1)
#ax = uk_gdf.plot(column='diff_av_max', scheme='QUANTILES', k=5, cmap='OrRd', linewidth=0.1)
#uk_gdf[uk_gdf['name'] == 'E06000024'].plot(ax=ax, facecolor='green', edgecolor='black')
#uk_gdf[uk_gdf['diff_av_max'] < 0.01].plot(ax=ax, facecolor='blue', edgecolor='black')
# Get legend patches TODO IMPROVE
# TODO IMRPVE: MAKE CORRECT ONE FOR NEW PROCESSING
legend_handles = result_mapping.get_legend_handles(
bin_values[1:-1],
cmap_rgb_colors,
color_zero,
min_value,
max_value)
plt.legend(
handles=legend_handles,
        title="Legend",
prop={'size': 8},
loc='upper center',
bbox_to_anchor=(0.5, -0.05),
frameon=False)
# PLot bins on plot
plt.text(
0,
-20,
        bin_values[:-1], #leave out the maximum value
fontsize=8)
plt.tight_layout()
plt.show()
plt.savefig(fig_name)
plt.close()
def norm_cmap(values, cmap, vmin=None, vmax=None):
"""
Normalize and set colormap
Parameters
----------
values : Series or array to be normalized
cmap : matplotlib Colormap
normalize : matplotlib.colors.Normalize
cm : matplotlib.cm
vmin : Minimum value of colormap. If None, uses min(values).
vmax : Maximum value of colormap. If None, uses max(values).
Returns
-------
n_cmap : mapping of normalized values to colormap (cmap)
Source
------
https://ocefpaf.github.io/python4oceanographers/blog/2015/08/24/choropleth/
"""
    mn = min(values) if vmin is None else vmin
    mx = max(values) if vmax is None else vmax
norm = Normalize(vmin=mn, vmax=mx)
n_cmap = plt.cm.ScalarMappable(norm=norm, cmap=cmap)
rgb_colors = [n_cmap.to_rgba(value) for value in values]
return n_cmap, rgb_colors
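# --- Illustrative usage sketch (not part of the original module) ---
# Shows the intended call pattern for norm_cmap; 'viridis' is an arbitrary
# example colormap and the values are hypothetical.
def _example_norm_cmap():
    values = [0.0, 2.5, 5.0]
    n_cmap, rgb_colors = norm_cmap(values, cmap='viridis')
    # rgb_colors holds one RGBA tuple per input value, e.g. for use as
    # facecolors when plotting a GeoDataFrame.
    return rgb_colors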
def plot_colors(rgb_colors):
"""function to plot colors
"""
nr_dots = len(rgb_colors)
dots = []
x = []
y = []
for i in range(nr_dots):
x.append(i + 20)
y.append(i + 20)
#plt.scatter(x, y, c=cmap, s=50)
plt.scatter(x, y, c=rgb_colors, s=50)
plt.show()
def user_defined_bin_classification(
input_df,
field_name,
bin_values,
cmap_diverging=None,
cmap_sequential=None
):
"""Classify values according to bins
Arguments
---------
input_df : dataframe
Dataframe to plot
    field_name : str
        Column of input_df holding the values to classify
    bin_values : list
        Sorted bin edges used for the classification
cmap_sequential : str
'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds','YlOrBr',
'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu', 'GnBu', 'PuBu', 'YlGnBu',
'PuBuGn', 'BuGn', 'YlGn'
cmap_diverging : str
'PiYG', 'PRGn', 'BrBG', 'PuOr', 'RdGy', 'RdBu', 'RdYlBu', 'RdYlGn',
'Spectral', 'coolwarm', 'bwr', 'seismic'
Info
-----
Include 0 in min_max_plot == False
Python colors:
https://matplotlib.org/1.4.1/examples/color/colormaps_reference.html
https://ocefpaf.github.io/python4oceanographers/blog/2015/08/24/choropleth/
https://matplotlib.org/examples/color/colormaps_reference.html
"""
# Check if largest value is large than last bin
max_real_value = float(input_df[field_name].max())
min_real_value = float(input_df[field_name].min())
if max_real_value > 0 and min_real_value < 0:
min_max_plot = True
else:
min_max_plot = False
if not min_max_plot:
# If only minus values
if max_real_value < 0: #only min values
            if min_real_value < bin_values[0]:
                # add "lower than bin"
                bin_values.insert(0, min_real_value)
            elif bin_values[0] < min_real_value:
                raise Exception("The smallest user-defined bin edge is below the minimum existing value")
if not cmap_sequential:
                cmap, cmap_rgb_colors = norm_cmap(bin_values[:-1], cmap='Purples') #'YlOrBr'
else:
                cmap, cmap_rgb_colors = norm_cmap(bin_values[:-1], cmap=cmap_sequential) #'YlOrBr'
else: #only positive values
if max_real_value > bin_values[-1]:
# add "higher as bin"
bin_values.append(max_real_value)
elif bin_values[-1] > max_real_value:
raise Exception("The maximum user defined bin value is larger than maximum min: min: {} max: {}".format(bin_values[-1], max_real_value))
if not cmap_sequential:
cmap, cmap_rgb_colors = norm_cmap(bin_values[1:], cmap='Purples')
else:
cmap, cmap_rgb_colors = norm_cmap(bin_values[1:], cmap=cmap_sequential)
# e.g. [0, 3, 6] --> generates (0, 3], and (3, 6] bin
input_df['bin_color'] = pd.cut(
input_df[field_name],
bin_values,
include_lowest=True,
right=True,
labels=cmap_rgb_colors)
color_zero = 'grey' # default
else:
if max_real_value < bin_values[-1]:
raise Exception("The maximum user defined bin value is larger than maximum value {} {}".format(bin_values[-1], max_real_value))
elif min_real_value > bin_values[0]:
raise Exception("The minimum user defined bin smaller is larger than minimum existing value")
else:
pass
# Add minimum and maximum value
bin_values.append(max_real_value)
bin_values.insert(0, min_real_value)
if not cmap_diverging:
cmap, cmap_rgb_colors = norm_cmap(bin_values, cmap='coolwarm')
else:
cmap, cmap_rgb_colors = norm_cmap(bin_values, cmap=cmap_diverging)
# Reclassify zero value
positive_bin_colors = []
minus_bin_colors = []
minus_bins = []
positive_bins = [0]
for cnt, i in enumerate(bin_values):
if i < 0:
minus_bin_colors.append(cmap_rgb_colors[cnt])
minus_bins.append(i)
elif i == 0:
color_zero = cmap_rgb_colors[cnt]
else:
positive_bin_colors.append(cmap_rgb_colors[cnt])
positive_bins.append(i)
minus_bins.append(0)
# ----
# Classify
# ----
# Classify values in dataframe and assign color value as "bin" column
minus_dataframe = input_df[field_name][input_df[field_name] < 0].to_frame()
zero_dataframe = input_df[field_name][input_df[field_name] == 0].to_frame()
plus_dataframe = input_df[field_name][input_df[field_name] > 0].to_frame()
# e.g. [0, 3, 6] --> generates (0, 3], and (3, 6] bin
minus_dataframe['bin_color'] = pd.cut(
minus_dataframe[field_name],
minus_bins,
include_lowest=True,
right=True,
labels=minus_bin_colors)
zero_dataframe['bin_color'] = [color_zero for _ in range(len(zero_dataframe))] #create list with zero color
plus_dataframe['bin_color'] = pd.cut(
plus_dataframe[field_name],
positive_bins,
include_lowest=True,
right=True,
labels=positive_bin_colors)
# Add bins
input_df = minus_dataframe.append(zero_dataframe)
input_df = input_df.append(plus_dataframe)
return input_df, cmap_rgb_colors, color_zero, min_real_value, max_real_value
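# --- Illustrative sketch of the binning used above (not original code) ---
# pd.cut with bins [0, 3, 6] builds the half-open intervals (0, 3] and
# (3, 6]; include_lowest=True pulls 0 into the first bin. The values and
# hex colours below are hypothetical.
def _example_bin_classification():
    df = pd.DataFrame({'value': [0, 1, 4, 6]})
    df['bin_color'] = pd.cut(
        df['value'],
        [0, 3, 6],
        include_lowest=True,
        right=True,
        labels=['#aaaaaa', '#555555'])
    return df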
'''ax = input_df.plot()
# Calculate color values
#uk_gdf[uk_gdf['name'] == 'E06000024'].plot(ax=ax, facecolor='green', edgecolor='black')
#uk_gdf[uk_gdf['diff_av_max'] < 0.01].plot(ax=ax, facecolor='blue', edgecolor='black')
# Convert dict to dataframe
#df = pd.DataFrame.from_dict(input_df, orient='index')
#df['Coordinates'] = list(zip(df.longitude, df.latitude))
#df['Coordinates'] = df['Coordinates'].apply(Point)
# Load uk shapefile
uk_shapefile = gpd.read_file(path_shapefile)
# Assign correct projection
crs = {'init': 'epsg:27700'} #27700 == OSGB_1936_British_National_Grid
uk_gdf = gpd.GeoDataFrame(uk_shapefile, crs=crs)
# Transform
uk_gdf = uk_gdf.to_crs({'init' :'epsg:4326'})
# Plot
ax = uk_gdf.plot(color='white', edgecolor='black')
# print coordinates
#world.plot(column='gdp_per_cap', cmap='OrRd', scheme='quantiles');
plt.savefig(fig_path)'''
| mit | 2,933,761,730,508,633,600 | 32.457471 | 152 | 0.589735 | false |
sivertkh/gtrackcore | gtrackcore/track_operations/raw_operations/Shift.py | 1 | 4473 |
import numpy as np
def shift(starts, ends, regionSize, strands=None, shiftLength=None,
useFraction=False, useStrands=True, treatMissingAsNegative=False):
"""
Shift elements in a track a give nr of BP.
:param starts: numpy array. Starts
:param ends: numpy array. Ends
:param regionSize: Int. The regions max size.
:param strands: numpy array. Strand info
    :param shiftLength: Int or float. Nr of BP to shift the segments by, or
    the fraction of each segment's length when useFraction is set.
    :param useFraction: Boolean. Interpret shiftLength as a fraction of the
    size of each segment.
    :param useStrands: Boolean. Shift segments in the direction given by
    their strand information.
    :param treatMissingAsNegative: Boolean. Treat segments with missing
    strand information as negative. Default is False, which treats them as
    positive.
    :return: New shifted track as starts, ends, index and strands
"""
assert shiftLength is not None
if useStrands and strands is None:
# We need strand info to follow it.
useStrands = False
if useStrands:
# Shift in the strand direction.
if treatMissingAsNegative:
positiveIndex = np.where(strands == '+')
negativeIndex = np.where((strands == '-') | (strands == '.'))
else:
positiveIndex = np.where((strands == '+') | (strands == '.'))
negativeIndex = np.where(strands == '-')
if useFraction:
positiveLengths = ends[positiveIndex] - starts[positiveIndex]
negativeLengths = ends[negativeIndex] - starts[negativeIndex]
positiveShift = positiveLengths * shiftLength
#positiveShift = positiveShift.astype(int)
positiveShift = np.around(positiveShift).astype(int)
negativeShift = negativeLengths * shiftLength
#negativeShift = negativeShift.astype(int)
negativeShift = np.around(negativeShift).astype(int)
else:
positiveShift = shiftLength
negativeShift = shiftLength
# Update the positive segments
starts[positiveIndex] = starts[positiveIndex] + positiveShift
ends[positiveIndex] = ends[positiveIndex] + positiveShift
# Update the negative segments
starts[negativeIndex] = starts[negativeIndex] - negativeShift
ends[negativeIndex] = ends[negativeIndex] - negativeShift
else:
if useFraction:
# Using a fraction of the length as a basis for the shift.
# Round to int
lengths = ends - starts
shiftLength = lengths * shiftLength
shiftLength = np.around(shiftLength).astype(int)
#shiftLength = shiftLength.astype(int)
# Strand is not given or we are to ignore it.
starts = starts + shiftLength
ends = ends + shiftLength
# We now check and fix any underflow/overflow
# This is where one of the segments is shifted under 0 or over the size
# of the region.
# Create a index to use in the trackView creation
index = np.arange(0, len(starts), 1, dtype='int32')
# Find end underflow and start overflow first. These segments can be
# removed directly.
endUnderflowIndex = np.where(ends < 0)
starts = np.delete(starts, endUnderflowIndex)
ends = np.delete(ends, endUnderflowIndex)
index = np.delete(index, endUnderflowIndex)
startOverflowIndex = np.where(starts > regionSize)
starts = np.delete(starts, startOverflowIndex)
ends = np.delete(ends, startOverflowIndex)
index = np.delete(index, startOverflowIndex)
# Find start underflow and set it to 0.
startUnderflowIndex = np.where(starts < 0)
starts[startUnderflowIndex] = 0
# Find end overflow and set i to regionSize.
endOverflowIndex = np.where(ends > regionSize)
ends[endOverflowIndex] = regionSize
    # When two segments overlap totally we can end up with zero-length
    # "dangling" points (start == end)... For now we fix it by removing all
    # such points. This is probably not the way to go..
    # if (starts == ends).any():
danglingPoints = np.where(starts == ends)
starts = np.delete(starts, danglingPoints)
ends = np.delete(ends, danglingPoints)
if strands is not None:
strands = np.delete(strands, danglingPoints)
index = np.delete(index, danglingPoints)
return starts, ends, index, strands
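# --- Illustrative usage sketch (not part of the original module) ---
# Plain bp shift without strand handling; all values are hypothetical.
def _example_shift_usage():
    starts = np.array([10, 50])
    ends = np.array([20, 60])
    newStarts, newEnds, index, strands = shift(
        starts, ends, regionSize=100, shiftLength=5, useStrands=False)
    # newStarts -> [15 55], newEnds -> [25 65], index -> [0 1],
    # strands -> None
    return newStarts, newEnds, index, strands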
| gpl-3.0 | 2,891,743,441,771,215,000 | 36.588235 | 77 | 0.663537 | false |
Cabalist/Comix | src/preferences.py | 1 | 25264 | # coding=utf-8
"""preferences.py - Preference handler."""
from __future__ import absolute_import
import cPickle
import os
import gtk
from src import constants
from src import labels
ZOOM_MODE_BEST = 0
ZOOM_MODE_WIDTH = 1
ZOOM_MODE_HEIGHT = 2
ZOOM_MODE_MANUAL = 3
# All the preferences are stored here.
prefs = {
'comment extensions': ['txt', 'nfo'],
'auto load last file': False,
'page of last file': 1,
'path to last file': '',
'auto open next archive': True,
'bg colour': (5000, 5000, 5000),
'checkered bg for transparent images': True,
'cache': True,
'animate gifs': False,
'animate': False,
'stretch': False,
'default double page': False,
'default fullscreen': False,
'default zoom mode': ZOOM_MODE_BEST,
'default manga mode': False,
'lens magnification': 2,
'lens size': 200,
'no double page for wide images': False,
'double step in double page mode': True,
'show page numbers on thumbnails': True,
'thumbnail size': 80,
'create thumbnails': True,
'slideshow delay': 3000,
'smart space scroll': True,
'flip with wheel': False,
'smart bg': False,
'store recent file info': True,
'hide all': False,
'hide all in fullscreen': True,
'stored hide all values': (True, True, True, True, True),
'path of last browsed in filechooser': constants.HOME_DIR,
'last path in save filechooser': './',
'last filter in main filechooser': 0,
'last filter in library filechooser': 1,
'show menubar': True,
'show scrollbar': True,
'show statusbar': True,
'show toolbar': True,
'show thumbnails': True,
'rotation': 0,
'auto rotate from exif': True,
'vertical flip': False,
'horizontal flip': False,
'keep transformation': False,
'window height': gtk.gdk.screen_get_default().get_height() * 3 // 4,
'window width': min(gtk.gdk.screen_get_default().get_width() * 3 // 4,
gtk.gdk.screen_get_default().get_height() * 5 // 8),
'library cover size': 128,
'auto add books into collections': True,
'last library collection': None,
'lib window height': gtk.gdk.screen_get_default().get_height() * 3 // 4,
'lib window width': gtk.gdk.screen_get_default().get_width() * 3 // 4
}
_config_path = os.path.join(constants.CONFIG_DIR, 'preferences.pickle')
_dialog = None
class _PreferencesDialog(gtk.Dialog):
"""The preferences dialog where most (but not all) settings that are
saved between sessions are presented to the user.
"""
def __init__(self, window):
self._window = window
gtk.Dialog.__init__(self, _('Preferences'), window, 0,
(gtk.STOCK_CLOSE, gtk.RESPONSE_CLOSE))
self.connect('response', self._response)
self.set_has_separator(False)
self.set_resizable(True)
self.set_default_response(gtk.RESPONSE_CLOSE)
notebook = gtk.Notebook()
self.vbox.pack_start(notebook)
self.set_border_width(4)
notebook.set_border_width(6)
# ----------------------------------------------------------------
# The "Appearance" tab.
# ----------------------------------------------------------------
page = _PreferencePage(80)
page.new_section(_('Background'))
fixed_bg_button = gtk.RadioButton(None, '%s:' %
_('Use this colour as background'))
fixed_bg_button.set_tooltip_text(
_('Always use this selected colour as the background colour.'))
color_button = gtk.ColorButton(gtk.gdk.Color(*prefs['bg colour']))
color_button.connect('color_set', self._color_button_cb)
page.add_row(fixed_bg_button, color_button)
dynamic_bg_button = gtk.RadioButton(fixed_bg_button,
_('Use dynamic background colour.'))
dynamic_bg_button.set_active(prefs['smart bg'])
dynamic_bg_button.connect('toggled', self._check_button_cb, 'smart bg')
dynamic_bg_button.set_tooltip_text(
_('Automatically pick a background colour that fits the viewed image.'))
page.add_row(dynamic_bg_button)
page.new_section(_('Thumbnails'))
label = gtk.Label('%s:' % _('Thumbnail size (in pixels)'))
adjustment = gtk.Adjustment(prefs['thumbnail size'], 20, 128, 1, 10)
thumb_size_spinner = gtk.SpinButton(adjustment)
thumb_size_spinner.connect('value_changed', self._spinner_cb,
'thumbnail size')
page.add_row(label, thumb_size_spinner)
thumb_number_button = gtk.CheckButton(
_('Show page numbers on thumbnails.'))
thumb_number_button.set_active(
prefs['show page numbers on thumbnails'])
thumb_number_button.connect('toggled', self._check_button_cb,
'show page numbers on thumbnails')
page.add_row(thumb_number_button)
page.new_section(_('Magnifying Glass'))
label = gtk.Label('%s:' % _('Magnifying glass size (in pixels)'))
adjustment = gtk.Adjustment(prefs['lens size'], 50, 400, 1, 10)
glass_size_spinner = gtk.SpinButton(adjustment)
glass_size_spinner.connect('value_changed', self._spinner_cb,
'lens size')
glass_size_spinner.set_tooltip_text(
_('Set the size of the magnifying glass. It is a square with a side of this many pixels.'))
page.add_row(label, glass_size_spinner)
label = gtk.Label('%s:' % _('Magnification factor'))
adjustment = gtk.Adjustment(prefs['lens magnification'], 1.1, 10.0,
0.1, 1.0)
glass_magnification_spinner = gtk.SpinButton(adjustment, digits=1)
glass_magnification_spinner.connect('value_changed', self._spinner_cb,
'lens magnification')
glass_magnification_spinner.set_tooltip_text(
_('Set the magnification factor of the magnifying glass.'))
page.add_row(label, glass_magnification_spinner)
page.new_section(_('Image scaling'))
stretch_button = gtk.CheckButton(_('Stretch small images.'))
stretch_button.set_active(prefs['stretch'])
stretch_button.connect('toggled', self._check_button_cb, 'stretch')
stretch_button.set_tooltip_text(_('Stretch images to a size that is larger than their original size if '
'the current zoom mode requests it. If this preference is unset, images '
'are never scaled to be larger than their original size.'))
page.add_row(stretch_button)
page.new_section(_('Transparency'))
checkered_bg_button = gtk.CheckButton(
_('Use checkered background for transparent images.'))
checkered_bg_button.set_active(
prefs['checkered bg for transparent images'])
checkered_bg_button.connect('toggled', self._check_button_cb,
'checkered bg for transparent images')
checkered_bg_button.set_tooltip_text(_('Use a grey checkered background for transparent images. If this '
'preference is unset, the background is plain white instead.'))
page.add_row(checkered_bg_button)
notebook.append_page(page, gtk.Label(_('Appearance')))
# ----------------------------------------------------------------
# The "Behaviour" tab.
# ----------------------------------------------------------------
page = _PreferencePage(150)
page.new_section(_('Scroll'))
smart_space_button = gtk.CheckButton(
_('Use smart space key scrolling.'))
smart_space_button.set_active(prefs['smart space scroll'])
smart_space_button.connect('toggled', self._check_button_cb,
'smart space scroll')
smart_space_button.set_tooltip_text(_('Use smart scrolling with the space key. Normally '
'the space key scrolls only right down (or up when '
'shift is pressed), but with this preference set it '
'also scrolls sideways and so tries to follow the '
'natural reading order of the comic book.'))
page.add_row(smart_space_button)
flip_with_wheel_button = gtk.CheckButton(
_('Flip pages when scrolling off the edges of the page.'))
flip_with_wheel_button.set_active(prefs['flip with wheel'])
flip_with_wheel_button.connect('toggled', self._check_button_cb,
'flip with wheel')
flip_with_wheel_button.set_tooltip_text(_('Flip pages when scrolling "off the page" '
'with the scroll wheel or with the arrow keys.'
' It takes three consecutive "steps" with the '
'scroll wheel or the arrow keys for the pages to '
'be flipped.'))
page.add_row(flip_with_wheel_button)
page.new_section(_('Double page mode'))
step_length_button = gtk.CheckButton(
_('Flip two pages in double page mode.'))
step_length_button.set_active(prefs['double step in double page mode'])
step_length_button.connect('toggled', self._check_button_cb,
'double step in double page mode')
step_length_button.set_tooltip_text(
_('Flip two pages, instead of one, each time we flip pages in double page mode.'))
page.add_row(step_length_button)
virtual_double_button = gtk.CheckButton(
_('Show only one wide image in double page mode.'))
virtual_double_button.set_active(
prefs['no double page for wide images'])
virtual_double_button.connect('toggled', self._check_button_cb,
'no double page for wide images')
virtual_double_button.set_tooltip_text(_("Display only one image in double page mode, "
"if the image's width exceeds its height. The "
"result of this is that scans that span two "
"pages are displayed properly (i.e. alone) also "
"in double page mode."))
page.add_row(virtual_double_button)
page.new_section(_('Files'))
auto_open_next_button = gtk.CheckButton(
_('Automatically open the next archive.'))
auto_open_next_button.set_active(prefs['auto open next archive'])
auto_open_next_button.connect('toggled', self._check_button_cb,
'auto open next archive')
auto_open_next_button.set_tooltip_text(_('Automatically open the next archive '
'in the directory when flipping past '
'the last page, or the previous archive '
'when flipping past the first page.'))
page.add_row(auto_open_next_button)
auto_open_last_button = gtk.CheckButton(
_('Automatically open the last viewed file on startup.'))
auto_open_last_button.set_active(prefs['auto load last file'])
auto_open_last_button.connect('toggled', self._check_button_cb,
'auto load last file')
auto_open_last_button.set_tooltip_text(
_('Automatically open, on startup, the file that was open when Comix was last closed.'))
page.add_row(auto_open_last_button)
store_recent_button = gtk.CheckButton(
_('Store information about recently opened files.'))
store_recent_button.set_active(prefs['store recent file info'])
store_recent_button.connect('toggled', self._check_button_cb,
'store recent file info')
store_recent_button.set_tooltip_text(
_('Add information about all files opened from within Comix to the shared recent files list.'))
page.add_row(store_recent_button)
create_thumbs_button = gtk.CheckButton(
_('Store thumbnails for opened files.'))
create_thumbs_button.set_active(prefs['create thumbnails'])
create_thumbs_button.connect('toggled', self._check_button_cb,
'create thumbnails')
create_thumbs_button.set_tooltip_text(_('Store thumbnails for opened files according '
'to the freedesktop.org specification. These '
'thumbnails are shared by many other applications, '
'such as most file managers.'))
page.add_row(create_thumbs_button)
page.new_section(_('Cache'))
cache_button = gtk.CheckButton(_('Use a cache to speed up browsing.'))
cache_button.set_active(prefs['cache'])
cache_button.connect('toggled', self._check_button_cb, 'cache')
cache_button.set_tooltip_text(_('Cache the images that are next to the currently '
'viewed image in order to speed up browsing. Since '
'the speed improvements are quite big, it is recommended '
'that you have this preference set, unless you are '
'running short on free RAM.'))
page.add_row(cache_button)
page.new_section(_('Image Animation'))
gif_button = gtk.CheckButton(_('Play GIF image animations.'))
gif_button.set_active(prefs['animate gifs'])
gif_button.connect('toggled', self._check_button_cb, 'animate gifs')
# TODO: Change if PixbufAnimation gets resizing
gif_button.set_tooltip_text(_('Play animations for GIF files, if there is one. '
'If this is set, animated GIF images will not be resized.'))
page.add_row(gif_button)
notebook.append_page(page, gtk.Label(_('Behaviour')))
# ----------------------------------------------------------------
# The "Display" tab.
# ----------------------------------------------------------------
page = _PreferencePage(180)
page.new_section(_('Default modes'))
double_page_button = gtk.CheckButton(
_('Use double page mode by default.'))
double_page_button.set_active(prefs['default double page'])
double_page_button.connect('toggled', self._check_button_cb,
'default double page')
page.add_row(double_page_button)
fullscreen_button = gtk.CheckButton(_('Use fullscreen by default.'))
fullscreen_button.set_active(prefs['default fullscreen'])
fullscreen_button.connect('toggled', self._check_button_cb,
'default fullscreen')
page.add_row(fullscreen_button)
manga_button = gtk.CheckButton(_('Use manga mode by default.'))
manga_button.set_active(prefs['default manga mode'])
manga_button.connect('toggled', self._check_button_cb,
'default manga mode')
page.add_row(manga_button)
label = gtk.Label('%s:' % _('Default zoom mode'))
zoom_combo = gtk.combo_box_new_text()
zoom_combo.append_text(_('Best fit mode'))
zoom_combo.append_text(_('Fit width mode'))
zoom_combo.append_text(_('Fit height mode'))
zoom_combo.append_text(_('Manual zoom mode'))
# Change this if the combobox entries are reordered.
zoom_combo.set_active(prefs['default zoom mode'])
zoom_combo.connect('changed', self._combo_box_cb)
page.add_row(label, zoom_combo)
page.new_section(_('Fullscreen'))
hide_in_fullscreen_button = gtk.CheckButton(
_('Automatically hide all toolbars in fullscreen.'))
hide_in_fullscreen_button.set_active(prefs['hide all in fullscreen'])
hide_in_fullscreen_button.connect('toggled', self._check_button_cb,
'hide all in fullscreen')
page.add_row(hide_in_fullscreen_button)
page.new_section(_('Slideshow'))
label = gtk.Label('%s:' % _('Slideshow delay (in seconds)'))
adjustment = gtk.Adjustment(prefs['slideshow delay'] / 1000.0,
0.5, 3600.0, 0.1, 1)
delay_spinner = gtk.SpinButton(adjustment, digits=1)
delay_spinner.connect('value_changed', self._spinner_cb,
'slideshow delay')
page.add_row(label, delay_spinner)
page.new_section(_('Comments'))
label = gtk.Label('%s:' % _('Comment extensions'))
extensions_entry = gtk.Entry()
extensions_entry.set_text(', '.join(prefs['comment extensions']))
extensions_entry.connect('activate', self._entry_cb)
extensions_entry.connect('focus_out_event', self._entry_cb)
extensions_entry.set_tooltip_text(
_('Treat all files found within archives, that have one of these file endings, as comments.'))
page.add_row(label, extensions_entry)
page.new_section(_('Rotation'))
auto_rotate_button = gtk.CheckButton(
_('Automatically rotate images according to their metadata.'))
auto_rotate_button.set_active(prefs['auto rotate from exif'])
auto_rotate_button.connect('toggled', self._check_button_cb,
'auto rotate from exif')
auto_rotate_button.set_tooltip_text(_('Automatically rotate images when an '
'orientation is specified in the image '
'metadata, such as in an Exif tag.'))
page.add_row(auto_rotate_button)
notebook.append_page(page, gtk.Label(_('Display')))
self.show_all()
def _check_button_cb(self, button, preference):
"""Callback for all checkbutton-type preferences."""
prefs[preference] = button.get_active()
if preference == 'smart bg':
if not prefs[preference]:
self._window.set_bg_colour(prefs['bg colour'])
else:
self._window.draw_image(scroll=False)
elif preference in ('stretch', 'checkered bg for transparent images',
'no double page for wide images', 'auto rotate from exif'):
self._window.draw_image(scroll=False)
elif preference == 'hide all in fullscreen' and self._window.is_fullscreen:
self._window.draw_image(scroll=False)
elif preference == 'show page numbers on thumbnails':
self._window.thumbnailsidebar.clear()
self._window.thumbnailsidebar.load_thumbnails()
def _color_button_cb(self, colorbutton):
"""Callback for the background colour selection button."""
colour = colorbutton.get_color()
prefs['bg colour'] = colour.red, colour.green, colour.blue
if not prefs['smart bg'] or not self._window.file_handler.file_loaded:
self._window.set_bg_colour(prefs['bg colour'])
def _spinner_cb(self, spinbutton, preference):
"""Callback for spinner-type preferences."""
value = spinbutton.get_value()
if preference == 'lens size':
prefs[preference] = int(value)
elif preference == 'lens magnification':
prefs[preference] = value
elif preference == 'slideshow delay':
prefs[preference] = int(value * 1000)
self._window.slideshow.update_delay()
elif preference == 'thumbnail size':
prefs[preference] = int(value)
self._window.thumbnailsidebar.resize()
self._window.draw_image(scroll=False)
def _combo_box_cb(self, combobox):
"""Callback for combobox-type preferences."""
zoom_mode = combobox.get_active()
prefs['default zoom mode'] = zoom_mode
def _entry_cb(self, entry, event=None):
"""Callback for entry-type preferences."""
text = entry.get_text()
extensions = [e.strip() for e in text.split(',')]
prefs['comment extensions'] = [e for e in extensions if e]
self._window.file_handler.update_comment_extensions()
def _response(self, dialog, response):
_close_dialog()
class _PreferencePage(gtk.VBox):
"""The _PreferencePage is a conveniece class for making one "page"
in a preferences-style dialog that contains one or more
_PreferenceSections.
"""
def __init__(self, right_column_width):
"""Create a new page where any possible right columns have the
width request <right_column_width>.
"""
gtk.VBox.__init__(self, False, 12)
self.set_border_width(12)
self._right_column_width = right_column_width
self._section = None
def new_section(self, header):
"""Start a new section in the page, with the header text from
<header>.
"""
self._section = _PreferenceSection(header, self._right_column_width)
self.pack_start(self._section, False, False)
def add_row(self, left_item, right_item=None):
"""Add a row to the page (in the latest section), containing one
or two items. If the left item is a label it is automatically
aligned properly.
"""
if isinstance(left_item, gtk.Label):
left_item.set_alignment(0, 0.5)
if right_item is None:
self._section.contentbox.pack_start(left_item)
else:
left_box, right_box = self._section.new_split_vboxes()
left_box.pack_start(left_item)
right_box.pack_start(right_item)
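# --- Illustrative usage sketch (not part of the original dialog) ---
# Shows the intended page-building pattern; the labels and widgets are
# hypothetical and a running GTK main loop is assumed.
def _example_preference_page():
    page = _PreferencePage(80)
    page.new_section('Example section')
    page.add_row(gtk.CheckButton('An example toggle.'))
    label = gtk.Label('A labelled row:')
    spinner = gtk.SpinButton(gtk.Adjustment(1, 0, 10, 1))
    page.add_row(label, spinner)
    return page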
class _PreferenceSection(gtk.VBox):
"""The _PreferenceSection is a convenience class for making one
"section" of a preference-style dialog, e.g. it has a bold header
and a number of rows which are indented with respect to that header.
"""
def __init__(self, header, right_column_width=150):
"""Contruct a new section with the header set to the text in
<header>, and the width request of the (possible) right columns
set to that of <right_column_width>.
"""
gtk.VBox.__init__(self, False, 0)
self._right_column_width = right_column_width
self.contentbox = gtk.VBox(False, 6)
label = labels.BoldLabel(header)
label.set_alignment(0, 0.5)
hbox = gtk.HBox(False, 0)
hbox.pack_start(gtk.HBox(), False, False, 6)
hbox.pack_start(self.contentbox)
self.pack_start(label, False, False)
self.pack_start(hbox, False, False, 6)
def new_split_vboxes(self):
"""Return two new VBoxes that are automatically put in the section
after the previously added items. The right one has a width request
        equal to the right_column_width value passed to the class constructor,
in order to make it easy for all "right column items" in a page to
line up nicely.
"""
left_box = gtk.VBox(False, 6)
right_box = gtk.VBox(False, 6)
right_box.set_size_request(self._right_column_width, -1)
hbox = gtk.HBox(False, 12)
hbox.pack_start(left_box)
hbox.pack_start(right_box, False, False)
self.contentbox.pack_start(hbox)
return left_box, right_box
def open_dialog(action, window):
global _dialog
if _dialog is None:
_dialog = _PreferencesDialog(window)
else:
_dialog.present()
def _close_dialog(*args):
global _dialog
if _dialog is not None:
_dialog.destroy()
_dialog = None
def read_preferences_file():
"""Read preferences data from disk."""
if os.path.isfile(_config_path):
config = None
try:
config = open(_config_path, 'rb')
version = cPickle.load(config)
old_prefs = cPickle.load(config)
config.close()
except Exception:
print('! Corrupt preferences file "{}", deleting...'.format(_config_path))
if config is not None:
config.close()
os.remove(_config_path)
else:
for key in old_prefs:
if key in prefs:
prefs[key] = old_prefs[key]
def write_preferences_file():
"""Write preference data to disk."""
config = open(_config_path, 'wb')
cPickle.dump(constants.VERSION, config, cPickle.HIGHEST_PROTOCOL)
cPickle.dump(prefs, config, cPickle.HIGHEST_PROTOCOL)
config.close()
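# Illustrative note (not code from the original module): the preferences
# pickle holds exactly two objects back to back -- the writing Comix version,
# then the prefs dict -- which is why read_preferences_file() calls
# cPickle.load() twice on the same file object before closing it.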
| gpl-2.0 | -4,057,415,922,797,753,300 | 47.121905 | 115 | 0.571604 | false |
owtf/owtf | owtf/shell/interactive.py | 1 | 6596 | """
owtf.shell.interactive
~~~~~~~~~~~~~~~~~~~~~~
The shell module allows running arbitrary shell commands and is critical
to the framework in order to run third party tools. The interactive shell module allows non-blocking
interaction with subprocesses running tools or remote connections (i.e. shells)
"""
import logging
import subprocess
from owtf.db.session import get_scoped_session
from owtf.shell.base import BaseShell
from owtf.shell.utils import DisconnectException, recv_some, send_all, AsyncPopen
from owtf.utils.error import user_abort
__all__ = ["InteractiveShell"]
class InteractiveShell(BaseShell):
def __init__(self):
BaseShell.__init__(self) # Calling parent class to do its init part
self.connection = None
self.options = None
self.session = get_scoped_session()
self.command_time_offset = "InteractiveCommand"
def check_conn(self, abort_message):
"""Check the connection is alive or not
:param abort_message: Abort message to print
:type abort_message: `str`
:return: True if channel is open, else False
:rtype: `bool`
"""
if not self.connection:
logging.warn("ERROR - Communication channel closed - %s", abort_message)
return False
return True
def read(self, time=1):
"""Read data from the channel
:param time: Time interval in seconds
:type time: `int`
:return: Output from the channel
:rtype: `str`
"""
output = ""
if not self.check_conn("Cannot read"):
return output
try:
output = recv_some(self.connection, time)
except DisconnectException:
logging.warn("ERROR: read - The Communication channel is down!")
return output # End of communication channel
logging.info(output) # Show progress on screen
return output
def format_cmd(self, command):
"""Format the command to be printed on console
:param command: Command to run
:type command: `str`
:return: Formatted command string
:rtype: `str`
"""
if (
"RHOST" in self.options and "RPORT" in self.options
): # Interactive shell on remote connection
return "{!s}:{!s}-{!s}".format(
self.options["RHOST"], self.options["RPORT"], command
)
else:
return "Interactive - {!s}".format(command)
def run(self, command, plugin_info):
"""Format the command to be printed on console
:param command: Command to run
:type command: `str`
:return: Formatted command string
:rtype: `str`
"""
output = ""
cancelled = False
if not self.check_conn("NOT RUNNING Interactive command: {!s}".format(command)):
return output
# TODO: tail to be configurable: \n for *nix, \r\n for win32
log_cmd = self.format_cmd(command)
cmd_info = self.start_cmd(log_cmd, log_cmd)
try:
logging.info("Running Interactive command: %s", command)
send_all(self.connection, command + "\n")
output += self.read()
except DisconnectException:
cancelled = True
logging.warn("ERROR: Run - The Communication Channel is down!")
self.finish_cmd(self.session, cmd_info, cancelled, plugin_info)
except KeyboardInterrupt:
cancelled = True
self.finish_cmd(self.session, cmd_info, cancelled, plugin_info)
output += user_abort("Command", output) # Identify as Command Level abort
if not cancelled:
self.finish_cmd(self.session, cmd_info, cancelled, plugin_info)
return output
def run_cmd_list(self, cmd_list, plugin_info):
"""Run a list of commands
:param cmd_list: List of commands to run
:type cmd_list: `list`
:param plugin_info: Plugin context information
:type plugin_info: `dict`
:return: Command output
:rtype: `str`
"""
output = ""
for command in cmd_list:
if command != "None":
output += self.run(command, plugin_info)
return output
def open(self, options, plugin_info):
"""Open the connection channel
:param options: User supplied args
:type options: `dict`
:param plugin_info: Context info for plugins
:type plugin_info: `dict`
:return: Plugin output
:rtype: `str`
"""
output = ""
if not self.connection:
name, command = options["ConnectVia"][0]
self.connection = AsyncPopen(
command,
shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
stdin=subprocess.PIPE,
bufsize=1,
)
self.options = options # Store Options for Closing processing and if initial Commands are given
if options["InitialCommands"]:
output += self.run_cmd_list([options["InitialCommands"]], plugin_info)
output += self.read()
output += self.read()
return output
def close(self, plugin_info):
"""Close the communication channel
:param plugin_info: Context information for plugin
:type plugin_info: `dict`
:return: None
:rtype: None
"""
logging.info("Close: %s", str(self.options))
if self.options["CommandsBeforeExit"]:
logging.info("Running commands before closing Communication Channel..")
self.run_cmd_list(
self.options["CommandsBeforeExit"].split(
self.options["CommandsBeforeExitDelim"]
),
plugin_info,
)
logging.info("Trying to close Communication Channel..")
self.run("exit", plugin_info)
if self.options["ExitMethod"] == "kill":
logging.info("Killing Communication Channel..")
self.connection.kill()
else: # By default wait
logging.info("Waiting for Communication Channel to close..")
self.connection.wait()
self.connection = None
def is_closed(self):
"""Check if connection is closed
:return: True if closed, else True
:rtype: `bool`
"""
return self.connection is None
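# --- Illustrative lifecycle sketch (not part of the original module) ---
# Assumes OWTF's configuration and database are already initialised; the
# option values below are hypothetical but mirror the keys used above.
def _example_shell_session(plugin_info):
    shell = InteractiveShell()
    options = {
        'ConnectVia': [('bash', '/bin/bash')],
        'InitialCommands': 'id',
        'CommandsBeforeExit': '',
        'CommandsBeforeExitDelim': ';',
        'ExitMethod': 'wait',
    }
    output = shell.open(options, plugin_info)
    output += shell.run('uname -a', plugin_info)
    shell.close(plugin_info)
    return output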
| bsd-3-clause | 6,571,136,281,010,158,000 | 34.085106 | 108 | 0.583535 | false |
danaukes/popupcad | popupcad/filetypes/genericshapebase.py | 2 | 15733 |
# -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
from popupcad.geometry.vertex import ShapeVertex
import numpy
import qt.QtCore as qc
import qt.QtGui as qg
from dev_tools.enum import enum
import popupcad
class ShapeInvalid(Exception):
pass
class NotSimple(Exception):
pass
class GenericShapeBase(object):
display = ['construction', 'exterior', 'interiors']
editable = ['construction']
shapetypes = enum(
line='line',
polyline='polyline',
polygon='polygon',
circle='circle',
rect2point='rect2point')
deletable = []
def __init__(self,exterior,interiors,construction=False,test_shapely=False):
self.id = id(self)
self.exterior = exterior
self.interiors = interiors
self.construction = construction
# self.exterior = self.condition_loop(self.exterior)
# self.interiors = [self.condition_loop(interior) for interior in self.interiors]
self.exterior = self.remove_redundant_points(self.exterior)
self.interiors = [self.remove_redundant_points(interior) for interior in self.interiors]
def is_valid_bool(self):
try:
self.is_valid()
return True
except:
return False
def is_valid(self):
shapely = self.to_shapely(scaling = popupcad.csg_processing_scaling)
if not shapely.is_simple:
            raise NotSimple()
        if not shapely.is_valid:
            raise ShapeInvalid()
@classmethod
def lastdir(cls):
return popupcad.lastshapedir
@classmethod
def setlastdir(cls, directory):
popupcad.lastshapedir = directory
def isValid(self):
notempty = self.len_exterior() > 0
return notempty
def copy_data(self, new_type, identical=True):
exterior = [vertex.copy(identical) for vertex in self.get_exterior()]
interiors = [[vertex.copy(identical) for vertex in interior]
for interior in self.get_interiors()]
new = new_type(exterior, interiors, self.is_construction())
if identical:
new.id = self.id
return new
def copy(self, identical=True):
return self.copy_data(type(self), identical)
def upgrade(self, identical=True):
exterior = [vertex.upgrade(identical) for vertex in self.get_exterior()]
interiors = [[vertex.upgrade(identical) for vertex in interior] for interior in self.get_interiors()]
new = type(self)(exterior, interiors, self.is_construction())
if identical:
new.id = self.id
return new
def get_exterior(self):
return self.exterior
def get_interiors(self):
return self.interiors
def is_construction(self):
try:
return self.construction
except AttributeError:
self.construction = False
return self.construction
def set_construction(self, test):
self.construction = test
def exteriorpoints(self, scaling=1):
return [vertex.getpos(scaling) for vertex in self.get_exterior()]
def interiorpoints(self, scaling=1):
return [[vertex.getpos(scaling) for vertex in interior]
for interior in self.get_interiors()]
def exteriorpoints_3d(self, z=0):
points = numpy.array([vertex.getpos() for vertex in self.get_exterior()])
size = list(points.shape)
size[1]+=1
points2 = numpy.zeros(size)
points2[:,:2] = points
points2[:,2] = z
return points2.tolist()
def interiorpoints_3d(self, z=0):
interiors2 = []
for interior in self.get_interiors():
points = numpy.array([vertex.getpos() for vertex in interior])
size = list(points.shape)
size[1]+=1
points2 = numpy.zeros(size)
points2[:,:2] = points
points2[:,2] = z
interiors2.append(points2.tolist())
return interiors2
def vertices(self):
vertices = self.get_exterior()[:]
[vertices.extend(interior) for interior in self.get_interiors()]
return vertices
def points(self, scaling=1):
return [vertex.getpos(scaling) for vertex in self.vertices()]
def segments_closed(self):
points = self.get_exterior()
segments = list(zip(points, points[1:] + points[:1]))
for points in self.get_interiors():
segments.extend(list(zip(points, points[1:] + points[:1])))
return segments
def segments_open(self):
points = self.get_exterior()
segments = list(zip(points[:-1], points[1:]))
for points in self.get_interiors():
segments.extend(list(zip(points[:-1], points[1:])))
return segments
def segmentpoints(self, scaling=1):
segments = self.segments()
segmentpoints = [
(point1.getpos(scaling),
point2.getpos(scaling)) for point1,
point2 in segments]
return segmentpoints
def painterpath(self):
exterior = self.exteriorpoints(scaling=popupcad.view_scaling)
interiors = self.interiorpoints(scaling=popupcad.view_scaling)
return self.gen_painterpath(exterior, interiors)
def gen_painterpath(self, exterior, interiors):
path = qg.QPainterPath()
return path
def properties(self):
from idealab_tools.propertyeditor import PropertyEditor
return PropertyEditor(self)
def addvertex_exterior(self, vertex, special=False):
self.exterior.append(vertex)
self.update_handles()
def addvertex_exterior_special(self, vertex, special=False):
if len(self.get_exterior()) > 2:
if special:
a = [v.getpos() for v in self.get_exterior()]
b = list(zip(a, a[1:] + a[:1]))
c = numpy.array(b)
d = numpy.array(vertex.getpos())
e = c - d
f = e.reshape(-1, 4)
g = (f**2).sum(1)
h = g.argmin()
self.insert_exterior_vertex(h + 1, vertex)
self.update_handles()
return
self.append_exterior_vertex(vertex)
self.update_handles()
def removevertex(self, vertex):
if vertex in self.exterior:
ii = self.exterior.index(vertex)
self.exterior.pop(ii)
for interior in self.interiors:
            if vertex in interior:
ii = interior.index(vertex)
interior.pop(ii)
self.update_handles()
def checkedge(self, edge):
import popupcad.algorithms.points as points
for pt1, pt2 in zip(edge[:-1], edge[1:]):
if points.twopointsthesame(pt1, pt2, popupcad.distinguishable_number_difference):
raise Exception
@staticmethod
def _condition_loop(loop,round_vertices = False, test_rounded_vertices = True, remove_forward_redundancy=True, remove_loop_reduncancy=True,terminate_with_start = False,decimal_places = None):
if len(loop)>0:
if remove_forward_redundancy:
new_loop = [loop.pop(0)]
while not not loop:
v1 = new_loop[-1]
v2 = loop.pop(0)
if test_rounded_vertices:
equal = v1.rounded_is_equal(v2,decimal_places)
else:
equal = v1.identical(v2)
if not equal:
new_loop.append(v2)
else:
new_loop = loop[:]
v1 = new_loop[0]
v2 = new_loop[-1]
if test_rounded_vertices:
equal = v1.rounded_is_equal(v2,decimal_places)
else:
equal = v1.identical(v2)
if terminate_with_start:
if not equal:
new_loop.append(v1.copy(identical=False))
if remove_loop_reduncancy:
if equal:
new_loop.pop(-1)
if round_vertices:
new_loop = [item.round(decimal_places) for item in new_loop]
return new_loop
else:
return loop
def _condition(self,round_vertices = False, test_rounded_vertices = True, remove_forward_redundancy=True, remove_loop_reduncancy=True,terminate_with_start = False,decimal_places = None):
        self.exterior = self._condition_loop(self.exterior,round_vertices = round_vertices, test_rounded_vertices = test_rounded_vertices, remove_forward_redundancy=remove_forward_redundancy, remove_loop_reduncancy=remove_loop_reduncancy,terminate_with_start = terminate_with_start,decimal_places = decimal_places)
        self.interiors = [self._condition_loop(interior,round_vertices = round_vertices, test_rounded_vertices = test_rounded_vertices, remove_forward_redundancy=remove_forward_redundancy, remove_loop_reduncancy=remove_loop_reduncancy,terminate_with_start = terminate_with_start,decimal_places = decimal_places) for interior in self.interiors]
@classmethod
def condition_loop(cls,loop):
return cls._condition_loop(loop)
# def condition(self):
# self.exterior = self.condition_loop(self.exterior)
# self.interiors = [self.condition_loop(interior) for interior in self.interiors]
@classmethod
def gen_from_point_lists(cls, exterior_p, interiors_p, **kwargs):
exterior = [ShapeVertex(point) for point in exterior_p]
interiors= [[ShapeVertex(point) for point in interior] for interior in interiors_p]
return cls(exterior, interiors, **kwargs)
def genInteractiveVertices(self):
try:
return self._exteriorhandles, self._interiorhandles
except AttributeError:
self.update_handles()
return self._exteriorhandles, self._interiorhandles
def update_handles(self):
try:
for handle in self._handles:
handle.harddelete()
except AttributeError:
pass
exterior = [vertex.gen_interactive() for vertex in self.get_exterior()]
interiors = [[vertex.gen_interactive() for vertex in interior] for interior in self.get_interiors()]
handles = exterior[:]
[handles.extend(interior) for interior in interiors]
self._exteriorhandles = exterior
self._interiorhandles = interiors
self._handles = handles
def len_exterior(self):
return len(self.get_exterior())
def get_handles(self):
try:
return self._handles
except AttributeError:
self.update_handles()
return self._handles
def get_exterior_handles(self):
try:
return self._exteriorhandles
except AttributeError:
self.update_handles()
return self._exteriorhandles
def triangles3(self):
return []
@staticmethod
def generateQPolygon(points):
poly = qg.QPolygonF([qc.QPointF(*(point))
for point in numpy.array(points)])
return poly
def is_equal(self, other):
if isinstance(self, type(other)):
if len(
self.get_exterior()) == len(
other.get_exterior()) and len(
self.get_interiors()) == len(
other.get_interiors()):
for point1, point2 in zip(
self.get_exterior(), other.get_exterior()):
if not point1.is_equal(point2, popupcad.distinguishable_number_difference):
return False
for interior1, interior2 in zip(
self.get_interiors(), other.get_interiors()):
if len(interior1) != len(interior2):
return False
for point1, point2 in zip(interior1, interior2):
if not point1.is_equal(point2, popupcad.distinguishable_number_difference):
return False
return True
return False
def scale(self, m):
[item.scale(m) for item in self.get_exterior()]
[item.scale(m) for interior in self.get_interiors() for item in interior]
def shift(self, dxdy):
[item.shift(dxdy) for item in self.get_exterior()]
[item.shift(dxdy) for interior in self.get_interiors()
for item in interior]
def transform(self, T):
exteriorpoints = (T.dot(numpy.array(self.exteriorpoints_3d(z=1)).T)).T[:,:2].tolist()
interiorpoints = [(T.dot(numpy.array(interior).T)).T[:,:2].tolist() for interior in self.interiorpoints_3d(z=1)]
return self.gen_from_point_lists(exteriorpoints,interiorpoints)
def constrained_shift(self, dxdy, constraintsystem):
a = [(item, dxdy) for item in self.get_exterior()]
a.extend([(item, dxdy) for interior in self.get_interiors() for item in interior])
constraintsystem.constrained_shift(a)
def flip(self):
self.exterior = self.get_exterior()[::-1]
self.interiors = [interior[::-1] for interior in self.get_interiors()]
def hollow(self):
return [self]
def fill(self):
return [self]
def insert_exterior_vertex(self, ii, vertex):
self.exterior.insert(ii, vertex)
def append_exterior_vertex(self, vertex):
self.exterior.append(vertex)
def output_dxf(self,model_space,layer = None):
csg = self.to_shapely(scaling = popupcad.csg_processing_scaling)
new = popupcad.algorithms.csg_shapely.to_generic(csg)
return new.output_dxf(model_space,layer)
def __lt__(self,other):
return self.exteriorpoints()[0]<other.exteriorpoints()[0]
def find_minimal_enclosing_circle(self):
from popupcad.algorithms.minimal_enclosing_circle import numerical_stable_circle
        return numerical_stable_circle(self.exteriorpoints())
#Gets the center
def get_center(self):
'''Retrieves the center point of the shape'''
points = self.exteriorpoints()
x_values = [point[0]/popupcad.SI_length_scaling for point in points]
y_values = [point[1]/popupcad.SI_length_scaling for point in points]
x = float(sum(x_values)) / len(x_values)
y = float(sum(y_values)) / len(y_values)
return (x, y)
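    # --- Illustrative sketch (not part of the original class) ---
    # Builds a unit square and reads back its centre; note that get_center()
    # divides each coordinate by popupcad.SI_length_scaling.
    @classmethod
    def _example_get_center(cls):
        square = cls.gen_from_point_lists(
            [(0, 0), (1, 0), (1, 1), (0, 1)], [])
        return square.get_center()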
def exterior_points_from_center(self):
'''Retrieves the exterior points relative to the center'''
center = self.get_center()
points = self.exteriorpoints()
x_values = [point[0]/popupcad.SI_length_scaling - center[0] for point in points]
y_values = [point[1]/popupcad.SI_length_scaling - center[1] for point in points]
return list(zip(x_values, y_values))
@classmethod
def remove_redundant_points(cls, points, scaling=1,loop_test = True):
newpoints = []
if len(points)>0:
points = points[:]
newpoints.append(points.pop(0))
while not not points:
newpoint = points.pop(0)
if not popupcad.algorithms.points.twopointsthesame(newpoints[-1].getpos(scaling),newpoint.getpos(scaling),popupcad.distinguishable_number_difference):
if len(points)==0 and loop_test:
if not popupcad.algorithms.points.twopointsthesame(newpoints[0].getpos(scaling),newpoint.getpos(scaling),popupcad.distinguishable_number_difference):
newpoints.append(newpoint)
else:
newpoints.append(newpoint)
return newpoints | mit | 9,166,630,827,800,457,000 | 35.590698 | 253 | 0.59372 | false |