repo_name
stringlengths 5
92
| path
stringlengths 4
232
| copies
stringclasses 19
values | size
stringlengths 4
7
| content
stringlengths 721
1.04M
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 15
997
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|
meain/bridge | docs/report/urls.py | 1 | 1263 | from django.conf.urls import url
from . import views
# URL routes for the report app.
#
# NOTE(review): several routes share the same ``name`` ('events' is used by
# the events, track_data and calendar routes, and 'new_user' by both
# create_user routes).  Django resolves reverse('events') to the *last*
# registered route, so the earlier duplicates cannot be reversed by name.
# The names are kept as-is here to avoid breaking existing reverse()
# callers, but they should be made unique.
urlpatterns = [
    # Landing page.
    url(r'^$', views.index, name="index"),
    # Sign-in form.
    url(r'^signin/$', views.signin, name='signin'),
    # Create a user with an explicit id and class.
    # BUG FIX: this pattern used to be split over two physical lines with a
    # backslash inside a *raw* string; raw strings retain the backslash and
    # the newline, which embedded a literal escaped-newline in the regex
    # and made the route unmatchable.  The pattern is now a single line.
    url(r'^create_user/(?P<user_id>[0-9]+)/(?P<user_class>([a-z])+)$',
        views.create_new_user, name='new_user'),
    url(r'^timetable/(?P<user_id>[0-9]+)$',
        views.get_timetable, name='timetable'),
    url(r'^notes/(?P<user_id>[0-9]+)$',
        views.get_notes, name='notes'),
    url(r'^subject_data/(?P<user_id>[0-9]+)$',
        views.get_sub_data, name='subject_data'),
    url(r'^events/(?P<user_id>[0-9]+)$',
        views.get_events_dummy, name='events'),
    url(r'^track_data/$',
        views.get_track_data, name='events'),
    url(r'^calendar/(?P<user_id>[0-9]+)$',
        views.get_cal_data_dummy, name='events'),
    url(r'^subject_attendence/(?P<user_id>[0-9]+)$',
        views.get_attendence, name='get_attendence'),
    url(r'^create_user/$',
        views.create_new_user, name='new_user'),
    url(r'^update_attendence/$',
        views.update_attendence, name='update_attendence'),
    url(r'^set_track_data/$',
        views.set_track_data, name='set_track_data'),
]
| mit | 7,931,066,556,517,879,000 | 39.741935 | 59 | 0.53365 | false |
GoogleCloudPlatform/PerfKitBenchmarker | tests/linux_packages/pip_test.py | 1 | 2020 | """Tests for pip."""
from typing import Dict, List
import unittest
from absl.testing import parameterized
import mock
from perfkitbenchmarker.linux_packages import pip
from perfkitbenchmarker.linux_packages import python
from tests import pkb_common_test_case
# Expected sequences of remote commands issued by pip.Install, keyed by
# whether pip must be bootstrapped and by the Python version in use.

# pip is absent on a Python 2.7 VM: bootstrap it with get-pip.py, then
# verify it and snapshot the installed packages.
NEED_PIP_27 = [
    'curl https://bootstrap.pypa.io/pip/2.7/get-pip.py | sudo python -',
    'pip --version',
    'mkdir -p /opt/pkb && pip freeze | tee /opt/pkb/requirements.txt'
]
# pip is absent on a Python 3.8 VM.
NEED_PIP_38 = [
    'curl https://bootstrap.pypa.io/pip/get-pip.py | sudo python3 -',
    'pip3 --version',
    'mkdir -p /opt/pkb && pip3 freeze | tee /opt/pkb/requirements.txt'
]
# pip already works with Python 2.7: just expose a /usr/bin/pip shim that
# delegates to `python -m pip`, then verify and snapshot.
EXISTING_PIP_27 = [
    'echo \'exec python -m pip "$@"\'| sudo tee /usr/bin/pip && '
    'sudo chmod 755 /usr/bin/pip',
    'pip --version',
    'mkdir -p /opt/pkb && pip freeze | tee /opt/pkb/requirements.txt',
]
# pip already works with Python 3.8.
EXISTING_PIP_38 = [
    'echo \'exec python3 -m pip "$@"\'| sudo tee /usr/bin/pip3 && '
    'sudo chmod 755 /usr/bin/pip3',
    'pip3 --version',
    'mkdir -p /opt/pkb && pip3 freeze | tee /opt/pkb/requirements.txt',
]
# Keyword arguments passed to pip.Install for the Python 3.8 variants.
PYTHON_38_KWARGS = {'pip_cmd': 'pip3', 'python_cmd': 'python3'}
class PipTest(pkb_common_test_case.PkbCommonTestCase):
  """Tests that pip.Install issues the expected remote commands."""

  @parameterized.named_parameters(
      ('need_pip_27', False, '2.7', NEED_PIP_27, {}),
      ('need_pip_38', False, '3.8', NEED_PIP_38, PYTHON_38_KWARGS),
      ('existing_pip_27', True, '2.7', EXISTING_PIP_27, {}),
      ('existing_pip_38', True, '3.8', EXISTING_PIP_38, PYTHON_38_KWARGS),
  )
  def testInstall(self, pip_exists: bool, python_version: str,
                  expected_commands: List[str],
                  install_kwargs: Dict[str, str]):
    """Runs pip.Install against a mock VM and checks the remote commands.

    Args:
      pip_exists: Whether the VM already has a working pip for the target
        Python (mocked as the vm.TryRemoteCommand result).  Renamed from
        the former `need_pip`, whose sense was inverted: False selected
        the NEED_PIP_* command lists.
      python_version: Version string returned by the mocked
        python.GetPythonVersion.
      expected_commands: Remote commands pip.Install is expected to issue.
      install_kwargs: Extra keyword arguments forwarded to pip.Install.
    """
    self.enter_context(
        mock.patch.object(
            python, 'GetPythonVersion', return_value=python_version))
    vm = mock.Mock()
    # pip.Install probes for an existing pip via vm.TryRemoteCommand.
    vm.TryRemoteCommand.return_value = pip_exists
    pip.Install(vm, **install_kwargs)
    vm.RemoteCommand.assert_has_calls(
        [mock.call(cmd) for cmd in expected_commands])
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -1,458,470,186,040,201,200 | 32.114754 | 80 | 0.640099 | false |
berkmancenter/mediacloud | apps/common/src/python/mediawords/key_value_store/multiple_stores.py | 1 | 7164 | from typing import List, Union
from mediawords.db import DatabaseHandler
from mediawords.key_value_store import KeyValueStore, McKeyValueStoreException
from mediawords.util.perl import decode_object_from_bytes_if_needed
class McMultipleStoresStoreException(McKeyValueStoreException):
    """Exception raised by MultipleStoresStore operations."""
class MultipleStoresStore(KeyValueStore):
    """Key-value store that reads from / writes to multiple stores.

    Reads try each configured read store in order and return the first
    successful result; writes and removals are applied to *every*
    configured write store and fail as a whole if any single store fails.
    """

    __slots__ = [
        # Stores tried in order by fetch_content() / content_exists().
        '__stores_for_reading',
        # Stores updated in order by store_content() / remove_content().
        '__stores_for_writing',
    ]

    def __init__(self,
                 stores_for_reading: List[KeyValueStore] = None,
                 stores_for_writing: List[KeyValueStore] = None):
        """Constructor.

        :param stores_for_reading: stores to try, in order, when fetching.
        :param stores_for_writing: stores to update, in order, when storing.
        :raises McMultipleStoresStoreException: if both lists are empty.
        """
        if stores_for_reading is None:
            stores_for_reading = []
        if stores_for_writing is None:
            stores_for_writing = []
        if len(stores_for_reading) + len(stores_for_writing) == 0:
            raise McMultipleStoresStoreException("At least one store for reading / writing should be present.")
        self.__stores_for_reading = stores_for_reading
        self.__stores_for_writing = stores_for_writing

    def stores_for_reading(self) -> list:
        """Return list of stores for reading."""
        return self.__stores_for_reading

    def stores_for_writing(self) -> list:
        """Return list of stores for writing."""
        return self.__stores_for_writing

    def fetch_content(self, db: DatabaseHandler, object_id: int, object_path: str = None) -> bytes:
        """Fetch content from any of the stores that might have it; raise if none of them do.

        Tries the read stores in order; the first successful, non-None
        result wins.  Per-store failures are collected and reported only
        if every store fails.
        """
        object_id = self._prepare_object_id(object_id)
        object_path = decode_object_from_bytes_if_needed(object_path)
        if len(self.__stores_for_reading) == 0:
            raise McMultipleStoresStoreException("List of stores for reading object ID %d is empty." % object_id)
        errors = []
        content = None
        for store in self.__stores_for_reading:
            try:
                # MC_REWRITE_TO_PYTHON: use named parameters after Python rewrite
                content = store.fetch_content(db, object_id, object_path)
                if content is None:
                    raise McMultipleStoresStoreException("Fetching object ID %d from store %s succeeded, "
                                                         "but the returned content is undefined." % (
                                                             object_id, str(store),
                                                         ))
            except Exception as ex:
                # Silently skip through errors and die() only if content wasn't found anywhere
                errors.append("Error fetching object ID %(object_id)d from store %(store)s: %(exception)s" % {
                    'object_id': object_id,
                    'store': store,
                    'exception': str(ex),
                })
            else:
                # First successful fetch wins; stop trying further stores.
                break
        if content is None:
            raise McMultipleStoresStoreException(
                "All stores failed while fetching object ID %(object_id)d; errors: %(errors)s" % {
                    'object_id': object_id,
                    'errors': "\n".join(errors),
                }
            )
        return content

    def store_content(self,
                      db: DatabaseHandler,
                      object_id: int,
                      content: Union[str, bytes],
                      content_type: str='binary/octet-stream') -> str:
        """Store content to all stores; raise if one of them fails.

        Returns the path reported by the *last* store written to.

        NOTE(review): ``content_type`` is accepted but not forwarded to the
        underlying stores' store_content() calls -- confirm whether it
        should be passed through.
        """
        object_id = self._prepare_object_id(object_id)
        content = self._prepare_content(content)
        if len(self.__stores_for_writing) == 0:
            raise McMultipleStoresStoreException("List of stores for writing object ID %d is empty." % object_id)
        last_store_path = None
        for store in self.__stores_for_writing:
            try:
                # MC_REWRITE_TO_PYTHON: use named parameters after Python rewrite
                last_store_path = store.store_content(db, object_id, content)
                if last_store_path is None:
                    raise McMultipleStoresStoreException(
                        "Storing object ID %d to %s succeeded, but the returned path is empty." % (object_id, store,)
                    )
            except Exception as ex:
                # Unlike reads, any single write failure aborts the whole operation.
                raise McMultipleStoresStoreException(
                    "Error while saving object ID %(object_id)d to store %(store)s: %(exception)s" % {
                        'object_id': object_id,
                        'store': str(store),
                        'exception': str(ex)
                    }
                )
        if last_store_path is None:
            raise McMultipleStoresStoreException(
                "Storing object ID %d to all stores succeeded, but the returned path is empty." % object_id
            )
        return last_store_path

    def remove_content(self, db: DatabaseHandler, object_id: int, object_path: str = None) -> None:
        """Remove content from all stores; raise if one of them fails."""
        object_id = self._prepare_object_id(object_id)
        object_path = decode_object_from_bytes_if_needed(object_path)
        if len(self.__stores_for_writing) == 0:
            raise McMultipleStoresStoreException("List of stores for writing object ID %d is empty." % object_id)
        for store in self.__stores_for_writing:
            try:
                # MC_REWRITE_TO_PYTHON: use named parameters after Python rewrite
                store.remove_content(db, object_id, object_path)
            except Exception as ex:
                raise McMultipleStoresStoreException(
                    "Error while removing object ID %(object_id)d from store %(store)s: %(exception)s" % {
                        'object_id': object_id,
                        'store': str(store),
                        'exception': str(ex)
                    }
                )

    def content_exists(self, db: DatabaseHandler, object_id: int, object_path: str = None) -> bool:
        """Test if content in at least one of the stores.

        Returns True as soon as any read store reports the object; a
        store *error* (as opposed to a negative answer) aborts the check.
        """
        object_id = self._prepare_object_id(object_id)
        object_path = decode_object_from_bytes_if_needed(object_path)
        if len(self.__stores_for_reading) == 0:
            raise McMultipleStoresStoreException("List of stores for reading object ID %d is empty." % object_id)
        for store in self.__stores_for_reading:
            try:
                # MC_REWRITE_TO_PYTHON: use named parameters after Python rewrite
                exists = store.content_exists(db, object_id, object_path)
            except Exception as ex:
                raise McMultipleStoresStoreException(
                    "Error while testing whether object ID %(object_id)d exists in store %(store)s: %(exception)s" % {
                        'object_id': object_id,
                        'store': store,
                        'exception': str(ex),
                    })
            else:
                if exists:
                    return True
        return False
| agpl-3.0 | -3,426,115,544,674,871,000 | 37.934783 | 118 | 0.556253 | false |
artdavis/pyfred | pyfred/webcolors-1.5/webcolors.py | 1 | 27031 | """
Utility functions for working with the color names and color value
formats defined by the HTML and CSS specifications for use in
documents on the Web.
See documentation (in docs/ directory of source distribution) for
details of the supported formats and conversions.
"""
import math
import re
import string
import struct
# Python 2/3 compatibility shims: expose the Python 2 names on Python 3 so
# the rest of the module can use a single spelling.

# Python 2's unichr() is Python 3's chr().
try:
    unichr
except NameError:
    unichr = chr

# Python 2's unicode is Python 3's str.
try:
    unicode
except NameError:
    unicode = str
def _reversedict(d):
"""
Internal helper for generating reverse mappings; given a
dictionary, returns a new dictionary with keys and values swapped.
"""
return dict(zip(d.values(), d.keys()))
# Matches "#rgb" or "#rrggbb" hexadecimal color values (hex digits in
# either case).
HEX_COLOR_RE = re.compile(r'^#([a-fA-F0-9]{3}|[a-fA-F0-9]{6})$')

# Specifications whose color-name tables this module knows about.
SUPPORTED_SPECIFICATIONS = (u'html4', u'css2', u'css21', u'css3')

# Error template used when an unsupported specification name is passed in
# (note the escaped %%s placeholder survives the first interpolation).
SPECIFICATION_ERROR_TEMPLATE = u"'%%s' is not a supported specification for color name lookups; \
supported specifications are: %s." % (u', '.join(SUPPORTED_SPECIFICATIONS))
# Mappings of color names to normalized hexadecimal color values.
#################################################################
# The HTML 4 named colors.
#
# The canonical source for these color definitions is the HTML 4
# specification:
#
# http://www.w3.org/TR/html401/types.html#h-6.5
#
# The file tests/definitions.py in the source distribution of this
# module downloads a copy of the HTML 4 standard and parses out the
# color names to ensure the values below are correct.
HTML4_NAMES_TO_HEX = {
u'aqua': u'#00ffff',
u'black': u'#000000',
u'blue': u'#0000ff',
u'fuchsia': u'#ff00ff',
u'green': u'#008000',
u'gray': u'#808080',
u'lime': u'#00ff00',
u'maroon': u'#800000',
u'navy': u'#000080',
u'olive': u'#808000',
u'purple': u'#800080',
u'red': u'#ff0000',
u'silver': u'#c0c0c0',
u'teal': u'#008080',
u'white': u'#ffffff',
u'yellow': u'#ffff00',
}
# CSS 2 used the same list as HTML 4.
CSS2_NAMES_TO_HEX = HTML4_NAMES_TO_HEX
# CSS 2.1 added orange.
CSS21_NAMES_TO_HEX = dict(HTML4_NAMES_TO_HEX, orange=u'#ffa500')
# The CSS 3/SVG named colors.
#
# The canonical source for these color definitions is the SVG
# specification's color list (which was adopted as CSS 3's color
# definition):
#
# http://www.w3.org/TR/SVG11/types.html#ColorKeywords
#
# CSS 3 also provides definitions of these colors:
#
# http://www.w3.org/TR/css3-color/#svg-color
#
# SVG provides the definitions as RGB triplets. CSS 3 provides them
# both as RGB triplets and as hexadecimal. Since hex values are more
# common in real-world HTML and CSS, the mapping below is to hex
# values instead. The file tests/definitions.py in the source
# distribution of this module downloads a copy of the CSS 3 color
# module and parses out the color names to ensure the values below are
# correct.
CSS3_NAMES_TO_HEX = {
u'aliceblue': u'#f0f8ff',
u'antiquewhite': u'#faebd7',
u'aqua': u'#00ffff',
u'aquamarine': u'#7fffd4',
u'azure': u'#f0ffff',
u'beige': u'#f5f5dc',
u'bisque': u'#ffe4c4',
u'black': u'#000000',
u'blanchedalmond': u'#ffebcd',
u'blue': u'#0000ff',
u'blueviolet': u'#8a2be2',
u'brown': u'#a52a2a',
u'burlywood': u'#deb887',
u'cadetblue': u'#5f9ea0',
u'chartreuse': u'#7fff00',
u'chocolate': u'#d2691e',
u'coral': u'#ff7f50',
u'cornflowerblue': u'#6495ed',
u'cornsilk': u'#fff8dc',
u'crimson': u'#dc143c',
u'cyan': u'#00ffff',
u'darkblue': u'#00008b',
u'darkcyan': u'#008b8b',
u'darkgoldenrod': u'#b8860b',
u'darkgray': u'#a9a9a9',
u'darkgrey': u'#a9a9a9',
u'darkgreen': u'#006400',
u'darkkhaki': u'#bdb76b',
u'darkmagenta': u'#8b008b',
u'darkolivegreen': u'#556b2f',
u'darkorange': u'#ff8c00',
u'darkorchid': u'#9932cc',
u'darkred': u'#8b0000',
u'darksalmon': u'#e9967a',
u'darkseagreen': u'#8fbc8f',
u'darkslateblue': u'#483d8b',
u'darkslategray': u'#2f4f4f',
u'darkslategrey': u'#2f4f4f',
u'darkturquoise': u'#00ced1',
u'darkviolet': u'#9400d3',
u'deeppink': u'#ff1493',
u'deepskyblue': u'#00bfff',
u'dimgray': u'#696969',
u'dimgrey': u'#696969',
u'dodgerblue': u'#1e90ff',
u'firebrick': u'#b22222',
u'floralwhite': u'#fffaf0',
u'forestgreen': u'#228b22',
u'fuchsia': u'#ff00ff',
u'gainsboro': u'#dcdcdc',
u'ghostwhite': u'#f8f8ff',
u'gold': u'#ffd700',
u'goldenrod': u'#daa520',
u'gray': u'#808080',
u'grey': u'#808080',
u'green': u'#008000',
u'greenyellow': u'#adff2f',
u'honeydew': u'#f0fff0',
u'hotpink': u'#ff69b4',
u'indianred': u'#cd5c5c',
u'indigo': u'#4b0082',
u'ivory': u'#fffff0',
u'khaki': u'#f0e68c',
u'lavender': u'#e6e6fa',
u'lavenderblush': u'#fff0f5',
u'lawngreen': u'#7cfc00',
u'lemonchiffon': u'#fffacd',
u'lightblue': u'#add8e6',
u'lightcoral': u'#f08080',
u'lightcyan': u'#e0ffff',
u'lightgoldenrodyellow': u'#fafad2',
u'lightgray': u'#d3d3d3',
u'lightgrey': u'#d3d3d3',
u'lightgreen': u'#90ee90',
u'lightpink': u'#ffb6c1',
u'lightsalmon': u'#ffa07a',
u'lightseagreen': u'#20b2aa',
u'lightskyblue': u'#87cefa',
u'lightslategray': u'#778899',
u'lightslategrey': u'#778899',
u'lightsteelblue': u'#b0c4de',
u'lightyellow': u'#ffffe0',
u'lime': u'#00ff00',
u'limegreen': u'#32cd32',
u'linen': u'#faf0e6',
u'magenta': u'#ff00ff',
u'maroon': u'#800000',
u'mediumaquamarine': u'#66cdaa',
u'mediumblue': u'#0000cd',
u'mediumorchid': u'#ba55d3',
u'mediumpurple': u'#9370db',
u'mediumseagreen': u'#3cb371',
u'mediumslateblue': u'#7b68ee',
u'mediumspringgreen': u'#00fa9a',
u'mediumturquoise': u'#48d1cc',
u'mediumvioletred': u'#c71585',
u'midnightblue': u'#191970',
u'mintcream': u'#f5fffa',
u'mistyrose': u'#ffe4e1',
u'moccasin': u'#ffe4b5',
u'navajowhite': u'#ffdead',
u'navy': u'#000080',
u'oldlace': u'#fdf5e6',
u'olive': u'#808000',
u'olivedrab': u'#6b8e23',
u'orange': u'#ffa500',
u'orangered': u'#ff4500',
u'orchid': u'#da70d6',
u'palegoldenrod': u'#eee8aa',
u'palegreen': u'#98fb98',
u'paleturquoise': u'#afeeee',
u'palevioletred': u'#db7093',
u'papayawhip': u'#ffefd5',
u'peachpuff': u'#ffdab9',
u'peru': u'#cd853f',
u'pink': u'#ffc0cb',
u'plum': u'#dda0dd',
u'powderblue': u'#b0e0e6',
u'purple': u'#800080',
u'red': u'#ff0000',
u'rosybrown': u'#bc8f8f',
u'royalblue': u'#4169e1',
u'saddlebrown': u'#8b4513',
u'salmon': u'#fa8072',
u'sandybrown': u'#f4a460',
u'seagreen': u'#2e8b57',
u'seashell': u'#fff5ee',
u'sienna': u'#a0522d',
u'silver': u'#c0c0c0',
u'skyblue': u'#87ceeb',
u'slateblue': u'#6a5acd',
u'slategray': u'#708090',
u'slategrey': u'#708090',
u'snow': u'#fffafa',
u'springgreen': u'#00ff7f',
u'steelblue': u'#4682b4',
u'tan': u'#d2b48c',
u'teal': u'#008080',
u'thistle': u'#d8bfd8',
u'tomato': u'#ff6347',
u'turquoise': u'#40e0d0',
u'violet': u'#ee82ee',
u'wheat': u'#f5deb3',
u'white': u'#ffffff',
u'whitesmoke': u'#f5f5f5',
u'yellow': u'#ffff00',
u'yellowgreen': u'#9acd32',
}
# Mappings of normalized hexadecimal color values to color names.
#################################################################
HTML4_HEX_TO_NAMES = _reversedict(HTML4_NAMES_TO_HEX)
CSS2_HEX_TO_NAMES = HTML4_HEX_TO_NAMES
CSS21_HEX_TO_NAMES = _reversedict(CSS21_NAMES_TO_HEX)
CSS3_HEX_TO_NAMES = _reversedict(CSS3_NAMES_TO_HEX)
# Aliases of the above mappings, for backwards compatibility.
#################################################################
(html4_names_to_hex,
css2_names_to_hex,
css21_names_to_hex,
css3_names_to_hex) = (HTML4_NAMES_TO_HEX,
CSS2_NAMES_TO_HEX,
CSS21_NAMES_TO_HEX,
CSS3_NAMES_TO_HEX)
(html4_hex_to_names,
css2_hex_to_names,
css21_hex_to_names,
css3_hex_to_names) = (HTML4_HEX_TO_NAMES,
CSS2_HEX_TO_NAMES,
CSS21_HEX_TO_NAMES,
CSS3_HEX_TO_NAMES)
# Normalization functions.
#################################################################
def normalize_hex(hex_value):
    """
    Normalize a hexadecimal color value to 6 digits, lowercase.

    Accepts both the shorthand three-digit form ("#09c") and the full
    six-digit form ("#0099CC"); raises ValueError for anything else.
    """
    # The pattern is inlined here (it mirrors the module-level
    # HEX_COLOR_RE) so the function is self-contained.
    match = re.match(r'^#([a-fA-F0-9]{3}|[a-fA-F0-9]{6})$', hex_value)
    if match is None:
        raise ValueError(
            u"'%s' is not a valid hexadecimal color value." % hex_value
        )
    digits = match.group(1)
    if len(digits) == 3:
        # Expand the shorthand by doubling each digit: "09c" -> "0099cc".
        digits = u''.join(2 * digit for digit in digits)
    return u'#%s' % digits.lower()
def _normalize_integer_rgb(value):
"""
Internal normalization function for clipping integer values into
the permitted range (0-255, inclusive).
"""
return 0 if value < 0 \
else 255 if value > 255 \
else value
def normalize_integer_triplet(rgb_triplet):
    """
    Normalize an integer ``rgb()`` triplet so that all values are
    within the range 0-255 inclusive.
    """
    # Clamp each channel into [0, 255]; equivalent to mapping
    # _normalize_integer_rgb over the triplet.
    return tuple(min(255, max(0, value)) for value in rgb_triplet)
def _normalize_percent_rgb(value):
"""
Internal normalization function for clipping percent values into
the permitted range (0%-100%, inclusive).
"""
percent = value.split(u'%')[0]
percent = float(percent) if u'.' in percent else int(percent)
return u'0%' if percent < 0 \
else u'100%' if percent > 100 \
else u'%s%%' % percent
def normalize_percent_triplet(rgb_triplet):
    """
    Normalize a percentage ``rgb()`` triplet so that all values are
    within the range 0%-100% inclusive.
    """
    return tuple(map(_normalize_percent_rgb, rgb_triplet))
# Conversions from color names to various formats.
#################################################################
def name_to_hex(name, spec=u'css3'):
    """
    Convert a color name to a normalized hexadecimal color value.

    The optional keyword argument ``spec`` determines which
    specification's list of color names will be used; valid values are
    ``html4``, ``css2``, ``css21`` and ``css3``, and the default is
    ``css3``.

    When no color of that name exists in the given specification,
    ``ValueError`` is raised.
    """
    if spec not in SUPPORTED_SPECIFICATIONS:
        raise ValueError(SPECIFICATION_ERROR_TEMPLATE % spec)
    # Pick the name->hex table for the requested spec, then look the
    # lowercased name up in it.
    tables = {u'css2': CSS2_NAMES_TO_HEX,
              u'css21': CSS21_NAMES_TO_HEX,
              u'css3': CSS3_NAMES_TO_HEX,
              u'html4': HTML4_NAMES_TO_HEX}
    hex_value = tables[spec].get(name.lower())
    if hex_value is None:
        raise ValueError(
            u"'%s' is not defined as a named color in %s." % (name, spec)
        )
    return hex_value
def name_to_rgb(name, spec=u'css3'):
    """
    Convert a color name to a 3-tuple of integers suitable for use in
    an ``rgb()`` triplet specifying that color.
    """
    hex_value = name_to_hex(name, spec=spec)
    return hex_to_rgb(hex_value)
def name_to_rgb_percent(name, spec=u'css3'):
    """
    Convert a color name to a 3-tuple of percentages suitable for use
    in an ``rgb()`` triplet specifying that color.
    """
    rgb_triplet = name_to_rgb(name, spec=spec)
    return rgb_to_rgb_percent(rgb_triplet)
# Conversions from hexadecimal color values to various formats.
#################################################################
def hex_to_name(hex_value, spec=u'css3'):
    """
    Convert a hexadecimal color value to its corresponding normalized
    color name, if any such name exists.

    The optional keyword argument ``spec`` determines which
    specification's list of color names will be used; valid values are
    ``html4``, ``css2``, ``css21`` and ``css3``, and the default is
    ``css3``.

    When no color name for the value is found in the given
    specification, ``ValueError`` is raised.
    """
    if spec not in SUPPORTED_SPECIFICATIONS:
        raise ValueError(SPECIFICATION_ERROR_TEMPLATE % spec)
    # Pick the hex->name table for the requested spec and look up the
    # normalized (6-digit lowercase) hex value.
    tables = {u'css2': CSS2_HEX_TO_NAMES,
              u'css21': CSS21_HEX_TO_NAMES,
              u'css3': CSS3_HEX_TO_NAMES,
              u'html4': HTML4_HEX_TO_NAMES}
    name = tables[spec].get(normalize_hex(hex_value))
    if name is None:
        raise ValueError(
            u"'%s' has no defined color name in %s." % (hex_value, spec)
        )
    return name
def hex_to_rgb(hex_value):
    """
    Convert a hexadecimal color value to a 3-tuple of integers
    suitable for use in an ``rgb()`` triplet specifying that color.
    """
    normalized = normalize_hex(hex_value)
    # Parse each two-digit channel straight out of the "#rrggbb" string.
    return tuple(int(normalized[i:i + 2], 16) for i in (1, 3, 5))
def hex_to_rgb_percent(hex_value):
    """
    Convert a hexadecimal color value to a 3-tuple of percentages
    suitable for use in an ``rgb()`` triplet representing that color.
    """
    rgb_triplet = hex_to_rgb(hex_value)
    return rgb_to_rgb_percent(rgb_triplet)
# Conversions from integer rgb() triplets to various formats.
#################################################################
def rgb_to_name(rgb_triplet, spec=u'css3'):
    """
    Convert a 3-tuple of integers, suitable for use in an ``rgb()``
    color triplet, to its corresponding normalized color name, if any
    such name exists.

    The optional keyword argument ``spec`` determines which
    specification's list of color names will be used; valid values are
    ``html4``, ``css2``, ``css21`` and ``css3``, and the default is
    ``css3``.

    If there is no matching name, ``ValueError`` is raised.
    """
    normalized = normalize_integer_triplet(rgb_triplet)
    return hex_to_name(rgb_to_hex(normalized), spec=spec)
def rgb_to_hex(rgb_triplet):
    """
    Convert a 3-tuple of integers, suitable for use in an ``rgb()``
    color triplet, to a normalized hexadecimal value for that color.
    """
    # Clamp each channel into [0, 255] (exactly what
    # normalize_integer_triplet does), then render lowercase "#rrggbb".
    red, green, blue = (min(255, max(0, value)) for value in rgb_triplet)
    return u'#{0:02x}{1:02x}{2:02x}'.format(red, green, blue)
def rgb_to_rgb_percent(rgb_triplet):
    """
    Convert a 3-tuple of integers, suitable for use in an ``rgb()``
    color triplet, to a 3-tuple of percentages suitable for use in
    representing that color.

    This function makes some trade-offs in terms of the accuracy of
    the final representation; for some common integer values,
    special-case logic is used to ensure a precise result (e.g.,
    integer 128 will always convert to '50%', integer 32 will always
    convert to '12.5%'), but for all other values a standard Python
    ``float`` is used and rounded to two decimal places, which may
    result in a loss of precision for some values.
    """
    # Exact strings for common values that would otherwise pick up
    # rounding noise.
    exact = {255: u'100%', 128: u'50%', 64: u'25%',
             32: u'12.5%', 16: u'6.25%', 0: u'0%'}

    def to_percent(value):
        # Clamp as normalize_integer_triplet does.
        value = min(255, max(0, value))
        if value in exact:
            return exact[value]
        return u'%.02f%%' % (value / 255.0 * 100)

    return tuple(to_percent(value) for value in rgb_triplet)
# Conversions from percentage rgb() triplets to various formats.
#################################################################
def rgb_percent_to_name(rgb_percent_triplet, spec=u'css3'):
    """
    Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
    color triplet, to its corresponding normalized color name, if any
    such name exists.

    The optional keyword argument ``spec`` determines which
    specification's list of color names will be used; valid values are
    ``html4``, ``css2``, ``css21`` and ``css3``, and the default is
    ``css3``.

    If there is no matching name, ``ValueError`` is raised.
    """
    normalized = normalize_percent_triplet(rgb_percent_triplet)
    return rgb_to_name(rgb_percent_to_rgb(normalized), spec=spec)
def rgb_percent_to_hex(rgb_percent_triplet):
    """
    Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
    color triplet, to a normalized hexadecimal color value for that
    color.
    """
    normalized = normalize_percent_triplet(rgb_percent_triplet)
    return rgb_to_hex(rgb_percent_to_rgb(normalized))
def _percent_to_integer(percent):
"""
Internal helper for converting a percentage value to an integer
between 0 and 255 inclusive.
"""
num = float(percent.split(u'%')[0]) / 100 * 255
e = num - math.floor(num)
return e < 0.5 and int(math.floor(num)) or int(math.ceil(num))
def rgb_percent_to_rgb(rgb_percent_triplet):
    """
    Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
    color triplet, to a 3-tuple of integers suitable for use in
    representing that color.

    Some precision may be lost in this conversion. See the note
    regarding precision for ``rgb_to_rgb_percent()`` for details.
    """
    normalized = normalize_percent_triplet(rgb_percent_triplet)
    return tuple(_percent_to_integer(value) for value in normalized)
# HTML5 color algorithms.
#################################################################
# These functions are written in a way that may seem strange to
# developers familiar with Python, because they do not use the most
# efficient or idiomatic way of accomplishing their tasks. This is
# because, for compliance, these functions are written as literal
# translations into Python of the algorithms in HTML5.
#
# For ease of understanding, the relevant steps of the algorithm from
# the standard are included as comments interspersed in the
# implementation.
def html5_parse_simple_color(input):
    """
    Apply the simple color parsing algorithm from section 2.4.6 of
    HTML5.

    The input must be a Unicode string of exactly the form "#rrggbb";
    the return value is an (r, g, b) tuple of ints in 0-255.  Raises
    ValueError for any other input.  (The body deliberately mirrors the
    spec's steps one-for-one; the numbered comments are the spec text.)
    """
    # 1. Let input be the string being parsed.
    #
    # 2. If input is not exactly seven characters long, then return an
    #    error.
    if not isinstance(input, unicode) or len(input) != 7:
        raise ValueError(
            u"An HTML5 simple color must be a Unicode string "
            u"exactly seven characters long."
        )

    # 3. If the first character in input is not a U+0023 NUMBER SIGN
    #    character (#), then return an error.
    if not input.startswith('#'):
        raise ValueError(
            u"An HTML5 simple color must begin with the "
            u"character '#' (U+0023)."
        )

    # 4. If the last six characters of input are not all ASCII hex
    #    digits, then return an error.
    if not all(c in string.hexdigits for c in input[1:]):
        raise ValueError(
            u"An HTML5 simple color must contain exactly six ASCII hex digits."
        )

    # 5. Let result be a simple color.
    #
    # 6. Interpret the second and third characters as a hexadecimal
    #    number and let the result be the red component of result.
    #
    # 7. Interpret the fourth and fifth characters as a hexadecimal
    #    number and let the result be the green component of result.
    #
    # 8. Interpret the sixth and seventh characters as a hexadecimal
    #    number and let the result be the blue component of result.
    #
    # 9. Return result.
    result = (int(input[1:3], 16),
              int(input[3:5], 16),
              int(input[5:7], 16))
    return result
def html5_serialize_simple_color(simple_color):
    """
    Apply the serialization algorithm for a simple color from section
    2.4.6 of HTML5.
    """
    red, green, blue = simple_color
    # HTML5 serialization: a "#" followed by the three components as
    # two-digit lowercase hexadecimal numbers, in red, green, blue order.
    return u'#' + u''.join((u'%02x' % component).lower()
                           for component in (red, green, blue))
def html5_parse_legacy_color(input):
"""
Apply the legacy color parsing algorithm from section 2.4.6 of
HTML5.
"""
# 1. Let input be the string being parsed.
if not isinstance(input, unicode):
raise ValueError(
u"HTML5 legacy color parsing requires a Unicode string as input."
)
# 2. If input is the empty string, then return an error.
if input == "":
raise ValueError(
u"HTML5 legacy color parsing forbids empty string as a value."
)
# 3. Strip leading and trailing whitespace from input.
input = input.strip()
# 4. If input is an ASCII case-insensitive match for the string
# "transparent", then return an error.
if input.lower() == u"transparent":
raise ValueError(
u'HTML5 legacy color parsing forbids "transparent" as a value.'
)
# 5. If input is an ASCII case-insensitive match for one of the
# keywords listed in the SVG color keywords section of the CSS3
# Color specification, then return the simple color
# corresponding to that keyword.
keyword_hex = CSS3_NAMES_TO_HEX.get(input.lower())
if keyword_hex is not None:
return html5_parse_simple_color(keyword_hex)
# 6. If input is four characters long, and the first character in
# input is a "#" (U+0023) character, and the last three
# characters of input are all ASCII hex digits, then run these
# substeps:
if len(input) == 4 and \
input.startswith(u'#') and \
all(c in string.hexdigits for c in input[1:]):
# 1. Let result be a simple color.
#
# 2. Interpret the second character of input as a hexadecimal
# digit; let the red component of result be the resulting
# number multiplied by 17.
#
# 3. Interpret the third character of input as a hexadecimal
# digit; let the green component of result be the resulting
# number multiplied by 17.
#
# 4. Interpret the fourth character of input as a hexadecimal
# digit; let the blue component of result be the resulting
# number multiplied by 17.
result = (int(input[1], 16) * 17,
int(input[2], 16) * 17,
int(input[3], 16) * 17)
# 5. Return result.
return result
# 7. Replace any characters in input that have a Unicode code
# point greater than U+FFFF (i.e. any characters that are not
# in the basic multilingual plane) with the two-character
# string "00".
# This one's a bit weird due to the existence of multiple internal
# Unicode string representations in different versions and builds
# of Python.
#
# From Python 2.2 through 3.2, Python could be compiled with
# "narrow" or "wide" Unicode strings (see PEP 261). Narrow builds
# handled Unicode strings with two-byte characters and surrogate
# pairs for non-BMP code points. Wide builds handled Unicode
# strings with four-byte characters and no surrogates. This means
# ord() is only sufficient to identify a non-BMP character on a
# wide build.
#
# Starting with Python 3.3, the internal string representation
# (see PEP 393) is now dynamic, and Python chooses an encoding --
# either latin-1, UCS-2 or UCS-4 -- wide enough to handle the
# highest code point in the string.
#
# The code below bypasses all of that for a consistently effective
# method: encode the string to little-endian UTF-32, then perform
# a binary unpack of it as four-byte integers. Those integers will
# be the Unicode code points, and from there filtering out non-BMP
# code points is easy.
encoded_input = input.encode('utf_32_le')
# Format string is '<' (for little-endian byte order), then a
# sequence of 'L' characters (for 4-byte unsigned long integer)
# equal to the length of the original string, which is also
# one-fourth the encoded length. For example, for a six-character
# input the generated format string will be '<LLLLLL'.
format_string = '<' + ('L' * (int(len(encoded_input) / 4)))
codepoints = struct.unpack(format_string, encoded_input)
input = ''.join(u'00' if c > 0xffff
else unichr(c)
for c in codepoints)
# 8. If input is longer than 128 characters, truncate input,
# leaving only the first 128 characters.
if len(input) > 128:
input = input[:128]
# 9. If the first character in input is a "#" (U+0023) character,
# remove it.
if input.startswith(u'#'):
input = input[1:]
# 10. Replace any character in input that is not an ASCII hex
# digit with the character "0" (U+0030).
if any(c for c in input if c not in string.hexdigits):
input = ''.join(c if c in string.hexdigits else u'0' for c in input)
# 11. While input's length is zero or not a multiple of three,
# append a "0" (U+0030) character to input.
while (len(input) == 0) or (len(input) % 3 != 0):
input += u'0'
# 12. Split input into three strings of equal length, to obtain
# three components. Let length be the length of those
# components (one third the length of input).
length = int(len(input) / 3)
red = input[:length]
green = input[length:length*2]
blue = input[length*2:]
# 13. If length is greater than 8, then remove the leading
# length-8 characters in each component, and let length be 8.
if length > 8:
red, green, blue = (red[length-8:],
green[length-8:],
blue[length-8:])
length = 8
# 14. While length is greater than two and the first character in
# each component is a "0" (U+0030) character, remove that
# character and reduce length by one.
while (length > 2) and (red[0] == u'0' and
green[0] == u'0' and
blue[0] == u'0'):
red, green, blue = (red[1:],
green[1:],
blue[1:])
length -= 1
# 15. If length is still greater than two, truncate each
# component, leaving only the first two characters in each.
if length > 2:
red, green, blue = (red[:2],
green[:2],
blue[:2])
# 16. Let result be a simple color.
#
# 17. Interpret the first component as a hexadecimal number; let
# the red component of result be the resulting number.
#
# 18. Interpret the second component as a hexadecimal number; let
# the green component of result be the resulting number.
#
# 19. Interpret the third component as a hexadecimal number; let
# the blue component of result be the resulting number.
result = (int(red, 16),
int(green, 16),
int(blue, 16))
# 20. Return result.
return result
| gpl-3.0 | -3,413,213,957,325,096,000 | 31.844471 | 97 | 0.610484 | false |
hammerlab/avm | helpers.py | 1 | 2181 | from __future__ import print_function, division, absolute_import
import itertools
import numpy as np
from sklearn.metrics import roc_auc_score
def class_prob(model, X):
if hasattr(model, 'predict_proba'):
prob = model.predict_proba(X)
return prob[:, -1]
else:
pred = model.decision_function(X)
if len(pred.shape) > 1 and pred.shape[1] == 1:
pred = pred[:, 0]
assert len(pred.shape) == 1, pred.shape
return pred
def roc_auc(model, X, y):
p = class_prob(model, X)
return roc_auc_score(y, p)
def normalize(X_train, X_test):
Xm = X_train.mean(axis=0)
X_train = X_train - Xm
X_test = X_test - Xm
Xs = X_train.std(axis=0)
Xs[Xs == 0] = 1
X_train /= Xs
X_test /= Xs
return X_train, X_test
def all_combinations(param_grid):
keys = []
value_lists = []
for (key, value_list) in param_grid.items():
keys.append(key)
value_lists.append(value_list)
return [
{key: value for (key, value) in zip(keys, values)}
for values
in itertools.product(*value_lists)
]
def cv_indices_generator(n_samples, n_iters, sample_with_replacement=False):
"""
Generator returns (iteration, (train_indices, test_indices))
"""
for i in range(n_iters):
n_train = 2 * n_samples // 3
if sample_with_replacement:
# bootstrap sampling training sets which are 2/3 of the full data
train_indices = np.random.randint(0, n_samples, n_train)
train_indices_set = set(train_indices)
test_indices = np.array([i for i in range(n_samples) if i not in train_indices_set])
else:
all_indices = np.arange(n_samples)
np.random.shuffle(all_indices)
train_indices = all_indices[:n_train]
test_indices = all_indices[n_train:]
print("# total = %d, # train = %d, # test = %d, max train index = %d" % (
n_samples, len(train_indices), len(test_indices), max(train_indices)))
assert len(train_indices) + len(test_indices) == n_samples
yield (i, (train_indices, test_indices))
| apache-2.0 | -6,378,130,856,304,016,000 | 33.619048 | 96 | 0.585511 | false |
vagabondcoder/nm_tools | nm_listen.py | 1 | 8285 | #!/usr/bin/env python3
""" NetworkManager event listener and calls user scripts
Listens on DBus for NetworkManager events.
When an interface is coming up or coming down, user scripts will be
called.
"""
import dbus
from dbus.mainloop.glib import DBusGMainLoop
import sys
import socket
import struct
import gi.repository.GLib
import daemon
import subprocess
import os
import logging
import pidfile
bin_dir = os.environ['HOME'] + "/.config/nm_listen/" # location of user scripts to be called on NM changes
log_file = "/tmp/nm_listen.log" # logfile location
pid_file = "/tmp/nm_listen.pid" # pid file location
NM_DBUS_SERVICE = "org.freedesktop.NetworkManager"
NM_DBUS_DEVICE = "org.freedesktop.NetworkManager.Device"
NM_DBUS_IP4Config = "org.freedesktop.NetworkManager.IP4Config"
NM_DBUS_INTERFACE = "org.freedesktop.NetworkManager"
NM_DBUS_OPATH = "/org/freedesktop/NetworkManager"
NM_DBUS_SETTINGS = "org.freedesktop.NetworkManager.Settings"
NM_DBUS_SETTINGS_CONN = "org.freedesktop.NetworkManager.Settings.Connection"
DBUS_SERVICE = "org.freedesktop.DBus"
DBUS_PATH = "/org/freedesktop/DBus"
DBUS_PROPS_IFACE = "org.freedesktop.DBus.Properties"
device_states = { 0: "Unknown",
10: "Unmanaged",
20: "Unavailable",
30: "Disconnected",
40: "Prepare",
50: "Config",
60: "Need Auth",
70: "IP Config",
80: "IP Check",
90: "Secondaries",
100: "Activated",
110: "Deactivating",
120: "Failed" }
connectivity_states = { 0: "Unknown",
1: "None",
2: "Portal",
3: "Limited",
4: "Full" }
nm_states = { 0: "Unknown",
10: "Asleep",
20: "Disconnected",
30: "Disconnecting",
40: "Connecting",
50: "Connected local",
60: "Connected site",
70: "Connected global" }
DBusGMainLoop(set_as_default=True)
class utils( object ):
def bytes_to_python( bs ):
return bytes("",'ascii').join(bs).decode('utf-8')
def mac_to_python(mac):
return "%02X:%02X:%02X:%02X:%02X:%02X" % tuple([ord(x) for x in mac])
def addr_to_python(addr):
return socket.inet_ntoa(struct.pack('I', addr))
def mask_to_python(mask):
return int(mask)
def addrconf_to_python(addrconf):
addr, netmask, gateway = addrconf
return [
utils.addr_to_python(addr),
utils.mask_to_python( netmask ),
utils.addr_to_python(gateway) ]
def DeviceAdded( dpath ):
""" Gets called when a device is added.
Just set the callback for state changes
"""
bus = dbus.SystemBus()
dobj = bus.get_object( NM_DBUS_SERVICE, dpath )
iface = dbus.Interface( dobj, dbus_interface=NM_DBUS_DEVICE )
iface.connect_to_signal( "StateChanged", DeviceStateChanged
, sender_keyword='sender'
,interface_keyword='iface'
,path_keyword='path'
,destination_keyword='dest'
,member_keyword='mem' )
dev_props = dbus.Interface( dobj, DBUS_PROPS_IFACE )
dev_type = dev_props.Get( NM_DBUS_DEVICE, "Interface" )
logging.info( "Tracking state changes on : %s " % dev_type )
def NewConnection( cpath ):
"""NM has a new connection. Just keep track of updates
"""
bus = dbus.SystemBus()
conn_obj = bus.get_object( NM_DBUS_SERVICE, cpath )
iface = dbus.Interface( conn_obj, dbus_interface=NM_DBUS_SETTINGS_CONN )
c_settings = iface.GetSettings()
conn = c_settings['connection']
conn_id = conn['id']
logging.info( "Got new connection '%s'" % conn_id )
#if 'autoconnect' in conn:
c_settings[ 'connection' ][ 'autoconnect' ] = False;
logging.info( "Turning off autoconnect for %s" % conn_id )
iface.Update( c_settings )
def DeviceStateChanged(new_state, old_state, reason, sender=None, iface=None, mem=None, path=None, dest=None):
""" The state of one of the devices changed.
If the state is one of the ones we care about, call the user scripts.
If is an interface coming up, add some additional environment
variables.
"""
bus = dbus.SystemBus()
logging.debug( "DeviceStateChanged: '%s' : '%s' : '%s'" %
( device_states[ new_state ]
, device_states[ old_state ]
, reason ) )
try:
dobj = bus.get_object( NM_DBUS_SERVICE, path )
except dbus.exceptions.DBusException as e:
logging.warning( "Error in DeviceStateChanged : ", e )
return
action = None
if new_state == 40: # prepare the connection
action = 'pre-up'
elif new_state == 100: #activated
action = 'up'
elif new_state == 110: #disconnecting
action = 'pre-down'
elif new_state == 30: #disconnected
action = 'down'
if action != None:
# update the environment that will passed into the script
dev_props = dbus.Interface( dobj, DBUS_PROPS_IFACE )
dev_name = dev_props.Get( NM_DBUS_DEVICE, "Interface" )
env = os.environ.copy()
env[ 'DEVICE_IFACE' ] = dev_name
env[ 'DEVICE_IP_IFACE' ] = dev_props.Get( NM_DBUS_DEVICE, "IpInterface" )
if new_state == 100: # activated
ip4ConfigPath = dev_props.Get( NM_DBUS_DEVICE, "Ip4Config" )
ip4Config = bus.get_object( NM_DBUS_SERVICE, ip4ConfigPath )
ip4_props = dbus.Interface( ip4Config, DBUS_PROPS_IFACE )
addrs_dbus = ip4_props.Get( NM_DBUS_IP4Config, "Addresses" )
addrs = [ utils.addrconf_to_python(addr) for addr in addrs_dbus ]
# NM sets these environment variables as well
env[ 'IP4_NUM_ADDRESSES' ] = str( len( addrs ) )
for i in range( 0, len(addrs) ):
a = "%s/%d %s" % ( addrs[i][0], addrs[i][1], addrs[i][2] )
logging.debug( 'Address : %s ' % a )
env[ 'IP4_ADDRESS_%d' % i ] = a
routes = ip4_props.Get( NM_DBUS_IP4Config, 'Routes' )
env[ 'IP4_NUM_ROUTES' ] = str( len( routes ) )
for i in range( 0, len(routes) ):
env[ 'IP4_ROUTE_%d' % i ] = str( routes[i] )
domains_dbus = ip4_props.Get( NM_DBUS_IP4Config, 'Domains' )
domains = [ str(d) for d in domains_dbus ]
env[ 'IP4_DOMAINS' ] = ' '.join(domains)
logging.info( "Action: %s %s" % ( action, dev_name ) )
# actually call the user scripts
files = os.listdir( bin_dir )
files.sort()
for f in files:
full = bin_dir+f
if os.access( full, os.X_OK ):
logging.info( "Running: %s %s %s" % ( full, dev_name, action ) )
subprocess.Popen( [ full, dev_name, action ], env=env )
else:
logging.warning( "Can't execute %s', skipping" % full )
def initialize():
""" Go through the devices and add them so we can listen
for state changes.
"""
try:
logging.info( "Initializing" )
bus = dbus.SystemBus()
nm_obj = bus.get_object(NM_DBUS_SERVICE, NM_DBUS_OPATH)
ds = nm_obj.GetDevices()
for dpath in ds:
DeviceAdded( dpath )
logging.info( "Initialized" )
except dbus.exceptions.DBusException as e:
# this isn't probably a problem. If NM isn't on then
# this exception will trigger. When it comes back
# then DeviceAdded will get called by the signal handler
logging.warn( "Failed to initialize : ", e )
def listen():
"""This just sets up all the callbacks and then
loops on DBus events.
"""
bus = dbus.SystemBus()
bus.add_signal_receiver( DeviceAdded
, dbus_interface=NM_DBUS_INTERFACE
, signal_name="DeviceAdded" )
bus.add_signal_receiver( NewConnection
, dbus_interface=NM_DBUS_SETTINGS
, signal_name="NewConnection" )
initialize()
loop = gi.repository.GLib.MainLoop()
try:
loop.run()
except KeyboardInterrupt:
print( "Keyboard interrupt received...shuting down..." )
loop.quit()
sys.exit(0)
except SystemExit:
logging.info( "Quitting listen.", flush=True )
loop.quit()
sys.exit(0)
except Exception as e:
logging.warning( "Quitting listen.", e, flush=True )
def stop_daemon():
pfile=pidfile.PidFile( pid_file )
pfile.kill()
def restart_daemon():
stop_daemon()
start_daemon()
def start_daemon():
f = open( log_file, "a+" )
context = daemon.DaemonContext( stderr=f, stdout=f, pidfile=pidfile.PidFile( pid_file ) )
try:
context.open()
listen()
except SystemExit as e:
logging.warning( "Quitting : %s " % str( e ), flush=True )
except Exception as e:
logging.warning( "Quitting : %s " % str( e ), flush=True )
context.close()
f.close()
if __name__ == "__main__":
logging.basicConfig( format='%(levelname)s:%(message)s', level=logging.DEBUG )
if len( sys.argv ) > 1 and sys.argv[1] == "-l":
listen()
else:
if len( sys.argv ) > 1:
if sys.argv[1] == "stop":
stop_daemon()
elif sys.argv[1] == "restart":
restart_daemon()
elif sys.argv[1] == "start":
start_daemon()
else:
print( "Usage : %s [-l |start|stop|restart]" % sys.argv[0] )
else:
start_daemon()
| gpl-2.0 | -1,890,240,044,035,423,000 | 28.275618 | 110 | 0.672179 | false |
mensi/pyggi | pyggi/lib/filters.py | 1 | 3924 | # -*- coding: utf-8 -*-
"""
:copyright: (c) 2011 by Tobias Heinzen
:license: BSD, see LICENSE for more details
"""
import time
html_escape_table = {
"&": "&",
'"': """,
"'": "'",
">": ">",
"<": "<",
}
def force_unicode(txt):
try:
return unicode(txt)
except UnicodeDecodeError:
pass
orig = txt
if type(txt) != str:
txt = str(txt)
for args in [('utf-8',), ('latin1',), ('ascii', 'replace')]:
try:
return txt.decode(*args)
except UnicodeDecodeError:
pass
raise ValueError("Unable to force %s object %r to unicode" % (type(orig).__name__, orig))
def format_datetime(value, format='iso8601'):
# convert format to iso8601 compliant
if format == 'iso8601':
format = "%Y-%m-%d %H:%M:%S"
# convert format to iso8601 compliant (full)
if format == 'iso8601-full':
format = "%a %b %d %H:%M:%S %Z %Y"
# if we have a timestamp, we have to convert it
# to a time_struct
if isinstance(value, int):
from datetime import datetime
value = datetime.fromtimestamp(value).timetuple()
return time.strftime(format, value)
def format_diff(value):
# escape HTML, because format_diff shall be used with 'safe'
value = unicode(value, 'utf-8') # correct?
value = "".join(html_escape_table.get(c,c) for c in value)
if value.startswith("+") and not value.startswith("+++"):
return '<li class="diff-add">%s </li>' % value
elif value.startswith("-") and not value.startswith("---"):
return '<li class="diff-remove">%s </li>' % value
elif value.startswith("@@"):
return '<li class="diff-change">%s </li>' % value
return '<li>%s</li>' % value
def humanize_timesince(when):
import datetime
# convert when to datetime
if type(when) == int:
when = datetime.datetime.utcfromtimestamp(when)
else:
when = datetime.datetime(*when[:6])
now = datetime.datetime.utcnow()
difference = now - when
if difference < datetime.timedelta(minutes=2):
return "%s seconds ago" % difference.seconds
elif difference < datetime.timedelta(hours=2):
return "%s minutes ago" % (difference.seconds / 60)
elif difference < datetime.timedelta(days=2):
return "%s hours ago" % (difference.days * 24 + difference.seconds / 3600)
elif difference < datetime.timedelta(days=2*7):
return "%s days ago" % difference.days
elif difference < datetime.timedelta(days=2*30):
return "%s weeks ago" % (difference.days / 7)
elif difference < datetime.timedelta(days=2*365):
return "%s months ago" % (difference.days / 30)
else:
return "%s years ago" % (difference.days / 365)
def is_text(mimetype):
"""
determine if a mimetype holds printable text (ascii)
"""
# all text documents
if mimetype.startswith("text/"):
return True
# xml/html/xhtml documents
if mimetype.startswith("application/") and \
(mimetype.find("html") != -1 or mimetype.find("xml") != -1):
return True
# javascript documents
if mimetype == "application/javascript":
return True
return False
def first_line(string):
string = string.replace('\r', '\n', 1)
try:
return string[:string.index('\n')]
except ValueError:
return string
def static_url_for(filename):
from flask import url_for, request
from config import config
import urllib
url_base = request.environ.get('wsgiorg.routing_args', ([], {}))[1].get('static_url_base')
if not url_base and config.has_option('general', 'static_url_base'):
url_base = config.get('general', 'static_url_base')
if url_base:
return url_base.rstrip('/') + '/' + urllib.quote(filename)
else:
return url_for('static', filename=filename)
| bsd-3-clause | 1,785,850,873,471,814,100 | 29.65625 | 94 | 0.602701 | false |
lorensen/VTKExamples | src/Python/GeometricObjects/Quad.py | 1 | 1673 | #!/usr/bin/env python
import vtk
def main():
colors = vtk.vtkNamedColors()
# Create four points (must be in counter clockwise order)
p0 = [0.0, 0.0, 0.0]
p1 = [1.0, 0.0, 0.0]
p2 = [1.0, 1.0, 0.0]
p3 = [0.0, 1.0, 0.0]
# Add the points to a vtkPoints object
points = vtk.vtkPoints()
points.InsertNextPoint(p0)
points.InsertNextPoint(p1)
points.InsertNextPoint(p2)
points.InsertNextPoint(p3)
# Create a quad on the four points
quad = vtk.vtkQuad()
quad.GetPointIds().SetId(0, 0)
quad.GetPointIds().SetId(1, 1)
quad.GetPointIds().SetId(2, 2)
quad.GetPointIds().SetId(3, 3)
# Create a cell array to store the quad in
quads = vtk.vtkCellArray()
quads.InsertNextCell(quad)
# Create a polydata to store everything in
polydata = vtk.vtkPolyData()
# Add the points and quads to the dataset
polydata.SetPoints(points)
polydata.SetPolys(quads)
# Setup actor and mapper
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputData(polydata)
actor = vtk.vtkActor()
actor.SetMapper(mapper)
actor.GetProperty().SetColor(colors.GetColor3d("Silver"))
# Setup render window, renderer, and interactor
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.SetWindowName("Quad")
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
renderer.AddActor(actor)
renderer.SetBackground(colors.GetColor3d("Salmon"))
renderWindow.Render()
renderWindowInteractor.Start()
if __name__ == '__main__':
main()
| apache-2.0 | -8,918,097,045,932,373,000 | 25.983871 | 61 | 0.676031 | false |
pacoqueen/bbinn | formularios/resultados_cemento.py | 1 | 30261 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2008 Francisco José Rodríguez Bogado, #
# Diego Muñoz Escalante. #
# ([email protected], [email protected]) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
###################################################################
## resultados_fibra.py - Resultados de pruebas de fibra.
###################################################################
## NOTAS:
##
###################################################################
## Changelog:
## 18 de mayo de 2006 -> Inicio
## 19 de mayo de 2006 -> Testing
##
###################################################################
## FIXME:
## - Al salir con el evento destroy (bolaspa) pregunta dos veces si
## quiere salir y la segunda vez ignora la respuesta.
##
###################################################################
## NOTAS: Se reusa la misma ventana (glade) de resultados de fibra,
## todo lo relacionado con rizo es humedad en la fibra de cemento.
###################################################################
from ventana import Ventana
import utils
import pygtk
pygtk.require('2.0')
import gtk, gtk.glade, time, sqlobject
try:
import pclases
except ImportError:
import sys
from os.path import join as pathjoin; sys.path.append(pathjoin("..", "framework"))
import pclases
try:
import geninformes
except ImportError:
import sys
sys.path.append('../informes')
import geninformes
from utils import _float as float
from resultados_fibra import comprobar_y_preguntar_si_guardar
class ResultadosFibra(Ventana):
    def __init__(self, objeto = None, usuario = None):
        """
        Constructor.

        objeto: optional pclases object (a LoteCem) to open the window
            with; otherwise the window starts with no lote selected.
        usuario: logged-in user; used later to apply per-window
            permissions (see activar_widgets).

        NOTE: this call blocks in gtk.main() until the window is closed.
        """
        self.usuario = usuario
        # The glade file is shared with the fibre-results window (see the
        # module header notes): "rizo" widgets are reused for humidity here.
        Ventana.__init__(self, 'resultados_fibra.glade', objeto)
        connections = {'b_salir/clicked': self._salir,
                       'b_lote/clicked': self.set_loteCem,
                       'b_fecha/clicked': self.fecha,
                       'b_add/clicked': self.add,
                       'b_drop/clicked': self.drop,
                       'sp_tolerancia/value-changed': self.cambiar_tolerancia,
                       'b_guardar_obs/clicked': self.guardar_obs,
                       'b_imprimir/clicked': self.imprimir,
                       'ventana/delete_event': self._salir
                      }
        self.add_connections(connections)
        self.activar_widgets(False)
        self.inicializar_ventana()
        if objeto == None:
            self.loteCem = None
        else:
            self.loteCem = objeto
            self.actualizar_ventana()
        gtk.main()
    def _salir(self, *args, **kw):
        """
        Ask whether to save pending observation changes, then close the
        window via the inherited `salir`.
        """
        # NOTE (module header FIXME): closing through the window manager's
        # delete_event may trigger this question twice.
        comprobar_y_preguntar_si_guardar(self)
        self.salir(*args, **kw)
# --------------- Funciones auxiliares ------------------------------
def activar_widgets(self, valor):
self.ws = ('e_codigo',
'e_nombre',
'e_dtex',
'e_corte',
'e_color',
'e_tenacidad',
'e_elongacion',
'e_rizo',
'e_grasa',
'e_encogimiento',
'tv_pruebas',
'b_add',
'b_drop',
'b_fecha',
'e_media',
'e_desvtipica',
'sp_tolerancia',
'txt_observaciones',
'frame4')
for i in self.ws:
self.wids[i].set_sensitive(valor)
if self.usuario:
try:
ventana = pclases.Ventana.select(pclases.Ventana.q.fichero == "resultados_cemento.py")[0] # OJO: HARCODED
except IndexError:
txt = "resultados_fibra::activar_widgets -> Ventana no encontrada en BD."
self.logger.error(txt)
print txt
else:
permiso = self.usuario.get_permiso(ventana)
if not permiso.escritura and self.usuario.nivel > 1:
self.wids['tv_pruebas'].set_sensitive(False)
self.wids['txt_observaciones'].set_sensitive(False)
if not permiso.nuevo and self.usuario.nivel > 1:
self.wids['b_add'].set_sensitive(False)
def crear_listview(self, tv):
cols = (('Fecha', 'gobject.TYPE_STRING', True, True, True, self.cambiar_fecha),
('Título (DTEX)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_titulo),
('Alargamiento (%)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_alargamiento),
('Tenacidad (cN/tex)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_tenacidad),
('Grasa (%)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_grasa),
('Encogimiento (%)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_encogimiento),
('Humedad (%)', 'gobject.TYPE_STRING', True, True, False, self.cambiar_humedad),
('ID', 'gobject.TYPE_STRING', False, False, False, None)) # Contiene los ID de los resultados separados por ','
utils.preparar_listview(tv, cols)
tv.get_column(1).get_cell_renderers()[0].set_property('xalign', 0.1)
tv.get_column(2).get_cell_renderers()[0].set_property('xalign', 0.1)
tv.get_column(3).get_cell_renderers()[0].set_property('xalign', 0.1)
tv.get_column(4).get_cell_renderers()[0].set_property('xalign', 0.1)
tv.get_column(5).get_cell_renderers()[0].set_property('xalign', 0.1)
tv.get_column(6).get_cell_renderers()[0].set_property('xalign', 0.1)
    def inicializar_ventana(self):
        """
        One-time widget setup: build the results TreeView, hide the date
        button and relabel the shared "rizo" widget as humidity (the glade
        file comes from the fibre-results window; see module header notes).
        """
        self.crear_listview(self.wids['tv_pruebas'])
        self.wids['b_fecha'].set_property("visible", False)
        self.wids['l_rizo'].set_label("Humedad: ")
        # Any edit in the observations buffer re-enables the save button.
        self.wids['txt_observaciones'].get_buffer().connect("changed", lambda txtbuffer: self.wids['b_guardar_obs'].set_sensitive(True))
def func_sort(self, t1, t2):
if t1[0] < t2[0]:
return -1
elif t1[0] > t2[0]:
return 1
else:
return 0
def preparar_pruebas(self):
"""
Devuelve una lista de listas que contiene las pruebas ordenadas del
loteCem por fecha de la forma: [(fecha, prueba título, ..., "id0,id1,...id5")]
"""
res = []
for p in self.loteCem.pruebasTitulo:
res.append([p.fecha, p.resultado, None, None, None, None, None, [p.id, 0, 0, 0, 0, 0]])
for p in self.loteCem.pruebasElongacion:
puesto = False
for fila in res:
if p.fecha == fila[0] and fila[2] == None: # Hay hueco en la fecha
fila[2] = p.resultado
fila[-1][1] = p.id
puesto = True
break
if not puesto:
res.append([p.fecha, None, p.resultado, None, None, None, None, [0, p.id, 0, 0, 0, 0]])
for p in self.loteCem.pruebasTenacidad:
puesto = False
for fila in res:
if p.fecha == fila[0] and fila[3] == None: # Hay hueco en la fecha
fila[3] = p.resultado
fila[-1][2] = p.id
puesto = True
break
if not puesto:
res.append([p.fecha, None, None, p.resultado, None, None, None, [0, 0, p.id, 0, 0, 0]])
for p in self.loteCem.pruebasGrasa:
puesto = False
for fila in res:
if p.fecha == fila[0] and fila[4] == None: # Hay hueco en la fecha
fila[4] = p.resultado
fila[-1][3] = p.id
puesto = True
break
if not puesto:
res.append([p.fecha, None, None, None, p.resultado, None, None, [0, 0, 0, p.id, 0, 0]])
for p in self.loteCem.pruebasEncogimiento:
puesto = False
for fila in res:
if p.fecha == fila[0] and fila[5] == None: # Hay hueco en la fecha
fila[5] = p.resultado
fila[-1][4] = p.id
puesto = True
break
if not puesto:
res.append([p.fecha, None, None, None, None, p.resultado, None, [0, 0, 0, 0, p.id, 0]])
for p in self.loteCem.pruebasHumedad:
puesto = False
for fila in res:
if p.fecha == fila[0] and fila[6] == None: # Hay hueco en la fecha
fila[6] = p.resultado
fila[-1][5] = p.id
puesto = True
break
if not puesto:
res.append([p.fecha, None, None, None, None, None, p.resultado, [0, 0, 0, 0, 0, p.id]])
res.sort(self.func_sort)
res = [(utils.str_fecha(f[0]), \
f[1] and "%.2f" % f[1] or "", \
f[2] and "%.2f" % f[2] or "", \
f[3] and "%.2f" % f[3] or "", \
f[4] and "%.2f" % f[4] or "", \
f[5] and "%.2f" % f[5] or "", \
f[6] and "%.2f" % f[6] or "", \
','.join(map(str, f[7]))) for f in res]
return res
def rellenar_pruebas(self):
"""
Introduce en el treeview las pruebas del loteCem seleccionado y
recalcula la característica del loteCem.
"""
model = self.wids['tv_pruebas'].get_model()
model.clear()
self.calcular_caracteristicas()
pruebas = self.preparar_pruebas()
for prueba in pruebas:
model.append(prueba)
    def calcular_caracteristicas(self):
        """
        Compute the mean and population standard deviation of the titre
        tests, store the mean on the lote and refresh the tolerance
        marking plus the remaining aggregates and header info.
        """
        loteCem = self.loteCem
        # The tolerance reference (nominal dtex) depends on the product type:
        try:
            dtex = loteCem.bigbags[0].articulos[0].productoVenta.camposEspecificosBala.dtex
        except:
            # Broad on purpose: any failure walking the object chain
            # (empty lote, missing product data) aborts with a dialog.
            utils.dialogo_info(titulo = 'ERROR',
                               texto = 'Ocurrió un error buscando el tipo de fibra.',
                               padre = self.wids['ventana'])
            return
        mediatitulo = 0
        sumatorio = 0
        desvtipica = 0
        for p in loteCem.pruebasTitulo:
            mediatitulo += p.resultado
            sumatorio += p.resultado**2.0
        try:
            # Population variance via E[x^2] - mean^2, then square root.
            mediatitulo /= len(loteCem.pruebasTitulo)
            desvtipica = sumatorio / len(loteCem.pruebasTitulo)
            desvtipica -= mediatitulo**2.0
            desvtipica = desvtipica**0.5 # ValueError if the variance came out negative (shouldn't happen).
        except ZeroDivisionError:
            # No titre tests yet.
            mediatitulo = 0
            desvtipica = 0
        loteCem.mediatitulo = mediatitulo
        self.wids['e_desvtipica'].set_text("%.2f" % desvtipica)
        self.marcar_tolerancia(dtex, mediatitulo, loteCem.tolerancia)
        self.calcular_caracteristicas_propias()
        self.rellenar_info_loteCem()
    def calcular_elongacion(self):
        """
        Recompute the lote's elongation aggregate (delegates to
        LoteCem.update_valor).
        """
        loteCem = self.loteCem
        loteCem.update_valor("elongacion")
    def calcular_tenacidad(self):
        """
        Recompute the lote's tenacity aggregate (delegates to
        LoteCem.update_valor).
        """
        loteCem = self.loteCem
        loteCem.update_valor("tenacidad")
    def calcular_grasa(self):
        """
        Recompute the lote's grease aggregate (delegates to
        LoteCem.update_valor).
        """
        loteCem = self.loteCem
        # (A stale copy-pasted comment about elongation was removed here.)
        loteCem.update_valor("grasa")
    def calcular_encogimiento(self):
        """
        Recompute the lote's shrinkage aggregate (delegates to
        LoteCem.update_valor).
        """
        loteCem = self.loteCem
        loteCem.update_valor("encogimiento")
    def calcular_humedad(self):
        """
        Recompute the lote's moisture aggregate (delegates to
        LoteCem.update_valor).
        """
        loteCem = self.loteCem
        loteCem.update_valor("humedad")
def calcular_caracteristicas_propias(self):
self.calcular_elongacion()
self.calcular_tenacidad()
self.calcular_grasa()
self.calcular_encogimiento()
self.calcular_humedad()
self.rellenar_info_loteCem()
def marcar_tolerancia(self, dtex, mediatitulo, tolerancia):
self.wids['ruler'].set_sensitive(False)
diferencia = abs(mediatitulo - dtex)
try:
porcentaje = (diferencia * 100) / dtex # En formato 0 a 100 porque las posiciones del ruler son de -100 a 100
except ZeroDivisionError: # El DTEX del artículo es 0.
porcentaje = 0.0
if mediatitulo < dtex:
porcentaje *= -1
self.wids['ruler'].set_property('position', porcentaje)
difmax = dtex * tolerancia
if round(diferencia,2) > difmax:
self.wids['e_media'].modify_base(gtk.STATE_NORMAL, self.wids['e_media'].get_colormap().alloc_color("red"))
else:
self.wids['e_media'].modify_base(gtk.STATE_NORMAL, self.wids['e_media'].get_colormap().alloc_color("green"))
self.colorear(self.wids['tv_pruebas'], dtex, difmax)
    def colorear(self, tv, dtex, diferencia):
        """
        Install a cell-data function on the titre column (column 1) that
        colours each result green or red depending on whether it deviates
        from the nominal `dtex` by more than `diferencia` (the maximum
        allowed absolute difference); empty cells stay white.
        """
        def cell_func(col, cell, model, itr, (dtex, dif)):
            resultado = model[itr][1].replace(" ", "")
            if resultado != "":
                # NOTE: `float` here is utils._float (module-level alias
                # imported at the top of the file), not the builtin.
                resultado = float(resultado)
                if round(abs(resultado-dtex),2) > dif:
                    color = "red"
                else:
                    color = "green"
                cell.set_property("text", "%.2f" % resultado)
            else:
                color = "white"
                cell.set_property("text", "")
            cell.set_property("cell-background", color)
        cols = tv.get_columns()
        col = cols[1]
        cells = col.get_cell_renderers()
        for cell in cells:
            col.set_cell_data_func(cell, cell_func, (dtex, diferencia))
def actualizar_ventana(self):
"""
Método que sobreescribe el "actualizar_ventana" que hereda de la clase ventana.
PRECONDICION: self.loteCem no puede ser None
"""
try:
self.loteCem.sync()
self.rellenar_widgets()
except sqlobject.SQLObjectNotFound:
utils.dialogo_info(titulo = 'REGISTRO ELIMINADO',
texto = 'El registro ha sido borrado desde otro puesto.',
padre = self.wids['ventana'])
self.loteCem = None
self.activar_widgets(self.loteCem!=None)
# --------------- Manejadores de eventos ----------------------------
def guardar_obs(self, boton):
"""
Guarda el contenido del TextView en el atributo observaciones.
"""
if self.objeto != None:
buffer = self.wids['txt_observaciones'].get_buffer()
self.objeto.observaciones = buffer.get_text(buffer.get_start_iter(), buffer.get_end_iter())
self.wids['b_guardar_obs'].set_sensitive(False)
def add(self, w):
if self.loteCem != None:
model = self.wids['tv_pruebas'].get_model()
model.append((utils.str_fecha(time.localtime()),
"", "", "", "", "", "", "0,0,0,0,0,0"))
else:
print "WARNING: Se ha intentano añadir una prueba con loteCem = None"
def drop(self, w):
"""
Borra una línea completa de resultados.
"""
model, iter = self.wids['tv_pruebas'].get_selection().get_selected()
if iter != None and utils.dialogo(titulo = 'BORRAR PRUEBA', texto = '¿Está seguro?', padre = self.wids['ventana']):
ids = map(int, model[iter][-1].split(','))
for columnaid in range(len(ids)):
id = ids[columnaid]
if id != 0:
clase = self.get_clase(columnaid+1)
prueba = clase.get(id)
prueba.destroySelf()
self.rellenar_pruebas()
    def set_loteCem(self, w):
        """
        Ask the user for a lote number or code, look it up (by exact
        number OR code substring), let the user disambiguate when several
        lotes match, reject empty lotes (the header info is read from
        their bigbags) and make the match the window's current lote.
        """
        # Offer to save pending observation edits before switching lote.
        comprobar_y_preguntar_si_guardar(self)
        codlote = utils.dialogo_entrada(titulo = 'Nº LOTE',
                    texto = 'Introduzca número o código de lote de fibra '
                            'de cemento:',
                    padre = self.wids['ventana'])
        if codlote != None:
            numlote = utils.parse_numero(codlote)
            loteCems = pclases.LoteCem.select(pclases.OR(
                pclases.LoteCem.q.numlote == numlote,
                pclases.LoteCem.q.codigo.contains(codlote)))
            if loteCems.count() == 0:
                utils.dialogo_info(titulo = 'LOTE NO ENCONTRADO',
                    texto = 'No se encontró ningún lote de fibra de cemento'
                            ' %s.' % (codlote),
                    padre = self.wids['ventana'])
                return
            elif loteCems.count() > 1:
                # Several matches: show a picker and let the user choose.
                filas = [(l.id, l.numlote, l.codigo, l.tenacidad,
                          l.elongacion, l.humedad, l.encogimiento)
                         for l in loteCems]
                idloteCem = utils.dialogo_resultado(filas,
                                titulo = 'SELECCIONE LOTE',
                                cabeceras = ('ID', 'Número', 'Código', 'Tenacidad',
                                             'Elongación', 'Humedad', 'Encogimiento'),
                                padre = self.wids['ventana'])
                if idloteCem < 0:
                    # Picker cancelled.
                    return
                loteCem = pclases.LoteCem.get(idloteCem)
            else:
                loteCem = loteCems[0]
            if len(loteCem.bigbags) == 0:
                # An empty lote can't be tested: product data is read from
                # its first bigbag elsewhere in this window.
                utils.dialogo_info(titulo = 'LOTE VACÍO',
                    texto = 'El lote de cemento no contiene bigbags, no '
                            'puede\nrealizar pruebas sobre un lote vacío.',
                    padre = self.wids['ventana'])
                self.loteCem = None
                return
            self.loteCem = loteCem
            self.actualizar_ventana()
def rellenar_widgets(self):
self.objeto = self.loteCem
self.activar_widgets(self.loteCem != None)
if self.loteCem != None:
self.rellenar_info_loteCem()
self.rellenar_pruebas()
self.rellenar_observaciones()
def rellenar_observaciones(self):
"""
Introduce las observaciones de la partida en el TextView.
"""
self.wids['txt_observaciones'].get_buffer().set_text(self.objeto.observaciones)
self.wids['b_guardar_obs'].set_sensitive(False)
    def rellenar_info_loteCem(self):
        """
        Fill the header widgets with the lote and product data.
        PRECONDITION: self.loteCem != None and len(self.loteCem.bigbags) > 0
        (product info is read from the first bigbag's first artículo).
        """
        loteCem = self.loteCem
        self.wids['e_codigo'].set_text("%d (%s)" % (loteCem.numlote, loteCem.codigo))
        self.wids['e_nombre'].set_text(loteCem.bigbags[0].articulos[0].productoVenta.nombre)
        self.wids['e_dtex'].set_text("%.1f DTEX" % (loteCem.bigbags[0].articulos[0].productoVenta.camposEspecificosBala.dtex))
        self.wids['e_corte'].set_text(`loteCem.bigbags[0].articulos[0].productoVenta.camposEspecificosBala.corte`)
        self.wids['e_color'].set_text(loteCem.bigbags[0].articulos[0].productoVenta.camposEspecificosBala.color or '')
        # Idiom "X == None and '-' or f(X)": shows "-" while the aggregate
        # has not been computed yet (None).
        self.wids['e_tenacidad'].set_text(loteCem.tenacidad == None and "-" or utils.float2str(loteCem.tenacidad))
        self.wids['e_elongacion'].set_text(loteCem.elongacion == None and "-" or utils.float2str(loteCem.elongacion))
        # The "rizo" widget is reused for humidity in the cement window.
        self.wids['e_rizo'].set_text(loteCem.humedad == None and "-" or utils.float2str(loteCem.humedad))
        self.wids['e_encogimiento'].set_text(loteCem.encogimiento == None and "-" or utils.float2str(loteCem.encogimiento))
        self.wids['e_grasa'].set_text(loteCem.grasa == None and "-" or utils.float2str(loteCem.grasa))
        self.wids['e_media'].set_text(loteCem.mediatitulo == None and "-" or "%.2f DTEX" % (loteCem.mediatitulo))
        try:
            self.wids['sp_tolerancia'].set_value(loteCem.tolerancia*100.0)
        except:
            # Missing/invalid tolerance: fall back to 20% and persist it.
            self.wids['sp_tolerancia'].set_value(20)
            loteCem.tolerancia = 0.2
def fecha(self, w):
self.wids['e_fecha'].set_text(utils.str_fecha(utils.mostrar_calendario(fecha_defecto = self.objeto and self.objeto.fecha or None, padre = self.wids['ventana'])))
def cambiar_fecha(self, cell, path, texto):
try:
fecha = time.strptime(texto, '%d/%m/%Y')
except:
utils.dialogo_info('FECHA INCORRECTA',
'La fecha introducida (%s) no es correcta.' % (texto),
padre = self.wids['ventana'])
return
model = self.wids['tv_pruebas'].get_model()
model[path][0] = utils.str_fecha(fecha)
ids = map(int, model[path][-1].split(','))
for col in xrange(6):
if ids[col] != 0:
clase = self.get_clase(col+1)
prueba = clase.get(ids[col])
prueba.fecha = fecha
def get_clase(self, columna):
if columna == 1:
clase = pclases.PruebaTitulo
elif columna == 2:
clase = pclases.PruebaElongacion
elif columna == 3:
clase = pclases.PruebaTenacidad
elif columna == 4:
clase = pclases.PruebaGrasa
elif columna == 5:
clase = pclases.PruebaEncogimiento
elif columna == 6:
clase = pclases.PruebaHumedad
else:
print "WARNING: resultados_fibra.py: No debería entrar aquí."
clase = None
return clase
def cambiar_resultado(self, tv, path, texto, columna):
texto = texto.replace(" ", "")
if texto != "":
try:
resultado = utils._float(texto)
except:
utils.dialogo_info('RESULTADO INCORRECTO',
'El número tecleado (%s) no es correcto.' % (texto),
padre = self.wids['ventana'])
return
clase = self.get_clase(columna)
columnaid = columna-1 # Porque en los IDS empieza por 0
if clase != None:
model = self.wids['tv_pruebas'].get_model()
ids = map(int, model[path][-1].split(','))
id = ids[columnaid]
if id == 0:
if texto != "":
fecha = time.strptime(model[path][0], '%d/%m/%Y')
try:
prueba = clase(fecha = fecha,
resultado = resultado,
loteCem = self.loteCem,
lote = None)
except TypeError: # Es prueba de Humedad, no lleva relación con lote de fibra:
prueba = clase(fecha = fecha,
resultado = resultado,
loteCem = self.loteCem)
ids[columnaid] = prueba.id
model[path][-1] = ','.join(map(str, ids))
model[path][columna] = "%.2f" % resultado
else:
prueba = clase.get(int(id))
if texto == "":
try:
prueba.destroySelf()
except:
utils.dialogo_info(titulo = "ERROR",
texto = "El resultado no se pudo eliminar.",
padre = self.wids['ventana'])
return
model[path][columna] = ""
ids[columnaid] = 0
model[path][-1] = ','.join(map(str, ids))
self.rellenar_pruebas() # Prefiero esto a comprobar si la fila se ha quedado vacía, etc...
else:
prueba.resultado = resultado
if columna != 6:
model[path][columna] = "%.2f" % resultado
else:
model[path][columna] = "%d" % resultado
self.calcular_caracteristicas()
# print model[path][-1]
# self.rellenar_pruebas()
def cambiar_titulo(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 1)
def cambiar_alargamiento(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 2)
def cambiar_tenacidad(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 3)
def cambiar_grasa(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 4)
def cambiar_encogimiento(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 5)
def cambiar_humedad(self, tv ,path, texto):
self.cambiar_resultado(tv, path, texto, 6)
def cambiar_tolerancia(self, sp):
loteCem = self.loteCem
try:
loteCem.tolerancia = float(sp.get_value()) / 100.0
self.calcular_caracteristicas()
except ValueError:
utils.dialogo_info(titulo = 'VALOR INCORRECTO',
texto = 'El valor %s no es correcto.' % (sp.get_value()),
padre = self.wids['ventana'])
def imprimir(self, boton):
"""
Imprime la información en pantalla.
"""
import informes, geninformes
txt = "LOTE: %s\n" % (self.wids['e_codigo'].get_text())
txt += "PRODUCTO: %s\n\n" % (self.wids['e_nombre'].get_text())
txt += "\nCaracterísticas del lote:\n"
txt += " DTEX: %s\n" % (self.wids['e_dtex'].get_text())
txt += " Tenacidad: %s\n" % (self.wids['e_tenacidad'].get_text())
txt += " Alargamiento: %s\n" % (self.wids['e_elongacion'].get_text())
txt += " Corte: %s\n" % (self.wids['e_corte'].get_text())
txt += " Grasa: %s\n" % (self.wids['e_grasa'].get_text())
txt += " Encogimiento: %s\n" % (self.wids['e_encogimiento'].get_text())
txt += " Color: %s\n" % (self.wids['e_color'].get_text())
txt += " Humedad: %s\n" % (self.wids['e_rizo'].get_text())
loteCem = self.loteCem
try:
dtex = loteCem.bigbags[0].articulos[0].productoVenta.camposEspecificosBala.dtex
tolerancia = loteCem.tolerancia
mediatitulo = loteCem.mediatitulo
except:
utils.dialogo_info(titulo = 'ERROR',
texto = 'Ocurrió un error buscando el tipo de fibra.',
padre = self.wids['ventana'])
dtex = 0
tolerancia = 0
mediatitulo = 0
difmax = dtex * tolerancia
diferencia = abs(mediatitulo - dtex)
if round(diferencia, 2) > difmax:
ok = False
else:
ok = True
txt += " Media de título: %s (%s)\n" % (self.wids['e_media'].get_text(),
ok and "dentro del %s%% de tolerancia" % utils.float2str(self.wids['sp_tolerancia'].get_value(), 0)
or "no cumple el %s%% de tolerancia" % utils.float2str(self.wids['sp_tolerancia'].get_value(), 0)
)
txt += " Desviación típica: %s\n" % (self.wids['e_desvtipica'].get_text())
txt += "\nResultados de las pruebas:\n"
model = self.wids['tv_pruebas'].get_model()
for fila in model:
txt += " %s\n" % (fila[0])
txt += " Título (dtex): %s\n" % (fila[1])
txt += " Alargamiento (%%): %s\n" % (fila[2])
txt += " Tenacidad (cN/tex): %s\n" % (fila[3])
txt += " Grasa (%%): %s\n" % (fila[4])
txt += " Encogimiento (%%): %s\n" % (fila[5])
txt += " Humedad (%%): %s\n" % (fila[6])
buffer = self.wids['txt_observaciones'].get_buffer()
txt += "\nObervaciones: %s\n" % buffer.get_text(buffer.get_start_iter(), buffer.get_end_iter())
informes.abrir_pdf(geninformes.texto_libre(txt, "Resultados de laboratorio: %s" % (self.objeto and self.objeto.codigo or "")))
if __name__=='__main__':
a = ResultadosFibra()
| gpl-2.0 | 6,911,447,172,993,764,000 | 44.227545 | 169 | 0.505197 | false |
kevana/corpscores | dci_notify/scraper/scraper.py | 1 | 6017 | #!/Users/kevanahlquist/Dropbox/dev/dci_notify/env/bin/python
'''
Monitor the dci.org website for new score postings.
'''
from __future__ import print_function
#Initialize Sentry before others, requires SENTRY_DSN environment variable
from raven import Client
client = Client()
# Imports
from bs4 import BeautifulSoup
from datetime import datetime
from email.mime.text import MIMEText
from requests.exceptions import ConnectionError
from socket import error as SocketError
import json
import os
import requests
import smtplib
import time
# Config directives
MAIL_SERVER = 'smtp.mailgun.org'
MAIL_PORT = 465
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = os.environ.get('MAIL_USERNAME', '[email protected]')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD', 'example_password')
MAIL_DEFAULT_SENDER = os.environ.get('MAIL_DEFAULT_SENDER', '[email protected]')
MAIL_SUPPRESS_SEND = os.environ.get('MAIL_SUPPRESS_SEND', False)
APP_SUPPRESS_POST = os.environ.get('APP_SUPPRESS_POST', False)
API_POST_URL = os.environ.get('API_POST_URL', 'http://example.com/events/') # 'http://corpscores.herokuapp.com/events/'
RECIPIENT = '[email protected]' # Emails message before sending to SMS.
# May be able to ignore basedir, make file wherever script is running
basedir = os.path.abspath(os.path.dirname(__file__))
OUTFILE = os.path.join(basedir, 'lastscrape.txt')
API_KEY = os.environ.get('API_KEY', 'API_KEY')
# JSONify dates in ISO 8601 format
dthandler = lambda obj: (
obj.isoformat()
if isinstance(obj, datetime)
else json.JSONEncoder().default(obj))
def eqIn(item, iterable):
'''Quick in operator to test for equality instead of identity'''
for thing in iterable:
if item == thing:
return True
return False
def send_email(text):
'''Send the raw event to an admin.'''
msg = MIMEText(text)
msg['Subject'] = 'New Event posted on DCI.org'
msg['From'] = MAIL_DEFAULT_SENDER
msg['To'] = RECIPIENT
if not MAIL_SUPPRESS_SEND:
s = smtplib.SMTP(MAIL_SERVER)
s.login(MAIL_USERNAME, MAIL_PASSWORD)
s.sendmail(MAIL_DEFAULT_SENDER, [RECIPIENT], msg.as_string())
def post_to_app(text):
"""Post event to app, text is a string containing a json object."""
headers = {'Content-type': 'application/json',
'Accept': 'application/json'}
r = requests.post(API_POST_URL, data=text, headers=headers)
if r.status_code != 200:
raise IOError('Unable to post event to app: %s' % text)
def process_event(event):
'''Retrieve, parse, and send the scores for the given event UUID.'''
params = {'event': event}
try:
r = requests.get('http://www.dci.org/scores/index.cfm', params=params)
except (SocketError, ConnectionError):
return
if r.status_code != 200:
return
# Get coarse info out of page
soup = BeautifulSoup(r.text)
scoresTable = (soup.find_all('table')[5].
find_all('table')[1])
infoHeader = (soup.find_all('table')[5].
find('h3'))
infoList = list(infoHeader.strings)
# Build a new event structure
thisEvent = {}
thisEvent['date'] = datetime.strptime(infoList[0], '%A, %B %d, %Y')
thisEvent['name'] = infoList[2]
loc = infoList[1].rsplit(' ', 1)
thisEvent['city'] = loc[0].rstrip(',\n\r\t ')
thisEvent['state'] = loc[1]
# Parse scores
rows = scoresTable.findChildren('tr')[2:-2]
eventResults = []
for row in rows:
columns = row.findChildren('td')
cleanColumns = [col.text.strip() for col in columns]
if len(cleanColumns) < 3:
break # Some events have Exhibition/International class labels
result = {}
result['place'] = cleanColumns[0]
result['corps'] = cleanColumns[1]
result['score'] = cleanColumns[2]
eventResults.append(result)
thisEvent['results'] = eventResults
thisEvent['api_key'] = API_KEY
event_text = json.dumps(thisEvent,
sort_keys=True,
indent=2,
default=dthandler)
#send_email(event_text)
add_processed_event(event)
if not APP_SUPPRESS_POST:
post_to_app(event_text)
def set_processed_events(events):
'''Write all processed events out to persistent storage.'''
with open(OUTFILE, 'w') as f:
f.writelines('%s\n' % event for event in events)
def get_processed_events():
'''Retrieve all processed events from persistent storage.'''
try:
with open(OUTFILE, 'r') as f:
ret = f.readlines()
ret = [item.strip() for item in ret]
except IOError:
ret = []
return ret
def add_processed_event(event):
'''Add a single new event to the processed events collection.'''
events = get_processed_events()
if event not in events:
events += event
set_processed_events(events)
def scrape_func():
'''Entry method when script is run.
Download scores page to obtain list of event UUIDs, compare to previously
processed events, process any new events.
'''
try:
# Base /scores URL redirects to the most recent score data
r = requests.get('http://www.dci.org/scores', allow_redirects=True)
except (SocketError, ConnectionError):
return
if r.status_code != 200:
return
soup = BeautifulSoup(r.text)
try:
options = soup.find('select').findChildren()
except AttributeError:
return None
current_events = [opt['value'] for opt in options]
last_processed_events = get_processed_events()
diff = [item for item in current_events if not eqIn(item,
last_processed_events)]
if diff:
for event in diff:
process_event(event)
if __name__ == '__main__':
while True:
try:
scrape_func()
except Exception as e:
print(e)
time.sleep(60)
| bsd-3-clause | 4,312,084,939,726,736,000 | 30.502618 | 119 | 0.633206 | false |
wavesaudio/instl | pybatch/MacOnlyBatchCommands.py | 1 | 12457 | import os
import shutil
from pathlib import Path
from typing import List
from configVar import config_vars
import logging
from utils import dock_util
from .subprocessBatchCommands import ShellCommand
log = logging.getLogger(__name__)
from .baseClasses import PythonBatchCommandBase
from .removeBatchCommands import RmDir, RmFile, RmFileOrDir
import utils
class MacDock(PythonBatchCommandBase):
""" Change Dock items (Mac only)
If 'path_to_item' is not None item will be added to the dock labeled 'label_for_item'
or removed if remove==True
Dock will restarted if restart_the_doc==True
"""
def __init__(self, path_to_item=None, label_for_item=None, restart_the_doc=False, remove=False, username=None, **kwargs) -> None:
super().__init__(**kwargs)
self.path_to_item = path_to_item
self.label_for_item = label_for_item
self.restart_the_doc = restart_the_doc
self.remove = remove
self.username = username # for testing purposes, during run we should have this info from config_vars
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.optional_named__init__param('path_to_item', self.path_to_item))
all_args.append(self.optional_named__init__param('label_for_item', self.label_for_item))
all_args.append(self.optional_named__init__param('username', self.username))
all_args.append(self.optional_named__init__param('restart_the_doc', self.restart_the_doc, False))
all_args.append(self.optional_named__init__param('remove', self.remove, False))
def progress_msg_self(self) -> str:
return f"""{self.__class__.__name__} setting '{self.path_to_item}' """
def __call__(self, *args, **kwargs) -> None:
PythonBatchCommandBase.__call__(self, *args, **kwargs)
home_dir = str(config_vars['HOME_DIR']) if 'HOME_DIR' in config_vars else "~"
username = str(config_vars['ACTING_UNAME']) if 'ACTING_UNAME' in config_vars else self.username
dock_bundle = 'com.apple.dock'
plist_buddy_path = "/usr/libexec/PlistBuddy"
mac_dock_path = f"{home_dir}/Library/Preferences/com.apple.dock.plist"
if self.restart_the_doc:
dock_cmd = "killall Dock"
else:
dock_cmd = ''
if self.remove:
app_name = self.label_for_item or Path(self.path_to_item).name.split(".")[0]
get_records_number = f"awk '/{app_name}/" + " {print NR-1}'"
dock_cmd = f''' {plist_buddy_path} -c "Delete persistent-apps:`sudo -u {username} defaults read {dock_bundle} persistent-apps | grep file-label |''' + \
get_records_number + \
f'''`" {mac_dock_path} ; ''' + \
dock_cmd
elif self.path_to_item:
plist_template = f'''"<dict><key>tile-data</key><dict><key>file-data</key><dict><key>_CFURLString</key>
<string>{self.path_to_item}</string><key>_CFURLStringType</key>
<integer>0</integer></dict></dict></dict>"'''
dock_cmd = f'''sudo -u {username} defaults write {dock_bundle} persistent-apps -array-add {plist_template} ; {dock_cmd}'''
log.info(dock_cmd)
with ShellCommand(dock_cmd, report_own_progress=False, stderr_means_err=False) as shell_cmd_macdoc:
shell_cmd_macdoc()
class CreateSymlink(PythonBatchCommandBase):
""" create a symbolic link (MacOS only)"""
def __init__(self, path_to_symlink: os.PathLike, path_to_target: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.path_to_symlink = path_to_symlink
self.path_to_target = path_to_target
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.path_to_symlink))
all_args.append(self.unnamed__init__param(self.path_to_target))
def progress_msg_self(self) -> str:
return f"""Create symlink '{self.path_to_symlink}' to '{self.path_to_target}'"""
def __call__(self, *args, **kwargs) -> None:
PythonBatchCommandBase.__call__(self, *args, **kwargs)
path_to_target = utils.ExpandAndResolvePath(self.path_to_target)
path_to_symlink = Path(os.path.expandvars(self.path_to_symlink))
self.doing = f"""create symlink '{path_to_symlink}' to target '{path_to_target}'"""
with RmFile(path_to_symlink, report_own_progress=False, resolve_path=False) as rf:
rf()
path_to_symlink.symlink_to(path_to_target)
class RmSymlink(PythonBatchCommandBase):
"""remove a symlink not it's target
- It's OK is the symlink or the target does not exist
- but exception will be raised if path is a folder
(MacOS only)
"""
def __init__(self, path: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.path: os.PathLike = path
self.exceptions_to_ignore.append(FileNotFoundError)
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.path))
def progress_msg_self(self):
return f"""Remove symlink '{self.path}'"""
def __call__(self, *args, **kwargs):
PythonBatchCommandBase.__call__(self, *args, **kwargs)
expanded_path = os.path.expandvars(self.path)
unresolved_path = Path(expanded_path)
self.doing = f"""removing symlink '{unresolved_path}'"""
if unresolved_path.is_symlink():
with RmFile(unresolved_path, report_own_progress=False, resolve_path=False) as rf:
rf()
elif unresolved_path.exists():
log.warning(f"RmSymlink, not a symlink: {unresolved_path}")
else:
log.warning(f"RmSymlink, not found: {unresolved_path}")
class SymlinkToSymlinkFile(PythonBatchCommandBase):
""" replace a symlink with a file with te same name + the extension '.symlink'
the '.symlink' will contain the text of the target of the symlink.
This will allow uploading symlinks to cloud storage does not support symlinks
(MacOS only)
"""
def __init__(self, symlink_to_convert: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.symlink_to_convert = Path(symlink_to_convert)
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.symlink_to_convert))
def progress_msg_self(self) -> str:
return f"""Convert symlink file '{self.symlink_to_convert}'"""
def __call__(self, *args, **kwargs) -> None:
PythonBatchCommandBase.__call__(self, *args, **kwargs)
symlink_to_convert = Path(os.path.expandvars(self.symlink_to_convert))
self.doing = f"""convert real symlink '{symlink_to_convert}' to .symlink file"""
if symlink_to_convert.is_symlink():
link_value = os.readlink(symlink_to_convert)
symlink_text_path = symlink_to_convert.with_name(f"{symlink_to_convert.name}.symlink")
symlink_text_path.write_text(link_value)
with RmFile(symlink_to_convert, report_own_progress=False, resolve_path=False) as rf:
rf()
class SymlinkFileToSymlink(PythonBatchCommandBase):
""" replace a file with extension '.symlink' to a real symlink.
the '.symlink' should contain the text of the target of the symlink. And was created with SymlinkToSymlinkFile.
This will allow uploading symlinks to cloud storage does not support symlinks
(MacOS only)
"""
def __init__(self, symlink_file_to_convert: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.symlink_file_to_convert = os.fspath(symlink_file_to_convert)
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.symlink_file_to_convert))
def progress_msg_self(self) -> str:
return f"""Resolve symlink '{self.symlink_file_to_convert}'"""
def __call__(self, *args, **kwargs) -> None:
PythonBatchCommandBase.__call__(self, *args, **kwargs)
symlink_file_to_convert = utils.ExpandAndResolvePath(self.symlink_file_to_convert)
symlink_target = symlink_file_to_convert.read_text()
self.doing = f"""convert symlink file '{symlink_file_to_convert}' to real symlink to target '{symlink_target}'"""
symlink = Path(symlink_file_to_convert.parent, symlink_file_to_convert.stem)
it_was = None
if symlink.is_symlink():
with RmFile(symlink, report_own_progress=False, resolve_path=False) as rf:
rf()
it_was = "symlink"
elif symlink.is_file():
with RmFile(symlink, report_own_progress=False, resolve_path=False) as rf:
rf()
it_was = "file"
elif symlink.is_dir():
with RmDir(symlink, report_own_progress=False) as rd:
rd()
it_was = "folder"
if symlink.exists():
raise IsADirectoryError(f"{it_was} '{symlink}' a was found and could not be removed")
symlink.symlink_to(symlink_target)
symlink_file_to_convert.unlink()
class CreateSymlinkFilesInFolder(PythonBatchCommandBase):
""" replace a symlink with a file with te same name + the extension '.symlink'
the '.symlink' will contain the text of the target of the symlink.
This will allow uploading symlinks to cloud storage does not support symlinks
(MacOS only)
"""
def __init__(self, folder_to_convert: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.folder_to_convert = Path(folder_to_convert)
self.last_symlink_file = None
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.folder_to_convert))
def progress_msg_self(self) -> str:
return f"""Create symlinks files in '{self.folder_to_convert}'"""
def __call__(self, *args, **kwargs) -> None:
self.doing = f"""convert real symlinks in '{self.folder_to_convert}' to .symlink files"""
PythonBatchCommandBase.__call__(self, *args, **kwargs)
resolved_folder_to_convert = utils.ExpandAndResolvePath(self.folder_to_convert)
for root, dirs, files in os.walk(resolved_folder_to_convert, followlinks=False):
for item in files + dirs:
item_path = Path(root, item)
if item_path.is_symlink():
try:
self.last_symlink_file = item_path
with SymlinkToSymlinkFile(item_path, own_progress_count=0) as symlink_converter:
self.doing = f"""convert symlink '{item_path}' to .symlink file"""
symlink_converter()
except:
log.warning(f"failed to convert {item_path}")
class ResolveSymlinkFilesInFolder(PythonBatchCommandBase):
""" replace a symlink with a file with te same name + the extension '.symlink'
the '.symlink' will contain the text of the target of the symlink.
This will allow uploading symlinks to cloud storage does not support symlinks
(MacOS only)
"""
def __init__(self, folder_to_convert: os.PathLike, **kwargs) -> None:
super().__init__(**kwargs)
self.folder_to_convert = folder_to_convert
self.last_symlink_file = None
def repr_own_args(self, all_args: List[str]) -> None:
all_args.append(self.unnamed__init__param(self.folder_to_convert))
def progress_msg_self(self) -> str:
return f"""Resolve symlinks in '{self.folder_to_convert}'"""
def __call__(self, *args, **kwargs) -> None:
PythonBatchCommandBase.__call__(self, *args, **kwargs)
resolved_folder_to_convert = utils.ExpandAndResolvePath(self.folder_to_convert)
for root, dirs, files in os.walk(resolved_folder_to_convert, followlinks=False):
for item in files:
item_path = Path(root, item)
if item_path.suffix == ".symlink":
self.last_symlink_file = os.fspath(item_path)
self.doing = f"""resolve symlink file '{self.last_symlink_file}'"""
with SymlinkFileToSymlink(item_path, own_progress_count=0) as symlink_converter:
symlink_converter()
| bsd-3-clause | 7,787,313,575,806,903,000 | 45.481343 | 164 | 0.622702 | false |
OscarES/Differential-Algebra-Tracker | diffWithNpyAndDstFiles.py | 1 | 2191 | import numpy as np
from IOHandler import loadMultipart
##### Diff between particle data, data should be stored as x column xp column y column yp
firstdata = raw_input('Enter first multipart datafile name:')
seconddata = raw_input('Enter second multipart datafile name:')
firstmultipart = loadMultipart(firstdata)
secondmultipart = loadMultipart(seconddata)
xf = [firstmultipart[i][0][0] for i in xrange(len(firstmultipart))]
xpf = [firstmultipart[i][0][1] for i in xrange(len(firstmultipart))]
yf = [firstmultipart[i][0][2] for i in xrange(len(firstmultipart))]
ypf = [firstmultipart[i][0][3] for i in xrange(len(firstmultipart))]
zf = [firstmultipart[i][0][4] for i in xrange(len(firstmultipart))]
zpf = [firstmultipart[i][0][5] for i in xrange(len(firstmultipart))]
xs = [secondmultipart[i][0][0] for i in xrange(len(secondmultipart))]
xps = [secondmultipart[i][0][1] for i in xrange(len(secondmultipart))]
ys = [secondmultipart[i][0][2] for i in xrange(len(secondmultipart))]
yps = [secondmultipart[i][0][3] for i in xrange(len(secondmultipart))]
zs = [secondmultipart[i][0][4] for i in xrange(len(secondmultipart))]
zps = [secondmultipart[i][0][5] for i in xrange(len(secondmultipart))]
diffx = np.array(xf) - np.array(xs)
diffxp = np.array(xpf) - np.array(xps)
diffy = np.array(yf) - np.array(ys)
diffyp = np.array(ypf) - np.array(yps)
diffz = np.array(zf) - np.array(zs)
diffzp = np.array(zpf) - np.array(zps)
diffx = diffx.astype('float')
diffxp = diffxp.astype('float')
diffy = diffy.astype('float')
diffyp = diffyp.astype('float')
diffz = diffz.astype('float')
diffzp = diffzp.astype('float')
stdx = np.std(diffx)
stdxp = np.std(diffxp)
stdy = np.std(diffy)
stdyp = np.std(diffyp)
stdz = np.std(diffz)
stdzp = np.std(diffzp)
print 'stdx:',stdx
print 'stdxp:',stdxp
print 'stdy:',stdy
print 'stdyp:',stdyp
print 'stdz:',stdz
print 'stdzp:',stdzp
# std for xin,xpin,yin,ypin
#print 'Initial beam std (when firstsdata is the init while and not results...)'
#print 'stdx:',np.std(firstx)
#print 'stdxp:',np.std(firstxp)
#print 'stdy:',np.std(firsty)
#print 'stdyp:',np.std(firstyp)
## TODO:
#1: make the program work by calling something like: python diff.py out.txt out2.txt | gpl-3.0 | -2,406,106,296,024,561,700 | 34.934426 | 89 | 0.715199 | false |
coopie/huzzer | huzzer/expressions.py | 1 | 5250 | from . import BOOL, INT, types
class Expression:
"""
A representation of an expression.
## Methods:
* stringify: function that takes a `namer` as an argument. This function renders the expression as text.
Expressions can be empty or parameterized. An empty expression when called creates a new expression
with the arguments of the invocation.
Expressions with arguments cannot be called.
"""
def __init__(self, type_signiature, stringify_func, args=[]):
assert stringify_func is not None, 'expression must have a way of representing itself as a string'
self.type_signiature = type_signiature
self.stringify_func = stringify_func
self.args = args
def __call__(self, *args):
assert len(self.args) == 0
assert len(args) == len(self.type_signiature[1:])
return Expression(self.type_signiature, self.stringify_func, args)
def stringify(self, namer):
return self.stringify_func(namer, self.args)
class FunctionExpression(Expression):
def __init__(self, type_signiature, function_id, args=[]):
self.type_signiature = type_signiature
self.function_id = function_id
self.args = args
def stringify_func(namer, args):
if len(args) != 0:
args_strings = ' ' + ' '.join([x.stringify(namer) for x in args])
return '({0}{1})'.format(namer.name_function(self), args_strings)
else:
return namer.name_function(self)
self.stringify_func = stringify_func
def __call__(self, *args):
assert len(self.args) == 0
assert len(args) == len(self.type_signiature[1:])
return FunctionExpression(self.type_signiature, self.function_id, args)
class VariableExpression(Expression):
def __init__(self, type_signiature, var_id):
assert type_signiature in types
self.type_signiature = type_signiature
self.var_id = var_id
def stringify(self, namer):
return namer.name_variable(self)
def __call__(self, *args):
raise TypeError('VariableExpression should not be called as it can never be an empty expression')
def stringify_binary_function(function_name):
def stringify_expr(namer, args):
assert len(args) == 2
a, b = [x.stringify(namer) for x in args]
template = '({0} {1} {2})'
return template.format(function_name, a, b)
return stringify_expr
def stringify_infix_function(function_name):
def stringify_expr(namer, args):
assert len(args) == 2
a, b = [x.stringify(namer) for x in args]
return '({1} {0} {2})'.format(function_name, a, b)
return stringify_expr
def stringify_unary_function(function_string):
def stringify_expr(namer, args):
assert len(args) == 1
return function_string.format(args[0].stringify(namer))
return stringify_expr
def type_of_expression(expr):
return expr.type_signiature[-1]
def make_binary_expr(type_signiature, stringify_func):
return Expression(type_signiature, stringify_func)
# empty expressions used for expression generation
div_expr = Expression((INT, INT, INT), stringify_binary_function('div'))
mod_expr = Expression((INT, INT, INT), stringify_binary_function('mod'))
max_expr = Expression((INT, INT, INT), stringify_binary_function('max'))
min_expr = Expression((INT, INT, INT), stringify_binary_function('min'))
plus_expr = Expression((INT, INT, INT), stringify_infix_function('+'))
minus_expr = Expression((INT, INT, INT), stringify_infix_function('-'))
mul_expr = Expression((INT, INT, INT), stringify_infix_function('*'))
eq_expr = Expression((INT, INT, BOOL), stringify_infix_function('=='))
neq_expr = Expression((INT, INT, BOOL), stringify_infix_function('/='))
gt_expr = Expression((INT, INT, BOOL), stringify_infix_function('>'))
gte_expr = Expression((INT, INT, BOOL), stringify_infix_function('>='))
lt_expr = Expression((INT, INT, BOOL), stringify_infix_function('<'))
lte_expr = Expression((INT, INT, BOOL), stringify_infix_function('<='))
or_expr = Expression((BOOL, BOOL, BOOL), stringify_infix_function('||'))
and_expr = Expression((BOOL, BOOL, BOOL), stringify_infix_function('&&'))
not_expr = Expression((BOOL, BOOL), stringify_unary_function('(not {})'))
fromEnum_expr = Expression((BOOL, INT), stringify_unary_function('(fromEnum {})'))
All_BRANCH_EXPRESSIONS = [
div_expr,
mod_expr,
max_expr,
min_expr,
plus_expr,
minus_expr,
mul_expr,
eq_expr,
neq_expr,
gt_expr,
gte_expr,
lt_expr,
lte_expr,
or_expr,
and_expr,
or_expr,
not_expr,
fromEnum_expr
]
BRANCH_EXPRESSIONS = {}
for haskell_type in types:
expressions_of_type = [x for x in All_BRANCH_EXPRESSIONS if type_of_expression(x) == haskell_type]
BRANCH_EXPRESSIONS[haskell_type] = expressions_of_type
def stringify_literal(namer, args):
assert len(args) == 1
return str(args[0])
# these are treated like unary expressions, which take an x and return an x
int_literal = Expression((INT, INT), stringify_literal)
bool_literal = Expression((BOOL, BOOL), stringify_literal)
LITERAL_EXPRESSIONS = {
INT: int_literal,
BOOL: bool_literal
}
| mit | 8,976,415,102,576,479,000 | 31.8125 | 108 | 0.659238 | false |
durandj/botman | tests/commands/test_command_decorators.py | 1 | 4519 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for the command decorators
"""
import unittest
import unittest.mock
import asynctest
import botman.commands.base
import botman.errors
import tests.mixins
@asynctest.fail_on(unused_loop=False)
class TestChatCommandDecorator(tests.mixins.DiscordMockMixin, asynctest.TestCase):
"""
Tests for the chat_command decorator
"""
def test_not_callable(self):
"""
Tests that we can't decorate a non-callable object
"""
expected = 'Cannot use a non-callable as a command'
with self.assertRaises(botman.errors.ConfigurationError, msg=expected):
botman.commands.base.chat_command('test')({})
def test_not_coroutine(self):
"""
Tests that we can't decorate a non-coroutine function
"""
mock_handler = unittest.mock.Mock(__name__='test')
expected = 'Cannot use a non-coroutine as a command'
with self.assertRaises(botman.errors.ConfigurationError, msg=expected):
botman.commands.base.chat_command('test')(mock_handler)
def test_decorator_returns_command(self):
"""
Tests that the decorator returns a Command instance
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
wrapped = botman.commands.base.chat_command('test')(mock_handler)
self.assertIsInstance(
wrapped,
botman.commands.base.Command,
'The function became a command instance',
)
def test_wrapper_has_name(self):
"""
Tests that the decorator adds the correct name
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
wrapped = botman.commands.base.chat_command('test')(mock_handler)
self.assertEqual(
'test',
wrapped.name,
'The command had the correct name',
)
async def test_wrapper_calls_handler(self):
"""
Tests that the Command instance calls the handler
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
wrapped = botman.commands.base.chat_command('test')(mock_handler)
message = self.get_mock_message('test')
wrapped.pattern = 'test'
mock_bot = self.get_mock_bot()
await wrapped(mock_bot, message, '')
mock_handler.assert_called_with(mock_bot, message)
class TestDescriptionDecorator(unittest.TestCase):
"""
Tests for the description deorator
"""
def test_decorator_non_command(self):
"""
Tests that the decorator only works on command instances
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
expected = 'test must have the chat_command decorator'
with self.assertRaises(botman.errors.ConfigurationError, msg=expected):
botman.commands.base.description('Descriptive')(mock_handler)
def test_sets_description(self):
"""
Tests that the decorator actually sets the description
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
wrapped = botman.commands.base.chat_command('test')(mock_handler)
wrapped = botman.commands.base.description('Descriptive')(wrapped)
self.assertEqual(
'Descriptive',
wrapped.description,
'The description was set',
)
class TestParametersDecorator(unittest.TestCase):
"""
Tests for the parameters decorator
"""
def test_decorator_non_command(self):
"""
Tests that the decorator only works on command instances
"""
mock_handler = asynctest.CoroutineMock(__name__='test')
expected = 'test must have the chat_command decorator'
with self.assertRaises(botman.errors.ConfigurationError, msg=expected):
botman.commands.base.parameters()(mock_handler)
def test_sets_parameters(self):
"""
Tests that the decorator actually sets the parameters
"""
params = {
'one': botman.commands.base.StringArg(),
'two': botman.commands.base.StringArg(),
}
mock_handler = asynctest.CoroutineMock(__name__='test')
wrapped = botman.commands.base.chat_command('test')(mock_handler)
wrapped = botman.commands.base.parameters(**params)(wrapped)
self.assertDictEqual(
params,
wrapped.parameters,
'The parameters were set',
)
| mit | 1,011,528,141,740,947,200 | 28.154839 | 82 | 0.622925 | false |
rajalokan/keystone | doc/source/conf.py | 1 | 9381 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# keystone documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 9 12:02:59 2012.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import subprocess
import sys
import warnings
# NOTE(dstanek): adds _ to the builtins so keystone modules can be imported
__builtins__['_'] = str
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../../'))
sys.path.insert(0, os.path.abspath('../'))
sys.path.insert(0, os.path.abspath('./'))
# -- General configuration ----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
'oslo_config.sphinxconfiggen',
'oslo_policy.sphinxpolicygen',
'oslosphinx',
'ext.support_matrix',
]
config_generator_config_file = '../../config-generator/keystone.conf'
sample_config_basename = '_static/keystone'
policy_generator_config_file = (
'../../config-generator/keystone-policy-generator.conf'
)
sample_policy_basename = '_static/keystone'
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
# if os.getenv('HUDSON_PUBLISH_DOCS'):
# templates_path = ['_ga', '_templates']
# else:
# templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'keystone'
copyright = u'2012, OpenStack Foundation'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['old']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
show_authors = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['keystone.']
# -- Options for man page output --------------------------------------------
# Grouping the document tree for man pages.
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [
('man/keystone-manage', 'keystone-manage', u'Keystone Management Utility',
[u'OpenStack'], 1)
]
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# html_theme_path = ["."]
# html_theme = '_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
git_cmd = ["git", "log", "--pretty=format:'%ad, commit %h'", "--date=local",
"-n1"]
try:
html_last_updated_fmt = subprocess.check_output(
git_cmd).decode('utf-8')
except Exception:
warnings.warn('Cannot get last updated time from git repository. '
'Not setting "html_last_updated_fmt".')
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'keystonedoc'
# -- Options for LaTeX output -------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples (source
# start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'keystone.tex', u'Keystone Documentation',
u'OpenStack', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for Texinfo output -----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'keystone', u'Keystone Documentation',
u'OpenStack', 'keystone', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
# intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | -6,700,539,261,785,440,000 | 31.686411 | 79 | 0.690012 | false |
simonpessemesse/seguinus | easyPoS/Gui/Taches.py | 1 | 3919 | from PyQt4 import QtCore, QtGui
from kronos import ThreadedScheduler
import FenetreTache
import taches.views as taaaches
from datetime import datetime, timedelta, date
import preferences
import webbrowser
from chambres.models import joliePeriode
from easyPoS.models import DonneesEntreprise, LigneFacture, PreparationFacture, Produit, LogFacture
from chambres.models import Client
from chambres.models import Reservation, TourOperateur
import traceback
from PyQt4.QtGui import QIcon
from PyQt4.QtCore import QThread, SIGNAL
from easyPoS.models import Facture
import time
import EditionFacture
import sys
import logging
class GuetteNouvelles(QThread):
def __init__(self, parent=None):
QThread.__init__(self, parent)
self.minuteur = None
def run(self):
if self.minuteur:
self.minuteur.stop()
self.minuteur = ThreadedScheduler()
today = date.today()
executions = taaaches.taches(today)
for t in executions:
if (t.rappel):
rap = str(t.rappel).split(":")
diff = datetime(today.year, today.month, today.day, int(rap[0]), int(rap[1])) - datetime.now()
if (abs(diff) == diff):
print(t, " pour ", str(t.rappel))
self.minuteur.add_single_task(self.montreFenetre, "test action 1",
diff.days * 24 * 60 * 60 + diff.seconds, "threaded", [t.id], None)
else:
pass
self.minuteur.add_single_task(self.run, "malin le fauve", 600, "threaded", [], None)
self.minuteur.start()
def montreFenetre(self, id):
print("montre", id)
self.emit(SIGNAL("showTache(int)"), id)
class TachesListe(QtGui.QMainWindow):
def nettoie(self):
toDelete = []
for key, item in self.facturettes.items():
if not item.isVisible():
item.stop()
item.deleteLater()
toDelete.append(key)
for i in toDelete:
del self.facturettes[i]
def montreFenetre(self, plop):
id = plop
if id in self.facturettes:
self.facturettes[id].setFocus()
self.facturettes[id].setWindowState(QtCore.Qt.WindowActive)
self.facturettes[id].activateWindow()
self.facturettes[id].show()
else:
self.facturettes[id] = FenetreTache.EditTache(tache=id, parent=self)
self.facturettes[id].show()
def nouvo(self, idFact=0):
self.montreFenetre(idFact)
return
if idFact < 1:
facture = Facture(etat='B')
c = Client()
c.save()
facture.client = c
facture.save()
self.listePrincipaleModel.ajoute(facture)
else:
facture = Facture.objects.get(pk=idFact)
self.listePrincipaleModel.ajoute(facture)
def montreTache(self):
today = date.today()
executions = taaaches.taches(today)
for t in executions:
if (t.rappel):
rap = str(t.rappel).split(":")
diff = datetime(today.year, today.month, today.day, int(rap[0]), int(rap[1])) - datetime.now()
if (abs(diff) == diff) and t.date == date.today():
pass
else:
if not t.estPeriodique():
self.montreFenetre(t.id)
def __init__(self):
QtGui.QMainWindow.__init__(self)
self.minuteur = None
self.guetteNouvelles = GuetteNouvelles()
self.facturettes = {}
# self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
self.resize(600, 500)
self.setWindowTitle('Taches')
QtCore.QObject.connect(self.guetteNouvelles, SIGNAL("showTache(int)"), self.nouvo)
self.guetteNouvelles.start()
self.montreTache()
| gpl-2.0 | 8,621,369,324,232,689,000 | 33.078261 | 116 | 0.586374 | false |
asterix24/GestionaleCaldaie | gestionale/test/settings.py | 1 | 5158 | # Django settings for gestionale project.
from local_settings import *
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = ''
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'it-IT'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Session and login settings
LOGIN_URL = '/login/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# Idle time before the session expire in seconds.
SESSION_IDLE_TIMEOUT = 3600
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'main.middleware.LoginRequiredMiddleware',
'main.middleware.SessionIdleTimeout',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'gestionale.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'gestionale.' + APP_PREFIX_NAME + '.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.contrib.messages.context_processors.messages",
"gestionale.local_env",
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_extensions',
'main',
'south',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
#'django.contrib.admindocs',
)
import sys
if DEBUG:
import logging
logging.basicConfig(level=logging.INFO, format='[%(asctime)s] %(message)s', datefmt='%d/%b/%Y %H:%M:%S',
filename=LOCAL_LOG_PATH + "gestionale.log")
logging.getLogger('main.data_render').setLevel(logging.INFO)
logging.getLogger('main.database_manager').setLevel(logging.INFO)
logging.getLogger('main.myfomrs').setLevel(logging.INFO)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
| gpl-2.0 | 7,529,562,840,878,033,000 | 34.328767 | 108 | 0.73575 | false |
karan259/GrovePi | Software/Python/firmware_tests/multi_led_blink.py | 1 | 2380 | #!/usr/bin/env python
# GrovePi LED blink test for the Grove LED Socket (http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
from grovepi import *
# Connect the Grove LED to digital port D4,D5,D6
led0 = 4
led1 = 5
led2 = 6
pinMode(led0,"OUTPUT")
pinMode(led1,"OUTPUT")
pinMode(led2,"OUTPUT")
while True:
try:
#Blink the LED
digitalWrite(led0,1) # Send HIGH to switch on LED
digitalWrite(led1,1) # Send HIGH to switch on LED
digitalWrite(led2,1) # Send HIGH to switch on LED
print ("LED ON!")
time.sleep(1)
digitalWrite(led0,0) # Send LOW to switch off LED
digitalWrite(led1,0) # Send LOW to switch off LED
digitalWrite(led2,0) # Send LOW to switch off LED
print ("LED OFF!")
time.sleep(1)
except IOError: # Print "Error" if communication error encountered
print ("Error")
| mit | -8,457,691,376,727,992,000 | 36.1875 | 139 | 0.734034 | false |
cburmeister/fluid | app/lib/partial_file.py | 1 | 1095 | """
A wrapper around Flask's send_file implementing the 206 partial protocol.
"""
from flask import Response, request, send_file
import mimetypes
import os
import re
def send(path):
"""Returns a file via the 206 partial protocol."""
range_header = request.headers.get('Range', None)
if not range_header:
return send_file(path) # Client must want the entire file
size = os.path.getsize(path)
start, end = 0, None
m = re.search('(\d+)-(\d*)', range_header)
g = m.groups()
if g[0]:
start = int(g[0])
if g[1]:
end = int(g[1])
length = min(size - start, 5120000)
if end is not None:
length = end - start
data = None
with open(path, 'rb') as f:
f.seek(start)
data = f.read(length)
mimetype, _ = mimetypes.guess_type(path)
rv = Response(data, 206, mimetype=mimetype, direct_passthrough=True)
rv.headers.add('Accept-Ranges', 'bytes')
rv.headers.add(
'Content-Range', 'bytes {0}-{1}/{2}'.format(
start, start + length - 1, size
)
)
return rv
| mit | 5,795,330,658,714,252,000 | 23.333333 | 73 | 0.591781 | false |
ardi69/pyload-0.4.10 | pyload/plugin/hoster/UloziskoSk.py | 1 | 2443 | # -*- coding: utf-8 -*-
import re
import urlparse
from pyload.plugin.internal.SimpleHoster import SimpleHoster
class UloziskoSk(SimpleHoster):
__name = "UloziskoSk"
__type = "hoster"
__version = "0.25"
__pattern = r'http://(?:www\.)?ulozisko\.sk/.+'
__config = [("use_premium", "bool", "Use premium account if available", True)]
__description = """Ulozisko.sk hoster plugin"""
__license = "GPLv3"
__authors = [("zoidberg", "[email protected]")]
NAME_PATTERN = r'<div class="down1">(?P<N>[^<]+)</div>'
SIZE_PATTERN = ur'Veľkosť súboru: <strong>(?P<S>[\d.,]+) (?P<U>[\w^_]+)</strong><br />'
OFFLINE_PATTERN = ur'<span class = "red">Zadaný súbor neexistuje z jedného z nasledujúcich dôvodov:</span>'
LINK_FREE_PATTERN = r'<form name = "formular" action = "(.+?)" method = "post">'
ID_PATTERN = r'<input type = "hidden" name = "id" value = "(.+?)" />'
CAPTCHA_PATTERN = r'<img src="(/obrazky/obrazky\.php\?fid=.+?)" alt="" />'
IMG_PATTERN = ur'<strong>PRE ZVÄČŠENIE KLIKNITE NA OBRÁZOK</strong><br /><a href = "(.+?)">'
def process(self, pyfile):
self.html = self.load(pyfile.url, decode=True)
self.getFileInfo()
m = re.search(self.IMG_PATTERN, self.html)
if m:
self.link = "http://ulozisko.sk" + m.group(1)
else:
self.handle_free(pyfile)
def handle_free(self, pyfile):
m = re.search(self.LINK_FREE_PATTERN, self.html)
if m is None:
self.error(_("LINK_FREE_PATTERN not found"))
parsed_url = 'http://www.ulozisko.sk' + m.group(1)
m = re.search(self.ID_PATTERN, self.html)
if m is None:
self.error(_("ID_PATTERN not found"))
id = m.group(1)
self.logDebug("URL:" + parsed_url + ' ID:' + id)
m = re.search(self.CAPTCHA_PATTERN, self.html)
if m is None:
self.error(_("CAPTCHA_PATTERN not found"))
captcha_url = urlparse.urljoin("http://www.ulozisko.sk", m.group(1))
captcha = self.decryptCaptcha(captcha_url, cookies=True)
self.logDebug("CAPTCHA_URL:" + captcha_url + ' CAPTCHA:' + captcha)
self.download(parsed_url,
post={"antispam": captcha,
"id" : id,
"name" : pyfile.name,
"but" : "++++STIAHNI+S%DABOR++++"})
| gpl-3.0 | -5,508,463,001,809,072,000 | 34.231884 | 111 | 0.546689 | false |
googleapis/googleapis-gen | google/apps/market/v2/hosted-marketplace-v2-py/ccc/hosted/marketplace_v2/services/license_notification_service/transports/grpc.py | 1 | 11550 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from ccc.hosted.marketplace_v2.types import resources
from ccc.hosted.marketplace_v2.types import services
from .base import LicenseNotificationServiceTransport, DEFAULT_CLIENT_INFO
class LicenseNotificationServiceGrpcTransport(LicenseNotificationServiceTransport):
"""gRPC backend transport for LicenseNotificationService.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(self, *,
host: str = 'appsmarket.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(cls,
host: str = 'appsmarket.googleapis.com',
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def list(self) -> Callable[
[services.LicenseNotificationListRequest],
resources.LicenseNotificationList]:
r"""Return a callable for the list method over gRPC.
Get a list of licensing notifications with regards to
a given app.
Returns:
Callable[[~.LicenseNotificationListRequest],
~.LicenseNotificationList]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if 'list' not in self._stubs:
self._stubs['list'] = self.grpc_channel.unary_unary(
'/ccc.hosted.marketplace.v2.LicenseNotificationService/List',
request_serializer=services.LicenseNotificationListRequest.serialize,
response_deserializer=resources.LicenseNotificationList.deserialize,
)
return self._stubs['list']
__all__ = (
'LicenseNotificationServiceGrpcTransport',
)
| apache-2.0 | -7,904,287,990,203,589,000 | 44.833333 | 87 | 0.61013 | false |
MartinThoma/hwrt | tests/create_ffiles_test.py | 1 | 4678 | #!/usr/bin/env python
# Core Library modules
import os
# First party modules
import hwrt.create_ffiles as create_ffiles
import hwrt.data_multiplication as data_multiplication
import hwrt.features as features
import hwrt.utils as utils
import tests.testhelper as th
def test_training_set_multiplication():
"""Test the create_ffiles.training_set_multiplication method."""
sample = th.get_symbol_as_handwriting(292934)
training_set = [
{
"id": 1337,
"is_in_testset": 0,
"formula_id": 42,
"handwriting": sample,
"formula_in_latex": "B",
}
]
mult_queue = [data_multiplication.Multiply()]
create_ffiles.training_set_multiplication(training_set, mult_queue)
def test_execution():
formula_id2index = {1337: 1, 12: 2}
feature_folder = "."
index2latex = {1: "\\alpha", 2: "\\beta"}
create_ffiles._create_index_formula_lookup(
formula_id2index, feature_folder, index2latex
)
def test_prepare_dataset():
"""Test create_ffiles.prepare_dataset."""
dataset = []
for i in range(200):
dataset.append(
{"handwriting": th.get_symbol_as_handwriting(97705), "formula_id": 42}
)
# dataset[-1]['handwriting'].formula_id = 42
formula_id2index = {}
formula_id2index[42] = 1
feature_list = [features.StrokeCount()]
is_traindata = False
create_ffiles.prepare_dataset(dataset, formula_id2index, feature_list, is_traindata)
def test_normalize_features_one():
"""Test create_ffiles._normalize_features with one point."""
feature_list = [features.Width(), features.Height()]
prepared = [([123], 1)]
is_traindata = True
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
assert out == [([0.0], 1)]
def test_normalize_features_two():
"""Test create_ffiles._normalize_features with two points."""
feature_list = [features.Width(), features.Height()]
prepared = [([123], 1), ([100], 1)]
is_traindata = True
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
# Mean: 111.5; Range: 23
assert out == [([0.5], 1), ([-0.5], 1)]
# Now the other set
prepared = [([111.5], 1), ([90], 1), ([180], 1)]
is_traindata = False
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
assert out == [([0.0], 1), ([-0.93478260869565222], 1), ([2.9782608695652173], 1)]
def test_normalize_features_two_feats():
"""Test create_ffiles._normalize_features with two points."""
feature_list = [features.Width(), features.Height()]
prepared = [([123, 123], 1), ([100, 100], 1)]
is_traindata = True
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
# Mean: 111.5; Range: 23
assert out == [([0.5, 0.5], 1), ([-0.5, -0.5], 1)]
# Now the other set
prepared = [([111.5, 111.5], 1), ([146, 146], 1), ([54, 54], 1)]
is_traindata = False
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
assert out == [([0.0, 0.0], 1), ([1.5, 1.5], 1), ([-2.5, -2.5], 1)]
def test_normalize_features_two_feats2():
"""Test create_ffiles._normalize_features with two points."""
feature_list = [features.Width(), features.Height()]
prepared = [([123, 123], 1), ([100, 100], 1)]
is_traindata = True
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
# Mean: 111.5; Range: 23
assert out == [([0.5, 0.5], 1), ([-0.5, -0.5], 1)]
# Now the other set
prepared = [([111.5, 146], 1), ([146, 111.5], 1), ([54, 54], 1)]
is_traindata = False
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
assert out == [([0.0, 1.5], 1), ([1.5, 0.0], 1), ([-2.5, -2.5], 1)]
def test_normalize_features_two_classes():
"""Test create_ffiles._normalize_features with two classes."""
feature_list = [features.Width(), features.Height()]
prepared = [([123], 1), ([100], 1), ([500], 2)]
is_traindata = True
out = create_ffiles._normalize_features(feature_list, prepared, is_traindata)
# Mean: 241; Range: 400
assert out == [([-0.295], 1), ([-0.3525], 1), ([0.6475], 2)]
def test_create_translation_file():
"""Test create_ffiles._create_translation_file."""
feature_folder = os.path.join(
utils.get_project_root(), "feature-files", "small-baseline"
)
dataset_name = "testtestdata"
translation = [(133700, "\\alpha", 42)]
formula_id2index = {42: 1}
create_ffiles._create_translation_file(
feature_folder, dataset_name, translation, formula_id2index
)
| mit | -465,068,950,497,233,400 | 34.709924 | 88 | 0.618854 | false |
GiulianoFranchetto/zephyr | boards/xtensa/intel_s1000_crb/support/messenger.py | 1 | 8143 | #!/usr/bin/env python3
#
# Copyright (c) 2019 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
#
# Author: Sathish Kuttan <[email protected]>
# This file defines a message class that contains functions to create
# commands to the target and to parse responses from the target.
import bitstruct
class Message:
"""
Message class containing the methods to create command messages and
parse response messages.
"""
message_id = {1: 'Control'}
cmd_rsp = {2: 'Load Firmware',
4: 'Mode Select',
0x10: 'Memory Read',
0x11: 'Memory Write',
0x12: 'Memory Block Write',
0x13: 'Execute',
0x14: 'Wait',
0x20: 'Ready'}
tx_data = None
tx_bulk_data = None
tx_index = 0
cmd_word_fmt = 'u1 u1 u1 u5 u16 u8'
cmd_keys = ['cmd', 'rsvd1', 'rsp', 'msg_id', 'rsvd2', 'cmd_rsp']
def __init__(self):
"""
Intialize a byte array of 64 bytes for command messages
Intialize another byte array of 4096 bytes for bulk messages
"""
self.tx_data = bytearray(64)
self.tx_bulk_data = bytearray(4096)
def init_tx_data(self):
"""
Intialize transmit message buffers to zeros
"""
for index in range(len(self.tx_data)):
self.tx_data[index] = 0
self.tx_index = 0
def endian_swap(self, dst, dst_offset, src):
"""
Performs a byte swap of a 32-bit word to change it's endianness
"""
for index in range(0, len(src), 4):
dst[dst_offset + index + 0] = src[index + 3]
dst[dst_offset + index + 1] = src[index + 2]
dst[dst_offset + index + 2] = src[index + 1]
dst[dst_offset + index + 3] = src[index + 0]
def print_cmd_message(self):
"""
Prints the contents of the command message buffer
"""
for index in range(0, self.tx_index, 4):
offset = index * 8
word = bitstruct.unpack_from('u32', self.tx_data, offset)
print('Index: %2d Content: 0x%08x' %(index, word[0]))
def print_response(self, msg, verbose = False):
"""
Parses and prints the contents of the response message
"""
unpacked = bitstruct.unpack_from_dict(self.cmd_word_fmt,
self.cmd_keys, msg)
msg_id = unpacked['msg_id']
rsp = unpacked['cmd_rsp']
if msg_id == 0 and rsp == 0:
print('RSP <<< NULL.')
else:
print('RSP <<< %s.' % self.cmd_rsp[rsp])
if verbose == True:
count = bitstruct.unpack_from('u32', msg, 4 * 8)[0]
count &= 0x1ff
for index in range(0, 8 + (count * 4), 4):
offset = index * 8
word = bitstruct.unpack_from('u32', msg, offset)
print('Index: %2d Content: 0x%08x' %(index, word[0]))
def get_cmd_code(self, cmd):
"""
Looks up the command and returns the numeric code
"""
index = list(self.cmd_rsp.values()).index(cmd)
return list(self.cmd_rsp.keys())[index]
def print_cmd_code(self, cmd):
"""
Prints the numeric code for the given command
"""
key = self.get_cmd_code(cmd)
print('CMD >>> %s. Command Code: 0x%02x' % (cmd, key))
def create_null_cmd(self):
"""
Creates a NULL command
"""
print('CMD >>> NULL.')
for index in range(len(self.tx_data)):
self.tx_data[index] = 0
self.tx_index = len(self.tx_data)
return self.tx_data
def create_memwrite_cmd(self, tuple):
"""
Creates a memory write command with memory address and value pairs
"""
cmd = 'Memory Write'
print('CMD >>> %s.' % cmd)
code = self.get_cmd_code(cmd)
self.init_tx_data()
index = list(self.message_id.values()).index('Control')
msg_id = list(self.message_id.keys())[index]
bitstruct.pack_into_dict(self.cmd_word_fmt, self.cmd_keys,
self.tx_data, 0, {'cmd': 1, 'rsvd1': 0, 'rsp': 0,
'msg_id': msg_id, 'rsvd2': 0, 'cmd_rsp': code})
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8,
len(tuple))
self.tx_index += 4
for index in range(len(tuple)):
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8,
tuple[index])
self.tx_index += 4
return self.tx_data
def create_memread_cmd(self, tuple):
"""
Creates a memory read command with memory addresses
"""
cmd = 'Memory Read'
print('CMD >>> %s.' % cmd)
code = self.get_cmd_code(cmd)
self.init_tx_data()
index = list(self.message_id.values()).index('Control')
msg_id = list(self.message_id.keys())[index]
bitstruct.pack_into_dict(self.cmd_word_fmt, self.cmd_keys,
self.tx_data, 0, {'cmd': 1, 'rsvd1': 0, 'rsp': 0,
'msg_id': msg_id, 'rsvd2': 0, 'cmd_rsp': code})
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8,
len(tuple))
self.tx_index += 4
for index in range(len(tuple)):
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8,
tuple[index])
self.tx_index += 4
return self.tx_data
def create_loadfw_cmd(self, size, sha):
"""
Creates a command to load firmware with associated parameters
"""
cmd = 'Load Firmware'
print('CMD >>> %s.' % cmd)
code = self.get_cmd_code(cmd)
FW_NO_EXEC_FLAG = (1 << 26)
SEL_HP_CLK = (1 << 21)
LD_FW_HEADER_LEN = 3
count_flags = FW_NO_EXEC_FLAG | SEL_HP_CLK
count_flags |= (LD_FW_HEADER_LEN + int(len(sha) / 4))
self.init_tx_data()
index = list(self.message_id.values()).index('Control')
msg_id = list(self.message_id.keys())[index]
bitstruct.pack_into_dict(self.cmd_word_fmt, self.cmd_keys,
self.tx_data, 0, {'cmd': 1, 'rsvd1': 0, 'rsp': 0,
'msg_id': msg_id, 'rsvd2': 0, 'cmd_rsp': code})
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, count_flags)
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, 0xbe000000)
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, 0)
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, size)
self.tx_index += 4
self.endian_swap(self.tx_data, self.tx_index, sha)
self.tx_index += len(sha)
return self.tx_data
def create_execfw_cmd(self):
"""
Creates a command to excute firmware
"""
cmd = 'Execute'
print('CMD >>> %s.' % cmd)
code = self.get_cmd_code(cmd)
EXE_FW_HEADER_LEN = 1
count = EXE_FW_HEADER_LEN
self.init_tx_data()
index = list(self.message_id.values()).index('Control')
msg_id = list(self.message_id.keys())[index]
bitstruct.pack_into_dict(self.cmd_word_fmt, self.cmd_keys,
self.tx_data, 0, {'cmd': 1, 'rsvd1': 0, 'rsp': 0,
'msg_id': msg_id, 'rsvd2': 0, 'cmd_rsp': code})
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, count)
self.tx_index += 4
bitstruct.pack_into('u32', self.tx_data, self.tx_index * 8, 0xbe000000)
self.tx_index += 4
return self.tx_data
def create_bulk_message(self, data):
"""
Copies the input byte stream to the bulk message buffer
"""
self.endian_swap(self.tx_bulk_data, 0, data)
return self.tx_bulk_data[:len(data)]
def get_bulk_message_size(self):
"""
Returns the size of the bulk message buffer
"""
return len(self.tx_bulk_data)
| apache-2.0 | 9,050,911,253,903,432,000 | 33.948498 | 80 | 0.534078 | false |
cwaldbieser/txsshadmin | txsshadmin/proto_dispatcher.py | 1 | 3715 |
from twisted.conch.recvline import HistoricRecvLine
from twisted.python import log
from textwrap import dedent
def makeSSHDispatcherProtocolFactory(handlerFactory, *args, **kwds):
def makeDispatcherProtocol(avatar, *a, **k):
proto = SSHDispatcherProtocol()
proto.handler = handlerFactory(avatar, *args, **kwds)
return proto
return makeDispatcherProtocol
class SSHDispatcherProtocol(HistoricRecvLine):
prompt = "$"
CTRL_D = '\x04'
def connectionMade(self):
HistoricRecvLine.connectionMade(self)
self.keyHandlers.update({
self.CTRL_D: lambda: self.handler.onEOF(self)})
try:
self.handler.onConnect(self)
except AttributeError:
pass
self.showPrompt()
def showPrompt(self):
self.terminal.write("{0} ".format(self.prompt))
def getCommandFunc(self, cmd):
return getattr(self.handler, 'handle_{0}'.format(cmd), None)
def lineReceived(self, line):
line = line.strip()
if line:
argv = line.split()
cmd = argv[0]
args = argv[1:]
func = self.getCommandFunc(cmd)
if func:
try:
func(self, *args)
except Exception as ex:
self.terminal.write("Errors occured.")
self.terminal.nextLine()
log.msg(str(ex))
else:
self.terminal.write("Unknown command, '{0}'.".format(cmd))
self.terminal.nextLine()
self.showPrompt()
class BaseHandler(object):
def __init__(self, avatar):
self.avatar = avatar
commands = [attr[7:] for attr in dir(self)
if attr.startswith('handle_') and attr.lower() == attr]
commands.sort()
self.commandHelp = "Commands: {0}".format(' '.join(commands))
def onConnect(self, dispatcher):
pass
def handle_help(self, dispatcher, *args):
"""
Get help on a command.
help [COMMAND]
"""
terminal = dispatcher.terminal
if len(args) == 0:
terminal.write(self.commandHelp)
terminal.nextLine()
terminal.write("Use `help <command>` for help on a particular command.")
terminal.nextLine()
else:
cmd = args[0]
handler = "handle_{0}".format(cmd)
if hasattr(self, handler):
func = getattr(self, handler)
doc = dedent(func.__doc__)
lines = doc.split('\n')
for line in lines:
terminal.write(line)
terminal.nextLine()
else:
terminal.write("Unknown command, '{0}'.".format(cmd))
termnial.nextLine()
def handle_whoami(self, dispatcher):
"""
Show who you are logged in as.
"""
terminal = dispatcher.terminal
terminal.write("You are '{0}'.".format(self.avatar.avatarId))
terminal.nextLine()
def handle_clear(self, dispatcher):
"""
Clear the terminal.
"""
terminal = dispatcher.terminal
terminal.reset()
def handle_quit(self, dispatcher):
"""
Exit this admin shell.
"""
terminal = dispatcher.terminal
terminal.write("Goodbye.")
terminal.nextLine()
terminal.loseConnection()
def onEOF(self, dispatcher):
terminal = dispatcher.terminal
lineBuffer = dispatcher.lineBuffer
if lineBuffer:
terminal.write('\a')
else:
self.handle_quit(dispatcher)
| gpl-3.0 | -4,181,060,626,740,396,500 | 28.484127 | 84 | 0.545626 | false |
nicfit/Clique | clique/app/identity.py | 1 | 1960 | # -*- coding: utf-8 -*-
import sys
import json
import argparse
import nicfit
from .. import Identity, IdentityChain
from .utils import prompt
from ..common import thumbprint, newJwk, jwkIsPrivate
@nicfit.command.register
class identity(nicfit.Command):
HELP = "Identity and stuffs"
def _initArgParser(self, parser):
parser.add_argument("-i", "--identity", default=None,
type=argparse.FileType('r'),
help="File containing an Identity in JSON format.")
parser.add_argument("-k", "--keyfile", default=None,
type=argparse.FileType('r'),
help="File containing a private JWK.")
parser.add_argument("--iss", default=None,
help="Identity issuer.")
def _run(self):
if self.args.identity:
ident = Identity.fromJson(json.loads(self.args.identity.read()))
else:
if self.args.keyfile:
try:
jwk = json.loads(self.args.keyfile.read())
key = newJwk(**jwk)
if not jwkIsPrivate(key):
raise ValueError(
"Key file does not contain a private key")
except Exception as ex:
print("Error loading key: " + str(ex), file=sys.stderr)
return 1
key._params["kid"] = thumbprint(key)
else:
key = Identity.generateKey()
iss = self.args.iss or prompt("iss? ")
ident = Identity(iss, key)
ident.idchain = IdentityChain.fromIdentity(ident,
ident.acct).serialize()
print(json.dumps(ident.toJson(private=True), indent=2, sort_keys=True))
idchain = IdentityChain.deserialize(ident.idchain)
print("\n## IdentityChain ##:\n" + str(idchain))
| lgpl-3.0 | 3,020,030,026,125,875,700 | 35.981132 | 79 | 0.529082 | false |
rgcarrasqueira/python-pagseguro | pagseguro/configs.py | 1 | 6137 | # coding: utf-8
import abc
class AbstractConfig(object): # pragma: no cover
__metaclass__ = abc.ABCMeta
def __init__(self, sandbox=False):
self.sandbox = sandbox
@classmethod
def get(self, key, default=None):
return getattr(self, key, default)
@abc.abstractproperty
def BASE_URL(self):
return self._BASE_URL
@BASE_URL.setter
def BASE_URL(self, value):
self._BASE_URL = value
@abc.abstractproperty
def VERSION(self):
return self._VERSION
@VERSION.setter
def VERSION(self, value):
self._VERSION = value
@abc.abstractproperty
def CHECKOUT_SUFFIX(self):
return self._CHECKOUT_SUFFIX
@CHECKOUT_SUFFIX.setter
def CHECKOUT_SUFFIX(self, value):
self._CHECKOUT_SUFFIX = value
@abc.abstractproperty
def CHARSET(self):
return self._CHARSET
@CHARSET.setter
def CHARSET(self, value):
self._CHARSET = value
@abc.abstractproperty
def NOTIFICATION_SUFFIX(self):
return self._NOTIFICATION_SUFFIX
@NOTIFICATION_SUFFIX.setter
def NOTIFICATION_SUFFIX(self, value):
self._NOTIFICATION_SUFFIX = value
@abc.abstractproperty
def TRANSACTION_SUFFIX(self):
return self._TRANSACTION_SUFFIX
@TRANSACTION_SUFFIX.setter
def TRANSACTION_SUFFIX(self, value):
self._TRANSACTION_SUFFIX = value
@abc.abstractproperty
def QUERY_TRANSACTION_SUFFIX(self):
return self._QUERY_TRANSACTION_SUFFIX
@QUERY_TRANSACTION_SUFFIX.setter
def QUERY_TRANSACTION_SUFFIX(self, value):
self._QUERY_TRANSACTION_SUFFIX = value
@abc.abstractproperty
def CHECKOUT_URL(self):
return self._CHECKOUT_URL
@CHECKOUT_URL.setter
def CHECKOUT_URL(self, value):
self._CHECKOUT_URL = value
@abc.abstractproperty
def NOTIFICATION_URL(self):
return self._NOTIFICATION_URL
@NOTIFICATION_URL.setter
def NOTIFICATION_URL(self, value):
self._NOTIFICATION_URL = value
@abc.abstractproperty
def TRANSACTION_URL(self):
return self._TRANSACTION_URL
@TRANSACTION_URL.setter
def TRANSACTION_URL(self, value):
self._TRANSACTION_URL = value
@abc.abstractproperty
def QUERY_TRANSACTION_URL(self):
return self._QUERY_TRANSACTION_URL
@QUERY_TRANSACTION_URL.setter
def QUERY_TRANSACTION_URL(self, value):
self._QUERY_TRANSACTION_URL = value
@abc.abstractproperty
def CURRENCY(self):
return self._CURRENCY
@CURRENCY.setter
def CURRENCY(self, value):
self._CURRENCY = value
@abc.abstractproperty
def CTYPE(self):
return self._CTYPE
@CTYPE.setter
def CTYPE(self, value):
self._CTYPE = value
@abc.abstractproperty
def HEADERS(self):
return self._HEADERS
@HEADERS.setter
def HEADERS(self, value):
self._HEADERS = value
@abc.abstractproperty
def REFERENCE_PREFIX(self):
return self._REFERENCE_PREFIX
@REFERENCE_PREFIX.setter
def REFERENCE_PREFIX(self, value):
self._REFERENCE_PREFIX = value
@abc.abstractproperty
def PAYMENT_HOST(self):
return self._PAYMENT_HOST
@PAYMENT_HOST.setter
def PAYMENT_HOST(self, value):
self._PAYMENT_HOST = value
@abc.abstractproperty
def PAYMENT_URL(self):
return self._PAYMENT_URL
@PAYMENT_URL.setter
def PAYMENT_URL(self, value):
self._PAYMENT_URL = value
@abc.abstractproperty
def DATETIME_FORMAT(self):
return self._DATETIME_FORMAT
@DATETIME_FORMAT.setter
def DATETIME_FORMAT(self, value):
self._DATETIME_FORMAT = value
class Config(AbstractConfig):
BASE_URL = "https://ws.pagseguro.uol.com.br"
VERSION = "/v2/"
CHECKOUT_SUFFIX = VERSION + "checkout"
CHARSET = "UTF-8" # ISO-8859-1
NOTIFICATION_SUFFIX = VERSION + "transactions/notifications/%s"
PRE_APPROVAL_NOTIFICATION_SUFFIX = (
VERSION + "pre-approvals/notifications/%s"
)
PRE_APPROVAL_PAYMENT_URL = BASE_URL + VERSION + "pre-approvals/payment"
PRE_APPROVAL_CANCEL_URL = BASE_URL + VERSION + "pre-approvals/cancel/%s"
TRANSACTION_SUFFIX = VERSION + "transactions/%s"
QUERY_TRANSACTION_SUFFIX = VERSION + "transactions"
SESSION_CHECKOUT_SUFFIX = VERSION + "sessions/"
SESSION_CHECKOUT_URL = BASE_URL + SESSION_CHECKOUT_SUFFIX
TRANSPARENT_CHECKOUT_URL = BASE_URL + QUERY_TRANSACTION_SUFFIX + '/'
CHECKOUT_URL = BASE_URL + CHECKOUT_SUFFIX
NOTIFICATION_URL = BASE_URL + NOTIFICATION_SUFFIX
PRE_APPROVAL_NOTIFICATION_URL = BASE_URL + PRE_APPROVAL_NOTIFICATION_SUFFIX
TRANSACTION_URL = BASE_URL + TRANSACTION_SUFFIX
QUERY_TRANSACTION_URL = BASE_URL + QUERY_TRANSACTION_SUFFIX
QUERY_PRE_APPROVAL_URL = BASE_URL + VERSION + "pre-approvals"
CURRENCY = "BRL"
CTYPE = "application/x-www-form-urlencoded; charset={0}".format(CHARSET)
HEADERS = {"Content-Type": CTYPE}
REFERENCE_PREFIX = "REF%s"
PAYMENT_HOST = "https://pagseguro.uol.com.br"
PAYMENT_URL = PAYMENT_HOST + CHECKOUT_SUFFIX + "/payment.html?code=%s"
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
class ConfigSandbox(AbstractConfig):
BASE_URL = "https://ws.sandbox.pagseguro.uol.com.br"
VERSION = "/v2/"
CHECKOUT_SUFFIX = VERSION + "checkout"
CHARSET = "UTF-8" # ISO-8859-1
NOTIFICATION_SUFFIX = VERSION + "transactions/notifications/%s"
TRANSACTION_SUFFIX = VERSION + "transactions/%s"
QUERY_TRANSACTION_SUFFIX = VERSION + "transactions"
CHECKOUT_URL = BASE_URL + CHECKOUT_SUFFIX
NOTIFICATION_URL = BASE_URL + NOTIFICATION_SUFFIX
TRANSACTION_URL = BASE_URL + TRANSACTION_SUFFIX
QUERY_TRANSACTION_URL = BASE_URL + QUERY_TRANSACTION_SUFFIX
CURRENCY = "BRL"
CTYPE = "application/x-www-form-urlencoded; charset={0}".format(CHARSET)
HEADERS = {"Content-Type": CTYPE}
REFERENCE_PREFIX = "REF%s"
PAYMENT_HOST = "https://sandbox.pagseguro.uol.com.br"
PAYMENT_URL = PAYMENT_HOST + CHECKOUT_SUFFIX + "/payment.html?code=%s"
DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
| mit | -8,740,387,266,421,151,000 | 27.948113 | 79 | 0.663842 | false |
sguazt/dcsxx-testbed | tools/giws/classRepresentation/parameterGiws.py | 1 | 2638 | #!/usr/bin/python -u
# Copyright or Copr. INRIA/Scilab - Sylvestre LEDRU
#
# Sylvestre LEDRU - <[email protected]> <[email protected]>
#
# This software is a computer program whose purpose is to generate C++ wrapper
# for Java objects/methods.
#
# This software is governed by the CeCILL license under French law and
# abiding by the rules of distribution of free software. You can use,
# modify and/ or redistribute the software under the terms of the CeCILL
# license as circulated by CEA, CNRS and INRIA at the following URL
# "http://www.cecill.info".
#
# As a counterpart to the access to the source code and rights to copy,
# modify and redistribute granted by the license, users are provided only
# with a limited warranty and the software's author, the holder of the
# economic rights, and the successive licensors have only limited
# liability.
#
# In this respect, the user's attention is drawn to the risks associated
# with loading, using, modifying and/or developing or reproducing the
# software by the user in light of its specific status of free software,
# that may mean that it is complicated to manipulate, and that also
# therefore means that it is reserved for developers and experienced
# professionals having in-depth computer knowledge. Users are therefore
# encouraged to load and test the software's suitability as regards their
# requirements in conditions enabling the security of their systems and/or
# data to be ensured and, more generally, to use and operate it in the
# same conditions as regards security.
#
# The fact that you are presently reading this means that you have had
# knowledge of the CeCILL license and that you accept its terms.
#
# For more information, see the file COPYING
from datatypes.dataGiws import dataGiws
from datatypes.dataBufferGiws import dataBufferGiws
from datatypes.dataFactoryGiws import dataFactoryGiws
class parameterGiws():
__name=""
__type=""
def __init__(self, name, type):
myDataFactory=dataFactoryGiws()
self.__type=myDataFactory.create(type)
self.__name=name
def getName(self):
return self.__name
def getType(self):
return self.__type
def __str__(self):
return """%s %s, """ % (self.getType().getNativeType(), self.getName())
def generateCXXHeader(self):
""" Generate the profil of the parameter """
str="""%s %s""" % (self.getType().getNativeTypeWithConst(), self.getName())
if self.getType().isArray():
if self.getType().getDimensionArray() == 1:
str+=", int %sSize"%self.getName()
else:
str+=", int %sSize, int %sSizeCol"%(self.getName(),self.getName())
return str
| apache-2.0 | 8,050,276,257,811,369,000 | 37.794118 | 79 | 0.735027 | false |
jansky/JanskyBlog | misc.py | 1 | 1343 | """
The MIT License (MIT)
Copyright (c) 2014 Janský Důnska
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import os
from string import Template
def PrintHeaders(httpcode):
print "HTTP/1.0 " + httpcode
print "Content-type: text/html; charset=utf-8"
print
# Uncomment below to enable the simple WriteDocument function
#def WriteDocument(content,title=""):
# print "<!DOCTYPE html>"
# print "<html>"
# print "<head>"
# if title == "":
# print "<title>Blog</title>"
# else:
# print "<title>" + title + " | Blog</title>"
# print "<meta charset='utf-8'/>"
# print "<meta name='author' content='PyBloggingSystem v0.1'/>"
# print "<body>"
# print content
# print "</body>"
# print "</html>"
def DoDocumentTemplating(data, templateFile):
templateFileString = ""
with open(templateFile, 'r') as template_file:
templateFileString = template_file.read()
print Template(templateFileString).safe_substitute(data)
| mit | -4,770,807,393,456,153,000 | 25.294118 | 77 | 0.727069 | false |
Luciden/easl | easl/log.py | 1 | 5275 | __author__ = 'Dennis'
import csv
class Log(object):
"""
Simple log that contains all experiment information (actions, observations).
Time based. Logs for every time step what happened.
Can (not yet) be read from/to files etc.
"""
def __init__(self):
"""
Attributes
----------
log : [{}]
All entries.
An entry describes time, type of entry and its type-related data.
verbose : bool
If set to True, logging attempts are printed to stdout.
"""
self.log = []
self.verbose = False
self.time = 0
self.length = 0
def read_file(self, file_name):
"""
Parameters
----------
file_name : string
Name of the file to read the log from.
"""
self.__from_file(file_name)
def set_verbose(self, verbose=True):
self.verbose = verbose
def get_length(self):
return self.length
def get_at_time(self, time):
entries = []
for e in self.log:
if e["_time"] == time:
entries.append(e)
return entries
def time_tick(self, time=None):
if time is None:
self.time += 1
else:
self.time = time
if self.verbose:
print "t {0}".format(self.time)
def do_log(self, kind, data):
entry = {"_time": self.time, "_type": kind}
entry.update(data)
self.log.append(entry)
self.length = self.time
if self.verbose:
print entry
def write_file(self, name):
"""
Writes all entries to a file.
Parameters
----------
name : string
Name of the file to write to.
"""
f = open(name, 'wt')
try:
writer = csv.DictWriter(f)
for entry in self.log:
writer.writerow(entry)
finally:
f.close()
def __from_file(self, name):
"""
Reads all entries from a file.
Parameters
----------
name : string
Name of the file to read from.
"""
f = open(name, 'rt')
try:
reader = csv.DictReader(f)
for row in reader:
self.log.append(row)
finally:
f.close()
def make_data(self, file_name, attribute_labels, number=None):
"""
Parameters
----------
file : string
File name to write to.
"""
suffix = "" if number is None else "_{0}".format(str(number))
f = open(file_name + suffix + ".csv", "wt")
try:
writer = csv.writer(f, delimiter=' ')
# Calculate changes in position for every limb.
attributes = attribute_labels.keys()
labels = attribute_labels.values()
data = []
for entry in self.log:
if "observation" in entry and entry["observation"] in attributes:
t = entry["_time"]
if len(data) - 1 < t:
data.append({})
data[t][entry["observation"]] = entry["value"]
writer.writerow(["t"] + labels)
for i in range(len(data) - 1):
k = [0] * len(attributes)
for p in range(len(attributes)):
if data[i][attributes[p]] != data[i + 1][attributes[p]]:
k[p] = 1
writer.writerow([i] + k)
finally:
f.close()
@staticmethod
def make_bins(name, c, n, number=None):
"""
Parameters
----------
c : int
Number of columns next to the time column.
"""
suffix = "" if number is None else "_{0}".format(str(number))
f = open(name + suffix + ".csv", "rt")
o = open(name + suffix + "_bins.csv", "wt")
try:
# Skip header
f.readline()
reader = csv.reader(f, delimiter=' ')
bins = []
i_bin = 1
current = [0] * len(c)
for row in reader:
if int(row[0]) >= i_bin * n:
bins.append(current)
i_bin += 1
current = [0] * len(c)
current = [x + y for (x, y) in zip(current, [int(z) for z in row[1:]])]
bins.append(current)
writer = csv.writer(o, delimiter=' ')
writer.writerow(["block"] + c)
for i in range(len(bins)):
writer.writerow([str(i)] + [str(x) for x in bins[i]])
finally:
f.close()
o.close()
@staticmethod
def write_data(name, c, data):
o = open(name + ".csv", "wt")
try:
writer = csv.writer(o, delimiter=' ')
writer.writerow(["block"] + c)
for i in range(len(data)):
writer.writerow([str(i)] + [str(x) for x in data[i]])
finally:
o.close()
| mit | 3,446,437,785,109,815,300 | 26.208556 | 87 | 0.442085 | false |
schristakidis/p2ner | p2ner/components/overlay/completeclient/completeclient/messages/submessages.py | 1 | 5521 | # -*- coding: utf-8 -*-
# Copyright 2012 Loris Corazza, Sakis Christakidis
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from p2ner.base.ControlMessage import ControlMessage, trap_sent,probe_all,BaseControlMessage
from p2ner.base.Consts import MessageCodes as MSG
from construct import Container
class AskInitNeighs(BaseControlMessage):
    """Ask a peer for the initial neighbour set of a sub-overlay stream.

    Fire-and-forget: send failures are absorbed by the trap_sent errback.
    """
    type = "subsidmessage"
    code = MSG.ASK_INIT_NEIGHS_SUB
    ack = True

    @classmethod
    def send(cls, sid, superOverlay, interOverlay, peer, out):
        payload = Container(streamid=sid,
                            superOverlay=superOverlay,
                            interOverlay=interOverlay)
        deferred = out.send(cls, payload, peer)
        deferred.addErrback(trap_sent)
        return deferred
class PeerListMessage(ControlMessage):
    """Delivers a list of candidate peers for a sub-overlay stream.

    On receipt, each listed peer (or None when the list is empty) is
    forwarded to the sub-overlay so it can decide whether to initiate
    an add-neighbour exchange.
    """
    type = "subpeerlistmessage"
    code = MSG.SEND_IP_LIST_SUB
    ack = True

    def trigger(self, message):
        # Handle only messages for our stream that this sub-overlay
        # accepts from an initiator.
        if self.stream.id != message.streamid:
            return False
        if not self.subOverlay.checkTriggerInitiatorsMessage(message.superOverlay,
                                                             message.interOverlay):
            return False
        return True

    def action(self, message, peer):
        self.log.debug('received peerList message from %s for %s', peer, str(message.peer))
        # An empty list still triggers one callback with None as the
        # candidate, preserving the "no peers available" notification.
        candidates = message.peer if message.peer else [None]
        for candidate in candidates:
            self.subOverlay.checkSendAddNeighbour(candidate, peer)

    @classmethod
    def send(cls, sid, peerlist, peer, out):
        msg = Container(streamid=sid, peer=peerlist)
        return out.send(cls, msg, peer).addErrback(trap_sent)
class AddNeighbourMessage(ControlMessage):
    """Request from a remote peer to become a neighbour in a sub-overlay.

    The handler records the sender's data port, reported bandwidth and
    (when present) its local endpoint details on the peer object, then
    delegates the accept/reject decision to the sub-overlay.
    """
    type = "suboverlaymessage"
    code = MSG.ADD_NEIGH_SUB
    ack = True

    def trigger(self, message):
        # Handle only messages for our stream that match this sub-overlay.
        if self.stream.id != message.streamid or not self.subOverlay.checkTriggerMessage(message.superOverlay,message.interOverlay):
            return False
        return True

    def action(self, message, peer):
        peer.dataPort = message.port
        peer.reportedBW = message.bw
        if message.peer:
            # Sender also supplied its local endpoint (behind-NAT info),
            # presumably used later for hole punching -- see hpunch flag.
            peer.lip = message.peer.ip
            peer.lport = message.peer.port
            peer.ldataPort = message.peer.dataPort
            peer.hpunch = message.peer.hpunch
        self.log.debug('received add neigh message from %s', peer)
        # NOTE: removed a leftover Python-2 debug `print` here; the
        # log.debug call above already records the event.
        self.subOverlay.checkAcceptNeighbour(peer)

    @classmethod
    def send(cls, id, sOver, iOver, port, bw, inpeer, peer, out):
        msg = Container(streamid=id, superOverlay=sOver, interOverlay=iOver,
                        port=int(port), bw=bw, peer=inpeer)
        d = out.send(cls, msg, peer)
        d.addErrback(trap_sent)
        return d
class ConfirmNeighbourMessage(ControlMessage):
type = "subsidmessage"
code = MSG.CONFIRM_NEIGH_SUB
ack = True
def trigger(self, message):
if self.stream.id != message.streamid or not self.subOverlay.checkTriggerMessage(message.superOverlay,message.interOverlay):
return False
return True
def action(self, message, peer):
self.subOverlay.addNeighbour(peer)
@classmethod
def send(cls, sid, sOver,iOver,peer, out):
d=out.send(cls, Container(streamid = sid, superOverlay=sOver, interOverlay=iOver), peer)
d.addErrback(trap_sent)
return d
class SuggestNewPeerMessage(ControlMessage):
type = "subpeerlistmessage"
code = MSG.SUGGEST_NEW_PEER_SUB
ack = True
def trigger(self, message):
if self.stream.id != message.streamid or not self.subOverlay.checkTriggerMessage(message.superOverlay,message.interOverlay):
return False
return True
def action(self, message, peer):
self.log.debug('received suggest new peer message from %s',peer)
self.subOverlay.suggestNewPeer(peer,message.peer)
@classmethod
def send(cls, sid,sover,iover, peerlist, peer, out, suc_func=None,err_func=None):
return out.send(cls, Container(streamid=sid, superOverlay=sover, interOverlay=iover, peer=peerlist), peer).addErrback(probe_all,err_func=err_func,suc_func=suc_func)
class SuggestMessage(ControlMessage):
type = "subpeerlistmessage"
code = MSG.SUGGEST_SUB
ack = True
def trigger(self, message):
if self.stream.id != message.streamid or not self.subOverlay.checkTriggerMessage(message.superOverlay,message.interOverlay):
return False
return True
def action(self, message, peer):
self.log.debug('received suggest message from %s',peer)
self.subOverlay.availableNewPeers(peer,message.peer)
@classmethod
def send(cls, sid,sover,iover, peerlist, peer, out):
return out.send(cls, Container(streamid=sid, superOverlay=sover,interOverlay=iover, peer=peerlist), peer).addErrback(trap_sent)
class PingMessage(ControlMessage):
type='basemessage'
code=MSG.ADDNEIGH_RTT
ack=True
def trigger(self,message):
return True
def action(self,message,peer):
return
@classmethod
def send(cls, peer, out):
out.send(cls,Container(message=None),peer).addErrback(trap_sent)
| apache-2.0 | -2,815,734,315,043,387,400 | 33.080247 | 172 | 0.690998 | false |
GeosoftInc/gxpy | geosoft/gxapi/GXEDOC.py | 1 | 18354 | ### extends 'class_empty.py'
### block ClassImports
# NOTICE: Do not edit anything here, it is generated code
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
### endblock ClassImports
### block Header
# NOTICE: The code generator will not replace the code in this block
### endblock Header
### block ClassImplementation
# NOTICE: Do not edit anything here, it is generated code
class GXEDOC(gxapi_cy.WrapEDOC):
"""
GXEDOC class.
The `GXEDOC <geosoft.gxapi.GXEDOC>` class provides access to a generic documents views as loaded within
Oasis montaj.
"""
def __init__(self, handle=0):
super(GXEDOC, self).__init__(GXContext._get_tls_geo(), handle)
@classmethod
def null(cls):
"""
A null (undefined) instance of `GXEDOC <geosoft.gxapi.GXEDOC>`
:returns: A null `GXEDOC <geosoft.gxapi.GXEDOC>`
:rtype: GXEDOC
"""
return GXEDOC()
def is_null(self):
"""
Check if this is a null (undefined) instance
:returns: True if this is a null (undefined) instance, False otherwise.
:rtype: bool
"""
return self._internal_handle() == 0
# GMSYS 3D Models
@classmethod
def create_new_gms_3d(cls, name, nx, ny, type):
"""
Creates a new `GXGMSYS <geosoft.gxapi.GXGMSYS>` 3D Model into the workspace, flags as new.
:param name: Document to load.
:param nx: X Size
:param ny: Y Size
:param type: :ref:`GMS3D_MODELTYPE`
:type name: str
:type nx: int
:type ny: int
:type type: int
:returns: Handle to the newly created edited model.
:rtype: GXEDOC
.. versionadded:: 5.0
**License:** `Geosoft Extended End-User License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-ext-end-user-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** See `load <geosoft.gxapi.GXEDOC.load>`. This is used for brand new documents, it also sets
an internal flag such that if on closing the user chooses
not to save changes, the document is deleted thus keeping the
project folders clean.
"""
ret_val = gxapi_cy.WrapEDOC._create_new_gms_3d(GXContext._get_tls_geo(), name.encode(), nx, ny, type)
return GXEDOC(ret_val)
# Miscellaneous
@classmethod
def current(cls, type):
"""
This method returns the Current Edited Document.
:param type: :ref:`EDOC_TYPE`
:type type: int
:returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object
:rtype: GXEDOC
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = gxapi_cy.WrapEDOC._current(GXContext._get_tls_geo(), type)
return GXEDOC(ret_val)
@classmethod
def current_no_activate(cls, type):
"""
This method returns the Current Edited Document.
:param type: :ref:`EDOC_TYPE`
:type type: int
:returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object
:rtype: GXEDOC
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** This function acts just like `current <geosoft.gxapi.GXEDOC.current>` except that the document is not activated (brought to foreground) and no
guarantee is given about which document is currently active.
"""
ret_val = gxapi_cy.WrapEDOC._current_no_activate(GXContext._get_tls_geo(), type)
return GXEDOC(ret_val)
@classmethod
def current_if_exists(cls, type):
"""
This method returns the Current Edited Document.
:param type: :ref:`EDOC_TYPE`
:type type: int
:returns: `GXEDOC <geosoft.gxapi.GXEDOC>` Object to current edited document. If there is no current document,
the user is not prompted for a document, and 0 is returned.
:rtype: GXEDOC
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = gxapi_cy.WrapEDOC._current_if_exists(GXContext._get_tls_geo(), type)
return GXEDOC(ret_val)
@classmethod
def get_documents_lst(cls, lst, path, type):
"""
Load the file names of open documents into a `GXLST <geosoft.gxapi.GXLST>`.
:param lst: `GXLST <geosoft.gxapi.GXLST>` to load
:param path: :ref:`EDOC_PATH`
:param type: :ref:`EDOC_TYPE`
:type lst: GXLST
:type path: int
:type type: int
:returns: The number of documents loaded into the `GXLST <geosoft.gxapi.GXLST>`.
The `GXLST <geosoft.gxapi.GXLST>` is cleared first.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = gxapi_cy.WrapEDOC._get_documents_lst(GXContext._get_tls_geo(), lst, path, type)
return ret_val
def get_name(self, name):
"""
Get the name of the document object of this `GXEDOC <geosoft.gxapi.GXEDOC>`.
:param name: Name returned
:type name: str_ref
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
name.value = self._get_name(name.value.encode())
def get_window_state(self):
"""
Retrieve the current state of the document window
:returns: :ref:`EDOC_WINDOW_STATE`
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = self._get_window_state()
return ret_val
@classmethod
def have_current(cls, type):
"""
Returns true if a document is loaded
:param type: :ref:`EDOC_TYPE`
:type type: int
:rtype: bool
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = gxapi_cy.WrapEDOC._have_current(GXContext._get_tls_geo(), type)
return ret_val
@classmethod
def loaded(cls, name, type):
"""
Returns 1 if a document is loaded .
:param name: document name
:param type: :ref:`EDOC_TYPE`
:type name: str
:type type: int
:returns: 1 if document is loaded, 0 otherwise.
:rtype: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = gxapi_cy.WrapEDOC._loaded(GXContext._get_tls_geo(), name.encode(), type)
return ret_val
def get_window_position(self, left, top, right, bottom, state, is_floating):
"""
Get the map window's position and dock state
:param left: Window left position
:param top: Window top position
:param right: Window right position
:param bottom: Window bottom position
:param state: Window state :ref:`EDOC_WINDOW_STATE`
:param is_floating: Docked or floating :ref:`EDOC_WINDOW_POSITION`
:type left: int_ref
:type top: int_ref
:type right: int_ref
:type bottom: int_ref
:type state: int_ref
:type is_floating: int_ref
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
left.value, top.value, right.value, bottom.value, state.value, is_floating.value = self._get_window_position(left.value, top.value, right.value, bottom.value, state.value, is_floating.value)
def set_window_position(self, left, top, right, bottom, state, is_floating):
"""
Get the map window's position and dock state
:param left: Window left position
:param top: Window top position
:param right: Window right position
:param bottom: Window bottom position
:param state: Window state :ref:`EDOC_WINDOW_STATE`
:param is_floating: Docked or floating :ref:`EDOC_WINDOW_POSITION`
:type left: int
:type top: int
:type right: int
:type bottom: int
:type state: int
:type is_floating: int
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
self._set_window_position(left, top, right, bottom, state, is_floating)
def read_only(self):
"""
Checks if a document is currently opened in a read-only mode.
:rtype: bool
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
ret_val = self._read_only()
return ret_val
@classmethod
def load(cls, name, type):
"""
Loads a list of documents into the workspace
:param name: list of documents (';' or '|' delimited) to load.
:param type: :ref:`EDOC_TYPE`
:type name: str
:type type: int
:returns: Handle to current edited document, which will be the last
document in the list.
:rtype: GXEDOC
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** The last listed document will become the current document.
Only the first file in the list may have a directory path.
All other files in the list are assumed to be in the same
directory as the first file.
"""
ret_val = gxapi_cy.WrapEDOC._load(GXContext._get_tls_geo(), name.encode(), type)
return GXEDOC(ret_val)
@classmethod
def load_no_activate(cls, name, type):
"""
Loads a list of documents into the workspace
:param name: list of documents (';' or '|' delimited) to load.
:param type: :ref:`EDOC_TYPE`
:type name: str
:type type: int
:returns: Handle to current edited document, which will be the last
document in the list.
:rtype: GXEDOC
.. versionadded:: 9.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** This function acts just like `load <geosoft.gxapi.GXEDOC.load>` except that the document(s) is not activated (brought to foreground) and no
guarantee is given about which document is currently active.
"""
ret_val = gxapi_cy.WrapEDOC._load_no_activate(GXContext._get_tls_geo(), name.encode(), type)
return GXEDOC(ret_val)
def make_current(self):
"""
Makes this `GXEDOC <geosoft.gxapi.GXEDOC>` object the current active object to the user.
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
self._make_current()
def set_window_state(self, state):
"""
Changes the state of the document window
:param state: :ref:`EDOC_WINDOW_STATE`
:type state: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
self._set_window_state(state)
@classmethod
def sync(cls, file, type):
"""
Syncronize the Metadata of a document that is not currently open
:param file: Document file name
:param type: :ref:`EDOC_TYPE`
:type file: str
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
gxapi_cy.WrapEDOC._sync(GXContext._get_tls_geo(), file.encode(), type)
def sync_open(self):
"""
Syncronize the Metadata of a document
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
self._sync_open()
@classmethod
def un_load(cls, name, type):
"""
Unloads an edited document.
:param name: Name of document to unload
:param type: :ref:`EDOC_TYPE`
:type name: str
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** If the document is not loaded, nothing happens.
Same as `un_load_verify <geosoft.gxapi.GXEDOC.un_load_verify>` with FALSE to prompt save.
"""
gxapi_cy.WrapEDOC._un_load(GXContext._get_tls_geo(), name.encode(), type)
@classmethod
def un_load_all(cls, type):
"""
Unloads all opened documents
:param type: :ref:`EDOC_TYPE`
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
"""
gxapi_cy.WrapEDOC._un_load_all(GXContext._get_tls_geo(), type)
@classmethod
def un_load_discard(cls, name, type):
"""
Unloads a document in the workspace, discards changes.
:param name: Name of document to unload
:param type: :ref:`EDOC_TYPE`
:type name: str
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** If the document is not loaded, nothing happens.
"""
gxapi_cy.WrapEDOC._un_load_discard(GXContext._get_tls_geo(), name.encode(), type)
@classmethod
def un_load_verify(cls, name, verify, type):
"""
Unloads an edited document, optional prompt to save.
:param name: Name of document to unload
:param verify: :ref:`EDOC_UNLOAD`
:param type: :ref:`EDOC_TYPE`
:type name: str
:type verify: int
:type type: int
.. versionadded:: 5.0
**License:** `Geosoft Open License <https://geosoftgxdev.atlassian.net/wiki/spaces/GD/pages/2359406/License#License-open-lic>`_
**Limitations:** May not be available while executing a command line program.
**Note:** If the document is not loaded, nothing happens.
The user can be prompted to save before unloading.
If `EDOC_UNLOAD_NO_PROMPT <geosoft.gxapi.EDOC_UNLOAD_NO_PROMPT>`, data is always saved.
"""
gxapi_cy.WrapEDOC._un_load_verify(GXContext._get_tls_geo(), name.encode(), verify, type)
### endblock ClassImplementation
### block ClassExtend
# NOTICE: The code generator will not replace the code in this block
### endblock ClassExtend
### block Footer
# NOTICE: The code generator will not replace the code in this block
### endblock Footer | bsd-2-clause | -78,147,204,572,312,750 | 30.537801 | 198 | 0.600959 | false |
2013Commons/hue | apps/beeswax/src/beeswax/views.py | 1 | 35107 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import re
import sys
import time
from django import forms
from django.contrib import messages
from django.contrib.auth.models import User
from django.db.models import Q
from django.http import HttpResponse, QueryDict
from django.shortcuts import redirect
from django.utils.html import escape
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from desktop.appmanager import get_apps_dict
from desktop.context_processors import get_app_name
from desktop.lib.paginator import Paginator
from desktop.lib.django_util import copy_query_dict, format_preserving_redirect, render
from desktop.lib.django_util import login_notrequired, get_desktop_uri_prefix
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import smart_unicode
from desktop.models import Document
from jobsub.parameterization import find_variables
import beeswax.forms
import beeswax.design
import beeswax.management.commands.beeswax_install_examples
from beeswax import common, data_export, models
from beeswax.models import SavedQuery, QueryHistory
from beeswax.server import dbms
from beeswax.server.dbms import expand_exception, get_query_server_config, QueryServerException
LOG = logging.getLogger(__name__)
def index(request):
  """Default entry point: hand the request straight to the query editor."""
  return execute_query(request)
"""
Design views
"""
def save_design(request, form, type_, design, explicit_save):
  """
  Persist the submitted design and return the SavedQuery that now holds it.

  * With ``explicit_save``, the data is saved into the current design.
  * Otherwise (the user clicked submit), the current design is NOT
    overwritten: if the data changed, a new "auto" design is created,
    named after the current one with AUTO_DESIGN_SUFFIX appended.

  May return a different SavedQuery than the one passed in.
  Assumes ``form.saveform`` is the SaveForm and that it validates.
  """
  authorized_get_design(request, design.id)
  assert form.saveform.is_valid()

  sub_design_form = form  # Beeswax/Impala case: the whole form is the design form.

  if type_ in (models.HQL, models.IMPALA):
    design_cls = beeswax.design.HQLdesign
  elif type_ == models.SPARK:
    from spark.design import SparkDesign
    design_cls = SparkDesign
    sub_design_form = form.query
  else:
    raise ValueError(_('Invalid design type %(type)s') % {'type': type_})

  design_obj = design_cls(sub_design_form, query_type=type_)

  return _save_design(request.user, design, type_, design_obj, explicit_save,
                      form.saveform.cleaned_data['name'],
                      form.saveform.cleaned_data['desc'])
def _save_design(user, design, type_, design_obj, explicit_save, name=None, desc=None):
  """
  Write ``design_obj`` into a SavedQuery, persist it, and sync its Document.

  An explicit save updates the given design in place (permission permitting).
  An implicit save never overwrites: if the data changed, a clone is
  auto-saved under a derived name; auto-saved designs are also pushed into
  the user's history. Returns the SavedQuery actually saved, which may be a
  clone of the ``design`` passed in.
  """
  parent_design = design
  serialized = design_obj.dumps()

  if explicit_save and (not design.doc.exists() or design.doc.get().can_write_or_exception(user)):
    # User asked to save: overwrite the existing design.
    design.name = name
    design.desc = desc
    design.is_auto = False
  elif design_obj != parent_design.get_design():
    # Implicit save and the data differs: keep the original intact and
    # auto-save a copy instead. Nothing happens when the data is unchanged.
    if parent_design.id is not None:
      # Clone only when the parent is an already-persisted model.
      design = parent_design.clone(new_owner=user)
    if not parent_design.is_auto:
      design.name = parent_design.name + models.SavedQuery.AUTO_DESIGN_SUFFIX
    else:
      design.name = models.SavedQuery.DEFAULT_NEW_DESIGN_NAME
    design.is_auto = True

  design.type = type_
  design.data = serialized

  design.save()

  LOG.info('Saved %s design "%s" (id %s) for %s' % (explicit_save and '' or 'auto ', design.name, design.id, design.owner))

  if design.doc.exists():
    design.doc.update(name=design.name, description=design.desc)
  else:
    Document.objects.link(design, owner=design.owner, extra=design.type, name=design.name, description=design.desc)

  if design.is_auto:
    design.doc.get().add_to_history()

  return design
def delete_design(request):
  """
  Delete the selected designs (POST) or render a confirmation page (GET).

  POST params:
    designs_selection - list of design ids to remove
    skipTrash         - 'false' (default) moves the designs' documents to
                        trash; anything else deletes documents and designs
                        permanently.
  """
  if request.method == 'POST':
    ids = request.POST.getlist('designs_selection')
    designs = dict([(design_id, authorized_get_design(request, design_id, owner_only=True)) for design_id in ids])

    if None in designs.values():
      # Name the ids that did not resolve to a design (was mislabeled 'name' before).
      LOG.error('Cannot delete non-existent design(s) %s' % ','.join([design_id for design_id, design in designs.items() if design is None]))
      return list_designs(request)

    # Hoisted out of the loop: the flag is the same for every design.
    skip_trash = request.POST.get('skipTrash', 'false') != 'false'

    for design in designs.values():
      if skip_trash:
        design.doc.all().delete()
        design.delete()
      else:
        design.doc.get().send_to_trash()
    return redirect(reverse(get_app_name(request) + ':list_designs'))
  else:
    return render('confirm.mako', request, {'url': request.path, 'title': _('Delete design(s)?')})
def restore_design(request):
  """Restore trashed designs (POST) or render a confirmation page (GET)."""
  if request.method != 'POST':
    return render('confirm.mako', request, {'url': request.path, 'title': _('Restore design(s)?')})

  ids = request.POST.getlist('designs_selection')
  designs = dict((design_id, authorized_get_design(request, design_id)) for design_id in ids)

  if None in designs.values():
    LOG.error('Cannot restore non-existent design(s) %s' % ','.join([key for key, name in designs.items() if name is None]))
    return list_designs(request)

  for design in designs.values():
    design.doc.get().restore_from_trash()

  return redirect(reverse(get_app_name(request) + ':list_designs'))
def clone_design(request, design_id):
  """Duplicate a design (belonging to any user) into the current user's account."""
  original = authorized_get_design(request, design_id)
  if original is None:
    LOG.error('Cannot clone non-existent design %s' % (design_id,))
    return list_designs(request)

  new_design = original.clone(request.user)
  new_design.save()

  # Replace the auto-created document links with a copy of the original's.
  cloned_doc = original.doc.get().copy(owner=request.user)
  new_design.doc.all().delete()
  new_design.doc.add(cloned_doc)

  messages.info(request, _('Copied design: %(name)s') % {'name': original.name})

  return format_preserving_redirect(
      request, reverse(get_app_name(request) + ':execute_design', kwargs={'design_id': new_design.id}))
def list_designs(request):
  """
  Show all saved queries.

  GET parameters (all prefixed with 'q-'):
    page=<n>    - pagination, defaults to 1
    user=<name> - show designs belonging to a user; default all users
    sort=<key>  - one of "date", "name", "desc", "type"; "-key" reverses.
                  Default "-date".
    text=<frag> - search fragment in names and descriptions
  The 'type' filter is forced to the current app.
  """
  DEFAULT_PAGE_SIZE = 20
  prefix = 'q-'

  filters = _copy_prefix(prefix, request.GET)
  # Restrict to this app's design type regardless of what the client sent.
  filters[prefix + 'type'] = get_app_name(request)

  page, filter_params = _list_designs(request.user, filters, DEFAULT_PAGE_SIZE, prefix)

  return render('list_designs.mako', request, {
    'page': page,
    'filter_params': filter_params,
    'user': request.user,
    'designs_json': json.dumps([design.id for design in page.object_list])
  })
def list_trashed_designs(request):
  """Show the trashed saved queries for the current app, same filters as list_designs."""
  DEFAULT_PAGE_SIZE = 20
  prefix = 'q-'

  filters = _copy_prefix(prefix, request.GET)
  # Restrict to this app's design type regardless of what the client sent.
  filters[prefix + 'type'] = get_app_name(request)

  page, filter_params = _list_designs(request.user, filters, DEFAULT_PAGE_SIZE, prefix, is_trashed=True)

  return render('list_trashed_designs.mako', request, {
    'page': page,
    'filter_params': filter_params,
    'user': request.user,
    'designs_json': json.dumps([design.id for design in page.object_list])
  })
def my_queries(request):
  """
  Combined view of the current user's query history and saved designs.

  Understands the GET params of ``list_query_history`` (prefixed 'h-') and
  of ``list_designs`` (prefixed 'q-'). The ``user`` filter is always forced
  to the current user, since this page only shows what belongs to them.
  """
  DEFAULT_PAGE_SIZE = 30
  app_name = get_app_name(request)

  # History half: 'h-' prefixed filters, pinned to the current user and app.
  hist_prefix = 'h-'
  hist_query = _copy_prefix(hist_prefix, request.GET)
  hist_query[hist_prefix + 'user'] = request.user
  hist_query[hist_prefix + 'type'] = app_name
  hist_page, hist_filter = _list_query_history(request.user,
                                               hist_query,
                                               DEFAULT_PAGE_SIZE,
                                               hist_prefix)

  # Saved-design half: 'q-' prefixed filters, pinned the same way.
  design_prefix = 'q-'
  design_query = _copy_prefix(design_prefix, request.GET)
  design_query[design_prefix + 'user'] = request.user
  design_query[design_prefix + 'type'] = app_name
  query_page, query_filter = _list_designs(request.user, design_query, DEFAULT_PAGE_SIZE, design_prefix)

  filter_params = hist_filter
  filter_params.update(query_filter)

  return render('my_queries.mako', request, {
    'request': request,
    'h_page': hist_page,
    'q_page': query_page,
    'filter_params': filter_params,
    'designs_json': json.dumps([design.id for design in query_page.object_list])
  })
def list_query_history(request):
  """
  View the history of query (for the current user).

  We get here from /beeswax/query_history?filterargs, with the options being:
    page=<n>            - Controls pagination. Defaults to 1.
    user=<name>         - Show history items from a user. Default to current user only.
                          Also accepts ':all' to show all history items.
    type=<type>         - <type> is "beeswax|impala", for design type. Default to show all.
    design_id=<id>      - Show history for this particular design id.
    sort=<key>          - Sort by the attribute <key>, which is one of:
                            "date", "state", "name" (design name), and "type" (design type)
                          Accepts the form "-date", which sort in descending order.
                          Default to "-date".
    auto_query=<bool>   - Show auto generated actions (drop table, read data, etc). Default True

  With format=json, returns a JSON payload instead of the HTML page.
  """
  DEFAULT_PAGE_SIZE = 30
  prefix = 'q-'

  # Only superusers may see (and share) other users' history.
  share_queries = request.user.is_superuser

  querydict_query = request.GET.copy()
  if not share_queries:
    querydict_query[prefix + 'user'] = request.user.username

  app_name = get_app_name(request)
  querydict_query[prefix + 'type'] = app_name

  page, filter_params = _list_query_history(request.user, querydict_query, DEFAULT_PAGE_SIZE, prefix)

  # Single lookup with a plain default; the old `x and x or ''` idiom fetched
  # the value twice and shadowed the builtin `filter`.
  search_filter = request.GET.get(prefix + 'search') or ''

  if request.GET.get('format') == 'json':
    resp = {
      'queries': [massage_query_history_for_json(app_name, query_history) for query_history in page.object_list]
    }
    return HttpResponse(json.dumps(resp), mimetype="application/json")

  return render('list_history.mako', request, {
    'request': request,
    'page': page,
    'filter_params': filter_params,
    'share_queries': share_queries,
    'prefix': prefix,
    'filter': search_filter,
  })
def massage_query_history_for_json(app_name, query_history):
  """Flatten a QueryHistory row into the dict shape the JSON history endpoint emits."""
  submitted = query_history.submission_date

  # Failed queries get no results link.
  if query_history.is_failure():
    results_url = ""
  else:
    results_url = reverse(app_name + ':watch_query_history', kwargs={'query_history_id': query_history.id})

  return {
    'query': query_history.query,
    'timeInMs': time.mktime(submitted.timetuple()),
    'timeFormatted': submitted.strftime("%x %X"),
    'designUrl': reverse(app_name + ':execute_design', kwargs={'design_id': query_history.design.id}),
    'resultsUrl': results_url
  }
def download(request, id, format):
try:
query_history = authorized_get_query_history(request, id, must_exist=True)
db = dbms.get(request.user, query_history.get_query_server_config())
LOG.debug('Download results for query %s: [ %s ]' % (query_history.server_id, query_history.query))
return data_export.download(query_history.get_handle(), format, db)
except Exception, e:
if not hasattr(e, 'message') or not e.message:
message = e
else:
message = e.message
raise PopupException(message, detail='')
"""
Queries Views
"""
def execute_query(request, design_id=None, query_history_id=None):
  """
  View function for executing an arbitrary query.

  Serves both a fresh editor session (optionally seeded from ``design_id``)
  and a re-opened past execution (``query_history_id``). The ``action``
  value computed below tells the execute.mako template which mode to render.
  """
  action = 'query'

  if query_history_id:
    # Re-opening a past execution: load its history entry and backing design.
    query_history = authorized_get_query_history(request, query_history_id, must_exist=True)
    design = query_history.design

    try:
      if query_history.server_id and query_history.server_guid:
        # Result is unused here; the call validates the stored server handle
        # and raises QueryServerException when the server no longer knows it.
        handle, state = _get_query_handle_and_state(query_history)

      # Choose the rendering mode based on whether the caller wants a
      # redirect after success, a watch page, or inline editor results.
      if 'on_success_url' in request.GET:
        if request.GET.get('on_success_url'):
          action = 'watch-redirect'
        else:
          action = 'watch-results'
      else:
        action = 'editor-results'
    except QueryServerException, e:
      if 'Invalid query handle' in e.message or 'Invalid OperationHandle' in e.message:
        # The server-side handle is gone: mark the history entry expired
        # and render the editor with an expired-results notice.
        query_history.save_state(QueryHistory.STATE.expired)
        LOG.warn("Invalid query handle", exc_info=sys.exc_info())
        action = 'editor-expired-results'
      else:
        raise e
  else:
    # Fresh editor session. Check perms.
    authorized_get_design(request, design_id)

    app_name = get_app_name(request)
    query_type = SavedQuery.TYPES_MAPPING[app_name]
    design = safe_get_design(request, query_type, design_id)
    query_history = None

  context = {
    'design': design,
    'query': query_history, # Backward
    'query_history': query_history,
    'autocomplete_base_url': reverse(get_app_name(request) + ':api_autocomplete_databases', kwargs={}),
    'can_edit_name': design and design.id and not design.is_auto,
    'can_edit': design and design.id and design.doc.get().can_write(request.user),
    'action': action,
    'on_success_url': request.GET.get('on_success_url'),
    'has_metastore': 'metastore' in get_apps_dict(request.user)
  }

  return render('execute.mako', request, context)
def view_results(request, id, first_row=0):
"""
Returns the view for the results of the QueryHistory with the given id.
The query results MUST be ready.
To display query results, one should always go through the execute_query view.
If the result set has has_result_set=False, display an empty result.
If ``first_row`` is 0, restarts (if necessary) the query read. Otherwise, just
spits out a warning if first_row doesn't match the servers conception.
Multiple readers will produce a confusing interaction here, and that's known.
It understands the ``context`` GET parameter. (See execute_query().)
"""
first_row = long(first_row)
start_over = (first_row == 0)
results = type('Result', (object,), {
'rows': 0,
'columns': [],
'has_more': False,
'start_row': 0,
})
data = []
fetch_error = False
error_message = ''
log = ''
columns = []
app_name = get_app_name(request)
query_history = authorized_get_query_history(request, id, must_exist=True)
query_server = query_history.get_query_server_config()
db = dbms.get(request.user, query_server)
handle, state = _get_query_handle_and_state(query_history)
context_param = request.GET.get('context', '')
query_context = parse_query_context(context_param)
# Update the status as expired should not be accessible
# Impala does not support startover for now
expired = state == models.QueryHistory.STATE.expired
# Retrieve query results or use empty result if no result set
try:
if query_server['server_name'] == 'impala' and not handle.has_result_set:
downloadable = False
else:
results = db.fetch(handle, start_over, 100)
data = []
# Materialize and HTML escape results
# TODO: use Number + list comprehension
for row in results.rows():
escaped_row = []
for field in row:
if isinstance(field, (int, long, float, complex, bool)):
escaped_field = field
elif field is None:
escaped_field = 'NULL'
else:
field = smart_unicode(field, errors='replace') # Prevent error when getting back non utf8 like charset=iso-8859-1
escaped_field = escape(field).replace(' ', ' ')
escaped_row.append(escaped_field)
data.append(escaped_row)
# We display the "Download" button only when we know that there are results:
downloadable = first_row > 0 or data
log = db.get_log(handle)
columns = results.data_table.cols()
except Exception, ex:
fetch_error = True
error_message, log = expand_exception(ex, db, handle)
# Handle errors
error = fetch_error or results is None or expired
context = {
'error': error,
'message': error_message,
'query': query_history,
'results': data,
'columns': columns,
'expected_first_row': first_row,
'log': log,
'hadoop_jobs': app_name != 'impala' and _parse_out_hadoop_jobs(log),
'query_context': query_context,
'can_save': False,
'context_param': context_param,
'expired': expired,
'app_name': app_name,
'next_json_set': None,
'is_finished': query_history.is_finished()
}
if not error:
download_urls = {}
if downloadable:
for format in common.DL_FORMATS:
download_urls[format] = reverse(app_name + ':download', kwargs=dict(id=str(id), format=format))
results.start_row = first_row
context.update({
'id': id,
'results': data,
'has_more': results.has_more,
'next_row': results.start_row + len(data),
'start_row': results.start_row,
'expected_first_row': first_row,
'columns': columns,
'download_urls': download_urls,
'can_save': query_history.owner == request.user,
'next_json_set':
reverse(get_app_name(request) + ':view_results', kwargs={
'id': str(id),
'first_row': results.start_row + len(data)
}
)
+ ('?context=' + context_param or '') + '&format=json'
})
context['columns'] = massage_columns_for_json(columns)
if 'save_form' in context:
del context['save_form']
if 'query' in context:
del context['query']
return HttpResponse(json.dumps(context), mimetype="application/json")
def configuration(request):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
config_values = dbms.get(request.user, query_server).get_default_configuration(
bool(request.REQUEST.get("include_hadoop", False)))
for value in config_values:
if 'password' in value.key.lower():
value.value = "*" * 10
return render("configuration.mako", request, {'config_values': config_values})
"""
Other views
"""
def install_examples(request):
response = {'status': -1, 'message': ''}
if request.method == 'POST':
try:
app_name = get_app_name(request)
beeswax.management.commands.beeswax_install_examples.Command().handle_noargs(app_name=app_name, user=request.user)
response['status'] = 0
except Exception, err:
LOG.exception(err)
response['message'] = str(err)
else:
response['message'] = _('A POST request is required.')
return HttpResponse(json.dumps(response), mimetype="application/json")
@login_notrequired
def query_done_cb(request, server_id):
"""
A callback for query completion notification. When the query is done,
BeeswaxServer notifies us by sending a GET request to this view.
"""
message_template = '<html><head></head>%(message)s<body></body></html>'
message = {'message': 'error'}
try:
query_history = QueryHistory.objects.get(server_id=server_id + '\n')
# Update the query status
query_history.set_to_available()
# Find out details about the query
if not query_history.notify:
message['message'] = 'email_notify is false'
return HttpResponse(message_template % message)
design = query_history.design
user = query_history.owner
subject = _("Beeswax query completed.")
if design:
subject += ": %s" % (design.name,)
link = "%s%s" % \
(get_desktop_uri_prefix(),
reverse(get_app_name(request) + ':watch_query_history', kwargs={'query_history_id': query_history.id}))
body = _("%(subject)s. See the results here: %(link)s\n\nQuery:\n%(query)s") % {
'subject': subject, 'link': link, 'query': query_history.query
}
user.email_user(subject, body)
message['message'] = 'sent'
except Exception, ex:
msg = "Failed to send query completion notification via e-mail: %s" % (ex)
LOG.error(msg)
message['message'] = msg
return HttpResponse(message_template % message)
"""
Utils
"""
def massage_columns_for_json(cols):
massaged_cols = []
for column in cols:
massaged_cols.append({
'name': column.name,
'type': column.type,
'comment': column.comment
})
return massaged_cols
def authorized_get_design(request, design_id, owner_only=False, must_exist=False):
if design_id is None and not must_exist:
return None
try:
design = SavedQuery.objects.get(id=design_id)
except SavedQuery.DoesNotExist:
if must_exist:
raise PopupException(_('Design %(id)s does not exist.') % {'id': design_id})
else:
return None
if owner_only:
design.doc.get().can_write_or_exception(request.user)
else:
design.doc.get().can_read_or_exception(request.user)
return design
def authorized_get_query_history(request, query_history_id, owner_only=False, must_exist=False):
if query_history_id is None and not must_exist:
return None
try:
query_history = QueryHistory.get(id=query_history_id)
except QueryHistory.DoesNotExist:
if must_exist:
raise PopupException(_('QueryHistory %(id)s does not exist.') % {'id': query_history_id})
else:
return None
# Some queries don't have a design so are not linked to Document Model permission
if query_history.design is None or not query_history.design.doc.exists():
if not request.user.is_superuser and request.user != query_history.owner:
raise PopupException(_('Permission denied to read QueryHistory %(id)s') % {'id': query_history_id})
else:
query_history.design.doc.get().can_read_or_exception(request.user)
return query_history
def safe_get_design(request, design_type, design_id=None):
"""
Return a new design, if design_id is None,
Return the design with the given id and type. If the design is not found,
display a notification and return a new design.
"""
design = None
if design_id is not None:
design = authorized_get_design(request, design_id)
if design is None:
design = SavedQuery(owner=request.user, type=design_type)
return design
def make_parameterization_form(query_str):
"""
Creates a django form on the fly with arguments from the
query.
"""
variables = find_variables(query_str)
if len(variables) > 0:
class Form(forms.Form):
for name in sorted(variables):
locals()[name] = forms.CharField(required=True)
return Form
else:
return None
def execute_directly(request, query, query_server=None,
design=None, on_success_url=None, on_success_params=None,
**kwargs):
"""
execute_directly(request, query_msg, tablename, design) -> HTTP response for execution
This method wraps around dbms.execute_query() to take care of the HTTP response
after the execution.
query
The HQL model Query object.
query_server
To which Query Server to submit the query.
Dictionary with keys: ['server_name', 'server_host', 'server_port'].
design
The design associated with the query.
on_success_url
Where to go after the query is done. The URL handler may expect an option "context" GET
param. (See ``watch_query``.) For advanced usage, on_success_url can be a function, in
which case the on complete URL is the return of:
on_success_url(history_obj) -> URL string
Defaults to the view results page.
on_success_params
Optional params to pass to the on_success_url (in additional to "context").
Note that this may throw a Beeswax exception.
"""
if design is not None:
authorized_get_design(request, design.id)
db = dbms.get(request.user, query_server)
database = query.query.get('database', 'default')
db.use(database)
query_history = db.execute_query(query, design)
watch_url = reverse(get_app_name(request) + ':watch_query_history', kwargs={'query_history_id': query_history.id})
# Prepare the GET params for the watch_url
get_dict = QueryDict(None, mutable=True)
# (1) on_success_url
if on_success_url:
if callable(on_success_url):
on_success_url = on_success_url(query_history)
get_dict['on_success_url'] = on_success_url
# (2) misc
if on_success_params:
get_dict.update(on_success_params)
return format_preserving_redirect(request, watch_url, get_dict)
def _list_designs(user, querydict, page_size, prefix="", is_trashed=False):
"""
_list_designs(user, querydict, page_size, prefix, is_trashed) -> (page, filter_param)
A helper to gather the designs page. It understands all the GET params in
``list_designs``, by reading keys from the ``querydict`` with the given ``prefix``.
"""
DEFAULT_SORT = ('-', 'date') # Descending date
SORT_ATTR_TRANSLATION = dict(
date='last_modified',
name='name',
desc='description',
type='extra',
)
# Trash and security
if is_trashed:
db_queryset = Document.objects.trashed_docs(SavedQuery, user)
else:
db_queryset = Document.objects.available_docs(SavedQuery, user)
# Filter by user
filter_username = querydict.get(prefix + 'user')
if filter_username:
try:
db_queryset = db_queryset.filter(owner=User.objects.get(username=filter_username))
except User.DoesNotExist:
# Don't care if a bad filter term is provided
pass
# Design type
d_type = querydict.get(prefix + 'type')
if d_type and d_type in SavedQuery.TYPES_MAPPING.keys():
db_queryset = db_queryset.filter(extra=str(SavedQuery.TYPES_MAPPING[d_type]))
# Text search
frag = querydict.get(prefix + 'text')
if frag:
db_queryset = db_queryset.filter(Q(name__icontains=frag) | Q(description__icontains=frag))
# Ordering
sort_key = querydict.get(prefix + 'sort')
if sort_key:
if sort_key[0] == '-':
sort_dir, sort_attr = '-', sort_key[1:]
else:
sort_dir, sort_attr = '', sort_key
if not SORT_ATTR_TRANSLATION.has_key(sort_attr):
LOG.warn('Bad parameter to list_designs: sort=%s' % (sort_key,))
sort_dir, sort_attr = DEFAULT_SORT
else:
sort_dir, sort_attr = DEFAULT_SORT
db_queryset = db_queryset.order_by(sort_dir + SORT_ATTR_TRANSLATION[sort_attr])
designs = [job.content_object for job in db_queryset.all() if job.content_object and job.content_object.is_auto == False]
pagenum = int(querydict.get(prefix + 'page', 1))
paginator = Paginator(designs, page_size)
page = paginator.page(pagenum)
# We need to pass the parameters back to the template to generate links
keys_to_copy = [ prefix + key for key in ('user', 'type', 'sort') ]
filter_params = copy_query_dict(querydict, keys_to_copy)
return page, filter_params
def _get_query_handle_and_state(query_history):
"""
Front-end wrapper to handle exceptions. Expects the query to be submitted.
"""
handle = query_history.get_handle()
if handle is None:
raise PopupException(_("Failed to retrieve query state from the Query Server."))
state = dbms.get(query_history.owner, query_history.get_query_server_config()).get_state(handle)
if state is None:
raise PopupException(_("Failed to contact Server to check query status."))
return (handle, state)
def parse_query_context(context):
"""
parse_query_context(context) -> ('table', <table_name>) -or- ('design', <design_obj>)
"""
if not context:
return None
pair = context.split(':', 1)
if len(pair) != 2 or pair[0] not in ('table', 'design'):
LOG.error("Invalid query context data: %s" % (context,))
return None
if pair[0] == 'design': # Translate design id to design obj
pair[1] = models.SavedQuery.get(int(pair[1]))
return pair
HADOOP_JOBS_RE = re.compile("(http[^\s]*/jobdetails.jsp\?jobid=([a-z0-9_]*))")
HADOOP_YARN_JOBS_RE = re.compile("(http[^\s]*/proxy/([a-z0-9_]+?)/)")
def _parse_out_hadoop_jobs(log):
"""
Ideally, Hive would tell us what jobs it has run directly
from the Thrift interface. For now, we parse the logs
to look for URLs to those jobs.
"""
ret = []
for match in HADOOP_JOBS_RE.finditer(log):
full_job_url, job_id = match.groups()
# We ignore full_job_url for now, but it may
# come in handy if we support multiple MR clusters
# correctly.
# Ignore duplicates
if job_id not in ret:
ret.append(job_id)
for match in HADOOP_YARN_JOBS_RE.finditer(log):
full_job_url, job_id = match.groups()
if job_id not in ret:
ret.append(job_id)
return ret
def _copy_prefix(prefix, base_dict):
"""Copy keys starting with ``prefix``"""
querydict = QueryDict(None, mutable=True)
for key, val in base_dict.iteritems():
if key.startswith(prefix):
querydict[key] = val
return querydict
def _list_query_history(user, querydict, page_size, prefix=""):
"""
_list_query_history(user, querydict, page_size, prefix) -> (page, filter_param)
A helper to gather the history page. It understands all the GET params in
``list_query_history``, by reading keys from the ``querydict`` with the
given ``prefix``.
"""
DEFAULT_SORT = ('-', 'date') # Descending date
SORT_ATTR_TRANSLATION = dict(
date='submission_date',
state='last_state',
name='design__name',
type='design__type',
)
db_queryset = models.QueryHistory.objects.select_related()
# Filtering
#
# Queries without designs are the ones we submitted on behalf of the user,
# (e.g. view table data). Exclude those when returning query history.
if querydict.get(prefix + 'auto_query', 'on') != 'on':
db_queryset = db_queryset.exclude(design__isnull=False, design__is_auto=True)
user_filter = querydict.get(prefix + 'user', user.username)
if user_filter != ':all':
db_queryset = db_queryset.filter(owner__username=user_filter)
# Design id
design_id = querydict.get(prefix + 'design_id')
if design_id:
db_queryset = db_queryset.filter(design__id=int(design_id))
# Search
search_filter = querydict.get(prefix + 'search')
if search_filter:
db_queryset = db_queryset.filter(Q(design__name__icontains=search_filter) | Q(query__icontains=search_filter) | Q(owner__username__icontains=search_filter))
# Design type
d_type = querydict.get(prefix + 'type')
if d_type:
if d_type not in SavedQuery.TYPES_MAPPING.keys():
LOG.warn('Bad parameter to list_query_history: type=%s' % (d_type,))
else:
db_queryset = db_queryset.filter(design__type=SavedQuery.TYPES_MAPPING[d_type])
# Ordering
sort_key = querydict.get(prefix + 'sort')
if sort_key:
sort_dir, sort_attr = '', sort_key
if sort_key[0] == '-':
sort_dir, sort_attr = '-', sort_key[1:]
if not SORT_ATTR_TRANSLATION.has_key(sort_attr):
LOG.warn('Bad parameter to list_query_history: sort=%s' % (sort_key,))
sort_dir, sort_attr = DEFAULT_SORT
else:
sort_dir, sort_attr = DEFAULT_SORT
db_queryset = db_queryset.order_by(sort_dir + SORT_ATTR_TRANSLATION[sort_attr])
# Get the total return count before slicing
total_count = db_queryset.count()
# Slicing (must be the last filter applied)
pagenum = int(querydict.get(prefix + 'page', 1))
if pagenum < 1:
pagenum = 1
db_queryset = db_queryset[ page_size * (pagenum - 1) : page_size * pagenum ]
paginator = Paginator(db_queryset, page_size, total=total_count)
page = paginator.page(pagenum)
# We do slicing ourselves, rather than letting the Paginator handle it, in order to
# update the last_state on the running queries
for history in page.object_list:
_update_query_state(history.get_full_object())
# We need to pass the parameters back to the template to generate links
keys_to_copy = [ prefix + key for key in ('user', 'type', 'sort', 'design_id', 'auto_query', 'search') ]
filter_params = copy_query_dict(querydict, keys_to_copy)
return page, filter_params
def _update_query_state(query_history):
"""
Update the last_state for a QueryHistory object. Returns success as True/False.
This only occurs iff the current last_state is submitted or running, since the other
states are stable, more-or-less.
Note that there is a transition from available/failed to expired. That occurs lazily
when the user attempts to view results that have expired.
"""
if query_history.last_state <= models.QueryHistory.STATE.running.index:
try:
state_enum = dbms.get(query_history.owner, query_history.get_query_server_config()).get_state(query_history.get_handle())
if state_enum is None:
# Error was logged at the source
return False
except Exception, e:
LOG.error(e)
state_enum = models.QueryHistory.STATE.failed
query_history.save_state(state_enum)
return True
def get_db_choices(request):
app_name = get_app_name(request)
query_server = get_query_server_config(app_name)
db = dbms.get(request.user, query_server)
dbs = db.get_databases()
return [(db, db) for db in dbs]
WHITESPACE = re.compile("\s+", re.MULTILINE)
def collapse_whitespace(s):
return WHITESPACE.sub(" ", s).strip()
| apache-2.0 | 5,357,420,010,559,240,000 | 33.384917 | 160 | 0.670806 | false |
jastarex/DeepLearningCourseCodes | 08_RNN_and_Seq2Seq/bidirection_rnn_pytorch.py | 1 | 3204 | import torch
import torch.nn as nn
import torchvision.datasets as dsets
import torchvision.transforms as transforms
from torch.autograd import Variable
# Hyper Parameters
sequence_length = 28
input_size = 28
hidden_size = 128
num_layers = 2
num_classes = 10
batch_size = 100
num_epochs = 2
learning_rate = 0.003
# MNIST Dataset
train_dataset = dsets.MNIST(root='./data/',
train=True,
transform=transforms.ToTensor(),
download=True)
test_dataset = dsets.MNIST(root='./data/',
train=False,
transform=transforms.ToTensor())
# Data Loader (Input Pipeline)
train_loader = torch.utils.data.DataLoader(dataset=train_dataset,
batch_size=batch_size,
shuffle=True)
test_loader = torch.utils.data.DataLoader(dataset=test_dataset,
batch_size=batch_size,
shuffle=False)
# BiRNN Model (Many-to-One)
class BiRNN(nn.Module):
def __init__(self, input_size, hidden_size, num_layers, num_classes):
super(BiRNN, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.lstm = nn.LSTM(input_size, hidden_size, num_layers,
batch_first=True, bidirectional=True)
self.fc = nn.Linear(hidden_size*2, num_classes) # 2 for bidirection
def forward(self, x):
# Set initial states
h0 = Variable(torch.zeros(self.num_layers*2, x.size(0), self.hidden_size)) # 2 for bidirection
c0 = Variable(torch.zeros(self.num_layers*2, x.size(0), self.hidden_size))
# Forward propagate RNN
out, _ = self.lstm(x, (h0, c0))
# Decode hidden state of last time step
out = self.fc(out[:, -1, :])
return out
rnn = BiRNN(input_size, hidden_size, num_layers, num_classes)
# Loss and Optimizer
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(rnn.parameters(), lr=learning_rate)
# Train the Model
for epoch in range(num_epochs):
for i, (images, labels) in enumerate(train_loader):
images = Variable(images.view(-1, sequence_length, input_size))
labels = Variable(labels)
# Forward + Backward + Optimize
optimizer.zero_grad()
outputs = rnn(images)
loss = criterion(outputs, labels)
loss.backward()
optimizer.step()
if (i+1) % 100 == 0:
print ('Epoch [%d/%d], Step [%d/%d], Loss: %.4f'
%(epoch+1, num_epochs, i+1, len(train_dataset)//batch_size, loss.data[0]))
# Test the Model
correct = 0
total = 0
for images, labels in test_loader:
images = Variable(images.view(-1, sequence_length, input_size))
outputs = rnn(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum()
print('Test Accuracy of the model on the 10000 test images: %d %%' % (100 * correct / total))
# Save the Model
torch.save(rnn.state_dict(), 'rnn.pkl') | apache-2.0 | -5,972,245,828,934,102,000 | 32.385417 | 103 | 0.581773 | false |
cmouse/pdns-remotebackend-python | tests/test.py | 1 | 2562 | import json
import unittest
import re
import sys
from subprocess import PIPE, Popen
class pipetest(unittest.TestCase):
def test_pipe_abi_pipe(self):
sub = Popen(["/usr/bin/env", "python", "src/pipe_abi.py", "pipe"],
stdin=PIPE, stdout=PIPE, stderr=sys.stderr,
close_fds=True, shell=False)
(writer, reader) = (sub.stdin, sub.stdout)
writer.write("HELO\t1\n".encode("utf-8"))
writer.flush()
sub.poll()
line = reader.readline().decode("utf-8")
assert(re.match("^OK\t", line))
writer.write("Q\ttest.com\tIN\tSOA\t-1\t127.0.0.1\n".encode("utf-8"))
writer.flush()
line = reader.readline().decode("utf-8")
print(line)
assert(re.match("^DATA\ttest.com\tIN\tSOA\t300\t-1\t"
"sns.dns.icann.org. noc.dns.icann.org. "
"2013073082 7200 3600 1209600 3600",
line))
writer.flush()
line = reader.readline().decode("utf-8")
assert(re.match("^END", line))
writer.write(
"Q\tinvalid.test\tIN\tSOA\t-1\t127.0.0.1\n".encode("utf-8")
)
writer.flush()
line = reader.readline().decode("utf-8")
assert(re.match("^FAIL", line))
sub.stdout.close()
sub.stdin.close()
sub.kill()
sub.wait()
def test_pipe_abi_remote(self):
sub = Popen(["/usr/bin/env", "python", "src/pipe_abi.py", "remote"],
stdin=PIPE, stdout=PIPE, stderr=sys.stderr,
close_fds=True, shell=False)
(writer, reader) = (sub.stdin, sub.stdout)
writer.write(json.dumps({
"method": "initialize",
"parameters": {
"timeout": 2000
}
}).encode("utf-8"))
writer.write("\n".encode("utf-8"))
writer.flush()
sub.poll()
line = reader.readline().decode("utf-8")
resp = json.loads(line)
assert(resp["result"])
writer.write(json.dumps({
"method": "lookup",
"parameters": {
"qname": "test.com",
"qtype": "SOA"
}
}).encode("utf-8"))
writer.write("\n".encode("utf-8"))
writer.flush()
resp = json.loads(reader.readline().decode("utf-8"))
assert(resp["result"][0]["qname"] == "test.com")
sub.stdout.close()
sub.stdin.close()
sub.kill()
sub.wait()
if __name__ == '__main__':
unittest.main()
| mit | -1,175,585,197,421,390,800 | 32.272727 | 77 | 0.506245 | false |
Jegp/multimodalrnnproject | dualmodal_hyper.py | 1 | 4348 | from __future__ import print_function
import numpy
import numpy as np
from keras.preprocessing import sequence
from keras.datasets import imdb
from keras.models import Sequential, Model
from keras.layers import Concatenate
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.embeddings import Embedding
from keras.layers.recurrent import LSTM
from keras.callbacks import EarlyStopping, ModelCheckpoint
from sklearn.externals import joblib
from keras.utils.np_utils import to_categorical
from hyperopt import Trials, STATUS_OK, tpe
from hyperas import optim
from hyperas.distributions import choice, uniform, conditional
def data():
X_train_v, X_test_v, y_train_v, y_test_v = joblib.load('305010.pkl')[0]
X_train_a, X_test_a, y_train_a, y_test_a = joblib.load('305010.pkl')[1]
X_train_a = numpy.array(X_train_a)
X_train_a = X_train_a.reshape((X_train_a.shape[0], 1, X_train_a.shape[1]))
y_train_a = to_categorical(y_train_a)
X_test_a = numpy.array(X_test_a)
X_test_a = X_test_a.reshape((X_test_a.shape[0], 1, X_test_a.shape[1]))
y_test_a = to_categorical(y_test_a)
max_features_a = X_train_a.shape[2]
maxlen_a = X_train_a.shape[0]
X_train_v = numpy.array(X_train_v)
X_train_v = X_train_v.reshape((X_train_v.shape[0], 1, X_train_v.shape[1]))
y_train_v = to_categorical(y_train_v)
X_test_v = numpy.array(X_test_v)
X_test_v = X_test_v.reshape((X_test_v.shape[0], 1, X_test_v.shape[1]))
y_test_v = to_categorical(y_test_v)
max_features_v = X_train_v.shape[2]
maxlen_v = X_train_v.shape[0]
return X_train_a, y_train_a, X_test_a, y_test_a, max_features_a, maxlen_a, X_train_v, y_train_v, X_test_v, y_test_v, max_features_v, maxlen_v
def model(X_train_a, y_train_a, X_test_a, y_test_a, max_features_a, maxlen_a, X_train_v, y_train_v, X_test_v, y_test_v, max_features_v, maxlen_v):
model_auditory = Sequential()
model_auditory.add(LSTM(1000, input_shape=(1, max_features_a), return_sequences=True))
model_auditory.add(LSTM(800, return_sequences=True))
#model_auditory.add(Dropout({{uniform(0, 1)}}))
#model_auditory.add(Dense(y_test_a.shape[1]))
model_visual = Sequential()
model_visual.add(LSTM(210, input_shape=(1, max_features_v), return_sequences=True))
model_visual.add(LSTM(120, return_sequences=True))
#model_visual.add(Dropout({{uniform(0, 1)}}))
#model_visual.add(Dense(y)
## Merge models
## - Sequential cannot be used to concatenate, so we have to use the functional API
out = Concatenate()([model_auditory.output, model_visual.output])
out = LSTM({{choice([20, 30, 40, 50, 60, 100])}})(out)
# Avoid overfitting
#out = Dropout({{uniform(0, 1)}})(concatenated)
out = Dropout({{uniform(0, 1)}})(out)
# Regular dense nn with sigmoid activation function
out = Dense(y_train_a.shape[1], activation='softmax')(out)
model = Model(inputs = [model_auditory.input, model_visual.input], outputs = out)
## Compile model
model.compile(
loss='categorical_crossentropy'
, optimizer='rmsprop'
, metrics = ['accuracy'] # Collect accuracy metric
)
## Early stop
early_stopping = EarlyStopping(monitor='loss', patience=8)
## Fit model
model.fit([X_train_a, X_train_v], y_train_a,
batch_size=128,
epochs=500,
validation_data=([X_test_a, X_test_v], y_test_a),
callbacks=[early_stopping])
## Extract score)
score, acc = model.evaluate([X_test_a, X_test_v], y_test_a, verbose=0)
print("Accuracy: ", acc)
return {'loss': -acc, 'status': STATUS_OK, 'model': model}
if __name__ == '__main__':
best_run, best_model = optim.minimize(model=model, data=data, algo=tpe.suggest, max_evals=10, trials=Trials())
#X_train_a, y_train_a, X_test_a, y_test_a, max_features_a, maxlen_a, X_train_v, y_train_v, X_test_v, y_test_v, max_features_v, maxlen_v = data()
#model(X_train_a, y_train_a, X_test_a, y_test_a, max_features_a, maxlen_a, X_train_v, y_train_v, X_test_v, y_test_v, max_features_v, maxlen_v)
#X_train, y_train, X_test, y_test, m, n = data()
#print(best_model.evaluate(X_test, y_test))
#best_model.evaluate(X_test, y_test)
print(best_run)
| gpl-3.0 | 4,162,809,143,844,896,000 | 39.259259 | 148 | 0.645124 | false |
osspeak/osspeak | osspeak/recognition/actions/library/vocola/dragonkeys.py | 1 | 12786 | ###
### Code for parsing extended SendDragonKeys syntax into a series of
### Input events suitable for calling SendInput with.
###
### Uses ctypes (requires Python 2.5+).
###
### Assumes input is 8-bit Windows-1252 encoding.
###
###
### Author: Mark Lillibridge
### Version: 0.7
###
import re
from ctypes import *
from recognition.actions.library.vocola.sendinput import *
debug = False  # set True to print diagnostics about how characters get typed
###
### Break SendDragonKeys input into the chords that make it up. Each
### chord is represented in terms of its three parts: modifiers, base,
### and effect.
###
### E.g., "a{shift+left_10} " -> [[None, "a", None], ["shift", "left",
### "10"], [None, "space", None]]
###
### Update: The chord's text is also stored for unparsing without information loss.
### E.g., "{{}" -> [None, "{", None, "{{}"]
###
def parse_into_chords(specification):
    """
    Split extended SendDragonKeys input into its constituent chords.

    Each chord is returned as a four-element list:
    [modifiers, base, effect, text] where modifiers is the "+"-separated
    modifier prefix (empty string when absent), base is the key name or
    character, effect is None or the count/"hold"/"release" suffix, and
    text is the chord's original spelling.

    Raises ValueError if any part of the input cannot be parsed.
    """
    chords = []
    position = 0
    length = len(specification)
    while position < length:
        match = chord_pattern.match(specification, position)
        if match is None:
            raise ValueError(
                f'Cannot parse chords from specification {specification[position:]}')
        modifiers, base, effect = match.group(1, 2, 3)
        if modifiers:
            modifiers = modifiers[:-1]  # drop the trailing "+"
        chords.append([modifiers, base, effect, match.group(0)])
        position = match.end()
    return chords

# Because we can't be sure of the current code page, treat all non-ASCII
# characters as potential accented letters for now.
chord_pattern = re.compile(r"""\{ ( (?: [a-zA-Z0-9\x80-\xff]+ \+ )* )
                                  ( . | [-a-zA-Z0-9/*+.\x80-\xff]+ )
                                  (?: [ _] (\d+|hold|release) )?
                               \}""", re.VERBOSE|re.IGNORECASE)
###
###
###
def chord_to_events(chord):
    """
    Convert one chord, [modifiers, base, effect, text] as produced by
    parse_into_chords, into a list of SendInput events that press and/or
    release the appropriate virtual keys or mouse buttons.

    effect selects pressing only ("hold"), releasing only ("release"),
    or a number of press/release repetitions (default 1).

    Raises KeyError for unknown key/button or modifier names; name
    checking happens even when the chord produces no events.
    """
    modifiers, base, effect, text = chord
    if base == " ":
        base = "space"
    if modifiers:
        modifiers = modifiers.split("+")
    else:
        modifiers = []

    hold_count = release_count = 1
    if effect:
        effect = effect.lower()
        if effect == "hold": release_count = 0
        elif effect == "release": hold_count = 0
        else:
            hold_count = int(effect)
    if hold_count == 0:
        # check for bad names even when no events:
        for modifier in modifiers:
            single(modifier, False)
        single(base, False)
        return []

    if len(base) == 1:
        # A literal character: work out how to type it on the current
        # keyboard layout (may need extra modifiers, e.g., shift):
        try:
            m, f = how_type_character(base)
            if debug and (len(m)>0 or describe_key(f)!=base):
                mm = ""
                if m: mm = '+'.join(m) + "+"
                bb = "<" + base + ">"
                if ord(base[0])<32: bb = hex(ord(base[0]))
                print("typing " + bb + " by {" + mm + describe_key(f) + "}")
            modifiers += m
            base = "VK" + hex(f)
        except Exception:
            # BUGFIX: was a bare "except:", which also swallowed
            # KeyboardInterrupt/SystemExit; on ordinary failure we keep
            # base as the raw character and fall back to numpad entry below.
            if debug and ord(base[0])<128:
                bb = "<" + base + ">"
                if ord(base[0])<32: bb = hex(ord(base[0]))
                print("can't type " + bb + " on current keyboard layout")

    events = []
    modifiers_down = []
    modifiers_up = []
    for modifier in modifiers:
        modifiers_down += single(modifier, False)
        modifiers_up = modifiers_up + single(modifier, True)

    try:
        # down down up (hardware auto-repeat style) fails so use down,up pairs:
        if hold_count > 1:
            return modifiers_down \
                   + (single(base,False)+single(base, True))*hold_count \
                   + modifiers_up

        if hold_count > 0:
            events += modifiers_down + single(base,False)*hold_count
        if release_count > 0:
            events += single(base, True) + modifiers_up
        return events
    except Exception:  # BUGFIX: was a bare "except:"
        if len(base) != 1:
            raise
        if len(modifiers) != 0:
            print("Warning: unable to use modifiers with character: " + base)
            # Unicode?
        if release_count==0:
            print("Warning: unable to independently hold character: " + base)
        if hold_count==0:
            print("Warning: unable to independently release character: " + base)
            return []
        if debug:
            print("using numpad entry for: " + base)
        return windows1252_to_events(ord(base[0])) * hold_count
###
### Pressing/releasing a single generalized virtual key or mouse button
###
##
## Keyboard key names:
##
# Map from generalized virtual key names (all lower case) to the Win32
# virtual-key codes imported from sendinput; consulted by single() below.
Key_name = {
    #
    # SendDragonKeys virtual key names:
    #
    "alt"        : VK_MENU,
    "back"       : VK_BACK,
    "backspace"  : VK_BACK,
    "break"      : VK_CANCEL,
    "capslock"   : VK_CAPITAL,
    "center"     : VK_CLEAR,
    "ctrl"       : VK_CONTROL,
    "del"        : VK_DELETE,
    "down"       : VK_DOWN,
    "end"        : VK_END,
    "enter"      : VK_RETURN,
    "esc"        : VK_ESCAPE,
    "escape"     : VK_ESCAPE,
    "home"       : VK_HOME,
    "ins"        : VK_INSERT,
    "left"       : VK_LEFT,
    "numlock"    : VK_NUMLOCK,
    "pagedown"   : VK_NEXT,
    "pageup"     : VK_PRIOR,
    "pgdn"       : VK_NEXT,
    "pgup"       : VK_PRIOR,
    "pause"      : VK_PAUSE,
    "prtsc"      : VK_SNAPSHOT,
    "right"      : VK_RIGHT,
    "scrolllock" : VK_SCROLL,
    "shift"      : VK_SHIFT,
    "space"      : VK_SPACE,
    #"sysreq"    : VK_SYSREQ,# <<<>>>
    "tab"        : VK_TAB,
    "up"         : VK_UP,
    "f1"  : VK_F1,
    "f2"  : VK_F2,
    "f3"  : VK_F3,
    "f4"  : VK_F4,
    "f5"  : VK_F5,
    "f6"  : VK_F6,
    "f7"  : VK_F7,
    "f8"  : VK_F8,
    "f9"  : VK_F9,
    "f10" : VK_F10,
    "f11" : VK_F11,
    "f12" : VK_F12,
    "f13" : VK_F13,
    "f14" : VK_F14,
    "f15" : VK_F15,
    "f16" : VK_F16,
    # Numeric keypad keys:
    "numkey/" : VK_DIVIDE,
    "numkey*" : VK_MULTIPLY,
    "numkey-" : VK_SUBTRACT,
    "numkey+" : VK_ADD,
    "numkey0" : VK_NUMPAD0,
    "numkey1" : VK_NUMPAD1,
    "numkey2" : VK_NUMPAD2,
    "numkey3" : VK_NUMPAD3,
    "numkey4" : VK_NUMPAD4,
    "numkey5" : VK_NUMPAD5,
    "numkey6" : VK_NUMPAD6,
    "numkey7" : VK_NUMPAD7,
    "numkey8" : VK_NUMPAD8,
    "numkey9" : VK_NUMPAD9,
    "numkey." : VK_DECIMAL,
    "numkeyenter" : GK_NUM_RETURN,
    # Extended (non-keypad) variants of the navigation keys:
    "extdel"   : GK_NUM_DELETE,
    "extdown"  : GK_NUM_DOWN,
    "extend"   : GK_NUM_END,
    "exthome"  : GK_NUM_HOME,
    "extins"   : GK_NUM_INSERT,
    "extleft"  : GK_NUM_LEFT,
    "extpgdn"  : GK_NUM_NEXT,
    "extpgup"  : GK_NUM_PRIOR,
    "extright" : GK_NUM_RIGHT,
    "extup"    : GK_NUM_UP,
    # Left/right-specific modifier keys:
    "leftalt"    : VK_LMENU,
    "rightalt"   : VK_RMENU,
    "leftctrl"   : VK_LCONTROL,
    "rightctrl"  : VK_RCONTROL,
    "leftshift"  : VK_LSHIFT,
    "rightshift" : VK_RSHIFT,
    "0" : VK_0,
    "1" : VK_1,
    "2" : VK_2,
    "3" : VK_3,
    "4" : VK_4,
    "5" : VK_5,
    "6" : VK_6,
    "7" : VK_7,
    "8" : VK_8,
    "9" : VK_9,
    "a" : VK_A,
    "b" : VK_B,
    "c" : VK_C,
    "d" : VK_D,
    "e" : VK_E,
    "f" : VK_F,
    "g" : VK_G,
    "h" : VK_H,
    "i" : VK_I,
    "j" : VK_J,
    "k" : VK_K,
    "l" : VK_L,
    "m" : VK_M,
    "n" : VK_N,
    "o" : VK_O,
    "p" : VK_P,
    "q" : VK_Q,
    "r" : VK_R,
    "s" : VK_S,
    "t" : VK_T,
    "u" : VK_U,
    "v" : VK_V,
    "w" : VK_W,
    "x" : VK_X,
    "y" : VK_Y,
    "z" : VK_Z,
    #
    # New names for virtual keys:
    #
    "win"      : VK_LWIN,
    "leftwin"  : VK_LWIN,
    "rightwin" : VK_RWIN,
    "apps"     : VK_APPS, # name may change...
    "f17" : VK_F17,
    "f18" : VK_F18,
    "f19" : VK_F19,
    "f20" : VK_F20,
    "f21" : VK_F21,
    "f22" : VK_F22,
    "f23" : VK_F23,
    "f24" : VK_F24,
    "browserback"      : VK_BROWSER_BACK,
    "browserfavorites" : VK_BROWSER_FAVORITES,
    "browserforward"   : VK_BROWSER_FORWARD,
    "browserhome"      : VK_BROWSER_HOME,
    "browserrefresh"   : VK_BROWSER_REFRESH,
    "browsersearch"    : VK_BROWSER_SEARCH,
    "browserstop"      : VK_BROWSER_STOP,
    # these names may change in the future...
    "launchapp1"         : VK_LAUNCH_APP1,
    "launchapp2"         : VK_LAUNCH_APP2,
    "launchmail"         : VK_LAUNCH_MAIL,
    "launchmediaselect"  : VK_LAUNCH_MEDIA_SELECT,
    "medianexttrack"     : VK_MEDIA_NEXT_TRACK,
    "mediaplaypause"     : VK_MEDIA_PLAY_PAUSE,
    "mediaprevioustrack" : VK_MEDIA_PREV_TRACK,
    "mediastop"          : VK_MEDIA_STOP,
    "volumedown"         : VK_VOLUME_DOWN,
    "volumemute"         : VK_VOLUME_MUTE,
    "volumeup"           : VK_VOLUME_UP,
    # possibly more names to come...
    "oem1"      : VK_OEM_1,
    "oem2"      : VK_OEM_2,
    "oem3"      : VK_OEM_3,
    "oem4"      : VK_OEM_4,
    "oem5"      : VK_OEM_5,
    "oem6"      : VK_OEM_6,
    "oem7"      : VK_OEM_7,
    "oem8"      : VK_OEM_8,
    "oem102"    : VK_OEM_102,
    "oemcomma"  : VK_OEM_COMMA,
    "oemminus"  : VK_OEM_MINUS,
    "oemperiod" : VK_OEM_PERIOD,
    "oemplus"   : VK_OEM_PLUS,
    }
Code_to_name = {}
for name in Key_name.keys():
Code_to_name[Key_name[name]] = name
def describe_key(code):
    """Return the symbolic name for virtual-key *code*.

    Falls back to a "VK0x.." hex spelling when the code has no entry in
    Code_to_name; that spelling can be parsed back by single().
    """
    try:
        return Code_to_name[code]
    except KeyError:
        # Only a missing table entry should trigger the fallback; the
        # original bare "except:" also hid unrelated errors.
        return "VK" + hex(code)
##
## Mouse button names:
##

# Maps ExtendSendDragonKeys button names (lowercase) to the generic
# button names used by SendInput.py; "left"/"right" here mean the
# primary/secondary buttons before any user swap (see get_mouse_button).
Button_name = {
    "leftbutton" : "left", # really primary button
    "middlebutton" : "middle",
    "rightbutton" : "right", # really secondary button
    "xbutton1" : "X1",
    "xbutton2" : "X2",
    }

# user32!GetSystemMetrics, used below to query SM_SWAPBUTTON ("Switch
# primary and secondary buttons" control-panel setting).
GetSystemMetrics = windll.user32.GetSystemMetrics
GetSystemMetrics.argtypes = [c_int]
GetSystemMetrics.restype = c_int
# Convert ExtendSendDragonKeys mouse button names to those required
# by SendInput.py, swapping left & right buttons if user has "Switch
# primary and secondary buttons" selected:
def get_mouse_button(button_name):
    """Map *button_name* (e.g. "leftbutton") to a SendInput button name.

    Honors the Windows "Switch primary and secondary buttons" setting by
    swapping "left" and "right".  Raises KeyError for unknown names.
    """
    try:
        button = Button_name[button_name.lower()]
    except KeyError:
        # Bug fix: the original re-raised using the undefined name "key",
        # which turned the error into a NameError; report the actual
        # argument instead.  The try block is also narrowed so errors
        # from GetSystemMetrics are no longer misreported.
        raise KeyError("unknown mouse button: " + button_name)
    if button == "left" or button == "right":
        if GetSystemMetrics(win32con.SM_SWAPBUTTON):
            if button == "left":
                button = "right"
            else:
                button = "left"
    return button
##
## Create a single virtual event to press or release a keyboard key or
## mouse button:
##

def single(key, releasing):
    """Build the one-element event list pressing or releasing *key*.

    *key* may be "VK0xhh" (raw virtual-key code in hex), a name from
    Key_name, or a mouse button name from Button_name; *releasing*
    selects key-up versus key-down.  Raises KeyError when the name is
    not recognized as either a key or a button.
    """
    # universal syntax is VK0xhh for virtual key with code 0xhh:
    if key[0:4] == "VK0x":
        return [virtual_key_event(int(key[4:],16), releasing)]
    lower_key = key.lower()
    try:
        return [virtual_key_event(Key_name[lower_key], releasing)]
    except:
        # not a keyboard key name; try a mouse button before giving up:
        try:
            return [mouse_button_event(get_mouse_button(lower_key), releasing)]
        except:
            raise KeyError("unknown key/button: " + key)
###
###
###

# ctypes aliases for the Win32 types used by the prototypes below:
DWORD = c_ulong # 32 bits
SHORT = c_short # 16 bits
#TCHAR = c_char # if not using Unicode
TCHAR = c_wchar # if using Unicode
HKL = HANDLE = PVOID = c_void_p

# user32!GetKeyboardLayout: the input layout (HKL) of a given thread.
GetKeyboardLayout = windll.user32.GetKeyboardLayout
GetKeyboardLayout.argtypes = [DWORD]
GetKeyboardLayout.restype = HKL

# user32!VkKeyScanW: how to type a Unicode character with the current
# keyboard layout (low byte: virtual key; high byte: shift state).
VkKeyScan = windll.user32.VkKeyScanW
VkKeyScan.argtypes = [TCHAR]
VkKeyScan.restype = SHORT

# user32!VkKeyScanExW: same, but for an explicitly supplied layout.
VkKeyScanEx = windll.user32.VkKeyScanExW
VkKeyScanEx.argtypes = [TCHAR, HKL]
VkKeyScanEx.restype = SHORT
def how_type_character(char):
    """Return (modifier names, virtual-key code) needed to type *char*
    with the current keyboard layout.

    Raises ValueError when the layout cannot produce the character or
    requires a shift state this code does not understand.
    """
    scan = VkKeyScan(char)
    vk = scan & 0xff
    if vk == 0xff:
        raise ValueError("unable to type character with current keyboard layout: "
                         + char)
    # high byte of VkKeyScan encodes the required shift state:
    needed = [name for bit, name in ((0x400, "alt"),
                                     (0x200, "ctrl"),
                                     (0x100, "shift"))
              if scan & bit]
    if scan & 0xf800:
        raise ValueError("unknown modifiers required, tell MDL: " + hex(scan))
    return needed, vk
###
###
###

def windows1252_to_events(code):
    """Build the Alt+numpad event sequence typing windows-1252 *code*.

    Emits alt down, numpad 0 followed by the three decimal digits of
    *code* (the leading 0 selects the windows-1252 code page), then
    alt up.
    """
    events = []
    events += single("alt", False)
    events += numpad(0)
    # use true integer division so each digit stays an int even under
    # "from __future__ import division" / Python 3, where code/100
    # would yield a float and build a bogus "numkey7.0" key name:
    events += numpad(code // 100 % 10)
    events += numpad(code // 10 % 10)
    events += numpad(code % 10)
    events += single("alt", True)
    return events
def numpad(i):
    """Return the events pressing and releasing numeric-keypad digit *i*."""
    digit = str(i)
    return chord_to_events([None, "numkey" + digit, None,
                            "{numkey" + digit + "}"])
eternallyBaffled/itrade | itrade_wxoperations.py | 1 | 52124 | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# ============================================================================
# Project Name : iTrade
# Module Name : itrade_wxoperations.py
#
# Description: wxPython portfolio operations screen
#
# The Original Code is iTrade code (http://itrade.sourceforge.net).
#
# The Initial Developer of the Original Code is Gilles Dumortier.
#
# Portions created by the Initial Developer are Copyright (C) 2004-2008 the
# Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see http://www.gnu.org/licenses/gpl.html
#
# History Rev Description
# 2006-01-1x dgil Split code from original itrade_wxportfolio.py module
# ============================================================================
# ============================================================================
# Imports
# ============================================================================
# python system
import datetime
import logging
# iTrade system
import itrade_config
# wxPython system
if not itrade_config.nowxversion:
import itrade_wxversion
import wx
import wx.lib.mixins.listctrl as wxl
from wx.lib import masked
# import sized_controls from wx.lib for wxPython version >= 2.8.8.0 (from wxaddons otherwise)
import wx.lib.sized_controls as sc
# iTrade system
from itrade_logging import *
from itrade_local import message,getGroupChar,getDecimalChar
from itrade_quotes import *
from itrade_portfolio import *
from itrade_currency import currency2symbol
#from itrade_wxdatation import itrade_datePicker
from itrade_wxselectquote import select_iTradeQuote
import itrade_wxres
from itrade_wxmixin import iTrade_wxFrame,iTradeSelectorListCtrl
from itrade_wxutil import FontFromSize,iTradeSizedDialog
# ============================================================================
# menu identifier
# ============================================================================
# wx menu-item ids for the operations window menus:
ID_CLOSE = 111

ID_DISPALL = 120
ID_DISPQUOTES = 121
ID_DISPCASH = 122
ID_DISPSRD = 123
ID_DISPPVAL = 124

ID_SMALL_TEXT = 230
ID_NORMAL_TEXT = 231
ID_BIG_TEXT = 232

ID_MODIFY = 150
ID_DELETE = 151
ID_ADD = 152

ID_30DAYS = 200
ID_90DAYS = 201
ID_CURRENTYEAR = 202
ID_ALLYEARS = 203

# ============================================================================
# display mode
# ============================================================================

# which kind of operations the list shows (see the window's filterDisplay):
DISP_ALL = 0
DISP_QUOTES = 1
DISP_CASH = 2
DISP_SRD = 3
DISP_PVAL = 4

# ============================================================================
# period mode
# ============================================================================

# how far back in time displayed operations go (see filterPeriod):
PERIOD_30DAYS = 0
PERIOD_90DAYS = 1
PERIOD_CURRENTYEAR = 2
PERIOD_ALLYEARS = 3

# ============================================================================
# List identifier
# ============================================================================

# column indices of the operations wx.ListCtrl:
IDC_DATE = 0
IDC_OPERATION = 1
IDC_DESCRIPTION = 2
IDC_PRU = 3
IDC_NUMBER = 4
IDC_DEBIT = 5
IDC_CREDIT = 6
IDC_EXPENSES = 7
IDC_BALANCE = 8
IDC_SRD = 9
IDC_RESERVED = 10

# ============================================================================
#
# ============================================================================

# edit modes for iTradeOperationDialog:
OPERATION_MODIFY = 0
OPERATION_ADD = 1
OPERATION_DELETE = 2

# ============================================================================
# List of supported operations
# ============================================================================

# (operation type code, message key of its localized label) pairs used to
# fill the operation-type combo box of the dialog:
operation_ctrl = (
    (OPERATION_BUY,'portfolio_ctrl_buy'),
    (OPERATION_SELL,'portfolio_ctrl_sell'),
    (OPERATION_CREDIT,'portfolio_ctrl_credit'),
    (OPERATION_DEBIT,'portfolio_ctrl_debit'),
    (OPERATION_FEE,'portfolio_ctrl_fee'),
    (OPERATION_DIVIDEND,'portfolio_ctrl_dividend'),
    (OPERATION_DETACHMENT,'portfolio_ctrl_detachment'),
    (OPERATION_INTEREST,'portfolio_ctrl_interest'),
    (OPERATION_BUY_SRD,'portfolio_ctrl_buy_srd'),
    (OPERATION_SELL_SRD,'portfolio_ctrl_sell_srd'),
    (OPERATION_LIQUIDATION,'portfolio_ctrl_liquidation'),
    (OPERATION_QUOTE,'portfolio_ctrl_quote'),
    (OPERATION_REGISTER,'portfolio_ctrl_register')
    # OPERATION_SPLIT : 'portfolio_ctrl_split'
    )
# ============================================================================
# iTradeOperationsDialog
#
# parent parent window
# op Operation structure (pre-filling optional)
# opmode EDIT, ADD, DELETE
# ============================================================================
class iTradeOperationDialog(iTradeSizedDialog):
    """Modal dialog to add, modify or delete one portfolio operation.

    After ShowModal() returns wx.ID_OK, self.aRet holds the tuple
    (datetime, type, name, value, expenses, number, ref); after a
    cancel it is None.
    """
    def __init__(self, parent, op, opmode, market=None, currency='EUR'):
        # pre-init: capture the fields of *op* when editing/deleting,
        # or defaults when adding a new operation
        self.opmode = opmode
        self.m_market = market
        if op:
            self.m_type = op.type()
            self.m_value = op.nv_value()
            self.m_expenses = op.nv_expenses()
            self.m_number = op.nv_number()
            self.m_name = op.name()
            if op.isQuote():
                if op.quote():
                    # prefer the quote's own key/market over the raw name
                    self.m_name = op.quote().key()
                    self.m_market = op.quote().market()
            self.m_datetime = op.datetime()
            self.m_ref = op.ref()
        else:
            self.m_type = OPERATION_SELL
            self.m_value = 0.0
            self.m_expenses = 0.0
            self.m_number = 0
            self.m_name = ""
            self.m_datetime = datetime.datetime.now()
            self.m_ref = -1

        self.m_parent = parent

        # dialog title depends on the edit mode
        if opmode == OPERATION_MODIFY:
            tb = message('portfolio_modify')
        elif opmode == OPERATION_ADD:
            tb = message('portfolio_new')
        elif opmode == OPERATION_DELETE:
            tb = message('portfolio_delete')
        else:
            tb = '??'
        tt = tb + ' %s - %s %s'
        if op:
            self.tt = tt % (op.datetime().strftime('%x'),op.operation(),op.description())
        else:
            self.tt = tb

        # init
        iTradeSizedDialog.__init__(self,parent, -1, self.tt, style=wx.DEFAULT_DIALOG_STYLE, size=(420, 480))

        # container
        container = self.GetContentsPane()
        container.SetSizerType("vertical")

        # resizable pane
        pane = sc.SizedPanel(container, -1)
        pane.SetSizerType("form")
        pane.SetSizerProps(expand=True)

        # Row 1 : date
        label = wx.StaticText(pane, -1, message('portfolio_date'))
        label.SetSizerProps(valign='center')

        # wx.DateTime months are 0-based, hence month-1
        ssdatetime = wx.DateTimeFromDMY(self.m_datetime.day,self.m_datetime.month-1,self.m_datetime.year)
        self.wxDateCtrl = wx.DatePickerCtrl(pane, -1, ssdatetime , size = (120,-1), style = wx.DP_DROPDOWN | wx.DP_SHOWCENTURY)
        wx.EVT_DATE_CHANGED(self, self.wxDateCtrl.GetId(), self.OnDate)

        # Row 2 : time
        label = wx.StaticText(pane, -1, message('portfolio_time'))
        label.SetSizerProps(valign='center')

        hhmmsstime = wx.DateTimeFromHMS(self.m_datetime.hour, self.m_datetime.minute, self.m_datetime.second)
        self.wxTimeCtrl = masked.TimeCtrl(pane, -1, hhmmsstime, format='24HHMMSS')
        self.Bind(masked.EVT_TIMEUPDATE, self.OnTime, self.wxTimeCtrl )

        # Row 3 : kind of operation
        label = wx.StaticText(pane, -1, message('portfolio_operation'))
        label.SetSizerProps(valign='center')

        self.wxTypeCtrl = wx.ComboBox(pane,-1, "", size=wx.Size(160,-1), style=wx.CB_DROPDOWN|wx.CB_READONLY)
        wx.EVT_COMBOBOX(self,self.wxTypeCtrl.GetId(),self.OnType)

        # fill the combo from operation_ctrl, remembering the index of
        # the current operation type so it can be pre-selected
        count = 0
        for k,v in operation_ctrl:
            #print '***',message(v),k
            self.wxTypeCtrl.Append(message(v),k)
            if k==self.m_type:
                idx = count
            count = count + 1
        self.wxTypeCtrl.SetSelection(idx)

        # Row 4 : quote
        btnpane = sc.SizedPanel(container, -1)
        btnpane.SetSizerType("horizontal")
        btnpane.SetSizerProps(expand=True)

        self.wxNameLabel = wx.StaticText(btnpane, -1, message('portfolio_description'))
        self.wxNameLabel.SetSizerProps(valign='center')

        bmp = wx.Bitmap(os.path.join(itrade_config.dirRes, 'quotes.png'))
        self.wxNameButton = wx.BitmapButton(btnpane, -1, bmp, size=wx.Size(bmp.GetWidth()+5, bmp.GetHeight()+5))
        wx.EVT_BUTTON(self, self.wxNameButton.GetId(), self.OnQuote)

        #print 'creating ctrl:',self.m_name
        self.wxNameCtrl = wx.TextCtrl(btnpane, -1, self.m_name, size=wx.Size(240,-1), style = wx.TE_LEFT)
        wx.EVT_TEXT( self, self.wxNameCtrl.GetId(), self.OnDescChange )

        self.wxNameCtrl.SetSizerProps(expand=True)

        # Row 5 : value
        btnpane = sc.SizedPanel(container, -1)
        btnpane.SetSizerType("horizontal")
        btnpane.SetSizerProps(expand=True)

        self.wxValueLabel = wx.StaticText(btnpane, -1, message('portfolio_field_credit'))
        self.wxValueLabel.SetSizerProps(valign='center')

        self.wxValueCtrl = masked.Ctrl(btnpane, integerWidth=9, fractionWidth=2, controlType=masked.controlTypes.NUMBER, allowNegative = False, groupDigits = True, groupChar=getGroupChar(), decimalChar=getDecimalChar(), selectOnEntry=True )
        wx.EVT_TEXT( self, self.wxValueCtrl.GetId(), self.OnValueChange )

        self.wxValueTxt = wx.StaticText(btnpane, -1, currency2symbol(currency))
        self.wxValueTxt.SetSizerProps(valign='center')

        self.wxExpPreTxt = wx.StaticText(btnpane, -1, '')
        self.wxExpPreTxt.SetSizerProps(valign='center')

        self.wxExpensesCtrl = masked.Ctrl(btnpane, integerWidth=4, fractionWidth=2, controlType=masked.controlTypes.NUMBER, allowNegative = False, groupDigits = True, groupChar=getGroupChar(), decimalChar=getDecimalChar(), selectOnEntry=True )
        wx.EVT_TEXT( self, self.wxExpensesCtrl.GetId(), self.OnExpensesChange )

        self.wxExpPostTxt = wx.StaticText(btnpane, -1, "%s %s" % (currency2symbol(currency),message('portfolio_post_expenses')))
        self.wxExpPostTxt.SetSizerProps(valign='center')

        # resizable pane
        pane = sc.SizedPanel(container, -1)
        pane.SetSizerType("form")
        pane.SetSizerProps(expand=True)

        # number
        label = wx.StaticText(pane, -1, message('portfolio_quantity'))
        label.SetSizerProps(valign='center')

        self.wxNumberCtrl = masked.Ctrl(pane, integerWidth=9, fractionWidth=0, controlType=masked.controlTypes.NUMBER, allowNegative = False, groupChar=getGroupChar(), decimalChar=getDecimalChar() )
        wx.EVT_TEXT( self, self.wxNumberCtrl.GetId(), self.OnNumberChange )

        # separator
        line = wx.StaticLine(container, -1, size=(20,-1), style=wx.LI_HORIZONTAL)
        line.SetSizerProps(expand=True)

        # Last Row : OK and Cancel
        btnpane = sc.SizedPanel(container, -1)
        btnpane.SetSizerType("horizontal")
        btnpane.SetSizerProps(expand=True)

        # context help
        if wx.Platform != "__WXMSW__":
            btn = wx.ContextHelpButton(btnpane)

        # OK
        btn = wx.Button(btnpane, wx.ID_OK, tb)
        btn.SetDefault()
        btn.SetHelpText(message('ok_desc'))
        wx.EVT_BUTTON(self, wx.ID_OK, self.OnValid)

        # CANCEL
        btn = wx.Button(btnpane, wx.ID_CANCEL, message('cancel'))
        btn.SetHelpText(message('cancel_desc'))
        wx.EVT_BUTTON(self, wx.ID_CANCEL, self.OnCancel)

        self.refreshPage()

    def OnCancel(self,event):
        """Close the dialog without returning a result (aRet = None)."""
        self.aRet = None
        self.EndModal(wx.ID_CANCEL)

    def OnValid(self,event):
        """Validate the form and return the edited fields through self.aRet."""
        if self.Validate() and self.TransferDataFromWindow():
            self.aRet = (self.m_datetime,self.m_type,self.m_name,self.m_value,self.m_expenses,self.m_number,self.m_ref)
            self.EndModal(wx.ID_OK)

    def refreshPage(self):
        """Sync every widget with the m_* fields and show/hide/enable
        controls according to the operation type and the edit mode."""
        self.wxDateCtrl.SetValue(wx.DateTimeFromDMY(self.m_datetime.day,self.m_datetime.month-1,self.m_datetime.year))
        self.wxValueCtrl.SetValue(self.m_value)
        self.wxExpensesCtrl.SetValue(self.m_expenses)
        self.wxNumberCtrl.SetValue(self.m_number)
        self.wxNameCtrl.SetLabel(self.m_name)

        if isOperationTypeIncludeTaxes(self.m_type):
            self.wxExpPreTxt.SetLabel(message('portfolio_pre_expenses1'))
        else:
            self.wxExpPreTxt.SetLabel(message('portfolio_pre_expenses2'))

        # '+' credit, '-' debit, '~' valorization, otherwise free of charge
        sign = signOfOperationType(self.m_type)
        if sign =='+':
            self.wxValueLabel.SetLabel(message('portfolio_field_credit'))
            self.wxExpensesCtrl.Show(True)
            self.wxValueCtrl.Show(True)
            self.wxValueTxt.Show(True)
            self.wxExpPreTxt.Show(True)
            self.wxExpPostTxt.Show(True)
        elif sign == '-':
            self.wxValueLabel.SetLabel(message('portfolio_field_debit'))
            self.wxExpensesCtrl.Show(True)
            self.wxValueCtrl.Show(True)
            self.wxValueTxt.Show(True)
            self.wxExpPreTxt.Show(True)
            self.wxExpPostTxt.Show(True)
        elif sign == '~':
            self.wxValueLabel.SetLabel(message('portfolio_field_valorization'))
            self.wxValueCtrl.Show(True)
            self.wxValueTxt.Show(True)
            self.wxExpensesCtrl.Show(False)
            self.wxExpPreTxt.Show(False)
            self.wxExpPostTxt.Show(False)
        else:
            self.wxValueLabel.SetLabel(message('portfolio_field_freeofcharges'))
            self.wxExpensesCtrl.Show(False)
            self.wxValueCtrl.Show(False)
            self.wxValueTxt.Show(False)
            self.wxExpPreTxt.Show(False)
            self.wxExpPostTxt.Show(False)

        if isOperationTypeAQuote(self.m_type):
            # quote operations: pick the quote through the browse button
            self.wxNameLabel.SetLabel(message('portfolio_quote'))
            self.wxNameButton.Enable(True)
            self.wxNameButton.Show(True)
            self.wxNameCtrl.Enable(False)
            if isOperationTypeHasShareNumber(self.m_type):
                self.wxNumberCtrl.Enable(True)
            else:
                self.wxNumberCtrl.Enable(False)
        else:
            # cash operations: free-text description, no share number
            self.wxNameLabel.SetLabel(message('portfolio_description'))
            self.wxNameButton.Enable(False)
            self.wxNameButton.Show(False)
            self.wxNameCtrl.Enable(True)
            self.wxNumberCtrl.Enable(False)

        if self.opmode == OPERATION_DELETE:
            # deletion is read-only: freeze every input control
            self.wxNumberCtrl.Enable(False)
            self.wxNameCtrl.Enable(False)
            self.wxNameButton.Enable(False)
            self.wxNumberCtrl.Enable(False)
            self.wxExpensesCtrl.Enable(False)
            self.wxValueCtrl.Enable(False)
            self.wxDateCtrl.Enable(False)
            self.wxTypeCtrl.Enable(False)

        # a little trick to make sure that you can't resize the dialog to
        # less screen space than the controls need
        self.Fit()
        self.SetMinSize(self.GetSize())

    def OnDate(self, evt):
        """Merge the picked date into m_datetime, keeping the time part."""
        dRet = self.wxDateCtrl.GetValue()
        if dRet:
            debug('OnDate: %s\n' % dRet)
            # wx.DateTime months are 0-based, hence GetMonth()+1
            self.m_datetime = self.m_datetime.combine(datetime.date(dRet.GetYear(),dRet.GetMonth()+1,dRet.GetDay()), self.m_datetime.time())
            self.refreshPage()

    def OnTime(self, evt):
        """Merge the picked time into m_datetime, keeping the date part."""
        dRet = self.wxTimeCtrl.GetValue(as_wxDateTime=True)
        if dRet:
            debug('OnTime: %s\n' % dRet)
            self.m_datetime = self.m_datetime.combine(self.m_datetime.date(), datetime.time(dRet.GetHour(), dRet.GetMinute(), dRet.GetSecond()))
            self.refreshPage()

    def OnType(self,evt):
        """Track the selected operation type (stored as combo client data)."""
        t = self.wxTypeCtrl.GetClientData(self.wxTypeCtrl.GetSelection())
        debug("OnType %s" % t)
        self.m_type = t
        self.refreshPage()

    def OnQuote(self,evt):
        """Open the quote selector and store the chosen quote key/market."""
        quote = quotes.lookupKey(self.m_name)
        quote = select_iTradeQuote(self,quote,filter=True,market=self.m_market,filterEnabled=True,tradableOnly=True)
        if quote:
            debug('onQuote: %s - %s' % (quote.ticker(),quote.key()))
            self.m_name = quote.key()
            self.m_market = quote.market()
            self.refreshPage()

    def OnValueChange(self,event):
        """Track edits of the value field (masked control, float)."""
        ctl = self.FindWindowById( event.GetId() )
        if ctl.IsValid():
            debug('new value value = %s\n' % ctl.GetValue() )
            self.m_value = float(ctl.GetValue())

    def OnNumberChange( self, event ):
        """Track edits of the share-number field (masked control, int)."""
        ctl = self.FindWindowById( event.GetId() )
        if ctl.IsValid():
            debug('new number value = %s\n' % ctl.GetValue() )
            self.m_number = int(ctl.GetValue())

    def OnExpensesChange(self,event):
        """Track edits of the expenses field (masked control, float)."""
        ctl = self.FindWindowById(event.GetId())
        if ctl.IsValid():
            debug('new expenses value = %s\n' % ctl.GetValue() )
            self.m_expenses = float(ctl.GetValue())

    def OnDescChange(self,event):
        """Track edits of the free-text description field."""
        ctl = self.FindWindowById( event.GetId() )
        debug('new value value = %s\n' % ctl.GetValue() )
        self.m_name = ctl.GetValue()
# ============================================================================
# iTradeOperationsListCtrl
# ============================================================================
class iTradeOperationsListCtrl(wx.ListCtrl, wxl.ListCtrlAutoWidthMixin):
    """wx.ListCtrl whose last column automatically stretches to fill the
    available width (behaviour supplied by ListCtrlAutoWidthMixin)."""
    def __init__(self, parent, ID, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=0):
        wx.ListCtrl.__init__(self, parent, ID, pos, size, style)
        wxl.ListCtrlAutoWidthMixin.__init__(self)
# ============================================================================
# iTradeOperationToolbar
#
# ============================================================================
class iTradeOperationToolbar(wx.ToolBar):
    """Toolbar of the operations window: close, display-mode radio group,
    add/modify/delete, and period-filter radio group.

    Every tool handler simply forwards to the equivalent On* method of
    the parent window.
    """
    def __init__(self,parent,id):
        wx.ToolBar.__init__(self,parent,id,style = wx.TB_HORIZONTAL | wx.NO_BORDER | wx.TB_FLAT)
        self.m_parent = parent
        self._init_toolbar()

    def _init_toolbar(self):
        """Create the tool ids, buttons and bindings (called once)."""
        self._NTB2_EXIT = wx.NewId()
        self._NTB2_DISPALL = wx.NewId()
        self._NTB2_DISPQUOTES = wx.NewId()
        self._NTB2_DISPCASH = wx.NewId()
        self._NTB2_DISPSRD = wx.NewId()
        self._NTB2_DISPPVAL = wx.NewId()
        self._NTB2_ADD = wx.NewId()
        self._NTB2_MODIFY = wx.NewId()
        self._NTB2_DELETE = wx.NewId()
        self._NTB2_30DAYS = wx.NewId()
        self._NTB2_90DAYS = wx.NewId()
        self._NTB2_CURRENTYEAR = wx.NewId()
        self._NTB2_ALLYEARS = wx.NewId()

        self.SetToolBitmapSize(wx.Size(24,24))
        self.AddSimpleTool(self._NTB2_EXIT, wx.ArtProvider.GetBitmap(wx.ART_CROSS_MARK, wx.ART_TOOLBAR),
                           message('main_close'), message('main_desc_close'))
        self.AddControl(wx.StaticLine(self, -1, size=(-1,23), style=wx.LI_VERTICAL))
        # display-mode radio group (mutually exclusive):
        self.AddRadioLabelTool(self._NTB2_DISPALL,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'dispall.png')),wx.NullBitmap,message('portfolio_dispall'),message('portfolio_desc_dispall'))
        self.AddRadioLabelTool(self._NTB2_DISPQUOTES,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'dispquote.png')),wx.NullBitmap,message('portfolio_dispquotes'),message('portfolio_desc_dispquotes'))
        self.AddRadioLabelTool(self._NTB2_DISPCASH,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'dispcash.png')),wx.NullBitmap,message('portfolio_dispcash'),message('portfolio_desc_dispcash'))
        self.AddRadioLabelTool(self._NTB2_DISPSRD,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'dispsrd.png')),wx.NullBitmap,message('portfolio_dispsrd'),message('portfolio_desc_dispsrd'))
        self.AddRadioLabelTool(self._NTB2_DISPPVAL,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'dispvalue.png')),wx.NullBitmap,message('portfolio_dispvalues'),message('portfolio_desc_dispvalues'))
        self.AddControl(wx.StaticLine(self, -1, size=(-1,23), style=wx.LI_VERTICAL))
        # operation editing tools:
        self.AddSimpleTool(self._NTB2_ADD,wx.Bitmap(os.path.join(itrade_config.dirRes, 'add.png')),message('portfolio_opadd'),message('portfolio_desc_opadd'))
        self.AddSimpleTool(self._NTB2_MODIFY,wx.Bitmap(os.path.join(itrade_config.dirRes, 'modify.png')),message('portfolio_opmodify'),message('portfolio_desc_opmodify'))
        self.AddSimpleTool(self._NTB2_DELETE,wx.Bitmap(os.path.join(itrade_config.dirRes, 'delete.png')),message('portfolio_opdelete'),message('portfolio_desc_opdelete'))
        self.AddControl(wx.StaticLine(self, -1, size=(-1,23), style=wx.LI_VERTICAL))
        # period-filter radio group (mutually exclusive):
        self.AddRadioLabelTool(self._NTB2_30DAYS,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'filter30.png')),wx.NullBitmap,message('portfolio_per30days'),message('portfolio_desc_per30days'))
        self.AddRadioLabelTool(self._NTB2_90DAYS,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'filter90.png')),wx.NullBitmap,message('portfolio_per90days'),message('portfolio_desc_per90days'))
        self.AddRadioLabelTool(self._NTB2_CURRENTYEAR,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'filter.png')),wx.NullBitmap,message('portfolio_peryear'),message('portfolio_desc_peryear'))
        self.AddRadioLabelTool(self._NTB2_ALLYEARS,'',wx.Bitmap(os.path.join(itrade_config.dirRes, 'nofilter.png')),wx.NullBitmap,message('portfolio_perall'),message('portfolio_desc_perall'))

        wx.EVT_TOOL(self, self._NTB2_EXIT, self.onExit)
        wx.EVT_TOOL(self, self._NTB2_DISPALL, self.onDispAll)
        wx.EVT_TOOL(self, self._NTB2_DISPQUOTES, self.onDispQuotes)
        wx.EVT_TOOL(self, self._NTB2_DISPCASH, self.onDispCash)
        wx.EVT_TOOL(self, self._NTB2_DISPSRD, self.onDispSRD)
        wx.EVT_TOOL(self, self._NTB2_DISPPVAL, self.onDispPVal)
        wx.EVT_TOOL(self, self._NTB2_MODIFY, self.onModify)
        wx.EVT_TOOL(self, self._NTB2_DELETE, self.onDelete)
        wx.EVT_TOOL(self, self._NTB2_ADD, self.onAdd)
        wx.EVT_TOOL(self, self._NTB2_30DAYS, self.on30Days)
        wx.EVT_TOOL(self, self._NTB2_90DAYS, self.on90Days)
        wx.EVT_TOOL(self, self._NTB2_CURRENTYEAR, self.onCurrentYear)
        wx.EVT_TOOL(self, self._NTB2_ALLYEARS, self.onAllYears)
        self.Realize()

    # --- thin forwarders to the parent window's handlers ---

    def onDispAll(self,event):
        self.m_parent.OnDispAll(event)

    def onDispQuotes(self,event):
        self.m_parent.OnDispQuotes(event)

    def onDispCash(self,event):
        self.m_parent.OnDispCash(event)

    def onDispSRD(self,event):
        self.m_parent.OnDispSRD(event)

    def onDispPVal(self,event):
        self.m_parent.OnDispPVal(event)

    def onAdd(self,event):
        self.m_parent.OnAdd(event)

    def onModify(self,event):
        self.m_parent.OnModify(event)

    def onDelete(self,event):
        self.m_parent.OnDelete(event)

    def on30Days(self,event):
        self.m_parent.On30Days(event)

    def on90Days(self,event):
        self.m_parent.On90Days(event)

    def onCurrentYear(self,event):
        self.m_parent.OnCurrentYear(event)

    def onAllYears(self,event):
        self.m_parent.OnAllYears(event)

    def onExit(self,event):
        self.m_parent.OnClose(event)
# ============================================================================
# iTradeOperationsWindow
# ============================================================================
class iTradeOperationsWindow(wx.Frame,iTrade_wxFrame,wxl.ColumnSorterMixin):
# window identifier
ID_WINDOW_TOP = 300
ID_WINDOW_INFO = 301
def __init__(self,parent,id,title,port):
self.m_id = wx.NewId()
wx.Frame.__init__(self,None,self.m_id, title, size = (800,320), style=wx.DEFAULT_FRAME_STYLE|wx.NO_FULL_REPAINT_ON_RESIZE)
iTrade_wxFrame.__init__(self,parent,'portfolio')
self.m_port = port
self.m_mode = DISP_ALL
self.m_period = PERIOD_30DAYS
self.m_currentItem = -1
# the menu
self.filemenu = wx.Menu()
self.filemenu.Append(ID_CLOSE,message('main_close'),message('main_desc_close'))
self.dispmenu = wx.Menu()
self.dispmenu.AppendRadioItem(ID_DISPALL,message('portfolio_dispall'),message('portfolio_desc_dispall'))
self.dispmenu.AppendRadioItem(ID_DISPQUOTES,message('portfolio_dispquotes'),message('portfolio_desc_dispquotes'))
self.dispmenu.AppendRadioItem(ID_DISPCASH,message('portfolio_dispcash'),message('portfolio_desc_dispcash'))
self.dispmenu.AppendRadioItem(ID_DISPSRD,message('portfolio_dispsrd'),message('portfolio_desc_dispsrd'))
self.dispmenu.AppendRadioItem(ID_DISPPVAL,message('portfolio_dispvalues'),message('portfolio_desc_dispvalues'))
self.dispmenu.AppendSeparator()
self.textmenu = wx.Menu()
self.dispmenu.AppendSubMenu(self.textmenu, message('portfolio_text'),message('portfolio_desc_text'))
self.textmenu.AppendRadioItem(ID_SMALL_TEXT, message('portfolio_view_small'),message('portfolio_view_desc_small'))
self.textmenu.AppendRadioItem(ID_NORMAL_TEXT, message('portfolio_view_normal'),message('portfolio_view_desc_normal'))
self.textmenu.AppendRadioItem(ID_BIG_TEXT, message('portfolio_view_big'),message('portfolio_view_desc_big'))
self.opmenu = wx.Menu()
self.opmenu.Append(ID_MODIFY,message('portfolio_opmodify'),message('portfolio_desc_opmodify'))
self.opmenu.Append(ID_DELETE,message('portfolio_opdelete'),message('portfolio_desc_opdelete'))
self.opmenu.Append(ID_ADD,message('portfolio_opadd'),message('portfolio_desc_opadd'))
self.permenu = wx.Menu()
self.permenu.AppendRadioItem(ID_30DAYS,message('portfolio_per30days'),message('portfolio_desc_per30days'))
self.permenu.AppendRadioItem(ID_90DAYS,message('portfolio_per90days'),message('portfolio_desc_per90days'))
self.permenu.AppendRadioItem(ID_CURRENTYEAR,message('portfolio_peryear'),message('portfolio_desc_peryear'))
self.permenu.AppendRadioItem(ID_ALLYEARS,message('portfolio_perall'),message('portfolio_desc_perall'))
# default checking
self.updateMenuItems()
# Creating the menubar
menuBar = wx.MenuBar()
# Adding the "<x>menu" to the MenuBar
menuBar.Append(self.filemenu,message('portfolio_menu_file'))
menuBar.Append(self.dispmenu,message('portfolio_menu_disp'))
menuBar.Append(self.opmenu,message('portfolio_menu_op'))
menuBar.Append(self.permenu,message('portfolio_menu_per'))
# Adding the MenuBar to the Frame content
self.SetMenuBar(menuBar)
# create an image list
self.m_imagelist = wx.ImageList(16,16)
self.idx_plus = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'plus.png')))
self.idx_minus = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'minus.png')))
self.idx_neutral = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'neutral.png')))
self.idx_unknown = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'unknown.png')))
self.sm_up = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'sm_up.png')))
self.sm_dn = self.m_imagelist.Add(wx.Bitmap(os.path.join(itrade_config.dirRes, 'sm_down.png')))
#
tID = wx.NewId()
self.m_list = iTradeOperationsListCtrl(self, tID,
style = wx.LC_REPORT | wx.SUNKEN_BORDER | wx.LC_SINGLE_SEL | wx.LC_VRULES | wx.LC_HRULES)
self.m_list.SetImageList(self.m_imagelist, wx.IMAGE_LIST_SMALL)
self.m_list.SetFont(FontFromSize(itrade_config.operationFontSize))
# Now that the list exists we can init the other base class,
# see wxPython/lib/mixins/listctrl.py
wxl.ColumnSorterMixin.__init__(self, IDC_RESERVED)
# Toolbar
self.m_toolbar = iTradeOperationToolbar(self, wx.NewId())
wx.EVT_SIZE(self, self.OnSize)
wx.EVT_LIST_ITEM_ACTIVATED(self, tID, self.OnItemActivated)
wx.EVT_LIST_ITEM_SELECTED(self, tID, self.OnItemSelected)
wx.EVT_COMMAND_RIGHT_CLICK(self.m_list, tID, self.OnRightClick)
wx.EVT_RIGHT_UP(self.m_list, self.OnRightClick)
wx.EVT_RIGHT_DOWN(self.m_list, self.OnRightDown)
wx.EVT_MENU(self, ID_CLOSE, self.OnClose)
wx.EVT_MENU(self, ID_DISPALL, self.OnDispAll)
wx.EVT_MENU(self, ID_DISPQUOTES, self.OnDispQuotes)
wx.EVT_MENU(self, ID_DISPCASH, self.OnDispCash)
wx.EVT_MENU(self, ID_DISPSRD, self.OnDispSRD)
wx.EVT_MENU(self, ID_DISPPVAL, self.OnDispPVal)
wx.EVT_MENU(self, ID_SMALL_TEXT, self.OnTextSmall)
wx.EVT_MENU(self, ID_NORMAL_TEXT, self.OnTextNormal)
wx.EVT_MENU(self, ID_BIG_TEXT, self.OnTextBig)
wx.EVT_MENU(self, ID_MODIFY, self.OnModify)
wx.EVT_MENU(self, ID_DELETE, self.OnDelete)
wx.EVT_MENU(self, ID_ADD, self.OnAdd)
wx.EVT_MENU(self, ID_30DAYS, self.On30Days)
wx.EVT_MENU(self, ID_90DAYS, self.On90Days)
wx.EVT_MENU(self, ID_CURRENTYEAR, self.OnCurrentYear)
wx.EVT_MENU(self, ID_ALLYEARS, self.OnAllYears)
wx.EVT_WINDOW_DESTROY(self, self.OnDestroy)
wx.EVT_CLOSE(self, self.OnCloseWindow)
self.populate()
# --- [ wxl.ColumnSorterMixin management ] -------------------------------------
# Used by the wxl.ColumnSorterMixin, see wxPython/lib/mixins/listctrl.py
def GetListCtrl(self):
return self.m_list
# Used by the wxl.ColumnSorterMixin, see wxPython/lib/mixins/listctrl.py
def GetSortImages(self):
return (self.sm_dn, self.sm_up)
# --- [ Text font size management ] -------------------------------------
def OnChangeViewText(self):
itrade_config.saveConfig()
self.updateMenuItems()
self.m_list.SetFont(FontFromSize(itrade_config.operationFontSize))
self.populate()
def OnTextSmall(self,e):
itrade_config.operationFontSize = 1
self.OnChangeViewText()
def OnTextNormal(self,e):
itrade_config.operationFontSize = 2
self.OnChangeViewText()
def OnTextBig(self,e):
itrade_config.operationFontSize = 3
self.OnChangeViewText()
# --- [ window management ] -------------------------------------
def OnDestroy(self, evt):
if self.m_parent:
self.m_parent.m_hOperation = None
def OnCloseWindow(self, evt):
self.saveConfig()
self.Destroy()
# --- [ filter management ] -------------------------------------
def filterSRDcolumn(self):
if self.m_mode == DISP_ALL:
return True
if self.m_mode == DISP_QUOTES:
return True
if self.m_mode == DISP_CASH:
return False
if self.m_mode == DISP_SRD:
return True
if self.m_mode == DISP_PVAL:
return False
def filterDisplay(self,op):
if self.m_mode == DISP_ALL:
# no filter at all
return True
if self.m_mode == DISP_QUOTES:
# display on quotes transfers
return op.isQuote() and (op.type()!=OPERATION_LIQUIDATION)
if self.m_mode == DISP_CASH:
# display on quotes transfers
return op.isCash() and (not op.isSRD() or op.type()==OPERATION_LIQUIDATION)
if self.m_mode == DISP_SRD:
# display on SRD operations
return op.isSRD()
if self.m_mode == DISP_PVAL:
return (op.type() == OPERATION_SELL) or (op.type()==OPERATION_LIQUIDATION)
return False
def filterPeriod(self,op):
if self.m_period == PERIOD_ALLYEARS:
return True
elif self.m_period == PERIOD_CURRENTYEAR:
# year should be the current one
return op.date().year==datetime.date.today().year
elif self.m_period == PERIOD_90DAYS:
# last 90 days
return (datetime.date.today() - op.date()) <= timedelta(90)
elif self.m_period == PERIOD_30DAYS:
# last 30 days
return (datetime.date.today() - op.date()) <= timedelta(30)
return False
# --- [ list population ] -------------------------------------
def populate(self):
self.m_list.ClearAll()
self.itemDataMap = {}
self.itemOpMap = {}
# set column headers
self.m_list.InsertColumn(IDC_DATE, message('portfolio_list_date'), wx.LIST_FORMAT_LEFT)
self.m_list.InsertColumn(IDC_OPERATION, message('portfolio_list_operation'), wx.LIST_FORMAT_LEFT)
self.m_list.InsertColumn(IDC_DESCRIPTION, message('portfolio_list_description'), wx.LIST_FORMAT_LEFT)
self.m_list.InsertColumn(IDC_NUMBER, message('portfolio_list_number'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_PRU, message('UPP'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_DEBIT,message('portfolio_list_debit'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_CREDIT,message('portfolio_list_credit'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_EXPENSES,message('portfolio_list_expense'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_BALANCE,message('portfolio_list_balance'), wx.LIST_FORMAT_RIGHT)
if self.filterSRDcolumn():
self.m_list.InsertColumn(IDC_SRD,message('portfolio_list_srd'), wx.LIST_FORMAT_RIGHT)
self.m_list.InsertColumn(IDC_RESERVED, '', wx.LIST_FORMAT_LEFT)
# remember columns widths with just the header and no data
self.m_hdrcolwidths = []
for col in range(self.m_list.GetColumnCount() - 1):
self.m_list.SetColumnWidth(col, wx.LIST_AUTOSIZE_USEHEADER)
self.m_hdrcolwidths.append(self.m_list.GetColumnWidth(col))
# populate the list
x = 0
balance = 0
srd = 0
for eachOp in self.m_port.operations().list():
if self.filterDisplay(eachOp):
#print 'populate:',eachOp
sign = eachOp.sign()
if sign=='+':
if eachOp.isSRD():
if eachOp.type()==OPERATION_LIQUIDATION:
balance = balance + eachOp.nv_value()
srd = srd + ( eachOp.nv_value() + eachOp.nv_expenses() )
else:
srd = srd + eachOp.nv_value()
else:
if self.m_mode == DISP_PVAL:
balance = balance + eachOp.nv_pvalue()
else:
balance = balance + eachOp.nv_value()
elif sign=='-':
if eachOp.isSRD():
srd = srd - eachOp.nv_value()
else:
balance = balance - eachOp.nv_value()
# do we really need to display this op ?
if self.filterPeriod(eachOp):
if sign=='+':
idx = self.idx_plus
elif sign=='-':
idx = self.idx_minus
elif sign==' ' or sign=='~':
idx = self.idx_neutral
else:
idx = self.idx_unknown
sdate = eachOp.datetime().strftime('%x %X')
self.m_list.InsertImageStringItem(x, sdate, idx)
self.m_list.SetStringItem(x,IDC_OPERATION,eachOp.operation())
if eachOp.nv_number()>0:
self.m_list.SetStringItem(x,IDC_NUMBER,'%s' % eachOp.sv_number())
else:
self.m_list.SetStringItem(x,IDC_NUMBER,'')
if sign=='+':
self.m_list.SetStringItem(x,IDC_CREDIT,eachOp.sv_value())
vdebit = 0.0
vcredit = eachOp.nv_value()
elif sign=='-':
self.m_list.SetStringItem(x,IDC_DEBIT,eachOp.sv_value())
vcredit = 0.0
vdebit = eachOp.nv_value()
elif sign=='~':
self.m_list.SetStringItem(x,IDC_CREDIT,eachOp.sv_value())
vcredit = eachOp.nv_value()
self.m_list.SetStringItem(x,IDC_DEBIT,eachOp.sv_value())
vdebit = eachOp.nv_value()
else:
vcredit = 0.0
vdebit = 0.0
self.m_list.SetStringItem(x,IDC_EXPENSES,eachOp.sv_expenses())
self.m_list.SetStringItem(x,IDC_DESCRIPTION,eachOp.description())
self.m_list.SetStringItem(x,IDC_BALANCE,'%.2f' % balance)
if self.filterSRDcolumn():
if eachOp.isSRD():
self.m_list.SetStringItem(x,IDC_SRD,'%.2f' % srd)
vsrd = srd
else:
self.m_list.SetStringItem(x,IDC_SRD,'')
vsrd = 0.0
self.m_list.SetStringItem(x,IDC_RESERVED,'%d' % eachOp.ref())
else:
vsrd = 0.0
self.m_list.SetStringItem(x,IDC_SRD,'%d' % eachOp.ref())
try:
pr = str( '%.2f'%((vcredit + vdebit)/eachOp.nv_number()))
if pr == '0.00' : pr =''
except ZeroDivisionError:
pr = ''
self.m_list.SetStringItem(x,IDC_PRU,pr)
self.itemDataMap[x] = (eachOp.date().strftime('%Y%m%d'),eachOp.operation(),eachOp.description(),eachOp.nv_number(),pr,vdebit,vcredit,eachOp.nv_expenses(),balance,vsrd)
self.itemOpMap[x] = eachOp.ref()
item = self.m_list.GetItem(x)
if sign == '+':
item.SetTextColour(wx.BLACK)
elif sign == '-':
item.SetTextColour(wx.BLUE)
elif sign == ' ':
item.SetTextColour(wx.BLACK)
else:
item.SetTextColour(wx.RED)
self.m_list.SetItem(item)
# one more item !
#self.m_op[x] = eachOp
x = x + 1
# fix the item data
items = self.itemDataMap.items()
for x in range(len(items)):
key, data = items[x]
self.m_list.SetItemData(x, key)
# adjust size of columns
self.adjustColumns()
# default selection
if len(items)>0:
self.m_currentItem = 0
self.m_list.SetItemState(0, wx.LIST_STATE_SELECTED, wx.LIST_STATE_SELECTED)
self.m_list.EnsureVisible(self.m_currentItem)
else:
self.m_currentItem = -1
# --- [ adjust columns width ] -------------------------------------
def adjustColumns(self):
for col in range(self.m_list.GetColumnCount() - 1):
self.m_list.SetColumnWidth(col, wx.LIST_AUTOSIZE)
if self.m_list.GetColumnWidth(col) < self.m_hdrcolwidths[col]:
self.m_list.SetColumnWidth(col, self.m_hdrcolwidths[col])
self.m_list.resizeLastColumn(15)
# --- [ menu ] -------------------------------------
def updateMenuItems(self):
# period
if self.m_period == PERIOD_ALLYEARS:
m = self.permenu.FindItemById(ID_ALLYEARS)
elif self.m_period == PERIOD_CURRENTYEAR:
m = self.permenu.FindItemById(ID_CURRENTYEAR)
elif self.m_period == PERIOD_90DAYS:
m = self.permenu.FindItemById(ID_90DAYS)
elif self.m_period == PERIOD_30DAYS:
m = self.permenu.FindItemById(ID_30DAYS)
m.Check(True)
# operations
m = self.opmenu.FindItemById(ID_DELETE)
m.Enable(self.m_currentItem>=0)
m = self.opmenu.FindItemById(ID_MODIFY)
m.Enable(self.m_currentItem>=0)
# display
if self.m_mode == DISP_ALL:
m = self.dispmenu.FindItemById(ID_DISPALL)
elif self.m_mode == DISP_QUOTES:
m = self.dispmenu.FindItemById(ID_DISPQUOTES)
elif self.m_mode == DISP_CASH:
m = self.dispmenu.FindItemById(ID_DISPCASH)
elif self.m_mode == DISP_SRD:
m = self.dispmenu.FindItemById(ID_DISPSRD)
elif self.m_mode == DISP_PVAL:
m = self.dispmenu.FindItemById(ID_DISPPVAL)
m.Check(True)
m = self.textmenu.FindItemById(ID_SMALL_TEXT)
m.Check(itrade_config.operationFontSize==1)
m = self.textmenu.FindItemById(ID_NORMAL_TEXT)
m.Check(itrade_config.operationFontSize==2)
m = self.textmenu.FindItemById(ID_BIG_TEXT)
m.Check(itrade_config.operationFontSize==3)
def OnClose(self,e):
self.Close(True)
def OnDispAll(self,e):
self.m_mode = DISP_ALL
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_DISPALL,True)
self.populate()
def OnDispQuotes(self,e):
self.m_mode = DISP_QUOTES
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_DISPQUOTES,True)
self.populate()
def OnDispCash(self,e):
self.m_mode = DISP_CASH
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_DISPCASH,True)
self.populate()
def OnDispSRD(self,e):
self.m_mode = DISP_SRD
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_DISPSRD,True)
self.populate()
def OnDispPVal(self,e):
self.m_mode = DISP_PVAL
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_DISPPVAL,True)
self.populate()
def On30Days(self,e):
self.m_period = PERIOD_30DAYS
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_30DAYS,True)
self.populate()
def On90Days(self,e):
self.m_period = PERIOD_90DAYS
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_90DAYS,True)
self.populate()
def OnCurrentYear(self,e):
self.m_period = PERIOD_CURRENTYEAR
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_CURRENTYEAR,True)
self.populate()
def OnAllYears(self,e):
self.m_period = PERIOD_ALLYEARS
self.updateMenuItems()
self.m_toolbar.ToggleTool(self.m_toolbar._NTB2_ALLYEARS,True)
self.populate()
def OnSize(self, event):
w,h = self.GetClientSizeTuple()
self.m_toolbar.SetDimensions(0, 0, w, 32)
self.m_list.SetDimensions(0, 32, w, h-32)
event.Skip(False)
def getColumnText(self, index, col):
item = self.m_list.GetItem(index, col)
return item.GetText()
# --- [ popup menu ] ------------------------------------------------------
def OnRightDown(self, event):
self.x = event.GetX()
self.y = event.GetY()
debug("x, y = %s" % str((self.x, self.y)))
item, flags = self.m_list.HitTest((self.x, self.y))
if flags & wx.LIST_HITTEST_ONITEM:
pass
else:
self.m_currentItem = -1
self.updateMenuItems()
event.Skip()
def OnItemActivated(self, event):
self.m_currentItem = event.m_itemIndex
self.updateMenuItems()
if self.m_currentItem>=0:
debug("OnItemActivated: %s" % self.m_list.GetItemText(self.m_currentItem))
self.OnModify(event)
def OnItemSelected(self, event):
self.m_currentItem = event.m_itemIndex
self.updateMenuItems()
if self.m_currentItem>=0:
debug("OnItemSelected: %s, %s, %s, %s\n" %
(self.m_currentItem,
self.m_list.GetItemText(self.m_currentItem),
self.getColumnText(self.m_currentItem, 1),
self.getColumnText(self.m_currentItem, 2)))
event.Skip()
def OnRightClick(self, event):
if self.m_currentItem<0:
inList = False
else:
debug("OnRightClick %s\n" % self.m_list.GetItemText(self.m_currentItem))
inList = True
# only do this part the first time so the events are only bound once
if not hasattr(self, "m_popupID_Modify"):
self.m_popupID_Modify = ID_MODIFY
self.m_popupID_Delete = ID_DELETE
self.m_popupID_Add = ID_ADD
wx.EVT_MENU(self, self.m_popupID_Modify, self.OnModify)
wx.EVT_MENU(self, self.m_popupID_Delete, self.OnDelete)
wx.EVT_MENU(self, self.m_popupID_Add, self.OnAdd)
# make a menu
menu = wx.Menu()
# add some items
menu.Append(self.m_popupID_Modify, message('main_popup_edit'))
menu.Enable(self.m_popupID_Modify,inList)
menu.Append(self.m_popupID_Delete, message('main_popup_remove'))
menu.Enable(self.m_popupID_Delete,inList)
menu.AppendSeparator()
menu.Append(self.m_popupID_Add, message('main_popup_add'))
# Popup the menu. If an item is selected then its handler
# will be called before PopupMenu returns.
self.PopupMenu(menu, wx.Point(self.x, self.y))
menu.Destroy()
def OnModify(self, event):
key = self.m_list.GetItemData(self.m_currentItem)
ind = self.itemOpMap[key]
info("OnModify currentItem=%d key=%d ind=%d",self.m_currentItem,key,ind)
aRet = edit_iTradeOperation(self,self.m_port.getOperation(ind),OPERATION_MODIFY,currency=self.m_port.currency())
if aRet:
info('OnModify: date=%s type=%s name=%s value=%12.2f expenses=%12.2f number=%d ref=%d' %(str(aRet[0]),aRet[1],aRet[2],aRet[3],aRet[4],aRet[5],aRet[6]))
self.m_port.delOperation(ind)
self.m_port.addOperation(aRet)
self.RebuildList()
def OnDelete(self, event):
key = self.m_list.GetItemData(self.m_currentItem)
ind = self.itemOpMap[key]
info("OnDelete currentItem=%d key=%d ind=%d",self.m_currentItem,key,ind)
aRet = edit_iTradeOperation(self,self.m_port.getOperation(ind),OPERATION_DELETE,currency=self.m_port.currency())
if aRet:
info('OnDelete: date=%s type=%s name=%s value=%12.2f expenses=%12.2f number=%d ref=%d' %(str(aRet[0]),aRet[1],aRet[2],aRet[3],aRet[4],aRet[5],aRet[6]))
self.m_port.delOperation(ind)
self.RebuildList()
def OnAdd(self, event):
info("OnAdd")
aRet = edit_iTradeOperation(self,None,OPERATION_ADD,market=self.m_port.market(),currency=self.m_port.currency())
if aRet:
info('OnAdd: date=%s type=%s name=%s value=%12.2f expenses=%12.2f number=%d ref=%d' %(str(aRet[0]),aRet[1],aRet[2],aRet[3],aRet[4],aRet[5],aRet[6]))
self.m_port.addOperation(aRet)
self.RebuildList()
# --- [ Rebuild screen and Parent ] ---------------------------------------
def RebuildList(self):
self.m_port.saveOperations()
self.populate()
if self.m_parent:
self.m_parent.RebuildList()
# ============================================================================
# open_iTradeOperations
# ============================================================================
def open_iTradeOperations(win,port=None):
debug('open_iTradeOperations')
if win and win.m_hOperation:
# set focus
win.m_hOperation.SetFocus()
else:
if not isinstance(port,Portfolio):
port = loadPortfolio()
frame = iTradeOperationsWindow(win, -1, "%s - %s" %(message('portfolio_title'),port.name()),port)
if win:
win.m_hOperation = frame
frame.Show()
# ============================================================================
# edit_iTradeOperation()
#
# op operation to edit
# opmode operation mode (modify,add,delete)
# market default market (add only)
# ============================================================================
def edit_iTradeOperation(win,op,opmode,market=None,currency='EUR'):
dlg = iTradeOperationDialog(win,op,opmode,market,currency)
if dlg.ShowModal()==wx.ID_OK:
aRet = dlg.aRet
else:
aRet = None
dlg.Destroy()
return aRet
# ============================================================================
# add_iTradeOperation()
#
# win parent window
# quote quote involved in the operation
# type type of operation : OPERATION_xxx
#
# auto-filled information :
# operation date is current date
#
# returns True if operation has been added
# ============================================================================
def add_iTradeOperation(win,portfolio,quote,type):
if quote:
key = quote.key()
else:
key = None
op = Operation(d=datetime.datetime.now(),t=type,m=key,v='0.0',e='0.0',n='0',vat=portfolio.vat(),ref=-1)
aRet = edit_iTradeOperation(win,op,OPERATION_ADD,market=portfolio.market(),currency=portfolio.currency())
if aRet:
info('add_iTradeOperation: date=%s type=%s name=%s value=%12.2f expenses=%12.2f number=%d ref=%d' %(str(aRet[0]),aRet[1],aRet[2],aRet[3],aRet[4],aRet[5],aRet[6]))
portfolio.addOperation(aRet)
portfolio.saveOperations()
return True
return False
# ============================================================================
# Test me
# ============================================================================
if __name__=='__main__':
setLevel(logging.INFO)
app = wx.App(False)
# load configuration
itrade_config.loadConfig()
from itrade_local import *
setLang('us')
gMessage.load()
# load extensions
import itrade_ext
itrade_ext.loadExtensions(itrade_config.fileExtData,itrade_config.dirExtData)
# init modules
initQuotesModule()
initPortfolioModule()
import itrade_wxportfolio
port = itrade_wxportfolio.select_iTradePortfolio(None,'default','select')
if port:
port = loadPortfolio(port.filename())
open_iTradeOperations(None,port)
app.MainLoop()
# ============================================================================
# That's all folks !
# ============================================================================
| gpl-3.0 | -71,433,267,296,261,624 | 39.335448 | 243 | 0.565594 | false |
peterrenshaw/zerotasks | machine.py | 1 | 1465 | #!/usr/bin/env python
# ~*~ encoding: utf-8 ~*~
#=======
# _____ ______ __
# /__ / ___ _________ /_ __/___ ______/ /_______
# / / / _ \/ ___/ __ \ / / / __ `/ ___/ //_/ ___/
# / /__/ __/ / / /_/ / / / / /_/ (__ ) ,< (__ )
# /____/\___/_/ \____/ /_/ \__,_/____/_/|_/____/
#
# This file is part of Zero Tasks.
#
# Zero Tasks is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zero Tasks is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zero Tasks. If not, see <http://www.gnu.org/licenses/gpl-3.0.txt>.
#
# name: machine.py
# date: 2016NOV10
# prog: pr
# desc: machine dependent Zero Tasks: read docs/ABOUT.txt
#======
#------
# DIR structure is as follows
#
# /HOME/REL_PATH/APP_DIR
#
HOME = "/path/to/user"
REL_PATH = "relative/path"
APP_DIR = "zerotasks"
#------
def main():
"""main entry point for cli"""
pass
# main entry point for cli
if __name__ == "__main__":
main()
# vim: ff=unix:ts=4:sw=4:tw=78:noai:expandtab
| gpl-3.0 | -7,992,657,440,703,624,000 | 25.636364 | 80 | 0.54744 | false |
falconindy/auracle | tests/test_regex_search.py | 1 | 1141 | #!/usr/bin/env python
# SPDX-License-Identifier: MIT
import auracle_test
class TestRegexSearch(auracle_test.TestCase):
def testFragmentTooShort(self):
r = self.Auracle(['search', 'f'])
self.assertNotEqual(0, r.process.returncode)
self.assertIn('insufficient for searching by regular expression',
r.process.stderr.decode())
def testInvalidRegex(self):
r = self.Auracle(['search', '*invalid'])
self.assertNotEqual(0, r.process.returncode)
self.assertIn('invalid regex', r.process.stderr.decode())
def testMultipleSearchesWithFiltering(self):
r = self.Auracle(
['search', '--quiet', '^aurac.+', '.le-git$', 'auracle'])
self.assertEqual(0, r.process.returncode)
self.assertEqual('auracle-git', r.process.stdout.decode().strip())
self.assertCountEqual([
'/rpc?v=5&type=search&by=name-desc&arg=aurac',
'/rpc?v=5&type=search&by=name-desc&arg=le-git',
'/rpc?v=5&type=search&by=name-desc&arg=auracle',
], r.request_uris)
if __name__ == '__main__':
auracle_test.main()
| mit | 729,133,156,264,159,500 | 33.575758 | 74 | 0.617003 | false |
finkn/InPUTpy | test/test_tools.py | 1 | 10092 | """
:copyright: (c) 2013 by Christoffer Fink.
:license: MIT. See LICENSE for details.
"""
import unittest
import warnings
from test.tools import *
class TestTools(unittest.TestCase):
# ----- assertVariability tests -----
def testAssertVariabilityWithOneIterationShouldFail(self):
with self.assertRaises(AssertionError):
Generator.fromSequence([1,2,3]).isVariable(iterations=1)
def testAssertVariabilityWithConstantValueShouldFail(self):
with self.assertRaises(AssertionError):
Generator.fromFunction(lambda: 1).isVariable()
def testAssertVariabilityWithDifferentValuesShouldSucceed(self):
Generator.fromSequence([1,1,1,1,2]).isVariable(iterations=5)
# The variability test fails even though different values are generated
# because not enough values are generated to see the variation.
def testAssertVariabilityFailsIfNotEnoughIterations(self):
with self.assertRaises(AssertionError):
Generator.fromSequence([1,1,1,1,2]).isVariable(iterations=4)
# ----- assertConstancy tests -----
def testAssertConstancyWithOneIterationShouldFail(self):
with self.assertRaises(AssertionError):
Generator.fromFunction(lambda: 1).isConstant(iterations=1)
def testAssertConstancyWithConstantValueShouldSucceed(self):
Generator.fromFunction(lambda: 1).isConstant()
def testAssertConstancyWithDifferentValueShouldFail(self):
with self.assertRaises(AssertionError):
Generator.fromSequence([1,1,1,1,2]).isConstant(iterations=5)
# Even though the values are not constant, not enough values are
# generated to see the variation.
def testAssertConstancySucceedsIfNotEnoughIterations(self):
Generator.fromSequence([1,1,1,1,2]).isConstant(iterations=4)
# ----- assertMatchingArrayDimensions tests -----
def testSingleDimensionArrayWithMatchingSizeShouldSucceed(self):
tests = (
((0,), []),
((1,), [1]),
((5,), [1, 1, 1, 1, 1]),
)
for (sizes, array) in tests:
assertMatchingArrayDimensions(sizes, array)
def testSingleDimensionArrayWithWrongSizeShouldFail(self):
tests = (
((1,), []),
((0,), [1]),
((1,), [1, 1, 1, 1, 1]),
)
for (sizes, array) in tests:
with self.assertRaises(AssertionError):
assertMatchingArrayDimensions(sizes, array)
def testMultiDimensionalArrayWithMatchingSizesShouldSucceed(self):
tests = (
((2, 0), [[], []]),
((2, 1), [[1], [1]]),
((2, 1), [[[]], [[]]]), # The elements happen to be lists.
((1, 2, 2), [[[1, 1], [1, 1]]]),
)
for (sizes, array) in tests:
assertMatchingArrayDimensions(sizes, array)
def testMultiDimensionalArrayWithWrongSizesShouldFail(self):
tests = (
((2, 0), [1]), # Actually (1,)
((2, 1), [[1, 1], [1, 1]]), # Actually (2, 2)
((1, 2, 2), [[[1], [1]]]), # Actually (1, 2, 1)
)
for (sizes, array) in tests:
with self.assertRaises(AssertionError):
assertMatchingArrayDimensions(sizes, array)
# ----- assert generated values matching tests -----
def testAssertGeneratesAnyShouldFailIfNoneMatch(self):
values = [6,7,8,9]
expected = [1,2,3,4,5]
with self.assertRaises(AssertionError):
Generator.fromSequence(values).generatesAny(expected)
# Only 2 will be generated, but it matches, and that's enough.
def testAssertGeneratesAnyShouldSucceedIfAnyMatch(self):
expected = [1,2,3,4,5]
Generator.fromFunction(lambda: 2).generatesAny(expected)
def testAssertGeneratesAllWithTooFewIterationsShouldFail(self):
values = [2,2,1,5,4,1,1,5,4,3] # 10 values, 1-5 occur at least once.
expected = [1,2,3,4,5]
gen = Generator.fromSequence(values)
iterations = len(values) - 1
with self.assertRaises(AssertionError):
gen.generatesAll(expected, iterations)
def testAssertGeneratesAllShouldFailIfAnyMissing(self):
values = [1,2,3,4]
expected = [1,2,3,4,5]
with self.assertRaises(AssertionError):
Generator.fromSequence(values).generatesAll(expected)
# 3 does not match, but that is irrelevant as long as 1 and 2 occur.
def testAssertGeneratesAllShouldSucceedIfAllAreMatched(self):
Generator.fromSequence([3,1,2]).generatesAll([1,2])
def testAssertGeneratsAllFailsIfNotEnoughIterations(self):
Generator.fromSequence([1,2,3]).generatesAll([1,2], 2)
# 1 and 2 match, but 3 does not.
def testAssertGeneratesOnlyShouldFailIfAnyMismatch(self):
values = [1,2,3]
expected = [1,2]
with self.assertRaises(AssertionError):
Generator.fromSequence([1,2,3]).generatesOnly([1,2])
# The value 3 should cause the test to fail, but there are not enough
# iterations to reach that value.
def testAssertGeneratesOnlySucceedsIfNotEnoughIterations(self):
Generator.fromSequence([1,2,3]).generatesOnly([1,2], 2)
# All accepted values are not generated, but we don't need to.
# What is important is that no non-matching values occur.
def testAssertGeneratesOnlyShouldSucceedIfNoMismatch(self):
values = [3,4,3,4,5,6,5,6]
expected = [1,2,3,4,5,6,7,8,9,10]
Generator.fromSequence(values).generatesOnly(expected)
# ----- generator from sequence tests -----
def testFiniteGeneratorFromSeq(self):
seq = [0,1,2,3,4,5,6,7,8,9]
expected = [0,1,2,3,4,5,6,7,8,9]
f = Generator.fromSequence(seq, finite=True)
result = [f() for i in range(len(seq))]
self.assertEqual(expected, result)
def testFiniteGeneratorFromSeqShouldRaiseErrorWhenExhausted(self):
seq = [0,1,2,3,4,5,6,7,8,9]
f = Generator.fromSequence(seq, finite=True)
with self.assertRaises(IndexError):
result = [f() for i in range(len(seq) + 1)]
# Expecting a result that is 3 times as long as the sequence.
def testInfiniteGeneratorFromSeqShouldWrapAround(self):
seq = [0,1,2,3]
expected = [0,1,2,3] * 3
f = Generator.fromSequence(seq)
iterations = len(expected)
self.assertTrue(len(seq) < iterations)
result = [f() for i in range(iterations)]
self.assertEqual(expected, result)
# This test is partially redundant, but it specifically confirms that
# most functions do not execute all iterations unless they have to.
# A finite generator of otherwise insufficient length is used as a test.
def testShortcut(self):
seq = [1,2,3,4,1]
expected = [1,2,3]
it = len(seq) * 100 # Much greater than length.
# Now do tests. If they didn't shortcut, an IndexError would be raised.
assertVariability(finiteGeneratorFromSeq(seq), it)
assertGeneratesAny(finiteGeneratorFromSeq(seq), expected, it)
assertGeneratesAll(finiteGeneratorFromSeq(seq), expected, it)
# We expect these tests to fail early (not because values ran out).
with self.assertRaises(AssertionError):
assertConstancy(finiteGeneratorFromSeq(seq), it)
with self.assertRaises(AssertionError):
assertGeneratesOnly(finiteGeneratorFromSeq(seq), expected, it)
# ----- Interval tests -----
def testInterval(self):
tests = {
'[1,3]': ([1,2,3], [0,4]),
']1,3]': ([2,3], [0,1,4]),
'[1,3[': ([1,2], [0,3,4]),
']1,3[': ([2], [0,1,3,4]),
'[*,3]': ([-1,3], [4,5]),
'[1,*]': ([1,2,3], [-1,0]),
'[*,3[': ([-1,2], [3,4,5]),
']1,*]': ([2,3], [-1,0,1]),
'[*,*]': ([1,2,3], []),
']*,*[': ([1,2,2], []),
'[.005,.01]': ([.005, .0051, .009, .01], [.0049, .011]),
'].005,.01]': ([.0051, .009, .01], [.005, .0049, .011]),
'[.005,.01[': ([.005, .0051, .009], [.0049, .011, .01]),
'].005,.01[': ([.0051, .009], [.005, .0049, .011, .01]),
}
for (k, v) in tests.items():
(included, excluded) = v
for value in included:
Interval(k).contains(value)
with self.assertRaises(AssertionError):
Interval(k).doesNotContain(value)
for value in excluded:
Interval(k).doesNotContain(value)
with self.assertRaises(AssertionError):
Interval(k).contains(value)
def testEvaluatedIntervalWithDependenciesFails(self):
with self.assertRaises(AssertionError):
Interval('[A, 3]')
def testValueSink(self):
def acceptGT3(x):
if x <= 3: raise ValueError
def acceptLT3(x):
if x >= 3: raise ValueError
def accept3(x):
if x != 3: raise ValueError
def acceptAll(x):
pass
def rejectAll(x):
raise ValueError
tests = {
# function accepts, rejects
acceptGT3: ([4,5], [2,3]),
acceptLT3: ([1,2], [3,4]),
accept3: ([3], [2,4]),
acceptAll: ([1,2,3], []),
rejectAll: ([], [1,2,3]),
}
# Make a ValueSink from each (lambda) function...
for (k,v) in tests.items():
(accepted, rejected) = v
sink = ValueSink(k)
# ...and check that it accepts and rejects the appropriate values.
for value in accepted:
sink.accepts(value)
with self.assertRaises(AssertionError):
sink.rejects(value)
for value in rejected:
sink.rejects(value)
with self.assertRaises(AssertionError):
sink.accepts(value)
if __name__ == '__main__':
unittest.main()
| mit | -837,333,768,997,059,100 | 37.227273 | 79 | 0.586207 | false |
babelsberg/babelsberg-r | tests/modules/ffi/test_data_converter.py | 1 | 3736 | import pytest
from tests.modules.ffi.base import BaseFFITest
class TestDataConverter(BaseFFITest):
def test_it_is_a_Module(self, space):
assert self.ask(space, "FFI::DataConverter.is_a? Module")
def test_it_has_the_following_instance_methods(self, space):
w_res = space.execute("FFI::DataConverter.instance_methods")
instance_methods = self.unwrap(space, w_res)
assert 'native_type' in instance_methods
assert 'to_native' in instance_methods
assert 'from_native' in instance_methods
code_DataConverterImplementation = """
class DataConverterImplementation
include FFI::DataConverter
def impl_native_type(*args)
native_type(*args)
end
def impl_from_native(*args)
from_native(*args)
end
def impl_to_native(*args)
to_native(*args)
end
end
"""
class TestDataConverter__native_type(BaseFFITest):
@pytest.mark.xfail
def test_it_raises_NotImplementedError_without_args(self, space):
space.execute(code_DataConverterImplementation)
with self.raises(space, "NotImplementedError",
"native_type method not overridden and no "
"native_type set"):
space.execute("""
DataConverterImplementation.new.impl_native_type
""")
def test_it_calls_find_type_if_one_arg_was_given(self, space):
space.execute(code_DataConverterImplementation)
w_res = space.execute("""
def FFI.find_type(arg)
return arg
end
DataConverterImplementation.new.impl_native_type(FFI::Type::VOID)
""")
assert w_res is space.execute("FFI::Type::VOID")
def test_it_returns_the_result_of_find_type(self, ffis):
ffis.execute(code_DataConverterImplementation)
self.ask(ffis, """
DataConverterImplementation.new.impl_native_type(:void).equal? FFI::Type::VOID
""")
def test_it_sets_the_result_of_find_type_as_attr(self, ffis):
ffis.execute(code_DataConverterImplementation)
w_res = ffis.execute("""
dci = DataConverterImplementation.new
dci.impl_native_type(:void)
class DataConverterImplementation
attr_reader :native_type
end
dci.native_type
""")
assert w_res is ffis.execute("FFI::Type::VOID")
def test_it_raises_ArgumentError_for_more_than_1_arg(self, space):
space.execute(code_DataConverterImplementation)
with self.raises(space, "ArgumentError", "incorrect arguments"):
space.execute("""
DataConverterImplementation.new.impl_native_type(:int8, :more)
""")
def check_it_takes_two_args_and_returns_the_first(ffitest, space, funcname):
space.execute(code_DataConverterImplementation)
w_res = space.execute("%s(1, 2)" %funcname)
assert ffitest.unwrap(space, w_res) == 1
with ffitest.raises(space, "ArgumentError"):
space.execute(funcname)
with ffitest.raises(space, "ArgumentError"):
space.execute("%s(1)" %funcname)
with ffitest.raises(space, "ArgumentError"):
space.execute("%s(1, 2, 3)" %funcname)
class TestDataConverter__to_native(BaseFFITest):
def test_it_takes_two_arguments_and_returns_the_first_one(self, space):
check_it_takes_two_args_and_returns_the_first(self, space,
"DataConverterImplementation.new.impl_to_native")
class TestDataConverter__from_native(BaseFFITest):
def test_it_returns_nil_for_now(self, space):
check_it_takes_two_args_and_returns_the_first(self, space,
"DataConverterImplementation.new.impl_from_native")
| bsd-3-clause | -690,132,232,936,254,500 | 36.36 | 105 | 0.645343 | false |
lukesneeringer/fauxquests | fauxquests/session.py | 1 | 4388 | from __future__ import unicode_literals
from collections import namedtuple
from fauxquests.adapter import FauxAdapter
from fauxquests.compat import mock
from requests.compat import OrderedDict
from requests.sessions import Session
from sdict import AlphaSortedDict
class FauxServer(Session):
"""A class that can register certain endpoints to have false
responses returned.
"""
def __init__(self, adapter_class=FauxAdapter, url_pattern='%s'):
"""Create a new Fauxquests instance, which knows how to
mock out requests.session.Session and insert itself.
If a `url_pattern` is provided, then all URLs registered
are interpolated through the `url_pattern`.
"""
# Initialize this object.
super(FauxServer, self).__init__()
self.patcher = mock.patch('requests.sessions.Session',
return_value=self)
self.adapters = OrderedDict()
# Write settings to this object.
self.adapter_class = adapter_class
self.url_pattern = url_pattern
# Save a list of registrations to apply to any FauxAdapter
# that this FauxServer creates.
self.registrations = {}
def __enter__(self):
"""Mock out `requests.session.Session`, replacing it with this
object.
"""
return self.start()
def __exit__(self, type, value, traceback):
return self.stop()
def register(self, url, response, status_code=200, method='GET',
headers=None, **kwargs):
"""Register a given URL and response with this FauxServer.
Internally, this object's context manager creates and returns a
FauxAdapters, so regisrations within a context manager go away
when the context manager is exited.
This method, however, is run before the context manager is applied,
and applies universally to all adapters this object creates.
"""
self.registrations[url] = Registration('', response, status_code,
method, headers, kwargs)
def register_json(self, url, response, status_code=200,
method='GET', headers=None, **kwargs):
"""Register a given URL and response with this FauxServer.
Internally, this object's context manager creates and returns a
FauxAdapters, so regisrations within a context manager go away
when the context manager is exited.
This method, however, is run before the context manager is applied,
and applies universally to all adapters this object creates.
"""
self.registrations[url] = Registration('json', response, status_code,
method, headers, kwargs)
def start(self):
"""Institute the patching process, meaining requests sent to
requests (how meta) are caught and handled by our adapter instead.
"""
# Mount the Fauxquests adapter, which handles delivery of
# responses based on the provided URL.
adapter = self.adapter_class(url_pattern=self.url_pattern)
self.mount('https://', adapter)
self.mount('http://', adapter)
# Iterate over any registrations that are saved as part of this
# FauxServer object and register them to the Adapter.
for url, reg in self.registrations.items():
# Is this a plain registration or a JSON registration?
method_name = 'register'
if reg.type:
method_name += '_' + reg.type
# Forward the registration to the adapter.
getattr(adapter, method_name)(url, reg.response, reg.status_code,
reg.method, reg.headers, **reg.kwargs)
# Start the patcher.
self.patcher.start()
# Return the adapter object, which can accept registred
# URLs with responses
return adapter
def stop(self):
"""Undo the patching process set up in `self.start`, and also
set this object back to having no adapters.
"""
self.patcher.stop()
self.adapters = OrderedDict()
Registration = namedtuple('Registration', ['type', 'response', 'status_code',
'method', 'headers', 'kwargs'])
| bsd-3-clause | -9,205,908,773,695,624,000 | 38.890909 | 80 | 0.616226 | false |
ntoll/yotta | yotta/lib/registry_access.py | 1 | 22721 | # Copyright 2014 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import re
import logging
from collections import OrderedDict
import uuid
import functools
import json
import binascii
import calendar
import datetime
import hashlib
import itertools
import base64
import webbrowser
import os
try:
from urllib import quote as quoteURL
except ImportError:
from urllib.parse import quote as quoteURL
# requests, apache2
import requests
# PyJWT, MIT, Jason Web Tokens, pip install PyJWT
import jwt
# cryptography, Apache License, Python Cryptography library,
import cryptography
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
# settings, , load and save settings, internal
import settings
# access_common, , things shared between different component access modules, internal
import access_common
# version, , represent versions and specifications, internal
import version
# Ordered JSON, , read & write json, internal
import ordered_json
# Github Access, , access repositories on github, internal
import github_access
# export key, , export pycrypto keys, internal
import exportkey
# Default endpoint URLs for the yotta registry and website. Callers may
# override the registry URL per-call; these are the fallbacks.
Registry_Base_URL = 'https://registry.yottabuild.org'
# NOTE(review): presumably the "aud" claim value expected by the registry's
# JWT auth — not referenced in this part of the file, confirm against callers.
Registry_Auth_Audience = 'http://registry.yottabuild.org'
Website_Base_URL = 'http://yottabuild.org'
# Strips an OpenSSH-format public key down to its base64 payload: removes the
# leading "ssh-<type> " prefix, the trailing " comment@host" part, and any
# newlines. Used both for wire-format keys and for fingerprinting below.
_OpenSSH_Keyfile_Strip = re.compile(b"^(ssh-[a-z0-9]*\s+)|(\s+.+\@.+)|\n", re.MULTILINE)
logger = logging.getLogger('access')
# suppress logging from the requests library
logging.getLogger("requests").setLevel(logging.WARNING)
class AuthError(RuntimeError):
pass
# Internal functions
def generate_jwt_token(private_key, registry=None):
registry = registry or Registry_Base_URL
expires = calendar.timegm((datetime.datetime.utcnow() + datetime.timedelta(hours=2)).timetuple())
prn = _fingerprint(private_key.public_key())
logger.debug('fingerprint: %s' % prn)
token_fields = {
"iss": 'yotta',
"aud": registry,
"prn": prn,
"exp": str(expires)
}
logger.debug('token fields: %s' % token_fields)
private_key_pem = private_key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption()
)
token = jwt.encode(token_fields, private_key_pem.decode('ascii'), 'RS256').decode('ascii')
logger.debug('encoded token: %s' % token)
return token
def _pubkeyWireFormat(pubkey):
pubk_numbers = pubkey.public_numbers()
logger.debug('openssh format publickey:\n%s' % exportkey.openSSH(pubk_numbers))
return quoteURL(_OpenSSH_Keyfile_Strip.sub(b'', exportkey.openSSH(pubk_numbers)))
def _fingerprint(pubkey):
stripped = _OpenSSH_Keyfile_Strip.sub(b'', exportkey.openSSH(pubkey.public_numbers()))
decoded = base64.b64decode(stripped)
khash = hashlib.md5(decoded).hexdigest()
return ':'.join([khash[i:i+2] for i in range(0, len(khash), 2)])
def _returnRequestError(fn):
''' Decorator that captures requests.exceptions.RequestException errors
and returns them as an error message. If no error occurs the reture
value of the wrapped function is returned (normally None). '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.RequestException as e:
return "server returned status %s: %s" % (e.response.status_code, e.message)
return wrapped
def _handleAuth(fn):
''' Decorator to re-try API calls after asking the user for authentication. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized:
logger.debug('%s unauthorised', fn)
# any provider is sufficient for registry auth
github_access.authorizeUser(provider=None)
logger.debug('retrying after authentication...')
return fn(*args, **kwargs)
else:
raise
return wrapped
def _friendlyAuthError(fn):
''' Decorator to print a friendly you-are-not-authorised message. Use
**outside** the _handleAuth decorator to only print the message after
the user has been given a chance to login. '''
@functools.wraps(fn)
def wrapped(*args, **kwargs):
try:
return fn(*args, **kwargs)
except requests.exceptions.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized:
logger.error('insufficient permission')
return None
else:
raise
return wrapped
def _getPrivateRegistryKey():
if 'YOTTA_PRIVATE_REGISTRY_API_KEY' in os.environ:
return os.environ['YOTTA_PRIVATE_REGISTRY_API_KEY']
return None
def _listVersions(namespace, name):
sources = _getSources()
registry_urls = [s['url'] for s in sources if 'type' in s and s['type'] == 'registry']
# look in the public registry last
registry_urls.append(Registry_Base_URL)
versions = []
for registry in registry_urls:
# list versions of the package:
url = '%s/%s/%s/versions' % (
registry,
namespace,
name
)
request_headers = _headersForRegistry(registry)
logger.debug("GET %s, %s", url, request_headers)
response = requests.get(url, headers=request_headers)
if response.status_code == 404:
continue
# raise any other HTTP errors
response.raise_for_status()
for x in ordered_json.loads(response.text):
rtv = RegistryThingVersion(x, namespace, name, registry=registry)
if not rtv in versions:
versions.append(rtv)
if not len(versions):
raise access_common.Unavailable(
('%s does not exist in the %s registry. '+
'Check that the name is correct, and that it has been published.') % (name, namespace)
)
return versions
def _tarballURL(namespace, name, version, registry=None):
registry = registry or Registry_Base_URL
return '%s/%s/%s/versions/%s/tarball' % (
registry, namespace, name, version
)
def _getTarball(url, directory, sha256):
logger.debug('registry: get: %s' % url)
if not sha256:
logger.warn('tarball %s has no hash to check' % url)
# figure out which registry we're fetching this tarball from (if any) and
# add appropriate headers
registry = Registry_Base_URL
for source in _getSources():
if ('type' in source and source['type'] == 'registry' and
'url' in source and url.startswith(source['url'])):
registry = source['url']
break
request_headers = _headersForRegistry(registry)
logger.debug('GET %s, %s', url, request_headers)
response = requests.get(url, headers=request_headers, allow_redirects=True, stream=True)
response.raise_for_status()
return access_common.unpackTarballStream(response, directory, ('sha256', sha256))
def _getSources():
sources = settings.get('sources')
if sources is None:
sources = []
return sources
def _isPublicRegistry(registry):
return (registry is None) or (registry == Registry_Base_URL)
def _friendlyRegistryName(registry):
return registry
def _getPrivateKey(registry):
if _isPublicRegistry(registry):
return settings.getProperty('keys', 'private')
else:
for s in _getSources():
if _sourceMatches(s, registry):
if 'keys' in s and s['keys'] and 'private' in s['keys']:
return s['keys']['private']
return None
def _sourceMatches(source, registry):
return ('type' in source and source['type'] == 'registry' and
'url' in source and source['url'] == registry)
def _generateAndSaveKeys(registry=None):
registry = registry or Registry_Base_URL
k = rsa.generate_private_key(
public_exponent=65537, key_size=2048, backend=default_backend()
)
privatekey_pem = k.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption()
)
pubkey_pem = k.public_key().public_bytes(
serialization.Encoding.PEM,
serialization.PublicFormat.SubjectPublicKeyInfo
)
if _isPublicRegistry(registry):
settings.setProperty('keys', 'private', privatekey_pem.decode('ascii'))
settings.setProperty('keys', 'public', pubkey_pem.decode('ascii'))
else:
sources = _getSources()
keys = None
for s in sources:
if _sourceMatches(s, registry):
if not 'keys' in s:
s['keys'] = dict()
keys = s['keys']
break
if keys is None:
keys = dict()
sources.append({
'type':'registry',
'url':registry,
'keys':keys
})
keys['private'] = privatekey_pem.decode('ascii')
keys['public'] = pubkey_pem.decode('ascii')
settings.set('sources', sources)
return pubkey_pem, privatekey_pem
def _getPrivateKeyObject(registry=None):
registry = registry or Registry_Base_URL
privatekey_pem = _getPrivateKey(registry)
if not privatekey_pem:
pubkey_pem, privatekey_pem = _generateAndSaveKeys(registry)
else:
# settings are unicode, we should be able to safely decode to ascii for
# the key though, as it will either be hex or PEM encoded:
privatekey_pem = privatekey_pem.encode('ascii')
# if the key doesn't look like PEM, it might be hex-encided-DER (which we
# used historically), so try loading that:
if b'-----BEGIN PRIVATE KEY-----' in privatekey_pem:
return serialization.load_pem_private_key(
privatekey_pem, None, default_backend()
)
else:
privatekey_der = binascii.unhexlify(privatekey_pem)
return serialization.load_der_private_key(
privatekey_der, None, default_backend()
)
def _headersForRegistry(registry):
registry = registry or Registry_Base_URL
auth_token = generate_jwt_token(_getPrivateKeyObject(registry), registry)
r = {
'Authorization': 'Bearer %s' % auth_token
}
if registry == Registry_Base_URL:
return r
for s in _getSources():
if _sourceMatches(s, registry):
if 'apikey' in s:
r['X-Api-Key'] = s['apikey']
break
return r
# API
class RegistryThingVersion(access_common.RemoteVersion):
def __init__(self, data, namespace, name, registry=None):
logger.debug('RegistryThingVersion %s/%s data: %s' % (namespace, name, data))
version = data['version']
self.namespace = namespace
self.name = name
self.version = version
if 'hash' in data and 'sha256' in data['hash']:
self.sha256 = data['hash']['sha256']
else:
self.sha256 = None
url = _tarballURL(self.namespace, self.name, version, registry)
super(RegistryThingVersion, self).__init__(
version, url, name=name, friendly_source=_friendlyRegistryName(registry)
)
def unpackInto(self, directory):
assert(self.url)
_getTarball(self.url, directory, self.sha256)
class RegistryThing(access_common.RemoteComponent):
def __init__(self, name, version_spec, namespace):
self.name = name
self.spec = version_spec
self.namespace = namespace
@classmethod
def createFromSource(cls, vs, name, registry):
''' returns a registry component for anything that's a valid package
name (this does not guarantee that the component actually exists in
the registry: use availableVersions() for that).
'''
# we deliberately allow only lowercase, hyphen, and (unfortunately)
# numbers in package names, to reduce the possibility of confusingly
# similar names: if the name doesn't match this then escalate to make
# the user fix it
name_match = re.match('^([a-z0-9-]+)$', name)
if not name_match:
raise ValueError('Dependency name "%s" is not valid (must contain only lowercase letters, hyphen, and numbers)' % name)
assert(vs.semantic_spec)
return RegistryThing(name, vs.semantic_spec, registry)
def versionSpec(self):
return self.spec
def availableVersions(self):
''' return a list of Version objects, each able to retrieve a tarball '''
return _listVersions(self.namespace, self.name)
def tipVersion(self):
raise NotImplementedError()
@classmethod
def remoteType(cls):
return 'registry'
@_handleAuth
def publish(namespace, name, version, description_file, tar_file, readme_file,
readme_file_ext, registry=None):
''' Publish a tarblob to the registry, if the request fails, an exception
is raised, which either triggers re-authentication, or is turned into a
return value by the decorators. (If successful, the decorated function
returns None)
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/versions/%s' % (
registry,
namespace,
name,
version
)
if readme_file_ext == '.md':
readme_section_name = 'readme.md'
elif readme_file_ext == '':
readme_section_name = 'readme'
else:
raise ValueError('unsupported readme type: "%s"' % readne_file_ext)
# description file is in place as text (so read it), tar file is a file
body = OrderedDict([('metadata', (None, description_file.read(),'application/json')),
('tarball',('tarball', tar_file)),
(readme_section_name, (readme_section_name, readme_file))])
headers = _headersForRegistry(registry)
response = requests.put(url, headers=headers, files=body)
if not response.ok:
return "server returned status %s: %s" % (response.status_code, response.text)
return None
@_handleAuth
def unpublish(namespace, name, version, registry=None):
''' Try to unpublish a recently published version. Return any errors that
occur.
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/versions/%s' % (
registry,
namespace,
name,
version
)
headers = _headersForRegistry(registry)
response = requests.delete(url, headers=headers)
if not response.ok:
return "server returned status %s: %s" % (response.status_code, response.text)
return None
@_friendlyAuthError
@_handleAuth
def listOwners(namespace, name, registry=None):
''' List the owners of a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners' % (
registry,
namespace,
name
)
request_headers = _headersForRegistry(registry)
response = requests.get(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return []
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
return ordered_json.loads(response.text)
@_friendlyAuthError
@_handleAuth
def addOwner(namespace, name, owner, registry=None):
''' Add an owner for a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners/%s' % (
registry,
namespace,
name,
owner
)
request_headers = _headersForRegistry(registry)
response = requests.put(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
@_friendlyAuthError
@_handleAuth
def removeOwner(namespace, name, owner, registry=None):
''' Remove an owner for a module or target (owners are the people with
permission to publish versions and add/remove the owners).
'''
registry = registry or Registry_Base_URL
url = '%s/%s/%s/owners/%s' % (
registry,
namespace,
name,
owner
)
request_headers = _headersForRegistry(registry)
response = requests.delete(url, headers=request_headers)
if response.status_code == 404:
logger.error('no such %s, "%s"' % (namespace[:-1], name))
return
# raise exceptions for other errors - the auth decorators handle these and
# re-try if appropriate
response.raise_for_status()
def search(query='', keywords=[], registry=None):
''' generator of objects returned by the search endpoint (both modules and
targets).
Query is a full-text search (description, name, keywords), keywords
search only the module/target description keywords lists.
If both parameters are specified the search is the intersection of the
two queries.
'''
registry = registry or Registry_Base_URL
url = '%s/search' % registry
headers = _headersForRegistry(registry)
params = {
'skip': 0,
'limit': 50
}
if len(query):
params['query'] = query
if len(keywords):
params['keywords[]'] = keywords
while True:
response = requests.get(url, headers=headers, params=params)
response.raise_for_status()
objects = ordered_json.loads(response.text)
if len(objects):
for o in objects:
yield o
params['skip'] += params['limit']
else:
break
def deauthorize(registry=None):
registry = registry or Registry_Base_URL
if _isPublicRegistry(registry):
if settings.get('keys'):
settings.set('keys', dict())
else:
sources = [s for s in _getSources() if not _sourceMatches(s, registry)]
settings.set('sources', sources)
def setAPIKey(registry, api_key):
''' Set the api key for accessing a registry. This is only necessary for
development/test registries.
'''
if (registry is None) or (registry == Registry_Base_URL):
return
sources = _getSources()
source = None
for s in sources:
if _sourceMatches(s, registry):
source = s
if source is None:
source = {
'type':'registry',
'url':registry,
}
sources.append(source)
source['apikey'] = api_key
settings.set('sources', sources)
def getPublicKey(registry=None):
''' Return the user's public key (generating and saving a new key pair if necessary) '''
registry = registry or Registry_Base_URL
pubkey_pem = None
if _isPublicRegistry(registry):
pubkey_pem = settings.getProperty('keys', 'public')
else:
for s in _getSources():
if _sourceMatches(s, registry):
if 'keys' in s and s['keys'] and 'public' in s['keys']:
pubkey_pem = s['keys']['public']
break
if not pubkey_pem:
pubkey_pem, privatekey_pem = _generateAndSaveKeys()
else:
# settings are unicode, we should be able to safely decode to ascii for
# the key though, as it will either be hex or PEM encoded:
pubkey_pem = pubkey_pem.encode('ascii')
# if the key doesn't look like PEM, it might be hex-encided-DER (which we
# used historically), so try loading that:
if b'-----BEGIN PUBLIC KEY-----' in pubkey_pem:
pubkey = serialization.load_pem_public_key(pubkey_pem, default_backend())
else:
pubkey_der = binascii.unhexlify(pubkey_pem)
pubkey = serialization.load_der_public_key(pubkey_der, default_backend())
return _pubkeyWireFormat(pubkey)
def testLogin(registry=None):
registry = registry or Registry_Base_URL
url = '%s/users/me' % (
registry
)
request_headers = _headersForRegistry(registry)
logger.debug('test login...')
response = requests.get(url, headers=request_headers)
response.raise_for_status()
def getAuthData(registry=None):
''' Poll the registry to get the result of a completed authentication
(which, depending on the authentication the user chose or was directed
to, will include a github or other access token)
'''
registry = registry or Registry_Base_URL
url = '%s/tokens' % (
registry
)
request_headers = _headersForRegistry(registry)
logger.debug('poll for tokens... %s', request_headers)
try:
response = requests.get(url, headers=request_headers)
except requests.RequestException as e:
logger.debug(str(e))
return None
if response.status_code == requests.codes.unauthorized:
logger.debug('Unauthorised')
return None
elif response.status_code == requests.codes.not_found:
logger.debug('Not Found')
return None
body = response.text
logger.debug('auth data response: %s' % body);
r = {}
parsed_response = ordered_json.loads(body)
if 'error' in parsed_response:
raise AuthError(parsed_response['error'])
for token in parsed_response:
if token['provider'] == 'github':
r['github'] = token['accessToken']
break
logger.debug('parsed auth tokens %s' % r);
return r
def getLoginURL(provider=None, registry=None):
registry = registry or Registry_Base_URL
if provider:
query = ('?provider=%s' % provider)
else:
query = ''
if not _isPublicRegistry(registry):
if not len(query):
query = '?'
query += '&private=1'
return Website_Base_URL + '/' + query + '#login/' + getPublicKey(registry)
def openBrowserLogin(provider=None, registry=None):
registry = registry or Registry_Base_URL
webbrowser.open(getLoginURL(provider=provider, registry=registry))
| apache-2.0 | -412,501,898,421,974,500 | 31.928986 | 131 | 0.632763 | false |
myboycrais99/Rule1 | GetGrowthRateExceptions.py | 1 | 4224 | '''
This file passes the maximum allowed number of years that the stock
is allowed to fail to meet the desired growth rates. Default is 0 years
This value will be used in PassGrowthRate.py
'''
# -------------------------- Required Files----- ---------------------
#
#
# --------------------------------------------------------------------
# --------------------------Variable Declaration ---------------------
# years: global variable
# years_exceptions: This is the maximum allowed number of years that
# the stock is allowed to fail to meet the desired
# growth rates.
# loop_counter: used for the while loop.
# invalid_answer: boolean expression... 0 is valid. 1 is invalid.
#
# --------------------------------------------------------------------
#default value
years_exceptions = 0
print()
#print filename
print('Filename: GetGrowthRateExceptions')
#This function requests the user to input a value. If an invalid answer is supplied, it
#allows the operator three attempts before it sets the default value
def get_years_exceptions ():
# years_exceptions = input('Maximum allowed number of years that a stock is allowed '
# 'to fail to meet the desired growth rates: ')
# print()
#
#
# #Set to default values
# #invalid_answer = 1: it's invalid
# #invalid_answer =0: valid
# invalid_answer = 1
#
# # max number of loops = 3. Starting counter at 0.
# loop_counter = 0
#
# #Check if the value is a number below 5. If it's not, ask for a new value.
# #Allow for three attempts before setting default value to 0.
# while loop_counter <= 3 and invalid_answer == 1:
#
# if years_exceptions == '0':
# invalid_answer = 0 #0 means it's valid
# elif years_exceptions == '1':
# invalid_answer = 0
# elif years_exceptions == '2':
# invalid_answer = 0
# elif years_exceptions == '3':
# invalid_answer = 0
# elif years_exceptions == '4':
# invalid_answer = 0
# else:
# years_exceptions = input('You entered an invalid answer. Please try again: ')
#
# loop_counter = loop_counter + 1
#
# #end while loop
#
# #Check the final looped value was valid or not
# if loop_counter == 4 and invalid_answer == 1:
# if years_exceptions == '0':
# invalid_answer = 0 #0 means it's valid
# elif years_exceptions == '1':
# invalid_answer = 0
# elif years_exceptions == '2':
# invalid_answer = 0
# elif years_exceptions == '3':
# invalid_answer = 0
# elif years_exceptions == '4':
# invalid_answer = 0
# #end if
#
# # Check if loop_counter = 4. If it does, set the years_exception to default value 0
# if loop_counter == 4 and invalid_answer == 1:
# years_exceptions = 0
# print()
# print()
# print('you suck as you apparently can not follow the simplest of instructions.')
# print('I am overriding your answer to 0')
# print()
# print('years exceptions: ', years_exceptions)
#
# #since inputs are always as a string, this function converts it to an integer
# years_exceptions = int(years_exceptions)
#
# print()
# print('years exceptions: ', years_exceptions)
#temporarily overriding years exceptions to a default value for troubleshooting
years_exceptions = 1
print('years excempt:', years_exceptions)
return years_exceptions
# END FUNCTION
#call the function
get_years_exceptions()
#outside_funcion_exceptions = get_years_exceptions()
#print()
#print('final answer \n years exceptions =', outside_funcion_exceptions)
#todo: instead of printing a nice message, the values that pass the criteria need to
#todo: then move on to another list or be exported etc. | mit | 3,795,689,030,596,564,000 | 33.813559 | 94 | 0.54285 | false |
firmadyne/scraper | firmware/spiders/centurylink.py | 1 | 1956 | from scrapy import Spider
from scrapy.http import Request
from firmware.items import FirmwareImage
from firmware.loader import FirmwareLoader
import urllib.request, urllib.parse, urllib.error
# http://home.centurytel.net/ihd/
class CenturyLinkSpider(Spider):
name = "centurylink"
allowed_domains = ["centurylink.com"]
start_urls = ["http://internethelp.centurylink.com/internethelp/downloads-auto-firmware-q.html"]
def parse(self, response):
product = None
for section in response.xpath("//div[@class='product-content']/div[@class='product-box2']/div"):
text = section.xpath(".//text()").extract()
if not section.xpath(".//a"):
product = text[0].strip()
else:
for link in section.xpath(".//a/@href").extract():
if link.endswith(".html"):
yield Request(
url=urllib.parse.urljoin(response.url, link),
meta={"product": product,
"version": FirmwareLoader.find_version(text)},
headers={"Referer": response.url},
callback=self.parse_download)
def parse_download(self, response):
for link in response.xpath("//div[@id='auto']//a"):
href = link.xpath("./@href").extract()[0]
text = link.xpath(".//text()").extract()[0]
if ("downloads" in href or "firmware" in href) and \
not href.endswith(".html"):
item = FirmwareLoader(item=FirmwareImage(), response=response)
item.add_value("version", response.meta["version"])
item.add_value("url", href)
item.add_value("description", text)
item.add_value("product", response.meta["product"])
item.add_value("vendor", self.name)
yield item.load_item()
| mit | 4,258,128,315,239,091,000 | 41.521739 | 104 | 0.555215 | false |
botswana-harvard/edc-label | edc_label/views/change_printer_view.py | 1 | 1732 | from django.views.generic.edit import ProcessFormView
from django.urls.base import reverse
from django.http.response import HttpResponseRedirect
from edc_base.models import UserProfile
from django.contrib.auth.mixins import LoginRequiredMixin
class ChangePrinterView(LoginRequiredMixin, ProcessFormView):
success_url = 'edc_label:home_url'
empty_selection = '--'
def post(self, request, *args, **kwargs):
user_profile = UserProfile.objects.get(user=self.request.user)
print_server_name = request.POST.get('print_server_name')
if print_server_name:
if print_server_name == self.empty_selection:
print_server_name = None
request.session['print_server_name'] = print_server_name
user_profile.print_server = print_server_name
clinic_label_printer_name = request.POST.get(
'clinic_label_printer_name')
if clinic_label_printer_name:
if clinic_label_printer_name == self.empty_selection:
clinic_label_printer_name = None
request.session['clinic_label_printer_name'] = clinic_label_printer_name
user_profile.clinic_label_printer = clinic_label_printer_name
lab_label_printer_name = request.POST.get('lab_label_printer_name')
if lab_label_printer_name:
if lab_label_printer_name == self.empty_selection:
lab_label_printer_name = None
request.session['lab_label_printer_name'] = lab_label_printer_name
user_profile.lab_label_printer = lab_label_printer_name
user_profile.save()
success_url = reverse(self.success_url)
return HttpResponseRedirect(redirect_to=success_url)
| gpl-3.0 | 5,477,501,294,457,762,000 | 39.27907 | 84 | 0.671478 | false |
OSGeoLabBp/tutorials | english/img_processing/code/circles.py | 1 | 1095 | import cv2
import numpy as np
import os.path
from sys import argv
if len(argv) < 2:
print("Usage: {} img_file [img_file ...]".format(argv[0]))
exit()
# process images
for fn in argv[1:]:
try:
src_img = cv2.imread(fn) # load image
except:
print("Failed to read image {}".format(fn))
continue
# convert image to gray scale
gray_img = cv2.cvtColor(src_img, cv2.COLOR_BGR2GRAY)
# noise reduction
img = cv2.medianBlur(gray_img, 5)
#find circles
circles = cv2.HoughCircles(img, cv2.HOUGH_GRADIENT, 1, 400,
param1=100, param2=30, minRadius=10, maxRadius=1000)
print(circles)
circles = np.uint16(np.around(circles))
for i in circles[0,:]:
# draw the outer circle
cv2.circle(src_img, (i[0], i[1]), i[2], (0, 255, 0), 10)
# draw the center of the circle
cv2.circle(src_img, (i[0], i[1]), 2, (0,0,255), 10)
fn1 = os.path.split(fn)
fn2 = os.path.join(fn1[0], "c_" + fn1[1])
cv2.imwrite(fn2, src_img)
cv2.imshow('circles', src_img)
cv2.waitKey(0)
| cc0-1.0 | -2,507,481,706,028,149,000 | 30.285714 | 80 | 0.585388 | false |
SNeuhausen/training_management | utils/field_utils.py | 1 | 2138 | class FieldUtils(object):
""" A utility class, which provides helper functions for managing fields of BaseModels. """
@staticmethod
def get_field_description(model_object, field_name):
result = model_object.fields_get([field_name])
field_description = result.get(field_name)
return field_description
@classmethod
def get_selection_label(cls, model_object, field_name, selection_value):
""" Returns the label for a given selection value of field ``field_name`` from model ``model_object``. """
field_description = cls.get_field_description(model_object, field_name)
selection_pairs = field_description.get('selection')
for pair in selection_pairs:
value = pair[0]
if value == selection_value:
label = pair[1]
return label
@classmethod
def is_valid_selection_value(cls, model_object, field_name, selection_value):
""" Checks, whether the given selection field ``field_name`` has a selection value ``selection_value``. """
field_description = cls.get_field_description(model_object, field_name)
selection_pairs = field_description.get("selection")
for pair in selection_pairs:
value = pair[0]
if value == selection_value:
return True
return False
@classmethod
def assert_selection_value(cls, model_object, field_name, selection_value):
""" Checks, if the given selection value is contained in the selection field or raises an exception. """
assert cls.is_valid_selection_value(model_object, field_name, selection_value), \
u"The value '{0}' is not contained in selection field '{1}'".format(selection_value, field_name)
@classmethod
def assert_and_get_selection_value(cls, model_object, field_name, selection_value):
""" Assert that ``selection_value`` is a valid value in selection field ``field_name`` and return
``selection_value``.
"""
cls.assert_selection_value(model_object, field_name, selection_value)
return selection_value | gpl-3.0 | 4,628,970,539,293,727,000 | 47.613636 | 115 | 0.65435 | false |
ask/kamqp | tests/client_0_8/test_exceptions.py | 1 | 1494 | #!/usr/bin/env python
"""
Test kamqp.client_0_8.exceptions module
"""
# Copyright (C) 2007-2008 Barry Pederson <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
import unittest
import settings
from kamqp.client_0_8.exceptions import *
class TestException(unittest.TestCase):
def test_exception(self):
exc = AMQPError(7, 'My Error', (10, 10))
self.assertEqual(exc.amqp_reply_code, 7)
self.assertEqual(exc.amqp_reply_text, 'My Error')
self.assertEqual(exc.amqp_method_sig, (10, 10))
self.assertEqual(exc.args,
(7, 'My Error', (10, 10), 'Connection.start'))
def main():
suite = unittest.TestLoader().loadTestsFromTestCase(TestException)
unittest.TextTestRunner(**settings.test_args).run(suite)
if __name__ == '__main__':
main()
| lgpl-2.1 | -464,473,646,086,698,800 | 32.2 | 75 | 0.713521 | false |
toomanycats/IndeedScraper | test.py | 1 | 1136 | from flask import Flask, render_template
from bokeh.charts import Bar
from bokeh.embed import components
from bokeh.util.string import encode_utf8
from bokeh.plotting import figure
import pandas as pd
app = Flask(__name__)
@app.route('/')
def test():
kws = ["one", "two", "cat", "dog"]
count = [23, 45, 11, 87]
df = pd.DataFrame({"kw": kws,
"count": count
})
#p = Bar(df, 'kw')
df.sort("count", inplace=True)
df.set_index("kw", inplace=True)
series = df['count']
p = figure(width=1000, height=1000, y_range=series.index.tolist())
j = 1
for k, v in series.iteritems():
w = v / 2 * 2
p.rect(x=v/2,
y=j,
width=w,
height=0.4,
color=(76, 114, 176),
width_units="data",
height_units="data"
)
j += 1
script, div = components(p)
page = render_template('test.html', div=div, script=script)
return encode_utf8(page)
if __name__ == "__main__":
app.run(debug=True,
threaded=False
)
| mit | -600,367,166,198,424,400 | 22.666667 | 70 | 0.514085 | false |
stpeter/memberbot | sleekxmpp/plugins/xep_0009.py | 1 | 7589 | """
XEP-0009 XMPP Remote Procedure Calls
"""
from __future__ import with_statement
import base
import logging
from xml.etree import cElementTree as ET
import copy
import time
import base64
def py2xml(*args):
    """Serialize each positional argument into an XML-RPC <params> element.

    Every argument becomes one <param> child wrapping the <value> built by
    _py2xml().
    """
    root = ET.Element("params")
    for arg in args:
        wrapper = ET.Element("param")
        wrapper.append(_py2xml(arg))
        root.append(wrapper)
    return root
def _py2xml(*args):
for x in args:
val = ET.Element("value")
if type(x) is int:
i4 = ET.Element("i4")
i4.text = str(x)
val.append(i4)
if type(x) is bool:
boolean = ET.Element("boolean")
boolean.text = str(int(x))
val.append(boolean)
elif type(x) is str:
string = ET.Element("string")
string.text = x
val.append(string)
elif type(x) is float:
double = ET.Element("double")
double.text = str(x)
val.append(double)
elif type(x) is rpcbase64:
b64 = ET.Element("Base64")
b64.text = x.encoded()
val.append(b64)
elif type(x) is rpctime:
iso = ET.Element("dateTime.iso8601")
iso.text = str(x)
val.append(iso)
elif type(x) is list:
array = ET.Element("array")
data = ET.Element("data")
for y in x:
data.append(_py2xml(y))
array.append(data)
val.append(array)
elif type(x) is dict:
struct = ET.Element("struct")
for y in x.keys():
member = ET.Element("member")
name = ET.Element("name")
name.text = y
member.append(name)
member.append(_py2xml(x[y]))
struct.append(member)
val.append(struct)
return val
def xml2py(params):
    """Deserialize an XML-RPC <params> element into a list of Python values.

    Each <param> child's <value> is converted via _xml2py().
    """
    return [_xml2py(param.find('value')) for param in params.findall('param')]
def _xml2py(value):
if value.find('i4') is not None:
return int(value.find('i4').text)
if value.find('int') is not None:
return int(value.find('int').text)
if value.find('boolean') is not None:
return bool(value.find('boolean').text)
if value.find('string') is not None:
return value.find('string').text
if value.find('double') is not None:
return float(value.find('double').text)
if value.find('Base64') is not None:
return rpcbase64(value.find('Base64').text)
if value.find('dateTime.iso8601') is not None:
return rpctime(value.find('dateTime.iso8601'))
if value.find('struct') is not None:
struct = {}
for member in value.find('struct').findall('member'):
struct[member.find('name').text] = _xml2py(member.find('value'))
return struct
if value.find('array') is not None:
array = []
for val in value.find('array').find('data').findall('value'):
array.append(_xml2py(val))
return array
raise ValueError()
class rpcbase64(object):
def __init__(self, data):
#base 64 encoded string
self.data = data
def decode(self):
return base64.decodestring(data)
def __str__(self):
return self.decode()
def encoded(self):
return self.data
class rpctime(object):
def __init__(self,data=None):
#assume string data is in iso format YYYYMMDDTHH:MM:SS
if type(data) is str:
self.timestamp = time.strptime(data,"%Y%m%dT%H:%M:%S")
elif type(data) is time.struct_time:
self.timestamp = data
elif data is None:
self.timestamp = time.gmtime()
else:
raise ValueError()
def iso8601(self):
#return a iso8601 string
return time.strftime("%Y%m%dT%H:%M:%S",self.timestamp)
def __str__(self):
return self.iso8601()
class JabberRPCEntry(object):
def __init__(self,call):
self.call = call
self.result = None
self.error = None
self.allow = {} #{'<jid>':['<resource1>',...],...}
self.deny = {}
def check_acl(self, jid, resource):
#Check for deny
if jid in self.deny.keys():
if self.deny[jid] == None or resource in self.deny[jid]:
return False
#Check for allow
if allow == None:
return True
if jid in self.allow.keys():
if self.allow[jid] == None or resource in self.allow[jid]:
return True
return False
def acl_allow(self, jid, resource):
if jid == None:
self.allow = None
elif resource == None:
self.allow[jid] = None
elif jid in self.allow.keys():
self.allow[jid].append(resource)
else:
self.allow[jid] = [resource]
def acl_deny(self, jid, resource):
if jid == None:
self.deny = None
elif resource == None:
self.deny[jid] = None
elif jid in self.deny.keys():
self.deny[jid].append(resource)
else:
self.deny[jid] = [resource]
def call_method(self, args):
ret = self.call(*args)
class xep_0009(base.base_plugin):
def plugin_init(self):
self.xep = '0009'
self.description = 'Jabber-RPC'
self.xmpp.add_handler("<iq type='set'><query xmlns='jabber:iq:rpc' /></iq>", self._callMethod)
self.xmpp.add_handler("<iq type='result'><query xmlns='jabber:iq:rpc' /></iq>", self._callResult)
self.xmpp.add_handler("<iq type='error'><query xmlns='jabber:iq:rpc' /></iq>", self._callError)
self.entries = {}
self.activeCalls = []
def post_init(self):
self.xmpp['xep_0030'].add_feature('jabber:iq:rpc')
self.xmpp['xep_0030'].add_identity('automatition','rpc')
def register_call(self, method, name=None):
#@returns an string that can be used in acl commands.
with self.lock:
if name is None:
self.entries[method.__name__] = JabberRPCEntry(method)
return method.__name__
else:
self.entries[name] = JabberRPCEntry(method)
return name
def acl_allow(self, entry, jid=None, resource=None):
#allow the method entry to be called by the given jid and resource.
#if jid is None it will allow any jid/resource.
#if resource is None it will allow any resource belonging to the jid.
with self.lock:
if self.entries[entry]:
self.entries[entry].acl_allow(jid,resource)
else:
raise ValueError()
def acl_deny(self, entry, jid=None, resource=None):
#Note: by default all requests are denied unless allowed with acl_allow.
#If you deny an entry it will not be allowed regardless of acl_allow
with self.lock:
if self.entries[entry]:
self.entries[entry].acl_deny(jid,resource)
else:
raise ValueError()
def unregister_call(self, entry):
#removes the registered call
with self.lock:
if self.entries[entry]:
del self.entries[entry]
else:
raise ValueError()
def makeMethodCallQuery(self,pmethod,params):
query = self.xmpp.makeIqQuery(iq,"jabber:iq:rpc")
methodCall = ET.Element('methodCall')
methodName = ET.Element('methodName')
methodName.text = pmethod
methodCall.append(methodName)
methodCall.append(params)
query.append(methodCall)
return query
def makeIqMethodCall(self,pto,pmethod,params):
iq = self.xmpp.makeIqSet()
iq.set('to',pto)
iq.append(self.makeMethodCallQuery(pmethod,params))
return iq
def makeIqMethodResponse(self,pto,pid,params):
iq = self.xmpp.makeIqResult(pid)
iq.set('to',pto)
query = self.xmpp.makeIqQuery(iq,"jabber:iq:rpc")
methodResponse = ET.Element('methodResponse')
methodResponse.append(params)
query.append(methodResponse)
return iq
def makeIqMethodError(self,pto,id,pmethod,params,condition):
iq = self.xmpp.makeIqError(id)
iq.set('to',pto)
iq.append(self.makeMethodCallQuery(pmethod,params))
iq.append(self.xmpp['xep_0086'].makeError(condition))
return iq
def call_remote(self, pto, pmethod, *args):
pass
#calls a remote method. Returns the id of the Iq.
def _callMethod(self,xml):
pass
def _callResult(self,xml):
pass
def _callError(self,xml):
pass
| gpl-2.0 | 3,609,349,652,676,770,300 | 25.798535 | 99 | 0.652919 | false |
pearu/f2py | extgen/scalar_rules.py | 1 | 17398 | """
TODO: npy_void
"""
from __future__ import absolute_import
import numpy
from .capi import sctypebits
c_char = dict(\
ctype = 'signed char',
init = ' = 0',
argument_format = 'b',
return_format = 'b',
argument_title = 'a python integer (converting to C signed char)',
return_title = 'a python integer (converting from C signed char)',
init_container = 'CDeclaration',
)
c_short = dict(\
ctype = 'short int',
init = ' = 0',
argument_format = 'h',
return_format = 'h',
argument_title = 'a python integer (converting to C short int)',
return_title = 'a python integer (converting from C short int)',
init_container = 'CDeclaration',
)
c_int = dict(\
ctype = 'int',
init = ' = 0',
argument_format = 'i',
return_format = 'i',
argument_title = 'a python integer (converting to C int)',
return_title = 'a python integer (converting from C int)',
init_container = 'CDeclaration',
)
c_long = dict(\
ctype = 'long',
init = ' = 0',
argument_format = 'l',
return_format = 'l',
argument_title = 'a python integer (converting to C long int)',
return_title = 'a python integer (converting from C long int)',
init_container = 'CDeclaration',
)
c_long_long = dict(\
ctype = 'PY_LONG_LONG',
init = ' = 0',
argument_format = 'L',
return_format = 'L',
argument_title = 'a python integer (converting to C PY_LONG_LONG)',
return_title = 'a python integer (converting from C PY_LONG_LONG)',
init_container = 'CDeclaration',
)
c_unsigned_char = dict(\
ctype = 'unsigned char',
init = ' = 0',
argument_format = 'B',
return_format = 'B',
argument_title = 'a python integer (converting to C unsigned char)',
return_title = 'a python integer (converting from C unsigned char)',
init_container = 'CDeclaration',
)
c_unsigned_short = dict(\
ctype = 'unsigned short int',
init = ' = 0',
argument_format = 'H',
return_format = 'H',
argument_title = 'a python integer (converting to C unsigned short int)',
return_title = 'a python integer (converting from C unsigned short int)',
init_container = 'CDeclaration',
)
c_unsigned_int = dict(\
ctype = 'unsigned int',
init = ' = 0',
argument_format = 'I',
return_format = 'I',
argument_title = 'a python integer (converting to C unsigned int)',
return_title = 'a python integer (converting from C unsigned int)',
init_container = 'CDeclaration',
)
c_unsigned_long = dict(\
ctype = 'unsigned long',
init = ' = 0',
argument_format = 'k',
return_format = 'k',
argument_title = 'a python integer (converting to C unsigned long int)',
return_title = 'a python integer (converting from C unsigned long int)',
init_container = 'CDeclaration',
)
c_unsigned_long_long = dict(\
ctype = 'unsigned PY_LONG_LONG',
init = ' = 0',
argument_format = 'K',
return_format = 'K',
argument_title = 'a python integer (converting to C unsigned PY_LONG_LONG)',
return_title = 'a python integer (converting from C unsigned PY_LONG_LONG)',
init_container = 'CDeclaration',
)
c_float = dict(\
ctype = 'float',
init = ' = 0.0',
argument_format = 'f',
return_format = 'f',
argument_title = 'a python floating point number (converting to C float)',
return_title = 'a python floating point number (converting from C float)',
init_container = 'CDeclaration',
)
c_double = dict(\
ctype = 'double',
init = ' = 0.0',
argument_format = 'd',
return_format = 'd',
argument_title = 'a python floating point number (converting to C double)',
return_title = 'a python floating point number (converting from C double)',
init_container = 'CDeclaration',
)
c_Py_complex = dict(\
ctype = 'Py_complex',
argument_format = 'D',
return_format = 'D',
init = ' = {0.0, 0.0}',
argument_title = 'a python complex number (converting to C Py_complex structure)',
return_title = 'a python complex number (converting from C Py_complex structure)',
init_container = 'CDeclaration',
)
c_Py_ssize_t = dict(\
ctype = 'Py_ssize_t',
argument_format = 'n',
return_format = 'n',
init = ' = 0',
argument_title = 'a python integer (converting to C Py_ssize_t)',
return_title = 'a python integer (converting from C Py_ssize_t)',
init_container = 'CDeclaration',
)
c_char1 = dict(\
ctype = 'char',
argument_format = 'c',
return_format = 'c',
init = " = '\\0'",
argument_title = 'a python character (converting to C char)',
return_title = 'a python character (converting from C char)',
init_container = 'CDeclaration',
)
c_const_char_ptr = dict(\
ctype = 'const char *',
argument_format = 'z',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string or Unicode or None object (converting to C const char *)',
return_title = 'a python string or None (converting from C char *)',
)
c_char_ptr = dict(\
ctype = 'char *',
argument_format = 'O&',
argument_converter = 'pyobj_to_char_ptr',
clean_argument_converter = 'clean_pyobj_to_char_ptr',
return_format = 'z',
init = ' = NULL',
argument_title = 'a python string (converting to C char *)',
return_title = 'a python string or None (converting from C char *)',
)
c_Py_UNICODE_ptr = dict(\
ctype = 'Py_UNICODE*',
argument_format ='u',
return_format = 'u',
init = ' = NULL',
argument_title = 'a python Unicode object (converting to C Py_UNICODE*)',
return_title = 'a python Unicode object or None (converting from C Py_UNICODE*)'
)
py_bool = dict(\
ctype = 'PyBoolObject*',
init = ' = NULL',
pyctype = 'PyBool_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python bool',
)
py_int = dict(\
ctype = 'PyObject*',
ctype_exact = 'PyIntObject*',
init = ' = NULL',
pyctype = 'PyInt_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python integer',
init_container = 'FromPyObj',
refcounted = True,
)
py_long = dict(\
ctype = 'PyLongObject*',
init = ' = NULL',
pyctype = 'PyLong_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python long integer'
)
py_float = dict(\
ctype = 'PyObject*',
init = ' = NULL',
pyctype = 'PyFloat_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python floating point number',
init_container = 'FromPyObj',
refcounted = True,
)
py_complex = dict(\
ctype = 'PyComplexObject*',
init = ' = NULL',
pyctype = 'PyComplex_Type',
argument_format = 'O!',
return_format = 'N',
title = 'a python complex number'
)
py_str = dict(\
ctype = 'PyStringObject*',
init = ' = NULL',
argument_format = 'S',
return_format = 'N',
title = 'a python string'
)
py_unicode = dict(\
ctype = 'PyUnicodeObject*',
init = ' = NULL',
argument_format = 'U',
return_format = 'N',
title = 'a python Unicode object'
)
py_buffer = dict(\
pyctype = 'PyBuffer_Type',
ctype = 'PyBufferObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python buffer')
py_tuple = dict(\
pyctype = 'PyTuple_Type',
ctype = 'PyTupleObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python tuple')
py_list = dict(\
pyctype = 'PyList_Type',
ctype = 'PyListObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python list')
py_dict = dict(\
pyctype = 'PyDict_Type',
ctype = 'PyDictObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python dictionary')
py_file = dict(\
pyctype = 'PyFile_Type',
ctype = 'PyFileObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python file object')
py_instance = dict(\
pyctype = 'PyInstance_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance object')
py_function = dict(\
pyctype = 'PyFunction_Type',
ctype = 'PyFunctionObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python function object')
py_method = dict(\
pyctype = 'PyMethod_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python instance method object')
py_module = dict(\
pyctype = 'PyModule_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python module object')
py_iter = dict(\
pyctype = 'PySeqIter_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python iterator')
py_property = dict(\
pyctype = 'PyProperty_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python property attribute')
py_slice = dict(\
pyctype = 'PySlice_Type',
ctype = 'PyObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python slice object')
py_cell = dict(\
pyctype = 'PyCell_Type',
ctype = 'PyCellObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL')
py_generator = dict(\
pyctype = 'PyGen_Type',
ctype = 'PyGenObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL')
py_set = dict(\
pyctype = 'PySet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python set object')
py_frozenset = dict(\
pyctype = 'PyFrozenSet_Type',
ctype = 'PySetObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python frozenset object')
py_cobject = dict(\
ctype = 'PyCObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a PyCObject object')
py_type = dict(\
pyctype = 'PyType_Type',
ctype = 'PyTypeObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a python type object')
py_object = dict(\
ctype = 'PyObject*',
argument_format = 'O',
return_format = 'N',
init = ' = NULL',
title = 'a python object')
numeric_array = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a Numeric array',
require_numeric = True,
)
numpy_ndarray = dict(\
pyctype = 'PyArray_Type',
ctype = 'PyArrayObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy array',
require_numpy = True,
)
numpy_descr = dict(\
pyctype = 'PyArrayDescr_Type',
ctype = 'PyArray_Descr*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
numpy_ufunc = dict(\
pyctype = 'PyUFunc_Type',
ctype = 'PyUFuncObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
title = 'a numpy universal function',
require_numpy = True,
)
numpy_iter = dict(\
pyctype = 'PyArrayIter_Type',
ctype = 'PyArrayIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
numpy_multiiter = dict(\
pyctype = 'PyArrayMultiIter_Type',
ctype = 'PyArrayMultiIterObject*',
argument_format = 'O!',
return_format = 'N',
init = ' = NULL',
require_numpy = True,
)
npy_bool = dict(\
ctype = 'npy_bool',
init = ' = 0',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_bool',
return_format = 'O&',
return_converter = 'pyobj_from_npy_bool',
argument_title = 'a python truth value (converting to C npy_bool)',
return_title = 'a numpy bool',
require_numpy = True,
init_container = 'CDeclaration',
)
numpy_bool = dict(\
ctype = 'PyBoolScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_bool',
return_format = 'N',
require_numpy = True,
argument_title = 'a python bool (converting to C PyBoolScalarObject*)',
return_title = 'a numpy bool',
)
numpy_string = dict(\
ctype = 'PyStringScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_string',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyStringScalarObject*)',
return_title = 'a numpy string',
)
numpy_unicode = dict(\
ctype = 'PyUnicodeScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_unicode',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyUnicodeScalarObject*)',
return_title = 'a numpy unicode',
)
npy_string = dict(\
typedef = 'npy_string',
ctype = 'npy_string',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_string',
clean_argument_converter = 'clean_pyobj_to_npy_string',
return_format = 'O&',
return_converter = 'pyobj_from_npy_string',
require_numpy = True,
argument_title = 'a python string (converting to C npy_string)',
return_title = 'a numpy string',
)
npy_unicode = dict(\
typedef = 'npy_unicode',
ctype = 'npy_unicode',
init = ' = {NULL, 0}',
argument_format = 'O&',
argument_converter = 'pyobj_to_npy_unicode',
clean_argument_converter = 'clean_pyobj_to_npy_unicode',
return_format = 'O&',
return_converter = 'pyobj_from_npy_unicode',
require_numpy = True,
argument_title = 'a python string (converting to C npy_unicode)',
return_title = 'a numpy unicode',
)
numpy_void = dict(\
ctype = 'PyVoidScalarObject*',
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_numpy_void',
return_format = 'N',
require_numpy = True,
argument_title = 'a python string (converting to C PyVoidScalarObject*)',
return_title = 'a numpy void',
)
c_PY_LONG_LONG = c_long_long
c_unsigned_PY_LONG_LONG = c_unsigned_long_long
numpy_bool_ = numpy_bool
numpy_str_ = numpy_str = numpy_string0 \
= numpy_string_ = numpy_string
numpy_unicode0 = numpy_unicode_ = numpy_unicode
npy_str = npy_string
numpy_void0 = numpy_void
def _generate():
scalars = {}
for Cls_name, bits_list in list(sctypebits.items()):
if Cls_name=='Complex':
init = ' = {0.0, 0.0}'
t = 'complex'
elif Cls_name=='Float':
init = ' = 0.0'
t = 'floating point number'
else:
init = ' = 0'
t = 'integer'
for bits in bits_list:
n = Cls_name.lower() + str(bits)
Cls = Cls_name + str(bits)
ctype = 'npy_' + n
scalars[ctype] = dict(
ctype = ctype,
pycype = None,
init = init,
argument_format = 'O&',
argument_converter = 'pyobj_to_'+ctype,
return_format = 'O&',
return_converter = 'pyobj_from_'+ctype,
require_numpy = True,
argument_title = 'a python %s (converting to C %s)' % (t,ctype),
return_title = 'a numpy %s-bit %s' % (bits, t),
init_container = 'CDeclaration',
)
ctype = 'Py%sScalarObject*' % (Cls)
ctype_name = 'numpy_' + n
scalars[ctype_name] = dict(
ctype = ctype,
pyctype = None,
init = ' = NULL',
argument_format = 'O&',
argument_converter = 'pyobj_to_'+ctype_name,
return_format = 'N',
require_numpy = True,
argument_title = 'a python %s (converting to C %s)' % (t,ctype),
return_title = 'a numpy %s-bit %s' % (bits, t)
)
return scalars
for _k, _d in list(_generate().items()):
exec(_k + ' = _d')
npy_intp = eval('npy_'+numpy.intp.__name__)
npy_int_ = eval('npy_'+numpy.int_.__name__)
npy_float = eval('npy_'+numpy.float_.__name__)
npy_complex = eval('npy_'+numpy.complex_.__name__)
if 0:
array = dict(
c_int = dict(\
ctype='int*',
init=' = NULL',
title='a C int array',
input_title = 'a python integer sequence (converting to C int*)',
input_format = 'O',
input_object = '&%(varname)s_py',
input_frompyobj = dict(\
required = '%(varname)s_arr = PyArray_FROMANY(%(varname)s_py, NPY_INT, %(rank)s, %(rank)s, %(requirements)s);\n'
'if (%(varname)s_arr != NULL) {\n'
' %(varname)s = PyArray_DATA(%(varname)s_arr);',
),
input_cleanfrompyobj = dict(\
required = '} /*if (%(varname)s_arr != NULL)*/'
),
output_title = 'a python integer sequence (converting from C int*)',
output_format = 'N',
output_object = '%(varname)s_arr'
),
numpy_int8 = dict(\
ctype='npy_int8*',
init=' = NULL',
title='a C npy_int8 array'
)
)
| bsd-3-clause | 2,982,873,471,366,255,000 | 26.659777 | 118 | 0.575583 | false |
iamRusty/RustyPE | 18/try.py | 1 | 1347 | """
===============================================================
Trial Program for PE 18
Goal: Find the greatest path-sum.
https://projecteuler.net/problem=18
Note: The program uses FILE IO
===============================================================
"""
_FILE_NAME = "data.pe"
def extract(fileName):
f = open(fileName, "r")
data = f.readlines()
number = []
for line in data:
row = line.split()
number.append(row)
count = 0
while (count < len(number)):
number[count] = [int(i) for i in number[count]]
count += 1
f.close()
return number
#Credits to Uziel Agub for introducing this method. I'm amazed
def maxPathSum(number):
numOfRows = len(number) - 1
count = numOfRows - 1
while (count > -1):
curCol = 0
while (curCol < len(number[count])):
if (number[count + 1][curCol] > number[count + 1][curCol + 1]):
number[count][curCol] += number[count + 1][curCol]
else:
number[count][curCol] += number[count + 1][curCol + 1]
curCol += 1
count -= 1
return number[0][0]
def main():
number = extract(_FILE_NAME)
answer = maxPathSum(number)
print(answer)
main()
| mit | -6,180,291,786,295,823,000 | 26.659574 | 75 | 0.472903 | false |
frutik/formunculous | formunculous/models.py | 1 | 18803 | # This file is part of formunculous.
#
# formunculous is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# formunculous is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with formunculous. If not, see <http://www.gnu.org/licenses/>.
# Copyright 2009-2011 Carson Gee
from django.db import models
from django.contrib.auth.models import User, Group
from django.contrib.sites.models import Site
from django.contrib.contenttypes.models import ContentType
from django.contrib.localflavor.us.forms import USStateSelect
from django.contrib.localflavor.us.models import PhoneNumberField
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django import forms
from django.conf import settings
from formunculous.widgets import *
from formunculous.storage import *
from formunculous import fields
import datetime
# Application Definition Models
class CurrentManager(models.Manager):
    """
    Manager for ApplicationDefinition adding site-aware query
    shortcuts used throughout the views.
    """

    def current(self, **kwargs):
        """
        Top-level definitions published on the current site whose
        start/stop window contains the present moment.  Any extra
        keyword filters are passed straight through to ``filter``.
        """
        queryset = self.get_query_set()
        return queryset.filter(
            start_date__lte=datetime.datetime.now(),
            stop_date__gte=datetime.datetime.now(),
            parent=None,
            sites=Site.objects.get_current(),
            **kwargs)

    def reviewer(self, user, **kwargs):
        """
        Top-level, non-email-only definitions on the current site for
        which ``user`` is listed as a reviewer.
        """
        queryset = self.get_query_set()
        return queryset.filter(
            reviewers=user,
            email_only=False,
            parent=None,
            sites=Site.objects.get_current(),
            **kwargs)
class ApplicationDefinition(models.Model):
    """
    A fillable form definition: its publication window, access rules,
    reviewers, and the sites it is published on.
    """
    # Site differentiation -- fail fast with a clear message when the
    # sites framework is enabled but SITE_ID is not configured.
    try:
        sid = settings.SITE_ID
    except AttributeError:
        from django.core.exceptions import ImproperlyConfigured
        raise ImproperlyConfigured("You're using the Django \"sites framework\"\
                                without having set the SITE_ID setting. Create a site\
                                in your database and set the SITE_ID setting to fix\
                                this error.")
    sites = models.ManyToManyField(Site, default=(sid,))

    # Add an optional recursive relation to enable creating sub forms
    # to create one-to-many relations for applications
    parent = models.ForeignKey('self', null=True, blank=True)

    name = models.CharField(max_length=150)
    owner = models.EmailField(_('Owner E-mail'))
    notify_owner = models.BooleanField(help_text="Email the owner each time \
                                      an application is submitted")
    slug = models.SlugField(_('slug'),unique=True)
    description = models.TextField(blank=True)
    # BUGFIX: pass the callable itself (not its result) so the default
    # is evaluated when a row is created rather than being frozen at
    # the moment this module was imported.
    start_date = models.DateTimeField(default=datetime.datetime.now,
                                 help_text=_("The date the application \
                                 will first be visible to user."))
    stop_date = models.DateTimeField(default=datetime.datetime.now,
                                 help_text=_("The date the application \
                                 will no longer be available to be \
                                 filled out"))
    authentication = models.BooleanField(help_text=_("Require the applicant \
                         to authenticate before using the application?"))
    # New in 1.2 (requires sql patch for existing installs)
    authentication_multi_submit = models.BooleanField(
        "Multiple Submissions (Authenticated)",
        help_text="Allow an authenticated user to submit multiple forms\
        (applicable only if the form requires authentication")
    reviewers = models.ManyToManyField(User, null=True, blank=True)
    notify_reviewers = models.BooleanField(help_text="Email every reviewer each\
        time an application is submitted")
    email_only = models.BooleanField(help_text=_("If checked, completed \
                       applications will not be stored in the database but \
                       emailed to the owner/reviewers (dependent on whether \
                       those notification flags are set"))

    objects = CurrentManager()

    class Meta:
        ordering = ["stop_date"]

    def __unicode__(self):
        return( u'%s' % self.name )

    def is_active(self):
        """
        True while now() falls inside the [start_date, stop_date]
        window.  Boundaries are inclusive, matching
        CurrentManager.current(), which filters with
        start_date__lte / stop_date__gte.
        """
        now = datetime.datetime.now()
        return self.start_date <= now <= self.stop_date
class SubApplicationDefinition(models.Model):
    """
    Per-parent configuration of a repeatable sub-form: how many
    instances the applicant must (or may) complete, and how many blank
    extras are shown by default.
    """
    # Get subform name and slug from the ApplicationDefinition
    app_definition = models.ForeignKey(ApplicationDefinition)
    min_entries = models.IntegerField(default = 0,
                                      help_text = _("The minimum number of\
                                      instances of this sub-form the\
                                      user must fill out. 0 if none."))
    max_entries = models.IntegerField(default = 0,
                                      help_text = _("The maximum number of\
                                      instances of this sub-form the\
                                      user must fill out. 0 if\
                                      there is no limit."))
    extras = models.IntegerField(default = 3,
                                 help_text = _("The number of extra forms to\
                                 show by default on an\
                                 application"))
# Application data types/fields
class FieldDefinition(models.Model):
    """
    The base model for data type field definitions.

    Concrete field classes (TextField, DateField, ...) append a
    ``(class name, display label)`` pair to ``field_types`` at import
    time; that tuple is the registry of available field types.
    """
    # Populated by each BaseField subclass below via
    # ``FieldDefinition.field_types += (...)`` when this module loads.
    field_types = ()

    type = models.CharField(_('Type'),max_length=250,)
    application = models.ForeignKey(ApplicationDefinition)
    pre_text = models.TextField(blank = True,
                                help_text=_("The html here is prepended \
                                to the form field."))
    post_text = models.TextField(blank = True,
                                 help_text=_("The html here is appended \
                                 to the form field."))
    # Fields are laid out by (page, order) -- see Meta.ordering below.
    page = models.IntegerField(default=1)
    order = models.IntegerField()
    group = models.BooleanField(default=False,
                                help_text=_("Group this with nearby\
                                fields using an indented and \
                                colored background."))
    label = models.CharField(max_length=250)
    slug = models.SlugField()
    help_text = models.TextField(blank = True,
                                 help_text=_("The text here is added \
                                 to the defined field to help the \
                                 user understand its purpose."))
    require = models.BooleanField(default=True,
                                  help_text=_("This requires that value be \
                                  entered for this field on \
                                  the application form."))
    reviewer_only = models.BooleanField(help_text=_("Make this field viewable\
         only to the reviewer of an application, not the applicant."))
    header = models.BooleanField(default=True,
                                 help_text=_("If this is set to true the field\
                                 will be used as a header in the\
                                 reviewer view."))
    # New in 1.2 (requires sql patch for existing installs)
    multi_select = models.BooleanField(_("Allow Multiple Selections"),
                                       help_text=_("If selected, the user\
                                       will be allowed to check multiple\
                                       options from dropdown selections. Be\
                                       careful about which field type this is\
                                       used for (generally only use large\
                                       text area fields)."))
    use_radio = models.BooleanField(_("Use Radio Buttons"),
                                    help_text=_("Dropdown selections\
                                    will be represented with radio buttons\
                                    for single select dropdowns and\
                                    check boxes for multi-select.\
                                    "))
    class Meta:
        ordering = ['page', 'order']
    def __unicode__(self):
        return( u'%s.%s: %s' % (self.page, self.order, self.label) )
class DropDownChoices(models.Model):
    """One selectable (text, value) option attached to a field
    definition that renders as a drop-down."""
    field_definition = models.ForeignKey(FieldDefinition)
    text = models.CharField(max_length = 255)
    value = models.CharField(max_length = 255)
    class Meta:
        ordering = ['value']
# Instance Models (field and application)
class Application(models.Model):
    """
    One user's submission (or in-progress draft) of an application
    definition.  Field values live in the per-type value models that
    extend BaseField and reference this row through their ``app`` FK.
    """
    # Add an optional recursive relation to enable creating sub forms
    # to create one-to-many relations for applications
    parent = models.ForeignKey('self', null=True, blank=True)
    user = models.ForeignKey(User, blank=True, null=True)
    submission_date = models.DateTimeField(null=True, blank=True)
    app_definition = models.ForeignKey(ApplicationDefinition)

    def get_field_values(self, reviewer_fields=False, all_fields=False):
        """
        Return a list of dictionaries describing every field defined
        for this application and its stored value.

        Each dictionary carries: label, data (the stored value, or
        None when no value record exists yet), type, slug and
        multiselect.

        By default reviewer-only fields are excluded; pass
        reviewer_fields=True for only those fields, or all_fields=True
        for every field regardless of the reviewer_only flag.
        """
        # Introspect the module namespace to resolve the per-type value
        # model named by FieldDefinition.type (e.g. "TextField").
        import formunculous.models as funcmodels

        if all_fields:
            field_set = self.app_definition.fielddefinition_set.all()
        else:
            field_set = self.app_definition.fielddefinition_set.filter(
                reviewer_only=reviewer_fields)

        fields = []
        for field_def in field_set:
            field_model = getattr(funcmodels, field_def.type)
            # Build a complete record up front so missing values still
            # expose the same keys as stored ones (data stays None).
            field_dict = {'label': field_def.label,
                          'data': None,
                          'type': field_def.type,
                          'slug': field_def.slug,
                          'multiselect': field_def.multi_select}
            try:
                field_val = field_model.objects.get(app=self,
                                                    field_def=field_def)
                field_dict['data'] = field_val.value
            except field_model.DoesNotExist:
                # No value stored yet for this field; keep data=None.
                pass
            fields.append(field_dict)
        return fields

    def get_field_value(self, field_slug):
        """
        Return the stored value of the field whose definition slug is
        ``field_slug`` on this application instance, or None when
        either the field definition or its value record is missing.
        """
        # Introspect the module namespace (see get_field_values).
        import formunculous.models as funcmodels

        try:
            field_def = FieldDefinition.objects.get(
                slug=field_slug,
                application=self.app_definition)
        except FieldDefinition.DoesNotExist:
            return None

        field_model = getattr(funcmodels, field_def.type)
        try:
            return field_model.objects.get(app=self,
                                           field_def=field_def).value
        except field_model.DoesNotExist:
            return None
# Define empty permission model for using builder
class Form(models.Model):
    """
    Intentionally empty model: per the module comment above, it exists
    only so custom permissions for the form builder can be declared.
    """
    class Meta:
        permissions = (
            ("can_delete_applications", "Can delete applications"),
        )
class BaseField(models.Model):
    """
    This is the base model for all field types.  Each unique field
    type must extend this model for the field to work properly.
    """
    name = 'Base'
    # ``icone`` is a historical misspelling kept so any existing
    # references keep working; the correctly spelled ``icon`` default
    # is added because every concrete subclass defines ``icon``.
    icone = None
    icon = None
    field_def = models.ForeignKey(FieldDefinition)
    app = models.ForeignKey(Application)
    # Whether the form builder may attach DropDownChoices to fields of
    # this type; subclasses override as appropriate.
    allow_dropdown = False
class TextField(BaseField):
    """
    Single-line text input stored as a CharField capped at 255
    characters (the portable maximum for most databases); no other
    input restrictions.
    """
    # Register this type in the form-builder type list.
    FieldDefinition.field_types+=('TextField','Text Input',),
    name = 'Text Input'
    icon = 'formunculous/img/field_icons/text_input.png'
    value = models.CharField(max_length=255, blank=True, null=True)
    widget = None
    allow_dropdown = True
class TextArea(BaseField):
    """
    Multi-line text area stored in an unbounded TextField.
    """
    # Register this type in the form-builder type list.
    FieldDefinition.field_types+=('TextArea', 'Large Text Area',),
    name= "Large Text Area"
    icon = 'formunculous/img/field_icons/text_area.png'
    value = models.TextField(blank=True, null=True)
    widget = None
    allow_dropdown = True
class BooleanField(BaseField):
    """
    A simple yes/no (checkbox) field, stored as a boolean defaulting
    to False.
    """
    # Register this type in the form-builder type list.
    FieldDefinition.field_types+=('BooleanField', 'Yes/No Question',),
    name = "Yes/No Question/Checkbox"
    icon = 'formunculous/img/field_icons/yes_no.png'
    value = models.BooleanField(blank=True, default=False)
    widget = None
    allow_dropdown = False
class EmailField(BaseField):
    """
    E-mail address input backed by Django's built-in EmailField.
    """
    # Register this type in the form-builder type list.
    FieldDefinition.field_types+=('EmailField', 'Email Address',),
    name = "Email Address"
    icon = 'formunculous/img/field_icons/email.png'
    value = models.EmailField(blank=True, null=True)
    widget = None
    allow_dropdown = True
class USPhoneNumber(BaseField):
    """
    U.S. phone number input backed by localflavor's PhoneNumberField.
    """
    # Register this type in the form-builder type list.
    FieldDefinition.field_types+=('USPhoneNumber', 'U.S. Phone Number',),
    name = "U.S. Phone Number"
    icon = 'formunculous/img/field_icons/phone.png'
    value = PhoneNumberField(null=True, blank=True)
    widget = None
    allow_dropdown = True
class USStateField(BaseField):
"""
Provides a dropdown selection of U.S. States and
provinces.
"""
FieldDefinition.field_types+=('USStateField', 'U.S. States',),
name = "U.S. States"
icon = 'formunculous/img/field_icons/usstate.png'
value = models.CharField(null=True, blank=True,
max_length="255")
widget = OptionalStateSelect
allow_dropdown = False
class USZipCodeField(BaseField):
FieldDefinition.field_types+=('USZipCodeField', 'U.S. Zipcode',),
name = "U.S. Zipcode"
icon = 'formunculous/img/field_icons/zipcode.png'
value = fields.USZipCodeModelField(null=True, blank=True)
widget = None
allow_dropdown = True
class DateField(BaseField):
"""
Uses a nice jquery widget for selecting a date.
"""
FieldDefinition.field_types+=('DateField', 'Date Input',),
name = "Date Input"
icon = 'formunculous/img/field_icons/date.png'
value = models.DateField(blank=True, null=True)
widget = DateWidget
allow_dropdown = True
class FloatField(BaseField):
"""
Float field. Accepts any decimal number basically
"""
FieldDefinition.field_types+=('FloatField', 'Decimal Number',),
name = "Decimal Number Field"
icon = 'formunculous/img/field_icons/decimal.png'
value = models.FloatField(blank=True, null=True)
widget = None
allow_dropdown = True
class IntegerField(BaseField):
"""
Integer field. Accepts any whole number + or -
"""
FieldDefinition.field_types+=('IntegerField', 'Whole Number',),
name = "Whole Number Field"
icon = 'formunculous/img/field_icons/wholenumber.png'
value = models.IntegerField(blank=True, null=True)
widget = None
allow_dropdown = True
class PositiveIntegerField(BaseField):
"""
Integer field. Accepts any whole number that is positive
"""
FieldDefinition.field_types+=('PositiveIntegerField',
'Positive Whole Number',),
name = "Positive Whole Number Field"
icon = 'formunculous/img/field_icons/positivewhole.png'
value = models.PositiveIntegerField(blank=True, null=True)
widget = None
allow_dropdown = True
class URLField(BaseField):
"""
URL field. Accepts any valid URL
"""
FieldDefinition.field_types+=('URLField', 'URL',),
name = "URL"
icon = 'formunculous/img/field_icons/url.png'
value = models.URLField(blank=True, null=True)
widget = None
allow_dropdown = True
class IPAddressField(BaseField):
"""
IP address field field. Accepts any valid IPv4 address.
"""
FieldDefinition.field_types+=('IPAddressField', 'IP Address',),
name = "IP Address"
icon = 'formunculous/img/field_icons/ipaddress.png'
value = models.IPAddressField(blank=True, null=True)
widget = None
allow_dropdown = True
# File Based Fields
class FileField(BaseField):
"""
This field accepts any file, regardless of type, and size
is limited by the Django settings
"""
FieldDefinition.field_types+=('FileField','File Upload',),
name = 'File Upload'
icon = 'formunculous/img/field_icons/file.png'
value = models.FileField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True,max_length=100)
widget = FileWidget
allow_dropdown = False
class ImageField(BaseField):
"""
This is a file field that only accepts common image formats.
"""
FieldDefinition.field_types+=('ImageField','Picture Upload',),
name = 'Picture Upload'
icon = 'formunculous/img/field_icons/picture.png'
value = models.ImageField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True, max_length=100)
widget = FileWidget
allow_dropdown = False
class DocumentField(BaseField):
"""
Validates common document mime-types/extensions
"""
FieldDefinition.field_types+=('DocumentField', 'Document Upload',),
name = "Document Upload"
icon = 'formunculous/img/field_icons/document.png'
value = fields.DocumentField(upload_to=upload_to_path,
storage=ApplicationStorage(),
blank=True, null=True, max_length=100)
widget = FileWidget
allow_dropdown = False
| gpl-3.0 | -5,708,087,654,961,886,000 | 32.516934 | 81 | 0.586236 | false |
f0rki/cb-multios | original-challenges/CGC_File_System/support/genpolls.py | 1 | 4100 | import struct
import time
import sys
import random
import string
def randString( z ):
s = ''
for i in range(z):
s += random.choice( string.lowercase + string.uppercase + string.digits)
return s
def delfile( dirs ):
### randomly select a directory then pull the file to remove
fl = ''
cnt = 0
while fl == '' and cnt < 30:
z = random.choice(dirs)
cnt += 1
if len(z[1]) == 0:
continue
f = random.choice(z[1])
i = z[1].index(f)
del z[1][i]
fl = f[0]
if fl == '':
return ''
data = '\t\t<write echo="ascii"><data>REPO\\x%.2x%s</data></write>\n' %(len(fl), fl)
i = fl.rfind('/')
fl = fl[i+1:]
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] %s removed\\n</data></match></read>\n' %(fl)
return data
def addfile( dirs ):
### Select a base dir to add the file
td = random.choice( dirs )
base_dir = td[0]
maxlen = 10
if (254 - len(base_dir)) < 10:
return ''
n = randString( random.randint(4, 10) )
newfile = base_dir + '/' + n
fdata = randString( random.randint(20,100) )
z = 'SEND\\x%.2x%s' %(len(newfile), newfile)
z += '\\x%.2x\\x%.2x%s' %( len(fdata)&0xff, (len(fdata)>>8)&0xff, fdata )
data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(z)
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] File received: %s\\n</data></match></read>\n' %(newfile)
td[1].append( [newfile, fdata] )
return data
def getFile( dirs ):
### Select a base dir with a file
t = []
cnt = 0
while t == [] and cnt < 30:
z = random.choice( dirs )
if len(z[1]) != 1:
t = z[1]
break
cnt += 1
if t == []:
return ''
fl = random.choice(t)
req = 'RECV\\x%.2x%s' %( len(fl[0]), fl[0] )
data = '\t\t<write echo="ascii"><data>%s</data></write>\n' %(req)
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[DATA] %s\\n</data></match></read>\n' %(fl[1])
return data
def addDir( dirs ):
### Select a base dir to add
td = random.choice( dirs )
base_dir = td[0]
maxlen = 10
if (254 - len(base_dir)) < 10:
return ''
n = randString( random.randint(4, 10) )
newdir = base_dir + '/' + n
z = 'NDIR\\x%.2x%s' %(len(newdir), newdir)
data = '\t\t<write echo="ascii"><data>' + z + '</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Added new directory\\n</data></match></read>\n'
dirs.append( [ newdir, [] ] )
return data
def addPrint( dirs ):
c = []
for x in dirs:
for y in x[1]:
c.append( [ y[0], 'File', str(len(y[1]))] )
if x[0] == '':
continue
c.append( [x[0], 'Dir', '' ])
c.sort(key=lambda x: x[0])
data = '\t\t<write echo="ascii"><data>PRNT</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Listing files\\n</data></match></read>\n'
for y in c:
if y[1] == 'File':
l = ' ' + y[2]
else:
l = ''
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>%s %s%s\\n</data></match></read>\n' %(y[0], y[1], l)
return data
def addStop():
data = '\t\t<write echo="ascii"><data>STOP</data></write>\n'
data += '\t\t<read echo="ascii"><delim>\\n</delim><match><data>[INFO] Terminating\\n</data></match></read>\n'
return data
def newpoll( ind ):
dir_structure = [ ['', [] ] ]
data = '<?xml version="1.0" standalone="no" ?>\n'
data += '<!DOCTYPE pov SYSTEM "/usr/share/cgc-replay/replay.dtd">\n'
data += '<pov>\n'
data += '\t<cbid>CROMU_00033</cbid>\n'
data += '\t<replay>\n'
flist = [addDir, addfile, getFile]
repo = 0
for i in range(0, random.randint(105, 200)):
if i == 100:
flist.append(delfile)
b = random.choice( flist )
if b == delfile:
repo = 1
data += b( dir_structure )
### Only add this in if a file has been not been deleted
if repo == 0:
data += addPrint( dir_structure )
data += addStop()
data += '\t</replay>\n'
data += '</pov>\n'
f = open("./poller/for-testing/POLL%.5d.xml" %(ind), "wb")
f.write(data)
f.close()
if __name__ == '__main__':
seed = time.time()
if len(sys.argv) == 2:
seed = int(sys.argv[1])
random.seed(seed)
for i in range(1, 1000):
newpoll(i)
| mit | 6,480,812,888,750,274,000 | 19.707071 | 127 | 0.565366 | false |
alwayssayyes/incrediblesy | app/views.py | 1 | 9016 | from flask import render_template, Flask, request, redirect, url_for, current_app
from app import app
from urllib2 import urlopen
from bs4 import BeautifulSoup
from flaskext import wtf
from flaskext.wtf import Form, TextField, TextAreaField, SubmitField, validators, ValidationError
from google.appengine.ext import db
from database import Member
class Photo(db.Model):
photo = db.BlobProperty()
class Tweet(db.Model):
photo = db.BlobProperty()
text = db.StringProperty()
class ContactForm(Form):
name = TextField("Name", [validators.Required("Please enter your name.")])
email = TextField(
"Email", [validators.Required("Please enter your email address."), validators.Email("Please enter valid email address.")])
subject = TextField(
"Subject", [validators.Required("Please enter a subject.")])
message = TextAreaField(
"Message", [validators.Required("Please enter a message.")])
submit = SubmitField("Send")
@app.route('/', methods=["GET", "POST"])
def main():
form = None
return render_template('photo.html', form=form)
@app.route('/tweet', methods=["GET", "POST"])
def tweet():
if request.method == 'POST':
post_data = request.files.get('photo')
filestream = post_data.read()
post_dataa = request.form.get('text')
upload_data = Tweet()
upload_data.photo = db.Blob(filestream)
upload_data.text = post_dataa
upload_data.put()
datalist=Tweet.all()
url = url_for("shows", key=upload_data.key())
return render_template("photo.html", texta=post_dataa, url=url, Tweet=datalist)
else:
return render_template('photo.html')
@app.route('/upload', methods=["GET", "POST"])
def upload_db():
if request.method == 'POST':
post_data = request.files.get('photo')
filestream =post_data.read()
upload_data =Photo()
upload_data.photo =db.Blob(filestream)
upload_data.put()
url = url_for("shows", key=upload_data.key())
return render_template("photo.html", url=url)
else:
return render_template('photo.html')
@app.route('/show/<key>', methods=['GET'])
def shows(key):
uploaded_data = db.get(key)
return current_app.response_class(
uploaded_data.photo)
@app.route('/vali', methods=["GET", "POST"])
def vali():
form = ContactForm()
if request.method == 'POST':
if not form.validate():
return render_template('vali.html', form=form)
else:
return "Nice to meet you," + form.name.data + "!"
return render_template('vali.html', form=form)
# class news(Form):
# category = TextField("category", [validators.Required("Please enter another one")])
# submit = SubmitField("Send")
# @app.route('/crawlhw', methods=["GET", "POST"])
# def crawlhw():
# form = news()
# if request.method == 'POST':
# if not form.validate():
# return render_template('crawlhomework.html' )
# @app.route('/lotto', methods=['GET'])
# def lotto():
# if request.method == 'GET':
# if request.args.get('getlotto'):
# html = urlopen(
# 'http://www.nlotto.co.kr/lotto645Confirm.do?method=allWin').read()
# bs = BeautifulSoup(html)
# trs = bs.select('.tblType1 > tbody > tr')
# lottos = []
# for i in trs:
# tds = i.select('td')
# if len(tds) > 1:
# lotto = str(i.select('td')[1].get_text())
# lotto = lotto.split(', ')
# lottos.append(lotto)
# return render_template('haha.html', lottos=lotto)
# return render_template('haha.html')
# @app.route('/uploaddb', methods=['GET'])
# def uploaddb():
# return 0
# # @app.route('/')
# # def test():
# # return render_template('getpost.html')
# @app.route('/index')
# def index():
# return render_template('index.html')
# @app.route('/gugu', methods=['get'])
# def gugu():
# if request.method == 'GET':
# danstart = request.args.get('danstart')
# danend = request.args.get('danend')
# if danstart and danend:
# gugudan = []
# for dan in range(int(danstart), int(danend) + 1):
# for i in range(1, 10):
# gugudan.append(
# str(dan) + "x" + str(i) + "=" + str(dan + i))
# return render_template('haha.html', gugudan=gugudan)
# return render_template('haha.html')
# @app.route('/cal', methods=['GET', 'POST'])
# def cal():
# if request.method == 'POST':
# x = request.form.get('x')
# op = request.form.get('op')
# y = request.form.get('y')
# if x and op and y:
# result = 0
# if op == '+':
# result = int(x) + int(y)
# return render_template('haha.html', calresult=result)
# elif op == '-':
# result = int(x) - int(y)
# return render_template('haha.html', calresult=result)
# elif op == '*':
# result = int(x) * int(y)
# return render_template('haha.html', calresult=result)
# elif op == '/':
# result = float(x) / float(y)
# return render_template('haha.html', calresult=result)
# return render_template('haha.html')
# @app.route('/what', methods=['GET'])
# def what():
# if request.method == 'GET':
# if request.args.get('news'):
# pass
# @app.route('/')
# @app.route('/index')
# def index():
# return render_template("photo.html")
# @app.route('/practice', methods=["GET", "POST"])
# def practice():
# if request.method == 'POST':
# post_data = request.files.get('photo')
# filestream = post_data.read()
# post_dataa = request.form.get('text')
# upload_data = Database()
# upload_data.photo = db.Blob(filestream)
# upload_data.text = post_dataa
# upload_data.put()
# datalist=Database.all()
# url = url_for("shows", key=upload_data.key())
# return render_template("photo.html", texta=post_dataa, Database=datalist)
# else:
# return render_template('photo.html')
# @app.route('/show/<key>', methods=['GET'])
# def shows(key):
# uploaded_data = db.get(key)
# return current_app.response_class(
# uploaded_data.photo)
# @app.route('/', methods=['GET','POST'])
# def show_entries():
# members = Member.all()
# return render_template("practice.html", members=members)
# @app.route('/add_entry', methods=['POST'])
# def add_entry():
# userId = request.form['id']
# userPasswd = request.form['passwd']
# for member in Member.all():
# if userId == member.getId():
# return "failed"
# member = Member(userId=userId, userPasswd=userPasswd)
# member.put()
# return "OK"
# @app.route('/getpost', methods=['GET', 'POST'])
# def getpost():
# get=None
# post=None
# if request.args:
# get = request.args.get['getget']
# if request.form:
# post = request.form['postpost']
# return render_template('getpost.html',
# get_variable = get, post_variable = post)
# )
# @app.route('/crawl', methods=['GET','POST'])
# def crawl():
# if request.method == 'POST' and request.form:
# address = request.form.get('crawl')
# htmltext = urllib.urlopen(address).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# result=[]
# for tag in soup.select(".title"):
# result.append(tag.get_text())
# return render_template('getpost.html', result=result)
# else:
# return render_template('getpost.html')
# @app.route('/yeahyeah')
# def ohyeah():
# return render_template('iamsoyoung.html')
# @app.route('/getpost',methods=['GET','POST'])
# def getpost():
# get_value=None
# if request.method=='GET':
# if 'getget' in request.args:
# get_value=request.args.get('getget')
# if get_value != 'http://pgr21.com/pb/pb.php?id=freedom':
# return render_template('listshow.html',error='URL not found')
# htmltext = urllib.urlopen(get_value).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# authors = []
# for tag in soup.select(".tdname"):
# authors.append(tag.get_text())
# return render_template('listshow.html',
# list=authors)
# return render_template('getpost.html')
# @app.route('/getpost2',methods=['GET','POST'])
# def getpost2():
# get_value=None
# if request.method=='POST':
# if request.form and ('postpost' in request.form):
# get_value=request.form['postpost']
# htmltext = urllib.urlopen(get_value).read()
# soup = BeautifulSoup(htmltext, from_encoding="utf-8")
# authors = []
# for tag in soup.select(".tdname"):
# authors.append(tag.get_text())
# return render_template('listshow.html',
# list=authors)
# return render_template('getpost.html') | apache-2.0 | 86,643,763,824,553,280 | 28.371336 | 130 | 0.581522 | false |
auduny/home-assistant | tests/components/google_assistant/test_trait.py | 1 | 41902 | """Tests for the Google Assistant traits."""
from unittest.mock import patch, Mock
import pytest
from homeassistant.components import (
binary_sensor,
camera,
cover,
fan,
input_boolean,
light,
lock,
media_player,
scene,
script,
switch,
vacuum,
group,
)
from homeassistant.components.climate import const as climate
from homeassistant.components.google_assistant import (
trait, helpers, const, error)
from homeassistant.const import (
STATE_ON, STATE_OFF, ATTR_ENTITY_ID, SERVICE_TURN_ON, SERVICE_TURN_OFF,
TEMP_CELSIUS, TEMP_FAHRENHEIT, ATTR_SUPPORTED_FEATURES, ATTR_TEMPERATURE,
ATTR_DEVICE_CLASS, ATTR_ASSUMED_STATE, STATE_UNKNOWN)
from homeassistant.core import State, DOMAIN as HA_DOMAIN, EVENT_CALL_SERVICE
from homeassistant.util import color
from tests.common import async_mock_service, mock_coro
# Config that exposes every entity and has no secure-devices PIN configured.
BASIC_CONFIG = helpers.Config(
    should_expose=lambda state: True,
)

# Fixed request id reused by every test in this module.
REQ_ID = 'ff36a3cc-ec34-11e6-b1a0-64510650abcf'

# Request data built from the basic (PIN-less) config.
BASIC_DATA = helpers.RequestData(
    BASIC_CONFIG,
    'test-agent',
    REQ_ID,
)

# Same as BASIC_CONFIG but with a secure-devices PIN set, for traits that
# require two-factor confirmation (e.g. locks).
PIN_CONFIG = helpers.Config(
    should_expose=lambda state: True,
    secure_devices_pin='1234'
)

# Request data built from the PIN-protected config.
PIN_DATA = helpers.RequestData(
    PIN_CONFIG,
    'test-agent',
    REQ_ID,
)
async def test_brightness_light(hass):
    """Test brightness trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert trait.BrightnessTrait.supported(
        light.DOMAIN, light.SUPPORT_BRIGHTNESS, None)

    # A brightness of 243/255 is reported to Google as 95 percent.
    trt = trait.BrightnessTrait(
        hass,
        State('light.bla', light.STATE_ON, {light.ATTR_BRIGHTNESS: 243}),
        BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {'brightness': 95}

    # Capture both the mocked service call and the fired service event.
    events = []
    hass.bus.async_listen(EVENT_CALL_SERVICE, events.append)
    calls = async_mock_service(hass, light.DOMAIN, light.SERVICE_TURN_ON)

    await trt.execute(
        trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {'brightness': 50}, {})
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'light.bla',
        light.ATTR_BRIGHTNESS_PCT: 50,
    }

    assert len(events) == 1
    assert events[0].data == {
        'domain': 'light',
        'service': 'turn_on',
        'service_data': {'brightness_pct': 50, 'entity_id': 'light.bla'},
    }
async def test_brightness_media_player(hass):
    """Test brightness trait support for media player domain."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.BrightnessTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_VOLUME_SET, None)

    # A volume level of 0.3 is reported as a brightness of 30 percent.
    state = State('media_player.bla', media_player.STATE_PLAYING,
                  {media_player.ATTR_MEDIA_VOLUME_LEVEL: .3})
    trt = trait.BrightnessTrait(hass, state, BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {'brightness': 30}

    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_VOLUME_SET)
    await trt.execute(
        trait.COMMAND_BRIGHTNESS_ABSOLUTE, BASIC_DATA, {'brightness': 60}, {})

    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'media_player.bla',
        media_player.ATTR_MEDIA_VOLUME_LEVEL: .6,
    }
async def test_camera_stream(hass):
    """Test camera stream trait support for camera domain."""
    hass.config.api = Mock(base_url='http://1.1.1.1:8123')
    assert helpers.get_google_type(camera.DOMAIN, None) is not None
    assert trait.CameraStreamTrait.supported(
        camera.DOMAIN, camera.SUPPORT_STREAM, None)

    trt = trait.CameraStreamTrait(
        hass, State('camera.bla', camera.STATE_IDLE, {}), BASIC_CONFIG)

    assert trt.sync_attributes() == {
        'cameraStreamSupportedProtocols': ['hls'],
        'cameraStreamNeedAuthToken': False,
        'cameraStreamNeedDrmEncryption': False,
    }
    # No stream has been requested yet, so no access URL is reported.
    assert trt.query_attributes() == {}

    with patch('homeassistant.components.camera.async_request_stream',
               return_value=mock_coro('/api/streams/bla')):
        await trt.execute(trait.COMMAND_GET_CAMERA_STREAM, BASIC_DATA, {}, {})

    # After executing the command, the full stream URL becomes available.
    assert trt.query_attributes() == {
        'cameraStreamAccessUrl': 'http://1.1.1.1:8123/api/streams/bla'
    }
async def test_onoff_group(hass):
    """Test OnOff trait support for group domain."""
    assert helpers.get_google_type(group.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(group.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(
        hass, State('group.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(
        hass, State('group.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    # Group on/off is routed through the homeassistant core domain.
    on_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'group.bla'}

    off_calls = async_mock_service(hass, HA_DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'group.bla'}
async def test_onoff_input_boolean(hass):
    """Test OnOff trait support for input_boolean domain."""
    assert helpers.get_google_type(input_boolean.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(input_boolean.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(
        hass, State('input_boolean.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(
        hass, State('input_boolean.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    on_calls = async_mock_service(hass, input_boolean.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'input_boolean.bla'}

    off_calls = async_mock_service(
        hass, input_boolean.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'input_boolean.bla'}
async def test_onoff_switch(hass):
    """Test OnOff trait support for switch domain."""
    assert helpers.get_google_type(switch.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(switch.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(
        hass, State('switch.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(
        hass, State('switch.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    on_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'switch.bla'}

    off_calls = async_mock_service(hass, switch.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'switch.bla'}
async def test_onoff_fan(hass):
    """Test OnOff trait support for fan domain."""
    assert helpers.get_google_type(fan.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(fan.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(hass, State('fan.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(hass, State('fan.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    on_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'fan.bla'}

    off_calls = async_mock_service(hass, fan.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'fan.bla'}
async def test_onoff_light(hass):
    """Test OnOff trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(light.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(
        hass, State('light.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(
        hass, State('light.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    on_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'light.bla'}

    off_calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'light.bla'}
async def test_onoff_media_player(hass):
    """Test OnOff trait support for media_player domain."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.OnOffTrait.supported(media_player.DOMAIN, 0, None)

    trt_on = trait.OnOffTrait(
        hass, State('media_player.bla', STATE_ON), BASIC_CONFIG)
    trt_off = trait.OnOffTrait(
        hass, State('media_player.bla', STATE_OFF), BASIC_CONFIG)

    # No sync attributes; query state mirrors the HA on/off state.
    assert trt_on.sync_attributes() == {}
    assert trt_on.query_attributes() == {'on': True}
    assert trt_off.query_attributes() == {'on': False}

    on_calls = async_mock_service(hass, media_player.DOMAIN, SERVICE_TURN_ON)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': True}, {})
    assert len(on_calls) == 1
    assert on_calls[0].data == {ATTR_ENTITY_ID: 'media_player.bla'}

    off_calls = async_mock_service(
        hass, media_player.DOMAIN, SERVICE_TURN_OFF)
    await trt_on.execute(trait.COMMAND_ONOFF, BASIC_DATA, {'on': False}, {})
    assert len(off_calls) == 1
    assert off_calls[0].data == {ATTR_ENTITY_ID: 'media_player.bla'}
async def test_onoff_climate(hass):
    """Test OnOff trait not supported for climate domain."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    # Climate on/off is handled by the thermostat trait, not OnOff.
    supported = trait.OnOffTrait.supported(
        climate.DOMAIN, climate.SUPPORT_ON_OFF, None)
    assert not supported
async def test_dock_vacuum(hass):
    """Test dock trait support for vacuum domain."""
    assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
    assert trait.DockTrait.supported(vacuum.DOMAIN, 0, None)

    trt = trait.DockTrait(
        hass, State('vacuum.bla', vacuum.STATE_IDLE), BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    # An idle vacuum is reported as not docked.
    assert trt.query_attributes() == {'isDocked': False}

    calls = async_mock_service(
        hass, vacuum.DOMAIN, vacuum.SERVICE_RETURN_TO_BASE)
    await trt.execute(trait.COMMAND_DOCK, BASIC_DATA, {}, {})

    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: 'vacuum.bla'}
async def test_startstop_vacuum(hass):
    """Test startStop trait support for vacuum domain."""
    assert helpers.get_google_type(vacuum.DOMAIN, None) is not None
    assert trait.StartStopTrait.supported(vacuum.DOMAIN, 0, None)

    trt = trait.StartStopTrait(
        hass,
        State('vacuum.bla', vacuum.STATE_PAUSED,
              {ATTR_SUPPORTED_FEATURES: vacuum.SUPPORT_PAUSE}),
        BASIC_CONFIG)

    # A vacuum that supports pause advertises itself as pausable.
    assert trt.sync_attributes() == {'pausable': True}
    assert trt.query_attributes() == {'isRunning': False, 'isPaused': True}

    start_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_START)
    await trt.execute(trait.COMMAND_STARTSTOP, BASIC_DATA, {'start': True}, {})
    assert len(start_calls) == 1
    assert start_calls[0].data == {ATTR_ENTITY_ID: 'vacuum.bla'}

    stop_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_STOP)
    await trt.execute(
        trait.COMMAND_STARTSTOP, BASIC_DATA, {'start': False}, {})
    assert len(stop_calls) == 1
    assert stop_calls[0].data == {ATTR_ENTITY_ID: 'vacuum.bla'}

    pause_calls = async_mock_service(hass, vacuum.DOMAIN, vacuum.SERVICE_PAUSE)
    await trt.execute(
        trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {'pause': True}, {})
    assert len(pause_calls) == 1
    assert pause_calls[0].data == {ATTR_ENTITY_ID: 'vacuum.bla'}

    # Unpausing is mapped onto the vacuum start service.
    unpause_calls = async_mock_service(
        hass, vacuum.DOMAIN, vacuum.SERVICE_START)
    await trt.execute(
        trait.COMMAND_PAUSEUNPAUSE, BASIC_DATA, {'pause': False}, {})
    assert len(unpause_calls) == 1
    assert unpause_calls[0].data == {ATTR_ENTITY_ID: 'vacuum.bla'}
async def test_color_setting_color_light(hass):
    """Test ColorSpectrum trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(
        light.DOMAIN, light.SUPPORT_COLOR, None)

    trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
        light.ATTR_HS_COLOR: (20, 94),
        light.ATTR_BRIGHTNESS: 200,
        ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR,
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {'colorModel': 'hsv'}
    # Saturation is scaled to 0..1 and value is brightness/255.
    assert trt.query_attributes() == {
        'color': {
            'spectrumHsv': {
                'hue': 20,
                'saturation': 0.94,
                'value': 200 / 255,
            }
        }
    }

    assert trt.can_execute(
        trait.COMMAND_COLOR_ABSOLUTE, {'color': {'spectrumRGB': 16715792}})

    calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)

    # An RGB spectrum value is converted into hue/saturation.
    await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
        'color': {'spectrumRGB': 1052927}
    }, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'light.bla',
        light.ATTR_HS_COLOR: (240, 93.725),
    }

    # An HSV payload carries brightness through its value channel.
    await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
        'color': {
            'spectrumHSV': {'hue': 100, 'saturation': .50, 'value': .20}
        }
    }, {})
    assert len(calls) == 2
    assert calls[1].data == {
        ATTR_ENTITY_ID: 'light.bla',
        light.ATTR_HS_COLOR: [100, 50],
        light.ATTR_BRIGHTNESS: .2 * 255,
    }
async def test_color_setting_temperature_light(hass):
    """Test ColorTemperature trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(
        light.DOMAIN, light.SUPPORT_COLOR_TEMP, None)

    trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
        light.ATTR_MIN_MIREDS: 200,
        light.ATTR_COLOR_TEMP: 300,
        light.ATTR_MAX_MIREDS: 500,
        ATTR_SUPPORTED_FEATURES: light.SUPPORT_COLOR_TEMP,
    }), BASIC_CONFIG)

    # Mireds convert to Kelvin as 1e6/mireds, so min/max swap roles.
    assert trt.sync_attributes() == {
        'colorTemperatureRange': {
            'temperatureMinK': 2000,
            'temperatureMaxK': 5000,
        }
    }
    assert trt.query_attributes() == {'color': {'temperatureK': 3333}}

    assert trt.can_execute(
        trait.COMMAND_COLOR_ABSOLUTE, {'color': {'temperature': 400}})

    calls = async_mock_service(hass, light.DOMAIN, SERVICE_TURN_ON)

    # A temperature outside the supported range must be rejected.
    with pytest.raises(helpers.SmartHomeError) as err:
        await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
            'color': {'temperature': 5555}
        }, {})
    assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE

    await trt.execute(trait.COMMAND_COLOR_ABSOLUTE, BASIC_DATA, {
        'color': {'temperature': 2857}
    }, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'light.bla',
        light.ATTR_COLOR_TEMP: color.color_temperature_kelvin_to_mired(2857),
    }
async def test_color_light_temperature_light_bad_temp(hass):
    """Test ColorTemperature trait support for light domain."""
    assert helpers.get_google_type(light.DOMAIN, None) is not None
    assert not trait.ColorSettingTrait.supported(light.DOMAIN, 0, None)
    assert trait.ColorSettingTrait.supported(
        light.DOMAIN, light.SUPPORT_COLOR_TEMP, None)

    # A color temperature of 0 mireds is invalid, so nothing is reported.
    trt = trait.ColorSettingTrait(hass, State('light.bla', STATE_ON, {
        light.ATTR_MIN_MIREDS: 200,
        light.ATTR_COLOR_TEMP: 0,
        light.ATTR_MAX_MIREDS: 500,
    }), BASIC_CONFIG)

    assert trt.query_attributes() == {}
async def test_scene_scene(hass):
    """Test Scene trait support for scene domain."""
    assert helpers.get_google_type(scene.DOMAIN, None) is not None
    assert trait.SceneTrait.supported(scene.DOMAIN, 0, None)

    trt = trait.SceneTrait(
        hass, State('scene.bla', scene.STATE), BASIC_CONFIG)

    # Scenes expose no attributes; activation is the only interaction.
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {}
    assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})

    calls = async_mock_service(hass, scene.DOMAIN, SERVICE_TURN_ON)
    await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})
    assert len(calls) == 1
    assert calls[0].data == {ATTR_ENTITY_ID: 'scene.bla'}
async def test_scene_script(hass):
    """Test Scene trait support for script domain."""
    assert helpers.get_google_type(script.DOMAIN, None) is not None
    assert trait.SceneTrait.supported(script.DOMAIN, 0, None)

    trt = trait.SceneTrait(hass, State('script.bla', STATE_OFF), BASIC_CONFIG)
    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {}
    assert trt.can_execute(trait.COMMAND_ACTIVATE_SCENE, {})

    calls = async_mock_service(hass, script.DOMAIN, SERVICE_TURN_ON)
    await trt.execute(trait.COMMAND_ACTIVATE_SCENE, BASIC_DATA, {}, {})

    # We don't wait till script execution is done.
    await hass.async_block_till_done()

    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'script.bla',
    }
async def test_temperature_setting_climate_onoff(hass):
    """Test TemperatureSetting trait support for climate domain - range."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    assert trait.TemperatureSettingTrait.supported(
        climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)

    hass.config.units.temperature_unit = TEMP_FAHRENHEIT

    trt = trait.TemperatureSettingTrait(hass, State(
        'climate.bla', climate.STATE_AUTO, {
            ATTR_SUPPORTED_FEATURES: (
                climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF |
                climate.SUPPORT_TARGET_TEMPERATURE_HIGH |
                climate.SUPPORT_TARGET_TEMPERATURE_LOW),
            climate.ATTR_OPERATION_MODE: climate.STATE_COOL,
            climate.ATTR_OPERATION_LIST: [
                climate.STATE_COOL,
                climate.STATE_HEAT,
                climate.STATE_AUTO,
            ],
            climate.ATTR_MIN_TEMP: None,
            climate.ATTR_MAX_TEMP: None,
        }), BASIC_CONFIG)
    # SUPPORT_ON_OFF adds the off/on modes on top of the operation list.
    assert trt.sync_attributes() == {
        'availableThermostatModes': 'off,on,cool,heat,heatcool',
        'thermostatTemperatureUnit': 'F',
    }

    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})

    calls = async_mock_service(
        hass, climate.DOMAIN, SERVICE_TURN_ON)
    await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
        'thermostatMode': 'on',
    }, {})
    assert len(calls) == 1

    calls = async_mock_service(
        hass, climate.DOMAIN, SERVICE_TURN_OFF)
    await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
        'thermostatMode': 'off',
    }, {})
    assert len(calls) == 1
async def test_temperature_setting_climate_range(hass):
    """Test TemperatureSetting trait support for climate domain - range."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    assert trait.TemperatureSettingTrait.supported(
        climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)

    hass.config.units.temperature_unit = TEMP_FAHRENHEIT

    trt = trait.TemperatureSettingTrait(hass, State(
        'climate.bla', climate.STATE_AUTO, {
            climate.ATTR_CURRENT_TEMPERATURE: 70,
            climate.ATTR_CURRENT_HUMIDITY: 25,
            ATTR_SUPPORTED_FEATURES:
                climate.SUPPORT_OPERATION_MODE |
                climate.SUPPORT_TARGET_TEMPERATURE_HIGH |
                climate.SUPPORT_TARGET_TEMPERATURE_LOW,
            climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
            climate.ATTR_OPERATION_LIST: [
                STATE_OFF,
                climate.STATE_COOL,
                climate.STATE_HEAT,
                climate.STATE_AUTO,
            ],
            climate.ATTR_TARGET_TEMP_HIGH: 75,
            climate.ATTR_TARGET_TEMP_LOW: 65,
            climate.ATTR_MIN_TEMP: 50,
            climate.ATTR_MAX_TEMP: 80
        }), BASIC_CONFIG)
    assert trt.sync_attributes() == {
        'availableThermostatModes': 'off,cool,heat,heatcool',
        'thermostatTemperatureUnit': 'F',
    }
    # Google always receives temperatures in Celsius: 70°F -> 21.1°C etc.
    assert trt.query_attributes() == {
        'thermostatMode': 'heatcool',
        'thermostatTemperatureAmbient': 21.1,
        'thermostatHumidityAmbient': 25,
        'thermostatTemperatureSetpointLow': 18.3,
        'thermostatTemperatureSetpointHigh': 23.9,
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, {})
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})

    calls = async_mock_service(
        hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)
    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SET_RANGE, BASIC_DATA, {
            'thermostatTemperatureSetpointHigh': 25,
            'thermostatTemperatureSetpointLow': 20,
        }, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'climate.bla',
        climate.ATTR_TARGET_TEMP_HIGH: 77,
        climate.ATTR_TARGET_TEMP_LOW: 68,
    }

    calls = async_mock_service(
        hass, climate.DOMAIN, climate.SERVICE_SET_OPERATION_MODE)
    await trt.execute(trait.COMMAND_THERMOSTAT_SET_MODE, BASIC_DATA, {
        'thermostatMode': 'heatcool',
    }, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'climate.bla',
        climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
    }

    with pytest.raises(helpers.SmartHomeError) as err:
        await trt.execute(
            trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
            {'thermostatTemperatureSetpoint': -100}, {})
    assert err.value.code == const.ERR_VALUE_OUT_OF_RANGE
    # Restore the default unit so later tests are unaffected.
    hass.config.units.temperature_unit = TEMP_CELSIUS
async def test_temperature_setting_climate_setpoint(hass):
    """Test TemperatureSetting trait support for climate domain - setpoint."""
    assert helpers.get_google_type(climate.DOMAIN, None) is not None
    assert not trait.TemperatureSettingTrait.supported(climate.DOMAIN, 0, None)
    assert trait.TemperatureSettingTrait.supported(
        climate.DOMAIN, climate.SUPPORT_OPERATION_MODE, None)

    hass.config.units.temperature_unit = TEMP_CELSIUS

    trt = trait.TemperatureSettingTrait(hass, State(
        'climate.bla', climate.STATE_AUTO, {
            ATTR_SUPPORTED_FEATURES: (
                climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF),
            climate.ATTR_OPERATION_MODE: climate.STATE_COOL,
            climate.ATTR_OPERATION_LIST: [
                STATE_OFF,
                climate.STATE_COOL,
            ],
            climate.ATTR_MIN_TEMP: 10,
            climate.ATTR_MAX_TEMP: 30,
            ATTR_TEMPERATURE: 18,
            climate.ATTR_CURRENT_TEMPERATURE: 20
        }), BASIC_CONFIG)
    assert trt.sync_attributes() == {
        'availableThermostatModes': 'off,on,cool',
        'thermostatTemperatureUnit': 'C',
    }
    assert trt.query_attributes() == {
        'thermostatMode': 'cool',
        'thermostatTemperatureAmbient': 20,
        'thermostatTemperatureSetpoint': 18,
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {})
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})

    calls = async_mock_service(
        hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)

    # A setpoint below ATTR_MIN_TEMP must be rejected without a service call.
    with pytest.raises(helpers.SmartHomeError):
        await trt.execute(
            trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
            {'thermostatTemperatureSetpoint': -100}, {})

    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
        {'thermostatTemperatureSetpoint': 19}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'climate.bla',
        ATTR_TEMPERATURE: 19
    }
async def test_temperature_setting_climate_setpoint_auto(hass):
    """
    Test TemperatureSetting trait support for climate domain.

    Setpoint in auto mode.
    """
    hass.config.units.temperature_unit = TEMP_CELSIUS

    trt = trait.TemperatureSettingTrait(hass, State(
        'climate.bla', climate.STATE_AUTO, {
            ATTR_SUPPORTED_FEATURES: (
                climate.SUPPORT_OPERATION_MODE | climate.SUPPORT_ON_OFF),
            climate.ATTR_OPERATION_MODE: climate.STATE_AUTO,
            climate.ATTR_OPERATION_LIST: [
                STATE_OFF,
                climate.STATE_AUTO,
            ],
            climate.ATTR_MIN_TEMP: 10,
            climate.ATTR_MAX_TEMP: 30,
            ATTR_TEMPERATURE: 18,
            climate.ATTR_CURRENT_TEMPERATURE: 20
        }), BASIC_CONFIG)
    assert trt.sync_attributes() == {
        'availableThermostatModes': 'off,on,heatcool',
        'thermostatTemperatureUnit': 'C',
    }
    # In auto ("heatcool") mode the single setpoint is reported as an
    # identical low/high pair.
    assert trt.query_attributes() == {
        'thermostatMode': 'heatcool',
        'thermostatTemperatureAmbient': 20,
        'thermostatTemperatureSetpointHigh': 18,
        'thermostatTemperatureSetpointLow': 18,
    }
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, {})
    assert trt.can_execute(trait.COMMAND_THERMOSTAT_SET_MODE, {})

    calls = async_mock_service(
        hass, climate.DOMAIN, climate.SERVICE_SET_TEMPERATURE)

    await trt.execute(
        trait.COMMAND_THERMOSTAT_TEMPERATURE_SETPOINT, BASIC_DATA,
        {'thermostatTemperatureSetpoint': 19}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'climate.bla',
        ATTR_TEMPERATURE: 19
    }
async def test_lock_unlock_lock(hass):
    """Test LockUnlock trait locking support for lock domain."""
    assert helpers.get_google_type(lock.DOMAIN, None) is not None
    assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN,
                                           None)

    trt = trait.LockUnlockTrait(hass,
                                State('lock.front_door', lock.STATE_UNLOCKED),
                                PIN_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {
        'isLocked': False
    }

    assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {'lock': True})

    calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_LOCK)

    # No challenge data
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True}, {})
    assert len(calls) == 0
    # Assert on err.value (the raised exception), matching the
    # err.value.code pattern used elsewhere in this file; `err` itself is
    # a pytest ExceptionInfo and has no `code` attribute.
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED

    # invalid pin
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True},
            {'pin': 9999})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED

    await trt.execute(trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': True},
                      {'pin': '1234'})

    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'lock.front_door'
    }
async def test_lock_unlock_unlock(hass):
    """Test LockUnlock trait unlocking support for lock domain."""
    assert helpers.get_google_type(lock.DOMAIN, None) is not None
    assert trait.LockUnlockTrait.supported(lock.DOMAIN, lock.SUPPORT_OPEN,
                                           None)

    trt = trait.LockUnlockTrait(hass,
                                State('lock.front_door', lock.STATE_LOCKED),
                                PIN_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {
        'isLocked': True
    }

    assert trt.can_execute(trait.COMMAND_LOCKUNLOCK, {'lock': False})

    calls = async_mock_service(hass, lock.DOMAIN, lock.SERVICE_UNLOCK)

    # No challenge data
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False}, {})
    assert len(calls) == 0
    # err.value is the raised ChallengeNeeded; the ExceptionInfo wrapper
    # itself carries no `code`/`challenge_type` attributes.
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED

    # invalid pin
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False},
            {'pin': 9999})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED

    await trt.execute(
        trait.COMMAND_LOCKUNLOCK, PIN_DATA, {'lock': False}, {'pin': '1234'})

    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'lock.front_door'
    }
async def test_fan_speed(hass):
    """Test FanSpeed trait speed control support for fan domain."""
    assert helpers.get_google_type(fan.DOMAIN, None) is not None
    assert trait.FanSpeedTrait.supported(fan.DOMAIN, fan.SUPPORT_SET_SPEED,
                                         None)

    trt = trait.FanSpeedTrait(
        hass, State(
            'fan.living_room_fan', fan.SPEED_HIGH, attributes={
                'speed_list': [
                    fan.SPEED_OFF, fan.SPEED_LOW, fan.SPEED_MEDIUM,
                    fan.SPEED_HIGH
                ],
                'speed': 'low'
            }), BASIC_CONFIG)

    # Each HA speed maps to a Google speed entry with English synonyms.
    assert trt.sync_attributes() == {
        'availableFanSpeeds': {
            'ordered': True,
            'speeds': [
                {
                    'speed_name': 'off',
                    'speed_values': [
                        {
                            'speed_synonym': ['stop', 'off'],
                            'lang': 'en'
                        }
                    ]
                },
                {
                    'speed_name': 'low',
                    'speed_values': [
                        {
                            'speed_synonym': [
                                'slow', 'low', 'slowest', 'lowest'],
                            'lang': 'en'
                        }
                    ]
                },
                {
                    'speed_name': 'medium',
                    'speed_values': [
                        {
                            'speed_synonym': ['medium', 'mid', 'middle'],
                            'lang': 'en'
                        }
                    ]
                },
                {
                    'speed_name': 'high',
                    'speed_values': [
                        {
                            'speed_synonym': [
                                'high', 'max', 'fast', 'highest', 'fastest',
                                'maximum'],
                            'lang': 'en'
                        }
                    ]
                }
            ]
        },
        'reversible': False
    }

    assert trt.query_attributes() == {
        'currentFanSpeedSetting': 'low',
        'on': True,
        'online': True
    }

    assert trt.can_execute(
        trait.COMMAND_FANSPEED, params={'fanSpeed': 'medium'})

    calls = async_mock_service(hass, fan.DOMAIN, fan.SERVICE_SET_SPEED)
    await trt.execute(
        trait.COMMAND_FANSPEED, BASIC_DATA, {'fanSpeed': 'medium'}, {})

    assert len(calls) == 1
    assert calls[0].data == {
        'entity_id': 'fan.living_room_fan',
        'speed': 'medium'
    }
async def test_modes(hass):
    """Test Mode trait."""
    assert helpers.get_google_type(media_player.DOMAIN, None) is not None
    assert trait.ModesTrait.supported(
        media_player.DOMAIN, media_player.SUPPORT_SELECT_SOURCE, None)

    trt = trait.ModesTrait(
        hass, State(
            'media_player.living_room', media_player.STATE_PLAYING,
            attributes={
                media_player.ATTR_INPUT_SOURCE_LIST: [
                    'media', 'game', 'chromecast', 'plex'
                ],
                media_player.ATTR_INPUT_SOURCE: 'game'
            }),
        BASIC_CONFIG)

    attribs = trt.sync_attributes()
    # NOTE: only sources with known synonyms are exposed; 'plex' from the
    # source list has no entry below.
    assert attribs == {
        'availableModes': [
            {
                'name': 'input source',
                'name_values': [
                    {
                        'name_synonym': ['input source'],
                        'lang': 'en'
                    }
                ],
                'settings': [
                    {
                        'setting_name': 'media',
                        'setting_values': [
                            {
                                'setting_synonym': ['media', 'media mode'],
                                'lang': 'en'
                            }
                        ]
                    },
                    {
                        'setting_name': 'game',
                        'setting_values': [
                            {
                                'setting_synonym': ['game', 'game mode'],
                                'lang': 'en'
                            }
                        ]
                    },
                    {
                        'setting_name': 'chromecast',
                        'setting_values': [
                            {
                                'setting_synonym': ['chromecast'],
                                'lang': 'en'
                            }
                        ]
                    }
                ],
                'ordered': False
            }
        ]
    }
    assert trt.query_attributes() == {
        'currentModeSettings': {'source': 'game'},
        'on': True,
        'online': True
    }
    assert trt.can_execute(
        trait.COMMAND_MODES, params={
            'updateModeSettings': {
                trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): 'media'
            }})

    calls = async_mock_service(
        hass, media_player.DOMAIN, media_player.SERVICE_SELECT_SOURCE)
    await trt.execute(
        trait.COMMAND_MODES, BASIC_DATA, {
            'updateModeSettings': {
                trt.HA_TO_GOOGLE.get(media_player.ATTR_INPUT_SOURCE): 'media'
            }}, {})

    assert len(calls) == 1
    assert calls[0].data == {
        'entity_id': 'media_player.living_room',
        'source': 'media'
    }
async def test_openclose_cover(hass):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, None) is not None
    assert trait.OpenCloseTrait.supported(cover.DOMAIN,
                                          cover.SUPPORT_SET_POSITION, None)

    # No position: an open cover with no position attribute reads as 100%.
    trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {
        'openPercent': 100
    }

    # No state
    trt = trait.OpenCloseTrait(hass, State('cover.bla', STATE_UNKNOWN, {
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    with pytest.raises(helpers.SmartHomeError):
        trt.query_attributes()

    # Assumed state
    trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
        ATTR_ASSUMED_STATE: True,
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    with pytest.raises(helpers.SmartHomeError):
        trt.query_attributes()

    trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
        cover.ATTR_CURRENT_POSITION: 75
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {
        'openPercent': 75
    }

    calls = async_mock_service(
        hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)
    await trt.execute(
        trait.COMMAND_OPENCLOSE, BASIC_DATA,
        {'openPercent': 50}, {})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'cover.bla',
        cover.ATTR_POSITION: 50
    }
@pytest.mark.parametrize('device_class', (
    cover.DEVICE_CLASS_DOOR,
    cover.DEVICE_CLASS_GARAGE,
))
async def test_openclose_cover_secure(hass, device_class):
    """Test OpenClose trait support for cover domain."""
    assert helpers.get_google_type(cover.DOMAIN, device_class) is not None
    assert trait.OpenCloseTrait.supported(
        cover.DOMAIN, cover.SUPPORT_SET_POSITION, device_class)

    trt = trait.OpenCloseTrait(hass, State('cover.bla', cover.STATE_OPEN, {
        ATTR_DEVICE_CLASS: device_class,
        cover.ATTR_CURRENT_POSITION: 75
    }), PIN_CONFIG)

    assert trt.sync_attributes() == {}
    assert trt.query_attributes() == {
        'openPercent': 75
    }

    calls = async_mock_service(
        hass, cover.DOMAIN, cover.SERVICE_SET_COVER_POSITION)

    # No challenge data
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_OPENCLOSE, PIN_DATA,
            {'openPercent': 50}, {})
    assert len(calls) == 0
    # err.value is the raised exception; the ExceptionInfo wrapper has no
    # `code`/`challenge_type` attributes of its own.
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_PIN_NEEDED

    # invalid pin
    with pytest.raises(error.ChallengeNeeded) as err:
        await trt.execute(
            trait.COMMAND_OPENCLOSE, PIN_DATA,
            {'openPercent': 50}, {'pin': '9999'})
    assert len(calls) == 0
    assert err.value.code == const.ERR_CHALLENGE_NEEDED
    assert err.value.challenge_type == const.CHALLENGE_FAILED_PIN_NEEDED

    await trt.execute(
        trait.COMMAND_OPENCLOSE, PIN_DATA,
        {'openPercent': 50}, {'pin': '1234'})
    assert len(calls) == 1
    assert calls[0].data == {
        ATTR_ENTITY_ID: 'cover.bla',
        cover.ATTR_POSITION: 50
    }
@pytest.mark.parametrize('device_class', (
    binary_sensor.DEVICE_CLASS_DOOR,
    binary_sensor.DEVICE_CLASS_GARAGE_DOOR,
    binary_sensor.DEVICE_CLASS_LOCK,
    binary_sensor.DEVICE_CLASS_OPENING,
    binary_sensor.DEVICE_CLASS_WINDOW,
))
async def test_openclose_binary_sensor(hass, device_class):
    """Test OpenClose trait support for binary_sensor domain."""
    assert helpers.get_google_type(
        binary_sensor.DOMAIN, device_class) is not None
    assert trait.OpenCloseTrait.supported(binary_sensor.DOMAIN,
                                          0, device_class)

    # Binary sensors are read-only: "on" reports fully open (100%).
    trt = trait.OpenCloseTrait(hass, State('binary_sensor.test', STATE_ON, {
        ATTR_DEVICE_CLASS: device_class,
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {
        'queryOnlyOpenClose': True,
    }
    assert trt.query_attributes() == {
        'openPercent': 100
    }

    trt = trait.OpenCloseTrait(hass, State('binary_sensor.test', STATE_OFF, {
        ATTR_DEVICE_CLASS: device_class,
    }), BASIC_CONFIG)

    assert trt.sync_attributes() == {
        'queryOnlyOpenClose': True,
    }
    assert trt.query_attributes() == {
        'openPercent': 0
    }
uchchwhash/fortran-linter | linter/tokens.py | 1 | 3201 | """
Some lowest-level parsers, that is, tokenizers.
"""
import re
from .parsers import parser, join
from .parsers import Success, Failure
def exact(string, ignore_case=False):
    """ Only matches the exact `string`. """
    if ignore_case:
        # Normalize the pattern once; each candidate segment is lowered below.
        string = string.lower()

    @parser(repr(string))
    def inner(text, start):
        """ A parser for the `string`. """
        whole = len(string)
        segment = text[start: start + whole]
        if ignore_case:
            segment = segment.lower()
        if segment == string:
            return Success(text, start, start + whole, string)
        else:
            raise Failure(text, start, repr(string))
    return inner
def satisfies(predicate, desc):
    """ Recognize a character satisfying given `predicate`. """
    @parser(desc)
    def inner(text, start):
        """ A parser that applies the `predicate`. """
        # Bounds check first so predicate is never called past end of input.
        if start < len(text) and predicate(text[start]):
            return Success(text, start, start + 1, text[start])
        else:
            raise Failure(text, start, desc)
    return inner
def one_of(chars):
    """ Recognize any of the given characters `chars`. """
    return satisfies(lambda c: c in chars, "one of {}".format(chars))
def none_of(chars):
    """ Consumes a character that is not on the list `chars`. """
    return satisfies(lambda c: c not in chars, "none of {}".format(chars))
# Ready-made single-character and character-run tokenizers built on
# `satisfies`; the `#:` comments are Sphinx-style attribute docs.
#: succeeds for any character
wildcard = satisfies(lambda c: True, "")
#: matches a space character
space = satisfies(lambda c: c.isspace(), "whitespace")
#: matches whitespace
spaces = (+space // join) % "whitespaces"
#: matches optional whitespace
whitespace = (~space // join) % "optional whitespace"
#: matches a letter
letter = satisfies(lambda c: c.isalpha(), "letter")
#: matches a word
word = (+letter // join) % "word"
#: matches a digit
digit = satisfies(lambda c: c.isdigit(), "digit")
#: matches a list of digits
digits = (+digit // join) % "digits"
#: matches one alphanumeric character
alphanumeric = satisfies(lambda c: c.isalnum(), "alphanumeric")
#: matches multiple alphanumeric characters
alphanumerics = (+alphanumeric // join) % "alphanumerics"
def separated_by(prsr, sep, empty=None):
    """ A list of `prsr` parsers separated by `sep` parsers. """
    @parser
    def inner(text, start):
        """ A parser that returns the list of values parsed by `prsr`. """
        head = prsr.scan(text, start)
        # Zero or more (separator, item) pairs, keeping only the items.
        tail = (~(sep >> prsr)).scan(text, head.end)
        return Success(text, start, tail.end, [head.value] + tail.value)
    if empty is None:
        return inner
    else:
        # Allow an empty match to fall back to the given `empty` parser.
        return inner | empty
def liberal(prsr):
    """ No fuss about surrounding whitespace. """
    return whitespace >> prsr << whitespace
def regex(exp, flags=0):
    """ Match a regex. Perhaps too powerful. """
    if isinstance(exp, str):
        # Accept either a pattern string or a precompiled pattern object.
        exp = re.compile(exp, flags)

    @parser
    def inner(text, start):
        """ A parser that applies the regex. """
        match = exp.match(text, start)
        if match:
            return Success(text, match.start(), match.end(), match)
        else:
            raise Failure(text, start, exp.pattern)
    return inner
| mit | -5,792,294,949,699,803,000 | 25.89916 | 74 | 0.621368 | false |
bentzinir/Buffe | Applications/mgail/environments/humanoid/fm_test.py | 1 | 1436 | import common
env_name = 'humanoid'
git_path = '/home/llt_lab/Documents/repo/'
run_dir = git_path + '/Buffe-2017/Applications/mgail/environments/' + env_name + '/'

env = __import__('environment').ENVIRONMENT(run_dir)

# All forward-model hyper-parameters come from the environment definition.
_fm_kwargs = dict(
    state_size=env.state_size,
    action_size=env.action_size,
    rho=env.fm_rho,
    beta=env.fm_beta,
    encoding_size=env.fm_encoding_size,
    batch_size=env.fm_batch_size,
    multi_layered_encoder=env.fm_multi_layered_encoder,
    num_steps=env.fm_num_steps,
    separate_encoders=env.fm_separate_encoders,
    merger=env.fm_merger,
    activation=env.fm_activation,
    lstm=env.fm_lstm,
)
forward_model = __import__('forward_model').ForwardModel(**_fm_kwargs)

# Pretrain against the recorded expert experience replay.
forward_model.pretrain(env.fm_opt, env.fm_lr, env.fm_batch_size,
                       env.fm_num_iterations,
                       env.run_dir + env.fm_expert_er_path)
| mit | -908,592,799,820,367,100 | 61.434783 | 124 | 0.392061 | false |
Cadasta/cadasta-platform | cadasta/search/tests/test_parser.py | 1 | 12984 | import pytest
from django.test import TestCase
from .. import parser
class ParserTest(TestCase):
    """Exercise the search-query grammar and its Elasticsearch DSL output."""

    # --- expected-DSL builders -------------------------------------------
    # These mirror the clauses emitted by parser.parse_query /
    # parser.transform_to_dsl so that each test case stays short.  Dict
    # equality is order-insensitive, so building the dicts here is
    # behaviorally identical to spelling them out inline.

    @staticmethod
    def _term(query, boost=10):
        """Plain (non-fuzzy) multi_match clause."""
        return {'multi_match': {
            'query': query, 'fields': parser.fields, 'boost': boost}}

    @staticmethod
    def _fuzzy(query):
        """Fuzzy companion clause; carries no boost."""
        return {'multi_match': {
            'query': query, 'fields': parser.fields,
            'fuzziness': 1, 'prefix_length': 1}}

    @staticmethod
    def _phrase(query, boost=10):
        """Quoted-phrase multi_match clause."""
        return {'multi_match': {
            'query': query, 'fields': parser.fields,
            'type': 'phrase', 'boost': boost}}

    @staticmethod
    def _bool(must=None, should=None, must_not=()):
        """bool query wrapper; archived records are always excluded last."""
        clause = {'must_not': list(must_not) + [{'match': {'archived': True}}]}
        if must is not None:
            clause['must'] = must
        if should is not None:
            clause['should'] = should
        return {'bool': clause}

    def test_parse_string(self):
        p = parser.query.parseString
        # Simple cases
        # NOTE(review): adjacent duplicate cases below may originally have
        # differed by internal double spaces lost in transit — confirm.
        assert p('a').asList() == ['a']
        assert p('a ').asList() == ['a']
        assert p(' a ').asList() == ['a']
        assert p(' a').asList() == ['a']
        assert p('a b').asList() == ['a', 'b']
        assert p('a b').asList() == ['a', 'b']
        assert p(' a b').asList() == ['a', 'b']
        assert p('a b ').asList() == ['a', 'b']
        assert p(' a b ').asList() == ['a', 'b']
        assert p('a_b').asList() == ['a_b']
        assert p('a b c').asList() == ['a', 'b', 'c']
        assert p('a___ b--- c+++').asList() == ['a___', 'b---', 'c+++']

        # Quoted cases
        assert p('"a b"').asList() == ['"a b"']
        assert p('"a b"').asList() == ['"a b"']
        assert p('"a b" c').asList() == ['"a b"', 'c']
        assert p('a "b c"').asList() == ['a', '"b c"']
        assert p('a "b c" d').asList() == ['a', '"b c"', 'd']

        # +- cases
        assert p('+a').asList() == [['+', 'a']]
        assert p('-a').asList() == [['-', 'a']]
        assert p('+"a b"').asList() == [['+', '"a b"']]
        assert p('-"a b"').asList() == [['-', '"a b"']]
        assert p('b +a').asList() == ['b', ['+', 'a']]
        assert p('b -a').asList() == ['b', ['-', 'a']]
        assert p('"b +a"').asList() == ['"b +a"']
        assert p('"b -a"').asList() == ['"b -a"']
        assert p('b+a').asList() == ['b+a']
        assert p('b-a').asList() == ['b-a']
        assert p('"b+a"').asList() == ['"b+a"']
        assert p('"b-a"').asList() == ['"b-a"']
        assert p('+a b c').asList() == [['+', 'a'], 'b', 'c']
        assert p('-a b c').asList() == [['-', 'a'], 'b', 'c']
        assert p('+a "b c"').asList() == [['+', 'a'], '"b c"']
        assert p('-a "b c"').asList() == [['-', 'a'], '"b c"']
        assert p('a b +c').asList() == ['a', 'b', ['+', 'c']]
        assert p('a b -c').asList() == ['a', 'b', ['-', 'c']]
        assert p('a "b +c"').asList() == ['a', '"b +c"']
        assert p('a "b -c"').asList() == ['a', '"b -c"']
        assert p('+a -b +c').asList() == [['+', 'a'], ['-', 'b'], ['+', 'c']]
        assert p('-a +b -c').asList() == [['-', 'a'], ['+', 'b'], ['-', 'c']]
        assert p('+a -"b +c"').asList() == [['+', 'a'], ['-', '"b +c"']]
        assert p('-a +"b -c"').asList() == [['-', 'a'], ['+', '"b -c"']]
        assert p('+a-b +c').asList() == [['+', 'a-b'], ['+', 'c']]
        assert p('-a+b -c').asList() == [['-', 'a+b'], ['-', 'c']]
        assert p('+"a-b" +c').asList() == [['+', '"a-b"'], ['+', 'c']]
        assert p('-"a+b" -c').asList() == [['-', '"a+b"'], ['-', 'c']]
        assert p('+a-"b +c"').asList() == [['+', 'a-"b'], ['+', 'c"']]
        assert p('-a+"b -c"').asList() == [['-', 'a+"b'], ['-', 'c"']]
        assert p('+a -b+c').asList() == [['+', 'a'], ['-', 'b+c']]
        assert p('-a +b-c').asList() == [['-', 'a'], ['+', 'b-c']]
        assert p('+a -"b+c"').asList() == [['+', 'a'], ['-', '"b+c"']]
        assert p('-a +"b-c"').asList() == [['-', 'a'], ['+', '"b-c"']]
        assert p('+a "-b+c"').asList() == [['+', 'a'], '"-b+c"']
        assert p('-a "+b-c"').asList() == [['-', 'a'], '"+b-c"']

    def test_parse_query(self):
        q = parser.parse_query
        # Bare multi-char term: optional ("should") exact + fuzzy clauses.
        assert q('ab') == self._bool(
            should=[self._term('ab'), self._fuzzy('ab')])
        # Quoted phrase: single phrase clause, never fuzzy.
        assert q('"a b"') == self._bool(should=[self._phrase('a b')])
        # '+' makes the clause required ("must").
        assert q('+ab') == self._bool(
            must=[self._term('ab'), self._fuzzy('ab')])
        assert q('+"a b"') == self._bool(must=[self._phrase('a b')])
        # '-' excludes; excluded clauses have boost 1 and no fuzziness.
        assert q('-a') == self._bool(must_not=[self._term('a', boost=1)])
        assert q('-"a b"') == self._bool(
            must_not=[self._phrase('a b', boost=1)])
        # Combinations.
        assert q('"a" +"b"') == self._bool(
            must=[self._phrase('b')], should=[self._phrase('a')])
        assert q('"a" -"b"') == self._bool(
            should=[self._phrase('a')],
            must_not=[self._phrase('b', boost=1)])
        assert q('+"a" -"b"') == self._bool(
            must=[self._phrase('a')],
            must_not=[self._phrase('b', boost=1)])

    def test_transform_to_dsl(self):
        t = parser.transform_to_dsl
        # Single-char terms get no fuzzy companion (fuzziness would be 0).
        assert t(['a']) == [self._term('a')]
        assert t(['ab']) == [self._term('ab'), self._fuzzy('ab')]
        assert t(['"a"']) == [self._phrase('a')]
        # has_fuzziness=False drops fuzzy clauses and lowers boost to 1.
        assert t(['a'], has_fuzziness=False) == [self._term('a', boost=1)]
        assert t(['"a"'], has_fuzziness=False) == [self._phrase('a', boost=1)]
        assert t(['ab', '"b"']) == [
            self._term('ab'), self._fuzzy('ab'), self._phrase('b')]
        assert t(['"a"', 'bc']) == [
            self._phrase('a'), self._term('bc'), self._fuzzy('bc')]
        assert t(['ab', '"b"'], has_fuzziness=False) == [
            self._term('ab', boost=1), self._phrase('b', boost=1)]
        assert t(['"a"', 'bc'], has_fuzziness=False) == [
            self._phrase('a', boost=1), self._term('bc', boost=1)]
        # Unbalanced quotes are treated as ordinary terms.
        assert t(['"a']) == [self._term('"a'), self._fuzzy('"a')]
        assert t(['a"']) == [self._term('a"'), self._fuzzy('a"')]

    def test_get_fuzziness(self):
        # Empty input is a programming error, guarded by an assertion.
        with pytest.raises(AssertionError):
            parser.get_fuzziness('')
        assert parser.get_fuzziness('a') == 0
        assert parser.get_fuzziness('ab') == 1
        assert parser.get_fuzziness('abc') == 1
        assert parser.get_fuzziness('abcd') == 2
        assert parser.get_fuzziness('abcde') == 2
        assert parser.get_fuzziness('abcdef') == 2
| agpl-3.0 | 4,627,493,749,427,611,000 | 34.768595 | 79 | 0.275878 | false |
hall-lab/svtools | tests/reclassifier_tests.py | 1 | 5067 | from unittest import TestCase, main
import os
import time
import sys
import tempfile
import difflib
import svtools.sv_classifier
import gzip
class IntegrationTest_sv_classify(TestCase):
    """End-to-end checks of sv_classifier.run_reclassifier per method."""

    def _check_reclassifier(self, method, expected_vcf_name):
        """Run the reclassifier with `method` and diff against expected VCF.

        `method` is the classifier name passed to run_reclassifier
        ('naive_bayes', 'large_sample' or 'hybrid'); `expected_vcf_name`
        is the gzipped expected-output file under test_data/sv_classifier.
        """
        test_directory = os.path.dirname(os.path.abspath(__file__))
        test_data_dir = os.path.join(test_directory, 'test_data',
                                     'sv_classifier')
        input_path = os.path.join(test_data_dir, 'reclass.test.vcf.gz')
        expected_result = os.path.join(test_data_dir, expected_vcf_name)
        annot = os.path.join(
            test_data_dir,
            'repeatMasker.recent.lt200millidiv.LINE_SINE_SVA.b37.sorted.bed.gz')
        sex_file = os.path.join(test_data_dir, 'ceph.sex.txt')
        train = os.path.join(test_data_dir, 'training.vars.vcf.gz')

        diags_handle, diags_file = tempfile.mkstemp(suffix='.txt')
        os.close(diags_handle)  # only the path is used; don't leak the fd
        temp_descriptor, temp_output_path = tempfile.mkstemp(suffix='.vcf')

        sex_chrom_names = set(('X', 'Y'))
        with open(sex_file, 'r') as sex, \
                gzip.open(input_path, 'rb') as input_handle, \
                os.fdopen(temp_descriptor, 'w') as output_handle:
            svtools.sv_classifier.run_reclassifier(
                input_handle, output_handle, sex, sex_chrom_names, annot,
                0.9, None, 1.0, 0.2, train, method, diags_file)

        with gzip.open(expected_result, 'rb') as expected_handle:
            expected_lines = expected_handle.readlines()
        # The header date is generated at run time; substitute today's.
        expected_lines[1] = '##fileDate=' + time.strftime('%Y%m%d') + '\n'
        with open(temp_output_path) as produced_handle:
            produced_lines = produced_handle.readlines()
        diff = difflib.unified_diff(
            produced_lines, expected_lines,
            fromfile=temp_output_path, tofile=expected_result)
        os.remove(temp_output_path)
        os.remove(diags_file)
        self.assertEqual(''.join(diff), '')

    def test_integration_nb(self):
        self._check_reclassifier('naive_bayes', 'output.nb.vcf.gz')

    def test_integration_ls(self):
        self._check_reclassifier('large_sample', 'output.ls.vcf.gz')

    def test_integration_hyb(self):
        self._check_reclassifier('hybrid', 'output.hyb.vcf.gz')
if __name__ == "__main__":
main()
| mit | 5,920,470,928,028,170,000 | 51.237113 | 165 | 0.638445 | false |
fiam/oauthsp | middleware.py | 1 | 1611 | # -*- coding: utf-8 -*-
# Copyright (c) 2008 Alberto García Hierro <[email protected]>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from oauthsp.exceptions import OAuthError
# This middleware is not required by django-oauthsp
# nor WAPI, since both of them have the OAuth calls
# wrapped in try/except blocks. However, if you
# are going to manipulate OAuth requests, you may
# find it handy.
class OAuthExceptionMiddleware(object):
def process_exception(self, request, exception):
if isinstance(exception, OAuthError):
return exception.get_response()
return None
| mit | -8,372,374,637,237,358,000 | 42.459459 | 79 | 0.761816 | false |
lintzc/gpdb | gpMgmt/bin/gppylib/operations/test/unit/test_unit_persistent_rebuild.py | 4 | 87711 | #!/usr/bin/env python
#
# Copyright (c) Pivotal Inc 2014. All Rights Reserved.
#
import os
import re
import shutil
import unittest2 as unittest
from collections import defaultdict
from gppylib.gpversion import GpVersion
from gppylib.commands.base import Command, CommandResult, ExecutionError
from mock import patch, MagicMock, Mock, mock_open
from gppylib.operations.persistent_rebuild import ValidateContentID, DbIdInfo, GetDbIdInfo, BackupPersistentTableFiles,\
RebuildTable, RebuildPersistentTables, ValidatePersistentBackup,\
RunBackupRestore, ValidateMD5Sum
# Flags toggled by individual tests to simulate missing persistent-table
# entries in the canned query results below.
remove_per_db_pt_entry = False
remove_global_pt_entry = False

def pt_query_side_effect(*args, **kwargs):
    """Stand-in for dbconn.execSQL returning canned rows keyed on query text.

    args[1] is the SQL string; any query other than the two recognized ones
    falls through to the global persistent-table file list.
    """
    all_databases_sql = """select oid, datname from pg_database"""
    per_database_sql = """SELECT relfilenode FROM pg_class WHERE oid IN (5094, 5095)"""
    global_sql = """SELECT relfilenode FROM pg_class WHERE oid IN (5090, 5091, 5092, 5093)"""

    query = args[1]
    if query == all_databases_sql:
        return [[123, 'db1']]
    if query == per_database_sql:
        # Optionally drop relfilenode 5094 to simulate a missing per-db entry.
        return [[5095]] if remove_per_db_pt_entry else [[5094], [5095]]
    # Fallback (expected: global_sql) — optionally drop relfilenode 5090 to
    # simulate a missing global persistent table entry.
    return [[5091], [5092], [5093]] if remove_global_pt_entry else [[5090], [5091], [5092], [5093]]
class ValidateContentIDTestCase(unittest.TestCase):
    """Unit tests for ValidateContentID: contentid-file parsing, --contentid
    parsing, and validation against gp_segment_configuration.

    Cleanups vs. the original: unused locals removed from two tests, and the
    repeated mock-open / mock-gparray scaffolding factored into helpers.
    """

    def setUp(self):
        # Individual tests fill in content_id / contentid_file / gparray.
        self.contentid_validator = ValidateContentID(content_id=None,
                                                     contentid_file=None,
                                                     gparray=None)

    def _mock_contentid_file(self, file_contents):
        """Return a mock usable in place of open() whose context manager
        iterates the whitespace-separated tokens of file_contents."""
        m = MagicMock()
        m.return_value.__enter__.return_value.__iter__.return_value = iter(file_contents.split())
        return m

    def _make_gparray(self, make_seg, count=6):
        """Return a Mock gparray whose getDbList() yields count make_seg(i) mocks."""
        gparray = Mock()
        gparray.getDbList = Mock()
        gparray.getDbList.return_value = [make_seg(i) for i in range(count)]
        return gparray

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_valid_content_ids(self, mock1):
        """One integer per line parses into a list of ints."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file('1\n2\n3')
        with patch('__builtin__.open', m, create=True):
            self.assertEqual([1, 2, 3], self.contentid_validator._validate_contentid_file())

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_spaces_content_ids(self, mock1):
        """Whitespace around content ids is tolerated."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file(' 1\n2 \n3 \n')
        with patch('__builtin__.open', m, create=True):
            self.assertEqual([1, 2, 3], self.contentid_validator._validate_contentid_file())

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_invalid_content_ids(self, mock1):
        """A non-integer line is reported with the offending token and file."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file('1\nb\n3')
        with patch('__builtin__.open', m, create=True):
            with self.assertRaisesRegexp(Exception, 'Found non integer content id "b" in contentid file "/tmp/contentid_file"'):
                self.contentid_validator._validate_contentid_file()

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_empty_file(self, mock1):
        """An empty contentid file is rejected."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file('')
        with patch('__builtin__.open', m, create=True):
            with self.assertRaisesRegexp(Exception, 'Please make sure there is atleast one integer content ID in the file'):
                self.contentid_validator._validate_contentid_file()

    @patch('os.path.isfile', return_value=False)
    def test_validate_contentid_file_with_non_existent_file(self, mock1):
        """A missing contentid file is rejected before any parsing."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        with self.assertRaisesRegexp(Exception, 'Unable to find contentid file "/tmp/contentid_file"'):
            self.contentid_validator._validate_contentid_file()

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_blank_lines(self, mock1):
        """Blank lines are skipped."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file('1\n\n\n2')
        with patch('__builtin__.open', m, create=True):
            self.assertEqual([1, 2], self.contentid_validator._validate_contentid_file())

    @patch('os.path.isfile', return_value=True)
    def test_validate_contentid_file_with_negative_integers(self, mock1):
        """Negative content ids are valid integers."""
        self.contentid_validator.contentid_file = '/tmp/contentid_file'
        m = self._mock_contentid_file('-1\n2')
        with patch('__builtin__.open', m, create=True):
            self.assertEqual([-1, 2], self.contentid_validator._validate_contentid_file())

    def test_validate_content_id_with_valid_segments(self):
        """Content ids present in gp_segment_configuration validate cleanly."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = (i % 3) + 1
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [1, 2, 3]
        self.assertEqual([1, 2, 3], self.contentid_validator._validate_content_id())

    def test_validate_content_id_with_invalid_segments(self):
        """Content ids absent from gp_segment_configuration are reported."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = i + 10
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [1, 2, 3]
        with self.assertRaisesRegexp(Exception, 'The following content ids are not present in gp_segment_configuration: 1, 2, 3'):
            self.contentid_validator._validate_content_id()

    def test_validate_content_id_with_primary_segment_down(self):
        """A down ('d') segment for a requested content id does not raise."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = (i % 3) + 1
            if i == 1:
                m.getSegmentStatus.return_value = 'd'
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [1, 2, 3]
        # Should complete without raising.
        self.contentid_validator._validate_content_id()

    def test_validate_content_id_with_resync(self):
        """A requested content id in resync mode ('r') is rejected."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = (i % 3) + 1
            m.getSegmentStatus.return_value = 'u'
            m.getSegmentMode.return_value = 'r' if i == 1 else 's'
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [1, 2, 3]
        with self.assertRaisesRegexp(Exception, 'Can not rebuild persistent tables for content ids that are in resync mode'):
            self.contentid_validator._validate_content_id()

    @patch('gppylib.operations.persistent_rebuild.ask_yesno', return_value=False)
    def test_validate_content_id_with_some_others_resync(self, mock1):
        """Other content ids in resync prompt the user; answering no aborts."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = (i % 3) + 1
            m.getSegmentStatus.return_value = 'u'
            m.getSegmentMode.return_value = 'r' if m.getSegmentContentId.return_value in (1, 2) else 's'
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [3]
        with self.assertRaisesRegexp(Exception, 'Aborting rebuild due to user request'):
            self.contentid_validator._validate_content_id()

    def test_validate_content_id_with_change_tracking_segments(self):
        """A segment in change tracking ('c') does not block validation."""
        def make_seg(i):
            m = Mock()
            m.getSegmentContentId.return_value = (i % 3) + 1
            if i == 1:
                m.getSegmentStatus.return_value = 'c'
            return m
        self.contentid_validator.gparray = self._make_gparray(make_seg)
        self.contentid_validator.content_id = [1, 2, 3]
        self.assertEqual([1, 2, 3], self.contentid_validator._validate_content_id())

    def test_parse_content_id(self):
        """A comma-separated list parses into ints."""
        self.contentid_validator.content_id = '1, 2, 3'
        self.assertEqual([1, 2, 3], self.contentid_validator._parse_content_id())

    def test_parse_content_id_valid_single_content_id(self):
        """A single (negative) content id parses."""
        self.contentid_validator.content_id = '-1'
        self.assertEqual([-1], self.contentid_validator._parse_content_id())

    def test_parse_content_id_invalid_comma_separated_list(self):
        """Trailing commas leave empty, non-integer entries and are rejected."""
        self.contentid_validator.content_id = '1, 2, 3,,'
        with self.assertRaisesRegexp(Exception, 'Some content ids are not integers:'):
            self.contentid_validator._parse_content_id()

    def test_parse_content_id_invalid_integers(self):
        """Non-numeric entries are rejected."""
        self.contentid_validator.content_id = '1, 2, a, x,'
        with self.assertRaisesRegexp(Exception, 'Some content ids are not integers:'):
            self.contentid_validator._parse_content_id()

    @patch('gppylib.operations.persistent_rebuild.ValidateContentID._validate_content_id', return_value=[1, 2, 3])
    def test_validate_with_only_content_id(self, mock1):
        """validate() delegates to _validate_content_id for --contentid input."""
        self.contentid_validator.content_id = '1, 2, 3'
        self.contentid_validator.contentid_file = None
        self.assertEqual([1, 2, 3], self.contentid_validator.validate())

    @patch('gppylib.operations.persistent_rebuild.ValidateContentID._validate_content_id', side_effect=Exception('ERROR'))
    def test_validate_with_only_content_id_with_error(self, mock1):
        """Errors from _validate_content_id propagate out of validate()."""
        self.contentid_validator.content_id = '1, 2, 3'
        self.contentid_validator.contentid_file = None
        with self.assertRaisesRegexp(Exception, 'ERROR'):
            self.contentid_validator.validate()

    @patch('gppylib.operations.persistent_rebuild.ValidateContentID._validate_contentid_file', return_value=[1, 2, 3])
    @patch('gppylib.operations.persistent_rebuild.ValidateContentID._validate_content_id', return_value=[1, 2, 3])
    def test_validate_with_only_content_id_file(self, mock1, mock2):
        """validate() reads the contentid file when only --contentid-file is given."""
        self.contentid_validator.contentid_file = '/tmp/f1'
        self.contentid_validator.content_id = None
        self.assertEqual([1, 2, 3], self.contentid_validator.validate())

    @patch('gppylib.operations.persistent_rebuild.ValidateContentID._validate_contentid_file', side_effect=Exception('ERROR'))
    def test_validate_with_only_content_id_file_with_error(self, mock1):
        """Errors from _validate_contentid_file propagate out of validate()."""
        self.contentid_validator.contentid_file = '/tmp/f1'
        self.contentid_validator.content_id = None
        with self.assertRaisesRegexp(Exception, 'ERROR'):
            self.contentid_validator.validate()
class GetDbIdInfoTestCase(unittest.TestCase):
    """Unit tests for GetDbIdInfo: filespace->tablespace and
    tablespace->database-oid maps, plus get_info() segment selection.

    The six-segment Mock scaffolding that was copy-pasted across every
    get_info test is factored into _make_gparray/_make_info_seg.
    """

    def setUp(self):
        self.dbid_info = GetDbIdInfo(gparray=None, content_id=None)

    def _make_gparray(self, make_seg, count=6):
        """Return a Mock gparray whose getDbList() yields count make_seg(i) mocks."""
        gparray = Mock()
        gparray.getDbList = Mock()
        gparray.getDbList.return_value = [make_seg(i) for i in range(count)]
        return gparray

    def _make_info_seg(self, i, content_id, status, filespaces, is_down=False):
        """Build one mock segment: dbid i+2, port 5001+i, primary for i < 3,
        host 'mdw1', with the given content id, status and filespace map."""
        m = Mock()
        m.getSegmentContentId.return_value = content_id
        m.getSegmentDbId.return_value = i + 2
        m.getSegmentRole.return_value = 'p' if i < 3 else 'm'
        m.getSegmentStatus.return_value = status
        m.getSegmentHostName.return_value = 'mdw1'
        m.getSegmentPort.return_value = 5001 + i
        m.getSegmentFilespaces.return_value = filespaces
        m.isSegmentDown.return_value = is_down
        return m

    @patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', return_value=[(1000, '2000'), (1001, '2001 2002')])
    @patch('gppylib.operations.persistent_rebuild.dbconn.connect')
    @patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
    def test_get_filespace_to_tablespace_map(self, mock1, mock2, mock3):
        """Space-separated tablespace oids split into per-filespace lists."""
        seg = Mock()
        seg.getSegmentFilespaces.return_value = {1000: '/tmp/fs1', 1001: '/tmp/fs2'}
        self.assertEqual({1000: [2000], 1001: [2001, 2002]}, self.dbid_info._get_filespace_to_tablespace_map(seg))

    @patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', return_value=[])
    @patch('gppylib.operations.persistent_rebuild.dbconn.connect')
    @patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
    def test_get_filespace_to_tablespace_map_empty_filespaces(self, mock1, mock2, mock3):
        """No filespaces yields an empty map."""
        seg = Mock()
        seg.getSegmentFilespaces.return_value = {}
        self.assertEqual({}, self.dbid_info._get_filespace_to_tablespace_map(seg))

    @patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', return_value=[(1000, '2000'), (1001, '2001 2002')])
    @patch('gppylib.operations.persistent_rebuild.dbconn.connect')
    @patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
    def test_get_tablespace_to_dboid_map(self, mock1, mock2, mock3):
        """Space-separated database oids split into per-tablespace lists."""
        self.assertEqual({1000: [2000], 1001: [2001, 2002]}, self.dbid_info._get_tablespace_to_dboid_map([1000, 1001]))

    @patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', return_value=[])
    @patch('gppylib.operations.persistent_rebuild.dbconn.connect')
    @patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
    def test_get_tablespace_to_dboid_map_empty_tablespaces(self, mock1, mock2, mock3):
        """No tablespaces yields an empty map."""
        self.assertEqual({}, self.dbid_info._get_tablespace_to_dboid_map([]))

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_no_matching_content_id(self, mock1, mock2):
        """Content ids not in the cluster produce no DbIdInfo entries."""
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'u', {}))
        self.dbid_info.content_id = [11, 12]
        self.assertEqual([], self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={1000: [2000, 2002], 1001: [2001, 2003]})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map',
           return_value={2000: [12345], 2001: [2345, 4567], 2002: [8765, 4634], 2003: [3456]})
    def test_get_info_with_single_matching_content_id(self, mock1, mock2):
        """One matching primary yields one fully-populated DbIdInfo."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'u', fs))
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {1000: [2000, 2002], 1001: [2001, 2003]},
                             {2000: [12345], 2001: [2345, 4567], 2002: [8765, 4634], 2003: [3456]}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_no_filespaces(self, mock1, mock2):
        """Segments without filespaces produce empty maps in DbIdInfo."""
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'u', {}))
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_no_tablespaces(self, mock1, mock2):
        """Filespaces without tablespaces leave the tablespace maps empty."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'u', fs))
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_down_segments(self, mock1, mock2):
        """A down segment for an unrelated content id does not affect results."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'd' if i == 3 else 'u', fs))
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_segment_in_ct(self, mock1, mock2):
        """A change-tracking ('c') segment does not affect results."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'c' if i == 3 else 'u', fs))
        gparray.isSegmentDown = Mock()
        gparray.isSegmentDown.return_value = False
        self.dbid_info.gparray = gparray
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_content_down(self, mock1, mock2):
        """A down status on the matching content's primary is still reported."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, i + 1, 'd' if i == 3 or i == 0 else 'u', fs))
        self.dbid_info.content_id = [1, 10]
        expected = [DbIdInfo(1, 'p', 2, 5001, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())

    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_filespace_to_tablespace_map', return_value={})
    @patch('gppylib.operations.persistent_rebuild.GetDbIdInfo._get_tablespace_to_dboid_map', return_value={})
    def test_get_info_with_single_matching_content_id_and_mirror_down(self, mock1, mock2):
        """Down mirrors (i >= 3) are excluded; the primary is still reported."""
        fs = {1000: '/tmp/f1', 1001: '/tmp/f2'}
        self.dbid_info.gparray = self._make_gparray(
            lambda i: self._make_info_seg(i, (i + 1) % 3, 'd' if i >= 3 else 'u', fs,
                                          is_down=True if i >= 3 else False))
        self.dbid_info.content_id = [2, 10]
        expected = [DbIdInfo(2, 'p', 3, 5002, 'mdw1', {1000: '/tmp/f1', 1001: '/tmp/f2'}, {}, {}, False)]
        self.assertEqual(expected, self.dbid_info.get_info())
class BackupPersistentTableFilesTestCase(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        """Lay out fake persistent-table trees under /tmp/p1 (custom
        filespace) and /tmp/p2 (default filespace), plus a pre-made backup
        tree for timestamp 20140604101010.

        Creation is best effort: any OSError (e.g. leftovers from a
        previous run) silently aborts the remaining steps.
        """
        backup_name = 'pt_rebuild_bk_20140604101010'
        subdirs = [('/tmp/p1', '2000/123'),
                   ('/tmp/p2', 'base/234'),
                   ('/tmp/p2', 'global'),
                   ('/tmp/p2', 'pg_xlog'),
                   ('/tmp/p2', 'pg_clog'),
                   ('/tmp/p2', 'pg_distributedlog')]
        rel_files = [('/tmp/p1', '2000/123/5094'),
                     ('/tmp/p1', '2000/123/5094.1'),
                     ('/tmp/p1', '2000/123/5095'),
                     ('/tmp/p2', 'base/234/5094'),
                     ('/tmp/p2', 'base/234/5095'),
                     ('/tmp/p2', 'global/pg_control'),
                     ('/tmp/p2', 'global/5090'),
                     ('/tmp/p2', 'global/5091'),
                     ('/tmp/p2', 'global/5092'),
                     ('/tmp/p2', 'global/5093'),
                     ('/tmp/p2', 'pg_xlog/0000'),
                     ('/tmp/p2', 'pg_clog/0000'),
                     ('/tmp/p2', 'pg_distributedlog/000')]
        try:
            # source directories, plus an intentionally empty one
            for root, sub in subdirs:
                os.makedirs(os.path.join(root, sub))
            os.makedirs(os.path.join('/tmp/p1', 'empty'))
            # source files
            for root, rel in rel_files:
                open(os.path.join(root, rel), 'w').close()
            # mirrored backup directories and files
            for root, sub in subdirs:
                os.makedirs(os.path.join(root, backup_name, sub))
            for root, rel in rel_files:
                open(os.path.join(root, backup_name, rel), 'w').close()
        except OSError:
            pass
@classmethod
def tearDownClass(cls):
try:
shutil.rmtree('/tmp/p1')
shutil.rmtree('/tmp/p2')
except Exception:
pass
def setUp(self):
self.backup_persistent_files = BackupPersistentTableFiles(dbid_info=None,
perdb_pt_filenames={2:{17088L:['5094', '5095'],1L: [5094L, 5095L]},
3:{17088L:['5094', '5095'],1L: [5094L, 5095L]}},
global_pt_filenames={2: ['5090', '5091', '5092', '5093'],
3: ['5090', '5091', '5092', '5093']},
timestamp='20140604101010')
@patch('os.makedirs')
def test_copy_files(self, mock1):
src_ptfiles = ['/tmp/global/5090', '/tmp/global/5091']
dst_ptfiles = ['/tmp1/global/5090', '/tmp1/global/5091']
self.backup_persistent_files.pool = Mock()
content = -1
actionType = 'backup'
m = Mock()
m.validate.return_value = {'/tmp/global/5090': 'abdfe', '/tmp/global/5091': 'abdfe',
'/tmp1/global/5090': 'abdfe', '/tmp1/global/5091': 'abdfe'}
self.backup_persistent_files.md5_validator = m
self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
@patch('os.makedirs')
@patch('gppylib.operations.persistent_rebuild.Command.run')
def test_copy_files_with_restore(self, mock1, mock2):
src_ptfiles = ['/tmp/global/5090', '/tmp/global/5091']
dst_ptfiles = ['/tmp1/global/5090', '/tmp1/global/5091']
self.backup_persistent_files.pool = Mock()
m = Mock()
content = -1
actionType = 'restore'
m.validate.return_value = {'/tmp/global/5090': 'abdfe', '/tmp/global/5091': 'abdfe',
'/tmp1/global/5090': 'abdfe', '/tmp1/global/5091': 'abdfe'}
self.backup_persistent_files.md5_validator = m
self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
@patch('os.makedirs')
def test_copy_files_without_errors_with_no_files(self, mock1):
src_ptfiles = []
dst_ptfiles = []
self.backup_persistent_files.pool = Mock()
m = Mock()
content = -1
actionType = 'backup'
m.validate.side_effect = [{}, {}]
self.backup_persistent_files.md5_validator = m
self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
@patch('os.makedirs')
@patch('gppylib.operations.persistent_rebuild.Command.run')
def test_copy_files_without_errors_with_no_files_with_restore(self, mock1, mock2):
src_ptfiles = []
dst_ptfiles = []
self.backup_persistent_files.pool = Mock()
m = Mock()
content = -1
actionType = 'restore'
m.validate.side_effect = [{}, {}]
self.backup_persistent_files.md5_validator = m
self.backup_persistent_files.restore=True
self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
    @patch('os.makedirs')
    def test_copy_files_with_md5_mismatch(self, mock1):
        """_copy_files raises when a source md5 differs from its destination.

        Only 5090 mismatches ('asdfads' vs 'asdfadsf'); 5091 agrees, so the
        error message must list only the 5090 pair.
        """
        src_ptfiles = ['/tmp/global/5090', '/tmp/global/5091']
        dst_ptfiles = ['/tmp1/global/5090', '/tmp1/global/5091']
        self.backup_persistent_files.pool = Mock()
        m = Mock()
        content = -1
        actionType = 'backup'
        m.validate.return_value = {'/tmp/global/5090': 'asdfads', '/tmp/global/5091': 'abdfe',
                                   '/tmp1/global/5090': 'asdfadsf', '/tmp1/global/5091': 'abdfe'}
        self.backup_persistent_files.md5_validator = m
        # NOTE: the expected regex deliberately uses a backslash-newline
        # continuation inside the string literal; the continuation line must
        # stay at column 0 so no stray spaces enter the pattern.
        with self.assertRaisesRegexp(Exception, 'MD5 sums do not match! Expected md5 = "{\'/tmp/global/5090\': \'asdfads\'}",\
but actual md5 = "{\'/tmp1/global/5090\': \'asdfadsf\'}"'):
            self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
@patch('os.makedirs')
@patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.validate', return_value={'5090': 'sdfadsf', '5091': 'sdfadsf'})
def test_copy_files_with_errors(self, mock1, mock2):
src_ptfiles = ['/tmp/global/5090', '/tmp/global/5091']
dst_ptfiles = ['/tmp1/global/5090', '/tmp1/global/5091']
m = Mock()
content = -1
actionType = 'backup'
m.check_results.side_effect = ExecutionError('Error !!!', Mock())
self.backup_persistent_files.pool = m
m.validate.return_value = {'5090': 'sdfadsf', '5091': 'sdfadsf'}
self.backup_persistent_files.md5_validator = m
with self.assertRaisesRegexp(ExecutionError, 'Error !!!'):
self.backup_persistent_files._copy_files(src_ptfiles, dst_ptfiles, content, actionType)
def test_build_PT_src_dest_pairs_filelist_None(self):
src_dir = ''
dest_dir = ''
file_list = None
self.assertEqual((None, None), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
def test_build_PT_src_dest_pairs_filelist_Empty(self):
src_dir = ''
dest_dir = ''
file_list = []
self.assertEqual((None, None), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
def test_build_PT_src_dest_pairs_non_exist_src_dir(self):
src_dir = 'tmp'
dest_dir = '/tmp'
file_list = ['5090']
self.assertEqual((None, None), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
def test_build_PT_src_dest_pairs_empty_src_dir(self):
src_dir = '/tmp/p1/empty'
dest_dir = '/tmp/p1/empty'
file_list = ['5090']
self.assertEqual((None, None), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
def test_build_PT_src_dest_pairs_with_file_missed(self):
src_dir = '/tmp/p1/'
dest_dir = '/tmp/p1/'
file_list = ['5555']
self.assertEqual((None, None), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
def test_build_PT_src_dest_pairs_with_extended_file_exist(self):
src_dir = '/tmp/p1/2000/123'
dest_dir = '/tmp/p1/pt_rebuild_bk_20140604101010/2000/123'
file_list = ['5094']
src_files = ['/tmp/p1/2000/123/5094', '/tmp/p1/2000/123/5094.1']
dest_files = ['/tmp/p1/pt_rebuild_bk_20140604101010/2000/123/5094', '/tmp/p1/pt_rebuild_bk_20140604101010/2000/123/5094.1']
self.assertEqual((src_files, dest_files), self.backup_persistent_files.build_PT_src_dest_pairs(src_dir, dest_dir, file_list))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_global_pt_files(self, mock1):
        """Backing up global PT files returns None when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_global_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_PT_src_dest_pairs', return_value=[None, None])
    def test_copy_global_pt_files_with_restore_with_failure(self, mock1):
        """Restore raises when no global src/dest pairs can be built."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Missing global persistent files from source directory.'):
            self.backup_persistent_files._copy_global_pt_files(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_PT_src_dest_pairs', return_value=[None, None])
    def test_copy_global_pt_files_without_restore_with_failure(self, mock1):
        """Backup raises when no global src/dest pairs can be built."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Missing global persistent files from source directory.'):
            self.backup_persistent_files._copy_global_pt_files()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files',
           side_effect=[Mock(), Exception('Error while backing up files')])
    def test_copy_global_pt_files_with_errors(self, mock1):
        """A copy failure on the second dbid surfaces as a backup failure."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Backup of global persistent files failed'):
            self.backup_persistent_files._copy_global_pt_files()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_global_pt_files_without_errors(self, mock1):
        """Backup of global PT files succeeds for every dbid when copies succeed."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_global_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_global_pt_files_with_restore_without_errors(self, mock1):
        """Restore of global PT files succeeds when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_global_pt_files(restore=True))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files',
           side_effect=[Mock(), Exception('Error while backing up files')])
    def test_copy_global_pt_files_with_restore_with_errors(self, mock1):
        """A copy failure during restore surfaces as a restore failure."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Restore of global persistent files failed'):
            self.backup_persistent_files._copy_global_pt_files(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_per_db_pt_files(self, mock1):
        """Backing up per-database PT files returns None when copies are mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_per_db_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_PT_src_dest_pairs', return_value=[None, None])
    def test_copy_per_db_pt_files_with_restore_with_failure(self, mock1):
        """Restore raises when no per-database src/dest pairs can be built."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Missing per-database persistent files from source directory.'):
            self.backup_persistent_files._copy_per_db_pt_files(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_PT_src_dest_pairs', return_value=[None, None])
    def test_copy_per_db_pt_files_without_restore_with_failure(self, mock1):
        """Backup raises when no per-database src/dest pairs can be built."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Missing per-database persistent files from source directory.'):
            self.backup_persistent_files._copy_per_db_pt_files()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files',
           side_effect=[Mock(), Exception('Error while backing up files')])
    def test_copy_per_db_pt_files_with_errors(self, mock1):
        """A copy failure on the second dbid surfaces as a per-database backup failure."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Backup of per database persistent files failed'):
            self.backup_persistent_files._copy_per_db_pt_files()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_per_db_pt_files_without_errors(self, mock1):
        """Backup of per-database PT files succeeds when copies are mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_per_db_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_per_db_pt_files_with_unused_filespace(self, mock1):
        """A filespace (1000) with no tablespace entry is tolerated during backup."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_per_db_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_per_db_pt_files_with_unused_tablespace(self, mock1):
        """A tablespace (2000) with no database entry is tolerated during backup."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_per_db_pt_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_per_db_pt_files_with_restore_without_errors(self, mock1):
        """Restore of per-database PT files succeeds when copies are mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_per_db_pt_files(restore=True))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_Xactlog_files_without_restore_without_errors(self, mock1):
        """Backing up transaction logs succeeds when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_Xactlog_files())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_Xactlog_src_dest_pairs', return_value=[[],[]])
    def test_copy_Xactlog_files_without_restore_with_failure(self, mock1):
        """Backup raises when the xlog src/dest pair lists come back empty."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'should not be empty'):
            self.backup_persistent_files._copy_Xactlog_files()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles.build_Xactlog_src_dest_pairs', return_value=[[],[]])
    def test_copy_Xactlog_files_with_restore_with_failure(self, mock1):
        """Restore raises when the xlog src/dest pair lists come back empty."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'should not be empty'):
            self.backup_persistent_files._copy_Xactlog_files(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_Xactlog_files_with_restore_without_errors(self, mock1):
        """Restoring transaction logs succeeds when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_Xactlog_files(restore=True))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_pg_control_files_without_restore_without_errors(self, mock1):
        """Backing up pg_control succeeds when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_pg_control_file())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    def test_copy_pg_control_files_with_restore_without_errors(self, mock1):
        """Restoring pg_control succeeds when the file copy is mocked out."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files._copy_pg_control_file(restore=True))
    @patch('os.path.isfile', return_value=False)
    def test_copy_pg_control_files_without_restore_with_failure(self, mock1):
        """Backup raises when pg_control is absent from the source directory."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Global pg_control file is missing from source directory'):
            self.backup_persistent_files._copy_pg_control_file()
    @patch('os.path.isfile', return_value=False)
    def test_copy_pg_control_files_with_restore_with_failure(self, mock1):
        """Restore raises when pg_control is absent from the backup directory."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Global pg_control file is missing from backup directory'):
            self.backup_persistent_files._copy_pg_control_file(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files',
           side_effect=[Mock(), Mock(), Mock(), Exception('Error while backing up files')])
    def test_copy_per_db_pt_files_with_restore_with_errors(self, mock1):
        """A copy failure on the fourth copy surfaces as a per-database restore failure."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Restore of per database persistent files failed'):
            self.backup_persistent_files._copy_per_db_pt_files(restore=True)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_files')
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_without_errors(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7, mock8):
        """restore() completes when every underlying copy step succeeds."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        self.assertEqual(None, self.backup_persistent_files.restore())
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_global_file_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() propagates a failure from the global PT file step."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_per_db_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() propagates a failure from the per-database PT file step."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_xlog_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() propagates a failure from the transaction-log step."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_pg_control_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() propagates a failure from the pg_control step."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_global_and_per_db_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() still raises when both PT copy steps fail."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_global_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_per_db_pt_files', return_value=None)
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_Xactlog_files', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.BackupPersistentTableFiles._copy_pg_control_file', side_effect=Exception('Error'))
    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.Command')
    @patch('gppylib.operations.persistent_rebuild.ValidateMD5Sum.init')
    def test_restore_with_xlog_and_pg_control_bkup_error(self, mock1, mock2, mock3, mock4, mock5, mock6, mock7):
        """restore() still raises when both the xlog and pg_control steps fail."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.backup_persistent_files.dbid_info = [d1, d2]
        with self.assertRaisesRegexp(Exception, 'Error'):
            self.backup_persistent_files.restore()
class RebuildTableTestCase(unittest.TestCase):
    """Unit tests for RebuildTable: dbid selection and the rebuild driver.

    Fix: replaced the deprecated ``assertEquals`` aliases with ``assertEqual``
    (same behavior in both Python 2.7 and 3). The six near-identical inline
    mock-GpArray constructions are deduplicated into ``_make_mock_gparray``.
    """

    def setUp(self):
        self.rebuild_table = RebuildTable(dbid_info=None)

    def _make_mock_gparray(self, roles=None, statuses=None, modes=None):
        """Return a mock GpArray with two segments (content ids 1 and 2).

        roles/statuses/modes are optional per-segment value lists (index i is
        segment i). Defaults match the original inline mocks: role 'p',
        status 'u', and getSegmentMode left unconfigured unless modes is given.
        """
        mock_segs = []
        for i in range(2):
            seg = Mock()
            seg.getSegmentContentId.return_value = i + 1
            seg.getSegmentRole.return_value = roles[i] if roles else 'p'
            seg.getSegmentDbId.return_value = i
            seg.getSegmentPort.return_value = 5000 + i
            seg.getSegmentHostName.return_value = 'mdw%d' % (i + 1)
            seg.getSegmentStatus.return_value = statuses[i] if statuses else 'u'
            if modes is not None:
                seg.getSegmentMode.return_value = modes[i]
            mock_segs.append(seg)
        gparray = Mock()
        gparray.getDbList.return_value = mock_segs
        return gparray

    def test_initializer_captures_values(self):
        """The constructor stores all of its arguments unmodified."""
        self.rebuild_table = RebuildTable(dbid_info="abcd", has_mirrors="efg", batch_size=123, backup_dir=456)
        self.assertEqual("abcd", self.rebuild_table.dbid_info)
        self.assertEqual("efg", self.rebuild_table.has_mirrors)
        self.assertEqual(123, self.rebuild_table.batch_size)
        self.assertEqual(456, self.rebuild_table.backup_dir)

    def test_get_valid_dbids(self):
        """Up primaries whose content ids are requested are returned as dbids."""
        self.rebuild_table.gparray = self._make_mock_gparray()
        self.assertEqual([0, 1], self.rebuild_table._get_valid_dbids([1, 2]))

    def test_get_valid_dbids_empty_contents(self):
        """An empty content id list selects no dbids."""
        self.rebuild_table.gparray = self._make_mock_gparray()
        self.assertEqual([], self.rebuild_table._get_valid_dbids([]))

    def test_get_valid_dbids_non_matching_content_ids(self):
        """Content ids absent from the array select no dbids."""
        self.rebuild_table.gparray = self._make_mock_gparray()
        self.assertEqual([], self.rebuild_table._get_valid_dbids([3, 4, 5]))

    def test_get_valid_dbids_content_ids_down(self):
        """A down segment (status 'd') aborts the rebuild."""
        self.rebuild_table.gparray = self._make_mock_gparray(statuses=['d', 'u'])
        with self.assertRaisesRegexp(Exception, 'Segment .* is down. Cannot continue with persistent table rebuild'):
            self.rebuild_table._get_valid_dbids([1, 2, 3])

    def test_get_valid_dbids_content_ids_resync(self):
        """A segment in resync (mode 'r') aborts the rebuild."""
        self.rebuild_table.gparray = self._make_mock_gparray(modes=['s', 'r'])
        with self.assertRaisesRegexp(Exception, 'Segment .* is in resync. Cannot continue with persistent table rebuild'):
            self.rebuild_table._get_valid_dbids([1, 2, 3])

    @patch('gppylib.operations.persistent_rebuild.ValidatePersistentBackup.validate_backups', return_value=Mock())
    def test_get_valid_dbids_content_ids_are_mirrors(self, mock1):
        """Mirror segments are skipped; only the primary's dbid is returned."""
        self.rebuild_table.gparray = self._make_mock_gparray(roles=['m', 'p'])
        self.assertEqual([1], self.rebuild_table._get_valid_dbids([1, 2, 3]))

    @patch('gppylib.operations.persistent_rebuild.GpArray')
    @patch('gppylib.operations.persistent_rebuild.RebuildTable._validate_backups')
    @patch('gppylib.operations.persistent_rebuild.RebuildTable._get_valid_dbids', return_value=[1, 2, 3])
    @patch('gppylib.operations.persistent_rebuild.ParallelOperation.run')
    def test_rebuild(self, mock1, mock2, mock3, mock4):
        """rebuild() reports every dbid as a success when no remote op fails."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.rebuild_table.dbid_info = [d1, d2]
        expected_success = [d1, d2]
        expected_failure = []
        self.assertEqual((expected_success, expected_failure), self.rebuild_table.rebuild())

    @patch('gppylib.operations.persistent_rebuild.GpArray')
    @patch('gppylib.operations.persistent_rebuild.RebuildTable._validate_backups')
    @patch('gppylib.operations.persistent_rebuild.RebuildTable._get_valid_dbids', return_value=[1, 2, 3])
    @patch('gppylib.operations.persistent_rebuild.ParallelOperation.run')
    @patch('gppylib.operations.persistent_rebuild.RemoteOperation.get_ret', side_effect=[Mock(), Exception('Error')])
    def test_rebuild_with_errors(self, mock1, mock2, mock3, mock4, mock5):
        """A remote-operation failure puts that dbid on the failure list."""
        d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        self.rebuild_table.dbid_info = [d1, d2]
        expected_success = [d1]
        expected_failure = [(d2, 'Error')]
        self.assertEqual((expected_success, expected_failure), self.rebuild_table.rebuild())
class ValidatePersistentBackupTestCase(unittest.TestCase):
    """Unit tests for ValidatePersistentBackup result processing and validation."""

    def setUp(self):
        self.validate_persistent_backup = ValidatePersistentBackup(dbid_info=None, timestamp='20140605101010')

    def _make_find_cmd(self, rc, stdout, cmd_str):
        """Build a mock completed 'find' command reporting the given result."""
        cmd = Mock()
        cmd.get_results.return_value = CommandResult(rc, stdout, '', True, False)
        cmd.cmdStr = cmd_str
        return cmd

    def _make_pool(self, *cmds):
        """Build a mock worker pool whose completed items are *cmds*."""
        pool = Mock()
        pool.getCompletedItems.return_value = list(cmds)
        return pool

    def _dbid_infos(self):
        """The standard pair of DbIdInfo fixtures used by these tests."""
        info1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        info2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
        return [info1, info2]

    def test_process_results(self):
        """Two successful find commands that located backups validate cleanly."""
        cmd_a = self._make_find_cmd(0, '/tmp/f1', "find /tmp/f1 -name pt_rebuild_bk_")
        cmd_b = self._make_find_cmd(0, '/tmp/f1', "find /tmp/f1 -name pt_rebuild_bk_")
        self.validate_persistent_backup._process_results(self._dbid_infos()[0], self._make_pool(cmd_a, cmd_b))

    def test_process_results_with_errors(self):
        """A non-zero find return code fails validation."""
        cmd_a = self._make_find_cmd(0, '/tmp/f1', "find /tmp/f1 -name pt_rebuild_bk_")
        cmd_b = self._make_find_cmd(1, '/tmp/f1', "find /tmp/f1 -name pt_rebuild_bk_")
        with self.assertRaisesRegexp(Exception, 'Failed to validate backups'):
            self.validate_persistent_backup._process_results(self._dbid_infos()[0], self._make_pool(cmd_a, cmd_b))

    def test_process_results_with_missing_backup(self):
        """A find that produced no output (no backup found) fails validation."""
        cmd_a = self._make_find_cmd(0, '/tmp/f1', "find /tmp/f1 -name pt_rebuild_bk_")
        cmd_b = self._make_find_cmd(0, '', "find /foo/bar -name pt_rebuild_bk_")
        with self.assertRaisesRegexp(Exception, 'Failed to validate backups'):
            self.validate_persistent_backup._process_results(self._dbid_infos()[0], self._make_pool(cmd_a, cmd_b))

    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    def test_validate(self, mock1):
        """validate_backups runs cleanly when the worker pool is mocked out."""
        self.validate_persistent_backup.dbid_info = self._dbid_infos()
        self.validate_persistent_backup.validate_backups()

    @patch('gppylib.operations.persistent_rebuild.WorkerPool')
    @patch('gppylib.operations.persistent_rebuild.ValidatePersistentBackup._process_results', side_effect=Exception('Failed to validate backups'))
    def test_validate_error_in_workerpool(self, mock1, mock2):
        """A per-host processing error is propagated out of validate_backups."""
        self.validate_persistent_backup.dbid_info = self._dbid_infos()
        with self.assertRaisesRegexp(Exception, 'Failed to validate backups'):
            self.validate_persistent_backup.validate_backups()
class RunBackupRestoreTestCase(unittest.TestCase):
def setUp(self):
self.run_backup_restore = RunBackupRestore(dbid_info=None, timestamp=None)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results')
def test_run_backup_restore(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results', side_effect=Exception('ERROR'))
def test_run_backup_restore_with_errors(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
with self.assertRaisesRegexp(Exception, 'ERROR'):
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results')
def test_run_backup_restore_with_restore(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map, restore=True)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results', side_effect=Exception('ERROR'))
def test_run_backup_restore_with_errors_with_restore(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
with self.assertRaisesRegexp(Exception, 'ERROR'):
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map, restore=True)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results')
def test_run_backup_restore_with_validate(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map, validate_backups=True)
@patch('gppylib.operations.persistent_rebuild.WorkerPool')
@patch('gppylib.operations.persistent_rebuild.RunBackupRestore._process_results', side_effect=Exception('ERROR'))
def test_run_backup_restore_with_errors_with_validate(self, mock1, mock2):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
host_to_dbid_info_map = {'h1': [d1], 'h2': [d2]}
with self.assertRaisesRegexp(Exception, 'ERROR'):
self.run_backup_restore._run_backup_restore(host_to_dbid_info_map, validate_backups=True)
def test_get_host_to_dbid_info_map(self):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h2', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
expected = {'h1': [d1], 'h2': [d2]}
self.run_backup_restore.dbid_info = [d1, d2]
self.assertEqual(expected, self.run_backup_restore._get_host_to_dbid_info_map())
def test_get_host_to_dbid_info_map_empty(self):
self.run_backup_restore.dbid_info = []
self.assertEqual({}, self.run_backup_restore._get_host_to_dbid_info_map())
def test_get_host_to_dbid_info_map_multiple_entries_per_host(self):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
d2 = DbIdInfo(2, 'p', 3, 5002, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
expected = {'h1': [d1, d2]}
self.run_backup_restore.dbid_info = [d1, d2]
self.assertEqual(expected, self.run_backup_restore._get_host_to_dbid_info_map())
def test_process_results(self):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
m1 = Mock()
m1.get_results.return_value = CommandResult(0, '/tmp/f1', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(0, '/tmp/f1', '', True, False)
m = Mock()
m.getCompletedItems.return_value = [m1, m2]
self.run_backup_restore._process_results(m, 'ERR')
def test_process_results_with_errors(self):
d1 = DbIdInfo(1, 'p', 2, 5001, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
m1 = Mock()
m1.get_results.return_value = CommandResult(0, '/tmp/f1', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(1, 'ERR', '', True, False)
m = Mock()
m.getCompletedItems.return_value = [m1, m2]
with self.assertRaisesRegexp(Exception, 'ERR'):
self.run_backup_restore._process_results(m, 'ERR')
class ValidateMD5SumTestCase(unittest.TestCase):
def setUp(self):
self.validate_md5sum = ValidateMD5Sum(pool=None)
@patch('platform.system', return_value='Darwin')
def test_get_md5_prog_for_osx(self, mock1):
self.assertEqual('md5', self.validate_md5sum._get_md5_prog())
@patch('platform.system', return_value='Linux')
def test_get_md5_prog_for_linux(self, mock1):
self.assertEqual('md5sum', self.validate_md5sum._get_md5_prog())
@patch('platform.system', return_value='Solaris')
def test_get_md5_prog_for_invalid_os(self, mock1):
with self.assertRaisesRegexp(Exception, 'Cannot determine the md5 program since Solaris platform is not supported'):
self.validate_md5sum._get_md5_prog()
@patch('platform.system', return_value='Darwin')
def test_get_md5_results_pat_for_osx(self, mock1):
pat = re.compile('MD5 \((.*)\) = (.*)')
self.assertEqual(pat, self.validate_md5sum._get_md5_results_pat())
@patch('platform.system', return_value='Linux')
def test_get_md5_results_pat_for_osx(self, mock1):
pat = re.compile('(.*) (.*)')
self.assertEqual(pat, self.validate_md5sum._get_md5_results_pat())
@patch('platform.system', return_value='Solaris')
def test_get_md5_results_pat_for_invalid_os(self, mock1):
with self.assertRaisesRegexp(Exception, 'Cannot determine the pattern for results of md5 program since Solaris platform is not supported'):
self.validate_md5sum._get_md5_results_pat()
@patch('platform.system', return_value='Darwin')
def test_process_results_on_osx(self, mock1):
m = Mock()
m1 = Mock()
m1.get_results.return_value = CommandResult(0, 'MD5 (foo) = afsdfasdf', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(0, 'MD5 (foo1) = sdfadsff', '', True, False)
m.getCompletedItems.return_value = [m1, m2]
self.validate_md5sum.pool = m
self.validate_md5sum.md5_results_pat = re.compile('MD5 \((.*)\) = (.*)')
expected = {'foo': 'afsdfasdf', 'foo1': 'sdfadsff'}
self.assertEqual(expected, self.validate_md5sum._process_md5_results())
@patch('platform.system', return_value='Darwin')
def test_process_results_on_osx_with_error(self, mock1):
m = Mock()
m1 = Mock()
m1.get_results.return_value = CommandResult(0, 'MD5 (foo1) = sdfadsff', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(1, '', 'Error', True, False)
m.getCompletedItems.return_value = [m1, m2]
self.validate_md5sum.pool = m
self.validate_md5sum.md5_results_pat = re.compile('MD5 \((.*)\) = (.*)')
with self.assertRaisesRegexp(Exception, 'Unable to calculate md5sum'):
self.validate_md5sum._process_md5_results()
@patch('platform.system', return_value='Linux')
def test_process_results_on_linux(self, mock1):
m = Mock()
m1 = Mock()
m1.get_results.return_value = CommandResult(0, 'afsdfasdf foo', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(0, 'sdfadsff foo1', '', True, False)
m.getCompletedItems.return_value = [m1, m2]
self.validate_md5sum.pool = m
self.validate_md5sum.md5_results_pat = re.compile('(.*) (.*)')
expected = {'foo': 'afsdfasdf', 'foo1': 'sdfadsff'}
self.assertEqual(expected, self.validate_md5sum._process_md5_results())
@patch('platform.system', return_value='Linux')
def test_process_results_on_linux_with_error(self, mock1):
m = Mock()
m1 = Mock()
m1.get_results.return_value = CommandResult(0, 'sdfadsff fo1', '', True, False)
m2 = Mock()
m2.get_results.return_value = CommandResult(1, '', 'Error', True, False)
m.getCompletedItems.return_value = [m1, m2]
self.validate_md5sum.pool = m
self.validate_md5sum.md5_results_pat = re.compile('(.*) (.*)')
with self.assertRaisesRegexp(Exception, 'Unable to calculate md5sum'):
self.validate_md5sum._process_md5_results()
class RebuildPersistentTableTestCase(unittest.TestCase):
def setUp(self):
self.rebuild_persistent_table = RebuildPersistentTables(content_id = None,
contentid_file = None,
backup=None,
restore=None,
batch_size=None,
backup_dir=None)
@patch('gppylib.operations.persistent_rebuild.platform.system', return_value='Linux')
def test_check_platform_linux(self, mock1):
self.rebuild_persistent_table._check_platform()
@patch('gppylib.operations.persistent_rebuild.platform.system', return_value='Solaris')
def test_check_platform_non_linux(self, mock1):
with self.assertRaisesRegexp(Exception, 'This tool is only supported on Linux and OSX platforms'):
self.rebuild_persistent_table._check_platform()
def test_validate_has_mirrors_and_standby(self):
mock_segs = []
for i in range(6):
m = Mock()
m.getSegmentContentId.return_value = i - 1
m.isSegmentMirror.return_value = True if i < 3 else False
mock_segs.append(m)
m = Mock()
m.getDbList.return_value = mock_segs
self.rebuild_persistent_table.gparray = m
self.rebuild_persistent_table._validate_has_mirrors_and_standby()
self.assertTrue(self.rebuild_persistent_table.has_mirrors)
def test_validate_has_mirrors_and_standby_with_no_mirrors(self):
mock_segs = []
for i in range(6):
m = Mock()
m.getSegmentContentId.return_value = i - 1
m.isSegmentMirror.return_value = False
mock_segs.append(m)
m = Mock()
m.getDbList.return_value = mock_segs
self.rebuild_persistent_table.gparray = m
self.rebuild_persistent_table._validate_has_mirrors_and_standby()
self.assertFalse(self.rebuild_persistent_table.has_mirrors)
def test_validate_has_mirrors_and_standby_with_mirrors_for_master(self):
mock_segs = []
for i in range(6):
m = Mock()
m.getSegmentContentId.return_value = i - 1
m.isSegmentMirror.return_value = True if i == -1 else False
mock_segs.append(m)
m = Mock()
m.getDbList.return_value = mock_segs
self.rebuild_persistent_table.gparray = m
self.rebuild_persistent_table._validate_has_mirrors_and_standby()
self.assertTrue(self.rebuild_persistent_table.has_standby)
@patch('gppylib.operations.persistent_rebuild.findCmdInPath', return_value=True)
def test_check_md5_prog(self, mock1):
self.rebuild_persistent_table._check_md5_prog()
@patch('gppylib.operations.persistent_rebuild.findCmdInPath', return_value=False)
def test_check_md5_prog_no_md5(self, mock1):
with self.assertRaisesRegexp(Exception, 'Unable to find md5.* program. Please make sure it is in PATH'):
self.rebuild_persistent_table._check_md5_prog()
@patch('gppylib.operations.persistent_rebuild.GpVersion.local', return_value=GpVersion('4.2.7.3'))
def test_check_database_version(self, mock1):
self.rebuild_persistent_table._check_database_version()
@patch('gppylib.operations.persistent_rebuild.GpVersion.local', return_value=GpVersion('4.0.1.0'))
def test_check_database_version_with_lower_version(self, mock1):
with self.assertRaisesRegexp(Exception, 'This tool is not supported on Greenplum version lower than 4.1.0.0'):
self.rebuild_persistent_table._check_database_version()
@patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', side_effect=[[[5090], [5091], [5092], [5093]], [[123, 'template1']],
[[5094], [16992]]])
@patch('gppylib.operations.persistent_rebuild.dbconn.connect')
@patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
def test_get_persistent_table_filenames(self, mock1, mock2, mock3):
d1 = DbIdInfo(2, 'p', 3, 5002, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
self.rebuild_persistent_table.dbid_info = [d1]
self.rebuild_persistent_table._get_persistent_table_filenames()
expected_global = defaultdict(defaultdict)
expected_files = ['5090', '5091', '5092', '5093']
expected_dbid = {3:expected_files}
expected_global = {'h1':expected_dbid}
expected_perdb_pt_files = defaultdict(defaultdict)
exp_pt_files = ['5094', '16992']
exp_dboid = {123:exp_pt_files}
exp_dbid = {3:exp_dboid}
expected_perdb_pt_file = {'h1':exp_dbid}
from gppylib.operations.persistent_rebuild import GLOBAL_PERSISTENT_FILES, PER_DATABASE_PERSISTENT_FILES
self.assertEqual(GLOBAL_PERSISTENT_FILES, expected_global)
self.assertEqual(PER_DATABASE_PERSISTENT_FILES, expected_perdb_pt_file)
@patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', side_effect=pt_query_side_effect)
@patch('gppylib.operations.persistent_rebuild.dbconn.connect')
@patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
def test_get_persistent_table_filenames_lacking_global_relfilenode(self, mock1, mock2, mock3):
d1 = DbIdInfo(2, 'p', 3, 5002, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
global remove_global_pt_entry
remove_global_pt_entry = True
self.rebuild_persistent_table.dbid_info = [d1]
with self.assertRaisesRegexp(Exception, 'Missing relfilenode entry of global pesistent tables in pg_class'):
self.rebuild_persistent_table._get_persistent_table_filenames()
remove_global_pt_entry = False
@patch('gppylib.operations.persistent_rebuild.dbconn.execSQL', side_effect=pt_query_side_effect)
@patch('gppylib.operations.persistent_rebuild.dbconn.connect')
@patch('gppylib.operations.persistent_rebuild.dbconn.DbURL')
def test_get_persistent_table_filenames_lacking_per_database_relfilenode(self, mock1, mock2, mock3):
d1 = DbIdInfo(2, 'p', 3, 5002, 'h1', {1000: '/tmp/p1', 3052: '/tmp/p2'}, {1000: [2000], 3052: [2001]}, {2000: [123], 2001: [234]}, False)
global remove_per_db_pt_entry
remove_per_db_pt_entry = True
self.rebuild_persistent_table.dbid_info = [d1]
with self.assertRaisesRegexp(Exception, 'Missing relfilenode entry of per database persistent tables in pg_class'):
self.rebuild_persistent_table._get_persistent_table_filenames()
remove_per_db_pt_entry = False
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 6,547,121,164,706,175,000 | 58.952837 | 151 | 0.624095 | false |
braindevices/nanoVanalystLib | nanoVanalystLib/nanoVanalystLib/imgFileUtils.py | 1 | 1373 | # -*- coding: UTF-8 -*-
'''
Created on Mar 12, 2015-1:16:18 PM
@author: Ling Wang<[email protected]>
'''
import cv2, os
from Constants_and_Parameters import *
def loadAsGray(imgFile, cropY=[0,880]):
img = cv2.imread(imgFile)
img = img[cropY[0]:cropY[1],:,:]
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
return gray
def showImg(img, windName, write = False, outDir = None, prefix = None, waitTime = None, flagShowImg = None):
if outDir == None:
outDir = os.environ[K_PoreAnalyzer_TMP]
if waitTime == None:
waitTime = int(os.environ[K_PoreAnalyzer_IMshowWait])
if prefix == None:
prefix = os.environ[K_PoreAnalyzer_IMprefix]
if flagShowImg == None:
flagShowImg = bool(os.environ[K_FLAG_SHOW_IMG])
if flagShowImg:
cv2.imshow(windName, img)
cv2.waitKey(waitTime)
cv2.destroyWindow(windName)
_file = os.path.join(outDir, prefix+ windName+".png")
cv2.imwrite(_file, img)
return _file
import numpy as np
def saveNPZ(structName, structKwds, outDir = None, prefix = None):
if outDir == None:
outDir = os.environ[K_PoreAnalyzer_TMP]
if prefix == None:
prefix = os.environ[K_PoreAnalyzer_IMprefix]
_npzFile = os.path.join(outDir, prefix+ structName)
np.savez_compressed(_npzFile, **structKwds)
return _npzFile | gpl-2.0 | 3,744,444,933,070,862,000 | 28.869565 | 109 | 0.646031 | false |
psychopy/versions | psychopy/monitors/calibData.py | 1 | 4990 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2020 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
"""Data useful for calibrations (Smith-Pokorny cone fundamentals etc...)
"""
from __future__ import absolute_import, print_function
import numpy
# 380 to 780 inclusive with 5nm steps
wavelength_5nm = numpy.arange(380, 785, 5)
juddVosXYZ1976_5nm = numpy.asarray([
[0.003, 0.005, 0.011, 0.021, 0.038, 0.063, 0.100, 0.158, 0.229, 0.281,
0.311, 0.331, 0.333, 0.317, 0.289, 0.260, 0.233, 0.210, 0.175, 0.133,
0.092, 0.057, 0.032, 0.015, 0.005, 0.002, 0.009, 0.029, 0.064, 0.111,
0.167, 0.228, 0.293, 0.362, 0.436, 0.515, 0.597, 0.681, 0.764, 0.844,
0.916, 0.977, 1.023, 1.051, 1.055, 1.036, 0.992, 0.929, 0.843, 0.740,
0.633, 0.534, 0.441, 0.355, 0.279, 0.215, 0.162, 0.118, 0.086, 0.063,
0.046, 0.032, 0.022, 0.016, 0.011, 0.008, 0.006, 0.004, 0.003, 0.002,
0.001, 0.001, 0.001, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000,
0.000],
[0.000, 0.000, 0.001, 0.002, 0.003, 0.005, 0.007, 0.012, 0.018, 0.023,
0.027, 0.033, 0.038, 0.042, 0.047, 0.052, 0.060, 0.073, 0.091, 0.113,
0.139, 0.170, 0.208, 0.258, 0.323, 0.405, 0.503, 0.608, 0.710, 0.795,
0.862, 0.915, 0.954, 0.980, 0.995, 1.000, 0.995, 0.979, 0.952, 0.916,
0.870, 0.816, 0.757, 0.695, 0.631, 0.567, 0.503, 0.442, 0.381, 0.321,
0.265, 0.217, 0.175, 0.138, 0.107, 0.082, 0.061, 0.044, 0.032, 0.023,
0.017, 0.012, 0.008, 0.006, 0.004, 0.003, 0.002, 0.001, 0.001, 0.001,
0.001, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000,
0.000],
[0.012, 0.024, 0.049, 0.095, 0.174, 0.290, 0.461, 0.732, 1.066, 1.315,
1.467, 1.580, 1.617, 1.568, 1.472, 1.374, 1.292, 1.236, 1.114, 0.942,
0.756, 0.586, 0.447, 0.341, 0.264, 0.206, 0.154, 0.109, 0.077, 0.056,
0.041, 0.029, 0.020, 0.013, 0.009, 0.006, 0.004, 0.003, 0.002, 0.002,
0.002, 0.002, 0.001, 0.001, 0.001, 0.001, 0.000, 0.000, 0.000, 0.000,
0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000,
0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000,
0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000, 0.000,
0.000], ])
cones_SmithPokorny = numpy.asarray([
[0.000000, 0.000000, 0.000000, 0.000000, 0.002660, 0.004380, 0.006890,
0.010800, 0.015800, 0.020000, 0.023300, 0.026800, 0.030100, 0.032400,
0.034300, 0.036800, 0.041200, 0.050200, 0.062700, 0.079800, 0.102000,
0.128000, 0.162000, 0.206000, 0.263000, 0.337000, 0.423000, 0.520000,
0.617000, 0.700000, 0.773000, 0.834000, 0.883000, 0.923000, 0.954000,
0.977000, 0.993000, 1.000000, 0.997000, 0.986000, 0.965000, 0.934000,
0.894000, 0.848000, 0.795000, 0.735000, 0.670000, 0.602000, 0.530000,
0.454000, 0.380000, 0.315000, 0.256000, 0.204000, 0.159000, 0.122000,
0.091400, 0.067000, 0.048200, 0.035000, 0.025700, 0.018000, 0.012400,
0.008660, 0.006210, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000],
[0.000000, 0.000000, 0.000000, 0.000000, 0.002820, 0.004750, 0.007670,
0.012400, 0.018900, 0.025400, 0.031700, 0.039500, 0.047700, 0.055500,
0.063500, 0.073100, 0.086000, 0.107000, 0.130000, 0.157000, 0.189000,
0.224000, 0.267000, 0.324000, 0.396000, 0.491000, 0.595000, 0.706000,
0.808000, 0.884000, 0.941000, 0.978000, 0.997000, 0.999000, 0.987000,
0.961000, 0.922000, 0.870000, 0.806000, 0.732000, 0.651000, 0.564000,
0.477000, 0.393000, 0.318000, 0.250000, 0.193000, 0.147000, 0.110000,
0.080800, 0.058300, 0.041800, 0.029600, 0.020700, 0.014400, 0.010100,
0.006990, 0.004850, 0.003330, 0.002330, 0.001640, 0.001110, 0.000750,
0.000517, 0.000368, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000],
[0.000000, 0.000000, 0.000000, 0.000000, 0.108000, 0.179000, 0.285000,
0.453000, 0.659000, 0.813000, 0.908000, 0.977000, 1.000000, 0.970000,
0.910000, 0.850000, 0.799000, 0.775000, 0.689000, 0.582000, 0.468000,
0.362000, 0.276000, 0.212000, 0.164000, 0.128000, 0.095600, 0.067600,
0.047400, 0.034700, 0.025600, 0.018200, 0.012400, 0.008260, 0.005450,
0.003650, 0.002530, 0.001840, 0.001440, 0.001260, 0.001160, 0.001000,
0.000812, 0.000741, 0.000610, 0.000479, 0.000312, 0.000240, 0.000198,
0.000132, 0.000090, 0.000068, 0.000053, 0.000038, 0.000025, 0.000019,
0.000014, 0.000010, 0.000008, 0.000005, 0.000004, 0.000003, 0.000002,
0.000001, 0.000001, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000, 0.000000,
0.000000, 0.000000, 0.000000, 0.000000], ])
| gpl-3.0 | -4,679,395,640,326,204,000 | 61.375 | 79 | 0.614228 | false |
alexvh/pydflatex | pydflatex/processor.py | 1 | 1137 | #!/usr/bin/env python
# coding: UTF-8
from __future__ import division
import latex_logger
class LaTeXError(Exception):
"""
LaTeX Error
"""
class Processor(object):
"""
Models an object with a logger and some options.
General options:
- colour
- debug
"""
def __init__(self, logger=None, options=None):
# storing the options
self.options = self.defaults.copy()
if options is not None:
self.options.update(options)
# setting up the logger
if logger is not None:
self.logger = logger
else:
self.logger = self.setup_logger()
self.logger.debug("%s\ninitialized with\n%s\n" % (type(self), options))
defaults={
'colour': True,
'debug': False,
}
def setup_logger(self, handlers=None):
if self.options['colour']:
LoggerClass = latex_logger.LaTeXLoggerColour
else:
LoggerClass = latex_logger.LaTeXLogger
logger = LoggerClass('pydflatex')
if not handlers:
if not self.options['debug']:
logger.addHandler(latex_logger.std_handler)
else:
logger.addHandler(latex_logger.debug_handler)
else:
for handler in handlers:
logger.addHandler(handler)
return logger
| bsd-3-clause | 5,279,224,763,711,696,000 | 20.055556 | 73 | 0.689534 | false |
unistra/django-rest-framework-fine-permissions | rest_framework_fine_permissions/permissions.py | 1 | 2283 | # -*- coding: utf-8 -*-
""" Provides new permission policies for django-rest-framework
"""
from rest_framework.permissions import DjangoModelPermissions, BasePermission
from django.contrib.contenttypes.models import ContentType
from rest_framework_fine_permissions.models import FilterPermissionModel
from django.core.exceptions import ObjectDoesNotExist
from rest_framework_fine_permissions.serializers import QSerializer
class FullDjangoModelPermissions(DjangoModelPermissions):
"""
The request is authenticated using `django.contrib.auth` permissions.
See: https://docs.djangoproject.com/en/dev/topics/auth/#permissions
It ensures that the user is authenticated, and has the appropriate
`view`/`add`/`change`/`delete` permissions on the model.
This permission can only be applied against view classes that provide a
`.model` or `.queryset` attribute.
"""
perms_map = {
'GET': ['%(app_label)s.view_%(model_name)s'],
'OPTIONS': [],
'HEAD': ['%(app_label)s.view_%(model_name)s'],
'POST': ['%(app_label)s.add_%(model_name)s'],
'PUT': ['%(app_label)s.change_%(model_name)s'],
'PATCH': ['%(app_label)s.change_%(model_name)s'],
'DELETE': ['%(app_label)s.delete_%(model_name)s'],
}
class FilterPermission(BasePermission):
"""
filter permission
"""
def has_object_permission(self, request, view, obj):
"""
check filter permissions
"""
user = request.user
if not user.is_superuser and not user.is_anonymous:
valid = False
try:
ct = ContentType.objects.get_for_model(obj)
fpm = FilterPermissionModel.objects.get(user=user,
content_type=ct)
myq = QSerializer(base64=True).loads(fpm.filter)
try:
myobj = obj.__class__.objects.filter(myq).distinct().get(pk=obj.pk)
if myobj:
valid = True
except ObjectDoesNotExist:
valid = False
except ObjectDoesNotExist:
valid = True
finally:
return valid
else:
return True
| gpl-2.0 | -2,426,721,725,778,089,000 | 33.575758 | 87 | 0.594654 | false |
dparks1134/GenomeTreeTk | genometreetk/markers/lgt_test.py | 1 | 10697 | ###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
import os
import sys
import logging
from biolib.external.fasttree import FastTree
from biolib.common import make_sure_path_exists
from genometreetk.default_values import DefaultValues
from genometreetk.common import create_concatenated_alignment
from genometreetk.jackknife_markers import JackknifeMarkers
import dendropy
class LgtTest(object):
"""Identify gene trees that may have undergone one or more lateral transfer.
Specifically, the following test is applied:
1) infer a jackknifed genome tree by randomly subsampling marker genes under 100 replicates
2) identify all splits with at least a minimum jackknife support value, and
where at least a certain percentage of the taxa fall on each side of the split
3) determine how many of these "well-support, internal" splits are recovered in each gene tree
4) filter gene trees which do not recover a specific percentage of these splits
"""
def __init__(self, cpus):
"""Initialize.
Parameters
----------
cpus : int
Number of cpus to use.
"""
self.logger = logging.getLogger()
self.cpus = cpus
def run(self, genome_ids,
marker_genes,
hmm_model_file,
min_support,
min_per_taxa,
perc_markers_to_jackknife,
gene_tree_dir,
alignment_dir,
output_dir):
"""Identify gene trees which do not recover well-support, internal splits in a jackknifed genome tree.
Parameters
----------
genome_ids : iterable
Genomes of interest.
marker_genes : iterable
Unique ids of marker genes.
hmm_model_file : str
File containing HMMs for each marker gene.
min_support : float
Minimum jackknife support of splits to use during LGT filtering [0, 1].
min_per_taxa : float
Minimum percentage of taxa required to consider a split during LGT filtering [0, 1].
perc_markers_to_jackknife : float
Percentage of taxa to keep during marker jackknifing [0, 1].
gene_tree_dir : str
Directory containing gene trees.
alignment_dir : str
Directory containing multiple sequence alignments.
output_dir : str
Output directory.
"""
output_dir = os.path.join(output_dir, 'jackknife_markers')
make_sure_path_exists(output_dir)
# create concatenated alignment file
self.logger.info('Concatenating alignments.')
concatenated_alignment_file = os.path.join(output_dir, 'concatenated_alignment.faa')
marker_file = os.path.join(output_dir, 'concatenated_markers.tsv')
create_concatenated_alignment(genome_ids, marker_genes, alignment_dir, concatenated_alignment_file, marker_file)
# create concatenated genome tree
self.logger.info('Inferring concatenated genome tree.')
concatenated_tree = os.path.join(output_dir, 'concatenated.tree')
concatenated_tree_log = os.path.join(output_dir, 'concatenated.tree.log')
log_file = os.path.join(output_dir, 'concatenated.fasttree.log')
fast_tree = FastTree(multithreaded=True)
fast_tree.run(concatenated_alignment_file, 'prot', 'wag', concatenated_tree, concatenated_tree_log, log_file)
# calculate jackknife support values
self.logger.info('Calculating jackknife marker support values.')
jackknife_markers = JackknifeMarkers(self.cpus)
jackknife_tree = jackknife_markers.run(concatenated_tree, concatenated_alignment_file, marker_file, perc_markers_to_jackknife, 100, 'wag', output_dir)
# jackknife_tree = os.path.join(output_dir, 'concatenated.jk_markers.tree')
# identify well-support, internal splits
self.logger.info('Identifying well-support, internal splits.')
tree = dendropy.Tree.get_from_path(jackknife_tree, schema='newick', rooting='force-unrooted', preserve_underscores=True)
num_leaves = len(tree.leaf_nodes())
num_internal_nodes = 0
num_major_splits = 0
well_supported_major_splits = 0
splits = []
for node in tree.internal_nodes():
num_internal_nodes += 1
num_node_leaves = len(node.leaf_nodes())
if min(num_node_leaves, num_leaves - num_node_leaves) >= max(min_per_taxa * num_leaves, 2):
num_major_splits += 1
if int(node.label) > (min_support * 100.0):
well_supported_major_splits += 1
split = set([x.taxon.label for x in node.leaf_nodes()])
splits.append((split, node.edge_length))
self.logger.info('# internal nodes: %d' % num_internal_nodes)
self.logger.info('# major splits: %d' % num_major_splits)
self.logger.info('# well-supported, major splits: %d' % well_supported_major_splits)
# filter gene trees that do not recover well-support, internal splits
self.logger.info('Filtering gene trees.')
distances = {}
for i, mg in enumerate(sorted(marker_genes)):
sys.stdout.write('==> Processed %d of %d (%.2f) gene trees.\r' % (i + 1, len(marker_genes), (i + 1) * 100.0 / len(marker_genes)))
sys.stdout.flush()
# read gene tree
f = mg + '.tree'
gene_tree_file = os.path.join(gene_tree_dir, f)
gene_tree = dendropy.Tree.get_from_path(gene_tree_file, schema='newick', rooting='force-unrooted', preserve_underscores=True)
# prune gene tree so each genome is present exactly once
processed_genome_ids = set()
taxa_to_prune = []
for node in gene_tree.leaf_nodes():
genome_id = node.taxon.label.split(DefaultValues.SEQ_CONCAT_CHAR)[0]
if genome_id in processed_genome_ids or genome_id not in genome_ids:
taxa_to_prune.append(node.taxon)
processed_genome_ids.add(genome_id)
gene_tree.prune_taxa(taxa_to_prune)
# rename nodes to contain only genome id
gene_tree_taxa_set = set()
for node in gene_tree.leaf_nodes():
genome_id = node.taxon.label.split(DefaultValues.SEQ_CONCAT_CHAR)[0]
node.taxon.label = genome_id
gene_tree_taxa_set.add(genome_id)
# re-encode the split system over the new taxon namespace
gene_tree.migrate_taxon_namespace(dendropy.TaxonNamespace(gene_tree_taxa_set))
gene_tree.encode_bipartitions()
split_bitmasks = set(b.split_bitmask for b in gene_tree.bipartition_encoding)
# determine number of splits recovered by or compatible with this gene tree
recovered_splits = 0
compatible_splits = 0
compatible_edge_length = 0
for split, edge_length in splits:
common_taxa_labels = split.intersection(gene_tree_taxa_set)
common_split = gene_tree.taxon_namespace.taxa_bitmask(labels=common_taxa_labels)
normalized_split = dendropy.Bipartition.normalize_bitmask(
bitmask=common_split,
fill_bitmask=gene_tree.taxon_namespace.all_taxa_bitmask(),
lowest_relevant_bit=1)
if normalized_split in split_bitmasks:
recovered_splits += 1
if gene_tree.is_compatible_with_bipartition(dendropy.Bipartition(bitmask=normalized_split, is_rooted=False)):
compatible_splits += 1
compatible_edge_length += edge_length
perc_recovered_splits = recovered_splits * 100.0 / len(splits)
perc_comp_splits = compatible_splits * 100.0 / len(splits)
norm_comp_edge_length = float(compatible_edge_length) / sum([s[1] for s in splits])
# calculate weighted Robinson-Foulds (Manhattan) and Felsenstein's Euclidean
# distances to the concatenated genome tree
pruned_tree = tree.clone(depth=2)
pruned_tree.retain_taxa_with_labels(gene_tree.taxon_namespace.labels())
pruned_tree.migrate_taxon_namespace(gene_tree.taxon_namespace)
pruned_tree.encode_bipartitions()
pruned_tree_edge_len = sum([e.length for e in pruned_tree.edges() if e.length])
gene_tree_edge_len = sum([e.length for e in gene_tree.edges() if e.length])
pruned_tree.scale_edges(1.0 / pruned_tree_edge_len)
gene_tree.scale_edges(1.0 / gene_tree_edge_len)
manhattan = dendropy.calculate.treecompare.weighted_robinson_foulds_distance(pruned_tree, gene_tree)
euclidean = dendropy.calculate.treecompare.euclidean_distance(pruned_tree, gene_tree)
distances[mg] = (perc_recovered_splits, perc_comp_splits, norm_comp_edge_length, manhattan, euclidean)
return distances, num_internal_nodes, num_major_splits, well_supported_major_splits
| gpl-3.0 | -828,814,402,445,759,400 | 47.294931 | 158 | 0.581284 | false |
gabrielelanaro/pyquante | PyQuante/MINDO3.py | 1 | 28237 | """\
MINDO3.py: Dewar's MINDO/3 Semiempirical Method
This program is part of the PyQuante quantum chemistry program suite.
Copyright (c) 2004, Richard P. Muller. All Rights Reserved.
PyQuante version 1.2 and later is covered by the modified BSD
license. Please see the file LICENSE that is part of this
distribution.
"""
from Constants import bohr2ang,e2,ev2kcal
from MINDO3_Parameters import axy,Bxy
from math import sqrt,exp,pow
from NumWrap import zeros,eigh,dot,array
from LA2 import mkdens,trace2
from PyQuante.Convergence import SimpleAverager
A0 = bohr2ang
def get_beta0(atnoi, atnoj):
    """Resonance integral beta0 for the coupling between two different atoms."""
    # Bxy is keyed on the (smaller, larger) atomic-number pair.
    lo, hi = sorted((atnoi, atnoj))
    return Bxy[(lo, hi)]
def get_alpha(atnoi, atnoj):
    """Scale-factor parameter alpha for the nuclear repulsion term."""
    # axy is keyed on the (smaller, larger) atomic-number pair.
    lo, hi = sorted((atnoi, atnoj))
    return axy[(lo, hi)]
def get_gamma(atomi, atomj):
    """Two-center Coulomb repulsion that goes to the proper limit at R=0."""
    # dist2 is in bohr^2; convert to Angstrom^2 before mixing with rho.
    r2_ang = atomi.dist2(atomj) * bohr2ang ** 2
    return e2 / sqrt(r2_ang + 0.25 * pow(atomi.rho + atomj.rho, 2))
def get_g(bfi, bfj):
    """One-center Coulomb term g for two basis functions on the same atom.

    Orbital type 0 is s; 1..3 are the p orbitals.
    """
    ti, tj = bfi.type, bfj.type
    assert bfi.atom is bfj.atom, "Incorrect call to get_g"
    atom = bfi.atom
    if ti == 0 and tj == 0:
        return atom.gss        # (ss|ss)
    if 0 in (ti, tj):
        return atom.gsp        # (ss|pp)
    if ti == tj:
        return atom.gpp        # (pp|pp), same p
    return atom.gppp           # (pp|p'p'), different p
def get_h(bfi, bfj):
    """One-center exchange term h for two basis functions on the same atom."""
    ti, tj = bfi.type, bfj.type
    assert bfi.atom is bfj.atom, "Incorrect call to get_h"
    # s/p pairs use hsp; p/p pairs use hppp.
    return bfi.atom.hsp if 0 in (ti, tj) else bfi.atom.hppp
def get_nbf(atoms):
    """Total number of basis functions in an atom list."""
    return sum(atom.nbf for atom in atoms)
def get_F0_old(atoms):
    "Form the zero-iteration (density matrix independent) Fock matrix"
    # Kept for reference; superseded by get_F0 below, which builds the
    # same matrix from a flat basis-function list.
    nbf = get_nbf(atoms)
    nat = len(atoms)
    F0 = zeros((nbf,nbf),'d')
    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        for i in xrange(atomi.nbf):
            bfi = atomi.basis[i]
            # Diagonal: one-center core integral U for this orbital.
            F0[ibf+i,ibf+i] = bfi.u
            jbf = 0
            for jat in xrange(nat):
                atomj = atoms[jat]
                if iat != jat:
                    gammaij = get_gamma(atomi,atomj)
                    betaij = get_beta0(atomi.atno,atomj.atno)
                    # Attraction of this orbital by atom j's core charge.
                    F0[ibf+i,ibf+i] -= gammaij*atomj.Z
                    for j in xrange(atomj.nbf):
                        bfj = atomj.basis[j]
                        Sij = bfi.cgbf.overlap(bfj.cgbf)
                        #Sij = mopac_overlap(bfi,bfj)
                        IPij = bfi.ip+bfj.ip
                        # Off-diagonal resonance term: beta0*(IPi+IPj)*Sij.
                        F0[ibf+i,jbf+j] = betaij*IPij*Sij
                        F0[jbf+j,ibf+i] = F0[ibf+i,jbf+j]
                jbf += atomj.nbf
        ibf += atomi.nbf
    return F0
def get_F0(atoms):
    "Form the zero-iteration (density matrix independent) Fock matrix"
    nbf = get_nbf(atoms)
    nat = len(atoms)
    F0 = zeros((nbf,nbf),'d')

    # Flatten the per-atom basis into one list indexed like the matrix.
    basis = []
    for atom in atoms:
        for bf in atom.basis:
            basis.append(bf)

    # U term
    for i in xrange(nbf):
        F0[i,i] = basis[i].u

    # Nuclear attraction
    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        for jat in xrange(nat):
            atomj = atoms[jat]
            if iat == jat: continue
            gammaij = get_gamma(atomi,atomj)
            for i in xrange(atomi.nbf):
                # Each orbital on atom i is attracted by atom j's core charge.
                F0[ibf+i,ibf+i] -= gammaij*atomj.Z
        ibf += atomi.nbf

    # Off-diagonal term
    for ibf in xrange(nbf):
        bfi = basis[ibf]
        ati = bfi.atom
        atnoi = ati.atno
        for jbf in xrange(ibf):
            bfj = basis[jbf]
            atj = bfj.atom
            atnoj = atj.atno
            # Resonance term: beta0 * (IPi + IPj) * overlap.
            betaij = get_beta0(atnoi,atnoj)
            Sij = bfi.cgbf.overlap(bfj.cgbf)
            IPij = bfi.ip + bfj.ip
            F0[ibf,jbf] = F0[jbf,ibf] = betaij*IPij*Sij
    return F0
def get_F1(atoms,D):
    "One-center corrections to the core fock matrix"
    nbf = get_nbf(atoms)
    nat = len(atoms)
    F1 = zeros((nbf,nbf),'d')
    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        for i in xrange(atomi.nbf):
            bfi = atomi.basis[i]
            gii = get_g(bfi,bfi)
            # qi/qj are orbital charge densities; pij is the bond order.
            qi = D[ibf+i,ibf+i]
            F1[ibf+i,ibf+i] = 0.5*qi*gii

            for j in xrange(atomi.nbf): # ij on same atom
                if j != i:
                    bfj = atomi.basis[j]
                    qj = D[ibf+j,ibf+j]
                    gij = get_g(bfi,bfj)
                    pij = D[ibf+i,ibf+j]
                    hij = get_h(bfi,bfj)
                    # the following 0.5 is something of a kludge to match
                    # the mopac results.
                    F1[ibf+i,ibf+i] += qj*gij - 0.5*qj*hij
                    F1[ibf+i,ibf+j] += 0.5*pij*(3*hij-gij)
        ibf += atomi.nbf
    return F1
def get_F1_open(atoms,Da,Db):
    "One-center corrections to the core fock matrix"
    # Spin-unrestricted version: Da is the density of the spin being built,
    # Db the opposite spin (the caller swaps them for the beta Fock matrix).
    nbf = get_nbf(atoms)
    nat = len(atoms)
    F1 = zeros((nbf,nbf),'d')
    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        for i in xrange(atomi.nbf):
            gii = get_g(atomi.basis[i],atomi.basis[i])
            qib = Db[ibf+i,ibf+i]
            #electron only interacts with the other electron in orb,
            # not with itself
            F1[ibf+i,ibf+i] = qib*gii

            for j in xrange(atomi.nbf): # ij on same atom
                if j != i:
                    qja = Da[ibf+j,ibf+j]
                    qjb = Db[ibf+j,ibf+j]
                    qj = qja+qjb
                    gij = get_g(atomi.basis[i],atomi.basis[j])
                    pija = Da[ibf+i,ibf+j]
                    pijb = Db[ibf+i,ibf+j]
                    pij = pija + pijb
                    hij = get_h(atomi.basis[i],atomi.basis[j])
                    # the following 0.5 is something of a kludge to match
                    # the mopac results.
                    F1[ibf+i,ibf+i] += qj*gij - qja*hij
                    F1[ibf+i,ibf+j] += 2*pij*hij - pija*(hij+gij)
        ibf += atomi.nbf
    return F1
Gij_cache = None  # optional nat x nat cache of the two-center gamma integrals
def get_F2(atoms,D,use_cache=False):
    "Two-center corrections to the core fock matrix"
    global Gij_cache
    nbf = get_nbf(atoms)
    nat = len(atoms)

    F2 = zeros((nbf,nbf),'d')

    # Optionally cache Gamma values
    # NOTE: the cache is filled once and never invalidated, so use_cache
    # is only safe while the geometry/atom list stays fixed.
    if use_cache and Gij_cache is None:
        Gij_cache = zeros((nat,nat),'d')
        for iat in xrange(nat):
            atomi = atoms[iat]
            for jat in xrange(iat):
                atomj = atoms[jat]
                Gij_cache[iat,jat] = get_gamma(atomi,atomj)
                Gij_cache[jat,iat] = Gij_cache[iat,jat]

    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        jbf = 0
        for jat in xrange(nat):
            atomj = atoms[jat]
            if iat != jat:
                if use_cache:
                    gammaij = Gij_cache[iat,jat]
                else:
                    gammaij = get_gamma(atomi,atomj)
                for i in xrange(atomi.nbf):
                    qi = D[ibf+i,ibf+i]
                    qj = 0
                    for j in xrange(atomj.nbf):
                        pij = D[ibf+i,jbf+j]
                        # Off-diagonal exchange-like term.
                        F2[ibf+i,jbf+j] -= 0.25*pij*gammaij
                        F2[jbf+j,ibf+i] = F2[ibf+i,jbf+j]
                        qj += D[jbf+j,jbf+j]
                    # Coulomb interaction of each orbital with the other
                    # atom's total charge.
                    F2[jbf+j,jbf+j] += 0.5*qi*gammaij
                    F2[ibf+i,ibf+i] += 0.5*qj*gammaij
            jbf += atomj.nbf
        ibf += atomi.nbf
    return F2
def get_F2_open(atoms,Da,Db):
    "Two-center corrections to the core fock matrix"
    # Spin-unrestricted version; total charges/bond orders use Da+Db.
    nbf = get_nbf(atoms)
    nat = len(atoms)

    F2 = zeros((nbf,nbf),'d')

    ibf = 0 # bf number of the first bfn on iat
    for iat in xrange(nat):
        atomi = atoms[iat]
        jbf = 0
        for jat in xrange(nat):
            atomj = atoms[jat]
            if iat != jat:
                gammaij = get_gamma(atomi,atomj)
                for i in xrange(atomi.nbf):
                    for j in xrange(atomj.nbf):
                        pija = Da[ibf+i,jbf+j]
                        pijb = Db[ibf+i,jbf+j]
                        pij = pija+pijb
                        qja = Da[jbf+j,jbf+j]
                        qjb = Db[jbf+j,jbf+j]
                        qj = qja+qjb
                        qia = Da[ibf+i,ibf+i]
                        qib = Db[ibf+i,ibf+i]
                        qi = qia+qib
                        F2[ibf+i,jbf+j] -= 0.25*pij*gammaij
                        F2[jbf+j,ibf+i] = F2[ibf+i,jbf+j]
                        # The following 0.5 is a kludge
                        F2[ibf+i,ibf+i] += 0.5*qj*gammaij
                        F2[jbf+j,jbf+j] += 0.5*qi*gammaij
            jbf += atomj.nbf
        ibf += atomi.nbf
    return F2
def get_nel(atoms, charge=0):
    """Number of electrons for the atom list, reduced by the net charge."""
    return sum(atom.Z for atom in atoms) - charge
def get_enuke(atoms):
    "Compute the nuclear repulsion energy"
    enuke = 0
    for i in xrange(len(atoms)):
        atomi = atoms[i]
        for j in xrange(i):
            atomj = atoms[j]
            # dist2 is in bohr^2; convert to Angstrom^2 for the formulas.
            R2 = atomi.dist2(atomj)*bohr2ang**2
            R = sqrt(R2)
            scale = get_scale(atomi.atno,atomj.atno,R)
            gammaij = get_gamma(atomi,atomj)
            # Core-core repulsion: gamma term plus a screened correction
            # that restores the point-charge e2/R behavior at short range.
            enuke_ij = atomi.Z*atomj.Z*gammaij \
                       + abs(atomi.Z*atomj.Z*(e2/R-gammaij)*scale)
            enuke += enuke_ij
            #print "R ",i+1,j+1,enuke_ij,enuke
    return enuke
def get_scale(atnoi, atnoj, R):
    """Screening prefactor for the nuclear repulsion term.

    N-H and O-H pairs use the special alpha*exp(-R) form; every other
    pair uses exp(-alpha*R).
    """
    alpha = get_alpha(atnoi, atnoj)
    pair = {atnoi, atnoj}
    if 1 in pair and (7 in pair or 8 in pair):
        return alpha * exp(-R)
    return exp(-alpha * R)
def get_guess_D(atoms):
    "Average occupation density matrix"
    nbf = get_nbf(atoms)
    D = zeros((nbf,nbf),'d')
    ibf = 0
    for atom in atoms:
        atno = atom.atno
        for i in xrange(atom.nbf):
            if atno == 1:
                # Hydrogen: a single valence orbital holds the core charge.
                D[ibf+i,ibf+i] = atom.Z/1.
            else:
                # Heavier atoms: spread the core charge evenly over the
                # four (s, px, py, pz) valence orbitals.
                D[ibf+i,ibf+i] = atom.Z/4.
        ibf += atom.nbf
    return D
def get_reference_energy(atoms):
    """Reference energy: atomic heats of formation minus atomization energy."""
    eat = sum(atom.Eref for atom in atoms)   # eV
    hfat = sum(atom.Hf for atom in atoms)    # kcal/mol
    return hfat - eat * ev2kcal
def get_open_closed(nel, mult=None):
    """Return (nclosed, nopen) orbital counts for nel electrons.

    Without a multiplicity the electrons are simply paired up and any
    remainder is open.  With a multiplicity, nopen = mult - 1 and the
    remaining electrons must pair up evenly, otherwise an Exception
    is raised.
    """
    if not mult:
        return divmod(nel, 2)
    nopen = mult - 1
    nclosed, leftover = divmod(nel - nopen, 2)
    if leftover:
        raise Exception("Impossible nel, multiplicity %d %d " % (nel, mult))
    return nclosed, nopen
def get_Hf(atoms, Eel):
    """Heat of formation (kcal/mol) from the electronic energy Eel (eV)."""
    total = Eel + get_enuke(atoms)
    return total * ev2kcal + get_reference_energy(atoms)
def scf(atoms,**opts):
    """Driver routine for energy calculations.

    Options: chg (net charge, default 0), mult (spin multiplicity,
    None lets the electron count decide), verbose (print progress).
    Returns the heat of formation in kcal/mol.
    """
    chg = opts.get('chg',0)
    mult = opts.get('mult',None)
    verbose = opts.get('verbose',False)

    atoms = initialize(atoms)

    nel = get_nel(atoms)-int(chg)
    nclosed,nopen = get_open_closed(nel,mult)

    Enuke = get_enuke(atoms)
    nbf = get_nbf(atoms)
    eref = get_reference_energy(atoms)
    if verbose:
        print "Nel = %d, Nclosed = %d, Nopen = %d," % (nel,nclosed,nopen), \
              "Enuke = %10.4f, Nbf = %d" % (Enuke,nbf)
    F0 = get_F0(atoms)
    # Dispatch to the open- or closed-shell SCF procedure.
    if nopen:
        Eel = scfopen(atoms,F0,nclosed+nopen,nclosed,**opts)
    else:
        Eel = scfclosed(atoms,F0,nclosed,**opts)
    Etot = Eel+Enuke
    Hf = Etot*ev2kcal+eref
    if verbose: print "Final Heat of Formation = ",Hf
    return Hf
def scfclosed(atoms,F0,nclosed,**opts):
    "SCF procedure for closed-shell molecules"
    verbose = opts.get('verbose',False)
    do_avg = opts.get('avg',False)      # optional density averaging (damping)
    maxiter = opts.get('maxiter',50)
    D = get_guess_D(atoms)
    Eold = 0
    if do_avg: avg = SimpleAverager(do_avg)
    for i in xrange(maxiter):
        if do_avg: D = avg.getD(D)
        F1 = get_F1(atoms,D)
        F2 = get_F2(atoms,D)
        F = F0+F1+F2
        # Electronic energy: E = 0.5 * Tr[D (Hcore + F)]
        Eel = 0.5*trace2(D,F0+F)
        if verbose: print i+1,Eel,get_Hf(atoms,Eel)
        #if verbose: print i+1,Eel
        if abs(Eel-Eold) < 0.001:   # converged (energy change < 1 meV)
            if verbose:
                print "Exiting because converged",i+1,Eel,Eold
            break
        Eold = Eel
        orbe,orbs = eigh(F)
        # Doubly occupy the nclosed lowest orbitals.
        D = 2*mkdens(orbs,0,nclosed)
    return Eel
def scfopen(atoms,F0,nalpha,nbeta,**opts):
    "SCF procedure for open-shell molecules"
    # Unrestricted SCF with separate alpha/beta Fock matrices and densities.
    verbose = opts.get('verbose',False)
    D = get_guess_D(atoms)
    Da = 0.5*D   # start from an even spin split of the guess density
    Db = 0.5*D
    Eold = 0
    for i in xrange(10):
        F1a = get_F1_open(atoms,Da,Db)
        F1b = get_F1_open(atoms,Db,Da)
        F2a = get_F2_open(atoms,Da,Db)
        F2b = get_F2_open(atoms,Db,Da)
        Fa = F0+F1a+F2a
        Fb = F0+F1b+F2b
        Eel = 0.5*trace2(Da,F0+Fa)+0.5*trace2(Db,F0+Fb)
        if verbose: print i,Eel
        if abs(Eel-Eold) < 0.001: break
        Eold = Eel
        orbea,orbsa = eigh(Fa)
        orbeb,orbsb = eigh(Fb)
        Da = mkdens(orbsa,0,nalpha)
        Db = mkdens(orbsb,0,nbeta)
    return Eel
def initialize(atoms):
    "Assign parameters for the rest of the calculation"
    from Slater import gauss_powers,gexps,gcoefs,s_or_p
    from MINDO3_Parameters import Uss,Upp,IPs,IPp,CoreQ,f03,nbfat,\
         zetas,zetap,Eat,Hfat,gss,gsp,gpp,gppp,hsp,hppp,NQN
    from CGBF import CGBF
    from Bunch import Bunch # Generic object to hold basis functions
    ibf = 0 # Counter to overall basis function count
    for atom in atoms:
        xyz = atom.pos()
        # Attach the per-atom MINDO/3 parameters (core charge, screening
        # radius, reference energies, one-center integrals).
        atom.Z = CoreQ[atom.atno]
        atom.basis = []
        atom.rho = e2/f03[atom.atno]
        atom.nbf = nbfat[atom.atno]
        atom.Eref = Eat[atom.atno]
        atom.Hf = Hfat[atom.atno]
        atom.gss = gss[atom.atno]
        atom.gsp = gsp[atom.atno]
        atom.gpp = gpp[atom.atno]
        atom.gppp = gppp[atom.atno]
        atom.hsp = hsp[atom.atno]
        atom.hppp = hppp[atom.atno]
        # Build the contracted Gaussian basis functions for this atom.
        for i in xrange(atom.nbf):
            bfunc = Bunch()
            atom.basis.append(bfunc)
            bfunc.index = ibf # pointer to overall basis function index
            ibf += 1
            bfunc.type = i # s,x,y,z
            bfunc.atom = atom # pointer to parent atom
            bfunc.cgbf = CGBF(xyz,gauss_powers[i])
            zi = gexps[(NQN[atom.atno],s_or_p[i])]
            ci = gcoefs[(NQN[atom.atno],s_or_p[i])]
            if i:
                # p-type orbital parameters
                zeta = zetap[atom.atno]
                bfunc.u = Upp[atom.atno]
                bfunc.ip = IPp[atom.atno]
            else:
                # s-type orbital parameters
                zeta = zetas[atom.atno]
                bfunc.u = Uss[atom.atno]
                bfunc.ip = IPs[atom.atno]
            for j in xrange(len(zi)):
                bfunc.cgbf.add_primitive(zi[j]*zeta*zeta,ci[j])
            bfunc.cgbf.normalize()
    return atoms
def get_fock(atoms):
    """Return the zeroth-iteration Fock matrix F0 + F1(D) + F2(D)."""
    atoms = initialize(atoms)
    F0 = get_F0(atoms)
    D = get_guess_D(atoms)
    return F0 + get_F1(atoms, D) + get_F2(atoms, D)
def energy_forces_factories(atoms,**kwargs):
    # This is a factory function. It creates two functions, one that,
    # given a vector of coordinates, returns an energy, and another that,
    # given a vector of corrdinates, returns a vector of gradients. The
    # factory function also returns a list of initial coordinates. The two
    # functions and the initial coordinates are useful for calling the
    # optimizer functions.
    verbose_level = kwargs.get('verbose_level',0)
    return_etot_as_e = kwargs.get('return_etot_as_e',False)
    numeric_forces = kwargs.get('numeric_forces',False)

    nat = len(atoms)
    # Flatten the atomic positions into a 3N coordinate vector.
    coords = zeros(3*nat,'d')
    for i in xrange(nat):
        for j in xrange(3):
            coords[3*i+j] = atoms[i].r[j]

    def Efunc(cnew):
        # Write the trial coordinates back into the atoms, then evaluate.
        for i in xrange(nat):
            for j in xrange(3):
                atoms[i].r[j] = cnew[3*i+j]
        Hf,F = get_energy_forces(atoms,doforces=False)
        if verbose_level > 1:
            print "MINDO3 energy calculation requested:"
            print atoms
            print Hf
        # Recompute the total energy:
        eref = get_reference_energy(atoms)
        Etot = (Hf-eref)/ev2kcal
        if return_etot_as_e: return Etot
        return Hf

    def Ffunc(cnew):
        for i in xrange(nat):
            for j in xrange(3):
                atoms[i].r[j] = cnew[3*i+j]
        Hf,Forces = get_energy_forces(atoms,doforces=True)
        F = zeros(3*nat,'d')
        for i in xrange(nat):
            for j in xrange(3):
                F[3*i+j] = Forces[i,j]
        if verbose_level > 0:
            print "MINDO3 gradient calculation requested:"
            print atoms
            print Hf
        return F

    def Ffunc_num(cnew):
        # Forward-difference numerical gradient of Efunc.
        E0 = Efunc(cnew)
        F = zeros(3*nat,'d')
        ei = zeros(3*nat,'d')
        dx = 1e-7
        for i in xrange(nat):
            for j in xrange(3):
                ei[3*i+j] = 1.0
                E1 = Efunc(cnew+ei*dx)
                ei[3*i+j] = 0.0
                F[3*i+j] = (E1-E0)/dx
        if verbose_level > 0:
            print "MINDO3 gradient calculation requested:"
            print atoms
            # NOTE(review): Hf is not defined in this scope; verbose output
            # here would raise a NameError -- confirm before relying on it.
            print Hf
        return F
    if numeric_forces: return coords,Efunc,Ffunc_num
    return coords,Efunc,Ffunc
def opt(atoms,**kwargs):
    """BFGS geometry optimization; returns (final energy, coordinates)."""
    from PyQuante.optimize import fminBFGS
    c0,Efunc,Ffunc = energy_forces_factories(atoms,**kwargs)

    print "C0 = ",c0
    # Currently optimization works when I use Energies and numerical
    # forces, but not using the analytical forces. Obviously something
    # is wrong somewhere here, but I don't have time to fix this now.
    # Hopefully the final fix won't be too hard.
    copt = fminBFGS(Efunc,c0,Ffunc,avegtol=1e-4)
    #copt = fminBFGS(Efunc,c0,None,avegtol=1e-4)
    Efinal = Efunc(copt)
    return Efinal,copt
def get_energy_forces(atoms,**opts):
    """Return (energy, forces); delegates to the numeric-difference routine."""
    opts['return_energy'] = True
    return numeric_forces(atoms,**opts)
def numeric_forces(atoms,D=None,**opts):
    "Compute numerical forces on atoms"
    # D is ignored here.
    dx = opts.get('dx',1e-6)       # finite-difference step
    sym = opts.get('sym',True)     # True: central differences; False: forward
    return_energy = opts.get('return_energy',False)
    nat = len(atoms)
    Forces = zeros((nat,3),'d')
    E0 = scf(atoms)
    for iat in xrange(nat):
        for idir in xrange(3):
            dr = zeros(3,'d')
            dr[idir] = dx
            # Displace atom iat along idir, re-run SCF, then restore it.
            atoms[iat].translate(dr)
            Ep = scf(atoms)
            atoms[iat].translate(-dr)
            if sym:
                atoms[iat].translate(-dr)
                Em = scf(atoms)
                atoms[iat].translate(dr)
                Forces[iat,idir] = 0.5*(Ep-Em)/dx
            else:
                Forces[iat,idir] = (Ep-E0)/dx
    if return_energy: return E0,Forces
    return Forces
def forces(atoms,D):
    "Compute analytic forces on list of atoms"
    print "Warning: Analytical forces not tested yet!"
    nat = len(atoms)
    Forces = zeros((nat,3),'d')
    # Loop over all pairs of atoms and compute the force between them
    #cached_dSij = full_dSij(atoms)
    for iat in xrange(nat):
        atomi = atoms[iat]
        for jat in xrange(iat):
            atomj = atoms[jat]
            alpha = get_alpha(atomi.atno,atomj.atno)
            beta = get_beta0(atomi.atno,atomj.atno)
            R2 = atomi.dist2(atomj)*bohr2ang**2
            R = sqrt(R2)
            c2 = 0.25*pow(atomi.rho+atomj.rho,2)
            for dir in xrange(3):
                Fij = 0 # Force between atoms iat and jat in direction dir
                # initialize some constants
                delta = atomi.r[dir]-atomj.r[dir]
                c1 = delta*atomi.Z*atomj.Z*e2/R
                dr1 = e2*delta*pow(R2+c2,-1.5)

                # Nuclear repulsion terms
                if ( (atomi.atno == 1
                      and (atomj.atno == 7 or atomj.atno == 8))
                     or (atomj.atno == 1
                         and (atomi.atno == 7 or atomi.atno == 8))):
                    # Special case of NH or OH bonds
                    Fij += -c1*alpha*(1/R2 - R*pow(R2+c2,-1.5)
                                      + 1/R - 1/sqrt(R2+c2))*exp(-R) \
                           - c1*R*pow(R2+c2,-1.5)
                else:
                    Fij += -c1*(1/R2 - R*pow(R2+c2,-1.5) + alpha/R
                                - alpha/sqrt(R2+c2))*exp(-alpha*R) \
                           - c1*R*pow(R2+c2,-1.5)

                # Overlap terms
                for bfi in atomi.basis:
                    for bfj in atomj.basis:
                        Dij = D[bfi.index,bfj.index]
                        dSij = mopac_doverlap(bfi,bfj,dir)
                        #dSij = -bfi.cgbf.doverlap(bfj.cgbf,dir)/bohr2ang
                        #dSij = -bfi.cgbf.doverlap_num(bfj.cgbf,dir)/bohr2ang
                        Fij += 2*beta*(bfi.ip+bfj.ip)*Dij*dSij

                # Core attraction terms
                for bfj in atomj.basis:
                    Fij += atomi.Z*D[bfj.index,bfj.index]*dr1
                for bfi in atomi.basis:
                    Fij += atomj.Z*D[bfi.index,bfi.index]*dr1

                # Two-electron terms
                for bfi in atomi.basis:
                    for bfj in atomj.basis:
                        Dii = D[bfi.index,bfi.index]
                        Djj = D[bfj.index,bfj.index]
                        Dij = D[bfi.index,bfj.index]
                        # exchange is the first term, coulomb is second:
                        Fij += 0.5*dr1*pow(Dij,2)-dr1*Dii*Djj

                # Now sum total forces and convert to kcal/mol
                Forces[iat][dir] += ev2kcal*Fij
                Forces[jat][dir] -= ev2kcal*Fij
    return Forces
def mopac_overlap(bfi,bfj): # from the routine gover.f
    """Overlap integral <bfi|bfj> between two contracted Gaussians.

    Transcribed from MOPAC's GOVER routine.  Each basis function carries
    a .type index (0 = s, 1..3 = px/py/pz) and a .cgbf with an .origin
    and a list of primitives (.prims, each with .exp and .coef).

    Bug fixes: the primitive exponents primialpha/primjalpha were never
    assigned (NameError on first call), and cgbfj.prims was incorrectly
    called as a method (it is a list, cf. mopac_doverlap below).
    """
    cgbfi,cgbfj = bfi.cgbf,bfj.cgbf
    ri = cgbfi.origin # distance in bohr
    rj = cgbfj.origin
    RR = pow(ri[0]-rj[0],2)+pow(ri[1]-rj[1],2)+pow(ri[2]-rj[2],2)
    itype = bfi.type
    jtype = bfj.type
    Sij = 0
    for primi in cgbfi.prims:
        primialpha = primi.exp          # was missing
        for primj in cgbfj.prims:       # was cgbfj.prims()
            primjalpha = primj.exp      # was missing
            amb = primialpha+primjalpha   # sum of exponents
            apb = primialpha*primjalpha   # product of exponents
            adb = apb/amb                 # reduced exponent
            if itype > 0 and jtype > 0:
                #is = 4   (p|p)
                tomb = (ri[itype-1]-rj[itype-1])*(ri[jtype-1]-rj[jtype-1])
                abn = -adb*tomb
                if itype == jtype: abn = abn + 0.5
                abn = 4*abn*sqrt(apb)/amb
            elif itype > 0:
                #is = 3   (p|s)
                tomb = (ri[itype-1]-rj[itype-1])
                abn = -2*tomb*primjalpha*sqrt(primialpha)/amb
            elif jtype > 0:
                #is = 2   (s|p)
                tomb = (ri[jtype-1]-rj[jtype-1])
                abn = 2*tomb*primialpha*sqrt(primjalpha)/amb
            else:
                #is = 1   (s|s)
                abn = 1.0
            if adb*RR < 90:   # skip negligible (underflowing) contributions
                Sij += primi.coef*primj.coef*\
                       pow(2*sqrt(apb)/amb,1.5)*exp(-adb*RR)*abn
    return Sij
def mopac_doverlap(bfi,bfj,direction): # from the routine dcart.f
    """Derivative of the overlap <bfi|bfj> along one Cartesian direction.

    Transcribed from MOPAC's DCART routine; direction is 0/1/2 for x/y/z.
    The /A0 factors convert the bohr-based derivative to 1/Angstrom.
    """
    cgbfi,cgbfj = bfi.cgbf,bfj.cgbf
    ri = cgbfi.origin # distance in bohr
    rj = cgbfj.origin
    RR = pow(ri[0]-rj[0],2)+pow(ri[1]-rj[1],2)+pow(ri[2]-rj[2],2)
    del1 = ri[direction] - rj[direction]
    itype = bfi.type
    jtype = bfj.type
    DS = 0
    for primi in cgbfi.prims:
        primialpha = primi.exp
        for primj in cgbfj.prims:
            primjalpha = primj.exp
            del2 = del3 = 0
            SS = 0
            apb = primialpha*primjalpha
            amb = primialpha+primjalpha
            adb = apb/amb
            adr = min(adb*RR,35.0)   # clamp exponent to avoid underflow
            # The abn prefactor depends on which orbital types are paired
            # and whether the differentiation direction matches a p axis
            # (the numbered "is" cases below follow the original Fortran).
            if itype == 0 and jtype == 0: # ss
                # is=1
                abn = -2.*adb*del1/A0
            elif itype == 0 and jtype > 0: # sp
                if jtype-1 == direction:
                    #is = 3
                    abn = 2*adb/sqrt(primjalpha)*(1-2*adb*del1*del1)/A0
                else:
                    #is = 2
                    del2 = ri[jtype-1]-rj[jtype-1]
                    abn = -4*adb*adb*del1*del2/sqrt(primjalpha)/A0
            elif itype > 0 and jtype == 0: # ps
                if itype-1 == direction:
                    #is = 5
                    abn = -2*adb/sqrt(primialpha)*(1-2*adb*del1*del1)/A0
                else:
                    #is = 4
                    del2 = ri[itype-1]-rj[itype-1]
                    abn = 4*adb*adb*del1*del2/sqrt(primialpha)/A0
            elif itype == jtype:
                if direction == itype-1:
                    #is = 9 (p|p)
                    abn=-8*adb*adb*del1/sqrt(apb)*(1.5-adb*del1*del1)/A0
                else:
                    #is = 8 (p'|p')
                    del2 = ri[jtype-1]-rj[jtype-1]
                    abn=-8*pow(adb,2)*del1/sqrt(apb)*(0.5-adb*del2*del2)/A0
            elif (direction != itype-1) and (direction != jtype-1):
                #is = 7(p'|p")
                del2 = ri[itype-1] - rj[itype-1]
                del3 = ri[jtype-1] - rj[jtype-1]
                abn=8*pow(adb,3)*del1*del2*del3/sqrt(apb)/A0
            else:
                #is = 6 (p|p') or (p'|p)
                del2 = ri[itype+jtype-direction-2]-rj[itype+jtype-direction-2]
                abn=-4*adb*adb*del2/sqrt(apb)*(1-2*adb*del1*del1)/A0

            SS = pow(2*sqrt(apb)/amb,1.5)*exp(-adr)*abn
            DS += SS*primi.coef*primj.coef
    return DS
def test_olap():
    # Test function to compare results of my CGBF overlap routines to those
    # of mopacs. The issue is that the derivative gives different results.
    # NOTE(review): delta, deepcopy and the *4 numerical derivatives below
    # are computed but never used or printed.
    from math import sin,cos
    from copy import deepcopy
    delta = 0.001
    for theta in [0.,10.,20.,30.,45.,55.214134,90.]:
        at1 = (1,(0,0,0))
        at2 = (6,(cos(theta),sin(theta),0.1))
        atoms = initialize([at1,at2])
        bfi = atoms[0].basis[0]
        bfj = atoms[1].basis[2]
        dSijx = mopac_doverlap(bfi,bfj,0)
        dSijy = mopac_doverlap(bfi,bfj,1)
        dSijz = mopac_doverlap(bfi,bfj,2)
        dSijx2 = -bfi.cgbf.doverlap(bfj.cgbf,0)/bohr2ang
        dSijy2 = -bfi.cgbf.doverlap(bfj.cgbf,1)/bohr2ang
        dSijz2 = -bfi.cgbf.doverlap(bfj.cgbf,2)/bohr2ang
        dSijx4 = -bfi.cgbf.doverlap_num(bfj.cgbf,0)/bohr2ang
        dSijy4 = -bfi.cgbf.doverlap_num(bfj.cgbf,1)/bohr2ang
        dSijz4 = -bfi.cgbf.doverlap_num(bfj.cgbf,2)/bohr2ang
        print "%2d %6.3f %6.3f %6.3f %6.3f %6.3f %6.3f " %\
              (theta,dSijx,dSijy,dSijz,dSijx2,dSijy2,dSijz2)
    return
def write_mopac_input(atoms,fname=None):
    """Write a MOPAC MINDO3 input deck for the given molecule.

    atoms : iterable of atoms (with .atno and .r in bohr) that also has a
            .name attribute, used for the default file name and the title.
    fname : output file name; defaults to "<molecule name>.dat".
    """
    from PyQuante.Element import symbol
    from PyQuante.Constants import bohr2ang
    if not fname: fname = atoms.name + ".dat"
    # Header: keyword line, title line, comment line.
    lines = ['MINDO3',atoms.name,'Input file written by PyQuante']
    for atom in atoms:
        atno = atom.atno
        sym = symbol[atno]
        # MOPAC expects Angstrom coordinates; the interleaved 0 flags mark
        # each coordinate as not-to-be-optimized.
        x,y,z = [bohr2ang*i for i in atom.r]
        lines.append('%s %10.4f 0 %10.4f 0 %10.4f 0'
                     % (sym,x,y,z))
    # Use a context manager so the file handle is closed deterministically
    # (the original open(...).write(...) leaked the handle to the GC).
    with open(fname,'w') as f:
        f.write('\n'.join(lines))
    return
if __name__ == '__main__':
    # Smoke test: print heats of formation for a few small molecules.
    from Molecule import Molecule
    h2o = Molecule('H2O',atomlist=[(8,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0))])
    oh = Molecule('OH',atomlist=[(8,(0,0,0)),(1,(1.,0,0))])
    ch4 = Molecule('Methane', atomlist =
                   [(6,(0,0,0)),(1,(1.,0,0)),(1,(0,1.,0)),
                    (1,(0,0,1.)),(1,(0,0,-1.))])
    print scf(h2o)
    print scf(oh)
    print scf(ch4)
    #E,F = get_energy_forces(ch4)
    #for Fi in F: print Fi
    #import profile,pstats
    #profile.run('get_energy_forces(ch4)','prof')
    #prof = pstats.Stats('prof')
    #prof.strip_dirs().sort_stats('time').print_stats(15)
    #test_olap()
hoh/Billabong | billabong/storage/__init__.py | 1 | 1393 | # Copyright (c) 2015 "Hugo Herter http://hugoherter.com"
#
# This file is part of Billabong.
#
# Billabong is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Blob Storage."""
from .abstract import Storage
from .folder import FolderStorage
from .http import HTTPStorage
from .ssh import SSHStorage
# Touch the imported classes so static checkers do not flag them as
# unused; they are re-exported as this package's public storage API.
assert Storage
assert FolderStorage
assert HTTPStorage
assert SSHStorage
def load_store(store_settings):
    """Instantiate a storage backend from a settings mapping.

    ``store_settings`` must contain a ``'type'`` key naming one of the
    storage classes and may provide an ``'args'`` dict of constructor
    keyword arguments.  Raises ``ValueError`` for an unknown type.
    """
    kind = store_settings['type']
    kwargs = store_settings.get('args', {})
    if kind not in ('FolderStorage', 'HTTPStorage', 'SSHStorage'):
        raise ValueError("Unknown type", kind)
    if kind == 'FolderStorage':
        return FolderStorage(**kwargs)
    if kind == 'HTTPStorage':
        return HTTPStorage(**kwargs)
    return SSHStorage(**kwargs)
dariocorral/panoanda | panoanda/candles.py | 1 | 3916 | """
Created on Sun Sep 17 09:27:31 2017
@author: dariocorral
"""
import pandas as pd
from datetime import timedelta
from tickers import Tickers
from hourOffset import Hour
class Candles(object):
    """
    OANDA Historical Rates ready to use with Pandas
    """
    def __init__(self):
        # Helpers for session-hour offsets and OANDA API access.
        self.__hour = Hour()
        self.__tickers = Tickers()

    def dataframe(self,periods, granularity,sundayCandle, *ticker):
        """
        OANDA Historical Rates

        :param periods: number of periods
        :type: int

        :param granularity: OANDA timeframe code:
            "S5"/"S10"/"S15"/"S30" (seconds), "M1"-"M30" (minutes),
            "H1"-"H12" (hours), "D" (day), "W" (week), "M" (month)
        :type: string

        :param sundayCandle: True -> Sunday candles included
                             False -> Sunday candles filtered out
        :type : bool

        :param ticker: required instruments, OANDA API format
        :type: str, tuple or list

        :return: dataFrame object
        """
        #Define empty dataframe
        df = pd.DataFrame()

        for instr in ticker:
            # Request 20% extra candles so that after dropping Sunday
            # rows we can still return the requested number of periods.
            histRates = self.__tickers._oanda_api.get_history(count =
                    int(periods * 1.2), instrument= instr,
                    candleFormat = 'midpoint',granularity= granularity,
                    dailyAlignment= (self.__hour.hour_offset_calculate(
                            6 ,
                            self.__hour.offset_NY_GMT)),
                            weeklyAlignment='Monday')

            #From dict to dataframe
            histRates = histRates.get('candles')
            histRates = pd.DataFrame.from_dict(histRates)
            histRates['ticker'] = instr
            histRates['time'] = pd.to_datetime(histRates['time'])

            #Apply GMT_hours_offset to local time
            histRates['time'] += timedelta(hours =
                     self.__hour.offset_local_GMT)

            histRates.set_index ('time', inplace = True)

            #Sunday candle filter
            if sundayCandle == False:
                # weekday 6 == Sunday
                histRates['Weekday'] = histRates.index.weekday
                histRates = histRates.loc[histRates['Weekday'] != 6]
                histRates = histRates.tail(periods)
            else:
                histRates = histRates.tail(periods)

            #Daily and weekly granularity in date format without hours
            if granularity == 'D' or granularity == 'W':
                histRates.index = histRates.index.date

            #Columns definition
            histRates= histRates[['ticker','openMid','highMid','lowMid',
                                  'closeMid','volume','complete']]
            histRates.columns = ('ticker','open','high','low','close','volume',
                                 'complete')

            df = df.append(histRates)

        return df
| mit | 5,409,763,454,632,025,000 | 32.008621 | 81 | 0.439394 | false |
JioCloud/cinder | cinder/volume/drivers/drbdmanagedrv.py | 1 | 19926 | # Copyright (c) 2014 LINBIT HA Solutions GmbH
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
This driver connects Cinder to an installed DRBDmanage instance, see
http://oss.linbit.com/drbdmanage/
http://git.linbit.com/drbdmanage.git/
for more details.
"""
import six
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from cinder import exception
from cinder.i18n import _, _LW, _LI
from cinder.volume import driver
try:
import dbus
import drbdmanage.consts as dm_const
import drbdmanage.exceptions as dm_exc
import drbdmanage.utils as dm_utils
except ImportError:
dbus = None
dm_const = None
dm_exc = None
dm_utils = None
LOG = logging.getLogger(__name__)
drbd_opts = [
    cfg.StrOpt('drbdmanage_redundancy',
               default='1',
               help='Number of nodes that should replicate the data.'),
    # TODO(PM): offsite_redundancy?
    # TODO(PM): choose DRBDmanage storage pool?
]

CONF = cfg.CONF
CONF.register_opts(drbd_opts)

# Aux-property key under which the Cinder volume/snapshot UUID is stored
# in DRBDmanage, so objects can later be found by their Cinder id.
AUX_PROP_CINDER_VOL_ID = "cinder-id"
DM_VN_PREFIX = 'CV_'  # sadly 2CV isn't allowed by DRBDmanage
DM_SN_PREFIX = 'SN_'
class DrbdManageDriver(driver.VolumeDriver):
"""Cinder driver that uses DRBDmanage for storage."""
VERSION = '1.0.0'
drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
drbdmanage_dbus_interface = '/interface'
    def __init__(self, *args, **kwargs):
        """Set up driver state, config options and the iSCSI target helper."""
        # Reusable empty D-Bus arrays for calls that take no properties.
        self.empty_list = dbus.Array([], signature="a(s)")
        self.empty_dict = dbus.Array([], signature="a(ss)")
        super(DrbdManageDriver, self).__init__(*args, **kwargs)
        self.configuration.append_config_values(drbd_opts)
        if not self.drbdmanage_dbus_name:
            self.drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
        if not self.drbdmanage_dbus_interface:
            self.drbdmanage_dbus_interface = '/interface'
        self.drbdmanage_redundancy = int(getattr(self.configuration,
                                                 'drbdmanage_redundancy', 1))
        self.dm_control_vol = ".drbdctrl"

        # Copied from the LVM driver, see
        # I43190d1dac33748fe55fa00f260f32ab209be656
        target_driver = self.target_mapping[
            self.configuration.safe_get('iscsi_helper')]

        LOG.debug('Attempting to initialize DRBD driver with the '
                  'following target_driver: %s',
                  target_driver)

        self.target_driver = importutils.import_object(
            target_driver,
            configuration=self.configuration,
            db=self.db,
            executor=self._execute)
def dbus_connect(self):
self.odm = dbus.SystemBus().get_object(self.drbdmanage_dbus_name,
self.drbdmanage_dbus_interface)
self.odm.ping()
    def call_or_reconnect(self, fn, *args):
        """Call DBUS function; on a disconnect try once to reconnect."""
        # NOTE(review): any DBusException (not only disconnects) triggers
        # the single reconnect-and-retry below.
        try:
            return fn(*args)
        except dbus.DBusException as e:
            LOG.warning(_LW("Got disconnected; trying to reconnect. (%s)"), e)
            self.dbus_connect()
            # Old function object is invalid, get new one.
            return getattr(self.odm, fn._method_name)(*args)
    def do_setup(self, context):
        """Any initialization the volume driver does while starting."""
        super(DrbdManageDriver, self).do_setup(context)
        # Establish the initial D-Bus connection to DRBDmanage.
        self.dbus_connect()
    def check_for_setup_error(self):
        """Verify that requirements are in place to use DRBDmanage driver."""
        # The dbus/drbdmanage imports at module scope are optional; fail
        # here with a clear error if any of them were unavailable.
        if not all((dbus, dm_exc, dm_const, dm_utils)):
            msg = _('DRBDmanage driver setup error: some required '
                    'libraries (dbus, drbdmanage.*) not found.')
            LOG.error(msg)
            raise exception.VolumeDriverException(message=msg)
        if self.odm.ping() != 0:
            message = _('Cannot ping DRBDmanage backend')
            raise exception.VolumeBackendAPIException(data=message)
def _clean_uuid(self):
"""Returns a UUID string, WITHOUT braces."""
# Some uuid library versions put braces around the result!?
# We don't want them, just a plain [0-9a-f-]+ string.
id = str(uuid.uuid4())
id = id.replace("{", "")
id = id.replace("}", "")
return id
    def _check_result(self, res, ignore=None, ret=0):
        """Scan a DRBDmanage result list and raise on unexpected errors.

        res is a sequence of (code, format-string, arg-list) tuples.
        Codes listed in ignore are tolerated; any other non-success code
        raises VolumeBackendAPIException with the formatted message.
        """
        seen_success = False
        seen_error = False
        # NOTE(review): 'result' collects the first ignored code but is
        # never returned; callers always get 'ret'.  Looks unintended --
        # confirm before changing.
        result = ret
        for (code, fmt, arg_l) in res:
            # convert from DBUS to Python
            arg = dict(arg_l)
            if ignore and code in ignore:
                if not result:
                    result = code
                continue
            if code == dm_exc.DM_SUCCESS:
                seen_success = True
                continue
            seen_error = _("Received error string: %s") % (fmt % arg)

        if seen_error:
            raise exception.VolumeBackendAPIException(data=seen_error)
        if seen_success:
            return ret

        # by default okay - or the ignored error code.
        return ret
    # DRBDmanage works in kiB units; Cinder uses GiB.
    def _vol_size_to_dm(self, size):
        # GiB -> kiB (truncated to int)
        return int(size * units.Gi / units.Ki)

    def _vol_size_to_cinder(self, size):
        # kiB -> GiB (truncated to int)
        return int(size * units.Ki / units.Gi)
    def is_clean_volume_name(self, name, prefix):
        """Return prefix + <uuid> if name is (or embeds) a UUID, else None.

        Accepts either a name built from CONF.volume_name_template or a
        bare UUID string; anything else yields None.
        """
        try:
            # NOTE(review): the magic 7 assumes the template prefix
            # ("volume-") is exactly 7 characters -- verify against
            # CONF.volume_name_template before relying on it.
            if (name.startswith(CONF.volume_name_template % "") and
                    uuid.UUID(name[7:]) is not None):
                return prefix + name[7:]
        except ValueError:
            return None

        try:
            if uuid.UUID(name) is not None:
                return prefix + name
        except ValueError:
            return None
    def _priv_hash_from_volume(self, volume):
        """Aux-props dict tagging a DRBDmanage object with the Cinder id."""
        return dm_utils.dict_to_aux_props({
            AUX_PROP_CINDER_VOL_ID: volume['id'],
        })
def snapshot_name_from_cinder_snapshot(self, snapshot):
sn_name = self.is_clean_volume_name(snapshot['id'], DM_SN_PREFIX)
return sn_name
    def _res_and_vl_data_for_volume(self, volume, empty_ok=False):
        """Find DRBD resource and volume ID.

        A DRBD resource might consist of several "volumes"
        (think consistency groups).
        So we have to find the number of the volume within one resource.

        Returns resource name, volume number, and resource
        and volume properties.
        """

        # If we get a string, use it as-is.
        # Else it's a dictionary; then get the ID.
        if isinstance(volume, six.string_types):
            v_uuid = volume
        else:
            v_uuid = volume['id']

        # Look the volume up by its Cinder id, stored as an aux property.
        res, rl = self.call_or_reconnect(self.odm.list_volumes,
                                         self.empty_dict,
                                         0,
                                         dm_utils.dict_to_aux_props(
                                             {AUX_PROP_CINDER_VOL_ID: v_uuid}),
                                         self.empty_dict)
        self._check_result(res)

        if (not rl) or (len(rl) == 0):
            if empty_ok:
                LOG.debug("No volume %s found.", v_uuid)
                return None, None, None, None
            raise exception.VolumeBackendAPIException(
                data=_("volume %s not found in drbdmanage") % v_uuid)
        if len(rl) > 1:
            raise exception.VolumeBackendAPIException(
                data=_("multiple resources with name %s found by drbdmanage") %
                v_uuid)

        (r_name, r_props, vols) = rl[0]
        if len(vols) != 1:
            raise exception.VolumeBackendAPIException(
                data=_("not exactly one volume with id %s") %
                v_uuid)

        (v_nr, v_props) = vols[0]

        LOG.debug("volume %(uuid)s is %(res)s/%(nr)d; %(rprop)s, %(vprop)s",
                  {'uuid': v_uuid, 'res': r_name, 'nr': v_nr,
                   'rprop': r_props, 'vprop': v_props})

        return r_name, v_nr, r_props, v_props
    def _resource_and_snap_data_from_snapshot(self, snapshot, empty_ok=False):
        """Find DRBD resource and snapshot name from the snapshot ID.

        Returns (resource name, snapshot name, snapshot properties), or
        None when empty_ok is set and no snapshot matches.
        """
        s_uuid = snapshot['id']
        # Look the snapshot up by its Cinder id, stored as an aux property.
        res, rs = self.call_or_reconnect(self.odm.list_snapshots,
                                         self.empty_dict,
                                         self.empty_dict,
                                         0,
                                         dm_utils.dict_to_aux_props(
                                             {AUX_PROP_CINDER_VOL_ID: s_uuid}),
                                         self.empty_dict)
        self._check_result(res)

        if (not rs) or (len(rs) == 0):
            if empty_ok:
                return None
            else:
                raise exception.VolumeBackendAPIException(
                    data=_("no snapshot with id %s found in drbdmanage") %
                    s_uuid)
        if len(rs) > 1:
            raise exception.VolumeBackendAPIException(
                data=_("multiple resources with snapshot ID %s found") %
                s_uuid)

        (r_name, snaps) = rs[0]
        if len(snaps) != 1:
            raise exception.VolumeBackendAPIException(
                data=_("not exactly one snapshot with id %s") % s_uuid)

        (s_name, s_props) = snaps[0]

        LOG.debug("snapshot %(uuid)s is %(res)s/%(snap)s",
                  {'uuid': s_uuid, 'res': r_name, 'snap': s_name})

        return r_name, s_name, s_props
def _resource_name_volnr_for_volume(self, volume, empty_ok=False):
res, vol, _, _ = self._res_and_vl_data_for_volume(volume, empty_ok)
return res, vol
def local_path(self, volume):
dres, dvol = self._resource_name_volnr_for_volume(volume)
res, data = self.call_or_reconnect(self.odm.text_query,
[dm_const.TQ_GET_PATH,
dres,
str(dvol)])
self._check_result(res)
if len(data) == 1:
return data[0]
message = _('Got bad path information from DRBDmanage! (%s)') % data
raise exception.VolumeBackendAPIException(data=message)
    def create_volume(self, volume):
        """Creates a DRBD resource.

        We address it later on via the ID that gets stored
        as a private property.
        """

        # TODO(PM): consistency groups
        # Derive a DRBD-safe resource name from the Cinder volume id.
        dres = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)

        res = self.call_or_reconnect(self.odm.create_resource,
                                     dres,
                                     self.empty_dict)
        self._check_result(res, ignore=[dm_exc.DM_EEXIST], ret=None)

        # If we get DM_EEXIST, then the volume already exists, eg. because
        # deploy gave an error on a previous try (like ENOSPC).
        # Still, there might or might not be the volume in the resource -
        # we have to check that explicitly.
        (_, drbd_vol) = self._resource_name_volnr_for_volume(volume,
                                                             empty_ok=True)
        if not drbd_vol:
            props = self._priv_hash_from_volume(volume)
            # TODO(PM): properties - redundancy, etc
            res = self.call_or_reconnect(self.odm.create_volume,
                                         dres,
                                         self._vol_size_to_dm(volume['size']),
                                         props)
            self._check_result(res)

        # If we crashed between create_volume and the deploy call,
        # the volume might be defined but not exist on any server. Oh my.
        # auto_deploy makes the deployment idempotent across retries.
        res = self.call_or_reconnect(self.odm.auto_deploy,
                                     dres, self.drbdmanage_redundancy,
                                     0, True)
        self._check_result(res)

        return 0
    def delete_volume(self, volume):
        """Deletes a resource.

        Removes the DRBD volume backing the Cinder volume, and removes
        the DRBD resource as well once it holds no more volumes.
        A volume that is already gone counts as successfully deleted.
        """
        dres, dvol = self._resource_name_volnr_for_volume(
            volume,
            empty_ok=True)

        if not dres:
            # OK, already gone.
            return True

        # TODO(PM): check if in use? Ask whether Primary, or just check result?
        res = self.call_or_reconnect(self.odm.remove_volume, dres, dvol, False)
        self._check_result(res, ignore=[dm_exc.DM_ENOENT])

        res, rl = self.call_or_reconnect(self.odm.list_volumes,
                                         [dres],
                                         0,
                                         self.empty_dict,
                                         self.empty_list)
        self._check_result(res)

        # We expect the _resource_ to be here still (we just got a volnr from
        # it!), so just query the volumes.
        # If the resource has no volumes anymore, the current DRBDmanage
        # version (errorneously, IMO) returns no *resource*, too.
        if len(rl) > 1:
            message = _('DRBDmanage expected one resource ("%(res)s"), '
                        'got %(n)d') % {'res': dres, 'n': len(rl)}
            raise exception.VolumeBackendAPIException(data=message)

        # Delete resource, if empty
        if (not rl) or (not rl[0]) or (len(rl[0][2]) == 0):
            res = self.call_or_reconnect(self.odm.remove_resource, dres, False)
            self._check_result(res, ignore=[dm_exc.DM_ENOENT])
    def create_volume_from_snapshot(self, volume, snapshot):
        """Creates a volume from a snapshot.

        Restores the DRBD snapshot that backs *snapshot* into a new DRBD
        resource named after the new Cinder *volume*.
        """

        LOG.debug("create vol from snap: from %(snap)s make %(vol)s",
                  {'snap': snapshot['id'], 'vol': volume['id']})
        # TODO(PM): Consistency groups.
        dres, sname, sprop = self._resource_and_snap_data_from_snapshot(
            snapshot)

        new_res = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)

        r_props = self.empty_dict
        # TODO(PM): consistency groups => different volume number possible
        # Volume 0 of the new resource carries our private Cinder-id hash.
        v_props = [(0, self._priv_hash_from_volume(volume))]

        res = self.call_or_reconnect(self.odm.restore_snapshot,
                                     new_res,
                                     dres,
                                     sname,
                                     r_props,
                                     v_props)
        return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
def create_cloned_volume(self, volume, src_vref):
temp_id = self._clean_uuid()
snapshot = {'id': temp_id}
self.create_snapshot({'id': temp_id, 'volume_id': src_vref['id']})
self.create_volume_from_snapshot(volume, snapshot)
self.delete_snapshot(snapshot)
    def _update_volume_stats(self):
        """Collect backend capability/capacity data for the scheduler.

        Queries DRBDmanage for free/total cluster capacity (at the
        configured redundancy) and caches the result in ``self._stats``.
        """
        data = {}

        data["vendor_name"] = 'Open Source'
        data["driver_version"] = self.VERSION
        data["storage_protocol"] = self.target_driver.protocol
        # This has to match the name set in the cinder volume driver spec,
        # so keep it lowercase
        data["volume_backend_name"] = "drbdmanage"
        data["pools"] = []

        res, free, total = self.call_or_reconnect(self.odm.cluster_free_query,
                                                  self.drbdmanage_redundancy)
        self._check_result(res)

        location_info = ('DrbdManageDriver:%(cvol)s:%(dbus)s' %
                         {'cvol': self.dm_control_vol,
                          'dbus': self.drbdmanage_dbus_name})

        # TODO(PM): multiple DRBDmanage instances and/or multiple pools
        single_pool = {}
        single_pool.update(dict(
            pool_name=data["volume_backend_name"],
            free_capacity_gb=self._vol_size_to_cinder(free),
            total_capacity_gb=self._vol_size_to_cinder(total),
            reserved_percentage=self.configuration.reserved_percentage,
            location_info=location_info,
            QoS_support=False))

        data["pools"].append(single_pool)

        self._stats = data
def get_volume_stats(self, refresh=True):
"""Get volume status."""
self._update_volume_stats()
return self._stats
    def extend_volume(self, volume, new_size):
        """Grow the DRBD volume backing *volume* to *new_size*.

        :param new_size: the new size (Cinder units, converted via
            _vol_size_to_dm()).
        """
        dres, dvol = self._resource_name_volnr_for_volume(volume)

        res = self.call_or_reconnect(self.odm.resize_volume,
                                     dres, dvol, -1,
                                     {"size": self._vol_size_to_dm(new_size)},
                                     0)
        self._check_result(res)
        return 0
    def create_snapshot(self, snapshot):
        """Creates a snapshot.

        Snapshots the DRBD resource backing ``snapshot['volume_id']`` on
        every node the resource is currently assigned to.
        """
        sn_name = self.snapshot_name_from_cinder_snapshot(snapshot)

        dres, dvol = self._resource_name_volnr_for_volume(
            snapshot["volume_id"])

        # Find the nodes the resource is deployed on; the snapshot has to
        # be created on all of them.
        res, data = self.call_or_reconnect(self.odm.list_assignments,
                                           self.empty_dict,
                                           [dres],
                                           0,
                                           self.empty_dict,
                                           self.empty_dict)
        self._check_result(res)

        nodes = [d[0] for d in data]
        if len(nodes) < 1:
            raise exception.VolumeBackendAPIException(
                _('Snapshot res "%s" that is not deployed anywhere?') %
                (dres))

        props = self._priv_hash_from_volume(snapshot)
        res = self.call_or_reconnect(self.odm.create_snapshot,
                                     dres, sn_name, nodes, props)
        self._check_result(res)
    def delete_snapshot(self, snapshot):
        """Deletes a snapshot.

        A snapshot that cannot be found is treated as already deleted
        (logged, then reported as success).
        """
        dres, sname, _ = self._resource_and_snap_data_from_snapshot(
            snapshot, empty_ok=True)

        if not dres:
            # resource already gone?
            LOG.warning(_LW("snapshot: %s not found, "
                            "skipping delete operation"), snapshot['id'])
            LOG.info(_LI('Successfully deleted snapshot: %s'), snapshot['id'])
            return True

        res = self.call_or_reconnect(self.odm.remove_snapshot,
                                     dres, sname, True)
        return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
# ####### Interface methods for DataPath (Target Driver) ########
def ensure_export(self, context, volume):
volume_path = self.local_path(volume)
return self.target_driver.ensure_export(
context,
volume,
volume_path)
def create_export(self, context, volume):
volume_path = self.local_path(volume)
export_info = self.target_driver.create_export(
context,
volume,
volume_path)
return {'provider_location': export_info['location'],
'provider_auth': export_info['auth'], }
    def remove_export(self, context, volume):
        # Export teardown is fully delegated to the target driver.
        return self.target_driver.remove_export(context, volume)
    def initialize_connection(self, volume, connector):
        # Connection setup is fully delegated to the target driver.
        return self.target_driver.initialize_connection(volume, connector)
    def validate_connector(self, connector):
        # Connector validation is fully delegated to the target driver.
        return self.target_driver.validate_connector(connector)
    def terminate_connection(self, volume, connector, **kwargs):
        # No per-connection teardown is needed; None signals success.
        return None
| apache-2.0 | -4,814,244,151,652,230,000 | 36.596226 | 79 | 0.543812 | false |
lylejohnson/FXPy | src/controls.py | 1 | 313359 | # This file was created automatically by SWIG.
import controlsc
from misc import *
from windows import *
from containers import *
import fox
class FX_LabelPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onPaint,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onCmdGetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onCmdGetStringValue,(self,) + _args, _kwargs)
return val
def onCmdSetStringValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onCmdSetStringValue,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_onQueryTip,(self,) + _args, _kwargs)
return val
def getText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getText,(self,) + _args, _kwargs)
return val
def setText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setText,(self,) + _args, _kwargs)
return val
def setIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setIcon,(self,) + _args, _kwargs)
return val
def getIcon(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getIcon,(self,) + _args, _kwargs)
return val
def setFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setFont,(self,) + _args, _kwargs)
return val
def getFont(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getFont,(self,) + _args, _kwargs)
return val
def getTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getTextColor,(self,) + _args, _kwargs)
return val
def setTextColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setTextColor,(self,) + _args, _kwargs)
return val
def setJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setJustify,(self,) + _args, _kwargs)
return val
def getJustify(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getJustify,(self,) + _args, _kwargs)
return val
def setIconPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setIconPosition,(self,) + _args, _kwargs)
return val
def getIconPosition(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getIconPosition,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Label_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_Label instance at %s>" % (self.this,)
class FX_Label(FX_LabelPtr):
    """Owning wrapper: constructs a new C++ FX_Label and owns it."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_Label,_args,_kwargs)
        self.thisown = 1
class FXLabelPtr(FX_LabelPtr):
    """SWIG-generated pointer proxy for the C++ FXLabel widget.

    Each method forwards to the matching function in the ``controlsc``
    extension module.  Auto-generated code - do not edit by hand.
    """
    def __init__(self,this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXLabel_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        return "<C FXLabel instance at %s>" % (self.this,)
class FXLabel(FXLabelPtr):
    """Owning wrapper: constructs a new C++ FXLabel, owns it, and
    registers the instance with the FXPy runtime."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXLabel,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_DialPtr(FX_FramePtr):
    """SWIG-generated pointer proxy for the C++ FX_Dial widget.

    Each method forwards to the matching function in the ``controlsc``
    extension module.  Auto-generated code - do not edit by hand.
    """
    def __init__(self,this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onPaint,(self,) + _args, _kwargs)
        return val
    def onMotion(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onMotion,(self,) + _args, _kwargs)
        return val
    def onMouseWheel(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onMouseWheel,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetRealValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdSetRealValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetRealValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdGetRealValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdSetIntRange,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdGetIntRange,(self,) + _args, _kwargs)
        return val
    def onCmdSetRealRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdSetRealRange,(self,) + _args, _kwargs)
        return val
    def onCmdGetRealRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onCmdGetRealRange,(self,) + _args, _kwargs)
        return val
    def onQueryHelp(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onQueryHelp,(self,) + _args, _kwargs)
        return val
    def onQueryTip(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_onQueryTip,(self,) + _args, _kwargs)
        return val
    def setRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setRange,(self,) + _args, _kwargs)
        return val
    def getRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getRange,(self,) + _args, _kwargs)
        return val
    def setValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setValue,(self,) + _args, _kwargs)
        return val
    def getValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getValue,(self,) + _args, _kwargs)
        return val
    def setRevolutionIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setRevolutionIncrement,(self,) + _args, _kwargs)
        return val
    def getRevolutionIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getRevolutionIncrement,(self,) + _args, _kwargs)
        return val
    def setNotchSpacing(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setNotchSpacing,(self,) + _args, _kwargs)
        return val
    def getNotchSpacing(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getNotchSpacing,(self,) + _args, _kwargs)
        return val
    def setNotchOffset(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setNotchOffset,(self,) + _args, _kwargs)
        return val
    def getNotchOffset(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getNotchOffset,(self,) + _args, _kwargs)
        return val
    def getDialStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getDialStyle,(self,) + _args, _kwargs)
        return val
    def setDialStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setDialStyle,(self,) + _args, _kwargs)
        return val
    def setHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setHelpText,(self,) + _args, _kwargs)
        return val
    def getHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getHelpText,(self,) + _args, _kwargs)
        return val
    def setTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_setTipText,(self,) + _args, _kwargs)
        return val
    def getTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Dial_getTipText,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        return "<C FX_Dial instance at %s>" % (self.this,)
class FX_Dial(FX_DialPtr):
    """Owning wrapper: constructs a new C++ FX_Dial and owns it."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_Dial,_args,_kwargs)
        self.thisown = 1
class FXDialPtr(FX_DialPtr):
    """SWIG-generated pointer proxy for the C++ FXDial widget.

    Each method forwards to the matching function in the ``controlsc``
    extension module.  Auto-generated code - do not edit by hand.
    """
    def __init__(self,this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXDial_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        return "<C FXDial instance at %s>" % (self.this,)
class FXDial(FXDialPtr):
    """Owning wrapper: constructs a new C++ FXDial, owns it, and
    registers the instance with the FXPy runtime."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXDial,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ColorWellPtr(FX_FramePtr):
    """SWIG-generated pointer proxy for the C++ FX_ColorWell widget.

    Each method forwards to the matching function in the ``controlsc``
    extension module.  Auto-generated code - do not edit by hand.
    """
    # Message IDs mirrored from the C++ widget.
    ID_COLORDIALOG = controlsc.FX_ColorWell_ID_COLORDIALOG
    ID_LAST = controlsc.FX_ColorWell_ID_LAST
    def __init__(self,this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onPaint,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onMiddleBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onMiddleBtnPress,(self,) + _args, _kwargs)
        return val
    def onMiddleBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onMiddleBtnRelease,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onMotion(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onMotion,(self,) + _args, _kwargs)
        return val
    def onBeginDrag(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onBeginDrag,(self,) + _args, _kwargs)
        return val
    def onEndDrag(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onEndDrag,(self,) + _args, _kwargs)
        return val
    def onDragged(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDragged,(self,) + _args, _kwargs)
        return val
    def onFocusIn(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onFocusIn,(self,) + _args, _kwargs)
        return val
    def onFocusOut(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onFocusOut,(self,) + _args, _kwargs)
        return val
    def onDNDEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDNDEnter,(self,) + _args, _kwargs)
        return val
    def onDNDLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDNDLeave,(self,) + _args, _kwargs)
        return val
    def onDNDMotion(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDNDMotion,(self,) + _args, _kwargs)
        return val
    def onDNDDrop(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDNDDrop,(self,) + _args, _kwargs)
        return val
    def onDNDRequest(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDNDRequest,(self,) + _args, _kwargs)
        return val
    def onSelectionLost(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onSelectionLost,(self,) + _args, _kwargs)
        return val
    def onSelectionGained(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onSelectionGained,(self,) + _args, _kwargs)
        return val
    def onSelectionRequest(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onSelectionRequest,(self,) + _args, _kwargs)
        return val
    def onClicked(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onClicked,(self,) + _args, _kwargs)
        return val
    def onDoubleClicked(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onDoubleClicked,(self,) + _args, _kwargs)
        return val
    def onTripleClicked(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onTripleClicked,(self,) + _args, _kwargs)
        return val
    def onQueryHelp(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onQueryHelp,(self,) + _args, _kwargs)
        return val
    def onQueryTip(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onQueryTip,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdColorWell(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onCmdColorWell,(self,) + _args, _kwargs)
        return val
    def onChgColorWell(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onChgColorWell,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def setRGBA(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_setRGBA,(self,) + _args, _kwargs)
        return val
    def getRGBA(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_getRGBA,(self,) + _args, _kwargs)
        return val
    def setHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_setHelpText,(self,) + _args, _kwargs)
        return val
    def getHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_getHelpText,(self,) + _args, _kwargs)
        return val
    def setTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_setTipText,(self,) + _args, _kwargs)
        return val
    def getTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_getTipText,(self,) + _args, _kwargs)
        return val
    def isOpaqueOnly(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_isOpaqueOnly,(self,) + _args, _kwargs)
        return val
    def setOpaqueOnly(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ColorWell_setOpaqueOnly,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        return "<C FX_ColorWell instance at %s>" % (self.this,)
class FX_ColorWell(FX_ColorWellPtr):
    """Owning wrapper: constructs a new C++ FX_ColorWell and owns it."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_ColorWell,_args,_kwargs)
        self.thisown = 1
class FXColorWellPtr(FX_ColorWellPtr):
    """SWIG-generated pointer proxy for the C++ FXColorWell widget.

    Each method forwards to the matching function in the ``controlsc``
    extension module.  Auto-generated code - do not edit by hand.
    """
    def __init__(self,this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXColorWell_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        return "<C FXColorWell instance at %s>" % (self.this,)
class FXColorWell(FXColorWellPtr):
    """Owning wrapper: constructs a new C++ FXColorWell, owns it, and
    registers the instance with the FXPy runtime."""
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXColorWell,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_TextFieldPtr(FX_FramePtr):
    """SWIG-style shadow class for the C extension's FX_TextField widget.

    Every method forwards, via apply() (the pre-Python-2.0 spelling of
    f(*args, **kwargs)), to the matching FX_TextField_* function in the
    `controlsc` extension module, passing the wrapped C pointer (self.this)
    as the first argument.  thisown = 0 appears to mean the pointer is
    borrowed, i.e. Python does not own the C++ object (SWIG convention --
    verify against the generator).  NOTE(review): generated binding code;
    do not hand-edit the delegation bodies.
    """
    # Message/selector identifiers mirrored from the C extension.
    ID_CURSOR_HOME = controlsc.FX_TextField_ID_CURSOR_HOME
    ID_CURSOR_END = controlsc.FX_TextField_ID_CURSOR_END
    ID_CURSOR_RIGHT = controlsc.FX_TextField_ID_CURSOR_RIGHT
    ID_CURSOR_LEFT = controlsc.FX_TextField_ID_CURSOR_LEFT
    ID_MARK = controlsc.FX_TextField_ID_MARK
    ID_EXTEND = controlsc.FX_TextField_ID_EXTEND
    ID_SELECT_ALL = controlsc.FX_TextField_ID_SELECT_ALL
    ID_DESELECT_ALL = controlsc.FX_TextField_ID_DESELECT_ALL
    ID_CUT_SEL = controlsc.FX_TextField_ID_CUT_SEL
    ID_COPY_SEL = controlsc.FX_TextField_ID_COPY_SEL
    ID_PASTE_SEL = controlsc.FX_TextField_ID_PASTE_SEL
    ID_DELETE_SEL = controlsc.FX_TextField_ID_DELETE_SEL
    ID_OVERST_STRING = controlsc.FX_TextField_ID_OVERST_STRING
    ID_INSERT_STRING = controlsc.FX_TextField_ID_INSERT_STRING
    ID_BACKSPACE = controlsc.FX_TextField_ID_BACKSPACE
    ID_DELETE = controlsc.FX_TextField_ID_DELETE
    ID_TOGGLE_EDITABLE = controlsc.FX_TextField_ID_TOGGLE_EDITABLE
    ID_TOGGLE_OVERSTRIKE = controlsc.FX_TextField_ID_TOGGLE_OVERSTRIKE
    ID_BLINK = controlsc.FX_TextField_ID_BLINK
    ID_LAST = controlsc.FX_TextField_ID_LAST
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    # --- event handlers and accessors: uniform delegation to controlsc ---
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onPaint,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onMiddleBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onMiddleBtnPress,(self,) + _args, _kwargs)
        return val
    def onMiddleBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onMiddleBtnRelease,(self,) + _args, _kwargs)
        return val
    def onVerify(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onVerify,(self,) + _args, _kwargs)
        return val
    def onMotion(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onMotion,(self,) + _args, _kwargs)
        return val
    def onSelectionLost(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onSelectionLost,(self,) + _args, _kwargs)
        return val
    def onSelectionGained(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onSelectionGained,(self,) + _args, _kwargs)
        return val
    def onSelectionRequest(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onSelectionRequest,(self,) + _args, _kwargs)
        return val
    def onClipboardLost(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onClipboardLost,(self,) + _args, _kwargs)
        return val
    def onClipboardGained(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onClipboardGained,(self,) + _args, _kwargs)
        return val
    def onClipboardRequest(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onClipboardRequest,(self,) + _args, _kwargs)
        return val
    def onFocusSelf(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onFocusSelf,(self,) + _args, _kwargs)
        return val
    def onFocusIn(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onFocusIn,(self,) + _args, _kwargs)
        return val
    def onFocusOut(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onFocusOut,(self,) + _args, _kwargs)
        return val
    def onBlink(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onBlink,(self,) + _args, _kwargs)
        return val
    def onAutoScroll(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onAutoScroll,(self,) + _args, _kwargs)
        return val
    def onQueryHelp(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onQueryHelp,(self,) + _args, _kwargs)
        return val
    def onQueryTip(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onQueryTip,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetRealValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdSetRealValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetStringValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdSetStringValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetRealValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdGetRealValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetStringValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdGetStringValue,(self,) + _args, _kwargs)
        return val
    def onCmdCursorHome(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCursorHome,(self,) + _args, _kwargs)
        return val
    def onCmdCursorEnd(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCursorEnd,(self,) + _args, _kwargs)
        return val
    def onCmdCursorRight(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCursorRight,(self,) + _args, _kwargs)
        return val
    def onCmdCursorLeft(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCursorLeft,(self,) + _args, _kwargs)
        return val
    def onCmdMark(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdMark,(self,) + _args, _kwargs)
        return val
    def onCmdExtend(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdExtend,(self,) + _args, _kwargs)
        return val
    def onCmdSelectAll(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdSelectAll,(self,) + _args, _kwargs)
        return val
    def onCmdDeselectAll(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdDeselectAll,(self,) + _args, _kwargs)
        return val
    def onCmdCutSel(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCutSel,(self,) + _args, _kwargs)
        return val
    def onCmdCopySel(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdCopySel,(self,) + _args, _kwargs)
        return val
    def onCmdPasteSel(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdPasteSel,(self,) + _args, _kwargs)
        return val
    def onCmdDeleteSel(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdDeleteSel,(self,) + _args, _kwargs)
        return val
    def onCmdOverstString(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdOverstString,(self,) + _args, _kwargs)
        return val
    def onCmdInsertString(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdInsertString,(self,) + _args, _kwargs)
        return val
    def onCmdBackspace(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdBackspace,(self,) + _args, _kwargs)
        return val
    def onCmdDelete(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdDelete,(self,) + _args, _kwargs)
        return val
    def onCmdToggleEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdToggleEditable,(self,) + _args, _kwargs)
        return val
    def onUpdToggleEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onUpdToggleEditable,(self,) + _args, _kwargs)
        return val
    def onCmdToggleOverstrike(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onCmdToggleOverstrike,(self,) + _args, _kwargs)
        return val
    def onUpdToggleOverstrike(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_onUpdToggleOverstrike,(self,) + _args, _kwargs)
        return val
    def isEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_isEditable,(self,) + _args, _kwargs)
        return val
    def setEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setEditable,(self,) + _args, _kwargs)
        return val
    def setCursorPos(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setCursorPos,(self,) + _args, _kwargs)
        return val
    def getCursorPos(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getCursorPos,(self,) + _args, _kwargs)
        return val
    def setAnchorPos(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setAnchorPos,(self,) + _args, _kwargs)
        return val
    def getAnchorPos(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getAnchorPos,(self,) + _args, _kwargs)
        return val
    def setText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setText,(self,) + _args, _kwargs)
        return val
    def getText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getText,(self,) + _args, _kwargs)
        return val
    def setFont(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setFont,(self,) + _args, _kwargs)
        return val
    def getFont(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getFont,(self,) + _args, _kwargs)
        return val
    def setTextColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setTextColor,(self,) + _args, _kwargs)
        return val
    def getTextColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getTextColor,(self,) + _args, _kwargs)
        return val
    def setSelBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setSelBackColor,(self,) + _args, _kwargs)
        return val
    def getSelBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getSelBackColor,(self,) + _args, _kwargs)
        return val
    def setSelTextColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setSelTextColor,(self,) + _args, _kwargs)
        return val
    def getSelTextColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getSelTextColor,(self,) + _args, _kwargs)
        return val
    def setNumColumns(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setNumColumns,(self,) + _args, _kwargs)
        return val
    def getNumColumns(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getNumColumns,(self,) + _args, _kwargs)
        return val
    def setJustify(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setJustify,(self,) + _args, _kwargs)
        return val
    def getJustify(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getJustify,(self,) + _args, _kwargs)
        return val
    def setHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setHelpText,(self,) + _args, _kwargs)
        return val
    def getHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getHelpText,(self,) + _args, _kwargs)
        return val
    def setTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setTipText,(self,) + _args, _kwargs)
        return val
    def getTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getTipText,(self,) + _args, _kwargs)
        return val
    def setTextStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setTextStyle,(self,) + _args, _kwargs)
        return val
    def getTextStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_getTextStyle,(self,) + _args, _kwargs)
        return val
    def selectAll(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_selectAll,(self,) + _args, _kwargs)
        return val
    def setSelection(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_setSelection,(self,) + _args, _kwargs)
        return val
    def extendSelection(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_extendSelection,(self,) + _args, _kwargs)
        return val
    def killSelection(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_killSelection,(self,) + _args, _kwargs)
        return val
    def isPosSelected(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_isPosSelected,(self,) + _args, _kwargs)
        return val
    def isPosVisible(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_isPosVisible,(self,) + _args, _kwargs)
        return val
    def makePositionVisible(self, *_args, **_kwargs):
        val = apply(controlsc.FX_TextField_makePositionVisible,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FX_TextField instance at %s>" % (self.this,)
class FX_TextField(FX_TextFieldPtr):
    """Owning wrapper for a new FX_TextField: constructs the underlying C++
    object via the controlsc extension.  thisown = 1 appears to mark
    Python-side ownership (SWIG convention -- verify against generator).
    Unlike FXTextField below, this variant does not call FXPyRegister.
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_TextField,_args,_kwargs)
        self.thisown = 1
class FXTextFieldPtr(FX_TextFieldPtr):
    """Shadow class for the Python-subclassable FXTextField variant.

    Overrides the generic widget lifecycle/geometry methods to forward,
    via apply() (pre-Python-2.0 spelling of f(*args, **kwargs)), to the
    FXTextField_* functions in the `controlsc` extension module.
    thisown = 0 appears to mean a borrowed, non-owned pointer (SWIG
    convention -- verify against generator).  NOTE(review): generated code.
    """
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXTextField_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FXTextField instance at %s>" % (self.this,)
class FXTextField(FXTextFieldPtr):
    """Owning wrapper for a new FXTextField: constructs the underlying C++
    object via the controlsc extension and registers the Python instance
    (FXPyRegister).  thisown = 1 appears to mark Python-side ownership
    (SWIG convention -- verify against generator).
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXTextField,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ButtonPtr(FX_LabelPtr):
    """SWIG-style shadow class for the C extension's FX_Button widget.

    Every method forwards, via apply() (pre-Python-2.0 spelling of
    f(*args, **kwargs)), to the matching FX_Button_* function in the
    `controlsc` extension module.  thisown = 0 appears to mean a borrowed,
    non-owned pointer (SWIG convention -- verify against generator).
    NOTE(review): generated binding code.
    """
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onPaint,(self,) + _args, _kwargs)
        return val
    def onUpdate(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onUpdate,(self,) + _args, _kwargs)
        return val
    def onEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onEnter,(self,) + _args, _kwargs)
        return val
    def onLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onLeave,(self,) + _args, _kwargs)
        return val
    def onFocusIn(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onFocusIn,(self,) + _args, _kwargs)
        return val
    def onFocusOut(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onFocusOut,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onHotKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onHotKeyPress,(self,) + _args, _kwargs)
        return val
    def onHotKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onHotKeyRelease,(self,) + _args, _kwargs)
        return val
    def onCheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onCheck,(self,) + _args, _kwargs)
        return val
    def onUncheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onUncheck,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def setState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_setState,(self,) + _args, _kwargs)
        return val
    def getState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_getState,(self,) + _args, _kwargs)
        return val
    def setButtonStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_setButtonStyle,(self,) + _args, _kwargs)
        return val
    def getButtonStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Button_getButtonStyle,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FX_Button instance at %s>" % (self.this,)
class FX_Button(FX_ButtonPtr):
    """Owning wrapper for a new FX_Button: constructs the underlying C++
    object via the controlsc extension.  thisown = 1 appears to mark
    Python-side ownership (SWIG convention -- verify against generator).
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_Button,_args,_kwargs)
        self.thisown = 1
class FXButtonPtr(FX_ButtonPtr):
    """Shadow class for the Python-subclassable FXButton variant.

    Overrides the generic widget lifecycle/geometry methods to forward,
    via apply() (pre-Python-2.0 spelling of f(*args, **kwargs)), to the
    FXButton_* functions in the `controlsc` extension module.
    thisown = 0 appears to mean a borrowed, non-owned pointer (SWIG
    convention -- verify against generator).  NOTE(review): generated code.
    """
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXButton_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FXButton instance at %s>" % (self.this,)
class FXButton(FXButtonPtr):
    """Owning wrapper for a new FXButton: constructs the underlying C++
    object via the controlsc extension and registers the Python instance
    (FXPyRegister).  thisown = 1 appears to mark Python-side ownership
    (SWIG convention -- verify against generator).
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXButton,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ToggleButtonPtr(FX_LabelPtr):
    """SWIG-style shadow class for the C extension's FX_ToggleButton widget.

    Every method forwards, via apply() (pre-Python-2.0 spelling of
    f(*args, **kwargs)), to the matching FX_ToggleButton_* function in the
    `controlsc` extension module.  thisown = 0 appears to mean a borrowed,
    non-owned pointer (SWIG convention -- verify against generator).
    NOTE(review): generated binding code.
    """
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onPaint,(self,) + _args, _kwargs)
        return val
    def onUpdate(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onUpdate,(self,) + _args, _kwargs)
        return val
    def onEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onEnter,(self,) + _args, _kwargs)
        return val
    def onLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onLeave,(self,) + _args, _kwargs)
        return val
    def onFocusIn(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onFocusIn,(self,) + _args, _kwargs)
        return val
    def onFocusOut(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onFocusOut,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onHotKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onHotKeyPress,(self,) + _args, _kwargs)
        return val
    def onHotKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onHotKeyRelease,(self,) + _args, _kwargs)
        return val
    def onCheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onCheck,(self,) + _args, _kwargs)
        return val
    def onUncheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onUncheck,(self,) + _args, _kwargs)
        return val
    def onQueryHelp(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onQueryHelp,(self,) + _args, _kwargs)
        return val
    def onQueryTip(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onQueryTip,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def setState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_setState,(self,) + _args, _kwargs)
        return val
    def getState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_getState,(self,) + _args, _kwargs)
        return val
    def setAltText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_setAltText,(self,) + _args, _kwargs)
        return val
    def getAltText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_getAltText,(self,) + _args, _kwargs)
        return val
    def setAltIcon(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_setAltIcon,(self,) + _args, _kwargs)
        return val
    def getAltIcon(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_getAltIcon,(self,) + _args, _kwargs)
        return val
    def setAltHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_setAltHelpText,(self,) + _args, _kwargs)
        return val
    def getAltHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_getAltHelpText,(self,) + _args, _kwargs)
        return val
    def setAltTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_setAltTipText,(self,) + _args, _kwargs)
        return val
    def getAltTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ToggleButton_getAltTipText,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FX_ToggleButton instance at %s>" % (self.this,)
class FX_ToggleButton(FX_ToggleButtonPtr):
    """Owning wrapper for a new FX_ToggleButton: constructs the underlying
    C++ object via the controlsc extension.  thisown = 1 appears to mark
    Python-side ownership (SWIG convention -- verify against generator).
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FX_ToggleButton,_args,_kwargs)
        self.thisown = 1
class FXToggleButtonPtr(FX_ToggleButtonPtr):
    """Shadow class for the Python-subclassable FXToggleButton variant.

    Overrides the generic widget lifecycle/geometry methods to forward,
    via apply() (pre-Python-2.0 spelling of f(*args, **kwargs)), to the
    FXToggleButton_* functions in the `controlsc` extension module.
    thisown = 0 appears to mean a borrowed, non-owned pointer (SWIG
    convention -- verify against generator).  NOTE(review): generated code.
    """
    def __init__(self,this):
        # Wrap an existing C pointer without taking ownership.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXToggleButton_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Debug representation showing the wrapped C pointer.
        return "<C FXToggleButton instance at %s>" % (self.this,)
class FXToggleButton(FXToggleButtonPtr):
    """Owning wrapper for a new FXToggleButton: constructs the underlying
    C++ object via the controlsc extension and registers the Python
    instance (FXPyRegister).  thisown = 1 appears to mark Python-side
    ownership (SWIG convention -- verify against generator).
    """
    def __init__(self,*_args,**_kwargs):
        self.this = apply(controlsc.new_FXToggleButton,_args,_kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_RadioButtonPtr(FX_LabelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onPaint,(self,) + _args, _kwargs)
return val
def onUpdate(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUpdate,(self,) + _args, _kwargs)
return val
def onEnter(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onEnter,(self,) + _args, _kwargs)
return val
def onLeave(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeave,(self,) + _args, _kwargs)
return val
def onFocusIn(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onFocusIn,(self,) + _args, _kwargs)
return val
def onFocusOut(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onFocusOut,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUngrabbed,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onKeyPress,(self,) + _args, _kwargs)
return val
def onKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onKeyRelease,(self,) + _args, _kwargs)
return val
def onHotKeyPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onHotKeyPress,(self,) + _args, _kwargs)
return val
def onHotKeyRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onHotKeyRelease,(self,) + _args, _kwargs)
return val
def onUncheckRadio(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUncheckRadio,(self,) + _args, _kwargs)
return val
def onCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCheck,(self,) + _args, _kwargs)
return val
def onUncheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUncheck,(self,) + _args, _kwargs)
return val
def onUnknown(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onUnknown,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def setCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_setCheck,(self,) + _args, _kwargs)
return val
def getCheck(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_getCheck,(self,) + _args, _kwargs)
return val
def setRadioButtonStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_RadioButton_setRadioButtonStyle,(self,) + _args, _kwargs)
return val
    def getRadioButtonStyle(self, *_args, **_kwargs):
        """Forward to controlsc.FX_RadioButton_getRadioButtonStyle on the wrapped C object."""
        val = apply(controlsc.FX_RadioButton_getRadioButtonStyle,(self,) + _args, _kwargs)
        return val
    def getRadioColor(self, *_args, **_kwargs):
        """Forward to controlsc.FX_RadioButton_getRadioColor on the wrapped C object."""
        val = apply(controlsc.FX_RadioButton_getRadioColor,(self,) + _args, _kwargs)
        return val
    def setRadioColor(self, *_args, **_kwargs):
        """Forward to controlsc.FX_RadioButton_setRadioColor on the wrapped C object."""
        val = apply(controlsc.FX_RadioButton_setRadioColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FX_RadioButton instance at %s>" % (self.this,)
class FX_RadioButton(FX_RadioButtonPtr):
    """SWIG shadow class that constructs and owns a new C FX_RadioButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FX_RadioButton,_args,_kwargs)
        self.thisown = 1
class FXRadioButtonPtr(FX_RadioButtonPtr):
    """SWIG proxy around an existing C FXRadioButton pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FXRadioButton_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXRadioButton_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FXRadioButton instance at %s>" % (self.this,)
class FXRadioButton(FXRadioButtonPtr):
    """SWIG shadow class that constructs and owns a new C FXRadioButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FXRadioButton,_args,_kwargs)
        self.thisown = 1
        # NOTE(review): presumably registers this instance with the FXPy
        # runtime so C-side messages reach Python overrides -- confirm.
        FXPyRegister(self)
class FX_CheckButtonPtr(FX_LabelPtr):
    """SWIG proxy around an existing C FX_CheckButton pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FX_CheckButton_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onPaint,(self,) + _args, _kwargs)
        return val
    def onUpdate(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onUpdate,(self,) + _args, _kwargs)
        return val
    def onEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onEnter,(self,) + _args, _kwargs)
        return val
    def onLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onLeave,(self,) + _args, _kwargs)
        return val
    def onFocusIn(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onFocusIn,(self,) + _args, _kwargs)
        return val
    def onFocusOut(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onFocusOut,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onHotKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onHotKeyPress,(self,) + _args, _kwargs)
        return val
    def onHotKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onHotKeyRelease,(self,) + _args, _kwargs)
        return val
    def onCheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onCheck,(self,) + _args, _kwargs)
        return val
    def onUncheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onUncheck,(self,) + _args, _kwargs)
        return val
    def onUnknown(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onUnknown,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def setCheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_setCheck,(self,) + _args, _kwargs)
        return val
    def getCheck(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_getCheck,(self,) + _args, _kwargs)
        return val
    def getBoxColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_getBoxColor,(self,) + _args, _kwargs)
        return val
    def setBoxColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_setBoxColor,(self,) + _args, _kwargs)
        return val
    def setCheckButtonStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_setCheckButtonStyle,(self,) + _args, _kwargs)
        return val
    def getCheckButtonStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_CheckButton_getCheckButtonStyle,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FX_CheckButton instance at %s>" % (self.this,)
class FX_CheckButton(FX_CheckButtonPtr):
    """SWIG shadow class that constructs and owns a new C FX_CheckButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FX_CheckButton,_args,_kwargs)
        self.thisown = 1
class FXCheckButtonPtr(FX_CheckButtonPtr):
    """SWIG proxy around an existing C FXCheckButton pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FXCheckButton_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXCheckButton_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FXCheckButton instance at %s>" % (self.this,)
class FXCheckButton(FXCheckButtonPtr):
    """SWIG shadow class that constructs and owns a new C FXCheckButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FXCheckButton,_args,_kwargs)
        self.thisown = 1
        # NOTE(review): presumably registers this instance with the FXPy
        # runtime so C-side messages reach Python overrides -- confirm.
        FXPyRegister(self)
class FX_ArrowButtonPtr(FX_FramePtr):
    """SWIG proxy around an existing C FX_ArrowButton pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FX_ArrowButton_* C entry point.
    """
    # Message identifiers exported from the C module.
    ID_REPEAT = controlsc.FX_ArrowButton_ID_REPEAT
    ID_LAST = controlsc.FX_ArrowButton_ID_LAST
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onPaint,(self,) + _args, _kwargs)
        return val
    def onUpdate(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onUpdate,(self,) + _args, _kwargs)
        return val
    def onEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onEnter,(self,) + _args, _kwargs)
        return val
    def onLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onLeave,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onUngrabbed(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onUngrabbed,(self,) + _args, _kwargs)
        return val
    def onRepeat(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onRepeat,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onHotKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onHotKeyPress,(self,) + _args, _kwargs)
        return val
    def onHotKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onHotKeyRelease,(self,) + _args, _kwargs)
        return val
    def onQueryHelp(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onQueryHelp,(self,) + _args, _kwargs)
        return val
    def onQueryTip(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_onQueryTip,(self,) + _args, _kwargs)
        return val
    def setState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setState,(self,) + _args, _kwargs)
        return val
    def getState(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getState,(self,) + _args, _kwargs)
        return val
    def setHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setHelpText,(self,) + _args, _kwargs)
        return val
    def getHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getHelpText,(self,) + _args, _kwargs)
        return val
    def setTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setTipText,(self,) + _args, _kwargs)
        return val
    def getTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getTipText,(self,) + _args, _kwargs)
        return val
    def setArrowStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setArrowStyle,(self,) + _args, _kwargs)
        return val
    def getArrowStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getArrowStyle,(self,) + _args, _kwargs)
        return val
    def setArrowSize(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setArrowSize,(self,) + _args, _kwargs)
        return val
    def getArrowSize(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getArrowSize,(self,) + _args, _kwargs)
        return val
    def setJustify(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setJustify,(self,) + _args, _kwargs)
        return val
    def getJustify(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getJustify,(self,) + _args, _kwargs)
        return val
    def getArrowColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_getArrowColor,(self,) + _args, _kwargs)
        return val
    def setArrowColor(self, *_args, **_kwargs):
        val = apply(controlsc.FX_ArrowButton_setArrowColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FX_ArrowButton instance at %s>" % (self.this,)
class FX_ArrowButton(FX_ArrowButtonPtr):
    """SWIG shadow class that constructs and owns a new C FX_ArrowButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FX_ArrowButton,_args,_kwargs)
        self.thisown = 1
class FXArrowButtonPtr(FX_ArrowButtonPtr):
    """SWIG proxy around an existing C FXArrowButton pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FXArrowButton_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXArrowButton_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FXArrowButton instance at %s>" % (self.this,)
class FXArrowButton(FXArrowButtonPtr):
    """SWIG shadow class that constructs and owns a new C FXArrowButton object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FXArrowButton,_args,_kwargs)
        self.thisown = 1
        # NOTE(review): presumably registers this instance with the FXPy
        # runtime so C-side messages reach Python overrides -- confirm.
        FXPyRegister(self)
class FX_PickerPtr(FX_ButtonPtr):
    """SWIG proxy around an existing C FX_Picker pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FX_Picker_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onMotion(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Picker_onMotion,(self,) + _args, _kwargs)
        return val
    def onLeftBtnPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Picker_onLeftBtnPress,(self,) + _args, _kwargs)
        return val
    def onLeftBtnRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Picker_onLeftBtnRelease,(self,) + _args, _kwargs)
        return val
    def onEnter(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Picker_onEnter,(self,) + _args, _kwargs)
        return val
    def onLeave(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Picker_onLeave,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FX_Picker instance at %s>" % (self.this,)
class FX_Picker(FX_PickerPtr):
    """SWIG shadow class that constructs and owns a new C FX_Picker object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FX_Picker,_args,_kwargs)
        self.thisown = 1
class FXPickerPtr(FX_PickerPtr):
    """SWIG proxy around an existing C FXPicker pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FXPicker_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXPicker_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FXPicker instance at %s>" % (self.this,)
class FXPicker(FXPickerPtr):
    """SWIG shadow class that constructs and owns a new C FXPicker object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FXPicker,_args,_kwargs)
        self.thisown = 1
        # NOTE(review): presumably registers this instance with the FXPy
        # runtime so C-side messages reach Python overrides -- confirm.
        FXPyRegister(self)
class FX_SpinnerPtr(FX_PackerPtr):
    """SWIG proxy around an existing C FX_Spinner pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FX_Spinner_* C entry point.
    """
    # Message identifiers exported from the C module.
    ID_INCREMENT = controlsc.FX_Spinner_ID_INCREMENT
    ID_DECREMENT = controlsc.FX_Spinner_ID_DECREMENT
    ID_ENTRY = controlsc.FX_Spinner_ID_ENTRY
    ID_LAST = controlsc.FX_Spinner_ID_LAST
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onUpdIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onUpdIncrement,(self,) + _args, _kwargs)
        return val
    def onCmdIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdIncrement,(self,) + _args, _kwargs)
        return val
    def onUpdDecrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onUpdDecrement,(self,) + _args, _kwargs)
        return val
    def onCmdDecrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdDecrement,(self,) + _args, _kwargs)
        return val
    def onCmdEntry(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdEntry,(self,) + _args, _kwargs)
        return val
    def onChgEntry(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onChgEntry,(self,) + _args, _kwargs)
        return val
    def onKeyPress(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onKeyPress,(self,) + _args, _kwargs)
        return val
    def onKeyRelease(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onKeyRelease,(self,) + _args, _kwargs)
        return val
    def onCmdSetValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdSetValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdSetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdGetIntValue,(self,) + _args, _kwargs)
        return val
    def onCmdSetIntRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdSetIntRange,(self,) + _args, _kwargs)
        return val
    def onCmdGetIntRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_onCmdGetIntRange,(self,) + _args, _kwargs)
        return val
    def increment(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_increment,(self,) + _args, _kwargs)
        return val
    def decrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_decrement,(self,) + _args, _kwargs)
        return val
    def isCyclic(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_isCyclic,(self,) + _args, _kwargs)
        return val
    def setCyclic(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setCyclic,(self,) + _args, _kwargs)
        return val
    def isTextVisible(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_isTextVisible,(self,) + _args, _kwargs)
        return val
    def setTextVisible(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setTextVisible,(self,) + _args, _kwargs)
        return val
    def setValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setValue,(self,) + _args, _kwargs)
        return val
    def getValue(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getValue,(self,) + _args, _kwargs)
        return val
    def setRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setRange,(self,) + _args, _kwargs)
        return val
    def getRange(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getRange,(self,) + _args, _kwargs)
        return val
    def setIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setIncrement,(self,) + _args, _kwargs)
        return val
    def getIncrement(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getIncrement,(self,) + _args, _kwargs)
        return val
    def setFont(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setFont,(self,) + _args, _kwargs)
        return val
    def getFont(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getFont,(self,) + _args, _kwargs)
        return val
    def setHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setHelpText,(self,) + _args, _kwargs)
        return val
    def getHelpText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getHelpText,(self,) + _args, _kwargs)
        return val
    def setTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setTipText,(self,) + _args, _kwargs)
        return val
    def getTipText(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getTipText,(self,) + _args, _kwargs)
        return val
    def setSpinnerStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setSpinnerStyle,(self,) + _args, _kwargs)
        return val
    def getSpinnerStyle(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_getSpinnerStyle,(self,) + _args, _kwargs)
        return val
    def setEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_setEditable,(self,) + _args, _kwargs)
        return val
    def isEditable(self, *_args, **_kwargs):
        val = apply(controlsc.FX_Spinner_isEditable,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FX_Spinner instance at %s>" % (self.this,)
class FX_Spinner(FX_SpinnerPtr):
    """SWIG shadow class that constructs and owns a new C FX_Spinner object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FX_Spinner,_args,_kwargs)
        self.thisown = 1
class FXSpinnerPtr(FX_SpinnerPtr):
    """SWIG proxy around an existing C FXSpinner pointer.

    Every method below is generated boilerplate that forwards the call,
    unchanged, to the matching controlsc.FXSpinner_* C entry point.
    """
    def __init__(self,this):
        # Wrap an existing C pointer; thisown = 0 means Python does not own it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_onDefault,(self,) + _args, _kwargs)
        return val
    def create(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_create,(self,) + _args, _kwargs)
        return val
    def destroy(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_destroy,(self,) + _args, _kwargs)
        return val
    def detach(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_detach,(self,) + _args, _kwargs)
        return val
    def resize(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_resize,(self,) + _args, _kwargs)
        return val
    def getDefaultWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_getDefaultWidth,(self,) + _args, _kwargs)
        return val
    def getDefaultHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_getDefaultHeight,(self,) + _args, _kwargs)
        return val
    def show(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_show,(self,) + _args, _kwargs)
        return val
    def hide(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_hide,(self,) + _args, _kwargs)
        return val
    def enable(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_enable,(self,) + _args, _kwargs)
        return val
    def disable(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_disable,(self,) + _args, _kwargs)
        return val
    def canFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_canFocus,(self,) + _args, _kwargs)
        return val
    def setFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_setFocus,(self,) + _args, _kwargs)
        return val
    def killFocus(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_killFocus,(self,) + _args, _kwargs)
        return val
    def setDefault(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_setDefault,(self,) + _args, _kwargs)
        return val
    def recalc(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_recalc,(self,) + _args, _kwargs)
        return val
    def layout(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_layout,(self,) + _args, _kwargs)
        return val
    def lower(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_lower,(self,) + _args, _kwargs)
        return val
    def move(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_move,(self,) + _args, _kwargs)
        return val
    def position(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_position,(self,) + _args, _kwargs)
        return val
    def isComposite(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_isComposite,(self,) + _args, _kwargs)
        return val
    def contains(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_contains,(self,) + _args, _kwargs)
        return val
    def getWidthForHeight(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_getWidthForHeight,(self,) + _args, _kwargs)
        return val
    def getHeightForWidth(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_getHeightForWidth,(self,) + _args, _kwargs)
        return val
    def doesSaveUnder(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_doesSaveUnder,(self,) + _args, _kwargs)
        return val
    def reparent(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_reparent,(self,) + _args, _kwargs)
        return val
    def setBackColor(self, *_args, **_kwargs):
        val = apply(controlsc.FXSpinner_setBackColor,(self,) + _args, _kwargs)
        return val
    def __repr__(self):
        # Identify the proxy by the pointer string of the wrapped C object.
        return "<C FXSpinner instance at %s>" % (self.this,)
class FXSpinner(FXSpinnerPtr):
    """SWIG shadow class that constructs and owns a new C FXSpinner object."""
    def __init__(self,*_args,**_kwargs):
        # Create the underlying C object; thisown = 1 marks Python as its owner.
        self.this = apply(controlsc.new_FXSpinner,_args,_kwargs)
        self.thisown = 1
        # NOTE(review): presumably registers this instance with the FXPy
        # runtime so C-side messages reach Python overrides -- confirm.
        FXPyRegister(self)
class FX_TooltipPtr(FX_ShellPtr):
    """Non-owning proxy for a C-level FX_Tooltip.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_Tooltip_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    ID_TIP_SHOW = controlsc.FX_Tooltip_ID_TIP_SHOW
    ID_TIP_HIDE = controlsc.FX_Tooltip_ID_TIP_HIDE
    ID_LAST = controlsc.FX_Tooltip_ID_LAST
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onPaint(self, *_args, **_kwargs)
    def onUpdate(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onUpdate(self, *_args, **_kwargs)
    def onTipShow(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onTipShow(self, *_args, **_kwargs)
    def onTipHide(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onTipHide(self, *_args, **_kwargs)
    def onCmdGetStringValue(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onCmdGetStringValue(self, *_args, **_kwargs)
    def onCmdSetStringValue(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_onCmdSetStringValue(self, *_args, **_kwargs)
    def setText(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_setText(self, *_args, **_kwargs)
    def getText(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_getText(self, *_args, **_kwargs)
    def setFont(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_setFont(self, *_args, **_kwargs)
    def getFont(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_getFont(self, *_args, **_kwargs)
    def getTextColor(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_getTextColor(self, *_args, **_kwargs)
    def setTextColor(self, *_args, **_kwargs):
        return controlsc.FX_Tooltip_setTextColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_Tooltip instance at %s>" % (self.this,)
class FX_Tooltip(FX_TooltipPtr):
    """Owning constructor wrapper: creates a new C FX_Tooltip.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_Tooltip(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXTooltipPtr(FX_TooltipPtr):
    """Non-owning proxy for a C-level FXTooltip.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXTooltip_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXTooltip_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXTooltip_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXTooltip_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXTooltip_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXTooltip_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXTooltip_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXTooltip_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXTooltip_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXTooltip_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXTooltip_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXTooltip_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXTooltip_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXTooltip_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXTooltip_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXTooltip_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXTooltip_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXTooltip_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXTooltip_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXTooltip_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXTooltip_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXTooltip_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXTooltip_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXTooltip_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXTooltip_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXTooltip_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXTooltip_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXTooltip_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXTooltip instance at %s>" % (self.this,)
class FXTooltip(FXTooltipPtr):
    """Owning constructor wrapper: creates a new C FXTooltip and registers it.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXTooltip(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
        FXPyRegister(self)
class FX_OptionPtr(FX_LabelPtr):
    """Non-owning proxy for a C-level FX_Option.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_Option_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_Option_onPaint(self, *_args, **_kwargs)
    def onEnter(self, *_args, **_kwargs):
        return controlsc.FX_Option_onEnter(self, *_args, **_kwargs)
    def onLeave(self, *_args, **_kwargs):
        return controlsc.FX_Option_onLeave(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_Option_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_Option_onLeftBtnRelease(self, *_args, **_kwargs)
    def onKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_Option_onKeyPress(self, *_args, **_kwargs)
    def onKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_Option_onKeyRelease(self, *_args, **_kwargs)
    def onHotKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_Option_onHotKeyPress(self, *_args, **_kwargs)
    def onHotKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_Option_onHotKeyRelease(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_Option instance at %s>" % (self.this,)
class FX_Option(FX_OptionPtr):
    """Owning constructor wrapper: creates a new C FX_Option.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_Option(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXOptionPtr(FX_OptionPtr):
    """Non-owning proxy for a C-level FXOption.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXOption_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXOption_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXOption_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXOption_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXOption_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXOption_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXOption_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXOption_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXOption_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXOption_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXOption_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXOption_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXOption_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXOption_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXOption_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXOption_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXOption_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXOption_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXOption_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXOption_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXOption_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXOption_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXOption_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXOption_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXOption_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXOption_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXOption_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXOption_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXOption instance at %s>" % (self.this,)
class FXOption(FXOptionPtr):
    """Owning constructor wrapper: creates a new C FXOption and registers it.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXOption(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
        FXPyRegister(self)
class FX_OptionMenuPtr(FX_LabelPtr):
    """Non-owning proxy for a C-level FX_OptionMenu.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_OptionMenu_* C entry point.  The deprecated apply()
    builtin (removed in Python 3) was replaced by direct argument
    unpacking; requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onPaint(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onLeftBtnRelease(self, *_args, **_kwargs)
    def onFocusIn(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onFocusIn(self, *_args, **_kwargs)
    def onFocusOut(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onFocusOut(self, *_args, **_kwargs)
    def onMotion(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onMotion(self, *_args, **_kwargs)
    def onKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onKeyPress(self, *_args, **_kwargs)
    def onKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onKeyRelease(self, *_args, **_kwargs)
    def onCmdPost(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onCmdPost(self, *_args, **_kwargs)
    def onCmdUnpost(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onCmdUnpost(self, *_args, **_kwargs)
    def onQueryHelp(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onQueryHelp(self, *_args, **_kwargs)
    def onQueryTip(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onQueryTip(self, *_args, **_kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onCmdSetValue(self, *_args, **_kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onCmdSetIntValue(self, *_args, **_kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_onCmdGetIntValue(self, *_args, **_kwargs)
    def setCurrent(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_setCurrent(self, *_args, **_kwargs)
    def getCurrent(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_getCurrent(self, *_args, **_kwargs)
    def setCurrentNo(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_setCurrentNo(self, *_args, **_kwargs)
    def getCurrentNo(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_getCurrentNo(self, *_args, **_kwargs)
    def setPopup(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_setPopup(self, *_args, **_kwargs)
    def getPopup(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_getPopup(self, *_args, **_kwargs)
    def isPopped(self, *_args, **_kwargs):
        return controlsc.FX_OptionMenu_isPopped(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_OptionMenu instance at %s>" % (self.this,)
class FX_OptionMenu(FX_OptionMenuPtr):
    """Owning constructor wrapper: creates a new C FX_OptionMenu.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_OptionMenu(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXOptionMenuPtr(FX_OptionMenuPtr):
    """Non-owning proxy for a C-level FXOptionMenu.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXOptionMenu_* C entry point.  The deprecated apply()
    builtin (removed in Python 3) was replaced by direct argument
    unpacking; requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXOptionMenu_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXOptionMenu instance at %s>" % (self.this,)
class FXOptionMenu(FXOptionMenuPtr):
    """Owning constructor wrapper: creates a new C FXOptionMenu and registers it.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXOptionMenu(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
        FXPyRegister(self)
class FX_TabBarPtr(FX_PackerPtr):
    """Non-owning proxy for a C-level FX_TabBar.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_TabBar_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    ID_OPEN_ITEM = controlsc.FX_TabBar_ID_OPEN_ITEM
    ID_OPEN_FIRST = controlsc.FX_TabBar_ID_OPEN_FIRST
    ID_OPEN_SECOND = controlsc.FX_TabBar_ID_OPEN_SECOND
    ID_OPEN_THIRD = controlsc.FX_TabBar_ID_OPEN_THIRD
    ID_OPEN_FOURTH = controlsc.FX_TabBar_ID_OPEN_FOURTH
    ID_OPEN_FIFTH = controlsc.FX_TabBar_ID_OPEN_FIFTH
    ID_OPEN_SIXTH = controlsc.FX_TabBar_ID_OPEN_SIXTH
    ID_OPEN_SEVENTH = controlsc.FX_TabBar_ID_OPEN_SEVENTH
    ID_OPEN_EIGHTH = controlsc.FX_TabBar_ID_OPEN_EIGHTH
    ID_OPEN_NINETH = controlsc.FX_TabBar_ID_OPEN_NINETH
    ID_OPEN_TENTH = controlsc.FX_TabBar_ID_OPEN_TENTH
    ID_OPEN_LAST = controlsc.FX_TabBar_ID_OPEN_LAST
    ID_LAST = controlsc.FX_TabBar_ID_LAST
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onPaint(self, *_args, **_kwargs)
    def onFocusNext(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusNext(self, *_args, **_kwargs)
    def onFocusPrev(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusPrev(self, *_args, **_kwargs)
    def onFocusUp(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusUp(self, *_args, **_kwargs)
    def onFocusDown(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusDown(self, *_args, **_kwargs)
    def onFocusLeft(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusLeft(self, *_args, **_kwargs)
    def onFocusRight(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onFocusRight(self, *_args, **_kwargs)
    def onCmdOpenItem(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onCmdOpenItem(self, *_args, **_kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onCmdSetValue(self, *_args, **_kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onCmdSetIntValue(self, *_args, **_kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onCmdGetIntValue(self, *_args, **_kwargs)
    def onCmdOpen(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onCmdOpen(self, *_args, **_kwargs)
    def onUpdOpen(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_onUpdOpen(self, *_args, **_kwargs)
    def setCurrent(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_setCurrent(self, *_args, **_kwargs)
    def getCurrent(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_getCurrent(self, *_args, **_kwargs)
    def getTabStyle(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_getTabStyle(self, *_args, **_kwargs)
    def setTabStyle(self, *_args, **_kwargs):
        return controlsc.FX_TabBar_setTabStyle(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_TabBar instance at %s>" % (self.this,)
class FX_TabBar(FX_TabBarPtr):
    """Owning constructor wrapper: creates a new C FX_TabBar.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_TabBar(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXTabBarPtr(FX_TabBarPtr):
    """Non-owning proxy for a C-level FXTabBar.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXTabBar_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXTabBar_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXTabBar_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXTabBar_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXTabBar_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXTabBar_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXTabBar_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXTabBar_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXTabBar_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXTabBar_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXTabBar_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXTabBar_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBar_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBar_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBar_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXTabBar_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXTabBar_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXTabBar_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXTabBar_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXTabBar_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXTabBar_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXTabBar_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXTabBar_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXTabBar_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXTabBar_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXTabBar_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXTabBar_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXTabBar_setBackColor(self, *_args, **_kwargs)
    def setCurrent(self, *_args, **_kwargs):
        return controlsc.FXTabBar_setCurrent(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXTabBar instance at %s>" % (self.this,)
class FXTabBar(FXTabBarPtr):
    """Owning constructor wrapper: creates a new C FXTabBar and registers it.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXTabBar(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
        FXPyRegister(self)
class FX_TabItemPtr(FX_LabelPtr):
    """Non-owning proxy for a C-level FX_TabItem.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_TabItem_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onPaint(self, *_args, **_kwargs)
    def onFocusIn(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onFocusIn(self, *_args, **_kwargs)
    def onFocusOut(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onFocusOut(self, *_args, **_kwargs)
    def onUngrabbed(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onUngrabbed(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onLeftBtnRelease(self, *_args, **_kwargs)
    def onKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onKeyPress(self, *_args, **_kwargs)
    def onKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onKeyRelease(self, *_args, **_kwargs)
    def onHotKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onHotKeyPress(self, *_args, **_kwargs)
    def onHotKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_onHotKeyRelease(self, *_args, **_kwargs)
    def getTabOrientation(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_getTabOrientation(self, *_args, **_kwargs)
    def setTabOrientation(self, *_args, **_kwargs):
        return controlsc.FX_TabItem_setTabOrientation(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_TabItem instance at %s>" % (self.this,)
class FX_TabItem(FX_TabItemPtr):
    """Owning constructor wrapper: creates a new C FX_TabItem.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_TabItem(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXTabItemPtr(FX_TabItemPtr):
    """Non-owning proxy for a C-level FXTabItem.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXTabItem_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXTabItem_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXTabItem_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXTabItem_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXTabItem_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXTabItem_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXTabItem_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXTabItem_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXTabItem_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXTabItem_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXTabItem_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXTabItem_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXTabItem_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXTabItem_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXTabItem_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXTabItem_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXTabItem_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXTabItem_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXTabItem_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXTabItem_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXTabItem_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXTabItem_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXTabItem_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXTabItem_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXTabItem_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXTabItem_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXTabItem_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXTabItem_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXTabItem instance at %s>" % (self.this,)
class FXTabItem(FXTabItemPtr):
    """Owning constructor wrapper: creates a new C FXTabItem and registers it.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXTabItem(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
        FXPyRegister(self)
class FX_TabBookPtr(FX_TabBarPtr):
    """Non-owning proxy for a C-level FX_TabBook.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FX_TabBook_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onPaint(self, *_args, **_kwargs)
    def onFocusNext(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusNext(self, *_args, **_kwargs)
    def onFocusPrev(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusPrev(self, *_args, **_kwargs)
    def onFocusUp(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusUp(self, *_args, **_kwargs)
    def onFocusDown(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusDown(self, *_args, **_kwargs)
    def onFocusLeft(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusLeft(self, *_args, **_kwargs)
    def onFocusRight(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onFocusRight(self, *_args, **_kwargs)
    def onCmdOpenItem(self, *_args, **_kwargs):
        return controlsc.FX_TabBook_onCmdOpenItem(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_TabBook instance at %s>" % (self.this,)
class FX_TabBook(FX_TabBookPtr):
    """Owning constructor wrapper: creates a new C FX_TabBook.

    apply() (removed in Python 3) replaced by direct argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_TabBook(*_args, **_kwargs)
        self.thisown = 1  # Python side owns the C object
class FXTabBookPtr(FX_TabBookPtr):
    """Non-owning proxy for a C-level FXTabBook.

    Every method forwards (self, *args, **kwargs) to the matching
    controlsc.FXTabBook_* C entry point.  The deprecated apply() builtin
    (removed in Python 3) was replaced by direct argument unpacking;
    requires Python >= 2.0, behaviour otherwise unchanged.
    """
    def __init__(self, this):
        # Wrap an existing C pointer; thisown=0 means the C side owns it.
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXTabBook_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXTabBook_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXTabBook_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXTabBook_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXTabBook_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXTabBook_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXTabBook_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXTabBook_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXTabBook_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXTabBook_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXTabBook_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBook_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBook_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXTabBook_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXTabBook_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXTabBook_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXTabBook_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXTabBook_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXTabBook_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXTabBook_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXTabBook_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXTabBook_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXTabBook_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXTabBook_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXTabBook_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXTabBook_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXTabBook_setBackColor(self, *_args, **_kwargs)
    def setCurrent(self, *_args, **_kwargs):
        return controlsc.FXTabBook_setCurrent(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXTabBook instance at %s>" % (self.this,)
class FXTabBook(FXTabBookPtr):
    """Owning shadow class: constructs a new C FXTabBook and registers it."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object, take ownership, then register the
        # Python wrapper so virtual calls can be routed back to it.
        self.this = apply(controlsc.new_FXTabBook, _args, _kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ScrollbarPtr(FX_WindowPtr):
    """Generated shadow class forwarding FXScrollbar calls to controlsc.

    Wraps an existing C pointer without taking ownership (thisown == 0).
    """
    # Message identifiers mirrored from the C++ class.
    ID_TIMEWHEEL = controlsc.FX_Scrollbar_ID_TIMEWHEEL
    ID_AUTOINC_LINE = controlsc.FX_Scrollbar_ID_AUTOINC_LINE
    ID_AUTODEC_LINE = controlsc.FX_Scrollbar_ID_AUTODEC_LINE
    ID_AUTOINC_PAGE = controlsc.FX_Scrollbar_ID_AUTOINC_PAGE
    ID_AUTODEC_PAGE = controlsc.FX_Scrollbar_ID_AUTODEC_PAGE
    ID_AUTOINC_PIX = controlsc.FX_Scrollbar_ID_AUTOINC_PIX
    ID_AUTODEC_PIX = controlsc.FX_Scrollbar_ID_AUTODEC_PIX
    ID_LAST = controlsc.FX_Scrollbar_ID_LAST
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onPaint, (self,) + _args, _kwargs)
    def onMotion(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onMotion, (self,) + _args, _kwargs)
    def onMouseWheel(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onMouseWheel, (self,) + _args, _kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onLeftBtnPress, (self,) + _args, _kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onLeftBtnRelease, (self,) + _args, _kwargs)
    def onMiddleBtnPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onMiddleBtnPress, (self,) + _args, _kwargs)
    def onMiddleBtnRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onMiddleBtnRelease, (self,) + _args, _kwargs)
    def onRightBtnPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onRightBtnPress, (self,) + _args, _kwargs)
    def onRightBtnRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onRightBtnRelease, (self,) + _args, _kwargs)
    def onUngrabbed(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onUngrabbed, (self,) + _args, _kwargs)
    def onTimeIncPix(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeIncPix, (self,) + _args, _kwargs)
    def onTimeIncLine(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeIncLine, (self,) + _args, _kwargs)
    def onTimeIncPage(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeIncPage, (self,) + _args, _kwargs)
    def onTimeDecPix(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeDecPix, (self,) + _args, _kwargs)
    def onTimeDecLine(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeDecLine, (self,) + _args, _kwargs)
    def onTimeDecPage(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeDecPage, (self,) + _args, _kwargs)
    def onTimeWheel(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onTimeWheel, (self,) + _args, _kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onCmdSetValue, (self,) + _args, _kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onCmdSetIntValue, (self,) + _args, _kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onCmdGetIntValue, (self,) + _args, _kwargs)
    def onCmdSetIntRange(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onCmdSetIntRange, (self,) + _args, _kwargs)
    def onCmdGetIntRange(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_onCmdGetIntRange, (self,) + _args, _kwargs)
    def setRange(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setRange, (self,) + _args, _kwargs)
    def getRange(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getRange, (self,) + _args, _kwargs)
    def setPage(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setPage, (self,) + _args, _kwargs)
    def getPage(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getPage, (self,) + _args, _kwargs)
    def setLine(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setLine, (self,) + _args, _kwargs)
    def getLine(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getLine, (self,) + _args, _kwargs)
    def setPosition(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setPosition, (self,) + _args, _kwargs)
    def getPosition(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getPosition, (self,) + _args, _kwargs)
    def setHiliteColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setHiliteColor, (self,) + _args, _kwargs)
    def getHiliteColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getHiliteColor, (self,) + _args, _kwargs)
    def setShadowColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setShadowColor, (self,) + _args, _kwargs)
    def getShadowColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getShadowColor, (self,) + _args, _kwargs)
    def getBorderColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getBorderColor, (self,) + _args, _kwargs)
    def setBorderColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setBorderColor, (self,) + _args, _kwargs)
    def getScrollbarStyle(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_getScrollbarStyle, (self,) + _args, _kwargs)
    def setScrollbarStyle(self, *_args, **_kwargs):
        return apply(controlsc.FX_Scrollbar_setScrollbarStyle, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FX_Scrollbar instance at %s>" % (self.this,)
class FX_Scrollbar(FX_ScrollbarPtr):
    """Owning shadow class: constructs a new C FX_Scrollbar instance."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object and mark this wrapper as its owner.
        self.this = apply(controlsc.new_FX_Scrollbar, _args, _kwargs)
        self.thisown = 1
class FXScrollbarPtr(FX_ScrollbarPtr):
    """Generated shadow class forwarding FXScrollbar virtuals to controlsc.

    Wraps an existing C pointer without taking ownership (thisown == 0).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_onDefault, (self,) + _args, _kwargs)
    def create(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_create, (self,) + _args, _kwargs)
    def destroy(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_destroy, (self,) + _args, _kwargs)
    def detach(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_detach, (self,) + _args, _kwargs)
    def resize(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_resize, (self,) + _args, _kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_getDefaultWidth, (self,) + _args, _kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_getDefaultHeight, (self,) + _args, _kwargs)
    def show(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_show, (self,) + _args, _kwargs)
    def hide(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_hide, (self,) + _args, _kwargs)
    def enable(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_enable, (self,) + _args, _kwargs)
    def disable(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_disable, (self,) + _args, _kwargs)
    def canFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_canFocus, (self,) + _args, _kwargs)
    def setFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_setFocus, (self,) + _args, _kwargs)
    def killFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_killFocus, (self,) + _args, _kwargs)
    def setDefault(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_setDefault, (self,) + _args, _kwargs)
    def recalc(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_recalc, (self,) + _args, _kwargs)
    def layout(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_layout, (self,) + _args, _kwargs)
    def lower(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_lower, (self,) + _args, _kwargs)
    def move(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_move, (self,) + _args, _kwargs)
    def position(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_position, (self,) + _args, _kwargs)
    def isComposite(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_isComposite, (self,) + _args, _kwargs)
    def contains(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_contains, (self,) + _args, _kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_getWidthForHeight, (self,) + _args, _kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_getHeightForWidth, (self,) + _args, _kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_doesSaveUnder, (self,) + _args, _kwargs)
    def reparent(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_reparent, (self,) + _args, _kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollbar_setBackColor, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FXScrollbar instance at %s>" % (self.this,)
class FXScrollbar(FXScrollbarPtr):
    """Owning shadow class: constructs a new C FXScrollbar and registers it."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object, take ownership, then register the
        # Python wrapper so virtual calls can be routed back to it.
        self.this = apply(controlsc.new_FXScrollbar, _args, _kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ScrollCornerPtr(FX_WindowPtr):
    """Generated shadow class for a borrowed C FX_ScrollCorner pointer."""
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        # Forward the paint message to the C implementation.
        return apply(controlsc.FX_ScrollCorner_onPaint, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FX_ScrollCorner instance at %s>" % (self.this,)
class FX_ScrollCorner(FX_ScrollCornerPtr):
    """Owning shadow class: constructs a new C FX_ScrollCorner instance."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object and mark this wrapper as its owner.
        self.this = apply(controlsc.new_FX_ScrollCorner, _args, _kwargs)
        self.thisown = 1
class FXScrollCornerPtr(FX_ScrollCornerPtr):
    """Generated shadow class forwarding FXScrollCorner virtuals to controlsc.

    Wraps an existing C pointer without taking ownership (thisown == 0).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_onDefault, (self,) + _args, _kwargs)
    def create(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_create, (self,) + _args, _kwargs)
    def destroy(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_destroy, (self,) + _args, _kwargs)
    def detach(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_detach, (self,) + _args, _kwargs)
    def resize(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_resize, (self,) + _args, _kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_getDefaultWidth, (self,) + _args, _kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_getDefaultHeight, (self,) + _args, _kwargs)
    def show(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_show, (self,) + _args, _kwargs)
    def hide(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_hide, (self,) + _args, _kwargs)
    def enable(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_enable, (self,) + _args, _kwargs)
    def disable(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_disable, (self,) + _args, _kwargs)
    def canFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_canFocus, (self,) + _args, _kwargs)
    def setFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_setFocus, (self,) + _args, _kwargs)
    def killFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_killFocus, (self,) + _args, _kwargs)
    def setDefault(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_setDefault, (self,) + _args, _kwargs)
    def recalc(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_recalc, (self,) + _args, _kwargs)
    def layout(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_layout, (self,) + _args, _kwargs)
    def lower(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_lower, (self,) + _args, _kwargs)
    def move(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_move, (self,) + _args, _kwargs)
    def position(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_position, (self,) + _args, _kwargs)
    def isComposite(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_isComposite, (self,) + _args, _kwargs)
    def contains(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_contains, (self,) + _args, _kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_getWidthForHeight, (self,) + _args, _kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_getHeightForWidth, (self,) + _args, _kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_doesSaveUnder, (self,) + _args, _kwargs)
    def reparent(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_reparent, (self,) + _args, _kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return apply(controlsc.FXScrollCorner_setBackColor, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FXScrollCorner instance at %s>" % (self.this,)
class FXScrollCorner(FXScrollCornerPtr):
    """Owning shadow class: constructs a new C FXScrollCorner and registers it."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object, take ownership, then register the
        # Python wrapper so virtual calls can be routed back to it.
        self.this = apply(controlsc.new_FXScrollCorner, _args, _kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ListItemPtr(FX_ObjectPtr):
    """Generated shadow class forwarding FXListItem calls to controlsc.

    Wraps an existing C pointer without taking ownership (thisown == 0).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def getText(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_getText, (self,) + _args, _kwargs)
    def setText(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setText, (self,) + _args, _kwargs)
    def getIcon(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_getIcon, (self,) + _args, _kwargs)
    def setIcon(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setIcon, (self,) + _args, _kwargs)
    def setData(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setData, (self,) + _args, _kwargs)
    def getData(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_getData, (self,) + _args, _kwargs)
    def hasFocus(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_hasFocus, (self,) + _args, _kwargs)
    def setFocus(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setFocus, (self,) + _args, _kwargs)
    def isSelected(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_isSelected, (self,) + _args, _kwargs)
    def setSelected(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setSelected, (self,) + _args, _kwargs)
    def isEnabled(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_isEnabled, (self,) + _args, _kwargs)
    def setEnabled(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setEnabled, (self,) + _args, _kwargs)
    def isDraggable(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_isDraggable, (self,) + _args, _kwargs)
    def setDraggable(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setDraggable, (self,) + _args, _kwargs)
    def isIconOwned(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_isIconOwned, (self,) + _args, _kwargs)
    def setIconOwned(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_setIconOwned, (self,) + _args, _kwargs)
    def getWidth(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_getWidth, (self,) + _args, _kwargs)
    def getHeight(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_getHeight, (self,) + _args, _kwargs)
    def create(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_create, (self,) + _args, _kwargs)
    def detach(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_detach, (self,) + _args, _kwargs)
    def destroy(self, *_args, **_kwargs):
        return apply(controlsc.FX_ListItem_destroy, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FX_ListItem instance at %s>" % (self.this,)
class FX_ListItem(FX_ListItemPtr):
    """Owning shadow class: constructs a new C FX_ListItem instance."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object and mark this wrapper as its owner.
        self.this = apply(controlsc.new_FX_ListItem, _args, _kwargs)
        self.thisown = 1
class FXListItemPtr(FX_ListItemPtr):
    """Generated shadow class forwarding FXListItem virtuals to controlsc.

    Wraps an existing C pointer without taking ownership (thisown == 0).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_onDefault, (self,) + _args, _kwargs)
    def setText(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setText, (self,) + _args, _kwargs)
    def setIcon(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setIcon, (self,) + _args, _kwargs)
    def setFocus(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setFocus, (self,) + _args, _kwargs)
    def setSelected(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setSelected, (self,) + _args, _kwargs)
    def setEnabled(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setEnabled, (self,) + _args, _kwargs)
    def setDraggable(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setDraggable, (self,) + _args, _kwargs)
    def setIconOwned(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_setIconOwned, (self,) + _args, _kwargs)
    def getWidth(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_getWidth, (self,) + _args, _kwargs)
    def getHeight(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_getHeight, (self,) + _args, _kwargs)
    def create(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_create, (self,) + _args, _kwargs)
    def detach(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_detach, (self,) + _args, _kwargs)
    def destroy(self, *_args, **_kwargs):
        return apply(controlsc.FXListItem_destroy, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FXListItem instance at %s>" % (self.this,)
class FXListItem(FXListItemPtr):
    """Owning shadow class: constructs a new C FXListItem and registers it."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object, take ownership, then register the
        # Python wrapper so virtual calls can be routed back to it.
        self.this = apply(controlsc.new_FXListItem, _args, _kwargs)
        self.thisown = 1
        FXPyRegister(self)
class FX_ListPtr(FX_ScrollAreaPtr):
    """Generated shadow class forwarding FXList calls to the controlsc module.

    Wraps an existing C FXList pointer without taking ownership (thisown == 0).
    """
    # Message identifiers mirrored from the C++ class.
    ID_TIPTIMER = controlsc.FX_List_ID_TIPTIMER
    ID_LOOKUPTIMER = controlsc.FX_List_ID_LOOKUPTIMER
    ID_LAST = controlsc.FX_List_ID_LAST
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onPaint, (self,) + _args, _kwargs)
    def onEnter(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onEnter, (self,) + _args, _kwargs)
    def onLeave(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onLeave, (self,) + _args, _kwargs)
    def onUngrabbed(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onUngrabbed, (self,) + _args, _kwargs)
    def onKeyPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onKeyPress, (self,) + _args, _kwargs)
    def onKeyRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onKeyRelease, (self,) + _args, _kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onLeftBtnPress, (self,) + _args, _kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onLeftBtnRelease, (self,) + _args, _kwargs)
    def onRightBtnPress(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onRightBtnPress, (self,) + _args, _kwargs)
    def onRightBtnRelease(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onRightBtnRelease, (self,) + _args, _kwargs)
    def onMotion(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onMotion, (self,) + _args, _kwargs)
    def onFocusIn(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onFocusIn, (self,) + _args, _kwargs)
    def onFocusOut(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onFocusOut, (self,) + _args, _kwargs)
    def onAutoScroll(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onAutoScroll, (self,) + _args, _kwargs)
    def onClicked(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onClicked, (self,) + _args, _kwargs)
    def onDoubleClicked(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onDoubleClicked, (self,) + _args, _kwargs)
    def onTripleClicked(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onTripleClicked, (self,) + _args, _kwargs)
    def onQueryTip(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onQueryTip, (self,) + _args, _kwargs)
    def onQueryHelp(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onQueryHelp, (self,) + _args, _kwargs)
    def onCommand(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onCommand, (self,) + _args, _kwargs)
    def onTipTimer(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onTipTimer, (self,) + _args, _kwargs)
    def onLookupTimer(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onLookupTimer, (self,) + _args, _kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onCmdSetValue, (self,) + _args, _kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onCmdGetIntValue, (self,) + _args, _kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_onCmdSetIntValue, (self,) + _args, _kwargs)
    def getNumItems(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getNumItems, (self,) + _args, _kwargs)
    def getNumVisible(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getNumVisible, (self,) + _args, _kwargs)
    def setNumVisible(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setNumVisible, (self,) + _args, _kwargs)
    def retrieveItem(self, *_args, **_kwargs):
        item = apply(controlsc.FX_List_retrieveItem, (self,) + _args, _kwargs)
        # Wrap a non-NULL result in a non-owning shadow object.
        if item:
            item = FX_ListItemPtr(item)
        return item
    def insertItem(self, *_args, **_kwargs):
        # Generated overload dispatch: try the item-object form first and fall
        # back to the string form on any failure (bare except is the generated
        # dispatch idiom here; narrowing it could change dispatch behavior).
        try:
            return apply(controlsc.FX_List_insertItem, (self,) + _args, _kwargs)
        except:
            return apply(controlsc.FX_List_insertItemStr, (self,) + _args, _kwargs)
    def insertItemStr(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_insertItemStr, (self,) + _args, _kwargs)
    def replaceItem(self, *_args, **_kwargs):
        # Overload dispatch: item-object form, then string form (see insertItem).
        try:
            return apply(controlsc.FX_List_replaceItem, (self,) + _args, _kwargs)
        except:
            return apply(controlsc.FX_List_replaceItemStr, (self,) + _args, _kwargs)
    def replaceItemStr(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_replaceItemStr, (self,) + _args, _kwargs)
    def appendItem(self, *_args, **_kwargs):
        # Overload dispatch: item-object form, then string form (see insertItem).
        try:
            return apply(controlsc.FX_List_appendItem, (self,) + _args, _kwargs)
        except:
            return apply(controlsc.FX_List_appendItemStr, (self,) + _args, _kwargs)
    def appendItemStr(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_appendItemStr, (self,) + _args, _kwargs)
    def prependItem(self, *_args, **_kwargs):
        # Overload dispatch: item-object form, then string form (see insertItem).
        try:
            return apply(controlsc.FX_List_prependItem, (self,) + _args, _kwargs)
        except:
            return apply(controlsc.FX_List_prependItemStr, (self,) + _args, _kwargs)
    def prependItemStr(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_prependItemStr, (self,) + _args, _kwargs)
    def removeItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_removeItem, (self,) + _args, _kwargs)
    def clearItems(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_clearItems, (self,) + _args, _kwargs)
    def getItemWidth(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemWidth, (self,) + _args, _kwargs)
    def getItemHeight(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemHeight, (self,) + _args, _kwargs)
    def getItemAt(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemAt, (self,) + _args, _kwargs)
    def hitItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_hitItem, (self,) + _args, _kwargs)
    def findItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_findItem, (self,) + _args, _kwargs)
    def makeItemVisible(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_makeItemVisible, (self,) + _args, _kwargs)
    def updateItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_updateItem, (self,) + _args, _kwargs)
    def setItemText(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setItemText, (self,) + _args, _kwargs)
    def getItemText(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemText, (self,) + _args, _kwargs)
    def setItemIcon(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setItemIcon, (self,) + _args, _kwargs)
    def getItemIcon(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemIcon, (self,) + _args, _kwargs)
    def setItemData(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setItemData, (self,) + _args, _kwargs)
    def getItemData(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getItemData, (self,) + _args, _kwargs)
    def isItemSelected(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_isItemSelected, (self,) + _args, _kwargs)
    def isItemCurrent(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_isItemCurrent, (self,) + _args, _kwargs)
    def isItemVisible(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_isItemVisible, (self,) + _args, _kwargs)
    def isItemEnabled(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_isItemEnabled, (self,) + _args, _kwargs)
    def enableItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_enableItem, (self,) + _args, _kwargs)
    def disableItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_disableItem, (self,) + _args, _kwargs)
    def selectItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_selectItem, (self,) + _args, _kwargs)
    def deselectItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_deselectItem, (self,) + _args, _kwargs)
    def toggleItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_toggleItem, (self,) + _args, _kwargs)
    def setCurrentItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setCurrentItem, (self,) + _args, _kwargs)
    def getCurrentItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getCurrentItem, (self,) + _args, _kwargs)
    def setAnchorItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setAnchorItem, (self,) + _args, _kwargs)
    def getAnchorItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getAnchorItem, (self,) + _args, _kwargs)
    def getCursorItem(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getCursorItem, (self,) + _args, _kwargs)
    def extendSelection(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_extendSelection, (self,) + _args, _kwargs)
    def killSelection(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_killSelection, (self,) + _args, _kwargs)
    def setFont(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setFont, (self,) + _args, _kwargs)
    def getFont(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getFont, (self,) + _args, _kwargs)
    def getTextColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getTextColor, (self,) + _args, _kwargs)
    def setTextColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setTextColor, (self,) + _args, _kwargs)
    def getSelBackColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getSelBackColor, (self,) + _args, _kwargs)
    def setSelBackColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setSelBackColor, (self,) + _args, _kwargs)
    def getSelTextColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getSelTextColor, (self,) + _args, _kwargs)
    def setSelTextColor(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setSelTextColor, (self,) + _args, _kwargs)
    def getSortFunc(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getSortFunc, (self,) + _args, _kwargs)
    def setSortFunc(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setSortFunc, (self,) + _args, _kwargs)
    def getListStyle(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getListStyle, (self,) + _args, _kwargs)
    def setListStyle(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setListStyle, (self,) + _args, _kwargs)
    def setHelpText(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_setHelpText, (self,) + _args, _kwargs)
    def getHelpText(self, *_args, **_kwargs):
        return apply(controlsc.FX_List_getHelpText, (self,) + _args, _kwargs)
    def __repr__(self):
        return "<C FX_List instance at %s>" % (self.this,)
class FX_List(FX_ListPtr):
    """Owning shadow class: constructs a new C FX_List instance."""
    def __init__(self, *_args, **_kwargs):
        # Create the underlying C object and mark this wrapper as its owner.
        self.this = apply(controlsc.new_FX_List, _args, _kwargs)
        self.thisown = 1
class FXListPtr(FX_ListPtr):
    """Borrowing proxy for a C FXList.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FXList_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *args, **kw): return apply(controlsc.FXList_onDefault, (self,) + args, kw)
    def create(self, *args, **kw): return apply(controlsc.FXList_create, (self,) + args, kw)
    def destroy(self, *args, **kw): return apply(controlsc.FXList_destroy, (self,) + args, kw)
    def detach(self, *args, **kw): return apply(controlsc.FXList_detach, (self,) + args, kw)
    def resize(self, *args, **kw): return apply(controlsc.FXList_resize, (self,) + args, kw)
    def getDefaultWidth(self, *args, **kw): return apply(controlsc.FXList_getDefaultWidth, (self,) + args, kw)
    def getDefaultHeight(self, *args, **kw): return apply(controlsc.FXList_getDefaultHeight, (self,) + args, kw)
    def show(self, *args, **kw): return apply(controlsc.FXList_show, (self,) + args, kw)
    def hide(self, *args, **kw): return apply(controlsc.FXList_hide, (self,) + args, kw)
    def enable(self, *args, **kw): return apply(controlsc.FXList_enable, (self,) + args, kw)
    def disable(self, *args, **kw): return apply(controlsc.FXList_disable, (self,) + args, kw)
    def canFocus(self, *args, **kw): return apply(controlsc.FXList_canFocus, (self,) + args, kw)
    def setFocus(self, *args, **kw): return apply(controlsc.FXList_setFocus, (self,) + args, kw)
    def killFocus(self, *args, **kw): return apply(controlsc.FXList_killFocus, (self,) + args, kw)
    def setDefault(self, *args, **kw): return apply(controlsc.FXList_setDefault, (self,) + args, kw)
    def recalc(self, *args, **kw): return apply(controlsc.FXList_recalc, (self,) + args, kw)
    def layout(self, *args, **kw): return apply(controlsc.FXList_layout, (self,) + args, kw)
    def lower(self, *args, **kw): return apply(controlsc.FXList_lower, (self,) + args, kw)
    def move(self, *args, **kw): return apply(controlsc.FXList_move, (self,) + args, kw)
    def position(self, *args, **kw): return apply(controlsc.FXList_position, (self,) + args, kw)
    def isComposite(self, *args, **kw): return apply(controlsc.FXList_isComposite, (self,) + args, kw)
    def contains(self, *args, **kw): return apply(controlsc.FXList_contains, (self,) + args, kw)
    def getWidthForHeight(self, *args, **kw): return apply(controlsc.FXList_getWidthForHeight, (self,) + args, kw)
    def getHeightForWidth(self, *args, **kw): return apply(controlsc.FXList_getHeightForWidth, (self,) + args, kw)
    def doesSaveUnder(self, *args, **kw): return apply(controlsc.FXList_doesSaveUnder, (self,) + args, kw)
    def reparent(self, *args, **kw): return apply(controlsc.FXList_reparent, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FXList_setBackColor, (self,) + args, kw)
    def getContentWidth(self, *args, **kw): return apply(controlsc.FXList_getContentWidth, (self,) + args, kw)
    def getContentHeight(self, *args, **kw): return apply(controlsc.FXList_getContentHeight, (self,) + args, kw)
    def getViewportWidth(self, *args, **kw): return apply(controlsc.FXList_getViewportWidth, (self,) + args, kw)
    def getViewportHeight(self, *args, **kw): return apply(controlsc.FXList_getViewportHeight, (self,) + args, kw)
    def moveContents(self, *args, **kw): return apply(controlsc.FXList_moveContents, (self,) + args, kw)
    def __repr__(self):
        return "<C FXList instance at %s>" % (self.this,)
class FXList(FXListPtr):
    """Constructing proxy: allocates a new C FXList, takes ownership of it
    (thisown = 1), and registers the instance via FXPyRegister."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FXList, args, kw)
        self.thisown = 1
        FXPyRegister(self)
class FX_ComboBoxPtr(FX_PackerPtr):
    """Borrowing proxy for a C FX_ComboBox.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FX_ComboBox_* entry point in `controlsc`.
    """
    # Message identifiers mirrored from the C extension.
    ID_LIST = controlsc.FX_ComboBox_ID_LIST
    ID_TEXT = controlsc.FX_ComboBox_ID_TEXT
    ID_LAST = controlsc.FX_ComboBox_ID_LAST
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onFocusUp(self, *args, **kw): return apply(controlsc.FX_ComboBox_onFocusUp, (self,) + args, kw)
    def onFocusDown(self, *args, **kw): return apply(controlsc.FX_ComboBox_onFocusDown, (self,) + args, kw)
    def onTextButton(self, *args, **kw): return apply(controlsc.FX_ComboBox_onTextButton, (self,) + args, kw)
    def onTextChanged(self, *args, **kw): return apply(controlsc.FX_ComboBox_onTextChanged, (self,) + args, kw)
    def onTextCommand(self, *args, **kw): return apply(controlsc.FX_ComboBox_onTextCommand, (self,) + args, kw)
    def onListClicked(self, *args, **kw): return apply(controlsc.FX_ComboBox_onListClicked, (self,) + args, kw)
    def onFwdToText(self, *args, **kw): return apply(controlsc.FX_ComboBox_onFwdToText, (self,) + args, kw)
    def onUpdFmText(self, *args, **kw): return apply(controlsc.FX_ComboBox_onUpdFmText, (self,) + args, kw)
    def isEditable(self, *args, **kw): return apply(controlsc.FX_ComboBox_isEditable, (self,) + args, kw)
    def setEditable(self, *args, **kw): return apply(controlsc.FX_ComboBox_setEditable, (self,) + args, kw)
    def setText(self, *args, **kw): return apply(controlsc.FX_ComboBox_setText, (self,) + args, kw)
    def getText(self, *args, **kw): return apply(controlsc.FX_ComboBox_getText, (self,) + args, kw)
    def setNumColumns(self, *args, **kw): return apply(controlsc.FX_ComboBox_setNumColumns, (self,) + args, kw)
    def getNumColumns(self, *args, **kw): return apply(controlsc.FX_ComboBox_getNumColumns, (self,) + args, kw)
    def getNumItems(self, *args, **kw): return apply(controlsc.FX_ComboBox_getNumItems, (self,) + args, kw)
    def getNumVisible(self, *args, **kw): return apply(controlsc.FX_ComboBox_getNumVisible, (self,) + args, kw)
    def setNumVisible(self, *args, **kw): return apply(controlsc.FX_ComboBox_setNumVisible, (self,) + args, kw)
    def isItemCurrent(self, *args, **kw): return apply(controlsc.FX_ComboBox_isItemCurrent, (self,) + args, kw)
    def setCurrentItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_setCurrentItem, (self,) + args, kw)
    def getCurrentItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_getCurrentItem, (self,) + args, kw)
    def retrieveItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_retrieveItem, (self,) + args, kw)
    def replaceItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_replaceItem, (self,) + args, kw)
    def insertItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_insertItem, (self,) + args, kw)
    def appendItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_appendItem, (self,) + args, kw)
    def prependItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_prependItem, (self,) + args, kw)
    def removeItem(self, *args, **kw): return apply(controlsc.FX_ComboBox_removeItem, (self,) + args, kw)
    def clearItems(self, *args, **kw): return apply(controlsc.FX_ComboBox_clearItems, (self,) + args, kw)
    def setItemText(self, *args, **kw): return apply(controlsc.FX_ComboBox_setItemText, (self,) + args, kw)
    def getItemText(self, *args, **kw): return apply(controlsc.FX_ComboBox_getItemText, (self,) + args, kw)
    def setItemData(self, *args, **kw): return apply(controlsc.FX_ComboBox_setItemData, (self,) + args, kw)
    def getItemData(self, *args, **kw): return apply(controlsc.FX_ComboBox_getItemData, (self,) + args, kw)
    def isPaneShown(self, *args, **kw): return apply(controlsc.FX_ComboBox_isPaneShown, (self,) + args, kw)
    def sortItems(self, *args, **kw): return apply(controlsc.FX_ComboBox_sortItems, (self,) + args, kw)
    def setFont(self, *args, **kw): return apply(controlsc.FX_ComboBox_setFont, (self,) + args, kw)
    def getFont(self, *args, **kw): return apply(controlsc.FX_ComboBox_getFont, (self,) + args, kw)
    def setComboStyle(self, *args, **kw): return apply(controlsc.FX_ComboBox_setComboStyle, (self,) + args, kw)
    def getComboStyle(self, *args, **kw): return apply(controlsc.FX_ComboBox_getComboStyle, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_setBackColor, (self,) + args, kw)
    def getBackColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_getBackColor, (self,) + args, kw)
    def setTextColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_setTextColor, (self,) + args, kw)
    def getTextColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_getTextColor, (self,) + args, kw)
    def setSelBackColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_setSelBackColor, (self,) + args, kw)
    def getSelBackColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_getSelBackColor, (self,) + args, kw)
    def setSelTextColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_setSelTextColor, (self,) + args, kw)
    def getSelTextColor(self, *args, **kw): return apply(controlsc.FX_ComboBox_getSelTextColor, (self,) + args, kw)
    def getSortFunc(self, *args, **kw): return apply(controlsc.FX_ComboBox_getSortFunc, (self,) + args, kw)
    def setSortFunc(self, *args, **kw): return apply(controlsc.FX_ComboBox_setSortFunc, (self,) + args, kw)
    def setHelpText(self, *args, **kw): return apply(controlsc.FX_ComboBox_setHelpText, (self,) + args, kw)
    def getHelpText(self, *args, **kw): return apply(controlsc.FX_ComboBox_getHelpText, (self,) + args, kw)
    def setTipText(self, *args, **kw): return apply(controlsc.FX_ComboBox_setTipText, (self,) + args, kw)
    def getTipText(self, *args, **kw): return apply(controlsc.FX_ComboBox_getTipText, (self,) + args, kw)
    def __repr__(self):
        return "<C FX_ComboBox instance at %s>" % (self.this,)
class FX_ComboBox(FX_ComboBoxPtr):
    """Constructing proxy: allocates a new C FX_ComboBox and takes
    ownership of it (thisown = 1)."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FX_ComboBox, args, kw)
        self.thisown = 1
class FXComboBoxPtr(FX_ComboBoxPtr):
    """Borrowing proxy for a C FXComboBox.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FXComboBox_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *args, **kw): return apply(controlsc.FXComboBox_onDefault, (self,) + args, kw)
    def create(self, *args, **kw): return apply(controlsc.FXComboBox_create, (self,) + args, kw)
    def destroy(self, *args, **kw): return apply(controlsc.FXComboBox_destroy, (self,) + args, kw)
    def detach(self, *args, **kw): return apply(controlsc.FXComboBox_detach, (self,) + args, kw)
    def resize(self, *args, **kw): return apply(controlsc.FXComboBox_resize, (self,) + args, kw)
    def getDefaultWidth(self, *args, **kw): return apply(controlsc.FXComboBox_getDefaultWidth, (self,) + args, kw)
    def getDefaultHeight(self, *args, **kw): return apply(controlsc.FXComboBox_getDefaultHeight, (self,) + args, kw)
    def show(self, *args, **kw): return apply(controlsc.FXComboBox_show, (self,) + args, kw)
    def hide(self, *args, **kw): return apply(controlsc.FXComboBox_hide, (self,) + args, kw)
    def enable(self, *args, **kw): return apply(controlsc.FXComboBox_enable, (self,) + args, kw)
    def disable(self, *args, **kw): return apply(controlsc.FXComboBox_disable, (self,) + args, kw)
    def canFocus(self, *args, **kw): return apply(controlsc.FXComboBox_canFocus, (self,) + args, kw)
    def setFocus(self, *args, **kw): return apply(controlsc.FXComboBox_setFocus, (self,) + args, kw)
    def killFocus(self, *args, **kw): return apply(controlsc.FXComboBox_killFocus, (self,) + args, kw)
    def setDefault(self, *args, **kw): return apply(controlsc.FXComboBox_setDefault, (self,) + args, kw)
    def recalc(self, *args, **kw): return apply(controlsc.FXComboBox_recalc, (self,) + args, kw)
    def layout(self, *args, **kw): return apply(controlsc.FXComboBox_layout, (self,) + args, kw)
    def lower(self, *args, **kw): return apply(controlsc.FXComboBox_lower, (self,) + args, kw)
    def move(self, *args, **kw): return apply(controlsc.FXComboBox_move, (self,) + args, kw)
    def position(self, *args, **kw): return apply(controlsc.FXComboBox_position, (self,) + args, kw)
    def isComposite(self, *args, **kw): return apply(controlsc.FXComboBox_isComposite, (self,) + args, kw)
    def contains(self, *args, **kw): return apply(controlsc.FXComboBox_contains, (self,) + args, kw)
    def getWidthForHeight(self, *args, **kw): return apply(controlsc.FXComboBox_getWidthForHeight, (self,) + args, kw)
    def getHeightForWidth(self, *args, **kw): return apply(controlsc.FXComboBox_getHeightForWidth, (self,) + args, kw)
    def doesSaveUnder(self, *args, **kw): return apply(controlsc.FXComboBox_doesSaveUnder, (self,) + args, kw)
    def reparent(self, *args, **kw): return apply(controlsc.FXComboBox_reparent, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FXComboBox_setBackColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FXComboBox instance at %s>" % (self.this,)
class FXComboBox(FXComboBoxPtr):
    """Constructing proxy: allocates a new C FXComboBox, takes ownership
    of it (thisown = 1), and registers the instance via FXPyRegister."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FXComboBox, args, kw)
        self.thisown = 1
        FXPyRegister(self)
class FX_DragCornerPtr(FX_WindowPtr):
    """Borrowing proxy for a C FX_DragCorner.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FX_DragCorner_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *args, **kw): return apply(controlsc.FX_DragCorner_onPaint, (self,) + args, kw)
    def onLeftBtnPress(self, *args, **kw): return apply(controlsc.FX_DragCorner_onLeftBtnPress, (self,) + args, kw)
    def onLeftBtnRelease(self, *args, **kw): return apply(controlsc.FX_DragCorner_onLeftBtnRelease, (self,) + args, kw)
    def onMotion(self, *args, **kw): return apply(controlsc.FX_DragCorner_onMotion, (self,) + args, kw)
    def getHiliteColor(self, *args, **kw): return apply(controlsc.FX_DragCorner_getHiliteColor, (self,) + args, kw)
    def setHiliteColor(self, *args, **kw): return apply(controlsc.FX_DragCorner_setHiliteColor, (self,) + args, kw)
    def getShadowColor(self, *args, **kw): return apply(controlsc.FX_DragCorner_getShadowColor, (self,) + args, kw)
    def setShadowColor(self, *args, **kw): return apply(controlsc.FX_DragCorner_setShadowColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FX_DragCorner instance at %s>" % (self.this,)
class FX_DragCorner(FX_DragCornerPtr):
    """Constructing proxy: allocates a new C FX_DragCorner and takes
    ownership of it (thisown = 1)."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FX_DragCorner, args, kw)
        self.thisown = 1
class FXDragCornerPtr(FX_DragCornerPtr):
    """Borrowing proxy for a C FXDragCorner.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FXDragCorner_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *args, **kw): return apply(controlsc.FXDragCorner_onDefault, (self,) + args, kw)
    def create(self, *args, **kw): return apply(controlsc.FXDragCorner_create, (self,) + args, kw)
    def destroy(self, *args, **kw): return apply(controlsc.FXDragCorner_destroy, (self,) + args, kw)
    def detach(self, *args, **kw): return apply(controlsc.FXDragCorner_detach, (self,) + args, kw)
    def resize(self, *args, **kw): return apply(controlsc.FXDragCorner_resize, (self,) + args, kw)
    def getDefaultWidth(self, *args, **kw): return apply(controlsc.FXDragCorner_getDefaultWidth, (self,) + args, kw)
    def getDefaultHeight(self, *args, **kw): return apply(controlsc.FXDragCorner_getDefaultHeight, (self,) + args, kw)
    def show(self, *args, **kw): return apply(controlsc.FXDragCorner_show, (self,) + args, kw)
    def hide(self, *args, **kw): return apply(controlsc.FXDragCorner_hide, (self,) + args, kw)
    def enable(self, *args, **kw): return apply(controlsc.FXDragCorner_enable, (self,) + args, kw)
    def disable(self, *args, **kw): return apply(controlsc.FXDragCorner_disable, (self,) + args, kw)
    def canFocus(self, *args, **kw): return apply(controlsc.FXDragCorner_canFocus, (self,) + args, kw)
    def setFocus(self, *args, **kw): return apply(controlsc.FXDragCorner_setFocus, (self,) + args, kw)
    def killFocus(self, *args, **kw): return apply(controlsc.FXDragCorner_killFocus, (self,) + args, kw)
    def setDefault(self, *args, **kw): return apply(controlsc.FXDragCorner_setDefault, (self,) + args, kw)
    def recalc(self, *args, **kw): return apply(controlsc.FXDragCorner_recalc, (self,) + args, kw)
    def layout(self, *args, **kw): return apply(controlsc.FXDragCorner_layout, (self,) + args, kw)
    def lower(self, *args, **kw): return apply(controlsc.FXDragCorner_lower, (self,) + args, kw)
    def move(self, *args, **kw): return apply(controlsc.FXDragCorner_move, (self,) + args, kw)
    def position(self, *args, **kw): return apply(controlsc.FXDragCorner_position, (self,) + args, kw)
    def isComposite(self, *args, **kw): return apply(controlsc.FXDragCorner_isComposite, (self,) + args, kw)
    def contains(self, *args, **kw): return apply(controlsc.FXDragCorner_contains, (self,) + args, kw)
    def getWidthForHeight(self, *args, **kw): return apply(controlsc.FXDragCorner_getWidthForHeight, (self,) + args, kw)
    def getHeightForWidth(self, *args, **kw): return apply(controlsc.FXDragCorner_getHeightForWidth, (self,) + args, kw)
    def doesSaveUnder(self, *args, **kw): return apply(controlsc.FXDragCorner_doesSaveUnder, (self,) + args, kw)
    def reparent(self, *args, **kw): return apply(controlsc.FXDragCorner_reparent, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FXDragCorner_setBackColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FXDragCorner instance at %s>" % (self.this,)
class FXDragCorner(FXDragCornerPtr):
    """Constructing proxy: allocates a new C FXDragCorner, takes ownership
    of it (thisown = 1), and registers the instance via FXPyRegister."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FXDragCorner, args, kw)
        self.thisown = 1
        FXPyRegister(self)
class FX_StatuslinePtr(FX_FramePtr):
    """Borrowing proxy for a C FX_Statusline.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FX_Statusline_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *args, **kw): return apply(controlsc.FX_Statusline_onPaint, (self,) + args, kw)
    def onUpdate(self, *args, **kw): return apply(controlsc.FX_Statusline_onUpdate, (self,) + args, kw)
    def onCmdGetStringValue(self, *args, **kw): return apply(controlsc.FX_Statusline_onCmdGetStringValue, (self,) + args, kw)
    def onCmdSetStringValue(self, *args, **kw): return apply(controlsc.FX_Statusline_onCmdSetStringValue, (self,) + args, kw)
    def setText(self, *args, **kw): return apply(controlsc.FX_Statusline_setText, (self,) + args, kw)
    def setNormalText(self, *args, **kw): return apply(controlsc.FX_Statusline_setNormalText, (self,) + args, kw)
    def getNormalText(self, *args, **kw): return apply(controlsc.FX_Statusline_getNormalText, (self,) + args, kw)
    def getText(self, *args, **kw): return apply(controlsc.FX_Statusline_getText, (self,) + args, kw)
    def setFont(self, *args, **kw): return apply(controlsc.FX_Statusline_setFont, (self,) + args, kw)
    def getFont(self, *args, **kw): return apply(controlsc.FX_Statusline_getFont, (self,) + args, kw)
    def getTextColor(self, *args, **kw): return apply(controlsc.FX_Statusline_getTextColor, (self,) + args, kw)
    def setTextColor(self, *args, **kw): return apply(controlsc.FX_Statusline_setTextColor, (self,) + args, kw)
    def getTextHighlightColor(self, *args, **kw): return apply(controlsc.FX_Statusline_getTextHighlightColor, (self,) + args, kw)
    def setTextHighlightColor(self, *args, **kw): return apply(controlsc.FX_Statusline_setTextHighlightColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FX_Statusline instance at %s>" % (self.this,)
class FX_Statusline(FX_StatuslinePtr):
    """Constructing proxy: allocates a new C FX_Statusline and takes
    ownership of it (thisown = 1)."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FX_Statusline, args, kw)
        self.thisown = 1
class FXStatuslinePtr(FX_StatuslinePtr):
    """Borrowing proxy for a C FXStatusline.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FXStatusline_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *args, **kw): return apply(controlsc.FXStatusline_onDefault, (self,) + args, kw)
    def create(self, *args, **kw): return apply(controlsc.FXStatusline_create, (self,) + args, kw)
    def destroy(self, *args, **kw): return apply(controlsc.FXStatusline_destroy, (self,) + args, kw)
    def detach(self, *args, **kw): return apply(controlsc.FXStatusline_detach, (self,) + args, kw)
    def resize(self, *args, **kw): return apply(controlsc.FXStatusline_resize, (self,) + args, kw)
    def getDefaultWidth(self, *args, **kw): return apply(controlsc.FXStatusline_getDefaultWidth, (self,) + args, kw)
    def getDefaultHeight(self, *args, **kw): return apply(controlsc.FXStatusline_getDefaultHeight, (self,) + args, kw)
    def show(self, *args, **kw): return apply(controlsc.FXStatusline_show, (self,) + args, kw)
    def hide(self, *args, **kw): return apply(controlsc.FXStatusline_hide, (self,) + args, kw)
    def enable(self, *args, **kw): return apply(controlsc.FXStatusline_enable, (self,) + args, kw)
    def disable(self, *args, **kw): return apply(controlsc.FXStatusline_disable, (self,) + args, kw)
    def canFocus(self, *args, **kw): return apply(controlsc.FXStatusline_canFocus, (self,) + args, kw)
    def setFocus(self, *args, **kw): return apply(controlsc.FXStatusline_setFocus, (self,) + args, kw)
    def killFocus(self, *args, **kw): return apply(controlsc.FXStatusline_killFocus, (self,) + args, kw)
    def setDefault(self, *args, **kw): return apply(controlsc.FXStatusline_setDefault, (self,) + args, kw)
    def recalc(self, *args, **kw): return apply(controlsc.FXStatusline_recalc, (self,) + args, kw)
    def layout(self, *args, **kw): return apply(controlsc.FXStatusline_layout, (self,) + args, kw)
    def lower(self, *args, **kw): return apply(controlsc.FXStatusline_lower, (self,) + args, kw)
    def move(self, *args, **kw): return apply(controlsc.FXStatusline_move, (self,) + args, kw)
    def position(self, *args, **kw): return apply(controlsc.FXStatusline_position, (self,) + args, kw)
    def isComposite(self, *args, **kw): return apply(controlsc.FXStatusline_isComposite, (self,) + args, kw)
    def contains(self, *args, **kw): return apply(controlsc.FXStatusline_contains, (self,) + args, kw)
    def getWidthForHeight(self, *args, **kw): return apply(controlsc.FXStatusline_getWidthForHeight, (self,) + args, kw)
    def getHeightForWidth(self, *args, **kw): return apply(controlsc.FXStatusline_getHeightForWidth, (self,) + args, kw)
    def doesSaveUnder(self, *args, **kw): return apply(controlsc.FXStatusline_doesSaveUnder, (self,) + args, kw)
    def reparent(self, *args, **kw): return apply(controlsc.FXStatusline_reparent, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FXStatusline_setBackColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FXStatusline instance at %s>" % (self.this,)
class FXStatusline(FXStatuslinePtr):
    """Constructing proxy: allocates a new C FXStatusline, takes ownership
    of it (thisown = 1), and registers the instance via FXPyRegister."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FXStatusline, args, kw)
        self.thisown = 1
        FXPyRegister(self)
class FX_StatusbarPtr(FX_HorizontalFramePtr):
    """Borrowing proxy for a C FX_Statusbar.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FX_Statusbar_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def setCornerStyle(self, *args, **kw): return apply(controlsc.FX_Statusbar_setCornerStyle, (self,) + args, kw)
    def getCornerStyle(self, *args, **kw): return apply(controlsc.FX_Statusbar_getCornerStyle, (self,) + args, kw)
    def getStatusline(self, *args, **kw): return apply(controlsc.FX_Statusbar_getStatusline, (self,) + args, kw)
    def getDragCorner(self, *args, **kw): return apply(controlsc.FX_Statusbar_getDragCorner, (self,) + args, kw)
    def __repr__(self):
        return "<C FX_Statusbar instance at %s>" % (self.this,)
class FX_Statusbar(FX_StatusbarPtr):
    """Constructing proxy: allocates a new C FX_Statusbar and takes
    ownership of it (thisown = 1)."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FX_Statusbar, args, kw)
        self.thisown = 1
class FXStatusbarPtr(FX_StatusbarPtr):
    """Borrowing proxy for a C FXStatusbar.

    Wraps an existing C pointer (`this`) with thisown left 0; each method
    forwards to the matching FXStatusbar_* entry point in `controlsc`.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *args, **kw): return apply(controlsc.FXStatusbar_onDefault, (self,) + args, kw)
    def create(self, *args, **kw): return apply(controlsc.FXStatusbar_create, (self,) + args, kw)
    def destroy(self, *args, **kw): return apply(controlsc.FXStatusbar_destroy, (self,) + args, kw)
    def detach(self, *args, **kw): return apply(controlsc.FXStatusbar_detach, (self,) + args, kw)
    def resize(self, *args, **kw): return apply(controlsc.FXStatusbar_resize, (self,) + args, kw)
    def getDefaultWidth(self, *args, **kw): return apply(controlsc.FXStatusbar_getDefaultWidth, (self,) + args, kw)
    def getDefaultHeight(self, *args, **kw): return apply(controlsc.FXStatusbar_getDefaultHeight, (self,) + args, kw)
    def show(self, *args, **kw): return apply(controlsc.FXStatusbar_show, (self,) + args, kw)
    def hide(self, *args, **kw): return apply(controlsc.FXStatusbar_hide, (self,) + args, kw)
    def enable(self, *args, **kw): return apply(controlsc.FXStatusbar_enable, (self,) + args, kw)
    def disable(self, *args, **kw): return apply(controlsc.FXStatusbar_disable, (self,) + args, kw)
    def canFocus(self, *args, **kw): return apply(controlsc.FXStatusbar_canFocus, (self,) + args, kw)
    def setFocus(self, *args, **kw): return apply(controlsc.FXStatusbar_setFocus, (self,) + args, kw)
    def killFocus(self, *args, **kw): return apply(controlsc.FXStatusbar_killFocus, (self,) + args, kw)
    def setDefault(self, *args, **kw): return apply(controlsc.FXStatusbar_setDefault, (self,) + args, kw)
    def recalc(self, *args, **kw): return apply(controlsc.FXStatusbar_recalc, (self,) + args, kw)
    def layout(self, *args, **kw): return apply(controlsc.FXStatusbar_layout, (self,) + args, kw)
    def lower(self, *args, **kw): return apply(controlsc.FXStatusbar_lower, (self,) + args, kw)
    def move(self, *args, **kw): return apply(controlsc.FXStatusbar_move, (self,) + args, kw)
    def position(self, *args, **kw): return apply(controlsc.FXStatusbar_position, (self,) + args, kw)
    def isComposite(self, *args, **kw): return apply(controlsc.FXStatusbar_isComposite, (self,) + args, kw)
    def contains(self, *args, **kw): return apply(controlsc.FXStatusbar_contains, (self,) + args, kw)
    def getWidthForHeight(self, *args, **kw): return apply(controlsc.FXStatusbar_getWidthForHeight, (self,) + args, kw)
    def getHeightForWidth(self, *args, **kw): return apply(controlsc.FXStatusbar_getHeightForWidth, (self,) + args, kw)
    def doesSaveUnder(self, *args, **kw): return apply(controlsc.FXStatusbar_doesSaveUnder, (self,) + args, kw)
    def reparent(self, *args, **kw): return apply(controlsc.FXStatusbar_reparent, (self,) + args, kw)
    def setBackColor(self, *args, **kw): return apply(controlsc.FXStatusbar_setBackColor, (self,) + args, kw)
    def __repr__(self):
        return "<C FXStatusbar instance at %s>" % (self.this,)
class FXStatusbar(FXStatusbarPtr):
    """Constructing proxy: allocates a new C FXStatusbar, takes ownership
    of it (thisown = 1), and registers the instance via FXPyRegister."""
    def __init__(self, *args, **kw):
        self.this = apply(controlsc.new_FXStatusbar, args, kw)
        self.thisown = 1
        FXPyRegister(self)
class FX_SliderPtr(FX_FramePtr):
ID_AUTOINC = controlsc.FX_Slider_ID_AUTOINC
ID_AUTODEC = controlsc.FX_Slider_ID_AUTODEC
ID_LAST = controlsc.FX_Slider_ID_LAST
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMiddleBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMiddleBtnPress,(self,) + _args, _kwargs)
return val
def onMiddleBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMiddleBtnRelease,(self,) + _args, _kwargs)
return val
def onUngrabbed(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onUngrabbed,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onMotion,(self,) + _args, _kwargs)
return val
def onTimeInc(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onTimeInc,(self,) + _args, _kwargs)
return val
def onTimeDec(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onTimeDec,(self,) + _args, _kwargs)
return val
def onCmdSetValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetIntValue,(self,) + _args, _kwargs)
return val
def onCmdGetIntValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetIntValue,(self,) + _args, _kwargs)
return val
def onCmdSetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetRealValue,(self,) + _args, _kwargs)
return val
def onCmdGetRealValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetRealValue,(self,) + _args, _kwargs)
return val
def onCmdSetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetIntRange,(self,) + _args, _kwargs)
return val
def onCmdGetIntRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetIntRange,(self,) + _args, _kwargs)
return val
def onCmdSetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdSetRealRange,(self,) + _args, _kwargs)
return val
def onCmdGetRealRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onCmdGetRealRange,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_onQueryTip,(self,) + _args, _kwargs)
return val
def setRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setRange,(self,) + _args, _kwargs)
return val
def getRange(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getRange,(self,) + _args, _kwargs)
return val
def setValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setValue,(self,) + _args, _kwargs)
return val
def getValue(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getValue,(self,) + _args, _kwargs)
return val
def getSliderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSliderStyle,(self,) + _args, _kwargs)
return val
def setSliderStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSliderStyle,(self,) + _args, _kwargs)
return val
def getHeadSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHeadSize,(self,) + _args, _kwargs)
return val
def setHeadSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHeadSize,(self,) + _args, _kwargs)
return val
def getSlotSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSlotSize,(self,) + _args, _kwargs)
return val
def setSlotSize(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSlotSize,(self,) + _args, _kwargs)
return val
def getIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getIncrement,(self,) + _args, _kwargs)
return val
def setIncrement(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setIncrement,(self,) + _args, _kwargs)
return val
def setTickDelta(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setTickDelta,(self,) + _args, _kwargs)
return val
def getTickDelta(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getTickDelta,(self,) + _args, _kwargs)
return val
def getSlotColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getSlotColor,(self,) + _args, _kwargs)
return val
def setSlotColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setSlotColor,(self,) + _args, _kwargs)
return val
def getHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHiliteColor,(self,) + _args, _kwargs)
return val
def setHiliteColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHiliteColor,(self,) + _args, _kwargs)
return val
def getShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getShadowColor,(self,) + _args, _kwargs)
return val
def setShadowColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setShadowColor,(self,) + _args, _kwargs)
return val
def getBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getBorderColor,(self,) + _args, _kwargs)
return val
def setBorderColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setBorderColor,(self,) + _args, _kwargs)
return val
def getBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getBaseColor,(self,) + _args, _kwargs)
return val
def setBaseColor(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setBaseColor,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getHelpText,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setHelpText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_getTipText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_Slider_setTipText,(self,) + _args, _kwargs)
return val
    def __repr__(self):
        # Debug representation: identifies the wrapper by its SWIG pointer handle.
        return "<C FX_Slider instance at %s>" % (self.this,)
class FX_Slider(FX_SliderPtr):
    """Owning shadow class: constructs a new C FX_Slider object.

    The deprecated apply() builtin (removed in Python 3) is replaced by a
    direct call with argument unpacking.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_Slider(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
class FXSliderPtr(FX_SliderPtr):
    """Non-owning SWIG shadow for FXSlider.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXSlider_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXSlider_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXSlider_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXSlider_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXSlider_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXSlider_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXSlider_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXSlider_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXSlider_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXSlider_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXSlider_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXSlider_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXSlider_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXSlider_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXSlider_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXSlider_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXSlider_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXSlider_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXSlider_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXSlider_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXSlider_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXSlider_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXSlider_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXSlider_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXSlider_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXSlider_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXSlider_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXSlider instance at %s>" % (self.this,)
class FXSlider(FXSliderPtr):
    """Owning shadow class: builds a new C FXSlider and registers it with FXPy.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXSlider(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
        FXPyRegister(self)
class FX_HeaderItemPtr(FX_ObjectPtr):
    """Non-owning SWIG shadow for FX_HeaderItem.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def getText(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getText(self, *_args, **_kwargs)
    def setText(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_setText(self, *_args, **_kwargs)
    def getIcon(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getIcon(self, *_args, **_kwargs)
    def setIcon(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_setIcon(self, *_args, **_kwargs)
    def setData(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_setData(self, *_args, **_kwargs)
    def getData(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getData(self, *_args, **_kwargs)
    def setSize(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_setSize(self, *_args, **_kwargs)
    def getSize(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getSize(self, *_args, **_kwargs)
    def setArrowDir(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_setArrowDir(self, *_args, **_kwargs)
    def getArrowDir(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getArrowDir(self, *_args, **_kwargs)
    def getWidth(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getWidth(self, *_args, **_kwargs)
    def getHeight(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_getHeight(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_create(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_detach(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FX_HeaderItem_destroy(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_HeaderItem instance at %s>" % (self.this,)
class FX_HeaderItem(FX_HeaderItemPtr):
    """Owning shadow class: constructs a new C FX_HeaderItem object.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_HeaderItem(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
class FXHeaderItemPtr(FX_HeaderItemPtr):
    """Non-owning SWIG shadow for FXHeaderItem.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_onDefault(self, *_args, **_kwargs)
    def setText(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_setText(self, *_args, **_kwargs)
    def setIcon(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_setIcon(self, *_args, **_kwargs)
    def getWidth(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_getWidth(self, *_args, **_kwargs)
    def getHeight(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_getHeight(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_create(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_detach(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXHeaderItem_destroy(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXHeaderItem instance at %s>" % (self.this,)
class FXHeaderItem(FXHeaderItemPtr):
    """Owning shadow class: builds a new C FXHeaderItem and registers it with FXPy.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXHeaderItem(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
        FXPyRegister(self)
class FX_HeaderPtr(FX_FramePtr):
    """Non-owning SWIG shadow for FX_Header.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    The insert/append/prepend/replace item methods keep the generated
    broad-except overload dispatch: if the item-object variant rejects
    the arguments, the text/icon variant is tried.
    """
    ID_TIPTIMER = controlsc.FX_Header_ID_TIPTIMER
    ID_LAST = controlsc.FX_Header_ID_LAST
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_Header_onPaint(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_Header_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_Header_onLeftBtnRelease(self, *_args, **_kwargs)
    def onUngrabbed(self, *_args, **_kwargs):
        return controlsc.FX_Header_onUngrabbed(self, *_args, **_kwargs)
    def onMotion(self, *_args, **_kwargs):
        return controlsc.FX_Header_onMotion(self, *_args, **_kwargs)
    def onTipTimer(self, *_args, **_kwargs):
        return controlsc.FX_Header_onTipTimer(self, *_args, **_kwargs)
    def onQueryTip(self, *_args, **_kwargs):
        return controlsc.FX_Header_onQueryTip(self, *_args, **_kwargs)
    def onQueryHelp(self, *_args, **_kwargs):
        return controlsc.FX_Header_onQueryHelp(self, *_args, **_kwargs)
    def getNumItems(self, *_args, **_kwargs):
        return controlsc.FX_Header_getNumItems(self, *_args, **_kwargs)
    def retrieveItem(self, *_args, **_kwargs):
        return controlsc.FX_Header_retrieveItem(self, *_args, **_kwargs)
    def replaceItem(self, *_args, **_kwargs):
        # Overload dispatch: bare except kept deliberately to preserve the
        # generated fall-through to the second C variant.
        try:
            return controlsc.FX_Header_replaceItem(self, *_args, **_kwargs)
        except:
            return controlsc.FX_Header_replaceItem2(self, *_args, **_kwargs)
    def replaceItem2(self, *_args, **_kwargs):
        return controlsc.FX_Header_replaceItem2(self, *_args, **_kwargs)
    def insertItem(self, *_args, **_kwargs):
        # Overload dispatch (see replaceItem).
        try:
            return controlsc.FX_Header_insertItem(self, *_args, **_kwargs)
        except:
            return controlsc.FX_Header_insertItem2(self, *_args, **_kwargs)
    def insertItem2(self, *_args, **_kwargs):
        return controlsc.FX_Header_insertItem2(self, *_args, **_kwargs)
    def appendItem(self, *_args, **_kwargs):
        # Overload dispatch (see replaceItem).
        try:
            return controlsc.FX_Header_appendItem(self, *_args, **_kwargs)
        except:
            return controlsc.FX_Header_appendItem2(self, *_args, **_kwargs)
    def appendItem2(self, *_args, **_kwargs):
        return controlsc.FX_Header_appendItem2(self, *_args, **_kwargs)
    def prependItem(self, *_args, **_kwargs):
        # Overload dispatch (see replaceItem).
        try:
            return controlsc.FX_Header_prependItem(self, *_args, **_kwargs)
        except:
            return controlsc.FX_Header_prependItem2(self, *_args, **_kwargs)
    def prependItem2(self, *_args, **_kwargs):
        return controlsc.FX_Header_prependItem2(self, *_args, **_kwargs)
    def removeItem(self, *_args, **_kwargs):
        return controlsc.FX_Header_removeItem(self, *_args, **_kwargs)
    def clearItems(self, *_args, **_kwargs):
        return controlsc.FX_Header_clearItems(self, *_args, **_kwargs)
    def getItemAt(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemAt(self, *_args, **_kwargs)
    def setItemText(self, *_args, **_kwargs):
        return controlsc.FX_Header_setItemText(self, *_args, **_kwargs)
    def getItemText(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemText(self, *_args, **_kwargs)
    def setItemIcon(self, *_args, **_kwargs):
        return controlsc.FX_Header_setItemIcon(self, *_args, **_kwargs)
    def getItemIcon(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemIcon(self, *_args, **_kwargs)
    def setItemSize(self, *_args, **_kwargs):
        return controlsc.FX_Header_setItemSize(self, *_args, **_kwargs)
    def getItemSize(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemSize(self, *_args, **_kwargs)
    def getItemOffset(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemOffset(self, *_args, **_kwargs)
    def setItemData(self, *_args, **_kwargs):
        return controlsc.FX_Header_setItemData(self, *_args, **_kwargs)
    def getItemData(self, *_args, **_kwargs):
        return controlsc.FX_Header_getItemData(self, *_args, **_kwargs)
    def setFont(self, *_args, **_kwargs):
        return controlsc.FX_Header_setFont(self, *_args, **_kwargs)
    def getFont(self, *_args, **_kwargs):
        return controlsc.FX_Header_getFont(self, *_args, **_kwargs)
    def getTextColor(self, *_args, **_kwargs):
        return controlsc.FX_Header_getTextColor(self, *_args, **_kwargs)
    def setTextColor(self, *_args, **_kwargs):
        return controlsc.FX_Header_setTextColor(self, *_args, **_kwargs)
    def setHeaderStyle(self, *_args, **_kwargs):
        return controlsc.FX_Header_setHeaderStyle(self, *_args, **_kwargs)
    def getHeaderStyle(self, *_args, **_kwargs):
        return controlsc.FX_Header_getHeaderStyle(self, *_args, **_kwargs)
    def setHelpText(self, *_args, **_kwargs):
        return controlsc.FX_Header_setHelpText(self, *_args, **_kwargs)
    def getHelpText(self, *_args, **_kwargs):
        return controlsc.FX_Header_getHelpText(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_Header instance at %s>" % (self.this,)
class FX_Header(FX_HeaderPtr):
    """Owning shadow class: constructs a new C FX_Header object.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_Header(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
class FXHeaderPtr(FX_HeaderPtr):
    """Non-owning SWIG shadow for FXHeader.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXHeader_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXHeader_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXHeader_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXHeader_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXHeader_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXHeader_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXHeader_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXHeader_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXHeader_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXHeader_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXHeader_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXHeader_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXHeader_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXHeader_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXHeader_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXHeader_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXHeader_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXHeader_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXHeader_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXHeader_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXHeader_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXHeader_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXHeader_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXHeader_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXHeader_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXHeader_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXHeader_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXHeader instance at %s>" % (self.this,)
class FXHeader(FXHeaderPtr):
    """Owning shadow class: builds a new C FXHeader and registers it with FXPy.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXHeader(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
        FXPyRegister(self)
class FX_ProgressBarPtr(FX_FramePtr):
    """Non-owning SWIG shadow for FX_ProgressBar.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_onPaint(self, *_args, **_kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_onCmdSetValue(self, *_args, **_kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_onCmdSetIntValue(self, *_args, **_kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_onCmdGetIntValue(self, *_args, **_kwargs)
    def setProgress(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setProgress(self, *_args, **_kwargs)
    def getProgress(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getProgress(self, *_args, **_kwargs)
    def setTotal(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setTotal(self, *_args, **_kwargs)
    def getTotal(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getTotal(self, *_args, **_kwargs)
    def increment(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_increment(self, *_args, **_kwargs)
    def hideNumber(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_hideNumber(self, *_args, **_kwargs)
    def showNumber(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_showNumber(self, *_args, **_kwargs)
    def setBarSize(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setBarSize(self, *_args, **_kwargs)
    def getBarSize(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getBarSize(self, *_args, **_kwargs)
    def setBarBGColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setBarBGColor(self, *_args, **_kwargs)
    def getBarBGColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getBarBGColor(self, *_args, **_kwargs)
    def setBarColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setBarColor(self, *_args, **_kwargs)
    def getBarColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getBarColor(self, *_args, **_kwargs)
    def setTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setTextColor(self, *_args, **_kwargs)
    def getTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getTextColor(self, *_args, **_kwargs)
    def setTextAltColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setTextAltColor(self, *_args, **_kwargs)
    def getTextAltColor(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getTextAltColor(self, *_args, **_kwargs)
    def setFont(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setFont(self, *_args, **_kwargs)
    def getFont(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getFont(self, *_args, **_kwargs)
    def setBarStyle(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_setBarStyle(self, *_args, **_kwargs)
    def getBarStyle(self, *_args, **_kwargs):
        return controlsc.FX_ProgressBar_getBarStyle(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_ProgressBar instance at %s>" % (self.this,)
class FX_ProgressBar(FX_ProgressBarPtr):
    """Owning shadow class: constructs a new C FX_ProgressBar object.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_ProgressBar(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
class FXProgressBarPtr(FX_ProgressBarPtr):
    """Non-owning SWIG shadow for FXProgressBar.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXProgressBar_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXProgressBar instance at %s>" % (self.this,)
class FXProgressBar(FXProgressBarPtr):
    """Owning shadow class: builds a new C FXProgressBar and registers it with FXPy.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FXProgressBar(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
        FXPyRegister(self)
class FX_ToolbarTabPtr(FX_FramePtr):
    """Non-owning SWIG shadow for FX_ToolbarTab.

    Every method forwards directly to the flat C API in ``controlsc``.
    The deprecated ``apply()`` builtin (removed in Python 3) has been
    replaced throughout by direct calls with ``*``/``**`` unpacking.
    """
    ID_COLLAPSE = controlsc.FX_ToolbarTab_ID_COLLAPSE
    ID_UNCOLLAPSE = controlsc.FX_ToolbarTab_ID_UNCOLLAPSE
    ID_LAST = controlsc.FX_ToolbarTab_ID_LAST
    def __init__(self, this):
        self.this = this   # SWIG pointer handle
        self.thisown = 0   # borrowed: the C side keeps ownership
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onPaint(self, *_args, **_kwargs)
    def onUpdate(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onUpdate(self, *_args, **_kwargs)
    def onEnter(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onEnter(self, *_args, **_kwargs)
    def onLeave(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onLeave(self, *_args, **_kwargs)
    def onUngrabbed(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onUngrabbed(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onLeftBtnRelease(self, *_args, **_kwargs)
    def onKeyPress(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onKeyPress(self, *_args, **_kwargs)
    def onKeyRelease(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onKeyRelease(self, *_args, **_kwargs)
    def onCmdCollapse(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onCmdCollapse(self, *_args, **_kwargs)
    def onUpdCollapse(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onUpdCollapse(self, *_args, **_kwargs)
    def onCmdUncollapse(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onCmdUncollapse(self, *_args, **_kwargs)
    def onUpdUncollapse(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_onUpdUncollapse(self, *_args, **_kwargs)
    def collapse(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_collapse(self, *_args, **_kwargs)
    def isCollapsed(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_isCollapsed(self, *_args, **_kwargs)
    def setTabStyle(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_setTabStyle(self, *_args, **_kwargs)
    def getTabStyle(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_getTabStyle(self, *_args, **_kwargs)
    def getActiveColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_getActiveColor(self, *_args, **_kwargs)
    def setActiveColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarTab_setActiveColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_ToolbarTab instance at %s>" % (self.this,)
class FX_ToolbarTab(FX_ToolbarTabPtr):
    """Owning shadow class: constructs a new C FX_ToolbarTab object.

    apply() (removed in Python 3) is replaced by a direct call.
    """
    def __init__(self, *_args, **_kwargs):
        self.this = controlsc.new_FX_ToolbarTab(*_args, **_kwargs)
        self.thisown = 1  # this wrapper owns the underlying C object
class FXToolbarTabPtr(FX_ToolbarTabPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXToolbarTab_setBackColor,(self,) + _args, _kwargs)
return val
    def __repr__(self):
        # Standard SWIG shadow-class repr; self.this holds the C pointer string.
        return "<C FXToolbarTab instance at %s>" % (self.this,)
class FXToolbarTab(FXToolbarTabPtr):
    """Owning shadow class: constructs a new C FXToolbarTab and registers it."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FXToolbarTab(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
        FXPyRegister(self)
class FX_ToolbarPtr(FX_PackerPtr):
    """SWIG shadow-pointer class for FX_Toolbar.

    Wraps an existing C pointer (non-owning: thisown=0).  Every method is a
    thin delegate to the matching ``controlsc.FX_Toolbar_*`` C wrapper;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    ID_UNDOCK = controlsc.FX_Toolbar_ID_UNDOCK
    ID_DOCK_TOP = controlsc.FX_Toolbar_ID_DOCK_TOP
    ID_DOCK_BOTTOM = controlsc.FX_Toolbar_ID_DOCK_BOTTOM
    ID_DOCK_LEFT = controlsc.FX_Toolbar_ID_DOCK_LEFT
    ID_DOCK_RIGHT = controlsc.FX_Toolbar_ID_DOCK_RIGHT
    ID_TOOLBARGRIP = controlsc.FX_Toolbar_ID_TOOLBARGRIP
    ID_LAST = controlsc.FX_Toolbar_ID_LAST
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onCmdUndock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onCmdUndock(self, *_args, **_kwargs)
    def onUpdUndock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onUpdUndock(self, *_args, **_kwargs)
    def onCmdDockTop(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onCmdDockTop(self, *_args, **_kwargs)
    def onUpdDockTop(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onUpdDockTop(self, *_args, **_kwargs)
    def onCmdDockBottom(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onCmdDockBottom(self, *_args, **_kwargs)
    def onUpdDockBottom(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onUpdDockBottom(self, *_args, **_kwargs)
    def onCmdDockLeft(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onCmdDockLeft(self, *_args, **_kwargs)
    def onUpdDockLeft(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onUpdDockLeft(self, *_args, **_kwargs)
    def onCmdDockRight(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onCmdDockRight(self, *_args, **_kwargs)
    def onUpdDockRight(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onUpdDockRight(self, *_args, **_kwargs)
    def onBeginDragGrip(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onBeginDragGrip(self, *_args, **_kwargs)
    def onEndDragGrip(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onEndDragGrip(self, *_args, **_kwargs)
    def onDraggedGrip(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_onDraggedGrip(self, *_args, **_kwargs)
    def setDryDock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_setDryDock(self, *_args, **_kwargs)
    def setWetDock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_setWetDock(self, *_args, **_kwargs)
    def getDryDock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_getDryDock(self, *_args, **_kwargs)
    def getWetDock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_getWetDock(self, *_args, **_kwargs)
    def isDocked(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_isDocked(self, *_args, **_kwargs)
    def dock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_dock(self, *_args, **_kwargs)
    def undock(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_undock(self, *_args, **_kwargs)
    def setDockingSide(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_setDockingSide(self, *_args, **_kwargs)
    def getDockingSide(self, *_args, **_kwargs):
        return controlsc.FX_Toolbar_getDockingSide(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_Toolbar instance at %s>" % (self.this,)
class FX_Toolbar(FX_ToolbarPtr):
    """Shadow wrapper around an existing FX_Toolbar C pointer."""
    def __init__(self,this):
        self.this = this
        # NOTE(review): unlike sibling classes, thisown is never assigned
        # here; presumably ownership stays on the C++ side -- confirm
        # against the SWIG generator settings.
class FXToolbarPtr(FX_ToolbarPtr):
    """SWIG shadow-pointer class for FXToolbar (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FXToolbar_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbar_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXToolbar_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXToolbar_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXToolbar_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXToolbar_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbar_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbar_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXToolbar_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXToolbar_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXToolbar_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXToolbar_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbar_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbar_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbar_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbar_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXToolbar_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXToolbar_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXToolbar_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXToolbar_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXToolbar_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXToolbar_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXToolbar_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbar_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbar_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXToolbar_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXToolbar_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXToolbar_setBackColor(self, *_args, **_kwargs)
    def dock(self, *_args, **_kwargs):
        return controlsc.FXToolbar_dock(self, *_args, **_kwargs)
    def undock(self, *_args, **_kwargs):
        return controlsc.FXToolbar_undock(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXToolbar instance at %s>" % (self.this,)
class FXToolbar(FXToolbarPtr):
    """Owning shadow class for FXToolbar.

    Tries the floating-toolbar C constructor first and falls back to the
    non-floating variant (generated overload-dispatch pattern).
    apply() was replaced with extended call syntax (deprecated builtin).
    """
    def __init__(self, *_args, **_kwargs):
        try:
            self.this = controlsc.CreateFloatingToolbar(*_args, **_kwargs)
            self.thisown = 1
            FXPyRegister(self)
            return
        except:
            # Deliberate best-effort dispatch; fall through to the other
            # constructor signature.
            pass
        try:
            self.this = controlsc.CreateNonFloatingToolbar(*_args, **_kwargs)
            self.thisown = 1
            FXPyRegister(self)
        except:
            # NOTE(review): if both constructors fail, self.this is left
            # unset and the error is swallowed -- behavior preserved from
            # the generated code, but consider re-raising here.
            pass
class FX_ToolbarShellPtr(FX_TopWindowPtr):
    """SWIG shadow-pointer class for FX_ToolbarShell (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FX_ToolbarShell_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_onPaint(self, *_args, **_kwargs)
    def setFrameStyle(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_setFrameStyle(self, *_args, **_kwargs)
    def getFrameStyle(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getFrameStyle(self, *_args, **_kwargs)
    def getBorderWidth(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getBorderWidth(self, *_args, **_kwargs)
    def setHiliteColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_setHiliteColor(self, *_args, **_kwargs)
    def getHiliteColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getHiliteColor(self, *_args, **_kwargs)
    def setShadowColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_setShadowColor(self, *_args, **_kwargs)
    def getShadowColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getShadowColor(self, *_args, **_kwargs)
    def setBorderColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_setBorderColor(self, *_args, **_kwargs)
    def getBorderColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getBorderColor(self, *_args, **_kwargs)
    def setBaseColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_setBaseColor(self, *_args, **_kwargs)
    def getBaseColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarShell_getBaseColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_ToolbarShell instance at %s>" % (self.this,)
class FX_ToolbarShell(FX_ToolbarShellPtr):
    """Owning shadow class: constructs a new C FX_ToolbarShell."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FX_ToolbarShell(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
class FXToolbarShellPtr(FX_ToolbarShellPtr):
    """SWIG shadow-pointer class for FXToolbarShell (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FXToolbarShell_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).  ``show`` dispatches between two C overloads.
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        # Generated overload dispatch: try the first C variant, fall back
        # to show2 on any failure (bare except preserved from the
        # generated code).
        try:
            return controlsc.FXToolbarShell_show(self, *_args, **_kwargs)
        except:
            return controlsc.FXToolbarShell_show2(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_setBackColor(self, *_args, **_kwargs)
    def show2(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_show2(self, *_args, **_kwargs)
    def iconify(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_iconify(self, *_args, **_kwargs)
    def deiconify(self, *_args, **_kwargs):
        return controlsc.FXToolbarShell_deiconify(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXToolbarShell instance at %s>" % (self.this,)
class FXToolbarShell(FXToolbarShellPtr):
    """Owning shadow class: constructs a new C FXToolbarShell and registers it."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FXToolbarShell(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
        FXPyRegister(self)
class FX_ToolbarGripPtr(FX_WindowPtr):
    """SWIG shadow-pointer class for FX_ToolbarGrip (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FX_ToolbarGrip_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onPaint(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onPaint(self, *_args, **_kwargs)
    def onLeftBtnPress(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onLeftBtnPress(self, *_args, **_kwargs)
    def onLeftBtnRelease(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onLeftBtnRelease(self, *_args, **_kwargs)
    def onMotion(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onMotion(self, *_args, **_kwargs)
    def onEnter(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onEnter(self, *_args, **_kwargs)
    def onLeave(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_onLeave(self, *_args, **_kwargs)
    def setDoubleBar(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_setDoubleBar(self, *_args, **_kwargs)
    def getDoubleBar(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_getDoubleBar(self, *_args, **_kwargs)
    def setHiliteColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_setHiliteColor(self, *_args, **_kwargs)
    def getHiliteColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_getHiliteColor(self, *_args, **_kwargs)
    def setShadowColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_setShadowColor(self, *_args, **_kwargs)
    def getShadowColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_getShadowColor(self, *_args, **_kwargs)
    def getActiveColor(self, *_args, **_kwargs):
        return controlsc.FX_ToolbarGrip_getActiveColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_ToolbarGrip instance at %s>" % (self.this,)
class FX_ToolbarGrip(FX_ToolbarGripPtr):
    """Owning shadow class: constructs a new C FX_ToolbarGrip."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FX_ToolbarGrip(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
class FXToolbarGripPtr(FX_ToolbarGripPtr):
    """SWIG shadow-pointer class for FXToolbarGrip (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FXToolbarGrip_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXToolbarGrip_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXToolbarGrip instance at %s>" % (self.this,)
class FXToolbarGrip(FXToolbarGripPtr):
    """Owning shadow class: constructs a new C FXToolbarGrip and registers it."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FXToolbarGrip(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
        FXPyRegister(self)
class FX_ListBoxPtr(FX_PackerPtr):
    """SWIG shadow-pointer class for FX_ListBox (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FX_ListBox_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    ID_LIST = controlsc.FX_ListBox_ID_LIST
    ID_FIELD = controlsc.FX_ListBox_ID_FIELD
    ID_LAST = controlsc.FX_ListBox_ID_LAST
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onFocusUp(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onFocusUp(self, *_args, **_kwargs)
    def onFocusDown(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onFocusDown(self, *_args, **_kwargs)
    def onFieldButton(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onFieldButton(self, *_args, **_kwargs)
    def onListUpdate(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onListUpdate(self, *_args, **_kwargs)
    def onListChanged(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onListChanged(self, *_args, **_kwargs)
    def onListClicked(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onListClicked(self, *_args, **_kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onCmdSetValue(self, *_args, **_kwargs)
    def onCmdGetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onCmdGetIntValue(self, *_args, **_kwargs)
    def onCmdSetIntValue(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_onCmdSetIntValue(self, *_args, **_kwargs)
    def getNumItems(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getNumItems(self, *_args, **_kwargs)
    def getNumVisible(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getNumVisible(self, *_args, **_kwargs)
    def setNumVisible(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setNumVisible(self, *_args, **_kwargs)
    def isItemCurrent(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_isItemCurrent(self, *_args, **_kwargs)
    def setCurrentItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setCurrentItem(self, *_args, **_kwargs)
    def getCurrentItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getCurrentItem(self, *_args, **_kwargs)
    def retrieveItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_retrieveItem(self, *_args, **_kwargs)
    def replaceItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_replaceItem(self, *_args, **_kwargs)
    def insertItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_insertItem(self, *_args, **_kwargs)
    def appendItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_appendItem(self, *_args, **_kwargs)
    def prependItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_prependItem(self, *_args, **_kwargs)
    def removeItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_removeItem(self, *_args, **_kwargs)
    def clearItems(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_clearItems(self, *_args, **_kwargs)
    def findItem(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_findItem(self, *_args, **_kwargs)
    def setItemText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setItemText(self, *_args, **_kwargs)
    def getItemText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getItemText(self, *_args, **_kwargs)
    def setItemIcon(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setItemIcon(self, *_args, **_kwargs)
    def getItemIcon(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getItemIcon(self, *_args, **_kwargs)
    def setItemData(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setItemData(self, *_args, **_kwargs)
    def getItemData(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getItemData(self, *_args, **_kwargs)
    def isPaneShown(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_isPaneShown(self, *_args, **_kwargs)
    def sortItems(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_sortItems(self, *_args, **_kwargs)
    def setFont(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setFont(self, *_args, **_kwargs)
    def getFont(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getFont(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setBackColor(self, *_args, **_kwargs)
    def getBackColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getBackColor(self, *_args, **_kwargs)
    def setTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setTextColor(self, *_args, **_kwargs)
    def getTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getTextColor(self, *_args, **_kwargs)
    def setSelBackColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setSelBackColor(self, *_args, **_kwargs)
    def getSelBackColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getSelBackColor(self, *_args, **_kwargs)
    def setSelTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setSelTextColor(self, *_args, **_kwargs)
    def getSelTextColor(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getSelTextColor(self, *_args, **_kwargs)
    def getSortFunc(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getSortFunc(self, *_args, **_kwargs)
    def setSortFunc(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setSortFunc(self, *_args, **_kwargs)
    def setHelpText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setHelpText(self, *_args, **_kwargs)
    def getHelpText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getHelpText(self, *_args, **_kwargs)
    def setTipText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_setTipText(self, *_args, **_kwargs)
    def getTipText(self, *_args, **_kwargs):
        return controlsc.FX_ListBox_getTipText(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_ListBox instance at %s>" % (self.this,)
class FX_ListBox(FX_ListBoxPtr):
    """Owning shadow class: constructs a new C FX_ListBox."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FX_ListBox(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
class FXListBoxPtr(FX_ListBoxPtr):
    """SWIG shadow-pointer class for FXListBox (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FXListBox_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onDefault(self, *_args, **_kwargs):
        return controlsc.FXListBox_onDefault(self, *_args, **_kwargs)
    def create(self, *_args, **_kwargs):
        return controlsc.FXListBox_create(self, *_args, **_kwargs)
    def destroy(self, *_args, **_kwargs):
        return controlsc.FXListBox_destroy(self, *_args, **_kwargs)
    def detach(self, *_args, **_kwargs):
        return controlsc.FXListBox_detach(self, *_args, **_kwargs)
    def resize(self, *_args, **_kwargs):
        return controlsc.FXListBox_resize(self, *_args, **_kwargs)
    def getDefaultWidth(self, *_args, **_kwargs):
        return controlsc.FXListBox_getDefaultWidth(self, *_args, **_kwargs)
    def getDefaultHeight(self, *_args, **_kwargs):
        return controlsc.FXListBox_getDefaultHeight(self, *_args, **_kwargs)
    def show(self, *_args, **_kwargs):
        return controlsc.FXListBox_show(self, *_args, **_kwargs)
    def hide(self, *_args, **_kwargs):
        return controlsc.FXListBox_hide(self, *_args, **_kwargs)
    def enable(self, *_args, **_kwargs):
        return controlsc.FXListBox_enable(self, *_args, **_kwargs)
    def disable(self, *_args, **_kwargs):
        return controlsc.FXListBox_disable(self, *_args, **_kwargs)
    def canFocus(self, *_args, **_kwargs):
        return controlsc.FXListBox_canFocus(self, *_args, **_kwargs)
    def setFocus(self, *_args, **_kwargs):
        return controlsc.FXListBox_setFocus(self, *_args, **_kwargs)
    def killFocus(self, *_args, **_kwargs):
        return controlsc.FXListBox_killFocus(self, *_args, **_kwargs)
    def setDefault(self, *_args, **_kwargs):
        return controlsc.FXListBox_setDefault(self, *_args, **_kwargs)
    def recalc(self, *_args, **_kwargs):
        return controlsc.FXListBox_recalc(self, *_args, **_kwargs)
    def layout(self, *_args, **_kwargs):
        return controlsc.FXListBox_layout(self, *_args, **_kwargs)
    def lower(self, *_args, **_kwargs):
        return controlsc.FXListBox_lower(self, *_args, **_kwargs)
    def move(self, *_args, **_kwargs):
        return controlsc.FXListBox_move(self, *_args, **_kwargs)
    def position(self, *_args, **_kwargs):
        return controlsc.FXListBox_position(self, *_args, **_kwargs)
    def isComposite(self, *_args, **_kwargs):
        return controlsc.FXListBox_isComposite(self, *_args, **_kwargs)
    def contains(self, *_args, **_kwargs):
        return controlsc.FXListBox_contains(self, *_args, **_kwargs)
    def getWidthForHeight(self, *_args, **_kwargs):
        return controlsc.FXListBox_getWidthForHeight(self, *_args, **_kwargs)
    def getHeightForWidth(self, *_args, **_kwargs):
        return controlsc.FXListBox_getHeightForWidth(self, *_args, **_kwargs)
    def doesSaveUnder(self, *_args, **_kwargs):
        return controlsc.FXListBox_doesSaveUnder(self, *_args, **_kwargs)
    def reparent(self, *_args, **_kwargs):
        return controlsc.FXListBox_reparent(self, *_args, **_kwargs)
    def setBackColor(self, *_args, **_kwargs):
        return controlsc.FXListBox_setBackColor(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FXListBox instance at %s>" % (self.this,)
class FXListBox(FXListBoxPtr):
    """Owning shadow class: constructs a new C FXListBox and registers it."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FXListBox(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
        FXPyRegister(self)
class FX_DriveBoxPtr(FX_ListBoxPtr):
    """SWIG shadow-pointer class for FX_DriveBox (non-owning: thisown=0).

    All methods delegate to the ``controlsc.FX_DriveBox_*`` C wrappers;
    ``apply()`` was replaced with extended call syntax (deprecated builtin,
    removed in Python 3).
    """
    def __init__(self, this):
        self.this = this
        self.thisown = 0
    def onListChanged(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_onListChanged(self, *_args, **_kwargs)
    def onListClicked(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_onListClicked(self, *_args, **_kwargs)
    def onCmdSetValue(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_onCmdSetValue(self, *_args, **_kwargs)
    def onCmdSetStringValue(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_onCmdSetStringValue(self, *_args, **_kwargs)
    def onCmdGetStringValue(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_onCmdGetStringValue(self, *_args, **_kwargs)
    def setDrive(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_setDrive(self, *_args, **_kwargs)
    def getDrive(self, *_args, **_kwargs):
        return controlsc.FX_DriveBox_getDrive(self, *_args, **_kwargs)
    def __repr__(self):
        return "<C FX_DriveBox instance at %s>" % (self.this,)
class FX_DriveBox(FX_DriveBoxPtr):
    """Owning shadow class: constructs a new C FX_DriveBox."""
    def __init__(self, *_args, **_kwargs):
        # apply() is deprecated (removed in Py3); use extended call syntax.
        self.this = controlsc.new_FX_DriveBox(*_args, **_kwargs)
        self.thisown = 1  # Python owns the underlying C object.
class FXDriveBoxPtr(FX_DriveBoxPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXDriveBox_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXDriveBox instance at %s>" % (self.this,)
class FXDriveBox(FXDriveBoxPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXDriveBox,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ColorBarPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onMotion,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_onQueryTip,(self,) + _args, _kwargs)
return val
def setHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setHue,(self,) + _args, _kwargs)
return val
def getHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getHue,(self,) + _args, _kwargs)
return val
def setSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setSat,(self,) + _args, _kwargs)
return val
def getSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getSat,(self,) + _args, _kwargs)
return val
def setVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setVal,(self,) + _args, _kwargs)
return val
def getVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getVal,(self,) + _args, _kwargs)
return val
def getBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getBarStyle,(self,) + _args, _kwargs)
return val
def setBarStyle(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setBarStyle,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorBar_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ColorBar instance at %s>" % (self.this,)
class FX_ColorBar(FX_ColorBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ColorBar,_args,_kwargs)
self.thisown = 1
class FXColorBarPtr(FX_ColorBarPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXColorBar_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXColorBar instance at %s>" % (self.this,)
class FXColorBar(FXColorBarPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXColorBar,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
class FX_ColorWheelPtr(FX_FramePtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onPaint(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onPaint,(self,) + _args, _kwargs)
return val
def onLeftBtnPress(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onLeftBtnPress,(self,) + _args, _kwargs)
return val
def onLeftBtnRelease(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onLeftBtnRelease,(self,) + _args, _kwargs)
return val
def onMotion(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onMotion,(self,) + _args, _kwargs)
return val
def onQueryHelp(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onQueryHelp,(self,) + _args, _kwargs)
return val
def onQueryTip(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_onQueryTip,(self,) + _args, _kwargs)
return val
def setHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setHue,(self,) + _args, _kwargs)
return val
def getHue(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getHue,(self,) + _args, _kwargs)
return val
def setSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setSat,(self,) + _args, _kwargs)
return val
def getSat(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getSat,(self,) + _args, _kwargs)
return val
def setVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setVal,(self,) + _args, _kwargs)
return val
def getVal(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getVal,(self,) + _args, _kwargs)
return val
def setHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setHelpText,(self,) + _args, _kwargs)
return val
def getHelpText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getHelpText,(self,) + _args, _kwargs)
return val
def setTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_setTipText,(self,) + _args, _kwargs)
return val
def getTipText(self, *_args, **_kwargs):
val = apply(controlsc.FX_ColorWheel_getTipText,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FX_ColorWheel instance at %s>" % (self.this,)
class FX_ColorWheel(FX_ColorWheelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FX_ColorWheel,_args,_kwargs)
self.thisown = 1
class FXColorWheelPtr(FX_ColorWheelPtr):
def __init__(self,this):
self.this = this
self.thisown = 0
def onDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_onDefault,(self,) + _args, _kwargs)
return val
def create(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_create,(self,) + _args, _kwargs)
return val
def destroy(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_destroy,(self,) + _args, _kwargs)
return val
def detach(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_detach,(self,) + _args, _kwargs)
return val
def resize(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_resize,(self,) + _args, _kwargs)
return val
def getDefaultWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getDefaultWidth,(self,) + _args, _kwargs)
return val
def getDefaultHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getDefaultHeight,(self,) + _args, _kwargs)
return val
def show(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_show,(self,) + _args, _kwargs)
return val
def hide(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_hide,(self,) + _args, _kwargs)
return val
def enable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_enable,(self,) + _args, _kwargs)
return val
def disable(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_disable,(self,) + _args, _kwargs)
return val
def canFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_canFocus,(self,) + _args, _kwargs)
return val
def setFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setFocus,(self,) + _args, _kwargs)
return val
def killFocus(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_killFocus,(self,) + _args, _kwargs)
return val
def setDefault(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setDefault,(self,) + _args, _kwargs)
return val
def recalc(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_recalc,(self,) + _args, _kwargs)
return val
def layout(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_layout,(self,) + _args, _kwargs)
return val
def lower(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_lower,(self,) + _args, _kwargs)
return val
def move(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_move,(self,) + _args, _kwargs)
return val
def position(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_position,(self,) + _args, _kwargs)
return val
def isComposite(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_isComposite,(self,) + _args, _kwargs)
return val
def contains(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_contains,(self,) + _args, _kwargs)
return val
def getWidthForHeight(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getWidthForHeight,(self,) + _args, _kwargs)
return val
def getHeightForWidth(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_getHeightForWidth,(self,) + _args, _kwargs)
return val
def doesSaveUnder(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_doesSaveUnder,(self,) + _args, _kwargs)
return val
def reparent(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_reparent,(self,) + _args, _kwargs)
return val
def setBackColor(self, *_args, **_kwargs):
val = apply(controlsc.FXColorWheel_setBackColor,(self,) + _args, _kwargs)
return val
def __repr__(self):
return "<C FXColorWheel instance at %s>" % (self.this,)
class FXColorWheel(FXColorWheelPtr):
def __init__(self,*_args,**_kwargs):
self.this = apply(controlsc.new_FXColorWheel,_args,_kwargs)
self.thisown = 1
FXPyRegister(self)
#-------------- FUNCTION WRAPPERS ------------------
def CreateFloatingToolbar(*_args, **_kwargs):
val = apply(controlsc.CreateFloatingToolbar,_args,_kwargs)
if val: val = FXToolbarPtr(val)
return val
def CreateNonFloatingToolbar(*_args, **_kwargs):
val = apply(controlsc.CreateNonFloatingToolbar,_args,_kwargs)
if val: val = FXToolbarPtr(val)
return val
#-------------- VARIABLE WRAPPERS ------------------
JUSTIFY_NORMAL = controlsc.JUSTIFY_NORMAL
JUSTIFY_CENTER_X = controlsc.JUSTIFY_CENTER_X
JUSTIFY_LEFT = controlsc.JUSTIFY_LEFT
JUSTIFY_RIGHT = controlsc.JUSTIFY_RIGHT
JUSTIFY_HZ_APART = controlsc.JUSTIFY_HZ_APART
JUSTIFY_CENTER_Y = controlsc.JUSTIFY_CENTER_Y
JUSTIFY_TOP = controlsc.JUSTIFY_TOP
JUSTIFY_BOTTOM = controlsc.JUSTIFY_BOTTOM
JUSTIFY_VT_APART = controlsc.JUSTIFY_VT_APART
ICON_UNDER_TEXT = controlsc.ICON_UNDER_TEXT
ICON_AFTER_TEXT = controlsc.ICON_AFTER_TEXT
ICON_BEFORE_TEXT = controlsc.ICON_BEFORE_TEXT
ICON_ABOVE_TEXT = controlsc.ICON_ABOVE_TEXT
ICON_BELOW_TEXT = controlsc.ICON_BELOW_TEXT
TEXT_OVER_ICON = controlsc.TEXT_OVER_ICON
TEXT_AFTER_ICON = controlsc.TEXT_AFTER_ICON
TEXT_BEFORE_ICON = controlsc.TEXT_BEFORE_ICON
TEXT_ABOVE_ICON = controlsc.TEXT_ABOVE_ICON
TEXT_BELOW_ICON = controlsc.TEXT_BELOW_ICON
LABEL_NORMAL = controlsc.LABEL_NORMAL
DIAL_VERTICAL = controlsc.DIAL_VERTICAL
DIAL_HORIZONTAL = controlsc.DIAL_HORIZONTAL
DIAL_CYCLIC = controlsc.DIAL_CYCLIC
DIAL_HAS_NOTCH = controlsc.DIAL_HAS_NOTCH
DIAL_NORMAL = controlsc.DIAL_NORMAL
COLORWELL_OPAQUEONLY = controlsc.COLORWELL_OPAQUEONLY
COLORWELL_SOURCEONLY = controlsc.COLORWELL_SOURCEONLY
COLORWELL_NORMAL = controlsc.COLORWELL_NORMAL
TEXTFIELD_PASSWD = controlsc.TEXTFIELD_PASSWD
TEXTFIELD_INTEGER = controlsc.TEXTFIELD_INTEGER
TEXTFIELD_REAL = controlsc.TEXTFIELD_REAL
TEXTFIELD_READONLY = controlsc.TEXTFIELD_READONLY
TEXTFIELD_ENTER_ONLY = controlsc.TEXTFIELD_ENTER_ONLY
TEXTFIELD_LIMITED = controlsc.TEXTFIELD_LIMITED
TEXTFIELD_OVERSTRIKE = controlsc.TEXTFIELD_OVERSTRIKE
TEXTFIELD_NORMAL = controlsc.TEXTFIELD_NORMAL
STATE_UP = controlsc.STATE_UP
STATE_DOWN = controlsc.STATE_DOWN
STATE_ENGAGED = controlsc.STATE_ENGAGED
STATE_UNCHECKED = controlsc.STATE_UNCHECKED
STATE_CHECKED = controlsc.STATE_CHECKED
BUTTON_AUTOGRAY = controlsc.BUTTON_AUTOGRAY
BUTTON_AUTOHIDE = controlsc.BUTTON_AUTOHIDE
BUTTON_TOOLBAR = controlsc.BUTTON_TOOLBAR
BUTTON_DEFAULT = controlsc.BUTTON_DEFAULT
BUTTON_INITIAL = controlsc.BUTTON_INITIAL
BUTTON_NORMAL = controlsc.BUTTON_NORMAL
TOGGLEBUTTON_AUTOGRAY = controlsc.TOGGLEBUTTON_AUTOGRAY
TOGGLEBUTTON_AUTOHIDE = controlsc.TOGGLEBUTTON_AUTOHIDE
TOGGLEBUTTON_TOOLBAR = controlsc.TOGGLEBUTTON_TOOLBAR
TOGGLEBUTTON_NORMAL = controlsc.TOGGLEBUTTON_NORMAL
RADIOBUTTON_AUTOGRAY = controlsc.RADIOBUTTON_AUTOGRAY
RADIOBUTTON_AUTOHIDE = controlsc.RADIOBUTTON_AUTOHIDE
RADIOBUTTON_NORMAL = controlsc.RADIOBUTTON_NORMAL
CHECKBUTTON_AUTOGRAY = controlsc.CHECKBUTTON_AUTOGRAY
CHECKBUTTON_AUTOHIDE = controlsc.CHECKBUTTON_AUTOHIDE
CHECKBUTTON_NORMAL = controlsc.CHECKBUTTON_NORMAL
ARROW_NONE = controlsc.ARROW_NONE
ARROW_UP = controlsc.ARROW_UP
ARROW_DOWN = controlsc.ARROW_DOWN
ARROW_LEFT = controlsc.ARROW_LEFT
ARROW_RIGHT = controlsc.ARROW_RIGHT
ARROW_REPEAT = controlsc.ARROW_REPEAT
ARROW_AUTOGRAY = controlsc.ARROW_AUTOGRAY
ARROW_AUTOHIDE = controlsc.ARROW_AUTOHIDE
ARROW_TOOLBAR = controlsc.ARROW_TOOLBAR
ARROW_NORMAL = controlsc.ARROW_NORMAL
SPIN_NORMAL = controlsc.SPIN_NORMAL
SPIN_CYCLIC = controlsc.SPIN_CYCLIC
SPIN_NOTEXT = controlsc.SPIN_NOTEXT
SPIN_NOMAX = controlsc.SPIN_NOMAX
SPIN_NOMIN = controlsc.SPIN_NOMIN
TOOLTIP_NORMAL = controlsc.TOOLTIP_NORMAL
TOOLTIP_PERMANENT = controlsc.TOOLTIP_PERMANENT
TOOLTIP_VARIABLE = controlsc.TOOLTIP_VARIABLE
TAB_TOP = controlsc.TAB_TOP
TAB_LEFT = controlsc.TAB_LEFT
TAB_RIGHT = controlsc.TAB_RIGHT
TAB_BOTTOM = controlsc.TAB_BOTTOM
TAB_TOP_NORMAL = controlsc.TAB_TOP_NORMAL
TAB_BOTTOM_NORMAL = controlsc.TAB_BOTTOM_NORMAL
TAB_LEFT_NORMAL = controlsc.TAB_LEFT_NORMAL
TAB_RIGHT_NORMAL = controlsc.TAB_RIGHT_NORMAL
TABBOOK_TOPTABS = controlsc.TABBOOK_TOPTABS
TABBOOK_BOTTOMTABS = controlsc.TABBOOK_BOTTOMTABS
TABBOOK_SIDEWAYS = controlsc.TABBOOK_SIDEWAYS
TABBOOK_LEFTTABS = controlsc.TABBOOK_LEFTTABS
TABBOOK_RIGHTTABS = controlsc.TABBOOK_RIGHTTABS
TABBOOK_NORMAL = controlsc.TABBOOK_NORMAL
SCROLLBAR_HORIZONTAL = controlsc.SCROLLBAR_HORIZONTAL
SCROLLBAR_VERTICAL = controlsc.SCROLLBAR_VERTICAL
LIST_EXTENDEDSELECT = controlsc.LIST_EXTENDEDSELECT
LIST_SINGLESELECT = controlsc.LIST_SINGLESELECT
LIST_BROWSESELECT = controlsc.LIST_BROWSESELECT
LIST_MULTIPLESELECT = controlsc.LIST_MULTIPLESELECT
LIST_AUTOSELECT = controlsc.LIST_AUTOSELECT
LIST_NORMAL = controlsc.LIST_NORMAL
COMBOBOX_NO_REPLACE = controlsc.COMBOBOX_NO_REPLACE
COMBOBOX_REPLACE = controlsc.COMBOBOX_REPLACE
COMBOBOX_INSERT_BEFORE = controlsc.COMBOBOX_INSERT_BEFORE
COMBOBOX_INSERT_AFTER = controlsc.COMBOBOX_INSERT_AFTER
COMBOBOX_INSERT_FIRST = controlsc.COMBOBOX_INSERT_FIRST
COMBOBOX_INSERT_LAST = controlsc.COMBOBOX_INSERT_LAST
COMBOBOX_STATIC = controlsc.COMBOBOX_STATIC
COMBOBOX_NORMAL = controlsc.COMBOBOX_NORMAL
STATUSBAR_WITH_DRAGCORNER = controlsc.STATUSBAR_WITH_DRAGCORNER
SLIDERBAR_SIZE = controlsc.SLIDERBAR_SIZE
SLIDERHEAD_SIZE = controlsc.SLIDERHEAD_SIZE
SLIDER_HORIZONTAL = controlsc.SLIDER_HORIZONTAL
SLIDER_VERTICAL = controlsc.SLIDER_VERTICAL
SLIDER_ARROW_UP = controlsc.SLIDER_ARROW_UP
SLIDER_ARROW_DOWN = controlsc.SLIDER_ARROW_DOWN
SLIDER_ARROW_LEFT = controlsc.SLIDER_ARROW_LEFT
SLIDER_ARROW_RIGHT = controlsc.SLIDER_ARROW_RIGHT
SLIDER_INSIDE_BAR = controlsc.SLIDER_INSIDE_BAR
SLIDER_TICKS_TOP = controlsc.SLIDER_TICKS_TOP
SLIDER_TICKS_BOTTOM = controlsc.SLIDER_TICKS_BOTTOM
SLIDER_TICKS_LEFT = controlsc.SLIDER_TICKS_LEFT
SLIDER_TICKS_RIGHT = controlsc.SLIDER_TICKS_RIGHT
SLIDER_NORMAL = controlsc.SLIDER_NORMAL
HEADER_BUTTON = controlsc.HEADER_BUTTON
HEADER_HORIZONTAL = controlsc.HEADER_HORIZONTAL
HEADER_VERTICAL = controlsc.HEADER_VERTICAL
HEADER_TRACKING = controlsc.HEADER_TRACKING
HEADER_NORMAL = controlsc.HEADER_NORMAL
PROGRESSBAR_HORIZONTAL = controlsc.PROGRESSBAR_HORIZONTAL
PROGRESSBAR_VERTICAL = controlsc.PROGRESSBAR_VERTICAL
PROGRESSBAR_PERCENTAGE = controlsc.PROGRESSBAR_PERCENTAGE
PROGRESSBAR_DIAL = controlsc.PROGRESSBAR_DIAL
PROGRESSBAR_NORMAL = controlsc.PROGRESSBAR_NORMAL
TOOLBARTAB_HORIZONTAL = controlsc.TOOLBARTAB_HORIZONTAL
TOOLBARTAB_VERTICAL = controlsc.TOOLBARTAB_VERTICAL
TOOLBARGRIP_SINGLE = controlsc.TOOLBARGRIP_SINGLE
TOOLBARGRIP_DOUBLE = controlsc.TOOLBARGRIP_DOUBLE
TOOLBARGRIP_SEPARATOR = controlsc.TOOLBARGRIP_SEPARATOR
LISTBOX_NORMAL = controlsc.LISTBOX_NORMAL
COLORBAR_HORIZONTAL = controlsc.COLORBAR_HORIZONTAL
COLORBAR_VERTICAL = controlsc.COLORBAR_VERTICAL
cvar = controlsc.cvar
| lgpl-2.1 | 2,367,650,108,529,640,000 | 43.259746 | 91 | 0.615725 | false |
adrienbrault/home-assistant | homeassistant/components/template/config.py | 1 | 4042 | """Template config validator."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import (
DEVICE_CLASSES_SCHEMA as SENSOR_DEVICE_CLASSES_SCHEMA,
DOMAIN as SENSOR_DOMAIN,
)
from homeassistant.config import async_log_exception, config_without_domain
from homeassistant.const import (
CONF_DEVICE_CLASS,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_FRIENDLY_NAME,
CONF_FRIENDLY_NAME_TEMPLATE,
CONF_ICON,
CONF_ICON_TEMPLATE,
CONF_NAME,
CONF_SENSORS,
CONF_STATE,
CONF_UNIQUE_ID,
CONF_UNIT_OF_MEASUREMENT,
CONF_VALUE_TEMPLATE,
)
from homeassistant.helpers import config_validation as cv, template
from homeassistant.helpers.trigger import async_validate_trigger_config
from .const import (
CONF_ATTRIBUTE_TEMPLATES,
CONF_ATTRIBUTES,
CONF_AVAILABILITY,
CONF_AVAILABILITY_TEMPLATE,
CONF_PICTURE,
CONF_TRIGGER,
DOMAIN,
)
from .sensor import SENSOR_SCHEMA as PLATFORM_SENSOR_SCHEMA
CONVERSION_PLATFORM = {
CONF_ICON_TEMPLATE: CONF_ICON,
CONF_ENTITY_PICTURE_TEMPLATE: CONF_PICTURE,
CONF_AVAILABILITY_TEMPLATE: CONF_AVAILABILITY,
CONF_ATTRIBUTE_TEMPLATES: CONF_ATTRIBUTES,
CONF_FRIENDLY_NAME_TEMPLATE: CONF_NAME,
CONF_FRIENDLY_NAME: CONF_NAME,
CONF_VALUE_TEMPLATE: CONF_STATE,
}
SENSOR_SCHEMA = vol.Schema(
{
vol.Optional(CONF_NAME): cv.template,
vol.Required(CONF_STATE): cv.template,
vol.Optional(CONF_ICON): cv.template,
vol.Optional(CONF_PICTURE): cv.template,
vol.Optional(CONF_AVAILABILITY): cv.template,
vol.Optional(CONF_ATTRIBUTES): vol.Schema({cv.string: cv.template}),
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_DEVICE_CLASS): SENSOR_DEVICE_CLASSES_SCHEMA,
vol.Optional(CONF_UNIQUE_ID): cv.string,
}
)
TRIGGER_ENTITY_SCHEMA = vol.Schema(
{
vol.Optional(CONF_UNIQUE_ID): cv.string,
vol.Required(CONF_TRIGGER): cv.TRIGGER_SCHEMA,
vol.Optional(SENSOR_DOMAIN): vol.All(cv.ensure_list, [SENSOR_SCHEMA]),
vol.Optional(CONF_SENSORS): cv.schema_with_slug_keys(PLATFORM_SENSOR_SCHEMA),
}
)
async def async_validate_config(hass, config):
"""Validate config."""
if DOMAIN not in config:
return config
trigger_entity_configs = []
for cfg in cv.ensure_list(config[DOMAIN]):
try:
cfg = TRIGGER_ENTITY_SCHEMA(cfg)
cfg[CONF_TRIGGER] = await async_validate_trigger_config(
hass, cfg[CONF_TRIGGER]
)
except vol.Invalid as err:
async_log_exception(err, DOMAIN, cfg, hass)
continue
if CONF_SENSORS not in cfg:
trigger_entity_configs.append(cfg)
continue
logging.getLogger(__name__).warning(
"The entity definition format under template: differs from the platform configuration format. See https://www.home-assistant.io/integrations/template#configuration-for-trigger-based-template-sensors"
)
sensor = list(cfg[SENSOR_DOMAIN]) if SENSOR_DOMAIN in cfg else []
for device_id, entity_cfg in cfg[CONF_SENSORS].items():
entity_cfg = {**entity_cfg}
for from_key, to_key in CONVERSION_PLATFORM.items():
if from_key not in entity_cfg or to_key in entity_cfg:
continue
val = entity_cfg.pop(from_key)
if isinstance(val, str):
val = template.Template(val)
entity_cfg[to_key] = val
if CONF_NAME not in entity_cfg:
entity_cfg[CONF_NAME] = template.Template(device_id)
sensor.append(entity_cfg)
cfg = {**cfg, "sensor": sensor}
trigger_entity_configs.append(cfg)
# Create a copy of the configuration with all config for current
# component removed and add validated config back in.
config = config_without_domain(config, DOMAIN)
config[DOMAIN] = trigger_entity_configs
return config
| mit | 7,074,478,497,944,458,000 | 31.079365 | 211 | 0.655616 | false |
krux/adspygoogle | tests/adspygoogle/dfp/creative_service_unittest.py | 1 | 13058 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests to cover CreativeService."""
__author__ = '[email protected] (Stan Grinberg)'
import base64
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..'))
import unittest
from adspygoogle.common import Utils
from tests.adspygoogle.dfp import client
from tests.adspygoogle.dfp import HTTP_PROXY
from tests.adspygoogle.dfp import SERVER_V201108
from tests.adspygoogle.dfp import SERVER_V201111
from tests.adspygoogle.dfp import TEST_VERSION_V201108
from tests.adspygoogle.dfp import TEST_VERSION_V201111
from tests.adspygoogle.dfp import VERSION_V201108
from tests.adspygoogle.dfp import VERSION_V201111
class CreativeServiceTestV201108(unittest.TestCase):
"""Unittest suite for CreativeService using v201108."""
SERVER = SERVER_V201108
VERSION = VERSION_V201108
client.debug = False
service = None
advertiser_id = '0'
creative1 = None
creative2 = None
IMAGE_DATA1 = open(os.path.join('data', 'medium_rectangle.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA2 = open(os.path.join('data', 'inline.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA3 = open(os.path.join('data', 'skyscraper.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA1 = base64.encodestring(IMAGE_DATA1)
IMAGE_DATA2 = base64.encodestring(IMAGE_DATA2)
IMAGE_DATA3 = base64.encodestring(IMAGE_DATA3)
def setUp(self):
"""Prepare unittest."""
print self.id()
if not self.__class__.service:
self.__class__.service = client.GetCreativeService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
if self.__class__.advertiser_id == '0':
company = {
'name': 'Company #%s' % Utils.GetUniqueName(),
'type': 'ADVERTISER'
}
company_service = client.GetCompanyService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
self.__class__.advertiser_id = company_service.CreateCompany(
company)[0]['id']
def testCreateCreative(self):
"""Test whether we can create a creative."""
creative = {
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'image.jpg',
'imageByteArray': self.__class__.IMAGE_DATA1,
'size': {'width': '300', 'height': '250'}
}
self.assert_(isinstance(
self.__class__.service.CreateCreative(creative), tuple))
def testCreateCreatives(self):
"""Test whether we can create a list of creatives."""
creatives = [
{
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'inline.jpg',
'imageByteArray': self.__class__.IMAGE_DATA2,
'size': {'width': '300', 'height': '250'}
},
{
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'skyscraper.jpg',
'imageByteArray': self.__class__.IMAGE_DATA3,
'size': {'width': '120', 'height': '600'}
}
]
creatives = self.__class__.service.CreateCreatives(creatives)
self.__class__.creative1 = creatives[0]
self.__class__.creative2 = creatives[1]
self.assert_(isinstance(creatives, tuple))
def testGetCreative(self):
"""Test whether we can fetch an existing creative."""
if not self.__class__.creative1:
self.testCreateCreatives()
self.assert_(isinstance(self.__class__.service.GetCreative(
self.__class__.creative1['id']), tuple))
self.assertEqual(self.__class__.service.GetCreative(
self.__class__.creative1['id'])[0]['Creative_Type'],
'ImageCreative')
def testGetCreativesByStatement(self):
"""Test whether we can fetch a list of existing creatives that match given
statement."""
if not self.__class__.creative1:
self.testCreateCreatives()
filter_statement = {'query': 'WHERE id = %s ORDER BY name LIMIT 1'
% self.__class__.creative1['id']}
self.assert_(isinstance(
self.__class__.service.GetCreativesByStatement(filter_statement),
tuple))
def testUpdateCreative(self):
"""Test whether we can update a creative."""
if not self.__class__.creative1:
self.testCreateCreatives()
destination_url = 'http://news.google.com'
image_name = 'inline.jpg'
size = {'width': '300', 'isAspectRatio': 'false', 'height': '250'}
self.__class__.creative1['destinationUrl'] = destination_url
self.__class__.creative1['imageName'] = image_name
self.__class__.creative1['size'] = size
creative = self.__class__.service.UpdateCreative(self.__class__.creative1)
self.assert_(isinstance(creative, tuple))
self.assertEqual(creative[0]['destinationUrl'], destination_url)
self.assertEqual(creative[0]['imageName'], image_name)
self.assertEqual(creative[0]['size'], size)
def testUpdateCreatives(self):
"""Test whether we can update a list of creatives."""
if not self.__class__.creative1 or not self.__class__.creative2:
self.testCreateCreatives()
destination_url = 'http://finance.google.com'
self.__class__.creative1['destinationUrl'] = 'http://finance.google.com'
self.__class__.creative1['imageName'] = 'inline.jpg'
self.__class__.creative1['size'] = {'width': '300', 'height': '250'}
self.__class__.creative2['destinationUrl'] = 'http://finance.google.com'
self.__class__.creative2['imageName'] = 'skyscraper.jpg'
self.__class__.creative2['size'] = {'width': '120', 'height': '600'}
creatives = self.__class__.service.UpdateCreatives(
[self.__class__.creative1, self.__class__.creative2])
self.assert_(isinstance(creatives, tuple))
for creative in creatives:
self.assertEqual(creative['destinationUrl'], destination_url)
class CreativeServiceTestV201111(unittest.TestCase):
"""Unittest suite for CreativeService using v201111."""
SERVER = SERVER_V201111
VERSION = VERSION_V201111
client.debug = False
service = None
advertiser_id = '0'
creative1 = None
creative2 = None
IMAGE_DATA1 = open(os.path.join('data', 'medium_rectangle.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA2 = open(os.path.join('data', 'inline.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA3 = open(os.path.join('data', 'skyscraper.jpg').replace(
'\\', '/'), 'r').read()
IMAGE_DATA1 = base64.encodestring(IMAGE_DATA1)
IMAGE_DATA2 = base64.encodestring(IMAGE_DATA2)
IMAGE_DATA3 = base64.encodestring(IMAGE_DATA3)
def setUp(self):
"""Prepare unittest."""
print self.id()
if not self.__class__.service:
self.__class__.service = client.GetCreativeService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
if self.__class__.advertiser_id == '0':
company = {
'name': 'Company #%s' % Utils.GetUniqueName(),
'type': 'ADVERTISER'
}
company_service = client.GetCompanyService(
self.__class__.SERVER, self.__class__.VERSION, HTTP_PROXY)
self.__class__.advertiser_id = company_service.CreateCompany(
company)[0]['id']
def testCreateCreative(self):
"""Test whether we can create a creative."""
creative = {
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'image.jpg',
'imageByteArray': self.__class__.IMAGE_DATA1,
'size': {'width': '300', 'height': '250'}
}
self.assert_(isinstance(
self.__class__.service.CreateCreative(creative), tuple))
def testCreateCreatives(self):
"""Test whether we can create a list of creatives."""
creatives = [
{
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'inline.jpg',
'imageByteArray': self.__class__.IMAGE_DATA2,
'size': {'width': '300', 'height': '250'}
},
{
'type': 'ImageCreative',
'name': 'Image Creative #%s' % Utils.GetUniqueName(),
'advertiserId': self.__class__.advertiser_id,
'destinationUrl': 'http://google.com',
'imageName': 'skyscraper.jpg',
'imageByteArray': self.__class__.IMAGE_DATA3,
'size': {'width': '120', 'height': '600'}
}
]
creatives = self.__class__.service.CreateCreatives(creatives)
self.__class__.creative1 = creatives[0]
self.__class__.creative2 = creatives[1]
self.assert_(isinstance(creatives, tuple))
def testGetCreative(self):
"""Test whether we can fetch an existing creative."""
if not self.__class__.creative1:
self.testCreateCreatives()
self.assert_(isinstance(self.__class__.service.GetCreative(
self.__class__.creative1['id']), tuple))
self.assertEqual(self.__class__.service.GetCreative(
self.__class__.creative1['id'])[0]['Creative_Type'],
'ImageCreative')
def testGetCreativesByStatement(self):
"""Test whether we can fetch a list of existing creatives that match given
statement."""
if not self.__class__.creative1:
self.testCreateCreatives()
filter_statement = {'query': 'WHERE id = %s ORDER BY name LIMIT 1'
% self.__class__.creative1['id']}
self.assert_(isinstance(
self.__class__.service.GetCreativesByStatement(filter_statement),
tuple))
def testUpdateCreative(self):
"""Test whether we can update a creative."""
if not self.__class__.creative1:
self.testCreateCreatives()
destination_url = 'http://news.google.com'
image_name = 'inline.jpg'
size = {'width': '300', 'isAspectRatio': 'false', 'height': '250'}
self.__class__.creative1['destinationUrl'] = destination_url
self.__class__.creative1['imageName'] = image_name
self.__class__.creative1['size'] = size
creative = self.__class__.service.UpdateCreative(self.__class__.creative1)
self.assert_(isinstance(creative, tuple))
self.assertEqual(creative[0]['destinationUrl'], destination_url)
self.assertEqual(creative[0]['imageName'], image_name)
self.assertEqual(creative[0]['size'], size)
def testUpdateCreatives(self):
"""Test whether we can update a list of creatives."""
if not self.__class__.creative1 or not self.__class__.creative2:
self.testCreateCreatives()
destination_url = 'http://finance.google.com'
self.__class__.creative1['destinationUrl'] = 'http://finance.google.com'
self.__class__.creative1['imageName'] = 'inline.jpg'
self.__class__.creative1['size'] = {'width': '300', 'height': '250'}
self.__class__.creative2['destinationUrl'] = 'http://finance.google.com'
self.__class__.creative2['imageName'] = 'skyscraper.jpg'
self.__class__.creative2['size'] = {'width': '120', 'height': '600'}
creatives = self.__class__.service.UpdateCreatives(
[self.__class__.creative1, self.__class__.creative2])
self.assert_(isinstance(creatives, tuple))
for creative in creatives:
self.assertEqual(creative['destinationUrl'], destination_url)
def makeTestSuiteV201108():
"""Set up test suite using v201108.
Returns:
TestSuite test suite using v201108.
"""
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(CreativeServiceTestV201108))
return suite
def makeTestSuiteV201111():
"""Set up test suite using v201111.
Returns:
TestSuite test suite using v201111.
"""
suite = unittest.TestSuite()
suite.addTests(unittest.makeSuite(CreativeServiceTestV201111))
return suite
if __name__ == '__main__':
suites = []
if TEST_VERSION_V201108:
suites.append(makeTestSuiteV201108())
if TEST_VERSION_V201111:
suites.append(makeTestSuiteV201111())
if suites:
alltests = unittest.TestSuite(suites)
unittest.main(defaultTest='alltests')
| apache-2.0 | 3,119,851,365,561,125,000 | 37.747774 | 78 | 0.636698 | false |
bitmovin/bitcodin-python | bitcodin/test/output/testcase_create_ftp_output.py | 1 | 1525 | __author__ = 'Dominic Miglar <[email protected]>'
import unittest
from bitcodin import create_output
from bitcodin import delete_output
from bitcodin import FTPOutput
from bitcodin.test.settings import ftp_output_config
from bitcodin.test.bitcodin_test_case import BitcodinTestCase
class CreateFTPOutputTestCase(BitcodinTestCase):
output = None
def setUp(self):
super(CreateFTPOutputTestCase, self).setUp()
self.ftp_configuration = {
'name': 'Python API Test FTP Output',
'host': ftp_output_config.get('host', None),
'username': ftp_output_config.get('username', None),
'password': ftp_output_config.get('password', None),
'passive': True
}
def runTest(self):
output = FTPOutput(
name=self.ftp_configuration.get('name'),
host=self.ftp_configuration.get('host'),
basic_auth_user=self.ftp_configuration.get('username'),
basic_auth_password=self.ftp_configuration.get('password'),
passive=self.ftp_configuration.get('passive')
)
self.output = create_output(output)
self.assertEquals(self.output.name, output.name)
self.assertEquals(self.output.host, output.host.split('/')[0])
self.assertEquals(self.output.passive, output.passive)
def tearDown(self):
delete_output(self.output.output_id)
super(CreateFTPOutputTestCase, self).tearDown()
if __name__ == '__main__':
unittest.main()
| unlicense | -8,674,127,031,489,153,000 | 32.888889 | 71 | 0.653115 | false |
veusz/veusz | veusz/plugins/votable.py | 1 | 3549 | # Copyright (C) 2012 Science and Technology Facilities Council.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
import io
from urllib import request
from .importplugin import ImportPlugin, importpluginregistry
from .datasetplugin import Dataset1D, DatasetText
try:
from astropy.io.votable.table import parse
except ImportError:
parse = None
print('VO table import: astropy module not available')
class ImportPluginVoTable(ImportPlugin):
name = 'VO table import'
author = 'Graham Bell'
description = 'Reads datasets from VO tables'
def _load_votable(self, params):
if 'url' in params.field_results:
try:
buff = io.StringIO(request.urlopen(
params.field_results['url']).read())
except TypeError:
buff = io.BytesIO(request.urlopen(
params.field_results['url']).read())
return parse(buff, filename=params.filename)
else:
return parse(params.filename)
def doImport(self, params):
result = []
votable = self._load_votable(params)
for table in votable.iter_tables():
for field in table.fields:
fieldname = field.name
if field.datatype in [
'float', 'double', 'short', 'int', 'unsignedByte']:
result.append(Dataset1D(
fieldname, table.array[fieldname]))
elif field.datatype in ['char', 'string', 'unicodeChar']:
result.append(DatasetText(
fieldname, table.array[fieldname]))
elif field.datatype in ['floatComplex', 'doubleComplex']:
print(
'VO table import: skipping complex field ' +
fieldname)
elif field.datatype in ['boolean', 'bit']:
print(
'VO table import: skipping boolean field ' +
fieldname)
else:
print(
'VO table import: unknown data type ' +
field.datatype + ' for field ' + fieldname)
return result
def getPreview(self, params):
try:
votable = self._load_votable(params)
except:
return ('', False)
summary = []
for table in votable.iter_tables():
summary.append(table.name + ':')
for field in table.fields:
summary.append(
' ' + field.name +
' (' + field.datatype +')')
return ('\n'.join(summary), True)
if parse is not None:
importpluginregistry += [ImportPluginVoTable]
| gpl-2.0 | -2,412,068,919,430,953,500 | 34.49 | 78 | 0.560158 | false |
plaufer/wikiwsd | wsd/build/articleinserter.py | 1 | 1279 | import Queue
import threading
MAX_WAIT_QUEUE_TIMEOUT = 2
class ArticleInserter(threading.Thread):
'''Thread which inserts articles into the database
'''
def __init__(self, queue, build_view):
threading.Thread.__init__(self)
'''constructor
@param queue the queue to which the articles and redirects are read
@param build_view the database build view to use to connect to the database
'''
self._queue = queue
self._build_view = build_view
self._end = False
def run(self):
while not self._end:
try:
# fetch item from queue
item = self._queue.get(True, MAX_WAIT_QUEUE_TIMEOUT)
# insert as article or redirect respectively
if item['type'] == 'article':
self._build_view.insert_article(item['id'], item['title'])
else:
self._build_view.insert_redirect(item['title'], item['target'])
# commit and mark as done
self._build_view.commit()
self._build_view.reset_cache()
self._queue.task_done()
except Queue.Empty:
pass
def end(self):
self._end = True | mit | 5,740,311,425,071,852,000 | 29.47619 | 86 | 0.542611 | false |
ubuntu1234/pimouse_ros | test/travis_test_motors1.py | 1 | 1628 | #!/usr/bin/env python
#encoding: utf8
import unittest, rostest
import rosnode, rospy
import time
from pimouse_ros.msg import MotorFreqs
from geometry_msgs.msg import Twist
class MotorTest(unittest.TestCase):
def file_check(self,dev,value,message):
with open("/dev/" + dev,'r') as f:
self.assertEqual(f.readline(),str(value)+"\n",message)
def test_node_exist(self):
nodes = rosnode.get_node_names()
self.assertIn('/motors', nodes,"node does not exist")
def test_put_freq(self):
pub = rospy.Publisher('/motor_raw', MotorFreqs)
m = MotorFreqs()
m.left_hz = 123
m.right_hz = 456
for i in range(10):
pub.publish(m)
time.sleep(0.1)
self.file_check("rtmotor_raw_l0",m.left_hz,"wrong left value from motor_raw")
self.file_check("rtmotor_raw_r0",m.right_hz,"wrong right value from motor_raw")
def test_put_cmd_vel(self):
pub = rospy.Publisher('/cmd_vel',Twist)
m = Twist()
m.linear.x = 0.1414
m.angular.z = 1.57
for i in range(10):
pub.publish(m)
time.sleep(0.1)
self.file_check("rtmotor_raw_l0",200,"wrong left value from cmd_vel")
self.file_check("rtmotor_raw_r0",600,"wrong right value from cmd_vel")
time.sleep(1.1)
self.file_check("rtmotor_raw_r0",0,"don't stop after 1[s]")
self.file_check("rtmotor_raw_l0",0,"don't stop after 1[s]")
if __name__ == '__main__':
time.sleep(3)
rospy.init_node('travis_test_motors')
rostest.rosrun('pimouse_ros','travis_test_motors', MotorTest)
| gpl-3.0 | 8,247,758,054,122,998,000 | 32.22449 | 87 | 0.60688 | false |
Unknowncmbk/Two-Shot | backend/participant_stat.py | 1 | 2762 | #!/usr/bin/python
# local imports
import credentials
# python modules
import MySQLdb
import urllib
import json
class ParticipantStat(object):
def __init__(self, match_id, participant_id):
self.match_id = match_id
self.participant_id = participant_id
self.kills = 0
self.deaths = 0
self.assists = 0
self.magic_damage = 0
self.magic_damage_champs = 0
self.magic_damage_taken = 0
self.champ_level = 0
self.gold_earned = 0
self.win = 0
def __setKDA__(self, kills, deaths, assists):
self.kills = kills
self.deaths = deaths
self.assists = assists
def __setDamage__(self, magic_damage, magic_damage_champs, magic_damage_taken):
self.magic_damage = magic_damage
self.magic_damage_champs = magic_damage_champs
self.magic_damage_taken = magic_damage_taken
def __setOther__(self, champ_level, gold_earned, win):
self.champ_level = champ_level
self.gold_earned = gold_earned
self.win = win
def __str__(self):
return "match_id: " + str(self.match_id) + "\nparticipant_id: " + str(self.participant_id)
def save(self):
"""
Saves this ParticipantStat to the database.
"""
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''INSERT IGNORE INTO participant_stat (match_id, participant_id, kills, deaths, assists, magic_damage, magic_damage_champs, magic_damage_taken, champ_level, gold_earned, win)
VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);'''
data = (self.match_id, self.participant_id, self.kills, self.deaths, self.assists, self.magic_damage, self.magic_damage_champs, self.magic_damage_taken, self.champ_level, self.gold_earned, self.win)
cur.execute(query, data)
# commit query
db.commit()
db.close()
return True
def load(match_id, participant_id):
'''
Args:
item_id: The id of the item to query
match_id: The id of the match
participant_id: The id of the participant
Returns:
A ParticipantStat object.
'''
# Get new database instance
db = credentials.getDatabase()
cur = db.cursor()
query = '''SELECT * FROM participant_stat WHERE match_id = %s AND participant_id = %s;'''
cur.execute(query, match_id, participant_id)
pa = ""
for tup in cur:
pa = ParticipantStat(tup[0], tup[1], tup[2], tup[3], tup[4], tup[5], tup[6], tup[7], tup[8], tup[9], tup[10])
# commit query
db.commit()
db.close()
return pa
| mit | -8,455,300,076,131,699,000 | 29.033708 | 206 | 0.581101 | false |
storborg/pyweaving | pyweaving/generators/tartan.py | 1 | 3330 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import re
from .. import Draft
color_map = {
'A': (92, 140, 168), # azure / light blue
'G': (0, 104, 24), # green
'B': (44, 44, 128), # blue
'K': (0, 0, 0), # black
'W': (224, 224, 224), # white
'Y': (232, 192, 0), # yellow
'R': (200, 0, 44), # red
'P': (120, 0, 120), # purple
'C': (208, 80, 84), # ??? light red of some kind
'LP': (180, 104, 172), # light purple
}
def tartan(sett, repeats=1):
colors = []
for piece in sett.split(', '):
m = re.match('([A-Z]+)(\d+)', piece)
colors.append((
color_map[m.group(1)],
int(m.group(2)),
))
# tartan is always the same design mirrored once
colors.extend(reversed(colors))
print("Threads per repeat: %d" %
sum(count for color, count in colors))
# tartan is always 2/2 twill
# we'll need 4 shafts and 4 treadles
draft = Draft(num_shafts=4, num_treadles=4)
# do tie-up
for ii in range(4):
draft.treadles[3 - ii].shafts.add(draft.shafts[ii])
draft.treadles[3 - ii].shafts.add(draft.shafts[(ii + 1) % 4])
thread_no = 0
for ii in range(repeats):
for color, count in colors:
for jj in range(count):
draft.add_warp_thread(
color=color,
shaft=thread_no % 4,
)
draft.add_weft_thread(
color=color,
treadles=[thread_no % 4],
)
thread_no += 1
return draft
# Tartan Setts
gordon_red = ('A12, G12, R18, K12, R18, B18, W4, C16, W4, K32, A12, '
'W4, B32, W4, G36')
gordon_modern = 'B24, K4, B4, K4, B4, K24, G24, Y4, G24, K24, B24, K4, B4'
gordon_dress = ('W4, B2, W24, B4, W4, K16, B16, K4, B4, K4, B16, K16, '
'G16, K2, Y4, K2, G16, K16, W4, B4, W24, B2, W4')
gordon_old = 'B24, K4, B4, K4, B4, K24, G24, Y4, G24, K24, B24, K4, B4'
gordon_red_muted = ('A12, G12, R18, K12, R18, B18, W4, C16, W4, K32, A12, '
'W4, B32, W4, G36')
gordon_red_old_huntly = ('B28, W2, G16, W2, DG32, A12, W2, B28, W2, G28, '
'A12, G12, R16, DG12, R16, DG2')
gordon_old_ancient = 'K8, B46, K46, G44, Y6, G6, Y12'
gordon_of_abergeldie = 'G36, Y2, LP12, K2, W2, R40'
gordon_of_esselmont = 'K8, P46, K46, G44, Y6, G6, Y12'
gordon_roxburgh_district = 'B4, R2, G32, B16, W2, B2, W2, B32'
gordon_roxburgh_red = 'B6, DG52, B6, R6, B40, R6, B6, R52, DG10, W6'
gordon_roxburgh_red_muted = 'B6, DG52, B6, R6, B40, R6, B6, R52, DG10, W6'
gordon_huntly_district = ('G16, R4, G16, R24, B2, R2, B4, R2, B2, R24, B2, '
'R2, B4, R2, B2, R24, W2, R6, Y2, B24, R6, B24, '
'Y2, R6, W2, R24, G4, R6, G4, R24, G16, R4, G16')
gordon_aberdeen_district = ('W4, LG8, K32, W4, P12, A8, W4, A8, P12, W4, P6, '
'R16, LR6, W4, LR6, R16, P6, W4, K24, LG8, K24, '
'W4, P6, R16, LR6, W4, LR6, R16, P6, W4, A20, W4, '
'R12, LR6, W2, LR6, R12, W4, LG8, K32, W4, R46, '
'LR6, W4')
gordon_huntly = 'R4, MB6, FB24, K22, MG22, Y4'
| mit | -5,582,420,518,683,146,000 | 31.647059 | 79 | 0.494595 | false |
timj/scons | src/engine/SCons/Tool/docbook/__init__.py | 1 | 29293 |
"""SCons.Tool.docbook
Tool-specific initialization for Docbook.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import glob
import re
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Script
import SCons.Tool
import SCons.Util
# Get full path to this script
scriptpath = os.path.dirname(os.path.realpath(__file__))
# Local folder for the collection of DocBook XSLs
db_xsl_folder = 'docbook-xsl-1.76.1'
# Do we have libxml2/libxslt/lxml?
has_libxml2 = True
has_lxml = True
try:
import libxml2
import libxslt
except:
has_libxml2 = False
try:
import lxml
except:
has_lxml = False
# Set this to True, to prefer xsltproc over libxml2 and lxml
prefer_xsltproc = False
# Regexs for parsing Docbook XML sources of MAN pages
re_manvolnum = re.compile("<manvolnum>([^<]*)</manvolnum>")
re_refname = re.compile("<refname>([^<]*)</refname>")
#
# Helper functions
#
def __extend_targets_sources(target, source):
""" Prepare the lists of target and source files. """
if not SCons.Util.is_List(target):
target = [target]
if not source:
source = target[:]
elif not SCons.Util.is_List(source):
source = [source]
if len(target) < len(source):
target.extend(source[len(target):])
return target, source
def __init_xsl_stylesheet(kw, env, user_xsl_var, default_path):
if kw.get('DOCBOOK_XSL','') == '':
xsl_style = kw.get('xsl', env.subst(user_xsl_var))
if xsl_style == '':
path_args = [scriptpath, db_xsl_folder] + default_path
xsl_style = os.path.join(*path_args)
kw['DOCBOOK_XSL'] = xsl_style
def __select_builder(lxml_builder, libxml2_builder, cmdline_builder):
""" Selects a builder, based on which Python modules are present. """
if prefer_xsltproc:
return cmdline_builder
if not has_libxml2:
# At the moment we prefer libxml2 over lxml, the latter can lead
# to conflicts when installed together with libxml2.
if has_lxml:
return lxml_builder
else:
return cmdline_builder
return libxml2_builder
def __ensure_suffix(t, suffix):
""" Ensure that the target t has the given suffix. """
tpath = str(t)
if not tpath.endswith(suffix):
return tpath+suffix
return t
def __ensure_suffix_stem(t, suffix):
""" Ensure that the target t has the given suffix, and return the file's stem. """
tpath = str(t)
if not tpath.endswith(suffix):
stem = tpath
tpath += suffix
return tpath, stem
else:
stem, ext = os.path.splitext(tpath)
return t, stem
def __get_xml_text(root):
""" Return the text for the given root node (xml.dom.minidom). """
txt = ""
for e in root.childNodes:
if (e.nodeType == e.TEXT_NODE):
txt += e.data
return txt
def __create_output_dir(base_dir):
""" Ensure that the output directory base_dir exists. """
root, tail = os.path.split(base_dir)
dir = None
if tail:
if base_dir.endswith('/'):
dir = base_dir
else:
dir = root
else:
if base_dir.endswith('/'):
dir = base_dir
if dir and not os.path.isdir(dir):
os.makedirs(dir)
#
# Supported command line tools and their call "signature"
#
xsltproc_com = {'xsltproc' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE',
'saxon' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS',
'saxon-xslt' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -o $TARGET $DOCBOOK_XSL $SOURCE $DOCBOOK_XSLTPROCPARAMS',
'xalan' : '$DOCBOOK_XSLTPROC $DOCBOOK_XSLTPROCFLAGS -q -out $TARGET -xsl $DOCBOOK_XSL -in $SOURCE'}
xmllint_com = {'xmllint' : '$DOCBOOK_XMLLINT $DOCBOOK_XMLLINTFLAGS --xinclude $SOURCE > $TARGET'}
fop_com = {'fop' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -fo $SOURCE -pdf $TARGET',
'xep' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -valid -fo $SOURCE -pdf $TARGET',
'jw' : '$DOCBOOK_FOP $DOCBOOK_FOPFLAGS -f docbook -b pdf $SOURCE -o $TARGET'}
def __detect_cl_tool(env, chainkey, cdict):
"""
Helper function, picks a command line tool from the list
and initializes its environment variables.
"""
if env.get(chainkey,'') == '':
clpath = ''
for cltool in cdict:
clpath = env.WhereIs(cltool)
if clpath:
env[chainkey] = clpath
if not env[chainkey + 'COM']:
env[chainkey + 'COM'] = cdict[cltool]
def _detect(env):
"""
Detect all the command line tools that we might need for creating
the requested output formats.
"""
global prefer_xsltproc
if env.get('DOCBOOK_PREFER_XSLTPROC',''):
prefer_xsltproc = True
if ((not has_libxml2 and not has_lxml) or (prefer_xsltproc)):
# Try to find the XSLT processors
__detect_cl_tool(env, 'DOCBOOK_XSLTPROC', xsltproc_com)
__detect_cl_tool(env, 'DOCBOOK_XMLLINT', xmllint_com)
__detect_cl_tool(env, 'DOCBOOK_FOP', fop_com)
#
# Scanners
#
include_re = re.compile('fileref\\s*=\\s*["|\']([^\\n]*)["|\']')
sentity_re = re.compile('<!ENTITY\\s+%*\\s*[^\\s]+\\s+SYSTEM\\s+["|\']([^\\n]*)["|\']>')
def __xml_scan(node, env, path, arg):
""" Simple XML file scanner, detecting local images and XIncludes as implicit dependencies. """
# Does the node exist yet?
if not os.path.isfile(str(node)):
return []
if env.get('DOCBOOK_SCANENT',''):
# Use simple pattern matching for system entities..., no support
# for recursion yet.
contents = node.get_text_contents()
return sentity_re.findall(contents)
xsl_file = os.path.join(scriptpath,'utils','xmldepend.xsl')
if not has_libxml2 or prefer_xsltproc:
if has_lxml and not prefer_xsltproc:
from lxml import etree
xsl_tree = etree.parse(xsl_file)
doc = etree.parse(str(node))
result = doc.xslt(xsl_tree)
depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
return depfiles
else:
# Try to call xsltproc
xsltproc = env.subst("$DOCBOOK_XSLTPROC")
if xsltproc and xsltproc.endswith('xsltproc'):
result = env.backtick(' '.join([xsltproc, xsl_file, str(node)]))
depfiles = [x.strip() for x in str(result).splitlines() if x.strip() != "" and not x.startswith("<?xml ")]
return depfiles
else:
# Use simple pattern matching, there is currently no support
# for xi:includes...
contents = node.get_text_contents()
return include_re.findall(contents)
styledoc = libxml2.parseFile(xsl_file)
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.readFile(str(node), None, libxml2.XML_PARSE_NOENT)
result = style.applyStylesheet(doc, None)
depfiles = []
for x in str(result).splitlines():
if x.strip() != "" and not x.startswith("<?xml "):
depfiles.extend(x.strip().split())
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return depfiles
# Creating the instance of our XML dependency scanner
docbook_xml_scanner = SCons.Script.Scanner(function = __xml_scan,
argument = None)
#
# Action generators
#
def __generate_xsltproc_action(source, target, env, for_signature):
cmd = env['DOCBOOK_XSLTPROCCOM']
# Does the environment have a base_dir defined?
base_dir = env.subst('$base_dir')
if base_dir:
# Yes, so replace target path by its filename
return cmd.replace('$TARGET','${TARGET.file}')
return cmd
#
# Emitters
#
def __emit_xsl_basedir(target, source, env):
# Does the environment have a base_dir defined?
base_dir = env.subst('$base_dir')
if base_dir:
# Yes, so prepend it to each target
return [os.path.join(base_dir, str(t)) for t in target], source
# No, so simply pass target and source names through
return target, source
#
# Builders
#
def __build_libxml2(target, source, env):
"""
General XSLT builder (HTML/FO), using the libxml2 module.
"""
xsl_style = env.subst('$DOCBOOK_XSL')
styledoc = libxml2.parseFile(xsl_style)
style = libxslt.parseStylesheetDoc(styledoc)
doc = libxml2.readFile(str(source[0]),None,libxml2.XML_PARSE_NOENT)
# Support for additional parameters
parampass = {}
if parampass:
result = style.applyStylesheet(doc, parampass)
else:
result = style.applyStylesheet(doc, None)
style.saveResultToFilename(str(target[0]), result, 0)
style.freeStylesheet()
doc.freeDoc()
result.freeDoc()
return None
def __build_lxml(target, source, env):
"""
General XSLT builder (HTML/FO), using the lxml module.
"""
from lxml import etree
xslt_ac = etree.XSLTAccessControl(read_file=True,
write_file=True,
create_dir=True,
read_network=False,
write_network=False)
xsl_style = env.subst('$DOCBOOK_XSL')
xsl_tree = etree.parse(xsl_style)
transform = etree.XSLT(xsl_tree, access_control=xslt_ac)
doc = etree.parse(str(source[0]))
# Support for additional parameters
parampass = {}
if parampass:
result = transform(doc, **parampass)
else:
result = transform(doc)
try:
of = open(str(target[0]), "wb")
of.write(of.write(etree.tostring(result, pretty_print=True)))
of.close()
except:
pass
return None
def __xinclude_libxml2(target, source, env):
"""
Resolving XIncludes, using the libxml2 module.
"""
doc = libxml2.readFile(str(source[0]), None, libxml2.XML_PARSE_NOENT)
doc.xincludeProcessFlags(libxml2.XML_PARSE_NOENT)
doc.saveFile(str(target[0]))
doc.freeDoc()
return None
def __xinclude_lxml(target, source, env):
"""
Resolving XIncludes, using the lxml module.
"""
from lxml import etree
doc = etree.parse(str(source[0]))
doc.xinclude()
try:
doc.write(str(target[0]), xml_declaration=True,
encoding="UTF-8", pretty_print=True)
except:
pass
return None
__libxml2_builder = SCons.Builder.Builder(
action = __build_libxml2,
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__lxml_builder = SCons.Builder.Builder(
action = __build_lxml,
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__xinclude_libxml2_builder = SCons.Builder.Builder(
action = __xinclude_libxml2,
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__xinclude_lxml_builder = SCons.Builder.Builder(
action = __xinclude_lxml,
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__xsltproc_builder = SCons.Builder.Builder(
action = SCons.Action.CommandGeneratorAction(__generate_xsltproc_action,
{'cmdstr' : '$DOCBOOK_XSLTPROCCOMSTR'}),
src_suffix = '.xml',
source_scanner = docbook_xml_scanner,
emitter = __emit_xsl_basedir)
__xmllint_builder = SCons.Builder.Builder(
action = SCons.Action.Action('$DOCBOOK_XMLLINTCOM','$DOCBOOK_XMLLINTCOMSTR'),
suffix = '.xml',
src_suffix = '.xml',
source_scanner = docbook_xml_scanner)
__fop_builder = SCons.Builder.Builder(
action = SCons.Action.Action('$DOCBOOK_FOPCOM','$DOCBOOK_FOPCOMSTR'),
suffix = '.pdf',
src_suffix = '.fo',
ensure_suffix=1)
def DocbookEpub(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for ePub output.
"""
import zipfile
import shutil
def build_open_container(target, source, env):
"""Generate the *.epub file from intermediate outputs
Constructs the epub file according to the Open Container Format. This
function could be replaced by a call to the SCons Zip builder if support
was added for different compression formats for separate source nodes.
"""
zf = zipfile.ZipFile(str(target[0]), 'w')
mime_file = open('mimetype', 'w')
mime_file.write('application/epub+zip')
mime_file.close()
zf.write(mime_file.name, compress_type = zipfile.ZIP_STORED)
for s in source:
if os.path.isfile(str(s)):
head, tail = os.path.split(str(s))
if not head:
continue
s = head
for dirpath, dirnames, filenames in os.walk(str(s)):
for fname in filenames:
path = os.path.join(dirpath, fname)
if os.path.isfile(path):
zf.write(path, os.path.relpath(path, str(env.get('ZIPROOT', ''))),
zipfile.ZIP_DEFLATED)
zf.close()
def add_resources(target, source, env):
"""Add missing resources to the OEBPS directory
Ensure all the resources in the manifest are present in the OEBPS directory.
"""
hrefs = []
content_file = os.path.join(source[0].get_abspath(), 'content.opf')
if not os.path.isfile(content_file):
return
hrefs = []
if has_libxml2:
nsmap = {'opf' : 'http://www.idpf.org/2007/opf'}
# Read file and resolve entities
doc = libxml2.readFile(content_file, None, 0)
opf = doc.getRootElement()
# Create xpath context
xpath_context = doc.xpathNewContext()
# Register namespaces
for key, val in nsmap.iteritems():
xpath_context.xpathRegisterNs(key, val)
if hasattr(opf, 'xpathEval') and xpath_context:
# Use the xpath context
xpath_context.setContextNode(opf)
items = xpath_context.xpathEval(".//opf:item")
else:
items = opf.findall(".//{'http://www.idpf.org/2007/opf'}item")
for item in items:
if hasattr(item, 'prop'):
hrefs.append(item.prop('href'))
else:
hrefs.append(item.attrib['href'])
doc.freeDoc()
xpath_context.xpathFreeContext()
elif has_lxml:
from lxml import etree
opf = etree.parse(content_file)
# All the opf:item elements are resources
for item in opf.xpath('//opf:item',
namespaces= { 'opf': 'http://www.idpf.org/2007/opf' }):
hrefs.append(item.attrib['href'])
for href in hrefs:
# If the resource was not already created by DocBook XSL itself,
# copy it into the OEBPS folder
referenced_file = os.path.join(source[0].get_abspath(), href)
if not os.path.exists(referenced_file):
shutil.copy(href, os.path.join(source[0].get_abspath(), href))
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_EPUB', ['epub','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
if not env.GetOption('clean'):
# Ensure that the folders OEBPS and META-INF exist
__create_output_dir('OEBPS/')
__create_output_dir('META-INF/')
dirs = env.Dir(['OEBPS', 'META-INF'])
# Set the fixed base_dir
kw['base_dir'] = 'OEBPS/'
tocncx = __builder.__call__(env, 'toc.ncx', source[0], **kw)
cxml = env.File('META-INF/container.xml')
env.SideEffect(cxml, tocncx)
env.Depends(tocncx, kw['DOCBOOK_XSL'])
result.extend(tocncx+[cxml])
container = env.Command(__ensure_suffix(str(target[0]), '.epub'),
tocncx+[cxml], [add_resources, build_open_container])
mimetype = env.File('mimetype')
env.SideEffect(mimetype, container)
result.extend(container)
# Add supporting files for cleanup
env.Clean(tocncx, dirs)
return result
def DocbookHtml(env, target, source=None, *args, **kw):
"""
A pseudo-Builder, providing a Docbook toolchain for HTML output.
"""
# Init list of targets/sources
target, source = __extend_targets_sources(target, source)
# Init XSL stylesheet
__init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTML', ['html','docbook.xsl'])
# Setup builder
__builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
# Create targets
result = []
for t,s in zip(target,source):
r = __builder.__call__(env, __ensure_suffix(t,'.html'), s, **kw)
env.Depends(r, kw['DOCBOOK_XSL'])
result.extend(r)
return result
def DocbookHtmlChunked(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder producing chunked HTML output from Docbook sources.
    """
    # Normalize target/source: a single argument is treated as the source,
    # with 'index.html' acting as the nominal target.
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source, target = target, ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]
    # Select the stylesheet and the XSLT toolchain
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTMLCHUNKED', ['html','chunkfast.xsl'])
    xslt = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Ensure the optional output directory exists
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)
    # Only the first target/source pair is processed; the stylesheet writes
    # the remaining chunks as side effects of the single invocation.
    nodes = xslt.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(nodes, kw['DOCBOOK_XSL'])
    # Chunk names are only known after the build, so sweep them on clean
    env.Clean(nodes, glob.glob(os.path.join(base_dir, '*.html')))
    return list(nodes)
def DocbookHtmlhelp(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for HTMLHELP output.

    Only the first target/source pair is processed; the stylesheet emits the
    chunked HTML files (ar*/bk*/ch* prefixes) as side effects.
    """
    # Init target/source: a single argument is the source, with 'index.html'
    # acting as the nominal target.
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source = target
        target = ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_HTMLHELP', ['htmlhelp','htmlhelp.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Detect base dir
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)
    # Create targets
    result = []
    r = __builder.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(r, kw['DOCBOOK_XSL'])
    result.extend(r)
    # Add supporting files for cleanup.  Chunked output files start with the
    # 'ar', 'bk' or 'ch' prefixes.
    # BUGFIX: the former pattern '[ar|bk|ch]*.html' was a glob character
    # class matching any single character of {a, r, |, b, k, c, h}, not the
    # intended two-letter prefixes - use one explicit glob per prefix.
    cleanup_files = ['toc.hhc', 'htmlhelp.hhp', 'index.hhk']
    for prefix in ('ar', 'bk', 'ch'):
        cleanup_files.extend(glob.glob(os.path.join(base_dir, prefix + '*.html')))
    env.Clean(r, cleanup_files)
    return result
def DocbookPdf(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder producing PDF output via Docbook -> XSL-FO -> PDF.
    """
    # Normalize the target/source lists
    target, source = __extend_targets_sources(target, source)
    # Select the stylesheet and the XSLT toolchain
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_PDF', ['fo','docbook.xsl'])
    xslt = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    nodes = []
    for tgt, src in zip(target, source):
        # First transform to formatting objects (.fo), then render with fop
        tgt, stem = __ensure_suffix_stem(tgt, '.pdf')
        fo = xslt.__call__(env, stem + '.fo', src, **kw)
        env.Depends(fo, kw['DOCBOOK_XSL'])
        nodes.extend(fo)
        nodes.extend(__fop_builder.__call__(env, tgt, fo, **kw))
    return nodes
def DocbookMan(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder, providing a Docbook toolchain for Man page output.

    The man volume number and the output file names are derived from the
    refmeta/manvolnum and refnamediv/refname elements of the source document
    when it can be parsed; a regex scan is used as fallback.
    """
    # Init list of targets/sources
    target, source = __extend_targets_sources(target, source)
    # Init XSL stylesheet
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_MAN', ['manpages','docbook.xsl'])
    # Setup builder
    __builder = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Create targets
    result = []
    for t,s in zip(target,source):
        volnum = "1"
        outfiles = []
        srcfile = __ensure_suffix(str(s),'.xml')
        if os.path.isfile(srcfile):
            try:
                import xml.dom.minidom
                dom = xml.dom.minidom.parse(srcfile)
                # Extract volume number, default is 1
                for node in dom.getElementsByTagName('refmeta'):
                    for vol in node.getElementsByTagName('manvolnum'):
                        volnum = __get_xml_text(vol)
                # Extract output filenames
                for node in dom.getElementsByTagName('refnamediv'):
                    for ref in node.getElementsByTagName('refname'):
                        outfiles.append(__get_xml_text(ref)+'.'+volnum)
            except Exception:
                # BUGFIX: was a bare "except:", which also swallowed
                # SystemExit/KeyboardInterrupt.  On any XML parsing problem
                # fall back to simple regex parsing of the raw text.
                f = open(srcfile, 'r')
                try:
                    content = f.read()
                finally:
                    f.close()
                for m in re_manvolnum.finditer(content):
                    volnum = m.group(1)
                for m in re_refname.finditer(content):
                    outfiles.append(m.group(1)+'.'+volnum)

            if not outfiles:
                # Use stem of the source file
                spath = str(s)
                if not spath.endswith('.xml'):
                    outfiles.append(spath+'.'+volnum)
                else:
                    stem, ext = os.path.splitext(spath)
                    outfiles.append(stem+'.'+volnum)
        else:
            # We have to completely rely on the given target name
            outfiles.append(t)

        __builder.__call__(env, outfiles[0], s, **kw)
        env.Depends(outfiles[0], kw['DOCBOOK_XSL'])
        result.append(outfiles[0])
        if len(outfiles) > 1:
            env.Clean(outfiles[0], outfiles[1:])

    return result
def DocbookSlidesPdf(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder producing PDF slides via Docbook -> XSL-FO -> PDF.
    """
    # Normalize the target/source lists
    target, source = __extend_targets_sources(target, source)
    # Select the slides stylesheet and the XSLT toolchain
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESPDF', ['slides','fo','plain.xsl'])
    xslt = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    nodes = []
    for tgt, src in zip(target, source):
        # First transform to formatting objects (.fo), then render with fop
        tgt, stem = __ensure_suffix_stem(tgt, '.pdf')
        fo = xslt.__call__(env, stem + '.fo', src, **kw)
        env.Depends(fo, kw['DOCBOOK_XSL'])
        nodes.extend(fo)
        nodes.extend(__fop_builder.__call__(env, tgt, fo, **kw))
    return nodes
def DocbookSlidesHtml(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder producing HTML slides output from Docbook sources.
    """
    # Normalize target/source: a single argument is treated as the source,
    # with 'index.html' acting as the nominal target.
    if not SCons.Util.is_List(target):
        target = [target]
    if not source:
        source, target = target, ['index.html']
    elif not SCons.Util.is_List(source):
        source = [source]
    # Select the stylesheet and the XSLT toolchain
    __init_xsl_stylesheet(kw, env, '$DOCBOOK_DEFAULT_XSL_SLIDESHTML', ['slides','html','plain.xsl'])
    xslt = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    # Ensure the optional output directory exists
    base_dir = kw.get('base_dir', '')
    if base_dir:
        __create_output_dir(base_dir)
    # Only the first target/source pair is processed
    nodes = xslt.__call__(env, __ensure_suffix(str(target[0]), '.html'), source[0], **kw)
    env.Depends(nodes, kw['DOCBOOK_XSL'])
    # The toc and per-foil files only appear at build time; remove them on clean
    env.Clean(nodes, [os.path.join(base_dir, 'toc.html')] +
                     glob.glob(os.path.join(base_dir, 'foil*.html')))
    return list(nodes)
def DocbookXInclude(env, target, source, *args, **kw):
    """
    A pseudo-Builder that resolves XIncludes as a standalone processing step.
    """
    # Normalize the target/source lists
    target, source = __extend_targets_sources(target, source)
    # Pick the best available XInclude resolver
    resolver = __select_builder(__xinclude_lxml_builder, __xinclude_libxml2_builder, __xmllint_builder)
    nodes = []
    for tgt, src in zip(target, source):
        nodes.extend(resolver.__call__(env, tgt, src, **kw))
    return nodes
def DocbookXslt(env, target, source=None, *args, **kw):
    """
    A pseudo-Builder applying an arbitrary XSL transformation to the input.
    """
    # Normalize the target/source lists
    target, source = __extend_targets_sources(target, source)
    # The stylesheet comes from the 'xsl' keyword, defaulting to transform.xsl
    kw['DOCBOOK_XSL'] = kw.get('xsl', 'transform.xsl')
    # Pick the best available XSLT toolchain
    xslt = __select_builder(__lxml_builder, __libxml2_builder, __xsltproc_builder)
    nodes = []
    for tgt, src in zip(target, source):
        built = xslt.__call__(env, tgt, src, **kw)
        env.Depends(built, kw['DOCBOOK_XSL'])
        nodes.extend(built)
    return nodes
def generate(env):
    """Add Builders and construction variables for docbook to an Environment."""
    env.SetDefault(
        # Default names for customized XSL stylesheets
        DOCBOOK_DEFAULT_XSL_EPUB = '',
        DOCBOOK_DEFAULT_XSL_HTML = '',
        DOCBOOK_DEFAULT_XSL_HTMLCHUNKED = '',
        DOCBOOK_DEFAULT_XSL_HTMLHELP = '',
        DOCBOOK_DEFAULT_XSL_PDF = '',
        DOCBOOK_DEFAULT_XSL_MAN = '',
        DOCBOOK_DEFAULT_XSL_SLIDESPDF = '',
        DOCBOOK_DEFAULT_XSL_SLIDESHTML = '',

        # Paths to the detected executables
        DOCBOOK_XSLTPROC = '',
        DOCBOOK_XMLLINT = '',
        DOCBOOK_FOP = '',

        # Additional flags for the text processors
        DOCBOOK_XSLTPROCFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XMLLINTFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_FOPFLAGS = SCons.Util.CLVar(''),
        DOCBOOK_XSLTPROCPARAMS = SCons.Util.CLVar(''),

        # Default command lines for the detected executables
        DOCBOOK_XSLTPROCCOM = xsltproc_com['xsltproc'],
        DOCBOOK_XMLLINTCOM = xmllint_com['xmllint'],
        DOCBOOK_FOPCOM = fop_com['fop'],

        # Screen output for the text processors
        DOCBOOK_XSLTPROCCOMSTR = None,
        DOCBOOK_XMLLINTCOMSTR = None,
        DOCBOOK_FOPCOMSTR = None,
        )

    # Detect the available external processors and adjust the defaults above.
    _detect(env)

    # Expose the pseudo-builders as environment methods.
    env.AddMethod(DocbookEpub, "DocbookEpub")
    env.AddMethod(DocbookHtml, "DocbookHtml")
    env.AddMethod(DocbookHtmlChunked, "DocbookHtmlChunked")
    env.AddMethod(DocbookHtmlhelp, "DocbookHtmlhelp")
    env.AddMethod(DocbookPdf, "DocbookPdf")
    env.AddMethod(DocbookMan, "DocbookMan")
    env.AddMethod(DocbookSlidesPdf, "DocbookSlidesPdf")
    env.AddMethod(DocbookSlidesHtml, "DocbookSlidesHtml")
    env.AddMethod(DocbookXInclude, "DocbookXInclude")
    env.AddMethod(DocbookXslt, "DocbookXslt")
def exists(env):
    """The docbook tool is reported as unconditionally available."""
    return 1
| mit | 2,695,215,425,218,211,300 | 32.747696 | 130 | 0.595637 | false |
smjhnits/Praktikum_TU_D_16-17 | Anfängerpraktikum/Protokolle/V355_Gekoppelte_Schwingungen/LaTex-Dateien/Messungc_Plot1.py | 1 | 2021 | import numpy as np
from scipy.stats import sem
from uncertainties import ufloat
import uncertainties.unumpy as unp
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit
# Circuit constants of the coupled oscillator setup: inductance L,
# capacitance C, parasitic coil capacitance Csp and resistance R.
L = 32.51 * 10 ** (-3)
C = 0.801 * 10 ** (-9)
Csp = 0.037 * 10 ** (-9)
R = 48

# Sweep start/stop frequencies (Hz) of each measurement run.
Start = np.array([30.85, 30.84, 30.83, 30.82, 30.81, 30.80, 30.79, 30.77]) * 10 ** (3)
Stop = np.array([55.05, 50, 40, 40, 40, 40, 40, 40]) * 10 ** (3)
# Sweep duration (s) and the measured times (s) at which the resonance occurred.
Sweep_Zeit = 2
Zeiten = np.array([1.36, 1, 1.475, 1.125, 0.925, 0.740, 0.6, 0.5])
# Measured nu_+ resonance frequencies (Hz).
Nü_positiv = np.array([30.77, 30.79, 30.80, 30.81, 30.82, 30.83, 30.84, 30.85]) * 10 ** (3)
# Coupling capacitances C_k (F), carrying a 0.3 % relative uncertainty.
Kopplungskapazitäten = np.array([9.99, 8, 6.47, 5.02, 4.00, 3.00, 2.03, 1.01]) * 10 ** (-9)
C_K_Error = np.array([ufloat(n, 0.003*n) for n in Kopplungskapazitäten])

# Theoretical resonance frequencies: nu_- depends on C_k, nu_+ does not
# (nu_p_theo1 just repeats the single nu_+ value for plotting).
nu_m_theo = np.array([1 / ( 2 * np.pi * unp.sqrt( L * ( (1/C + 2/n)**(-1) + Csp) ) ) for n in C_K_Error])
nu_p_theo = 1 / ( 2 * np.pi * np.sqrt( L * ( C + Csp) ) )
nu_p_theo1 = np.array([nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo, nu_p_theo ])
nu_m_theo1 = np.array([unp.nominal_values(n) for n in nu_m_theo])

# Convert the measured sweep times into frequencies: the sweep covers
# (Stop - Start) in Sweep_Zeit seconds, so the resonance sits at
# Start + (t / Sweep_Zeit) * (Stop - Start).
Differenzen = np.array([ Stop[i]-n for i,n in enumerate(Start)])
Zeitverhältniss = np.array([n/Sweep_Zeit for n in Zeiten])
Abstände = np.array([Differenzen[i]*n for i,n in enumerate(Zeitverhältniss)])
nu_m_expC = np.array([n + Abstände[i] for i,n in enumerate(Start)])
nu_m_expC1 = nu_m_expC[::-1]  # reversed so the order matches Kopplungskapazitäten

# Plot measured vs. theoretical nu_- and nu_+ against the coupling capacitance.
plt.plot(Kopplungskapazitäten, unp.nominal_values(nu_m_expC1)*10**(-3), 'bx', label = r'Messung 3.3.1: $\nu_{-}$')
plt.plot(Kopplungskapazitäten, nu_m_theo1*10**(-3), 'rx', label = r'Theoriewerte: $\nu_{-}$')
plt.plot(Kopplungskapazitäten, Nü_positiv*10**(-3), 'mx', label = r'Messung 3.3.1: $\nu_{+}$')
plt.plot(Kopplungskapazitäten, nu_p_theo1*10**(-3), 'yx', label = r'Theoriewerte: $\nu_{+}$')
plt.xlabel(r'$Kopplungskapazität \,\, C_k \,\, in \,\, \mathrm{F}$')
plt.ylabel(r'$Frequenzen \,\, \nu \,\, in \,\, \mathrm{kHz}$')
plt.legend(loc = 'best')
plt.savefig('Messungc_Plot1.pdf')
plt.show()
| mit | -687,076,574,687,488,800 | 44.636364 | 114 | 0.616036 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py | 1 | 16036 | #####################################################################
#
# editor.py
#
# A general purpose text editor, built on top of the win32ui edit
# type, which is built on an MFC CEditView
#
#
# We now support reloading of externally modified documented
# (eg, presumably by some other process, such as source control or
# another editor.
# We also suport auto-loading of externally modified files.
# - if the current document has not been modified in this
# editor, but has been modified on disk, then the file
# can be automatically reloaded.
#
# Note that it will _always_ prompt you if the file in the editor has been modified.
import win32ui
import win32api
import win32con
import regex
import re
import string
import sys, os
import traceback
from pywin.mfc import docview, dialog, afxres
from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat
# Pre-compiled patterns (old 'regex' module syntax): patImport recognizes
# "import <module>" lines, patIndent captures a line's leading whitespace.
patImport=regex.symcomp('import \(<name>.*\)')
patIndent=regex.compile('^\\([ \t]*[~ \t]\\)')

# Private command IDs used by the context menu handlers.
ID_LOCATE_FILE = 0xe200
ID_GOTO_LINE = 0xe2001
MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and coloreditor.py

# Key Codes that modify the buffer when Ctrl or Alt are NOT pressed.
MODIFYING_VK_KEYS = [win32con.VK_BACK, win32con.VK_TAB, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
# Virtual-key codes 48-90 cover the printable '0'-'9' and 'A'-'Z' keys.
for k in range(48, 91):
	MODIFYING_VK_KEYS.append(k)

# Key Codes that modify the buffer when Ctrl is pressed.
MODIFYING_VK_KEYS_CTRL = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
# Key Codes that modify the buffer when Alt is pressed.
MODIFYING_VK_KEYS_ALT = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]

# The editor itself starts here.
# Using the MFC Document/View model, we have an EditorDocument, which is responsible for
# managing the contents of the file, and a view which is responsible for rendering it.
#
# Due to a limitation in the Windows edit controls, we are limited to one view
# per document, although nothing in this code assumes this (I hope!)
isRichText=1 # We are using the Rich Text control.  This has not been tested with value "0" for quite some time!

# Base class for the document; the rich-edit variant is the one in active use.
#ParentEditorDocument=docview.Document
from document import EditorDocumentBase
ParentEditorDocument=EditorDocumentBase
class EditorDocument(ParentEditorDocument):
	"""The data side of the editor: loads, translates and saves the file.

	Detection/reloading of externally modified files is inherited from
	EditorDocumentBase; this class adds text I/O and line coloring support.
	"""
	#
	# File loading and saving operations
	#
	def OnOpenDocument(self, filename):
		# Read 'filename' into the first view; returns 1 on success, 0 on
		# failure (missing file, file too big on Win32s, NULL bytes).
		#
		# handle Unix and PC text file format.
		#

		# Get the "long name" of the file name, as it may have been translated
		# to short names by the shell.
		self.SetPathName(filename) # Must set this early!
		# Now do the work!
		self.BeginWaitCursor()
		win32ui.SetStatusText("Loading file...",1)
		try:
			f = open(filename,"rb")
		except IOError:
			win32ui.MessageBox(filename + '\nCan not find this file\nPlease verify that the correct path and file name are given')
			self.EndWaitCursor()
			return 0
		raw=f.read()
		f.close()
		# Normalize line endings before handing the text to the edit control.
		contents = self.TranslateLoadedData(raw)
		rc = 0
		if win32ui.IsWin32s() and len(contents)>62000: # give or take a few bytes
			win32ui.MessageBox("This file is too big for Python on Windows 3.1\r\nPlease use another editor to view this file.")
		else:
			try:
				self.GetFirstView().SetWindowText(contents)
				rc = 1
			except TypeError: # Null byte in file.
				win32ui.MessageBox("This file contains NULL bytes, and can not be edited")
				rc = 0

		self.EndWaitCursor()
		self.SetModifiedFlag(0) # No longer dirty
		self._DocumentStateChanged()
		return rc

	def TranslateLoadedData(self, data):
		"""Given raw data read from a file, massage it suitable for the edit window"""
		# if a CR in the first 250 chars, then perform the expensive translate
		if string.find(data[:250],'\r')==-1:
			win32ui.SetStatusText("Translating from Unix file format - please wait...",1)
			return re.sub('\r*\n','\r\n',data)
		else:
			return data

	def SaveFile(self, fileName):
		# Persist the view's contents; the rich-text control saves itself.
		if isRichText:
			view = self.GetFirstView()
			view.SaveTextFile(fileName)
		else: # Old style edit view window.
			self.GetFirstView().SaveFile(fileName)
		try:
			# Make sure line cache has updated info about me!
			import linecache
			linecache.checkcache()
		except:
			pass

	#
	# Color state stuff
	#
	def SetAllLineColors(self, color = None):
		# Propagate a whole-document recolor to every attached view.
		for view in self.GetAllViews():
			view.SetAllLineColors(color)

	def SetLineColor(self, lineNo, color):
		"Color a line of all views"
		for view in self.GetAllViews():
			view.SetLineColor(lineNo, color)

#	def StreamTextOut(self, data): ### This seems unreliable???
#		self.saveFileHandle.write(data)
#		return 1 # keep em coming!
# Base class for the view; the rich-edit variant is the one in active use.
#ParentEditorView=docview.EditView
ParentEditorView=docview.RichEditView
class EditorView(ParentEditorView):
	"""The visual side of the editor - a rich-edit control hosted in an MFC view.

	Handles keyboard/mouse interaction, (smart) indentation, block
	indent/dedent, line coloring and the right-click context menu.
	"""
	def __init__(self, doc):
		ParentEditorView.__init__(self, doc)
		if isRichText:
			self.SetWordWrap(win32ui.CRichEditView_WrapNone)

		self.addToMRU = 1
		self.HookHandlers()
		# Re-entrancy guard for the external file-change check.
		self.bCheckingFile = 0

		self.defCharFormat = GetEditorFontOption("Default Font", defaultCharacterFormat)

		# Smart tabs override everything else if context can be worked out.
		self.bSmartTabs = GetEditorOption("Smart Tabs", 1)

		self.tabSize = GetEditorOption("Tab Size", 8)
		self.indentSize = GetEditorOption("Indent Size", 8)

		# If next indent is at a tab position, and useTabs is set, a tab will be inserted.
		self.bUseTabs = GetEditorOption("Use Tabs", 1)

	def OnInitialUpdate(self):
		rc = self._obj_.OnInitialUpdate()
		self.SetDefaultCharFormat(self.defCharFormat)
		return rc

	def CutCurLine(self):
		"Cut the line containing the cursor to the clipboard."
		curLine = self._obj_.LineFromChar()
		nextLine = curLine+1
		start = self._obj_.LineIndex(curLine)
		end = self._obj_.LineIndex(nextLine)
		if end==0: # must be last line.
			# BUGFIX: was "self.end.GetLineLength(curLine)" - "self.end" does
			# not exist, so cutting the last line raised AttributeError.
			end = start + self._obj_.GetLineLength(curLine)
		self._obj_.SetSel(start,end)
		self._obj_.Cut()

	def _PrepareUserStateChange(self):
		"Return selection, lineindex, etc info, so it can be restored"
		self.SetRedraw(0)
		return self.GetModify(), self.GetSel(), self.GetFirstVisibleLine()

	def _EndUserStateChange(self, info):
		# Restore the state captured by _PrepareUserStateChange and repaint.
		scrollOff = info[2] - self.GetFirstVisibleLine()
		if scrollOff:
			self.LineScroll(scrollOff)
		self.SetSel(info[1])
		self.SetModify(info[0])
		self.SetRedraw(1)
		self.InvalidateRect()
		self.UpdateWindow()

	def _UpdateUIForState(self):
		# Keep the control's read-only state in sync with the document's.
		self.SetReadOnly(self.GetDocument()._IsReadOnly())

	def SetAllLineColors(self, color = None):
		"Color the whole buffer; None selects the default color."
		if isRichText:
			info = self._PrepareUserStateChange()
			try:
				if color is None: color = self.defCharFormat[4]
				self.SetSel(0,-1)
				self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
			finally:
				self._EndUserStateChange(info)

	def SetLineColor(self, lineNo, color):
		"lineNo is the 1 based line number to set.  If color is None, default color is used."
		if isRichText:
			info = self._PrepareUserStateChange()
			try:
				if color is None: color = self.defCharFormat[4]
				lineNo = lineNo-1
				startIndex = self.LineIndex(lineNo)
				if startIndex!=-1:
					self.SetSel(startIndex, self.LineIndex(lineNo+1))
					self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
			finally:
				self._EndUserStateChange(info)

	def Indent(self):
		"""Insert an indent to move the cursor to the next tab position.

		Honors the tab size and 'use tabs' settings.  Assumes the cursor is already at the
		position to be indented, and the selection is a single character (ie, not a block)
		"""
		start, end = self._obj_.GetSel()
		startLine = self._obj_.LineFromChar(start)
		line = self._obj_.GetLine(startLine)
		realCol = start - self._obj_.LineIndex(startLine)
		# Calculate the next tab stop.
		# Expand existing tabs.
		curCol = 0
		for ch in line[:realCol]:
			if ch=='\t':
				curCol = ((curCol / self.tabSize) + 1) * self.tabSize
			else:
				curCol = curCol + 1
		nextColumn = ((curCol / self.indentSize) + 1) * self.indentSize
		ins = None
		if self.bSmartTabs:
			# Look for some context.
			if realCol==0: # Start of the line - see if the line above can tell us
				lookLine = startLine-1
				while lookLine >= 0:
					check = self._obj_.GetLine(lookLine)[0:1]
					if check in ['\t', ' ']:
						ins = check
						break
					lookLine = lookLine - 1
			else: # See if the previous char can tell us
				check = line[realCol-1]
				if check in ['\t', ' ']:
					ins = check

		# Either smart tabs off, or not smart enough!
		# Use the "old style" settings.
		if ins is None:
			if self.bUseTabs and nextColumn % self.tabSize==0:
				ins = '\t'
			else:
				ins = ' '

		if ins == ' ':
			# Calc the number of spaces to take us to the next stop
			ins = ins * (nextColumn - curCol)

		self._obj_.ReplaceSel(ins)

	def BlockDent(self, isIndent, startLine, endLine):
		" Indent/Undent all lines specified "
		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
		tabSize=self.tabSize # hard-code for now!
		info = self._PrepareUserStateChange()
		try:
			for lineNo in range(startLine, endLine):
				pos=self._obj_.LineIndex(lineNo)
				self._obj_.SetSel(pos, pos)
				if isIndent:
					self.Indent()
				else:
					line = self._obj_.GetLine(lineNo)
					try:
						noToDel = 0
						if line[0]=='\t':
							noToDel = 1
						elif line[0]==' ':
							# Delete up to tabSize leading spaces.
							for noToDel in range(0,tabSize):
								if line[noToDel]!=' ':
									break
							else:
								noToDel=tabSize
						if noToDel:
							self._obj_.SetSel(pos, pos+noToDel)
							self._obj_.Clear()
					except IndexError:
						# Empty line - nothing to dedent.
						pass
		finally:
			self._EndUserStateChange(info)
		self.GetDocument().SetModifiedFlag(1) # Now dirty
		self._obj_.SetSel(self.LineIndex(startLine), self.LineIndex(endLine))

	def GotoLine(self, lineNo = None):
		"Move the cursor to the given 1-based line, prompting if none given."
		try:
			if lineNo is None:
				lineNo = string.atoi(raw_input("Enter Line Number"))
		except (ValueError, KeyboardInterrupt):
			return 0
		self.GetLineCount() # Seems to be needed when file first opened???
		charNo = self.LineIndex(lineNo-1)
		self.SetSel(charNo)

	def HookHandlers(self): # children can override, but should still call me!
		self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE)
		self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
		self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
		self.HookKeyStroke(self.OnKeyCtrlY, 25) # ^Y
		self.HookKeyStroke(self.OnKeyCtrlG, 7) # ^G
		self.HookKeyStroke(self.OnKeyTab, 9) # TAB
		self.HookKeyStroke(self.OnKeyEnter, 13) # Enter
		self.HookCommand(self.OnCmdLocateFile, ID_LOCATE_FILE)
		self.HookCommand(self.OnCmdGotoLine, ID_GOTO_LINE)
		self.HookCommand(self.OnEditPaste, afxres.ID_EDIT_PASTE)
		self.HookCommand(self.OnEditCut, afxres.ID_EDIT_CUT)

	# Hook Handlers
	def OnSetFocus(self,msg):
		# Even though we use file change notifications, we should be very sure about it here.
		self.OnCheckExternalDocumentUpdated(msg)

	def OnRClick(self,params):
		# Build and show the context menu; adds a "Locate module" entry when
		# the current line is an import statement.
		menu = win32ui.CreatePopupMenu()

		# look for a module name
		line=string.strip(self._obj_.GetLine())
		flags=win32con.MF_STRING|win32con.MF_ENABLED
		if patImport.match(line)==len(line):
			menu.AppendMenu(flags, ID_LOCATE_FILE, "&Locate %s.py"%patImport.group('name'))
			menu.AppendMenu(win32con.MF_SEPARATOR)
		menu.AppendMenu(flags, win32ui.ID_EDIT_UNDO, '&Undo')
		menu.AppendMenu(win32con.MF_SEPARATOR)
		menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t')
		menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy')
		menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste')
		menu.AppendMenu(flags, win32con.MF_SEPARATOR)
		menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')
		menu.AppendMenu(flags, win32con.MF_SEPARATOR)
		menu.AppendMenu(flags, ID_GOTO_LINE, '&Goto line...')
		menu.TrackPopupMenu(params[5])
		return 0

	def OnCmdGotoLine(self, cmd, code):
		self.GotoLine()
		return 0

	def OnCmdLocateFile(self, cmd, code):
		# NOTE: relies on patImport having matched in OnRClick, which is the
		# only place this command is made available.
		modName = patImport.group('name')
		if not modName:
			return 0
		import pywin.framework.scriptutils
		fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
		if fileName is None:
			win32ui.SetStatusText("Can't locate module %s" % modName)
		else:
			win32ui.GetApp().OpenDocumentFile(fileName)
		return 0

	# Key handlers
	def OnKeyEnter(self, key):
		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
		curLine = self._obj_.GetLine()
		self._obj_.ReplaceSel('\r\n') # insert the newline
		# If the current line indicates the next should be indented,
		# then copy the current indentation to this line.
		res = patIndent.match(curLine,0)
		if res>0 and string.strip(curLine):
			curIndent = patIndent.group(1)
			self._obj_.ReplaceSel(curIndent)
		return 0 # dont pass on

	def OnKeyCtrlY(self, key):
		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
		self.CutCurLine()
		return 0 # dont let him have it!

	def OnKeyCtrlG(self, key):
		self.GotoLine()
		return 0 # dont let him have it!

	def OnKeyTab(self, key):
		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
		start, end = self._obj_.GetSel()
		if start==end: # normal TAB key
			self.Indent()
			return 0 # we handled this.

		# Otherwise it is a block indent/dedent.
		if start>end:
			start, end = end, start # swap them.
		startLine = self._obj_.LineFromChar(start)
		endLine = self._obj_.LineFromChar(end)

		self.BlockDent(win32api.GetKeyState(win32con.VK_SHIFT)>=0, startLine, endLine)
		return 0

	def OnEditPaste(self, id, code):
		# Return 1 if we can make the file editable.(or it already is!)
		return self.GetDocument().CheckMakeDocumentWritable()

	def OnEditCut(self, id, code):
		# Return 1 if we can make the file editable.(or it already is!)
		return self.GetDocument().CheckMakeDocumentWritable()

	def OnKeyDown(self, msg):
		# Veto buffer-modifying keys when the document can not be made
		# writable; the modifier state selects the relevant key list.
		key = msg[2]
		if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000:
			modList = MODIFYING_VK_KEYS_CTRL
		elif win32api.GetKeyState(win32con.VK_MENU) & 0x8000:
			modList = MODIFYING_VK_KEYS_ALT
		else:
			modList = MODIFYING_VK_KEYS

		if key in modList:
			# Return 1 if we can make the file editable.(or it already is!)
			return self.GetDocument().CheckMakeDocumentWritable()
		return 1 # Pass it on OK

	def OnCheckExternalDocumentUpdated(self, msg):
		# bCheckingFile prevents recursion while the check itself pumps messages.
		if self._obj_ is None or self.bCheckingFile: return
		self.bCheckingFile = 1
		self.GetDocument().CheckExternalDocumentUpdated()
		self.bCheckingFile = 0
from template import EditorTemplateBase
class EditorTemplate(EditorTemplateBase):
	"""Doc template wiring EditorDocument/EditorView into a rich-edit document."""
	def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
		# Fall back to this module's document and view classes when the
		# caller does not supply their own.
		if makeDoc is None:
			doc_class = EditorDocument
		else:
			doc_class = makeDoc
		if makeView is None:
			view_class = EditorView
		else:
			view_class = makeView
		EditorTemplateBase.__init__(self, res, doc_class, makeFrame, view_class)

	def _CreateDocTemplate(self, resourceId):
		return win32ui.CreateRichEditDocTemplate(resourceId)

	def CreateWin32uiDocument(self):
		return self.DoCreateRichEditDoc()
def Create(fileName = None, title=None, template = None):
	# Open (or create) a document in this editor.  'title' and 'template'
	# are accepted for interface compatibility but are unused here.
	return editorTemplate.OpenDocumentFile(fileName)
from pywin.framework.editor import GetDefaultEditorModuleName
prefModule = GetDefaultEditorModuleName()
# Initialize only if this is the "default" editor.
if __name__==prefModule:
	# For debugging purposes, when this module may be reloaded many times.
	try:
		win32ui.GetApp().RemoveDocTemplate(editorTemplate)
	except (NameError, win32ui.error):
		# First import - no template registered yet.
		pass

	editorTemplate = EditorTemplate()
	win32ui.GetApp().AddDocTemplate(editorTemplate)
| epl-1.0 | 6,208,114,795,459,562,000 | 32.486022 | 133 | 0.695373 | false |
ganeti/ganeti | lib/config/__init__.py | 1 | 111167 | #
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Configuration management for Ganeti
This module provides the interface to the Ganeti cluster configuration.
The configuration data is stored on every node but is updated on the master
only. After each update, the master distributes the data to the other nodes.
Currently, the data storage format is JSON. YAML was slow and consuming too
much memory.
"""
# TODO: Break up this file into multiple chunks - Wconfd RPC calls, local config
# manipulations, grouped by object they operate on (cluster/instance/disk)
# pylint: disable=C0302
# pylint: disable=R0904
# R0904: Too many public methods
import copy
import os
import random
import logging
import time
import threading
import itertools
from ganeti.config.temporary_reservations import TemporaryReservationManager
from ganeti.config.utils import ConfigSync, ConfigManager
from ganeti.config.verify import (VerifyType, VerifyNic, VerifyIpolicy,
ValidateConfig)
from ganeti import errors
from ganeti import utils
from ganeti import constants
import ganeti.wconfd as wc
from ganeti import objects
from ganeti import serializer
from ganeti import uidpool
from ganeti import netutils
from ganeti import runtime
from ganeti import pathutils
from ganeti import network
def GetWConfdContext(ec_id, livelock):
  """Prepare a context for communication with WConfd.

  WConfd needs to know the identity of each caller to properly manage locks
  and detect job death, so this builds the identity triple from an optional
  job ID and a livelock file.

  @type ec_id: int, or None
  @param ec_id: the job ID or None, if the caller isn't a job
  @type livelock: L{ganeti.utils.livelock.LiveLock}
  @param livelock: a livelock object holding the lockfile needed for WConfd
  @return: the WConfd context

  """
  # Jobs identify themselves by their ID, all other callers by thread name.
  if ec_id is None:
    identity = threading.current_thread().getName()
  else:
    identity = ec_id
  return (identity, livelock.GetPath(), os.getpid())
def GetConfig(ec_id, livelock, **kwargs):
  """A utility function for constructing instances of ConfigWriter.

  It prepares a WConfd context and uses it to create a ConfigWriter instance.

  @type ec_id: int, or None
  @param ec_id: the job ID or None, if the caller isn't a job
  @type livelock: L{ganeti.utils.livelock.LiveLock}
  @param livelock: a livelock object holding the lockfile needed for WConfd
  @type kwargs: dict
  @param kwargs: Any additional arguments for the ConfigWriter constructor
  @rtype: L{ConfigWriter}
  @return: the ConfigWriter context

  """
  # When the config is opened in accept_foreign mode, the RPC client must
  # not insist on talking to the master node either.
  allow_non_master = kwargs.get('accept_foreign', False)
  kwargs.update(
    wconfdcontext=GetWConfdContext(ec_id, livelock),
    wconfd=wc.Client(allow_non_master=allow_non_master),
  )
  return ConfigWriter(**kwargs)
# Synthetic job id used for resource management at config upgrade time
_UPGRADE_CONFIG_JID = "jid-cfg-upgrade"
def _MatchNameComponentIgnoreCase(short_name, names):
  """Wrapper around L{utils.text.MatchNameComponent}, forcing a
  case-insensitive match of C{short_name} against C{names}.

  """
  return utils.MatchNameComponent(short_name, names, case_sensitive=False)
def _CheckInstanceDiskIvNames(disks):
  """Checks if instance's disks' C{iv_name} attributes are in order.

  @type disks: list of L{objects.Disk}
  @param disks: List of disks
  @rtype: list of tuples; (int, string, string)
  @return: List of wrongly named disks, each tuple contains disk index,
    expected and actual name

  """
  return [(idx, "disk/%s" % idx, disk.iv_name)
          for (idx, disk) in enumerate(disks)
          if disk.iv_name != "disk/%s" % idx]
class ConfigWriter(object):
"""The interface to the cluster configuration.
WARNING: The class is no longer thread-safe!
Each thread must construct a separate instance.
@ivar _all_rms: a list of all temporary reservation managers
Currently the class fulfills 3 main functions:
1. lock the configuration for access (monitor)
2. reload and write the config if necessary (bridge)
3. provide convenient access methods to config data (facade)
"""
def __init__(self, cfg_file=None, offline=False, _getents=runtime.GetEnts,
accept_foreign=False, wconfdcontext=None, wconfd=None):
self.write_count = 0
self._config_data = None
self._SetConfigData(None)
self._offline = offline
if cfg_file is None:
self._cfg_file = pathutils.CLUSTER_CONF_FILE
else:
self._cfg_file = cfg_file
self._getents = _getents
self._temporary_ids = TemporaryReservationManager()
self._all_rms = [self._temporary_ids]
# Note: in order to prevent errors when resolving our name later,
# we compute it here once and reuse it; it's
# better to raise an error before starting to modify the config
# file than after it was modified
self._my_hostname = netutils.Hostname.GetSysName()
self._cfg_id = None
self._wconfdcontext = wconfdcontext
self._wconfd = wconfd
self._accept_foreign = accept_foreign
self._lock_count = 0
self._lock_current_shared = None
self._lock_forced = False
  def _ConfigData(self):
    """Return the in-memory configuration data (may be None if outdated)."""
    return self._config_data
  def OutDate(self):
    """Mark the cached configuration as outdated by dropping it."""
    self._config_data = None
  def _SetConfigData(self, cfg):
    """Replace the cached configuration data with C{cfg}."""
    self._config_data = cfg
  def _GetWConfdContext(self):
    """Return the context passed to WConfD calls on behalf of this writer."""
    return self._wconfdcontext
  # this method needs to be static, so that we can call it on the class
  @staticmethod
  def IsCluster():
    """Check if the cluster is configured.

    @rtype: bool
    @return: True if the cluster configuration file exists on disk

    """
    return os.path.exists(pathutils.CLUSTER_CONF_FILE)
def _UnlockedGetNdParams(self, node):
nodegroup = self._UnlockedGetNodeGroup(node.group)
return self._ConfigData().cluster.FillND(node, nodegroup)
  @ConfigSync(shared=1)
  def GetNdParams(self, node):
    """Get the node params populated with cluster defaults.

    Locked wrapper around L{_UnlockedGetNdParams}.

    @type node: L{objects.Node}
    @param node: The node we want to know the params for
    @return: A dict with the filled in node params

    """
    return self._UnlockedGetNdParams(node)
  @ConfigSync(shared=1)
  def GetNdGroupParams(self, nodegroup):
    """Get the node groups params populated with cluster defaults.

    Locked wrapper around L{_UnlockedGetNdGroupParams}.

    @type nodegroup: L{objects.NodeGroup}
    @param nodegroup: The node group we want to know the params for
    @return: A dict with the filled in node group params

    """
    return self._UnlockedGetNdGroupParams(nodegroup)
def _UnlockedGetNdGroupParams(self, group):
"""Get the ndparams of the group.
@type group: L{objects.NodeGroup}
@param group: The group we want to know the params for
@rtype: dict of str to int
@return: A dict with the filled in node group params
"""
return self._ConfigData().cluster.FillNDGroup(group)
@ConfigSync(shared=1)
def GetGroupSshPorts(self):
"""Get a map of group UUIDs to SSH ports.
@rtype: dict of str to int
@return: a dict mapping the UUIDs to the SSH ports
"""
port_map = {}
for uuid, group in self._config_data.nodegroups.items():
ndparams = self._UnlockedGetNdGroupParams(group)
port = ndparams.get(constants.ND_SSH_PORT)
port_map[uuid] = port
return port_map
  @ConfigSync(shared=1)
  def GetInstanceDiskParams(self, instance):
    """Get the disk params populated with inherit chain.

    The chain is resolved via the node group of the instance's primary node.

    @type instance: L{objects.Instance}
    @param instance: The instance we want to know the params for
    @return: A dict with the filled in disk params

    """
    node = self._UnlockedGetNodeInfo(instance.primary_node)
    nodegroup = self._UnlockedGetNodeGroup(node.group)
    return self._UnlockedGetGroupDiskParams(nodegroup)
def _UnlockedGetInstanceDisks(self, inst_uuid):
"""Return the disks' info for the given instance
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to know the disks for
@rtype: List of L{objects.Disk}
@return: A list with all the disks' info
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if instance is None:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
return [self._UnlockedGetDiskInfo(disk_uuid)
for disk_uuid in instance.disks]
  @ConfigSync(shared=1)
  def GetInstanceDisks(self, inst_uuid):
    """Return the disks' info for the given instance

    This is a simple wrapper over L{_UnlockedGetInstanceDisks}.

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance
    @rtype: list of L{objects.Disk}

    """
    return self._UnlockedGetInstanceDisks(inst_uuid)
  def AddInstanceDisk(self, inst_uuid, disk, idx=None, replace=False):
    """Add a disk to the config and attach it to instance.

    The actual modification is performed by WConfD; the local cache is
    invalidated afterwards.

    @type inst_uuid: string
    @param inst_uuid: the instance to attach the disk to
    @type disk: L{objects.Disk}
    @param disk: the disk object to add
    @param idx: optional index at which to attach the disk
    @param replace: whether to replace an existing disk at that index

    """
    if not isinstance(disk, objects.Disk):
      raise errors.ProgrammerError("Invalid type passed to AddInstanceDisk")

    # fill in any missing attributes before serializing
    disk.UpgradeConfig()
    utils.SimpleRetry(True, self._wconfd.AddInstanceDisk, 0.1, 30,
                      args=[inst_uuid, disk.ToDict(), idx, replace])
    self.OutDate()
  def AttachInstanceDisk(self, inst_uuid, disk_uuid, idx=None):
    """Attach an existing disk to an instance.

    Delegates to WConfD and invalidates the local cache.

    @type inst_uuid: string
    @param inst_uuid: the instance to attach the disk to
    @type disk_uuid: string
    @param disk_uuid: the UUID of an already existing disk
    @param idx: optional index at which to attach the disk

    """
    utils.SimpleRetry(True, self._wconfd.AttachInstanceDisk, 0.1, 30,
                      args=[inst_uuid, disk_uuid, idx])
    self.OutDate()
def _UnlockedRemoveDisk(self, disk_uuid):
"""Remove the disk from the configuration.
@type disk_uuid: string
@param disk_uuid: The UUID of the disk object
"""
if disk_uuid not in self._ConfigData().disks:
raise errors.ConfigurationError("Disk %s doesn't exist" % disk_uuid)
# Disk must not be attached anywhere
for inst in self._ConfigData().instances.values():
if disk_uuid in inst.disks:
raise errors.ReservationError("Cannot remove disk %s. Disk is"
" attached to instance %s"
% (disk_uuid, inst.name))
# Remove disk from config file
del self._ConfigData().disks[disk_uuid]
self._ConfigData().cluster.serial_no += 1
  def RemoveInstanceDisk(self, inst_uuid, disk_uuid):
    """Detach a disk from an instance and remove it from the config.

    Delegates to WConfD and invalidates the local cache.

    @type inst_uuid: string
    @param inst_uuid: the instance the disk is attached to
    @type disk_uuid: string
    @param disk_uuid: the UUID of the disk to remove

    """
    utils.SimpleRetry(True, self._wconfd.RemoveInstanceDisk, 0.1, 30,
                      args=[inst_uuid, disk_uuid])
    self.OutDate()
  def DetachInstanceDisk(self, inst_uuid, disk_uuid):
    """Detach a disk from an instance.

    Delegates to WConfD and invalidates the local cache; unlike
    L{RemoveInstanceDisk} the disk object stays in the configuration.

    @type inst_uuid: string
    @param inst_uuid: the instance the disk is attached to
    @type disk_uuid: string
    @param disk_uuid: the UUID of the disk to detach

    """
    utils.SimpleRetry(True, self._wconfd.DetachInstanceDisk, 0.1, 30,
                      args=[inst_uuid, disk_uuid])
    self.OutDate()
def _UnlockedGetDiskInfo(self, disk_uuid):
"""Returns information about a disk.
It takes the information from the configuration file.
@param disk_uuid: UUID of the disk
@rtype: L{objects.Disk}
@return: the disk object
"""
if disk_uuid not in self._ConfigData().disks:
return None
return self._ConfigData().disks[disk_uuid]
  @ConfigSync(shared=1)
  def GetDiskInfo(self, disk_uuid):
    """Returns information about a disk.

    This is a simple wrapper over L{_UnlockedGetDiskInfo}.

    @param disk_uuid: UUID of the disk
    @rtype: L{objects.Disk}

    """
    return self._UnlockedGetDiskInfo(disk_uuid)
def _UnlockedGetDiskInfoByName(self, disk_name):
"""Return information about a named disk.
Return disk information from the configuration file, searching with the
name of the disk.
@param disk_name: Name of the disk
@rtype: L{objects.Disk}
@return: the disk object
"""
disk = None
count = 0
for d in self._ConfigData().disks.values():
if d.name == disk_name:
count += 1
disk = d
if count > 1:
raise errors.ConfigurationError("There are %s disks with this name: %s"
% (count, disk_name))
return disk
  @ConfigSync(shared=1)
  def GetDiskInfoByName(self, disk_name):
    """Return information about a named disk.

    This is a simple wrapper over L{_UnlockedGetDiskInfoByName}.

    @param disk_name: Name of the disk
    @rtype: L{objects.Disk}

    """
    return self._UnlockedGetDiskInfoByName(disk_name)
  def _UnlockedGetDiskList(self):
    """Get the list of disks.

    @return: array of disks, ex. ['disk2-uuid', 'disk1-uuid']

    """
    # iterating the dict yields its keys, i.e. the disk UUIDs
    return list(self._ConfigData().disks)
  @ConfigSync(shared=1)
  def GetAllDisksInfo(self):
    """Get the configuration of all disks.

    This is a simple wrapper over L{_UnlockedGetAllDisksInfo}.

    @rtype: dict of str to L{objects.Disk}

    """
    return self._UnlockedGetAllDisksInfo()
def _UnlockedGetAllDisksInfo(self):
"""Get the configuration of all disks.
@rtype: dict
@return: dict of (disk, disk_info), where disk_info is what
would GetDiskInfo return for the node
"""
my_dict = dict([(disk_uuid, self._UnlockedGetDiskInfo(disk_uuid))
for disk_uuid in self._UnlockedGetDiskList()])
return my_dict
def _AllInstanceNodes(self, inst_uuid):
"""Compute the set of all disk-related nodes for an instance.
This abstracts away some work from '_UnlockedGetInstanceNodes'
and '_UnlockedGetInstanceSecondaryNodes'.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to get nodes for
@rtype: set of strings
@return: A set of names for all the nodes of the instance
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if instance is None:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
instance_disks = self._UnlockedGetInstanceDisks(inst_uuid)
all_nodes = []
for disk in instance_disks:
all_nodes.extend(disk.all_nodes)
return (set(all_nodes), instance)
  def _UnlockedGetInstanceNodes(self, inst_uuid):
    """Get all disk-related nodes for an instance.

    For non-DRBD instances, this will contain only the instance's primary node,
    whereas for DRBD instances, it will contain both the primary and the
    secondaries.

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance we want to get nodes for
    @rtype: list of strings
    @return: A list of names for all the nodes of the instance

    """
    (all_nodes, instance) = self._AllInstanceNodes(inst_uuid)
    # ensure that primary node is always the first
    all_nodes.discard(instance.primary_node)
    # NOTE: the order of the remaining (secondary) nodes follows set
    # iteration order
    return (instance.primary_node, ) + tuple(all_nodes)
  @ConfigSync(shared=1)
  def GetInstanceNodes(self, inst_uuid):
    """Get all disk-related nodes for an instance.

    This is just a wrapper over L{_UnlockedGetInstanceNodes}

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance

    """
    return self._UnlockedGetInstanceNodes(inst_uuid)
def _UnlockedGetInstanceSecondaryNodes(self, inst_uuid):
"""Get the list of secondary nodes.
@type inst_uuid: string
@param inst_uuid: The UUID of the instance we want to get nodes for
@rtype: list of strings
@return: A tuple of names for all the secondary nodes of the instance
"""
(all_nodes, instance) = self._AllInstanceNodes(inst_uuid)
all_nodes.discard(instance.primary_node)
return tuple(all_nodes)
  @ConfigSync(shared=1)
  def GetInstanceSecondaryNodes(self, inst_uuid):
    """Get the list of secondary nodes.

    This is a simple wrapper over L{_UnlockedGetInstanceSecondaryNodes}.

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance

    """
    return self._UnlockedGetInstanceSecondaryNodes(inst_uuid)
  def _UnlockedGetInstanceLVsByNode(self, inst_uuid, lvmap=None):
    """Provide a mapping of node to LVs a given instance owns.

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance we want to
        compute the LVsByNode for
    @type lvmap: dict
    @param lvmap: Optional dictionary to receive the
        'node' : ['lv', ...] data.
    @rtype: dict or None
    @return: None if lvmap arg is given, otherwise, a dictionary of
        the form { 'node_uuid' : ['volume1', 'volume2', ...], ... };
        volumeN is of the form "vg_name/lv_name", compatible with
        GetVolumeList()

    """
    def _MapLVsByNode(lvmap, devices, node_uuid):
      """Recursive helper function."""
      if not node_uuid in lvmap:
        lvmap[node_uuid] = []

      for dev in devices:
        if dev.dev_type == constants.DT_PLAIN:
          # forthcoming LVs do not exist on disk yet, so skip them
          if not dev.forthcoming:
            lvmap[node_uuid].append(dev.logical_id[0] + "/" + dev.logical_id[1])

        elif dev.dev_type in constants.DTS_DRBD:
          # for DRBD, recurse into the children on both attached nodes,
          # which are encoded in the first two logical_id entries
          if dev.children:
            _MapLVsByNode(lvmap, dev.children, dev.logical_id[0])
            _MapLVsByNode(lvmap, dev.children, dev.logical_id[1])

        elif dev.children:
          _MapLVsByNode(lvmap, dev.children, node_uuid)

    instance = self._UnlockedGetInstanceInfo(inst_uuid)
    if instance is None:
      raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)

    # if the caller supplied a dict, fill it in place and return None
    if lvmap is None:
      lvmap = {}
      ret = lvmap
    else:
      ret = None

    _MapLVsByNode(lvmap,
                  self._UnlockedGetInstanceDisks(instance.uuid),
                  instance.primary_node)
    return ret
  @ConfigSync(shared=1)
  def GetInstanceLVsByNode(self, inst_uuid, lvmap=None):
    """Provide a mapping of node to LVs a given instance owns.

    This is a simple wrapper over L{_UnlockedGetInstanceLVsByNode}

    @type inst_uuid: string
    @param inst_uuid: The UUID of the instance
    @param lvmap: optional dict to fill in place (see the wrapped method)

    """
    return self._UnlockedGetInstanceLVsByNode(inst_uuid, lvmap=lvmap)
  @ConfigSync(shared=1)
  def GetGroupDiskParams(self, group):
    """Get the disk params populated with inherit chain.

    Locked wrapper around L{_UnlockedGetGroupDiskParams}.

    @type group: L{objects.NodeGroup}
    @param group: The group we want to know the params for
    @return: A dict with the filled in disk params

    """
    return self._UnlockedGetGroupDiskParams(group)
def _UnlockedGetGroupDiskParams(self, group):
"""Get the disk params populated with inherit chain down to node-group.
@type group: L{objects.NodeGroup}
@param group: The group we want to know the params for
@return: A dict with the filled in disk params
"""
data = self._ConfigData().cluster.SimpleFillDP(group.diskparams)
assert isinstance(data, dict), "Not a dictionary: " + str(data)
return data
  @ConfigSync(shared=1)
  def GetPotentialMasterCandidates(self):
    """Gets the list of node names of potential master candidates.

    @rtype: list of str
    @return: list of node names of potential master candidates

    """
    # FIXME: Note that currently potential master candidates are nodes
    # but this definition will be extended once RAPI-unmodifiable
    # parameters are introduced.
    nodes = self._UnlockedGetAllNodesInfo()
    return [node_info.name for node_info in nodes.values()]
  def GenerateMAC(self, net_uuid, _ec_id):
    """Generate a MAC for an instance.

    This should check the current instances for duplicates.

    @param net_uuid: the network to generate the MAC for
    @param _ec_id: unused; kept for interface compatibility
    @return: the MAC generated by WConfD

    """
    return self._wconfd.GenerateMAC(self._GetWConfdContext(), net_uuid)
  def ReserveMAC(self, mac, _ec_id):
    """Reserve a MAC for an instance.

    This only checks instances managed by this cluster, it does not
    check for potential collisions elsewhere.

    @param mac: the MAC address to reserve
    @param _ec_id: unused; kept for interface compatibility

    """
    self._wconfd.ReserveMAC(self._GetWConfdContext(), mac)
  @ConfigSync(shared=1)
  def CommitTemporaryIps(self, _ec_id):
    """Tell WConfD to commit all temporary ids

    @param _ec_id: unused; kept for interface compatibility

    """
    self._wconfd.CommitTemporaryIps(self._GetWConfdContext())
def ReleaseIp(self, net_uuid, address, _ec_id):
"""Give a specific IP address back to an IP pool.
The IP address is returned to the IP pool and marked as reserved.
"""
if net_uuid:
if self._offline:
raise errors.ProgrammerError("Can't call ReleaseIp in offline mode")
self._wconfd.ReleaseIp(self._GetWConfdContext(), net_uuid, address)
  def GenerateIp(self, net_uuid, _ec_id):
    """Find a free IPv4 address for an instance.

    @param net_uuid: the network to allocate the address from
    @param _ec_id: unused; kept for interface compatibility
    @return: the address chosen by WConfD

    """
    if self._offline:
      raise errors.ProgrammerError("Can't call GenerateIp in offline mode")
    return self._wconfd.GenerateIp(self._GetWConfdContext(), net_uuid)
  def ReserveIp(self, net_uuid, address, _ec_id, check=True):
    """Reserve a given IPv4 address for use by an instance.

    @param net_uuid: the network to reserve the address in
    @param address: the IPv4 address to reserve
    @param _ec_id: unused; kept for interface compatibility
    @param check: forwarded to WConfD's ReserveIp call

    """
    if self._offline:
      raise errors.ProgrammerError("Can't call ReserveIp in offline mode")
    return self._wconfd.ReserveIp(self._GetWConfdContext(), net_uuid, address,
                                  check)
  def ReserveLV(self, lv_name, _ec_id):
    """Reserve an VG/LV pair for an instance.

    @type lv_name: string
    @param lv_name: the logical volume name to reserve
    @param _ec_id: unused; kept for interface compatibility

    """
    return self._wconfd.ReserveLV(self._GetWConfdContext(), lv_name)
  def GenerateDRBDSecret(self, _ec_id):
    """Generate a DRBD secret.

    This checks the current disks for duplicates.

    @param _ec_id: unused; kept for interface compatibility
    @return: the secret generated by WConfD

    """
    return self._wconfd.GenerateDRBDSecret(self._GetWConfdContext())
# FIXME: After _AllIDs is removed, move it to config_mock.py
def _AllLVs(self):
"""Compute the list of all LVs.
"""
lvnames = set()
for instance in self._ConfigData().instances.values():
node_data = self._UnlockedGetInstanceLVsByNode(instance.uuid)
for lv_list in node_data.values():
lvnames.update(lv_list)
return lvnames
def _AllNICs(self):
"""Compute the list of all NICs.
"""
nics = []
for instance in self._ConfigData().instances.values():
nics.extend(instance.nics)
return nics
def _AllIDs(self, include_temporary):
"""Compute the list of all UUIDs and names we have.
@type include_temporary: boolean
@param include_temporary: whether to include the _temporary_ids set
@rtype: set
@return: a set of IDs
"""
existing = set()
if include_temporary:
existing.update(self._temporary_ids.GetReserved())
existing.update(self._AllLVs())
existing.update(self._ConfigData().instances)
existing.update(self._ConfigData().nodes)
existing.update([i.uuid for i in self._AllUUIDObjects() if i.uuid])
return existing
  def _GenerateUniqueID(self, ec_id):
    """Generate an unique UUID.

    This checks the current node, instances and disk names for
    duplicates.

    @param ec_id: unique id for the job to reserve the id to
    @rtype: string
    @return: the unique id

    """
    # temporary IDs are excluded here; the reservation manager handles them
    existing = self._AllIDs(include_temporary=False)
    return self._temporary_ids.Generate(existing, utils.NewUUID, ec_id)
  @ConfigSync(shared=1)
  def GenerateUniqueID(self, ec_id):
    """Generate an unique ID.

    This is just a wrapper over the unlocked version.

    @type ec_id: string
    @param ec_id: unique id for the job to reserve the id to

    """
    return self._GenerateUniqueID(ec_id)
def _AllMACs(self):
"""Return all MACs present in the config.
@rtype: list
@return: the list of all MACs
"""
result = []
for instance in self._ConfigData().instances.values():
for nic in instance.nics:
result.append(nic.mac)
return result
def _AllDRBDSecrets(self):
"""Return all DRBD secrets present in the config.
@rtype: list
@return: the list of all DRBD secrets
"""
def helper(disk, result):
"""Recursively gather secrets from this disk."""
if disk.dev_type == constants.DT_DRBD8:
result.append(disk.logical_id[5])
if disk.children:
for child in disk.children:
helper(child, result)
result = []
for disk in self._ConfigData().disks.values():
helper(disk, result)
return result
@staticmethod
def _VerifyDisks(data, result):
"""Per-disk verification checks
Extends L{result} with diagnostic information about the disks.
@type data: see L{_ConfigData}
@param data: configuration data
@type result: list of strings
@param result: list containing diagnostic messages
"""
for disk_uuid in data.disks:
disk = data.disks[disk_uuid]
result.extend(["disk %s error: %s" % (disk.uuid, msg)
for msg in disk.Verify()])
if disk.uuid != disk_uuid:
result.append("disk '%s' is indexed by wrong UUID '%s'" %
(disk.name, disk_uuid))
  def _UnlockedVerifyConfig(self):
    """Verify function.

    @rtype: list
    @return: a list of error messages; a non-empty list signifies
        configuration errors

    """
    # pylint: disable=R0914
    result = []
    seen_macs = []
    ports = {}
    data = self._ConfigData()
    cluster = data.cluster

    # First call WConfd to perform its checks, if we're not offline
    if not self._offline:
      try:
        self._wconfd.VerifyConfig()
      except errors.ConfigVerifyError as err:
        try:
          # the second exception argument carries the list of messages
          for msg in err.args[1]:
            result.append(msg)
        except IndexError:
          pass

    # check cluster parameters
    VerifyType("cluster", "beparams", cluster.SimpleFillBE({}),
               constants.BES_PARAMETER_TYPES, result.append)
    VerifyType("cluster", "nicparams", cluster.SimpleFillNIC({}),
               constants.NICS_PARAMETER_TYPES, result.append)
    VerifyNic("cluster", cluster.SimpleFillNIC({}), result.append)
    VerifyType("cluster", "ndparams", cluster.SimpleFillND({}),
               constants.NDS_PARAMETER_TYPES, result.append)
    VerifyIpolicy("cluster", cluster.ipolicy, True, result.append)

    # validate the access mode of every disk template that supports one
    for disk_template in cluster.diskparams:
      if disk_template not in constants.DTS_HAVE_ACCESS:
        continue

      access = cluster.diskparams[disk_template].get(constants.LDP_ACCESS,
                                                     constants.DISK_KERNELSPACE)
      if access not in constants.DISK_VALID_ACCESS_MODES:
        result.append(
          "Invalid value of '%s:%s': '%s' (expected one of %s)" % (
            disk_template, constants.LDP_ACCESS, access,
            utils.CommaJoin(constants.DISK_VALID_ACCESS_MODES)
          )
        )

    self._VerifyDisks(data, result)

    # per-instance checks
    for instance_uuid in data.instances:
      instance = data.instances[instance_uuid]
      if instance.uuid != instance_uuid:
        result.append("instance '%s' is indexed by wrong UUID '%s'" %
                      (instance.name, instance_uuid))
      if instance.primary_node not in data.nodes:
        result.append("instance '%s' has invalid primary node '%s'" %
                      (instance.name, instance.primary_node))
      for snode in self._UnlockedGetInstanceSecondaryNodes(instance.uuid):
        if snode not in data.nodes:
          result.append("instance '%s' has invalid secondary node '%s'" %
                        (instance.name, snode))
      for idx, nic in enumerate(instance.nics):
        if nic.mac in seen_macs:
          result.append("instance '%s' has NIC %d mac %s duplicate" %
                        (instance.name, idx, nic.mac))
        else:
          seen_macs.append(nic.mac)
        if nic.nicparams:
          filled = cluster.SimpleFillNIC(nic.nicparams)
          owner = "instance %s nic %d" % (instance.name, idx)
          VerifyType(owner, "nicparams",
                     filled, constants.NICS_PARAMETER_TYPES, result.append)
          VerifyNic(owner, filled, result.append)

      # parameter checks
      if instance.beparams:
        VerifyType("instance %s" % instance.name, "beparams",
                   cluster.FillBE(instance), constants.BES_PARAMETER_TYPES,
                   result.append)

      # check that disks exists
      for disk_uuid in instance.disks:
        if disk_uuid not in data.disks:
          result.append("Instance '%s' has invalid disk '%s'" %
                        (instance.name, disk_uuid))

      instance_disks = self._UnlockedGetInstanceDisks(instance.uuid)
      # gather the drbd ports for duplicate checks
      for (idx, dsk) in enumerate(instance_disks):
        if dsk.dev_type in constants.DTS_DRBD:
          tcp_port = dsk.logical_id[2]
          if tcp_port not in ports:
            ports[tcp_port] = []
          ports[tcp_port].append((instance.name, "drbd disk %s" % idx))
      # gather network port reservation
      net_port = getattr(instance, "network_port", None)
      if net_port is not None:
        if net_port not in ports:
          ports[net_port] = []
        ports[net_port].append((instance.name, "network port"))

      wrong_names = _CheckInstanceDiskIvNames(instance_disks)
      if wrong_names:
        tmp = "; ".join(("name of disk %s should be '%s', but is '%s'" %
                         (idx, exp_name, actual_name))
                        for (idx, exp_name, actual_name) in wrong_names)

        result.append("Instance '%s' has wrongly named disks: %s" %
                      (instance.name, tmp))

    # cluster-wide pool of free ports
    for free_port in cluster.tcpudp_port_pool:
      if free_port not in ports:
        ports[free_port] = []
      ports[free_port].append(("cluster", "port marked as free"))

    # compute tcp/udp duplicate ports
    keys = list(ports)
    keys.sort()
    for pnum in keys:
      pdata = ports[pnum]
      if len(pdata) > 1:
        txt = utils.CommaJoin(["%s/%s" % val for val in pdata])
        result.append("tcp/udp port %s has duplicates: %s" % (pnum, txt))

    # highest used tcp port check
    if keys:
      if keys[-1] > cluster.highest_used_port:
        result.append("Highest used port mismatch, saved %s, computed %s" %
                      (cluster.highest_used_port, keys[-1]))

    if not data.nodes[cluster.master_node].master_candidate:
      result.append("Master node is not a master candidate")

    # master candidate checks
    mc_now, mc_max, _ = self._UnlockedGetMasterCandidateStats()
    if mc_now < mc_max:
      result.append("Not enough master candidates: actual %d, target %d" %
                    (mc_now, mc_max))

    # node checks
    for node_uuid, node in data.nodes.items():
      if node.uuid != node_uuid:
        result.append("Node '%s' is indexed by wrong UUID '%s'" %
                      (node.name, node_uuid))
      # at most one of the three exclusive states may be active
      if [node.master_candidate, node.drained, node.offline].count(True) > 1:
        result.append("Node %s state is invalid: master_candidate=%s,"
                      " drain=%s, offline=%s" %
                      (node.name, node.master_candidate, node.drained,
                       node.offline))
      if node.group not in data.nodegroups:
        result.append("Node '%s' has invalid group '%s'" %
                      (node.name, node.group))
      else:
        VerifyType("node %s" % node.name, "ndparams",
                   cluster.FillND(node, data.nodegroups[node.group]),
                   constants.NDS_PARAMETER_TYPES, result.append)
      used_globals = constants.NDC_GLOBALS.intersection(node.ndparams)
      if used_globals:
        result.append("Node '%s' has some global parameters set: %s" %
                      (node.name, utils.CommaJoin(used_globals)))

    # nodegroups checks
    nodegroups_names = set()
    for nodegroup_uuid in data.nodegroups:
      nodegroup = data.nodegroups[nodegroup_uuid]
      if nodegroup.uuid != nodegroup_uuid:
        result.append("node group '%s' (uuid: '%s') indexed by wrong uuid '%s'"
                      % (nodegroup.name, nodegroup.uuid, nodegroup_uuid))
      if utils.UUID_RE.match(nodegroup.name.lower()):
        result.append("node group '%s' (uuid: '%s') has uuid-like name" %
                      (nodegroup.name, nodegroup.uuid))
      if nodegroup.name in nodegroups_names:
        result.append("duplicate node group name '%s'" % nodegroup.name)
      else:
        nodegroups_names.add(nodegroup.name)
      group_name = "group %s" % nodegroup.name
      VerifyIpolicy(group_name, cluster.SimpleFillIPolicy(nodegroup.ipolicy),
                    False, result.append)
      if nodegroup.ndparams:
        VerifyType(group_name, "ndparams",
                   cluster.SimpleFillND(nodegroup.ndparams),
                   constants.NDS_PARAMETER_TYPES, result.append)

    # drbd minors check
    # FIXME: The check for DRBD map needs to be implemented in WConfd

    # IP checks
    default_nicparams = cluster.nicparams[constants.PP_DEFAULT]
    ips = {}

    def _AddIpAddress(ip, name):
      ips.setdefault(ip, []).append(name)

    _AddIpAddress(cluster.master_ip, "cluster_ip")

    for node in data.nodes.values():
      _AddIpAddress(node.primary_ip, "node:%s/primary" % node.name)
      if node.secondary_ip != node.primary_ip:
        _AddIpAddress(node.secondary_ip, "node:%s/secondary" % node.name)

    for instance in data.instances.values():
      for idx, nic in enumerate(instance.nics):
        if nic.ip is None:
          continue

        nicparams = objects.FillDict(default_nicparams, nic.nicparams)
        nic_mode = nicparams[constants.NIC_MODE]
        nic_link = nicparams[constants.NIC_LINK]

        if nic_mode == constants.NIC_MODE_BRIDGED:
          link = "bridge:%s" % nic_link
        elif nic_mode == constants.NIC_MODE_ROUTED:
          link = "route:%s" % nic_link
        elif nic_mode == constants.NIC_MODE_OVS:
          link = "ovs:%s" % nic_link
        else:
          raise errors.ProgrammerError("NIC mode '%s' not handled" % nic_mode)

        # the key encodes link+ip+network, so the same IP may be reused on
        # different links/networks without being reported as a duplicate
        _AddIpAddress("%s/%s/%s" % (link, nic.ip, nic.network),
                      "instance:%s/nic:%d" % (instance.name, idx))

    for ip, owners in ips.items():
      if len(owners) > 1:
        result.append("IP address %s is used by multiple owners: %s" %
                      (ip, utils.CommaJoin(owners)))

    return result
  @ConfigSync(shared=1)
  def VerifyConfigAndLog(self, feedback_fn=None):
    """A simple wrapper around L{_UnlockedVerifyConfigAndLog}

    @param feedback_fn: optional callable receiving the error message

    """
    return self._UnlockedVerifyConfigAndLog(feedback_fn=feedback_fn)
def _UnlockedVerifyConfigAndLog(self, feedback_fn=None):
"""Verify the configuration and log any errors.
The errors get logged as critical errors and also to the feedback function,
if given.
@param feedback_fn: Callable feedback function
@rtype: list
@return: a list of error messages; a non-empty list signifies
configuration errors
"""
assert feedback_fn is None or callable(feedback_fn)
# Warn on config errors, but don't abort the save - the
# configuration has already been modified, and we can't revert;
# the best we can do is to warn the user and save as is, leaving
# recovery to the user
config_errors = self._UnlockedVerifyConfig()
if config_errors:
errmsg = ("Configuration data is not consistent: %s" %
(utils.CommaJoin(config_errors)))
logging.critical(errmsg)
if feedback_fn:
feedback_fn(errmsg)
return config_errors
  @ConfigSync(shared=1)
  def VerifyConfig(self):
    """Verify function.

    This is just a wrapper over L{_UnlockedVerifyConfig}.

    @rtype: list
    @return: a list of error messages; a non-empty list signifies
        configuration errors

    """
    return self._UnlockedVerifyConfig()
  def AddTcpUdpPort(self, port):
    """Adds a new port to the available port pool.

    Delegates to WConfD and invalidates the local cache.

    @param port: the port number to add to the pool

    """
    utils.SimpleRetry(True, self._wconfd.AddTcpUdpPort, 0.1, 30, args=[port])
    self.OutDate()
  @ConfigSync(shared=1)
  def GetPortList(self):
    """Returns a copy of the current port list.

    @return: a copy of the cluster's tcp/udp free-port pool

    """
    return self._ConfigData().cluster.tcpudp_port_pool.copy()
def AllocatePort(self):
"""Allocate a port."""
def WithRetry():
port = self._wconfd.AllocatePort()
self.OutDate()
if port is None:
raise utils.RetryAgain()
else:
return port
return utils.Retry(WithRetry, 0.1, 30)
@ConfigSync(shared=1)
def ComputeDRBDMap(self):
"""Compute the used DRBD minor/nodes.
This is just a wrapper over a call to WConfd.
@return: dictionary of node_uuid: dict of minor: instance_uuid;
the returned dict will have all the nodes in it (even if with
an empty list).
"""
if self._offline:
raise errors.ProgrammerError("Can't call ComputeDRBDMap in offline mode")
else:
return dict((k, dict(v)) for (k, v) in self._wconfd.ComputeDRBDMap())
  def AllocateDRBDMinor(self, node_uuids, disk_uuid):
    """Allocate a drbd minor.

    This is just a wrapper over a call to WConfd.

    The free minor will be automatically computed from the existing
    devices. A node can not be given multiple times.
    The result is the list of minors, in the same
    order as the passed nodes.

    @type node_uuids: list of strings
    @param node_uuids: the nodes in which we allocate minors
    @type disk_uuid: string
    @param disk_uuid: the disk for which we allocate minors
    @rtype: list of ints
    @return: A list of minors in the same order as the passed nodes

    """
    assert isinstance(disk_uuid, str), \
      "Invalid argument '%s' passed to AllocateDRBDMinor" % disk_uuid

    if self._offline:
      raise errors.ProgrammerError("Can't call AllocateDRBDMinor"
                                   " in offline mode")

    result = self._wconfd.AllocateDRBDMinor(disk_uuid, node_uuids)
    logging.debug("Request to allocate drbd minors, input: %s, returning %s",
                  node_uuids, result)
    return result
  def ReleaseDRBDMinors(self, disk_uuid):
    """Release temporary drbd minors allocated for a given disk.

    This is just a wrapper over a call to WConfd.

    @type disk_uuid: string
    @param disk_uuid: the disk for which temporary minors should be released

    """
    assert isinstance(disk_uuid, str), \
      "Invalid argument passed to ReleaseDRBDMinors"

    # in offline mode we allow the calls to release DRBD minors,
    # because then nothing can be allocated anyway;
    # this is useful for testing
    if not self._offline:
      self._wconfd.ReleaseDRBDMinors(disk_uuid)
  @ConfigSync(shared=1)
  def GetInstanceDiskTemplate(self, inst_uuid):
    """Return the disk template of an instance.

    This corresponds to the currently attached disks. If no disks are attached,
    it is L{constants.DT_DISKLESS}, if homogeneous disk types are attached,
    that type is returned, if that isn't the case, L{constants.DT_MIXED} is
    returned.

    @type inst_uuid: str
    @param inst_uuid: The uuid of the instance.

    """
    return utils.GetDiskTemplate(self._UnlockedGetInstanceDisks(inst_uuid))
  @ConfigSync(shared=1)
  def GetConfigVersion(self):
    """Get the configuration version.

    @return: Config version

    """
    return self._ConfigData().version
  @ConfigSync(shared=1)
  def GetClusterName(self):
    """Get cluster name.

    @return: Cluster name

    """
    return self._ConfigData().cluster.cluster_name
  @ConfigSync(shared=1)
  def GetMasterNode(self):
    """Get the UUID of the master node for this cluster.

    @return: Master node UUID

    """
    return self._ConfigData().cluster.master_node
  @ConfigSync(shared=1)
  def GetMasterNodeName(self):
    """Get the hostname of the master node for this cluster.

    Resolves the master node UUID to its name.

    @return: Master node hostname

    """
    return self._UnlockedGetNodeName(self._ConfigData().cluster.master_node)
  @ConfigSync(shared=1)
  def GetMasterNodeInfo(self):
    """Get the master node information for this cluster.

    @rtype: objects.Node
    @return: Master node L{objects.Node} object

    """
    return self._UnlockedGetNodeInfo(self._ConfigData().cluster.master_node)
  @ConfigSync(shared=1)
  def GetMasterIP(self):
    """Get the IP of the master node for this cluster.

    @return: Master IP

    """
    return self._ConfigData().cluster.master_ip
  @ConfigSync(shared=1)
  def GetMasterNetdev(self):
    """Get the master network device for this cluster.

    @return: the configured master network device

    """
    return self._ConfigData().cluster.master_netdev
  @ConfigSync(shared=1)
  def GetMasterNetmask(self):
    """Get the netmask of the master node for this cluster.

    @return: the configured master netmask

    """
    return self._ConfigData().cluster.master_netmask
  @ConfigSync(shared=1)
  def GetUseExternalMipScript(self):
    """Get flag representing whether to use the external master IP setup script.

    @return: the configured flag value

    """
    return self._ConfigData().cluster.use_external_mip_script
  @ConfigSync(shared=1)
  def GetFileStorageDir(self):
    """Get the file storage dir for this cluster.

    @return: the configured file storage directory

    """
    return self._ConfigData().cluster.file_storage_dir
  @ConfigSync(shared=1)
  def GetSharedFileStorageDir(self):
    """Get the shared file storage dir for this cluster.

    @return: the configured shared file storage directory

    """
    return self._ConfigData().cluster.shared_file_storage_dir
  @ConfigSync(shared=1)
  def GetGlusterStorageDir(self):
    """Get the Gluster storage dir for this cluster.

    @return: the configured Gluster storage directory

    """
    return self._ConfigData().cluster.gluster_storage_dir
  @ConfigSync(shared=1)
  def GetHypervisorType(self):
    """Get the hypervisor type for this cluster.

    @return: the first (default) enabled hypervisor

    """
    return self._ConfigData().cluster.enabled_hypervisors[0]
  @ConfigSync(shared=1)
  def GetRsaHostKey(self):
    """Return the rsa hostkey from the config.

    @rtype: string
    @return: the rsa hostkey

    """
    return self._ConfigData().cluster.rsahostkeypub
  @ConfigSync(shared=1)
  def GetDsaHostKey(self):
    """Return the dsa hostkey from the config.

    @rtype: string
    @return: the dsa hostkey

    """
    return self._ConfigData().cluster.dsahostkeypub
  @ConfigSync(shared=1)
  def GetDefaultIAllocator(self):
    """Get the default instance allocator for this cluster.

    @return: the configured default iallocator

    """
    return self._ConfigData().cluster.default_iallocator
  @ConfigSync(shared=1)
  def GetDefaultIAllocatorParameters(self):
    """Get the default instance allocator parameters for this cluster.

    @rtype: dict
    @return: dict of iallocator parameters

    """
    return self._ConfigData().cluster.default_iallocator_params
  @ConfigSync(shared=1)
  def GetPrimaryIPFamily(self):
    """Get cluster primary ip family.

    @return: primary ip family

    """
    return self._ConfigData().cluster.primary_ip_family
  @ConfigSync(shared=1)
  def GetMasterNetworkParameters(self):
    """Get network parameters of the master node.

    @rtype: L{object.MasterNetworkParameters}
    @return: network parameters of the master node

    """
    cluster = self._ConfigData().cluster
    # bundle the master-related cluster fields into a single object
    result = objects.MasterNetworkParameters(
      uuid=cluster.master_node, ip=cluster.master_ip,
      netmask=cluster.master_netmask, netdev=cluster.master_netdev,
      ip_family=cluster.primary_ip_family)

    return result
  @ConfigSync(shared=1)
  def GetInstallImage(self):
    """Get the install image location

    @rtype: string
    @return: location of the install image

    """
    return self._ConfigData().cluster.install_image
  @ConfigSync()
  def SetInstallImage(self, install_image):
    """Set the install image location

    @type install_image: string
    @param install_image: location of the install image

    """
    self._ConfigData().cluster.install_image = install_image
  @ConfigSync(shared=1)
  def GetInstanceCommunicationNetwork(self):
    """Get cluster instance communication network

    @rtype: string
    @return: instance communication network, which is the name of the
             network used for instance communication

    """
    return self._ConfigData().cluster.instance_communication_network
@ConfigSync()
def SetInstanceCommunicationNetwork(self, network_name):
"""Set cluster instance communication network
@type network_name: string
@param network_name: instance communication network, which is the name of
the network used for instance communication
"""
self._ConfigData().cluster.instance_communication_network = network_name
@ConfigSync(shared=1)
def GetZeroingImage(self):
"""Get the zeroing image location
@rtype: string
@return: the location of the zeroing image
"""
return self._config_data.cluster.zeroing_image
@ConfigSync(shared=1)
def GetCompressionTools(self):
"""Get cluster compression tools
@rtype: list of string
@return: a list of tools that are cleared for use in this cluster for the
purpose of compressing data
"""
return self._ConfigData().cluster.compression_tools
@ConfigSync()
def SetCompressionTools(self, tools):
"""Set cluster compression tools
@type tools: list of string
@param tools: a list of tools that are cleared for use in this cluster for
the purpose of compressing data
"""
self._ConfigData().cluster.compression_tools = tools
  @ConfigSync()
  def AddNodeGroup(self, group, ec_id, check_uuid=True):
    """Add a node group to the configuration.

    This method calls group.UpgradeConfig() to fill any missing attributes
    according to their default values.

    @type group: L{objects.NodeGroup}
    @param group: the NodeGroup object to add
    @type ec_id: string
    @param ec_id: unique id for the job to use when creating a missing UUID
    @type check_uuid: bool
    @param check_uuid: add an UUID to the group if it doesn't have one or, if
      it does, ensure that it does not exist in the
      configuration already

    """
    # Locked wrapper; the actual work happens in _UnlockedAddNodeGroup.
    self._UnlockedAddNodeGroup(group, ec_id, check_uuid)
  def _UnlockedAddNodeGroup(self, group, ec_id, check_uuid):
    """Add a node group to the configuration.

    @type group: L{objects.NodeGroup}
    @param group: the NodeGroup object to add
    @type ec_id: string
    @param ec_id: execution context id, used for UUID reservation
    @type check_uuid: bool
    @param check_uuid: whether to generate/validate the group's UUID
    @raise errors.OpPrereqError: if a group with the same name already exists

    """
    logging.info("Adding node group %s to configuration", group.name)

    # Some code might need to add a node group with a pre-populated UUID
    # generated with ConfigWriter.GenerateUniqueID(). We allow them to bypass
    # the "does this UUID" exist already check.
    if check_uuid:
      self._EnsureUUID(group, ec_id)

    # A failed name lookup (OpPrereqError) is the *good* case here: it means
    # no group with that name exists yet.
    try:
      existing_uuid = self._UnlockedLookupNodeGroup(group.name)
    except errors.OpPrereqError:
      pass
    else:
      raise errors.OpPrereqError("Desired group name '%s' already exists as a"
                                 " node group (UUID: %s)" %
                                 (group.name, existing_uuid),
                                 errors.ECODE_EXISTS)

    group.serial_no = 1
    group.ctime = group.mtime = time.time()
    # Fill any missing attributes with their defaults.
    group.UpgradeConfig()

    self._ConfigData().nodegroups[group.uuid] = group
    # Bump the cluster serial so ssconf files get regenerated.
    self._ConfigData().cluster.serial_no += 1
  @ConfigSync()
  def RemoveNodeGroup(self, group_uuid):
    """Remove a node group from the configuration.

    @type group_uuid: string
    @param group_uuid: the UUID of the node group to remove
    @raise errors.ConfigurationError: if the group is unknown

    """
    logging.info("Removing node group %s from configuration", group_uuid)

    if group_uuid not in self._ConfigData().nodegroups:
      raise errors.ConfigurationError("Unknown node group '%s'" % group_uuid)

    # NOTE(review): removing the last remaining group is guarded only by an
    # assert, which is stripped under -O; presumably callers enforce this
    # earlier -- confirm before relying on it.
    assert len(self._ConfigData().nodegroups) != 1, \
            "Group '%s' is the only group, cannot be removed" % group_uuid

    del self._ConfigData().nodegroups[group_uuid]
    # Bump the cluster serial so ssconf files get regenerated.
    self._ConfigData().cluster.serial_no += 1
  def _UnlockedLookupNodeGroup(self, target):
    """Lookup a node group's UUID.

    @type target: string or None
    @param target: group name or UUID or None to look for the default
    @rtype: string
    @return: nodegroup UUID
    @raises errors.OpPrereqError: when the target group cannot be found

    """
    if target is None:
      # "Default" group only makes sense when there is exactly one group.
      if len(self._ConfigData().nodegroups) != 1:
        raise errors.OpPrereqError("More than one node group exists. Target"
                                   " group must be specified explicitly.")
      else:
        return list(self._ConfigData().nodegroups)[0]
    # Fast path: target is already a UUID.
    if target in self._ConfigData().nodegroups:
      return target
    # Otherwise treat the target as a group name and scan for it.
    for nodegroup in self._ConfigData().nodegroups.values():
      if nodegroup.name == target:
        return nodegroup.uuid
    raise errors.OpPrereqError("Node group '%s' not found" % target,
                               errors.ECODE_NOENT)
@ConfigSync(shared=1)
def LookupNodeGroup(self, target):
"""Lookup a node group's UUID.
This function is just a wrapper over L{_UnlockedLookupNodeGroup}.
@type target: string or None
@param target: group name or UUID or None to look for the default
@rtype: string
@return: nodegroup UUID
"""
return self._UnlockedLookupNodeGroup(target)
def _UnlockedGetNodeGroup(self, uuid):
"""Lookup a node group.
@type uuid: string
@param uuid: group UUID
@rtype: L{objects.NodeGroup} or None
@return: nodegroup object, or None if not found
"""
if uuid not in self._ConfigData().nodegroups:
return None
return self._ConfigData().nodegroups[uuid]
@ConfigSync(shared=1)
def GetNodeGroup(self, uuid):
"""Lookup a node group.
@type uuid: string
@param uuid: group UUID
@rtype: L{objects.NodeGroup} or None
@return: nodegroup object, or None if not found
"""
return self._UnlockedGetNodeGroup(uuid)
def _UnlockedGetAllNodeGroupsInfo(self):
"""Get the configuration of all node groups.
"""
return dict(self._ConfigData().nodegroups)
@ConfigSync(shared=1)
def GetAllNodeGroupsInfo(self):
"""Get the configuration of all node groups.
"""
return self._UnlockedGetAllNodeGroupsInfo()
@ConfigSync(shared=1)
def GetAllNodeGroupsInfoDict(self):
"""Get the configuration of all node groups expressed as a dictionary of
dictionaries.
"""
return dict((uuid, ng.ToDict()) for (uuid, ng) in
self._UnlockedGetAllNodeGroupsInfo().items())
@ConfigSync(shared=1)
def GetNodeGroupList(self):
"""Get a list of node groups.
"""
return list(self._ConfigData().nodegroups)
@ConfigSync(shared=1)
def GetNodeGroupMembersByNodes(self, nodes):
"""Get nodes which are member in the same nodegroups as the given nodes.
"""
ngfn = lambda node_uuid: self._UnlockedGetNodeInfo(node_uuid).group
return frozenset(member_uuid
for node_uuid in nodes
for member_uuid in
self._UnlockedGetNodeGroup(ngfn(node_uuid)).members)
@ConfigSync(shared=1)
def GetMultiNodeGroupInfo(self, group_uuids):
"""Get the configuration of multiple node groups.
@param group_uuids: List of node group UUIDs
@rtype: list
@return: List of tuples of (group_uuid, group_info)
"""
return [(uuid, self._UnlockedGetNodeGroup(uuid)) for uuid in group_uuids]
  def AddInstance(self, instance, _ec_id, replace=False):
    """Add an instance to the config.

    This should be used after creating a new instance.

    @type instance: L{objects.Instance}
    @param instance: the instance object
    @type replace: bool
    @param replace: if true, expect the instance to be present and
        replace rather than add.
    @raise errors.ProgrammerError: if C{instance} is not an
        L{objects.Instance}

    """
    if not isinstance(instance, objects.Instance):
      raise errors.ProgrammerError("Invalid type passed to AddInstance")

    instance.serial_no = 1

    # The actual insertion is delegated to WConfd; retry for up to 30s in
    # case the daemon is momentarily unavailable.
    utils.SimpleRetry(True, self._wconfd.AddInstance, 0.1, 30,
                      args=[instance.ToDict(),
                            self._GetWConfdContext(),
                            replace])
    # Our cached view of the configuration is now stale.
    self.OutDate()
  def _EnsureUUID(self, item, ec_id):
    """Ensures a given object has a valid UUID.

    If the item has no UUID yet, a reserved one is generated; otherwise the
    existing UUID is checked for uniqueness (including temporary
    reservations).

    @param item: the instance or node to be checked
    @param ec_id: the execution context id for the uuid reservation

    """
    if not item.uuid:
      item.uuid = self._GenerateUniqueID(ec_id)
    else:
      self._CheckUniqueUUID(item, include_temporary=True)
  def _CheckUniqueUUID(self, item, include_temporary):
    """Checks that the UUID of the given object is unique.

    @param item: the instance or node to be checked
    @param include_temporary: whether temporarily generated UUID's should be
              included in the check. If the UUID of the item to be checked is
              a temporarily generated one, this has to be C{False}.
    @raise errors.ConfigurationError: if the UUID is missing or already in use

    """
    if not item.uuid:
      raise errors.ConfigurationError("'%s' must have an UUID" % (item.name,))
    if item.uuid in self._AllIDs(include_temporary=include_temporary):
      raise errors.ConfigurationError("Cannot add '%s': UUID %s already"
                                      " in use" % (item.name, item.uuid))
  def _CheckUUIDpresent(self, item):
    """Checks that an object with the given UUID exists.

    Counterpart of L{_CheckUniqueUUID}: used when *replacing* an object, so
    the UUID must already be known (temporary reservations do not count).

    @param item: the instance or other UUID possessing object to verify that
        its UUID is present
    @raise errors.ConfigurationError: if the UUID is missing or unknown

    """
    if not item.uuid:
      raise errors.ConfigurationError("'%s' must have an UUID" % (item.name,))
    if item.uuid not in self._AllIDs(include_temporary=False):
      raise errors.ConfigurationError("Cannot replace '%s': UUID %s not present"
                                      % (item.name, item.uuid))
  def _SetInstanceStatus(self, inst_uuid, status, disks_active,
                         admin_state_source):
    """Set the instance's status to a given value.

    Delegates to WConfd; C{None} arguments presumably mean "leave the field
    unchanged" (see the Mark* callers) -- the authoritative semantics live in
    WConfd.

    @rtype: L{objects.Instance}
    @return: the updated instance object

    """
    def WithRetry():
      # WConfd returns None when it cannot apply the change yet; translate
      # that into a retry, and refresh our stale cached config either way.
      result = self._wconfd.SetInstanceStatus(inst_uuid, status,
                                              disks_active, admin_state_source)
      self.OutDate()
      if result is None:
        raise utils.RetryAgain()
      else:
        return result
    return objects.Instance.FromDict(utils.Retry(WithRetry, 0.1, 30))
def MarkInstanceUp(self, inst_uuid):
"""Mark the instance status to up in the config.
This also sets the instance disks active flag.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_UP, True,
constants.ADMIN_SOURCE)
def MarkInstanceOffline(self, inst_uuid):
"""Mark the instance status to down in the config.
This also clears the instance disks active flag.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_OFFLINE, False,
constants.ADMIN_SOURCE)
  def RemoveInstance(self, inst_uuid):
    """Remove the instance from the configuration.

    @type inst_uuid: string
    @param inst_uuid: UUID of the instance to remove

    """
    # Delegated to WConfd; retry for up to 30s, then drop our cached view.
    utils.SimpleRetry(True, self._wconfd.RemoveInstance, 0.1, 30,
                      args=[inst_uuid])
    self.OutDate()
@ConfigSync()
def RenameInstance(self, inst_uuid, new_name):
"""Rename an instance.
This needs to be done in ConfigWriter and not by RemoveInstance
combined with AddInstance as only we can guarantee an atomic
rename.
"""
if inst_uuid not in self._ConfigData().instances:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
inst = self._ConfigData().instances[inst_uuid]
inst.name = new_name
instance_disks = self._UnlockedGetInstanceDisks(inst_uuid)
for (_, disk) in enumerate(instance_disks):
if disk.dev_type in [constants.DT_FILE, constants.DT_SHARED_FILE]:
# rename the file paths in logical and physical id
file_storage_dir = os.path.dirname(os.path.dirname(disk.logical_id[1]))
disk.logical_id = (disk.logical_id[0],
utils.PathJoin(file_storage_dir, inst.name,
os.path.basename(disk.logical_id[1])))
# Force update of ssconf files
self._ConfigData().cluster.serial_no += 1
def MarkInstanceDown(self, inst_uuid):
"""Mark the status of an instance to down in the configuration.
This does not touch the instance disks active flag, as shut down instances
can still have active disks.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, constants.ADMINST_DOWN, None,
constants.ADMIN_SOURCE)
def MarkInstanceUserDown(self, inst_uuid):
"""Mark the status of an instance to user down in the configuration.
This does not touch the instance disks active flag, as user shut
down instances can still have active disks.
"""
self._SetInstanceStatus(inst_uuid, constants.ADMINST_DOWN, None,
constants.USER_SOURCE)
def MarkInstanceDisksActive(self, inst_uuid):
"""Mark the status of instance disks active.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, None, True, None)
def MarkInstanceDisksInactive(self, inst_uuid):
"""Mark the status of instance disks inactive.
@rtype: L{objects.Instance}
@return: the updated instance object
"""
return self._SetInstanceStatus(inst_uuid, None, False, None)
def _UnlockedGetInstanceList(self):
"""Get the list of instances.
This function is for internal use, when the config lock is already held.
"""
return list(self._ConfigData().instances)
@ConfigSync(shared=1)
def GetInstanceList(self):
"""Get the list of instances.
@return: array of instances, ex. ['instance2-uuid', 'instance1-uuid']
"""
return self._UnlockedGetInstanceList()
  def ExpandInstanceName(self, short_name):
    """Attempt to expand an incomplete instance name.

    @type short_name: string
    @param short_name: (possibly partial) instance name
    @rtype: tuple
    @return: (uuid, full name) of the unique match, or (None, None) if the
        name is ambiguous or unknown

    """
    # Locking is done in L{ConfigWriter.GetAllInstancesInfo}
    all_insts = self.GetAllInstancesInfo().values()
    expanded_name = _MatchNameComponentIgnoreCase(
        short_name, [inst.name for inst in all_insts])

    if expanded_name is not None:
      # there has to be exactly one instance with that name
      inst = [n for n in all_insts if n.name == expanded_name][0]
      return (inst.uuid, inst.name)
    else:
      return (None, None)
def _UnlockedGetInstanceInfo(self, inst_uuid):
"""Returns information about an instance.
This function is for internal use, when the config lock is already held.
"""
if inst_uuid not in self._ConfigData().instances:
return None
return self._ConfigData().instances[inst_uuid]
@ConfigSync(shared=1)
def GetInstanceInfo(self, inst_uuid):
"""Returns information about an instance.
It takes the information from the configuration file. Other information of
an instance are taken from the live systems.
@param inst_uuid: UUID of the instance
@rtype: L{objects.Instance}
@return: the instance object
"""
return self._UnlockedGetInstanceInfo(inst_uuid)
  @ConfigSync(shared=1)
  def GetInstanceNodeGroups(self, inst_uuid, primary_only=False):
    """Returns set of node group UUIDs for instance's nodes.

    @type inst_uuid: string
    @param inst_uuid: UUID of the instance
    @type primary_only: bool
    @param primary_only: if True, only the primary node's group is considered
    @rtype: frozenset
    @raise errors.ConfigurationError: if the instance is unknown

    """
    instance = self._UnlockedGetInstanceInfo(inst_uuid)
    if not instance:
      raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)

    if primary_only:
      nodes = [instance.primary_node]
    else:
      # Primary plus all secondary nodes.
      nodes = self._UnlockedGetInstanceNodes(instance.uuid)

    return frozenset(self._UnlockedGetNodeInfo(node_uuid).group
                     for node_uuid in nodes)
@ConfigSync(shared=1)
def GetInstanceNetworks(self, inst_uuid):
"""Returns set of network UUIDs for instance's nics.
@rtype: frozenset
"""
instance = self._UnlockedGetInstanceInfo(inst_uuid)
if not instance:
raise errors.ConfigurationError("Unknown instance '%s'" % inst_uuid)
networks = set()
for nic in instance.nics:
if nic.network:
networks.add(nic.network)
return frozenset(networks)
@ConfigSync(shared=1)
def GetMultiInstanceInfo(self, inst_uuids):
"""Get the configuration of multiple instances.
@param inst_uuids: list of instance UUIDs
@rtype: list
@return: list of tuples (instance UUID, instance_info), where
instance_info is what would GetInstanceInfo return for the
node, while keeping the original order
"""
return [(uuid, self._UnlockedGetInstanceInfo(uuid)) for uuid in inst_uuids]
  @ConfigSync(shared=1)
  def GetMultiInstanceInfoByName(self, inst_names):
    """Get the configuration of multiple instances.

    Unlike L{GetMultiInstanceInfo}, an unknown name is an error here.

    @param inst_names: list of instance names
    @rtype: list
    @return: list of tuples (instance, instance_info), where
        instance_info is what would GetInstanceInfo return for the
        node, while keeping the original order
    @raise errors.ConfigurationError: if any of the names is unknown

    """
    result = []
    for name in inst_names:
      instance = self._UnlockedGetInstanceInfoByName(name)
      if instance:
        result.append((instance.uuid, instance))
      else:
        raise errors.ConfigurationError("Instance data of instance '%s'"
                                        " not found." % name)
    return result
@ConfigSync(shared=1)
def GetAllInstancesInfo(self):
"""Get the configuration of all instances.
@rtype: dict
@return: dict of (instance, instance_info), where instance_info is what
would GetInstanceInfo return for the node
"""
return self._UnlockedGetAllInstancesInfo()
def _UnlockedGetAllInstancesInfo(self):
my_dict = dict([(inst_uuid, self._UnlockedGetInstanceInfo(inst_uuid))
for inst_uuid in self._UnlockedGetInstanceList()])
return my_dict
  @ConfigSync(shared=1)
  def GetInstancesInfoByFilter(self, filter_fn):
    """Get instance configuration with a filter.

    @type filter_fn: callable
    @param filter_fn: Filter function receiving instance object as parameter,
      returning boolean. Important: this function is called while the
      configuration locks is held. It must not do any complex work or call
      functions potentially leading to a deadlock. Ideally it doesn't call any
      other functions and just compares instance attributes.
    @rtype: dict
    @return: mapping of instance UUID to instance object, restricted to
        instances for which C{filter_fn} returned a true value

    """
    return dict((uuid, inst)
                for (uuid, inst) in self._ConfigData().instances.items()
                if filter_fn(inst))
@ConfigSync(shared=1)
def GetInstanceInfoByName(self, inst_name):
"""Get the L{objects.Instance} object for a named instance.
@param inst_name: name of the instance to get information for
@type inst_name: string
@return: the corresponding L{objects.Instance} instance or None if no
information is available
"""
return self._UnlockedGetInstanceInfoByName(inst_name)
def _UnlockedGetInstanceInfoByName(self, inst_name):
for inst in self._UnlockedGetAllInstancesInfo().values():
if inst.name == inst_name:
return inst
return None
  def _UnlockedGetInstanceName(self, inst_uuid):
    """Resolve an instance UUID to its name.

    @raise errors.OpExecError: if the UUID is unknown

    """
    inst_info = self._UnlockedGetInstanceInfo(inst_uuid)
    if inst_info is None:
      raise errors.OpExecError("Unknown instance: %s" % inst_uuid)
    return inst_info.name
@ConfigSync(shared=1)
def GetInstanceName(self, inst_uuid):
"""Gets the instance name for the passed instance.
@param inst_uuid: instance UUID to get name for
@type inst_uuid: string
@rtype: string
@return: instance name
"""
return self._UnlockedGetInstanceName(inst_uuid)
@ConfigSync(shared=1)
def GetInstanceNames(self, inst_uuids):
"""Gets the instance names for the passed list of nodes.
@param inst_uuids: list of instance UUIDs to get names for
@type inst_uuids: list of strings
@rtype: list of strings
@return: list of instance names
"""
return self._UnlockedGetInstanceNames(inst_uuids)
  def SetInstancePrimaryNode(self, inst_uuid, target_node_uuid):
    """Sets the primary node of an existing instance

    @param inst_uuid: instance UUID
    @type inst_uuid: string
    @param target_node_uuid: the new primary node UUID
    @type target_node_uuid: string

    """
    # Delegated to WConfd; retry for up to 30s, then drop our cached view.
    utils.SimpleRetry(True, self._wconfd.SetInstancePrimaryNode, 0.1, 30,
                      args=[inst_uuid, target_node_uuid])
    self.OutDate()
@ConfigSync()
def SetDiskNodes(self, disk_uuid, nodes):
"""Sets the nodes of an existing disk
@param disk_uuid: disk UUID
@type disk_uuid: string
@param nodes: the new nodes for the disk
@type nodes: list of node uuids
"""
self._UnlockedGetDiskInfo(disk_uuid).nodes = nodes
@ConfigSync()
def SetDiskLogicalID(self, disk_uuid, logical_id):
"""Sets the logical_id of an existing disk
@param disk_uuid: disk UUID
@type disk_uuid: string
@param logical_id: the new logical_id for the disk
@type logical_id: tuple
"""
disk = self._UnlockedGetDiskInfo(disk_uuid)
if disk is None:
raise errors.ConfigurationError("Unknown disk UUID '%s'" % disk_uuid)
if len(disk.logical_id) != len(logical_id):
raise errors.ProgrammerError("Logical ID format mismatch\n"
"Existing logical ID: %s\n"
"New logical ID: %s", disk.logical_id,
logical_id)
disk.logical_id = logical_id
def _UnlockedGetInstanceNames(self, inst_uuids):
return [self._UnlockedGetInstanceName(uuid) for uuid in inst_uuids]
  def _UnlockedAddNode(self, node, ec_id):
    """Add a node to the configuration.

    @type node: L{objects.Node}
    @param node: a Node instance
    @type ec_id: string
    @param ec_id: execution context id, used for UUID reservation

    """
    logging.info("Adding node %s to configuration", node.name)

    self._EnsureUUID(node, ec_id)

    node.serial_no = 1
    node.ctime = node.mtime = time.time()
    # Register the node with its group before storing it.
    self._UnlockedAddNodeToGroup(node.uuid, node.group)
    assert node.uuid in self._ConfigData().nodegroups[node.group].members
    self._ConfigData().nodes[node.uuid] = node
    # Bump the cluster serial so ssconf files get regenerated.
    self._ConfigData().cluster.serial_no += 1
@ConfigSync()
def AddNode(self, node, ec_id):
"""Add a node to the configuration.
@type node: L{objects.Node}
@param node: a Node instance
"""
self._UnlockedAddNode(node, ec_id)
  @ConfigSync()
  def RemoveNode(self, node_uuid):
    """Remove a node from the configuration.

    @type node_uuid: string
    @param node_uuid: UUID of the node to remove
    @raise errors.ConfigurationError: if the node is unknown

    """
    logging.info("Removing node %s from configuration", node_uuid)

    if node_uuid not in self._ConfigData().nodes:
      raise errors.ConfigurationError("Unknown node '%s'" % node_uuid)

    # Detach from the node group first, then drop the node itself.
    self._UnlockedRemoveNodeFromGroup(self._ConfigData().nodes[node_uuid])
    del self._ConfigData().nodes[node_uuid]
    # Bump the cluster serial so ssconf files get regenerated.
    self._ConfigData().cluster.serial_no += 1
  def ExpandNodeName(self, short_name):
    """Attempt to expand an incomplete node name into a node UUID.

    @type short_name: string
    @param short_name: (possibly partial) node name
    @rtype: tuple
    @return: (uuid, full name) of the unique match, or (None, None) if the
        name is ambiguous or unknown

    """
    # Locking is done in L{ConfigWriter.GetAllNodesInfo}
    all_nodes = self.GetAllNodesInfo().values()
    expanded_name = _MatchNameComponentIgnoreCase(
        short_name, [node.name for node in all_nodes])

    if expanded_name is not None:
      # there has to be exactly one node with that name
      node = [n for n in all_nodes if n.name == expanded_name][0]
      return (node.uuid, node.name)
    else:
      return (None, None)
def _UnlockedGetNodeInfo(self, node_uuid):
"""Get the configuration of a node, as stored in the config.
This function is for internal use, when the config lock is already
held.
@param node_uuid: the node UUID
@rtype: L{objects.Node}
@return: the node object
"""
if node_uuid not in self._ConfigData().nodes:
return None
return self._ConfigData().nodes[node_uuid]
@ConfigSync(shared=1)
def GetNodeInfo(self, node_uuid):
"""Get the configuration of a node, as stored in the config.
This is just a locked wrapper over L{_UnlockedGetNodeInfo}.
@param node_uuid: the node UUID
@rtype: L{objects.Node}
@return: the node object
"""
return self._UnlockedGetNodeInfo(node_uuid)
  @ConfigSync(shared=1)
  def GetNodeInstances(self, node_uuid):
    """Get the instances of a node, as stored in the config.

    @param node_uuid: the node UUID
    @rtype: (list, list)
    @return: a tuple with two lists: the primary and the secondary instances

    """
    pri = []
    sec = []
    # Full scan over all instances; an instance can appear in both lists if
    # it is primary on this node and the node also carries a secondary disk.
    for inst in self._ConfigData().instances.values():
      if inst.primary_node == node_uuid:
        pri.append(inst.uuid)
      if node_uuid in self._UnlockedGetInstanceSecondaryNodes(inst.uuid):
        sec.append(inst.uuid)
    return (pri, sec)
  @ConfigSync(shared=1)
  def GetNodeGroupInstances(self, uuid, primary_only=False):
    """Get the instances of a node group.

    @param uuid: Node group UUID
    @param primary_only: Whether to only consider primary nodes
    @rtype: frozenset
    @return: List of instance UUIDs in node group

    """
    # Choose which nodes of an instance count towards group membership.
    if primary_only:
      nodes_fn = lambda inst: [inst.primary_node]
    else:
      nodes_fn = lambda inst: self._UnlockedGetInstanceNodes(inst.uuid)

    # An instance belongs to the group if any of its (selected) nodes does.
    return frozenset(inst.uuid
                     for inst in self._ConfigData().instances.values()
                     for node_uuid in nodes_fn(inst)
                     if self._UnlockedGetNodeInfo(node_uuid).group == uuid)
def _UnlockedGetHvparamsString(self, hvname):
"""Return the string representation of the list of hyervisor parameters of
the given hypervisor.
@see: C{GetHvparams}
"""
result = ""
hvparams = self._ConfigData().cluster.hvparams[hvname]
for key in hvparams:
result += "%s=%s\n" % (key, hvparams[key])
return result
@ConfigSync(shared=1)
def GetHvparamsString(self, hvname):
"""Return the hypervisor parameters of the given hypervisor.
@type hvname: string
@param hvname: name of a hypervisor
@rtype: string
@return: string containing key-value-pairs, one pair on each line;
format: KEY=VALUE
"""
return self._UnlockedGetHvparamsString(hvname)
def _UnlockedGetNodeList(self):
"""Return the list of nodes which are in the configuration.
This function is for internal use, when the config lock is already
held.
@rtype: list
"""
return list(self._ConfigData().nodes)
@ConfigSync(shared=1)
def GetNodeList(self):
"""Return the list of nodes which are in the configuration.
"""
return self._UnlockedGetNodeList()
def _UnlockedGetOnlineNodeList(self):
"""Return the list of nodes which are online.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.uuid for node in all_nodes if not node.offline]
@ConfigSync(shared=1)
def GetOnlineNodeList(self):
"""Return the list of nodes which are online.
"""
return self._UnlockedGetOnlineNodeList()
  @ConfigSync(shared=1)
  def GetVmCapableNodeList(self):
    """Return the list of nodes which are vm capable.

    (The previous docstring said "not vm capable", which contradicted the
    filter below.)

    @rtype: list
    @return: UUIDs of all nodes with C{vm_capable} set

    """
    all_nodes = [self._UnlockedGetNodeInfo(node)
                 for node in self._UnlockedGetNodeList()]
    return [node.uuid for node in all_nodes if node.vm_capable]
@ConfigSync(shared=1)
def GetNonVmCapableNodeList(self):
"""Return the list of nodes' uuids which are not vm capable.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.uuid for node in all_nodes if not node.vm_capable]
@ConfigSync(shared=1)
def GetNonVmCapableNodeNameList(self):
"""Return the list of nodes' names which are not vm capable.
"""
all_nodes = [self._UnlockedGetNodeInfo(node)
for node in self._UnlockedGetNodeList()]
return [node.name for node in all_nodes if not node.vm_capable]
@ConfigSync(shared=1)
def GetMultiNodeInfo(self, node_uuids):
"""Get the configuration of multiple nodes.
@param node_uuids: list of node UUIDs
@rtype: list
@return: list of tuples of (node, node_info), where node_info is
what would GetNodeInfo return for the node, in the original
order
"""
return [(uuid, self._UnlockedGetNodeInfo(uuid)) for uuid in node_uuids]
def _UnlockedGetAllNodesInfo(self):
"""Gets configuration of all nodes.
@note: See L{GetAllNodesInfo}
"""
return dict([(node_uuid, self._UnlockedGetNodeInfo(node_uuid))
for node_uuid in self._UnlockedGetNodeList()])
@ConfigSync(shared=1)
def GetAllNodesInfo(self):
"""Get the configuration of all nodes.
@rtype: dict
@return: dict of (node, node_info), where node_info is what
would GetNodeInfo return for the node
"""
return self._UnlockedGetAllNodesInfo()
def _UnlockedGetNodeInfoByName(self, node_name):
for node in self._UnlockedGetAllNodesInfo().values():
if node.name == node_name:
return node
return None
@ConfigSync(shared=1)
def GetNodeInfoByName(self, node_name):
"""Get the L{objects.Node} object for a named node.
@param node_name: name of the node to get information for
@type node_name: string
@return: the corresponding L{objects.Node} instance or None if no
information is available
"""
return self._UnlockedGetNodeInfoByName(node_name)
@ConfigSync(shared=1)
def GetNodeGroupInfoByName(self, nodegroup_name):
"""Get the L{objects.NodeGroup} object for a named node group.
@param nodegroup_name: name of the node group to get information for
@type nodegroup_name: string
@return: the corresponding L{objects.NodeGroup} instance or None if no
information is available
"""
for nodegroup in self._UnlockedGetAllNodeGroupsInfo().values():
if nodegroup.name == nodegroup_name:
return nodegroup
return None
  def _UnlockedGetNodeName(self, node_spec):
    """Resolve a node spec (UUID string or Node object) to a name.

    @param node_spec: either a node UUID or an L{objects.Node} object
    @rtype: string
    @raise errors.OpExecError: if a UUID string is unknown
    @raise errors.ProgrammerError: if the spec is neither type

    """
    if isinstance(node_spec, objects.Node):
      return node_spec.name
    elif isinstance(node_spec, str):
      node_info = self._UnlockedGetNodeInfo(node_spec)
      if node_info is None:
        raise errors.OpExecError("Unknown node: %s" % node_spec)
      return node_info.name
    else:
      raise errors.ProgrammerError("Can't handle node spec '%s'" % node_spec)
@ConfigSync(shared=1)
def GetNodeName(self, node_spec):
"""Gets the node name for the passed node.
@param node_spec: node to get names for
@type node_spec: either node UUID or a L{objects.Node} object
@rtype: string
@return: node name
"""
return self._UnlockedGetNodeName(node_spec)
def _UnlockedGetNodeNames(self, node_specs):
return [self._UnlockedGetNodeName(node_spec) for node_spec in node_specs]
@ConfigSync(shared=1)
def GetNodeNames(self, node_specs):
"""Gets the node names for the passed list of nodes.
@param node_specs: list of nodes to get names for
@type node_specs: list of either node UUIDs or L{objects.Node} objects
@rtype: list of strings
@return: list of node names
"""
return self._UnlockedGetNodeNames(node_specs)
@ConfigSync(shared=1)
def GetNodeGroupsFromNodes(self, node_uuids):
"""Returns groups for a list of nodes.
@type node_uuids: list of string
@param node_uuids: List of node UUIDs
@rtype: frozenset
"""
return frozenset(self._UnlockedGetNodeInfo(uuid).group
for uuid in node_uuids)
def _UnlockedGetMasterCandidateUuids(self):
"""Get the list of UUIDs of master candidates.
@rtype: list of strings
@return: list of UUIDs of all master candidates.
"""
return [node.uuid for node in self._ConfigData().nodes.values()
if node.master_candidate]
@ConfigSync(shared=1)
def GetMasterCandidateUuids(self):
"""Get the list of UUIDs of master candidates.
@rtype: list of strings
@return: list of UUIDs of all master candidates.
"""
return self._UnlockedGetMasterCandidateUuids()
  def _UnlockedGetMasterCandidateStats(self, exceptions=None):
    """Get the number of current and maximum desired and possible candidates.

    @type exceptions: list
    @param exceptions: if passed, list of nodes that should be ignored
    @rtype: tuple
    @return: tuple of (current, desired and possible, possible)

    """
    mc_now = mc_should = mc_max = 0
    for node in self._ConfigData().nodes.values():
      if exceptions and node.uuid in exceptions:
        continue
      # Only healthy, master-capable nodes count towards the possible pool.
      if not (node.offline or node.drained) and node.master_capable:
        mc_max += 1
      if node.master_candidate:
        mc_now += 1
    # Desired size is capped by the configured pool size, if any.
    pool_size = self._ConfigData().cluster.candidate_pool_size
    mc_should = mc_max if pool_size is None else min(mc_max, pool_size)
    return (mc_now, mc_should, mc_max)
  @ConfigSync(shared=1)
  def GetMasterCandidateStats(self, exceptions=None):
    """Get the number of current and maximum possible candidates.

    This is just a wrapper over L{_UnlockedGetMasterCandidateStats}.

    @type exceptions: list
    @param exceptions: if passed, list of nodes that should be ignored
    @rtype: tuple
    @return: tuple of (current, desired and possible, possible); the previous
        docstring claimed a 2-tuple, but the wrapped helper returns three
        values

    """
    return self._UnlockedGetMasterCandidateStats(exceptions)
  @ConfigSync()
  def MaintainCandidatePool(self, exception_node_uuids):
    """Try to grow the candidate pool to the desired size.

    @type exception_node_uuids: list
    @param exception_node_uuids: if passed, list of nodes that should be
        ignored
    @rtype: list
    @return: list with the adjusted nodes (L{objects.Node} instances)

    """
    # NOTE: the stats helper returns (current, desired, possible); the local
    # name "mc_max" therefore actually holds the *desired* pool size.
    mc_now, mc_max, _ = self._UnlockedGetMasterCandidateStats(
                          exception_node_uuids)
    mod_list = []
    if mc_now < mc_max:
      node_list = list(self._ConfigData().nodes)
      # Shuffle so promotion is spread evenly over eligible nodes.
      random.shuffle(node_list)
      for uuid in node_list:
        if mc_now >= mc_max:
          break
        node = self._ConfigData().nodes[uuid]
        # Skip nodes that are already candidates or not eligible.
        if (node.master_candidate or node.offline or node.drained or
            node.uuid in exception_node_uuids or not node.master_capable):
          continue
        mod_list.append(node)
        node.master_candidate = True
        node.serial_no += 1
        mc_now += 1
      if mc_now != mc_max:
        # this should not happen
        logging.warning("Warning: MaintainCandidatePool didn't manage to"
                        " fill the candidate pool (%d/%d)", mc_now, mc_max)
    if mod_list:
      # Force regeneration of ssconf files.
      self._ConfigData().cluster.serial_no += 1

    return mod_list
  def _UnlockedAddNodeToGroup(self, node_uuid, nodegroup_uuid):
    """Add a given node to the specified group.

    @type node_uuid: string
    @param node_uuid: UUID of the node to add
    @type nodegroup_uuid: string
    @param nodegroup_uuid: UUID of the target group
    @raise errors.OpExecError: if the group is unknown

    """
    if nodegroup_uuid not in self._ConfigData().nodegroups:
      # This can happen if a node group gets deleted between its lookup and
      # when we're adding the first node to it, since we don't keep a lock in
      # the meantime. It's ok though, as we'll fail cleanly if the node group
      # is not found anymore.
      raise errors.OpExecError("Unknown node group: %s" % nodegroup_uuid)
    # Adding an already-present member is a silent no-op.
    if node_uuid not in self._ConfigData().nodegroups[nodegroup_uuid].members:
      self._ConfigData().nodegroups[nodegroup_uuid].members.append(node_uuid)
def _UnlockedRemoveNodeFromGroup(self, node):
"""Remove a given node from its group.
"""
nodegroup = node.group
if nodegroup not in self._ConfigData().nodegroups:
logging.warning("Warning: node '%s' has unknown node group '%s'"
" (while being removed from it)", node.uuid, nodegroup)
nodegroup_obj = self._ConfigData().nodegroups[nodegroup]
if node.uuid not in nodegroup_obj.members:
logging.warning("Warning: node '%s' not a member of its node group '%s'"
" (while being removed from it)", node.uuid, nodegroup)
else:
nodegroup_obj.members.remove(node.uuid)
  @ConfigSync()
  def AssignGroupNodes(self, mods):
    """Changes the group of a number of nodes.

    All modifications are first validated, then applied in one pass, so a
    bad entry aborts the whole operation before anything is changed.

    @type mods: list of tuples; (node name, new group UUID)
    @param mods: Node membership modifications
    @raise errors.ConfigurationError: if a node or group cannot be resolved

    """
    groups = self._ConfigData().nodegroups
    nodes = self._ConfigData().nodes

    resmod = []

    # Try to resolve UUIDs first
    for (node_uuid, new_group_uuid) in mods:
      try:
        node = nodes[node_uuid]
      except KeyError:
        raise errors.ConfigurationError("Unable to find node '%s'" % node_uuid)

      if node.group == new_group_uuid:
        # Node is being assigned to its current group
        logging.debug("Node '%s' was assigned to its current group (%s)",
                      node_uuid, node.group)
        continue

      # Try to find current group of node
      try:
        old_group = groups[node.group]
      except KeyError:
        raise errors.ConfigurationError("Unable to find old group '%s'" %
                                        node.group)

      # Try to find new group for node
      try:
        new_group = groups[new_group_uuid]
      except KeyError:
        raise errors.ConfigurationError("Unable to find new group '%s'" %
                                        new_group_uuid)

      assert node.uuid in old_group.members, \
        ("Inconsistent configuration: node '%s' not listed in members for its"
         " old group '%s'" % (node.uuid, old_group.uuid))
      assert node.uuid not in new_group.members, \
        ("Inconsistent configuration: node '%s' already listed in members for"
         " its new group '%s'" % (node.uuid, new_group.uuid))

      resmod.append((node, old_group, new_group))

    # Apply changes
    for (node, old_group, new_group) in resmod:
      assert node.uuid != new_group.uuid and old_group.uuid != new_group.uuid, \
        "Assigning to current group is not possible"

      node.group = new_group.uuid

      # Update members of involved groups
      if node.uuid in old_group.members:
        old_group.members.remove(node.uuid)
      if node.uuid not in new_group.members:
        new_group.members.append(node.uuid)

    # Update timestamps and serials (only once per node/group object)
    now = time.time()
    for obj in frozenset(itertools.chain(*resmod)):
      obj.serial_no += 1
      obj.mtime = now

    # Force ssconf update
    self._ConfigData().cluster.serial_no += 1
def _BumpSerialNo(self):
"""Bump up the serial number of the config.
"""
self._ConfigData().serial_no += 1
self._ConfigData().mtime = time.time()
def _AllUUIDObjects(self):
"""Returns all objects with uuid attributes.
"""
return (list(self._ConfigData().instances.values()) +
list(self._ConfigData().nodes.values()) +
list(self._ConfigData().nodegroups.values()) +
list(self._ConfigData().networks.values()) +
list(self._ConfigData().disks.values()) +
self._AllNICs() +
[self._ConfigData().cluster])
def GetConfigManager(self, shared=False, forcelock=False):
"""Returns a ConfigManager, which is suitable to perform a synchronized
block of configuration operations.
WARNING: This blocks all other configuration operations, so anything that
runs inside the block should be very fast, preferably not using any IO.
"""
return ConfigManager(self, shared=shared, forcelock=forcelock)
def _AddLockCount(self, count):
self._lock_count += count
return self._lock_count
def _LockCount(self):
return self._lock_count
def _OpenConfig(self, shared, force=False):
"""Read the config data from WConfd or disk.
"""
if self._AddLockCount(1) > 1:
if self._lock_current_shared and not shared:
self._AddLockCount(-1)
raise errors.ConfigurationError("Can't request an exclusive"
" configuration lock while holding"
" shared")
elif not force or self._lock_forced or not shared or self._offline:
return # we already have the lock, do nothing
else:
self._lock_current_shared = shared
if force:
self._lock_forced = True
# Read the configuration data. If offline, read the file directly.
# If online, call WConfd.
if self._offline:
try:
raw_data = utils.ReadFile(self._cfg_file)
data_dict = serializer.Load(raw_data)
# Make sure the configuration has the right version
ValidateConfig(data_dict)
data = objects.ConfigData.FromDict(data_dict)
except errors.ConfigVersionMismatch:
raise
except Exception as err:
raise errors.ConfigurationError(err)
self._cfg_id = utils.GetFileID(path=self._cfg_file)
if (not hasattr(data, "cluster") or
not hasattr(data.cluster, "rsahostkeypub")):
raise errors.ConfigurationError("Incomplete configuration"
" (missing cluster.rsahostkeypub)")
if not data.cluster.master_node in data.nodes:
msg = ("The configuration denotes node %s as master, but does not"
" contain information about this node" %
data.cluster.master_node)
raise errors.ConfigurationError(msg)
master_info = data.nodes[data.cluster.master_node]
if master_info.name != self._my_hostname and not self._accept_foreign:
msg = ("The configuration denotes node %s as master, while my"
" hostname is %s; opening a foreign configuration is only"
" possible in accept_foreign mode" %
(master_info.name, self._my_hostname))
raise errors.ConfigurationError(msg)
self._SetConfigData(data)
# Upgrade configuration if needed
self._UpgradeConfig(saveafter=True)
else:
if shared and not force:
if self._config_data is None:
logging.debug("Requesting config, as I have no up-to-date copy")
dict_data = self._wconfd.ReadConfig()
logging.debug("Configuration received")
else:
dict_data = None
else:
# poll until we acquire the lock
while True:
logging.debug("Receiving config from WConfd.LockConfig [shared=%s]",
bool(shared))
dict_data = \
self._wconfd.LockConfig(self._GetWConfdContext(), bool(shared))
if dict_data is not None:
logging.debug("Received config from WConfd.LockConfig")
break
time.sleep(random.random())
try:
if dict_data is not None:
self._SetConfigData(objects.ConfigData.FromDict(dict_data))
self._UpgradeConfig()
except Exception as err:
raise errors.ConfigurationError(err)
def _CloseConfig(self, save):
"""Release resources relating the config data.
"""
if self._AddLockCount(-1) > 0:
return # we still have the lock, do nothing
if save:
try:
logging.debug("Writing configuration and unlocking it")
self._WriteConfig(releaselock=True)
logging.debug("Configuration write, unlock finished")
except Exception as err:
logging.critical("Can't write the configuration: %s", str(err))
raise
elif not self._offline and \
not (self._lock_current_shared and not self._lock_forced):
logging.debug("Unlocking configuration without writing")
self._wconfd.UnlockConfig(self._GetWConfdContext())
self._lock_forced = False
# TODO: To WConfd
def _UpgradeConfig(self, saveafter=False):
"""Run any upgrade steps.
This method performs both in-object upgrades and also update some data
elements that need uniqueness across the whole configuration or interact
with other objects.
@warning: if 'saveafter' is 'True', this function will call
L{_WriteConfig()} so it needs to be called only from a
"safe" place.
"""
# Keep a copy of the persistent part of _config_data to check for changes
# Serialization doesn't guarantee order in dictionaries
if saveafter:
oldconf = copy.deepcopy(self._ConfigData().ToDict())
else:
oldconf = None
# In-object upgrades
self._ConfigData().UpgradeConfig()
for item in self._AllUUIDObjects():
if item.uuid is None:
item.uuid = self._GenerateUniqueID(_UPGRADE_CONFIG_JID)
if not self._ConfigData().nodegroups:
default_nodegroup_name = constants.INITIAL_NODE_GROUP_NAME
default_nodegroup = objects.NodeGroup(name=default_nodegroup_name,
members=[])
self._UnlockedAddNodeGroup(default_nodegroup, _UPGRADE_CONFIG_JID, True)
for node in self._ConfigData().nodes.values():
if not node.group:
node.group = self._UnlockedLookupNodeGroup(None)
# This is technically *not* an upgrade, but needs to be done both when
# nodegroups are being added, and upon normally loading the config,
# because the members list of a node group is discarded upon
# serializing/deserializing the object.
self._UnlockedAddNodeToGroup(node.uuid, node.group)
if saveafter:
modified = (oldconf != self._ConfigData().ToDict())
else:
modified = True # can't prove it didn't change, but doesn't matter
if modified and saveafter:
self._WriteConfig()
self._UnlockedDropECReservations(_UPGRADE_CONFIG_JID)
else:
if self._offline:
self._UnlockedVerifyConfigAndLog()
def _WriteConfig(self, destination=None, releaselock=False):
"""Write the configuration data to persistent storage.
"""
if destination is None:
destination = self._cfg_file
# Save the configuration data. If offline, write the file directly.
# If online, call WConfd.
if self._offline:
self._BumpSerialNo()
txt = serializer.DumpJson(
self._ConfigData().ToDict(_with_private=True),
private_encoder=serializer.EncodeWithPrivateFields
)
getents = self._getents()
try:
fd = utils.SafeWriteFile(destination, self._cfg_id, data=txt,
close=False, gid=getents.confd_gid, mode=0o640)
except errors.LockError:
raise errors.ConfigurationError("The configuration file has been"
" modified since the last write, cannot"
" update")
try:
self._cfg_id = utils.GetFileID(fd=fd)
finally:
os.close(fd)
else:
try:
if releaselock:
res = self._wconfd.WriteConfigAndUnlock(self._GetWConfdContext(),
self._ConfigData().ToDict())
if not res:
logging.warning("WriteConfigAndUnlock indicates we already have"
" released the lock; assuming this was just a retry"
" and the initial call succeeded")
else:
self._wconfd.WriteConfig(self._GetWConfdContext(),
self._ConfigData().ToDict())
except errors.LockError:
raise errors.ConfigurationError("The configuration file has been"
" modified since the last write, cannot"
" update")
self.write_count += 1
def _GetAllHvparamsStrings(self, hypervisors):
"""Get the hvparams of all given hypervisors from the config.
@type hypervisors: list of string
@param hypervisors: list of hypervisor names
@rtype: dict of strings
@returns: dictionary mapping the hypervisor name to a string representation
of the hypervisor's hvparams
"""
hvparams = {}
for hv in hypervisors:
hvparams[hv] = self._UnlockedGetHvparamsString(hv)
return hvparams
@staticmethod
def _ExtendByAllHvparamsStrings(ssconf_values, all_hvparams):
"""Extends the ssconf_values dictionary by hvparams.
@type ssconf_values: dict of strings
@param ssconf_values: dictionary mapping ssconf_keys to strings
representing the content of ssconf files
@type all_hvparams: dict of strings
@param all_hvparams: dictionary mapping hypervisor names to a string
representation of their hvparams
@rtype: same as ssconf_values
@returns: the ssconf_values dictionary extended by hvparams
"""
for hv in all_hvparams:
ssconf_key = constants.SS_HVPARAMS_PREF + hv
ssconf_values[ssconf_key] = all_hvparams[hv]
return ssconf_values
def _UnlockedGetSshPortMap(self, node_infos):
node_ports = dict([(node.name,
self._UnlockedGetNdParams(node).get(
constants.ND_SSH_PORT))
for node in node_infos])
return node_ports
def _UnlockedGetSsconfValues(self):
"""Return the values needed by ssconf.
@rtype: dict
@return: a dictionary with keys the ssconf names and values their
associated value
"""
fn = "\n".join
instance_names = utils.NiceSort(
[inst.name for inst in
self._UnlockedGetAllInstancesInfo().values()])
node_infos = list(self._UnlockedGetAllNodesInfo().values())
node_names = [node.name for node in node_infos]
node_pri_ips = ["%s %s" % (ninfo.name, ninfo.primary_ip)
for ninfo in node_infos]
node_snd_ips = ["%s %s" % (ninfo.name, ninfo.secondary_ip)
for ninfo in node_infos]
node_vm_capable = ["%s=%s" % (ninfo.name, str(ninfo.vm_capable))
for ninfo in node_infos]
instance_data = fn(instance_names)
off_data = fn(node.name for node in node_infos if node.offline)
on_data = fn(node.name for node in node_infos if not node.offline)
mc_data = fn(node.name for node in node_infos if node.master_candidate)
mc_ips_data = fn(node.primary_ip for node in node_infos
if node.master_candidate)
node_data = fn(node_names)
node_pri_ips_data = fn(node_pri_ips)
node_snd_ips_data = fn(node_snd_ips)
node_vm_capable_data = fn(node_vm_capable)
cluster = self._ConfigData().cluster
cluster_tags = fn(cluster.GetTags())
master_candidates_certs = fn("%s=%s" % (mc_uuid, mc_cert)
for mc_uuid, mc_cert
in cluster.candidate_certs.items())
hypervisor_list = fn(cluster.enabled_hypervisors)
all_hvparams = self._GetAllHvparamsStrings(constants.HYPER_TYPES)
uid_pool = uidpool.FormatUidPool(cluster.uid_pool, separator="\n")
nodegroups = ["%s %s" % (nodegroup.uuid, nodegroup.name) for nodegroup in
self._ConfigData().nodegroups.values()]
nodegroups_data = fn(utils.NiceSort(nodegroups))
networks = ["%s %s" % (net.uuid, net.name) for net in
self._ConfigData().networks.values()]
networks_data = fn(utils.NiceSort(networks))
ssh_ports = fn("%s=%s" % (node_name, port)
for node_name, port
in self._UnlockedGetSshPortMap(node_infos).items())
ssconf_values = {
constants.SS_CLUSTER_NAME: cluster.cluster_name,
constants.SS_CLUSTER_TAGS: cluster_tags,
constants.SS_FILE_STORAGE_DIR: cluster.file_storage_dir,
constants.SS_SHARED_FILE_STORAGE_DIR: cluster.shared_file_storage_dir,
constants.SS_GLUSTER_STORAGE_DIR: cluster.gluster_storage_dir,
constants.SS_MASTER_CANDIDATES: mc_data,
constants.SS_MASTER_CANDIDATES_IPS: mc_ips_data,
constants.SS_MASTER_CANDIDATES_CERTS: master_candidates_certs,
constants.SS_MASTER_IP: cluster.master_ip,
constants.SS_MASTER_NETDEV: cluster.master_netdev,
constants.SS_MASTER_NETMASK: str(cluster.master_netmask),
constants.SS_MASTER_NODE: self._UnlockedGetNodeName(cluster.master_node),
constants.SS_NODE_LIST: node_data,
constants.SS_NODE_PRIMARY_IPS: node_pri_ips_data,
constants.SS_NODE_SECONDARY_IPS: node_snd_ips_data,
constants.SS_NODE_VM_CAPABLE: node_vm_capable_data,
constants.SS_OFFLINE_NODES: off_data,
constants.SS_ONLINE_NODES: on_data,
constants.SS_PRIMARY_IP_FAMILY: str(cluster.primary_ip_family),
constants.SS_INSTANCE_LIST: instance_data,
constants.SS_RELEASE_VERSION: constants.RELEASE_VERSION,
constants.SS_HYPERVISOR_LIST: hypervisor_list,
constants.SS_MAINTAIN_NODE_HEALTH: str(cluster.maintain_node_health),
constants.SS_UID_POOL: uid_pool,
constants.SS_NODEGROUPS: nodegroups_data,
constants.SS_NETWORKS: networks_data,
constants.SS_ENABLED_USER_SHUTDOWN: str(cluster.enabled_user_shutdown),
constants.SS_SSH_PORTS: ssh_ports,
}
ssconf_values = self._ExtendByAllHvparamsStrings(ssconf_values,
all_hvparams)
bad_values = [(k, v) for k, v in ssconf_values.items()
if not isinstance(v, str)]
if bad_values:
err = utils.CommaJoin("%s=%s" % (k, v) for k, v in bad_values)
raise errors.ConfigurationError("Some ssconf key(s) have non-string"
" values: %s" % err)
return ssconf_values
@ConfigSync(shared=1)
def GetSsconfValues(self):
"""Wrapper using lock around _UnlockedGetSsconf().
"""
return self._UnlockedGetSsconfValues()
@ConfigSync(shared=1)
def GetVGName(self):
"""Return the volume group name.
"""
return self._ConfigData().cluster.volume_group_name
@ConfigSync()
def SetVGName(self, vg_name):
"""Set the volume group name.
"""
self._ConfigData().cluster.volume_group_name = vg_name
self._ConfigData().cluster.serial_no += 1
@ConfigSync(shared=1)
def GetDRBDHelper(self):
"""Return DRBD usermode helper.
"""
return self._ConfigData().cluster.drbd_usermode_helper
@ConfigSync()
def SetDRBDHelper(self, drbd_helper):
"""Set DRBD usermode helper.
"""
self._ConfigData().cluster.drbd_usermode_helper = drbd_helper
self._ConfigData().cluster.serial_no += 1
@ConfigSync(shared=1)
def GetMACPrefix(self):
"""Return the mac prefix.
"""
return self._ConfigData().cluster.mac_prefix
@ConfigSync(shared=1)
def GetClusterInfo(self):
"""Returns information about the cluster
@rtype: L{objects.Cluster}
@return: the cluster object
"""
return self._ConfigData().cluster
@ConfigSync(shared=1)
def DisksOfType(self, dev_type):
"""Check if in there is at disk of the given type in the configuration.
"""
return self._ConfigData().DisksOfType(dev_type)
@ConfigSync(shared=1)
def GetDetachedConfig(self):
"""Returns a detached version of a ConfigManager, which represents
a read-only snapshot of the configuration at this particular time.
"""
return DetachedConfig(self._ConfigData())
def Update(self, target, feedback_fn, ec_id=None):
"""Notify function to be called after updates.
This function must be called when an object (as returned by
GetInstanceInfo, GetNodeInfo, GetCluster) has been updated and the
caller wants the modifications saved to the backing store. Note
that all modified objects will be saved, but the target argument
is the one the caller wants to ensure that it's saved.
@param target: an instance of either L{objects.Cluster},
L{objects.Node} or L{objects.Instance} which is existing in
the cluster
@param feedback_fn: Callable feedback function
"""
update_function = None
if isinstance(target, objects.Cluster):
if self._offline:
self.UpdateOfflineCluster(target, feedback_fn)
return
else:
update_function = self._wconfd.UpdateCluster
elif isinstance(target, objects.Node):
update_function = self._wconfd.UpdateNode
elif isinstance(target, objects.Instance):
update_function = self._wconfd.UpdateInstance
elif isinstance(target, objects.NodeGroup):
update_function = self._wconfd.UpdateNodeGroup
elif isinstance(target, objects.Network):
update_function = self._wconfd.UpdateNetwork
elif isinstance(target, objects.Disk):
update_function = self._wconfd.UpdateDisk
else:
raise errors.ProgrammerError("Invalid object type (%s) passed to"
" ConfigWriter.Update" % type(target))
def WithRetry():
result = update_function(target.ToDict())
self.OutDate()
if result is None:
raise utils.RetryAgain()
else:
return result
vals = utils.Retry(WithRetry, 0.1, 30)
self.OutDate()
target.serial_no = vals[0]
target.mtime = float(vals[1])
if ec_id is not None:
# Commit all ips reserved by OpInstanceSetParams and OpGroupSetParams
# FIXME: After RemoveInstance is moved to WConfd, use its internal
# functions from TempRes module.
self.CommitTemporaryIps(ec_id)
# Just verify the configuration with our feedback function.
# It will get written automatically by the decorator.
self.VerifyConfigAndLog(feedback_fn=feedback_fn)
@ConfigSync()
def UpdateOfflineCluster(self, target, feedback_fn):
self._ConfigData().cluster = target
target.serial_no += 1
target.mtime = time.time()
self.VerifyConfigAndLog(feedback_fn=feedback_fn)
def _UnlockedDropECReservations(self, _ec_id):
"""Drop per-execution-context reservations
"""
# FIXME: Remove the following two lines after all reservations are moved to
# wconfd.
for rm in self._all_rms:
rm.DropECReservations(_ec_id)
if not self._offline:
self._wconfd.DropAllReservations(self._GetWConfdContext())
def DropECReservations(self, ec_id):
self._UnlockedDropECReservations(ec_id)
@ConfigSync(shared=1)
def GetAllNetworksInfo(self):
"""Get configuration info of all the networks.
"""
return dict(self._ConfigData().networks)
def _UnlockedGetNetworkList(self):
"""Get the list of networks.
This function is for internal use, when the config lock is already held.
"""
return list(self._ConfigData().networks)
@ConfigSync(shared=1)
def GetNetworkList(self):
"""Get the list of networks.
@return: array of networks, ex. ["main", "vlan100", "200]
"""
return self._UnlockedGetNetworkList()
@ConfigSync(shared=1)
def GetNetworkNames(self):
"""Get a list of network names
"""
names = [net.name
for net in self._ConfigData().networks.values()]
return names
def _UnlockedGetNetwork(self, uuid):
"""Returns information about a network.
This function is for internal use, when the config lock is already held.
"""
if uuid not in self._ConfigData().networks:
return None
return self._ConfigData().networks[uuid]
@ConfigSync(shared=1)
def GetNetwork(self, uuid):
"""Returns information about a network.
It takes the information from the configuration file.
@param uuid: UUID of the network
@rtype: L{objects.Network}
@return: the network object
"""
return self._UnlockedGetNetwork(uuid)
@ConfigSync()
def AddNetwork(self, net, ec_id, check_uuid=True):
"""Add a network to the configuration.
@type net: L{objects.Network}
@param net: the Network object to add
@type ec_id: string
@param ec_id: unique id for the job to use when creating a missing UUID
"""
self._UnlockedAddNetwork(net, ec_id, check_uuid)
def _UnlockedAddNetwork(self, net, ec_id, check_uuid):
"""Add a network to the configuration.
"""
logging.info("Adding network %s to configuration", net.name)
if check_uuid:
self._EnsureUUID(net, ec_id)
net.serial_no = 1
net.ctime = net.mtime = time.time()
self._ConfigData().networks[net.uuid] = net
self._ConfigData().cluster.serial_no += 1
def _UnlockedLookupNetwork(self, target):
"""Lookup a network's UUID.
@type target: string
@param target: network name or UUID
@rtype: string
@return: network UUID
@raises errors.OpPrereqError: when the target network cannot be found
"""
if target is None:
return None
if target in self._ConfigData().networks:
return target
for net in self._ConfigData().networks.values():
if net.name == target:
return net.uuid
raise errors.OpPrereqError("Network '%s' not found" % target,
errors.ECODE_NOENT)
@ConfigSync(shared=1)
def LookupNetwork(self, target):
"""Lookup a network's UUID.
This function is just a wrapper over L{_UnlockedLookupNetwork}.
@type target: string
@param target: network name or UUID
@rtype: string
@return: network UUID
"""
return self._UnlockedLookupNetwork(target)
@ConfigSync()
def RemoveNetwork(self, network_uuid):
"""Remove a network from the configuration.
@type network_uuid: string
@param network_uuid: the UUID of the network to remove
"""
logging.info("Removing network %s from configuration", network_uuid)
if network_uuid not in self._ConfigData().networks:
raise errors.ConfigurationError("Unknown network '%s'" % network_uuid)
del self._ConfigData().networks[network_uuid]
self._ConfigData().cluster.serial_no += 1
def _UnlockedGetGroupNetParams(self, net_uuid, node_uuid):
"""Get the netparams (mode, link) of a network.
Get a network's netparams for a given node.
@type net_uuid: string
@param net_uuid: network uuid
@type node_uuid: string
@param node_uuid: node UUID
@rtype: dict or None
@return: netparams
"""
node_info = self._UnlockedGetNodeInfo(node_uuid)
nodegroup_info = self._UnlockedGetNodeGroup(node_info.group)
netparams = nodegroup_info.networks.get(net_uuid, None)
return netparams
@ConfigSync(shared=1)
def GetGroupNetParams(self, net_uuid, node_uuid):
"""Locking wrapper of _UnlockedGetGroupNetParams()
"""
return self._UnlockedGetGroupNetParams(net_uuid, node_uuid)
@ConfigSync(shared=1)
def CheckIPInNodeGroup(self, ip, node_uuid):
"""Check IP uniqueness in nodegroup.
Check networks that are connected in the node's node group
if ip is contained in any of them. Used when creating/adding
a NIC to ensure uniqueness among nodegroups.
@type ip: string
@param ip: ip address
@type node_uuid: string
@param node_uuid: node UUID
@rtype: (string, dict) or (None, None)
@return: (network name, netparams)
"""
if ip is None:
return (None, None)
node_info = self._UnlockedGetNodeInfo(node_uuid)
nodegroup_info = self._UnlockedGetNodeGroup(node_info.group)
for net_uuid in nodegroup_info.networks:
net_info = self._UnlockedGetNetwork(net_uuid)
pool = network.AddressPool(net_info)
if pool.Contains(ip):
return (net_info.name, nodegroup_info.networks[net_uuid])
return (None, None)
@ConfigSync(shared=1)
def GetCandidateCerts(self):
"""Returns the candidate certificate map.
"""
return self._ConfigData().cluster.candidate_certs
@ConfigSync()
def SetCandidateCerts(self, certs):
"""Replaces the master candidate cert list with the new values.
@type certs: dict of string to string
@param certs: map of node UUIDs to SSL client certificate digests.
"""
self._ConfigData().cluster.candidate_certs = certs
@ConfigSync()
def AddNodeToCandidateCerts(self, node_uuid, cert_digest,
info_fn=logging.info, warn_fn=logging.warn):
"""Adds an entry to the candidate certificate map.
@type node_uuid: string
@param node_uuid: the node's UUID
@type cert_digest: string
@param cert_digest: the digest of the node's client SSL certificate
@type info_fn: function
@param info_fn: logging function for information messages
@type warn_fn: function
@param warn_fn: logging function for warning messages
"""
cluster = self._ConfigData().cluster
if node_uuid in cluster.candidate_certs:
old_cert_digest = cluster.candidate_certs[node_uuid]
if old_cert_digest == cert_digest:
if info_fn is not None:
info_fn("Certificate digest for node %s already in config."
"Not doing anything." % node_uuid)
return
else:
if warn_fn is not None:
warn_fn("Overriding differing certificate digest for node %s"
% node_uuid)
cluster.candidate_certs[node_uuid] = cert_digest
@ConfigSync()
def RemoveNodeFromCandidateCerts(self, node_uuid,
warn_fn=logging.warn):
"""Removes the entry of the given node in the certificate map.
@type node_uuid: string
@param node_uuid: the node's UUID
@type warn_fn: function
@param warn_fn: logging function for warning messages
"""
cluster = self._ConfigData().cluster
if node_uuid not in cluster.candidate_certs:
if warn_fn is not None:
warn_fn("Cannot remove certifcate for node %s, because it's not"
" in the candidate map." % node_uuid)
return
del cluster.candidate_certs[node_uuid]
def FlushConfig(self):
"""Force the distribution of configuration to master candidates.
It is not necessary to hold a lock for this operation, it is handled
internally by WConfd.
"""
if not self._offline:
self._wconfd.FlushConfig()
def FlushConfigGroup(self, uuid):
"""Force the distribution of configuration to master candidates of a group.
It is not necessary to hold a lock for this operation, it is handled
internally by WConfd.
"""
if not self._offline:
self._wconfd.FlushConfigGroup(uuid)
@ConfigSync(shared=1)
def GetAllDiskInfo(self):
"""Get the configuration of all disks.
@rtype: dict
@return: dict of (disk, disk_info), where disk_info is what
would GetDiskInfo return for disk
"""
return self._UnlockedGetAllDiskInfo()
def _UnlockedGetAllDiskInfo(self):
return dict((disk_uuid, self._UnlockedGetDiskInfo(disk_uuid))
for disk_uuid in self._UnlockedGetDiskList())
@ConfigSync(shared=1)
def GetInstanceForDisk(self, disk_uuid):
"""Returns the instance the disk is currently attached to.
@type disk_uuid: string
@param disk_uuid: the identifier of the disk in question.
@rtype: string
@return: uuid of instance the disk is attached to.
"""
for inst_uuid, inst_info in self._UnlockedGetAllInstancesInfo().items():
if disk_uuid in inst_info.disks:
return inst_uuid
class DetachedConfig(ConfigWriter):
"""Read-only snapshot of the config."""
def __init__(self, config_data):
super(DetachedConfig, self).__init__(self, offline=True)
self._SetConfigData(config_data)
@staticmethod
def _WriteCallError():
raise errors.ProgrammerError("DetachedConfig supports only read-only"
" operations")
def _OpenConfig(self, shared, force=None):
if not shared:
DetachedConfig._WriteCallError()
def _CloseConfig(self, save):
if save:
DetachedConfig._WriteCallError()
| bsd-2-clause | -9,024,758,844,000,849,000 | 31.696176 | 80 | 0.659278 | false |
geodynamics/pylith | tests/pytests/utils/__init__.py | 1 | 1085 | from .TestCollectVersionInfo import TestCollectVersionInfo
from .TestConstants import TestConstants
from .TestEmptyBin import TestEmptyBin
from .TestNullComponent import TestNullComponent
from .TestDumpParameters import TestDumpParameters
from .TestDumpParametersAscii import TestDumpParametersAscii
from .TestDumpParametersJson import TestDumpParametersJson
from .TestEventLogger import TestEventLogger
from .TestPetscManager import TestPetscManager
from .TestDependenciesVersion import TestDependenciesVersion
from .TestPetscVersion import TestPetscVersion
from .TestPylithVersion import TestPylithVersion
from .TestProfiling import TestProfiling
def test_classes():
classes = [
TestCollectVersionInfo,
TestConstants,
TestEmptyBin,
TestNullComponent,
TestDumpParameters,
TestDumpParametersAscii,
TestDumpParametersJson,
TestEventLogger,
TestPetscManager,
TestDependenciesVersion,
TestPetscVersion,
TestPylithVersion,
TestProfiling,
]
return classes
# End of file
| mit | 812,771,477,899,451,100 | 30 | 60 | 0.787097 | false |
javiercantero/streamlink | src/streamlink/plugins/viasat.py | 1 | 4387 | import re
from streamlink import NoStreamsError
from streamlink.exceptions import PluginError
from streamlink.plugin import Plugin
from streamlink.plugin.api import StreamMapper, http, validate
from streamlink.stream import HDSStream, HLSStream, RTMPStream
from streamlink.utils import rtmpparse
STREAM_API_URL = "https://playapi.mtgx.tv/v3/videos/stream/{0}"
_swf_url_re = re.compile(r"data-flashplayer-url=\"([^\"]+)\"")
_player_data_re = re.compile(r"window.fluxData\s*=\s*JSON.parse\(\"(.+)\"\);")
_stream_schema = validate.Schema(
validate.any(
None,
validate.all({"msg": validate.text}),
validate.all({
"streams": validate.all(
{validate.text: validate.any(validate.text, int, None)},
validate.filter(lambda k, v: isinstance(v, validate.text))
)
}, validate.get("streams"))
)
)
class Viasat(Plugin):
"""Streamlink Plugin for Viasat"""
_iframe_re = re.compile(r"""<iframe.+src=["'](?P<url>[^"']+)["'].+allowfullscreen""")
_image_re = re.compile(r"""<meta\sproperty=["']og:image["']\scontent=".+/(?P<stream_id>\d+)/[^/]+\.jpg""")
_url_re = re.compile(r"""https?://(?:www\.)?
(?:
juicyplay\.dk
|
play\.nova\.bg
|
(?:tvplay\.)?
skaties\.lv
|
(?:(?:tv3)?play\.)?
tv3\.(?:dk|ee|lt)
|
tv6play\.no
|
viafree\.(?:dk|no|se)
)
/(?:
(?:
.+/
|
embed\?id=
)
(?P<stream_id>\d+)
)?
""", re.VERBOSE)
@classmethod
def can_handle_url(cls, url):
return cls._url_re.match(url)
def _get_swf_url(self):
res = http.get(self.url)
match = _swf_url_re.search(res.text)
if not match:
raise PluginError("Unable to find SWF URL in the HTML")
return match.group(1)
def _create_dynamic_streams(self, stream_type, parser, video):
try:
streams = parser(self.session, video[1])
return streams.items()
except IOError as err:
self.logger.error("Failed to extract {0} streams: {1}", stream_type, err)
def _create_rtmp_stream(self, video):
name, stream_url = video
params = {
"rtmp": stream_url,
"pageUrl": self.url,
"swfVfy": self._get_swf_url(),
}
if stream_url.endswith(".mp4"):
tcurl, playpath = rtmpparse(stream_url)
params["rtmp"] = tcurl
params["playpath"] = playpath
else:
params["live"] = True
return name, RTMPStream(self.session, params)
def _extract_streams(self, stream_id):
res = http.get(STREAM_API_URL.format(stream_id), raise_for_status=False)
stream_info = http.json(res, schema=_stream_schema)
if stream_info.get("msg"):
# error message
self.logger.error(stream_info.get("msg"))
raise NoStreamsError(self.url)
mapper = StreamMapper(lambda pattern, video: re.search(pattern, video[1]))
mapper.map(
r"/\w+\.m3u8",
self._create_dynamic_streams,
"HLS", HLSStream.parse_variant_playlist
)
mapper.map(
r"/\w+\.f4m",
self._create_dynamic_streams,
"HDS", HDSStream.parse_manifest
)
mapper.map(r"^rtmp://", self._create_rtmp_stream)
return mapper(stream_info.items())
def _get_stream_id(self, text):
"""Try to find a stream_id"""
m = self._image_re.search(text)
if m:
return m.group("stream_id")
def _get_iframe(self, text):
"""Fallback if no stream_id was found before"""
m = self._iframe_re.search(text)
if m:
return self.session.streams(m.group("url"))
def _get_streams(self):
match = self._url_re.match(self.url)
stream_id = match.group("stream_id")
if not stream_id:
text = http.get(self.url).text
stream_id = self._get_stream_id(text)
if not stream_id:
return self._get_iframe(text)
if stream_id:
return self._extract_streams(stream_id)
__plugin__ = Viasat
| bsd-2-clause | 6,951,403,913,173,260,000 | 28.641892 | 110 | 0.531115 | false |
AnthonyDiGirolamo/heliopause | sector.py | 1 | 9915 | import libtcodpy as libtcod
import math
from random import randrange
import time
import pprint
pp = pprint.PrettyPrinter(indent=4, width=200).pprint
from planet import Planet
from asteroid import Asteroid
class Sector:
    """A sector of space holding planets, asteroids and transient particles.

    Tracks which body (planet or asteroid) is currently selected and the
    bounds of the slice of sector space that is visible on screen.
    """

    def __init__(self, screen_width, screen_height, buffer, background=libtcod.Color(0,0,0)):
        # Cached constant used by the bearing math throughout the class.
        self.twopi = 2 * math.pi
        # Rendering state: background color and the console buffer drawn into.
        self.background = background
        self.buffer = buffer
        self.screen_width = screen_width
        self.screen_height = screen_height
        # Bounds of the visible window of sector space; refreshed by
        # update_visibility() as the player moves.
        self.visible_space_left = 0
        self.visible_space_top = 0
        self.visible_space_right = 0
        self.visible_space_bottom = 0
        # Contents of the sector.
        self.planets = []
        self.asteroids = []
        self.particles = []
        # Selection state: index into self.planets / self.asteroids, or None.
        self.selected_planet = None
        self.selected_asteroid = None
        self.selected_blink = 0
def mirror_y_coordinate(self, y):
return (self.screen_height- 1 - y)
def add_planet(self, **keyword_args):
self.planets.append(Planet(sector=self, **keyword_args))
self.planet_distances = [None for p in self.planets]
return [self.planets[-1].icon, self.planets[-1].icon_color, len(self.planets)]
def add_asteroid(self, **keyword_args):
self.asteroids.append(Asteroid(sector=self, **keyword_args))
self.asteroid_distances = [None for p in self.asteroids]
return [self.asteroids[-1].icon, self.asteroids[-1].icon_color, len(self.asteroids)]
def update_visibility(self, player_sector_position_x, player_sector_position_y):
self.visible_space_left = player_sector_position_x - self.screen_width/2
self.visible_space_top = player_sector_position_y + self.screen_height/2
self.visible_space_right = self.visible_space_left + self.screen_width
self.visible_space_bottom = self.visible_space_top - self.screen_height
    def clear_selected_planet(self):
        # Deselect the currently selected planet.
        # NOTE(review): selected_asteroid is left untouched here -- confirm
        # that callers reset it separately when appropriate.
        self.selected_planet = None
def distance_from_center(self, ship):
return math.sqrt(ship.sector_position_x**2 + ship.sector_position_y**2)
def update_selected_planet_distance(self, ship):
planet = self.get_selected_planet()
if self.selected_planet is not None:
self.planet_distances[self.selected_planet] = math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0)
elif self.selected_asteroid is not None:
self.asteroid_distances[self.selected_asteroid] = math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0)
newx = planet.sector_position_x - ship.sector_position_x
newy = planet.sector_position_y - ship.sector_position_y
try:
self.selected_planet_angle = math.atan(newy / newx)
except:
self.selected_planet_angle = 0.0
if newx > 0.0 and newy < 0.0:
self.selected_planet_angle += self.twopi
elif newx < 0.0:
self.selected_planet_angle += math.pi
def get_selected_planet(self):
if self.selected_planet is not None:
return self.planets[self.selected_planet]
elif self.selected_asteroid is not None:
return self.asteroids[self.selected_asteroid]
def selected_planet_distance(self):
if self.selected_planet is not None:
return self.planet_distances[self.selected_planet]
elif self.selected_asteroid is not None:
return self.asteroid_distances[self.selected_asteroid]
def update_all_planet_distances(self, ship):
self.planet_distances = [ math.sqrt((ship.sector_position_x - planet.sector_position_x)**2.0 + (ship.sector_position_y - planet.sector_position_y)**2.0) for planet in self.planets]
self.asteroid_distances = [ math.sqrt((ship.sector_position_x - asteroid.sector_position_x)**2.0 + (ship.sector_position_y - asteroid.sector_position_y)**2.0) for asteroid in self.asteroids]
def closest_planet(self, ship):
self.update_all_planet_distances(ship)
nearest_planet_index = 0
smallest_distance = None
for index, distance in enumerate(self.planet_distances):
if smallest_distance is None or distance < smallest_distance:
nearest_planet_index = index
smallest_distance = distance
return [nearest_planet_index, smallest_distance]
def closest_asteroid(self, ship):
self.update_all_planet_distances(ship)
nearest_asteroid_index = 0
smallest_distance = None
for index, distance in enumerate(self.asteroid_distances):
if smallest_distance is None or distance < smallest_distance:
nearest_asteroid_index = index
smallest_distance = distance
return [nearest_asteroid_index, smallest_distance]
def land_at_closest_planet(self, ship):
landed = False
message = None
index, distance = self.closest_planet(ship)
planet = self.planets[index]
if distance < 1.25*(planet.width/2.0):
for p in self.planets:
p.selected = False
planet.selected = True
if ship.velocity > 0.20:
message = "You are moving to fast to land.".format(distance)
else:
landed = True
planet.render_detail()
else:
message = "There isn't a planet in landing range."
if landed:
ship.velocity = 0.0
return [landed, message, index]
def add_particle(self, particle):
self.particles.append( particle )
def update_particle_positions(self):
for p in self.particles:
p.update_position()
def scroll_particles(self, heading=0.0, velocity=0.0):
deltax = math.cos(heading) * velocity * -1
deltay = math.sin(heading) * velocity * -1
# remove particles which have faded
self.particles = [p for p in self.particles if p.on_screen]
for particle in self.particles:
if particle.on_screen:
particle.x += deltax * 1.0
particle.y += deltay * 1.0
particle.index -= 1
if particle.index < 0:
particle.index = 0
particle.on_screen = False
def draw_minimap(self, buffer, width, height, ship):
zoom = 1.0
distance = 1000.0
zoom = float(int(distance + max([ abs((ship.sector_position_x)), abs(ship.sector_position_y) ])) / int(distance))
buffer.clear(self.background[0], self.background[1], self.background[2])
size = int((width-3) / 2.0)
size_reduction = (zoom*distance)/size
for index, p in enumerate(self.asteroids + self.planets):
x = size + 1 + int(p.sector_position_x / (size_reduction))
y = size + 1 - int(p.sector_position_y / (size_reduction))
if 0 < x < width-1 and 0 < y < height-1:
buffer.set(x, y, 0, 0, 0, p.icon_color[0], p.icon_color[1], p.icon_color[2], p.icon)
if self.selected_planet is not None:
x = size + 1 + int(self.planets[self.selected_planet].sector_position_x / (size_reduction))
y = size + 1 - int(self.planets[self.selected_planet].sector_position_y / (size_reduction))
t = time.clock()
if t > self.selected_blink + 0.5:
if t > self.selected_blink + 1.0:
self.selected_blink = t
buffer.set(x+1, y, 0, 0, 0, 0, 255, 0, 175)
buffer.set(x-1, y, 0, 0, 0, 0, 255, 0, 174)
x = size + 1 + int(ship.sector_position_x / (size_reduction))
y = size + 1 - int(ship.sector_position_y / (size_reduction))
if 0 < x < width-1 and 0 < y < height-1:
buffer.set_fore(x, y, 255, 255, 255, ship.icon())
def cycle_planet_target(self, ship):
self.deselect_asteroid()
if self.selected_planet == None:
self.selected_planet = 0
else:
self.selected_planet += 1
if self.selected_planet == len(self.planets):
self.selected_planet = None
if self.selected_planet is not None:
for p in self.planets:
p.selected = False
self.planets[self.selected_planet].selected = True
self.update_selected_planet_distance(ship)
def deselect_planet(self):
if self.selected_planet is not None:
self.selected_planet = None
for p in self.planets:
p.selected = False
def deselect_asteroid(self):
if self.selected_asteroid is not None:
self.selected_asteroid = None
for p in self.asteroids:
p.selected = False
def cycle_target(self, ship):
self.deselect_planet()
if self.selected_asteroid == None:
self.selected_asteroid = 0
else:
self.selected_asteroid += 1
if self.selected_asteroid == len(self.asteroids):
self.selected_asteroid = None
if self.selected_asteroid is not None:
for p in self.asteroids:
p.selected = False
self.asteroids[self.selected_asteroid].selected = True
self.update_selected_planet_distance(ship)
# self.update_selected_asteroid_distance(ship)
def target_nearest_planet(self, ship):
self.deselect_asteroid()
self.selected_planet, distance = self.closest_planet(ship)
self.planets[self.selected_planet].selected = True
def target_nearest_asteroid(self, ship):
self.deselect_planet()
self.selected_asteroid, distance = self.closest_asteroid(ship)
self.asteroids[self.selected_asteroid].selected = True
| mit | -1,482,294,771,838,708,700 | 39.635246 | 198 | 0.611195 | false |
openqt/algorithms | leetcode/python/lc080-remove-duplicates-from-sorted-array-ii.py | 1 | 2142 | # coding=utf-8
import unittest
"""80. Remove Duplicates from Sorted Array II
https://leetcode.com/problems/remove-duplicates-from-sorted-array-ii/description/
Given a sorted array _nums_ , remove the duplicates [**in-
place**](https://en.wikipedia.org/wiki/In-place_algorithm) such that
duplicates appeared at most _twice_ and return the new length.
Do not allocate extra space for another array, you must do this by **modifying
the input array[in-place](https://en.wikipedia.org/wiki/In-place_algorithm)**
with O(1) extra memory.
**Example 1:**
Given _nums_ = **[1,1,1,2,2,3]** ,
Your function should return length = **5** , with the first five elements of _nums_ being **1, 1, 2, 2** and **3** respectively.
It doesn 't matter what you leave beyond the returned length.
**Example 2:**
Given _nums_ = **[0,0,1,1,1,1,2,3,3]** ,
Your function should return length = **7** , with the first seven elements of _nums_ being modified to **0** , **0** , **1** , **1** , **2** , **3** and **3** respectively.
It doesn 't matter what values are set beyond the returned length.
**Clarification:**
Confused why the returned value is an integer but your answer is an array?
Note that the input array is passed in by **reference** , which means
modification to the input array will be known to the caller as well.
Internally you can think of this:
// **nums** is passed in by reference. (i.e., without making a copy)
int len = removeDuplicates(nums);
// any modification to **nums** in your function would be known by the caller.
// using the length returned by your function, it prints the first **len** elements.
for (int i = 0; i < len; i++) {
print(nums[i]);
}
Similar Questions:
Remove Duplicates from Sorted Array (remove-duplicates-from-sorted-array)
"""
class Solution(object):
def removeDuplicates(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
def test(self):
pass
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | 1,719,257,980,137,989,000 | 27.226667 | 180 | 0.630719 | false |
diogocs1/simuladormips | lib/simulador.py | 1 | 3312 | # -*- encoding: UTF-8 -*-
from controle import UC
from lib.memoria import Mem_instrucoes, Mem_dados
from lib.registradores import Banco
from lib.operacoes import ULA
from lib.instrucoes import Instrucao_R_I
class Sistema (object):
def __init__(self):
self.__PC = 0
self.__UC = UC()
self.__ULA = ULA()
self.__memoriaInstrucao = Mem_instrucoes()
self.__memoriaDados = Mem_dados()
self.__bancoDeRegistradores = Banco()
def executaInstrucao (self):
instrucao = self.__memoriaInstrucao.getInstrucao(self.__PC)
valores = self.decodifica(instrucao)
if valores:
self.__ULA.opera(self.__UC, instrucao, valores)
self.incrementaPC()
def decodifica (self, instrucao):
'''
Função: decodifica(instrucao)
Descrição: Localiza e retorna valores de registradores e variáveis
'''
# Verifica se o PC aponta para um Label
if type(instrucao) is str:
return None
self.__UC.decodifica(instrucao)
if type(instrucao) is Instrucao_R_I:
resultado = instrucao.getResultado()
valor2 = None
# buscando o primeiro registrador
resultado = self.__bancoDeRegistradores.getRegistrador(nome=resultado)
# buscando operando 1
valor1 = instrucao.getValor1()
print valor1
if self.__bancoDeRegistradores.getRegistrador(nome=valor1):
valor1 = self.__bancoDeRegistradores.getRegistrador(nome=valor1).getValor()
elif self.__memoriaDados.getDado(nome=valor1):
valor1 = self.__memoriaDados.getDado(valor1).getValor()
# buscando operando 2
if instrucao.getValor2():
valor2 = instrucao.getValor2()
if self.__bancoDeRegistradores.getRegistrador(nome=valor2):
valor2 = self.__bancoDeRegistradores.getRegistrador(nome=valor2).getValor()
elif self.__memoriaDados.getDado(nome=valor2):
valor2 = self.__memoriaDados.getDado(valor2).getValor()
return [resultado, valor1, valor2]
else:
endereco = instrucao.getEndereco()
fila_de_inst = self.__memoriaInstrucao.getDados()
for inst in fila_de_inst:
if inst == endereco:
self.__PC = fila_de_inst.index(inst)
return None
return None
def getPC(self):
return self.__PC
def getProximaInstrucao(self):
try:
return self.__memoriaInstrucao.getInstrucao(self.__PC)
except:
return "Fim do programa!"
def setPC (self, indice):
self.__PC = indice
def incrementaPC(self):
self.__PC += 1
def getIR (self):
return self.__IR
def getMDR (self):
return self.__MDR
def getA (self):
return self.__A
def getB (self):
return self.__B
def getULA (self):
return self.__ULA
def getUC(self):
return self.__UC
def getMemoriaInstrucao(self):
return self.__memoriaInstrucao
def getMemoriaDados(self):
return self.__memoriaDados
def getBanco (self):
return self.__bancoDeRegistradores
| gpl-2.0 | 6,597,753,176,257,532,000 | 35.351648 | 95 | 0.588449 | false |
dmkelly/Django-Location-Form-Field | fields.py | 1 | 4316 | from django import forms
class LocationWidget(forms.widgets.Widget):
"""Forms widget to represent a location.
Uses Google Maps API to represent a location on a map with a marker.
"""
def __init__(self, *args, **kwargs):
super(LocationWidget, self).__init__(*args, **kwargs)
def render(self, name, value, attrs):
if not value:
lat, lon = (0,0,)
else:
lat, lon = value.split(',')
html = []
if attrs.get('help_text') is not None:
html.append('<p>' + attrs['help_text'] + '</p>')
html.append("""<div id="map" style="height:%(height)s;width:%(width)s;">
<noscript>This page requires JavaScript.</noscript>
</div>
<input id="gmap_loc_%(name)s" type="hidden" name="%(name)s" value="%(value)s" />
<script type="text/javascript">
function initialize_map() {
if(typeof(google) == 'undefined') {
document.getElementById('map').innerHTML = 'Google API not found';
return;
}
var options = {
center: new google.maps.LatLng(%(lat)s, %(lon)s),
zoom: 13,
mapTypeId: google.maps.MapTypeId.ROADMAP
};
%(name)s_map = new google.maps.Map(document.getElementById('map'),
options);
var marker = new google.maps.Marker({
position: %(name)s_map.getCenter(),
draggable: true,
animation: google.maps.Animation.DROP,
map: %(name)s_map,
title: '%(marker_text)s'
});
google.maps.event.addListener(marker, 'position_changed', function() {
var valInput=document.getElementById('gmap_loc_%(name)s');
valInput.value = marker.getPosition().lat()+','+marker.getPosition().lng();
});
google.maps.event.addListener(%(name)s_map, 'resize', function() {
%(name)s_map.setCenter(%(name)s_map.getCenter());
});
}
initialize_map();
</script>
""" % {'name': name, 'value':value,
'height':self.attrs.get('height', '400px'),
'width':self.attrs.get('width', '400px'),
'lat': lat, 'lon': lon,
'marker_text':self.attrs.get('marker_text', 'Drag the marker to the desired location')})
return ''.join(html)
class LocationField(forms.Field):
"""This form field is used to obtain a latitude and longitude coordinate
from a Google Map.
"""
widget = LocationWidget
def __init__(self, *args, **kwargs):
super(LocationField, self).__init__(*args, **kwargs)
def to_python(self, value):
if not value:
return None
else:
return {'latitude': self.__parse_latitude(value),
'longitude': self.__parse_longitude(value)}
def __to_micro_coordinate(self, coord):
"""Only works on cleaned data."""
if not coord:
return None
return int(float(coord) * 1000000)
def validate(self, value):
super(LocationField, self).validate(value)
if type(value) is dict:
self.__validate_as_dict(value)
else:
self.__validate_as_dict({'latitude':self.__parse_latitude(value),
'longitude':self.__parse_longitude(value)})
def __validate_as_dict(self, value):
if not (value['latitude'] and value['longitude']):
raise forms.ValidationError('Missing at least one coordinate')
if value['latitude'] > 90.000000 or value['latitude'] < -90.000000:
raise forms.ValidationError('Latitude out of range')
if value['longitude'] > 180.000000 or value['longitude'] < -180.000000:
raise forms.ValidationError('Longitude out of range')
def __parse_latitude(self, value):
return float(value.split(',')[0])
def __parse_longitude(self, value):
try:
return float(value.split(',')[1])
except IndexError:
return None
| epl-1.0 | 6,844,412,214,279,928,000 | 39.716981 | 103 | 0.522938 | false |
ahlusar1989/flowzillow | flowzillow/client.py | 1 | 9263 | from urlparse import urljoin
import requests
from flowzillow import constants
from flowzillow.exceptions import ZillowError
def _trim_none_values(dict_):
new_dict = dict(dict_)
del_keys = []
for k, v in new_dict.iteritems():
if not v:
del_keys.append(k)
for key in del_keys:
del new_dict[key]
return new_dict
def _validate_response(response):
if response.status_code != constants.SUCCESS_CODE:
raise ZillowError(response)
class SearchClient(object):
def search(self, latlong1, latlong2, **kwargs):
"""
Search for all home listings in within a set of rectangular geocoordinates. Returns
a block of JSON with all the search results. Since this is an undocumented API not
all parameters available to this API are of known purpose
:param tuple latlong1: Geocoords of the upper left point of the rectangular search box
:param tuple latlong2: Geocoords of the lower right point of the rectangular search box
:param **kwargs:
:param spt: Seems to be "homes" by default
:param rid: Region ID. A region unique number
:param days: Number of days on market. Select "any" for any number of days
:param att: Custom keyword search.
:param sort: Sort by choice of (days/featured/priced/pricea/lot/built/size/baths/beds/zest/zesta)
:param zoom: The zoom of the map.
:param pf: Search for properties in pre-foreclosure (0/1)
:param pmf: Search for foreclosed properties (0/1)
:param laundry: In unit laundry (rentals only) (0/1)
:param parking: On site-parking (rentals only) (0/1)
:param pets: Accepts pets (rentals only) (0/1)
:param bd: Bedrooms (number plus) eg input of "1," means 1 bedroom and up
:param pr: Price (number plus) eg input of 50000 means 50000 and up
:param ba: Bathrooms (number plus)
:param sf: Square feet "<min>,<max>". If either min or max not set just leave blank but keep comma
:param lot: Lot size "<min>,<max>"
:param yr: Year build "<min>,<max>"
:param lt: List Type. A 6 digit binary number for filtering by for sale 111111 would mean search for
all for sale "By Agent", "By Owner", "Foreclosures", "New Homes", "Open Houses Only", "Coming Soon."
:param status: Status of home. A 6 digit binary number. input of 111011 means search for all houses
(Set to 1 to search "For Sale"), "Make me move", "Recently Sold", (Next bit seems unused),
"For Rent", (Set to 1 if you want to search for foreclosure properties)
:param ht: Home Type. A 6 digit binary number. 111111 means search for "Houses", "Condos",
"Apartments", "Manufactured", "Lots/Land", "Townhomes"
:param rt: ?? 6 seems to be default
:param red: ?? 0 seems to be default
:param pho: ?? 0 seems to be default
:param pnd: ?? 0 seems to be default
:param zso: ?? 0 seems to be default
:param ds: ?? "all" seems to be default
:param p: ?? 1 seems to be default
"""
params = self._make_search_params(latlong1, latlong2, **kwargs)
response = requests.get(
urljoin(constants.BASE_URL, "search/GetResults.htm"), params=params
)
_validate_response(response)
return response.json()
def _make_rect_param(self, latlong1, latlong2):
geo1 = map(lambda coord: str(coord).replace(".", ""), reversed(list(latlong1)))
geo2 = map(lambda coord: str(coord).replace(".", ""), reversed(list(latlong2)))
return ",".join(geo1 + geo2)
def _make_search_params(self, latlong1, latlong2, **kwargs):
rect = self._make_rect_param(latlong1, latlong2)
param_dict = {
"ht": constants.HOME_TYPE,
"isMapSearch": False,
"lt": constants.LISTING_TYPE,
"rect": rect,
"red": constants.RED,
"rt": constants.RT,
"search": constants.SEARCH,
"spt": constants.SPT,
"status": constants.STATUS,
"zoom": constants.ZOOM_LEVEL,
"pr": ",",
"mp": ",",
"bd": "0,",
"ba": "0,",
"sf": "0,",
"lot": "0,",
"yr": "0,",
"pho": "0,",
"pets": 0,
"parking": 0,
"laundry": 0,
"pnd": 0,
"zso": 0,
"days": constants.DAYS,
"ds": constants.DS,
"pf": constants.PF,
"pmf": constants.PMF,
"p": constants.P,
"sort": constants.SORT,
}
param_dict.update(kwargs)
return param_dict.items()
class ZillowClient(object):
def __init__(self, zws_id):
self.zws_id = zws_id
def _perform_get_request(self, path, params):
response = requests.get(urljoin(constants.ZILLOW_WEBSERVICE, path),
params=_trim_none_values(params).items())
_validate_response(response)
return response.content
def get_z_estimate(self, zpid, rent_z_estimate=None):
return self._perform_get_request(
"GetZestimate.htm",
{"zws-id": self.zws_id, "zpid": zpid, "rentzestimate": rent_z_estimate},
)
def get_search_results(self, address, city_state_zip, rent_z_estimate=None):
return self._perform_get_request(
"GetSearchResults.htm",
{"zws-id": self.zws_id,
"address": address,
"citystatezip": city_state_zip,
"rent_z_estimate": rent_z_estimate},
)
def get_chart(self, zpid, unit_type, width, height, chart_duration):
return self._perform_get_request(
"GetChart.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"unit-type": unit_type,
"width": "width",
"height": height,
"chartDuration": chart_duration}
)
def get_comps(self, zpid, count, rent_z_estimate=None):
return self._perform_get_request(
"GetComps.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"count": count,
"rentzestimate": rent_z_estimate}
)
def get_deep_comps(self, zpid, count, rent_z_estimate=None):
return self._perform_get_request(
"GetDeepComps.htm",
{"zws-id": self.zws_id,
"zpid": zpid,
"count": count,
"rentzestimate": rent_z_estimate}
)
def get_deep_search_results(self, address, city_state_zip, rent_z_estimate=None):
return self._perform_get_request(
"GetDeepSearchResults.htm",
{"zws-id": self.zws_id,
"address": address,
"citystatezip": city_state_zip,
"rent_z_estimate": rent_z_estimate}
)
def get_updated_property_details(self, zpid):
return self._perform_get_request(
"GetUpdatedPropertyDetails.htm",
{"zws-id": self.zws_id, "zpid": zpid}
)
def get_demographics(self, region_id=None, state=None, city=None, neighborhood=None, zipcode=None):
"""
Get the demographics of a specific city.
At least rid, state/city, city/neighborhood, or zipcode is required
"""
if not region_id and not (state and city) and not (city and neighborhood) and not zipcode:
raise ValueError("At least rid, state/city, city/neighborhood, or zipcode is required")
return self._perform_get_request(
"GetDemographics.htm",
{"zws-id": self.zws_id,
"regionId": region_id,
"state": state,
"city": city,
"neighborhood": neighborhood,
"zip": zipcode}
)
def get_region_children(self, region_id=None, state=None, county=None, city=None, child_type=None):
"""
Get a list of sub-regions with their relevant information
At least region_id or state is required
"""
if not region_id and not state:
raise ValueError("At least region_id or state is required")
return self._perform_get_request(
"GetRegionChildren.htm",
{"zws-id": self.zws_id,
"regionId": region_id,
"state": state,
"county": county,
"city": city,
"childtype": child_type}
)
def get_region_chart(self,
unit_type,
city=None,
state=None,
neighborhood=None,
zipcode=None,
width=None,
height=None,
chart_duration=None):
return self._perform_get_request(
"GetRegionChart.htm",
{"zws-id": self.zws_id,
"city": city,
"state": state,
"neighborhood": neighborhood,
"zip": zipcode,
"unit-type": unit_type,
"width": width,
"height": height,
"chartDuration": chart_duration}
)
| gpl-2.0 | -5,074,563,894,233,085,000 | 37.435685 | 108 | 0.555975 | false |
cortesi/qtile | test/layouts/test_max.py | 1 | 3445 | # Copyright (c) 2011 Florian Mounier
# Copyright (c) 2012, 2014-2015 Tycho Andersen
# Copyright (c) 2013 Mattias Svala
# Copyright (c) 2013 Craig Barnes
# Copyright (c) 2014 ramnes
# Copyright (c) 2014 Sean Vig
# Copyright (c) 2014 Adi Sieker
# Copyright (c) 2014 Chris Wesseling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import pytest
from libqtile import layout
import libqtile.manager
import libqtile.config
from ..conftest import no_xinerama
from .layout_utils import assertFocused, assertFocusPath
class MaxConfig(object):
auto_fullscreen = True
main = None
groups = [
libqtile.config.Group("a"),
libqtile.config.Group("b"),
libqtile.config.Group("c"),
libqtile.config.Group("d")
]
layouts = [
layout.Max()
]
floating_layout = libqtile.layout.floating.Floating()
keys = []
mouse = []
screens = []
def max_config(x):
return no_xinerama(pytest.mark.parametrize("qtile", [MaxConfig], indirect=True)(x))
@max_config
def test_max_simple(qtile):
qtile.testWindow("one")
assert qtile.c.layout.info()["clients"] == ["one"]
qtile.testWindow("two")
assert qtile.c.layout.info()["clients"] == ["one", "two"]
@max_config
def test_max_updown(qtile):
qtile.testWindow("one")
qtile.testWindow("two")
qtile.testWindow("three")
assert qtile.c.layout.info()["clients"] == ["one", "two", "three"]
qtile.c.layout.up()
assert qtile.c.groups()["a"]["focus"] == "two"
qtile.c.layout.down()
assert qtile.c.groups()["a"]["focus"] == "three"
@max_config
def test_max_remove(qtile):
qtile.testWindow("one")
two = qtile.testWindow("two")
assert qtile.c.layout.info()["clients"] == ["one", "two"]
qtile.kill_window(two)
assert qtile.c.layout.info()["clients"] == ["one"]
@max_config
def test_max_window_focus_cycle(qtile):
# setup 3 tiled and two floating clients
qtile.testWindow("one")
qtile.testWindow("two")
qtile.testWindow("float1")
qtile.c.window.toggle_floating()
qtile.testWindow("float2")
qtile.c.window.toggle_floating()
qtile.testWindow("three")
# test preconditions
assert qtile.c.layout.info()['clients'] == ['one', 'two', 'three']
# last added window has focus
assertFocused(qtile, "three")
# assert window focus cycle, according to order in layout
assertFocusPath(qtile, 'float1', 'float2', 'one', 'two', 'three')
| mit | -1,863,056,603,731,960,800 | 31.809524 | 87 | 0.692017 | false |
alephu5/Soundbyte | environment/lib/python3.3/site-packages/pandas/tseries/tests/test_period.py | 1 | 92150 | """Tests suite for Period handling.
Parts derived from scikits.timeseries code, original authors:
- Pierre Gerard-Marchant & Matt Knox
- pierregm_at_uga_dot_edu - mattknow_ca_at_hotmail_dot_com
"""
from datetime import datetime, date, timedelta
from numpy.ma.testutils import assert_equal
from pandas import Timestamp
from pandas.tseries.frequencies import MONTHS, DAYS
from pandas.tseries.period import Period, PeriodIndex, period_range
from pandas.tseries.index import DatetimeIndex, date_range, Index
from pandas.tseries.tools import to_datetime
import pandas.tseries.period as pmod
import pandas.core.datetools as datetools
import pandas as pd
import numpy as np
from numpy.random import randn
from pandas.compat import range, lrange, lmap, zip
from pandas import Series, TimeSeries, DataFrame
from pandas.util.testing import(assert_series_equal, assert_almost_equal,
assertRaisesRegexp)
import pandas.util.testing as tm
from pandas import compat
from numpy.testing import assert_array_equal
class TestPeriodProperties(tm.TestCase):
"Test properties such as year, month, weekday, etc...."
#
def test_quarterly_negative_ordinals(self):
p = Period(ordinal=-1, freq='Q-DEC')
self.assertEquals(p.year, 1969)
self.assertEquals(p.quarter, 4)
p = Period(ordinal=-2, freq='Q-DEC')
self.assertEquals(p.year, 1969)
self.assertEquals(p.quarter, 3)
p = Period(ordinal=-2, freq='M')
self.assertEquals(p.year, 1969)
self.assertEquals(p.month, 11)
def test_period_cons_quarterly(self):
# bugs in scikits.timeseries
for month in MONTHS:
freq = 'Q-%s' % month
exp = Period('1989Q3', freq=freq)
self.assert_('1989Q3' in str(exp))
stamp = exp.to_timestamp('D', how='end')
p = Period(stamp, freq=freq)
self.assertEquals(p, exp)
def test_period_cons_annual(self):
# bugs in scikits.timeseries
for month in MONTHS:
freq = 'A-%s' % month
exp = Period('1989', freq=freq)
stamp = exp.to_timestamp('D', how='end') + timedelta(days=30)
p = Period(stamp, freq=freq)
self.assertEquals(p, exp + 1)
def test_period_cons_weekly(self):
for num in range(10, 17):
daystr = '2011-02-%d' % num
for day in DAYS:
freq = 'W-%s' % day
result = Period(daystr, freq=freq)
expected = Period(daystr, freq='D').asfreq(freq)
self.assertEquals(result, expected)
def test_timestamp_tz_arg(self):
import pytz
p = Period('1/1/2005', freq='M').to_timestamp(tz='Europe/Brussels')
self.assertEqual(p.tz,
pytz.timezone('Europe/Brussels').normalize(p).tzinfo)
def test_period_constructor(self):
i1 = Period('1/1/2005', freq='M')
i2 = Period('Jan 2005')
self.assertEquals(i1, i2)
i1 = Period('2005', freq='A')
i2 = Period('2005')
i3 = Period('2005', freq='a')
self.assertEquals(i1, i2)
self.assertEquals(i1, i3)
i4 = Period('2005', freq='M')
i5 = Period('2005', freq='m')
self.assertRaises(ValueError, i1.__ne__, i4)
self.assertEquals(i4, i5)
i1 = Period.now('Q')
i2 = Period(datetime.now(), freq='Q')
i3 = Period.now('q')
self.assertEquals(i1, i2)
self.assertEquals(i1, i3)
# Biz day construction, roll forward if non-weekday
i1 = Period('3/10/12', freq='B')
i2 = Period('3/10/12', freq='D')
self.assertEquals(i1, i2.asfreq('B'))
i2 = Period('3/11/12', freq='D')
self.assertEquals(i1, i2.asfreq('B'))
i2 = Period('3/12/12', freq='D')
self.assertEquals(i1, i2.asfreq('B'))
i3 = Period('3/10/12', freq='b')
self.assertEquals(i1, i3)
i1 = Period(year=2005, quarter=1, freq='Q')
i2 = Period('1/1/2005', freq='Q')
self.assertEquals(i1, i2)
i1 = Period(year=2005, quarter=3, freq='Q')
i2 = Period('9/1/2005', freq='Q')
self.assertEquals(i1, i2)
i1 = Period(year=2005, month=3, day=1, freq='D')
i2 = Period('3/1/2005', freq='D')
self.assertEquals(i1, i2)
i3 = Period(year=2005, month=3, day=1, freq='d')
self.assertEquals(i1, i3)
i1 = Period(year=2012, month=3, day=10, freq='B')
i2 = Period('3/12/12', freq='B')
self.assertEquals(i1, i2)
i1 = Period('2005Q1')
i2 = Period(year=2005, quarter=1, freq='Q')
i3 = Period('2005q1')
self.assertEquals(i1, i2)
self.assertEquals(i1, i3)
i1 = Period('05Q1')
self.assertEquals(i1, i2)
lower = Period('05q1')
self.assertEquals(i1, lower)
i1 = Period('1Q2005')
self.assertEquals(i1, i2)
lower = Period('1q2005')
self.assertEquals(i1, lower)
i1 = Period('1Q05')
self.assertEquals(i1, i2)
lower = Period('1q05')
self.assertEquals(i1, lower)
i1 = Period('4Q1984')
self.assertEquals(i1.year, 1984)
lower = Period('4q1984')
self.assertEquals(i1, lower)
i1 = Period('1982', freq='min')
i2 = Period('1982', freq='MIN')
self.assertEquals(i1, i2)
i2 = Period('1982', freq=('Min', 1))
self.assertEquals(i1, i2)
expected = Period('2007-01', freq='M')
i1 = Period('200701', freq='M')
self.assertEqual(i1, expected)
i1 = Period('200701', freq='M')
self.assertEqual(i1, expected)
i1 = Period(200701, freq='M')
self.assertEqual(i1, expected)
i1 = Period(ordinal=200701, freq='M')
self.assertEqual(i1.year, 18695)
i1 = Period(datetime(2007, 1, 1), freq='M')
i2 = Period('200701', freq='M')
self.assertEqual(i1, i2)
i1 = Period(date(2007, 1, 1), freq='M')
i2 = Period(datetime(2007, 1, 1), freq='M')
self.assertEqual(i1, i2)
self.assertRaises(ValueError, Period, ordinal=200701)
self.assertRaises(ValueError, Period, '2007-1-1', freq='X')
def test_freq_str(self):
i1 = Period('1982', freq='Min')
self.assert_(i1.freq[0] != '1')
def test_repr(self):
p = Period('Jan-2000')
self.assert_('2000-01' in repr(p))
p = Period('2000-12-15')
self.assert_('2000-12-15' in repr(p))
def test_millisecond_repr(self):
p = Period('2000-01-01 12:15:02.123')
self.assertEquals("Period('2000-01-01 12:15:02.123', 'L')", repr(p))
def test_microsecond_repr(self):
p = Period('2000-01-01 12:15:02.123567')
self.assertEquals("Period('2000-01-01 12:15:02.123567', 'U')", repr(p))
def test_strftime(self):
p = Period('2000-1-1 12:34:12', freq='S')
res = p.strftime('%Y-%m-%d %H:%M:%S')
self.assertEqual(res, '2000-01-01 12:34:12')
tm.assert_isinstance(res, compat.text_type) # GH3363
def test_sub_delta(self):
left, right = Period('2011', freq='A'), Period('2007', freq='A')
result = left - right
self.assertEqual(result, 4)
self.assertRaises(ValueError, left.__sub__,
Period('2007-01', freq='M'))
def test_to_timestamp(self):
p = Period('1982', freq='A')
start_ts = p.to_timestamp(how='S')
aliases = ['s', 'StarT', 'BEGIn']
for a in aliases:
self.assertEquals(start_ts, p.to_timestamp('D', how=a))
end_ts = p.to_timestamp(how='E')
aliases = ['e', 'end', 'FINIsH']
for a in aliases:
self.assertEquals(end_ts, p.to_timestamp('D', how=a))
from_lst = ['A', 'Q', 'M', 'W', 'B',
'D', 'H', 'Min', 'S']
def _ex(p):
return Timestamp((p + 1).start_time.value - 1)
for i, fcode in enumerate(from_lst):
p = Period('1982', freq=fcode)
result = p.to_timestamp().to_period(fcode)
self.assertEquals(result, p)
self.assertEquals(p.start_time, p.to_timestamp(how='S'))
self.assertEquals(p.end_time, _ex(p))
# Frequency other than daily
p = Period('1985', freq='A')
result = p.to_timestamp('H', how='end')
expected = datetime(1985, 12, 31, 23)
self.assertEquals(result, expected)
result = p.to_timestamp('T', how='end')
expected = datetime(1985, 12, 31, 23, 59)
self.assertEquals(result, expected)
result = p.to_timestamp(how='end')
expected = datetime(1985, 12, 31)
self.assertEquals(result, expected)
expected = datetime(1985, 1, 1)
result = p.to_timestamp('H', how='start')
self.assertEquals(result, expected)
result = p.to_timestamp('T', how='start')
self.assertEquals(result, expected)
result = p.to_timestamp('S', how='start')
self.assertEquals(result, expected)
assertRaisesRegexp(ValueError, 'Only mult == 1', p.to_timestamp, '5t')
def test_start_time(self):
freq_lst = ['A', 'Q', 'M', 'D', 'H', 'T', 'S']
xp = datetime(2012, 1, 1)
for f in freq_lst:
p = Period('2012', freq=f)
self.assertEquals(p.start_time, xp)
self.assertEquals(Period('2012', freq='B').start_time,
datetime(2012, 1, 2))
self.assertEquals(Period('2012', freq='W').start_time,
datetime(2011, 12, 26))
def test_end_time(self):
p = Period('2012', freq='A')
def _ex(*args):
return Timestamp(Timestamp(datetime(*args)).value - 1)
xp = _ex(2013, 1, 1)
self.assertEquals(xp, p.end_time)
p = Period('2012', freq='Q')
xp = _ex(2012, 4, 1)
self.assertEquals(xp, p.end_time)
p = Period('2012', freq='M')
xp = _ex(2012, 2, 1)
self.assertEquals(xp, p.end_time)
xp = _ex(2012, 1, 2)
p = Period('2012', freq='D')
self.assertEquals(p.end_time, xp)
xp = _ex(2012, 1, 1, 1)
p = Period('2012', freq='H')
self.assertEquals(p.end_time, xp)
xp = _ex(2012, 1, 3)
self.assertEquals(Period('2012', freq='B').end_time, xp)
xp = _ex(2012, 1, 2)
self.assertEquals(Period('2012', freq='W').end_time, xp)
def test_anchor_week_end_time(self):
def _ex(*args):
return Timestamp(Timestamp(datetime(*args)).value - 1)
p = Period('2013-1-1', 'W-SAT')
xp = _ex(2013, 1, 6)
self.assertEquals(p.end_time, xp)
    def test_properties_annually(self):
        # Test properties on Periods with annually frequency.
        a_date = Period(freq='A', year=2007)
        assert_equal(a_date.year, 2007)
    def test_properties_quarterly(self):
        # Test properties on Periods with quarterly frequency.
        qedec_date = Period(freq="Q-DEC", year=2007, quarter=1)
        qejan_date = Period(freq="Q-JAN", year=2007, quarter=1)
        qejun_date = Period(freq="Q-JUN", year=2007, quarter=1)
        #
        # qyear/quarter must track arithmetic regardless of the quarter anchor.
        for x in range(3):
            for qd in (qedec_date, qejan_date, qejun_date):
                assert_equal((qd + x).qyear, 2007)
                assert_equal((qd + x).quarter, x + 1)
    def test_properties_monthly(self):
        # Test properties on Periods with monthly frequency.
        m_date = Period(freq='M', year=2007, month=1)
        # Stepping month by month: the quarter property must roll over every
        # three months while year stays fixed within 2007.
        for x in range(11):
            m_ival_x = m_date + x
            assert_equal(m_ival_x.year, 2007)
            if 1 <= x + 1 <= 3:
                assert_equal(m_ival_x.quarter, 1)
            elif 4 <= x + 1 <= 6:
                assert_equal(m_ival_x.quarter, 2)
            elif 7 <= x + 1 <= 9:
                assert_equal(m_ival_x.quarter, 3)
            elif 10 <= x + 1 <= 12:
                assert_equal(m_ival_x.quarter, 4)
            assert_equal(m_ival_x.month, x + 1)
    def test_properties_weekly(self):
        # Test properties on Periods with weekly frequency.
        w_date = Period(freq='WK', year=2007, month=1, day=7)
        #
        assert_equal(w_date.year, 2007)
        assert_equal(w_date.quarter, 1)
        assert_equal(w_date.month, 1)
        assert_equal(w_date.week, 1)
        # Stepping back one week crosses into the last week of 2006.
        assert_equal((w_date - 1).week, 52)
    def test_properties_daily(self):
        # Test properties on Periods with daily frequency.
        # Business-day period: 2007-01-01 was a Monday (weekday == 0).
        b_date = Period(freq='B', year=2007, month=1, day=1)
        #
        assert_equal(b_date.year, 2007)
        assert_equal(b_date.quarter, 1)
        assert_equal(b_date.month, 1)
        assert_equal(b_date.day, 1)
        assert_equal(b_date.weekday, 0)
        assert_equal(b_date.dayofyear, 1)
        #
        # Calendar-day period with the same date exposes identical properties.
        d_date = Period(freq='D', year=2007, month=1, day=1)
        #
        assert_equal(d_date.year, 2007)
        assert_equal(d_date.quarter, 1)
        assert_equal(d_date.month, 1)
        assert_equal(d_date.day, 1)
        assert_equal(d_date.weekday, 0)
        assert_equal(d_date.dayofyear, 1)
    def test_properties_hourly(self):
        # Test properties on Periods with hourly frequency.
        h_date = Period(freq='H', year=2007, month=1, day=1, hour=0)
        #
        assert_equal(h_date.year, 2007)
        assert_equal(h_date.quarter, 1)
        assert_equal(h_date.month, 1)
        assert_equal(h_date.day, 1)
        assert_equal(h_date.weekday, 0)
        assert_equal(h_date.dayofyear, 1)
        assert_equal(h_date.hour, 0)
        #
    def test_properties_minutely(self):
        # Test properties on Periods with minutely frequency.
        t_date = Period(freq='Min', year=2007, month=1, day=1, hour=0,
                        minute=0)
        #
        assert_equal(t_date.quarter, 1)
        assert_equal(t_date.month, 1)
        assert_equal(t_date.day, 1)
        assert_equal(t_date.weekday, 0)
        assert_equal(t_date.dayofyear, 1)
        assert_equal(t_date.hour, 0)
        assert_equal(t_date.minute, 0)
def test_properties_secondly(self):
# Test properties on Periods with secondly frequency.
s_date = Period(freq='Min', year=2007, month=1, day=1,
hour=0, minute=0, second=0)
#
assert_equal(s_date.year, 2007)
assert_equal(s_date.quarter, 1)
assert_equal(s_date.month, 1)
assert_equal(s_date.day, 1)
assert_equal(s_date.weekday, 0)
assert_equal(s_date.dayofyear, 1)
assert_equal(s_date.hour, 0)
assert_equal(s_date.minute, 0)
assert_equal(s_date.second, 0)
    def test_pnow(self):
        # pnow('D') should equal a daily Period built from "now".
        # NOTE(review): potentially flaky if the day rolls over between the
        # two calls below — confirm this is acceptable for the suite.
        dt = datetime.now()
        val = pmod.pnow('D')
        exp = Period(dt, freq='D')
        self.assertEquals(val, exp)
    def test_constructor_corner(self):
        # Invalid constructor combinations must raise ValueError:
        # multiplied frequencies, datetimes without a freq, float ordinals,
        # conflicting ordinal/value, and missing/partial field sets.
        self.assertRaises(ValueError, Period, year=2007, month=1,
                          freq='2M')
        self.assertRaises(ValueError, Period, datetime.now())
        self.assertRaises(ValueError, Period, datetime.now().date())
        self.assertRaises(ValueError, Period, 1.6, freq='D')
        self.assertRaises(ValueError, Period, ordinal=1.6, freq='D')
        self.assertRaises(ValueError, Period, ordinal=2, value=1, freq='D')
        self.assertRaises(ValueError, Period)
        self.assertRaises(ValueError, Period, month=1)
        # Building a Period from another Period converts to the new freq.
        p = Period('2007-01-01', freq='D')
        result = Period(p, freq='A')
        exp = Period('2007', freq='A')
        self.assertEquals(result, exp)
    def test_constructor_infer_freq(self):
        # With no explicit freq, the constructor infers it from the
        # resolution of the string: date -> D, hour -> H, ... down to
        # milliseconds (L) and microseconds (U).
        p = Period('2007-01-01')
        self.assert_(p.freq == 'D')
        p = Period('2007-01-01 07')
        self.assert_(p.freq == 'H')
        p = Period('2007-01-01 07:10')
        self.assert_(p.freq == 'T')
        p = Period('2007-01-01 07:10:15')
        self.assert_(p.freq == 'S')
        p = Period('2007-01-01 07:10:15.123')
        self.assert_(p.freq == 'L')
        # Trailing zeros keep millisecond resolution ...
        p = Period('2007-01-01 07:10:15.123000')
        self.assert_(p.freq == 'L')
        # ... but non-zero sub-millisecond digits promote to microseconds.
        p = Period('2007-01-01 07:10:15.123400')
        self.assert_(p.freq == 'U')
def noWrap(item):
    """Identity helper: return *item* unchanged (no wrapping applied)."""
    return item
class TestFreqConversion(tm.TestCase):
    "Test frequency conversion of date objects"

    def test_asfreq_corner(self):
        # Multiplied target frequencies (e.g. '5t') are rejected by asfreq.
        val = Period(freq='A', year=2007)
        self.assertRaises(ValueError, val.asfreq, '5t')

    def test_conv_annual(self):
        # frequency conversion tests: from Annual Frequency
        ival_A = Period(freq='A', year=2007)
        ival_AJAN = Period(freq="A-JAN", year=2007)
        ival_AJUN = Period(freq="A-JUN", year=2007)
        ival_ANOV = Period(freq="A-NOV", year=2007)
        ival_A_to_Q_start = Period(freq='Q', year=2007, quarter=1)
        ival_A_to_Q_end = Period(freq='Q', year=2007, quarter=4)
        ival_A_to_M_start = Period(freq='M', year=2007, month=1)
        ival_A_to_M_end = Period(freq='M', year=2007, month=12)
        ival_A_to_W_start = Period(freq='WK', year=2007, month=1, day=1)
        ival_A_to_W_end = Period(freq='WK', year=2007, month=12, day=31)
        ival_A_to_B_start = Period(freq='B', year=2007, month=1, day=1)
        ival_A_to_B_end = Period(freq='B', year=2007, month=12, day=31)
        ival_A_to_D_start = Period(freq='D', year=2007, month=1, day=1)
        ival_A_to_D_end = Period(freq='D', year=2007, month=12, day=31)
        ival_A_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_A_to_H_end = Period(freq='H', year=2007, month=12, day=31,
                                 hour=23)
        ival_A_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_A_to_T_end = Period(freq='Min', year=2007, month=12, day=31,
                                 hour=23, minute=59)
        ival_A_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_A_to_S_end = Period(freq='S', year=2007, month=12, day=31,
                                 hour=23, minute=59, second=59)
        # Anchored annual frequencies end the fiscal year in the anchor month.
        ival_AJAN_to_D_end = Period(freq='D', year=2007, month=1, day=31)
        ival_AJAN_to_D_start = Period(freq='D', year=2006, month=2, day=1)
        ival_AJUN_to_D_end = Period(freq='D', year=2007, month=6, day=30)
        ival_AJUN_to_D_start = Period(freq='D', year=2006, month=7, day=1)
        ival_ANOV_to_D_end = Period(freq='D', year=2007, month=11, day=30)
        ival_ANOV_to_D_start = Period(freq='D', year=2006, month=12, day=1)
        # 'how' is case-insensitive: 'S'/'s' for start, 'E'/'e' for end.
        assert_equal(ival_A.asfreq('Q', 'S'), ival_A_to_Q_start)
        assert_equal(ival_A.asfreq('Q', 'e'), ival_A_to_Q_end)
        assert_equal(ival_A.asfreq('M', 's'), ival_A_to_M_start)
        assert_equal(ival_A.asfreq('M', 'E'), ival_A_to_M_end)
        assert_equal(ival_A.asfreq('WK', 'S'), ival_A_to_W_start)
        assert_equal(ival_A.asfreq('WK', 'E'), ival_A_to_W_end)
        assert_equal(ival_A.asfreq('B', 'S'), ival_A_to_B_start)
        assert_equal(ival_A.asfreq('B', 'E'), ival_A_to_B_end)
        assert_equal(ival_A.asfreq('D', 'S'), ival_A_to_D_start)
        assert_equal(ival_A.asfreq('D', 'E'), ival_A_to_D_end)
        assert_equal(ival_A.asfreq('H', 'S'), ival_A_to_H_start)
        assert_equal(ival_A.asfreq('H', 'E'), ival_A_to_H_end)
        assert_equal(ival_A.asfreq('min', 'S'), ival_A_to_T_start)
        assert_equal(ival_A.asfreq('min', 'E'), ival_A_to_T_end)
        assert_equal(ival_A.asfreq('T', 'S'), ival_A_to_T_start)
        assert_equal(ival_A.asfreq('T', 'E'), ival_A_to_T_end)
        assert_equal(ival_A.asfreq('S', 'S'), ival_A_to_S_start)
        assert_equal(ival_A.asfreq('S', 'E'), ival_A_to_S_end)
        assert_equal(ival_AJAN.asfreq('D', 'S'), ival_AJAN_to_D_start)
        assert_equal(ival_AJAN.asfreq('D', 'E'), ival_AJAN_to_D_end)
        assert_equal(ival_AJUN.asfreq('D', 'S'), ival_AJUN_to_D_start)
        assert_equal(ival_AJUN.asfreq('D', 'E'), ival_AJUN_to_D_end)
        assert_equal(ival_ANOV.asfreq('D', 'S'), ival_ANOV_to_D_start)
        assert_equal(ival_ANOV.asfreq('D', 'E'), ival_ANOV_to_D_end)
        # Converting to the same frequency is the identity.
        assert_equal(ival_A.asfreq('A'), ival_A)

    def test_conv_quarterly(self):
        # frequency conversion tests: from Quarterly Frequency
        ival_Q = Period(freq='Q', year=2007, quarter=1)
        ival_Q_end_of_year = Period(freq='Q', year=2007, quarter=4)
        ival_QEJAN = Period(freq="Q-JAN", year=2007, quarter=1)
        ival_QEJUN = Period(freq="Q-JUN", year=2007, quarter=1)
        ival_Q_to_A = Period(freq='A', year=2007)
        ival_Q_to_M_start = Period(freq='M', year=2007, month=1)
        ival_Q_to_M_end = Period(freq='M', year=2007, month=3)
        ival_Q_to_W_start = Period(freq='WK', year=2007, month=1, day=1)
        ival_Q_to_W_end = Period(freq='WK', year=2007, month=3, day=31)
        ival_Q_to_B_start = Period(freq='B', year=2007, month=1, day=1)
        ival_Q_to_B_end = Period(freq='B', year=2007, month=3, day=30)
        ival_Q_to_D_start = Period(freq='D', year=2007, month=1, day=1)
        ival_Q_to_D_end = Period(freq='D', year=2007, month=3, day=31)
        ival_Q_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_Q_to_H_end = Period(freq='H', year=2007, month=3, day=31,
                                 hour=23)
        ival_Q_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_Q_to_T_end = Period(freq='Min', year=2007, month=3, day=31,
                                 hour=23, minute=59)
        ival_Q_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_Q_to_S_end = Period(freq='S', year=2007, month=3, day=31,
                                 hour=23, minute=59, second=59)
        # Anchored quarterly frequencies shift the quarter boundaries.
        ival_QEJAN_to_D_start = Period(freq='D', year=2006, month=2, day=1)
        ival_QEJAN_to_D_end = Period(freq='D', year=2006, month=4, day=30)
        ival_QEJUN_to_D_start = Period(freq='D', year=2006, month=7, day=1)
        ival_QEJUN_to_D_end = Period(freq='D', year=2006, month=9, day=30)
        assert_equal(ival_Q.asfreq('A'), ival_Q_to_A)
        assert_equal(ival_Q_end_of_year.asfreq('A'), ival_Q_to_A)
        assert_equal(ival_Q.asfreq('M', 'S'), ival_Q_to_M_start)
        assert_equal(ival_Q.asfreq('M', 'E'), ival_Q_to_M_end)
        assert_equal(ival_Q.asfreq('WK', 'S'), ival_Q_to_W_start)
        assert_equal(ival_Q.asfreq('WK', 'E'), ival_Q_to_W_end)
        assert_equal(ival_Q.asfreq('B', 'S'), ival_Q_to_B_start)
        assert_equal(ival_Q.asfreq('B', 'E'), ival_Q_to_B_end)
        assert_equal(ival_Q.asfreq('D', 'S'), ival_Q_to_D_start)
        assert_equal(ival_Q.asfreq('D', 'E'), ival_Q_to_D_end)
        assert_equal(ival_Q.asfreq('H', 'S'), ival_Q_to_H_start)
        assert_equal(ival_Q.asfreq('H', 'E'), ival_Q_to_H_end)
        assert_equal(ival_Q.asfreq('Min', 'S'), ival_Q_to_T_start)
        assert_equal(ival_Q.asfreq('Min', 'E'), ival_Q_to_T_end)
        assert_equal(ival_Q.asfreq('S', 'S'), ival_Q_to_S_start)
        assert_equal(ival_Q.asfreq('S', 'E'), ival_Q_to_S_end)
        assert_equal(ival_QEJAN.asfreq('D', 'S'), ival_QEJAN_to_D_start)
        assert_equal(ival_QEJAN.asfreq('D', 'E'), ival_QEJAN_to_D_end)
        assert_equal(ival_QEJUN.asfreq('D', 'S'), ival_QEJUN_to_D_start)
        assert_equal(ival_QEJUN.asfreq('D', 'E'), ival_QEJUN_to_D_end)
        # Identity conversion.
        assert_equal(ival_Q.asfreq('Q'), ival_Q)

    def test_conv_monthly(self):
        # frequency conversion tests: from Monthly Frequency
        ival_M = Period(freq='M', year=2007, month=1)
        ival_M_end_of_year = Period(freq='M', year=2007, month=12)
        ival_M_end_of_quarter = Period(freq='M', year=2007, month=3)
        ival_M_to_A = Period(freq='A', year=2007)
        ival_M_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_M_to_W_start = Period(freq='WK', year=2007, month=1, day=1)
        ival_M_to_W_end = Period(freq='WK', year=2007, month=1, day=31)
        ival_M_to_B_start = Period(freq='B', year=2007, month=1, day=1)
        ival_M_to_B_end = Period(freq='B', year=2007, month=1, day=31)
        ival_M_to_D_start = Period(freq='D', year=2007, month=1, day=1)
        ival_M_to_D_end = Period(freq='D', year=2007, month=1, day=31)
        ival_M_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_M_to_H_end = Period(freq='H', year=2007, month=1, day=31,
                                 hour=23)
        ival_M_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_M_to_T_end = Period(freq='Min', year=2007, month=1, day=31,
                                 hour=23, minute=59)
        ival_M_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_M_to_S_end = Period(freq='S', year=2007, month=1, day=31,
                                 hour=23, minute=59, second=59)
        assert_equal(ival_M.asfreq('A'), ival_M_to_A)
        assert_equal(ival_M_end_of_year.asfreq('A'), ival_M_to_A)
        assert_equal(ival_M.asfreq('Q'), ival_M_to_Q)
        assert_equal(ival_M_end_of_quarter.asfreq('Q'), ival_M_to_Q)
        assert_equal(ival_M.asfreq('WK', 'S'), ival_M_to_W_start)
        assert_equal(ival_M.asfreq('WK', 'E'), ival_M_to_W_end)
        assert_equal(ival_M.asfreq('B', 'S'), ival_M_to_B_start)
        assert_equal(ival_M.asfreq('B', 'E'), ival_M_to_B_end)
        assert_equal(ival_M.asfreq('D', 'S'), ival_M_to_D_start)
        assert_equal(ival_M.asfreq('D', 'E'), ival_M_to_D_end)
        assert_equal(ival_M.asfreq('H', 'S'), ival_M_to_H_start)
        assert_equal(ival_M.asfreq('H', 'E'), ival_M_to_H_end)
        assert_equal(ival_M.asfreq('Min', 'S'), ival_M_to_T_start)
        assert_equal(ival_M.asfreq('Min', 'E'), ival_M_to_T_end)
        assert_equal(ival_M.asfreq('S', 'S'), ival_M_to_S_start)
        assert_equal(ival_M.asfreq('S', 'E'), ival_M_to_S_end)
        # Identity conversion.
        assert_equal(ival_M.asfreq('M'), ival_M)

    def test_conv_weekly(self):
        # frequency conversion tests: from Weekly Frequency
        ival_W = Period(freq='WK', year=2007, month=1, day=1)
        # One period per weekday anchor (WK-SAT ... WK-MON).
        ival_WSUN = Period(freq='WK', year=2007, month=1, day=7)
        ival_WSAT = Period(freq='WK-SAT', year=2007, month=1, day=6)
        ival_WFRI = Period(freq='WK-FRI', year=2007, month=1, day=5)
        ival_WTHU = Period(freq='WK-THU', year=2007, month=1, day=4)
        ival_WWED = Period(freq='WK-WED', year=2007, month=1, day=3)
        ival_WTUE = Period(freq='WK-TUE', year=2007, month=1, day=2)
        ival_WMON = Period(freq='WK-MON', year=2007, month=1, day=1)
        ival_WSUN_to_D_start = Period(freq='D', year=2007, month=1, day=1)
        ival_WSUN_to_D_end = Period(freq='D', year=2007, month=1, day=7)
        ival_WSAT_to_D_start = Period(freq='D', year=2006, month=12, day=31)
        ival_WSAT_to_D_end = Period(freq='D', year=2007, month=1, day=6)
        ival_WFRI_to_D_start = Period(freq='D', year=2006, month=12, day=30)
        ival_WFRI_to_D_end = Period(freq='D', year=2007, month=1, day=5)
        ival_WTHU_to_D_start = Period(freq='D', year=2006, month=12, day=29)
        ival_WTHU_to_D_end = Period(freq='D', year=2007, month=1, day=4)
        ival_WWED_to_D_start = Period(freq='D', year=2006, month=12, day=28)
        ival_WWED_to_D_end = Period(freq='D', year=2007, month=1, day=3)
        ival_WTUE_to_D_start = Period(freq='D', year=2006, month=12, day=27)
        ival_WTUE_to_D_end = Period(freq='D', year=2007, month=1, day=2)
        ival_WMON_to_D_start = Period(freq='D', year=2006, month=12, day=26)
        ival_WMON_to_D_end = Period(freq='D', year=2007, month=1, day=1)
        ival_W_end_of_year = Period(freq='WK', year=2007, month=12, day=31)
        ival_W_end_of_quarter = Period(freq='WK', year=2007, month=3, day=31)
        ival_W_end_of_month = Period(freq='WK', year=2007, month=1, day=31)
        ival_W_to_A = Period(freq='A', year=2007)
        ival_W_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_W_to_M = Period(freq='M', year=2007, month=1)
        # A week straddling a year/quarter/month boundary belongs to the
        # later bucket unless it ends exactly on the boundary (Sunday).
        if Period(freq='D', year=2007, month=12, day=31).weekday == 6:
            ival_W_to_A_end_of_year = Period(freq='A', year=2007)
        else:
            ival_W_to_A_end_of_year = Period(freq='A', year=2008)
        if Period(freq='D', year=2007, month=3, day=31).weekday == 6:
            ival_W_to_Q_end_of_quarter = Period(freq='Q', year=2007,
                                                quarter=1)
        else:
            ival_W_to_Q_end_of_quarter = Period(freq='Q', year=2007,
                                                quarter=2)
        if Period(freq='D', year=2007, month=1, day=31).weekday == 6:
            ival_W_to_M_end_of_month = Period(freq='M', year=2007, month=1)
        else:
            ival_W_to_M_end_of_month = Period(freq='M', year=2007, month=2)
        ival_W_to_B_start = Period(freq='B', year=2007, month=1, day=1)
        ival_W_to_B_end = Period(freq='B', year=2007, month=1, day=5)
        ival_W_to_D_start = Period(freq='D', year=2007, month=1, day=1)
        ival_W_to_D_end = Period(freq='D', year=2007, month=1, day=7)
        ival_W_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_W_to_H_end = Period(freq='H', year=2007, month=1, day=7,
                                 hour=23)
        ival_W_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_W_to_T_end = Period(freq='Min', year=2007, month=1, day=7,
                                 hour=23, minute=59)
        ival_W_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_W_to_S_end = Period(freq='S', year=2007, month=1, day=7,
                                 hour=23, minute=59, second=59)
        assert_equal(ival_W.asfreq('A'), ival_W_to_A)
        assert_equal(ival_W_end_of_year.asfreq('A'),
                     ival_W_to_A_end_of_year)
        assert_equal(ival_W.asfreq('Q'), ival_W_to_Q)
        assert_equal(ival_W_end_of_quarter.asfreq('Q'),
                     ival_W_to_Q_end_of_quarter)
        assert_equal(ival_W.asfreq('M'), ival_W_to_M)
        assert_equal(ival_W_end_of_month.asfreq('M'),
                     ival_W_to_M_end_of_month)
        assert_equal(ival_W.asfreq('B', 'S'), ival_W_to_B_start)
        assert_equal(ival_W.asfreq('B', 'E'), ival_W_to_B_end)
        assert_equal(ival_W.asfreq('D', 'S'), ival_W_to_D_start)
        assert_equal(ival_W.asfreq('D', 'E'), ival_W_to_D_end)
        assert_equal(ival_WSUN.asfreq('D', 'S'), ival_WSUN_to_D_start)
        assert_equal(ival_WSUN.asfreq('D', 'E'), ival_WSUN_to_D_end)
        assert_equal(ival_WSAT.asfreq('D', 'S'), ival_WSAT_to_D_start)
        assert_equal(ival_WSAT.asfreq('D', 'E'), ival_WSAT_to_D_end)
        assert_equal(ival_WFRI.asfreq('D', 'S'), ival_WFRI_to_D_start)
        assert_equal(ival_WFRI.asfreq('D', 'E'), ival_WFRI_to_D_end)
        assert_equal(ival_WTHU.asfreq('D', 'S'), ival_WTHU_to_D_start)
        assert_equal(ival_WTHU.asfreq('D', 'E'), ival_WTHU_to_D_end)
        assert_equal(ival_WWED.asfreq('D', 'S'), ival_WWED_to_D_start)
        assert_equal(ival_WWED.asfreq('D', 'E'), ival_WWED_to_D_end)
        assert_equal(ival_WTUE.asfreq('D', 'S'), ival_WTUE_to_D_start)
        assert_equal(ival_WTUE.asfreq('D', 'E'), ival_WTUE_to_D_end)
        assert_equal(ival_WMON.asfreq('D', 'S'), ival_WMON_to_D_start)
        assert_equal(ival_WMON.asfreq('D', 'E'), ival_WMON_to_D_end)
        assert_equal(ival_W.asfreq('H', 'S'), ival_W_to_H_start)
        assert_equal(ival_W.asfreq('H', 'E'), ival_W_to_H_end)
        assert_equal(ival_W.asfreq('Min', 'S'), ival_W_to_T_start)
        assert_equal(ival_W.asfreq('Min', 'E'), ival_W_to_T_end)
        assert_equal(ival_W.asfreq('S', 'S'), ival_W_to_S_start)
        assert_equal(ival_W.asfreq('S', 'E'), ival_W_to_S_end)
        # Identity conversion.
        assert_equal(ival_W.asfreq('WK'), ival_W)

    def test_conv_business(self):
        # frequency conversion tests: from Business Frequency
        ival_B = Period(freq='B', year=2007, month=1, day=1)
        ival_B_end_of_year = Period(freq='B', year=2007, month=12, day=31)
        ival_B_end_of_quarter = Period(freq='B', year=2007, month=3, day=30)
        ival_B_end_of_month = Period(freq='B', year=2007, month=1, day=31)
        ival_B_end_of_week = Period(freq='B', year=2007, month=1, day=5)
        ival_B_to_A = Period(freq='A', year=2007)
        ival_B_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_B_to_M = Period(freq='M', year=2007, month=1)
        ival_B_to_W = Period(freq='WK', year=2007, month=1, day=7)
        ival_B_to_D = Period(freq='D', year=2007, month=1, day=1)
        ival_B_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_B_to_H_end = Period(freq='H', year=2007, month=1, day=1,
                                 hour=23)
        ival_B_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_B_to_T_end = Period(freq='Min', year=2007, month=1, day=1,
                                 hour=23, minute=59)
        ival_B_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_B_to_S_end = Period(freq='S', year=2007, month=1, day=1,
                                 hour=23, minute=59, second=59)
        assert_equal(ival_B.asfreq('A'), ival_B_to_A)
        assert_equal(ival_B_end_of_year.asfreq('A'), ival_B_to_A)
        assert_equal(ival_B.asfreq('Q'), ival_B_to_Q)
        assert_equal(ival_B_end_of_quarter.asfreq('Q'), ival_B_to_Q)
        assert_equal(ival_B.asfreq('M'), ival_B_to_M)
        assert_equal(ival_B_end_of_month.asfreq('M'), ival_B_to_M)
        assert_equal(ival_B.asfreq('WK'), ival_B_to_W)
        assert_equal(ival_B_end_of_week.asfreq('WK'), ival_B_to_W)
        assert_equal(ival_B.asfreq('D'), ival_B_to_D)
        assert_equal(ival_B.asfreq('H', 'S'), ival_B_to_H_start)
        assert_equal(ival_B.asfreq('H', 'E'), ival_B_to_H_end)
        assert_equal(ival_B.asfreq('Min', 'S'), ival_B_to_T_start)
        assert_equal(ival_B.asfreq('Min', 'E'), ival_B_to_T_end)
        assert_equal(ival_B.asfreq('S', 'S'), ival_B_to_S_start)
        assert_equal(ival_B.asfreq('S', 'E'), ival_B_to_S_end)
        # Identity conversion.
        assert_equal(ival_B.asfreq('B'), ival_B)

    def test_conv_daily(self):
        # frequency conversion tests: from Daily Frequency
        ival_D = Period(freq='D', year=2007, month=1, day=1)
        ival_D_end_of_year = Period(freq='D', year=2007, month=12, day=31)
        ival_D_end_of_quarter = Period(freq='D', year=2007, month=3, day=31)
        ival_D_end_of_month = Period(freq='D', year=2007, month=1, day=31)
        ival_D_end_of_week = Period(freq='D', year=2007, month=1, day=7)
        # 2007-01-05 was a Friday; 06/07 fall on the weekend.
        ival_D_friday = Period(freq='D', year=2007, month=1, day=5)
        ival_D_saturday = Period(freq='D', year=2007, month=1, day=6)
        ival_D_sunday = Period(freq='D', year=2007, month=1, day=7)
        ival_D_monday = Period(freq='D', year=2007, month=1, day=8)
        ival_B_friday = Period(freq='B', year=2007, month=1, day=5)
        ival_B_monday = Period(freq='B', year=2007, month=1, day=8)
        ival_D_to_A = Period(freq='A', year=2007)
        ival_Deoq_to_AJAN = Period(freq='A-JAN', year=2008)
        ival_Deoq_to_AJUN = Period(freq='A-JUN', year=2007)
        ival_Deoq_to_ADEC = Period(freq='A-DEC', year=2007)
        ival_D_to_QEJAN = Period(freq="Q-JAN", year=2007, quarter=4)
        ival_D_to_QEJUN = Period(freq="Q-JUN", year=2007, quarter=3)
        ival_D_to_QEDEC = Period(freq="Q-DEC", year=2007, quarter=1)
        ival_D_to_M = Period(freq='M', year=2007, month=1)
        ival_D_to_W = Period(freq='WK', year=2007, month=1, day=7)
        ival_D_to_H_start = Period(freq='H', year=2007, month=1, day=1,
                                   hour=0)
        ival_D_to_H_end = Period(freq='H', year=2007, month=1, day=1,
                                 hour=23)
        ival_D_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_D_to_T_end = Period(freq='Min', year=2007, month=1, day=1,
                                 hour=23, minute=59)
        ival_D_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_D_to_S_end = Period(freq='S', year=2007, month=1, day=1,
                                 hour=23, minute=59, second=59)
        assert_equal(ival_D.asfreq('A'), ival_D_to_A)
        assert_equal(ival_D_end_of_quarter.asfreq('A-JAN'),
                     ival_Deoq_to_AJAN)
        assert_equal(ival_D_end_of_quarter.asfreq('A-JUN'),
                     ival_Deoq_to_AJUN)
        assert_equal(ival_D_end_of_quarter.asfreq('A-DEC'),
                     ival_Deoq_to_ADEC)
        assert_equal(ival_D_end_of_year.asfreq('A'), ival_D_to_A)
        assert_equal(ival_D_end_of_quarter.asfreq('Q'), ival_D_to_QEDEC)
        assert_equal(ival_D.asfreq("Q-JAN"), ival_D_to_QEJAN)
        assert_equal(ival_D.asfreq("Q-JUN"), ival_D_to_QEJUN)
        assert_equal(ival_D.asfreq("Q-DEC"), ival_D_to_QEDEC)
        assert_equal(ival_D.asfreq('M'), ival_D_to_M)
        assert_equal(ival_D_end_of_month.asfreq('M'), ival_D_to_M)
        assert_equal(ival_D.asfreq('WK'), ival_D_to_W)
        assert_equal(ival_D_end_of_week.asfreq('WK'), ival_D_to_W)
        # Weekend days map to the surrounding business days depending on 'how'.
        assert_equal(ival_D_friday.asfreq('B'), ival_B_friday)
        assert_equal(ival_D_saturday.asfreq('B', 'S'), ival_B_friday)
        assert_equal(ival_D_saturday.asfreq('B', 'E'), ival_B_monday)
        assert_equal(ival_D_sunday.asfreq('B', 'S'), ival_B_friday)
        assert_equal(ival_D_sunday.asfreq('B', 'E'), ival_B_monday)
        assert_equal(ival_D.asfreq('H', 'S'), ival_D_to_H_start)
        assert_equal(ival_D.asfreq('H', 'E'), ival_D_to_H_end)
        assert_equal(ival_D.asfreq('Min', 'S'), ival_D_to_T_start)
        assert_equal(ival_D.asfreq('Min', 'E'), ival_D_to_T_end)
        assert_equal(ival_D.asfreq('S', 'S'), ival_D_to_S_start)
        assert_equal(ival_D.asfreq('S', 'E'), ival_D_to_S_end)
        # Identity conversion.
        assert_equal(ival_D.asfreq('D'), ival_D)

    def test_conv_hourly(self):
        # frequency conversion tests: from Hourly Frequency
        ival_H = Period(freq='H', year=2007, month=1, day=1, hour=0)
        ival_H_end_of_year = Period(freq='H', year=2007, month=12, day=31,
                                    hour=23)
        ival_H_end_of_quarter = Period(freq='H', year=2007, month=3, day=31,
                                       hour=23)
        ival_H_end_of_month = Period(freq='H', year=2007, month=1, day=31,
                                     hour=23)
        ival_H_end_of_week = Period(freq='H', year=2007, month=1, day=7,
                                    hour=23)
        ival_H_end_of_day = Period(freq='H', year=2007, month=1, day=1,
                                   hour=23)
        ival_H_end_of_bus = Period(freq='H', year=2007, month=1, day=1,
                                   hour=23)
        ival_H_to_A = Period(freq='A', year=2007)
        ival_H_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_H_to_M = Period(freq='M', year=2007, month=1)
        ival_H_to_W = Period(freq='WK', year=2007, month=1, day=7)
        ival_H_to_D = Period(freq='D', year=2007, month=1, day=1)
        ival_H_to_B = Period(freq='B', year=2007, month=1, day=1)
        ival_H_to_T_start = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=0, minute=0)
        ival_H_to_T_end = Period(freq='Min', year=2007, month=1, day=1,
                                 hour=0, minute=59)
        ival_H_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_H_to_S_end = Period(freq='S', year=2007, month=1, day=1,
                                 hour=0, minute=59, second=59)
        assert_equal(ival_H.asfreq('A'), ival_H_to_A)
        assert_equal(ival_H_end_of_year.asfreq('A'), ival_H_to_A)
        assert_equal(ival_H.asfreq('Q'), ival_H_to_Q)
        assert_equal(ival_H_end_of_quarter.asfreq('Q'), ival_H_to_Q)
        assert_equal(ival_H.asfreq('M'), ival_H_to_M)
        assert_equal(ival_H_end_of_month.asfreq('M'), ival_H_to_M)
        assert_equal(ival_H.asfreq('WK'), ival_H_to_W)
        assert_equal(ival_H_end_of_week.asfreq('WK'), ival_H_to_W)
        assert_equal(ival_H.asfreq('D'), ival_H_to_D)
        assert_equal(ival_H_end_of_day.asfreq('D'), ival_H_to_D)
        assert_equal(ival_H.asfreq('B'), ival_H_to_B)
        assert_equal(ival_H_end_of_bus.asfreq('B'), ival_H_to_B)
        assert_equal(ival_H.asfreq('Min', 'S'), ival_H_to_T_start)
        assert_equal(ival_H.asfreq('Min', 'E'), ival_H_to_T_end)
        assert_equal(ival_H.asfreq('S', 'S'), ival_H_to_S_start)
        assert_equal(ival_H.asfreq('S', 'E'), ival_H_to_S_end)
        # Identity conversion.
        assert_equal(ival_H.asfreq('H'), ival_H)

    def test_conv_minutely(self):
        # frequency conversion tests: from Minutely Frequency
        ival_T = Period(freq='Min', year=2007, month=1, day=1,
                        hour=0, minute=0)
        ival_T_end_of_year = Period(freq='Min', year=2007, month=12, day=31,
                                    hour=23, minute=59)
        ival_T_end_of_quarter = Period(freq='Min', year=2007, month=3, day=31,
                                       hour=23, minute=59)
        ival_T_end_of_month = Period(freq='Min', year=2007, month=1, day=31,
                                     hour=23, minute=59)
        ival_T_end_of_week = Period(freq='Min', year=2007, month=1, day=7,
                                    hour=23, minute=59)
        ival_T_end_of_day = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=23, minute=59)
        ival_T_end_of_bus = Period(freq='Min', year=2007, month=1, day=1,
                                   hour=23, minute=59)
        ival_T_end_of_hour = Period(freq='Min', year=2007, month=1, day=1,
                                    hour=0, minute=59)
        ival_T_to_A = Period(freq='A', year=2007)
        ival_T_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_T_to_M = Period(freq='M', year=2007, month=1)
        ival_T_to_W = Period(freq='WK', year=2007, month=1, day=7)
        ival_T_to_D = Period(freq='D', year=2007, month=1, day=1)
        ival_T_to_B = Period(freq='B', year=2007, month=1, day=1)
        ival_T_to_H = Period(freq='H', year=2007, month=1, day=1, hour=0)
        ival_T_to_S_start = Period(freq='S', year=2007, month=1, day=1,
                                   hour=0, minute=0, second=0)
        ival_T_to_S_end = Period(freq='S', year=2007, month=1, day=1,
                                 hour=0, minute=0, second=59)
        assert_equal(ival_T.asfreq('A'), ival_T_to_A)
        assert_equal(ival_T_end_of_year.asfreq('A'), ival_T_to_A)
        assert_equal(ival_T.asfreq('Q'), ival_T_to_Q)
        assert_equal(ival_T_end_of_quarter.asfreq('Q'), ival_T_to_Q)
        assert_equal(ival_T.asfreq('M'), ival_T_to_M)
        assert_equal(ival_T_end_of_month.asfreq('M'), ival_T_to_M)
        assert_equal(ival_T.asfreq('WK'), ival_T_to_W)
        assert_equal(ival_T_end_of_week.asfreq('WK'), ival_T_to_W)
        assert_equal(ival_T.asfreq('D'), ival_T_to_D)
        assert_equal(ival_T_end_of_day.asfreq('D'), ival_T_to_D)
        assert_equal(ival_T.asfreq('B'), ival_T_to_B)
        assert_equal(ival_T_end_of_bus.asfreq('B'), ival_T_to_B)
        assert_equal(ival_T.asfreq('H'), ival_T_to_H)
        assert_equal(ival_T_end_of_hour.asfreq('H'), ival_T_to_H)
        assert_equal(ival_T.asfreq('S', 'S'), ival_T_to_S_start)
        assert_equal(ival_T.asfreq('S', 'E'), ival_T_to_S_end)
        # Identity conversion.
        assert_equal(ival_T.asfreq('Min'), ival_T)

    def test_conv_secondly(self):
        # frequency conversion tests: from Secondly Frequency
        ival_S = Period(freq='S', year=2007, month=1, day=1,
                        hour=0, minute=0, second=0)
        ival_S_end_of_year = Period(freq='S', year=2007, month=12, day=31,
                                    hour=23, minute=59, second=59)
        ival_S_end_of_quarter = Period(freq='S', year=2007, month=3, day=31,
                                       hour=23, minute=59, second=59)
        ival_S_end_of_month = Period(freq='S', year=2007, month=1, day=31,
                                     hour=23, minute=59, second=59)
        ival_S_end_of_week = Period(freq='S', year=2007, month=1, day=7,
                                    hour=23, minute=59, second=59)
        ival_S_end_of_day = Period(freq='S', year=2007, month=1, day=1,
                                   hour=23, minute=59, second=59)
        ival_S_end_of_bus = Period(freq='S', year=2007, month=1, day=1,
                                   hour=23, minute=59, second=59)
        ival_S_end_of_hour = Period(freq='S', year=2007, month=1, day=1,
                                    hour=0, minute=59, second=59)
        ival_S_end_of_minute = Period(freq='S', year=2007, month=1, day=1,
                                      hour=0, minute=0, second=59)
        ival_S_to_A = Period(freq='A', year=2007)
        ival_S_to_Q = Period(freq='Q', year=2007, quarter=1)
        ival_S_to_M = Period(freq='M', year=2007, month=1)
        ival_S_to_W = Period(freq='WK', year=2007, month=1, day=7)
        ival_S_to_D = Period(freq='D', year=2007, month=1, day=1)
        ival_S_to_B = Period(freq='B', year=2007, month=1, day=1)
        ival_S_to_H = Period(freq='H', year=2007, month=1, day=1,
                             hour=0)
        ival_S_to_T = Period(freq='Min', year=2007, month=1, day=1,
                             hour=0, minute=0)
        assert_equal(ival_S.asfreq('A'), ival_S_to_A)
        assert_equal(ival_S_end_of_year.asfreq('A'), ival_S_to_A)
        assert_equal(ival_S.asfreq('Q'), ival_S_to_Q)
        assert_equal(ival_S_end_of_quarter.asfreq('Q'), ival_S_to_Q)
        assert_equal(ival_S.asfreq('M'), ival_S_to_M)
        assert_equal(ival_S_end_of_month.asfreq('M'), ival_S_to_M)
        assert_equal(ival_S.asfreq('WK'), ival_S_to_W)
        assert_equal(ival_S_end_of_week.asfreq('WK'), ival_S_to_W)
        assert_equal(ival_S.asfreq('D'), ival_S_to_D)
        assert_equal(ival_S_end_of_day.asfreq('D'), ival_S_to_D)
        assert_equal(ival_S.asfreq('B'), ival_S_to_B)
        assert_equal(ival_S_end_of_bus.asfreq('B'), ival_S_to_B)
        assert_equal(ival_S.asfreq('H'), ival_S_to_H)
        assert_equal(ival_S_end_of_hour.asfreq('H'), ival_S_to_H)
        assert_equal(ival_S.asfreq('Min'), ival_S_to_T)
        assert_equal(ival_S_end_of_minute.asfreq('Min'), ival_S_to_T)
        # Identity conversion.
        assert_equal(ival_S.asfreq('S'), ival_S)
class TestPeriodIndex(tm.TestCase):
    def setUp(self):
        # No shared fixtures; each test builds its own indexes.
        pass
    def test_hash_error(self):
        # A PeriodIndex is mutable and therefore must not be hashable.
        index = period_range('20010101', periods=10)
        with tm.assertRaisesRegexp(TypeError,
                                   "unhashable type: %r" %
                                   type(index).__name__):
            hash(index)
    def test_make_time_series(self):
        # A Series over a PeriodIndex should be recognized as a TimeSeries.
        index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
        series = Series(1, index=index)
        tm.assert_isinstance(series, TimeSeries)
    def test_astype(self):
        # Casting to 'i8' exposes the underlying ordinal integer values.
        idx = period_range('1990', '2009', freq='A')
        result = idx.astype('i8')
        self.assert_(np.array_equal(result, idx.values))
    def test_constructor_use_start_freq(self):
        # GH #1118: when no freq is given, inherit it from the start Period.
        p = Period('4/2/2012', freq='B')
        index = PeriodIndex(start=p, periods=10)
        expected = PeriodIndex(start='4/2/2012', periods=10, freq='B')
        self.assert_(index.equals(expected))
    def test_constructor_field_arrays(self):
        # GH #1264: construct a PeriodIndex from parallel field arrays
        # (year/quarter or year/month); mismatched lengths and multiplied
        # frequencies are rejected.
        years = np.arange(1990, 2010).repeat(4)[2:-2]
        quarters = np.tile(np.arange(1, 5), 20)[2:-2]
        index = PeriodIndex(year=years, quarter=quarters, freq='Q-DEC')
        expected = period_range('1990Q3', '2009Q2', freq='Q-DEC')
        self.assert_(index.equals(expected))
        self.assertRaises(
            ValueError, PeriodIndex, year=years, quarter=quarters,
            freq='2Q-DEC')
        # Quarterly freq is inferred when fields include 'quarter'.
        index = PeriodIndex(year=years, quarter=quarters)
        self.assert_(index.equals(expected))
        years = [2007, 2007, 2007]
        months = [1, 2]
        # Length mismatch, and mixing field arrays with start= are errors.
        self.assertRaises(ValueError, PeriodIndex, year=years, month=months,
                          freq='M')
        self.assertRaises(ValueError, PeriodIndex, year=years, month=months,
                          freq='2M')
        self.assertRaises(ValueError, PeriodIndex, year=years, month=months,
                          freq='M', start=Period('2007-01', freq='M'))
        years = [2007, 2007, 2007]
        months = [1, 2, 3]
        idx = PeriodIndex(year=years, month=months, freq='M')
        exp = period_range('2007-01', periods=3, freq='M')
        self.assert_(idx.equals(exp))
def test_constructor_U(self):
# U was used as undefined period
self.assertRaises(ValueError, period_range, '2007-1-1', periods=500,
freq='X')
def test_constructor_arrays_negative_year(self):
years = np.arange(1960, 2000).repeat(4)
quarters = np.tile(lrange(1, 5), 40)
pindex = PeriodIndex(year=years, quarter=quarters)
self.assert_(np.array_equal(pindex.year, years))
self.assert_(np.array_equal(pindex.quarter, quarters))
def test_constructor_invalid_quarters(self):
self.assertRaises(ValueError, PeriodIndex, year=lrange(2000, 2004),
quarter=lrange(4), freq='Q-DEC')
def test_constructor_corner(self):
self.assertRaises(ValueError, PeriodIndex, periods=10, freq='A')
start = Period('2007', freq='A-JUN')
end = Period('2010', freq='A-DEC')
self.assertRaises(ValueError, PeriodIndex, start=start, end=end)
self.assertRaises(ValueError, PeriodIndex, start=start)
self.assertRaises(ValueError, PeriodIndex, end=end)
result = period_range('2007-01', periods=10.5, freq='M')
exp = period_range('2007-01', periods=10, freq='M')
self.assert_(result.equals(exp))
def test_constructor_fromarraylike(self):
idx = period_range('2007-01', periods=20, freq='M')
self.assertRaises(ValueError, PeriodIndex, idx.values)
self.assertRaises(ValueError, PeriodIndex, list(idx.values))
self.assertRaises(ValueError, PeriodIndex,
data=Period('2007', freq='A'))
result = PeriodIndex(iter(idx))
self.assert_(result.equals(idx))
result = PeriodIndex(idx)
self.assert_(result.equals(idx))
result = PeriodIndex(idx, freq='M')
self.assert_(result.equals(idx))
result = PeriodIndex(idx, freq='D')
exp = idx.asfreq('D', 'e')
self.assert_(result.equals(exp))
def test_constructor_datetime64arr(self):
vals = np.arange(100000, 100000 + 10000, 100, dtype=np.int64)
vals = vals.view(np.dtype('M8[us]'))
self.assertRaises(ValueError, PeriodIndex, vals, freq='D')
def test_is_(self):
create_index = lambda: PeriodIndex(freq='A', start='1/1/2001',
end='12/1/2009')
index = create_index()
self.assertTrue(index.is_(index))
self.assertFalse(index.is_(create_index()))
self.assertTrue(index.is_(index.view()))
self.assertTrue(index.is_(index.view().view().view().view().view()))
self.assertTrue(index.view().is_(index))
ind2 = index.view()
index.name = "Apple"
self.assertTrue(ind2.is_(index))
self.assertFalse(index.is_(index[:]))
self.assertFalse(index.is_(index.asfreq('M')))
self.assertFalse(index.is_(index.asfreq('A')))
self.assertFalse(index.is_(index - 2))
self.assertFalse(index.is_(index - 0))
def test_comp_period(self):
idx = period_range('2007-01', periods=20, freq='M')
result = idx < idx[10]
exp = idx.values < idx.values[10]
self.assert_(np.array_equal(result, exp))
def test_getitem_ndim2(self):
idx = period_range('2007-01', periods=3, freq='M')
result = idx[:, None]
# MPL kludge
tm.assert_isinstance(result, PeriodIndex)
def test_getitem_partial(self):
rng = period_range('2007-01', periods=50, freq='M')
ts = Series(np.random.randn(len(rng)), rng)
self.assertRaises(KeyError, ts.__getitem__, '2006')
result = ts['2008']
self.assert_((result.index.year == 2008).all())
result = ts['2008':'2009']
self.assertEquals(len(result), 24)
result = ts['2008-1':'2009-12']
self.assertEquals(len(result), 24)
result = ts['2008Q1':'2009Q4']
self.assertEquals(len(result), 24)
result = ts[:'2009']
self.assertEquals(len(result), 36)
result = ts['2009':]
self.assertEquals(len(result), 50 - 24)
exp = result
result = ts[24:]
assert_series_equal(exp, result)
ts = ts[10:].append(ts[10:])
self.assertRaises(ValueError, ts.__getitem__, slice('2008', '2009'))
def test_getitem_datetime(self):
rng = period_range(start='2012-01-01', periods=10, freq='W-MON')
ts = Series(lrange(len(rng)), index=rng)
dt1 = datetime(2011, 10, 2)
dt4 = datetime(2012, 4, 20)
rs = ts[dt1:dt4]
assert_series_equal(rs, ts)
def test_sub(self):
rng = period_range('2007-01', periods=50)
result = rng - 5
exp = rng + (-5)
self.assert_(result.equals(exp))
    def test_periods_number_check(self):
        # Passing 'B' positionally puts it in the `periods` slot of
        # period_range; a non-integer there must raise ValueError.
        self.assertRaises(
            ValueError, period_range, '2011-1-1', '2012-1-1', 'B')
    def test_tolist(self):
        # tolist() should yield Period scalars, and the resulting list
        # should round-trip back into an equal PeriodIndex.
        index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
        rs = index.tolist()
        [tm.assert_isinstance(x, Period) for x in rs]
        recon = PeriodIndex(rs)
        self.assert_(index.equals(recon))
def test_to_timestamp(self):
index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
series = Series(1, index=index, name='foo')
exp_index = date_range('1/1/2001', end='12/31/2009', freq='A-DEC')
result = series.to_timestamp(how='end')
self.assert_(result.index.equals(exp_index))
self.assertEquals(result.name, 'foo')
exp_index = date_range('1/1/2001', end='1/1/2009', freq='AS-JAN')
result = series.to_timestamp(how='start')
self.assert_(result.index.equals(exp_index))
def _get_with_delta(delta, freq='A-DEC'):
return date_range(to_datetime('1/1/2001') + delta,
to_datetime('12/31/2009') + delta, freq=freq)
delta = timedelta(hours=23)
result = series.to_timestamp('H', 'end')
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
delta = timedelta(hours=23, minutes=59)
result = series.to_timestamp('T', 'end')
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
result = series.to_timestamp('S', 'end')
delta = timedelta(hours=23, minutes=59, seconds=59)
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
self.assertRaises(ValueError, index.to_timestamp, '5t')
index = PeriodIndex(freq='H', start='1/1/2001', end='1/2/2001')
series = Series(1, index=index, name='foo')
exp_index = date_range('1/1/2001 00:59:59', end='1/2/2001 00:59:59',
freq='H')
result = series.to_timestamp(how='end')
self.assert_(result.index.equals(exp_index))
self.assertEquals(result.name, 'foo')
def test_to_timestamp_quarterly_bug(self):
years = np.arange(1960, 2000).repeat(4)
quarters = np.tile(lrange(1, 5), 40)
pindex = PeriodIndex(year=years, quarter=quarters)
stamps = pindex.to_timestamp('D', 'end')
expected = DatetimeIndex([x.to_timestamp('D', 'end') for x in pindex])
self.assert_(stamps.equals(expected))
def test_to_timestamp_preserve_name(self):
index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009',
name='foo')
self.assertEquals(index.name, 'foo')
conv = index.to_timestamp('D')
self.assertEquals(conv.name, 'foo')
def test_to_timestamp_repr_is_code(self):
zs=[Timestamp('99-04-17 00:00:00',tz='UTC'),
Timestamp('2001-04-17 00:00:00',tz='UTC'),
Timestamp('2001-04-17 00:00:00',tz='America/Los_Angeles'),
Timestamp('2001-04-17 00:00:00',tz=None)]
for z in zs:
self.assertEquals( eval(repr(z)), z)
def test_as_frame_columns(self):
rng = period_range('1/1/2000', periods=5)
df = DataFrame(randn(10, 5), columns=rng)
ts = df[rng[0]]
assert_series_equal(ts, df.ix[:, 0])
# GH # 1211
repr(df)
ts = df['1/1/2000']
assert_series_equal(ts, df.ix[:, 0])
def test_indexing(self):
# GH 4390, iat incorrectly indexing
index = period_range('1/1/2001', periods=10)
s = Series(randn(10), index=index)
expected = s[index[0]]
result = s.iat[0]
self.assert_(expected == result)
def test_frame_setitem(self):
rng = period_range('1/1/2000', periods=5)
rng.name = 'index'
df = DataFrame(randn(5, 3), index=rng)
df['Index'] = rng
rs = Index(df['Index'])
self.assert_(rs.equals(rng))
rs = df.reset_index().set_index('index')
tm.assert_isinstance(rs.index, PeriodIndex)
self.assert_(rs.index.equals(rng))
def test_nested_dict_frame_constructor(self):
rng = period_range('1/1/2000', periods=5)
df = DataFrame(randn(10, 5), columns=rng)
data = {}
for col in df.columns:
for row in df.index:
data.setdefault(col, {})[row] = df.get_value(row, col)
result = DataFrame(data, columns=rng)
tm.assert_frame_equal(result, df)
data = {}
for col in df.columns:
for row in df.index:
data.setdefault(row, {})[col] = df.get_value(row, col)
result = DataFrame(data, index=rng).T
tm.assert_frame_equal(result, df)
def test_frame_to_time_stamp(self):
K = 5
index = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
df = DataFrame(randn(len(index), K), index=index)
df['mix'] = 'a'
exp_index = date_range('1/1/2001', end='12/31/2009', freq='A-DEC')
result = df.to_timestamp('D', 'end')
self.assert_(result.index.equals(exp_index))
assert_almost_equal(result.values, df.values)
exp_index = date_range('1/1/2001', end='1/1/2009', freq='AS-JAN')
result = df.to_timestamp('D', 'start')
self.assert_(result.index.equals(exp_index))
def _get_with_delta(delta, freq='A-DEC'):
return date_range(to_datetime('1/1/2001') + delta,
to_datetime('12/31/2009') + delta, freq=freq)
delta = timedelta(hours=23)
result = df.to_timestamp('H', 'end')
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
delta = timedelta(hours=23, minutes=59)
result = df.to_timestamp('T', 'end')
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
result = df.to_timestamp('S', 'end')
delta = timedelta(hours=23, minutes=59, seconds=59)
exp_index = _get_with_delta(delta)
self.assert_(result.index.equals(exp_index))
# columns
df = df.T
exp_index = date_range('1/1/2001', end='12/31/2009', freq='A-DEC')
result = df.to_timestamp('D', 'end', axis=1)
self.assert_(result.columns.equals(exp_index))
assert_almost_equal(result.values, df.values)
exp_index = date_range('1/1/2001', end='1/1/2009', freq='AS-JAN')
result = df.to_timestamp('D', 'start', axis=1)
self.assert_(result.columns.equals(exp_index))
delta = timedelta(hours=23)
result = df.to_timestamp('H', 'end', axis=1)
exp_index = _get_with_delta(delta)
self.assert_(result.columns.equals(exp_index))
delta = timedelta(hours=23, minutes=59)
result = df.to_timestamp('T', 'end', axis=1)
exp_index = _get_with_delta(delta)
self.assert_(result.columns.equals(exp_index))
result = df.to_timestamp('S', 'end', axis=1)
delta = timedelta(hours=23, minutes=59, seconds=59)
exp_index = _get_with_delta(delta)
self.assert_(result.columns.equals(exp_index))
# invalid axis
assertRaisesRegexp(ValueError, 'axis', df.to_timestamp, axis=2)
assertRaisesRegexp(ValueError, 'Only mult == 1', df.to_timestamp, '5t', axis=1)
def test_index_duplicate_periods(self):
# monotonic
idx = PeriodIndex([2000, 2007, 2007, 2009, 2009], freq='A-JUN')
ts = Series(np.random.randn(len(idx)), index=idx)
result = ts[2007]
expected = ts[1:3]
assert_series_equal(result, expected)
result[:] = 1
self.assert_((ts[1:3] == 1).all())
# not monotonic
idx = PeriodIndex([2000, 2007, 2007, 2009, 2007], freq='A-JUN')
ts = Series(np.random.randn(len(idx)), index=idx)
result = ts[2007]
expected = ts[idx == 2007]
assert_series_equal(result, expected)
def test_constructor(self):
pi = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 9)
pi = PeriodIndex(freq='Q', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 4 * 9)
pi = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 12 * 9)
pi = PeriodIndex(freq='D', start='1/1/2001', end='12/31/2009')
assert_equal(len(pi), 365 * 9 + 2)
pi = PeriodIndex(freq='B', start='1/1/2001', end='12/31/2009')
assert_equal(len(pi), 261 * 9)
pi = PeriodIndex(freq='H', start='1/1/2001', end='12/31/2001 23:00')
assert_equal(len(pi), 365 * 24)
pi = PeriodIndex(freq='Min', start='1/1/2001', end='1/1/2001 23:59')
assert_equal(len(pi), 24 * 60)
pi = PeriodIndex(freq='S', start='1/1/2001', end='1/1/2001 23:59:59')
assert_equal(len(pi), 24 * 60 * 60)
start = Period('02-Apr-2005', 'B')
i1 = PeriodIndex(start=start, periods=20)
assert_equal(len(i1), 20)
assert_equal(i1.freq, start.freq)
assert_equal(i1[0], start)
end_intv = Period('2006-12-31', 'W')
i1 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), 10)
assert_equal(i1.freq, end_intv.freq)
assert_equal(i1[-1], end_intv)
end_intv = Period('2006-12-31', '1w')
i2 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), len(i2))
self.assert_((i1 == i2).all())
assert_equal(i1.freq, i2.freq)
end_intv = Period('2006-12-31', ('w', 1))
i2 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), len(i2))
self.assert_((i1 == i2).all())
assert_equal(i1.freq, i2.freq)
try:
PeriodIndex(start=start, end=end_intv)
raise AssertionError('Cannot allow mixed freq for start and end')
except ValueError:
pass
end_intv = Period('2005-05-01', 'B')
i1 = PeriodIndex(start=start, end=end_intv)
try:
PeriodIndex(start=start)
raise AssertionError(
'Must specify periods if missing start or end')
except ValueError:
pass
# infer freq from first element
i2 = PeriodIndex([end_intv, Period('2005-05-05', 'B')])
assert_equal(len(i2), 2)
assert_equal(i2[0], end_intv)
i2 = PeriodIndex(np.array([end_intv, Period('2005-05-05', 'B')]))
assert_equal(len(i2), 2)
assert_equal(i2[0], end_intv)
# Mixed freq should fail
vals = [end_intv, Period('2006-12-31', 'w')]
self.assertRaises(ValueError, PeriodIndex, vals)
vals = np.array(vals)
self.assertRaises(ValueError, PeriodIndex, vals)
def test_shift(self):
pi1 = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='A', start='1/1/2002', end='12/1/2010')
self.assert_(pi1.shift(0).equals(pi1))
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(1).values, pi2.values)
pi1 = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='A', start='1/1/2000', end='12/1/2008')
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(-1).values, pi2.values)
pi1 = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='M', start='2/1/2001', end='1/1/2010')
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(1).values, pi2.values)
pi1 = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='M', start='12/1/2000', end='11/1/2009')
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(-1).values, pi2.values)
pi1 = PeriodIndex(freq='D', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='D', start='1/2/2001', end='12/2/2009')
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(1).values, pi2.values)
pi1 = PeriodIndex(freq='D', start='1/1/2001', end='12/1/2009')
pi2 = PeriodIndex(freq='D', start='12/31/2000', end='11/30/2009')
assert_equal(len(pi1), len(pi2))
assert_equal(pi1.shift(-1).values, pi2.values)
def test_asfreq(self):
pi1 = PeriodIndex(freq='A', start='1/1/2001', end='1/1/2001')
pi2 = PeriodIndex(freq='Q', start='1/1/2001', end='1/1/2001')
pi3 = PeriodIndex(freq='M', start='1/1/2001', end='1/1/2001')
pi4 = PeriodIndex(freq='D', start='1/1/2001', end='1/1/2001')
pi5 = PeriodIndex(freq='H', start='1/1/2001', end='1/1/2001 00:00')
pi6 = PeriodIndex(freq='Min', start='1/1/2001', end='1/1/2001 00:00')
pi7 = PeriodIndex(freq='S', start='1/1/2001', end='1/1/2001 00:00:00')
self.assertEquals(pi1.asfreq('Q', 'S'), pi2)
self.assertEquals(pi1.asfreq('Q', 's'), pi2)
self.assertEquals(pi1.asfreq('M', 'start'), pi3)
self.assertEquals(pi1.asfreq('D', 'StarT'), pi4)
self.assertEquals(pi1.asfreq('H', 'beGIN'), pi5)
self.assertEquals(pi1.asfreq('Min', 'S'), pi6)
self.assertEquals(pi1.asfreq('S', 'S'), pi7)
self.assertEquals(pi2.asfreq('A', 'S'), pi1)
self.assertEquals(pi2.asfreq('M', 'S'), pi3)
self.assertEquals(pi2.asfreq('D', 'S'), pi4)
self.assertEquals(pi2.asfreq('H', 'S'), pi5)
self.assertEquals(pi2.asfreq('Min', 'S'), pi6)
self.assertEquals(pi2.asfreq('S', 'S'), pi7)
self.assertEquals(pi3.asfreq('A', 'S'), pi1)
self.assertEquals(pi3.asfreq('Q', 'S'), pi2)
self.assertEquals(pi3.asfreq('D', 'S'), pi4)
self.assertEquals(pi3.asfreq('H', 'S'), pi5)
self.assertEquals(pi3.asfreq('Min', 'S'), pi6)
self.assertEquals(pi3.asfreq('S', 'S'), pi7)
self.assertEquals(pi4.asfreq('A', 'S'), pi1)
self.assertEquals(pi4.asfreq('Q', 'S'), pi2)
self.assertEquals(pi4.asfreq('M', 'S'), pi3)
self.assertEquals(pi4.asfreq('H', 'S'), pi5)
self.assertEquals(pi4.asfreq('Min', 'S'), pi6)
self.assertEquals(pi4.asfreq('S', 'S'), pi7)
self.assertEquals(pi5.asfreq('A', 'S'), pi1)
self.assertEquals(pi5.asfreq('Q', 'S'), pi2)
self.assertEquals(pi5.asfreq('M', 'S'), pi3)
self.assertEquals(pi5.asfreq('D', 'S'), pi4)
self.assertEquals(pi5.asfreq('Min', 'S'), pi6)
self.assertEquals(pi5.asfreq('S', 'S'), pi7)
self.assertEquals(pi6.asfreq('A', 'S'), pi1)
self.assertEquals(pi6.asfreq('Q', 'S'), pi2)
self.assertEquals(pi6.asfreq('M', 'S'), pi3)
self.assertEquals(pi6.asfreq('D', 'S'), pi4)
self.assertEquals(pi6.asfreq('H', 'S'), pi5)
self.assertEquals(pi6.asfreq('S', 'S'), pi7)
self.assertEquals(pi7.asfreq('A', 'S'), pi1)
self.assertEquals(pi7.asfreq('Q', 'S'), pi2)
self.assertEquals(pi7.asfreq('M', 'S'), pi3)
self.assertEquals(pi7.asfreq('D', 'S'), pi4)
self.assertEquals(pi7.asfreq('H', 'S'), pi5)
self.assertEquals(pi7.asfreq('Min', 'S'), pi6)
self.assertRaises(ValueError, pi7.asfreq, 'T', 'foo')
self.assertRaises(ValueError, pi1.asfreq, '5t')
def test_ts_repr(self):
index = PeriodIndex(freq='A', start='1/1/2001', end='12/31/2010')
ts = Series(np.random.randn(len(index)), index=index)
repr(ts) # ??
val = period_range('2013Q1', periods=1, freq="Q")
expected = "<class 'pandas.tseries.period.PeriodIndex'>\nfreq: Q-DEC\n[2013Q1]\nlength: 1"
assert_equal(repr(val), expected)
val = period_range('2013Q1', periods=2, freq="Q")
expected = "<class 'pandas.tseries.period.PeriodIndex'>\nfreq: Q-DEC\n[2013Q1, 2013Q2]\nlength: 2"
assert_equal(repr(val), expected)
val = period_range('2013Q1', periods=3, freq="Q")
expected = "<class 'pandas.tseries.period.PeriodIndex'>\nfreq: Q-DEC\n[2013Q1, ..., 2013Q3]\nlength: 3"
assert_equal(repr(val), expected)
def test_period_index_unicode(self):
pi = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 9)
assert_equal(pi, eval(compat.text_type(pi)))
pi = PeriodIndex(freq='Q', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 4 * 9)
assert_equal(pi, eval(compat.text_type(pi)))
pi = PeriodIndex(freq='M', start='1/1/2001', end='12/1/2009')
assert_equal(len(pi), 12 * 9)
assert_equal(pi, eval(compat.text_type(pi)))
start = Period('02-Apr-2005', 'B')
i1 = PeriodIndex(start=start, periods=20)
assert_equal(len(i1), 20)
assert_equal(i1.freq, start.freq)
assert_equal(i1[0], start)
assert_equal(i1, eval(compat.text_type(i1)))
end_intv = Period('2006-12-31', 'W')
i1 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), 10)
assert_equal(i1.freq, end_intv.freq)
assert_equal(i1[-1], end_intv)
assert_equal(i1, eval(compat.text_type(i1)))
end_intv = Period('2006-12-31', '1w')
i2 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), len(i2))
self.assert_((i1 == i2).all())
assert_equal(i1.freq, i2.freq)
assert_equal(i1, eval(compat.text_type(i1)))
assert_equal(i2, eval(compat.text_type(i2)))
end_intv = Period('2006-12-31', ('w', 1))
i2 = PeriodIndex(end=end_intv, periods=10)
assert_equal(len(i1), len(i2))
self.assert_((i1 == i2).all())
assert_equal(i1.freq, i2.freq)
assert_equal(i1, eval(compat.text_type(i1)))
assert_equal(i2, eval(compat.text_type(i2)))
try:
PeriodIndex(start=start, end=end_intv)
raise AssertionError('Cannot allow mixed freq for start and end')
except ValueError:
pass
end_intv = Period('2005-05-01', 'B')
i1 = PeriodIndex(start=start, end=end_intv)
assert_equal(i1, eval(compat.text_type(i1)))
try:
PeriodIndex(start=start)
raise AssertionError(
'Must specify periods if missing start or end')
except ValueError:
pass
# infer freq from first element
i2 = PeriodIndex([end_intv, Period('2005-05-05', 'B')])
assert_equal(len(i2), 2)
assert_equal(i2[0], end_intv)
assert_equal(i2, eval(compat.text_type(i2)))
i2 = PeriodIndex(np.array([end_intv, Period('2005-05-05', 'B')]))
assert_equal(len(i2), 2)
assert_equal(i2[0], end_intv)
assert_equal(i2, eval(compat.text_type(i2)))
# Mixed freq should fail
vals = [end_intv, Period('2006-12-31', 'w')]
self.assertRaises(ValueError, PeriodIndex, vals)
vals = np.array(vals)
self.assertRaises(ValueError, PeriodIndex, vals)
def test_frame_index_to_string(self):
index = PeriodIndex(['2011-1', '2011-2', '2011-3'], freq='M')
frame = DataFrame(np.random.randn(3, 4), index=index)
# it works!
frame.to_string()
def test_asfreq_ts(self):
index = PeriodIndex(freq='A', start='1/1/2001', end='12/31/2010')
ts = Series(np.random.randn(len(index)), index=index)
df = DataFrame(np.random.randn(len(index), 3), index=index)
result = ts.asfreq('D', how='end')
df_result = df.asfreq('D', how='end')
exp_index = index.asfreq('D', how='end')
self.assert_(len(result) == len(ts))
self.assert_(result.index.equals(exp_index))
self.assert_(df_result.index.equals(exp_index))
result = ts.asfreq('D', how='start')
self.assert_(len(result) == len(ts))
self.assert_(result.index.equals(index.asfreq('D', how='start')))
def test_badinput(self):
self.assertRaises(datetools.DateParseError, Period, '1/1/-2000', 'A')
# self.assertRaises(datetools.DateParseError, Period, '-2000', 'A')
# self.assertRaises(datetools.DateParseError, Period, '0', 'A')
    def test_negative_ordinals(self):
        # Ordinals at or below zero (periods before the epoch) must be
        # constructible without raising, for both scalars and indexes.
        p = Period(ordinal=-1000, freq='A')
        p = Period(ordinal=0, freq='A')
        # List and ndarray ordinal inputs should produce equal indexes.
        idx1 = PeriodIndex(ordinal=[-1, 0, 1], freq='A')
        idx2 = PeriodIndex(ordinal=np.array([-1, 0, 1]), freq='A')
        assert_array_equal(idx1,idx2)
def test_dti_to_period(self):
dti = DatetimeIndex(start='1/1/2005', end='12/1/2005', freq='M')
pi1 = dti.to_period()
pi2 = dti.to_period(freq='D')
self.assertEquals(pi1[0], Period('Jan 2005', freq='M'))
self.assertEquals(pi2[0], Period('1/31/2005', freq='D'))
self.assertEquals(pi1[-1], Period('Nov 2005', freq='M'))
self.assertEquals(pi2[-1], Period('11/30/2005', freq='D'))
def test_pindex_slice_index(self):
pi = PeriodIndex(start='1/1/10', end='12/31/12', freq='M')
s = Series(np.random.rand(len(pi)), index=pi)
res = s['2010']
exp = s[0:12]
assert_series_equal(res, exp)
res = s['2011']
exp = s[12:24]
assert_series_equal(res, exp)
def test_pindex_qaccess(self):
pi = PeriodIndex(['2Q05', '3Q05', '4Q05', '1Q06', '2Q06'], freq='Q')
s = Series(np.random.rand(len(pi)), index=pi).cumsum()
# Todo: fix these accessors!
self.assert_(s['05Q4'] == s[2])
def test_period_dt64_round_trip(self):
dti = date_range('1/1/2000', '1/7/2002', freq='B')
pi = dti.to_period()
self.assert_(pi.to_timestamp().equals(dti))
dti = date_range('1/1/2000', '1/7/2002', freq='B')
pi = dti.to_period(freq='H')
self.assert_(pi.to_timestamp().equals(dti))
def test_to_period_quarterly(self):
# make sure we can make the round trip
for month in MONTHS:
freq = 'Q-%s' % month
rng = period_range('1989Q3', '1991Q3', freq=freq)
stamps = rng.to_timestamp()
result = stamps.to_period(freq)
self.assert_(rng.equals(result))
def test_to_period_quarterlyish(self):
offsets = ['BQ', 'QS', 'BQS']
for off in offsets:
rng = date_range('01-Jan-2012', periods=8, freq=off)
prng = rng.to_period()
self.assert_(prng.freq == 'Q-DEC')
def test_to_period_annualish(self):
offsets = ['BA', 'AS', 'BAS']
for off in offsets:
rng = date_range('01-Jan-2012', periods=8, freq=off)
prng = rng.to_period()
self.assert_(prng.freq == 'A-DEC')
def test_to_period_monthish(self):
offsets = ['MS', 'EOM', 'BM']
for off in offsets:
rng = date_range('01-Jan-2012', periods=8, freq=off)
prng = rng.to_period()
self.assert_(prng.freq == 'M')
def test_no_multiples(self):
self.assertRaises(ValueError, period_range, '1989Q3', periods=10,
freq='2Q')
self.assertRaises(ValueError, period_range, '1989', periods=10,
freq='2A')
self.assertRaises(ValueError, Period, '1989', freq='2A')
# def test_pindex_multiples(self):
# pi = PeriodIndex(start='1/1/10', end='12/31/12', freq='2M')
# self.assertEquals(pi[0], Period('1/1/10', '2M'))
# self.assertEquals(pi[1], Period('3/1/10', '2M'))
# self.assertEquals(pi[0].asfreq('6M'), pi[2].asfreq('6M'))
# self.assertEquals(pi[0].asfreq('A'), pi[2].asfreq('A'))
# self.assertEquals(pi[0].asfreq('M', how='S'),
# Period('Jan 2010', '1M'))
# self.assertEquals(pi[0].asfreq('M', how='E'),
# Period('Feb 2010', '1M'))
# self.assertEquals(pi[1].asfreq('M', how='S'),
# Period('Mar 2010', '1M'))
# i = Period('1/1/2010 12:05:18', '5S')
# self.assertEquals(i, Period('1/1/2010 12:05:15', '5S'))
# i = Period('1/1/2010 12:05:18', '5S')
# self.assertEquals(i.asfreq('1S', how='E'),
# Period('1/1/2010 12:05:19', '1S'))
def test_iteration(self):
index = PeriodIndex(start='1/1/10', periods=4, freq='B')
result = list(index)
tm.assert_isinstance(result[0], Period)
self.assert_(result[0].freq == index.freq)
    def test_take(self):
        # take() and fancy indexing with the same positions should both
        # preserve the PeriodIndex type and its frequency.
        index = PeriodIndex(start='1/1/10', end='12/31/12', freq='D')
        taken = index.take([5, 6, 8, 12])
        taken2 = index[[5, 6, 8, 12]]
        tm.assert_isinstance(taken, PeriodIndex)
        self.assert_(taken.freq == index.freq)
        tm.assert_isinstance(taken2, PeriodIndex)
        self.assert_(taken2.freq == index.freq)
def test_joins(self):
index = period_range('1/1/2000', '1/20/2000', freq='D')
for kind in ['inner', 'outer', 'left', 'right']:
joined = index.join(index[:-5], how=kind)
tm.assert_isinstance(joined, PeriodIndex)
self.assert_(joined.freq == index.freq)
def test_join_self(self):
index = period_range('1/1/2000', '1/20/2000', freq='D')
for kind in ['inner', 'outer', 'left', 'right']:
res = index.join(index, how=kind)
self.assert_(index is res)
def test_join_does_not_recur(self):
df = tm.makeCustomDataframe(3, 2, data_gen_f=lambda *args:
np.random.randint(2), c_idx_type='p',
r_idx_type='dt')
s = df.iloc[:2, 0]
res = s.index.join(df.columns, how='outer')
expected = Index([s.index[0], s.index[1],
df.columns[0], df.columns[1]], object)
tm.assert_index_equal(res, expected)
def test_align_series(self):
rng = period_range('1/1/2000', '1/1/2010', freq='A')
ts = Series(np.random.randn(len(rng)), index=rng)
result = ts + ts[::2]
expected = ts + ts
expected[1::2] = np.nan
assert_series_equal(result, expected)
result = ts + _permute(ts[::2])
assert_series_equal(result, expected)
# it works!
for kind in ['inner', 'outer', 'left', 'right']:
ts.align(ts[::2], join=kind)
with assertRaisesRegexp(ValueError, 'Only like-indexed'):
ts + ts.asfreq('D', how="end")
def test_align_frame(self):
rng = period_range('1/1/2000', '1/1/2010', freq='A')
ts = DataFrame(np.random.randn(len(rng), 3), index=rng)
result = ts + ts[::2]
expected = ts + ts
expected.values[1::2] = np.nan
tm.assert_frame_equal(result, expected)
result = ts + _permute(ts[::2])
tm.assert_frame_equal(result, expected)
def test_union(self):
index = period_range('1/1/2000', '1/20/2000', freq='D')
result = index[:-5].union(index[10:])
self.assert_(result.equals(index))
# not in order
result = _permute(index[:-5]).union(_permute(index[10:]))
self.assert_(result.equals(index))
# raise if different frequencies
index = period_range('1/1/2000', '1/20/2000', freq='D')
index2 = period_range('1/1/2000', '1/20/2000', freq='W-WED')
self.assertRaises(ValueError, index.union, index2)
self.assertRaises(ValueError, index.join, index.to_timestamp())
def test_intersection(self):
index = period_range('1/1/2000', '1/20/2000', freq='D')
result = index[:-5].intersection(index[10:])
self.assert_(result.equals(index[10:-5]))
# not in order
left = _permute(index[:-5])
right = _permute(index[10:])
result = left.intersection(right).order()
self.assert_(result.equals(index[10:-5]))
# raise if different frequencies
index = period_range('1/1/2000', '1/20/2000', freq='D')
index2 = period_range('1/1/2000', '1/20/2000', freq='W-WED')
self.assertRaises(ValueError, index.intersection, index2)
def test_fields(self):
# year, month, day, hour, minute
# second, weekofyear, week, dayofweek, weekday, dayofyear, quarter
# qyear
pi = PeriodIndex(freq='A', start='1/1/2001', end='12/1/2005')
self._check_all_fields(pi)
pi = PeriodIndex(freq='Q', start='1/1/2001', end='12/1/2002')
self._check_all_fields(pi)
pi = PeriodIndex(freq='M', start='1/1/2001', end='1/1/2002')
self._check_all_fields(pi)
pi = PeriodIndex(freq='D', start='12/1/2001', end='6/1/2001')
self._check_all_fields(pi)
pi = PeriodIndex(freq='B', start='12/1/2001', end='6/1/2001')
self._check_all_fields(pi)
pi = PeriodIndex(freq='H', start='12/31/2001', end='1/1/2002 23:00')
self._check_all_fields(pi)
pi = PeriodIndex(freq='Min', start='12/31/2001', end='1/1/2002 00:20')
self._check_all_fields(pi)
pi = PeriodIndex(freq='S', start='12/31/2001 00:00:00',
end='12/31/2001 00:05:00')
self._check_all_fields(pi)
end_intv = Period('2006-12-31', 'W')
i1 = PeriodIndex(end=end_intv, periods=10)
self._check_all_fields(i1)
def _check_all_fields(self, periodindex):
fields = ['year', 'month', 'day', 'hour', 'minute',
'second', 'weekofyear', 'week', 'dayofweek',
'weekday', 'dayofyear', 'quarter', 'qyear']
periods = list(periodindex)
for field in fields:
field_idx = getattr(periodindex, field)
assert_equal(len(periodindex), len(field_idx))
for x, val in zip(periods, field_idx):
assert_equal(getattr(x, field), val)
def test_is_full(self):
index = PeriodIndex([2005, 2007, 2009], freq='A')
self.assert_(not index.is_full)
index = PeriodIndex([2005, 2006, 2007], freq='A')
self.assert_(index.is_full)
index = PeriodIndex([2005, 2005, 2007], freq='A')
self.assert_(not index.is_full)
index = PeriodIndex([2005, 2005, 2006], freq='A')
self.assert_(index.is_full)
index = PeriodIndex([2006, 2005, 2005], freq='A')
self.assertRaises(ValueError, getattr, index, 'is_full')
self.assert_(index[:0].is_full)
def test_map(self):
index = PeriodIndex([2005, 2007, 2009], freq='A')
result = index.map(lambda x: x + 1)
expected = index + 1
self.assert_(result.equals(expected))
result = index.map(lambda x: x.ordinal)
exp = [x.ordinal for x in index]
assert_array_equal(result, exp)
def test_map_with_string_constructor(self):
raw = [2005, 2007, 2009]
index = PeriodIndex(raw, freq='A')
types = str,
if compat.PY3:
# unicode
types += compat.text_type,
for t in types:
expected = np.array(lmap(t, raw), dtype=object)
res = index.map(t)
# should return an array
tm.assert_isinstance(res, np.ndarray)
# preserve element types
self.assert_(all(isinstance(resi, t) for resi in res))
# dtype should be object
self.assertEqual(res.dtype, np.dtype('object').type)
# lastly, values should compare equal
assert_array_equal(res, expected)
def test_convert_array_of_periods(self):
rng = period_range('1/1/2000', periods=20, freq='D')
periods = list(rng)
result = pd.Index(periods)
tm.assert_isinstance(result, PeriodIndex)
def test_with_multi_index(self):
# #1705
index = date_range('1/1/2012', periods=4, freq='12H')
index_as_arrays = [index.to_period(freq='D'), index.hour]
s = Series([0, 1, 2, 3], index_as_arrays)
tm.assert_isinstance(s.index.levels[0], PeriodIndex)
tm.assert_isinstance(s.index.values[0][0], Period)
def test_to_datetime_1703(self):
index = period_range('1/1/2012', periods=4, freq='D')
result = index.to_datetime()
self.assertEquals(result[0], Timestamp('1/1/2012'))
def test_get_loc_msg(self):
idx = period_range('2000-1-1', freq='A', periods=10)
bad_period = Period('2012', 'A')
self.assertRaises(KeyError, idx.get_loc, bad_period)
try:
idx.get_loc(bad_period)
except KeyError as inst:
self.assert_(inst.args[0] == bad_period)
def test_append_concat(self):
# #1815
d1 = date_range('12/31/1990', '12/31/1999', freq='A-DEC')
d2 = date_range('12/31/2000', '12/31/2009', freq='A-DEC')
s1 = Series(np.random.randn(10), d1)
s2 = Series(np.random.randn(10), d2)
s1 = s1.to_period()
s2 = s2.to_period()
# drops index
result = pd.concat([s1, s2])
tm.assert_isinstance(result.index, PeriodIndex)
self.assertEquals(result.index[0], s1.index[0])
def test_pickle_freq(self):
# GH2891
import pickle
prng = period_range('1/1/2011', '1/1/2012', freq='M')
new_prng = pickle.loads(pickle.dumps(prng))
self.assertEqual(new_prng.freq,'M')
def test_slice_keep_name(self):
idx = period_range('20010101', periods=10, freq='D', name='bob')
self.assertEqual(idx.name, idx[1:].name)
def _permute(obj):
return obj.take(np.random.permutation(len(obj)))
class TestMethods(tm.TestCase):
    """Tests for arithmetic methods on Period scalars."""
    # NOTE(review): the previous docstring ("Base test class for
    # MaskedArrays.") was copied from numpy.ma's test suite and did not
    # describe this class.
    def test_add(self):
        # Adding an integer advances the period by that many units of its
        # own frequency.
        dt1 = Period(freq='D', year=2008, month=1, day=1)
        dt2 = Period(freq='D', year=2008, month=1, day=2)
        assert_equal(dt1 + 1, dt2)
        # Period + str and Period + Period are both undefined operations.
        self.assertRaises(TypeError, dt1.__add__, "str")
        self.assertRaises(TypeError, dt1.__add__, dt2)
class TestPeriodRepresentation(tm.TestCase):
    """
    Wish to match NumPy units

    For each frequency, ordinal 0 should correspond to the Unix epoch
    (1970), so a range starting at the base date enumerates ordinals 0..9.
    """
    def test_annual(self):
        self._check_freq('A', 1970)
    def test_monthly(self):
        self._check_freq('M', '1970-01')
    def test_weekly(self):
        self._check_freq('W-THU', '1970-01-01')
    def test_daily(self):
        self._check_freq('D', '1970-01-01')
    def test_business_daily(self):
        self._check_freq('B', '1970-01-01')
    def test_hourly(self):
        self._check_freq('H', '1970-01-01')
    def test_minutely(self):
        self._check_freq('T', '1970-01-01')
    def test_secondly(self):
        self._check_freq('S', '1970-01-01')
    def test_millisecondly(self):
        self._check_freq('L', '1970-01-01')
    def test_microsecondly(self):
        self._check_freq('U', '1970-01-01')
    def test_nanosecondly(self):
        self._check_freq('N', '1970-01-01')
    def _check_freq(self, freq, base_date):
        # A range of 10 periods starting at the epoch must have underlying
        # ordinal values 0..9.
        rng = PeriodIndex(start=base_date, periods=10, freq=freq)
        exp = np.arange(10, dtype=np.int64)
        self.assert_(np.array_equal(rng.values, exp))
    def test_negone_ordinals(self):
        # Checks that ordinal=-1 (the period just before the epoch, hence
        # year 1969) can be constructed, converted and repr'd for each
        # frequency.
        # NOTE(review): this test is about ordinals rather than epoch
        # representation; it may belong in a different class -- confirm
        # before moving.
        freqs = ['A', 'M', 'Q', 'D', 'H', 'T', 'S']
        period = Period(ordinal=-1, freq='D')
        for freq in freqs:
            repr(period.asfreq(freq))
        for freq in freqs:
            period = Period(ordinal=-1, freq=freq)
            repr(period)
            self.assertEquals(period.year, 1969)
        period = Period(ordinal=-1, freq='B')
        repr(period)
        period = Period(ordinal=-1, freq='W')
        repr(period)
class TestComparisons(tm.TestCase):
    """Comparison semantics of ``Period``:

    * same-frequency periods compare by position,
    * mixed-frequency comparisons raise ValueError,
    * comparisons against non-Period objects raise TypeError.
    """
    def setUp(self):
        # Two equal January periods, later months, and a daily-frequency
        # period used to trigger the mixed-frequency error paths.
        self.january1 = Period('2000-01', 'M')
        self.january2 = Period('2000-01', 'M')
        self.february = Period('2000-02', 'M')
        self.march = Period('2000-03', 'M')
        self.day = Period('2012-01-01', 'D')
    def test_equal(self):
        self.assertEqual(self.january1, self.january2)
    def test_equal_Raises_Value(self):
        self.assertRaises(ValueError, self.january1.__eq__, self.day)
    def test_equal_Raises_Type(self):
        self.assertRaises(TypeError, self.january1.__eq__, 1)
    def test_notEqual(self):
        self.assertNotEqual(self.january1, self.february)
    def test_greater(self):
        self.assert_(self.february > self.january1)
    def test_greater_Raises_Value(self):
        self.assertRaises(ValueError, self.january1.__gt__, self.day)
    def test_greater_Raises_Type(self):
        self.assertRaises(TypeError, self.january1.__gt__, 1)
    def test_greaterEqual(self):
        self.assert_(self.january1 >= self.january2)
    def test_greaterEqual_Raises_Value(self):
        # NOTE(review): this method also asserts the TypeError case; the
        # other operators split that into a *_Raises_Type test.
        self.assertRaises(ValueError, self.january1.__ge__, self.day)
        self.assertRaises(TypeError, self.january1.__ge__, 1)
    def test_smallerEqual(self):
        self.assert_(self.january1 <= self.january2)
    def test_smallerEqual_Raises_Value(self):
        self.assertRaises(ValueError, self.january1.__le__, self.day)
    def test_smallerEqual_Raises_Type(self):
        self.assertRaises(TypeError, self.january1.__le__, 1)
    def test_smaller(self):
        self.assert_(self.january1 < self.february)
    def test_smaller_Raises_Value(self):
        self.assertRaises(ValueError, self.january1.__lt__, self.day)
    def test_smaller_Raises_Type(self):
        self.assertRaises(TypeError, self.january1.__lt__, 1)
    def test_sort(self):
        # sorted() relies on the ordering operators tested above.
        periods = [self.march, self.january1, self.february]
        correctPeriods = [self.january1, self.february, self.march]
        self.assertEqual(sorted(periods), correctPeriods)
if __name__ == '__main__':
    # Run this module's tests under nose: verbose (-vvs), stop on first
    # failure (-x), and drop into pdb on errors/failures. exit=False
    # keeps the interpreter alive after the run.
    import nose
    nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
                   exit=False)
| gpl-3.0 | -8,733,224,334,563,988,000 | 39.047805 | 111 | 0.559501 | false |
BackupTheBerlios/pimplayer | pimp/handlers/ipimp.py | 1 | 3287 | #!/usr/bin/env python
# Pimp is a highly interactive music player.
# Copyright (C) 2011 [email protected]
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Launch IPython in a deamon thread."""
from pimp.extensions.context import *
from pimp.core.song import *
from pimp.core.playlist import *
from audiodb.core.db import File
logger=logging.getLogger("ipimp")
logger.setLevel(logging.DEBUG)
#Shity hack to import module name if alreay loaded ...
import sys
if 'pimp.extensions.player_event' in sys.modules.keys():
from pimp.extensions.player_event import *
if 'pimp.extensions.tag' in sys.modules.keys():
from pimp.extensions.tag import *
if 'pimp.core.file' in sys.modules.keys():
from pimp.core.file import *
if 'pimp.core.pimp' in sys.modules.keys():
from pimp.core.pimp import *
# --- Interactive shell history persistence --------------------------------
import atexit,readline,os
# History file lives in the user's home directory.
historyPath = os.path.expanduser("~/.pimpstory")
def save_history(historyPath=historyPath):
    """Write the readline history to *historyPath* (registered at exit)."""
    # Re-import readline locally: the module-level name is del'd below,
    # so it would no longer be in scope when atexit fires.
    import readline
    readline.write_history_file(historyPath)
if os.path.exists(historyPath):
    # Restore history from a previous session, if any.
    readline.read_history_file(historyPath)
atexit.register(save_history)
# Drop the helper names from the module namespace so they do not clutter
# the embedded IPython session.
del(atexit)
del(os)
del(readline)
import pimp.core.player
import threading
#from IPython.Shell import IPShellEmbed
#from IPython import embed
class Ipimp (threading.Thread):
    """Thread that embeds an interactive IPython shell.

    NOTE(review): despite the module docstring mentioning a "daemon
    thread", the thread is never marked daemon here, so the process
    waits for the shell to exit -- confirm whether callers rely on that.
    """
    def __init__(self):
        threading.Thread.__init__(self)
        # Leftovers from an earlier embedding approach, kept for reference:
        # self.ipshell = embed
        # IPShellEmbed(["-noconfirm_exit"])
    def run(self):
        logger.info("Ipimp is launched")
        # NOTE(review): these import paths (IPython.config.loader,
        # IPython.frontend.terminal.embed) are from old IPython releases;
        # they moved in later versions.
        from IPython.config.loader import Config
        cfg = Config()
        shell_config = cfg.InteractiveShellEmbed
        # Mimic the classic IPython prompt layout.
        shell_config.prompt_in1 = 'In <\\#>: '
        shell_config.prompt_in2 = '   .\\D.: '
        shell_config.prompt_out = 'Out<\\#>: '
        # Disable persistent history for the embedded shell.
        shell_config.history_length = 0
        # shell_config.history_manager = False
        # First import the embeddable shell class
        from IPython.frontend.terminal.embed import InteractiveShellEmbed
        # Now create an instance of the embeddable shell. The first argument is a
        # string with options exactly as you would type them if you were starting
        # IPython at the system command line. Any parameters you want to define for
        # configuration can thus be specified here.
        ipshell = InteractiveShellEmbed(config=cfg,
                       banner1 = 'Dropping into IPython',
                       exit_msg = 'Leaving Interpreter, back to program.')
        ipshell.history_manager.end_session()
        # Blocks until the user leaves the shell.
        ipshell()
        logger.info("Quit Ipimp")
        print "IPython exited ... (type Ctrl-C to exit Pimp)"
| gpl-3.0 | 4,128,168,203,558,339,600 | 33.6 | 93 | 0.676605 | false |
Alexander-Minyushkin/aistreamer | worker/app.py | 1 | 1520 | import os
from flask import Flask, render_template, request
import luigi
from luigi.contrib.gcs import GCSTarget, GCSClient
import subprocess
from merge_video import MergeVideoAndAudio
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Root endpoint: greet the name in $TARGET (default "World")."""
    greeting_target = os.environ.get('TARGET', 'World')
    return f'Hello {greeting_target}!\n'
# Example request:
# http://localhost:8080/merge_video?youtube_id=asdf&text_id=pg_12
@app.route('/merge_video', methods=['GET'])
def merge_video():
    """Kick off the luigi MergeVideoAndAudio pipeline for a YouTube id.

    Query parameters:
      youtube_id -- YouTube video id, turned into a watch URL below.
      text_id    -- NOTE(review): read and echoed in the response but
                    NOT passed to the pipeline; the text source below is
                    hard-coded. Confirm whether text_id should select
                    the --text-generator-source path.
    """
    youtube_id = request.args.get('youtube_id')
    youtube_link = f'https://www.youtube.com/watch?v={youtube_id}'
    text_id = request.args.get('text_id')
    # Scheduler endpoint for luigi's central scheduler; see
    # https://luigi.readthedocs.io/en/latest/central_scheduler.html
    # Start one with e.g.:
    # $luigid --background --pidfile <PATH_TO_PIDFILE> --logdir <PATH_TO_LOGDIR> --state-path <PATH_TO_STATEFILE>
    scheduler_url = os.environ.get('SCHEDULER', 'http://127.0.0.1:8082')
    # Run the task synchronously in-process (luigi.run blocks).
    luigi.run(['detect.MergeVideoAndAudio',
               '--gs-path-video', youtube_link, #'gs://amvideotest/Welcome_to_Adam_Does_Movies.mp4', # 'gs://amvideotest/battlefield1.mp4', #
               '--text-generator','markov',
               '--text-generator-source', 'gs://amvideotest/source/pg/pg345.txt', #'gs://amvideotest/source/pg/pg345.txt',
               '--workers', '1',
               '--scheduler-url', scheduler_url])
    return f'Completed youtube_link: {youtube_link}\ntext_id: {text_id}'
if __name__ == "__main__":
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080))) | apache-2.0 | -997,589,223,079,689,300 | 35.214286 | 139 | 0.653289 | false |
ioram7/keystone-federado-pgid2013 | build/sqlalchemy/examples/beaker_caching/caching_query.py | 1 | 10036 | """caching_query.py
Represent persistence structures which allow the usage of
Beaker caching with SQLAlchemy.
The three new concepts introduced here are:
* CachingQuery - a Query subclass that caches and
retrieves results in/from Beaker.
* FromCache - a query option that establishes caching
parameters on a Query
* RelationshipCache - a variant of FromCache which is specific
to a query invoked during a lazy load.
* _params_from_query - extracts value parameters from
a Query.
The rest of what's here are standard SQLAlchemy and
Beaker constructs.
"""
from sqlalchemy.orm.interfaces import MapperOption
from sqlalchemy.orm.query import Query
from sqlalchemy.sql import visitors
class CachingQuery(Query):
    """A Query subclass which optionally loads full results from a Beaker
    cache region.

    The CachingQuery stores additional state that allows it to consult
    a Beaker cache before accessing the database:

    * A "region", which is a cache region argument passed to a
      Beaker CacheManager, specifies a particular cache configuration
      (including backend implementation, expiration times, etc.)
    * A "namespace", which is a qualifying name that identifies a
      group of keys within the cache.  A query that filters on a name
      might use the name "by_name", a query that filters on a date range
      to a joined table might use the name "related_date_range".

    When the above state is present, a Beaker cache is retrieved.

    The "namespace" name is first concatenated with
    a string composed of the individual entities and columns the Query
    requests, i.e. such as ``Query(User.id, User.name)``.

    The Beaker cache is then loaded from the cache manager based
    on the region and composed namespace.  The key within the cache
    itself is then constructed against the bind parameters specified
    by this query, which are usually literals defined in the
    WHERE clause.

    The FromCache and RelationshipCache mapper options below represent
    the "public" method of configuring this state upon the CachingQuery.
    """

    def __init__(self, manager, *args, **kw):
        # Beaker CacheManager used to resolve (region, namespace) pairs.
        self.cache_manager = manager
        Query.__init__(self, *args, **kw)

    def __iter__(self):
        """override __iter__ to pull results from Beaker
           if particular attributes have been configured.

           Note that this approach does *not* detach the loaded objects from
           the current session. If the cache backend is an in-process cache
           (like "memory") and lives beyond the scope of the current session's
           transaction, those objects may be expired. The method here can be
           modified to first expunge() each loaded item from the current
           session before returning the list of items, so that the items
           in the cache are not the same ones in the current Session.
        """
        # _cache_parameters is set by the FromCache/RelationshipCache
        # options; without it, fall through to normal Query iteration.
        if hasattr(self, '_cache_parameters'):
            return self.get_value(createfunc=lambda: list(Query.__iter__(self)))
        else:
            return Query.__iter__(self)

    def invalidate(self):
        """Invalidate the value represented by this Query."""
        cache, cache_key = _get_cache_parameters(self)
        cache.remove(cache_key)

    def get_value(self, merge=True, createfunc=None):
        """Return the value from the cache for this query.

        Raise KeyError if no value present and no
        createfunc specified.

        When *merge* is True the cached objects are merged into the
        current Session (without re-loading their state).
        """
        cache, cache_key = _get_cache_parameters(self)
        ret = cache.get_value(cache_key, createfunc=createfunc)
        if merge:
            ret = self.merge_result(ret, load=False)
        return ret

    def set_value(self, value):
        """Set the value in the cache for this query."""
        cache, cache_key = _get_cache_parameters(self)
        cache.put(cache_key, value)
def query_callable(manager, query_cls=CachingQuery):
    """Return a Session ``query_cls`` factory bound to *manager*.

    The returned callable builds *query_cls* instances, always passing
    the Beaker cache *manager* as the first constructor argument.
    """
    def _make_query(*args, **kwargs):
        return query_cls(manager, *args, **kwargs)
    return _make_query
def _get_cache_parameters(query):
    """For a query with cache_region and cache_namespace configured,
    return the corresponding Cache instance and cache key, based
    on this query's current criterion and parameter values.

    Raises ValueError when the query was never configured via
    FromCache/RelationshipCache.
    """
    if not hasattr(query, '_cache_parameters'):
        raise ValueError("This Query does not have caching parameters configured.")

    region, namespace, cache_key = query._cache_parameters

    namespace = _namespace_from_query(namespace, query)

    if cache_key is None:
        # cache key - the value arguments from this query's parameters,
        # plus LIMIT/OFFSET so differently-sliced queries get distinct keys.
        args = [str(x) for x in _params_from_query(query)]
        args.extend([str(query._limit), str(query._offset)])
        cache_key = " ".join(args)

    assert cache_key is not None, "Cache key was None !"

    # get cache
    cache = query.cache_manager.get_cache_region(namespace, region)

    # optional - hash the cache_key too for consistent length
    # import uuid
    # cache_key= str(uuid.uuid5(uuid.NAMESPACE_DNS, cache_key))

    return cache, cache_key
def _namespace_from_query(namespace, query):
# cache namespace - the token handed in by the
# option + class we're querying against
namespace = " ".join([namespace] + [str(x) for x in query._entities])
# memcached wants this
namespace = namespace.replace(' ', '_')
return namespace
def _set_cache_parameters(query, region, namespace, cache_key):
if hasattr(query, '_cache_parameters'):
region, namespace, cache_key = query._cache_parameters
raise ValueError("This query is already configured "
"for region %r namespace %r" %
(region, namespace)
)
query._cache_parameters = region, namespace, cache_key
class FromCache(MapperOption):
    """Specifies that a Query should load results from a cache."""

    # Only applies to the outermost query, not to lazy loaders it spawns.
    propagate_to_loaders = False

    def __init__(self, region, namespace, cache_key=None):
        """Construct a new FromCache.

        :param region: the cache region.  Should be a
        region configured in the Beaker CacheManager.

        :param namespace: the cache namespace.  Should
        be a name uniquely describing the target Query's
        lexical structure.

        :param cache_key: optional.  A string cache key
        that will serve as the key to the query.   Use this
        if your query has a huge amount of parameters (such
        as when using in_()) which correspond more simply to
        some other identifier.

        """
        self.region = region
        self.namespace = namespace
        self.cache_key = cache_key

    def process_query(self, query):
        """Process a Query during normal loading operation."""
        # Stashes (region, namespace, cache_key) on the query; raises if
        # the query was already configured by another caching option.
        _set_cache_parameters(query, self.region, self.namespace, self.cache_key)
class RelationshipCache(MapperOption):
    """Specifies that a Query as called within a "lazy load"
       should load results from a cache."""

    # Must propagate so the option is carried into lazy-load queries.
    propagate_to_loaders = True

    def __init__(self, region, namespace, attribute):
        """Construct a new RelationshipCache.

        :param region: the cache region.  Should be a
        region configured in the Beaker CacheManager.

        :param namespace: the cache namespace.  Should
        be a name uniquely describing the target Query's
        lexical structure.

        :param attribute: A Class.attribute which
        indicates a particular class relationship() whose
        lazy loader should be pulled from the cache.

        """
        self.region = region
        self.namespace = namespace
        # Maps (mapped class, relationship key) -> option, so several
        # RelationshipCache objects can be chained via and_().
        self._relationship_options = {
            ( attribute.property.parent.class_, attribute.property.key ) : self
        }

    def process_query_conditionally(self, query):
        """Process a Query that is used within a lazy loader.

        (the process_query_conditionally() method is a SQLAlchemy
        hook invoked only within lazyload.)

        """
        if query._current_path:
            mapper, key = query._current_path[-2:]

            # Walk the MRO so an option registered against a base class
            # also applies to subclasses being lazy-loaded.
            for cls in mapper.class_.__mro__:
                if (cls, key) in self._relationship_options:
                    relationship_option = self._relationship_options[(cls, key)]
                    _set_cache_parameters(
                            query,
                            relationship_option.region,
                            relationship_option.namespace,
                            None)

    def and_(self, option):
        """Chain another RelationshipCache option to this one.

        While many RelationshipCache objects can be specified on a single
        Query separately, chaining them together allows for a more efficient
        lookup during load.

        """
        self._relationship_options.update(option._relationship_options)
        return self
def _params_from_query(query):
    """Pull the bind parameter values from a query.

    This takes into account any scalar attribute bindparam set up.

    E.g. params_from_query(query.filter(Cls.foo==5).filter(Cls.bar==7)))
    would return [5, 7].

    """
    v = []
    def visit_bindparam(bind):

        if bind.key in query._params:
            value = query._params[bind.key]
        elif bind.callable:
            # lazyloader may dig a callable in here, intended
            # to late-evaluate params after autoflush is called.
            # convert to a scalar value.
            value = bind.callable()
        else:
            value = bind.value

        v.append(value)

    # TODO: this pulls the binds from the final compiled statement.
    # ideally, this would be a little more performant if it pulled
    # from query._criterion and others directly, however this would
    # need to be implemented not to miss anything, including
    # subqueries in the columns clause.  See
    # http://stackoverflow.com/questions/9265900/sqlalchemy-how-to-traverse-bindparam-values-in-a-subquery/
    visitors.traverse(query.statement, {}, {'bindparam':visit_bindparam})
    return v
| apache-2.0 | 2,404,844,990,423,938,000 | 34.971326 | 107 | 0.656238 | false |
capitalone/cloud-custodian | tests/test_webhook.py | 1 | 9095 | # Copyright 2019 Microsoft Corporation
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
import datetime
import json
import mock
from c7n.actions.webhook import Webhook
from c7n.exceptions import PolicyValidationError
from .common import BaseTest
import os
class WebhookTest(BaseTest):
    """Tests for the c7n ``webhook`` action: schema validation, batching,
    body/query-param templating, and proxy handling. All HTTP calls are
    intercepted by mocking urllib3's PoolManager/ProxyManager."""

    def test_valid_policy(self):
        """Minimal and batch-mode webhook actions both pass validation."""
        policy = {
            "name": "webhook-batch",
            "resource": "ec2",
            "actions": [
                {
                    "type": "webhook",
                    "url": "http://foo.com",
                }
            ],
        }

        self.assertTrue(self.load_policy(data=policy, validate=True))

        policy = {
            "name": "webhook-batch",
            "resource": "ec2",
            "actions": [
                {
                    "type": "webhook",
                    "url": "http://foo.com",
                    "batch": True,
                    "query-params": {
                        "foo": "bar"
                    }
                }
            ],
        }

        self.assertTrue(self.load_policy(data=policy, validate=True))

    def test_invalid_policy(self):
        """Missing url or an unknown HTTP method must fail validation."""
        # Missing URL parameter
        policy = {
            "name": "webhook-batch",
            "resource": "ec2",
            "actions": [
                {
                    "type": "webhook"
                }
            ],
        }

        with self.assertRaises(PolicyValidationError):
            self.load_policy(data=policy, validate=True)

        # Bad method
        policy = {
            "name": "webhook-batch",
            "resource": "ec2",
            "actions": [
                {
                    "type": "webhook",
                    "url": "http://foo.com",
                    "method": "CREATE"
                }
            ],
        }

        with self.assertRaises(PolicyValidationError):
            self.load_policy(data=policy, validate=True)

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_batch(self, request_mock):
        """Batch mode splits resources into batch-size chunks per request."""
        resources = [
            {
                "name": "test_name",
                "value": "test_value"
            },
            {
                "name": "test_name",
                "value": "test_value"
            },
            {
                "name": "test_name",
                "value": "test_value"
            },
            {
                "name": "test_name",
                "value": "test_value"
            },
            {
                "name": "test_name",
                "value": "test_value"
            }
        ]

        data = {
            "url": "http://foo.com",
            "batch": True,
            "batch-size": 2,
            "query-params": {
                "foo": "resources[0].name"
            }
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req = request_mock.call_args[1]

        # 5 resources with max batch size 2 == 3 calls
        self.assertEqual(3, len(request_mock.call_args_list))

        # Check out one of the calls in detail
        self.assertEqual("http://foo.com?foo=test_name", req['url'])
        self.assertEqual("POST", req['method'])
        self.assertEqual({}, req['headers'])

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_batch_body(self, request_mock):
        """Body/header expressions are evaluated and JSON-serialized."""
        resources = [
            {
                "name": "test_name",
                "value": "test_value"
            }
        ]

        data = {
            "url": "http://foo.com",
            "batch": True,
            "body": "resources[].name",
            "body-size": 10,
            "headers": {
                "test": "'header'"
            },
            "query-params": {
                "foo": "resources[0].name"
            }
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req = request_mock.call_args[1]

        self.assertEqual("http://foo.com?foo=test_name", req['url'])
        self.assertEqual("POST", req['method'])
        self.assertEqual(b'[\n"test_name"\n]', req['body'])
        self.assertEqual(
            {"test": "header", "Content-Type": "application/json"},
            req['headers'])

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_date_serializer(self, request_mock):
        """datetime values in resources serialize to ISO-8601 strings."""
        current = datetime.datetime.utcnow()
        resources = [
            {
                "name": "test1",
                "value": current
            },
        ]

        data = {
            "url": "http://foo.com",
            "body": "resources[]",
            'batch': True,
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req1 = request_mock.call_args_list[0][1]

        self.assertEqual(
            json.loads(req1['body'])[0]['value'],
            current.isoformat())

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_no_batch(self, request_mock):
        """Without batch mode, one request is issued per resource."""
        resources = [
            {
                "name": "test1",
                "value": "test_value"
            },
            {
                "name": "test2",
                "value": "test_value"
            }
        ]

        data = {
            "url": "http://foo.com",
            "query-params": {
                "foo": "resource.name"
            }
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req1 = request_mock.call_args_list[0][1]
        req2 = request_mock.call_args_list[1][1]

        self.assertEqual("http://foo.com?foo=test1", req1['url'])
        self.assertEqual("http://foo.com?foo=test2", req2['url'])

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_existing_query_string(self, request_mock):
        """Generated query params merge with ones already in the URL."""
        resources = [
            {
                "name": "test1",
                "value": "test_value"
            },
            {
                "name": "test2",
                "value": "test_value"
            }
        ]

        data = {
            "url": "http://foo.com?existing=test",
            "query-params": {
                "foo": "resource.name"
            }
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req1 = request_mock.call_args_list[0][1]
        req2 = request_mock.call_args_list[1][1]

        self.assertIn("existing=test", req1['url'])
        self.assertIn("foo=test1", req1['url'])
        self.assertIn("existing=test", req2['url'])
        self.assertIn("foo=test2", req2['url'])

    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_policy_metadata(self, request_mock):
        """Expressions may reference policy metadata (policy.name)."""
        resources = [
            {
                "name": "test1",
                "value": "test_value"
            },
            {
                "name": "test2",
                "value": "test_value"
            }
        ]

        data = {
            "url": "http://foo.com",
            "query-params": {
                "policy": "policy.name"
            }
        }

        wh = Webhook(data=data, manager=self._get_manager())
        wh.process(resources)
        req1 = request_mock.call_args_list[0][1]
        req2 = request_mock.call_args_list[1][1]

        self.assertEqual("http://foo.com?policy=webhook_policy", req1['url'])
        self.assertEqual("http://foo.com?policy=webhook_policy", req2['url'])

    @mock.patch('c7n.actions.webhook.urllib3.ProxyManager.request')
    @mock.patch('c7n.actions.webhook.urllib3.PoolManager.request')
    def test_process_with_http_proxy(self, pool_request_mock, proxy_request_mock):
        """HTTP_PROXY in the environment routes through ProxyManager only."""
        with mock.patch.dict(os.environ,
                             {'HTTP_PROXY': 'http://mock.http.proxy.server:8000'},
                             clear=True):
            resources = [
                {
                    "name": "test_name",
                    "value": "test_value"
                }
            ]

            data = {
                "url": "http://foo.com"
            }

            wh = Webhook(data=data, manager=self._get_manager())
            wh.process(resources)

            proxy_req = proxy_request_mock.call_args[1]

            self.assertEqual("http://foo.com", proxy_req['url'])
            self.assertEqual("POST", proxy_req['method'])
            self.assertEqual(1, proxy_request_mock.call_count)
            self.assertEqual(0, pool_request_mock.call_count)

    def _get_manager(self):
        """The tests don't require real resource data
        or recordings, but they do need a valid manager with
        policy metadata so we just make one here to use"""

        policy = self.load_policy({
            "name": "webhook_policy",
            "resource": "ec2",
            "actions": [
                {
                    "type": "webhook",
                    "url": "http://foo.com"}
            ]})

        return policy.resource_manager
| apache-2.0 | 4,588,318,786,688,363,500 | 28.433657 | 82 | 0.464651 | false |
dude56987/YoutubeTV | resources/lib/tables.py | 1 | 8449 | #########################################################################
# Generic database libary using pickle to store values in files.
# Copyright (C) 2016 Carl J Smith
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
########################################################################
import masterdebug
debug=masterdebug.init(False)
from files import loadFile
from files import writeFile
################################################################################
# import all the things
from pickle import loads as unpickle
from pickle import dumps as pickle
from os.path import join as pathJoin
from os.path import exists as pathExists
from os import listdir
from os import makedirs
from os import remove as removeFile
from random import choice
################################################################################
class table():
    def __init__(self,path):
        '''
        DB table to store things as files and directories. This is
        designed to reduce ram usage when reading things from large
        databases. Specifically this is designed for caches.

        # variables #
        .path
          The path on the filesystem where the table is stored.
        .names
          Gives you a list containing the names of all stored
          values as strings.
        .namePaths
          Gives you a dict where the keys are the names and
          the value is the path of that value database file
        .length
          The length of names stored in this table
        '''
        # path of the root of the cache, this is where files
        # will be stored on the system
        self.path = path
        # create the paths if they do not exist
        if not pathExists(self.path):
            makedirs(self.path)
        debug.add('table path', self.path)
        # the path prefix is for tables stored in tables
        self.pathPrefix = ''
        # path of the on-disk index mapping value names to file paths
        namesPath = pathJoin(self.path, 'names.table')
        # if no index exists yet, create an empty one so loads below work
        if not pathExists(namesPath):
            writeFile(namesPath, pickle(dict()))
        # load the name -> path index
        self.namePaths = unpickle(loadFile(namesPath))
        debug.add('self.namePaths', self.namePaths)
        # list of all the names of values stored in the table
        self.names = list(self.namePaths.keys())
        debug.add('self.names', self.names)
        # number of values stored in the table
        self.length = len(self.names)
        debug.add('self.length', self.length)
        # the protected list is an array of names that are
        # protected from limit cleaning
        protectedPath = pathJoin(self.path, 'protected.table')
        if pathExists(protectedPath):
            self.protectedList = unpickle(loadFile(protectedPath))
        else:
            self.protectedList = []
        # load the limit value from file if it exists; None means no limit
        limitPath = pathJoin(self.path, 'limit.table')
        if pathExists(limitPath):
            self.limit = unpickle(loadFile(limitPath))
        else:
            self.limit = None
    ############################################################################
    def reset(self):
        '''
        Delete all stored values stored in the table.
        '''
        # iterate over a snapshot: deleteValue() rebinds self.names,
        # so iterating the live attribute would be fragile
        for value in list(self.names):
            self.deleteValue(value)
    ############################################################################
    def setProtected(self,name):
        '''
        Set a name in the table to be protected from removal
        because of limits.
        '''
        # BUGFIX: keep the in-memory protected list in sync (the original
        # only rewrote the file, so checkLimits could still delete the
        # value during this session) and avoid duplicate entries
        if name not in self.protectedList:
            self.protectedList.append(name)
        # persist the pickled list, matching how __init__ reads it back
        filePath = pathJoin(self.path, 'protected.table')
        writeFile(filePath, pickle(self.protectedList))
    ############################################################################
    def setLimit(self,limit):
        '''
        Set the limit of values that are stored in this table.
        This ignores protected values.
        '''
        # set the limit in this instance
        self.limit = limit
        # BUGFIX: store the pickled value; __init__ unpickles this file,
        # so writing the raw value would break the next load
        filePath = pathJoin(self.path, 'limit.table')
        success = writeFile(filePath, pickle(limit))
        return success
    ############################################################################
    def checkLimits(self):
        '''
        Enforce the configured limit: when the number of unprotected
        values exceeds self.limit, delete one randomly chosen
        unprotected value. Returns False if a required deletion
        failed, True otherwise.
        '''
        # BUGFIX: the original compared against the bare name ``limit``
        # (a NameError once a limit was configured); use self.limit
        if self.limit is not None and \
                self.length - len(self.protectedList) > self.limit:
            deathList = []
            for name in self.names:
                if name not in self.protectedList:
                    deathList.append(name)
            # randomly pick a value to delete
            # TODO: track insertion times in table metadata and delete
            # the oldest value instead of a random one
            deathMark = choice(deathList)
            # delete the value
            if self.deleteValue(deathMark) is False:
                return False
        # successfully removed item or no items needed
        # to be removed
        return True
    ############################################################################
    def loadValue(self,name):
        '''
        Loads a saved value and returns it. Returns False when the
        name is unknown or its backing file is missing.
        '''
        # find the file path in the names array
        if name not in self.names:
            return False
        filePath = self.namePaths[name]
        # return False if the backing file does not exist
        if not pathExists(filePath):
            return False
        # load and unpickle the stored data
        fileData = unpickle(loadFile(filePath))
        debug.add('loading value ' + str(name), fileData)
        return fileData
    ############################################################################
    def saveValue(self,name,value):
        '''
        Save a value with the name name and the value value.
        Returns the result of writing the value file to disk.
        '''
        debug.add('saving value ' + str(name), value)
        # create a file association for the name to store the value
        if name not in self.names:
            debug.add('name not in self.names')
            # find a sequential filename that does not already exist in
            # the database directory (listdir hoisted out of the loop)
            existingFiles = listdir(self.path)
            counter = 0
            newName = str(counter) + '.value'
            while newName in existingFiles:
                counter += 1
                newName = str(counter) + '.value'
            debug.add('newname', newName)
            # set the metadata value for the filepaths in this table instance
            self.namePaths[name] = pathJoin(self.path, newName)
            # write the newly created name association to table metadata on disk
            writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
            debug.add('namePaths', self.namePaths)
            # update the length and names attributes
            self.names = list(self.namePaths.keys())
            self.length = len(self.names)
        # write the pickled value back onto the disk
        fileData = writeFile(self.namePaths[name], pickle(value))
        return fileData
    ############################################################################
    def deleteValue(self,name):
        '''
        Delete a value with name name. Returns True on success, False
        when the name or its backing file does not exist.
        '''
        debug.add('deleting value ', name)
        # figure out the path to the named value file
        if name not in self.names:
            return False
        filePath = self.namePaths[name]
        # remove the metadata entry
        del self.namePaths[name]
        # write changes to database metadata file
        writeFile(pathJoin(self.path, 'names.table'), pickle(self.namePaths))
        # update the length and names attributes
        self.names = list(self.namePaths.keys())
        self.length = len(self.names)
        if not pathExists(filePath):
            return False
        # remove the file associated with the value
        removeFile(filePath)
        return True
| gpl-3.0 | -4,693,631,649,817,199,000 | 34.953191 | 81 | 0.644928 | false |
CubicERP/odoo | addons/sale/sale.py | 1 | 70307 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from datetime import datetime, timedelta
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT
import openerp.addons.decimal_precision as dp
from openerp import workflow
class res_company(osv.Model):
    # Extend res.company with a default terms-and-conditions text; the
    # help string documents its use on quotations.
    _inherit = "res.company"
    _columns = {
        'sale_note': fields.text('Default Terms and Conditions', translate=True, help="Default terms and conditions for quotations."),
    }
class sale_order(osv.osv):
_name = "sale.order"
_inherit = ['mail.thread', 'ir.needaction_mixin']
_description = "Sales Order"
_track = {
'state': {
'sale.mt_order_confirmed': lambda self, cr, uid, obj, ctx=None: obj.state in ['manual'],
'sale.mt_order_sent': lambda self, cr, uid, obj, ctx=None: obj.state in ['sent']
},
}
    def _amount_line_tax(self, cr, uid, line, context=None):
        """Return the total tax amount of one sale order line.

        Taxes are computed on the unit price net of the line discount, for
        the ordered quantity, using the order's partner for the tax lookup.
        """
        val = 0.0
        for c in self.pool.get('account.tax').compute_all(cr, uid, line.tax_id, line.price_unit * (1-(line.discount or 0.0)/100.0), line.product_uom_qty, line.product_id, line.order_id.partner_id)['taxes']:
            val += c.get('amount', 0.0)
        return val
    def _amount_all_wrapper(self, cr, uid, ids, field_name, arg, context=None):
        """Wrapper because of direct method passing as parameter for function
        fields: fields.function needs a stable unbound callable, so the
        store triggers reference this thin indirection to _amount_all()."""
        return self._amount_all(cr, uid, ids, field_name, arg, context=context)
    def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
        """Compute amount_untaxed, amount_tax and amount_total per order.

        Sums the line subtotals and per-line taxes, then rounds both with
        the order pricelist's currency before adding them for the total.
        """
        cur_obj = self.pool.get('res.currency')
        res = {}
        for order in self.browse(cr, uid, ids, context=context):
            res[order.id] = {
                'amount_untaxed': 0.0,
                'amount_tax': 0.0,
                'amount_total': 0.0,
            }
            # val1 accumulates untaxed subtotals, val accumulates tax amounts
            val = val1 = 0.0
            cur = order.pricelist_id.currency_id
            for line in order.order_line:
                val1 += line.price_subtotal
                val += self._amount_line_tax(cr, uid, line, context=context)
            res[order.id]['amount_tax'] = cur_obj.round(cr, uid, cur, val)
            res[order.id]['amount_untaxed'] = cur_obj.round(cr, uid, cur, val1)
            res[order.id]['amount_total'] = res[order.id]['amount_untaxed'] + res[order.id]['amount_tax']
        return res
def _invoiced_rate(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
if sale.invoiced:
res[sale.id] = 100.0
continue
tot = 0.0
for invoice in sale.invoice_ids:
if invoice.state not in ('draft', 'cancel'):
tot += invoice.amount_untaxed
if tot:
res[sale.id] = min(100.0, tot * 100.0 / (sale.amount_untaxed or 1.00))
else:
res[sale.id] = 0.0
return res
def _invoice_exists(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = False
if sale.invoice_ids:
res[sale.id] = True
return res
def _invoiced(self, cursor, user, ids, name, arg, context=None):
res = {}
for sale in self.browse(cursor, user, ids, context=context):
res[sale.id] = True
invoice_existence = False
for invoice in sale.invoice_ids:
if invoice.state!='cancel':
invoice_existence = True
if invoice.state != 'paid':
res[sale.id] = False
break
if not invoice_existence or sale.state == 'manual':
res[sale.id] = False
return res
    def _invoiced_search(self, cursor, user, obj, name, args, context=None):
        """Search implementation for the 'invoiced'/'invoice_exists' function fields.

        Runs raw SQL over sale_order_invoice_rel/account_invoice: searching
        invoiced=True matches orders having a paid invoice; searching
        invoiced=False matches orders with an open (non-paid, non-cancelled)
        invoice and additionally all orders without any invoice at all.
        """
        if not len(args):
            return []
        clause = ''
        sale_clause = ''
        no_invoiced = False
        for arg in args:
            # '= True' and '!= False' both mean "paid"
            if (arg[1] == '=' and arg[2]) or (arg[1] == '!=' and not arg[2]):
                clause += 'AND inv.state = \'paid\''
            else:
                clause += 'AND inv.state != \'cancel\' AND sale.state != \'cancel\' AND inv.state <> \'paid\' AND rel.order_id = sale.id '
                sale_clause = ', sale_order AS sale '
                no_invoiced = True

        cursor.execute('SELECT rel.order_id ' \
                'FROM sale_order_invoice_rel AS rel, account_invoice AS inv '+ sale_clause + \
                'WHERE rel.invoice_id = inv.id ' + clause)
        res = cursor.fetchall()
        if no_invoiced:
            # also pick up orders that never got an invoice
            cursor.execute('SELECT sale.id ' \
                    'FROM sale_order AS sale ' \
                    'WHERE sale.id NOT IN ' \
                    '(SELECT rel.order_id ' \
                    'FROM sale_order_invoice_rel AS rel) and sale.state != \'cancel\'')
            res.extend(cursor.fetchall())
        if not res:
            return [('id', '=', 0)]
        return [('id', 'in', [x[0] for x in res])]
def _get_order(self, cr, uid, ids, context=None):
result = {}
for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
result[line.order_id.id] = True
return result.keys()
def _get_default_company(self, cr, uid, context=None):
company_id = self.pool.get('res.users')._get_company(cr, uid, context=context)
if not company_id:
raise osv.except_osv(_('Error!'), _('There is no default company for the current user!'))
return company_id
def _get_default_section_id(self, cr, uid, context=None):
""" Gives default section by checking if present in the context """
section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
if not section_id:
section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
return section_id
    def _resolve_section_id_from_context(self, cr, uid, context=None):
        """Return the Sales Team id carried by the 'default_section_id'
        context key, or None if it cannot be resolved to a single team.

        The key may hold a database id directly, or a team name (string)
        that is resolved via name_search and accepted only when unambiguous.
        """
        if context is None:
            context = {}
        if type(context.get('default_section_id')) in (int, long):
            return context.get('default_section_id')
        if isinstance(context.get('default_section_id'), basestring):
            # name given (e.g. from an XML action context): resolve it,
            # but only trust a unique match
            section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
            if len(section_ids) == 1:
                return int(section_ids[0][0])
        return None
_columns = {
'name': fields.char('Order Reference', required=True, copy=False,
readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True),
'origin': fields.char('Source Document', help="Reference of the document that generated this sales order request."),
'client_order_ref': fields.char('Reference/Description', copy=False),
'state': fields.selection([
('draft', 'Draft Quotation'),
('confirm', 'Confirmed'),
('sent', 'Quotation Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Sales Order'),
('manual', 'Sale to Invoice'),
('shipping_except', 'Shipping Exception'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, copy=False, help="Gives the status of the quotation or sales order.\
\nThe exception status is automatically set when a cancel operation occurs \
in the invoice validation (Invoice Exception) or in the picking list process (Shipping Exception).\nThe 'Waiting Schedule' status is set when the invoice is confirmed\
but waiting for the scheduler to run on the order date.", select=True),
'date_order': fields.datetime('Date', required=True, readonly=True, select=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=False),
'create_date': fields.datetime('Creation Date', readonly=True, select=True, help="Date on which sales order is created."),
'date_confirm': fields.date('Confirmation Date', readonly=True, select=True, help="Date on which sales order is confirmed.", copy=False),
'user_id': fields.many2one('res.users', 'Salesperson', states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, select=True, track_visibility='onchange'),
'partner_id': fields.many2one('res.partner', 'Customer', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, required=True, change_default=True, select=True, track_visibility='always'),
'partner_invoice_id': fields.many2one('res.partner', 'Invoice Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Invoice address for current sales order."),
'partner_shipping_id': fields.many2one('res.partner', 'Delivery Address', readonly=True, required=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Delivery address for current sales order."),
'order_policy': fields.selection([
('manual', 'On Demand'),
], 'Create Invoice', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]},
help="""This field controls how invoice and delivery operations are synchronized."""),
'pricelist_id': fields.many2one('product.pricelist', 'Pricelist', required=True, readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="Pricelist for current sales order."),
'currency_id': fields.related('pricelist_id', 'currency_id', type="many2one", relation="res.currency", string="Currency", readonly=True, required=True),
'project_id': fields.many2one('account.analytic.account', 'Contract / Analytic', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, help="The analytic account related to a sales order."),
'order_line': fields.one2many('sale.order.line', 'order_id', 'Order Lines', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}, copy=True),
'invoice_ids': fields.many2many('account.invoice', 'sale_order_invoice_rel', 'order_id', 'invoice_id', 'Invoices', readonly=True, copy=False, help="This is the list of invoices that have been generated for this sales order. The same sales order may have been invoiced in several times (by line for example)."),
'invoiced_rate': fields.function(_invoiced_rate, string='Invoiced Ratio', type='float'),
'invoiced': fields.function(_invoiced, string='Paid',
fnct_search=_invoiced_search, type='boolean', help="It indicates that an invoice has been paid."),
'invoice_exists': fields.function(_invoice_exists, string='Invoiced',
fnct_search=_invoiced_search, type='boolean', help="It indicates that sales order has at least one invoice."),
'note': fields.text('Terms and conditions'),
'amount_untaxed': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Untaxed Amount',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The amount without tax.", track_visibility='always'),
'amount_tax': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Taxes',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The tax amount."),
'amount_total': fields.function(_amount_all_wrapper, digits_compute=dp.get_precision('Account'), string='Total',
store={
'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
},
multi='sums', help="The total amount."),
'payment_term': fields.many2one('account.payment.term', 'Payment Term'),
'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position'),
'company_id': fields.many2one('res.company', 'Company'),
'section_id': fields.many2one('crm.case.section', 'Sales Team'),
'procurement_group_id': fields.many2one('procurement.group', 'Procurement group', copy=False),
'product_id': fields.related('order_line', 'product_id', type='many2one', relation='product.product', string='Product'),
}
_defaults = {
'date_order': fields.datetime.now,
'order_policy': 'manual',
'company_id': _get_default_company,
'state': 'draft',
'user_id': lambda obj, cr, uid, context: uid,
'name': lambda obj, cr, uid, context: '/',
'partner_invoice_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['invoice'])['invoice'],
'partner_shipping_id': lambda self, cr, uid, context: context.get('partner_id', False) and self.pool.get('res.partner').address_get(cr, uid, [context['partner_id']], ['delivery'])['delivery'],
'note': lambda self, cr, uid, context: self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.sale_note,
'section_id': lambda s, cr, uid, c: s._get_default_section_id(cr, uid, c),
}
_sql_constraints = [
('name_uniq', 'unique(name, company_id)', 'Order Reference must be unique per Company!'),
]
_order = 'date_order desc, id desc'
# Form filling
def unlink(self, cr, uid, ids, context=None):
sale_orders = self.read(cr, uid, ids, ['state'], context=context)
unlink_ids = []
for s in sale_orders:
if s['state'] in ['draft', 'cancel']:
unlink_ids.append(s['id'])
else:
raise osv.except_osv(_('Invalid Action!'), _('In order to delete a confirmed sales order, you must cancel it before!'))
return osv.osv.unlink(self, cr, uid, unlink_ids, context=context)
def copy_quotation(self, cr, uid, ids, context=None):
id = self.copy(cr, uid, ids[0], context=context)
view_ref = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'sale', 'view_order_form')
view_id = view_ref and view_ref[1] or False,
return {
'type': 'ir.actions.act_window',
'name': _('Sales Order'),
'res_model': 'sale.order',
'res_id': id,
'view_type': 'form',
'view_mode': 'form',
'view_id': view_id,
'target': 'current',
'nodestroy': True,
}
    def onchange_pricelist_id(self, cr, uid, ids, pricelist_id, order_lines, context=None):
        """On pricelist change: propagate the new currency, and warn the
        user that prices of already-entered order lines are NOT updated."""
        context = context or {}
        if not pricelist_id:
            return {}
        value = {
            'currency_id': self.pool.get('product.pricelist').browse(cr, uid, pricelist_id, context=context).currency_id.id
        }
        # no warning needed when there are no lines yet ([(6, 0, [])] is the
        # o2m command form of an empty line list)
        if not order_lines or order_lines == [(6, 0, [])]:
            return {'value': value}
        warning = {
            'title': _('Pricelist Warning!'),
            'message' : _('If you change the pricelist of this order (and eventually the currency), prices of existing order lines will not be updated.')
        }
        return {'warning': warning, 'value': value}
def get_salenote(self, cr, uid, ids, partner_id, context=None):
context_lang = context.copy()
if partner_id:
partner_lang = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).lang
context_lang.update({'lang': partner_lang})
return self.pool.get('res.users').browse(cr, uid, uid, context=context_lang).company_id.sale_note
    def onchange_delivery_id(self, cr, uid, ids, company_id, partner_id, delivery_id, fiscal_position, context=None):
        """Compute a fiscal position from the delivery address when none is
        set; an explicitly chosen fiscal position is left untouched."""
        r = {'value': {}}
        if not fiscal_position:
            if not company_id:
                company_id = self._get_default_company(cr, uid, context=context)
            fiscal_position = self.pool['account.fiscal.position'].get_fiscal_position(cr, uid, company_id, partner_id, delivery_id, context=context)
        if fiscal_position:
            r['value']['fiscal_position'] = fiscal_position
        return r
    def onchange_partner_id(self, cr, uid, ids, part, context=None):
        """On customer change: refresh addresses, payment term, salesperson,
        fiscal position, pricelist and the default terms-and-conditions note."""
        if not part:
            return {'value': {'partner_invoice_id': False, 'partner_shipping_id': False, 'payment_term': False, 'fiscal_position': False}}

        part = self.pool.get('res.partner').browse(cr, uid, part, context=context)
        addr = self.pool.get('res.partner').address_get(cr, uid, [part.id], ['delivery', 'invoice', 'contact'])
        pricelist = part.property_product_pricelist and part.property_product_pricelist.id or False
        payment_term = part.property_payment_term and part.property_payment_term.id or False
        # partner's dedicated salesperson wins over the current user
        dedicated_salesman = part.user_id and part.user_id.id or uid
        val = {
            'partner_invoice_id': addr['invoice'],
            'partner_shipping_id': addr['delivery'],
            'payment_term': payment_term,
            'user_id': dedicated_salesman,
        }
        delivery_onchange = self.onchange_delivery_id(cr, uid, ids, False, part.id, addr['delivery'], False, context=context)
        val.update(delivery_onchange['value'])
        if pricelist:
            val['pricelist_id'] = pricelist
        sale_note = self.get_salenote(cr, uid, ids, part.id, context=context)
        if sale_note: val.update({'note': sale_note})
        return {'value': val}
    def create(self, cr, uid, vals, context=None):
        """Create a sale order: assign the sequence reference, fill partner
        dependent defaults that the caller omitted, and log a creation note
        in the chatter (without the automatic 'created' mail)."""
        if context is None:
            context = {}
        if vals.get('name', '/') == '/':
            vals['name'] = self.pool.get('ir.sequence').get(cr, uid, 'sale.order') or '/'
        if vals.get('partner_id') and any(f not in vals for f in ['partner_invoice_id', 'partner_shipping_id', 'pricelist_id', 'fiscal_position']):
            defaults = self.onchange_partner_id(cr, uid, [], vals['partner_id'], context=context)['value']
            if not vals.get('fiscal_position') and vals.get('partner_shipping_id'):
                # NOTE(review): this passes None as partner_id, the partner as
                # delivery_id and the shipping address in the fiscal_position
                # slot of onchange_delivery_id — verify against that method's
                # signature; it looks like the arguments are shifted.
                delivery_onchange = self.onchange_delivery_id(cr, uid, [], vals.get('company_id'), None, vals['partner_id'], vals.get('partner_shipping_id'), context=context)
                defaults.update(delivery_onchange['value'])
            # explicit vals win over the onchange-provided defaults
            vals = dict(defaults, **vals)
        ctx = dict(context or {}, mail_create_nolog=True)
        new_id = super(sale_order, self).create(cr, uid, vals, context=ctx)
        self.message_post(cr, uid, [new_id], body=_("Quotation created"), context=ctx)
        return new_id
    def button_dummy(self, cr, uid, ids, context=None):
        # No-op button: used by form views to force a record save/refresh.
        return True
# FIXME: deprecated method, overriders should be using _prepare_invoice() instead.
# can be removed after 6.1.
    def _inv_get(self, cr, uid, order, context=None):
        # Deprecated extension hook kept for backward compatibility (see the
        # note above): override _prepare_invoice() instead. Returns extra
        # values merged into the invoice creation dict.
        return {}
    def _prepare_invoice(self, cr, uid, order, lines, context=None):
        """Prepare the dict of values to create the new invoice for a
        sales order. This method may be overridden to implement custom
        invoice generation (making sure to call super() to establish
        a clean extension chain).

        :param browse_record order: sale.order record to invoice
        :param list(int) lines: list of invoice line IDs that must be
                                attached to the invoice
        :return: dict of value to create() the invoice
        :raise osv.except_osv: when the company has no sale journal
        """
        if context is None:
            context = {}
        # pick the company's first sale journal
        journal_ids = self.pool.get('account.journal').search(cr, uid,
            [('type', '=', 'sale'), ('company_id', '=', order.company_id.id)],
            limit=1)
        if not journal_ids:
            raise osv.except_osv(_('Error!'),
                _('Please define sales journal for this company: "%s" (id:%d).') % (order.company_id.name, order.company_id.id))
        invoice_vals = {
            'name': order.client_order_ref or '',
            'origin': order.name,
            'type': 'out_invoice',
            'reference': order.client_order_ref or order.name,
            'account_id': order.partner_id.property_account_receivable.id,
            'partner_id': order.partner_invoice_id.id,
            'journal_id': journal_ids[0],
            'invoice_line': [(6, 0, lines)],
            'currency_id': order.pricelist_id.currency_id.id,
            'comment': order.note,
            'payment_term': order.payment_term and order.payment_term.id or False,
            'fiscal_position': order.fiscal_position.id or order.partner_id.property_account_position.id,
            'date_invoice': context.get('date_invoice', False),
            'company_id': order.company_id.id,
            'user_id': order.user_id and order.user_id.id or False,
            'section_id' : order.section_id.id
        }

        # Care for deprecated _inv_get() hook - FIXME: to be removed after 6.1
        invoice_vals.update(self._inv_get(cr, uid, order, context=context))
        return invoice_vals
    def _make_invoice(self, cr, uid, order, lines, context=None):
        """Create one account.invoice for `order` from invoice line ids `lines`.

        Lines of existing non-cancelled invoices (e.g. advance/deposit
        invoices) that did not originate from already-invoiced order lines
        are copied onto the new invoice with a negated unit price, so the
        deposit is deducted. Returns the new invoice id.
        """
        inv_obj = self.pool.get('account.invoice')
        obj_invoice_line = self.pool.get('account.invoice.line')
        if context is None:
            context = {}
        invoiced_sale_line_ids = self.pool.get('sale.order.line').search(cr, uid, [('order_id', '=', order.id), ('invoiced', '=', True)], context=context)
        from_line_invoice_ids = []
        for invoiced_sale_line_id in self.pool.get('sale.order.line').browse(cr, uid, invoiced_sale_line_ids, context=context):
            for invoice_line_id in invoiced_sale_line_id.invoice_lines:
                if invoice_line_id.invoice_id.id not in from_line_invoice_ids:
                    from_line_invoice_ids.append(invoice_line_id.invoice_id.id)
        for preinv in order.invoice_ids:
            if preinv.state not in ('cancel',) and preinv.id not in from_line_invoice_ids:
                for preline in preinv.invoice_line:
                    # deduct the pre-invoice (deposit) amount
                    inv_line_id = obj_invoice_line.copy(cr, uid, preline.id, {'invoice_id': False, 'price_unit': -preline.price_unit})
                    lines.append(inv_line_id)
        inv = self._prepare_invoice(cr, uid, order, lines, context=context)
        inv_id = inv_obj.create(cr, uid, inv, context=context)
        # apply the payment term to get the due date, then compute taxes
        data = inv_obj.onchange_payment_term_date_invoice(cr, uid, [inv_id], inv['payment_term'], time.strftime(DEFAULT_SERVER_DATE_FORMAT))
        if data.get('value', False):
            inv_obj.write(cr, uid, [inv_id], data['value'], context=context)
        inv_obj.button_compute(cr, uid, [inv_id])
        return inv_id
    def print_quotation(self, cr, uid, ids, context=None):
        '''
        This function prints the sales order and mark it as sent, so that we can see more easily the next step of the workflow
        '''
        assert len(ids) == 1, 'This option should only be used for a single id at a time'
        # flag the quotation as 'sent' before returning the report action
        self.signal_workflow(cr, uid, ids, 'quotation_sent')
        return self.pool['report'].get_action(cr, uid, ids, 'sale.report_saleorder', context=context)
def manual_invoice(self, cr, uid, ids, context=None):
""" create invoices for the given sales orders (ids), and open the form
view of one of the newly created invoices
"""
mod_obj = self.pool.get('ir.model.data')
# create invoices through the sales orders' workflow
inv_ids0 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
self.signal_workflow(cr, uid, ids, 'manual_invoice')
inv_ids1 = set(inv.id for sale in self.browse(cr, uid, ids, context) for inv in sale.invoice_ids)
# determine newly created invoices
new_inv_ids = list(inv_ids1 - inv_ids0)
res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
res_id = res and res[1] or False,
return {
'name': _('Customer Invoices'),
'view_type': 'form',
'view_mode': 'form',
'view_id': [res_id],
'res_model': 'account.invoice',
'context': "{'type':'out_invoice'}",
'type': 'ir.actions.act_window',
'nodestroy': True,
'target': 'current',
'res_id': new_inv_ids and new_inv_ids[0] or False,
}
    def action_view_invoice(self, cr, uid, ids, context=None):
        '''
        This function returns an action that display existing invoices of given sales order ids. It can either be a in a list or in a form view, if there is only one invoice to show.
        '''
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')

        result = mod_obj.get_object_reference(cr, uid, 'account', 'action_invoice_tree1')
        id = result and result[1] or False
        result = act_obj.read(cr, uid, [id], context=context)[0]
        #compute the number of invoices to display
        inv_ids = []
        for so in self.browse(cr, uid, ids, context=context):
            inv_ids += [invoice.id for invoice in so.invoice_ids]
        #choose the view_mode accordingly: list of invoices, or single form
        if len(inv_ids)>1:
            result['domain'] = "[('id','in',["+','.join(map(str, inv_ids))+"])]"
        else:
            res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
            result['views'] = [(res and res[1] or False, 'form')]
            result['res_id'] = inv_ids and inv_ids[0] or False
        return result
def test_no_product(self, cr, uid, order, context):
for line in order.order_line:
if line.product_id and (line.product_id.type<>'service'):
return False
return True
    def action_invoice_create(self, cr, uid, ids, grouped=False, states=None, date_invoice = False, context=None):
        """Create invoices for the orders' uninvoiced lines in `states`.

        With grouped=True, orders of the same invoice partner are merged
        into a single invoice (references concatenated with '|'); otherwise
        one invoice is created per order. Returns the id of the last created
        invoice, or an existing draft invoice id when nothing was invoiceable.
        """
        if states is None:
            states = ['confirmed', 'done', 'exception']
        res = False
        invoices = {}
        invoice_ids = []
        invoice = self.pool.get('account.invoice')
        obj_sale_order_line = self.pool.get('sale.order.line')
        partner_currency = {}
        # If date was specified, use it as date invoiced, useful when invoices are generated this month and put the
        # last day of the last month as invoice date
        if date_invoice:
            context = dict(context or {}, date_invoice=date_invoice)
        for o in self.browse(cr, uid, ids, context=context):
            currency_id = o.pricelist_id.currency_id.id
            # grouping requires a single currency per partner
            if (o.partner_id.id in partner_currency) and (partner_currency[o.partner_id.id] <> currency_id):
                raise osv.except_osv(
                    _('Error!'),
                    _('You cannot group sales having different currencies for the same partner.'))

            partner_currency[o.partner_id.id] = currency_id
            lines = []
            for line in o.order_line:
                if line.invoiced:
                    continue
                elif (line.state in states):
                    lines.append(line.id)
            created_lines = obj_sale_order_line.invoice_line_create(cr, uid, lines)
            if created_lines:
                invoices.setdefault(o.partner_invoice_id.id or o.partner_id.id, []).append((o, created_lines))
        if not invoices:
            # nothing new to invoice: reuse an existing draft invoice if any
            for o in self.browse(cr, uid, ids, context=context):
                for i in o.invoice_ids:
                    if i.state == 'draft':
                        return i.id
        for val in invoices.values():
            if grouped:
                res = self._make_invoice(cr, uid, val[0][0], reduce(lambda x, y: x + y, [l for o, l in val], []), context=context)
                invoice_ref = ''
                origin_ref = ''
                for o, l in val:
                    invoice_ref += (o.client_order_ref or o.name) + '|'
                    origin_ref += (o.origin or o.name) + '|'
                    self.write(cr, uid, [o.id], {'state': 'progress'})
                    cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res))
                    self.invalidate_cache(cr, uid, ['invoice_ids'], [o.id], context=context)
                #remove last '|' in invoice_ref
                if len(invoice_ref) >= 1:
                    invoice_ref = invoice_ref[:-1]
                if len(origin_ref) >= 1:
                    origin_ref = origin_ref[:-1]
                invoice.write(cr, uid, [res], {'origin': origin_ref, 'name': invoice_ref})
            else:
                for order, il in val:
                    res = self._make_invoice(cr, uid, order, il, context=context)
                    invoice_ids.append(res)
                    self.write(cr, uid, [order.id], {'state': 'progress'})
                    cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res))
                    self.invalidate_cache(cr, uid, ['invoice_ids'], [order.id], context=context)
        return res
    def action_invoice_cancel(self, cr, uid, ids, context=None):
        # Workflow callback: an attached invoice was cancelled, flag the order.
        self.write(cr, uid, ids, {'state': 'invoice_except'}, context=context)
        return True
    def action_invoice_end(self, cr, uid, ids, context=None):
        """Leave the invoice-exception state: reset exception lines to
        'confirmed' and move the order back to 'progress'."""
        for this in self.browse(cr, uid, ids, context=context):
            for line in this.order_line:
                if line.state == 'exception':
                    line.write({'state': 'confirmed'})
            if this.state == 'invoice_except':
                this.write({'state': 'progress'})
        return True
    def action_cancel(self, cr, uid, ids, context=None):
        """Cancel the sale orders.

        Refuses to cancel when an open (non-draft, non-cancelled) invoice is
        attached; otherwise cancels draft invoices through their workflow,
        cancels all related procurements, then cancels lines and orders.
        """
        if context is None:
            context = {}
        sale_order_line_obj = self.pool.get('sale.order.line')
        account_invoice_obj = self.pool.get('account.invoice')
        procurement_obj = self.pool.get('procurement.order')
        for sale in self.browse(cr, uid, ids, context=context):
            for inv in sale.invoice_ids:
                if inv.state not in ('draft', 'cancel'):
                    raise osv.except_osv(
                        _('Cannot cancel this sales order!'),
                        _('First cancel all invoices attached to this sales order.'))
                inv.signal_workflow('invoice_cancel')
            procurement_obj.cancel(cr, uid, sum([l.procurement_ids.ids for l in sale.order_line],[]))
            sale_order_line_obj.write(cr, uid, [l.id for l in  sale.order_line],
                    {'state': 'cancel'})
        self.write(cr, uid, ids, {'state': 'cancel'})
        return True
    def action_button_confirm(self, cr, uid, ids, context=None):
        # Form button: push the (single) selected quotation through the
        # 'order_confirm' workflow transition.
        assert len(ids) == 1, 'This option should only be used for a single id at a time.'
        self.signal_workflow(cr, uid, ids, 'order_confirm')
        return True
    def action_wait(self, cr, uid, ids, context=None):
        """Workflow step run at confirmation: move the order to 'manual'
        (invoice on demand, or services-only orders) or 'progress', stamp
        the confirmation date, and confirm every order line."""
        context = context or {}
        for o in self.browse(cr, uid, ids):
            if not o.order_line:
                raise osv.except_osv(_('Error!'),_('You cannot confirm a sales order which has no line.'))
            noprod = self.test_no_product(cr, uid, o, context)
            if (o.order_policy == 'manual') or noprod:
                self.write(cr, uid, [o.id], {'state': 'manual', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
            else:
                self.write(cr, uid, [o.id], {'state': 'progress', 'date_confirm': fields.date.context_today(self, cr, uid, context=context)})
            self.pool.get('sale.order.line').button_confirm(cr, uid, [x.id for x in o.order_line])
        return True
    def action_quotation_send(self, cr, uid, ids, context=None):
        '''
        This function opens a window to compose an email, with the edi sale template message loaded by default
        '''
        assert len(ids) == 1, 'This option should only be used for a single id at a time.'
        ir_model_data = self.pool.get('ir.model.data')
        # both lookups are best-effort: the wizard still opens without them
        try:
            template_id = ir_model_data.get_object_reference(cr, uid, 'sale', 'email_template_edi_sale')[1]
        except ValueError:
            template_id = False
        try:
            compose_form_id = ir_model_data.get_object_reference(cr, uid, 'mail', 'email_compose_message_wizard_form')[1]
        except ValueError:
            compose_form_id = False
        ctx = dict()
        ctx.update({
            'default_model': 'sale.order',
            'default_res_id': ids[0],
            'default_use_template': bool(template_id),
            'default_template_id': template_id,
            'default_composition_mode': 'comment',
            # the mail composer will fire the 'quotation_sent' transition
            'mark_so_as_sent': True
        })
        return {
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'form',
            'res_model': 'mail.compose.message',
            'views': [(compose_form_id, 'form')],
            'view_id': compose_form_id,
            'target': 'new',
            'context': ctx,
        }
    def action_done(self, cr, uid, ids, context=None):
        # Close the orders: mark all their lines, then the orders, as 'done'.
        for order in self.browse(cr, uid, ids, context=context):
            self.pool.get('sale.order.line').write(cr, uid, [line.id for line in order.order_line], {'state': 'done'}, context=context)
        return self.write(cr, uid, ids, {'state': 'done'}, context=context)
    def _prepare_order_line_procurement(self, cr, uid, order, line, group_id=False, context=None):
        """Return the values used to create the procurement.order of one
        sale order line (see action_ship_create)."""
        date_planned = self._get_date_planned(cr, uid, order, line, order.date_order, context=context)
        return {
            'name': line.name,
            'origin': order.name,
            'date_planned': date_planned,
            'product_id': line.product_id.id,
            'product_qty': line.product_uom_qty,
            'product_uom': line.product_uom.id,
            'product_uos_qty': (line.product_uos and line.product_uos_qty)\
                    or line.product_uom_qty,
            'product_uos': (line.product_uos and line.product_uos.id)\
                    or line.product_uom.id,
            'company_id': order.company_id.id,
            'group_id': group_id,
            # only the 'manual' policy exists in the bare sale module, so
            # this resolves to 'none' unless an extension adds 'picking'
            'invoice_state': (order.order_policy == 'picking') and '2binvoiced' or 'none',
            'sale_line_id': line.id
        }
def _get_date_planned(self, cr, uid, order, line, start_date, context=None):
date_planned = datetime.strptime(start_date, DEFAULT_SERVER_DATETIME_FORMAT) + timedelta(days=line.delay or 0.0)
return date_planned
    def _prepare_procurement_group(self, cr, uid, order, context=None):
        # Values of the procurement.group gathering all procurements of the order.
        return {'name': order.name, 'partner_id': order.partner_shipping_id.id}
def procurement_needed(self, cr, uid, ids, context=None):
#when sale is installed only, there is no need to create procurements, that's only
#further installed modules (sale_service, sale_stock) that will change this.
sale_line_obj = self.pool.get('sale.order.line')
res = []
for order in self.browse(cr, uid, ids, context=context):
res.append(sale_line_obj.need_procurement(cr, uid, [line.id for line in order.order_line], context=context))
return any(res)
    def action_ignore_delivery_exception(self, cr, uid, ids, context=None):
        # Leave the shipping-exception state without recreating the delivery:
        # back to 'progress' when an invoice already exists, else to 'manual'.
        for sale_order in self.browse(cr, uid, ids, context=context):
            self.write(cr, uid, ids, {'state': 'progress' if sale_order.invoice_exists else 'manual'}, context=context)
        return True
    def action_ship_create(self, cr, uid, ids, context=None):
        """Create the required procurements to supply sales order lines, also connecting
        the procurements to appropriate stock moves in order to bring the goods to the
        sales order's requested location.

        Also repairs procurements left in exception/cancel after a shipping
        exception, and resets the order state when leaving that exception.

        :return: True
        """
        context = context or {}
        context['lang'] = self.pool['res.users'].browse(cr, uid, uid).lang
        procurement_obj = self.pool.get('procurement.order')
        sale_line_obj = self.pool.get('sale.order.line')
        for order in self.browse(cr, uid, ids, context=context):
            proc_ids = []
            vals = self._prepare_procurement_group(cr, uid, order, context=context)
            if not order.procurement_group_id:
                group_id = self.pool.get("procurement.group").create(cr, uid, vals, context=context)
                order.write({'procurement_group_id': group_id})

            for line in order.order_line:
                #Try to fix exception procurement (possible when after a shipping exception the user choose to recreate)
                if line.procurement_ids:
                    #first check them to see if they are in exception or not (one of the related moves is cancelled)
                    procurement_obj.check(cr, uid, [x.id for x in line.procurement_ids if x.state not in ['cancel', 'done']])
                    line.refresh()
                    #run again procurement that are in exception in order to trigger another move
                    except_proc_ids = [x.id for x in line.procurement_ids if x.state in ('exception', 'cancel')]
                    procurement_obj.reset_to_confirmed(cr, uid, except_proc_ids, context=context)
                    proc_ids += except_proc_ids
                elif sale_line_obj.need_procurement(cr, uid, [line.id], context=context):
                    if (line.state == 'done') or not line.product_id:
                        continue
                    vals = self._prepare_order_line_procurement(cr, uid, order, line, group_id=order.procurement_group_id.id, context=context)
                    ctx = context.copy()
                    # defer the procurement run: all of them are run together below
                    ctx['procurement_autorun_defer'] = True
                    proc_id = procurement_obj.create(cr, uid, vals, context=ctx)
                    proc_ids.append(proc_id)
            #Confirm procurement order such that rules will be applied on it
            #note that the workflow normally ensure proc_ids isn't an empty list
            procurement_obj.run(cr, uid, proc_ids, context=context)

            #if shipping was in exception and the user choose to recreate the delivery order, write the new status of SO
            if order.state == 'shipping_except':
                val = {'state': 'progress', 'shipped': False}

                if (order.order_policy == 'manual'):
                    for line in order.order_line:
                        if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
                            val['state'] = 'manual'
                            break
                order.write(val)
        return True
    def onchange_fiscal_position(self, cr, uid, ids, fiscal_position, order_lines, context=None):
        '''Update taxes of order lines for each line where a product is defined.

        Remaps each line's ``tax_id`` through the selected fiscal position
        (``account.fiscal.position.map_tax``), walking the one2many command
        tuples exactly as the client sends them.

        :param list ids: not used
        :param int fiscal_position: sale order fiscal position
        :param list order_lines: command list for one2many write method
        '''
        order_line = []
        fiscal_obj = self.pool.get('account.fiscal.position')
        product_obj = self.pool.get('product.product')
        line_obj = self.pool.get('sale.order.line')
        # fpos stays False when no fiscal position is selected; map_tax then
        # returns the product's own taxes unchanged.
        fpos = False
        if fiscal_position:
            fpos = fiscal_obj.browse(cr, uid, fiscal_position, context=context)
        for line in order_lines:
            # create (0, 0, { fields })
            # update (1, ID, { fields })
            if line[0] in [0, 1]:
                prod = None
                # Prefer the product sent in the command values; fall back to
                # the product stored on the existing line being updated.
                if line[2].get('product_id'):
                    prod = product_obj.browse(cr, uid, line[2]['product_id'], context=context)
                elif line[1]:
                    prod = line_obj.browse(cr, uid, line[1], context=context).product_id
                if prod and prod.taxes_id:
                    # (6, 0, ids) replaces the whole tax m2m with the mapped set.
                    line[2]['tax_id'] = [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]
                order_line.append(line)
            # link (4, ID)
            # link all (6, 0, IDS)
            elif line[0] in [4, 6]:
                line_ids = line[0] == 4 and [line[1]] or line[2]
                for line_id in line_ids:
                    prod = line_obj.browse(cr, uid, line_id, context=context).product_id
                    if prod and prod.taxes_id:
                        # Turn the plain link into an update so the remapped
                        # taxes are written on the linked line.
                        order_line.append([1, line_id, {'tax_id': [[6, 0, fiscal_obj.map_tax(cr, uid, fpos, prod.taxes_id)]]}])
                    else:
                        order_line.append([4, line_id])
            else:
                # Other commands (delete/unlink/...) are passed through untouched.
                order_line.append(line)
        return {'value': {'order_line': order_line}}
def test_procurements_done(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if not all([x.state == 'done' for x in line.procurement_ids]):
return False
return True
def test_procurements_except(self, cr, uid, ids, context=None):
for sale in self.browse(cr, uid, ids, context=context):
for line in sale.order_line:
if any([x.state == 'cancel' for x in line.procurement_ids]):
return True
return False
# TODO add a field price_unit_uos
# - update it on change product and unit price
# - use it in report if there is a uos
class sale_order_line(osv.osv):
    """One line of a quotation / sale order: a product with its quantity,
    unit price, discount and taxes, plus the procurements it generated."""
    def need_procurement(self, cr, uid, ids, context=None):
        """Return True if at least one of the given lines requires a
        procurement to be created on order confirmation."""
        #when sale is installed only, there is no need to create procurements, that's only
        #further installed modules (sale_service, sale_stock) that will change this.
        prod_obj = self.pool.get('product.product')
        for line in self.browse(cr, uid, ids, context=context):
            # Decision is delegated per product to product.product, which
            # downstream modules override for stockable products/services.
            if prod_obj.need_procurement(cr, uid, [line.product_id.id], context=context):
                return True
        return False
def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
tax_obj = self.pool.get('account.tax')
cur_obj = self.pool.get('res.currency')
res = {}
if context is None:
context = {}
for line in self.browse(cr, uid, ids, context=context):
price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
taxes = tax_obj.compute_all(cr, uid, line.tax_id, price, line.product_uom_qty, line.product_id, line.order_id.partner_id)
cur = line.order_id.pricelist_id.currency_id
res[line.id] = cur_obj.round(cr, uid, cur, taxes['total'])
return res
def _get_uom_id(self, cr, uid, *args):
try:
proxy = self.pool.get('ir.model.data')
result = proxy.get_object_reference(cr, uid, 'product', 'product_uom_unit')
return result[1]
except Exception, ex:
return False
def _fnct_line_invoiced(self, cr, uid, ids, field_name, args, context=None):
res = dict.fromkeys(ids, False)
for this in self.browse(cr, uid, ids, context=context):
res[this.id] = this.invoice_lines and \
all(iline.invoice_id.state != 'cancel' for iline in this.invoice_lines)
return res
    def _order_lines_from_invoice(self, cr, uid, ids, context=None):
        # Store trigger for the 'invoiced' function field: returns the ids of
        # every sale.order.line whose order is linked to any of the given
        # invoices through the sale_order_invoice_rel m2m table.
        # direct access to the m2m table is the less convoluted way to achieve this (and is ok ACL-wise)
        cr.execute("""SELECT DISTINCT sol.id FROM sale_order_invoice_rel rel JOIN
                                                  sale_order_line sol ON (sol.order_id = rel.order_id)
                                    WHERE rel.invoice_id = ANY(%s)""", (list(ids),))
        return [i[0] for i in cr.fetchall()]
def _get_price_reduce(self, cr, uid, ids, field_name, arg, context=None):
res = dict.fromkeys(ids, 0.0)
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = line.price_subtotal / line.product_uom_qty
return res
_name = 'sale.order.line'
_description = 'Sales Order Line'
_columns = {
'order_id': fields.many2one('sale.order', 'Order Reference', required=True, ondelete='cascade', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'name': fields.text('Description', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of sales order lines."),
'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], change_default=True, readonly=True, states={'draft': [('readonly', False)]}, ondelete='restrict'),
'invoice_lines': fields.many2many('account.invoice.line', 'sale_order_line_invoice_rel', 'order_line_id', 'invoice_id', 'Invoice Lines', readonly=True, copy=False),
'invoiced': fields.function(_fnct_line_invoiced, string='Invoiced', type='boolean',
store={
'account.invoice': (_order_lines_from_invoice, ['state'], 10),
'sale.order.line': (lambda self,cr,uid,ids,ctx=None: ids, ['invoice_lines'], 10)
}),
'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price'), readonly=True, states={'draft': [('readonly', False)]}),
'price_subtotal': fields.function(_amount_line, string='Subtotal', digits_compute= dp.get_precision('Account')),
'price_reduce': fields.function(_get_price_reduce, type='float', string='Price Reduce', digits_compute=dp.get_precision('Product Price')),
'tax_id': fields.many2many('account.tax', 'sale_order_tax', 'order_line_id', 'tax_id', 'Taxes', readonly=True, states={'draft': [('readonly', False)]}),
'address_allotment_id': fields.many2one('res.partner', 'Allotment Partner',help="A partner to whom the particular product needs to be allotted."),
'product_uom_qty': fields.float('Quantity', digits_compute= dp.get_precision('Product UoS'), required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uom': fields.many2one('product.uom', 'Unit of Measure ', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'product_uos_qty': fields.float('Quantity (UoS)' ,digits_compute= dp.get_precision('Product UoS'), readonly=True, states={'draft': [('readonly', False)]}),
'product_uos': fields.many2one('product.uom', 'Product UoS'),
'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount'), readonly=True, states={'draft': [('readonly', False)]}),
'th_weight': fields.float('Weight', readonly=True, states={'draft': [('readonly', False)]}),
'state': fields.selection(
[('cancel', 'Cancelled'),('draft', 'Draft'),('confirmed', 'Confirmed'),('exception', 'Exception'),('done', 'Done')],
'Status', required=True, readonly=True, copy=False,
help='* The \'Draft\' status is set when the related sales order in draft status. \
\n* The \'Confirmed\' status is set when the related sales order is confirmed. \
\n* The \'Exception\' status is set when the related sales order is set as exception. \
\n* The \'Done\' status is set when the sales order line has been picked. \
\n* The \'Cancelled\' status is set when a user cancel the sales order related.'),
'order_partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', store=True, string='Customer'),
'salesman_id':fields.related('order_id', 'user_id', type='many2one', relation='res.users', store=True, string='Salesperson'),
'company_id': fields.related('order_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
'delay': fields.float('Delivery Lead Time', required=True, help="Number of days between the order confirmation and the shipping of the products to the customer", readonly=True, states={'draft': [('readonly', False)]}),
'procurement_ids': fields.one2many('procurement.order', 'sale_line_id', 'Procurements'),
}
_order = 'order_id desc, sequence, id'
_defaults = {
'product_uom' : _get_uom_id,
'discount': 0.0,
'product_uom_qty': 1,
'product_uos_qty': 1,
'sequence': 10,
'state': 'draft',
'price_unit': 0.0,
'delay': 0.0,
}
def _get_line_qty(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos_qty or 0.0
return line.product_uom_qty
def _get_line_uom(self, cr, uid, line, context=None):
if line.product_uos:
return line.product_uos.id
return line.product_uom.id
    def _prepare_order_line_invoice_line(self, cr, uid, line, account_id=False, context=None):
        """Prepare the dict of values to create the new invoice line for a
        sales order line. This method may be overridden to implement custom
        invoice generation (making sure to call super() to establish
        a clean extension chain).

        Returns an empty dict when the line is already invoiced.

        :param browse_record line: sale.order.line record to invoice
        :param int account_id: optional ID of a G/L account to force
            (this is used for returning products including service)
        :return: dict of values to create() the invoice line
        """
        res = {}
        if not line.invoiced:
            if not account_id:
                if line.product_id:
                    # Resolve the income account: product, then its category.
                    account_id = line.product_id.property_account_income.id
                    if not account_id:
                        account_id = line.product_id.categ_id.property_account_income_categ.id
                    if not account_id:
                        raise osv.except_osv(_('Error!'),
                                _('Please define income account for this product: "%s" (id:%d).') % \
                                    (line.product_id.name, line.product_id.id,))
                else:
                    # No product on the line: fall back to the company-wide
                    # default income account of product categories.
                    prop = self.pool.get('ir.property').get(cr, uid,
                            'property_account_income_categ', 'product.category',
                            context=context)
                    account_id = prop and prop.id or False
            uosqty = self._get_line_qty(cr, uid, line, context=context)
            uos_id = self._get_line_uom(cr, uid, line, context=context)
            # Convert the unit price from UoM quantity to the invoiced (UoS)
            # quantity so that price * qty stays the same amount.
            pu = 0.0
            if uosqty:
                pu = round(line.price_unit * line.product_uom_qty / uosqty,
                        self.pool.get('decimal.precision').precision_get(cr, uid, 'Product Price'))
            # Remap the income account through the order's fiscal position.
            fpos = line.order_id.fiscal_position or False
            account_id = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, account_id)
            if not account_id:
                raise osv.except_osv(_('Error!'),
                            _('There is no Fiscal Position defined or Income category account defined for default properties of Product categories.'))
            res = {
                'name': line.name,
                'sequence': line.sequence,
                'origin': line.order_id.name,
                'account_id': account_id,
                'price_unit': pu,
                'quantity': uosqty,
                'discount': line.discount,
                'uos_id': uos_id,
                'product_id': line.product_id.id or False,
                # (6, 0, ids): replace the invoice line taxes with the line's.
                'invoice_line_tax_id': [(6, 0, [x.id for x in line.tax_id])],
                'account_analytic_id': line.order_id.project_id and line.order_id.project_id.id or False,
            }
        return res
def invoice_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
create_ids = []
sales = set()
for line in self.browse(cr, uid, ids, context=context):
vals = self._prepare_order_line_invoice_line(cr, uid, line, False, context)
if vals:
inv_id = self.pool.get('account.invoice.line').create(cr, uid, vals, context=context)
self.write(cr, uid, [line.id], {'invoice_lines': [(4, inv_id)]}, context=context)
sales.add(line.order_id.id)
create_ids.append(inv_id)
# Trigger workflow events
for sale_id in sales:
workflow.trg_write(uid, 'sale.order', sale_id, cr)
return create_ids
def button_cancel(self, cr, uid, ids, context=None):
for line in self.browse(cr, uid, ids, context=context):
if line.invoiced:
raise osv.except_osv(_('Invalid Action!'), _('You cannot cancel a sales order line that has already been invoiced.'))
return self.write(cr, uid, ids, {'state': 'cancel'})
def button_confirm(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'confirmed'})
def button_done(self, cr, uid, ids, context=None):
res = self.write(cr, uid, ids, {'state': 'done'})
for line in self.browse(cr, uid, ids, context=context):
workflow.trg_write(uid, 'sale.order', line.order_id.id, cr)
return res
def uos_change(self, cr, uid, ids, product_uos, product_uos_qty=0, product_id=None):
product_obj = self.pool.get('product.product')
if not product_id:
return {'value': {'product_uom': product_uos,
'product_uom_qty': product_uos_qty}, 'domain': {}}
product = product_obj.browse(cr, uid, product_id)
value = {
'product_uom': product.uom_id.id,
}
# FIXME must depend on uos/uom of the product and not only of the coeff.
try:
value.update({
'product_uom_qty': product_uos_qty / product.uos_coeff,
'th_weight': product_uos_qty / product.uos_coeff * product.weight
})
except ZeroDivisionError:
pass
return {'value': value}
    def create(self, cr, uid, values, context=None):
        """Create a sale order line, completing missing fields.

        When a line is created programmatically with only an order and a
        product, replay the ``product_id_change`` onchange to fill in the
        description, price, taxes, UoM, etc. Values explicitly passed by
        the caller always win over the computed defaults.
        """
        if values.get('order_id') and values.get('product_id') and any(f not in values for f in ['name', 'price_unit', 'type', 'product_uom_qty', 'product_uom']):
            order = self.pool['sale.order'].read(cr, uid, values['order_id'], ['pricelist_id', 'partner_id', 'date_order', 'fiscal_position'], context=context)
            defaults = self.product_id_change(cr, uid, [], order['pricelist_id'][0], values['product_id'],
                qty=float(values.get('product_uom_qty', False)),
                uom=values.get('product_uom', False),
                qty_uos=float(values.get('product_uos_qty', False)),
                uos=values.get('product_uos', False),
                name=values.get('name', False),
                partner_id=order['partner_id'][0],
                date_order=order['date_order'],
                fiscal_position=order['fiscal_position'][0] if order['fiscal_position'] else False,
                flag=False, # Force name update
                context=context
            )['value']
            # product_id_change returns a plain list of tax ids; convert it
            # into the (6, 0, ids) m2m write command expected by create().
            if defaults.get('tax_id'):
                defaults['tax_id'] = [[6, 0, defaults['tax_id']]]
            # Caller-provided values override the onchange defaults.
            values = dict(defaults, **values)
        return super(sale_order_line, self).create(cr, uid, values, context=context)
    def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
            uom=False, qty_uos=0, uos=False, name='', partner_id=False,
            lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context=None):
        """Main onchange of the order line: recompute description, taxes,
        units of measure, quantities, weight and unit price whenever the
        product (or a related field) changes.

        :param int pricelist: id of the order's pricelist (required for price)
        :param int product: id of the selected product (may be falsy)
        :param bool flag: when True, keep the current description untouched
        :return: standard onchange dict with 'value', 'domain' and 'warning'
        """
        context = context or {}
        lang = lang or context.get('lang', False)
        if not partner_id:
            raise osv.except_osv(_('No Customer Defined!'), _('Before choosing a product,\n select a customer in the sales form.'))
        warning = False
        product_uom_obj = self.pool.get('product.uom')
        partner_obj = self.pool.get('res.partner')
        product_obj = self.pool.get('product.product')
        # NOTE(review): the incoming context is replaced here, so any other
        # keys the caller passed are dropped from this point on.
        context = {'lang': lang, 'partner_id': partner_id}
        partner = partner_obj.browse(cr, uid, partner_id)
        lang = partner.lang
        context_partner = {'lang': lang, 'partner_id': partner_id}
        if not product:
            return {'value': {'th_weight': 0,
                'product_uos_qty': qty}, 'domain': {'product_uom': [],
                   'product_uos': []}}
        if not date_order:
            date_order = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
        result = {}
        warning_msgs = ''
        product_obj = product_obj.browse(cr, uid, product, context=context_partner)
        # Discard a UoM/UoS whose category does not match the product's: the
        # fields below will then be reset to the product's own units.
        uom2 = False
        if uom:
            uom2 = product_uom_obj.browse(cr, uid, uom)
            if product_obj.uom_id.category_id.id != uom2.category_id.id:
                uom = False
        if uos:
            if product_obj.uos_id:
                uos2 = product_uom_obj.browse(cr, uid, uos)
                if product_obj.uos_id.category_id.id != uos2.category_id.id:
                    uos = False
            else:
                uos = False
        # Resolve the fiscal position: explicit argument wins, otherwise the
        # partner's default one.
        fpos = False
        if not fiscal_position:
            fpos = partner.property_account_position or False
        else:
            fpos = self.pool.get('account.fiscal.position').browse(cr, uid, fiscal_position)
        if update_tax: #The quantity only have changed
            result['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, product_obj.taxes_id)
        if not flag:
            # Rebuild the description from the (translated) product name plus
            # its optional sale description.
            result['name'] = self.pool.get('product.product').name_get(cr, uid, [product_obj.id], context=context_partner)[0][1]
            if product_obj.description_sale:
                result['name'] += '\n'+product_obj.description_sale
        domain = {}
        if (not uom) and (not uos):
            # Neither unit valid: reset both to the product's units and
            # restrict the selectable units to the matching categories.
            result['product_uom'] = product_obj.uom_id.id
            if product_obj.uos_id:
                result['product_uos'] = product_obj.uos_id.id
                result['product_uos_qty'] = qty * product_obj.uos_coeff
                uos_category_id = product_obj.uos_id.category_id.id
            else:
                result['product_uos'] = False
                result['product_uos_qty'] = qty
                uos_category_id = False
            result['th_weight'] = qty * product_obj.weight
            domain = {'product_uom':
                        [('category_id', '=', product_obj.uom_id.category_id.id)],
                        'product_uos':
                        [('category_id', '=', uos_category_id)]}
        elif uos and not uom: # only happens if uom is False
            result['product_uom'] = product_obj.uom_id and product_obj.uom_id.id
            result['product_uom_qty'] = qty_uos / product_obj.uos_coeff
            result['th_weight'] = result['product_uom_qty'] * product_obj.weight
        elif uom: # whether uos is set or not
            default_uom = product_obj.uom_id and product_obj.uom_id.id
            q = product_uom_obj._compute_qty(cr, uid, uom, qty, default_uom)
            if product_obj.uos_id:
                result['product_uos'] = product_obj.uos_id.id
                result['product_uos_qty'] = qty * product_obj.uos_coeff
            else:
                result['product_uos'] = False
                result['product_uos_qty'] = qty
            result['th_weight'] = q * product_obj.weight        # Round the quantity up
        if not uom2:
            uom2 = product_obj.uom_id
        # get unit price from the pricelist; accumulate user-visible warnings
        # instead of raising, so the form stays editable.
        if not pricelist:
            warn_msg = _('You have to select a pricelist or a customer in the sales form !\n'
                    'Please set one before choosing a product.')
            warning_msgs += _("No Pricelist ! : ") + warn_msg +"\n\n"
        else:
            price = self.pool.get('product.pricelist').price_get(cr, uid, [pricelist],
                    product, qty or 1.0, partner_id, {
                        'uom': uom or result.get('product_uom'),
                        'date': date_order,
                        })[pricelist]
            if price is False:
                warn_msg = _("Cannot find a pricelist line matching this product and quantity.\n"
                        "You have to change either the product, the quantity or the pricelist.")
                warning_msgs += _("No valid pricelist line found ! :") + warn_msg +"\n\n"
            else:
                result.update({'price_unit': price})
        if warning_msgs:
            warning = {
                       'title': _('Configuration Error!'),
                       'message' : warning_msgs
                    }
        return {'value': result, 'domain': domain, 'warning': warning}
def product_uom_change(self, cursor, user, ids, pricelist, product, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=False,
lang=False, update_tax=True, date_order=False, context=None):
context = context or {}
lang = lang or ('lang' in context and context['lang'])
if not uom:
return {'value': {'price_unit': 0.0, 'product_uom' : uom or False}}
return self.product_id_change(cursor, user, ids, pricelist, product,
qty=qty, uom=uom, qty_uos=qty_uos, uos=uos, name=name,
partner_id=partner_id, lang=lang, update_tax=update_tax,
date_order=date_order, context=context)
def unlink(self, cr, uid, ids, context=None):
if context is None:
context = {}
"""Allows to delete sales order lines in draft,cancel states"""
for rec in self.browse(cr, uid, ids, context=context):
if rec.state not in ['draft', 'cancel']:
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete a sales order line which is in state \'%s\'.') %(rec.state,))
return super(sale_order_line, self).unlink(cr, uid, ids, context=context)
class mail_compose_message(osv.Model):
    """Extend the mail composer so that e-mailing a quotation can also fire
    its 'quotation_sent' workflow signal."""
    _inherit = 'mail.compose.message'

    def send_mail(self, cr, uid, ids, context=None):
        """When composing on a sale.order flagged with 'mark_so_as_sent',
        advance its workflow and auto-follow the message before sending."""
        context = context or {}
        composing_on_order = context.get('default_model') == 'sale.order'
        order_res_id = context.get('default_res_id')
        if composing_on_order and order_res_id and context.get('mark_so_as_sent'):
            context = dict(context, mail_post_autofollow=True)
            self.pool.get('sale.order').signal_workflow(cr, uid, [order_res_id], 'quotation_sent')
        return super(mail_compose_message, self).send_mail(cr, uid, ids, context=context)
class account_invoice(osv.Model):
    """Extend invoices with a Sales Team and keep the linked sale orders'
    workflows in sync on payment and deletion."""
    _inherit = 'account.invoice'

    def _get_default_section_id(self, cr, uid, context=None):
        """ Gives default section by checking if present in the context,
        falling back to the current user's default sales team. """
        section_id = self._resolve_section_id_from_context(cr, uid, context=context) or False
        if not section_id:
            section_id = self.pool.get('res.users').browse(cr, uid, uid, context).default_section_id.id or False
        return section_id

    def _resolve_section_id_from_context(self, cr, uid, context=None):
        """ Returns ID of section based on the value of 'section_id'
            context key, or None if it cannot be resolved to a single
            Sales Team.
        """
        if context is None:
            context = {}
        if type(context.get('default_section_id')) in (int, long):
            return context.get('default_section_id')
        if isinstance(context.get('default_section_id'), basestring):
            # A name was passed: resolve it, but only when unambiguous.
            section_ids = self.pool.get('crm.case.section').name_search(cr, uid, name=context['default_section_id'], context=context)
            if len(section_ids) == 1:
                return int(section_ids[0][0])
        return None

    _columns = {
        'section_id': fields.many2one('crm.case.section', 'Sales Team'),
    }
    _defaults = {
        'section_id': lambda self, cr, uid, c=None: self._get_default_section_id(cr, uid, context=c)
    }

    def confirm_paid(self, cr, uid, ids, context=None):
        """Log an 'Invoice paid' note on every sale order linked to the
        invoices being reconciled."""
        sale_order_obj = self.pool.get('sale.order')
        res = super(account_invoice, self).confirm_paid(cr, uid, ids, context=context)
        so_ids = sale_order_obj.search(cr, uid, [('invoice_ids', 'in', ids)], context=context)
        for so_id in so_ids:
            sale_order_obj.message_post(cr, uid, so_id, body=_("Invoice paid"), context=context)
        return res

    def unlink(self, cr, uid, ids, context=None):
        """ Overwrite unlink method of account invoice to send a trigger to the sale workflow upon invoice deletion """
        invoice_ids = self.search(cr, uid, [('id', 'in', ids), ('state', 'in', ['draft', 'cancel'])], context=context)
        #if we can't cancel all invoices, do nothing
        if len(invoice_ids) == len(ids):
            #Cancel invoice(s) first before deleting them so that if any sale order is associated with them
            #it will trigger the workflow to put the sale order in an 'invoice exception' state
            # (loop variable renamed from 'id' to avoid shadowing the builtin)
            for invoice_id in ids:
                workflow.trg_validate(uid, 'account.invoice', invoice_id, 'invoice_cancel', cr)
        return super(account_invoice, self).unlink(cr, uid, ids, context=context)
class procurement_order(osv.osv):
    """Link procurements back to the sale order line that generated them and
    propagate procurement state changes to the sale order workflow."""
    _inherit = 'procurement.order'
    _columns = {
        'sale_line_id': fields.many2one('sale.order.line', string='Sale Order Line'),
    }

    def write(self, cr, uid, ids, vals, context=None):
        """After writing a terminal state (done/cancel/exception), re-test
        the parent sale order: fire 'ship_end' when every procurement is
        done and 'ship_except' when one was cancelled."""
        if isinstance(ids, (int, long)):
            ids = [ids]
        res = super(procurement_order, self).write(cr, uid, ids, vals, context=context)
        # NOTE: 'workflow' is already imported at module level (it is used
        # elsewhere in this file without a local import); the redundant
        # in-function `from openerp import workflow` was removed.
        if vals.get('state') in ['done', 'cancel', 'exception']:
            sale_order_obj = self.pool.get('sale.order')
            for proc in self.browse(cr, uid, ids, context=context):
                if proc.sale_line_id and proc.sale_line_id.order_id:
                    order_id = proc.sale_line_id.order_id.id
                    if sale_order_obj.test_procurements_done(cr, uid, [order_id], context=context):
                        workflow.trg_validate(uid, 'sale.order', order_id, 'ship_end', cr)
                    if sale_order_obj.test_procurements_except(cr, uid, [order_id], context=context):
                        workflow.trg_validate(uid, 'sale.order', order_id, 'ship_except', cr)
        return res
class product_product(osv.Model):
    """Add a '# Sales' counter and a drill-down window action on variants."""
    _inherit = 'product.product'

    def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
        """Quantity sold per product, aggregated from sale.report over the
        'active' order states."""
        counts = dict.fromkeys(ids, 0)
        sold_states = ['waiting_date','progress','manual', 'shipping_except', 'invoice_except', 'done']
        domain = [('state', 'in', sold_states), ('product_id', 'in', ids)]
        report_groups = self.pool['sale.report'].read_group(
            cr, uid, domain, ['product_id','product_uom_qty'], ['product_id'], context=context)
        for group in report_groups:
            counts[group['product_id'][0]] = group['product_uom_qty']
        return counts

    def action_view_sales(self, cr, uid, ids, context=None):
        """Window action listing the sale order lines of these products."""
        act_id = self.pool['ir.model.data'].xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree', raise_if_not_found=True)
        action = self.pool['ir.actions.act_window'].read(cr, uid, [act_id], context=context)[0]
        id_list = ','.join(map(str, ids))
        action['domain'] = "[('product_id','in',[" + id_list + "])]"
        return action

    _columns = {
        'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
    }
class product_template(osv.Model):
    """Aggregate the per-variant sales counter at the template level."""
    _inherit = 'product.template'

    def _sales_count(self, cr, uid, ids, field_name, arg, context=None):
        """Sum of sales_count over all variants of each template."""
        res = dict.fromkeys(ids, 0)
        for template in self.browse(cr, uid, ids, context=context):
            # Generator expression: no intermediate list just to sum it.
            res[template.id] = sum(p.sales_count for p in template.product_variant_ids)
        return res

    def action_view_sales(self, cr, uid, ids, context=None):
        """Window action listing the sale order lines of every variant of
        the given templates."""
        act_obj = self.pool.get('ir.actions.act_window')
        mod_obj = self.pool.get('ir.model.data')
        product_ids = []
        for template in self.browse(cr, uid, ids, context=context):
            # extend() with a generator instead of building a throwaway list.
            product_ids.extend(x.id for x in template.product_variant_ids)
        result = mod_obj.xmlid_to_res_id(cr, uid, 'sale.action_order_line_product_tree',raise_if_not_found=True)
        result = act_obj.read(cr, uid, [result], context=context)[0]
        result['domain'] = "[('product_id','in',[" + ','.join(map(str, product_ids)) + "])]"
        return result

    _columns = {
        'sales_count': fields.function(_sales_count, string='# Sales', type='integer'),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 5,077,323,756,699,645,000 | 52.384207 | 318 | 0.579615 | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.