{ "source": "JIC-CSB/jicbioimage.core", "score": 3 }
#### File: core/util/array.py ```python import sys from functools import wraps import random import numpy as np from jicbioimage.core.util.color import pretty_color_palette, unique_color_palette def normalise(array): """Return array normalised such that all values are between 0 and 1. If all the values in the array are the same the function will return: - np.zeros(array.shape, dtype=np.float) if the value is 0 or less - np.ones(array.shape, dtype=np.float) if the value is greater than 0 :param array: numpy.array :returns: numpy.array.astype(numpy.float) """ min_val = array.min() max_val = array.max() array_range = max_val - min_val if array_range == 0: # min_val == max_val if min_val > 0: return np.ones(array.shape, dtype=np.float) return np.zeros(array.shape, dtype=np.float) return (array.astype(np.float) - min_val) / array_range def reduce_stack(array3D, z_function): """Return 2D array projection of the input 3D array. The input z_function is applied along the z-axis at each x, y position. :param array3D: 3D numpy.array :param z_function: function to use for the projection (e.g. :func:`max`) """ xmax, ymax, _ = array3D.shape projection = np.zeros((xmax, ymax), dtype=array3D.dtype) for x in range(xmax): for y in range(ymax): projection[x, y] = z_function(array3D[x, y, :]) return projection def map_stack(array3D, z_function): """Return 3D array where each z-slice has had the function applied to it. :param array3D: 3D numpy.array :param z_function: function to be mapped to each z-slice """ _, _, zdim = array3D.shape return np.dstack([z_function(array3D[:, :, z]) for z in range(zdim)]) def check_dtype(array, allowed): """Raises TypeError if the array is not of an allowed dtype. :param array: array whose dtype is to be checked :param allowed: instance or list of allowed dtypes :raises: TypeError """ if not hasattr(allowed, "__iter__"): allowed = [allowed, ] if array.dtype not in allowed: msg = "Invalid dtype {}. Allowed dtype(s): {}" raise(TypeError(msg.format(array.dtype, allowed))) def dtype_contract(input_dtype=None, output_dtype=None): """Function decorator for specifying input and/or output array dtypes. :param input_dtype: dtype of input array :param output_dtype: dtype of output array :returns: function decorator """ def wrap(function): @wraps(function) def wrapped_function(*args, **kwargs): if input_dtype is not None: check_dtype(args[0], input_dtype) array = function(*args, **kwargs) if output_dtype is not None: check_dtype(array, output_dtype) return array return wrapped_function return wrap def color_array(array, color_dict): """Return an RGB color array. Assigns an RGB color value to each unique element of the input array and returns an array of shape array.shape + (3,). :param array: input numpy.array :param color_dict: dictionary with keys/values corresponding to identifiers and RGB tuples respectively """ output_array = np.zeros(array.shape + (3,), np.uint8) unique_identifiers = set(np.unique(array)) for identifier in unique_identifiers: output_array[np.where(array == identifier)] = color_dict[identifier] return output_array def pretty_color_array(array, keep_zero_black=True): """Return an RGB pretty color array. Assigns a pretty RGB color value to each unique element of the input array and returns an array of shape array.shape + (3,).
:param array: input numpy.array :param keep_zero_black: whether or not the background should be black :returns: numpy.array """ unique_identifiers = set(np.unique(array)) color_dict = pretty_color_palette(unique_identifiers, keep_zero_black) return color_array(array, color_dict) def unique_color_array(array): """Return an RGB unique color array. Assigns a unique RGB color value to each unique element of the input array and returns an array of shape array.shape + (3,). :param array: input numpy.array :returns: numpy.array """ unique_identifiers = set(np.unique(array)) color_dict = unique_color_palette(unique_identifiers) return color_array(array, color_dict) ``` #### File: jicbioimage.core/tests/Image_unit_tests.py ```python import io import unittest import numpy as np import PIL try: from mock import Mock, patch except ImportError: from unittest.mock import Mock, patch class BaseImageTests(unittest.TestCase): def test_repr(self): from jicbioimage.core.image import _BaseImage im = _BaseImage((50,50)) pos = hex(id(im)) expected = "<_BaseImage object at {}, dtype=float64>".format(pos) self.assertEqual(repr(im), expected) im = _BaseImage((50,50), dtype=bool) pos = hex(id(im)) expected = "<_BaseImage object at {}, dtype=bool>".format(pos) self.assertEqual(repr(im), expected) class ImageTests(unittest.TestCase): def test_repr(self): from jicbioimage.core.image import Image im = Image((50,50)) pos = hex(id(im)) expected = "<Image object at {}, dtype=uint8>".format(pos) self.assertEqual(repr(im), expected) def test_import_Image_class(self): # This throws an error if the class cannot be imported. from jicbioimage.core.image import Image def test_instantiation_from_shape(self): from jicbioimage.core.image import Image image = Image((50, 50)) self.assertTrue(isinstance(image, np.ndarray)) self.assertEqual(image.shape, (50, 50)) self.assertEqual(len(image.history), 0) self.assertEqual(image.history.creation, 'Instantiated Image from shape (50, 50)') def test_instantiation_from_shape_no_history(self): from jicbioimage.core.image import Image image = Image((50, 50), log_in_history=False) self.assertEqual(len(image.history), 0) def test_rgb_instantiation_from_shape(self): from jicbioimage.core.image import Image image = Image((50, 50, 3)) self.assertEqual(image.shape, (50, 50, 3)) def test_default_type(self): from jicbioimage.core.image import Image image = Image((50, 50)) self.assertEqual(image.dtype, np.uint8, 'Image type not np.uint8 but {}'.format(image.dtype)) def test_default_name(self): from jicbioimage.core.image import Image image = Image((50, 50)) self.assertTrue(image.name is None) def test_instantiation_from_shape_with_name(self): from jicbioimage.core.image import Image image = Image((50, 50), name='test') self.assertEqual(image.name, 'test') self.assertEqual(len(image.history), 0) self.assertEqual(image.history.creation, 'Instantiated Image from shape (50, 50) as test') def test_repr_png_callable(self): from jicbioimage.core.image import Image image = Image((50, 50)) self.assertTrue(callable(image._repr_png_)) def test_png_attr(self): from jicbioimage.core.image import Image image = Image((50, 50)) self.assertTrue(hasattr(image, 'png')) def test_png(self): from jicbioimage.core.image import Image image = Image((600, 500), dtype=np.uint64) png = image.png() ar = np.asarray(PIL.Image.open(io.BytesIO(png))) self.assertEqual(ar.shape[0], 600) self.assertEqual(ar.shape[1], 500) def test_png_converts_to_uint8(self): from jicbioimage.core.image import Image image = Image((50, 50), dtype=np.uint64) #
The line below raises an error if the image is not converted to uint8 # before returning the png string. png = image.png() def test_png_with_width(self): from jicbioimage.core.image import Image image = Image((600, 800), dtype=np.uint64) thumbnail = image.png(width=300) ar = np.asarray(PIL.Image.open(io.BytesIO(thumbnail))) self.assertEqual(ar.shape[0], 300) self.assertEqual(ar.shape[1], 400) def test_rgb_thumbnail(self): from jicbioimage.core.image import Image image = Image((600, 800, 3), dtype=np.uint64) thumbnail = image.png(width=300) ar = np.asarray(PIL.Image.open(io.BytesIO(thumbnail))) self.assertEqual(ar.shape[0], 300) self.assertEqual(ar.shape[1], 400) self.assertEqual(ar.shape[2], 3) def test_from_array(self): from jicbioimage.core.image import Image ar = np.zeros((50,50), dtype=np.uint8) im = Image.from_array(ar) self.assertTrue(isinstance(im, Image)) self.assertEqual(len(im.history), 0) self.assertEqual(im.history.creation, 'Created Image from array') def test_from_array_with_name(self): from jicbioimage.core.image import Image ar = np.zeros((50,50), dtype=np.uint8) im = Image.from_array(ar, name='Test1') self.assertEqual(len(im.history), 0) self.assertEqual(im.history.creation, 'Created Image from array as Test1') def test_from_array_no_history(self): from jicbioimage.core.image import Image ar = np.zeros((50,50), dtype=np.uint8) im = Image.from_array(ar, log_in_history=False) self.assertEqual(len(im.history), 0) class sorted_listdir_test(unittest.TestCase): @patch('os.listdir') def test_sorted_listdir(self, patch_listdir): patch_listdir.return_value = ["z20.png", "z3.png", "z1.png"] from jicbioimage.core.image import _sorted_listdir l = _sorted_listdir(".") patch_listdir.assert_called_with(".") self.assertEqual(l, ["z1.png", "z3.png", "z20.png"]) if __name__ == '__main__': unittest.main() ``` #### File: jicbioimage.core/tests/io_functional_tests.py ```python import unittest import os import os.path import shutil HERE = os.path.dirname(__file__) DATA_DIR = os.path.join(HERE, 'data') TMP_DIR = os.path.join(HERE, 'tmp') class MD5HashFunctionalTests(unittest.TestCase): def setUp(self): if not os.path.isdir(TMP_DIR): os.mkdir(TMP_DIR) def tearDown(self): shutil.rmtree(TMP_DIR) def test_md5_from_file(self): from jicbioimage.core.io import _md5_hexdigest_from_file input_file = os.path.join(DATA_DIR, "tjelvar.png") self.assertEqual(_md5_hexdigest_from_file(input_file), "894c9860e11667d29cdbf034e58ee75f") def test_md5_from_file_with_smaller_blocksize(self): from jicbioimage.core.io import _md5_hexdigest_from_file input_file = os.path.join(DATA_DIR, "tjelvar.png") self.assertEqual(_md5_hexdigest_from_file(input_file, 4096), "894c9860e11667d29cdbf034e58ee75f") ``` #### File: jicbioimage.core/tests/transform_functional_tests.py ```python import unittest import os import os.path import shutil import numpy as np HERE = os.path.dirname(__file__) DATA_DIR = os.path.join(HERE, 'data') TMP_DIR = os.path.join(HERE, 'tmp') class TransformationUserStory(unittest.TestCase): def setUp(self): from jicbioimage.core.io import AutoName AutoName.count = 0 if not os.path.isdir(TMP_DIR): os.mkdir(TMP_DIR) def tearDown(self): from jicbioimage.core.io import AutoName AutoName.count = 0 shutil.rmtree(TMP_DIR) def test_creating_transformations_from_scratch(self): # What if the default names of images were just the order in which they # were created? # Or perhaps the order + the function name, e.g. # 1_gaussian.png # 2_sobel.png # 3_gaussian.png # The order could be tracked in a class variable in an AutoName # object.
The AutoName object could also store the output directory # as a class variable. from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def identity(image): return image image = Image.from_file(os.path.join(DATA_DIR, 'tjelvar.png')) image = identity(image) self.assertEqual(len(image.history), 1, image.history) self.assertEqual(str(image.history[-1]), '<History.Event(identity(image))>') created_fpath = os.path.join(TMP_DIR, '1_identity.png') self.assertTrue(os.path.isfile(created_fpath), 'No such file: {}'.format(created_fpath)) def test_transform_can_take_named_argument(self): from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def identity(image): return image image = Image((50, 50)) # The command below should not raise an IndexError. image = identity(image=image) def test_BZ2(self): from skimage.filters import gaussian_filter from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def blur(image): new_image = np.zeros(image.shape, dtype=np.float) sigma = 2 if len(image.shape) == 3: # We have an RGB image, so apply the filter channel by channel. for i in range(image.shape[2]): new_image[:, :, i] = gaussian_filter(image[:, :, i], sigma) else: new_image = gaussian_filter(image, sigma) return new_image image = Image.from_file(os.path.join(DATA_DIR, 'tjelvar.png')) # The creation of an image is not an Event in the history list, # it is stored in a separate attribute. self.assertEqual(len(image.history), 0) self.assertTrue(image.history.creation.startswith('Created Image from')) image = blur(image) # Image should have one event in the history now. self.assertEqual(len(image.history), 1) self.assertTrue(image.history.creation.startswith('Created Image from')) self.assertEqual(str(image.history[0]), '<History.Event(blur(image))>') # Image returned is of jicbioimage.core.image.Image type.
self.assertTrue(isinstance(image, Image)) created_fpath = os.path.join(TMP_DIR, '1_blur.png') self.assertTrue(os.path.isfile(created_fpath), 'No such file: {}'.format(created_fpath)) def test_stack_to_image_transform(self): from jicbioimage.core.image import Image from jicbioimage.core.io import DataManager, FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def average_projection(stack): xmax, ymax, zmax = stack.shape projection = np.sum(stack, axis=2, dtype=np.uint8) // zmax return Image.from_array(projection) data_manager.load(os.path.join(DATA_DIR, 'z-series.ome.tif')) microscopy_collection = data_manager[0] stack = microscopy_collection.zstack() image = average_projection(stack) self.assertTrue(isinstance(image, Image)) def test_auto_safe_dtype(self): # AutoWrite.auto_safe_dtype is True by default from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName import numpy as np AutoName.directory = TMP_DIR def some_transform(image): return image decorated = transformation(some_transform) im = np.zeros((50, 50), dtype=np.uint64) decorated(im) created_fpath = os.path.join(TMP_DIR, '1_some_transform.png') self.assertTrue(os.path.isfile(created_fpath), 'No such file: {}'.format(created_fpath)) def test_transform_on_Image3D(self): from jicbioimage.core.image import Image3D from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def some_transform(image): return image stack = Image3D((50, 50, 11)) stack = some_transform(stack) self.assertTrue(isinstance(stack, Image3D)) output_dir = os.path.join(TMP_DIR, "1_some_transform.stack") self.assertTrue(os.path.isdir(output_dir)) expected = ["z00.png", "z01.png", "z02.png", "z03.png", "z04.png", "z05.png", "z06.png", "z07.png", "z08.png", "z09.png", "z10.png"] actual = os.listdir(output_dir) for f in expected: self.assertTrue(f in actual) # Make sure that previous data is removed prior to writing. AutoName.count = 0 stack = Image3D((50, 50, 3)) stack = some_transform(stack) actual = os.listdir(output_dir) for f in expected: # These are the old files. self.assertFalse(f in actual) expected = ["z0.png", "z1.png", "z2.png"] # These are the new files.
for f in expected: self.assertTrue(f in actual) def test_transform_Image3D_to_Image(self): from jicbioimage.core.image import Image, Image3D from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def first_z(image): return image[:,:,0].view(Image) stack = Image3D((50, 50, 5)) image = first_z(stack) self.assertTrue(isinstance(image, Image)) output_name = os.path.join(TMP_DIR, "1_first_z.png") self.assertTrue(os.path.isfile(output_name)) output_dir = os.path.join(TMP_DIR, "1_first_z.stack") self.assertFalse(os.path.isdir(output_dir)) def test_transform_Image_to_Image3D(self): from jicbioimage.core.image import Image, Image3D from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName AutoName.directory = TMP_DIR @transformation def add_height(image): stack = np.dstack([image, image, image]) return Image3D.from_array(stack) im = Image((50, 50)) stack = add_height(im) self.assertTrue(isinstance(stack, Image3D)) output_name = os.path.join(TMP_DIR, "1_add_height.png") self.assertFalse(os.path.isfile(output_name)) output_dir = os.path.join(TMP_DIR, "1_add_height.stack") self.assertTrue(os.path.isdir(output_dir)) if __name__ == '__main__': unittest.main() ```
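The functional tests above pin down the behaviour of the `@transformation` decorator: it logs a history event and auto-writes a numbered PNG per transform. A minimal usage sketch, assuming `jicbioimage.core` is installed and an `output` directory exists (the directory name and the `invert` function are illustrative, not taken from the test suite):

```python
import numpy as np

from jicbioimage.core.image import Image
from jicbioimage.core.transform import transformation
from jicbioimage.core.io import AutoName

AutoName.directory = "output"  # hypothetical pre-existing directory


@transformation
def invert(image):
    """Return the image with its uint8 intensities inverted."""
    return 255 - image


im = Image.from_array(np.zeros((50, 50), dtype=np.uint8))
im = invert(im)        # writes output/1_invert.png and logs a history event
print(im.history[-1])  # <History.Event(invert(image))>
```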
{ "source": "JIC-CSB/jicbioimage.segment", "score": 3 }
#### File: jicbioimage.segment/tests/Region_unit_tests.py ```python import unittest import numpy as np class RegionTestCase(unittest.TestCase): def test_force_dtype(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) self.assertEqual(region.dtype, bool) def test_region(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) self.assertFalse(region[0, 0]) self.assertTrue(region[0, 1]) self.assertEqual(region.shape, (3, 3)) def test_region_select_from_array(self): from jicbioimage.segment import Region id_array = np.array([[0, 0, 0], [1, 1, 1], [2, 2, 2]]) region_1 = Region.select_from_array(id_array, 1) self.assertFalse(region_1[0, 0]) self.assertTrue(region_1[1, 0]) self.assertFalse(region_1[2, 0]) self.assertEqual(region_1.area, 3) def test_region_area(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) self.assertEqual(region.area, 3) def test_region_perimeter(self): from jicbioimage.segment import Region test_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) self.assertEqual(region.perimeter, 8) def test_region_border(self): from jicbioimage.segment import Region test_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) border_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 0, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]) border_region = Region(border_array) self.assertTrue(np.array_equal(region.border, border_region)) def test_region_inner(self): from jicbioimage.segment import Region test_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) inner_array = np.array([[0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) inner_region = Region(inner_array) self.assertTrue(np.array_equal(region.inner, inner_region)) def test_force_binary(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 2], [0, 0, 1], [0, 0, 0]]) binary = np.array(test_array, dtype=bool) region = Region(test_array) self.assertTrue(np.array_equal(region, binary)) def test_region_convex_hull(self): from jicbioimage.segment import Region test_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 0, 0, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) convex_hull_array = np.array([[0, 0, 0, 0, 0], [0, 1, 1, 1, 0], [0, 1, 1, 0, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 0]], dtype=bool) self.assertTrue(np.array_equal(region.convex_hull, convex_hull_array)) def test_index_arrays(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) x_array, y_array = region.index_arrays self.assertTrue(np.array_equal(x_array, np.array([0, 0, 1]))) self.assertTrue(np.array_equal(y_array, np.array([1, 2, 2]))) def test_points(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) x_array, y_array = region.index_arrays self.assertEqual(region.points, [(0, 1), (0, 2), (1, 2)]) def test_dilate(self): from jicbioimage.segment import Region test_array = np.array([[0, 0, 0, 0, 0], [0, 0, 1, 1, 0], [0, 1, 1, 0, 0], [0, 1, 0, 0, 0], [0, 0, 0, 0, 0]]) region = 
Region(test_array) dilate_array = np.array([[0, 0, 1, 1, 0], [0, 1, 1, 1, 1], [1, 1, 1, 1, 0], [1, 1, 1, 0, 0], [0, 1, 0, 0, 0]], dtype=bool) self.assertTrue(np.array_equal(region.dilate(), dilate_array)) def test_repr(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) self.assertEqual(repr(region), repr(region)) def test_str(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1], [0, 0, 1], [0, 0, 0]]) region = Region(test_array) self.assertEqual(str(region), str(region)) def test_centroid(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 1, 1, 1, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) c = region.centroid self.assertEqual(c, (1., 2.)) def test_centroid_with_fraction(self): from jicbioimage.segment import Region test_array = np.array([[0, 1, 1, 0, 0], [0, 1, 1, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]]) region = Region(test_array) c = region.centroid self.assertEqual(c, (0.5, 1.5)) ```
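Read together, the tests above document a compact `Region` API. A usage sketch, assuming `jicbioimage.segment` is installed:

```python
import numpy as np

from jicbioimage.segment import Region

mask = np.array([[0, 1, 1],
                 [0, 0, 1],
                 [0, 0, 0]])
region = Region(mask)    # input is coerced to a boolean mask

print(region.area)       # 3
print(region.centroid)   # mean (row, col) of the True pixels
print(region.points)     # [(0, 1), (0, 2), (1, 2)]

grown = region.dilate()  # morphological dilation of the mask
ring = region.border     # just the border pixels
core = region.inner      # the region with its border stripped away
```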
{ "source": "JIC-CSB/jicgeometry", "score": 4 }
#### File: jicgeometry/jicgeometry/__init__.py ```python import math __version__ = "0.6.0" class Point2D(object): """Class representing a point in 2D space.""" def __init__(self, a1, a2=None): if a2 is None: # We assume that we have been given a sequence with x, y coordinates. self.x, self.y = a1 else: self.x = a1 self.y = a2 self._set_types() def _set_types(self): """Make sure that x, y have consistent types and set dtype.""" # If we are given something that is not an int or a float we raise # a RuntimeError as we do not want to have to guess if the given # input should be interpreted as an int or a float, for example the # interpretation of the string "1" vs the interpretation of the string # "1.0". for c in (self.x, self.y): if not (isinstance(c, int) or isinstance(c, float)): raise(RuntimeError('x, y coords should be int or float')) if isinstance(self.x, int) and isinstance(self.y, int): self._dtype = "int" else: # At least one value is a float so promote both to float. self.x = float(self.x) self.y = float(self.y) self._dtype = "float" @property def dtype(self): """Return the type of the x, y coordinates as a string.""" return self._dtype @property def magnitude(self): """Return the magnitude when treating the point as a vector.""" return math.sqrt(self.x**2 + self.y**2) @property def unit_vector(self): """Return the unit vector.""" return Point2D(self.x / self.magnitude, self.y / self.magnitude) def distance(self, other): """Return distance to the other point.""" tmp = self - other return tmp.magnitude def __repr__(self): s = "<Point2D(x={}, y={}, dtype={})>" if self.dtype == "float": s = "<Point2D(x={:.2f}, y={:.2f}, dtype={})>" return s.format(self.x, self.y, self.dtype) def __eq__(self, other): if self.dtype != other.dtype: return False return self.x == other.x and self.y == other.y def __add__(self, other): return Point2D(self.x + other.x, self.y + other.y) def __sub__(self, other): return Point2D(self.x - other.x, self.y - other.y) def __mul__(self, other): return Point2D(self.x * other, self.y * other) def __div__(self, other): return self * (1/float(other)) def __truediv__(self, other): return self.__div__(other) def __len__(self): return 2 def __getitem__(self, key): if key == 0: return self.x elif key == 1: return self.y else: raise(IndexError()) def __iter__(self): return iter([self.x, self.y]) def astype(self, dtype): """Return a point of the specified dtype.""" if dtype == "int": return Point2D(int(round(self.x, 0)), int(round(self.y, 0))) elif dtype == "float": return Point2D(float(self.x), float(self.y)) else: raise(RuntimeError("Invalid dtype: {}".format(dtype))) def astuple(self): """Return the x, y coordinates as a tuple.""" return self.x, self.y class Point3D(object): """Class representing a point in 3D space.""" def __init__(self, a1, a2=None, a3=None): if a2 is not None and a3 is not None: self.x, self.y, self.z = a1, a2, a3 else: self.x, self.y, self.z = a1 self._set_types() def _set_types(self): """Make sure that x, y, z have consistent types and set dtype.""" # If we are given something that is not an int or a float we raise # a RuntimeError as we do not want to have to guess if the given # input should be interpreted as an int or a float, for example the # interpretation of the string "1" vs the interpretation of the string # "1.0".
for c in (self.x, self.y, self.z): if not (isinstance(c, int) or isinstance(c, float)): raise(RuntimeError('x, y, z coords should be int or float')) if (isinstance(self.x, int) and isinstance(self.y, int) and isinstance(self.z, int)): self._dtype = "int" else: # At least one value is a float so promote all three to float. self.x = float(self.x) self.y = float(self.y) self.z = float(self.z) self._dtype = "float" @property def dtype(self): """Return the type of the x, y, z coordinates as a string.""" return self._dtype def __iter__(self): return iter([self.x, self.y, self.z]) def __repr__(self): s = "<Point3D(x={}, y={}, z={}, dtype={})>" if self.dtype == "float": s = "<Point3D(x={:.2f}, y={:.2f}, z={:.2f}, dtype={})>" return s.format(self.x, self.y, self.z, self.dtype) def __eq__(self, other): if self.dtype != other.dtype: return False return (self.x == other.x and self.y == other.y and self.z == other.z) def __add__(self, other): return Point3D(self.x + other.x, self.y + other.y, self.z + other.z) def __sub__(self, other): return Point3D(self.x - other.x, self.y - other.y, self.z - other.z) def __mul__(self, other): return Point3D(self.x * other, self.y * other, self.z * other) def __div__(self, other): return self * (1/float(other)) def __truediv__(self, other): return self.__div__(other) def __len__(self): return 3 def __getitem__(self, key): if key == 0: return self.x elif key == 1: return self.y elif key == 2: return self.z else: raise(IndexError()) @property def magnitude(self): """Return the magnitude when treating the point as a vector.""" return math.sqrt(self.x**2 + self.y**2 + self.z**2) @property def unit_vector(self): """Return the unit vector.""" return Point3D(self.x / self.magnitude, self.y / self.magnitude, self.z / self.magnitude) def distance(self, other): """Return distance to the other point.""" tmp = self - other return tmp.magnitude def astype(self, dtype): """Return a point of the specified dtype.""" if dtype == "int": return Point3D(int(round(self.x, 0)), int(round(self.y, 0)), int(round(self.z, 0))) elif dtype == "float": return Point3D(float(self.x), float(self.y), float(self.z)) else: raise(RuntimeError("Invalid dtype: {}".format(dtype))) def astuple(self): """Return the x, y, z coordinates as a tuple.""" return self.x, self.y, self.z ```
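A short sketch of the point classes in use (the coordinate values are chosen for illustration):

```python
from jicgeometry import Point2D, Point3D

p = Point2D(3, 4)        # int coordinates -> dtype "int"
q = Point2D((0.0, 0.0))  # a sequence also works; floats -> dtype "float"

print(p.magnitude)       # 5.0
print(p.distance(q))     # 5.0 (subtraction promotes to float)
print(p.unit_vector)     # <Point2D(x=0.60, y=0.80, dtype=float)>

x, y = p                 # points unpack like tuples
v = Point3D(1, 2, 2)
print(v.magnitude)       # 3.0
```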
{ "source": "JIC-CSB/jicimagelib", "score": 4 }
#### File: jicimagelib/jicimagelib/geometry.py ```python import math class Point2D(object): """Class representing a point in 2D space.""" def __init__(self, a1, a2=None): if a2 is None: # We assume that we have been given a sequence with x, y coordinates. self.x, self.y = a1 else: self.x = a1 self.y = a2 self._set_types() def _set_types(self): """Make sure that x, y have consistent types and set dtype.""" # If we are given something that is not an int or a float we raise # a RuntimeError as we do not want to have to guess if the given # input should be interpreted as an int or a float, for example the # interpretation of the string "1" vs the interpretation of the string # "1.0". for c in (self.x, self.y): if not ( isinstance(c, int) or isinstance(c, float) ): raise(RuntimeError('x, y coords should be int or float')) if isinstance(self.x, int) and isinstance(self.y, int): self._dtype = "int" else: # At least one value is a float so promote both to float. self.x = float(self.x) self.y = float(self.y) self._dtype = "float" @property def dtype(self): """Return the type of the x, y coordinates as a string.""" return self._dtype @property def magnitude(self): """Return the magnitude when treating the point as a vector.""" return math.sqrt( self.x * self.x + self.y * self.y ) @property def unit_vector(self): """Return the unit vector.""" return Point2D( self.x / self.magnitude, self.y / self.magnitude ) def distance(self, other): """Return distance to the other point.""" tmp = self - other return tmp.magnitude def __repr__(self): s = "<Point2D(x={}, y={}, dtype={})>" if self.dtype == "float": s = "<Point2D(x={:.2f}, y={:.2f}, dtype={})>" return s.format( self.x, self.y, self.dtype) def __eq__(self, other): if self.dtype != other.dtype: return False return self.x == other.x and self.y == other.y def __add__(self, other): return Point2D( self.x + other.x, self.y + other.y ) def __sub__(self, other): return Point2D( self.x - other.x, self.y - other.y ) def __mul__(self, other): return Point2D( self.x * other, self.y * other) def __div__(self, other): if isinstance(other, int): raise(NotImplementedError("Integer division not yet implemented")) return self * (1/other) def __len__(self): return 2 def __getitem__(self, key): if key == 0: return self.x elif key == 1: return self.y else: raise(IndexError()) def __iter__(self): return iter( [self.x, self.y] ) def astype(self, dtype): """Return a point of the specified dtype.""" if dtype == "int": return Point2D( int( round(self.x, 0) ), int( round(self.y, 0) ) ) elif dtype == "float": return Point2D( float(self.x), float(self.y)) else: raise(RuntimeError("Invalid dtype: {}".format(dtype))) def astuple(self): """Return the x, y coordinates as a tuple.""" return self.x, self.y ``` #### File: jicimagelib/jicimagelib/transform.py ```python from functools import wraps import numpy as np import PIL.Image import scipy.ndimage.filters import skimage.morphology import skimage.exposure import skimage.filters # needed by threshold_otsu below from jicimagelib.io import AutoName, AutoWrite from jicimagelib.image import Image from jicimagelib.util.array import ( normalise, reduce_stack, dtype_contract, ) ############################################################################# # Function decorator for creating transforms.
############################################################################# def transformation(func): """Function decorator to turn another function into a transformation.""" @wraps(func) def func_as_transformation(*args, **kwargs): # When using transforms that return new ndarrays we lose the # jicimagelib.image.Image type and the history of the image. # One therefore needs to: # - Extract the history from the input jicimagelib.image.Image. # - Apply the transformation, which may return a numpy ndarray. # - Force the image to the jicimagelib.image.Image type. # - Re-attach the extracted history if hasattr(args[0], 'history'): # Working on jicimagelib.Image. history = args[0].history else: # Working on something without a history, e.g. a ndarray stack. history = [] image = func(*args, **kwargs) image = Image.from_array(image, log_in_history=False) image.history = history image.history.append('Applied {} transform'.format(func.__name__)) if AutoWrite.on: fpath = AutoName.name(func) try: if AutoWrite.auto_safe_dtype: safe_range_im = 255 * normalise(image) pil_im = PIL.Image.fromarray(safe_range_im.astype(np.uint8)) else: pil_im = PIL.Image.fromarray(image) except TypeError: # Give a more meaningful error message. raise(TypeError( "Cannot handle this data type: {}".format(image.dtype))) pil_im.save(fpath) return image return func_as_transformation ############################################################################# # General purpose transforms. ############################################################################# @transformation def max_intensity_projection(stack): """Return maximum intensity projection of a stack. :param stack: 3D array from which to project third dimension :returns: :class:`jicimagelib.image.Image` """ return reduce_stack(stack, max) @transformation def min_intensity_projection(stack): """Return minimum intensity projection of a stack. :param stack: 3D array from which to project third dimension :returns: :class:`jicimagelib.image.Image` """ return reduce_stack(stack, min) @transformation @dtype_contract(input_dtype=np.float, output_dtype=np.float) def smooth_gaussian(image, sigma=1): """Returns Gaussian smoothed image. :param image: numpy array or :class:`jicimagelib.image.Image` :param sigma: standard deviation :returns: :class:`jicimagelib.image.Image` """ return scipy.ndimage.filters.gaussian_filter(image, sigma=sigma, mode="nearest") @transformation @dtype_contract(output_dtype=np.float) def equalize_adaptive_clahe(image, ntiles=8, clip_limit=0.01): """Return contrast limited adaptive histogram equalized image. The return value is normalised to the range 0 to 1. :param image: numpy array or :class:`jicimagelib.image.Image` of dtype float :param ntiles: number of tile regions :param clip_limit: clipping limit in range 0 to 1, higher values give more contrast """ # Convert input for skimage. skimage_float_im = normalise(image) if np.all(skimage_float_im): raise(RuntimeError("Cannot equalise when there is no variation.")) normalised = skimage.exposure.equalize_adapthist(skimage_float_im, ntiles_x=ntiles, ntiles_y=ntiles, clip_limit=clip_limit) assert np.max(normalised) == 1.0 assert np.min(normalised) == 0.0 return normalised @transformation @dtype_contract(output_dtype=np.bool) def threshold_otsu(image, multiplier=1.0): """Return image thresholded using Otsu's method. 
""" otsu_value = skimage.filters.threshold_otsu(image) return image > otsu_value * multiplier @transformation @dtype_contract(input_dtype=np.bool, output_dtype=np.bool) def remove_small_objects(image, min_size=50): """Remove small objects from an boolean image. :param image: boolean numpy array or :class:`jicimagelib.image.Image` :returns: boolean :class:`jicimagelib.image.Image` """ return skimage.morphology.remove_small_objects(image, min_size=min_size) ``` #### File: jicimagelib/tests/AutoWrite_unit_tests.py ```python import unittest class AutoWriteTests(unittest.TestCase): def test_import_AutoWrite_class(self): # This throws an error if the class cannot be imported. from jicimagelib.io import AutoWrite def test_on(self): from jicimagelib.io import AutoWrite self.assertTrue(AutoWrite.on) def test_auto_safe_dtype(self): from jicimagelib.io import AutoWrite self.assertTrue(AutoWrite.auto_safe_dtype) if __name__ == '__main__': unittest.main() ``` #### File: jicimagelib/tests/DataManager_functional_tests.py ```python import unittest import os import os.path import shutil import numpy as np from skimage.io import imread, use_plugin HERE = os.path.dirname(__file__) DATA_DIR = os.path.join(HERE, 'data') TMP_DIR = os.path.join(HERE, 'tmp') class DataManagerUserStory(unittest.TestCase): def setUp(self): if not os.path.isdir(TMP_DIR): os.mkdir(TMP_DIR) def tearDown(self): shutil.rmtree(TMP_DIR) def test_manual_addition_of_ImageCollection_to_DataManager(self): # We start off by creating a :class:`jicimagelib.image.DataManager`. # This takes a backend argument. The backend provides a means to store # unpacked image files. from jicimagelib.io import FileBackend from jicimagelib.image import DataManager backend = FileBackend(directory=TMP_DIR) data_manager = DataManager(backend=backend) # The :func:`jicimagelib.image.DataManager.conver` function is be # default an instance of the callable # :class:`jicimagelib.io.BFConvertWrapper` class. from jicimagelib.io import BFConvertWrapper self.assertTrue(isinstance(data_manager.convert, BFConvertWrapper)) # We also need to import an ImageCollection from jicimagelib.image import ImageCollection # If the input file has not already been converted with do so. fpath = os.path.join(DATA_DIR, 'z-series.ome.tif') self.assertFalse(data_manager.convert.already_converted(fpath)) if not data_manager.convert.already_converted(fpath): path_to_manifest = data_manager.convert(fpath) # unpacks and creates manifests self.assertEqual(path_to_manifest, os.path.join(TMP_DIR, 'z-series.ome.tif', 'manifest.json')) image_collection = ImageCollection() image_collection.parse_manifest(path_to_manifest) self.assertEqual(len(image_collection), 5) data_manager.append(image_collection) self.assertEqual(len(data_manager), 1) self.assertTrue(data_manager.convert.already_converted(fpath)) def test_data_manager(self): # Alice wants to analyse her microscopy data. To access the raw image # data within the microscopy files she uses a DataManager. from jicimagelib.image import DataManager from jicimagelib.io import FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) # Initially the DataManger is empty. self.assertEqual(len(data_manager), 0) # Alice loads her file of interest. data_manager.load(os.path.join(DATA_DIR, 'single-channel.ome.tif')) self.assertEqual(len(data_manager), 1) # A DataManager is a container for MicroscopyCollection instances. 
from jicimagelib.image import MicroscopyCollection microscopy_collection = data_manager[0] self.assertTrue(isinstance(microscopy_collection, MicroscopyCollection)) # In this instance the image collection only contains one item. self.assertEqual(len(microscopy_collection), 1) image = microscopy_collection[0] # A MicroscopyCollection is a container for MicroscopyImage instances. from jicimagelib.image import MicroscopyImage self.assertTrue(isinstance(image, MicroscopyImage)) # Alice then loads her second file of interest. data_manager.load(os.path.join(DATA_DIR, 'z-series.ome.tif')) # The data manager now contains two image collections. self.assertEqual(len(data_manager), 2) # There are five z-slices in the new image collection. zseries_collection = data_manager[1] self.assertEqual(len(zseries_collection), 5) # File format conversion trouble (for example using a non-existing input # file) raises RuntimeError. with self.assertRaises(RuntimeError): data_manager.load(os.path.join(DATA_DIR, 'nonsese.ome.tif')) def test_data_manager_already_unpacked(self): # The second time the data manager is loaded, it should contain data # without unpacking. from jicimagelib.image import DataManager from jicimagelib.io import FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) data_manager.load(os.path.join(DATA_DIR, 'single-channel.ome.tif')) self.assertEqual(len(data_manager), 1) backend_reload = FileBackend(TMP_DIR) data_manager_reload = DataManager(backend_reload) data_manager_reload.load(os.path.join(DATA_DIR, 'single-channel.ome.tif')) self.assertEqual(len(data_manager_reload), 1) def test_error_message_when_bfconvert_not_in_path(self): from jicimagelib.image import DataManager from jicimagelib.io import FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) tmp_path = os.environ['PATH'] del os.environ['PATH'] with self.assertRaises(RuntimeError): data_manager.load(os.path.join(DATA_DIR, 'single-channel.ome.tif')) os.environ['PATH'] = tmp_path def test_proxy_image(self): from jicimagelib.image import DataManager from jicimagelib.io import FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) data_manager.load(os.path.join(DATA_DIR, 'single-channel.ome.tif')) microscopy_collection = data_manager[0] proxy_image = microscopy_collection.proxy_image() self.assertTrue(os.path.isfile(proxy_image.fpath), 'no such file: {}'.format(proxy_image.fpath)) self.assertTrue(isinstance(proxy_image.image, np.ndarray)) self.assertEqual(proxy_image.image.shape, (167, 439)) def test_image_collection(self): from jicimagelib.image import DataManager from jicimagelib.io import FileBackend backend = FileBackend(TMP_DIR) data_manager = DataManager(backend) data_manager.load(os.path.join(DATA_DIR, 'multi-channel-4D-series.ome.tif')) microscopy_collection = data_manager[0] self.assertEqual(len(microscopy_collection), 7*5*3) # 7t, 5z, 3c proxy_image = microscopy_collection.proxy_image() self.assertEqual(proxy_image.series, 0) self.assertEqual(proxy_image.channel, 0) self.assertEqual(proxy_image.zslice, 0) self.assertEqual(proxy_image.timepoint, 0) proxy_image = microscopy_collection.proxy_image(s=0, c=1, z=2, t=3) self.assertEqual(proxy_image.series, 0) self.assertEqual(proxy_image.channel, 1) self.assertEqual(proxy_image.zslice, 2) self.assertEqual(proxy_image.timepoint, 3) self.assertEqual(5, len([i for i in microscopy_collection.zstack_proxy_iterator()])) for i, proxy_image in enumerate(microscopy_collection.zstack_proxy_iterator()):
self.assertEqual(proxy_image.series, 0) self.assertEqual(proxy_image.channel, 0) self.assertEqual(proxy_image.zslice, i) self.assertEqual(proxy_image.timepoint, 0) self.assertEqual(5, len([i for i in microscopy_collection.zstack_proxy_iterator(s=0, c=1, t=3)])) for i, proxy_image in enumerate(microscopy_collection.zstack_proxy_iterator(s=0, c=1, t=3)): self.assertEqual(proxy_image.series, 0) self.assertEqual(proxy_image.channel, 1) self.assertEqual(proxy_image.zslice, i) self.assertEqual(proxy_image.timepoint, 3) zstack_array = microscopy_collection.zstack_array(s=0, c=1, t=3) self.assertTrue(isinstance(zstack_array, np.ndarray)) self.assertEqual(zstack_array.shape, (167, 439, 5)) image = microscopy_collection.image(s=0, c=1, z=2, t=3) self.assertTrue(isinstance(image, np.ndarray)) self.assertEqual(image.shape, (167, 439)) def test_multipage_tiff(self): from jicimagelib.image import DataManager from jicimagelib.image import MicroscopyCollection, ImageCollection from jicimagelib.io import FileBackend backend = FileBackend(directory=TMP_DIR) data_manager = DataManager(backend) data_manager.load(os.path.join(DATA_DIR, 'multipage.tif')) image_collection = data_manager[0] # When we load a multipage tiff file we get an ImageCollection not a # MicroscopyCollection. self.assertFalse(isinstance(image_collection, MicroscopyCollection)) self.assertTrue(isinstance(image_collection, ImageCollection)) # Let us get the first proxy image. first_proxy_image = image_collection.proxy_image() self.assertTrue(os.path.isfile(first_proxy_image.fpath)) # Let us get the last proxy image. last_proxy_image = image_collection.proxy_image(index=-1) self.assertTrue(os.path.isfile(last_proxy_image.fpath)) # Let us get some actual images. first_image = image_collection.image() self.assertEqual(np.max(first_image), 30) second_image = image_collection.image(1) self.assertEqual(np.max(second_image), 90) third_image = image_collection.image(index=2) self.assertEqual(np.max(third_image), 120) def test_load_returns_collection(self): from jicimagelib.image import DataManager from jicimagelib.image import ImageCollection from jicimagelib.image import MicroscopyImage, ProxyImage from jicimagelib.io import FileBackend backend = FileBackend(directory=TMP_DIR) data_manager = DataManager(backend) collection = data_manager.load(os.path.join(DATA_DIR, 'multipage.tif')) self.assertTrue(isinstance(collection, ImageCollection)) self.assertFalse(isinstance(collection[0], MicroscopyImage)) self.assertTrue(isinstance(collection[0], ProxyImage)) if __name__ == '__main__': unittest.main() ```
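The user story boils down to a short recipe, assuming `jicimagelib` and its `bfconvert` dependency are installed (the directory and file names below are placeholders):

```python
from jicimagelib.image import DataManager
from jicimagelib.io import FileBackend

backend = FileBackend("unpacked")  # directory where raw files get unpacked
data_manager = DataManager(backend)

collection = data_manager.load("series.ome.tif")  # placeholder input file
image = collection.image(s=0, c=1, z=2, t=3)      # one plane as a numpy array
stack = collection.zstack_array(s=0, c=1, t=3)    # (x, y, z) numpy array
```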
{ "source": "JIC-CSB/jobarchitect", "score": 3 }
#### File: jobarchitect/jobarchitect/backends.py ```python import os from jinja2 import Environment, PackageLoader ENV = Environment(loader=PackageLoader('jobarchitect', 'templates'), keep_trailing_newline=True) class JobSpec(object): """Job specification class.""" def __init__(self, tool_path, dataset_path, output_root, hash_ids, image_name=None): self._spec = dict() self._spec["tool_path"] = tool_path self._spec["tool_dir"] = os.path.dirname(tool_path) self._spec["tool_script"] = os.path.basename(tool_path) self._spec["dataset_path"] = os.path.abspath(dataset_path) self._spec["output_root"] = os.path.abspath(output_root) self._spec["hash_ids"] = " ".join([str(i) for i in hash_ids]) if image_name is not None: self._spec["image_name"] = image_name def __getitem__(self, key): return self._spec[key] def keys(self): return self._spec.keys() @property def tool_path(self): """Return the path to the tool.""" return self._spec["tool_path"] @property def dataset_path(self): """Return the dataset path.""" return self._spec["dataset_path"] @property def output_root(self): """Return the output root path.""" return self._spec["output_root"] @property def hash_ids(self): """Return the hash identifiers as a string.""" return self._spec["hash_ids"] @property def image_name(self): """Return the container image name.""" if "image_name" not in self._spec: raise(AttributeError("Image name not specified")) return self._spec["image_name"] def generate_bash_job(jobspec): """Return bash job script as a string. The script contains code to run all analysis on all data in one chunk from a split dataset. :param jobspec: job specification as a :class:`jobarchitect.JobSpec` :returns: bash job script as a string """ template = ENV.get_template("bash_job.sh.j2") return template.render(jobspec) def generate_docker_job(jobspec): """Return docker job script as a string. The script contains code to run a docker container to analyse data. :param jobspec: job specification as a :class:`jobarchitect.JobSpec` :returns: docker job script as a string """ template = ENV.get_template("docker_job.sh.j2") return template.render(jobspec) def generate_singularity_job(jobspec): """Return singularity job script as a string. The script contains code to run a singularity container to analyse data. :param jobspec: job specification as a :class:`jobarchitect.JobSpec` :returns: singularity job script as a string """ template = ENV.get_template("singularity_job.sh.j2") return template.render(jobspec) def render_script(template_name, variables): """Return script as a string. """ template = ENV.get_template(template_name) return template.render(variables) ```
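A sketch of the backend API in use; the paths and hash identifiers are placeholders:

```python
from jobarchitect.backends import JobSpec, generate_bash_job

jobspec = JobSpec(
    tool_path="/tools/analyse.py",    # placeholder analysis script
    dataset_path="/data/my_dataset",  # placeholder dataset location
    output_root="/output",
    hash_ids=[101, 102, 103],
)

print(jobspec.hash_ids)              # "101 102 103"
script = generate_bash_job(jobspec)  # rendered from bash_job.sh.j2
print(script)
```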
{ "source": "JIC-CSB/root-image-analysis", "score": 3 }
#### File: root-image-analysis/scripts/sum_segmentation_area.py ```python import argparse import numpy as np from skimage.io import use_plugin, imread use_plugin('freeimage') def sum_segmented_area(segmentation_file): im_array = imread(segmentation_file) area = len(np.where(im_array != 0)[0]) return area def main(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('segmentation_file', help="File containing segmentation") args = parser.parse_args() print sum_segmented_area(args.segmentation_file) if __name__ == '__main__': main() ``` #### File: scripts/workflow/__init__.py ```python import os import inspect import json from collections import OrderedDict import logging from collections import namedtuple import copy_reg import types __version__ = 0.3 ############################################################################# # Enable pickling of instance methods. ############################################################################# def reduce_method(m): return (getattr, (m.__self__, m.__func__.__name__)) copy_reg.pickle(types.MethodType, reduce_method) ############################################################################# # Setup logging. ############################################################################# def setup_logger(name): logger = logging.getLogger(name) console_handler = logging.StreamHandler() formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') console_handler.setFormatter(formatter) logger.addHandler(console_handler) logger.setLevel(logging.WARNING) return logger logger = setup_logger(__name__) ############################################################################# # Workflow run function. ############################################################################# def run(workflow, mapper=map): """Run the workflow.""" if not os.path.isdir(workflow.output_directory): os.mkdir(workflow.output_directory) if len(workflow.nodes) > 0: for node in workflow.nodes: run(node, mapper=mapper) else: try: workflow.process() except NotImplementedError: mapper(workflow.execute, workflow.get_tasks()) ############################################################################# # Settings. ############################################################################# class _NestedClassGetter(object): """See also: http://stackoverflow.com/questions/1947904/how-can-i-pickle-a-nested-class-in-python/11493777#11493777 """ def __call__(self, containing_class, class_name): nested_class = getattr(containing_class, class_name) nested_instance = _NestedClassGetter() nested_instance.__class__ = nested_class return nested_instance class _MetaSettings(type): """Meta class for storing node settings.""" def __init__(cls, name, bases, attrs): cls.allowed_attrs = [a for a in attrs.keys() if not a.startswith('__') and not callable(cls.__dict__[a])] class BaseSettings(object): """Base class for storing node settings. :raises: RuntimeError if one tries to set a setting that has not been specified in the class. 
""" __metaclass__ = _MetaSettings map = map def __setattr__(self, name, value): if name == 'map': self.__dict__[name] = value elif name not in self.__class__.allowed_attrs: raise RuntimeError self.__dict__[name] = value def __reduce__(self): """See also: http://stackoverflow.com/questions/1947904/how-can-i-pickle-a-nested-class-in-python/11493777#11493777 """ state = self.__dict__.copy() for key, value in self.items(): state[key] = value return (_NestedClassGetter(), (_BaseNode, self.__class__.__name__,), state,) def _keys(self): """Return list of sorted setting names.""" return sorted([key for key in self.__class__.allowed_attrs]) def items(self): """Return settings as a sorted list of key/value pairs.""" items = [] for key in self._keys(): try: items.append((key, self.__dict__[key])) except KeyError: items.append((key, self.__class__.__dict__[key])) return items def to_json(self, indent=None): """Return json representation of settings. Ordered alphabetically.""" ordered_dict = OrderedDict(self.items()) return json.dumps(ordered_dict, indent=indent) def from_json(self, json_str): """Configure the settings from a json string.""" for key, value in json.loads(json_str).items(): self.__setattr__(key, value) ############################################################################# # Private base node. ############################################################################# class _BaseNode(object): """Base class for the processing nodes.""" class Settings(BaseSettings): pass def __init__(self): self.settings = self.__class__.Settings() self._output_directory = '' self._parent = None self.nodes = [] self.configure() def configure(self): """Configure a meta node.""" pass @property def output_directory(self): """Return the node's working directory. :returns: path to output directory """ if self._parent is None: return os.path.join(self._output_directory, self.__class__.__name__) else: return os.path.join(self._parent.output_directory, self.__class__.__name__) @output_directory.setter def output_directory(self, directory): """Set the worflow's working directory. :param directory: directory where the workflow will create sub-directories :raises: RuntimeError if called on a node that is not the top-level one """ if self._parent is None: self._output_directory = directory else: raise RuntimeError('Working directory cannot be set on a sub node.') def process(self): """Process the node. Override this function to implement custom processing logic. This function does not get called if the execute and get_task functions have been implemented. This function is useful when it is difficult to set up a function and input for a map(funciton, input) logic. For example when going from many files to one. """ raise NotImplementedError def get_tasks(self): """Return a list of task. Or rather a list of tuples of inputs for each task. Override this function to implement the desired input for the execution logic. The execute command is called by the process fuction using map: map(self.execute, self.get_tasks) """ raise NotImplementedError def execute(self, task_input): """Execute a single task. Override this function to implement the desired execution logic. The execute command is called by the process fuction using map: map(self.execute, self.get_tasks) """ raise NotImplementedError def add_node(self, node): """Add a node to the meta node. 
:param node: node to be added to the meta node :returns: the added node """ node._parent = self self.nodes.append(node) return node ############################################################################# # Input and output ############################################################################# class FilePath(str): """Class for dealing with file paths. Subclass of ``str``. """ @property def exists(self): """Whether or not the file exists.""" return os.path.isfile(self) def is_more_recent_than(self, other): """Whether or not the file is more recent than the other file.""" return os.path.getmtime(self) > os.path.getmtime(other) class _InOne(object): """Base class for nodes that take one input.""" def __init__(self, input_obj): self.input_obj = input_obj @property def input_file(self): """Return the input file name. :returns: class:`workflow.FilePath` """ return FilePath(self.input_obj) class _InMany(object): """Base class for nodes that take many inputs.""" def __init__(self, input_obj): self.input_obj = input_obj @property def input_files(self): """Return list containing input file names / tuples of file names. If the input_obj was a path or a node this function yields filenames. If the input_obj was a tuple or list of paths/nodes this function yields a tuple of filenames. :returns: list of :class:`workflow.FilePath` instances or list of tuples of :class:`workflow.FilePath` instances """ def yield_files(input_obj): """Recursive function for yielding files.""" if isinstance(input_obj, _OutMany): # If the input object is an instance of _OutMany it will have # access to the output_files property. for fname in input_obj.output_files: yield FilePath(fname) elif hasattr(input_obj, '__iter__'): # This comes after isinstance(input_obj, _OutMany) because some # unit tests make use of MagicMock that has an "__iter__" # attribute. # The input object is a tuple or list of input objects. all_files = [] for iobj in input_obj: all_files.append(yield_files(iobj)) for fnames in zip(*all_files): yield fnames else: # At this point we assume that we have been given a path to an # input directory. for fname in os.listdir(input_obj): yield FilePath(os.path.join(input_obj, fname)) return [f for f in yield_files(self.input_obj)] class _OutMany(object): """Base class for nodes that return many outputs.""" @property def output_files(self): """Return list of output file names. :returns: list of :class:`workflow.FilePath` instances """ return [FilePath(os.path.join(self.output_directory, fname)) for fname in os.listdir(self.output_directory)] def get_output_file(self, fname, enumerator=None): """Returns output file name. This is a helper function to create meaningful output filenames. :param fname: input file name :param enumerator: unique id (useful if the input file names are not unique) :returns: :class:`workflow.FilePath` """ logger.info('fname: {}'.format(fname)) fname = os.path.basename(fname) if enumerator is not None: name, suffix = fname.split('.') fname = '{}_{}.{}'.format(name, enumerator, suffix) return FilePath(os.path.join(self.output_directory, fname)) class _OutOne(object): """Base class for nodes that produce one output.""" def __init__(self, output_obj): self.output_obj = output_obj @property def output_file(self): """Return the output file name. :returns: :class:`workflow.FilePath` """ return FilePath(self.output_obj) ############################################################################# # Public nodes.
############################################################################# Task = namedtuple('Task', ['input_file', 'output_file', 'settings']) class OneToManyNode(_BaseNode, _InOne, _OutMany): """One to many processing node.""" def __init__(self, input_obj): _InOne.__init__(self, input_obj) # Run base code initialisation after in case _BaseNode.configure tries # to access input_obj added by _InOne.__init__. _BaseNode.__init__(self) class ManyToManyNode(_BaseNode, _InMany, _OutMany): """Many to many processing node.""" def __init__(self, input_obj): _InMany.__init__(self, input_obj) # Run base code initialisation after in case _BaseNode.configure tries # to access input_obj added by _InMany.__init__. _BaseNode.__init__(self) def get_tasks(self): """Return list of named tuples of input values for execute. :returns: list of Task(input_file, output_file, settings) """ tasks = [] for input_fn in self.input_files: output_fn = self.get_output_file(input_fn) if output_fn.exists and output_fn.is_more_recent_than(input_fn): continue tasks.append(Task(input_fn, self.get_output_file(input_fn), self.settings)) return tasks class ManyToOneNode(_BaseNode, _InMany, _OutOne): """Many to one processing node.""" def __init__(self, input_obj, output_obj): _InMany.__init__(self, input_obj) _OutOne.__init__(self, output_obj) # Run base code initialisation after in case _BaseNode.configure tries # to access input_obj/output_obj added by _InMany/_OutOne.__init__. _BaseNode.__init__(self) class OneToOneNode(_BaseNode, _InOne, _OutOne): """One to one node.""" def __init__(self, input_obj, output_obj): _InOne.__init__(self, input_obj) _OutOne.__init__(self, output_obj) # Run base code initialisation after in case _BaseNode.configure tries # to access input_obj/output_obj added by _InOne/_OutOne.__init__. _BaseNode.__init__(self) ```
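A minimal sketch of a concrete node built on this module, assuming the `workflow` package above is importable; the node, its setting, and the directory names are invented for illustration:

```python
import shutil

import workflow


class CopyFiles(workflow.ManyToManyNode):
    """Toy node: copy every input file into the node's output directory."""

    class Settings(workflow.BaseSettings):
        compress = False  # setting names must be declared on the class

    def execute(self, task):
        # task is a Task(input_file, output_file, settings) named tuple.
        shutil.copyfile(task.input_file, task.output_file)


node = CopyFiles("raw_data")  # placeholder directory of input files
node.output_directory = "."   # output lands in ./CopyFiles
workflow.run(node)
```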
{ "source": "JIC-CSB/wheat-leaf-segmentation", "score": 3 }
#### File: wheat-leaf-segmentation/scripts/util.py
```python
import os.path
import argparse

from jicbioimage.core.image import DataManager
from jicbioimage.core.io import FileBackend

HERE = os.path.dirname(os.path.realpath(__file__))


def get_microscopy_collection(input_file):
    """Return microscopy collection from input file."""
    data_dir = os.path.abspath(os.path.join(HERE, "..", "data"))
    if not os.path.isdir(data_dir):
        os.mkdir(data_dir)
    backend_dir = os.path.join(data_dir, 'unpacked')
    file_backend = FileBackend(backend_dir)
    data_manager = DataManager(file_backend)
    microscopy_collection = data_manager.load(input_file)
    return microscopy_collection


def argparse_get_image():
    """Return microscopy series image from input file."""
    # Pass the docstring as the description; the bare positional argument
    # would have set the parser's prog name instead.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("input_file", help="path to raw microscopy data")
    parser.add_argument("series", type=int, help="microscopy series")
    args = parser.parse_args()
    microscopy_collection = get_microscopy_collection(args.input_file)
    image = microscopy_collection.image(s=args.series)
    image = image[:, :, 0]
    return image
```
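A hedged sketch of how a downstream segmentation script might consume the helper above; the import path `util`, the script's docstring, and the printed attribute are assumptions for illustration only.

```python
# Hypothetical downstream script (not part of the repository).
"""Segment a wheat leaf from a microscopy series."""

from util import argparse_get_image  # assumes scripts/ is on sys.path


def main():
    # Parses "input_file" and "series" from the command line and returns
    # the first channel of the selected series.
    image = argparse_get_image()
    print(image.shape)  # assumes a numpy-backed image with a .shape


if __name__ == "__main__":
    main()
```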
{ "source": "jic-dtool/dtool-annotation", "score": 2 }
#### File: dtool-annotation/tests/test_annotation_functional.py ```python from click.testing import CliRunner from . import tmp_dataset_fixture # NOQA def test_annotation_basic(tmp_dataset_fixture): # NOQA from dtool_annotation.cli import annotation runner = CliRunner() result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "project", "world-peace" ]) assert result.exit_code == 0 result = runner.invoke(annotation, [ "get", tmp_dataset_fixture.uri, "project" ]) assert result.exit_code == 0 expected = "world-peace" actual = result.output.strip() assert actual == expected def test_annotation_invalid_name(tmp_dataset_fixture): # NOQA from dtool_annotation.cli import annotation runner = CliRunner() # Spaces, slashes, etc are not allowed. result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "project name", "world-peace" ]) assert result.exit_code == 400 expected_lines = [ "Invalid annotation name 'project name'", "Name must be 80 characters or less", "Names may only contain the characters: 0-9 a-z A-Z - _ .", "Example: citation-index", ] for line in expected_lines: assert result.output.find(line) != -1 def test_get_non_existing_annotation(tmp_dataset_fixture): # NOQA from dtool_annotation.cli import annotation runner = CliRunner() result = runner.invoke(annotation, [ "get", tmp_dataset_fixture.uri, "project", ]) assert result.exit_code == 401 expected = "No annotation named: 'project'" assert result.output.strip() == expected def test_annotation_types(tmp_dataset_fixture): # NOQA from dtool_annotation.cli import annotation runner = CliRunner() # Default to string result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "one_as_str", "1" ]) assert result.exit_code == 0 assert tmp_dataset_fixture.get_annotation("one_as_str") == "1" # Explicit set to string. result = runner.invoke(annotation, [ "set", "--type", "str", tmp_dataset_fixture.uri, "one_as_str_explicit", "1" ]) assert result.exit_code == 0 assert tmp_dataset_fixture.get_annotation("one_as_str_explicit") == "1" # Explicit set to int. result = runner.invoke(annotation, [ "set", "--type", "int", tmp_dataset_fixture.uri, "one_as_int_explicit", "1" ]) assert result.exit_code == 0 assert tmp_dataset_fixture.get_annotation("one_as_int_explicit") == 1 # Explicit set to float. result = runner.invoke(annotation, [ "set", "--type", "float", tmp_dataset_fixture.uri, "one_as_float_explicit", "1" ]) assert result.exit_code == 0 ann = tmp_dataset_fixture.get_annotation("one_as_float_explicit") offset = 0.00000001 assert ann > 1 - offset assert ann < 1 + offset assert isinstance(ann, float) # Explicit set to bool. result = runner.invoke(annotation, [ "set", "--type", "bool", tmp_dataset_fixture.uri, "true_as_bool_explicit", "1" ]) assert result.exit_code == 0 ann = tmp_dataset_fixture.get_annotation("true_as_bool_explicit") assert ann assert isinstance(ann, bool) result = runner.invoke(annotation, [ "set", "--type", "bool", tmp_dataset_fixture.uri, "false_as_bool_explicit", "0" ]) assert result.exit_code == 0 ann = tmp_dataset_fixture.get_annotation("false_as_bool_explicit") assert not ann assert isinstance(ann, bool) # Explicit set to json. 
result = runner.invoke(annotation, [ "set", "--type", "json", tmp_dataset_fixture.uri, "json_explicit", '{"x": 3, "y": 5}' ]) assert result.exit_code == 0 ann = tmp_dataset_fixture.get_annotation("json_explicit") assert ann == {"x": 3, "y": 5} def test_ls_command(tmp_dataset_fixture): # NOQA from dtool_annotation.cli import annotation runner = CliRunner() result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "project", "world-peace" ]) assert result.exit_code == 0 result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "stars", "3", "--type", "int" ]) assert result.exit_code == 0 result = runner.invoke(annotation, [ "set", tmp_dataset_fixture.uri, "params", '{"x": 3}', "--type", "json" ]) assert result.exit_code == 0 result = runner.invoke(annotation, [ "ls", tmp_dataset_fixture.uri, ]) assert result.exit_code == 0 expectations = [ ("params", "x': 3}"), ("project", "world-peace"), ("stars", "3") ] for e, a in zip(expectations, result.output.strip().split("\n")): assert a.count("\t") == 1 assert a.startswith(e[0]) assert a.endswith(e[1]) ```
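The tests above drive annotations through the CLI; the same annotations can also be read programmatically with `dtoolcore`, whose `DataSet` API is used elsewhere in this corpus. The URI below is a placeholder.

```python
# Sketch: reading annotations directly with dtoolcore rather than the CLI.
import dtoolcore

# Placeholder URI; in the tests above this would be tmp_dataset_fixture.uri.
dataset = dtoolcore.DataSet.from_uri("file:///path/to/dataset")

print(dataset.get_annotation("project"))  # e.g. "world-peace"
print(dataset.list_annotation_names())    # all annotation names
```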
{ "source": "jic-dtool/dtool-conf", "score": 3 }
#### File: dtool-conf/dtool_config/cli.py ```python import click import dtoolcore.utils import dtool_config.utils CONFIG_PATH = dtoolcore.utils.DEFAULT_CONFIG_PATH @click.group() def config(): """Configure dtool settings.""" @config.group() def user(): """Configure user settings.""" @user.command() @click.argument("username", nargs=-1, required=False) def name(username): """Display / set / update the user name.""" if not username: click.secho(dtool_config.utils.get_username(CONFIG_PATH)) else: username_str = " ".join(username) click.secho(dtool_config.utils.set_username(CONFIG_PATH, username_str)) @user.command() @click.argument("email_address", required=False) def email(email_address): """Display / set / update the user email.""" if not email_address: click.secho(dtool_config.utils.get_user_email(CONFIG_PATH)) else: click.secho(dtool_config.utils.set_user_email( CONFIG_PATH, email_address )) @config.command() @click.argument( "readme_template_file", required=False, type=click.Path(exists=True, dir_okay=False) ) def readme_template(readme_template_file): """Display / set / update the readme template file.""" if not readme_template_file: click.secho(dtool_config.utils.get_readme_template_fpath( CONFIG_PATH, )) else: click.secho(dtool_config.utils.set_readme_template_fpath( CONFIG_PATH, readme_template_file )) @config.group() def ecs(): """Configure ECS S3 object storage.""" @ecs.command() @click.argument("bucket_name") @click.argument("url", required=False) def endpoint(bucket_name, url): """Display / set / update the ECS endpoint URL.""" if not url: click.secho( dtool_config.utils.get_ecs_endpoint(CONFIG_PATH, bucket_name) ) else: click.secho( dtool_config.utils.set_ecs_endpoint(CONFIG_PATH, bucket_name, url) ) @ecs.command() @click.argument("bucket_name") @click.argument("ecs_access_key_id", required=False) def access_key_id(bucket_name, ecs_access_key_id): """Display / set / update the ECS access key id (username).""" if not ecs_access_key_id: click.secho( dtool_config.utils.get_ecs_access_key_id(CONFIG_PATH, bucket_name) ) else: click.secho(dtool_config.utils.set_ecs_access_key_id( CONFIG_PATH, bucket_name, ecs_access_key_id )) @ecs.command() @click.argument("bucket_name") @click.argument("ecs_secret_access_key", required=False) def secret_access_key(bucket_name, ecs_secret_access_key): """Display / set / update the ECS secret access key.""" if not ecs_secret_access_key: click.secho(dtool_config.utils.get_ecs_secret_access_key( CONFIG_PATH, bucket_name ) ) else: click.secho(dtool_config.utils.set_ecs_secret_access_key( CONFIG_PATH, bucket_name, ecs_secret_access_key )) @ecs.command(name="ls") def list_ecs_base_uris(): """List the configured ECS base URIs.""" for base_uri in dtool_config.utils.list_ecs_base_uris(CONFIG_PATH): click.secho(base_uri) @config.command() @click.argument( "cache_directory_path", required=False, type=click.Path(exists=True, file_okay=False) ) def cache(cache_directory_path): """Display / set / update the dtool cache directory.""" if not cache_directory_path: click.secho(dtool_config.utils.get_cache( CONFIG_PATH, )) else: click.secho(dtool_config.utils.set_cache( CONFIG_PATH, cache_directory_path )) @config.group() def azure(): """Configure Azure Storage.""" @azure.command() # NOQA @click.argument("container") def get(container): """Print the secret access key of the specified Azure storage container.""" click.secho(dtool_config.utils.get_azure_secret_access_key( CONFIG_PATH, container, )) @azure.command() # NOQA @click.argument("container") 
@click.argument("azure_secret_access_key") def set(container, azure_secret_access_key): """Set/update the access key for the specified Azure storage container.""" click.secho(dtool_config.utils.set_azure_secret_access_key( CONFIG_PATH, container, azure_secret_access_key )) @azure.command() # NOQA def ls(): """List the configured Azure base URIs.""" for base_uri in dtool_config.utils.list_azure_base_uris(CONFIG_PATH): click.secho(base_uri) ```
{ "source": "jic-dtool/dtool-gui-tk", "score": 2 }
#### File: dtool-gui-tk/dtool_gui_tk/models.py
```python
import os
import logging
import json

from operator import itemgetter

import dtoolcore
import dtoolcore.utils

from ruamel.yaml import YAML

# This is a hack.
from dtool_info.inventory import _dataset_info
from dtool_info.utils import sizeof_fmt

from dtool_gui_tk.metadata import MetadataSchemaItem

logger = logging.getLogger(__name__)

LOCAL_BASE_URI_KEY = "DTOOL_LOCAL_BASE_URI"
METADATA_SCHEMA_ANNOTATION_NAME = "_metadata_schema"


def get_json_schema_type(obj):
    """Return JSON schema type representation of object.

    :param obj: object to return the JSON schema type for
    :returns: JSON schema type as a string
    :raises: :class:`dtool_gui_tk.models.UnsupportedTypeError` if the value is
             a complex data structure. Currently supported data types are
             ``str``, ``int``, ``float``, and ``bool``.
    """
    if isinstance(obj, str):
        return "string"
    # This check needs to be before int because bool is a subclass of int.
    elif isinstance(obj, bool):
        return "boolean"
    elif isinstance(obj, int):
        return "integer"
    elif isinstance(obj, float):
        return "number"
    else:
        raise(UnsupportedTypeError("{} not supported yet".format(type(obj))))


def metadata_model_from_dataset(dataset):
    """Return MetadataModel from a dataset.

    The schema is extracted from the readme and annotations. Specifically, if
    an annotation named "_metadata_schema" exists, it is loaded. Key value
    pairs from the readme are then added. Key value pairs are then extracted
    from the dataset annotations (the "_metadata_schema" key is ignored).

    The precedence for determining the type of a schema item is to use the
    type defined in the "_metadata_schema" if present; otherwise the type of
    the value extracted from the dataset is used.

    :param dataset: :class:`dtoolcore.DataSet`
    :returns: :class:`dtool_gui_tk.models.MetadataModel` instance
    :raises dtool_gui_tk.models.MetadataConflictError: if the values extracted
        from the readme and annotations do not match for a particular key
    :raises dtool_gui_tk.models.UnsupportedTypeError: if the value is not
        supported, see :func:`dtool_gui_tk.models.get_json_schema_type`.
    """
    metadata_model = MetadataModel()

    ignore_metadata_schemas = set()
    if METADATA_SCHEMA_ANNOTATION_NAME in dataset.list_annotation_names():
        schema = dataset.get_annotation(METADATA_SCHEMA_ANNOTATION_NAME)
        metadata_model.load_master_schema(schema)
        for name in metadata_model.item_names:
            ignore_metadata_schemas.add(name)

    yaml = YAML()
    readme_dict = yaml.load(dataset.get_readme_content())
    if readme_dict is None:
        readme_dict = {}
    if isinstance(readme_dict, dict):
        for key in readme_dict.keys():
            value = readme_dict[key]
            _type = get_json_schema_type(value)
            schema = {"type": _type}
            # Only add schema items not added from "_metadata_schema".
            if key not in ignore_metadata_schemas:
                metadata_model.add_metadata_property(key, schema, True)
            # Update the value regardless.
            metadata_model.set_value(key, value)

    for key in dataset.list_annotation_names():
        # Ignore the special key that stores a schema.
        if key == METADATA_SCHEMA_ANNOTATION_NAME:
            continue
        value = dataset.get_annotation(key)
        _type = get_json_schema_type(value)
        schema = {"type": _type}
        if key in readme_dict:
            readme_value = readme_dict[key]
            if readme_value != value:
                err_msg = "Annotation ({}) and readme ({}) values do not match for key {}"  # NOQA
                raise(MetadataConflictError(
                    err_msg.format(readme_value, value, key)
                )
                )
        # Only add schema items not added from "_metadata_schema".
        if key not in ignore_metadata_schemas:
            metadata_model.add_metadata_property(key, schema, True)
        # Update the value regardless.
        metadata_model.set_value(key, dataset.get_annotation(key))

    return metadata_model


class DirectoryDoesNotExistError(IOError):
    pass


class MetadataValidationError(ValueError):
    pass


class MetadataConflictError(ValueError):
    pass


class MissingBaseURIModelError(ValueError):
    pass


class MissingDataSetNameError(ValueError):
    pass


class MissingInputDirectoryError(ValueError):
    pass


class MissingMetadataModelError(ValueError):
    pass


class MissingRequiredMetadataError(ValueError):
    pass


class UnsupportedTypeError(TypeError):
    pass


class _ConfigFileVariableBaseModel(object):

    def __init__(self, config_path=None):
        self._config_path = config_path

    def _get(self):
        return dtoolcore.utils.get_config_value_from_file(
            self.KEY,
            self._config_path
        )

    def _put(self, value):
        dtoolcore.utils.write_config_value_to_file(
            self.KEY,
            value,
            self._config_path
        )


class LocalBaseURIModel(_ConfigFileVariableBaseModel):
    "Model for managing local base URI."

    KEY = "DTOOL_LOCAL_BASE_URI"

    def get_base_uri(self):
        """Return the base URI.

        :returns: base URI where datasets will be read from and written to
        """
        return self._get()

    def put_base_uri(self, base_uri):
        """Put/update the base URI.

        The value is updated in the config file.

        :param base_uri: base URI
        """
        value = dtoolcore.utils.sanitise_uri(base_uri)
        self._put(value)


class MetadataSchemaListModel(_ConfigFileVariableBaseModel):
    "Model for managing the list of metadata schemas."

    KEY = "DTOOL_METADATA_SCHEMA_DIRECTORY"

    def get_metadata_schema_directory(self):
        """Return the metadata schema directory.

        :returns: absolute path to directory where metadata schemas are stored
                  as JSON files
        """
        return self._get()

    def put_metadata_schema_directory(self, metadata_schema_directory):
        """Put/update the path to the metadata schema directory.

        The value is updated in the config file.

        :param metadata_schema_directory: path to the metadata schema directory
        """
        value = os.path.abspath(metadata_schema_directory)
        self._put(value)

    def put_metadata_schema_item(self, name, metadata_schema):
        """Put/update a metadata schema item in the metadata schema directory.

        :param name: name of the metadata schema
        :param metadata_schema: dictionary with the metadata schema
        """
        fname = name + ".json"
        fpath = os.path.join(
            self.get_metadata_schema_directory(),
            fname
        )
        with open(fpath, "w") as fh:
            json.dump(metadata_schema, fh)

    @property
    def metadata_model_names(self):
        """Return list of metadata model names.

        :returns: list of metadata model names
        """
        metadata_schema_directory = self.get_metadata_schema_directory()
        if metadata_schema_directory is None:
            return []
        filenames = os.listdir(metadata_schema_directory)
        return sorted([os.path.splitext(f)[0] for f in filenames])

    def get_metadata_model(self, name):
        """Return a :class:`dtool_gui_tk.models.MetadataModel` instance.

        :param name: metadata model name
        :returns: :class:`dtool_gui_tk.models.MetadataModel` instance
        """
        metadata_schema_directory = self.get_metadata_schema_directory()
        schema_fpath = os.path.join(metadata_schema_directory, name + ".json")
        metadata_model = MetadataModel()
        with open(schema_fpath) as fh:
            master_schema = json.load(fh)
        metadata_model.load_master_schema(master_schema)
        return metadata_model


class MetadataModel(object):
    "Model for managing metadata."
    def __init__(self):
        self._metadata_schema_items = {}
        self._metadata_values = {}
        self._required_item_names = set()
        self._selected_optional_item_names = set()

    def __eq__(self, other):
        if not self._metadata_schema_items == other._metadata_schema_items:
            return False
        if not self._metadata_values == other._metadata_values:
            return False
        if not self._required_item_names == other._required_item_names:
            return False
        if not self._selected_optional_item_names == other._selected_optional_item_names:  # NOQA
            return False
        return True

    @property
    def is_empty(self):
        """Return True if no metadata schema has been loaded
        or if the model has been cleared.

        :returns: boolean
        """
        if len(self._metadata_schema_items) == 0:
            return True
        return False

    @property
    def item_names(self):
        """Return metadata names (keys).

        :returns: names of items in the metadata schema
        """
        return sorted(self._metadata_schema_items.keys())

    @property
    def required_item_names(self):
        """Return list of names of required metadata items.

        :returns: names of required items in the metadata schema
        """
        return sorted(list(self._required_item_names))

    @property
    def optional_item_names(self):
        """Return list of names of optional metadata items.

        :returns: names of optional items in the metadata schema
        """
        all_set = set(self.item_names)
        required_set = set(self.required_item_names)
        return sorted(list(all_set - required_set))

    @property
    def selected_optional_item_names(self):
        """Return list of names of selected optional metadata items.

        A :class:`dtool_gui_tk.models.MetadataModel` instance can have
        optional :class:`metadata.MetadataSchemaItem` instances. However, for
        these to be included when the dataset metadata is set/updated they
        need to be selected. This property lists the names of the selected
        optional metadata schema items.

        :returns: names of selected optional items in the metadata schema
        """
        return sorted(list(self._selected_optional_item_names))

    @property
    def deselected_optional_item_names(self):
        """Return list of names of deselected optional metadata items.

        Inverse of
        :func:`dtool_gui_tk.models.MetadataModel.selected_optional_item_names`

        :returns: names of deselected optional items in the metadata schema
        """
        optional_set = set(self.optional_item_names)
        selected_set = set(self.selected_optional_item_names)
        return sorted(list(optional_set - selected_set))

    @property
    def in_scope_item_names(self):
        """Return required and selected optional item names.

        :returns: names of required and selected optional items in the
                  metadata schema
        """
        return self.required_item_names + self.selected_optional_item_names

    @property
    def all_issues(self):
        """Return list of issues with metadata.

        Only reports issues for required metadata and optional metadata that
        has been selected. Each value that has been set is evaluated against
        its schema.

        :returns: list of issues
        """
        _issues = []
        for item_name in self.in_scope_item_names:
            schema = self.get_schema(item_name)
            value = self.get_value(item_name)
            if value is not None:
                for i in schema.issues(value):
                    _issues.append((item_name, str(i)))
        return _issues

    def clear(self):
        """Clear the model of existing data."""
        self._metadata_schema_items = {}
        self._metadata_values = {}
        self._required_item_names = set()
        self._selected_optional_item_names = set()

    def load_master_schema(self, master_schema):
        """Load JSON schema of an object describing the metadata model.
        Example of a master schema::

            {
                "type": "object",
                "properties": {
                    "description": {"type": "string"},
                    "project": {"type": "string"}
                },
                "required": ["description"]
            }

        The "type" of the master schema should be "object". The "properties"
        in the master schema are converted to
        :class:`metadata.MetadataSchemaItem` instances. The "required"
        property is used to classify metadata items as required/optional.

        :param master_schema: dictionary containing a JSON schema
        """
        for name, schema in master_schema["properties"].items():
            self._metadata_schema_items[name] = MetadataSchemaItem(schema)

        if "required" in master_schema:
            for r in master_schema["required"]:
                self._required_item_names.add(r)

    def add_metadata_property(self, name, schema={}, required=False):
        """Add a metadata property to the master schema.

        Method to build up or extend the master schema.

        :param name: name of the metadata item, the key used in the property
                     dictionary of the master schema
        :param schema: the JSON schema to use to create a
                       :class:`metadata.MetadataSchemaItem`
        :param required: boolean value stating whether the property is
                         required or optional
        """
        self._metadata_schema_items[name] = MetadataSchemaItem(schema)
        if required:
            self._required_item_names.add(name)

    def get_master_schema(self):
        """Return JSON schema of object describing the metadata model.

        :returns: JSON schema representing the current state of the
                  :class:`dtool_gui_tk.models.MetadataModel` as a dictionary
        """
        master_schema = {
            "type": "object",
            "properties": {},
            "required": []
        }
        for name in self.item_names:
            schema_item = self._metadata_schema_items[name]
            master_schema["properties"][name] = schema_item.schema
        for name in self.required_item_names:
            master_schema["required"].append(name)

        return master_schema

    def get_schema(self, name):
        """Return metadata schema.

        :param name: name of the metadata
        :returns: :class:`metadata.MetadataSchemaItem`
        """
        return self._metadata_schema_items[name]

    def get_value(self, name):
        """Return metadata value.

        :param name: name of the metadata
        :returns: the value of the metadata
        """
        if name not in self._metadata_values:
            return None
        return self._metadata_values[name]

    def set_value(self, name, value):
        """Set the metadata value.

        :param name: name of the metadata
        :param value: value to set the metadata to
        """
        self._metadata_values[name] = value

    def set_value_from_str(self, name, value_as_str):
        """Set the metadata value from a string forcing the type.

        :param name: name of the metadata
        :param value_as_str: string representing the value of the metadata
        """
        type_ = self.get_schema(name).type
        if type_ == "string":
            if value_as_str == "":
                self.set_value(name, None)
            else:
                self.set_value(name, value_as_str)
        elif type_ == "integer":
            try:
                logger.info("Forcing type to integer")
                self.set_value(name, int(value_as_str))
            except ValueError:
                logger.warning("Could not force to integer")
                self.set_value(name, None)
        elif type_ == "number":
            try:
                logger.info("Forcing type to float")
                self.set_value(name, float(value_as_str))
            except ValueError:
                logger.warning("Could not force to float")
                self.set_value(name, None)
        elif type_ == "boolean":
            logger.info("Forcing type to bool")
            if value_as_str == "True":
                self.set_value(name, True)
            elif value_as_str == "False":
                self.set_value(name, False)
            else:
                logger.warning("Could not force to bool")
                self.set_value(name, None)
        else:
            raise(UnsupportedTypeError("{} not supported yet".format(type_)))

    def is_okay(self, name):
        """Validate the metadata value against its schema.
        :param name: name of the metadata
        :returns: True if the value is valid
        """
        schema = self.get_schema(name)
        value = self.get_value(name)
        return schema.is_okay(value)

    def issues(self, name):
        """Return list of issues with specific metadata item.

        :returns: list of issues
        """
        _issues = []
        schema = self.get_schema(name)
        value = self.get_value(name)
        if value is not None:
            for i in schema.issues(value):
                _issues.append(str(i))
        return _issues

    def select_optional_item(self, name):
        "Mark an optional metadata item as selected."
        if name in self.optional_item_names:
            self._selected_optional_item_names.add(name)

    def deselect_optional_item(self, name):
        "Mark an optional metadata item as not selected."
        if name in self.selected_optional_item_names:
            self._selected_optional_item_names.remove(name)


class DataSetModel(object):
    "Model for working with a frozen dataset."

    def __init__(self):
        self._dataset = None
        self._metadata_model = None

    @property
    def name(self):
        """Return the name of the loaded dataset.

        :returns: name of the dataset or None if the dataset has not been set
        """
        if self._dataset is None:
            return None
        return self._dataset.name

    @property
    def metadata_model(self):
        """Return the metadata model.

        :returns: :class:`dtool_gui_tk.models.MetadataModel` instance
        """
        return self._metadata_model

    @property
    def is_empty(self):
        """Return True if no dataset has been loaded or if the model has been
        cleared.

        :returns: boolean
        """
        if self._dataset is None:
            return True
        return False

    def list_tags(self):
        """Return the underlying dataset's tags.

        :returns: list
        """
        if self._dataset is None:
            return []
        return self._dataset.list_tags()

    def put_tag(self, tag):
        """Add tag to underlying dataset.

        :param tag: new tag
        """
        self._dataset.put_tag(tag)

    def delete_tag(self, tag):
        """Delete tag from underlying dataset.

        :param tag: tag
        """
        self._dataset.delete_tag(tag)

    def clear(self):
        """Clear the model of existing data."""
        self._dataset = None
        self._metadata_model = None

    def load_dataset(self, uri):
        """Load the dataset from a URI.

        :param uri: URI to a dtoolcore.DataSet
        """
        logger.info("{} loading dataset from URI: {}".format(self, uri))
        self.clear()
        self._dataset = dtoolcore.DataSet.from_uri(uri)
        self._metadata_model = metadata_model_from_dataset(self._dataset)

    def get_item_props_list(self):
        """Return list of dict of properties for each item in the dataset."""
        item_props_list = []
        for identifier in self._dataset.identifiers:
            props = self._dataset.item_properties(identifier)
            item_props_list.append({
                "identifier": identifier,
                "relpath": props["relpath"],
                "size_int": props["size_in_bytes"],
                "size_str": sizeof_fmt(props["size_in_bytes"])
            })
        return sorted(item_props_list, key=itemgetter("relpath"))

    def update_name(self, name):
        """Update the name of the dataset.

        :param name: new dataset name
        """
        self._dataset.update_name(name)

    def update_metadata(self):
        """Update dataset with any changes made to the metadata model.

        Sets all the metadata for all
        :attr:`dtool_gui_tk.models.MetadataModel.in_scope_item_names`.

        Both the dataset readme and annotations are updated.
        :raises dtool_gui_tk.models.MetadataValidationError: if the metadata
            value is not valid according to its schema
        :raises dtool_gui_tk.models.MissingMetadataModelError: if the metadata
            model has not been set
        :raises dtool_gui_tk.models.MissingRequiredMetadataError: if a
            required metadata value has not been set
        """
        if self._metadata_model is None:
            raise(MissingMetadataModelError("Metadata model has not been set"))

        for name in self.metadata_model.required_item_names:
            metadata = self.metadata_model.get_value(name)
            if metadata is None:
                raise(MissingRequiredMetadataError(
                    "Missing required metadata: {}".format(name)
                ))

        for name in self.metadata_model.in_scope_item_names:
            if not self.metadata_model.is_okay(name):
                value = self.metadata_model.get_value(name)
                raise(MetadataValidationError(
                    "Metadata {} value not valid: {}".format(name, value)
                ))

        readme_lines = ["---"]
        for key in self.metadata_model.in_scope_item_names:
            value = self.metadata_model.get_value(key)
            self._dataset.put_annotation(key, value)
            readme_lines.append("{}: {}".format(key, value))
        readme_content = "\n".join(readme_lines)
        self._dataset.put_readme(readme_content)

        # Update _metadata_schema annotation.
        metadata_schema = self.metadata_model.get_master_schema()
        self._dataset.put_annotation(
            METADATA_SCHEMA_ANNOTATION_NAME,
            metadata_schema
        )


class ProtoDataSetModel(object):
    "Model for building up and creating a dataset."

    def __init__(self):
        self._name = None
        self._input_directory = None
        self._base_uri_model = None
        self._metadata_model = None
        self._uri = None

    @property
    def name(self):
        """Return the name to use for the dataset.

        :returns: name of the dataset or None if it has not been set
        """
        return self._name

    @property
    def base_uri(self):
        """Return the base URI for the dataset.

        :returns: base URI or None if it has not been set
        """
        return self._base_uri_model.get_base_uri()

    @property
    def input_directory(self):
        """Return the path to the input directory.

        :returns: input data directory path or None if it has not been set
        """
        return self._input_directory

    @property
    def metadata_model(self):
        """Return the metadata model.

        :returns: :class:`dtool_gui_tk.models.MetadataModel` instance or None
                  if it has not been set
        """
        return self._metadata_model

    @property
    def uri(self):
        """Return the URI of the created dataset.

        :returns: dataset URI or None if it has not been set
        """
        return self._uri

    def _yield_path_handle_tuples(self):
        path_length = len(self.input_directory) + 1

        for dirpath, dirnames, filenames in os.walk(self.input_directory):
            for fn in filenames:
                path = os.path.join(dirpath, fn)
                handle = path[path_length:]
                if dtoolcore.utils.IS_WINDOWS:
                    handle = dtoolcore.utils.windows_to_unix_path(handle)  # NOQA
                yield (path, handle)

    def set_name(self, name):
        """Set the name to use for the dataset.

        :param name: dataset name
        """
        self._name = name

    def set_input_directory(self, input_directory):
        """Set the input directory for the dataset creation process.

        :param input_directory: path to the input directory
        :raises: dtool_gui_tk.models.DirectoryDoesNotExistError if the input
                 directory does not exist
        """
        if not os.path.isdir(input_directory):
            raise(DirectoryDoesNotExistError(
                "Cannot set input directory to: {}".format(input_directory)
            ))
        self._input_directory = input_directory

    def set_base_uri_model(self, base_uri_model):
        """Set the base URI model.

        :param base_uri_model: :class:`dtool_gui_tk.models.LocalBaseURIModel`
        """
        self._base_uri_model = base_uri_model

    def set_metadata_model(self, metadata_model):
        """Set the metadata model.
        :param metadata_model: :class:`dtool_gui_tk.models.MetadataModel`
        """
        self._metadata_model = metadata_model

    def create(self, progressbar=None):
        """Create the dataset in the base URI.

        :raises dtool_gui_tk.models.MissingInputDirectoryError: if the input
            directory has not been set
        :raises dtool_gui_tk.models.MissingDataSetNameError: if the dataset
            name has not been set.
        :raises dtool_gui_tk.models.MissingBaseURIModelError: if the base
            URI model has not been set.
        :raises dtool_gui_tk.models.MissingMetadataModelError: if the metadata
            model has not been set.
        """

        if self._name is None:
            raise(MissingDataSetNameError("Dataset name has not been set"))

        if self._input_directory is None:
            raise(MissingInputDirectoryError("Input directory has not been set"))  # NOQA

        if self._base_uri_model is None:
            raise(MissingBaseURIModelError("Base URI model has not been set"))

        if self._metadata_model is None:
            raise(MissingMetadataModelError("Metadata model has not been set"))

        for name in self.metadata_model.required_item_names:
            metadata = self.metadata_model.get_value(name)
            if metadata is None:
                raise(MissingRequiredMetadataError(
                    "Missing required metadata: {}".format(name)
                ))

        for name in self.metadata_model.in_scope_item_names:
            if not self.metadata_model.is_okay(name):
                value = self.metadata_model.get_value(name)
                raise(MetadataValidationError(
                    "Metadata {} value not valid: {}".format(name, value)
                ))

        with dtoolcore.DataSetCreator(self.name, self.base_uri) as ds_creator:

            # Add metadata.
            readme_lines = ["---"]
            for key in self.metadata_model.in_scope_item_names:
                value = self.metadata_model.get_value(key)
                ds_creator.put_annotation(key, value)
                readme_lines.append("{}: {}".format(key, value))
            ds_creator.put_readme("\n".join(readme_lines))

            # Add the metadata schema.
            metadata_schema = self.metadata_model.get_master_schema()
            ds_creator.put_annotation(
                METADATA_SCHEMA_ANNOTATION_NAME,
                metadata_schema
            )

            # Add data items.
            for fpath, handle in self._yield_path_handle_tuples():
                ds_creator.put_item(fpath, handle)
                if progressbar is not None:
                    progressbar.update(1)

            self._uri = ds_creator.uri


class DataSetListModel(object):
    "Model for managing datasets in a base URI."

    def __init__(self):
        self._base_uri_model = None
        self._datasets = []
        self._datasets_info = []
        self._active_index = None
        self._tag_filter = None
        self._all_tags = set()

    @property
    def base_uri(self):
        """Return base URI.

        :returns: base URI
        """
        if self._base_uri_model is None:
            return None
        return self._base_uri_model.get_base_uri()

    @property
    def active_index(self):
        return self._active_index

    @property
    def names(self):
        """Return list of dataset names.

        :returns: list of dataset names
        """
        return [ds.name for ds in self._datasets]

    @property
    def tag_filter(self):
        """Return the tag filter.

        :returns: tag filter
        """
        return self._tag_filter

    def set_base_uri_model(self, base_uri_model):
        """Set the base URI model.

        :param base_uri_model: dtool_gui_tk.models.LocalBaseURIModel
        """
        self._base_uri_model = base_uri_model
        if self._base_uri_model.get_base_uri() is not None:
            self.reindex()

    def set_tag_filter(self, tag):
        """Set the tag filter.

        :param tag: tag string
        """
        self._tag_filter = tag
        if self._base_uri_model.get_base_uri() is not None:
            self.reindex()

    def get_active_uri(self):
        """Return the URI of the dataset at the active index.
        """
        if self.active_index is None:
            return None
        return self._datasets[self.active_index].uri

    def get_active_name(self):
        """Return the name of the dataset at the active index.
""" if self.active_index is None: return None return self._datasets[self.active_index].name def set_active_index(self, index): """Set the active_index. :raises: IndexError if the index is invalid """ if len(self._datasets) == 0: # No datasets in the model. raise(IndexError()) if index < 0: # Can't have a negative index. raise(IndexError()) if index >= len(self._datasets): raise(IndexError()) self._active_index = index def list_tags(self): """Return list of unique tags from all datasets. :returns: list of all unique tags """ return sorted(list(self._all_tags)) def reindex(self): """Index the base URI.""" self._datasets = [] self._datasets_info = [] self._active_index = None self._all_tags = set() base_uri = self._base_uri_model.get_base_uri() if base_uri is None: return for ds in dtoolcore.iter_datasets_in_base_uri(base_uri): append_okay = True ds_tags = set(ds.list_tags()) self._all_tags.update(ds_tags) if self.tag_filter is not None and self.tag_filter not in ds_tags: append_okay = False if append_okay: self._datasets.append(ds) self._datasets_info.append(_dataset_info(ds)) # The initial active index is 0 if there are datasets in the model. if len(self._datasets) > 0: self._active_index = 0 def sort(self, key="name", reverse=False): """Sort the datasets by items properties.""" logger.info("Sorting using key={}, reverse={}".format(key, reverse)) assert key in ("name", "size_int", "num_items", "creator", "date") # Nothing to sort if there are no datasets. if len(self._datasets) == 0: return sort_values = [p[key] for p in self._datasets_info] zipped_lists = zip(sort_values, self._datasets, self._datasets_info) sorted_pairs = sorted(zipped_lists, key=itemgetter(0), reverse=reverse) tuples = zip(*sorted_pairs) _, self._datasets, self._datasets_info = [list(t) for t in tuples] def yield_properties(self): """Return iterable that yields dictionaries with dataset properties.""" for info in self._datasets_info: yield info ``` #### File: dtool-gui-tk/tests/test_models_module.py ```python import os from . import tmp_dir_fixture # NOQA import pytest def test_LocalBaseURIModel(tmp_dir_fixture): # NOQA from dtool_gui_tk.models import LocalBaseURIModel import dtoolcore.utils config_path = os.path.join(tmp_dir_fixture, "config.json") assert not os.path.isfile(config_path) base_uri_path = os.path.join(tmp_dir_fixture, "datasets") base_uri = dtoolcore.utils.sanitise_uri(base_uri_path) base_uri_model = LocalBaseURIModel(config_path=config_path) assert base_uri_model.get_base_uri() is None # Configure the base URI. base_uri_model.put_base_uri(base_uri_path) assert os.path.isfile(config_path) assert base_uri_model.get_base_uri() == base_uri another_base_uri_model = LocalBaseURIModel(config_path=config_path) assert another_base_uri_model.get_base_uri() == base_uri def test_MetadataModel(): from dtool_gui_tk.models import MetadataModel metadata_model = MetadataModel() assert metadata_model.is_empty master_schema = { "type": "object", "properties": { "project": {"type": "string", "minLength": 3, "maxLength": 80}, "species": { "type": "string", "enum": ["A. australe", "A. 
barrelieri"] }, "age": {"type": "integer", "minimum": 0, "maximum": 90} }, "required": ["project"] } metadata_model.load_master_schema(master_schema) assert not metadata_model.is_empty assert metadata_model.required_item_names == ["project"] expected_item_names = sorted(master_schema["properties"].keys()) assert metadata_model.item_names == expected_item_names from dtool_gui_tk.metadata import MetadataSchemaItem project_schema = master_schema["properties"]["project"] project_metadata_schema_item = MetadataSchemaItem(project_schema) assert metadata_model.get_schema("project") == project_metadata_schema_item # NOQA # At this point no values for any metadata has been set. assert metadata_model.get_value("project") is None # Test setting the project. metadata_model.set_value("project", "dtool-gui") assert metadata_model.get_value("project") == "dtool-gui" # Test updating the project. metadata_model.set_value("project", "updated-name") assert metadata_model.get_value("project") == "updated-name" # It is possible to set values that would fail validation. metadata_model.set_value("age", "not a number") assert metadata_model.get_value("age") == "not a number" # It is up to the client to check that the value is valid. assert not metadata_model.is_okay("age") # Fix the metadata and check again. metadata_model.set_value("age", 10) assert metadata_model.is_okay("age") # Check that generated master schema matches the input master schema. assert metadata_model.get_master_schema() == master_schema # Test clearing metadata model. metadata_model.clear() assert metadata_model.is_empty def test_MetadataModelSchemaBuilderAPI(): from dtool_gui_tk.models import MetadataModel metadata_model = MetadataModel() assert metadata_model.required_item_names == [] assert metadata_model.item_names == [] # Initially the metadata model has not got any "properties". empty_schema = { "type": "object", "properties": {}, "required": [] } assert metadata_model.get_master_schema() == empty_schema # Add some "properties" to the master metadata schema. project_schema = {"type": "string", "minLength": 3, "maxLength": 80} metadata_model.add_metadata_property( name="project", schema=project_schema, required=True ) metadata_model.add_metadata_property( name="age", schema={"type": "integer", "minimum": 0, "maximum": 90}, required=False ) assert metadata_model.required_item_names == ["project"] assert metadata_model.item_names == ["age", "project"] from dtool_gui_tk.metadata import MetadataSchemaItem project_metadata_schema_item = MetadataSchemaItem(project_schema) assert metadata_model.get_schema("project") == project_metadata_schema_item # NOQA populated_schema = { "type": "object", "properties": { "project": {"type": "string", "minLength": 3, "maxLength": 80}, "age": {"type": "integer", "minimum": 0, "maximum": 90} }, "required": ["project"] } assert metadata_model.get_master_schema() == populated_schema def test_MetadataModel_selected_API(): # A MetadataModel can have "optional" meta data items. # For these to be used they need to be "selected" first." from dtool_gui_tk.models import MetadataModel metadata_model = MetadataModel() master_schema = { "type": "object", "properties": { "project": {"type": "string", "minLength": 3, "maxLength": 80}, "species": { "type": "string", "enum": ["A. australe", "A. barrelieri"] }, "age": {"type": "integer", "minimum": 0, "maximum": 90} }, "required": ["project"] } metadata_model.load_master_schema(master_schema) # Initially no optional items are selected. 
    assert metadata_model.optional_item_names == ["age", "species"]
    assert metadata_model.selected_optional_item_names == []
    assert metadata_model.deselected_optional_item_names == ["age", "species"]

    # Select an optional item.
    metadata_model.select_optional_item("species")
    assert metadata_model.optional_item_names == ["age", "species"]
    assert metadata_model.selected_optional_item_names == ["species"]
    assert metadata_model.deselected_optional_item_names == ["age"]

    # Do nothing quietly if the same action is called again.
    metadata_model.select_optional_item("species")
    assert metadata_model.optional_item_names == ["age", "species"]
    assert metadata_model.selected_optional_item_names == ["species"]
    assert metadata_model.deselected_optional_item_names == ["age"]

    # Deselect an optional item.
    metadata_model.deselect_optional_item("species")
    assert metadata_model.optional_item_names == ["age", "species"]
    assert metadata_model.selected_optional_item_names == []
    assert metadata_model.deselected_optional_item_names == ["age", "species"]

    # Do nothing quietly if the same action is called again.
    metadata_model.deselect_optional_item("species")
    assert metadata_model.optional_item_names == ["age", "species"]
    assert metadata_model.selected_optional_item_names == []
    assert metadata_model.deselected_optional_item_names == ["age", "species"]

    # Required and selected metadata is "in scope" to be processed etc.
    expected = metadata_model.required_item_names + metadata_model.selected_optional_item_names  # NOQA
    assert metadata_model.in_scope_item_names == expected


def test_MetadataModel_issues_API():

    from dtool_gui_tk.models import MetadataModel

    metadata_model = MetadataModel()

    master_schema = {
        "type": "object",
        "properties": {
            "project": {"type": "string", "minLength": 3, "maxLength": 80},
            "species": {
                "type": "string",
                "enum": ["A. australe", "A. barrelieri"]
            },
            "age": {"type": "integer", "minimum": 0, "maximum": 90}
        },
        "required": ["project"]
    }

    metadata_model.load_master_schema(master_schema)
    metadata_model.set_value("project", "x")

    assert len(metadata_model.all_issues) == 1
    assert metadata_model.all_issues[0] == ("project", "'x' is too short")

    assert len(metadata_model.issues("project")) == 1
    assert metadata_model.issues("project") == ["'x' is too short"]


def test_MetadataModel_str_to_typed():

    from dtool_gui_tk.models import MetadataModel, UnsupportedTypeError

    master_schema = {
        "type": "object",
        "properties": {
            "project": {"type": "string"},
            "age": {"type": "integer"},
            "time": {"type": "number"},
            "is_amazing": {"type": "boolean"},
        }
    }

    metadata_model = MetadataModel()
    metadata_model.load_master_schema(master_schema)

    metadata_model.set_value_from_str("project", "test")
    assert isinstance(metadata_model.get_value("project"), str)

    metadata_model.set_value_from_str("age", "2")
    assert isinstance(metadata_model.get_value("age"), int)

    metadata_model.set_value_from_str("time", "5")
    assert isinstance(metadata_model.get_value("time"), float)

    metadata_model.set_value_from_str("is_amazing", "True")
    assert isinstance(metadata_model.get_value("is_amazing"), bool)

    metadata_model.set_value_from_str("project", "")
    assert metadata_model.get_value("project") is None

    # If forced typing fails, set to None.
metadata_model.set_value_from_str("age", "not-an-integer") assert metadata_model.get_value("age") is None metadata_model.set_value_from_str("time", "not-a-float") assert metadata_model.get_value("time") is None metadata_model.set_value_from_str("is_amazing", "not-a-bool") assert metadata_model.get_value("is_amazing") is None metadata_model.add_metadata_property("not_supported", {"type": "object"}) with pytest.raises(UnsupportedTypeError): metadata_model.set_value_from_str("not_supported", {"age": 3}) def test_ProtoDataSetModel(tmp_dir_fixture): # NOQA from dtool_gui_tk.models import ProtoDataSetModel proto_dataset_model = ProtoDataSetModel() assert proto_dataset_model.name is None proto_dataset_model.set_name("my-dataset") assert proto_dataset_model.name == "my-dataset" assert proto_dataset_model.input_directory is None proto_dataset_model.set_input_directory(tmp_dir_fixture) assert proto_dataset_model.input_directory == tmp_dir_fixture from dtool_gui_tk.models import DirectoryDoesNotExistError with pytest.raises(DirectoryDoesNotExistError): proto_dataset_model.set_input_directory("does not exist") assert proto_dataset_model.input_directory == tmp_dir_fixture def test_DataSetListModel(tmp_dir_fixture): # NOQA from dtool_gui_tk.models import DataSetListModel, LocalBaseURIModel dataset_list_model = DataSetListModel() assert dataset_list_model.base_uri is None # Create and configure a base URI and BaseURIModel. base_uri_directory = os.path.join(tmp_dir_fixture, "datasets") os.mkdir(base_uri_directory) config_path = os.path.join(tmp_dir_fixture, "dtool-gui.json") base_uri_model = LocalBaseURIModel(config_path) base_uri_model.put_base_uri(base_uri_directory) # Add the base URI model to the dataset list model. base_uri = base_uri_model.get_base_uri() dataset_list_model.set_base_uri_model(base_uri_model) assert dataset_list_model.base_uri == base_uri assert len(dataset_list_model.names) == 0 assert dataset_list_model.active_index is None with pytest.raises(IndexError): dataset_list_model.set_active_index(0) assert dataset_list_model.get_active_uri() is None # Create three empty datasets in the base URI. from dtoolcore import DataSetCreator dataset_names = sorted(["ds1", "ds2", "ds3"]) creator_usernames = ("not", "in", "order") dataset_uris = {} for (ds_name, creator_name) in zip(dataset_names, creator_usernames): with DataSetCreator( name=ds_name, base_uri=base_uri, creator_username=creator_name ) as ds_creator: dataset_uris[ds_name] = ds_creator.uri # Need to update the dataset list model for the datasets to be discovered. assert len(dataset_list_model.names) == 0 # Update the dataset_list_model. dataset_list_model.reindex() # Test active_index and set_active_index. assert dataset_list_model.active_index == 0 with pytest.raises(IndexError): dataset_list_model.set_active_index(-1) with pytest.raises(IndexError): dataset_list_model.set_active_index(4) dataset_list_model.set_active_index(2) assert dataset_list_model.active_index == 2 dataset_list_model.reindex() assert dataset_list_model.active_index == 0 # Access list of dataset names. assert dataset_list_model.names == dataset_names # Get URI from name. for i, name in enumerate(dataset_list_model.names): dataset_list_model.set_active_index(i) assert dataset_list_model.get_active_uri() == dataset_uris[name] assert dataset_list_model.get_active_name() == name # Test yield_properties. 
    props_generator = dataset_list_model.yield_properties()
    try:
        from collections.abc import Iterable
    except ImportError:
        from collections import Iterable
    assert isinstance(props_generator, Iterable)
    first = next(props_generator)
    assert "name" in first
    assert first["name"] == "ds1"
    assert "creator" in first
    assert first["creator"] == "not"

    # Test yield_properties with sorting.
    dataset_list_model.sort(key="creator")
    props_generator = dataset_list_model.yield_properties()
    try:
        from collections.abc import Iterable
    except ImportError:
        from collections import Iterable
    assert isinstance(props_generator, Iterable)
    first = next(props_generator)
    assert "name" in first
    assert first["name"] == "ds2"
    assert "creator" in first
    assert first["creator"] == "in"

    # Test yield_properties.
    dataset_list_model.sort(reverse=True)
    props_generator = dataset_list_model.yield_properties()
    try:
        from collections.abc import Iterable
    except ImportError:
        from collections import Iterable
    assert isinstance(props_generator, Iterable)
    first = next(props_generator)
    assert "name" in first
    assert first["name"] == "ds3"
    assert "creator" in first
    assert first["creator"] == "order"


def test_DataSetListModel_filter_by_tag(tmp_dir_fixture):  # NOQA

    from dtool_gui_tk.models import DataSetListModel, LocalBaseURIModel

    # Create and configure a base URI and BaseURIModel.
    base_uri_directory = os.path.join(tmp_dir_fixture, "datasets")
    os.mkdir(base_uri_directory)
    config_path = os.path.join(tmp_dir_fixture, "dtool-gui.json")
    base_uri_model = LocalBaseURIModel(config_path)
    base_uri_model.put_base_uri(base_uri_directory)

    # Create and configure a DataSetListModel.
    dataset_list_model = DataSetListModel()
    base_uri = base_uri_model.get_base_uri()
    dataset_list_model.set_base_uri_model(base_uri_model)

    # Create three empty datasets in the base URI.
    from dtoolcore import DataSetCreator
    dataset_names = sorted(["ds1", "ds2", "ds3"])
    creator_usernames = ("not", "in", "order")
    dataset_uris = {}
    for (ds_name, creator_name) in zip(dataset_names, creator_usernames):
        with DataSetCreator(
            name=ds_name,
            base_uri=base_uri,
            creator_username=creator_name
        ) as ds_creator:
            if ds_name != "ds2":
                ds_creator.put_tag("some")
            else:
                ds_creator.put_tag("one")
            ds_creator.put_tag("all")
            dataset_uris[ds_name] = ds_creator.uri
    dataset_list_model.reindex()

    # Test without filtering.
    assert dataset_list_model.tag_filter is None
    names = [prop["name"] for prop in dataset_list_model.yield_properties()]
    assert names == ["ds1", "ds2", "ds3"]

    # Test with filtering.
    dataset_list_model.set_tag_filter("some")
    assert dataset_list_model.tag_filter == "some"
    names = [prop["name"] for prop in dataset_list_model.yield_properties()]
    assert names == ["ds1", "ds3"]

    # Test with filtering and sorting.
    dataset_list_model.sort(reverse=True)
    names = [prop["name"] for prop in dataset_list_model.yield_properties()]
    assert names == ["ds3", "ds1"]

    # Test remove filtering.
    dataset_list_model.set_tag_filter(None)
    assert dataset_list_model.tag_filter is None
    names = [prop["name"] for prop in dataset_list_model.yield_properties()]
    assert names == ["ds1", "ds2", "ds3"]

    # List all the unique tags.
    assert dataset_list_model.list_tags() == ["all", "one", "some"]

    # Delete the "one" tag.
from dtoolcore import DataSet dataset_list_model.set_tag_filter("one") uri = dataset_list_model.get_active_uri() ds = DataSet.from_uri(uri) ds.delete_tag("one") dataset_list_model.reindex() assert dataset_list_model.list_tags() == ["all", "some"] def test_MetadataSchemaListModel(tmp_dir_fixture): # NOQA from dtool_gui_tk.models import MetadataSchemaListModel config_path = os.path.join(tmp_dir_fixture, "config.json") assert not os.path.isfile(config_path) metadata_schema_dir = os.path.join(tmp_dir_fixture, "metadata_schemas") os.mkdir(metadata_schema_dir) metadata_schema_list_model = MetadataSchemaListModel(config_path=config_path) # NOQA assert metadata_schema_list_model.get_metadata_schema_directory() is None assert metadata_schema_list_model.metadata_model_names == [] # Configure the metadata schema directory. metadata_schema_list_model.put_metadata_schema_directory(metadata_schema_dir) # NOQA assert os.path.isfile(config_path) assert metadata_schema_list_model.get_metadata_schema_directory() == metadata_schema_dir # NOQA another_model = MetadataSchemaListModel(config_path=config_path) assert another_model.get_metadata_schema_directory() == metadata_schema_dir # Add a schema manually. basic_schema = { "type": "object", "properties": { "project": {"type": "string"} } } import json fpath = os.path.join(metadata_schema_dir, "basic.json") with open(fpath, "w") as fh: json.dump(basic_schema, fh) assert another_model.metadata_model_names == ["basic"] # Add another schema using API. advanced_schema = { "type": "object", "properties": { "project": {"type": "string", "minLength": 6, "maxLength": 80} }, "required": ["project"] } another_model.put_metadata_schema_item( name="advanced", metadata_schema=advanced_schema ) assert another_model.metadata_model_names == ["advanced", "basic"] # Test schema retrieval. from dtool_gui_tk.models import MetadataModel advanced_model = MetadataModel() advanced_model.load_master_schema(advanced_schema) accessed_model = metadata_schema_list_model.get_metadata_model("advanced") # NOQA assert advanced_model == accessed_model def test_DataSetModel_basic(tmp_dir_fixture): # NOQA # Create an empty dataset model. from dtool_gui_tk.models import DataSetModel dataset_model = DataSetModel() assert dataset_model.name is None assert dataset_model.metadata_model is None assert dataset_model.is_empty # Create a dataset. from dtoolcore import DataSetCreator dataset_name = "my-dataset" annotations = {"project": "my-project", "description": "my-description"} readme_lines = ["---"] with DataSetCreator( name=dataset_name, base_uri=tmp_dir_fixture ) as ds_creator: # Add some metadata. for key in sorted(annotations.keys()): value = annotations[key] ds_creator.put_annotation(key, value) readme_lines.append("{}: {}".format(key, value)) readme = "\n".join(readme_lines) ds_creator.put_readme(readme) # Add some items. for animal in ["cat", "tiger"]: handle = animal + ".txt" fpath = ds_creator.prepare_staging_abspath_promise(handle) with open(fpath, "w") as fh: fh.write(animal) uri = ds_creator.uri dataset_model.load_dataset(uri) assert not dataset_model.is_empty # Check that it has the right properties. 
assert dataset_model.name == dataset_name expected_schema = { "type": "object", "properties": { "project": {"type": "string"}, "description": {"type": "string"} }, "required": ["description", "project"] } assert dataset_model.metadata_model.get_master_schema() == expected_schema for key in annotations.keys(): assert dataset_model.metadata_model.get_value(key) == annotations[key] # Test the get_item_props_list method. expected_content = [ { 'identifier': 'e55aada093b34671ec2f9467fe83f0d3d8c31f30', 'relpath': 'cat.txt', 'size_int': 3, 'size_str': ' 3.0B ' }, { 'identifier': '433635d53dae167009941349491abf7aae9becbd', 'relpath': 'tiger.txt', 'size_int': 5, 'size_str': ' 5.0B ' }, ] assert dataset_model.get_item_props_list() == expected_content # Check that one can update the properties on the actual dataset. from dtoolcore import DataSet dataset_model.update_name("new-name") assert dataset_model.name == "new-name" dataset = DataSet.from_uri(uri) assert dataset.name == "new-name" dataset_model.metadata_model.set_value("project", "new-project") assert dataset_model.metadata_model.get_value("project") == "new-project" # Updating the metadata in the metadata model requires a call to # DataSetModel.update_metadata() assert dataset.get_annotation("project") == "my-project" dataset_model.update_metadata() assert dataset.get_annotation("project") == "new-project" # DataSetModel.update_metadata() should raise MissingRequiredMetadataError # and MetadataValidationError if appropriate. from dtool_gui_tk.models import ( MissingRequiredMetadataError, MetadataValidationError ) # Add new required metadata property, # but don't set it so that it is missing. dataset_model.metadata_model.add_metadata_property( name="age", schema={"type": "integer", "exclusiveMinimum": 0}, required=True ) with pytest.raises(MissingRequiredMetadataError): dataset_model.update_metadata() # Set the age metadata to an invalid value. dataset_model.metadata_model.set_value("age", -1) with pytest.raises(MetadataValidationError): dataset_model.update_metadata() # Test with valid value. # Set the age metadata to an invalid value. dataset_model.metadata_model.set_value("age", 1) dataset_model.update_metadata() assert dataset.get_annotation("age") == 1 # When the dataset is updated the special _metadata_schema annotation is # also updated. expected_schema = dataset_model.metadata_model.get_master_schema() assert dataset.get_annotation("_metadata_schema") == expected_schema # If the metadata model is missing DataSetModel.update_metadata # should raise MissingMetadataModelError. from dtool_gui_tk.models import MissingMetadataModelError dataset_model._metadata_model = None with pytest.raises(MissingMetadataModelError): dataset_model.update_metadata() # Test clearing the DataSetModel. dataset_model.clear() assert dataset_model.is_empty def test_DataSetModel_update_metadata_works_on_annotations_and_readme(tmp_dir_fixture): # NOQA # Create a basic dataset. from dtoolcore import DataSetCreator, DataSet with DataSetCreator("my-dataset", tmp_dir_fixture) as ds_creator: ds_creator.put_annotation("project", "test") # Create a dataset model from the dataset from dtool_gui_tk.models import DataSetModel dataset_model = DataSetModel() dataset_model.load_dataset(ds_creator.uri) # Add an optional metadata and set it. dataset_model.metadata_model.add_metadata_property("age", {"type": "integer"}) # NOQA dataset_model.metadata_model.set_value("age", 3) dataset_model.metadata_model.select_optional_item("age") # Load the dataset. 
dataset = DataSet.from_uri(ds_creator.uri) # Dataset before metadata update. expected_readme = "" assert dataset.get_readme_content() == expected_readme expected_annotation_keys = ["project"] assert dataset.list_annotation_names() == expected_annotation_keys # NOQA # Update the metadata. dataset_model.update_metadata() # Dataset after metadata update. expected_readme = "---\nproject: test\nage: 3" assert dataset.get_readme_content() == expected_readme expected_annotation_keys = ["_metadata_schema", "age", "project"] assert dataset.list_annotation_names() == expected_annotation_keys # NOQA def test_DataSetModel_tags(tmp_dir_fixture): # NOQA # Create a basic dataset. from dtoolcore import DataSetCreator with DataSetCreator("my-dataset", tmp_dir_fixture) as ds_creator: ds_creator.put_annotation("project", "test") # Create a dataset model from the dataset from dtool_gui_tk.models import DataSetModel dataset_model = DataSetModel() assert dataset_model.list_tags() == [] dataset_model.load_dataset(ds_creator.uri) assert dataset_model.list_tags() == [] dataset_model.put_tag("testtag") assert dataset_model.list_tags() == ["testtag"] # Check idempotency dataset_model.put_tag("testtag") assert dataset_model.list_tags() == ["testtag"] dataset_model.delete_tag("testtag") assert dataset_model.list_tags() == [] dataset_model.delete_tag("anothertag") assert dataset_model.list_tags() == [] def test_json_schema_from_dataset_only_readme(tmp_dir_fixture): # NOQA from dtoolcore import DataSet, DataSetCreator from dtool_gui_tk.models import metadata_model_from_dataset, MetadataModel # Only readme. readme = "---\nproject: test\nage: 3\ntemperature: 25.5" with DataSetCreator("only-readme", tmp_dir_fixture, readme) as ds_creator: uri = ds_creator.uri dataset = DataSet.from_uri(uri) # Create expected metadata model. expected_schema = { "type": "object", "properties": { "project": {"type": "string"}, "age": {"type": "integer"}, "temperature": {"type": "number"} }, "required": ["age", "project", "temperature"] } expected_metadata_model = MetadataModel() expected_metadata_model.load_master_schema(expected_schema) expected_metadata_model.set_value("project", "test") expected_metadata_model.set_value("age", 3) expected_metadata_model.set_value("temperature", 25.5) assert metadata_model_from_dataset(dataset) == expected_metadata_model def test_json_schema_from_dataset_only_annotations(tmp_dir_fixture): # NOQA from dtoolcore import DataSet, DataSetCreator from dtool_gui_tk.models import metadata_model_from_dataset, MetadataModel with DataSetCreator("only-annotations", tmp_dir_fixture) as ds_creator: ds_creator.put_annotation("an-int", 3) ds_creator.put_annotation("a-float", 3.5) ds_creator.put_annotation("a-string", "hello") ds_creator.put_annotation("a-bool", True) uri = ds_creator.uri dataset = DataSet.from_uri(uri) # Create expected metadata model. 
expected_schema = { "type": "object", "properties": { "an-int": {"type": "integer"}, "a-float": {"type": "number"}, "a-string": {"type": "string"}, "a-bool": {"type": "boolean"} }, "required": ["an-int", "a-float", "a-string", "a-bool"] } expected_metadata_model = MetadataModel() expected_metadata_model.load_master_schema(expected_schema) expected_metadata_model.set_value("an-int", 3) expected_metadata_model.set_value("a-float", 3.5) expected_metadata_model.set_value("a-string", "hello") expected_metadata_model.set_value("a-bool", True) actual_metadata_model = metadata_model_from_dataset(dataset) assert actual_metadata_model.item_names == expected_metadata_model.item_names # NOQA assert actual_metadata_model.required_item_names == expected_metadata_model.required_item_names # NOQA for name in expected_metadata_model.item_names: expected_schema = expected_metadata_model.get_schema(name) actual_schema = actual_metadata_model.get_schema(name) assert expected_schema == actual_schema expected_value = expected_metadata_model.get_value(name) actual_value = actual_metadata_model.get_value(name) assert expected_value == actual_value assert metadata_model_from_dataset(dataset) == expected_metadata_model # Unsupported type. from dtool_gui_tk.models import UnsupportedTypeError with DataSetCreator("unsupported-type", tmp_dir_fixture) as ds_creator: ds_creator.put_annotation("complex-object", {"x": 1, "y": 2}) ds_creator.put_annotation("an-int", 3) uri = ds_creator.uri dataset = DataSet.from_uri(uri) with pytest.raises(UnsupportedTypeError): metadata_model_from_dataset(dataset) def test_json_schema_from_dataset_readme_and_annotations_diverse_not_conflicting(tmp_dir_fixture): # NOQA from dtoolcore import DataSet, DataSetCreator from dtool_gui_tk.models import metadata_model_from_dataset, MetadataModel # Diverse but not conflicting. readme = "---\nproject: test" with DataSetCreator("readme-and-annotations", tmp_dir_fixture, readme) as ds_creator: # NOQA ds_creator.put_annotation("age", 3) uri = ds_creator.uri dataset = DataSet.from_uri(uri) # Create expected metadata model. expected_schema = { "type": "object", "properties": { "project": {"type": "string"}, "age": {"type": "integer"} }, "required": ["age", "project"] } expected_metadata_model = MetadataModel() expected_metadata_model.load_master_schema(expected_schema) expected_metadata_model.set_value("project", "test") expected_metadata_model.set_value("age", 3) actual_metadata_model = metadata_model_from_dataset(dataset) assert actual_metadata_model == expected_metadata_model def test_json_schema_from_dataset_readme_and_annotations_conflicting(tmp_dir_fixture): # NOQA from dtoolcore import DataSet, DataSetCreator from dtool_gui_tk.models import metadata_model_from_dataset # Identical but missing type. readme = "---\nproject: test\nage: 4" with DataSetCreator("readme-and-annotations", tmp_dir_fixture, readme) as ds_creator: # NOQA ds_creator.put_annotation("age", 3) uri = ds_creator.uri dataset = DataSet.from_uri(uri) from dtool_gui_tk.models import MetadataConflictError with pytest.raises(MetadataConflictError): metadata_model_from_dataset(dataset) def test_json_schema_from_dataset_schema_annotation(tmp_dir_fixture): # NOQA # Create a master metadata schema. metadata_schema = { "type": "object", "properties": { "project": {"type": "string"}, "age": {"type": "integer"} }, "required": ["project"] } # Create a dataset. 
from dtoolcore import DataSet, DataSetCreator with DataSetCreator("annotation-schema", tmp_dir_fixture) as ds_creator: # NOQA ds_creator.put_annotation("_metadata_schema", metadata_schema) dataset = DataSet.from_uri(ds_creator.uri) # Create the expected model from the schema. from dtool_gui_tk.models import MetadataModel expected_metadata_model = MetadataModel() expected_metadata_model.load_master_schema(metadata_schema) # Test that the function returns the correct model. from dtool_gui_tk.models import metadata_model_from_dataset actual_metadata_model = metadata_model_from_dataset(dataset) assert actual_metadata_model == expected_metadata_model def test_json_schema_from_dataset_schema_annotation_with_conflicting_type_in_readme_and_annotations(tmp_dir_fixture): # NOQA # Create a master metadata schema. metadata_schema = { "type": "object", "properties": { "project": {"type": "string"}, "age": {"type": "integer"} }, "required": ["project"] } # Create a dataset. from dtoolcore import DataSet, DataSetCreator readme = "---\nproject: 1" # Type is integer instead of string. with DataSetCreator("annotation-schema", tmp_dir_fixture, readme) as ds_creator: # NOQA ds_creator.put_annotation("_metadata_schema", metadata_schema) ds_creator.put_annotation("age", "old") # Type is string instead of integer # NOQA dataset = DataSet.from_uri(ds_creator.uri) # Create the expected model from the schema. from dtool_gui_tk.models import MetadataModel expected_metadata_model = MetadataModel() expected_metadata_model.load_master_schema(metadata_schema) expected_metadata_model.set_value("project", 1) # Expecting incorrect type. # NOQA expected_metadata_model.set_value("age", "old") # Expecting incorrect type. # NOQA # Test that the function returns the correct model. from dtool_gui_tk.models import metadata_model_from_dataset actual_metadata_model = metadata_model_from_dataset(dataset) assert actual_metadata_model == expected_metadata_model ```
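The tests in this entry pin down how `metadata_model_from_dataset` maps Python values from the readme and annotations onto JSON-schema types. A minimal standalone sketch of that mapping, with an illustrative function name (the real logic lives in `dtool_gui_tk.models`):

```python
def infer_json_schema_type(value):
    """Map a Python value to the JSON-schema type used in the master schema."""
    # bool must be checked before int, because bool is a subclass of int.
    if isinstance(value, bool):
        return "boolean"
    if isinstance(value, int):
        return "integer"
    if isinstance(value, float):
        return "number"
    if isinstance(value, str):
        return "string"
    raise TypeError("Unsupported type: {}".format(type(value)))


assert infer_json_schema_type(True) == "boolean"
assert infer_json_schema_type(3) == "integer"
assert infer_json_schema_type(3.5) == "number"
assert infer_json_schema_type("hello") == "string"
```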
{ "source": "jic-dtool/dtool-http", "score": 2 }
#### File: dtool-http/dtool_http/storagebroker.py ```python import logging import os import json import shutil import requests from dtoolcore.utils import ( get_config_value, mkdir_parents, generous_parse_uri, DEFAULT_CACHE_PATH, ) logger = logging.getLogger(__name__) HTTP_MANIFEST_KEY = 'http_manifest.json' class HTTPError(RuntimeError): pass class HTTPStorageBroker(object): """ Storage broker to interact with datasets over HTTP in a read only fashion. """ key = "http" def __init__(self, uri, admin_metadata, config_path=None): scheme, netloc, path, _, _, _ = generous_parse_uri(uri) self._uri = self._get_base_uri(uri) self.scheme = scheme self.netloc = netloc self.uuid = path[1:] http_manifest_url = self._uri + '/' + HTTP_MANIFEST_KEY self.http_manifest = self._get_json_from_url( http_manifest_url ) self._cache_abspath = get_config_value( "DTOOL_CACHE_DIRECTORY", config_path=config_path, default=DEFAULT_CACHE_PATH ) # Helper functions def _get_base_uri(self, url): r = requests.get(url) if r.status_code != 301: logger.info("Dataset moved, redirecting to: {}".format( r.url)) return r.url def _get_request(self, url, stream=False): r = requests.get(url, stream=stream) logger.info("Response status code: {}".format(r.status_code)) if r.status_code != 200: raise(HTTPError(r.status_code)) return r def _get_text_from_url(self, url): r = self._get_request(url) return r.text def _get_json_from_url(self, url): text = self._get_text_from_url(url) return json.loads(text) # Functions to allow dataset retrieval def get_admin_metadata(self): """Return administrative metadata as a dictionary.""" return self.http_manifest["admin_metadata"] def get_manifest(self): """Return the manifest as a dictionary.""" url = self.http_manifest["manifest_url"] return self._get_json_from_url(url) def get_readme_content(self): """Return content of the README file as a string.""" url = self.http_manifest["readme_url"] return self._get_text_from_url(url) def has_admin_metadata(self): """Return True if the administrative metadata exists. This is the definition of being a "dataset". """ return "admin_metadata" in self.http_manifest def get_item_abspath(self, identifier): """Return absolute path at which item content can be accessed. :param identifier: item identifier :returns: absolute path from which the item content can be accessed """ dataset_cache_abspath = os.path.join( self._cache_abspath, self.uuid ) mkdir_parents(dataset_cache_abspath) manifest = self.get_manifest() relpath = manifest['items'][identifier]['relpath'] _, ext = os.path.splitext(relpath) local_item_abspath = os.path.join( dataset_cache_abspath, identifier + ext ) if not os.path.isfile(local_item_abspath): url = self.http_manifest["item_urls"][identifier] r = self._get_request(url, stream=True) tmp_local_item_abspath = local_item_abspath + ".tmp" with open(tmp_local_item_abspath, 'wb') as f: shutil.copyfileobj(r.raw, f) os.rename(tmp_local_item_abspath, local_item_abspath) return local_item_abspath def get_overlay(self, overlay_name): """Return overlay as a dictionary. :param overlay_name: name of the overlay :returns: overlay as a dictionary """ url = self.http_manifest["overlays"][overlay_name] return self._get_json_from_url(url) def get_annotation(self, annotation_name): """Return annotation. 
:param annotation_name: name of the annotation
        :returns: annotation
        """
        url = self.http_manifest["annotations"][annotation_name]
        return self._get_json_from_url(url)

    def list_overlay_names(self):
        """Return list of overlay names."""
        if "overlays" not in self.http_manifest:
            return []
        return list(self.http_manifest["overlays"].keys())

    def list_annotation_names(self):
        """Return list of annotation names."""
        if "annotations" not in self.http_manifest:
            return []
        return list(self.http_manifest["annotations"].keys())

    def list_tags(self):
        """Return list of tags."""
        if "tags" not in self.http_manifest:
            return []
        return self.http_manifest["tags"]

    def list_dataset_uris(self, base_uri, config_path):
        """Return list of dataset URIs in the base URI."""
        return []

    def http_enable(self):
        """Return the URI from which the dataset can be accessed via HTTP."""
        return self._uri


class HTTPSStorageBroker(HTTPStorageBroker):
    """
    Storage broker to interact with datasets over HTTPS
    in a read only fashion.
    """

    key = "https"
```
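`get_item_abspath` above streams item content to a `.tmp` file and renames it into place, so an interrupted download never leaves a partial file at the cached path. A standalone sketch of that write-then-rename pattern (`atomic_copy` is a hypothetical helper, not part of dtool-http):

```python
import io
import os
import shutil
import tempfile


def atomic_copy(src_fileobj, dest_path):
    """Write a stream to dest_path via a temporary file, then rename.

    The rename is atomic on POSIX filesystems, so readers see either the
    complete file or nothing at all.
    """
    tmp_path = dest_path + ".tmp"
    with open(tmp_path, "wb") as f:
        shutil.copyfileobj(src_fileobj, f)
    os.rename(tmp_path, dest_path)


workdir = tempfile.mkdtemp()
dest = os.path.join(workdir, "item.txt")
atomic_copy(io.BytesIO(b"hello"), dest)
with open(dest, "rb") as f:
    assert f.read() == b"hello"
```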
{ "source": "jic-dtool/dtool-info", "score": 2 }
#### File: dtool-info/tests/test_compare_command.py ```python import os from click.testing import CliRunner from . import SAMPLE_DATASETS_DIR he_dataset_uri = "file://" + os.path.join(SAMPLE_DATASETS_DIR, "he") she_dataset_uri = "file://" + os.path.join(SAMPLE_DATASETS_DIR, "she") cat_dataset_uri = "file://" + os.path.join(SAMPLE_DATASETS_DIR, "cat") lion_dataset_uri = "file://" + os.path.join(SAMPLE_DATASETS_DIR, "lion") def test_dataset_diff_functional(): from dtool_info.dataset import diff runner = CliRunner() result = runner.invoke(diff, [he_dataset_uri, he_dataset_uri]) assert result.exit_code == 0 result = runner.invoke(diff, [he_dataset_uri, she_dataset_uri]) assert result.exit_code == 1 assert result.output.startswith("Different identifiers") result = runner.invoke(diff, [cat_dataset_uri, lion_dataset_uri]) assert result.exit_code == 2 assert result.output.find("Different sizes") != -1 result = runner.invoke(diff, [cat_dataset_uri, she_dataset_uri]) assert result.exit_code == 0 result = runner.invoke(diff, ["--full", cat_dataset_uri, she_dataset_uri]) assert result.exit_code == 3 assert result.output.find("Different content") != -1 ``` #### File: dtool-info/tests/test_dtool_info_package.py ```python def test_version_is_string(): import dtool_info assert isinstance(dtool_info.__version__, str) ``` #### File: dtool-info/tests/test_summary_command.py ```python import os import json from click.testing import CliRunner from . import SAMPLE_DATASETS_DIR lion_dataset_uri = "file://" + os.path.join(SAMPLE_DATASETS_DIR, "lion") def test_dataset_summary_functional(): from dtool_info.dataset import summary # Create expected output. expected_lines = [ "name: lion", "uuid: 5cb6d8bb-255b-4ca5-a445-c1f8121c5333", "creator_username: olssont", "number_of_items: 1", "size: 5.0B", "frozen_at: 2017-09-22", ] runner = CliRunner() result = runner.invoke(summary, [lion_dataset_uri]) assert result.exit_code == 0 assert "\n".join(expected_lines) == result.output.strip() def test_dataset_summary_json_functional(): from dtoolcore import DataSet from dtool_info.dataset import summary # Create expected output. lion_ds = DataSet.from_uri(lion_dataset_uri) tot_size = sum([lion_ds.item_properties(i)["size_in_bytes"] for i in lion_ds.identifiers]) expected = { "name": lion_ds.name, "uuid": lion_ds.uuid, "number_of_items": len(lion_ds.identifiers), "size_in_bytes": tot_size, "creator_username": lion_ds._admin_metadata["creator_username"], "frozen_at": lion_ds._admin_metadata["frozen_at"], } runner = CliRunner() result = runner.invoke(summary, ["--format", "json", lion_dataset_uri]) assert result.exit_code == 0 actual = json.loads(result.output) assert expected == actual ``` #### File: dtool-info/tests/test_uri_command.py ```python import os from click.testing import CliRunner from . import SAMPLE_DATASETS_DIR from . import tmp_dir_fixture # NOQA lion_dataset_path = os.path.join(SAMPLE_DATASETS_DIR, "lion") def test_uri_command_functional(): from dtool_info.dataset import uri runner = CliRunner() result = runner.invoke(uri, [lion_dataset_path]) assert result.exit_code == 0 output = result.output.strip() assert output.startswith("file://") assert output.endswith(lion_dataset_path) ```
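All of the dtool-info tests follow the same pattern: invoke a click command with `CliRunner` and assert on the exit code and captured output. A self-contained toy command showing the mechanics (the `greet` command is invented for illustration):

```python
import click
from click.testing import CliRunner


@click.command()
@click.argument("name")
def greet(name):
    """Toy command used to illustrate the CliRunner pattern."""
    if name == "grumpy":
        raise click.ClickException("no greeting for grumpy")
    click.echo("hello {}".format(name))


runner = CliRunner()

result = runner.invoke(greet, ["lion"])
assert result.exit_code == 0
assert result.output.strip() == "hello lion"

result = runner.invoke(greet, ["grumpy"])
assert result.exit_code == 1  # click.ClickException exits with code 1
```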
{ "source": "jic-dtool/dtool-lookup-server-annotation-filter-plugin", "score": 2 }
#### File: dtool-lookup-server-annotation-filter-plugin/dtool_lookup_server_annotation_filter_plugin/utils.py ```python from dtool_lookup_server import ( mongo, MONGO_COLLECTION, ) from dtool_lookup_server.utils import ( get_user_obj, preprocess_query_base_uris, ) VALID_MONGO_QUERY_KEYS = ( "base_uris", ) try: # Python 2. ALLOWED_TYPES = (str, int, float, bool, unicode) except NameError: # Python 3. ALLOWED_TYPES = (str, int, float, bool) def filter_dict_to_mongo_query(filters): """Return mongo query from filters dict.""" base_uri_subquery = None if len(filters["base_uris"]) == 1: base_uri_subquery = str(filters["base_uris"][0]) else: base_uris = [str(b) for b in filters["base_uris"]] base_uri_subquery = {"$in": base_uris} return {"base_uri": base_uri_subquery} def _extract_valid_keys(ds_info): ds_valid_keys = set() # Some old datasets may not have any annotations. if "annotations" not in ds_info: return ds_valid_keys for key, value in ds_info["annotations"].items(): if type(value) not in ALLOWED_TYPES: continue ds_valid_keys.add(key) return ds_valid_keys def _extract_keys_of_interest(filters): keys_of_interest = set() if "annotation_keys" in filters: keys_of_interest.update(filters["annotation_keys"]) if "annotations" in filters: keys_of_interest.update(filters["annotations"].keys()) return keys_of_interest def _exclude_dataset_info_filter(ds_info, filters): "There is probably a more clever way to do this using the mongo query language." # NOQA # Create set of valid keys in dataset. ds_valid_keys = _extract_valid_keys(ds_info) # Dataset without a valid key are ignored. if len(ds_valid_keys) == 0: return True # All keys of interest must be present on the dataset for it to be # recognised. keys_of_interest = _extract_keys_of_interest(filters) ds_valid_keys = _extract_valid_keys(ds_info) if len(keys_of_interest.difference(ds_valid_keys)) > 0: return True # If the "annotations" filter is on check that the key/value pair is # present, if not skip the dataset. skip = False if "annotations" in filters: for ann_key, ann_value in filters["annotations"].items(): if ann_key not in ds_valid_keys: skip = True break if ds_info["annotations"][ann_key] != ann_value: skip = True break return skip def get_annotation_key_info_by_user(username, filters): """Return dictionary with annotation keys and numbers of datasets given that key and any filters passed into the function. :param username: username :param filters: dictionary with filters :returns: dictionary where keys are annotation keys and values are the numbers of datasets with that key given the filter provided """ # Validate the user; raises AuthenticationError if invalid. get_user_obj(username) filters = preprocess_query_base_uris(username, filters) mongo_query = filter_dict_to_mongo_query(filters) # If there are no base URI the user has not got permissions to view any # datasets. if len(filters["base_uris"]) == 0: return {} cx = mongo.db[MONGO_COLLECTION].find( mongo_query, { "annotations": True, } ) # There is probably a more clever way to do this using the # mongo query language. annotation_key_info = {} for ds in cx: if _exclude_dataset_info_filter(ds, filters): continue # Add the key information. for key in _extract_valid_keys(ds): annotation_key_info[key] = annotation_key_info.get(key, 0) + 1 return annotation_key_info def get_annotation_value_info_by_user(username, filters): """Return dictionary with annotation keys and dictionaries of values and numbers of datasets for those values given that any filters passed into the function. 
:param username: username :param filters: dictionary with filters :returns: dictionary where keys are annotation keys and values are dictionaries with the values and the the numbers of datasets for those values given the filters provided """ filters = preprocess_query_base_uris(username, filters) mongo_query = filter_dict_to_mongo_query(filters) # If there are no base URI the user has not got permissions to view any # datasets. if len(filters["base_uris"]) == 0: return {} cx = mongo.db[MONGO_COLLECTION].find( mongo_query, { "annotations": True, } ) # There is probably a more clever way to do this using the # mongo query language. annotation_value_info = {} for ds in cx: if _exclude_dataset_info_filter(ds, filters): continue for key in _extract_keys_of_interest(filters): value = ds["annotations"][key] value_dict = annotation_value_info.get(key, {}) value_dict[value] = value_dict.get(value, 0) + 1 annotation_value_info[key] = value_dict return annotation_value_info def get_num_datasets_by_user(username, filters): """Return number of datasets given the filters passed into the function. :param username: username :param filters: dictionary with filters :returns: number of datasets """ filters = preprocess_query_base_uris(username, filters) mongo_query = filter_dict_to_mongo_query(filters) # If there are no base URI the user has not got permissions to view any # datasets. if len(filters["base_uris"]) == 0: return 0 cx = mongo.db[MONGO_COLLECTION].find( mongo_query, { "annotations": True, } ) # There is probably a more clever way to do this using the # mongo query language. num_datasets = 0 for ds in cx: if _exclude_dataset_info_filter(ds, filters): continue num_datasets += 1 # All keys_of_interest must be present on the dataset for it to be # recognised. keys_of_interest = _extract_keys_of_interest(filters) ds_valid_keys = _extract_valid_keys(ds) if len(keys_of_interest.difference(ds_valid_keys)) > 0: continue return num_datasets def get_datasets_by_user(username, filters): """Return datasets given the filters passed into the function. :param username: username :param filters: dictionary with filters :returns: list of dictionaries with information about the datasets """ filters = preprocess_query_base_uris(username, filters) mongo_query = filter_dict_to_mongo_query(filters) # If there are no base URI the user has not got permissions to view any # datasets. if len(filters["base_uris"]) == 0: return [] cx = mongo.db[MONGO_COLLECTION].find( mongo_query, { "_id": False, "readme": False, "manifest": False, } ) # There is probably a more clever way to do this using the # mongo query language. datasets = [] for ds in cx: if _exclude_dataset_info_filter(ds, filters): continue datasets.append(ds) return datasets ``` #### File: dtool-lookup-server-annotation-filter-plugin/tests/test_get_annotation_values_by_user.py ```python from . 
import tmp_app # NOQA def test_get_annotation_values_by_user_no_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) expected_key_info = {} actual_key_info = get_annotation_value_info_by_user(username="grumpy", filters={}) # NOQA assert expected_key_info == actual_key_info def test_get_annotation_values_by_user_annotation_key_filter_single(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) filters = {"annotation_keys": ["color"]} actual_key_info = get_annotation_value_info_by_user("grumpy", filters) # NOQA expected_key_info = {"color": {"blue": 1, "red": 1}} assert expected_key_info == actual_key_info def test_get_annotation_values_by_user_annotation_key_filter_multiple(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) filters = {"annotation_keys": ["color", "pattern"]} actual_key_info = get_annotation_value_info_by_user("grumpy", filters) # NOQA expected_key_info = {"color": {"red": 1}, "pattern": {"wavy": 1}} assert expected_key_info == actual_key_info def test_get_annotation_values_by_user_annotation_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) filters = {"annotation_keys": ["color"], "annotations": {"pattern": "wavy"}} # NOQA actual_key_info = get_annotation_value_info_by_user("grumpy", filters) # NOQA expected_key_info = {"color": {"red": 1}, "pattern": {"wavy": 1}} assert expected_key_info == actual_key_info def test_get_annotation_values_by_user_complex_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) filters = {"annotations": {"color": "red"}} actual_key_info = get_annotation_value_info_by_user("grumpy", filters) # NOQA expected_key_info = {"color": {"red": 1}} assert expected_key_info == actual_key_info def test_get_annotation_values_by_user_base_uri_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_annotation_value_info_by_user ) filters = {"base_uris": ["s3://mr-men"], "annotation_keys": ["pattern"]} expected_key_info = {"pattern": {"wavy": 1}} actual_key_info = get_annotation_value_info_by_user("grumpy", filters) # NOQA assert expected_key_info == actual_key_info ``` #### File: dtool-lookup-server-annotation-filter-plugin/tests/test_get_datasets_by_user.py ```python from . 
import tmp_app # NOQA EXPECTED_DATASET_KEYS = ( "base_uri", "created_at", "creator_username", "frozen_at", "name", "type", "uri", "uuid", "annotations", ) def test_get_datasets_by_user_no_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) content = get_datasets_by_user(username="grumpy", filters={}) # NOQA assert(len(content) == 3) for ds_info in content: for expected_key in EXPECTED_DATASET_KEYS: assert expected_key in ds_info def test_get_dataset_by_user_annotation_key_filter_single(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) filters = {"annotation_keys": ["color"]} hits = get_datasets_by_user("grumpy", filters) # NOQA assert len(hits) == 2 expected_names = set(["blue-shirt", "red-wavy-shirt"]) actual_names = set([i["name"] for i in hits]) assert expected_names == actual_names def test_get_datasets_by_user_annotation_key_filter_multiple(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) filters = {"annotation_keys": ["color", "pattern"]} hits = get_datasets_by_user("grumpy", filters) # NOQA assert len(hits) == 1 assert hits[0]["name"] == "red-wavy-shirt" def test_get_datasets_by_user_annotation_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) filters = {"annotation_keys": ["color"], "annotations": {"pattern": "wavy"}} # NOQA hits = get_datasets_by_user("grumpy", filters) # NOQA assert len(hits) == 1 assert hits[0]["name"] == "red-wavy-shirt" def test_get_datasets_by_user_complex_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) filters = {"annotations": {"color": "red"}} hits = get_datasets_by_user("grumpy", filters) # NOQA assert len(hits) == 1 assert hits[0]["name"] == "red-wavy-shirt" def test_get_datasets_by_user_base_uri_filter(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin.utils import ( get_datasets_by_user ) filters = {"base_uris": ["s3://mr-men"], "annotation_keys": ["pattern"]} hits = get_datasets_by_user("grumpy", filters) # NOQA assert len(hits) == 1 assert hits[0]["name"] == "red-wavy-shirt" ``` #### File: dtool-lookup-server-annotation-filter-plugin/tests/test_version_route_functional.py ```python import json from . import tmp_app # NOQA from . import GRUMPY_TOKEN, NOONE_TOKEN def test_version_route(tmp_app): # NOQA from dtool_lookup_server_annotation_filter_plugin import __version__ headers = dict(Authorization="Bearer " + GRUMPY_TOKEN) r = tmp_app.get( "/annotation_filter_plugin/version", headers=headers, ) assert r.status_code == 200 response = json.loads(r.data.decode("utf-8")) assert response == __version__ def test_num_datasets_route_with_noone_user(tmp_app): # NOQA headers = dict(Authorization="Bearer " + NOONE_TOKEN) r = tmp_app.get( "/annotation_filter_plugin/version", headers=headers, content_type="application/json" ) assert r.status_code == 401 ```
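The filtering behaviour exercised above hinges on `_extract_valid_keys`, which only treats scalar-valued annotations as filterable. A simplified standalone sketch of that check, modelled on the plugin's `utils` module:

```python
ALLOWED_TYPES = (str, int, float, bool)


def extract_valid_keys(ds_info):
    """Return the annotation keys whose values are of a filterable type."""
    valid = set()
    # Datasets without an "annotations" entry simply yield no valid keys.
    for key, value in ds_info.get("annotations", {}).items():
        if type(value) in ALLOWED_TYPES:
            valid.add(key)
    return valid


ds_info = {
    "annotations": {
        "color": "red",       # str: filterable
        "size": 3,            # int: filterable
        "coords": {"x": 1},   # dict: ignored by the filters
    }
}
assert extract_valid_keys(ds_info) == {"color", "size"}
assert extract_valid_keys({}) == set()
```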
{ "source": "jic-dtool/dtool_lookup_server", "score": 3 }
#### File: dtool_lookup_server/dtool_lookup_server/config_routes.py ```python from flask import ( abort, Blueprint, jsonify, ) from flask_jwt_extended import ( jwt_required, get_jwt_identity, ) from dtool_lookup_server import AuthenticationError from dtool_lookup_server.utils import config_to_dict bp = Blueprint("config", __name__, url_prefix="/config") @bp.route("/info", methods=["GET"]) @jwt_required() def server_config(): """Return the JSON-serialized server configuration.""" # NOTE: dummy, no authentication implemented here so far. username = get_jwt_identity() try: config = config_to_dict(username) except AuthenticationError: abort(401) return jsonify(config) ``` #### File: dtool_lookup_server/dtool_lookup_server/user_routes.py ```python from flask import ( abort, Blueprint, jsonify, ) from flask_jwt_extended import ( jwt_required, get_jwt_identity, ) from dtool_lookup_server import AuthenticationError import dtool_lookup_server.utils bp = Blueprint("user", __name__, url_prefix="/user") @bp.route("/info/<username>", methods=["GET"]) @jwt_required() def get_user_info(username): """Return a users information. A user can see his/her own profile. An admin user can see other user's profiles. A user who tries to see another user's profile gets a 404. """ token_username = get_jwt_identity() try: user = dtool_lookup_server.utils.get_user_obj(token_username) except AuthenticationError: # Unregistered users should see 404. abort(404) # Return 404 if the user is not admin and the token username # does not match up with the username in the URL. if not user.is_admin: if token_username != username: abort(404) return jsonify(dtool_lookup_server.utils.get_user_info(username)) ``` #### File: dtool_lookup_server/registration_utils/deprecated-mass_registration.py ```python import argparse import json from datetime import date, datetime import dtoolcore import yaml from pymongo import MongoClient # Placeholder until app becomes a package or some other solution # to the fact that "app" directory is not in the python path. import os import sys _HERE = os.path.dirname(os.path.abspath(__file__)) _ROOT = os.path.join(_HERE, "..") sys.path.insert(0, _ROOT) from app.utils import register_dataset # NOQA CONFIG_PATH = None client = MongoClient() db = client["dtool_info"] collection = db["datasets"] def json_serial(obj): if isinstance(obj, (datetime, date)): return obj.isoformat() raise TypeError("Type {} not serializable".format(type(obj))) def register_all_datasets(base_uri): base_uri = dtoolcore.utils.sanitise_uri(base_uri) StorageBroker = dtoolcore._get_storage_broker(base_uri, CONFIG_PATH) for uri in StorageBroker.list_dataset_uris(base_uri, CONFIG_PATH): try: dataset = dtoolcore.DataSet.from_uri(uri) except dtoolcore.DtoolCoreTypeError: pass dataset_info = dataset._admin_metadata dataset_info["uri"] = dataset.uri # Add the readme info. readme_info = yaml.load(dataset.get_readme_content()) dataset_info["readme"] = readme_info # Clean up datetime.data. dataset_info_json_str = json.dumps(dataset_info, default=json_serial) dataset_info = json.loads(dataset_info_json_str) r = register_dataset(collection, dataset_info) print("Registered: {}".format(r)) if __name__ == "__main__": parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("base_uri") args = parser.parse_args() register_all_datasets(args.base_uri) ``` #### File: dtool_lookup_server/tests/test_base_uri_routes.py ```python import json from . import tmp_app_with_users, tmp_app_with_data # NOQA from . 
import ( snowwhite_token, grumpy_token, noone_token, ) def test_base_uri_regsiter_route(tmp_app_with_users): # NOQA from dtool_lookup_server.utils import base_uri_exists base_uri = "s3://snow-white-again" assert not base_uri_exists(base_uri) data = {"base_uri": base_uri} headers = dict(Authorization="Bearer " + snowwhite_token) r = tmp_app_with_users.post( "/admin/base_uri/register", headers=headers, data=json.dumps(data), content_type="application/json" ) assert r.status_code == 201 assert base_uri_exists(base_uri) # Ensure idempotent. headers = dict(Authorization="Bearer " + snowwhite_token) r = tmp_app_with_users.post( "/admin/base_uri/register", headers=headers, data=json.dumps(data), content_type="application/json" ) assert r.status_code == 201 assert base_uri_exists(base_uri) # Only admins allowed. However, don't give away that URL exists to # non-admins. headers = dict(Authorization="Bearer " + grumpy_token) r = tmp_app_with_users.post( "/admin/base_uri/register", headers=headers, data=json.dumps(data), content_type="application/json" ) assert r.status_code == 404 headers = dict(Authorization="Bearer " + noone_token) r = tmp_app_with_users.post( "/admin/base_uri/register", headers=headers, data=json.dumps(data), content_type="application/json" ) assert r.status_code == 404 def test_base_uri_list_route(tmp_app_with_data): # NOQA headers = dict(Authorization="Bearer " + snowwhite_token) r = tmp_app_with_data.get( "/admin/base_uri/list", headers=headers, ) assert r.status_code == 200 assert len(json.loads(r.data.decode("utf-8"))) == 2 headers = dict(Authorization="Bearer " + grumpy_token) r = tmp_app_with_data.get( "/admin/base_uri/list", headers=headers, ) assert r.status_code == 404 headers = dict(Authorization="Bearer " + noone_token) r = tmp_app_with_data.get( "/admin/base_uri/list", headers=headers, ) assert r.status_code == 404 ``` #### File: dtool_lookup_server/tests/test_summary_of_datasets_by_user.py ```python from . import tmp_app_with_data # NOQA def test_summary_of_datasets_by_user(tmp_app_with_data): # NOQA from dtool_lookup_server.utils import summary_of_datasets_by_user summary = summary_of_datasets_by_user("grumpy") exected_output = { "number_of_datasets": 3, "creator_usernames": ["queen"], "base_uris": ["s3://mr-men", "s3://snow-white"], "datasets_per_creator": {"queen": 3}, "datasets_per_base_uri": {"s3://mr-men": 1, "s3://snow-white": 2}, "tags": ["evil", "fruit", "good"], "datasets_per_tag": {"good": 1, "evil": 2, "fruit": 3} } assert summary == exected_output ``` #### File: dtool_lookup_server/tests/test_user_routes.py ```python import json from . import tmp_app_with_users # NOQA from . import ( snowwhite_token, grumpy_token, noone_token, ) def test_list_user_route(tmp_app_with_users): # NOQA # Snow-white admin retrieve's her own info. headers = dict(Authorization="Bearer " + snowwhite_token) r = tmp_app_with_users.get( "/user/info/snow-white", headers=headers, ) assert r.status_code == 200 expected_content = { u"username": u"snow-white", u"is_admin": True, u"search_permissions_on_base_uris": [], u"register_permissions_on_base_uris": [] } assert json.loads(r.data.decode("utf-8")) == expected_content # Grumpy non-admin retrieve's his own info. 
headers = dict(Authorization="Bearer " + grumpy_token) r = tmp_app_with_users.get( "/user/info/grumpy", headers=headers, ) assert r.status_code == 200 base_uri = "s3://snow-white" expected_content = { u"username": u"grumpy", u"is_admin": False, u"search_permissions_on_base_uris": [base_uri], u"register_permissions_on_base_uris": [base_uri] } assert json.loads(r.data.decode("utf-8")) == expected_content # Snow-white admin retrieve Grumpy's info. headers = dict(Authorization="Bearer " + snowwhite_token) r = tmp_app_with_users.get( "/user/info/grumpy", headers=headers, ) assert r.status_code == 200 base_uri = "s3://snow-white" expected_content = { u"username": u"grumpy", u"is_admin": False, u"search_permissions_on_base_uris": [base_uri], u"register_permissions_on_base_uris": [base_uri] } assert json.loads(r.data.decode("utf-8")) == expected_content # Noone tries to retrieve Grumpy's info. 404. headers = dict(Authorization="Bearer " + noone_token) r = tmp_app_with_users.get( "/user/info/grumpy", headers=headers, ) assert r.status_code == 404 # Grumpy tries to retrieve Snow White's info. 404. headers = dict(Authorization="Bearer " + grumpy_token) r = tmp_app_with_users.get( "/user/info/snow-white", headers=headers, ) assert r.status_code == 404 ``` #### File: dtool_lookup_server/tests/test_utils_get_annotations_from_uri_by_user.py ```python import pytest from . import tmp_app_with_data # NOQA def test_get_annotations_from_uri_by_user(tmp_app_with_data): # NOQA from dtool_lookup_server import ( AuthenticationError, AuthorizationError, UnknownBaseURIError, UnknownURIError, ) from dtool_lookup_server.utils import ( get_annotations_from_uri_by_user, ) base_uri = "s3://snow-white" uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) expected_annotations = {"type": "fruit"} assert get_annotations_from_uri_by_user("grumpy", uri) == expected_annotations # NOQA with pytest.raises(AuthenticationError): get_annotations_from_uri_by_user("dont_exist", uri) with pytest.raises(AuthorizationError): get_annotations_from_uri_by_user("sleepy", uri) with pytest.raises(UnknownBaseURIError): get_annotations_from_uri_by_user("grumpy", "s3://dont_exist/" + uuid) with pytest.raises(UnknownURIError): get_annotations_from_uri_by_user("grumpy", base_uri + "/dont_exist") ``` #### File: dtool_lookup_server/tests/test_utils_get_readme_from_uri_by_user.py ```python import pytest from . import tmp_app_with_data # NOQA def test_get_readme_from_uri_by_user(tmp_app_with_data): # NOQA from dtool_lookup_server import ( AuthenticationError, AuthorizationError, UnknownBaseURIError, UnknownURIError, ) from dtool_lookup_server.utils import ( get_readme_from_uri_by_user, ) base_uri = "s3://snow-white" uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) expected_readme = {"descripton": "apples from queen"} assert get_readme_from_uri_by_user("grumpy", uri) == expected_readme with pytest.raises(AuthenticationError): get_readme_from_uri_by_user("dont_exist", uri) with pytest.raises(AuthorizationError): get_readme_from_uri_by_user("sleepy", uri) with pytest.raises(UnknownBaseURIError): get_readme_from_uri_by_user("grumpy", "s3://dont_exist/" + uuid) with pytest.raises(UnknownURIError): get_readme_from_uri_by_user("grumpy", base_uri + "/dont_exist") ``` #### File: dtool_lookup_server/tests/test_utils_register_dataset.py ```python import pytest from . 
import tmp_app # NOQA def test_register_dataset(tmp_app): # NOQA from dtool_lookup_server import ValidationError from dtool_lookup_server.utils import ( register_users, register_base_uri, update_permissions, register_dataset, get_admin_metadata_from_uri, get_readme_from_uri_by_user, ) register_users([ dict(username="grumpy"), dict(username="sleepy"), ]) base_uri = "s3://snow-white" register_base_uri(base_uri) permissions = { "base_uri": base_uri, "users_with_search_permissions": ["grumpy", "sleepy"], "users_with_register_permissions": ["grumpy"], } update_permissions(permissions) uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) dataset_info = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "type": "dataset", "readme": {"description": "test dataset"}, "manifest": { "dtoolcore_version": "3.7.0", "hash_function": "md5sum_hexdigest", "items": { "e4cc3a7dc281c3d89ed4553293c4b4b110dc9bf3": { "hash": "d89117c9da2cc34586e183017cb14851", "relpath": "U00096.3.rev.1.bt2", "size_in_bytes": 5741810, "utc_timestamp": 1536832115.0 } } }, "creator_username": "olssont", "frozen_at": 1536238185.881941, "created_at": 1536236399.19497, "number_of_items": 9876, "size_in_bytes": 5741810, "annotations": {"software": "bowtie2"}, "tags": ["rnaseq"], } register_dataset(dataset_info) expected_content = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "creator_username": "olssont", "frozen_at": 1536238185.881941, "created_at": 1536236399.19497, "number_of_items": 9876, "size_in_bytes": 5741810, } assert get_admin_metadata_from_uri(uri) == expected_content assert get_readme_from_uri_by_user("sleepy", uri) == dataset_info["readme"] with pytest.raises(ValidationError): register_dataset({"name": "not-all-required-metadata"}) def test_register_dataset_without_created_at(tmp_app): # NOQA from dtool_lookup_server import ValidationError from dtool_lookup_server.utils import ( register_users, register_base_uri, update_permissions, register_dataset, get_admin_metadata_from_uri, get_readme_from_uri_by_user, ) register_users([ dict(username="grumpy"), dict(username="sleepy"), ]) base_uri = "s3://snow-white" register_base_uri(base_uri) permissions = { "base_uri": base_uri, "users_with_search_permissions": ["grumpy", "sleepy"], "users_with_register_permissions": ["grumpy"], } update_permissions(permissions) uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) dataset_info = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "type": "dataset", "readme": {"description": "test dataset"}, "manifest": { "dtoolcore_version": "3.7.0", "hash_function": "md5sum_hexdigest", "items": { "e4cc3a7dc281c3d89ed4553293c4b4b110dc9bf3": { "hash": "d89117c9da2cc34586e183017cb14851", "relpath": "U00096.3.rev.1.bt2", "size_in_bytes": 5741810, "utc_timestamp": 1536832115.0 } } }, "creator_username": "olssont", "frozen_at": 1536238185.881941, "number_of_items": 1232, "size_in_bytes": 5741810, "annotations": {"software": "bowtie2"}, "tags": ["rnaseq"], } register_dataset(dataset_info) # When missing, created_at will be set to frozen_at. 
expected_content = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "creator_username": "olssont", "frozen_at": 1536238185.881941, "created_at": 1536238185.881941, "number_of_items": 1232, "size_in_bytes": 5741810, } assert get_admin_metadata_from_uri(uri) == expected_content assert get_readme_from_uri_by_user("sleepy", uri) == dataset_info["readme"] with pytest.raises(ValidationError): register_dataset({"name": "not-all-required-metadata"}) def test_register_dataset_without_created_at_and_size_in_bytes(tmp_app): # NOQA from dtool_lookup_server import ValidationError from dtool_lookup_server.utils import ( register_users, register_base_uri, update_permissions, register_dataset, get_admin_metadata_from_uri, get_readme_from_uri_by_user, ) register_users([ dict(username="grumpy"), dict(username="sleepy"), ]) base_uri = "s3://snow-white" register_base_uri(base_uri) permissions = { "base_uri": base_uri, "users_with_search_permissions": ["grumpy", "sleepy"], "users_with_register_permissions": ["grumpy"], } update_permissions(permissions) uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) dataset_info = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "type": "dataset", "readme": {"description": "test dataset"}, "manifest": { "dtoolcore_version": "3.7.0", "hash_function": "md5sum_hexdigest", "items": { "e4cc3a7dc281c3d89ed4553293c4b4b110dc9bf3": { "hash": "d89117c9da2cc34586e183017cb14851", "relpath": "U00096.3.rev.1.bt2", "size_in_bytes": 5741810, "utc_timestamp": 1536832115.0 } } }, "creator_username": "olssont", "frozen_at": 1536238185.881941, "annotations": {"software": "bowtie2"}, "tags": ["rnaseq"], } register_dataset(dataset_info) # When missing, created_at will be set to frozen_at. 
expected_content = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "creator_username": "olssont", "frozen_at": 1536238185.881941, "created_at": 1536238185.881941, "number_of_items": None, "size_in_bytes": None, } assert get_admin_metadata_from_uri(uri) == expected_content assert get_readme_from_uri_by_user("sleepy", uri) == dataset_info["readme"] with pytest.raises(ValidationError): register_dataset({"name": "not-all-required-metadata"}) def test_register_too_large_metadata_dataset(tmp_app): # NOQA from dtool_lookup_server import ValidationError from dtool_lookup_server.utils import ( register_users, register_base_uri, update_permissions, register_dataset, get_admin_metadata_from_uri, ) register_users([ dict(username="grumpy"), dict(username="sleepy"), ]) base_uri = "s3://snow-white" register_base_uri(base_uri) permissions = { "base_uri": base_uri, "users_with_search_permissions": ["grumpy", "sleepy"], "users_with_register_permissions": ["grumpy"], } update_permissions(permissions) uuid = "af6727bf-29c7-43dd-b42f-a5d7ede28337" uri = "{}/{}".format(base_uri, uuid) dataset_info = { "base_uri": base_uri, "uuid": uuid, "uri": uri, "name": "my-dataset", "type": "dataset", "manifest": { "dtoolcore_version": "3.7.0", "hash_function": "md5sum_hexdigest", "items": { "e4cc3a7dc281c3d89ed4553293c4b4b110dc9bf3": { "hash": "d89117c9da2cc34586e183017cb14851", "relpath": "U00096.3.rev.1.bt2", "size_in_bytes": 5741810, "utc_timestamp": 1536832115.0 } } }, "creator_username": "olssont", "frozen_at": 1536238185.881941, "created_at": 1536236399.19497, "annotations": {"software": "bowtie2"}, "tags": ["rnaseq"], } really_large_readme = {} for i in range(100000): key = "here_is_a_long_key_{}".format(i) value = "here_is_a_long_value_{}".format(i) * 10 really_large_readme[key] = value dataset_info["readme"] = really_large_readme # The dataset_info is too large and raises: # pymongo.errors.DocumentTooLarge: BSON document too large (28978543 bytes) # - the connected server supports BSON document sizes up to 16793598 bytes. # See https://github.com/jic-dtool/dtool-lookup-server/issues/16 # So the code catches this and raises dtool_lookup_server.ValidationError # instead. with pytest.raises(ValidationError): register_dataset(dataset_info) assert get_admin_metadata_from_uri(dataset_info["uri"]) is None ``` #### File: dtool_lookup_server/tests/test_utils_user_management.py ```python from . import tmp_app # NOQA def test_user_management_helper_functions(tmp_app): # NOQA from dtool_lookup_server.utils import ( register_users, get_user_info, list_users, ) # Create list of dictionaries of users. admin_username = "magic.mirror" data_champion_username = "snow-white" standard_user_username = "dopey" users = [ {"username": admin_username, "is_admin": True}, {"username": data_champion_username, "is_admin": False}, {"username": standard_user_username}, ] # Register the users. 
register_users(users) user_info = get_user_info(admin_username) expected_content = { "username": admin_username, "is_admin": True, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] } assert user_info == expected_content user_info = get_user_info(data_champion_username) expected_content = { "username": data_champion_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] } assert user_info == expected_content user_info = get_user_info(standard_user_username) expected_content = { "username": standard_user_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] } assert user_info == expected_content # Test non-existing user. assert get_user_info("no-one") is None # Test registering input with an existing user present. new_username = "sleepy" users = [{"username": data_champion_username}, {"username": new_username}] register_users(users) user_info = get_user_info(new_username) expected_content = { "username": new_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] } assert user_info == expected_content # Test listing users. expected_content = [ { "username": admin_username, "is_admin": True, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": data_champion_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": standard_user_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": new_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, ] assert list_users() == expected_content # Test deleting users. from dtool_lookup_server.utils import delete_users users_to_delete = [ { "username": standard_user_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": new_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, ] delete_users(users_to_delete) expected_content = [ { "username": admin_username, "is_admin": True, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": data_champion_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, ] assert list_users() == expected_content # Test updating users admin privileges. from dtool_lookup_server.utils import update_users users_to_update = [ {"username": admin_username}, # The is_admin value defaults to False. {"username": data_champion_username, "is_admin": True}, {"username": standard_user_username}, # Not in system so ignored. ] update_users(users_to_update) expected_content = [ { "username": admin_username, "is_admin": False, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, { "username": data_champion_username, "is_admin": True, "search_permissions_on_base_uris": [], "register_permissions_on_base_uris": [] }, ] assert list_users() == expected_content ```
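A recurring convention in these route tests is that unauthorised or unknown callers receive 404 rather than 401, so responses never reveal whether a user or base URI exists. A toy Flask app sketching that convention (the route and user set are invented for illustration):

```python
from flask import Flask, jsonify

app = Flask(__name__)

KNOWN_USERS = {"snow-white", "grumpy"}


@app.route("/user/info/<username>")
def user_info(username):
    # Unknown callers get 404, not 401, so the route does not leak
    # which usernames are registered.
    if username not in KNOWN_USERS:
        return jsonify(error="not found"), 404
    return jsonify(username=username)


client = app.test_client()
assert client.get("/user/info/grumpy").status_code == 200
assert client.get("/user/info/no-one").status_code == 404
```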
{ "source": "jic-dtool/dtool-overlay", "score": 2 }
#### File: dtool-overlay/dtool_overlay/cli.py ```python import dtoolcore import click from dtool_cli.cli import dataset_uri_argument from dtool_overlay.utils import ( TransformOverlays, bool_overlay_from_glob_rule, pair_overlay_from_suffix, value_overlays_from_parsing, ) @click.group() def overlays(): """Overlays provide per item structural metadata.""" @overlays.command() @dataset_uri_argument def show(dataset_uri): """Show the overlays as CSV table.""" ds = dtoolcore.DataSet.from_uri(dataset_uri) overlays = TransformOverlays.from_dataset(ds) click.secho(overlays.to_csv()) @overlays.group() def template(): """Create overlay CSV template. Templates can be saved as overlays using the ``dtool overlays write`` command. """ @template.command() @dataset_uri_argument @click.argument("overlay_name") @click.argument("glob_rule") def glob(dataset_uri, overlay_name, glob_rule): """Create template with boolean values based on matching of a glob rule. For example, one could create an overlay named "is_csv" using the glob_rule "*.csv". dtool overlays template glob <DS_URI> is_csv '*.csv' Note that the glob_rule needs to be quoted on the command line to avoid the shell expanding it. """ ds = dtoolcore.DataSet.from_uri(dataset_uri) overlays = bool_overlay_from_glob_rule(overlay_name, ds, glob_rule) click.secho(overlays.to_csv()) @template.command() @dataset_uri_argument @click.argument("parse_rule") def parse(dataset_uri, parse_rule): """Create template by parsing relpaths. For example, consider the relpath structure "repl_1/salt/result.csv" one could create overlays named "replicate", "treatment" using the command below. dtool overlays template parse <DS_URI> \\ 'repl_{replicate:d}/{treatment}/result.csv' The parse_rule needs to be quoted on the command line to avoid the shell expanding it. Note that the replicate values will be typed as integers, see https://pypi.org/project/parse/ for more details. It is possible to ignore parts of a relpath by using a pair of curly braces without a name in it. The command below is different from that above in that it only creates a "replicate" overlay. dtool overlays template parse <DS_URI> \\ 'repl_{replicate:d}/{}/result.csv' """ ds = dtoolcore.DataSet.from_uri(dataset_uri) overlays = value_overlays_from_parsing(ds, parse_rule) click.secho(overlays.to_csv()) @template.command() @dataset_uri_argument @click.option("-n", "--overlay_name") @click.argument("suffix") def pairs(dataset_uri, overlay_name, suffix): """Create template with pair item identifiers for files with common prefix. For example, consider the relpaths: exp1/read1.fq.gz exp1/read2.fq.gz exp2/read1/fq.gz exp2/read2/fq.gz One could create an overlay named "pair_id" for these using the command dtool overlays template pairs <DS_URI> .fq.gz The suffix above (.fq.gz) results in the common prefixes would be "exp1/read" and "exp2/read". This is then used to find matching pairs. """ if overlay_name is None: overlay_name = "pair_id" ds = dtoolcore.DataSet.from_uri(dataset_uri) overlays = pair_overlay_from_suffix(overlay_name, ds, suffix) click.secho(overlays.to_csv()) @overlays.command() @dataset_uri_argument @click.argument('csv_template', type=click.File('r')) def write(dataset_uri, csv_template): """Add overlays from CSV template to dataset. For example to add an overlay stored in the file "template.csv": dtool overlays write <DS_URI> template.csv To stream content from stdin use "-", e.g. 
dtool overlays glob <URI> is_csv '*.csv' | dtool overlays write <URI> - """ ds = dtoolcore.DataSet.from_uri(dataset_uri) csv_content = csv_template.read() overlays = TransformOverlays.from_csv(csv_content) overlays.put_in_dataset(ds) ```
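The parse rules accepted by `dtool overlays template parse` use the syntax of the `parse` package, including typed fields like `{replicate:d}` and anonymous `{}` placeholders. A quick standalone demonstration of how such rules pull values out of relpaths:

```python
from parse import parse  # the "parse" package from PyPI

result = parse("repl_{replicate:d}/{treatment}/result.csv",
               "repl_1/salt/result.csv")
assert result.named == {"replicate": 1, "treatment": "salt"}  # :d gives an int

# An anonymous {} matches that path component but records no value.
result = parse("repl_{replicate:d}/{}/result.csv",
               "repl_2/heat/result.csv")
assert result.named == {"replicate": 2}

# A non-matching relpath returns None rather than raising.
assert parse("repl_{replicate:d}/{}/result.csv", "other/layout.csv") is None
```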
{ "source": "jic-dtool/smarttools", "score": 3 }
#### File: smarttools/build_utils/build_docker_images.py ```python import os import subprocess _HERE = os.path.dirname(__file__) DOCKER_DIR = os.path.join(_HERE, "docker") def build_docker_images(): for docker_subdir in os.listdir(DOCKER_DIR): docker_dir = os.path.join(DOCKER_DIR, docker_subdir) if not os.path.isdir(docker_dir): continue cmd = [ "docker", "build", docker_dir, "--tag", "jicscicomp/{}".format(docker_subdir) ] subprocess.call(cmd) if __name__ == "__main__": build_docker_images() ``` #### File: smarttools/build_utils/build_smarttool_image.py ```python import os import subprocess import yaml docker_template = """ FROM {docker_base_image} {docker_snippet} ADD {tool_python_url} /scripts/ """ def build_smarttool_image(tool_dir): tool_file = os.path.join(tool_dir, 'tool.yml') tool_python_url_bits = [ 'https://raw.githubusercontent.com/', 'jic-dtool/smarttools/master/smarttools/smarttools/', os.path.basename(tool_dir), '/smarttool_runner.py' ] tool_python_url = "".join(tool_python_url_bits) with open(tool_file) as fh: tool_description = yaml.load(fh) smarttool_script = os.path.join(tool_dir, 'smarttool_runner.py') tool_description.update( {'tool_python_url': tool_python_url} ) dockerfile_contents = docker_template.format(**tool_description) build_command = [ 'docker', 'build', '-t', 'jicscicomp/{}'.format(tool_description['name']), '-' ] p = subprocess.Popen(build_command, stdin=subprocess.PIPE) p.communicate(dockerfile_contents) ``` #### File: smarttools/trim_seqs_sickle/smarttool_runner.py ```python import os from smarttoolbase import SmartTool, Command, parse_args from dtoolcore.utils import generate_identifier BASE_COMMANDS = [ Command("sickle pe -t sanger -f {forward_read_fpath} -r {reverse_read_fpath} -o sickled_1.fq -p sickled_2.fq -s trash.fq") # NOQA ] OUTPUTS = [ 'sickled_1.fq', 'sickled_2.fq', ] def find_paired_read(dataset, identifier): pair_id = dataset.get_overlay('pair_id') return pair_id[identifier] class TrimSeqsTrimmomatic(SmartTool): def pre_run(self, identifier): self.base_command_props['forward_read_fpath'] = self.input_dataset.item_content_abspath(identifier) # NOQA paired_read_identifier = find_paired_read(self.input_dataset, identifier) # NOQA self.base_command_props['reverse_read_fpath'] = self.input_dataset.item_content_abspath(paired_read_identifier) # NOQA def stage_outputs(self, identifier): read1_handle = None read2_handle = None for filename in self.outputs: useful_name = self.input_dataset.get_overlay( 'useful_name' )[identifier] fpath = os.path.join(self.working_directory, filename) relpath = os.path.join(useful_name, filename) out_id = self.output_proto_dataset.put_item(fpath, relpath) self.output_proto_dataset.add_item_metadata( out_id, 'from', "{}/{}".format(self.input_dataset.uri, identifier) ) # Add is_read1 overlay. if filename.find("_1") != -1: self.output_proto_dataset.add_item_metadata( out_id, "is_read1", True ) read1_handle = out_id else: self.output_proto_dataset.add_item_metadata( out_id, "is_read1", False ) read2_handle = out_id # Add pair_id overlay. self.output_proto_dataset.add_item_metadata( read1_handle, "pair_id", generate_identifier(read2_handle) ) self.output_proto_dataset.add_item_metadata( read2_handle, "pair_id", generate_identifier(read1_handle) ) def main(): args = parse_args() with TrimSeqsTrimmomatic(args.input_uri, args.output_uri) as smart_tool: smart_tool.base_commands = BASE_COMMANDS smart_tool.outputs = OUTPUTS smart_tool(args.identifier) if __name__ == "__main__": main() ```
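The heart of `build_smarttool_image` is rendering a Dockerfile from the tool's `tool.yml` and piping it to `docker build -`. A standalone sketch of the rendering step with an inline YAML document (the base image and snippet values here are made up):

```python
import yaml

docker_template = """
FROM {docker_base_image}
{docker_snippet}
ADD {tool_python_url} /scripts/
"""

tool_yml = """
name: trim_seqs_sickle
docker_base_image: ubuntu:18.04
docker_snippet: RUN apt-get update
"""

tool_description = yaml.safe_load(tool_yml)  # safe_load: no arbitrary object construction
tool_description["tool_python_url"] = (
    "https://raw.githubusercontent.com/jic-dtool/smarttools/master/"
    "smarttools/smarttools/trim_seqs_sickle/smarttool_runner.py"
)

# str.format ignores unused keys such as "name".
dockerfile_contents = docker_template.format(**tool_description)
print(dockerfile_contents)

# If piping this to ``docker build -`` through subprocess.Popen, note that a
# stdin PIPE expects bytes: p.communicate(dockerfile_contents.encode("utf-8"))
```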
{ "source": "jicewarwick/AShareData", "score": 3 }
#### File: AShareData/AShareData/config.py
```python
import json
from typing import Dict, Optional, Union

import sqlalchemy as sa
from sqlalchemy.engine.url import URL

from .database_interface import DBInterface, MySQLInterface

__config__: Dict = None
__db_interface__: DBInterface = None


def prepare_engine(config: Dict) -> sa.engine.Engine:
    """Create sqlalchemy engine from config dict"""
    url = URL(drivername=config['driver'], host=config['host'], port=config['port'],
              database=config['database'], username=config['username'],
              password=config['password'], query={'charset': 'utf8mb4'})
    return sa.create_engine(url)


def generate_db_interface_from_config(config_loc: Union[str, Dict], init: bool = False) -> Optional[DBInterface]:
    if isinstance(config_loc, str):
        with open(config_loc, 'r', encoding='utf-8') as f:
            global_config = json.load(f)
    else:
        global_config = config_loc
    if 'mysql' in global_config['db_interface']['driver']:
        engine = prepare_engine(global_config['db_interface'])
        return MySQLInterface(engine, init=init)


def set_global_config(config_loc: str):
    global __config__
    with open(config_loc, 'r', encoding='utf-8') as f:
        __config__ = json.load(f)


def get_global_config():
    global __config__
    if __config__ is None:
        raise ValueError('Global configuration not set. Please use "set_global_config" to initialize.')
    return __config__


def get_db_interface():
    global __db_interface__
    if __db_interface__ is None:
        __db_interface__ = generate_db_interface_from_config(get_global_config())
    return __db_interface__


def set_db_interface(db_interface: DBInterface):
    # Without the global statement the assignment would only bind a local name
    # and the module-level interface would stay unchanged.
    global __db_interface__
    __db_interface__ = db_interface
```
#### File: AShareData/model/capm.py
```python
from .model import FinancialModel


class CapitalAssetPricingModel(FinancialModel):
    def __init__(self):
        super().__init__('Capital Asset Pricing Model', [])
```
#### File: AShareData/tests/db_interface_test.py
```python
import unittest
import datetime as dt

from AShareData.config import get_db_interface, set_global_config
from AShareData.date_utils import date_type2datetime


class MyTestCase(unittest.TestCase):
    def setUp(self) -> None:
        set_global_config('config.json')
        self.db_interface = get_db_interface()

    def test_read_data(self):
        table_name = '合并资产负债表'
        factor_name = '期末总股本'
        start_date = date_type2datetime('20190101')
        end_date = date_type2datetime('20190101')
        report_period = date_type2datetime('20181231')
        print(self.db_interface.read_table(table_name, factor_name).head())
        print(self.db_interface.read_table(table_name, factor_name, start_date=start_date, end_date=end_date).head())
        print(self.db_interface.read_table(table_name, factor_name, start_date=start_date).head())
        print(self.db_interface.read_table(table_name, factor_name, report_period=report_period).head())

    def test_calendar(self):
        self.db_interface.read_table('交易日历')

    def test_db_timestamp(self):
        table_name = '合并资产负债表'
        print(self.db_interface.get_latest_timestamp(table_name))
        table_name = '模型因子日收益率'
        print(self.db_interface.get_latest_timestamp(table_name))
        print(self.db_interface.get_latest_timestamp(table_name, default_ts=dt.datetime(2021, 3, 4)))


if __name__ == '__main__':
    unittest.main()
```
#### File: AShareData/tests/model_test.py
```python
import datetime as dt
import unittest

from AShareData import set_global_config
from AShareData.model import *


class MyTestCase(unittest.TestCase):
    def setUp(self) -> None:
        set_global_config('config.json')

    def test_something(self):
        self.assertEqual(True, False)

    @staticmethod
    def test_FF3factor_return():
        model = FamaFrench3FactorModel()
        smb = SMBandHMLCompositor(model)
        date = 
dt.datetime(2021, 3, 9) pre_date = dt.datetime(2021, 3, 8) pre_month_date = dt.datetime(2021, 2, 26) smb.compute_factor_return(balance_date=pre_date, pre_date=pre_date, date=date, rebalance_marker='D', period_marker='D') smb.compute_factor_return(balance_date=pre_month_date, pre_date=pre_date, date=date, rebalance_marker='M', period_marker='D') smb.compute_factor_return(balance_date=pre_month_date, pre_date=pre_month_date, date=date, rebalance_marker='M', period_marker='M') @staticmethod def test_FFC4_factor_return(): model = FamaFrenchCarhart4FactorModel() umd = UMDCompositor(model) date = dt.datetime(2021, 3, 9) pre_date = dt.datetime(2021, 3, 8) pre_month_date = dt.datetime(2021, 2, 26) umd.compute_factor_return(balance_date=pre_date, pre_date=pre_date, date=date, rebalance_marker='D', period_marker='D') umd.compute_factor_return(balance_date=pre_month_date, pre_date=pre_date, date=date, rebalance_marker='M', period_marker='D') umd.compute_factor_return(balance_date=pre_month_date, pre_date=pre_month_date, date=date, rebalance_marker='M', period_marker='M') if __name__ == '__main__': unittest.main() ``` #### File: AShareData/tests/web_data_test.py ```python import datetime as dt import unittest import AShareData as asd class WebDataSourceTest(unittest.TestCase): def setUp(self) -> None: asd.set_global_config('config.json') self.web_crawler = asd.WebDataCrawler() self.calendar = asd.SHSZTradingCalendar() def test_sw_industry(self): self.web_crawler.get_sw_industry() def test_zx_industry(self): self.web_crawler.get_zz_industry(self.calendar.offset(dt.date.today(), -1)) if __name__ == '__main__': unittest.main() ```
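For reference, a minimal usage sketch of the configuration module above. The `config.json` layout is an assumption inferred from the keys that `prepare_engine` and `generate_db_interface_from_config` read; every value shown is a placeholder, not a real credential.

```python
# Hypothetical config.json, inferred from the keys read above (placeholders only):
# {
#     "db_interface": {
#         "driver": "mysql+pymysql",
#         "host": "127.0.0.1",
#         "port": 3306,
#         "database": "ashare",
#         "username": "user",
#         "password": "password"
#     }
# }
from AShareData.config import set_global_config, get_db_interface

set_global_config('config.json')   # caches the parsed JSON in __config__
db = get_db_interface()            # lazily builds a MySQLInterface on first use
```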
{ "source": "jicewarwick/DingTalkMessageBot", "score": 3 }
#### File: jicewarwick/DingTalkMessageBot/DingTalkMessageBot.py ```python import base64 import copy import hashlib import hmac import json import time import urllib.parse import requests class DingTalkMessageBot(object): msg_template = { "msgtype": "text", "text": { "content": "" } } header = { 'Content-Type': 'application/json', 'Charset': 'UTF-8' } def __init__(self, token: str, secret: str = None): self.token = token self.secret = secret def _generate_url(self): if self.secret: timestamp = str(round(time.time() * 1000)) secret_enc = self.secret.encode('utf-8') string_to_sign = '{}\n{}'.format(timestamp, self.secret) string_to_sign_enc = string_to_sign.encode('utf-8') hmac_code = hmac.new(secret_enc, string_to_sign_enc, digestmod=hashlib.sha256).digest() sign = urllib.parse.quote_plus(base64.b64encode(hmac_code)) return f'https://oapi.dingtalk.com/robot/send?access_token={self.token}&timestamp={timestamp}&sign={sign}' else: return f'https://oapi.dingtalk.com/robot/send?access_token={self.token}' def send_message(self, msg: str): info = copy.deepcopy(self.msg_template) # deep copy: a shallow copy would share the nested "text" dict and mutate the class-level template info['text']['content'] = msg requests.post(self._generate_url(), json=info, headers=self.header) @classmethod def from_config(cls, json_loc: str, bot_name: str): with open(json_loc, 'r', encoding='utf-8') as f: config = json.load(f) assert 'ding_bot' in config.keys(), 'config file must contain entry "ding_bot"' assert bot_name in config['ding_bot'].keys(), f'config file does not contain {bot_name} in entry "ding_bot"' assert 'token' in config['ding_bot'][bot_name].keys(), f"config does not provide {bot_name}'s token" token = config['ding_bot'][bot_name]['token'] if 'secret' in config['ding_bot'][bot_name].keys(): secret = config['ding_bot'][bot_name]['secret'] else: secret = None return cls(token, secret) ```
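A short usage sketch for the bot above; the token, secret and bot name are placeholders, and the config layout simply follows the asserts in `from_config`.

```python
# Direct construction (webhook token/secret are placeholders):
bot = DingTalkMessageBot('access-token-from-webhook', secret='SECxxxx')
bot.send_message('build finished')

# Or from a JSON config shaped like
# {"ding_bot": {"alerts": {"token": "...", "secret": "..."}}}:
bot = DingTalkMessageBot.from_config('config.json', 'alerts')
```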
{ "source": "jichang1/LinkedInProfileRetriever", "score": 3 }
#### File: jichang1/LinkedInProfileRetriever/main.py ```python from flask import Flask from flask import render_template from flask import request from datetime import datetime from os import path import requests import json app = Flask(__name__) @app.route('/') def Index(): return render_template( 'index.html', title='Home Page', year=datetime.now().year ) @app.route('/oauth/callback') def Auth_upload(): authcode = request.args.get('code', '') token = GetToken(authcode) profile = GetMe(token) return render_template( 'profile.html', token=token, profile=profile ) def GetToken(authcode): client_id = '$client_id$' client_secret = '$client_secret$' redirect_uri = 'https://localhost:5000/oauth/callback' url = "https://www.linkedin.com/oauth/v2/accessToken" row_data = { 'grant_type': 'authorization_code', 'code': authcode, 'redirect_uri': redirect_uri, 'client_id': client_id, 'client_secret': client_secret } response = requests.post(url, data= row_data, headers={'Content-Type': 'application/x-www-form-urlencoded'}) if response.status_code == 200 and 'access_token' in response.json(): return response.json()['access_token'] else: return '' def GetMe(token): url = 'https://api.linkedin.com/v2/me' response = requests.get(url, headers={'Connection': 'keep-alive', 'X-RestLi-Protocol-Version': '2.0.0', 'Authorization': 'Bearer %s' % token }) return json.dumps(response.json(), sort_keys=True, indent=4) rootdir = path.dirname(__file__) key_path = path.join(rootdir, 'ssl', 'server.key') crt_path = path.join(rootdir, 'ssl', 'server.crt') ssl_context = (crt_path, key_path) if __name__ == '__main__': app.run('127.0.0.1', 5000, ssl_context=ssl_context) ```
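The callback above assumes the user has already been sent to LinkedIn's authorization page; a sketch of that first leg follows (the endpoint is LinkedIn's OAuth 2.0 authorization-code flow, and the `scope` value is an assumption, since `/v2/me` typically requires a profile scope):

```python
# Sketch of the authorization redirect that precedes /oauth/callback.
# client_id/redirect_uri mirror the placeholders in GetToken; scope is an assumption.
import urllib.parse

params = {
    'response_type': 'code',
    'client_id': '$client_id$',
    'redirect_uri': 'https://localhost:5000/oauth/callback',
    'scope': 'r_liteprofile',
}
auth_url = 'https://www.linkedin.com/oauth/v2/authorization?' + urllib.parse.urlencode(params)
# Visiting auth_url in a browser ends with LinkedIn redirecting back with ?code=...
```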
{ "source": "jichangjichang/Paddle", "score": 3 }
#### File: tests/unittests/test_memory_optimization_transpiler.py ```python from __future__ import print_function import unittest import paddle.fluid.layers as layers import paddle.fluid.optimizer as optimizer from paddle.fluid.framework import Program, program_guard from paddle.fluid.transpiler import memory_optimize class TestControlFlowGraph(unittest.TestCase): def setUp(self): program = Program() with program_guard(program, startup_program=Program()): x = layers.data(name='x', shape=[13], dtype='float32') y_predict = layers.fc(input=x, size=1, act=None) y = layers.data(name='y', shape=[1], dtype='float32') cost = layers.square_error_cost(input=y_predict, label=y) avg_cost = layers.mean(cost) opt = optimizer.SGD(learning_rate=0.001) opt = opt.minimize(avg_cost) self.program = program def test_control_flow_graph(self): print("before optimization") print(str(self.program)) result_program = memory_optimize(self.program) print("after optimization") print(str(result_program)) class TestMemoryTranspiler2(unittest.TestCase): def setUp(self): program = Program() with program_guard(program, startup_program=Program()): x = layers.data(name='x', shape=[13], dtype='float32') fc = layers.fc(input=x, size=10, act=None) reshape = layers.reshape(x=fc, shape=[-1, 2, 5]) fc = layers.reshape(x=reshape, shape=[-1, 5, 2]) y_predict = layers.fc(input=fc, size=1, act=None) y = layers.data(name='y', shape=[1], dtype='float32') cost = layers.square_error_cost(input=y_predict, label=y) avg_cost = layers.mean(cost) opt = optimizer.SGD(learning_rate=0.001) opt.minimize(avg_cost) self.program = program def test_inplace_ops(self): print("before optimization") print(str(self.program)) result_program = memory_optimize(self.program) print("after optimization") print(str(result_program)) if __name__ == "__main__": unittest.main() ``` #### File: tests/unittests/test_roi_pool_op.py ```python from __future__ import print_function import unittest import numpy as np import math import sys import paddle.compat as cpt from op_test import OpTest class TestROIPoolOp(OpTest): def set_data(self): self.init_test_case() self.make_rois() self.calc_roi_pool() self.inputs = {'X': self.x, 'ROIs': (self.rois[:, 1:5], self.rois_lod)} self.attrs = { 'spatial_scale': self.spatial_scale, 'pooled_height': self.pooled_height, 'pooled_width': self.pooled_width } self.outputs = {'Out': self.outs, 'Argmax': self.argmaxes} def init_test_case(self): self.batch_size = 3 self.channels = 3 self.height = 6 self.width = 4 # n, c, h, w self.x_dim = (self.batch_size, self.channels, self.height, self.width) self.spatial_scale = 1.0 / 4.0 self.pooled_height = 2 self.pooled_width = 2 self.x = np.random.random(self.x_dim).astype('float32') def calc_roi_pool(self): out_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) argmax_data = np.zeros((self.rois_num, self.channels, self.pooled_height, self.pooled_width)) for i in range(self.rois_num): roi = self.rois[i] roi_batch_id = roi[0] roi_start_w = int(cpt.round(roi[1] * self.spatial_scale)) roi_start_h = int(cpt.round(roi[2] * self.spatial_scale)) roi_end_w = int(cpt.round(roi[3] * self.spatial_scale)) roi_end_h = int(cpt.round(roi[4] * self.spatial_scale)) roi_height = int(max(roi_end_h - roi_start_h + 1, 1)) roi_width = int(max(roi_end_w - roi_start_w + 1, 1)) x_i = self.x[roi_batch_id] bin_size_h = float(roi_height) / float(self.pooled_height) bin_size_w = float(roi_width) / float(self.pooled_width) for c in range(self.channels): for ph in range(self.pooled_height): for pw 
in range(self.pooled_width): hstart = int(math.floor(ph * bin_size_h)) wstart = int(math.floor(pw * bin_size_w)) hend = int(math.ceil((ph + 1) * bin_size_h)) wend = int(math.ceil((pw + 1) * bin_size_w)) hstart = min(max(hstart + roi_start_h, 0), self.height) hend = min(max(hend + roi_start_h, 0), self.height) wstart = min(max(wstart + roi_start_w, 0), self.width) wend = min(max(wend + roi_start_w, 0), self.width) is_empty = (hend <= hstart) or (wend <= wstart) if is_empty: out_data[i, c, ph, pw] = 0 else: out_data[i, c, ph, pw] = -sys.float_info.max argmax_data[i, c, ph, pw] = -1 for h in range(hstart, hend): for w in range(wstart, wend): if x_i[c, h, w] > out_data[i, c, ph, pw]: out_data[i, c, ph, pw] = x_i[c, h, w] argmax_data[i, c, ph, pw] = h * self.width + w self.outs = out_data.astype('float32') self.argmaxes = argmax_data.astype('int64') def make_rois(self): rois = [] self.rois_lod = [[]] for bno in range(self.batch_size): self.rois_lod[0].append(bno + 1) for i in range(bno + 1): x1 = np.random.random_integers( 0, self.width // self.spatial_scale - self.pooled_width) y1 = np.random.random_integers( 0, self.height // self.spatial_scale - self.pooled_height) x2 = np.random.random_integers(x1 + self.pooled_width, self.width // self.spatial_scale) y2 = np.random.random_integers( y1 + self.pooled_height, self.height // self.spatial_scale) roi = [bno, x1, y1, x2, y2] rois.append(roi) self.rois_num = len(rois) self.rois = np.array(rois).astype("int64") def setUp(self): self.op_type = "roi_pool" self.set_data() def test_check_output(self): self.check_output() def test_check_grad(self): self.check_grad(['X'], 'Out') if __name__ == '__main__': unittest.main() ```
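The nested loops in `calc_roi_pool` above derive each pooling bin's boundaries with floor/ceil arithmetic; here is a standalone check of that computation for one ROI dimension (values chosen for illustration):

```python
# Bin boundaries for a 5-pixel-high ROI pooled to 2 bins, mirroring calc_roi_pool:
import math

roi_height, pooled_height = 5, 2
bin_size_h = float(roi_height) / float(pooled_height)  # 2.5
for ph in range(pooled_height):
    hstart = int(math.floor(ph * bin_size_h))
    hend = int(math.ceil((ph + 1) * bin_size_h))
    print(ph, hstart, hend)  # prints (0, 0, 3) then (1, 2, 5); adjacent bins may overlap
```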
{ "source": "jichangjichang/rocBLAS", "score": 3 }
#### File: benchmarks/perf_script/performanceUtility.py ```python import signal from subprocess import check_output import errorHandler from datetime import datetime def currentUser(): try: return check_output("who", shell=True).decode().split()[0] except: print('Unhandled Exception at performanceUtility::currentUser()') raise #Details: Generate sorted numbers in radices of 2, 3 and 5 up to a given upper limit number def generate235Radices(maxSize): sizeList = list() i = 0 j = 0 k = 0 SUM = int() sumj = int() sumk = int() sumi = 1 while(True): sumj = 1 j = 0 while(True): sumk = 1 k = 0 while(True): SUM = sumi*sumj*sumk if ( SUM > maxSize ): break sizeList.append(SUM) k += 1 sumk *= 2 if (k == 0): break j += 1 sumj *= 3 if ( j == 0 and k == 0): break i += 1 sumi *= 5 sizeList.sort() return sizeList def timeout(timeout_time, default): def timeout_function(f): def f2(args): def timeout_handler(signum, frame): raise errorHandler.TimeoutException() old_handler = signal.signal(signal.SIGALRM, timeout_handler) signal.alarm(timeout_time) # trigger alarm in timeout_time seconds retval = "" try: retval = f(args) except errorHandler.TimeoutException: raise errorHandler.ApplicationException(__file__, errorHandler.TIME_OUT) except: signal.alarm(0) raise finally: # print('executing finally') signal.signal(signal.SIGALRM, old_handler) signal.alarm(0) return retval return f2 return timeout_function def logTxtOutput(fileName, mode, txt): todayFile = fileName+'-'+datetime.now().strftime('%Y-%b-%d')+'.txt' with open(todayFile, mode) as f: f.write('------\n'+txt+'\n') def log(filename, txt): with open(filename, 'a') as f: f.write(datetime.now().ctime()+'# '+txt+'\n') ```
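Two quick usage notes for the helpers above: `generate235Radices` enumerates all products 2^a·3^b·5^c up to the limit, and `timeout` is a SIGALRM-based decorator, so it only works on POSIX systems. The decorated function below is a hypothetical single-argument callable, matching the `f2(args)` signature.

```python
print(generate235Radices(20))
# -> [1, 2, 3, 4, 5, 6, 8, 9, 10, 12, 15, 16, 18, 20]

@timeout(5, None)    # abort with ApplicationException if the call exceeds 5 seconds
def run_case(args):  # hypothetical test runner taking a single argument
    return check_output(args, shell=True)
```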
{ "source": "Jichao-Wang/Calibration_ZhangZhengyou_Method", "score": 2 }
#### File: Jichao-Wang/Calibration_ZhangZhengyou_Method/calib_IR.py ```python import os import numpy as np import cv2 import glob def calib(inter_corner_shape, size_per_grid, img_dir,img_type): # criteria: only for subpix calibration, which is not used here. # criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) w,h = inter_corner_shape # cp_int: corner point in int form, save the coordinate of corner points in world space in 'int' form # like (0,0,0), (1,0,0), (2,0,0) ....,(10,7,0). cp_int = np.zeros((w*h,3), np.float32) cp_int[:,:2] = np.mgrid[0:w,0:h].T.reshape(-1,2) # cp_world: corner point in world space, save the coordinate of corner points in world space. cp_world = cp_int*size_per_grid obj_points = [] # the points in world space img_points = [] # the points in image space (relevant to obj_points) images = glob.glob(img_dir + os.sep + '**.' + img_type) for fname in images: img = cv2.imread(fname) gray_img = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) # find the corners, cp_img: corner points in pixel space. ret, cp_img = cv2.findChessboardCorners(gray_img, (w,h), None) # if ret is True, save. if ret == True: # cv2.cornerSubPix(gray_img,cp_img,(11,11),(-1,-1),criteria) obj_points.append(cp_world) img_points.append(cp_img) # view the corners cv2.drawChessboardCorners(img, (w,h), cp_img, ret) cv2.imshow('FoundCorners',img) cv2.waitKey(1) cv2.destroyAllWindows() # calibrate the camera ret, mat_inter, coff_dis, v_rot, v_trans = cv2.calibrateCamera(obj_points, img_points, gray_img.shape[::-1], None, None) print("ret:", ret) print("internal matrix:\n", mat_inter) # in the form of (k_1,k_2,p_1,p_2,k_3) print("distortion coefficients:\n", coff_dis) print("rotation vectors:\n", v_rot) print("translation vectors:\n", v_trans) # calculate the reprojection error total_error = 0 for i in range(len(obj_points)): img_points_repro, _ = cv2.projectPoints(obj_points[i], v_rot[i], v_trans[i], mat_inter, coff_dis) error = cv2.norm(img_points[i], img_points_repro, cv2.NORM_L2)/len(img_points_repro) total_error += error print("Average reprojection error: ", total_error/len(obj_points)) return mat_inter, coff_dis def dedistortion(inter_corner_shape, img_dir,img_type, save_dir, mat_inter, coff_dis): w,h = inter_corner_shape images = glob.glob(img_dir + os.sep + '**.' + img_type) for fname in images: img_name = fname.split(os.sep)[-1] img = cv2.imread(fname) newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mat_inter,coff_dis,(w,h),0,(w,h)) # free scaling parameter dst = cv2.undistort(img, mat_inter, coff_dis, None, newcameramtx) # clip the image # x,y,w,h = roi # dst = dst[y:y+h, x:x+w] cv2.imwrite(save_dir + os.sep + img_name, dst) print('Dedistorted images have been saved to: %s successfully.' %save_dir) if __name__ == '__main__': inter_corner_shape = (11,8) size_per_grid = 0.02 img_dir = ".\\pic\\IR_camera_calib_img" img_type = "png" # calibrate the camera mat_inter, coff_dis = calib(inter_corner_shape, size_per_grid, img_dir,img_type) # dedistort and save the dedistortion result. save_dir = ".\\pic\\save_dedistortion" if not os.path.exists(save_dir): os.makedirs(save_dir) dedistortion(inter_corner_shape, img_dir, img_type, save_dir, mat_inter, coff_dis) ``` #### File: Jichao-Wang/Calibration_ZhangZhengyou_Method/calib_RGB.py ```python import os import numpy as np import cv2 import glob def calib(inter_corner_shape, size_per_grid, img_dir, img_type): # criteria: only for subpix calibration, which is not used here.
# criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001) w, h = inter_corner_shape # cp_int: corner point in int form, save the coordinate of corner points in world space in 'int' form # like (0,0,0), (1,0,0), (2,0,0) ....,(10,7,0). cp_int = np.zeros((w * h, 3), np.float32) cp_int[:, :2] = np.mgrid[0:w, 0:h].T.reshape(-1, 2) # cp_world: corner point in world space, save the coordinate of corner points in world space. cp_world = cp_int * size_per_grid obj_points = [] # the points in world space img_points = [] # the points in image space (relevant to obj_points) images = glob.glob(img_dir + os.sep + '**.' + img_type) for fname in images: img = cv2.imread(fname) gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # find the corners, cp_img: corner points in pixel space. ret, cp_img = cv2.findChessboardCorners(gray_img, (w, h), None) # if ret is True, save. if ret == True: # cv2.cornerSubPix(gray_img,cp_img,(11,11),(-1,-1),criteria) obj_points.append(cp_world) img_points.append(cp_img) # view the corners cv2.drawChessboardCorners(img, (w, h), cp_img, ret) cv2.imshow('FoundCorners', img) cv2.waitKey(1) cv2.destroyAllWindows() # calibrate the camera ret, mat_inter, coff_dis, v_rot, v_trans = cv2.calibrateCamera(obj_points, img_points, gray_img.shape[::-1], None, None) print("ret:", ret) print("internal matrix:\n", mat_inter) # in the form of (k_1,k_2,p_1,p_2,k_3) print("distortion coefficients:\n", coff_dis) print("rotation vectors:\n", len(v_rot), v_rot) print("translation vectors:\n", v_trans) # calculate the reprojection error total_error = 0 for i in range(len(obj_points)): img_points_repro, _ = cv2.projectPoints(obj_points[i], v_rot[i], v_trans[i], mat_inter, coff_dis) error = cv2.norm(img_points[i], img_points_repro, cv2.NORM_L2) / len(img_points_repro) total_error += error print("Average reprojection error: ", total_error / len(obj_points)) return mat_inter, coff_dis if __name__ == '__main__': inter_corner_shape = (11, 8) size_per_grid = 0.02 img_dir = ".\\pic\\M300_zoom_camera_2_img" img_type = "jpg" calib(inter_corner_shape, size_per_grid, img_dir, img_type) ``` #### File: Jichao-Wang/Calibration_ZhangZhengyou_Method/cov.py ```python import numpy as np def forget_miu(data, forgot_factor=0.99): # exponentially weighted sum of a 1*n sample vector (currently unused) ans = 0 for i in range(len(data) - 1, -1, -1): ans = data[i] + forgot_factor * ans return ans def forget_fenmu(data, forgot_factor=0.99): # denominator: sum of the decayed weights ans = 0 for i in range(len(data)): ans = forgot_factor * ans + forgot_factor return ans def forgot_cov(wjc_data, forgot_factor=0.1): # each row is one random variable holding several samples # subtract each row's mean before weighting, so that centering actually affects the covariance for i in range(len(wjc_data)): mid_raw_mean = np.mean(wjc_data[i]) for j in range(len(wjc_data[i])): wjc_data[i][j] = wjc_data[i][j] - mid_raw_mean # apply the forgetting factor to every element (older samples decay more) forgot_data = np.zeros(wjc_data.shape) for i in range(forgot_data.shape[0]): for j in range(forgot_data.shape[1]): forgot_data[i][j] = wjc_data[i][j] * forgot_factor ** (forgot_data.shape[1] - j - 1) # print(forgot_data) # compute the covariance, using the same forgetting factor for the denominator cov_denomitor = forget_fenmu(forgot_data[0], forgot_factor) X = np.array(forgot_data) print('cov', X.dot(X.T) / cov_denomitor) return X.dot(X.T) / cov_denomitor wjc_data = np.array([[0.04, 0.02, -0.1, -0.05], [0.03, 0.04, -0.09, 0.02], [0.03, 0.04, -0.09, 0.02]]) forgot_cov(wjc_data) ```
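A quick sanity check for `forgot_cov` above (a sketch, assuming the demean-then-weight order shown): with `forgot_factor=1.0` no sample is discounted, `forget_fenmu` reduces to the sample count n, and the result should coincide with NumPy's biased covariance.

```python
import numpy as np

data = np.array([[0.04, 0.02, -0.1, -0.05],
                 [0.03, 0.04, -0.09, 0.02]])
ours = forgot_cov(data.copy(), forgot_factor=1.0)  # copy: forgot_cov mutates its input
ref = np.cov(data, bias=True)                      # population covariance, divides by n
print(np.allclose(ours, ref))                      # expected: True
```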
{ "source": "Jichao-Wang/MDOAU2-net", "score": 2 }
#### File: Jichao-Wang/MDOAU2-net/crf.py ```python import numpy as np import pydensecrf.densecrf as dcrf import cv2 try: from cv2 import imread, imwrite except ImportError: # fall back to skimage if OpenCV is not installed from skimage.io import imread, imsave imwrite = imsave from pydensecrf.utils import unary_from_labels, create_pairwise_bilateral, create_pairwise_gaussian """ original_image_path: path of the original image predicted_image_path: path of the mask predicted earlier by your own model CRF_image_path: path where the CRF post-processed result image will be saved """ def CRFs(original_image_path, predicted_image_path, CRF_image_path): print("original_image_path: ", original_image_path) img = imread(original_image_path) img = cv2.resize(img, (128, 128), interpolation=cv2.INTER_CUBIC) # convert the RGB colors of predicted_image into uint32 colors 0xbbggrr anno_rgb = imread(predicted_image_path).astype(np.uint32) anno_lbl = anno_rgb[:, :, 0] + (anno_rgb[:, :, 1] << 8) + (anno_rgb[:, :, 2] << 16) # map the uint32 colors to labels 1, 2, ... colors, labels = np.unique(anno_lbl, return_inverse=True) # If black (value 0) in your predicted_image is not a class of its own but marks an uncertain region # that should be assigned to one of the other classes, uncomment the code below. # HAS_UNK = 0 in colors # if HAS_UNK: # colors = colors[1:] # Build the mapping from predicted_image labels back to 32-bit integer colors. colorize = np.empty((len(colors), 3), np.uint8) colorize[:, 0] = (colors & 0x0000FF) colorize[:, 1] = (colors & 0x00FF00) >> 8 colorize[:, 2] = (colors & 0xFF0000) >> 16 # Count the number of classes in predicted_image. n_labels = len(set(labels.flat)) # n_labels = len(set(labels.flat)) - int(HAS_UNK) ## if there is an uncertain region, replace the line above with this one ########################### ### set up the CRF model ### ########################### use_2d = False use_2d = True ########################################################### ## It is not entirely clear when the 2D variant should be used. ## The author says "for images, the easiest way to use this library is with the DenseCRF2D class", ## and also that "the DenseCRF class can be used for generic (non-2D) dense CRFs". ## In my tests, however, the generic DenseCRF usually gives the more correct result. ########################################################### if use_2d: # use the DenseCRF2D class d = dcrf.DenseCRF2D(img.shape[1], img.shape[0], n_labels) # get the unary potentials (negative log probabilities) U = unary_from_labels(labels, n_labels, gt_prob=0.2, zero_unsure=None) # U = unary_from_labels(labels, n_labels, gt_prob=0.2, zero_unsure=HAS_UNK) ## if there is an uncertain region, replace the line above with this one d.setUnaryEnergy(U) # Add the color-independent term (position only): penalizes small spatially isolated segments, i.e. enforces a more spatially consistent segmentation. d.addPairwiseGaussian(sxy=(3, 3), compat=3, kernel=dcrf.DIAG_KERNEL, normalization=dcrf.NORMALIZE_SYMMETRIC) # Add the color-dependent term, with features (x, y, r, g, b): uses local color features to refine the segmentation. d.addPairwiseBilateral(sxy=(80, 80), srgb=(13, 13, 13), rgbim=img, compat=10, kernel=dcrf.DIAG_KERNEL, normalization=dcrf.NORMALIZE_SYMMETRIC) ''' sxy in addPairwiseGaussian is $\theta_{\gamma}$ in the formula; sxy and srgb in addPairwiseBilateral are $\theta_{\alpha}$ and $\theta_{\beta}$ ''' else: # use the generic DenseCRF class d = dcrf.DenseCRF(img.shape[1] * img.shape[0], n_labels) # get the unary potentials (negative log probabilities) U = unary_from_labels(labels, n_labels, gt_prob=0.5, zero_unsure=None) # U = unary_from_labels(labels, n_labels, gt_prob=0.7, zero_unsure=HAS_UNK) ## if there is an uncertain region, replace the line above with this one d.setUnaryEnergy(U) # Create the color-independent features and add them to the CRF. feats = create_pairwise_gaussian(sdims=(3, 3), shape=img.shape[:2]) d.addPairwiseEnergy(feats, compat=8, kernel=dcrf.DIAG_KERNEL, normalization=dcrf.NORMALIZE_SYMMETRIC) # Create the color-dependent features and add them to the CRF. feats = create_pairwise_bilateral(sdims=(80, 80), schan=(13, 13, 13), img=img, chdim=2) d.addPairwiseEnergy(feats, compat=10, kernel=dcrf.DIAG_KERNEL, normalization=dcrf.NORMALIZE_SYMMETRIC) #################################### ### run inference ### #################################### # run 10 inference iterations Q = d.inference(10) # take the most probable class for each pixel MAP = np.argmax(Q, axis=0) # convert predicted_image back to the corresponding colors and save the image MAP = colorize[MAP, :] imwrite(CRF_image_path, MAP.reshape(img.shape)) print("CRF image saved to", CRF_image_path, "!") CRFs('sample_1_image.png',
'sample_1_image_label_pre.png', 'CRF_sample1_pre.png') ``` #### File: Jichao-Wang/MDOAU2-net/preprocess.py ```python import cv2 import os import stable_seed stable_seed.setup_seed() # transform black-red label images to black-white label mode def transform_label_format(): png_path = "./d2/train/label/4/" png_names = os.listdir(png_path) print(len(png_names), "pictures") for i in range(len(png_names)): image = cv2.imread(png_path + png_names[i]) for j in range(image.shape[0]): for k in range(image.shape[1]): if image[j, k, 2] >= 127: image[j, k] = [0, 0, 0] else: image[j, k] = [255, 255, 255] cv2.imwrite(png_path + png_names[i], image) # transform_label_format() ```
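The comments in crf.py above refer to the θ parameters of the fully connected CRF of Krähenbühl and Koltun; for reference, the pairwise potential those pydensecrf calls parameterize is

$$k(\mathbf{f}_i,\mathbf{f}_j) = w^{(1)}\exp\!\left(-\frac{\lVert p_i-p_j\rVert^2}{2\theta_\alpha^2}-\frac{\lVert I_i-I_j\rVert^2}{2\theta_\beta^2}\right) + w^{(2)}\exp\!\left(-\frac{\lVert p_i-p_j\rVert^2}{2\theta_\gamma^2}\right)$$

where $p$ are pixel positions and $I$ are color vectors; the first (appearance) kernel corresponds to `addPairwiseBilateral` and the second (smoothness) kernel to `addPairwiseGaussian`.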
{ "source": "Jichao-Wang/MDOAU-net", "score": 3 }
#### File: Jichao-Wang/MDOAU-net/MDOAU_net.py ```python import torch from torch import nn class conv_block(nn.Module): def __init__(self, ch_in, ch_out): super(conv_block, self).__init__() self.conv = nn.Sequential( nn.Conv2d(ch_in, ch_out, kernel_size=11, stride=1, padding=10, dilation=2, bias=True), nn.BatchNorm2d(ch_out), nn.ReLU(inplace=True), ) def forward(self, x): x = self.conv(x) return x class multi_scaled_dilation_conv_block(nn.Module): # multi-scale preprocessing kernel def __init__(self, ch_in, ch_out, kernel_size, dilation=1): super(multi_scaled_dilation_conv_block, self).__init__() self.conv = nn.Sequential( nn.ReflectionPad2d(int((kernel_size - 1) / 2 * dilation)), nn.Conv2d(ch_in, ch_out, kernel_size=kernel_size, stride=1, dilation=dilation, bias=True), nn.BatchNorm2d(ch_out), nn.ReLU(inplace=True), ) def forward(self, x): x = self.conv(x) return x class bias_convolution(nn.Module): # multi-directional dilated convolutions, giving each pixel context from a different direction def __init__(self, ch_in, ch_out, kernel_size, dilation=1, direction=''): # default is normal convolution super(bias_convolution, self).__init__() self.direction = direction self.padding_size = int((kernel_size - 1) * dilation) # self.direction_padding = nn.ReflectionPad2d(self.padding_size) self.direction_padding_LU = nn.ReflectionPad2d((self.padding_size, 0, self.padding_size, 0)) self.direction_padding_RU = nn.ReflectionPad2d((0, self.padding_size, self.padding_size, 0)) self.direction_padding_LD = nn.ReflectionPad2d((self.padding_size, 0, 0, self.padding_size)) self.direction_padding_RD = nn.ReflectionPad2d((0, self.padding_size, 0, self.padding_size)) self.conv = nn.Sequential( nn.Conv2d(ch_in, ch_out, kernel_size=kernel_size, stride=1, dilation=dilation, bias=True), nn.BatchNorm2d(ch_out), nn.ReLU(inplace=True), ) def forward(self, x): # print(self.padding_size) # x = self.direction_padding(x) x_LU = self.direction_padding_LU(x) x_RU = self.direction_padding_RU(x) x_LD = self.direction_padding_LD(x) x_RD = self.direction_padding_RD(x) if self.direction == 'LU': # padding to left up return self.conv(x_LU) elif self.direction == 'LD': # padding to left down return self.conv(x_LD) elif self.direction == 'RU': # padding to right up return self.conv(x_RU) elif self.direction == 'RD': # padding to right down return self.conv(x_RD) else: # normal padding return self.conv(x) class offset_convolution(nn.Module): def __init__(self, ch_in, ch_out): super(offset_convolution, self).__init__() self.LU_bias_convolution = bias_convolution(ch_in=ch_in, ch_out=ch_out, kernel_size=7, dilation=1, direction='LU') self.LD_bias_convolution = bias_convolution(ch_in=ch_in, ch_out=ch_out, kernel_size=7, dilation=1, direction='LD') self.RU_bias_convolution = bias_convolution(ch_in=ch_in, ch_out=ch_out, kernel_size=7, dilation=1, direction='RU') self.RD_bias_convolution = bias_convolution(ch_in=ch_in, ch_out=ch_out, kernel_size=7, dilation=1, direction='RD') self.final_conv = nn.Conv2d(ch_out * 4, ch_out, kernel_size=3, stride=1, padding=1) self.BN = nn.BatchNorm2d(ch_out) self.activation = nn.ReLU(inplace=True) def forward(self, x): LU_BC = self.LU_bias_convolution(x) LD_BC = self.LD_bias_convolution(x) RU_BC = self.RU_bias_convolution(x) RD_BC = self.RD_bias_convolution(x) d = torch.cat((LU_BC, LD_BC, RU_BC, RD_BC), dim=1) d = self.final_conv(d) d = self.BN(d) d = self.activation(d) return d class up_conv(nn.Module): def __init__(self, ch_in, ch_out): super(up_conv, self).__init__() self.up = nn.Sequential( nn.Upsample(scale_factor=2), nn.Conv2d(ch_in, ch_out, kernel_size=3, stride=1, padding=1, bias=True),
nn.BatchNorm2d(ch_out), nn.ReLU(inplace=True) ) def forward(self, x): x = self.up(x) return x class Attention_block(nn.Module): def __init__(self, F_g, F_l, F_int): super(Attention_block, self).__init__() self.W_g = nn.Sequential( nn.Conv2d(F_g, F_int, kernel_size=1, stride=1, padding=0, bias=True), nn.BatchNorm2d(F_int) ) self.W_x = nn.Sequential( nn.Conv2d(F_l, F_int, kernel_size=1, stride=1, padding=0, bias=True), nn.BatchNorm2d(F_int) ) self.psi = nn.Sequential( nn.Conv2d(F_int, 1, kernel_size=1, stride=1, padding=0, bias=True), nn.BatchNorm2d(1), nn.Sigmoid() ) self.relu = nn.ReLU(inplace=True) def forward(self, g, x): # convolution of the down-sampled gating signal; here the gate acts as the query g1 = self.W_g(g) # convolution of the up-sampled feature x x1 = self.W_x(x) # concat + relu psi = self.relu(g1 + x1) # reduce channels to 1 and apply Sigmoid to obtain the weight matrix psi = self.psi(psi) # return the re-weighted x return x * psi class MDOAU_net(nn.Module): # Fused multi-scaled convolution block # followed by my_model1 as the backbone (an attention U-Net with fewer channels and a larger receptive field) # with offset convolutions in between def __init__(self, img_ch=1, output_ch=1): super(MDOAU_net, self).__init__() # offset_convolution() self.Maxpool = nn.MaxPool2d(kernel_size=2, stride=2) self.multi_scale_1 = multi_scaled_dilation_conv_block(img_ch, 16, kernel_size=3, dilation=1) self.multi_scale_2 = multi_scaled_dilation_conv_block(img_ch, 16, kernel_size=5, dilation=1) self.multi_scale_3 = multi_scaled_dilation_conv_block(img_ch, 16, kernel_size=7, dilation=2) self.multi_scale_4 = multi_scaled_dilation_conv_block(img_ch, 16, kernel_size=11, dilation=2) self.multi_scale_5 = multi_scaled_dilation_conv_block(img_ch, 16, kernel_size=15, dilation=3) self.Conv1 = conv_block(ch_in=16 * 5, ch_out=8) self.Conv2 = conv_block(ch_in=8, ch_out=16) self.Conv3 = conv_block(ch_in=16, ch_out=32) self.Conv4 = conv_block(ch_in=32, ch_out=64) self.Conv5 = conv_block(ch_in=64, ch_out=128) self.o1 = offset_convolution(ch_in=8, ch_out=8) self.o2 = offset_convolution(ch_in=16, ch_out=16) self.o3 = offset_convolution(ch_in=32, ch_out=32) self.o4 = offset_convolution(ch_in=64, ch_out=64) self.Up5 = up_conv(ch_in=128, ch_out=64) self.Att5 = Attention_block(F_g=64, F_l=64, F_int=32) self.Up_conv5 = conv_block(ch_in=128, ch_out=64) self.Up4 = up_conv(ch_in=64, ch_out=32) self.Att4 = Attention_block(F_g=32, F_l=32, F_int=16) self.Up_conv4 = conv_block(ch_in=64, ch_out=32) self.Up3 = up_conv(ch_in=32, ch_out=16) self.Att3 = Attention_block(F_g=16, F_l=16, F_int=8) self.Up_conv3 = conv_block(ch_in=32, ch_out=16) self.Up2 = up_conv(ch_in=16, ch_out=8) self.Att2 = Attention_block(F_g=8, F_l=8, F_int=4) self.Up_conv2 = conv_block(ch_in=16, ch_out=8) self.Conv_1x1 = nn.Conv2d(8, output_ch, kernel_size=1, stride=1, padding=0) self.sigmoid = nn.Sigmoid() self.Conv_1x1_1 = nn.Conv2d(8, output_ch, kernel_size=1, stride=1, padding=0) def forward(self, x, train_flag=False): # multi_scale_generator x_pre_1 = self.multi_scale_1(x) x_pre_2 = self.multi_scale_2(x) x_pre_3 = self.multi_scale_3(x) x_pre_4 = self.multi_scale_4(x) x_pre_5 = self.multi_scale_5(x) muti_scale_x = torch.cat((x_pre_1, x_pre_2, x_pre_3, x_pre_4, x_pre_5), dim=1) # encoding path x1 = self.Conv1(muti_scale_x) x2 = self.Maxpool(x1) x2 = self.Conv2(x2) x3 = self.Maxpool(x2) x3 = self.Conv3(x3) x4 = self.Maxpool(x3) x4 = self.Conv4(x4) x5 = self.Maxpool(x4) x5 = self.Conv5(x5) # offset convolution o1 = self.o1(x1) o2 = self.o2(x2) o3 = self.o3(x3) o4 = self.o4(x4) # decoding + concat path d5 = self.Up5(x5) x4 = self.Att5(g=d5, x=o4) d5 = torch.cat((x4, d5), dim=1) d5 = self.Up_conv5(d5) d4 = self.Up4(d5) x3 = self.Att4(g=d4, x=o3) d4 =
torch.cat((x3, d4), dim=1) d4 = self.Up_conv4(d4) d3 = self.Up3(d4) x2 = self.Att3(g=d3, x=o2) d3 = torch.cat((x2, d3), dim=1) d3 = self.Up_conv3(d3) d2 = self.Up2(d3) x1 = self.Att2(g=d2, x=o1) d2 = torch.cat((x1, d2), dim=1) d2 = self.Up_conv2(d2) d1 = self.Conv_1x1(d2) if train_flag: return d1 else: return self.sigmoid(d1) ``` #### File: Jichao-Wang/MDOAU-net/wjc_core.py ```python import torch import numpy as np import os import matplotlib.pyplot as plt import PIL.Image as Image from torch.utils.data import DataLoader from torch import nn, optim from torchvision.transforms import transforms from dataset import Train_Dataset, Validation_Dataset, Test_Dataset import skimage.io as io import shutil threshold = 0.5 # binary-classification threshold # use CUDA if available device = torch.device("cuda" if torch.cuda.is_available() else "cpu") x_transforms = transforms.Compose([ transforms.ToTensor(), transforms.Normalize([0.5], [0.5]) ]) # the mask only needs to be converted to a tensor y_transforms = transforms.ToTensor() def makedir(new_path): folder = os.path.exists(new_path) if not folder: os.makedirs(new_path) else: shutil.rmtree(new_path) os.makedirs(new_path) def init_work_space(args): makedir('./' + args.model_name + '/results') makedir(args.ckpt) makedir('./' + args.model_name + '/runs') def train_model(args, writer, model, criterion, optimizer, dataload, regular=''): save_epoch, best_val_acc = 0, -0.1 for epoch in range(args.epoch): print('Epoch {}/{}'.format(epoch, args.epoch - 1)) print('-' * 10) dt_size = len(dataload.dataset) epoch_loss = 0 epoch_correct_pixels, epoch_total_pixels = [], [] step = 0 for x, y in dataload: step += 1 inputs = x.to(device) labels = y.to(device) # zero the parameter gradients optimizer.zero_grad() # forward outputs = model(inputs).to(device) del inputs loss = criterion(outputs, labels) loss.backward() optimizer.step() # calculate accuracy predicted = outputs.detach().numpy() predicted[predicted >= threshold] = 1 predicted[predicted < threshold] = 0 correct = (predicted == labels.detach().numpy()).sum() del predicted pixel_num = 1.0 for i in range(len(labels.size())): pixel_num *= labels.size()[i] epoch_correct_pixels.append(correct) epoch_total_pixels.append(pixel_num) epoch_loss += float(loss.item()) del labels del loss val_accuracy = validation(args, model, method='train') epoch_loss = epoch_loss / step epoch_train_accuracy = np.mean(epoch_correct_pixels) / np.mean(epoch_total_pixels) print("epoch %d loss:%0.3f train accuracy:%0.3f val accuracy:%0.3f" % ( epoch, epoch_loss, epoch_train_accuracy, val_accuracy)) writer.add_scalar('loss', epoch_loss / step, global_step=epoch) writer.add_scalar('train accuracy', epoch_train_accuracy, global_step=epoch) writer.add_scalar('validated accuracy', val_accuracy, global_step=epoch) writer.add_scalars('accuracy/group', {'train_accuracy': epoch_train_accuracy, 'validated accuracy': val_accuracy}, global_step=epoch) if best_val_acc < val_accuracy: save_epoch = epoch torch.save(model, args.ckpt + '/' + args.model_name + '.pth') best_val_acc = val_accuracy print("Model:", args.model_name) print("Dataset:", args.data_file) print("Best epoch is " + str(save_epoch)) print("Best val acc is " + str(best_val_acc)) return model # train the model def train(args, writer, model, regular=''): model.to(device) criterion = nn.BCELoss() optimizer = optim.Adam(model.parameters()) liver_dataset = Train_Dataset(args.data_file, transform=x_transforms, target_transform=y_transforms) dataloaders = DataLoader(liver_dataset, batch_size=args.batch_size, shuffle=True, num_workers=1) train_model(args, writer, model,
criterion, optimizer, dataloaders, regular) # evaluate the model on data that has both images and labels def validation(args, model, print_each=False, method='train'): liver_dataset = Validation_Dataset(args.data_file, transform=x_transforms, target_transform=y_transforms) # dataloaders = DataLoader(liver_dataset, batch_size=1) if method == 'train': dataloaders = DataLoader(liver_dataset, batch_size=8) model.eval() epoch_correct_pixels, epoch_total_pixels = [], [] with torch.no_grad(): for x, y, x_path in dataloaders: inputs = x.to(device) labels = y.to(device) predicted = model(inputs).detach().numpy() predicted[predicted >= threshold] = 1 predicted[predicted < threshold] = 0 correct = (predicted == labels.detach().numpy()).sum() del predicted pixel_num = 1.0 for i in range(len(labels.size())): pixel_num *= labels.size()[i] epoch_correct_pixels.append(correct) epoch_total_pixels.append(pixel_num) if print_each: print(x_path, 'acc', correct / pixel_num) return np.mean(epoch_correct_pixels) / np.mean(epoch_total_pixels) # run inference on data that has images but no labels def test(args, save_gray=False, manual=False, weight_path=''): model = None if not manual: model = torch.load(args.ckpt + '/' + args.model_name + '.pth', map_location='cpu') if manual: model = torch.load(weight_path, map_location='cpu') # load a specific model weight file liver_dataset = Test_Dataset(args.data_file, transform=x_transforms, target_transform=y_transforms) dataloaders = DataLoader(liver_dataset, batch_size=1) model.eval() with torch.no_grad(): for x, pic_name_i in dataloaders: pic_name_i = pic_name_i[0] io.imsave(args.model_name + "/results/" + pic_name_i.split('.')[0] + "_x.png", torch.squeeze(x).numpy()) predict = model(x) predict = torch.squeeze(predict).detach().numpy() if save_gray: io.imsave(args.model_name + "/results/" + pic_name_i.split('.')[0] + "_gray_pre.png", predict) predict[predict >= threshold] = 1 predict[predict < threshold] = 0 io.imsave(args.model_name + "/results/" + pic_name_i.split('.')[0] + "_label_pre.png", predict) class SaveOutput: def __init__(self): self.outputs = [] def __call__(self, module, module_in, module_out): self.outputs.append(module_out) def clear(self): self.outputs = [] def model_forward_visualization(image_path, weight_path, model_name=''): """Given one test image and trained model weights, visualize the feature maps of every convolution step.""" model = torch.load(weight_path, map_location='cpu') # load trained model save_output = SaveOutput() # register hooks for each layer hook_handles, k1, k2 = [], 0, 0 for layer in model.modules(): k1 += 1 if isinstance(layer, torch.nn.modules.conv.Conv2d): k2 += 1 handle = layer.register_forward_hook(save_output) hook_handles.append(handle) x = x_transforms(Image.open(image_path).convert('L').resize(size=(512, 512))).unsqueeze(0) print(x, x.dtype) y = model(x) def module_output_to_numpy(tensor): return tensor.detach().to('cpu').numpy() for layer_idx in range(len(save_output.outputs)): images = module_output_to_numpy(save_output.outputs[layer_idx]) # each entry of save_output.outputs holds one hooked convolution layer's output print(type(images)) print(images.shape) mid_1 = images.shape[1] mid_idx = 0 while mid_idx < mid_1: # mid_idx is the index of the feature map with plt.style.context("seaborn-white"): plt.figure(frameon=False) for idx in range(64): # idx is the index of the subplot if mid_idx == mid_1: break plt.subplot(8, 8, idx + 1) plt.imshow(images[0, mid_idx]) mid_idx += 1 plt.setp(plt.gcf().get_axes(), xticks=[], yticks=[]) plt.savefig( './model_visualization/' + model_name + '/layer_' + str(layer_idx) + '_mid_' + str(mid_idx) + '.png') plt.cla() plt.close('all') def model_print(model): print(sum(p.numel() for p in
model.parameters())) ```
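A minimal smoke test of the model defined above; the input shape is an assumption (single-channel image whose height/width are divisible by 16, matching the four max-pool stages):

```python
import torch

model = MDOAU_net(img_ch=1, output_ch=1)
x = torch.randn(1, 1, 128, 128)
print(model(x).shape)                   # inference: sigmoid output, (1, 1, 128, 128)
print(model(x, train_flag=True).shape)  # training: raw logits before the sigmoid
```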
{ "source": "Jichao-Wang/point_cloud_match_by_Apriltags", "score": 2 }
#### File: Jichao-Wang/point_cloud_match_by_Apriltags/match.py ```python from image_geometry import PinholeCameraModel import cv2 from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image, PointField, CameraInfo import numpy as np import tf import sensor_msgs.point_cloud2 import message_filters # to synchronize topic subscription import rospy from sensor_msgs.msg import PointCloud2, Image from sensor_msgs.point_cloud2 import read_points from tf.transformations import euler_from_quaternion, quaternion_from_euler from nav_msgs.msg import Odometry from std_msgs.msg import Float64MultiArray import pandas as pd import os cameraModel = PinholeCameraModel() pub_image = {} pub_pc, pub_pc_tag = {}, {} pub_to_apriltag_image = {} sub_to_apriltag_meg = {} detected_tags_list = [] # all coordinates of detected tags so far # pub_pose = rospy.Publisher('lidar_pose', Pose, queue_size=10) isRotMatSet = False # rotationMatrix_lidar_camera = np.array([[0.00561514, -0.999907, -0.0124428,-0.0171173], # [0.0304767, 0.0126084, -0.999456, -0.0587173], # [0.99952, 0.00523287, 0.0305447, -0.0324206], # [0, 0, 0, 1]]) #rotationMatrix_lidar_camera = np.array([[-0.0443173, -0.998888, -0.0160588, 0.0677557], # [0.0297446, 0.0147482, -0.999449, -0.019818], # [0.998575, -0.0447705, 0.0290579, 0.24684], # [0, 0, 0, 1]]) rotationMatrix_lidar_camera = np.array([[0.0084294, -0.999568, -0.0281508, 0.00157387], [-0.0254016, 0.0279287, -0.999287, 0.0626685], [0.999642, 0.00913847, -0.0251552, -0.0269682], [0, 0, 0, 1]]) cv_image = [] bridge = {} saving_csv_index_i = 0 # used for csv file saving name while ('pc' + str(saving_csv_index_i) + '_tags_location.csv') in os.listdir(): saving_csv_index_i += 1 print('csv saving file is ready.') def april_tag_ouput_decoder(april_tag_list_msg): ans = [] april_tag_list = list(april_tag_list_msg.data) # print('april_tag_list', april_tag_list, type(april_tag_list), len(april_tag_list)) for i in range(0, len(april_tag_list), 4): ans.append(april_tag_list[i:i + 4]) # ans is [[id,x,y,z]*n] return ans def create_pc_fields(): fields = [] fields.append(PointField('x', 0, PointField.FLOAT32, 1)) fields.append(PointField('y', 4, PointField.FLOAT32, 1)) fields.append(PointField('z', 8, PointField.FLOAT32, 1)) fields.append(PointField('intensity', 12, PointField.FLOAT32, 1)) fields.append(PointField('r', 16, PointField.FLOAT32, 1)) fields.append(PointField('g', 20, PointField.FLOAT32, 1)) fields.append(PointField('b', 24, PointField.FLOAT32, 1)) return fields def RGBD_callback(image_data, pointCloud_data): global cv_image global bridge global cameraModel global isRotMatSet global rotationMatrix global pub_image global pub_pc global pub_pc_tag global transformMatrix global detected_tags_list tag_centers_points, new_points = [], [] # timestr_image = "%.6f" % image_data.header.stamp.to_sec() # print(timestr_image) # timestr_point = "%.6f" % pointCloud_data.header.stamp.to_sec() # print(timestr_point) # print("new frame received.") try: cv_image = bridge.imgmsg_to_cv2(image_data, "bgr8") width, height = cv_image.shape[:2] # print "cv_image w h = "+str(width) +", "+ str(height) except CvBridgeError as e: print(e) if (isRotMatSet): # get location of each April_tag each time pub_to_apriltag_image.publish(image_data) april_tag_msg = rospy.wait_for_message("/pub_tag_msg", Float64MultiArray) april_tag_infos_one_frame = april_tag_ouput_decoder(april_tag_msg) # info of tags in one frame # print(april_tag_infos_one_frame) # translate the coordinate for i in 
range(len(april_tag_infos_one_frame)): mid_coordinate = [april_tag_infos_one_frame[i][1], april_tag_infos_one_frame[i][2], april_tag_infos_one_frame[i][3], 1.0] # transformedPoint = rotationMatrix_lidar_camera.dot(transformMatrix.dot(mid_coordinate)) transformedPoint = np.linalg.inv(transformMatrix).dot( np.linalg.inv(rotationMatrix_lidar_camera).dot(mid_coordinate)) april_tag_infos_one_frame[i] = [april_tag_infos_one_frame[i][0], transformedPoint[0], transformedPoint[1], transformedPoint[2]] tag_centers_points.append([transformedPoint[0], transformedPoint[1], transformedPoint[2], int(april_tag_infos_one_frame[i][0]), 255, 0, 0]) # add new tag coordinate to the whole tag coordinate array. for i in range(len(april_tag_infos_one_frame)): detected_tags_list.append(april_tag_infos_one_frame[i]) cv_temp = [] cv_temp = cv_image.copy() width, height = cv_temp.shape[:2] for point in (read_points(pointCloud_data, skip_nans=True)): pointXYZ = [point[0], point[1], point[2], 1.0] intensity = point[3] intensityInt = int(intensity * intensity * intensity) transformedPoint = rotationMatrix_lidar_camera.dot(transformMatrix.dot(pointXYZ)) if transformedPoint[2] < 0: continue projected_2d_point = cameraModel.project3dToPixel(transformedPoint) # projection if projected_2d_point[0] >= 10 and projected_2d_point[0] <= height - 10 and projected_2d_point[1] >= 10 and \ projected_2d_point[1] <= width - 10: cv2.circle(cv_temp, (int(projected_2d_point[0]), int(projected_2d_point[1])), 5, (intensityInt % 255, (intensityInt / 255) % 255, (intensityInt / 255 / 255)), thickness=-1) [b, g, r] = cv_image[int(projected_2d_point[1]), int(projected_2d_point[0])] new_points.append([point[0], point[1], point[2], intensity, r, g, b]) try: pub_image.publish(bridge.cv2_to_imgmsg(cv_temp, "bgr8")) new_pointCloud = sensor_msgs.point_cloud2.create_cloud(pointCloud_data.header, create_pc_fields(), new_points) pc_tag_centers = sensor_msgs.point_cloud2.create_cloud(pointCloud_data.header, create_pc_fields(), tag_centers_points) pub_pc.publish(new_pointCloud) pub_pc_tag.publish(pc_tag_centers) except CvBridgeError as e: print(e) else: print('Waiting for pose info from sub_pose') # save tag location of the point cloud into csv for matching (and debugging). 
detected_tags = pd.DataFrame(np.array(detected_tags_list).reshape((-1, 4)), columns=['tag_id', 'tag_x', 'tag_y', 'tag_z']) detected_tags.to_csv('pc' + str(saving_csv_index_i) + '_tags_location.csv', index=False) def poseCallback(data): global isRotMatSet global rotationMatrix global transformMatrix pose = data # print("lidarToRGB, pose received") quaternion = ( pose.pose.pose.orientation.x, pose.pose.pose.orientation.y, pose.pose.pose.orientation.z, pose.pose.pose.orientation.w) euler = tf.transformations.euler_from_quaternion(quaternion) roll = euler[0] pitch = euler[1] yaw = euler[2] translation = [pose.pose.pose.position.x, pose.pose.pose.position.y, pose.pose.pose.position.z, -1.0] rotationMatrix = tf.transformations.euler_matrix(roll, pitch, yaw) transformMatrix = rotationMatrix.transpose() transformMatrix[:, 3] = -rotationMatrix.transpose().dot(translation) # print(transformMatrix) isRotMatSet = True def cameraCallback(data): global cameraModel cameraModel.fromCameraInfo(data) def lidarToRGB(): global pub_image global pub_pc global pub_pc_tag global bridge global pub_to_apriltag_image global sub_to_apriltag_meg rospy.init_node('lidar_to_rgb', anonymous=True) bridge = CvBridge() sub_pose = rospy.resolve_name('/Odometry') # subscribe to odometry sub_pose = rospy.Subscriber(sub_pose, Odometry, callback=poseCallback, queue_size=1) camera = rospy.Subscriber(rospy.resolve_name('/camera/color/camera_info'), CameraInfo, callback=cameraCallback, queue_size=1) pub_to_apriltag_image = rospy.Publisher("/input_apriltag_image", Image, queue_size=10) pub_image = rospy.Publisher("image_color_with_proj", Image, queue_size=1) pub_pc = rospy.Publisher("pointcloud_color", PointCloud2, queue_size=1) pub_pc_tag = rospy.Publisher("pointcloud_tags_center", PointCloud2, queue_size=1) sub_image = message_filters.Subscriber('/camera/color/image_raw', Image) sub_pointcloud = message_filters.Subscriber('/cloud_registered', PointCloud2) ts = message_filters.ApproximateTimeSynchronizer([sub_image, sub_pointcloud], 1, 0.05) ts.registerCallback(RGBD_callback) rospy.spin() if __name__ == '__main__': try: lidarToRGB() except rospy.ROSInterruptException: pass ```
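For reference, a sketch of the transform chain used in the callback above (the frame names are assumptions inferred from the variable names): a world point is projected into the camera as p_cam = R_lidar_camera · (T_world_lidar · p_world), and a tag detected in the camera frame is mapped back to the world by applying the two inverses, exactly as the `np.linalg.inv` calls do.

```python
import numpy as np

def camera_to_world(p_cam_h, rotationMatrix_lidar_camera, transformMatrix):
    """p_cam_h: homogeneous 4-vector in the camera frame; returns the world-frame point."""
    p_lidar = np.linalg.inv(rotationMatrix_lidar_camera) @ p_cam_h  # camera -> lidar
    return np.linalg.inv(transformMatrix) @ p_lidar                 # lidar -> world
```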
{ "source": "Jichao-Wang/Point_cloud_projection_segmentation-", "score": 2 }
#### File: Point_cloud_projection_segmentation-/demo/color3.py ```python import os import sys print('Current working path', os.getcwd()) # run this script from "/home/jichao/python_ws/Swin-Transformer-Semantic-Segmentation-main/demo" print('Current Python interpreter path:', sys.executable) parent_path = os.path.dirname(sys.path[0]) print('Import libraries from', parent_path) if parent_path not in sys.path: sys.path.append(parent_path) import cv2 from argparse import ArgumentParser from mmseg.apis import inference_segmentor, init_segmentor, show_result_pyplot from mmseg.core.evaluation import get_palette from image_geometry import PinholeCameraModel from cv_bridge import CvBridge, CvBridgeError from sensor_msgs.msg import Image, PointField, CameraInfo import numpy as np import tf import sensor_msgs.point_cloud2 import message_filters # to synchronize topic subscription import rospy from sensor_msgs.msg import PointCloud2, Image from sensor_msgs.point_cloud2 import read_points from tf.transformations import euler_from_quaternion, quaternion_from_euler from nav_msgs.msg import Odometry cameraModel = PinholeCameraModel() pub_image = {} pub_pc = {} # pub_pose = rospy.Publisher('lidar_pose', Pose, queue_size=10) isRotMatSet = False # rotationMatrix_lidar_camera = np.array([[0.00561514, -0.999907, -0.0124428,-0.0171173], # [0.0304767, 0.0126084, -0.999456, -0.0587173], # [0.99952, 0.00523287, 0.0305447, -0.0324206], # [0, 0, 0, 1]]) rotationMatrix_lidar_camera = np.array([[-0.0443173, -0.998888, -0.0160588, 0.0677557], [0.0297446, 0.0147482, -0.999449, -0.019818], [0.998575, -0.0447705, 0.0290579, 0.24684], [0, 0, 0, 1]]) cv_image = [] bridge = {} image_count = 0 def create_pc_fields(): fields = [] fields.append(PointField('x', 0, PointField.FLOAT32, 1)) fields.append(PointField('y', 4, PointField.FLOAT32, 1)) fields.append(PointField('z', 8, PointField.FLOAT32, 1)) fields.append(PointField('intensity', 12, PointField.FLOAT32, 1)) fields.append(PointField('r', 16, PointField.FLOAT32, 1)) fields.append(PointField('g', 20, PointField.FLOAT32, 1)) fields.append(PointField('b', 24, PointField.FLOAT32, 1)) return fields def RGBD_callback(image_data, pointCloud_data): global cv_image global bridge global cameraModel global isRotMatSet global rotationMatrix global pub_image global pub_pc global transformMatrix global image_count # timestr_image = "%.6f" % image_data.header.stamp.to_sec() # print(timestr_image) # timestr_point = "%.6f" % pointCloud_data.header.stamp.to_sec() # print(timestr_point) # print("new frame received.") try: cv_image = bridge.imgmsg_to_cv2(image_data, "bgr8") width, height = cv_image.shape[:2] # print "cv_image w h = "+str(width) +", "+ str(height) cv2.imwrite('./image/' + str(image_count) + '.png', cv_image) cv2.imwrite('demo2.png', cv_image) image_count += 1 except CvBridgeError as e: print(e) if (isRotMatSet): result = inference_segmentor(model, args.img) segment_image = show_result_pyplot(model, args.img, result, get_palette(args.palette), display=False) cv2.imwrite('./demo2_segmented.png', segment_image) cv_temp = [] # cv_temp = cv_image.copy() cv_temp = segment_image.copy() width, height = cv_temp.shape[:2] new_points = [] for point in (read_points(pointCloud_data, skip_nans=True)): pointXYZ = [point[0], point[1], point[2], 1.0] intensity = point[3] intensityInt = int(intensity * intensity * intensity) transformedPoint = rotationMatrix_lidar_camera.dot(transformMatrix.dot(pointXYZ)) if transformedPoint[2] < 0: continue projected_2d_point =
cameraModel.project3dToPixel(transformedPoint) # projection if projected_2d_point[0] >= 10 and projected_2d_point[0] <= height - 10 and projected_2d_point[1] >= 10 and \ projected_2d_point[1] <= width - 10: cv2.circle(cv_temp, (int(projected_2d_point[0]), int(projected_2d_point[1])), 5, (intensityInt % 255, (intensityInt / 255) % 255, (intensityInt / 255 / 255)), thickness=-1) [b, g, r] = segment_image[int(projected_2d_point[1]), int(projected_2d_point[0])] intensity = result[0][int(projected_2d_point[1])][int(projected_2d_point[0])] # used as label of segmentation new_points.append([point[0], point[1], point[2], intensity, r, g, b]) try: pub_image.publish(bridge.cv2_to_imgmsg(cv_temp, "bgr8")) new_pointCloud = sensor_msgs.point_cloud2.create_cloud(pointCloud_data.header, create_pc_fields(), new_points) pub_pc.publish(new_pointCloud) except CvBridgeError as e: print(e) else: print('Waiting for pose info from sub_pose') def poseCallback(data): global isRotMatSet global rotationMatrix global transformMatrix pose = data # print("lidarToRGB, pose received") quaternion = ( pose.pose.pose.orientation.x, pose.pose.pose.orientation.y, pose.pose.pose.orientation.z, pose.pose.pose.orientation.w) euler = tf.transformations.euler_from_quaternion(quaternion) roll = euler[0] pitch = euler[1] yaw = euler[2] translation = [pose.pose.pose.position.x, pose.pose.pose.position.y, pose.pose.pose.position.z, -1.0] rotationMatrix = tf.transformations.euler_matrix(roll, pitch, yaw) transformMatrix = rotationMatrix.transpose() transformMatrix[:, 3] = -rotationMatrix.transpose().dot(translation) # print(transformMatrix) isRotMatSet = True def cameraCallback(data): global cameraModel cameraModel.fromCameraInfo(data) def lidarToRGB(): global pub_image global pub_pc global bridge rospy.init_node('lidar_to_rgb', anonymous=True) bridge = CvBridge() sub_pose = rospy.resolve_name('/Odometry') # subscribe to odometry sub_pose = rospy.Subscriber(sub_pose, Odometry, callback=poseCallback, queue_size=1) camera = rospy.Subscriber(rospy.resolve_name('/camera/color/camera_info'), CameraInfo, callback=cameraCallback, queue_size=1) pub_image = rospy.Publisher("image_color_with_proj", Image, queue_size=1) pub_pc = rospy.Publisher("pointcloud_color", PointCloud2, queue_size=1) sub_image = message_filters.Subscriber('/camera/color/image_raw', Image) sub_pointcloud = message_filters.Subscriber('/cloud_registered', PointCloud2) ts = message_filters.ApproximateTimeSynchronizer([sub_image, sub_pointcloud], 1, 0.05) ts.registerCallback(RGBD_callback) rospy.spin() image_file = './demo2.png' config_file = '../configs/swin/upernet_swin_tiny_patch4_window7_512x512_160k_ade20k.py' checkpoint_file = '../upernet_swin_tiny_patch4_window7_512x512.pth' parser = ArgumentParser() parser.add_argument('--img', default=image_file, help='Image file') parser.add_argument('--config', default=config_file, help='Config file') parser.add_argument('--checkpoint', default=checkpoint_file, help='Checkpoint file') parser.add_argument('--device', default='cuda:0', help='Device used for inference') parser.add_argument( '--palette', default='cityscapes', help='Color palette used for segmentation map') args = parser.parse_args() # build the model from a config file and a checkpoint file model = init_segmentor(args.config, args.checkpoint, device=args.device) if __name__ == '__main__': try: lidarToRGB() except rospy.ROSInterruptException: pass ```
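`project3dToPixel` above performs the standard pinhole projection; a minimal standalone version for reference (a sketch equivalent in spirit to the `image_geometry` call, ignoring distortion; fx, fy, cx, cy are the intrinsics that would come from the CameraInfo message):

```python
# Minimal pinhole projection of a camera-frame point (X, Y, Z), Z > 0.
def project3d_to_pixel(X, Y, Z, fx, fy, cx, cy):
    u = fx * X / Z + cx  # column in pixels
    v = fy * Y / Z + cy  # row in pixels
    return u, v
```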
{ "source": "jichengyuan/mmclassificationCust", "score": 3 }
#### File: jichengyuan/mmclassificationCust/convert_data.py ```python import os import glob # generate train.txt and val.txt # change this to your own path root_dir = "G:\Dataset\imagenette2_tiny" # this directory must contain the three folders train, val and meta train_dir = os.path.join(root_dir, "train") val_dir = os.path.join(root_dir, "val") meta_dir = os.path.join(root_dir, "meta") def generate_txt(images_dir,map_dict): # collect all file names imgs_dirs = glob.glob(images_dir+"\\*\\*") # open the output file for writing typename = images_dir.split("\\")[-1] target_txt_path = os.path.join(meta_dir,typename+".txt") print(target_txt_path) f = open(target_txt_path,"w") # iterate over all image names for img_dir in imgs_dirs: # get the first-level directory name (the class folder) filename = img_dir.split("\\")[-2] f_path = img_dir.split("\\")[-1] relate_name = filename+"\\"+f_path num = map_dict[filename] # write one line per image f.write(relate_name+" "+num+"\n") f.close() def get_map_dict(): # load the class-name-to-index mapping class_map_dict = {} with open(os.path.join(meta_dir,"classmap.txt"),"r") as F: lines = F.readlines() for line in lines: line = line.split("\n")[0] filename,cls,num = line.split(" ") class_map_dict[filename] = num return class_map_dict if __name__ == '__main__': class_map_dict = get_map_dict() generate_txt(images_dir=train_dir,map_dict=class_map_dict) generate_txt(images_dir=val_dir,map_dict=class_map_dict) ```
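For reference, a sketch of the two text formats this script works with, inferred from the parsing above (the folder and file names are illustrative, not taken from the repository):

```python
# meta/classmap.txt -- parsed by get_map_dict(), one line per class:
#   "<class_folder> <class_name> <index>", e.g.
#   n01440764 tench 0
#
# meta/train.txt and meta/val.txt -- written by generate_txt(), one line per image:
#   "<class_folder>\\<file_name> <index>", e.g.
#   n01440764\\ILSVRC2012_val_00000293.JPEG 0
```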
{ "source": "jichenjc/python-zvm-sdk", "score": 2 }
#### File: python-zvm-sdk/smutLayer/powerVM.py ```python import time import generalUtils import msgs from vmUtils import execCmdThruIUCV, invokeSMCLI from vmUtils import isLoggedOn from vmUtils import waitForOSState, waitForVMState modId = 'PVM' vmOSUpStates = ['on', 'up'] vmOSUpDownStates = ['down', 'off', 'on', 'up'] version = "1.0.0" """ List of subfunction handlers. Each subfunction contains a list that has: Readable name of the routine that handles the subfunction, Code for the function call. """ subfuncHandler = { 'HELP': ['help', lambda rh: help(rh)], 'ISREACHABLE': ['checkIsReachable', lambda rh: checkIsReachable(rh)], 'OFF': ['deactivate', lambda rh: deactivate(rh)], 'ON': ['activate', lambda rh: activate(rh)], 'PAUSE': ['pause', lambda rh: pause(rh)], 'REBOOT': ['reboot', lambda rh: reboot(rh)], 'RESET': ['reset', lambda rh: reset(rh)], 'SOFTOFF': ['softDeactivate', lambda rh: softDeactivate(rh)], 'STATUS': ['getStatus', lambda rh: getStatus(rh)], 'UNPAUSE': ['unpause', lambda rh: unpause(rh)], 'VERSION': ['getVersion', lambda rh: getVersion(rh)], 'WAIT': ['wait', lambda rh: wait(rh)], } """ List of positional operands based on subfunction. Each subfunction contains a list which has a dictionary with the following information for the positional operands: - Human readable name of the operand, - Property in the parms dictionary to hold the value, - Is it required (True) or optional (False), - Type of data (1: int, 2: string). """ posOpsList = {} """ List of additional operands/options supported by the various subfunctions. The dictionary following the subfunction name uses the keyword from the command as a key. Each keyword has a dictionary that lists: - the related parms item that stores the value, - how many values follow the keyword, and - the type of data for those values (1: int, 2: string) For example, the 'WAIT' subfunction has a 'poll' operand that takes one additional operand (time in seconds) which is an int, while the 'showparms' operand is just the keyword and has no additional value portion. """ keyOpsList = { 'HELP': {}, 'ISREACHABLE': { '--showparms': ['showParms', 0, 0]}, 'OFF': { '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--wait': ['wait', 0, 0]}, 'ON': { '--state': ['desiredState', 1, 2], '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--wait': ['wait', 0, 0]}, 'PAUSE': {'--showparms': ['showParms', 0, 0]}, 'REBOOT': { '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--wait': ['wait', 0, 0]}, 'RESET': { '--state': ['desiredState', 1, 2], '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--wait': ['wait', 0, 0]}, 'SOFTOFF': { '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--wait': ['wait', 0, 0]}, 'STATUS': { '--showparms': ['showParms', 0, 0] }, 'UNPAUSE': { '--showparms': ['showParms', 0, 0]}, 'VERSION': {}, 'WAIT': { '--maxwait': ['maxWait', 1, 1], '--poll': ['poll', 1, 1], '--showparms': ['showParms', 0, 0], '--state': ['desiredState', 1, 2]}, } def activate(rh): """ Activate a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'ON' userid - userid of the virtual machine parms['desiredState'] - Desired state. Optional, unless 'maxQueries' is specified. parms['maxQueries'] - Maximum number of queries to issue. Optional. parms['maxWait'] - Maximum time to wait in seconds.
Optional, unless 'maxQueries' is specified. parms['poll'] - Polling interval in seconds. Optional, unless 'maxQueries' is specified. Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.activate, userid: " + rh.userid) parms = ["-T", rh.userid] smcliResults = invokeSMCLI(rh, "Image_Activate", parms) if smcliResults['overallRC'] == 0: pass elif (smcliResults['overallRC'] == 8 and smcliResults['rc'] == 200 and smcliResults['rs'] == 8): pass # All good. No need to change the ReqHandle results. else: # SMAPI API failed. rh.printLn("ES", smcliResults['response']) rh.updateResults(smcliResults) # Use results from invokeSMCLI if rh.results['overallRC'] == 0 and 'maxQueries' in rh.parms: # Wait for the system to be in the desired state of: # OS is 'up' and reachable or VM is 'on'. if rh.parms['desiredState'] == 'up': results = waitForOSState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) else: results = waitForVMState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if results['overallRC'] == 0: rh.printLn("N", "%s: %s" % (rh.userid, rh.parms['desiredState'])) else: rh.updateResults(results) rh.printSysLog("Exit powerVM.activate, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def checkIsReachable(rh): """ Check if a virtual machine is reachable. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'ISREACHABLE' userid - userid of the virtual machine Output: Request Handle updated with the results. overallRC - 0: determined the status, non-zero: some weird failure while trying to execute a command on the guest via IUCV rc - RC returned from execCmdThruIUCV rs - 0: not reachable, 1: reachable """ rh.printSysLog("Enter powerVM.checkIsReachable, userid: " + rh.userid) strCmd = "echo 'ping'" results = execCmdThruIUCV(rh, rh.userid, strCmd) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": reachable") reachable = 1 else: # A failure from execCmdThruIUCV is acceptable way of determining # that the system is unreachable. We won't pass along the # error message. rh.printLn("N", rh.userid + ": unreachable") reachable = 0 rh.updateResults({"rs": reachable}) rh.printSysLog("Exit powerVM.checkIsReachable, rc: 0") return 0 def deactivate(rh): """ Deactivate a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'OFF' userid - userid of the virtual machine parms['maxQueries'] - Maximum number of queries to issue. Optional. parms['maxWait'] - Maximum time to wait in seconds. Optional, unless 'maxQueries' is specified. parms['poll'] - Polling interval in seconds. Optional, unless 'maxQueries' is specified. Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.deactivate, userid: " + rh.userid) parms = ["-T", rh.userid, "-f", "IMMED"] results = invokeSMCLI(rh, "Image_Deactivate", parms) if results['overallRC'] == 0: pass elif (results['overallRC'] == 8 and results['rc'] == 200 and (results['rs'] == 12 or results['rs'] == 16)): # Tolerable error. Machine is already in or going into the state # we want it to enter. rh.printLn("N", rh.userid + ": off") rh.updateResults({}, reset=1) else: # SMAPI API failed. 
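        # Failure-reporting convention used throughout this module:
        # the raw SMAPI response is echoed to the user with severity
        # "ES" (error) and the results dictionary from invokeSMCLI is
        # merged into the Request Handle, so callers see the SMAPI
        # overallRC/rc/rs values unchanged.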
rh.printLn("ES", results['response']) rh.updateResults(results) # Use results from invokeSMCLI if results['overallRC'] == 0 and 'maxQueries' in rh.parms: results = waitForVMState( rh, rh.userid, 'off', maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": off") else: rh.updateResults(results) rh.printSysLog("Exit powerVM.deactivate, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def doIt(rh): """ Perform the requested function by invoking the subfunction handler. Input: Request Handle Output: Request Handle updated with parsed input. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.doIt") # Show the invocation parameters, if requested. if 'showParms' in rh.parms and rh.parms['showParms'] is True: rh.printLn("N", "Invocation parameters: ") rh.printLn("N", " Routine: powerVM." + str(subfuncHandler[rh.subfunction][0]) + "(reqHandle)") rh.printLn("N", " function: " + rh.function) rh.printLn("N", " userid: " + rh.userid) rh.printLn("N", " subfunction: " + rh.subfunction) rh.printLn("N", " parms{}: ") for key in rh.parms: if key != 'showParms': rh.printLn("N", " " + key + ": " + str(rh.parms[key])) rh.printLn("N", " ") # Call the subfunction handler subfuncHandler[rh.subfunction][1](rh) rh.printSysLog("Exit powerVM.doIt, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def getStatus(rh): """ Get the power (logon/off) status of a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'STATUS' userid - userid of the virtual machine Output: Request Handle updated with the results. results['overallRC'] - 0: ok, non-zero: error if ok: results['rc'] - 0: for both on and off cases results['rs'] - 0: powered on results['rs'] - 1: powered off """ rh.printSysLog("Enter powerVM.getStatus, userid: " + rh.userid) results = isLoggedOn(rh, rh.userid) if results['overallRC'] != 0: # Unexpected error pass elif results['rs'] == 0: rh.printLn("N", rh.userid + ": on") else: rh.printLn("N", rh.userid + ": off") rh.updateResults(results) rh.printSysLog("Exit powerVM.getStatus, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def getVersion(rh): """ Get the version of this function. Input: Request Handle Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printLn("N", "Version: " + version) return 0 def help(rh): """ Produce help output specifically for PowerVM functions. Input: Request Handle Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ showInvLines(rh) showOperandLines(rh) return 0 def parseCmdline(rh): """ Parse the request command input. Input: Request Handle Output: Request Handle updated with parsed input. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.parseCmdline") if rh.totalParms >= 2: rh.userid = rh.request[1].upper() else: # Userid is missing. msg = msgs.msg['0010'][1] % modId rh.printLn("ES", msg) rh.updateResults(msgs.msg['0010'][0]) rh.printSysLog("Exit powerVM.parseCmdLine, rc: " + rh.results['overallRC']) return rh.results['overallRC'] if rh.totalParms == 2: rh.subfunction = rh.userid rh.userid = '' if rh.totalParms >= 3: rh.subfunction = rh.request[2].upper() # Verify the subfunction is valid. if rh.subfunction not in subfuncHandler: # Subfunction is missing. 
subList = ', '.join(sorted(subfuncHandler.keys())) msg = msgs.msg['0011'][1] % (modId, subList) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0011'][0]) # Parse the rest of the command line. if rh.results['overallRC'] == 0: rh.argPos = 3 # Begin Parsing at 4th operand generalUtils.parseCmdline(rh, posOpsList, keyOpsList) waiting = 0 if rh.results['overallRC'] == 0: if rh.subfunction == 'WAIT': waiting = 1 if rh.parms['desiredState'] not in vmOSUpDownStates: # Desired state is not: down, off, on or up. msg = msgs.msg['0013'][1] % (modId, rh.parms['desiredState'], ", ".join(vmOSUpDownStates)) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0013'][0]) if (rh.results['overallRC'] == 0 and 'wait' in rh.parms): waiting = 1 if 'desiredState' not in rh.parms: if rh.subfunction in ['ON', 'RESET', 'REBOOT']: rh.parms['desiredState'] = 'up' else: # OFF and SOFTOFF default to 'off'. rh.parms['desiredState'] = 'off' if rh.results['overallRC'] == 0 and waiting == 1: if rh.subfunction == 'ON' or rh.subfunction == 'RESET': if ('desiredState' not in rh.parms or rh.parms['desiredState'] not in vmOSUpStates): # Desired state is not: on or up. msg = msgs.msg['0013'][1] % (modId, rh.parms['desiredState'], ", ".join(vmOSUpStates)) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0013'][0]) if rh.results['overallRC'] == 0: if 'maxWait' not in rh.parms: rh.parms['maxWait'] = 300 if 'poll' not in rh.parms: rh.parms['poll'] = 15 rh.parms['maxQueries'] = (rh.parms['maxWait'] + rh.parms['poll'] - 1) / rh.parms['poll'] # If we had to do some rounding, give a warning # out to the command line user that the wait # won't be what they expected. if rh.parms['maxWait'] % rh.parms['poll'] != 0: msg = msgs.msg['0017'][1] % (modId, rh.parms['maxWait'], rh.parms['poll'], rh.parms['maxQueries'] * rh.parms['poll'], rh.parms['maxQueries']) rh.printLn("W", msg) rh.printSysLog("Exit powerVM.parseCmdLine, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def pause(rh): """ Pause a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'PAUSE' userid - userid of the virtual machine Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.pause, userid: " + rh.userid) parms = ["-T", rh.userid, "-k", "PAUSE=YES"] results = invokeSMCLI(rh, "Image_Pause", parms) if results['overallRC'] != 0: # SMAPI API failed. rh.printLn("ES", results['response']) rh.updateResults(results) # Use results from invokeSMCLI rh.printSysLog("Exit powerVM.pause, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def reboot(rh): """ Reboot a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'REBOOT' userid - userid of the virtual machine parms['desiredState'] - Desired state. Optional, unless 'maxQueries' is specified. parms['maxQueries'] - Maximum number of queries to issue. Optional. parms['maxWait'] - Maximum time to wait in seconds. Optional, unless 'maxQueries' is specified. parms['poll'] - Polling interval in seconds. Optional, unless 'maxQueries' is specified. Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.reboot, userid: " + rh.userid) strCmd = "shutdown -r now" results = execCmdThruIUCV(rh, rh.userid, strCmd) if results['overallRC'] != 0: # Command failed to execute using IUCV. 
rh.printLn("ES", results['response']) rh.updateResults(results) if rh.results['overallRC'] == 0: # Wait for the OS to go down results = waitForOSState(rh, rh.userid, "down", maxQueries=30, sleepSecs=10) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": down (interim state)") if rh.results['overallRC'] == 0 and 'maxQueries' in rh.parms: results = waitForOSState(rh, rh.userid, 'up', maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": up") else: rh.updateResults(results) rh.printSysLog("Exit powerVM.reboot, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def reset(rh): """ Reset a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'RESET' userid - userid of the virtual machine parms['maxQueries'] - Maximum number of queries to issue. Optional. parms['maxWait'] - Maximum time to wait in seconds. Optional, unless 'maxQueries' is specified. parms['poll'] - Polling interval in seconds. Optional, unless 'maxQueries' is specified. Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.reset, userid: " + rh.userid) # Log off the user parms = ["-T", rh.userid] results = invokeSMCLI(rh, "Image_Deactivate", parms) if results['overallRC'] != 0: if results['rc'] == 200 and results['rs'] == 12: # Tolerated error. Machine is already in the desired state. results['overallRC'] = 0 results['rc'] = 0 results['rs'] = 0 else: # SMAPI API failed. rh.printLn("ES", results['response']) rh.updateResults(results) # Use results from invokeSMCLI # Wait for the logoff to complete if results['overallRC'] == 0: results = waitForVMState(rh, rh.userid, "off", maxQueries=30, sleepSecs=10) # Log the user back on if results['overallRC'] == 0: parms = ["-T", rh.userid] results = invokeSMCLI(rh, "Image_Activate", parms) if results['overallRC'] != 0: # SMAPI API failed. 
rh.printLn("ES", results['response']) rh.updateResults(results) # Use results from invokeSMCLI if results['overallRC'] == 0 and 'maxQueries' in rh.parms: if rh.parms['desiredState'] == 'up': results = waitForOSState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) else: results = waitForVMState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": " + rh.parms['desiredState']) else: rh.updateResults(results) rh.printSysLog("Exit powerVM.reset, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def showInvLines(rh): """ Produce help output related to command synopsis Input: Request Handle """ if rh.subfunction != '': rh.printLn("N", "Usage:") rh.printLn("N", " python " + rh.cmdName + " PowerVM <userid>") rh.printLn("N", " [isreachable | pause | " + "status | unpause]") rh.printLn("N", " python " + rh.cmdName + " PowerVM <userid>") rh.printLn("N", " [on | reset] --wait --state " + "[on | up] --maxwait <secs>") rh.printLn("N", " --poll <secs>") rh.printLn("N", " python " + rh.cmdName + " PowerVM <userid>") rh.printLn("N", " [off | reboot | softoff] " + "wait --maxwait <secs> --poll <secs>") rh.printLn("N", " python " + rh.cmdName + " PowerVM " + "<userid> wait") rh.printLn("N", " --state [down | on | off | up] " + "--maxwait <secs>") rh.printLn("N", " --poll <secs>") rh.printLn("N", " python " + rh.cmdName + " PowerVM help") rh.printLn("N", " python " + rh.cmdName + " PowerVM version") return def showOperandLines(rh): """ Produce help output related to operands. Input: Request Handle """ if rh.function == 'HELP': rh.printLn("N", " For the PowerVM function:") else: rh.printLn("N", "Sub-Functions(s):") rh.printLn("N", " help - Displays this help " + "information.") rh.printLn("N", " isreachable - Determine whether the " + "virtual OS in a virtual machine") rh.printLn("N", " is reachable") rh.printLn("N", " on - Log on the virtual machine") rh.printLn("N", " off - Log off the virtual machine") rh.printLn("N", " pause - Pause a virtual machine") rh.printLn("N", " reboot - Issue a shutdown command to " + "reboot the OS in a virtual") rh.printLn("N", " machine") rh.printLn("N", " reset - Power a virtual machine off " + "and then back on") rh.printLn("N", " softoff - Issue a shutdown command to " + "shutdown the OS in a virtual") rh.printLn("N", " machine and then log the " + "virtual machine off z/VM.") rh.printLn("N", " status - show the log on/off status " + "of the virtual machine") rh.printLn("N", " unpause - Unpause a virtual machine") rh.printLn("N", " wait - Wait for the virtual machine " + "to go into the specified") rh.printLn("N", " state of either:") rh.printLn("N", " down: virtual machine's " + "OS is not reachable with IUCV") rh.printLn("N", " off: virtual machine is " + "logged off") rh.printLn("N", " on: virtual machine is " + "logged on") rh.printLn("N", " up: virtual machine's OS " + "is reachable with IUCV") rh.printLn("N", " version - show the version of the " + "power function") if rh.subfunction != '': rh.printLn("N", "Operand(s):") rh.printLn("N", " <userid> - Userid of the target " + "virtual machine") rh.printLn("N", " --maxwait <secs> - " + "Maximum time in seconds to wait") rh.printLn("N", " --poll <secs> - " + "Seconds to wait between polling") rh.printLn("N", " --state [down | off | on | up] - " + "Desired state for virtual machine") rh.printLn("N", " (on or off) or for the operating " + 
"system (down or up).") rh.printLn("N", " --wait - wait for the machine to go into " + "the desired state.") return def softDeactivate(rh): """ Deactivate a virtual machine by first shutting down Linux and then log it off. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'SOFTOFF' userid - userid of the virtual machine parms['maxQueries'] - Maximum number of queries to issue. Optional. parms['maxWait'] - Maximum time to wait in seconds. Optional, unless 'maxQueries' is specified. parms['poll'] - Polling interval in seconds. Optional, unless 'maxQueries' is specified. Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.softDeactivate, userid: " + rh.userid) strCmd = "echo 'ping'" iucvResults = execCmdThruIUCV(rh, rh.userid, strCmd) if iucvResults['overallRC'] == 0: # We could talk to the machine, tell it to shutdown nicely. strCmd = "shutdown -h now" iucvResults = execCmdThruIUCV(rh, rh.userid, strCmd) if iucvResults['overallRC'] == 0: time.sleep(15) else: # Shutdown failed. Let CP take down the system # after we log the results. rh.printSysLog("powerVM.softDeactivate " + rh.userid + " is unreachable. Treating it as already shutdown.") else: # Could not ping the machine. Treat it as a success # after we log the results. rh.printSysLog("powerVM.softDeactivate " + rh.userid + " is unreachable. Treating it as already shutdown.") # Tell z/VM to log off the system. parms = ["-T", rh.userid] smcliResults = invokeSMCLI(rh, "Image_Deactivate", parms) if smcliResults['overallRC'] == 0: pass elif (smcliResults['overallRC'] == 8 and smcliResults['rc'] == 200 and (smcliResults['rs'] == 12 or + smcliResults['rs'] == 16)): # Tolerable error. # Machine is already logged off or is logging off. rh.printLn("N", rh.userid + " is already logged off.") else: # SMAPI API failed. rh.printLn("ES", smcliResults['response']) rh.updateResults(smcliResults) # Use results from invokeSMCLI if rh.results['overallRC'] == 0 and 'maxQueries' in rh.parms: # Wait for the system to log off. waitResults = waitForVMState( rh, rh.userid, 'off', maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if waitResults['overallRC'] == 0: rh.printLn("N", "Userid '" + rh.userid + " is in the desired state: off") else: rh.updateResults(waitResults) rh.printSysLog("Exit powerVM.softDeactivate, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def unpause(rh): """ Unpause a virtual machine. Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'UNPAUSE' userid - userid of the virtual machine Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.unpause, userid: " + rh.userid) parms = ["-T", rh.userid, "-k", "PAUSE=NO"] results = invokeSMCLI(rh, "Image_Pause", parms) if results['overallRC'] != 0: # SMAPI API failed. rh.printLn("ES", results['response']) rh.updateResults(results) # Use results from invokeSMCLI rh.printSysLog("Exit powerVM.unpause, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def wait(rh): """ Wait for the virtual machine to go into the specified state. 
Input: Request Handle with the following properties: function - 'POWERVM' subfunction - 'WAIT' userid - userid of the virtual machine parms['desiredState'] - Desired state parms['maxQueries'] - Maximum number of queries to issue parms['maxWait'] - Maximum time to wait in seconds parms['poll'] - Polling interval in seconds Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter powerVM.wait, userid: " + rh.userid) if (rh.parms['desiredState'] == 'off' or rh.parms['desiredState'] == 'on'): results = waitForVMState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) else: results = waitForOSState( rh, rh.userid, rh.parms['desiredState'], maxQueries=rh.parms['maxQueries'], sleepSecs=rh.parms['poll']) if results['overallRC'] == 0: rh.printLn("N", rh.userid + ": " + rh.parms['desiredState']) else: rh.updateResults(results) rh.printSysLog("Exit powerVM.wait, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] ``` #### File: python-zvm-sdk/smutLayer/vmUtils.py ```python import re import subprocess from subprocess import CalledProcessError import time import msgs modId = 'VMU' version = '1.0.0' # Version of this script def disableEnableDisk(rh, userid, vaddr, option): """ Disable or enable a disk. Input: Request Handle: owning userid virtual address option ('-e': enable, '-d': disable) Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - rc from the chccwdev command or IUCV transmission. rs - rs from the chccwdev command or IUCV transmission. results - possible error message from the IUCV transmission. """ rh.printSysLog("Enter vmUtils.disableEnableDisk, userid: " + userid + " addr: " + vaddr + " option: " + option) results = { 'overallRC': 0, 'rc': 0, 'rs': 0, 'response': '' } """ Can't guarantee the success of online/offline disk, need to wait Until it's done because we may detach the disk after -d option or use the disk after the -e option """ for secs in [0.1, 0.4, 1, 1.5, 3, 7, 15, 32, 30, 30, 60, 60, 60, 60, 60]: strCmd = "/sbin/chccwdev " + option + " " + vaddr + " 2>&1" results = execCmdThruIUCV(rh, userid, strCmd) if results['overallRC'] == 0: break elif (results['overallRC'] == 2 and results['rc'] == 8 and results['rs'] == 1 and option == '-d'): # Linux does not know about the disk being disabled. # Ok, nothing to do. Treat this as a success. results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'response': ''} break time.sleep(secs) rh.printSysLog("Exit vmUtils.disableEnableDisk, rc: " + str(results['overallRC'])) return results def execCmdThruIUCV(rh, userid, strCmd, hideInLog=[]): """ Send a command to a virtual machine using IUCV. Input: Request Handle Userid of the target virtual machine Command string to send (Optional) List of strCmd words (by index) to hide in sysLog by replacing the word with "<hidden>". Output: Dictionary containing the following: overallRC - overall return code, 0: success, 2: failure rc - RC returned from iucvclnt if overallRC != 0. rs - RS returned from iucvclnt if overallRC != 0. errno - Errno returned from iucvclnt if overallRC != 0. response - Output of the iucvclnt command or this routine. Notes: 1) This routine does not use the Request Handle printLn function. This is because an error might be expected and we might desire to suppress it. Instead, any error messages are put in the response dictionary element that is returned. 
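       2) Illustrative call (the userid and the command shown are
          hypothetical):
             results = execCmdThruIUCV(rh, 'MYVM01', 'uname -r')
             if results['overallRC'] == 0:
                 output = results['response']
          On failure, overallRC is 2 and results['response'] holds the
          error text instead.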
""" if len(hideInLog) == 0: rh.printSysLog("Enter vmUtils.execCmdThruIUCV, userid: " + userid + " cmd: " + strCmd) else: logCmd = strCmd.split(' ') for i in hideInLog: logCmd[i] = '<hidden>' rh.printSysLog("Enter vmUtils.execCmdThruIUCV, userid: " + userid + " cmd: " + ' '.join(logCmd)) iucvpath = '/opt/zthin/bin/IUCV/' results = { 'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0, 'response': [], } cmd = [iucvpath + "iucvclnt", userid, strCmd] try: results['response'] = subprocess.check_output( cmd, stderr=subprocess.STDOUT, close_fds=True) except CalledProcessError as e: msg = [] results['overallRC'] = 2 results['rc'] = e.returncode match = re.search('Return code (.+?),', e.output) if match: try: results['rc'] = int(match.group(1)) except ValueError: # Return code in response from IUCVCLNT is not an int. msg = msgs.msg['0311'][1] % (modId, userid, strCmd, results['rc'], match.group(1), e.output) if not msg: # We got the rc. Now, get the rs. match = re.search('Reason code (.+?)\.', e.output) if match: try: results['rs'] = int(match.group(1)) except ValueError: # Reason code in response from IUCVCLNT is not an int. msg = msgs.msg['0312'][1] % (modId, userid, strCmd, results['rc'], match.group(1), e.output) if msg: # Already produced an error message. pass elif results['rc'] == 1: # Command was not authorized or a generic Linux error. msg = msgs.msg['0313'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) elif results['rc'] == 2: # IUCV client parameter error. msg = msgs.msg['0314'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) elif results['rc'] == 4: # IUCV socket error msg = msgs.msg['0315'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) elif results['rc'] == 8: # Executed command failed msg = msgs.msg['0316'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) elif results['rc'] == 16: # File Transport failed msg = msgs.msg['0317'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) elif results['rc'] == 32: # IUCV server file was not found on this system. msg += msgs.msg['0318'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) else: # Unrecognized IUCV client error msg = msgs.msg['0319'][1] % (modId, userid, strCmd, results['rc'], results['rs'], e.output) results['response'] = msg except Exception as e: # Other exceptions from this system (i.e. not the managed system). results = msgs.msg['0421'][0] msg = msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e)) results['response'] = msg rh.printSysLog("Exit vmUtils.execCmdThruIUCV, rc: " + str(results['rc'])) return results def getPerfInfo(rh, useridlist): """ Get the performance information for a userid Input: Request Handle Userid to query <- may change this to a list later. Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from SMCLI if overallRC = 0. rs - RS returned from SMCLI if overallRC = 0. errno - Errno returned from SMCLI if overallRC = 0. response - Stripped and reformatted output of the SMCLI command. """ rh.printSysLog("Enter vmUtils.getPerfInfo, userid: " + useridlist) parms = ["-T", rh.userid, "-c", "1"] results = invokeSMCLI(rh, "Image_Performance_Query", parms) if results['overallRC'] != 0: # SMCLI failed. 
rh.printLn("ES", results['response']) rh.printSysLog("Exit vmUtils.getPerfInfo, rc: " + str(results['overallRC'])) return results lines = results['response'].split("\n") usedTime = 0 totalCpu = 0 totalMem = 0 usedMem = 0 try: for line in lines: if "Used CPU time:" in line: usedTime = line.split()[3].strip('"') # Value is in us, need make it seconds usedTime = int(usedTime) / 1000000 if "Guest CPUs:" in line: totalCpu = line.split()[2].strip('"') if "Max memory:" in line: totalMem = line.split()[2].strip('"') # Value is in Kb, need to make it Mb totalMem = int(totalMem) / 1024 if "Used memory:" in line: usedMem = line.split()[2].strip('"') usedMem = int(usedMem) / 1024 except Exception as e: msg = msgs.msg['0412'][1] % (modId, type(e).__name__, str(e), results['response']) rh.printLn("ES", msg) results['overallRC'] = 4 results['rc'] = 4 results['rs'] = 412 if results['overallRC'] == 0: memstr = "Total Memory: %iM\n" % totalMem usedmemstr = "Used Memory: %iM\n" % usedMem procstr = "Processors: %s\n" % totalCpu timestr = "CPU Used Time: %i sec\n" % usedTime results['response'] = memstr + usedmemstr + procstr + timestr rh.printSysLog("Exit vmUtils.getPerfInfo, rc: " + str(results['rc'])) return results def installFS(rh, vaddr, mode, fileSystem, diskType): """ Install a filesystem on a virtual machine's dasd. Input: Request Handle: userid - Userid that owns the disk Virtual address as known to the owning system. Access mode to use to get the disk. Disk Type - 3390 or 9336 Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from SMCLI if overallRC = 0. rs - RS returned from SMCLI if overallRC = 0. errno - Errno returned from SMCLI if overallRC = 0. response - Output of the SMCLI command. """ rh.printSysLog("Enter vmUtils.installFS, userid: " + rh.userid + ", vaddr: " + str(vaddr) + ", mode: " + mode + ", file system: " + fileSystem + ", disk type: " + diskType) results = { 'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0, } out = '' diskAccessed = False # Get access to the disk. cmd = ["/opt/zthin/bin/linkdiskandbringonline", rh.userid, vaddr, mode] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output(cmd, close_fds=True) diskAccessed = True except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. 
results = msgs.msg['0421'][0] rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) if results['overallRC'] == 0: """ sample output: linkdiskandbringonline maint start time: 2017-03-03-16:20:48.011 Success: Userid maint vdev 193 linked at ad35 device name dasdh linkdiskandbringonline exit time: 2017-03-03-16:20:52.150 """ match = re.search('Success:(.+?)\n', out) if match: parts = match.group(1).split() if len(parts) > 9: device = "/dev/" + parts[9] else: strCmd = ' '.join(cmd) rh.printLn("ES", msgs.msg['0416'][1] % (modId, 'Success:', 10, strCmd, out)) results = msgs.msg['0416'][0] rh.updateResults(results) else: strCmd = ' '.join(cmd) rh.printLn("ES", msgs.msg['0417'][1] % (modId, 'Success:', strCmd, out)) results = msgs.msg['0417'][0] rh.updateResults(results) if results['overallRC'] == 0 and diskType == "3390": # dasdfmt the disk cmd = ["/sbin/dasdfmt", "-y", "-b", "4096", "-d", "cdl", "-f", device] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output(cmd, close_fds=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. strCmd = " ".join(cmd) rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0 and diskType == "3390": # Settle the devices so we can do the partition. strCmd = ("which udevadm &> /dev/null && " + "udevadm settle || udevsettle") rh.printSysLog("Invoking: " + strCmd) try: subprocess.check_output( strCmd, stderr=subprocess.STDOUT, close_fds=True, shell=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. strCmd = " ".join(cmd) rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0 and diskType == "3390": # Prepare the partition with fdasd cmd = ["/sbin/fdasd", "-a", device] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, close_fds=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0 and diskType == "9336": # Delete the existing partition in case the disk already # has a partition in it. cmd = "/sbin/fdisk " + device + " << EOF\nd\nw\nEOF" rh.printSysLog("Invoking: /sbin/fdsik " + device + " << EOF\\nd\\nw\\nEOF ") try: out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, close_fds=True, shell=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. 
rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0 and diskType == "9336": # Prepare the partition with fdisk cmd = "/sbin/fdisk " + device + " << EOF\nn\np\n1\n\n\nw\nEOF" rh.printSysLog("Invoking: /sbin/fdisk " + device + " << EOF\\nn\\np\\n1\\n\\n\\nw\\nEOF") try: out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, close_fds=True, shell=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, cmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. rh.printLn("ES", msgs.msg['0421'][1] % (modId, cmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0: # Settle the devices so we can do the partition. strCmd = ("which udevadm &> /dev/null && " + "udevadm settle || udevsettle") rh.printSysLog("Invoking: " + strCmd) try: subprocess.check_output( strCmd, stderr=subprocess.STDOUT, close_fds=True, shell=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. strCmd = " ".join(cmd) rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['overallRC'] == 0: # Install the file system into the disk. device = device + "1" # Point to first partition if fileSystem != 'swap': if fileSystem == 'xfs': cmd = ["mkfs.xfs", "-f", device] else: cmd = ["mkfs", "-F", "-t", fileSystem, device] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output(cmd, stderr=subprocess.STDOUT, close_fds=True) rh.printLn("N", "File system: " + fileSystem + " is installed.") except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) else: rh.printLn("N", "File system type is swap. No need to install " + "a filesystem.") if diskAccessed: # Give up the disk. cmd = ["/opt/zthin/bin/offlinediskanddetach", rh.userid, vaddr] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output(cmd, close_fds=True) except CalledProcessError as e: rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode rh.updateResults(results) except Exception as e: # All other exceptions. rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) rh.printSysLog("Exit vmUtils.installFS, rc: " + str(results['rc'])) return results def invokeSMCLI(rh, api, parms, hideInLog=[]): """ Invoke SMCLI and parse the results. Input: Request Handle API name, SMCLI parms as an array (Optional) List of parms (by index) to hide in sysLog by replacing the parm with "<hidden>". Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from SMCLI if overallRC = 0. 
          rs        - RS returned from SMCLI if overallRC = 0.
          errno     - Errno returned from SMCLI if overallRC = 0.
          response  - String output of the SMCLI command.

    Note:
       - If the first three words of the header returned from smcli
         do not contain words that represent valid integer values or
         contain too few words then one or more error messages are
         generated. THIS SHOULD NEVER OCCUR !!!!
    """

    if len(hideInLog) == 0:
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
            rh.userid + ", function: " + api +
            ", parms: " + str(parms))
    else:
        logParms = parms
        for i in hideInLog:
            logParms[i] = '<hidden>'
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
            rh.userid + ", function: " + api +
            ", parms: " + str(logParms))

    goodHeader = False

    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
        'errno': 0,
        'response': [],
        'strError': '',
    }

    cmd = []
    cmd.append('/opt/zthin/bin/smcli')
    cmd.append(api)
    cmd.append('--addRCheader')

    try:
        smcliResp = subprocess.check_output(cmd + parms,
            close_fds=True).split('\n', 1)
        results['response'] = smcliResp[1]
        results['overallRC'] = 0
        results['rc'] = 0

    except CalledProcessError as e:
        strCmd = " ".join(cmd + parms)

        # Break up the RC header into its component parts.
        if e.output == '':
            smcliResp = ['']
        else:
            smcliResp = e.output.split('\n', 1)

        # Split the header into its component pieces.
        rcHeader = smcliResp[0].split('(details)', 1)
        if len(rcHeader) == 0:
            rcHeader = ['', '']
        elif len(rcHeader) == 1:
            # No data after the details tag. Add empty [1] value.
            rcHeader.append('')
        codes = rcHeader[0].split(' ')

        # Validate the rc, rs, and errno.
        if len(codes) < 3:
            # Unexpected number of codes. Need at least 3.
            results = msgs.msg['0301'][0]
            results['response'] = msgs.msg['0301'][1] % (modId, api,
                strCmd, rcHeader[0], rcHeader[1])
        else:
            goodHeader = True
            # Convert the first word (overall rc from SMAPI) to an int
            # and set the SMUT overall rc based on this value.
            orcError = False
            try:
                results['overallRC'] = int(codes[0])
                if results['overallRC'] not in [8, 24, 25]:
                    orcError = True
            except ValueError:
                goodHeader = False
                orcError = True
            if orcError:
                results['overallRC'] = 25    # SMCLI Internal Error
                results = msgs.msg['0302'][0]
                results['response'] = msgs.msg['0302'][1] % (modId,
                    api, codes[0], strCmd, rcHeader[0], rcHeader[1])

            # Convert the second word to an int and save as rc.
            try:
                results['rc'] = int(codes[1])
            except ValueError:
                goodHeader = False
                results = msgs.msg['0303'][0]
                results['response'] = msgs.msg['0303'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

            # Convert the third word to an int and save it as either
            # the rs or errno.
            try:
                word3 = int(codes[2])
                if results['overallRC'] == 8:
                    results['rs'] = word3    # Must be an rs
                elif results['overallRC'] == 25:
                    results['errno'] = word3    # Must be the errno
                # We ignore word 3 for everyone else and default to 0.
            except ValueError:
                goodHeader = False
                results = msgs.msg['0304'][0]
                results['response'] = msgs.msg['0304'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

        results['strError'] = rcHeader[1].lstrip()

        if goodHeader:
            # Produce a message that provides the error info.
            results['response'] = msgs.msg['0300'][1] % (modId,
                api, results['overallRC'], results['rc'],
                results['rs'], results['errno'],
                strCmd, smcliResp[1])

    except Exception as e:
        # All other exceptions.
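        # Unlike CalledProcessError above, which carries the smcli
        # --addRCheader line ("overallRC rc rs/errno (details) text",
        # shape inferred from the parsing logic), a generic exception
        # has no header to parse and maps straight to message 0305.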
strCmd = " ".join(cmd + parms) results = msgs.msg['0305'][0] results['response'] = msgs.msg['0305'][1] % (modId, strCmd, type(e).__name__, str(e)) rh.printSysLog("Exit vmUtils.invokeSMCLI, rc: " + str(results['overallRC'])) return results def isLoggedOn(rh, userid): """ Determine whether a virtual machine is logged on. Input: Request Handle: userid being queried Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - 0: if we got status. Otherwise, it is the error return code from the commands issued. rs - Based on rc value. For rc==0, rs is: 0: if we determined it is logged on. 1: if we determined it is logged off. """ rh.printSysLog("Enter vmUtils.isLoggedOn, userid: " + userid) results = { 'overallRC': 0, 'rc': 0, 'rs': 0, } cmd = ["/sbin/vmcp", "query", "user", userid] strCmd = ' '.join(cmd) rh.printSysLog("Invoking: " + strCmd) try: subprocess.check_output( cmd, close_fds=True, stderr=subprocess.STDOUT) except CalledProcessError as e: match = re.search('(^HCP\w\w\w045E|^HCP\w\w\w361E)', e.output) if match: # Not logged on results['rs'] = 1 else: # Abnormal failure rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, e.output)) results = msgs.msg['0415'][0] results['rs'] = e.returncode except Exception as e: # All other exceptions. results = msgs.msg['0421'][0] rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) rh.printSysLog("Exit vmUtils.isLoggedOn, overallRC: " + str(results['overallRC']) + " rc: " + str(results['rc']) + " rs: " + str(results['rs'])) return results def punch2reader(rh, userid, fileLoc, spoolClass): """ Punch a file to a virtual reader of the specified virtual machine. Input: Request Handle - for general use and to hold the results userid - userid of the virtual machine fileLoc - File to send spoolClass - Spool class Output: Request Handle updated with the results. Return code - 0: ok, non-zero: error """ rh.printSysLog("Enter punch2reader.punchFile") results = {} # Setting rc to time out rc code as default and its changed during runtime results['rc'] = 9 # Punch to the current user intially and then change the spool class. cmd = ["vmur", "punch", "-r", fileLoc] strCmd = ' '.join(cmd) for secs in [1, 2, 3, 5, 10]: rh.printSysLog("Invoking: " + strCmd) try: results['response'] = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT) results['rc'] = 0 rh.updateResults(results) break except CalledProcessError as e: results['response'] = e.output # Check if we have concurrent instance of vmur active if results['response'].find("A concurrent instance of vmur" + " is already active") == -1: # Failure in VMUR punch update the rc results['rc'] = 7 break else: # if concurrent vmur is active try after sometime rh.printSysLog("Punch in use. Retrying after " + str(secs) + " seconds") time.sleep(secs) except Exception as e: # All other exceptions. 
rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] rh.updateResults(results) if results['rc'] == 7: # Failure while issuing vmur command (For eg: invalid file given) msg = msgs.msg['0401'][1] % (modId, fileLoc, userid, results['response']) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0401'][0]) elif results['rc'] == 9: # Failure due to vmur timeout msg = msgs.msg['0406'][1] % (modId, fileLoc) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0406'][0]) if rh.results['overallRC'] == 0: # On VMUR success change the class of the spool file spoolId = re.findall(r'\d+', str(results['response'])) cmd = ["vmcp", "change", "rdr", str(spoolId[0]), "class", spoolClass] strCmd = " ".join(cmd) rh.printSysLog("Invoking: " + strCmd) try: results['response'] = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT) rh.updateResults(results) except CalledProcessError as e: msg = msgs.msg['0404'][1] % (modId, spoolClass, e.output) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0404'][0]) # Class change failed # Delete the punched file from current userid cmd = ["vmcp", "purge", "rdr", spoolId[0]] strCmd = " ".join(cmd) rh.printSysLog("Invoking: " + strCmd) try: results['response'] = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT) # We only need to issue the printLn. # Don't need to change return/reason code values except CalledProcessError as e: msg = msgs.msg['0403'][1] % (modId, spoolId[0], e.output) rh.printLn("ES", msg) except Exception as e: # All other exceptions related to purge. # We only need to issue the printLn. # Don't need to change return/reason code values rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) except Exception as e: # All other exceptions related to change rdr. results = msgs.msg['0421'][0] rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) rh.updateResults(msgs.msg['0421'][0]) if rh.results['overallRC'] == 0: # Transfer the file from current user to specified user cmd = ["vmcp", "transfer", "*", "rdr", str(spoolId[0]), "to", userid, "rdr"] strCmd = " ".join(cmd) rh.printSysLog("Invoking: " + strCmd) try: results['response'] = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT) rh.updateResults(results) except CalledProcessError as e: msg = msgs.msg['0424'][1] % (modId, fileLoc, userid, e.output) rh.printLn("ES", msg) rh.updateResults(msgs.msg['0424'][0]) # Transfer failed so delete the punched file from current userid cmd = ["vmcp", "purge", "rdr", spoolId[0]] strCmd = " ".join(cmd) rh.printSysLog("Invoking: " + strCmd) try: results['response'] = subprocess.check_output(cmd, close_fds=True, stderr=subprocess.STDOUT) # We only need to issue the printLn. # Don't need to change return/reason code values except CalledProcessError as e: msg = msgs.msg['0403'][1] % (modId, spoolId[0], e.output) rh.printLn("ES", msg) except Exception as e: # All other exceptions related to purge. rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) except Exception as e: # All other exceptions related to transfer. 
results = msgs.msg['0421'][0] rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) rh.updateResults(msgs.msg['0421'][0]) rh.printSysLog("Exit vmUtils.punch2reader, rc: " + str(rh.results['overallRC'])) return rh.results['overallRC'] def waitForOSState(rh, userid, desiredState, maxQueries=90, sleepSecs=5): """ Wait for the virtual OS to go into the indicated state. Input: Request Handle userid whose state is to be monitored Desired state, 'up' or 'down', case sensitive Maximum attempts to wait for desired state before giving up Sleep duration between waits Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from execCmdThruIUCV if overallRC = 0. rs - RS returned from execCmdThruIUCV if overallRC = 0. errno - Errno returned from execCmdThruIUCV if overallRC = 0. response - Updated with an error message if wait times out. Note: """ rh.printSysLog("Enter vmUtils.waitForOSState, userid: " + userid + " state: " + desiredState + " maxWait: " + str(maxQueries) + " sleepSecs: " + str(sleepSecs)) results = {} strCmd = "echo 'ping'" stateFnd = False for i in range(1, maxQueries + 1): results = execCmdThruIUCV(rh, rh.userid, strCmd) if results['overallRC'] == 0: if desiredState == 'up': stateFnd = True break else: if desiredState == 'down': stateFnd = True break if i < maxQueries: time.sleep(sleepSecs) if stateFnd is True: results = { 'overallRC': 0, 'rc': 0, 'rs': 0, } else: maxWait = maxQueries * sleepSecs rh.printLn("ES", msgs.msg['0413'][1] % (modId, userid, desiredState, maxWait)) results = msgs.msg['0413'][0] rh.printSysLog("Exit vmUtils.waitForOSState, rc: " + str(results['overallRC'])) return results def waitForVMState(rh, userid, desiredState, maxQueries=90, sleepSecs=5): """ Wait for the virtual machine to go into the indicated state. Input: Request Handle userid whose state is to be monitored Desired state, 'on' or 'off', case sensitive Maximum attempts to wait for desired state before giving up Sleep duration between waits Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from SMCLI if overallRC = 0. rs - RS returned from SMCLI if overallRC = 0. Note: """ rh.printSysLog("Enter vmUtils.waitForVMState, userid: " + userid + " state: " + desiredState + " maxWait: " + str(maxQueries) + " sleepSecs: " + str(sleepSecs)) results = {} cmd = ["/sbin/vmcp", "query", "user", userid] strCmd = " ".join(cmd) stateFnd = False for i in range(1, maxQueries + 1): rh.printSysLog("Invoking: " + strCmd) try: out = subprocess.check_output( cmd, close_fds=True, stderr=subprocess.STDOUT) if desiredState == 'on': stateFnd = True break except CalledProcessError as e: match = re.search('(^HCP\w\w\w045E|^HCP\w\w\w361E)', e.output) if match: # Logged off if desiredState == 'off': stateFnd = True break else: # Abnormal failure out = e.output rh.printLn("ES", msgs.msg['0415'][1] % (modId, strCmd, e.returncode, out)) results = msgs.msg['0415'][0] results['rs'] = e.returncode break except Exception as e: # All other exceptions. rh.printLn("ES", msgs.msg['0421'][1] % (modId, strCmd, type(e).__name__, str(e))) results = msgs.msg['0421'][0] if i < maxQueries: # Sleep a bit before looping. 
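            # The state is polled with "vmcp query user <userid>"; the CP
            # messages HCPxxx045E/HCPxxx361E indicate a logged-off user.
            # Sleeps happen only between attempts, so a typical caller,
            # e.g. reset(), polls up to 30 times at 10-second intervals.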
time.sleep(sleepSecs) if stateFnd is True: results = { 'overallRC': 0, 'rc': 0, 'rs': 0, } else: maxWait = maxQueries * sleepSecs rh.printLn("ES", msgs.msg['0414'][1] % (modId, userid, desiredState, maxWait)) results = msgs.msg['0414'][0] rh.printSysLog("Exit vmUtils.waitForVMState, rc: " + str(results['overallRC'])) return results def purgeReader(rh): """ Purge reader of the specified userid. Input: Request Handle Output: Dictionary containing the following: overallRC - overall return code, 0: success, non-zero: failure rc - RC returned from SMCLI if overallRC = 0. rs - RS returned from SMCLI if overallRC = 0. errno - Errno returned from SMCLI if overallRC = 0. response - Updated with an error message. Note: """ rh.printSysLog("Enter vmUtils.purgeRDR, userid: " + rh.userid) results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'response': []} # Temporarily use this SMAPI to purge the reader # We've asked for a new one to do this parms = ['-T', rh.userid, '-c', 'cmd=PURGE %s RDR ALL' % rh.userid] results = invokeSMCLI(rh, "xCAT_Commands_IUO", parms) if results['overallRC'] != 0: rh.printLn("ES", results['response']) rh.updateResults(results) rh.printSysLog("Exit vmUtils.purgeReader, rc: " + str(results['overallRC'])) return results ``` #### File: python-zvm-sdk/zvmsdk/client.py ```python from zvmsdk import config from zvmsdk import log from zvmsdk import utils as zvmutils CONF = config.CONF LOG = log.LOG _XCAT_CLIENT = None _SMUT_CLIENT = None def get_xcatclient(): global _XCAT_CLIENT if _XCAT_CLIENT is None: try: _XCAT_CLIENT = zvmutils.import_object( 'zvmsdk.xcatclient.XCATClient') except ImportError: LOG.error("Unable to get zvmclient") raise ImportError return _XCAT_CLIENT def get_smutclient(): global _SMUT_CLIENT if _SMUT_CLIENT is None: try: _SMUT_CLIENT = zvmutils.import_object( 'zvmsdk.smutclient.SMUTClient') except ImportError: LOG.error("Unable to get zvmclient") raise ImportError return _SMUT_CLIENT def get_zvmclient(): if CONF.zvm.client_type == 'xcat': return get_xcatclient() elif CONF.zvm.client_type == 'smut': return get_smutclient() else: # TODO: raise Exception pass class ZVMClient(object): def __init__(self): self._pathutils = zvmutils.PathUtils() def guest_start(self, userid): pass def guest_stop(self, userid): pass def get_power_state(self, userid): pass def image_import(self, image_name, url, image_meta, remote_host=None): pass def image_query(self, imagekeyword=None): pass def image_delete(self, image_name): pass def get_host_info(self): pass def get_diskpool_info(self, pool): pass def virtual_network_vswitch_query_iuo_stats(self): pass def get_vm_list(self): pass def add_vswitch(self, name, rdev=None, controller='*', connection='CONNECT', network_type='IP', router="NONROUTER", vid='UNAWARE', port_type='ACCESS', gvrp='GVRP', queue_mem=8, native_vid=1, persist=True): pass def couple_nic_to_vswitch(self, userid, nic_vdev, vswitch_name, active=False): pass def create_nic(self, userid, vdev=None, nic_id=None, mac_addr=None, ip_addr=None, active=False): pass def delete_nic(self, userid, vdev, active=False): pass def delete_vswitch(self, switch_name, persist=True): pass def get_vm_nic_vswitch_info(self, vm_id): pass def get_vswitch_list(self): pass def grant_user_to_vswitch(self, vswitch_name, userid): pass def revoke_user_from_vswitch(self, vswitch_name, userid): pass def set_vswitch_port_vlan_id(self, vswitch_name, userid, vlan_id): pass def set_vswitch(self, switch_name, **kwargs): pass def uncouple_nic_from_vswitch(self, userid, nic_vdev, active=False): pass def 
get_guest_connection_status(self, userid): pass def guest_deploy(self, node, image_name, transportfiles=None, remotehost=None, vdev=None): pass def process_additional_minidisks(self, userid, disk_info): pass def get_user_direct(self, userid): pass def create_vm(self, userid, cpu, memory, disk_list, profile): pass def delete_vm(self, userid): pass def _generate_vdev(self, base, offset): """Generate virtual device number based on base vdev :param base: base virtual device number, string of 4 bit hex. :param offset: offset to base, integer. """ vdev = hex(int(base, 16) + offset)[2:] return vdev.rjust(4, '0') def generate_disk_vdev(self, start_vdev=None, offset=0): """Generate virtual device number for disks :param offset: offset of user_root_vdev. :return: virtual device number, string of 4 bit hex. """ if not start_vdev: start_vdev = CONF.zvm.user_root_vdev vdev = self._generate_vdev(start_vdev, offset) if offset >= 0 and offset < 254: return vdev else: msg = "Invalid virtual device number for disk:%s" % vdev LOG.error(msg) raise def add_mdisks(self, userid, disk_list, start_vdev=None): """Add disks for the userid :disks: A list dictionary to describe disk info, for example: disk: [{'size': '1g', 'format': 'ext3', 'disk_pool': 'ECKD:eckdpool1'}] """ for idx, disk in enumerate(disk_list): vdev = self.generate_disk_vdev(start_vdev=start_vdev, offset=idx) self._add_mdisk(userid, disk, vdev) def authorize_iucv_client(self, guest_userid, client_userid): pass def image_performance_query(self, uid_list): """Call Image_Performance_Query to get guest current status. :uid_list: A list of zvm userids to be queried """ pass def get_image_performance_info(self, userid): """Get CPU and memory usage information. :userid: the zvm userid to be queried """ pi_dict = self.image_performance_query([userid]) return pi_dict.get(userid.upper(), None) def _parse_vswitch_inspect_data(self, rd_list): """ Parse the Virtual_Network_Vswitch_Query_IUO_Stats data to get inspect data. This is an internal function shared by both smut and xcat client. 
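        The expected shape of rd_list (inferred from the parsing below)
        is a flat list of "keyword: value" strings, e.g.:
            vswitch count: 1
            ...
            vswitch name: VSW1
            uplink count: 1
            ...
            nic count: 1
            nic_id: USER1 1000
            nic_fr_rx: ...
        The values shown are illustrative only.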
""" def _parse_value(data_list, idx, keyword, offset): return idx + offset, data_list[idx].rpartition(keyword)[2].strip() vsw_dict = {} with zvmutils.expect_invalid_resp_data(): # vswitch count idx = 0 idx, vsw_count = _parse_value(rd_list, idx, 'vswitch count:', 2) vsw_dict['vswitch_count'] = int(vsw_count) # deal with each vswitch data vsw_dict['vswitches'] = [] for i in range(vsw_dict['vswitch_count']): vsw_data = {} # skip vswitch number idx += 1 # vswitch name idx, vsw_name = _parse_value(rd_list, idx, 'vswitch name:', 1) vsw_data['vswitch_name'] = vsw_name # uplink count idx, up_count = _parse_value(rd_list, idx, 'uplink count:', 1) # skip uplink data idx += int(up_count) * 9 # skip bridge data idx += 8 # nic count vsw_data['nics'] = [] idx, nic_count = _parse_value(rd_list, idx, 'nic count:', 1) nic_count = int(nic_count) for j in range(nic_count): nic_data = {} idx, nic_id = _parse_value(rd_list, idx, 'nic_id:', 1) userid, toss, vdev = nic_id.partition(' ') nic_data['userid'] = userid nic_data['vdev'] = vdev idx, nic_data['nic_fr_rx'] = _parse_value(rd_list, idx, 'nic_fr_rx:', 1 ) idx, nic_data['nic_fr_rx_dsc'] = _parse_value(rd_list, idx, 'nic_fr_rx_dsc:', 1 ) idx, nic_data['nic_fr_rx_err'] = _parse_value(rd_list, idx, 'nic_fr_rx_err:', 1 ) idx, nic_data['nic_fr_tx'] = _parse_value(rd_list, idx, 'nic_fr_tx:', 1 ) idx, nic_data['nic_fr_tx_dsc'] = _parse_value(rd_list, idx, 'nic_fr_tx_dsc:', 1 ) idx, nic_data['nic_fr_tx_err'] = _parse_value(rd_list, idx, 'nic_fr_tx_err:', 1 ) idx, nic_data['nic_rx'] = _parse_value(rd_list, idx, 'nic_rx:', 1 ) idx, nic_data['nic_tx'] = _parse_value(rd_list, idx, 'nic_tx:', 1 ) vsw_data['nics'].append(nic_data) # vlan count idx, vlan_count = _parse_value(rd_list, idx, 'vlan count:', 1) # skip vlan data idx += int(vlan_count) * 3 # skip the blank line idx += 1 vsw_dict['vswitches'].append(vsw_data) return vsw_dict def _is_vdev_valid(self, vdev, vdev_info): for used_vdev in vdev_info: if ((int(vdev, 16) >= int(used_vdev, 16)) and (int(vdev, 16) <= int(used_vdev, 16) + 2)): return False return True ``` #### File: sdkwsgi/handlers/host.py ```python import json from zvmsdk import api from zvmsdk import log from zvmsdk.sdkwsgi.handlers import tokens from zvmsdk.sdkwsgi import util from zvmsdk.sdkwsgi import wsgi_wrapper from zvmsdk import utils _HOSTACTION = None LOG = log.LOG class HostAction(object): def __init__(self): self.api = api.SDKAPI(skip_input_check=True) def get_info(self): info = self.api.host_get_info() return info def get_disk_info(self, diskname): info = self.api.host_diskpool_get_info(disk_pool=diskname) return info def get_action(): global _HOSTACTION if _HOSTACTION is None: _HOSTACTION = HostAction() return _HOSTACTION @wsgi_wrapper.SdkWsgify @tokens.validate def host_get_info(req): def _host_get_info(): action = get_action() return action.get_info() info = _host_get_info() info_json = json.dumps({'host': info}) req.response.body = utils.to_utf8(info_json) req.response.content_type = 'application/json' return req.response @wsgi_wrapper.SdkWsgify @tokens.validate def host_get_disk_info(req): def _host_get_disk_info(diskname): action = get_action() return action.get_disk_info(diskname) diskname = util.wsgi_path_item(req.environ, 'disk') info = _host_get_disk_info(diskname) info_json = json.dumps({'disk_info': info}) req.response.body = utils.to_utf8(info_json) req.response.content_type = 'application/json' return req.response ``` #### File: sdkwsgi/validation/parameter_types.py ```python import re import unicodedata import six def 
single_param(schema):
    ret = multi_params(schema)
    ret['maxItems'] = 1
    return ret


def multi_params(schema):
    return {'type': 'array', 'items': schema}


class ValidationRegex(object):
    def __init__(self, regex, reason):
        self.regex = regex
        self.reason = reason


def _is_printable(char):
    """Determine if a unicode code point is printable.

    This checks if the character is either "other" (mostly control
    codes), or a non-horizontal space. All characters that don't match
    those criteria are considered printable; that is: letters;
    combining marks; numbers; punctuation; symbols; (horizontal) space
    separators.
    """
    category = unicodedata.category(char)
    return (not category.startswith("C") and
            (not category.startswith("Z") or category == "Zs"))


def _get_all_chars():
    for i in range(0xFFFF):
        yield six.unichr(i)


def _build_regex_range(ws=True, invert=False, exclude=None):
    """Build a range regex for a set of characters in utf8.

    This builds a valid range regex for characters in utf8 by iterating
    the entire space and building up a set of x-y ranges for all the
    characters we find which are valid.

    :param ws: should we include whitespace in this range.
    :param exclude: any characters we want to exclude
    :param invert: invert the logic

    The inversion is useful when we want to generate a set of ranges
    which is everything that's not a certain class. For instance,
    produce all the non-printable characters as a set of ranges.
    """
    if exclude is None:
        exclude = []
    regex = ""
    # are we currently in a range
    in_range = False
    # last character we found, for closing ranges
    last = None
    # last character we added to the regex, this lets us know that we
    # already have B in the range, which means we don't need to close
    # it out with B-B. While the latter seems to work, it's kind of
    # bad form.
    last_added = None

    def valid_char(char):
        if char in exclude:
            result = False
        elif ws:
            result = _is_printable(char)
        else:
            # Zs is the unicode class for space characters, of which
            # there are about 10 in this range.
            result = (_is_printable(char) and
                      unicodedata.category(char) != "Zs")
        if invert is True:
            return not result
        return result

    # iterate through the entire character range
    for c in _get_all_chars():
        if valid_char(c):
            if not in_range:
                regex += re.escape(c)
                last_added = c
            in_range = True
        else:
            if in_range and last != last_added:
                regex += "-" + re.escape(last)
            in_range = False
        last = c
    else:
        if in_range:
            regex += "-" + re.escape(c)
    return regex


valid_name_regex_base = '^(?![%s])[%s]*(?<![%s])$'

valid_name_regex = ValidationRegex(
    valid_name_regex_base % (
        _build_regex_range(ws=False, invert=True),
        _build_regex_range(),
        _build_regex_range(ws=False, invert=True)),
    "printable characters. 
Can not start or end with whitespace.") name = { 'type': 'string', 'minLength': 1, 'maxLength': 255, 'format': 'name' } positive_integer = { 'type': ['integer', 'string'], 'pattern': '^[0-9]*$', 'minimum': 1 } ipv4 = { 'type': 'string', 'format': 'ipv4' } nic_info = { 'type': 'array', 'items': { 'type': 'object', 'properties': { 'nic_id': {'type': 'string'}, 'mac_addr': {'type': 'string'} }, 'additionalProperties': False } } boolean = { 'type': ['boolean', 'string'], 'enum': [True, 'True', 'TRUE', 'true', '1', 'ON', 'On', 'on', 'YES', 'Yes', 'yes', False, 'False', 'FALSE', 'false', '0', 'OFF', 'Off', 'off', 'NO', 'No', 'no'] } rdev_list = { 'type': ['string'], 'pattern': '^([0-9a-fA-F]{,4})(\s+[0-9a-fA-F]{,4}){,2}$' } rdev = { 'type': ['string'], 'minLength': 1, 'maxLength': 4, 'pattern': '^[0-9a-fA-F]{,4}$' } vdev = { 'type': ['string'], 'minLength': 1, 'maxLength': 4, 'pattern': '^[0-9a-fA-F]{,4}$' } url = { 'type': ['string'], # FIXME: uri cannot validate url, need accurate definition 'format': 'uri' } mac_address = { 'type': 'string', 'pattern': '^([0-9a-fA-F]{2})(:[0-9a-fA-F]{2}){5}$' } remotehost = { 'type': ['string'], 'format': 'hostname' } userid = { 'type': ['string'], 'minLength': 1, 'maxLength': 8 } controller = { 'type': ['string'], 'anyOf': [ {'pattern': '\*'}, {'minLength': 1, 'maxLength': 8} ] } nic_id = { 'type': ['string'] } userid_list = { 'type': ['string'], # TODO:validate userid_list in inspect APIs 'pattern': '^(\w{,8})(,\w{,8}){0,}$' } disk_list = { 'type': 'array', 'items': { 'type': 'object', 'properties': { 'size': {'type': 'string'}, # TODO: set format to enum 'format': {'type': 'string'}, 'is_boot_disk': boolean, 'disk_pool': {'type': 'string', 'pattern': '^\w+:\w+$'} }, 'required': ['size'], 'additionalProperties': False } } image_meta = { 'type': 'object', 'properties': { 'os_version': {'type': 'string'}, # md5 shoule be 32 hexadeciaml numbers 'md5sum': {'type': 'string', 'pattern': '^[0-9a-fA-F]{32}$'} }, 'required': ['os_version', 'md5sum'], 'additionalProperties': False } ``` #### File: python-zvm-sdk/zvmsdk/smutclient.py ```python import os import re import tempfile from smutLayer import smut from zvmsdk import client from zvmsdk import config from zvmsdk import constants as const from zvmsdk import database from zvmsdk import exception from zvmsdk import log from zvmsdk import utils as zvmutils CONF = config.CONF LOG = log.LOG class SMUTClient(client.ZVMClient): def __init__(self): super(SMUTClient, self).__init__() self._smut = smut.SMUT() self._NetDbOperator = database.NetworkDbOperator() self._GuestDbOperator = database.GuestDbOperator() def _request(self, requestData): try: results = self._smut.request(requestData) except Exception as err: LOG.error('SMUT internal parse encounter error') raise exception.ZVMClientInternalError(msg=err) if results['overallRC'] != 0: raise exception.ZVMClientRequestFailed(results=results) return results def get_power_state(self, userid): """Get power status of a z/VM instance.""" LOG.debug('Query power stat of %s' % userid) requestData = "PowerVM " + userid + " status" results = self._request(requestData) with zvmutils.expect_invalid_resp_data(results): status = results['response'][0].partition(': ')[2] return status def guest_start(self, userid): """"Power on VM.""" requestData = "PowerVM " + userid + " on" self._request(requestData) def guest_stop(self, userid): """"Power off VM.""" requestData = "PowerVM " + userid + " off" self._request(requestData) def create_vm(self, userid, cpu, memory, disk_list, profile): """ 
Create VM and add disks if specified.
        """
        rd = ('makevm %(uid)s directory LBYONLY %(mem)im %(pri)s '
              '--cpus %(cpu)i --profile %(prof)s' %
              {'uid': userid, 'mem': memory,
               'pri': const.ZVM_USER_DEFAULT_PRIVILEGE,
               'cpu': cpu, 'prof': profile})

        if CONF.zvm.default_admin_userid:
            rd += (' --logonby "%s"' % CONF.zvm.default_admin_userid)

        if disk_list and 'is_boot_disk' in disk_list[0]:
            ipl_disk = CONF.zvm.user_root_vdev
            rd += (' --ipl %s' % ipl_disk)

        self._request(rd)

        if disk_list:
            # Add disks for vm
            self.add_mdisks(userid, disk_list)

    def _add_mdisk(self, userid, disk, vdev):
        """Create one disk for userid.

        NOTE: No read, write and multi password specified, and access
        mode defaults to 'MR'.
        """
        size = disk['size']
        fmt = disk.get('format')
        disk_pool = disk.get('disk_pool') or CONF.zvm.disk_pool
        [diskpool_type, diskpool_name] = disk_pool.split(':')

        if (diskpool_type.upper() == 'ECKD'):
            action = 'add3390'
        else:
            action = 'add9336'

        rd = ' '.join(['changevm', userid, action, diskpool_name,
                       vdev, size, '--mode MR'])
        if fmt:
            rd += (' --filesystem %s' % fmt)

        self._request(rd)

    def guest_authorize_iucv_client(self, userid, client=None):
        """Punch a script to authorize the client on the guest vm"""
        client = client or zvmutils.get_smut_userid()

        iucv_path = "/tmp/" + userid
        if not os.path.exists(iucv_path):
            os.makedirs(iucv_path)
        iucv_auth_file = iucv_path + "/iucvauth.sh"
        zvmutils.generate_iucv_authfile(iucv_auth_file, client)

        try:
            requestData = "ChangeVM " + userid + " punchfile " + \
                iucv_auth_file + " --class x"
            self._request(requestData)
        except Exception as err:
            raise exception.ZVMSMUTAuthorizeIUCVClientFailed(
                client=client, vm=userid, msg=err)
        finally:
            self._pathutils.clean_temp_folder(iucv_path)

    def guest_deploy(self, userid, image_name, transportfiles=None,
                     remotehost=None, vdev=None):
        """Deploy image and punch config driver to target"""
        # Get image location (TODO: update this image location)
        image_file = "/var/lib/zvmsdk/images/" + image_name
        # Unpack image file to root disk
        vdev = vdev or CONF.zvm.user_root_vdev
        cmd = ['/opt/zthin/bin/unpackdiskimage', userid, vdev, image_file]
        (rc, output) = zvmutils.execute(cmd)
        if rc != 0:
            err_msg = ("unpackdiskimage failed with return code: %d." % rc)
            output_lines = output.split('\n')
            for line in output_lines:
                if line.__contains__("ERROR:"):
                    err_msg += ("\\n" + line.strip())
            LOG.error(err_msg)
            raise exception.ZVMGuestDeployFailed(userid=userid, msg=err_msg)

        # Purge guest reader to clean dirty data
        rd = ("changevm %s purgerdr" % userid)
        with zvmutils.expect_request_failed_and_reraise(
                exception.ZVMGuestDeployFailed, userid=userid):
            self._request(rd)

        # Punch transport files if specified
        if transportfiles:
            # Copy transport file to local
            try:
                tmp_trans_dir = tempfile.mkdtemp()
                local_trans = tmp_trans_dir + '/cfgdrv'
                if remotehost:
                    cmd = ["/usr/bin/scp", "-B",
                           ("%s:%s" % (remotehost, transportfiles)),
                           local_trans]
                else:
                    cmd = ["/usr/bin/cp", transportfiles, local_trans]
                (rc, output) = zvmutils.execute(cmd)
                if rc != 0:
                    err_msg = ("copy config drive to local failed with "
                               "return code: %d."
% rc) LOG.error(err_msg) raise exception.ZVMGuestDeployFailed(userid=userid, msg=err_msg) # Punch config drive to guest userid rd = ("changevm %(uid)s punchfile %(file)s --class X" % {'uid': userid, 'file': local_trans}) with zvmutils.expect_request_failed_and_reraise( exception.ZVMGuestDeployFailed, userid=userid): self._request(rd) finally: # remove the local temp config drive folder self._pathutils.clean_temp_folder(tmp_trans_dir) def grant_user_to_vswitch(self, vswitch_name, userid): """Set vswitch to grant user.""" smut_userid = zvmutils.get_smut_userid() requestData = ' '.join(( 'SMAPI %s API Virtual_Network_Vswitch_Set_Extended' % smut_userid, "--operands", "-k switch_name=%s" % vswitch_name, "-k grant_userid=%s" % userid, "-k persist=YES")) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(requestData) def revoke_user_from_vswitch(self, vswitch_name, userid): """Revoke user for vswitch.""" smut_userid = zvmutils.get_smut_userid() requestData = ' '.join(( 'SMAPI %s API Virtual_Network_Vswitch_Set_Extended' % smut_userid, "--operands", "-k switch_name=%s" % vswitch_name, "-k revoke_userid=%s" % userid, "-k persist=YES")) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(requestData) def image_performance_query(self, uid_list): """Call Image_Performance_Query to get guest current status. :uid_list: A list of zvm userids to be queried """ if not isinstance(uid_list, list): uid_list = [uid_list] smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Image_Performance_Query" % smut_userid, "--operands", '-T "%s"' % (' '.join(uid_list)), "-c %d" % len(uid_list))) results = self._request(rd) ipq_kws = { 'userid': "Guest name:", 'guest_cpus': "Guest CPUs:", 'used_cpu_time': "Used CPU time:", 'elapsed_cpu_time': "Elapsed time:", 'min_cpu_count': "Minimum CPU count:", 'max_cpu_limit': "Max CPU limit:", 'samples_cpu_in_use': "Samples CPU in use:", 'samples_cpu_delay': ",Samples CPU delay:", 'used_memory': "Used memory:", 'max_memory': "Max memory:", 'min_memory': "Minimum memory:", 'shared_memory': "Shared memory:", } pi_dict = {} pi = {} with zvmutils.expect_invalid_resp_data(): rpi_list = ('\n'.join(results['response'])).split("\n\n") for rpi in rpi_list: try: pi = zvmutils.translate_response_to_dict(rpi, ipq_kws) except exception.ZVMInvalidResponseDataError as err: emsg = err.format_message() # when there is only one userid queried and this userid is # in 'off'state, the smcli will only returns the queried # userid number, no valid performance info returned. if(emsg.__contains__("No value matched with keywords.")): continue else: raise err for k, v in pi.items(): pi[k] = v.strip('" ') if pi.get('userid') is not None: pi_dict[pi['userid']] = pi return pi_dict def get_vm_nic_vswitch_info(self, vm_id): """ Get NIC and switch mapping for the specified virtual machine. 
""" switch_info = self._NetDbOperator.switch_select_record_for_node(vm_id) with zvmutils.expect_invalid_resp_data(): switch_dict = {} for item in switch_info: switch_dict[item[0]] = item[1] LOG.debug("Switch info the %(vm_id)s is %(switch_dict)s", {"vm_id": vm_id, "switch_dict": switch_dict}) return switch_dict def virtual_network_vswitch_query_iuo_stats(self): smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Query_IUO_Stats" % smut_userid, "--operands", '-T "%s"' % smut_userid, '-k "switch_name=*"' )) results = self._request(rd) return self._parse_vswitch_inspect_data(results['response']) def get_host_info(self): results = self._request("getHost general") host_info = zvmutils.translate_response_to_dict( '\n'.join(results['response']), const.RINV_HOST_KEYWORDS) return host_info def get_diskpool_info(self, pool): results = self._request("getHost diskpoolspace %s" % pool) dp_info = zvmutils.translate_response_to_dict( '\n'.join(results['response']), const.DISKPOOL_KEYWORDS) return dp_info @zvmutils.wrap_invalid_resp_data_error def get_vswitch_list(self): smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Query" % smut_userid, "--operands", "-s \'*\'")) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): try: result = self._request(rd) except exception.ZVMClientRequestFailed as err: emsg = err.format_message() if ((err.results['rc'] == 212) and (err.results['rs'] == 40)): LOG.warning("No Virtual switch in the host") return [] else: raise exception.ZVMNetworkError( msg=("Failed to query vswitch list, %s") % emsg) if (not result['response'] or not result['response'][0]): return [] else: data = '\n'.join([s for s in result['response'] if isinstance(s, const._TSTR_OR_TUNI)]) output = re.findall('VSWITCH: Name: (.*)', data) return output def set_vswitch_port_vlan_id(self, vswitch_name, userid, vlan_id): smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Set_Extended" % smut_userid, "--operands", "-k grant_userid=%s" % userid, "-k switch_name=%s" % vswitch_name, "-k user_vlan_id=%s" % vlan_id, "-k persist=YES")) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(rd) @zvmutils.wrap_invalid_resp_data_error def add_vswitch(self, name, rdev=None, controller='*', connection='CONNECT', network_type='IP', router="NONROUTER", vid='UNAWARE', port_type='ACCESS', gvrp='GVRP', queue_mem=8, native_vid=1, persist=True): smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Create_Extended" % smut_userid, "--operands", '-k switch_name=%s' % name)) if rdev is not None: rd += " -k real_device_address" +\ "=\'%s\'" % rdev.replace(',', ' ') if controller != '*': rd += " -k controller_name=%s" % controller rd = ' '.join((rd, "-k connection_value=%s" % connection, "-k queue_memory_limit=%s" % queue_mem, "-k transport_type=%s" % network_type, "-k vlan_id=%s" % vid, "-k persist=%s" % (persist and 'YES' or 'NO'))) # Only if vswitch is vlan awared, port_type, gvrp and native_vid are # allowed to specified if isinstance(vid, int) or vid.upper() != 'UNAWARE': if ((native_vid is not None) and ((native_vid < 1) or (native_vid > 4094))): raise exception.ZVMInvalidInput( msg=("Failed to create vswitch %s: %s") % (name, 'valid native VLAN id should be 1-4094 or None')) rd = ' '.join((rd, "-k port_type=%s" % port_type, "-k gvrp_value=%s" % gvrp, "-k native_vlanid=%s" % native_vid)) if router is 
not None: rd += " -k routing_value=%s" % router with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(rd) @zvmutils.wrap_invalid_resp_data_error def set_vswitch(self, switch_name, **kwargs): """Set vswitch""" smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Set_Extended" % smut_userid, "--operands", "-k switch_name=%s" % switch_name)) for k, v in kwargs.items(): rd = ' '.join((rd, "-k %(key)s=\'%(value)s\'" % {'key': k, 'value': v})) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(rd) @zvmutils.wrap_invalid_resp_data_error def delete_vswitch(self, switch_name, persist=True): smut_userid = zvmutils.get_smut_userid() rd = ' '.join(( "SMAPI %s API Virtual_Network_Vswitch_Delete_Extended" % smut_userid, "--operands", "-k switch_name=%s" % switch_name, "-k persist=%s" % (persist and 'YES' or 'NO'))) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): try: self._request(rd) except exception.ZVMClientRequestFailed as err: results = err.results emsg = err.format_message() if ((results['rc'] == 212) and (results['rs'] == 40)): LOG.warning("Vswitch %s does not exist", switch_name) return else: raise exception.ZVMNetworkError( msg=("Failed to delete vswitch %s: %s") % (switch_name, emsg)) def create_nic(self, userid, vdev=None, nic_id=None, mac_addr=None, ip_addr=None, active=False): ports_info = self._NetDbOperator.switch_select_table() vdev_info = [] for p in ports_info: if p[0] == userid: vdev_info.append(p[1]) if len(vdev_info) == 0: # no nic defined for the guest if vdev is None: nic_vdev = CONF.zvm.default_nic_vdev else: nic_vdev = vdev else: if vdev is None: used_vdev = max(vdev_info) nic_vdev = str(hex(int(used_vdev, 16) + 3))[2:] else: if self._is_vdev_valid(vdev, vdev_info): nic_vdev = vdev else: raise exception.ZVMInvalidInput( msg=("The specified virtual device number %s " "has already been used" % vdev)) if len(nic_vdev) > 4: raise exception.ZVMNetworkError( msg=("Virtual device number %s is not valid" % nic_vdev)) LOG.debug('Nic attributes: vdev is %(vdev)s, ' 'ID is %(id)s, address is %(address)s', {'vdev': nic_vdev, 'id': nic_id or 'not specified', 'address': mac_addr or 'not specified'}) self._create_nic(userid, nic_vdev, nic_id=nic_id, mac_addr=mac_addr, active=active) return nic_vdev def _create_nic(self, userid, vdev, nic_id=None, mac_addr=None, active=False): requestData = ' '.join(( 'SMAPI %s API Virtual_Network_Adapter_Create_Extended_DM' % userid, "--operands", "-k image_device_number=%s" % vdev, "-k adapter_type=QDIO")) if mac_addr is not None: mac = ''.join(mac_addr.split(':'))[6:] requestData += ' -k mac_id=%s' % mac with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(requestData) if active: if mac_addr is not None: LOG.warning("Ignore the mac address %s when " "adding nic on an active system" % mac_addr) requestData = ' '.join(( 'SMAPI %s API Virtual_Network_Adapter_Create_Extended' % userid, "--operands", "-k image_device_number=%s" % vdev, "-k adapter_type=QDIO")) try: self._request(requestData) except (exception.ZVMClientRequestFailed, exception.ZVMClientInternalError) as err1: msg1 = err1.format_message() persist_OK = True requestData = ' '.join(( 'SMAPI %s API Virtual_Network_Adapter_Delete_DM' % userid, "--operands", '-v %s' % vdev)) try: self._request(requestData) except exception.ZVMClientRequestFailed as err2: results = err2.results msg2 = err2.format_message() if 
((results['rc'] == 404) and (results['rs'] == 8)): persist_OK = True else: persist_OK = False if persist_OK: msg = ("Failed to create nic %s for %s on the active " "guest system, %s") % (vdev, userid, msg1) else: msg = ("Failed to create nic %s for %s on the active " "guest system, %s, and failed to revoke user " "direct's changes, %s") % (vdev, userid, msg1, msg2) raise exception.ZVMNetworkError(msg) self._NetDbOperator.switch_add_record_for_nic(userid, vdev, port=nic_id) def get_user_direct(self, userid): results = self._request("getvm %s directory" % userid) return results.get('response', []) def delete_nic(self, userid, vdev, active=False): rd = ' '.join(( "SMAPI %s API Virtual_Network_Adapter_Delete_DM" % userid, '-v %s' % vdev)) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): try: self._request(rd) except exception.ZVMClientRequestFailed as err: results = err.results emsg = err.format_message() if ((results['rc'] == 404) and (results['rs'] == 8)): LOG.warning("Virtual device %s does not exist in " "the guest's user direct", vdev) else: raise exception.ZVMNetworkError( msg=("Failed to delete nic %s for %s in " "the guest's user direct, %s") % (vdev, userid, emsg)) self._NetDbOperator.switch_delete_record_for_nic(userid, vdev) if active: rd = ' '.join(( "SMAPI %s API Virtual_Network_Adapter_Delete" % userid, '-v %s' % vdev)) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): try: self._request(rd) except exception.ZVMClientRequestFailed as err: results = err.results emsg = err.format_message() if ((results['rc'] == 204) and (results['rs'] == 8)): LOG.warning("Virtual device %s does not exist on " "the active guest system", vdev) else: raise exception.ZVMNetworkError( msg=("Failed to delete nic %s for %s on " "the active guest system, %s") % (vdev, userid, emsg)) def _couple_nic(self, userid, vdev, vswitch_name, active=False): """Couple NIC to vswitch by adding vswitch into user direct.""" requestData = ' '.join(( 'SMAPI %s' % userid, "API Virtual_Network_Adapter_Connect_Vswitch_DM", "--operands", "-v %s" % vdev, "-n %s" % vswitch_name)) with zvmutils.expect_request_failed_and_reraise( exception.ZVMNetworkError): self._request(requestData) # the inst must be active, or this call will failed if active: requestData = ' '.join(( 'SMAPI %s' % userid, 'API Virtual_Network_Adapter_Connect_Vswitch', "--operands", "-v %s" % vdev, "-n %s" % vswitch_name)) try: self._request(requestData) except (exception.ZVMClientRequestFailed, exception.ZVMClientInternalError) as err1: results1 = err1.results msg1 = err1.format_message() if ((results1 is not None) and (results1['rc'] == 204) and (results1['rs'] == 20)): LOG.warning("Virtual device %s already connected " "on the active guest system", vdev) else: persist_OK = True requestData = ' '.join(( 'SMAPI %s' % userid, 'API Virtual_Network_Adapter_Disconnect_DM', "--operands", '-v %s' % vdev)) try: self._request(requestData) except (exception.ZVMClientRequestFailed, exception.ZVMClientInternalError) as err2: results2 = err2.results msg2 = err2.format_message() if ((results2 is not None) and (results2['rc'] == 212) and (results2['rs'] == 32)): persist_OK = True else: persist_OK = False if persist_OK: msg = ("Failed to couple nic %s to vswitch %s " "on the active guest system, %s") % (vdev, vswitch_name, msg1) else: msg = ("Failed to couple nic %s to vswitch %s " "on the active guest system, %s, and " "failed to revoke user direct's changes, " "%s") % (vdev, vswitch_name, msg1, msg2) raise 
exception.ZVMNetworkError(msg) """Update information in switch table.""" self._NetDbOperator.switch_updat_record_with_switch(userid, vdev, vswitch_name) def couple_nic_to_vswitch(self, userid, nic_vdev, vswitch_name, active=False): """Couple nic to vswitch.""" if active: msg = ("both in the user direct of guest %s and on " "the active guest system" % userid) else: msg = "in the user direct of guest %s" % userid LOG.debug("Connect nic %s to switch %s %s", nic_vdev, vswitch_name, msg) self._couple_nic(userid, nic_vdev, vswitch_name, active=active) def _uncouple_nic(self, userid, vdev, active=False): """Uncouple NIC from vswitch""" requestData = ' '.join(( 'SMAPI %s' % userid, "API Virtual_Network_Adapter_Disconnect_DM", "--operands", "-v %s" % vdev)) try: self._request(requestData) except (exception.ZVMClientRequestFailed, exception.ZVMClientInternalError) as err: results = err.results emsg = err.format_message() if ((results is not None) and (results['rc'] == 212) and (results['rs'] == 32)): LOG.warning("Virtual device %s is already disconnected " "in the guest's user direct", vdev) else: raise exception.ZVMNetworkError( msg=("Failed to uncouple nic %s " "in the guest's user direct, %s") % (vdev, emsg)) """Update information in switch table.""" self._NetDbOperator.switch_updat_record_with_switch(userid, vdev, None) # the inst must be active, or this call will failed if active: requestData = ' '.join(( 'SMAPI %s' % userid, 'API Virtual_Network_Adapter_Disconnect', "--operands", "-v %s" % vdev)) try: self._request(requestData) except (exception.ZVMClientRequestFailed, exception.ZVMClientInternalError) as err: results = err.results emsg = err.format_message() if ((results is not None) and (results['rc'] == 204) and (results['rs'] == 48)): LOG.warning("Virtual device %s is already " "disconnected on the active " "guest system", vdev) else: raise exception.ZVMNetworkError( msg=("Failed to uncouple nic %s " "on the active guest system, %s") % (vdev, emsg)) def uncouple_nic_from_vswitch(self, userid, nic_vdev, active=False): if active: msg = ("both in the user direct of guest %s and on " "the active guest system" % userid) else: msg = "in the user direct of guest %s" % userid LOG.debug("Disconnect nic %s with network %s", nic_vdev, msg) self._uncouple_nic(userid, nic_vdev, active=active) ``` #### File: tests/functional/test_imageops.py ```python import os import uuid from zvmsdk import config from zvmsdk.tests.functional import base from zvmsdk import utils CONF = config.CONF class SDKAPIImageTestCase(base.SDKAPIBaseTestCase): def test_image_operations(self): """ Import a image, query the existence and then delete it""" image_fname = str(uuid.uuid1()) image_fpath = ''.join([CONF.image.temp_path, image_fname]) os.system('touch %s' % image_fpath) url = "file://" + image_fpath image_meta = {'os_version': 'rhel7.2'} self.sdkapi.image_import(image_fname, url, image_meta, utils.get_host()) query_result = self.sdkapi.image_query(image_fname) expect_result = ['rhel7.2-s390x-netboot-%s' % image_fname.replace('-', '_')] self.assertEqual(query_result, expect_result) self.sdkapi.image_delete(query_result[0]) query_result_after_delete = self.sdkapi.image_query(image_fname) expect_result_after_delete = [] self.assertEqual(query_result_after_delete, expect_result_after_delete) os.system('rm -f %s' % image_fpath) ``` #### File: sdkwsgi/handlers/test_volume.py ```python import datetime import jwt import mock import unittest from zvmsdk.sdkwsgi.handlers import volume FAKE_UUID = 
'00000000-0000-0000-0000-000000000000' class FakeResp(object): def __init__(self): self.body = {} class FakeReq(object): def __init__(self): self.headers = {} self.environ = {} self.__name__ = '' self.response = FakeResp() def __getitem__(self, name): return self.headers class HandlersVolumeTest(unittest.TestCase): def setUp(self): expired_elapse = datetime.timedelta(seconds=100) expired_time = datetime.datetime.utcnow() + expired_elapse payload = jwt.encode({'exp': expired_time}, 'username') self.req = FakeReq() self.req.headers['X-Auth-Token'] = payload @mock.patch.object(volume.VolumeAction, 'attach') def test_volume_attach(self, mock_attach): self.req.body = '{}' volume.volume_attach(self.req) self.assertTrue(mock_attach.called) @mock.patch.object(volume.VolumeAction, 'detach') def test_volume_detach(self, mock_detach): self.req.body = '{}' volume.volume_detach(self.req) self.assertTrue(mock_detach.called) ``` #### File: tests/unit/test_api.py ```python import mock from zvmsdk import api from zvmsdk import exception from zvmsdk.tests.unit import base class SDKAPITestCase(base.SDKTestCase): """Testcases for compute APIs.""" def setUp(self): super(SDKAPITestCase, self).setUp() self.api = api.SDKAPI() self._vmops = mock.MagicMock() def test_init_ComputeAPI(self): self.assertTrue(isinstance(self.api, api.SDKAPI)) @mock.patch("zvmsdk.vmops.VMOps.get_info") def test_guest_get_info(self, ginfo): self.api.guest_get_info('fakevm') ginfo.assert_called_once_with('fakevm') @mock.patch("zvmsdk.vmops.VMOps.guest_deploy") def test_guest_deploy(self, guest_deploy): user_id = 'fakevm' image_name = 'fakeimg' transportfiles = '/tmp/transport.tgz' vdev = '0100' self.api.guest_deploy(user_id, image_name, transportfiles=transportfiles, vdev=vdev) guest_deploy.assert_called_with(user_id, image_name, transportfiles, None, vdev) @mock.patch("zvmsdk.imageops.ImageOps.image_import") def test_image_import(self, image_import): image_name = '95a4da37-9f9b-4fb2-841f-f0bb441b7544' url = "file:////install/temp/test.img" image_meta = {'os_version': "rhel6.7"} self.api.image_import(image_name, url, image_meta) image_import.assert_called_once_with(image_name, url, image_meta, remote_host=None) @mock.patch("zvmsdk.vmops.VMOps.create_vm") def test_guest_create(self, create_vm): userid = 'userid' vcpus = 1 memory = 1024 disk_list = [] user_profile = 'profile' self.api.guest_create(userid, vcpus, memory, disk_list, user_profile) create_vm.assert_called_once_with(userid, vcpus, memory, disk_list, user_profile) @mock.patch("zvmsdk.imageops.ImageOps.image_query") def test_image_query(self, image_query): imagekeyword = 'eae09a9f_7958_4024_a58c_83d3b2fc0aab' self.api.image_query(imagekeyword) image_query.assert_called_once_with(imagekeyword) @mock.patch("zvmsdk.vmops.VMOps.delete_vm") def test_guest_delete(self, delete_vm): userid = 'userid' self.api.guest_delete(userid) delete_vm.assert_called_once_with(userid) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_cpus") def test_guest_inspect_cpus_list(self, inspect_cpus): userid_list = ["userid1", "userid2"] self.api.guest_inspect_cpus(userid_list) inspect_cpus.assert_called_once_with(userid_list) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_cpus") def test_guest_inspect_cpus_single(self, inspect_cpus): userid_list = "userid1" self.api.guest_inspect_cpus(userid_list) inspect_cpus.assert_called_once_with(["userid1"]) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_mem") def test_guest_inspect_mem_list(self, inspect_mem): userid_list = ["userid1", "userid2"] 
self.api.guest_inspect_mem(userid_list) inspect_mem.assert_called_once_with(userid_list) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_mem") def test_guest_inspect_mem_single(self, inspect_mem): userid_list = "userid1" self.api.guest_inspect_mem(userid_list) inspect_mem.assert_called_once_with(["userid1"]) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_vnics") def test_guest_inspect_vnics_list(self, inspect_vnics): userid_list = ["userid1", "userid2"] self.api.guest_inspect_vnics(userid_list) inspect_vnics.assert_called_once_with(userid_list) @mock.patch("zvmsdk.monitor.ZVMMonitor.inspect_vnics") def test_guest_inspect_vnics_single(self, inspect_vnics): userid_list = "userid1" self.api.guest_inspect_vnics(userid_list) inspect_vnics.assert_called_once_with(["userid1"]) @mock.patch("zvmsdk.vmops.VMOps.guest_stop") def test_guest_stop(self, gs): userid = 'fakeuser' self.api.guest_stop(userid) gs.assert_called_once_with(userid, 0, 10) @mock.patch("zvmsdk.vmops.VMOps.guest_config_minidisks") def test_guest_process_additional_disks(self, config_disks): userid = 'userid' disk_list = [{'vdev': '0101', 'format': 'ext3', 'mntdir': '/mnt/0101'}] self.api.guest_config_minidisks(userid, disk_list) config_disks.assert_called_once_with(userid, disk_list) @mock.patch("zvmsdk.vmops.VMOps.guest_start") def test_skip_api_input_check(self, gs): zapi = api.SDKAPI(skip_input_check=True) zapi.guest_start(1) gs.assert_called_once_with(1) @mock.patch("zvmsdk.vmops.VMOps.guest_stop") def test_api_input_check_with_default_value(self, gs): self.api.guest_stop('fakeuser', 60) gs.assert_called_once_with('fakeuser', 60, 10) def test_api_input_check_failed(self): self.assertRaises(exception.ZVMInvalidInput, self.api.guest_start, 1) @mock.patch("zvmsdk.vmops.VMOps.get_definition_info") def test_api_input_check_with_keyword(self, gdi): self.api.guest_get_definition_info('uid', nic_coupled='1000') gdi.assert_called_once_with('uid', nic_coupled='1000') @mock.patch("zvmsdk.vmops.VMOps.get_definition_info") def test_api_input_check_with_invalid_keyword(self, gdi): self.assertRaises(exception.ZVMInvalidInput, self.api.guest_get_definition_info, 'uid', invalid='1000') @mock.patch("zvmsdk.vmops.VMOps.guest_start") def test_check_input_userid_length(self, gs): self.assertRaises(exception.ZVMInvalidInput, self.api.guest_start, '123456789') @mock.patch("zvmsdk.vmops.VMOps.guest_start") def test_check_input_too_many_parameters(self, gs): self.assertRaises(exception.ZVMInvalidInput, self.api.guest_start, 'fakeuser', '12345678') @mock.patch("zvmsdk.imageops.ImageOps.image_delete") def test_image_delete(self, image_delete): image_name = 'eae09a9f_7958_4024_a58c_83d3b2fc0aab' self.api.image_delete(image_name) image_delete.assert_called_once_with(image_name) def test_set_vswitch(self): self.assertRaises(exception.ZVMInvalidInput, self.api.vswitch_set, "vswitch_name", unknown='fake_id') @mock.patch("zvmsdk.vmops.VMOps.create_disks") def test_guest_add_disks(self, cds): userid = 'testuid' disk_list = [{'size': '1g'}] self.api.guest_create_disks(userid, disk_list) cds.assert_called_once_with(userid, disk_list) @mock.patch("zvmsdk.vmops.VMOps.create_disks") def test_guest_add_disks_nothing_to_do(self, cds): self.api.guest_create_disks('userid', []) cds.assert_not_called() ``` #### File: tests/unit/test_database.py ```python import mock import unittest import uuid from zvmsdk import config from zvmsdk import database from zvmsdk.database import VolumeDBUtils from zvmsdk import exception from zvmsdk import log from zvmsdk.tests.unit import 
base CONF = config.CONF LOG = log.LOG def _to_db_str(sequential_list): """Convert a list or tuple object to a string of database format.""" entry_list = [] for _entry in sequential_list: # I know only text type need to be converted by now. More types could # be added in the future when we know. if isinstance(_entry, str): entry_list.append("u'%s'" % _entry) else: entry_list.append(str(_entry)) return "(%s)" % ", ".join(entry_list) class VolumeDBUtilsTestCase(unittest.TestCase): @classmethod def setUpClass(cls): cls.db_path = CONF.database.path CONF.database.path = '/tmp/test_sdk.db' cls._util = VolumeDBUtils() @classmethod def tearDownClass(cls): with database.get_db_conn() as conn: conn.execute("DROP TABLE volumes") conn.execute("DROP TABLE volume_attachments") CONF.database.path = cls.db_path @mock.patch.object(VolumeDBUtils, '_initialize_table_volume_attachments') @mock.patch.object(VolumeDBUtils, '_initialize_table_volumes') def test__init__(self, _initialize_table_volumes, _initialize_table_volume_attachments): self._util.__init__() _initialize_table_volumes.assert_called_once_with() _initialize_table_volume_attachments.assert_called_once_with() def test_get_volume_by_id_errors(self): # error - Empty volume id volume_id_null = None self.assertRaises(exception.DatabaseException, self._util.get_volume_by_id, volume_id_null) # not found volume_id = str(uuid.uuid4()) self.assertIsNone(self._util.get_volume_by_id(volume_id)) def test_get_volume_by_id(self): # setup test volume volume = {'protocol_type': 'fc', 'size': '3G'} volume_id = self._util.insert_volume(volume) # query volume = self._util.get_volume_by_id(volume_id) expected = [volume_id, 'fc', '3G'] actual = [volume[0], volume[1], volume[2]] self.assertEqual(expected, actual) # clean test volume self._util.delete_volume(volume_id) def test_insert_volume_errors(self): # empty volume volume = None self.assertRaises(exception.DatabaseException, self._util.insert_volume, volume) # protocol_type absent volume = {'size': '3G'} self.assertRaises(exception.DatabaseException, self._util.insert_volume, volume) volume = {'protocol_type': 'fc'} self.assertRaises(exception.DatabaseException, self._util.insert_volume, volume) def test_insert_volume(self): # insert a simplest volume volume = {'protocol_type': 'fc', 'size': '3G'} volume_id = self._util.insert_volume(volume) # query volume = self._util.get_volume_by_id(volume_id) expected = [volume_id, 'fc', '3G', 'free', 0] actual = [volume[0], volume[1], volume[2], volume[3], volume[6]] self.assertEqual(expected, actual) # clean test volume self._util.delete_volume(volume_id) # insert a complicated volume image_id = str(uuid.uuid4()) snapshot_id = str(uuid.uuid4()) volume = {'protocol_type': 'fc', 'size': '3G', 'image_id': image_id, 'snapshot_id': snapshot_id, 'comment': 'hello world'} volume_id = self._util.insert_volume(volume) # query volume = self._util.get_volume_by_id(volume_id) expected = _to_db_str((volume_id, 'fc', '3G', 'free', image_id, snapshot_id, 0, None, 'hello world')) self.assertEqual(expected, str(volume)) def test_update_volume_errors(self): # empty volume volume = None self.assertRaises(exception.DatabaseException, self._util.update_volume, volume) volume = {} self.assertRaises(exception.DatabaseException, self._util.update_volume, volume) # volume not found volume_id = str(uuid.uuid4()) volume = {'id': volume_id} self.assertRaises(exception.DatabaseException, self._util.update_volume, volume) def test_update_volume(self): # set up the test volume image_id = str(uuid.uuid4()) 
snapshot_id = str(uuid.uuid4()) volume = {'protocol_type': 'fc', 'size': '3G', 'image_id': image_id, 'snapshot_id': snapshot_id, 'comment': 'hello world'} volume_id = self._util.insert_volume(volume) # make update image_id = str(uuid.uuid4()) snapshot_id = str(uuid.uuid4()) volume = {'id': volume_id, 'size': '5G', 'status': 'in-use', 'image_id': image_id, 'snapshot_id': snapshot_id, 'comment': 'goodbye world'} self._util.update_volume(volume) # query the volume volume = self._util.get_volume_by_id(volume_id) expected = _to_db_str((volume_id, 'fc', '5G', 'in-use', image_id, snapshot_id, 0, None, 'goodbye world')) self.assertEqual(expected, str(volume)) # clean the volume self._util.delete_volume(volume_id) def test_delete_volume_errors(self): # empty volume volume_id = None self.assertRaises(exception.DatabaseException, self._util.insert_volume, volume_id) # not found volume_id = str(uuid.uuid4()) self.assertIsNone(self._util.get_volume_by_id(volume_id)) def test_delete_volume(self): # insert a simplest volume volume = {'protocol_type': 'fc', 'size': '3G'} volume_id = self._util.insert_volume(volume) # query it self.assertIsNotNone(self._util.get_volume_by_id(volume_id)) # delete it self._util.delete_volume(volume_id) # query again self.assertIsNone(self._util.get_volume_by_id(volume_id)) class GuestDbOperatorTestCase(base.SDKTestCase): @classmethod def setUpClass(cls): super(GuestDbOperatorTestCase, cls).setUpClass() cls.old_db_path = CONF.database.path base.set_conf('database', 'path', '/tmp/test_sdk.db') cls.db_op = database.GuestDbOperator() @classmethod def tearDownClass(cls): super(GuestDbOperatorTestCase, cls).tearDownClass() with database.get_db_conn() as conn: conn.execute("DROP TABLE guests") # Restore the original db path CONF.database.path = cls.old_db_path @mock.patch.object(uuid, 'uuid4') def test_add_guest(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Query, the guest should in table guests = self.db_op.get_guest_list() self.assertEqual(1, len(guests)) self.assertListEqual([(u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'fakemeta=1, fakemeta2=True', u'')], guests) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_add_guest_twice_error(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Add same user the second time self.assertRaises(exception.DatabaseException, self.db_op.add_guest, 'fakeuser') self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_delete_guest_by_id(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Delete self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') guests = self.db_op.get_guest_list() self.assertListEqual([], guests) def test_delete_guest_by_id_not_exist(self): self.assertRaises(exception.DatabaseException, self.db_op.delete_guest_by_id, 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_delete_guest_by_userid(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Delete 
self.db_op.delete_guest_by_userid('FaKeuser') guests = self.db_op.get_guest_list() self.assertListEqual([], guests) def test_delete_guest_by_userid_not_exist(self): self.assertRaises(exception.DatabaseException, self.db_op.delete_guest_by_userid, 'Fakeuser') @mock.patch.object(uuid, 'uuid4') def test_get_guest_by_userid(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # get guest guest = self.db_op.get_guest_by_userid('FaKeuser') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'fakemeta=1, fakemeta2=True', u''), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') def test_get_guest_by_userid_not_exist(self): guest = self.db_op.get_guest_by_userid('FaKeuser') self.assertEqual(None, guest) @mock.patch.object(uuid, 'uuid4') def test_get_guest_by_id(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # get guest guest = self.db_op.get_guest_by_id( 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'fakemeta=1, fakemeta2=True', u''), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') def test_get_guest_by_id_not_exist(self): guest = self.db_op.get_guest_by_id( 'aa8f352e-4c9e-4335-aafa-4f4eb2fcc77c') self.assertEqual(None, guest) @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_id(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.db_op.update_guest_by_id( 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', meta='newmeta', comments='newcomment') guest = self.db_op.get_guest_by_id( 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'newmeta', u'newcomment'), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_id_wrong_input(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.assertRaises(exception.DatabaseException, self.db_op.update_guest_by_id, 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') def test_update_guest_by_id_not_exist(self): self.assertRaises(exception.DatabaseException, self.db_op.update_guest_by_id, 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', meta='newmeta') @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_id_null_value(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.db_op.update_guest_by_id( 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', meta='', comments='') guest = self.db_op.get_guest_by_id( 'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'', u''), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_userid(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = 
u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.db_op.update_guest_by_userid( 'Fakeuser', meta='newmetauserid', comments='newcommentuserid') guest = self.db_op.get_guest_by_userid('Fakeuser') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'newmetauserid', u'newcommentuserid'), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_userid_wrong_input(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.assertRaises(exception.DatabaseException, self.db_op.update_guest_by_userid, 'FakeUser') self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') def test_update_guest_by_userid_not_exist(self): self.assertRaises(exception.DatabaseException, self.db_op.update_guest_by_userid, 'FaKeUser', meta='newmeta') @mock.patch.object(uuid, 'uuid4') def test_update_guest_by_userid_null_value(self, get_uuid): userid = 'fakeuser' meta = 'fakemeta=1, fakemeta2=True' get_uuid.return_value = u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c' self.db_op.add_guest(userid, meta=meta) # Update self.db_op.update_guest_by_userid( 'FaKeUser', meta='', comments='') guest = self.db_op.get_guest_by_userid('fakeuser') self.assertEqual((u'ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c', u'FAKEUSER', u'', u''), guest) self.db_op.delete_guest_by_id('ad8f352e-4c9e-4335-aafa-4f4eb2fcc77c') ``` #### File: tests/unit/test_hostops.py ```python import mock from zvmsdk import config from zvmsdk import hostops from zvmsdk.tests.unit import base CONF = config.CONF class SDKHostOpsTestCase(base.SDKTestCase): def setUp(self): self._hostops = hostops.get_hostops() @mock.patch.object(hostops.HOSTOps, 'diskpool_get_info') @mock.patch.object(hostops.get_hostops()._zvmclient, 'get_host_info') def test_get_host_info(self, get_host_info, diskpool_get_info): get_host_info.return_value = { "zvm_host": "FAKENODE", "zhcp": "fakehcp.fake.com", "cec_vendor": "FAKE", "cec_model": "2097", "hypervisor_os": "z/VM 6.1.0", "hypervisor_name": "fakenode", "architecture": "s390x", "lpar_cpu_total": "10", "lpar_cpu_used": "10", "lpar_memory_total": "16G", "lpar_memory_used": "16.0G", "lpar_memory_offline": "0", "ipl_time": "IPL at 03/13/14 21:43:12 EDT", } diskpool_get_info.return_value = { "disk_total": 406105, "disk_used": 367263, "disk_available": 38843, } host_info = self._hostops.get_info() get_host_info.assert_called_once_with() diskpool = CONF.zvm.disk_pool.split(':')[1] diskpool_get_info.assert_called_once_with(diskpool) self.assertEqual(host_info['vcpus'], 10) self.assertEqual(host_info['hypervisor_version'], 610) self.assertEqual(host_info['disk_total'], 406105) @mock.patch.object(hostops.get_hostops()._zvmclient, 'get_diskpool_info') def test_get_diskpool_info(self, get_diskpool_info): get_diskpool_info.return_value = { "disk_total": "406105.3 G", "disk_used": "367262.6 G", "disk_available": "38842.7 G", } dp_info = self._hostops.diskpool_get_info("fakepool") get_diskpool_info.assert_called_once_with("fakepool") self.assertEqual(dp_info['disk_total'], 406105) self.assertEqual(dp_info['disk_used'], 367263) self.assertEqual(dp_info['disk_available'], 38843) ```
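The hostops test above pins down an implicit contract: `get_host_info` reports `"hypervisor_os": "z/VM 6.1.0"` and `"lpar_cpu_total": "10"`, and `get_info` must surface `hypervisor_version == 610` and `vcpus == 10`. A minimal sketch of a helper satisfying the version assertion — `parse_hypervisor_version` is hypothetical, not part of the SDK:

```python
def parse_hypervisor_version(hypervisor_os):
    """Turn a version string such as 'z/VM 6.1.0' into the integer 610.

    The product name and version are space-separated; the dotted
    version components are concatenated into one integer.
    """
    version = hypervisor_os.split(" ")[1]       # e.g. "6.1.0"
    return int("".join(version.split(".")))     # e.g. 610


assert parse_hypervisor_version("z/VM 6.1.0") == 610
```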
{ "source": "jicheu/matrix-archive", "score": 3 }
#### File: jicheu/matrix-archive/matrix-archive.py
```python
from nio import (
    AsyncClient,
    AsyncClientConfig,
    MatrixRoom,
    MessageDirection,
    RedactedEvent,
    RoomEncryptedMedia,
    RoomMessage,
    RoomMessageFormatted,
    RoomMessageMedia,
    crypto,
    store,
    exceptions
)
from bullet import Check
from functools import partial
from typing import Union, TextIO
from urllib.parse import urlparse
from datetime import datetime

import aiofiles
import argparse
import asyncio
import getpass
import itertools
import os
import re
import sys
import csv

DEVICE_NAME = "matrix-archive"


def parse_args():
    """Parse arguments from command line call"""
    parser = argparse.ArgumentParser(
        description=__doc__,
        add_help=False,  # Use individual setting below instead
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "folder",
        metavar="FOLDER",
        default=".",
        nargs="?",  # Make positional argument optional
        help="""Set output folder """,
    )
    parser.add_argument(
        "--help",
        action="help",
        help="""Show this help message and exit """,
    )
    parser.add_argument(
        "--listrooms",
        action="store_true",
        help="""Use unattended batch mode to fetch the room list """,
    )
    parser.add_argument(
        "--batch",
        action="store_true",
        help="""Use unattended batch mode """,
    )
    parser.add_argument(
        "--server",
        metavar="HOST",
        default="https://matrix-client.matrix.org",
        help="""Set default Matrix homeserver """,
    )
    parser.add_argument(
        "--user",
        metavar="USER_ID",
        default="@user:matrix.org",
        help="""Set default user ID """,
    )
    parser.add_argument(
        "--userpass",
        metavar="PASSWORD",
        help="""Set default user password """,
    )
    parser.add_argument(
        "--keys",
        metavar="FILENAME",
        default="element-keys.txt",
        help="""Set default path to room E2E keys """,
    )
    parser.add_argument(
        "--keyspass",
        metavar="PASSWORD",
        help="""Set default passphrase for room E2E keys """,
    )
    parser.add_argument(
        "--room",
        metavar="ROOM_ID",
        default=[],
        action="append",
        help="""Add room to list of automatically fetched rooms """,
    )
    parser.add_argument(
        "--roomregex",
        metavar="PATTERN",
        default=[],
        action="append",
        help="""Same as --room but by regex pattern """,
    )
    parser.add_argument(
        "--all-rooms",
        action="store_true",
        help="""Select all rooms """,
    )
    parser.add_argument(
        "--no-media",
        action="store_true",
        help="""Don't download media """,
    )
    return parser.parse_args()


def mkdir(path):
    try:
        os.mkdir(path)
    except FileExistsError:
        pass
    return path


async def create_client() -> AsyncClient:
    homeserver = ARGS.server
    user_id = ARGS.user
    password = ARGS.userpass
    if not ARGS.batch and not ARGS.listrooms:
        homeserver = input(f"Enter URL of your homeserver: [{homeserver}] ") or homeserver
        user_id = input(f"Enter your full user ID: [{user_id}] ") or user_id
        # "<PASSWORD>" placeholders in the dump reconstructed as
        # getpass.getpass() / ARGS.keyspass, matching the imports and
        # the ARGS pattern used throughout this file.
        password = getpass.getpass()
    client = AsyncClient(
        homeserver=homeserver,
        user=user_id,
        config=AsyncClientConfig(store=store.SqliteMemoryStore),
    )
    await client.login(password, DEVICE_NAME)
    client.load_store()
    room_keys_path = ARGS.keys
    room_keys_password = ARGS.keyspass
    if not ARGS.batch and not ARGS.listrooms:
        room_keys_path = input(f"Enter full path to room E2E keys: [{room_keys_path}] ") or room_keys_path
        room_keys_password = getpass.getpass("Room keys password: ")
    print("Importing keys.
This may take a while...") await client.import_keys(room_keys_path, room_keys_password) return client def list_room(client: AsyncClient): selection=[] user_id=ARGS.user with open( f"{OUTPUT_DIR}/.env/rooms_list.{user_id}.txt","w" ) as rlist: for room_id, room in client.rooms.items(): selection.append( f"{room_id} -> {room.display_name}") # print(selection) cli = Check(choices=selection) result=cli.launch() #rlist.write(f"{room_id}, {room.display_name}\n") final="" for val in result: val=re.sub(":smart4.*","",re.sub("!","",val)) final=final + val+"\n" rlist.write(final) return client async def select_room(client: AsyncClient) -> MatrixRoom: print("\nList of joined rooms (room id, display name):") with open( f"{OUTPUT_DIR}/.env/rooms_list.{user_id}.txt","w" ) as rlist: for room_id, room in client.rooms.items(): print(f"{room_id}, {room.display_name}") rlist.write(f"{room_id}, {room.display_name}\n") room_id = input(f"Enter room id: ") return client.rooms[room_id] def choose_filename(filename): start, ext = os.path.splitext(filename) for i in itertools.count(1): if not os.path.exists(filename): break filename = f"{start}({i}){ext}" return filename async def write_event( client: AsyncClient, room: MatrixRoom, output_file, event: RoomMessage ) -> None: if not ARGS.no_media: media_dir = mkdir(f"{OUTPUT_DIR}/{room.display_name}_{room.room_id}_media") sender_name = f"<{event.sender}>" if event.sender in room.users: # If user is still present in room, include current nickname sender_name = f"{room.users[event.sender].display_name} {sender_name}" if isinstance(event, RoomMessageFormatted): #await output_file.write(serialize_event(dict(type="text", body=event.body,))) output=[event.sender,sender_name,str(datetime.fromtimestamp(event.server_timestamp/1000)),event.body] output_file.writerow(output) elif isinstance(event, (RoomMessageMedia, RoomEncryptedMedia)): media_data = await download_mxc(client, event.url) filename = choose_filename(f"{media_dir}/{event.body}") async with aiofiles.open(filename, "wb") as f: try: await f.write( crypto.attachments.decrypt_attachment( media_data, event.source["content"]["file"]["key"]["k"], event.source["content"]["file"]["hashes"]["sha256"], event.source["content"]["file"]["iv"], ) ) except KeyError: # EAFP: Unencrypted media produces KeyError await f.write(media_data) # Set atime and mtime of file to event timestamp os.utime(filename, ns=((event.server_timestamp * 1000000,) * 2)) #await output_file.write(serialize_event(dict(type="media", src="." 
+ filename[len(OUTPUT_DIR):],))) output=[event.sender,sender_name,str(datetime.fromtimestamp(event.server_timestamp/1000)),"."+filename[len(OUTPUT_DIR):]] output_file.writerow(output) elif isinstance(event, RedactedEvent): # await output_file.write(serialize_event(dict(type="redacted",))) output=[event.sender,sender_name,str(datetime.fromtimestamp(event.server_timestamp/1000)),"Redacted message"] output_file.writerow(output) async def save_avatars(client: AsyncClient, room: MatrixRoom) -> None: avatar_dir = mkdir(f"{OUTPUT_DIR}/{room.display_name}_{room.room_id}_avatars") for user in room.users.values(): if user.avatar_url: async with aiofiles.open(f"{avatar_dir}/{user.user_id}", "wb") as f: await f.write(await download_mxc(client, user.avatar_url)) async def download_mxc(client: AsyncClient, url: str): mxc = urlparse(url) response = await client.download(mxc.netloc, mxc.path.strip("/")) return response.body def is_valid_event(event): events = (RoomMessageFormatted, RedactedEvent) if not ARGS.no_media: events += (RoomMessageMedia, RoomEncryptedMedia) return isinstance(event, events) async def fetch_room_events( client: AsyncClient, start_token: str, room: MatrixRoom, direction: MessageDirection, ) -> list: events = [] while True: response = await client.room_messages( room.room_id, start_token, limit=1000, direction=direction ) if len(response.chunk) == 0: break events.extend(event for event in response.chunk if is_valid_event(event)) start_token = response.end return events async def write_room_events(client, room): print(f"Fetching {room.room_id} room messages and writing to disk...") sync_resp = await client.sync( full_state=True, sync_filter={"room": {"timeline": {"limit": 1}}} ) start_token = sync_resp.rooms.join[room.room_id].timeline.prev_batch # Generally, it should only be necessary to fetch back events but, # sometimes depending on the sync, front events need to be fetched # as well. 
fetch_room_events_ = partial(fetch_room_events, client, start_token, room) with open( f"{OUTPUT_DIR}/{room.display_name}_{room.room_id}.csv", "w" ) as f: csv_file=csv.writer(f) header=['User ID', 'Pretty Name', 'Date', 'Message'] csv_file.writerow(header) for events in [ reversed(await fetch_room_events_(MessageDirection.back)), await fetch_room_events_(MessageDirection.front), ]: for event in events: try: await write_event(client, room, csv_file, event) except exceptions.EncryptionError as e: print(e, file=sys.stderr) async def main() -> None: try: client = await create_client() await client.sync( full_state=True, # Limit fetch of room events as they will be fetched later sync_filter={"room": {"timeline": {"limit": 1}}}) for room_id, room in client.rooms.items(): # Iterate over rooms to see if a room has been selected to # be automatically fetched if room_id in ARGS.room or any(re.match(pattern, room_id) for pattern in ARGS.roomregex): print(f"Selected room: {room_id}") await write_room_events(client, room) if ARGS.batch: # If the program is running in unattended batch mode, # then we can quit at this point raise SystemExit if ARGS.listrooms: print ("Listing rooms...\n") list_room(client) else: while True: room = await select_room(client) await write_room_events(client, room) except KeyboardInterrupt: sys.exit(1) finally: await client.logout() await client.close() if __name__ == "__main__": ARGS = parse_args() if ARGS.all_rooms: # Select all rooms by adding a regex pattern which matches every string ARGS.roomregex.append(".*") OUTPUT_DIR = mkdir(ARGS.folder) asyncio.get_event_loop().run_until_complete(main()) ```
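A minimal sketch of consuming the archiver's output. It assumes only the CSV layout written by `write_room_events` above (columns `User ID`, `Pretty Name`, `Date`, `Message`); the file name is illustrative, following the `{display_name}_{room_id}.csv` pattern:

```python
import csv
from collections import Counter

# Count messages per sender in one exported room archive.
# "Some Room_!abc:matrix.org.csv" is a made-up name following the
# naming scheme used by write_room_events.
with open("Some Room_!abc:matrix.org.csv", newline="") as f:
    reader = csv.DictReader(f)  # header row supplies the column names
    counts = Counter(row["User ID"] for row in reader)

for user_id, n in counts.most_common():
    print(f"{user_id}: {n} messages")
```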
{ "source": "JIC-Image-Analysis/beetle_feeding_damage", "score": 2 }
#### File: beetle_feeding_damage/scripts/analysis.py ```python import os import logging import argparse import errno import numpy as np from dtoolcore import DataSet from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName, AutoWrite from skimage.morphology import disk from jicbioimage.transform import ( invert, threshold_otsu, remove_small_objects, erode_binary, ) from jicbioimage.segment import ( watershed_with_seeds, connected_components, Region, ) from jicbioimage.illustrate import Canvas __version__ = "0.1.0" AutoName.prefix_format = "{:03d}_" @transformation def identity(image): """Return the image as is.""" return image @transformation def select_red(image): return image[:, :, 0] @transformation def threshold_abs(image, min_value): return image > min_value @transformation def fill_small_holes(image, min_size): aw = AutoWrite.on AutoWrite.on = False image = invert(image) image = remove_small_objects(image, min_size=min_size) image = invert(image) AutoWrite.on = aw return image def fill_small_holes_in_region(region, min_size): aw = AutoWrite.on AutoWrite.on = False region = invert(region) region = remove_small_objects(region, min_size=min_size) region = invert(region) AutoWrite.on = aw return region def analyse_file_org(fpath, output_directory): """Analyse a single file.""" logging.info("Analysing file: {}".format(fpath)) image = Image.from_file(fpath) image = identity(image) image = select_red(image) image = invert(image) image = threshold_otsu(image) seeds = remove_small_objects(image, min_size=1000) seeds = fill_small_holes(seeds, min_size=1000) seeds = erode_binary(seeds, selem=disk(30)) seeds = connected_components(seeds, background=0) watershed_with_seeds(-image, seeds=seeds, mask=image) def get_negative_single_channel(image): negative = identity(image) negative = select_red(negative) negative = invert(negative) return negative def find_seeds(image): seeds = threshold_abs(image, 200) seeds = remove_small_objects(seeds, min_size=1000) seeds = connected_components(seeds, background=0) return seeds def find_mask(image): mask = threshold_abs(image, 170) return mask @transformation def post_process_segmentation(segmentation): for i in segmentation.identifiers: region = segmentation.region_by_identifier(i) region = fill_small_holes_in_region(region, 10000) segmentation[region] = i return segmentation def process_leaf(whole_leaf, eaten_leaf, ann): ys, xs = whole_leaf.index_arrays bounding_box = np.zeros(ann.shape[0:2]).view(Region) bounding_box[np.min(ys):np.max(ys), np.min(xs):np.max(xs)] = True eaten_fraction = float(eaten_leaf.area) / float(whole_leaf.area) percentage_eaten = (1.0 - eaten_fraction) * 100 ann.mask_region(eaten_leaf.inner.border.dilate(), (255, 0, 255)) ann.mask_region(whole_leaf.inner.border.dilate(), (0, 255, 255)) ann.mask_region(bounding_box.inner.border.dilate(), (255, 0, 0)) ann.text_at( "{:.0f}%".format(percentage_eaten), (np.min(ys) + 10, np.min(xs) + 10), size=56) return ann @transformation def annotate(image, whole_leaf_segmentation, eaten_leaf_segmentation): ann = image.view(Canvas) for i in whole_leaf_segmentation.identifiers: whole_leaf_region = whole_leaf_segmentation.region_by_identifier(i) eaten_leaf_region = eaten_leaf_segmentation.region_by_identifier(i) ann = process_leaf(whole_leaf_region, eaten_leaf_region, ann) return ann def analyse_file(fpath, output_directory, test_data_only=False): """Analyse a single file.""" logging.info("Analysing file: {}".format(fpath)) 
AutoName.directory = output_directory image = Image.from_file(fpath) negative = get_negative_single_channel(image) seeds = find_seeds(negative) mask = find_mask(negative) eaten_leaf_segmentation = watershed_with_seeds( negative, seeds=seeds, mask=mask) whole_leaf_segmentation = post_process_segmentation( eaten_leaf_segmentation.copy()) ann = annotate(image, whole_leaf_segmentation, eaten_leaf_segmentation) ann_fpath = os.path.join(output_directory, "annotated.png") with open(ann_fpath, "wb") as fh: fh.write(ann.png()) def safe_mkdir(directory): try: os.makedirs(directory) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(directory): pass else: raise def data_item_directory(output_directory, rel_path): abs_path = os.path.join(output_directory, rel_path) safe_mkdir(abs_path) return abs_path def analyse_dataset(dataset_dir, output_dir, test_data_only=False): """Analyse all the files in the dataset.""" dataset = DataSet.from_path(dataset_dir) logging.info("Analysing files in dataset: {}".format(dataset.name)) for i in dataset.identifiers: abs_path = dataset.abspath_from_identifier(i) item_info = dataset.item_from_identifier(i) specific_output_dir = data_item_directory( output_dir, item_info["path"]) analyse_file(abs_path, specific_output_dir, test_data_only) if test_data_only: break def analyse_directory(input_directory, output_directory): """Analyse all the files in a directory.""" logging.info("Analysing files in directory: {}".format(input_directory)) for fname in os.listdir(input_directory): fpath = os.path.join(input_directory, fname) analyse_file(fpath, output_directory) def main(): # Parse the command line arguments. parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("input_source", help="Input file/directory") parser.add_argument("output_dir", help="Output directory") parser.add_argument("--debug", default=False, action="store_true", help="Write out intermediate images") parser.add_argument("--test", default=False, action="store_true", help="Use only test data") args = parser.parse_args() # Create the output directory if it does not exist. if not os.path.isdir(args.output_dir): os.mkdir(args.output_dir) AutoName.directory = args.output_dir # Only write out intermediate images in debug mode. if not args.debug: AutoWrite.on = False # Setup a logger for the script. log_fname = "audit.log" log_fpath = os.path.join(args.output_dir, log_fname) logging_level = logging.INFO if args.debug: logging_level = logging.DEBUG logging.basicConfig(filename=log_fpath, level=logging_level) # Log some basic information about the script that is running. logging.info("Script name: {}".format(__file__)) logging.info("Script version: {}".format(__version__)) # Run the analysis. analyse_dataset(args.input_source, args.output_dir, args.test) if __name__ == "__main__": main() ```
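The damage metric in `process_leaf` reduces to simple area arithmetic. A minimal sketch of that calculation, using plain numpy boolean masks as stand-ins for the jicbioimage `Region` objects (the shapes and values here are made up for illustration):

```python
import numpy as np

# Boolean masks standing in for the whole-leaf and eaten-leaf regions;
# mask.sum() plays the role of Region.area in process_leaf().
whole_leaf = np.zeros((10, 10), dtype=bool)
whole_leaf[2:8, 2:8] = True       # 36 pixels of intact leaf
eaten_leaf = whole_leaf.copy()
eaten_leaf[4:6, 4:6] = False      # 4 pixels chewed away

eaten_fraction = float(eaten_leaf.sum()) / float(whole_leaf.sum())
percentage_eaten = (1.0 - eaten_fraction) * 100
print("{:.0f}%".format(percentage_eaten))  # prints 11%
```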
{ "source": "JIC-Image-Analysis/cells-from-leaves", "score": 2 }
#### File: cells-from-leaves/scripts/analysis.py ```python import os import logging import argparse import json from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName, AutoWrite from jicbioimage.segment import Region from utils import get_microscopy_collection from parameters import Parameters from surface import surface_from_stack from segment import segment_cells from projection import ( project_wall, project_marker, ) from annotation import write_cell_views __version__ = "0.5.0" AutoName.prefix_format = "{:03d}_" @transformation def identity(image): """Return the image as is.""" return image def save_cells(cells, wall_projection, marker_projection, output_directory): d = os.path.join(output_directory, "annotated-cells") if not os.path.isdir(d): os.mkdir(d) for i in cells.identifiers: region = cells.region_by_identifier(i) celldata = dict(cell_id=i, centroid=list(region.centroid), area=region.area) fpath_prefix = os.path.join(d, "cell-{:05d}".format(i)) write_cell_views(fpath_prefix, wall_projection, marker_projection, region, celldata) with open(fpath_prefix + ".json", "w") as fh: json.dump(celldata, fh) def analyse_file(fpath, mask, output_directory, **kwargs): """Analyse a single file.""" logging.info("Analysing file: {}".format(fpath)) microscopy_collection = get_microscopy_collection(fpath) wall_stack = microscopy_collection.zstack(c=kwargs["wall_channel"]) wall_stack = identity(wall_stack) surface = surface_from_stack(wall_stack, **kwargs) wall_projection = project_wall(wall_stack, surface, **kwargs) cells = segment_cells(wall_projection, surface, mask, **kwargs) marker_stack = microscopy_collection.zstack(c=kwargs["marker_channel"]) marker_stack = identity(marker_stack) marker_projection = project_marker(marker_stack, surface, **kwargs) save_cells(cells, wall_projection, marker_projection, output_directory) def main(): # Parse the command line arguments. parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("input_file", help="Input file") parser.add_argument("mask_file", help="Mask file") parser.add_argument("parameters_file", help="Parameters file") parser.add_argument("output_dir", help="Output directory") parser.add_argument("--debug", default=False, action="store_true", help="Write out intermediate images") args = parser.parse_args() # Check that the input file exists. if not os.path.isfile(args.input_file): parser.error("{} not a file".format(args.input_file)) if not os.path.isfile(args.parameters_file): parser.error("{} not a file".format(args.parameters_file)) # Read in the parameters. params = Parameters.from_file(args.parameters_file) # Create the output directory if it does not exist. if not os.path.isdir(args.output_dir): os.mkdir(args.output_dir) AutoName.directory = args.output_dir # Only write out intermediate images in debug mode. if not args.debug: AutoWrite.on = False # Setup a logger for the script. log_fname = "audit.log" log_fpath = os.path.join(args.output_dir, log_fname) logging_level = logging.INFO if args.debug: logging_level = logging.DEBUG logging.basicConfig(filename=log_fpath, level=logging_level) # Log some basic information about the script that is running. logging.info("Script name: {}".format(__file__)) logging.info("Script version: {}".format(__version__)) logging.info("Parameters: {}".format(params)) # Run the analysis. 
mask_im = Image.from_file(args.mask_file) mask = Region.select_from_array(mask_im, 0) identity(mask) analyse_file(args.input_file, mask, args.output_dir, **params) if __name__ == "__main__": main() ``` #### File: cells-from-leaves/scripts/batch_unpack.py ```python import os import argparse from time import time from utils import get_microscopy_collection def unpack_all(input_dir): for fname in os.listdir(input_dir): fpath = os.path.join(input_dir, fname) print("Processing {}...".format(fpath)) start = time() mc = get_microscopy_collection(fpath) end = time() print("time elapsed {} seconds.".format(end-start)) if __name__ == "__main__": parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("input_dir", help="Input directory") args = parser.parse_args() if not os.path.isdir(args.input_dir): parser.error("{} not a directory".format(args.input_dir)) unpack_all(args.input_dir) ``` #### File: cells-from-leaves/scripts/parameters.py ```python import yaml class Parameters(dict): """Class for storing, reading in and writing out parameters.""" @classmethod def from_yaml(cls, string): """Return Parameter instance from yaml string.""" p = cls() d = yaml.load(string) p.update(d) return p @classmethod def from_file(cls, fpath): """Read parameters from file.""" with open(fpath, "r") as fh: return cls.from_yaml(fh.read()) def to_yaml(self): """Return yaml string representation.""" return yaml.dump(dict(self), explicit_start=True, default_flow_style=False) def to_file(self, fpath): """Write parameters to file.""" with open(fpath, "w") as fh: fh.write(self.to_yaml()) def test_from_yaml(): p = Parameters.from_yaml("---\npi: 3.14\n") assert isinstance(p, Parameters) assert p["pi"] == 3.14 def test_to_yaml(): p = Parameters() p["pi"] = 3.14 assert p.to_yaml() == "---\npi: 3.14\n", p.to_yaml() ``` #### File: cells-from-leaves/scripts/utils.py ```python import os.path import logging from jicbioimage.core.image import MicroscopyCollection from jicbioimage.core.io import ( FileBackend, DataManager, _md5_hexdigest_from_file, ) HERE = os.path.dirname(os.path.realpath(__file__)) def get_data_manager(): """Return a data manager.""" data_dir = os.path.abspath(os.path.join(HERE, "..", "output")) if not os.path.isdir(data_dir): raise(OSError("Data directory does not exist: {}".format(data_dir))) backend_dir = os.path.join(data_dir, 'unpacked') file_backend = FileBackend(backend_dir) return DataManager(file_backend), backend_dir def get_microscopy_collection_from_tiff(input_file): """Return microscopy collection from tiff file.""" data_manager, backend_dir = get_data_manager() data_manager.load(input_file) md5_hex = _md5_hexdigest_from_file(input_file) manifest_path = os.path.join(backend_dir, md5_hex, "manifest.json") microscopy_collection = MicroscopyCollection() microscopy_collection.parse_manifest(manifest_path) return microscopy_collection def get_microscopy_collection_from_org(input_file): """Return microscopy collection from microscopy file.""" data_manager, _ = get_data_manager() return data_manager.load(input_file) def get_microscopy_collection(input_file): name, ext = os.path.splitext(input_file) ext = ext.lower() if ext == '.tif' or ext == '.tiff': logging.debug("reading in a tif file") return get_microscopy_collection_from_tiff(input_file) else: logging.debug("reading in a microscopy file") return get_microscopy_collection_from_org(input_file) ```
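The `Parameters` class above is just a dict with YAML (de)serialisation bolted on. A minimal round-trip sketch, assuming `parameters.py` is importable and using the `wall_channel`/`marker_channel` keys that `analyse_file` expects; note that newer PyYAML releases warn on `yaml.load` without an explicit `Loader`, for which `yaml.safe_load` is the modern equivalent:

```python
from parameters import Parameters  # assumes scripts/ is on the path

params = Parameters()
params["wall_channel"] = 0
params["marker_channel"] = 1
params.to_file("params.yml")

reloaded = Parameters.from_file("params.yml")
assert reloaded["wall_channel"] == 0
print(reloaded.to_yaml())
```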
{ "source": "JIC-Image-Analysis/cookiecutter-image-analysis", "score": 2 }
#### File: {{cookiecutter.project}}/scripts/analysis.py ```python import os import logging import argparse import errno from dtoolcore import DataSet from jicbioimage.core.image import Image from jicbioimage.core.transform import transformation from jicbioimage.core.io import AutoName, AutoWrite __version__ = "{{ cookiecutter.version }}" AutoName.prefix_format = "{:03d}_" def safe_mkdir(directory): """Create directories if they do not exist.""" try: os.makedirs(directory) except OSError as exc: # Python >2.5 if exc.errno == errno.EEXIST and os.path.isdir(directory): pass else: raise def item_output_path(output_directory, rel_path): """Return item output path; and create it if it does not already exist.""" abs_path = os.path.join(output_directory, rel_path) safe_mkdir(abs_path) return abs_path @transformation def identity(image): """Return the image as is.""" return image def analyse_file(fpath, output_directory): """Analyse a single file.""" logging.info("Analysing file: {}".format(fpath)) AutoName.directory = output_directory image = Image.from_file(fpath) image = identity(image) def analyse_dataset(dataset_dir, output_dir): """Analyse all the files in the dataset.""" dataset = DataSet.from_path(dataset_dir) logging.info("Analysing items in dataset: {}".format(dataset.name)) for i in dataset.identifiers: data_item_abspath = dataset.abspath_from_identifier(i) item_info = dataset.item_from_identifier(i) specific_output_dir = item_output_path(output_dir, item_info["path"]) analyse_file(data_item_abspath, specific_output_dir) def main(): # Parse the command line arguments. parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("input_dataset", help="Input dataset") parser.add_argument("output_dir", help="Output directory") parser.add_argument("--debug", default=False, action="store_true", help="Write out intermediate images") args = parser.parse_args() # Create the output directory if it does not exist. if not os.path.isdir(args.output_dir): os.mkdir(args.output_dir) AutoName.directory = args.output_dir # Only write out intermediate images in debug mode. if not args.debug: AutoWrite.on = False # Setup a logger for the script. log_fname = "audit.log" log_fpath = os.path.join(args.output_dir, log_fname) logging_level = logging.INFO if args.debug: logging_level = logging.DEBUG logging.basicConfig(filename=log_fpath, level=logging_level) # Log some basic information about the script that is running. logging.info("Script name: {}".format(__file__)) logging.info("Script version: {}".format(__version__)) # Run the analysis. if os.path.isdir(args.input_dataset): analyse_dataset(args.input_dataset, args.output_dir) else: parser.error("{} not a directory".format(args.input_dataset)) if __name__ == "__main__": main() ```
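`safe_mkdir` exists so that `item_output_path` can be called repeatedly for the same dataset item without raising. A small sketch of that idempotency, assuming the two functions above are in scope (the relative path is hypothetical):

```python
import os
import tempfile

base = tempfile.mkdtemp()
out = item_output_path(base, "series_1/plot_03")
assert os.path.isdir(out)
# A second call hits the EEXIST branch in safe_mkdir and is a no-op:
assert item_output_path(base, "series_1/plot_03") == out
```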
{ "source": "JIC-Image-Analysis/dtoolbioimage", "score": 2 }
#### File: dtoolbioimage/dtoolbioimage/convert.py ```python import os import re import sys import logging import subprocess from pathlib import Path from tempfile import TemporaryDirectory import xml.etree.ElementTree as ET import click import dtoolcore from parse import parse from dtoolbioimage.derived_dataset import DerivedDataSet def get_image_metadata_from_raw_image(raw_image_fpath): showinf_path = "showinf" showinf_args = ["-nocore", "-nopix", "-novalid", "-no-upgrade", "-omexml-only"] command = [showinf_path] + showinf_args + [raw_image_fpath] raw_output = subprocess.check_output(command) xml_string_output = raw_output.decode() image_metadata = image_metadata_from_xml(xml_string_output) return image_metadata def image_metadata_from_xml(xml_metadata_string): root = ET.fromstring(xml_metadata_string) xml_namespace = list(root.attrib.values())[0].split()[0] def ns_element(name): return "{{{}}}{}".format(xml_namespace, name) metadata_by_image_name = {} for element in root.findall(ns_element("Image")): image_name = element.attrib['Name'] pixels_element = element.find(ns_element("Pixels")) image_data = pixels_element.attrib metadata_by_image_name[image_name] = image_data return metadata_by_image_name def simple_slugify(input_string): return re.sub('[ #]', '_', input_string) def path_to_root_name(raw_path): basename = os.path.basename(raw_path) name, ext = os.path.splitext(basename) return simple_slugify(name) def run_conversion(raw_image_fpath, root_name, output_dirpath, sep): bfconvert_path = "bfconvert" bfconvert_args = ["-no-upgrade", "-nolookup"] Path(output_dirpath).mkdir(exist_ok=True, parents=True) format_string = "{}{}%n{}S%s_T%t_C%c_Z%z.png".format(root_name, sep, sep) output_format_string = os.path.join(output_dirpath, format_string) command = [bfconvert_path] + bfconvert_args + [raw_image_fpath, output_format_string] try: subprocess.call(command) except FileNotFoundError: sys.stderr.write("ERROR: Can't run bfconvert - is it installed and in the PATH?\n") raise def raw_image_idns(dataset): microscope_image_exts = ['.czi', '.lif'] def is_microscope_image(idn): root, ext = os.path.splitext(dataset.item_properties(idn)['relpath']) return ext in microscope_image_exts return list(idn for idn in dataset.identifiers if is_microscope_image(idn)) def convert_and_stage(raw_image_fpath, root_name, staging_path, output_ds): sep = "-_-" run_conversion(raw_image_fpath, root_name, staging_path, sep) image_metadata_by_name = get_image_metadata_from_raw_image(raw_image_fpath) to_push = os.listdir(staging_path) n_items = len(to_push) for n, fn in enumerate(to_push, 1): root_name, series_name, descriptor = fn.rsplit(sep, maxsplit=2) S, T, C, Z = parse("S{}_T{}_C{}_Z{}.png", descriptor) plane_coords = {'S': S, 'T': T, 'C': C, 'Z': Z} item_fpath = os.path.join(staging_path, fn) item_relpath = "{}/{}/{}".format(root_name, series_name, descriptor) image_metadata = image_metadata_by_name[series_name] logging.info('Pushing {}/{}, {}'.format(n, n_items, item_relpath)) output_ds.put_item(item_fpath, item_relpath) output_ds.add_item_metadata(item_relpath, "plane_coords", plane_coords) output_ds.add_item_metadata(item_relpath, "microscope_metadata", image_metadata) def convert_single_idn(dataset, idn, output_ds): with TemporaryDirectory() as tempdir: relpath_name = dataset.item_properties(idn)['relpath'] root_name = path_to_root_name(relpath_name) raw_image_fpath = dataset.item_content_abspath(idn) convert_and_stage(raw_image_fpath, root_name, tempdir, output_ds) def 
raw_image_dataset_to_image_dataset(dataset, output_ds): microscope_image_idns = raw_image_idns(dataset) for idn in microscope_image_idns: logging.info('Converting {}'.format(idn)) convert_single_idn(dataset, idn, output_ds) logging.info('Freezing') @click.command() @click.argument('dataset_uri') @click.argument('output_base_uri') @click.argument('output_name') def cli(dataset_uri, output_base_uri, output_name): logging.basicConfig(level=logging.INFO) dataset = dtoolcore.DataSet.from_uri(dataset_uri) with DerivedDataSet(output_base_uri, output_name, dataset, overwrite=True) as output_ds: output_ds.readme_dict['type'] = 'image_dataset' output_ds.readme_dict['converted_by'] = 'dtoolbioimage.convert' raw_image_dataset_to_image_dataset(dataset, output_ds) if __name__ == '__main__': cli() # NOQA ``` #### File: dtoolbioimage/dtoolbioimage/derived_dataset.py ```python import os import shutil import tempfile from pathlib import Path try: from StringIO import StringIO except ImportError: from io import StringIO import dtoolcore from ruamel.yaml import YAML def proto_dataset_from_base_uri(name, base_uri): admin_metadata = dtoolcore.generate_admin_metadata(name) parsed_base_uri = dtoolcore.utils.generous_parse_uri(base_uri) proto_dataset = dtoolcore.generate_proto_dataset( admin_metadata=admin_metadata, base_uri=dtoolcore.utils.urlunparse(parsed_base_uri) ) proto_dataset.create() return proto_dataset class DerivedDataSet(object): def __init__(self, output_base_uri, name, source_ds=None, overwrite=False): try: self.proto_dataset = proto_dataset_from_base_uri(name, output_base_uri) except dtoolcore.storagebroker.StorageBrokerOSError: if overwrite: dest_path = os.path.join(output_base_uri, name) shutil.rmtree(dest_path) self.proto_dataset = proto_dataset_from_base_uri(name, output_base_uri) else: raise self.readme_dict = {} if source_ds is not None: self.readme_dict['source_ds_name'] = source_ds.name self.readme_dict['source_ds_uri'] = source_ds.uri self.readme_dict['source_ds_uuid'] = source_ds.uuid def __enter__(self): self.tmpdir = Path(tempfile.mkdtemp()) self.to_stage = [] return self def _create_readme(self): yaml = YAML() yaml.explicit_start = True yaml.indent(mapping=2, sequence=4, offset=2) stream = StringIO() yaml.dump(self.readme_dict, stream) self.proto_dataset.put_readme(stream.getvalue()) def __exit__(self, type, value, traceback): for abspath, relpath in self.to_stage: self.proto_dataset.put_item(abspath, relpath) self._create_readme() self.proto_dataset.freeze() shutil.rmtree(self.tmpdir) def put_item(self, item_abspath, relpath): self.proto_dataset.put_item(item_abspath, relpath) def staging_fpath(self, relpath): parent_dirs = self.tmpdir/os.path.dirname(relpath) parent_dirs.mkdir(parents=True, exist_ok=True) staging_abspath = self.tmpdir/relpath self.to_stage.append((staging_abspath, relpath)) return staging_abspath def add_item_metadata(self, relpath, key, value): self.proto_dataset.add_item_metadata(relpath, key, value) @property def uri(self): return self.proto_dataset.uri ```
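`convert_and_stage` round-trips the plane coordinates through the bfconvert output filename. A sketch of that naming contract, using the same `-_-` separator and `parse` pattern as the code above (the filename itself is hypothetical):

```python
from parse import parse

sep = "-_-"
fn = "leaf_scan" + sep + "series_1" + sep + "S0_T3_C1_Z12.png"

root_name, series_name, descriptor = fn.rsplit(sep, maxsplit=2)
S, T, C, Z = parse("S{}_T{}_C{}_Z{}.png", descriptor)
print(root_name, series_name, {'S': S, 'T': T, 'C': C, 'Z': Z})
# leaf_scan series_1 {'S': '0', 'T': '3', 'C': '1', 'Z': '12'}
```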
{ "source": "JIC-Image-Analysis/find-plasmodesmata-dockerised", "score": 3 }
#### File: find-plasmodesmata-dockerised/scripts/analyse_image.py ```python import os import os.path import argparse import logging from jicbioimage.core.io import AutoName from plasmodesmata_analysis import ( get_microscopy_collection, plasmodesmata_analysis, __version__, log_settings, ) # Setup logging with a stream handler. logger = logging.getLogger(os.path.basename(__file__)) logger.setLevel(logging.DEBUG) ch = logging.StreamHandler() ch.setLevel(logging.DEBUG) logger.addHandler(ch) def analyse_all_series(microscopy_collection, output_dir, threshold, min_voxel, max_voxel): """Analyse all series in input microscopy file.""" for s in microscopy_collection.series: sub_dir = os.path.join(output_dir, str(s)) if not os.path.isdir(sub_dir): os.mkdir(sub_dir) AutoName.directory = sub_dir logger.info("Analysing series: {}".format(s)) plasmodesmata_analysis(microscopy_collection, s, threshold, min_voxel, max_voxel) def main(): parser = argparse.ArgumentParser(__doc__) parser.add_argument("input_file", help="path to raw microscopy data") parser.add_argument("output_dir", help="output directory") parser.add_argument("-t", "--threshold", default=15000, type=int, help="abs threshold (default=15000)") parser.add_argument("--min-voxel", default=2, type=int, help="Minimum voxel volume (default=2)") parser.add_argument("--max-voxel", default=50, type=int, help="Maximum voxel volume (default=50)") args = parser.parse_args() dir_name = os.path.basename(args.input_file).split(".")[0] specific_out_dir = os.path.join(args.output_dir, dir_name) if not os.path.isdir(args.output_dir): os.mkdir(args.output_dir) if not os.path.isdir(specific_out_dir): os.mkdir(specific_out_dir) if not os.path.isfile(args.input_file): parser.error("No such microscopy file: {}".format(args.input_file)) # Create file handle logger. fh = logging.FileHandler(os.path.join(specific_out_dir, "log"), mode="w") fh.setLevel(logging.DEBUG) format_ = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" formatter = logging.Formatter(format_) fh.setFormatter(formatter) logger.addHandler(fh) log_settings(logger, __version__, args) microscopy_collection = get_microscopy_collection(args.input_file) analyse_all_series(microscopy_collection, specific_out_dir, args.threshold, args.min_voxel, args.max_voxel) if __name__ == "__main__": main() ```
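The script logs to the console via the module-level stream handler and adds a per-run file handler inside `main`. A stripped-down sketch of that dual-handler pattern with the same format string (the log path is a placeholder):

```python
import logging
import os

logger = logging.getLogger("analyse_image_demo")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())  # console output

fh = logging.FileHandler(os.path.join("/tmp", "log"), mode="w")
format_ = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
fh.setFormatter(logging.Formatter(format_))
logger.addHandler(fh)

logger.info("messages now go to both the console and the log file")
```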
{ "source": "JIC-Image-Analysis/fishtools", "score": 2 }
#### File: fishtools/fishtools/segment.py ```python import json import numpy as np import skimage.draw import skimage.filters import skimage.exposure import skimage.segmentation import scipy.ndimage from dtoolbioimage.segment import Segmentation from dtoolbioimage import Image as dbiImage def cell_mask_from_fishimage(fishimage, params, probe_channel=0): ks = params.ks bs = params.bs sigma = params.sigma minproj = np.min(fishimage.probes[probe_channel], axis=2) eq = skimage.exposure.equalize_adapthist(minproj, kernel_size=(ks, ks)) eq_nuclear_proj_smoothed = skimage.filters.gaussian(eq, sigma=sigma) thresh_image = skimage.filters.threshold_local(eq_nuclear_proj_smoothed, block_size=bs) result = (eq_nuclear_proj_smoothed > thresh_image) return result def nuc_cell_mask_from_fishimage(fishimage, params): ks = params.ks bs = params.bs sigma = params.sigma minproj = np.min(fishimage.nuclei, axis=2) eq = skimage.exposure.equalize_adapthist(minproj, kernel_size=(ks, ks)) eq_nuclear_proj_smoothed = skimage.filters.gaussian(eq, sigma=sigma) thresh_image = skimage.filters.threshold_local(eq_nuclear_proj_smoothed, block_size=bs) result = (eq_nuclear_proj_smoothed > thresh_image) return result def label_image_from_coords(label_coords, dim): label_img = np.zeros(dim, dtype=np.uint16) current_label = 1 for label, points in label_coords.items(): for p in points: r, c = p rr, cc = skimage.draw.disk((r, c), 12) try: label_img[rr, cc] = current_label current_label += 1 except IndexError: pass return label_img def label_coords_from_points_fpath(fpath): with open(fpath) as fh: label_coords = json.load(fh) return label_coords def label_image_from_points_fpath(fpath, dim): label_coords = label_coords_from_points_fpath(fpath) return label_image_from_coords(label_coords, dim) def filter_segmentation_by_region_list(segmentation, region_ids): rids_not_in_files = segmentation.labels - set(region_ids) trimmed_segmentation = segmentation.copy() for rid in rids_not_in_files: trimmed_segmentation[np.where(trimmed_segmentation == rid)] = 0 return Segmentation.from_array(trimmed_segmentation) def segmentation_from_nuclear_channel_and_markers(fishimage, label_img, params): nucmask = nuc_cell_mask_from_fishimage(fishimage, params) assert nucmask.shape == label_img.shape n_segmentation = skimage.segmentation.watershed( -scipy.ndimage.distance_transform_edt(nucmask), markers=label_img, mask=nucmask ).view(Segmentation) return n_segmentation def segmentation_from_cellmask_and_label_image(cell_mask, label_img): noholes = skimage.morphology.remove_small_holes(cell_mask, area_threshold=150) segmentation = skimage.segmentation.watershed( -scipy.ndimage.distance_transform_edt(noholes), markers=label_img, mask=noholes ) return Segmentation.from_array(segmentation) def filter_segmentation_by_label_coords(segmentation, label_coords): valid_labels = { segmentation[tuple(p)] for p in label_coords["1"] } return filter_segmentation_by_region_list(segmentation, valid_labels) def scale_segmentation(cell_regions, maxproj): scaled_cell_regions = Segmentation.from_array( skimage.transform.resize( cell_regions, maxproj.shape, anti_aliasing=False, order=0, preserve_range=True ).astype(int) ) return scaled_cell_regions def get_filtered_segmentation(dataitem, params): nuc_label_image = segmentation_from_nuclear_channel_and_markers( dataitem.fishimage, skimage.measure.label(dataitem.scaled_markers), params ) # nuc_label_image.pretty_color_image.view(dbiImage).save("nuc_label_img.png") segmentation = segmentation_from_cellmask_and_label_image( 
dataitem.cell_mask(params), nuc_label_image ) scaled_good_mask = scale_segmentation(dataitem.good_mask, dataitem.maxproj) labelled_points = skimage.measure.label(scaled_good_mask) rprops = skimage.measure.regionprops(labelled_points) region_centroids = [r.centroid for r in rprops] icentroids = [(int(r), int(c)) for r, c in region_centroids] good_regions = [segmentation[r, c] for r, c in icentroids] filtered_segmentation = filter_segmentation_by_region_list( segmentation, good_regions ) return filtered_segmentation ``` #### File: fishtools/scripts/process_a_data_item.py ```python from types import SimpleNamespace import click import skimage.measure from fishtools.data import DataLoader, DataItem from fishtools.segment import ( segmentation_from_nuclear_channel_and_markers, segmentation_from_cellmask_and_label_image ) from fishtools.vis import visualise_counts @click.command() def main(): config = { "deconv_dirpath": "/Users/mhartley/Dropbox/fca alleles project/smFISH Venus probes/smFISH Venus deconvolved/fca-3", "deconv_fname_template": "fca3-FLCVenus-VenusRNA{n}_fca3-FLCVenus-VenusRNA{n}_0_qmle_ch01.tif", "ids_uri": "/Users/mhartley/data_repo/fish_test_ids/", "image_name_template": "fca3-FLCVenus-VenusRNA{n}", "series_name_template": "fca3-FLCVenus-VenusRNA{n}.czi #1", "annotation_dirpath": "local-data/fca3", "annotation_template": "fca3_{n}.png", "bad_col": (10, 14, 96), "good_col": (103, 20, 0) } dl = DataLoader(config) n = 14 dataitem = dl.load_by_specifier(n=n) params = SimpleNamespace(ks=11, bs=191, sigma=2) nuc_label_image = segmentation_from_nuclear_channel_and_markers( dataitem.fishimage, skimage.measure.label(dataitem.scaled_markers), params ) segmentation = segmentation_from_cellmask_and_label_image( dataitem.cell_mask(params), nuc_label_image ) # import skimage.measure # scaled_good_mask = scale_segmentation(dataitem.good_mask, dataitem.maxproj) # labelled_points = skimage.measure.label(scaled_good_mask) # rprops = skimage.measure.regionprops(labelled_points) # region_centroids = [r.centroid for r in rprops] # icentroids = [(int(r), int(c)) for r, c in region_centroids] vis = visualise_counts(dataitem.maxproj, segmentation, dataitem.probe_locs_2d(75)) vis.save(f"vis{n}.png") if __name__ == "__main__": main() ```
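`cell_mask_from_fishimage` above is an equalise/smooth/local-threshold recipe parameterised by `ks`, `bs` and `sigma`, which is why any object with those three attributes works as `params` (hence the `SimpleNamespace` in the driver script). A self-contained sketch of the same steps on synthetic data:

```python
import numpy as np
import skimage.exposure
import skimage.filters

rng = np.random.default_rng(0)
minproj = rng.random((256, 256))  # stands in for np.min(stack, axis=2)

eq = skimage.exposure.equalize_adapthist(minproj, kernel_size=(11, 11))
smoothed = skimage.filters.gaussian(eq, sigma=2)
thresh = skimage.filters.threshold_local(smoothed, block_size=191)
mask = smoothed > thresh
print(mask.mean())  # fraction of pixels classed as foreground
```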
{ "source": "JIC-Image-Analysis/leaf-cell-polarisation-tensors", "score": 3 }
#### File: leaf-cell-polarisation-tensors/scripts/annotate.py ```python import PIL from jicbioimage.core.util.color import pretty_color_from_identifier from jicbioimage.illustrate import AnnotatedImage from utils import marker_cell_identifier def annotate_segmentation(cells, fh): """Write out segmentation image.""" fh.write(cells.png()) def annotate_markers(markers, cells, fh): """Write out marker image.""" ydim, xdim = markers.shape ann = AnnotatedImage.blank_canvas(width=xdim, height=ydim) for i in markers.identifiers: m_region = markers.region_by_identifier(i) cell_id = marker_cell_identifier(m_region, cells) color = pretty_color_from_identifier(cell_id) ann.mask_region(m_region, color) fh.write(ann.png()) def annotate_tensors(ydim, xdim, tensor_manager, fh): """Write out tensor image.""" ann = AnnotatedImage.blank_canvas(width=xdim, height=ydim) for i in tensor_manager.identifiers: tensor = tensor_manager[i] color = pretty_color_from_identifier(tensor.cell_id) ann.draw_line(tensor.centroid, tensor.marker, color) fh.write(ann.png()) def make_transparent(pil_im, alpha): """Return rgba pil image.""" pil_im = pil_im.convert("RGBA") pixdata = pil_im.load() for y in xrange(pil_im.size[1]): for x in xrange(pil_im.size[0]): rgba = list(pixdata[x, y]) rgba[-1] = alpha pixdata[x, y] = tuple(rgba) return pil_im ``` #### File: leaf-cell-polarisation-tensors/scripts/segment.py ```python from jicbioimage.transform import ( dilate_binary, invert, remove_small_objects, max_intensity_projection, ) from jicbioimage.segment import ( connected_components, watershed_with_seeds, ) from utils import threshold_abs, remove_large_segments def cell_segmentation(wall_intensity2D, wall_mask2D, max_cell_size): """Return image segmented into cells.""" seeds = dilate_binary(wall_mask2D) seeds = invert(seeds) seeds = remove_small_objects(seeds, min_size=10) seeds = connected_components(seeds, background=0) segmentation = watershed_with_seeds(-wall_intensity2D, seeds=seeds) segmentation = remove_large_segments(segmentation, max_cell_size) return segmentation def marker_segmentation(marker_intensity3D, wall_mask3D, threshold): """Return fluorescent marker segmentation.""" marker_intensity3D = marker_intensity3D * wall_mask3D markers2D = max_intensity_projection(marker_intensity3D) markers2D = threshold_abs(markers2D, threshold) markers2D = remove_small_objects(markers2D, min_size=50) return connected_components(markers2D, background=0) ``` #### File: leaf-cell-polarisation-tensors/scripts/utils.py ```python import os.path import logging import numpy as np from jicbioimage.core.image import MicroscopyCollection from jicbioimage.core.transform import transformation from jicbioimage.core.io import ( AutoWrite, FileBackend, DataManager, _md5_hexdigest_from_file, ) from jicbioimage.transform import ( remove_small_objects, max_intensity_projection, invert, remove_small_objects, ) HERE = os.path.dirname(os.path.realpath(__file__)) def get_data_manager(): """Return a data manager.""" data_dir = os.path.abspath(os.path.join(HERE, "..", "data")) if not os.path.isdir(data_dir): raise(OSError("Data directory does not exist: {}".format(data_dir))) backend_dir = os.path.join(data_dir, 'unpacked') file_backend = FileBackend(backend_dir) return DataManager(file_backend), backend_dir def get_microscopy_collection_from_tiff(input_file): """Return microscopy collection from tiff file.""" data_manager, backend_dir = get_data_manager() data_manager.load(input_file) md5_hex = _md5_hexdigest_from_file(input_file) manifest_path = 
os.path.join(backend_dir, md5_hex, "manifest.json") microscopy_collection = MicroscopyCollection() microscopy_collection.parse_manifest(manifest_path) return microscopy_collection def get_microscopy_collection_from_org(input_file): """Return microscopy collection from microscopy file.""" data_manager, _ = get_data_manager() return data_manager.load(input_file) def get_microscopy_collection(input_file): name, ext = os.path.splitext(input_file) ext = ext.lower() if ext == '.tif' or ext == '.tiff': logging.debug("reading in a tif file") return get_microscopy_collection_from_tiff(input_file) else: logging.debug("reading in a microscopy file") return get_microscopy_collection_from_org(input_file) @transformation def identity(image): return image @transformation def threshold_abs(image, threshold): """Return image thresholded using the mean.""" return image > threshold @transformation def mask_from_large_objects(image, max_size): tmp_autowrite = AutoWrite.on AutoWrite.on = False mask = remove_small_objects(image, min_size=max_size) mask = invert(mask) AutoWrite.on = tmp_autowrite return mask def test_remove_large_objects(): ar = np.array([[0, 0, 1, 1], [0, 0, 1, 1], [0, 0, 0, 0], [1, 0, 0, 0]], dtype=bool) exp = np.array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [1, 0, 0, 0]], dtype=bool) out = remove_large_objects(ar, max_size=3) print(out) assert np.array_equal(out, exp) @transformation def remove_large_segments(segmentation, max_size): for i in segmentation.identifiers: region = segmentation.region_by_identifier(i) if region.area > max_size: segmentation[region] = 0 return segmentation def segment_zslice(image): """Segment a zslice.""" tmp_autowrite = AutoWrite.on AutoWrite.on = False image = identity(image) image = threshold_abs(image, 100) image = remove_small_objects(image, min_size=500) AutoWrite.on = tmp_autowrite return image def preprocess_zstack(zstack_proxy_iterator, cutoff): """Select the pixels where the signal is.""" raw = [] zstack = [] for i, proxy_image in enumerate(zstack_proxy_iterator): image = proxy_image.image segmented = segment_zslice(image) raw.append(image) zstack.append(segmented) return np.dstack(raw), np.dstack(zstack) def get_wall_intensity_and_mask_images(microscopy_collection, channel): """ Return (wall_intensity2D, wall_intensity3D, wall_mask2D, wall_mask3D). """ wall_ziter = microscopy_collection.zstack_proxy_iterator(c=channel) wall_intensity3D, wall_mask3D = preprocess_zstack(wall_ziter, 90) wall_intensity2D = max_intensity_projection(wall_intensity3D) wall_mask2D = max_intensity_projection(wall_mask3D) return wall_intensity2D, wall_intensity3D, wall_mask2D, wall_mask3D def get_marker_intensity_images(microscopy_collection, channel): """REturn (marker_intensity2D, marker_intensity3D) tuple.""" marker_intensity3D = microscopy_collection.zstack_array(c=channel) marker_intensity2D = max_intensity_projection(marker_intensity3D) return marker_intensity2D, marker_intensity3D def marker_cell_identifier(marker_region, cells): """Return cell identifier of marker region.""" pos = marker_region.convex_hull.centroid return cells[pos] ```
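`test_remove_large_objects` in `utils.py` exercises a `remove_large_objects` helper that is not defined in the file as shown. A minimal implementation consistent with the test's expected output might look like the following; the composition out of `remove_small_objects` mirrors `mask_from_large_objects` above, but is an assumption:

```python
import numpy as np
from jicbioimage.transform import remove_small_objects

def remove_large_objects(ar, max_size):
    # Objects strictly larger than max_size survive remove_small_objects
    # with min_size=max_size + 1; subtracting them keeps only small ones.
    large = remove_small_objects(ar, min_size=max_size + 1)
    return np.logical_and(ar, np.logical_not(large))
```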
{ "source": "JIC-Image-Analysis/profile_lines", "score": 3 }
#### File: profile_lines/scripts/profile_lines.py
```python
import argparse
from collections import namedtuple

import numpy as np
import skimage.morphology

from jicbioimage.core.image import Image
from jicbioimage.core.transform import transformation
from jicbioimage.segment import connected_components
from jicbioimage.transform import dilate_binary, remove_small_objects

Datum = namedtuple("datum", "time, intensity, series")


@transformation
def convert_to_grayscale(rgb_image):
    # Use the red channel as the grayscale representation.
    return rgb_image[:, :, 0]


@transformation
def convert_to_signal(line_image):
    """Convert RGB line_image into binary image where True denotes signal."""
    grayscale_image = convert_to_grayscale(line_image)
    inverted = 255 - grayscale_image
    return inverted > 0


@transformation
def skeletonize(image):
    return skimage.morphology.skeletonize(image)


def csv_header():
    return ",".join(Datum._fields) + "\n"


def yield_data(line_profile, series):
    for time, intensity in enumerate(line_profile):
        yield Datum(str(time), str(intensity), str(series))


def csv_lines(line_profile, series):
    return [",".join(d) for d in yield_data(line_profile, series)]


def csv_body(line_profile, series):
    return "\n".join(csv_lines(line_profile, series)) + "\n"


def save_line_profile(filename, line_profile, series):
    with open(filename, 'w') as f:
        f.write(csv_header())
        f.write(csv_body(line_profile, series))


def segment(line_image, dilation):
    lines = convert_to_signal(line_image)
    lines = skeletonize(lines)
    lines = remove_small_objects(lines, min_size=10, connectivity=2)
    lines = dilate_binary(lines, selem=np.ones((1, dilation)))
    segmentation = connected_components(lines, background=0)
    return segmentation


def yield_line_masks(segmented_lines):
    for i in segmented_lines.identifiers:
        region = segmented_lines.region_by_identifier(i)
        yield region


def sample_image_from_lines(image_file, lines_file, dilation, reduce_method):
    data_image = Image.from_file(image_file)
    line_image = Image.from_file(lines_file)
    segmented_lines = segment(line_image, dilation)
    with open("all_series.csv", "w") as fh:
        fh.write(csv_header())
        for n, line_region in enumerate(yield_line_masks(segmented_lines)):
            line_intensity = data_image * line_region
            if reduce_method == "max":
                line_profile = np.amax(line_intensity, axis=1)
            elif reduce_method == "mean":
                sum_intensity = np.sum(line_intensity, axis=1)
                sum_rows = np.sum(line_region, axis=1)
                line_profile = sum_intensity / sum_rows
            else:
                err_msg = "Unknown reduce method: {}".format(reduce_method)
                raise RuntimeError(err_msg)
            series_filename = "series_{:02d}.csv".format(n)
            save_line_profile(series_filename, line_profile, n)
            fh.write(csv_body(line_profile, n))


def main():
    parser = argparse.ArgumentParser(__doc__)
    parser.add_argument('kymograph_file', help='Image containing kymograph')
    parser.add_argument('line_file', help='Image containing lines')
    parser.add_argument('-d', '--dilation', default=2, type=int,
                        help='Dilation of line')
    parser.add_argument('-r', '--reduce-method', default="max",
                        choices=("max", "mean"),
                        help='Method to reduce row to single value')
    args = parser.parse_args()
    sample_image_from_lines(args.kymograph_file, args.line_file,
                            args.dilation, args.reduce_method)


if __name__ == '__main__':
    main()
```
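The CSV helpers above produce one row per kymograph time point. A quick sketch of the output layout, assuming the functions are in scope:

```python
profile = [10, 250, 37]  # a toy line profile, one intensity per image row
print(csv_header() + csv_body(profile, 0))
# time,intensity,series
# 0,10,0
# 1,250,0
# 2,37,0
```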
{ "source": "JIC-Image-Analysis/seed_cell_parameterisation", "score": 2 }
#### File: seed_cell_parameterisation/scripts/dtoolutils.py ```python import os import shutil import tempfile from contextlib import contextmanager TMPDIR_PREFIX = os.path.expanduser('~/tmp') @contextmanager def temp_working_dir(): working_dir = tempfile.mkdtemp(prefix=TMPDIR_PREFIX) try: yield working_dir finally: shutil.rmtree(working_dir) def stage_outputs( outputs, working_dir, dataset, output_dataset, overlays_to_copy, identifier ): for filename, metadata in outputs: src_abspath = os.path.join(working_dir, filename) useful_name = dataset.get_overlay('useful_name')[identifier] relpath = os.path.join(useful_name, filename) print("Push {} as {}.".format(src_abspath, relpath)) output_dataset.put_item(src_abspath, relpath) # Add 'from' overlay output_dataset.add_item_metadata(relpath, 'from', identifier) # Copy overlays for overlay_name in overlays_to_copy: value = dataset.get_overlay(overlay_name)[identifier] output_dataset.add_item_metadata(relpath, overlay_name, value) # Add extra metadata for k, v in metadata.items(): output_dataset.add_item_metadata(relpath, k, v) ``` #### File: seed_cell_parameterisation/scripts/find_uncompleted.py ```python import yaml import click from dtool_azure import AzureDataSet, AzureProtoDataSet @click.command() @click.option('--config-path') def main(config_path=None): with open('analysis.yml') as fh: analyis_config = yaml.load(fh) input_uuid = analyis_config['input_dataset'] output_uuid = analyis_config['output_dataset'] input_dataset = AzureDataSet.from_uri(input_uuid, config_path=config_path) output_dataset = AzureProtoDataSet.from_uri(output_uuid) input_identifiers = set(input_dataset.identifiers) completed_identifers = set([ output_dataset._item_metadata(identifier)['from'] for identifier in output_dataset._iteridentifiers() ]) uncompleted_identififers = input_identifiers - completed_identifers print("Completed {} of {}".format( len(completed_identifers), len(input_identifiers) ) ) if __name__ == '__main__': main() ``` #### File: seed_cell_parameterisation/scripts/seedcellsize.py ```python import os import argparse import subprocess from dtoolcore import DataSet, ProtoDataSet from dtoolutils import temp_working_dir, stage_outputs class PythonSmartTool(object): def __init__(self): parser = argparse.ArgumentParser() parser.add_argument( '-d', '--dataset', help='URI of input dataset' ) parser.add_argument( '-i', '--identifier', help='Identifier (hash) to process' ) parser.add_argument( '-o', '--output-dataset', help='URI of output dataset' ) args = parser.parse_args() self.input_dataset = DataSet.from_uri(args.dataset) self.output_dataset = ProtoDataSet.from_uri(args.output_dataset) self.identifier = args.identifier def run(self): input_path = self.input_dataset.item_content_abspath(self.identifier) with temp_working_dir() as tmpdir: command = ["python", "/scripts/analysis.py"] command += ["-i", input_path] command += ["-o", tmpdir] subprocess.call(command) outputs_with_metadata = [(o, {}) for o in self.outputs] stage_outputs( outputs_with_metadata, tmpdir, self.input_dataset, self.output_dataset, [], self.identifier ) def main(): tool = PythonSmartTool() # tool.run() # tool.container = "jicscicomp/seedcellsize" # tool.command_string = "python /scripts/analysis.py -i /input1 -o /output" tool.outputs = [ 'original.png', 'segmentation.png', 'labels.png', 'false_color.png', 'results.csv' ] tool.run() if __name__ == '__main__': main() ``` #### File: seed_cell_parameterisation/scripts/worker.py ```python import json import time import shlex import subprocess 
import click
import redis


def execute_task(task):
    command = ['python'] + shlex.split(task["tool_path"])
    command += ['-d', task["input_uuid"]]
    command += ['-i', task["identifier"]]
    command += ['-o', task["output_uuid"]]
    return subprocess.call(command)


@click.command()
@click.option('--redis-host', envvar='REDIS_HOST')
def main(redis_host):
    r = redis.StrictRedis(host=redis_host, port=6379)

    while True:
        task_identifier = r.brpoplpush('workqueue', 'inprogress')
        raw_task = r.hget('tasks', task_identifier)
        task = json.loads(raw_task)
        return_code = execute_task(task)

        # Compare by value; identity comparison with "is 0" is unreliable.
        if return_code == 0:
            r.lrem('inprogress', 1, task_identifier)
            r.lpush('completed', task_identifier)
        else:
            # A bare string cannot be raised; wrap it in an exception.
            raise RuntimeError(
                "BLEW UP HORRIBLY on {}".format(task["identifier"])
            )


if __name__ == '__main__':
    main()
```
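`worker.py` consumes identifiers from the `workqueue` list and looks each task body up in the `tasks` hash, so a producer has to populate both. A minimal enqueueing sketch using the field names `execute_task` reads; the URIs and identifier are placeholders:

```python
import json
import redis

r = redis.StrictRedis(host="localhost", port=6379)

task = {
    "tool_path": "/scripts/seedcellsize.py",  # hypothetical tool path
    "input_uuid": "<input-dataset-uri>",
    "output_uuid": "<output-dataset-uri>",
    "identifier": "<item-identifier>",
}
r.hset("tasks", task["identifier"], json.dumps(task))
r.lpush("workqueue", task["identifier"])
```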
{ "source": "JIC-Image-Analysis/senescence-in-field", "score": 3 }
#### File: senescence-in-field/scripts/copy_overlay.py ```python import click import dtoolcore def ensure_uri(path_or_uri): if ':' in path_or_uri: return path_or_uri else: return "disk:{}".format(path_or_uri) @click.command() @click.argument('src_dataset_path') @click.argument('dst_dataset_path') @click.argument('overlay_name') def main(src_dataset_path, dst_dataset_path, overlay_name): src_uri = ensure_uri(src_dataset_path) dst_uri = ensure_uri(dst_dataset_path) src_dataset = dtoolcore.DataSet.from_uri(src_uri) dst_dataset = dtoolcore.DataSet.from_uri(dst_uri) src_overlay = src_dataset.get_overlay(overlay_name) dst_overlay = {} from_overlay = dst_dataset.get_overlay('from') for dst_id in dst_dataset.identifiers: src_id = from_overlay[dst_id] dst_overlay[dst_id] = src_overlay[src_id] dst_dataset.put_overlay(overlay_name, dst_overlay) if __name__ == '__main__': main() ``` #### File: senescence-in-field/scripts/create_pca_component_overlay.py ```python import csv from collections import defaultdict import dtoolcore import click import numpy as np def calc_pca_components(all_entries): rgb_matrix = np.transpose(np.array( [ map(float, [entry['R'], entry['G'], entry['B']]) for entry in all_entries ] )) cov = np.cov(rgb_matrix) evalues, evectors = np.linalg.eig(cov) return evectors.T def calc_senescence(entry, pca_rotation): c_R = pca_rotation[0] * float(entry['R']) c_G = pca_rotation[1] * float(entry['G']) c_B = pca_rotation[2] * float(entry['B']) return c_R + c_G + c_B def find_senescence_values_by_plot_and_date(results): pca_components = calc_pca_components(results) pca_component_2 = pca_components[1] by_plot_then_date = defaultdict(dict) for entry in results: senescence = calc_senescence(entry, pca_component_2) by_plot_then_date[entry['plot']][entry['date']] = senescence return by_plot_then_date def generate_pca_overlay(dataset, results): senescence_values = find_senescence_values_by_plot_and_date(results) plot_number_overlay = dataset.get_overlay('plot_number') ordering_overlay = dataset.get_overlay('ordering') date_overlay = dataset.get_overlay('date') pca_overlay = {} for identifier in dataset.identifiers: label = "{}_{}".format( plot_number_overlay[identifier], ordering_overlay[identifier] ) date = date_overlay[identifier] try: senescence = senescence_values[label][date] except KeyError: senescence = None pca_overlay[identifier] = senescence dataset.put_overlay('pca_component_2', pca_overlay) def load_output_csv_data(results_file): with open(results_file, 'r') as fh: reader = csv.DictReader(fh) all_entries = [row for row in reader] return all_entries @click.command() @click.argument('dataset_uri') @click.argument('results_csv_file') def main(dataset_uri, results_csv_file): dataset = dtoolcore.DataSet.from_uri(dataset_uri) results = load_output_csv_data(results_csv_file) generate_pca_overlay(dataset, results) if __name__ == '__main__': main() ``` #### File: senescence-in-field/scripts/entropy_segment.py ```python import numpy as np from skimage.morphology import disk from jicbioimage.core.transform import transformation from jicbioimage.transform import ( threshold_otsu, remove_small_objects, erode_binary, smooth_gaussian ) from jicbioimage.segment import connected_components, watershed_with_seeds from utils import ( red_channel, green_channel, blue_channel, abs_difference, fill_small_holes, ) @transformation def threshold_abs(image): return image > 20 @transformation def median_filter(image): from skimage.filters.rank import median return median(image, disk(10)) @transformation def 
local_entropy(image): smoothed = median_filter(image) diff = smoothed.astype(np.int16) - image.astype(np.int16) diff[np.where(diff < 0)] = 0 return diff def normalise_array(array): a_min = array.min() a_max = array.max() return (array - a_min) / (a_max - a_min) def force_to_uint8(array): normalised = normalise_array(array) scaled = normalised * 255 return scaled.astype(np.uint8) @transformation def sklocal(image): from skimage.filters.rank import entropy le = entropy(image, disk(5)) return force_to_uint8(le) @transformation def skmean(image): from skimage.filters.rank import mean mean_filtered = mean(image, disk(30)) print mean_filtered.min(), mean_filtered.max() return mean_filtered @transformation def segment(image): """Return field plots.""" red = red_channel(image) green = green_channel(image) image = sklocal(green) print image.min(), image.max() image = skmean(image) #entropy = local_entropy(green) #smoothed = median_filter(entropy) #image = difference(blue_green, red) #image = difference(green, red) mask = threshold_otsu(image) #mask = threshold_abs(image) mask = remove_small_objects(mask, min_size=1000) mask = fill_small_holes(mask, min_size=100) #seeds = erode_binary(mask, selem=disk(10)) seeds = erode_binary(mask, selem=disk(50)) seeds = remove_small_objects(seeds, min_size=100) seeds = connected_components(seeds, background=0) #return watershed_with_seeds(-image, seeds=seeds, mask=mask) return watershed_with_seeds(image, seeds=seeds, mask=mask) @transformation def filter_sides(segmentation): """Remove hedges on left and right hand side. Also remove anything from the edge of the hedge to the closest edge of the image. """ ydim, xdim = segmentation.shape mid_point = xdim // 2 for i in segmentation.identifiers: region = segmentation.region_by_identifier(i) if region.area > 200000: segmentation[region] = 0 y, x = [int(i) for i in region.centroid] if x < mid_point: # Left hand side of the hedge. xlim = np.min(region.index_arrays[1]) # Using the identifiers in the region rather # than masking the region itself avoids the # issue of ending up with small cutoff left # overs. ids = np.unique(segmentation[0:ydim, 0:xlim]) for i in ids: segmentation[segmentation == i] = 0 else: # Right hand side of the hedge. xlim = np.max(region.index_arrays[1]) ids = np.unique(segmentation[0:ydim, xlim:xdim]) for i in ids: segmentation[segmentation == i] = 0 return segmentation @transformation def filter_touching_border(segmentation): """Remove any plots touching top and bottom border of image.""" ydim, xdim = segmentation.shape for i in segmentation.identifiers: region = segmentation.region_by_identifier(i) ys = region.index_arrays[0] if np.min(ys) == 0: segmentation[region] = 0 if np.max(ys) == ydim - 1: segmentation[region] = 0 return segmentation @transformation def filter_by_size(plots): """Remove plots the size of which lies outside particular min and max plot sizes.""" #params = Parameters() identifiers = plots.identifiers # TODO - set relative to median? 
min_plot_size = 20000 max_plot_size = 120000 for identifier in identifiers: region = plots.region_by_identifier(identifier) size = region.area if (size < min_plot_size) or (size > max_plot_size): plots.remove_region(identifier) return plots ``` #### File: senescence-in-field/scripts/generate_image_series.py ```python from jicbioimage.core.image import Image import dtoolcore import click from translate_labels import rack_plot_to_image_plot from image_utils import join_horizontally, join_vertically def identifiers_where_match_is_true(dataset, match_function): return [i for i in dataset.identifiers if match_function(i)] def generate_image_series_for_plot(rack, plot): n_image, n_plot = rack_plot_to_image_plot(rack, plot) # n_image, n_plot = 55, 24 print "{}_{}".format(n_image, n_plot) dataset_uri = 'file:/Users/hartleym/data_intermediate/separate_plots' dataset = dtoolcore.DataSet.from_uri(dataset_uri) plot_number_overlay = dataset.get_overlay('plot_number') ordering_overlay = dataset.get_overlay('ordering') date_overlay = dataset.get_overlay('date') def is_match(i): try: ordering_as_int = int(ordering_overlay[i]) except TypeError: return False if ordering_as_int != n_image: return False if int(plot_number_overlay[i]) != n_plot: return False return True identifiers = identifiers_where_match_is_true(dataset, is_match) def sort_identifiers_by_date(identifiers): dates_and_identifiers = [(date_overlay[i], i) for i in identifiers] sorted_dates_and_identifiers = sorted(dates_and_identifiers) _, sorted_identifiers = zip(*sorted_dates_and_identifiers) return(sorted_identifiers) sorted_identifiers = sort_identifiers_by_date(identifiers) def identifiers_to_joined_image(identifiers): images = [] for identifier in identifiers: image_fpath = dataset.item_content_abspath(identifier) image = Image.from_file(image_fpath) images.append(image) return join_horizontally(images) result = identifiers_to_joined_image(sorted_identifiers) output_fname = 'example_from_tobin.png' with open(output_fname, 'wb') as fh: fh.write(result.png()) @click.command() def main(): # Early leaf senescence # generate_image_series_for_plot(3, 16) # generate_image_series_for_plot(7, 9) # generate_image_series_for_plot(9, 1) # Late leaf senescence generate_image_series_for_plot(7, 15) if __name__ == '__main__': main() ``` #### File: senescence-in-field/scripts/grid.py ```python import numpy as np def width(region): ys, xs = region.index_arrays w = np.max(xs) - np.min(xs) return w def mean_width(plots): regions = [plots.region_by_identifier(i) for i in plots.identifiers] return sum([width(r) for r in regions]) / float(len(regions)) def grid(plots): """Return list of list of regions. """ ydim, xdim = plots.shape w = mean_width(plots) columns = [] for i in plots.identifiers: r = plots.region_by_identifier(i) r.identifier = i if len(columns) == 0: c = Column(w, xdim) c.append(r) columns.append(c) continue included = False for c in columns: if c.in_column(r): c.append(r) included = True break if not included: c = Column(w, xdim) c.append(r) columns.append(c) # Sort the columns left to right. columns.sort(key=lambda i: i.x_mean) for c in columns: # Sort the rows top to bottom. 
c.sort(key=lambda i: i.centroid[0]) return columns class Column(list): def __init__(self, width, xdim): self.width = width / 2 self.xdim = xdim @property def x_mean(self): return sum(p.centroid[1] for p in self) / len(self) def in_column(self, region): x = region.centroid[1] lower = max(0, self.x_mean - self.width) upper = min(self.x_mean + self.width, self.xdim) if (x > lower) and (x < upper): return True return False ``` #### File: senescence-in-field/scripts/label_segmentation_plots.py ```python import os import argparse import numpy as np from jicbioimage.core.image import Image from jicbioimage.segment import SegmentedImage from jicbioimage.illustrate import AnnotatedImage from jicgeometry import Point2D from dtoolcore import DataSet from test_tagger_data import find_approx_plot_locs def load_segmentation_from_rgb_image(filename): rgb_image = Image.from_file(filename) ydim, xdim, _ = rgb_image.shape segmentation = np.zeros((ydim, xdim), dtype=np.uint32) segmentation += rgb_image[:, :, 2] segmentation += rgb_image[:, :, 1].astype(np.uint32) * 256 segmentation += rgb_image[:, :, 0].astype(np.uint32) * 256 * 256 return segmentation.view(SegmentedImage) def annotate_with_set_of_points(image, points): grayscale = np.mean(image, axis=2) annotated = AnnotatedImage.from_grayscale(grayscale) xdim, ydim, _ = annotated.shape def annotate_location(fractional_coords): xfrac, yfrac = fractional_coords ypos = int(ydim * xfrac) xpos = int(xdim * yfrac) for x in range(-2, 3): for y in range(-2, 3): annotated.draw_cross( (xpos+x, ypos+y), color=(255, 0, 0), radius=50 ) for loc in points: annotate_location(loc) return annotated def label_plots(dataset): identifier = "384b5421bc782259b218eaab39171d51462202fd" segmentation_file = "/output/DJI_0118-segmented.JPG" segmentation = load_segmentation_from_rgb_image(segmentation_file) approx_plot_locs = find_approx_plot_locs(dataset, identifier) xdim, ydim = segmentation.shape def image_coords_to_rel_coords(image, point): ydim, xdim = image.shape y_abs, x_abs = point x_rel = float(x_abs) / xdim y_rel = float(y_abs) / ydim return Point2D(x_rel, y_rel) centroids = [] for sid in segmentation.identifiers: c = segmentation.region_by_identifier(sid).centroid centroids.append(image_coords_to_rel_coords(segmentation, c)) loc_labels = {l: str(n) for n, l in enumerate(approx_plot_locs)} image = Image.from_file(dataset.abspath_from_identifier(identifier)) annotated = annotate_with_set_of_points(image, centroids) def rel_coords_to_image_coords(image, point): ydim, xdim = image.shape x_rel, y_rel = point return Point2D(int(y_rel * ydim), int(x_rel * xdim)) for l in approx_plot_locs: annotated.text_at( loc_labels[l], rel_coords_to_image_coords(segmentation, l), size=60, color=(0, 255, 0)) def closest_loc_label(p): dists = [(p.distance(l), l) for l in approx_plot_locs] dists.sort() return loc_labels[dists[0][1]] for c in centroids: label = closest_loc_label(c) annotated.text_at( label, rel_coords_to_image_coords(segmentation, c) + Point2D(20, 20), size=60, color=(0, 255, 255)) with open('/output/ann.png', 'wb') as f: f.write(annotated.png()) grayscale = np.mean(image, axis=2) annotated2 = AnnotatedImage.from_grayscale(grayscale) for sid in segmentation.identifiers: region = segmentation.region_by_identifier(sid) annotated2.mask_region(region.border.dilate(), [255, 255, 0]) def closest_loc_label(p): dists = [(p.distance(l), l) for l in approx_plot_locs] dists.sort() return loc_labels[dists[0][1]] for c in centroids: label = closest_loc_label(c) annotated2.text_at( label, 
rel_coords_to_image_coords(segmentation, c) - Point2D(30, 30), size=60, color=(0, 255, 255)) with open('/output/ann_plots.png', 'wb') as f: f.write(annotated2.png()) def main(): parser = argparse.ArgumentParser() parser.add_argument('dataset_path', help='Path to dataset') parser.add_argument('output_path', help='Output directory') args = parser.parse_args() dataset = DataSet.from_path(args.dataset_path) label_plots(dataset) # explore_dataset(dataset, args.output_path, n=1) if __name__ == '__main__': main() ```
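In `create_pca_component_overlay.py` above, `calc_pca_components` builds a 3x3 RGB covariance matrix and `calc_senescence` projects each colour onto the second eigenvector. A worked sketch of the same arithmetic on made-up colour means; note that `np.linalg.eig` does not sort its eigenvalues, so "component 2" is simply the second vector eig happens to return, exactly as in the original:

```python
import numpy as np

entries = [  # toy per-plot/date colour means
    {"R": 120.0, "G": 160.0, "B": 40.0},
    {"R": 150.0, "G": 140.0, "B": 60.0},
    {"R": 180.0, "G": 110.0, "B": 80.0},
]
rgb_matrix = np.transpose(np.array(
    [[e["R"], e["G"], e["B"]] for e in entries]
))
evalues, evectors = np.linalg.eig(np.cov(rgb_matrix))
pca_component_2 = evectors.T[1]

senescence = pca_component_2.dot([120.0, 160.0, 40.0])
print(senescence)  # the score stored in the pca_component_2 overlay
```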
{ "source": "JIC-Image-Analysis/stem_tissue_characterisatoin", "score": 3 }
#### File: stem_tissue_characterisatoin/scripts/annotate.py ```python from jicbioimage.illustrate import Canvas def get_normalised_rgb(cell_intensity, imin, imax): norm_area = ((cell_intensity - imin) / float(imax - imin)) * 255 norm_area = int(round(norm_area)) assert norm_area >= 0 assert norm_area < 256 return (255 - norm_area, 255 - norm_area, 255) def generate_annotation(segmentation): areas = [segmentation.region_by_identifier(i).area for i in segmentation.identifiers] imin = min(areas) imax = max(areas) ydim, xdim = segmentation.shape canvas = Canvas.blank_canvas(width=xdim, height=ydim) for i in segmentation.identifiers: region = segmentation.region_by_identifier(i) area = region.area color = get_normalised_rgb(area, imin, imax) canvas.mask_region(region.inner.inner, color) return canvas ```
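`get_normalised_rgb` maps a cell area linearly onto a white-to-blue ramp (small cells pale, large cells saturated blue). The arithmetic for one cell, inlined with illustrative values:

```python
# A cell of area 75 within an observed range of [50, 100]:
cell_intensity, imin, imax = 75, 50, 100
norm = int(round(((cell_intensity - imin) / float(imax - imin)) * 255))
print((255 - norm, 255 - norm, 255))  # (127, 127, 255), a mid-blue
```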
{ "source": "JIC-Image-Analysis/stomata-characterisation", "score": 3 }
#### File: scripts/util/geometry.py ```python import math import numpy as np import cv2 from nose.tools import assert_almost_equal from jicimagelib.geometry import Point2D def ellipse_box(region): """Return the box representing the ellipse (center, bounds, angle).""" border = region.border border_points = np.array(border.points) transposed_points = np.array([(a, b) for (b, a) in border_points]) return cv2.fitEllipse(transposed_points) def angle2vector(angle): """Return the unit vector representation of the angle as x, y pair.""" radians = (math.pi / 180.0) * angle return Point2D( math.cos(radians), math.sin(radians) ) def line(center, angle, length): """Return the two points representing the line.""" center = Point2D(center) direction = angle2vector(angle) half_length = length/2 p1 = center - (direction * half_length) p2 = center + (direction * half_length) return p1, p2 # Tests... def test_angle2vector_0_degrees(): x, y = angle2vector(0) assert x == 1.0, "{} != 1.0".format(x) assert y == 0.0, "{} != 0.0".format(y) def test_angle2vector_90_degrees(): x, y = angle2vector(90) assert_almost_equal(x, 0.0) assert_almost_equal(y, 1.0) def test_angle2vector_180_degrees(): x, y = angle2vector(180) assert_almost_equal(x, -1.0) assert_almost_equal(y, 0.0) def test_angle2vector_270_degrees(): x, y = angle2vector(270) assert_almost_equal(x, 0.0) assert_almost_equal(y, -1.0) def test_angle2vector_360_degrees(): x, y = angle2vector(360) assert_almost_equal(x, 1.0) assert_almost_equal(y, 0.0) ```
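`line` converts a (centre, angle, length) triple into two endpoints via the unit vector from `angle2vector`. The same computation with the stdlib only, avoiding the `jicimagelib` dependency; the values are illustrative:

```python
import math

center, angle, length = (10.0, 10.0), 90, 6
dx = math.cos(math.radians(angle))
dy = math.sin(math.radians(angle))
p1 = (center[0] - dx * length / 2, center[1] - dy * length / 2)
p2 = (center[0] + dx * length / 2, center[1] + dy * length / 2)
print(p1, p2)  # approximately (10.0, 7.0) and (10.0, 13.0)
```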
{ "source": "jick155/Y.L_leetcode", "score": 4 }
#### File: Y.L_leetcode/code/7.py
```python
class Solution:
    def reverse(self, x: int) -> int:
        if '-' in str(x):
            result = -int(str(x)[::-1][:-1])
        else:
            result = int(str(x)[::-1])
        # -2**31 and 2**31 - 1 are themselves representable, so only values
        # strictly outside the signed 32-bit range overflow to 0.
        if result < -2**31 or result > 2**31 - 1:
            result = 0
        return result


'''
#2
class Solution(object):
    def reverse(self, x):
        """
        :type x: int
        :rtype: int
        """
        arr = []   # set up an array to store values
        f = False  # sign flag telling whether the integer is negative or not
        if x < 0:
            x = -x
            f = True
        while True:
            arr.append(x % 10)  # get the digit in each position
            x = x // 10         # integer division (plain / also floors ints in Python 2)
            if x == 0:
                break
        result = 0
        for i in arr:  # restore the reversed integer
            result = i + 10 * result
        if f:  # restore the negative sign
            result *= -1
        if result < -2**31 or result > 2**31 - 1:
            result = 0
        return result
'''
```
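A few sanity checks for the corrected 32-bit bounds (a sketch; the expected values follow directly from the string-reversal logic above):

```python
s = Solution()
print(s.reverse(123))         # 321
print(s.reverse(-120))        # -21, the trailing zero disappears
print(s.reverse(1534236469))  # 0, since 9646324351 overflows 2**31 - 1
```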
{ "source": "jickieduan/python27", "score": 2 }
#### File: wx/lib/dialogs.py ```python import wx import wx.lib.layoutf as layoutf #---------------------------------------------------------------------- class ScrolledMessageDialog(wx.Dialog): def __init__(self, parent, msg, caption, pos=wx.DefaultPosition, size=(500,300), style=wx.DEFAULT_DIALOG_STYLE): wx.Dialog.__init__(self, parent, -1, caption, pos, size, style) x, y = pos if x == -1 and y == -1: self.CenterOnScreen(wx.BOTH) self.text = text = wx.TextCtrl(self, -1, msg, style=wx.TE_MULTILINE | wx.TE_READONLY) ok = wx.Button(self, wx.ID_OK, "OK") ok.SetDefault() lc = layoutf.Layoutf('t=t5#1;b=t5#2;l=l5#1;r=r5#1', (self,ok)) text.SetConstraints(lc) lc = layoutf.Layoutf('b=b5#1;x%w50#1;w!80;h*', (self,)) ok.SetConstraints(lc) self.SetAutoLayout(1) self.Layout() class MultipleChoiceDialog(wx.Dialog): def __init__(self, parent, msg, title, lst, pos = wx.DefaultPosition, size = (200,200), style = wx.DEFAULT_DIALOG_STYLE): wx.Dialog.__init__(self, parent, -1, title, pos, size, style) x, y = pos if x == -1 and y == -1: self.CenterOnScreen(wx.BOTH) stat = wx.StaticText(self, -1, msg) self.lbox = wx.ListBox(self, 100, wx.DefaultPosition, wx.DefaultSize, lst, wx.LB_MULTIPLE) ok = wx.Button(self, wx.ID_OK, "OK") ok.SetDefault() cancel = wx.Button(self, wx.ID_CANCEL, "Cancel") dlgsizer = wx.BoxSizer(wx.VERTICAL) dlgsizer.Add(stat, 0, wx.ALL, 4) dlgsizer.Add(self.lbox, 1, wx.EXPAND | wx.ALL, 4) btnsizer = wx.StdDialogButtonSizer() btnsizer.AddButton(ok) btnsizer.AddButton(cancel) btnsizer.Realize() dlgsizer.Add(btnsizer, 0, wx.ALL | wx.ALIGN_RIGHT, 4) self.SetSizer(dlgsizer) self.lst = lst self.Layout() def GetValue(self): return self.lbox.GetSelections() def GetValueString(self): sel = self.lbox.GetSelections() val = [ self.lst[i] for i in sel ] return tuple(val) #---------------------------------------------------------------------- """ function wrappers for wxPython system dialogs Author: <NAME> Date: 2003-1-2 Rev: 3 This is the third refactor of the PythonCard dialog.py module for inclusion in the main wxPython distribution. There are a number of design decisions and subsequent code refactoring to be done, so I'm releasing this just to get some feedback. rev 3: - result dictionary replaced by DialogResults class instance - should message arg be replaced with msg? most wxWindows dialogs seem to use the abbreviation? 
rev 2:
- All dialog classes have been replaced by function wrappers
- Changed arg lists to more closely match wxWindows docs and wxPython.lib.dialogs
- changed 'returned' value to the actual button id the user clicked on
- added a returnedString value for the string version of the return value
- reworked colorDialog and fontDialog so you can pass in just a color or font
  for the most common usage case
- probably need to use colour instead of color to match the English English
  spelling in wxWindows (sigh)
- I still think we could lose the parent arg and just always use None
"""

class DialogResults:
    def __init__(self, returned):
        self.returned = returned
        self.accepted = returned in (wx.ID_OK, wx.ID_YES)
        self.returnedString = returnedString(returned)

    def __repr__(self):
        return str(self.__dict__)

def returnedString(ret):
    if ret == wx.ID_OK:
        return "Ok"
    elif ret == wx.ID_CANCEL:
        return "Cancel"
    elif ret == wx.ID_YES:
        return "Yes"
    elif ret == wx.ID_NO:
        return "No"

## findDialog was created before wxPython got a Find/Replace dialog
## but it may be instructive as to how a function wrapper can
## be added for your own custom dialogs
## this dialog is always modal, while wxFindReplaceDialog is
## modeless and so doesn't lend itself to a function wrapper

def findDialog(parent=None, searchText='', wholeWordsOnly=0, caseSensitive=0):
    dlg = wx.Dialog(parent, -1, "Find", wx.DefaultPosition, (380, 120))

    wx.StaticText(dlg, -1, 'Find what:', (7, 10))
    wSearchText = wx.TextCtrl(dlg, -1, searchText, (80, 7), (195, -1))
    wSearchText.SetValue(searchText)
    wx.Button(dlg, wx.ID_OK, "Find Next", (285, 5), wx.DefaultSize).SetDefault()
    wx.Button(dlg, wx.ID_CANCEL, "Cancel", (285, 35), wx.DefaultSize)

    wWholeWord = wx.CheckBox(dlg, -1, 'Match whole word only',
                             (7, 35), wx.DefaultSize, wx.NO_BORDER)
    if wholeWordsOnly:
        wWholeWord.SetValue(1)

    wCase = wx.CheckBox(dlg, -1, 'Match case', (7, 55), wx.DefaultSize, wx.NO_BORDER)
    if caseSensitive:
        wCase.SetValue(1)

    wSearchText.SetSelection(0, len(wSearchText.GetValue()))
    wSearchText.SetFocus()

    result = DialogResults(dlg.ShowModal())
    result.searchText = wSearchText.GetValue()
    result.wholeWordsOnly = wWholeWord.GetValue()
    result.caseSensitive = wCase.GetValue()
    dlg.Destroy()
    return result

def colorDialog(parent=None, colorData=None, color=None):
    if colorData:
        dialog = wx.ColourDialog(parent, colorData)
    else:
        dialog = wx.ColourDialog(parent)
        dialog.GetColourData().SetChooseFull(1)
    if color is not None:
        dialog.GetColourData().SetColour(color)
    result = DialogResults(dialog.ShowModal())
    result.colorData = dialog.GetColourData()
    result.color = result.colorData.GetColour().Get()
    dialog.Destroy()
    return result

## it is easier to just duplicate the code than
## try and replace color with colour in the result

def colourDialog(parent=None, colourData=None, colour=None):
    if colourData:
        dialog = wx.ColourDialog(parent, colourData)
    else:
        dialog = wx.ColourDialog(parent)
        dialog.GetColourData().SetChooseFull(1)
    if colour is not None:
        # fixed: this previously referenced the undefined name 'color'
        dialog.GetColourData().SetColour(colour)
    result = DialogResults(dialog.ShowModal())
    result.colourData = dialog.GetColourData()
    result.colour = result.colourData.GetColour().Get()
    dialog.Destroy()
    return result

def fontDialog(parent=None, fontData=None, font=None):
    if fontData is None:
        fontData = wx.FontData()
        fontData.SetColour(wx.BLACK)
        fontData.SetInitialFont(wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT))

    if font is not None:
        fontData.SetInitialFont(font)

    dialog = wx.FontDialog(parent, fontData)
    result = DialogResults(dialog.ShowModal())

    if result.accepted:
fontData = dialog.GetFontData() result.fontData = fontData result.color = fontData.GetColour().Get() result.colour = result.color result.font = fontData.GetChosenFont() else: result.color = None result.colour = None result.font = None dialog.Destroy() return result def textEntryDialog(parent=None, message='', title='', defaultText='', style=wx.OK | wx.CANCEL): dialog = wx.TextEntryDialog(parent, message, title, defaultText, style) result = DialogResults(dialog.ShowModal()) result.text = dialog.GetValue() dialog.Destroy() return result def messageDialog(parent=None, message='', title='Message box', aStyle = wx.OK | wx.CANCEL | wx.CENTRE, pos=wx.DefaultPosition): dialog = wx.MessageDialog(parent, message, title, aStyle, pos) result = DialogResults(dialog.ShowModal()) dialog.Destroy() return result ## KEA: alerts are common, so I'm providing a class rather than ## requiring the user code to set up the right icons and buttons ## the with messageDialog function def alertDialog(parent=None, message='', title='Alert', pos=wx.DefaultPosition): return messageDialog(parent, message, title, wx.ICON_EXCLAMATION | wx.OK, pos) def scrolledMessageDialog(parent=None, message='', title='', pos=wx.DefaultPosition, size=(500,300)): dialog = ScrolledMessageDialog(parent, message, title, pos, size) result = DialogResults(dialog.ShowModal()) dialog.Destroy() return result def fileDialog(parent=None, title='Open', directory='', filename='', wildcard='*.*', style=wx.OPEN | wx.MULTIPLE): dialog = wx.FileDialog(parent, title, directory, filename, wildcard, style) result = DialogResults(dialog.ShowModal()) if result.accepted: result.paths = dialog.GetPaths() else: result.paths = None dialog.Destroy() return result ## openFileDialog and saveFileDialog are convenience functions ## they represent the most common usages of the fileDialog ## with the most common style options def openFileDialog(parent=None, title='Open', directory='', filename='', wildcard='All Files (*.*)|*.*', style=wx.OPEN | wx.MULTIPLE): return fileDialog(parent, title, directory, filename, wildcard, style) def saveFileDialog(parent=None, title='Save', directory='', filename='', wildcard='All Files (*.*)|*.*', style=wx.SAVE | wx.OVERWRITE_PROMPT): return fileDialog(parent, title, directory, filename, wildcard, style) def dirDialog(parent=None, message='Choose a directory', path='', style=0, pos=wx.DefaultPosition, size=wx.DefaultSize): dialog = wx.DirDialog(parent, message, path, style, pos, size) result = DialogResults(dialog.ShowModal()) if result.accepted: result.path = dialog.GetPath() else: result.path = None dialog.Destroy() return result directoryDialog = dirDialog def singleChoiceDialog(parent=None, message='', title='', lst=[], style=wx.OK | wx.CANCEL | wx.CENTRE): dialog = wx.SingleChoiceDialog(parent, message, title, list(lst), style | wx.DEFAULT_DIALOG_STYLE) result = DialogResults(dialog.ShowModal()) result.selection = dialog.GetStringSelection() dialog.Destroy() return result def multipleChoiceDialog(parent=None, message='', title='', lst=[], pos=wx.DefaultPosition, size=wx.DefaultSize): dialog = wx.MultiChoiceDialog(parent, message, title, lst, wx.CHOICEDLG_STYLE, pos) result = DialogResults(dialog.ShowModal()) result.selection = tuple([lst[i] for i in dialog.GetSelections()]) dialog.Destroy() return result #--------------------------------------------------------------------------- try: wx.CANCEL_DEFAULT wx.OK_DEFAULT except AttributeError: wx.CANCEL_DEFAULT = 0 wx.OK_DEFAULT = 0 class MultiMessageDialog(wx.Dialog): """ A dialog like 
wx.MessageDialog, but with an optional 2nd message string that is shown in a scrolled window, and also allows passing in the icon to be shown instead of the stock error, question, etc. icons. The btnLabels can be used if you'd like to change the stock labels on the buttons, it's a dictionary mapping stock IDs to label strings. """ CONTENT_MAX_W = 550 CONTENT_MAX_H = 350 def __init__(self, parent, message, caption = "Message Box", msg2="", style = wx.OK | wx.CANCEL, pos = wx.DefaultPosition, icon=None, btnLabels=None): if 'wxMac' not in wx.PlatformInfo: title = caption # the caption will be displayed inside the dialog on Macs else: title = "" wx.Dialog.__init__(self, parent, -1, title, pos, style = wx.DEFAULT_DIALOG_STYLE | style & (wx.STAY_ON_TOP | wx.DIALOG_NO_PARENT)) bitmap = None isize = (32,32) # was an icon passed to us? if icon is not None: if isinstance(icon, wx.Icon): bitmap = wx.BitmapFromIcon(icon) elif isinstance(icon, wx.Image): bitmap = wx.BitmapFromImage(icon) else: assert isinstance(icon, wx.Bitmap) bitmap = icon else: # check for icons in the style flags artid = None if style & wx.ICON_ERROR or style & wx.ICON_HAND: artid = wx.ART_ERROR elif style & wx.ICON_EXCLAMATION: artid = wx.ART_WARNING elif style & wx.ICON_QUESTION: artid = wx.ART_QUESTION elif style & wx.ICON_INFORMATION: artid = wx.ART_INFORMATION if artid is not None: bitmap = wx.ArtProvider.GetBitmap(artid, wx.ART_MESSAGE_BOX, isize) if bitmap: bitmap = wx.StaticBitmap(self, -1, bitmap) else: bitmap = isize # will be a spacer when added to the sizer # Sizer to contain the icon, text area and buttons sizer = wx.BoxSizer(wx.HORIZONTAL) sizer.Add(bitmap, 0, wx.TOP|wx.LEFT, 12) sizer.Add((10,10)) # Make the text area messageSizer = wx.BoxSizer(wx.VERTICAL) if 'wxMac' in wx.PlatformInfo and caption: caption = wx.StaticText(self, -1, caption) caption.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.BOLD)) messageSizer.Add(caption) messageSizer.Add((10,10)) stext = wx.StaticText(self, -1, message) #stext.SetLabelMarkup(message) Wrap() causes all markup to be lost, so don't try to use it yet... 
        stext.Wrap(self.CONTENT_MAX_W)
        messageSizer.Add(stext)

        if msg2:
            messageSizer.Add((15,15))
            t = wx.TextCtrl(self, style=wx.TE_MULTILINE|wx.TE_READONLY|wx.TE_RICH|wx.TE_DONTWRAP)
            t.SetValue(msg2)

            # Set size to be used by the sizer based on the message content,
            # with good maximums
            dc = wx.ClientDC(t)
            dc.SetFont(t.GetFont())
            w,h,lh = dc.GetMultiLineTextExtent(msg2)
            w = min(self.CONTENT_MAX_W, 10 + w + wx.SystemSettings.GetMetric(wx.SYS_VSCROLL_X))
            h = min(self.CONTENT_MAX_H, 10 + h)
            t.SetMinSize((w,h))
            messageSizer.Add(t, 0, wx.EXPAND)

        # Make the buttons
        buttonSizer = self.CreateStdDialogButtonSizer(
            style & (wx.OK | wx.CANCEL | wx.YES_NO | wx.NO_DEFAULT
                     | wx.CANCEL_DEFAULT | wx.YES_DEFAULT | wx.OK_DEFAULT))
        self.Bind(wx.EVT_BUTTON, self.OnButton)
        if btnLabels:
            for sid, label in btnLabels.iteritems():
                btn = self.FindWindowById(sid)
                if btn:
                    btn.SetLabel(label)
        messageSizer.Add(wx.Size(1, 15))
        messageSizer.Add(buttonSizer, 0,
                         wx.LEFT | wx.RIGHT | wx.BOTTOM | wx.EXPAND, 12)

        sizer.Add(messageSizer, 0, wx.LEFT | wx.RIGHT | wx.TOP, 12)
        self.SetSizer(sizer)
        self.Fit()
        if parent:
            self.CenterOnParent()
        else:
            self.CenterOnScreen()

        for c in self.Children:
            if isinstance(c, wx.Button):
                wx.CallAfter(c.SetFocus)
                break

    def OnButton(self, evt):
        if self.IsModal():
            self.EndModal(evt.EventObject.Id)
        else:
            self.Close()

def MultiMessageBox(message, caption, msg2="", style=wx.OK, parent=None,
                    icon=None, btnLabels=None):
    """
    A function like wx.MessageBox which uses MultiMessageDialog.
    """
    #if not style & wx.ICON_NONE and not style & wx.ICON_MASK:
    if not style & wx.ICON_MASK:
        if style & wx.YES:
            style |= wx.ICON_QUESTION
        else:
            style |= wx.ICON_INFORMATION

    dlg = MultiMessageDialog(parent, message, caption, msg2, style,
                             icon=icon, btnLabels=btnLabels)
    ans = dlg.ShowModal()
    dlg.Destroy()

    if ans == wx.ID_OK:
        return wx.OK
    elif ans == wx.ID_YES:
        return wx.YES
    elif ans == wx.ID_NO:
        return wx.NO
    elif ans == wx.ID_CANCEL:
        return wx.CANCEL
    print "unexpected return code from MultiMessageDialog??"
    return wx.CANCEL

#---------------------------------------------------------------------------
```
#### File: wx/lib/docview.py
```python
import os
import os.path
import shutil
import wx
import sys
_ = wx.GetTranslation

#----------------------------------------------------------------------
# docview globals
#----------------------------------------------------------------------
DOC_SDI = 1
DOC_MDI = 2
DOC_NEW = 4
DOC_SILENT = 8
DOC_OPEN_ONCE = 16
DOC_NO_VIEW = 32
# fixed: the default flags were previously combined with & (which is
# always 0 for distinct bits); | is the intended combination
DEFAULT_DOCMAN_FLAGS = DOC_SDI | DOC_OPEN_ONCE

TEMPLATE_VISIBLE = 1
TEMPLATE_INVISIBLE = 2
TEMPLATE_NO_CREATE = (4 | TEMPLATE_VISIBLE)
DEFAULT_TEMPLATE_FLAGS = TEMPLATE_VISIBLE
MAX_FILE_HISTORY = 9

#----------------------------------------------------------------------
# Convenience functions from wxWindows used in docview
#----------------------------------------------------------------------

def FileNameFromPath(path):
    """
    Returns the filename for a full path.
    """
    return os.path.split(path)[1]

def FindExtension(path):
    """
    Returns the extension of a filename for a full path.
    """
    return os.path.splitext(path)[1].lower()

def FileExists(path):
    """
    Returns True if the path exists.
    """
    return os.path.isfile(path)

def PathOnly(path):
    """
    Returns the path of a full path without the filename.
""" return os.path.split(path)[0] #---------------------------------------------------------------------- # Document/View Classes #---------------------------------------------------------------------- class Document(wx.EvtHandler): """ The document class can be used to model an application's file-based data. It is part of the document/view framework supported by wxWindows, and cooperates with the wxView, wxDocTemplate and wxDocManager classes. Note this wxPython version also keeps track of the modification date of the document and if it changes on disk outside of the application, we will warn the user before saving to avoid clobbering the file. """ def __init__(self, parent=None): """ Constructor. Define your own default constructor to initialize application-specific data. """ wx.EvtHandler.__init__(self) self._documentParent = parent self._documentTemplate = None self._commandProcessor = None self._savedYet = False self._writeable = True self._documentTitle = None self._documentFile = None self._documentTypeName = None self._documentModified = False self._documentModificationDate = None self._documentViews = [] def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ return False def GetFilename(self): """ Gets the filename associated with this document, or "" if none is associated. """ return self._documentFile def GetTitle(self): """ Gets the title for this document. The document title is used for an associated frame (if any), and is usually constructed by the framework from the filename. """ return self._documentTitle def SetTitle(self, title): """ Sets the title for this document. The document title is used for an associated frame (if any), and is usually constructed by the framework from the filename. """ self._documentTitle = title def GetDocumentName(self): """ The document type name given to the wxDocTemplate constructor, copied to this document when the document is created. If several document templates are created that use the same document type, this variable is used in wxDocManager::CreateView to collate a list of alternative view types that can be used on this kind of document. """ return self._documentTypeName def SetDocumentName(self, name): """ Sets he document type name given to the wxDocTemplate constructor, copied to this document when the document is created. If several document templates are created that use the same document type, this variable is used in wxDocManager::CreateView to collate a list of alternative view types that can be used on this kind of document. Do not change the value of this variable. """ self._documentTypeName = name def GetDocumentSaved(self): """ Returns True if the document has been saved. This method has been added to wxPython and is not in wxWindows. """ return self._savedYet def SetDocumentSaved(self, saved=True): """ Sets whether the document has been saved. This method has been added to wxPython and is not in wxWindows. """ self._savedYet = saved def GetCommandProcessor(self): """ Returns the command processor associated with this document. """ return self._commandProcessor def SetCommandProcessor(self, processor): """ Sets the command processor to be used for this document. The document will then be responsible for its deletion. 
        Normally you should not call this; override OnCreateCommandProcessor
        instead.
        """
        self._commandProcessor = processor

    def IsModified(self):
        """
        Returns true if the document has been modified since the last save,
        false otherwise. You may need to override this if your document view
        maintains its own record of being modified (for example if using
        wxTextWindow to view and edit the document).
        """
        return self._documentModified

    def Modify(self, modify):
        """
        Call with true to mark the document as modified since the last save,
        false otherwise. You may need to override this if your document view
        maintains its own record of being modified (for example if using
        wxTextWindow to view and edit the document).
        This method has been extended to notify its views that the dirty flag
        has changed.
        """
        self._documentModified = modify
        self.UpdateAllViews(hint=("modify", self, self._documentModified))

    def SetDocumentModificationDate(self):
        """
        Saves the file's last modification date.
        This is used to check if the file has been modified outside of the
        application.
        This method has been added to wxPython and is not in wxWindows.
        """
        self._documentModificationDate = os.path.getmtime(self.GetFilename())

    def GetDocumentModificationDate(self):
        """
        Returns the file's modification date when it was loaded from disk.
        This is used to check if the file has been modified outside of the
        application.
        This method has been added to wxPython and is not in wxWindows.
        """
        return self._documentModificationDate

    def IsDocumentModificationDateCorrect(self):
        """
        Returns False if the file has been modified outside of the
        application.
        This method has been added to wxPython and is not in wxWindows.
        """
        if not os.path.exists(self.GetFilename()):  # document must be in memory only and can't be out of date
            return True
        return self._documentModificationDate == os.path.getmtime(self.GetFilename())

    def GetViews(self):
        """
        Returns the list whose elements are the views on the document.
        """
        return self._documentViews

    def GetDocumentTemplate(self):
        """
        Returns the template that created the document.
        """
        return self._documentTemplate

    def SetDocumentTemplate(self, template):
        """
        Sets the template that created the document. Should only be called by
        the framework.
        """
        self._documentTemplate = template

    def DeleteContents(self):
        """
        Deletes the contents of the document. Override this method as
        necessary.
        """
        return True

    def Destroy(self):
        """
        Destructor. Removes itself from the document manager.
        """
        self.DeleteContents()
        self._documentModificationDate = None
        if self.GetDocumentManager():
            self.GetDocumentManager().RemoveDocument(self)
        wx.EvtHandler.Destroy(self)

    def Close(self):
        """
        Closes the document, by calling OnSaveModified and then (if this is
        true) OnCloseDocument. This does not normally delete the document
        object: use DeleteAllViews to do this implicitly.
        """
        if self.OnSaveModified():
            if self.OnCloseDocument():
                return True
            else:
                return False
        else:
            return False

    def OnCloseDocument(self):
        """
        The default implementation calls DeleteContents (an empty
        implementation) and sets the modified flag to false. Override this to
        supply additional behaviour when the document is closed with Close.
        """
        self.NotifyClosing()
        self.DeleteContents()
        self.Modify(False)
        return True

    def DeleteAllViews(self):
        """
        Calls wxView.Close and deletes each view. Deleting the final view
        will implicitly delete the document itself, because the wxView
        destructor calls RemoveView.
        This in turn calls wxDocument::OnChangedViewList, whose default
        implementation is to save and delete the document if no views exist.
        """
        manager = self.GetDocumentManager()
        for view in self._documentViews:
            if not view.Close():
                return False
        if self in manager.GetDocuments():
            self.Destroy()
        return True

    def GetFirstView(self):
        """
        A convenience function to get the first view for a document, because
        in many cases a document will only have a single view.
        """
        if len(self._documentViews) == 0:
            return None
        return self._documentViews[0]

    def GetDocumentManager(self):
        """
        Returns the associated document manager.
        """
        if self._documentTemplate:
            return self._documentTemplate.GetDocumentManager()
        return None

    def OnNewDocument(self):
        """
        The default implementation calls OnSaveModified and DeleteContents,
        makes a default title for the document, and notifies the views that
        the filename (in fact, the title) has changed.
        """
        if not self.OnSaveModified() or not self.OnCloseDocument():
            return False
        self.DeleteContents()
        self.Modify(False)
        self.SetDocumentSaved(False)
        name = self.GetDocumentManager().MakeDefaultName()
        self.SetTitle(name)
        self.SetFilename(name, notifyViews = True)

    def Save(self):
        """
        Saves the document by calling OnSaveDocument if there is an
        associated filename, or SaveAs if there is no filename.
        """
        if not self.IsModified():  # and self._savedYet:  This was here, but if it is not modified who cares if it hasn't been saved yet?
            return True

        """ check for file modification outside of application """
        if not self.IsDocumentModificationDateCorrect():
            msgTitle = wx.GetApp().GetAppName()
            if not msgTitle:
                msgTitle = _("Application")
            res = wx.MessageBox(_("'%s' has been modified outside of %s. Overwrite '%s' with current changes?") % (self.GetPrintableName(), msgTitle, self.GetPrintableName()),
                                msgTitle,
                                wx.YES_NO | wx.CANCEL | wx.ICON_QUESTION,
                                self.GetDocumentWindow())

            if res == wx.NO:
                return True
            elif res == wx.YES:
                pass
            else:  # elif res == wx.CANCEL:
                return False

        if not self._documentFile or not self._savedYet:
            return self.SaveAs()
        return self.OnSaveDocument(self._documentFile)

    def SaveAs(self):
        """
        Prompts the user for a file to save to, and then calls OnSaveDocument.
        """
        docTemplate = self.GetDocumentTemplate()
        if not docTemplate:
            return False

        descr = docTemplate.GetDescription() + _(" (") + docTemplate.GetFileFilter() + _(") |") + docTemplate.GetFileFilter()  # spacing is important, make sure there is no space after the "|", it causes a bug on wx_gtk
        filename = wx.FileSelector(_("Save As"),
                                   docTemplate.GetDirectory(),
                                   FileNameFromPath(self.GetFilename()),
                                   docTemplate.GetDefaultExtension(),
                                   wildcard = descr,
                                   flags = wx.SAVE | wx.OVERWRITE_PROMPT,
                                   parent = self.GetDocumentWindow())
        if filename == "":
            return False

        name, ext = os.path.splitext(filename)
        if ext == "":
            filename += '.' + docTemplate.GetDefaultExtension()

        self.SetFilename(filename)
        self.SetTitle(FileNameFromPath(filename))

        for view in self._documentViews:
            view.OnChangeFilename()

        if not self.OnSaveDocument(filename):
            return False

        if docTemplate.FileMatchesTemplate(filename):
            self.GetDocumentManager().AddFileToHistory(filename)

        return True

    def OnSaveDocument(self, filename):
        """
        Constructs an output file for the given filename (which must not be
        empty), and calls SaveObject. If SaveObject returns true, the
        document is set to unmodified; otherwise, an error message box is
        displayed.
""" if not filename: return False msgTitle = wx.GetApp().GetAppName() if not msgTitle: msgTitle = _("File Error") backupFilename = None fileObject = None copied = False try: # if current file exists, move it to a safe place temporarily if os.path.exists(filename): # Check if read-only. if not os.access(filename, os.W_OK): wx.MessageBox("Could not save '%s'. No write permission to overwrite existing file." % FileNameFromPath(filename), msgTitle, wx.OK | wx.ICON_EXCLAMATION, self.GetDocumentWindow()) return False i = 1 backupFilename = "%s.bak%s" % (filename, i) while os.path.exists(backupFilename): i += 1 backupFilename = "%s.bak%s" % (filename, i) shutil.copy(filename, backupFilename) copied = True fileObject = file(filename, 'w') self.SaveObject(fileObject) fileObject.close() fileObject = None if backupFilename: os.remove(backupFilename) except: # for debugging purposes import traceback traceback.print_exc() if fileObject: fileObject.close() # file is still open, close it, need to do this before removal # save failed, remove copied file if backupFilename and copied: os.remove(backupFilename) wx.MessageBox("Could not save '%s'. %s" % (FileNameFromPath(filename), sys.exc_value), msgTitle, wx.OK | wx.ICON_EXCLAMATION, self.GetDocumentWindow()) return False self.SetDocumentModificationDate() self.SetFilename(filename, True) self.Modify(False) self.SetDocumentSaved(True) #if wx.Platform == '__WXMAC__': # Not yet implemented in wxPython # wx.FileName(file).MacSetDefaultTypeAndCreator() return True def OnOpenDocument(self, filename): """ Constructs an input file for the given filename (which must not be empty), and calls LoadObject. If LoadObject returns true, the document is set to unmodified; otherwise, an error message box is displayed. The document's views are notified that the filename has changed, to give windows an opportunity to update their titles. All of the document's views are then updated. """ if not self.OnSaveModified(): return False msgTitle = wx.GetApp().GetAppName() if not msgTitle: msgTitle = _("File Error") fileObject = file(filename, 'r') try: self.LoadObject(fileObject) fileObject.close() fileObject = None except: # for debugging purposes import traceback traceback.print_exc() if fileObject: fileObject.close() # file is still open, close it wx.MessageBox("Could not open '%s'. %s" % (FileNameFromPath(filename), sys.exc_value), msgTitle, wx.OK | wx.ICON_EXCLAMATION, self.GetDocumentWindow()) return False self.SetDocumentModificationDate() self.SetFilename(filename, True) self.Modify(False) self.SetDocumentSaved(True) self.UpdateAllViews() return True def LoadObject(self, file): """ Override this function and call it from your own LoadObject before loading your own data. LoadObject is called by the framework automatically when the document contents need to be loaded. Note that the wxPython version simply sends you a Python file object, so you can use pickle. """ return True def SaveObject(self, file): """ Override this function and call it from your own SaveObject before saving your own data. SaveObject is called by the framework automatically when the document contents need to be saved. Note that the wxPython version simply sends you a Python file object, so you can use pickle. """ return True def Revert(self): """ Override this function to revert the document to its last saved state. """ return False def GetPrintableName(self): """ Copies a suitable document name into the supplied name buffer. 
        The default function uses the title, or if there is no title, uses
        the filename; or if no filename, the string 'Untitled'.
        """
        if self._documentTitle:
            return self._documentTitle
        elif self._documentFile:
            return FileNameFromPath(self._documentFile)
        else:
            return _("Untitled")

    def GetDocumentWindow(self):
        """
        Intended to return a suitable window for using as a parent for
        document-related dialog boxes. By default, uses the frame associated
        with the first view.
        """
        if len(self._documentViews) > 0:
            return self._documentViews[0].GetFrame()
        else:
            return wx.GetApp().GetTopWindow()

    def OnCreateCommandProcessor(self):
        """
        Override this function if you want a different (or no) command
        processor to be created when the document is created. By default, it
        returns an instance of wxCommandProcessor.
        """
        return CommandProcessor()

    def OnSaveModified(self):
        """
        If the document has been modified, prompts the user to ask if the
        changes should be saved. If the user replies Yes, the Save function
        is called. If No, the document is marked as unmodified and the
        function succeeds. If Cancel, the function fails.
        """
        if not self.IsModified():
            return True

        """ check for file modification outside of application """
        if not self.IsDocumentModificationDateCorrect():
            msgTitle = wx.GetApp().GetAppName()
            if not msgTitle:
                msgTitle = _("Warning")
            res = wx.MessageBox(_("'%s' has been modified outside of %s. Overwrite '%s' with current changes?") % (self.GetPrintableName(), msgTitle, self.GetPrintableName()),
                                msgTitle,
                                wx.YES_NO | wx.CANCEL | wx.ICON_QUESTION,
                                self.GetDocumentWindow())

            if res == wx.NO:
                self.Modify(False)
                return True
            elif res == wx.YES:
                return wx.lib.docview.Document.Save(self)
            else:  # elif res == wx.CANCEL:
                return False

        msgTitle = wx.GetApp().GetAppName()
        if not msgTitle:
            msgTitle = _("Warning")

        res = wx.MessageBox(_("Save changes to '%s'?") % self.GetPrintableName(),
                            msgTitle,
                            wx.YES_NO | wx.CANCEL | wx.ICON_QUESTION,
                            self.GetDocumentWindow())

        if res == wx.NO:
            self.Modify(False)
            return True
        elif res == wx.YES:
            return self.Save()
        else:  # elif res == wx.CANCEL:
            return False

    def Draw(self, context):
        """
        Called by printing framework to draw the view.
        """
        # fixed: 'self' was previously missing from the parameter list
        return True

    def AddView(self, view):
        """
        If the view is not already in the list of views, adds the view and
        calls OnChangedViewList.
        """
        if not view in self._documentViews:
            self._documentViews.append(view)
            self.OnChangedViewList()
        return True

    def RemoveView(self, view):
        """
        Removes the view from the document's list of views, and calls
        OnChangedViewList.
        """
        if view in self._documentViews:
            self._documentViews.remove(view)
            self.OnChangedViewList()
        return True

    def OnCreate(self, path, flags):
        """
        The default implementation calls DeleteContents (an empty
        implementation) and sets the modified flag to false. Override this to
        supply additional behaviour when the document is opened with Open.
        """
        if flags & DOC_NO_VIEW:
            return True
        return self.GetDocumentTemplate().CreateView(self, flags)

    def OnChangedViewList(self):
        """
        Called when a view is added to or deleted from this document. The
        default implementation saves and deletes the document if no views
        exist (the last one has just been removed).
        """
        if len(self._documentViews) == 0:
            if self.OnSaveModified():
                pass  # C version does a delete but Python will garbage collect

    def UpdateAllViews(self, sender = None, hint = None):
        """
        Updates all views. If sender is non-NULL, does not update this view.
        hint represents optional information to allow a view to optimize its
        update.
""" for view in self._documentViews: if view != sender: view.OnUpdate(sender, hint) def NotifyClosing(self): """ Notifies the views that the document is going to close. """ for view in self._documentViews: view.OnClosingDocument() def SetFilename(self, filename, notifyViews = False): """ Sets the filename for this document. Usually called by the framework. If notifyViews is true, wxView.OnChangeFilename is called for all views. """ self._documentFile = filename if notifyViews: for view in self._documentViews: view.OnChangeFilename() def GetWriteable(self): """ Returns true if the document can be written to its accociated file path. This method has been added to wxPython and is not in wxWindows. """ if not self._writeable: return False if not self._documentFile: # Doesn't exist, do a save as return True else: return os.access(self._documentFile, os.W_OK) def SetWriteable(self, writeable): """ Set to False if the document can not be saved. This will disable the ID_SAVE_AS event and is useful for custom documents that should not be saveable. The ID_SAVE event can be disabled by never Modifying the document. This method has been added to wxPython and is not in wxWindows. """ self._writeable = writeable class View(wx.EvtHandler): """ The view class can be used to model the viewing and editing component of an application's file-based data. It is part of the document/view framework supported by wxWindows, and cooperates with the wxDocument, wxDocTemplate and wxDocManager classes. """ def __init__(self): """ Constructor. Define your own default constructor to initialize application-specific data. """ wx.EvtHandler.__init__(self) self._viewDocument = None self._viewFrame = None def Destroy(self): """ Destructor. Removes itself from the document's list of views. """ if self._viewDocument: self._viewDocument.RemoveView(self) wx.EvtHandler.Destroy(self) def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ if not self.GetDocument() or not self.GetDocument().ProcessEvent(event): return False else: return True def ProcessUpdateUIEvent(self, event): """ Processes a UI event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ return False def OnActivateView(self, activate, activeView, deactiveView): """ Called when a view is activated by means of wxView::Activate. The default implementation does nothing. """ pass def OnClosingDocument(self): """ Override this to clean up the view when the document is being closed. The default implementation does nothing. """ pass def OnDraw(self, dc): """ Override this to draw the view for the printing framework. The default implementation does nothing. """ pass def OnPrint(self, dc, info): """ Override this to print the view for the printing framework. The default implementation calls View.OnDraw. """ self.OnDraw(dc) def OnUpdate(self, sender, hint): """ Called when the view should be updated. sender is a pointer to the view that sent the update request, or NULL if no single view requested the update (for instance, when the document is opened). 
        hint is as yet unused but may in future contain application-specific
        information for making updating more efficient.
        """
        if hint:
            if hint[0] == "modify":  # if dirty flag changed, update the view's displayed title
                frame = self.GetFrame()
                if frame and hasattr(frame, "OnTitleIsModified"):
                    frame.OnTitleIsModified()
                    return True
        return False

    def OnChangeFilename(self):
        """
        Called when the filename has changed. The default implementation
        constructs a suitable title and sets the title of the view frame (if
        any).
        """
        if self.GetFrame():
            appName = wx.GetApp().GetAppName()
            if not self.GetDocument():
                if appName:
                    title = appName
                else:
                    return
            else:
                if appName and isinstance(self.GetFrame(), DocChildFrame):  # Only need app name in title for SDI
                    title = appName + _(" - ")
                else:
                    title = ''
            self.GetFrame().SetTitle(title + self.GetDocument().GetPrintableName())

    def GetDocument(self):
        """
        Returns the document associated with the view.
        """
        return self._viewDocument

    def SetDocument(self, doc):
        """
        Associates the given document with the view. Normally called by the
        framework.
        """
        self._viewDocument = doc
        if doc:
            doc.AddView(self)

    def GetViewName(self):
        """
        Gets the name associated with the view (passed to the wxDocTemplate
        constructor). Not currently used by the framework.
        """
        return self._viewTypeName

    def SetViewName(self, name):
        """
        Sets the view type name. Should only be called by the framework.
        """
        self._viewTypeName = name

    def Close(self, deleteWindow=True):
        """
        Closes the view by calling OnClose. If deleteWindow is true, this
        function should delete the window associated with the view.
        """
        if self.OnClose(deleteWindow = deleteWindow):
            return True
        else:
            return False

    def Activate(self, activate=True):
        """
        Call this from your view frame's OnActivate member to tell the
        framework which view is currently active. If your windowing system
        doesn't call OnActivate, you may need to call this function from
        OnMenuCommand or any place where you know the view must be active,
        and the framework will need to get the current view.

        The prepackaged view frame wxDocChildFrame calls wxView.Activate from
        its OnActivate member and from its OnMenuCommand member.
        """
        if self.GetDocument() and self.GetDocumentManager():
            self.OnActivateView(activate, self, self.GetDocumentManager().GetCurrentView())
            self.GetDocumentManager().ActivateView(self, activate)

    def OnClose(self, deleteWindow=True):
        """
        Implements closing behaviour. The default implementation calls
        wxDocument.Close to close the associated document. Does not delete
        the view. The application may wish to do some cleaning up operations
        in this function, if a call to wxDocument::Close succeeded. For
        example, if your application's views all share the same window, you
        need to disassociate the window from the view and perhaps clear the
        window. If deleteWindow is true, delete the frame associated with the
        view.
        """
        if self.GetDocument():
            return self.GetDocument().Close()
        else:
            return True

    def OnCreate(self, doc, flags):
        """
        wxDocManager or wxDocument creates a wxView via a wxDocTemplate. Just
        after the wxDocTemplate creates the wxView, it calls
        wxView::OnCreate. In its OnCreate member function, the wxView can
        create a wxDocChildFrame or a derived class. This wxDocChildFrame
        provides user interface elements to view and/or edit the contents of
        the wxDocument.

        By default, simply returns true. If the function returns false, the
        view will be deleted.
        """
        return True

    def OnCreatePrintout(self):
        """
        Returns a wxPrintout object for the purposes of printing.
        It should create a new object every time it is called; the framework
        will delete objects it creates.

        By default, this function returns an instance of wxDocPrintout, which
        prints and previews one page by calling wxView.OnDraw.

        Override to return an instance of a class other than wxDocPrintout.
        """
        return DocPrintout(self, self.GetDocument().GetPrintableName())

    def GetFrame(self):
        """
        Gets the frame associated with the view (if any). Note that this
        "frame" is not a wxFrame at all in the generic MDI implementation
        which uses the notebook pages instead of the frames and this is why
        this method returns a wxWindow and not a wxFrame.
        """
        return self._viewFrame

    def SetFrame(self, frame):
        """
        Sets the frame associated with this view. The application should
        call this if possible, to tell the view about the frame. See
        GetFrame for the explanation about the mismatch between the "Frame"
        in the method name and the type of its parameter.
        """
        self._viewFrame = frame

    def GetDocumentManager(self):
        """
        Returns the document manager instance associated with this view.
        """
        if self._viewDocument:
            return self.GetDocument().GetDocumentManager()
        else:
            return None

class DocTemplate(wx.Object):
    """
    The wxDocTemplate class is used to model the relationship between a
    document class and a view class.
    """

    def __init__(self, manager, description, filter, dir, ext, docTypeName, viewTypeName, docType, viewType, flags=DEFAULT_TEMPLATE_FLAGS, icon=None):
        """
        Constructor. Create instances dynamically near the start of your
        application after creating a wxDocManager instance, and before doing
        any document or view operations.

        manager is the document manager object which manages this template.

        description is a short description of what the template is for. This
        string will be displayed in the file filter list of Windows file
        selectors.

        filter is an appropriate file filter such as \*.txt.

        dir is the default directory to use for file selectors.

        ext is the default file extension (such as txt).

        docTypeName is a name that should be unique for a given type of
        document, used for gathering a list of views relevant to a
        particular document.

        viewTypeName is a name that should be unique for a given view.

        docType is a Python class. If this is not supplied, you will need to
        derive a new wxDocTemplate class and override the CreateDocument
        member to return a new document instance on demand.

        viewType is a Python class. If this is not supplied, you will need
        to derive a new wxDocTemplate class and override the CreateView
        member to return a new view instance on demand.

        flags is a bit list of the following:

        wx.TEMPLATE_VISIBLE The template may be displayed to the user in
        dialogs.

        wx.TEMPLATE_INVISIBLE The template may not be displayed to the user
        in dialogs.

        wx.DEFAULT_TEMPLATE_FLAGS Defined as wxTEMPLATE_VISIBLE.
        """
        self._docManager = manager
        self._description = description
        self._fileFilter = filter
        self._directory = dir
        self._defaultExt = ext
        self._docTypeName = docTypeName
        self._viewTypeName = viewTypeName
        self._docType = docType
        self._viewType = viewType
        self._flags = flags
        self._icon = icon

        self._docManager.AssociateTemplate(self)

    def GetDefaultExtension(self):
        """
        Returns the default file extension for the document data, as passed
        to the document template constructor.
        """
        return self._defaultExt

    def SetDefaultExtension(self, defaultExt):
        """
        Sets the default file extension.
        """
        self._defaultExt = defaultExt

    def GetDescription(self):
        """
        Returns the text description of this template, as passed to the
        document template constructor.
""" return self._description def SetDescription(self, description): """ Sets the template description. """ self._description = description def GetDirectory(self): """ Returns the default directory, as passed to the document template constructor. """ return self._directory def SetDirectory(self, dir): """ Sets the default directory. """ self._directory = dir def GetDocumentManager(self): """ Returns the document manager instance for which this template was created. """ return self._docManager def SetDocumentManager(self, manager): """ Sets the document manager instance for which this template was created. Should not be called by the application. """ self._docManager = manager def GetFileFilter(self): """ Returns the file filter, as passed to the document template constructor. """ return self._fileFilter def SetFileFilter(self, filter): """ Sets the file filter. """ self._fileFilter = filter def GetFlags(self): """ Returns the flags, as passed to the document template constructor. (see the constructor description for more details). """ return self._flags def SetFlags(self, flags): """ Sets the internal document template flags (see the constructor description for more details). """ self._flags = flags def GetIcon(self): """ Returns the icon, as passed to the document template constructor. This method has been added to wxPython and is not in wxWindows. """ return self._icon def SetIcon(self, flags): """ Sets the icon. This method has been added to wxPython and is not in wxWindows. """ self._icon = icon def GetDocumentType(self): """ Returns the Python document class, as passed to the document template constructor. """ return self._docType def GetViewType(self): """ Returns the Python view class, as passed to the document template constructor. """ return self._viewType def IsVisible(self): """ Returns true if the document template can be shown in user dialogs, false otherwise. """ return (self._flags & TEMPLATE_VISIBLE) == TEMPLATE_VISIBLE def IsNewable(self): """ Returns true if the document template can be shown in "New" dialogs, false otherwise. This method has been added to wxPython and is not in wxWindows. """ return (self._flags & TEMPLATE_NO_CREATE) != TEMPLATE_NO_CREATE def GetDocumentName(self): """ Returns the document type name, as passed to the document template constructor. """ return self._docTypeName def GetViewName(self): """ Returns the view type name, as passed to the document template constructor. """ return self._viewTypeName def CreateDocument(self, path, flags): """ Creates a new instance of the associated document class. If you have not supplied a class to the template constructor, you will need to override this function to return an appropriate document instance. """ doc = self._docType() doc.SetFilename(path) doc.SetDocumentTemplate(self) self.GetDocumentManager().AddDocument(doc) doc.SetCommandProcessor(doc.OnCreateCommandProcessor()) if doc.OnCreate(path, flags): return doc else: if doc in self.GetDocumentManager().GetDocuments(): doc.DeleteAllViews() return None def CreateView(self, doc, flags): """ Creates a new instance of the associated document view. If you have not supplied a class to the template constructor, you will need to override this function to return an appropriate view instance. """ view = self._viewType() view.SetDocument(doc) if view.OnCreate(doc, flags): return view else: view.Destroy() return None def FileMatchesTemplate(self, path): """ Returns True if the path's extension matches one of this template's file filter extensions. 
""" ext = FindExtension(path) if not ext: return False extList = self.GetFileFilter().replace('*','').split(';') return ext in extList class DocManager(wx.EvtHandler): """ The wxDocManager class is part of the document/view framework supported by wxWindows, and cooperates with the wxView, wxDocument and wxDocTemplate classes. """ def __init__(self, flags=DEFAULT_DOCMAN_FLAGS, initialize=True): """ Constructor. Create a document manager instance dynamically near the start of your application before doing any document or view operations. flags is used in the Python version to indicate whether the document manager is in DOC_SDI or DOC_MDI mode. If initialize is true, the Initialize function will be called to create a default history list object. If you derive from wxDocManager, you may wish to call the base constructor with false, and then call Initialize in your own constructor, to allow your own Initialize or OnCreateFileHistory functions to be called. """ wx.EvtHandler.__init__(self) self._defaultDocumentNameCounter = 1 self._flags = flags self._currentView = None self._lastActiveView = None self._maxDocsOpen = 10000 self._fileHistory = None self._templates = [] self._docs = [] self._lastDirectory = "" if initialize: self.Initialize() wx.EVT_MENU(self, wx.ID_OPEN, self.OnFileOpen) wx.EVT_MENU(self, wx.ID_CLOSE, self.OnFileClose) wx.EVT_MENU(self, wx.ID_CLOSE_ALL, self.OnFileCloseAll) wx.EVT_MENU(self, wx.ID_REVERT, self.OnFileRevert) wx.EVT_MENU(self, wx.ID_NEW, self.OnFileNew) wx.EVT_MENU(self, wx.ID_SAVE, self.OnFileSave) wx.EVT_MENU(self, wx.ID_SAVEAS, self.OnFileSaveAs) wx.EVT_MENU(self, wx.ID_UNDO, self.OnUndo) wx.EVT_MENU(self, wx.ID_REDO, self.OnRedo) wx.EVT_MENU(self, wx.ID_PRINT, self.OnPrint) wx.EVT_MENU(self, wx.ID_PRINT_SETUP, self.OnPrintSetup) wx.EVT_MENU(self, wx.ID_PREVIEW, self.OnPreview) wx.EVT_UPDATE_UI(self, wx.ID_OPEN, self.OnUpdateFileOpen) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE, self.OnUpdateFileClose) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE_ALL, self.OnUpdateFileCloseAll) wx.EVT_UPDATE_UI(self, wx.ID_REVERT, self.OnUpdateFileRevert) wx.EVT_UPDATE_UI(self, wx.ID_NEW, self.OnUpdateFileNew) wx.EVT_UPDATE_UI(self, wx.ID_SAVE, self.OnUpdateFileSave) wx.EVT_UPDATE_UI(self, wx.ID_SAVEAS, self.OnUpdateFileSaveAs) wx.EVT_UPDATE_UI(self, wx.ID_UNDO, self.OnUpdateUndo) wx.EVT_UPDATE_UI(self, wx.ID_REDO, self.OnUpdateRedo) wx.EVT_UPDATE_UI(self, wx.ID_PRINT, self.OnUpdatePrint) wx.EVT_UPDATE_UI(self, wx.ID_PRINT_SETUP, self.OnUpdatePrintSetup) wx.EVT_UPDATE_UI(self, wx.ID_PREVIEW, self.OnUpdatePreview) def Destroy(self): """ Destructor. """ self.Clear() wx.EvtHandler.Destroy(self) def GetFlags(self): """ Returns the document manager's flags. This method has been added to wxPython and is not in wxWindows. """ return self._flags def CloseDocument(self, doc, force=True): """ Closes the specified document. """ if doc.Close() or force: doc.DeleteAllViews() if doc in self._docs: doc.Destroy() return True return False def CloseDocuments(self, force=True): """ Closes all currently opened documents. """ for document in self._docs[::-1]: # Close in lifo (reverse) order. We clone the list to make sure we go through all docs even as they are deleted if not self.CloseDocument(document, force): return False if document: document.DeleteAllViews() # Implicitly delete the document when the last view is removed return True def Clear(self, force=True): """ Closes all currently opened document by callling CloseDocuments and clears the document manager's templates. 
""" if not self.CloseDocuments(force): return False self._templates = [] return True def Initialize(self): """ Initializes data; currently just calls OnCreateFileHistory. Some data cannot always be initialized in the constructor because the programmer must be given the opportunity to override functionality. In fact Initialize is called from the wxDocManager constructor, but this can be vetoed by passing false to the second argument, allowing the derived class's constructor to call Initialize, possibly calling a different OnCreateFileHistory from the default. The bottom line: if you're not deriving from Initialize, forget it and construct wxDocManager with no arguments. """ self.OnCreateFileHistory() return True def OnCreateFileHistory(self): """ A hook to allow a derived class to create a different type of file history. Called from Initialize. """ self._fileHistory = wx.FileHistory() def OnFileClose(self, event): """ Closes and deletes the currently active document. """ doc = self.GetCurrentDocument() if doc: doc.DeleteAllViews() if doc in self._docs: self._docs.remove(doc) def OnFileCloseAll(self, event): """ Closes and deletes all the currently opened documents. """ return self.CloseDocuments(force = False) def OnFileNew(self, event): """ Creates a new document and reads in the selected file. """ self.CreateDocument('', DOC_NEW) def OnFileOpen(self, event): """ Creates a new document and reads in the selected file. """ if not self.CreateDocument('', DEFAULT_DOCMAN_FLAGS): self.OnOpenFileFailure() def OnFileRevert(self, event): """ Reverts the current document by calling wxDocument.Save for the current document. """ doc = self.GetCurrentDocument() if not doc: return doc.Revert() def OnFileSave(self, event): """ Saves the current document by calling wxDocument.Save for the current document. """ doc = self.GetCurrentDocument() if not doc: return doc.Save() def OnFileSaveAs(self, event): """ Calls wxDocument.SaveAs for the current document. """ doc = self.GetCurrentDocument() if not doc: return doc.SaveAs() def OnPrint(self, event): """ Prints the current document by calling its View's OnCreatePrintout method. """ view = self.GetCurrentView() if not view: return printout = view.OnCreatePrintout() if printout: if not hasattr(self, "printData"): self.printData = wx.PrintData() self.printData.SetPaperId(wx.PAPER_LETTER) self.printData.SetPrintMode(wx.PRINT_MODE_PRINTER) pdd = wx.PrintDialogData(self.printData) printer = wx.Printer(pdd) printer.Print(view.GetFrame(), printout) def OnPrintSetup(self, event): """ Presents the print setup dialog. """ view = self.GetCurrentView() if view: parentWin = view.GetFrame() else: parentWin = wx.GetApp().GetTopWindow() if not hasattr(self, "printData"): self.printData = wx.PrintData() self.printData.SetPaperId(wx.PAPER_LETTER) data = wx.PrintDialogData(self.printData) printDialog = wx.PrintDialog(parentWin, data) printDialog.GetPrintDialogData().SetSetupDialog(True) printDialog.ShowModal() # this makes a copy of the wx.PrintData instead of just saving # a reference to the one inside the PrintDialogData that will # be destroyed when the dialog is destroyed self.printData = wx.PrintData(printDialog.GetPrintDialogData().GetPrintData()) printDialog.Destroy() def OnPreview(self, event): """ Previews the current document by calling its View's OnCreatePrintout method. 
""" view = self.GetCurrentView() if not view: return printout = view.OnCreatePrintout() if printout: if not hasattr(self, "printData"): self.printData = wx.PrintData() self.printData.SetPaperId(wx.PAPER_LETTER) self.printData.SetPrintMode(wx.PRINT_MODE_PREVIEW) data = wx.PrintDialogData(self.printData) # Pass two printout objects: for preview, and possible printing. preview = wx.PrintPreview(printout, view.OnCreatePrintout(), data) if not preview.Ok(): wx.MessageBox(_("Unable to display print preview.")) return # wxWindows source doesn't use base frame's pos, size, and icon, but did it this way so it would work like MS Office etc. mimicFrame = wx.GetApp().GetTopWindow() frame = wx.PreviewFrame(preview, mimicFrame, _("Print Preview"), mimicFrame.GetPosition(), mimicFrame.GetSize()) frame.SetIcon(mimicFrame.GetIcon()) frame.SetTitle(_("%s - %s - Preview") % (mimicFrame.GetTitle(), view.GetDocument().GetPrintableName())) frame.Initialize() frame.Show(True) def OnUndo(self, event): """ Issues an Undo command to the current document's command processor. """ doc = self.GetCurrentDocument() if not doc: return if doc.GetCommandProcessor(): doc.GetCommandProcessor().Undo() def OnRedo(self, event): """ Issues a Redo command to the current document's command processor. """ doc = self.GetCurrentDocument() if not doc: return if doc.GetCommandProcessor(): doc.GetCommandProcessor().Redo() def OnUpdateFileOpen(self, event): """ Updates the user interface for the File Open command. """ event.Enable(True) def OnUpdateFileClose(self, event): """ Updates the user interface for the File Close command. """ event.Enable(self.GetCurrentDocument() != None) def OnUpdateFileCloseAll(self, event): """ Updates the user interface for the File Close All command. """ event.Enable(self.GetCurrentDocument() != None) def OnUpdateFileRevert(self, event): """ Updates the user interface for the File Revert command. """ event.Enable(self.GetCurrentDocument() != None) def OnUpdateFileNew(self, event): """ Updates the user interface for the File New command. """ return True def OnUpdateFileSave(self, event): """ Updates the user interface for the File Save command. """ doc = self.GetCurrentDocument() event.Enable(doc != None and doc.IsModified()) def OnUpdateFileSaveAs(self, event): """ Updates the user interface for the File Save As command. """ event.Enable(self.GetCurrentDocument() != None and self.GetCurrentDocument().GetWriteable()) def OnUpdateUndo(self, event): """ Updates the user interface for the Undo command. """ doc = self.GetCurrentDocument() event.Enable(doc != None and doc.GetCommandProcessor() != None and doc.GetCommandProcessor().CanUndo()) if doc and doc.GetCommandProcessor(): doc.GetCommandProcessor().SetMenuStrings() else: event.SetText(_("&Undo\tCtrl+Z")) def OnUpdateRedo(self, event): """ Updates the user interface for the Redo command. """ doc = self.GetCurrentDocument() event.Enable(doc != None and doc.GetCommandProcessor() != None and doc.GetCommandProcessor().CanRedo()) if doc and doc.GetCommandProcessor(): doc.GetCommandProcessor().SetMenuStrings() else: event.SetText(_("&Redo\tCtrl+Y")) def OnUpdatePrint(self, event): """ Updates the user interface for the Print command. """ event.Enable(self.GetCurrentDocument() != None) def OnUpdatePrintSetup(self, event): """ Updates the user interface for the Print Setup command. """ return True def OnUpdatePreview(self, event): """ Updates the user interface for the Print Preview command. 
""" event.Enable(self.GetCurrentDocument() != None) def GetCurrentView(self): """ Returns the currently active view. """ if self._currentView: return self._currentView if len(self._docs) == 1: return self._docs[0].GetFirstView() return None def GetLastActiveView(self): """ Returns the last active view. This is used in the SDI framework where dialogs can be mistaken for a view and causes the framework to deactivete the current view. This happens when something like a custom dialog box used to operate on the current view is shown. """ if len(self._docs) >= 1: return self._lastActiveView else: return None def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ view = self.GetCurrentView() if view: if view.ProcessEvent(event): return True id = event.GetId() if id == wx.ID_OPEN: self.OnFileOpen(event) return True elif id == wx.ID_CLOSE: self.OnFileClose(event) return True elif id == wx.ID_CLOSE_ALL: self.OnFileCloseAll(event) return True elif id == wx.ID_REVERT: self.OnFileRevert(event) return True elif id == wx.ID_NEW: self.OnFileNew(event) return True elif id == wx.ID_SAVE: self.OnFileSave(event) return True elif id == wx.ID_SAVEAS: self.OnFileSaveAs(event) return True elif id == wx.ID_UNDO: self.OnUndo(event) return True elif id == wx.ID_REDO: self.OnRedo(event) return True elif id == wx.ID_PRINT: self.OnPrint(event) return True elif id == wx.ID_PRINT_SETUP: self.OnPrintSetup(event) return True elif id == wx.ID_PREVIEW: self.OnPreview(event) return True else: return False def ProcessUpdateUIEvent(self, event): """ Processes a UI event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ id = event.GetId() view = self.GetCurrentView() if view: if view.ProcessUpdateUIEvent(event): return True if id == wx.ID_OPEN: self.OnUpdateFileOpen(event) return True elif id == wx.ID_CLOSE: self.OnUpdateFileClose(event) return True elif id == wx.ID_CLOSE_ALL: self.OnUpdateFileCloseAll(event) return True elif id == wx.ID_REVERT: self.OnUpdateFileRevert(event) return True elif id == wx.ID_NEW: self.OnUpdateFileNew(event) return True elif id == wx.ID_SAVE: self.OnUpdateFileSave(event) return True elif id == wx.ID_SAVEAS: self.OnUpdateFileSaveAs(event) return True elif id == wx.ID_UNDO: self.OnUpdateUndo(event) return True elif id == wx.ID_REDO: self.OnUpdateRedo(event) return True elif id == wx.ID_PRINT: self.OnUpdatePrint(event) return True elif id == wx.ID_PRINT_SETUP: self.OnUpdatePrintSetup(event) return True elif id == wx.ID_PREVIEW: self.OnUpdatePreview(event) return True else: return False def CreateDocument(self, path, flags=0): """ Creates a new document in a manner determined by the flags parameter, which can be: wx.lib.docview.DOC_NEW Creates a fresh document. wx.lib.docview.DOC_SILENT Silently loads the given document file. If wx.lib.docview.DOC_NEW is present, a new document will be created and returned, possibly after asking the user for a template to use if there is more than one document template. If wx.lib.docview.DOC_SILENT is present, a new document will be created and the given file loaded into it. 
If neither of these flags is present, the user will be presented with a file selector for the file to load, and the template to use will be determined by the extension (Windows) or by popping up a template choice list (other platforms). If the maximum number of documents has been reached, this function will delete the oldest currently loaded document before creating a new one. wxPython version supports the document manager's wx.lib.docview.DOC_OPEN_ONCE and wx.lib.docview.DOC_NO_VIEW flag. if wx.lib.docview.DOC_OPEN_ONCE is present, trying to open the same file multiple times will just return the same document. if wx.lib.docview.DOC_NO_VIEW is present, opening a file will generate the document, but not generate a corresponding view. """ templates = [] for temp in self._templates: if temp.IsVisible(): templates.append(temp) if len(templates) == 0: return None if len(self.GetDocuments()) >= self._maxDocsOpen: doc = self.GetDocuments()[0] if not self.CloseDocument(doc, False): return None if flags & DOC_NEW: for temp in templates[:]: if not temp.IsNewable(): templates.remove(temp) if len(templates) == 1: temp = templates[0] else: temp = self.SelectDocumentType(templates) if temp: newDoc = temp.CreateDocument(path, flags) if newDoc: newDoc.SetDocumentName(temp.GetDocumentName()) newDoc.SetDocumentTemplate(temp) newDoc.OnNewDocument() return newDoc else: return None if path and flags & DOC_SILENT: temp = self.FindTemplateForPath(path) else: temp, path = self.SelectDocumentPath(templates, path, flags) # Existing document if path and self.GetFlags() & DOC_OPEN_ONCE: for document in self._docs: if document.GetFilename() and os.path.normcase(document.GetFilename()) == os.path.normcase(path): """ check for file modification outside of application """ if not document.IsDocumentModificationDateCorrect(): msgTitle = wx.GetApp().GetAppName() if not msgTitle: msgTitle = _("Warning") shortName = document.GetPrintableName() res = wx.MessageBox(_("'%s' has been modified outside of %s. Reload '%s' from file system?") % (shortName, msgTitle, shortName), msgTitle, wx.YES_NO | wx.ICON_QUESTION, self.FindSuitableParent()) if res == wx.YES: if not self.CloseDocument(document, False): wx.MessageBox(_("Couldn't reload '%s'. Unable to close current '%s'.") % (shortName, shortName)) return None return self.CreateDocument(path, flags) elif res == wx.NO: # don't ask again document.SetDocumentModificationDate() firstView = document.GetFirstView() if not firstView and not (flags & DOC_NO_VIEW): document.GetDocumentTemplate().CreateView(document, flags) document.UpdateAllViews() firstView = document.GetFirstView() if firstView and firstView.GetFrame() and not (flags & DOC_NO_VIEW): firstView.GetFrame().SetFocus() # Not in wxWindows code but useful nonetheless if hasattr(firstView.GetFrame(), "IsIconized") and firstView.GetFrame().IsIconized(): # Not in wxWindows code but useful nonetheless firstView.GetFrame().Iconize(False) return None if temp: newDoc = temp.CreateDocument(path, flags) if newDoc: newDoc.SetDocumentName(temp.GetDocumentName()) newDoc.SetDocumentTemplate(temp) if not newDoc.OnOpenDocument(path): frame = newDoc.GetFirstView().GetFrame() newDoc.DeleteAllViews() # Implicitly deleted by DeleteAllViews if frame: frame.Destroy() # DeleteAllViews doesn't get rid of the frame, so we'll explicitly destroy it. return None self.AddFileToHistory(path) return newDoc return None def CreateView(self, doc, flags=0): """ Creates a new view for the given document. 
        If more than one view is allowed for the document (by virtue of
        multiple templates mentioning the same document type), a choice of
        view is presented to the user.
        """
        templates = []
        for temp in self._templates:
            if temp.IsVisible():
                if temp.GetDocumentName() == doc.GetDocumentName():
                    templates.append(temp)
        if len(templates) == 0:
            return None

        if len(templates) == 1:
            temp = templates[0]
            view = temp.CreateView(doc, flags)
            if view:
                view.SetViewName(temp.GetViewName())
            return view

        temp = self.SelectViewType(templates)
        if temp:
            view = temp.CreateView(doc, flags)
            if view:
                view.SetViewName(temp.GetViewName())
            return view
        else:
            return None

    def DeleteTemplate(self, template, flags):
        """
        Placeholder, not yet implemented in wxWindows.
        """
        pass

    def FlushDoc(self, doc):
        """
        Placeholder, not yet implemented in wxWindows.
        """
        return False

    def MatchTemplate(self, path):
        """
        Placeholder, not yet implemented in wxWindows.
        """
        return None

    def GetCurrentDocument(self):
        """
        Returns the document associated with the currently active view
        (if any).
        """
        view = self.GetCurrentView()
        if view:
            return view.GetDocument()
        else:
            return None

    def MakeDefaultName(self):
        """
        Returns a suitable default name. This is implemented by appending an
        integer counter to the string "Untitled" and incrementing the counter.
        """
        name = _("Untitled %d") % self._defaultDocumentNameCounter
        self._defaultDocumentNameCounter = self._defaultDocumentNameCounter + 1
        return name

    def MakeFrameTitle(self, doc):
        """
        Returns a suitable title for a document frame. This is implemented by
        appending the document name to the application name.
        """
        appName = wx.GetApp().GetAppName()
        if not doc:
            title = appName
        else:
            docName = doc.GetPrintableName()
            title = docName + _(" - ") + appName
        return title

    def AddFileToHistory(self, fileName):
        """
        Adds a file to the file history list, if we have a pointer to an
        appropriate file menu.
        """
        if self._fileHistory:
            self._fileHistory.AddFileToHistory(fileName)

    def RemoveFileFromHistory(self, i):
        """
        Removes a file from the file history list, if we have a pointer to an
        appropriate file menu.
        """
        if self._fileHistory:
            self._fileHistory.RemoveFileFromHistory(i)

    def GetFileHistory(self):
        """
        Returns the file history.
        """
        return self._fileHistory

    def GetHistoryFile(self, i):
        """
        Returns the file at index i from the file history.
        """
        if self._fileHistory:
            return self._fileHistory.GetHistoryFile(i)
        else:
            return None

    def FileHistoryUseMenu(self, menu):
        """
        Use this menu for appending recently-visited document filenames, for
        convenient access. Calling this function with a valid menu enables the
        history list functionality.

        Note that you can add multiple menus using this function, to be
        managed by the file history object.
        """
        if self._fileHistory:
            self._fileHistory.UseMenu(menu)

    def FileHistoryRemoveMenu(self, menu):
        """
        Removes the given menu from the list of menus managed by the file
        history object.
        """
        if self._fileHistory:
            self._fileHistory.RemoveMenu(menu)

    def FileHistoryLoad(self, config):
        """
        Loads the file history from a config object.
        """
        if self._fileHistory:
            self._fileHistory.Load(config)

    def FileHistorySave(self, config):
        """
        Saves the file history into a config object. This must be called
        explicitly by the application.
        """
        if self._fileHistory:
            self._fileHistory.Save(config)

    def FileHistoryAddFilesToMenu(self, menu=None):
        """
        Appends the files in the history list, to all menus managed by the
        file history object.

        If menu is specified, appends the files in the history list to the
        given menu only.
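
        A minimal wiring sketch (hypothetical manager and menu names):

            docManager.FileHistoryUseMenu(fileMenu)       # manage this menu
            docManager.FileHistoryLoad(wx.Config("app"))  # restore a saved MRU list
            docManager.FileHistoryAddFilesToMenu()        # display the entries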
""" if self._fileHistory: if menu: self._fileHistory.AddFilesToThisMenu(menu) else: self._fileHistory.AddFilesToMenu() def GetHistoryFilesCount(self): """ Returns the number of files currently stored in the file history. """ if self._fileHistory: return self._fileHistory.GetNoHistoryFiles() else: return 0 def FindTemplateForPath(self, path): """ Given a path, try to find template that matches the extension. This is only an approximate method of finding a template for creating a document. Note this wxPython verson looks for and returns a default template if no specific template is found. """ default = None for temp in self._templates: if temp.FileMatchesTemplate(path): return temp if "*.*" in temp.GetFileFilter(): default = temp return default def FindSuitableParent(self): """ Returns a parent frame or dialog, either the frame with the current focus or if there is no current focus the application's top frame. """ parent = wx.GetApp().GetTopWindow() focusWindow = wx.Window_FindFocus() if focusWindow: while focusWindow and not isinstance(focusWindow, wx.Dialog) and not isinstance(focusWindow, wx.Frame): focusWindow = focusWindow.GetParent() if focusWindow: parent = focusWindow return parent def SelectDocumentPath(self, templates, flags, save): """ Under Windows, pops up a file selector with a list of filters corresponding to document templates. The wxDocTemplate corresponding to the selected file's extension is returned. On other platforms, if there is more than one document template a choice list is popped up, followed by a file selector. This function is used in wxDocManager.CreateDocument. """ if wx.Platform == "__WXMSW__" or wx.Platform == "__WXGTK__" or wx.Platform == "__WXMAC__": descr = '' for temp in templates: if temp.IsVisible(): if len(descr) > 0: descr = descr + _('|') descr = descr + temp.GetDescription() + _(" (") + temp.GetFileFilter() + _(") |") + temp.GetFileFilter() # spacing is important, make sure there is no space after the "|", it causes a bug on wx_gtk descr = _("All|*.*|%s") % descr # spacing is important, make sure there is no space after the "|", it causes a bug on wx_gtk else: descr = _("*.*") dlg = wx.FileDialog(self.FindSuitableParent(), _("Select a File"), wildcard=descr, style=wx.OPEN|wx.FILE_MUST_EXIST|wx.CHANGE_DIR) # dlg.CenterOnParent() # wxBug: caused crash with wx.FileDialog if dlg.ShowModal() == wx.ID_OK: path = dlg.GetPath() else: path = None dlg.Destroy() if path: theTemplate = self.FindTemplateForPath(path) return (theTemplate, path) return (None, None) def OnOpenFileFailure(self): """ Called when there is an error opening a file. """ pass def SelectDocumentType(self, temps, sort=False): """ Returns a document template by asking the user (if there is more than one template). This function is used in wxDocManager.CreateDocument. Parameters templates - list of templates from which to choose a desired template. sort - If more than one template is passed in in templates, then this parameter indicates whether the list of templates that the user will have to choose from is sorted or not when shown the choice box dialog. Default is false. 
""" templates = [] for temp in temps: if temp.IsVisible(): want = True for temp2 in templates: if temp.GetDocumentName() == temp2.GetDocumentName() and temp.GetViewName() == temp2.GetViewName(): want = False break if want: templates.append(temp) if len(templates) == 0: return None elif len(templates) == 1: return templates[0] if sort: def tempcmp(a, b): return cmp(a.GetDescription(), b.GetDescription()) templates.sort(tempcmp) strings = [] for temp in templates: strings.append(temp.GetDescription()) res = wx.GetSingleChoiceIndex(_("Select a document type:"), _("Documents"), strings, self.FindSuitableParent()) if res == -1: return None return templates[res] def SelectViewType(self, temps, sort=False): """ Returns a document template by asking the user (if there is more than one template), displaying a list of valid views. This function is used in wxDocManager::CreateView. The dialog normally will not appear because the array of templates only contains those relevant to the document in question, and often there will only be one such. """ templates = [] strings = [] for temp in temps: if temp.IsVisible() and temp.GetViewTypeName(): if temp.GetViewName() not in strings: templates.append(temp) strings.append(temp.GetViewTypeName()) if len(templates) == 0: return None elif len(templates) == 1: return templates[0] if sort: def tempcmp(a, b): return cmp(a.GetViewTypeName(), b.GetViewTypeName()) templates.sort(tempcmp) res = wx.GetSingleChoiceIndex(_("Select a document view:"), _("Views"), strings, self.FindSuitableParent()) if res == -1: return None return templates[res] def GetTemplates(self): """ Returns the document manager's template list. This method has been added to wxPython and is not in wxWindows. """ return self._templates def AssociateTemplate(self, docTemplate): """ Adds the template to the document manager's template list. """ if docTemplate not in self._templates: self._templates.append(docTemplate) def DisassociateTemplate(self, docTemplate): """ Removes the template from the list of templates. """ self._templates.remove(docTemplate) def AddDocument(self, document): """ Adds the document to the list of documents. """ if document not in self._docs: self._docs.append(document) def RemoveDocument(self, doc): """ Removes the document from the list of documents. """ if doc in self._docs: self._docs.remove(doc) def ActivateView(self, view, activate=True, deleting=False): """ Sets the current view. """ if activate: self._currentView = view self._lastActiveView = view else: self._currentView = None def GetMaxDocsOpen(self): """ Returns the number of documents that can be open simultaneously. """ return self._maxDocsOpen def SetMaxDocsOpen(self, maxDocsOpen): """ Sets the maximum number of documents that can be open at a time. By default, this is 10,000. If you set it to 1, existing documents will be saved and deleted when the user tries to open or create a new one (similar to the behaviour of Windows Write, for example). Allowing multiple documents gives behaviour more akin to MS Word and other Multiple Document Interface applications. """ self._maxDocsOpen = maxDocsOpen def GetDocuments(self): """ Returns the list of documents. """ return self._docs class DocParentFrame(wx.Frame): """ The wxDocParentFrame class provides a default top-level frame for applications using the document/view framework. This class can only be used for SDI (not MDI) parent frames. It cooperates with the wxView, wxDocument, wxDocManager and wxDocTemplates classes. 
""" def __init__(self, manager, frame, id, title, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name="frame"): """ Constructor. Note that the event table must be rebuilt for the frame since the EvtHandler is not virtual. """ wx.Frame.__init__(self, frame, id, title, pos, size, style) self._docManager = manager wx.EVT_CLOSE(self, self.OnCloseWindow) wx.EVT_MENU(self, wx.ID_EXIT, self.OnExit) wx.EVT_MENU_RANGE(self, wx.ID_FILE1, wx.ID_FILE9, self.OnMRUFile) wx.EVT_MENU(self, wx.ID_NEW, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_OPEN, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE_ALL, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REVERT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVEAS, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_UNDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT_SETUP, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PREVIEW, self.ProcessEvent) wx.EVT_UPDATE_UI(self, wx.ID_NEW, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_OPEN, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE_ALL, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REVERT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVEAS, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_UNDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT_SETUP, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PREVIEW, self.ProcessUpdateUIEvent) def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ return self._docManager and self._docManager.ProcessEvent(event) def ProcessUpdateUIEvent(self, event): """ Processes a UI event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ return self._docManager and self._docManager.ProcessUpdateUIEvent(event) def OnExit(self, event): """ Called when File/Exit is chosen and closes the window. """ self.Close() def OnMRUFile(self, event): """ Opens the appropriate file when it is selected from the file history menu. """ n = event.GetId() - wx.ID_FILE1 filename = self._docManager.GetHistoryFile(n) if filename: self._docManager.CreateDocument(filename, DOC_SILENT) else: self._docManager.RemoveFileFromHistory(n) msgTitle = wx.GetApp().GetAppName() if not msgTitle: msgTitle = _("File Error") wx.MessageBox("The file '%s' doesn't exist and couldn't be opened.\nIt has been removed from the most recently used files list" % FileNameFromPath(file), msgTitle, wx.OK | wx.ICON_EXCLAMATION, self) def OnCloseWindow(self, event): """ Deletes all views and documents. If no user input cancelled the operation, the frame will be destroyed and the application will exit. 
""" if self._docManager.Clear(not event.CanVeto()): self.Destroy() else: event.Veto() class DocChildFrame(wx.Frame): """ The wxDocChildFrame class provides a default frame for displaying documents on separate windows. This class can only be used for SDI (not MDI) child frames. The class is part of the document/view framework supported by wxWindows, and cooperates with the wxView, wxDocument, wxDocManager and wxDocTemplate classes. """ def __init__(self, doc, view, frame, id, title, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name="frame"): """ Constructor. Note that the event table must be rebuilt for the frame since the EvtHandler is not virtual. """ wx.Frame.__init__(self, frame, id, title, pos, size, style, name) wx.EVT_ACTIVATE(self, self.OnActivate) wx.EVT_CLOSE(self, self.OnCloseWindow) self._childDocument = doc self._childView = view if view: view.SetFrame(self) wx.EVT_MENU(self, wx.ID_NEW, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_OPEN, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE_ALL, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REVERT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVEAS, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_UNDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT_SETUP, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PREVIEW, self.ProcessEvent) wx.EVT_UPDATE_UI(self, wx.ID_NEW, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_OPEN, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE_ALL, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REVERT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVEAS, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_UNDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT_SETUP, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PREVIEW, self.ProcessUpdateUIEvent) def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ if self._childView: self._childView.Activate(True) if not self._childView or not self._childView.ProcessEvent(event): # IsInstance not working, but who cares just send all the commands up since this isn't a real ProcessEvent like wxWindows # if not isinstance(event, wx.CommandEvent) or not self.GetParent() or not self.GetParent().ProcessEvent(event): if not self.GetParent() or not self.GetParent().ProcessEvent(event): return False else: return True else: return True def ProcessUpdateUIEvent(self, event): """ Processes a UI event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ if self.GetParent(): self.GetParent().ProcessUpdateUIEvent(event) else: return False def OnActivate(self, event): """ Activates the current view. 
""" event.Skip() if self._childView: self._childView.Activate(event.GetActive()) def OnCloseWindow(self, event): """ Closes and deletes the current view and document. """ if self._childView: ans = False if not event.CanVeto(): ans = True else: ans = self._childView.Close(deleteWindow = False) if ans: self._childView.Activate(False) self._childView.Destroy() self._childView = None if self._childDocument: self._childDocument.Destroy() # This isn't in the wxWindows codebase but the document needs to be disposed of somehow self._childDocument = None self.Destroy() else: event.Veto() else: event.Veto() def GetDocument(self): """ Returns the document associated with this frame. """ return self._childDocument def SetDocument(self, document): """ Sets the document for this frame. """ self._childDocument = document def GetView(self): """ Returns the view associated with this frame. """ return self._childView def SetView(self, view): """ Sets the view for this frame. """ self._childView = view class DocMDIParentFrame(wx.MDIParentFrame): """ The wxDocMDIParentFrame class provides a default top-level frame for applications using the document/view framework. This class can only be used for MDI parent frames. It cooperates with the wxView, wxDocument, wxDocManager and wxDocTemplate classes. """ def __init__(self, manager, frame, id, title, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name="frame"): """ Constructor. Note that the event table must be rebuilt for the frame since the EvtHandler is not virtual. """ wx.MDIParentFrame.__init__(self, frame, id, title, pos, size, style, name) self._docManager = manager wx.EVT_CLOSE(self, self.OnCloseWindow) wx.EVT_MENU(self, wx.ID_EXIT, self.OnExit) wx.EVT_MENU_RANGE(self, wx.ID_FILE1, wx.ID_FILE9, self.OnMRUFile) wx.EVT_MENU(self, wx.ID_NEW, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_OPEN, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE_ALL, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_CLOSE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REVERT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVE, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_SAVEAS, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_UNDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_REDO, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PRINT_SETUP, self.ProcessEvent) wx.EVT_MENU(self, wx.ID_PREVIEW, self.ProcessEvent) wx.EVT_UPDATE_UI(self, wx.ID_NEW, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_OPEN, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE_ALL, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_CLOSE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REVERT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVE, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_SAVEAS, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_UNDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_REDO, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PRINT_SETUP, self.ProcessUpdateUIEvent) wx.EVT_UPDATE_UI(self, wx.ID_PREVIEW, self.ProcessUpdateUIEvent) def ProcessEvent(self, event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. 
""" return self._docManager and self._docManager.ProcessEvent(event) def ProcessUpdateUIEvent(self, event): """ Processes a UI event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. """ return self._docManager and self._docManager.ProcessUpdateUIEvent(event) def OnExit(self, event): """ Called when File/Exit is chosen and closes the window. """ self.Close() def OnMRUFile(self, event): """ Opens the appropriate file when it is selected from the file history menu. """ n = event.GetId() - wx.ID_FILE1 filename = self._docManager.GetHistoryFile(n) if filename: self._docManager.CreateDocument(filename, DOC_SILENT) else: self._docManager.RemoveFileFromHistory(n) msgTitle = wx.GetApp().GetAppName() if not msgTitle: msgTitle = _("File Error") wx.MessageBox("The file '%s' doesn't exist and couldn't be opened.\nIt has been removed from the most recently used files list" % FileNameFromPath(file), msgTitle, wx.OK | wx.ICON_EXCLAMATION, self) def OnCloseWindow(self, event): """ Deletes all views and documents. If no user input cancelled the operation, the frame will be destroyed and the application will exit. """ if self._docManager.Clear(not event.CanVeto()): self.Destroy() else: event.Veto() class DocMDIChildFrame(wx.MDIChildFrame): """ The wxDocMDIChildFrame class provides a default frame for displaying documents on separate windows. This class can only be used for MDI child frames. The class is part of the document/view framework supported by wxWindows, and cooperates with the wxView, wxDocument, wxDocManager and wxDocTemplate classes. """ def __init__(self, doc, view, frame, id, title, pos=wx.DefaultPosition, size=wx.DefaultSize, style=wx.DEFAULT_FRAME_STYLE, name="frame"): """ Constructor. Note that the event table must be rebuilt for the frame since the EvtHandler is not virtual. """ wx.MDIChildFrame.__init__(self, frame, id, title, pos, size, style, name) self._childDocument = doc self._childView = view if view: view.SetFrame(self) # self.Create(doc, view, frame, id, title, pos, size, style, name) self._activeEvent = None self._activated = 0 wx.EVT_ACTIVATE(self, self.OnActivate) wx.EVT_CLOSE(self, self.OnCloseWindow) if frame: # wxBug: For some reason the EVT_ACTIVATE event is not getting triggered for the first mdi client window that is opened so we have to do it manually mdiChildren = filter(lambda x: isinstance(x, wx.MDIChildFrame), frame.GetChildren()) if len(mdiChildren) == 1: self.Activate() ## # Couldn't get this to work, but seems to work fine with single stage construction ## def Create(self, doc, view, frame, id, title, pos, size, style, name): ## self._childDocument = doc ## self._childView = view ## if wx.MDIChildFrame.Create(self, frame, id, title, pos, size, style, name): ## if view: ## view.SetFrame(self) ## return True ## return False def Activate(self): # Need this in case there are embedded sash windows and such, OnActivate is not getting called """ Activates the current view. """ if self._childView: self._childView.Activate(True) def ProcessEvent(event): """ Processes an event, searching event tables and calling zero or more suitable event handler function(s). Note that the ProcessEvent method is called from the wxPython docview framework directly since wxPython does not have a virtual ProcessEvent function. 
""" if self._activeEvent == event: return False self._activeEvent = event # Break recursion loops if self._childView: self._childView.Activate(True) if not self._childView or not self._childView.ProcessEvent(event): if not isinstance(event, wx.CommandEvent) or not self.GetParent() or not self.GetParent().ProcessEvent(event): ret = False else: ret = True else: ret = True self._activeEvent = None return ret def OnActivate(self, event): """ Sets the currently active view to be the frame's view. You may need to override (but still call) this function in order to set the keyboard focus for your subwindow. """ event.Skip() if self._activated != 0: return True self._activated += 1 wx.MDIChildFrame.Activate(self) if event.GetActive() and self._childView: self._childView.Activate(event.GetActive()) self._activated = 0 def OnCloseWindow(self, event): """ Closes and deletes the current view and document. """ if self._childView: ans = False if not event.CanVeto(): ans = True else: ans = self._childView.Close(deleteWindow = False) if ans: self._childView.Activate(False) self._childView.Destroy() self._childView = None if self._childDocument: # This isn't in the wxWindows codebase but the document needs to be disposed of somehow self._childDocument.DeleteContents() if self._childDocument.GetDocumentManager(): self._childDocument.GetDocumentManager().RemoveDocument(self._childDocument) self._childDocument = None self.Destroy() else: event.Veto() else: event.Veto() def GetDocument(self): """ Returns the document associated with this frame. """ return self._childDocument def SetDocument(self, document): """ Sets the document for this frame. """ self._childDocument = document def GetView(self): """ Returns the view associated with this frame. """ return self._childView def SetView(self, view): """ Sets the view for this frame. """ self._childView = view def OnTitleIsModified(self): """ Add/remove to the frame's title an indication that the document is dirty. If the document is dirty, an '*' is appended to the title This method has been added to wxPython and is not in wxWindows. """ title = self.GetTitle() if title: if self.GetDocument().IsModified(): if title.endswith("*"): return else: title = title + "*" self.SetTitle(title) else: if title.endswith("*"): title = title[:-1] self.SetTitle(title) else: return class DocPrintout(wx.Printout): """ DocPrintout is a default Printout that prints the first page of a document view. """ def __init__(self, view, title="Printout"): """ Constructor. """ wx.Printout.__init__(self, title) self._printoutView = view def GetView(self): """ Returns the DocPrintout's view. """ return self._printoutView def OnPrintPage(self, page): """ Prints the first page of the view. """ dc = self.GetDC() ppiScreenX, ppiScreenY = self.GetPPIScreen() ppiPrinterX, ppiPrinterY = self.GetPPIPrinter() scale = ppiPrinterX/ppiScreenX w, h = dc.GetSize() pageWidth, pageHeight = self.GetPageSizePixels() overallScale = scale * w / pageWidth dc.SetUserScale(overallScale, overallScale) if self._printoutView: self._printoutView.OnDraw(dc) return True def HasPage(self, pageNum): """ Indicates that the DocPrintout only has a single page. """ return pageNum == 1 def GetPageInfo(self): """ Indicates that the DocPrintout only has a single page. 
""" minPage = 1 maxPage = 1 selPageFrom = 1 selPageTo = 1 return (minPage, maxPage, selPageFrom, selPageTo) #---------------------------------------------------------------------- # Command Classes #---------------------------------------------------------------------- class Command(wx.Object): """ wxCommand is a base class for modelling an application command, which is an action usually performed by selecting a menu item, pressing a toolbar button or any other means provided by the application to change the data or view. """ def __init__(self, canUndo = False, name = None): """ Constructor. wxCommand is an abstract class, so you will need to derive a new class and call this constructor from your own constructor. canUndo tells the command processor whether this command is undo-able. You can achieve the same functionality by overriding the CanUndo member function (if for example the criteria for undoability is context- dependent). name must be supplied for the command processor to display the command name in the application's edit menu. """ self._canUndo = canUndo self._name = name def CanUndo(self): """ Returns true if the command can be undone, false otherwise. """ return self._canUndo def GetName(self): """ Returns the command name. """ return self._name def Do(self): """ Override this member function to execute the appropriate action when called. Return true to indicate that the action has taken place, false otherwise. Returning false will indicate to the command processor that the action is not undoable and should not be added to the command history. """ return True def Undo(self): """ Override this member function to un-execute a previous Do. Return true to indicate that the action has taken place, false otherwise. Returning false will indicate to the command processor that the action is not redoable and no change should be made to the command history. How you implement this command is totally application dependent, but typical strategies include: Perform an inverse operation on the last modified piece of data in the document. When redone, a copy of data stored in command is pasted back or some operation reapplied. This relies on the fact that you know the ordering of Undos; the user can never Undo at an arbitrary position in he command history. Restore the entire document state (perhaps using document transactioning). Potentially very inefficient, but possibly easier to code if the user interface and data are complex, and an 'inverse execute' operation is hard to write. """ return True class CommandProcessor(wx.Object): """ wxCommandProcessor is a class that maintains a history of wxCommands, with undo/redo functionality built-in. Derive a new class from this if you want different behaviour. """ def __init__(self, maxCommands=-1): """ Constructor. maxCommands may be set to a positive integer to limit the number of commands stored to it, otherwise (and by default) the list of commands can grow arbitrarily. """ self._maxCommands = maxCommands self._editMenu = None self._undoAccelerator = _("Ctrl+Z") self._redoAccelerator = _("Ctrl+Y") self.ClearCommands() def _GetCurrentCommand(self): if len(self._commands) == 0: return None else: return self._commands[-1] def _GetCurrentRedoCommand(self): if len(self._redoCommands) == 0: return None else: return self._redoCommands[-1] def GetMaxCommands(self): """ Returns the maximum number of commands that the command processor stores. """ return self._maxCommands def GetCommands(self): """ Returns the list of commands. 
""" return self._commands def ClearCommands(self): """ Deletes all the commands in the list and sets the current command pointer to None. """ self._commands = [] self._redoCommands = [] def GetEditMenu(self): """ Returns the edit menu associated with the command processor. """ return self._editMenu def SetEditMenu(self, menu): """ Tells the command processor to update the Undo and Redo items on this menu as appropriate. Set this to NULL if the menu is about to be destroyed and command operations may still be performed, or the command processor may try to access an invalid pointer. """ self._editMenu = menu def GetUndoAccelerator(self): """ Returns the string that will be appended to the Undo menu item. """ return self._undoAccelerator def SetUndoAccelerator(self, accel): """ Sets the string that will be appended to the Redo menu item. """ self._undoAccelerator = accel def GetRedoAccelerator(self): """ Returns the string that will be appended to the Redo menu item. """ return self._redoAccelerator def SetRedoAccelerator(self, accel): """ Sets the string that will be appended to the Redo menu item. """ self._redoAccelerator = accel def SetMenuStrings(self): """ Sets the menu labels according to the currently set menu and the current command state. """ if self.GetEditMenu() != None: undoCommand = self._GetCurrentCommand() redoCommand = self._GetCurrentRedoCommand() undoItem = self.GetEditMenu().FindItemById(wx.ID_UNDO) redoItem = self.GetEditMenu().FindItemById(wx.ID_REDO) if self.GetUndoAccelerator(): undoAccel = '\t' + self.GetUndoAccelerator() else: undoAccel = '' if self.GetRedoAccelerator(): redoAccel = '\t' + self.GetRedoAccelerator() else: redoAccel = '' if undoCommand and undoItem and undoCommand.CanUndo(): undoItem.SetText(_("&Undo ") + undoCommand.GetName() + undoAccel) #elif undoCommand and not undoCommand.CanUndo(): # undoItem.SetText(_("Can't Undo") + undoAccel) else: undoItem.SetText(_("&Undo" + undoAccel)) if redoCommand and redoItem: redoItem.SetText(_("&Redo ") + redoCommand.GetName() + redoAccel) else: redoItem.SetText(_("&Redo") + redoAccel) def CanUndo(self): """ Returns true if the currently-active command can be undone, false otherwise. """ if self._GetCurrentCommand() == None: return False return self._GetCurrentCommand().CanUndo() def CanRedo(self): """ Returns true if the currently-active command can be redone, false otherwise. """ return self._GetCurrentRedoCommand() != None def Submit(self, command, storeIt=True): """ Submits a new command to the command processor. The command processor calls wxCommand::Do to execute the command; if it succeeds, the command is stored in the history list, and the associated edit menu (if any) updated appropriately. If it fails, the command is deleted immediately. Once Submit has been called, the passed command should not be deleted directly by the application. storeIt indicates whether the successful command should be stored in the history list. """ done = command.Do() if done: del self._redoCommands[:] if storeIt: self._commands.append(command) if self._maxCommands > -1: if len(self._commands) > self._maxCommands: del self._commands[0] return done def Redo(self): """ Redoes the command just undone. """ cmd = self._GetCurrentRedoCommand() if not cmd: return False done = cmd.Do() if done: self._commands.append(self._redoCommands.pop()) return done def Undo(self): """ Undoes the command just executed. 
""" cmd = self._GetCurrentCommand() if not cmd: return False done = cmd.Undo() if done: self._redoCommands.append(self._commands.pop()) return done ``` #### File: wx/lib/itemspicker.py ```python import wx __version__ = 0.1 IP_DEFAULT_STYLE = 0 IP_SORT_CHOICES = 1 IP_SORT_SELECTED = 2 IP_REMOVE_FROM_CHOICES = 4 wxEVT_IP_SELECTION_CHANGED = wx.NewEventType() EVT_IP_SELECTION_CHANGED = wx.PyEventBinder(wxEVT_IP_SELECTION_CHANGED, 1) LB_STYLE = wx.LB_EXTENDED|wx.LB_HSCROLL class IpSelectionChanged(wx.PyCommandEvent): def __init__(self, id, items, object = None): wx.PyCommandEvent.__init__(self, wxEVT_IP_SELECTION_CHANGED, id) self.__items = items self.SetEventObject(object) def GetItems(self): return self.__items class ItemsPicker(wx.Panel): ''' ItemsPicker is a widget that allows the user to form a set of picked items out of a given list ''' def __init__(self, parent, id=wx.ID_ANY, choices = [], label = '', selectedLabel = '', ipStyle = IP_DEFAULT_STYLE, *args, **kw): ''' ItemsPicker(parent, choices = [], label = '', selectedLabel = '', ipStyle = IP_DEFAULT_STYLE) ''' wx.Panel.__init__(self, parent, id, *args, **kw) self._ipStyle = ipStyle sizer = wx.BoxSizer(wx.HORIZONTAL) sizer.Add(self._CreateSourceList(choices, label), 1, wx.EXPAND|wx.ALL, 5) sizer.Add(self._CreateButtons(), 0, wx.ALIGN_CENTER|wx.ALL, 5) sizer.Add(self._CreateDestList(selectedLabel), 1, wx.EXPAND|wx.ALL, 5) self.SetSizer(sizer) def SetItems(self, items): '''SetItems(self, items)=> None items - Sequence of strings that the user can pick from''' return self._source.SetItems(items) def GetItems(self): '''GetItems(self)=> items returns list of strings that the user can pick from''' return self._source.GetItems() Items = property(fget = GetItems, fset = SetItems, doc = 'See GetItems/SetItems') def GetSelections(self): '''GetSelections(self)=>items returns list of strings that were selected ''' return self._dest.GetItems() def SetSelections(self, items): '''SetSelections(self, items)=>None items - Sequence of strings to be selected The items are displayed in the selection part of the widget''' assert len(items)==len(set(items)),"duplicate items are not allowed" if items != self._dest.GetItems(): self._dest.SetItems(items) self._FireIpSelectionChanged() Selections = property(fget = GetSelections, fset = SetSelections, doc = 'See GetSelections/SetSelections') def _CreateButtons(self): sizer = wx.BoxSizer(wx.VERTICAL) self.bAdd = wx.Button(self, -1, label = 'Add ->') self.bAdd.Bind(wx.EVT_BUTTON, self._OnAdd) self.bRemove = wx.Button(self, -1, label = '<- Remove') self.bRemove.Bind(wx.EVT_BUTTON, self._OnRemove) sizer.Add(self.bAdd, 0, wx.EXPAND|wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5) sizer.Add(self.bRemove, 0, wx.EXPAND|wx.ALL, 5) return sizer def _set_add_button_label(self, label=None): if label is None: return self.bAdd.SetLabel(label) add_button_label = property(fset = _set_add_button_label, fget = lambda x:x) def _set_remove_button_label(self, label=None): if label is None: return self.bRemove.SetLabel(label) remove_button_label = property(fset = _set_remove_button_label, fget = lambda x:x) def _OnAdd(self, e): if self._ipStyle & IP_REMOVE_FROM_CHOICES: self._MoveItems(self._source,self._dest) else: self._AddSelectedItems() def _MoveItems(self,source,dest): selections = source.GetSelections() selectedItems = map(source.GetString, selections) dest.SetItems(dest.GetItems() + selectedItems) selections = set(selections) source.SetItems([item for i, item in enumerate(source.GetItems())\ if i not in selections]) 
self._FireIpSelectionChanged() def _AddSelectedItems(self): newItems = map(self._source.GetString, self._source.GetSelections()) items = self._dest.GetItems() oldItems = set(items) for newItem in newItems: if newItem not in oldItems: items.append(newItem) self.SetSelections(items) def _FireIpSelectionChanged(self): self.GetEventHandler().ProcessEvent( IpSelectionChanged(self.GetId(), self._dest.GetItems(), self )) def _OnRemove(self, e): if self._ipStyle & IP_REMOVE_FROM_CHOICES: self._MoveItems(self._dest, self._source) else: self._RemoveSelected() def _RemoveSelected(self): selections = self._dest.GetSelections() if selections: allItems = self._dest.GetItems() items = [item for i, item in enumerate(allItems)\ if i not in selections] self.SetSelections(items) self._FireIpSelectionChanged() def _CreateSourceList(self, items, label): style = LB_STYLE if self._ipStyle & IP_SORT_CHOICES: style |= wx.LB_SORT sizer = wx.BoxSizer(wx.VERTICAL) if label: sizer.Add(wx.StaticText(self, label = label), 0, wx.ALIGN_LEFT|wx.ALL, 5) self._source = wx.ListBox(self, -1, style = style) self._source.Bind(wx.EVT_LISTBOX_DCLICK, self._OnDClick) self._source.SetItems(items) sizer.Add(self._source, 1, wx.EXPAND|wx.ALL, 5) return sizer def _CreateDestList(self, label): style = LB_STYLE if self._ipStyle & IP_SORT_SELECTED: style |= wx.LB_SORT sizer = wx.BoxSizer(wx.VERTICAL) if label: sizer.Add(wx.StaticText(self, label = label), 0, wx.ALIGN_LEFT|wx.ALL, 5) self._dest = wx.ListBox(self, -1, style = style) self._dest.Bind(wx.EVT_LISTBOX_DCLICK, self._OnDClick) sizer.Add(self._dest, 1, wx.EXPAND|wx.ALL, 5) return sizer def _OnDClick(self, e): lb = e.GetEventObject() selections = lb.GetSelections() if len(selections) != 1: return #DCLICK only works on one item if e.GetSelection() != selections[0]: #this can happen using ^DCLICK when two items are selected return if lb == self._source: self._OnAdd(e) else: self._OnRemove(e) if __name__ == '__main__': test = wx.App(0) frame = wx.Frame(None, -1) d = wx.Dialog(frame, style = wx.RESIZE_BORDER|wx.DEFAULT_DIALOG_STYLE) d.sizer = wx.BoxSizer(wx.VERTICAL) d.sizer.Add(wx.StaticText(d, -1, label = 'Example of the ItemsPicker'), 0, wx.ALL, 10) ip = ItemsPicker(d, -1, ['pop', 'cool', 'lame'], 'Stuff:', 'Selected stuff:',IP_SORT_SELECTED|IP_SORT_CHOICES|IP_REMOVE_FROM_CHOICES) ip.add_button_label = u'left -> right' ip.remove_button_label = u'right -> left' d.sizer.Add(ip, 1, wx.EXPAND, 1) d.SetSizer(d.sizer) test.SetTopWindow(frame) def callback(e): print 'selected items', e.GetItems() d.Bind(EVT_IP_SELECTION_CHANGED, callback) d.ShowModal() d.Destroy() frame.Close() ``` #### File: lib/masked/timectrl.py ```python import copy import string import types import wx from wx.tools.dbg import Logger from wx.lib.masked import Field, BaseMaskedTextCtrl dbg = Logger() ##dbg(enable=0) try: from mx import DateTime accept_mx = True except ImportError: accept_mx = False # This class of event fires whenever the value of the time changes in the control: wxEVT_TIMEVAL_UPDATED = wx.NewEventType() EVT_TIMEUPDATE = wx.PyEventBinder(wxEVT_TIMEVAL_UPDATED, 1) class TimeUpdatedEvent(wx.PyCommandEvent): """ Used to fire an EVT_TIMEUPDATE event whenever the value in a TimeCtrl changes. 
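
    A minimal binding sketch (OnTimeChanged is a hypothetical handler):

        def OnTimeChanged(event):
            print event.GetValue()    # e.g. '12:00:00 AM'

        timeCtrl.Bind(EVT_TIMEUPDATE, OnTimeChanged)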
""" def __init__(self, id, value ='12:00:00 AM'): wx.PyCommandEvent.__init__(self, wxEVT_TIMEVAL_UPDATED, id) self.value = value def GetValue(self): """Retrieve the value of the time control at the time this event was generated""" return self.value class TimeCtrlAccessorsMixin: """ Defines TimeCtrl's list of attributes having their own Get/Set functions, ignoring those in the base class that make no sense for a time control. """ exposed_basectrl_params = ( 'defaultValue', 'description', 'useFixedWidthFont', 'emptyBackgroundColour', 'validBackgroundColour', 'invalidBackgroundColour', 'validFunc', 'validRequired', ) for param in exposed_basectrl_params: propname = param[0].upper() + param[1:] exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param)) exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param)) if param.find('Colour') != -1: # add non-british spellings, for backward-compatibility propname.replace('Colour', 'Color') exec('def Set%s(self, value): self.SetCtrlParameters(%s=value)' % (propname, param)) exec('def Get%s(self): return self.GetCtrlParameter("%s")''' % (propname, param)) class TimeCtrl(BaseMaskedTextCtrl): """ Masked control providing several time formats and manipulation of time values. """ valid_ctrl_params = { 'format' : 'HHMMSS', # default format code 'displaySeconds' : True, # by default, shows seconds 'min': None, # by default, no bounds set 'max': None, 'limited': False, # by default, no limiting even if bounds set 'useFixedWidthFont': True, # by default, use a fixed-width font 'oob_color': "Yellow" # by default, the default masked.TextCtrl "invalid" color } def __init__ ( self, parent, id=-1, value = '00:00:00', pos = wx.DefaultPosition, size = wx.DefaultSize, fmt24hr=False, spinButton = None, style = wx.TE_PROCESS_TAB, validator = wx.DefaultValidator, name = "time", **kwargs ): # set defaults for control: ## dbg('setting defaults:') self.__fmt24hr = False wxdt = wx.DateTimeFromDMY(1, 0, 1970) try: if wxdt.Format('%p') != 'AM': TimeCtrl.valid_ctrl_params['format'] = '24HHMMSS' self.__fmt24hr = True fmt24hr = True # force/change default positional argument # (will countermand explicit set to False too.) except: TimeCtrl.valid_ctrl_params['format'] = '24HHMMSS' self.__fmt24hr = True fmt24hr = True # force/change default positional argument # (will countermand explicit set to False too.) 
for key, param_value in TimeCtrl.valid_ctrl_params.items(): # This is done this way to make setattr behave consistently with # "private attribute" name mangling setattr(self, "_TimeCtrl__" + key, copy.copy(param_value)) # create locals from current defaults, so we can override if # specified in kwargs, and handle uniformly: min = self.__min max = self.__max limited = self.__limited self.__posCurrent = 0 # handle deprecated keword argument name: if kwargs.has_key('display_seconds'): kwargs['displaySeconds'] = kwargs['display_seconds'] del kwargs['display_seconds'] if not kwargs.has_key('displaySeconds'): kwargs['displaySeconds'] = True # (handle positional arg (from original release) differently from rest of kwargs:) if not kwargs.has_key('format'): if fmt24hr: if kwargs.has_key('displaySeconds') and kwargs['displaySeconds']: kwargs['format'] = '24HHMMSS' del kwargs['displaySeconds'] else: kwargs['format'] = '24HHMM' else: if kwargs.has_key('displaySeconds') and kwargs['displaySeconds']: kwargs['format'] = 'HHMMSS' del kwargs['displaySeconds'] else: kwargs['format'] = 'HHMM' if not kwargs.has_key('useFixedWidthFont'): # allow control over font selection: kwargs['useFixedWidthFont'] = self.__useFixedWidthFont maskededit_kwargs = self.SetParameters(**kwargs) # allow for explicit size specification: if size != wx.DefaultSize: # override (and remove) "autofit" autoformat code in standard time formats: maskededit_kwargs['formatcodes'] = 'T!' # This allows range validation if set maskededit_kwargs['validFunc'] = self.IsInBounds # This allows range limits to affect insertion into control or not # dynamically without affecting individual field constraint validation maskededit_kwargs['retainFieldValidation'] = True # Now we can initialize the base control: BaseMaskedTextCtrl.__init__( self, parent, id=id, pos=pos, size=size, style = style, validator = validator, name = name, setupEventHandling = False, **maskededit_kwargs) # This makes ':' act like tab (after we fix each ':' key event to remove "shift") self._SetKeyHandler(':', self._OnChangeField) # This makes the up/down keys act like spin button controls: self._SetKeycodeHandler(wx.WXK_UP, self.__OnSpinUp) self._SetKeycodeHandler(wx.WXK_DOWN, self.__OnSpinDown) # This allows ! and c/C to set the control to the current time: self._SetKeyHandler('!', self.__OnSetToNow) self._SetKeyHandler('c', self.__OnSetToNow) self._SetKeyHandler('C', self.__OnSetToNow) # Set up event handling ourselves, so we can insert special # processing on the ":' key to remove the "shift" attribute # *before* the default handlers have been installed, so # that : takes you forward, not back, and so we can issue # EVT_TIMEUPDATE events on changes: self.Bind(wx.EVT_SET_FOCUS, self._OnFocus ) ## defeat automatic full selection self.Bind(wx.EVT_KILL_FOCUS, self._OnKillFocus ) ## run internal validator self.Bind(wx.EVT_LEFT_UP, self.__LimitSelection) ## limit selections to single field self.Bind(wx.EVT_LEFT_DCLICK, self._OnDoubleClick ) ## select field under cursor on dclick self.Bind(wx.EVT_KEY_DOWN, self._OnKeyDown ) ## capture control events not normally seen, eg ctrl-tab. self.Bind(wx.EVT_CHAR, self.__OnChar ) ## remove "shift" attribute from colon key event, ## then call BaseMaskedTextCtrl._OnChar with ## the possibly modified event. 
self.Bind(wx.EVT_TEXT, self.__OnTextChange, self ) ## color control appropriately and EVT_TIMEUPDATE events # Validate initial value and set if appropriate try: self.SetBounds(min, max) self.SetLimited(limited) self.SetValue(value) except: self.SetValue('00:00:00') if spinButton: self.BindSpinButton(spinButton) # bind spin button up/down events to this control def SetParameters(self, **kwargs): """ Function providing access to the parameters governing TimeCtrl display and bounds. """ ## dbg('TimeCtrl::SetParameters(%s)' % repr(kwargs), indent=1) maskededit_kwargs = {} reset_format = False if kwargs.has_key('display_seconds'): kwargs['displaySeconds'] = kwargs['display_seconds'] del kwargs['display_seconds'] if kwargs.has_key('format') and kwargs.has_key('displaySeconds'): del kwargs['displaySeconds'] # always apply format if specified # assign keyword args as appropriate: for key, param_value in kwargs.items(): if key not in TimeCtrl.valid_ctrl_params.keys(): raise AttributeError('invalid keyword argument "%s"' % key) if key == 'format': wxdt = wx.DateTimeFromDMY(1, 0, 1970) try: if wxdt.Format('%p') != 'AM': require24hr = True else: require24hr = False except: require24hr = True # handle both local or generic 'maskededit' autoformat codes: if param_value == 'HHMMSS' or param_value == 'TIMEHHMMSS': self.__displaySeconds = True self.__fmt24hr = False elif param_value == 'HHMM' or param_value == 'TIMEHHMM': self.__displaySeconds = False self.__fmt24hr = False elif param_value == '24HHMMSS' or param_value == '24HRTIMEHHMMSS': self.__displaySeconds = True self.__fmt24hr = True elif param_value == '24HHMM' or param_value == '24HRTIMEHHMM': self.__displaySeconds = False self.__fmt24hr = True else: raise AttributeError('"%s" is not a valid format' % param_value) if require24hr and not self.__fmt24hr: raise AttributeError('"%s" is an unsupported time format for the current locale' % param_value) reset_format = True elif key in ("displaySeconds", "display_seconds") and not kwargs.has_key('format'): self.__displaySeconds = param_value reset_format = True elif key == "min": min = param_value elif key == "max": max = param_value elif key == "limited": limited = param_value elif key == "useFixedWidthFont": maskededit_kwargs[key] = param_value elif key == "oob_color": maskededit_kwargs['invalidBackgroundColor'] = param_value if reset_format: if self.__fmt24hr: if self.__displaySeconds: maskededit_kwargs['autoformat'] = '24HRTIMEHHMMSS' else: maskededit_kwargs['autoformat'] = '24HRTIMEHHMM' # Set hour field to zero-pad, right-insert, require explicit field change, # select entire field on entry, and require a resultant valid entry # to allow character entry: hourfield = Field(formatcodes='0r<SV', validRegex='0\d|1\d|2[0123]', validRequired=True) else: if self.__displaySeconds: maskededit_kwargs['autoformat'] = 'TIMEHHMMSS' else: maskededit_kwargs['autoformat'] = 'TIMEHHMM' # Set hour field to allow spaces (at start), right-insert, # require explicit field change, select entire field on entry, # and require a resultant valid entry to allow character entry: hourfield = Field(formatcodes='_0<rSV', validRegex='0[1-9]| [1-9]|1[012]', validRequired=True) ampmfield = Field(formatcodes='S', emptyInvalid = True, validRequired = True) # Field 1 is always a zero-padded right-insert minute field, # similarly configured as above: minutefield = Field(formatcodes='0r<SV', validRegex='[0-5]\d', validRequired=True) fields = [ hourfield, minutefield ] if self.__displaySeconds: fields.append(copy.copy(minutefield)) # 
second field has same constraints as field 1
        if not self.__fmt24hr:
            fields.append(ampmfield)

        # set fields argument:
        maskededit_kwargs['fields'] = fields

        # This allows range validation if set
        maskededit_kwargs['validFunc'] = self.IsInBounds

        # This allows range limits to affect insertion into control or not
        # dynamically without affecting individual field constraint validation
        maskededit_kwargs['retainFieldValidation'] = True

        if hasattr(self, 'controlInitialized') and self.controlInitialized:
            self.SetCtrlParameters(**maskededit_kwargs)     # set appropriate parameters

            # Validate initial value and set if appropriate
            try:
                self.SetBounds(min, max)
                self.SetLimited(limited)
                self.SetValue(value)
            except:
                self.SetValue('00:00:00')
##            dbg(indent=0)
            return {}   # no arguments to return
        else:
##            dbg(indent=0)
            return maskededit_kwargs


    def BindSpinButton(self, sb):
        """
        This function binds an externally created spin button to the control,
        so that up/down events from the button automatically change the control.
        """
##        dbg('TimeCtrl::BindSpinButton')
        self.__spinButton = sb
        if self.__spinButton:
            # bind event handlers to spin ctrl
            self.__spinButton.Bind(wx.EVT_SPIN_UP, self.__OnSpinUp, self.__spinButton)
            self.__spinButton.Bind(wx.EVT_SPIN_DOWN, self.__OnSpinDown, self.__spinButton)


    def __repr__(self):
        return "<TimeCtrl: %s>" % self.GetValue()


    def SetValue(self, value):
        """
        Validating SetValue function for time values:
        This function will do dynamic type checking on the value argument,
        and convert wxDateTime, mxDateTime, or 12/24 format time string
        into the appropriate format string for the control.
        """
##        dbg('TimeCtrl::SetValue(%s)' % repr(value), indent=1)
        try:
            strtime = self._toGUI(self.__validateValue(value))
        except:
##            dbg('validation failed', indent=0)
            raise
##        dbg('strtime:', strtime)
        self._SetValue(strtime)
##        dbg(indent=0)

    def ChangeValue(self, value):
        """
        Validating ChangeValue function for time values:
        This function will do dynamic type checking on the value argument,
        and convert wxDateTime, mxDateTime, or 12/24 format time string
        into the appropriate format string for the control.
        """
##        dbg('TimeCtrl::ChangeValue(%s)' % repr(value), indent=1)
        try:
            strtime = self._toGUI(self.__validateValue(value))
        except:
##            dbg('validation failed', indent=0)
            raise
##        dbg('strtime:', strtime)
        self._ChangeValue(strtime)
##        dbg(indent=0)

    def GetValue(self,
                 as_wxDateTime = False,
                 as_mxDateTime = False,
                 as_wxTimeSpan = False,
                 as_mxDateTimeDelta = False):
        """
        This function returns the value of the display as a string by default,
        but supports return as a wx.DateTime, mx.DateTime, wx.TimeSpan, or
        mx.DateTimeDelta, if requested.  (Evaluated in the order above;
        the first one requested wins!)
        """
        if as_wxDateTime or as_mxDateTime or as_wxTimeSpan or as_mxDateTimeDelta:
            value = self.GetWxDateTime()
            if as_wxDateTime:
                pass
            elif as_mxDateTime:
                value = DateTime.DateTime(1970, 1, 1, value.GetHour(), value.GetMinute(), value.GetSecond())
            elif as_wxTimeSpan:
                value = wx.TimeSpan(value.GetHour(), value.GetMinute(), value.GetSecond())
            elif as_mxDateTimeDelta:
                value = DateTime.DateTimeDelta(0, value.GetHour(), value.GetMinute(), value.GetSecond())
        else:
            value = BaseMaskedTextCtrl.GetValue(self)
        return value

    def SetWxDateTime(self, wxdt):
        """
        Because SetValue can take a wx.DateTime, this is now just an alias.
        """
        self.SetValue(wxdt)

    def GetWxDateTime(self, value=None):
        """
        This function is the conversion engine for TimeCtrl; it takes
        one of the following types:

        * time string
        * wx.DateTime
        * wx.TimeSpan
        * mxDateTime
        * mxDateTimeDelta

        and converts it to a wx.DateTime that always has Jan 1, 1970 as its date
        portion, so that range comparisons around values can work using
        wx.DateTime's built-in comparison function.  If a value is not
        provided to convert, the string value of the control will be used.
        If the value is not one of the accepted types, a ValueError will be
        raised.
        """
        global accept_mx
##        dbg(suspend=1)
##        dbg('TimeCtrl::GetWxDateTime(%s)' % repr(value), indent=1)
        if value is None:
##            dbg('getting control value')
            value = self.GetValue()
##            dbg('value = "%s"' % value)

        if type(value) == types.UnicodeType:
            value = str(value)  # convert to regular string

        valid = True    # assume true
        if type(value) == types.StringType:

            # Construct constant wxDateTime, then try to parse the string:
            wxdt = wx.DateTimeFromDMY(1, 0, 1970)
##            dbg('attempting conversion')
            value = value.strip()   # (parser doesn't like leading spaces)
            valid = wxdt.ParseTime(value)

            if not valid:
                # deal with bug/deficiency in wx.DateTime:
                try:
                    # (assumes a bare 'HH:MM' or 'HH:MM:SS' string -- 5 or 8
                    # characters -- failed because the locale lacks AM/PM support)
                    if wxdt.Format('%p') not in ('AM', 'PM') and len(value) in (5, 8):
                        # couldn't parse the AM/PM field
                        raise ValueError('cannot convert string "%s" to valid time for the current locale; please use 24hr time instead' % value)
                    else:
##                        dbg(indent=0, suspend=0)
                        raise ValueError('cannot convert string "%s" to valid time' % value)
                except:
                    raise ValueError('cannot convert string "%s" to valid time for the current locale; please use 24hr time instead' % value)
        else:
            if isinstance(value, wx.DateTime):
                hour, minute, second = value.GetHour(), value.GetMinute(), value.GetSecond()
            elif isinstance(value, wx.TimeSpan):
                totalseconds = value.GetSeconds()
                hour = totalseconds / 3600
                minute = totalseconds / 60 - (hour * 60)
                second = totalseconds - ((hour * 3600) + (minute * 60))
            elif accept_mx and isinstance(value, DateTime.DateTimeType):
                hour, minute, second = value.hour, value.minute, value.second
            elif accept_mx and isinstance(value, DateTime.DateTimeDeltaType):
                hour, minute, second = value.hour, value.minute, value.second
            else:
                # Not a valid function argument
                if accept_mx:
                    error = 'GetWxDateTime requires wxDateTime, mxDateTime or parsable time string, passed %s' % repr(value)
                else:
                    error = 'GetWxDateTime requires wxDateTime or parsable time string, passed %s' % repr(value)
##                dbg(indent=0, suspend=0)
                raise ValueError(error)

            wxdt = wx.DateTimeFromDMY(1, 0, 1970)
            wxdt.SetHour(hour)
            wxdt.SetMinute(minute)
            wxdt.SetSecond(second)

##        dbg('wxdt:', wxdt, indent=0, suspend=0)
        return wxdt

    def SetMxDateTime(self, mxdt):
        """
        Because SetValue can take an mx.DateTime (if DateTime is importable),
        this is now just an alias.
        """
        self.SetValue(mxdt)

    def GetMxDateTime(self, value=None):
        """
        Returns the value of the control as an mx.DateTime, with the date
        portion set to January 1, 1970.
        """
        if value is None:
            t = self.GetValue(as_mxDateTime=True)
        else:
            # Convert string 1st to wxDateTime, then use components, since
            # mx' DateTime.Parser.TimeFromString() doesn't handle AM/PM:
            wxdt = self.GetWxDateTime(value)
            hour, minute, second = wxdt.GetHour(), wxdt.GetMinute(), wxdt.GetSecond()
            t = DateTime.DateTime(1970, 1, 1) + DateTime.DateTimeDelta(0, hour, minute, second)
        return t
    def SetMin(self, min=None):
        """
        Sets the minimum value of the control.  If a value of None
        is provided, then the control will have no explicit minimum value.
        If the value specified is greater than the current maximum value,
        then the function returns False and the minimum will not change from
        its current setting.  On success, the function returns True.

        If successful and the current value is lower than the new lower
        bound, if the control is limited, the value will be automatically
        adjusted to the new minimum value; if not limited, the value in the
        control will be colored as invalid.
        """
##        dbg('TimeCtrl::SetMin(%s)'% repr(min), indent=1)
        if min is not None:
            try:
                min = self.GetWxDateTime(min)
                self.__min = self._toGUI(min)
            except:
##                dbg('exception occurred', indent=0)
                return False
        else:
            self.__min = min

        if self.IsLimited() and not self.IsInBounds():
            self.SetLimited(self.__limited) # force limited value
        else:
            self._CheckValid()
        ret = True
##        dbg('ret:', ret, indent=0)
        return ret

    def GetMin(self, as_string = False):
        """
        Gets the minimum value of the control.

        If None, it will return None.  Otherwise it will return
        the current minimum bound on the control, as a wxDateTime
        by default, or as a string if as_string argument is True.
        """
##        dbg(suspend=1)
##        dbg('TimeCtrl::GetMin, as_string?', as_string, indent=1)
        if self.__min is None:
##            dbg('(min == None)')
            ret = self.__min
        elif as_string:
            ret = self.__min
##            dbg('ret:', ret)
        else:
            try:
                ret = self.GetWxDateTime(self.__min)
            except:
##                dbg(suspend=0)
##                dbg('exception occurred', indent=0)
                raise
##            dbg('ret:', repr(ret))
##        dbg(indent=0, suspend=0)
        return ret

    def SetMax(self, max=None):
        """
        Sets the maximum value of the control.  If a value of None
        is provided, then the control will have no explicit maximum value.
        If the value specified is less than the current minimum value, then
        the function returns False and the maximum will not change from its
        current setting.  On success, the function returns True.

        If successful and the current value is greater than the new upper
        bound, if the control is limited the value will be automatically
        adjusted to this maximum value; if not limited, the value in the
        control will be colored as invalid.
        """
##        dbg('TimeCtrl::SetMax(%s)' % repr(max), indent=1)
        if max is not None:
            try:
                max = self.GetWxDateTime(max)
                self.__max = self._toGUI(max)
            except:
##                dbg('exception occurred', indent=0)
                return False
        else:
            self.__max = max
##        dbg('max:', repr(self.__max))

        if self.IsLimited() and not self.IsInBounds():
            self.SetLimited(self.__limited) # force limited value
        else:
            self._CheckValid()
        ret = True
##        dbg('ret:', ret, indent=0)
        return ret

    def GetMax(self, as_string = False):
        """
        Gets the maximum value of the control.

        If None, it will return None.  Otherwise it will return
        the current maximum bound on the control, as a wxDateTime
        by default, or as a string if as_string argument is True.
        """
##        dbg(suspend=1)
##        dbg('TimeCtrl::GetMax, as_string?', as_string, indent=1)
        if self.__max is None:
##            dbg('(max == None)')
            ret = self.__max
        elif as_string:
            ret = self.__max
##            dbg('ret:', ret)
        else:
            try:
                ret = self.GetWxDateTime(self.__max)
            except:
##                dbg(suspend=0)
##                dbg('exception occurred', indent=0)
                raise
##            dbg('ret:', repr(ret))
##        dbg(indent=0, suspend=0)
        return ret

    def SetBounds(self, min=None, max=None):
        """
        This function is a convenience function for setting the min and max
        values at the same time.  The function only applies the maximum bound
        if setting the minimum bound is successful, and returns True
        only if both operations succeed.

        **NOTE:** leaving out an argument will remove the corresponding bound.
        """
        ret = self.SetMin(min)
        return ret and self.SetMax(max)

    def GetBounds(self, as_string = False):
        """
        This function returns a two-tuple (min,max), indicating the
        current bounds of the control.  Each value can be None if
        that bound is not set.
        """
        return (self.GetMin(as_string), self.GetMax(as_string))

    def SetLimited(self, limited):
        """
        If called with a value of True, this function will cause the control
        to limit the value to fall within the bounds currently specified.
        If the control's value currently exceeds the bounds, it will then
        be limited accordingly.

        If called with a value of False, this function will disable value
        limiting, but coloring of out-of-bounds values will still take
        place if bounds have been set for the control.
        """
##        dbg('TimeCtrl::SetLimited(%d)' % limited, indent=1)
        self.__limited = limited

        if not limited:
            self.SetMaskParameters(validRequired = False)
            self._CheckValid()
##            dbg(indent=0)
            return

##        dbg('requiring valid value')
        self.SetMaskParameters(validRequired = True)

        min = self.GetMin()
        max = self.GetMax()
        if min is None or max is None:
##            dbg('both bounds not set; no further action taken')
            return  # can't limit without 2 bounds

        elif not self.IsInBounds():
            # set value to the nearest bound:
            try:
                value = self.GetWxDateTime()
            except:
##                dbg('exception occurred', indent=0)
                raise

            if min <= max:   # valid range doesn't span midnight
##                dbg('min <= max')
                # which makes the "nearest bound" computation trickier...

                # determine how long the "invalid" pie wedge is, and cut
                # this interval in half for comparison purposes:

                # Note: relies on min and max and value date portions
                # always being the same.
                interval = (min + wx.TimeSpan(24, 0, 0, 0)) - max

                half_interval = wx.TimeSpan(
                                    0,      # hours
                                    0,      # minutes
                                    interval.GetSeconds() / 2,      # seconds
                                    0)      # msec

                if value < min:   # min is on next day, so use value on
                    # "next day" for "nearest" interval calculation:
                    cmp_value = value + wx.TimeSpan(24, 0, 0, 0)
                else:   # before midnight; ok
                    cmp_value = value

                if (cmp_value - max) > half_interval:
##                    dbg('forcing value to min (%s)' % min.FormatTime())
                    self.SetValue(min)
                else:
##                    dbg('forcing value to max (%s)' % max.FormatTime())
                    self.SetValue(max)
            else:
##                dbg('max < min')
                # therefore  max < value < min  guaranteed to be true,
                # so "nearest bound" calculation is much easier:
                if (value - max) >= (min - value):
                    # current value closer to min; pick that edge of pie wedge
##                    dbg('forcing value to min (%s)' % min.FormatTime())
                    self.SetValue(min)
                else:
##                    dbg('forcing value to max (%s)' % max.FormatTime())
                    self.SetValue(max)

##        dbg(indent=0)


    def IsLimited(self):
        """
        Returns True if the control is currently limiting the
        value to fall within any current bounds.  *Note:* can
        be set even if there are no current bounds.
        """
        return self.__limited

    def IsInBounds(self, value=None):
        """
        Returns True if no value is specified and the current value
        of the control falls within the current bounds.  As the clock
        is a "circle", both minimum and maximum bounds must be set for
        a value to ever be considered "out of bounds".

        This function can also be called with a value to see if that
        value would fall within the current bounds of the given control.
        """
        if value is not None:
            try:
                value = self.GetWxDateTime(value)   # try to regularize passed value
            except ValueError:
##                dbg('ValueError getting wxDateTime for %s' % repr(value), indent=0)
                raise

##        dbg('TimeCtrl::IsInBounds(%s)' % repr(value), indent=1)
        if self.__min is None or self.__max is None:
##            dbg(indent=0)
            return True

        elif value is None:
            try:
                value = self.GetWxDateTime()
            except:
##                dbg('exception occurred', indent=0)
                raise

##        dbg('value:', value.FormatTime())

        # Get wxDateTime representations of bounds:
        min = self.GetMin()
        max = self.GetMax()

        midnight = wx.DateTimeFromDMY(1, 0, 1970)
        if min <= max:   # they don't span midnight
            ret = min <= value <= max
        else:
            # have to break into 2 tests; to be in bounds
            # either "min" <= value (<= midnight of *next day*)
            # or midnight <= value <= "max"
            ret = min <= value or (midnight <= value <= max)
##        dbg('in bounds?', ret, indent=0)
        return ret

    def IsValid(self, value):
        """
        Can be used to determine if a given value would be a legal and
        in-bounds value for the control.
        """
        try:
            self.__validateValue(value)
            return True
        except ValueError:
            return False

    def SetFormat(self, format):
        self.SetParameters(format=format)

    def GetFormat(self):
        if self.__displaySeconds:
            if self.__fmt24hr:
                return '24HHMMSS'
            else:
                return 'HHMMSS'
        else:
            if self.__fmt24hr:
                return '24HHMM'
            else:
                return 'HHMM'

#-------------------------------------------------------------------------------------------------------------
# these are private functions and overrides:

    def __OnTextChange(self, event=None):
##        dbg('TimeCtrl::OnTextChange', indent=1)

        # Allow the masked edit base control to color the control as
        # appropriate, and Skip the EVT_TEXT event (if appropriate.)
        ##! WS: For some inexplicable reason, every wxTextCtrl.SetValue()
        ## call is generating two (2) EVT_TEXT events. (!)
        ## The only mechanism I can find to mask this problem is to
        ## keep track of last value seen, and declare a valid EVT_TEXT
        ## event iff the value has actually changed.  The masked edit
        ## OnTextChange routine does this, and returns True on a valid event,
        ## False otherwise.
        if not BaseMaskedTextCtrl._OnTextChange(self, event):
            return

##        dbg('firing TimeUpdatedEvent...')
        evt = TimeUpdatedEvent(self.GetId(), self.GetValue())
        evt.SetEventObject(self)
        self.GetEventHandler().ProcessEvent(evt)
##        dbg(indent=0)

    def SetInsertionPoint(self, pos):
        """
        This override records the specified position and associated cell
        before calling base class' function.  This is necessary to
        handle the optional spin button, because the insertion point is lost
        when the focus shifts to the spin button.
        """
##        dbg('TimeCtrl::SetInsertionPoint', pos, indent=1)
        BaseMaskedTextCtrl.SetInsertionPoint(self, pos)   # (causes EVT_TEXT event to fire)
        self.__posCurrent = self.GetInsertionPoint()
##        dbg(indent=0)

    def SetSelection(self, sel_start, sel_to):
##        dbg('TimeCtrl::SetSelection', sel_start, sel_to, indent=1)

        # Adjust selection range to legal extent if not already
        if sel_start < 0:
            sel_start = 0

        if self.__posCurrent != sel_start:   # force selection and insertion point to match
            self.SetInsertionPoint(sel_start)
        cell_start, cell_end = self._FindField(sel_start)._extent
        if not cell_start <= sel_to <= cell_end:
            sel_to = cell_end

        self.__bSelection = sel_start != sel_to
        BaseMaskedTextCtrl.SetSelection(self, sel_start, sel_to)
##        dbg(indent=0)

    def __OnSpin(self, key):
        """
        This is the function that gets called in response to up/down arrow or
        bound spin button events.
        """
        self.__IncrementValue(key, self.__posCurrent)   # changes the value

        # Ensure adjusted control regains focus and has adjusted portion
        # selected:
        self.SetFocus()
        start, end = self._FindField(self.__posCurrent)._extent
        self.SetInsertionPoint(start)
        self.SetSelection(start, end)
##        dbg('current position:', self.__posCurrent)

    def __OnSpinUp(self, event):
        """
        Event handler for any bound spin button on EVT_SPIN_UP;
        causes control to behave as if up arrow was pressed.
        """
##        dbg('TimeCtrl::OnSpinUp', indent=1)
        self.__OnSpin(wx.WXK_UP)
        keep_processing = False
##        dbg(indent=0)
        return keep_processing

    def __OnSpinDown(self, event):
        """
        Event handler for any bound spin button on EVT_SPIN_DOWN;
        causes control to behave as if down arrow was pressed.
        """
##        dbg('TimeCtrl::OnSpinDown', indent=1)
        self.__OnSpin(wx.WXK_DOWN)
        keep_processing = False
##        dbg(indent=0)
        return keep_processing

    def __OnChar(self, event):
        """
        Handler to explicitly look for ':' keyevents, and if found,
        clear the m_shiftDown field, so it will behave as forward tab.
        It then calls the base control's _OnChar routine with the modified
        event instance.
        """
##        dbg('TimeCtrl::OnChar', indent=1)
        keycode = event.GetKeyCode()
##        dbg('keycode:', keycode)
        if keycode == ord(':'):
##            dbg('colon seen! removing shift attribute')
            event.m_shiftDown = False
        BaseMaskedTextCtrl._OnChar(self, event)     ## handle each keypress
##        dbg(indent=0)

    def __OnSetToNow(self, event):
        """
        This is the key handler for '!', 'c' and 'C'; this allows the user to
        quickly set the value of the control to the current time.
        """
        self.SetValue(wx.DateTime_Now().FormatTime())
        keep_processing = False
        return keep_processing

    def __LimitSelection(self, event):
        """
        Event handler for motion events; this handler
        limits the selection to the new cell boundaries.
        """
##        dbg('TimeCtrl::LimitSelection', indent=1)
        pos = self.GetInsertionPoint()
        self.__posCurrent = pos
        sel_start, sel_to = self.GetSelection()
        selection = sel_start != sel_to
        if selection:
            # only allow selection to end of current cell:
            start, end = self._FindField(sel_start)._extent
            if sel_to < pos:
                sel_to = start
            elif sel_to > pos:
                sel_to = end

##        dbg('new pos =', self.__posCurrent, 'select to ', sel_to)
        self.SetInsertionPoint(self.__posCurrent)
        self.SetSelection(self.__posCurrent, sel_to)
        if event:
            event.Skip()
##        dbg(indent=0)

    def __IncrementValue(self, key, pos):
##        dbg('TimeCtrl::IncrementValue', key, pos, indent=1)
        text = self.GetValue()
        field = self._FindField(pos)
##        dbg('field: ', field._index)
        start, end = field._extent
        slice = text[start:end]
        if key == wx.WXK_UP:
            increment = 1
        else:
            increment = -1

        if slice in ('A', 'P'):
            if slice == 'A':
                newslice = 'P'
            elif slice == 'P':
                newslice = 'A'
            newvalue = text[:start] + newslice + text[end:]

        elif field._index == 0:
            # adjusting this field is trickier, as its value can affect the
            # am/pm setting.
So, we use wxDateTime to generate a new value for us: # (Use a fixed date not subject to DST variations:) converter = wx.DateTimeFromDMY(1, 0, 1970) ## dbg('text: "%s"' % text) converter.ParseTime(text.strip()) currenthour = converter.GetHour() ## dbg('current hour:', currenthour) newhour = (currenthour + increment) % 24 ## dbg('newhour:', newhour) converter.SetHour(newhour) ## dbg('converter.GetHour():', converter.GetHour()) newvalue = converter # take advantage of auto-conversion for am/pm in .SetValue() else: # minute or second field; handled the same way: newslice = "%02d" % ((int(slice) + increment) % 60) newvalue = text[:start] + newslice + text[end:] try: self.SetValue(newvalue) except ValueError: # must not be in bounds: if not wx.Validator_IsSilent(): wx.Bell() ## dbg(indent=0) def _toGUI( self, wxdt ): """ This function takes a wxdt as an unambiguous representation of a time, and converts it to a string appropriate for the format of the control. """ if self.__fmt24hr: if self.__displaySeconds: strval = wxdt.Format('%H:%M:%S') else: strval = wxdt.Format('%H:%M') else: if self.__displaySeconds: strval = wxdt.Format('%I:%M:%S %p') else: strval = wxdt.Format('%I:%M %p') return strval def __validateValue( self, value ): """ This function converts the value to a wxDateTime if not already one, does bounds checking and raises ValueError if argument is not a valid value for the control as currently specified. It is used by both the SetValue() and the IsValid() methods. """ ## dbg('TimeCtrl::__validateValue(%s)' % repr(value), indent=1) if not value: ## dbg(indent=0) raise ValueError('%s not a valid time value' % repr(value)) valid = True # assume true try: value = self.GetWxDateTime(value) # regularize form; can generate ValueError if problem doing so except: ## dbg('exception occurred', indent=0) raise if self.IsLimited() and not self.IsInBounds(value): ## dbg(indent=0) raise ValueError ( 'value %s is not within the bounds of the control' % str(value) ) ## dbg(indent=0) return value #---------------------------------------------------------------------------- # Test jig for TimeCtrl: if __name__ == '__main__': import traceback class TestPanel(wx.Panel): def __init__(self, parent, id, pos = wx.DefaultPosition, size = wx.DefaultSize, fmt24hr = 0, test_mx = 0, style = wx.TAB_TRAVERSAL ): wx.Panel.__init__(self, parent, id, pos, size, style) self.test_mx = test_mx self.tc = TimeCtrl(self, 10, fmt24hr = fmt24hr) sb = wx.SpinButton( self, 20, wx.DefaultPosition, (-1,20), 0 ) self.tc.BindSpinButton(sb) sizer = wx.BoxSizer( wx.HORIZONTAL ) sizer.Add( self.tc, 0, wx.ALIGN_CENTRE|wx.LEFT|wx.TOP|wx.BOTTOM, 5 ) sizer.Add( sb, 0, wx.ALIGN_CENTRE|wx.RIGHT|wx.TOP|wx.BOTTOM, 5 ) self.SetAutoLayout( True ) self.SetSizer( sizer ) sizer.Fit( self ) sizer.SetSizeHints( self ) self.Bind(EVT_TIMEUPDATE, self.OnTimeChange, self.tc) def OnTimeChange(self, event): ## dbg('OnTimeChange: value = ', event.GetValue()) wxdt = self.tc.GetWxDateTime() ## dbg('wxdt =', wxdt.GetHour(), wxdt.GetMinute(), wxdt.GetSecond()) if self.test_mx: mxdt = self.tc.GetMxDateTime() ## dbg('mxdt =', mxdt.hour, mxdt.minute, mxdt.second) class MyApp(wx.App): def OnInit(self): import sys fmt24hr = '24' in sys.argv test_mx = 'mx' in sys.argv try: frame = wx.Frame(None, -1, "TimeCtrl Test", (20,20), (100,100) ) panel = TestPanel(frame, -1, (-1,-1), fmt24hr=fmt24hr, test_mx = test_mx) frame.Show(True) except: traceback.print_exc() return False return True try: app = MyApp(0) app.MainLoop() except: traceback.print_exc() __i=0 ## CHANGELOG: ## 
====================
## Version 1.3
## 1. Converted docstrings to reST format, added doc for ePyDoc.
## 2. Renamed helper functions, vars etc. not intended to be visible in public
##    interface to code.
##
## Version 1.2
## 1. Changed parameter name display_seconds to displaySeconds, to follow
##    other masked edit conventions.
## 2. Added format parameter, to remove need to use both fmt24hr and displaySeconds.
## 3. Changed inheritance to use BaseMaskedTextCtrl, to remove exposure of
##    nonsensical parameter methods from the control, so it will work
##    properly with Boa.
```

#### File: lib/mixins/inspection.py
```python
import wx
from wx.lib.inspection import InspectionTool

#----------------------------------------------------------------------------

class InspectionMixin(object):
    """
    This class is intended to be used as a mix-in with the wx.App class.
    When used it will add the ability to pop up an InspectionFrame window
    where the widget under the mouse cursor will be selected in the tree
    and loaded into the shell's namespace as 'obj'.  The default key
    sequence to activate the inspector is Ctrl-Alt-I (or Cmd-Alt-I on
    Mac) but this can be changed via parameters to the `Init` method,
    or the application can call `ShowInspectionTool` from other event
    handlers if desired.

    To use this class simply derive a class from wx.App and
    InspectionMixin and then call the `Init` method from the app's
    OnInit.
    """
    def InitInspection(self, pos=wx.DefaultPosition, size=wx.Size(850,700),
             config=None, locals=None,
             alt=True, cmd=True, shift=False, keyCode=ord('I')):
        """
        Make the event binding that will activate the InspectionFrame window.
        """
        self.Bind(wx.EVT_KEY_DOWN, self._OnKeyPress)
        self._alt = alt
        self._cmd = cmd
        self._shift = shift
        self._keyCode = keyCode
        InspectionTool().Init(pos, size, config, locals, self)

    def _OnKeyPress(self, evt):
        """
        Event handler, check for our hot-key.  Normally it is
        Ctrl-Alt-I but that can be changed by what is passed to the Init
        method.
        """
        if evt.AltDown() == self._alt and \
           evt.CmdDown() == self._cmd and \
           evt.ShiftDown() == self._shift and \
           evt.GetKeyCode() == self._keyCode:
            self.ShowInspectionTool()
        else:
            evt.Skip()

    Init = InitInspection # compatibility alias

    def ShowInspectionTool(self):
        """
        Show the Inspection tool, creating it if necessary, setting it
        to display the widget under the cursor.
        """
        # get the current widget under the mouse
        wnd = wx.FindWindowAtPointer()
        InspectionTool().Show(wnd)


#---------------------------------------------------------------------------

class InspectableApp(wx.App, InspectionMixin):
    """
    A simple mix of wx.App and InspectionMixin that can be used stand-alone.
""" def OnInit(self): self.InitInspection() return True #--------------------------------------------------------------------------- ``` #### File: wx-3.0-msw/wx/propgrid.py ```python import _propgrid import new new_instancemethod = new.instancemethod def _swig_setattr_nondynamic(self,class_type,name,value,static=1): if (name == "thisown"): return self.this.own(value) if (name == "this"): if type(value).__name__ == 'PySwigObject': self.__dict__[name] = value return method = class_type.__swig_setmethods__.get(name,None) if method: return method(self,value) if (not static) or hasattr(self,name): self.__dict__[name] = value else: raise AttributeError("You cannot add attributes to %s" % self) def _swig_setattr(self,class_type,name,value): return _swig_setattr_nondynamic(self,class_type,name,value,0) def _swig_getattr(self,class_type,name): if (name == "thisown"): return self.this.own() method = class_type.__swig_getmethods__.get(name,None) if method: return method(self) raise AttributeError,name def _swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) import types try: _object = types.ObjectType _newclass = 1 except AttributeError: class _object : pass _newclass = 0 del types def _swig_setattr_nondynamic_method(set): def set_attr(self,name,value): if (name == "thisown"): return self.this.own(value) if hasattr(self,name) or (name == "this"): set(self,name,value) else: raise AttributeError("You cannot add attributes to %s" % self) return set_attr import _core import _windows wx = _core __docfilter__ = wx.__DocFilter(globals()) PG_XBEFORETEXT = _propgrid.PG_XBEFORETEXT PG_XBEFOREWIDGET = _propgrid.PG_XBEFOREWIDGET PG_ICON_WIDTH = _propgrid.PG_ICON_WIDTH PG_USE_RENDERER_NATIVE = _propgrid.PG_USE_RENDERER_NATIVE PG_SUPPORT_TOOLTIPS = _propgrid.PG_SUPPORT_TOOLTIPS PG_CUSTOM_IMAGE_WIDTH = _propgrid.PG_CUSTOM_IMAGE_WIDTH PG_NO_CHILD_EVT_MOTION = _propgrid.PG_NO_CHILD_EVT_MOTION PG_NAT_BUTTON_BORDER_ANY = _propgrid.PG_NAT_BUTTON_BORDER_ANY PG_NAT_BUTTON_BORDER_X = _propgrid.PG_NAT_BUTTON_BORDER_X PG_NAT_BUTTON_BORDER_Y = _propgrid.PG_NAT_BUTTON_BORDER_Y PG_REFRESH_CONTROLS = _propgrid.PG_REFRESH_CONTROLS PG_CONTROL_MARGIN = _propgrid.PG_CONTROL_MARGIN CC_CUSTOM_IMAGE_MARGIN1 = _propgrid.CC_CUSTOM_IMAGE_MARGIN1 CC_CUSTOM_IMAGE_MARGIN2 = _propgrid.CC_CUSTOM_IMAGE_MARGIN2 DEFAULT_IMAGE_OFFSET_INCREMENT = _propgrid.DEFAULT_IMAGE_OFFSET_INCREMENT PG_DRAG_MARGIN = _propgrid.PG_DRAG_MARGIN PG_SPLITTERX_DETECTMARGIN1 = _propgrid.PG_SPLITTERX_DETECTMARGIN1 PG_SPLITTERX_DETECTMARGIN2 = _propgrid.PG_SPLITTERX_DETECTMARGIN2 PG_SMALL_SCREEN = _propgrid.PG_SMALL_SCREEN PG_COMPATIBILITY_1_4 = _propgrid.PG_COMPATIBILITY_1_4 PG_INCLUDE_ADVPROPS = _propgrid.PG_INCLUDE_ADVPROPS PG_INCLUDE_CHECKBOX = _propgrid.PG_INCLUDE_CHECKBOX PG_KEEP_STRUCTURE = _propgrid.PG_KEEP_STRUCTURE PG_RECURSE = _propgrid.PG_RECURSE PG_INC_ATTRIBUTES = _propgrid.PG_INC_ATTRIBUTES PG_RECURSE_STARTS = _propgrid.PG_RECURSE_STARTS PG_FORCE = _propgrid.PG_FORCE PG_SORT_TOP_LEVEL_ONLY = _propgrid.PG_SORT_TOP_LEVEL_ONLY PG_DONT_RECURSE = _propgrid.PG_DONT_RECURSE PG_FULL_VALUE = _propgrid.PG_FULL_VALUE PG_REPORT_ERROR = _propgrid.PG_REPORT_ERROR PG_PROPERTY_SPECIFIC = _propgrid.PG_PROPERTY_SPECIFIC PG_EDITABLE_VALUE = _propgrid.PG_EDITABLE_VALUE PG_COMPOSITE_FRAGMENT = _propgrid.PG_COMPOSITE_FRAGMENT PG_UNEDITABLE_COMPOSITE_FRAGMENT = _propgrid.PG_UNEDITABLE_COMPOSITE_FRAGMENT PG_VALUE_IS_CURRENT = _propgrid.PG_VALUE_IS_CURRENT 
PG_PROGRAMMATIC_VALUE = _propgrid.PG_PROGRAMMATIC_VALUE PG_SETVAL_REFRESH_EDITOR = _propgrid.PG_SETVAL_REFRESH_EDITOR PG_SETVAL_AGGREGATED = _propgrid.PG_SETVAL_AGGREGATED PG_SETVAL_FROM_PARENT = _propgrid.PG_SETVAL_FROM_PARENT PG_SETVAL_BY_USER = _propgrid.PG_SETVAL_BY_USER class PGPaintData(object): """Proxy of C++ PGPaintData class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self): raise AttributeError, "No constructor defined" __repr__ = _swig_repr m_parent = property(_propgrid.PGPaintData_m_parent_get, _propgrid.PGPaintData_m_parent_set) m_choiceItem = property(_propgrid.PGPaintData_m_choiceItem_get, _propgrid.PGPaintData_m_choiceItem_set) m_drawnWidth = property(_propgrid.PGPaintData_m_drawnWidth_get, _propgrid.PGPaintData_m_drawnWidth_set) m_drawnHeight = property(_propgrid.PGPaintData_m_drawnHeight_get, _propgrid.PGPaintData_m_drawnHeight_set) _propgrid.PGPaintData_swigregister(PGPaintData) PG_CUSTOM_IMAGE_SPACINGY = _propgrid.PG_CUSTOM_IMAGE_SPACINGY PG_CAPRECTXMARGIN = _propgrid.PG_CAPRECTXMARGIN PG_CAPRECTYMARGIN = _propgrid.PG_CAPRECTYMARGIN class PGCell(_core.Object): """Proxy of C++ PGCell class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(self) -> PGCell __init__(self, PGCell other) -> PGCell __init__(self, String text, Bitmap bitmap=wxNullBitmap, Colour fgCol=wxNullColour, Colour bgCol=wxNullColour) -> PGCell """ _propgrid.PGCell_swiginit(self,_propgrid.new_PGCell(*args)) __swig_destroy__ = _propgrid.delete_PGCell __del__ = lambda self : None; def GetData(*args): """ GetData(self) GetData(self) """ return _propgrid.PGCell_GetData(*args) def HasText(*args, **kwargs): """HasText(self) -> bool""" return _propgrid.PGCell_HasText(*args, **kwargs) def SetEmptyData(*args, **kwargs): """SetEmptyData(self)""" return _propgrid.PGCell_SetEmptyData(*args, **kwargs) def MergeFrom(*args, **kwargs): """MergeFrom(self, PGCell srcCell)""" return _propgrid.PGCell_MergeFrom(*args, **kwargs) def SetText(*args, **kwargs): """SetText(self, String text)""" return _propgrid.PGCell_SetText(*args, **kwargs) def SetBitmap(*args, **kwargs): """SetBitmap(self, Bitmap bitmap)""" return _propgrid.PGCell_SetBitmap(*args, **kwargs) def SetFgCol(*args, **kwargs): """SetFgCol(self, Colour col)""" return _propgrid.PGCell_SetFgCol(*args, **kwargs) def SetFont(*args, **kwargs): """SetFont(self, Font font)""" return _propgrid.PGCell_SetFont(*args, **kwargs) def SetBgCol(*args, **kwargs): """SetBgCol(self, Colour col)""" return _propgrid.PGCell_SetBgCol(*args, **kwargs) def GetText(*args, **kwargs): """GetText(self) -> String""" return _propgrid.PGCell_GetText(*args, **kwargs) def GetBitmap(*args, **kwargs): """GetBitmap(self) -> Bitmap""" return _propgrid.PGCell_GetBitmap(*args, **kwargs) def GetFgCol(*args, **kwargs): """GetFgCol(self) -> Colour""" return _propgrid.PGCell_GetFgCol(*args, **kwargs) def GetFont(*args, **kwargs): """GetFont(self) -> Font""" return _propgrid.PGCell_GetFont(*args, **kwargs) def GetBgCol(*args, **kwargs): """GetBgCol(self) -> Colour""" return _propgrid.PGCell_GetBgCol(*args, **kwargs) def IsInvalid(*args, **kwargs): """IsInvalid(self) -> bool""" return _propgrid.PGCell_IsInvalid(*args, **kwargs) _propgrid.PGCell_swigregister(PGCell) PG_PROP_MODIFIED = _propgrid.PG_PROP_MODIFIED PG_PROP_DISABLED = _propgrid.PG_PROP_DISABLED PG_PROP_HIDDEN = _propgrid.PG_PROP_HIDDEN PG_PROP_CUSTOMIMAGE = 
_propgrid.PG_PROP_CUSTOMIMAGE PG_PROP_NOEDITOR = _propgrid.PG_PROP_NOEDITOR PG_PROP_COLLAPSED = _propgrid.PG_PROP_COLLAPSED PG_PROP_INVALID_VALUE = _propgrid.PG_PROP_INVALID_VALUE PG_PROP_WAS_MODIFIED = _propgrid.PG_PROP_WAS_MODIFIED PG_PROP_AGGREGATE = _propgrid.PG_PROP_AGGREGATE PG_PROP_CHILDREN_ARE_COPIES = _propgrid.PG_PROP_CHILDREN_ARE_COPIES PG_PROP_PROPERTY = _propgrid.PG_PROP_PROPERTY PG_PROP_CATEGORY = _propgrid.PG_PROP_CATEGORY PG_PROP_MISC_PARENT = _propgrid.PG_PROP_MISC_PARENT PG_PROP_READONLY = _propgrid.PG_PROP_READONLY PG_PROP_COMPOSED_VALUE = _propgrid.PG_PROP_COMPOSED_VALUE PG_PROP_USES_COMMON_VALUE = _propgrid.PG_PROP_USES_COMMON_VALUE PG_PROP_AUTO_UNSPECIFIED = _propgrid.PG_PROP_AUTO_UNSPECIFIED PG_PROP_CLASS_SPECIFIC_1 = _propgrid.PG_PROP_CLASS_SPECIFIC_1 PG_PROP_CLASS_SPECIFIC_2 = _propgrid.PG_PROP_CLASS_SPECIFIC_2 PG_PROP_BEING_DELETED = _propgrid.PG_PROP_BEING_DELETED class PGChoices(object): """Proxy of C++ PGChoices class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(self) -> PGChoices __init__(self, PGChoices a) -> PGChoices __init__(self, wxChar labels, long values=None) -> PGChoices __init__(self, wxArrayString labels, wxArrayInt values=wxArrayInt()) -> PGChoices __init__(self, data) -> PGChoices """ _propgrid.PGChoices_swiginit(self,_propgrid.new_PGChoices(*args)) __swig_destroy__ = _propgrid.delete_PGChoices __del__ = lambda self : None; def Add(*args): """ Add(self, wxChar labels, ValArrItem values=None) Add(self, wxArrayString arr, wxArrayInt arrint=wxArrayInt()) Add(self, String label, int value=INT_MAX) Add(self, String label, Bitmap bitmap, int value=INT_MAX) Add(self, entry) """ return _propgrid.PGChoices_Add(*args) def AddAsSorted(*args, **kwargs): """AddAsSorted(self, String label, int value=INT_MAX)""" return _propgrid.PGChoices_AddAsSorted(*args, **kwargs) def Assign(*args, **kwargs): """Assign(self, PGChoices a)""" return _propgrid.PGChoices_Assign(*args, **kwargs) def AssignData(*args, **kwargs): """AssignData(self, data)""" return _propgrid.PGChoices_AssignData(*args, **kwargs) def Clear(*args, **kwargs): """Clear(self)""" return _propgrid.PGChoices_Clear(*args, **kwargs) def Copy(*args, **kwargs): """Copy(self) -> PGChoices""" return _propgrid.PGChoices_Copy(*args, **kwargs) def EnsureData(*args, **kwargs): """EnsureData(self)""" return _propgrid.PGChoices_EnsureData(*args, **kwargs) def GetId(*args, **kwargs): """GetId(self) -> PGChoicesId""" return _propgrid.PGChoices_GetId(*args, **kwargs) def GetLabel(*args, **kwargs): """GetLabel(self, int ind) -> String""" return _propgrid.PGChoices_GetLabel(*args, **kwargs) def GetCount(*args, **kwargs): """GetCount(self) -> int""" return _propgrid.PGChoices_GetCount(*args, **kwargs) def GetValue(*args, **kwargs): """GetValue(self, int ind) -> int""" return _propgrid.PGChoices_GetValue(*args, **kwargs) def GetValuesForStrings(*args, **kwargs): """GetValuesForStrings(self, wxArrayString strings) -> wxArrayInt""" return _propgrid.PGChoices_GetValuesForStrings(*args, **kwargs) def GetIndicesForStrings(*args, **kwargs): """GetIndicesForStrings(self, wxArrayString strings, wxArrayString unmatched=None) -> wxArrayInt""" return _propgrid.PGChoices_GetIndicesForStrings(*args, **kwargs) def Index(*args): """ Index(self, String str) -> int Index(self, int val) -> int """ return _propgrid.PGChoices_Index(*args) def Insert(*args): """ Insert(self, String label, int index, int value=INT_MAX) Insert(self, 
entry, int index) """ return _propgrid.PGChoices_Insert(*args) def IsOk(*args, **kwargs): """IsOk(self) -> bool""" return _propgrid.PGChoices_IsOk(*args, **kwargs) def Item(*args): """ Item(self, int i) Item(self, int i) """ return _propgrid.PGChoices_Item(*args) def RemoveAt(*args, **kwargs): """RemoveAt(self, size_t nIndex, size_t count=1)""" return _propgrid.PGChoices_RemoveAt(*args, **kwargs) def Set(*args): """ Set(self, wxChar labels, long values=None) Set(self, wxArrayString labels, wxArrayInt values=wxArrayInt()) """ return _propgrid.PGChoices_Set(*args) def AllocExclusive(*args, **kwargs): """AllocExclusive(self)""" return _propgrid.PGChoices_AllocExclusive(*args, **kwargs) def GetData(*args, **kwargs): """GetData(self)""" return _propgrid.PGChoices_GetData(*args, **kwargs) def GetDataPtr(*args, **kwargs): """GetDataPtr(self)""" return _propgrid.PGChoices_GetDataPtr(*args, **kwargs) def ExtractData(*args, **kwargs): """ExtractData(self)""" return _propgrid.PGChoices_ExtractData(*args, **kwargs) def GetLabels(*args, **kwargs): """GetLabels(self) -> wxArrayString""" return _propgrid.PGChoices_GetLabels(*args, **kwargs) _propgrid.PGChoices_swigregister(PGChoices) class PGProperty(_core.Object): """Proxy of C++ PGProperty class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(self) -> PGProperty __init__(self, String label, String name) -> PGProperty """ _propgrid.PGProperty_swiginit(self,_propgrid.new_PGProperty(*args)) __swig_destroy__ = _propgrid.delete_PGProperty __del__ = lambda self : None; def OnSetValue(*args, **kwargs): """OnSetValue(self)""" return _propgrid.PGProperty_OnSetValue(*args, **kwargs) def DoGetValue(*args, **kwargs): """DoGetValue(self) -> wxVariant""" return _propgrid.PGProperty_DoGetValue(*args, **kwargs) def ValueToString(*args, **kwargs): """ValueToString(self, wxVariant value, int argFlags=0) -> String""" return _propgrid.PGProperty_ValueToString(*args, **kwargs) def SetValueFromString(*args, **kwargs): """SetValueFromString(self, String text, int flags=PG_PROGRAMMATIC_VALUE) -> bool""" return _propgrid.PGProperty_SetValueFromString(*args, **kwargs) def SetValueFromInt(*args, **kwargs): """SetValueFromInt(self, long value, int flags=0) -> bool""" return _propgrid.PGProperty_SetValueFromInt(*args, **kwargs) def OnMeasureImage(*args, **kwargs): """OnMeasureImage(self, int item=-1) -> Size""" return _propgrid.PGProperty_OnMeasureImage(*args, **kwargs) def OnEvent(*args, **kwargs): """OnEvent(self, PropertyGrid propgrid, Window wnd_primary, Event event) -> bool""" return _propgrid.PGProperty_OnEvent(*args, **kwargs) def ChildChanged(*args, **kwargs): """ChildChanged(self, wxVariant thisValue, int childIndex, wxVariant childValue) -> wxVariant""" return _propgrid.PGProperty_ChildChanged(*args, **kwargs) def DoGetEditorClass(*args, **kwargs): """DoGetEditorClass(self) -> PGEditor""" return _propgrid.PGProperty_DoGetEditorClass(*args, **kwargs) def DoGetValidator(*args, **kwargs): """DoGetValidator(self) -> Validator""" return _propgrid.PGProperty_DoGetValidator(*args, **kwargs) def OnCustomPaint(*args, **kwargs): """OnCustomPaint(self, DC dc, Rect rect, PGPaintData paintdata)""" return _propgrid.PGProperty_OnCustomPaint(*args, **kwargs) def GetCellRenderer(*args, **kwargs): """GetCellRenderer(self, int column)""" return _propgrid.PGProperty_GetCellRenderer(*args, **kwargs) def GetChoiceSelection(*args, **kwargs): """GetChoiceSelection(self) -> int""" 
return _propgrid.PGProperty_GetChoiceSelection(*args, **kwargs) def RefreshChildren(*args, **kwargs): """RefreshChildren(self)""" return _propgrid.PGProperty_RefreshChildren(*args, **kwargs) def DoSetAttribute(*args, **kwargs): """DoSetAttribute(self, String name, wxVariant value) -> bool""" return _propgrid.PGProperty_DoSetAttribute(*args, **kwargs) def DoGetAttribute(*args, **kwargs): """DoGetAttribute(self, String name) -> wxVariant""" return _propgrid.PGProperty_DoGetAttribute(*args, **kwargs) def GetEditorDialog(*args, **kwargs): """GetEditorDialog(self) -> PGEditorDialogAdapter""" return _propgrid.PGProperty_GetEditorDialog(*args, **kwargs) def OnValidationFailure(*args, **kwargs): """OnValidationFailure(self, wxVariant pendingValue)""" return _propgrid.PGProperty_OnValidationFailure(*args, **kwargs) def AddChoice(*args, **kwargs): """AddChoice(self, String label, int value=INT_MAX) -> int""" return _propgrid.PGProperty_AddChoice(*args, **kwargs) def AreChildrenComponents(*args, **kwargs): """AreChildrenComponents(self) -> bool""" return _propgrid.PGProperty_AreChildrenComponents(*args, **kwargs) def DeleteChildren(*args, **kwargs): """DeleteChildren(self)""" return _propgrid.PGProperty_DeleteChildren(*args, **kwargs) def DeleteChoice(*args, **kwargs): """DeleteChoice(self, int index)""" return _propgrid.PGProperty_DeleteChoice(*args, **kwargs) def Enable(*args, **kwargs): """Enable(self, bool enable=True)""" return _propgrid.PGProperty_Enable(*args, **kwargs) def EnableCommonValue(*args, **kwargs): """EnableCommonValue(self, bool enable=True)""" return _propgrid.PGProperty_EnableCommonValue(*args, **kwargs) def GenerateComposedValue(*args, **kwargs): """GenerateComposedValue(self) -> String""" return _propgrid.PGProperty_GenerateComposedValue(*args, **kwargs) def GetLabel(*args, **kwargs): """GetLabel(self) -> String""" return _propgrid.PGProperty_GetLabel(*args, **kwargs) def GetName(*args, **kwargs): """GetName(self) -> String""" return _propgrid.PGProperty_GetName(*args, **kwargs) def GetBaseName(*args, **kwargs): """GetBaseName(self) -> String""" return _propgrid.PGProperty_GetBaseName(*args, **kwargs) def GetChoices(*args, **kwargs): """GetChoices(self) -> PGChoices""" return _propgrid.PGProperty_GetChoices(*args, **kwargs) def GetY(*args, **kwargs): """GetY(self) -> int""" return _propgrid.PGProperty_GetY(*args, **kwargs) def GetValue(*args, **kwargs): """GetValue(self) -> wxVariant""" return _propgrid.PGProperty_GetValue(*args, **kwargs) def GetValuePlain(*args, **kwargs): """GetValuePlain(self) -> wxVariant""" return _propgrid.PGProperty_GetValuePlain(*args, **kwargs) def GetValueAsString(*args, **kwargs): """GetValueAsString(self, int argFlags=0) -> String""" return _propgrid.PGProperty_GetValueAsString(*args, **kwargs) def GetCell(*args): """ GetCell(self, int column) -> PGCell GetCell(self, int column) -> PGCell """ return _propgrid.PGProperty_GetCell(*args) def GetOrCreateCell(*args, **kwargs): """GetOrCreateCell(self, int column) -> PGCell""" return _propgrid.PGProperty_GetOrCreateCell(*args, **kwargs) def GetDisplayedCommonValueCount(*args, **kwargs): """GetDisplayedCommonValueCount(self) -> int""" return _propgrid.PGProperty_GetDisplayedCommonValueCount(*args, **kwargs) def GetDisplayedString(*args, **kwargs): """GetDisplayedString(self) -> String""" return _propgrid.PGProperty_GetDisplayedString(*args, **kwargs) def GetHintText(*args, **kwargs): """GetHintText(self) -> String""" return _propgrid.PGProperty_GetHintText(*args, **kwargs) def GetGrid(*args, **kwargs): 
"""GetGrid(self) -> PropertyGrid""" return _propgrid.PGProperty_GetGrid(*args, **kwargs) def GetGridIfDisplayed(*args, **kwargs): """GetGridIfDisplayed(self) -> PropertyGrid""" return _propgrid.PGProperty_GetGridIfDisplayed(*args, **kwargs) def GetMainParent(*args, **kwargs): """GetMainParent(self) -> PGProperty""" return _propgrid.PGProperty_GetMainParent(*args, **kwargs) def GetParent(*args, **kwargs): """GetParent(self) -> PGProperty""" return _propgrid.PGProperty_GetParent(*args, **kwargs) def IsTextEditable(*args, **kwargs): """IsTextEditable(self) -> bool""" return _propgrid.PGProperty_IsTextEditable(*args, **kwargs) def IsValueUnspecified(*args, **kwargs): """IsValueUnspecified(self) -> bool""" return _propgrid.PGProperty_IsValueUnspecified(*args, **kwargs) def HasFlag(*args, **kwargs): """HasFlag(self, int flag) -> FlagType""" return _propgrid.PGProperty_HasFlag(*args, **kwargs) def GetAttributes(*args, **kwargs): """GetAttributes(self)""" return _propgrid.PGProperty_GetAttributes(*args, **kwargs) def GetAttributesAsList(*args, **kwargs): """GetAttributesAsList(self) -> wxVariant""" return _propgrid.PGProperty_GetAttributesAsList(*args, **kwargs) def GetFlags(*args, **kwargs): """GetFlags(self) -> FlagType""" return _propgrid.PGProperty_GetFlags(*args, **kwargs) def GetEditorClass(*args, **kwargs): """GetEditorClass(self) -> PGEditor""" return _propgrid.PGProperty_GetEditorClass(*args, **kwargs) def GetValueType(*args, **kwargs): """GetValueType(self) -> String""" return _propgrid.PGProperty_GetValueType(*args, **kwargs) def GetColumnEditor(*args, **kwargs): """GetColumnEditor(self, int column) -> PGEditor""" return _propgrid.PGProperty_GetColumnEditor(*args, **kwargs) def GetCommonValue(*args, **kwargs): """GetCommonValue(self) -> int""" return _propgrid.PGProperty_GetCommonValue(*args, **kwargs) def HasVisibleChildren(*args, **kwargs): """HasVisibleChildren(self) -> bool""" return _propgrid.PGProperty_HasVisibleChildren(*args, **kwargs) def InsertChild(*args, **kwargs): """InsertChild(self, int index, PGProperty childProperty) -> PGProperty""" return _propgrid.PGProperty_InsertChild(*args, **kwargs) def InsertChoice(*args, **kwargs): """InsertChoice(self, String label, int index, int value=INT_MAX) -> int""" return _propgrid.PGProperty_InsertChoice(*args, **kwargs) def IsCategory(*args, **kwargs): """IsCategory(self) -> bool""" return _propgrid.PGProperty_IsCategory(*args, **kwargs) def IsRoot(*args, **kwargs): """IsRoot(self) -> bool""" return _propgrid.PGProperty_IsRoot(*args, **kwargs) def IsSubProperty(*args, **kwargs): """IsSubProperty(self) -> bool""" return _propgrid.PGProperty_IsSubProperty(*args, **kwargs) def GetLastVisibleSubItem(*args, **kwargs): """GetLastVisibleSubItem(self) -> PGProperty""" return _propgrid.PGProperty_GetLastVisibleSubItem(*args, **kwargs) def GetDefaultValue(*args, **kwargs): """GetDefaultValue(self) -> wxVariant""" return _propgrid.PGProperty_GetDefaultValue(*args, **kwargs) def GetMaxLength(*args, **kwargs): """GetMaxLength(self) -> int""" return _propgrid.PGProperty_GetMaxLength(*args, **kwargs) def AreAllChildrenSpecified(*args, **kwargs): """AreAllChildrenSpecified(self, wxVariant pendingList=None) -> bool""" return _propgrid.PGProperty_AreAllChildrenSpecified(*args, **kwargs) def UpdateParentValues(*args, **kwargs): """UpdateParentValues(self) -> PGProperty""" return _propgrid.PGProperty_UpdateParentValues(*args, **kwargs) def UsesAutoUnspecified(*args, **kwargs): """UsesAutoUnspecified(self) -> bool""" return 
_propgrid.PGProperty_UsesAutoUnspecified(*args, **kwargs) def GetValueImage(*args, **kwargs): """GetValueImage(self) -> Bitmap""" return _propgrid.PGProperty_GetValueImage(*args, **kwargs) def GetAttribute(*args): """ GetAttribute(self, String name) -> wxVariant GetAttribute(self, String name, String defVal) -> String """ return _propgrid.PGProperty_GetAttribute(*args) def GetAttributeAsLong(*args, **kwargs): """GetAttributeAsLong(self, String name, long defVal) -> long""" return _propgrid.PGProperty_GetAttributeAsLong(*args, **kwargs) def GetAttributeAsDouble(*args, **kwargs): """GetAttributeAsDouble(self, String name, double defVal) -> double""" return _propgrid.PGProperty_GetAttributeAsDouble(*args, **kwargs) def GetDepth(*args, **kwargs): """GetDepth(self) -> int""" return _propgrid.PGProperty_GetDepth(*args, **kwargs) def GetFlagsAsString(*args, **kwargs): """GetFlagsAsString(self, FlagType flagsMask) -> String""" return _propgrid.PGProperty_GetFlagsAsString(*args, **kwargs) def GetIndexInParent(*args, **kwargs): """GetIndexInParent(self) -> int""" return _propgrid.PGProperty_GetIndexInParent(*args, **kwargs) def Hide(*args, **kwargs): """Hide(self, bool hide, int flags=PG_RECURSE) -> bool""" return _propgrid.PGProperty_Hide(*args, **kwargs) def IsExpanded(*args, **kwargs): """IsExpanded(self) -> bool""" return _propgrid.PGProperty_IsExpanded(*args, **kwargs) def IsVisible(*args, **kwargs): """IsVisible(self) -> bool""" return _propgrid.PGProperty_IsVisible(*args, **kwargs) def IsEnabled(*args, **kwargs): """IsEnabled(self) -> bool""" return _propgrid.PGProperty_IsEnabled(*args, **kwargs) def RecreateEditor(*args, **kwargs): """RecreateEditor(self) -> bool""" return _propgrid.PGProperty_RecreateEditor(*args, **kwargs) def RefreshEditor(*args, **kwargs): """RefreshEditor(self)""" return _propgrid.PGProperty_RefreshEditor(*args, **kwargs) def SetAttribute(*args, **kwargs): """SetAttribute(self, String name, wxVariant value)""" return _propgrid.PGProperty_SetAttribute(*args, **kwargs) def SetAttributes(*args, **kwargs): """SetAttributes(self, attributes)""" return _propgrid.PGProperty_SetAttributes(*args, **kwargs) def SetAutoUnspecified(*args, **kwargs): """SetAutoUnspecified(self, bool enable=True)""" return _propgrid.PGProperty_SetAutoUnspecified(*args, **kwargs) def SetBackgroundColour(*args, **kwargs): """SetBackgroundColour(self, Colour colour, int flags=PG_RECURSE)""" return _propgrid.PGProperty_SetBackgroundColour(*args, **kwargs) def SetTextColour(*args, **kwargs): """SetTextColour(self, Colour colour, int flags=PG_RECURSE)""" return _propgrid.PGProperty_SetTextColour(*args, **kwargs) def SetDefaultValue(*args, **kwargs): """SetDefaultValue(self, wxVariant value)""" return _propgrid.PGProperty_SetDefaultValue(*args, **kwargs) def SetEditor(*args, **kwargs): """SetEditor(self, String editorName)""" return _propgrid.PGProperty_SetEditor(*args, **kwargs) def SetCell(*args, **kwargs): """SetCell(self, int column, PGCell cell)""" return _propgrid.PGProperty_SetCell(*args, **kwargs) def SetCommonValue(*args, **kwargs): """SetCommonValue(self, int commonValue)""" return _propgrid.PGProperty_SetCommonValue(*args, **kwargs) def SetFlagsFromString(*args, **kwargs): """SetFlagsFromString(self, String str)""" return _propgrid.PGProperty_SetFlagsFromString(*args, **kwargs) def SetModifiedStatus(*args, **kwargs): """SetModifiedStatus(self, bool modified)""" return _propgrid.PGProperty_SetModifiedStatus(*args, **kwargs) def SetValueInEvent(*args, **kwargs): """SetValueInEvent(self, wxVariant 
value)""" return _propgrid.PGProperty_SetValueInEvent(*args, **kwargs) def SetValue(*args, **kwargs): """SetValue(self, wxVariant value, wxVariant pList=None, int flags=PG_SETVAL_REFRESH_EDITOR)""" return _propgrid.PGProperty_SetValue(*args, **kwargs) def SetValueImage(*args, **kwargs): """SetValueImage(self, Bitmap bmp)""" return _propgrid.PGProperty_SetValueImage(*args, **kwargs) def SetChoiceSelection(*args, **kwargs): """SetChoiceSelection(self, int newValue)""" return _propgrid.PGProperty_SetChoiceSelection(*args, **kwargs) def SetExpanded(*args, **kwargs): """SetExpanded(self, bool expanded)""" return _propgrid.PGProperty_SetExpanded(*args, **kwargs) def ChangeFlag(*args, **kwargs): """ChangeFlag(self, int flag, bool set)""" return _propgrid.PGProperty_ChangeFlag(*args, **kwargs) def SetFlagRecursively(*args, **kwargs): """SetFlagRecursively(self, int flag, bool set)""" return _propgrid.PGProperty_SetFlagRecursively(*args, **kwargs) def SetHelpString(*args, **kwargs): """SetHelpString(self, String helpString)""" return _propgrid.PGProperty_SetHelpString(*args, **kwargs) def SetLabel(*args, **kwargs): """SetLabel(self, String label)""" return _propgrid.PGProperty_SetLabel(*args, **kwargs) def SetName(*args, **kwargs): """SetName(self, String newName)""" return _propgrid.PGProperty_SetName(*args, **kwargs) def SetParentalType(*args, **kwargs): """SetParentalType(self, int flag)""" return _propgrid.PGProperty_SetParentalType(*args, **kwargs) def SetValueToUnspecified(*args, **kwargs): """SetValueToUnspecified(self)""" return _propgrid.PGProperty_SetValueToUnspecified(*args, **kwargs) def SetValuePlain(*args, **kwargs): """SetValuePlain(self, wxVariant value)""" return _propgrid.PGProperty_SetValuePlain(*args, **kwargs) def SetValidator(*args, **kwargs): """SetValidator(self, Validator validator)""" return _propgrid.PGProperty_SetValidator(*args, **kwargs) def GetValidator(*args, **kwargs): """GetValidator(self) -> Validator""" return _propgrid.PGProperty_GetValidator(*args, **kwargs) def SetMaxLength(*args, **kwargs): """SetMaxLength(self, int maxLen) -> bool""" return _propgrid.PGProperty_SetMaxLength(*args, **kwargs) def SetWasModified(*args, **kwargs): """SetWasModified(self, bool set=True)""" return _propgrid.PGProperty_SetWasModified(*args, **kwargs) def GetHelpString(*args, **kwargs): """GetHelpString(self) -> String""" return _propgrid.PGProperty_GetHelpString(*args, **kwargs) def IsSomeParent(*args, **kwargs): """IsSomeParent(self, PGProperty candidate_parent) -> bool""" return _propgrid.PGProperty_IsSomeParent(*args, **kwargs) def AdaptListToValue(*args, **kwargs): """AdaptListToValue(self, wxVariant list, wxVariant value)""" return _propgrid.PGProperty_AdaptListToValue(*args, **kwargs) def AddPrivateChild(*args, **kwargs): """AddPrivateChild(self, PGProperty prop)""" return _propgrid.PGProperty_AddPrivateChild(*args, **kwargs) def AppendChild(*args, **kwargs): """AppendChild(self, PGProperty prop) -> PGProperty""" return _propgrid.PGProperty_AppendChild(*args, **kwargs) def GetChildrenHeight(*args, **kwargs): """GetChildrenHeight(self, int lh, int iMax=-1) -> int""" return _propgrid.PGProperty_GetChildrenHeight(*args, **kwargs) def GetChildCount(*args, **kwargs): """GetChildCount(self) -> int""" return _propgrid.PGProperty_GetChildCount(*args, **kwargs) def Item(*args, **kwargs): """Item(self, int i) -> PGProperty""" return _propgrid.PGProperty_Item(*args, **kwargs) def Last(*args, **kwargs): """Last(self) -> PGProperty""" return _propgrid.PGProperty_Last(*args, **kwargs) def 
Index(*args, **kwargs): """Index(self, PGProperty p) -> int""" return _propgrid.PGProperty_Index(*args, **kwargs) def FixIndicesOfChildren(*args, **kwargs): """FixIndicesOfChildren(self, int starthere=0)""" return _propgrid.PGProperty_FixIndicesOfChildren(*args, **kwargs) def GetImageOffset(*args, **kwargs): """GetImageOffset(self, int imageWidth) -> int""" return _propgrid.PGProperty_GetImageOffset(*args, **kwargs) def GetItemAtY(*args, **kwargs): """GetItemAtY(self, int y) -> PGProperty""" return _propgrid.PGProperty_GetItemAtY(*args, **kwargs) def GetPropertyByName(*args, **kwargs): """GetPropertyByName(self, String name) -> PGProperty""" return _propgrid.PGProperty_GetPropertyByName(*args, **kwargs) def SetPyChoices(*args): """ SetPyChoices(self, PGChoices chs) -> bool SetPyChoices(self, wxArrayString labels, wxArrayInt values=wxArrayInt()) -> bool """ return _propgrid.PGProperty_SetPyChoices(*args) def PyBase_StringToValue(*args, **kwargs): """PyBase_StringToValue(self, String text, int argFlags=0) -> wxPGVariantAndBool""" return _propgrid.PGProperty_PyBase_StringToValue(*args, **kwargs) def PyBase_IntToValue(*args, **kwargs): """PyBase_IntToValue(self, wxVariant value, int number, int argFlags=0) -> wxPGVariantAndBool""" return _propgrid.PGProperty_PyBase_IntToValue(*args, **kwargs) m_value = property(GetValuePlain,SetValuePlain) def GetPyClientData(*args, **kwargs): """ GetPyClientData(self) -> PyObject Returns the client data object for a property """ return _propgrid.PGProperty_GetPyClientData(*args, **kwargs) def SetPyClientData(*args, **kwargs): """ SetPyClientData(self, PyObject clientData) Associate the given client data. """ return _propgrid.PGProperty_SetPyClientData(*args, **kwargs) SetChoices = SetPyChoices StringToValue = PyBase_StringToValue IntToValue = PyBase_IntToValue GetClientObject = GetPyClientData SetClientObject = SetPyClientData GetClientData = GetPyClientData SetClientData = SetPyClientData _propgrid.PGProperty_swigregister(PGProperty) class PropertyGridHitTestResult(object): """Proxy of C++ PropertyGridHitTestResult class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args, **kwargs): """__init__(self) -> PropertyGridHitTestResult""" _propgrid.PropertyGridHitTestResult_swiginit(self,_propgrid.new_PropertyGridHitTestResult(*args, **kwargs)) __swig_destroy__ = _propgrid.delete_PropertyGridHitTestResult __del__ = lambda self : None; def GetColumn(*args, **kwargs): """GetColumn(self) -> int""" return _propgrid.PropertyGridHitTestResult_GetColumn(*args, **kwargs) def GetProperty(*args, **kwargs): """GetProperty(self) -> PGProperty""" return _propgrid.PropertyGridHitTestResult_GetProperty(*args, **kwargs) def GetSplitter(*args, **kwargs): """GetSplitter(self) -> int""" return _propgrid.PropertyGridHitTestResult_GetSplitter(*args, **kwargs) def GetSplitterHitOffset(*args, **kwargs): """GetSplitterHitOffset(self) -> int""" return _propgrid.PropertyGridHitTestResult_GetSplitterHitOffset(*args, **kwargs) _propgrid.PropertyGridHitTestResult_swigregister(PropertyGridHitTestResult) PG_ITERATE_PROPERTIES = _propgrid.PG_ITERATE_PROPERTIES PG_ITERATE_HIDDEN = _propgrid.PG_ITERATE_HIDDEN PG_ITERATE_FIXED_CHILDREN = _propgrid.PG_ITERATE_FIXED_CHILDREN PG_ITERATE_CATEGORIES = _propgrid.PG_ITERATE_CATEGORIES PG_ITERATE_ALL_PARENTS = _propgrid.PG_ITERATE_ALL_PARENTS PG_ITERATE_ALL_PARENTS_RECURSIVELY = _propgrid.PG_ITERATE_ALL_PARENTS_RECURSIVELY PG_ITERATOR_FLAGS_ALL = 
_propgrid.PG_ITERATOR_FLAGS_ALL PG_ITERATOR_MASK_OP_ITEM = _propgrid.PG_ITERATOR_MASK_OP_ITEM PG_ITERATOR_MASK_OP_PARENT = _propgrid.PG_ITERATOR_MASK_OP_PARENT PG_ITERATE_VISIBLE = _propgrid.PG_ITERATE_VISIBLE PG_ITERATE_ALL = _propgrid.PG_ITERATE_ALL PG_ITERATE_NORMAL = _propgrid.PG_ITERATE_NORMAL PG_ITERATE_DEFAULT = _propgrid.PG_ITERATE_DEFAULT class PropertyGridIteratorBase(object): """Proxy of C++ PropertyGridIteratorBase class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args, **kwargs): """__init__(self) -> PropertyGridIteratorBase""" _propgrid.PropertyGridIteratorBase_swiginit(self,_propgrid.new_PropertyGridIteratorBase(*args, **kwargs)) def Assign(*args, **kwargs): """Assign(self, PropertyGridIteratorBase it)""" return _propgrid.PropertyGridIteratorBase_Assign(*args, **kwargs) def AtEnd(*args, **kwargs): """AtEnd(self) -> bool""" return _propgrid.PropertyGridIteratorBase_AtEnd(*args, **kwargs) def GetProperty(*args, **kwargs): """GetProperty(self) -> PGProperty""" return _propgrid.PropertyGridIteratorBase_GetProperty(*args, **kwargs) def Init(*args): """ Init(self, state, int flags, PGProperty property, int dir=1) Init(self, state, int flags, int startPos=TOP, int dir=0) """ return _propgrid.PropertyGridIteratorBase_Init(*args) def Next(*args, **kwargs): """Next(self, bool iterateChildren=True)""" return _propgrid.PropertyGridIteratorBase_Next(*args, **kwargs) def Prev(*args, **kwargs): """Prev(self)""" return _propgrid.PropertyGridIteratorBase_Prev(*args, **kwargs) def SetBaseParent(*args, **kwargs): """SetBaseParent(self, PGProperty baseParent)""" return _propgrid.PropertyGridIteratorBase_SetBaseParent(*args, **kwargs) _propgrid.PropertyGridIteratorBase_swigregister(PropertyGridIteratorBase) class PropertyGridIterator(PropertyGridIteratorBase): """Proxy of C++ PropertyGridIterator class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(self, state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PropertyGridIterator __init__(self, state, int flags, int startPos, int dir=0) -> PropertyGridIterator __init__(self) -> PropertyGridIterator __init__(self, PropertyGridIterator it) -> PropertyGridIterator """ _propgrid.PropertyGridIterator_swiginit(self,_propgrid.new_PropertyGridIterator(*args)) __swig_destroy__ = _propgrid.delete_PropertyGridIterator __del__ = lambda self : None; def __ref__(*args, **kwargs): """__ref__(self) -> PGProperty""" return _propgrid.PropertyGridIterator___ref__(*args, **kwargs) def OneStep(*args, **kwargs): """ OneStep( state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PGProperty """ return _propgrid.PropertyGridIterator_OneStep(*args, **kwargs) OneStep = staticmethod(OneStep) _propgrid.PropertyGridIterator_swigregister(PropertyGridIterator) def PropertyGridIterator_OneStep(*args, **kwargs): """ PropertyGridIterator_OneStep( state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PGProperty """ return _propgrid.PropertyGridIterator_OneStep(*args, **kwargs) class PropertyGridConstIterator(PropertyGridIteratorBase): """Proxy of C++ PropertyGridConstIterator class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr __swig_destroy__ = _propgrid.delete_PropertyGridConstIterator __del__ = lambda self : None; def 
__ref__(*args, **kwargs): """__ref__(self) -> PGProperty""" return _propgrid.PropertyGridConstIterator___ref__(*args, **kwargs) def OneStep(*args, **kwargs): """ OneStep( state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PGProperty """ return _propgrid.PropertyGridConstIterator_OneStep(*args, **kwargs) OneStep = staticmethod(OneStep) def __init__(self, *args): """ __init__(self, state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PropertyGridConstIterator __init__(self, state, int flags, int startPos, int dir=0) -> PropertyGridConstIterator __init__(self) -> PropertyGridConstIterator __init__(self, PropertyGridConstIterator it) -> PropertyGridConstIterator __init__(self, PropertyGridIterator other) -> PropertyGridConstIterator """ _propgrid.PropertyGridConstIterator_swiginit(self,_propgrid.new_PropertyGridConstIterator(*args)) _propgrid.PropertyGridConstIterator_swigregister(PropertyGridConstIterator) def PropertyGridConstIterator_OneStep(*args, **kwargs): """ PropertyGridConstIterator_OneStep( state, int flags=PG_ITERATE_DEFAULT, PGProperty property=None, int dir=1) -> PGProperty """ return _propgrid.PropertyGridConstIterator_OneStep(*args, **kwargs) class PGVIteratorBase(_core.RefCounter): """Proxy of C++ PGVIteratorBase class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self): raise AttributeError, "No constructor defined" __repr__ = _swig_repr def Next(*args, **kwargs): """Next(self)""" return _propgrid.PGVIteratorBase_Next(*args, **kwargs) _propgrid.PGVIteratorBase_swigregister(PGVIteratorBase) class PGVIterator(object): """Proxy of C++ PGVIterator class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr __swig_destroy__ = _propgrid.delete_PGVIterator __del__ = lambda self : None; def UnRef(*args, **kwargs): """UnRef(self)""" return _propgrid.PGVIterator_UnRef(*args, **kwargs) def __init__(self, *args): """ __init__(self) -> PGVIterator __init__(self, PGVIteratorBase obj) -> PGVIterator __init__(self, PGVIterator it) -> PGVIterator """ _propgrid.PGVIterator_swiginit(self,_propgrid.new_PGVIterator(*args)) def Next(*args, **kwargs): """Next(self)""" return _propgrid.PGVIterator_Next(*args, **kwargs) def AtEnd(*args, **kwargs): """AtEnd(self) -> bool""" return _propgrid.PGVIterator_AtEnd(*args, **kwargs) def GetProperty(*args, **kwargs): """GetProperty(self) -> PGProperty""" return _propgrid.PGVIterator_GetProperty(*args, **kwargs) _propgrid.PGVIterator_swigregister(PGVIterator) def PGTypeOperationFailed(*args, **kwargs): """PGTypeOperationFailed(PGProperty p, String typestr, String op)""" return _propgrid.PGTypeOperationFailed(*args, **kwargs) def PGGetFailed(*args, **kwargs): """PGGetFailed(PGProperty p, String typestr)""" return _propgrid.PGGetFailed(*args, **kwargs) class PropertyGridInterface(object): """Proxy of C++ PropertyGridInterface class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') def __init__(self): raise AttributeError, "No constructor defined" __repr__ = _swig_repr __swig_destroy__ = _propgrid.delete_PropertyGridInterface __del__ = lambda self : None; def Append(*args, **kwargs): """Append(self, PGProperty property) -> PGProperty""" return _propgrid.PropertyGridInterface_Append(*args, **kwargs) def AppendIn(*args, **kwargs): """AppendIn(self, PGPropArg id, PGProperty newproperty) -> PGProperty""" return 
_propgrid.PropertyGridInterface_AppendIn(*args, **kwargs) def BeginAddChildren(*args, **kwargs): """BeginAddChildren(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_BeginAddChildren(*args, **kwargs) def Clear(*args, **kwargs): """Clear(self)""" return _propgrid.PropertyGridInterface_Clear(*args, **kwargs) def ClearSelection(*args, **kwargs): """ClearSelection(self, bool validation=False) -> bool""" return _propgrid.PropertyGridInterface_ClearSelection(*args, **kwargs) def ClearModifiedStatus(*args, **kwargs): """ClearModifiedStatus(self)""" return _propgrid.PropertyGridInterface_ClearModifiedStatus(*args, **kwargs) def Collapse(*args, **kwargs): """Collapse(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_Collapse(*args, **kwargs) def CollapseAll(*args, **kwargs): """CollapseAll(self) -> bool""" return _propgrid.PropertyGridInterface_CollapseAll(*args, **kwargs) def ChangePropertyValue(*args, **kwargs): """ChangePropertyValue(self, PGPropArg id, wxVariant newValue) -> bool""" return _propgrid.PropertyGridInterface_ChangePropertyValue(*args, **kwargs) def DeleteProperty(*args, **kwargs): """DeleteProperty(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_DeleteProperty(*args, **kwargs) def RemoveProperty(*args, **kwargs): """RemoveProperty(self, PGPropArg id) -> PGProperty""" return _propgrid.PropertyGridInterface_RemoveProperty(*args, **kwargs) def DisableProperty(*args, **kwargs): """DisableProperty(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_DisableProperty(*args, **kwargs) def EditorValidate(*args, **kwargs): """EditorValidate(self) -> bool""" return _propgrid.PropertyGridInterface_EditorValidate(*args, **kwargs) def EnableProperty(*args, **kwargs): """EnableProperty(self, PGPropArg id, bool enable=True) -> bool""" return _propgrid.PropertyGridInterface_EnableProperty(*args, **kwargs) def EndAddChildren(*args, **kwargs): """EndAddChildren(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_EndAddChildren(*args, **kwargs) def Expand(*args, **kwargs): """Expand(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_Expand(*args, **kwargs) def ExpandAll(*args, **kwargs): """ExpandAll(self, bool expand=True) -> bool""" return _propgrid.PropertyGridInterface_ExpandAll(*args, **kwargs) def GetFirstChild(*args, **kwargs): """GetFirstChild(self, PGPropArg id) -> PGProperty""" return _propgrid.PropertyGridInterface_GetFirstChild(*args, **kwargs) def GetIterator(*args): """ GetIterator(self, int flags=PG_ITERATE_DEFAULT, PGProperty firstProp=None) -> PropertyGridIterator GetIterator(self, int flags=PG_ITERATE_DEFAULT, PGProperty firstProp=None) -> PropertyGridConstIterator GetIterator(self, int flags, int startPos) -> PropertyGridIterator GetIterator(self, int flags, int startPos) -> PropertyGridConstIterator """ return _propgrid.PropertyGridInterface_GetIterator(*args) def GetFirst(*args): """ GetFirst(self, int flags=PG_ITERATE_ALL) -> PGProperty GetFirst(self, int flags=PG_ITERATE_ALL) -> PGProperty """ return _propgrid.PropertyGridInterface_GetFirst(*args) def GetProperty(*args, **kwargs): """GetProperty(self, String name) -> PGProperty""" return _propgrid.PropertyGridInterface_GetProperty(*args, **kwargs) def GetPropertyAttributes(*args, **kwargs): """GetPropertyAttributes(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_GetPropertyAttributes(*args, **kwargs) def GetPropertiesWithFlag(*args, **kwargs): """ GetPropertiesWithFlag(self, wxArrayPGProperty targetArr, FlagType flags, bool 
inverse=False, int iterFlags=wxPG_ITERATE_PROPERTIES|wxPG_ITERATE_HIDDEN|wxPG_ITERATE_CATEGORIES) """ return _propgrid.PropertyGridInterface_GetPropertiesWithFlag(*args, **kwargs) def GetPropertyAttribute(*args, **kwargs): """GetPropertyAttribute(self, PGPropArg id, String attrName) -> wxVariant""" return _propgrid.PropertyGridInterface_GetPropertyAttribute(*args, **kwargs) def GetPropertyCategory(*args, **kwargs): """GetPropertyCategory(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_GetPropertyCategory(*args, **kwargs) def GetPropertyByLabel(*args, **kwargs): """GetPropertyByLabel(self, String label) -> PGProperty""" return _propgrid.PropertyGridInterface_GetPropertyByLabel(*args, **kwargs) def GetPropertyByName(*args): """ GetPropertyByName(self, String name) -> PGProperty GetPropertyByName(self, String name, String subname) -> PGProperty """ return _propgrid.PropertyGridInterface_GetPropertyByName(*args) def GetPropertyEditor(*args, **kwargs): """GetPropertyEditor(self, PGPropArg id) -> PGEditor""" return _propgrid.PropertyGridInterface_GetPropertyEditor(*args, **kwargs) def GetPropertyHelpString(*args, **kwargs): """GetPropertyHelpString(self, PGPropArg id) -> String""" return _propgrid.PropertyGridInterface_GetPropertyHelpString(*args, **kwargs) def GetPropertyImage(*args, **kwargs): """GetPropertyImage(self, PGPropArg id) -> Bitmap""" return _propgrid.PropertyGridInterface_GetPropertyImage(*args, **kwargs) def GetPropertyLabel(*args, **kwargs): """GetPropertyLabel(self, PGPropArg id) -> String""" return _propgrid.PropertyGridInterface_GetPropertyLabel(*args, **kwargs) def GetPropertyName(*args, **kwargs): """GetPropertyName(self, PGProperty property) -> String""" return _propgrid.PropertyGridInterface_GetPropertyName(*args, **kwargs) def GetPropertyParent(*args, **kwargs): """GetPropertyParent(self, PGPropArg id) -> PGProperty""" return _propgrid.PropertyGridInterface_GetPropertyParent(*args, **kwargs) def GetPropertyValidator(*args, **kwargs): """GetPropertyValidator(self, PGPropArg id) -> Validator""" return _propgrid.PropertyGridInterface_GetPropertyValidator(*args, **kwargs) def GetPropertyValue(*args, **kwargs): """GetPropertyValue(self, PGPropArg id) -> wxVariant""" return _propgrid.PropertyGridInterface_GetPropertyValue(*args, **kwargs) def GetPropertyValueAsString(*args, **kwargs): """GetPropertyValueAsString(self, PGPropArg id) -> String""" return _propgrid.PropertyGridInterface_GetPropertyValueAsString(*args, **kwargs) def GetPropertyValueAsLong(*args, **kwargs): """GetPropertyValueAsLong(self, PGPropArg id) -> long""" return _propgrid.PropertyGridInterface_GetPropertyValueAsLong(*args, **kwargs) def GetPropertyValueAsULong(*args, **kwargs): """GetPropertyValueAsULong(self, PGPropArg id) -> long""" return _propgrid.PropertyGridInterface_GetPropertyValueAsULong(*args, **kwargs) def GetPropertyValueAsBool(*args, **kwargs): """GetPropertyValueAsBool(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_GetPropertyValueAsBool(*args, **kwargs) def GetPropertyValueAsDouble(*args, **kwargs): """GetPropertyValueAsDouble(self, PGPropArg id) -> double""" return _propgrid.PropertyGridInterface_GetPropertyValueAsDouble(*args, **kwargs) def GetPropertyValueAsArrayString(*args, **kwargs): """GetPropertyValueAsArrayString(self, PGPropArg id) -> wxArrayString""" return _propgrid.PropertyGridInterface_GetPropertyValueAsArrayString(*args, **kwargs) def GetPropertyValueAsArrayInt(*args, **kwargs): """GetPropertyValueAsArrayInt(self, PGPropArg id) -> 
wxArrayInt""" return _propgrid.PropertyGridInterface_GetPropertyValueAsArrayInt(*args, **kwargs) def GetSelection(*args, **kwargs): """GetSelection(self) -> PGProperty""" return _propgrid.PropertyGridInterface_GetSelection(*args, **kwargs) def GetSelectedProperties(*args, **kwargs): """GetSelectedProperties(self) -> wxArrayPGProperty""" return _propgrid.PropertyGridInterface_GetSelectedProperties(*args, **kwargs) def GetVIterator(*args, **kwargs): """GetVIterator(self, int flags) -> PGVIterator""" return _propgrid.PropertyGridInterface_GetVIterator(*args, **kwargs) def HideProperty(*args, **kwargs): """HideProperty(self, PGPropArg id, bool hide=True, int flags=PG_RECURSE) -> bool""" return _propgrid.PropertyGridInterface_HideProperty(*args, **kwargs) def InitAllTypeHandlers(*args, **kwargs): """InitAllTypeHandlers()""" return _propgrid.PropertyGridInterface_InitAllTypeHandlers(*args, **kwargs) InitAllTypeHandlers = staticmethod(InitAllTypeHandlers) def Insert(*args): """ Insert(self, PGPropArg priorThis, PGProperty newproperty) -> PGProperty Insert(self, PGPropArg parent, int index, PGProperty newproperty) -> PGProperty """ return _propgrid.PropertyGridInterface_Insert(*args) def IsPropertyCategory(*args, **kwargs): """IsPropertyCategory(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyCategory(*args, **kwargs) def IsPropertyEnabled(*args, **kwargs): """IsPropertyEnabled(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyEnabled(*args, **kwargs) def IsPropertyExpanded(*args, **kwargs): """IsPropertyExpanded(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyExpanded(*args, **kwargs) def IsPropertyModified(*args, **kwargs): """IsPropertyModified(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyModified(*args, **kwargs) def IsPropertySelected(*args, **kwargs): """IsPropertySelected(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertySelected(*args, **kwargs) def IsPropertyShown(*args, **kwargs): """IsPropertyShown(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyShown(*args, **kwargs) def IsPropertyValueUnspecified(*args, **kwargs): """IsPropertyValueUnspecified(self, PGPropArg id) -> bool""" return _propgrid.PropertyGridInterface_IsPropertyValueUnspecified(*args, **kwargs) def LimitPropertyEditing(*args, **kwargs): """LimitPropertyEditing(self, PGPropArg id, bool limit=True)""" return _propgrid.PropertyGridInterface_LimitPropertyEditing(*args, **kwargs) def RefreshGrid(*args, **kwargs): """RefreshGrid(self, state=None)""" return _propgrid.PropertyGridInterface_RefreshGrid(*args, **kwargs) def RegisterAdditionalEditors(*args, **kwargs): """RegisterAdditionalEditors()""" return _propgrid.PropertyGridInterface_RegisterAdditionalEditors(*args, **kwargs) RegisterAdditionalEditors = staticmethod(RegisterAdditionalEditors) def ReplaceProperty(*args, **kwargs): """ReplaceProperty(self, PGPropArg id, PGProperty property) -> PGProperty""" return _propgrid.PropertyGridInterface_ReplaceProperty(*args, **kwargs) SelectionState = _propgrid.PropertyGridInterface_SelectionState ExpandedState = _propgrid.PropertyGridInterface_ExpandedState ScrollPosState = _propgrid.PropertyGridInterface_ScrollPosState PageState = _propgrid.PropertyGridInterface_PageState SplitterPosState = _propgrid.PropertyGridInterface_SplitterPosState DescBoxState = _propgrid.PropertyGridInterface_DescBoxState AllStates = 
_propgrid.PropertyGridInterface_AllStates def RestoreEditableState(*args, **kwargs): """RestoreEditableState(self, String src, int restoreStates=AllStates) -> bool""" return _propgrid.PropertyGridInterface_RestoreEditableState(*args, **kwargs) def SaveEditableState(*args, **kwargs): """SaveEditableState(self, int includedStates=AllStates) -> String""" return _propgrid.PropertyGridInterface_SaveEditableState(*args, **kwargs) def SetBoolChoices(*args, **kwargs): """SetBoolChoices(String trueChoice, String falseChoice)""" return _propgrid.PropertyGridInterface_SetBoolChoices(*args, **kwargs) SetBoolChoices = staticmethod(SetBoolChoices) def SetColumnProportion(*args, **kwargs): """SetColumnProportion(self, int column, int proportion) -> bool""" return _propgrid.PropertyGridInterface_SetColumnProportion(*args, **kwargs) def GetColumnProportion(*args, **kwargs): """GetColumnProportion(self, int column) -> int""" return _propgrid.PropertyGridInterface_GetColumnProportion(*args, **kwargs) def SetPropertyAttribute(*args, **kwargs): """SetPropertyAttribute(self, PGPropArg id, String attrName, wxVariant value, long argFlags=0)""" return _propgrid.PropertyGridInterface_SetPropertyAttribute(*args, **kwargs) def SetPropertyAttributeAll(*args, **kwargs): """SetPropertyAttributeAll(self, String attrName, wxVariant value)""" return _propgrid.PropertyGridInterface_SetPropertyAttributeAll(*args, **kwargs) def SetPropertyBackgroundColour(*args, **kwargs): """SetPropertyBackgroundColour(self, PGPropArg id, Colour colour, int flags=PG_RECURSE)""" return _propgrid.PropertyGridInterface_SetPropertyBackgroundColour(*args, **kwargs) def SetPropertyColoursToDefault(*args, **kwargs): """SetPropertyColoursToDefault(self, PGPropArg id)""" return _propgrid.PropertyGridInterface_SetPropertyColoursToDefault(*args, **kwargs) def SetPropertyTextColour(*args, **kwargs): """SetPropertyTextColour(self, PGPropArg id, Colour col, int flags=PG_RECURSE)""" return _propgrid.PropertyGridInterface_SetPropertyTextColour(*args, **kwargs) def GetPropertyBackgroundColour(*args, **kwargs): """GetPropertyBackgroundColour(self, PGPropArg id) -> Colour""" return _propgrid.PropertyGridInterface_GetPropertyBackgroundColour(*args, **kwargs) def GetPropertyTextColour(*args, **kwargs): """GetPropertyTextColour(self, PGPropArg id) -> Colour""" return _propgrid.PropertyGridInterface_GetPropertyTextColour(*args, **kwargs) def SetPropertyCell(*args, **kwargs): """ SetPropertyCell(self, PGPropArg id, int column, String text=wxEmptyString, Bitmap bitmap=wxNullBitmap, Colour fgCol=wxNullColour, Colour bgCol=wxNullColour) """ return _propgrid.PropertyGridInterface_SetPropertyCell(*args, **kwargs) def SetPropertyEditor(*args, **kwargs): """SetPropertyEditor(self, PGPropArg id, String editorName)""" return _propgrid.PropertyGridInterface_SetPropertyEditor(*args, **kwargs) def SetPropertyLabel(*args, **kwargs): """SetPropertyLabel(self, PGPropArg id, String newproplabel)""" return _propgrid.PropertyGridInterface_SetPropertyLabel(*args, **kwargs) def SetPropertyName(*args, **kwargs): """SetPropertyName(self, PGPropArg id, String newName)""" return _propgrid.PropertyGridInterface_SetPropertyName(*args, **kwargs) def SetPropertyReadOnly(*args, **kwargs): """SetPropertyReadOnly(self, PGPropArg id, bool set=True, int flags=PG_RECURSE)""" return _propgrid.PropertyGridInterface_SetPropertyReadOnly(*args, **kwargs) def SetPropertyValueUnspecified(*args, **kwargs): """SetPropertyValueUnspecified(self, PGPropArg id)""" return 
_propgrid.PropertyGridInterface_SetPropertyValueUnspecified(*args, **kwargs)

    def SetPropertyHelpString(*args, **kwargs):
        """SetPropertyHelpString(self, PGPropArg id, String helpString)"""
        return _propgrid.PropertyGridInterface_SetPropertyHelpString(*args, **kwargs)

    def SetPropertyImage(*args, **kwargs):
        """SetPropertyImage(self, PGPropArg id, Bitmap bmp)"""
        return _propgrid.PropertyGridInterface_SetPropertyImage(*args, **kwargs)

    def SetPropertyMaxLength(*args, **kwargs):
        """SetPropertyMaxLength(self, PGPropArg id, int maxLen) -> bool"""
        return _propgrid.PropertyGridInterface_SetPropertyMaxLength(*args, **kwargs)

    def SetPropertyValidator(*args, **kwargs):
        """SetPropertyValidator(self, PGPropArg id, Validator validator)"""
        return _propgrid.PropertyGridInterface_SetPropertyValidator(*args, **kwargs)

    def SetPropertyValueString(*args, **kwargs):
        """SetPropertyValueString(self, PGPropArg id, String value)"""
        return _propgrid.PropertyGridInterface_SetPropertyValueString(*args, **kwargs)

    def SetPropertyValue(*args, **kwargs):
        """SetPropertyValue(self, PGPropArg id, wxVariant value)"""
        return _propgrid.PropertyGridInterface_SetPropertyValue(*args, **kwargs)

    def SetValidationFailureBehavior(*args, **kwargs):
        """SetValidationFailureBehavior(self, int vfbFlags)"""
        return _propgrid.PropertyGridInterface_SetValidationFailureBehavior(*args, **kwargs)

    def Sort(*args, **kwargs):
        """Sort(self, int flags=0)"""
        return _propgrid.PropertyGridInterface_Sort(*args, **kwargs)

    def SortChildren(*args, **kwargs):
        """SortChildren(self, PGPropArg id, int flags=0)"""
        return _propgrid.PropertyGridInterface_SortChildren(*args, **kwargs)

    def GetPropertyByNameA(*args, **kwargs):
        """GetPropertyByNameA(self, String name) -> PGProperty"""
        return _propgrid.PropertyGridInterface_GetPropertyByNameA(*args, **kwargs)

    def GetEditorByName(*args, **kwargs):
        """GetEditorByName(String editorName) -> PGEditor"""
        return _propgrid.PropertyGridInterface_GetEditorByName(*args, **kwargs)

    GetEditorByName = staticmethod(GetEditorByName)
    def RefreshProperty(*args, **kwargs):
        """RefreshProperty(self, PGProperty p)"""
        return _propgrid.PropertyGridInterface_RefreshProperty(*args, **kwargs)

    def MapType(class_, factory):
        """Registers Python type/class to property mapping.

        factory: Property builder function/class.
        """
        global _type2property
        try:
            mappings = _type2property
        except NameError:
            raise AssertionError("call only after a propertygrid or "
                                 "manager instance constructed")

        mappings[class_] = factory

    def DoDefaultTypeMappings(self):
        """Map built-in properties."""
        global _type2property
        try:
            mappings = _type2property
            return
        except NameError:
            mappings = {}
            _type2property = mappings

        mappings[str] = StringProperty
        mappings[unicode] = StringProperty
        mappings[int] = IntProperty
        mappings[float] = FloatProperty
        mappings[bool] = BoolProperty
        mappings[list] = ArrayStringProperty
        mappings[tuple] = ArrayStringProperty
        mappings[wx.Font] = FontProperty
        mappings[wx.Colour] = ColourProperty
        # Not mapped in this version:
        # mappings[wx.Size] = SizeProperty
        # mappings[wx.Point] = PointProperty
        # mappings[wx.FontData] = FontDataProperty

    def DoDefaultValueTypeMappings(self):
        """Map pg value type ids to getter methods."""
        global _vt2getter
        try:
            vt2getter = _vt2getter
            return
        except NameError:
            vt2getter = {}
            _vt2getter = vt2getter

    def GetPropertyValues(self, dict_=None, as_strings=False,
                          inc_attributes=False):
        """Returns values in the grid.

        dict_: if not given, then a new one is created. dict_ can be an
            object as well, in which case its __dict__ is used.
        as_strings: if True, then string representations of values are
            fetched instead of native types. Useful for config and such.
        inc_attributes: if True, then property attributes are added as
            @<propname>@<attr>.

        Return value: dictionary with values. It is always a dictionary,
        so if dict_ was an object with a __dict__ attribute, then that
        attribute is returned.
        """
        if dict_ is None:
            dict_ = {}
        elif hasattr(dict_, '__dict__'):
            dict_ = dict_.__dict__

        if not as_strings:
            getter = self.GetPropertyValue
        else:
            getter = self.GetPropertyValueAsString

        it = self.GetVIterator(PG_ITERATE_PROPERTIES)
        while not it.AtEnd():
            p = it.GetProperty()
            name = p.GetName()
            dict_[name] = getter(p)
            if inc_attributes:
                attrs = p.GetAttributes()
                if attrs and len(attrs):
                    dict_['@%s@attr' % name] = attrs
            it.Next()

        return dict_

    GetValues = GetPropertyValues

    def SetPropertyValues(self, dict_):
        """Sets property values from dict_, which can be either a
        dictionary or an object with a __dict__ attribute.

        autofill: If true, keys with no relevant properties are
            auto-created. For more info, see AutoFill.

        Notes:
          * Keys starting with underscore are ignored.
          * Attributes can be set with entries named @<propname>@<attr>.
        """
        autofill = False

        if dict_ is None:
            dict_ = {}
        elif hasattr(dict_, '__dict__'):
            dict_ = dict_.__dict__

        attr_dicts = []

        def set_sub_obj(k0, dict_):
            for k, v in dict_.iteritems():
                if k[0] != '_':
                    if k.endswith('@attr'):
                        attr_dicts.append((k[1:-5], v))
                    else:
                        try:
                            self.SetPropertyValue(k, v)
                        except:
                            try:
                                if autofill:
                                    self._AutoFillOne(k0, k, v)
                                    continue
                            except:
                                if isinstance(v, dict):
                                    set_sub_obj(k, v)
                                elif hasattr(v, '__dict__'):
                                    set_sub_obj(k, v.__dict__)

            for k, v in attr_dicts:
                # Look the property up on self (a bare GetPropertyByName
                # call here would raise a NameError).
                p = self.GetPropertyByName(k)
                if not p:
                    raise AssertionError("No such property: '%s'" % k)
                for an, av in v.iteritems():
                    p.SetAttribute(an, av)

        cur_page = False
        is_manager = isinstance(self, PropertyGridManager)

        try:
            set_sub_obj(self.GetGrid().GetRoot(), dict_)
        except:
            import traceback
            traceback.print_exc()

        self.Refresh()

    SetValues = SetPropertyValues

    def _AutoFillMany(self, cat, dict_):
        for k, v in dict_.iteritems():
            self._AutoFillOne(cat, k, v)

    def _AutoFillOne(self, cat, k, v):
        global _type2property

        factory = _type2property.get(v.__class__, None)

        if factory:
            self.AppendIn(cat, factory(k, k, v))
        elif hasattr(v, '__dict__'):
            cat2 = self.AppendIn(cat, PropertyCategory(k))
            self._AutoFillMany(cat2, v.__dict__)
        elif isinstance(v, dict):
            cat2 = self.AppendIn(cat, PropertyCategory(k))
            self._AutoFillMany(cat2, v)
        elif not k.startswith('_'):
            raise AssertionError("member '%s' is of unregistered type/"
                                 "class '%s'" % (k, v.__class__))

    def AutoFill(self, obj, parent=None):
        """Clears properties and re-fills to match members and values of
        the given object or dictionary obj.
        """
        self.edited_objects[parent] = obj

        cur_page = False
        is_manager = isinstance(self, PropertyGridManager)

        if not parent:
            if is_manager:
                page = self.GetCurrentPage()
                page.Clear()
                parent = page.GetRoot()
            else:
                self.Clear()
                parent = self.GetGrid().GetRoot()
        else:
            it = self.GetIterator(PG_ITERATE_PROPERTIES, parent)
            it.Next()  # Skip the parent
            while not it.AtEnd():
                p = it.GetProperty()
                if not p.IsSomeParent(parent):
                    break
                self.DeleteProperty(p)
                name = p.GetName()
                it.Next()

        if not is_manager or page == self.GetCurrentPage():
            self.Freeze()
            cur_page = True

        try:
            self._AutoFillMany(parent, obj.__dict__)
        except:
            import traceback
            traceback.print_exc()

        if cur_page:
            self.Thaw()

    def RegisterEditor(self, editor, editorName=None):
        """Transform class into instance, if necessary."""
        if not isinstance(editor, PGEditor):
            editor = editor()
        if not editorName:
            editorName = editor.__class__.__name__
        try:
            self._editor_instances.append(editor)
        except:
            self._editor_instances = [editor]
        # Resolves to the module-level RegisterEditor function at call time.
        RegisterEditor(editor, editorName)

    def GetPropertyClientData(self, p):
        if isinstance(p, basestring):
            p = self.GetPropertyByName(p)
        return p.GetClientData()

    def SetPropertyClientData(self, p, data):
        if isinstance(p, basestring):
            p = self.GetPropertyByName(p)
        return p.SetClientData(data)

    def GetPyIterator(self, flags=PG_ITERATE_DEFAULT, firstProperty=None):
        """
        Returns a pythonic property iterator for a single `PropertyGrid`
        or page in `PropertyGridManager`. Arguments are the same as for
        `GetIterator`.

        The following example demonstrates iterating over absolutely all
        items in a single grid::

            iterator = propGrid.GetPyIterator(wx.propgrid.PG_ITERATE_ALL)
            for prop in iterator:
                print(prop)

        :see: `wx.propgrid.PropertyGridInterface.Properties`
              `wx.propgrid.PropertyGridInterface.Items`
        """
        it = self.GetIterator(flags, firstProperty)
        while not it.AtEnd():
            yield it.GetProperty()
            it.Next()

    def GetPyVIterator(self, flags=PG_ITERATE_DEFAULT):
        """
        Returns a pythonic property iterator for a single `PropertyGrid`
        or an entire `PropertyGridManager`. Arguments are the same as for
        `GetIterator`.

        The following example demonstrates iterating over absolutely all
        items in an entire `PropertyGridManager`::

            iterator = propGridManager.GetPyVIterator(wx.propgrid.PG_ITERATE_ALL)
            for prop in iterator:
                print(prop)

        :see: `wx.propgrid.PropertyGridInterface.Properties`
              `wx.propgrid.PropertyGridInterface.Items`
        """
        it = self.GetVIterator(flags)
        while not it.AtEnd():
            yield it.GetProperty()
            it.Next()

    @property
    def Properties(self):
        """
        This attribute is a pythonic iterator over all properties in this
        `PropertyGrid` property container. It will only skip categories
        and private child properties. Usage is simple::

            for prop in propGrid.Properties:
                print(prop)

        :see: `wx.propgrid.PropertyGridInterface.Items`
              `wx.propgrid.PropertyGridInterface.GetPyIterator`
        """
        it = self.GetVIterator(PG_ITERATE_NORMAL)
        while not it.AtEnd():
            yield it.GetProperty()
            it.Next()

    @property
    def Items(self):
        """
        This attribute is a pythonic iterator over all items in this
        `PropertyGrid` property container, excluding only private child
        properties. Usage is simple::

            for prop in propGrid.Items:
                print(prop)

        :see: `wx.propgrid.PropertyGridInterface.Properties`
              `wx.propgrid.PropertyGridInterface.GetPyIterator`
        """
        it = self.GetVIterator(PG_ITERATE_NORMAL | PG_ITERATE_CATEGORIES)
        while not it.AtEnd():
            yield it.GetProperty()
            it.Next()

_propgrid.PropertyGridInterface_swigregister(PropertyGridInterface)

def PropertyGridInterface_InitAllTypeHandlers(*args):
    """PropertyGridInterface_InitAllTypeHandlers()"""
    return _propgrid.PropertyGridInterface_InitAllTypeHandlers(*args)

def PropertyGridInterface_RegisterAdditionalEditors(*args):
    """PropertyGridInterface_RegisterAdditionalEditors()"""
    return _propgrid.PropertyGridInterface_RegisterAdditionalEditors(*args)

def PropertyGridInterface_SetBoolChoices(*args, **kwargs):
    """PropertyGridInterface_SetBoolChoices(String trueChoice, String falseChoice)"""
    return _propgrid.PropertyGridInterface_SetBoolChoices(*args, **kwargs)

def PropertyGridInterface_GetEditorByName(*args, **kwargs):
    """PropertyGridInterface_GetEditorByName(String editorName) -> PGEditor"""
    return _propgrid.PropertyGridInterface_GetEditorByName(*args, **kwargs)

PG_AUTO_SORT = _propgrid.PG_AUTO_SORT
PG_HIDE_CATEGORIES = _propgrid.PG_HIDE_CATEGORIES
PG_ALPHABETIC_MODE = _propgrid.PG_ALPHABETIC_MODE
PG_BOLD_MODIFIED = _propgrid.PG_BOLD_MODIFIED
PG_SPLITTER_AUTO_CENTER = _propgrid.PG_SPLITTER_AUTO_CENTER
PG_TOOLTIPS = _propgrid.PG_TOOLTIPS
PG_HIDE_MARGIN = _propgrid.PG_HIDE_MARGIN
PG_STATIC_SPLITTER = _propgrid.PG_STATIC_SPLITTER
PG_STATIC_LAYOUT = _propgrid.PG_STATIC_LAYOUT
PG_LIMITED_EDITING = _propgrid.PG_LIMITED_EDITING
PG_TOOLBAR = _propgrid.PG_TOOLBAR
PG_DESCRIPTION = _propgrid.PG_DESCRIPTION
PG_NO_INTERNAL_BORDER = _propgrid.PG_NO_INTERNAL_BORDER
PG_EX_INIT_NOCAT = _propgrid.PG_EX_INIT_NOCAT
PG_EX_NO_FLAT_TOOLBAR = _propgrid.PG_EX_NO_FLAT_TOOLBAR
PG_EX_MODE_BUTTONS = _propgrid.PG_EX_MODE_BUTTONS
PG_EX_HELP_AS_TOOLTIPS = _propgrid.PG_EX_HELP_AS_TOOLTIPS
PG_EX_NATIVE_DOUBLE_BUFFERING = _propgrid.PG_EX_NATIVE_DOUBLE_BUFFERING
PG_EX_AUTO_UNSPECIFIED_VALUES = _propgrid.PG_EX_AUTO_UNSPECIFIED_VALUES
PG_EX_WRITEONLY_BUILTIN_ATTRIBUTES = _propgrid.PG_EX_WRITEONLY_BUILTIN_ATTRIBUTES
PG_EX_HIDE_PAGE_BUTTONS = _propgrid.PG_EX_HIDE_PAGE_BUTTONS
PG_EX_MULTIPLE_SELECTION = _propgrid.PG_EX_MULTIPLE_SELECTION
PG_EX_ENABLE_TLP_TRACKING = _propgrid.PG_EX_ENABLE_TLP_TRACKING
PG_EX_NO_TOOLBAR_DIVIDER = _propgrid.PG_EX_NO_TOOLBAR_DIVIDER
PG_EX_TOOLBAR_SEPARATOR = _propgrid.PG_EX_TOOLBAR_SEPARATOR
PG_DEFAULT_STYLE = _propgrid.PG_DEFAULT_STYLE
PGMAN_DEFAULT_STYLE = _propgrid.PGMAN_DEFAULT_STYLE
PG_SUBID1 = _propgrid.PG_SUBID1
PG_SUBID2 = _propgrid.PG_SUBID2
PG_SUBID_TEMP1 = _propgrid.PG_SUBID_TEMP1
class PGCommonValue(object):
    """Proxy of C++ PGCommonValue class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self, String label, renderer) -> PGCommonValue"""
        _propgrid.PGCommonValue_swiginit(self,_propgrid.new_PGCommonValue(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGCommonValue
    __del__ = lambda self : None;
    def GetEditableText(*args, **kwargs):
        """GetEditableText(self) -> String"""
        return _propgrid.PGCommonValue_GetEditableText(*args, **kwargs)

    def GetLabel(*args, **kwargs):
        """GetLabel(self) -> String"""
        return _propgrid.PGCommonValue_GetLabel(*args, **kwargs)

    def GetRenderer(*args, **kwargs):
        """GetRenderer(self)"""
        return _propgrid.PGCommonValue_GetRenderer(*args, **kwargs)
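# Illustrative sketch (assumed usage, not part of the generated bindings):
# the pythonic helpers defined in PropertyGridInterface above. The parent
# window and the "Name"/"Count" properties are hypothetical; StringProperty
# and IntProperty are the builders registered by DoDefaultTypeMappings.
def _property_values_demo(parent):
    pg = PropertyGrid(parent)
    pg.Append(StringProperty("Name", value="example"))
    pg.Append(IntProperty("Count", value=3))

    # GetPropertyValues gathers every value into a plain dict; with
    # as_strings=True string representations are fetched instead.
    values = pg.GetPropertyValues(as_strings=True)

    # SetPropertyValues writes a dict (or an object's __dict__) back.
    pg.SetPropertyValues({"Count": 5})

    # The Properties attribute iterates the grid pythonically, skipping
    # categories and private children.
    for prop in pg.Properties:
        print prop.GetName(), pg.GetPropertyValueAsString(prop)

    return values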
_propgrid.PGCommonValue_swigregister(PGCommonValue)

PG_VFB_STAY_IN_PROPERTY = _propgrid.PG_VFB_STAY_IN_PROPERTY
PG_VFB_BEEP = _propgrid.PG_VFB_BEEP
PG_VFB_MARK_CELL = _propgrid.PG_VFB_MARK_CELL
PG_VFB_SHOW_MESSAGE = _propgrid.PG_VFB_SHOW_MESSAGE
PG_VFB_SHOW_MESSAGEBOX = _propgrid.PG_VFB_SHOW_MESSAGEBOX
PG_VFB_SHOW_MESSAGE_ON_STATUSBAR = _propgrid.PG_VFB_SHOW_MESSAGE_ON_STATUSBAR
PG_VFB_DEFAULT = _propgrid.PG_VFB_DEFAULT
PG_VFB_UNDEFINED = _propgrid.PG_VFB_UNDEFINED
class PGValidationInfo(object):
    """Proxy of C++ PGValidationInfo class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGValidationInfo"""
        _propgrid.PGValidationInfo_swiginit(self,_propgrid.new_PGValidationInfo(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGValidationInfo
    __del__ = lambda self : None;
    def GetFailureBehavior(*args, **kwargs):
        """GetFailureBehavior(self) -> char"""
        return _propgrid.PGValidationInfo_GetFailureBehavior(*args, **kwargs)

    def GetFailureMessage(*args, **kwargs):
        """GetFailureMessage(self) -> String"""
        return _propgrid.PGValidationInfo_GetFailureMessage(*args, **kwargs)

    def GetValue(*args, **kwargs):
        """GetValue(self) -> wxVariant"""
        return _propgrid.PGValidationInfo_GetValue(*args, **kwargs)

    def SetFailureBehavior(*args, **kwargs):
        """SetFailureBehavior(self, char failureBehavior)"""
        return _propgrid.PGValidationInfo_SetFailureBehavior(*args, **kwargs)

    def SetFailureMessage(*args, **kwargs):
        """SetFailureMessage(self, String message)"""
        return _propgrid.PGValidationInfo_SetFailureMessage(*args, **kwargs)

_propgrid.PGValidationInfo_swigregister(PGValidationInfo)
PG_ACTION_INVALID = _propgrid.PG_ACTION_INVALID
PG_ACTION_NEXT_PROPERTY = _propgrid.PG_ACTION_NEXT_PROPERTY
PG_ACTION_PREV_PROPERTY = _propgrid.PG_ACTION_PREV_PROPERTY
PG_ACTION_EXPAND_PROPERTY = _propgrid.PG_ACTION_EXPAND_PROPERTY
PG_ACTION_COLLAPSE_PROPERTY = _propgrid.PG_ACTION_COLLAPSE_PROPERTY
PG_ACTION_CANCEL_EDIT = _propgrid.PG_ACTION_CANCEL_EDIT
PG_ACTION_EDIT = _propgrid.PG_ACTION_EDIT
PG_ACTION_PRESS_BUTTON = _propgrid.PG_ACTION_PRESS_BUTTON
PG_ACTION_MAX = _propgrid.PG_ACTION_MAX
PG_SEL_FOCUS = _propgrid.PG_SEL_FOCUS
PG_SEL_FORCE = _propgrid.PG_SEL_FORCE
PG_SEL_NONVISIBLE = _propgrid.PG_SEL_NONVISIBLE
PG_SEL_NOVALIDATE = _propgrid.PG_SEL_NOVALIDATE
PG_SEL_DELETING = _propgrid.PG_SEL_DELETING
PG_SEL_SETUNSPEC = _propgrid.PG_SEL_SETUNSPEC
PG_SEL_DIALOGVAL = _propgrid.PG_SEL_DIALOGVAL
PG_SEL_DONT_SEND_EVENT = _propgrid.PG_SEL_DONT_SEND_EVENT
PG_SEL_NO_REFRESH = _propgrid.PG_SEL_NO_REFRESH
PG_SPLITTER_REFRESH = _propgrid.PG_SPLITTER_REFRESH
PG_SPLITTER_ALL_PAGES = _propgrid.PG_SPLITTER_ALL_PAGES
PG_SPLITTER_FROM_EVENT = _propgrid.PG_SPLITTER_FROM_EVENT
PG_SPLITTER_FROM_AUTO_CENTER = _propgrid.PG_SPLITTER_FROM_AUTO_CENTER
PG_FL_INITIALIZED = _propgrid.PG_FL_INITIALIZED
PG_FL_ACTIVATION_BY_CLICK = _propgrid.PG_FL_ACTIVATION_BY_CLICK
PG_FL_DONT_CENTER_SPLITTER = _propgrid.PG_FL_DONT_CENTER_SPLITTER
PG_FL_FOCUSED = _propgrid.PG_FL_FOCUSED
PG_FL_MOUSE_CAPTURED = _propgrid.PG_FL_MOUSE_CAPTURED
PG_FL_MOUSE_INSIDE = _propgrid.PG_FL_MOUSE_INSIDE
PG_FL_VALUE_MODIFIED = _propgrid.PG_FL_VALUE_MODIFIED
PG_FL_PRIMARY_FILLS_ENTIRE = _propgrid.PG_FL_PRIMARY_FILLS_ENTIRE
PG_FL_CUR_USES_CUSTOM_IMAGE = _propgrid.PG_FL_CUR_USES_CUSTOM_IMAGE
PG_FL_CELL_OVERRIDES_SEL = _propgrid.PG_FL_CELL_OVERRIDES_SEL
PG_FL_SCROLLED = _propgrid.PG_FL_SCROLLED
PG_FL_ADDING_HIDEABLES = _propgrid.PG_FL_ADDING_HIDEABLES
PG_FL_NOSTATUSBARHELP = _propgrid.PG_FL_NOSTATUSBARHELP
PG_FL_CREATEDSTATE = _propgrid.PG_FL_CREATEDSTATE
PG_FL_SCROLLBAR_DETECTED = _propgrid.PG_FL_SCROLLBAR_DETECTED
PG_FL_DESC_REFRESH_REQUIRED = _propgrid.PG_FL_DESC_REFRESH_REQUIRED
PG_FL_IN_MANAGER = _propgrid.PG_FL_IN_MANAGER
PG_FL_GOOD_SIZE_SET = _propgrid.PG_FL_GOOD_SIZE_SET
PG_FL_IN_SELECT_PROPERTY = _propgrid.PG_FL_IN_SELECT_PROPERTY
PG_FL_STRING_IN_STATUSBAR = _propgrid.PG_FL_STRING_IN_STATUSBAR
PG_FL_CATMODE_AUTO_SORT = _propgrid.PG_FL_CATMODE_AUTO_SORT
PG_MAN_FL_PAGE_INSERTED = _propgrid.PG_MAN_FL_PAGE_INSERTED
PG_FL_ABNORMAL_EDITOR = _propgrid.PG_FL_ABNORMAL_EDITOR
PG_FL_IN_HANDLECUSTOMEDITOREVENT = _propgrid.PG_FL_IN_HANDLECUSTOMEDITOREVENT
PG_FL_VALUE_CHANGE_IN_EVENT = _propgrid.PG_FL_VALUE_CHANGE_IN_EVENT
PG_FL_FIXED_WIDTH_EDITOR = _propgrid.PG_FL_FIXED_WIDTH_EDITOR
PG_FL_HAS_VIRTUAL_WIDTH = _propgrid.PG_FL_HAS_VIRTUAL_WIDTH
PG_FL_RECALCULATING_VIRTUAL_SIZE = _propgrid.PG_FL_RECALCULATING_VIRTUAL_SIZE
class PropertyGrid(_core.Control,_windows.ScrollHelper,PropertyGridInterface):
    """Proxy of C++ PropertyGrid class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition,
            Size size=DefaultSize, long style=(0),
            String name=wxPropertyGridNameStr) -> PropertyGrid
        """
        _propgrid.PropertyGrid_swiginit(self,_propgrid.new_PropertyGrid(*args, **kwargs))
        self._setOORInfo(self)
        self.DoDefaultTypeMappings()
        self.edited_objects = {}
        self.DoDefaultValueTypeMappings()
        if not hasattr(self.__class__,'_vt2setter'):
            self.__class__._vt2setter = {}

    __swig_destroy__ = _propgrid.delete_PropertyGrid
    __del__ = lambda self : None;
    def AddActionTrigger(*args, **kwargs):
        """AddActionTrigger(self, int action, int keycode, int modifiers=0)"""
        return _propgrid.PropertyGrid_AddActionTrigger(*args, **kwargs)

    def DedicateKey(*args, **kwargs):
        """DedicateKey(self, int keycode)"""
        return _propgrid.PropertyGrid_DedicateKey(*args, **kwargs)

    def AutoGetTranslation(*args, **kwargs):
        """AutoGetTranslation(bool enable)"""
        return _propgrid.PropertyGrid_AutoGetTranslation(*args, **kwargs)

    AutoGetTranslation = staticmethod(AutoGetTranslation)
    def ChangePropertyValue(*args, **kwargs):
        """ChangePropertyValue(self, PGPropArg id, wxVariant newValue) -> bool"""
        return _propgrid.PropertyGrid_ChangePropertyValue(*args, **kwargs)

    def CenterSplitter(*args, **kwargs):
        """CenterSplitter(self, bool enableAutoResizing=False)"""
        return _propgrid.PropertyGrid_CenterSplitter(*args, **kwargs)

    def ClearActionTriggers(*args, **kwargs):
        """ClearActionTriggers(self, int action)"""
        return _propgrid.PropertyGrid_ClearActionTriggers(*args, **kwargs)

    def CommitChangesFromEditor(*args, **kwargs):
        """CommitChangesFromEditor(self, int flags=0) -> bool"""
        return _propgrid.PropertyGrid_CommitChangesFromEditor(*args, **kwargs)

    def Create(*args, **kwargs):
        """
        Create(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition,
            Size size=DefaultSize, long style=(0),
            String name=wxPropertyGridNameStr) -> bool
        """
        return _propgrid.PropertyGrid_Create(*args, **kwargs)

    def EditorsValueWasModified(*args, **kwargs):
        """EditorsValueWasModified(self)"""
        return _propgrid.PropertyGrid_EditorsValueWasModified(*args, **kwargs)

    def EditorsValueWasNotModified(*args, **kwargs):
        """EditorsValueWasNotModified(self)"""
        return _propgrid.PropertyGrid_EditorsValueWasNotModified(*args, **kwargs)

    def EnableCategories(*args, **kwargs):
"""EnableCategories(self, bool enable) -> bool""" return _propgrid.PropertyGrid_EnableCategories(*args, **kwargs) def EnsureVisible(*args, **kwargs): """EnsureVisible(self, PGPropArg id) -> bool""" return _propgrid.PropertyGrid_EnsureVisible(*args, **kwargs) def FitColumns(*args, **kwargs): """FitColumns(self) -> Size""" return _propgrid.PropertyGrid_FitColumns(*args, **kwargs) def GetPanel(*args, **kwargs): """GetPanel(self) -> Window""" return _propgrid.PropertyGrid_GetPanel(*args, **kwargs) def GetCaptionBackgroundColour(*args, **kwargs): """GetCaptionBackgroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetCaptionBackgroundColour(*args, **kwargs) def GetCaptionFont(*args): """ GetCaptionFont(self) -> Font GetCaptionFont(self) -> Font """ return _propgrid.PropertyGrid_GetCaptionFont(*args) def GetCaptionForegroundColour(*args, **kwargs): """GetCaptionForegroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetCaptionForegroundColour(*args, **kwargs) def GetCellBackgroundColour(*args, **kwargs): """GetCellBackgroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetCellBackgroundColour(*args, **kwargs) def GetCellDisabledTextColour(*args, **kwargs): """GetCellDisabledTextColour(self) -> Colour""" return _propgrid.PropertyGrid_GetCellDisabledTextColour(*args, **kwargs) def GetCellTextColour(*args, **kwargs): """GetCellTextColour(self) -> Colour""" return _propgrid.PropertyGrid_GetCellTextColour(*args, **kwargs) def GetColumnCount(*args, **kwargs): """GetColumnCount(self) -> int""" return _propgrid.PropertyGrid_GetColumnCount(*args, **kwargs) def GetEmptySpaceColour(*args, **kwargs): """GetEmptySpaceColour(self) -> Colour""" return _propgrid.PropertyGrid_GetEmptySpaceColour(*args, **kwargs) def GetFontHeight(*args, **kwargs): """GetFontHeight(self) -> int""" return _propgrid.PropertyGrid_GetFontHeight(*args, **kwargs) def GetGrid(*args, **kwargs): """GetGrid(self) -> PropertyGrid""" return _propgrid.PropertyGrid_GetGrid(*args, **kwargs) def GetImageRect(*args, **kwargs): """GetImageRect(self, PGProperty p, int item) -> Rect""" return _propgrid.PropertyGrid_GetImageRect(*args, **kwargs) def GetImageSize(*args, **kwargs): """GetImageSize(self, PGProperty p=None, int item=-1) -> Size""" return _propgrid.PropertyGrid_GetImageSize(*args, **kwargs) def GetLastItem(*args): """ GetLastItem(self, int flags=PG_ITERATE_DEFAULT) -> PGProperty GetLastItem(self, int flags=PG_ITERATE_DEFAULT) -> PGProperty """ return _propgrid.PropertyGrid_GetLastItem(*args) def GetLineColour(*args, **kwargs): """GetLineColour(self) -> Colour""" return _propgrid.PropertyGrid_GetLineColour(*args, **kwargs) def GetMarginColour(*args, **kwargs): """GetMarginColour(self) -> Colour""" return _propgrid.PropertyGrid_GetMarginColour(*args, **kwargs) def GetMarginWidth(*args, **kwargs): """GetMarginWidth(self) -> int""" return _propgrid.PropertyGrid_GetMarginWidth(*args, **kwargs) def GetUncommittedPropertyValue(*args, **kwargs): """GetUncommittedPropertyValue(self) -> wxVariant""" return _propgrid.PropertyGrid_GetUncommittedPropertyValue(*args, **kwargs) def GetRoot(*args, **kwargs): """GetRoot(self) -> PGProperty""" return _propgrid.PropertyGrid_GetRoot(*args, **kwargs) def GetRowHeight(*args, **kwargs): """GetRowHeight(self) -> int""" return _propgrid.PropertyGrid_GetRowHeight(*args, **kwargs) def GetSelectedProperty(*args, **kwargs): """GetSelectedProperty(self) -> PGProperty""" return _propgrid.PropertyGrid_GetSelectedProperty(*args, **kwargs) def GetSelectionBackgroundColour(*args, **kwargs): 
"""GetSelectionBackgroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetSelectionBackgroundColour(*args, **kwargs) def GetSelectionForegroundColour(*args, **kwargs): """GetSelectionForegroundColour(self) -> Colour""" return _propgrid.PropertyGrid_GetSelectionForegroundColour(*args, **kwargs) def GetSplitterPosition(*args, **kwargs): """GetSplitterPosition(self, int splitterIndex=0) -> int""" return _propgrid.PropertyGrid_GetSplitterPosition(*args, **kwargs) def GetEditorTextCtrl(*args, **kwargs): """GetEditorTextCtrl(self) -> wxTextCtrl""" return _propgrid.PropertyGrid_GetEditorTextCtrl(*args, **kwargs) def GetValidationInfo(*args, **kwargs): """GetValidationInfo(self) -> PGValidationInfo""" return _propgrid.PropertyGrid_GetValidationInfo(*args, **kwargs) def GetVerticalSpacing(*args, **kwargs): """GetVerticalSpacing(self) -> int""" return _propgrid.PropertyGrid_GetVerticalSpacing(*args, **kwargs) def IsEditorFocused(*args, **kwargs): """IsEditorFocused(self) -> bool""" return _propgrid.PropertyGrid_IsEditorFocused(*args, **kwargs) def IsEditorsValueModified(*args, **kwargs): """IsEditorsValueModified(self) -> bool""" return _propgrid.PropertyGrid_IsEditorsValueModified(*args, **kwargs) def HitTest(*args, **kwargs): """ HitTest(self, Point pt) -> PropertyGridHitTestResult Test where the given (in client coords) point lies """ return _propgrid.PropertyGrid_HitTest(*args, **kwargs) def IsAnyModified(*args, **kwargs): """IsAnyModified(self) -> bool""" return _propgrid.PropertyGrid_IsAnyModified(*args, **kwargs) def IsFrozen(*args, **kwargs): """ IsFrozen(self) -> bool Returns ``True`` if the window has been frozen and not thawed yet. :see: `Freeze` and `Thaw` """ return _propgrid.PropertyGrid_IsFrozen(*args, **kwargs) def OnTLPChanging(*args, **kwargs): """OnTLPChanging(self, Window newTLP)""" return _propgrid.PropertyGrid_OnTLPChanging(*args, **kwargs) def ResetColours(*args, **kwargs): """ResetColours(self)""" return _propgrid.PropertyGrid_ResetColours(*args, **kwargs) def ResetColumnSizes(*args, **kwargs): """ResetColumnSizes(self, bool enableAutoResizing=False)""" return _propgrid.PropertyGrid_ResetColumnSizes(*args, **kwargs) def SelectProperty(*args, **kwargs): """SelectProperty(self, PGPropArg id, bool focus=False) -> bool""" return _propgrid.PropertyGrid_SelectProperty(*args, **kwargs) def SetSelection(*args, **kwargs): """SetSelection(self, wxArrayPGProperty newSelection)""" return _propgrid.PropertyGrid_SetSelection(*args, **kwargs) def AddToSelection(*args, **kwargs): """AddToSelection(self, PGPropArg id) -> bool""" return _propgrid.PropertyGrid_AddToSelection(*args, **kwargs) def RemoveFromSelection(*args, **kwargs): """RemoveFromSelection(self, PGPropArg id) -> bool""" return _propgrid.PropertyGrid_RemoveFromSelection(*args, **kwargs) def MakeColumnEditable(*args, **kwargs): """MakeColumnEditable(self, int column, bool editable=True)""" return _propgrid.PropertyGrid_MakeColumnEditable(*args, **kwargs) def BeginLabelEdit(*args, **kwargs): """BeginLabelEdit(self, int column=0)""" return _propgrid.PropertyGrid_BeginLabelEdit(*args, **kwargs) def EndLabelEdit(*args, **kwargs): """EndLabelEdit(self, bool commit=True)""" return _propgrid.PropertyGrid_EndLabelEdit(*args, **kwargs) def GetLabelEditor(*args, **kwargs): """GetLabelEditor(self) -> wxTextCtrl""" return _propgrid.PropertyGrid_GetLabelEditor(*args, **kwargs) def SetCaptionBackgroundColour(*args, **kwargs): """SetCaptionBackgroundColour(self, Colour col)""" return 
_propgrid.PropertyGrid_SetCaptionBackgroundColour(*args, **kwargs) def SetCaptionTextColour(*args, **kwargs): """SetCaptionTextColour(self, Colour col)""" return _propgrid.PropertyGrid_SetCaptionTextColour(*args, **kwargs) def SetCellBackgroundColour(*args, **kwargs): """SetCellBackgroundColour(self, Colour col)""" return _propgrid.PropertyGrid_SetCellBackgroundColour(*args, **kwargs) def SetCellDisabledTextColour(*args, **kwargs): """SetCellDisabledTextColour(self, Colour col)""" return _propgrid.PropertyGrid_SetCellDisabledTextColour(*args, **kwargs) def SetCellTextColour(*args, **kwargs): """SetCellTextColour(self, Colour col)""" return _propgrid.PropertyGrid_SetCellTextColour(*args, **kwargs) def SetColumnCount(*args, **kwargs): """SetColumnCount(self, int colCount)""" return _propgrid.PropertyGrid_SetColumnCount(*args, **kwargs) def SetCurrentCategory(*args, **kwargs): """SetCurrentCategory(self, PGPropArg id)""" return _propgrid.PropertyGrid_SetCurrentCategory(*args, **kwargs) def SetEmptySpaceColour(*args, **kwargs): """SetEmptySpaceColour(self, Colour col)""" return _propgrid.PropertyGrid_SetEmptySpaceColour(*args, **kwargs) def SetLineColour(*args, **kwargs): """SetLineColour(self, Colour col)""" return _propgrid.PropertyGrid_SetLineColour(*args, **kwargs) def SetMarginColour(*args, **kwargs): """SetMarginColour(self, Colour col)""" return _propgrid.PropertyGrid_SetMarginColour(*args, **kwargs) def SetSelectionBackgroundColour(*args, **kwargs): """SetSelectionBackgroundColour(self, Colour col)""" return _propgrid.PropertyGrid_SetSelectionBackgroundColour(*args, **kwargs) def SetSelectionTextColour(*args, **kwargs): """SetSelectionTextColour(self, Colour col)""" return _propgrid.PropertyGrid_SetSelectionTextColour(*args, **kwargs) def SetSplitterPosition(*args, **kwargs): """SetSplitterPosition(self, int newXPos, int col=0)""" return _propgrid.PropertyGrid_SetSplitterPosition(*args, **kwargs) def SetSortFunction(*args, **kwargs): """SetSortFunction(self, PGSortCallback sortFunction)""" return _propgrid.PropertyGrid_SetSortFunction(*args, **kwargs) def GetSortFunction(*args, **kwargs): """GetSortFunction(self) -> PGSortCallback""" return _propgrid.PropertyGrid_GetSortFunction(*args, **kwargs) def SetUnspecifiedValueAppearance(*args, **kwargs): """SetUnspecifiedValueAppearance(self, PGCell cell)""" return _propgrid.PropertyGrid_SetUnspecifiedValueAppearance(*args, **kwargs) def GetUnspecifiedValueAppearance(*args, **kwargs): """GetUnspecifiedValueAppearance(self) -> PGCell""" return _propgrid.PropertyGrid_GetUnspecifiedValueAppearance(*args, **kwargs) def GetUnspecifiedValueText(*args, **kwargs): """GetUnspecifiedValueText(self, int argFlags=0) -> String""" return _propgrid.PropertyGrid_GetUnspecifiedValueText(*args, **kwargs) def SetVirtualWidth(*args, **kwargs): """SetVirtualWidth(self, int width)""" return _propgrid.PropertyGrid_SetVirtualWidth(*args, **kwargs) def SetSplitterLeft(*args, **kwargs): """SetSplitterLeft(self, bool privateChildrenToo=False)""" return _propgrid.PropertyGrid_SetSplitterLeft(*args, **kwargs) def SetVerticalSpacing(*args, **kwargs): """SetVerticalSpacing(self, int vspacing)""" return _propgrid.PropertyGrid_SetVerticalSpacing(*args, **kwargs) def ShowPropertyError(*args, **kwargs): """ShowPropertyError(self, PGPropArg id, String msg)""" return _propgrid.PropertyGrid_ShowPropertyError(*args, **kwargs) def HasVirtualWidth(*args, **kwargs): """HasVirtualWidth(self) -> bool""" return _propgrid.PropertyGrid_HasVirtualWidth(*args, **kwargs) def 
GetCommonValue(*args, **kwargs): """GetCommonValue(self, int i) -> PGCommonValue""" return _propgrid.PropertyGrid_GetCommonValue(*args, **kwargs) def GetCommonValueCount(*args, **kwargs): """GetCommonValueCount(self) -> int""" return _propgrid.PropertyGrid_GetCommonValueCount(*args, **kwargs) def GetCommonValueLabel(*args, **kwargs): """GetCommonValueLabel(self, int i) -> String""" return _propgrid.PropertyGrid_GetCommonValueLabel(*args, **kwargs) def GetUnspecifiedCommonValue(*args, **kwargs): """GetUnspecifiedCommonValue(self) -> int""" return _propgrid.PropertyGrid_GetUnspecifiedCommonValue(*args, **kwargs) def SetUnspecifiedCommonValue(*args, **kwargs): """SetUnspecifiedCommonValue(self, int index)""" return _propgrid.PropertyGrid_SetUnspecifiedCommonValue(*args, **kwargs) def GenerateEditorButton(*args, **kwargs): """GenerateEditorButton(self, Point pos, Size sz) -> Window""" return _propgrid.PropertyGrid_GenerateEditorButton(*args, **kwargs) def FixPosForTextCtrl(*args, **kwargs): """FixPosForTextCtrl(self, Window ctrl, int forColumn=1, Point offset=wxPoint(0, 0))""" return _propgrid.PropertyGrid_FixPosForTextCtrl(*args, **kwargs) def GenerateEditorTextCtrl(*args, **kwargs): """ GenerateEditorTextCtrl(self, Point pos, Size sz, String value, Window secondary, int extraStyle=0, int maxLen=0, int forColumn=1) -> Window """ return _propgrid.PropertyGrid_GenerateEditorTextCtrl(*args, **kwargs) def GenerateEditorTextCtrlAndButton(*args, **kwargs): """ GenerateEditorTextCtrlAndButton(self, Point pos, Size sz, Window psecondary, int limited_editing, PGProperty property) -> Window """ return _propgrid.PropertyGrid_GenerateEditorTextCtrlAndButton(*args, **kwargs) def GetGoodEditorDialogPosition(*args, **kwargs): """GetGoodEditorDialogPosition(self, PGProperty p, Size sz) -> Point""" return _propgrid.PropertyGrid_GetGoodEditorDialogPosition(*args, **kwargs) def ExpandEscapeSequences(*args, **kwargs): """ExpandEscapeSequences(String dst_str, String src_str) -> String""" return _propgrid.PropertyGrid_ExpandEscapeSequences(*args, **kwargs) ExpandEscapeSequences = staticmethod(ExpandEscapeSequences) def CreateEscapeSequences(*args, **kwargs): """CreateEscapeSequences(String dst_str, String src_str) -> String""" return _propgrid.PropertyGrid_CreateEscapeSequences(*args, **kwargs) CreateEscapeSequences = staticmethod(CreateEscapeSequences) def GetPropertyRect(*args, **kwargs): """GetPropertyRect(self, PGProperty p1, PGProperty p2) -> Rect""" return _propgrid.PropertyGrid_GetPropertyRect(*args, **kwargs) def GetEditorControl(*args, **kwargs): """GetEditorControl(self) -> Window""" return _propgrid.PropertyGrid_GetEditorControl(*args, **kwargs) def GetPrimaryEditor(*args, **kwargs): """GetPrimaryEditor(self) -> Window""" return _propgrid.PropertyGrid_GetPrimaryEditor(*args, **kwargs) def GetEditorControlSecondary(*args, **kwargs): """GetEditorControlSecondary(self) -> Window""" return _propgrid.PropertyGrid_GetEditorControlSecondary(*args, **kwargs) def RefreshEditor(*args, **kwargs): """RefreshEditor(self)""" return _propgrid.PropertyGrid_RefreshEditor(*args, **kwargs) def HandleCustomEditorEvent(*args, **kwargs): """HandleCustomEditorEvent(self, Event event) -> bool""" return _propgrid.PropertyGrid_HandleCustomEditorEvent(*args, **kwargs) def GetInternalFlags(*args, **kwargs): """GetInternalFlags(self) -> long""" return _propgrid.PropertyGrid_GetInternalFlags(*args, **kwargs) def HasInternalFlag(*args, **kwargs): """HasInternalFlag(self, long flag) -> bool""" return 
_propgrid.PropertyGrid_HasInternalFlag(*args, **kwargs) def SetInternalFlag(*args, **kwargs): """SetInternalFlag(self, long flag)""" return _propgrid.PropertyGrid_SetInternalFlag(*args, **kwargs) def ClearInternalFlag(*args, **kwargs): """ClearInternalFlag(self, long flag)""" return _propgrid.PropertyGrid_ClearInternalFlag(*args, **kwargs) def DoubleToString(*args, **kwargs): """ DoubleToString(String target, double value, int precision, bool removeZeroes, String precTemplate=None) -> String """ return _propgrid.PropertyGrid_DoubleToString(*args, **kwargs) DoubleToString = staticmethod(DoubleToString) def ValueChangeInEvent(*args, **kwargs): """ValueChangeInEvent(self, wxVariant variant)""" return _propgrid.PropertyGrid_ValueChangeInEvent(*args, **kwargs) def WasValueChangedInEvent(*args, **kwargs): """WasValueChangedInEvent(self) -> bool""" return _propgrid.PropertyGrid_WasValueChangedInEvent(*args, **kwargs) def IsMainButtonEvent(*args, **kwargs): """IsMainButtonEvent(self, Event event) -> bool""" return _propgrid.PropertyGrid_IsMainButtonEvent(*args, **kwargs) def DoHidePropertyError(*args, **kwargs): """DoHidePropertyError(self, PGProperty property)""" return _propgrid.PropertyGrid_DoHidePropertyError(*args, **kwargs) def GetSpacingY(*args, **kwargs): """GetSpacingY(self) -> int""" return _propgrid.PropertyGrid_GetSpacingY(*args, **kwargs) def SetupTextCtrlValue(*args, **kwargs): """SetupTextCtrlValue(self, String text)""" return _propgrid.PropertyGrid_SetupTextCtrlValue(*args, **kwargs) def UnfocusEditor(*args, **kwargs): """UnfocusEditor(self) -> bool""" return _propgrid.PropertyGrid_UnfocusEditor(*args, **kwargs) def GetPropertyDefaultCell(*args, **kwargs): """GetPropertyDefaultCell(self) -> PGCell""" return _propgrid.PropertyGrid_GetPropertyDefaultCell(*args, **kwargs) def GetCategoryDefaultCell(*args, **kwargs): """GetCategoryDefaultCell(self) -> PGCell""" return _propgrid.PropertyGrid_GetCategoryDefaultCell(*args, **kwargs) def GetItemAtY(*args, **kwargs): """GetItemAtY(self, int y) -> PGProperty""" return _propgrid.PropertyGrid_GetItemAtY(*args, **kwargs) _propgrid.PropertyGrid_swigregister(PropertyGrid) def PropertyGrid_AutoGetTranslation(*args, **kwargs): """PropertyGrid_AutoGetTranslation(bool enable)""" return _propgrid.PropertyGrid_AutoGetTranslation(*args, **kwargs) def PropertyGrid_ExpandEscapeSequences(*args, **kwargs): """PropertyGrid_ExpandEscapeSequences(String dst_str, String src_str) -> String""" return _propgrid.PropertyGrid_ExpandEscapeSequences(*args, **kwargs) def PropertyGrid_CreateEscapeSequences(*args, **kwargs): """PropertyGrid_CreateEscapeSequences(String dst_str, String src_str) -> String""" return _propgrid.PropertyGrid_CreateEscapeSequences(*args, **kwargs) def PropertyGrid_DoubleToString(*args, **kwargs): """ PropertyGrid_DoubleToString(String target, double value, int precision, bool removeZeroes, String precTemplate=None) -> String """ return _propgrid.PropertyGrid_DoubleToString(*args, **kwargs) PG_BASE_EVT_PRE_ID = _propgrid.PG_BASE_EVT_PRE_ID wxEVT_PG_SELECTED = _propgrid.wxEVT_PG_SELECTED wxEVT_PG_CHANGING = _propgrid.wxEVT_PG_CHANGING wxEVT_PG_CHANGED = _propgrid.wxEVT_PG_CHANGED wxEVT_PG_HIGHLIGHTED = _propgrid.wxEVT_PG_HIGHLIGHTED wxEVT_PG_RIGHT_CLICK = _propgrid.wxEVT_PG_RIGHT_CLICK wxEVT_PG_PAGE_CHANGED = _propgrid.wxEVT_PG_PAGE_CHANGED wxEVT_PG_ITEM_COLLAPSED = _propgrid.wxEVT_PG_ITEM_COLLAPSED wxEVT_PG_ITEM_EXPANDED = _propgrid.wxEVT_PG_ITEM_EXPANDED wxEVT_PG_DOUBLE_CLICK = _propgrid.wxEVT_PG_DOUBLE_CLICK wxEVT_PG_LABEL_EDIT_BEGIN = 
wxEVT_PG_LABEL_EDIT_ENDING = _propgrid.wxEVT_PG_LABEL_EDIT_ENDING
wxEVT_PG_COL_BEGIN_DRAG = _propgrid.wxEVT_PG_COL_BEGIN_DRAG
wxEVT_PG_COL_DRAGGING = _propgrid.wxEVT_PG_COL_DRAGGING
wxEVT_PG_COL_END_DRAG = _propgrid.wxEVT_PG_COL_END_DRAG

class PropertyGridEvent(_core.CommandEvent):
    """Proxy of C++ PropertyGridEvent class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self, EventType commandType=0, int id=0) -> PropertyGridEvent"""
        _propgrid.PropertyGridEvent_swiginit(self,_propgrid.new_PropertyGridEvent(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PropertyGridEvent
    __del__ = lambda self : None;

    def GetColumn(*args, **kwargs):
        """GetColumn(self) -> int"""
        return _propgrid.PropertyGridEvent_GetColumn(*args, **kwargs)

    def GetMainParent(*args, **kwargs):
        """GetMainParent(self) -> PGProperty"""
        return _propgrid.PropertyGridEvent_GetMainParent(*args, **kwargs)

    def GetProperty(*args, **kwargs):
        """GetProperty(self) -> PGProperty"""
        return _propgrid.PropertyGridEvent_GetProperty(*args, **kwargs)

    def GetValidationInfo(*args, **kwargs):
        """GetValidationInfo(self) -> PGValidationInfo"""
        return _propgrid.PropertyGridEvent_GetValidationInfo(*args, **kwargs)

    def CanVeto(*args, **kwargs):
        """CanVeto(self) -> bool"""
        return _propgrid.PropertyGridEvent_CanVeto(*args, **kwargs)

    def Veto(*args, **kwargs):
        """Veto(self, bool veto=True)"""
        return _propgrid.PropertyGridEvent_Veto(*args, **kwargs)

    def GetPropertyName(*args, **kwargs):
        """GetPropertyName(self) -> String"""
        return _propgrid.PropertyGridEvent_GetPropertyName(*args, **kwargs)

    def GetPropertyValue(*args, **kwargs):
        """GetPropertyValue(self) -> wxVariant"""
        return _propgrid.PropertyGridEvent_GetPropertyValue(*args, **kwargs)

    def GetValue(*args, **kwargs):
        """GetValue(self) -> wxVariant"""
        return _propgrid.PropertyGridEvent_GetValue(*args, **kwargs)

    def SetValidationFailureBehavior(*args, **kwargs):
        """SetValidationFailureBehavior(self, char flags)"""
        return _propgrid.PropertyGridEvent_SetValidationFailureBehavior(*args, **kwargs)

    def SetValidationFailureMessage(*args, **kwargs):
        """SetValidationFailureMessage(self, String message)"""
        return _propgrid.PropertyGridEvent_SetValidationFailureMessage(*args, **kwargs)

    def SetColumn(*args, **kwargs):
        """SetColumn(self, int column)"""
        return _propgrid.PropertyGridEvent_SetColumn(*args, **kwargs)

    def SetCanVeto(*args, **kwargs):
        """SetCanVeto(self, bool canVeto)"""
        return _propgrid.PropertyGridEvent_SetCanVeto(*args, **kwargs)

    def WasVetoed(*args, **kwargs):
        """WasVetoed(self) -> bool"""
        return _propgrid.PropertyGridEvent_WasVetoed(*args, **kwargs)

    def SetProperty(*args, **kwargs):
        """SetProperty(self, PGProperty p)"""
        return _propgrid.PropertyGridEvent_SetProperty(*args, **kwargs)

_propgrid.PropertyGridEvent_swigregister(PropertyGridEvent)

class PropertyGridPopulator(object):
    """Proxy of C++ PropertyGridPopulator class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_PropertyGridPopulator
    __del__ = lambda self : None;

    def SetState(*args, **kwargs):
        """SetState(self, state)"""
        return _propgrid.PropertyGridPopulator_SetState(*args, **kwargs)

    def SetGrid(*args, **kwargs):
        """SetGrid(self, PropertyGrid pg)"""
        return _propgrid.PropertyGridPopulator_SetGrid(*args, **kwargs)

    def Add(*args, **kwargs):
        """
        Add(self, String propClass, String propLabel, String propName, String propValue, 
            PGChoices pChoices=None) -> PGProperty
        """
        return _propgrid.PropertyGridPopulator_Add(*args, **kwargs)

    def AddChildren(*args, **kwargs):
        """AddChildren(self, PGProperty property)"""
        return _propgrid.PropertyGridPopulator_AddChildren(*args, **kwargs)

    def AddAttribute(*args, **kwargs):
        """AddAttribute(self, String name, String type, String value) -> bool"""
        return _propgrid.PropertyGridPopulator_AddAttribute(*args, **kwargs)

    def DoScanForChildren(*args, **kwargs):
        """DoScanForChildren(self)"""
        return _propgrid.PropertyGridPopulator_DoScanForChildren(*args, **kwargs)

    def GetCurParent(*args, **kwargs):
        """GetCurParent(self) -> PGProperty"""
        return _propgrid.PropertyGridPopulator_GetCurParent(*args, **kwargs)

    def GetState(*args):
        """
        GetState(self)
        GetState(self)
        """
        return _propgrid.PropertyGridPopulator_GetState(*args)

    def ToLongPCT(*args, **kwargs):
        """ToLongPCT(String s, long pval, long max) -> bool"""
        return _propgrid.PropertyGridPopulator_ToLongPCT(*args, **kwargs)
    ToLongPCT = staticmethod(ToLongPCT)

    def ParseChoices(*args, **kwargs):
        """ParseChoices(self, String choicesString, String idString) -> PGChoices"""
        return _propgrid.PropertyGridPopulator_ParseChoices(*args, **kwargs)

    def ProcessError(*args, **kwargs):
        """ProcessError(self, String msg)"""
        return _propgrid.PropertyGridPopulator_ProcessError(*args, **kwargs)

_propgrid.PropertyGridPopulator_swigregister(PropertyGridPopulator)

def PropertyGridPopulator_ToLongPCT(*args, **kwargs):
    """PropertyGridPopulator_ToLongPCT(String s, long pval, long max) -> bool"""
    return _propgrid.PropertyGridPopulator_ToLongPCT(*args, **kwargs)

class PGWindowList(object):
    """Proxy of C++ PGWindowList class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGWindowList"""
        _propgrid.PGWindowList_swiginit(self,_propgrid.new_PGWindowList(*args, **kwargs))

    def SetSecondary(*args, **kwargs):
        """SetSecondary(self, Window secondary)"""
        return _propgrid.PGWindowList_SetSecondary(*args, **kwargs)

    m_primary = property(_propgrid.PGWindowList_m_primary_get, _propgrid.PGWindowList_m_primary_set)
    m_secondary = property(_propgrid.PGWindowList_m_secondary_get, _propgrid.PGWindowList_m_secondary_set)

_propgrid.PGWindowList_swigregister(PGWindowList)

class PGEditor(_core.Object):
    """Proxy of C++ PGEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_PGEditor
    __del__ = lambda self : None;

    def GetName(*args, **kwargs):
        """GetName(self) -> String"""
        return _propgrid.PGEditor_GetName(*args, **kwargs)

    def CreateControls(*args, **kwargs):
        """
        CreateControls(self, PropertyGrid propgrid, PGProperty property, Point pos, 
            Size size) -> PGWindowList
        """
        return _propgrid.PGEditor_CreateControls(*args, **kwargs)

    def UpdateControl(*args, **kwargs):
        """UpdateControl(self, PGProperty property, Window ctrl)"""
        return _propgrid.PGEditor_UpdateControl(*args, **kwargs)

    def DrawValue(*args, **kwargs):
        """DrawValue(self, DC dc, Rect rect, PGProperty property, String text)"""
        return _propgrid.PGEditor_DrawValue(*args, **kwargs)

    def OnEvent(*args, **kwargs):
        """
        OnEvent(self, PropertyGrid propgrid, PGProperty property, Window wnd_primary, 
            Event event) -> bool
        """
        return _propgrid.PGEditor_OnEvent(*args, **kwargs)
    def SetControlAppearance(*args, **kwargs):
        """
        SetControlAppearance(self, PropertyGrid pg, PGProperty property, Window ctrl, 
            PGCell appearance, PGCell oldAppearance, bool unspecified)
        """
        return _propgrid.PGEditor_SetControlAppearance(*args, **kwargs)

    def SetValueToUnspecified(*args, **kwargs):
        """SetValueToUnspecified(self, PGProperty property, Window ctrl)"""
        return _propgrid.PGEditor_SetValueToUnspecified(*args, **kwargs)

    def SetControlStringValue(*args, **kwargs):
        """SetControlStringValue(self, PGProperty property, Window ctrl, String txt)"""
        return _propgrid.PGEditor_SetControlStringValue(*args, **kwargs)

    def SetControlIntValue(*args, **kwargs):
        """SetControlIntValue(self, PGProperty property, Window ctrl, int value)"""
        return _propgrid.PGEditor_SetControlIntValue(*args, **kwargs)

    def InsertItem(*args, **kwargs):
        """InsertItem(self, Window ctrl, String label, int index) -> int"""
        return _propgrid.PGEditor_InsertItem(*args, **kwargs)

    def DeleteItem(*args, **kwargs):
        """DeleteItem(self, Window ctrl, int index)"""
        return _propgrid.PGEditor_DeleteItem(*args, **kwargs)

    def OnFocus(*args, **kwargs):
        """OnFocus(self, PGProperty property, Window wnd)"""
        return _propgrid.PGEditor_OnFocus(*args, **kwargs)

    def CanContainCustomImage(*args, **kwargs):
        """CanContainCustomImage(self) -> bool"""
        return _propgrid.PGEditor_CanContainCustomImage(*args, **kwargs)

    m_clientData = property(_propgrid.PGEditor_m_clientData_get, _propgrid.PGEditor_m_clientData_set)

_propgrid.PGEditor_swigregister(PGEditor)

class PGTextCtrlEditor(PGEditor):
    """Proxy of C++ PGTextCtrlEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGTextCtrlEditor"""
        _propgrid.PGTextCtrlEditor_swiginit(self,_propgrid.new_PGTextCtrlEditor(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGTextCtrlEditor
    __del__ = lambda self : None;

    def OnTextCtrlEvent(*args, **kwargs):
        """
        OnTextCtrlEvent(PropertyGrid propgrid, PGProperty property, Window ctrl, 
            Event event) -> bool
        """
        return _propgrid.PGTextCtrlEditor_OnTextCtrlEvent(*args, **kwargs)
    OnTextCtrlEvent = staticmethod(OnTextCtrlEvent)

    def GetTextCtrlValueFromControl(*args, **kwargs):
        """GetTextCtrlValueFromControl(wxVariant variant, PGProperty property, Window ctrl) -> bool"""
        return _propgrid.PGTextCtrlEditor_GetTextCtrlValueFromControl(*args, **kwargs)
    GetTextCtrlValueFromControl = staticmethod(GetTextCtrlValueFromControl)

_propgrid.PGTextCtrlEditor_swigregister(PGTextCtrlEditor)

def PGTextCtrlEditor_OnTextCtrlEvent(*args, **kwargs):
    """
    PGTextCtrlEditor_OnTextCtrlEvent(PropertyGrid propgrid, PGProperty property, Window ctrl, 
        Event event) -> bool
    """
    return _propgrid.PGTextCtrlEditor_OnTextCtrlEvent(*args, **kwargs)

def PGTextCtrlEditor_GetTextCtrlValueFromControl(*args, **kwargs):
    """PGTextCtrlEditor_GetTextCtrlValueFromControl(wxVariant variant, PGProperty property, Window ctrl) -> bool"""
    return _propgrid.PGTextCtrlEditor_GetTextCtrlValueFromControl(*args, **kwargs)

class PGChoiceEditor(PGEditor):
    """Proxy of C++ PGChoiceEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGChoiceEditor"""
        _propgrid.PGChoiceEditor_swiginit(self,_propgrid.new_PGChoiceEditor(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGChoiceEditor
    __del__ = lambda self : None;
    def CreateControlsBase(*args, **kwargs):
        """
        CreateControlsBase(self, PropertyGrid propgrid, PGProperty property, Point pos, 
            Size sz, long extraStyle) -> Window
        """
        return _propgrid.PGChoiceEditor_CreateControlsBase(*args, **kwargs)

_propgrid.PGChoiceEditor_swigregister(PGChoiceEditor)

class PGComboBoxEditor(PGChoiceEditor):
    """Proxy of C++ PGComboBoxEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGComboBoxEditor"""
        _propgrid.PGComboBoxEditor_swiginit(self,_propgrid.new_PGComboBoxEditor(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGComboBoxEditor
    __del__ = lambda self : None;

_propgrid.PGComboBoxEditor_swigregister(PGComboBoxEditor)

class PGEditorDialogAdapter(_core.Object):
    """Proxy of C++ PGEditorDialogAdapter class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_PGEditorDialogAdapter
    __del__ = lambda self : None;

    def ShowDialog(*args, **kwargs):
        """ShowDialog(self, PropertyGrid propGrid, PGProperty property) -> bool"""
        return _propgrid.PGEditorDialogAdapter_ShowDialog(*args, **kwargs)

    def DoShowDialog(*args, **kwargs):
        """DoShowDialog(self, PropertyGrid propGrid, PGProperty property) -> bool"""
        return _propgrid.PGEditorDialogAdapter_DoShowDialog(*args, **kwargs)

    def SetValue(*args, **kwargs):
        """SetValue(self, wxVariant value)"""
        return _propgrid.PGEditorDialogAdapter_SetValue(*args, **kwargs)

    def GetValue(*args, **kwargs):
        """GetValue(self) -> wxVariant"""
        return _propgrid.PGEditorDialogAdapter_GetValue(*args, **kwargs)

    m_clientData = property(_propgrid.PGEditorDialogAdapter_m_clientData_get, _propgrid.PGEditorDialogAdapter_m_clientData_set)

_propgrid.PGEditorDialogAdapter_swigregister(PGEditorDialogAdapter)

class PGMultiButton(_core.Window):
    """Proxy of C++ PGMultiButton class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self, PropertyGrid pg, Size sz) -> PGMultiButton"""
        _propgrid.PGMultiButton_swiginit(self,_propgrid.new_PGMultiButton(*args, **kwargs))
        self._setOORInfo(self)
    __swig_destroy__ = _propgrid.delete_PGMultiButton
    __del__ = lambda self : None;

    def GetButton(*args):
        """
        GetButton(self, int i) -> Window
        GetButton(self, int i) -> Window
        """
        return _propgrid.PGMultiButton_GetButton(*args)

    def GetButtonId(*args, **kwargs):
        """GetButtonId(self, int i) -> int"""
        return _propgrid.PGMultiButton_GetButtonId(*args, **kwargs)

    def GetCount(*args, **kwargs):
        """GetCount(self) -> int"""
        return _propgrid.PGMultiButton_GetCount(*args, **kwargs)

    def Add(*args, **kwargs):
        """Add(self, String label, int id=-2)"""
        return _propgrid.PGMultiButton_Add(*args, **kwargs)

    def GetPrimarySize(*args, **kwargs):
        """GetPrimarySize(self) -> Size"""
        return _propgrid.PGMultiButton_GetPrimarySize(*args, **kwargs)

    def Finalize(*args, **kwargs):
        """Finalize(self, PropertyGrid propGrid, Point pos)"""
        return _propgrid.PGMultiButton_Finalize(*args, **kwargs)

    def AddBitmapButton(*args, **kwargs):
        """AddBitmapButton(self, Bitmap bitmap, int id=-2)"""
        return _propgrid.PGMultiButton_AddBitmapButton(*args, **kwargs)

    def AddButton(self, *args, **kwargs):
        return self.Add(*args, **kwargs)

_propgrid.PGMultiButton_swigregister(PGMultiButton)

class StringProperty(PGProperty):
    """Proxy of C++ StringProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            String value=wxEmptyString) -> StringProperty
        """
        _propgrid.StringProperty_swiginit(self,_propgrid.new_StringProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_StringProperty
    __del__ = lambda self : None;

_propgrid.StringProperty_swigregister(StringProperty)

PG_PROPERTY_VALIDATION_ERROR_MESSAGE = _propgrid.PG_PROPERTY_VALIDATION_ERROR_MESSAGE
PG_PROPERTY_VALIDATION_SATURATE = _propgrid.PG_PROPERTY_VALIDATION_SATURATE
PG_PROPERTY_VALIDATION_WRAP = _propgrid.PG_PROPERTY_VALIDATION_WRAP

class NumericPropertyValidator(object):
    """Proxy of C++ NumericPropertyValidator class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    Signed = _propgrid.NumericPropertyValidator_Signed
    Unsigned = _propgrid.NumericPropertyValidator_Unsigned
    Float = _propgrid.NumericPropertyValidator_Float

    def __init__(self, *args, **kwargs):
        """__init__(self, int numericType, int base=10) -> NumericPropertyValidator"""
        _propgrid.NumericPropertyValidator_swiginit(self,_propgrid.new_NumericPropertyValidator(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_NumericPropertyValidator
    __del__ = lambda self : None;

    def Validate(*args, **kwargs):
        """Validate(self, Window parent) -> bool"""
        return _propgrid.NumericPropertyValidator_Validate(*args, **kwargs)

_propgrid.NumericPropertyValidator_swigregister(NumericPropertyValidator)

class IntProperty(PGProperty):
    """Proxy of C++ IntProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_IntProperty
    __del__ = lambda self : None;

    def __init__(self, *args):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            long value=0) -> IntProperty
        __init__(self, String label, String name, wxLongLong value) -> IntProperty
        """
        _propgrid.IntProperty_swiginit(self,_propgrid.new_IntProperty(*args))

    def GetClassValidator(*args, **kwargs):
        """GetClassValidator() -> Validator"""
        return _propgrid.IntProperty_GetClassValidator(*args, **kwargs)
    GetClassValidator = staticmethod(GetClassValidator)

    def DoValidation(*args, **kwargs):
        """
        DoValidation(PGProperty property, wxLongLong_t value, PGValidationInfo pValidationInfo, 
            int mode=PG_PROPERTY_VALIDATION_ERROR_MESSAGE) -> bool
        """
        return _propgrid.IntProperty_DoValidation(*args, **kwargs)
    DoValidation = staticmethod(DoValidation)

_propgrid.IntProperty_swigregister(IntProperty)

def IntProperty_GetClassValidator(*args):
    """IntProperty_GetClassValidator() -> Validator"""
    return _propgrid.IntProperty_GetClassValidator(*args)

def IntProperty_DoValidation(*args, **kwargs):
    """
    IntProperty_DoValidation(PGProperty property, wxLongLong_t value, PGValidationInfo pValidationInfo, 
        int mode=PG_PROPERTY_VALIDATION_ERROR_MESSAGE) -> bool
    """
    return _propgrid.IntProperty_DoValidation(*args, **kwargs)
class UIntProperty(PGProperty):
    """Proxy of C++ UIntProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_UIntProperty
    __del__ = lambda self : None;

    def __init__(self, *args):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            long value=0) -> UIntProperty
        __init__(self, String label, String name, wxULongLong value) -> UIntProperty
        """
        _propgrid.UIntProperty_swiginit(self,_propgrid.new_UIntProperty(*args))

_propgrid.UIntProperty_swigregister(UIntProperty)

class FloatProperty(PGProperty):
    """Proxy of C++ FloatProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            double value=0.0) -> FloatProperty
        """
        _propgrid.FloatProperty_swiginit(self,_propgrid.new_FloatProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_FloatProperty
    __del__ = lambda self : None;

    def DoValidation(*args, **kwargs):
        """
        DoValidation(PGProperty property, double value, PGValidationInfo pValidationInfo, 
            int mode=PG_PROPERTY_VALIDATION_ERROR_MESSAGE) -> bool
        """
        return _propgrid.FloatProperty_DoValidation(*args, **kwargs)
    DoValidation = staticmethod(DoValidation)

    def GetClassValidator(*args, **kwargs):
        """GetClassValidator() -> Validator"""
        return _propgrid.FloatProperty_GetClassValidator(*args, **kwargs)
    GetClassValidator = staticmethod(GetClassValidator)

_propgrid.FloatProperty_swigregister(FloatProperty)

def FloatProperty_DoValidation(*args, **kwargs):
    """
    FloatProperty_DoValidation(PGProperty property, double value, PGValidationInfo pValidationInfo, 
        int mode=PG_PROPERTY_VALIDATION_ERROR_MESSAGE) -> bool
    """
    return _propgrid.FloatProperty_DoValidation(*args, **kwargs)

def FloatProperty_GetClassValidator(*args):
    """FloatProperty_GetClassValidator() -> Validator"""
    return _propgrid.FloatProperty_GetClassValidator(*args)

class EnumProperty(PGProperty):
    """Proxy of C++ EnumProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
            int value=0) -> EnumProperty
        """
        _propgrid.EnumProperty_swiginit(self,_propgrid.new_EnumProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_EnumProperty
    __del__ = lambda self : None;

    def GetItemCount(*args, **kwargs):
        """GetItemCount(self) -> size_t"""
        return _propgrid.EnumProperty_GetItemCount(*args, **kwargs)

    def GetIndexForValue(*args, **kwargs):
        """GetIndexForValue(self, int value) -> int"""
        return _propgrid.EnumProperty_GetIndexForValue(*args, **kwargs)

_propgrid.EnumProperty_swigregister(EnumProperty)
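# Illustrative sketch (not part of the generated bindings): EnumProperty pairs
# a list of labels with an optional list of integer values; the final argument
# selects the initial value. Plain Python lists are assumed to convert to
# wxArrayString/wxArrayInt here, as is usual for wxPython typemaps.
def _example_enum_property():
    """Create a three-way choice property (illustrative only)."""
    labels = ['Low', 'Medium', 'High']
    values = [10, 20, 30]
    return EnumProperty('Priority', 'priority', labels, values, 20)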
class EditEnumProperty(EnumProperty):
    """Proxy of C++ EditEnumProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args):
        """
        __init__(self, String label, String name, wxChar labels, long values, 
            String value) -> EditEnumProperty
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
            String value=wxEmptyString) -> EditEnumProperty
        __init__(self, String label, String name, PGChoices choices, String value=wxEmptyString) -> EditEnumProperty
        __init__(self, String label, String name, wxChar labels, long values, 
            PGChoices choicesCache, String value) -> EditEnumProperty
        """
        _propgrid.EditEnumProperty_swiginit(self,_propgrid.new_EditEnumProperty(*args))
    __swig_destroy__ = _propgrid.delete_EditEnumProperty
    __del__ = lambda self : None;

_propgrid.EditEnumProperty_swigregister(EditEnumProperty)

class FlagsProperty(PGProperty):
    """Proxy of C++ FlagsProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
            int value=0) -> FlagsProperty
        """
        _propgrid.FlagsProperty_swiginit(self,_propgrid.new_FlagsProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_FlagsProperty
    __del__ = lambda self : None;

    def GetItemCount(*args, **kwargs):
        """GetItemCount(self) -> size_t"""
        return _propgrid.FlagsProperty_GetItemCount(*args, **kwargs)

    def GetLabel(*args, **kwargs):
        """GetLabel(self, size_t ind) -> String"""
        return _propgrid.FlagsProperty_GetLabel(*args, **kwargs)

_propgrid.FlagsProperty_swigregister(FlagsProperty)

class PGFileDialogAdapter(PGEditorDialogAdapter):
    """Proxy of C++ PGFileDialogAdapter class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr

_propgrid.PGFileDialogAdapter_swigregister(PGFileDialogAdapter)

class FileProperty(PGProperty):
    """Proxy of C++ FileProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            String value=wxEmptyString) -> FileProperty
        """
        _propgrid.FileProperty_swiginit(self,_propgrid.new_FileProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_FileProperty
    __del__ = lambda self : None;

    def GetClassValidator(*args, **kwargs):
        """GetClassValidator() -> Validator"""
        return _propgrid.FileProperty_GetClassValidator(*args, **kwargs)
    GetClassValidator = staticmethod(GetClassValidator)

    def GetFileName(*args, **kwargs):
        """GetFileName(self) -> wxFileName"""
        return _propgrid.FileProperty_GetFileName(*args, **kwargs)

_propgrid.FileProperty_swigregister(FileProperty)

def FileProperty_GetClassValidator(*args):
    """FileProperty_GetClassValidator() -> Validator"""
    return _propgrid.FileProperty_GetClassValidator(*args)

class PGLongStringDialogAdapter(PGEditorDialogAdapter):
    """Proxy of C++ PGLongStringDialogAdapter class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr

_propgrid.PGLongStringDialogAdapter_swigregister(PGLongStringDialogAdapter)

class LongStringProperty(PGProperty):
    """Proxy of C++ LongStringProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            String value=wxEmptyString) -> LongStringProperty
        """
        _propgrid.LongStringProperty_swiginit(self,_propgrid.new_LongStringProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_LongStringProperty
    __del__ = lambda self : None;

    def OnButtonClick(*args, **kwargs):
        """OnButtonClick(self, PropertyGrid propgrid, String value) -> bool"""
        return _propgrid.LongStringProperty_OnButtonClick(*args, **kwargs)

    def DisplayEditorDialog(*args, **kwargs):
        """DisplayEditorDialog(PGProperty prop, PropertyGrid propGrid, String value) -> bool"""
        return _propgrid.LongStringProperty_DisplayEditorDialog(*args, **kwargs)
    DisplayEditorDialog = staticmethod(DisplayEditorDialog)

_propgrid.LongStringProperty_swigregister(LongStringProperty)

def LongStringProperty_DisplayEditorDialog(*args, **kwargs):
    """LongStringProperty_DisplayEditorDialog(PGProperty prop, PropertyGrid propGrid, String value) -> bool"""
    return _propgrid.LongStringProperty_DisplayEditorDialog(*args, **kwargs)

class ArrayStringProperty(PGProperty):
    """Proxy of C++ ArrayStringProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString value=wxArrayString()) -> ArrayStringProperty
        """
        _propgrid.ArrayStringProperty_swiginit(self,_propgrid.new_ArrayStringProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_ArrayStringProperty
    __del__ = lambda self : None;

    def ConvertArrayToString(*args, **kwargs):
        """ConvertArrayToString(self, wxArrayString arr, String pString, wxUniChar delimiter)"""
        return _propgrid.ArrayStringProperty_ConvertArrayToString(*args, **kwargs)

    def OnCustomStringEdit(*args, **kwargs):
        """OnCustomStringEdit(self, Window parent, String value) -> bool"""
        return _propgrid.ArrayStringProperty_OnCustomStringEdit(*args, **kwargs)

    def OnButtonClick(*args, **kwargs):
        """OnButtonClick(self, PropertyGrid propgrid, Window primary, wxChar cbt) -> bool"""
        return _propgrid.ArrayStringProperty_OnButtonClick(*args, **kwargs)

    Escape = _propgrid.ArrayStringProperty_Escape
    QuoteStrings = _propgrid.ArrayStringProperty_QuoteStrings

    def ArrayStringToString(*args, **kwargs):
        """
        ArrayStringToString(String dst, wxArrayString src, wxUniChar delimiter, 
            int flags)
        """
        return _propgrid.ArrayStringProperty_ArrayStringToString(*args, **kwargs)
    ArrayStringToString = staticmethod(ArrayStringToString)

_propgrid.ArrayStringProperty_swigregister(ArrayStringProperty)

def ArrayStringProperty_ArrayStringToString(*args, **kwargs):
    """
    ArrayStringProperty_ArrayStringToString(String dst, wxArrayString src, wxUniChar delimiter, 
        int flags)
    """
    return _propgrid.ArrayStringProperty_ArrayStringToString(*args, **kwargs)
class PGArrayEditorDialog(_windows.Dialog):
    """Proxy of C++ PGArrayEditorDialog class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    def __init__(self): raise AttributeError, "No constructor defined"
    __repr__ = _swig_repr
    __swig_destroy__ = _propgrid.delete_PGArrayEditorDialog
    __del__ = lambda self : None;

    def Init(*args, **kwargs):
        """Init(self)"""
        return _propgrid.PGArrayEditorDialog_Init(*args, **kwargs)

    def Create(*args, **kwargs):
        """
        Create(self, Window parent, String message, String caption, 
            long style=(wxDEFAULT_DIALOG_STYLE|wxRESIZE_BORDER|wxOK|wxCANCEL|wxCENTRE), 
            Point pos=DefaultPosition, Size sz=DefaultSize) -> bool
        """
        return _propgrid.PGArrayEditorDialog_Create(*args, **kwargs)

    def EnableCustomNewAction(*args, **kwargs):
        """EnableCustomNewAction(self)"""
        return _propgrid.PGArrayEditorDialog_EnableCustomNewAction(*args, **kwargs)

    def SetDialogValue(*args, **kwargs):
        """SetDialogValue(self, wxVariant value)"""
        return _propgrid.PGArrayEditorDialog_SetDialogValue(*args, **kwargs)

    def GetDialogValue(*args, **kwargs):
        """GetDialogValue(self) -> wxVariant"""
        return _propgrid.PGArrayEditorDialog_GetDialogValue(*args, **kwargs)

    def GetTextCtrlValidator(*args, **kwargs):
        """GetTextCtrlValidator(self) -> Validator"""
        return _propgrid.PGArrayEditorDialog_GetTextCtrlValidator(*args, **kwargs)

    def IsModified(*args, **kwargs):
        """IsModified(self) -> bool"""
        return _propgrid.PGArrayEditorDialog_IsModified(*args, **kwargs)

    def GetSelection(*args, **kwargs):
        """GetSelection(self) -> int"""
        return _propgrid.PGArrayEditorDialog_GetSelection(*args, **kwargs)

    def OnAddClick(*args, **kwargs):
        """OnAddClick(self, CommandEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnAddClick(*args, **kwargs)

    def OnDeleteClick(*args, **kwargs):
        """OnDeleteClick(self, CommandEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnDeleteClick(*args, **kwargs)

    def OnUpClick(*args, **kwargs):
        """OnUpClick(self, CommandEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnUpClick(*args, **kwargs)

    def OnDownClick(*args, **kwargs):
        """OnDownClick(self, CommandEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnDownClick(*args, **kwargs)

    def OnEndLabelEdit(*args, **kwargs):
        """OnEndLabelEdit(self, ListEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnEndLabelEdit(*args, **kwargs)

    def OnIdle(*args, **kwargs):
        """OnIdle(self, IdleEvent event)"""
        return _propgrid.PGArrayEditorDialog_OnIdle(*args, **kwargs)

_propgrid.PGArrayEditorDialog_swigregister(PGArrayEditorDialog)

class PGArrayStringEditorDialog(PGArrayEditorDialog):
    """Proxy of C++ PGArrayStringEditorDialog class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PGArrayStringEditorDialog"""
        _propgrid.PGArrayStringEditorDialog_swiginit(self,_propgrid.new_PGArrayStringEditorDialog(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PGArrayStringEditorDialog
    __del__ = lambda self : None;

    def Init(*args, **kwargs):
        """Init(self)"""
        return _propgrid.PGArrayStringEditorDialog_Init(*args, **kwargs)

    def SetCustomButton(*args, **kwargs):
        """SetCustomButton(self, String custBtText, ArrayStringProperty pcc)"""
        return _propgrid.PGArrayStringEditorDialog_SetCustomButton(*args, **kwargs)

    def OnCustomNewAction(*args, **kwargs):
        """OnCustomNewAction(self, String resString) -> bool"""
        return _propgrid.PGArrayStringEditorDialog_OnCustomNewAction(*args, **kwargs)

_propgrid.PGArrayStringEditorDialog_swigregister(PGArrayStringEditorDialog)

PG_COLOUR_WEB_BASE = _propgrid.PG_COLOUR_WEB_BASE
PG_COLOUR_CUSTOM = _propgrid.PG_COLOUR_CUSTOM
PG_COLOUR_UNSPECIFIED = _propgrid.PG_COLOUR_UNSPECIFIED
class ColourPropertyValue(_core.Object):
    """Proxy of C++ ColourPropertyValue class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    m_type = property(_propgrid.ColourPropertyValue_m_type_get, _propgrid.ColourPropertyValue_m_type_set)
    m_colour = property(_propgrid.ColourPropertyValue_m_colour_get, _propgrid.ColourPropertyValue_m_colour_set)
    __swig_destroy__ = _propgrid.delete_ColourPropertyValue
    __del__ = lambda self : None;

    def Init(*args, **kwargs):
        """Init(self, int type, Colour colour)"""
        return _propgrid.ColourPropertyValue_Init(*args, **kwargs)

    def __init__(self, *args):
        """
        __init__(self) -> ColourPropertyValue
        __init__(self, ColourPropertyValue v) -> ColourPropertyValue
        __init__(self, Colour colour) -> ColourPropertyValue
        __init__(self, int type) -> ColourPropertyValue
        __init__(self, int type, Colour colour) -> ColourPropertyValue
        """
        _propgrid.ColourPropertyValue_swiginit(self,_propgrid.new_ColourPropertyValue(*args))

_propgrid.ColourPropertyValue_swigregister(ColourPropertyValue)

class FontProperty(PGProperty):
    """Proxy of C++ FontProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            Font value=wxFont()) -> FontProperty
        """
        _propgrid.FontProperty_swiginit(self,_propgrid.new_FontProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_FontProperty
    __del__ = lambda self : None;

_propgrid.FontProperty_swigregister(FontProperty)

class SystemColourProperty(EnumProperty):
    """Proxy of C++ SystemColourProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            ColourPropertyValue value=wxColourPropertyValue()) -> SystemColourProperty
        """
        _propgrid.SystemColourProperty_swiginit(self,_propgrid.new_SystemColourProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_SystemColourProperty
    __del__ = lambda self : None;

    def ColourToString(*args, **kwargs):
        """ColourToString(self, Colour col, int index, int argFlags=0) -> String"""
        return _propgrid.SystemColourProperty_ColourToString(*args, **kwargs)

    def GetCustomColourIndex(*args, **kwargs):
        """GetCustomColourIndex(self) -> int"""
        return _propgrid.SystemColourProperty_GetCustomColourIndex(*args, **kwargs)

    def QueryColourFromUser(*args, **kwargs):
        """QueryColourFromUser(self, wxVariant variant) -> bool"""
        return _propgrid.SystemColourProperty_QueryColourFromUser(*args, **kwargs)

    def GetColour(*args, **kwargs):
        """GetColour(self, int index) -> Colour"""
        return _propgrid.SystemColourProperty_GetColour(*args, **kwargs)

    def GetVal(*args, **kwargs):
        """GetVal(self, wxVariant pVariant=None) -> ColourPropertyValue"""
        return _propgrid.SystemColourProperty_GetVal(*args, **kwargs)

_propgrid.SystemColourProperty_swigregister(SystemColourProperty)

class ColourProperty(SystemColourProperty):
    """Proxy of C++ ColourProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            Colour value=*wxWHITE) -> ColourProperty
        """
        _propgrid.ColourProperty_swiginit(self,_propgrid.new_ColourProperty(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_ColourProperty
    __del__ = lambda self : None;

_propgrid.ColourProperty_swigregister(ColourProperty)
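# Illustrative sketch (not part of the generated bindings): ColourProperty
# edits a plain wx.Colour, while its base SystemColourProperty works on
# ColourPropertyValue, whose m_type field distinguishes system-defined colours
# from custom ones (see the PG_COLOUR_* constants above).
def _example_colour_property():
    """Create a colour-picker property (illustrative only)."""
    import wx
    return ColourProperty('Background', 'bg', wx.Colour(255, 0, 0))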
class PropertyGridPage(_core.EvtHandler,PropertyGridInterface,):
    """Proxy of C++ PropertyGridPage class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PropertyGridPage"""
        _propgrid.PropertyGridPage_swiginit(self,_propgrid.new_PropertyGridPage(*args, **kwargs))
    __swig_destroy__ = _propgrid.delete_PropertyGridPage
    __del__ = lambda self : None;

    def FitColumns(*args, **kwargs):
        """FitColumns(self) -> Size"""
        return _propgrid.PropertyGridPage_FitColumns(*args, **kwargs)

    def GetIndex(*args, **kwargs):
        """GetIndex(self) -> int"""
        return _propgrid.PropertyGridPage_GetIndex(*args, **kwargs)

    def GetSplitterPosition(*args, **kwargs):
        """GetSplitterPosition(self, int col=0) -> int"""
        return _propgrid.PropertyGridPage_GetSplitterPosition(*args, **kwargs)

    def GetRoot(*args, **kwargs):
        """GetRoot(self) -> PGProperty"""
        return _propgrid.PropertyGridPage_GetRoot(*args, **kwargs)

    def GetStatePtr(*args):
        """
        GetStatePtr(self)
        GetStatePtr(self)
        """
        return _propgrid.PropertyGridPage_GetStatePtr(*args)

    def GetToolId(*args, **kwargs):
        """GetToolId(self) -> int"""
        return _propgrid.PropertyGridPage_GetToolId(*args, **kwargs)

    def Init(*args, **kwargs):
        """Init(self)"""
        return _propgrid.PropertyGridPage_Init(*args, **kwargs)

    def IsHandlingAllEvents(*args, **kwargs):
        """IsHandlingAllEvents(self) -> bool"""
        return _propgrid.PropertyGridPage_IsHandlingAllEvents(*args, **kwargs)

    def OnShow(*args, **kwargs):
        """OnShow(self)"""
        return _propgrid.PropertyGridPage_OnShow(*args, **kwargs)

    def SetSplitterPosition(*args, **kwargs):
        """SetSplitterPosition(self, int splitterPos, int col=0)"""
        return _propgrid.PropertyGridPage_SetSplitterPosition(*args, **kwargs)

_propgrid.PropertyGridPage_swigregister(PropertyGridPage)

class PropertyGridManager(_windows.Panel,PropertyGridInterface):
    """Proxy of C++ PropertyGridManager class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition, 
            Size size=DefaultSize, long style=(0), 
            String name=wxPropertyGridManagerNameStr) -> PropertyGridManager
        """
        _propgrid.PropertyGridManager_swiginit(self,_propgrid.new_PropertyGridManager(*args, **kwargs))
        self._setOORInfo(self)
        self.DoDefaultTypeMappings()
        self.edited_objects = {}
        self.DoDefaultValueTypeMappings()
        if not hasattr(self.__class__,'_vt2setter'):
            self.__class__._vt2setter = {}
    __swig_destroy__ = _propgrid.delete_PropertyGridManager
    __del__ = lambda self : None;

    def AddPage(*args, **kwargs):
        """
        AddPage(self, String label=wxEmptyString, Bitmap bmp=wxNullBitmap, 
            PropertyGridPage pageObj=None) -> PropertyGridPage
        """
        return _propgrid.PropertyGridManager_AddPage(*args, **kwargs)

    def ClearPage(*args, **kwargs):
        """ClearPage(self, int page)"""
        return _propgrid.PropertyGridManager_ClearPage(*args, **kwargs)

    def CommitChangesFromEditor(*args, **kwargs):
        """CommitChangesFromEditor(self, int flags=0) -> bool"""
        return _propgrid.PropertyGridManager_CommitChangesFromEditor(*args, **kwargs)

    def Create(*args, **kwargs):
        """
        Create(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition, 
            Size size=DefaultSize, long style=(0), 
            String name=wxPropertyGridManagerNameStr) -> bool
        """
        return _propgrid.PropertyGridManager_Create(*args, **kwargs)

    def EnableCategories(*args, **kwargs):
        """EnableCategories(self, bool enable) -> bool"""
        return _propgrid.PropertyGridManager_EnableCategories(*args, **kwargs)

    def EnsureVisible(*args, **kwargs):
        """EnsureVisible(self, PGPropArg id) -> bool"""
        return _propgrid.PropertyGridManager_EnsureVisible(*args, **kwargs)

    def GetColumnCount(*args, **kwargs):
        """GetColumnCount(self, int page=-1) -> int"""
        return _propgrid.PropertyGridManager_GetColumnCount(*args, **kwargs)

    def GetDescBoxHeight(*args, **kwargs):
        """GetDescBoxHeight(self) -> int"""
        return _propgrid.PropertyGridManager_GetDescBoxHeight(*args, **kwargs)
    def GetGrid(*args):
        """
        GetGrid(self) -> PropertyGrid
        GetGrid(self) -> PropertyGrid
        """
        return _propgrid.PropertyGridManager_GetGrid(*args)

    def GetIterator(*args):
        """
        GetIterator(self, int flags=PG_ITERATE_DEFAULT, PGProperty firstProp=None) -> PropertyGridIterator
        GetIterator(self, int flags=PG_ITERATE_DEFAULT, PGProperty firstProp=None) -> PropertyGridConstIterator
        GetIterator(self, int flags, int startPos) -> PropertyGridIterator
        GetIterator(self, int flags, int startPos) -> PropertyGridConstIterator
        """
        return _propgrid.PropertyGridManager_GetIterator(*args)

    def GetCurrentPage(*args, **kwargs):
        """GetCurrentPage(self) -> PropertyGridPage"""
        return _propgrid.PropertyGridManager_GetCurrentPage(*args, **kwargs)

    def GetPage(*args):
        """
        GetPage(self, int ind) -> PropertyGridPage
        GetPage(self, String name) -> PropertyGridPage
        """
        return _propgrid.PropertyGridManager_GetPage(*args)

    def GetPageByName(*args, **kwargs):
        """GetPageByName(self, String name) -> int"""
        return _propgrid.PropertyGridManager_GetPageByName(*args, **kwargs)

    def GetPageByState(*args, **kwargs):
        """GetPageByState(self, pstate) -> int"""
        return _propgrid.PropertyGridManager_GetPageByState(*args, **kwargs)

    def GetPageCount(*args, **kwargs):
        """GetPageCount(self) -> size_t"""
        return _propgrid.PropertyGridManager_GetPageCount(*args, **kwargs)

    def GetPageName(*args, **kwargs):
        """GetPageName(self, int index) -> String"""
        return _propgrid.PropertyGridManager_GetPageName(*args, **kwargs)

    def GetPageRoot(*args, **kwargs):
        """GetPageRoot(self, int index) -> PGProperty"""
        return _propgrid.PropertyGridManager_GetPageRoot(*args, **kwargs)

    def GetSelectedPage(*args, **kwargs):
        """GetSelectedPage(self) -> int"""
        return _propgrid.PropertyGridManager_GetSelectedPage(*args, **kwargs)

    def GetSelectedProperty(*args, **kwargs):
        """GetSelectedProperty(self) -> PGProperty"""
        return _propgrid.PropertyGridManager_GetSelectedProperty(*args, **kwargs)

    def GetSelection(*args, **kwargs):
        """GetSelection(self) -> PGProperty"""
        return _propgrid.PropertyGridManager_GetSelection(*args, **kwargs)

    def GetToolBar(*args, **kwargs):
        """GetToolBar(self) -> wxToolBar"""
        return _propgrid.PropertyGridManager_GetToolBar(*args, **kwargs)

    def InsertPage(*args, **kwargs):
        """InsertPage(self, int index, String label, Bitmap bmp=wxNullBitmap, PropertyGridPage pageObj=None) -> PropertyGridPage"""
        return _propgrid.PropertyGridManager_InsertPage(*args, **kwargs)

    def IsAnyModified(*args, **kwargs):
        """IsAnyModified(self) -> bool"""
        return _propgrid.PropertyGridManager_IsAnyModified(*args, **kwargs)

    def IsFrozen(*args, **kwargs):
        """
        IsFrozen(self) -> bool

        Returns ``True`` if the window has been frozen and not thawed yet.
        :see: `Freeze` and `Thaw`
        """
        return _propgrid.PropertyGridManager_IsFrozen(*args, **kwargs)

    def IsPageModified(*args, **kwargs):
        """IsPageModified(self, size_t index) -> bool"""
        return _propgrid.PropertyGridManager_IsPageModified(*args, **kwargs)

    def IsPropertySelected(*args, **kwargs):
        """IsPropertySelected(self, PGPropArg id) -> bool"""
        return _propgrid.PropertyGridManager_IsPropertySelected(*args, **kwargs)

    def RemovePage(*args, **kwargs):
        """RemovePage(self, int page) -> bool"""
        return _propgrid.PropertyGridManager_RemovePage(*args, **kwargs)

    def SelectPage(*args):
        """
        SelectPage(self, int index)
        SelectPage(self, String label)
        SelectPage(self, PropertyGridPage ptr)
        """
        return _propgrid.PropertyGridManager_SelectPage(*args)

    def SelectProperty(*args, **kwargs):
        """SelectProperty(self, PGPropArg id, bool focus=False) -> bool"""
        return _propgrid.PropertyGridManager_SelectProperty(*args, **kwargs)

    def SetColumnTitle(*args, **kwargs):
        """SetColumnTitle(self, int idx, String title)"""
        return _propgrid.PropertyGridManager_SetColumnTitle(*args, **kwargs)

    def SetColumnCount(*args, **kwargs):
        """SetColumnCount(self, int colCount, int page=-1)"""
        return _propgrid.PropertyGridManager_SetColumnCount(*args, **kwargs)

    def SetDescription(*args, **kwargs):
        """SetDescription(self, String label, String content)"""
        return _propgrid.PropertyGridManager_SetDescription(*args, **kwargs)

    def SetDescBoxHeight(*args, **kwargs):
        """SetDescBoxHeight(self, int ht, bool refresh=True)"""
        return _propgrid.PropertyGridManager_SetDescBoxHeight(*args, **kwargs)

    def SetSplitterLeft(*args, **kwargs):
        """SetSplitterLeft(self, bool subProps=False, bool allPages=True)"""
        return _propgrid.PropertyGridManager_SetSplitterLeft(*args, **kwargs)

    def SetPageSplitterLeft(*args, **kwargs):
        """SetPageSplitterLeft(self, int page, bool subProps=False)"""
        return _propgrid.PropertyGridManager_SetPageSplitterLeft(*args, **kwargs)

    def SetPageSplitterPosition(*args, **kwargs):
        """SetPageSplitterPosition(self, int page, int pos, int column=0)"""
        return _propgrid.PropertyGridManager_SetPageSplitterPosition(*args, **kwargs)

    def SetSplitterPosition(*args, **kwargs):
        """SetSplitterPosition(self, int pos, int column=0)"""
        return _propgrid.PropertyGridManager_SetSplitterPosition(*args, **kwargs)

    def SetId(*args, **kwargs):
        """
        SetId(self, int winid)

        Sets the identifier of the window.  Each window has an integer
        identifier. If the application has not provided one, an identifier
        will be generated. Normally, the identifier should be provided on
        creation and should not be modified subsequently.
        """
        return _propgrid.PropertyGridManager_SetId(*args, **kwargs)

    def Freeze(*args, **kwargs):
        """
        Freeze(self)

        Freezes the window or, in other words, prevents any updates from
        taking place on screen, the window is not redrawn at all. Thaw must be
        called to reenable window redrawing.  Calls to Freeze/Thaw may be
        nested, with the actual Thaw being delayed until all the nesting has
        been undone.

        This method is useful for visual appearance optimization (for example,
        it is a good idea to use it before inserting large amount of text into
        a wxTextCtrl under wxGTK) but is not implemented on all platforms nor
        for all controls so it is mostly just a hint to wxWindows and not a
        mandatory directive.
        """
        return _propgrid.PropertyGridManager_Freeze(*args, **kwargs)

    def Thaw(*args, **kwargs):
        """
        Thaw(self)

        Reenables window updating after a previous call to Freeze.
        Calls to Freeze/Thaw may be nested, so Thaw must be called the same
        number of times that Freeze was before the window will be updated.
        """
        return _propgrid.PropertyGridManager_Thaw(*args, **kwargs)

    def Reparent(*args, **kwargs):
        """Reparent(self, wxWindowBase newParent) -> bool"""
        return _propgrid.PropertyGridManager_Reparent(*args, **kwargs)

    def GetValuesFromPage(self, page, dict_=None, as_strings=False, inc_attributes=False):
        "Same as GetValues, but returns values from specific page only."
        "For argument descriptions, see GetValues."
        return page.GetPropertyValues(dict_, as_strings, inc_attributes)

_propgrid.PropertyGridManager_swigregister(PropertyGridManager)
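# Illustrative sketch (not part of the generated bindings; assumes a running
# wx.App and a parent window): a PropertyGridManager hosts one or more
# PropertyGridPage objects, and properties are appended through the
# PropertyGridInterface methods mixed into both classes -- Append() is provided
# by PropertyGridInterface, defined earlier in this module.
def _example_build_manager(parent):
    """Create a manager with one page and two properties (illustrative only)."""
    mgr = PropertyGridManager(parent)
    page = mgr.AddPage('Main')
    page.Append(StringProperty('Name', 'name', 'untitled'))
    page.Append(FloatProperty('Scale', 'scale', 1.0))
    return mgr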
def NewPropertyCategory(*args, **kwargs):
    """NewPropertyCategory(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL)) -> PGProperty"""
    return _propgrid.NewPropertyCategory(*args, **kwargs)

def NewStringProperty(*args, **kwargs):
    """
    NewStringProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewStringProperty(*args, **kwargs)

def NewUIntProperty(*args, **kwargs):
    """
    NewUIntProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        long value=0) -> PGProperty
    """
    return _propgrid.NewUIntProperty(*args, **kwargs)

def NewIntProperty(*args, **kwargs):
    """
    NewIntProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        long value=0) -> PGProperty
    """
    return _propgrid.NewIntProperty(*args, **kwargs)

def NewFloatProperty(*args, **kwargs):
    """
    NewFloatProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        double value=0.0) -> PGProperty
    """
    return _propgrid.NewFloatProperty(*args, **kwargs)

def NewBoolProperty(*args, **kwargs):
    """
    NewBoolProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        bool value=False) -> PGProperty
    """
    return _propgrid.NewBoolProperty(*args, **kwargs)

def NewEnumProperty(*args, **kwargs):
    """
    NewEnumProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
        int value=0) -> PGProperty
    """
    return _propgrid.NewEnumProperty(*args, **kwargs)

def NewEditEnumProperty(*args, **kwargs):
    """
    NewEditEnumProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewEditEnumProperty(*args, **kwargs)

def NewFlagsProperty(*args, **kwargs):
    """
    NewFlagsProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
        int value=0) -> PGProperty
    """
    return _propgrid.NewFlagsProperty(*args, **kwargs)

def NewLongStringProperty(*args, **kwargs):
    """
    NewLongStringProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewLongStringProperty(*args, **kwargs)

def NewFileProperty(*args, **kwargs):
    """
    NewFileProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewFileProperty(*args, **kwargs)

def NewDirProperty(*args, **kwargs):
    """
    NewDirProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewDirProperty(*args, **kwargs)

def NewArrayStringProperty(*args, **kwargs):
    """
    NewArrayStringProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        wxArrayString value=wxArrayString()) -> PGProperty
    """
    return _propgrid.NewArrayStringProperty(*args, **kwargs)

def NewFontProperty(*args, **kwargs):
    """
    NewFontProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        Font value=wxFont()) -> PGProperty
    """
    return _propgrid.NewFontProperty(*args, **kwargs)

def NewSystemColourProperty(*args, **kwargs):
    """
    NewSystemColourProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        ColourPropertyValue value=wxColourPropertyValue()) -> PGProperty
    """
    return _propgrid.NewSystemColourProperty(*args, **kwargs)

def NewColourProperty(*args, **kwargs):
    """
    NewColourProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        Colour value=wxColour()) -> PGProperty
    """
    return _propgrid.NewColourProperty(*args, **kwargs)

def NewCursorProperty(*args, **kwargs):
    """
    NewCursorProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        int value=0) -> PGProperty
    """
    return _propgrid.NewCursorProperty(*args, **kwargs)

def NewImageFileProperty(*args, **kwargs):
    """
    NewImageFileProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        String value=wxEmptyString) -> PGProperty
    """
    return _propgrid.NewImageFileProperty(*args, **kwargs)

def NewMultiChoiceProperty(*args, **kwargs):
    """
    NewMultiChoiceProperty(String label, String name=(*wxPGProperty::sm_wxPG_LABEL), 
        wxArrayString choices=wxArrayString(), wxArrayString value=wxArrayString()) -> PGProperty
    """
    return _propgrid.NewMultiChoiceProperty(*args, **kwargs)

def NewDateProperty(*args, **kwargs):
    """
    NewDateProperty(String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
        DateTime value=wxDateTime()) -> PGProperty
    """
    return _propgrid.NewDateProperty(*args, **kwargs)
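# Illustrative sketch (not part of the generated bindings): the New*Property
# factories above mirror the property class constructors but return generic
# PGProperty handles, which is convenient when appending many properties in a
# loop. Append() is assumed to come from PropertyGridInterface, defined
# earlier in this module.
def _example_factory_usage(page):
    """Append factory-made properties to an existing page (illustrative only)."""
    page.Append(NewPropertyCategory('General'))
    page.Append(NewBoolProperty('Enabled', 'enabled', True))
    page.Append(NewDirProperty('Output', 'output', '/tmp'))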
class PyFloatProperty(FloatProperty):
    """Proxy of C++ PyFloatProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            double value=0.0) -> PyFloatProperty
        """
        _propgrid.PyFloatProperty_swiginit(self,_propgrid.new_PyFloatProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyFloatProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyFloatProperty__SetSelf(*args, **kwargs)

_propgrid.PyFloatProperty_swigregister(PyFloatProperty)

class PyEditorDialogAdapter(PGEditorDialogAdapter):
    """Proxy of C++ PyEditorDialogAdapter class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyEditorDialogAdapter"""
        _propgrid.PyEditorDialogAdapter_swiginit(self,_propgrid.new_PyEditorDialogAdapter(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyEditorDialogAdapter, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyEditorDialogAdapter__SetSelf(*args, **kwargs)

_propgrid.PyEditorDialogAdapter_swigregister(PyEditorDialogAdapter)

class PyEnumProperty(EnumProperty):
    """Proxy of C++ PyEnumProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(), 
            int value=0) -> PyEnumProperty
        """
        _propgrid.PyEnumProperty_swiginit(self,_propgrid.new_PyEnumProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyEnumProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyEnumProperty__SetSelf(*args, **kwargs)

_propgrid.PyEnumProperty_swigregister(PyEnumProperty)

class PyArrayStringProperty(ArrayStringProperty):
    """Proxy of C++ PyArrayStringProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            wxArrayString value=wxArrayString()) -> PyArrayStringProperty
        """
        _propgrid.PyArrayStringProperty_swiginit(self,_propgrid.new_PyArrayStringProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyArrayStringProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyArrayStringProperty__SetSelf(*args, **kwargs)

_propgrid.PyArrayStringProperty_swigregister(PyArrayStringProperty)

class PyComboBoxEditor(PGComboBoxEditor):
    """Proxy of C++ PyComboBoxEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyComboBoxEditor"""
        _propgrid.PyComboBoxEditor_swiginit(self,_propgrid.new_PyComboBoxEditor(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyComboBoxEditor, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyComboBoxEditor__SetSelf(*args, **kwargs)

_propgrid.PyComboBoxEditor_swigregister(PyComboBoxEditor)

class PyFileDialogAdapter(PGFileDialogAdapter):
    """Proxy of C++ PyFileDialogAdapter class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyFileDialogAdapter"""
        _propgrid.PyFileDialogAdapter_swiginit(self,_propgrid.new_PyFileDialogAdapter(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyFileDialogAdapter, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyFileDialogAdapter__SetSelf(*args, **kwargs)

_propgrid.PyFileDialogAdapter_swigregister(PyFileDialogAdapter)

class PyStringProperty(StringProperty):
    """Proxy of C++ PyStringProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr

    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), 
            String value=wxEmptyString) -> PyStringProperty
        """
        _propgrid.PyStringProperty_swiginit(self,_propgrid.new_PyStringProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyStringProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyStringProperty__SetSelf(*args, **kwargs)

_propgrid.PyStringProperty_swigregister(PyStringProperty)
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyLongStringDialogAdapter"""
        _propgrid.PyLongStringDialogAdapter_swiginit(self,_propgrid.new_PyLongStringDialogAdapter(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyLongStringDialogAdapter, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyLongStringDialogAdapter__SetSelf(*args, **kwargs)

_propgrid.PyLongStringDialogAdapter_swigregister(PyLongStringDialogAdapter)

class PyEditEnumProperty(EditEnumProperty):
    """Proxy of C++ PyEditEnumProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, String label, String name, wxChar labels, long values, String value) -> PyEditEnumProperty
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(),
            String value=wxEmptyString) -> PyEditEnumProperty
        __init__(self, String label, String name, PGChoices choices, String value=wxEmptyString) -> PyEditEnumProperty
        __init__(self, String label, String name, wxChar labels, long values, PGChoices choicesCache,
            String value) -> PyEditEnumProperty
        """
        _propgrid.PyEditEnumProperty_swiginit(self,_propgrid.new_PyEditEnumProperty(*args))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyEditEnumProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyEditEnumProperty__SetSelf(*args, **kwargs)

_propgrid.PyEditEnumProperty_swigregister(PyEditEnumProperty)

class PyTextCtrlEditor(PGTextCtrlEditor):
    """Proxy of C++ PyTextCtrlEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyTextCtrlEditor"""
        _propgrid.PyTextCtrlEditor_swiginit(self,_propgrid.new_PyTextCtrlEditor(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyTextCtrlEditor, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyTextCtrlEditor__SetSelf(*args, **kwargs)
_propgrid.PyTextCtrlEditor_swigregister(PyTextCtrlEditor)

class PySystemColourProperty(SystemColourProperty):
    """Proxy of C++ PySystemColourProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            ColourPropertyValue value=wxColourPropertyValue()) -> PySystemColourProperty
        __init__(self, String label, String name, wxChar labels, long values, PGChoices choicesCache,
            ColourPropertyValue value) -> PySystemColourProperty
        __init__(self, String label, String name, wxChar labels, long values, PGChoices choicesCache,
            Colour value) -> PySystemColourProperty
        """
        _propgrid.PySystemColourProperty_swiginit(self,_propgrid.new_PySystemColourProperty(*args))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PySystemColourProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PySystemColourProperty__SetSelf(*args, **kwargs)

_propgrid.PySystemColourProperty_swigregister(PySystemColourProperty)

class PyFlagsProperty(FlagsProperty):
    """Proxy of C++ PyFlagsProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            wxArrayString labels=wxArrayString(), wxArrayInt values=wxArrayInt(),
            int value=0) -> PyFlagsProperty
        """
        _propgrid.PyFlagsProperty_swiginit(self,_propgrid.new_PyFlagsProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyFlagsProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyFlagsProperty__SetSelf(*args, **kwargs)

_propgrid.PyFlagsProperty_swigregister(PyFlagsProperty)

class PyFontProperty(FontProperty):
    """Proxy of C++ PyFontProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            Font value=wxFont()) -> PyFontProperty
        """
        _propgrid.PyFontProperty_swiginit(self,_propgrid.new_PyFontProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyFontProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyFontProperty__SetSelf(*args, **kwargs)

_propgrid.PyFontProperty_swigregister(PyFontProperty)

class PyColourProperty(ColourProperty):
    """Proxy of C++ PyColourProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            Colour value=*wxWHITE) -> PyColourProperty
        """
        _propgrid.PyColourProperty_swiginit(self,_propgrid.new_PyColourProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyColourProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyColourProperty__SetSelf(*args, **kwargs)

_propgrid.PyColourProperty_swigregister(PyColourProperty)

class PyFileProperty(FileProperty):
    """Proxy of C++ PyFileProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            String value=wxEmptyString) -> PyFileProperty
        """
        _propgrid.PyFileProperty_swiginit(self,_propgrid.new_PyFileProperty(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyFileProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyFileProperty__SetSelf(*args, **kwargs)

_propgrid.PyFileProperty_swigregister(PyFileProperty)

class PyIntProperty(IntProperty):
    """Proxy of C++ PyIntProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL),
            long value=0) -> PyIntProperty
        __init__(self, String label, String name, wxLongLong value) -> PyIntProperty
        """
        _propgrid.PyIntProperty_swiginit(self,_propgrid.new_PyIntProperty(*args))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyIntProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyIntProperty__SetSelf(*args, **kwargs)

_propgrid.PyIntProperty_swigregister(PyIntProperty)

class PyEditor(PGEditor):
    """Proxy of C++ PyEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyEditor"""
        _propgrid.PyEditor_swiginit(self,_propgrid.new_PyEditor(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyEditor, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyEditor__SetSelf(*args, **kwargs)

_propgrid.PyEditor_swigregister(PyEditor)

class PyChoiceEditor(PGChoiceEditor):
    """Proxy of C++ PyChoiceEditor class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """__init__(self) -> PyChoiceEditor"""
        _propgrid.PyChoiceEditor_swiginit(self,_propgrid.new_PyChoiceEditor(*args, **kwargs))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyChoiceEditor, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
        """_SetSelf(self, PyObject self)"""
        return _propgrid.PyChoiceEditor__SetSelf(*args, **kwargs)

_propgrid.PyChoiceEditor_swigregister(PyChoiceEditor)

class PyProperty(PGProperty):
    """Proxy of C++ PyProperty class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args):
        """
        __init__(self) -> PyProperty
        __init__(self, String label, String name) -> PyProperty
        """
        _propgrid.PyProperty_swiginit(self,_propgrid.new_PyProperty(*args))
        self._SetSelf(self); self._RegisterMethods()

    def CallSuperMethod(self, *args, **kwargs):
        funcname = args[0]
        args2 = list(args)
        args2[0] = self
        self._super_call = True
        try:
            res = getattr(PyProperty, funcname)(*args2, **kwargs)
        finally:
            del self._super_call
        return res

    def _RegisterMethods(self):
        cls = self.__class__
        if not hasattr(cls,'_pyswig_methods_registered'):
            cls._pyswig_methods_registered = True
            ls = [ab for ab in cls.__dict__.iteritems()]
            for a, b in ls:
                if not a.startswith('_'):
                    setattr(cls, '%s_t_'%a, b)

    def _SetSelf(*args, **kwargs):
"""_SetSelf(self, PyObject self)""" return _propgrid.PyProperty__SetSelf(*args, **kwargs) _propgrid.PyProperty_swigregister(PyProperty) class PyUIntProperty(UIntProperty): """Proxy of C++ PyUIntProperty class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args): """ __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), long value=0) -> PyUIntProperty __init__(self, String label, String name, wxULongLong value) -> PyUIntProperty """ _propgrid.PyUIntProperty_swiginit(self,_propgrid.new_PyUIntProperty(*args)) self._SetSelf(self); self._RegisterMethods() def CallSuperMethod(self, *args, **kwargs): funcname = args[0] args2 = list(args) args2[0] = self self._super_call = True try: res = getattr(PyUIntProperty, funcname)(*args2, **kwargs) finally: del self._super_call return res def _RegisterMethods(self): cls = self.__class__ if not hasattr(cls,'_pyswig_methods_registered'): cls._pyswig_methods_registered = True ls = [ab for ab in cls.__dict__.iteritems()] for a, b in ls: if not a.startswith('_'): setattr(cls, '%s_t_'%a, b) def _SetSelf(*args, **kwargs): """_SetSelf(self, PyObject self)""" return _propgrid.PyUIntProperty__SetSelf(*args, **kwargs) _propgrid.PyUIntProperty_swigregister(PyUIntProperty) class PyLongStringProperty(LongStringProperty): """Proxy of C++ PyLongStringProperty class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args, **kwargs): """ __init__(self, String label=(*wxPGProperty::sm_wxPG_LABEL), String name=(*wxPGProperty::sm_wxPG_LABEL), String value=wxEmptyString) -> PyLongStringProperty """ _propgrid.PyLongStringProperty_swiginit(self,_propgrid.new_PyLongStringProperty(*args, **kwargs)) self._SetSelf(self); self._RegisterMethods() def CallSuperMethod(self, *args, **kwargs): funcname = args[0] args2 = list(args) args2[0] = self self._super_call = True try: res = getattr(PyLongStringProperty, funcname)(*args2, **kwargs) finally: del self._super_call return res def _RegisterMethods(self): cls = self.__class__ if not hasattr(cls,'_pyswig_methods_registered'): cls._pyswig_methods_registered = True ls = [ab for ab in cls.__dict__.iteritems()] for a, b in ls: if not a.startswith('_'): setattr(cls, '%s_t_'%a, b) def _SetSelf(*args, **kwargs): """_SetSelf(self, PyObject self)""" return _propgrid.PyLongStringProperty__SetSelf(*args, **kwargs) _propgrid.PyLongStringProperty_swigregister(PyLongStringProperty) def RegisterEditor(*args, **kwargs): """RegisterEditor(PGEditor editor, String editorName)""" return _propgrid.RegisterEditor(*args, **kwargs) EVT_PG_CHANGED = wx.PyEventBinder( wxEVT_PG_CHANGED, 1 ) EVT_PG_CHANGING = wx.PyEventBinder( wxEVT_PG_CHANGING, 1 ) EVT_PG_SELECTED = wx.PyEventBinder( wxEVT_PG_SELECTED, 1 ) EVT_PG_HIGHLIGHTED = wx.PyEventBinder( wxEVT_PG_HIGHLIGHTED, 1 ) EVT_PG_RIGHT_CLICK = wx.PyEventBinder( wxEVT_PG_RIGHT_CLICK, 1 ) EVT_PG_PAGE_CHANGED = wx.PyEventBinder( wxEVT_PG_PAGE_CHANGED, 1 ) EVT_PG_ITEM_COLLAPSED = wx.PyEventBinder( wxEVT_PG_ITEM_COLLAPSED, 1 ) EVT_PG_ITEM_EXPANDED = wx.PyEventBinder( wxEVT_PG_ITEM_EXPANDED, 1 ) EVT_PG_DOUBLE_CLICK = wx.PyEventBinder( wxEVT_PG_DOUBLE_CLICK, 1 ) EVT_PG_LABEL_EDIT_BEGIN = wx.PyEventBinder( wxEVT_PG_LABEL_EDIT_BEGIN, 1 ) EVT_PG_LABEL_EDIT_ENDING = wx.PyEventBinder( wxEVT_PG_LABEL_EDIT_ENDING, 1 ) EVT_PG_COL_BEGIN_DRAG = wx.PyEventBinder( wxEVT_PG_COL_BEGIN_DRAG, 1 ) 
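# Typical hookup for the event binders defined below (illustrative sketch;
# 'pg' is assumed to be a PropertyGrid instance created elsewhere):
#
#   pg.Bind(EVT_PG_CHANGED, on_property_changed)
#
# where on_property_changed(event) can call event.GetProperty() to obtain the
# property whose value was just edited.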
EVT_PG_CHANGED = wx.PyEventBinder( wxEVT_PG_CHANGED, 1 )
EVT_PG_CHANGING = wx.PyEventBinder( wxEVT_PG_CHANGING, 1 )
EVT_PG_SELECTED = wx.PyEventBinder( wxEVT_PG_SELECTED, 1 )
EVT_PG_HIGHLIGHTED = wx.PyEventBinder( wxEVT_PG_HIGHLIGHTED, 1 )
EVT_PG_RIGHT_CLICK = wx.PyEventBinder( wxEVT_PG_RIGHT_CLICK, 1 )
EVT_PG_PAGE_CHANGED = wx.PyEventBinder( wxEVT_PG_PAGE_CHANGED, 1 )
EVT_PG_ITEM_COLLAPSED = wx.PyEventBinder( wxEVT_PG_ITEM_COLLAPSED, 1 )
EVT_PG_ITEM_EXPANDED = wx.PyEventBinder( wxEVT_PG_ITEM_EXPANDED, 1 )
EVT_PG_DOUBLE_CLICK = wx.PyEventBinder( wxEVT_PG_DOUBLE_CLICK, 1 )
EVT_PG_LABEL_EDIT_BEGIN = wx.PyEventBinder( wxEVT_PG_LABEL_EDIT_BEGIN, 1 )
EVT_PG_LABEL_EDIT_ENDING = wx.PyEventBinder( wxEVT_PG_LABEL_EDIT_ENDING, 1 )
EVT_PG_COL_BEGIN_DRAG = wx.PyEventBinder( wxEVT_PG_COL_BEGIN_DRAG, 1 )
EVT_PG_COL_DRAGGING = wx.PyEventBinder( wxEVT_PG_COL_DRAGGING, 1 )
EVT_PG_COL_END_DRAG = wx.PyEventBinder( wxEVT_PG_COL_END_DRAG, 1 )

LABEL_AS_NAME = "@!"
DEFAULT_IMAGE_SIZE = (-1,-1)
NO_IMAGE_SIZE = (0,0)

PG_BOOL_USE_CHECKBOX = "UseCheckbox"
PG_BOOL_USE_DOUBLE_CLICK_CYCLING = "UseDClickCycling"
PG_FLOAT_PRECISION = "Precision"
PG_STRING_PASSWORD = "Password"
PG_UINT_BASE = "Base"
PG_UINT_PREFIX = "Prefix"
PG_FILE_WILDCARD = "Wildcard"
PG_FILE_SHOW_FULL_PATH = "ShowFullPath"
PG_FILE_SHOW_RELATIVE_PATH = "ShowRelativePath"
PG_FILE_INITIAL_PATH = "InitialPath"
PG_FILE_DIALOG_TITLE = "DialogTitle"
PG_DIR_DIALOG_MESSAGE = "DialogMessage"
PG_DATE_FORMAT = "DateFormat"
PG_DATE_PICKER_STYLE = "PickerStyle"

PropertyCategory = NewPropertyCategory
StringProperty = NewStringProperty
IntProperty = NewIntProperty
UIntProperty = NewUIntProperty
FloatProperty = NewFloatProperty
BoolProperty = NewBoolProperty
EnumProperty = NewEnumProperty
EditEnumProperty = NewEditEnumProperty
FlagsProperty = NewFlagsProperty
LongStringProperty = NewLongStringProperty
FileProperty = NewFileProperty
DirProperty = NewDirProperty
ArrayStringProperty = NewArrayStringProperty
FontProperty = NewFontProperty
SystemColourProperty = NewSystemColourProperty
ColourProperty = NewColourProperty
CursorProperty = NewCursorProperty
ImageFileProperty = NewImageFileProperty
MultiChoiceProperty = NewMultiChoiceProperty
DateProperty = NewDateProperty
```

#### File: wx-3.0-msw/wx/stc.py
```python
import _stc
import new
new_instancemethod = new.instancemethod
# Standard SWIG runtime helpers: attribute access on the generated proxy
# classes is routed through the __swig_setmethods__/__swig_getmethods__ tables.
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
    if (name == "thisown"): return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'PySwigObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name,None)
    if method: return method(self,value)
    if (not static) or hasattr(self,name):
        self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)

def _swig_setattr(self,class_type,name,value):
    return _swig_setattr_nondynamic(self,class_type,name,value,0)

def _swig_getattr(self,class_type,name):
    if (name == "thisown"): return self.this.own()
    method = class_type.__swig_getmethods__.get(name,None)
    if method: return method(self)
    raise AttributeError,name

def _swig_repr(self):
    try: strthis = "proxy of " + self.this.__repr__()
    except: strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)

import types
try:
    _object = types.ObjectType
    _newclass = 1
except AttributeError:
    class _object : pass
    _newclass = 0
del types

def _swig_setattr_nondynamic_method(set):
    def set_attr(self,name,value):
        if (name == "thisown"): return self.this.own(value)
        if hasattr(self,name) or (name == "this"):
            set(self,name,value)
        else:
            raise AttributeError("You cannot add attributes to %s" % self)
    return set_attr

import _core
import _misc
wx = _core
__docfilter__ = wx.__DocFilter(globals())
USE_STC = _stc.USE_STC
USE_TEXTCTRL = _stc.USE_TEXTCTRL
STC_USE_DND = _stc.STC_USE_DND
STC_INVALID_POSITION = _stc.STC_INVALID_POSITION
STC_START = _stc.STC_START
STC_OPTIONAL_START = _stc.STC_OPTIONAL_START
STC_LEXER_START = _stc.STC_LEXER_START
STC_WS_INVISIBLE = _stc.STC_WS_INVISIBLE
STC_WS_VISIBLEALWAYS = _stc.STC_WS_VISIBLEALWAYS
STC_WS_VISIBLEAFTERINDENT = _stc.STC_WS_VISIBLEAFTERINDENT
STC_EOL_CRLF = _stc.STC_EOL_CRLF
STC_EOL_CR = _stc.STC_EOL_CR
STC_EOL_LF = _stc.STC_EOL_LF
STC_CP_UTF8 = _stc.STC_CP_UTF8
STC_MARKER_MAX = _stc.STC_MARKER_MAX
STC_MARK_CIRCLE = _stc.STC_MARK_CIRCLE
STC_MARK_ROUNDRECT = _stc.STC_MARK_ROUNDRECT
STC_MARK_ARROW = _stc.STC_MARK_ARROW
STC_MARK_SMALLRECT = _stc.STC_MARK_SMALLRECT
STC_MARK_SHORTARROW = _stc.STC_MARK_SHORTARROW
STC_MARK_EMPTY = _stc.STC_MARK_EMPTY
STC_MARK_ARROWDOWN = _stc.STC_MARK_ARROWDOWN
STC_MARK_MINUS = _stc.STC_MARK_MINUS
STC_MARK_PLUS = _stc.STC_MARK_PLUS
STC_MARK_VLINE = _stc.STC_MARK_VLINE
STC_MARK_LCORNER = _stc.STC_MARK_LCORNER
STC_MARK_TCORNER = _stc.STC_MARK_TCORNER
STC_MARK_BOXPLUS = _stc.STC_MARK_BOXPLUS
STC_MARK_BOXPLUSCONNECTED = _stc.STC_MARK_BOXPLUSCONNECTED
STC_MARK_BOXMINUS = _stc.STC_MARK_BOXMINUS
STC_MARK_BOXMINUSCONNECTED = _stc.STC_MARK_BOXMINUSCONNECTED
STC_MARK_LCORNERCURVE = _stc.STC_MARK_LCORNERCURVE
STC_MARK_TCORNERCURVE = _stc.STC_MARK_TCORNERCURVE
STC_MARK_CIRCLEPLUS = _stc.STC_MARK_CIRCLEPLUS
STC_MARK_CIRCLEPLUSCONNECTED = _stc.STC_MARK_CIRCLEPLUSCONNECTED
STC_MARK_CIRCLEMINUS = _stc.STC_MARK_CIRCLEMINUS
STC_MARK_CIRCLEMINUSCONNECTED = _stc.STC_MARK_CIRCLEMINUSCONNECTED
STC_MARK_BACKGROUND = _stc.STC_MARK_BACKGROUND
STC_MARK_DOTDOTDOT = _stc.STC_MARK_DOTDOTDOT
STC_MARK_ARROWS = _stc.STC_MARK_ARROWS
STC_MARK_PIXMAP = _stc.STC_MARK_PIXMAP
STC_MARK_FULLRECT = _stc.STC_MARK_FULLRECT
STC_MARK_LEFTRECT = _stc.STC_MARK_LEFTRECT
STC_MARK_AVAILABLE = _stc.STC_MARK_AVAILABLE
STC_MARK_UNDERLINE = _stc.STC_MARK_UNDERLINE
STC_MARK_RGBAIMAGE = _stc.STC_MARK_RGBAIMAGE
STC_MARK_CHARACTER = _stc.STC_MARK_CHARACTER
STC_MARKNUM_FOLDEREND = _stc.STC_MARKNUM_FOLDEREND
STC_MARKNUM_FOLDEROPENMID = _stc.STC_MARKNUM_FOLDEROPENMID
STC_MARKNUM_FOLDERMIDTAIL = _stc.STC_MARKNUM_FOLDERMIDTAIL
STC_MARKNUM_FOLDERTAIL = _stc.STC_MARKNUM_FOLDERTAIL
STC_MARKNUM_FOLDERSUB = _stc.STC_MARKNUM_FOLDERSUB
STC_MARKNUM_FOLDER = _stc.STC_MARKNUM_FOLDER
STC_MARKNUM_FOLDEROPEN = _stc.STC_MARKNUM_FOLDEROPEN
STC_MASK_FOLDERS = _stc.STC_MASK_FOLDERS
STC_MARGIN_SYMBOL = _stc.STC_MARGIN_SYMBOL
STC_MARGIN_NUMBER = _stc.STC_MARGIN_NUMBER
STC_MARGIN_BACK = _stc.STC_MARGIN_BACK
STC_MARGIN_FORE = _stc.STC_MARGIN_FORE
STC_MARGIN_TEXT = _stc.STC_MARGIN_TEXT
STC_MARGIN_RTEXT = _stc.STC_MARGIN_RTEXT
STC_STYLE_DEFAULT = _stc.STC_STYLE_DEFAULT
STC_STYLE_LINENUMBER = _stc.STC_STYLE_LINENUMBER
STC_STYLE_BRACELIGHT = _stc.STC_STYLE_BRACELIGHT
STC_STYLE_BRACEBAD = _stc.STC_STYLE_BRACEBAD
STC_STYLE_CONTROLCHAR = _stc.STC_STYLE_CONTROLCHAR
STC_STYLE_INDENTGUIDE = _stc.STC_STYLE_INDENTGUIDE
STC_STYLE_CALLTIP = _stc.STC_STYLE_CALLTIP
STC_STYLE_LASTPREDEFINED = _stc.STC_STYLE_LASTPREDEFINED
STC_STYLE_MAX = _stc.STC_STYLE_MAX
STC_CHARSET_ANSI = _stc.STC_CHARSET_ANSI
STC_CHARSET_DEFAULT = _stc.STC_CHARSET_DEFAULT
STC_CHARSET_BALTIC = _stc.STC_CHARSET_BALTIC
STC_CHARSET_CHINESEBIG5 = _stc.STC_CHARSET_CHINESEBIG5
STC_CHARSET_EASTEUROPE = _stc.STC_CHARSET_EASTEUROPE
STC_CHARSET_GB2312 = _stc.STC_CHARSET_GB2312
STC_CHARSET_GREEK = _stc.STC_CHARSET_GREEK
STC_CHARSET_HANGUL = _stc.STC_CHARSET_HANGUL
STC_CHARSET_MAC = _stc.STC_CHARSET_MAC
STC_CHARSET_OEM = _stc.STC_CHARSET_OEM
STC_CHARSET_RUSSIAN = _stc.STC_CHARSET_RUSSIAN
STC_CHARSET_CYRILLIC = _stc.STC_CHARSET_CYRILLIC
STC_CHARSET_SHIFTJIS = _stc.STC_CHARSET_SHIFTJIS
STC_CHARSET_SYMBOL = _stc.STC_CHARSET_SYMBOL
STC_CHARSET_TURKISH = _stc.STC_CHARSET_TURKISH
STC_CHARSET_JOHAB = _stc.STC_CHARSET_JOHAB
STC_CHARSET_HEBREW = _stc.STC_CHARSET_HEBREW
STC_CHARSET_ARABIC = _stc.STC_CHARSET_ARABIC
STC_CHARSET_VIETNAMESE = _stc.STC_CHARSET_VIETNAMESE
STC_CHARSET_THAI = _stc.STC_CHARSET_THAI
STC_CHARSET_8859_15 = _stc.STC_CHARSET_8859_15
STC_CASE_MIXED = _stc.STC_CASE_MIXED
STC_CASE_UPPER = _stc.STC_CASE_UPPER
STC_CASE_LOWER = _stc.STC_CASE_LOWER
STC_FONT_SIZE_MULTIPLIER = _stc.STC_FONT_SIZE_MULTIPLIER
STC_WEIGHT_NORMAL = _stc.STC_WEIGHT_NORMAL
STC_WEIGHT_SEMIBOLD = _stc.STC_WEIGHT_SEMIBOLD
STC_WEIGHT_BOLD = _stc.STC_WEIGHT_BOLD
STC_INDIC_PLAIN = _stc.STC_INDIC_PLAIN
STC_INDIC_SQUIGGLE = _stc.STC_INDIC_SQUIGGLE
STC_INDIC_TT = _stc.STC_INDIC_TT
STC_INDIC_DIAGONAL = _stc.STC_INDIC_DIAGONAL
STC_INDIC_STRIKE = _stc.STC_INDIC_STRIKE
STC_INDIC_HIDDEN = _stc.STC_INDIC_HIDDEN
STC_INDIC_BOX = _stc.STC_INDIC_BOX
STC_INDIC_ROUNDBOX = _stc.STC_INDIC_ROUNDBOX
STC_INDIC_STRAIGHTBOX = _stc.STC_INDIC_STRAIGHTBOX
STC_INDIC_DASH = _stc.STC_INDIC_DASH
STC_INDIC_DOTS = _stc.STC_INDIC_DOTS
STC_INDIC_SQUIGGLELOW = _stc.STC_INDIC_SQUIGGLELOW
STC_INDIC_DOTBOX = _stc.STC_INDIC_DOTBOX
STC_INDIC_MAX = _stc.STC_INDIC_MAX
STC_INDIC_CONTAINER = _stc.STC_INDIC_CONTAINER
STC_INDIC0_MASK = _stc.STC_INDIC0_MASK
STC_INDIC1_MASK = _stc.STC_INDIC1_MASK
STC_INDIC2_MASK = _stc.STC_INDIC2_MASK
STC_INDICS_MASK = _stc.STC_INDICS_MASK
STC_IV_NONE = _stc.STC_IV_NONE
STC_IV_REAL = _stc.STC_IV_REAL
STC_IV_LOOKFORWARD = _stc.STC_IV_LOOKFORWARD
STC_IV_LOOKBOTH = _stc.STC_IV_LOOKBOTH
STC_PRINT_NORMAL = _stc.STC_PRINT_NORMAL
STC_PRINT_INVERTLIGHT = _stc.STC_PRINT_INVERTLIGHT
STC_PRINT_BLACKONWHITE = _stc.STC_PRINT_BLACKONWHITE
STC_PRINT_COLOURONWHITE = _stc.STC_PRINT_COLOURONWHITE
STC_PRINT_COLOURONWHITEDEFAULTBG = _stc.STC_PRINT_COLOURONWHITEDEFAULTBG
STC_FIND_WHOLEWORD = _stc.STC_FIND_WHOLEWORD
STC_FIND_MATCHCASE = _stc.STC_FIND_MATCHCASE
STC_FIND_WORDSTART = _stc.STC_FIND_WORDSTART
STC_FIND_REGEXP = _stc.STC_FIND_REGEXP
STC_FIND_POSIX = _stc.STC_FIND_POSIX
STC_FOLDLEVELBASE = _stc.STC_FOLDLEVELBASE
STC_FOLDLEVELWHITEFLAG = _stc.STC_FOLDLEVELWHITEFLAG
STC_FOLDLEVELHEADERFLAG = _stc.STC_FOLDLEVELHEADERFLAG
STC_FOLDLEVELNUMBERMASK = _stc.STC_FOLDLEVELNUMBERMASK
STC_FOLDFLAG_LINEBEFORE_EXPANDED = _stc.STC_FOLDFLAG_LINEBEFORE_EXPANDED
STC_FOLDFLAG_LINEBEFORE_CONTRACTED = _stc.STC_FOLDFLAG_LINEBEFORE_CONTRACTED
STC_FOLDFLAG_LINEAFTER_EXPANDED = _stc.STC_FOLDFLAG_LINEAFTER_EXPANDED
STC_FOLDFLAG_LINEAFTER_CONTRACTED = _stc.STC_FOLDFLAG_LINEAFTER_CONTRACTED
STC_FOLDFLAG_LEVELNUMBERS = _stc.STC_FOLDFLAG_LEVELNUMBERS
STC_TIME_FOREVER = _stc.STC_TIME_FOREVER
STC_WRAP_NONE = _stc.STC_WRAP_NONE
STC_WRAP_WORD = _stc.STC_WRAP_WORD
STC_WRAP_CHAR = _stc.STC_WRAP_CHAR
STC_WRAPVISUALFLAG_NONE = _stc.STC_WRAPVISUALFLAG_NONE
STC_WRAPVISUALFLAG_END = _stc.STC_WRAPVISUALFLAG_END
STC_WRAPVISUALFLAG_START = _stc.STC_WRAPVISUALFLAG_START
STC_WRAPVISUALFLAG_MARGIN = _stc.STC_WRAPVISUALFLAG_MARGIN
STC_WRAPVISUALFLAGLOC_DEFAULT = _stc.STC_WRAPVISUALFLAGLOC_DEFAULT
STC_WRAPVISUALFLAGLOC_END_BY_TEXT = _stc.STC_WRAPVISUALFLAGLOC_END_BY_TEXT
STC_WRAPVISUALFLAGLOC_START_BY_TEXT = _stc.STC_WRAPVISUALFLAGLOC_START_BY_TEXT
STC_WRAPINDENT_FIXED = _stc.STC_WRAPINDENT_FIXED
STC_WRAPINDENT_SAME = _stc.STC_WRAPINDENT_SAME
STC_WRAPINDENT_INDENT = _stc.STC_WRAPINDENT_INDENT
STC_CACHE_NONE = _stc.STC_CACHE_NONE
STC_CACHE_CARET = _stc.STC_CACHE_CARET
STC_CACHE_PAGE = _stc.STC_CACHE_PAGE
STC_CACHE_DOCUMENT = _stc.STC_CACHE_DOCUMENT
STC_EFF_QUALITY_MASK = _stc.STC_EFF_QUALITY_MASK
STC_EFF_QUALITY_DEFAULT = _stc.STC_EFF_QUALITY_DEFAULT
STC_EFF_QUALITY_NON_ANTIALIASED = _stc.STC_EFF_QUALITY_NON_ANTIALIASED
STC_EFF_QUALITY_ANTIALIASED = _stc.STC_EFF_QUALITY_ANTIALIASED
STC_EFF_QUALITY_LCD_OPTIMIZED = _stc.STC_EFF_QUALITY_LCD_OPTIMIZED
STC_MULTIPASTE_ONCE = _stc.STC_MULTIPASTE_ONCE
STC_MULTIPASTE_EACH = _stc.STC_MULTIPASTE_EACH
STC_EDGE_NONE = _stc.STC_EDGE_NONE
STC_EDGE_LINE = _stc.STC_EDGE_LINE
STC_EDGE_BACKGROUND = _stc.STC_EDGE_BACKGROUND
STC_STATUS_OK = _stc.STC_STATUS_OK
STC_STATUS_FAILURE = _stc.STC_STATUS_FAILURE
STC_STATUS_BADALLOC = _stc.STC_STATUS_BADALLOC
STC_CURSORNORMAL = _stc.STC_CURSORNORMAL
STC_CURSORARROW = _stc.STC_CURSORARROW
STC_CURSORWAIT = _stc.STC_CURSORWAIT
STC_CURSORREVERSEARROW = _stc.STC_CURSORREVERSEARROW
STC_VISIBLE_SLOP = _stc.STC_VISIBLE_SLOP
STC_VISIBLE_STRICT = _stc.STC_VISIBLE_STRICT
STC_CARET_SLOP = _stc.STC_CARET_SLOP
STC_CARET_STRICT = _stc.STC_CARET_STRICT
STC_CARET_JUMPS = _stc.STC_CARET_JUMPS
STC_CARET_EVEN = _stc.STC_CARET_EVEN
STC_SEL_STREAM = _stc.STC_SEL_STREAM
STC_SEL_RECTANGLE = _stc.STC_SEL_RECTANGLE
STC_SEL_LINES = _stc.STC_SEL_LINES
STC_SEL_THIN = _stc.STC_SEL_THIN
STC_CASEINSENSITIVEBEHAVIOUR_RESPECTCASE = _stc.STC_CASEINSENSITIVEBEHAVIOUR_RESPECTCASE
STC_CASEINSENSITIVEBEHAVIOUR_IGNORECASE = _stc.STC_CASEINSENSITIVEBEHAVIOUR_IGNORECASE
STC_CARETSTICKY_OFF = _stc.STC_CARETSTICKY_OFF
STC_CARETSTICKY_ON = _stc.STC_CARETSTICKY_ON
STC_CARETSTICKY_WHITESPACE = _stc.STC_CARETSTICKY_WHITESPACE
STC_ALPHA_TRANSPARENT = _stc.STC_ALPHA_TRANSPARENT
STC_ALPHA_OPAQUE = _stc.STC_ALPHA_OPAQUE
STC_ALPHA_NOALPHA = _stc.STC_ALPHA_NOALPHA
STC_CARETSTYLE_INVISIBLE = _stc.STC_CARETSTYLE_INVISIBLE
STC_CARETSTYLE_LINE = _stc.STC_CARETSTYLE_LINE
STC_CARETSTYLE_BLOCK = _stc.STC_CARETSTYLE_BLOCK
STC_MARGINOPTION_NONE = _stc.STC_MARGINOPTION_NONE
STC_MARGINOPTION_SUBLINESELECT = _stc.STC_MARGINOPTION_SUBLINESELECT
STC_ANNOTATION_HIDDEN = _stc.STC_ANNOTATION_HIDDEN
STC_ANNOTATION_STANDARD = _stc.STC_ANNOTATION_STANDARD
STC_ANNOTATION_BOXED = _stc.STC_ANNOTATION_BOXED
STC_UNDO_MAY_COALESCE = _stc.STC_UNDO_MAY_COALESCE
STC_SCVS_NONE = _stc.STC_SCVS_NONE
STC_SCVS_RECTANGULARSELECTION = _stc.STC_SCVS_RECTANGULARSELECTION
STC_SCVS_USERACCESSIBLE = _stc.STC_SCVS_USERACCESSIBLE
STC_TECHNOLOGY_DEFAULT = _stc.STC_TECHNOLOGY_DEFAULT
STC_TECHNOLOGY_DIRECTWRITE = _stc.STC_TECHNOLOGY_DIRECTWRITE
STC_KEYWORDSET_MAX = _stc.STC_KEYWORDSET_MAX
STC_TYPE_BOOLEAN = _stc.STC_TYPE_BOOLEAN
STC_TYPE_INTEGER = _stc.STC_TYPE_INTEGER
STC_TYPE_STRING = _stc.STC_TYPE_STRING
STC_MOD_INSERTTEXT = _stc.STC_MOD_INSERTTEXT
STC_MOD_DELETETEXT = _stc.STC_MOD_DELETETEXT
STC_MOD_CHANGESTYLE = _stc.STC_MOD_CHANGESTYLE
STC_MOD_CHANGEFOLD = _stc.STC_MOD_CHANGEFOLD
STC_PERFORMED_USER = _stc.STC_PERFORMED_USER
STC_PERFORMED_UNDO = _stc.STC_PERFORMED_UNDO
STC_PERFORMED_REDO = _stc.STC_PERFORMED_REDO
STC_MULTISTEPUNDOREDO = _stc.STC_MULTISTEPUNDOREDO
STC_LASTSTEPINUNDOREDO = _stc.STC_LASTSTEPINUNDOREDO
STC_MOD_CHANGEMARKER = _stc.STC_MOD_CHANGEMARKER
STC_MOD_BEFOREINSERT = _stc.STC_MOD_BEFOREINSERT
STC_MOD_BEFOREDELETE = _stc.STC_MOD_BEFOREDELETE
STC_MULTILINEUNDOREDO = _stc.STC_MULTILINEUNDOREDO
STC_STARTACTION = _stc.STC_STARTACTION
STC_MOD_CHANGEINDICATOR = _stc.STC_MOD_CHANGEINDICATOR
STC_MOD_CHANGELINESTATE = _stc.STC_MOD_CHANGELINESTATE
STC_MOD_CHANGEMARGIN = _stc.STC_MOD_CHANGEMARGIN
STC_MOD_CHANGEANNOTATION = _stc.STC_MOD_CHANGEANNOTATION
STC_MOD_CONTAINER = _stc.STC_MOD_CONTAINER
STC_MOD_LEXERSTATE = _stc.STC_MOD_LEXERSTATE
STC_MODEVENTMASKALL = _stc.STC_MODEVENTMASKALL
STC_UPDATE_CONTENT = _stc.STC_UPDATE_CONTENT
STC_UPDATE_SELECTION = _stc.STC_UPDATE_SELECTION
STC_UPDATE_V_SCROLL = _stc.STC_UPDATE_V_SCROLL
STC_UPDATE_H_SCROLL = _stc.STC_UPDATE_H_SCROLL
STC_KEY_DOWN = _stc.STC_KEY_DOWN
STC_KEY_UP = _stc.STC_KEY_UP
STC_KEY_LEFT = _stc.STC_KEY_LEFT
STC_KEY_RIGHT = _stc.STC_KEY_RIGHT
STC_KEY_HOME = _stc.STC_KEY_HOME
STC_KEY_END = _stc.STC_KEY_END
STC_KEY_PRIOR = _stc.STC_KEY_PRIOR
STC_KEY_NEXT = _stc.STC_KEY_NEXT
STC_KEY_DELETE = _stc.STC_KEY_DELETE
STC_KEY_INSERT = _stc.STC_KEY_INSERT
STC_KEY_ESCAPE = _stc.STC_KEY_ESCAPE
STC_KEY_BACK = _stc.STC_KEY_BACK
STC_KEY_TAB = _stc.STC_KEY_TAB
STC_KEY_RETURN = _stc.STC_KEY_RETURN
STC_KEY_ADD = _stc.STC_KEY_ADD
STC_KEY_SUBTRACT = _stc.STC_KEY_SUBTRACT
STC_KEY_DIVIDE = _stc.STC_KEY_DIVIDE
STC_KEY_WIN = _stc.STC_KEY_WIN
STC_KEY_RWIN = _stc.STC_KEY_RWIN
STC_KEY_MENU = _stc.STC_KEY_MENU
STC_SCMOD_NORM = _stc.STC_SCMOD_NORM
STC_SCMOD_SHIFT = _stc.STC_SCMOD_SHIFT
STC_SCMOD_CTRL = _stc.STC_SCMOD_CTRL
STC_SCMOD_ALT = _stc.STC_SCMOD_ALT
STC_SCMOD_SUPER = _stc.STC_SCMOD_SUPER
STC_SCMOD_META = _stc.STC_SCMOD_META
STC_LEX_CONTAINER = _stc.STC_LEX_CONTAINER
STC_LEX_NULL = _stc.STC_LEX_NULL
STC_LEX_PYTHON = _stc.STC_LEX_PYTHON
STC_LEX_CPP = _stc.STC_LEX_CPP
STC_LEX_HTML = _stc.STC_LEX_HTML
STC_LEX_XML = _stc.STC_LEX_XML
STC_LEX_PERL = _stc.STC_LEX_PERL
STC_LEX_SQL = _stc.STC_LEX_SQL
STC_LEX_VB = _stc.STC_LEX_VB
STC_LEX_PROPERTIES = _stc.STC_LEX_PROPERTIES
STC_LEX_ERRORLIST = _stc.STC_LEX_ERRORLIST
STC_LEX_MAKEFILE = _stc.STC_LEX_MAKEFILE
STC_LEX_BATCH = _stc.STC_LEX_BATCH
STC_LEX_XCODE = _stc.STC_LEX_XCODE
STC_LEX_LATEX = _stc.STC_LEX_LATEX
STC_LEX_LUA = _stc.STC_LEX_LUA
STC_LEX_DIFF = _stc.STC_LEX_DIFF
STC_LEX_CONF = _stc.STC_LEX_CONF
STC_LEX_PASCAL = _stc.STC_LEX_PASCAL
STC_LEX_AVE = _stc.STC_LEX_AVE
STC_LEX_ADA = _stc.STC_LEX_ADA
STC_LEX_LISP = _stc.STC_LEX_LISP
STC_LEX_RUBY = _stc.STC_LEX_RUBY
STC_LEX_EIFFEL = _stc.STC_LEX_EIFFEL
STC_LEX_EIFFELKW = _stc.STC_LEX_EIFFELKW
STC_LEX_TCL = _stc.STC_LEX_TCL
STC_LEX_NNCRONTAB = _stc.STC_LEX_NNCRONTAB
STC_LEX_BULLANT = _stc.STC_LEX_BULLANT
STC_LEX_VBSCRIPT = _stc.STC_LEX_VBSCRIPT
STC_LEX_BAAN = _stc.STC_LEX_BAAN
STC_LEX_MATLAB = _stc.STC_LEX_MATLAB
STC_LEX_SCRIPTOL = _stc.STC_LEX_SCRIPTOL
STC_LEX_ASM = _stc.STC_LEX_ASM
STC_LEX_CPPNOCASE = _stc.STC_LEX_CPPNOCASE
STC_LEX_FORTRAN = _stc.STC_LEX_FORTRAN
STC_LEX_F77 = _stc.STC_LEX_F77
STC_LEX_CSS = _stc.STC_LEX_CSS
STC_LEX_POV = _stc.STC_LEX_POV
STC_LEX_LOUT = _stc.STC_LEX_LOUT
STC_LEX_ESCRIPT = _stc.STC_LEX_ESCRIPT
STC_LEX_PS = _stc.STC_LEX_PS
STC_LEX_NSIS = _stc.STC_LEX_NSIS
STC_LEX_MMIXAL = _stc.STC_LEX_MMIXAL
STC_LEX_CLW = _stc.STC_LEX_CLW
STC_LEX_CLWNOCASE = _stc.STC_LEX_CLWNOCASE
STC_LEX_LOT = _stc.STC_LEX_LOT
STC_LEX_YAML = _stc.STC_LEX_YAML
STC_LEX_TEX = _stc.STC_LEX_TEX
STC_LEX_METAPOST = _stc.STC_LEX_METAPOST
STC_LEX_POWERBASIC = _stc.STC_LEX_POWERBASIC
STC_LEX_FORTH = _stc.STC_LEX_FORTH
STC_LEX_ERLANG = _stc.STC_LEX_ERLANG
STC_LEX_OCTAVE = _stc.STC_LEX_OCTAVE
STC_LEX_MSSQL = _stc.STC_LEX_MSSQL
STC_LEX_VERILOG = _stc.STC_LEX_VERILOG
STC_LEX_KIX = _stc.STC_LEX_KIX
STC_LEX_GUI4CLI = _stc.STC_LEX_GUI4CLI
STC_LEX_SPECMAN = _stc.STC_LEX_SPECMAN
STC_LEX_AU3 = _stc.STC_LEX_AU3
STC_LEX_APDL = _stc.STC_LEX_APDL
STC_LEX_BASH = _stc.STC_LEX_BASH
STC_LEX_ASN1 = _stc.STC_LEX_ASN1
STC_LEX_VHDL = _stc.STC_LEX_VHDL
STC_LEX_CAML = _stc.STC_LEX_CAML
STC_LEX_BLITZBASIC = _stc.STC_LEX_BLITZBASIC
STC_LEX_PUREBASIC = _stc.STC_LEX_PUREBASIC
STC_LEX_HASKELL = _stc.STC_LEX_HASKELL
STC_LEX_PHPSCRIPT = _stc.STC_LEX_PHPSCRIPT
STC_LEX_TADS3 = _stc.STC_LEX_TADS3
STC_LEX_REBOL = _stc.STC_LEX_REBOL
STC_LEX_SMALLTALK = _stc.STC_LEX_SMALLTALK
STC_LEX_FLAGSHIP = _stc.STC_LEX_FLAGSHIP
STC_LEX_CSOUND = _stc.STC_LEX_CSOUND
STC_LEX_FREEBASIC = _stc.STC_LEX_FREEBASIC
STC_LEX_INNOSETUP = _stc.STC_LEX_INNOSETUP
STC_LEX_OPAL = _stc.STC_LEX_OPAL
STC_LEX_SPICE = _stc.STC_LEX_SPICE
STC_LEX_D = _stc.STC_LEX_D
STC_LEX_CMAKE = _stc.STC_LEX_CMAKE
STC_LEX_GAP = _stc.STC_LEX_GAP
STC_LEX_PLM = _stc.STC_LEX_PLM
STC_LEX_PROGRESS = _stc.STC_LEX_PROGRESS
STC_LEX_ABAQUS = _stc.STC_LEX_ABAQUS
STC_LEX_ASYMPTOTE = _stc.STC_LEX_ASYMPTOTE
STC_LEX_R = _stc.STC_LEX_R
STC_LEX_MAGIK = _stc.STC_LEX_MAGIK
STC_LEX_POWERSHELL = _stc.STC_LEX_POWERSHELL
STC_LEX_MYSQL = _stc.STC_LEX_MYSQL
STC_LEX_PO = _stc.STC_LEX_PO
STC_LEX_TAL = _stc.STC_LEX_TAL
STC_LEX_COBOL = _stc.STC_LEX_COBOL
STC_LEX_TACL = _stc.STC_LEX_TACL
STC_LEX_SORCUS = _stc.STC_LEX_SORCUS
STC_LEX_POWERPRO = _stc.STC_LEX_POWERPRO
STC_LEX_NIMROD = _stc.STC_LEX_NIMROD
STC_LEX_SML = _stc.STC_LEX_SML
STC_LEX_MARKDOWN = _stc.STC_LEX_MARKDOWN
STC_LEX_TXT2TAGS = _stc.STC_LEX_TXT2TAGS
STC_LEX_A68K = _stc.STC_LEX_A68K
STC_LEX_MODULA = _stc.STC_LEX_MODULA
STC_LEX_COFFEESCRIPT = _stc.STC_LEX_COFFEESCRIPT
STC_LEX_TCMD = _stc.STC_LEX_TCMD
STC_LEX_AVS = _stc.STC_LEX_AVS
STC_LEX_ECL = _stc.STC_LEX_ECL
STC_LEX_OSCRIPT = _stc.STC_LEX_OSCRIPT
STC_LEX_VISUALPROLOG = _stc.STC_LEX_VISUALPROLOG
STC_LEX_AUTOMATIC = _stc.STC_LEX_AUTOMATIC
STC_P_DEFAULT = _stc.STC_P_DEFAULT
STC_P_COMMENTLINE = _stc.STC_P_COMMENTLINE
STC_P_NUMBER = _stc.STC_P_NUMBER
STC_P_STRING = _stc.STC_P_STRING
STC_P_CHARACTER = _stc.STC_P_CHARACTER
STC_P_WORD = _stc.STC_P_WORD
STC_P_TRIPLE = _stc.STC_P_TRIPLE
STC_P_TRIPLEDOUBLE = _stc.STC_P_TRIPLEDOUBLE
STC_P_CLASSNAME = _stc.STC_P_CLASSNAME
STC_P_DEFNAME = _stc.STC_P_DEFNAME
STC_P_OPERATOR = _stc.STC_P_OPERATOR
STC_P_IDENTIFIER = _stc.STC_P_IDENTIFIER
STC_P_COMMENTBLOCK = _stc.STC_P_COMMENTBLOCK
STC_P_STRINGEOL = _stc.STC_P_STRINGEOL
STC_P_WORD2 = _stc.STC_P_WORD2
STC_P_DECORATOR = _stc.STC_P_DECORATOR
STC_C_DEFAULT = _stc.STC_C_DEFAULT
STC_C_COMMENT = _stc.STC_C_COMMENT
STC_C_COMMENTLINE = _stc.STC_C_COMMENTLINE
STC_C_COMMENTDOC = _stc.STC_C_COMMENTDOC
STC_C_NUMBER = _stc.STC_C_NUMBER
STC_C_WORD = _stc.STC_C_WORD
STC_C_STRING = _stc.STC_C_STRING
STC_C_CHARACTER = _stc.STC_C_CHARACTER
STC_C_UUID = _stc.STC_C_UUID
STC_C_PREPROCESSOR = _stc.STC_C_PREPROCESSOR
STC_C_OPERATOR = _stc.STC_C_OPERATOR
STC_C_IDENTIFIER = _stc.STC_C_IDENTIFIER
STC_C_STRINGEOL = _stc.STC_C_STRINGEOL
STC_C_VERBATIM = _stc.STC_C_VERBATIM
STC_C_REGEX = _stc.STC_C_REGEX
STC_C_COMMENTLINEDOC = _stc.STC_C_COMMENTLINEDOC
STC_C_WORD2 = _stc.STC_C_WORD2
STC_C_COMMENTDOCKEYWORD = _stc.STC_C_COMMENTDOCKEYWORD
STC_C_COMMENTDOCKEYWORDERROR = _stc.STC_C_COMMENTDOCKEYWORDERROR
STC_C_GLOBALCLASS = _stc.STC_C_GLOBALCLASS
STC_C_STRINGRAW = _stc.STC_C_STRINGRAW
STC_C_TRIPLEVERBATIM = _stc.STC_C_TRIPLEVERBATIM
STC_C_HASHQUOTEDSTRING = _stc.STC_C_HASHQUOTEDSTRING
STC_C_PREPROCESSORCOMMENT = _stc.STC_C_PREPROCESSORCOMMENT
STC_D_DEFAULT = _stc.STC_D_DEFAULT
STC_D_COMMENT = _stc.STC_D_COMMENT
STC_D_COMMENTLINE = _stc.STC_D_COMMENTLINE
STC_D_COMMENTDOC = _stc.STC_D_COMMENTDOC
STC_D_COMMENTNESTED = _stc.STC_D_COMMENTNESTED
STC_D_NUMBER = _stc.STC_D_NUMBER
STC_D_WORD = _stc.STC_D_WORD
STC_D_WORD2 = _stc.STC_D_WORD2
STC_D_WORD3 = _stc.STC_D_WORD3
STC_D_TYPEDEF = _stc.STC_D_TYPEDEF
STC_D_STRING = _stc.STC_D_STRING
STC_D_STRINGEOL = _stc.STC_D_STRINGEOL
STC_D_CHARACTER = _stc.STC_D_CHARACTER
STC_D_OPERATOR = _stc.STC_D_OPERATOR
STC_D_IDENTIFIER = _stc.STC_D_IDENTIFIER
STC_D_COMMENTLINEDOC = _stc.STC_D_COMMENTLINEDOC
STC_D_COMMENTDOCKEYWORD = _stc.STC_D_COMMENTDOCKEYWORD
STC_D_COMMENTDOCKEYWORDERROR = _stc.STC_D_COMMENTDOCKEYWORDERROR
STC_D_STRINGB = _stc.STC_D_STRINGB
STC_D_STRINGR = _stc.STC_D_STRINGR
STC_D_WORD5 = _stc.STC_D_WORD5
STC_D_WORD6 = _stc.STC_D_WORD6
STC_D_WORD7 = _stc.STC_D_WORD7
STC_TCL_DEFAULT = _stc.STC_TCL_DEFAULT
STC_TCL_COMMENT = _stc.STC_TCL_COMMENT
STC_TCL_COMMENTLINE = _stc.STC_TCL_COMMENTLINE
STC_TCL_NUMBER = _stc.STC_TCL_NUMBER
STC_TCL_WORD_IN_QUOTE = _stc.STC_TCL_WORD_IN_QUOTE
STC_TCL_IN_QUOTE = _stc.STC_TCL_IN_QUOTE
STC_TCL_OPERATOR = _stc.STC_TCL_OPERATOR
STC_TCL_IDENTIFIER = _stc.STC_TCL_IDENTIFIER
STC_TCL_SUBSTITUTION = _stc.STC_TCL_SUBSTITUTION
STC_TCL_SUB_BRACE = _stc.STC_TCL_SUB_BRACE
STC_TCL_MODIFIER = _stc.STC_TCL_MODIFIER
STC_TCL_EXPAND = _stc.STC_TCL_EXPAND
STC_TCL_WORD = _stc.STC_TCL_WORD
STC_TCL_WORD2 = _stc.STC_TCL_WORD2
STC_TCL_WORD3 = _stc.STC_TCL_WORD3
STC_TCL_WORD4 = _stc.STC_TCL_WORD4
STC_TCL_WORD5 = _stc.STC_TCL_WORD5
STC_TCL_WORD6 = _stc.STC_TCL_WORD6
STC_TCL_WORD7 = _stc.STC_TCL_WORD7
STC_TCL_WORD8 = _stc.STC_TCL_WORD8
STC_TCL_COMMENT_BOX = _stc.STC_TCL_COMMENT_BOX
STC_TCL_BLOCK_COMMENT = _stc.STC_TCL_BLOCK_COMMENT
STC_H_DEFAULT = _stc.STC_H_DEFAULT
STC_H_TAG = _stc.STC_H_TAG
STC_H_TAGUNKNOWN = _stc.STC_H_TAGUNKNOWN
STC_H_ATTRIBUTE = _stc.STC_H_ATTRIBUTE
STC_H_ATTRIBUTEUNKNOWN = _stc.STC_H_ATTRIBUTEUNKNOWN
STC_H_NUMBER = _stc.STC_H_NUMBER
STC_H_DOUBLESTRING = _stc.STC_H_DOUBLESTRING
STC_H_SINGLESTRING = _stc.STC_H_SINGLESTRING
STC_H_OTHER = _stc.STC_H_OTHER
STC_H_COMMENT = _stc.STC_H_COMMENT
STC_H_ENTITY = _stc.STC_H_ENTITY
STC_H_TAGEND = _stc.STC_H_TAGEND
STC_H_XMLSTART = _stc.STC_H_XMLSTART
STC_H_XMLEND = _stc.STC_H_XMLEND
STC_H_SCRIPT = _stc.STC_H_SCRIPT
STC_H_ASP = _stc.STC_H_ASP
STC_H_ASPAT = _stc.STC_H_ASPAT
STC_H_CDATA = _stc.STC_H_CDATA
STC_H_QUESTION = _stc.STC_H_QUESTION
STC_H_VALUE = _stc.STC_H_VALUE
STC_H_XCCOMMENT = _stc.STC_H_XCCOMMENT
STC_H_SGML_DEFAULT = _stc.STC_H_SGML_DEFAULT
STC_H_SGML_COMMAND = _stc.STC_H_SGML_COMMAND
STC_H_SGML_1ST_PARAM = _stc.STC_H_SGML_1ST_PARAM
STC_H_SGML_DOUBLESTRING = _stc.STC_H_SGML_DOUBLESTRING
STC_H_SGML_SIMPLESTRING = _stc.STC_H_SGML_SIMPLESTRING
STC_H_SGML_ERROR = _stc.STC_H_SGML_ERROR
STC_H_SGML_SPECIAL = _stc.STC_H_SGML_SPECIAL
STC_H_SGML_ENTITY = _stc.STC_H_SGML_ENTITY
STC_H_SGML_COMMENT = _stc.STC_H_SGML_COMMENT
STC_H_SGML_1ST_PARAM_COMMENT = _stc.STC_H_SGML_1ST_PARAM_COMMENT
STC_H_SGML_BLOCK_DEFAULT = _stc.STC_H_SGML_BLOCK_DEFAULT
STC_HJ_START = _stc.STC_HJ_START
STC_HJ_DEFAULT = _stc.STC_HJ_DEFAULT
STC_HJ_COMMENT = _stc.STC_HJ_COMMENT
STC_HJ_COMMENTLINE = _stc.STC_HJ_COMMENTLINE
STC_HJ_COMMENTDOC = _stc.STC_HJ_COMMENTDOC
STC_HJ_NUMBER = _stc.STC_HJ_NUMBER
STC_HJ_WORD = _stc.STC_HJ_WORD
STC_HJ_KEYWORD = _stc.STC_HJ_KEYWORD
STC_HJ_DOUBLESTRING = _stc.STC_HJ_DOUBLESTRING
STC_HJ_SINGLESTRING = _stc.STC_HJ_SINGLESTRING
STC_HJ_SYMBOLS = _stc.STC_HJ_SYMBOLS
STC_HJ_STRINGEOL = _stc.STC_HJ_STRINGEOL
STC_HJ_REGEX = _stc.STC_HJ_REGEX
STC_HJA_START = _stc.STC_HJA_START
STC_HJA_DEFAULT = _stc.STC_HJA_DEFAULT
STC_HJA_COMMENT = _stc.STC_HJA_COMMENT
STC_HJA_COMMENTLINE = _stc.STC_HJA_COMMENTLINE
STC_HJA_COMMENTDOC = _stc.STC_HJA_COMMENTDOC
STC_HJA_NUMBER = _stc.STC_HJA_NUMBER
STC_HJA_WORD = _stc.STC_HJA_WORD
STC_HJA_KEYWORD = _stc.STC_HJA_KEYWORD
STC_HJA_DOUBLESTRING = _stc.STC_HJA_DOUBLESTRING
STC_HJA_SINGLESTRING = _stc.STC_HJA_SINGLESTRING
STC_HJA_SYMBOLS = _stc.STC_HJA_SYMBOLS
STC_HJA_STRINGEOL = _stc.STC_HJA_STRINGEOL
STC_HJA_REGEX = _stc.STC_HJA_REGEX
STC_HB_START = _stc.STC_HB_START
STC_HB_DEFAULT = _stc.STC_HB_DEFAULT
STC_HB_COMMENTLINE = _stc.STC_HB_COMMENTLINE
STC_HB_NUMBER = _stc.STC_HB_NUMBER
STC_HB_WORD = _stc.STC_HB_WORD
STC_HB_STRING = _stc.STC_HB_STRING
STC_HB_IDENTIFIER = _stc.STC_HB_IDENTIFIER
STC_HB_STRINGEOL = _stc.STC_HB_STRINGEOL
STC_HBA_START = _stc.STC_HBA_START
STC_HBA_DEFAULT = _stc.STC_HBA_DEFAULT
STC_HBA_COMMENTLINE = _stc.STC_HBA_COMMENTLINE
STC_HBA_NUMBER = _stc.STC_HBA_NUMBER
STC_HBA_WORD = _stc.STC_HBA_WORD
STC_HBA_STRING = _stc.STC_HBA_STRING
STC_HBA_IDENTIFIER = _stc.STC_HBA_IDENTIFIER
STC_HBA_STRINGEOL = _stc.STC_HBA_STRINGEOL
STC_HP_START = _stc.STC_HP_START
STC_HP_DEFAULT = _stc.STC_HP_DEFAULT
STC_HP_COMMENTLINE = _stc.STC_HP_COMMENTLINE
STC_HP_NUMBER = _stc.STC_HP_NUMBER
STC_HP_STRING = _stc.STC_HP_STRING
STC_HP_CHARACTER = _stc.STC_HP_CHARACTER
STC_HP_WORD = _stc.STC_HP_WORD
STC_HP_TRIPLE = _stc.STC_HP_TRIPLE
STC_HP_TRIPLEDOUBLE = _stc.STC_HP_TRIPLEDOUBLE
STC_HP_CLASSNAME = _stc.STC_HP_CLASSNAME
STC_HP_DEFNAME = _stc.STC_HP_DEFNAME
STC_HP_OPERATOR = _stc.STC_HP_OPERATOR
STC_HP_IDENTIFIER = _stc.STC_HP_IDENTIFIER
STC_HPHP_COMPLEX_VARIABLE = _stc.STC_HPHP_COMPLEX_VARIABLE
STC_HPA_START = _stc.STC_HPA_START
STC_HPA_DEFAULT = _stc.STC_HPA_DEFAULT
STC_HPA_COMMENTLINE = _stc.STC_HPA_COMMENTLINE
STC_HPA_NUMBER = _stc.STC_HPA_NUMBER
STC_HPA_STRING = _stc.STC_HPA_STRING
STC_HPA_CHARACTER = _stc.STC_HPA_CHARACTER
STC_HPA_WORD = _stc.STC_HPA_WORD
STC_HPA_TRIPLE = _stc.STC_HPA_TRIPLE
STC_HPA_TRIPLEDOUBLE = _stc.STC_HPA_TRIPLEDOUBLE
STC_HPA_CLASSNAME = _stc.STC_HPA_CLASSNAME
STC_HPA_DEFNAME = _stc.STC_HPA_DEFNAME
STC_HPA_OPERATOR = _stc.STC_HPA_OPERATOR
STC_HPA_IDENTIFIER = _stc.STC_HPA_IDENTIFIER
STC_HPHP_DEFAULT = _stc.STC_HPHP_DEFAULT
STC_HPHP_HSTRING = _stc.STC_HPHP_HSTRING
STC_HPHP_SIMPLESTRING = _stc.STC_HPHP_SIMPLESTRING
STC_HPHP_WORD = _stc.STC_HPHP_WORD
STC_HPHP_NUMBER = _stc.STC_HPHP_NUMBER
STC_HPHP_VARIABLE = _stc.STC_HPHP_VARIABLE
STC_HPHP_COMMENT = _stc.STC_HPHP_COMMENT
STC_HPHP_COMMENTLINE = _stc.STC_HPHP_COMMENTLINE
STC_HPHP_HSTRING_VARIABLE = _stc.STC_HPHP_HSTRING_VARIABLE
STC_HPHP_OPERATOR = _stc.STC_HPHP_OPERATOR
STC_PL_DEFAULT = _stc.STC_PL_DEFAULT
STC_PL_ERROR = _stc.STC_PL_ERROR
STC_PL_COMMENTLINE = _stc.STC_PL_COMMENTLINE
STC_PL_POD = _stc.STC_PL_POD
STC_PL_NUMBER = _stc.STC_PL_NUMBER
STC_PL_WORD = _stc.STC_PL_WORD
STC_PL_STRING = _stc.STC_PL_STRING
STC_PL_CHARACTER = _stc.STC_PL_CHARACTER
STC_PL_PUNCTUATION = _stc.STC_PL_PUNCTUATION
STC_PL_PREPROCESSOR = _stc.STC_PL_PREPROCESSOR
STC_PL_OPERATOR = _stc.STC_PL_OPERATOR
STC_PL_IDENTIFIER = _stc.STC_PL_IDENTIFIER
STC_PL_SCALAR = _stc.STC_PL_SCALAR
STC_PL_ARRAY = _stc.STC_PL_ARRAY
STC_PL_HASH = _stc.STC_PL_HASH
STC_PL_SYMBOLTABLE = _stc.STC_PL_SYMBOLTABLE
STC_PL_VARIABLE_INDEXER = _stc.STC_PL_VARIABLE_INDEXER
STC_PL_REGEX = _stc.STC_PL_REGEX
STC_PL_REGSUBST = _stc.STC_PL_REGSUBST
STC_PL_LONGQUOTE = _stc.STC_PL_LONGQUOTE
STC_PL_BACKTICKS = _stc.STC_PL_BACKTICKS
STC_PL_DATASECTION = _stc.STC_PL_DATASECTION
STC_PL_HERE_DELIM = _stc.STC_PL_HERE_DELIM
STC_PL_HERE_Q = _stc.STC_PL_HERE_Q
STC_PL_HERE_QQ = _stc.STC_PL_HERE_QQ
STC_PL_HERE_QX = _stc.STC_PL_HERE_QX
STC_PL_STRING_Q = _stc.STC_PL_STRING_Q
STC_PL_STRING_QQ = _stc.STC_PL_STRING_QQ
STC_PL_STRING_QX = _stc.STC_PL_STRING_QX
STC_PL_STRING_QR = _stc.STC_PL_STRING_QR
STC_PL_STRING_QW = _stc.STC_PL_STRING_QW
STC_PL_POD_VERB = _stc.STC_PL_POD_VERB
STC_PL_SUB_PROTOTYPE = _stc.STC_PL_SUB_PROTOTYPE
STC_PL_FORMAT_IDENT = _stc.STC_PL_FORMAT_IDENT
STC_PL_FORMAT = _stc.STC_PL_FORMAT
STC_PL_STRING_VAR = _stc.STC_PL_STRING_VAR
STC_PL_XLAT = _stc.STC_PL_XLAT
STC_PL_REGEX_VAR = _stc.STC_PL_REGEX_VAR
STC_PL_REGSUBST_VAR = _stc.STC_PL_REGSUBST_VAR
STC_PL_BACKTICKS_VAR = _stc.STC_PL_BACKTICKS_VAR
STC_PL_HERE_QQ_VAR = _stc.STC_PL_HERE_QQ_VAR
STC_PL_HERE_QX_VAR = _stc.STC_PL_HERE_QX_VAR
STC_PL_STRING_QQ_VAR = _stc.STC_PL_STRING_QQ_VAR
STC_PL_STRING_QX_VAR = _stc.STC_PL_STRING_QX_VAR
STC_PL_STRING_QR_VAR = _stc.STC_PL_STRING_QR_VAR
STC_RB_DEFAULT = _stc.STC_RB_DEFAULT
STC_RB_ERROR = _stc.STC_RB_ERROR
STC_RB_COMMENTLINE = _stc.STC_RB_COMMENTLINE
STC_RB_POD = _stc.STC_RB_POD
STC_RB_NUMBER = _stc.STC_RB_NUMBER
STC_RB_WORD = _stc.STC_RB_WORD
STC_RB_STRING = _stc.STC_RB_STRING
STC_RB_CHARACTER = _stc.STC_RB_CHARACTER
STC_RB_CLASSNAME = _stc.STC_RB_CLASSNAME
STC_RB_DEFNAME = _stc.STC_RB_DEFNAME
STC_RB_OPERATOR = _stc.STC_RB_OPERATOR
STC_RB_IDENTIFIER = _stc.STC_RB_IDENTIFIER
STC_RB_REGEX = _stc.STC_RB_REGEX
STC_RB_GLOBAL = _stc.STC_RB_GLOBAL
STC_RB_SYMBOL = _stc.STC_RB_SYMBOL
STC_RB_MODULE_NAME = _stc.STC_RB_MODULE_NAME
STC_RB_INSTANCE_VAR = _stc.STC_RB_INSTANCE_VAR
STC_RB_CLASS_VAR = _stc.STC_RB_CLASS_VAR
STC_RB_BACKTICKS = _stc.STC_RB_BACKTICKS
STC_RB_DATASECTION = _stc.STC_RB_DATASECTION
STC_RB_HERE_DELIM = _stc.STC_RB_HERE_DELIM
STC_RB_HERE_Q = _stc.STC_RB_HERE_Q
STC_RB_HERE_QQ = _stc.STC_RB_HERE_QQ
STC_RB_HERE_QX = _stc.STC_RB_HERE_QX
STC_RB_STRING_Q = _stc.STC_RB_STRING_Q
STC_RB_STRING_QQ = _stc.STC_RB_STRING_QQ
STC_RB_STRING_QX = _stc.STC_RB_STRING_QX
STC_RB_STRING_QR = _stc.STC_RB_STRING_QR
STC_RB_STRING_QW = _stc.STC_RB_STRING_QW
STC_RB_WORD_DEMOTED = _stc.STC_RB_WORD_DEMOTED
STC_RB_STDIN = _stc.STC_RB_STDIN
STC_RB_STDOUT = _stc.STC_RB_STDOUT
STC_RB_STDERR = _stc.STC_RB_STDERR
STC_RB_UPPER_BOUND = _stc.STC_RB_UPPER_BOUND
STC_B_DEFAULT = _stc.STC_B_DEFAULT
STC_B_COMMENT = _stc.STC_B_COMMENT
STC_B_NUMBER = _stc.STC_B_NUMBER
STC_B_KEYWORD = _stc.STC_B_KEYWORD
STC_B_STRING = _stc.STC_B_STRING
STC_B_PREPROCESSOR = _stc.STC_B_PREPROCESSOR
STC_B_OPERATOR = _stc.STC_B_OPERATOR
STC_B_IDENTIFIER = _stc.STC_B_IDENTIFIER
STC_B_DATE = _stc.STC_B_DATE
STC_B_STRINGEOL = _stc.STC_B_STRINGEOL
STC_B_KEYWORD2 = _stc.STC_B_KEYWORD2
STC_B_KEYWORD3 = _stc.STC_B_KEYWORD3
STC_B_KEYWORD4 = _stc.STC_B_KEYWORD4
STC_B_CONSTANT = _stc.STC_B_CONSTANT
STC_B_ASM = _stc.STC_B_ASM
STC_B_LABEL = _stc.STC_B_LABEL
STC_B_ERROR = _stc.STC_B_ERROR
STC_B_HEXNUMBER = _stc.STC_B_HEXNUMBER
STC_B_BINNUMBER = _stc.STC_B_BINNUMBER
STC_PROPS_DEFAULT = _stc.STC_PROPS_DEFAULT
STC_PROPS_COMMENT = _stc.STC_PROPS_COMMENT
STC_PROPS_SECTION = _stc.STC_PROPS_SECTION
STC_PROPS_ASSIGNMENT = _stc.STC_PROPS_ASSIGNMENT
STC_PROPS_DEFVAL = _stc.STC_PROPS_DEFVAL
STC_PROPS_KEY = _stc.STC_PROPS_KEY
STC_L_DEFAULT = _stc.STC_L_DEFAULT
STC_L_COMMAND = _stc.STC_L_COMMAND
STC_L_TAG = _stc.STC_L_TAG
STC_L_MATH = _stc.STC_L_MATH
STC_L_COMMENT = _stc.STC_L_COMMENT
STC_L_TAG2 = _stc.STC_L_TAG2
STC_L_MATH2 = _stc.STC_L_MATH2
STC_L_COMMENT2 = _stc.STC_L_COMMENT2
STC_L_VERBATIM = _stc.STC_L_VERBATIM
STC_L_SHORTCMD = _stc.STC_L_SHORTCMD
STC_L_SPECIAL = _stc.STC_L_SPECIAL
STC_L_CMDOPT = _stc.STC_L_CMDOPT
STC_L_ERROR = _stc.STC_L_ERROR
STC_LUA_DEFAULT = _stc.STC_LUA_DEFAULT
STC_LUA_COMMENT = _stc.STC_LUA_COMMENT
STC_LUA_COMMENTLINE = _stc.STC_LUA_COMMENTLINE
STC_LUA_COMMENTDOC = _stc.STC_LUA_COMMENTDOC
STC_LUA_NUMBER = _stc.STC_LUA_NUMBER
STC_LUA_WORD = _stc.STC_LUA_WORD
STC_LUA_STRING = _stc.STC_LUA_STRING
STC_LUA_CHARACTER = _stc.STC_LUA_CHARACTER
STC_LUA_LITERALSTRING = _stc.STC_LUA_LITERALSTRING
STC_LUA_PREPROCESSOR = _stc.STC_LUA_PREPROCESSOR
STC_LUA_OPERATOR = _stc.STC_LUA_OPERATOR
STC_LUA_IDENTIFIER = _stc.STC_LUA_IDENTIFIER
STC_LUA_STRINGEOL = _stc.STC_LUA_STRINGEOL
STC_LUA_WORD2 = _stc.STC_LUA_WORD2
STC_LUA_WORD3 = _stc.STC_LUA_WORD3
STC_LUA_WORD4 = _stc.STC_LUA_WORD4
STC_LUA_WORD5 = _stc.STC_LUA_WORD5
STC_LUA_WORD6 = _stc.STC_LUA_WORD6
STC_LUA_WORD7 = _stc.STC_LUA_WORD7
STC_LUA_WORD8 = _stc.STC_LUA_WORD8
STC_LUA_LABEL = _stc.STC_LUA_LABEL
STC_ERR_DEFAULT = _stc.STC_ERR_DEFAULT
STC_ERR_PYTHON = _stc.STC_ERR_PYTHON
STC_ERR_GCC = _stc.STC_ERR_GCC
STC_ERR_MS = _stc.STC_ERR_MS
STC_ERR_CMD = _stc.STC_ERR_CMD
STC_ERR_BORLAND = _stc.STC_ERR_BORLAND
STC_ERR_PERL = _stc.STC_ERR_PERL
STC_ERR_NET = _stc.STC_ERR_NET
STC_ERR_LUA = _stc.STC_ERR_LUA
STC_ERR_CTAG = _stc.STC_ERR_CTAG
STC_ERR_DIFF_CHANGED = _stc.STC_ERR_DIFF_CHANGED
STC_ERR_DIFF_ADDITION = _stc.STC_ERR_DIFF_ADDITION
STC_ERR_DIFF_DELETION = _stc.STC_ERR_DIFF_DELETION
STC_ERR_DIFF_MESSAGE = _stc.STC_ERR_DIFF_MESSAGE
STC_ERR_PHP = _stc.STC_ERR_PHP
STC_ERR_ELF = _stc.STC_ERR_ELF
STC_ERR_IFC = _stc.STC_ERR_IFC
STC_ERR_IFORT = _stc.STC_ERR_IFORT
STC_ERR_ABSF = _stc.STC_ERR_ABSF
STC_ERR_TIDY = _stc.STC_ERR_TIDY
STC_ERR_JAVA_STACK = _stc.STC_ERR_JAVA_STACK
STC_ERR_VALUE = _stc.STC_ERR_VALUE
STC_BAT_DEFAULT = _stc.STC_BAT_DEFAULT
STC_BAT_COMMENT = _stc.STC_BAT_COMMENT
STC_BAT_WORD = _stc.STC_BAT_WORD
STC_BAT_LABEL = _stc.STC_BAT_LABEL
STC_BAT_HIDE = _stc.STC_BAT_HIDE
STC_BAT_COMMAND = _stc.STC_BAT_COMMAND
STC_BAT_IDENTIFIER = _stc.STC_BAT_IDENTIFIER
STC_BAT_OPERATOR = _stc.STC_BAT_OPERATOR
STC_TCMD_DEFAULT = _stc.STC_TCMD_DEFAULT
STC_TCMD_COMMENT = _stc.STC_TCMD_COMMENT
STC_TCMD_WORD = _stc.STC_TCMD_WORD
STC_TCMD_LABEL = _stc.STC_TCMD_LABEL
STC_TCMD_HIDE = _stc.STC_TCMD_HIDE
STC_TCMD_COMMAND = _stc.STC_TCMD_COMMAND
STC_TCMD_IDENTIFIER = _stc.STC_TCMD_IDENTIFIER
STC_TCMD_OPERATOR = _stc.STC_TCMD_OPERATOR
STC_TCMD_ENVIRONMENT = _stc.STC_TCMD_ENVIRONMENT
STC_TCMD_EXPANSION = _stc.STC_TCMD_EXPANSION
STC_TCMD_CLABEL = _stc.STC_TCMD_CLABEL
STC_MAKE_DEFAULT = _stc.STC_MAKE_DEFAULT
STC_MAKE_COMMENT = _stc.STC_MAKE_COMMENT
STC_MAKE_PREPROCESSOR = _stc.STC_MAKE_PREPROCESSOR
STC_MAKE_IDENTIFIER = _stc.STC_MAKE_IDENTIFIER
STC_MAKE_OPERATOR = _stc.STC_MAKE_OPERATOR
STC_MAKE_TARGET = _stc.STC_MAKE_TARGET
STC_MAKE_IDEOL = _stc.STC_MAKE_IDEOL
STC_DIFF_DEFAULT = _stc.STC_DIFF_DEFAULT
STC_DIFF_COMMENT = _stc.STC_DIFF_COMMENT
STC_DIFF_COMMAND = _stc.STC_DIFF_COMMAND
STC_DIFF_HEADER = _stc.STC_DIFF_HEADER
STC_DIFF_POSITION = _stc.STC_DIFF_POSITION
STC_DIFF_DELETED = _stc.STC_DIFF_DELETED
STC_DIFF_ADDED = _stc.STC_DIFF_ADDED
STC_DIFF_CHANGED = _stc.STC_DIFF_CHANGED
STC_CONF_DEFAULT = _stc.STC_CONF_DEFAULT
STC_CONF_COMMENT = _stc.STC_CONF_COMMENT
STC_CONF_NUMBER = _stc.STC_CONF_NUMBER
STC_CONF_IDENTIFIER = _stc.STC_CONF_IDENTIFIER
STC_CONF_EXTENSION = _stc.STC_CONF_EXTENSION
STC_CONF_PARAMETER = _stc.STC_CONF_PARAMETER
STC_CONF_STRING = _stc.STC_CONF_STRING
STC_CONF_OPERATOR = _stc.STC_CONF_OPERATOR
STC_CONF_IP = _stc.STC_CONF_IP
STC_CONF_DIRECTIVE = _stc.STC_CONF_DIRECTIVE
STC_AVE_DEFAULT = _stc.STC_AVE_DEFAULT
STC_AVE_COMMENT = _stc.STC_AVE_COMMENT
STC_AVE_NUMBER = _stc.STC_AVE_NUMBER
STC_AVE_WORD = _stc.STC_AVE_WORD
STC_AVE_STRING = _stc.STC_AVE_STRING
STC_AVE_ENUM = _stc.STC_AVE_ENUM
STC_AVE_STRINGEOL = _stc.STC_AVE_STRINGEOL
STC_AVE_IDENTIFIER = _stc.STC_AVE_IDENTIFIER
STC_AVE_OPERATOR = _stc.STC_AVE_OPERATOR
STC_AVE_WORD1 = _stc.STC_AVE_WORD1
STC_AVE_WORD2 = _stc.STC_AVE_WORD2
STC_AVE_WORD3 = _stc.STC_AVE_WORD3
STC_AVE_WORD4 = _stc.STC_AVE_WORD4
STC_AVE_WORD5 = _stc.STC_AVE_WORD5
STC_AVE_WORD6 = _stc.STC_AVE_WORD6
STC_ADA_DEFAULT = _stc.STC_ADA_DEFAULT
STC_ADA_WORD = _stc.STC_ADA_WORD
STC_ADA_IDENTIFIER = _stc.STC_ADA_IDENTIFIER
STC_ADA_NUMBER = _stc.STC_ADA_NUMBER
STC_ADA_DELIMITER = _stc.STC_ADA_DELIMITER
STC_ADA_CHARACTER = _stc.STC_ADA_CHARACTER
STC_ADA_CHARACTEREOL = _stc.STC_ADA_CHARACTEREOL
STC_ADA_STRING = _stc.STC_ADA_STRING
STC_ADA_STRINGEOL = _stc.STC_ADA_STRINGEOL
STC_ADA_LABEL = _stc.STC_ADA_LABEL
STC_ADA_COMMENTLINE = _stc.STC_ADA_COMMENTLINE
STC_ADA_ILLEGAL = _stc.STC_ADA_ILLEGAL
STC_BAAN_DEFAULT = _stc.STC_BAAN_DEFAULT
STC_BAAN_COMMENT = _stc.STC_BAAN_COMMENT
STC_BAAN_COMMENTDOC = _stc.STC_BAAN_COMMENTDOC
STC_BAAN_NUMBER = _stc.STC_BAAN_NUMBER
STC_BAAN_WORD = _stc.STC_BAAN_WORD
STC_BAAN_STRING = _stc.STC_BAAN_STRING
STC_BAAN_PREPROCESSOR = _stc.STC_BAAN_PREPROCESSOR
STC_BAAN_OPERATOR = _stc.STC_BAAN_OPERATOR
STC_BAAN_IDENTIFIER = _stc.STC_BAAN_IDENTIFIER
STC_BAAN_STRINGEOL = _stc.STC_BAAN_STRINGEOL
STC_BAAN_WORD2 = _stc.STC_BAAN_WORD2
STC_LISP_DEFAULT = _stc.STC_LISP_DEFAULT
STC_LISP_COMMENT = _stc.STC_LISP_COMMENT
STC_LISP_NUMBER = _stc.STC_LISP_NUMBER
STC_LISP_KEYWORD = _stc.STC_LISP_KEYWORD
STC_LISP_KEYWORD_KW = _stc.STC_LISP_KEYWORD_KW
STC_LISP_SYMBOL = _stc.STC_LISP_SYMBOL
STC_LISP_STRING = _stc.STC_LISP_STRING
STC_LISP_STRINGEOL = _stc.STC_LISP_STRINGEOL
STC_LISP_IDENTIFIER = _stc.STC_LISP_IDENTIFIER
STC_LISP_OPERATOR = _stc.STC_LISP_OPERATOR
STC_LISP_SPECIAL = _stc.STC_LISP_SPECIAL
STC_LISP_MULTI_COMMENT = _stc.STC_LISP_MULTI_COMMENT
STC_EIFFEL_DEFAULT = _stc.STC_EIFFEL_DEFAULT
STC_EIFFEL_COMMENTLINE = _stc.STC_EIFFEL_COMMENTLINE
STC_EIFFEL_NUMBER = _stc.STC_EIFFEL_NUMBER
STC_EIFFEL_WORD = _stc.STC_EIFFEL_WORD
STC_EIFFEL_STRING = _stc.STC_EIFFEL_STRING
STC_EIFFEL_CHARACTER = _stc.STC_EIFFEL_CHARACTER
STC_EIFFEL_OPERATOR = _stc.STC_EIFFEL_OPERATOR
STC_EIFFEL_IDENTIFIER = _stc.STC_EIFFEL_IDENTIFIER
STC_EIFFEL_STRINGEOL = _stc.STC_EIFFEL_STRINGEOL
STC_NNCRONTAB_DEFAULT = _stc.STC_NNCRONTAB_DEFAULT
STC_NNCRONTAB_COMMENT = _stc.STC_NNCRONTAB_COMMENT
STC_NNCRONTAB_TASK = _stc.STC_NNCRONTAB_TASK
STC_NNCRONTAB_SECTION = _stc.STC_NNCRONTAB_SECTION
STC_NNCRONTAB_KEYWORD = _stc.STC_NNCRONTAB_KEYWORD
STC_NNCRONTAB_MODIFIER = _stc.STC_NNCRONTAB_MODIFIER
STC_NNCRONTAB_ASTERISK = _stc.STC_NNCRONTAB_ASTERISK
STC_NNCRONTAB_NUMBER = _stc.STC_NNCRONTAB_NUMBER
STC_NNCRONTAB_STRING = _stc.STC_NNCRONTAB_STRING
STC_NNCRONTAB_ENVIRONMENT = _stc.STC_NNCRONTAB_ENVIRONMENT
STC_NNCRONTAB_IDENTIFIER = _stc.STC_NNCRONTAB_IDENTIFIER
STC_FORTH_DEFAULT = _stc.STC_FORTH_DEFAULT
STC_FORTH_COMMENT = _stc.STC_FORTH_COMMENT
STC_FORTH_COMMENT_ML = _stc.STC_FORTH_COMMENT_ML
STC_FORTH_IDENTIFIER = _stc.STC_FORTH_IDENTIFIER
STC_FORTH_CONTROL = _stc.STC_FORTH_CONTROL
STC_FORTH_KEYWORD = _stc.STC_FORTH_KEYWORD
STC_FORTH_DEFWORD = _stc.STC_FORTH_DEFWORD
STC_FORTH_PREWORD1 = _stc.STC_FORTH_PREWORD1
STC_FORTH_PREWORD2 = _stc.STC_FORTH_PREWORD2
STC_FORTH_NUMBER = _stc.STC_FORTH_NUMBER
STC_FORTH_STRING = _stc.STC_FORTH_STRING
STC_FORTH_LOCALE = _stc.STC_FORTH_LOCALE
STC_MATLAB_DEFAULT = _stc.STC_MATLAB_DEFAULT
STC_MATLAB_COMMENT = _stc.STC_MATLAB_COMMENT
STC_MATLAB_COMMAND = _stc.STC_MATLAB_COMMAND
STC_MATLAB_NUMBER = _stc.STC_MATLAB_NUMBER
STC_MATLAB_KEYWORD = _stc.STC_MATLAB_KEYWORD
STC_MATLAB_STRING = _stc.STC_MATLAB_STRING
STC_MATLAB_OPERATOR = _stc.STC_MATLAB_OPERATOR
STC_MATLAB_IDENTIFIER = _stc.STC_MATLAB_IDENTIFIER
STC_MATLAB_DOUBLEQUOTESTRING = _stc.STC_MATLAB_DOUBLEQUOTESTRING
STC_SCRIPTOL_DEFAULT = _stc.STC_SCRIPTOL_DEFAULT
STC_SCRIPTOL_WHITE = _stc.STC_SCRIPTOL_WHITE
STC_SCRIPTOL_COMMENTLINE = _stc.STC_SCRIPTOL_COMMENTLINE
STC_SCRIPTOL_PERSISTENT = _stc.STC_SCRIPTOL_PERSISTENT
STC_SCRIPTOL_CSTYLE = _stc.STC_SCRIPTOL_CSTYLE
STC_SCRIPTOL_COMMENTBLOCK = _stc.STC_SCRIPTOL_COMMENTBLOCK
STC_SCRIPTOL_NUMBER = _stc.STC_SCRIPTOL_NUMBER
STC_SCRIPTOL_STRING = _stc.STC_SCRIPTOL_STRING
STC_SCRIPTOL_CHARACTER = _stc.STC_SCRIPTOL_CHARACTER
STC_SCRIPTOL_STRINGEOL = _stc.STC_SCRIPTOL_STRINGEOL
STC_SCRIPTOL_KEYWORD = _stc.STC_SCRIPTOL_KEYWORD
STC_SCRIPTOL_OPERATOR = _stc.STC_SCRIPTOL_OPERATOR
STC_SCRIPTOL_IDENTIFIER = _stc.STC_SCRIPTOL_IDENTIFIER
STC_SCRIPTOL_TRIPLE = _stc.STC_SCRIPTOL_TRIPLE
STC_SCRIPTOL_CLASSNAME = _stc.STC_SCRIPTOL_CLASSNAME
STC_SCRIPTOL_PREPROCESSOR = _stc.STC_SCRIPTOL_PREPROCESSOR
STC_ASM_DEFAULT = _stc.STC_ASM_DEFAULT
STC_ASM_COMMENT = _stc.STC_ASM_COMMENT
STC_ASM_NUMBER = _stc.STC_ASM_NUMBER
STC_ASM_STRING = _stc.STC_ASM_STRING
STC_ASM_OPERATOR = _stc.STC_ASM_OPERATOR
STC_ASM_IDENTIFIER = _stc.STC_ASM_IDENTIFIER
STC_ASM_CPUINSTRUCTION = _stc.STC_ASM_CPUINSTRUCTION
STC_ASM_MATHINSTRUCTION = _stc.STC_ASM_MATHINSTRUCTION
STC_ASM_REGISTER = _stc.STC_ASM_REGISTER
STC_ASM_DIRECTIVE = _stc.STC_ASM_DIRECTIVE
STC_ASM_DIRECTIVEOPERAND = _stc.STC_ASM_DIRECTIVEOPERAND
STC_ASM_COMMENTBLOCK = _stc.STC_ASM_COMMENTBLOCK
STC_ASM_CHARACTER = _stc.STC_ASM_CHARACTER
STC_ASM_STRINGEOL = _stc.STC_ASM_STRINGEOL
STC_ASM_EXTINSTRUCTION = _stc.STC_ASM_EXTINSTRUCTION
STC_ASM_COMMENTDIRECTIVE = _stc.STC_ASM_COMMENTDIRECTIVE
STC_F_DEFAULT = _stc.STC_F_DEFAULT
STC_F_COMMENT = _stc.STC_F_COMMENT
STC_F_NUMBER = _stc.STC_F_NUMBER
STC_F_STRING1 = _stc.STC_F_STRING1
STC_F_STRING2 = _stc.STC_F_STRING2
STC_F_STRINGEOL = _stc.STC_F_STRINGEOL
STC_F_OPERATOR = _stc.STC_F_OPERATOR
STC_F_IDENTIFIER = _stc.STC_F_IDENTIFIER
STC_F_WORD = _stc.STC_F_WORD
STC_F_WORD2 = _stc.STC_F_WORD2
STC_F_WORD3 = _stc.STC_F_WORD3
STC_F_PREPROCESSOR = _stc.STC_F_PREPROCESSOR
STC_F_OPERATOR2 = _stc.STC_F_OPERATOR2
STC_F_LABEL = _stc.STC_F_LABEL
STC_F_CONTINUATION = _stc.STC_F_CONTINUATION
STC_CSS_DEFAULT = _stc.STC_CSS_DEFAULT
STC_CSS_TAG = _stc.STC_CSS_TAG
STC_CSS_CLASS = _stc.STC_CSS_CLASS
STC_CSS_PSEUDOCLASS = _stc.STC_CSS_PSEUDOCLASS
STC_CSS_UNKNOWN_PSEUDOCLASS = _stc.STC_CSS_UNKNOWN_PSEUDOCLASS
STC_CSS_OPERATOR = _stc.STC_CSS_OPERATOR
STC_CSS_IDENTIFIER = _stc.STC_CSS_IDENTIFIER
STC_CSS_UNKNOWN_IDENTIFIER = _stc.STC_CSS_UNKNOWN_IDENTIFIER
STC_CSS_VALUE = _stc.STC_CSS_VALUE
STC_CSS_COMMENT = _stc.STC_CSS_COMMENT
STC_CSS_ID = _stc.STC_CSS_ID
STC_CSS_IMPORTANT = _stc.STC_CSS_IMPORTANT
STC_CSS_DIRECTIVE = _stc.STC_CSS_DIRECTIVE
STC_CSS_DOUBLESTRING = _stc.STC_CSS_DOUBLESTRING
STC_CSS_SINGLESTRING = _stc.STC_CSS_SINGLESTRING
STC_CSS_IDENTIFIER2 = _stc.STC_CSS_IDENTIFIER2
STC_CSS_ATTRIBUTE = _stc.STC_CSS_ATTRIBUTE
STC_CSS_IDENTIFIER3 = _stc.STC_CSS_IDENTIFIER3
STC_CSS_PSEUDOELEMENT = _stc.STC_CSS_PSEUDOELEMENT
STC_CSS_EXTENDED_IDENTIFIER = _stc.STC_CSS_EXTENDED_IDENTIFIER
STC_CSS_EXTENDED_PSEUDOCLASS = _stc.STC_CSS_EXTENDED_PSEUDOCLASS
STC_CSS_EXTENDED_PSEUDOELEMENT = _stc.STC_CSS_EXTENDED_PSEUDOELEMENT
STC_CSS_MEDIA = _stc.STC_CSS_MEDIA
STC_CSS_VARIABLE = _stc.STC_CSS_VARIABLE
STC_POV_DEFAULT = _stc.STC_POV_DEFAULT
STC_POV_COMMENT = _stc.STC_POV_COMMENT
STC_POV_COMMENTLINE = _stc.STC_POV_COMMENTLINE
STC_POV_NUMBER = _stc.STC_POV_NUMBER
STC_POV_OPERATOR = _stc.STC_POV_OPERATOR
STC_POV_IDENTIFIER = _stc.STC_POV_IDENTIFIER
STC_POV_STRING = _stc.STC_POV_STRING
STC_POV_STRINGEOL = _stc.STC_POV_STRINGEOL
STC_POV_DIRECTIVE = _stc.STC_POV_DIRECTIVE
STC_POV_BADDIRECTIVE = _stc.STC_POV_BADDIRECTIVE
STC_POV_WORD2 = _stc.STC_POV_WORD2
STC_POV_WORD3 = _stc.STC_POV_WORD3
STC_POV_WORD4 = _stc.STC_POV_WORD4
STC_POV_WORD5 = _stc.STC_POV_WORD5
STC_POV_WORD6 = _stc.STC_POV_WORD6
STC_POV_WORD7 = _stc.STC_POV_WORD7
STC_POV_WORD8 = _stc.STC_POV_WORD8
STC_LOUT_DEFAULT = _stc.STC_LOUT_DEFAULT
STC_LOUT_COMMENT = _stc.STC_LOUT_COMMENT
STC_LOUT_NUMBER = _stc.STC_LOUT_NUMBER
STC_LOUT_WORD = _stc.STC_LOUT_WORD
STC_LOUT_WORD2 = _stc.STC_LOUT_WORD2
STC_LOUT_WORD3 = _stc.STC_LOUT_WORD3
STC_LOUT_WORD4 = _stc.STC_LOUT_WORD4 STC_LOUT_STRING = _stc.STC_LOUT_STRING STC_LOUT_OPERATOR = _stc.STC_LOUT_OPERATOR STC_LOUT_IDENTIFIER = _stc.STC_LOUT_IDENTIFIER STC_LOUT_STRINGEOL = _stc.STC_LOUT_STRINGEOL STC_ESCRIPT_DEFAULT = _stc.STC_ESCRIPT_DEFAULT STC_ESCRIPT_COMMENT = _stc.STC_ESCRIPT_COMMENT STC_ESCRIPT_COMMENTLINE = _stc.STC_ESCRIPT_COMMENTLINE STC_ESCRIPT_COMMENTDOC = _stc.STC_ESCRIPT_COMMENTDOC STC_ESCRIPT_NUMBER = _stc.STC_ESCRIPT_NUMBER STC_ESCRIPT_WORD = _stc.STC_ESCRIPT_WORD STC_ESCRIPT_STRING = _stc.STC_ESCRIPT_STRING STC_ESCRIPT_OPERATOR = _stc.STC_ESCRIPT_OPERATOR STC_ESCRIPT_IDENTIFIER = _stc.STC_ESCRIPT_IDENTIFIER STC_ESCRIPT_BRACE = _stc.STC_ESCRIPT_BRACE STC_ESCRIPT_WORD2 = _stc.STC_ESCRIPT_WORD2 STC_ESCRIPT_WORD3 = _stc.STC_ESCRIPT_WORD3 STC_PS_DEFAULT = _stc.STC_PS_DEFAULT STC_PS_COMMENT = _stc.STC_PS_COMMENT STC_PS_DSC_COMMENT = _stc.STC_PS_DSC_COMMENT STC_PS_DSC_VALUE = _stc.STC_PS_DSC_VALUE STC_PS_NUMBER = _stc.STC_PS_NUMBER STC_PS_NAME = _stc.STC_PS_NAME STC_PS_KEYWORD = _stc.STC_PS_KEYWORD STC_PS_LITERAL = _stc.STC_PS_LITERAL STC_PS_IMMEVAL = _stc.STC_PS_IMMEVAL STC_PS_PAREN_ARRAY = _stc.STC_PS_PAREN_ARRAY STC_PS_PAREN_DICT = _stc.STC_PS_PAREN_DICT STC_PS_PAREN_PROC = _stc.STC_PS_PAREN_PROC STC_PS_TEXT = _stc.STC_PS_TEXT STC_PS_HEXSTRING = _stc.STC_PS_HEXSTRING STC_PS_BASE85STRING = _stc.STC_PS_BASE85STRING STC_PS_BADSTRINGCHAR = _stc.STC_PS_BADSTRINGCHAR STC_NSIS_DEFAULT = _stc.STC_NSIS_DEFAULT STC_NSIS_COMMENT = _stc.STC_NSIS_COMMENT STC_NSIS_STRINGDQ = _stc.STC_NSIS_STRINGDQ STC_NSIS_STRINGLQ = _stc.STC_NSIS_STRINGLQ STC_NSIS_STRINGRQ = _stc.STC_NSIS_STRINGRQ STC_NSIS_FUNCTION = _stc.STC_NSIS_FUNCTION STC_NSIS_VARIABLE = _stc.STC_NSIS_VARIABLE STC_NSIS_LABEL = _stc.STC_NSIS_LABEL STC_NSIS_USERDEFINED = _stc.STC_NSIS_USERDEFINED STC_NSIS_SECTIONDEF = _stc.STC_NSIS_SECTIONDEF STC_NSIS_SUBSECTIONDEF = _stc.STC_NSIS_SUBSECTIONDEF STC_NSIS_IFDEFINEDEF = _stc.STC_NSIS_IFDEFINEDEF STC_NSIS_MACRODEF = _stc.STC_NSIS_MACRODEF STC_NSIS_STRINGVAR = _stc.STC_NSIS_STRINGVAR STC_NSIS_NUMBER = _stc.STC_NSIS_NUMBER STC_NSIS_SECTIONGROUP = _stc.STC_NSIS_SECTIONGROUP STC_NSIS_PAGEEX = _stc.STC_NSIS_PAGEEX STC_NSIS_FUNCTIONDEF = _stc.STC_NSIS_FUNCTIONDEF STC_NSIS_COMMENTBOX = _stc.STC_NSIS_COMMENTBOX STC_MMIXAL_LEADWS = _stc.STC_MMIXAL_LEADWS STC_MMIXAL_COMMENT = _stc.STC_MMIXAL_COMMENT STC_MMIXAL_LABEL = _stc.STC_MMIXAL_LABEL STC_MMIXAL_OPCODE = _stc.STC_MMIXAL_OPCODE STC_MMIXAL_OPCODE_PRE = _stc.STC_MMIXAL_OPCODE_PRE STC_MMIXAL_OPCODE_VALID = _stc.STC_MMIXAL_OPCODE_VALID STC_MMIXAL_OPCODE_UNKNOWN = _stc.STC_MMIXAL_OPCODE_UNKNOWN STC_MMIXAL_OPCODE_POST = _stc.STC_MMIXAL_OPCODE_POST STC_MMIXAL_OPERANDS = _stc.STC_MMIXAL_OPERANDS STC_MMIXAL_NUMBER = _stc.STC_MMIXAL_NUMBER STC_MMIXAL_REF = _stc.STC_MMIXAL_REF STC_MMIXAL_CHAR = _stc.STC_MMIXAL_CHAR STC_MMIXAL_STRING = _stc.STC_MMIXAL_STRING STC_MMIXAL_REGISTER = _stc.STC_MMIXAL_REGISTER STC_MMIXAL_HEX = _stc.STC_MMIXAL_HEX STC_MMIXAL_OPERATOR = _stc.STC_MMIXAL_OPERATOR STC_MMIXAL_SYMBOL = _stc.STC_MMIXAL_SYMBOL STC_MMIXAL_INCLUDE = _stc.STC_MMIXAL_INCLUDE STC_CLW_DEFAULT = _stc.STC_CLW_DEFAULT STC_CLW_LABEL = _stc.STC_CLW_LABEL STC_CLW_COMMENT = _stc.STC_CLW_COMMENT STC_CLW_STRING = _stc.STC_CLW_STRING STC_CLW_USER_IDENTIFIER = _stc.STC_CLW_USER_IDENTIFIER STC_CLW_INTEGER_CONSTANT = _stc.STC_CLW_INTEGER_CONSTANT STC_CLW_REAL_CONSTANT = _stc.STC_CLW_REAL_CONSTANT STC_CLW_PICTURE_STRING = _stc.STC_CLW_PICTURE_STRING STC_CLW_KEYWORD = _stc.STC_CLW_KEYWORD STC_CLW_COMPILER_DIRECTIVE = _stc.STC_CLW_COMPILER_DIRECTIVE 
STC_CLW_RUNTIME_EXPRESSIONS = _stc.STC_CLW_RUNTIME_EXPRESSIONS
STC_CLW_BUILTIN_PROCEDURES_FUNCTION = _stc.STC_CLW_BUILTIN_PROCEDURES_FUNCTION
STC_CLW_STRUCTURE_DATA_TYPE = _stc.STC_CLW_STRUCTURE_DATA_TYPE
STC_CLW_ATTRIBUTE = _stc.STC_CLW_ATTRIBUTE
STC_CLW_STANDARD_EQUATE = _stc.STC_CLW_STANDARD_EQUATE
STC_CLW_ERROR = _stc.STC_CLW_ERROR
STC_CLW_DEPRECATED = _stc.STC_CLW_DEPRECATED
STC_LOT_DEFAULT = _stc.STC_LOT_DEFAULT
STC_LOT_HEADER = _stc.STC_LOT_HEADER
STC_LOT_BREAK = _stc.STC_LOT_BREAK
STC_LOT_SET = _stc.STC_LOT_SET
STC_LOT_PASS = _stc.STC_LOT_PASS
STC_LOT_FAIL = _stc.STC_LOT_FAIL
STC_LOT_ABORT = _stc.STC_LOT_ABORT
STC_YAML_DEFAULT = _stc.STC_YAML_DEFAULT
STC_YAML_COMMENT = _stc.STC_YAML_COMMENT
STC_YAML_IDENTIFIER = _stc.STC_YAML_IDENTIFIER
STC_YAML_KEYWORD = _stc.STC_YAML_KEYWORD
STC_YAML_NUMBER = _stc.STC_YAML_NUMBER
STC_YAML_REFERENCE = _stc.STC_YAML_REFERENCE
STC_YAML_DOCUMENT = _stc.STC_YAML_DOCUMENT
STC_YAML_TEXT = _stc.STC_YAML_TEXT
STC_YAML_ERROR = _stc.STC_YAML_ERROR
STC_YAML_OPERATOR = _stc.STC_YAML_OPERATOR
STC_TEX_DEFAULT = _stc.STC_TEX_DEFAULT
STC_TEX_SPECIAL = _stc.STC_TEX_SPECIAL
STC_TEX_GROUP = _stc.STC_TEX_GROUP
STC_TEX_SYMBOL = _stc.STC_TEX_SYMBOL
STC_TEX_COMMAND = _stc.STC_TEX_COMMAND
STC_TEX_TEXT = _stc.STC_TEX_TEXT
STC_METAPOST_DEFAULT = _stc.STC_METAPOST_DEFAULT
STC_METAPOST_SPECIAL = _stc.STC_METAPOST_SPECIAL
STC_METAPOST_GROUP = _stc.STC_METAPOST_GROUP
STC_METAPOST_SYMBOL = _stc.STC_METAPOST_SYMBOL
STC_METAPOST_COMMAND = _stc.STC_METAPOST_COMMAND
STC_METAPOST_TEXT = _stc.STC_METAPOST_TEXT
STC_METAPOST_EXTRA = _stc.STC_METAPOST_EXTRA
STC_ERLANG_DEFAULT = _stc.STC_ERLANG_DEFAULT
STC_ERLANG_COMMENT = _stc.STC_ERLANG_COMMENT
STC_ERLANG_VARIABLE = _stc.STC_ERLANG_VARIABLE
STC_ERLANG_NUMBER = _stc.STC_ERLANG_NUMBER
STC_ERLANG_KEYWORD = _stc.STC_ERLANG_KEYWORD
STC_ERLANG_STRING = _stc.STC_ERLANG_STRING
STC_ERLANG_OPERATOR = _stc.STC_ERLANG_OPERATOR
STC_ERLANG_ATOM = _stc.STC_ERLANG_ATOM
STC_ERLANG_FUNCTION_NAME = _stc.STC_ERLANG_FUNCTION_NAME
STC_ERLANG_CHARACTER = _stc.STC_ERLANG_CHARACTER
STC_ERLANG_MACRO = _stc.STC_ERLANG_MACRO
STC_ERLANG_RECORD = _stc.STC_ERLANG_RECORD
STC_ERLANG_PREPROC = _stc.STC_ERLANG_PREPROC
STC_ERLANG_NODE_NAME = _stc.STC_ERLANG_NODE_NAME
STC_ERLANG_COMMENT_FUNCTION = _stc.STC_ERLANG_COMMENT_FUNCTION
STC_ERLANG_COMMENT_MODULE = _stc.STC_ERLANG_COMMENT_MODULE
STC_ERLANG_COMMENT_DOC = _stc.STC_ERLANG_COMMENT_DOC
STC_ERLANG_COMMENT_DOC_MACRO = _stc.STC_ERLANG_COMMENT_DOC_MACRO
STC_ERLANG_ATOM_QUOTED = _stc.STC_ERLANG_ATOM_QUOTED
STC_ERLANG_MACRO_QUOTED = _stc.STC_ERLANG_MACRO_QUOTED
STC_ERLANG_RECORD_QUOTED = _stc.STC_ERLANG_RECORD_QUOTED
STC_ERLANG_NODE_NAME_QUOTED = _stc.STC_ERLANG_NODE_NAME_QUOTED
STC_ERLANG_BIFS = _stc.STC_ERLANG_BIFS
STC_ERLANG_MODULES = _stc.STC_ERLANG_MODULES
STC_ERLANG_MODULES_ATT = _stc.STC_ERLANG_MODULES_ATT
STC_ERLANG_UNKNOWN = _stc.STC_ERLANG_UNKNOWN
STC_MSSQL_DEFAULT = _stc.STC_MSSQL_DEFAULT
STC_MSSQL_COMMENT = _stc.STC_MSSQL_COMMENT
STC_MSSQL_LINE_COMMENT = _stc.STC_MSSQL_LINE_COMMENT
STC_MSSQL_NUMBER = _stc.STC_MSSQL_NUMBER
STC_MSSQL_STRING = _stc.STC_MSSQL_STRING
STC_MSSQL_OPERATOR = _stc.STC_MSSQL_OPERATOR
STC_MSSQL_IDENTIFIER = _stc.STC_MSSQL_IDENTIFIER
STC_MSSQL_VARIABLE = _stc.STC_MSSQL_VARIABLE
STC_MSSQL_COLUMN_NAME = _stc.STC_MSSQL_COLUMN_NAME
STC_MSSQL_STATEMENT = _stc.STC_MSSQL_STATEMENT
STC_MSSQL_DATATYPE = _stc.STC_MSSQL_DATATYPE
STC_MSSQL_SYSTABLE = _stc.STC_MSSQL_SYSTABLE
STC_MSSQL_GLOBAL_VARIABLE = _stc.STC_MSSQL_GLOBAL_VARIABLE
STC_MSSQL_FUNCTION = _stc.STC_MSSQL_FUNCTION
STC_MSSQL_STORED_PROCEDURE = _stc.STC_MSSQL_STORED_PROCEDURE
STC_MSSQL_DEFAULT_PREF_DATATYPE = _stc.STC_MSSQL_DEFAULT_PREF_DATATYPE
STC_MSSQL_COLUMN_NAME_2 = _stc.STC_MSSQL_COLUMN_NAME_2
STC_V_DEFAULT = _stc.STC_V_DEFAULT
STC_V_COMMENT = _stc.STC_V_COMMENT
STC_V_COMMENTLINE = _stc.STC_V_COMMENTLINE
STC_V_COMMENTLINEBANG = _stc.STC_V_COMMENTLINEBANG
STC_V_NUMBER = _stc.STC_V_NUMBER
STC_V_WORD = _stc.STC_V_WORD
STC_V_STRING = _stc.STC_V_STRING
STC_V_WORD2 = _stc.STC_V_WORD2
STC_V_WORD3 = _stc.STC_V_WORD3
STC_V_PREPROCESSOR = _stc.STC_V_PREPROCESSOR
STC_V_OPERATOR = _stc.STC_V_OPERATOR
STC_V_IDENTIFIER = _stc.STC_V_IDENTIFIER
STC_V_STRINGEOL = _stc.STC_V_STRINGEOL
STC_V_USER = _stc.STC_V_USER
STC_KIX_DEFAULT = _stc.STC_KIX_DEFAULT
STC_KIX_COMMENT = _stc.STC_KIX_COMMENT
STC_KIX_STRING1 = _stc.STC_KIX_STRING1
STC_KIX_STRING2 = _stc.STC_KIX_STRING2
STC_KIX_NUMBER = _stc.STC_KIX_NUMBER
STC_KIX_VAR = _stc.STC_KIX_VAR
STC_KIX_MACRO = _stc.STC_KIX_MACRO
STC_KIX_KEYWORD = _stc.STC_KIX_KEYWORD
STC_KIX_FUNCTIONS = _stc.STC_KIX_FUNCTIONS
STC_KIX_OPERATOR = _stc.STC_KIX_OPERATOR
STC_KIX_IDENTIFIER = _stc.STC_KIX_IDENTIFIER
STC_GC_DEFAULT = _stc.STC_GC_DEFAULT
STC_GC_COMMENTLINE = _stc.STC_GC_COMMENTLINE
STC_GC_COMMENTBLOCK = _stc.STC_GC_COMMENTBLOCK
STC_GC_GLOBAL = _stc.STC_GC_GLOBAL
STC_GC_EVENT = _stc.STC_GC_EVENT
STC_GC_ATTRIBUTE = _stc.STC_GC_ATTRIBUTE
STC_GC_CONTROL = _stc.STC_GC_CONTROL
STC_GC_COMMAND = _stc.STC_GC_COMMAND
STC_GC_STRING = _stc.STC_GC_STRING
STC_GC_OPERATOR = _stc.STC_GC_OPERATOR
STC_SN_DEFAULT = _stc.STC_SN_DEFAULT
STC_SN_CODE = _stc.STC_SN_CODE
STC_SN_COMMENTLINE = _stc.STC_SN_COMMENTLINE
STC_SN_COMMENTLINEBANG = _stc.STC_SN_COMMENTLINEBANG
STC_SN_NUMBER = _stc.STC_SN_NUMBER
STC_SN_WORD = _stc.STC_SN_WORD
STC_SN_STRING = _stc.STC_SN_STRING
STC_SN_WORD2 = _stc.STC_SN_WORD2
STC_SN_WORD3 = _stc.STC_SN_WORD3
STC_SN_PREPROCESSOR = _stc.STC_SN_PREPROCESSOR
STC_SN_OPERATOR = _stc.STC_SN_OPERATOR
STC_SN_IDENTIFIER = _stc.STC_SN_IDENTIFIER
STC_SN_STRINGEOL = _stc.STC_SN_STRINGEOL
STC_SN_REGEXTAG = _stc.STC_SN_REGEXTAG
STC_SN_SIGNAL = _stc.STC_SN_SIGNAL
STC_SN_USER = _stc.STC_SN_USER
STC_AU3_DEFAULT = _stc.STC_AU3_DEFAULT
STC_AU3_COMMENT = _stc.STC_AU3_COMMENT
STC_AU3_COMMENTBLOCK = _stc.STC_AU3_COMMENTBLOCK
STC_AU3_NUMBER = _stc.STC_AU3_NUMBER
STC_AU3_FUNCTION = _stc.STC_AU3_FUNCTION
STC_AU3_KEYWORD = _stc.STC_AU3_KEYWORD
STC_AU3_MACRO = _stc.STC_AU3_MACRO
STC_AU3_STRING = _stc.STC_AU3_STRING
STC_AU3_OPERATOR = _stc.STC_AU3_OPERATOR
STC_AU3_VARIABLE = _stc.STC_AU3_VARIABLE
STC_AU3_SENT = _stc.STC_AU3_SENT
STC_AU3_PREPROCESSOR = _stc.STC_AU3_PREPROCESSOR
STC_AU3_SPECIAL = _stc.STC_AU3_SPECIAL
STC_AU3_EXPAND = _stc.STC_AU3_EXPAND
STC_AU3_COMOBJ = _stc.STC_AU3_COMOBJ
STC_AU3_UDF = _stc.STC_AU3_UDF
STC_APDL_DEFAULT = _stc.STC_APDL_DEFAULT
STC_APDL_COMMENT = _stc.STC_APDL_COMMENT
STC_APDL_COMMENTBLOCK = _stc.STC_APDL_COMMENTBLOCK
STC_APDL_NUMBER = _stc.STC_APDL_NUMBER
STC_APDL_STRING = _stc.STC_APDL_STRING
STC_APDL_OPERATOR = _stc.STC_APDL_OPERATOR
STC_APDL_WORD = _stc.STC_APDL_WORD
STC_APDL_PROCESSOR = _stc.STC_APDL_PROCESSOR
STC_APDL_COMMAND = _stc.STC_APDL_COMMAND
STC_APDL_SLASHCOMMAND = _stc.STC_APDL_SLASHCOMMAND
STC_APDL_STARCOMMAND = _stc.STC_APDL_STARCOMMAND
STC_APDL_ARGUMENT = _stc.STC_APDL_ARGUMENT
STC_APDL_FUNCTION = _stc.STC_APDL_FUNCTION
STC_SH_DEFAULT = _stc.STC_SH_DEFAULT
STC_SH_ERROR = _stc.STC_SH_ERROR
STC_SH_COMMENTLINE = _stc.STC_SH_COMMENTLINE
STC_SH_NUMBER = _stc.STC_SH_NUMBER
STC_SH_WORD = _stc.STC_SH_WORD
STC_SH_STRING = _stc.STC_SH_STRING
STC_SH_CHARACTER = _stc.STC_SH_CHARACTER
STC_SH_OPERATOR = _stc.STC_SH_OPERATOR
STC_SH_IDENTIFIER = _stc.STC_SH_IDENTIFIER
STC_SH_SCALAR = _stc.STC_SH_SCALAR
STC_SH_PARAM = _stc.STC_SH_PARAM
STC_SH_BACKTICKS = _stc.STC_SH_BACKTICKS
STC_SH_HERE_DELIM = _stc.STC_SH_HERE_DELIM
STC_SH_HERE_Q = _stc.STC_SH_HERE_Q
STC_ASN1_DEFAULT = _stc.STC_ASN1_DEFAULT
STC_ASN1_COMMENT = _stc.STC_ASN1_COMMENT
STC_ASN1_IDENTIFIER = _stc.STC_ASN1_IDENTIFIER
STC_ASN1_STRING = _stc.STC_ASN1_STRING
STC_ASN1_OID = _stc.STC_ASN1_OID
STC_ASN1_SCALAR = _stc.STC_ASN1_SCALAR
STC_ASN1_KEYWORD = _stc.STC_ASN1_KEYWORD
STC_ASN1_ATTRIBUTE = _stc.STC_ASN1_ATTRIBUTE
STC_ASN1_DESCRIPTOR = _stc.STC_ASN1_DESCRIPTOR
STC_ASN1_TYPE = _stc.STC_ASN1_TYPE
STC_ASN1_OPERATOR = _stc.STC_ASN1_OPERATOR
STC_VHDL_DEFAULT = _stc.STC_VHDL_DEFAULT
STC_VHDL_COMMENT = _stc.STC_VHDL_COMMENT
STC_VHDL_COMMENTLINEBANG = _stc.STC_VHDL_COMMENTLINEBANG
STC_VHDL_NUMBER = _stc.STC_VHDL_NUMBER
STC_VHDL_STRING = _stc.STC_VHDL_STRING
STC_VHDL_OPERATOR = _stc.STC_VHDL_OPERATOR
STC_VHDL_IDENTIFIER = _stc.STC_VHDL_IDENTIFIER
STC_VHDL_STRINGEOL = _stc.STC_VHDL_STRINGEOL
STC_VHDL_KEYWORD = _stc.STC_VHDL_KEYWORD
STC_VHDL_STDOPERATOR = _stc.STC_VHDL_STDOPERATOR
STC_VHDL_ATTRIBUTE = _stc.STC_VHDL_ATTRIBUTE
STC_VHDL_STDFUNCTION = _stc.STC_VHDL_STDFUNCTION
STC_VHDL_STDPACKAGE = _stc.STC_VHDL_STDPACKAGE
STC_VHDL_STDTYPE = _stc.STC_VHDL_STDTYPE
STC_VHDL_USERWORD = _stc.STC_VHDL_USERWORD
STC_CAML_DEFAULT = _stc.STC_CAML_DEFAULT
STC_CAML_IDENTIFIER = _stc.STC_CAML_IDENTIFIER
STC_CAML_TAGNAME = _stc.STC_CAML_TAGNAME
STC_CAML_KEYWORD = _stc.STC_CAML_KEYWORD
STC_CAML_KEYWORD2 = _stc.STC_CAML_KEYWORD2
STC_CAML_KEYWORD3 = _stc.STC_CAML_KEYWORD3
STC_CAML_LINENUM = _stc.STC_CAML_LINENUM
STC_CAML_OPERATOR = _stc.STC_CAML_OPERATOR
STC_CAML_NUMBER = _stc.STC_CAML_NUMBER
STC_CAML_CHAR = _stc.STC_CAML_CHAR
STC_CAML_WHITE = _stc.STC_CAML_WHITE
STC_CAML_STRING = _stc.STC_CAML_STRING
STC_CAML_COMMENT = _stc.STC_CAML_COMMENT
STC_CAML_COMMENT1 = _stc.STC_CAML_COMMENT1
STC_CAML_COMMENT2 = _stc.STC_CAML_COMMENT2
STC_CAML_COMMENT3 = _stc.STC_CAML_COMMENT3
STC_HA_DEFAULT = _stc.STC_HA_DEFAULT
STC_HA_IDENTIFIER = _stc.STC_HA_IDENTIFIER
STC_HA_KEYWORD = _stc.STC_HA_KEYWORD
STC_HA_NUMBER = _stc.STC_HA_NUMBER
STC_HA_STRING = _stc.STC_HA_STRING
STC_HA_CHARACTER = _stc.STC_HA_CHARACTER
STC_HA_CLASS = _stc.STC_HA_CLASS
STC_HA_MODULE = _stc.STC_HA_MODULE
STC_HA_CAPITAL = _stc.STC_HA_CAPITAL
STC_HA_DATA = _stc.STC_HA_DATA
STC_HA_IMPORT = _stc.STC_HA_IMPORT
STC_HA_OPERATOR = _stc.STC_HA_OPERATOR
STC_HA_INSTANCE = _stc.STC_HA_INSTANCE
STC_HA_COMMENTLINE = _stc.STC_HA_COMMENTLINE
STC_HA_COMMENTBLOCK = _stc.STC_HA_COMMENTBLOCK
STC_HA_COMMENTBLOCK2 = _stc.STC_HA_COMMENTBLOCK2
STC_HA_COMMENTBLOCK3 = _stc.STC_HA_COMMENTBLOCK3
STC_T3_DEFAULT = _stc.STC_T3_DEFAULT
STC_T3_X_DEFAULT = _stc.STC_T3_X_DEFAULT
STC_T3_PREPROCESSOR = _stc.STC_T3_PREPROCESSOR
STC_T3_BLOCK_COMMENT = _stc.STC_T3_BLOCK_COMMENT
STC_T3_LINE_COMMENT = _stc.STC_T3_LINE_COMMENT
STC_T3_OPERATOR = _stc.STC_T3_OPERATOR
STC_T3_KEYWORD = _stc.STC_T3_KEYWORD
STC_T3_NUMBER = _stc.STC_T3_NUMBER
STC_T3_IDENTIFIER = _stc.STC_T3_IDENTIFIER
STC_T3_S_STRING = _stc.STC_T3_S_STRING
STC_T3_D_STRING = _stc.STC_T3_D_STRING
STC_T3_X_STRING = _stc.STC_T3_X_STRING
STC_T3_LIB_DIRECTIVE = _stc.STC_T3_LIB_DIRECTIVE
STC_T3_MSG_PARAM = _stc.STC_T3_MSG_PARAM
STC_T3_HTML_TAG = _stc.STC_T3_HTML_TAG
STC_T3_HTML_DEFAULT = _stc.STC_T3_HTML_DEFAULT
STC_T3_HTML_STRING = _stc.STC_T3_HTML_STRING
STC_T3_USER1 = _stc.STC_T3_USER1
STC_T3_USER2 = _stc.STC_T3_USER2
STC_T3_USER3 = _stc.STC_T3_USER3
STC_T3_BRACE = _stc.STC_T3_BRACE
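# Keyword-class styles such as STC_VHDL_KEYWORD and STC_VHDL_STDFUNCTION are
# only assigned to identifiers that appear in the word lists handed to the
# lexer via SetKeyWords(); which list index feeds which style is defined per
# lexer. A minimal sketch, assuming 'ed' is a StyledTextCtrl as above:
#
#   ed.SetLexer(stc.STC_LEX_VHDL)
#   ed.SetKeyWords(0, "architecture begin end entity process signal")
#   ed.StyleSetSpec(stc.STC_VHDL_KEYWORD, "fore:#00007F,bold")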
STC_REBOL_DEFAULT = _stc.STC_REBOL_DEFAULT
STC_REBOL_COMMENTLINE = _stc.STC_REBOL_COMMENTLINE
STC_REBOL_COMMENTBLOCK = _stc.STC_REBOL_COMMENTBLOCK
STC_REBOL_PREFACE = _stc.STC_REBOL_PREFACE
STC_REBOL_OPERATOR = _stc.STC_REBOL_OPERATOR
STC_REBOL_CHARACTER = _stc.STC_REBOL_CHARACTER
STC_REBOL_QUOTEDSTRING = _stc.STC_REBOL_QUOTEDSTRING
STC_REBOL_BRACEDSTRING = _stc.STC_REBOL_BRACEDSTRING
STC_REBOL_NUMBER = _stc.STC_REBOL_NUMBER
STC_REBOL_PAIR = _stc.STC_REBOL_PAIR
STC_REBOL_TUPLE = _stc.STC_REBOL_TUPLE
STC_REBOL_BINARY = _stc.STC_REBOL_BINARY
STC_REBOL_MONEY = _stc.STC_REBOL_MONEY
STC_REBOL_ISSUE = _stc.STC_REBOL_ISSUE
STC_REBOL_TAG = _stc.STC_REBOL_TAG
STC_REBOL_FILE = _stc.STC_REBOL_FILE
STC_REBOL_EMAIL = _stc.STC_REBOL_EMAIL
STC_REBOL_URL = _stc.STC_REBOL_URL
STC_REBOL_DATE = _stc.STC_REBOL_DATE
STC_REBOL_TIME = _stc.STC_REBOL_TIME
STC_REBOL_IDENTIFIER = _stc.STC_REBOL_IDENTIFIER
STC_REBOL_WORD = _stc.STC_REBOL_WORD
STC_REBOL_WORD2 = _stc.STC_REBOL_WORD2
STC_REBOL_WORD3 = _stc.STC_REBOL_WORD3
STC_REBOL_WORD4 = _stc.STC_REBOL_WORD4
STC_REBOL_WORD5 = _stc.STC_REBOL_WORD5
STC_REBOL_WORD6 = _stc.STC_REBOL_WORD6
STC_REBOL_WORD7 = _stc.STC_REBOL_WORD7
STC_REBOL_WORD8 = _stc.STC_REBOL_WORD8
STC_SQL_DEFAULT = _stc.STC_SQL_DEFAULT
STC_SQL_COMMENT = _stc.STC_SQL_COMMENT
STC_SQL_COMMENTLINE = _stc.STC_SQL_COMMENTLINE
STC_SQL_COMMENTDOC = _stc.STC_SQL_COMMENTDOC
STC_SQL_NUMBER = _stc.STC_SQL_NUMBER
STC_SQL_WORD = _stc.STC_SQL_WORD
STC_SQL_STRING = _stc.STC_SQL_STRING
STC_SQL_CHARACTER = _stc.STC_SQL_CHARACTER
STC_SQL_SQLPLUS = _stc.STC_SQL_SQLPLUS
STC_SQL_SQLPLUS_PROMPT = _stc.STC_SQL_SQLPLUS_PROMPT
STC_SQL_OPERATOR = _stc.STC_SQL_OPERATOR
STC_SQL_IDENTIFIER = _stc.STC_SQL_IDENTIFIER
STC_SQL_SQLPLUS_COMMENT = _stc.STC_SQL_SQLPLUS_COMMENT
STC_SQL_COMMENTLINEDOC = _stc.STC_SQL_COMMENTLINEDOC
STC_SQL_WORD2 = _stc.STC_SQL_WORD2
STC_SQL_COMMENTDOCKEYWORD = _stc.STC_SQL_COMMENTDOCKEYWORD
STC_SQL_COMMENTDOCKEYWORDERROR = _stc.STC_SQL_COMMENTDOCKEYWORDERROR
STC_SQL_USER1 = _stc.STC_SQL_USER1
STC_SQL_USER2 = _stc.STC_SQL_USER2
STC_SQL_USER3 = _stc.STC_SQL_USER3
STC_SQL_USER4 = _stc.STC_SQL_USER4
STC_SQL_QUOTEDIDENTIFIER = _stc.STC_SQL_QUOTEDIDENTIFIER
STC_ST_DEFAULT = _stc.STC_ST_DEFAULT
STC_ST_STRING = _stc.STC_ST_STRING
STC_ST_NUMBER = _stc.STC_ST_NUMBER
STC_ST_COMMENT = _stc.STC_ST_COMMENT
STC_ST_SYMBOL = _stc.STC_ST_SYMBOL
STC_ST_BINARY = _stc.STC_ST_BINARY
STC_ST_BOOL = _stc.STC_ST_BOOL
STC_ST_SELF = _stc.STC_ST_SELF
STC_ST_SUPER = _stc.STC_ST_SUPER
STC_ST_NIL = _stc.STC_ST_NIL
STC_ST_GLOBAL = _stc.STC_ST_GLOBAL
STC_ST_RETURN = _stc.STC_ST_RETURN
STC_ST_SPECIAL = _stc.STC_ST_SPECIAL
STC_ST_KWSEND = _stc.STC_ST_KWSEND
STC_ST_ASSIGN = _stc.STC_ST_ASSIGN
STC_ST_CHARACTER = _stc.STC_ST_CHARACTER
STC_ST_SPEC_SEL = _stc.STC_ST_SPEC_SEL
STC_FS_DEFAULT = _stc.STC_FS_DEFAULT
STC_FS_COMMENT = _stc.STC_FS_COMMENT
STC_FS_COMMENTLINE = _stc.STC_FS_COMMENTLINE
STC_FS_COMMENTDOC = _stc.STC_FS_COMMENTDOC
STC_FS_COMMENTLINEDOC = _stc.STC_FS_COMMENTLINEDOC
STC_FS_COMMENTDOCKEYWORD = _stc.STC_FS_COMMENTDOCKEYWORD
STC_FS_COMMENTDOCKEYWORDERROR = _stc.STC_FS_COMMENTDOCKEYWORDERROR
STC_FS_KEYWORD = _stc.STC_FS_KEYWORD
STC_FS_KEYWORD2 = _stc.STC_FS_KEYWORD2
STC_FS_KEYWORD3 = _stc.STC_FS_KEYWORD3
STC_FS_KEYWORD4 = _stc.STC_FS_KEYWORD4
STC_FS_NUMBER = _stc.STC_FS_NUMBER
STC_FS_STRING = _stc.STC_FS_STRING
STC_FS_PREPROCESSOR = _stc.STC_FS_PREPROCESSOR
STC_FS_OPERATOR = _stc.STC_FS_OPERATOR
STC_FS_IDENTIFIER = _stc.STC_FS_IDENTIFIER
STC_FS_DATE = _stc.STC_FS_DATE
STC_FS_STRINGEOL = _stc.STC_FS_STRINGEOL
STC_FS_CONSTANT = _stc.STC_FS_CONSTANT
STC_FS_WORDOPERATOR = _stc.STC_FS_WORDOPERATOR
STC_FS_DISABLEDCODE = _stc.STC_FS_DISABLEDCODE
STC_FS_DEFAULT_C = _stc.STC_FS_DEFAULT_C
STC_FS_COMMENTDOC_C = _stc.STC_FS_COMMENTDOC_C
STC_FS_COMMENTLINEDOC_C = _stc.STC_FS_COMMENTLINEDOC_C
STC_FS_KEYWORD_C = _stc.STC_FS_KEYWORD_C
STC_FS_KEYWORD2_C = _stc.STC_FS_KEYWORD2_C
STC_FS_NUMBER_C = _stc.STC_FS_NUMBER_C
STC_FS_STRING_C = _stc.STC_FS_STRING_C
STC_FS_PREPROCESSOR_C = _stc.STC_FS_PREPROCESSOR_C
STC_FS_OPERATOR_C = _stc.STC_FS_OPERATOR_C
STC_FS_IDENTIFIER_C = _stc.STC_FS_IDENTIFIER_C
STC_FS_STRINGEOL_C = _stc.STC_FS_STRINGEOL_C
STC_CSOUND_DEFAULT = _stc.STC_CSOUND_DEFAULT
STC_CSOUND_COMMENT = _stc.STC_CSOUND_COMMENT
STC_CSOUND_NUMBER = _stc.STC_CSOUND_NUMBER
STC_CSOUND_OPERATOR = _stc.STC_CSOUND_OPERATOR
STC_CSOUND_INSTR = _stc.STC_CSOUND_INSTR
STC_CSOUND_IDENTIFIER = _stc.STC_CSOUND_IDENTIFIER
STC_CSOUND_OPCODE = _stc.STC_CSOUND_OPCODE
STC_CSOUND_HEADERSTMT = _stc.STC_CSOUND_HEADERSTMT
STC_CSOUND_USERKEYWORD = _stc.STC_CSOUND_USERKEYWORD
STC_CSOUND_COMMENTBLOCK = _stc.STC_CSOUND_COMMENTBLOCK
STC_CSOUND_PARAM = _stc.STC_CSOUND_PARAM
STC_CSOUND_ARATE_VAR = _stc.STC_CSOUND_ARATE_VAR
STC_CSOUND_KRATE_VAR = _stc.STC_CSOUND_KRATE_VAR
STC_CSOUND_IRATE_VAR = _stc.STC_CSOUND_IRATE_VAR
STC_CSOUND_GLOBAL_VAR = _stc.STC_CSOUND_GLOBAL_VAR
STC_CSOUND_STRINGEOL = _stc.STC_CSOUND_STRINGEOL
STC_INNO_DEFAULT = _stc.STC_INNO_DEFAULT
STC_INNO_COMMENT = _stc.STC_INNO_COMMENT
STC_INNO_KEYWORD = _stc.STC_INNO_KEYWORD
STC_INNO_PARAMETER = _stc.STC_INNO_PARAMETER
STC_INNO_SECTION = _stc.STC_INNO_SECTION
STC_INNO_PREPROC = _stc.STC_INNO_PREPROC
STC_INNO_INLINE_EXPANSION = _stc.STC_INNO_INLINE_EXPANSION
STC_INNO_COMMENT_PASCAL = _stc.STC_INNO_COMMENT_PASCAL
STC_INNO_KEYWORD_PASCAL = _stc.STC_INNO_KEYWORD_PASCAL
STC_INNO_KEYWORD_USER = _stc.STC_INNO_KEYWORD_USER
STC_INNO_STRING_DOUBLE = _stc.STC_INNO_STRING_DOUBLE
STC_INNO_STRING_SINGLE = _stc.STC_INNO_STRING_SINGLE
STC_INNO_IDENTIFIER = _stc.STC_INNO_IDENTIFIER
STC_OPAL_SPACE = _stc.STC_OPAL_SPACE
STC_OPAL_COMMENT_BLOCK = _stc.STC_OPAL_COMMENT_BLOCK
STC_OPAL_COMMENT_LINE = _stc.STC_OPAL_COMMENT_LINE
STC_OPAL_INTEGER = _stc.STC_OPAL_INTEGER
STC_OPAL_KEYWORD = _stc.STC_OPAL_KEYWORD
STC_OPAL_SORT = _stc.STC_OPAL_SORT
STC_OPAL_STRING = _stc.STC_OPAL_STRING
STC_OPAL_PAR = _stc.STC_OPAL_PAR
STC_OPAL_BOOL_CONST = _stc.STC_OPAL_BOOL_CONST
STC_OPAL_DEFAULT = _stc.STC_OPAL_DEFAULT
STC_SPICE_DEFAULT = _stc.STC_SPICE_DEFAULT
STC_SPICE_IDENTIFIER = _stc.STC_SPICE_IDENTIFIER
STC_SPICE_KEYWORD = _stc.STC_SPICE_KEYWORD
STC_SPICE_KEYWORD2 = _stc.STC_SPICE_KEYWORD2
STC_SPICE_KEYWORD3 = _stc.STC_SPICE_KEYWORD3
STC_SPICE_NUMBER = _stc.STC_SPICE_NUMBER
STC_SPICE_DELIMITER = _stc.STC_SPICE_DELIMITER
STC_SPICE_VALUE = _stc.STC_SPICE_VALUE
STC_SPICE_COMMENTLINE = _stc.STC_SPICE_COMMENTLINE
STC_CMAKE_DEFAULT = _stc.STC_CMAKE_DEFAULT
STC_CMAKE_COMMENT = _stc.STC_CMAKE_COMMENT
STC_CMAKE_STRINGDQ = _stc.STC_CMAKE_STRINGDQ
STC_CMAKE_STRINGLQ = _stc.STC_CMAKE_STRINGLQ
STC_CMAKE_STRINGRQ = _stc.STC_CMAKE_STRINGRQ
STC_CMAKE_COMMANDS = _stc.STC_CMAKE_COMMANDS
STC_CMAKE_PARAMETERS = _stc.STC_CMAKE_PARAMETERS
STC_CMAKE_VARIABLE = _stc.STC_CMAKE_VARIABLE
STC_CMAKE_USERDEFINED = _stc.STC_CMAKE_USERDEFINED
STC_CMAKE_WHILEDEF = _stc.STC_CMAKE_WHILEDEF
STC_CMAKE_FOREACHDEF = _stc.STC_CMAKE_FOREACHDEF
STC_CMAKE_IFDEFINEDEF = _stc.STC_CMAKE_IFDEFINEDEF
STC_CMAKE_MACRODEF = _stc.STC_CMAKE_MACRODEF
STC_CMAKE_STRINGVAR = _stc.STC_CMAKE_STRINGVAR
STC_CMAKE_NUMBER = _stc.STC_CMAKE_NUMBER
STC_GAP_DEFAULT = _stc.STC_GAP_DEFAULT
STC_GAP_IDENTIFIER = _stc.STC_GAP_IDENTIFIER STC_GAP_KEYWORD = _stc.STC_GAP_KEYWORD STC_GAP_KEYWORD2 = _stc.STC_GAP_KEYWORD2 STC_GAP_KEYWORD3 = _stc.STC_GAP_KEYWORD3 STC_GAP_KEYWORD4 = _stc.STC_GAP_KEYWORD4 STC_GAP_STRING = _stc.STC_GAP_STRING STC_GAP_CHAR = _stc.STC_GAP_CHAR STC_GAP_OPERATOR = _stc.STC_GAP_OPERATOR STC_GAP_COMMENT = _stc.STC_GAP_COMMENT STC_GAP_NUMBER = _stc.STC_GAP_NUMBER STC_GAP_STRINGEOL = _stc.STC_GAP_STRINGEOL STC_PLM_DEFAULT = _stc.STC_PLM_DEFAULT STC_PLM_COMMENT = _stc.STC_PLM_COMMENT STC_PLM_STRING = _stc.STC_PLM_STRING STC_PLM_NUMBER = _stc.STC_PLM_NUMBER STC_PLM_IDENTIFIER = _stc.STC_PLM_IDENTIFIER STC_PLM_OPERATOR = _stc.STC_PLM_OPERATOR STC_PLM_CONTROL = _stc.STC_PLM_CONTROL STC_PLM_KEYWORD = _stc.STC_PLM_KEYWORD STC_4GL_DEFAULT = _stc.STC_4GL_DEFAULT STC_4GL_NUMBER = _stc.STC_4GL_NUMBER STC_4GL_WORD = _stc.STC_4GL_WORD STC_4GL_STRING = _stc.STC_4GL_STRING STC_4GL_CHARACTER = _stc.STC_4GL_CHARACTER STC_4GL_PREPROCESSOR = _stc.STC_4GL_PREPROCESSOR STC_4GL_OPERATOR = _stc.STC_4GL_OPERATOR STC_4GL_IDENTIFIER = _stc.STC_4GL_IDENTIFIER STC_4GL_BLOCK = _stc.STC_4GL_BLOCK STC_4GL_END = _stc.STC_4GL_END STC_4GL_COMMENT1 = _stc.STC_4GL_COMMENT1 STC_4GL_COMMENT2 = _stc.STC_4GL_COMMENT2 STC_4GL_COMMENT3 = _stc.STC_4GL_COMMENT3 STC_4GL_COMMENT4 = _stc.STC_4GL_COMMENT4 STC_4GL_COMMENT5 = _stc.STC_4GL_COMMENT5 STC_4GL_COMMENT6 = _stc.STC_4GL_COMMENT6 STC_4GL_DEFAULT_ = _stc.STC_4GL_DEFAULT_ STC_4GL_NUMBER_ = _stc.STC_4GL_NUMBER_ STC_4GL_WORD_ = _stc.STC_4GL_WORD_ STC_4GL_STRING_ = _stc.STC_4GL_STRING_ STC_4GL_CHARACTER_ = _stc.STC_4GL_CHARACTER_ STC_4GL_PREPROCESSOR_ = _stc.STC_4GL_PREPROCESSOR_ STC_4GL_OPERATOR_ = _stc.STC_4GL_OPERATOR_ STC_4GL_IDENTIFIER_ = _stc.STC_4GL_IDENTIFIER_ STC_4GL_BLOCK_ = _stc.STC_4GL_BLOCK_ STC_4GL_END_ = _stc.STC_4GL_END_ STC_4GL_COMMENT1_ = _stc.STC_4GL_COMMENT1_ STC_4GL_COMMENT2_ = _stc.STC_4GL_COMMENT2_ STC_4GL_COMMENT3_ = _stc.STC_4GL_COMMENT3_ STC_4GL_COMMENT4_ = _stc.STC_4GL_COMMENT4_ STC_4GL_COMMENT5_ = _stc.STC_4GL_COMMENT5_ STC_4GL_COMMENT6_ = _stc.STC_4GL_COMMENT6_ STC_ABAQUS_DEFAULT = _stc.STC_ABAQUS_DEFAULT STC_ABAQUS_COMMENT = _stc.STC_ABAQUS_COMMENT STC_ABAQUS_COMMENTBLOCK = _stc.STC_ABAQUS_COMMENTBLOCK STC_ABAQUS_NUMBER = _stc.STC_ABAQUS_NUMBER STC_ABAQUS_STRING = _stc.STC_ABAQUS_STRING STC_ABAQUS_OPERATOR = _stc.STC_ABAQUS_OPERATOR STC_ABAQUS_WORD = _stc.STC_ABAQUS_WORD STC_ABAQUS_PROCESSOR = _stc.STC_ABAQUS_PROCESSOR STC_ABAQUS_COMMAND = _stc.STC_ABAQUS_COMMAND STC_ABAQUS_SLASHCOMMAND = _stc.STC_ABAQUS_SLASHCOMMAND STC_ABAQUS_STARCOMMAND = _stc.STC_ABAQUS_STARCOMMAND STC_ABAQUS_ARGUMENT = _stc.STC_ABAQUS_ARGUMENT STC_ABAQUS_FUNCTION = _stc.STC_ABAQUS_FUNCTION STC_ASY_DEFAULT = _stc.STC_ASY_DEFAULT STC_ASY_COMMENT = _stc.STC_ASY_COMMENT STC_ASY_COMMENTLINE = _stc.STC_ASY_COMMENTLINE STC_ASY_NUMBER = _stc.STC_ASY_NUMBER STC_ASY_WORD = _stc.STC_ASY_WORD STC_ASY_STRING = _stc.STC_ASY_STRING STC_ASY_CHARACTER = _stc.STC_ASY_CHARACTER STC_ASY_OPERATOR = _stc.STC_ASY_OPERATOR STC_ASY_IDENTIFIER = _stc.STC_ASY_IDENTIFIER STC_ASY_STRINGEOL = _stc.STC_ASY_STRINGEOL STC_ASY_COMMENTLINEDOC = _stc.STC_ASY_COMMENTLINEDOC STC_ASY_WORD2 = _stc.STC_ASY_WORD2 STC_R_DEFAULT = _stc.STC_R_DEFAULT STC_R_COMMENT = _stc.STC_R_COMMENT STC_R_KWORD = _stc.STC_R_KWORD STC_R_BASEKWORD = _stc.STC_R_BASEKWORD STC_R_OTHERKWORD = _stc.STC_R_OTHERKWORD STC_R_NUMBER = _stc.STC_R_NUMBER STC_R_STRING = _stc.STC_R_STRING STC_R_STRING2 = _stc.STC_R_STRING2 STC_R_OPERATOR = _stc.STC_R_OPERATOR STC_R_IDENTIFIER = _stc.STC_R_IDENTIFIER STC_R_INFIX = _stc.STC_R_INFIX 
STC_R_INFIXEOL = _stc.STC_R_INFIXEOL STC_MAGIK_DEFAULT = _stc.STC_MAGIK_DEFAULT STC_MAGIK_COMMENT = _stc.STC_MAGIK_COMMENT STC_MAGIK_HYPER_COMMENT = _stc.STC_MAGIK_HYPER_COMMENT STC_MAGIK_STRING = _stc.STC_MAGIK_STRING STC_MAGIK_CHARACTER = _stc.STC_MAGIK_CHARACTER STC_MAGIK_NUMBER = _stc.STC_MAGIK_NUMBER STC_MAGIK_IDENTIFIER = _stc.STC_MAGIK_IDENTIFIER STC_MAGIK_OPERATOR = _stc.STC_MAGIK_OPERATOR STC_MAGIK_FLOW = _stc.STC_MAGIK_FLOW STC_MAGIK_CONTAINER = _stc.STC_MAGIK_CONTAINER STC_MAGIK_BRACKET_BLOCK = _stc.STC_MAGIK_BRACKET_BLOCK STC_MAGIK_BRACE_BLOCK = _stc.STC_MAGIK_BRACE_BLOCK STC_MAGIK_SQBRACKET_BLOCK = _stc.STC_MAGIK_SQBRACKET_BLOCK STC_MAGIK_UNKNOWN_KEYWORD = _stc.STC_MAGIK_UNKNOWN_KEYWORD STC_MAGIK_KEYWORD = _stc.STC_MAGIK_KEYWORD STC_MAGIK_PRAGMA = _stc.STC_MAGIK_PRAGMA STC_MAGIK_SYMBOL = _stc.STC_MAGIK_SYMBOL STC_POWERSHELL_DEFAULT = _stc.STC_POWERSHELL_DEFAULT STC_POWERSHELL_COMMENT = _stc.STC_POWERSHELL_COMMENT STC_POWERSHELL_STRING = _stc.STC_POWERSHELL_STRING STC_POWERSHELL_CHARACTER = _stc.STC_POWERSHELL_CHARACTER STC_POWERSHELL_NUMBER = _stc.STC_POWERSHELL_NUMBER STC_POWERSHELL_VARIABLE = _stc.STC_POWERSHELL_VARIABLE STC_POWERSHELL_OPERATOR = _stc.STC_POWERSHELL_OPERATOR STC_POWERSHELL_IDENTIFIER = _stc.STC_POWERSHELL_IDENTIFIER STC_POWERSHELL_KEYWORD = _stc.STC_POWERSHELL_KEYWORD STC_POWERSHELL_CMDLET = _stc.STC_POWERSHELL_CMDLET STC_POWERSHELL_ALIAS = _stc.STC_POWERSHELL_ALIAS STC_POWERSHELL_FUNCTION = _stc.STC_POWERSHELL_FUNCTION STC_POWERSHELL_USER1 = _stc.STC_POWERSHELL_USER1 STC_POWERSHELL_COMMENTSTREAM = _stc.STC_POWERSHELL_COMMENTSTREAM STC_MYSQL_DEFAULT = _stc.STC_MYSQL_DEFAULT STC_MYSQL_COMMENT = _stc.STC_MYSQL_COMMENT STC_MYSQL_COMMENTLINE = _stc.STC_MYSQL_COMMENTLINE STC_MYSQL_VARIABLE = _stc.STC_MYSQL_VARIABLE STC_MYSQL_SYSTEMVARIABLE = _stc.STC_MYSQL_SYSTEMVARIABLE STC_MYSQL_KNOWNSYSTEMVARIABLE = _stc.STC_MYSQL_KNOWNSYSTEMVARIABLE STC_MYSQL_NUMBER = _stc.STC_MYSQL_NUMBER STC_MYSQL_MAJORKEYWORD = _stc.STC_MYSQL_MAJORKEYWORD STC_MYSQL_KEYWORD = _stc.STC_MYSQL_KEYWORD STC_MYSQL_DATABASEOBJECT = _stc.STC_MYSQL_DATABASEOBJECT STC_MYSQL_PROCEDUREKEYWORD = _stc.STC_MYSQL_PROCEDUREKEYWORD STC_MYSQL_STRING = _stc.STC_MYSQL_STRING STC_MYSQL_SQSTRING = _stc.STC_MYSQL_SQSTRING STC_MYSQL_DQSTRING = _stc.STC_MYSQL_DQSTRING STC_MYSQL_OPERATOR = _stc.STC_MYSQL_OPERATOR STC_MYSQL_FUNCTION = _stc.STC_MYSQL_FUNCTION STC_MYSQL_IDENTIFIER = _stc.STC_MYSQL_IDENTIFIER STC_MYSQL_QUOTEDIDENTIFIER = _stc.STC_MYSQL_QUOTEDIDENTIFIER STC_MYSQL_USER1 = _stc.STC_MYSQL_USER1 STC_MYSQL_USER2 = _stc.STC_MYSQL_USER2 STC_MYSQL_USER3 = _stc.STC_MYSQL_USER3 STC_MYSQL_HIDDENCOMMAND = _stc.STC_MYSQL_HIDDENCOMMAND STC_PO_DEFAULT = _stc.STC_PO_DEFAULT STC_PO_COMMENT = _stc.STC_PO_COMMENT STC_PO_MSGID = _stc.STC_PO_MSGID STC_PO_MSGID_TEXT = _stc.STC_PO_MSGID_TEXT STC_PO_MSGSTR = _stc.STC_PO_MSGSTR STC_PO_MSGSTR_TEXT = _stc.STC_PO_MSGSTR_TEXT STC_PO_MSGCTXT = _stc.STC_PO_MSGCTXT STC_PO_MSGCTXT_TEXT = _stc.STC_PO_MSGCTXT_TEXT STC_PO_FUZZY = _stc.STC_PO_FUZZY STC_PAS_DEFAULT = _stc.STC_PAS_DEFAULT STC_PAS_IDENTIFIER = _stc.STC_PAS_IDENTIFIER STC_PAS_COMMENT = _stc.STC_PAS_COMMENT STC_PAS_COMMENT2 = _stc.STC_PAS_COMMENT2 STC_PAS_COMMENTLINE = _stc.STC_PAS_COMMENTLINE STC_PAS_PREPROCESSOR = _stc.STC_PAS_PREPROCESSOR STC_PAS_PREPROCESSOR2 = _stc.STC_PAS_PREPROCESSOR2 STC_PAS_NUMBER = _stc.STC_PAS_NUMBER STC_PAS_HEXNUMBER = _stc.STC_PAS_HEXNUMBER STC_PAS_WORD = _stc.STC_PAS_WORD STC_PAS_STRING = _stc.STC_PAS_STRING STC_PAS_STRINGEOL = _stc.STC_PAS_STRINGEOL STC_PAS_CHARACTER = _stc.STC_PAS_CHARACTER 
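# Lexers with STC_<LEXER>_USER1-style slots (e.g. STC_MYSQL_USER1..USER3)
# leave room for caller-defined word lists on top of the built-in keyword
# classes. A minimal sketch, assuming 'ed' as above and assuming the MySQL
# lexer reads the first user list from keyword-set index 3 (the index-to-style
# mapping is lexer-defined, so check the lexer documentation):
#
#   ed.SetLexer(stc.STC_LEX_MYSQL)
#   ed.SetKeyWords(3, "my_custom_proc my_other_proc")
#   ed.StyleSetSpec(stc.STC_MYSQL_USER1, "fore:#7F0000")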
STC_PAS_OPERATOR = _stc.STC_PAS_OPERATOR STC_PAS_ASM = _stc.STC_PAS_ASM STC_SORCUS_DEFAULT = _stc.STC_SORCUS_DEFAULT STC_SORCUS_COMMAND = _stc.STC_SORCUS_COMMAND STC_SORCUS_PARAMETER = _stc.STC_SORCUS_PARAMETER STC_SORCUS_COMMENTLINE = _stc.STC_SORCUS_COMMENTLINE STC_SORCUS_STRING = _stc.STC_SORCUS_STRING STC_SORCUS_STRINGEOL = _stc.STC_SORCUS_STRINGEOL STC_SORCUS_IDENTIFIER = _stc.STC_SORCUS_IDENTIFIER STC_SORCUS_OPERATOR = _stc.STC_SORCUS_OPERATOR STC_SORCUS_NUMBER = _stc.STC_SORCUS_NUMBER STC_SORCUS_CONSTANT = _stc.STC_SORCUS_CONSTANT STC_POWERPRO_DEFAULT = _stc.STC_POWERPRO_DEFAULT STC_POWERPRO_COMMENTBLOCK = _stc.STC_POWERPRO_COMMENTBLOCK STC_POWERPRO_COMMENTLINE = _stc.STC_POWERPRO_COMMENTLINE STC_POWERPRO_NUMBER = _stc.STC_POWERPRO_NUMBER STC_POWERPRO_WORD = _stc.STC_POWERPRO_WORD STC_POWERPRO_WORD2 = _stc.STC_POWERPRO_WORD2 STC_POWERPRO_WORD3 = _stc.STC_POWERPRO_WORD3 STC_POWERPRO_WORD4 = _stc.STC_POWERPRO_WORD4 STC_POWERPRO_DOUBLEQUOTEDSTRING = _stc.STC_POWERPRO_DOUBLEQUOTEDSTRING STC_POWERPRO_SINGLEQUOTEDSTRING = _stc.STC_POWERPRO_SINGLEQUOTEDSTRING STC_POWERPRO_LINECONTINUE = _stc.STC_POWERPRO_LINECONTINUE STC_POWERPRO_OPERATOR = _stc.STC_POWERPRO_OPERATOR STC_POWERPRO_IDENTIFIER = _stc.STC_POWERPRO_IDENTIFIER STC_POWERPRO_STRINGEOL = _stc.STC_POWERPRO_STRINGEOL STC_POWERPRO_VERBATIM = _stc.STC_POWERPRO_VERBATIM STC_POWERPRO_ALTQUOTE = _stc.STC_POWERPRO_ALTQUOTE STC_POWERPRO_FUNCTION = _stc.STC_POWERPRO_FUNCTION STC_SML_DEFAULT = _stc.STC_SML_DEFAULT STC_SML_IDENTIFIER = _stc.STC_SML_IDENTIFIER STC_SML_TAGNAME = _stc.STC_SML_TAGNAME STC_SML_KEYWORD = _stc.STC_SML_KEYWORD STC_SML_KEYWORD2 = _stc.STC_SML_KEYWORD2 STC_SML_KEYWORD3 = _stc.STC_SML_KEYWORD3 STC_SML_LINENUM = _stc.STC_SML_LINENUM STC_SML_OPERATOR = _stc.STC_SML_OPERATOR STC_SML_NUMBER = _stc.STC_SML_NUMBER STC_SML_CHAR = _stc.STC_SML_CHAR STC_SML_STRING = _stc.STC_SML_STRING STC_SML_COMMENT = _stc.STC_SML_COMMENT STC_SML_COMMENT1 = _stc.STC_SML_COMMENT1 STC_SML_COMMENT2 = _stc.STC_SML_COMMENT2 STC_SML_COMMENT3 = _stc.STC_SML_COMMENT3 STC_MARKDOWN_DEFAULT = _stc.STC_MARKDOWN_DEFAULT STC_MARKDOWN_LINE_BEGIN = _stc.STC_MARKDOWN_LINE_BEGIN STC_MARKDOWN_STRONG1 = _stc.STC_MARKDOWN_STRONG1 STC_MARKDOWN_STRONG2 = _stc.STC_MARKDOWN_STRONG2 STC_MARKDOWN_EM1 = _stc.STC_MARKDOWN_EM1 STC_MARKDOWN_EM2 = _stc.STC_MARKDOWN_EM2 STC_MARKDOWN_HEADER1 = _stc.STC_MARKDOWN_HEADER1 STC_MARKDOWN_HEADER2 = _stc.STC_MARKDOWN_HEADER2 STC_MARKDOWN_HEADER3 = _stc.STC_MARKDOWN_HEADER3 STC_MARKDOWN_HEADER4 = _stc.STC_MARKDOWN_HEADER4 STC_MARKDOWN_HEADER5 = _stc.STC_MARKDOWN_HEADER5 STC_MARKDOWN_HEADER6 = _stc.STC_MARKDOWN_HEADER6 STC_MARKDOWN_PRECHAR = _stc.STC_MARKDOWN_PRECHAR STC_MARKDOWN_ULIST_ITEM = _stc.STC_MARKDOWN_ULIST_ITEM STC_MARKDOWN_OLIST_ITEM = _stc.STC_MARKDOWN_OLIST_ITEM STC_MARKDOWN_BLOCKQUOTE = _stc.STC_MARKDOWN_BLOCKQUOTE STC_MARKDOWN_STRIKEOUT = _stc.STC_MARKDOWN_STRIKEOUT STC_MARKDOWN_HRULE = _stc.STC_MARKDOWN_HRULE STC_MARKDOWN_LINK = _stc.STC_MARKDOWN_LINK STC_MARKDOWN_CODE = _stc.STC_MARKDOWN_CODE STC_MARKDOWN_CODE2 = _stc.STC_MARKDOWN_CODE2 STC_MARKDOWN_CODEBK = _stc.STC_MARKDOWN_CODEBK STC_TXT2TAGS_DEFAULT = _stc.STC_TXT2TAGS_DEFAULT STC_TXT2TAGS_LINE_BEGIN = _stc.STC_TXT2TAGS_LINE_BEGIN STC_TXT2TAGS_STRONG1 = _stc.STC_TXT2TAGS_STRONG1 STC_TXT2TAGS_STRONG2 = _stc.STC_TXT2TAGS_STRONG2 STC_TXT2TAGS_EM1 = _stc.STC_TXT2TAGS_EM1 STC_TXT2TAGS_EM2 = _stc.STC_TXT2TAGS_EM2 STC_TXT2TAGS_HEADER1 = _stc.STC_TXT2TAGS_HEADER1 STC_TXT2TAGS_HEADER2 = _stc.STC_TXT2TAGS_HEADER2 STC_TXT2TAGS_HEADER3 = _stc.STC_TXT2TAGS_HEADER3 
STC_TXT2TAGS_HEADER4 = _stc.STC_TXT2TAGS_HEADER4 STC_TXT2TAGS_HEADER5 = _stc.STC_TXT2TAGS_HEADER5 STC_TXT2TAGS_HEADER6 = _stc.STC_TXT2TAGS_HEADER6 STC_TXT2TAGS_PRECHAR = _stc.STC_TXT2TAGS_PRECHAR STC_TXT2TAGS_ULIST_ITEM = _stc.STC_TXT2TAGS_ULIST_ITEM STC_TXT2TAGS_OLIST_ITEM = _stc.STC_TXT2TAGS_OLIST_ITEM STC_TXT2TAGS_BLOCKQUOTE = _stc.STC_TXT2TAGS_BLOCKQUOTE STC_TXT2TAGS_STRIKEOUT = _stc.STC_TXT2TAGS_STRIKEOUT STC_TXT2TAGS_HRULE = _stc.STC_TXT2TAGS_HRULE STC_TXT2TAGS_LINK = _stc.STC_TXT2TAGS_LINK STC_TXT2TAGS_CODE = _stc.STC_TXT2TAGS_CODE STC_TXT2TAGS_CODE2 = _stc.STC_TXT2TAGS_CODE2 STC_TXT2TAGS_CODEBK = _stc.STC_TXT2TAGS_CODEBK STC_TXT2TAGS_COMMENT = _stc.STC_TXT2TAGS_COMMENT STC_TXT2TAGS_OPTION = _stc.STC_TXT2TAGS_OPTION STC_TXT2TAGS_PREPROC = _stc.STC_TXT2TAGS_PREPROC STC_TXT2TAGS_POSTPROC = _stc.STC_TXT2TAGS_POSTPROC STC_A68K_DEFAULT = _stc.STC_A68K_DEFAULT STC_A68K_COMMENT = _stc.STC_A68K_COMMENT STC_A68K_NUMBER_DEC = _stc.STC_A68K_NUMBER_DEC STC_A68K_NUMBER_BIN = _stc.STC_A68K_NUMBER_BIN STC_A68K_NUMBER_HEX = _stc.STC_A68K_NUMBER_HEX STC_A68K_STRING1 = _stc.STC_A68K_STRING1 STC_A68K_OPERATOR = _stc.STC_A68K_OPERATOR STC_A68K_CPUINSTRUCTION = _stc.STC_A68K_CPUINSTRUCTION STC_A68K_EXTINSTRUCTION = _stc.STC_A68K_EXTINSTRUCTION STC_A68K_REGISTER = _stc.STC_A68K_REGISTER STC_A68K_DIRECTIVE = _stc.STC_A68K_DIRECTIVE STC_A68K_MACRO_ARG = _stc.STC_A68K_MACRO_ARG STC_A68K_LABEL = _stc.STC_A68K_LABEL STC_A68K_STRING2 = _stc.STC_A68K_STRING2 STC_A68K_IDENTIFIER = _stc.STC_A68K_IDENTIFIER STC_A68K_MACRO_DECLARATION = _stc.STC_A68K_MACRO_DECLARATION STC_A68K_COMMENT_WORD = _stc.STC_A68K_COMMENT_WORD STC_A68K_COMMENT_SPECIAL = _stc.STC_A68K_COMMENT_SPECIAL STC_A68K_COMMENT_DOXYGEN = _stc.STC_A68K_COMMENT_DOXYGEN STC_MODULA_DEFAULT = _stc.STC_MODULA_DEFAULT STC_MODULA_COMMENT = _stc.STC_MODULA_COMMENT STC_MODULA_DOXYCOMM = _stc.STC_MODULA_DOXYCOMM STC_MODULA_DOXYKEY = _stc.STC_MODULA_DOXYKEY STC_MODULA_KEYWORD = _stc.STC_MODULA_KEYWORD STC_MODULA_RESERVED = _stc.STC_MODULA_RESERVED STC_MODULA_NUMBER = _stc.STC_MODULA_NUMBER STC_MODULA_BASENUM = _stc.STC_MODULA_BASENUM STC_MODULA_FLOAT = _stc.STC_MODULA_FLOAT STC_MODULA_STRING = _stc.STC_MODULA_STRING STC_MODULA_STRSPEC = _stc.STC_MODULA_STRSPEC STC_MODULA_CHAR = _stc.STC_MODULA_CHAR STC_MODULA_CHARSPEC = _stc.STC_MODULA_CHARSPEC STC_MODULA_PROC = _stc.STC_MODULA_PROC STC_MODULA_PRAGMA = _stc.STC_MODULA_PRAGMA STC_MODULA_PRGKEY = _stc.STC_MODULA_PRGKEY STC_MODULA_OPERATOR = _stc.STC_MODULA_OPERATOR STC_MODULA_BADSTR = _stc.STC_MODULA_BADSTR STC_COFFEESCRIPT_DEFAULT = _stc.STC_COFFEESCRIPT_DEFAULT STC_COFFEESCRIPT_COMMENT = _stc.STC_COFFEESCRIPT_COMMENT STC_COFFEESCRIPT_COMMENTLINE = _stc.STC_COFFEESCRIPT_COMMENTLINE STC_COFFEESCRIPT_COMMENTDOC = _stc.STC_COFFEESCRIPT_COMMENTDOC STC_COFFEESCRIPT_NUMBER = _stc.STC_COFFEESCRIPT_NUMBER STC_COFFEESCRIPT_WORD = _stc.STC_COFFEESCRIPT_WORD STC_COFFEESCRIPT_STRING = _stc.STC_COFFEESCRIPT_STRING STC_COFFEESCRIPT_CHARACTER = _stc.STC_COFFEESCRIPT_CHARACTER STC_COFFEESCRIPT_UUID = _stc.STC_COFFEESCRIPT_UUID STC_COFFEESCRIPT_PREPROCESSOR = _stc.STC_COFFEESCRIPT_PREPROCESSOR STC_COFFEESCRIPT_OPERATOR = _stc.STC_COFFEESCRIPT_OPERATOR STC_COFFEESCRIPT_IDENTIFIER = _stc.STC_COFFEESCRIPT_IDENTIFIER STC_COFFEESCRIPT_STRINGEOL = _stc.STC_COFFEESCRIPT_STRINGEOL STC_COFFEESCRIPT_VERBATIM = _stc.STC_COFFEESCRIPT_VERBATIM STC_COFFEESCRIPT_REGEX = _stc.STC_COFFEESCRIPT_REGEX STC_COFFEESCRIPT_COMMENTLINEDOC = _stc.STC_COFFEESCRIPT_COMMENTLINEDOC STC_COFFEESCRIPT_WORD2 = _stc.STC_COFFEESCRIPT_WORD2 
STC_COFFEESCRIPT_COMMENTDOCKEYWORD = _stc.STC_COFFEESCRIPT_COMMENTDOCKEYWORD STC_COFFEESCRIPT_COMMENTDOCKEYWORDERROR = _stc.STC_COFFEESCRIPT_COMMENTDOCKEYWORDERROR STC_COFFEESCRIPT_GLOBALCLASS = _stc.STC_COFFEESCRIPT_GLOBALCLASS STC_COFFEESCRIPT_STRINGRAW = _stc.STC_COFFEESCRIPT_STRINGRAW STC_COFFEESCRIPT_TRIPLEVERBATIM = _stc.STC_COFFEESCRIPT_TRIPLEVERBATIM STC_COFFEESCRIPT_HASHQUOTEDSTRING = _stc.STC_COFFEESCRIPT_HASHQUOTEDSTRING STC_COFFEESCRIPT_COMMENTBLOCK = _stc.STC_COFFEESCRIPT_COMMENTBLOCK STC_COFFEESCRIPT_VERBOSE_REGEX = _stc.STC_COFFEESCRIPT_VERBOSE_REGEX STC_COFFEESCRIPT_VERBOSE_REGEX_COMMENT = _stc.STC_COFFEESCRIPT_VERBOSE_REGEX_COMMENT STC_AVS_DEFAULT = _stc.STC_AVS_DEFAULT STC_AVS_COMMENTBLOCK = _stc.STC_AVS_COMMENTBLOCK STC_AVS_COMMENTBLOCKN = _stc.STC_AVS_COMMENTBLOCKN STC_AVS_COMMENTLINE = _stc.STC_AVS_COMMENTLINE STC_AVS_NUMBER = _stc.STC_AVS_NUMBER STC_AVS_OPERATOR = _stc.STC_AVS_OPERATOR STC_AVS_IDENTIFIER = _stc.STC_AVS_IDENTIFIER STC_AVS_STRING = _stc.STC_AVS_STRING STC_AVS_TRIPLESTRING = _stc.STC_AVS_TRIPLESTRING STC_AVS_KEYWORD = _stc.STC_AVS_KEYWORD STC_AVS_FILTER = _stc.STC_AVS_FILTER STC_AVS_PLUGIN = _stc.STC_AVS_PLUGIN STC_AVS_FUNCTION = _stc.STC_AVS_FUNCTION STC_AVS_CLIPPROP = _stc.STC_AVS_CLIPPROP STC_AVS_USERDFN = _stc.STC_AVS_USERDFN STC_ECL_DEFAULT = _stc.STC_ECL_DEFAULT STC_ECL_COMMENT = _stc.STC_ECL_COMMENT STC_ECL_COMMENTLINE = _stc.STC_ECL_COMMENTLINE STC_ECL_NUMBER = _stc.STC_ECL_NUMBER STC_ECL_STRING = _stc.STC_ECL_STRING STC_ECL_WORD0 = _stc.STC_ECL_WORD0 STC_ECL_OPERATOR = _stc.STC_ECL_OPERATOR STC_ECL_CHARACTER = _stc.STC_ECL_CHARACTER STC_ECL_UUID = _stc.STC_ECL_UUID STC_ECL_PREPROCESSOR = _stc.STC_ECL_PREPROCESSOR STC_ECL_UNKNOWN = _stc.STC_ECL_UNKNOWN STC_ECL_IDENTIFIER = _stc.STC_ECL_IDENTIFIER STC_ECL_STRINGEOL = _stc.STC_ECL_STRINGEOL STC_ECL_VERBATIM = _stc.STC_ECL_VERBATIM STC_ECL_REGEX = _stc.STC_ECL_REGEX STC_ECL_COMMENTLINEDOC = _stc.STC_ECL_COMMENTLINEDOC STC_ECL_WORD1 = _stc.STC_ECL_WORD1 STC_ECL_COMMENTDOCKEYWORD = _stc.STC_ECL_COMMENTDOCKEYWORD STC_ECL_COMMENTDOCKEYWORDERROR = _stc.STC_ECL_COMMENTDOCKEYWORDERROR STC_ECL_WORD2 = _stc.STC_ECL_WORD2 STC_ECL_WORD3 = _stc.STC_ECL_WORD3 STC_ECL_WORD4 = _stc.STC_ECL_WORD4 STC_ECL_WORD5 = _stc.STC_ECL_WORD5 STC_ECL_COMMENTDOC = _stc.STC_ECL_COMMENTDOC STC_ECL_ADDED = _stc.STC_ECL_ADDED STC_ECL_DELETED = _stc.STC_ECL_DELETED STC_ECL_CHANGED = _stc.STC_ECL_CHANGED STC_ECL_MOVED = _stc.STC_ECL_MOVED STC_OSCRIPT_DEFAULT = _stc.STC_OSCRIPT_DEFAULT STC_OSCRIPT_LINE_COMMENT = _stc.STC_OSCRIPT_LINE_COMMENT STC_OSCRIPT_BLOCK_COMMENT = _stc.STC_OSCRIPT_BLOCK_COMMENT STC_OSCRIPT_DOC_COMMENT = _stc.STC_OSCRIPT_DOC_COMMENT STC_OSCRIPT_PREPROCESSOR = _stc.STC_OSCRIPT_PREPROCESSOR STC_OSCRIPT_NUMBER = _stc.STC_OSCRIPT_NUMBER STC_OSCRIPT_SINGLEQUOTE_STRING = _stc.STC_OSCRIPT_SINGLEQUOTE_STRING STC_OSCRIPT_DOUBLEQUOTE_STRING = _stc.STC_OSCRIPT_DOUBLEQUOTE_STRING STC_OSCRIPT_CONSTANT = _stc.STC_OSCRIPT_CONSTANT STC_OSCRIPT_IDENTIFIER = _stc.STC_OSCRIPT_IDENTIFIER STC_OSCRIPT_GLOBAL = _stc.STC_OSCRIPT_GLOBAL STC_OSCRIPT_KEYWORD = _stc.STC_OSCRIPT_KEYWORD STC_OSCRIPT_OPERATOR = _stc.STC_OSCRIPT_OPERATOR STC_OSCRIPT_LABEL = _stc.STC_OSCRIPT_LABEL STC_OSCRIPT_TYPE = _stc.STC_OSCRIPT_TYPE STC_OSCRIPT_FUNCTION = _stc.STC_OSCRIPT_FUNCTION STC_OSCRIPT_OBJECT = _stc.STC_OSCRIPT_OBJECT STC_OSCRIPT_PROPERTY = _stc.STC_OSCRIPT_PROPERTY STC_OSCRIPT_METHOD = _stc.STC_OSCRIPT_METHOD STC_VISUALPROLOG_DEFAULT = _stc.STC_VISUALPROLOG_DEFAULT STC_VISUALPROLOG_KEY_MAJOR = _stc.STC_VISUALPROLOG_KEY_MAJOR 
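# The STC_CMD_* values defined below are editor command identifiers: the same
# commands the default keymap invokes can be rebound with CmdKeyAssign() or
# run directly with CmdKeyExecute(). A minimal sketch, assuming 'ed' as above
# (binds Ctrl+D to line duplication, then lower-cases the selection):
#
#   ed.CmdKeyAssign(ord('D'), stc.STC_SCMOD_CTRL, stc.STC_CMD_LINEDUPLICATE)
#   ed.CmdKeyExecute(stc.STC_CMD_LOWERCASE)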
STC_VISUALPROLOG_KEY_MINOR = _stc.STC_VISUALPROLOG_KEY_MINOR
STC_VISUALPROLOG_KEY_DIRECTIVE = _stc.STC_VISUALPROLOG_KEY_DIRECTIVE
STC_VISUALPROLOG_COMMENT_BLOCK = _stc.STC_VISUALPROLOG_COMMENT_BLOCK
STC_VISUALPROLOG_COMMENT_LINE = _stc.STC_VISUALPROLOG_COMMENT_LINE
STC_VISUALPROLOG_COMMENT_KEY = _stc.STC_VISUALPROLOG_COMMENT_KEY
STC_VISUALPROLOG_COMMENT_KEY_ERROR = _stc.STC_VISUALPROLOG_COMMENT_KEY_ERROR
STC_VISUALPROLOG_IDENTIFIER = _stc.STC_VISUALPROLOG_IDENTIFIER
STC_VISUALPROLOG_VARIABLE = _stc.STC_VISUALPROLOG_VARIABLE
STC_VISUALPROLOG_ANONYMOUS = _stc.STC_VISUALPROLOG_ANONYMOUS
STC_VISUALPROLOG_NUMBER = _stc.STC_VISUALPROLOG_NUMBER
STC_VISUALPROLOG_OPERATOR = _stc.STC_VISUALPROLOG_OPERATOR
STC_VISUALPROLOG_CHARACTER = _stc.STC_VISUALPROLOG_CHARACTER
STC_VISUALPROLOG_CHARACTER_TOO_MANY = _stc.STC_VISUALPROLOG_CHARACTER_TOO_MANY
STC_VISUALPROLOG_CHARACTER_ESCAPE_ERROR = _stc.STC_VISUALPROLOG_CHARACTER_ESCAPE_ERROR
STC_VISUALPROLOG_STRING = _stc.STC_VISUALPROLOG_STRING
STC_VISUALPROLOG_STRING_ESCAPE = _stc.STC_VISUALPROLOG_STRING_ESCAPE
STC_VISUALPROLOG_STRING_ESCAPE_ERROR = _stc.STC_VISUALPROLOG_STRING_ESCAPE_ERROR
STC_VISUALPROLOG_STRING_EOL_OPEN = _stc.STC_VISUALPROLOG_STRING_EOL_OPEN
STC_VISUALPROLOG_STRING_VERBATIM = _stc.STC_VISUALPROLOG_STRING_VERBATIM
STC_VISUALPROLOG_STRING_VERBATIM_SPECIAL = _stc.STC_VISUALPROLOG_STRING_VERBATIM_SPECIAL
STC_VISUALPROLOG_STRING_VERBATIM_EOL = _stc.STC_VISUALPROLOG_STRING_VERBATIM_EOL
STC_CMD_REDO = _stc.STC_CMD_REDO
STC_CMD_SELECTALL = _stc.STC_CMD_SELECTALL
STC_CMD_UNDO = _stc.STC_CMD_UNDO
STC_CMD_CUT = _stc.STC_CMD_CUT
STC_CMD_COPY = _stc.STC_CMD_COPY
STC_CMD_PASTE = _stc.STC_CMD_PASTE
STC_CMD_CLEAR = _stc.STC_CMD_CLEAR
STC_CMD_LINEDOWN = _stc.STC_CMD_LINEDOWN
STC_CMD_LINEDOWNEXTEND = _stc.STC_CMD_LINEDOWNEXTEND
STC_CMD_LINEUP = _stc.STC_CMD_LINEUP
STC_CMD_LINEUPEXTEND = _stc.STC_CMD_LINEUPEXTEND
STC_CMD_CHARLEFT = _stc.STC_CMD_CHARLEFT
STC_CMD_CHARLEFTEXTEND = _stc.STC_CMD_CHARLEFTEXTEND
STC_CMD_CHARRIGHT = _stc.STC_CMD_CHARRIGHT
STC_CMD_CHARRIGHTEXTEND = _stc.STC_CMD_CHARRIGHTEXTEND
STC_CMD_WORDLEFT = _stc.STC_CMD_WORDLEFT
STC_CMD_WORDLEFTEXTEND = _stc.STC_CMD_WORDLEFTEXTEND
STC_CMD_WORDRIGHT = _stc.STC_CMD_WORDRIGHT
STC_CMD_WORDRIGHTEXTEND = _stc.STC_CMD_WORDRIGHTEXTEND
STC_CMD_HOME = _stc.STC_CMD_HOME
STC_CMD_HOMEEXTEND = _stc.STC_CMD_HOMEEXTEND
STC_CMD_LINEEND = _stc.STC_CMD_LINEEND
STC_CMD_LINEENDEXTEND = _stc.STC_CMD_LINEENDEXTEND
STC_CMD_DOCUMENTSTART = _stc.STC_CMD_DOCUMENTSTART
STC_CMD_DOCUMENTSTARTEXTEND = _stc.STC_CMD_DOCUMENTSTARTEXTEND
STC_CMD_DOCUMENTEND = _stc.STC_CMD_DOCUMENTEND
STC_CMD_DOCUMENTENDEXTEND = _stc.STC_CMD_DOCUMENTENDEXTEND
STC_CMD_PAGEUP = _stc.STC_CMD_PAGEUP
STC_CMD_PAGEUPEXTEND = _stc.STC_CMD_PAGEUPEXTEND
STC_CMD_PAGEDOWN = _stc.STC_CMD_PAGEDOWN
STC_CMD_PAGEDOWNEXTEND = _stc.STC_CMD_PAGEDOWNEXTEND
STC_CMD_EDITTOGGLEOVERTYPE = _stc.STC_CMD_EDITTOGGLEOVERTYPE
STC_CMD_CANCEL = _stc.STC_CMD_CANCEL
STC_CMD_DELETEBACK = _stc.STC_CMD_DELETEBACK
STC_CMD_TAB = _stc.STC_CMD_TAB
STC_CMD_BACKTAB = _stc.STC_CMD_BACKTAB
STC_CMD_NEWLINE = _stc.STC_CMD_NEWLINE
STC_CMD_FORMFEED = _stc.STC_CMD_FORMFEED
STC_CMD_VCHOME = _stc.STC_CMD_VCHOME
STC_CMD_VCHOMEEXTEND = _stc.STC_CMD_VCHOMEEXTEND
STC_CMD_ZOOMIN = _stc.STC_CMD_ZOOMIN
STC_CMD_ZOOMOUT = _stc.STC_CMD_ZOOMOUT
STC_CMD_DELWORDLEFT = _stc.STC_CMD_DELWORDLEFT
STC_CMD_DELWORDRIGHT = _stc.STC_CMD_DELWORDRIGHT
STC_CMD_DELWORDRIGHTEND = _stc.STC_CMD_DELWORDRIGHTEND
STC_CMD_LINECUT = _stc.STC_CMD_LINECUT
STC_CMD_LINEDELETE = _stc.STC_CMD_LINEDELETE
STC_CMD_LINETRANSPOSE = _stc.STC_CMD_LINETRANSPOSE
STC_CMD_LINEDUPLICATE = _stc.STC_CMD_LINEDUPLICATE
STC_CMD_LOWERCASE = _stc.STC_CMD_LOWERCASE
STC_CMD_UPPERCASE = _stc.STC_CMD_UPPERCASE
STC_CMD_LINESCROLLDOWN = _stc.STC_CMD_LINESCROLLDOWN
STC_CMD_LINESCROLLUP = _stc.STC_CMD_LINESCROLLUP
STC_CMD_DELETEBACKNOTLINE = _stc.STC_CMD_DELETEBACKNOTLINE
STC_CMD_HOMEDISPLAY = _stc.STC_CMD_HOMEDISPLAY
STC_CMD_HOMEDISPLAYEXTEND = _stc.STC_CMD_HOMEDISPLAYEXTEND
STC_CMD_LINEENDDISPLAY = _stc.STC_CMD_LINEENDDISPLAY
STC_CMD_LINEENDDISPLAYEXTEND = _stc.STC_CMD_LINEENDDISPLAYEXTEND
STC_CMD_HOMEWRAP = _stc.STC_CMD_HOMEWRAP
STC_CMD_HOMEWRAPEXTEND = _stc.STC_CMD_HOMEWRAPEXTEND
STC_CMD_LINEENDWRAP = _stc.STC_CMD_LINEENDWRAP
STC_CMD_LINEENDWRAPEXTEND = _stc.STC_CMD_LINEENDWRAPEXTEND
STC_CMD_VCHOMEWRAP = _stc.STC_CMD_VCHOMEWRAP
STC_CMD_VCHOMEWRAPEXTEND = _stc.STC_CMD_VCHOMEWRAPEXTEND
STC_CMD_LINECOPY = _stc.STC_CMD_LINECOPY
STC_CMD_WORDPARTLEFT = _stc.STC_CMD_WORDPARTLEFT
STC_CMD_WORDPARTLEFTEXTEND = _stc.STC_CMD_WORDPARTLEFTEXTEND
STC_CMD_WORDPARTRIGHT = _stc.STC_CMD_WORDPARTRIGHT
STC_CMD_WORDPARTRIGHTEXTEND = _stc.STC_CMD_WORDPARTRIGHTEXTEND
STC_CMD_DELLINELEFT = _stc.STC_CMD_DELLINELEFT
STC_CMD_DELLINERIGHT = _stc.STC_CMD_DELLINERIGHT
STC_CMD_PARADOWN = _stc.STC_CMD_PARADOWN
STC_CMD_PARADOWNEXTEND = _stc.STC_CMD_PARADOWNEXTEND
STC_CMD_PARAUP = _stc.STC_CMD_PARAUP
STC_CMD_PARAUPEXTEND = _stc.STC_CMD_PARAUPEXTEND
STC_CMD_LINEDOWNRECTEXTEND = _stc.STC_CMD_LINEDOWNRECTEXTEND
STC_CMD_LINEUPRECTEXTEND = _stc.STC_CMD_LINEUPRECTEXTEND
STC_CMD_CHARLEFTRECTEXTEND = _stc.STC_CMD_CHARLEFTRECTEXTEND
STC_CMD_CHARRIGHTRECTEXTEND = _stc.STC_CMD_CHARRIGHTRECTEXTEND
STC_CMD_HOMERECTEXTEND = _stc.STC_CMD_HOMERECTEXTEND
STC_CMD_VCHOMERECTEXTEND = _stc.STC_CMD_VCHOMERECTEXTEND
STC_CMD_LINEENDRECTEXTEND = _stc.STC_CMD_LINEENDRECTEXTEND
STC_CMD_PAGEUPRECTEXTEND = _stc.STC_CMD_PAGEUPRECTEXTEND
STC_CMD_PAGEDOWNRECTEXTEND = _stc.STC_CMD_PAGEDOWNRECTEXTEND
STC_CMD_STUTTEREDPAGEUP = _stc.STC_CMD_STUTTEREDPAGEUP
STC_CMD_STUTTEREDPAGEUPEXTEND = _stc.STC_CMD_STUTTEREDPAGEUPEXTEND
STC_CMD_STUTTEREDPAGEDOWN = _stc.STC_CMD_STUTTEREDPAGEDOWN
STC_CMD_STUTTEREDPAGEDOWNEXTEND = _stc.STC_CMD_STUTTEREDPAGEDOWNEXTEND
STC_CMD_WORDLEFTEND = _stc.STC_CMD_WORDLEFTEND
STC_CMD_WORDLEFTENDEXTEND = _stc.STC_CMD_WORDLEFTENDEXTEND
STC_CMD_WORDRIGHTEND = _stc.STC_CMD_WORDRIGHTEND
STC_CMD_WORDRIGHTENDEXTEND = _stc.STC_CMD_WORDRIGHTENDEXTEND
STC_CMD_VERTICALCENTRECARET = _stc.STC_CMD_VERTICALCENTRECARET
STC_CMD_MOVESELECTEDLINESUP = _stc.STC_CMD_MOVESELECTEDLINESUP
STC_CMD_MOVESELECTEDLINESDOWN = _stc.STC_CMD_MOVESELECTEDLINESDOWN
STC_CMD_SCROLLTOSTART = _stc.STC_CMD_SCROLLTOSTART
STC_CMD_SCROLLTOEND = _stc.STC_CMD_SCROLLTOEND

class StyledTextCtrl(_core.Control,_core.TextCtrlIface):
    """Proxy of C++ StyledTextCtrl class"""
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')
    __repr__ = _swig_repr
    def __init__(self, *args, **kwargs):
        """
        __init__(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition,
            Size size=DefaultSize, long style=0, String name=STCNameStr) -> StyledTextCtrl
        """
        _stc.StyledTextCtrl_swiginit(self,_stc.new_StyledTextCtrl(*args, **kwargs))
        self._setOORInfo(self)

    def Create(*args, **kwargs):
        """
        Create(self, Window parent, int id=ID_ANY, Point pos=DefaultPosition,
            Size size=DefaultSize, long style=0, String name=wxSTCNameStr) -> bool
        """
        return _stc.StyledTextCtrl_Create(*args, **kwargs)

    def AddText(*args, **kwargs):
        """
        AddText(self, String text)

        Add text to the document at current position.
""" return _stc.StyledTextCtrl_AddText(*args, **kwargs) def AddStyledText(*args, **kwargs): """ AddStyledText(self, wxMemoryBuffer data) Add array of cells to document. """ return _stc.StyledTextCtrl_AddStyledText(*args, **kwargs) def InsertText(*args, **kwargs): """ InsertText(self, int pos, String text) Insert string at a position. """ return _stc.StyledTextCtrl_InsertText(*args, **kwargs) def ClearAll(*args, **kwargs): """ ClearAll(self) Delete all text in the document. """ return _stc.StyledTextCtrl_ClearAll(*args, **kwargs) def DeleteRange(*args, **kwargs): """DeleteRange(self, int pos, int deleteLength)""" return _stc.StyledTextCtrl_DeleteRange(*args, **kwargs) def ClearDocumentStyle(*args, **kwargs): """ ClearDocumentStyle(self) Set all style bytes to 0, remove all folding information. """ return _stc.StyledTextCtrl_ClearDocumentStyle(*args, **kwargs) def GetLength(*args, **kwargs): """ GetLength(self) -> int Returns the number of bytes in the document. """ return _stc.StyledTextCtrl_GetLength(*args, **kwargs) def GetCharAt(*args, **kwargs): """ GetCharAt(self, int pos) -> int Returns the character byte at the position. """ return _stc.StyledTextCtrl_GetCharAt(*args, **kwargs) def GetCurrentPos(*args, **kwargs): """ GetCurrentPos(self) -> int Returns the position of the caret. """ return _stc.StyledTextCtrl_GetCurrentPos(*args, **kwargs) def GetAnchor(*args, **kwargs): """ GetAnchor(self) -> int Returns the position of the opposite end of the selection to the caret. """ return _stc.StyledTextCtrl_GetAnchor(*args, **kwargs) def GetStyleAt(*args, **kwargs): """ GetStyleAt(self, int pos) -> int Returns the style byte at the position. """ return _stc.StyledTextCtrl_GetStyleAt(*args, **kwargs) def SetUndoCollection(*args, **kwargs): """ SetUndoCollection(self, bool collectUndo) Choose between collecting actions into the undo history and discarding them. """ return _stc.StyledTextCtrl_SetUndoCollection(*args, **kwargs) def SetSavePoint(*args, **kwargs): """ SetSavePoint(self) Remember the current position in the undo history as the position at which the document was saved. """ return _stc.StyledTextCtrl_SetSavePoint(*args, **kwargs) def GetStyledText(*args, **kwargs): """ GetStyledText(self, int startPos, int endPos) -> wxMemoryBuffer Retrieve a buffer of cells. """ return _stc.StyledTextCtrl_GetStyledText(*args, **kwargs) def MarkerLineFromHandle(*args, **kwargs): """ MarkerLineFromHandle(self, int handle) -> int Retrieve the line number at which a particular marker is located. """ return _stc.StyledTextCtrl_MarkerLineFromHandle(*args, **kwargs) def MarkerDeleteHandle(*args, **kwargs): """ MarkerDeleteHandle(self, int handle) Delete a marker. """ return _stc.StyledTextCtrl_MarkerDeleteHandle(*args, **kwargs) def GetUndoCollection(*args, **kwargs): """ GetUndoCollection(self) -> bool Is undo history being collected? """ return _stc.StyledTextCtrl_GetUndoCollection(*args, **kwargs) def GetViewWhiteSpace(*args, **kwargs): """ GetViewWhiteSpace(self) -> int Are white space characters currently visible? Returns one of SCWS_* constants. """ return _stc.StyledTextCtrl_GetViewWhiteSpace(*args, **kwargs) def SetViewWhiteSpace(*args, **kwargs): """ SetViewWhiteSpace(self, int viewWS) Make white space characters invisible, always visible or visible outside indentation. """ return _stc.StyledTextCtrl_SetViewWhiteSpace(*args, **kwargs) def PositionFromPoint(*args, **kwargs): """ PositionFromPoint(self, Point pt) -> int Find the position from a point within the window. 
""" return _stc.StyledTextCtrl_PositionFromPoint(*args, **kwargs) def PositionFromPointClose(*args, **kwargs): """ PositionFromPointClose(self, int x, int y) -> int Find the position from a point within the window but return INVALID_POSITION if not close to text. """ return _stc.StyledTextCtrl_PositionFromPointClose(*args, **kwargs) def GotoLine(*args, **kwargs): """ GotoLine(self, int line) Set caret to start of a line and ensure it is visible. """ return _stc.StyledTextCtrl_GotoLine(*args, **kwargs) def GotoPos(*args, **kwargs): """ GotoPos(self, int pos) Set caret to a position and ensure it is visible. """ return _stc.StyledTextCtrl_GotoPos(*args, **kwargs) def SetAnchor(*args, **kwargs): """ SetAnchor(self, int posAnchor) Set the selection anchor to a position. The anchor is the opposite end of the selection from the caret. """ return _stc.StyledTextCtrl_SetAnchor(*args, **kwargs) def GetCurLine(*args, **kwargs): """ GetCurLine(self) -> (text, pos) Retrieve the text of the line containing the caret, and also theindex of the caret on the line. """ return _stc.StyledTextCtrl_GetCurLine(*args, **kwargs) def GetEndStyled(*args, **kwargs): """ GetEndStyled(self) -> int Retrieve the position of the last correctly styled character. """ return _stc.StyledTextCtrl_GetEndStyled(*args, **kwargs) def ConvertEOLs(*args, **kwargs): """ ConvertEOLs(self, int eolMode) Convert all line endings in the document to one mode. """ return _stc.StyledTextCtrl_ConvertEOLs(*args, **kwargs) def GetEOLMode(*args, **kwargs): """ GetEOLMode(self) -> int Retrieve the current end of line mode - one of CRLF, CR, or LF. """ return _stc.StyledTextCtrl_GetEOLMode(*args, **kwargs) def SetEOLMode(*args, **kwargs): """ SetEOLMode(self, int eolMode) Set the current end of line mode. """ return _stc.StyledTextCtrl_SetEOLMode(*args, **kwargs) def StartStyling(*args, **kwargs): """ StartStyling(self, int pos, int mask) Set the current styling position to pos and the styling mask to mask. The styling mask can be used to protect some bits in each styling byte from modification. """ return _stc.StyledTextCtrl_StartStyling(*args, **kwargs) def SetStyling(*args, **kwargs): """ SetStyling(self, int length, int style) Change style from current styling position for length characters to a style and move the current styling position to after this newly styled segment. """ return _stc.StyledTextCtrl_SetStyling(*args, **kwargs) def GetBufferedDraw(*args, **kwargs): """ GetBufferedDraw(self) -> bool Is drawing done first into a buffer or direct to the screen? """ return _stc.StyledTextCtrl_GetBufferedDraw(*args, **kwargs) def SetBufferedDraw(*args, **kwargs): """ SetBufferedDraw(self, bool buffered) If drawing is buffered then each line of text is drawn into a bitmap buffer before drawing it to the screen to avoid flicker. """ return _stc.StyledTextCtrl_SetBufferedDraw(*args, **kwargs) def SetTabWidth(*args, **kwargs): """ SetTabWidth(self, int tabWidth) Change the visible size of a tab to be a multiple of the width of a space character. """ return _stc.StyledTextCtrl_SetTabWidth(*args, **kwargs) def GetTabWidth(*args, **kwargs): """ GetTabWidth(self) -> int Retrieve the visible size of a tab. """ return _stc.StyledTextCtrl_GetTabWidth(*args, **kwargs) def SetCodePage(*args, **kwargs): """ SetCodePage(self, int codePage) Set the code page used to interpret the bytes of the document as characters. 
""" return _stc.StyledTextCtrl_SetCodePage(*args, **kwargs) def MarkerDefine(*args, **kwargs): """ MarkerDefine(self, int markerNumber, int markerSymbol, Colour foreground=wxNullColour, Colour background=wxNullColour) Set the symbol used for a particular marker number, and optionally the fore and background colours. """ return _stc.StyledTextCtrl_MarkerDefine(*args, **kwargs) def MarkerSetForeground(*args, **kwargs): """ MarkerSetForeground(self, int markerNumber, Colour fore) Set the foreground colour used for a particular marker number. """ return _stc.StyledTextCtrl_MarkerSetForeground(*args, **kwargs) def MarkerSetBackground(*args, **kwargs): """ MarkerSetBackground(self, int markerNumber, Colour back) Set the background colour used for a particular marker number. """ return _stc.StyledTextCtrl_MarkerSetBackground(*args, **kwargs) def MarkerSetBackgroundSelected(*args, **kwargs): """MarkerSetBackgroundSelected(self, int markerNumber, Colour back)""" return _stc.StyledTextCtrl_MarkerSetBackgroundSelected(*args, **kwargs) def MarkerEnableHighlight(*args, **kwargs): """MarkerEnableHighlight(self, bool enabled)""" return _stc.StyledTextCtrl_MarkerEnableHighlight(*args, **kwargs) def MarkerAdd(*args, **kwargs): """ MarkerAdd(self, int line, int markerNumber) -> int Add a marker to a line, returning an ID which can be used to find or delete the marker. """ return _stc.StyledTextCtrl_MarkerAdd(*args, **kwargs) def MarkerDelete(*args, **kwargs): """ MarkerDelete(self, int line, int markerNumber) Delete a marker from a line. """ return _stc.StyledTextCtrl_MarkerDelete(*args, **kwargs) def MarkerDeleteAll(*args, **kwargs): """ MarkerDeleteAll(self, int markerNumber) Delete all markers with a particular number from all lines. """ return _stc.StyledTextCtrl_MarkerDeleteAll(*args, **kwargs) def MarkerGet(*args, **kwargs): """ MarkerGet(self, int line) -> int Get a bit mask of all the markers set on a line. """ return _stc.StyledTextCtrl_MarkerGet(*args, **kwargs) def MarkerNext(*args, **kwargs): """ MarkerNext(self, int lineStart, int markerMask) -> int Find the next line after lineStart that includes a marker in mask. """ return _stc.StyledTextCtrl_MarkerNext(*args, **kwargs) def MarkerPrevious(*args, **kwargs): """ MarkerPrevious(self, int lineStart, int markerMask) -> int Find the previous line before lineStart that includes a marker in mask. """ return _stc.StyledTextCtrl_MarkerPrevious(*args, **kwargs) def MarkerDefineBitmap(*args, **kwargs): """ MarkerDefineBitmap(self, int markerNumber, Bitmap bmp) Define a marker from a bitmap """ return _stc.StyledTextCtrl_MarkerDefineBitmap(*args, **kwargs) def MarkerAddSet(*args, **kwargs): """ MarkerAddSet(self, int line, int set) Add a set of markers to a line. """ return _stc.StyledTextCtrl_MarkerAddSet(*args, **kwargs) def MarkerSetAlpha(*args, **kwargs): """ MarkerSetAlpha(self, int markerNumber, int alpha) Set the alpha used for a marker that is drawn in the text area, not the margin. """ return _stc.StyledTextCtrl_MarkerSetAlpha(*args, **kwargs) def SetMarginType(*args, **kwargs): """ SetMarginType(self, int margin, int marginType) Set a margin to be either numeric or symbolic. """ return _stc.StyledTextCtrl_SetMarginType(*args, **kwargs) def GetMarginType(*args, **kwargs): """ GetMarginType(self, int margin) -> int Retrieve the type of a margin. 
""" return _stc.StyledTextCtrl_GetMarginType(*args, **kwargs) def SetMarginWidth(*args, **kwargs): """ SetMarginWidth(self, int margin, int pixelWidth) Set the width of a margin to a width expressed in pixels. """ return _stc.StyledTextCtrl_SetMarginWidth(*args, **kwargs) def GetMarginWidth(*args, **kwargs): """ GetMarginWidth(self, int margin) -> int Retrieve the width of a margin in pixels. """ return _stc.StyledTextCtrl_GetMarginWidth(*args, **kwargs) def SetMarginMask(*args, **kwargs): """ SetMarginMask(self, int margin, int mask) Set a mask that determines which markers are displayed in a margin. """ return _stc.StyledTextCtrl_SetMarginMask(*args, **kwargs) def GetMarginMask(*args, **kwargs): """ GetMarginMask(self, int margin) -> int Retrieve the marker mask of a margin. """ return _stc.StyledTextCtrl_GetMarginMask(*args, **kwargs) def SetMarginSensitive(*args, **kwargs): """ SetMarginSensitive(self, int margin, bool sensitive) Make a margin sensitive or insensitive to mouse clicks. """ return _stc.StyledTextCtrl_SetMarginSensitive(*args, **kwargs) def GetMarginSensitive(*args, **kwargs): """ GetMarginSensitive(self, int margin) -> bool Retrieve the mouse click sensitivity of a margin. """ return _stc.StyledTextCtrl_GetMarginSensitive(*args, **kwargs) def SetMarginCursor(*args, **kwargs): """SetMarginCursor(self, int margin, int cursor)""" return _stc.StyledTextCtrl_SetMarginCursor(*args, **kwargs) def GetMarginCursor(*args, **kwargs): """GetMarginCursor(self, int margin) -> int""" return _stc.StyledTextCtrl_GetMarginCursor(*args, **kwargs) def StyleClearAll(*args, **kwargs): """ StyleClearAll(self) Clear all the styles and make equivalent to the global default style. """ return _stc.StyledTextCtrl_StyleClearAll(*args, **kwargs) def StyleSetForeground(*args, **kwargs): """ StyleSetForeground(self, int style, Colour fore) Set the foreground colour of a style. """ return _stc.StyledTextCtrl_StyleSetForeground(*args, **kwargs) def StyleSetBackground(*args, **kwargs): """ StyleSetBackground(self, int style, Colour back) Set the background colour of a style. """ return _stc.StyledTextCtrl_StyleSetBackground(*args, **kwargs) def StyleSetBold(*args, **kwargs): """ StyleSetBold(self, int style, bool bold) Set a style to be bold or not. """ return _stc.StyledTextCtrl_StyleSetBold(*args, **kwargs) def StyleSetItalic(*args, **kwargs): """ StyleSetItalic(self, int style, bool italic) Set a style to be italic or not. """ return _stc.StyledTextCtrl_StyleSetItalic(*args, **kwargs) def StyleSetSize(*args, **kwargs): """ StyleSetSize(self, int style, int sizePoints) Set the size of characters of a style. """ return _stc.StyledTextCtrl_StyleSetSize(*args, **kwargs) def StyleSetFaceName(*args, **kwargs): """ StyleSetFaceName(self, int style, String fontName) Set the font of a style. """ return _stc.StyledTextCtrl_StyleSetFaceName(*args, **kwargs) def StyleSetEOLFilled(*args, **kwargs): """ StyleSetEOLFilled(self, int style, bool filled) Set a style to have its end of line filled or not. """ return _stc.StyledTextCtrl_StyleSetEOLFilled(*args, **kwargs) def StyleResetDefault(*args, **kwargs): """ StyleResetDefault(self) Reset the default style to its state at startup """ return _stc.StyledTextCtrl_StyleResetDefault(*args, **kwargs) def StyleSetUnderline(*args, **kwargs): """ StyleSetUnderline(self, int style, bool underline) Set a style to be underlined or not. 
""" return _stc.StyledTextCtrl_StyleSetUnderline(*args, **kwargs) def StyleGetForeground(*args, **kwargs): """ StyleGetForeground(self, int style) -> Colour Get the foreground colour of a style. """ return _stc.StyledTextCtrl_StyleGetForeground(*args, **kwargs) def StyleGetBackground(*args, **kwargs): """ StyleGetBackground(self, int style) -> Colour Get the background colour of a style. """ return _stc.StyledTextCtrl_StyleGetBackground(*args, **kwargs) def StyleGetBold(*args, **kwargs): """ StyleGetBold(self, int style) -> bool Get is a style bold or not. """ return _stc.StyledTextCtrl_StyleGetBold(*args, **kwargs) def StyleGetItalic(*args, **kwargs): """ StyleGetItalic(self, int style) -> bool Get is a style italic or not. """ return _stc.StyledTextCtrl_StyleGetItalic(*args, **kwargs) def StyleGetSize(*args, **kwargs): """ StyleGetSize(self, int style) -> int Get the size of characters of a style. """ return _stc.StyledTextCtrl_StyleGetSize(*args, **kwargs) def StyleGetFaceName(*args, **kwargs): """ StyleGetFaceName(self, int style) -> String Get the font facename of a style """ return _stc.StyledTextCtrl_StyleGetFaceName(*args, **kwargs) def StyleGetEOLFilled(*args, **kwargs): """ StyleGetEOLFilled(self, int style) -> bool Get is a style to have its end of line filled or not. """ return _stc.StyledTextCtrl_StyleGetEOLFilled(*args, **kwargs) def StyleGetUnderline(*args, **kwargs): """ StyleGetUnderline(self, int style) -> bool Get is a style underlined or not. """ return _stc.StyledTextCtrl_StyleGetUnderline(*args, **kwargs) def StyleGetCase(*args, **kwargs): """ StyleGetCase(self, int style) -> int Get is a style mixed case, or to force upper or lower case. """ return _stc.StyledTextCtrl_StyleGetCase(*args, **kwargs) def StyleGetCharacterSet(*args, **kwargs): """ StyleGetCharacterSet(self, int style) -> int Get the character set of the font in a style. """ return _stc.StyledTextCtrl_StyleGetCharacterSet(*args, **kwargs) def StyleGetVisible(*args, **kwargs): """ StyleGetVisible(self, int style) -> bool Get is a style visible or not. """ return _stc.StyledTextCtrl_StyleGetVisible(*args, **kwargs) def StyleGetChangeable(*args, **kwargs): """ StyleGetChangeable(self, int style) -> bool Get is a style changeable or not (read only). Experimental feature, currently buggy. """ return _stc.StyledTextCtrl_StyleGetChangeable(*args, **kwargs) def StyleGetHotSpot(*args, **kwargs): """ StyleGetHotSpot(self, int style) -> bool Get is a style a hotspot or not. """ return _stc.StyledTextCtrl_StyleGetHotSpot(*args, **kwargs) def StyleSetCase(*args, **kwargs): """ StyleSetCase(self, int style, int caseForce) Set a style to be mixed case, or to force upper or lower case. """ return _stc.StyledTextCtrl_StyleSetCase(*args, **kwargs) def StyleSetSizeFractional(*args, **kwargs): """StyleSetSizeFractional(self, int style, int caseForce)""" return _stc.StyledTextCtrl_StyleSetSizeFractional(*args, **kwargs) def StyleGetSizeFractional(*args, **kwargs): """StyleGetSizeFractional(self, int style) -> int""" return _stc.StyledTextCtrl_StyleGetSizeFractional(*args, **kwargs) def StyleSetWeight(*args, **kwargs): """StyleSetWeight(self, int style, int weight)""" return _stc.StyledTextCtrl_StyleSetWeight(*args, **kwargs) def StyleGetWeight(*args, **kwargs): """StyleGetWeight(self, int style) -> int""" return _stc.StyledTextCtrl_StyleGetWeight(*args, **kwargs) def StyleSetHotSpot(*args, **kwargs): """ StyleSetHotSpot(self, int style, bool hotspot) Set a style to be a hotspot or not. 
""" return _stc.StyledTextCtrl_StyleSetHotSpot(*args, **kwargs) def SetSelForeground(*args, **kwargs): """ SetSelForeground(self, bool useSetting, Colour fore) Set the foreground colour of the main and additional selections and whether to use this setting. """ return _stc.StyledTextCtrl_SetSelForeground(*args, **kwargs) def SetSelBackground(*args, **kwargs): """ SetSelBackground(self, bool useSetting, Colour back) Set the background colour of the main and additional selections and whether to use this setting. """ return _stc.StyledTextCtrl_SetSelBackground(*args, **kwargs) def GetSelAlpha(*args, **kwargs): """ GetSelAlpha(self) -> int Get the alpha of the selection. """ return _stc.StyledTextCtrl_GetSelAlpha(*args, **kwargs) def SetSelAlpha(*args, **kwargs): """ SetSelAlpha(self, int alpha) Set the alpha of the selection. """ return _stc.StyledTextCtrl_SetSelAlpha(*args, **kwargs) def GetSelEOLFilled(*args, **kwargs): """ GetSelEOLFilled(self) -> bool Is the selection end of line filled? """ return _stc.StyledTextCtrl_GetSelEOLFilled(*args, **kwargs) def SetSelEOLFilled(*args, **kwargs): """ SetSelEOLFilled(self, bool filled) Set the selection to have its end of line filled or not. """ return _stc.StyledTextCtrl_SetSelEOLFilled(*args, **kwargs) def SetCaretForeground(*args, **kwargs): """ SetCaretForeground(self, Colour fore) Set the foreground colour of the caret. """ return _stc.StyledTextCtrl_SetCaretForeground(*args, **kwargs) def CmdKeyAssign(*args, **kwargs): """ CmdKeyAssign(self, int key, int modifiers, int cmd) When key+modifier combination km is pressed perform msg. """ return _stc.StyledTextCtrl_CmdKeyAssign(*args, **kwargs) def CmdKeyClear(*args, **kwargs): """ CmdKeyClear(self, int key, int modifiers) When key+modifier combination km is pressed do nothing. """ return _stc.StyledTextCtrl_CmdKeyClear(*args, **kwargs) def CmdKeyClearAll(*args, **kwargs): """ CmdKeyClearAll(self) Drop all key mappings. """ return _stc.StyledTextCtrl_CmdKeyClearAll(*args, **kwargs) def SetStyleBytes(*args, **kwargs): """ SetStyleBytes(self, int length, char styleBytes) Set the styles for a segment of the document. """ return _stc.StyledTextCtrl_SetStyleBytes(*args, **kwargs) def StyleSetVisible(*args, **kwargs): """ StyleSetVisible(self, int style, bool visible) Set a style to be visible or not. """ return _stc.StyledTextCtrl_StyleSetVisible(*args, **kwargs) def GetCaretPeriod(*args, **kwargs): """ GetCaretPeriod(self) -> int Get the time in milliseconds that the caret is on and off. """ return _stc.StyledTextCtrl_GetCaretPeriod(*args, **kwargs) def SetCaretPeriod(*args, **kwargs): """ SetCaretPeriod(self, int periodMilliseconds) Get the time in milliseconds that the caret is on and off. 0 = steady on. """ return _stc.StyledTextCtrl_SetCaretPeriod(*args, **kwargs) def SetWordChars(*args, **kwargs): """ SetWordChars(self, String characters) Set the set of characters making up words for when moving or selecting by word. First sets defaults like SetCharsDefault. """ return _stc.StyledTextCtrl_SetWordChars(*args, **kwargs) def GetWordChars(*args, **kwargs): """GetWordChars(self) -> String""" return _stc.StyledTextCtrl_GetWordChars(*args, **kwargs) def BeginUndoAction(*args, **kwargs): """ BeginUndoAction(self) Start a sequence of actions that is undone and redone as a unit. May be nested. """ return _stc.StyledTextCtrl_BeginUndoAction(*args, **kwargs) def EndUndoAction(*args, **kwargs): """ EndUndoAction(self) End a sequence of actions that is undone and redone as a unit. 
""" return _stc.StyledTextCtrl_EndUndoAction(*args, **kwargs) def IndicatorSetStyle(*args, **kwargs): """ IndicatorSetStyle(self, int indic, int style) Set an indicator to plain, squiggle or TT. """ return _stc.StyledTextCtrl_IndicatorSetStyle(*args, **kwargs) def IndicatorGetStyle(*args, **kwargs): """ IndicatorGetStyle(self, int indic) -> int Retrieve the style of an indicator. """ return _stc.StyledTextCtrl_IndicatorGetStyle(*args, **kwargs) def IndicatorSetForeground(*args, **kwargs): """ IndicatorSetForeground(self, int indic, Colour fore) Set the foreground colour of an indicator. """ return _stc.StyledTextCtrl_IndicatorSetForeground(*args, **kwargs) def IndicatorGetForeground(*args, **kwargs): """ IndicatorGetForeground(self, int indic) -> Colour Retrieve the foreground colour of an indicator. """ return _stc.StyledTextCtrl_IndicatorGetForeground(*args, **kwargs) def IndicatorSetUnder(*args, **kwargs): """ IndicatorSetUnder(self, int indic, bool under) Set an indicator to draw under text or over(default). """ return _stc.StyledTextCtrl_IndicatorSetUnder(*args, **kwargs) def IndicatorGetUnder(*args, **kwargs): """ IndicatorGetUnder(self, int indic) -> bool Retrieve whether indicator drawn under or over text. """ return _stc.StyledTextCtrl_IndicatorGetUnder(*args, **kwargs) def SetWhitespaceForeground(*args, **kwargs): """ SetWhitespaceForeground(self, bool useSetting, Colour fore) Set the foreground colour of all whitespace and whether to use this setting. """ return _stc.StyledTextCtrl_SetWhitespaceForeground(*args, **kwargs) def SetWhitespaceBackground(*args, **kwargs): """ SetWhitespaceBackground(self, bool useSetting, Colour back) Set the background colour of all whitespace and whether to use this setting. """ return _stc.StyledTextCtrl_SetWhitespaceBackground(*args, **kwargs) def SetWhitespaceSize(*args, **kwargs): """ SetWhitespaceSize(self, int size) Set the size of the dots used to mark space characters. """ return _stc.StyledTextCtrl_SetWhitespaceSize(*args, **kwargs) def GetWhitespaceSize(*args, **kwargs): """ GetWhitespaceSize(self) -> int Get the size of the dots used to mark space characters. """ return _stc.StyledTextCtrl_GetWhitespaceSize(*args, **kwargs) def SetStyleBits(*args, **kwargs): """ SetStyleBits(self, int bits) Divide each styling byte into lexical class bits (default: 5) and indicator bits (default: 3). If a lexer requires more than 32 lexical states, then this is used to expand the possible states. """ return _stc.StyledTextCtrl_SetStyleBits(*args, **kwargs) def GetStyleBits(*args, **kwargs): """ GetStyleBits(self) -> int Retrieve number of bits in style bytes used to hold the lexical state. """ return _stc.StyledTextCtrl_GetStyleBits(*args, **kwargs) def SetLineState(*args, **kwargs): """ SetLineState(self, int line, int state) Used to hold extra styling information for each line. """ return _stc.StyledTextCtrl_SetLineState(*args, **kwargs) def GetLineState(*args, **kwargs): """ GetLineState(self, int line) -> int Retrieve the extra styling information for a line. """ return _stc.StyledTextCtrl_GetLineState(*args, **kwargs) def GetMaxLineState(*args, **kwargs): """ GetMaxLineState(self) -> int Retrieve the last line number that has line state. """ return _stc.StyledTextCtrl_GetMaxLineState(*args, **kwargs) def GetCaretLineVisible(*args, **kwargs): """ GetCaretLineVisible(self) -> bool Is the background of the line containing the caret in a different colour? 
""" return _stc.StyledTextCtrl_GetCaretLineVisible(*args, **kwargs) def SetCaretLineVisible(*args, **kwargs): """ SetCaretLineVisible(self, bool show) Display the background of the line containing the caret in a different colour. """ return _stc.StyledTextCtrl_SetCaretLineVisible(*args, **kwargs) def GetCaretLineBackground(*args, **kwargs): """ GetCaretLineBackground(self) -> Colour Get the colour of the background of the line containing the caret. """ return _stc.StyledTextCtrl_GetCaretLineBackground(*args, **kwargs) def SetCaretLineBackground(*args, **kwargs): """ SetCaretLineBackground(self, Colour back) Set the colour of the background of the line containing the caret. """ return _stc.StyledTextCtrl_SetCaretLineBackground(*args, **kwargs) def StyleSetChangeable(*args, **kwargs): """ StyleSetChangeable(self, int style, bool changeable) Set a style to be changeable or not (read only). Experimental feature, currently buggy. """ return _stc.StyledTextCtrl_StyleSetChangeable(*args, **kwargs) def AutoCompShow(*args, **kwargs): """ AutoCompShow(self, int lenEntered, String itemList) Display a auto-completion list. The lenEntered parameter indicates how many characters before the caret should be used to provide context. """ return _stc.StyledTextCtrl_AutoCompShow(*args, **kwargs) def AutoCompCancel(*args, **kwargs): """ AutoCompCancel(self) Remove the auto-completion list from the screen. """ return _stc.StyledTextCtrl_AutoCompCancel(*args, **kwargs) def AutoCompActive(*args, **kwargs): """ AutoCompActive(self) -> bool Is there an auto-completion list visible? """ return _stc.StyledTextCtrl_AutoCompActive(*args, **kwargs) def AutoCompPosStart(*args, **kwargs): """ AutoCompPosStart(self) -> int Retrieve the position of the caret when the auto-completion list was displayed. """ return _stc.StyledTextCtrl_AutoCompPosStart(*args, **kwargs) def AutoCompComplete(*args, **kwargs): """ AutoCompComplete(self) User has selected an item so remove the list and insert the selection. """ return _stc.StyledTextCtrl_AutoCompComplete(*args, **kwargs) def AutoCompStops(*args, **kwargs): """ AutoCompStops(self, String characterSet) Define a set of character that when typed cancel the auto-completion list. """ return _stc.StyledTextCtrl_AutoCompStops(*args, **kwargs) def AutoCompSetSeparator(*args, **kwargs): """ AutoCompSetSeparator(self, int separatorCharacter) Change the separator character in the string setting up an auto-completion list. Default is space but can be changed if items contain space. """ return _stc.StyledTextCtrl_AutoCompSetSeparator(*args, **kwargs) def AutoCompGetSeparator(*args, **kwargs): """ AutoCompGetSeparator(self) -> int Retrieve the auto-completion list separator character. """ return _stc.StyledTextCtrl_AutoCompGetSeparator(*args, **kwargs) def AutoCompSelect(*args, **kwargs): """ AutoCompSelect(self, String text) Select the item in the auto-completion list that starts with a string. """ return _stc.StyledTextCtrl_AutoCompSelect(*args, **kwargs) def AutoCompSetCancelAtStart(*args, **kwargs): """ AutoCompSetCancelAtStart(self, bool cancel) Should the auto-completion list be cancelled if the user backspaces to a position before where the box was created. """ return _stc.StyledTextCtrl_AutoCompSetCancelAtStart(*args, **kwargs) def AutoCompGetCancelAtStart(*args, **kwargs): """ AutoCompGetCancelAtStart(self) -> bool Retrieve whether auto-completion cancelled by backspacing before start. 
""" return _stc.StyledTextCtrl_AutoCompGetCancelAtStart(*args, **kwargs) def AutoCompSetFillUps(*args, **kwargs): """ AutoCompSetFillUps(self, String characterSet) Define a set of characters that when typed will cause the autocompletion to choose the selected item. """ return _stc.StyledTextCtrl_AutoCompSetFillUps(*args, **kwargs) def AutoCompSetChooseSingle(*args, **kwargs): """ AutoCompSetChooseSingle(self, bool chooseSingle) Should a single item auto-completion list automatically choose the item. """ return _stc.StyledTextCtrl_AutoCompSetChooseSingle(*args, **kwargs) def AutoCompGetChooseSingle(*args, **kwargs): """ AutoCompGetChooseSingle(self) -> bool Retrieve whether a single item auto-completion list automatically choose the item. """ return _stc.StyledTextCtrl_AutoCompGetChooseSingle(*args, **kwargs) def AutoCompSetIgnoreCase(*args, **kwargs): """ AutoCompSetIgnoreCase(self, bool ignoreCase) Set whether case is significant when performing auto-completion searches. """ return _stc.StyledTextCtrl_AutoCompSetIgnoreCase(*args, **kwargs) def AutoCompGetIgnoreCase(*args, **kwargs): """ AutoCompGetIgnoreCase(self) -> bool Retrieve state of ignore case flag. """ return _stc.StyledTextCtrl_AutoCompGetIgnoreCase(*args, **kwargs) def UserListShow(*args, **kwargs): """ UserListShow(self, int listType, String itemList) Display a list of strings and send notification when user chooses one. """ return _stc.StyledTextCtrl_UserListShow(*args, **kwargs) def AutoCompSetAutoHide(*args, **kwargs): """ AutoCompSetAutoHide(self, bool autoHide) Set whether or not autocompletion is hidden automatically when nothing matches. """ return _stc.StyledTextCtrl_AutoCompSetAutoHide(*args, **kwargs) def AutoCompGetAutoHide(*args, **kwargs): """ AutoCompGetAutoHide(self) -> bool Retrieve whether or not autocompletion is hidden automatically when nothing matches. """ return _stc.StyledTextCtrl_AutoCompGetAutoHide(*args, **kwargs) def AutoCompSetDropRestOfWord(*args, **kwargs): """ AutoCompSetDropRestOfWord(self, bool dropRestOfWord) Set whether or not autocompletion deletes any word characters after the inserted text upon completion. """ return _stc.StyledTextCtrl_AutoCompSetDropRestOfWord(*args, **kwargs) def AutoCompGetDropRestOfWord(*args, **kwargs): """ AutoCompGetDropRestOfWord(self) -> bool Retrieve whether or not autocompletion deletes any word characters after the inserted text upon completion. """ return _stc.StyledTextCtrl_AutoCompGetDropRestOfWord(*args, **kwargs) def RegisterImage(*args, **kwargs): """ RegisterImage(self, int type, Bitmap bmp) Register an image for use in autocompletion lists. """ return _stc.StyledTextCtrl_RegisterImage(*args, **kwargs) def ClearRegisteredImages(*args, **kwargs): """ ClearRegisteredImages(self) Clear all the registered images. """ return _stc.StyledTextCtrl_ClearRegisteredImages(*args, **kwargs) def AutoCompGetTypeSeparator(*args, **kwargs): """ AutoCompGetTypeSeparator(self) -> int Retrieve the auto-completion list type-separator character. """ return _stc.StyledTextCtrl_AutoCompGetTypeSeparator(*args, **kwargs) def AutoCompSetTypeSeparator(*args, **kwargs): """ AutoCompSetTypeSeparator(self, int separatorCharacter) Change the type-separator character in the string setting up an auto-completion list. Default is '?' but can be changed if items contain '?'. 
""" return _stc.StyledTextCtrl_AutoCompSetTypeSeparator(*args, **kwargs) def AutoCompSetMaxWidth(*args, **kwargs): """ AutoCompSetMaxWidth(self, int characterCount) Set the maximum width, in characters, of auto-completion and user lists. Set to 0 to autosize to fit longest item, which is the default. """ return _stc.StyledTextCtrl_AutoCompSetMaxWidth(*args, **kwargs) def AutoCompGetMaxWidth(*args, **kwargs): """ AutoCompGetMaxWidth(self) -> int Get the maximum width, in characters, of auto-completion and user lists. """ return _stc.StyledTextCtrl_AutoCompGetMaxWidth(*args, **kwargs) def AutoCompSetMaxHeight(*args, **kwargs): """ AutoCompSetMaxHeight(self, int rowCount) Set the maximum height, in rows, of auto-completion and user lists. The default is 5 rows. """ return _stc.StyledTextCtrl_AutoCompSetMaxHeight(*args, **kwargs) def AutoCompGetMaxHeight(*args, **kwargs): """ AutoCompGetMaxHeight(self) -> int Set the maximum height, in rows, of auto-completion and user lists. """ return _stc.StyledTextCtrl_AutoCompGetMaxHeight(*args, **kwargs) def SetIndent(*args, **kwargs): """ SetIndent(self, int indentSize) Set the number of spaces used for one level of indentation. """ return _stc.StyledTextCtrl_SetIndent(*args, **kwargs) def GetIndent(*args, **kwargs): """ GetIndent(self) -> int Retrieve indentation size. """ return _stc.StyledTextCtrl_GetIndent(*args, **kwargs) def SetUseTabs(*args, **kwargs): """ SetUseTabs(self, bool useTabs) Indentation will only use space characters if useTabs is false, otherwise it will use a combination of tabs and spaces. """ return _stc.StyledTextCtrl_SetUseTabs(*args, **kwargs) def GetUseTabs(*args, **kwargs): """ GetUseTabs(self) -> bool Retrieve whether tabs will be used in indentation. """ return _stc.StyledTextCtrl_GetUseTabs(*args, **kwargs) def SetLineIndentation(*args, **kwargs): """ SetLineIndentation(self, int line, int indentSize) Change the indentation of a line to a number of columns. """ return _stc.StyledTextCtrl_SetLineIndentation(*args, **kwargs) def GetLineIndentation(*args, **kwargs): """ GetLineIndentation(self, int line) -> int Retrieve the number of columns that a line is indented. """ return _stc.StyledTextCtrl_GetLineIndentation(*args, **kwargs) def GetLineIndentPosition(*args, **kwargs): """ GetLineIndentPosition(self, int line) -> int Retrieve the position before the first non indentation character on a line. """ return _stc.StyledTextCtrl_GetLineIndentPosition(*args, **kwargs) def GetColumn(*args, **kwargs): """ GetColumn(self, int pos) -> int Retrieve the column number of a position, taking tab width into account. """ return _stc.StyledTextCtrl_GetColumn(*args, **kwargs) def CountCharacters(*args, **kwargs): """CountCharacters(self, int startPos, int endPos) -> int""" return _stc.StyledTextCtrl_CountCharacters(*args, **kwargs) def SetUseHorizontalScrollBar(*args, **kwargs): """ SetUseHorizontalScrollBar(self, bool show) Show or hide the horizontal scroll bar. """ return _stc.StyledTextCtrl_SetUseHorizontalScrollBar(*args, **kwargs) def GetUseHorizontalScrollBar(*args, **kwargs): """ GetUseHorizontalScrollBar(self) -> bool Is the horizontal scroll bar visible? """ return _stc.StyledTextCtrl_GetUseHorizontalScrollBar(*args, **kwargs) def SetIndentationGuides(*args, **kwargs): """ SetIndentationGuides(self, int indentView) Show or hide indentation guides. """ return _stc.StyledTextCtrl_SetIndentationGuides(*args, **kwargs) def GetIndentationGuides(*args, **kwargs): """ GetIndentationGuides(self) -> int Are the indentation guides visible? 
""" return _stc.StyledTextCtrl_GetIndentationGuides(*args, **kwargs) def SetHighlightGuide(*args, **kwargs): """ SetHighlightGuide(self, int column) Set the highlighted indentation guide column. 0 = no highlighted guide. """ return _stc.StyledTextCtrl_SetHighlightGuide(*args, **kwargs) def GetHighlightGuide(*args, **kwargs): """ GetHighlightGuide(self) -> int Get the highlighted indentation guide column. """ return _stc.StyledTextCtrl_GetHighlightGuide(*args, **kwargs) def GetLineEndPosition(*args, **kwargs): """ GetLineEndPosition(self, int line) -> int Get the position after the last visible characters on a line. """ return _stc.StyledTextCtrl_GetLineEndPosition(*args, **kwargs) def GetCodePage(*args, **kwargs): """ GetCodePage(self) -> int Get the code page used to interpret the bytes of the document as characters. """ return _stc.StyledTextCtrl_GetCodePage(*args, **kwargs) def GetCaretForeground(*args, **kwargs): """ GetCaretForeground(self) -> Colour Get the foreground colour of the caret. """ return _stc.StyledTextCtrl_GetCaretForeground(*args, **kwargs) def GetReadOnly(*args, **kwargs): """ GetReadOnly(self) -> bool In read-only mode? """ return _stc.StyledTextCtrl_GetReadOnly(*args, **kwargs) def SetCurrentPos(*args, **kwargs): """ SetCurrentPos(self, int pos) Sets the position of the caret. """ return _stc.StyledTextCtrl_SetCurrentPos(*args, **kwargs) def SetSelectionStart(*args, **kwargs): """ SetSelectionStart(self, int pos) Sets the position that starts the selection - this becomes the anchor. """ return _stc.StyledTextCtrl_SetSelectionStart(*args, **kwargs) def GetSelectionStart(*args, **kwargs): """ GetSelectionStart(self) -> int Returns the position at the start of the selection. """ return _stc.StyledTextCtrl_GetSelectionStart(*args, **kwargs) def SetSelectionEnd(*args, **kwargs): """ SetSelectionEnd(self, int pos) Sets the position that ends the selection - this becomes the currentPosition. """ return _stc.StyledTextCtrl_SetSelectionEnd(*args, **kwargs) def GetSelectionEnd(*args, **kwargs): """ GetSelectionEnd(self) -> int Returns the position at the end of the selection. """ return _stc.StyledTextCtrl_GetSelectionEnd(*args, **kwargs) def SetEmptySelection(*args, **kwargs): """SetEmptySelection(self, int pos)""" return _stc.StyledTextCtrl_SetEmptySelection(*args, **kwargs) def SetPrintMagnification(*args, **kwargs): """ SetPrintMagnification(self, int magnification) Sets the print magnification added to the point size of each style for printing. """ return _stc.StyledTextCtrl_SetPrintMagnification(*args, **kwargs) def GetPrintMagnification(*args, **kwargs): """ GetPrintMagnification(self) -> int Returns the print magnification. """ return _stc.StyledTextCtrl_GetPrintMagnification(*args, **kwargs) def SetPrintColourMode(*args, **kwargs): """ SetPrintColourMode(self, int mode) Modify colours when printing for clearer printed text. """ return _stc.StyledTextCtrl_SetPrintColourMode(*args, **kwargs) def GetPrintColourMode(*args, **kwargs): """ GetPrintColourMode(self) -> int Returns the print colour mode. """ return _stc.StyledTextCtrl_GetPrintColourMode(*args, **kwargs) def FindText(*args, **kwargs): """ FindText(self, int minPos, int maxPos, String text, int flags=0) -> int Find some text in the document. 
""" return _stc.StyledTextCtrl_FindText(*args, **kwargs) def FormatRange(*args, **kwargs): """ FormatRange(self, bool doDraw, int startPos, int endPos, DC draw, DC target, Rect renderRect, Rect pageRect) -> int On Windows, will draw the document into a display context such as a printer. """ return _stc.StyledTextCtrl_FormatRange(*args, **kwargs) def GetFirstVisibleLine(*args, **kwargs): """ GetFirstVisibleLine(self) -> int Retrieve the display line at the top of the display. """ return _stc.StyledTextCtrl_GetFirstVisibleLine(*args, **kwargs) def GetLine(*args, **kwargs): """ GetLine(self, int line) -> String Retrieve the contents of a line. """ return _stc.StyledTextCtrl_GetLine(*args, **kwargs) def GetLineCount(*args, **kwargs): """ GetLineCount(self) -> int Returns the number of lines in the document. There is always at least one. """ return _stc.StyledTextCtrl_GetLineCount(*args, **kwargs) def SetMarginLeft(*args, **kwargs): """ SetMarginLeft(self, int pixelWidth) Sets the size in pixels of the left margin. """ return _stc.StyledTextCtrl_SetMarginLeft(*args, **kwargs) def GetMarginLeft(*args, **kwargs): """ GetMarginLeft(self) -> int Returns the size in pixels of the left margin. """ return _stc.StyledTextCtrl_GetMarginLeft(*args, **kwargs) def SetMarginRight(*args, **kwargs): """ SetMarginRight(self, int pixelWidth) Sets the size in pixels of the right margin. """ return _stc.StyledTextCtrl_SetMarginRight(*args, **kwargs) def GetMarginRight(*args, **kwargs): """ GetMarginRight(self) -> int Returns the size in pixels of the right margin. """ return _stc.StyledTextCtrl_GetMarginRight(*args, **kwargs) def GetModify(*args, **kwargs): """ GetModify(self) -> bool Is the document different from when it was last saved? """ return _stc.StyledTextCtrl_GetModify(*args, **kwargs) def GetSelectedText(*args, **kwargs): """ GetSelectedText(self) -> String Retrieve the selected text. """ return _stc.StyledTextCtrl_GetSelectedText(*args, **kwargs) def GetTextRange(*args, **kwargs): """ GetTextRange(self, int startPos, int endPos) -> String Retrieve a range of text. """ return _stc.StyledTextCtrl_GetTextRange(*args, **kwargs) def HideSelection(*args, **kwargs): """ HideSelection(self, bool normal) Draw the selection in normal style or with selection highlighted. """ return _stc.StyledTextCtrl_HideSelection(*args, **kwargs) def LineFromPosition(*args, **kwargs): """ LineFromPosition(self, int pos) -> int Retrieve the line containing a position. """ return _stc.StyledTextCtrl_LineFromPosition(*args, **kwargs) def PositionFromLine(*args, **kwargs): """ PositionFromLine(self, int line) -> int Retrieve the position at the start of a line. """ return _stc.StyledTextCtrl_PositionFromLine(*args, **kwargs) def LineScroll(*args, **kwargs): """ LineScroll(self, int columns, int lines) Scroll horizontally and vertically. """ return _stc.StyledTextCtrl_LineScroll(*args, **kwargs) def EnsureCaretVisible(*args, **kwargs): """ EnsureCaretVisible(self) Ensure the caret is visible. """ return _stc.StyledTextCtrl_EnsureCaretVisible(*args, **kwargs) def ReplaceSelection(*args, **kwargs): """ ReplaceSelection(self, String text) Replace the selected text with the argument text. """ return _stc.StyledTextCtrl_ReplaceSelection(*args, **kwargs) def SetReadOnly(*args, **kwargs): """ SetReadOnly(self, bool readOnly) Set to read only or read write. """ return _stc.StyledTextCtrl_SetReadOnly(*args, **kwargs) def EmptyUndoBuffer(*args, **kwargs): """ EmptyUndoBuffer(self) Delete the undo history. 
""" return _stc.StyledTextCtrl_EmptyUndoBuffer(*args, **kwargs) def SetText(*args, **kwargs): """ SetText(self, String text) Replace the contents of the document with the argument text. """ return _stc.StyledTextCtrl_SetText(*args, **kwargs) def GetText(*args, **kwargs): """ GetText(self) -> String Retrieve all the text in the document. """ return _stc.StyledTextCtrl_GetText(*args, **kwargs) def GetTextLength(*args, **kwargs): """ GetTextLength(self) -> int Retrieve the number of characters in the document. """ return _stc.StyledTextCtrl_GetTextLength(*args, **kwargs) def SetOvertype(*args, **kwargs): """ SetOvertype(self, bool overtype) Set to overtype (true) or insert mode. """ return _stc.StyledTextCtrl_SetOvertype(*args, **kwargs) def GetOvertype(*args, **kwargs): """ GetOvertype(self) -> bool Returns true if overtype mode is active otherwise false is returned. """ return _stc.StyledTextCtrl_GetOvertype(*args, **kwargs) def SetCaretWidth(*args, **kwargs): """ SetCaretWidth(self, int pixelWidth) Set the width of the insert mode caret. """ return _stc.StyledTextCtrl_SetCaretWidth(*args, **kwargs) def GetCaretWidth(*args, **kwargs): """ GetCaretWidth(self) -> int Returns the width of the insert mode caret. """ return _stc.StyledTextCtrl_GetCaretWidth(*args, **kwargs) def SetTargetStart(*args, **kwargs): """ SetTargetStart(self, int pos) Sets the position that starts the target which is used for updating the document without affecting the scroll position. """ return _stc.StyledTextCtrl_SetTargetStart(*args, **kwargs) def GetTargetStart(*args, **kwargs): """ GetTargetStart(self) -> int Get the position that starts the target. """ return _stc.StyledTextCtrl_GetTargetStart(*args, **kwargs) def SetTargetEnd(*args, **kwargs): """ SetTargetEnd(self, int pos) Sets the position that ends the target which is used for updating the document without affecting the scroll position. """ return _stc.StyledTextCtrl_SetTargetEnd(*args, **kwargs) def GetTargetEnd(*args, **kwargs): """ GetTargetEnd(self) -> int Get the position that ends the target. """ return _stc.StyledTextCtrl_GetTargetEnd(*args, **kwargs) def ReplaceTarget(*args, **kwargs): """ ReplaceTarget(self, String text) -> int Replace the target text with the argument text. Text is counted so it can contain NULs. Returns the length of the replacement text. """ return _stc.StyledTextCtrl_ReplaceTarget(*args, **kwargs) def ReplaceTargetRE(*args, **kwargs): """ ReplaceTargetRE(self, String text) -> int Replace the target text with the argument text after \d processing. Text is counted so it can contain NULs. Looks for \d where d is between 1 and 9 and replaces these with the strings matched in the last search operation which were surrounded by \( and \). Returns the length of the replacement text including any change caused by processing the \d patterns. """ return _stc.StyledTextCtrl_ReplaceTargetRE(*args, **kwargs) def SearchInTarget(*args, **kwargs): """ SearchInTarget(self, String text) -> int Search for a counted string in the target and set the target to the found range. Text is counted so it can contain NULs. Returns length of range or -1 for failure in which case target is not moved. """ return _stc.StyledTextCtrl_SearchInTarget(*args, **kwargs) def SetSearchFlags(*args, **kwargs): """ SetSearchFlags(self, int flags) Set the search flags used by SearchInTarget. """ return _stc.StyledTextCtrl_SetSearchFlags(*args, **kwargs) def GetSearchFlags(*args, **kwargs): """ GetSearchFlags(self) -> int Get the search flags used by SearchInTarget. 
""" return _stc.StyledTextCtrl_GetSearchFlags(*args, **kwargs) def CallTipShow(*args, **kwargs): """ CallTipShow(self, int pos, String definition) Show a call tip containing a definition near position pos. """ return _stc.StyledTextCtrl_CallTipShow(*args, **kwargs) def CallTipCancel(*args, **kwargs): """ CallTipCancel(self) Remove the call tip from the screen. """ return _stc.StyledTextCtrl_CallTipCancel(*args, **kwargs) def CallTipActive(*args, **kwargs): """ CallTipActive(self) -> bool Is there an active call tip? """ return _stc.StyledTextCtrl_CallTipActive(*args, **kwargs) def CallTipPosAtStart(*args, **kwargs): """ CallTipPosAtStart(self) -> int Retrieve the position where the caret was before displaying the call tip. """ return _stc.StyledTextCtrl_CallTipPosAtStart(*args, **kwargs) def CallTipSetHighlight(*args, **kwargs): """ CallTipSetHighlight(self, int start, int end) Highlight a segment of the definition. """ return _stc.StyledTextCtrl_CallTipSetHighlight(*args, **kwargs) def CallTipSetBackground(*args, **kwargs): """ CallTipSetBackground(self, Colour back) Set the background colour for the call tip. """ return _stc.StyledTextCtrl_CallTipSetBackground(*args, **kwargs) def CallTipSetForeground(*args, **kwargs): """ CallTipSetForeground(self, Colour fore) Set the foreground colour for the call tip. """ return _stc.StyledTextCtrl_CallTipSetForeground(*args, **kwargs) def CallTipSetForegroundHighlight(*args, **kwargs): """ CallTipSetForegroundHighlight(self, Colour fore) Set the foreground colour for the highlighted part of the call tip. """ return _stc.StyledTextCtrl_CallTipSetForegroundHighlight(*args, **kwargs) def CallTipUseStyle(*args, **kwargs): """ CallTipUseStyle(self, int tabSize) Enable use of STYLE_CALLTIP and set call tip tab size in pixels. """ return _stc.StyledTextCtrl_CallTipUseStyle(*args, **kwargs) def CallTipSetPosition(*args, **kwargs): """CallTipSetPosition(self, bool above)""" return _stc.StyledTextCtrl_CallTipSetPosition(*args, **kwargs) def VisibleFromDocLine(*args, **kwargs): """ VisibleFromDocLine(self, int line) -> int Find the display line of a document line taking hidden lines into account. """ return _stc.StyledTextCtrl_VisibleFromDocLine(*args, **kwargs) def DocLineFromVisible(*args, **kwargs): """ DocLineFromVisible(self, int lineDisplay) -> int Find the document line of a display line taking hidden lines into account. """ return _stc.StyledTextCtrl_DocLineFromVisible(*args, **kwargs) def WrapCount(*args, **kwargs): """ WrapCount(self, int line) -> int The number of display lines needed to wrap a document line """ return _stc.StyledTextCtrl_WrapCount(*args, **kwargs) def SetFoldLevel(*args, **kwargs): """ SetFoldLevel(self, int line, int level) Set the fold level of a line. This encodes an integer level along with flags indicating whether the line is a header and whether it is effectively white space. """ return _stc.StyledTextCtrl_SetFoldLevel(*args, **kwargs) def GetFoldLevel(*args, **kwargs): """ GetFoldLevel(self, int line) -> int Retrieve the fold level of a line. """ return _stc.StyledTextCtrl_GetFoldLevel(*args, **kwargs) def GetLastChild(*args, **kwargs): """ GetLastChild(self, int line, int level) -> int Find the last child line of a header line. """ return _stc.StyledTextCtrl_GetLastChild(*args, **kwargs) def GetFoldParent(*args, **kwargs): """ GetFoldParent(self, int line) -> int Find the parent line of a child line. 
""" return _stc.StyledTextCtrl_GetFoldParent(*args, **kwargs) def ShowLines(*args, **kwargs): """ ShowLines(self, int lineStart, int lineEnd) Make a range of lines visible. """ return _stc.StyledTextCtrl_ShowLines(*args, **kwargs) def HideLines(*args, **kwargs): """ HideLines(self, int lineStart, int lineEnd) Make a range of lines invisible. """ return _stc.StyledTextCtrl_HideLines(*args, **kwargs) def GetLineVisible(*args, **kwargs): """ GetLineVisible(self, int line) -> bool Is a line visible? """ return _stc.StyledTextCtrl_GetLineVisible(*args, **kwargs) def GetAllLinesVisible(*args, **kwargs): """GetAllLinesVisible(self) -> bool""" return _stc.StyledTextCtrl_GetAllLinesVisible(*args, **kwargs) def SetFoldExpanded(*args, **kwargs): """ SetFoldExpanded(self, int line, bool expanded) Show the children of a header line. """ return _stc.StyledTextCtrl_SetFoldExpanded(*args, **kwargs) def GetFoldExpanded(*args, **kwargs): """ GetFoldExpanded(self, int line) -> bool Is a header line expanded? """ return _stc.StyledTextCtrl_GetFoldExpanded(*args, **kwargs) def ToggleFold(*args, **kwargs): """ ToggleFold(self, int line) Switch a header line between expanded and contracted. """ return _stc.StyledTextCtrl_ToggleFold(*args, **kwargs) def EnsureVisible(*args, **kwargs): """ EnsureVisible(self, int line) Ensure a particular line is visible by expanding any header line hiding it. """ return _stc.StyledTextCtrl_EnsureVisible(*args, **kwargs) def SetFoldFlags(*args, **kwargs): """ SetFoldFlags(self, int flags) Set some style options for folding. """ return _stc.StyledTextCtrl_SetFoldFlags(*args, **kwargs) def EnsureVisibleEnforcePolicy(*args, **kwargs): """ EnsureVisibleEnforcePolicy(self, int line) Ensure a particular line is visible by expanding any header line hiding it. Use the currently set visibility policy to determine which range to display. """ return _stc.StyledTextCtrl_EnsureVisibleEnforcePolicy(*args, **kwargs) def SetTabIndents(*args, **kwargs): """ SetTabIndents(self, bool tabIndents) Sets whether a tab pressed when caret is within indentation indents. """ return _stc.StyledTextCtrl_SetTabIndents(*args, **kwargs) def GetTabIndents(*args, **kwargs): """ GetTabIndents(self) -> bool Does a tab pressed when caret is within indentation indent? """ return _stc.StyledTextCtrl_GetTabIndents(*args, **kwargs) def SetBackSpaceUnIndents(*args, **kwargs): """ SetBackSpaceUnIndents(self, bool bsUnIndents) Sets whether a backspace pressed when caret is within indentation unindents. """ return _stc.StyledTextCtrl_SetBackSpaceUnIndents(*args, **kwargs) def GetBackSpaceUnIndents(*args, **kwargs): """ GetBackSpaceUnIndents(self) -> bool Does a backspace pressed when caret is within indentation unindent? """ return _stc.StyledTextCtrl_GetBackSpaceUnIndents(*args, **kwargs) def SetMouseDwellTime(*args, **kwargs): """ SetMouseDwellTime(self, int periodMilliseconds) Sets the time the mouse must sit still to generate a mouse dwell event. """ return _stc.StyledTextCtrl_SetMouseDwellTime(*args, **kwargs) def GetMouseDwellTime(*args, **kwargs): """ GetMouseDwellTime(self) -> int Retrieve the time the mouse must sit still to generate a mouse dwell event. """ return _stc.StyledTextCtrl_GetMouseDwellTime(*args, **kwargs) def WordStartPosition(*args, **kwargs): """ WordStartPosition(self, int pos, bool onlyWordCharacters) -> int Get position of start of word. 
""" return _stc.StyledTextCtrl_WordStartPosition(*args, **kwargs) def WordEndPosition(*args, **kwargs): """ WordEndPosition(self, int pos, bool onlyWordCharacters) -> int Get position of end of word. """ return _stc.StyledTextCtrl_WordEndPosition(*args, **kwargs) def SetWrapMode(*args, **kwargs): """ SetWrapMode(self, int mode) Sets whether text is word wrapped. """ return _stc.StyledTextCtrl_SetWrapMode(*args, **kwargs) def GetWrapMode(*args, **kwargs): """ GetWrapMode(self) -> int Retrieve whether text is word wrapped. """ return _stc.StyledTextCtrl_GetWrapMode(*args, **kwargs) def SetWrapVisualFlags(*args, **kwargs): """ SetWrapVisualFlags(self, int wrapVisualFlags) Set the display mode of visual flags for wrapped lines. """ return _stc.StyledTextCtrl_SetWrapVisualFlags(*args, **kwargs) def GetWrapVisualFlags(*args, **kwargs): """ GetWrapVisualFlags(self) -> int Retrive the display mode of visual flags for wrapped lines. """ return _stc.StyledTextCtrl_GetWrapVisualFlags(*args, **kwargs) def SetWrapVisualFlagsLocation(*args, **kwargs): """ SetWrapVisualFlagsLocation(self, int wrapVisualFlagsLocation) Set the location of visual flags for wrapped lines. """ return _stc.StyledTextCtrl_SetWrapVisualFlagsLocation(*args, **kwargs) def GetWrapVisualFlagsLocation(*args, **kwargs): """ GetWrapVisualFlagsLocation(self) -> int Retrive the location of visual flags for wrapped lines. """ return _stc.StyledTextCtrl_GetWrapVisualFlagsLocation(*args, **kwargs) def SetWrapStartIndent(*args, **kwargs): """ SetWrapStartIndent(self, int indent) Set the start indent for wrapped lines. """ return _stc.StyledTextCtrl_SetWrapStartIndent(*args, **kwargs) def GetWrapStartIndent(*args, **kwargs): """ GetWrapStartIndent(self) -> int Retrive the start indent for wrapped lines. """ return _stc.StyledTextCtrl_GetWrapStartIndent(*args, **kwargs) def SetWrapIndentMode(*args, **kwargs): """ SetWrapIndentMode(self, int mode) Sets how wrapped sublines are placed. Default is fixed. """ return _stc.StyledTextCtrl_SetWrapIndentMode(*args, **kwargs) def GetWrapIndentMode(*args, **kwargs): """ GetWrapIndentMode(self) -> int Retrieve how wrapped sublines are placed. Default is fixed. """ return _stc.StyledTextCtrl_GetWrapIndentMode(*args, **kwargs) def SetLayoutCache(*args, **kwargs): """ SetLayoutCache(self, int mode) Sets the degree of caching of layout information. """ return _stc.StyledTextCtrl_SetLayoutCache(*args, **kwargs) def GetLayoutCache(*args, **kwargs): """ GetLayoutCache(self) -> int Retrieve the degree of caching of layout information. """ return _stc.StyledTextCtrl_GetLayoutCache(*args, **kwargs) def SetScrollWidth(*args, **kwargs): """ SetScrollWidth(self, int pixelWidth) Sets the document width assumed for scrolling. """ return _stc.StyledTextCtrl_SetScrollWidth(*args, **kwargs) def GetScrollWidth(*args, **kwargs): """ GetScrollWidth(self) -> int Retrieve the document width assumed for scrolling. """ return _stc.StyledTextCtrl_GetScrollWidth(*args, **kwargs) def SetScrollWidthTracking(*args, **kwargs): """ SetScrollWidthTracking(self, bool tracking) Sets whether the maximum width line displayed is used to set scroll width. """ return _stc.StyledTextCtrl_SetScrollWidthTracking(*args, **kwargs) def GetScrollWidthTracking(*args, **kwargs): """ GetScrollWidthTracking(self) -> bool Retrieve whether the scroll width tracks wide lines. 
""" return _stc.StyledTextCtrl_GetScrollWidthTracking(*args, **kwargs) def TextWidth(*args, **kwargs): """ TextWidth(self, int style, String text) -> int Measure the pixel width of some text in a particular style. NUL terminated text argument. Does not handle tab or control characters. """ return _stc.StyledTextCtrl_TextWidth(*args, **kwargs) def SetEndAtLastLine(*args, **kwargs): """ SetEndAtLastLine(self, bool endAtLastLine) Sets the scroll range so that maximum scroll position has the last line at the bottom of the view (default). Setting this to false allows scrolling one page below the last line. """ return _stc.StyledTextCtrl_SetEndAtLastLine(*args, **kwargs) def GetEndAtLastLine(*args, **kwargs): """ GetEndAtLastLine(self) -> bool Retrieve whether the maximum scroll position has the last line at the bottom of the view. """ return _stc.StyledTextCtrl_GetEndAtLastLine(*args, **kwargs) def TextHeight(*args, **kwargs): """ TextHeight(self, int line) -> int Retrieve the height of a particular line of text in pixels. """ return _stc.StyledTextCtrl_TextHeight(*args, **kwargs) def SetUseVerticalScrollBar(*args, **kwargs): """ SetUseVerticalScrollBar(self, bool show) Show or hide the vertical scroll bar. """ return _stc.StyledTextCtrl_SetUseVerticalScrollBar(*args, **kwargs) def GetUseVerticalScrollBar(*args, **kwargs): """ GetUseVerticalScrollBar(self) -> bool Is the vertical scroll bar visible? """ return _stc.StyledTextCtrl_GetUseVerticalScrollBar(*args, **kwargs) def GetTwoPhaseDraw(*args, **kwargs): """ GetTwoPhaseDraw(self) -> bool Is drawing done in two phases with backgrounds drawn before foregrounds? """ return _stc.StyledTextCtrl_GetTwoPhaseDraw(*args, **kwargs) def SetTwoPhaseDraw(*args, **kwargs): """ SetTwoPhaseDraw(self, bool twoPhase) In twoPhaseDraw mode, drawing is performed in two phases, first the background and then the foreground. This avoids chopping off characters that overlap the next run. """ return _stc.StyledTextCtrl_SetTwoPhaseDraw(*args, **kwargs) def SetFirstVisibleLine(*args, **kwargs): """ SetFirstVisibleLine(self, int lineDisplay) Scroll so that a display line is at the top of the display. """ return _stc.StyledTextCtrl_SetFirstVisibleLine(*args, **kwargs) def SetMultiPaste(*args, **kwargs): """SetMultiPaste(self, int multiPaste)""" return _stc.StyledTextCtrl_SetMultiPaste(*args, **kwargs) def GetMultiPaste(*args, **kwargs): """GetMultiPaste(self) -> int""" return _stc.StyledTextCtrl_GetMultiPaste(*args, **kwargs) def GetTag(*args, **kwargs): """GetTag(self, int tagNumber) -> String""" return _stc.StyledTextCtrl_GetTag(*args, **kwargs) def TargetFromSelection(*args, **kwargs): """ TargetFromSelection(self) Make the target range start and end be the same as the selection range start and end. """ return _stc.StyledTextCtrl_TargetFromSelection(*args, **kwargs) def LinesJoin(*args, **kwargs): """ LinesJoin(self) Join the lines in the target. """ return _stc.StyledTextCtrl_LinesJoin(*args, **kwargs) def LinesSplit(*args, **kwargs): """ LinesSplit(self, int pixelWidth) Split the lines in the target into lines that are less wide than pixelWidth where possible. 
""" return _stc.StyledTextCtrl_LinesSplit(*args, **kwargs) def SetFoldMarginColour(*args, **kwargs): """ SetFoldMarginColour(self, bool useSetting, Colour back) Set the colours used as a chequerboard pattern in the fold margin """ return _stc.StyledTextCtrl_SetFoldMarginColour(*args, **kwargs) def SetFoldMarginHiColour(*args, **kwargs): """SetFoldMarginHiColour(self, bool useSetting, Colour fore)""" return _stc.StyledTextCtrl_SetFoldMarginHiColour(*args, **kwargs) def LineDown(*args, **kwargs): """ LineDown(self) Move caret down one line. """ return _stc.StyledTextCtrl_LineDown(*args, **kwargs) def LineDownExtend(*args, **kwargs): """ LineDownExtend(self) Move caret down one line extending selection to new caret position. """ return _stc.StyledTextCtrl_LineDownExtend(*args, **kwargs) def LineUp(*args, **kwargs): """ LineUp(self) Move caret up one line. """ return _stc.StyledTextCtrl_LineUp(*args, **kwargs) def LineUpExtend(*args, **kwargs): """ LineUpExtend(self) Move caret up one line extending selection to new caret position. """ return _stc.StyledTextCtrl_LineUpExtend(*args, **kwargs) def CharLeft(*args, **kwargs): """ CharLeft(self) Move caret left one character. """ return _stc.StyledTextCtrl_CharLeft(*args, **kwargs) def CharLeftExtend(*args, **kwargs): """ CharLeftExtend(self) Move caret left one character extending selection to new caret position. """ return _stc.StyledTextCtrl_CharLeftExtend(*args, **kwargs) def CharRight(*args, **kwargs): """ CharRight(self) Move caret right one character. """ return _stc.StyledTextCtrl_CharRight(*args, **kwargs) def CharRightExtend(*args, **kwargs): """ CharRightExtend(self) Move caret right one character extending selection to new caret position. """ return _stc.StyledTextCtrl_CharRightExtend(*args, **kwargs) def WordLeft(*args, **kwargs): """ WordLeft(self) Move caret left one word. """ return _stc.StyledTextCtrl_WordLeft(*args, **kwargs) def WordLeftExtend(*args, **kwargs): """ WordLeftExtend(self) Move caret left one word extending selection to new caret position. """ return _stc.StyledTextCtrl_WordLeftExtend(*args, **kwargs) def WordRight(*args, **kwargs): """ WordRight(self) Move caret right one word. """ return _stc.StyledTextCtrl_WordRight(*args, **kwargs) def WordRightExtend(*args, **kwargs): """ WordRightExtend(self) Move caret right one word extending selection to new caret position. """ return _stc.StyledTextCtrl_WordRightExtend(*args, **kwargs) def Home(*args, **kwargs): """ Home(self) Move caret to first position on line. """ return _stc.StyledTextCtrl_Home(*args, **kwargs) def HomeExtend(*args, **kwargs): """ HomeExtend(self) Move caret to first position on line extending selection to new caret position. """ return _stc.StyledTextCtrl_HomeExtend(*args, **kwargs) def LineEnd(*args, **kwargs): """ LineEnd(self) Move caret to last position on line. """ return _stc.StyledTextCtrl_LineEnd(*args, **kwargs) def LineEndExtend(*args, **kwargs): """ LineEndExtend(self) Move caret to last position on line extending selection to new caret position. """ return _stc.StyledTextCtrl_LineEndExtend(*args, **kwargs) def DocumentStart(*args, **kwargs): """ DocumentStart(self) Move caret to first position in document. """ return _stc.StyledTextCtrl_DocumentStart(*args, **kwargs) def DocumentStartExtend(*args, **kwargs): """ DocumentStartExtend(self) Move caret to first position in document extending selection to new caret position. 
""" return _stc.StyledTextCtrl_DocumentStartExtend(*args, **kwargs) def DocumentEnd(*args, **kwargs): """ DocumentEnd(self) Move caret to last position in document. """ return _stc.StyledTextCtrl_DocumentEnd(*args, **kwargs) def DocumentEndExtend(*args, **kwargs): """ DocumentEndExtend(self) Move caret to last position in document extending selection to new caret position. """ return _stc.StyledTextCtrl_DocumentEndExtend(*args, **kwargs) def PageUp(*args, **kwargs): """ PageUp(self) Move caret one page up. """ return _stc.StyledTextCtrl_PageUp(*args, **kwargs) def PageUpExtend(*args, **kwargs): """ PageUpExtend(self) Move caret one page up extending selection to new caret position. """ return _stc.StyledTextCtrl_PageUpExtend(*args, **kwargs) def PageDown(*args, **kwargs): """ PageDown(self) Move caret one page down. """ return _stc.StyledTextCtrl_PageDown(*args, **kwargs) def PageDownExtend(*args, **kwargs): """ PageDownExtend(self) Move caret one page down extending selection to new caret position. """ return _stc.StyledTextCtrl_PageDownExtend(*args, **kwargs) def EditToggleOvertype(*args, **kwargs): """ EditToggleOvertype(self) Switch from insert to overtype mode or the reverse. """ return _stc.StyledTextCtrl_EditToggleOvertype(*args, **kwargs) def Cancel(*args, **kwargs): """ Cancel(self) Cancel any modes such as call tip or auto-completion list display. """ return _stc.StyledTextCtrl_Cancel(*args, **kwargs) def DeleteBack(*args, **kwargs): """ DeleteBack(self) Delete the selection or if no selection, the character before the caret. """ return _stc.StyledTextCtrl_DeleteBack(*args, **kwargs) def Tab(*args, **kwargs): """ Tab(self) If selection is empty or all on one line replace the selection with a tab character. If more than one line selected, indent the lines. """ return _stc.StyledTextCtrl_Tab(*args, **kwargs) def BackTab(*args, **kwargs): """ BackTab(self) Dedent the selected lines. """ return _stc.StyledTextCtrl_BackTab(*args, **kwargs) def NewLine(*args, **kwargs): """ NewLine(self) Insert a new line, may use a CRLF, CR or LF depending on EOL mode. """ return _stc.StyledTextCtrl_NewLine(*args, **kwargs) def FormFeed(*args, **kwargs): """ FormFeed(self) Insert a Form Feed character. """ return _stc.StyledTextCtrl_FormFeed(*args, **kwargs) def VCHome(*args, **kwargs): """ VCHome(self) Move caret to before first visible character on line. If already there move to first character on line. """ return _stc.StyledTextCtrl_VCHome(*args, **kwargs) def VCHomeExtend(*args, **kwargs): """ VCHomeExtend(self) Like VCHome but extending selection to new caret position. """ return _stc.StyledTextCtrl_VCHomeExtend(*args, **kwargs) def ZoomIn(*args, **kwargs): """ ZoomIn(self) Magnify the displayed text by increasing the sizes by 1 point. """ return _stc.StyledTextCtrl_ZoomIn(*args, **kwargs) def ZoomOut(*args, **kwargs): """ ZoomOut(self) Make the displayed text smaller by decreasing the sizes by 1 point. """ return _stc.StyledTextCtrl_ZoomOut(*args, **kwargs) def DelWordLeft(*args, **kwargs): """ DelWordLeft(self) Delete the word to the left of the caret. """ return _stc.StyledTextCtrl_DelWordLeft(*args, **kwargs) def DelWordRight(*args, **kwargs): """ DelWordRight(self) Delete the word to the right of the caret. """ return _stc.StyledTextCtrl_DelWordRight(*args, **kwargs) def DelWordRightEnd(*args, **kwargs): """ DelWordRightEnd(self) Delete the word to the right of the caret, but not the trailing non-word characters. 
""" return _stc.StyledTextCtrl_DelWordRightEnd(*args, **kwargs) def LineCut(*args, **kwargs): """ LineCut(self) Cut the line containing the caret. """ return _stc.StyledTextCtrl_LineCut(*args, **kwargs) def LineDelete(*args, **kwargs): """ LineDelete(self) Delete the line containing the caret. """ return _stc.StyledTextCtrl_LineDelete(*args, **kwargs) def LineTranspose(*args, **kwargs): """ LineTranspose(self) Switch the current line with the previous. """ return _stc.StyledTextCtrl_LineTranspose(*args, **kwargs) def LineDuplicate(*args, **kwargs): """ LineDuplicate(self) Duplicate the current line. """ return _stc.StyledTextCtrl_LineDuplicate(*args, **kwargs) def LowerCase(*args, **kwargs): """ LowerCase(self) Transform the selection to lower case. """ return _stc.StyledTextCtrl_LowerCase(*args, **kwargs) def UpperCase(*args, **kwargs): """ UpperCase(self) Transform the selection to upper case. """ return _stc.StyledTextCtrl_UpperCase(*args, **kwargs) def LineScrollDown(*args, **kwargs): """ LineScrollDown(self) Scroll the document down, keeping the caret visible. """ return _stc.StyledTextCtrl_LineScrollDown(*args, **kwargs) def LineScrollUp(*args, **kwargs): """ LineScrollUp(self) Scroll the document up, keeping the caret visible. """ return _stc.StyledTextCtrl_LineScrollUp(*args, **kwargs) def DeleteBackNotLine(*args, **kwargs): """ DeleteBackNotLine(self) Delete the selection or if no selection, the character before the caret. Will not delete the character before at the start of a line. """ return _stc.StyledTextCtrl_DeleteBackNotLine(*args, **kwargs) def HomeDisplay(*args, **kwargs): """ HomeDisplay(self) Move caret to first position on display line. """ return _stc.StyledTextCtrl_HomeDisplay(*args, **kwargs) def HomeDisplayExtend(*args, **kwargs): """ HomeDisplayExtend(self) Move caret to first position on display line extending selection to new caret position. """ return _stc.StyledTextCtrl_HomeDisplayExtend(*args, **kwargs) def LineEndDisplay(*args, **kwargs): """ LineEndDisplay(self) Move caret to last position on display line. """ return _stc.StyledTextCtrl_LineEndDisplay(*args, **kwargs) def LineEndDisplayExtend(*args, **kwargs): """ LineEndDisplayExtend(self) Move caret to last position on display line extending selection to new caret position. """ return _stc.StyledTextCtrl_LineEndDisplayExtend(*args, **kwargs) def HomeWrap(*args, **kwargs): """ HomeWrap(self) These are like their namesakes Home(Extend)?, LineEnd(Extend)?, VCHome(Extend)? except they behave differently when word-wrap is enabled: They go first to the start / end of the display line, like (Home|LineEnd)Display The difference is that, the cursor is already at the point, it goes on to the start or end of the document line, as appropriate for (Home|LineEnd|VCHome)(Extend)?. """ return _stc.StyledTextCtrl_HomeWrap(*args, **kwargs) def HomeWrapExtend(*args, **kwargs): """HomeWrapExtend(self)""" return _stc.StyledTextCtrl_HomeWrapExtend(*args, **kwargs) def LineEndWrap(*args, **kwargs): """LineEndWrap(self)""" return _stc.StyledTextCtrl_LineEndWrap(*args, **kwargs) def LineEndWrapExtend(*args, **kwargs): """LineEndWrapExtend(self)""" return _stc.StyledTextCtrl_LineEndWrapExtend(*args, **kwargs) def VCHomeWrap(*args, **kwargs): """VCHomeWrap(self)""" return _stc.StyledTextCtrl_VCHomeWrap(*args, **kwargs) def VCHomeWrapExtend(*args, **kwargs): """VCHomeWrapExtend(self)""" return _stc.StyledTextCtrl_VCHomeWrapExtend(*args, **kwargs) def LineCopy(*args, **kwargs): """ LineCopy(self) Copy the line containing the caret. 
""" return _stc.StyledTextCtrl_LineCopy(*args, **kwargs) def MoveCaretInsideView(*args, **kwargs): """ MoveCaretInsideView(self) Move the caret inside current view if it's not there already. """ return _stc.StyledTextCtrl_MoveCaretInsideView(*args, **kwargs) def LineLength(*args, **kwargs): """ LineLength(self, int line) -> int How many characters are on a line, including end of line characters? """ return _stc.StyledTextCtrl_LineLength(*args, **kwargs) def BraceHighlight(*args, **kwargs): """ BraceHighlight(self, int pos1, int pos2) Highlight the characters at two positions. """ return _stc.StyledTextCtrl_BraceHighlight(*args, **kwargs) def BraceHighlightIndicator(*args, **kwargs): """BraceHighlightIndicator(self, bool useBraceHighlightIndicator, int indicator)""" return _stc.StyledTextCtrl_BraceHighlightIndicator(*args, **kwargs) def BraceBadLight(*args, **kwargs): """ BraceBadLight(self, int pos) Highlight the character at a position indicating there is no matching brace. """ return _stc.StyledTextCtrl_BraceBadLight(*args, **kwargs) def BraceBadLightIndicator(*args, **kwargs): """BraceBadLightIndicator(self, bool useBraceBadLightIndicator, int indicator)""" return _stc.StyledTextCtrl_BraceBadLightIndicator(*args, **kwargs) def BraceMatch(*args, **kwargs): """ BraceMatch(self, int pos) -> int Find the position of a matching brace or INVALID_POSITION if no match. """ return _stc.StyledTextCtrl_BraceMatch(*args, **kwargs) def GetViewEOL(*args, **kwargs): """ GetViewEOL(self) -> bool Are the end of line characters visible? """ return _stc.StyledTextCtrl_GetViewEOL(*args, **kwargs) def SetViewEOL(*args, **kwargs): """ SetViewEOL(self, bool visible) Make the end of line characters visible or invisible. """ return _stc.StyledTextCtrl_SetViewEOL(*args, **kwargs) def GetDocPointer(*args, **kwargs): """ GetDocPointer(self) -> void Retrieve a pointer to the document object. """ return _stc.StyledTextCtrl_GetDocPointer(*args, **kwargs) def SetDocPointer(*args, **kwargs): """ SetDocPointer(self, void docPointer) Change the document object used. """ return _stc.StyledTextCtrl_SetDocPointer(*args, **kwargs) def SetModEventMask(*args, **kwargs): """ SetModEventMask(self, int mask) Set which document modification events are sent to the container. """ return _stc.StyledTextCtrl_SetModEventMask(*args, **kwargs) def GetEdgeColumn(*args, **kwargs): """ GetEdgeColumn(self) -> int Retrieve the column number which text should be kept within. """ return _stc.StyledTextCtrl_GetEdgeColumn(*args, **kwargs) def SetEdgeColumn(*args, **kwargs): """ SetEdgeColumn(self, int column) Set the column number of the edge. If text goes past the edge then it is highlighted. """ return _stc.StyledTextCtrl_SetEdgeColumn(*args, **kwargs) def GetEdgeMode(*args, **kwargs): """ GetEdgeMode(self) -> int Retrieve the edge highlight mode. """ return _stc.StyledTextCtrl_GetEdgeMode(*args, **kwargs) def SetEdgeMode(*args, **kwargs): """ SetEdgeMode(self, int mode) The edge may be displayed by a line (EDGE_LINE) or by highlighting text that goes beyond it (EDGE_BACKGROUND) or not displayed at all (EDGE_NONE). """ return _stc.StyledTextCtrl_SetEdgeMode(*args, **kwargs) def GetEdgeColour(*args, **kwargs): """ GetEdgeColour(self) -> Colour Retrieve the colour used in edge indication. """ return _stc.StyledTextCtrl_GetEdgeColour(*args, **kwargs) def SetEdgeColour(*args, **kwargs): """ SetEdgeColour(self, Colour edgeColour) Change the colour used in edge indication. 
""" return _stc.StyledTextCtrl_SetEdgeColour(*args, **kwargs) def SearchAnchor(*args, **kwargs): """ SearchAnchor(self) Sets the current caret position to be the search anchor. """ return _stc.StyledTextCtrl_SearchAnchor(*args, **kwargs) def SearchNext(*args, **kwargs): """ SearchNext(self, int flags, String text) -> int Find some text starting at the search anchor. Does not ensure the selection is visible. """ return _stc.StyledTextCtrl_SearchNext(*args, **kwargs) def SearchPrev(*args, **kwargs): """ SearchPrev(self, int flags, String text) -> int Find some text starting at the search anchor and moving backwards. Does not ensure the selection is visible. """ return _stc.StyledTextCtrl_SearchPrev(*args, **kwargs) def LinesOnScreen(*args, **kwargs): """ LinesOnScreen(self) -> int Retrieves the number of lines completely visible. """ return _stc.StyledTextCtrl_LinesOnScreen(*args, **kwargs) def UsePopUp(*args, **kwargs): """ UsePopUp(self, bool allowPopUp) Set whether a pop up menu is displayed automatically when the user presses the wrong mouse button. """ return _stc.StyledTextCtrl_UsePopUp(*args, **kwargs) def SelectionIsRectangle(*args, **kwargs): """ SelectionIsRectangle(self) -> bool Is the selection rectangular? The alternative is the more common stream selection. """ return _stc.StyledTextCtrl_SelectionIsRectangle(*args, **kwargs) def SetZoom(*args, **kwargs): """ SetZoom(self, int zoom) Set the zoom level. This number of points is added to the size of all fonts. It may be positive to magnify or negative to reduce. """ return _stc.StyledTextCtrl_SetZoom(*args, **kwargs) def GetZoom(*args, **kwargs): """ GetZoom(self) -> int Retrieve the zoom level. """ return _stc.StyledTextCtrl_GetZoom(*args, **kwargs) def CreateDocument(*args, **kwargs): """ CreateDocument(self) -> void Create a new document object. Starts with reference count of 1 and not selected into editor. """ return _stc.StyledTextCtrl_CreateDocument(*args, **kwargs) def AddRefDocument(*args, **kwargs): """ AddRefDocument(self, void docPointer) Extend life of document. """ return _stc.StyledTextCtrl_AddRefDocument(*args, **kwargs) def ReleaseDocument(*args, **kwargs): """ ReleaseDocument(self, void docPointer) Release a reference to the document, deleting document if it fades to black. """ return _stc.StyledTextCtrl_ReleaseDocument(*args, **kwargs) def GetModEventMask(*args, **kwargs): """ GetModEventMask(self) -> int Get which document modification events are sent to the container. """ return _stc.StyledTextCtrl_GetModEventMask(*args, **kwargs) def SetSTCFocus(*args, **kwargs): """ SetSTCFocus(self, bool focus) Change internal focus flag. """ return _stc.StyledTextCtrl_SetSTCFocus(*args, **kwargs) def GetSTCFocus(*args, **kwargs): """ GetSTCFocus(self) -> bool Get internal focus flag. """ return _stc.StyledTextCtrl_GetSTCFocus(*args, **kwargs) def SetStatus(*args, **kwargs): """ SetStatus(self, int statusCode) Change error status - 0 = OK. """ return _stc.StyledTextCtrl_SetStatus(*args, **kwargs) def GetStatus(*args, **kwargs): """ GetStatus(self) -> int Get error status. """ return _stc.StyledTextCtrl_GetStatus(*args, **kwargs) def SetMouseDownCaptures(*args, **kwargs): """ SetMouseDownCaptures(self, bool captures) Set whether the mouse is captured when its button is pressed. """ return _stc.StyledTextCtrl_SetMouseDownCaptures(*args, **kwargs) def GetMouseDownCaptures(*args, **kwargs): """ GetMouseDownCaptures(self) -> bool Get whether mouse gets captured. 
""" return _stc.StyledTextCtrl_GetMouseDownCaptures(*args, **kwargs) def SetSTCCursor(*args, **kwargs): """ SetSTCCursor(self, int cursorType) Sets the cursor to one of the SC_CURSOR* values. """ return _stc.StyledTextCtrl_SetSTCCursor(*args, **kwargs) def GetSTCCursor(*args, **kwargs): """ GetSTCCursor(self) -> int Get cursor type. """ return _stc.StyledTextCtrl_GetSTCCursor(*args, **kwargs) def SetControlCharSymbol(*args, **kwargs): """ SetControlCharSymbol(self, int symbol) Change the way control characters are displayed: If symbol is < 32, keep the drawn way, else, use the given character. """ return _stc.StyledTextCtrl_SetControlCharSymbol(*args, **kwargs) def GetControlCharSymbol(*args, **kwargs): """ GetControlCharSymbol(self) -> int Get the way control characters are displayed. """ return _stc.StyledTextCtrl_GetControlCharSymbol(*args, **kwargs) def WordPartLeft(*args, **kwargs): """ WordPartLeft(self) Move to the previous change in capitalisation. """ return _stc.StyledTextCtrl_WordPartLeft(*args, **kwargs) def WordPartLeftExtend(*args, **kwargs): """ WordPartLeftExtend(self) Move to the previous change in capitalisation extending selection to new caret position. """ return _stc.StyledTextCtrl_WordPartLeftExtend(*args, **kwargs) def WordPartRight(*args, **kwargs): """ WordPartRight(self) Move to the change next in capitalisation. """ return _stc.StyledTextCtrl_WordPartRight(*args, **kwargs) def WordPartRightExtend(*args, **kwargs): """ WordPartRightExtend(self) Move to the next change in capitalisation extending selection to new caret position. """ return _stc.StyledTextCtrl_WordPartRightExtend(*args, **kwargs) def SetVisiblePolicy(*args, **kwargs): """ SetVisiblePolicy(self, int visiblePolicy, int visibleSlop) Set the way the display area is determined when a particular line is to be moved to by Find, FindNext, GotoLine, etc. """ return _stc.StyledTextCtrl_SetVisiblePolicy(*args, **kwargs) def DelLineLeft(*args, **kwargs): """ DelLineLeft(self) Delete back from the current position to the start of the line. """ return _stc.StyledTextCtrl_DelLineLeft(*args, **kwargs) def DelLineRight(*args, **kwargs): """ DelLineRight(self) Delete forwards from the current position to the end of the line. """ return _stc.StyledTextCtrl_DelLineRight(*args, **kwargs) def SetXOffset(*args, **kwargs): """ SetXOffset(self, int newOffset) Get and Set the xOffset (ie, horizonal scroll position). """ return _stc.StyledTextCtrl_SetXOffset(*args, **kwargs) def GetXOffset(*args, **kwargs): """GetXOffset(self) -> int""" return _stc.StyledTextCtrl_GetXOffset(*args, **kwargs) def ChooseCaretX(*args, **kwargs): """ ChooseCaretX(self) Set the last x chosen value to be the caret x position. """ return _stc.StyledTextCtrl_ChooseCaretX(*args, **kwargs) def SetXCaretPolicy(*args, **kwargs): """ SetXCaretPolicy(self, int caretPolicy, int caretSlop) Set the way the caret is kept visible when going sideway. The exclusion zone is given in pixels. """ return _stc.StyledTextCtrl_SetXCaretPolicy(*args, **kwargs) def SetYCaretPolicy(*args, **kwargs): """ SetYCaretPolicy(self, int caretPolicy, int caretSlop) Set the way the line the caret is on is kept visible. The exclusion zone is given in lines. """ return _stc.StyledTextCtrl_SetYCaretPolicy(*args, **kwargs) def SetPrintWrapMode(*args, **kwargs): """ SetPrintWrapMode(self, int mode) Set printing to line wrapped (SC_WRAP_WORD) or not line wrapped (SC_WRAP_NONE). 
""" return _stc.StyledTextCtrl_SetPrintWrapMode(*args, **kwargs) def GetPrintWrapMode(*args, **kwargs): """ GetPrintWrapMode(self) -> int Is printing line wrapped? """ return _stc.StyledTextCtrl_GetPrintWrapMode(*args, **kwargs) def SetHotspotActiveForeground(*args, **kwargs): """ SetHotspotActiveForeground(self, bool useSetting, Colour fore) Set a fore colour for active hotspots. """ return _stc.StyledTextCtrl_SetHotspotActiveForeground(*args, **kwargs) def GetHotspotActiveForeground(*args, **kwargs): """ GetHotspotActiveForeground(self) -> Colour Get the fore colour for active hotspots. """ return _stc.StyledTextCtrl_GetHotspotActiveForeground(*args, **kwargs) def SetHotspotActiveBackground(*args, **kwargs): """ SetHotspotActiveBackground(self, bool useSetting, Colour back) Set a back colour for active hotspots. """ return _stc.StyledTextCtrl_SetHotspotActiveBackground(*args, **kwargs) def GetHotspotActiveBackground(*args, **kwargs): """ GetHotspotActiveBackground(self) -> Colour Get the back colour for active hotspots. """ return _stc.StyledTextCtrl_GetHotspotActiveBackground(*args, **kwargs) def SetHotspotActiveUnderline(*args, **kwargs): """ SetHotspotActiveUnderline(self, bool underline) Enable / Disable underlining active hotspots. """ return _stc.StyledTextCtrl_SetHotspotActiveUnderline(*args, **kwargs) def GetHotspotActiveUnderline(*args, **kwargs): """ GetHotspotActiveUnderline(self) -> bool Get whether underlining for active hotspots. """ return _stc.StyledTextCtrl_GetHotspotActiveUnderline(*args, **kwargs) def SetHotspotSingleLine(*args, **kwargs): """ SetHotspotSingleLine(self, bool singleLine) Limit hotspots to single line so hotspots on two lines don't merge. """ return _stc.StyledTextCtrl_SetHotspotSingleLine(*args, **kwargs) def GetHotspotSingleLine(*args, **kwargs): """ GetHotspotSingleLine(self) -> bool Get the HotspotSingleLine property """ return _stc.StyledTextCtrl_GetHotspotSingleLine(*args, **kwargs) def ParaDown(*args, **kwargs): """ ParaDown(self) Move caret between paragraphs (delimited by empty lines). """ return _stc.StyledTextCtrl_ParaDown(*args, **kwargs) def ParaDownExtend(*args, **kwargs): """ParaDownExtend(self)""" return _stc.StyledTextCtrl_ParaDownExtend(*args, **kwargs) def ParaUp(*args, **kwargs): """ParaUp(self)""" return _stc.StyledTextCtrl_ParaUp(*args, **kwargs) def ParaUpExtend(*args, **kwargs): """ParaUpExtend(self)""" return _stc.StyledTextCtrl_ParaUpExtend(*args, **kwargs) def PositionBefore(*args, **kwargs): """ PositionBefore(self, int pos) -> int Given a valid document position, return the previous position taking code page into account. Returns 0 if passed 0. """ return _stc.StyledTextCtrl_PositionBefore(*args, **kwargs) def PositionAfter(*args, **kwargs): """ PositionAfter(self, int pos) -> int Given a valid document position, return the next position taking code page into account. Maximum value returned is the last position in the document. """ return _stc.StyledTextCtrl_PositionAfter(*args, **kwargs) def CopyRange(*args, **kwargs): """ CopyRange(self, int start, int end) Copy a range of text to the clipboard. Positions are clipped into the document. """ return _stc.StyledTextCtrl_CopyRange(*args, **kwargs) def CopyText(*args, **kwargs): """ CopyText(self, int length, String text) Copy argument text to the clipboard. 
""" return _stc.StyledTextCtrl_CopyText(*args, **kwargs) def SetSelectionMode(*args, **kwargs): """ SetSelectionMode(self, int mode) Set the selection mode to stream (SC_SEL_STREAM) or rectangular (SC_SEL_RECTANGLE/SC_SEL_THIN) or by lines (SC_SEL_LINES). """ return _stc.StyledTextCtrl_SetSelectionMode(*args, **kwargs) def GetSelectionMode(*args, **kwargs): """ GetSelectionMode(self) -> int Get the mode of the current selection. """ return _stc.StyledTextCtrl_GetSelectionMode(*args, **kwargs) def GetLineSelStartPosition(*args, **kwargs): """ GetLineSelStartPosition(self, int line) -> int Retrieve the position of the start of the selection at the given line (INVALID_POSITION if no selection on this line). """ return _stc.StyledTextCtrl_GetLineSelStartPosition(*args, **kwargs) def GetLineSelEndPosition(*args, **kwargs): """ GetLineSelEndPosition(self, int line) -> int Retrieve the position of the end of the selection at the given line (INVALID_POSITION if no selection on this line). """ return _stc.StyledTextCtrl_GetLineSelEndPosition(*args, **kwargs) def LineDownRectExtend(*args, **kwargs): """ LineDownRectExtend(self) Move caret down one line, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_LineDownRectExtend(*args, **kwargs) def LineUpRectExtend(*args, **kwargs): """ LineUpRectExtend(self) Move caret up one line, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_LineUpRectExtend(*args, **kwargs) def CharLeftRectExtend(*args, **kwargs): """ CharLeftRectExtend(self) Move caret left one character, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_CharLeftRectExtend(*args, **kwargs) def CharRightRectExtend(*args, **kwargs): """ CharRightRectExtend(self) Move caret right one character, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_CharRightRectExtend(*args, **kwargs) def HomeRectExtend(*args, **kwargs): """ HomeRectExtend(self) Move caret to first position on line, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_HomeRectExtend(*args, **kwargs) def VCHomeRectExtend(*args, **kwargs): """ VCHomeRectExtend(self) Move caret to before first visible character on line. If already there move to first character on line. In either case, extend rectangular selection to new caret position. """ return _stc.StyledTextCtrl_VCHomeRectExtend(*args, **kwargs) def LineEndRectExtend(*args, **kwargs): """ LineEndRectExtend(self) Move caret to last position on line, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_LineEndRectExtend(*args, **kwargs) def PageUpRectExtend(*args, **kwargs): """ PageUpRectExtend(self) Move caret one page up, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_PageUpRectExtend(*args, **kwargs) def PageDownRectExtend(*args, **kwargs): """ PageDownRectExtend(self) Move caret one page down, extending rectangular selection to new caret position. """ return _stc.StyledTextCtrl_PageDownRectExtend(*args, **kwargs) def StutteredPageUp(*args, **kwargs): """ StutteredPageUp(self) Move caret to top of page, or one page up if already at top of page. """ return _stc.StyledTextCtrl_StutteredPageUp(*args, **kwargs) def StutteredPageUpExtend(*args, **kwargs): """ StutteredPageUpExtend(self) Move caret to top of page, or one page up if already at top of page, extending selection to new caret position. 
""" return _stc.StyledTextCtrl_StutteredPageUpExtend(*args, **kwargs) def StutteredPageDown(*args, **kwargs): """ StutteredPageDown(self) Move caret to bottom of page, or one page down if already at bottom of page. """ return _stc.StyledTextCtrl_StutteredPageDown(*args, **kwargs) def StutteredPageDownExtend(*args, **kwargs): """ StutteredPageDownExtend(self) Move caret to bottom of page, or one page down if already at bottom of page, extending selection to new caret position. """ return _stc.StyledTextCtrl_StutteredPageDownExtend(*args, **kwargs) def WordLeftEnd(*args, **kwargs): """ WordLeftEnd(self) Move caret left one word, position cursor at end of word. """ return _stc.StyledTextCtrl_WordLeftEnd(*args, **kwargs) def WordLeftEndExtend(*args, **kwargs): """ WordLeftEndExtend(self) Move caret left one word, position cursor at end of word, extending selection to new caret position. """ return _stc.StyledTextCtrl_WordLeftEndExtend(*args, **kwargs) def WordRightEnd(*args, **kwargs): """ WordRightEnd(self) Move caret right one word, position cursor at end of word. """ return _stc.StyledTextCtrl_WordRightEnd(*args, **kwargs) def WordRightEndExtend(*args, **kwargs): """ WordRightEndExtend(self) Move caret right one word, position cursor at end of word, extending selection to new caret position. """ return _stc.StyledTextCtrl_WordRightEndExtend(*args, **kwargs) def SetWhitespaceChars(*args, **kwargs): """ SetWhitespaceChars(self, String characters) Set the set of characters making up whitespace for when moving or selecting by word. Should be called after SetWordChars. """ return _stc.StyledTextCtrl_SetWhitespaceChars(*args, **kwargs) def GetWhitespaceChars(*args, **kwargs): """GetWhitespaceChars(self) -> String""" return _stc.StyledTextCtrl_GetWhitespaceChars(*args, **kwargs) def SetPunctuationChars(*args, **kwargs): """SetPunctuationChars(self, String characters)""" return _stc.StyledTextCtrl_SetPunctuationChars(*args, **kwargs) def GetPunctuationChars(*args, **kwargs): """GetPunctuationChars(self) -> String""" return _stc.StyledTextCtrl_GetPunctuationChars(*args, **kwargs) def SetCharsDefault(*args, **kwargs): """ SetCharsDefault(self) Reset the set of characters for whitespace and word characters to the defaults. """ return _stc.StyledTextCtrl_SetCharsDefault(*args, **kwargs) def AutoCompGetCurrent(*args, **kwargs): """ AutoCompGetCurrent(self) -> int Get currently selected item position in the auto-completion list """ return _stc.StyledTextCtrl_AutoCompGetCurrent(*args, **kwargs) def AutoCompSetCaseInsensitiveBehaviour(*args, **kwargs): """AutoCompSetCaseInsensitiveBehaviour(self, int behaviour)""" return _stc.StyledTextCtrl_AutoCompSetCaseInsensitiveBehaviour(*args, **kwargs) def AutoCompGetCaseInsensitiveBehaviour(*args, **kwargs): """AutoCompGetCaseInsensitiveBehaviour(self) -> int""" return _stc.StyledTextCtrl_AutoCompGetCaseInsensitiveBehaviour(*args, **kwargs) def Allocate(*args, **kwargs): """ Allocate(self, int bytes) Enlarge the document to a particular size of text bytes. """ return _stc.StyledTextCtrl_Allocate(*args, **kwargs) def FindColumn(*args, **kwargs): """ FindColumn(self, int line, int column) -> int Find the position of a column on a line taking into account tabs and multi-byte characters. If beyond end of line, return line end position. """ return _stc.StyledTextCtrl_FindColumn(*args, **kwargs) def GetCaretSticky(*args, **kwargs): """ GetCaretSticky(self) -> int Can the caret preferred x position only be changed by explicit movement commands? 
""" return _stc.StyledTextCtrl_GetCaretSticky(*args, **kwargs) def SetCaretSticky(*args, **kwargs): """ SetCaretSticky(self, int useCaretStickyBehaviour) Stop the caret preferred x position changing when the user types. """ return _stc.StyledTextCtrl_SetCaretSticky(*args, **kwargs) def ToggleCaretSticky(*args, **kwargs): """ ToggleCaretSticky(self) Switch between sticky and non-sticky: meant to be bound to a key. """ return _stc.StyledTextCtrl_ToggleCaretSticky(*args, **kwargs) def SetPasteConvertEndings(*args, **kwargs): """ SetPasteConvertEndings(self, bool convert) Enable/Disable convert-on-paste for line endings """ return _stc.StyledTextCtrl_SetPasteConvertEndings(*args, **kwargs) def GetPasteConvertEndings(*args, **kwargs): """ GetPasteConvertEndings(self) -> bool Get convert-on-paste setting """ return _stc.StyledTextCtrl_GetPasteConvertEndings(*args, **kwargs) def SelectionDuplicate(*args, **kwargs): """ SelectionDuplicate(self) Duplicate the selection. If selection empty duplicate the line containing the caret. """ return _stc.StyledTextCtrl_SelectionDuplicate(*args, **kwargs) def SetCaretLineBackAlpha(*args, **kwargs): """ SetCaretLineBackAlpha(self, int alpha) Set background alpha of the caret line. """ return _stc.StyledTextCtrl_SetCaretLineBackAlpha(*args, **kwargs) def GetCaretLineBackAlpha(*args, **kwargs): """ GetCaretLineBackAlpha(self) -> int Get the background alpha of the caret line. """ return _stc.StyledTextCtrl_GetCaretLineBackAlpha(*args, **kwargs) def SetCaretStyle(*args, **kwargs): """ SetCaretStyle(self, int caretStyle) Set the style of the caret to be drawn. """ return _stc.StyledTextCtrl_SetCaretStyle(*args, **kwargs) def GetCaretStyle(*args, **kwargs): """ GetCaretStyle(self) -> int Returns the current style of the caret. """ return _stc.StyledTextCtrl_GetCaretStyle(*args, **kwargs) def SetIndicatorCurrent(*args, **kwargs): """ SetIndicatorCurrent(self, int indicator) Set the indicator used for IndicatorFillRange and IndicatorClearRange """ return _stc.StyledTextCtrl_SetIndicatorCurrent(*args, **kwargs) def GetIndicatorCurrent(*args, **kwargs): """ GetIndicatorCurrent(self) -> int Get the current indicator """ return _stc.StyledTextCtrl_GetIndicatorCurrent(*args, **kwargs) def SetIndicatorValue(*args, **kwargs): """ SetIndicatorValue(self, int value) Set the value used for IndicatorFillRange """ return _stc.StyledTextCtrl_SetIndicatorValue(*args, **kwargs) def GetIndicatorValue(*args, **kwargs): """ GetIndicatorValue(self) -> int Get the current indicator vaue """ return _stc.StyledTextCtrl_GetIndicatorValue(*args, **kwargs) def IndicatorFillRange(*args, **kwargs): """ IndicatorFillRange(self, int position, int fillLength) Turn a indicator on over a range. """ return _stc.StyledTextCtrl_IndicatorFillRange(*args, **kwargs) def IndicatorClearRange(*args, **kwargs): """ IndicatorClearRange(self, int position, int clearLength) Turn a indicator off over a range. """ return _stc.StyledTextCtrl_IndicatorClearRange(*args, **kwargs) def IndicatorAllOnFor(*args, **kwargs): """ IndicatorAllOnFor(self, int position) -> int Are any indicators present at position? """ return _stc.StyledTextCtrl_IndicatorAllOnFor(*args, **kwargs) def IndicatorValueAt(*args, **kwargs): """ IndicatorValueAt(self, int indicator, int position) -> int What value does a particular indicator have at at a position? 
""" return _stc.StyledTextCtrl_IndicatorValueAt(*args, **kwargs) def IndicatorStart(*args, **kwargs): """ IndicatorStart(self, int indicator, int position) -> int Where does a particular indicator start? """ return _stc.StyledTextCtrl_IndicatorStart(*args, **kwargs) def IndicatorEnd(*args, **kwargs): """ IndicatorEnd(self, int indicator, int position) -> int Where does a particular indicator end? """ return _stc.StyledTextCtrl_IndicatorEnd(*args, **kwargs) def SetPositionCacheSize(*args, **kwargs): """ SetPositionCacheSize(self, int size) Set number of entries in position cache """ return _stc.StyledTextCtrl_SetPositionCacheSize(*args, **kwargs) def GetPositionCacheSize(*args, **kwargs): """ GetPositionCacheSize(self) -> int How many entries are allocated to the position cache? """ return _stc.StyledTextCtrl_GetPositionCacheSize(*args, **kwargs) def CopyAllowLine(*args, **kwargs): """ CopyAllowLine(self) Copy the selection, if selection empty copy the line with the caret """ return _stc.StyledTextCtrl_CopyAllowLine(*args, **kwargs) def GetRangePointer(*args, **kwargs): """GetRangePointer(self, int position, int rangeLength) -> char""" return _stc.StyledTextCtrl_GetRangePointer(*args, **kwargs) def GetGapPosition(*args, **kwargs): """GetGapPosition(self) -> int""" return _stc.StyledTextCtrl_GetGapPosition(*args, **kwargs) def SetKeysUnicode(*args, **kwargs): """ SetKeysUnicode(self, bool keysUnicode) Always interpret keyboard input as Unicode """ return _stc.StyledTextCtrl_SetKeysUnicode(*args, **kwargs) def GetKeysUnicode(*args, **kwargs): """ GetKeysUnicode(self) -> bool Are keys always interpreted as Unicode? """ return _stc.StyledTextCtrl_GetKeysUnicode(*args, **kwargs) def IndicatorSetAlpha(*args, **kwargs): """ IndicatorSetAlpha(self, int indicator, int alpha) Set the alpha fill colour of the given indicator. """ return _stc.StyledTextCtrl_IndicatorSetAlpha(*args, **kwargs) def IndicatorGetAlpha(*args, **kwargs): """ IndicatorGetAlpha(self, int indicator) -> int Get the alpha fill colour of the given indicator. 
""" return _stc.StyledTextCtrl_IndicatorGetAlpha(*args, **kwargs) def IndicatorSetOutlineAlpha(*args, **kwargs): """IndicatorSetOutlineAlpha(self, int indicator, int alpha)""" return _stc.StyledTextCtrl_IndicatorSetOutlineAlpha(*args, **kwargs) def IndicatorGetOutlineAlpha(*args, **kwargs): """IndicatorGetOutlineAlpha(self, int indicator) -> int""" return _stc.StyledTextCtrl_IndicatorGetOutlineAlpha(*args, **kwargs) def SetExtraAscent(*args, **kwargs): """ SetExtraAscent(self, int extraAscent) Set extra ascent for each line """ return _stc.StyledTextCtrl_SetExtraAscent(*args, **kwargs) def GetExtraAscent(*args, **kwargs): """ GetExtraAscent(self) -> int Get extra ascent for each line """ return _stc.StyledTextCtrl_GetExtraAscent(*args, **kwargs) def SetExtraDescent(*args, **kwargs): """ SetExtraDescent(self, int extraDescent) Set extra descent for each line """ return _stc.StyledTextCtrl_SetExtraDescent(*args, **kwargs) def GetExtraDescent(*args, **kwargs): """ GetExtraDescent(self) -> int Get extra descent for each line """ return _stc.StyledTextCtrl_GetExtraDescent(*args, **kwargs) def GetMarkerSymbolDefined(*args, **kwargs): """ GetMarkerSymbolDefined(self, int markerNumber) -> int Which symbol was defined for markerNumber with MarkerDefine """ return _stc.StyledTextCtrl_GetMarkerSymbolDefined(*args, **kwargs) def MarginSetText(*args, **kwargs): """ MarginSetText(self, int line, String text) Set the text in the text margin for a line """ return _stc.StyledTextCtrl_MarginSetText(*args, **kwargs) def MarginGetText(*args, **kwargs): """ MarginGetText(self, int line) -> String Get the text in the text margin for a line """ return _stc.StyledTextCtrl_MarginGetText(*args, **kwargs) def MarginSetStyle(*args, **kwargs): """ MarginSetStyle(self, int line, int style) Set the style number for the text margin for a line """ return _stc.StyledTextCtrl_MarginSetStyle(*args, **kwargs) def MarginGetStyle(*args, **kwargs): """ MarginGetStyle(self, int line) -> int Get the style number for the text margin for a line """ return _stc.StyledTextCtrl_MarginGetStyle(*args, **kwargs) def MarginSetStyles(*args, **kwargs): """ MarginSetStyles(self, int line, String styles) Set the style in the text margin for a line """ return _stc.StyledTextCtrl_MarginSetStyles(*args, **kwargs) def MarginGetStyles(*args, **kwargs): """ MarginGetStyles(self, int line) -> String Get the styles in the text margin for a line """ return _stc.StyledTextCtrl_MarginGetStyles(*args, **kwargs) def MarginTextClearAll(*args, **kwargs): """ MarginTextClearAll(self) Clear the margin text on all lines """ return _stc.StyledTextCtrl_MarginTextClearAll(*args, **kwargs) def MarginSetStyleOffset(*args, **kwargs): """ MarginSetStyleOffset(self, int style) Get the start of the range of style numbers used for margin text """ return _stc.StyledTextCtrl_MarginSetStyleOffset(*args, **kwargs) def MarginGetStyleOffset(*args, **kwargs): """ MarginGetStyleOffset(self) -> int Get the start of the range of style numbers used for margin text """ return _stc.StyledTextCtrl_MarginGetStyleOffset(*args, **kwargs) def SetMarginOptions(*args, **kwargs): """SetMarginOptions(self, int marginOptions)""" return _stc.StyledTextCtrl_SetMarginOptions(*args, **kwargs) def GetMarginOptions(*args, **kwargs): """GetMarginOptions(self) -> int""" return _stc.StyledTextCtrl_GetMarginOptions(*args, **kwargs) def AnnotationSetText(*args, **kwargs): """ AnnotationSetText(self, int line, String text) Set the annotation text for a line """ return 
_stc.StyledTextCtrl_AnnotationSetText(*args, **kwargs) def AnnotationGetText(*args, **kwargs): """ AnnotationGetText(self, int line) -> String Get the annotation text for a line """ return _stc.StyledTextCtrl_AnnotationGetText(*args, **kwargs) def AnnotationSetStyle(*args, **kwargs): """ AnnotationSetStyle(self, int line, int style) Set the style number for the annotations for a line """ return _stc.StyledTextCtrl_AnnotationSetStyle(*args, **kwargs) def AnnotationGetStyle(*args, **kwargs): """ AnnotationGetStyle(self, int line) -> int Get the style number for the annotations for a line """ return _stc.StyledTextCtrl_AnnotationGetStyle(*args, **kwargs) def AnnotationSetStyles(*args, **kwargs): """ AnnotationSetStyles(self, int line, String styles) Set the annotation styles for a line """ return _stc.StyledTextCtrl_AnnotationSetStyles(*args, **kwargs) def AnnotationGetStyles(*args, **kwargs): """ AnnotationGetStyles(self, int line) -> String Get the annotation styles for a line """ return _stc.StyledTextCtrl_AnnotationGetStyles(*args, **kwargs) def AnnotationGetLines(*args, **kwargs): """ AnnotationGetLines(self, int line) -> int Get the number of annotation lines for a line """ return _stc.StyledTextCtrl_AnnotationGetLines(*args, **kwargs) def AnnotationClearAll(*args, **kwargs): """ AnnotationClearAll(self) Clear the annotations from all lines """ return _stc.StyledTextCtrl_AnnotationClearAll(*args, **kwargs) def AnnotationSetVisible(*args, **kwargs): """ AnnotationSetVisible(self, int visible) Set the visibility for the annotations for a view """ return _stc.StyledTextCtrl_AnnotationSetVisible(*args, **kwargs) def AnnotationGetVisible(*args, **kwargs): """ AnnotationGetVisible(self) -> int Get the visibility for the annotations for a view """ return _stc.StyledTextCtrl_AnnotationGetVisible(*args, **kwargs) def AnnotationSetStyleOffset(*args, **kwargs): """ AnnotationSetStyleOffset(self, int style) Get the start of the range of style numbers used for annotations """ return _stc.StyledTextCtrl_AnnotationSetStyleOffset(*args, **kwargs) def AnnotationGetStyleOffset(*args, **kwargs): """ AnnotationGetStyleOffset(self) -> int Get the start of the range of style numbers used for annotations """ return _stc.StyledTextCtrl_AnnotationGetStyleOffset(*args, **kwargs) def AddUndoAction(*args, **kwargs): """ AddUndoAction(self, int token, int flags) Add a container action to the undo stack """ return _stc.StyledTextCtrl_AddUndoAction(*args, **kwargs) def CharPositionFromPoint(*args, **kwargs): """ CharPositionFromPoint(self, int x, int y) -> int Find the position of a character from a point within the window. """ return _stc.StyledTextCtrl_CharPositionFromPoint(*args, **kwargs) def CharPositionFromPointClose(*args, **kwargs): """ CharPositionFromPointClose(self, int x, int y) -> int Find the position of a character from a point within the window. Return INVALID_POSITION if not close to text. 
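
        Example -- a minimal hit-testing sketch (hypothetical usage;
        assumes ``stc`` is an existing StyledTextCtrl instance, ``x`` and
        ``y`` are window coordinates from e.g. a mouse event, and GotoPos
        is provided elsewhere in this class)::

            pos = stc.CharPositionFromPointClose(x, y)
            if pos != wx.stc.STC_INVALID_POSITION:
                stc.GotoPos(pos)  # move the caret to the clicked character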
""" return _stc.StyledTextCtrl_CharPositionFromPointClose(*args, **kwargs) def SetMultipleSelection(*args, **kwargs): """ SetMultipleSelection(self, bool multipleSelection) Set whether multiple selections can be made """ return _stc.StyledTextCtrl_SetMultipleSelection(*args, **kwargs) def GetMultipleSelection(*args, **kwargs): """ GetMultipleSelection(self) -> bool Whether multiple selections can be made """ return _stc.StyledTextCtrl_GetMultipleSelection(*args, **kwargs) def SetAdditionalSelectionTyping(*args, **kwargs): """ SetAdditionalSelectionTyping(self, bool additionalSelectionTyping) Set whether typing can be performed into multiple selections """ return _stc.StyledTextCtrl_SetAdditionalSelectionTyping(*args, **kwargs) def GetAdditionalSelectionTyping(*args, **kwargs): """ GetAdditionalSelectionTyping(self) -> bool Whether typing can be performed into multiple selections """ return _stc.StyledTextCtrl_GetAdditionalSelectionTyping(*args, **kwargs) def SetAdditionalCaretsBlink(*args, **kwargs): """ SetAdditionalCaretsBlink(self, bool additionalCaretsBlink) Set whether additional carets will blink """ return _stc.StyledTextCtrl_SetAdditionalCaretsBlink(*args, **kwargs) def GetAdditionalCaretsBlink(*args, **kwargs): """ GetAdditionalCaretsBlink(self) -> bool Whether additional carets will blink """ return _stc.StyledTextCtrl_GetAdditionalCaretsBlink(*args, **kwargs) def SetAdditionalCaretsVisible(*args, **kwargs): """ SetAdditionalCaretsVisible(self, bool additionalCaretsBlink) Set whether additional carets are visible """ return _stc.StyledTextCtrl_SetAdditionalCaretsVisible(*args, **kwargs) def GetAdditionalCaretsVisible(*args, **kwargs): """ GetAdditionalCaretsVisible(self) -> bool Whether additional carets are visible """ return _stc.StyledTextCtrl_GetAdditionalCaretsVisible(*args, **kwargs) def GetSelections(*args, **kwargs): """ GetSelections(self) -> int How many selections are there? 
""" return _stc.StyledTextCtrl_GetSelections(*args, **kwargs) def ClearSelections(*args, **kwargs): """ ClearSelections(self) Clear selections to a single empty stream selection """ return _stc.StyledTextCtrl_ClearSelections(*args, **kwargs) def AddSelection(*args, **kwargs): """ AddSelection(self, int caret, int anchor) -> int Add a selection """ return _stc.StyledTextCtrl_AddSelection(*args, **kwargs) def SetMainSelection(*args, **kwargs): """ SetMainSelection(self, int selection) Set the main selection """ return _stc.StyledTextCtrl_SetMainSelection(*args, **kwargs) def GetMainSelection(*args, **kwargs): """ GetMainSelection(self) -> int Which selection is the main selection """ return _stc.StyledTextCtrl_GetMainSelection(*args, **kwargs) def SetSelectionNCaret(*args, **kwargs): """SetSelectionNCaret(self, int selection, int pos)""" return _stc.StyledTextCtrl_SetSelectionNCaret(*args, **kwargs) def GetSelectionNCaret(*args, **kwargs): """GetSelectionNCaret(self, int selection) -> int""" return _stc.StyledTextCtrl_GetSelectionNCaret(*args, **kwargs) def SetSelectionNAnchor(*args, **kwargs): """SetSelectionNAnchor(self, int selection, int posAnchor)""" return _stc.StyledTextCtrl_SetSelectionNAnchor(*args, **kwargs) def GetSelectionNAnchor(*args, **kwargs): """GetSelectionNAnchor(self, int selection) -> int""" return _stc.StyledTextCtrl_GetSelectionNAnchor(*args, **kwargs) def SetSelectionNCaretVirtualSpace(*args, **kwargs): """SetSelectionNCaretVirtualSpace(self, int selection, int space)""" return _stc.StyledTextCtrl_SetSelectionNCaretVirtualSpace(*args, **kwargs) def GetSelectionNCaretVirtualSpace(*args, **kwargs): """GetSelectionNCaretVirtualSpace(self, int selection) -> int""" return _stc.StyledTextCtrl_GetSelectionNCaretVirtualSpace(*args, **kwargs) def SetSelectionNAnchorVirtualSpace(*args, **kwargs): """SetSelectionNAnchorVirtualSpace(self, int selection, int space)""" return _stc.StyledTextCtrl_SetSelectionNAnchorVirtualSpace(*args, **kwargs) def GetSelectionNAnchorVirtualSpace(*args, **kwargs): """GetSelectionNAnchorVirtualSpace(self, int selection) -> int""" return _stc.StyledTextCtrl_GetSelectionNAnchorVirtualSpace(*args, **kwargs) def SetSelectionNStart(*args, **kwargs): """ SetSelectionNStart(self, int selection, int pos) Sets the position that starts the selection - this becomes the anchor. """ return _stc.StyledTextCtrl_SetSelectionNStart(*args, **kwargs) def GetSelectionNStart(*args, **kwargs): """ GetSelectionNStart(self, int selection) -> int Returns the position at the start of the selection. """ return _stc.StyledTextCtrl_GetSelectionNStart(*args, **kwargs) def SetSelectionNEnd(*args, **kwargs): """ SetSelectionNEnd(self, int selection, int pos) Sets the position that ends the selection - this becomes the currentPosition. """ return _stc.StyledTextCtrl_SetSelectionNEnd(*args, **kwargs) def GetSelectionNEnd(*args, **kwargs): """ GetSelectionNEnd(self, int selection) -> int Returns the position at the end of the selection. 
""" return _stc.StyledTextCtrl_GetSelectionNEnd(*args, **kwargs) def SetRectangularSelectionCaret(*args, **kwargs): """SetRectangularSelectionCaret(self, int pos)""" return _stc.StyledTextCtrl_SetRectangularSelectionCaret(*args, **kwargs) def GetRectangularSelectionCaret(*args, **kwargs): """GetRectangularSelectionCaret(self) -> int""" return _stc.StyledTextCtrl_GetRectangularSelectionCaret(*args, **kwargs) def SetRectangularSelectionAnchor(*args, **kwargs): """SetRectangularSelectionAnchor(self, int posAnchor)""" return _stc.StyledTextCtrl_SetRectangularSelectionAnchor(*args, **kwargs) def GetRectangularSelectionAnchor(*args, **kwargs): """GetRectangularSelectionAnchor(self) -> int""" return _stc.StyledTextCtrl_GetRectangularSelectionAnchor(*args, **kwargs) def SetRectangularSelectionCaretVirtualSpace(*args, **kwargs): """SetRectangularSelectionCaretVirtualSpace(self, int space)""" return _stc.StyledTextCtrl_SetRectangularSelectionCaretVirtualSpace(*args, **kwargs) def GetRectangularSelectionCaretVirtualSpace(*args, **kwargs): """GetRectangularSelectionCaretVirtualSpace(self) -> int""" return _stc.StyledTextCtrl_GetRectangularSelectionCaretVirtualSpace(*args, **kwargs) def SetRectangularSelectionAnchorVirtualSpace(*args, **kwargs): """SetRectangularSelectionAnchorVirtualSpace(self, int space)""" return _stc.StyledTextCtrl_SetRectangularSelectionAnchorVirtualSpace(*args, **kwargs) def GetRectangularSelectionAnchorVirtualSpace(*args, **kwargs): """GetRectangularSelectionAnchorVirtualSpace(self) -> int""" return _stc.StyledTextCtrl_GetRectangularSelectionAnchorVirtualSpace(*args, **kwargs) def SetVirtualSpaceOptions(*args, **kwargs): """SetVirtualSpaceOptions(self, int virtualSpaceOptions)""" return _stc.StyledTextCtrl_SetVirtualSpaceOptions(*args, **kwargs) def GetVirtualSpaceOptions(*args, **kwargs): """GetVirtualSpaceOptions(self) -> int""" return _stc.StyledTextCtrl_GetVirtualSpaceOptions(*args, **kwargs) def SetRectangularSelectionModifier(*args, **kwargs): """ SetRectangularSelectionModifier(self, int modifier) On GTK+, allow selecting the modifier key to use for mouse-based rectangular selection. Often the window manager requires Alt+Mouse Drag for moving windows. Valid values are SCMOD_CTRL(default), SCMOD_ALT, or SCMOD_SUPER. """ return _stc.StyledTextCtrl_SetRectangularSelectionModifier(*args, **kwargs) def GetRectangularSelectionModifier(*args, **kwargs): """ GetRectangularSelectionModifier(self) -> int Get the modifier key used for rectangular selection. """ return _stc.StyledTextCtrl_GetRectangularSelectionModifier(*args, **kwargs) def SetAdditionalSelForeground(*args, **kwargs): """ SetAdditionalSelForeground(self, Colour fore) Set the foreground colour of additional selections. Must have previously called SetSelFore with non-zero first argument for this to have an effect. """ return _stc.StyledTextCtrl_SetAdditionalSelForeground(*args, **kwargs) def SetAdditionalSelBackground(*args, **kwargs): """ SetAdditionalSelBackground(self, Colour back) Set the background colour of additional selections. Must have previously called SetSelBack with non-zero first argument for this to have an effect. """ return _stc.StyledTextCtrl_SetAdditionalSelBackground(*args, **kwargs) def SetAdditionalSelAlpha(*args, **kwargs): """ SetAdditionalSelAlpha(self, int alpha) Set the alpha of the selection. """ return _stc.StyledTextCtrl_SetAdditionalSelAlpha(*args, **kwargs) def GetAdditionalSelAlpha(*args, **kwargs): """ GetAdditionalSelAlpha(self) -> int Get the alpha of the selection. 
""" return _stc.StyledTextCtrl_GetAdditionalSelAlpha(*args, **kwargs) def SetAdditionalCaretForeground(*args, **kwargs): """ SetAdditionalCaretForeground(self, Colour fore) Set the foreground colour of additional carets. """ return _stc.StyledTextCtrl_SetAdditionalCaretForeground(*args, **kwargs) def GetAdditionalCaretForeground(*args, **kwargs): """ GetAdditionalCaretForeground(self) -> Colour Get the foreground colour of additional carets. """ return _stc.StyledTextCtrl_GetAdditionalCaretForeground(*args, **kwargs) def RotateSelection(*args, **kwargs): """ RotateSelection(self) Set the main selection to the next selection. """ return _stc.StyledTextCtrl_RotateSelection(*args, **kwargs) def SwapMainAnchorCaret(*args, **kwargs): """ SwapMainAnchorCaret(self) Swap that caret and anchor of the main selection. """ return _stc.StyledTextCtrl_SwapMainAnchorCaret(*args, **kwargs) def ChangeLexerState(*args, **kwargs): """ChangeLexerState(self, int start, int end) -> int""" return _stc.StyledTextCtrl_ChangeLexerState(*args, **kwargs) def ContractedFoldNext(*args, **kwargs): """ContractedFoldNext(self, int lineStart) -> int""" return _stc.StyledTextCtrl_ContractedFoldNext(*args, **kwargs) def VerticalCentreCaret(*args, **kwargs): """VerticalCentreCaret(self)""" return _stc.StyledTextCtrl_VerticalCentreCaret(*args, **kwargs) def MoveSelectedLinesUp(*args, **kwargs): """MoveSelectedLinesUp(self)""" return _stc.StyledTextCtrl_MoveSelectedLinesUp(*args, **kwargs) def MoveSelectedLinesDown(*args, **kwargs): """MoveSelectedLinesDown(self)""" return _stc.StyledTextCtrl_MoveSelectedLinesDown(*args, **kwargs) def SetIdentifier(*args, **kwargs): """SetIdentifier(self, int identifier)""" return _stc.StyledTextCtrl_SetIdentifier(*args, **kwargs) def GetIdentifier(*args, **kwargs): """GetIdentifier(self) -> int""" return _stc.StyledTextCtrl_GetIdentifier(*args, **kwargs) def RGBAImageSetWidth(*args, **kwargs): """RGBAImageSetWidth(self, int width)""" return _stc.StyledTextCtrl_RGBAImageSetWidth(*args, **kwargs) def RGBAImageSetHeight(*args, **kwargs): """RGBAImageSetHeight(self, int height)""" return _stc.StyledTextCtrl_RGBAImageSetHeight(*args, **kwargs) def MarkerDefineRGBAImage(*args, **kwargs): """MarkerDefineRGBAImage(self, int markerNumber, unsigned char pixels)""" return _stc.StyledTextCtrl_MarkerDefineRGBAImage(*args, **kwargs) def RegisterRGBAImage(*args, **kwargs): """RegisterRGBAImage(self, int type, unsigned char pixels)""" return _stc.StyledTextCtrl_RegisterRGBAImage(*args, **kwargs) def ScrollToStart(*args, **kwargs): """ScrollToStart(self)""" return _stc.StyledTextCtrl_ScrollToStart(*args, **kwargs) def ScrollToEnd(*args, **kwargs): """ScrollToEnd(self)""" return _stc.StyledTextCtrl_ScrollToEnd(*args, **kwargs) def SetTechnology(*args, **kwargs): """SetTechnology(self, int technology)""" return _stc.StyledTextCtrl_SetTechnology(*args, **kwargs) def GetTechnology(*args, **kwargs): """GetTechnology(self) -> int""" return _stc.StyledTextCtrl_GetTechnology(*args, **kwargs) def CreateLoader(*args, **kwargs): """CreateLoader(self, int bytes) -> void""" return _stc.StyledTextCtrl_CreateLoader(*args, **kwargs) def StartRecord(*args, **kwargs): """ StartRecord(self) Start notifying the container of all key presses and commands. """ return _stc.StyledTextCtrl_StartRecord(*args, **kwargs) def StopRecord(*args, **kwargs): """ StopRecord(self) Stop notifying the container of all key presses and commands. 
""" return _stc.StyledTextCtrl_StopRecord(*args, **kwargs) def SetLexer(*args, **kwargs): """ SetLexer(self, int lexer) Set the lexing language of the document. """ return _stc.StyledTextCtrl_SetLexer(*args, **kwargs) def GetLexer(*args, **kwargs): """ GetLexer(self) -> int Retrieve the lexing language of the document. """ return _stc.StyledTextCtrl_GetLexer(*args, **kwargs) def Colourise(*args, **kwargs): """ Colourise(self, int start, int end) Colourise a segment of the document using the current lexing language. """ return _stc.StyledTextCtrl_Colourise(*args, **kwargs) def SetProperty(*args, **kwargs): """ SetProperty(self, String key, String value) Set up a value that may be used by a lexer for some optional feature. """ return _stc.StyledTextCtrl_SetProperty(*args, **kwargs) def SetKeyWords(*args, **kwargs): """ SetKeyWords(self, int keywordSet, String keyWords) Set up the key words used by the lexer. """ return _stc.StyledTextCtrl_SetKeyWords(*args, **kwargs) def SetLexerLanguage(*args, **kwargs): """ SetLexerLanguage(self, String language) Set the lexing language of the document based on string name. """ return _stc.StyledTextCtrl_SetLexerLanguage(*args, **kwargs) def GetProperty(*args, **kwargs): """ GetProperty(self, String key) -> String Retrieve a 'property' value previously set with SetProperty. """ return _stc.StyledTextCtrl_GetProperty(*args, **kwargs) def GetPropertyExpanded(*args, **kwargs): """ GetPropertyExpanded(self, String key) -> String Retrieve a 'property' value previously set with SetProperty, with '$()' variable replacement on returned buffer. """ return _stc.StyledTextCtrl_GetPropertyExpanded(*args, **kwargs) def GetPropertyInt(*args, **kwargs): """ GetPropertyInt(self, String key) -> int Retrieve a 'property' value previously set with SetProperty, interpreted as an int AFTER any '$()' variable replacement. """ return _stc.StyledTextCtrl_GetPropertyInt(*args, **kwargs) def GetStyleBitsNeeded(*args, **kwargs): """ GetStyleBitsNeeded(self) -> int Retrieve the number of bits the current lexer needs for styling. """ return _stc.StyledTextCtrl_GetStyleBitsNeeded(*args, **kwargs) def PrivateLexerCall(*args, **kwargs): """PrivateLexerCall(self, int operation, void pointer) -> void""" return _stc.StyledTextCtrl_PrivateLexerCall(*args, **kwargs) def PropertyNames(*args, **kwargs): """PropertyNames(self) -> String""" return _stc.StyledTextCtrl_PropertyNames(*args, **kwargs) def PropertyType(*args, **kwargs): """PropertyType(self, String name) -> int""" return _stc.StyledTextCtrl_PropertyType(*args, **kwargs) def DescribeProperty(*args, **kwargs): """DescribeProperty(self, String name) -> String""" return _stc.StyledTextCtrl_DescribeProperty(*args, **kwargs) def DescribeKeyWordSets(*args, **kwargs): """DescribeKeyWordSets(self) -> String""" return _stc.StyledTextCtrl_DescribeKeyWordSets(*args, **kwargs) def GetCurrentLine(*args, **kwargs): """ GetCurrentLine(self) -> int Returns the line number of the line with the caret. 
""" return _stc.StyledTextCtrl_GetCurrentLine(*args, **kwargs) def StyleSetSpec(*args, **kwargs): """ StyleSetSpec(self, int styleNum, String spec) Extract style settings from a spec-string which is composed of one or more of the following comma separated elements:: bold turns on bold italic turns on italics fore:[name or #RRGGBB] sets the foreground colour back:[name or #RRGGBB] sets the background colour face:[facename] sets the font face name to use size:[num] sets the font size in points eol turns on eol filling underline turns on underlining """ return _stc.StyledTextCtrl_StyleSetSpec(*args, **kwargs) def StyleGetFont(*args, **kwargs): """StyleGetFont(self, int style) -> Font""" return _stc.StyledTextCtrl_StyleGetFont(*args, **kwargs) def StyleSetFont(*args, **kwargs): """ StyleSetFont(self, int styleNum, Font font) Set style size, face, bold, italic, and underline attributes from the attributes of a `wx.Font`. """ return _stc.StyledTextCtrl_StyleSetFont(*args, **kwargs) def StyleSetFontAttr(*args, **kwargs): """ StyleSetFontAttr(self, int styleNum, int size, String faceName, bool bold, bool italic, bool underline, int encoding=wxFONTENCODING_DEFAULT) Set all font style attributes at once. """ return _stc.StyledTextCtrl_StyleSetFontAttr(*args, **kwargs) def StyleSetCharacterSet(*args, **kwargs): """ StyleSetCharacterSet(self, int style, int characterSet) Set the character set of the font in a style. Converts the Scintilla wx.stc.STC_CHARSET_* set values to a wxFontEncoding. """ return _stc.StyledTextCtrl_StyleSetCharacterSet(*args, **kwargs) def StyleSetFontEncoding(*args, **kwargs): """ StyleSetFontEncoding(self, int style, int encoding) Set the font encoding to be used by a style. """ return _stc.StyledTextCtrl_StyleSetFontEncoding(*args, **kwargs) def CmdKeyExecute(*args, **kwargs): """ CmdKeyExecute(self, int cmd) Perform one of the operations defined by the wx.stc.STC_CMD_* constants. """ return _stc.StyledTextCtrl_CmdKeyExecute(*args, **kwargs) def SetMargins(*args, **kwargs): """ SetMargins(self, int left, int right) Set the left and right margin in the edit area, measured in pixels. """ return _stc.StyledTextCtrl_SetMargins(*args, **kwargs) def PointFromPosition(*args, **kwargs): """ PointFromPosition(self, int pos) -> Point Retrieve the point in the window where a position is displayed. """ return _stc.StyledTextCtrl_PointFromPosition(*args, **kwargs) def ScrollToLine(*args, **kwargs): """ ScrollToLine(self, int line) Scroll enough to make the given line visible. """ return _stc.StyledTextCtrl_ScrollToLine(*args, **kwargs) def ScrollToColumn(*args, **kwargs): """ ScrollToColumn(self, int column) Scroll enough to make the given column visible """ return _stc.StyledTextCtrl_ScrollToColumn(*args, **kwargs) def SendMsg(*args, **kwargs): """ SendMsg(self, int msg, UIntPtr wp=0, wxIntPtr lp=0) -> wxIntPtr Send a message to Scintilla. """ return _stc.StyledTextCtrl_SendMsg(*args, **kwargs) def SetVScrollBar(*args, **kwargs): """ SetVScrollBar(self, ScrollBar bar) Set the vertical scrollbar to use instead of the one that's built-in. """ return _stc.StyledTextCtrl_SetVScrollBar(*args, **kwargs) def SetHScrollBar(*args, **kwargs): """ SetHScrollBar(self, ScrollBar bar) Set the horizontal scrollbar to use instead of the ont that's built-in. 
""" return _stc.StyledTextCtrl_SetHScrollBar(*args, **kwargs) def GetLastKeydownProcessed(*args, **kwargs): """GetLastKeydownProcessed(self) -> bool""" return _stc.StyledTextCtrl_GetLastKeydownProcessed(*args, **kwargs) def SetLastKeydownProcessed(*args, **kwargs): """SetLastKeydownProcessed(self, bool val)""" return _stc.StyledTextCtrl_SetLastKeydownProcessed(*args, **kwargs) def DoDragOver(*args, **kwargs): """ DoDragOver(self, int x, int y, int def) -> int Allow for simulating a DnD DragOver. """ return _stc.StyledTextCtrl_DoDragOver(*args, **kwargs) def DoDropText(*args, **kwargs): """ DoDropText(self, long x, long y, String data) -> bool Allow for simulating a DnD DropText. """ return _stc.StyledTextCtrl_DoDropText(*args, **kwargs) def SetUseAntiAliasing(*args, **kwargs): """ SetUseAntiAliasing(self, bool useAA) Specify whether anti-aliased fonts should be used. Will have no effect on some platforms, but on some (wxMac for example) can greatly improve performance. """ return _stc.StyledTextCtrl_SetUseAntiAliasing(*args, **kwargs) def GetUseAntiAliasing(*args, **kwargs): """ GetUseAntiAliasing(self) -> bool Returns the current UseAntiAliasing setting. """ return _stc.StyledTextCtrl_GetUseAntiAliasing(*args, **kwargs) def AnnotationClearLine(*args, **kwargs): """AnnotationClearLine(self, int line)""" return _stc.StyledTextCtrl_AnnotationClearLine(*args, **kwargs) def AddTextRaw(*args, **kwargs): """ AddTextRaw(self, char text, int length=-1) Add text to the document at current position. The text should be utf-8 encoded on unicode builds of wxPython, or can be any 8-bit text in ansi builds. """ return _stc.StyledTextCtrl_AddTextRaw(*args, **kwargs) def InsertTextRaw(*args, **kwargs): """ InsertTextRaw(self, int pos, char text) Insert string at a position. The text should be utf-8 encoded on unicode builds of wxPython, or can be any 8-bit text in ansi builds. """ return _stc.StyledTextCtrl_InsertTextRaw(*args, **kwargs) def GetCurLineRaw(*args, **kwargs): """ GetCurLineRaw() -> (text, index) Retrieve the text of the line containing the caret, and also the index of the caret on the line. The returned value is a utf-8 encoded string in unicode builds of wxPython, or raw 8-bit text otherwise. """ return _stc.StyledTextCtrl_GetCurLineRaw(*args, **kwargs) def GetLineRaw(*args, **kwargs): """ GetLineRaw(self, int line) -> wxCharBuffer Retrieve the contents of a line. The returned value is a utf-8 encoded string in unicode builds of wxPython, or raw 8-bit text otherwise. """ return _stc.StyledTextCtrl_GetLineRaw(*args, **kwargs) def GetSelectedTextRaw(*args, **kwargs): """ GetSelectedTextRaw(self) -> wxCharBuffer Retrieve the selected text. The returned value is a utf-8 encoded string in unicode builds of wxPython, or raw 8-bit text otherwise. """ return _stc.StyledTextCtrl_GetSelectedTextRaw(*args, **kwargs) def GetTextRangeRaw(*args, **kwargs): """ GetTextRangeRaw(self, int startPos, int endPos) -> wxCharBuffer Retrieve a range of text. The returned value is a utf-8 encoded string in unicode builds of wxPython, or raw 8-bit text otherwise. """ return _stc.StyledTextCtrl_GetTextRangeRaw(*args, **kwargs) def SetTextRaw(*args, **kwargs): """ SetTextRaw(self, char text) Replace the contents of the document with the argument text. The text should be utf-8 encoded on unicode builds of wxPython, or can be any 8-bit text in ansi builds. """ return _stc.StyledTextCtrl_SetTextRaw(*args, **kwargs) def GetTextRaw(*args, **kwargs): """ GetTextRaw(self) -> wxCharBuffer Retrieve all the text in the document. 
The returned value is a utf-8 encoded string in unicode builds of wxPython, or raw 8-bit text otherwise. """ return _stc.StyledTextCtrl_GetTextRaw(*args, **kwargs) def AppendTextRaw(*args, **kwargs): """ AppendTextRaw(self, char text, int length=-1) Append a string to the end of the document without changing the selection. The text should be utf-8 encoded on unicode builds of wxPython, or can be any 8-bit text in ansi builds. """ return _stc.StyledTextCtrl_AppendTextRaw(*args, **kwargs) def AddTextUTF8(self, text): """ Add UTF8 encoded text to the document at the current position. Works 'natively' in a unicode build of wxPython, and will also work in an ansi build if the UTF8 text is compatible with the current encoding. """ if not wx.USE_UNICODE: u = text.decode('utf-8') text = u.encode(wx.GetDefaultPyEncoding()) self.AddTextRaw(text) def InsertTextUTF8(self, pos, text): """ Insert UTF8 encoded text at a position. Works 'natively' in a unicode build of wxPython, and will also work in an ansi build if the UTF8 text is compatible with the current encoding. """ if not wx.USE_UNICODE: u = text.decode('utf-8') text = u.encode(wx.GetDefaultPyEncoding()) self.InsertTextRaw(pos, text) def GetCurLineUTF8(self): """ Retrieve the UTF8 text of the line containing the caret, and also the index of the caret on the line. In an ansi build of wxPython the text retrieved from the document is assumed to be in the current default encoding. """ text, pos = self.GetCurLineRaw() if not wx.USE_UNICODE: u = text.decode(wx.GetDefaultPyEncoding()) text = u.encode('utf-8') return text, pos def GetLineUTF8(self, line): """ Retrieve the contents of a line as UTF8. In an ansi build of wxPython the text retrieved from the document is assumed to be in the current default encoding. """ text = self.GetLineRaw(line) if not wx.USE_UNICODE: u = text.decode(wx.GetDefaultPyEncoding()) text = u.encode('utf-8') return text def GetSelectedTextUTF8(self): """ Retrieve the selected text as UTF8. In an ansi build of wxPython the text retrieved from the document is assumed to be in the current default encoding. """ text = self.GetSelectedTextRaw() if not wx.USE_UNICODE: u = text.decode(wx.GetDefaultPyEncoding()) text = u.encode('utf-8') return text def GetTextRangeUTF8(self, startPos, endPos): """ Retrieve a range of text as UTF8. In an ansi build of wxPython the text retrieved from the document is assumed to be in the current default encoding. """ text = self.GetTextRangeRaw(startPos, endPos) if not wx.USE_UNICODE: u = text.decode(wx.GetDefaultPyEncoding()) text = u.encode('utf-8') return text def SetTextUTF8(self, text): """ Replace the contents of the document with the UTF8 text given. Works 'natively' in a unicode build of wxPython, and will also work in an ansi build if the UTF8 text is compatible with the current encoding. """ if not wx.USE_UNICODE: u = text.decode('utf-8') text = u.encode(wx.GetDefaultPyEncoding()) self.SetTextRaw(text) def GetTextUTF8(self): """ Retrieve all the text in the document as UTF8. In an ansi build of wxPython the text retrieved from the document is assumed to be in the current default encoding. """ text = self.GetTextRaw() if not wx.USE_UNICODE: u = text.decode(wx.GetDefaultPyEncoding()) text = u.encode('utf-8') return text def AppendTextUTF8(self, text): """ Append a UTF8 string to the end of the document without changing the selection. Works 'natively' in a unicode build of wxPython, and will also work in an ansi build if the UTF8 text is compatible with the current encoding. 
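
        Example -- a round trip through the UTF8 helpers (hypothetical
        usage; assumes ``stc`` is an existing StyledTextCtrl instance)::

            text = u'non-ascii text'        # any unicode string
            utf8 = text.encode('utf-8')     # the helpers expect UTF8 bytes
            stc.SetTextUTF8(utf8)
            stc.AppendTextUTF8(utf8)
            round_trip = stc.GetTextUTF8()  # UTF8 bytes back out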
""" if not wx.USE_UNICODE: u = text.decode('utf-8') text = u.encode(wx.GetDefaultPyEncoding()) self.AppendTextRaw(text) def SelectNone(*args, **kwargs): """SelectNone(self)""" return _stc.StyledTextCtrl_SelectNone(*args, **kwargs) def PositionToXY(*args, **kwargs): """PositionToXY(long pos) -> (x, y)""" return _stc.StyledTextCtrl_PositionToXY(*args, **kwargs) def GetLibraryVersionInfo(*args, **kwargs): """GetLibraryVersionInfo() -> VersionInfo""" return _stc.StyledTextCtrl_GetLibraryVersionInfo(*args, **kwargs) GetLibraryVersionInfo = staticmethod(GetLibraryVersionInfo) GetCaretLineBack = GetCaretLineBackground SetCaretLineBack = SetCaretLineBackground Anchor = property(GetAnchor,SetAnchor) BackSpaceUnIndents = property(GetBackSpaceUnIndents,SetBackSpaceUnIndents) BufferedDraw = property(GetBufferedDraw,SetBufferedDraw) CaretForeground = property(GetCaretForeground,SetCaretForeground) CaretLineBack = property(GetCaretLineBack,SetCaretLineBack) CaretLineBackAlpha = property(GetCaretLineBackAlpha,SetCaretLineBackAlpha) CaretLineBackground = property(GetCaretLineBackground,SetCaretLineBackground) CaretLineVisible = property(GetCaretLineVisible,SetCaretLineVisible) CaretPeriod = property(GetCaretPeriod,SetCaretPeriod) CaretSticky = property(GetCaretSticky,SetCaretSticky) CaretWidth = property(GetCaretWidth,SetCaretWidth) CodePage = property(GetCodePage,SetCodePage) ControlCharSymbol = property(GetControlCharSymbol,SetControlCharSymbol) CurLine = property(GetCurLine) CurLineRaw = property(GetCurLineRaw) CurLineUTF8 = property(GetCurLineUTF8) CurrentLine = property(GetCurrentLine) CurrentPos = property(GetCurrentPos,SetCurrentPos) DocPointer = property(GetDocPointer,SetDocPointer) EOLMode = property(GetEOLMode,SetEOLMode) EdgeColour = property(GetEdgeColour,SetEdgeColour) EdgeColumn = property(GetEdgeColumn,SetEdgeColumn) EdgeMode = property(GetEdgeMode,SetEdgeMode) EndAtLastLine = property(GetEndAtLastLine,SetEndAtLastLine) EndStyled = property(GetEndStyled) FirstVisibleLine = property(GetFirstVisibleLine) HighlightGuide = property(GetHighlightGuide,SetHighlightGuide) Indent = property(GetIndent,SetIndent) IndentationGuides = property(GetIndentationGuides,SetIndentationGuides) LastKeydownProcessed = property(GetLastKeydownProcessed,SetLastKeydownProcessed) LayoutCache = property(GetLayoutCache,SetLayoutCache) Length = property(GetLength) Lexer = property(GetLexer,SetLexer) LineCount = property(GetLineCount) MarginLeft = property(GetMarginLeft,SetMarginLeft) MarginRight = property(GetMarginRight,SetMarginRight) MaxLineState = property(GetMaxLineState) ModEventMask = property(GetModEventMask,SetModEventMask) Modify = property(GetModify) MouseDownCaptures = property(GetMouseDownCaptures,SetMouseDownCaptures) MouseDwellTime = property(GetMouseDwellTime,SetMouseDwellTime) Overtype = property(GetOvertype,SetOvertype) PasteConvertEndings = property(GetPasteConvertEndings,SetPasteConvertEndings) PrintColourMode = property(GetPrintColourMode,SetPrintColourMode) PrintMagnification = property(GetPrintMagnification,SetPrintMagnification) PrintWrapMode = property(GetPrintWrapMode,SetPrintWrapMode) ReadOnly = property(GetReadOnly,SetReadOnly) STCCursor = property(GetSTCCursor,SetSTCCursor) STCFocus = property(GetSTCFocus,SetSTCFocus) ScrollWidth = property(GetScrollWidth,SetScrollWidth) SearchFlags = property(GetSearchFlags,SetSearchFlags) SelAlpha = property(GetSelAlpha,SetSelAlpha) SelectedText = property(GetSelectedText) SelectedTextRaw = property(GetSelectedTextRaw) SelectedTextUTF8 = 
property(GetSelectedTextUTF8) SelectionEnd = property(GetSelectionEnd,SetSelectionEnd) SelectionMode = property(GetSelectionMode,SetSelectionMode) SelectionStart = property(GetSelectionStart,SetSelectionStart) Status = property(GetStatus,SetStatus) StyleBits = property(GetStyleBits,SetStyleBits) StyleBitsNeeded = property(GetStyleBitsNeeded) TabIndents = property(GetTabIndents,SetTabIndents) TabWidth = property(GetTabWidth,SetTabWidth) TargetEnd = property(GetTargetEnd,SetTargetEnd) TargetStart = property(GetTargetStart,SetTargetStart) Text = property(GetText,SetText) TextLength = property(GetTextLength) TextRaw = property(GetTextRaw,SetTextRaw) TextUTF8 = property(GetTextUTF8,SetTextUTF8) TwoPhaseDraw = property(GetTwoPhaseDraw,SetTwoPhaseDraw) UndoCollection = property(GetUndoCollection,SetUndoCollection) UseAntiAliasing = property(GetUseAntiAliasing,SetUseAntiAliasing) UseHorizontalScrollBar = property(GetUseHorizontalScrollBar,SetUseHorizontalScrollBar) UseTabs = property(GetUseTabs,SetUseTabs) UseVerticalScrollBar = property(GetUseVerticalScrollBar,SetUseVerticalScrollBar) ViewEOL = property(GetViewEOL,SetViewEOL) ViewWhiteSpace = property(GetViewWhiteSpace,SetViewWhiteSpace) WrapMode = property(GetWrapMode,SetWrapMode) WrapStartIndent = property(GetWrapStartIndent,SetWrapStartIndent) WrapVisualFlags = property(GetWrapVisualFlags,SetWrapVisualFlags) WrapVisualFlagsLocation = property(GetWrapVisualFlagsLocation,SetWrapVisualFlagsLocation) XOffset = property(GetXOffset,SetXOffset) Zoom = property(GetZoom,SetZoom) SelEOLFilled = property(GetSelEOLFilled,SetSelEOLFilled) ScrollWidthTracking = property(GetScrollWidthTracking,SetScrollWidthTracking) HotspotActiveForeground = property(GetHotspotActiveForeground,SetHotspotActiveForeground) HotspotActiveBackground = property(GetHotspotActiveBackground,SetHotspotActiveBackground) HotspotActiveUnderline = property(GetHotspotActiveUnderline,SetHotspotActiveUnderline) HotspotSingleLine = property(GetHotspotSingleLine,SetHotspotSingleLine) CaretStyle = property(GetCaretStyle,SetCaretStyle) IndicatorCurrent = property(GetIndicatorCurrent,SetIndicatorCurrent) IndicatorValue = property(GetIndicatorValue,SetIndicatorValue) PositionCacheSize = property(GetPositionCacheSize,SetPositionCacheSize) _stc.StyledTextCtrl_swigregister(StyledTextCtrl) cvar = _stc.cvar STCNameStr = cvar.STCNameStr def PreStyledTextCtrl(*args, **kwargs): """PreStyledTextCtrl() -> StyledTextCtrl""" val = _stc.new_PreStyledTextCtrl(*args, **kwargs) return val def StyledTextCtrl_GetLibraryVersionInfo(*args): """StyledTextCtrl_GetLibraryVersionInfo() -> VersionInfo""" return _stc.StyledTextCtrl_GetLibraryVersionInfo(*args) class StyledTextEvent(_core.CommandEvent): """Proxy of C++ StyledTextEvent class""" thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') __repr__ = _swig_repr def __init__(self, *args, **kwargs): """__init__(self, EventType commandType=0, int id=0) -> StyledTextEvent""" _stc.StyledTextEvent_swiginit(self,_stc.new_StyledTextEvent(*args, **kwargs)) __swig_destroy__ = _stc.delete_StyledTextEvent __del__ = lambda self : None; def SetPosition(*args, **kwargs): """SetPosition(self, int pos)""" return _stc.StyledTextEvent_SetPosition(*args, **kwargs) def SetKey(*args, **kwargs): """SetKey(self, int k)""" return _stc.StyledTextEvent_SetKey(*args, **kwargs) def SetModifiers(*args, **kwargs): """SetModifiers(self, int m)""" return _stc.StyledTextEvent_SetModifiers(*args, **kwargs) def SetModificationType(*args, **kwargs): 
"""SetModificationType(self, int t)""" return _stc.StyledTextEvent_SetModificationType(*args, **kwargs) def SetText(*args, **kwargs): """SetText(self, String t)""" return _stc.StyledTextEvent_SetText(*args, **kwargs) def SetLength(*args, **kwargs): """SetLength(self, int len)""" return _stc.StyledTextEvent_SetLength(*args, **kwargs) def SetLinesAdded(*args, **kwargs): """SetLinesAdded(self, int num)""" return _stc.StyledTextEvent_SetLinesAdded(*args, **kwargs) def SetLine(*args, **kwargs): """SetLine(self, int val)""" return _stc.StyledTextEvent_SetLine(*args, **kwargs) def SetFoldLevelNow(*args, **kwargs): """SetFoldLevelNow(self, int val)""" return _stc.StyledTextEvent_SetFoldLevelNow(*args, **kwargs) def SetFoldLevelPrev(*args, **kwargs): """SetFoldLevelPrev(self, int val)""" return _stc.StyledTextEvent_SetFoldLevelPrev(*args, **kwargs) def SetMargin(*args, **kwargs): """SetMargin(self, int val)""" return _stc.StyledTextEvent_SetMargin(*args, **kwargs) def SetMessage(*args, **kwargs): """SetMessage(self, int val)""" return _stc.StyledTextEvent_SetMessage(*args, **kwargs) def SetWParam(*args, **kwargs): """SetWParam(self, int val)""" return _stc.StyledTextEvent_SetWParam(*args, **kwargs) def SetLParam(*args, **kwargs): """SetLParam(self, int val)""" return _stc.StyledTextEvent_SetLParam(*args, **kwargs) def SetListType(*args, **kwargs): """SetListType(self, int val)""" return _stc.StyledTextEvent_SetListType(*args, **kwargs) def SetX(*args, **kwargs): """SetX(self, int val)""" return _stc.StyledTextEvent_SetX(*args, **kwargs) def SetY(*args, **kwargs): """SetY(self, int val)""" return _stc.StyledTextEvent_SetY(*args, **kwargs) def SetToken(*args, **kwargs): """SetToken(self, int val)""" return _stc.StyledTextEvent_SetToken(*args, **kwargs) def SetAnnotationLinesAdded(*args, **kwargs): """SetAnnotationLinesAdded(self, int val)""" return _stc.StyledTextEvent_SetAnnotationLinesAdded(*args, **kwargs) def SetUpdated(*args, **kwargs): """SetUpdated(self, int val)""" return _stc.StyledTextEvent_SetUpdated(*args, **kwargs) def SetDragText(*args, **kwargs): """SetDragText(self, String val)""" return _stc.StyledTextEvent_SetDragText(*args, **kwargs) def SetDragFlags(*args, **kwargs): """SetDragFlags(self, int flags)""" return _stc.StyledTextEvent_SetDragFlags(*args, **kwargs) def SetDragResult(*args, **kwargs): """SetDragResult(self, int val)""" return _stc.StyledTextEvent_SetDragResult(*args, **kwargs) def SetDragAllowMove(*args, **kwargs): """SetDragAllowMove(self, bool allow)""" return _stc.StyledTextEvent_SetDragAllowMove(*args, **kwargs) def GetPosition(*args, **kwargs): """GetPosition(self) -> int""" return _stc.StyledTextEvent_GetPosition(*args, **kwargs) def GetKey(*args, **kwargs): """GetKey(self) -> int""" return _stc.StyledTextEvent_GetKey(*args, **kwargs) def GetModifiers(*args, **kwargs): """GetModifiers(self) -> int""" return _stc.StyledTextEvent_GetModifiers(*args, **kwargs) def GetModificationType(*args, **kwargs): """GetModificationType(self) -> int""" return _stc.StyledTextEvent_GetModificationType(*args, **kwargs) def GetText(*args, **kwargs): """GetText(self) -> String""" return _stc.StyledTextEvent_GetText(*args, **kwargs) def GetLength(*args, **kwargs): """GetLength(self) -> int""" return _stc.StyledTextEvent_GetLength(*args, **kwargs) def GetLinesAdded(*args, **kwargs): """GetLinesAdded(self) -> int""" return _stc.StyledTextEvent_GetLinesAdded(*args, **kwargs) def GetLine(*args, **kwargs): """GetLine(self) -> int""" return _stc.StyledTextEvent_GetLine(*args, **kwargs) def 
GetFoldLevelNow(*args, **kwargs): """GetFoldLevelNow(self) -> int""" return _stc.StyledTextEvent_GetFoldLevelNow(*args, **kwargs) def GetFoldLevelPrev(*args, **kwargs): """GetFoldLevelPrev(self) -> int""" return _stc.StyledTextEvent_GetFoldLevelPrev(*args, **kwargs) def GetMargin(*args, **kwargs): """GetMargin(self) -> int""" return _stc.StyledTextEvent_GetMargin(*args, **kwargs) def GetMessage(*args, **kwargs): """GetMessage(self) -> int""" return _stc.StyledTextEvent_GetMessage(*args, **kwargs) def GetWParam(*args, **kwargs): """GetWParam(self) -> int""" return _stc.StyledTextEvent_GetWParam(*args, **kwargs) def GetLParam(*args, **kwargs): """GetLParam(self) -> int""" return _stc.StyledTextEvent_GetLParam(*args, **kwargs) def GetListType(*args, **kwargs): """GetListType(self) -> int""" return _stc.StyledTextEvent_GetListType(*args, **kwargs) def GetX(*args, **kwargs): """GetX(self) -> int""" return _stc.StyledTextEvent_GetX(*args, **kwargs) def GetY(*args, **kwargs): """GetY(self) -> int""" return _stc.StyledTextEvent_GetY(*args, **kwargs) def GetToken(*args, **kwargs): """GetToken(self) -> int""" return _stc.StyledTextEvent_GetToken(*args, **kwargs) def GetAnnotationsLinesAdded(*args, **kwargs): """GetAnnotationsLinesAdded(self) -> int""" return _stc.StyledTextEvent_GetAnnotationsLinesAdded(*args, **kwargs) def GetUpdated(*args, **kwargs): """GetUpdated(self) -> int""" return _stc.StyledTextEvent_GetUpdated(*args, **kwargs) def GetDragText(*args, **kwargs): """GetDragText(self) -> String""" return _stc.StyledTextEvent_GetDragText(*args, **kwargs) def GetDragFlags(*args, **kwargs): """GetDragFlags(self) -> int""" return _stc.StyledTextEvent_GetDragFlags(*args, **kwargs) def GetDragResult(*args, **kwargs): """GetDragResult(self) -> int""" return _stc.StyledTextEvent_GetDragResult(*args, **kwargs) def GetDragAllowMove(*args, **kwargs): """GetDragAllowMove(self) -> bool""" return _stc.StyledTextEvent_GetDragAllowMove(*args, **kwargs) def GetShift(*args, **kwargs): """GetShift(self) -> bool""" return _stc.StyledTextEvent_GetShift(*args, **kwargs) def GetControl(*args, **kwargs): """GetControl(self) -> bool""" return _stc.StyledTextEvent_GetControl(*args, **kwargs) def GetAlt(*args, **kwargs): """GetAlt(self) -> bool""" return _stc.StyledTextEvent_GetAlt(*args, **kwargs) Alt = property(GetAlt) Control = property(GetControl) DragAllowMove = property(GetDragAllowMove,SetDragAllowMove) DragResult = property(GetDragResult,SetDragResult) DragText = property(GetDragText,SetDragText) FoldLevelNow = property(GetFoldLevelNow,SetFoldLevelNow) FoldLevelPrev = property(GetFoldLevelPrev,SetFoldLevelPrev) Key = property(GetKey,SetKey) LParam = property(GetLParam,SetLParam) Length = property(GetLength,SetLength) Line = property(GetLine,SetLine) LinesAdded = property(GetLinesAdded,SetLinesAdded) ListType = property(GetListType,SetListType) Margin = property(GetMargin,SetMargin) Message = property(GetMessage,SetMessage) ModificationType = property(GetModificationType,SetModificationType) Modifiers = property(GetModifiers,SetModifiers) Position = property(GetPosition,SetPosition) Shift = property(GetShift) Text = property(GetText,SetText) WParam = property(GetWParam,SetWParam) X = property(GetX,SetX) Y = property(GetY,SetY) _stc.StyledTextEvent_swigregister(StyledTextEvent) wxEVT_STC_CHANGE = _stc.wxEVT_STC_CHANGE wxEVT_STC_STYLENEEDED = _stc.wxEVT_STC_STYLENEEDED wxEVT_STC_CHARADDED = _stc.wxEVT_STC_CHARADDED wxEVT_STC_SAVEPOINTREACHED = _stc.wxEVT_STC_SAVEPOINTREACHED wxEVT_STC_SAVEPOINTLEFT = 
_stc.wxEVT_STC_SAVEPOINTLEFT wxEVT_STC_ROMODIFYATTEMPT = _stc.wxEVT_STC_ROMODIFYATTEMPT wxEVT_STC_KEY = _stc.wxEVT_STC_KEY wxEVT_STC_DOUBLECLICK = _stc.wxEVT_STC_DOUBLECLICK wxEVT_STC_UPDATEUI = _stc.wxEVT_STC_UPDATEUI wxEVT_STC_MODIFIED = _stc.wxEVT_STC_MODIFIED wxEVT_STC_MACRORECORD = _stc.wxEVT_STC_MACRORECORD wxEVT_STC_MARGINCLICK = _stc.wxEVT_STC_MARGINCLICK wxEVT_STC_NEEDSHOWN = _stc.wxEVT_STC_NEEDSHOWN wxEVT_STC_PAINTED = _stc.wxEVT_STC_PAINTED wxEVT_STC_USERLISTSELECTION = _stc.wxEVT_STC_USERLISTSELECTION wxEVT_STC_URIDROPPED = _stc.wxEVT_STC_URIDROPPED wxEVT_STC_DWELLSTART = _stc.wxEVT_STC_DWELLSTART wxEVT_STC_DWELLEND = _stc.wxEVT_STC_DWELLEND wxEVT_STC_START_DRAG = _stc.wxEVT_STC_START_DRAG wxEVT_STC_DRAG_OVER = _stc.wxEVT_STC_DRAG_OVER wxEVT_STC_DO_DROP = _stc.wxEVT_STC_DO_DROP wxEVT_STC_ZOOM = _stc.wxEVT_STC_ZOOM wxEVT_STC_HOTSPOT_CLICK = _stc.wxEVT_STC_HOTSPOT_CLICK wxEVT_STC_HOTSPOT_DCLICK = _stc.wxEVT_STC_HOTSPOT_DCLICK wxEVT_STC_CALLTIP_CLICK = _stc.wxEVT_STC_CALLTIP_CLICK wxEVT_STC_AUTOCOMP_SELECTION = _stc.wxEVT_STC_AUTOCOMP_SELECTION wxEVT_STC_INDICATOR_CLICK = _stc.wxEVT_STC_INDICATOR_CLICK wxEVT_STC_INDICATOR_RELEASE = _stc.wxEVT_STC_INDICATOR_RELEASE wxEVT_STC_AUTOCOMP_CANCELLED = _stc.wxEVT_STC_AUTOCOMP_CANCELLED wxEVT_STC_AUTOCOMP_CHAR_DELETED = _stc.wxEVT_STC_AUTOCOMP_CHAR_DELETED wxEVT_STC_HOTSPOT_RELEASE_CLICK = _stc.wxEVT_STC_HOTSPOT_RELEASE_CLICK EVT_STC_CHANGE = wx.PyEventBinder( wxEVT_STC_CHANGE, 1 ) EVT_STC_STYLENEEDED = wx.PyEventBinder( wxEVT_STC_STYLENEEDED, 1 ) EVT_STC_CHARADDED = wx.PyEventBinder( wxEVT_STC_CHARADDED, 1 ) EVT_STC_SAVEPOINTREACHED = wx.PyEventBinder( wxEVT_STC_SAVEPOINTREACHED, 1 ) EVT_STC_SAVEPOINTLEFT = wx.PyEventBinder( wxEVT_STC_SAVEPOINTLEFT, 1 ) EVT_STC_ROMODIFYATTEMPT = wx.PyEventBinder( wxEVT_STC_ROMODIFYATTEMPT, 1 ) EVT_STC_KEY = wx.PyEventBinder( wxEVT_STC_KEY, 1 ) EVT_STC_DOUBLECLICK = wx.PyEventBinder( wxEVT_STC_DOUBLECLICK, 1 ) EVT_STC_UPDATEUI = wx.PyEventBinder( wxEVT_STC_UPDATEUI, 1 ) EVT_STC_MODIFIED = wx.PyEventBinder( wxEVT_STC_MODIFIED, 1 ) EVT_STC_MACRORECORD = wx.PyEventBinder( wxEVT_STC_MACRORECORD, 1 ) EVT_STC_MARGINCLICK = wx.PyEventBinder( wxEVT_STC_MARGINCLICK, 1 ) EVT_STC_NEEDSHOWN = wx.PyEventBinder( wxEVT_STC_NEEDSHOWN, 1 ) EVT_STC_PAINTED = wx.PyEventBinder( wxEVT_STC_PAINTED, 1 ) EVT_STC_USERLISTSELECTION = wx.PyEventBinder( wxEVT_STC_USERLISTSELECTION, 1 ) EVT_STC_URIDROPPED = wx.PyEventBinder( wxEVT_STC_URIDROPPED, 1 ) EVT_STC_DWELLSTART = wx.PyEventBinder( wxEVT_STC_DWELLSTART, 1 ) EVT_STC_DWELLEND = wx.PyEventBinder( wxEVT_STC_DWELLEND, 1 ) EVT_STC_START_DRAG = wx.PyEventBinder( wxEVT_STC_START_DRAG, 1 ) EVT_STC_DRAG_OVER = wx.PyEventBinder( wxEVT_STC_DRAG_OVER, 1 ) EVT_STC_DO_DROP = wx.PyEventBinder( wxEVT_STC_DO_DROP, 1 ) EVT_STC_ZOOM = wx.PyEventBinder( wxEVT_STC_ZOOM, 1 ) EVT_STC_HOTSPOT_CLICK = wx.PyEventBinder( wxEVT_STC_HOTSPOT_CLICK, 1 ) EVT_STC_HOTSPOT_DCLICK = wx.PyEventBinder( wxEVT_STC_HOTSPOT_DCLICK, 1 ) EVT_STC_CALLTIP_CLICK = wx.PyEventBinder( wxEVT_STC_CALLTIP_CLICK, 1 ) EVT_STC_AUTOCOMP_SELECTION = wx.PyEventBinder( wxEVT_STC_AUTOCOMP_SELECTION, 1 ) EVT_STC_INDICATOR_CLICK = wx.PyEventBinder( wxEVT_STC_INDICATOR_CLICK, 1 ) EVT_STC_INDICATOR_RELEASE = wx.PyEventBinder( wxEVT_STC_INDICATOR_RELEASE, 1 ) EVT_STC_AUTOCOMP_CANCELLED = wx.PyEventBinder( wxEVT_STC_AUTOCOMP_CANCELLED, 1 ) EVT_STC_AUTOCOMP_CHAR_DELETED = wx.PyEventBinder( wxEVT_STC_AUTOCOMP_CHAR_DELETED, 1 ) ``` #### File: src/ebmlib/efilehist.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: efilehist.py 71668 
2012-06-06 18:32:07Z CJP $" __revision__ = "$Revision: 71668 $" __all__ = ['EFileHistory',] #-----------------------------------------------------------------------------# # Imports import os import wx # Local Imports import txtutil #-----------------------------------------------------------------------------# class EFileHistory(object): """FileHistory Menu Manager""" def __init__(self, maxFile=9): assert maxFile <= 9, "supports at most 9 files" super(EFileHistory, self).__init__() # Attributes self._history = list() self._maxFiles = maxFile self._menu = None def _UpdateMenu(self): """Update the filehistory menu""" menu = self.Menu # optimization assert menu is not None for item in menu.GetMenuItems(): menu.RemoveItem(item) # Validate and cleanup any bad entries to_remove = list() for item in self.History: if not item: to_remove.append(item) elif not os.path.exists(item): to_remove.append(item) for item in to_remove: self.History.remove(item) for index, histfile in enumerate(self.History): menuid = wx.ID_FILE1 + index if menuid <= wx.ID_FILE9: menu.Append(menuid, histfile) else: break Count = property(lambda self: self.GetCount()) History = property(lambda self: self._history, lambda self, hist: self.SetHistory(hist)) MaxFiles = property(lambda self: self._maxFiles) Menu = property(lambda self: self._menu, lambda self, menu: self.UseMenu(menu)) def AddFileToHistory(self, fname): """Add a file to the history @param fname: Unicode """ if not fname: return assert txtutil.IsUnicode(fname) assert self.Menu is not None # Shuffle to top of history if already in there if fname in self.History: self.History.remove(fname) self.History.insert(0, fname) # Maintain set length if self.Count > self.MaxFiles: self._history.pop() # Update menu object for new history list self._UpdateMenu() def GetCount(self): """Get the number of files in the history @return: int """ return len(self._history) def GetHistoryFile(self, index): """Get the history file at the given index @param index: int @return: Unicode """ assert self.MaxFiles > index, "Index out of range" return self.History[index] def RemoveFileFromHistory(self, index): """Remove a file from the history""" assert self.MaxFiles > index, "Index out of range" self.History.pop(index) self._UpdateMenu() def SetHistory(self, hist): """Set the file history from a list @param hist: list of Unicode """ # Ensure list is unique hist = list(set(hist)) assert len(hist) <= self.MaxFiles self._history = hist self._UpdateMenu() def UseMenu(self, menu): """Set the menu for the file history to use @param menu: wx.Menu """ assert isinstance(menu, wx.Menu) if self.Menu is not None: self._menu.Destroy() self._menu = menu self._UpdateMenu() ``` #### File: src/ebmlib/osutil.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: $" __revision__ = "$Revision: $" __all__ = ['InstallTermHandler', 'GetWindowsDrives', 'GetWindowsDriveType', 'GenericDrive', 'FixedDrive', 'CDROMDrive', 'RamDiskDrive', 'RemoteDrive', 'RemovableDrive' ] #-----------------------------------------------------------------------------# # Imports import wx import ctypes import signal import collections HASWIN32 = False if wx.Platform == '__WXMSW__': try: import win32api except ImportError: HASWIN32 = False else: HASWIN32 = True #-----------------------------------------------------------------------------# # Windows Drive Utilities class GenericDrive(object): def __init__(self, name): super(GenericDrive, self).__init__() # Attributes self._name = name Name = property(lambda self: self._name, lambda 
self, v: setattr(self, '_name', v)) class RemovableDrive(GenericDrive): pass class FixedDrive(GenericDrive): pass class RemoteDrive(GenericDrive): pass class CDROMDrive(GenericDrive): pass class RamDiskDrive(GenericDrive): pass def GetWindowsDrives(): """Get a list of all available windows drives @return: list of strings """ assert wx.Platform == '__WXMSW__', "Windows Only API Method" drives = list() try: dletters = list() bmask = ctypes.windll.kernel32.GetLogicalDrives() for dletter in u"ABCDEFGHIJKLMNOPQRSTUVWXYZ": if bmask & 1: dletters.append(dletter) bmask >>= 1 for dletter in dletters: dname = dletter + u":\\" dtype = GetWindowsDriveType(dname) if type(dtype) != GenericDrive: drives.append(dtype) except Exception, err: pass return drives def GetWindowsDriveType(dname): """Get the drive type for the given letter""" assert wx.Platform == '__WXMSW__', "Windows Only API Method" dtype = GenericDrive(dname) try: dtypes = [None, None, RemovableDrive, FixedDrive, RemoteDrive, CDROMDrive, RamDiskDrive] idx = ctypes.windll.kernel32.GetDriveTypeW(dname) if idx < len(dtypes): drive = dtypes[idx] if drive: dtype = drive(dname) except: pass return dtype #-----------------------------------------------------------------------------# def InstallTermHandler(callback, *args, **kwargs): """Install exit app handler for sigterm (unix/linux) and uses SetConsoleCtrlHandler on Windows. @param callback: callable(*args, **kwargs) @param args: positional arguments to pass to callback @param kwargs: keyword arguments to pass to callback @return: bool (installed or not) """ assert isinstance(callback, collections.Callable), "callback must be callable!" installed = True if wx.Platform == '__WXMSW__': if HASWIN32: win32api.SetConsoleCtrlHandler(lambda dummy : callback(*args, **kwargs), True) else: installed = False else: signal.signal(signal.SIGTERM, lambda signum, frame : callback(*args, **kwargs)) return installed ``` #### File: src/eclib/segmentbk.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: segmentbk.py 69065 2011-09-11 19:18:25Z CJP $" __revision__ = "$Revision: 69065 $" __all__ = ['SegmentBook', 'SegmentBookEvent', 'SEGBOOK_STYLE_DEFAULT', 'SEGBOOK_STYLE_NO_DIVIDERS', 'SEGBOOK_STYLE_LABELS', 'SEGBOOK_STYLE_LEFT', 'SEGBOOK_STYLE_RIGHT', 'SEGBOOK_STYLE_TOP', 'SEGBOOK_STYLE_BOTTOM', 'SEGBOOK_NAME_STR', 'edEVT_SB_PAGE_CHANGING', 'EVT_SB_PAGE_CHANGING', 'edEVT_SB_PAGE_CHANGED', 'EVT_SB_PAGE_CHANGED', 'edEVT_SB_PAGE_CLOSED', 'EVT_SB_PAGE_CLOSED', 'edEVT_SB_PAGE_CONTEXT_MENU', 'EVT_SB_PAGE_CONTEXT_MENU', 'edEVT_SB_PAGE_CLOSING', 'EVT_SB_PAGE_CLOSING' ] #-----------------------------------------------------------------------------# # Imports import wx # Local Imports import ctrlbox from eclutil import Freezer #-----------------------------------------------------------------------------# # Events edEVT_SB_PAGE_CHANGING = wx.NewEventType() EVT_SB_PAGE_CHANGING = wx.PyEventBinder(edEVT_SB_PAGE_CHANGING, 1) edEVT_SB_PAGE_CHANGED = wx.NewEventType() EVT_SB_PAGE_CHANGED = wx.PyEventBinder(edEVT_SB_PAGE_CHANGED, 1) edEVT_SB_PAGE_CLOSING = wx.NewEventType() EVT_SB_PAGE_CLOSING = wx.PyEventBinder(edEVT_SB_PAGE_CLOSING, 1) edEVT_SB_PAGE_CLOSED = wx.NewEventType() EVT_SB_PAGE_CLOSED = wx.PyEventBinder(edEVT_SB_PAGE_CLOSED, 1) edEVT_SB_PAGE_CONTEXT_MENU = wx.NewEventType() EVT_SB_PAGE_CONTEXT_MENU = wx.PyEventBinder(edEVT_SB_PAGE_CONTEXT_MENU, 1) class SegmentBookEvent(wx.NotebookEvent): """SegmentBook event""" def __init__(self, etype=wx.wxEVT_NULL, id=-1, sel=-1, old_sel=-1): super(SegmentBookEvent, 
self).__init__(etype, id, sel, old_sel)

#-----------------------------------------------------------------------------#
# Global constants

# Styles
SEGBOOK_STYLE_NO_DIVIDERS = 1   # Don't put dividers between segments
SEGBOOK_STYLE_LABELS = 2        # Use labels below the icons
SEGBOOK_STYLE_TOP = 4           # Segments at top
SEGBOOK_STYLE_BOTTOM = 8        # Segments at bottom
SEGBOOK_STYLE_LEFT = 16         # Segments at left
SEGBOOK_STYLE_RIGHT = 32        # Segments at right
SEGBOOK_STYLE_DEFAULT = SEGBOOK_STYLE_TOP # Default Style

# HitTest return codes (referenced by SegmentBook.HitTest below; defined
# here so the method does not raise NameError)
SEGBOOK_NO_WHERE = 0
SEGBOOK_ON_SEGMENT = 1

# Misc
SEGBOOK_NAME_STR = u"EditraSegmentBook"

#-----------------------------------------------------------------------------#

class SegmentBook(ctrlbox.ControlBox):
    """Notebook-like control that manages its pages with a SegmentBar"""
    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
                 size=wx.DefaultSize, style=SEGBOOK_STYLE_DEFAULT,
                 name=SEGBOOK_NAME_STR):
        """Initialize the SegmentBook"""
        super(SegmentBook, self).__init__(parent, id, pos, size,
                                          wx.TAB_TRAVERSAL|wx.NO_BORDER, name)

        # Attributes
        self._pages = list()
        self._imglist = None
        self._use_pylist = False
        self._style = style

        # Setup
        bstyle = ctrlbox.CTRLBAR_STYLE_BORDER_BOTTOM
        # Disable gradient on GTK due to coloring issues and having
        # to deal with various themes.
        if wx.Platform != '__WXGTK__':
            bstyle |= ctrlbox.CTRLBAR_STYLE_GRADIENT

        if style & SEGBOOK_STYLE_NO_DIVIDERS:
            bstyle |= ctrlbox.CTRLBAR_STYLE_NO_DIVIDERS
        if style & SEGBOOK_STYLE_LABELS:
            bstyle |= ctrlbox.CTRLBAR_STYLE_LABELS
        if style & SEGBOOK_STYLE_LEFT or style & SEGBOOK_STYLE_RIGHT:
            bstyle |= ctrlbox.CTRLBAR_STYLE_VERTICAL

        self._segbar = ctrlbox.SegmentBar(self, style=bstyle)
        self.SetControlBar(self._segbar, self._GetSegBarPos())

        # Event Handlers
        self.Bind(ctrlbox.EVT_SEGMENT_SELECTED, self._OnSegmentSel)
        self._segbar.Bind(wx.EVT_RIGHT_DOWN, self._OnRightDown)
        self._segbar.Bind(ctrlbox.EVT_SEGMENT_CLOSE, self._OnSegClose)

    def _GetSegBarPos(self):
        pos = wx.TOP
        if self._style & SEGBOOK_STYLE_LEFT:
            pos = wx.LEFT
        elif self._style & SEGBOOK_STYLE_RIGHT:
            pos = wx.RIGHT
        elif self._style & SEGBOOK_STYLE_BOTTOM:
            pos = wx.BOTTOM
        return pos

    def _DoPageChange(self, psel, csel):
        """Change the page and post events
        @param psel: previous selection (int)
        @param csel: current selection (int)

        """
        # Post page changing event
        event = SegmentBookEvent(edEVT_SB_PAGE_CHANGING,
                                 self.GetId(), csel, psel)
        event.SetEventObject(self)
        handler = self.GetEventHandler()
        if not handler.ProcessEvent(event) or event.IsAllowed():
            # Do the actual page change
            with Freezer(self) as _tmp:
                self.ChangePage(csel)

            # Post page changed event
            event.SetEventType(edEVT_SB_PAGE_CHANGED)
            handler.ProcessEvent(event)
            changed = True
        else:
            # Reset the segment selection
            self._segbar.SetSelection(max(psel, 0))
            changed = False
        return changed

    def _OnRightDown(self, evt):
        """Handle right click events"""
        pos = evt.GetPosition()
        where, index = self._segbar.HitTest(pos)
        if where in (ctrlbox.SEGMENT_HT_SEG, ctrlbox.SEGMENT_HT_X_BTN):
            if where == ctrlbox.SEGMENT_HT_SEG:
                self._segbar.SetSelection(index)
                changed = self._DoPageChange(self.GetSelection(), index)
                if changed:
                    # Send Context Menu Event
                    event = SegmentBookEvent(edEVT_SB_PAGE_CONTEXT_MENU,
                                             self.GetId())
                    event.SetSelection(index)
                    event.SetOldSelection(index)
                    event.SetEventObject(self)
                    self.GetEventHandler().ProcessEvent(event)
            else:
                # TODO: Handle other right clicks
                pass

        evt.Skip()

    def _OnSegClose(self, evt):
        """Handle clicks on segment close buttons"""
        index = evt.GetPreviousSelection()
        change = -1
        segcnt = self._segbar.GetSegmentCount() - 1
        if index == 0 and segcnt:
            change = 1
        elif index > 0 and segcnt > 1:
            change = index - 1

        if change != -1:
            self._DoPageChange(index, change)

        self._pages[index]['page'].Destroy()
        del self._pages[index]

    def _OnSegmentSel(self, evt):
        """Change the page in the book"""
        psel = evt.GetPreviousSelection()
        csel = evt.GetCurrentSelection()
        self._DoPageChange(psel, csel)

    def AddPage(self, page, text, select=False, img_id=-1):
        """Add a page to the notebook
        @param page: wxWindow object
        @param text: Page text
        @keyword select: should the page be selected
        @keyword img_id: Image to use

        """
        page.Hide()
        self._pages.append(dict(page=page, img=img_id))
        segbar = self.GetControlBar(self._GetSegBarPos())
        if self._use_pylist:
            bmp = self._imglist[img_id]
        else:
            bmp = self._imglist.GetBitmap(img_id)
        segbar.AddSegment(wx.ID_ANY, bmp, text)
        idx = len(self._pages) - 1
        if select or idx == 0:
            segbar.SetSelection(idx)
            self._DoPageChange(segbar.GetSelection(), idx)

    def ChangePage(self, index):
        """Change the page to the given index"""
        cpage = self._pages[index]['page']
        page = self.ChangeWindow(cpage)
        if page is not None:
            page.Hide()
        cpage.Show()
        self.Layout()

    def DeleteAllPages(self):
        """Remove all pages from the control"""
        for page in reversed(range(len(self._pages))):
            self.DeletePage(page)

    def DeletePage(self, index):
        """Delete the page at the given index
        @param index: int

        """
        cpage = self._segbar.GetSelection()
        self._segbar.RemoveSegment(index)
        npage = self._segbar.GetSelection()
        self._DoPageChange(cpage, npage)

        self._pages[index]['page'].Destroy()
        del self._pages[index]

    def CurrentPage(self):
        """Get the currently selected page
        @return: wxWindow or None

        """
        idx = self._segbar.GetSelection()
        if idx != -1:
            return self._pages[idx]['page']
        else:
            return None

    def GetImageList(self):
        """Get the notebook's image list
        @return: wxImageList or None

        """
        return self._imglist

    def GetPage(self, index):
        """Get the page at the given index
        @param index: int

        """
        return self._pages[index]['page']

    def GetPageCount(self):
        """Get the number of pages in the book
        @return: int

        """
        return len(self._pages)

    def GetPageImage(self, index):
        """Get the image index of the given page
        @param index: page index
        @return: int

        """
        return self._pages[index]['img']

    def SetPageCloseButton(self, index):
        """Put a close button on the page at the given index
        @param index: Segment index

        """
        if wx.Platform != '__WXMAC__':
            self._segbar.SetSegmentOption(index, ctrlbox.SEGBTN_OPT_CLOSEBTNR)
        else:
            self._segbar.SetSegmentOption(index, ctrlbox.SEGBTN_OPT_CLOSEBTNL)

    def GetPageText(self, index):
        """Get the text of the page at the given index
        @param index: page index
        @return: string

        """
        return self._segbar.GetSegmentLabel(index)

    def SetSegmentCanClose(self, index, can_close=True):
        """Add a close button to the given segment
        @param index: segment index
        @keyword can_close: Enable/Disable

        """
        if not can_close:
            opt = ctrlbox.SEGBTN_OPT_NONE
        elif wx.Platform == '__WXMAC__':
            opt = ctrlbox.SEGBTN_OPT_CLOSEBTNL
        else:
            opt = ctrlbox.SEGBTN_OPT_CLOSEBTNR
        self._segbar.SetSegmentOption(index, opt)

    def GetSelection(self):
        """Get the current selection
        @return: int

        """
        return self._segbar.GetSelection()

    def GetSegmentBar(self):
        """Get the segment bar used by this control
        @return: SegmentBar

        """
        return self._segbar

    def HasMultiplePages(self):
        """Does the book have multiple pages
        @return: bool

        """
        return bool(self.GetPageCount())

    def HitTest(self, pt):
        """Find if/where the given point is in the window
        @param pt: wxPoint
        @return: where, index

        """
        where = SEGBOOK_NO_WHERE
        index = self._segbar.GetIndexFromPosition(pt)
        if index != wx.NOT_FOUND:
            where = SEGBOOK_ON_SEGMENT
        # TODO: check for clicks elsewhere on bar
        return where, index

    def InsertPage(self, index, page, text, select=False, image_id=-1):
        """Insert a page at the given index
        @param index: index to insert page at
        @param page: page to add to book
        @param text: page text
        @keyword select: bool
        @keyword image_id: image list index

        """
        raise NotImplementedError

    def Refresh(self):
        """Refresh the segmentbar
        @todo: temporary HACK till rework of SegmentBar class image handling

        """
        segbar = self.GetSegmentBar()
        for page in range(self.GetPageCount()):
            idx = self.GetPageImage(page)
            bmp = self._imglist[idx]
            segbar.SetSegmentImage(page, bmp)
        segbar.Refresh()
        super(SegmentBook, self).Refresh()

    def SetImageList(self, imglist):
        """Set the notebook's image list
        @param imglist: wxImageList

        """
        self._imglist = imglist

    def SetPageImage(self, index, img_id):
        """Set the image to use on the given page
        @param index: page index
        @param img_id: image list index

        """
        page = self._pages[index]
        page['img'] = img_id
        self._segbar.SetSegmentImage(index, self._imglist.GetBitmap(img_id))
        self.Layout()

    def SetPageText(self, index, text):
        """Set the text to use on the given page
        @param index: page index
        @param text: string

        """
        self._segbar.SetSegmentLabel(index, text)

    def SetSelection(self, index):
        """Set the selected page
        @param index: index of page to select

        """
        csel = self._segbar.GetSelection()
        if csel != index:
            self._segbar.SetSelection(index)
            self._DoPageChange(csel, index)

    def SetUsePyImageList(self, use_pylist):
        """Set whether the control is using a regular python list for
        storing images or a wxImageList.
        @param use_pylist: bool

        """
        self._use_pylist = use_pylist
```
#### File: Editra/src/ed_mpane.py
```python
__author__ = "<NAME> <<EMAIL>>"
__svnid__ = "$Id: ed_mpane.py 72278 2012-08-02 14:24:23Z CJP $"
__revision__ = "$Revision: 72278 $"

#-----------------------------------------------------------------------------#
# Imports
import wx

# Editra Libraries
import ed_glob
import ed_pages
import ed_cmdbar
import eclib

#-----------------------------------------------------------------------------#

class MainPanel(eclib.ControlBox):
    """Main panel view
    @todo: Add interface for registering additional commandbars.
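
    Example usage (a sketch, not from the Editra sources; assumes ``frame``
    is the parent wx.Frame and ``SOME_BAR_ID`` is a hypothetical commandbar
    id understood by ed_cmdbar.CommandBarBase.FactoryCreate):

        panel = MainPanel(frame)
        book = panel.GetNotebook()             # the EdPages notebook
        panel.ShowCommandControl(SOME_BAR_ID)  # create/show a commandbar
        panel.HideCommandBar()                 # hide it again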
""" def __init__(self, parent): """Initialize the panel""" super(MainPanel, self).__init__(parent) # Attributes self.nb = ed_pages.EdPages(self) self._bars = dict() # Layout self.SetWindow(self.nb) self.Bind(wx.EVT_ERASE_BACKGROUND, self.OnEB) def OnEB(self, evt): """Empty method to fix notebook flashing issue on MSW""" pass Book = property(lambda self: self.nb) def GetNotebook(self): """Get the main notebook control @return: EdPages instance """ return self.nb def HideCommandBar(self): """Hide the command bar""" bar = self.GetControlBar(wx.BOTTOM) if bar: bar.Hide() self.Layout() def ShowCommandControl(self, ctrlid): """Change the mode of the commandbar @param ctrlid: CommandBar control id """ cur_bar = self.GetControlBar(wx.BOTTOM) if ctrlid in self._bars: nbar = self._bars[ctrlid] else: nbar = ed_cmdbar.CommandBarBase.FactoryCreate(ctrlid, self) if nbar: self._bars[ctrlid] = nbar if nbar and nbar is not cur_bar: if cur_bar is None: self.SetControlBar(nbar, wx.BOTTOM) else: cur_bar = self.ReplaceControlBar(nbar, wx.BOTTOM) if cur_bar: if cur_bar is not nbar: cur_bar.Hide() cbar = self.GetControlBar(wx.BOTTOM) if cbar is not None: cbar.Show() cbar.Layout() cbar.SetFocus() self.Layout() ``` #### File: src/syntax/_asm.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _asm.py 70228 2011-12-31 20:39:16Z CJP $" __revision__ = "$Revision: 70228 $" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc # Local Imports import synglob import syndata #-----------------------------------------------------------------------------# # GNU Assembly CPU Instructions/Storage Types ASM_CPU_INST = (0, ".long .ascii .asciz .byte .double .float .hword .int .octa " ".quad .short .single .space .string .word") # GNU FPU Instructions ASM_MATH_INST = (1, "") # GNU Registers ASM_REGISTER = (2, "") # GNU Assembly Directives/Special statements/Macros ASM_DIRECTIVES = (3, ".include .macro .endm") #---- Language Styling Specs ----# SYNTAX_ITEMS = [ (stc.STC_ASM_DEFAULT, 'default_style'), (stc.STC_ASM_CHARACTER, 'char_style'), (stc.STC_ASM_COMMENT, 'comment_style'), (stc.STC_ASM_COMMENTBLOCK, 'comment_style'), (stc.STC_ASM_CPUINSTRUCTION, 'keyword_style'), (stc.STC_ASM_DIRECTIVE, 'keyword3_style'), (stc.STC_ASM_DIRECTIVEOPERAND, 'default_style'), (stc.STC_ASM_EXTINSTRUCTION, 'default_style'), (stc.STC_ASM_IDENTIFIER, 'default_style'), (stc.STC_ASM_MATHINSTRUCTION, 'keyword_style'), (stc.STC_ASM_NUMBER, 'number_style'), (stc.STC_ASM_OPERATOR, 'operator_style'), (stc.STC_ASM_REGISTER, 'keyword2_style'), (stc.STC_ASM_STRING, 'string_style'), (stc.STC_ASM_STRINGEOL, 'stringeol_style') ] #-----------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for Assembly files""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup # synglob.ID_LANG_ASM self.SetLexer(stc.STC_LEX_ASM) def GetKeywords(self): """Returns List of Keyword Specifications """ return [ASM_CPU_INST, ASM_DIRECTIVES] def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ return [u';'] ``` #### File: src/syntax/_cpp.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _cpp.py 68798 2011-08-20 17:17:05Z CJP $" __revision__ = "$Revision: 68798 $" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc import re # 
Local imports import synglob import syndata #-----------------------------------------------------------------------------# #---- Keyword Specifications ----# # C Keywords C_KEYWORDS = ("asm break case const continue default do else for goto return " "if sizeof static switch typeof while") # C Types/Structures/Storage Classes C_TYPES = ("auto bool char clock_t complex div_t double enum extern float " "fpos_t inline int int_least8_t int_least16_t int_least32_t " "int_least64_t int8_t int16_t int32_t int64_t intmax_t intptr_t " "jmp_buf ldiv_t long mbstate_t ptrdiff_t register sig_atomic_t " "size_t ssize_t short signed struct typedef union time_t " "uint_fast8_t uint_fast16_t uint_fast32_t uint_fast64_t uint8_t " "uint16_t uint32_t uint64_t uintptr_t uintmax_t unsigned va_list " "void volatile wchar_t wctrans_t wctype_t wint_t FILE DIR __label__ " "__complex__ __volatile__ __attribute__") # C/CPP Documentation Keywords (includes Doxygen keywords) DOC_KEYWORDS = (2, "TODO FIXME XXX author brief bug callgraph category class " "code date def depreciated dir dot dotfile else elseif em " "endcode enddot endif endverbatim example exception file if " "ifnot image include link mainpage name namespace page par " "paragraph param pre post return retval section struct " "subpage subsection subsubsection test todo typedef union " "var verbatim version warning $ @ ~ < > # % HACK") # CPP Keyword Extensions CPP_KEYWORDS = ("and and_eq bitand bitor catch class compl const_cast delete " "dynamic_cast false friend new not not_eq operator or or_eq " "private protected public reinterpret_cast static_cast this " "throw try true typeid using xor xor_eq") # CPP Type/Structure/Storage Class Extensions CPP_TYPES = ("bool inline explicit export mutable namespace template typename " "virtual wchar_t") # C# Keywords CSHARP_KW = ("abstract as base break case catch checked class const continue " "default delegate do else event explicit extern false finally " "fixed for foreach goto if implicit in interface internal is lock " "new null operator out override params readonly ref return sealed " "sizeof stackalloc static switch this throw true try typeof " "unchecked unsafe using while") # C# Types CSHARP_TYPES = ("bool byte char decimal double enum float int long " "namespace object private protected public sbyte short string " "struct uint ulong ushort virtual void volatile") # Objective C OBJC_KEYWORDS = ("@catch @interface @implementation @end @finally @private " "@protected @protocol @public @throw @try self super false " "true") OBJC_TYPES = ("id") # Vala Keywords VALA_KEYWORDS = ("abstract as base break case catch checked construct continue " "default delegate do else event false finally for foreach get " "goto if implicit interface internal is lock new operator out " "override params readonly ref return sealed set sizeof " "stackalloc this throw true try typeof unchecked using while") VALA_TYPES = ("bool byte char class const decimal double enum explicit extern " "fixed float int long namespace private protected public sbyte " "short static string struct uint ulong unichar unsafe ushort var " "volatile void virtual") # Cilk Keywords CILK_KEYWORDS = ("abort private shared spawn sync SYNCHED") CILK_TYPES = ("cilk inlet") #---- Syntax Style Specs ----# SYNTAX_ITEMS = [ (stc.STC_C_DEFAULT, 'default_style'), (stc.STC_C_COMMENT, 'comment_style'), (stc.STC_C_COMMENTLINE, 'comment_style'), (stc.STC_C_COMMENTDOC, 'comment_style'), (stc.STC_C_COMMENTDOCKEYWORD, 'dockey_style'), (stc.STC_C_COMMENTDOCKEYWORDERROR, 
'error_style'), (stc.STC_C_COMMENTLINE, 'comment_style'), (stc.STC_C_COMMENTLINEDOC, 'comment_style'), (stc.STC_C_CHARACTER, 'char_style'), (stc.STC_C_GLOBALCLASS, 'global_style'), (stc.STC_C_IDENTIFIER, 'default_style'), (stc.STC_C_NUMBER, 'number_style'), (stc.STC_C_OPERATOR, 'operator_style'), (stc.STC_C_PREPROCESSOR, 'pre_style'), (stc.STC_C_REGEX, 'pre_style'), (stc.STC_C_STRING, 'string_style'), (stc.STC_C_STRINGEOL, 'stringeol_style'), (stc.STC_C_UUID, 'pre_style'), (stc.STC_C_VERBATIM, 'number2_style'), (stc.STC_C_WORD, 'keyword_style'), (stc.STC_C_WORD2, 'keyword2_style') ] #---- Extra Properties ----# FOLD = ("fold", "1") FOLD_PRE = ("styling.within.preprocessor", "0") FOLD_COM = ("fold.comment", "1") FOLD_COMP = ("fold.compact", "1") FOLD_ELSE = ("fold.at.else", "0") ALLOW_DOLLARS = ("lexer.cpp.allow.dollars", "1") #------------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for many C like languages""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup self.SetLexer(stc.STC_LEX_CPP) self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter) def GetKeywords(self): """Returns Specified Keywords List""" keywords = list() kw1_str = [C_KEYWORDS] kw2_str = [C_TYPES] if self.LangId == synglob.ID_LANG_CPP: kw1_str.append(CPP_KEYWORDS) kw2_str.append(CPP_TYPES) elif self.LangId == synglob.ID_LANG_CSHARP: kw1_str = [CSHARP_KW] kw2_str = [CSHARP_TYPES] elif self.LangId == synglob.ID_LANG_OBJC: kw1_str.append(OBJC_KEYWORDS) kw2_str.append(OBJC_TYPES) elif self.LangId == synglob.ID_LANG_VALA: kw1_str = [VALA_KEYWORDS] kw2_str = [VALA_TYPES] elif self.LangId == synglob.ID_LANG_CILK: kw1_str.append(CILK_KEYWORDS) kw2_str.append(CILK_TYPES) else: pass keywords.append((0, " ".join(kw1_str))) keywords.append((1, " ".join(kw2_str))) keywords.append(DOC_KEYWORDS) return keywords def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetProperties(self): """Returns a list of Extra Properties to set""" return [FOLD, FOLD_PRE, FOLD_COM] def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ if self.LangId in [ synglob.ID_LANG_CPP, synglob.ID_LANG_CSHARP, synglob.ID_LANG_OBJC, synglob.ID_LANG_VALA ]: return [u'//'] else: return [u'/*', u'*/'] #-----------------------------------------------------------------------------# def AutoIndenter(estc, pos, ichar): """Auto indent cpp code. 
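    Worked example (illustrative only, assuming ``ichar`` is a 4-space
    indent string and the caret line is indented 8 columns): i_space is
    8 / 4 = 2, so the text added to the buffer is the EOL string plus 8
    columns of indent; if the stripped line ends with '{' or matches the
    access-specifier/case-label patterns below, one extra ``ichar`` brings
    the indent to 12 columns.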
@param estc: EditraStyledTextCtrl @param pos: current carat position @param ichar: Indentation character @return: string """ rtxt = u'' line = estc.GetCurrentLine() text = estc.GetTextRange(estc.PositionFromLine(line), pos) eolch = estc.GetEOLChar() indent = estc.GetLineIndentation(line) if ichar == u"\t": tabw = estc.GetTabWidth() else: tabw = estc.GetIndent() i_space = indent / tabw ndent = eolch + ichar * i_space rtxt = ndent + ((indent - (tabw * i_space)) * u' ') cdef_pat = re.compile('(public|private|protected)\s*\:') case_pat = re.compile('(case\s+.+|default)\:') text = text.strip() if text.endswith('{') or cdef_pat.match(text) or case_pat.match(text): rtxt += ichar # Put text in the buffer estc.AddText(rtxt) ``` #### File: src/syntax/_css.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _css.py 72399 2012-08-29 19:56:26Z CJP $" __revision__ = "$Revision: 72399 $" #-----------------------------------------------------------------------------# # Imports import wx import wx.stc as stc # Local Imports import synglob import syndata #-----------------------------------------------------------------------------# #---- Keyword Specifications ----# # CSS1 Keywords (Identifiers) CSS1_KEYWORDS = (0, "font-family font-style font-variant font-weight font-size " "font color background-color background-image " "background-repeat background-position background " "word-spacing letter-spacing text-decoration " "vertical-align text-transform text-align text-indent " "line-height margin-top margin-right margin-left margin " "padding-top padding-right padding-bottom padding-left " "padding border-top-width border-right-width " "border-bottom-width border-left-width border-width " "border-color border-style border-top border-right " "border-bottom border-left border width height float clear " "display white-space list-style-type list-style-image " "list-style-position list-style margin-bottom " "text-decoration min-width min-height " "background-attachment") # CSS Psuedo Classes CSS_PSUEDO_CLASS = (1, "link active visited indeterminate default " # CSS 2 "first-child focus hover lang left right first " # CSS 3 "empty enabled disabled checked not root target " "only-child last-child nth-child nth-last-child " "first-of-type last-of-type nth-of-type " "nth-last-of-type only-of-type valid invalid required " "optional") # CSS2 Keywords (Identifiers) # This is meant for css2 specific keywords, but in order to get a better # coloring effect this will contain special css properties as well. 
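# Note (annotation): each keyword spec in this module is a
# (keyword_set_index, words) tuple; the index selects which Scintilla
# keyword list the words are loaded into (via StyledTextCtrl.SetKeyWords),
# which in turn fixes the highlight style, e.g. set 0 -> STC_CSS_IDENTIFIER,
# set 1 -> STC_CSS_PSEUDOCLASS, set 2 -> STC_CSS_IDENTIFIER2.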
CSS2_KEYWORDS = (2, "ActiveBorder ActiveCaption AppWorkspace Background " "ButtonFace ButtonHighlight ButtonShadow ButtonText " "CaptionText GrayText Highlight HighlightText " "InactiveBorder InactiveCaption InactiveCaptionText " "InfoBackground InfoText Menu MenuText Scrollbar " "ThreeDDarkShadow ThreeDFace ThreeDHighlight " "ThreeDLightShadow ThreeDShadow Window WindowFrame " "WindowText above absolute all always aqua armenian ascent " "auto avoid azimuth baseline baseline bbox behind below " "bidi-override black blink block blue bold bolder both " "bottom capitalize center center centerline child circle " "clear clip code collapse color compact content continuous " "crop cross crosshair cursive cursor dashed default " "descent digits disc dotted double during elevation embed " "fantasy faster female fixed fixed float fuchsia georgian " "gray green groove hebrew height help hidden hide higher " "icon inherit inline inset inside inside invert italic " "justify landscape larger leftwards level lighter lime " "lowercase ltr male marks maroon mathline medium menu " "middle mix monospace move narrower navy non none normal " "nowrap oblique olive once orphans outset outside overflow " "overline pointer portrait position pre purple quotes red " "relative richness ridge rightwards rtl scroll scroll " "separate show silent silver size slope slower smaller " "solid square src static stemh stemv stress sub super teal " "thick thin top topline underline uppercase visibility " "visible volume wait wider widows width widths yellow " "z-index outline left") # CSS3 Keywords CSS3_KEYWORDS = (3, "border-radius border-top-left-radius " "border-top-right-radius border-bottom-left-radius " "border-bottom-right-radius border-image " "border-image-outset border-image-repeat " "border-image-source border-image-slice border-image-width " "break-after break-before break-inside columns " "column-count column-fill column-gap column-rule " "column-rule-color column-rule-style column-rule-width " "column-span column-width @keframes animation " "animation-delay animation-direction animation-duration " "animation-fill-mode animation-iteration-count " "animation-name animation-play-state " "animation-timing-function transition transition-delay " "transition-duration transition-timing-function " "transition-property backface-visibility perspective " "perspective-origin transform transform-origin " "transform-style background-clip background-origin " "background-size overflow-x overflow-y overflow-style " "marquee-direction marquee-play-count marquee-speed " "marquee-style box-shadow box-decoration-break opacity") PSEUDO_ELEMENTS = (4, "first-letter first-line before after selection") #---- Syntax Style Specs ----# SYNTAX_ITEMS = [ (stc.STC_CSS_DEFAULT, 'default_style'), (stc.STC_CSS_ATTRIBUTE, 'funct_style'), (stc.STC_CSS_CLASS, 'global_style'), (stc.STC_CSS_COMMENT, 'comment_style'), (stc.STC_CSS_DIRECTIVE, 'directive_style'), (stc.STC_CSS_DOUBLESTRING, 'string_style'), (stc.STC_CSS_ID, 'scalar_style'), (stc.STC_CSS_IDENTIFIER, 'keyword_style'), (stc.STC_CSS_IDENTIFIER2, 'keyword3_style'), (stc.STC_CSS_IMPORTANT, 'error_style'), (stc.STC_CSS_OPERATOR, 'operator_style'), (stc.STC_CSS_PSEUDOCLASS, 'scalar_style'), (stc.STC_CSS_SINGLESTRING, 'string_style'), (stc.STC_CSS_TAG, 'keyword_style'), (stc.STC_CSS_UNKNOWN_IDENTIFIER, 'unknown_style'), (stc.STC_CSS_UNKNOWN_PSEUDOCLASS, 'unknown_style'), (stc.STC_CSS_VALUE, 'char_style') ] # TODO: add styling and keywords for new style regions in 2.9 if wx.VERSION >= (2, 9, 0, 0, 
''): # Browser specific identifiers SYNTAX_ITEMS.append((stc.STC_CSS_EXTENDED_IDENTIFIER, 'default_style')) SYNTAX_ITEMS.append((stc.STC_CSS_EXTENDED_PSEUDOCLASS, 'default_style')) SYNTAX_ITEMS.append((stc.STC_CSS_EXTENDED_PSEUDOELEMENT, 'default_style')) # CSS3 Properties SYNTAX_ITEMS.append((stc.STC_CSS_IDENTIFIER3, 'keyword2_style')) # Pseudo elements SYNTAX_ITEMS.append((stc.STC_CSS_PSEUDOELEMENT, 'default_style')) #---- Extra Properties ----# FOLD = ("fold", "1") #------------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for CSS""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup self.SetLexer(stc.STC_LEX_CSS) self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter) def GetKeywords(self): """Returns Specified Keywords List """ kwlist = [CSS1_KEYWORDS , CSS_PSUEDO_CLASS] # 2.9 supports CSS3 so for 2.8 just add CSS3 keywords to the css2 list if wx.VERSION < (2, 9, 0, 0, ''): css2_kw = (CSS2_KEYWORDS[0], " ".join((CSS2_KEYWORDS[1], CSS3_KEYWORDS[1]))) kwlist.append(css2_kw) else: kwlist.append(CSS2_KEYWORDS) kwlist.append(CSS3_KEYWORDS) kwlist.append(PSEUDO_ELEMENTS) return kwlist def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetProperties(self): """Returns a list of Extra Properties to set """ return [FOLD] def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ return [u'/*', u'*/'] #-----------------------------------------------------------------------------# def AutoIndenter(estc, pos, ichar): """Auto indent cpp code. @param estc: EditraStyledTextCtrl @param pos: current carat position @param ichar: Indentation character """ rtxt = u'' line = estc.GetCurrentLine() text = estc.GetTextRange(estc.PositionFromLine(line), pos) eolch = estc.GetEOLChar() indent = estc.GetLineIndentation(line) if ichar == u"\t": tabw = estc.GetTabWidth() else: tabw = estc.GetIndent() i_space = indent / tabw ndent = eolch + ichar * i_space rtxt = ndent + ((indent - (tabw * i_space)) * u' ') if text.endswith('{'): rtxt += ichar # Put text in the buffer estc.AddText(rtxt) ``` #### File: src/syntax/_forth.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _forth.py 68798 2011-08-20 17:17:05Z CJP $" __revision__ = "$Revision: 68798 $" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc # Local Imports import synglob import syndata #-----------------------------------------------------------------------------# #---- Keyword Definitions ----# # Control Keywords CONTROL_KW = (0, "again begin case do else endcase endof if loop of " "repeat then until while [if] [else] [then] ?do") # Keywords KEYWORDS = (1, "dup drop rot swap over @ ! 2@ 2! 2dup 2drop 2swap 2over nip " "r@ >r r&gt; 2r@ 2>r 2r>; 0= 0<; sp@ sp! w@ w! c@ c! < > = " "<> 0<> space spaces key? key throw catch abort */ 2* /mod " "cell+ cells char+ chars move erase dabs title hex decimal " "hold <# # #s #> sign d. . u. dump (.\") >number ' immediate " "exit recurse unloop leave here allot , c, w, compile, branch, " "ret, lit, dlit, ?branch, \", >mark >resolve1 <mark >resolve " "align aligned user-allot user-here header does> smudge hide " ":noname last-word ?error error2 find1 sfind set-current " "get-current definitions get-order forth only set-order also " "previous voc-name. 
order latest literal 2literal sliteral " "cliteral ?literal1 ?sliteral1 hex-literal hex-sliteral " "?literal2 ?sliteral2 source EndOfChunk CharAddr PeekChar " "IsDelimiter GetChar OnDelimiter SkipDelimiters OnNotDelimiter " "SkipWord SkipUpTo ParseWord NextWord parse skip " "console-handles refill depth ?stack ?comp word interpret bye " "quit main1 evaluate include-file included >body +word " "wordlist class! class@ par! par@ id. ?immediate ?voc " "immediate VOC WordByAddrWl WordByAddr nlist words save options " "/notransl ansi>oem accept emit cr type ekey? ekey ekey>char " "externtask erase-imports ModuleName ModuleDirName environment? " "drop-exc-handler set-exc-handler halt err close-file " "create-file create-file-shared open-file-shared delete-file " "file-position file-size open-file read-file reposition-file " "dos-lines unix-lines read-line write-file resize-file " "write-line allocate free resize start suspend resume stop " "pause min max true false asciiz> r/o w/o ;class endwith or and " "/string search compare export ;module space") # Definition Keywords DEFINITION_KW = (2, "variable create : value constant vm: m: var dvar chars " "obj constr: destr: class: object: pointer user " "user-create user-value vect wndproc: vocabulary -- task: " "cez: module:") # Prewords with one argument PREWORDS1 = (3, "CHAR [CHAR] POSTPONE WITH ['] TO [COMPILE] CHAR ASCII \\'") # Prewords with two arguments PREWORDS2 = (4, "REQUIRE WINAPI:") # String definition Keywords STRING_DEF_KW = (5, "S\" ABORT\" Z\" \" .\" C\"") #---- End Keyword Definitions ----# #---- Syntax Style Specs ----# SYNTAX_ITEMS = [(stc.STC_FORTH_DEFAULT, "default_style"), (stc.STC_FORTH_COMMENT, "comment_style"), (stc.STC_FORTH_COMMENT_ML, "comment_style"), # ("STC_FORTH_CONTROL", ""), # ("STC_FORTH_DEFWORD", ""), # ("STC_FORTH_IDENTIFIER", ""), (stc.STC_FORTH_KEYWORD, "keyword_style"), # ("STC_FORTH_LOCALE", ""), (stc.STC_FORTH_NUMBER, "number_style"), (stc.STC_FORTH_PREWORD1, "keyword2_style"), (stc.STC_FORTH_PREWORD2, "keyword3_style"), (stc.STC_FORTH_STRING, "string_style")] #---- Extra Properties ----# #-----------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for Forth""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup self.SetLexer(stc.STC_LEX_FORTH) def GetKeywords(self): """Returns Specified Keywords List """ return [CONTROL_KW, KEYWORDS, DEFINITION_KW, PREWORDS1, PREWORDS2, STRING_DEF_KW] def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ return [u'\\ '] ``` #### File: src/syntax/_groovy.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _groovy.py 68798 2011-08-20 17:17:05Z CJP $" __revision__ = "$Revision: 68798 $" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc # Local Imports import synglob import syndata from _cpp import AutoIndenter #-----------------------------------------------------------------------------# #---- Keyword Specifications ----# MAIN_KEYWORDS = (0, """ as assert Boolean Byte Character Class Double Float Integer Long Number Object Short String property void abstract assert boolean break byte case catch char class const continue default do double else extends false final finally float for goto if implements import instanceof in int interface long native new null package private protected public 
return short static strictfp super switch synchronized this throw throws transient true try void volatile while def """ ) SECONDARY_KEYWORDS= (1, """ abs accept allProperties and any append asImmutable asSynchronized asWritable center collect compareTo contains count decodeBase64 div dump each eachByte eachFile eachFileRecurse eachLine eachMatch eachProperty eachPropertyName eachWithIndex encodeBase64 every execute filterLine find findAll findIndexOf flatten getErr getIn getOut getText inject inspect intersect intdiv invokeMethod isCase join leftShift max min minus mod multiply negate newInputStream newOutputStream newPrintWriter newReader newWriter next or padLeft padRight plus pop previous print println readBytes readLine readLines reverse reverseEach rightShift rightShiftUnsigned round size sort splitEachLine step subMap times toDouble toFloat toInteger tokenize toList toLong toURL transformChar transformLine upto use waitForOrKill withInputStream withOutputStream withPrintWriter withReader withStream withStreams withWriter withWriterAppend write writeLine """ ) #---- Syntax Style Specs ----# SYNTAX_ITEMS = [ (stc.STC_C_DEFAULT, 'default_style'), (stc.STC_C_COMMENT, 'comment_style'), (stc.STC_C_COMMENTDOC, 'comment_style'), (stc.STC_C_COMMENTDOCKEYWORD, 'dockey_style'), (stc.STC_C_COMMENTDOCKEYWORDERROR, 'error_style'), (stc.STC_C_COMMENTLINE, 'comment_style'), (stc.STC_C_COMMENTLINEDOC, 'comment_style'), (stc.STC_C_CHARACTER, 'char_style'), (stc.STC_C_GLOBALCLASS, 'global_style'), (stc.STC_C_IDENTIFIER, 'default_style'), (stc.STC_C_NUMBER, 'number_style'), (stc.STC_C_OPERATOR, 'operator_style'), (stc.STC_C_PREPROCESSOR, 'pre_style'), (stc.STC_C_REGEX, 'pre_style'), (stc.STC_C_STRING, 'string_style'), (stc.STC_C_STRINGEOL, 'stringeol_style'), (stc.STC_C_UUID, 'pre_style'), (stc.STC_C_VERBATIM, 'number2_style'), (stc.STC_C_WORD, 'keyword_style'), (stc.STC_C_WORD2, 'keyword2_style') ] #---- Extra Properties ----# FOLD = ("fold", "1") FOLD_PRE = ("styling.within.preprocessor", "0") FOLD_COM = ("fold.comment", "1") FOLD_COMP = ("fold.compact", "1") FOLD_ELSE = ("fold.at.else", "0") #------------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for Groovy""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup self.SetLexer(stc.STC_LEX_CPP) self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter) def GetKeywords(self): """Returns Specified Keywords List """ return [MAIN_KEYWORDS, SECONDARY_KEYWORDS] def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetProperties(self): """Returns a list of Extra Properties to set """ return [FOLD, FOLD_PRE] def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ return [ u'//' ] ``` #### File: src/syntax/_haskell.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _haskell.py 68798 2011-08-20 17:17:05Z CJP $" __revision__ = "$Revision: 68798 $" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc # Local Imports import synglob import syndata #-----------------------------------------------------------------------------# #---- Keyword Definitions ----# HA_KEYWORDS = (0, "as case class data default deriving do forall foreign " "hiding if import in infix infixl infixr instance else let " "mdo module newtype of qualified then type where") #---- End Keyword Definitions ----# #---- Syntax Style Specs ----# SYNTAX_ITEMS 
= [(stc.STC_HA_CAPITAL, 'default_style'),
                (stc.STC_HA_CHARACTER, 'char_style'),
                (stc.STC_HA_CLASS, 'class_style'),
                (stc.STC_HA_COMMENTBLOCK, 'comment_style'),
                (stc.STC_HA_COMMENTBLOCK2, 'comment_style'),
                (stc.STC_HA_COMMENTBLOCK3, 'comment_style'),
                (stc.STC_HA_COMMENTLINE, 'comment_style'),
                (stc.STC_HA_DATA, 'default_style'),
                (stc.STC_HA_DEFAULT, 'default_style'),
                (stc.STC_HA_IDENTIFIER, 'default_style'),
                (stc.STC_HA_IMPORT, 'default_style'), # possibly use custom style
                (stc.STC_HA_INSTANCE, 'default_style'),
                (stc.STC_HA_KEYWORD, 'keyword_style'),
                (stc.STC_HA_MODULE, 'default_style'),
                (stc.STC_HA_NUMBER, 'number_style'),
                (stc.STC_HA_OPERATOR, 'operator_style'),
                (stc.STC_HA_STRING, 'string_style')]

#---- Extra Properties ----#
FOLD = ('fold', '1')

#-----------------------------------------------------------------------------#

class SyntaxData(syndata.SyntaxDataBase):
    """SyntaxData object for Haskell"""
    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)

        # Setup
        self.SetLexer(stc.STC_LEX_HASKELL)

    def GetKeywords(self):
        """Returns Specified Keywords List """
        return [HA_KEYWORDS]

    def GetSyntaxSpec(self):
        """Syntax Specifications """
        return SYNTAX_ITEMS

    def GetProperties(self):
        """Returns a list of Extra Properties to set """
        return [FOLD]

    def GetCommentPattern(self):
        """Returns a list of characters used to comment a block of code """
        return [u'--']
```
#### File: src/syntax/_lout.py
```python
__author__ = "<NAME> <<EMAIL>>"
__svnid__ = "$Id: _lout.py 68798 2011-08-20 17:17:05Z CJP $"
__revision__ = "$Revision: 68798 $"

#-----------------------------------------------------------------------------#
# Imports
import wx.stc as stc

# Local Imports
import synglob
import syndata

#-----------------------------------------------------------------------------#

#---- Keyword Definitions ----#
# @ prefixed keywords
LOUT_KW1 = (0, "@OptGall @FontDef @Family @Face @Name @Metrics @ExtraMetrics "
               "@Mapping @Recode @Filter @FilterIn @FilterOut @FilterErr @AL "
               "@Common @Rump @Meld @Insert @OneOf @Next @Plus @Minus @Wide "
               "@High @HShift @VShift @BeginHeaderComponent @Document @TItle "
               "@SetHeaderComponent @ClearHeaderComponent @OneCol @OneRow @Doc "
               "@HMirror @VMirror @HScale @VScale @HCover @VCover @Scale @Text "
               "@KernShrink @HContract @VContract @HLimited @VLimited @HExpand "
               "@VExpand @StartHVSpan @StartHSpan @StartVSpan @HSpan @VSpan "
               "@PAdjust @HAdjust @VAdjust @Rotate @Background @IncludeGraphic "
               "@SysIncludeGraphic @Graphic @LinkSource @LinkDest @URLLink @BI "
               "@PlainGraphic @Verbatim @RawVerbatim @Case @Yield @BackEnd @BL "
               "@Char @Font @Space @YUnit @ZUnit @Break @Underline @SetColour "
               "@SetColor @SetUnderlineColour @SetUnderlineColor @SetTexture "
               "@Outline @Language @CurrLang @CurrFamily @CurrFace @CurrYUnit "
               "@CurrZUnit @LEnv @@A @@B @@C @@D @@E @LClos @@V @LUse @LEO @PP "
               "@Open @Use @NotRevealed @Tagged @Database @SysDatabase @I @B "
               "@Include @SysInclude @IncludeGraphicRepeated @InitialFont "
               "@SysIncludeGraphicRepeated @PrependGraphic @SysPrependGraphic "
               "@Target @Null @PageLabel @Galley @ForceGalley @LInput @Split "
               "@Tag @Key @Optimize @Merge @Enclose @Begin @End @Moment @Tab "
               "@Second @Minute @Hour @Day @Month @Year @Century @WeekDay "
               "@YearDay @DaylightSaving @SetContext @GetContext @Time @List "
               "@EndHeaderComponent @Section @BeginSections @EndNote @Abstract "
               "@AlphaList @Appendix @Author @Figure @Report @OuterNote "
               "@IndentedList @InitialBreak @InitialLanguage @InnerNote "
               "@Heading @FootNote @Date @LeftList @LeftNote @ListItem "
               "@RightDisplay @RightNote "
               "@EndSections")

# Symbols
LOUT_KW2 = (1, "&&& && & ^// ^/ ^|| ^| ^& // / || |")

# Non @ keywords
LOUT_KW3 = (2, "def langdef force horizontally into extend import export "
               "precedence associativity left right body macro named "
               "compulsory following preceding foll_or_prec now "
               "Base Slope Bold BoldSlope Times Helvetica Courier Palatino "
               "adjust breakstyle clines lines linesep hyphen nonhyphen ragged "
               )

# Document Classes
LOUT_KW4 = (3, "fx vx aformat bformat doc eq graph slides tab text tbl")

#---- End Keyword Definitions ----#

#---- Syntax Style Specs ----#
SYNTAX_ITEMS = [(stc.STC_LOUT_COMMENT, 'comment_style'),
                (stc.STC_LOUT_DEFAULT, 'default_style'),
                (stc.STC_LOUT_IDENTIFIER, 'default_style'),
                (stc.STC_LOUT_NUMBER, 'number_style'),
                (stc.STC_LOUT_OPERATOR, 'operator_style'),
                (stc.STC_LOUT_STRING, 'string_style'),
                (stc.STC_LOUT_STRINGEOL, 'stringeol_style'),
                (stc.STC_LOUT_WORD, 'scalar_style'),
                (stc.STC_LOUT_WORD2, 'keyword2_style'),
                (stc.STC_LOUT_WORD3, 'keyword_style'),
                (stc.STC_LOUT_WORD4, 'class_style')]

#---- Extra Properties ----#
FOLD_COMPACT = ("fold.compact", '1')

#-----------------------------------------------------------------------------#

class SyntaxData(syndata.SyntaxDataBase):
    """SyntaxData object for LOUT"""
    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)

        # Setup
        self.SetLexer(stc.STC_LEX_LOUT)

    def GetKeywords(self):
        """Returns Specified Keywords List """
        # LOUT_KW4 supplies the document class names styled as STC_LOUT_WORD4
        return [LOUT_KW1, LOUT_KW2, LOUT_KW3, LOUT_KW4]

    def GetSyntaxSpec(self):
        """Syntax Specifications """
        return SYNTAX_ITEMS

    def GetProperties(self):
        """Returns a list of Extra Properties to set """
        return [FOLD_COMPACT]

    def GetCommentPattern(self):
        """Returns a list of characters used to comment a block of code """
        return [u'#']
```
#### File: src/syntax/_nasm.py
```python
__author__ = "<NAME> <<EMAIL>>"
__svnid__ = "$Id: _nasm.py 68798 2011-08-20 17:17:05Z CJP $"
__revision__ = "$Revision: 68798 $"

#-----------------------------------------------------------------------------#
# Imports
import wx.stc as stc

# Local Imports
import syndata

#-----------------------------------------------------------------------------#

#---- Keyword Definitions ----#
# NASM CPU Instructions
NASM_CPU_INST = (0, "cmps movs lcs lods stos xlat aaa aad aam adc and bound "
                 "bsf bsr bswap bt btc btr bts call cbw cdq clc cld cmc cmp "
                 "cmpsb cmpsd cmpsw cmpxchg cmpxchg8b cpuid cwd cwde daa "
                 "das enter int iret iretw jcxz jecxz jmp lahf lds lea "
                 "leave les lfs lgs lodsb lodsd lodsw loop loope loopne "
                 "loopnz loopz lss mov movsb movsd movsw movsx movzx neg "
                 "nop not or popa popad popaw popf popfd popfw push pusha "
                 "pushd pushaw pushf pushfd pushfw rcl rcr retf ret retn "
                 "rol ror sahf sal sar sbb scasb scasd scasw shl shld shrd "
                 "stc std stosb stosd stosw test xchg xlatb xor arpl lar "
                 "lsl verr verw lldt sldt lgdt sgdt ltr str clts lock wait "
                 "ins outs in insb insw insd out outsb outsw outsd cli sti "
                 "lidt sidt hlt invd lmsw prefetcht0 prefetcht1 prefetcht2 "
                 "prefetchnta rsm sfence smsw sysenter sysexit ud2 wbinvd "
                 "invlpg int1 int3 rdmsr rdtsc rdpmc wrmsr add dec div idiv "
                 "imul inc mul sub xadd f2xm1 "
                 )

# NASM FPU Instructions
NASM_FPU_INST = (1, "fchs fclex fcom fcomp fdecstp fdisi feni ffree ficom fild "
                 "finit fist fld fldcw fldenv fldl2e fldl2t fldlg2 "
                 "fldln2 fldpi fldz fscale fsetpm frndint frstor "
                 "fstcw fstenv fsts fstsw ftst fucom fucomp "
                 "fxam fxch fxtract fyl2x fyl2xp1 "
                 "fabs fadd faddp fbld "
                 "fcos fdiv fdivr fiadd fidiv fimul fisub fmul fpatan fptan "
                 "fsin fsincos fsqrt fsub fsubr
fsave fbstp") # NASM Registers NASM_REGISTERS = (2, "ah al ax bh bl bp bx ch cl cr0 cr2 cr3 cr4 cs cx dh di " "dl dr0 dr1 dr2 dr3 dr6 dr7 ds dx eax ebp ebx ecx edi edx " "es esi esp fs gs si sp ss st tr3 tr4 tr5 tr6 tr7 st0 st1 " "st2 st3 st4 st5 st6 st7 mm0 mm1 mm2 mm3 mm4 mm5 mm6 mm7 " "xmm0 xmm1 xmm2 xmm3 xmm4 xmm5 xmm6 xmm7") # NASM Directives NASM_DIRECTIVES = (3, "DF EXTRN FWORD RESF TBYTE FAR NEAR SHORT BYTE WORD " "QWORD DQWORD HWORD DHWORD TWORD CDECL FASTCALL NONE " "PASCAL STDCALL DB DW DD DQ DDQ DT RESB RESW RESD RESQ " "REST EXTERN GLOBAL COMMON __BITS__ __DATE__ __FILE__ " "__FORMAT__ __LINE__ __NASM_MAJOR__ __NASM_MINOR__ " "__NASM_VERSION__ __TIME__ TIMES ALIGN ALIGNB INCBIN " "EQU NOSPLIT SPLIT ABSOLUTE BITS SECTION SEGMENT DWORD " "ENDSECTION ENDSEGMENT __SECT__ ENDPROC EPILOGUE LOCALS " "PROC PROLOGUE USES ENDIF ELSE ELIF ELSIF IF DO ENDFOR " "ENDWHILE FOR REPEAT UNTIL WHILE EXIT ORG EXPORT GROUP " "UPPERCASE SEG WRT LIBRARY _GLOBAL_OFFSET_TABLE_ " "__GLOBAL_OFFSET_TABLE_ ..start ..got ..gotoff ..gotpc " "..pit ..sym %define %idefine %xdefine %xidefine %undef " "%assign %iassign %strlen %substr %macro %imacro " "%endmacro %rotate .nolist %if %elif %else %endif %ifdef " "%ifndef %elifdef %elifndef %ifmacro %ifnmacro " "%elifnmacro %ifctk %ifnctk %elifctk %elifnctk %ifidn " "%ifnidn %elifidn %elifnidn %ifidni %ifnidni %elifidni " "%elifnidni %ifid %ifnid %elifid %elifnid %ifstr %ifnstr " "%elifstr %elifnstr %ifnum %ifnnum %elifnum %elifnnum " "%error %rep %endrep %exitrep %include %push %pop %repl " "struct endstruc istruc at iend align alignb %arg " "%stacksize %local %line bits use16 use32 section " "absolute extern global common cpu org section group " "import export %elifmacro ") NASM_DIREC_OP = (4, "a16 a32 o16 o32 byte word dword nosplit $ $$ seq wrt flat " "large small .text .data .bss near far %0 %1 %2 %3 %4 %5 " "%6 %7 %8 %9") NASM_EXT_INST = (5, "") #---- Language Styling Specs ----# SYNTAX_ITEMS = [ (stc.STC_ASM_DEFAULT, 'default_style'), (stc.STC_ASM_CHARACTER, 'char_style'), (stc.STC_ASM_COMMENT, 'comment_style'), (stc.STC_ASM_COMMENTBLOCK, 'comment_style'), (stc.STC_ASM_CPUINSTRUCTION, 'keyword_style'), (stc.STC_ASM_DIRECTIVE, 'keyword3_style'), (stc.STC_ASM_DIRECTIVEOPERAND, 'keyword4_style'), (stc.STC_ASM_EXTINSTRUCTION, 'funct_style'), (stc.STC_ASM_IDENTIFIER, 'default_style'), (stc.STC_ASM_MATHINSTRUCTION, 'keyword_style'), (stc.STC_ASM_NUMBER, 'number_style'), (stc.STC_ASM_OPERATOR, 'operator_style'), (stc.STC_ASM_REGISTER, 'keyword2_style'), (stc.STC_ASM_STRING, 'string_style'), (stc.STC_ASM_STRINGEOL, 'stringeol_style') ] #-----------------------------------------------------------------------------# class SyntaxData(syndata.SyntaxDataBase): """SyntaxData object for NASM""" def __init__(self, langid): super(SyntaxData, self).__init__(langid) # Setup self.SetLexer(stc.STC_LEX_ASM) def GetKeywords(self): """Returns Specified Keywords List """ return [NASM_CPU_INST, NASM_FPU_INST, NASM_REGISTERS, NASM_DIRECTIVES, NASM_DIREC_OP] def GetSyntaxSpec(self): """Syntax Specifications """ return SYNTAX_ITEMS def GetCommentPattern(self): """Returns a list of characters used to comment a block of code """ return [u';'] ``` #### File: src/syntax/_stata.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: _stata.py 68798 2011-08-20 17:17:05Z CJP $" __revision__ = "$Revision: 0$" #-----------------------------------------------------------------------------# # Imports import wx.stc as stc # Local imports import synglob import syndata from _cpp import 
AutoIndenter #-----------------------------------------------------------------------------# #---- Keyword Specifications ----# # Documentation Keywords (includes Doxygen keywords) SECONDARY_KEYWORDS =(1, """ __GEEBT __GEERC __GEEUC _3dax0 _3daxmin _3daxout if _3daxtbl _3ddflts _3dmkdta _3dmnmx _3dproj _3drproj _3drshow _3dshad _3dsvusr _ac _addl _addop _adjksm _all _bsqreg _byobs _callerr _cpmatnm _cr1form _cr1invt _cr1se _cr1t _crc2use _crc4fld _crcacnt _crcar1 _crcause _crcbcrt _crcbin _crcbygr _crcchi2 _crcchkt _crcchkw _crcci _crccip _crceprs _crcexn1 _crcexn2 _crcexn4 _crcexn5 _crcexn6 _crcexn7 _crcexn8 _crcexn9 _crcexna _crcexnb _crcexnc _crcexnd _crcexne _crcexnf _crcexnt _crcgldv _crcglil _crcichi _crcird _crcirr _crcksm _crclf _crcmeq _crcmiss _crcnlou _crcnms2 _crcnuse _crcor _crcphdr _crcplst _crcra _crcrd _crcrnfd _crcrr _crcrsfl _crcseq _crcshdr _crcslbl _crcsrv2 _crcsrvc _crcstep _crcswxx _crct _crctmge _crcunab _crcunit _crcvarl _crcwsrv _crczsku _cu_c0 _diparm _evlist _fracchk _fraccox _fracddp _fracdis _fracdv _fracin _fracmdp _fracord _fracpp _fracpv _fracrep _fracwgt _fracxo _gcount _gcut _gdiff _getbv _getrhs _getrres _gfill _ggroup _giqr _gladder _glmfl _glmilnk _glmmapf _glmmapl _glmresd _glmrpt _glmwgt _gma _gmad _gmax _gmdmean _gmdmed _gmean _gmedian _gmin _gmtr _gpctile _grank _grank2 _grfirst _grlast _grmax _grmean _grmin _grmiss _grmiss2 _grobs _grsd _grsum _gsd _gsrank _gstd _gsum _gtma _gtrank _hu _hub _hube _huber _inlist _invlist _isfit _jprfpdt _jprfpdx _jprfpfp _jprfpgn _jprfpin _jprfplx _jprfpmm _jprfppp _jprfpre _jprfprp _jprfpse _jprfptp _jprfpxo _jprglef _jprglfl _jprglil _jprglld _jprglwz _jprxrpa _kalman1 _ksmwrk _ldrtest _linemax _maked _merge _mfrmvec _mkvec _mvec _newey _nlout _nobs _opnum _parsevl _parsewt _partset _pctile _pred_me _pred_se _predict _qreg _repart _result _rmcoll _robksm _robust _sfran _subchar _svy _sw_lik _sw_lik2 _sw_ood _ts _ts_dsmp _ts_flag _ts_gdat _ts_meqn _ts_pars _ts_peri _tsheadr _ttest _ttest1 _ttest2 _tutends _tx_mtr1 _tx_mtr2 _tx_mtr3 _tx_mtr4 _tx_mtr5 _tx_rpl _wkapm _wsrvcrv _xtrenorm """) MAIN_KEYWORDS = (0, """ abbrev about abs acprplot add adjust ado adopath alpha an ano anov anova anovadef aorder ap app appe appen append arch arch_dr arch_p areg areg_p args arima arima_dr arima_p as ass asse asser assert at avplot avplots aw aweight bcskew0 be bee beep binreg bipp_lf bipr_lf bipr_p biprobit bitest bitesti bitowt blogit bmemsize boot bootsamp boxcox boxcox_p bprobit break brier bs bsampl_w bsample bsqreg bstat bstrap by bys bysort byte c_local canon canon_p capture cat cc cchart cci cd cell cellsize centile cf char char chdir checksum chi2 chi2tail ci cii cksum clear clo clocal clog clog_lf clog_p clogi clogi_sw clogit clogit_p clogitp clogl_sw cloglog close cluster cmdlog cnr cnre cnreg cnreg_sw cnsreg codebook col collaps4 collapse compare compress compute cond conf confi confir confirm cons const constr constra constrai constrain constraint contract copy cor corc corr corr2data corre correl correla correlat correlate corrgram cou coun count cox cox_p cox_sw coxbase coxhaz coxvar cp cprplot crc cross cs cscript csi ct ct_is ctset ctst_5 ctst_st cttost cumsp cumul cusum d datetof dbeta de debug debugbuf dec deco decod decode def deff define des desc descr descri describ describe dfbeta dfuller di dir dis discard disp disp_res disp_s displ displa display do doe doed doedi doedit dotplot dprobit drawnorm drop ds dstdize dwstat dydx dyex dynre dynren e ed edi edit egen eivreg else emdef en enc enco encod encode end 
eq eqlist erase ereg ereg_lf ereg_p ereg_sw err erro error est esti estim estima estimat estimate estimates etodow etof etomdy ex exact exec execu execut execute exi exit expand export eydx eyex F fac fact facto factor fast fft fillin findit fit float for for5_0 force form forma format fpredict frac_154 frac_adj frac_chk frac_cox frac_ddp frac_dis frac_dv frac_in frac_mun frac_pp frac_pq frac_pv frac_wgt frac_xo fracgen fracplot fracpoly fracpred freq frequency Ftail ftodate ftoe ftomdy ftowdate fw fweight g gamma gamma_lf gamma_p gamma_sw ge gen gene gener genera generat generate genrank genstd genvmean gettoken gl gladder glm glm_p glm_sw glmpred glo glob globa global glogit glogit_p gnbre_lf gnbreg gnbreg_5 gnbreg_p gomp_lf gompe_sw gomper_p gompertz gph gphdot gphpen gphprint gprobi_p gprobit gr gr_print gra grap graph grebar greigen grmeanby group gsort gwood h hadimvo hareg hausman he heck_d2 heckma_p heckman heckp_lf heckpr_p heckprob hel help helpchk hereg hetpr_lf hetpr_p hetprob hettest hilite hist hlogit hlu hotel hprobit hreg icd9 icd9p iis impute in index inf infi infil infile infix inlist inp inpu input ins insh inshe inshee insheet insp inspe inspec inspect int integ intreg intrg_ll invchi2 invchi2tail invF invFtail invnchi2 invnorm invttail ipolate iqreg ir iri istdize ivreg iw iweight joinby kalarma1 kap kap_3 kapmeier kappa kapwgt kdensity keep ksm ksmirnov ktau kwallis l la lab labe label ladder length level leverage lfit lfit_p li lincom linesize linktest lis list llogi_sw llogis_p llogist ln lnorm_lf lnorm_sw lnorma_p lnormal lnskew0 lo loc loca local log logi logis_lf logistic logit logit_p loglogs logrank logtype loneway long loo look lookfor looku lookup lower lpredict lroc lrtest ls lsens lsens_x lstat ltable ltrim lv lvr2plot m ma mac macr macro man mantel mark markout marksample mat matcell match matcol matcproc matname matr matri matrix matrow matsize matstrik max mcc mcci md0_ md1_ md1debu_ md2_ md2debu_ mdytoe mdytof mean means median memory memsize meqparse mer merg merge mfx mhodds min missing mkdir mkmat mkspline ml ml_5 ml_adjs ml_bhhhs ml_c_d ml_check ml_clear ml_cnt ml_debug ml_defd ml_e0 ml_e0i ml_e1 ml_e2 ml_ebfg0 ml_ebfr0 ml_ebh0q ml_ebhh0 ml_ebhr0 ml_ebr0i ml_ecr0i ml_edfp0 ml_edfr0 ml_edr0i ml_eds ml_eer0i ml_egr0i ml_elf ml_elfi ml_elfs ml_enr0i ml_enrr0 ml_exde ml_geqnr ml_grad0 ml_graph ml_hbhhh ml_hd0 ml_init ml_inv ml_log ml_max ml_mlout ml_model ml_nb0 ml_opt ml_plot ml_query ml_rdgrd ml_repor ml_s_e ml_searc mleval mlf_ mlmatsum mlog mlogi mlogit mlogit_p mlopts mlsum mlvecsum mnl0_ mor more mov move mrdu0_ mvdecode mvencode mvreg n nbreg nbreg_al nbreg_lf nbreg_sw nchi2 net newey newey_p news nl nl_p nlexp2 nlexp2a nlexp3 nlgom3 nlgom4 nlinit nllog3 nllog4 nlogit nlpred no nobreak nod nodiscrete noe noesample nof nofreq noi nois noisi noisil noisily nol nolabel nonl nonlinear normden nose note notes notify now nowght npnchi2 nptrend numlist obs off old_ver olo olog ologi ologi_sw ologit ologit_p ologitp on one onew onewa oneway op_colnm op_comp op_diff op_inv op_str opr opro oprob oprob_sw oprobi oprobi_p oprobit oprobitp order orthog orthpoly ou out outf outfi outfil outfile outs outsh outshe outshee outsheet ovtest pac par pars parse pause pc pchart pchi pcorr pctile pentium percent pergram permanent personal pkcollapse pkcross pkequiv pkexamine pkshape pksumm pl playsnd plo plot plug plugi plugin pnorm poisgof poiss_lf poiss_sw poisso_p poisson pop popu popup post postclose postfile pperron pr prais prais_e prais_p pred predi predic 
predict predict preserve printgr priorest pro prob probi probit probit_p prog progr progra program prove prtest prtesti push pw pwcorr pwd pweight q qby qchi qnorm qqplot qreg qreg_c qreg_p qreg_sw qu quadchk quantile que quer query qui quie quiet quietl quietly range ranksum rawsum rchart rcof real recast recode reg reg3 reg3_p regdw regr regre regre_p2 regres regres_p regress regriv_p remap ren rena renam rename renpfix repeat replace replay reshape restore ret retu retur return reverse rm roccomp rocfit rocgold roctab rot rota rotat rotate round row rreg rreg_p rtrim ru run runtest rvfplot rvpplot sa safesum sample sampsi sav save saving say sca scal scala scalar sco scob_lf scob_p scobi_sw scobit scor score sd sdtest sdtesti se search separate seperate serrbar set sfrancia sh she shel shell shewhart showpoint signrank signtest simul simulinit sktest sleep smcl smooth snapspan so sor sort spearman speedchk1 speekchk2 spikeplt spline_x sqreg sret sretu sretur sreturn st st_ct st_hc st_hcd st_is st_issys st_note st_promo st_set st_show st_smpl st_subid stack stackdepth stackreset statsby stbase stci stcox stcox_p stcoxkm stcurv stcurve stdes stem stereg stfill stgen stinit stir stjoin stmc stmh stphplot stphtest stptime strate streg streset string sts stset stsplit stsum sttocc sttoct stvary stweib su subinstr subinword subpop substr subwin sum summ summa summar summari summariz summarize sureg survcurv survsum svmat svy_disp svy_dreg svy_est svy_get svy_head svy_sub svy_x svydes svyintrg svyivreg svylc svylog_p svylogit svymean svymlog svyolog svyoprob svypois svyprobt svyprop svyratio svyreg svyreg_p svyset svytab svytest svytotal sw swcnreg swcox swereg swilk swlogis swlogit swologit swoprbt swpois swprobit swqreg swtobit swweib symmetry symmi symplot syntax sysdir sysmenu ta tab tab_or tab1 tab2 tabd tabdi tabdis tabdisp tabi table tabodds tabstat tabu tabul tabula tabulat tabulate te tempfile tempname tempvar tes test testnl testparm teststd text timer tis tob tobi tobit tobit_p tobit_sw token tokeni tokeniz tokenize touch treatreg trim truncreg tset tsfill tsreport tsrevar tsset tsunab ttail ttest ttesti tut_chk tut_wait tutorial ty typ type typeof u unab unabbrev uniform update upper us use using val values var variable varlabelpos vce verinst vers versi versio version vif vwls wdatetof wdctl wdlg wdupdate weib_lf weib_lf0 weibu_sw weibul_p weibull wh whelp whi whic which whil while wilc_st wilcoxon win wind windo window winexec winhelp wmenu wntestb wntestq xchart xcorr xi xpose xt_iis xt_tis xtabond xtbin_p xtclog xtcnt_p xtcorr xtdata xtdes xtgee xtgee_p xtgls xtgls_p xthaus xtile xtint_p xtintreg xtivreg xtlogit xtnb_fe xtnb_lf xtnbreg xtpcse xtpois xtpred xtprobit xtps_fe xtps_lf xtps_ren xtrch_p xtrchh xtrefe_p xtreg xtreg_be xtreg_fe xtreg_ml xtreg_re xtregar xtrere_p xtsum xttab xttest0 xttobit xttrans xwin xwind xwindo xwindow zap_s zinb zinb_llf zinb_plf zip zip_llf zip_p zip_plf zt_ct_5 zt_hc_5 zt_hcd_5 zt_is_5 zt_iss_5 zt_sho_5 zt_smp_5 ztbase_5 ztcox_5 ztdes_5 ztereg_5 ztfill_5 ztgen_5 ztir_5 ztjoin_5 zts_5 ztset_5 ztspli_5 ztsum_5 zttoct_5 ztvary_5 ztweib_5 comma tab robust mvsktest oglm dlist mahapick viewresults valuesof textbarplot freduse hcavar distrate zipsave hutchens svylorenz goprobit regoprob digits cprplot2 devcon oaxaca supclust convert_top_lines graphbinary trellis traces wtd glst lxpct_2 clv backrasch mmsrm diplot kountry gologit2 intcens svygei_svyatk fitint xtarsim eclpci beamplot samplepps classplot spearman2 fixsort casefat mehetprob svyselmlog 
svybsamp2 moremata checkfor2 dbmscopybatch palette_all ivreset lookfor_all nmissing isvar descogini akdensity biplotvlab genscore rrlogit ivvif hnblogit hnbclg hglogit hgclg hplogit hpclg ztg cdfplot kdens cpoisson samplesize gnbstrat nbstrat genass outreg2 kapprevi sampsi_reg lgamma2 glgamma2 ivgauss2 givgauss2 sampsi_mcc cnbreg xtivreg2 sliceplot cquantile primes scheme_rbn1mono subsave insob geekel2d censornb estadd_plus estadd xtregre2 clorenz usmaps2 usswm tablemat tmap mif2dta surface kdens2 onespell diffpi plotbeta batplot relrank invcdf jmp jmp2 smithwelch reswage dfl p2ci heterogi shuffle8 shuffle lincheck bicdrop1 pre optifact matsort clustsens listmiss bic hdquantile dirlist variog svypxcon svypxcat seast xtlsdvc avplots4 eststo vclose gausshermite simirt cprplots glcurve sscsubmit sdecode harmby ellip mlboolean splitvallabels circular betacoef bnormpdf grqreg genhwcci mrtab zandrews cvxhull abar mat2txt gmlabvpos tableplot sortlistby mgen hlist ppplot rocss pnrcheck stexpect rbounds regaxis pgmhaz8 meta_lr mcqscore mvcorr ipf hapblock swblock raschtest gammasym raschtestv7 vanelteren fs estout1 estout loevh msp detect disjoint adjacent plotmatrix nnmatch senspec twoway_estfit mkdat soepren alphawgt center decompose wgttest cochran examples rollreg clemao_io heckprob2 gipf metagraph hshaz tslist collapse2 gen_tail carryforward floattolong doubletofloat margeff matin4-matout4 perturb coldiag2 sim_arma ndbci labelmiss mcenter sslope reorder scat3 dummieslab rfl xtvc metareg rc2 moments sxpose kaputil bystore mice gzsave witch cureregr hprescott tabout gamet duncan fview eret2 rc_spline tolerance modeldiag metaparm profhap nsplit hlm fieller xtfisher matwrite usmaps ellip6 ellip7 xi3 qhapipf slist nearest fedit extremes mypkg pairplot cycleplot ciplot selectvars stcompet full_palette catplot eclplot spellutil metadialog psmatch2 bygap ingap mylabels metaaggr cleanlog gpfobl mvprobit eqprhistogram slideplot majority hireg bigtab vartyp codebook2 dmariano whotdeck crtest collapseunique stripplot linkplot statsbyfast parplot mitools groups wclogit xcontract xcollapse metafunnel corrtab dmerge makematrix cibplot vreverse msplot nicedates mkcorr nearmrg tabstatmat panelunit panelauto safedrop gammafit gumbelfit betafit lookforit savasas usesas etime usagelog tmpdir confirmdir shortdir lambda survtime xtabond2 factortest checkvar vtokenize reshape8 pcorr2 tarow cb2html survwgt svr jonter xsampsi ci2 domdiag xtpattern nbfit distinct maketex triprobit smileplot tabmerge avplot3 datesum _gclsort cltest varlab decomp overlay tab3way tab2way ivreg2 hapipf varlag vececm isco isko ptrend dpplot cipolate tknz levels reformat xtab mvsumm fsum stkerhaz explist qlognorm tsspell gphepssj texteditors alphawgt decompose inccat adjksm outdat reshape7 qsim allpossible glmcorr gcause selmlog matsave est2tex log2do2 hansen2 gam ivhettest newey2 intext matrixof mrdum fastcd ivendog tabcount tabcond minap qrowname cij ciw textgph latab autolog histbox kdbox sunflower charlist adoedit lincomest stylerules strgen wntstmvq grnote xttrans2 inequal7 tablepc hegy4 regdplot denormix chi2fit bstut aboutreg _gstd01 cpcorr mktab vecar xdatelist strdate thinplate gfields takelogs catgraph dsconcat tosql outseries glcurve7 omninorm summdate sencode rgroup cf3 hlpdir descsave outmat svytabs mstore savesome stbget spsurv xtgraph effects stpm madfuller aformat todate _gwtmean tsgraph soreg fbar metaninf blist johans vecar6 outtable factmerg factref factext hadrilm levinlin nharvey ipshin gpreset geneigen dotex outtex 
sutex dsearch chiplot idonepsu statsmat ds3 dthaz paran gprefscode _gsoundex bpass bking labsort intreg2 sq powercal denton corr_svy log2html dfao xpredict mcl listtex raschcvt diagt estsave egenmore labutil concord avplot2 tablecol metabias coldiag fitstat doubmass cortesti fndmtch2 cusum6 ghistcum findval centcalc xrigls dfgls charutil icomp enlarge kpss metatrim ivgmm0 smhsiao matvsort roblpr modlpr recode2 showgph copydesc shapley rnd himatrix bspline stcascoh stselpre nct ncf hist3 dolog dologx tscollap bcoeff grfreq grlogit lrchg lrmatx lrplot lrutil forfile printgph readlog spaces title dashln lomodrs ctabstat expandby finddup fitmacro normtest predcalc _grmedf ranvar matmap svmatf lincom2 csjl shownear fracdiff genvars calibr fracirf xriml rowranks tgraph ordplot cpr mlcoint stcmd xttest3 atkplot fsreg ciform rowsort expgen epsigr dashgph addtxt swboot stak _grprod sskapp xttest2 trinary ivprob-ivtobit6 torumm split q linesize keyb expr xtlist xtcount xtcorr2 vce2 summvl ststrata stcount simul2 regh pcamv pca mvpatt mokken lrtest2 linest iia hordered htest elogit diag wraplist qsort precmd modfycmd matfunc listuniq multgof hilite2 unlabeld to_msp sample2 placevar listby listblck hh genl for2 dropvars countby bys icslib varcase tsplot diagtest ssizebi studysi poverty ineq dissim geivars ineqdec0 ineqdeco ineqfac povdeco sumdist xfrac dagumfit smfit cpyxplot mkbilogn univstat hotdeck matodd p_vlists gennorm tab_chi sbplot5 sbplot mfracpol keyplot taba ds5 tabplot cistat probitiv tobitiv _gslope sphdist ginidesc inequal2 kernreg1 kernreg2 outfix outfix2 seg outreg rfregk spautoc onewplot pwcorrs ltable2 tabhbar hbox tabhplot cihplot civplot sf36 pwploti partgam cf2 xtile2 ivglog kwallis2 far5 jb6 gby strparse _gprod mfilegr desmat areg2 margfx arimafit moreobs tsmktim durbinh bgtest mnthplot archlm gphudak renames skewplot cnsrsig recast2 doub2flt feldti tolower lfsum whitetst bpagan listutil mdensity kdmany stquant byvar cflpois workdays flower _grpos stcoxgof stcoxplt stpiece overid overidxt swapval adotype fndmtch svvarlbl gentrun storecmd sto lrdrop1 lrseq dmexogxt probexog-tobexog predxcon predxcat mmerge tablab for211 gmci grand nbinreg spikeplt ocratio biplot coranal mca heckman2 marker markov pgamma qgamma somersd pantest2 datmat distan missing quantil2 distplot tpred contrast cid rglm dtobit2 ljs ewma barplot genfreq hbar hplot fodstr catdev rmanova ranova seq intterms lmoments regresby reglike pweibull wbull qweibull regpred logpred adjmean adjprop spell switchr trnbin0 mvsamp1i mvsampsi tpvar mvtest addtex pwcorrw vlist violin eba mstdize orthog stcumh ccweight psbayes oprobpr cndnmb3 pbeta qbeta vmatch kr20 sbrowni canon stbtcalc stgtcalc zb_qrm catenate lprplot nnest longplot parmest qqplot2 jb zip zinb hetprob unique longch gwhet williams adjust barplot2 grand2 histplot xcorplot clarify mlogpred nproc labgraph vallist pexp qexp lms levene centroid medoid cluster fulltab t2way5 epiconf lstack deaton colelms confsvy median winsor bys torats venndiag chaos muxyplot muxplot irrepro triplot tomode circstat tryem white strip ralloc acplot stack symmetry omodel allcross dups iia sdtest vplplot summvl labsumm loopplot elapse istdize blogit2 sparl vallab gologit mkstrsn poisml trpois0 cenpois sssplot hausman stcstat forvalues """ ) #---- Syntax Style Specs ----# SYNTAX_ITEMS = [ (stc.STC_C_DEFAULT, 'default_style'), (stc.STC_C_COMMENT, 'comment_style'), (stc.STC_C_COMMENTLINE, 'comment_style'), (stc.STC_C_COMMENTDOC, 'comment_style'), (stc.STC_C_COMMENTDOCKEYWORD, 
'dockey_style'),
                 (stc.STC_C_COMMENTDOCKEYWORDERROR, 'error_style'),
                 (stc.STC_C_COMMENTLINEDOC, 'comment_style'),
                 (stc.STC_C_CHARACTER, 'char_style'),
                 (stc.STC_C_GLOBALCLASS, 'global_style'),
                 (stc.STC_C_IDENTIFIER, 'default_style'),
                 (stc.STC_C_NUMBER, 'number_style'),
                 (stc.STC_C_OPERATOR, 'operator_style'),
                 (stc.STC_C_PREPROCESSOR, 'pre_style'),
                 (stc.STC_C_REGEX, 'pre_style'),
                 (stc.STC_C_STRING, 'string_style'),
                 (stc.STC_C_STRINGEOL, 'default_style'),
                 (stc.STC_C_UUID, 'pre_style'),
                 (stc.STC_C_VERBATIM, 'number2_style'),
                 (stc.STC_C_WORD, 'keyword_style'),
                 (stc.STC_C_WORD2, 'keyword2_style') ]

#---- Extra Properties ----#
FOLD = ("fold", "1")
FOLD_PRE = ("styling.within.preprocessor", "0")
FOLD_COM = ("fold.comment", "1")
FOLD_COMP = ("fold.compact", "1")
FOLD_ELSE = ("fold.at.else", "0")

#------------------------------------------------------------------------------#

class SyntaxData(syndata.SyntaxDataBase):
    """SyntaxData object for STATA"""
    def __init__(self, langid):
        super(SyntaxData, self).__init__(langid)

        # Setup
        self.SetLexer(stc.STC_LEX_CPP)
        self.RegisterFeature(synglob.FEATURE_AUTOINDENT, AutoIndenter)

    def GetKeywords(self):
        """Returns Specified Keywords List """
        return [MAIN_KEYWORDS, SECONDARY_KEYWORDS]

    def GetSyntaxSpec(self):
        """Syntax Specifications """
        return SYNTAX_ITEMS

    def GetProperties(self):
        """Returns a list of Extra Properties to set """
        return [FOLD, FOLD_PRE, FOLD_COM]

    # TODO: this doesn't look right...
    def GetCommentPattern(self):
        """Returns a list of characters used to comment a block of code """
        return [u'//', u'/*', u'*/', u'*']
```
#### File: src/syntax/synglob.py
```python
__author__ = "<NAME> <<EMAIL>>"
__svnid__ = "$Id: synglob.py 69887 2011-12-01 19:54:13Z CJP $"
__revision__ = "$Revision: 69887 $"

#-----------------------------------------------------------------------------#
# Dependencies
import wx.stc as stc

# The language identifiers and the EXT_MAP have been moved out of this
# module in order to be independent of Editra and wx, but they are
# still needed here...
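# Note: each LANG_MAP entry below pairs a language constant with a
# (language id, syntax module name) tuple, e.g. LANG_PYTHON maps to
# (ID_LANG_PYTHON, '_python'); the module name is presumably resolved
# and imported on demand by the syntax manager rather than up front.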
from synextreg import * #-----------------------------------------------------------------------------# # Feature Identifiers FEATURE_AUTOINDENT = u"AutoIndenter" FEATURE_STYLETEXT = u"StyleText" #-----------------------------------------------------------------------------# # Maps file types to syntax definitions LANG_MAP = {LANG_4GL : (ID_LANG_4GL, '_progress'), LANG_DSP56K : (ID_LANG_DSP56K, '_asm68k'), LANG_68K : (ID_LANG_68K, '_asm68k'), LANG_ADA : (ID_LANG_ADA, '_ada'), LANG_APACHE : (ID_LANG_APACHE, '_apache'), LANG_AS : (ID_LANG_AS, '_actionscript'), LANG_ASM : (ID_LANG_ASM, '_asm'), LANG_BASH : (ID_LANG_BASH, '_sh'), LANG_BATCH : (ID_LANG_BATCH, '_batch'), LANG_BOO : (ID_LANG_BOO, '_boo'), LANG_C : (ID_LANG_C, '_cpp'), LANG_CAML : (ID_LANG_CAML, '_caml'), LANG_CILK : (ID_LANG_CILK, '_cpp'), LANG_COBRA : (ID_LANG_COBRA, '_cobra'), LANG_COLDFUSION : (ID_LANG_COLDFUSION, '_html'), LANG_CPP : (ID_LANG_CPP, '_cpp'), LANG_CSH : (ID_LANG_CSH, '_sh'), LANG_CSHARP : (ID_LANG_CSHARP, '_cpp'), LANG_CSS : (ID_LANG_CSS, '_css'), LANG_D : (ID_LANG_D, '_d'), LANG_DIFF : (ID_LANG_DIFF, '_diff'), LANG_DJANGO : (ID_LANG_DJANGO, '_django'), LANG_DOT : (ID_LANG_DOT, '_dot'), LANG_EDJE : (ID_LANG_EDJE, '_edje'), LANG_EIFFEL : (ID_LANG_EIFFEL, '_eiffel'), LANG_ERLANG : (ID_LANG_ERLANG, '_erlang'), LANG_ESS : (ID_LANG_ESS, '_editra_ss'), LANG_F77 : (ID_LANG_F77, '_fortran'), LANG_F95 : (ID_LANG_F95, '_fortran'), LANG_FERITE : (ID_LANG_FERITE, '_ferite'), LANG_FLAGSHIP: (ID_LANG_FLAGSHIP, '_flagship'), LANG_FORTH : (ID_LANG_FORTH, '_forth'), LANG_GLSL : (ID_LANG_GLSL, '_glsl'), LANG_GUI4CLI : (ID_LANG_GUI4CLI, '_gui4cli'), LANG_HASKELL : (ID_LANG_HASKELL, '_haskell'), LANG_HAXE : (ID_LANG_HAXE, '_haxe'), LANG_HTML : (ID_LANG_HTML, '_html'), LANG_INNO : (ID_LANG_INNO, '_inno'), LANG_ISSL : (ID_LANG_ISSL, '_issuelist'), LANG_JAVA : (ID_LANG_JAVA, '_java'), LANG_JS : (ID_LANG_JS, '_javascript'), LANG_KIX : (ID_LANG_KIX, '_kix'), LANG_KSH : (ID_LANG_KSH, '_sh'), LANG_LATEX : (ID_LANG_LATEX, '_latex'), LANG_LISP : (ID_LANG_LISP, '_lisp'), LANG_LOUT : (ID_LANG_LOUT, '_lout'), LANG_LUA : (ID_LANG_LUA, '_lua'), LANG_MAKE : (ID_LANG_MAKE, '_make'), LANG_MAKO : (ID_LANG_MAKO, '_mako'), LANG_MASM : (ID_LANG_MASM, '_masm'), LANG_MATLAB : (ID_LANG_MATLAB, '_matlab'), LANG_MSSQL : (ID_LANG_MSSQL, '_mssql'), LANG_NASM : (ID_LANG_NASM, '_nasm'), LANG_NEWLISP: (ID_LANG_NEWLISP,'_lisp'), LANG_NONMEM : (ID_LANG_NONMEM, '_nonmem'), LANG_NSIS : (ID_LANG_NSIS, '_nsis'), LANG_OBJC : (ID_LANG_OBJC, '_cpp'), LANG_OCTAVE : (ID_LANG_OCTAVE, '_matlab'), LANG_OOC : (ID_LANG_OOC, '_ooc'), LANG_PASCAL : (ID_LANG_PASCAL, '_pascal'), LANG_PERL : (ID_LANG_PERL, '_perl'), LANG_PHP : (ID_LANG_PHP, '_php'), LANG_PIKE : (ID_LANG_PIKE, '_pike'), LANG_PLSQL : (ID_LANG_PLSQL, '_sql'), LANG_PROPS : (ID_LANG_PROPS, '_props'), LANG_PS : (ID_LANG_PS, '_postscript'), LANG_PYTHON : (ID_LANG_PYTHON, '_python'), LANG_R : (ID_LANG_R, '_s'), LANG_RUBY : (ID_LANG_RUBY, '_ruby'), LANG_S : (ID_LANG_S, '_s'), LANG_SCHEME : (ID_LANG_SCHEME, '_lisp'), LANG_SQL : (ID_LANG_SQL, '_sql'), LANG_SQUIRREL : (ID_LANG_SQUIRREL, '_squirrel'), LANG_ST : (ID_LANG_ST, '_smalltalk'), LANG_STATA : (ID_LANG_STATA, '_stata'), LANG_SYSVERILOG : (ID_LANG_SYSVERILOG, '_verilog'), LANG_TCL : (ID_LANG_TCL, '_tcl'), LANG_TXT : (ID_LANG_TXT, None), LANG_VALA : (ID_LANG_VALA, '_cpp'), LANG_VB : (ID_LANG_VB, '_visualbasic'), LANG_VBSCRIPT : (ID_LANG_VBSCRIPT, '_vbscript'), LANG_VERILOG: (ID_LANG_VERILOG, '_verilog'), LANG_VHDL : (ID_LANG_VHDL, '_vhdl'), LANG_XML : (ID_LANG_XML, 
'_xml'), LANG_YAML : (ID_LANG_YAML, '_yaml'), LANG_GROOVY : (ID_LANG_GROOVY, '_groovy'), LANG_XTEXT : (ID_LANG_XTEXT, '_xtext') } ### TODO: Profiling on the following methods to see if caching is necessary ### # Dynamically finds the language description string that matches the given # language id. # Used when manually setting lexer from a menu/dialog def GetDescriptionFromId(lang_id): """Get the programming languages description string from the given language id. If no corresponding language is found the plain text description is returned. @param lang_id: Language Identifier ID @note: requires that all languages are defined in ID_LANG_NAME, LANG_NAME pairs to work properly. """ rval = LANG_TXT # Guard against async code that may be modifying globals globs = dict(globals()) for key, val in globs.iteritems(): if val == lang_id and key.startswith('ID_LANG'): rval = globs.get(key[3:], LANG_TXT) break return rval def GetIdFromDescription(desc): """Get the language identifier for the given file type string. The search is case insensitive. @param desc: unicode (i.e u"Python") @note: if lookup fails ID_LANG_TXT is returned """ rval = ID_LANG_TXT desc = desc.lower() # Guard against async code that may be modifying globals globs = dict(globals()) for key, val in globs.iteritems(): if isinstance(val, unicode): if val.lower() == desc and key.startswith('LANG_'): rval = globs.get("ID_" + key, ID_LANG_TXT) break return rval ``` #### File: Editra/src/util.py ```python __author__ = "<NAME> <<EMAIL>>" __svnid__ = "$Id: util.py 72623 2012-10-06 19:33:06Z CJP $" __revision__ = "$Revision: 72623 $" #--------------------------------------------------------------------------# # Imports import os import sys import mimetypes import encodings import codecs import urllib2 import wx # Editra Libraries import ed_glob import ed_event import ed_crypt import dev_tool import syntax.syntax as syntax import syntax.synglob as synglob import ebmlib _ = wx.GetTranslation #--------------------------------------------------------------------------# class DropTargetFT(wx.PyDropTarget): """Drop target capable of accepting dropped files and text @todo: has some issues with the clipboard on windows under certain conditions. They are not fatal but need fixing. 
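    A minimal usage sketch (the window and callbacks here are
    illustrative, not part of this module):

        target = DropTargetFT(window,
                              textcallback=lambda txt: None,
                              filecallback=lambda paths: None)
        window.SetDropTarget(target)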
""" def __init__(self, window, textcallback=None, filecallback=None): """Initializes the Drop target @param window: window to receive drop objects @keyword textcallback: Callback for when text is dropped @keyword filecallback: Callback for when file(s) are dropped """ super(DropTargetFT, self).__init__() # Attributes self.window = window self._data = dict(data=None, fdata=None, tdata=None, tcallb=textcallback, fcallb=filecallback) self._tmp = None self._lastp = None # Setup self.InitObjects() def CreateDragString(self, txt): """Creates a bitmap of the text that is being dragged @todo: possibly set colors to match highlighting of text @todo: generalize this to be usable by other widgets besides stc """ if not isinstance(self.window, wx.stc.StyledTextCtrl): return stc = self.window txt = txt.split(stc.GetEOLChar()) longest = (0, 0) for line in txt: ext = stc.GetTextExtent(line) if ext[0] > longest[0]: longest = ext cords = [ (0, x * longest[1]) for x in range(len(txt)) ] try: mdc = wx.MemoryDC(wx.EmptyBitmap(longest[0] + 5, longest[1] * len(txt), 32)) mdc.SetBackgroundMode(wx.TRANSPARENT) mdc.SetTextForeground(stc.GetDefaultForeColour()) mdc.SetFont(stc.GetDefaultFont()) mdc.DrawTextList(txt, cords) self._tmp = wx.DragImage(mdc.GetAsBitmap()) except wx.PyAssertionError, msg: Log("[droptargetft][err] %s" % str(msg)) def InitObjects(self): """Initializes the text and file data objects @postcondition: all data objects are initialized """ self._data['data'] = wx.DataObjectComposite() self._data['tdata'] = wx.TextDataObject() self._data['fdata'] = wx.FileDataObject() self._data['data'].Add(self._data['tdata'], True) self._data['data'].Add(self._data['fdata'], False) self.SetDataObject(self._data['data']) def OnEnter(self, x_cord, y_cord, drag_result): """Called when a drag starts @param x_cord: x cord of enter point @param y_cord: y cord of enter point @param drag_result: wxDrag value @return: result of drop object entering window """ # GetData seems to happen automatically on msw, calling it again # causes this to fail the first time. if wx.Platform in ['__WXGTK__', '__WXMSW__']: return wx.DragCopy if wx.Platform == '__WXMAC__': try: self.GetData() except wx.PyAssertionError: return wx.DragError self._lastp = (x_cord, y_cord) files = self._data['fdata'].GetFilenames() text = self._data['tdata'].GetText() if len(files): self.window.SetCursor(wx.StockCursor(wx.CURSOR_COPY_ARROW)) else: self.CreateDragString(text) return drag_result def OnDrop(self, x_cord=0, y_cord=0): """Gets the drop cords @keyword x_cord: x cord of drop object @keyword y_cord: y cord of drop object @todo: implement snapback when drop is out of range """ self._tmp = None self._lastp = None return True def OnDragOver(self, x_cord, y_cord, drag_result): """Called when the cursor is moved during a drag action @param x_cord: x cord of mouse @param y_cord: y cord of mouse @param drag_result: Drag result value @return: result of drag over @todo: For some reason the caret position changes which can be seen by the brackets getting highlighted. However the actual caret is not moved. 
""" stc = self.window if self._tmp is None: if hasattr(stc, 'DoDragOver'): val = stc.DoDragOver(x_cord, y_cord, drag_result) self.ScrollBuffer(stc, x_cord, y_cord) drag_result = wx.DragCopy else: # A drag image was created if hasattr(stc, 'DoDragOver'): point = wx.Point(x_cord, y_cord) self._tmp.BeginDrag(point - self._lastp, stc) self._tmp.Hide() stc.DoDragOver(x_cord, y_cord, drag_result) self._tmp.Move(point) self._tmp.Show() self._tmp.RedrawImage(self._lastp, point, True, True) self._lastp = point self.ScrollBuffer(stc, x_cord, y_cord) drag_result = wx.DragCopy return drag_result def OnData(self, x_cord, y_cord, drag_result): """Gets and processes the dropped data @param x_cord: x coordinate @param y_cord: y coordinate @param drag_result: wx Drag result value @postcondition: dropped data is processed """ self.window.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) if self.window.HasCapture(): self.window.ReleaseMouse() try: data = self.GetData() except wx.PyAssertionError: wx.PostEvent(self.window.GetTopLevelParent(), \ ed_event.StatusEvent(ed_event.edEVT_STATUS, -1, _("Unable to accept dropped file " "or text"))) data = False drag_result = wx.DragCancel if data: files = self._data['fdata'].GetFilenames() text = self._data['tdata'].GetText() if len(files) > 0 and self._data['fcallb'] is not None: self._data['fcallb'](files) elif len(text) > 0: if self._data['tcallb'] is not None: self._data['tcallb'](text) elif hasattr(self.window, 'DoDropText'): self.window.DoDropText(x_cord, y_cord, text) self.InitObjects() return drag_result def OnLeave(self): """Handles the event of when the drag object leaves the window @postcondition: Cursor is set back to normal state """ self.window.SetCursor(wx.StockCursor(wx.CURSOR_ARROW)) if self.window.HasCapture(): self.window.ReleaseMouse() if self._tmp is not None: try: self._tmp.EndDrag() except wx.PyAssertionError, msg: Log("[droptargetft][err] %s" % str(msg)) @staticmethod def ScrollBuffer(stc, x_cord, y_cord): """Scroll the buffer as the dragged text is moved towards the ends. @param stc: StyledTextCtrl @param x_cord: int (x position) @param y_cord: int (y position) @note: currently does not work on wxMac """ try: cline = stc.PositionFromPoint(wx.Point(x_cord, y_cord)) if cline != wx.stc.STC_INVALID_POSITION: cline = stc.LineFromPosition(cline) fline = stc.GetFirstVisibleLine() lline = stc.GetLastVisibleLine() if (cline - fline) < 2: stc.ScrollLines(-1) elif lline - cline < 2: stc.ScrollLines(1) else: pass except wx.PyAssertionError, msg: Log("[droptargetft][err] ScrollBuffer: %s" % msg) #---- End FileDropTarget ----# class EdClipboard(ebmlib.CycleCache): """Local clipboard object @todo: make into a singleton """ def GetNext(self): """Get the next item in the cache""" # Initialize the clipboard if it hasn't been loaded yet and # there is something in the system clipboard if self.GetCurrentSize() == 0: txt = GetClipboardText() if txt is not None: self.Put(txt) return super(EdClipboard, self).GetNext() def IsAtIndex(self, txt): """Is the passed in phrase at the current cycle index in the cache. Used to check if index should be reset or to continue in the cycle. 
@param txt: selected text """ pre = self.PeekPrev() next = self.PeekNext() if txt in (pre, next): return True else: return False def Put(self, txt): """Put some text in the clipboard @param txt: Text to put in the system clipboard """ pre = self.PeekPrev() next = self.PeekNext() if len(txt) and txt not in (pre, next): self.PutItem(txt) #---- Misc Common Function Library ----# # Used for holding the primary selection on mac/msw FAKE_CLIPBOARD = None def GetClipboardText(primary=False): """Get the primary selection from the clipboard if there is one @return: str or None """ if primary and wx.Platform == '__WXGTK__': wx.TheClipboard.UsePrimarySelection(True) elif primary: # Fake the primary selection on mac/msw global FAKE_CLIPBOARD return FAKE_CLIPBOARD else: pass text_obj = wx.TextDataObject() rtxt = None if wx.TheClipboard.IsOpened() or wx.TheClipboard.Open(): if wx.TheClipboard.GetData(text_obj): rtxt = text_obj.GetText() wx.TheClipboard.Close() if primary and wx.Platform == '__WXGTK__': wx.TheClipboard.UsePrimarySelection(False) return rtxt def SetClipboardText(txt, primary=False): """Copies text to the clipboard @param txt: text to put in clipboard @keyword primary: Set txt as primary selection (x11) """ # Check if using primary selection if primary and wx.Platform == '__WXGTK__': wx.TheClipboard.UsePrimarySelection(True) elif primary: # Fake the primary selection on mac/msw global FAKE_CLIPBOARD FAKE_CLIPBOARD = txt return True else: pass data_o = wx.TextDataObject() data_o.SetText(txt) if wx.TheClipboard.IsOpened() or wx.TheClipboard.Open(): wx.TheClipboard.SetData(data_o) wx.TheClipboard.Close() if primary and wx.Platform == '__WXGTK__': wx.TheClipboard.UsePrimarySelection(False) return True else: return False def FilterFiles(file_list): """Filters a list of paths and returns a list of paths that can probably be opened in the editor. @param file_list: list of files/folders to filter for good files in """ good = list() checker = ebmlib.FileTypeChecker() for path in file_list: if not checker.IsBinary(path): good.append(path) return good def GetFileType(fname): """Get what the type of the file is as Editra sees it in a formatted string. @param fname: file path @return: string (formatted/translated filetype) """ if os.path.isdir(fname): return _("Folder") eguess = syntax.GetTypeFromExt(fname.split('.')[-1]) if eguess == synglob.LANG_TXT and fname.split('.')[-1] == 'txt': return _("Text Document") elif eguess == synglob.LANG_TXT: mtype = mimetypes.guess_type(fname)[0] if mtype is not None: return mtype else: return _("Unknown") else: return _("%s Source File") % eguess def GetFileReader(file_name, enc='utf-8'): """Returns a file stream reader object for reading the supplied file name. It returns a file reader using the encoding (enc) which defaults to utf-8. If lookup of the reader fails on the host system it will return an ascii reader. If there is an error in creating the file reader the function will return a negative number. @param file_name: name of file to get a reader for @keyword enc: encoding to use for reading the file @return file reader, or int if error. 
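    Example usage (a minimal sketch; the path is illustrative):

        reader = GetFileReader('notes.txt', 'utf-8')
        if reader != -1:
            text = reader.read()
            reader.close()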
""" try: file_h = file(file_name, "rb") except (IOError, OSError): dev_tool.DEBUGP("[file_reader] Failed to open file %s" % file_name) return -1 try: reader = codecs.getreader(enc)(file_h) except (LookupError, IndexError, ValueError): dev_tool.DEBUGP('[file_reader] Failed to get %s Reader' % enc) reader = file_h return reader def GetFileWriter(file_name, enc='utf-8'): """Returns a file stream writer object for reading the supplied file name. It returns a file writer in the supplied encoding if the host system supports it other wise it will return an ascii reader. The default will try and return a utf-8 reader. If there is an error in creating the file reader the function will return a negative number. @param file_name: path of file to get writer for @keyword enc: encoding to write text to file with """ try: file_h = open(file_name, "wb") except IOError: dev_tool.DEBUGP("[file_writer][err] Failed to open file %s" % file_name) return -1 try: writer = codecs.getwriter(enc)(file_h) except (LookupError, IndexError, ValueError): dev_tool.DEBUGP('[file_writer][err] Failed to get %s Writer' % enc) writer = file_h return writer # TODO: DEPRECATED - remove once callers migrate to ebmlib GetFileManagerCmd = ebmlib.GetFileManagerCmd def GetUserConfigBase(): """Get the base user configuration directory path""" cbase = ed_glob.CONFIG['CONFIG_BASE'] if cbase is None: cbase = wx.StandardPaths_Get().GetUserDataDir() if wx.Platform == '__WXGTK__': if u'.config' not in cbase and not os.path.exists(cbase): # If no existing configuration return xdg config path base, cfgdir = os.path.split(cbase) tmp_path = os.path.join(base, '.config') if os.path.exists(tmp_path): cbase = os.path.join(tmp_path, cfgdir.lstrip(u'.')) return cbase + os.sep def HasConfigDir(loc=u""): """ Checks if the user has a config directory and returns True if the config directory exists or False if it does not. @return: whether config dir in question exists on an expected path """ cbase = GetUserConfigBase() to_check = os.path.join(cbase, loc) return os.path.exists(to_check) def MakeConfigDir(name): """Makes a user config directory @param name: name of config directory to make in user config dir """ cbase = GetUserConfigBase() try: os.mkdir(cbase + name) except (OSError, IOError): pass def RepairConfigState(path): """Repair the state of profile path, updating and creating it it does not exist. @param path: path of profile """ if os.path.isabs(path) and os.path.exists(path): return path else: # Need to fix some stuff up CreateConfigDir() import profiler return profiler.Profile_Get("MYPROFILE") def CreateConfigDir(): """ Creates the user config directory its default sub directories and any of the default config files. 
@postcondition: all default configuration files/folders are created """ #---- Resolve Paths ----# config_dir = GetUserConfigBase() profile_dir = os.path.join(config_dir, u"profiles") dest_file = os.path.join(profile_dir, u"default.ppb") ext_cfg = [u"cache", u"styles", u"plugins"] #---- Create Directories ----# if not os.path.exists(config_dir): os.mkdir(config_dir) if not os.path.exists(profile_dir): os.mkdir(profile_dir) for cfg in ext_cfg: if not HasConfigDir(cfg): MakeConfigDir(cfg) import profiler profiler.TheProfile.LoadDefaults() profiler.Profile_Set("MYPROFILE", dest_file) profiler.TheProfile.Write(dest_file) profiler.UpdateProfileLoader() def ResolvConfigDir(config_dir, sys_only=False): """Checks for a user config directory and if it is not found it then resolves the absolute path of the executables directory from the relative execution path. This is then used to find the location of the specified directory as it relates to the executable directory, and returns that path as a string. @param config_dir: name of config directory to resolve @keyword sys_only: only get paths of system config directory or user one @note: This method is probably much more complex than it needs to be but the code has proven itself. """ # Try to get a User config directory if not sys_only: user_config = GetUserConfigBase() user_config = os.path.join(user_config, config_dir) if os.path.exists(user_config): return user_config + os.sep # Check if the system install path has already been resolved once before if ed_glob.CONFIG['INSTALL_DIR'] != u"": tmp = os.path.join(ed_glob.CONFIG['INSTALL_DIR'], config_dir) tmp = os.path.normpath(tmp) + os.sep if os.path.exists(tmp): return tmp else: del tmp # The following lines are used only when Editra is being run as a # source package. If the found path does not exist then Editra is # running as as a built package. if not hasattr(sys, 'frozen'): path = __file__ if not ebmlib.IsUnicode(path): path = path.decode(sys.getfilesystemencoding()) path = os.sep.join(path.split(os.sep)[:-2]) path = path + os.sep + config_dir + os.sep if os.path.exists(path): if not ebmlib.IsUnicode(path): path = unicode(path, sys.getfilesystemencoding()) return path # If we get here we need to do some platform dependent lookup # to find everything. 
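    # Roughly: start from the executable/script path (resolving any
    # symlink), then derive the install dir per platform: next to the
    # exe on Windows, under the applet's Resources directory on OS X,
    # and two directory levels up from the script everywhere else.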
    path = sys.argv[0]
    if not ebmlib.IsUnicode(path):
        path = unicode(path, sys.getfilesystemencoding())

    # If it is a link get the real path
    if os.path.islink(path):
        path = os.path.realpath(path)

    # Tokenize path
    pieces = path.split(os.sep)

    if wx.Platform == u'__WXMSW__':
        # On Windows the exe is in same dir as config directories
        pro_path = os.sep.join(pieces[:-1])

        if os.path.isabs(pro_path):
            pass
        elif pro_path == u"":
            pro_path = os.getcwd()
            pieces = pro_path.split(os.sep)
            pro_path = os.sep.join(pieces[:-1])
        else:
            pro_path = os.path.abspath(pro_path)
    elif wx.Platform == u'__WXMAC__':
        # On OS X the config directories are in the applet under Resources
        stdpath = wx.StandardPaths_Get()
        pro_path = stdpath.GetResourcesDir()
        pro_path = os.path.join(pro_path, config_dir)
    else:
        pro_path = os.sep.join(pieces[:-2])

        if pro_path.startswith(os.sep):
            pass
        elif pro_path == u"":
            pro_path = os.getcwd()
            pieces = pro_path.split(os.sep)
            if pieces[-1] not in [ed_glob.PROG_NAME.lower(), ed_glob.PROG_NAME]:
                pro_path = os.sep.join(pieces[:-1])
        else:
            pro_path = os.path.abspath(pro_path)

    if wx.Platform != u'__WXMAC__':
        pro_path = pro_path + os.sep + config_dir + os.sep

    path = os.path.normpath(pro_path) + os.sep

    # Make sure path is unicode
    if not ebmlib.IsUnicode(path):
        path = unicode(path, sys.getdefaultencoding())

    return path

def GetResources(resource):
    """Returns a list of resource directories from a given toplevel config dir
    @param resource: config directory name
    @return: list of resource directories that exist under the given
             resource path

    """
    rec_dir = ResolvConfigDir(resource)
    if os.path.exists(rec_dir):
        rec_lst = [ rec.title() for rec in os.listdir(rec_dir)
                    if os.path.isdir(rec_dir + rec) and rec[0] != u"." ]
        return rec_lst
    else:
        return -1

def GetResourceFiles(resource, trim=True, get_all=False, suffix=None,
                     title=True):
    """Gets a list of resource files from a directory and trims the file
    extensions from the names if trim is set to True (default). If the
    get_all parameter is set to True the function will return a set of
    unique items by looking up both the user and system level files and
    combining them, the default behavior returns the user level files if
    they exist or the system level files if the user ones do not exist.
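    Example usage (a minimal sketch; the directory and suffix are
    illustrative):

        style_sheets = GetResourceFiles(u'styles', trim=True,
                                        get_all=True, suffix=u'.ess')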
    @param resource: name of config directory to look in (i.e. cache)
    @keyword trim: trim file extensions or not
    @keyword get_all: get a set of both system/user files or just user level
    @keyword suffix: Get files that have the specified suffix or all (default)
    @keyword title: Titlize the results

    """
    rec_dir = ResolvConfigDir(resource)
    if get_all:
        rec_dir2 = ResolvConfigDir(resource, True)

    rec_list = list()
    if not os.path.exists(rec_dir):
        return -1
    else:
        recs = os.listdir(rec_dir)
        if get_all and os.path.exists(rec_dir2):
            recs.extend(os.listdir(rec_dir2))

        for rec in recs:
            if os.path.isfile(rec_dir + rec) or \
               (get_all and os.path.isfile(rec_dir2 + rec)):

                # If a suffix was specified only keep files that match
                if suffix is not None:
                    if not rec.endswith(suffix):
                        continue

                # Trim the last part of an extension if one exists
                if trim:
                    rec = ".".join(rec.split(u".")[:-1]).strip()

                # Make the resource name a title if requested
                if title and len(rec):
                    rec = rec[0].upper() + rec[1:]

                if len(rec):
                    rec_list.append(rec)

        # Deduplicate before sorting so the returned list stays ordered
        rec_list = list(set(rec_list))
        rec_list.sort()
        return rec_list

def GetAllEncodings():
    """Get all encodings found on the system
    @return: list of strings

    """
    elist = encodings.aliases.aliases.values()
    elist = list(set(elist))
    elist.sort()
    elist = [ enc for enc in elist if not enc.endswith('codec') ]
    return elist

def Log(msg, *args):
    """Push the message to the app's log
    @param msg: message string to log
    @param args: optional positional arguments to use as a printf
                 formatting to the message.

    """
    try:
        wx.GetApp().GetLog()(msg, args)
    except:
        pass

def GetProxyOpener(proxy_set):
    """Get a urlopener for use with a proxy
    @param proxy_set: proxy settings to use

    """
    Log("[util][info] Making proxy opener with %s" % str(proxy_set))
    proxy_info = dict(proxy_set)
    auth_str = "%(uname)s:%(passwd)s@%(url)s"
    url = proxy_info['url']
    if url.startswith('http://'):
        auth_str = "http://" + auth_str
        proxy_info['url'] = url.replace('http://', '')
    else:
        pass

    if len(proxy_info.get('port', '')):
        auth_str = auth_str + ":%(port)s"

    proxy_info['passwd'] = ed_crypt.Decrypt(proxy_info['passwd'],
                                            proxy_info['pid'])
    Log("[util][info] Formatted proxy request: %s" % \
        (auth_str.replace('%(passwd)s', '****') % proxy_info))
    proxy = urllib2.ProxyHandler({"http" : auth_str % proxy_info})
    opener = urllib2.build_opener(proxy, urllib2.HTTPHandler)
    return opener

#---- GUI helper functions ----#

def SetWindowIcon(window):
    """Sets the given window's icon to be the program's application icon.
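    @note: uses the editra.ico resource on Windows and the editra.png
    resource elsewhere, both looked up under CONFIG['SYSPIX_DIR'].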
@param window: window to set app icon for """ try: if wx.Platform == "__WXMSW__": ed_icon = ed_glob.CONFIG['SYSPIX_DIR'] + u"editra.ico" window.SetIcon(wx.Icon(ed_icon, wx.BITMAP_TYPE_ICO)) else: ed_icon = ed_glob.CONFIG['SYSPIX_DIR'] + u"editra.png" window.SetIcon(wx.Icon(ed_icon, wx.BITMAP_TYPE_PNG)) finally: pass #-----------------------------------------------------------------------------# class IntValidator(wx.PyValidator): """A Generic integer validator""" def __init__(self, min_=0, max_=0): """Initialize the validator @keyword min_: min value to accept @keyword max_: max value to accept """ wx.PyValidator.__init__(self) self._min = min_ self._max = max_ # Event management self.Bind(wx.EVT_CHAR, self.OnChar) def Clone(self): """Clones the current validator @return: clone of this object """ return IntValidator(self._min, self._max) def Validate(self, win): """Validate an window value @param win: window to validate """ val = win.GetValue() return val.isdigit() def OnChar(self, event): """Process values as they are entered into the control @param event: event that called this handler """ key = event.GetKeyCode() if key < wx.WXK_SPACE or key == wx.WXK_DELETE or \ key > 255 or chr(key) in '0123456789': event.Skip() return if not wx.Validator_IsSilent(): wx.Bell() return ``` #### File: wx/tools/img2py.py ```python import base64 import getopt import glob import os import re import sys import tempfile import wx import img2img try: b64encode = base64.b64encode except AttributeError: b64encode = base64.encodestring app = None DEFAULT_APPEND = False DEFAULT_COMPRESSED = True DEFAULT_MASKCLR = None DEFAULT_IMGNAME = "" DEFAULT_ICON = False DEFAULT_CATALOG = False DEFAULT_COMPATIBLE = False # THIS IS USED TO IDENTIFY, IN THE GENERATED SCRIPT, LINES IN THE FORM # "index.append('Image name')" indexPattern = re.compile(r"\s*index.append\('(.+)'\)\s*") def convert(fileName, maskClr, outputDir, outputName, outType, outExt): # if the file is already the right type then just use it directly if maskClr == DEFAULT_MASKCLR and fileName.upper().endswith(outExt.upper()): if outputName: newname = outputName else: newname = os.path.join(outputDir, os.path.basename(os.path.splitext(fileName)[0]) + outExt) file(newname, "wb").write(file(fileName, "rb").read()) return 1, "ok" else: return img2img.convert(fileName, maskClr, outputDir, outputName, outType, outExt) def img2py(image_file, python_file, append=DEFAULT_APPEND, compressed=DEFAULT_COMPRESSED, maskClr=DEFAULT_MASKCLR, imgName=DEFAULT_IMGNAME, icon=DEFAULT_ICON, catalog=DEFAULT_CATALOG, functionCompatible=DEFAULT_COMPATIBLE, functionCompatibile=-1, # typo version for backward compatibility ): """ Converts an image file to a data structure written in a Python file --image_file: string; the path of the source image file --python_file: string; the path of the destination python file --other arguments: they are equivalent to the command-line arguments """ # was the typo version used? 
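    # functionCompatibile is the old misspelled keyword argument kept for
    # backward compatibility; when a caller passes it explicitly it takes
    # precedence over the correctly spelled functionCompatible parameter.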
if functionCompatibile != -1: functionCompatible = functionCompatibile global app if not wx.GetApp(): app = wx.App() # convert the image file to a temporary file tfname = tempfile.mktemp() try: ok, msg = convert(image_file, maskClr, None, tfname, wx.BITMAP_TYPE_PNG, ".png") if not ok: print msg return lines = [] data = b64encode(open(tfname, "rb").read()) while data: part = data[:72] data = data[72:] output = ' "%s"' % part if not data: output += ")" lines.append(output) data = "\n".join(lines) finally: if os.path.exists(tfname): os.remove(tfname) old_index = [] if catalog and append and python_file != '-': # check to see if catalog exists already (file may have been created # with an earlier version of img2py or without -c option) pyPath, pyFile = os.path.split(python_file) append_catalog = True sourcePy = open(python_file, "r") try: for line in sourcePy: if line == "catalog = {}\n": append_catalog = False else: lineMatcher = indexPattern.match(line) if lineMatcher: old_index.append(lineMatcher.groups()[0]) finally: sourcePy.close() if append_catalog: out = open(python_file, "a") try: out.write("\n# ***************** Catalog starts here *******************") out.write("\n\ncatalog = {}\n") out.write("index = []\n\n") finally: out.close() if python_file == '-': out = sys.stdout elif append: out = open(python_file, "a") else: out = open(python_file, "w") try: imgPath, imgFile = os.path.split(image_file) if not imgName: imgName = os.path.splitext(imgFile)[0] print "\nWarning: -n not specified. Using filename (%s) for name of image and/or catalog entry." % imgName out.write("#" + "-" * 70 + "\n") if not append: out.write("# This file was generated by %s\n#\n" % sys.argv[0]) out.write("from wx.lib.embeddedimage import PyEmbeddedImage\n\n") if catalog: out.write("catalog = {}\n") out.write("index = []\n\n") letters = [] for letter in imgName: if not letter.isalnum(): letter = "_" letters.append(letter) if not letters[0].isalpha() and letters[0] != '_': letters.insert(0, "_") varName = "".join(letters) out.write("%s = PyEmbeddedImage(\n%s\n" % (varName, data)) if catalog: if imgName in old_index: print "Warning: %s already in catalog." 
% imgName print " Only the last entry will be accessible.\n" old_index.append(imgName) out.write("index.append('%s')\n" % imgName) out.write("catalog['%s'] = %s\n" % (imgName, varName)) if functionCompatible: out.write("get%sData = %s.GetData\n" % (varName, varName)) out.write("get%sImage = %s.GetImage\n" % (varName, varName)) out.write("get%sBitmap = %s.GetBitmap\n" % (varName, varName)) if icon: out.write("get%sIcon = %s.GetIcon\n" % (varName, varName)) out.write("\n") if imgName: n_msg = ' using "%s"' % imgName else: n_msg = "" if maskClr: m_msg = " with mask %s" % maskClr else: m_msg = "" print "Embedded %s%s into %s%s" % (image_file, n_msg, python_file, m_msg) finally: if python_file != '-': out.close() def main(args=None): if not args: args = sys.argv[1:] if not args or ("-h" in args): print __doc__ return append = DEFAULT_APPEND compressed = DEFAULT_COMPRESSED maskClr = DEFAULT_MASKCLR imgName = DEFAULT_IMGNAME icon = DEFAULT_ICON catalog = DEFAULT_CATALOG compatible = DEFAULT_COMPATIBLE try: opts, fileArgs = getopt.getopt(args, "auicfFn:m:") except getopt.GetoptError: print __doc__ return for opt, val in opts: if opt == "-a": append = True elif opt == "-n": imgName = val elif opt == "-m": maskClr = val elif opt == "-i": icon = True elif opt == "-c": catalog = True elif opt == "-f": compatible = True elif opt == "-F": compatible = False if len(fileArgs) != 2: print __doc__ return image_file, python_file = fileArgs img2py(image_file, python_file, append, compressed, maskClr, imgName, icon, catalog, compatible) if __name__ == "__main__": main(sys.argv[1:]) ``` #### File: tools/XRCed/presenter.py ```python import os,tempfile,shutil from xml.parsers import expat import cPickle from globals import * import view from model import Model, MyDocument from component import Manager import undo # Presenter class linking model to view objects class _Presenter: def init(self): Model.init() self.path = '' # Global modified state self.setModified(False) # sets applied view.frame.Clear() view.tree.Clear() view.tree.SetPyData(view.tree.root, Model.mainNode) view.testWin.Init() g.undoMan.Clear() # Insert/append mode flags self.createSibling = self.insertBefore = False # Select main node attributes self.setData(view.tree.root) def loadXML(self, path): Model.loadXML(path) view.tree.Flush() view.tree.SetPyData(view.tree.root, Model.mainNode) self.setData(view.tree.root) if g.conf.expandOnOpen: view.tree.ExpandAll() def saveXML(self, path): Model.saveXML(path) def open(self, path): if not os.path.exists(path): wx.LogError('File does not exists: %s' % path) raise IOError try: self.path = os.path.abspath(path) TRACE('Loading XML file: %s', self.path) self.loadXML(self.path) # Change dir dir = os.path.dirname(self.path) if dir: os.chdir(dir) self.setModified(False) g.conf.localconf = self.createLocalConf(path) except: logger.exception('error loading XML file') wx.LogError('Error loading XML file: %s' % path) raise def save(self, path): # Apply changes if needed if not self.applied: self.update(self.item) try: tmpFile,tmpName = tempfile.mkstemp(prefix='xrced-') os.close(tmpFile) TRACE('Saving temporary file: %s', tmpName) self.saveXML(tmpName) TRACE('copying to the main file: %s', path) shutil.copy(tmpName, path) self.path = path self.setModified(False) except: logger.exception('error saving XML file') wx.LogError('Error saving XML file: %s' % path) raise def setModified(self, state=True, setDirty=True): '''Set global modified state.''' TRACE('setModified %s %s', state, setDirty) self.modified = state # Set 
applied flag if not state: self.applied = True name = os.path.basename(self.path) if not name: name = 'UNTITLED' # Update GUI if state: view.frame.SetTitle(progname + ': ' + name + ' *') # Update test window if view.testWin.IsShown() and setDirty: view.testWin.isDirty = True if g.conf.autoRefresh: self.refreshTestWin() else: view.frame.SetTitle(progname + ': ' + name) def setApplied(self, state=True): '''Set panel state.''' TRACE('setApplied %s', state) self.applied = state if not state and not self.modified: self.setModified(setDirty=False) # toggle global state def createUndoEdit(self, item=None, page=None): TRACE('createUndoEdit') # Create initial undo object if item is None: item = self.item if page is None: page = view.panel.nb.GetSelection() view.panel.undo = undo.UndoEdit(item, page) def registerUndoEdit(self): TRACE('registerUndoEdit') g.undoMan.RegisterUndo(view.panel.undo) view.panel.undo = None def panelIsDirty(self): '''Check if the panel was changed since last undo.''' # Register undo if view.panel.undo: panel = view.panel.GetActivePanel() if view.panel.undo.values != panel.GetValues(): return True return False def setData(self, item): '''Set data and view for current tree item.''' self.item = item if item == view.tree.root: TRACE('setData: root node') self.container = None self.comp = Manager.rootComponent self.panels = view.panel.SetData(self.container, self.comp, Model.mainNode) else: node = view.tree.GetPyData(item) if node.nodeType != node.COMMENT_NODE: TRACE('setData: %s', node.getAttribute('class')) self.comp = Manager.getNodeComp(node) parentItem = view.tree.GetItemParent(item) parentNode = view.tree.GetPyData(parentItem) if parentNode == Model.mainNode: self.container = Manager.rootComponent else: parentClass = parentNode.getAttribute('class') self.container = Manager.components[parentClass] self.panels = view.panel.SetData(self.container, self.comp, node) # Create new pending undo self.createUndoEdit(self.item) if view.testWin.IsShown(): self.highlight(item) def highlight(self, item): TRACE('highlight') if view.testWin.IsDirty() or item == view.tree.root or \ view.tree.GetPyData(item).nodeType == Model.dom.COMMENT_NODE: view.testWin.RemoveHighlight() return try: rect = view.testWin.FindObjectRect(item) if not rect: view.testWin.RemoveHighlight() return view.testWin.Highlight(rect) except: logger.exception('highlighting failed') def updateCreateState(self, forceSibling, forceInsert): if self.container: if self.comp.isContainer(): self.createSibling = forceSibling else: self.createSibling = True else: self.createSibling = False self.insertBefore = forceInsert TRACE('updateCreateState: %s %s', self.createSibling, self.insertBefore) def popupMenu(self, forceSibling, forceInsert, pos): '''Show popup menu and set sibling/insert flags.''' self.updateCreateState(forceSibling, forceInsert) menu = view.XMLTreeMenu(self.container, self.comp, view.tree, self.createSibling, self.insertBefore) view.tree.PopupMenu(menu, pos) menu.Destroy() def create(self, comp, child=None): ''' Add DOM node as child or sibling depending on flags. Return new item. If child is passed replace by existing data. 
        '''
        if child is None:
            child = Model.createObjectNode(comp.klass)
            # Set default values
            for k,v in comp.defaults.items():
                comp.addAttribute(child, k, v)
        data = wx.TreeItemData(child)
        item = self.item
        if not self.applied:
            self.update(item)
        if item == view.tree.root:
            self.createSibling = False  # can't create sibling of root
        if self.createSibling:
            parentItem = view.tree.GetItemParent(item)
            parentNode = view.tree.GetPyData(parentItem)
        else:
            parentNode = view.tree.GetPyData(item)
        label = comp.getTreeText(child)
        imageId = comp.getTreeImageId(child)
        if self.createSibling:
            node = view.tree.GetPyData(item)
            if self.insertBefore:
                self.container.insertBefore(parentNode, child, node)
                item = view.tree.InsertItemBefore(
                    parentItem, item, label, imageId, data=data)
            else:
                self.container.insertAfter(parentNode, child, node)
                item = view.tree.InsertItem(
                    parentItem, item, label, imageId, data=data)
        else:
            if self.insertBefore and view.tree.ItemHasChildren(item):
                nextNode = view.tree.GetPyData(view.tree.GetFirstChild(item)[0])
                self.comp.insertBefore(parentNode, child, nextNode)
                item = view.tree.PrependItem(item, label, imageId, data=data)
            else:
                self.comp.appendChild(parentNode, child)
                item = view.tree.AppendItem(item, label, imageId, data=data)
        view.tree.SetItemStyle(item, child)
        view.tree.EnsureVisible(item)
        view.tree.UnselectAll()
        if view.testWin.IsShown():
            view.testWin.isDirty = True
        view.tree.SelectItem(item)
        self.setModified()
        return item

    def createRef(self, ref, child=None):
        '''Create object_ref element node.'''
        if child is None:
            child = Model.createRefNode(ref)
        refNode = Model.findResource(ref)
        if refNode:
            comp = Manager.getNodeComp(refNode)
        else:
            comp = Manager.getNodeComp(child)
        self.create(comp, child)

    def createComment(self):
        '''Create comment node.'''
        node = Model.createCommentNode()
        comp = Manager.getNodeComp(node)
        self.create(comp, node)

    def replace(self, comp, node=None):
        '''Replace DOM node by new or passed node. Return new item.'''
        TRACE('replace')
        if node is None:
            node = Model.createObjectNode(comp.klass)
        if not self.applied:
            self.update(self.item)
        data = wx.TreeItemData(node)
        item = self.item
        parentItem = view.tree.GetItemParent(item)
        parentNode = view.tree.GetPyData(parentItem)
        oldNode = view.tree.GetPyData(item)
        self.container.replaceChild(parentNode, node, oldNode)
        # Replace tree item: insert new, remove old
        label = comp.getTreeText(node)
        imageId = comp.getTreeImageId(node)
        item = view.tree.InsertItem(parentItem, item, label, imageId, data=data)
        view.tree.Delete(view.tree.GetPrevSibling(item))
        self.item = item
        # Add children
        for n in filter(is_object, node.childNodes):
            view.tree.AddNode(item, comp.getTreeNode(n))
        view.tree.EnsureVisible(item)
        # Update panel
        view.tree.SelectItem(item)
        self.setModified()
        return oldNode

    def subclass(self, item, subclass):
        node = view.tree.GetPyData(item)
        if subclass:
            node.setAttribute('subclass', subclass)
        elif node.hasAttribute('subclass'):
            node.removeAttribute('subclass')
        # Update item label
        view.tree.SetItemImage(item, self.comp.getTreeImageId(node))
        view.tree.SetItemText(item, self.comp.getTreeText(node))
        # Update panel
        view.tree.SelectItem(item)
        self.setModified()

    def update(self, item):
        '''Update DOM with new attribute values.
Update tree if necessary.''' node = view.tree.GetPyData(item) isComment = node.nodeType == node.COMMENT_NODE if isComment: subclass = None else: subclass = node.getAttribute('subclass') # Update (sub)class if needed cls = view.panel.textClass.GetValue() if not subclass: if not isComment and cls != self.comp.klass: if node.tagName == 'object_ref' and not cls: if node.hasAttribute('class'): node.removeAttribute('class') TRACE('removed "class" tag') else: TRACE('update class: %s', cls) node.setAttribute('class', cls) else: value = subclass + '(%s)' % self.comp.klass if cls != value: iLeft = cls.find('(') iRight = cls.find(')') if iLeft != -1 and iLeft < iRight: subclass = cls[:iLeft] klass = cls[iLeft+1:iRight] TRACE('update class/subclass: %s', cls) node.setAttribute('class', klass) node.setAttribute('subclass', subclass) else: TRACE('remove subclass') node.removeAttribute('subclass') node.setAttribute('class', cls) if self.comp and self.comp.hasName: name = view.panel.textName.GetValue() if name: node.setAttribute('name', name) elif node.hasAttribute('name'): # clean up empty names node.removeAttribute('name') if item != view.tree.root: for panel in self.panels: if not panel.node: continue # Replace node contents except object children for n in panel.node.childNodes[:]: if not is_object(n): panel.node.removeChild(n) n.unlink() for panel in self.panels: for a,value in panel.GetValues(): if value: try: if isinstance(panel, view.AttributePanel) and panel.comp: comp = panel.comp else: comp = self.comp comp.addAttribute(panel.node, a, value) except: logger.exception('addAttribute error: %s %s', a, value) if item != view.tree.root: view.tree.SetItemImage(item, self.comp.getTreeImageId(node)) view.tree.SetItemText(item, self.comp.getTreeText(node)) self.setApplied() # Set dirty flag if view.testWin.IsShown(): view.testWin.isDirty = True def unselect(self): if not self.applied: self.update(self.item) if view.testWin.IsShown() and view.testWin.item == self.item: view.testWin.Destroy() view.tree.UnselectAll() self.setData(view.tree.root) def flushSubtree(self, item=None, node=None): # Remember test item index TRACE('flushSubtree') if view.testWin.item is not None: itemIndex = view.tree.ItemFullIndex(view.testWin.item) view.tree.FlushSubtree(item, node) if view.testWin.item is not None: view.testWin.item = view.tree.ItemAtFullIndex(itemIndex) def delete(self, item): '''Delete selected object(s). 
Return removed XML node.''' TRACE('delete') parentItem = view.tree.GetItemParent(item) parentNode = view.tree.GetPyData(parentItem) node = view.tree.GetPyData(item) node = self.container.removeChild(parentNode, node) view.tree.Delete(item) # If deleting the top-level object, remove view if view.testWin.IsShown() and view.testWin.item == item: view.testWin.Destroy() self.setApplied() self.unselect() self.setModified() return node def deleteMany(self, items): '''Delete selected object(s).''' for item in items: if not item.IsOk(): continue # child already deleted parentItem = view.tree.GetItemParent(item) parentNode = view.tree.GetPyData(parentItem) node = view.tree.GetPyData(item) node = self.container.removeChild(parentNode, node) node.unlink() # delete completely view.tree.Delete(item) self.setApplied() self.unselect() self.setModified() def cut(self): self.copy() return self.delete(view.tree.GetSelection()) def copy(self): # Update values from panel first item = view.tree.GetSelection() if not self.applied: self.update(item) node = view.tree.GetPyData(item) if self.container.requireImplicit(node): implicit = node.parentNode else: implicit = None if wx.TheClipboard.Open(): if node.nodeType == node.ELEMENT_NODE: data = wx.CustomDataObject('XRCED_elem') s = node.toxml(encoding=expat.native_encoding) # Replace by a pair if implicit: s = [s, implicit.toxml(encoding=expat.native_encoding)] else: # Non-element nodes are normally comments data = wx.CustomDataObject('XRCED_node') s = node.data data.SetData(cPickle.dumps(s)) wx.TheClipboard.SetData(data) wx.TheClipboard.Close() else: wx.MessageBox("Unable to open the clipboard", "Error") def checkCompatibility(self, comp): '''Check parent/child compatibility.''' if self.createSibling: container = self.container else: container = self.comp if not container.canHaveChild(comp): wx.LogError('Incompatible parent/child: parent is %s, child is %s!' 
% (container.klass, comp.klass)) return False return True def paste(self): success = success_node = False if wx.TheClipboard.IsOpened() or wx.TheClipboard.Open(): try: data = wx.CustomDataObject('XRCED_elem') if wx.TheClipboard.IsSupported(data.GetFormat()): try: success = wx.TheClipboard.GetData(data) except: # there is a problem if XRCED_node is in clipboard # but previous SetData was for XRCED pass if not success: # try other format data = wx.CustomDataObject('XRCED_node') if wx.TheClipboard.IsSupported(data.GetFormat()): success_node = wx.TheClipboard.GetData(data) finally: wx.TheClipboard.Close() if not success and not success_node: wx.MessageBox( "There is no data in the clipboard in the required format", "Error") return # XML representation of element or node value string data = cPickle.loads(data.GetData()) implicit = None if success: if type(data) is list: node = Model.parseString(data[0]) implicit = Model.parseString(data[1]) else: node = Model.parseString(data) else: node = Model.dom.createComment(data) comp = Manager.getNodeComp(node) # Check compatibility if not self.checkCompatibility(comp): node.unlink() return item = view.tree.GetSelection() if item and not self.applied: self.update(item) item = self.create(comp, node) if implicit: # copy parameters for implicit node if possible parentNode = view.tree.GetPyData(view.tree.GetItemParent(item)) parentComp = Manager.getNodeComp(parentNode) if parentComp.requireImplicit(node) and \ parentComp.implicitKlass == implicit.getAttribute('class'): parentComp.copyImplicitAttributes(implicit, node.parentNode, parentComp) implicit.unlink() # Add children for n in filter(is_object, node.childNodes): view.tree.AddNode(item, comp.getTreeNode(n)) self.setModified() return item def moveUp(self): parentItem = view.tree.GetItemParent(self.item) treeNode = view.tree.GetPyData(self.item) node = self.container.getTreeOrImplicitNode(treeNode) parent = node.parentNode prevNode = node.previousSibling while not is_object(prevNode): prevNode = prevNode.previousSibling parent.removeChild(node) parent.insertBefore(node, prevNode) index = view.tree.ItemFullIndex(self.item) self.flushSubtree(parentItem, parent) index[-1] -= 1 self.item = view.tree.ItemAtFullIndex(index) self.setModified() view.tree.SelectItem(self.item) def moveDown(self): parentItem = view.tree.GetItemParent(self.item) treeNode = view.tree.GetPyData(self.item) node = self.container.getTreeOrImplicitNode(treeNode) parent = node.parentNode nextNode = node.nextSibling while not is_object(nextNode): nextNode = nextNode.nextSibling nextNode = nextNode.nextSibling while nextNode and not is_object(nextNode): nextNode = nextNode.nextSibling parent.removeChild(node) parent.insertBefore(node, nextNode) index = view.tree.ItemFullIndex(self.item) self.flushSubtree(parentItem, parent) index[-1] += 1 self.item = view.tree.ItemAtFullIndex(index) self.setModified() view.tree.SelectItem(self.item) def moveLeft(self): parentItem = view.tree.GetItemParent(self.item) grandParentItem = view.tree.GetItemParent(parentItem) parent = view.tree.GetPyData(parentItem) grandParent = view.tree.GetPyData(grandParentItem) if grandParent is Model.mainNode: grandParentComp = Manager.rootComponent else: grandParentComp = Manager.getNodeComp(grandParent) if not grandParentComp.canHaveChild(self.comp): wx.LogError('Incompatible parent/child: parent is %s, child is %s!' 
% (grandParentComp.klass, self.comp.klass)) return node = view.tree.GetPyData(self.item) nextItem = view.tree.GetNextSibling(parentItem) self.container.removeChild(parent, node) if nextItem: nextNode = view.tree.GetPyData(nextItem) grandParentComp.insertBefore(grandParent, node, nextNode) else: grandParentComp.appendChild(grandParent, node) index = view.tree.ItemFullIndex(self.item) self.flushSubtree(grandParentItem, grandParent) index.pop() index[-1] += 1 self.item = view.tree.ItemAtFullIndex(index) self.setModified() view.tree.SelectItem(self.item) def moveRight(self): parentItem = view.tree.GetItemParent(self.item) parent = view.tree.GetPyData(parentItem) newParent = view.tree.GetPyData(view.tree.GetPrevSibling(self.item)) newParentComp = Manager.getNodeComp(newParent) if not newParentComp.canHaveChild(self.comp): wx.LogError('Incompatible parent/child: parent is %s, child is %s!' % (newParentComp.klass, self.comp.klass)) return node = view.tree.GetPyData(self.item) self.container.removeChild(parent, node) newParentComp.appendChild(newParent, node) index = view.tree.ItemFullIndex(self.item) n = view.tree.GetChildrenCount(view.tree.GetPrevSibling(self.item)) self.flushSubtree(parentItem, parent) index[-1] -= 1 index.append(n) self.item = view.tree.ItemAtFullIndex(index) self.setModified() view.tree.SelectItem(self.item) def createLocalConf(self, path): name = os.path.splitext(path)[0] name += '.xcfg' return wx.FileConfig(localFilename=name) def createTestWin(self, item): TRACE('createTestWin') # Create a window with this resource node = view.tree.GetPyData(item) # Execute "pragma" comment node if node.nodeType == node.COMMENT_NODE: if node.data and node.data[0] == '%' and g.conf.allowExec != 'no': say = wx.NO if g.conf.allowExec == 'ask' and Model.allowExec is None: say = wx.MessageBox('Execute comment directive?', 'Warning', wx.ICON_EXCLAMATION | wx.YES_NO) if g.conf.allowExec == 'yes' or say == wx.YES: code = node.data[1:] # skip '%' view.tree.ExecCode(code) return # Close old window, remember where it was comp = Manager.getNodeComp(node) # Use parent object if the current one does not support test view testWinItem = item while not comp.isTestable: testWinItem = view.tree.GetItemParent(testWinItem) node = view.tree.GetPyData(testWinItem) comp = Manager.getNodeComp(node) # Create memory XML file elem = node.cloneNode(True) if not node.hasAttribute('name'): name = 'noname' else: name = node.getAttribute('name') elem.setAttribute('name', STD_NAME) Model.setTestElem(elem) Model.saveTestMemoryFile() xmlFlags = 0 if not g.conf.useSubclassing: xmlFlags |= xrc.XRC_NO_SUBCLASSING # Use translations if encoding is not specified if not Model.dom.encoding: xmlFlags |= xrc.XRC_USE_LOCALE res = xrc.EmptyXmlResource(xmlFlags) xrc.XmlResource.Set(res) # set as global # Init other handlers Manager.addXmlHandlers(res) Manager.preload(res) # Same module list res.Load('memory:test.xrc') testWin = view.testWin try: try: frame, object = comp.makeTestWin(res, name) if not object: # skip the rest raise EOFError # Reset previous tree item and locate tool if testWin.item: view.tree.SetItemBold(testWin.item, False) testWin.SetView(frame, object, testWinItem) testWin.Show() view.tree.SetItemBold(testWinItem, True) # For reused frame, object is not positioned immediately wx.CallAfter(self.highlight, item) except EOFError: pass except TestWinError: wx.LogError('Test window could not be created for %s' % node.getAttribute('class')) logger.exception('error creating test view') except: wx.LogError('Error creating test 
view') logger.exception('error creating test view') if get_debug(): raise finally: # Cleanup res.Unload(TEST_FILE) xrc.XmlResource.Set(None) wx.MemoryFSHandler.RemoveFile(TEST_FILE) def closeTestWin(self): TRACE('closeTestWin') if not view.testWin.object: return view.tree.SetItemBold(view.testWin.item, False) view.tree.Refresh() view.frame.tb.ToggleTool(view.frame.ID_TOOL_LOCATE, False) if view.frame.miniFrame: view.frame.miniFrame.tb.ToggleTool(view.frame.ID_TOOL_LOCATE, False) view.testWin.Destroy() def refreshTestWin(self): '''Refresh test window after some change.''' TRACE('refreshTestWin') if not view.testWin.IsDirty(): return if not self.applied: self.update(self.item) # Dumb refresh self.createTestWin(view.testWin.item) self.highlight(self.item) if view.frame.miniFrame and view.frame.miniFrame.IsShown(): view.frame.miniFrame.Raise() else: view.frame.Raise() def showXML(self): '''Show some source.''' node = view.tree.GetPyData(self.item) dom = MyDocument() node = dom.appendChild(node.cloneNode(True)) Model.indent(dom, node) text = node.toxml()#Model.dom.encoding) dom.unlink() lines = text.split('\n') maxLen = max(map(len, lines)) w = max(40, min(80, maxLen)) h = max(20, min(40, len(lines))) dlg = view.ScrolledMessageDialog(view.frame, text, 'XML Source', textSize=(w,h), centered=False) dlg.Bind(wx.EVT_CLOSE, lambda evt: dlg.Destroy()) dlg.Bind(wx.EVT_BUTTON, lambda evt: dlg.Destroy(), id=wx.ID_OK) dlg.Show() def generatePython(self, dataFile, pypath, embed, genGettext): try: from wx.tools import pywxrc rescomp = pywxrc.XmlResourceCompiler() rescomp.MakePythonModule([dataFile], pypath, embed, genGettext, assignVariables=False) except: logger.exception('error generating python code') wx.LogError('Error generating python code : %s' % pypath) raise # Singleton class Presenter = g.Presenter = _Presenter() undo.Presenter = Presenter ```
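
The presenter above is exposed as a module-level singleton that mediates between `Model` and the `view` module. Below is a minimal, framework-free sketch of that wiring pattern; every class and method name in the sketch is an illustrative stand-in, not part of XRCed.

```python
# Minimal model-view-presenter sketch mirroring the singleton wiring above
# ("Presenter = g.Presenter = _Presenter()"). Everything here is illustrative.

class _Model(object):
    def __init__(self):
        self.data = {}

class _View(object):
    def show(self, data):
        print("view now displays:", data)

class _Presenter(object):
    def init(self, model, view):
        # Like Presenter.init() above, wiring happens after construction.
        self.model = model
        self.view = view
        self.modified = False

    def set_value(self, key, value):
        self.model.data[key] = value
        self.modified = True  # analogous to setModified()
        self.view.show(self.model.data)

# Module-level singleton, as in the real presenter module.
Presenter = _Presenter()

if __name__ == "__main__":
    Presenter.init(_Model(), _View())
    Presenter.set_value("title", "frame")
```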
{ "source": "jicksy/behavioral_cloning", "score": 3 }
#### File: jicksy/behavioral_cloning/model.py ```python import os import csv # folder where data is contained path = '/opt/carnd_p3/data/' # read data and store it in lines lines = [] with open(path+'driving_log.csv') as csvfile: reader = csv.reader(csvfile) # skip first row next(reader) for line in reader: lines.append(line) from sklearn.model_selection import train_test_split from sklearn.utils import shuffle # Split data into tran and validation samples, 20% of data is used for validation samples train_samples, validation_samples = train_test_split(lines, test_size=0.2) import cv2 import numpy as np import sklearn # Generator function def generator(samples, batch_size=32): num_samples = len(samples) while 1: # Loop forever so the generator never terminates shuffle(samples) for offset in range(0, num_samples, batch_size): batch_samples = samples[offset:offset+batch_size] images = [] angles = [] for batch_sample in batch_samples: # center image name = '/opt/carnd_p3/data/IMG/'+batch_sample[0].split('/')[-1] center_image = cv2.imread(name) # convert to RGB center_image_rgb = cv2.cvtColor(center_image, cv2.COLOR_BGR2RGB) # append rgb images.append(center_image_rgb) center_angle = float(batch_sample[3]) angles.append(center_angle) # append flipped images.append(cv2.flip(center_image_rgb, 1)) angles.append(-center_angle) # left image name = '/opt/carnd_p3/data/IMG/'+batch_sample[1].split('/')[-1] left_image = cv2.imread(name) # convert to RGB left_image_rgb = cv2.cvtColor(left_image, cv2.COLOR_BGR2RGB) # append RGB images.append(left_image_rgb) left_angle = float(batch_sample[3]) + 0.1 angles.append(left_angle) # append flipped images.append(cv2.flip(left_image_rgb, 1)) angles.append(-left_angle) # right image name = '/opt/carnd_p3/data/IMG/'+batch_sample[2].split('/')[-1] right_image = cv2.imread(name) # convert to RGB right_image_rgb = cv2.cvtColor(right_image, cv2.COLOR_BGR2RGB) # append RGB images.append(right_image_rgb) right_angle = float(batch_sample[3]) - 0.1 angles.append(right_angle) # append flipped images.append(cv2.flip(right_image_rgb, 1)) angles.append(-right_angle) # X_train and y_train, Convert to numpy array X_train = np.array(images) y_train = np.array(angles) # shuffle yield sklearn.utils.shuffle(X_train, y_train) # compile and train the model using the generator function train_generator = generator(train_samples, batch_size=32) validation_generator = generator(validation_samples, batch_size=32) # import required methods from keras.models import Sequential from keras.layers import Flatten, Dense, Lambda, Cropping2D, Dropout from keras.layers.convolutional import Conv2D from keras.layers.pooling import MaxPooling2D model = Sequential() # Model based on NVIDIA paper: https://devblogs.nvidia.com/deep-learning-self-driving-cars/ # normalization model.add(Lambda(lambda x: x / 255.0 - 0.5, input_shape = (160,320,3))) # cropping2D layer: 70 row pixels from the top of the image, 25 row pixels from the bottom of the image model.add(Cropping2D(cropping=((70,25),(0,0)))) # Applying 24 filter of sizes (5,5) of strides of 2 with relu activation model.add(Conv2D(24, (5,5), strides=(2,2), activation='relu')) # Applying 36 filter of sizes (5,5) of strides of 2 with relu activation model.add(Conv2D(36, (5,5), strides=(2,2), activation='relu')) # Applying 48 filter of sizes (5,5) of strides of 2 with relu activation model.add(Conv2D(48, (5,5), strides=(2,2), activation='relu')) # Applying 64 filter of sizes (3,3) of strides of 1 with relu activation model.add(Conv2D(64, (3,3), 
activation='relu'))

# Applying 64 filter of sizes (3,3) of strides of 1 with relu activation
model.add(Conv2D(64, (3,3), activation='relu'))

# dropout
model.add(Dropout(0.5))

# flatten and dense
model.add(Flatten())
model.add(Dense(100))
model.add(Dense(50))
model.add(Dense(10))
model.add(Dense(1))

# using adam optimizer and mse loss function
model.compile(loss='mse', optimizer='adam')

# training the model, 32 is the batch_size
model.fit_generator(train_generator, steps_per_epoch= len(train_samples) * 10 // 32, validation_data=validation_generator, validation_steps=len(validation_samples) // 32, epochs=5, verbose = 1)

# save the model
model.save('model.h5')
```
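
The generator above turns each driving-log row into six training samples: center, left, and right camera frames plus a horizontally flipped copy of each with the steering angle negated. The sketch below isolates that flip augmentation using NumPy only; the random image is a stand-in for a real camera frame.

```python
# A self-contained sketch (NumPy only, no Keras/OpenCV needed) of the
# augmentation idea in the generator above: mirror each frame horizontally
# and negate its steering angle, doubling the data.
import numpy as np

def augment_with_flip(image, angle):
    """Return [(image, angle), (mirrored image, -angle)]."""
    flipped = image[:, ::-1, :]  # same effect as cv2.flip(image, 1)
    return [(image, angle), (flipped, -angle)]

if __name__ == "__main__":
    img = np.random.randint(0, 255, size=(160, 320, 3), dtype=np.uint8)
    pairs = augment_with_flip(img, 0.25)
    print([a for _, a in pairs])  # [0.25, -0.25]
```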
{ "source": "Jicol95/Sentiment-Analysis", "score": 3 }
#### File: Sentiment-Analysis/sentiment analysis/polarity.py ```python from textblob import TextBlob def polarity(phrases, basal_sentiment_dictionary, basal_determiner_bank): frazes = [] # For each phrase in the parsed text phrase_counter = 0 for j in range(0,len(phrases)): phrase = phrases[j] phrase_counter +=1 words_to_symbols = [] words_to_symbols.append(phrase[0]) Effect = False # Iterate through the words in a phrase for i in range(1,len(phrase)): # If our knowledge base has the word's sentiment word = phrase[i] if word in basal_sentiment_dictionary and len(word) > 1: # Print the sentiment if basal_sentiment_dictionary[word] == 'N': word = 0 elif basal_sentiment_dictionary[word] == '+': word = 1 elif basal_sentiment_dictionary[word] == '-': word = -1 # If the the phrase is a known determiner elif word in basal_determiner_bank: word = 0 # print('N') # Neutralize everything before but elif word == 'but¬': word = 0 for k in range(0, j): for l in range(1, len(frazes[k])): frazes[k][l] = 0 elif '¬' == word[-1] and word[:-1] in basal_sentiment_dictionary and len(word) > 1: if basal_sentiment_dictionary[word[:-1]] == 'N': word = 0 elif basal_sentiment_dictionary[word[:-1]] == '+': word = 1 elif basal_sentiment_dictionary[word[:-1]] == '-': word = -1 elif '<¬>' in word and word[:-4] in basal_sentiment_dictionary: if basal_sentiment_dictionary[word.replace(' <¬>', '').strip()] == 'N': word = 0 elif basal_sentiment_dictionary[word.replace(' <¬>', '').strip()] == '+': word = -1 elif basal_sentiment_dictionary[word.replace(' <¬>', '').strip()] == '-': word = 1 else: word = 0 words_to_symbols.append(word) frazes.append(words_to_symbols) # print([word]) phrase_counter += 1 return frazes ```
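
A hypothetical driver for `polarity()` is sketched below. The phrase format is inferred from the code (element 0 is kept as a label, the remaining elements are tokens), and the module name, dictionary contents, and determiner bank are illustrative assumptions.

```python
# Illustrative call into polarity(): words found in the sentiment dictionary
# map to +1/-1/0, determiners and unknown words map to 0, and element 0 of
# each phrase is passed through as a label. Module name is assumed.
from polarity import polarity

sentiments = {'good': '+', 'bad': '-', 'movie': 'N'}
determiners = {'the', 'a'}
phrases = [['review-1', 'the', 'movie', 'was', 'good'],
           ['review-2', 'a', 'bad', 'movie']]

for row in polarity(phrases, sentiments, determiners):
    print(row[0], row[1:])  # label followed by -1/0/+1 symbols
```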
{ "source": "Jictyvoo/EXA868--PathFinder", "score": 3 }
#### File: src/controllers/MainController.py ```python import math from models.business.OrganismController import OrganismController from models.value.Finder import Finder from models.value.Labyrinth import Labyrinth class MainController: def __init__(self): self.__labyrinth = Labyrinth("../config.json") self.__labyrinth.loadLabyrinth("../labyrinth.la") self.__controllerOrganism = OrganismController(Finder, self.__labyrinth.getBeginPosition()) self.__genomeDecoder = ("UP", "RIGHT", "DOWN", "LEFT") self.__stateDecoder = {'alive': 0, 'dead': -1, 'finished': 1} self.__ending = self.__labyrinth.getEndingPosition() self.__have_finished = False self.__generations_finished = 0 self.__generations_fitness_average = [] self.__best_fitness = [] self.__best_organisms = [] def finished_generations(self): return self.__generations_finished def get_generations_fitness_average(self): return self.__generations_fitness_average def get_best_fitness(self): return self.__best_fitness def get_genome_decoder(self): return self.__genomeDecoder def get_labyrinth(self): return self.__labyrinth def get_best_one(self): return self.__controllerOrganism.getSmallerPath(list_to_order=self.__best_organisms)[0] def __calculate_fitness(self, organism): x_diference = organism.getPosition()['x'] x_diference = x_diference - self.__ending['x'] y_diference = organism.getPosition()['y'] y_diference = y_diference - self.__ending['y'] # return math.sqrt(math.pow(x_diference, 2) + math.pow(y_diference, 2)) return math.fabs(x_diference) + math.fabs(y_diference) def move(self, organisms): for organism in organisms: count = 0 for genome in organism.getGenome(): if organism.getState() == self.__stateDecoder['alive']: position = organism.getPosition() has_moved = self.__labyrinth.move(self.__genomeDecoder[genome], position) if has_moved: organism.updateFitness(1) organism.setPosition(has_moved) if self.__labyrinth.isAtFinal(has_moved): organism.updateFitness(100) organism.setState(self.__stateDecoder['finished']) organism.setLast(count) print("Generation: " + str(organism.getGeneration()), organism.getGenome()) self.__have_finished = True else: organism.updateFitness(-5) # organism.setState(self.stateDecoder['dead']) count = count + 1 if organism.getState() == self.__stateDecoder['dead']: organism.updateFitness(-10) organism.updateFitness(-10 * self.__calculate_fitness(organism)) # print(organism.getPosition()) begin_position = self.__labyrinth.getBeginPosition() organism.setPosition({'x': begin_position['x'], 'y': begin_position['y']}) def execute(self): organisms = self.__controllerOrganism.getOrganisms() if not organisms: return None self.move(organisms) if self.__have_finished: self.__generations_finished = self.__generations_finished + 1 self.__have_finished = False self.__generations_fitness_average.append(self.__controllerOrganism.average_fitness()) mom, dad = self.__controllerOrganism.selectBestOnes() self.__best_fitness.append(mom.getFitness()) self.__best_organisms.append(mom) self.__controllerOrganism.crossover(mom, dad, 0.05) if mom.getGeneration() % 11 == 0: self.__controllerOrganism.saveGenomes("../LastsGenomes.json") ``` #### File: models/value/Labyrinth.py ```python import json class Labyrinth: def __init__(self, file_name): self.matrix = [] self.beginning = None self.ending = None if file_name: file = open(file_name, 'r') self.characters = json.decoder.JSONDecoder().decode(file.read()) def loadLabyrinth(self, file_name): file = open(file_name, 'r') for line in file.readlines(): column = [] self.matrix.append(column) line 
= line.replace("\n", "")
                for block in line:
                    if (block not in self.characters.keys()) and block != " ":
                        raise ValueError("Error, undefined Labyrinth Character " + block)
                    else:
                        column.append({'class': self.characters[block], 'character': block})
                        if self.characters[block] == "beginning":
                            self.beginning = {'y': len(self.matrix) - 1, 'x': len(column) - 1}
                        if self.characters[block] == "ending":
                            self.ending = {'y': len(self.matrix) - 1, 'x': len(column) - 1}

    def getBeginPosition(self):
        return self.beginning

    def getEndingPosition(self):
        return self.ending

    def validPosition(self, x, y):
        # A position is valid when both coordinates exist, index inside the
        # matrix bounds, and the cell is not a wall.
        if x is not None and y is not None:
            if y < len(self.matrix):
                if x < len(self.matrix[y]):
                    position_class = self.matrix[y][x]['class']
                    return position_class != "wall"
        return False

    def move(self, direction, current_position):
        if self.validPosition(current_position['x'], current_position['y']):
            newX, newY = current_position['x'], current_position['y']
            if direction == "UP":
                newX, newY = current_position['x'], current_position['y'] - 1
            elif direction == "DOWN":
                newX, newY = current_position['x'], current_position['y'] + 1
            elif direction == "RIGHT":
                newX, newY = current_position['x'] + 1, current_position['y']
            elif direction == "LEFT":
                newX, newY = current_position['x'] - 1, current_position['y']
            if self.validPosition(newX, newY):
                return {'x': newX, 'y': newY}
        return None

    def isAtFinal(self, position):
        if position:
            if self.validPosition(position['x'], position['y']):
                return self.matrix[position['y']][position['x']]['class'] == "ending"

    def get_robot_in_labyrinth(self, robot_genome, genome_decoder):
        allPath = []
        currentPosition = {'x': 1, 'y': 1}
        labyrinthString = ""
        for genome in robot_genome:
            # Here will create labyrinth string
            for line in range(len(self.matrix)):
                for column in range(len(self.matrix[line])):
                    if line == currentPosition['y'] and column == currentPosition['x']:
                        labyrinthString = labyrinthString + "R"
                    else:
                        labyrinthString = labyrinthString + self.matrix[line][column]['character']
                labyrinthString = labyrinthString + "\n"
            allPath.append(labyrinthString)
            newPosition = self.move(genome_decoder[genome], currentPosition)
            if newPosition:
                currentPosition = newPosition
            labyrinthString = ""
        return allPath
```
#### File: models/value/Organism.py
```python
import random


class Organism:
    def __init__(self, genome_size):
        self.generation = 0
        # This is an array containing the genome
        self.fitness = 0
        self.genome = []
        for index in range(0, genome_size):
            self.genome.append(random.randint(0, 3))
        self.last = 99999

    def getGeneration(self):
        return self.generation

    def setGeneration(self, generation):
        self.generation = generation

    def getGenome(self):
        return self.genome

    def setGenome(self, genome):
        self.genome = genome

    def getGenomeInIndex(self, index):
        return self.genome[index]

    def setGenomeInIndex(self, index, genome_part):
        self.genome[index] = genome_part

    def getLast(self):
        return self.last

    def setLast(self, last):
        self.last = last

    def getFitness(self):
        return self.fitness

    def setFitness(self, new_fitness):
        self.fitness = new_fitness

    def updateFitness(self, update):
        self.fitness = self.fitness + update

    def compareTo(self, to_compare):
        if to_compare:
            return self.generation - to_compare.getGeneration()
```
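
`MainController` relies on `OrganismController.crossover(mom, dad, 0.05)` to recombine two of these genomes. That controller is not part of this file, so the sketch below shows one plausible single-point crossover with per-gene mutation over the 0-3 direction alphabet used by `Organism`; the strategy is an assumption, not the project's actual implementation.

```python
# Hypothetical single-point crossover over Organism-style genomes (lists of
# ints in 0..3), with a small per-gene mutation chance.
import random

def crossover(mom_genome, dad_genome, mutation_rate=0.05):
    # Pick a cut point and splice the two parents together.
    point = random.randrange(1, len(mom_genome))
    child = mom_genome[:point] + dad_genome[point:]
    # Each gene has a small chance of mutating to a new direction (0..3).
    return [random.randint(0, 3) if random.random() < mutation_rate else g
            for g in child]

if __name__ == "__main__":
    mom = [random.randint(0, 3) for _ in range(10)]
    dad = [random.randint(0, 3) for _ in range(10)]
    print(crossover(mom, dad))
```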
{ "source": "Jictyvoo/FilaLanche_SENAI", "score": 3 }
#### File: mainProgram/models/DatabaseManipulator.py ```python class DatabaseManipulator: def __init__(self, conexao): self.__conexao = conexao self.__cursor = self.__conexao.cursor() def getConexao(self): return self.__conexao def setConexao(self, conexao): self.__conexao = conexao def getCursor(self): return self.__cursor ```
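
The wrapper stores any DB-API 2.0 connection and exposes its cursor, so a usage sketch only needs a backend to hand it. SQLite is used below purely for illustration, and the import path and table schema are assumptions; the project's real database is not shown in this file.

```python
# Illustrative usage of DatabaseManipulator with an in-memory SQLite backend.
import sqlite3

from models.DatabaseManipulator import DatabaseManipulator  # assumed path

conn = sqlite3.connect(":memory:")
db = DatabaseManipulator(conn)
db.getCursor().execute("CREATE TABLE fila (id INTEGER PRIMARY KEY, nome TEXT)")
db.getCursor().execute("INSERT INTO fila (nome) VALUES (?)", ("lanche",))
db.getConexao().commit()
print(db.getCursor().execute("SELECT * FROM fila").fetchall())
```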
{ "source": "Jictyvoo/TEC502-2018.1--English-Dictionary-Game", "score": 2 }
#### File: Client/views/MainMenuUi.py ```python from PyQt5 import QtCore, QtWidgets from Client.util.resource_rc import qInitResources class MainMenuUi(object): def __init__(self): self.__central_widget = None self.__vertical_layout_2 = None self.__vertical_layout_1 = None self.__horizontal_layout = None self.__boggle_logo_label = None self.__start_solo_game_button = None self.__search_room_button = None self.__menu_bar = None self.__status_bar = None self.__boggle_room_selection = None self.__main_menu = None self.__solo_game = None def setup_ui(self, main__menu): self.__main_menu = main__menu main__menu.setObjectName("main__menu") main__menu.resize(345, 315) self.__central_widget = QtWidgets.QWidget(main__menu) self.__central_widget.setObjectName("__central_widget") self.__vertical_layout_2 = QtWidgets.QVBoxLayout(self.__central_widget) self.__vertical_layout_2.setObjectName("__vertical_layout_2") self.__horizontal_layout = QtWidgets.QHBoxLayout() self.__horizontal_layout.setObjectName("__horizontal_layout") spacer_item_1 = QtWidgets.QSpacerItem(30, 20, QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Minimum) self.__horizontal_layout.addItem(spacer_item_1) self.__vertical_layout_1 = QtWidgets.QVBoxLayout() self.__vertical_layout_1.setObjectName("__vertical_layout_1") self.__boggle_logo_label = QtWidgets.QLabel(self.__central_widget) size_policy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Expanding) size_policy.setHorizontalStretch(0) size_policy.setVerticalStretch(0) size_policy.setHeightForWidth(self.__boggle_logo_label.sizePolicy().hasHeightForWidth()) self.__boggle_logo_label.setSizePolicy(size_policy) self.__boggle_logo_label.setMinimumSize(QtCore.QSize(200, 0)) self.__boggle_logo_label.setStyleSheet("image: url(:/boggle_logo/logo_boggle_212810.png);") self.__boggle_logo_label.setText("") self.__boggle_logo_label.setObjectName("__boggle_logo_label") self.__vertical_layout_1.addWidget(self.__boggle_logo_label) self.__start_solo_game_button = QtWidgets.QPushButton(self.__central_widget) self.__start_solo_game_button.setObjectName("__start_solo_game_button") self.__start_solo_game_button.clicked.connect(self.__start_solo_game) self.__vertical_layout_1.addWidget(self.__start_solo_game_button) self.__search_room_button = QtWidgets.QPushButton(self.__central_widget) self.__search_room_button.setObjectName("__search_room_button") self.__search_room_button.clicked.connect(self.__call_room_selection) self.__vertical_layout_1.addWidget(self.__search_room_button) self.__horizontal_layout.addLayout(self.__vertical_layout_1) spacer_item_2 = QtWidgets.QSpacerItem(30, 20, QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Minimum) self.__horizontal_layout.addItem(spacer_item_2) self.__vertical_layout_2.addLayout(self.__horizontal_layout) main__menu.setCentralWidget(self.__central_widget) self.__menu_bar = QtWidgets.QMenuBar(main__menu) self.__menu_bar.setGeometry(QtCore.QRect(0, 0, 345, 21)) self.__menu_bar.setObjectName("__menu_bar") main__menu.setMenuBar(self.__menu_bar) self.__status_bar = QtWidgets.QStatusBar(main__menu) self.__status_bar.setObjectName("__status_bar") main__menu.setStatusBar(self.__status_bar) self.__re_translate_ui(main__menu) QtCore.QMetaObject.connectSlotsByName(main__menu) def __re_translate_ui(self, main__menu): _translate = QtCore.QCoreApplication.translate main__menu.setWindowTitle(_translate("main__menu", "Boggle Game!")) self.__start_solo_game_button.setText(_translate("main__menu", "Iniciar Jogo 
Solo")) self.__search_room_button.setText(_translate("main__menu", "Buscar Salas de Jogo")) def __call_room_selection(self): qInitResources() from Client.views.RoomSelectionUi import RoomSelectionUi # self.__boggle_room_selection = QtWidgets.QMainWindow() ui = RoomSelectionUi() ui.setup_ui(self.__main_menu) # self.__main_menu.hide() # self.__boggle_room_selection.show() def __start_solo_game(self): from Client.views.InGameUi import InGameUi from Client.views.widgets.InGameWindow import InGameWindow if self.__solo_game: self.__solo_game.hide() # self.__solo_game = QtWidgets.QDialog(self.__main_menu) self.__solo_game = InGameWindow(self.__main_menu) ui = InGameUi() ui.setup_ui(self.__solo_game) # self.__main_menu.hide() self.__solo_game.show() ``` #### File: models/business/ListenThread.py ```python import json.decoder as decoder_json import socket from Server.models.business.ConnectedClientThread import ConnectedClientThread class ListenThread: def __init__(self): self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) with open("config.conf", 'r') as content_file: decoder = decoder_json.JSONDecoder() decoded = decoder.decode(content_file.read()) host = decoded['host'] port = int(decoded['port']) server_address = (host, port) self.__socket.bind(server_address) # listen() puts the socket into server mode self.__socket.listen(1000) from Server.controllers.RoomsController import RoomsController self.__room_controller_thread = RoomsController() # self.__remove_room_thread = threading.Thread(target=self.__room_controller.remove_room) def main_execution(self): self.__room_controller_thread.start() while True: # Wait for a connection print("Listening Clients") connection, client_address = self.__socket.accept() print("Connection Address", client_address) connected_client = ConnectedClientThread(connection, client_address, self.__room_controller_thread) connected_client.start() ```
{ "source": "jidaqqa/mqtt_with_encryption", "score": 3 }
#### File: mqtt_with_encryption/util/authentication_plugin.py
```python
import util.logger as logger


class Authentication(object):

    @staticmethod
    def read_password_file(password_file, username, password):
        authenticated = False
        users = dict()
        try:
            with open(password_file) as f:
                for l in f:
                    line = l.strip()
                    if line and not line.startswith('#'):  # Allow blank lines and comments in files
                        # Split on the first ':' only, so passwords containing ':' still parse
                        usr, pwd = line.split(sep=":", maxsplit=1)
                        users[usr] = pwd
            if username in users.keys():
                if users[username] == password:
                    authenticated = True
                else:
                    logger.logging.debug("Password is not correct, please check it!")
            else:
                logger.logging.debug("Username is not correct, please check it!")
        except FileNotFoundError:
            logger.logging.debug("Password file %s not found" % password_file)
        return authenticated
```
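
Usage sketch for the static checker above, assuming the module is importable as `util.authentication_plugin`. The password file format is one `username:password` entry per line, with `#` comment lines allowed.

```python
# Write a throwaway password file and check credentials against it.
import tempfile

from util.authentication_plugin import Authentication  # assumed import path

with tempfile.NamedTemporaryFile("w", suffix=".pwd", delete=False) as f:
    f.write("# demo credentials\n")
    f.write("alice:s3cret\n")
    path = f.name

print(Authentication.read_password_file(path, "alice", "s3cret"))  # True
print(Authentication.read_password_file(path, "alice", "wrong"))   # False
```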
{ "source": "jidasheng/grid-puzzle", "score": 3 }
#### File: grid-puzzle/grid_puzzle/table.py ```python import tkinter as tk class Table(tk.Frame): def __init__(self, master, line_with=1, xscroll=False, yscroll=False, line_color="#dfe2e5", row_colors=("#f6f8fa", "#ffffff"), title_color="#e6e8ea", bg_color=None, widths=None): tk.Frame.__init__(self, master, bd=0) self.grid_rowconfigure(0, weight=1) self.grid_columnconfigure(0, weight=1) self.line_with = line_with self.line_color = line_color self.row_colors = row_colors self.title_color = title_color self.bg_color = bg_color self.xscroll = xscroll self.yscroll = yscroll self.__build_framework() self.__children = [] self.horizontal_lines = [] self.vertical_lines = [] self.widths = widths def __build_framework(self): self.grid_propagate(False) canvas = tk.Canvas(self) canvas.grid(row=0, column=0, sticky=tk.NSEW) self.content_frame = tk.Frame(canvas, bg=self.bg_color) canvas.create_window((0, 0), window=self.content_frame, anchor='nw') if self.xscroll: self.horizontal_sb = tk.Scrollbar(self, orient="horizontal", command=canvas.xview) self.horizontal_sb.grid(row=1, column=0, sticky=tk.EW) canvas.config(xscrollcommand=self.horizontal_sb.set) if self.yscroll: self.vertical_sb = tk.Scrollbar(self, orient="vertical", command=canvas.yview) self.vertical_sb.grid(row=0, column=1, sticky=tk.NS) canvas.config(yscrollcommand=self.vertical_sb.set) self.canvas = canvas def load_data(self, data): for row, texts in enumerate(data): for column, text in enumerate(texts): self.add_item(str(text), row, column) self.complete() return self def add_item(self, widget, row, column, rowspan=1, columnspan=1, sticky=tk.NSEW, ipadx=5, ipady=3): if isinstance(widget, str): width = self.widths[column] if columnspan == 1 and self.widths and column < len(self.widths) else None color = self.title_color if row == 0 and rowspan == 1 else self.row_colors[row % 2] widget = tk.Label(self.content_frame, text=widget, bg=color, width=width) self.__children.append((widget, row, column, rowspan, columnspan)) widget.grid(row=row * 2 + 1, column=column * 2 + 1, rowspan=rowspan * 2 - 1, columnspan=columnspan * 2 - 1, sticky=sticky, ipadx=ipadx, ipady=ipady) def clear(self): for c in list(self.content_frame.children.values()): c.destroy() self.content_frame.children.clear() del self.__children[:] del self.horizontal_lines[:] del self.vertical_lines[:] def complete(self): max_row, max_column = 0, 0 for widget, row, column, rowspan, columnspan in self.__children: max_row = max(max_row, row + rowspan) max_column = max(max_column, column + columnspan) self.__build_separator_lines(max_row, max_column) self.relayout() def relayout(self): self.content_frame.update_idletasks() width = self.content_frame.winfo_width() + (20 if self.yscroll else 4) height = self.content_frame.winfo_height() + (20 if self.xscroll else 4) self.config(width=width, height=height) self.canvas.config(scrollregion=self.canvas.bbox("all")) def __build_separator_lines(self, max_row, max_column): # horizontal lines self.horizontal_lines = [] for i in range(0, max_row * 2 + 1, 2): f = tk.Frame(self.content_frame, height=self.line_with, bg=self.line_color) f.grid(row=i, column=0, columnspan=max_column * 2, sticky=tk.EW) f.lower() self.horizontal_lines.append(f) # vertical lines self.vertical_lines = [] for j in range(0, max_column * 2 + 1, 2): f = tk.Frame(self.content_frame, width=self.line_with, bg=self.line_color) f.grid(row=0, column=j, rowspan=max_row * 2, sticky=tk.NS) f.lower() self.vertical_lines.append(f) def __coordinate_from_event(self, event): xs = 
[line.winfo_rootx() for line in self.vertical_lines] ys = [line.winfo_rooty() for line in self.horizontal_lines] x, y = event.x_root, event.y_root row, column = -1, -1 for i, y_ in enumerate(ys): if y < y_: row = i - 1 break for i, x_ in enumerate(xs): if x < x_: column = i - 1 break return (row, column) if row >= 0 and column >= 0 else None def bind_cell(self, event, fn, master=None): """ :param event: <Button-3>, <Double-Button-1> :param fn: fn(event, row, column) :param master: the root """ def _callback(event): coor = self.__coordinate_from_event(event) if coor: fn(event, *coor) master = master if master else self.master master.bind(event, _callback) def test_complicated(): root = tk.Tk() table = Table(root, xscroll=False, yscroll=True) table.pack(fill=tk.BOTH, expand=True) words = "In first quarter 2018, the Company repurchased 50.6 million shares of its common stock".split() for idx, w in enumerate(words): table.add_item(w, row=0, column=idx) for row in range(1, len(words) - 1): for col in range(1, len(words) - 1, 2): c = tk.Checkbutton(table.content_frame, text="test") # c = tk.Button(table.content_frame, text="asdf") table.add_item(c, row=row, column=col, rowspan=1, columnspan=2, sticky=tk.NSEW) table.complete() root.mainloop() def test_simple(): import numpy as np root = tk.Tk() table = Table(root, xscroll=True, yscroll=True).load_data(np.random.randn(10, 6)) table.pack(fill=tk.BOTH, expand=True) table.bind_cell("<Double-Button-1>", lambda event, r, c: print(r, c)) root.mainloop() if __name__ == "__main__": # test_complicated() test_simple() ```
{ "source": "jidebingfeng/segmatch", "score": 2 }
#### File: segmatch/python/utilities.py ```python from __future__ import print_function import numpy as np def list_runs(folder='./database/'): from os import listdir from os.path import isfile, join allfiles = [f for f in listdir(folder) if isfile(join(folder, f))] runfiles = [f for f in allfiles if f[0:4] == 'run_'] datatypes = [filename.split('_')[-1].split('.')[ 0] for filename in runfiles] filetypes = [filename.split('_')[-1].split('.')[-1] for filename in runfiles] # Dates run_names = list(set([filename.split('_')[1] for filename in runfiles])) run_names.sort() # Sort filenames into runs runs = {run_name: {datatype: filename for datatype, filename in zip(datatypes, runfiles) if run_name in filename.split('_')} for run_name in run_names} return run_names, runs def import_run(run_name, folder='./database/'): # list runs run_names, runs = list_runs(folder) # import run from import_export import load_segments, load_features, load_matches, load_classes segments, sids = load_segments(folder=folder, filename=runs[run_name]['segments']) features, fnames, fids = load_features(folder=folder, filename=runs[run_name]['features']) matches = load_matches( folder=folder, filename=runs[run_name]['matches']) classes, cids = load_classes( folder=folder, filename=runs[run_name]['classes']) if 'classes' in runs[run_name] else ([], []) # The loaded ids should match. assert len(sids) == len(fids) non_matching_ids = np.where(np.array(sids) != np.array(fids))[0] assert non_matching_ids.shape[0] == 0 ids = sids assert len(ids) == len(segments) print(" Found " + str(len(ids)) + " segment ids") if len(cids) != 0: assert len(cids) == len(sids) non_matching_ids = np.where(np.array(sids) != np.array(cids))[0] if cids else np.array([]) assert non_matching_ids.shape[0] == 0 assert len(classes) == len(segments) print(" Found classes for " + str(len(classes)) + " segments") return segments, features, fnames, matches, classes, ids ``` #### File: segmatch/python/visuals.py ```python import numpy as np from matplotlib import pyplot as plt def points2d(x, y, c, small_fonts=True, no_axis=False, cmap_name='plasma', s=50): out = plt.scatter(x, y, c=c, alpha=0.8, marker='.', lw = 0, cmap=plt.get_cmap(cmap_name), s=s) if small_fonts: # Tiny fonts for axis tick numbers plt.setp(plt.gca().get_xticklabels(), fontsize=0) plt.setp(plt.gca().get_yticklabels(), fontsize=0) if no_axis: # No axes or bounding boxes at all plt.axis('off') return out def visuals_of_segments(segments, ids, features=None, directory="/tmp/online_matcher/visuals/", black_and_white=False, oneview=False): import os if not os.path.exists(directory): os.makedirs(directory) fig = None for id_, segment in zip(ids, segments): seg_features = None if features is None else features[ids.index(id_)] img_path = directory+"segment"+str(id_)+".png" single_segment_as_figure(segment, seg_features, black_and_white, fig, oneview).savefig(img_path) plt.close(plt.gcf()) def visuals_of_matches(matches, segments, ids, features=None, directory="/tmp/online_matcher/visuals/", black_and_white=False, oneview=False): import os if not os.path.exists(directory): os.makedirs(directory) fig = None for i, match in enumerate(matches): all_ids_in_match_are_known = True for j, id_ in enumerate(match): if id_ not in ids: all_ids_in_match_are_known = False print("Unknown id ("+str(id_)+") in match["+str(j)+"].") if all_ids_in_match_are_known: img_paths = [] for id_ in match: segment = segments[ids.index(id_)] seg_features = None if features is None else features[ids.index(id_)] img_path = 
directory+"match"+str(i)+"seg"+str(id_)+".png" img_paths.append(img_path) single_segment_as_figure(segment, seg_features, black_and_white, fig, oneview).savefig(img_path) plt.clf() # Concatenate images import sys from PIL import Image images = [Image.open(img_path) for img_path in img_paths] widths, heights = zip(*(i.size for i in images)) total_width = sum(widths) max_height = max(heights) new_im = Image.new('RGB', (total_width, max_height)) x_offset = 0 for im in images: new_im.paste(im, (x_offset,0)) x_offset += im.size[0] new_im.save(directory+"match"+str(i)+".png") for img_path in img_paths: os.remove(img_path) def single_segment_as_figure(segment, seg_features=None, black_and_white=False, fig=None, oneview=False): if fig == None: fig = plt.figure('visuals') X = segment[:,0] Y = segment[:,1] Z = segment[:,2] th = -np.pi/4 XP = X*np.cos(th) + Y*np.sin(th) ZP = Z*np.cos(th) - (-X*np.sin(th)+Y*np.cos(th))*np.sin(th) YP = Z*np.sin(th) + (-X*np.sin(th)+Y*np.cos(th))*np.cos(th) color= 'k' if black_and_white else YP if oneview: nvp = 2 if seg_features != None else 1 plt.subplot(nvp, 1, 1) points2d(XP, ZP, color) plt.axis('equal') else: nvp = 4 if seg_features != None else 3 plt.subplot(nvp, 3, 1) points2d(X, Z, color) plt.axis('equal') plt.subplot(nvp, 3, 3) points2d(-Y, Z, color) plt.axis('equal') plt.subplot(nvp, 3, 7) points2d(X, Y, color) plt.axis('equal') plt.subplot(nvp, 3, 2) points2d(X*np.cos(th) - Y*np.sin(th), Z, color) plt.axis('equal') plt.subplot(nvp, 3, 4) points2d(X, Z*np.cos(th) - Y*np.sin(th), color) plt.axis('equal') plt.subplot(nvp, 3, 5) points2d(XP, ZP, color) plt.axis('equal') if seg_features is not None: plt.subplot(nvp, 1, nvp) plt.bar(range(len(seg_features)), seg_features) plt.tight_layout() return fig def single_segment_as_gif(segment, directory="/tmp/online_matcher/visuals/animated/", frames=60, black_and_white=False): import os if not os.path.exists(directory): os.makedirs(directory) import voxelize rotations = voxelize.create_rotations([voxelize.recenter_segment(segment)], n_angles=frames) segments_as_gif(rotations, filename='segment', directory=directory, black_and_white=black_and_white) def single_segment_reconstruction_as_gif(segment, vae, confidence=0.3, directory="/tmp/online_matcher/visuals/animated/", frames=60, black_and_white=False): import os if not os.path.exists(directory): os.makedirs(directory) import voxelize import autoencoder.model VOXEL_SIDE = vae.MP.INPUT_SHAPE[0] segments_vox, features_voxel_scale = voxelize.voxelize([segment], VOXEL_SIDE) reconstruction_vox = vae.batch_encode_decode([np.reshape(sample, vae.MP.INPUT_SHAPE) for sample in segments_vox]) reconstruction_vox = [np.reshape(vox, [VOXEL_SIDE, VOXEL_SIDE, VOXEL_SIDE]) for vox in reconstruction_vox] from voxelize import unvoxelize reconstruction = [unvoxelize(vox > confidence) for vox in reconstruction_vox] reconstruction = [voxelize.recenter_segment(segment*scale) for (segment, scale) in zip(reconstruction, features_voxel_scale)] rotations = voxelize.create_rotations(reconstruction, n_angles=frames) segments_as_gif(rotations, rotate_YP=(2*np.pi/frames), filename='reconstruction', directory=directory, black_and_white=black_and_white) def single_segment_rotations_reconstruction_as_gif(segment, vae, confidence=0.3, directory="/tmp/online_matcher/visuals/animated/", frames=120, black_and_white=False): import os if not os.path.exists(directory): os.makedirs(directory) import voxelize rotations = voxelize.create_rotations([segment], n_angles=frames) import autoencoder.model VOXEL_SIDE = 
vae.MP.INPUT_SHAPE[0] rotations_vox, features_voxel_scale = voxelize.voxelize(rotations, VOXEL_SIDE) reconstruction_vox = vae.batch_encode_decode([np.reshape(sample, vae.MP.INPUT_SHAPE) for sample in rotations_vox], batch_size=120) reconstruction_vox = [np.reshape(vox, [VOXEL_SIDE, VOXEL_SIDE, VOXEL_SIDE]) for vox in reconstruction_vox] from voxelize import unvoxelize reconstruction = [unvoxelize(vox > confidence) for vox in reconstruction_vox] reconstruction = [voxelize.recenter_segment(segment*scale) for (segment, scale) in zip(reconstruction, features_voxel_scale)] segments_as_gif(reconstruction, rotate_YP=(2*np.pi/frames), filename='reconstruction_rot', directory=directory, black_and_white=black_and_white) def single_segment_degeneration_as_gif(segment, vae, confidence=0.3, directory="/tmp/online_matcher/visuals/animated/", frames=60, black_and_white=False): import os if not os.path.exists(directory): os.makedirs(directory) import voxelize import autoencoder.model VOXEL_SIDE = vae.MP.INPUT_SHAPE[0] segment_vox, features_voxel_scale = voxelize.voxelize([segment], VOXEL_SIDE) segment_vox = [np.reshape(sample, vae.MP.INPUT_SHAPE) for sample in segment_vox] for i in range(frames): reconstruction_vox = vae.batch_encode_decode(reconstruction_vox) if i > 0 else segment_vox degen_vox = degen_vox + list(reconstruction_vox) if i > 0 else list(reconstruction_vox) degen_vox = [np.reshape(vox, [VOXEL_SIDE, VOXEL_SIDE, VOXEL_SIDE]) for vox in degen_vox] from voxelize import unvoxelize reconstruction = [unvoxelize(vox > confidence) for vox in degen_vox] reconstruction = [voxelize.recenter_segment(segment*features_voxel_scale[0]) for segment in reconstruction] print(len(reconstruction)) segments_as_gif(reconstruction, rotate_YP=0, filename='degeneration', directory=directory, black_and_white=black_and_white) def single_segment_confidence_as_gif(segment, vae, directory="/tmp/online_matcher/visuals/animated/", frames=60, black_and_white=False): import os if not os.path.exists(directory): os.makedirs(directory) import voxelize import autoencoder.model VOXEL_SIDE = vae.MP.INPUT_SHAPE[0] segment_vox, features_voxel_scale = voxelize.voxelize([segment], VOXEL_SIDE) segment_vox = [np.reshape(sample, vae.MP.INPUT_SHAPE) for sample in segment_vox] reconstruction_vox = vae.batch_encode_decode(segment_vox) reconstruction_vox = [np.reshape(vox, [VOXEL_SIDE, VOXEL_SIDE, VOXEL_SIDE]) for vox in reconstruction_vox] from voxelize import unvoxelize cmin=0.1; cmax=np.amax(reconstruction_vox); confidences = list(np.linspace(cmin,cmax,frames/2))+list(np.linspace(cmax,cmin,frames/2)) reconstruction = [unvoxelize(reconstruction_vox[0] > confidence) for confidence in confidences] reconstruction = [segment*features_voxel_scale[0] for segment in reconstruction] segments_as_gif(reconstruction, rotate_YP=0, filename='confidence', directory=directory, black_and_white=black_and_white) def segments_as_gif(segments, filename='segment', rotate_YP=None, directory="/tmp/online_matcher/visuals/animated/", black_and_white=False, framerate=30): for i, segment in enumerate(segments): X = segment[:,0] Y = segment[:,1] Z = segment[:,2] th = -np.pi/4 XP = X*np.cos(th) + Y*np.sin(th) ZP = Z*np.cos(th) - (-X*np.sin(th)+Y*np.cos(th))*np.sin(th) zmin = min(min(ZP), zmin) if i > 0 else min(ZP) zmax = max(max(ZP), zmax) if i > 0 else max(ZP) xmin = min(min(XP), xmin) if i > 0 else min(XP) xmax = max(max(XP), xmax) if i > 0 else max(XP) if rotate_YP == None: YP = Z*np.sin(th) + (-X*np.sin(th)+Y*np.cos(th))*np.cos(th) for i, segment in 
enumerate(segments): X = segment[:,0] Y = segment[:,1] Z = segment[:,2] th = -np.pi/4 XP = X*np.cos(th) + Y*np.sin(th) ZP = Z*np.cos(th) - (-X*np.sin(th)+Y*np.cos(th))*np.sin(th) if rotate_YP != None: # keep color consistent between rotated reconstructions phi = rotate_YP*i x2x_y2y = np.array([ np.cos(phi), -np.cos(phi), 1 ]) x2y_y2x = np.array([ np.sin(phi), np.sin(phi), 0 ]) unrotated = segment*x2x_y2y+segment[:,[1,0,2]]*x2y_y2x XuR = unrotated[:,0] YuR = unrotated[:,1] YP = Z*np.sin(th) + (-XuR*np.sin(th)+YuR*np.cos(th))*np.cos(th) fig = plt.figure('visuals') color= 'k' if black_and_white else -YP points2d(XP, ZP, color, no_axis=True, s=130) plt.ylim([zmin, zmax]) plt.xlim([xmin, xmax]) plt.gca().set_aspect('equal', adjustable='box') plt.tight_layout() img_path = directory+"frame"+str(i).zfill(3)+".png" plt.gcf().savefig(img_path) saved_fig_paths = saved_fig_paths + [img_path] if i > 0 else [img_path] plt.clf() import subprocess subprocess.call(['ffmpeg', '-framerate', '30', '-i', directory+'frame%03d.png', '-r', '30', '-y', directory+'output.mp4']) subprocess.call(['ffmpeg', '-i', directory+'output.mp4', '-y', directory+filename+'.gif']) import os for path in saved_fig_paths: os.remove(path) def compare_features(features_a, features_b, feature_labels=None, N = 30, d = 1.5, title = ""): feature_labels = ['unknown']*len(features_a[0]) if feature_labels is None else feature_labels import matplotlib.pyplot as plt plt.ion() plt.figure(figsize=(26,8)) # a segments features ax = plt.subplot(2, 1, 1) plt.title("Comparison of Features"+title) colors = ['b','g','r','y','w'] n_features = min([len(features_a[0]),5]) for i in range(0,N): f = features_a[i] for j in range(0, n_features ): ax.bar((i*d)-0.2+j*0.2, f[j],width=0.2,color=colors[j],align='center') plt.xlim([-d, N*d+d]) plt.ylabel("Feature value in A") # legends import matplotlib.patches as mptch patches = [mptch.Patch(color=colors[i], label=feature_labels[i]) for i in range(0,n_features)] plt.legend(handles=patches) # b segments features ax = plt.subplot(2, 1, 2) for i in range(0,N): f = features_b[i] for j in range(0, n_features ): ax.bar((i*d)-0.2+j*0.2, f[j],width=0.2,color=colors[j],align='center') plt.xlim([-d, N*d+d]) plt.ylabel("Feature value in B") plt.xlabel("Object #") plt.tight_layout() return def cycle_color(i): import matplotlib colors=list(matplotlib.colors.cnames.keys()) return colors[i%len(colors)] def visualize_matches_for_two_features(x_axis_feature_index, y_axis_feature_index, ids, features, feature_names, matches, hide_segments_with_zero_matches=True): X = [] Y = [] C = [] for i, group in enumerate(matches): group_color = cycle_color(i) if hide_segments_with_zero_matches and len(group)==1: continue for seg_id in group: point_x = features[ids.index(seg_id)][x_axis_feature_index] point_y = features[ids.index(seg_id)][y_axis_feature_index] X.append(point_x) Y.append(point_y) C.append(group_color) import matplotlib.pyplot as plt import matplotlib.cm as cm plt.scatter(X,Y,color=C, lw = 0) plt.xlabel(feature_names[x_axis_feature_index]) plt.ylabel(feature_names[y_axis_feature_index]) def tSNE(ids, features): raise NotImplementedError return 0 ```
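Every renderer in this file applies the same fixed oblique projection (a rotation by `th = -pi/4`) before plotting. The standalone sketch below reproduces just that projection on a synthetic point cloud; it uses only numpy and matplotlib and none of the repo's own modules (`points2d`, `voxelize`), so the data and styling are illustrative assumptions rather than repo behavior:

```python
import numpy as np
import matplotlib.pyplot as plt

# Synthetic N x 3 point cloud standing in for a real segment (assumption).
segment = np.random.rand(500, 3)
X, Y, Z = segment[:, 0], segment[:, 1], segment[:, 2]

# Same oblique projection used by single_segment_as_figure / segments_as_gif.
th = -np.pi / 4
XP = X * np.cos(th) + Y * np.sin(th)
ZP = Z * np.cos(th) - (-X * np.sin(th) + Y * np.cos(th)) * np.sin(th)
YP = Z * np.sin(th) + (-X * np.sin(th) + Y * np.cos(th)) * np.cos(th)

plt.scatter(XP, ZP, c=YP, s=2)  # color points by projected depth, as the repo does
plt.axis('equal')
plt.show()
```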
{ "source": "jideedu/SKILLVET_APP", "score": 2 }
#### File: jideedu/SKILLVET_APP/allfuctions.py ```python from PIL import Image from bs4 import BeautifulSoup from fake_useragent import UserAgent import requests, docx2txt, pytesseract, PyPDF2 import collections, pygal import pandas as pd import numpy as np ua=UserAgent() hdr = {'User-Agent': ua.random, 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', 'Accept-Encoding': 'none', 'Accept-Language': 'en-US,en;q=0.8', 'Connection': 'keep-alive'} olderperms = [('Lists Read Access', 'Personal Information'), ('Lists Write Access', 'Personal Information'), ('lists write access (2)', 'Personal Information'), ('First Name', 'Name'), ('Full Name', 'Name'), ('Given Name', 'Name'), ('Device Country and Postcode', 'device country and postal code'), ('phone number','Mobile Number'), ('zip', 'device country and postal code'), ('address','Device Address'), ('location','Location Services'), ('birthday', 'Personal Information'), ('email','Email Address'), ('area code', 'device country and postal code'), ('gender', 'Personal Information'), ('born','Personal Information'), ('zipcode', 'device country and postal code'), ('postal code', 'device country and postal code') ] olderrems = ['alexa notifications', 'skill personisation', 'reminders', 'timer', 'birthday', 'gender','skill resumption', 'timers'] def clean_html_file(html): text = html.find_all(text=True) output = '' blacklist = ['[document]','noscript','header','html', 'meta', 'head', 'input','script','style' ] for t in text: if t.parent.name not in blacklist: output += '{} '.format(t.encode('utf-8').strip()) return output #getting privacy policy and permission with fake user agent def get_url(url): res = requests.get(url, headers=hdr) html_page = res.content soup = BeautifulSoup(html_page, 'html.parser') #getting the permission skill_permissions = [perm.get_text().strip() for perm in soup.findAll("li", attrs={"class": "a2s-permissions-list-item"})] # print(skill_permissions) #GETting DYNAMIC CONTENT link = [a for a in soup.findAll("a", attrs={"rel": "noopener"})] privacy_policy_link = (link[0]['href']) try: res = requests.get(privacy_policy_link, headers=hdr) html_page = res.content data = BeautifulSoup(html_page, 'html.parser') except: data = 'Error Page' return (skill_permissions, data) #getting data from non text files def GetData(file_path, file_ext): image_ext = ['.png','.jpg', '.jpeg','.gif'] word_doc= ['.doc','.docx'] data = 'Error' if file_ext == '.pdf': with open(file_path,'rb') as pdfFileObj: pdfReader = PyPDF2.PdfFileReader(pdfFileObj) for i in range (0 , pdfReader.numPages): pageObj = pdfReader.getPage(i) data = data + (pageObj.extractText())#.encode('ascii','ignore')) #print('\n\n\n\n\n',data,'\n\n\n\n\n') return data elif file_ext in image_ext: with Image.open(file_path) as img: data = pytesseract.image_to_string(img, lang='eng') return str(data.encode('ascii', 'ignore')) elif file_ext in word_doc: data = docx2txt.process(file_path) return data else: with open(file_path, "r") as fp: data = fp.read().encode('ascii', 'ignore') return data marketplaces = {'IT': 'Italy', "IN": 'India', "AU":"Australia", "US": "United State", "UK":"United Kingdom", "FR":"France","DE":"Germany","ES":"Spain","MX":"Mexico","CA":"Canada","JP":"Japan"} def getNumSkillsDevs(df2019, df2020, df2021): allmarkets = df2021.market.unique() unique_skills = list() unique_devs = list() for mk in allmarkets: mknew = marketplaces[mk] skillyear2019 = len(df2019.loc[df2019['market'] == mk]) skillyear2020 
= len(df2020.loc[df2020['market'] == mk]) skillyear2021 = len(df2021.loc[df2021['market'] == mk]) skillyear2019, skillyear2020, skillyear2021 = "{:,}".format(skillyear2019), "{:,}".format(skillyear2020), "{:,}".format(skillyear2021) unique_skills.append((mknew, skillyear2019,skillyear2020,skillyear2021)) alldevs2019 = df2019.loc[df2019['market'] == mk].dev.unique() alldevs2020 = df2020.loc[df2020['market'] == mk].dev.unique() alldevs2021 = df2021.loc[df2021['market'] == mk].dev.unique() devyear2019, devyear2020, devyear2021 = len(alldevs2019), len(alldevs2020), len(alldevs2021) devyear2019, devyear2020, devyear2021 = "{:,}".format(devyear2019), "{:,}".format(devyear2020), "{:,}".format(devyear2021) unique_devs.append((mknew, devyear2019,devyear2020,devyear2021)) return (unique_skills, unique_devs) def preprocessSnapshotPerm(df): #skills with Permission in english markets per year df_selected = df.loc[ (df['market'].isin(['US', 'UK', 'CA', 'AU', 'IN'])) & (df['perm_requested_norm'].notnull()) ] df_selected = df_selected.drop_duplicates(subset='id_name_dev', keep="last") return df_selected def getallYearSkillPerm(df): df_selected = preprocessSnapshotPerm(df) df = df_selected.drop(['market'], axis = 1) df.rename(columns={'name':'Skill','acc_linking':'Account Linking', 'cat':'Category', 'dev':'Developer', 'market':'Market','perm_requested_original':'Permission', 'traceability':'Traceability','year':'Year', 'in_skill_purchase':'In Skill Purchase' }, inplace=True) df = df.iloc[:,1:].replace(np.nan, 'None', regex=True).reset_index(drop=True) df = df.drop(['skill_link','perm_found_norm','perm_requested_norm', 'id_name_dev'], axis=1) return df def traceByPermissionTypeSkill(df): df_selected = preprocessSnapshotPerm(df) tracebility = ['R','B','P','C'] finalresults = {} for trace in tracebility: if trace == 'R': df = df_selected else: df = df_selected[df_selected['traceability']== trace] to_replace = ["[","]", "'"] results = [] formatresult = [] for index,row in df.iterrows(): perms = row['perm_requested_norm'] try: perms = perms.split(",") except: pass if len(perms)> 0: for perm in perms: for i in to_replace: if i in perm: perm = perm.replace(i , "") for oldperm in olderperms: if perm.lower().strip() == oldperm[0].lower(): perm = perm[1].lower() if perm.lower().strip() not in olderrems: if perm.strip().lower() == 'personal information': perm = 'list' results.append(perm.strip().lower()) for result in (collections.Counter(results)).items(): formatresult.append(result) finalresults[trace] = (len(df), formatresult) return(finalresults) def traceByPermissionTypeDev(df): df_selected = preprocessSnapshotPerm(df) tracebility = ['R','B','P','C'] finalresult = {} for trace in tracebility: if trace == 'R': df = df_selected else: df = df_selected[df_selected['traceability']== trace] to_replace = ["[","]", "'"] result = [] developers_across = [] permissions = ['device country and postal code', 'device address', 'email address','personal information', 'name', 'mobile number', 'amazon pay','location services'] for aperm in permissions: developers = [] for index,row in df.iterrows(): perms = row['perm_requested_norm'] try: perms = perms.split(",") except: pass if len(perms)> 0: for perm in perms: for i in to_replace: if i in perm: perm = perm.replace(i , "") for olderperm in olderperms: if perm.lower().strip() == olderperm[0].lower(): perm = perm[1].lower() if perm.lower().strip() not in olderrems: if row['dev'] not in developers and (perm.strip().lower()) == aperm: developers.append(row['dev']) if row['dev'] not 
in developers_across: developers_across.append(row['dev']) if aperm == 'personal information': aperm = 'list' result.append((aperm, len(developers))) finalresult[trace] = (len(developers_across), result) return(finalresult ) def renderChartPermsDev(df, trace): devresults = traceByPermissionTypeDev(df) devresult = devresults[trace] TotalDev = devresult[0] dev_bar_chart = pygal.Bar(height=500) # instance of Bar class dev_bar_chart.title = 'Traceability by Permission across Developers' # title of bar chart for index,elem in enumerate(devresult[1]): dev_bar_chart.add(elem[0],elem[1]) devchart = dev_bar_chart.render_data_uri() # render bar chart return devchart def renderChartPermsSkill(df,trace): skillresults = traceByPermissionTypeSkill(df) skillresult = skillresults[trace] TotalDev = skillresult[0] skill_bar_chart = pygal.Bar(height=500) # instance of Bar class skill_bar_chart.title = 'Traceability by Permission across Skills' # title of bar chart for index,elem in enumerate(skillresult[1]): if elem[0]== '': continue skill_bar_chart.add(elem[0],elem[1]) skillchart = skill_bar_chart.render_data_uri() # render bar chart return skillchart def unpackGroupByResult(data): Broken, Partial, Complete =[],[],[] finalresult = {} for key, value in data.items(): cat, trace = key if trace == 'B': Broken.append((cat, value)) elif trace == 'P': Partial.append((cat, value)) elif trace == 'C': Complete.append((cat, value)) finalresult ['C']= Complete finalresult ['P']= Partial finalresult ['B']= Broken return finalresult def renderChartTotaltrace(df,df2,df3): x_labels = ['2021','2020','2019'] #checking for skill Totaltrace = df.groupby('traceability')['id_name_dev'].nunique() Totaltrace2 = df2.groupby('traceability')['id_name_dev'].nunique() Totaltrace3 = df3.groupby('traceability')['id_name_dev'].nunique() title = 'Traceability by Skills Per Year' skillchartTotaltrace = draw(x_labels,title, Totaltrace, Totaltrace2, Totaltrace3) #check for developers Totaltrace = df.groupby('traceability')['dev'].nunique() Totaltrace2 = df2.groupby('traceability')['dev'].nunique() Totaltrace3 = df3.groupby('traceability')['dev'].nunique() title = 'Traceability by Developers Per Year' devchartTotaltrace = draw(x_labels,title, Totaltrace, Totaltrace2, Totaltrace3) return skillchartTotaltrace, devchartTotaltrace def draw(x_labels,title, *argv): graph = pygal.Bar(height=500) graph.title = title graph.x_labels = x_labels Broken, Partial, Complete =[],[],[] for item in argv: for key, value in item.iteritems(): if key == 'B': Broken.append(value) elif key == 'P': Partial.append(value) elif key == 'C': Complete.append(value) graph.add('Broken', Broken) graph.add('Partial', Partial) graph.add('Complete', Complete) return graph.render_data_uri() #use this to recategorise the skills def CatMapping(df): categorylist = {'Business' : ['Wirtschaft & Finanzen','ビジネス・ファイナンス', 'Business & Finance', 'Affaires et finances','Negocios y Finanzas', 'Negocios y finanzas ', 'Affari e finanza', 'Negocios y finanzas',], 'car' : ['Vernetztes Auto','コネクテッドカー','Connected Car', 'Auto connessa', 'Coche conectado',], 'Education' : ['Bildung & Nachschlagewerke','Education & Reference',' Educación y Referencia', 'Educación y referencia', 'Etudes supérieures', 'Educación y Referencia', 'Enseignement et éducation', '教育・レファレンス'], 'Food': ['Essen & Trinken','フード・ドリンク', 'Food & Drink','Cooking & Recipes', 'Delivery & Takeout', 'Restaurant Booking, Info & Reviews', 'Wine & Beverages', 'Alimentation et gastronomie','Alimentos y Bebidas','Alimentos y bebidas', 'Cibo e 
bevande',], 'Games': ['Spiele und Quiz','Spiele & Quiz','Games & Trivia', 'Games','Game Info & Accessories','Knowledge & Trivia', 'Giochi e quiz', 'Juegos y Curiosidades','Juegos y curiosidades','Jeux et culture générale', 'ゲーム・トリビア', ], 'Health': ['Gesundheit & Fitness','ヘルス・フィットネス', 'Health & Fitness','Salud y Bienestar', 'Salud y bienestar', 'Fitness & Sports', 'Salute e benessere', 'Santé et bien-être','Safety'], 'Kids':['子ども向け','Kids', 'Bambini e ragazzi', 'Enfants', 'Kinder', 'Niños', 'Infantil',], 'Lifestyle': ['ライフスタイル' ,'Stili e tendenze' , 'Stili e tendenze ', 'Lifestyle', 'Home Services', 'Astrology','Cooking & Recipes','Event Finders','Fashion & Style','Friends & Family','Health & Fitness', 'Pets & Animals','Religion & Spirituality','Self Improvement','Fan Shop', 'To-Do Lists & Notes','Wine & Beverages', 'Estilo de Vida', 'Estilo de vida',], 'Local' : ['area','地域','Local','Event Finders','Food Delivery & Takeout','Movie Showtimes','Public Transportation', 'Restaurant Booking', 'Info & Reviews','Schools','Taxi & Ridesharing', 'Consultazione e informazione','Informazioni utili sulle città',], 'Movies' : ['Film & Fernsehen','映画・TV', 'Movies & TV', 'Knowledge & Trivia','Movie & TV Games','Movie Info & Reviews', 'Movie Showtimes','TV Guides', 'Film e TV', 'Películas y TV', 'Cinéma et télévision'], 'Music': ['Musik & Audio','音楽・オーディオ', 'Music & Audio','Accessories', 'Knowledge & Trivia', 'Music Games'," Music Info\, Reviews & Recognition Services", 'Podcasts', 'Streaming Services', 'Música y Audio', 'Música y audio', 'Music Info, Reviews & Recognition Services','Musica e audio' 'Musique, radio et audio', 'Maison connectée', 'Musica e audio', 'Musique, radio et audio' ], 'News' :['Nachrichten','ニュース', 'News','Actualités', 'Noticias', 'Notizie',], 'Novelty': ['Neuheiten & Humor','ノベルティ・ユーモア', 'Curiosidades y Humor', 'Novelty & Humor', 'Novelty & Humour', 'Curiosidades y humor', 'Fantaisie et humour', 'Umorismo e curiosità',], 'Productivity': ['Produktivität','仕事効率化','Productivité', 'Productivity','Alarms & Clocks', 'Calculators','Calendars & Reminders','Communication','Organizers & Assistants','Self Improvement','To-Do Lists & Notes','Translators', 'Productividad',' Productivité', 'Produttività',"Supporters' Gear"], 'Shopping' : ['Shopping', 'Compras', 'ショッピング', 'Boutique du supporter'], 'Home': ['Smart Home','Casa intelligente', 'Hogar digital', 'スマートホーム',], 'Social': ['Soziale Netzwerke', 'ソーシャル', 'Social','Communication', 'Communication', 'Dating','Friends & Family','Social Networking',], 'Sports': ['スポーツ', 'Sports','Exercise & Workout','Games', 'Sport', 'Deportes','Score Keeping','Football'], 'Travel' : ['Reise & Transport','旅行・交通', 'Travel & Transportation','Currency Guides & Converters','Flight Finders','Hotel Finders','Navigation & Trip Planners', 'Public Transportation','Taxi & Ridesharing','Translators', 'Viaggi e trasporti', 'Viaje y transporte', 'Tourisme et voyages', 'Viaje y Transporte', ], 'Utilities' : ['Tapes, Adhesives & Sealants','Dienstprogramme','ユーティリティ', 'Home Décor','Utilities','Alarms & Clocks','Calculators','Calendars & Reminders','Device Tracking', 'Translators','Unit Converters','Zip Code Lookup', 'Utility', 'Servicios',], 'Weather': ['Wetter','天気','Weather', 'Meteorologia', 'Météo', 'Clima',],} for i, row in df.iterrows(): try: cat = row['cat'].strip() except: pass for category in categorylist: if cat in categorylist[category]: df.at[i,'cat'] = category break else: continue return df def renderChartByCat(df,trace): elem = 
df.groupby('cat')['traceability'].value_counts() finalresult = unpackGroupByResult(elem) skill_per_cat = finalresult[trace] cat_skill_bar_chart = pygal.Bar(height=500) # instance of Bar class cat_skill_bar_chart.title = 'Traceability by Category across Skills' # title of bar chart [cat_skill_bar_chart.add(x[0], x[1]) for x in skill_per_cat] catskillchart = cat_skill_bar_chart.render_data_uri() # render bar chart return catskillchart ```
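All of the `renderChart*` helpers above follow one pattern: build a `pygal.Bar`, `add()` one series per label, then embed the chart via `render_data_uri()`. A minimal sketch of that pattern with made-up counts (the labels and numbers are illustrative, not real SKILLVET data):

```python
import pygal

chart = pygal.Bar(height=500)
chart.title = 'Traceability by Permission across Skills'
# Illustrative permission counts; real values come from traceByPermissionTypeSkill.
for label, count in [('device address', 12), ('email address', 7), ('name', 3)]:
    chart.add(label, count)
data_uri = chart.render_data_uri()  # embeddable as <img src="..."> in a template
print(data_uri[:60])
```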
{ "source": "JideGuru/iJokeBot", "score": 3 }
#### File: JideGuru/iJokeBot/bothandler.py
```python
import requests

jokes_url = "https://icanhazdadjoke.com/"


class BotHandler:
    def __init__(self, token):
        self.token = token
        self.api_url = "https://api.telegram.org/bot{}/".format(token)

    def get_updates(self, offset=None, timeout=30):
        method = 'getUpdates'
        params = {'timeout': timeout, 'offset': offset}
        resp = requests.get(self.api_url + method, params)
        result_json = resp.json()['result']
        return result_json

    def send_message(self, chat_id, text):
        params = {'chat_id': chat_id, 'text': text}
        method = 'sendMessage'
        resp = requests.post(self.api_url + method, params)
        return resp

    def get_last_update(self):
        get_result = self.get_updates()
        # get_result[len(get_result)] always raised IndexError when the update
        # list was empty; return None in that case instead.
        if len(get_result) > 0:
            last_update = get_result[-1]
        else:
            last_update = None
        return last_update

    def get_jokes(self, offset=None, timeout=30):
        params = {'timeout': timeout, 'offset': offset}
        resp = requests.get(jokes_url, params,
                            headers={"Accept": "application/json"})
        result_json = resp.json()['joke']
        return result_json
```
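A minimal polling-loop sketch for the handler above. The bot token is a placeholder and the driver loop is an assumption — the repo's actual entry point is not shown here:

```python
# Hypothetical driver loop, not part of the repo: poll Telegram for new
# messages and answer each chat with a joke from icanhazdadjoke.com.
bot = BotHandler('YOUR_TELEGRAM_TOKEN')  # placeholder token
offset = None
while True:
    for update in bot.get_updates(offset=offset, timeout=30):
        offset = update['update_id'] + 1  # acknowledge this update
        chat_id = update['message']['chat']['id']
        bot.send_message(chat_id, bot.get_jokes())
```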
{ "source": "jideobs/flask-gae-ndb-starter", "score": 3 }
#### File: server/resources/users.py ```python from flask_restful import Resource from flask_restful import abort from server.models.users import Users class UsersResource(Resource): @Users.method() def post(self, user): user.put() return user @Users.method() def put(self, user): if not user.from_datastore: abort(400, message='User does not exist') user.put() return user @Users.method() def delete(self, user): if not user.from_datastore: abort(400, message='User does not exist') user.key.delete() return user @Users.query_method() def get(self, user): return user ``` #### File: flask-gae-ndb-starter/server/utils.py ```python import datetime as main_datetime from google.appengine.ext import ndb DATE_FORMAT = '%Y-%m-%d' DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S' TIME_FORMAT = '%H:%M:%S' def date_to_str(date_time): if type(date_time) is main_datetime.date: return main_datetime.date.strftime(date_time, DATE_FORMAT) elif type(date_time) is main_datetime.time: return main_datetime.time.strftime(date_time, TIME_FORMAT) else: return main_datetime.datetime.strftime(date_time, DATE_TIME_FORMAT) def date_from_str(prop_type, str_date): if isinstance(prop_type, ndb.DateProperty): return main_datetime.datetime.strptime(str_date, DATE_FORMAT) elif isinstance(prop_type, ndb.DateTimeProperty): return main_datetime.datetime.strptime(str_date, DATE_TIME_FORMAT) else: return main_datetime.datetime.strptime(str_date, TIME_FORMAT) ```
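The two date helpers are thin wrappers over `strftime`/`strptime` with three fixed format strings. A stdlib-only round-trip sketch (no App Engine SDK needed, since it skips the ndb property check):

```python
import datetime

DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S'

now = datetime.datetime(2024, 1, 31, 12, 30, 0)
as_text = now.strftime(DATE_TIME_FORMAT)                      # what date_to_str produces
back = datetime.datetime.strptime(as_text, DATE_TIME_FORMAT)  # what date_from_str parses
assert back == now
```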
{ "source": "jideobs/projectx1", "score": 3 }
#### File: projectx1/projectx1/http_client.py
```python
import requests
from requests.exceptions import RequestException


class HTTPClient:
    def __init__(self, base_url: str, session: requests.Session):
        self.base_url = base_url
        self.session = session

    def get_html(self, params: dict, url_path: str = '/') -> str:
        # Strip slashes so the join never doubles the separator.
        url = f"{self.base_url.rstrip('/')}/{url_path.lstrip('/')}"
        try:
            # Forward the query parameters; the original accepted them but
            # never passed them to session.get().
            response = self.session.get(url, params=params)
            page_html = response.text
        except RequestException:
            page_html = ''
        return page_html
```
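A short usage sketch (the host, path, and query are placeholders, not from the repo):

```python
import requests

# Illustrative only: fetch a search page through HTTPClient.
client = HTTPClient('https://example.com', requests.Session())
html = client.get_html(params={'q': 'laptops'}, url_path='search')
print(html[:200])  # empty string if the request raised a RequestException
```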
{ "source": "jideshv/donkeycar", "score": 3 }
#### File: donkeycar/tests/test_datastore_v2.py ```python import os import shutil import tempfile import time import unittest from pathlib import Path from donkeycar.parts.datastore_v2 import Manifest class TestDatastore(unittest.TestCase): def setUp(self): self._path = tempfile.mkdtemp() def test_basic_datastore_operations(self): # 2 records per catalog entry in the manifest manifest = Manifest(self._path, max_len=2) count = 10 for i in range(count): manifest.write_record(self._newRecord()) read_records = 0 for entry in manifest: print('Entry %s' % (entry)) read_records += 1 self.assertEqual(count, read_records) def test_deletion(self): manifest = Manifest(self._path, max_len=2) count = 10 deleted = 5 for i in range(count): manifest.write_record(self._newRecord()) for i in range(deleted): manifest.delete_record(i) read_records = 0 for entry in manifest: print('Entry %s' % (entry)) read_records += 1 self.assertEqual((count - deleted), read_records) def tearDown(self): shutil.rmtree(self._path) def _newRecord(self): record = {'at' : time.time()} return record if __name__ == '__main__': unittest.main() ``` #### File: donkeycar/tests/test_keras.py ```python import pytest from donkeycar.parts.keras import * from donkeycar.utils import * import numpy as np def test_categorical(): km = KerasCategorical() assert km.model is not None img = get_test_img(km.model) km.run(img) def test_linear(): km = KerasLinear() assert km.model is not None img = get_test_img(km.model) km.run(img) def test_imu(): km = KerasIMU() assert km.model is not None img = get_test_img(km.model) imu = np.random.rand(6).tolist() km.run(img, imu) def test_rnn(): km = KerasRNN_LSTM() assert km.model is not None img = get_test_img(km.model) km.run(img) def test_3dconv(): km = Keras3D_CNN() assert km.model is not None img = get_test_img(km.model) km.run(img) def test_localizer(): km = KerasLocalizer() assert km.model is not None img = get_test_img(km.model) km.run(img) ```
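For reference, the same `Manifest` round-trip outside of `unittest` — a sketch that assumes a donkeycar checkout is importable; the record shape mirrors `_newRecord` in the test above:

```python
import shutil
import tempfile
import time

from donkeycar.parts.datastore_v2 import Manifest

path = tempfile.mkdtemp()
manifest = Manifest(path, max_len=2)   # two records per catalog, as in the test
for _ in range(4):
    manifest.write_record({'at': time.time()})
print(sum(1 for _ in manifest))        # expected: 4
shutil.rmtree(path)
```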
{ "source": "jidiai/ai_lab", "score": 2 }
#### File: ai_lab/env/chessandcard.py ```python import copy from gym.utils import seeding from env.simulators.game import Game from env.obs_interfaces.observation import * from utils.discrete import Discrete class ChessAndCard(Game, DictObservation): def __init__(self, conf): super(ChessAndCard, self).__init__(conf['n_player'], conf['is_obs_continuous'], conf['is_act_continuous'], conf['game_name'], conf['agent_nums'], conf['obs_type']) self.seed = None self.done = False self.dones = {} self.step_cnt = 0 self.max_step = int(conf["max_step"]) env_name = conf["game_name"] import_path = "from pettingzoo.classic import " + env_name + " as env_imported" exec(import_path) func_name = "env_imported" self.env_core = None self.env_core = eval(func_name).env() if self.env_core is None: raise Exception("ChessAndCard env_core is None!") self.init_info = None self.won = {} self.n_return = [0] * self.n_player self.step_cnt = 0 self.done = False self.env_core.reset() self.player_id_map, self.player_id_reverse_map = self.get_player_id_map(self.env_core.agents) # set up action spaces self.new_action_spaces = self.load_action_space() self.joint_action_space = self.set_action_space() self.action_dim = self.joint_action_space self.input_dimension = self.env_core.observation_spaces # set up first all_observes obs, _, _, _ = self.env_core.last() self.current_state = obs self.all_observes = self.get_all_observes() def reset(self): self.step_cnt = 0 self.done = False self.init_info = None self.env_core.reset() obs, _, _, _ = self.env_core.last() self.current_state = obs self.all_observes = self.get_all_observes() self.won = {} self.n_return = [0] * self.n_player return self.all_observes def step(self, joint_action): self.is_valid_action(joint_action) info_before = self.step_before_info() joint_action_decode = self.decode(joint_action) self.env_core.step(joint_action_decode) obs, reward, _, info_after = self.env_core.last() info_after = '' self.current_state = obs self.all_observes = self.get_all_observes() # print("debug all observes ", type(self.all_observes[0]["obs"])) self.set_n_return() self.step_cnt += 1 done = self.is_terminal() return self.all_observes, reward, done, info_before, info_after def is_valid_action(self, joint_action): if len(joint_action) != self.n_player: raise Exception("Input joint action dimension should be {}, not {}.".format( self.n_player, len(joint_action))) current_player_id = self.player_id_map[self.env_core.agent_selection] if (self.env_core.agent_selection in self.env_core.agents) and \ (not self.env_core.dones[self.env_core.agent_selection]): if joint_action[current_player_id] is None or joint_action[current_player_id][0] is None: raise Exception("Action of current player is needed. 
Current player is {}, {}".format( current_player_id, self.env_core.agent_selection)) for i in range(self.n_player): if joint_action[i] is None or joint_action[i][0] is None: continue if len(joint_action[i][0]) != self.joint_action_space[i][0].n: raise Exception("The input action dimension for player {} should be {}, not {}.".format( i, self.joint_action_space[i][0].n, len(joint_action[i][0]))) def step_before_info(self, info=''): return info def is_terminal(self): if self.step_cnt >= self.max_step: self.done = True if not self.env_core.agents: self.done = True if all(self.env_core.dones.values()): self.done = True return self.done def get_single_action_space(self, player_id): return self.joint_action_space[player_id] def load_action_space(self): origin_action_spaces = self.env_core.action_spaces new_action_spaces = {} for key, action_space in origin_action_spaces.items(): changed_key = self.player_id_map[key] new_action_spaces[changed_key] = Discrete(action_space.n) return new_action_spaces def set_action_space(self): action_space = [[self.new_action_spaces[i]] for i in range(self.n_player)] return action_space def check_win(self): if self.all_equals(self.n_return): return '-1' index = [] max_n = max(self.n_return) for i in range(len(self.n_return)): if self.n_return[i] == max_n: index.append(i) if len(index) == 1: return str(index[0]) else: return str(index) def decode(self, joint_action): if self.env_core.agent_selection not in self.env_core.agents or \ self.env_core.dones[self.env_core.agent_selection]: return None current_player_id = self.player_id_map[self.env_core.agent_selection] if joint_action[current_player_id] is None or joint_action[current_player_id][0] is None: return None joint_action_decode = joint_action[current_player_id][0].index(1) return joint_action_decode def set_n_return(self): for player_key, player_reward in self.env_core.rewards.items(): player_id = self.player_id_map[player_key] self.n_return[player_id] += player_reward def get_player_id_map(self, player_keys): player_id_map = {} player_id_reverse_map = {} for i, key in enumerate(player_keys): player_id_map[key] = i player_id_reverse_map[i] = key return player_id_map, player_id_reverse_map def create_seed(self): seed = seeding.create_seed(None, max_bytes=4) return seed def set_seed(self, seed=None): self.env_core.seed(seed) self.seed = seed def get_all_observes(self): all_observes = [] for i in range(self.n_player): player_name = self.player_id_reverse_map[i] each_obs = copy.deepcopy(self.current_state) each = {"obs": each_obs, "controlled_player_index": i, "controlled_player_name": player_name} all_observes.append(each) return all_observes def all_equals(self, list_to_compare): return len(set(list_to_compare)) == 1 ``` #### File: ai_lab/env/logisticsenv.py ```python import json import os.path import sys from pathlib import Path from random import randint, sample from matplotlib.cm import get_cmap from matplotlib.colors import Normalize, rgb2hex from scipy.spatial import distance import igraph import pygame from env.simulators.game import Game from env.obs_interfaces.observation import DictObservation import numpy as np from utils.box import Box current_dir = str(Path(__file__).resolve().parent) resource_path = os.path.join(current_dir, "logistics", "resources") def load_config(file_path): with open(file_path, 'r') as f: config = json.load(f) return config class LogisticsEnv(Game, DictObservation): def __init__(self, conf): super().__init__(conf['n_player'], conf['is_obs_continuous'], conf['is_act_continuous'], 
conf['game_name'], conf['agent_nums'], conf['obs_type']) map_path = os.path.join(current_dir, "logistics", "map_1.json") self.map_conf = load_config(map_path) # self.map_conf = generate_random_map() self.max_step = int(conf['max_step']) self.step_cnt = 0 self.players = [] # self.current_state = self.init_map(self.map_conf) # self.all_observes = self.get_all_observations() self.n_return = [0] * self.n_player self.won = {} self.init_info = None self.done = False self.interface_ctrl = None self.FPSClock = None self.joint_action_space = None self.info = {} self.render_mode = False self.render_start = False self.screen = None self.all_observes = self.reset() # 每个玩家的action space list, 可以根据player_id获取对应的single_action_space # self.joint_action_space = self.set_action_space() # self.info = { # 'upper_storages': [self.players[i].upper_storage for i in range(self.n_player)], # 'upper_capacity': [act[0].high.tolist() for act in self.joint_action_space] # } def init_map(self, conf): # 添加图中的节点 vertices = conf['vertices'].copy() for vertex_info in vertices: key = vertex_info['key'] self.add_vertex(key, vertex_info) # 添加图中的有向边 edges = conf['edges'].copy() for edge_info in edges: start = edge_info['start'] end = edge_info['end'] self.add_edge(start, end, edge_info) if not self.map_conf['is_graph_directed']: # 若是无向图,则加上反方向的边 self.add_edge(end, start, edge_info) # 对每个节点进行初始化 init_state = [] for i in range(self.n_player): self.players[i].update_init_storage() init_state.append(self.players[i].init_storage) return init_state def add_vertex(self, key, vertex_info): vertex = LogisticsVertex(key, vertex_info) self.players.append(vertex) def add_edge(self, start, end, edge_info): edge = LogisticsEdge(edge_info) start_vertex = self.players[start] start_vertex.add_neighbor(end, edge) def reset(self): self.step_cnt = 0 self.players = [] self.current_state = self.init_map(self.map_conf) self.all_observes = self.get_all_observations() self.n_return = [0] * self.n_player self.won = {} self.done = False self.joint_action_space = self.set_action_space() self.info = { 'productions': [self.players[i].production for i in range(self.n_player)], 'upper_storages': [self.players[i].upper_storage for i in range(self.n_player)], 'upper_capacity': [act[0].high.tolist() for act in self.joint_action_space] } self.render_mode = False return self.all_observes def render_reset(self): network_data = self.get_network_data() self.FPSClock = pygame.time.Clock() self.interface_ctrl = LogisticsInterface(1000, 800, network_data, self.screen) def step(self, all_actions): self.step_cnt += 1 all_actions = self.bound_actions(all_actions) self.info.update({'actual_actions': all_actions}) self.current_state = self.get_next_state(all_actions) self.all_observes = self.get_all_observations() reward, single_rewards = self.get_reward(all_actions) done = self.is_terminal() self.info.update({ 'actual_actions': all_actions, 'single_rewards': single_rewards }) return self.all_observes, single_rewards, done, "", self.info def bound_actions(self, all_actions): # 对每个节点的动作进行约束 bounded_actions = [] for i in range(self.n_player): vertex = self.players[i] action = all_actions[i].copy()[0] actual_trans = sum(action) if vertex.init_storage < 0: # 初始库存量为负(有货物缺口) bounded_actions.append([0] * len(action)) elif actual_trans > vertex.init_storage: # 运出的总货物量超过初始库存量 # 每条运输途径的货物量进行等比例缩放 bounded_action = [act * vertex.init_storage / actual_trans for act in action] bounded_actions.append(bounded_action) else: # 合法动作 bounded_actions.append(action) return bounded_actions def 
get_next_state(self, all_actions): assert len(all_actions) == self.n_player # 统计每个节点当天运出的货物量out_storages,以及接收的货物量in_storages out_storages, in_storages = [0] * self.n_player, [0] * self.n_player for i in range(self.n_player): action = all_actions[i] out_storages[i] = sum(action) connections = self.players[i].get_connections() for (act, nbr) in zip(action, connections): in_storages[nbr] += act # 更新每个节点当天的最终库存量以及下一天的初始库存量, # 并记录每个节点当天最开始的初始库存start_storages、生产量productions和消耗量demands,用于可视化 next_state = [] start_storages, demands = [], [] for i in range(self.n_player): start_storages.append(self.players[i].final_storage) demands.append(self.players[i].demand) self.players[i].update_final_storage(out_storages[i], in_storages[i]) self.players[i].update_init_storage() next_state.append(self.players[i].init_storage) self.info.update({ 'start_storages': start_storages, 'demands': demands }) return next_state def get_dict_observation(self, current_state, player_id, info_before): obs = { "obs": current_state, "connected_player_index": self.players[player_id].get_connections(), "controlled_player_index": player_id } return obs def get_all_observations(self, info_before=''): all_obs = self.get_dict_many_observation( self.current_state, range(self.n_player), info_before ) return all_obs def get_reward(self, all_actions): total_reward = 0 single_rewards = [] for i in range(self.n_player): action = all_actions[i] reward = self.players[i].calc_reward(action) total_reward += reward single_rewards.append(reward) self.n_return[i] += reward return total_reward, single_rewards def set_action_space(self): action_space = [] for i in range(self.n_player): vertex = self.players[i] high = [] for j in vertex.get_connections(): edge = vertex.get_edge(j) high.append(edge.upper_capacity) action_space_i = Box(np.zeros(len(high)), np.array(high), dtype=np.float64) action_space.append([action_space_i]) return action_space def get_single_action_space(self, player_id): return self.joint_action_space[player_id] def is_terminal(self): is_done = self.step_cnt >= self.max_step if is_done: self.done = True return is_done def get_network_data(self): pd_gap, all_connections, all_times = [], [], [] for i in range(self.n_player): vertex = self.players[i] pd_gap.append(vertex.production - vertex.lambda_) connections = vertex.get_connections() all_connections.append(connections) times = [vertex.get_edge(j).trans_time for j in connections] all_times.append(times) network_data = { 'n_vertex': self.n_player, 'v_coords': self.map_conf.get('coords'), 'pd_gap': pd_gap, # 记录每个节点生产量和平均消耗量之间的差距 'connections': all_connections, 'trans_times': all_times } return network_data def get_render_data(self, current_state=None): render_data = { 'day': self.step_cnt, 'storages': self.info['start_storages'], 'productions': self.info['productions'], 'demands': self.info['demands'], 'total_reward': sum(self.n_return), 'single_rewards': self.info['single_rewards'], 'actions': self.info['actual_actions'] } return render_data def check_win(self): return '-1' def render(self): if not self.render_start: pygame.init() pygame.display.set_caption("Simple Logistics Simulator") self.screen = pygame.display.set_mode([1000, 800]) self.render_start = True if not self.render_mode: self.render_reset() self.render_mode = True render_data = self.get_render_data() current_frame = 0 while current_frame < FPS * SPD: for event in pygame.event.get(): if event.type == pygame.QUIT: pygame.quit() sys.exit() self.interface_ctrl.refresh_background(render_data) 
self.interface_ctrl.move_trucks(render_data['actions']) current_frame += 1 self.FPSClock.tick(FPS) pygame.display.update() class LogisticsVertex(object): def __init__(self, key, info): self.key = key self.connectedTo = {} self.production = info['production'] self.init_storage = 0 self.final_storage = info['init_storage'] self.upper_storage = info['upper_storage'] self.store_cost = info['store_cost'] self.loss_cost = info['loss_cost'] self.storage_loss = 0 # 更新完当天的最终库存量后,统计当天的库存溢出量 self.init_storage_loss = 0 # 因为每次状态更新会提前计算下一天的初始库存量, # 若不单独记录初始库存的溢出量,则会在计算每日reward时出错 self.lambda_ = info['lambda'] self.demand = 0 def add_neighbor(self, nbr, edge): self.connectedTo.update({nbr: edge}) def get_connections(self): return list(self.connectedTo.keys()) def get_edge(self, nbr): return self.connectedTo.get(nbr) def get_demand(self): demand = np.random.poisson(lam=self.lambda_, size=1) return demand[0] def update_init_storage(self): self.demand = self.get_demand() self.init_storage = self.final_storage - self.demand + self.production self.init_storage_loss = 0 if self.init_storage > self.upper_storage: # 当天初始库存量超过存储上限 self.init_storage_loss = self.init_storage - self.upper_storage self.init_storage = self.upper_storage def update_final_storage(self, out_storage, in_storage): self.final_storage = self.init_storage - out_storage + in_storage self.storage_loss = self.init_storage_loss if self.final_storage > self.upper_storage: # 当天最终库存量超过存储上限 self.storage_loss += (self.final_storage - self.upper_storage) self.final_storage = self.upper_storage def calc_reward(self, action, mu=1, scale=100): connections = self.get_connections() assert len(action) == len(connections) # 舍弃超过库存货物造成的损失 reward = -self.loss_cost * self.storage_loss # 当日运输货物的成本 for (act, nbr) in zip(action, connections): edge = self.get_edge(nbr) reward -= (edge.trans_cost * edge.trans_time * act) if self.final_storage >= 0: # 因库存盈余所导致的存储成本 reward -= (self.store_cost * self.final_storage) else: # 因库存空缺而加的惩罚项 reward += (mu * self.final_storage) return reward / scale class LogisticsEdge(object): def __init__(self, info): self.upper_capacity = info['upper_capacity'] self.trans_time = info['trans_time'] self.trans_cost = info['trans_cost'] # NOTE: FPS*SPD应为24的倍数,否则可能导致货车到达终点时偏移仓库图标中心 FPS = 60 # Frame Per Second,帧率,即每秒播放的帧数 SPD = 4 # Second Per Day,游戏中每天所占的秒数 class Truck(object): def __init__(self, start, end, trans_time, size=(32, 32)): self.image = pygame.image.load(os.path.join(resource_path, "img/truck.png")).convert_alpha() self.image = pygame.transform.scale(self.image, size) self.rect = self.image.get_rect() self.rect.center = start self.font = pygame.font.Font(os.path.join(resource_path, "font/simhei.ttf"), 14) self.init_pos = (self.rect.x, self.rect.y) self.total_frame = trans_time * FPS * SPD // 24 self.update_frame = 0 speed_x = 24 * (end[0] - start[0]) / (trans_time * FPS * SPD) speed_y = 24 * (end[1] - start[1]) / (trans_time * FPS * SPD) self.speed = (speed_x, speed_y) def update(self): if self.update_frame < self.total_frame: self.update_frame += 1 self.rect.x = self.init_pos[0] + self.speed[0] * self.update_frame self.rect.y = self.init_pos[1] + self.speed[1] * self.update_frame else: self.update_frame += 1 if self.update_frame >= FPS * SPD: self.update_frame = 0 self.rect.topleft = self.init_pos def draw(self, screen, action): if action <= 0: # 若货车运输量为0,则不显示 return # 当货车在道路上时才显示 if 0 < self.update_frame < self.total_frame: screen.blit(self.image, self.rect) text = self.font.render(f"{round(action, 2)}", True, (44, 44, 44), (255, 
255, 255)) text_rect = text.get_rect() text_rect.centerx, text_rect.y = self.rect.centerx, self.rect.y - 12 screen.blit(text, text_rect) class LogisticsInterface(object): def __init__(self, width, height, network_data, screen): self.width = width self.height = height self.v_radius = 42 self.n_vertex = network_data['n_vertex'] self.pd_gap = network_data['pd_gap'] # 每个节点生产量和平均消耗量之间的差距 self.connections = network_data['connections'] self.trans_times = network_data['trans_times'] self.v_coords = self._spread_vertex(network_data['v_coords']) self.v_colors = [] self.screen = screen # self.screen = pygame.display.set_mode([width, height]) self.screen.fill("white") self.font1 = pygame.font.Font(os.path.join(resource_path, "font/simhei.ttf"), 24) self.font2 = pygame.font.Font(os.path.join(resource_path, "font/simhei.ttf"), 18) self.font3 = pygame.font.Font(os.path.join(resource_path, "font/simhei.ttf"), 14) self.p_img = pygame.image.load(os.path.join(resource_path, "img/produce.png")).convert_alpha() self.p_img = pygame.transform.scale(self.p_img, (16, 16)) self.d_img = pygame.image.load(os.path.join(resource_path, "img/demand.png")).convert_alpha() self.d_img = pygame.transform.scale(self.d_img, (14, 14)) self.background = self.init_background() self.trucks = self.init_trucks() def init_background(self): # 绘制道路 drawn_roads = [] for i in range(self.n_vertex): start = self.v_coords[i] for j in self.connections[i]: if (j, i) in drawn_roads: continue end = self.v_coords[j] self._rotated_road(start, end, width=12, border_color=(252, 122, 90), fill_color=(255, 172, 77)) drawn_roads.append((i, j)) # 绘制仓库节点 norm = Normalize(vmin=min(self.pd_gap) - 200, vmax=max(self.pd_gap) + 200) # 数值映射范围(略微扩大) color_map = get_cmap('RdYlGn') # 颜色映射表 for coord, gap in zip(self.v_coords, self.pd_gap): rgb = color_map(norm(gap))[:3] color = pygame.Color(rgb2hex(rgb)) light_color = self._lighten_color(color) pygame.draw.circle(self.screen, light_color, coord, self.v_radius, width=0) pygame.draw.circle(self.screen, color, coord, self.v_radius, width=2) self.v_colors.append(light_color) # 加入固定的提示 self.add_notation() # 保存当前初始化的背景,便于后续刷新时使用 background = self.screen.copy() return background @staticmethod def _lighten_color(color, alpha=0.1): r = alpha * color.r + (1 - alpha) * 255 g = alpha * color.g + (1 - alpha) * 255 b = alpha * color.b + (1 - alpha) * 255 light_color = pygame.Color((r, g, b)) return light_color def _spread_vertex(self, v_coords): if not v_coords: # 若没有指定相对坐标,则随机将节点分布到画布上 g = igraph.Graph() g.add_vertices(self.n_vertex) for i in range(self.n_vertex): for j in self.connections[i]: g.add_edge(i, j) layout = g.layout_kamada_kawai() layout_coords = np.array(layout.coords).T else: # 否则使用地图数据中指定的节点相对坐标 layout_coords = np.array(v_coords).T # 将layout的坐标原点对齐到左上角 layout_coords[0] = layout_coords[0] - layout_coords[0].min() layout_coords[1] = layout_coords[1] - layout_coords[1].min() # 将layout的坐标映射到画布坐标,并将图形整体居中 stretch_rate = min((self.width - 2 * self.v_radius - 30) / layout_coords[0].max(), (self.height - 2 * self.v_radius - 30) / layout_coords[1].max()) margin_x = (self.width - layout_coords[0].max() * stretch_rate) // 2 margin_y = (self.height - layout_coords[1].max() * stretch_rate) // 2 vertex_coord = [] for i in range(self.n_vertex): x = margin_x + int(layout_coords[0, i] * stretch_rate) y = margin_y + int(layout_coords[1, i] * stretch_rate) vertex_coord.append((x, y)) return vertex_coord def _rotated_road(self, start, end, width, border_color=(0, 0, 0), fill_color=None): length = distance.euclidean(start, end) 
sin = (end[1] - start[1]) / length cos = (end[0] - start[0]) / length vertex = lambda e1, e2: ( start[0] + (e1 * length * cos + e2 * width * sin) / 2, start[1] + (e1 * length * sin - e2 * width * cos) / 2 ) vertices = [vertex(*e) for e in [(0, -1), (0, 1), (2, 1), (2, -1)]] if not fill_color: pygame.draw.polygon(self.screen, border_color, vertices, width=3) else: pygame.draw.polygon(self.screen, fill_color, vertices, width=0) pygame.draw.polygon(self.screen, border_color, vertices, width=2) def init_trucks(self): trucks_list = [] for i in range(self.n_vertex): start = self.v_coords[i] trucks = [] for j, time in zip(self.connections[i], self.trans_times[i]): end = self.v_coords[j] truck = Truck(start, end, time) trucks.append(truck) trucks_list.append(trucks) return trucks_list def move_trucks(self, actions): for i in range(self.n_vertex): for truck, action in zip(self.trucks[i], actions[i]): truck.update() truck.draw(self.screen, action) def refresh_background(self, render_data): day = render_data['day'] storages = render_data['storages'] productions = render_data['productions'] demands = render_data['demands'] total_reward = render_data['total_reward'] single_rewards = render_data['single_rewards'] self.screen.blit(self.background, (0, 0)) day_text = self.font1.render(f"第{day}天", True, (44, 44, 44), (255, 255, 255)) self.screen.blit(day_text, (18, 10)) r_text = self.font2.render(f"累计奖赏:{round(total_reward, 2)}", True, (44, 44, 44), (255, 255, 255)) self.screen.blit(r_text, (18, 40)) for coord, s, p, d, r, color in \ zip(self.v_coords, storages, productions, demands, single_rewards, self.v_colors): s_text = self.font3.render(f"{round(s, 2)}", True, (44, 44, 44), color) s_text_rect = s_text.get_rect() s_text_rect.centerx, s_text_rect.y = coord[0], coord[1] - 31 self.screen.blit(s_text, s_text_rect) p_text = self.font3.render(f"+{round(p, 2)}", True, (35, 138, 32), color) p_text_rect = p_text.get_rect() p_text_rect.centerx, p_text_rect.y = coord[0] + 8, coord[1] - 15 self.screen.blit(p_text, p_text_rect) p_img_rect = self.p_img.get_rect() p_img_rect.centerx, p_img_rect.y = coord[0] - 18, coord[1] - 15 self.screen.blit(self.p_img, p_img_rect) d_text = self.font3.render(f"-{round(d, 2)}", True, (251, 45, 45), color) d_text_rect = d_text.get_rect() d_text_rect.centerx, d_text_rect.y = coord[0] + 8, coord[1] + 1 self.screen.blit(d_text, d_text_rect) d_img_rect = self.d_img.get_rect() d_img_rect.centerx, d_img_rect.y = coord[0] - 18, coord[1] + 1 self.screen.blit(self.d_img, d_img_rect) r_text = self.font3.render(f"{round(r, 2)}", True, (12, 140, 210), color) r_text_rect = r_text.get_rect() r_text_rect.centerx, r_text_rect.y = coord[0], coord[1] + 17 self.screen.blit(r_text, r_text_rect) def add_notation(self): text1 = self.font3.render("黑:库存量", True, (44, 44, 44), (255, 255, 255)) self.screen.blit(text1, (18, 65)) text2 = self.font3.render(":生产量", True, (35, 138, 32), (255, 255, 255)) self.screen.blit(text2, (32, 85)) self.screen.blit(self.p_img, (17, 85)) text3 = self.font3.render(":消耗量", True, (251, 45, 45), (255, 255, 255)) self.screen.blit(text3, (32, 105)) self.screen.blit(self.d_img, (17, 105)) text4 = self.font3.render("蓝:节点奖赏", True, (12, 140, 210), (255, 255, 255)) self.screen.blit(text4, (18, 125)) MIN_PRODUCTION = 10 MAX_PRODUCTION = 50 MIN_INIT_STORAGE = 10 MAX_INIT_STORAGE = 80 MIN_UPPER_STORAGE = 80 MAX_UPPER_STORAGE = 150 # 扩大了10倍(×10) MIN_STORE_COST = 10 MAX_STORE_COST = 20 # 扩大了10倍(×10) MIN_LOSS_COST = 10 MAX_LOSS_COST = 20 MIN_LAMBDA = 10 MAX_LAMBDA = 50 MIN_UPPER_CAPACITY = 8 
MAX_UPPER_CAPACITY = 20 MIN_TRANS_TIME = 4 MAX_TRANS_TIME = 24 # 扩大了100倍(×100) MIN_TRANS_COST = 8 MAX_TRANS_COST = 12 def generate_random_map(is_graph_directed=True): num_vertex = 10 vertices, edges, connections = [], [], [] for v in range(num_vertex): vertex = { "key": v, "production": randint(MIN_PRODUCTION, MAX_PRODUCTION), "init_storage": randint(MIN_INIT_STORAGE, MAX_INIT_STORAGE), "upper_storage": randint(MIN_UPPER_STORAGE, MAX_UPPER_STORAGE), "store_cost": randint(MIN_STORE_COST, MAX_STORE_COST) / 10, "loss_cost": randint(MIN_LOSS_COST, MAX_LOSS_COST) / 10, "lambda": randint(MIN_LAMBDA, MAX_LAMBDA) } vertices.append(vertex) num_circle = randint(3, num_vertex) used_vertex = sample(list(range(num_vertex)), num_circle) for i in range(num_circle): edge = { "start": used_vertex[i], "end": used_vertex[(i + 1) % num_circle], "upper_capacity": randint(MIN_UPPER_CAPACITY, MAX_UPPER_CAPACITY), "trans_time": randint(MIN_TRANS_TIME, MAX_TRANS_TIME), "trans_cost": randint(MIN_TRANS_COST, MAX_TRANS_COST) / 100 } edges.append(edge) for v in range(num_vertex): if v in used_vertex: continue in_num = randint(1, len(used_vertex) - 1) in_vertex = sample(used_vertex, in_num) for i in in_vertex: edge = { "start": i, "end": v, "upper_capacity": randint(MIN_UPPER_CAPACITY, MAX_UPPER_CAPACITY), "trans_time": randint(MIN_TRANS_TIME, MAX_TRANS_TIME), "trans_cost": randint(MIN_TRANS_COST, MAX_TRANS_COST) / 100 } edges.append(edge) left_vertex = list(set(used_vertex).difference(set(in_vertex))) out_num = randint(1, len(used_vertex) - in_num) out_vertex = sample(left_vertex, out_num) for i in out_vertex: edge = { "start": v, "end": i, "upper_capacity": randint(MIN_UPPER_CAPACITY, MAX_UPPER_CAPACITY), "trans_time": randint(MIN_TRANS_TIME, MAX_TRANS_TIME), "trans_cost": randint(MIN_TRANS_COST, MAX_TRANS_COST) / 100 } edges.append(edge) used_vertex.append(v) map_data = { "n_vertex": num_vertex, "vertices": vertices, "is_graph_directed": is_graph_directed, "edges": edges } return map_data ``` #### File: ai_lab/env/minigrid.py ```python from gym_minigrid.wrappers import * from env.simulators.gridgame import GridGame import random from env.obs_interfaces.observation import * from utils.discrete import Discrete import tkinter import time import gym import gym_minigrid # env = gym.make('MiniGrid-DoorKey-8x8-v0') class MiniGrid(GridGame, GridObservation): def __init__(self, conf): colors = conf.get('colors', [(255, 255, 255), (0, 0, 0), (245, 245, 245)]) super(MiniGrid, self).__init__(conf, colors) # self.renderer = Renderer() self.env_core = gym.make(conf['game_name']) self.action_dim = self.env_core.action_space.n self.input_dimension = self.env_core.observation_space['image'].shape # self.obs_type = [str(i) for i in str(conf["obs_type"]).split(',')] _ = self.reset() self.is_act_continuous = False self.is_obs_continuous = True def step(self, joint_action): # action = self.decode(joint_action) # self.renderer.render(self._env_core.grid, self._env_core.agent_pos) action = joint_action info_before = self.step_before_info() next_state, reward, self.done, info_after = self.get_next_state(action) self.current_state = next_state if isinstance(reward, np.ndarray): reward = reward.tolist() reward = self.get_reward(reward) self.step_cnt += 1 done = self.is_terminal() self.all_observes = self.get_all_observes() return self.all_observes, reward, done, info_before, info_after def reset(self): obs = self.env_core.reset() self.step_cnt = 0 self.done = False self.current_state = obs self.all_observes = self.get_all_observes() return 
self.all_observes def get_next_state(self, action): action = int(np.array(action[0][0]).argmax()) observation, reward, done, info = self.env_core.step(action) return observation, reward, done, info def set_action_space(self): action_space = [[Discrete(7)] for _ in range(self.n_player)] return action_space def is_terminal(self): if self.step_cnt >= self.max_step: self.done = True return self.done def get_grid_observation(self, current_state, player_id, info_before): return current_state def get_reward(self, reward): return [reward] def check_win(self): return True def set_seed(self, seed=None): self.env_core.seed(seed) def get_all_observes(self): all_observes = [] for i in range(self.n_player): each = {"obs": self.current_state, "controlled_player_index": i} all_observes.append(each) return all_observes class Renderer: def __init__(self): self.root = None self.color = { 'red' : np.array([255, 0, 0]), 'green' : np.array([0, 255, 0]), 'blue' : np.array([0, 0, 255]), 'purple': np.array([112, 39, 195]), 'yellow': np.array([255, 255, 0]), 'grey' : np.array([100, 100, 100]) } def _close_view(self): if self.root: self.root.destory() self.root = None self.canvas = None # self.done = True def render(self, map, agent_pos): time.sleep(0.1) scale = 30 width = map.width * scale height = map.height * scale if self.root is None: self.root = tkinter.Tk() self.root.title("gym_minigrid") self.root.protocol("WM_DELETE_WINDOW", self._close_view) self.canvas = tkinter.Canvas(self.root, width=width, height=height) self.canvas.pack() self.canvas.delete(tkinter.ALL) self.canvas.create_rectangle(0, 0, width, height, fill="black") def fill_cell(x, y, color): self.canvas.create_rectangle( x * scale, y * scale, (x + 1) * scale, (y + 1) * scale, fill=color ) for x in range(map.width): for y in range(map.height): if map.grid[int(x * width / scale + y)] != None: fill_cell(x, y, map.grid[int(x * width / scale) + y].color) # fill_cell(x,y,map[x,y]) fill_cell(agent_pos[0], agent_pos[1], "Pink") self.root.update() ``` #### File: ai_lab/env/MiniWorld.py ```python from env.simulators.game import Game from env.obs_interfaces.observation import * import numpy as np import json from utils.discrete import Discrete from utils.box import Box import gym import gym_miniworld class MiniWorld(Game, VectorObservation): def __init__(self, conf): super().__init__(conf['n_player'], conf['is_obs_continuous'], conf['is_act_continuous'], conf['game_name'], conf['agent_nums'], conf['obs_type']) self.done = False self.step_cnt = 0 self.max_step = int(conf["max_step"]) self.env_core = gym.make(self.game_name) self.load_action_space(conf) observation = self.env_core.reset() if not isinstance(observation, np.ndarray): observation = np.array(observation) obs_list = observation.reshape(-1).tolist() self.won = {} self.current_state = [obs_list] * self.n_player self.all_observes = self.get_all_observes() self.n_return = [0] * self.n_player self.joint_action_space = self.set_action_space() self.action_dim = self.get_action_dim() self.input_dimension = self.env_core.observation_space self.ob_space = [self.env_core.observation_space for _ in range(self.n_player)]#60* 80 *3 self.ob_vector_shape = [self.env_core.observation_space.shape] * self.n_player self.ob_vector_range = [self.env_core.observation_space.low, self.env_core.observation_space.high] * self.n_player#??? 
self.init_info = None def load_action_space(self, conf): if "act_box" in conf: input_action = json.loads(conf["act_box"]) if isinstance(conf["act_box"], str) else conf["act_box"] # print(input_action) if self.is_act_continuous: if ("high" not in input_action) or ("low" not in input_action) or ("shape" not in input_action): raise Exception("act_box in continuous case must have fields low, high, shape") shape = tuple(input_action["shape"]) self.env_core.action_space = Box(input_action["low"], input_action["high"], shape, np.float32) else: if "discrete_n" not in input_action: raise Exception("act_box in discrete case must have field discrete_n") discrete_n = int(input_action["discrete_n"]) self.env_core.action_space = Discrete(discrete_n) def get_next_state(self, action):#action=0/1/2 observation, reward, done, info = self.env_core.step(action) return observation, reward, done, info def set_action_space(self): if self.is_act_continuous: action_space = [[self.env_core.action_space] for _ in range(self.n_player)] else: action_space = [[self.env_core.action_space] for _ in range(self.n_player)]#discrete(3) return action_space def step(self, joint_action): action = self.decode(joint_action) info_before = self.step_before_info() # print("action in step ", action) next_state, reward, self.done, info_after = self.get_next_state(action) # self.current_state = next_state if isinstance(reward, np.ndarray): reward = reward.tolist()[0] reward = self.get_reward(reward) if not isinstance(next_state, np.ndarray): next_state = np.array(next_state) next_state = next_state.tolist() self.current_state = [next_state] * self.n_player self.all_observes = self.get_all_observes() done = self.is_terminal() info_after = self.parse_info(info_after) self.step_cnt += 1 return self.all_observes, reward, done, info_before, info_after def get_reward(self, reward): r = [0] * self.n_player # print("reward is ", reward) for i in range(self.n_player): r[i] = reward self.n_return[i] += r[i] return r def decode(self, joint_action): if not self.is_act_continuous: return joint_action[0][0].index(1)#?? 
else: return joint_action[0] def step_before_info(self, info=''): return info def parse_info(self, info): new_info = {} for key, val in info.items(): if isinstance(val, np.ndarray): new_info[key] = val.tolist() else: new_info[key] = val return new_info def is_terminal(self): if self.step_cnt > self.max_step: self.done = True return self.done def check_win(self): if self.env_core.near(self.env_core.box): return 1 else: return -1 def reset(self): observation = self.env_core.reset() if not isinstance(observation, np.ndarray): observation = np.array(observation) obs_list = observation.reshape(-1).tolist() self.step_cnt = 0 self.done = False self.current_state = [obs_list] * self.n_player self.all_observes = self.get_all_observes() return self.all_observes def get_action_dim(self): action_dim = 1 print("joint action space is ", self.joint_action_space[0][0]) if self.is_act_continuous: # if isinstance(self.joint_action_space[0][0], gym.spaces.Box): return self.joint_action_space[0][0] for i in range(len(self.joint_action_space[0])): action_dim *= self.joint_action_space[0][i].n return action_dim def get_single_action_space(self, player_id): return self.joint_action_space[player_id] def get_vector_obs_config(self, player_id): return self.ob_vector_shape[player_id], self.ob_vector_range[player_id] def get_vector_many_obs_space(self, player_id_list): all_obs_space = {} for i in player_id_list: m = self.ob_vector_shape[i] all_obs_space[i] = m return all_obs_space def get_vector_observation(self, current_state, player_id, info_before): return self.current_state[player_id] def get_render_data(self, current_state): return [] def set_seed(self, seed=None): self.env_core.seed(seed) def get_all_observes(self): all_observes = [] for i in range(self.n_player): each = {"obs": self.current_state[i], "controlled_player_index": i} all_observes.append(each) return all_observes ``` #### File: ai_lab/env/mpe_jidi.py ```python import copy import numpy as np from gym.utils import seeding from env.simulators.game import Game from env.obs_interfaces.observation import * from utils.discrete import Discrete from utils.box import Box from pettingzoo.mpe import simple_v2 from pettingzoo.mpe import simple_adversary_v2 from pettingzoo.mpe import simple_crypto_v2 from pettingzoo.mpe import simple_push_v2 from pettingzoo.mpe import simple_reference_v2 from pettingzoo.mpe import simple_speaker_listener_v3 from pettingzoo.mpe import simple_spread_v2 from pettingzoo.mpe import simple_tag_v2 from pettingzoo.mpe import simple_world_comm_v2 class MPE_Jidi(Game, DictObservation): def __init__(self, conf): super(MPE_Jidi, self).__init__(conf['n_player'], conf['is_obs_continuous'], conf['is_act_continuous'], conf['game_name'], conf['agent_nums'], conf['obs_type']) self.seed = None self.done = False self.dones = {} self.step_cnt = 0 self.max_step = int(conf["max_step"]) env_name = conf["game_name"].split("-")[1] action_continues = self.is_act_continuous self.env_core = None if env_name == "simple": self.env_core = simple_v2.parallel_env(max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_adversary": self.env_core = simple_adversary_v2.parallel_env(N=2, max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_crypto": self.env_core = simple_crypto_v2.parallel_env(max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_push": self.env_core = simple_push_v2.parallel_env(max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_reference": self.env_core = 
simple_reference_v2.parallel_env(local_ratio=0.5, max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_speaker_listener": self.env_core = simple_speaker_listener_v3.parallel_env(max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_spread": self.env_core = simple_spread_v2.parallel_env(N=3, local_ratio=0.5, max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_tag": self.env_core = simple_tag_v2.parallel_env(num_good=1, num_adversaries=3, num_obstacles=2, max_cycles=25, continuous_actions=action_continues) elif env_name == "simple_world_comm": self.env_core = simple_world_comm_v2.parallel_env(num_good=2, num_adversaries=4, num_obstacles=1, num_food=2, max_cycles=25, num_forests=2, continuous_actions=action_continues) if self.env_core is None: raise Exception("MPE_Jidi env_core is None!") self.init_info = None self.won = {} self.n_return = [0] * self.n_player self.step_cnt = 0 self.done = False self.player_id_map, self.player_id_reverse_map = self.get_player_id_map(self.env_core.action_spaces.keys()) # set up action spaces self.new_action_spaces = self.load_action_space() self.joint_action_space = self.set_action_space() self.action_dim = self.joint_action_space self.input_dimension = self.env_core.observation_spaces # set up first all_observes obs = self.env_core.reset() self.current_state = obs self.all_observes = self.get_all_observes() self.dones = {agent: False for agent in self.env_core.possible_agents} def reset(self): self.step_cnt = 0 self.done = False self.init_info = None obs = self.env_core.reset() self.current_state = obs self.all_observes = self.get_all_observes() self.won = {} self.n_return = [0] * self.n_player self.dones = {agent: False for agent in self.env_core.possible_agents} return self.all_observes def step(self, joint_action): self.is_valid_action(joint_action) info_before = self.step_before_info() joint_action_decode = self.decode(joint_action) obs, reward, self.dones, info_after = self.env_core.step(joint_action_decode) info_after = '' self.current_state = obs self.all_observes = self.get_all_observes() # print("debug all observes ", type(self.all_observes[0]["obs"])) self.set_n_return(reward) self.step_cnt += 1 done = self.is_terminal() return self.all_observes, reward, done, info_before, info_after def is_valid_action(self, joint_action): if len(joint_action) != self.n_player: raise Exception("Input joint action dimension should be {}, not {}.".format( self.n_player, len(joint_action))) for i in range(self.n_player): player_name = self.player_id_reverse_map[i] if joint_action[i] is None or joint_action[i][0] is None: continue if not self.is_act_continuous: if len(joint_action[i][0]) != self.joint_action_space[i][0].n: raise Exception("The input action dimension for player {}, {} should be {}, not {}.".format( i, player_name, self.joint_action_space[i][0].n, len(joint_action[i][0]))) if not (1 in joint_action[i][0]): raise Exception("The input should be a one-hot vector!") else: if np.array(joint_action[i][0]).shape != self.joint_action_space[i][0].shape: raise Exception("The input action dimension for player {}, {} should be {}, not {}.".format( i, player_name, self.joint_action_space[i][0].shape, np.array(joint_action[i][0]).shape)) def step_before_info(self, info=''): return info def is_terminal(self): if self.step_cnt >= self.max_step: self.done = True if not self.env_core.agents: self.done = True if all(self.dones.values()): self.done = True return self.done def
get_single_action_space(self, player_id): return self.joint_action_space[player_id] def load_action_space(self): origin_action_spaces = self.env_core.action_spaces new_action_spaces = {} for key, action_space in origin_action_spaces.items(): changed_key = self.player_id_map[key] if not self.is_act_continuous: new_action_spaces[changed_key] = Discrete(action_space.n) else: new_action_spaces[changed_key] = Box(action_space.low, action_space.high, action_space.shape, np.float32) return new_action_spaces def set_action_space(self): action_space = [[self.new_action_spaces[i]] for i in range(self.n_player)] return action_space def check_win(self): if len(self.agent_nums) == 1: return self.won left = sum(self.n_return[0:self.agent_nums[0]]) right = sum(self.n_return[self.agent_nums[0]:]) if left > right: return "0" elif left < right: return "1" else: return "-1" def decode(self, joint_action): joint_action_decode = {} for act_id, nested_action in enumerate(joint_action): # print("debug nested_action ", nested_action) key = self.player_id_reverse_map[act_id] if nested_action is None or nested_action[0] is None: continue if not self.is_act_continuous: if isinstance(nested_action[0], np.ndarray): nested_action[0] = nested_action[0].tolist() joint_action_decode[key] = nested_action[0].index(1) else: joint_action_decode[key] = nested_action[0] # joint_action_decode.append(nested_action[0]) # return np.array(joint_action_decode, dtype=object) return joint_action_decode def set_n_return(self, reward): for player_key, player_reward in reward.items(): player_id = self.player_id_map[player_key] self.n_return[player_id] += player_reward def get_player_id_map(self, player_keys): player_id_map = {} player_id_reverse_map = {} for i, key in enumerate(player_keys): player_id_map[key] = i player_id_reverse_map[i] = key return player_id_map, player_id_reverse_map def create_seed(self): seed = seeding.create_seed(None, max_bytes=4) return seed def set_seed(self, seed=None): self.env_core.seed(seed) self.seed = seed def get_all_observes(self): all_observes = [] for i in range(self.n_player): player_name = self.player_id_reverse_map[i] each_obs = copy.deepcopy(self.current_state[player_name]) each = {"obs": each_obs, "controlled_player_index": i, "controlled_player_name": player_name} all_observes.append(each) return all_observes def all_equals(self, list_to_compare): return len(set(list_to_compare)) == 1 ``` #### File: ai_lab/env/sc2.py ```python import copy import sys import numpy as np from env.simulators.game import Game from utils.discrete_sc2 import Discrete_SC2 from pysc2.env import sc2_env from absl import flags FLAGS = flags.FLAGS FLAGS(sys.argv) class SC2(Game): def __init__(self, conf): super(SC2, self).__init__(conf['n_player'], conf['is_obs_continuous'], conf['is_act_continuous'], conf['game_name'], conf['agent_nums'], conf['obs_type']) self.players = [sc2_env.Agent(sc2_env.Race[agent_type]) for agent_type in conf["agent_type"]] self.env_core = sc2_env.SC2Env(map_name=conf["map_name"], players=self.players, agent_interface_format=sc2_env.AgentInterfaceFormat( feature_dimensions=sc2_env.Dimensions(screen=84, minimap=64)), step_mul=16, game_steps_per_episode=200 * 16) self.max_step = int(conf["max_step"]) self.dones = False self.done = False timesteps = self.env_core.reset() self.current_state = timesteps self.all_observes = self.get_all_observevs() self.joint_action_space = self.set_action_space(timesteps) self.action_dim = self.joint_action_space self.input_dimension = None self.init_info = None
self.step_cnt = 0 self.won = {} self.n_return = [0] * self.n_player def reset(self): self.dones = False self.done = False self.init_info = None timesteps = self.env_core.reset() self.current_state = timesteps self.all_observes = self.get_all_observevs() self.joint_action_space = self.set_action_space(timesteps) self.action_dim = self.joint_action_space self.step_cnt = 0 self.won = {} self.n_return = [0] * self.n_player def step(self, joint_action): info_before = '' joint_action_decode = self.decode(joint_action) timesteps = self.env_core.step(joint_action_decode) self.current_state = timesteps self.all_observes = self.get_all_observevs() reward = self.set_n_return() done = self.is_terminal() self.joint_action_space = self.set_action_space(timesteps) self.step_cnt += 1 info_after = '' return self.all_observes, reward, done, info_before, info_after def set_action_space(self, timesteps): new_joint_action_space = [] for timestep, agent_spec in zip(timesteps, self.env_core.action_spec()): new_joint_action_space.append([Discrete_SC2(timestep.observation.available_actions, agent_spec)]) return new_joint_action_space def get_single_action_space(self, player_id): return self.joint_action_space[player_id] def decode(self, joint_action): joint_action_decode = [] for act in joint_action: joint_action_decode.append(act[0]) return joint_action_decode def is_valid_action(self, joint_action): if len(joint_action) != self.n_player: raise Exception("Input joint action dimension should be {}, not {}".format( self.n_player, len(joint_action))) for i in range(self.n_player): if joint_action[i][0].function not in self.joint_action_space[i][0].available_actions: raise Exception("The input action dimension for player {} should be {}, does not have {}".format( i, self.joint_action_space[i][0].available_actions, joint_action[i][0].function)) def get_all_observevs(self): all_observes = [] for i in range(self.n_player): each = copy.deepcopy(self.current_state[i]) each = {"obs": each, "controlled_player_index": i} all_observes.append(each) return all_observes def set_n_return(self): reward = [] for idx, obs in enumerate(self.current_state): self.n_return[idx] += obs.reward reward.append(obs.reward) return reward def is_terminal(self): if self.step_cnt >= self.max_step: self.done = True for obs in self.current_state: if obs.last(): self.done = True return self.done def check_win(self): if len(self.n_return) == 1: return '' else: all_equal = True for i in range(1, len(self.n_return)): if self.n_return[i-1] != self.n_return[i]: all_equal = False break if all_equal: return -1 return np.argmax(self.n_return) ``` #### File: algo/maddpg/maddpg.py ```python import torch from torch import nn import torch.nn.functional as F import torch.optim as optim import numpy as np import os import sys from examples.common.buffer import Replay_buffer as buffer from networks.actor import OpenaiActor as net_a from networks.critic import OpenaiCritic as net_c class Agent(): def __init__(self, input_dim_a, input_dim_c, output_dim, lr_a=0.01, lr_c=0.01, buffer_capacity=1000000): self.lr_c = lr_c self.lr_a = lr_a self.actor_eval = net_a(input_dim_a, output_dim) self.actor_target = net_a(input_dim_a, output_dim) self.actor_target.load_state_dict(self.actor_eval.state_dict()) self.optimizer_a = optim.Adam(self.actor_eval.parameters(),lr=self.lr_a) self.critic_eval = net_c(input_dim_c[0], input_dim_c[1]) self.critic_target = net_c(input_dim_c[0], input_dim_c[1]) self.critic_target.load_state_dict(self.critic_eval.state_dict()) self.optimizer_c = 
optim.Adam(self.critic_eval.parameters(),lr=self.lr_c) self.memory = buffer(buffer_capacity, ["action"]) self.memory.init_item_buffers() def choose_action(self, observation, train=True): observation = torch.tensor(observation, dtype=torch.float64) if train: action = self.actor_eval(observation).detach().numpy() self.add_experience({"action": action}) return action else: action, _ = self.actor_target(observation, original_out=True) action = F.softmax(action, dim=-1) return {"action": action.detach().numpy()} def add_experience(self, output): for k,v in output.items(): self.memory.insert(k, None, v) class MADDPG(): def __init__(self, args): self.gamma = args.gamma #0.97 self.batch_size = args.batch_size #1256 self.agents = [] self.n = args.n_player self.lr_a = args.lr_a self.lr_c = args.lr_c self.tao = args.tao self.args = args self.step = 0 num_agent = args.n_player action_dim = args.action_space obs_dim = args.obs_space for n in range(num_agent): in_a = obs_dim[n] # the actor's input is its own observation in_c = [sum(obs_dim), sum(action_dim)] # the critic's input is (cat(obs), cat(action)) out = action_dim[n] # the output is the agent's own action_space agent = Agent(in_a, in_c, out, self.lr_a, self.lr_c, args.buffer_capacity) self.agents.append(agent) def learn(self): self.step += 1 if self.step < self.args.start_step or not self.step % self.args.target_replace==0: return for id in range(self.n): seed = np.random.randint(2**31) obs, obs_, action, reward, done = [], [], [], [], [] for agent in self.agents: np.random.seed(seed) batch = agent.memory.sample(self.batch_size) obs.append(torch.tensor(batch['states'], dtype=torch.float64)) obs_.append(torch.tensor(batch['states_next'], dtype=torch.float64)) action.append(torch.tensor(batch['action'], dtype=torch.float64)) #reward.append(torch.tensor([torch.tensor(reward, dtype=torch.float64) for reward in batch['rewards']])) reward.append(torch.cat([torch.tensor(reward, dtype=torch.float64) for reward in batch['rewards']],dim=-1).view(self.batch_size,-1)) done.append(torch.tensor([torch.tensor(done, dtype=torch.float64) for done in batch['dones']])) reward =reward[id][:,id] done = done[id] ''' obs = torch.tensor(obs, dtype=torch.float64) action = torch.tensor(action, dtype=torch.float64) reward = torch.tensor(reward, dtype=torch.float64) done = torch.tensor(done, dtype=torch.float64) obs_ = torch.tensor(obs_, dtype=torch.float64) ''' action_ = [] for n in range(self.n): action_.append(self.agents[n].actor_target(obs_[n]).detach()) action_ = torch.cat(action_, dim=1).detach() obs_ = torch.cat(obs_, dim=1) #x = torch.cat(torch.cat(obs, dim=1), torch.cat(action, dim=1), dim=1) #x_ = torch.cat((obs_, action_), dim=1) agent = self.agents[id] y_target = reward + self.gamma * torch.mul((1 - done) , agent.critic_target(obs_,action_).squeeze().detach()) y_eval = agent.critic_eval(torch.cat(obs, dim=1), torch.cat(action, dim=1)).squeeze() loss = nn.MSELoss()(y_eval, y_target) agent.optimizer_c.zero_grad() loss.backward() nn.utils.clip_grad_norm_(agent.critic_eval.parameters(), 0.5) agent.optimizer_c.step() action_train, policy = agent.actor_eval(obs[id], original_out=True) action[id] = policy loss_pse = torch.mean(torch.pow(action_train, 2)) x_train = torch.cat(action,dim=1) J = -torch.mean(agent.critic_eval(torch.cat(obs, dim=1), x_train)) agent.optimizer_a.zero_grad() (J + 1e-3 * loss_pse).backward() nn.utils.clip_grad_norm_(agent.actor_eval.parameters(), 0.5) agent.optimizer_a.step() print("Loss_q:",loss,"Loss_a:",J) for id in range(self.n): agent = self.agents[id] for p_target, p_eval in
zip(agent.actor_target.parameters(), agent.actor_eval.parameters()): p_target.data.copy_((1 - self.tao) * p_target.data + self.tao * p_eval.data) for p_target, p_eval in zip(agent.critic_target.parameters(), agent.critic_eval.parameters()): p_target.data.copy_((1 - self.tao) * p_target.data + self.tao * p_eval.data) def choose_action(self, obs, is_train=True): joint_action = [] for n in range(self.n): agent = self.agents[n] action = agent.choose_action(obs[n], is_train).detach().numpy() joint_action.append(action) return joint_action def save(self,p_dir,epoch): para_dict = {0:None ,1:None, 2:None} for n in range(self.n): agent = self.agents[n] para_dict[n] = agent.actor_target.state_dict() torch.save(para_dict, str(p_dir)+'/actor_dict_{}.pth'.format(epoch)) ``` #### File: algo/pg/pg.py ```python import numpy as np import torch import torch.optim as optim from torch.distributions import Categorical from networks.actor import Actor import os from pathlib import Path import sys base_dir = Path(__file__).resolve().parent.parent.parent sys.path.append(str(base_dir)) from common.buffer import Replay_buffer as buffer eps = np.finfo(np.float32).eps.item() def get_trajectory_property(): return ["action"] class PG(object): def __init__(self, args): self.state_dim = args.obs_space self.action_dim = args.action_space self.lr = args.lr self.gamma = args.gamma self.policy = Actor(self.state_dim, self.action_dim) self.optimizer = optim.Adam(self.policy.parameters(), lr=self.lr) self.saved_log_probs = [] self.rewards = [] self.buffer_size = args.buffer_capacity trajectory_property = get_trajectory_property() self.memory = buffer(self.buffer_size, trajectory_property) self.memory.init_item_buffers() def choose_action(self, observation, train=True): inference_output = self.inference(observation, train) if train: self.add_experience(inference_output) return inference_output def add_experience(self, output): agent_id = 0 for k, v in output.items(): self.memory.insert(k, agent_id, v) def inference(self, observation, train=True): if train: state = torch.tensor(observation, dtype=torch.float).unsqueeze(0) probs = self.policy(state) m = Categorical(probs) action = m.sample() self.saved_log_probs.append(m.log_prob(action)) else: state = torch.tensor(observation, dtype=torch.float).unsqueeze(0) probs = self.policy(state) action = torch.argmax(probs) return {"action": action.item()} def learn(self): self.rewards = self.memory.item_buffers["rewards"].data R = 0 policy_loss = [] rewards = [] for r in self.rewards[::-1]: R = r[0] + self.gamma * R rewards.insert(0, R) rewards = torch.tensor(rewards) rewards = (rewards - rewards.mean()) / (rewards.std() + eps) for log_prob, reward in zip(self.saved_log_probs, rewards): policy_loss.append(-log_prob * reward) self.optimizer.zero_grad() policy_loss = torch.cat(policy_loss).sum() policy_loss.backward() self.optimizer.step() del self.rewards[:] del self.saved_log_probs[:] def save(self, save_path, episode): base_path = os.path.join(save_path, 'trained_model') if not os.path.exists(base_path): os.makedirs(base_path) model_critic_path = os.path.join(base_path, "policy_" + str(episode) + ".pth") torch.save(self.policy.state_dict(), model_critic_path) def load(self, file): self.policy.load_state_dict(torch.load(file)) ``` #### File: algo/sac/sac.py ```python import sys from pathlib import Path import os import torch from torch.optim import Adam import torch.nn as nn import torch.nn.functional as F import numpy as np from networks.critic import Critic from networks.actor import 
NoisyActor, CategoricalActor, GaussianActor base_dir = Path(__file__).resolve().parent.parent.parent sys.path.append(str(base_dir)) from common.buffer import Replay_buffer as buffer def get_trajectory_property(): #for adding terms to the memory buffer return ["action"] def weights_init_(m): if isinstance(m, nn.Linear): torch.nn.init.xavier_uniform_(m.weight, gain=1) torch.nn.init.constant_(m.bias, 0) def update_params(optim, loss, clip=False, param_list=False,retain_graph=False): optim.zero_grad() loss.backward(retain_graph=retain_graph) if clip is not False: for i in param_list: torch.nn.utils.clip_grad_norm_(i, clip) optim.step() class SAC(object): def __init__(self, args): self.state_dim = args.obs_space self.action_dim = args.action_space self.gamma = args.gamma self.tau = args.tau self.action_continuous = args.action_continuous self.batch_size = args.batch_size self.hidden_size = args.hidden_size self.actor_lr = args.a_lr self.critic_lr = args.c_lr self.alpha_lr = args.alpha_lr self.buffer_size = args.buffer_capacity self.policy_type = 'discrete' if (not self.action_continuous) else args.policy_type #deterministic or gaussian policy self.device = 'cpu' given_critic = Critic #need to set a default value self.preset_alpha = args.alpha if self.policy_type == 'deterministic': self.tune_entropy = False hid_layer = args.num_hid_layer self.policy = NoisyActor(state_dim = self.state_dim, hidden_dim=self.hidden_size, out_dim=1, num_hidden_layer=hid_layer).to(self.device) self.policy_target = NoisyActor(state_dim = self.state_dim, hidden_dim=self.hidden_size, out_dim=1, num_hidden_layer=hid_layer).to(self.device) self.policy_target.load_state_dict(self.policy.state_dict()) self.q1 = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1.apply(weights_init_) self.q1_target = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1_target.load_state_dict(self.q1.state_dict()) self.critic_optim = Adam(self.q1.parameters(), lr = self.critic_lr) elif self.policy_type == 'discrete': self.tune_entropy = args.tune_entropy self.target_entropy_ratio = args.target_entropy_ratio self.policy = CategoricalActor(self.state_dim, self.hidden_size, self.action_dim).to(self.device) hid_layer = args.num_hid_layer self.q1 = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1.apply(weights_init_) self.q2 = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q2.apply(weights_init_) self.q1_target = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q2_target = given_critic(self.state_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1_target.load_state_dict(self.q1.state_dict()) self.q2_target.load_state_dict(self.q2.state_dict()) self.critic_optim = Adam(list(self.q1.parameters()) + list(self.q2.parameters()), lr=self.critic_lr) elif self.policy_type == 'gaussian': self.tune_entropy = args.tune_entropy self.target_entropy_ratio = args.target_entropy_ratio self.policy = GaussianActor(self.state_dim, self.hidden_size, 1, tanh = False).to(self.device) #self.policy_target = GaussianActor(self.state_dim, self.hidden_size, 1, tanh = False).to(self.device) hid_layer = args.num_hid_layer self.q1 = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1.apply(weights_init_) self.critic_optim = 
Adam(self.q1.parameters(), lr = self.critic_lr) self.q1_target = given_critic(self.state_dim+self.action_dim, self.action_dim, self.hidden_size, hid_layer).to(self.device) self.q1_target.load_state_dict(self.q1.state_dict()) else: raise NotImplementedError self.eps = args.epsilon self.eps_end = args.epsilon_end self.eps_delay = 1 / (args.max_episodes * 100) self.learn_step_counter = 0 self.target_replace_iter = args.target_replace self.policy_optim = Adam(self.policy.parameters(), lr = self.actor_lr) trajectory_property = get_trajectory_property() self.memory = buffer(self.buffer_size, trajectory_property) self.memory.init_item_buffers() if self.tune_entropy: self.target_entropy = -np.log(1./self.action_dim) * self.target_entropy_ratio self.log_alpha = torch.zeros(1, requires_grad=True, device=self.device) #self.alpha = self.log_alpha.exp() self.alpha = torch.tensor([self.preset_alpha]) self.alpha_optim = Adam([self.log_alpha], lr=self.alpha_lr) else: self.alpha = torch.tensor([self.preset_alpha]) # coefficient for the entropy term def choose_action(self, state, train = True): state = torch.tensor(state, dtype=torch.float).view(1, -1) if self.policy_type == 'discrete': if train: action, _, _, _ = self.policy.sample(state) action = action.item() self.add_experience({"action": action}) else: _, _, _, action = self.policy.sample(state) action = action.item() return {'action': action} elif self.policy_type == 'deterministic': if train: _,_,_,action = self.policy.sample(state) action = action.item() self.add_experience({"action": action}) else: _,_,_,action = self.policy.sample(state) action = action.item() return {'action':action} elif self.policy_type == 'gaussian': if train: action, _, _ = self.policy.sample(state) action = action.detach().numpy().squeeze(1) self.add_experience({"action": action}) else: _, _, action = self.policy.sample(state) action = action.item() return {'action':action} else: raise NotImplementedError def add_experience(self, output): agent_id = 0 for k, v in output.items(): self.memory.insert(k, agent_id, v) def critic_loss(self, current_state, batch_action, next_state, reward, mask): with torch.no_grad(): next_state_action, next_state_pi, next_state_log_pi, _ = self.policy.sample(next_state) #qf1_next_target, qf2_next_target = self.critic_target(next_state) qf1_next_target = self.q1_target(next_state) qf2_next_target = self.q2_target(next_state) min_qf_next_target = next_state_pi * (torch.min(qf1_next_target, qf2_next_target) - self.alpha * next_state_log_pi) # V function min_qf_next_target = min_qf_next_target.sum(dim=1, keepdim=True) next_q_value = reward + mask * self.gamma * (min_qf_next_target) #qf1, qf2 = self.critic(current_state) # Two Q-functions to mitigate positive bias in the policy improvement step, [batch, action_num] qf1 = self.q1(current_state) qf2 = self.q2(current_state) qf1 = qf1.gather(1, batch_action.long()) qf2 = qf2.gather(1, batch_action.long()) #[batch, 1] , pick the action-value for the given batched actions qf1_loss = torch.mean((qf1 - next_q_value).pow(2)) qf2_loss = torch.mean((qf2 - next_q_value).pow(2)) return qf1_loss, qf2_loss def policy_loss(self, current_state): with torch.no_grad(): #qf1_pi, qf2_pi = self.critic(current_state) qf1_pi = self.q1(current_state) qf2_pi = self.q2(current_state) min_qf_pi = torch.min(qf1_pi, qf2_pi) pi, prob, log_pi, _ = self.policy.sample(current_state) inside_term = self.alpha.detach() * log_pi - min_qf_pi # [batch, action_dim] policy_loss = ((prob * inside_term).sum(1)).mean() return policy_loss,
prob.detach(), log_pi.detach() def alpha_loss(self, action_prob, action_logprob): if self.tune_entropy: entropies = -torch.sum(action_prob * action_logprob, dim=1, keepdim=True) #[batch, 1] entropies = entropies.detach() alpha_loss = -torch.mean(self.log_alpha * (self.target_entropy - entropies)) alpha_logs = self.log_alpha.exp().detach() else: alpha_loss = torch.tensor(0.).to(self.device) alpha_logs = self.alpha.detach().clone() return alpha_loss, alpha_logs def learn(self): data = self.memory.sample(self.batch_size) transitions = { "o_0": np.array(data['states']), "o_next_0": np.array(data['states_next']), "r_0": np.array(data['rewards']).reshape(-1, 1), "u_0": np.array(data['action']), "d_0": np.array(data['dones']).reshape(-1, 1), } obs = torch.tensor(transitions["o_0"], dtype=torch.float) obs_ = torch.tensor(transitions["o_next_0"], dtype=torch.float) action = torch.tensor(transitions["u_0"], dtype=torch.long).view(self.batch_size, -1) reward = torch.tensor(transitions["r_0"], dtype=torch.float) done = torch.tensor(transitions["d_0"], dtype=torch.float) if self.policy_type == 'discrete': qf1_loss, qf2_loss = self.critic_loss(obs, action, obs_, reward, (1-done)) policy_loss, prob, log_pi = self.policy_loss(obs) alpha_loss, alpha_logs = self.alpha_loss(prob, log_pi) qf_loss = qf1_loss + qf2_loss update_params(self.critic_optim,qf_loss) update_params(self.policy_optim, policy_loss) if self.tune_entropy: update_params(self.alpha_optim, alpha_loss) self.alpha = self.log_alpha.exp().detach() if self.learn_step_counter % self.target_replace_iter == 0: #self.critic_target.load_state_dict(self.critic.state_dict()) self.q1_target.load_state_dict(self.q1.state_dict()) self.q2_target.load_state_dict(self.q2.state_dict()) self.learn_step_counter += 1 elif self.policy_type == 'deterministic': current_q = self.q1(torch.cat([obs, action], 1)) target_next_action = self.policy_target(obs_) target_next_q = self.q1_target(torch.cat([obs_, target_next_action], 1)) next_q_value = reward + (1-done) * self.gamma * target_next_q qf_loss = F.mse_loss(current_q, next_q_value.detach()) self.critic_optim.zero_grad() qf_loss.backward() self.critic_optim.step() _, _, _, current_action = self.policy.sample(obs) qf_pi = self.q1(torch.cat([obs, current_action], 1)) policy_loss = -qf_pi.mean() self.policy_optim.zero_grad() policy_loss.backward() self.policy_optim.step() if self.learn_step_counter % self.target_replace_iter == 0: for param, target_param in zip(self.q1.parameters(), self.q1_target.parameters()): target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data) for param, target_param in zip(self.policy.parameters(), self.policy_target.parameters()): target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data) elif self.policy_type == 'gaussian': action = torch.tensor(transitions["u_0"], dtype=torch.float).view(self.batch_size, -1) with torch.no_grad(): # next_action, next_action_logprob, _ = self.policy_target.sample(obs_) next_action, next_action_logprob, _ = self.policy.sample(obs_) target_next_q = self.q1_target( torch.cat([obs_, next_action], 1)) - self.alpha * next_action_logprob next_q_value = reward + (1 - done) * self.gamma * target_next_q qf1 = self.q1(torch.cat([obs, action], 1)) qf_loss = F.mse_loss(qf1, next_q_value) self.critic_optim.zero_grad() qf_loss.backward() self.critic_optim.step() pi, log_pi, _ = self.policy.sample(obs) qf_pi = self.q1(torch.cat([obs, pi], 1)) policy_loss = ((self.alpha * log_pi) - qf_pi).mean() self.policy_optim.zero_grad() 
policy_loss.backward() self.policy_optim.step() if self.tune_entropy: alpha_loss = -(self.log_alpha * (log_pi + self.target_entropy).detach()).mean() self.alpha_optim.zero_grad() alpha_loss.backward() self.alpha_optim.step() self.alpha = self.log_alpha.exp() else: pass if self.learn_step_counter % self.target_replace_iter == 0: for param, target_param in zip(self.q1.parameters(), self.q1_target.parameters()): target_param.data.copy_(self.tau * param.data + (1. - self.tau) * target_param.data) # for param, target_param in zip(self.policy.parameters(), self.policy_target.parameters()): # target_param.data.copy_(self.tau * param.data + (1.-self.tau) * target_param.data) else: raise NotImplementedError def save(self, save_path, episode): base_path = os.path.join(save_path, 'trained_model') if not os.path.exists(base_path): os.makedirs(base_path) model_actor_path = os.path.join(base_path, "actor_" + str(episode) + ".pth") torch.save(self.policy.state_dict(), model_actor_path) def load(self, file): self.policy.load_state_dict(torch.load(file)) ``` #### File: examples/common/buffer.py ```python import numpy as np class Replay_buffer(object): def __init__(self, max_size, trajectory_property): self.storage = [] self.max_size = max_size self.property_list = ['states', 'states_next', 'rewards', 'dones'] self.property_additional = trajectory_property self.properties_all = self.property_list + self.property_additional self.item_buffers = dict() self.step_index_by_env = 0 self.buffer_dict = dict() self.buffer_dict_clear() self.ptr = 0 def buffer_dict_clear(self): for item in self.properties_all: self.buffer_dict[item] = list() def init_item_buffers(self): for p in self.properties_all: self.item_buffers[p] = ItemBuffer(self.max_size, p) def insert(self, item_name:str, agent_id:int, data:np.ndarray, step=None): if item_name == 'dones': agent_id = 0 self.item_buffers[item_name].insert(agent_id, step, data) def sample(self, batch_size): self.buffer_dict_clear() data_length = len(self.item_buffers["action"].data) ind = np.random.randint(0, data_length, size=batch_size) for name, item_buffer in self.item_buffers.items(): for i in ind: self.buffer_dict[name].append(np.array(item_buffer.data[i], copy=False)) return self.buffer_dict def get_trajectory(self): self.buffer_dict_clear() data_length = len(self.item_buffers["action"].data) for name, item_buffer in self.item_buffers.items(): for i in range(data_length): self.buffer_dict[name].append(np.array(item_buffer.data[i], copy=False)) return self.buffer_dict def get_step_data(self): self.buffer_dict_clear() for name, item_buffer in self.item_buffers.items(): self.buffer_dict[name] = item_buffer.data[0] return self.buffer_dict def item_buffer_clear(self): for p in self.properties_all: self.item_buffers[p].clear() class ItemBuffer(object): def __init__(self, max_size, name): self.name = name self.max_size = max_size self.A = 1 self.data = list() self.ptr = 0 def insert(self, agent_id:int, step:int, data:np.ndarray): if len(self.data) == self.max_size: self.data.pop(0) self.data.append(data) def clear(self): del self.data[:] ``` #### File: examples/networks/actor.py ```python import torch import torch.nn as nn import torch.nn.functional as F from torch.distributions import Categorical from torch.distributions import Normal def weights_init_(m): if isinstance(m, nn.Linear): torch.nn.init.xavier_uniform_(m.weight, gain=1) torch.nn.init.constant_(m.bias, 0) class Actor(nn.Module): def __init__(self, input_size, output_size): super().__init__() self.input_size = 
input_size self.output_size = output_size self.affine1 = nn.Linear(self.input_size, 128) self.affine2 = nn.Linear(128, self.output_size) def forward(self, x): x = F.relu(self.affine1(x)) action_scores = self.affine2(x) return F.softmax(action_scores, dim=1) class NoisyActor(nn.Module): """ continuous actor with random noise """ def __init__(self, state_dim, hidden_dim, out_dim, num_hidden_layer=0, tanh=False, action_high = 1, action_low = -1): super(NoisyActor, self).__init__() self.linear_in = nn.Linear(state_dim, hidden_dim) self.linear_out = nn.Linear(hidden_dim, out_dim) self.num_hidden_layer = num_hidden_layer if self.num_hidden_layer > 0: hid_net = [] for _ in range(self.num_hidden_layer): hid_net.append(nn.Linear(hidden_dim, hidden_dim)) hid_net.append(nn.ReLU()) self.linear_hid = nn.Sequential(*hid_net) self.apply(weights_init_) self.noise = torch.Tensor(1) self.tanh = tanh if tanh: #normalise the action self.action_scale = torch.FloatTensor([(action_high - action_low) / 2.]) self.action_bias = torch.FloatTensor([(action_high + action_low) / 2.]) def forward(self, state): x = F.relu(self.linear_in(state)) if self.num_hidden_layer > 0: x = self.linear_hid(x) x = self.linear_out(x) if self.tanh: mean = torch.tanh(x) * self.action_scale + self.action_bias else: mean = x return mean def sample(self, state): """ :return: (sampled_action, prob, logprob, mean) """ mean = self.forward(state) noise = self.noise.normal_(0., std = 0.1) #all these hyperparameters can be defined in advance noise = noise.clamp(-0.25, 0.25) action = mean + noise return action, torch.tensor(1.), torch.tensor(0.), mean class CategoricalActor(nn.Module): def __init__(self, state_dim, hidden_dim, action_dim): super(CategoricalActor, self).__init__() self.linear1 = nn.Linear(state_dim, hidden_dim) self.linear2 = nn.Linear(hidden_dim, hidden_dim) self.linear3 = nn.Linear(hidden_dim, action_dim) # should be followed by a softmax layer self.apply(weights_init_) def forward(self, state): x = F.relu(self.linear1(state)) x = F.relu(self.linear2(x)) x = self.linear3(x) prob = F.softmax(x, -1) # [batch_size, action_dim] return prob def sample(self, state): prob = self.forward(state) distribution = Categorical(probs=prob) sample_action = distribution.sample().unsqueeze(-1) # [batch, 1] z = (prob == 0.0).float() * 1e-8 logprob = torch.log(prob + z) greedy = torch.argmax(prob, dim=-1).unsqueeze(-1) # 1d tensor return sample_action, prob, logprob, greedy class openai_actor(nn.Module): def __init__(self, num_inputs, action_size): super(openai_actor, self).__init__() self.tanh= nn.Tanh() self.LReLU = nn.LeakyReLU(0.01) self.linear_a1 = nn.Linear(num_inputs, 128) self.linear_a2 = nn.Linear(128, 64) self.linear_a = nn.Linear(64, action_size) self.reset_parameters() self.train() def reset_parameters(self): gain = nn.init.calculate_gain('leaky_relu') gain_tanh = nn.init.calculate_gain('tanh') nn.init.xavier_uniform_(self.linear_a1.weight, gain=nn.init.calculate_gain('leaky_relu')) nn.init.xavier_uniform_(self.linear_a2.weight, gain=nn.init.calculate_gain('leaky_relu')) nn.init.xavier_uniform_(self.linear_a.weight, gain=nn.init.calculate_gain('leaky_relu')) def forward(self, input, original_out=False): x = self.LReLU(self.linear_a1(input)) x = self.LReLU(self.linear_a2(x)) model_out = self.linear_a(x) u = torch.rand_like(model_out) policy = F.softmax(model_out - torch.log(-torch.log(u)), dim=-1) if original_out == True: return model_out, policy return policy LOG_SIG_MAX = 2 LOG_SIG_MIN = -20 epsilon = 1e-6 class 
GaussianActor(nn.Module): def __init__(self, state_dim, hidden_dim, action_dim, tanh=False, action_high = 2, action_low = -2): super(GaussianActor, self).__init__() self.linear_in = nn.Linear(state_dim, hidden_dim) self.linear_hid = nn.Linear(hidden_dim, hidden_dim) self.mean_linear = nn.Linear(hidden_dim, action_dim) self.logstd_linear = nn.Linear(hidden_dim, action_dim) self.apply(weights_init_) self.tanh = tanh if tanh: # normalise the action self.action_scale = torch.FloatTensor([(action_high - action_low) / 2.]) self.action_bias = torch.FloatTensor([(action_high + action_low) / 2.]) def forward(self, state): x = F.relu(self.linear_in(state)) x = F.relu(self.linear_hid(x)) mean = self.mean_linear(x) log_std = self.logstd_linear(x) log_std = torch.clamp(log_std, min=LOG_SIG_MIN, max=LOG_SIG_MAX) return mean, log_std def sample(self, state): mean, logstd = self.forward(state) std = logstd.exp() normal = Normal(mean, std) x_t = normal.rsample() if self.tanh: y_t = torch.tanh(x_t) action = y_t * self.action_scale + self.action_bias log_prob = normal.log_prob(x_t) log_prob -= torch.log(self.action_scale * (1 - y_t.pow(2)) + epsilon) log_prob = log_prob.sum(1, keepdim=True) mean = torch.tanh(mean) * self.action_scale + self.action_bias else: action = x_t log_prob = normal.log_prob(x_t) log_prob = log_prob.sum(1, keepdim = True) mean = mean return action, log_prob, mean ``` #### File: examples/networks/network_td3.py ```python import torch import torch.nn as nn import torch.nn.functional as F class Actor(nn.Module): def __init__(self, state_dim, action_dim, max_action): super(Actor, self).__init__() self.fc1 = nn.Linear(state_dim, 400) self.fc2 = nn.Linear(400, 300) self.fc3 = nn.Linear(300, action_dim) self.max_action = max_action def forward(self, state): a = F.relu(self.fc1(state)) a = F.relu(self.fc2(a)) a = torch.tanh(self.fc3(a)) * self.max_action return a class Critic(nn.Module): def __init__(self, state_dim, action_dim): super(Critic, self).__init__() self.fc1 = nn.Linear(state_dim + action_dim, 400) self.fc2 = nn.Linear(400, 300) self.fc3 = nn.Linear(300, 1) def forward(self, state, action): state_action = torch.cat([state, action], 1) q = F.relu(self.fc1(state_action)) q = F.relu(self.fc2(q)) q = self.fc3(q) return q ```
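The TD3-style networks above are plain feed-forward modules; a minimal usage sketch follows, where the dimensions and `max_action` value are illustrative assumptions, not values from the repository. It shows how the actor's tanh-scaled output feeds the critic:

```python
import torch

# Hypothetical sizes for illustration only.
state_dim, action_dim, max_action = 8, 2, 1.0

actor = Actor(state_dim, action_dim, max_action)
critic = Critic(state_dim, action_dim)

state = torch.randn(4, state_dim)   # batch of 4 states
action = actor(state)               # tanh output scaled into [-max_action, max_action]
q_value = critic(state, action)     # Q(s, a), shape (4, 1)
```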
{ "source": "jidiai/Competition_Olympics-Curling", "score": 3 }
#### File: rl_trainer/algo/network.py ```python import torch.cuda import torch.nn as nn import torch.nn.functional as F device = 'cuda' if torch.cuda.is_available() else 'cpu' class Actor(nn.Module): def __init__(self, state_space, action_space, hidden_size=64): super(Actor, self).__init__() self.linear_in = nn.Linear(state_space, hidden_size) self.action_head = nn.Linear(hidden_size, action_space) def forward(self, x): x = F.relu(self.linear_in(x)) action_prob = F.softmax(self.action_head(x), dim=1) return action_prob class Critic(nn.Module): def __init__(self, state_space, hidden_size=64): super(Critic, self).__init__() self.linear_in = nn.Linear(state_space, hidden_size) self.state_value = nn.Linear(hidden_size, 1) def forward(self, x): x = F.relu(self.linear_in(x)) value = self.state_value(x) return value ```
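A minimal sketch of how these actor/critic heads might be driven in a policy-gradient loop; the state and action sizes here are placeholders, not taken from the competition environment:

```python
import torch
from torch.distributions import Categorical

actor = Actor(state_space=4, action_space=3)
critic = Critic(state_space=4)

obs = torch.randn(1, 4)            # softmax(dim=1) expects a batch dimension
dist = Categorical(actor(obs))     # turn action probabilities into a distribution
action = dist.sample()             # sampled action index
log_prob = dist.log_prob(action)   # used for the policy-gradient loss
value = critic(obs)                # baseline / state value, shape (1, 1)
```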
{ "source": "jidicu/genetools", "score": 3 }
#### File: jidicu/genetools/ribosome.py ```python import re # class AminoAcid: # abbrev = "" # codon = "" # name = "" # # def __init__(abbrev, codon, name): # self.abbrev = abbrev # self.codon = codon # self.name = name # # # TODO Add polarity, charge, etc def translate(rna_seq):# Actual translation rna_seq = rna_seq.upper().replace('\n', '').replace(' ', '') peptide = '' for i in xrange(0, len(rna_seq), 3): codon = rna_seq[i: i+3] amino_acid = codon_table.get(codon, '*') if amino_acid != '*': peptide += amino_acid else: break return peptide # User input prompt and verification dna_seq = raw_input("Enter your sequence: ") allowed_char = re.compile(r"^[actguATCGU]+$") if not allowed_char.match(dna_seq): print("You did not enter a valid sequence. Restart the program and try again.") raise SystemExit if len(dna_seq)%3 != 0: print("You did not enter a sequence consisting only of codons. Restart the program and try again.") raise SystemExit dna_seq = dna_seq.upper() rna_seq = dna_seq.replace("T","U") # Initialize codon dictionary bases = ['U', 'C', 'A', 'G'] codons = [a+b+c for a in bases for b in bases for c in bases] amino_acids = 'FFLLSSSSYY**CC*WLLLLPPPPHHQQRRRRIIIMTTTTNNKKSSRRVVVVAAAADDEEGGGG' codon_table = dict(zip(codons, amino_acids)) peptide = translate(rna_seq) print("RNA sequence: " + rna_seq + "\n" + "Amino acid sequence: " + peptide) ```
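The script above targets Python 2 (`raw_input`, `xrange`). For reference, a rough Python 3 sketch of the same codon-table translation; the sample sequence is an arbitrary example, not from the repository:

```python
bases = ['U', 'C', 'A', 'G']
codons = [a + b + c for a in bases for b in bases for c in bases]
amino_acids = 'FFLLSSSSYY**CC*WLLLLPPPPHHQQRRRRIIIMTTTTNNKKSSRRVVVVAAAADDEEGGGG'
codon_table = dict(zip(codons, amino_acids))

def translate(rna_seq):
    """Translate RNA codons until a stop codon ('*') is reached."""
    peptide = ''
    for i in range(0, len(rna_seq), 3):          # range replaces Python 2's xrange
        amino_acid = codon_table.get(rna_seq[i:i + 3], '*')
        if amino_acid == '*':                    # stop codon or incomplete codon
            break
        peptide += amino_acid
    return peptide

print(translate('AUGGCCUAA'))  # -> 'MA'
```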
{ "source": "jidicula/fluoratio", "score": 3 }
#### File: jidicula/fluoratio/inpututil.py ```python import re # Recursively re-prompt until the input matches the given regex. def input_regex(prompt, regex, warning_msg): input_str = input(prompt) if re.match(regex, input_str): return input_str else: print(warning_msg) return input_regex(prompt, regex, warning_msg) ``` #### File: jidicula/fluoratio/proc_optimum.py ```python import multiprocessing as mp import time def square(x): return x*x # You can edit trials and square_range to change testing parameters trials = 10 square_range = 10000000 times = []*trials cpu_num = int(mp.cpu_count()) cpu_avgs = []*cpu_num for j in range(cpu_num): for i in range(trials): start = time.time() # Worker pool if __name__ == '__main__': # "with" will close the pool once the task is complete with mp.Pool(processes=(j+1)) as pool: pool.map(square, range(square_range)) end = time.time() print("Trial {}:".format(i+1), str(end-start), "seconds") times.append(end-start) avg = sum(times)/len(times) print("{} cpu avg:".format(j+1), avg, "seconds") cpu_avgs.append(avg) print("Optimum cpu count:", (cpu_avgs.index(min(cpu_avgs)))+1) ```
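A small usage sketch for `input_regex`; the prompt text and pattern here are made up for illustration. Note that the retry loop is implemented by recursion, so Python's recursion limit bounds the number of consecutive bad inputs:

```python
# Keep asking until the user types a positive integer.
n = input_regex(
    prompt="Number of trials: ",
    regex=r"^[1-9][0-9]*$",              # hypothetical pattern: natural numbers
    warning_msg="Please enter a whole number greater than zero.",
)
print("Running", int(n), "trials")
```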
{ "source": "jidicula/quarantine-fourteen", "score": 3 }
#### File: quarantine-fourteen/internals/__init__.py ```python import copy from typing import Callable, Dict, List, Optional, Tuple from . import asciiart class Action: """ Generic action class for updating the game state. """ def __init__(self, fn: Callable[["QuarantineStatus"], Optional[str]]): self._fn: Callable[["QuarantineStatus"], Optional[str]] = fn def apply(self, state: "QuarantineStatus"): return self._fn(state) class BasicAction(Action): """Class for an action that can be done by the user.""" def __init__(self, delta_energy: int, delta_fulfillment: int, message: str): def _basic_apply(state: "QuarantineStatus") -> str: state.energy += delta_energy state.fulfillment += delta_fulfillment return message super().__init__(_basic_apply) self.delta_energy = delta_energy self.delta_fulfillment = delta_fulfillment # Action names ACTION_GET_SLOSHED = "drink_beer" ACTION_ORDER_TAKEOUT = "eat_delivery" ACTION_COOK_FOOD = "eat_homecooked" ACTION_INFINITE_REDDIT = "scroll_reddit" ACTION_REFRESH_INBOX = "check_email" ACTION_ONLINE_SHOPPING = "buy_online" ACTION_NETFLIX_AND_CHILL_W_YOURSELF = "binge_netflix" ACTION_BRO_SPLIT_IT_UP = "workout" ACTION_VIDEO_CHAT_WITH_THE_FAM = "zoom_call" ACTION_STARE_OUT_WINDOW = "people_watch" ACTION_COFFEEDENCE = "drink_caffeine" ACTION_DANCE_LIKE_NO_ONES_WATCHING = "listen_to_radio" # ASCII art associated with each action ACTIONS_ASCII_ART: Dict[str, str] = { ACTION_GET_SLOSHED: asciiart.ACTION_GET_SLOSHED_SCENE, ACTION_ORDER_TAKEOUT: asciiart.ACTION_ORDER_TAKEOUT_SCENE, ACTION_COOK_FOOD: asciiart.ACTION_COOK_FOOD_SCENE, ACTION_INFINITE_REDDIT: asciiart.ACTION_INFINITE_REDDIT_SCENE, ACTION_REFRESH_INBOX: asciiart.ACTION_REFRESH_INBOX_SCENE, ACTION_ONLINE_SHOPPING: asciiart.ACTION_ONLINE_SHOPPING_SCENE, ACTION_NETFLIX_AND_CHILL_W_YOURSELF: asciiart.ACTION_NETFLIX_AND_CHILL_W_YOURSELF_SCENE, ACTION_BRO_SPLIT_IT_UP: asciiart.ACTION_BRO_SPLIT_IT_UP_SCENE, ACTION_VIDEO_CHAT_WITH_THE_FAM: asciiart.ACTION_VIDEO_CHAT_WITH_THE_FAM_SCENE, ACTION_STARE_OUT_WINDOW: asciiart.ACTION_STARE_OUT_WINDOW_SCENE, ACTION_COFFEEDENCE: asciiart.ACTION_COFFEEDENCE_SCENE, ACTION_DANCE_LIKE_NO_ONES_WATCHING: asciiart.ACTION_DANCE_LIKE_NO_ONES_WATCHING_SCENE, } # Action properties ACTIONS: Dict[str, Action] = { ACTION_GET_SLOSHED: BasicAction( -10, +10, "You feel refreshed, and a little bit light-headed." ), # TODO: drunk_function? # "move_room": BasicAction( # -5, 0, "You're here. Now what?" # ), # TODO: decrease fulfillment multiplicatively ACTION_ORDER_TAKEOUT: BasicAction( +5, +5, "The delivery charge brought the price up a surprising amount. Still... you deserved it.", ), # TODO: decrease energy and fulfillment multiplicatively ACTION_COOK_FOOD: BasicAction( +5, +10, "You wonder why you ever order delivery until you look at the clock." ), # TODO: decrease energy from eating too much, increase fulfillment multiplicatively ACTION_INFINITE_REDDIT: BasicAction( -5, -5, "You're getting really good at recognizing reposts. 
Those cat gifs are cute, though.", ), # TODO: decrease energy, decrease fulfillment multiplicatively ACTION_REFRESH_INBOX: BasicAction( 0, 0, 'Another corporate email about "troubling and uncertain times" and a 20% off clearance sale.', ), # TODO: decrease fulfillment multiplicatively ACTION_ONLINE_SHOPPING: BasicAction( +10, +20, "How are you spending the same amount and you can't even leave your apartment?", ), # TODO: big decrease in energy and fulfillment ACTION_NETFLIX_AND_CHILL_W_YOURSELF: BasicAction( -10, +20, "Another episode down of a show you'll watch most of and then forget.\n " "Not the worst use of time.", ), # TODO: big decrease in fulfillment # "cook_food": BasicAction(-20, +20, "TODO"), # TODO: big increase in fulfillment ACTION_BRO_SPLIT_IT_UP: BasicAction( -20, +5, "You're tired, but in a good way." ), # TODO: Fibonacci increase in fulfillment # "nap": BasicAction( # +12, -10, "What a waste of time. Refreshing, though." # ), # TODO: drop fulfillment to zero if a portion of day is spent napping ACTION_VIDEO_CHAT_WITH_THE_FAM: BasicAction( -10, 0, "Sorry, could you repeat that? The call froze." ), # TODO: decrease fulfillment multiplicatively ACTION_STARE_OUT_WINDOW: BasicAction( 0, +15, "A few people drift by, maybe 30% slower than they'd usually walk." ), ACTION_COFFEEDENCE: BasicAction( +20, 0, "The buzzing at the base of your skull is louder. \n" "Maybe you should get it looked at?", ), # TODO: drink too much, can't sleep/nap for 3 actions ACTION_DANCE_LIKE_NO_ONES_WATCHING: BasicAction( 0, +15, "For better or for worse, you're now more informed about the \n" "state of the world. Some numbers are up; others are down.", ), } # TIME OF DAY # Dictionary of day-portion tuples TIME_OF_DAY = { "morning": ("dawn", "mid-morning", "late morning"), "afternoon": ("noon", "mid-afternoon", "late afternoon"), "night": ("early evening", "dusk", "late evening", "midnight"), } def time_of_day_generator(): for day in range(0, 15): for day_portion in TIME_OF_DAY: for time in TIME_OF_DAY[day_portion]: yield day + 1, time class QuarantineStatus(object): """ Object for tracking user state. Possible rooms are "bedroom", "living room", and "kitchen". """ def __init__( self, energy: int, fulfillment: int, action_history: List[Tuple["QuarantineStatus", Action]], ): self.energy: int = energy self.fulfillment: int = fulfillment self.current_room = "bedroom" self.time_gen = time_of_day_generator() self.day_count, self.current_time = next(self.time_gen) self._action_history: List[Tuple[QuarantineStatus, Action]] = action_history @property def available_actions(self) -> List[str]: """ Returns a list of available actions by copying the state and testing if they return None. TODO: Separate "invalid" from "will just say something dumb and not do anything" """ # TODO: Performance avail = [] for k, a in ACTIONS.items(): state_copy = copy.deepcopy(self) if a.apply(state_copy) is not None: avail.append(k) return avail # When applying an action, get the Action object from the global ACTIONS # dict: `state.apply_action(ACTIONS["drink_beer"])` def apply_action(self, action_name: str) -> str: action: Action = ACTIONS[action_name] result = action.apply(self) if result is not None: # TODO: handle exception when no more iteration can be done self.day_count, self.current_time = next(self.time_gen) return result return "Sorry... that's not something you can do now." ```
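A minimal sketch of driving the state machine above; the import path assumes the package layout shown in the file header. (The `available_actions` property deep-copies the state, which CPython cannot do for the generator attribute, so this sketch sticks to `apply_action`.)

```python
from internals import QuarantineStatus, ACTION_COOK_FOOD

state = QuarantineStatus(energy=50, fulfillment=50, action_history=[])
print(state.day_count, state.current_time)      # -> 1 dawn

message = state.apply_action(ACTION_COOK_FOOD)  # advances the in-game clock on success
print(message)
print(state.energy, state.fulfillment)          # -> 55 60 (the +5/+10 deltas applied)
```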
{ "source": "JIdigit/durak", "score": 3 }
#### File: JIdigit/durak/cards.py ```python import random from igrok import HumanPlayer import pravila class Game: def __init__(self, numHuman, deck=[]): if deck == []: self.deck = ['6♣', '7♣', '8♣', '9♣', '10♣', 'J♣', 'Q♣', 'K♣', 'A♣',\ '6♦', '7♦', '8♦', '9♦', '10♦', 'J♦', 'Q♦', 'K♦', 'A♦',\ '6♥', '7♥', '8♥', '9♥', '10♥', 'J♥', 'Q♥', 'K♥', 'A♥',\ '6♠', '7♠', '8♠', '9♠', '10♠', 'J♠', 'Q♠', 'K♠', 'A♠'] random.shuffle(self.deck) else: self.deck = deck self.trump = self.deck[0] # define the players self.players = [] for i in range(numHuman): self.players.append(HumanPlayer("Player " + str(i+1))) for _ in range(6): self.players[i].addCard(self.drawCard()) def runFullGame(self): '''Start of the GAME''' print('Welcome to Durak. The trump card is ' + self.trump) print('Make your moves') self.players = pravila.playOrder(self.players,self.trump[-1]) while len(self.players) > 1: attacker = self.players[0] # the attacker moves first defender = self.players[1] # the defender goes after inPlay = [] # cards played this round attackCount = 0 maxAttackCount = min(len(defender.hand),6) # the maximum number of attacks: 6, or however many cards are left in the defender's hand attack = attacker.promptFirstAttack(defender) inPlay.append(attack) # add the card that was played attackCount +=1 defence = defender.promptDefence(attacker, attack, self.trump) while defence != "" and attack != "": inPlay.append(defence) print(defender.name + " defended with " + defence + ". Cards on the table: ") print(inPlay) attack = attacker.promptFollowupAttack(defender, inPlay, attackCount, maxAttackCount) if attack != "": # if the player attacked inPlay.append(attack) attackCount += 1 defence = defender.promptDefence(attacker, attack, self.trump) if defence == "": # if the defender cannot or will not defend print(defender.name + " picked up the cards") print(inPlay," added to the hand of ",defender.name) defender.hand += inPlay for player in self.players: # players refill their hands while cards remain in the deck if len(player.hand) < 6: for _ in range(min(6 - len(player.hand), len(self.deck))): player.addCard(self.drawCard()) self.players = self.players[2:] + self.players[:2] # the defender skips a turn if attack == "": #attacker has chosen to stop attacking print("Attack finished. Discard pile: ") print(inPlay) for player in self.players: #all players draw cards until they have 6, starting at the attacker, so long as there are cards in the deck if len(player.hand) < 6: for _ in range(min(6 - len(player.hand), len(self.deck))): player.addCard(self.drawCard()) self.players = self.players[1:] + self.players[:1] # the attacker goes to the back of the queue self.players = list(filter(lambda x: len(x.hand) != 0, self.players)) if len(self.players) == 1: # winner print(self.players[0].name + " is the winner!!!") if len(self.players) == 0: # draw print("It's a draw!!!") def drawCard(self): """Take the last card from the deck and return its value""" if self.deck != []: dealtCard = self.deck[-1] self.deck = self.deck[:-1] return dealtCard game = Game(2) game.runFullGame() ```
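The `igrok` and `pravila` modules are not included above; as an assumption-labeled sketch, a defence-legality check consistent with the card encoding used here (rank followed by a suit symbol, e.g. '10♥') could look like this. It is not the repository's actual implementation:

```python
RANKS = ['6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']

def beats(defence, attack, trump_suit):
    """Return True if `defence` beats `attack` under Durak rules (hypothetical helper)."""
    d_rank, d_suit = defence[:-1], defence[-1]
    a_rank, a_suit = attack[:-1], attack[-1]
    if d_suit == a_suit:                 # same suit: the higher rank wins
        return RANKS.index(d_rank) > RANKS.index(a_rank)
    return d_suit == trump_suit          # a trump beats any non-trump card

print(beats('Q♦', '7♦', '♣'))   # True: same suit, higher rank
print(beats('6♣', 'A♦', '♣'))   # True: trump beats non-trump
```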
{ "source": "jidn/LDS-org", "score": 3 }
#### File: jidn/LDS-org/lds_org.py ```python import os import contextlib import logging import pprint import requests __version__ = '0.2.1' CONFIG_URL = "https://tech.lds.org/mobile/ldstools/config.json" ENV_USERNAME = 'LDSORG_USERNAME' ENV_PASSWORD = 'LDSORG_PASSWORD' logger = logging.getLogger("lds-org") class Error(Exception): """Exceptions for module logic.""" pass @contextlib.contextmanager def session(username=None, password=None): """Use LDSOrg as a context manager. Example: >>> with session() as lds: ... rv = lds.get(....) """ lds = LDSOrg(username, password, signin=True) logger.debug(u"%x yielding start", id(lds.session)) yield lds logger.debug(u"%x yielding stop", id(lds.session)) lds.get('signout-url') class LDSOrg(object): """Access LDS.org JSON web tools. Access LDS.org and the lds tools in JSON. You can also use the session to access webpages and screen scrape from there. """ def __init__(self, username=None, password=None, signin=False, url=None): """Get endpoints and possibly signin. Args: username (str): LDS.org username password (str): LDS.org password signin (bool): Sign in using environment variables when not supplying the username and password url (str): override the current signin URL when it changes """ self.session = requests.Session() self.unit_number = '' self._get_endpoints() if url is None: url = self['auth-url'] if username or signin: self.signin(username, password, url) def __iter__(self): """Iterate through the endpoints.""" return iter(self.endpoints) def __getitem__(self, key): """Simplify endpoint usage.""" return self.endpoints[key] def __getattr__(self, key): """Reflect to requests.Session for any needs. Now we can use the class instance just as we would a session. """ self._debug(u'getattr %s', key) return getattr(self.session, key) def signin(self, username=None, password=None, url=None): """Sign in to LDS.org using a member username and password. While allowed, use environment variable to keep credentials out of code repositories. Environment variables are: LDSORG_USERNAME LDSORG_PASSWORD Args: username (str or None): LDS.org username or use environ password (str or None): LDS.org password or use environ url (str): Override the default endpoint url Exceptions: Error Side effects: self.signed_in = True """ if username is None: username = os.getenv(ENV_USERNAME) if password is None: password = os.getenv(ENV_PASSWORD) if url is None: url = self['auth-url'] self._debug(u'SIGNIN %s %s', username, url) rv = self.session.post(url, data={'username': username, 'password': password}) if 'etag' not in rv.headers: raise Error('Username/password failed') self._debug(u'SIGNIN success!') self.signed_in = True def _get_unit(self): """Get unit number of currently logged in user. Returns: (str) unit number Side Effect: adds attribute 'unit_number' to object """ self._debug(u'Silently get unit number') rv = self.get('current-user-unit') assert rv.status_code == 200 self._debug(u'Headers %s', pprint.pformat(rv.headers)) self.unit_number = rv.json()['message'] self._debug(u'unit number = %s', self.unit_number) return self.unit_number def get(self, endpoint, *args, **kwargs): """Get an HTTP response from endpoint or URL. Some endpoints need substitution to create a valid URL. Usually, this appears as "{}" in the endpoint. By default this method will replace any "{unit}" with the authorized user's unit number if not given.
Args: endpoint (str): endpoint or URL args (tuple): substitution for any '{}' in the endpoint kwargs (dict): unit, parameters for :meth:`requests.Session.get` unit: unit number member: member number Returns: :class:`requests.Response` Exceptions: Error for unknown endpoint KeyError for missing endpoint keyword arguments """ self._debug(u'GET %s', endpoint) try: url = self.endpoints[endpoint] except KeyError: if endpoint.startswith('http'): url = endpoint else: raise Error("Unknown endpoint", endpoint) # Get any unit or member information unit_member = dict() for key in ('member', 'unit'): try: v = kwargs.pop(key) if v is not None: unit_member[key] = v except KeyError: pass if 'unit' not in unit_member and self.unit_number: unit_member['unit'] = self.unit_number # Do any substitution in the endpoint try: url = url.format(*args, **unit_member) except IndexError: self._error(u"missing positional args %s", args) raise Error("Missing positional arguments", url, args, unit_member) except KeyError as err: if 'unit' in err.args: self._debug(u"'unit' needed. Get it and retry.") unit_member['unit'] = self._get_unit() kwargs.update(unit_member) return self.get(endpoint, *args, **kwargs) self._error(u"missing key words %s", (err.args)) raise self._debug('GET %s', url) rv = self.session.get(url, **kwargs) self._debug('Request Headers %s', pprint.pformat(dict(rv.request.headers))) try: length = len(rv.raw) except TypeError: length = 0 self._debug(u'response=%s length=%d', str(rv), length) self._debug('Response Headers %s', pprint.pformat(dict(rv.headers))) return rv def _debug(self, msg, *args): """Wrap logging with session number.""" return logger.debug(u'%x ' + msg, id(self.session), *args) def _error(self, msg, *args): """Wrap logging with session number.""" return logger.error(u'%x ' + msg, id(self.session), *args) def _get_endpoints(self): """Get the currently supported endpoints provided by LDS Tools. See https://tech.lds.org/wiki/LDS_Tools_Web_Services """ # Get the endpoints self._debug(u"Get endpoints") rv = self.session.get(CONFIG_URL) assert rv.status_code == 200 self.endpoints = rv.json() self._debug(u'Got %d endpoints', len(self.endpoints)) ep = self.endpoints for k, v in ep.items(): if not v.startswith('http'): continue # Fix unit parameter if 'unit/%@' in v: v = ep[k] = v.replace('unit/%@', 'unit/{unit}') elif 'unitNumber=%@' in v: v = ep[k] = v.replace('=%@', '={unit}') elif k.startswith('unit-') and v.endswith('/%@'): v = ep[k] = v[:-2] + '{unit}' # Fix member parameter if 'membership-record/%@' in v: v = ep[k] = v.replace('%@', '{member}') elif 'photo/url/%@' in v: v = ep[k] = v.replace('url/%@', 'url/{member}') # Fix misc for pattern in ('%@', '%d', '%.0f'): if pattern in v: v = ep[k] = v.replace(pattern, '{}') class DataAdapter(object): """Adapts dict JSON data provided by LDS.org. Allows you to access json data as attributes.
    >>> DataAdapter({'a': 123}).a
    123
    """

    def __init__(self, data):
        self._data = data

    def __getattr__(self, name):
        return self._data[name]


if __name__ == "__main__":  # pragma: no cover
    import sys
    import argparse
    import getpass
    import json

    def main():
        """Remove module execution variables from globals."""
        parser = argparse.ArgumentParser()
        parser.add_argument('-e', metavar='ENDPOINT',
                            help="Endpoint to pretty print")
        parser.add_argument('-m', metavar='MEMBER', default=None,
                            help="Member number")
        parser.add_argument('-u', metavar='UNIT', default=None,
                            help='Unit number other than authorized users')
        parser.add_argument('-j', action='store_true',
                            help="output as JSON")
        parser.add_argument('args', nargs='*',
                            help='Arguments for endpoint URLs')
        parser.add_argument('--log',
                            help='Filename for log, - for stdout')
        args = parser.parse_args()

        if args.log:
            if args.log == '-':
                h = logging.StreamHandler(sys.stdout)
            else:
                h = logging.FileHandler(args.log, 'wt')
            logger.addHandler(h)
            logger.setLevel(logging.DEBUG)

        lds = LDSOrg()
        if not args.e:
            # pprint available endpoints
            for k, v in sorted(_ for _ in lds.endpoints.items()
                               if _[-1].startswith('http')):
                print("[{:25s}] {}".format(k, v))
        else:
            username = os.getenv(ENV_USERNAME)
            password = os.getenv(ENV_PASSWORD)
            if not all((username, password)):
                logger.info("Asking for username and password.")
                asking = raw_input if sys.version_info.major < 3 else input
                username = asking('LDS.org username:')
                password = getpass.getpass('LDS.org password:')
                if not all((username, password)):
                    print("Give username and password at input or set"
                          " environment %s and %s."
                          % (ENV_USERNAME, ENV_PASSWORD))
                    sys.exit(1)
            lds.signin(username, password)
            rv = lds.get(args.e, member=args.m, unit=args.u, *args.args)
            if rv.status_code != 200:
                print("Error: %d %s" % (rv.status_code, str(rv)))
            content_type = rv.headers['content-type']
            if 'html' in content_type:
                print("<!-- %s -->" % str(rv))
                print("<!-- %s -->" % rv.url)
                print(rv.text)
            elif 'json' in content_type:
                if not args.j:
                    pprint.pprint(rv.json())
                else:
                    print(json.dumps(rv.json(), sort_keys=True))

    main()
```
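A minimal usage sketch for the module above (not part of the original repository). It assumes the LDSORG_USERNAME and LDSORG_PASSWORD environment variables are set and that tech.lds.org is reachable; `'current-user-unit'` is one of the endpoint names published in config.json and is the same endpoint `_get_unit()` queries internally.

```python
# Sketch: sign in via environment variables, hit one endpoint, sign out.
import lds_org

with lds_org.session() as lds:
    rv = lds.get('current-user-unit')   # endpoint name, not a raw URL
    print(rv.json()['message'])         # the signed-in user's unit number
```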
{ "source": "jidn/lds-temple-recommend-notice", "score": 3 }
#### File: jidn/lds-temple-recommend-notice/recommends.py
```python
import datetime
import collections
from email.mime.text import MIMEText
try:
    import ConfigParser as configparser
except ImportError:
    import configparser
try:
    from future_builtins import filter, map
except ImportError:
    pass

import lds_org
import email_markdown
from dateutil import relativedelta

__version__ = "1.0rc1"

BISHOPRIC = {'Bishop': 'bishop',
             'Bishopric First Counselor': 'counselor1',
             'Bishopric Second Counselor': 'counselor2',
             'Ward Executive Secretary': 'exec_sec',
             'Ward Clerk': 'ward_clerk'}


def find_yyyymm(months, today=None):
    """Get the month relative to this month.

    Args:
        months (int): 1 is next month, -1 last month
        today (datetime.date): if None use datetime.date.today()
    Returns: (str) YYYYMM

    >>> find_yyyymm(0, datetime.date(2017, 9, 4))
    '201709'
    >>> find_yyyymm(-1, datetime.date(2017, 3, 30))
    '201702'
    """
    if today is None:
        today = datetime.date.today()
    relative = today + relativedelta.relativedelta(months=months)
    return str(relative.year * 100 + relative.month)


class PeopleAdapter(lds_org.DataAdapter):
    """Adapts recommend data.

    >>> PeopleAdapter({'phone': '555-3315'}).phone
    '555-3315'
    >>> PeopleAdapter(dict(phone='', householdPhone='5551234')).phone
    '5551234'
    >>> PeopleAdapter({'email': '<EMAIL>'}).email
    '<EMAIL>'
    >>> PeopleAdapter(dict(email='', householdEmail='<EMAIL>')).email
    '<EMAIL>'
    >>> PeopleAdapter({'name': 'Doe, John'}).surname
    'Doe'
    """

    @property
    def phone(self):
        return self._data['phone'] or self.householdPhone

    @property
    def email(self):
        return self._data['email'] or self.householdEmail

    @property
    def surname(self):
        try:
            return self._data['surname']
        except KeyError:
            # Needed for callings
            return self._data['name'].split(',')[0].strip()

    def __repr__(self):
        return "<PeopleAdapter.name=%s>" % self.name


class RecommendGroup(object):
    """Create recommend reports from a config file."""

    def __init__(self, config_file='config.cfg'):
        self.groups = {}
        self.report = {}
        self.get_config_info(file_name=config_file)

    def get_config_info(self, file_name, section='Reports'):
        """Load report definitions from a config file.

        [Reports]
        x_title = Expire this month
        x_head = 0
        x_tail = 0
        y_title = Expire next month
        y_head = 1
        y_tail = 1
        z_title = Expired in last 3 months
        z_head = -3
        z_tail = -1

        Args:
            file_name (str): the INI filename
            section (str): the INI section name
        Side effects:
            self.config (configparser)
        """
        self.config = configparser.ConfigParser()
        self.config.read(file_name)
        options = self.config.items(section)
        self.groups = {}
        # Get the names of all reports
        names = {_[0].split('_')[0] for _ in options}
        today = datetime.date.today()
        for name in names:
            head = self.config.getint(section, name + '_head')
            tail = self.config.getint(section, name + '_tail')
            head = find_yyyymm(head, today)
            tail = find_yyyymm(tail, today)
            func = lambda v, h=head, t=tail: h <= v <= t
            self.groups[name] = [func,
                                 self.config.get(section, name + '_title')]

    def select_recommends(self, recommends):
        """Create expiring reports for a sequence of recommends.

        Args:
            recommends (list[dict]): long list of recommend holders
        Side effects:
            self.report (dict): text reports, one for each of the
                criteria in the config.
""" selected = collections.defaultdict(list) for recommend in recommends: try: expire_date = recommend['expirationDate'][:6] except TypeError: expire_date = '000000' for k, v in self.groups.items(): if v[0](expire_date): #print(k, expire_date, recommend['name']) selected[k].append(PeopleAdapter(recommend)) self.report = {} for k, v in selected.items(): v.sort(key=lambda x: x.name) title = self.groups[k][-1] self.report[k] = self.table(v) def table(self, recommends): """Create markdown tables. Args: recommends (iterable): of recommends to include Returns: (str) A table of Name | Phone | Email """ headers = ('Name', 'Expires', 'Phone', 'Email') keys = ('name', 'expire', 'phone', 'email') widths = [len(_) for _ in headers] text = [] # Get column widths for p in recommends: date = p.expirationDate p._data['expire'] = "{}-{}".format(date[:4], date[4:6]) for idx, k in enumerate(keys): widths[idx] = max(widths[idx], len(getattr(p, k))) # Markdown table header text.append(' | '.join("{1:{0}s}".format(w, _) for w, _ in zip(widths, headers))) text.append(' | '.join("-"*w for w in widths)) # Markdown table body for p in recommends: fields = ["{0:{1}s}".format(getattr(p, k), w) for k, w in zip(keys, widths)] text.append(' | '.join(fields).rstrip()) text.append('') return '\n'.join(text) def send_email_to_bishopric(self, smtp, to_whom, bishopric, test=False): """Send email to bishop with his reports. Args: smtp (instance): smtplib.SMTP instance to_whom (str): either 'BISHOP', or 'COUNSELOR' bishopric (dict): from Membership.bishopric() test (boolean): send email only to BCC-ADDR Returns: Email for sending """ section = 'Email' text = [self.config.get(section, to_whom+'-MSG'), ''] reports = self.config.get(section, to_whom+'-REPORTS') if not reports: return None for report in reports.split(' '): title = self.groups[report][-1] text.extend([title, '=' * len(title)]) text.append(self.report[report]) params = {'From': self.config.get(section, 'FROM-ADDR')} if to_whom == 'BISHOP': params['Subject'] = 'Temple recommends - Bishop' params['To'] = (bishopric['bishop'].email,) elif to_whom == 'COUNSELOR': params['Subject'] = 'Temple recommend renewals' params['To'] = [bishopric[_].email for _ in ('counselor1', 'counselor2')] params['Cc'] = (bishopric['bishop'].email,) if self.config.has_option(section, 'BCC-ADDR'): params['Bcc'] = (self.config.get(section, 'BCC-ADDR'),) to_addr = set() for group in ('To', 'Cc', 'Bcc'): addrs = params.get(group, tuple()) if not addrs: continue to_addr.update(set(addrs)) params[group] = ', '.join(addrs) if test: to_addr = [self.config.get(section, 'BCC-ADDR')] email_msg = email_markdown.Email().make('\n'.join(text), **params) pdb.set_trace() smtp.sendmail(params['From'], to_addr, email_msg.as_string()) def send_member_notices(self, smtp, bishopric, recommends, test=False): """Send email to members about expiring recommends. 
        Args:
            smtp (instance): smtplib.SMTP instance
            bishopric (instance): Membership.bishopric()
            recommends (list): series of recommends
            test (boolean): send email only to BCC-ADDR
        """
        section = 'Email'
        body = self.config.get(section, 'MEMBER-MSG')
        body = body.format(**bishopric)
        bcc = set()
        start, end = tuple(find_yyyymm(self.config.getint(section, x))
                           for x in ('MEMBER-HEAD', 'MEMBER-TAIL'))
        logger = ['Looking for members to notify.']
        for recommend in recommends:
            try:
                expire_date = recommend['expirationDate'][:6]
            except TypeError:
                expire_date = '000000'
            if start <= expire_date <= end:
                addr = recommend['email'] or recommend['householdEmail']
                if addr:
                    logger.append('{0[name]} {1}'.format(recommend, addr))
                    bcc.add(addr)
                else:
                    logger.append('{0[name]} missing email.'.format(recommend))

        from_addr = self.config.get(section, 'FROM-ADDR')
        if not test:
            email_msg = email_markdown.Email().make(
                body, From=from_addr, To='undisclosed-recipients',
                Subject='Your temple recommend will expire soon')
            err = smtp.sendmail(from_addr, list(bcc), email_msg.as_string())
            if err:
                logger.append("Errors with %s" % str(err))

        logger.append("\r\nMember emails sent to:")
        logger.extend(bcc)
        email_text = MIMEText('\r\n'.join(logger))
        email_text['From'] = email_text['To'] = from_addr
        email_text['Subject'] = "Temple Recommend - Member emails"
        smtp.sendmail(from_addr, [from_addr], email_text.as_string())


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-e', action='store_true', help='send emails')
    parser.add_argument('--be', metavar='EMAIL', help='bishop email')
    parser.add_argument('--c1', metavar='EMAIL',
                        help='first counselor email')
    parser.add_argument('--c2', metavar='EMAIL',
                        help='second counselor email')
    parser.add_argument('--cfg', metavar='FILE', default='config.cfg',
                        help='configuration file')
    parser.add_argument('--test', action='store_true',
                        help='send email with bishops name to self')
    args = parser.parse_args()

    with lds_org.session() as lds:
        callings = lds.get('callings-with-dates').json()
        predicate = lambda x: x['position'] in BISHOPRIC.keys()
        people = map(PeopleAdapter, filter(predicate, callings))
        bishopric = dict((BISHOPRIC[_.position], _) for _ in people)
        if args.be:
            bishopric['bishop']._data['email'] = args.be
        if args.c1:
            bishopric['counselor1']._data['email'] = args.c1
        if args.c2:
            bishopric['counselor2']._data['email'] = args.c2

        recommends = lds.get('temple-recommend-status').json()
        rg = RecommendGroup(config_file=args.cfg)
        rg.select_recommends(recommends)

        if args.e or args.test:
            server = email_markdown.get_smtp(args.cfg, 'SMTP')
            rg.send_member_notices(server, bishopric, recommends,
                                   test=args.test)
            rg.send_email_to_bishopric(server, 'BISHOP', bishopric,
                                       test=args.test)
            rg.send_email_to_bishopric(server, 'COUNSELOR', bishopric,
                                       test=args.test)
        else:
            g = sorted(rg.groups.items(), key=lambda x: x[-1][-1])
            for v in g:
                title = v[-1][-1]
                print(title)
                print('=' * len(title))
                print(rg.report[v[0]])
```
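A small self-contained sketch (not from the repository) of how the `_head`/`_tail` integers in the `[Reports]` section become a YYYYMM string window: `get_config_info` above converts each pair with `find_yyyymm` and keeps a closed-interval predicate per report. The dates here are illustrative.

```python
# Sketch: the head/tail -> YYYYMM window used by RecommendGroup.
import datetime
from dateutil import relativedelta

def find_yyyymm(months, today=None):
    # Same helper as in recommends.py above.
    if today is None:
        today = datetime.date.today()
    relative = today + relativedelta.relativedelta(months=months)
    return str(relative.year * 100 + relative.month)

today = datetime.date(2017, 9, 4)
head = find_yyyymm(-3, today)   # '201706': three months back
tail = find_yyyymm(-1, today)   # '201708': last month
in_window = lambda v, h=head, t=tail: h <= v <= t   # mirrors get_config_info
print(in_window('201707'))      # True  -> expired two months ago
print(in_window('201709'))      # False -> still current this month
```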
{ "source": "Jidnyesh/udemy-course-cracker", "score": 3 }
#### File: Jidnyesh/udemy-course-cracker/main.py
```python
from requests import get
from pattern.web import plaintext
import sys
import webbrowser

headers = {
    'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/56.0.2924.87 Safari/537.36',
}

with open('wordlist.txt', 'r') as wordl:
    wordlist = wordl.readlines()
wordlist = [x.strip() for x in wordlist]

course = input('Paste the url of course you want to crack\n')
price = input('What is the current price of the course\n')
compare = 'Current price: FreeOriginal price: ₹' + price + 'Discount:100'


def attack(course):
    for word in wordlist:
        url = course + '?couponCode=' + word
        print("Trying : " + word)
        htmlString = get(url, headers=headers).text
        webText = plaintext(htmlString)
        if compare in webText:
            print('\n' + word
                  + ' is the coupon code for the course and it is free now')
            webbrowser.open_new_tab(url)
            sys.exit()


attack(course=course)
```
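A rough sketch (not in the original script) of the same probe using only `requests`, since `pattern.web` is Python 2 era and often unavailable. The marker string is an assumption distilled from the `compare` check above and will break whenever Udemy changes its markup; the course URL and coupon code in the usage comment are hypothetical.

```python
# Sketch: probe one coupon code against a course page with plain requests.
import requests

HEADERS = {'user-agent': 'Mozilla/5.0'}

def try_coupon(course_url, code, marker='Current price: Free'):
    # Assumption: a free course renders this marker in the page body.
    resp = requests.get(course_url, params={'couponCode': code},
                        headers=HEADERS, timeout=10)
    return marker in resp.text

# Example (hypothetical):
# print(try_coupon('https://www.udemy.com/course/example/', 'FREE100'))
```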
{ "source": "jidpn/tcgcreator_eternal_beta", "score": 2 }
#### File: mysite/tcgcreator/answer.py ```python from django.db.models import Q from django.http import HttpResponse, HttpResponseRedirect from django.urls import reverse from django.utils.html import format_html from time import time from .battle_functions import init_duel from django.db import connection import os from .models import ( Monster, FieldSize, Deck, Grave, Hand, Duel, Config, MonsterEffectWrapper, CostWrapper, Trigger, PacWrapper, Lock, DefaultDeck, UserDeck, UserDeckGroup, DuelDeck ) from pprint import pprint from .battle_det import battle_det from .choices import lock_lock from .custom_functions import cheat_get, create_user_deck_det import json import copy from .duel import DuelObj check_array = [] def cheat2(request): room_number = int(request.POST["room_number"]) user_turn = int(request.POST["user_turn"]) config = Config.objects.get() pwd = os.path.dirname(__file__) if config.cheat is False: return game_name = config.game_name if(user_turn == 1): log2 = open(pwd + "/logger2_" + game_name, mode="r", encoding="utf-8") else: log2 = open(pwd + "/logger3_" + game_name, mode="r", encoding="utf-8") with connection.cursor() as cursor: cursor.execute(log2.read()) connection.commit() duel = Duel.objects.filter(id=room_number).get() duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number duelobj.user = 1 duelobj.init_all(1, 2, room_number) return battle_det(request, duelobj) def answer_cost(duelobj, duel, request, room_number, lock): if duel.guest_flag == False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 == False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" answer = json.loads(request.POST["answer"]) if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag): if ( duel.cost_user == 1 and duel.user_turn == 1 or duel.cost_user == 2 and duel.user_turn == 2 ): if duel.ask == 1 or duel.ask == 3: return answer_det_cost( duelobj, duel, 1, answer, request, 1, room_number, lock,ID1,ID2 ) else: if duel.ask == 2 or duel.ask == 3: return answer_det_cost( duelobj, duel, 1, answer, request, 2, room_number, lock,ID1,ID2 ) elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2): if ( duel.cost_user == 2 and duel.user_turn == 2 or duel.cost_user == 1 and duel.user_turn == 1 ): if duel.ask == 1 or duel.ask == 3: return answer_det_cost( duelobj, duel, 2, answer, request, 1, room_number, lock,ID1,ID2 ) else: if duel.ask == 2 or duel.ask == 3: return answer_det_cost( duelobj, duel, 2, answer, request, 2, room_number, lock,ID1,ID2 ) free_lock(room_number, lock) return HttpResponse("error") def chooseguestname(request): config = Config.objects.get(); room_time = config.room_time limit_time = config.limit_time room_number = int(request.POST["room_number"]) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 is False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if ID1 == ID and duel.guest_flag is True: user = 1 elif duel.user_1 is not None and duel.user_1 == request.user: user = 1 elif ID2 == ID and duel.guest_flag2 is True: user = 2 elif duel.user_2 is not None and duel.user_2 == request.user: user = 2 if user == 1 and duel.guest_name != "": return HttpResponse("error") elif user == 2 and duel.guest_name2 != "": return HttpResponse("error") if user == 1: if time() - duel.time_2 > limit_time * 2: duel.winner = user duel.save() return 
HttpResponse("time") duel.guest_name = format_html(request.POST["guest_name"]) duel.time_1 = time() elif user == 2: if time() - duel.time_1 > limit_time * 2: duel.winner = user duel.save() return HttpResponse("time") duel.guest_name2 = format_html(request.POST["guest_name"]) duel.time_2 = time() duel.save() return HttpResponse("true") def chooseuserdeck(request): config = Config.objects.get(); room_time = config.room_time limit_time = config.limit_time room_number = int(request.POST["room_number"]) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 is False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if time() - duel.time_1 > limit_time * 2: return HttpResponse("time") if ID1 == ID and duel.guest_flag is True: user = 1 elif duel.user_1 is not None and duel.user_1 == request.user: user = 1 elif ID2 == ID and duel.guest_flag2 is True: user = 2 elif duel.user_2 is not None and duel.user_2 == request.user: user = 2 if user == 1 and duel.deck_choose_flag1 is False: return HttpResponse("error") elif user == 2 and duel.deck_choose_flag2 is False: return HttpResponse("error") decks = Deck.objects.all() user_deck = int(request.POST["user_deck"]) user_decks = UserDeck.objects.filter(deck_group__id=user_deck) i = 1 for deck in decks: user_deck = user_decks.filter(deck_type=deck).first() if not user_deck: if not user_decks: return HttpResponse("error") else: user_deck_det = user_deck.deck.split("_") user_deck_det = create_user_deck_det(user_deck.deck, i, user) if deck.mine_or_other == 1: DuelDeck.objects.filter( room_number=room_number, mine_or_other=3, deck_id=i ).delete() DuelDeck( room_number=room_number, mine_or_other=3, deck_id=i, deck_content=user_deck_det, id=i*100+room_number*10+3 ).save() else: DuelDeck.objects.filter( room_number=room_number, mine_or_other=user, deck_id=i ).delete() DuelDeck( room_number=room_number, mine_or_other=user, deck_id=i, deck_content=user_deck_det, id=i*100+room_number*10+user ).save() i += 1 if user == 1: duel.deck_choose_flag1 = False duel.time_1 = time() elif user == 2: duel.deck_choose_flag2 = False duel.time_2 = time() duel.save() return HttpResponse("true") def choosedeck(request): config = Config.objects.get(); room_time = config.room_time limit_time = config.limit_time room_number = int(request.POST["room_number"]) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 is False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if time() - duel.time_1 > limit_time * 2: return HttpResponse("time") if ID1 == ID and duel.guest_flag is True: user = 1 elif duel.user_1 is not None and duel.user_1 == request.user: user = 1 elif ID2 == ID and duel.guest_flag2 is True: user = 2 elif duel.user_2 is not None and duel.user_2 == request.user: user = 2 if user == 1 and duel.deck_choose_flag1 is False: return HttpResponse("error") elif user == 2 and duel.deck_choose_flag2 is False: return HttpResponse("error") decks = Deck.objects.all() default_deck = request.POST["default_deck"] user_decks = DefaultDeck.objects.filter(deck_group__id=default_deck) i = 1 for deck in decks: user_deck = user_decks.filter(deck_type=deck).first() if not user_deck: if not user_decks: return HttpResponse("error") else: user_deck_det = user_deck.deck.split("_") user_deck_det = create_user_deck_det(user_deck.deck, i, user) 
if deck.mine_or_other == 1: DuelDeck.objects.filter( room_number=room_number, mine_or_other=3, deck_id=i ).delete() DuelDeck( room_number=room_number, mine_or_other=3, deck_id=i, deck_content=user_deck_det, id=i*100+room_number*10+3 ).save() else: DuelDeck.objects.filter( room_number=room_number, mine_or_other=user, deck_id=i ).delete() DuelDeck( room_number=room_number, mine_or_other=user, deck_id=i, deck_content=user_deck_det, id=i*100+room_number*10+user ).save() i += 1 if user == 1: duel.deck_choose_flag1 = False duel.time_1 = time() elif user == 2: duel.deck_choose_flag2 = False duel.time_2 = time() duel.save() return HttpResponse("true") def chooseai(request): config = Config.objects.get(); room_time = config.room_time limit_time = config.limit_time room_number = int(request.POST["room_number"]) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if time() - duel.time_1 > limit_time * 2: return HttpResponse("time") if ID1 != ID and duel.user_1 != request.user: return HttpResponse("error") default_deck = request.POST["default_deck"] if(default_deck == "-1"): default_deck = None #user_deck = request.POST["user_deck"] #if(user_deck == "-1"): user_deck = None enemy_deck = request.POST["enemy_deck"] if(init_duel(room_number,request.user,default_deck,enemy_deck,False,False,False,user_deck,1)): return HttpResponse("error") else: return HttpResponse("true") def cheat(request): room_number = int(request.POST["room_number"]) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag == False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 == False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" place = request.POST["place"] deck_id = int(request.POST["deck_id"]) monster_id = request.POST["monster_id"].split("_") if len(monster_id) >=2: bottom = True else: bottom = False id = int(monster_id[0]) mine_or_other = int(request.POST["mine_or_other"]) config = Config.objects.get() if config.cheat is False: return duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): user = 1 duelobj.user = 1 other_user = 2 elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): user = 2 duelobj.user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) if mine_or_other == 2: owner = other_user else: owner = user card = cheat_get(id, deck_id, owner, place) if place == "deck": if mine_or_other == 1: deck = duelobj.decks[deck_id]["mydeck"] elif mine_or_other == 2: deck = duelobj.decks[deck_id]["otherdeck"] else: deck = duelobj.decks[deck_id]["commondeck"] user_decks = deck if bottom == True: user_decks.append(card) else: user_decks.insert(0, card) if mine_or_other == 1: duelobj.decks[deck_id]["mydeck"] = user_decks elif mine_or_other == 2: duelobj.decks[deck_id]["otherdeck"] = user_decks else: duelobj.decks[deck_id]["commondeck"] = user_decks elif place == "grave": if mine_or_other == 1: grave = duelobj.graves[deck_id]["mygrave"] elif mine_or_other == 2: grave = duelobj.graves[deck_id]["othergrave"] else: grave = duelobj.graves[deck_id]["commongrave"] user_graves = grave if bottom == True: user_graves.append( card) else: user_graves.insert(0, card) if mine_or_other == 1: duelobj.graves[deck_id]["mygrave"] = user_graves elif mine_or_other == 2: 
duelobj.graves[deck_id]["othergrave"] = user_graves else: duelobj.graves[deck_id]["commongrave"] = user_graves elif place == "hand": if mine_or_other == 1: hand = duelobj.hands[deck_id]["myhand"] elif mine_or_other == 2: hand = duelobj.hands[deck_id]["otherhand"] else: hand = duelobj.hands[deck_id]["commonhand"] user_hands = hand if bottom == True: user_hands.append( card) else: user_hands.insert(0, card) if mine_or_other == 1: duelobj.hands[deck_id]["myhand"] = user_hands elif mine_or_other == 2: duelobj.hands[deck_id]["otherhand"] = user_hands else: duelobj.hands[deck_id]["commonhand"] = user_hands duelobj.save_all(user, other_user, room_number) return battle_det(request, duelobj) def cancel(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duel = Duel.objects.filter(id=room_number).get() duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number ID1 = duel.guest_id ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if request.user != duel.user_1 and request.user != duel.user_2 : if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponse("Please Login") if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag): user = 1 duelobj.user = 1 other_user = 2 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) if duel.in_cost is False or duel.in_trigger_waiting is True or duel.in_cost_cancel is False or duel.in_cost_force is True: free_lock(room_number, lock) return HttpResponse("error") if duel.in_cost == 1 or duel.in_cost is True: in_pac = json.loads(duel.in_pac) in_pac[str(duel.chain - 1)] = [] duel.in_cost = False duelobj.in_execute = False cost = duelobj.cost if duel.chain == 0: del cost[str(duelobj.tmp_chain)] elif duel.chain >0: del cost[str(duel.chain)] duelobj.cost = cost duelobj.cost_result = {} ''' if duel.chain > 0: chain_det_trigger_json = json.loads(duel.chain_det_trigger) del chain_det_trigger_json[str(duel.chain)] duel.chain_det_trigger = json.dumps(chain_det_trigger_json) trigger_id = chain_det_trigger_json[str(duel.chain - 1)] trigger = Trigger.objects.get(id=trigger_id) if trigger.pac: in_pac[str(duel.chain - 1)].append(trigger.pac.id) duel.in_pac = json.dumps(in_pac) ''' duel.in_pac_cost = "[]" duel.cost_log = "" duel.ask = 0 duel.canbechained = True duel.tmponce_per_turn1 = "" duel.tmponce_per_turn_group1 = "" duel.tmponce_per_turn_group2 = "" duel.tmponce_per_turn2 = "" duel.tmponce_per_turn_monster1 = "" duel.tmponce_per_turn_monster2 = "" duel.tmponce_per_turn_exist1 = "" duel.tmponce_per_turn_exist2 = "" duel.tmponce_per_turn_relate1 = "" duel.tmponce_per_turn_relate2 = "" duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return HttpResponse("OK") def none(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duel = Duel.objects.filter(id=room_number).get() decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() if duel.ask > 0: free_lock(room_number, lock) return HttpResponse("error") duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = 
duel.guest_id2 if request.user != duel.user_1 and request.user != duel.user_2 : if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponse("Please Login") if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag): user = 1 duelobj.user = 1 other_user = 2 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) duelobj.check_eternal_effect( duelobj.decks, duelobj.graves, duelobj.hands, duel.phase, duel.user_turn, user, other_user, ) chain_user = duelobj.get_current_chain_user() choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) choices2 = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, other_user, user ) if duel.in_cost is True: free_lock(room_number, lock) return HttpResponse("error") if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag): user = 1 other_user = 2 if duel.appoint != 1: free_lock(room_number, lock) return HttpResponse("error") if duel.none == False: duel.appoint = 2 duel.none = True duel.timing_fresh = False else: duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 2 duel.none = False if duel.is_ai is True: duel.timing_fresh = False duelobj.save_all(user, other_user, room_number) if duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2): user = 2 other_user = 1 if duel.appoint != 2: free_lock(room_number, lock) return HttpResponse("error") if duel.none == False: duel.appoint = 1 duel.none = True duel.timing_fresh = False else: duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 1 duel.none = False if duel.is_ai is True: duel.timing_fresh = False ''' if duel.chain == 0 and duel.user_turn == user: if choices2[0] is None: duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 1 elif duel.chain > 0 and chain_user != user: if choices2[0] is None or choices2[1] : duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 1 elif duel.user_turn == user: duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 1 else: duel.current_priority = max(choices[1], choices2[1]) duel.appoint = 1 ''' duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) data = {} data["sound_effect"] = duelobj.sound_effect data["OK"] = True return HttpResponse(json.dumps(data)) def multiple_answer_det( duelobj, duel, user, answer_json, request, del_ask, room_number, lock ): global check_array if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 answer = json.loads(answer_json) room_number = int(request.POST["room_number"]) chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) chain_user = int(chain_user[str(duel.chain - 1)]) if duel.in_copying is True: duelobj.tmp_chain = str(duel.chain - 1) else: duelobj.tmp_chain = str(duel.chain) if chain_user == 0: if request.user == duel.user_1 or (ID1 == ID and duel.guest_flag): chain_user = 1 else: chain_user = 2 if user == 1: other_user = 2 else: other_user = 1 monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) monster_effect = monster_effect_wrapper.monster_effect monster_condition = 
monster_effect.monster_condition if monster_condition != "": monster_condition = json.loads(monster_condition) monster_condition = monster_condition["monster"][0]["monster"] monster_effect_text = json.loads(monster_effect.monster_effect) if "double" not in monster_effect_text or monster_effect_text["double"] is False: for tmp in answer: for tmp3 in tmp: if tmp.count(tmp3) > 2: return HttpResponse("error") exclude = monster_effect_text["exclude"] if "whether_monster" in monster_effect_text: whether_monster = monster_effect_text["whether_monster"] effect_kind = monster_effect_wrapper.monster_effect_kind to_effect_kind = monster_effect_text["multiple_effect_kind"] from_monster_effect_det = monster_effect_text["monster"][0] to_monster_effect_det = monster_effect_text["monster"][1] from_monster_effect_det_monster = from_monster_effect_det["monster"] to_monster_effect_det_monster = to_monster_effect_det["monster"] i = 0 field = duel.field to_monsters = [] if ( "as_monster_condition" in to_monster_effect_det and to_monster_effect_det["as_monster_condition"] != "" ): as_monsters = to_monster_effect_det["as_monster_condition"] if not isinstance(as_monsters, list): tmp_monster = [] tmp_monster.append(as_monsters) as_monsters = tmp_monster for as_monster in as_monsters: if as_monster[0] == "~": tmp = duelobj.cost tmp = tmp[str(int(duel.chain))] place1 = tmp[as_monster] elif as_monster[0] == "%": tmp = duelobj.timing_mess place1 = tmp[as_monster] else: tmp = duelobj.mess tmp = tmp[str(int(duel.chain - 1))] place1 = tmp[as_monster] for place2 in place1: place2["place_unique_id"] = place2["det"]["place_unique_id"] else: place_array_tmp = [] for place in to_monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place["and_or"] != "" and place_tmp[0] == "field": tmptmp = {} tmptmp["and_or"] = place["and_or"] tmptmp["det"] = place["det"] place_array_tmp.append(tmptmp) continue else: tmptmp = {} tmptmp["and_or"] = place["and_or"] tmptmp["det"] = place["det"] place_array_tmp.append(tmptmp) place_array = place_array_tmp place_array_tmp = [] if place_tmp[2] == "1": mine_or_other = 1 elif place_tmp[2] == "2": mine_or_other = 2 elif place_tmp[2] == "3": mine_or_other = 3 if user == 1: mine_or_other2 = mine_or_other else: if mine_or_other == 1: mine_or_other2 = 2 elif mine_or_other == 2: mine_or_other2 = 1 else: mine_or_other = mine_or_other2 deck_id = int(place_tmp[1]) if place_tmp[0] == "deck": if mine_or_other2 == 1: deck = duelobj.decks[deck_id]["mydeck"] elif mine_or_other2 == 2: deck = duelobj.decks[deck_id]["otherdeck"] elif mine_or_other2 == 3: deck = duelobj.decks[deck_id]["commondeck"] user_decks = deck for user_deck in user_decks: tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_deck["place_unique_id"] to_monsters.append(tmp2) elif place_tmp[0] == "grave": if mine_or_other2 == 1: grave = duelobj.graves[deck_id]["mygrave"] elif mine_or_other2 == 2: grave = duelobj.graves[deck_id]["othergrave"] elif mine_or_other2 == 3: grave = duelobj.graves[deck_id]["commongrave"] user_graves = grave for user_grave in user_graves: tmp2 = {} tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_grave["place_unique_id"] to_monsters.append(tmp2) elif place_tmp[0] == "hand": if mine_or_other2 == 1: hand = 
duelobj.hands[deck_id]["myhand"] elif mine_or_other2 == 2: hand = duelobj.hands[deck_id]["otherhand"] elif mine_or_other2 == 3: hand = duelobj.hands[deck_id]["commonhand"] user_hands = hand for user_hand in user_hands: tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand["place_unique_id"] to_monsters.append(tmp2) elif place_tmp[0] == "field": field_size = FieldSize.objects.get(id=1) field = duelobj.field if duelobj.field_free is True: field_x = 20 else: field_x = field_size.field_x for x in range(field_x): for y in range(field_size.field_y): flag_field_place = True current_and_or = "and" mine_or_others = [] for place_tmp2 in place_array: and_or = place_tmp2["and_or"] det = place_tmp2["det"] splitted_det = det.split("_") kind2 = splitted_det[1] if duelobj.field_free is False: kind = field[x][y]["kind"] else: kind = field[0][y]["kind"] if kind != "": tmp = kind.split("_") else: tmp = [] if current_and_or == "and": if kind2 in tmp: if flag_field_place is True: flag_field_place = True else: flag_field_place = False elif current_and_or == "or": if kind2 in tmp: flag_field_place = True else: if flag_field_place is False: flag_field_place = False mine_or_other = int(splitted_det[2]) if ( mine_or_other == 1 and user == 1 or mine_or_other == 2 and user == 2 ): mine_or_other = 1 elif ( mine_or_other == 1 and user == 2 or mine_or_other == 2 and user == 1 ): mine_or_other = 2 else: mine_or_other = 3 mine_or_others.append(mine_or_other) current_and_or = and_or if flag_field_place is False: continue if field[x][y]["mine_or_other"] not in mine_or_others: continue if field[x][y]["det"] is not None: if duelobj.check_not_effected( field[x][y]["det"], user, to_effect_kind, "field", 0, x, y, field[x][y]["mine_or_other"], ): continue tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"][ "place_unique_id" ] if not duelobj.validate_answer( tmp2, to_monster_effect_det["monster"], exclude, duel, 1, 0, effect_kind, user, ): continue if whether_monster == 0: if field[x][y]["det"] is not None: continue tmp2 = {} tmp2["det"] = field[x][y]["det"] if duelobj.field_free is True: tmp2["mine_or_other"] = field[0][y]["mine_or_other"] elif duelobj.field_free is False: tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["hide"] = ( field["hide"] if ("hide" in field[x][y]) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "field" to_monsters.append(tmp2) else: if field[x][y]["det"] is None: continue tmp2 = {} tmp2["det"] = field[x][y]["det"] if duelobj.field_free is True: tmp2["mine_or_other"] = field[0][y]["mine_or_other"] elif duelobj.field_free is False: tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["hide"] = ( field["hide"] if ("hide" in field[x][y]) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place_unique_id"] = field[x][y]["det"][ "place_unique_id" ] tmp2["place"] = "field" to_monsters.append(tmp2) to_monsters = copy.deepcopy(to_monsters) for key in range(len(to_monsters)): to_monsters[key]["det"]["rel"] = None if ( "as_monster_condition" in from_monster_effect_det and from_monster_effect_det["as_monster_condition"] != "" ): relation_name = 
from_monster_effect_det_monster["relation"][0] relation_to = int(from_monster_effect_det_monster["relation_to"][0]) relation_kind = from_monster_effect_det_monster["relation_kind"][0] if relation_to == 0: relation_from = 1 elif relation_to == 1: relation_from = 0 as_monsters = from_monster_effect_det["as_monster_condition"] if not isinstance(as_monsters, list): tmp_monster = [] tmp_monster.append(as_monsters) as_monsters = tmp_monster for as_monster in as_monsters: if as_monster[0] == "~": tmp = duelobj.cost tmp = tmp[str(int(duel.chain))] tmp = duelobj.timing_mess if as_monster in tmp: place1 = tmp[as_monster] else: place1 = [] elif as_monster[0] == "%": tmp = duelobj.timing_mess if as_monster in tmp: place1 = tmp[as_monster] else: place1 = [] else: tmp = duelobj.mess tmp = tmp[str(int(duel.chain - 1))] if as_monster in tmp: place1 = tmp[as_monster] else: place1 = [] for place2 in place1: if not duelobj.validate_answer( place2, monster_condition, "", duel, 1, 0, effect_kind, user ): continue if place2["place"] == "field": x = place2["x"] y = place2["y"] if "rel" not in field[x][y]["det"]: field[x][y]["det"]["rel"] = {} if relation_kind not in field[x][y]["det"]["rel"]: field[x][y]["det"]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"] = to_monsters[int(tmp)] tmp2["to"] = relation_to tmp2["name"] = relation_name field[x][y]["det"]["rel"][relation_kind].append(tmp2) duelobj.field = field tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"]["place_unique_id"] duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, to_monsters[int(tmp)]["det"]["place_unique_id"], x, y, 0, ) i += 1 else: relation_name = from_monster_effect_det_monster["relation"][0] relation_to = int(from_monster_effect_det_monster["relation_to"][0]) relation_kind = from_monster_effect_det_monster["relation_kind"][0] if relation_to == 0: relation_from = 1 elif relation_to == 1: relation_from = 0 place_array_tmp = [] for place in from_monster_effect_det_monster["place"]: if place["and_or"] != "" and place_tmp[0] == "field": tmptmp = {} tmptmp["and_or"] = place["and_or"] tmptmp["det"] = place["det"] place_array_tmp.append(tmptmp) continue else: tmptmp = {} tmptmp["and_or"] = place["and_or"] tmptmp["det"] = place["det"] place_array_tmp.append(tmptmp) place_array = place_array_tmp place_array_tmp = [] place_tmp = place["det"].split("_") if place_tmp[2] == "1": mine_or_other = 1 elif place_tmp[2] == "2": mine_or_other = 2 elif place_tmp[2] == "3": mine_or_other = 3 if user == 1: mine_or_other = mine_or_other2 else: if mine_or_other == 1: mine_or_other2 = 2 elif mine_or_other == 2: mine_or_other2 = 1 else: mine_or_other = mine_or_other2 deck_id = int(place_tmp[1]) if place_tmp[0] == "deck": if mine_or_other2 == 1: deck = duelobj.decks[deck_id]["mydeck"] elif mine_or_other2 == 2: deck = duelobj.decks[deck_id]["otherdeck"] elif mine_or_other2 == 3: deck = duelobj.decks[deck_id]["commondeck"] user_decks = deck for key in range(len(user_decks)): if "rel" not in user_decks[key]: user_decks[key]["rel"] = {} if relation_kind not in user_decks[key]["rel"]: user_decks[key]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"] = to_monsters[int(tmp)] tmp2["to"] = relation_to tmp2["name"] = relation_name user_decks[key]["rel"][relation_kind].append(tmp2) tmp2 = {} tmp2["det"] = 
user_decks[key] tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_decks[key]["place_unique_id"] duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, to_monsters[int(tmp)]["place_unique_id"], 0, 0, deck_id, ) i += 1 if mine_or_other2 == 1: duelobj.decks[deck_id]["mydeck"] = user_decks elif mine_or_other2 == 2: duelobj.decks[deck_id]["otherdeck"] = user_decks elif mine_or_other2 == 3: duelobj.decks[deck_id]["commondeck"] = user_decks elif place_tmp[0] == "grave": if mine_or_other2 == 1: grave = duelobj.graves[deck_id]["mygrave"] elif mine_or_other2 == 2: grave = duelobj.graves[deck_id]["othergrave"] elif mine_or_other2 == 3: grave = duelobj.graves[deck_id]["commongrave"] user_graves = grave for key in range(len(user_graves)): if "rel" not in user_graves[key]: user_graves[key]["rel"] = {} if relation_kind not in user_graves[key]["rel"]: user_graves[key]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"] = to_monsters[int(tmp)] tmp2["to"] = relation_to tmp2["name"] = relation_name user_graves[key]["rel"][relation_kind].append(tmp2) tmp2 = {} tmp2["det"] = user_graves[key] tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_graves[key]["place_unique_id"] duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, to_monsters[int(tmp)]["place_unique_id"], 0, 0, deck_id, ) i += 1 if mine_or_other2 == 1: duelobj.graves[deck_id]["mygrave"] = user_graves elif mine_or_other2 == 2: duelobj.graves[deck_id]["othergrave"] = user_graves elif mine_or_other2 == 3: duelobj.graves[deck_id]["commongrave"] = user_graves elif place_tmp[0] == "hand": if mine_or_other2 == 1: hand = duelobj.hands[deck_id]["myhand"] elif mine_or_other2 == 2: hand = duelobj.hands[deck_id]["otherhand"] elif mine_or_other2 == 3: hand = duelobj.hands[deck_id]["commonhand"] user_hands = hand for key in range(len(user_hands)): if "rel" not in user_hands[key]: user_hands[key]["rel"] = {} if relation_kind not in user_hands[key]["rel"]: user_hands[key]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"] = to_monsters[int(tmp)] tmp2["to"] = relation_to tmp2["name"] = relation_name user_hands[key]["rel"][relation_kind].append(tmp2) tmp2["det"] = user_hands[key] tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hands[key]["place_unique_id"] duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, to_monsters[int(tmp)]["place_unique_id"], 0, 0, deck_id, ) i += 1 if mine_or_other2 == 1: duelobj.hands[deck_id]["myhand"] = user_hands elif mine_or_other2 == 2: duelobj.hands[deck_id]["otherhand"] = user_hands elif mine_or_other2 == 3: duelobj.hands[deck_id]["commonhand"] = user_hands elif place_tmp[0] == "field": field_size = FieldSize.objects.get(id=1) field = duelobj.field if duelobj.field_free is True: field_x = 20 else: field_x = field_size.field_x for x in range(field_x): for y in range(field_size.field_y): flag_field_place = True current_and_or = "and" for place_tmp2 in place_array: and_or = place_tmp2["and_or"] det = place_tmp2["det"] splitted_det = det.split("_") kind2 = splitted_det[1] if duelobj.field_free is False: kind = 
field[x][y]["kind"] else: kind = field[0][y]["kind"] if kind != "": tmp = kind.split("_") else: tmp = [] if current_and_or == "and": if kind2 in tmp: if flag_field_place is True: flag_field_place = True else: flag_field_place = False elif current_and_or == "or": if kind2 in tmp: flag_field_place = True else: if flag_field_place is False: flag_field_place = False mine_or_other = int(splitted_det[2]) current_and_or = and_or if flag_field_place is False: continue if ( mine_or_other == 1 and user == 1 or mine_or_other == 2 and user == 2 ): mine_or_other = 1 elif ( mine_or_other == 1 and user == 2 or mine_or_other == 2 and user == 1 ): mine_or_other = 2 else: mine_or_other = 3 if field[x][y]["mine_or_other"] != mine_or_other: continue if duelobj.check_not_effected( field[x][y]["det"], user, effect_kind, "field", 0, x, y, field[x][y]["mine_or_other"], ): continue tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"]["place_unique_id"] if not duelobj.validate_answer( tmp2, to_monster_effect_det["monster"], exclude, duel, 1, 0, effect_kind, user, ): continue if whether_monster == 0: if field[x][y]["det"] is not None: continue if "rel" not in field[x][y]: field[x][y]["rel"] = {} if relation_kind not in field[x][y]["det"]["rel"]: field[x][y]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"].append(to_monsters[int(tmp)]) tmp2["to"] = relation_to tmp2["name"] = relation_name field[x][y]["rel"][relation_kind].append(tmp2) tmp2 = {} tmp2["det"] = None tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = 0 duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, 0, x, y, 0, ) i += 1 else: if field[x][y]["det"] is None: continue if "rel" not in field[x][y]["det"]: field[x][y]["det"]["rel"] = {} if relation_kind not in field[x][y]["det"]["rel"]: field[x][y]["det"]["rel"][relation_kind] = [] for tmp in answer[i]: tmp2 = {} tmp2["monster"] = to_monsters[int(tmp)] tmp2["to"] = relation_to tmp2["name"] = relation_name field[x][y]["det"]["rel"][relation_kind].append(tmp2) tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"][ "place_unique_id" ] duelobj.set_relation( relation_name, to_monsters[int(tmp)], relation_kind, relation_from, tmp2, field[x][y]["det"]["place_unique_id"], x, y, 0, ) i += 1 duel.field = field duel.ask -= del_ask choices = None if duel.ask == 0: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: 
duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) # if monster_effect.monster_condition != "": # if not check_condition(duel,monster_effect.monster_condition,duelobj): # return HttpResponse("error") duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def multiple_answer(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duelobj = DuelObj(room_number) duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) answer = request.POST["answer"] if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: return_value = multiple_answer_det( duelobj, duel, 1, answer, request, 1, room_number, lock ) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = multiple_answer_det( duelobj, duel, 1, answer, request, 2, room_number, lock ) free_lock(room_number, lock) return return_value elif duel.user_2 == request.user or ( ID2 == ID and duel.guest_flag2): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: return_value = multiple_answer_det( duelobj, duel, 2, answer, request, 1, room_number, lock ) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = multiple_answer_det( duelobj, duel, 2, answer, request, 2, room_number, lock ) free_lock(room_number, lock) return return_value free_lock(room_number, lock) return HttpResponse("error") def answerorder(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duelobj = 
DuelObj(room_number) duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag == True): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1 if duel.user_turn == 1: if user == 1 and duel.ask != 1: free_lock(room_number, lock) return HttpResponse("error") elif user == 2 and duel.ask != 2: free_lock(room_number, lock) return HttpResponse("error") else: if user == 2 and duel.ask != 1: free_lock(room_number, lock) return HttpResponse("error") elif user == 1 and duel.ask != 2: free_lock(room_number, lock) return HttpResponse("error") duelobj.init_all(user, other_user, room_number) duelobj.in_execute = False acc = duelobj.acc_global order = request.POST["order"].split("_") if len(acc) != len(order): free_lock(room_number, lock) return HttpResponse("error") for tmp in order: if int(tmp) >= len(order): free_lock(room_number, lock) return HttpResponse("error") if order.count(tmp) > 1: free_lock(room_number, lock) return HttpResponse("error") dummy_list = [] for tmp in order: dummy_list.append(acc[int(tmp)]) duel.ask = 0 if duel.ask == 0: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) duelobj.acc_global = dummy_list duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj) def answer(request): global check_array room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duelobj = DuelObj(room_number) check_array = [] duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID == ID1 and duel.guest_flag): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1
duelobj.init_all(user, other_user, room_number) duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_cost: return answer_cost(duelobj, duel, request, room_number, lock) answer = request.POST["answer"] if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: return_value = answer_det(duelobj, duel, 1, answer, request, 1, lock,ID1,ID2) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = answer_det(duelobj, duel, 1, answer, request, 2, lock,ID1,ID2) free_lock(room_number, lock) return return_value elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: return_value = answer_det(duelobj, duel, 2, answer, request, 1, lock,ID1,ID2) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = answer_det(duelobj, duel, 2, answer, request, 2, lock,ID1,ID2) free_lock(room_number, lock) return return_value free_lock(room_number, lock) return HttpResponse("error") def chain_variable(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.in_execute = False if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) str_chain = str(duel.chain) chain_det = json.loads(duel.chain_det) chain_variable_det = json.loads(duel.chain_variable) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_cost is True: if duel.in_copying is True: duelobj.tmp_chain = duel.chain - 1 else: duelobj.tmp_chain = duel.chain cost_det = duel.cost_det cost = CostWrapper.objects.get(id=cost_det).cost cost_val = cost.cost_val if(cost_val == 63 or cost_val == 64): chain_variablle_det = json.loads(duel.global_variable) cost = json.loads(cost.cost) if(cost_val == 63 or cost_val == 64): tmp = cost["chain_variable"].split("_") mine_or_other = int(tmp[2]) chain_variable_name = tmp[1] else: chain_variable_name = cost["chain_variable"] min_equation_number = duelobj.calculate_boland(cost["min_equation_number"]) max_equation_number = duelobj.calculate_boland(cost["max_equation_number"]) chain_variable = int(request.POST["chain_variable"]) if chain_variable < min_equation_number or chain_variable > max_equation_number: free_lock(room_number, lock) return HttpResponse("error") if(cost_val == 63 or cost_val == 64): pass elif str_chain not in chain_variable_det: chain_variable_det[str_chain] = {} if duel.user_1 == 
request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 if(cost_val == 63 or cost_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["1_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["2_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str_chain][chain_variable_name] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) duelobj.in_execute = False else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if(cost_val == 63 or cost_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["2_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["1_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str_chain][chain_variable_name] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) duelobj.in_execute = False elif duel.user_2 == request.user: if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 if(cost_val == 63 or cost_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["2_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["1_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str_chain][chain_variable_name] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) duelobj.in_execute = False else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if(cost_val == 63 or cost_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["1_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["2_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str_chain][chain_variable_name] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) duelobj.in_execute = False if duel.ask == 0: duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) cost_det = duel.cost_det effect = CostWrapper.objects.get(id=cost_det) next_effect = effect.cost_next trigger = Trigger.objects.get(id=duel.current_trigger) tmp = duelobj.pay_cost(next_effect, user,duel.chain,trigger) if next_effect == 0 or tmp is True: duelobj.end_cost(duel.cost_user,duel.chain,trigger) duelobj.save_all(user, other_user, room_number) else: duelobj.tmp_chain = duel.chain chain_det = json.loads(duel.chain_det) monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) monster_effect = monster_effect_wrapper.monster_effect monster_effect_val = monster_effect.monster_effect_val if(monster_effect_val == 63 or monster_effect_val == 64): chain_variable_det = json.loads(duel.global_variable) chain_user = json.loads(duel.chain_user) effect_user = chain_user[str(duel.chain - 1)] monster_effect = json.loads(monster_effect.monster_effect) chain_variable_name = monster_effect["chain_variable"] if(monster_effect_val == 63 or monster_effect_val == 64): tmp = chain_variable_name.split("_") chain_variable_name = tmp[1] mine_or_other = 
int(tmp[2]) min_equation_number = duelobj.calculate_boland( monster_effect["min_equation_number"] ) max_equation_number = duelobj.calculate_boland( monster_effect["max_equation_number"] ) chain_variable = int(request.POST["chain_variable"]) if chain_variable < min_equation_number or chain_variable > max_equation_number: free_lock(room_number, lock) return HttpResponse("error") if(monster_effect_val == 63 or monster_effect_val == 64): pass elif duel.chain - 1 not in chain_variable_det: chain_variable_det[str(duel.chain - 1)] = {} if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 if(monster_effect_val == 63 or monster_effect_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["1_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["2_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str(duel.chain - 1)][ chain_variable_name ] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if(monster_effect_val == 63 or monster_effect_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["2_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["1_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str(duel.chain - 1)][ chain_variable_name ] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) elif duel.user_2 == request.user: if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 if(monster_effect_val == 63 or monster_effect_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["2_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["1_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str(duel.chain - 1)][ chain_variable_name ] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if(monster_effect_val == 63 or monster_effect_val == 64): if mine_or_other == 1: chain_variable_det[chain_variable_name]["1_value"]= chain_variable elif mine_or_other == 2: chain_variable_det[chain_variable_name]["2_value"]= chain_variable else: chain_variable_det[chain_variable_name]["value"]= chain_variable duel.global_variable = json.dumps(chain_variable_det) else: chain_variable_det[str(duel.chain - 1)][ chain_variable_name ] = chain_variable duel.chain_variable = json.dumps(chain_variable_det) if duel.ask == 0: if monster_effect_wrapper.pac: next_effect = duelobj._pac(monster_effect_wrapper.pac) else: next_effect = monster_effect_wrapper.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id duel.chain_det = json.dumps(chain_det) duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) '''if duel.in_cost is False and duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) ''' if duel.chain == 0: duel.appoint = duel.user_turn tmp = {} duel.mess = 
json.dumps(tmp)
        duel.cost_result = json.dumps(tmp)
        duel.cost = json.dumps(tmp)
        duelobj.invoke_trigger_waiting(duel.trigger_waiting)
        duel.current_priority = 10000
        choices = duelobj.check_trigger(
            decks, graves, hands, duel.phase, duel.user_turn, user, other_user
        )
    else:
        choices = None
    duelobj.save_all(user, other_user, room_number)
    free_lock(room_number, lock)
    return battle_det(request, duelobj, choices)


def yes_or_no_cost(request, duelobj, user, other_user, room_number, lock):
    duel = duelobj.duel
    if "ID" in request.COOKIES:
        ID = request.COOKIES["ID"]
    else:
        ID = ""
    ID1 = duel.guest_id
    ID2 = duel.guest_id2
    cost_det = duel.cost_det
    cost_user = duel.cost_user
    duelobj.tmp_chain = str(duel.chain)
    if cost_user == 0:
        if request.user == duel.user_1 or (ID1 == ID and duel.guest_flag is True):
            cost_user = 1
        else:
            cost_user = 2
    cost = CostWrapper.objects.get(id=cost_det).cost
    cost_effect_val = cost.cost_val
    # Only yes/no style costs (16, 26) and effect-copy costs (48) may be
    # answered through this view.
    if cost_effect_val != 48 and cost_effect_val != 16 and cost_effect_val != 26:
        free_lock(room_number, lock)
        return HttpResponse("error")
    answer = request.POST["answer"]
    if cost_user == duel.user_turn:
        if duel.ask != 1:
            free_lock(room_number, lock)
            return HttpResponse("error")
    if cost_user != duel.user_turn:
        if duel.ask != 2:
            free_lock(room_number, lock)
            return HttpResponse("error")
    decks = Deck.objects.all()
    graves = Grave.objects.all()
    hands = Hand.objects.all()
    duelobj.check_eternal_effect(
        decks, graves, hands, duel.phase, duel.user_turn, user, other_user
    )
    if answer == "yes":
        duel.ask = 0
        effect = CostWrapper.objects.get(id=cost_det)
        # Effect copy
        if cost_effect_val == 48:
            next_effect = duelobj.copy_special_effect(effect.cost, effect.cost_kind, True)
            tmp = False
            if next_effect is not None and not isinstance(next_effect, int) and next_effect[0] is not None:
                duel.cost_det = next_effect[0].id
                trigger = Trigger.objects.get(id=duel.current_trigger)
                tmp = duelobj.pay_cost(next_effect[0], user, duel.chain, trigger)
            else:
                duel.cost_det = 0
            if duel.cost_det == 0 and tmp is False:
                trigger = Trigger.objects.get(id=duel.current_trigger)
                if duel.in_copying is False:
                    duelobj.end_cost(duel.cost_user, duel.chain, trigger)
                    if trigger.chain_flag is True:
                        duel.virtual_chain += 1
                    duel.chain += 1
                else:
                    duelobj.end_cost(duel.cost_user, duel.chain, trigger)
        choices = None
    else:
        duel.ask = 0
        effect = CostWrapper.objects.get(id=cost_det)
        if cost_effect_val == 48:
            if effect.pac:
                next_effect = duelobj._pac_cost(effect.pac)
            elif effect.cost_next:
                next_effect = effect.cost_next
            else:
                next_effect = duelobj.pop_pac_cost(user)
            tmp = False
            if next_effect is not None and next_effect != -2:
                duel.cost_det = next_effect.id
                trigger = Trigger.objects.get(id=duel.current_trigger)
                tmp = duelobj.pay_cost(next_effect, user, duel.chain, trigger)
            else:
                duel.cost_det = 0
            if duel.cost_det == 0 and tmp is False:
                trigger = Trigger.objects.get(id=duel.current_trigger)
                if duel.in_copying is False:
                    duelobj.end_cost(duel.cost_user, duel.chain, trigger)
                    # When the effect is not copied, do not advance the chain.
                    # duel.chain += 1
                else:
                    duelobj.end_cost(duel.cost_user, duel.chain, trigger)
            choices = None
        if effect.pac2:
            next_effect = duelobj._pac_cost(effect.pac2)
        elif effect.cost_next2:
            next_effect = effect.cost_next2
        else:
            next_effect = duelobj.pop_pac_cost(user)
        tmp = False
        if next_effect is not None and next_effect != -2:
            duel.cost_det = next_effect.id
            trigger = Trigger.objects.get(id=duel.current_trigger)
            tmp = duelobj.pay_cost(next_effect, user, duel.chain, trigger)
        else:
            duel.cost_det = 0
        if duel.cost_det == 0 and tmp is False:
            trigger = Trigger.objects.get(id=duel.current_trigger)
            if duel.in_copying is False:
                duelobj.end_cost(duel.cost_user, duel.chain, trigger)
                # When the effect is not copied, do not advance the chain.
                # duel.chain += 1
            else:
                duelobj.end_cost(duel.cost_user, duel.chain, trigger)
        choices = None
    duelobj.save_all(user, other_user, room_number)
    free_lock(room_number, lock)
    return battle_det(request, duelobj, choices)


def multiple_choice(request):
    room_number = int(request.POST["room_number"])
    lock = Lock.objects.get()
    lock_flag = lock_lock(room_number, lock, request)
    if lock_flag != "OK":
        return HttpResponse("waiting")
    duel = Duel.objects.filter(id=room_number).get()
    if "ID" in request.COOKIES:
        ID = request.COOKIES["ID"]
    else:
        ID = ""
    ID1 = duel.guest_id
    ID2 = duel.guest_id2
    duelobj = DuelObj(room_number)
    duelobj.duel = duel
    if duel.user_1 != request.user and duel.user_2 != request.user:
        if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2):
            pass
        else:
            free_lock(room_number, lock)
            return HttpResponseRedirect(reverse("tcgcreator:watch_battle"))
    duelobj.duel = duel
    duelobj.room_number = room_number
    if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True):
        user = 1
        other_user = 2
        duelobj.user = 1
    else:
        duelobj.user = 2
        user = 2
        other_user = 1
    duelobj.init_all(user, other_user, room_number)
    if user == duel.user_turn:
        if duel.ask != 1:
            free_lock(room_number, lock)
            return HttpResponse("error")
    if duel.user_turn != user:
        if duel.ask != 2:
            free_lock(room_number, lock)
            return HttpResponse("error")
    chain_det = json.loads(duel.chain_det)
    monster_effect = MonsterEffectWrapper.objects.get(
        id=int(chain_det[str(duel.chain - 1)])
    ).monster_effect
    monster_effect_val = monster_effect.monster_effect_val
    # Only multiple-choice effect steps (66, 67) are valid here.
    if monster_effect_val != 66 and monster_effect_val != 67:
        free_lock(room_number, lock)
        return HttpResponse("error")
    answer = request.POST["answer"]
    multiple_json = json.loads(monster_effect.monster_effect)
    if answer not in multiple_json["monster_effect_wrapper"]:
        free_lock(room_number, lock)
        return HttpResponse("error")
    duel.ask = 0
    next_effect = MonsterEffectWrapper.objects.get(id=int(answer))
    chain_det[str(duel.chain - 1)] = next_effect.id
    duel.chain_det = json.dumps(chain_det)
    decks = Deck.objects.all()
    graves = Grave.objects.all()
    hands = Hand.objects.all()
    duelobj.check_eternal_effect(
        decks, graves, hands, duel.phase, duel.user_turn, user, other_user
    )
    duelobj.retrieve_chain(
        decks, graves, hands, duel.phase, duel.user_turn, user, other_user
    )
    if duel.chain == 0:
        duelobj.invoke_after_chain_effect(
            decks, graves, hands, duel.phase, duel.user_turn, user, other_user
        )
        duel.appoint = duel.user_turn
        tmp = {}
        duel.mess = json.dumps(tmp)
        duel.cost_result = json.dumps(tmp)
        duel.cost = json.dumps(tmp)
        duelobj.invoke_trigger_waiting(duel.trigger_waiting)
        duel.current_priority = 10000
        choices = duelobj.check_trigger(
            decks, graves, hands, duel.phase, duel.user_turn, user, other_user
        )
    else:
        choices = None
    duelobj.save_all(user, other_user, room_number)
    free_lock(room_number, lock)
    return battle_det(request, duelobj, choices)


def yes_or_no(request):
    room_number = int(request.POST["room_number"])
    lock = Lock.objects.get()
    lock_flag = lock_lock(room_number, lock, request)
    if lock_flag != "OK":
        return HttpResponse("waiting")
    duel = Duel.objects.filter(id=room_number).get()
    if "ID" in request.COOKIES:
        ID = request.COOKIES["ID"]
    else:
        ID = ""
    ID1 = duel.guest_id
    ID2 = duel.guest_id2
    duelobj = DuelObj(room_number)
    duelobj.duel = duel
    if duel.user_1 != request.user and duel.user_2 != request.user:
        if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2):
            pass
        else:
            free_lock(room_number, lock)
            return
HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) if user == duel.user_turn: if duel.ask != 1: free_lock(room_number, lock) return HttpResponse("error") if duel.user_turn != user: if duel.ask != 2: free_lock(room_number, lock) return HttpResponse("error") if duel.in_cost is True: return yes_or_no_cost(request, duelobj, user, other_user, room_number, lock) if duel.in_trigger_waiting is True and duel.force == 0: answer = request.POST["answer"] if answer == "yes": duel.force = 2 else: duel.force = 1 duel.ask = 0 duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, None) chain_det = json.loads(duel.chain_det) monster_effect = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ).monster_effect monster_effect_val = monster_effect.monster_effect_val if monster_effect_val != 16 and monster_effect_val != 26: free_lock(room_number, lock) return HttpResponse("error") answer = request.POST["answer"] if answer == "yes": duel.ask = 0 chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duel.chain_det = json.dumps(chain_det) else: duel.ask = 0 chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) next_effect = effect.monster_effect_next2 if effect.pac2: next_effect = duelobj._pac(effect.pac2) else: next_effect = effect.monster_effect_next2 if next_effect is not None and next_effect != 0: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) 
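    # duel.chain_det is a JSON object keyed by the 0-based chain index (as a
    # string), whose values are MonsterEffectWrapper ids; resolving a link
    # rewrites its slot with the id of the next effect step.  Illustrative
    # shape only (ids invented for the example):
    #
    #     chain_det = json.loads(duel.chain_det)   # {"0": 12, "1": 98}
    #     chain_det[str(duel.chain - 1)]           # wrapper id now resolving
    #
    # When a wrapper has no successor, duel.in_pac (a JSON stack kept per
    # chain index) is popped to resume the enclosing pac, and duel.chain is
    # decremented so the previous link resolves next.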
duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) else: choices = None if(duel.ask == 1 ): duel.appoint = duel.user_turn elif(duel.ask == 2 ): if duel.user_turn == 1: duel.appoint = 2 else: duel.appoint = 1 duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_under_det( duelobj, duel, user, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, cost_flag=0, log=None, lock=None, effect_kind="", ): global check_array if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 room_number = int(request.POST["room_number"]) if user == 1: other_user = 2 else: other_user = 1 chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) if cost_flag == 0: chain_user = int(chain_user[str(duel.chain - 1)]) else: chain_user = int(chain_user[duelobj.tmp_chain]) mess = duelobj.mess cost = duelobj.cost return_value = [] for answer_val in answer: x = int(answer_val["x"]) y = int(answer_val["y"]) under_id = answer_val["under_id"] tmp_count = 0 for monster_effect_det in monster_effect_text: as_monster_effect = monster_effect_det["as_monster_condition"] if monster_effect_val == 44: if as_monster_effect[0] == "~": cost = duelobj.cost #if str(int(duel.chain-2)) not in cost: cost[str(int(duel.chain - 2))] = {} cost[str(int(duel.chain - 2))]["choose"] = [] effect_cost_flag = 1 else: mess = duelobj.mess #if str(int(duel.chain-2)) not in mess: mess[str(int(duel.chain - 2))] = {} mess[str(int(duel.chain - 2))]["choose"] = [] effect_cost_flag == 0 tmp_count += 1 if (user == 1 and chain_user == 1) or (user == 2 and chain_user == 2): if ( (monster_effect_val == 3) or (monster_effect_val == 44) or (monster_effect_val == 5 and tmp_count == 1) ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[2] == "4": mine_or_other = user elif place_tmp[2] == "5": mine_or_other = other_user else: mine_or_other = 0 if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if field["mine_or_other"] != mine_or_other: continue if field["det"] is None: return HttpResponse("error") else: if "under" not in field["det"]: return HttpResponse("error") under_flag = False for under in field["det"]["under"]: if under["place_unique_id"] == under_id: under_flag = True break if under_flag is False: return HttpResponse("error") tmp2 = {} tmp2["det"] = field["det"] tmp2["mine_or_other"] = field["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "under" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field["det"]["place_unique_id"] tmp2["under_id"] = under_id return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val == 
44: if effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if "choose" not in cost[str(duel.chain - 1)]: cost[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["mine_or_other"] = field["mine_or_other"] tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "under" tmp2["under_id"] = under_id return_value.append(tmp2) if monster_effect_val == 44: if effect_cost_flag == 0: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][as_monster_effect].append( tmp2 ) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "under" tmp2["mine_or_other"] = field["mine_or_other"] tmp2["under_id"] = under_id return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][as_monster_effect] = [] cost[str(duelobj.tmp_chain)][as_monster_effect].append( tmp2 ) elif (user == 2 and chain_user == 1) or (user == 1 and chain_user == 2): if (monster_effect_val == 4) or ( monster_effect_val == 5 and tmp_count == 2 ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[2] == "4": mine_or_other = user elif place_tmp[2] == "5": mine_or_other = other_user else: mine_or_other = 0 if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if field["mine_or_other"] != mine_or_other: continue if field["det"] is None: return HttpResponse("error") else: if "under" not in field["det"]: return HttpResponse("error") under_flag = False for under in field["det"]["under"]: if under["place_unique_id"] == under_id: under_flag = True break if under_flag is False: return HttpResponse("error") tmp2 = {} tmp2["det"] = field["det"] tmp2["mine_or_other"] = field["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "under" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field["det"]["place_unique_id"] tmp2["under_id"] = under_id return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val == 44: if 
effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if "choose" not in mess[str(duel.chain - 2)]: mess[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if "choose" not in cost[str(duel.chain - 2)]: cost[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = other_user tmp2["place"] = "under" tmp2["mine_or_other"] = field["mine_or_other"] tmp2["under_id"] = under_id if monster_effect_val == 44: if effect_cost_flag == 0: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][as_monster_effect].append( tmp2 ) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = other_user tmp2["place"] = "under" tmp2["mine_or_other"] = field["mine_or_other"] tmp2["under_id"] = under_id return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][as_monster_effect] = [] cost[str(duelobj.tmp_chain)][as_monster_effect].append( tmp2 ) else: for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if int(field["mine_or_other"]) != 0: continue if field["det"] is None: return HttpResponse("error") else: if "under" not in field["det"]: return HttpResponse("error") under_flag = False for under in field["det"]["under"]: if under["place_unique_id"] == under_id: under_flag = True break if under_flag is False: return HttpResponse("error") tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "under" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"]["place_unique_id"] tmp2["under_id"] = under_id return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val != 44: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = field["hide"] if ("hide" in field) else False tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = 
field["det"]["place_unique_id"] tmp2["user"] = user tmp2["place"] = "under" tmp2["under_id"] = under_id tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if monster_effect_val != 44: if as_monster_effect not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)][as_monster_effect] = [] mess[str(duel.chain - 1)][as_monster_effect].append( tmp2 ) else: if effect_cost_flag == 0: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][as_monster_effect].append( tmp2 ) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][as_monster_effect].append( tmp2 ) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = field["hide"] if ("hide" in field) else False tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"]["place_unique_id"] tmp2["user"] = user tmp2["place"] = "under" tmp2["under_id"] = under_id tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if as_monster_effect not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)][as_monster_effect] = [] cost[str(duelobj.tmp_chain)][as_monster_effect].append(tmp2) duelobj.mess = mess duelobj.cost = cost choices = None if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 elif duel.user_2 == request.user: if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if duel.ask == 0 and duel.in_cost is False: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) 
duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) else: choices = None elif duel.ask == 0: cost_det = duel.cost_det effect = CostWrapper.objects.get(id=cost_det) if effect.pac: next_effect = duelobj._pac_cost(cost.pac) elif effect.cost_next: next_effect = effect.cost_next else: next_effect = duelobj.pop_pac_cost(user) tmp = False if next_effect is not None and next_effect != -2: duel.cost_det = next_effect.id trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) tmp = duelobj.pay_cost(next_effect, user,duel.chain,trigger) else: duel.cost_det = 0 if duel.cost_det == 0 and tmp is False: if duel.in_copying is False: duelobj.end_cost(duel.cost_user,duel.chain,trigger) trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain += 1 duel.chain += 1 else: duelobj.end_cost(duel.cost_user,duel.chain,trigger) choices = None if duel.in_cost is False: data = {} data["monsters"] = return_value if log is None: log = "" duel.log_turn += duelobj.write_log(log, user, data) duel.log += duelobj.write_log(log, user, data) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_as_under( duelobj, duel, user, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, cost_flag=0, log=None, lock=None, room_number=None, ): if user == 1: other_user = 2 else: other_user = 1 decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() return_value = [] chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) chain_user = int(chain_user[str(duel.chain - 1)]) monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) monster_effect = monster_effect_wrapper.monster_effect effect_kind = monster_effect_wrapper.monster_effect_kind monster_effect_text = json.loads(monster_effect.monster_effect) as_monster = monster_effect_text["monster"][0]["as_monster_condition"] field = duelobj.field monster_effect_det = monster_effect_text["monster"][0] as_monster_to = monster_effect_text["as_monster_condition_to"] mess = duelobj.mess as_monsters = mess[str(int(duel.chain - 1))][as_monster] flag = False if as_monster_to not in mess[str(int(duel.chain - 1))]: mess[str(int(duel.chain - 1))][as_monster_to] = [] for answer_val in answer: if answer_val["place"] == "under": x = int(answer_val["x"]) y = int(answer_val["y"]) place_unique_id = answer_val["under_id"] flag = False for as_monster in as_monsters: x = as_monster["x"] y = as_monster["y"] if duelobj.check_monster_condition_det( monster_effect_det, as_monster["det"], user, effect_kind, 1, "field", 0, x, y, ): if "under" in field[x][y]["det"]: for under in field[x][y]["det"]["under"]: if under["place_unique_id"] == place_unique_id: tmp2 = {} tmp2["det"] = under tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "under" tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["place_unique_id"] = field[x][y]["det"]["place_unique_id"] tmp2["under_id"] = under["place_unique_id"] flag = True return_value.append(tmp2) mess[str(int(duel.chain - 1))][as_monster_to].append(tmp2) if flag is False: return HttpResponse("error") duelobj.mess = mess duel.ask -= 1 if duel.ask == 0 and duel.in_cost is False: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = 
MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) else: choices = None if duel.in_cost is False: data = {} data["monsters"] = return_value if log is None: log = "" duel.log_turn += duelobj.write_log(log, user, data) duel.log += duelobj.write_log(log, user, data) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_as( duelobj, duel, user, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, cost_flag=0, log=None, lock=None, room_number=None, ): if user == 1: other_user = 2 else: other_user = 1 decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() return_value = [] chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) chain_user = int(chain_user[str(duel.chain - 1)]) monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) monster_effect = monster_effect_wrapper.monster_effect effect_kind = monster_effect_wrapper.monster_effect_kind monster_effect_text = json.loads(monster_effect.monster_effect) as_monster = monster_effect_text["monster"][0]["as_monster_condition"] monster_effect_det = monster_effect_text["monster"][0] as_monster_to = monster_effect_text["as_monster_condition_to"] mess = duelobj.mess as_monsters = mess[str(int(duel.chain - 1))][as_monster] if as_monster_to not in mess[str(int(duel.chain - 1))]: mess[str(int(duel.chain - 1))][as_monster_to] = [] for answer_val in answer: if answer_val["place"] == "field": x = int(answer_val["x"]) y = int(answer_val["y"]) place_unique_id = duelobj.field[x][y]["det"]["place_unique_id"] flag = False for as_monster in as_monsters: if as_monster["det"]["place_unique_id"] == place_unique_id: if duelobj.check_monster_condition_det( monster_effect_det, as_monster["det"], user, effect_kind, 1, "field", 0, x, y, ): flag = True return_value.append(as_monster) 
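    # "as_monster_condition" labels act as named selections scoped to one
    # chain link: whatever an earlier step validated is stored in
    # duelobj.mess under that label, and a later step can re-bind it to a
    # new label (as_monster_condition_to), which is what this loop does.
    # Illustrative shape only (keys and values invented):
    #
    #     mess["0"] = {"target": [{"x": 1, "y": 2, "det": {...}}]}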
mess[str(int(duel.chain - 1))][as_monster_to].append(as_monster) if flag is False: return HttpResponse("error") duelobj.mess = mess duel.ask -= 1 if duel.ask == 0 and duel.in_cost is False: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: Trigger = Triggtrigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) else: choices = None if duel.in_cost is False: data = {} data["monsters"] = return_value if log is None: log = "" duel.log_turn += duelobj.write_log(log, user, data) duel.log += duelobj.write_log(log, user, data) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_field_det( duelobj, duel, user, answer_org, exclude, whether_monster, monster_effect_text, monster_effect_val, request, cost_flag=0, log=None, lock=None, effect_kind="", ): global check_array if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 room_number = int(request.POST["room_number"]) if user == 1: other_user = 2 else: other_user = 1 chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) if cost_flag == 0: chain_user = int(chain_user[str(duel.chain - 1)]) else: chain_user = int(chain_user[duelobj.tmp_chain]) mess = duelobj.mess timing_mess = duelobj.timing_mess cost = duelobj.cost return_value = [] answer = sorted(answer_org,key=lambda x:x["x"]) for answer_val in answer: x = int(answer_val["x"]) y = int(answer_val["y"]) tmp_count = 0 for monster_effect_det in monster_effect_text: as_monster_effect = monster_effect_det["as_monster_condition"] if monster_effect_val == 44: if as_monster_effect[0] == "~": #if str(int(duel.chain-2)) not in cost: cost[str(int(duel.chain - 2))] = {} if "choose" not in cost[str(int(duel.chain - 2))]: cost[str(int(duel.chain - 2))]["choose"] = [] effect_cost_flag = 1 else: #if str(int(duel.chain-2)) not in mess: 
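    # Label prefixes route a selection to different stores: names starting
    # with "~" are cost selections kept in duelobj.cost, names starting with
    # "%" go to duelobj.timing_mess, and everything else lands in
    # duelobj.mess for the resolving effect.  monster_effect_val 44 writes
    # one chain link back, hence the duel.chain - 2 index used below.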
mess[str(int(duel.chain - 2))] = {} if "choose" not in mess[str(int(duel.chain - 2))]: mess[str(int(duel.chain - 2))]["choose"] = [] effect_cost_flag == 0 tmp_count += 1 if (user == 1 and chain_user == 1) or (user == 2 and chain_user == 2): if ( (monster_effect_val == 3) or (monster_effect_val == 44) or (monster_effect_val == 5 and tmp_count == 1) ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[2] == "1": mine_or_other = user elif place_tmp[2] == "2": mine_or_other = other_user else: mine_or_other = 0 if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if field["mine_or_other"] != mine_or_other: continue if whether_monster == 0: if field["det"] is not None: return HttpResponse("error") else: if cost_flag == 0: if monster_effect_val != 44: if not str(duel.chain - 1) in mess: mess[str(duel.chain - 1)] = {} if "choose" in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if monster_effect_val == 44: if effect_cost_flag == 1: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["mine_or_other"] = field["mine_or_other"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "field" return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][ as_monster_effect ] = [] cost[str(duelobj.tmp_chain)][ as_monster_effect ].append(tmp2) else: if field["det"] is None: return HttpResponse("error") else: tmp2 = {} tmp2["det"] = field["det"] tmp2["mine_or_other"] = field["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, cost_flag, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val == 44: if effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if ( "choose" not in 
mess[str(duel.chain - 1)] ): mess[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if ( "choose" not in cost[str(duel.chain - 1)] ): cost[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["mine_or_other"] = field["mine_or_other"] tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "field" return_value.append(tmp2) if monster_effect_val == 44: if effect_cost_flag == 0: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][ as_monster_effect ] = [] cost[str(duelobj.tmp_chain)][ as_monster_effect ].append(tmp2) elif (user == 2 and chain_user == 1) or (user == 1 and chain_user == 2): if (monster_effect_val == 4) or ( monster_effect_val == 5 and tmp_count == 2 ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[2] == "1": mine_or_other = user elif place_tmp[2] == "2": mine_or_other = other_user else: mine_or_other = 0 if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if field["mine_or_other"] != mine_or_other: continue if whether_monster == 0: if field["det"] is not None: return HttpResponse("error") else: if cost_flag == 0: if monster_effect_val == 44: if effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if ( "choose" not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)]["choose"] = [] elif effect_cost_flag == 1: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if ( "choose" not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = 
[] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = other_user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if monster_effect_val == 44: if effect_cost_flag == 0: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = other_user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][ as_monster_effect ] = [] cost[str(duelobj.tmp_chain)][ as_monster_effect ].append(tmp2) else: if field["det"] is None: return HttpResponse("error") else: tmp2 = {} tmp2["det"] = field["det"] tmp2["mine_or_other"] = field["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, cost_flag, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val == 44: if effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if ( "choose" not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if ( "choose" not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = other_user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] if monster_effect_val == 44: if effect_cost_flag == 0: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) 
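    # place["det"] is an underscore-separated descriptor of zone, kind and
    # owner code, e.g. "field_monster_1" (invented example).  Owner codes
    # "1"/"2" here (and "4"/"5" in the under-card variant) resolve to the
    # acting player and the opponent respectively; any other code means a
    # shared zone (mine_or_other == 0).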
else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timign_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = other_user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][ as_monster_effect ] = [] cost[str(duelobj.tmp_chain)][ as_monster_effect ].append(tmp2) else: for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[0] == "field": fields = duelobj.field field = fields[x][y] if field["kind"].find(place_tmp[1]) == -1: continue if int(field["mine_or_other"]) != 0: continue if whether_monster == 0: if field["det"] is not None: return HttpResponse("error") else: if cost_flag == 0: if monster_effect_val == 44: if effect_cost_flag == 0: if str(duel.chain - 2) not in mess: mess[str(duel.chain - 2)] = {} if "choose" not in mess[str(duel.chain - 2)]: mess[str(duel.chain - 2)]["choose"] = [] else: if str(duel.chain - 2) not in cost: cost[str(duel.chain - 2)] = {} if "choose" not in cost[str(duel.chain - 2)]: cost[str(duel.chain - 1)]["choose"] = [] else: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["mine_or_other"] = field["mine_or_other"] tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "field" return_value.append(tmp2) if monster_effect_val == 44: if effect_cost_flag == 0: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][as_monster_effect].append( tmp2 ) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] 
= y tmp2["deck_id"] = 0 tmp2["user"] = user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if ( as_monster_effect not in cost[str(duelobj.tmp_chain)] ): cost[str(duelobj.tmp_chain)][as_monster_effect] = [] cost[str(duelobj.tmp_chain)][as_monster_effect].append( tmp2 ) else: if field["det"] is None: return HttpResponse("error") else: tmp2 = {} tmp2["det"] = field[x][y]["det"] tmp2["mine_or_other"] = field[x][y]["mine_or_other"] tmp2["user"] = chain_user tmp2["place"] = "field" tmp2["deck_id"] = 0 tmp2["x"] = x tmp2["y"] = y tmp2["place_unique_id"] = field[x][y]["det"][ "place_unique_id" ] return_value.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, cost_flag, effect_kind, user, ): return HttpResponse("error") check_array.append(field["det"]) if cost_flag == 0: if monster_effect_val != 44: if str(duel.chain - 1) not in mess: mess[str(duel.chain - 1)] = {} if "choose" not in mess[str(duel.chain - 1)]: mess[str(duel.chain - 1)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in mess[str(duel.chain - 1)] ): mess[str(duel.chain - 1)][ as_monster_effect ] = [] mess[str(duel.chain - 1)][as_monster_effect].append( tmp2 ) else: if effect_cost_flag == 0: if as_monster_effect[0] == "%": if as_monster_effect not in timing_mess: timing_mess[as_monster_effect]=[] timing_mess[as_monster_effect].append(tmp2) else: if ( as_monster_effect not in mess[str(duel.chain - 2)] ): mess[str(duel.chain - 2)][ as_monster_effect ] = [] mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if ( as_monster_effect not in cost[str(duel.chain - 2)] ): cost[str(duel.chain - 2)][ as_monster_effect ] = [] cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) else: if str(duelobj.tmp_chain) not in cost: cost[str(duelobj.tmp_chain)] = {} if "choose" not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)]["choose"] = [] tmp2 = {} tmp2["det"] = field["det"] tmp2["hide"] = ( field["hide"] if ("hide" in field) else False ) tmp2["x"] = x tmp2["y"] = y tmp2["deck_id"] = 0 tmp2["place_unique_id"] = field["det"][ "place_unique_id" ] tmp2["user"] = user tmp2["place"] = "field" tmp2["mine_or_other"] = field["mine_or_other"] return_value.append(tmp2) if as_monster_effect not in cost[str(duelobj.tmp_chain)]: cost[str(duelobj.tmp_chain)][as_monster_effect] = [] cost[str(duelobj.tmp_chain)][as_monster_effect].append(tmp2) duelobj.mess = mess duelobj.timing_mess = timing_mess duelobj.cost = cost choices = None if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if duel.ask == 0 and duel.in_cost is False: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = 
effect.monster_effect_next if next_effect != 0 and next_effect is not None: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() duel.in_pac = json.dumps(pac) pac = PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) #duelobj.invoke_trigger_waiting(duel.trigger_waiting) #duelobj.retrieve_chain( # decks, graves, hands, duel.phase, duel.user_turn, user, other_user #) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) else: choices = None elif duel.ask == 0: cost_det = duel.cost_det effect = CostWrapper.objects.get(id=cost_det) if effect.pac: next_effect = duelobj._pac_cost(effect.pac) elif effect.cost_next: next_effect = effect.cost_next else: next_effect = duelobj.pop_pac_cost(user) tmp = False if next_effect is not None and next_effect != -2: duel.cost_det = next_effect.id trigger = Trigger.objects.get(id=duel.current_trigger) tmp = duelobj.pay_cost(next_effect, user,duel.chain,trigger) else: duel.cost_det = 0 if duel.cost_det == 0 and tmp is False: if duel.in_copying is False: trigger = Trigger.objects.get(id=duel.current_trigger) duelobj.end_cost(duel.cost_user,duel.chain,trigger) if trigger.chain_flag is True: duel.virtual_chain += 1 duel.chain += 1 else: duelobj.end_cost(duel.cost_user,duel.chain,trigger) if duel.in_cost is False and duel.chain == 0 and duel.in_copying is False: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(0)] if current_chain == 0: duelobj.cost = {} choices = None if duel.in_cost is False: data = {} data["monsters"] = return_value if log is None: log = "" duel.log_turn += duelobj.write_log(log, user, data) duel.log += duelobj.write_log(log, user, data) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_det(duelobj, duel, user, answer_json, request, del_ask, lock,ID1,ID2): global check_array if "ID" in request.COOKIES: ID = request.COOKIES["ID"] else: ID = "" room_number = int(request.POST["room_number"]) answer = json.loads(answer_json) chain_det = json.loads(duel.chain_det) chain_user = json.loads(duel.chain_user) chain_user = int(chain_user[str(duel.chain - 1)]) if duel.in_copying is True: duelobj.tmp_chain = str(duel.chain - 1) else: duelobj.tmp_chain = str(duel.chain) if chain_user == 0: if
request.user == duel.user_1 or (ID1 == ID or duel.guest_flag is True): chain_user = 1 else: chain_user = 2 if user == 1: other_user = 2 else: other_user = 1 monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) duelobj.retrieve = 1 effect_kind = monster_effect_wrapper.monster_effect_kind monster_effect = monster_effect_wrapper.monster_effect other_user_flag = False if monster_effect.monster_effect_val == 4: other_user_flag = True if monster_effect.monster_effect_val == 5: if duelobj.user != chain_user: other_user_flag = True log = monster_effect_wrapper.log monster_effect_text = json.loads(monster_effect.monster_effect) monster_effect_val = monster_effect.monster_effect_val exclude = monster_effect_text["exclude"] if "whether_monster" in monster_effect_text: whether_monster = monster_effect_text["whether_monster"] else: whether_monster = 0 monster_effect_text = monster_effect_text["monster"] if len(answer) < duelobj.calculate_boland( monster_effect_text[0]["min_equation_number"], None, other_user_flag ) or len(answer) > duelobj.calculate_boland( monster_effect_text[0]["max_equation_number"], None, other_user_flag ): return HttpResponse("error") return_val = [] if monster_effect.monster_effect_val == 65: return answer_as_under( duelobj, duel, user, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, room_number, ) if monster_effect.monster_effect_val == 57: return answer_as( duelobj, duel, user, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, room_number, ) for answer_val in answer: place_for_answer = answer_val["place"] if place_for_answer == "player": effect_det_monster = effect_det["monster"] as_effect = effect_det["as_monster_condition"] for place in effect_det_monster["place"]: place_tmp = place["det"].split("_") mine_or_other = int(answer_val["mine_or_other"]) if(place_tmp[0] == "player" and place_tmp[1] == mine_or_other): tmp2 = {} tmp2["kind"] = "player" tmp2["mine_or_other"] = mine_or_other tmp[as_effect].append(tmp2) elif place_for_answer == "under": if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: return answer_under_det( duelobj, duel, 1, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) else: if duel.ask == 2 or duel.ask == 3: return answer_under_det( duelobj, duel, 1, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: return answer_under_det( duelobj, duel, 2, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) else: if duel.ask == 2 or duel.ask == 3: return answer_under_det( duelobj, duel, 2, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) elif place_for_answer == "field": if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: return answer_field_det( duelobj, duel, 1, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) else: if duel.ask == 2 or duel.ask == 3: return answer_field_det( duelobj, duel, 1, answer, exclude, whether_monster, 
monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: return answer_field_det( duelobj, duel, 2, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) else: if duel.ask == 2 or duel.ask == 3: return answer_field_det( duelobj, duel, 2, answer, exclude, whether_monster, monster_effect_text, monster_effect_val, request, 0, log, lock, effect_kind, ) else: tmp_count = 0 place_unique_id = answer_val["place_unique_id"] mine_or_other = int(answer_val["mine_or_other"]) if user == 1: if mine_or_other == 1: mine_or_other = 1 mine_or_other_org = 1 elif mine_or_other == 2: mine_or_other = 2 mine_or_other_org = 2 else: mine_or_other = 3 mine_or_other_org = 3 else: if mine_or_other == 1: mine_or_other = 2 mine_or_other_org = 1 elif mine_or_other == 2: mine_or_other = 1 mine_or_other_org = 2 else: mine_or_other = 3 mine_or_other_org = 3 for monster_effect_det in monster_effect_text: tmp_count += 1 as_monster_effect = monster_effect_det["as_monster_condition"] if monster_effect_val == 44: if as_monster_effect[0] == "%": timing_mess = duelobj.timing_mess #cost_flagが2はtiming_mess cost_flag = 2 elif as_monster_effect[0] == "~": cost = duelobj.cost #if str(int(duel.chain-2)) not in cost: cost[str(int(duel.chain - 2))] = {} cost[str(int(duel.chain - 2))]["choose"] = [] cost_flag = 1 else: mess = duelobj.mess mess[str(int(duel.chain - 2))] = {} mess[str(int(duel.chain - 2))]["choose"] = [] cost_flag = 0 if (user == 1 and chain_user == 1) or (user == 2 and chain_user == 2): if (monster_effect_val == 3) or ( monster_effect_val == 5 and tmp_count == 1 ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: current_place_and_or = place["and_or"] place_tmp = place["det"].split("_") deck_id = -1 if place_tmp[0] == "deck" and "deck_id" in answer_val: deck_id = int(answer_val["deck_id"]) elif place_tmp[0] == "grave" and "grave_id" in answer_val: deck_id = int(answer_val["grave_id"]) elif place_tmp[0] == "hand" and "hand_id" in answer_val: deck_id = int(answer_val["hand_id"]) if deck_id == -1: continue if place_tmp[0] == place_for_answer: if place_tmp[0] == "deck" and deck_id == int( place_tmp[1] ): if mine_or_other_org == 1: tmp = duelobj.decks[deck_id]["mydeck"] elif mine_or_other_org == 2: tmp = duelobj.decks[deck_id]["otherdeck"] else: tmp = duelobj.decks[deck_id]["commondeck"] user_decks = tmp for user_deck in user_decks: if ( place_unique_id == user_deck["place_unique_id"] ): tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_deck[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(user_deck) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( 
as_monster_effect not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if place_tmp[0] == "grave" and deck_id == int( place_tmp[1] ): if mine_or_other_org == 1: tmp = duelobj.graves[deck_id]["mygrave"] elif mine_or_other_org == 2: tmp = duelobj.graves[deck_id]["othergrave"] else: tmp = duelobj.graves[deck_id]["commongrave"] user_graves = tmp for user_grave in user_graves: if ( place_unique_id == user_grave["place_unique_id"] ): tmp2 = {} tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "grave1" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_grave[ "place_unique_id" ] return_val.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(user_grave) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if place_tmp[0] == "hand" and deck_id == int( place_tmp[1] ): deck_id = answer_val["hand_id"] if mine_or_other_org == 1: tmp = duelobj.hands[deck_id]["myhand"] elif mine_or_other_org == 2: tmp = duelobj.hands[deck_id]["otherhand"] else: tmp = duelobj.hands[deck_id]["commonhand"] user_hands = tmp for user_hand in user_hands: if ( place_unique_id == user_hand["place_unique_id"] ): tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand[ "place_unique_id" ] return_val.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(user_hand) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in tmp[str(duel.chain - 1)] ): 
tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if (user == 2 and chain_user == 1) or (user == 1 and chain_user == 2): if (monster_effect_val == 4) or ( monster_effect_val == 5 and tmp_count == 2 ): monster_effect_det_monster = monster_effect_det["monster"] for place in monster_effect_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[0] == "deck": deck_id = answer_val["deck_id"] elif place_tmp[0] == "grave": deck_id = answer_val["grave_id"] elif place_tmp[0] == "hand": deck_id = answer_val["hand_id"] if place_tmp[0] == place_for_answer: if ( place_tmp[0] == "deck" and int(place_tmp[1]) == deck_id ): if mine_or_other_org == 1: tmp = duelobj.decks[deck_id]["mydeck"] elif mine_or_other_org == 2: tmp = duelobj.decks[deck_id]["otherdeck"] else: tmp = duelobj.decks[deck_id]["commondeck"] user_decks = tmp tmp_flag = False for user_deck in user_decks: if ( place_unique_id == user_deck["place_unique_id"] ): tmp_flag = True tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_deck[ "place_unique_id" ] return_val.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(user_deck) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if tmp_flag is False: return HttpResponse("error") if ( place_tmp[0] == "grave" and int(place_tmp[1]) == deck_id ): if mine_or_other_org == 1: tmp = duelobj.graves[deck_id]["mygrave"] elif mine_or_other_org == 2: tmp = duelobj.graves[deck_id]["othergrave"] else: tmp = duelobj.graves[deck_id]["commongrave"] user_graves = tmp tmp_flag = False for user_grave in user_graves: if ( place_unique_id == user_grave["place_unique_id"] ): tmp_flag = True tmp2 = {} tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_grave[ "place_unique_id" ] return_val.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") 
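# --- Editor's sketch (an assumption, not part of the original source): the
# branches above all repeat the same bookkeeping on duelobj.mess / duelobj.cost,
# which appear to be dicts keyed first by chain index, then by an
# "as_monster_effect" name, each holding a list of chosen cards. A hypothetical
# helper expressing that idiom once could look like this; the names
# _append_choice / bucket / chain_key / as_key are illustrative only.
def _append_choice(bucket, chain_key, as_key, entry):
    """Ensure bucket[chain_key][as_key] exists, then append entry to it."""
    chain = bucket.setdefault(chain_key, {})
    chain.setdefault("choose", [])  # mirrors the "choose" initialisation above
    chain.setdefault(as_key, []).append(entry)

# e.g. _append_choice(duelobj.mess, str(duel.chain - 1), as_monster_effect, tmp2)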
check_array.append(user_grave) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if tmp_flag is False: return HttpResponse("error") if ( place_tmp[0] == "hand" and int(place_tmp[1]) == deck_id ): deck_id = answer_val["hand_id"] if mine_or_other_org == 1: tmp = duelobj.hands[deck_id]["myhand"] elif mine_or_other_org == 2: tmp = duelobj.hands[deck_id]["otherhand"] else: tmp = duelobj.hands[deck_id]["commonhand"] user_hands = tmp tmp_flag = False for user_hand in user_hands: if ( place_unique_id == user_hand["place_unique_id"] ): tmp_flag = True tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = chain_user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand[ "place_unique_id" ] return_val.append(tmp2) if not duelobj.validate_answer( tmp2, monster_effect_det_monster, exclude, duel, 1, 0, effect_kind, user, ): return HttpResponse("error") check_array.append(user_hand) tmp = duelobj.mess if monster_effect_val != 44: if str(duel.chain - 1) not in tmp: tmp[str(duel.chain - 1)] = {} if ( "choose" not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ "choose" ] = [] tmp2 = {} tmp2["det"] = user_hand tmp2["x"] = 0 tmp2["y"] = 0 tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id return_val.append(tmp2) if monster_effect_val != 44: if ( as_monster_effect not in tmp[str(duel.chain - 1)] ): tmp[str(duel.chain - 1)][ as_monster_effect ] = [] tmp[str(duel.chain - 1)][ as_monster_effect ].append(tmp2) duelobj.mess = tmp else: if cost_flag == 2: timing_mess[ as_monster_effect ].append(tmp2) duelobj.timing_mess = timing_mess elif cost_flag == 0: mess[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.mess = mess else: cost[str(duel.chain - 2)][ as_monster_effect ].append(tmp2) duelobj.cost = cost if tmp_flag is False: return HttpResponse("error") choices = None duel.ask -= del_ask if monster_effect.monster_condition != "": if not check_condition(duel, monster_effect.monster_condition, duelobj): return HttpResponse("error") if duel.ask == 0: chain_det = json.loads(duel.chain_det) current_chain = chain_det[str(duel.chain - 1)] effect = MonsterEffectWrapper.objects.get(id=current_chain) if effect.pac: next_effect = duelobj._pac(effect.pac) else: next_effect = effect.monster_effect_next if next_effect != 0: chain_det[str(duel.chain - 1)] = next_effect.id else: pac = json.loads(duel.in_pac) if str(duel.chain - 1) in pac and pac[str(duel.chain - 1)] != []: pac_id = pac[str(duel.chain - 1)].pop() pac = 
PacWrapper.objects.get(id=pac_id) next_effect = pac.monster_effect_next if next_effect is None: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 else: chain_det[str(duel.chain - 1)] = next_effect.id else: trigger = Trigger.objects.get(id=duel.current_trigger) if trigger.chain_flag is True: duel.virtual_chain -= 1 duel.chain -= 1 duelobj.duel.chain_det = json.dumps(chain_det) decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_trigger_waiting is False: duelobj.retrieve_chain( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.chain == 0: duelobj.invoke_after_chain_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) #duelobj.invoke_trigger_waiting(duel.trigger_waiting) #duelobj.retrieve_chain( # decks, graves, hands, duel.phase, duel.user_turn, user, other_user #) duel.appoint = duel.user_turn tmp = {} duel.mess = json.dumps(tmp) duel.cost_result = json.dumps(tmp) duel.cost = json.dumps(tmp) duelobj.invoke_trigger_waiting(duel.trigger_waiting) duel.current_priority = 10000 choices = duelobj.check_trigger( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_cost is False: data = {} data["monsters"] = return_val if log is None: log = "" duel.log_turn += duelobj.write_log(log, user, data) duel.log += duelobj.write_log(log, user, data) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, choices) def answer_field_det_cost( duelobj, duel, user, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, lock, effect_kind, ): return answer_field_det( duelobj, duel, user, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, None, lock, effect_kind, ) def answer_det_cost(duelobj, duel, user, answer, request, del_ask, room_number, lock,ID1,ID2): if "ID" in request.COOKIES: ID = request.COOKIES["ID"] else: ID = "" if user == 1: other_user = 2 else: other_user = 1 global check_array room_number = int(request.POST["room_number"]) cost_det = duel.cost_det cost_user = duel.cost_user if cost_user == 0: if request.user == duel.user_1 or (ID1 == ID and duel.guest_flag is True): cost_user = 1 else: cost_user = 2 cost_wrapper = CostWrapper.objects.get(id=cost_det) cost = cost_wrapper.cost other_user_flag = False if cost.cost_val == 4: other_user_flag = True if cost.cost_val == 5: if duelobj.user != cost_user: other_user_flag = True effect_kind = cost_wrapper.cost_kind cost_text = json.loads(cost.cost) cost_effect_val = cost.cost_val exclude = cost_text["exclude"] if duel.in_copying is True: duelobj.tmp_chain = str(duel.chain - 1) else: duelobj.tmp_chain = str(duel.chain) if "whether_monster" in cost_text: whether_monster = cost_text["whether_monster"] else: whether_monster = 0 cost_text = cost_text["monster"] if len(answer) < duelobj.calculate_boland( cost_text[0]["min_equation_number"], None, other_user_flag ) or len(answer) > duelobj.calculate_boland( cost_text[0]["max_equation_number"], None, other_user_flag ): free_lock(room_number, lock) return HttpResponse("error") own_player_flag = False other_player_flag = False for answer_val in answer: place_for_answer = answer_val["place"] if place_for_answer == "player": cost_det_monster = cost_det["monster"] as_cost =
cost_det["as_monster_condition"] for place in cost_det_monster["place"]: place_tmp = place["det"].split("_") mine_or_other = int(answer_val["mine_or_other"]) if(place_tmp[0] == "player" and place_tmp[1] == mine_or_other): if place_tmp[1] == "1": if own_player_flag == True: free_lock(room_number, lock) return HttpResponse("error") else: own_player_flag = True if place_tmp[1] == "2": if other_player_flag == True: free_lock(room_number, lock) return HttpResponse("error") else: other_player_flag = True tmp2 = {} tmp2["place"] = "player" tmp2["mine_or_other"] = mine_or_other tmp[as_cost].append(tmp2) elif place_for_answer == "field": if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: return_value = answer_field_det_cost( duelobj, duel, 1, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, lock, effect_kind, ) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = answer_field_det_cost( duelobj, duel, 1, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, lock, effect_kind, ) free_lock(room_number, lock) return return_value elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: return_value = answer_field_det_cost( duelobj, duel, 2, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, lock, effect_kind, ) free_lock(room_number, lock) return return_value else: if duel.ask == 2 or duel.ask == 3: return_value = answer_field_det_cost( duelobj, duel, 2, answer, exclude, whether_monster, cost_text, cost_effect_val, request, room_number, lock, effect_kind, ) free_lock(room_number, lock) return return_value else: place_unique_id = answer_val["place_unique_id"] mine_or_other = int(answer_val["mine_or_other"]) if user == 2: if mine_or_other == 1: mine_or_other_absolute = 2 elif mine_or_other == 2: mine_or_other_absolute = 1 else: mine_or_other_absolute = mine_or_other for cost_det in cost_text: as_cost = cost_det["as_monster_condition"] if (user == 1 and cost_user == 1) or (user == 2 and cost_user == 2): cost_det_monster = cost_det["monster"] for place in cost_det_monster["place"]: place_tmp = place["det"].split("_") if place_tmp[0] == place_for_answer: if place_tmp[0] == "deck": deck_id = answer_val["deck_id"] if mine_or_other == 1: tmp = duelobj.decks[deck_id]["mydeck"] elif mine_or_other == 2: tmp = duelobj.decks[deck_id]["otherdeck"] else: tmp = duelobj.decks[deck_id]["commondeck"] user_decks = tmp for user_deck in user_decks: if place_unique_id == user_deck["place_unique_id"]: tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_deck[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 1, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_deck) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp 
duelobj.cost = tmp3 if place_tmp[0] == "grave": deck_id = answer_val["grave_id"] if mine_or_other == 1: tmp = duelobj.graves[deck_id]["mygrave"] elif mine_or_other == 2: tmp = duelobj.graves[deck_id]["othergrave"] else: tmp = duelobj.graves[deck_id]["commongrave"] user_graves = tmp for user_grave in user_graves: if place_unique_id == user_grave["place_unique_id"]: tmp2 = {} tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_grave[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 0, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_grave) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp duelobj.cost = tmp3 if place_tmp[0] == "hand": deck_id = answer_val["hand_id"] if mine_or_other == 1: tmp = duelobj.hands[deck_id]["myhand"] elif mine_or_other == 2: tmp = duelobj.hands[deck_id]["otherhand"] else: tmp = duelobj.hands[deck_id]["commonhand"] user_hands = tmp for user_hand in user_hands: if place_unique_id == user_hand["place_unique_id"]: tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 0, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_hand) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp duelobj.cost = tmp3 if (user == 2 and cost_user == 1) or (user == 1 and cost_user == 2): for place in cost_det["place"].values(): place_tmp = place["det"].split("_") if place_tmp[0] == place_for_answer: if place_tmp[0] == "deck": deck_id = answer_val["deck_id"] if mine_or_other == 1: tmp = duelobj.decks[deck_id]["mydeck"] elif mine_or_other == 2: tmp = duelobj.decks[deck_id]["otherdeck"] else: tmp = duelobj.decks[deck_id]["commondeck"] user_decks = tmp for user_deck in user_decks: if place_unique_id == user_deck["place_unique_id"]: tmp2 = {} tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_deck[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 0, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_deck) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_deck tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = 
user tmp2["place"] = "deck" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp duelobj.cost = tmp3 if place_tmp[0] == "grave": deck_id = answer_val["grave_id"] if mine_or_other == 1: tmp = duelobj.graves[deck_id]["mygrave"] elif mine_or_other == 2: tmp = duelobj.graves[deck_id]["othergrave"] else: tmp = duelobj.graves[deck_id]["commongrave"] user_graves = tmp for user_grave in user_graves: if place_unique_id == user_grave["place_unique_id"]: tmp2 = {} tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_grave[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 0, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_grave) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_grave tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = user tmp2["place"] = "grave" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp duelobj.cost = tmp3 if place_tmp[0] == "hand": deck_id = answer_val["hand_id"] if mine_or_other == 1: tmp = duelobj.hands[deck_id]["myhand"] elif mine_or_other == 2: tmp = duelobj.hands[deck_id]["otherhand"] else: tmp = duelobj.hands[deck_id]["commonhand"] user_hands = tmp for user_hand in user_hands: if place_unique_id == user_hand["place_unique_id"]: tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand[ "place_unique_id" ] if not duelobj.validate_answer( tmp2, cost_det_monster, exclude, duel, 1, 1, effect_kind, user, ): free_lock(room_number, lock) return HttpResponse("error") check_array.append(user_hand) tmp3 = duelobj.cost tmp = tmp3[str(duelobj.tmp_chain)] if "choose" not in tmp: tmp["choose"] = [] tmp2 = {} tmp2["x"] = 0 tmp2["y"] = 0 tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other_absolute tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["place_unique_id"] = place_unique_id if as_cost not in tmp: tmp[as_cost] = [] tmp[as_cost].append(tmp2) tmp3[str(duel.chain)] = tmp duelobj.cost = tmp3 if cost.cost_condition != "": if not check_condition(cost.cost_condition, duelobj): free_lock(room_number, lock) return HttpResponse("error") if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): if duel.user_turn == 1: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.user_turn == 2: if duel.ask == 1 or duel.ask == 3: duel.ask -= 1 else: if duel.ask == 2 or duel.ask == 3: duel.ask -= 2 if duel.ask == 0: cost_det = duel.cost_det effect = CostWrapper.objects.get(id=cost_det) if effect.pac: next_effect = duelobj._pac_cost(cost.pac) elif effect.cost_next: next_effect = effect.cost_next else: next_effect = duelobj.pop_pac_cost(user) if next_effect is not None and next_effect != -2: duel.cost_det = next_effect.id else: duel.cost_det = 0 next_effect = None trigger = 
Trigger.objects.get(id=duel.current_trigger) tmp = duelobj.pay_cost(next_effect, user,duel.chain,trigger) if next_effect == 0 or tmp is True: duelobj.end_cost(duel.cost_user,duel.chain,trigger) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj) def check_condition(duel, monster_condition, duelobj): monster = None duelobj.duel = duel global check_array effect_det_org = json.loads(monster_condition) if "different_flag" in effect_det_org: different_flag = effect_det_org["different_flag"] else: different_flag = False effect_det = effect_det_org["monster"][0]["monster"] monster_name_kind = effect_det["monster_name_kind"] equation_kind = effect_det_org["monster"][0]["equation"]["equation_kind"] current_and_or = "and" count = 0 variety = [] same_name = {} variable_variety = [] variable_counter = 0 counter = -1 x_counter = 0 y_counter = 0 if ( equation_kind != "number" and equation_kind != "kind" and equation_kind != "same_name" ): counter = equation_kind different_array = [] for monster in check_array: if different_flag ==True: if monster["monster_name"] in different_array: return False else: different_array.append(monster["monster_name"]) name_flag = True for name_kind in monster_name_kind: if name_kind != "": if name_kind["operator"] == "=": if monster["monster_name"] != duelobj.get_name( name_kind["monster_name"] ): if current_and_or == "and": name_flag = False else: if current_and_or == "or": name_flag = True current_and_or = name_kind["and_or"] elif name_kind["operator"] == "like": if ( monster["monster_name"].find( duelobj.get_name(name_kind["monster_name"]) ) > -1 ): if current_and_or == "and": name_flag = False else: if current_and_or == "or": name_flag = True current_and_or = name_kind["and_or"] if name_flag is False: continue monster_condition_val = effect_det["monster_condition"] cond_flag = True for cond_det in monster_condition_val: current_and_or = "and" tmp_flag = True for cond_val in cond_det: if len(cond_val) == 0: continue tmp = monster["variables"][cond_val["name"]] if cond_val["init"] == 0: value = tmp["value"] elif cond_val["init"] == 1: value = tmp["i_val"] elif cond_val["init"] == 2: value = tmp["i_i_val"] if cond_val["operator"] == "=" or cond_val["operator"] == "": if int(value) != duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif cond_val["operator"] == "<=": if int(value) > duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif cond_val["operator"] == ">=": if int(value) < duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif cond_val["operator"] == "!=": if int(value) == duelobj.calculate_boland(cond_val["num"]): tmp_flag = False if current_and_or == "and": if cond_flag is True: cond_flag = tmp_flag else: if cond_flag is False: cond_flag = tmp_flag if cond_flag is False: break if cond_flag is False: continue custom_monster_condition = effect_det["custom_monster_condition"] cond_flag = True for cond_det in custom_monster_condition: current_and_or = "and" tmp_flag = True for cond_val in cond_det: if not cond_val: continue tmp = monster["custom_variables"][cond_val["name"]] if cond_val["init"] == 0: value = tmp["value"] elif cond_val["init"] == 1: value = tmp["i_val"] elif cond_val["init"] == 2: value = tmp["i_i_val"] if cond_val["operator"] == "=" or cond_val["operator"] == "": if int(value) != duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif cond_val["operator"] == "<=": if int(value) > duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif 
cond_val["operator"] == ">=": if int(value) < duelobj.calculate_boland(cond_val["num"]): tmp_flag = False elif cond_val["operator"] == "!=": if int(value) == duelobj.calculate_boland(cond_val["num"]): tmp_flag = False if current_and_or == "and": if cond_flag is True: cond_flag = tmp_flag else: if cond_flag is False: cond_flag = tmp_flag if cond_flag is False: break if cond_flag is False: continue if counter != -1: variable = monster["variables"][counter]["value"] tmp_varieties = variable.split("_") for tmp_variety in tmp_varieties: variable_variety.append(tmp_variety) variable_counter += int(variable) if monster["id"] not in variety: variety.append(monster["id"]) if monster["monster_name"] not in same_name: same_name[monster["monster_name"]] = 0 same_name[monster["monster_name"]] += 1 count += 1 min_equation_number = effect_det_org["monster"][0]["min_equation_number"] max_equation_number = effect_det_org["monster"][0]["max_equation_number"] if equation_kind == "number": if count >= duelobj.calculate_boland( min_equation_number, monster ) and count <= duelobj.calculate_boland(max_equation_number, monster): return True else: return False elif equation_kind == "kind": if len(variety) >= duelobj.calculate_boland( min_equation_number, monster ) and len(variety) <= duelobj.calculate_boland(max_equation_number, monster): return True else: return False elif equation_kind == "same_name": same_name_max = max(same_name.values()) if ( same_name and same_name_max >= duelobj.calculate_boland(min_equation_number, monster) and same_name_max <= duelobj.calculate_boland(max_equation_number, monster) ): return True else: return False elif counter == "x": if x_counter >= duelobj.calculate_boland( min_equation_number, monster ) and x_counter <= duelobj.calculate_boland(max_equation_number, monster): return True else: return False elif counter == "y": if y_counter >= duelobj.calculate_boland( min_equation_number, monster ) and y_counter <= duelobj.calculate_boland(max_equation_number, monster): return True else: return False else: if variable_counter >= duelobj.calculate_boland( min_equation_number, monster ) and variable_counter <= duelobj.calculate_boland(max_equation_number, monster): return True else: return False def free_lock(room_number, lock): if room_number == 1: lock.lock_1 = False lock.save() elif room_number == 2: lock.lock_2 = False lock.save() elif room_number == 3: lock.lock_3 = False lock.save() def force_trigger(request): global check_array room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duelobj = DuelObj(room_number) check_array = [] duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponseRedirect(reverse("tcgcreator:watch_battle")) duelobj.duel = duel duelobj.room_number = room_number if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag): user = 1 other_user = 2 duelobj.user = 1 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, 
user, other_user ) chain_det = json.loads(duel.chain_det) monster_effect_wrapper = MonsterEffectWrapper.objects.get( id=int(chain_det[str(duel.chain - 1)]) ) monster_effect = monster_effect_wrapper.monster_effect if monster_effect.monster_effect_val != 59: free_lock(room_number, lock) return HttpResponse("error") det = json.loads(monster_effect.monster_effect) deck_id = det["deck_id"] ignore_timing = det["ignore_timing"] if duel.user_turn == 1: if duel.ask == 1: if user == 2: return HttpResponse("error") elif duel.ask == 2: if user == 1: return HttpResponse("error") elif duel.user_turn == 2: if duel.ask == 2: if user == 2: return HttpResponse("error") elif duel.ask == 1: if user == 1: return HttpResponse("error") place_unique_id = request.POST["place_unique_id"] hand = duelobj.hands[deck_id]["otherhand"] user_hands = hand i=0 mine_or_other = other_user for user_hand in user_hands: if user_hand["place_unique_id"] != place_unique_id: continue id = duelobj.get_monster_id( user_hand, "hand", other_user,i, 0, 0, mine_or_other ) monster_det = Monster.objects.get(id=id) triggers = monster_det.trigger.all() triggers = triggers.filter(trigger_timing=False) phase = duel.phase turn = duel.user_turn place_unique_id = user_hand["place_unique_id"] tmp2 = {} tmp2["det"] = user_hand tmp2["mine_or_other"] = mine_or_other tmp2["user"] = user tmp2["place"] = "hand" tmp2["deck_id"] = deck_id tmp2["x"] = 0 tmp2["y"] = 0 tmp2["place_unique_id"] = user_hand["place_unique_id"] for trigger in triggers: if duelobj.check_launch_trigger_ignore_chain_and_timing( trigger, phase, turn, other_user, user, mine_or_other, "hand", place_unique_id, deck_id, ignore_timing ): duelobj.invoke_force_trigger( trigger, "hand", user_hand, mine_or_other, other_user, deck_id, 0, 0, None, None, None, None, None, ) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj, None) free_lock(room_number, lock) return HttpResponse("error") def change_wait(request): room_number = int(request.POST["room_number"]) lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) if lock_flag != "OK": return HttpResponse("waiting") duel = Duel.objects.filter(id=room_number).get() if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" ID1 = duel.guest_id ID2 = duel.guest_id2 duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number if not request.user.is_authenticated: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return HttpResponse("Please Login") if duel.user_1 == request.user or (ID1 == ID and duel.guest_flag is True): user = 1 duelobj.user = 1 other_user = 2 else: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) if user == 1: whether_my_phase = duel.phase_whether_1_1.split("_") whether_my_kind = duel.kind_whether_1_1.split("_") whether_my_timing = duel.timing_whether_1_1.split("_") whether_other_phase = duel.phase_whether_1_2.split("_") whether_other_kind = duel.kind_whether_1_2.split("_") whether_other_timing = duel.timing_whether_1_2.split("_") else: whether_my_phase = duel.phase_whether_2_1.split("_") whether_my_kind = duel.kind_whether_2_1.split("_") whether_my_timing = duel.timing_whether_2_1.split("_") whether_other_phase = duel.phase_whether_2_2.split("_") whether_other_kind = duel.kind_whether_2_2.split("_") whether_other_timing = duel.timing_whether_2_2.split("_") checks = request.POST["check"].split("_") for check in checks: 
check_det = check.split("-") if check_det[0] == "phase": if check_det[1] == "my": if check_det[3] == "check": if check_det[2] in whether_my_phase: pass else: whether_my_phase.append(check_det[2]) else: if check_det[2] not in whether_my_phase: pass else: whether_my_phase.remove(check_det[2]) elif check_det[1] == "other": if check_det[3] == "check": if check_det[2] in whether_other_phase: pass else: whether_other_phase.append(check_det[2]) else: if check_det[2] not in whether_other_phase: pass else: whether_other_phase.remove(check_det[2]) elif check_det[0] == "kind": if check_det[1] == "my": if check_det[3] == "check": if check_det[2] in whether_my_kind: pass else: whether_my_kind.append(check_det[2]) else: if check_det[2] not in whether_my_kind: pass else: whether_my_kind.remove(check_det[2]) elif check_det[1] == "other": if check_det[3] == "check": if check_det[2] in whether_other_kind: pass else: whether_other_kind.append(check_det[2]) else: if check_det[2] not in whether_other_kind: pass else: whether_other_kind.remove(check_det[2]) elif check_det[0] == "timing": if check_det[1] == "my": if check_det[3] == "check": if check_det[2] in whether_my_timing: pass else: whether_my_timing.append(check_det[2]) else: if check_det[2] not in whether_my_timing: pass else: whether_my_timing.remove(check_det[2]) elif check_det[1] == "other": if check_det[3] == "check": if check_det[2] in whether_other_timing: pass else: whether_other_timing.append(check_det[2]) else: if check_det[2] not in whether_other_timing: pass else: whether_other_timing.remove(check_det[2]) if user == 1: duel.phase_whether_1_1 = "_".join(whether_my_phase) duel.kind_whether_1_1 = "_".join(whether_my_kind) duel.timing_whether_1_1 = "_".join(whether_my_timing) duel.phase_whether_1_2 = "_".join(whether_other_phase) duel.kind_whether_1_2 = "_".join(whether_other_kind) duel.timing_whether_1_2 = "_".join(whether_other_timing) else: duel.phase_whether_1_1 = "_".join(whether_my_phase) duel.kind_whether_2_1 = "_".join(whether_my_kind) duel.timing_whether_2_1 = "_".join(whether_my_timing) duel.phase_whether_2_2 = "_".join(whether_other_phase) duel.kind_whether_2_2 = "_".join(whether_other_kind) duel.timing_whether_2_2 = "_".join(whether_other_timing) duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return HttpResponse("OK") ``` #### File: mysite/tcgcreator/choices.py ```python from django.http import HttpResponse, HttpResponseRedirect from django.urls import reverse from django.db.models import Q from .models import ( Deck, Grave, Hand, Duel, Trigger, Lock, ) from pprint import pprint from .battle_det import battle_det,battle_det_return_org_ai from .duel import DuelObj from time import time def lock_lock(room_number, lock,request): duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 is False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if room_number == 1: if lock.lock_1 is True and time() - lock.time_1 < 20: if duel.is_ai is False: return HttpResponse("waiting") duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() user_1 = duel.user_1 user_2 = duel.user_2 if request.user != user_1 and request.user != user_2: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: return HttpResponse("error") if 
request.user == user_1 or (ID1 == ID and duel.guest_flag): duelobj.user = 1 user = 1 other_user = 2 if request.user == user_2 or (ID2 == ID and duel.guest_flag2): duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) return battle_det_return_org_ai( duelobj, decks, graves, hands, user, other_user, choices, room_number ) else: lock.lock_1 = True lock.time_1 = time() lock.save() elif room_number == 2: if lock.lock_2 is True and time() - lock.time_2 < 20: if duel.is_ai is False: return HttpResponse("waiting") duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() user_1 = duel.user_1 user_2 = duel.user_2 if request.user != user_1 and request.user != user_2: return HttpResponse("error") if request.user == user_1: duelobj.user = 1 user = 1 other_user = 2 if request.user == user_2: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) return battle_det_return_org_ai( duelobj, decks, graves, hands, user, other_user, choices, room_number ) else: lock.lock_2 = True lock.time_2 = time() lock.save() elif room_number == 3: if lock.lock_3 is True and time() - lock.time_3 < 20: if duel.is_ai is False: return HttpResponse("waiting") duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() user_1 = duel.user_1 user_2 = duel.user_2 if request.user != user_1 and request.user != user_2: return HttpResponse("error") if request.user == user_1: duelobj.user = 1 user = 1 other_user = 2 if request.user == user_2: duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) return battle_det_return_org_ai( duelobj, decks, graves, hands, user, other_user, choices, room_number ) else: lock.lock_3 = True lock.time_3 = time() lock.save() return "OK" def choices(request): room_number = int(request.POST["room_number"]) trigger_id = request.POST["trigger_id"] lock = Lock.objects.get() lock_flag = lock_lock(room_number, lock,request) duel = Duel.objects.filter(id=room_number).get() if duel.guest_flag is False: ID1 = -1 else: ID1 = duel.guest_id if duel.guest_flag2 is False: ID2 = -1 else: ID2 = duel.guest_id2 if "ID" in request.COOKIES : ID = request.COOKIES["ID"] else: ID = "" if lock_flag != "OK": if duel.is_ai == False: return HttpResponse("waiting") else: duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.room_number = room_number duelobj.in_execute = False decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() user_1 = duel.user_1 user_2 = duel.user_2 if request.user != user_1 and request.user != user_2: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: return HttpResponse("error") if request.user == user_1 or(ID1 == ID and duel.guest_flag is True): duelobj.user = 1 user = 1 other_user = 2 if request.user == user_2 or(ID2 == ID and duel.guest_flag2 is True): duelobj.user = 2 user = 2 other_user = 1 duelobj.init_all(user, other_user, room_number) return battle_det_return_org_ai( duelobj, decks, graves, hands, user, other_user, choices, room_number ) if duel.user_1 != request.user and duel.user_2 != request.user: if (ID1 == ID and duel.guest_flag) or (ID2 == ID and duel.guest_flag2): pass else: free_lock(room_number, lock) return 
HttpResponseRedirect(reverse("tcgcreator:watch_battle")) if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag is True): user = 1 other_user = 2 elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): user = 2 other_user = 1 duelobj = DuelObj(room_number) duelobj.duel = duel duelobj.user = user duelobj.room_number = room_number decks = Deck.objects.all() graves = Grave.objects.all() hands = Hand.objects.all() duelobj.init_all(user, other_user, room_number) duelobj.check_eternal_effect( decks, graves, hands, duel.phase, duel.user_turn, user, other_user ) if duel.in_cost is True: free_lock(room_number, lock) return HttpResponse("error") if duel.user_1 == request.user or ( ID1 == ID and duel.guest_flag is True): if duel.appoint != 1: free_lock(room_number, lock) return HttpResponse("error") duelobj.user = 1 user = 1 other_user = 2 if choices_det(duelobj, trigger_id, request, user) != -1: duelobj.duel.mute = False duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj) else: free_lock(room_number, lock) return HttpResponse("error") elif duel.user_2 == request.user or (ID2 == ID and duel.guest_flag2 is True): if duel.appoint != 2: free_lock(room_number, lock) return HttpResponse("error") duelobj.user = 2 user = 2 other_user = 1 if choices_det(duelobj, trigger_id, request, user) != -1: duelobj.duel.mute = False duelobj.save_all(user, other_user, room_number) free_lock(room_number, lock) return battle_det(request, duelobj) else: free_lock(room_number, lock) return HttpResponse("error") free_lock(room_number, lock) return HttpResponse("error") def choices_det(duelobj, trigger_id, request, user): if user == 1: other_user = 2 else: other_user = 1 triggers = Trigger.objects.all() trigger = triggers.get(id=trigger_id) if trigger is not None and duelobj.check_launch_trigger( trigger, duelobj.duel.phase, duelobj.duel.user_turn, user, other_user, user): return duelobj.invoke_trigger(trigger, "", "", "", duelobj.user, "") else: return -1 def free_lock(room_number, lock): if room_number == 1: lock.lock_1 = False lock.save() elif room_number == 2: lock.lock_2 = False lock.save() elif room_number == 3: lock.lock_3 = False lock.save() ``` #### File: mysite/tcgcreator/custom_functions.py ```python from .models import ( MonsterItem, Monster, Field, UserDeck, UserDeckGroup, UserDeckChoice, Deck, EnemyDeckGroup, EnemyDeckChoice, EnemyDeck, DefaultDeckGroup, DefaultDeckChoice, DefaultDeck, Constraint, ) from django.http import HttpResponse, HttpResponseRedirect import json import uuid import numpy as np from pprint import pprint def init_monster_item(monster_variable): monster = Monster.objects.all() for tmp in monster: monster_item = MonsterItem( monster_id=tmp, monster_variables_id=monster_variable, monster_item_text=monster_variable.default_value, ) monster_item.save() def init_field(x, y): Field.objects.all().delete() for tmp_x in range(0, int(x)): for tmp_y in range(0, int(y)): field = Field(x=tmp_x, y=tmp_y, kind="", mine_or_other=0) field.save() def create_user_deck(user_id, deck_id, deck_group, default_deck_group_id): if default_deck_group_id != "0": default_deck_group_id = int(default_deck_group_id) default_deck = DefaultDeckGroup.objects.all().get( default_deck_id=default_deck_group_id ) default_deck = DefaultDeck.objects.all().get( deck_type=deck_id, deck_group=default_deck ) user_deck = UserDeck( user=user_id, deck_type=deck_id, deck=default_deck.deck, deck_group=deck_group ) else: user_deck = UserDeck( 
user=user_id, deck_type=deck_id, deck="", deck_group=deck_group ) user_deck.save() def create_user_deck_group(deck_group, user_id, deck_name): user_deck = UserDeckGroup( user_deck_id=deck_group, user=user_id, deck_name=deck_name ) user_deck.save() def create_user_deck_choice(deck_group, user_id): user_deck = UserDeckChoice(user=user_id, user_deck=deck_group) user_deck.save() def create_default_deck(deck_id, deck_group): default_deck = DefaultDeck(deck_type=deck_id, deck="", deck_group=deck_group) default_deck.save() def create_default_deck_group(deck_group, deck_name): default_deck = DefaultDeckGroup(default_deck_id=deck_group, deck_name=deck_name) default_deck.save() def create_enemy_deck(deck_id, deck_group): enemy_deck = EnemyDeck(deck_type=deck_id, deck="", deck_group=deck_group) enemy_deck.save() def create_enemy_deck_group(deck_group, deck_name): enemy_deck = EnemyDeckGroup(enemy_deck_id=deck_group, deck_name=deck_name) enemy_deck.save() def create_enemy_deck_choice(deck_group): enemy_deck = EnemyDeckChoice(enemy_deck=deck_group) enemy_deck.save() def create_default_deck_choice(deck_group): default_deck = DefaultDeckChoice(default_deck=deck_group) default_deck.save() def copy_to_enemy_deck(post, deck_group): decks = Deck.objects.all() all_decks = [] result_decks = [] enemy_decks = EnemyDeck.objects.filter(deck_group=deck_group) for deck in decks: result_deck = [] enemy_deck = enemy_decks.filter(deck_type_id=deck.id).first() exclude_deck = post.getlist("exclude_monster_deck_" + str(deck.id)) enemy_deck_array = enemy_deck.deck.split("_") for exclude_deck_det in exclude_deck: try: enemy_deck_array.remove(exclude_deck_det) except ValueError: pass if len(enemy_deck_array) != 0 and enemy_deck_array[0] != "": result_deck.extend(enemy_deck_array) all_decks.extend(enemy_deck_array) add_deck = post.getlist("monster_deck_" + str(deck.id)) for monster_id in add_deck: monster = Monster.objects.filter(id=monster_id).first() in_decks = monster.monster_deck.split("_") if(str(deck.id) not in in_decks): return HttpResponse("error") if len(add_deck) != 0: all_decks.extend(add_deck) result_deck.extend(add_deck) result_deck = sorted(result_deck) if enemy_deck.deck == "": enemy_deck_size = 0 else: enemy_deck_size = len(enemy_deck_array) add_deck_size = len(add_deck) if deck.max_deck_size < add_deck_size + enemy_deck_size: return "デッキ枚数が多すぎます" result_decks.append(result_deck) all_decks = sorted(all_decks) tmp = 0 for all_deck in all_decks: if all_deck != tmp: tmp = all_deck monster = Monster.objects.filter(id=int(all_deck)).first() if all_decks.count(all_deck) > monster.monster_limit: return monster.monster_name + "の制限を違反しています" i = 0 for deck in decks: enemy_deck = enemy_decks.filter(deck_type_id=deck.id).first() enemy_deck.deck = "_".join(result_decks[i]) enemy_deck.save() i += 1 return "" def copy_to_default_deck(post, deck_group): decks = Deck.objects.all() all_decks = [] result_decks = [] default_decks = DefaultDeck.objects.filter(deck_group=deck_group) for deck in decks: result_deck = [] default_deck = default_decks.filter(deck_type_id=deck.id).first() exclude_deck = post.getlist("exclude_monster_deck_" + str(deck.id)) default_deck_array = default_deck.deck.split("_") for exclude_deck_det in exclude_deck: try: default_deck_array.remove(exclude_deck_det) except ValueError: pass if len(default_deck_array) != 0 and default_deck_array[0] != "": result_deck.extend(default_deck_array) all_decks.extend(default_deck_array) add_deck = post.getlist("monster_deck_" + str(deck.id)) for monster_id in add_deck: 
monster = Monster.objects.filter(id=monster_id).first() in_decks = monster.monster_deck.split("_") if(str(deck.id) not in in_decks): return HttpResponse("error") if len(add_deck) != 0: all_decks.extend(add_deck) result_deck.extend(add_deck) result_deck = sorted(result_deck) if default_deck.deck == "": default_deck_size = 0 else: default_deck_size = len(default_deck_array) add_deck_size = len(add_deck) if deck.max_deck_size < add_deck_size + default_deck_size: return "デッキ枚数が多すぎます" result_decks.append(result_deck) all_decks = sorted(all_decks) tmp = 0 for all_deck in all_decks: if all_deck != tmp: tmp = all_deck monster = Monster.objects.filter(id=int(all_deck)).first() if all_decks.count(all_deck) > monster.monster_limit: return monster.monster_name + "の制限を違反しています" i = 0 for deck in decks: default_deck = default_decks.filter(deck_type_id=deck.id).first() default_deck.deck = "_".join(result_decks[i]) default_deck.save() i += 1 return "" def copy_to_deck_text(user_id, post, deck_group): decks = Deck.objects.all() all_decks = [] result_decks = [] user_decks = UserDeck.objects.filter(user=user_id, deck_group=deck_group) for deck in decks: result_deck = [] user_deck = user_decks.filter(deck_type_id=deck.id).first() add_decks = post["user_deck_text"].split("\r\n") tmp = [] if len(add_decks) != 0: for add_deck in add_decks: if add_deck == "": continue monster = Monster.objects.filter(monster_name=add_deck).get() if monster.token_flag is True: return HttpResponse("error") monster_id = monster.id monster_places = monster.monster_deck.split("_") if deck.id != int(monster_places[0]): break result_deck.append(str(monster_id)) else: continue result_deck = sorted(result_deck) result_deck_size = len(result_deck) if deck.max_deck_size < result_deck_size: return "デッキ枚数が多すぎます" result_decks.append(result_deck) all_decks = sorted(all_decks) tmp = 0 constraint = Constraint.objects.get() constraint_variable = constraint.monster_variable.id constraint_variety = [] for all_deck in all_decks: if all_deck != tmp: tmp = all_deck monster = Monster.objects.filter(id=int(all_deck)).first() monsteritem = ( MonsterItem.objects .filter(monster_id__id=int(all_deck) , monster_variables_id__id = constraint_variable) ).get() if all_decks.count(all_deck) > monster.monster_limit: return monster.monster_name + "の制限を違反しています" if not constraint_variable: continue if monsteritem.monster_item_text == constraint.except_val: continue elif monsteritem.monster_item_text not in constraint_variety and int(monsteritem.monster_item_text) != int(constraint.except_val): constraint_variety.append(monsteritem.monster_item_text) if(len(constraint_variety) > constraint.limit): return "制約に違反しています。" i = 0 for deck in decks: user_deck = user_decks.filter(deck_type_id=deck.id).first() user_deck.deck = "_".join(result_decks[i]) user_deck.save() i += 1 return "" def copy_to_deck(user_id, post, deck_group): decks = Deck.objects.filter( makedeckshow=True) all_decks = [] result_decks = [] user_decks = UserDeck.objects.filter(user=user_id, deck_group=deck_group) for deck in decks: result_deck = [] user_deck = user_decks.filter(deck_type_id=deck.id).first() exclude_deck = post.getlist("exclude_monster_deck_" + str(deck.id)) user_deck_array = user_deck.deck.split("_") for exclude_deck_det in exclude_deck: try: user_deck_array.remove(exclude_deck_det) except ValueError: pass if len(user_deck_array) != 0 and user_deck_array[0] != "": result_deck.extend(user_deck_array) all_decks.extend(user_deck_array) add_deck = post.getlist("monster_deck_" + str(deck.id)) for 
monster_id in add_deck: monster = Monster.objects.filter(id=monster_id).first() if monster.token_flag is True: return HttpResponse("error") in_decks = monster.monster_deck.split("_") if(str(deck.id) not in in_decks): return HttpResponse("error") if len(add_deck) != 0: all_decks.extend(add_deck) result_deck.extend(add_deck) result_deck = sorted(result_deck) if user_deck.deck == "": user_deck_size = 0 else: user_deck_size = len(user_deck_array) add_deck_size = len(add_deck) if deck.max_deck_size < add_deck_size + user_deck_size: return "デッキ枚数が多すぎます" result_decks.append(result_deck) all_decks = sorted(all_decks) tmp = 0 constraint = Constraint.objects.get() constraint_variable = constraint.monster_variable.id constraint_variety = [] for all_deck in all_decks: if all_deck != tmp: tmp = all_deck monster = Monster.objects.filter(id=int(all_deck)).first() monsteritem = ( MonsterItem.objects .filter(monster_id__id=int(all_deck) , monster_variables_id__id = constraint_variable) ).get() if all_decks.count(all_deck) > monster.monster_limit: return monster.monster_name + "の制限を違反しています" if not constraint_variable: continue if monsteritem.monster_item_text == constraint.except_val: continue elif monsteritem.monster_item_text not in constraint_variety and int(monsteritem.monster_item_text) != int(constraint.except_val): constraint_variety.append(monsteritem.monster_item_text) if(len(constraint_variety) > constraint.limit): return "制約に違反しています。" i = 0 for deck in decks: user_deck = user_decks.filter(deck_type_id=deck.id).first() user_deck.deck = "_".join(result_decks[i]) user_deck.save() i += 1 return "" def cheat_get(id, deck_id, owner, place): monster = Monster.objects.filter(id=int(id)).first() tmp = {} tmp6 = {} tmp["flag"] = 0 tmp["monster_name"] = monster.monster_name tmp["id"] = monster.id tmp["token"] = monster.token_flag tmp["org_id"] = monster.id tmp["monster_sentence"] = monster.monster_sentence tmp["img"] = monster.img tmp["user"]= owner tmp["mine_or_other"]= owner monsteritems = ( MonsterItem.objects.all() .filter(monster_id__id=id) .order_by("-monster_variables_id__priority") .select_related("monster_variables_id") .select_related("monster_variables_id__monster_variable_kind_id") ) for monsteritem in monsteritems: tmp5 = {} monster_variable = monsteritem.monster_variables_id tmp5["name"] = monster_variable.monster_variable_name tmp5["minus"] = monster_variable.monster_variable_minus tmp5["value"] = monsteritem.monster_item_text tmp5["i_val"] = monsteritem.monster_item_text tmp5["i_i_val"] = monsteritem.monster_item_text tmp2 = monsteritem.monster_item_text.split("_") if monster_variable.monster_variable_kind_id.monster_variable_name == "数値": tmp5["str"] = tmp5["value"] else: tmp5["str"] = "deck_" for tmp3 in tmp2: tmp4 = monster_variable.monster_variable_kind_id.monster_variable_sentence.split( "|" ) tmp5["str"] += tmp4[int(tmp3) - 1] tmp6[monster_variable.monster_variable_name] = tmp5 tmp["variables"] = tmp6 tmp["place"] = place tmp["from"] = None tmp["noeffect"] = "" tmp["nochoose"] = "" tmp["owner"] = owner tmp["user"] = owner tmp["deck_id"] = deck_id tmp["card_unique_id"] = str(uuid.uuid4()) tmp["place_unique_id"] = str(uuid.uuid4()) return tmp def create_user_deck_det(user_deck, deck_id, owner): ids = user_deck.split("_") return_value = [] if user_deck == "": return return_value for id in ids: tmp = {} tmp6 = {} monster = Monster.objects.filter(id=int(id)).first() tmp["flag"] = 0 tmp["token"] = monster.token_flag tmp["monster_name"] = monster.monster_name tmp["id"] = monster.id 
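        # "org_id" keeps a copy of the monster's original id, so it remains
        # available if "id" is changed later (assumption based on usage).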
tmp["org_id"] = monster.id tmp["monster_sentence"] = monster.monster_sentence tmp["img"] = monster.img monsteritems = ( MonsterItem.objects.all() .filter(monster_id__id=id) .order_by("-monster_variables_id__priority") .select_related("monster_variables_id") .select_related("monster_variables_id__monster_variable_kind_id") ) for monsteritem in monsteritems: tmp5 = {} monster_variable = monsteritem.monster_variables_id tmp5["name"] = monster_variable.monster_variable_name tmp5["minus"] = monster_variable.monster_variable_minus tmp5["value"] = monsteritem.monster_item_text tmp5["i_val"] = monsteritem.monster_item_text tmp5["i_i_val"] = monsteritem.monster_item_text tmp2 = monsteritem.monster_item_text.split("_") if monster_variable.monster_variable_kind_id.monster_variable_name == "数値": tmp5["str"] = tmp5["value"] else: tmp5["str"] = "" for tmp3 in tmp2: tmp4 = monster_variable.monster_variable_kind_id.monster_variable_sentence.split( "|" ) tmp5["str"] += tmp4[int(tmp3) - 1] tmp6[monster_variable.monster_variable_name] = tmp5 tmp["variables"] = tmp6 tmp["place"] = "deck" tmp["from"] = None tmp["noeffect"] = "" tmp["nochoose"] = "" tmp["owner"] = owner tmp["user"] = owner tmp["mine_or_other"] = owner tmp["deck_id"] = deck_id tmp["card_unique_id"] = str(uuid.uuid4()) tmp["place_unique_id"] = str(uuid.uuid4()) return_value.append(tmp) np.random.shuffle(return_value) return json.dumps(return_value) def get_field_y_range(fields,field_size): ary = [0] *field_size ary2 = [0] *field_size for y in range(field_size): field = fields.filter(y=y,x=0).get() if field.no_clear is True and field.mine_or_other == 1: ary2.append(y) else: ary[y]=y for y in range(field_size): if ary[y] == 0 and y != 0: ary[y] = ary2[-1] del ary2[-1] return ary ``` #### File: mysite/tcgcreator/get_monster_variable.py ```python from django.http import HttpResponse from .models import ( MonsterVariables, ) from pprint import pprint import json def get_monster_variable(request): result = [] monster_variables = MonsterVariables.objects.order_by("-priority") for monster_variable in monster_variables: tmp = {} monster_variable_kind = monster_variable.monster_variable_kind_id tmp["variable_id"] = monster_variable_kind.id tmp["variable_name"] = monster_variable_kind.monster_variable_name if monster_variable_kind.id != 1: variable_sentence = monster_variable_kind.monster_variable_sentence tmp["sentence"] = variable_sentence.split("_") result.append(tmp) return HttpResponse(json.dumps(result)) ```
{ "source": "jiduque/scikit-fda", "score": 3 }
#### File: exploratory/depth/multivariate.py
```python
from __future__ import annotations

import abc
import math
from typing import Generic, Optional, TypeVar

import numpy as np
import scipy.stats
import sklearn
from scipy.special import comb
from typing_extensions import Literal

T = TypeVar("T", contravariant=True)
SelfType = TypeVar("SelfType")
_Side = Literal["left", "right"]


class _DepthOrOutlyingness(
    abc.ABC,
    sklearn.base.BaseEstimator,  # type: ignore
    Generic[T],
):
    """Abstract class representing a depth or outlyingness function."""

    def fit(self: SelfType, X: T, y: None = None) -> SelfType:
        """
        Learn the distribution from the observations.

        Args:
            X: Functional dataset from which the distribution of the data is
                inferred.
            y: Unused. Kept only for convention.

        Returns:
            Fitted estimator.
        """
        return self

    @abc.abstractmethod
    def predict(self, X: T) -> np.ndarray:
        """
        Compute the depth or outlyingness inside the learned distribution.

        Args:
            X: Points whose depth is going to be evaluated.

        Returns:
            Depth of each observation.
        """
        pass

    def fit_predict(self, X: T, y: None = None) -> np.ndarray:
        """
        Compute the depth or outlyingness of each observation.

        This computation is done with respect to the whole dataset.

        Args:
            X: Dataset.
            y: Unused. Kept only for convention.

        Returns:
            Depth of each observation.
        """
        return self.fit(X).predict(X)

    def __call__(
        self,
        X: T,
        *,
        distribution: Optional[T] = None,
    ) -> np.ndarray:
        """
        Allow the depth or outlyingness to be used as a function.

        Args:
            X: Points whose depth is going to be evaluated.
            distribution: Functional dataset from which the distribution of
                the data is inferred. If ``None`` it is the same as ``X``.

        Returns:
            Depth of each observation.
        """
        copy = sklearn.base.clone(self)

        if distribution is None:
            return copy.fit_predict(X)

        return copy.fit(distribution).predict(X)

    @property  # noqa: WPS125
    def max(self) -> float:  # noqa: WPS125
        """
        Maximum (or supremum if there is no maximum) of the possible
        predicted values.
        """
        return 1

    @property  # noqa: WPS125
    def min(self) -> float:  # noqa: WPS125
        """
        Minimum (or infimum if there is no minimum) of the possible
        predicted values.
        """
        return 0


class Depth(_DepthOrOutlyingness[T]):
    """Abstract class representing a depth function."""


class Outlyingness(_DepthOrOutlyingness[T]):
    """Abstract class representing an outlyingness function."""


def _searchsorted_one_dim(
    array: np.ndarray,
    values: np.ndarray,
    *,
    side: _Side = 'left',
) -> np.ndarray:
    return np.searchsorted(array, values, side=side)


_searchsorted_vectorized = np.vectorize(
    _searchsorted_one_dim,
    signature='(n),(m),()->(m)',
    excluded='side',
)


def _searchsorted_ordered(
    array: np.ndarray,
    values: np.ndarray,
    *,
    side: _Side = 'left',
) -> np.ndarray:
    return _searchsorted_vectorized(array, values, side=side)


def _cumulative_distribution(column: np.ndarray) -> np.ndarray:
    """Calculate the cumulative distribution function at each point.

    Args:
        column (numpy.ndarray): Array containing the values over which the
            distribution function is calculated.

    Returns:
        numpy.ndarray: Array containing the evaluation at each point of the
            distribution function.

    Examples:
        >>> _cumulative_distribution(np.array([1, 4, 5, 1, 2, 2, 4, 1, 1, 3]))
        array([ 0.4,  0.9,  1. ,  0.4,  0.6,  0.6,  0.9,  0.4,  0.4,  0.7])

    """
    return _searchsorted_ordered(
        np.sort(column),
        column,
        side='right',
    ) / len(column)


class _UnivariateFraimanMuniz(Depth[np.ndarray]):
    r"""
    Univariate depth used to compute the Fraiman and Muniz depth.

    Each column is considered as the samples of a random variable.
The univariate depth of each of the samples of each column is calculated as follows: .. math:: D(x) = 1 - \left\lvert \frac{1}{2}- F(x)\right\rvert Where :math:`F` stands for the marginal univariate distribution function of each column. """ def fit(self: SelfType, X: np.ndarray, y: None = None) -> SelfType: self._sorted_values = np.sort(X, axis=0) return self def predict(self, X: np.ndarray) -> np.ndarray: cum_dist = _searchsorted_ordered( np.moveaxis(self._sorted_values, 0, -1), np.moveaxis(X, 0, -1), side='right', ) / len(self._sorted_values) assert cum_dist.shape[-2] == 1 return 1 - np.abs(0.5 - np.moveaxis(cum_dist, -1, 0)[..., 0]) @property # noqa: WPS125 def min(self) -> float: # noqa: WPS125 return 1 / 2 class SimplicialDepth(Depth[np.ndarray]): r""" Simplicial depth. The simplicial depth of a point :math:`x` in :math:`\mathbb{R}^p` given a distribution :math:`F` is the probability that a random simplex with its :math:`p + 1` points sampled from :math:`F` contains :math:`x`. References: <NAME>. (1990). On a Notion of Data Depth Based on Random Simplices. The Annals of Statistics, 18(1), 405–414. """ def fit( # noqa: D102 self, X: np.ndarray, y: None = None, ) -> SimplicialDepth: self._dim = X.shape[-1] if self._dim == 1: self.sorted_values = np.sort(X, axis=0) else: raise NotImplementedError( "SimplicialDepth is currently only " "implemented for one-dimensional data.", ) return self def predict(self, X: np.ndarray) -> np.ndarray: # noqa: D102 assert self._dim == X.shape[-1] if self._dim == 1: positions_left = _searchsorted_ordered( np.moveaxis(self.sorted_values, 0, -1), np.moveaxis(X, 0, -1), ) positions_left = np.moveaxis(positions_left, -1, 0)[..., 0] positions_right = _searchsorted_ordered( np.moveaxis(self.sorted_values, 0, -1), np.moveaxis(X, 0, -1), side='right', ) positions_right = np.moveaxis(positions_right, -1, 0)[..., 0] num_strictly_below = positions_left num_strictly_above = len(self.sorted_values) - positions_right total_pairs = comb(len(self.sorted_values), 2) return ( total_pairs - comb(num_strictly_below, 2) - comb(num_strictly_above, 2) ) / total_pairs class OutlyingnessBasedDepth(Depth[T]): r""" Computes depth based on an outlyingness measure. An outlyingness function :math:`O(x)` can be converted to a depth function as .. math:: D(x) = \frac{1}{1 + O(x)} if :math:`O(x)` is unbounded or as .. math:: D(x) = 1 - \frac{O(x)}{\sup O(x)} if :math:`O(x)` is bounded. If the infimum value of the outlyiness function is not zero, it is subtracted beforehand. Args: outlyingness (Outlyingness): Outlyingness object. References: <NAME>. (2006). Depth functions in nonparametric multivariate inference. DIMACS Series in Discrete Mathematics and Theoretical Computer Science, 72, 1. """ def __init__(self, outlyingness: Outlyingness[T]): self.outlyingness = outlyingness def fit( # noqa: D102 self, X: T, y: None = None, ) -> OutlyingnessBasedDepth[T]: self.outlyingness.fit(X) return self def predict(self, X: np.ndarray) -> np.ndarray: # noqa: D102 outlyingness_values = self.outlyingness.predict(X) min_val = self.outlyingness.min max_val = self.outlyingness.max if math.isinf(max_val): return 1 / (1 + outlyingness_values - min_val) return 1 - (outlyingness_values - min_val) / (max_val - min_val) class StahelDonohoOutlyingness(Outlyingness[np.ndarray]): r""" Computes Stahel-Donoho outlyingness. Stahel-Donoho outlyingness is defined as .. 
math:: \sup_{\|u\|=1} \frac{|u^T x - \text{Med}(u^T X))|}{\text{MAD}(u^TX)} where :math:`\text{X}` is a sample with distribution :math:`F`, :math:`\text{Med}` is the median and :math:`\text{MAD}` is the median absolute deviation. References: <NAME>., <NAME>., & <NAME>. (2004). On the Stahel-Donoho estimator and depth-weighted means of multivariate data. Annals of Statistics, 32(1), 167–188. https://doi.org/10.1214/aos/1079120132 """ def fit( # noqa: D102 self, X: np.ndarray, y: None = None, ) -> StahelDonohoOutlyingness: dim = X.shape[-1] if dim == 1: self._location = np.median(X, axis=0) self._scale = scipy.stats.median_abs_deviation(X, axis=0) else: raise NotImplementedError("Only implemented for one dimension") return self def predict(self, X: np.ndarray) -> np.ndarray: # noqa: D102 dim = X.shape[-1] if dim == 1: # Special case, can be computed exactly return ( np.abs(X - self._location) / self._scale )[..., 0] raise NotImplementedError("Only implemented for one dimension") @property # noqa: WPS125 def max(self) -> float: # noqa: WPS125 return math.inf class ProjectionDepth(OutlyingnessBasedDepth[np.ndarray]): r""" Computes Projection depth. It is defined as the depth induced by the :class:`Stahel-Donoho outlyingness <StahelDonohoOutlyingness>`. See also: :class:`StahelDonohoOutlyingness`: Stahel-Donoho outlyingness. References: <NAME>., <NAME>., & <NAME>. (2004). On the Stahel-Donoho estimator and depth-weighted means of multivariate data. Annals of Statistics, 32(1), 167–188. https://doi.org/10.1214/aos/1079120132 """ def __init__(self) -> None: super().__init__(outlyingness=StahelDonohoOutlyingness()) ``` #### File: misc/metrics/_angular.py ```python from __future__ import annotations from typing import Optional, TypeVar, Union import numpy as np from typing_extensions import Final from ...representation import FData from ...representation._typing import NDArrayFloat from .._math import cosine_similarity, cosine_similarity_matrix from ._utils import pairwise_metric_optimization T = TypeVar("T", bound=Union[NDArrayFloat, FData]) class AngularDistance(): r""" Calculate the angular distance between two objects. For each pair of observations x and y the angular distance between them is defined as the normalized "angle" between them: .. math:: d(x, y) = \frac{\arccos \left(\frac{\langle x, y \rangle}{ \sqrt{\langle x, x \rangle \langle y, y \rangle}} \right)}{\pi} where :math:`\langle {}\cdot{}, {}\cdot{} \rangle` is the inner product. This distance is defined in the interval [0, 1]. Args: e1: First object. e2: Second object. Returns: Numpy vector where the i-th coordinate has the angular distance between the i-th element of the first object and the i-th element of the second one. Examples: Computes the angular distances between an object containing functional data corresponding to the functions y = 1 and y = x defined over the interval [0, 1] and another ones containing data of the functions y = 0 and y = x/2. The result then is an array of size 2 with the computed l2 distance between the functions in the same position in both. >>> import skfda >>> import numpy as np >>> >>> x = np.linspace(0, 1, 1001) >>> fd = skfda.FDataGrid([np.ones(len(x)), x], x) >>> fd2 = skfda.FDataGrid([2*np.ones(len(x)), np.cos(x)], x) >>> >>> skfda.misc.metrics.angular_distance(fd, fd2).round(2) array([ 0. 
, 0.22])

    """

    def __call__(
        self,
        e1: T,
        e2: T,
    ) -> NDArrayFloat:
        """Compute the distance."""
        return np.arccos(cosine_similarity(e1, e2)) / np.pi

    def __repr__(self) -> str:
        return (
            f"{type(self).__name__}()"
        )


angular_distance: Final = AngularDistance()


@pairwise_metric_optimization.register
def _pairwise_metric_optimization_angular(
    metric: AngularDistance,
    elem1: Union[NDArrayFloat, FData],
    elem2: Optional[Union[NDArrayFloat, FData]],
) -> NDArrayFloat:
    return np.arccos(cosine_similarity_matrix(elem1, elem2)) / np.pi
```
#### File: misc/operators/_integral_transform.py
```python
from __future__ import annotations

from typing import Callable

import numpy as np
import scipy.integrate

from ...representation import FData
from ._operators import Operator


class IntegralTransform(Operator[FData, Callable[[np.ndarray], np.ndarray]]):
    """Integral operator.

    Parameters:
        kernel_function: Kernel function corresponding to the operator.

    """

    def __init__(
        self,
        kernel_function: Callable[[np.ndarray, np.ndarray], np.ndarray],
    ) -> None:
        self.kernel_function = kernel_function

    def __call__(  # noqa: D102
        self,
        f: FData,
    ) -> Callable[[np.ndarray], np.ndarray]:

        def evaluate_covariance(  # noqa: WPS430
            points: np.ndarray,
        ) -> np.ndarray:

            def integral_body(  # noqa: WPS430
                integration_var: np.ndarray,
            ) -> np.ndarray:
                return (
                    f(integration_var)
                    * self.kernel_function(integration_var, points)
                )

            domain_range = f.domain_range[0]

            return scipy.integrate.quad_vec(
                integral_body,
                domain_range[0],
                domain_range[1],
            )[0]

        return evaluate_covariance
```
#### File: misc/operators/_srvf.py
```python
from __future__ import annotations

from typing import Optional

import numpy as np
import scipy.integrate
from sklearn.base import BaseEstimator, TransformerMixin

from ..._utils import check_is_univariate
from ...representation import FDataGrid
from ...representation._typing import ArrayLike
from ._operators import Operator


class SRSF(
    Operator[FDataGrid, FDataGrid],
    BaseEstimator,  # type: ignore
    TransformerMixin,  # type: ignore
):
    r"""Square-Root Slope Function (SRSF) transform.

    Let :math:`f : [a,b] \rightarrow \mathbb{R}` be an absolutely continuous
    function; the SRSF transform is defined as

    .. math::
        SRSF(f(t)) = sgn(f(t)) \sqrt{|\dot f(t)|} = q(t)

    This representation is used to compute the extended non-parametric
    Fisher-Rao distance between functions, which under the SRSF
    representation becomes the usual :math:`\mathbb{L}^2` distance between
    functions. See :footcite:`srivastava+klassen_2016_analysis_square`.

    The inverse SRSF transform is defined as

    .. math::
        f(t) = f(a) + \int_{a}^t q(t)|q(t)|dt .

    This transformation is a mapping up to an additive constant. Given the
    SRSF and the initial value :math:`f(a)`, the original function can be
    recovered; for this reason the value :math:`f(a)`, which is lost during
    differentiation, is stored during the fit. If the inverse transformation
    is applied without fitting the estimator, it is assumed that
    :math:`f(a)=0`.

    Args:
        output_points (array_like, optional): Set of points where the
            functions are evaluated; by default the grid points of the
            transformed :class:`FDataGrid <skfda.FDataGrid>` are used.
        initial_value (float, optional): Initial value to apply in the
            inverse transformation. If `None`, the initial values of the
            functions are stored during the transformation and reused in the
            inverse transformation. Defaults to None.

    Attributes:
        output_points: Set of points where the functions are evaluated; by
            default the grid points of the fdatagrid are used.
        initial_value: Initial value to apply in the inverse transformation.
            If `None`, the initial values of the functions are stored during
            the transformation and reused in the inverse transformation.
            Defaults to None.

    Note:
        Due to the use of derivatives, it is recommended that the samples be
        sufficiently smooth, or smoothed by a preprocessing step beforehand,
        in order to achieve good results.

    References:
        .. footbibliography::

    Examples:
        Create a toy dataset and apply the transformation and its inverse.

        >>> from skfda.datasets import make_sinusoidal_process
        >>> from skfda.misc.operators import SRSF
        >>> fd = make_sinusoidal_process(error_std=0, random_state=0)
        >>> srsf = SRSF()
        >>> srsf
        SRSF(...)

        Fit the estimator (to enable the inverse transform) and apply the
        SRSF

        >>> q = srsf.fit_transform(fd)

        Apply the inverse transform.

        >>> fd_pull_back = srsf.inverse_transform(q)

        The original and the pulled-back `fd` are almost equal

        >>> zero = fd - fd_pull_back
        >>> zero.data_matrix.flatten().round(3)
        array([ 0.,  0.,  0., ..., -0., -0., -0.])

    """

    def __init__(
        self,
        output_points: Optional[ArrayLike] = None,
        initial_value: Optional[float] = None,
    ) -> None:
        self.output_points = output_points
        self.initial_value = initial_value

    def __call__(self, vector: FDataGrid) -> FDataGrid:
        return self.fit_transform(vector)

    def fit(self, X: FDataGrid, y: None = None) -> SRSF:
        """
        Return self. This transformer does not need to be fitted.

        Args:
            X: Present for API conventions.
            y: Present for API conventions.

        Returns:
            (Estimator): self

        """
        return self

    def transform(self, X: FDataGrid, y: None = None) -> FDataGrid:
        r"""
        Compute the square-root slope function (SRSF) transform.

        Let :math:`f : [a,b] \rightarrow \mathbb{R}` be an absolutely
        continuous function; the SRSF transform is defined as
        :footcite:`srivastava+klassen_2016_analysis_square`:

        .. math::

            SRSF(f(t)) = sgn(f(t)) \sqrt{|\dot f(t)|} = q(t)

        Args:
            X: Functions to be transformed.
            y: Present for API conventions.

        Returns:
            SRSF functions.

        Raises:
            ValueError: If functions are not univariate.

        """
        check_is_univariate(X)

        if self.output_points is None:
            output_points = X.grid_points[0]
        else:
            output_points = np.asarray(self.output_points)

        g = X.derivative()

        # Evaluation with the corresponding interpolation
        data_matrix = g(output_points)[..., 0]

        # SRSF(f) = sign(f) * sqrt|Df| (avoiding multiple allocation)
        sign_g = np.sign(data_matrix)
        data_matrix = np.abs(data_matrix, out=data_matrix)
        data_matrix = np.sqrt(data_matrix, out=data_matrix)
        data_matrix *= sign_g

        # Store the values of the transformation
        if self.initial_value is None:
            a = X.domain_range[0][0]
            self.initial_value_ = X(a).reshape(X.n_samples, 1, X.dim_codomain)

        return X.copy(data_matrix=data_matrix, grid_points=output_points)

    def inverse_transform(self, X: FDataGrid, y: None = None) -> FDataGrid:
        r"""
        Compute the inverse SRSF transform.

        Given the SRSF and the initial value, the original function can be
        obtained as :footcite:`srivastava+klassen_2016_analysis_square`:

        .. math::
            f(t) = f(a) + \int_{a}^t q(t)|q(t)|dt

        where :math:`q(t)=SRSF(f(t))`.

        If this inverse transformation is applied without fitting the
        estimator, it is assumed that :math:`f(a)=0`.

        Args:
            X: SRSF to be transformed.
            y: Present for API conventions.

        Returns:
            Functions in the original space.

        Raises:
            ValueError: If functions are multidimensional.
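
        Example:
            A minimal sketch (assuming ``q`` comes from a previous call to
            :meth:`transform`, so the initial values are stored):

            .. code-block:: python

                srsf = SRSF()
                q = srsf.fit_transform(fd)
                fd_back = srsf.inverse_transform(q)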
""" check_is_univariate(X) stored_initial_value = getattr(self, 'initial_value_', None) if self.initial_value is None and stored_initial_value is None: raise AttributeError( "When initial_value=None is expected a " "previous transformation of the data to " "store the initial values to apply in the " "inverse transformation. Also it is possible " "to fix these values setting the attribute" "initial value without a previous " "transformation.", ) if self.output_points is None: output_points = X.grid_points[0] else: output_points = np.asarray(self.output_points) data_matrix = X(output_points) data_matrix *= np.abs(data_matrix) f_data_matrix = scipy.integrate.cumtrapz( data_matrix, x=output_points, axis=1, initial=0, ) # If the transformer was fitted, sum the initial value if self.initial_value is None: f_data_matrix += self.initial_value_ else: f_data_matrix += self.initial_value return X.copy(data_matrix=f_data_matrix, grid_points=output_points) ``` #### File: ml/clustering/_hierarchical.py ```python from __future__ import annotations import enum from typing import Callable, Generic, Optional, TypeVar, Union import joblib import numpy as np import sklearn.cluster from sklearn.base import BaseEstimator, ClusterMixin from typing_extensions import Literal from ...misc.metrics import PRECOMPUTED, Metric, PairwiseMetric, l2_distance from ...misc.metrics._typing import _parse_metric, _PrecomputedTypes from ...representation import FData kk = ["ward", "average", "complete"] MetricElementType = TypeVar( "MetricElementType", contravariant=True, bound=FData, ) MetricOrPrecomputed = Union[Metric[MetricElementType], _PrecomputedTypes] Connectivity = Union[ np.ndarray, Callable[[MetricElementType], np.ndarray], None, ] class LinkageCriterion(enum.Enum): """Linkage criterion to use in :class:`AgglomerativeClustering`.""" # WARD = "ward" Not until # https://github.com/scikit-learn/scikit-learn/issues/15287 is solved COMPLETE = "complete" AVERAGE = "average" SINGLE = "single" LinkageCriterionLike = Union[ LinkageCriterion, Literal["ward", "complete", "average", "single"], ] class AgglomerativeClustering( # noqa: WPS230 ClusterMixin, # type: ignore BaseEstimator, # type: ignore Generic[MetricElementType], ): r""" Agglomerative Clustering. Recursively merges the pair of clusters that minimally increases a given linkage distance. Notes: This class is an extension of :class:`sklearn.cluster.AgglomerativeClustering` that accepts functional data objects and metrics. Please check also the documentation of the original class. Parameters: n_clusters: The number of clusters to find. It must be ``None`` if ``distance_threshold`` is not ``None``. metric: Metric used to compute the linkage. If it is ``skfda.misc.metrics.PRECOMPUTED`` or the string ``"precomputed"``, a distance matrix (instead of a similarity matrix) is needed as input for the fit method. memory: Used to cache the output of the computation of the tree. By default, no caching is done. If a string is given, it is the path to the caching directory. connectivity: Connectivity matrix. Defines for each sample the neighboring samples following a given structure of the data. This can be a connectivity matrix itself or a callable that transforms the data into a connectivity matrix, such as derived from kneighbors_graph. Default is None, i.e, the hierarchical clustering algorithm is unstructured. compute_full_tree: Stop early the construction of the tree at n_clusters. 
This is useful to decrease computation time if the number of clusters is not small compared to the number of samples. This option is useful only when specifying a connectivity matrix. Note also that when varying the number of clusters and using caching, it may be advantageous to compute the full tree. It must be ``True`` if ``distance_threshold`` is not ``None``. By default `compute_full_tree` is "auto", which is equivalent to `True` when `distance_threshold` is not `None` or that `n_clusters` is inferior to the maximum between 100 or `0.02 * n_samples`. Otherwise, "auto" is equivalent to `False`. linkage: Which linkage criterion to use. The linkage criterion determines which distance to use between sets of observation. The algorithm will merge the pairs of clusters that minimize this criterion. - average uses the average of the distances of each observation of the two sets. - complete or maximum linkage uses the maximum distances between all observations of the two sets. - single uses the minimum of the distances between all observations of the two sets. distance_threshold: The linkage distance threshold above which, clusters will not be merged. If not ``None``, ``n_clusters`` must be ``None`` and ``compute_full_tree`` must be ``True``. Attributes: n_clusters\_: The number of clusters found by the algorithm. If ``distance_threshold=None``, it will be equal to the given ``n_clusters``. labels\_: cluster labels for each point n_leaves\_: Number of leaves in the hierarchical tree. n_connected_components\_: The estimated number of connected components in the graph. children\_ : The children of each non-leaf node. Values less than `n_samples` correspond to leaves of the tree which are the original samples. A node `i` greater than or equal to `n_samples` is a non-leaf node and has children `children_[i - n_samples]`. Alternatively at the i-th iteration, children[i][0] and children[i][1] are merged to form node `n_samples + i` Examples: >>> from skfda import FDataGrid >>> from skfda.ml.clustering import AgglomerativeClustering >>> import numpy as np >>> data_matrix = np.array([[1, 2], [1, 4], [1, 0], ... [4, 2], [4, 4], [4, 0]]) >>> X = FDataGrid(data_matrix) >>> clustering = AgglomerativeClustering( ... linkage=AgglomerativeClustering.LinkageCriterion.COMPLETE, ... ) >>> clustering.fit(X) AgglomerativeClustering(...) 
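        >>> clustering.n_clusters_
        2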
>>> clustering.labels_.astype(np.int_) array([0, 0, 1, 0, 0, 1]) """ LinkageCriterion = LinkageCriterion def __init__( self, n_clusters: Optional[int] = 2, *, metric: MetricOrPrecomputed[MetricElementType] = l2_distance, memory: Union[str, joblib.Memory, None] = None, connectivity: Connectivity[MetricElementType] = None, compute_full_tree: Union[Literal['auto'], bool] = 'auto', linkage: LinkageCriterionLike, distance_threshold: Optional[float] = None, ) -> None: self.n_clusters = n_clusters self.metric = metric self.memory = memory self.connectivity = connectivity self.compute_full_tree = compute_full_tree self.linkage = linkage self.distance_threshold = distance_threshold def _init_estimator(self) -> None: linkage = LinkageCriterion(self.linkage) self._estimator = sklearn.cluster.AgglomerativeClustering( n_clusters=self.n_clusters, affinity='precomputed', memory=self.memory, connectivity=self.connectivity, compute_full_tree=self.compute_full_tree, linkage=linkage.value, distance_threshold=self.distance_threshold, ) def _copy_attrs(self) -> None: self.n_clusters_: int = self._estimator.n_clusters_ self.labels_: np.ndarray = self._estimator.labels_ self.n_leaves_: int = self._estimator.n_leaves_ self.n_connected_components_: int = ( self._estimator.n_connected_components_ ) self.children_: np.ndarray = self._estimator.children_ def fit( # noqa: D102 self, X: MetricElementType, y: None = None, ) -> AgglomerativeClustering[MetricElementType]: self._init_estimator() metric = _parse_metric(self.metric) if metric is not PRECOMPUTED: data = PairwiseMetric(metric)(X) self._estimator.fit(data, y) self._copy_attrs() return self def fit_predict( # noqa: D102 self, X: MetricElementType, y: None = None, ) -> np.ndarray: self._init_estimator() metric = _parse_metric(self.metric) if metric is not PRECOMPUTED: data = PairwiseMetric(metric)(X) predicted = self._estimator.fit_predict(data, y) self._copy_attrs() return predicted ``` #### File: dim_reduction/feature_extraction/_fda_feature_union.py ```python from __future__ import annotations from typing import Union from numpy import ndarray from pandas import DataFrame from sklearn.pipeline import FeatureUnion from ....representation import FData class FDAFeatureUnion(FeatureUnion): # type: ignore """Concatenates results of multiple functional transformer objects. This estimator applies a list of transformer objects in parallel to the input data, then concatenates the results (They can be either FDataGrid and FDataBasis objects or multivariate data itself).This is useful to combine several feature extraction mechanisms into a single transformer. Parameters of the transformers may be set using its name and the parameter name separated by a '__'. A transformer may be replaced entirely by setting the parameter with its name to another transformer, or removed by setting to 'drop'. Parameters: transformer_list: list of tuple List of tuple containing `(str, transformer)`. The first element of the tuple is name affected to the transformer while the second element is a scikit-learn transformer instance. The transformer instance can also be `"drop"` for it to be ignored. n_jobs: int Number of jobs to run in parallel. ``None`` means 1 unless in a :obj:`joblib.parallel_backend` context. ``-1`` means using all processors. The default value is None transformer_weights: dict Multiplicative weights for features per transformer. Keys are transformer names, values the weights. Raises ValueError if key not present in ``transformer_list``. 
verbose: bool If True, the time elapsed while fitting each transformer will be printed as it is completed. By default the value is False array_output: bool indicates if the transformed data is requested to be a NumPy array output. By default the value is False. Examples: Firstly we will import the Berkeley Growth Study data set >>> from skfda.datasets import fetch_growth >>> X,y = fetch_growth(return_X_y=True) Then we need to import the transformers we want to use. In our case we will use Generalized depth-versus-depth transformer. Evaluation Transformer returns the original curve, and as it is helpful, we will concatenate it to the already metioned transformer. >>> from skfda.preprocessing.dim_reduction.feature_extraction import ( ... FDAFeatureUnion, ... ) >>> from skfda.preprocessing.dim_reduction.feature_extraction import ( ... DDGTransformer, ... ) >>> from skfda.exploratory.depth import ModifiedBandDepth >>> from skfda.representation import EvaluationTransformer >>> import numpy as np Finally we apply fit and transform. >>> union = FDAFeatureUnion( ... [ ... ( ... 'ddgtransformer', ... DDGTransformer(depth_method=[ModifiedBandDepth()]), ... ), ... ("eval", EvaluationTransformer()), ... ], ... array_output=True, ... ) >>> np.around(union.fit_transform(X,y), decimals=2) array([[ 2.100e-01, 9.000e-02, 8.130e+01, ..., 1.938e+02, 1.943e+02, 1.951e+02], [ 4.600e-01, 3.800e-01, 7.620e+01, ..., 1.761e+02, 1.774e+02, 1.787e+02], [ 2.000e-01, 3.300e-01, 7.680e+01, ..., 1.709e+02, 1.712e+02, 1.715e+02], ..., [ 3.900e-01, 5.100e-01, 6.860e+01, ..., 1.660e+02, 1.663e+02, 1.668e+02], [ 2.600e-01, 2.700e-01, 7.990e+01, ..., 1.683e+02, 1.684e+02, 1.686e+02], [ 3.300e-01, 3.200e-01, 7.610e+01, ..., 1.686e+02, 1.689e+02, 1.692e+02]]) """ def __init__( self, transformer_list: list, # type: ignore *, n_jobs: int = 1, transformer_weights: dict = None, # type: ignore verbose: bool = False, array_output: bool = False, ) -> None: self.array_output = array_output super().__init__( transformer_list, n_jobs=n_jobs, transformer_weights=transformer_weights, verbose=verbose, ) def _hstack(self, Xs: ndarray) -> Union[DataFrame, ndarray]: if self.array_output: for i in Xs: if isinstance(i, FData): raise TypeError( "There are transformed instances of FDataGrid or " "FDataBasis that can't be concatenated on a NumPy " "array.", ) return super()._hstack(Xs) return DataFrame({'Transformed data': Xs}) ``` #### File: dim_reduction/variable_selection/_rkvs.py ```python from __future__ import annotations from typing import Tuple import numpy as np import numpy.linalg as linalg import sklearn.utils.validation from ...._utils import _classifier_get_classes from ....representation import FDataGrid def _rkhs_vs( X: np.ndarray, Y: np.ndarray, n_features_to_select: int = 1, ) -> Tuple[np.ndarray, np.ndarray]: """ RKHS-VS implementation. Parameters: X: Matrix of trajectories Y: Vector of class labels n_features_to_select: Number of selected features Returns: Selected features and vector of scores. 
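
    Example:
        A minimal sketch (assuming ``X`` is an ``(n_samples, n_points)``
        array and ``y`` a binary label vector; names are illustrative):

        .. code-block:: python

            features, scores = _rkhs_vs(X, y, n_features_to_select=3)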
""" X = np.atleast_2d(X) assert n_features_to_select >= 1 assert n_features_to_select <= X.shape[1] _, Y = _classifier_get_classes(Y) selected_features = np.zeros(n_features_to_select, dtype=int) score = np.zeros(n_features_to_select) indexes = np.arange(0, X.shape[1]) # Calculate means and covariance matrix class_1_trajectories = X[Y.ravel() == 1] class_0_trajectories = X[Y.ravel() == 0] means = ( np.mean(class_1_trajectories, axis=0) - np.mean(class_0_trajectories, axis=0) ) class_1_count = sum(Y) class_0_count = Y.shape[0] - class_1_count class_1_proportion = class_1_count / Y.shape[0] class_0_proportion = class_0_count / Y.shape[0] # The result should be casted to 2D because of bug #11502 in numpy variances = ( class_1_proportion * np.atleast_2d( np.cov(class_1_trajectories, rowvar=False, bias=True), ) + class_0_proportion * np.atleast_2d( np.cov(class_0_trajectories, rowvar=False, bias=True), ) ) # The first variable maximizes |mu(t)|/sigma(t) mu_sigma = np.abs(means) / np.sqrt(np.diag(variances)) selected_features[0] = np.argmax(mu_sigma) score[0] = mu_sigma[selected_features[0]] indexes = np.delete(indexes, selected_features[0]) for i in range(1, n_features_to_select): aux = np.zeros_like(indexes, dtype=np.float_) for j in range(0, indexes.shape[0]): new_selection = np.concatenate([ selected_features[:i], [indexes[j]], ]) new_means = np.atleast_2d(means[new_selection]) lstsq_solution = linalg.lstsq( variances[new_selection[:, np.newaxis], new_selection], new_means.T, rcond=None, )[0] aux[j] = new_means @ lstsq_solution aux2 = np.argmax(aux) selected_features[i] = indexes[aux2] score[i] = aux[aux2] indexes = np.delete(indexes, aux2) return selected_features, score class RKHSVariableSelection( sklearn.base.BaseEstimator, # type: ignore sklearn.base.TransformerMixin, # type: ignore ): r""" Reproducing kernel variable selection. This is a filter variable selection method for binary classification problems. With a fixed number :math:`d` of variables to select, it aims to find the variables :math:`X(t_1), \ldots, X(t_d)` for the values :math:`t_1, \ldots, t_d` that maximize the separation of the class means in the reduced space, measured using the Mahalanobis distance .. math:: \phi(t_1, \ldots, t_d) = m_{t_1, \ldots, t_d}^T K_{t_1, \ldots, t_d}^{-1} m_{t_1, \ldots, t_d} where :math:`m_{t_1, \ldots, t_d}` is the difference of the mean functions of both classes evaluated at points :math:`t_1, \ldots, t_d` and :math:`K_{t_1, \ldots, t_d}` is the common covariance function evaluated at the same points. This method is optimal, with a fixed value of :math:`d`, for variable selection in Gaussian binary classification problems with the same covariance in both classes (homoscedasticity), when all possible combinations of points are taken into account. That means that for all possible selections of :math:`t_1, \ldots, t_d`, the one in which :math:`\phi(t_1, \ldots, t_d)` is greater minimizes the optimal misclassification error of all the classification problems with the reduced dimensionality. For a longer discussion about the optimality and consistence of this method, we refer the reader to the original article [1]_. In practice the points are selected one at a time, using a greedy approach, so this optimality is not always guaranteed. Parameters: n_features_to_select: number of features to select. 
Examples: >>> from skfda.preprocessing.dim_reduction import variable_selection >>> from skfda.datasets import make_gaussian_process >>> import skfda >>> import numpy as np We create trajectories from two classes, one with zero mean and the other with a peak-like mean. Both have Brownian covariance. >>> n_samples = 10000 >>> n_features = 1000 >>> >>> def mean_1(t): ... return (np.abs(t - 0.25) ... - 2 * np.abs(t - 0.5) ... + np.abs(t - 0.75)) >>> >>> X_0 = make_gaussian_process(n_samples=n_samples // 2, ... n_features=n_features, ... random_state=0) >>> X_1 = make_gaussian_process(n_samples=n_samples // 2, ... n_features=n_features, ... mean=mean_1, ... random_state=1) >>> X = skfda.concatenate((X_0, X_1)) >>> >>> y = np.zeros(n_samples) >>> y [n_samples // 2:] = 1 Select the relevant points to distinguish the two classes >>> rkvs = variable_selection.RKHSVariableSelection( ... n_features_to_select=3) >>> _ = rkvs.fit(X, y) >>> point_mask = rkvs.get_support() >>> points = X.grid_points[0][point_mask] >>> np.allclose(points, [0.25, 0.5, 0.75], rtol=1e-2) True Apply the learned dimensionality reduction >>> X_dimred = rkvs.transform(X) >>> len(X.grid_points[0]) 1000 >>> X_dimred.shape (10000, 3) References: .. [1] <NAME>, <NAME>, and <NAME>, «On the Use of Reproducing Kernel Hilbert Spaces in Functional Classification», Journal of the American Statistical Association, vol. 113, no. 523, pp. 1210-1218, jul. 2018, doi: 10.1080/01621459.2017.1320287. """ def __init__(self, n_features_to_select: int = 1) -> None: self.n_features_to_select = n_features_to_select def fit( # noqa: D102 self, X: FDataGrid, y: np.ndarray, ) -> RKHSVariableSelection: n_unique_labels = len(np.unique(y)) if n_unique_labels != 2: raise ValueError( f"RK-VS can only be used when there are only " f"two different labels, but there are " f"{n_unique_labels}", ) if X.dim_domain != 1 or X.dim_codomain != 1: raise ValueError("Domain and codomain dimensions must be 1") X, y = sklearn.utils.validation.check_X_y(X.data_matrix[..., 0], y) self._features_shape_ = X.shape[1:] features, scores = _rkhs_vs( X=X, Y=y, n_features_to_select=self.n_features_to_select, ) self._features_ = features self._scores_ = scores return self def transform( # noqa: D102 self, X: FDataGrid, Y: None = None, ) -> np.ndarray: sklearn.utils.validation.check_is_fitted(self) X_matrix = sklearn.utils.validation.check_array(X.data_matrix[..., 0]) if X_matrix.shape[1:] != self._features_shape_: raise ValueError( "The trajectories have a different number of " "points than the ones fitted", ) return X_matrix[:, self._features_] def get_support(self, indices: bool = False) -> np.ndarray: """ Get a mask, or integer index, of the features selected. Parameters: indices: If True, the return value will be an array of integers, rather than a boolean mask. Returns: An index that selects the retained features from a `FDataGrid` object. If `indices` is False, this is a boolean array of shape [# input features], in which an element is True iff its corresponding feature is selected for retention. If `indices` is True, this is an integer array of shape [# output features] whose values are indices into the input feature vector. 
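
        Example:
            A short sketch following the class example above (assuming
            ``rkvs`` is a fitted instance):

            .. code-block:: python

                mask = rkvs.get_support()             # boolean mask
                idx = rkvs.get_support(indices=True)  # integer indices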
""" features = self._features_ if indices: return features mask = np.zeros(self._features_shape_[0], dtype=bool) mask[features] = True return mask ``` #### File: skfda/representation/extrapolation.py ```python from __future__ import annotations from typing import ( TYPE_CHECKING, Any, Iterable, NoReturn, Optional, Union, cast, overload, ) import numpy as np from typing_extensions import Literal from ._typing import ArrayLike from .evaluator import Evaluator if TYPE_CHECKING: from . import FData ExtrapolationLike = Union[ Evaluator, Literal["bounds", "exception", "nan", "none", "periodic", "zeros"], ] class PeriodicExtrapolation(Evaluator): """Extend the :term:`domain` range periodically. Examples: >>> from skfda.datasets import make_sinusoidal_process >>> from skfda.representation.extrapolation import ( ... PeriodicExtrapolation) >>> fd = make_sinusoidal_process(n_samples=2, random_state=0) We can set the default type of extrapolation >>> fd.extrapolation = PeriodicExtrapolation() >>> fd([-.5, 0, 1.5]).round(3) array([[[-0.724], [ 0.976], [-0.724]], [[-1.086], [ 0.759], [-1.086]]]) This extrapolator is equivalent to the string `"periodic"` >>> fd.extrapolation = 'periodic' >>> fd([-.5, 0, 1.5]).round(3) array([[[-0.724], [ 0.976], [-0.724]], [[-1.086], [ 0.759], [-1.086]]]) """ def _evaluate( # noqa: D102 self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> np.ndarray: domain_range = np.asarray(fdata.domain_range) # Extends the domain periodically in each dimension eval_points -= domain_range[:, 0] eval_points %= domain_range[:, 1] - domain_range[:, 0] eval_points += domain_range[:, 0] return fdata(eval_points, aligned=aligned) # type: ignore class BoundaryExtrapolation(Evaluator): """Extend the :term:`domain` range using the boundary values. Examples: >>> from skfda.datasets import make_sinusoidal_process >>> from skfda.representation.extrapolation import ( ... BoundaryExtrapolation) >>> fd = make_sinusoidal_process(n_samples=2, random_state=0) We can set the default type of extrapolation >>> fd.extrapolation = BoundaryExtrapolation() >>> fd([-.5, 0, 1.5]).round(3) array([[[ 0.976], [ 0.976], [ 0.797]], [[ 0.759], [ 0.759], [ 1.125]]]) This extrapolator is equivalent to the string `"bounds"`. >>> fd.extrapolation = 'bounds' >>> fd([-.5, 0, 1.5]).round(3) array([[[ 0.976], [ 0.976], [ 0.797]], [[ 0.759], [ 0.759], [ 1.125]]]) """ def _evaluate( # noqa: D102 self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> np.ndarray: domain_range = fdata.domain_range if aligned: eval_points = np.asarray(eval_points) for i in range(fdata.dim_domain): a, b = domain_range[i] eval_points[eval_points[..., i] < a, i] = a eval_points[eval_points[..., i] > b, i] = b else: eval_points = cast(Iterable[ArrayLike], eval_points) for points_per_sample in eval_points: points_per_sample = np.asarray(points_per_sample) for i in range(fdata.dim_domain): a, b = domain_range[i] points_per_sample[points_per_sample[..., i] < a, i] = a points_per_sample[points_per_sample[..., i] > b, i] = b return fdata(eval_points, aligned=aligned) # type: ignore class ExceptionExtrapolation(Evaluator): """Raise an exception. Examples: >>> from skfda.datasets import make_sinusoidal_process >>> from skfda.representation.extrapolation import ( ... ExceptionExtrapolation) >>> fd = make_sinusoidal_process(n_samples=2, random_state=0) We can set the default type of extrapolation >>> fd.extrapolation = ExceptionExtrapolation() >>> try: ... 
fd([-.5, 0, 1.5]).round(3) ... except ValueError as e: ... print(e) Attempt to evaluate points outside the domain range. This extrapolator is equivalent to the string `"exception"`. >>> fd.extrapolation = 'exception' >>> try: ... fd([-.5, 0, 1.5]).round(3) ... except ValueError as e: ... print(e) Attempt to evaluate points outside the domain range. """ def _evaluate( # noqa: D102 self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> NoReturn: raise ValueError( "Attempt to evaluate points outside the domain range.", ) class FillExtrapolation(Evaluator): """ Values outside the :term:`domain` range will be filled with a fixed value. Examples: >>> from skfda.datasets import make_sinusoidal_process >>> from skfda.representation.extrapolation import FillExtrapolation >>> fd = make_sinusoidal_process(n_samples=2, random_state=0) We can set the default type of extrapolation >>> fd.extrapolation = FillExtrapolation(0) >>> fd([-.5, 0, 1.5]).round(3) array([[[ 0. ], [ 0.976], [ 0. ]], [[ 0. ], [ 0.759], [ 0. ]]]) The previous extrapolator is equivalent to the string `"zeros"`. In the same way FillExtrapolation(np.nan) is equivalent to `"nan"`. >>> fd.extrapolation = "nan" >>> fd([-.5, 0, 1.5]).round(3) array([[[ nan], [ 0.976], [ nan]], [[ nan], [ 0.759], [ nan]]]) """ def __init__(self, fill_value: float) -> None: self.fill_value = fill_value def _fill(self, fdata: FData, eval_points: ArrayLike) -> np.ndarray: eval_points = np.asarray(eval_points) shape = ( fdata.n_samples, eval_points.shape[-2], fdata.dim_codomain, ) return np.full(shape, self.fill_value) def _evaluate( # noqa: D102 self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> np.ndarray: from .._utils import _to_array_maybe_ragged if aligned: eval_points = cast(ArrayLike, eval_points) return self._fill(fdata, eval_points) eval_points = cast(Iterable[ArrayLike], eval_points) res_list = [self._fill(fdata, p) for p in eval_points] return _to_array_maybe_ragged(res_list) def __repr__(self) -> str: return ( f"{type(self).__name__}(" f"fill_value={self.fill_value})" ) def __eq__(self, other: Any) -> bool: return ( super().__eq__(other) and ( self.fill_value == other.fill_value # NaNs compare unequal. Should we distinguish between # different NaN types and payloads? or (np.isnan(self.fill_value) and np.isnan(other.fill_value)) ) ) @overload def _parse_extrapolation( extrapolation: None, ) -> None: pass @overload def _parse_extrapolation( extrapolation: ExtrapolationLike, ) -> Evaluator: pass def _parse_extrapolation( extrapolation: Optional[ExtrapolationLike], ) -> Optional[Evaluator]: """Parse the argument `extrapolation` of `FData`. If extrapolation is None returns the default extrapolator. Args: extrapolation (:class:´Extrapolator´, str or Callable): Argument extrapolation to be parsed. Returns: (:class:´Extrapolator´ or Callable): Extrapolation method. """ if extrapolation is None: return None elif isinstance(extrapolation, str): return extrapolation_methods[extrapolation.lower()] return extrapolation #: Dictionary with the extrapolation methods. 
extrapolation_methods = { "bounds": BoundaryExtrapolation(), "exception": ExceptionExtrapolation(), "nan": FillExtrapolation(np.nan), "none": None, "periodic": PeriodicExtrapolation(), "zeros": FillExtrapolation(0), } ``` #### File: skfda/representation/interpolation.py ```python from __future__ import annotations import abc from typing import ( TYPE_CHECKING, Any, Callable, Iterable, Sequence, Tuple, Union, cast, ) import numpy as np from scipy.interpolate import ( PchipInterpolator, RectBivariateSpline, RegularGridInterpolator, UnivariateSpline, ) from .._utils import _to_array_maybe_ragged from ._typing import ArrayLike from .evaluator import Evaluator if TYPE_CHECKING: from . import FData SplineCallable = Callable[..., np.ndarray] class _SplineList(abc.ABC): """ABC for list of interpolations.""" def __init__( self, fdatagrid: FData, interpolation_order: Union[int, Sequence[int]] = 1, smoothness_parameter: float = 0, ): super().__init__() self.fdatagrid = fdatagrid self.interpolation_order = interpolation_order self.smoothness_parameter = smoothness_parameter self.splines: Sequence[Sequence[SplineCallable]] # @abc.abstractmethod # @property # def splines(self) -> Sequence[SplineCallable]: # pass @abc.abstractmethod def _evaluate_one( self, spline: SplineCallable, eval_points: np.ndarray, ) -> np.ndarray: """Evaluate one spline of the list.""" pass def _evaluate_codomain( self, spline_list: Sequence[SplineCallable], eval_points: np.ndarray, ) -> np.ndarray: """Evaluate a multidimensional sample.""" return np.array([ self._evaluate_one(spl, eval_points) for spl in spline_list ]).T def evaluate( self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> np.ndarray: res: np.ndarray if aligned: eval_points = np.asarray(eval_points) # Points evaluated inside the domain res = np.apply_along_axis( self._evaluate_codomain, 1, self.splines, eval_points, ) res = res.reshape( fdata.n_samples, eval_points.shape[0], fdata.dim_codomain, ) else: eval_points = cast(Iterable[ArrayLike], eval_points) res = _to_array_maybe_ragged([ self._evaluate_codomain(s, np.asarray(e)) for s, e in zip(self.splines, eval_points) ]) return res class _SplineList1D(_SplineList): """List of interpolations for curves. List of interpolations for objects with domain dimension = 1. Calling internally during the creation of the evaluator. Uses internally the scipy interpolation UnivariateSpline or PchipInterpolator. Args: fdatagrid (FDatagrid): Fdatagrid to interpolate. interpolation_order (int, optional): Order of the interpolation, 1 for linear interpolation, 2 for cuadratic, 3 for cubic and so on. In case of curves and surfaces there is available interpolation up to degree 5. For higher dimensional objects only linear or nearest interpolation is available. Default lineal interpolation. smoothness_parameter (float, optional): Penalisation to perform smoothness interpolation. Option only available for curves and surfaces. If 0 the residuals of the interpolation will be 0. Defaults 0. monotone (boolean, optional): Performs monotone interpolation in curves using a PCHIP interpolator. Only valid for curves (domain dimension equal to 1) and interpolation order equal to 1 or 3. Defaults false. Returns: (np.ndarray): Array of size n_samples x dim_codomain with the corresponding interpolation of the sample i, and image dimension j in the entry (i,j) of the array. Raises: ValueError: If the value of the interpolation k is not valid. 
""" def __init__( self, fdatagrid: FData, interpolation_order: Union[int, Sequence[int]] = 1, smoothness_parameter: float = 0, monotone: bool = False, ): super().__init__( fdatagrid=fdatagrid, interpolation_order=interpolation_order, smoothness_parameter=smoothness_parameter, ) self.monotone = monotone if ( isinstance(self.interpolation_order, Sequence) or not 1 <= self.interpolation_order <= 5 ): raise ValueError( f"Invalid degree of interpolation " f"({self.interpolation_order}). Must be " f"an integer greater than 0 and lower or " f"equal than 5.", ) if self.monotone and self.smoothness_parameter != 0: raise ValueError( "Smoothing interpolation is not supported with " "monotone interpolation", ) if self.monotone and self.interpolation_order in {2, 4}: raise ValueError( f"monotone interpolation of degree " f"{self.interpolation_order}" f"not supported.", ) # Monotone interpolation of degree 1 is performed with linear spline monotone = self.monotone if self.monotone and self.interpolation_order == 1: monotone = False grid_points = fdatagrid.grid_points[0] if monotone: def constructor( # noqa: WPS430 data: np.ndarray, ) -> SplineCallable: """Construct an unidimensional cubic monotone interpolation.""" return PchipInterpolator(grid_points, data) else: def constructor( # noqa: WPS430, WPS440 data: np.ndarray, ) -> SplineCallable: """Construct an unidimensional interpolation.""" return UnivariateSpline( grid_points, data, s=self.smoothness_parameter, k=self.interpolation_order, ) self.splines = np.apply_along_axis( constructor, 1, fdatagrid.data_matrix, ) def _evaluate_one( self, spline: SplineCallable, eval_points: np.ndarray, ) -> np.ndarray: try: return spline(eval_points)[:, 0] except ValueError: return np.zeros_like(eval_points) class _SplineList2D(_SplineList): """List of interpolations for surfaces. List of interpolations for objects with domain dimension = 2. Calling internally during the creationg of the evaluator. Uses internally the scipy interpolation RectBivariateSpline. Args: fdatagrid (FDatagrid): Fdatagrid to interpolate. interpolation_order (int, optional): Order of the interpolation, 1 for linear interpolation, 2 for cuadratic, 3 for cubic and so on. In case of curves and surfaces there is available interpolation up to degree 5. For higher dimensional objects only linear or nearest interpolation is available. Default lineal interpolation. smoothness_parameter (float, optional): Penalisation to perform smoothness interpolation. Option only available for curves and surfaces. If 0 the residuals of the interpolation will be 0. Defaults 0. monotone (boolean, optional): Performs monotone interpolation in curves using a PCHIP interpolator. Only valid for curves (domain dimension equal to 1) and interpolation order equal to 1 or 3. Defaults false. Returns: (np.ndarray): Array of size n_samples x dim_codomain with the corresponding interpolation of the sample i, and image dimension j in the entry (i,j) of the array. Raises: ValueError: If the value of the interpolation k is not valid. 
""" def __init__( self, fdatagrid: FData, interpolation_order: Union[int, Sequence[int]] = 1, smoothness_parameter: float = 0, ): super().__init__( fdatagrid=fdatagrid, interpolation_order=interpolation_order, smoothness_parameter=smoothness_parameter, ) if isinstance(self.interpolation_order, int): kx = self.interpolation_order ky = kx elif len(self.interpolation_order) == 2: kx = self.interpolation_order[0] ky = self.interpolation_order[1] else: raise ValueError("k should be numeric or a tuple of length 2.") if kx > 5 or kx <= 0 or ky > 5 or ky <= 0: raise ValueError( f"Invalid degree of interpolation ({kx},{ky}). " f"Must be an integer greater than 0 and lower or " f"equal than 5.", ) # Matrix of splines splines = np.empty( (fdatagrid.n_samples, fdatagrid.dim_codomain), dtype=object, ) for i in range(fdatagrid.n_samples): for j in range(fdatagrid.dim_codomain): splines[i, j] = RectBivariateSpline( fdatagrid.grid_points[0], fdatagrid.grid_points[1], fdatagrid.data_matrix[i, :, :, j], kx=kx, ky=ky, s=self.smoothness_parameter, ) self.splines = splines def _evaluate_one( self, spline: SplineCallable, eval_points: np.ndarray, ) -> np.ndarray: return spline( eval_points[:, 0], eval_points[:, 1], grid=False, ) class _SplineListND(_SplineList): """ List of interpolations. List of interpolations for objects with domain dimension > 2. Calling internally during the creationg of the evaluator. Only linear and nearest interpolations are available for objects with domain dimension >= 3. Uses internally the scipy interpolation RegularGridInterpolator. Args: grid_points (np.ndarray): Sample points of the fdatagrid. data_matrix (np.ndarray): Data matrix of the fdatagrid. k (integer): Order of the spline interpolations. Returns: (np.ndarray): Array of size n_samples x dim_codomain with the corresponding interpolation of the sample i, and image dimension j in the entry (i,j) of the array. Raises: ValueError: If the value of the interpolation k is not valid. """ def __init__( self, fdatagrid: FData, interpolation_order: Union[int, Sequence[int]] = 1, smoothness_parameter: float = 0, ) -> None: super().__init__( fdatagrid=fdatagrid, interpolation_order=interpolation_order, smoothness_parameter=smoothness_parameter, ) if self.smoothness_parameter != 0: raise ValueError( "Smoothing interpolation is only supported with " "domain dimension up to 2.", ) # Parses method of interpolation if self.interpolation_order == 0: method = 'nearest' elif self.interpolation_order == 1: method = 'linear' else: raise ValueError( "interpolation order should be 0 (nearest) or 1 (linear).", ) splines = np.empty( (fdatagrid.n_samples, fdatagrid.dim_codomain), dtype=object, ) for i in range(fdatagrid.n_samples): for j in range(fdatagrid.dim_codomain): splines[i, j] = RegularGridInterpolator( fdatagrid.grid_points, fdatagrid.data_matrix[i, ..., j], method=method, bounds_error=False, ) self.splines = splines def _evaluate_one( self, spline: SplineCallable, eval_points: np.ndarray, ) -> np.ndarray: return spline(eval_points) class SplineInterpolation(Evaluator): """ Spline interpolation. Spline interpolation of discretized functional objects. Implements different interpolation methods based in splines, using the sample points of the grid as nodes to interpolate. See the interpolation example to a detailled explanation. Attributes: interpolation_order (int, optional): Order of the interpolation, 1 for linear interpolation, 2 for cuadratic, 3 for cubic and so on. 
In case of curves and surfaces there is available interpolation up to degree 5. For higher dimensional objects only linear or nearest interpolation is available. Default lineal interpolation. smoothness_parameter (float, optional): Penalisation to perform smoothness interpolation. Option only available for curves and surfaces. If 0 the residuals of the interpolation will be 0. Defaults 0. monotone (boolean, optional): Performs monotone interpolation in curves using a PCHIP interpolator. Only valid for curves (domain dimension equal to 1) and interpolation order equal to 1 or 3. Defaults false. """ def __init__( self, interpolation_order: Union[int, Sequence[int]] = 1, *, smoothness_parameter: float = 0, monotone: bool = False, ) -> None: self._interpolation_order = interpolation_order self._smoothness_parameter = smoothness_parameter self._monotone = monotone @property def interpolation_order(self) -> Union[int, Tuple[int, ...]]: """Interpolation order.""" return ( self._interpolation_order if isinstance(self._interpolation_order, int) else tuple(self._interpolation_order) ) @property def smoothness_parameter(self) -> float: """Smoothness parameter.""" return self._smoothness_parameter @property def monotone(self) -> bool: """Flag to perform monotone interpolation.""" return self._monotone def _build_interpolator( self, fdatagrid: FData, ) -> _SplineList: if fdatagrid.dim_domain == 1: return _SplineList1D( fdatagrid=fdatagrid, interpolation_order=self.interpolation_order, smoothness_parameter=self.smoothness_parameter, monotone=self.monotone, ) elif self.monotone: raise ValueError( "Monotone interpolation is only supported with " "domain dimension equal to 1.", ) elif fdatagrid.dim_domain == 2: return _SplineList2D( fdatagrid=fdatagrid, interpolation_order=self.interpolation_order, smoothness_parameter=self.smoothness_parameter, ) return _SplineListND( fdatagrid=fdatagrid, interpolation_order=self.interpolation_order, smoothness_parameter=self.smoothness_parameter, ) def _evaluate( # noqa: D102 self, fdata: FData, eval_points: Union[ArrayLike, Iterable[ArrayLike]], *, aligned: bool = True, ) -> np.ndarray: spline_list = self._build_interpolator(fdata) return spline_list.evaluate(fdata, eval_points, aligned=aligned) def __repr__(self) -> str: return ( f"{type(self).__name__}(" f"interpolation_order={self.interpolation_order}, " f"smoothness_parameter={self.smoothness_parameter}, " f"monotone={self.monotone})" ) def __eq__(self, other: Any) -> bool: return ( super().__eq__(other) and self.interpolation_order == other.interpolation_order and self.smoothness_parameter == other.smoothness_parameter and self.monotone == other.monotone ) ``` #### File: skfda/_utils/_warping.py ```python from __future__ import annotations from typing import TYPE_CHECKING, Optional import numpy as np from scipy.interpolate import PchipInterpolator from ..representation._typing import ArrayLike, DomainRangeLike, NDArrayFloat from ._utils import _to_domain_range, check_is_univariate if TYPE_CHECKING: from ..representation import FDataGrid def invert_warping( warping: FDataGrid, *, output_points: Optional[ArrayLike] = None, ) -> FDataGrid: r""" Compute the inverse of a diffeomorphism. Let :math:`\gamma : [a,b] \rightarrow [a,b]` be a function strictly increasing, calculates the corresponding inverse :math:`\gamma^{-1} : [a,b] \rightarrow [a,b]` such that :math:`\gamma^{-1} \circ \gamma = \gamma \circ \gamma^{-1} = \gamma_{id}`. Uses a PCHIP interpolator to compute approximately the inverse. 
Args:
        warping: Functions to be inverted.
        output_points: Set of points where the functions are interpolated
            to obtain the inverse. By default the sample points of the
            fdatagrid are used.

    Returns:
        Inverse of the original functions.

    Raises:
        ValueError: If the functions are not strictly increasing or are
            multidimensional.

    Examples:
        >>> import numpy as np
        >>> from skfda import FDataGrid

        We will construct the warping :math:`\gamma : [0,1] \rightarrow
        [0,1]` which maps t to t^3.

        >>> t = np.linspace(0, 1)
        >>> gamma = FDataGrid(t**3, t)
        >>> gamma
        FDataGrid(...)

        We will compute the inverse.

        >>> inverse = invert_warping(gamma)
        >>> inverse
        FDataGrid(...)

        The result of the composition should be approximately the identity
        function.

        >>> identity = gamma.compose(inverse)
        >>> identity([0, 0.25, 0.5, 0.75, 1]).round(3)
        array([[[ 0.  ],
                [ 0.25],
                [ 0.5 ],
                [ 0.75],
                [ 1.  ]]])

    """
    check_is_univariate(warping)

    output_points = (
        warping.grid_points[0]
        if output_points is None
        else np.asarray(output_points)
    )

    y = warping(output_points)[..., 0]

    data_matrix = np.empty((warping.n_samples, len(output_points)))

    for i in range(warping.n_samples):
        data_matrix[i] = PchipInterpolator(y[i], output_points)(output_points)

    return warping.copy(data_matrix=data_matrix, grid_points=output_points)


def normalize_scale(
    t: NDArrayFloat,
    a: float = 0,
    b: float = 1,
) -> NDArrayFloat:
    """
    Perform an affine translation to normalize an interval.

    Args:
        t: Array of dim 1 or 2 with at least 2 values.
        a: Starting point of the new interval. Defaults to 0.
        b: Stopping point of the new interval. Defaults to 1.

    Returns:
        Array with the transformed interval.
    """
    t = t.T  # Broadcast to normalize multiple arrays
    t1 = (t - t[0]).astype(float)  # Translation to [0, t[-1] - t[0]]
    t1 *= (b - a) / (t[-1] - t[0])  # Scale to [0, b-a]
    t1 += a  # Translation to [a, b]
    t1[0] = a  # Fix possible round errors
    t1[-1] = b

    return t1.T


def normalize_warping(
    warping: FDataGrid,
    domain_range: Optional[DomainRangeLike] = None,
) -> FDataGrid:
    r"""
    Rescale a warping to normalize its :term:`domain`.

    Given a set of warpings :math:`\gamma_i:[a,b]\rightarrow [a,b]`, an
    affine translation is used to change the domain of the transformation
    to another domain, :math:`\tilde \gamma_i:[\tilde a,\tilde b]
    \rightarrow [\tilde a, \tilde b]`.

    Args:
        warping: Set of warpings to rescale.
        domain_range: New domain range of the warping. By default the same
            domain range is used.

    Returns:
        Normalized warpings.
""" domain_range_tuple = ( warping.domain_range[0] if domain_range is None else _to_domain_range(domain_range)[0] ) data_matrix = normalize_scale( warping.data_matrix[..., 0], *domain_range_tuple, ) grid_points = normalize_scale(warping.grid_points[0], *domain_range_tuple) return warping.copy( data_matrix=data_matrix, grid_points=grid_points, domain_range=domain_range, ) ``` #### File: scikit-fda/tests/test_depth.py ```python import skfda from skfda.exploratory.depth import IntegratedDepth, ModifiedBandDepth import unittest import numpy as np class TestsDepthSameCurves(unittest.TestCase): def setUp(self): data_matrix = [[1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4], [1, 2, 3, 4]] self.fd = skfda.FDataGrid(data_matrix) def test_integrated_equal(self): depth = IntegratedDepth() np.testing.assert_almost_equal( depth(self.fd), [0.5, 0.5, 0.5, 0.5, 0.5]) def test_modified_band_depth_equal(self): depth = ModifiedBandDepth() np.testing.assert_almost_equal( depth(self.fd), [1, 1, 1, 1, 1]) ``` #### File: scikit-fda/tests/test_recursive_maxima_hunting.py ```python import skfda from skfda.datasets import make_gaussian_process from skfda.preprocessing.dim_reduction import variable_selection as vs import unittest import numpy as np class TestRMH(unittest.TestCase): def test_rmh(self): n_samples = 10000 n_features = 100 def mean_1(t): return (np.abs(t - 0.25) - 2 * np.abs(t - 0.5) + np.abs(t - 0.75)) X_0 = make_gaussian_process(n_samples=n_samples // 2, n_features=n_features, random_state=0) X_1 = make_gaussian_process(n_samples=n_samples // 2, n_features=n_features, mean=mean_1, random_state=1) X = skfda.concatenate((X_0, X_1)) y = np.zeros(n_samples) y[n_samples // 2:] = 1 correction = vs.recursive_maxima_hunting.GaussianSampleCorrection() stopping_condition = vs.recursive_maxima_hunting.ScoreThresholdStop( threshold=0.05) rmh = vs.RecursiveMaximaHunting( correction=correction, stopping_condition=stopping_condition) _ = rmh.fit(X, y) point_mask = rmh.get_support() points = X.grid_points[0][point_mask] np.testing.assert_allclose(points, [0.25, 0.5, 0.75], rtol=1e-1) if __name__ == '__main__': unittest.main() ``` #### File: scikit-fda/tests/test_ufunc_numpy.py ```python import unittest from typing import Any, Callable, TypeVar import numpy as np import pytest import skfda from skfda import FDataGrid @pytest.fixture(params=[ np.sqrt, np.absolute, np.round, np.exp, np.log, np.log10, np.log2, ]) def monary(request: Any) -> Any: """ Fixture providing the monary function to validate. Not all of them are ufuncs. 
""" return request.param T = TypeVar("T", np.ndarray, FDataGrid) def test_monary_ufuncs(monary: Callable[[T], T]) -> None: """Test that unary ufuncs can be applied to FDataGrid.""" data_matrix = np.arange(15).reshape(3, 5) + 1 fd = FDataGrid(data_matrix) fd_monary = monary(fd) fd_monary_build = FDataGrid(monary(data_matrix)) assert fd_monary.equals(fd_monary_build) def test_binary_ufunc() -> None: """Test that binary ufuncs can be applied to FDataGrid.""" data_matrix = np.arange(15).reshape(3, 5) data_matrix2 = 2 * np.arange(15).reshape(3, 5) fd = FDataGrid(data_matrix) fd2 = FDataGrid(data_matrix2) fd_mul = np.multiply(fd, fd2) fd_mul_build = FDataGrid(data_matrix * data_matrix2) assert fd_mul.equals(fd_mul_build) def test_out_ufunc(monary: Callable[..., Any]) -> None: """Test that the out parameter of ufuncs work for FDataGrid.""" data_matrix = np.arange(15).reshape(3, 5) + 1 data_matrix_copy = np.copy(data_matrix) fd = FDataGrid(data_matrix) monary(fd, out=fd) fd_monary_build = FDataGrid(monary(data_matrix_copy)) assert fd.equals(fd_monary_build) class TestOperators(unittest.TestCase): """Tests for operators.""" def test_commutativity(self) -> None: """Test that operations with numpy arrays commute.""" X = FDataGrid([[1, 2, 3], [4, 5, 6]]) arr = np.array([1, 2]) self.assertTrue((arr + X).equals((X + arr))) def test_commutativity_basis(self) -> None: """Test that operations with numpy arrays for basis commute.""" X = FDataGrid([[1, 2, 3], [4, 5, 6]]) arr = np.array([1, 2]) basis = skfda.representation.basis.Fourier(n_basis=5) X_basis = X.to_basis(basis) self.assertTrue((arr * X_basis).equals((X_basis * arr))) ```
{ "source": "jidv/outils_graphiques", "score": 3 }
#### File: Puzzles/affichages/drapeau_polonais.py
```python
from random import choice
from outils.interface_grille import Vue

class Dessin:

    def __init__(self):
        self.n = 17
        self.plateau = [choice(('R', 'B')) for i in range(self.n)]
        self.vue = Vue(1, self.n)
        self.redessiner_plateau()

    def echanger(self, i, j):
        self.plateau[i], self.plateau[j] = self.plateau[j], self.plateau[i]
        self.redessiner_plateau()

    def redessiner_plateau(self):
        dico = {'R':'./pions/pion_rouge.png', 'B': './pions/pion_blanc.png'}
        lig = 0
        for col in range(self.n):
            self.vue.modifier_grille('fond', lig, col, c = (220, 220, 220), visible = True)
            self.vue.modifier_grille('image', lig, col, cf = dico[self.plateau[col]], visible = True)
```
#### File: Puzzles/affichages/les_trois_seaux.py
```python
from outils.vizu_graphe import VizuGraphe
from outils.xile import File, Pile

class Dessin:

    def __init__(self):
        self.graphe = dict()
        self.depart = (8, 0, 0)
        self.gagnant = (4, 0, 0)
        self.volumes = {0:8, 1:5, 2:3}
        #self.couleurs = {elt : (0.67, 0.4, 1) for elt in self.bon_chemin}
        self.peupler()
        filiations = self.parcours_2_ter()
        bon_chemin = self.donner_chemin(filiations)
        bon_chemin[self.tts(self.depart)] = (0.85, 1, 1)
        bon_chemin[self.tts(self.gagnant)] = (0.85, 1, 1)
        self.v = VizuGraphe('liste', self.graphe, oriente = True, moteur = 'dot', couleurs = bon_chemin)

    def peupler(self):
        file = File()
        file.ajouter(self.depart)
        while not file.est_vide():
            t = file.extraire()
            descendants = self.donner_descendants(t)
            for desc in descendants:
                if self.gerer_graphe(t, desc): #<-------------------
                                               #update the graph and,
                    file.ajouter(desc)         #if needed, add the
                                               #descendant to the queue
                #if desc == self.gagnant: #<-------------------- # why not?
                    #return

    def tts(self, t):
        return str(t[0]) + str(t[1]) + str(t[2])

    def gerer_graphe(self, t, desc):
        '''
        Update the graph and return True if the descendant of t still needs
        to be explored (put in the queue), False otherwise (not put in the queue).
        '''
        #side effect
        t_as_str = self.tts(t)
        desc_as_str = self.tts(desc)
        if t_as_str in self.graphe.keys():
            if desc_as_str not in self.graphe[t_as_str]:
                self.graphe[t_as_str].append(desc_as_str)
        else:
            self.graphe[t_as_str] = [desc_as_str]
        if desc_as_str not in self.graphe.keys():
            self.graphe[desc_as_str] = []
            return True
        else:
            return False

    def donner_descendants(self, t):  #t = 3-tuple of volumes
        descendants = set()
        descendants.add((0, t[1], t[2]))
        descendants.add((t[0], 0, t[2]))
        descendants.add((t[0], t[1], 0))
        for i in range(3):
            for j in range(3):
                if j != i:
                    l = list(t)
                    volume_de_j_apres_transvase = min(self.volumes[j], t[j] + t[i])
                    volume_de_i_apres_transvase = t[i] - (volume_de_j_apres_transvase - t[j])
                    l[i] = volume_de_i_apres_transvase
                    l[j] = volume_de_j_apres_transvase
                    descendants.add(tuple(l))
        return descendants

    def parcours_2_ter(self):
        '''
        Return a dictionary {vertex: predecessor for vertex in graph}
        '''
        graphe = self.graphe
        depart = self.tts(self.depart)
        parents = dict()

        def visiter(successeur, sommet):          #<sss
            parents[successeur] = sommet

        a_explorer = File()
        deja_vu = dict()
        deja_vu[depart] = True
        parents[depart] = None                    #<sss
        a_explorer.ajouter(depart)
        while not a_explorer.est_vide():
            sommet = a_explorer.extraire()
            for successeur in graphe[sommet]:
                if successeur not in deja_vu.keys():
                    deja_vu[successeur] = True
                    visiter(successeur, sommet)   #<sss
                    a_explorer.ajouter(successeur)
        return parents

    def donner_chemin(self, parents):
        depart = self.tts(self.depart)
        sommet = self.tts(self.gagnant)
        if sommet not in parents.keys():
            return dict()
        chemin_inverse = Pile()
        chemin_inverse.ajouter(sommet)
        longueur = 0
        while parents[sommet] != None:
            sommet = parents[sommet]
            chemin_inverse.ajouter(sommet)
            longueur = longueur + 1
        longueur_chemin = longueur
        chemin = dict()
        while longueur>=0:
            sommet = chemin_inverse.extraire()
            chemin[sommet] = (1, 1, 1)
            longueur = longueur - 1
        return chemin
```
#### File: Puzzles/affichages/missionnaires_et_cannibales.py
```python
from outils.vizu_graphe import VizuGraphe
from outils.xile import File, Pile
from copy import deepcopy

class Dessin:

    def __init__(self):
        self.graphe = dict()
        self.nb_m = 3
        self.nb_c = 3
        self.depart = Situation(self.nb_m,self.nb_c,'g',0,0)
        self.gagnant = Situation(0,0,'d',self.nb_m,self.nb_c)
        self.peupler()
        #filiations = self.parcours_2_ter()
        #bon_chemin = self.donner_chemin(filiations)
        #bon_chemin[self.tts(self.depart)] = (0.85, 1, 1)
        #bon_chemin[self.tts(self.gagnant)] = (0.85, 1, 1)
        bon_chemin = {self.tts(self.depart) : (0.85, 1, 1), self.tts(self.gagnant) : (0.85, 1, 1)}
        self.v = VizuGraphe('liste', self.graphe, oriente = True, moteur = 'neato', couleurs = bon_chemin)

    def peupler(self):
        file = File()
        file.ajouter(self.depart)
        while not file.est_vide():
            t = file.extraire()
            descendants = self.donner_descendants(t)
            for desc in descendants:
                if self.gerer_graphe(t, desc): #<-------------------
                                               #update the graph and,
                    file.ajouter(desc)         #if needed, add the
                                               #descendant to the queue
                #if desc == self.gagnant: #<-------------------- #why not?
                    #return

    def tts(self, s):
        s = str(s.mg) + str(s.cg) + ('*| ' if s.bb == 'g' else ' |*') + str(s.md) + str(s.cd)
        return s

    def gerer_graphe(self, t, desc):
        '''
        Update the graph and return True if the descendant of t still needs
        to be explored (put in the queue), False otherwise (not put in the queue).
        '''
        #side effect
        t_as_str = self.tts(t)
        desc_as_str = self.tts(desc)
        if t_as_str in self.graphe.keys():
            if desc_as_str not in self.graphe[t_as_str]:
                self.graphe[t_as_str].append(desc_as_str)
        else:
            self.graphe[t_as_str] = [desc_as_str]
        if desc_as_str not in self.graphe.keys():
            self.graphe[desc_as_str] = []
            return True
        else:
            return False

    def donner_descendants(self, s):
        descendants = set()
        if s.bb == 'g':
            if s.mg>=1 and s.cg>=1 and not (s.md+1 < s.cd+1):  # "and not" to avoid the case (0, 1) --> (1, 2)
                descendants.add(Situation(s.mg-1, s.cg-1, 'd', s.md+1, s.cd+1))
            if s.mg>=1 and not (0 < s.mg-1 and s.mg-1 < s.cg):
                descendants.add(Situation(s.mg-1, s.cg, 'd', s.md+1, s.cd ))
            if s.cg>=1 and not (0 < s.md and s.md < s.cd+1):
                descendants.add(Situation(s.mg , s.cg-1, 'd', s.md, s.cd+1))
            if s.mg>=2 and not (0 < s.mg-2 and s.mg - 2 < s.cg):
                descendants.add(Situation(s.mg-2, s.cg, 'd', s.md+2, s.cd ))
            if s.cg>=2 and not (0 < s.md and s.md < s.cd + 2):
                descendants.add(Situation(s.mg, s.cg-2, 'd', s.md, s.cd+2))
        if s.bb == 'd':
            if s.md>=1 and s.cd>=1 and not (s.mg+1 < s.cg+1):
                descendants.add(Situation(s.mg+1, s.cg+1, 'g', s.md-1, s.cd-1))
            if s.md>=1 and not (0 < s.md-1 and s.md-1 < s.cd):
                descendants.add(Situation(s.mg+1, s.cg, 'g', s.md-1, s.cd ))
            if s.cd>=1 and not (0 < s.mg and s.mg < s.cg+1):
                descendants.add(Situation(s.mg , s.cg+1, 'g', s.md, s.cd-1))
            if s.md>=2 and not (0 < s.md-2 and s.md-2 < s.cd):
                descendants.add(Situation(s.mg+2, s.cg, 'g', s.md-2, s.cd ))
            if s.cd>=2 and not (0 < s.mg and s.mg < s.cg+2):
                descendants.add(Situation(s.mg, s.cg+2, 'g', s.md, s.cd-2))
        return descendants

    def parcours_2_ter(self):
        '''
        Return a dictionary {vertex: predecessor for vertex in graph}
        '''
        graphe = self.graphe
        depart = self.tts(self.depart)
parents = dict() def visiter(successeur, sommet): #<sss parents[successeur] = sommet a_explorer = File() deja_vu = dict() deja_vu[depart] = True parents[depart] = None #<sss a_explorer.ajouter(depart) while not a_explorer.est_vide(): sommet = a_explorer.extraire() for successeur in graphe[sommet]: if successeur not in deja_vu.keys(): deja_vu[successeur] = True visiter(successeur, sommet) #<sss a_explorer.ajouter(successeur) return parents def donner_chemin(self, parents): depart = self.tts(self.depart) sommet = self.tts(self.gagnant) if sommet not in parents.keys(): return dict() chemin_inverse = Pile() chemin_inverse.ajouter(sommet) longueur = 0 while parents[sommet] != None: sommet = parents[sommet] chemin_inverse.ajouter(sommet) longueur = longueur + 1 longueur_chemin = longueur chemin = dict() while longueur>=0: sommet = chemin_inverse.extraire() chemin[sommet] = (1, 1, 1) longueur = longueur - 1 return chemin class Situation: def __init__(self, mg, cg, berge_bateau, md, cd): self.mg = mg self.cg = cg self.md = md self.cd = cd self.bb = berge_bateau def est_egal(self, s): return s.mg == self.mg and s.cg == self.cg and s.md == self.md and s.cd == self.cd and s.bb == self.bb ```
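Both solvers above lean on the `outils.xile` module, which is not included in this excerpt. A minimal sketch of `File` and `Pile` consistent with the only three methods the code calls (`ajouter`, `extraire`, `est_vide`); the real module may differ:

```python
from collections import deque

class File:
    """FIFO queue, as used by the breadth-first traversals above."""
    def __init__(self):
        self._items = deque()

    def ajouter(self, x):
        self._items.append(x)

    def extraire(self):
        return self._items.popleft()

    def est_vide(self):
        return len(self._items) == 0

class Pile(File):
    """LIFO stack: only the extraction end differs."""
    def extraire(self):
        return self._items.pop()
```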
{ "source": "jie123108/TestGit", "score": 3 }
#### File: rest_test/rest_http_test/httptest.py ```python import unittest import httpclient as http import json import re from dotmap import DotMap as dotdict import jsonschema as jschema from funcs import * from . import * import logging log = logging.getLogger() ht_save_data = {} def get_save_data(section_name): return ht_save_data.get(section_name) class TestDataError(Exception): def __init__(self, message): super(TestDataError, self).__init__(message) def assertTrue(assertval, errmsg): if not assertval: raise TestDataError(errmsg) code_re = re.compile(r"`[^`]*`") ## 执行,并替换字符串中的`code`部分的内容。 def dynamic_execute(text, env): if not text: return text def execute_and_replace(matchobj): text = matchobj.group(0) text = text[1:len(text)-1] # log.error("`%s` globals: %s", text, json.dumps(globals().keys())) value = eval(text, globals(), env) assertTrue(value != None, " code `" + text + "` not return") return str(value) or '' newtext = code_re.sub(execute_and_replace, text) return newtext class HttpTest(unittest.TestCase): pass def raw_args_eval(raw_args, current_section): raw_text = "\n".join(raw_args) if raw_text and current_section.funcs: for i, func in enumerate(current_section.funcs): raw_text = func(raw_text) return raw_text def parse_args(str_args): if not str_args: return None arr = str_args.split("&") args = {} for i, arg in enumerate(arr): pair = arg.split("=") key = None value = None if len(pair) == 1: key = pair[0].strip() value = "" elif len(pair) == 2: key = pair[0].strip() value = pair[1].strip() else: log.error("---- invalid arg [%s]", arg) if key: args[key] = value return args def parse_headers(raw_headers): headers = http.NewHeaders() header_lines = raw_headers.split('\n') for i, line in enumerate(header_lines): if line.strip(): arr = line.split(':') assertTrue(len(arr) == 2, "invalid header:[" + line + "]") headers[arr[0].strip()]=arr[1].strip() return headers ## 去掉空行. 
def lines_trim(lines): if lines: return [line for line in lines if (line and line.strip())] return lines methods = {"GET": True, "POST": True} def request_parse(raw_args, current_section, env): if current_section.funcs: request = raw_args_eval(raw_args, current_section) raw_args = request.split('\n') req_line = raw_args[0] arr = req_line.split(' ') assertTrue(len(arr) == 2, "invalid request line: " + req_line + " arr len(" + str(len(arr)) + ")") method = arr[0] assertTrue(methods.get(method) != None, "unexpected http method: " + method) args = dotdict({}) args.method = method args.uri = arr[1].strip() if method == 'POST': raw_args = raw_args[1:] args.body = "\n".join(raw_args) else: args.body = "" return args def timeout_parse(raw_args, current_section, env): timeout = raw_args_eval(raw_args, current_section) return float(timeout) def more_headers_parse(raw_args, current_section, env): headers = '' raw_args = lines_trim(raw_args) if current_section.funcs: args = ''.join(raw_args) if args: for i, func in enumerate(current_section.funcs): args = func(args) func_name = current_section.func_names[i] assertTrue(type(args) == 'string', "more_headers function [" + func_name + "] return a no string value!") headers = args else: headers = "\n".join(raw_args) return headers def error_code_parse(raw_args, current_section, env): raw_args = lines_trim(raw_args) raw_args = [raw_args_eval(raw_args, current_section)] assertTrue(len(raw_args) ==1, "invalid error_code lines: " + str(len(raw_args))) error_code = int(raw_args[0]) assertTrue(error_code != None, "Invalid error_code:" + raw_args[0]) return error_code def response_body_parse(raw_args, current_section, env): expected_body = raw_args_eval(raw_args, current_section) return expected_body def response_body_filter_parse(raw_args, current_section, env): raw_args = lines_trim(raw_args) if raw_args: funcs = get_func_by_name(raw_args, env) return funcs else: return [] def on_fail_parse(raw_args, current_section, env): raw_args = lines_trim(raw_args) if raw_args: funcs = get_func_by_name(raw_args, env) return funcs else: return [] ## http://json-schema.org/latest/json-schema-validation.html def response_body_schema_parse(raw_args, current_section, env): schema_text = raw_args_eval(raw_args, current_section) schema = json.loads(schema_text) jschema.Draft4Validator.check_schema(schema) return schema def response_body_save_parse(raw_args, current_section, env): return True ## TODO: timeout指令支持。 directives = { "request" : {"parse": request_parse}, "timeout" : {"parse": timeout_parse}, "more_headers" : {"parse": more_headers_parse}, "error_code" : {"parse": error_code_parse}, "response_body" : {"parse": response_body_parse}, "response_body_schema": {"parse": response_body_schema_parse}, "response_body_filter" : {"parse": response_body_filter_parse}, "response_body_save" : {"parse": response_body_save_parse}, "on_fail" : {"parse": on_fail_parse}, } def args_proc(current_section, env): if current_section.raw_args: secinfo = current_section.secinfo if secinfo.parse: parse = secinfo.parse current_section.args = parse(current_section.raw_args, current_section, env) else: current_section.args = current_section.raw_args current_section.pop("raw_args") current_section.pop("secinfo") def get_func_by_name(arr, env): funcs = [] for i, func in enumerate(arr): func = func.strip() f = env.get(func) if f == None: OBJ = globals() f = OBJ.get(func) assertTrue(f != None, "global function [" + func + "] not found!") funcs.append(f) return funcs def block_parse(block, 
block_pattern): lines = None if type(block) == list: lines = block else: lines = block.split("\n") sections = [] current_section = None for i, line in enumerate(lines): if line.startswith(block_pattern): section = line[len(block_pattern):].strip() if current_section: sections.append(current_section) current_section = dotdict({'section_name': section}) else: if current_section: if current_section.get('content') == None: current_section['content'] = [] current_section['content'].append(line) if i == len(lines)-1 and current_section: sections.append(current_section) return sections def section_parse(block, env): raw_sections = block_parse(block, "--- ") sections = dotdict({}) for i, section_info in enumerate(raw_sections): section = section_info["section_name"] content = section_info["content"] arr = re.split(r"\s*", section) section_name = arr[0].strip() secinfo = directives.get(section_name) assertTrue(secinfo != None, "unexpected section : " + section_name) current_section = dotdict({"section_name": section_name, "secinfo": secinfo}) if len(arr) > 1: arr = arr[1:] current_section.funcs = get_func_by_name(arr, env) current_section.func_names = arr current_section.raw_args = content args_proc(current_section, env) sections[current_section["section_name"]] = current_section return sections def section_check(section): ## request check, args, method, url assertTrue(section.request != None, "'--- request' missing!") if section.error_code == None and section.response_body == None and section.response_body_schema == None: assertTrue(False, "'--- error_code' or '--- response_body' missing!") ## error_code check. def str_match(string, pattern): m = re.match(r"^" + pattern + "$", string) return m != None def short_str(string, slen): if not string: return string if len(string) <= slen: return string else: return string[0:slen-3] + u"+." 
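
# For reference, the parsers above consume test blocks shaped like the
# following (a hypothetical example; the valid "--- section" names are the
# keys of the `directives` table, and "=== " introduces a new test case):
#
#   === TEST login_ok
#   --- request
#   POST /api/login
#   {"username": "alice"}
#   --- more_headers
#   Content-Type: application/json
#   --- error_code
#   200
#   --- response_body
#   {"code": "ok"}
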
FILENAME_COUNTER = 1

def response_check(self, testname, req_info, res, env):
    global ht_save_data
    global FILENAME_COUNTER
    # Check Http Code
    expected_code = 200
    if req_info.error_code and req_info.error_code.args:
        expected_code = req_info.error_code.args
    self.assertEquals(res.status, expected_code, "expected error_code [" + str(expected_code)
                + "], but got [" + str(res.status) + "] reason [" + str(res.body) + "]")

    expected_body = None
    response_body = req_info.response_body
    if response_body and response_body.args:
        # env["req_info"] = req_info
        response_body.args = dynamic_execute(response_body.args, env)
        # env.pop("req_info")
        expected_body = response_body.args

    rsp_body = res.body
    if req_info.response_body_save:
        ht_save_data[testname] = rsp_body

    response_body_filter = req_info.response_body_filter
    if rsp_body and response_body_filter and response_body_filter.args:
        for i, filter in enumerate(response_body_filter.args):
            if rsp_body:
                rsp_body = filter(rsp_body)

    if expected_body:
        matched = rsp_body == expected_body or str_match(rsp_body, expected_body)
        if not matched:
            ## TODO: locate the point of difference more precisely.
            if len(rsp_body) > 1000 or len(expected_body) > 1000:
                filename_rsp_body = "./%s.rsp_body.%d.txt" % (testname, FILENAME_COUNTER)
                filename_exp_body = "./%s.exp_body.%d.txt" % (testname, FILENAME_COUNTER)
                FILENAME_COUNTER += 1
                log.error("write debug content to: %s", filename_rsp_body)
                log.error("write debug content to: %s", filename_exp_body)
                write_content(filename_rsp_body, rsp_body)
                write_content(filename_exp_body, expected_body)
                self.assertTrue(matched, u"expected response_body [file:%s], but got [file:%s]" % (
                                filename_exp_body, filename_rsp_body))
            else:
                log.error(u"expected response_body[[%s]]", expected_body)
                log.error(u"           but got    [[%s]]", rsp_body)
                self.assertTrue(matched, u"expected response_body [%s], but got [%s]" % (
                            short_str(expected_body,1024), short_str(rsp_body, 1024)))
    else:
        response_body_schema = req_info.response_body_schema
        if response_body_schema and response_body_schema.args:
            schema = response_body_schema.args
            try:
                rsp_body = json.loads(rsp_body)
                jschema.validate(rsp_body, schema)
            except jschema.exceptions.ValidationError, ex:
                self.fail(ex)
            except ValueError, ex:
                self.fail(ex)

    return True

def make_test_function(testname, block, url, env):
    ## Fields that support dynamic execution of `...` code snippets:
    ## request:URL, request:POST-BODY, more_headers, response_body
    def http_test_internal(self):
        self.testname = testname
        req_info = section_parse(block, env)
        global G
        G.req_info = req_info
        self.req_info = req_info
        req_info.testname = testname
        section_check(req_info)

        request = req_info.request
        args = request.args
        method = args.method
        # env["req_info"] = req_info
        if args.uri:
            args.uri = dynamic_execute(args.uri, env)

        more_headers = req_info.more_headers
        myheaders = http.NewHeaders()
        ## timeout = req_info.args or 1000*10
        if more_headers:  # req_info can be referenced inside dynamically executed code.
# env["req_info"] = req_info more_headers.args = dynamic_execute(more_headers.args, env) # env.pop("req_info") myheaders = parse_headers(more_headers.args) if args.body: args.body = dynamic_execute(args.body, env) # env.pop("req_info") if args.uri.startswith("http://") or \ args.uri.startswith("https://"): uri = args.uri else: uri = url + args.uri timeout = 10 if req_info.timeout: timeout = req_info.timeout.args assertTrue(method == "GET" or method == "POST", "unexpected http method: " + method) if method == "GET": res = http.HttpGet(uri, myheaders, timeout) elif method == "POST": res = http.HttpPost(uri, request.args.body, myheaders, timeout) else: assertTrue(False, "method [%s] not supported" % (method)) self.res = res assertTrue(res != None, "request to '" + uri + "' failed! err:" + str(res.reason)) return response_check(self, testname, req_info, res, env) return http_test_internal FMT = "@%s --- %s [%.3fs]" class HttpTestResult(unittest.TextTestResult): def getDebugInfo(self, test): cost = 0.0 server_ip = "" try: if test and test.get("res"): if test.res.cost: cost = test.res.cost if test.res.server_ip: server_ip = test.res.server_ip except: pass return cost, server_ip def addError(self, test, err): # super(HttpTestResult, self).addError(test, err) cost, server_ip = self.getDebugInfo(test) if self.showAll: self.stream.writeln(FMT % (server_ip, RED("ERROR"), cost)) elif self.dots: self.stream.write(RED('E')) self.stream.flush() error = self._exc_info_to_string(err, test) log.error(RED(error)) def addFailure(self, test, err): # super(HttpTestResult, self).addFailure(test, err) cost, server_ip = self.getDebugInfo(test) if self.showAll: self.stream.writeln(FMT % (server_ip, RED("FAIL"), cost)) elif self.dots: self.stream.write(RED('F')) self.stream.flush() error = self._exc_info_to_string(err, test) log.error(RED(error)) testname = test.testname req_info = test.req_info if req_info and req_info.on_fail: on_fail = req_info["on_fail"] if on_fail.args: for i, callback in enumerate(on_fail.args): callback(test, testname, error) # log.error(RED(test.testname + ":" + str(type(test)))) def addSuccess(self, test): # unittest.TestResult.addSuccess(self, test) cost, server_ip = self.getDebugInfo(test) if self.showAll: self.stream.writeln(FMT % (server_ip, GREEN("OK"), cost)) elif self.dots: self.stream.write(GREEN('.')) self.stream.flush() def run(blocks, url, env): suite = unittest.TestSuite() testcases = block_parse(blocks, "=== ") for testcase in testcases: testname = testcase.section_name testcontent = testcase.content test_func = make_test_function(testname, testcontent, url, env) func_name = "test_" + re.sub(r"[ #]", "_", testname) setattr(HttpTest, func_name, test_func) suite.addTest(HttpTest(func_name)) # 执行测试 runner = unittest.TextTestRunner(verbosity=2, resultclass=HttpTestResult) runner.run(suite) ```
{ "source": "jie1789/smartbugs", "score": 3 }
#### File: jie1789/smartbugs/main.py
```python
import argparse
import json
import os
import time

from jsonpath import jsonpath

import smartBugs
from src.interface.cli import create_parser_with_args


class Issue:
    def __init__(self, issue_type: int, name: str):
        self.issue_type = issue_type
        self.name = name


class AnalysisResult:
    def __init__(self, output_filepath: str):
        if os.path.exists(output_filepath):
            print("[-]Error: Output filepath is not empty")
            print("[-]Exit...")
            exit()
        self.output_filepath = output_filepath
        self.issues = {}

    def add_issue(self, position: str, issue_list):
        self.issues[position] = issue_list

    def save(self):
        f = open(self.output_filepath, "w")
        f.write(self.to_json())
        f.close()

    def to_json(self) -> str:
        return json.dumps(self.issues, default=lambda o: o.__dict__, sort_keys=True, indent=4, ensure_ascii=False)


STRATEGY = 0  # minimum fraction of capable tools that must report a finding for it to be considered trustworthy
# TOOLS = ["conkas", "mythril", "osiris", "slither", "oyente", "solhint", "smartcheck", "honeybadger", "manticore",
#          "maian", "securify"]
TOOLS = ["slither"]

ISSUE_UNKNOWN = Issue(-1, "Unknown")
ISSUE_OTHER = Issue(0, "Other")
ISSUE_ARITHMETIC = Issue(1, "Integer Overflow")
ISSUE_ACCESS_CONTROL = Issue(2, "Access Control")
ISSUE_REENTRANCY = Issue(3, "Reentrancy")
ISSUE_TIME_MANIPULATION = Issue(4, "Time Manipulation")
ISSUE_UNCHECKED_CALLS = Issue(5, "Unchecked Calls")
ISSUE_TRANSACTION_ORDER_DEPENDENCE = Issue(6, "Transaction Order Dependence")
ISSUE_DOS = Issue(7, "Denial of Service")
ISSUE_RANDOM = Issue(8, "Weak Randomness")
ISSUE_LIST = [ISSUE_OTHER, ISSUE_ARITHMETIC, ISSUE_ACCESS_CONTROL, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION,
              ISSUE_UNCHECKED_CALLS, ISSUE_TRANSACTION_ORDER_DEPENDENCE, ISSUE_DOS, ISSUE_RANDOM]

VULNERABILITY_MAPPING = {
    "is_lock_vulnerable": ISSUE_OTHER,
    "is_prodigal_vulnerable": ISSUE_ACCESS_CONTROL,
    "is_suicidal_vulnerable": ISSUE_ACCESS_CONTROL,
    "Delegatecall to user controlled address": ISSUE_ACCESS_CONTROL,
    "Delegatecall to user controlled function": ISSUE_ACCESS_CONTROL,
    "INVALID instruction": ISSUE_OTHER,
    "Potential reentrancy vulnerability": ISSUE_REENTRANCY,
    "Potentially reading uninitialized memory at instruction": ISSUE_OTHER,
    "Potentially reading uninitialized storage": ISSUE_OTHER,
    "Reachable ether leak to sender": ISSUE_ACCESS_CONTROL,
    "Reachable ether leak to sender via argument": ISSUE_ACCESS_CONTROL,
    "Reachable external call to sender": ISSUE_ACCESS_CONTROL,
    "Reachable external call to sender via argument": ISSUE_ACCESS_CONTROL,
    "Reachable SELFDESTRUCT": ISSUE_ACCESS_CONTROL,
    "Reentrancy multi-million ether bug": ISSUE_REENTRANCY,
    "Returned value at CALL instruction is not used": ISSUE_UNCHECKED_CALLS,
    "Unsigned integer overflow at ADD instruction": ISSUE_ARITHMETIC,
    "Unsigned integer overflow at MUL instruction": ISSUE_ARITHMETIC,
    "Unsigned integer overflow at SUB instruction": ISSUE_ARITHMETIC,
    "Warning BLOCKHASH instruction used": ISSUE_OTHER,
    "Warning NUMBER instruction used": ISSUE_OTHER,
    "Warning ORIGIN instruction used": ISSUE_ACCESS_CONTROL,
    "Warning TIMESTAMP instruction used": ISSUE_TIME_MANIPULATION,
    "Call data forwarded with delegatecall()": ISSUE_ACCESS_CONTROL,
    "DELEGATECALL to a user-supplied address": ISSUE_ACCESS_CONTROL,
    "Dependence on predictable environment variable": ISSUE_OTHER,
    "Dependence on predictable variable": ISSUE_OTHER,
    "Ether send": ISSUE_ACCESS_CONTROL,
    "Exception state": ISSUE_OTHER,
    "Integer Overflow": ISSUE_ARITHMETIC,
    "Integer Overflow ": ISSUE_ARITHMETIC,
    "Integer Underflow": ISSUE_ARITHMETIC,
    "Integer Underflow ": ISSUE_ARITHMETIC,
    "Message call to external contract": ISSUE_REENTRANCY,
    "Multiple Calls": ISSUE_OTHER,
    "State change after external call":
ISSUE_REENTRANCY, "Transaction order dependence": ISSUE_TRANSACTION_ORDER_DEPENDENCE, "Unchecked CALL return value": ISSUE_UNCHECKED_CALLS, "Unchecked SUICIDE": ISSUE_ACCESS_CONTROL, "Use of tx.origin": ISSUE_ACCESS_CONTROL, "callstack_bug": ISSUE_DOS, "concurrency_bug": ISSUE_OTHER, "division_bugs": ISSUE_ARITHMETIC, "overflow_bugs": ISSUE_ARITHMETIC, "reentrancy_bug": ISSUE_REENTRANCY, "signedness_bugs": ISSUE_ARITHMETIC, "time_dependency_bug": ISSUE_TIME_MANIPULATION, "truncation_bugs": ISSUE_ARITHMETIC, "underflow_bugs": ISSUE_ARITHMETIC, "Callstack Depth Attack Vulnerability.": ISSUE_DOS, "Integer Overflow.": ISSUE_ARITHMETIC, "Integer Underflow.": ISSUE_ARITHMETIC, "Parity Multisig Bug 2.": ISSUE_ACCESS_CONTROL, "Re-Entrancy Vulnerability.": ISSUE_REENTRANCY, "Timestamp Dependency.": ISSUE_TIME_MANIPULATION, "DAO": ISSUE_REENTRANCY, "DAOConstantGas": ISSUE_REENTRANCY, "LockedEther": ISSUE_OTHER, "MissingInputValidation": ISSUE_OTHER, "RepeatedCall": ISSUE_OTHER, "TODAmount": ISSUE_TRANSACTION_ORDER_DEPENDENCE, "TODReceiver": ISSUE_TRANSACTION_ORDER_DEPENDENCE, "TODTransfer": ISSUE_TRANSACTION_ORDER_DEPENDENCE, "UnhandledException": ISSUE_UNCHECKED_CALLS, "UnrestrictedEtherFlow": ISSUE_ACCESS_CONTROL, "UnrestrictedWrite": ISSUE_ACCESS_CONTROL, "arbitrary-send": ISSUE_ACCESS_CONTROL, "assembly": ISSUE_OTHER, "calls-loop": ISSUE_DOS, "constable-states": ISSUE_OTHER, "constant-function": ISSUE_OTHER, "controlled-delegatecall": ISSUE_ACCESS_CONTROL, "deprecated-standards": ISSUE_OTHER, "erc20-indexed": ISSUE_OTHER, "erc20-interface": ISSUE_OTHER, "external-function": ISSUE_OTHER, "incorrect-equality": ISSUE_OTHER, "locked-ether": ISSUE_OTHER, "low-level-calls": ISSUE_UNCHECKED_CALLS, "naming-convention": ISSUE_OTHER, "reentrancy-benign": ISSUE_REENTRANCY, "reentrancy-eth": ISSUE_REENTRANCY, "reentrancy-no-eth": ISSUE_REENTRANCY, "shadowing-abstract": ISSUE_OTHER, "shadowing-builtin": ISSUE_OTHER, "shadowing-local": ISSUE_OTHER, "shadowing-state": ISSUE_OTHER, "solc-version": ISSUE_OTHER, "suicidal": ISSUE_ACCESS_CONTROL, "timestamp": ISSUE_TIME_MANIPULATION, "tx-origin": ISSUE_ACCESS_CONTROL, "uninitialized-local": ISSUE_OTHER, "uninitialized-state": ISSUE_OTHER, "uninitialized-storage": ISSUE_OTHER, "unused-return": ISSUE_UNCHECKED_CALLS, "unused-state": ISSUE_OTHER, "SOLIDITY_ADDRESS_HARDCODED": ISSUE_OTHER, "SOLIDITY_ARRAY_LENGTH_MANIPULATION": ISSUE_ARITHMETIC, "SOLIDITY_BALANCE_EQUALITY": ISSUE_OTHER, "SOLIDITY_BYTE_ARRAY_INSTEAD_BYTES": ISSUE_OTHER, "SOLIDITY_CALL_WITHOUT_DATA": ISSUE_REENTRANCY, "SOLIDITY_DEPRECATED_CONSTRUCTIONS": ISSUE_OTHER, "SOLIDITY_DIV_MUL": ISSUE_ARITHMETIC, "SOLIDITY_ERC20_APPROVE": ISSUE_OTHER, "SOLIDITY_ERC20_FUNCTIONS_ALWAYS_RETURN_FALSE": ISSUE_OTHER, "SOLIDITY_ERC20_TRANSFER_SHOULD_THROW": ISSUE_OTHER, "SOLIDITY_EXACT_TIME": ISSUE_TIME_MANIPULATION, "SOLIDITY_EXTRA_GAS_IN_LOOPS": ISSUE_OTHER, "SOLIDITY_FUNCTIONS_RETURNS_TYPE_AND_NO_RETURN": ISSUE_OTHER, "SOLIDITY_GAS_LIMIT_IN_LOOPS": ISSUE_DOS, "SOLIDITY_INCORRECT_BLOCKHASH": ISSUE_OTHER, "SOLIDITY_LOCKED_MONEY": ISSUE_OTHER, "SOLIDITY_MSGVALUE_EQUALS_ZERO": ISSUE_OTHER, "SOLIDITY_OVERPOWERED_ROLE": ISSUE_OTHER, "SOLIDITY_PRAGMAS_VERSION": ISSUE_OTHER, "SOLIDITY_PRIVATE_MODIFIER_DONT_HIDE_DATA": ISSUE_OTHER, "SOLIDITY_REDUNDANT_FALLBACK_REJECT": ISSUE_OTHER, "SOLIDITY_REVERT_REQUIRE": ISSUE_OTHER, "SOLIDITY_SAFEMATH": ISSUE_OTHER, "SOLIDITY_SEND": ISSUE_UNCHECKED_CALLS, "SOLIDITY_SHOULD_NOT_BE_PURE": ISSUE_OTHER, "SOLIDITY_SHOULD_NOT_BE_VIEW": ISSUE_OTHER, "SOLIDITY_SHOULD_RETURN_STRUCT": ISSUE_OTHER, 
"SOLIDITY_TRANSFER_IN_LOOP": ISSUE_DOS, "SOLIDITY_TX_ORIGIN": ISSUE_ACCESS_CONTROL, "SOLIDITY_UINT_CANT_BE_NEGATIVE": ISSUE_ARITHMETIC, "SOLIDITY_UNCHECKED_CALL": ISSUE_UNCHECKED_CALLS, "SOLIDITY_UPGRADE_TO_050": ISSUE_OTHER, "SOLIDITY_USING_INLINE_ASSEMBLY": ISSUE_OTHER, "SOLIDITY_VAR": ISSUE_ARITHMETIC, "SOLIDITY_VAR_IN_LOOP_FOR": ISSUE_ARITHMETIC, "SOLIDITY_VISIBILITY": ISSUE_OTHER, "SOLIDITY_WRONG_SIGNATURE": ISSUE_OTHER, "indent": ISSUE_OTHER, "max-line-length": ISSUE_OTHER, "hidden_state_update": ISSUE_OTHER, "uninitialised_struct": ISSUE_OTHER, "inheritance_disorder": ISSUE_OTHER, "straw_man_contract": ISSUE_REENTRANCY, "hidden_transfer": ISSUE_OTHER, "balance_disorder": ISSUE_OTHER, "type_overflow": ISSUE_ARITHMETIC, "Integer_Overflow": ISSUE_ARITHMETIC, "Integer_Underflow": ISSUE_ARITHMETIC, "Reentrancy": ISSUE_REENTRANCY, "Time Manipulation": ISSUE_TIME_MANIPULATION, "Transaction Ordering Dependence": ISSUE_TRANSACTION_ORDER_DEPENDENCE, "Unchecked Low Level Call": ISSUE_UNCHECKED_CALLS, } TOOL_VULNERABILITY_RANGE = { "conkas": [ISSUE_ARITHMETIC, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION, ISSUE_TRANSACTION_ORDER_DEPENDENCE, ISSUE_UNCHECKED_CALLS], "mythril": [ISSUE_ARITHMETIC, ISSUE_ACCESS_CONTROL, ISSUE_REENTRANCY, ISSUE_UNCHECKED_CALLS, ISSUE_TIME_MANIPULATION], "osiris": [ISSUE_DOS, ISSUE_ARITHMETIC, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION, ], "slither": [ISSUE_ACCESS_CONTROL, ISSUE_DOS, ISSUE_ACCESS_CONTROL, ISSUE_UNCHECKED_CALLS, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION], "oyente": [ISSUE_DOS, ISSUE_ARITHMETIC, ISSUE_ACCESS_CONTROL, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION], "solhint": [], "smartcheck": [ISSUE_ARITHMETIC, ISSUE_REENTRANCY, ISSUE_TIME_MANIPULATION, ISSUE_DOS, ISSUE_UNCHECKED_CALLS, ISSUE_ACCESS_CONTROL], "honeybadger": [ISSUE_REENTRANCY, ISSUE_ARITHMETIC], "manticore": [ISSUE_ARITHMETIC, ISSUE_ACCESS_CONTROL, ISSUE_REENTRANCY, ISSUE_UNCHECKED_CALLS, ISSUE_TIME_MANIPULATION], "maian": [ISSUE_ACCESS_CONTROL], "securify": [ISSUE_REENTRANCY, ISSUE_TRANSACTION_ORDER_DEPENDENCE, ISSUE_UNCHECKED_CALLS, ISSUE_ACCESS_CONTROL] } class Contract: def __init__(self, name: str, language: str, filepath: str): self.name = name # 名称 self.language = language # 语言种类 self.filepath = filepath # 源代码路径 def analyze(self) -> AnalysisResult: result = {} time_now = time.time() if not os.path.isdir("aggregated_result"): if os.path.exists("aggregated_result"): print("[-]Error: Result dir \"aggregated_result\" is not empty") exit() os.mkdir("aggregated_result") analysis_result = AnalysisResult( "aggregated_result/" + time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time_now)) + "_" + self.name) tool_start_time = {} # dict[tool: start time] for i in TOOLS: time_now = time.time() tool_start_time[i] = time_now smartBugs.exec_cmd(create_parser_with_args(["-t", i, "-f", self.filepath])) for i in TOOLS: time_now_format_list = [time.strftime("%Y%m%d_%H%M", time.localtime(tool_start_time[i] + 60)), time.strftime("%Y%m%d_%H%M", time.localtime(tool_start_time[i]))] # 时间可能有误差 flag = False result_json_filepath = "" for time_now_format in time_now_format_list: result_json_filepath = "results/{0}/{1}/{2}/result.json".format(i, time_now_format, self.name) if os.path.exists(result_json_filepath): flag = True break if not flag: print("[-]Error: contract {} tool {} exec result not found, result filepath {}".format(self.name, i, time_now_format_list)) continue result_i, ok = phase_result_json(result_json_filepath, i) if not ok: print( "[-]Error: contract {} tool {} exec result cannot phase, result 
filepath {}".format(self.name, i, result_json_filepath)) continue result[i] = result_i for position, issue_list in aggregate(result).items(): analysis_result.add_issue(position, issue_list) analysis_result.save() return analysis_result class AnalysisTask: def __init__(self, contracts): self.contracts = contracts def exec_in_batch(self): t0 = time.time() print("[+]Info: task begins (exec in batch)") result = {} if not os.path.isdir("aggregated_result"): if os.path.exists("aggregated_result"): print("[-]Error: Result dir \"aggregated_result\" is not empty") exit() os.mkdir("aggregated_result") analysis_results = {} # 存储各合约执行结果dict[contract.name:analysis_result] contract_files = [] # 各合约路径列表 for contract in self.contracts: analysis_results[contract.name] = AnalysisResult( "aggregated_result/" + time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(t0)) + "_" + contract.name) contract_files.append(contract.filepath) tool_start_time = {} # 存储各工具开始执行的时间dict[tool: start time] for tool in TOOLS: time_now = time.time() tool_start_time[tool] = time_now smartBugs.exec_cmd(create_parser_with_args(["-t", tool, "-f"] + contract_files)) for contract in self.contracts: for tool in TOOLS: time_now_format_list = [time.strftime("%Y%m%d_%H%M", time.localtime(tool_start_time[tool] + 60)), time.strftime("%Y%m%d_%H%M", time.localtime(tool_start_time[tool]))] # 时间可能有误差 flag = False result_json_filepath = "" for time_now_format in time_now_format_list: result_json_filepath = "results/{0}/{1}/{2}/result.json".format(tool, time_now_format, contract.name) if os.path.exists(result_json_filepath): flag = True break if not flag: print("[-]Error: contract {} tool {} exec result not found, result filepath {}".format( contract.filepath, tool, time_now_format_list)) continue result_tool, ok = phase_result_json(result_json_filepath, tool) if not ok: print( "[-]Error: contract {} tool {} exec result cannot phase, result filepath {}".format( contract.filepath, tool, result_json_filepath)) continue result[tool] = result_tool for position, issue_list in aggregate(result).items(): analysis_results[contract.name].add_issue(position, issue_list) analysis_results[contract.name].save() print("[+]Info: Task is finished, total time: {}".format( time.strftime("%Hh%Mm%Ss", time.localtime(time.time() - t0 - 8*3600)))) def exec_in_order(self): t0 = time.time() print("[+]Info: task begins (exec in order)") for contract in self.contracts: t1 = time.time() print("[+]Info: File {} analysis begins".format(contract.filepath)) result = contract.analyze() t2 = time.time() print("[+]Info: Save result of {} in {} successfully, time: {}".format(contract.filepath, result.output_filepath, time.strftime("%Hh%Mm%Ss", time.localtime(t2 - t1 - 8*3600)))) print("[+]Info: Task is finished, total time: {}".format( time.strftime("%Hh%Mm%Ss", time.localtime(time.time() - t0 - 8*3600)))) # 聚合结果 def aggregate(result) : aggregate_result = {} statistical_result = {} confidence_count = {} for issue_type in ISSUE_LIST: confidence_count[issue_type] = 0 for tool in result.keys(): for issue_type in TOOL_VULNERABILITY_RANGE[tool]: confidence_count[issue_type] += 1 for tool, tool_res in result.items(): for line, issue_list in tool_res.items(): for issue in issue_list: if line not in statistical_result: statistical_result[line] = {} if issue not in statistical_result[line]: statistical_result[line][issue] = 0 statistical_result[line][issue] += 1 for line, issue_count in statistical_result.items(): for issue, count in issue_count.items(): if (count >= STRATEGY * 
confidence_count[issue]) and issue != ISSUE_OTHER and issue != ISSUE_UNKNOWN: if line not in aggregate_result: aggregate_result[line] = [] aggregate_result[line].append(issue) return aggregate_result def phase_result_json(filepath: str, tool: str) : if tool == "conkas": return phase_result_json_conkas(filepath) elif tool == "mythril": return phase_result_json_mythril(filepath) elif tool == "osiris": return phase_result_json_osiris(filepath) elif tool == "slither": return phase_result_json_slither(filepath) elif tool == "oyente": return phase_result_json_oyente(filepath) elif tool == "solhint": return phase_result_json_solhint(filepath) elif tool == "smartcheck": return phase_result_json_smartcheck(filepath) elif tool == "honeybadger": return phase_result_json_honeybadger(filepath) elif tool == "manticore": return phase_result_json_manticore(filepath) elif tool == "maian": return phase_result_json_maian(filepath) elif tool == "securify": return phase_result_json_securify(filepath) else: print("[-]ERROR: Unknown tool", tool) return {}, False def phase_result_json_conkas(filepath: str) : f = open(filepath) data = json.load(f) result = {} if ("analysis" not in data) or (not data["analysis"]): f.close() return result, False for i in data['analysis']: line = int(i['line_number']) issue = VULNERABILITY_MAPPING[i['vuln_type']] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_mythril(filepath: str): f = open(filepath) data = json.load(f) result = {} issues = jsonpath(data, "$..issues") for i in issues: for iss in i: line = iss['lineno'] issue = VULNERABILITY_MAPPING[iss['title']] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_osiris(filepath: str): f = open(filepath) data = json.load(f) result = {} issues = jsonpath(data, "$..errors") for i in issues: for iss in i: line = iss['line'] issue = VULNERABILITY_MAPPING[iss['message']] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_slither(filepath: str): f = open(filepath) data = json.load(f) result = {} for i in data['analysis']: title = i['check'] lines = jsonpath(i['elements'], "$..lines") for li in lines: for line in li: issue = VULNERABILITY_MAPPING[title] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_oyente(filepath: str): f = open(filepath) data = json.load(f) result = {} issues = jsonpath(data, "$..errors") for i in issues: for iss in i: line = iss['line'] issue = VULNERABILITY_MAPPING[iss['message']] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_solhint(filepath: str) : # TODO f = open(filepath) data = json.load(f) result = {} if "analysis" not in data: f.close() return result, False for i in data['analysis']: line = i['line'] issue = VULNERABILITY_MAPPING[i['message']] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_smartcheck(filepath: str) : f = open(filepath) data = json.load(f) result = {} if "analysis" not in data: f.close() return result, False for i in data["analysis"]: line = i["line"] issue = VULNERABILITY_MAPPING[i["name"]] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_honeybadger(filepath: str) : f = open(filepath) data = 
json.load(f) result = {} if ("analysis" not in data) or (len(data["analysis"]) == 0) or ("errors" not in data["analysis"][0]): f.close() return result, False for error in data["analysis"][0]["errors"]: line = error["line"] issue = VULNERABILITY_MAPPING[error["message"]] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_manticore(filepath: str): f = open(filepath) data = json.load(f) result = {} if "analysis" not in data or (len(data["analysis"]) == 0): f.close() return result, False for i in data["analysis"][0]: line = i["line"] issue = VULNERABILITY_MAPPING[i["name"]] if line not in result: result[line] = [] result[line].append(issue) f.close() return result, True def phase_result_json_maian(filepath: str) : f = open(filepath) data = json.load(f) result = {} if "analysis" not in data: f.close() return result, False for k, v in data["analysis"].items(): if v: line = 0 if line not in result: result[line] = [] result[line].append(VULNERABILITY_MAPPING[k]) f.close() return result, True def phase_result_json_securify(filepath: str) : f = open(filepath) data = json.load(f) result = {} if ("analysis" not in data) or (len(data["analysis"]) == 0) or ( "results" not in list(data["analysis"].values())[0]): f.close() return result, False for k, v in list(data["analysis"].values())[0]["results"].items(): for line in v["violations"]: if line not in result: result[line] = [] result[line].append(VULNERABILITY_MAPPING[k]) f.close() return result, True if __name__ == '__main__': print("[+]Info: Analyzing start") parser = argparse.ArgumentParser(description='Use integrated tools to detect vulnerabilities of smart contract (' 'solidity)') parser.add_argument('files', metavar='FILE', type=str, nargs='+', help='smart contract files') parser.add_argument('--mode', '-m', type=str, help='exec mode, \"order\": analyze contract by contract, ' '\"batch\": analyze together', default="order") args = parser.parse_args() contracts = [] for file in args.files: if not (file.endswith(".sol") and os.path.isfile(file)): print("[-]Error: File \"{}\" is not a solidity file".format(file)) else: _, name = os.path.split(file) contracts.append(Contract(name[:-4], "solidity", file)) if len(contracts) == 0: print("[-]Error: No file given, exit") exit() task = AnalysisTask(contracts) if args.mode == "order": task.exec_in_order() elif args.mode == "batch": task.exec_in_batch() ``` #### File: src/output_parser/Mythril2.py ```python from sarif_om import * from src.output_parser.SarifHolder import isNotDuplicateRule, parseLogicalLocation, parseRule, \ parseResult, parseArtifact, isNotDuplicateLogicalLocation class Mythril2: def parseSarif(self, mythril_output_results, file_path_in_repo): resultsList = [] logicalLocationsList = [] rulesList = [] for issue in mythril_output_results["analysis"]["issues"]: rule = parseRule(tool="mythril2", vulnerability=issue["title"], full_description=issue["description"]) result = parseResult(tool="mythril2", vulnerability=issue["title"], level=issue["type"], uri=file_path_in_repo, line=issue["lineno"], snippet=issue["code"] if "code" in issue.keys() else None, logicalLocation=parseLogicalLocation(issue["function"], kind="function")) logicalLocation = parseLogicalLocation(name=issue["function"], kind="function") if isNotDuplicateLogicalLocation(logicalLocation, logicalLocationsList): logicalLocationsList.append(logicalLocation) resultsList.append(result) if isNotDuplicateRule(rule, rulesList): rulesList.append(rule) artifact = 
parseArtifact(uri=file_path_in_repo) tool = Tool(driver=ToolComponent(name="Mythril2", version="0.4.25", rules=rulesList, information_uri="https://mythx.io/", full_description=MultiformatMessageString( text="Mythril analyses EVM bytecode using symbolic analysis, taint analysis and control flow checking to detect a variety of security vulnerabilities."))) run = Run(tool=tool, artifacts=[artifact], logical_locations=logicalLocationsList, results=resultsList) return run # from sarif_om import * # from src.output_parser.Parser import Parser # from src.output_parser.SarifHolder import isNotDuplicateRule, parseRule, parseResult, \ # parseArtifact, parseLogicalLocation, isNotDuplicateLogicalLocation # class Mythril2(Parser): # def __init__(self): # pass # def parse(self, str_output): # output = [] # current_contract = None # lines = str_output.splitlines() # for line in lines: # if "====" in line: # if current_contract is not None: # output.append(current_contract) # current_contract = { # 'errors': [] # } # # (file, contract_name, _) = line.replace("INFO:root:contract ", '').split(':') # # current_contract['file'] = file # # current_contract['name'] = contract_name # elif "In file:" in line: # (file, lineno) = line.replace("In file: ", '').split(':') # current_contract['file'] = file # current_contract['errors'].append({'line':int(lineno)}) # elif "Contract:" in line: # contract_name = line.replace("Contract: ",'') # current_contract['name'] = contract_name # if current_contract is not None: # output.append(current_contract) # return output # def parseSarif(self, myth_output_results, file_path_in_repo): # resultsList = [] # logicalLocationsList = [] # rulesList = [] # for analysis in myth_output_results["analysis"]: # for result in analysis["errors"]: # #rule = parseRule(tool="myth", vulnerability=result["message"]) # result = parseResult(tool="mythril2", # uri=file_path_in_repo, line=result["line"]) # resultsList.append(result) # # if isNotDuplicateRule(rule, rulesList): # # rulesList.append(rule) # # logicalLocation = parseLogicalLocation(name=analysis["name"]) # # if isNotDuplicateLogicalLocation(logicalLocation, logicalLocationsList): # # logicalLocationsList.append(logicalLocation) # artifact = parseArtifact(uri=file_path_in_repo) # tool = Tool(driver=ToolComponent(name="mythril2", version="mythril2",)) # run = Run(tool=tool, artifacts=[artifact], logical_locations=logicalLocationsList, results=resultsList) # return run ```
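For context, the aggregator in `main.py` above is what ultimately drives these per-tool parsers. A sketch of programmatic use mirroring its `__main__` block; the `from main import ...` path and the contract file path are assumptions for illustration, and results are written under `aggregated_result/`:

```python
from main import Contract, AnalysisTask

# Wrap one solidity source file and run the configured TOOLS over it.
task = AnalysisTask([Contract("token", "solidity", "contracts/token.sol")])
task.exec_in_order()  # one tool invocation per contract; exec_in_batch() shares runs
```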
{ "source": "jie20011205/dlnap", "score": 2 }
#### File: dlnap/dlnap/dlnap.py
```python
__version__ = "0.15"

import re
import sys
import time
import signal
import socket
import select
import logging
import traceback
import mimetypes
from contextlib import contextmanager
import os

py3 = sys.version_info[0] == 3
if py3:
    from urllib.request import urlopen
    from http.server import HTTPServer
    from http.server import BaseHTTPRequestHandler
else:
    from urllib2 import urlopen
    from BaseHTTPServer import BaseHTTPRequestHandler
    from BaseHTTPServer import HTTPServer

import shutil
import threading

SSDP_GROUP = ("239.255.255.250", 1900)  # standard SSDP multicast group and port
URN_AVTransport = "urn:schemas-upnp-org:service:AVTransport:1"
URN_AVTransport_Fmt = "urn:schemas-upnp-org:service:AVTransport:{}"
URN_RenderingControl = "urn:schemas-upnp-org:service:RenderingControl:1"
URN_RenderingControl_Fmt = "urn:schemas-upnp-org:service:RenderingControl:{}"
SSDP_ALL = "ssdp:all"

# =================================================================================================
# XML to DICT
#
def _get_tag_value(x, i=0):
    """ Get the nearest to 'i' position xml tag name.
    x -- xml string
    i -- position to start searching tag from
    return -- (tag, value) pair.
       e.g. <d>
               <e>value4</e>
            </d>
       result is ('d', '<e>value4</e>')
    """
    x = x.strip()
    value = ''
    tag = ''

    # skip <? > tag
    if x[i:].startswith('<?'):
        i += 2
        while i < len(x) and x[i] != '<':
            i += 1

    # check for empty tag like '</tag>'
    if x[i:].startswith('</'):
        i += 2
        in_attr = False
        while i < len(x) and x[i] != '>':
            if x[i] == ' ':
                in_attr = True
            if not in_attr:
                tag += x[i]
            i += 1
        return (tag.strip(), '', x[i + 1:])

    # not an xml, treat like a value
    if not x[i:].startswith('<'):
        return ('', x[i:], '')

    i += 1  # <

    # read first open tag
    in_attr = False
    while i < len(x) and x[i] != '>':
        # get rid of attributes
        if x[i] == ' ':
            in_attr = True
        if not in_attr:
            tag += x[i]
        i += 1

    i += 1  # >

    # replace self-closing <tag/> by <tag>None</tag>
    empty_elmt = '<' + tag + ' />'
    closed_elmt = '<' + tag + '>None</' + tag + '>'
    if x.startswith(empty_elmt):
        x = x.replace(empty_elmt, closed_elmt)

    while i < len(x):
        value += x[i]
        if x[i] == '>' and value.endswith('</' + tag + '>'):
            # Note: will not work with xml like <a> <a></a> </a>
            close_tag_len = len(tag) + 2  # />
            value = value[:-close_tag_len]
            break
        i += 1
    return (tag.strip(), value[:-1], x[i + 1:])


def _xml2dict(s, ignoreUntilXML=False):
    """ Convert xml to dictionary.
    <?xml version="1.0"?>
    <a any_tag="tag value">
      <b> <bb>value1</bb> </b>
      <b> <bb>value2</bb> </b>
    </c>
      <d>
        <e>value4</e>
      </d>
    <g>value</g>
    </a>
    =>
    { 'a':
      {
        'b': [ {'bb': value1}, {'bb': value2} ],
        'c': [],
        'd': { 'e': [value4] },
        'g': [value]
      }
    }
    """
    if ignoreUntilXML:
        s = ''.join(re.findall(".*?(<.*)", s, re.M))

    d = {}
    while s:
        tag, value, s = _get_tag_value(s)
        value = value.strip()
        isXml, dummy, dummy2 = _get_tag_value(value)
        if tag not in d:
            d[tag] = []
        if not isXml:
            if not value:
                continue
            d[tag].append(value.strip())
        else:
            if tag not in d:
                d[tag] = []
            d[tag].append(_xml2dict(value))
    return d


s = """
hello
this is a bad
strings

<?xml version="1.0"?>
<a any_tag="tag value">
  <b><bb>value1</bb></b>
  <b><bb>value2</bb> <v>value3</v></b>
  </c>
  <d>
    <e>value4</e>
  </d>
  <g>value</g>
</a>
"""


def _xpath(d, path):
    """ Return value from xml dictionary at path.
d -- xml dictionary path -- string path like root/device/serviceList/service@serviceType=URN_AVTransport/controlURL return -- value at path or None if path not found """ for p in path.split('/'): tag_attr = p.split('@') tag = tag_attr[0] if tag not in d: return None attr = tag_attr[1] if len(tag_attr) > 1 else '' if attr: a, aval = attr.split('=') for s in d[tag]: if s[a] == [aval]: d = s break else: d = d[tag][0] return d # # XML to DICT # ================================================================================================= # PROXY # running = False class DownloadProxy(BaseHTTPRequestHandler): def log_message(self, format, *args): pass def log_request(self, code='-', size='-'): pass def response_success(self): url = self.path[1:] # replace '/' if os.path.exists(url): f = open(url) content_type = mimetypes.guess_type(url)[0] else: f = urlopen(url=url) if py3: content_type = f.getheader("Content-Type") else: content_type = f.info().getheaders("Content-Type")[0] self.send_response(200, "ok") self.send_header('Access-Control-Allow-Origin', '*') self.send_header('Access-Control-Allow-Methods', 'GET, OPTIONS') self.send_header("Access-Control-Allow-Headers", "X-Requested-With") self.send_header("Access-Control-Allow-Headers", "Content-Type") self.send_header("Content-Type", content_type) self.end_headers() def do_OPTIONS(self): self.response_success() def do_HEAD(self): self.response_success() def do_GET(self): global running url = self.path[1:] # replace '/' content_type = '' if os.path.exists(url): f = open(url) content_type = mimetypes.guess_type(url)[0] size = os.path.getsize(url) elif not url or not url.startswith('http'): self.response_success() return else: f = urlopen(url=url) try: if not content_type: if py3: content_type = f.getheader("Content-Type") size = f.getheader("Content-Length") else: content_type = f.info().getheaders("Content-Type")[0] size = f.info().getheaders("Content-Length")[0] self.send_response(200) self.send_header('Access-Control-Allow-Origin', '*') self.send_header("Content-Type", content_type) self.send_header("Content-Disposition", 'attachment; filename="{}"'.format(os.path.basename(url))) self.send_header("Content-Length", str(size)) self.end_headers() shutil.copyfileobj(f, self.wfile) finally: running = False f.close() def runProxy(ip = '', port = 8000): global running running = True DownloadProxy.protocol_version = "HTTP/1.0" httpd = HTTPServer((ip, port), DownloadProxy) while running: httpd.handle_request() # # PROXY # ================================================================================================= def _get_port(location): """ Extract port number from url. location -- string like http://anyurl:port/whatever/path return -- port number """ port = re.findall('http://.*?:(\d+).*', location) return int(port[0]) if port else 80 def _get_control_url(xml, urn): """ Extract AVTransport contol url from device description xml xml -- device description xml return -- control url or empty string if wasn't found """ return _xpath(xml, 'root/device/serviceList/service@serviceType={}/controlURL'.format(urn)) @contextmanager def _send_udp(to, packet): """ Send UDP message to group to -- (host, port) group to send the packet to packet -- message to send """ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) sock.sendto(packet.encode(), to) yield sock sock.close() def _unescape_xml(xml): """ Replace escaped xml symbols with real ones. 
""" return xml.replace('&lt;', '<').replace('&gt;', '>').replace('&quot;', '"') def _send_tcp(to, payload): """ Send TCP message to group to -- (host, port) group to send to payload to payload -- message to send """ try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.settimeout(5) sock.connect(to) sock.sendall(payload.encode('utf-8')) data = sock.recv(2048) if py3: data = data.decode('utf-8') data = _xml2dict(_unescape_xml(data), True) errorDescription = _xpath(data, 's:Envelope/s:Body/s:Fault/detail/UPnPError/errorDescription') if errorDescription is not None: logging.error(errorDescription) except Exception as e: data = '' finally: sock.close() return data def _get_location_url(raw): """ Extract device description url from discovery response raw -- raw discovery response return -- location url string """ t = re.findall('\n(?i)location:\s*(.*)\r\s*', raw, re.M) if len(t) > 0: return t[0] return '' def _get_friendly_name(xml): """ Extract device name from description xml xml -- device description xml return -- device name """ name = _xpath(xml, 'root/device/friendlyName') return name if name is not None else 'Unknown' def _get_serve_ip(target_ip, target_port=80): """ Find ip address of network interface used to communicate with target target-ip -- ip address of target return -- ip address of interface connected to target """ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect((target_ip, target_port)) my_ip = s.getsockname()[0] s.close() return my_ip class DlnapDevice: """ Represents DLNA/UPnP device. """ def __init__(self, raw, ip): self.__logger = logging.getLogger(self.__class__.__name__) self.__logger.info('=> New DlnapDevice (ip = {}) initialization..'.format(ip)) self.ip = ip self.ssdp_version = 1 self.port = None self.name = 'Unknown' self.control_url = None self.rendering_control_url = None self.has_av_transport = False try: self.__raw = raw.decode() self.location = _get_location_url(self.__raw) self.__logger.info('location: {}'.format(self.location)) self.port = _get_port(self.location) self.__logger.info('port: {}'.format(self.port)) raw_desc_xml = urlopen(self.location).read().decode() self.__desc_xml = _xml2dict(raw_desc_xml) self.__logger.debug('description xml: {}'.format(self.__desc_xml)) self.name = _get_friendly_name(self.__desc_xml) self.__logger.info('friendlyName: {}'.format(self.name)) self.control_url = _get_control_url(self.__desc_xml, URN_AVTransport) self.__logger.info('control_url: {}'.format(self.control_url)) self.rendering_control_url = _get_control_url(self.__desc_xml, URN_RenderingControl) self.__logger.info('rendering_control_url: {}'.format(self.rendering_control_url)) self.has_av_transport = self.control_url is not None self.__logger.info('=> Initialization completed'.format(ip)) except Exception as e: self.__logger.warning('DlnapDevice (ip = {}) init exception:\n{}'.format(ip, traceback.format_exc())) def __repr__(self): return '{} @ {}'.format(self.name, self.ip) def __eq__(self, d): return self.name == d.name and self.ip == d.ip def _payload_from_template(self, action, data, urn): """ Assembly payload from template. 
""" fields = '' for tag, value in data.items(): fields += '<{tag}>{value}</{tag}>'.format(tag=tag, value=value) payload = """<?xml version="1.0" encoding="utf-8"?> <s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/"> <s:Body> <u:{action} xmlns:u="{urn}"> {fields} </u:{action}> </s:Body> </s:Envelope>""".format(action=action, urn=urn, fields=fields) return payload def _create_packet(self, action, data): """ Create packet to send to device control url. action -- control action data -- dictionary with XML fields value """ if action in ["SetVolume", "SetMute", "GetVolume"]: url = self.rendering_control_url urn = URN_RenderingControl_Fmt.format(self.ssdp_version) else: url = self.control_url urn = URN_AVTransport_Fmt.format(self.ssdp_version) payload = self._payload_from_template(action=action, data=data, urn=urn) packet = "\r\n".join([ 'POST {} HTTP/1.1'.format(url), 'User-Agent: {}/{}'.format(__file__, __version__), 'Accept: */*', 'Content-Type: text/xml; charset="utf-8"', 'HOST: {}:{}'.format(self.ip, self.port), 'Content-Length: {}'.format(len(payload)), 'SOAPACTION: "{}#{}"'.format(urn, action), 'Connection: close', '', payload, ]) self.__logger.debug(packet) return packet def set_current_media(self, url, instance_id = 0): """ Set media to playback. url -- media url instance_id -- device instance id """ packet = self._create_packet('SetAVTransportURI', {'InstanceID':instance_id, 'CurrentURI':url, 'CurrentURIMetaData':'' }) _send_tcp((self.ip, self.port), packet) def play(self, instance_id = 0): """ Play media that was already set as current. instance_id -- device instance id """ packet = self._create_packet('Play', {'InstanceID': instance_id, 'Speed': 1}) _send_tcp((self.ip, self.port), packet) def pause(self, instance_id = 0): """ Pause media that is currently playing back. instance_id -- device instance id """ packet = self._create_packet('Pause', {'InstanceID': instance_id, 'Speed':1}) _send_tcp((self.ip, self.port), packet) def stop(self, instance_id = 0): """ Stop media that is currently playing back. instance_id -- device instance id """ packet = self._create_packet('Stop', {'InstanceID': instance_id, 'Speed': 1}) _send_tcp((self.ip, self.port), packet) def seek(self, position, instance_id = 0): """ Seek position """ packet = self._create_packet('Seek', {'InstanceID':instance_id, 'Unit':'REL_TIME', 'Target': position }) _send_tcp((self.ip, self.port), packet) def volume(self, volume=10, instance_id = 0): """ Stop media that is currently playing back. instance_id -- device instance id """ packet = self._create_packet('SetVolume', {'InstanceID': instance_id, 'DesiredVolume': volume, 'Channel': 'Master'}) _send_tcp((self.ip, self.port), packet) def get_volume(self, instance_id = 0): """ get volume """ packet = self._create_packet('GetVolume', {'InstanceID':instance_id, 'Channel': 'Master'}) _send_tcp((self.ip, self.port), packet) def mute(self, instance_id = 0): """ Stop media that is currently playing back. instance_id -- device instance id """ packet = self._create_packet('SetMute', {'InstanceID': instance_id, 'DesiredMute': '1', 'Channel': 'Master'}) _send_tcp((self.ip, self.port), packet) def unmute(self, instance_id = 0): """ Stop media that is currently playing back. instance_id -- device instance id """ packet = self._create_packet('SetMute', {'InstanceID': instance_id, 'DesiredMute': '0', 'Channel': 'Master'}) _send_tcp((self.ip, self.port), packet) def info(self, instance_id=0): """ Transport info. 
instance_id -- device instance id """ packet = self._create_packet('GetTransportInfo', {'InstanceID': instance_id}) return _send_tcp((self.ip, self.port), packet) def media_info(self, instance_id=0): """ Media info. instance_id -- device instance id """ packet = self._create_packet('GetMediaInfo', {'InstanceID': instance_id}) return _send_tcp((self.ip, self.port), packet) def position_info(self, instance_id=0): """ Position info. instance_id -- device instance id """ packet = self._create_packet('GetPositionInfo', {'InstanceID': instance_id}) return _send_tcp((self.ip, self.port), packet) def set_next(self, url): pass def next(self): pass def discover(name = '', ip = '', timeout = 1, st = SSDP_ALL, mx = 3, ssdp_version = 1): """ Discover UPnP devices in the local network. name -- name or part of the name to filter devices timeout -- timeout to perform discover st -- st field of discovery packet mx -- mx field of discovery packet return -- list of DlnapDevice """ st = st.format(ssdp_version) payload = "\r\n".join([ 'M-SEARCH * HTTP/1.1', 'User-Agent: {}/{}'.format(__file__, __version__), 'HOST: {}:{}'.format(*SSDP_GROUP), 'Accept: */*', 'MAN: "ssdp:discover"', 'ST: {}'.format(st), 'MX: {}'.format(mx), '', '']) devices = [] with _send_udp(SSDP_GROUP, payload) as sock: start = time.time() while True: if time.time() - start > timeout: # timed out break r, w, x = select.select([sock], [], [sock], 1) if sock in r: data, addr = sock.recvfrom(1024) if ip and addr[0] != ip: continue d = DlnapDevice(data, addr[0]) d.ssdp_version = ssdp_version if d not in devices: if not name or name is None or name.lower() in d.name.lower(): if not ip: devices.append(d) elif d.has_av_transport: # no need in further searching by ip devices.append(d) break elif sock in x: raise Exception('Getting response failed') else: # Nothing to read pass return devices # # Signal of Ctrl+C # ================================================================================================= def signal_handler(signal, frame): print(' Got Ctrl + C, exit now!') sys.exit(1) signal.signal(signal.SIGINT, signal_handler) if __name__ == '__main__': import getopt def usage(): print('{} [--ip <device ip>] [-d[evice] <name>] [--all] [-t[imeout] <seconds>] [--play <url>] [--pause] [--stop] [--proxy]'.format(__file__)) print(' --ip <device ip> - ip address for faster access to the known device') print(' --device <device name or part of the name> - discover devices with this name as substring') print(' --all - flag to discover all upnp devices, not only devices with AVTransport ability') print(' --play <url> - set current url for play and start playback it. 
In case of url is empty - continue playing recent media.') print(' --pause - pause current playback') print(' --stop - stop current playback') print(' --mute - mute playback') print(' --unmute - unmute playback') print(' --volume <vol> - set current volume for playback') print(' --seek <position in HH:MM:SS> - set current position for playback') print(' --timeout <seconds> - discover timeout') print(' --ssdp-version <version> - discover devices by protocol version, default 1') print(' --proxy - use local proxy on proxy port') print(' --proxy-port <port number> - proxy port to listen incomming connections from devices, default 8000') print(' --help - this help') def version(): print(__version__) try: opts, args = getopt.getopt(sys.argv[1:], "hvd:t:i:", [ # information arguments 'help', 'version', 'log=', # device arguments 'device=', 'ip=', # action arguments 'play=', 'pause', 'stop', 'volume=', 'mute', 'unmute', 'seek=', # discover arguments 'list', 'all', 'timeout=', 'ssdp-version=', # transport info 'info', 'media-info', # download proxy 'proxy', 'proxy-port=']) except getopt.GetoptError: usage() sys.exit(1) device = '' url = '' vol = 10 position = '00:00:00' timeout = 1 action = '' logLevel = logging.WARN compatibleOnly = True ip = '' proxy = False proxy_port = 8000 ssdp_version = 1 for opt, arg in opts: if opt in ('-h', '--help'): usage() sys.exit(0) elif opt in ('-v', '--version'): version() sys.exit(0) elif opt in ('--log'): if arg.lower() == 'debug': logLevel = logging.DEBUG elif arg.lower() == 'info': logLevel = logging.INFO elif arg.lower() == 'warn': logLevel = logging.WARN elif opt in ('--all'): compatibleOnly = False elif opt in ('-d', '--device'): device = arg elif opt in ('-t', '--timeout'): timeout = float(arg) elif opt in ('--ssdp-version'): ssdp_version = int(arg) elif opt in ('-i', '--ip'): ip = arg compatibleOnly = False timeout = 10 elif opt in ('--list'): action = 'list' elif opt in ('--play'): action = 'play' url = arg elif opt in ('--pause'): action = 'pause' elif opt in ('--stop'): action = 'stop' elif opt in ('--volume'): action = 'volume' vol = arg elif opt in ('--seek'): action = 'seek' position = arg elif opt in ('--mute'): action = 'mute' elif opt in ('--unmute'): action = 'unmute' elif opt in ('--info'): action = 'info' elif opt in ('--media-info'): action = 'media-info' elif opt in ('--proxy'): proxy = True elif opt in ('--proxy-port'): proxy_port = int(arg) logging.basicConfig(level=logLevel) st = URN_AVTransport_Fmt if compatibleOnly else SSDP_ALL allDevices = discover(name=device, ip=ip, timeout=timeout, st=st, ssdp_version=ssdp_version) if not allDevices: print('No compatible devices found.') sys.exit(1) if action in ('', 'list'): print('Discovered devices:') for d in allDevices: print(' {} {} {}'.format('[a]' if d.has_av_transport else '[x]', d,d.port)) sys.exit(0) d = allDevices[0] print(d) if url.lower().replace('https://', '').replace('www.', '').startswith('youtube.'): import subprocess process = subprocess.Popen(['youtube-dl', '-g', url], stdout = subprocess.PIPE) url, err = process.communicate() if url.lower().startswith('https://'): proxy = True if proxy: ip = _get_serve_ip(d.ip) t = threading.Thread(target=runProxy, kwargs={'ip' : ip, 'port' : proxy_port}) t.daemon = True t.start() time.sleep(2) if action == 'play': try: d.stop() url = 'http://{}:{}/{}'.format(ip, proxy_port, url) if proxy else url d.set_current_media(url=url) d.play() except Exception as e: print('Device is unable to play media.') logging.warn('Play 
exception:\n{}'.format(traceback.format_exc())) sys.exit(1) elif action == 'pause': d.pause() elif action == 'stop': d.stop() elif action == 'volume': d.volume(vol) elif action == 'seek': d.seek(position) elif action == 'mute': d.mute() elif action == 'unmute': d.unmute() elif action == 'info': print(d.info()) elif action == 'media-info': print(d.media_info()) if proxy: while running: time.sleep(30) ```
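A minimal usage sketch for the module above, assuming `dlnap.py` is importable and a renderer is reachable on the LAN; the choice of device and the media URL are placeholders:

```python
import time

import dlnap  # the module defined above

# Discover AVTransport-capable renderers (5 s window), as __main__ does.
devices = dlnap.discover(timeout=5, st=dlnap.URN_AVTransport_Fmt)
if devices:
    tv = devices[0]
    print('Using renderer:', tv)
    tv.stop()                                    # clear any current playback
    tv.set_current_media(url='http://example.com/video.mp4')  # placeholder URL
    tv.play()
    time.sleep(2)
    print(tv.info())                             # GetTransportInfo response
else:
    print('No UPnP renderers with AVTransport found.')
```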
{ "source": "jie2311260561/Spiderdemo", "score": 3 }
#### File: ArticleSpider/spiders/baidu1.py
```python
import scrapy
import re
import os
from scrapy import Request
from urllib import parse


class Baidu1Spider(scrapy.Spider):
    name = 'baidu1'
    allowed_domains = ['baike.baidu.com']  # domain only; a full URL here breaks scrapy's offsite filter
    start_urls = ['https://baike.baidu.com/item/%E7%BD%91%E7%BB%9C%E7%88%AC%E8%99%AB/5162711']
    url = ["/item/%E7%BD%91%E7%BB%9C%E7%88%AC%E8%99%AB/5162711"]

    def parse(self, response):
        '''
        Collect the article URLs on this listing page and hand them to scrapy
        for download; parse_detail() then parses each downloaded page.
        :param response:
        :return:
        '''
        # hrefs of all <a> elements whose link starts with /item/
        sub_urls = response.css("a[href^='/item/']::attr(href)").extract()
        for sub_url in sub_urls:
            yield Request(url=parse.urljoin(response.url, sub_url), callback=self.parse_detail)
            # Request(url=sub_url)
            print(sub_url)
        # extract the next page
        # title = response.xpath("//div[@class='lemma-summary']/div[1]/text()")
        # picture = response.xpath("//img[@class='picture']")
        # text = response.xpath("//div[@class='main-content']")
        # regek = ".*?([\u4E00-\u8FA5])"  # non-greedy; match Chinese characters
        # match_obj = re.match(regek, text)
        # print(picture)
        # print(title.extract())
        # print(match_obj)
        # n = int(input('number of pages to crawl: '))
        # for i in range(n):

    def parse_detail(self, response):
        title = response.css(".lemmaWgt-lemmaTitle-title h1::text").extract()
        print(title)
        zannum = response.css("span.vote-count::text").extract()
        print("{}".format(zannum))
        picture = response.css("img.picture").extract()
        for j in range(len(picture)):
            t = re.search('(https:[^"]*)"', picture[j])
            print(t.group())
        text = response.css(".para::text").extract()
        print(text)
        # fo = open('{}.txt'.format(title[0]), 'w', encoding='utf-8')
        # for x in text:
        #     fo.write(x)
        print('done writing')
        # Leftover from an earlier revision: `sub_urls` is only defined in
        # parse(), so running this loop here would raise a NameError.
        # for j in range(len(sub_urls)):
        #     t = re.search('(/item/[^"]*)"', sub_urls.extract()[j]).group()
        #     name = re.search('([\u4e00-\u9fa5]+)', sub_urls.extract()[j]).group()
        #     print(name, "'s url is:", t)
        # sub_urls = soup.find_all("a", {"target": "_blank", "href": re.compile("/item/(%.{2})+$")})
```
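The spider depends on `urllib.parse.urljoin` to turn Baidu Baike's root-relative `/item/...` hrefs into absolute URLs before scheduling requests. A standalone illustration of that step (the sample hrefs are illustrative):

```python
from urllib import parse

base = 'https://baike.baidu.com/item/%E7%BD%91%E7%BB%9C%E7%88%AC%E8%99%AB/5162711'
hrefs = ['/item/HTTP', '/item/%E6%90%9C%E7%B4%A2%E5%BC%95%E6%93%8E']

for href in hrefs:
    # A root-relative path is resolved against the scheme and host of `base`.
    print(parse.urljoin(base, href))
# https://baike.baidu.com/item/HTTP
# https://baike.baidu.com/item/%E6%90%9C%E7%B4%A2%E5%BC%95%E6%93%8E
```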
{ "source": "jie311/FGD", "score": 2 }
#### File: jie311/FGD/pth_transfer.py
```python
import torch
import argparse
from collections import OrderedDict


def change_model(args):
    fgd_model = torch.load(args.fgd_path)
    all_name = []
    for name, v in fgd_model["state_dict"].items():
        # keep only the student branch and strip the "student." prefix (8 chars)
        if name.startswith("student."):
            all_name.append((name[8:], v))
        else:
            continue
    state_dict = OrderedDict(all_name)
    fgd_model['state_dict'] = state_dict
    torch.save(fgd_model, args.output_path)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Transfer CKPT')
    parser.add_argument('--fgd_path', type=str,
                        default='work_dirs/fgd_retina_rx101_64x4d_distill_retina_r50_fpn_2x_coco/epoch_24.pth',
                        metavar='N', help='fgd_model path')
    parser.add_argument('--output_path', type=str, default='retina_res50_new.pth',
                        metavar='N', help='output model path')
    args = parser.parse_args()
    change_model(args)
```
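Since `change_model` keeps only the keys that start with `student.` and strips that 8-character prefix, a converted checkpoint should contain no wrapper prefixes at all. A quick sanity-check sketch; the file name follows the script's default output and stands in for your own path:

```python
import torch

ckpt = torch.load('retina_res50_new.pth', map_location='cpu')  # default output of pth_transfer.py
state = ckpt['state_dict']

# After conversion no key should still carry the distillation wrapper prefix.
assert not any(k.startswith('student.') for k in state)
print('number of tensors:', len(state))
print('first key:', next(iter(state)))
```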
{ "source": "jie311/miemiedetection", "score": 2 }
#### File: demo/win10_ppyolo_opencv/test2_cpp_conv_step1.py
```python
import paddle
import numpy as np
import paddle.nn.functional as F

'''
Test the C++ version of the convolution layer.
'''

def write_line(name, ndarray, dims, content):
    content += '%s ' % name
    print('uuuuuuuuuuuuuuuuuuuuuuuuuuuuuu')
    if dims == 4:
        for i in range(ndarray.shape[0]):
            for j in range(ndarray.shape[1]):
                for k in range(ndarray.shape[2]):
                    for l in range(ndarray.shape[3]):
                        content += '%f,' % ndarray[i, j, k, l]
    elif dims == 1:
        for i in range(ndarray.shape[0]):
            content += '%f,' % ndarray[i]
    content = content[:-1] + '\n'
    print('uuuuuuuuuuuuuuuuuuuuuuuuuuuuuu')
    return content


# x = paddle.uniform([8, 3, 256, 256], min=-1.0, max=1.0, dtype='float32')
x = paddle.uniform([2, 1, 1, 1], min=-1.0, max=1.0, dtype='float32')

dic = {}
content = ''
dic['x'] = x.numpy()
print('uuuuuuuuuuuuuuuuuuuuuuuuuuuuuu')
content = write_line('x', dic['x'], 4, content)

# Test case 1
input_dim = 1
filters = 1
filter_size = 1
stride = 1
padding = 0
groups = 1
bias_attr = False

# Test case 2
# input_dim = 3
# filters = 2
# filter_size = 3
# stride = 1
# padding = 1
# groups = 1
# bias_attr = paddle.ParamAttr(initializer=paddle.nn.initializer.Normal())

conv = paddle.nn.Conv2D(
    in_channels=input_dim,
    out_channels=filters,
    kernel_size=filter_size,
    stride=stride,
    padding=padding,
    groups=groups,
    bias_attr=bias_attr)
conv.eval()

dic['w'] = conv.weight.numpy()
content = write_line('w', dic['w'], 4, content)
if bias_attr == False:
    pass
else:
    dic['b'] = conv.bias.numpy()
    content = write_line('b', dic['b'], 1, content)

out = conv(x)
dic['out'] = out.numpy()
print('uuuuuuuuuuuuuuuuuuuuuuuuuuuuuu2222222222222222')
content = write_line('out', dic['out'], 4, content)

np.savez('conv2d', **dic)
with open('conv2d.txt', 'w', encoding='utf-8') as f:
    f.write(content)
    f.close()
print()
```
#### File: models/architectures/ppyoloe.py
```python
import torch


class PPYOLOE(torch.nn.Module):
    def __init__(self, backbone, neck, yolo_head):
        super(PPYOLOE, self).__init__()
        self.backbone = backbone
        self.neck = neck
        self.yolo_head = yolo_head

    def forward(self, x, scale_factor=None, targets=None):
        '''
        Both loss computation (training) and inference must run inside
        forward(); otherwise DDP computes wrong results.
        '''
        body_feats = self.backbone(x)
        fpn_feats = self.neck(body_feats)
        out = self.yolo_head(fpn_feats, targets)
        if self.training:
            return out
        else:
            out = self.yolo_head.post_process(out, scale_factor)
            return out

    def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm):
        self.backbone.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
        self.neck.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
        self.yolo_head.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
```
#### File: models/backbones/cspresnet.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import torch
import torch.nn as nn
import torch.nn.functional as F

from mmdet.models.ops import get_act_fn
from mmdet.models.custom_layers import ShapeSpec


class ConvBNLayer(nn.Module):
    def __init__(self,
                 ch_in,
                 ch_out,
                 filter_size=3,
                 stride=1,
                 groups=1,
                 padding=0,
                 act=None):
        super(ConvBNLayer, self).__init__()
        self.conv = nn.Conv2d(
            in_channels=ch_in,
            out_channels=ch_out,
            kernel_size=filter_size,
            stride=stride,
            padding=padding,
            groups=groups,
            bias=False)
        self.bn = nn.BatchNorm2d(ch_out)
        self.act = get_act_fn(act) if act is None or isinstance(act, (
            str, dict)) else act

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.act(x)
        return x

    def add_param_group(self, param_groups, base_lr, base_wd,
need_clip, clip_norm): if isinstance(self.conv, torch.nn.Conv2d): if self.conv.weight.requires_grad: param_group_conv = {'params': [self.conv.weight]} param_group_conv['lr'] = base_lr * 1.0 param_group_conv['base_lr'] = base_lr * 1.0 param_group_conv['weight_decay'] = base_wd param_group_conv['need_clip'] = need_clip param_group_conv['clip_norm'] = clip_norm param_groups.append(param_group_conv) if self.bn is not None: if self.bn.weight.requires_grad: param_group_norm_weight = {'params': [self.bn.weight]} param_group_norm_weight['lr'] = base_lr * 1.0 param_group_norm_weight['base_lr'] = base_lr * 1.0 param_group_norm_weight['weight_decay'] = 0.0 param_group_norm_weight['need_clip'] = need_clip param_group_norm_weight['clip_norm'] = clip_norm param_groups.append(param_group_norm_weight) if self.bn.bias.requires_grad: param_group_norm_bias = {'params': [self.bn.bias]} param_group_norm_bias['lr'] = base_lr * 1.0 param_group_norm_bias['base_lr'] = base_lr * 1.0 param_group_norm_bias['weight_decay'] = 0.0 param_group_norm_bias['need_clip'] = need_clip param_group_norm_bias['clip_norm'] = clip_norm param_groups.append(param_group_norm_bias) class RepVggBlock(nn.Module): def __init__(self, ch_in, ch_out, act='relu'): super(RepVggBlock, self).__init__() self.ch_in = ch_in self.ch_out = ch_out self.conv1 = ConvBNLayer( ch_in, ch_out, 3, stride=1, padding=1, act=None) self.conv2 = ConvBNLayer( ch_in, ch_out, 1, stride=1, padding=0, act=None) self.act = get_act_fn(act) if act is None or isinstance(act, ( str, dict)) else act def forward(self, x): if hasattr(self, 'conv'): y = self.conv(x) else: y = self.conv1(x) + self.conv2(x) y = self.act(y) return y def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): if hasattr(self, 'conv'): self.conv.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) else: self.conv1.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv2.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) def convert_to_deploy(self): if not hasattr(self, 'conv'): self.conv = nn.Conv2d( in_channels=self.ch_in, out_channels=self.ch_out, kernel_size=3, stride=1, padding=1, groups=1) kernel, bias = self.get_equivalent_kernel_bias() self.conv.weight.copy_(kernel) self.conv.bias.copy_(bias) self.__delattr__('conv1') self.__delattr__('conv2') def get_equivalent_kernel_bias(self): kernel3x3, bias3x3 = self._fuse_bn_tensor(self.conv1) kernel1x1, bias1x1 = self._fuse_bn_tensor(self.conv2) return kernel3x3 + self._pad_1x1_to_3x3_tensor( kernel1x1), bias3x3 + bias1x1 def _pad_1x1_to_3x3_tensor(self, kernel1x1): if kernel1x1 is None: return 0 else: return nn.functional.pad(kernel1x1, [1, 1, 1, 1]) def _fuse_bn_tensor(self, branch): if branch is None: return 0, 0 kernel = branch.conv.weight running_mean = branch.bn._mean running_var = branch.bn._variance gamma = branch.bn.weight beta = branch.bn.bias eps = branch.bn._epsilon std = (running_var + eps).sqrt() t = (gamma / std).reshape((-1, 1, 1, 1)) return kernel * t, beta - running_mean * gamma / std class BasicBlock(nn.Module): def __init__(self, ch_in, ch_out, act='relu', shortcut=True): super(BasicBlock, self).__init__() assert ch_in == ch_out self.conv1 = ConvBNLayer(ch_in, ch_out, 3, stride=1, padding=1, act=act) self.conv2 = RepVggBlock(ch_out, ch_out, act=act) self.shortcut = shortcut def forward(self, x): y = self.conv1(x) y = self.conv2(y) if self.shortcut: return x + y else: return y def add_param_group(self, param_groups, base_lr, base_wd, need_clip, 
clip_norm): self.conv1.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv2.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) class EffectiveSELayer(nn.Module): """ Effective Squeeze-Excitation From `CenterMask : Real-Time Anchor-Free Instance Segmentation` - https://arxiv.org/abs/1911.06667 """ def __init__(self, channels, act='hardsigmoid'): super(EffectiveSELayer, self).__init__() self.fc = nn.Conv2d(channels, channels, kernel_size=1, padding=0) self.act = get_act_fn(act) if act is None or isinstance(act, ( str, dict)) else act def forward(self, x): x_se = x.mean((2, 3), keepdim=True) x_se = self.fc(x_se) return x * self.act(x_se) def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): if isinstance(self.fc, torch.nn.Conv2d): if self.fc.weight.requires_grad: param_group_conv_weight = {'params': [self.fc.weight]} param_group_conv_weight['lr'] = base_lr * 1.0 param_group_conv_weight['base_lr'] = base_lr * 1.0 param_group_conv_weight['weight_decay'] = base_wd param_group_conv_weight['need_clip'] = need_clip param_group_conv_weight['clip_norm'] = clip_norm param_groups.append(param_group_conv_weight) if self.fc.bias.requires_grad: param_group_conv_bias = {'params': [self.fc.bias]} param_group_conv_bias['lr'] = base_lr * 1.0 param_group_conv_bias['base_lr'] = base_lr * 1.0 param_group_conv_bias['weight_decay'] = base_wd param_group_conv_bias['need_clip'] = need_clip param_group_conv_bias['clip_norm'] = clip_norm param_groups.append(param_group_conv_bias) class CSPResStage(nn.Module): def __init__(self, block_fn, ch_in, ch_out, n, stride, act='relu', attn='eca'): super(CSPResStage, self).__init__() ch_mid = (ch_in + ch_out) // 2 if stride == 2: self.conv_down = ConvBNLayer( ch_in, ch_mid, 3, stride=2, padding=1, act=act) else: self.conv_down = None self.conv1 = ConvBNLayer(ch_mid, ch_mid // 2, 1, act=act) self.conv2 = ConvBNLayer(ch_mid, ch_mid // 2, 1, act=act) self.blocks = nn.Sequential(*[ block_fn( ch_mid // 2, ch_mid // 2, act=act, shortcut=True) for i in range(n) ]) if attn: self.attn = EffectiveSELayer(ch_mid, act='hardsigmoid') else: self.attn = None self.conv3 = ConvBNLayer(ch_mid, ch_out, 1, act=act) def forward(self, x): if self.conv_down is not None: x = self.conv_down(x) y1 = self.conv1(x) y2 = self.blocks(self.conv2(x)) y = torch.cat([y1, y2], 1) if self.attn is not None: y = self.attn(y) y = self.conv3(y) return y def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): if self.conv_down is not None: self.conv_down.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv1.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv2.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) for layer in self.blocks: layer.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) if self.attn is not None: self.attn.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv3.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) class CSPResNet(nn.Module): __shared__ = ['width_mult', 'depth_mult', 'trt'] def __init__(self, layers=[3, 6, 6, 3], channels=[64, 128, 256, 512, 1024], act='swish', return_idx=[0, 1, 2, 3, 4], depth_wise=False, use_large_stem=False, width_mult=1.0, depth_mult=1.0, freeze_at=-1, trt=False): super(CSPResNet, self).__init__() channels = [max(round(c * width_mult), 1) for c in channels] layers = [max(round(l * depth_mult), 1) for l in layers] act = get_act_fn( 
act, trt=trt) if act is None or isinstance(act, (str, dict)) else act if use_large_stem: self.stem = nn.Sequential() self.stem.add_module('conv1', ConvBNLayer(3, channels[0] // 2, 3, stride=2, padding=1, act=act)) self.stem.add_module('conv2', ConvBNLayer(channels[0] // 2, channels[0] // 2, 3, stride=1, padding=1, act=act)) self.stem.add_module('conv3', ConvBNLayer(channels[0] // 2, channels[0], 3, stride=1, padding=1, act=act)) else: self.stem = nn.Sequential() self.stem.add_module('conv1', ConvBNLayer(3, channels[0] // 2, 3, stride=2, padding=1, act=act)) self.stem.add_module('conv2', ConvBNLayer(channels[0] // 2, channels[0], 3, stride=1, padding=1, act=act)) n = len(channels) - 1 self.stages = nn.Sequential() for i in range(n): self.stages.add_module(str(i), CSPResStage(BasicBlock, channels[i], channels[i + 1], layers[i], 2, act=act)) self._out_channels = channels[1:] self._out_strides = [4, 8, 16, 32] self.return_idx = return_idx if freeze_at >= 0: self._freeze_parameters(self.stem) for i in range(min(freeze_at + 1, n)): self._freeze_parameters(self.stages[i]) def _freeze_parameters(self, m): for p in m.parameters(): p.requires_grad_(False) def forward(self, inputs): x = self.stem(inputs) outs = [] for idx, stage in enumerate(self.stages): x = stage(x) if idx in self.return_idx: outs.append(x) return outs def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): for layer in self.stem: layer.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) for idx, stage in enumerate(self.stages): stage.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) @property def out_shape(self): return [ ShapeSpec( channels=self._out_channels[i], stride=self._out_strides[i]) for i in self.return_idx ] ``` #### File: models/losses/iou_losses.py ```python import torch import torch.nn as nn import torch as T import torch.nn.functional as F import numpy as np from mmdet.models.bbox_utils import bbox_iou class IouLoss(nn.Module): """ iou loss, see https://arxiv.org/abs/1908.03851 loss = 1.0 - iou * iou Args: loss_weight (float): iou loss weight, default is 2.5 max_height (int): max height of input to support random shape input max_width (int): max width of input to support random shape input ciou_term (bool): whether to add ciou_term loss_square (bool): whether to square the iou term """ def __init__(self, loss_weight=2.5, giou=False, diou=False, ciou=False, loss_square=True): super(IouLoss, self).__init__() self.loss_weight = loss_weight self.giou = giou self.diou = diou self.ciou = ciou self.loss_square = loss_square def forward(self, pbox, gbox): iou = bbox_iou( pbox, gbox, giou=self.giou, diou=self.diou, ciou=self.ciou) if self.loss_square: loss_iou = 1 - iou * iou else: loss_iou = 1 - iou loss_iou = loss_iou * self.loss_weight return loss_iou class IouAwareLoss(IouLoss): """ iou aware loss, see https://arxiv.org/abs/1912.05992 Args: loss_weight (float): iou aware loss weight, default is 1.0 max_height (int): max height of input to support random shape input max_width (int): max width of input to support random shape input """ def __init__(self, loss_weight=1.0, giou=False, diou=False, ciou=False): super(IouAwareLoss, self).__init__( loss_weight=loss_weight, giou=giou, diou=diou, ciou=ciou) def forward(self, ioup, pbox, gbox): iou = bbox_iou( pbox, gbox, giou=self.giou, diou=self.diou, ciou=self.ciou) # iou.requires_grad = False iou = iou.detach() loss_iou_aware = F.binary_cross_entropy_with_logits( ioup, iou, reduction='none') loss_iou_aware = 
loss_iou_aware * self.loss_weight
        return loss_iou_aware


class MyIOUloss(nn.Module):
    def __init__(self, reduction="none", loss_type="iou"):
        super(MyIOUloss, self).__init__()
        self.reduction = reduction
        self.loss_type = loss_type

    def forward(self, pred, target):
        '''
        Input boxes are in (cx, cy, w, h) format.
        '''
        assert pred.shape[0] == target.shape[0]
        boxes1 = pred
        boxes2 = target

        # convert to top-left / bottom-right corner coordinates
        boxes1_x0y0x1y1 = torch.cat([boxes1[:, :2] - boxes1[:, 2:] * 0.5,
                                     boxes1[:, :2] + boxes1[:, 2:] * 0.5], dim=-1)
        boxes2_x0y0x1y1 = torch.cat([boxes2[:, :2] - boxes2[:, 2:] * 0.5,
                                     boxes2[:, :2] + boxes2[:, 2:] * 0.5], dim=-1)

        # areas of the two boxes
        boxes1_area = (boxes1_x0y0x1y1[:, 2] - boxes1_x0y0x1y1[:, 0]) * (boxes1_x0y0x1y1[:, 3] - boxes1_x0y0x1y1[:, 1])
        boxes2_area = (boxes2_x0y0x1y1[:, 2] - boxes2_x0y0x1y1[:, 0]) * (boxes2_x0y0x1y1[:, 3] - boxes2_x0y0x1y1[:, 1])

        # top-left / bottom-right corners of the intersection box
        left_up = torch.maximum(boxes1_x0y0x1y1[:, :2], boxes2_x0y0x1y1[:, :2])
        right_down = torch.minimum(boxes1_x0y0x1y1[:, 2:], boxes2_x0y0x1y1[:, 2:])

        # intersection area inter_area, then IoU
        inter_section = F.relu(right_down - left_up)
        inter_area = inter_section[:, 0] * inter_section[:, 1]
        union_area = boxes1_area + boxes2_area - inter_area
        iou = inter_area / (union_area + 1e-16)

        if self.loss_type == "iou":
            loss = 1 - iou ** 2
        elif self.loss_type == "giou":
            # top-left / bottom-right corners of the smallest enclosing box
            enclose_left_up = torch.minimum(boxes1_x0y0x1y1[:, :2], boxes2_x0y0x1y1[:, :2])
            enclose_right_down = torch.maximum(boxes1_x0y0x1y1[:, 2:], boxes2_x0y0x1y1[:, 2:])

            # area of the enclosing box
            enclose_wh = enclose_right_down - enclose_left_up
            enclose_area = enclose_wh[:, 0] * enclose_wh[:, 1]

            giou = iou - (enclose_area - union_area) / enclose_area
            # clamp giou into [-1.0, 1.0]
            giou = torch.clamp(giou, -1.0, 1.0)
            loss = 1 - giou

        if self.reduction == "mean":
            loss = loss.mean()
        elif self.reduction == "sum":
            loss = loss.sum()
        return loss


class GIoULoss(object):
    """
    Generalized Intersection over Union, see https://arxiv.org/abs/1902.09630
    Args:
        loss_weight (float): giou loss weight, default as 1
        eps (float): epsilon to avoid divide by zero, default as 1e-10
        reduction (string): Options are "none", "mean" and "sum".
default as none """ def __init__(self, loss_weight=1., eps=1e-10, reduction='none'): self.loss_weight = loss_weight self.eps = eps assert reduction in ('none', 'mean', 'sum') self.reduction = reduction def bbox_overlap(self, box1, box2, eps=1e-10): """calculate the iou of box1 and box2 Args: box1 (Tensor): box1 with the shape (..., 4) box2 (Tensor): box1 with the shape (..., 4) eps (float): epsilon to avoid divide by zero Return: iou (Tensor): iou of box1 and box2 overlap (Tensor): overlap of box1 and box2 union (Tensor): union of box1 and box2 """ x1, y1, x2, y2 = box1 x1g, y1g, x2g, y2g = box2 xkis1 = torch.maximum(x1, x1g) ykis1 = torch.maximum(y1, y1g) xkis2 = torch.minimum(x2, x2g) ykis2 = torch.minimum(y2, y2g) w_inter = F.relu(xkis2 - xkis1) h_inter = F.relu(ykis2 - ykis1) overlap = w_inter * h_inter area1 = (x2 - x1) * (y2 - y1) area2 = (x2g - x1g) * (y2g - y1g) union = area1 + area2 - overlap + eps iou = overlap / union return iou, overlap, union def __call__(self, pbox, gbox, iou_weight=1., loc_reweight=None): # x1, y1, x2, y2 = paddle.split(pbox, num_or_sections=4, axis=-1) # x1g, y1g, x2g, y2g = paddle.split(gbox, num_or_sections=4, axis=-1) # torch的split和paddle有点不同,torch的第二个参数表示的是每一份的大小,paddle的第二个参数表示的是分成几份。 x1, y1, x2, y2 = torch.split(pbox, split_size_or_sections=1, dim=-1) x1g, y1g, x2g, y2g = torch.split(gbox, split_size_or_sections=1, dim=-1) box1 = [x1, y1, x2, y2] box2 = [x1g, y1g, x2g, y2g] iou, overlap, union = self.bbox_overlap(box1, box2, self.eps) xc1 = torch.minimum(x1, x1g) yc1 = torch.minimum(y1, y1g) xc2 = torch.maximum(x2, x2g) yc2 = torch.maximum(y2, y2g) area_c = (xc2 - xc1) * (yc2 - yc1) + self.eps miou = iou - ((area_c - union) / area_c) if loc_reweight is not None: loc_reweight = torch.reshape(loc_reweight, shape=(-1, 1)) loc_thresh = 0.9 giou = 1 - (1 - loc_thresh ) * miou - loc_thresh * miou * loc_reweight else: giou = 1 - miou if self.reduction == 'none': loss = giou elif self.reduction == 'sum': loss = torch.sum(giou * iou_weight) else: loss = torch.mean(giou * iou_weight) return loss * self.loss_weight ``` #### File: models/losses/yolov3_loss.py ```python import torch import torch.nn as nn import torch.nn.functional as F import torch as T import numpy as np from mmdet.models.custom_layers import paddle_yolo_box from mmdet.models.matrix_nms import jaccard from mmdet.utils import bboxes_iou_batch try: from collections.abc import Sequence except Exception: from collections import Sequence class YOLOv3Loss2(nn.Module): """ Combined loss for YOLOv3 network Args: batch_size (int): training batch size ignore_thresh (float): threshold to ignore confidence loss label_smooth (bool): whether to use label smoothing use_fine_grained_loss (bool): whether use fine grained YOLOv3 loss instead of fluid.layers.yolov3_loss """ def __init__(self, ignore_thresh=0.7, label_smooth=True, use_fine_grained_loss=False, iou_loss=None, iou_aware_loss=None, downsample=[32, 16, 8], scale_x_y=1., match_score=False): super(YOLOv3Loss2, self).__init__() self._ignore_thresh = ignore_thresh self._label_smooth = label_smooth self._use_fine_grained_loss = use_fine_grained_loss self._iou_loss = iou_loss self._iou_aware_loss = iou_aware_loss self.downsample = downsample self.scale_x_y = scale_x_y self.match_score = match_score def forward(self, outputs, gt_box, targets, anchors, anchor_masks, mask_anchors, num_classes): return self._get_fine_grained_loss( outputs, targets, gt_box, num_classes, mask_anchors, self._ignore_thresh) def _get_fine_grained_loss(self, outputs, targets, gt_box, 
num_classes, mask_anchors, ignore_thresh, eps=1.e-10): """ Calculate fine grained YOLOv3 loss Args: outputs ([Variables]): List of Variables, output of backbone stages targets ([Variables]): List of Variables, The targets for yolo loss calculatation. gt_box (Variable): The ground-truth boudding boxes. batch_size (int): The training batch size num_classes (int): class num of dataset mask_anchors ([[float]]): list of anchors in each output layer ignore_thresh (float): prediction bbox overlap any gt_box greater than ignore_thresh, objectness loss will be ignored. Returns: Type: dict xy_loss (Variable): YOLOv3 (x, y) coordinates loss wh_loss (Variable): YOLOv3 (w, h) coordinates loss obj_loss (Variable): YOLOv3 objectness score loss cls_loss (Variable): YOLOv3 classification loss """ assert len(outputs) == len(targets), \ "YOLOv3 output layer number not equal target number" batch_size = gt_box.shape[0] loss_xys, loss_whs, loss_objs, loss_clss = 0.0, 0.0, 0.0, 0.0 if self._iou_loss is not None: loss_ious = 0.0 if self._iou_aware_loss is not None: loss_iou_awares = 0.0 for i, (output, target, anchors) in enumerate(zip(outputs, targets, mask_anchors)): downsample = self.downsample[i] an_num = len(anchors) // 2 if self._iou_aware_loss is not None: ioup, output = self._split_ioup(output, an_num, num_classes) x, y, w, h, obj, cls = self._split_output(output, an_num, num_classes) tx, ty, tw, th, tscale, tobj, tcls = self._split_target(target) tscale_tobj = tscale * tobj scale_x_y = self.scale_x_y if not isinstance( self.scale_x_y, Sequence) else self.scale_x_y[i] if (abs(scale_x_y - 1.0) < eps): sigmoid_x = torch.sigmoid(x) loss_x = tx * (0 - torch.log(sigmoid_x + 1e-9)) + (1 - tx) * (0 - torch.log(1 - sigmoid_x + 1e-9)) loss_x *= tscale_tobj loss_x = loss_x.sum((1, 2, 3)) sigmoid_y = torch.sigmoid(y) loss_y = ty * (0 - torch.log(sigmoid_y + 1e-9)) + (1 - ty) * (0 - torch.log(1 - sigmoid_y + 1e-9)) loss_y *= tscale_tobj loss_y = loss_y.sum((1, 2, 3)) else: # Grid Sensitive dx = scale_x_y * torch.sigmoid(x) - 0.5 * (scale_x_y - 1.0) dy = scale_x_y * torch.sigmoid(y) - 0.5 * (scale_x_y - 1.0) loss_x = torch.abs(dx - tx) * tscale_tobj loss_x = loss_x.sum((1, 2, 3)) loss_y = torch.abs(dy - ty) * tscale_tobj loss_y = loss_y.sum((1, 2, 3)) # NOTE: we refined loss function of (w, h) as L1Loss loss_w = torch.abs(w - tw) * tscale_tobj loss_w = loss_w.sum((1, 2, 3)) loss_h = torch.abs(h - th) * tscale_tobj loss_h = loss_h.sum((1, 2, 3)) if self._iou_loss is not None: loss_iou = self._iou_loss(x, y, w, h, tx, ty, tw, th, anchors, downsample, batch_size, scale_x_y) loss_iou = loss_iou * tscale_tobj loss_iou = loss_iou.sum((1, 2, 3)) loss_ious += loss_iou.mean() if self._iou_aware_loss is not None: loss_iou_aware = self._iou_aware_loss( ioup, x, y, w, h, tx, ty, tw, th, anchors, downsample, batch_size, scale_x_y) loss_iou_aware = loss_iou_aware * tobj loss_iou_aware = loss_iou_aware.sum((1, 2, 3)) loss_iou_awares += loss_iou_aware.mean() loss_obj_pos, loss_obj_neg = self._calc_obj_loss( output, obj, tobj, gt_box, batch_size, anchors, num_classes, downsample, self._ignore_thresh, scale_x_y) sigmoid_cls = torch.sigmoid(cls) loss_cls = tcls * (0 - torch.log(sigmoid_cls + 1e-9)) + (1 - tcls) * (0 - torch.log(1 - sigmoid_cls + 1e-9)) loss_cls = loss_cls.sum(4) loss_cls *= tobj loss_cls = loss_cls.sum((1, 2, 3)) loss_xys += (loss_x + loss_y).mean() loss_whs += (loss_w + loss_h).mean() loss_objs += (loss_obj_pos + loss_obj_neg).mean() loss_clss += loss_cls.mean() total_loss = loss_xys + loss_whs + loss_objs + loss_clss 
losses_all = { "loss_xy": loss_xys, "loss_wh": loss_whs, "loss_obj": loss_objs, "loss_cls": loss_clss, } if self._iou_loss is not None: losses_all["loss_iou"] = loss_ious total_loss += loss_ious if self._iou_aware_loss is not None: losses_all["loss_iou_aware"] = loss_iou_awares total_loss += loss_iou_awares losses_all["total_loss"] = total_loss return losses_all def _split_ioup(self, output, an_num, num_classes): """ Split output feature map to output, predicted iou along channel dimension """ ioup = output[:, :an_num, :, :] ioup = torch.sigmoid(ioup) oriout = output[:, an_num:, :, :] return (ioup, oriout) def _split_output(self, output, an_num, num_classes): """ Split output feature map to x, y, w, h, objectness, classification along channel dimension """ batch_size = output.shape[0] output_size = output.shape[2] output = output.reshape((batch_size, an_num, 5 + num_classes, output_size, output_size)) x = output[:, :, 0, :, :] y = output[:, :, 1, :, :] w = output[:, :, 2, :, :] h = output[:, :, 3, :, :] obj = output[:, :, 4, :, :] cls = output[:, :, 5:, :, :] cls = cls.permute(0, 1, 3, 4, 2) return (x, y, w, h, obj, cls) def _split_target(self, target): """ split target to x, y, w, h, objectness, classification along dimension 2 target is in shape [N, an_num, 6 + class_num, H, W] """ tx = target[:, :, 0, :, :] ty = target[:, :, 1, :, :] tw = target[:, :, 2, :, :] th = target[:, :, 3, :, :] tscale = target[:, :, 4, :, :] tobj = target[:, :, 5, :, :] tcls = target[:, :, 6:, :, :] tcls = tcls.permute(0, 1, 3, 4, 2) tcls.requires_grad = False return (tx, ty, tw, th, tscale, tobj, tcls) def _calc_obj_loss(self, output, obj, tobj, gt_box, batch_size, anchors, num_classes, downsample, ignore_thresh, scale_x_y): # A prediction bbox overlap any gt_bbox over ignore_thresh, # objectness loss will be ignored, process as follows: _anchors = np.array(anchors) _anchors = np.reshape(_anchors, (-1, 2)).astype(np.float32) im_size = torch.ones((batch_size, 2), dtype=torch.float32, device=output.device) im_size.requires_grad = False bbox, prob = paddle_yolo_box(output, _anchors, downsample, num_classes, scale_x_y, im_size, clip_bbox=False, conf_thresh=0.0) # 2. split pred bbox and gt bbox by sample, calculate IoU between pred bbox # and gt bbox in each sample ious = [] for pred, gt in zip(bbox, gt_box): def box_xywh2xyxy(box): x = box[:, 0:1] y = box[:, 1:2] w = box[:, 2:3] h = box[:, 3:4] return torch.cat( [ x - w / 2., y - h / 2., x + w / 2., y + h / 2., ], dim=1) gt = box_xywh2xyxy(gt) # [50, 4] ious.append(jaccard(pred, gt).unsqueeze(0)) # [1, 3*13*13, 50] iou = torch.cat(ious, dim=0) # [bz, 3*13*13, 50] 每张图片的这个输出层的所有预测框(比如3*13*13个)与所有gt(50个)两两之间的iou # 3. 
Get iou_mask by IoU between gt bbox and prediction bbox, # Get obj_mask by tobj(holds gt_score), calculate objectness loss max_iou, _ = iou.max(-1) # [bz, 3*13*13] 预测框与所有gt最高的iou iou_mask = (max_iou <= ignore_thresh).float() # [bz, 3*13*13] 候选负样本处为1 if self.match_score: max_prob, _ = prob.max(-1) # [bz, 3*13*13] 预测框所有类别最高分数 iou_mask = iou_mask * (max_prob <= 0.25).float() # 最高分数低于0.25的预测框,被视作负样本或者忽略样本,虽然在训练初期该分数不可信。 output_shape = output.shape an_num = len(anchors) // 2 iou_mask = iou_mask.reshape((output_shape[0], an_num, output_shape[2], output_shape[3])) # [bz, 3, 13, 13] 候选负样本处为1 iou_mask.requires_grad = False # NOTE: tobj holds gt_score, obj_mask holds object existence mask obj_mask = (tobj > 0.).float() # [bz, 3, 13, 13] 正样本处为1 obj_mask.requires_grad = False # 候选负样本 中的 非正样本 才是负样本。所有样本中,正样本和负样本之外的样本是忽略样本。 noobj_mask = (1.0 - obj_mask) * iou_mask # [N, 3, n_grid, n_grid] 负样本处为1 noobj_mask.requires_grad = False # For positive objectness grids, objectness loss should be calculated # For negative objectness grids, objectness loss is calculated only iou_mask == 1.0 sigmoid_obj = torch.sigmoid(obj) loss_obj_pos = tobj * (0 - torch.log(sigmoid_obj + 1e-9)) # 由于有mixup增强,tobj正样本处不一定为1.0 loss_obj_neg = noobj_mask * (0 - torch.log(1 - sigmoid_obj + 1e-9)) # 负样本的损失 loss_obj_pos = loss_obj_pos.sum((1, 2, 3)) loss_obj_neg = loss_obj_neg.sum((1, 2, 3)) return loss_obj_pos, loss_obj_neg def xywh2xyxy(box): x, y, w, h = box x1 = x - w * 0.5 y1 = y - h * 0.5 x2 = x + w * 0.5 y2 = y + h * 0.5 return [x1, y1, x2, y2] def make_grid(h, w, dtype, device): yv, xv = torch.meshgrid([torch.arange(h, dtype=dtype, device=device), torch.arange(w, dtype=dtype, device=device)]) xy = torch.stack((xv, yv), 2).float() # [h, w, 2] 值为[[[0, 0], [1, 0], [2, 0], ...] return xy def decode_yolo(box, anchor, downsample_ratio): """decode yolo box Args: box (list): [x, y, w, h], all have the shape [b, na, h, w, 1] anchor (list): anchor with the shape [na, 2] downsample_ratio (int): downsample ratio, default 32 scale (float): scale, default 1. Return: box (list): decoded box, [x, y, w, h], all have the shape [b, na, h, w, 1] """ x, y, w, h = box # x.shape=[N, 3, h, w, 1] na, grid_h, grid_w = x.shape[1:4] grid = make_grid(grid_h, grid_w, x.dtype, x.device) # [h, w, 2] 值为[[[0, 0], [1, 0], [2, 0], ...] 
grid = torch.reshape(grid, (1, 1, grid_h, grid_w, 2)) # [1, 1, h, w, 2] x1 = (x + grid[:, :, :, :, 0:1]) / grid_w # [N, 3, h, w, 1] 预测框中心点在输入图片中的绝对x坐标,除以图片宽进行归一化。 y1 = (y + grid[:, :, :, :, 1:2]) / grid_h # [N, 3, h, w, 1] 预测框中心点在输入图片中的绝对y坐标,除以图片高进行归一化。 device_name = w.device.type device_index = w.device.index anchor_ndarray = np.array(anchor).astype(np.float32) _anchor = torch.from_numpy(anchor_ndarray) if device_name == 'cuda': _anchor = torch.from_numpy(anchor_ndarray).cuda(device_index) _anchor = _anchor.to(x) _anchor = torch.reshape(_anchor, (1, na, 1, 1, 2)) w1 = torch.exp(w) * _anchor[:, :, :, :, 0:1] / (downsample_ratio * grid_w) # [N, 3, h, w, 1] 预测框的宽,除以图片宽进行归一化。 h1 = torch.exp(h) * _anchor[:, :, :, :, 1:2] / (downsample_ratio * grid_h) # [N, 3, h, w, 1] 预测框的高,除以图片高进行归一化。 return [x1, y1, w1, h1] def bbox_transform(pbox, anchor, downsample): pbox = decode_yolo(pbox, anchor, downsample) pbox = xywh2xyxy(pbox) return pbox class YOLOv3Loss(nn.Module): __inject__ = ['iou_loss', 'iou_aware_loss'] __shared__ = ['num_classes'] def __init__(self, num_classes=80, ignore_thresh=0.7, label_smooth=False, downsample=[32, 16, 8], scale_x_y=1., iou_loss=None, iou_aware_loss=None): """ YOLOv3Loss layer Args: num_calsses (int): number of foreground classes ignore_thresh (float): threshold to ignore confidence loss label_smooth (bool): whether to use label smoothing downsample (list): downsample ratio for each detection block scale_x_y (float): scale_x_y factor iou_loss (object): IoULoss instance iou_aware_loss (object): IouAwareLoss instance """ super(YOLOv3Loss, self).__init__() self.num_classes = num_classes self.ignore_thresh = ignore_thresh self.label_smooth = label_smooth self.downsample = downsample self.scale_x_y = scale_x_y self.iou_loss = iou_loss self.iou_aware_loss = iou_aware_loss self.distill_pairs = [] def obj_loss(self, pbox, gbox, pobj, tobj, anchor, downsample): # pbox pbox = decode_yolo(pbox, anchor, downsample) pbox = xywh2xyxy(pbox) # [N, 3, h, w, 1] 左上角+右下角xy坐标,除以图片宽高进行归一化。 pbox = torch.cat(pbox, -1) # [N, 3, h, w, 4] 左上角+右下角xy坐标,除以图片宽高进行归一化。 b = pbox.shape[0] pbox = pbox.reshape((b, -1, 4)) # [N, 3*h*w, 4] 左上角+右下角xy坐标,除以图片宽高进行归一化。 # gbox gxy = gbox[:, :, 0:2] - gbox[:, :, 2:4] * 0.5 gwh = gbox[:, :, 0:2] + gbox[:, :, 2:4] * 0.5 gbox = torch.cat([gxy, gwh], -1) # [N, 50, 4] 所有gt的左上角+右下角xy坐标,除以图片宽高进行归一化。 iou = bboxes_iou_batch(pbox, gbox, xyxy=True) # [N, 3*h*w, 50] 每张图片 每个预测框和每个gt两两之间的iou iou_max, _ = iou.max(2) # [N, 3*h*w] 预测框与所有gt最高的iou iou_mask = (iou_max <= self.ignore_thresh).to(pbox) # [N, 3*h*w] 候选负样本处为1 iou_mask.requires_grad = False pobj = pobj.reshape((b, -1)) # [N, 3*h*w] tobj = tobj.reshape((b, -1)) # [N, 3*h*w] obj_mask = (tobj > 0).to(pbox) # [N, 3*h*w] 正样本处为1 obj_mask.requires_grad = False loss_obj = F.binary_cross_entropy_with_logits(pobj, obj_mask, reduction='none') loss_obj_pos = (loss_obj * tobj) loss_obj_neg = (loss_obj * (1 - obj_mask) * iou_mask) # 候选负样本中,不是正样本的才是最终的负样本。 return loss_obj_pos + loss_obj_neg def cls_loss(self, pcls, tcls): # pcls [N, 3, h, w, 80] 预测的未激活的pcls # tcls [N, 3, h, w, 80] 真实的tcls if self.label_smooth: delta = min(1. / self.num_classes, 1. 
/ 40) pos, neg = 1 - delta, delta # 1 for positive, 0 for negative # 修改监督值tcls tcls = pos * (tcls > 0.).to(tcls) + neg * (tcls <= 0.).to(tcls) loss_cls = F.binary_cross_entropy_with_logits(pcls, tcls, reduction='none') return loss_cls def yolov3_loss(self, p, t, gt_box, anchor, downsample, scale=1., eps=1e-10): na = len(anchor) b, c, h, w = p.shape if self.iou_aware_loss: ioup, p = p[:, 0:na, :, :], p[:, na:, :, :] ioup = ioup.unsqueeze(-1) p = p.reshape((b, na, -1, h, w)) # [N, 3, 85, h, w] p = p.permute(0, 1, 3, 4, 2) # [N, 3, h, w, 85] x, y = p[:, :, :, :, 0:1], p[:, :, :, :, 1:2] # [N, 3, h, w, 1]、[N, 3, h, w, 1] 预测的未解码的x, y w, h = p[:, :, :, :, 2:3], p[:, :, :, :, 3:4] # [N, 3, h, w, 1]、[N, 3, h, w, 1] 预测的未解码的w, h obj, pcls = p[:, :, :, :, 4:5], p[:, :, :, :, 5:] # [N, 3, h, w, 1]、[N, 3, h, w, 80] 预测的未激活的obj, pcls self.distill_pairs.append([x, y, w, h, obj, pcls]) t = t.permute(0, 1, 3, 4, 2) # [N, 3, h, w, 86] tx, ty = t[:, :, :, :, 0:1], t[:, :, :, :, 1:2] # [N, 3, h, w, 1]、[N, 3, h, w, 1] 真实的已Grid Sensitive解码的x, y,0到1之间的值 tw, th = t[:, :, :, :, 2:3], t[:, :, :, :, 3:4] # [N, 3, h, w, 1]、[N, 3, h, w, 1] 真实的未解码的w, h tscale = t[:, :, :, :, 4:5] # [N, 3, h, w, 1] tobj, tcls = t[:, :, :, :, 5:6], t[:, :, :, :, 6:] # [N, 3, h, w, 1]、[N, 3, h, w, 80] 真实的tobj, tcls tscale_obj = tscale * tobj # [N, 3, h, w, 1] loss = dict() # 对x、y进行Grid Sensitive解码 x = scale * torch.sigmoid(x) - 0.5 * (scale - 1.) y = scale * torch.sigmoid(y) - 0.5 * (scale - 1.) if abs(scale - 1.) < eps: # 当不使用Grid Sensitive时 # tx是0到1之间的值,x是sigmoid()激活后的x,所以不要使用带有logits字样的api计算损失。 loss_x = F.binary_cross_entropy(x, tx, reduction='none') loss_y = F.binary_cross_entropy(y, ty, reduction='none') loss_xy = tscale_obj * (loss_x + loss_y) else: # Grid Sensitive loss_x = torch.abs(x - tx) loss_y = torch.abs(y - ty) loss_xy = tscale_obj * (loss_x + loss_y) loss_xy = loss_xy.sum([1, 2, 3, 4]).mean() loss_w = torch.abs(w - tw) loss_h = torch.abs(h - th) loss_wh = tscale_obj * (loss_w + loss_h) loss_wh = loss_wh.sum([1, 2, 3, 4]).mean() loss['loss_xy'] = loss_xy loss['loss_wh'] = loss_wh if self.iou_loss is not None: # warn: do not modify x, y, w, h in place # 警告:不要把x, y, w, h改掉。其中x、y已经进行Grid Sensitive解码,约0到1之间的值 box, tbox = [x, y, w, h], [tx, ty, tw, th] pbox = bbox_transform(box, anchor, downsample) gbox = bbox_transform(tbox, anchor, downsample) loss_iou = self.iou_loss(pbox, gbox) loss_iou = loss_iou * tscale_obj loss_iou = loss_iou.sum([1, 2, 3, 4]).mean() loss['loss_iou'] = loss_iou if self.iou_aware_loss is not None: # warn: do not modify x, y, w, h in place # 警告:不要把x, y, w, h改掉。其中x、y已经进行Grid Sensitive解码,约0到1之间的值 box, tbox = [x, y, w, h], [tx, ty, tw, th] pbox = bbox_transform(box, anchor, downsample) gbox = bbox_transform(tbox, anchor, downsample) loss_iou_aware = self.iou_aware_loss(ioup, pbox, gbox) loss_iou_aware = loss_iou_aware * tobj loss_iou_aware = loss_iou_aware.sum([1, 2, 3, 4]).mean() loss['loss_iou_aware'] = loss_iou_aware box = [x, y, w, h] loss_obj = self.obj_loss(box, gt_box, obj, tobj, anchor, downsample) loss_obj = loss_obj.sum(-1).mean() loss['loss_obj'] = loss_obj loss_cls = self.cls_loss(pcls, tcls) * tobj loss_cls = loss_cls.sum([1, 2, 3, 4]).mean() loss['loss_cls'] = loss_cls return loss def forward(self, inputs, gt_bbox, gt_targets, anchors): yolo_losses = dict() self.distill_pairs.clear() for x, t, anchor, downsample in zip(inputs, gt_targets, anchors, self.downsample): yolo_loss = self.yolov3_loss(x, t, gt_bbox, anchor, downsample, self.scale_x_y) for k, v in yolo_loss.items(): if k in yolo_losses: 
                    yolo_losses[k] += v
                else:
                    yolo_losses[k] = v
        loss = 0
        for k, v in yolo_losses.items():
            loss += v
        yolo_losses['total_loss'] = loss
        return yolo_losses
```
#### File: mmdet/models/matrix_nms.py
```python
import torch


# area of the intersection rectangle
def intersect(box_a, box_b):
    """Compute the pairwise intersection areas of two sets of boxes.
    Args:
      box_a: (tensor) bounding boxes, Shape: [A, 4].
      box_b: (tensor) bounding boxes, Shape: [B, 4].
    Return:
      (tensor) intersection area, Shape: [A, B].
    """
    A = box_a.size(0)
    B = box_b.size(0)
    max_xy = torch.min(box_a[:, 2:].unsqueeze(1).expand(A, B, 2),
                       box_b[:, 2:].unsqueeze(0).expand(A, B, 2))
    min_xy = torch.max(box_a[:, :2].unsqueeze(1).expand(A, B, 2),
                       box_b[:, :2].unsqueeze(0).expand(A, B, 2))
    inter = torch.clamp((max_xy - min_xy), min=0)
    return inter[:, :, 0] * inter[:, :, 1]


def jaccard(box_a, box_b):
    """Compute the pairwise IoU of two sets of boxes.
    Args:
        box_a: (tensor) bounding boxes, Shape: [A, 4].
        box_b: (tensor) bounding boxes, Shape: [B, 4].
    Return:
        ious: (tensor) Shape: [A, B]
    """
    inter = intersect(box_a, box_b)
    area_a = ((box_a[:, 2] - box_a[:, 0]) * (box_a[:, 3] - box_a[:, 1])).unsqueeze(1).expand_as(inter)  # [A, B]
    area_b = ((box_b[:, 2] - box_b[:, 0]) * (box_b[:, 3] - box_b[:, 1])).unsqueeze(0).expand_as(inter)  # [A, B]
    union = area_a + area_b - inter
    return inter / union  # [A, B]


def _matrix_nms(bboxes, cate_labels, cate_scores, kernel='gaussian', sigma=2.0):
    """Matrix NMS for multi-class bboxes.
    Args:
        bboxes (Tensor): shape (n, 4)
        cate_labels (Tensor): shape (n), mask labels in descending order
        cate_scores (Tensor): shape (n), mask scores in descending order
        kernel (str):  'linear' or 'gaussian'
        sigma (float): std in gaussian method
    Returns:
        Tensor: cate_scores_update, tensors of shape (n)
    """
    n_samples = len(cate_labels)
    if n_samples == 0:
        return []
    # build an n x n IoU matrix: pairwise IoU of the boxes with themselves
    iou_matrix = jaccard(bboxes, bboxes)  # shape: [n_samples, n_samples]
    iou_matrix = iou_matrix.triu(diagonal=1)  # keep only the upper triangle
    # label_specific matrix.
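    # Worked micro-example (comments only): two same-class boxes with scores
    # [0.9, 0.8] and mutual IoU 0.6 give the upper-triangular IoU matrix
    # [[0, 0.6], [0, 0]]. With the linear kernel below, the lower-ranked box
    # decays by (1 - 0.6) / (1 - 0) = 0.4, i.e. 0.8 -> 0.32, while the
    # top-ranked box keeps its score of 0.9.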
    cate_labels_x = cate_labels.expand(n_samples, n_samples)  # shape: [n_samples, n_samples]
    # entry (i, j) says whether prediction i and prediction j share the same class id;
    # only same-class predictions suppress each other.
    label_matrix = (cate_labels_x == cate_labels_x.transpose(1, 0)).float().triu(diagonal=1)  # shape: [n_samples, n_samples]

    # IoU compensation
    # zero out cross-class IoUs, keep same-class IoUs, then take the column-wise max
    compensate_iou, _ = (iou_matrix * label_matrix).max(0)  # shape: [n_samples, ]
    compensate_iou = compensate_iou.expand(n_samples, n_samples).transpose(1, 0)  # shape: [n_samples, n_samples]

    # IoU decay
    # zero out cross-class IoUs, keep same-class IoUs
    decay_iou = iou_matrix * label_matrix  # shape: [n_samples, n_samples]

    # matrix nms
    if kernel == 'gaussian':
        decay_matrix = torch.exp(-1 * sigma * (decay_iou ** 2))
        compensate_matrix = torch.exp(-1 * sigma * (compensate_iou ** 2))
        decay_coefficient, _ = (decay_matrix / compensate_matrix).min(0)
    elif kernel == 'linear':
        decay_matrix = (1 - decay_iou) / (1 - compensate_iou)
        decay_coefficient, _ = decay_matrix.min(0)
    else:
        raise NotImplementedError

    # update the scores
    cate_scores_update = cate_scores * decay_coefficient
    return cate_scores_update


def matrix_nms(bboxes, scores, score_threshold, post_threshold, nms_top_k, keep_top_k,
               use_gaussian=False, gaussian_sigma=2.):
    inds = (scores > score_threshold)
    cate_scores = scores[inds]
    if len(cate_scores) == 0:
        return torch.zeros((1, 6), device=bboxes.device) - 1.0

    inds = inds.nonzero()
    cate_labels = inds[:, 1]
    bboxes = bboxes[inds[:, 0]]

    # sort and keep top nms_top_k
    sort_inds = torch.argsort(cate_scores, descending=True)
    if nms_top_k > 0 and len(sort_inds) > nms_top_k:
        sort_inds = sort_inds[:nms_top_k]
    bboxes = bboxes[sort_inds, :]
    cate_scores = cate_scores[sort_inds]
    cate_labels = cate_labels[sort_inds]

    # Matrix NMS
    kernel = 'gaussian' if use_gaussian else 'linear'
    cate_scores = _matrix_nms(bboxes, cate_labels, cate_scores, kernel=kernel, sigma=gaussian_sigma)

    # filter.
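    # Boxes whose decayed score falls below post_threshold are removed next;
    # this score cut is Matrix NMS's analogue of hard suppression in classical NMS.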
keep = cate_scores >= post_threshold if keep.sum() == 0: return torch.zeros((1, 6), device=bboxes.device) - 1.0 bboxes = bboxes[keep, :] cate_scores = cate_scores[keep] cate_labels = cate_labels[keep] # sort and keep keep_top_k sort_inds = torch.argsort(cate_scores, descending=True) if len(sort_inds) > keep_top_k: sort_inds = sort_inds[:keep_top_k] bboxes = bboxes[sort_inds, :] cate_scores = cate_scores[sort_inds] cate_labels = cate_labels[sort_inds] cate_scores = cate_scores.unsqueeze(1) cate_labels = cate_labels.unsqueeze(1).float() pred = torch.cat([cate_labels, cate_scores, bboxes], 1) return pred ``` #### File: models/necks/custom_pan.py ```python import torch import torch.nn as nn import torch.nn.functional as F from mmdet.models.backbones.cspresnet import ConvBNLayer, BasicBlock from mmdet.models.custom_layers import DropBlock from mmdet.models.ops import get_act_fn from mmdet.models.custom_layers import ShapeSpec __all__ = ['CustomCSPPAN'] class SPP(nn.Module): def __init__(self, ch_in, ch_out, k, pool_size, act='swish', data_format='NCHW'): super(SPP, self).__init__() self.pool = [] self.data_format = data_format for i, size in enumerate(pool_size): name = 'pool{}'.format(i) pool = nn.MaxPool2d( kernel_size=size, stride=1, padding=size // 2, ceil_mode=False) self.add_module(name, pool) self.pool.append(pool) self.conv = ConvBNLayer(ch_in, ch_out, k, padding=k // 2, act=act) def forward(self, x): outs = [x] for pool in self.pool: outs.append(pool(x)) if self.data_format == 'NCHW': y = torch.cat(outs, 1) else: y = torch.cat(outs, -1) y = self.conv(y) return y def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): self.conv.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) class CSPStage(nn.Module): def __init__(self, block_fn, ch_in, ch_out, n, act='swish', spp=False): super(CSPStage, self).__init__() ch_mid = int(ch_out // 2) self.conv1 = ConvBNLayer(ch_in, ch_mid, 1, act=act) self.conv2 = ConvBNLayer(ch_in, ch_mid, 1, act=act) self.convs = nn.Sequential() next_ch_in = ch_mid for i in range(n): self.convs.add_module( str(i), eval(block_fn)(next_ch_in, ch_mid, act=act, shortcut=False)) if i == (n - 1) // 2 and spp: self.convs.add_module( 'spp', SPP(ch_mid * 4, ch_mid, 1, [5, 9, 13], act=act)) next_ch_in = ch_mid self.conv3 = ConvBNLayer(ch_mid * 2, ch_out, 1, act=act) def forward(self, x): y1 = self.conv1(x) y2 = self.conv2(x) y2 = self.convs(y2) y = torch.cat([y1, y2], 1) y = self.conv3(y) return y def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): self.conv1.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv2.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) for layer in self.convs: layer.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) self.conv3.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) class CustomCSPPAN(nn.Module): __shared__ = ['norm_type', 'data_format', 'width_mult', 'depth_mult', 'trt'] def __init__(self, in_channels=[256, 512, 1024], out_channels=[1024, 512, 256], norm_type='bn', act='leaky', stage_fn='CSPStage', block_fn='BasicBlock', stage_num=1, block_num=3, drop_block=False, block_size=3, keep_prob=0.9, spp=False, data_format='NCHW', width_mult=1.0, depth_mult=1.0, trt=False): super(CustomCSPPAN, self).__init__() out_channels = [max(round(c * width_mult), 1) for c in out_channels] block_num = max(round(block_num * depth_mult), 1) act = get_act_fn( act, trt=trt) if act is None or isinstance(act, 
(str, dict)) else act self.num_blocks = len(in_channels) self.data_format = data_format self._out_channels = out_channels in_channels = in_channels[::-1] fpn_stages = [] fpn_routes = [] for i, (ch_in, ch_out) in enumerate(zip(in_channels, out_channels)): if i > 0: ch_in += ch_pre // 2 stage = nn.Sequential() for j in range(stage_num): stage.add_module( str(j), eval(stage_fn)(block_fn, ch_in if j == 0 else ch_out, ch_out, block_num, act=act, spp=(spp and i == 0))) if drop_block: stage.add_module('drop', DropBlock(block_size, keep_prob)) fpn_stages.append(stage) if i < self.num_blocks - 1: fpn_routes.append( ConvBNLayer( ch_in=ch_out, ch_out=ch_out // 2, filter_size=1, stride=1, padding=0, act=act)) ch_pre = ch_out self.fpn_stages = nn.ModuleList(fpn_stages) self.fpn_routes = nn.ModuleList(fpn_routes) pan_stages = [] pan_routes = [] for i in reversed(range(self.num_blocks - 1)): pan_routes.append( ConvBNLayer( ch_in=out_channels[i + 1], ch_out=out_channels[i + 1], filter_size=3, stride=2, padding=1, act=act)) ch_in = out_channels[i] + out_channels[i + 1] ch_out = out_channels[i] stage = nn.Sequential() for j in range(stage_num): stage.add_module( str(j), eval(stage_fn)(block_fn, ch_in if j == 0 else ch_out, ch_out, block_num, act=act, spp=False)) if drop_block: stage.add_module('drop', DropBlock(block_size, keep_prob)) pan_stages.append(stage) self.pan_stages = nn.ModuleList(pan_stages[::-1]) self.pan_routes = nn.ModuleList(pan_routes[::-1]) def forward(self, blocks, for_mot=False): blocks = blocks[::-1] fpn_feats = [] for i, block in enumerate(blocks): if i > 0: block = torch.cat([route, block], 1) route = self.fpn_stages[i](block) fpn_feats.append(route) if i < self.num_blocks - 1: route = self.fpn_routes[i](route) route = F.interpolate(route, scale_factor=2.) pan_feats = [fpn_feats[-1], ] route = fpn_feats[-1] for i in reversed(range(self.num_blocks - 1)): block = fpn_feats[i] route = self.pan_routes[i](route) block = torch.cat([route, block], 1) route = self.pan_stages[i](block) pan_feats.append(route) return pan_feats[::-1] def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm): for i in range(self.num_blocks): for layer in self.fpn_stages[i]: layer.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) if i < self.num_blocks - 1: self.fpn_routes[i].add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) for i in reversed(range(self.num_blocks - 1)): self.pan_routes[i].add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) for layer in self.pan_stages[i]: layer.add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm) @classmethod def from_config(cls, cfg, input_shape): return {'in_channels': [i.channels for i in input_shape], } @property def out_shape(self): return [ShapeSpec(channels=c) for c in self._out_channels] ``` #### File: models/necks/fpn.py ```python import torch import copy import torch.nn.functional as F from mmdet.models.custom_layers import Conv2dUnit class FPN(torch.nn.Module): def __init__(self, in_channels=[2048, 1024, 512, 256], num_chan=256, min_level=2, max_level=6, spatial_scale=[1. / 32., 1. / 16., 1. / 8., 1. 
/ 4.],
                 has_extra_convs=False,
                 norm_type=None,
                 norm_decay=0.,
                 freeze_norm=False,
                 use_c5=True,
                 relu_before_extra_convs=False,
                 reverse_out=False):
        super(FPN, self).__init__()
        self.in_channels = in_channels
        self.freeze_norm = freeze_norm
        self.num_chan = num_chan
        self.min_level = min_level
        self.max_level = max_level
        self.spatial_scale = spatial_scale
        self.has_extra_convs = has_extra_convs
        self.norm_type = norm_type
        self.norm_decay = norm_decay
        self.use_c5 = use_c5
        self.relu_before_extra_convs = relu_before_extra_convs
        self.reverse_out = reverse_out
        self.num_backbone_stages = len(in_channels)  # number of feature maps fed into the FPN
        self.fpn_inner_convs = torch.nn.ModuleList()  # lateral convs applied to the backbone features s32, s16, s8, ...
        self.fpn_convs = torch.nn.ModuleList()  # convs applied to fs32, fs16, fs8, ...

        # fpn_inner_convs
        for i in range(0, self.num_backbone_stages):
            cname = 'fpn_inner_res%d_sum_lateral' % (5 - i, )
            if i == 0:
                cname = 'fpn_inner_res%d_sum' % (5 - i, )
            use_bias = True if norm_type is None else False
            conv = Conv2dUnit(in_channels[i], self.num_chan, 1, stride=1, bias_attr=use_bias, norm_type=norm_type,
                              bias_lr=2.0, act=None, freeze_norm=self.freeze_norm, norm_decay=self.norm_decay,
                              name=cname)
            self.fpn_inner_convs.append(conv)

        # fpn_convs
        for i in range(0, self.num_backbone_stages):
            use_bias = True if norm_type is None else False
            conv = Conv2dUnit(self.num_chan, self.num_chan, 3, stride=1, bias_attr=use_bias, norm_type=norm_type,
                              bias_lr=2.0, act=None, freeze_norm=self.freeze_norm, norm_decay=self.norm_decay,
                              name='fpn_res%d_sum' % (5 - i, ))
            self.fpn_convs.append(conv)

        # pooling layer used when the extra pyramid levels are produced by pooling
        self.pool = torch.nn.MaxPool2d(kernel_size=1, stride=2, padding=0)

        # conv layers used when the extra pyramid levels are produced by convolution
        self.extra_convs = None
        highest_backbone_level = self.min_level + len(spatial_scale) - 1
        if self.has_extra_convs and self.max_level > highest_backbone_level:
            self.extra_convs = torch.nn.ModuleList()
            if self.use_c5:
                in_c = in_channels[0]
                fan = in_c * 3 * 3
            else:
                in_c = self.num_chan
                fan = in_c * 3 * 3
            for i in range(highest_backbone_level + 1, self.max_level + 1):
                use_bias = True if norm_type is None else False
                conv = Conv2dUnit(in_c, self.num_chan, 3, stride=2, bias_attr=use_bias, norm_type=norm_type,
                                  bias_lr=2.0, act=None, freeze_norm=self.freeze_norm, norm_decay=self.norm_decay,
                                  name='fpn_%d' % (i, ))
                self.extra_convs.append(conv)
                in_c = self.num_chan
        self.upsample = torch.nn.Upsample(scale_factor=2, mode='nearest')

    def add_param_group(self, param_groups, base_lr, base_wd, need_clip, clip_norm):
        for i in range(0, self.num_backbone_stages):
            self.fpn_inner_convs[i].add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
            self.fpn_convs[i].add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
        # conv layers used when the extra pyramid levels are produced by convolution
        highest_backbone_level = self.min_level + len(self.spatial_scale) - 1
        if self.has_extra_convs and self.max_level > highest_backbone_level:
            j = 0
            for i in range(highest_backbone_level + 1, self.max_level + 1):
                self.extra_convs[j].add_param_group(param_groups, base_lr, base_wd, need_clip, clip_norm)
                j += 1

    def forward(self, body_feats):
        '''
        An example.
        :param body_feats: [s8, s16, s32]
        :return:
                                      bs32
                                        |
                                      conv
                                        |
                    bs16             [fs32]
                      |                 |
                    conv            upsample
                      |                 |
                   lateral           topdown
                        \              /
                              add
                               |
          bs8               [fs16]
           |                   |
         conv              upsample
           |                   |
        lateral             topdown
            \                 /
                   add
                    |
                  [fs8]
        fpn_inner_output = [fs32, fs16, fs8]
        Then fs32, fs16 and fs8 each go through one more conv to give p5, p4, p3;
        p5 goes through a conv to give p6, and p6 through another conv to give p7.
        '''
        spatial_scale = copy.deepcopy(self.spatial_scale)
        num_backbone_stages = self.num_backbone_stages  # number of feature maps fed into the FPN
        body_feats = body_feats[-1:-num_backbone_stages - 1:-1]  # reversed: [s32, s16, s8, ...]
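        # Micro-example of the reversing slice above (comments only):
        # [s8, s16, s32][-1:-4:-1] -> [s32, s16, s8], so index 0 holds the coarsest level.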
        fpn_inner_output = [None] * num_backbone_stages
        fpn_inner_output[0] = self.fpn_inner_convs[0](body_feats[0])
        for i in range(1, num_backbone_stages):
            body_input = body_feats[i]
            top_output = fpn_inner_output[i - 1]
            fpn_inner_single = self._add_topdown_lateral(i, body_input, top_output)
            fpn_inner_output[i] = fpn_inner_single
        fpn_output = [None] * num_backbone_stages
        for i in range(num_backbone_stages):
            fpn_output[i] = self.fpn_convs[i](fpn_inner_output[i])

        # extra pyramid levels produced by pooling
        if not self.has_extra_convs and self.max_level - self.min_level == len(spatial_scale):
            body_top_extension = self.pool(fpn_output[0])
            fpn_output.insert(0, body_top_extension)
            spatial_scale.insert(0, spatial_scale[0] * 0.5)

        # extra pyramid levels produced by convolution
        highest_backbone_level = self.min_level + len(spatial_scale) - 1
        if self.has_extra_convs and self.max_level > highest_backbone_level:
            if self.use_c5:
                fpn_blob = body_feats[0]
            else:
                fpn_blob = fpn_output[0]
            for i in range(highest_backbone_level + 1, self.max_level + 1):
                fpn_blob_in = fpn_blob
                if i > highest_backbone_level + 1 and self.relu_before_extra_convs:
                    fpn_blob_in = torch.relu(fpn_blob)
                fpn_blob = self.extra_convs[i - highest_backbone_level - 1](fpn_blob_in)
                fpn_output.insert(0, fpn_blob)
                spatial_scale.insert(0, spatial_scale[0] * 0.5)
        if self.reverse_out:
            fpn_output = fpn_output[::-1]  # reversed.
        return fpn_output, spatial_scale

    def _add_topdown_lateral(self, i, body_input, upper_output):
        lateral = self.fpn_inner_convs[i](body_input)
        if body_input.shape[2] == -1 and body_input.shape[3] == -1:
            topdown = self.upsample(upper_output)
        else:
            topdown = F.interpolate(upper_output, size=(body_input.shape[2], body_input.shape[3]), mode='nearest')
        return lateral + topdown
```
#### File: miemiedetection/test_grad/test2_51_ResNet_grad_2pytorch.py
```python
import pickle
import six
import torch

from mmdet.models import ResNet

depth = 50
variant = 'd'
return_idx = [1, 2, 3]
dcn_v2_stages = [-1]
freeze_at = -1
freeze_norm = False
norm_decay = 0.

depth = 50
variant = 'd'
return_idx = [1, 2, 3]
dcn_v2_stages = [-1]
freeze_at = 2
freeze_norm = False
norm_decay = 0.
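# Note: the two parameter blocks above are successive trial configurations kept
# for reference; Python executes them in order, so only the second block
# (freeze_at=2) is in effect when the model below is built.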
model = ResNet(depth=depth, variant=variant, return_idx=return_idx, dcn_v2_stages=dcn_v2_stages, freeze_at=freeze_at, freeze_norm=freeze_norm, norm_decay=norm_decay) model.train() model_std = model.state_dict() def copy(name, w, std): if isinstance(w, dict): print() value2 = torch.Tensor(w) value = std[name] value.copy_(value2) std[name] = value ckpt_file = '51_00.pdparams' save_name = '51_00.pth' with open(ckpt_file, 'rb') as f: model_dic = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') for key in model_dic.keys(): name2 = key w = model_dic[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') copy(name2, w, model_std) model.load_state_dict(model_std) torch.save(model_std, save_name) print(torch.__version__) ckpt_file = '51_08.pdparams' save_name = '51_08_paddle.pth' with open(ckpt_file, 'rb') as f: model_dic = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') for key in model_dic.keys(): name2 = key w = model_dic[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') copy(name2, w, model_std) model.load_state_dict(model_std) torch.save(model_std, save_name) print(torch.__version__) ``` #### File: miemiedetection/test_grad/test2_53_BottleNeck_grad_2pytorch.py ```python import pickle import six import torch from mmdet.models import BottleNeck ch_in = 64 ch_out = 64 stride = 1 shortcut = False variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 256 ch_out = 64 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 256 ch_out = 64 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 256 ch_out = 128 stride = 2 shortcut = False variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 512 ch_out = 128 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 512 ch_out = 128 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 512 ch_out = 128 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 512 ch_out = 256 stride = 2 shortcut = False variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 256 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 256 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 256 stride = 1 shortcut = True variant = 
'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 256 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 256 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 1024 ch_out = 512 stride = 2 shortcut = False variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 2048 ch_out = 512 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 1.0 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False ch_in = 2048 ch_out = 512 stride = 1 shortcut = True variant = 'd' groups = 1 base_width = 64 lr = 0.5 norm_type = 'bn' norm_decay = 0.0 freeze_norm = False dcn_v2 = False std_senet = False model = BottleNeck(ch_in=ch_in, ch_out=ch_out, stride=stride, shortcut=shortcut, variant=variant, groups=groups, base_width=base_width, lr=lr, norm_type=norm_type, norm_decay=norm_decay, freeze_norm=freeze_norm, dcn_v2=dcn_v2, std_senet=std_senet, ) model.train() model_std = model.state_dict() def copy(name, w, std): if isinstance(w, dict): print() value2 = torch.Tensor(w) value = std[name] value.copy_(value2) std[name] = value ckpt_file = '53_00.pdparams' save_name = '53_00.pth' with open(ckpt_file, 'rb') as f: model_dic = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') for key in model_dic.keys(): name2 = key w = model_dic[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') copy(name2, w, model_std) model.load_state_dict(model_std) torch.save(model_std, save_name) print(torch.__version__) ckpt_file = '53_08.pdparams' save_name = '53_08_paddle.pth' with open(ckpt_file, 'rb') as f: model_dic = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') for key in model_dic.keys(): name2 = key w = model_dic[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') copy(name2, w, model_std) model.load_state_dict(model_std) torch.save(model_std, save_name) print(torch.__version__) ``` #### File: miemiedetection/tools/convert_weights.py ```python import argparse import os import time from loguru import logger import cv2 import torch # import paddle.fluid as fluid import pickle import six # add python path of this repo to sys.path import sys parent_path = os.path.abspath(os.path.join(__file__, *(['..'] * 2))) sys.path.insert(0, parent_path) from mmdet.exp import get_exp from mmdet.utils import fuse_model, get_model_info, postprocess, vis, get_classes from mmdet.models import * from mmdet.models.custom_layers import * from mmdet.models.necks.yolo_fpn import PPYOLOFPN, PPYOLOPAN def make_parser(): parser = argparse.ArgumentParser("MieMieDetection convert weights") parser.add_argument("-expn", "--experiment-name", type=str, default=None) parser.add_argument("-n", "--name", type=str, default=None, help="model name") # exp file parser.add_argument( "-f", "--exp_file", default=None, type=str, help="pls input your experiment 
description file", ) parser.add_argument("-c", "--ckpt", default=None, type=str, help="checkpoint") parser.add_argument("-oc", "--output_ckpt", default=None, type=str, help="output checkpoint") parser.add_argument("-nc", "--num_classes", default=80, type=int, help="dataset num_classes") parser.add_argument( "--only_backbone", default=False, type=bool, help="only convert backbone", ) parser.add_argument( "--device", default="cpu", type=str, help="device to run our model, can either be cpu or gpu", ) return parser def copy_conv_bn(conv_unit, w, scale, offset, m, v, use_gpu): if use_gpu: conv_unit.conv.weight.data = torch.Tensor(w).cuda() conv_unit.bn.weight.data = torch.Tensor(scale).cuda() conv_unit.bn.bias.data = torch.Tensor(offset).cuda() conv_unit.bn.running_mean.data = torch.Tensor(m).cuda() conv_unit.bn.running_var.data = torch.Tensor(v).cuda() else: conv_unit.conv.weight.data = torch.Tensor(w) conv_unit.bn.weight.data = torch.Tensor(scale) conv_unit.bn.bias.data = torch.Tensor(offset) conv_unit.bn.running_mean.data = torch.Tensor(m) conv_unit.bn.running_var.data = torch.Tensor(v) def copy_conv_gn(conv_unit, w, b, scale, offset, use_gpu): if use_gpu: conv_unit.conv.weight.data = torch.Tensor(w).cuda() conv_unit.conv.bias.data = torch.Tensor(b).cuda() conv_unit.gn.weight.data = torch.Tensor(scale).cuda() conv_unit.gn.bias.data = torch.Tensor(offset).cuda() else: conv_unit.conv.weight.data = torch.Tensor(w) conv_unit.conv.bias.data = torch.Tensor(b) conv_unit.gn.weight.data = torch.Tensor(scale) conv_unit.gn.bias.data = torch.Tensor(offset) def copy_conv_af(conv_unit, w, scale, offset, use_gpu): if use_gpu: conv_unit.conv.weight.data = torch.Tensor(w).cuda() conv_unit.af.weight.data = torch.Tensor(scale).cuda() conv_unit.af.bias.data = torch.Tensor(offset).cuda() else: conv_unit.conv.weight.data = torch.Tensor(w) conv_unit.af.weight.data = torch.Tensor(scale) conv_unit.af.bias.data = torch.Tensor(offset) def copy_conv(conv_layer, w, b, use_gpu): if use_gpu: conv_layer.weight.data = torch.Tensor(w).cuda() conv_layer.bias.data = torch.Tensor(b).cuda() else: conv_layer.weight.data = torch.Tensor(w) conv_layer.bias.data = torch.Tensor(b) def copy(name, w, std): value2 = torch.Tensor(w) value = std[name] value.copy_(value2) std[name] = value def main(exp, args): if not args.experiment_name: args.experiment_name = exp.exp_name logger.info("Args: {}".format(args)) # 强制改配置文件中的类别数为args.num_classes exp.num_classes = args.num_classes if getattr(exp, "head", None) is not None: if 'num_classes' in exp.head.keys(): exp.head['num_classes'] = args.num_classes # 这些预训练骨干网络没有使用DCNv2 no_dcnv2_backbones = ['ResNet50_vd_ssld_pretrained.pdparams', 'ResNet101_vd_ssld_pretrained.pdparams'] if args.only_backbone and args.ckpt in no_dcnv2_backbones: exp.backbone['dcn_v2_stages'] = [-1] model = exp.get_model() # 算法名字 model_class_name = model.__class__.__name__ # logger.info("Model Summary: {}".format(get_model_info(model_class_name, model, exp.test_size))) use_gpu = False if args.device == "gpu": model.cuda() use_gpu = True model.eval() model_std = model.state_dict() # 新增算法时这里也要增加elif if model_class_name == 'YOLOX': pass elif model_class_name == 'PPYOLO': with open(args.ckpt, 'rb') as f: state_dict = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') # state_dict = fluid.io.load_program_state(args.ckpt) backbone_dic = {} fpn_dic = {} head_dic = {} others = {} for key, value in state_dict.items(): if 'tracked' in key: continue if 'backbone' in key: backbone_dic[key] = value elif 'neck' in key: 
fpn_dic[key] = value elif 'head' in key: head_dic[key] = value else: others[key] = value backbone_dic2 = {} fpn_dic2 = {} head_dic2 = {} others2 = {} for key, value in model_std.items(): if 'tracked' in key: continue if 'backbone' in key: backbone_dic2[key] = value elif 'neck' in key: fpn_dic2[key] = value elif 'head' in key: head_dic2[key] = value else: others2[key] = value backbone = model.backbone fpn = model.neck head = model.yolo_head for key in state_dict.keys(): name2 = key w = state_dict[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') if 'yolo_block.' in key: name2 = name2.replace('yolo_block.', 'yolo_block_') if 'yolo_transition.' in key: name2 = name2.replace('yolo_transition.', 'yolo_transition_') if 'yolo_output.' in key: name2 = name2.replace('yolo_output.', 'yolo_output_') if 'fpn.' in key: name2 = name2.replace('fpn.', 'fpn_') name2 = name2.replace('0.0', '0_0') name2 = name2.replace('0.1', '0_1') name2 = name2.replace('1.0', '1_0') name2 = name2.replace('1.1', '1_1') name2 = name2.replace('2.0', '2_0') name2 = name2.replace('2.1', '2_1') if 'fpn_transition.' in key: name2 = name2.replace('fpn_transition.', 'fpn_transition_') if 'pan_transition.' in key: name2 = name2.replace('pan_transition.', 'pan_transition_') if 'pan.' in key: name2 = name2.replace('pan.', 'pan_') name2 = name2.replace('0.0', '0_0') name2 = name2.replace('0.1', '0_1') name2 = name2.replace('1.0', '1_0') name2 = name2.replace('1.1', '1_1') name2 = name2.replace('2.0', '2_0') name2 = name2.replace('2.1', '2_1') copy(name2, w, model_std) if args.only_backbone: delattr(model, "neck") delattr(model, "yolo_head") elif model_class_name == 'PPYOLOE': temp_x = torch.randn((2, 3, 640, 640)) temp_scale_factor = torch.ones((2, 2)) if args.device == "gpu": temp_x = temp_x.cuda() temp_scale_factor = temp_scale_factor.cuda() temp_out = model(temp_x, temp_scale_factor) with open(args.ckpt, 'rb') as f: state_dict = pickle.load(f) if six.PY2 else pickle.load(f, encoding='latin1') # state_dict = fluid.io.load_program_state(args.ckpt) backbone_dic = {} fpn_dic = {} head_dic = {} others = {} for key, value in state_dict.items(): if 'tracked' in key: continue if 'backbone' in key: backbone_dic[key] = value elif 'neck' in key: fpn_dic[key] = value elif 'head' in key: head_dic[key] = value else: others[key] = value backbone_dic2 = {} fpn_dic2 = {} head_dic2 = {} others2 = {} for key, value in model_std.items(): if 'tracked' in key: continue if 'backbone' in key: backbone_dic2[key] = value elif 'neck' in key: fpn_dic2[key] = value elif 'head' in key: head_dic2[key] = value else: others2[key] = value backbone = model.backbone fpn = model.neck head = model.yolo_head for key in state_dict.keys(): name2 = key w = state_dict[key] if 'StructuredToParameterName@@' in key: continue else: if '._mean' in key: name2 = name2.replace('._mean', '.running_mean') if '._variance' in key: name2 = name2.replace('._variance', '.running_var') if 'yolo_block.' in key: name2 = name2.replace('yolo_block.', 'yolo_block_') if 'yolo_transition.' in key: name2 = name2.replace('yolo_transition.', 'yolo_transition_') if 'yolo_output.' in key: name2 = name2.replace('yolo_output.', 'yolo_output_') if 'fpn.' 
in key:
                    name2 = name2.replace('fpn.', 'fpn_')
                    name2 = name2.replace('0.0', '0_0')
                    name2 = name2.replace('0.1', '0_1')
                    name2 = name2.replace('1.0', '1_0')
                    name2 = name2.replace('1.1', '1_1')
                    name2 = name2.replace('2.0', '2_0')
                    name2 = name2.replace('2.1', '2_1')
                if 'fpn_transition.' in key:
                    name2 = name2.replace('fpn_transition.', 'fpn_transition_')
                if 'pan_transition.' in key:
                    name2 = name2.replace('pan_transition.', 'pan_transition_')
                if 'pan.' in key:
                    name2 = name2.replace('pan.', 'pan_')
                    name2 = name2.replace('0.0', '0_0')
                    name2 = name2.replace('0.1', '0_1')
                    name2 = name2.replace('1.0', '1_0')
                    name2 = name2.replace('1.1', '1_1')
                    name2 = name2.replace('2.0', '2_0')
                    name2 = name2.replace('2.1', '2_1')
                if args.only_backbone:
                    name2 = 'backbone.' + name2
                copy(name2, w, model_std)
        if args.only_backbone:
            delattr(model, "neck")
            delattr(model, "yolo_head")
    elif model_class_name == 'FCOS':
        pass
    else:
        raise NotImplementedError("Architectures \'{}\' is not implemented.".format(model_class_name))

    # save checkpoint.
    ckpt_state = {
        "start_epoch": 0,
        "model": model.state_dict(),
        "optimizer": None,
    }
    torch.save(ckpt_state, args.output_ckpt)
    logger.info("Done.")


if __name__ == "__main__":
    args = make_parser().parse_args()
    # check whether we are running under a debugger
    isDebug = True if sys.gettrace() else False
    if isDebug:
        print('Debug Mode.')
        args.exp_file = '../' + args.exp_file
        args.ckpt = '../' + args.ckpt
        args.output_ckpt = '../' + args.output_ckpt
    exp = get_exp(args.exp_file, args.name)
    main(exp, args)
```
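The `._mean`/`._variance` renaming above is the heart of the Paddle-to-PyTorch state-dict conversion. A minimal standalone sketch of just that mapping, using hypothetical key names independent of this repo's models:

```python
import numpy as np
import torch


def paddle_key_to_torch(key):
    # Paddle BatchNorm stores running statistics as `._mean` / `._variance`,
    # while PyTorch expects `.running_mean` / `.running_var`.
    return key.replace('._mean', '.running_mean').replace('._variance', '.running_var')


# Hypothetical Paddle-style entries (names invented for illustration).
paddle_dict = {'bn1._mean': np.zeros(8, np.float32),
               'bn1._variance': np.ones(8, np.float32)}
torch_dict = {paddle_key_to_torch(k): torch.from_numpy(v) for k, v in paddle_dict.items()}
print(sorted(torch_dict))  # ['bn1.running_mean', 'bn1.running_var']
```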
{ "source": "jie311/RangeDet", "score": 2 }
#### File: RangeDet/operator_py/batch_rotated_iou.py ```python import mxnet as mx import mxnet.numpy as np from mxnet.ndarray.contrib import RotatedIOU class BatchRotatedIOU(mx.operator.CustomOp): def __init__(self, iou_type): super(BatchRotatedIOU, self).__init__() self.iou_type = iou_type def forward(self, is_train, req, in_data, out_data, aux): # input proposal = in_data[0] # (3, 169984, 10) gt_bbox = in_data[1] # (3, 200, 8) if self.iou_type == '3d': proposal = self.to_box_type_7(proposal.copy()) iou_map = np.zeros((proposal.shape[0], proposal.shape[1]), dtype=gt_bbox.dtype, ctx=gt_bbox.context) for pred_bbox_per_batch, gt_bbox_per_batch, iou_map_per_batch in zip(proposal, gt_bbox, iou_map): iou_3d = self.get_iou(pred_bbox_per_batch, gt_bbox_per_batch) iou_map_per_batch[...] = iou_3d for ind, val in enumerate([iou_map, ]): self.assign(out_data[ind], req[ind], val.as_nd_ndarray()) def backward(self, req, out_grad, in_data, out_data, in_grad, aux): for ind in range(2): self.assign(in_grad[ind], req[ind], 0) def get_iou(self, roi_batch, gt_batch): if self.iou_type == 'bev': iou_mat = RotatedIOU(roi_batch[:, :8], gt_batch).as_np_ndarray() # [num_roi, num_gt] elif self.iou_type == '3d': roi_batch[:, -1] = -1 * roi_batch[:, -1] gt_batch[:, -1] = -1 * gt_batch[:, -1] iou_mat = RotatedIOU(roi_batch.as_nd_ndarray(), gt_batch).as_np_ndarray() # [num_roi, num_gt] else: raise Exception("no supported type") # iou_mat = iou_mat.as_np_ndarray() # iou_mat = np.minimum(1, np.maximum(0, 2 * iou_mat - 0.5)) iou_mat[np.isnan(iou_mat)] = 0 iou_mat[np.isinf(iou_mat)] = 0 iou_mat[iou_mat > 1.0] = 0 iou_mat[iou_mat < 0] = 0 iou_mat = iou_mat.max(axis=1) return iou_mat @staticmethod def to_box_type_7(proposal): """ [x1, y1, x2, y2, x3, y3, x4, y4, z0, z1] change to [cen_x, cen_y, cen_z, l, w, h, yaw] :param proposal: :return: """ proposal_4pts = proposal[:, :, :8].reshape(proposal.shape[0], -1, 4, 2) center_xy = proposal_4pts.mean(axis=2) # [b, n, 2] center_z = proposal[:, :, -2:].mean(axis=2, keepdims=True) length = ((proposal_4pts[:, :, 0, :] - proposal_4pts[:, :, 1, :]) ** 2).sum(axis=2, keepdims=True) ** 0.5 width = ((proposal_4pts[:, :, 1, :] - proposal_4pts[:, :, 2, :]) ** 2).sum(axis=2, keepdims=True) ** 0.5 height = proposal[:, :, -1:] - proposal[:, :, -2:-1] yaw = np.arctan2( proposal_4pts[:, :, 0, 1] - proposal_4pts[:, :, 1, 1], proposal_4pts[:, :, 0, 0] - proposal_4pts[:, :, 1, 0]) proposal_type7 = np.concatenate([center_xy, center_z, length, width, height, yaw[:, :, None]], axis=2) return proposal_type7 @mx.operator.register('batch_rotated_iou') class BatchRotatedIOUProp(mx.operator.CustomOpProp): def __init__(self, iou_type): super(BatchRotatedIOUProp, self).__init__(need_top_grad=False) self.iou_type = iou_type def list_arguments(self): return ['proposal', 'gt_bbox'] def list_outputs(self): return ['iou_map'] def infer_shape(self, in_shape): proposal_shape = in_shape[0] # [b, 169984, 10] batch_size = proposal_shape[0] sample_num = proposal_shape[1] proposal_dim = proposal_shape[2] assert proposal_dim == 10 gt_bbox_shape = in_shape[1] assert len(gt_bbox_shape) == 3 # [b, 200, 8] max_num_box = gt_bbox_shape[1] assert max_num_box == 200 gt_bbox_dim = gt_bbox_shape[2] if self.iou_type == 'bev': assert gt_bbox_dim == 8 elif self.iou_type == '3d': assert gt_bbox_dim == 7 else: raise ValueError('Unknown iou type!') out_shape = (batch_size, sample_num) return in_shape, [out_shape] def create_operator(self, ctx, shapes, dtypes): return BatchRotatedIOU(self.iou_type) def declare_backward_dependency(self, 
out_grad, in_data, out_data): return [] ``` #### File: rangedet/core/util_func.py ```python from __future__ import absolute_import from __future__ import division from __future__ import print_function import processing_cxx import numpy as np from numba import jit, njit @njit def sample_data(data, slice_begin=None, stride_width=None): """ Only supports sampling three-dimensional arrays :param data: (C, H, W) or (H, W, C) :param c_dim: channel_dim: str 'front' 'rear' :param stride_height: int (stride in vertical axis) :param stride_width: int (stride in horizontal axis) :return: stride sample data """ data = data.copy() data_width = data.shape[2] s_w = slice(slice_begin, data_width, stride_width) data = data[:, :, s_w] return data def class_aware_expand(data, class_target, num_classes): """ num_pts = H * W :param data: (num_pts, num_channel) :param class_target: (num_pts) # Start from zero and consider the background category, background category is the last dimension :param num_classes: (int) num fg classes :return: (num_pts, num_fg_classes, num_channel) """ output_data = np.zeros(shape=(data.shape[0], num_classes + 1, data.shape[1]), dtype=np.float32) output_data[:, class_target, :] = data return output_data[:, :-1, :] @jit(nopython=True, nogil=True) def jit_class_aware_expand(data, class_target, num_classes): """ num_pts = H * W :param data: (num_pts, num_channel) :param class_target: (num_pts) # Start from zero and consider the background category, background category is the last dimension :param num_classes: (int) num fg classes :return: (num_pts, num_fg_classes, num_channel) """ output_data = np.zeros(shape=(data.shape[0], num_classes + 1, data.shape[1]), dtype=np.float32) for i in range(data.shape[0]): output_data[i, class_target[i], :] = data[i] return output_data[:, :-1, :] def inv_points_frequency(bbox_inds): """ :param bbox_inds: (H * W) :return: normalization_weight: (H * W) """ # (H * W) num_pts_in_bbox = processing_cxx.get_point_num( bbox_inds.astype(np.float32)).reshape(-1) normalization_weight = 1 / num_pts_in_bbox return normalization_weight ``` #### File: symbol/backbone/meta_kernel.py ```python from __future__ import division import mxnet as mx from mxnext.simple import conv, to_fp16, relu class MetaKernel(object): def __init__(self, num_batch, feat_height, feat_width, fp16, num_frame=1): self.num_batch = num_batch self.H = feat_height self.W = feat_width self.fp16 = fp16 self.num_frame = num_frame @staticmethod def sampler_im2col(data, name, kernel=1, stride=1, pad=None, dilate=1): """ please refer to mx.symbol.im2col """ if isinstance(kernel, int): kernel = (kernel, kernel) if isinstance(stride, int): stride = (stride, stride) if isinstance(dilate, int): dilate = (dilate, dilate) if pad is None: assert kernel[0] % 2 == 1, "Specify pad for an even kernel size for {}".format(name) pad = ((kernel[0] - 1) * dilate[0] + 1) // 2 if isinstance(pad, int): pad = (pad, pad) output = mx.symbol.im2col( name=name + "sampler", data=data, kernel=kernel, stride=stride, dilate=dilate, pad=pad ) return output def sample_data(self, name, data, kernel_size): """ data sample :param name: str :param data: num_batch, num_channel_in, H, W :param kernel_size: int default=3 :return: sample_output: num_batch, num_channel_in * kernel_size * kernel_size, H, W """ sample_output = self.sampler_im2col( data=data, name=name + "data_", kernel=kernel_size, stride=1, pad=1, dilate=1 ) return sample_output def sample_coord(self, name, coord, kernel_size): """ coord sample :param name: str :param coord: 
num_batch, num_channel_in, H, W :param kernel_size: int default=3 :return: coord_sample_data: num_batch, num_channel_in * kernel_size * kernel_size, H, W """ coord_sample_data = self.sampler_im2col( data=coord, name=name + "coord_", kernel=kernel_size, stride=1, pad=1, dilate=1 ) return coord_sample_data def relative_coord(self, sample_coord, center_coord, num_channel_in, kernel_size): """ :param sample_coord: num_batch, num_channel_in * kernel_size * kernel_size, H, W :param center_coord: num_batch, num_channel_in, H, W :param num_channel_in: int :param kernel_size: int :return: rel_coord: num_batch, num_channel_in, kernel_size * kernel_size, H, W """ sample_reshape = mx.sym.reshape( sample_coord, shape=( self.num_batch, num_channel_in, kernel_size * kernel_size, self.H, self.W ) ) center_coord_expand = mx.sym.expand_dims( center_coord, axis=2 ) rel_coord = mx.sym.broadcast_minus( sample_reshape, center_coord_expand, name="relative_dis" ) return rel_coord def mlp(self, data, name, in_channels, norm, channel_list=None, b_mul=1, no_bias=True, use_norm=False): """ :param data: num_batch, num_channel_in * kernel_size * kernel_size, H, W :param name: str :param in_channels: int :param norm: normalizer :param channel_list: List[int] :param b_mul: int default=1 :param no_bias: bool default=True :param use_norm: bool default=False :return: mlp_output_reshape: num_batch, out_channels, kernel_size * kernel_size, H, W """ assert isinstance(channel_list, list) x = mx.sym.reshape( data, shape=( self.num_batch * b_mul, in_channels, -1, self.W ) ) for i, out_channel in enumerate(channel_list): x = conv( x, name=name + "{}_mlp{}".format(self.W, i), filter=out_channel, kernel=1, stride=1, pad=0, dilate=1, no_bias=no_bias ) if i != len(channel_list) - 1: if use_norm: x = norm( x, name=name + "{}_mlp_bn{}".format(self.W, i)) x = relu( x, name + "{}_mlp_relu{}".format(self.W, i)) mlp_output_reshape = mx.sym.reshape( x, shape=( self.num_batch * b_mul, channel_list[-1], -1, self.H, self.W ) ) return mlp_output_reshape def meta_baseline_bias(self, name, data, coord_data, data_channels, coord_channels, channel_list, norm, conv1_filter, kernel_size=3, **kwargs): """ # Without data mlp; # MLP: fc + norm + relu + fc; # Using normalized coordinates :param name: str :param data: num_batch, num_channel_in, H, W :param coord_data: num_batch, 3, H, W :param data_channels: num_channel_in :param coord_channels: 3 :param channel_list: List[int] :param norm: normalizer :param conv1_filter: int :param kernel_size: int default=3 :param kwargs: :return: conv1: num_batch, conv1_filter, H, W """ if self.fp16: coord_data = to_fp16( coord_data, name + 'coord_data_fp16') name = name + '_' coord_sample_data = self.sample_coord( name, coord_data, kernel_size) rel_coord = self.relative_coord( coord_sample_data, coord_data, coord_channels, kernel_size) weights = self.mlp( rel_coord, name, in_channels=coord_channels, channel_list=channel_list, norm=norm, no_bias=False) data_sample = self.sample_data( name, data, kernel_size) data_sample_reshape = mx.sym.reshape( data=data_sample, shape=( self.num_batch, data_channels, kernel_size * kernel_size, self.H, self.W) ) output = data_sample_reshape * weights output_reshape = mx.sym.reshape( output, shape=( self.num_batch, -1, self.H, self.W) ) return output_reshape ``` #### File: RangeDet/utils/cpu_affinity.py ```python import psutil import os import subprocess import logging def simple_bind_cpus(rank, num_partition, logical=False): pid = os.getpid() p = psutil.Process(pid) cpu_count = 
psutil.cpu_count(logical=logical) cpu_count_per_worker = cpu_count // num_partition cpu_list = list(range(rank * cpu_count_per_worker, (rank + 1) * cpu_count_per_worker)) print("bind cpu list:{}".format(cpu_list)) p.cpu_affinity(cpu_list) logging.info("rank: {}, pid:{}, affinity to cpu {}".format(rank, pid, cpu_list)) def simple_bind_cpus_with_superthread(rank, num_partition): pid = os.getpid() p = psutil.Process(pid) phy_cpu_count = psutil.cpu_count(logical=False) cpu_count_per_worker = phy_cpu_count // num_partition cpu_list = list(range(rank * cpu_count_per_worker, (rank + 1) * cpu_count_per_worker)) cpu_list += list( range(phy_cpu_count + rank * cpu_count_per_worker, phy_cpu_count + (rank + 1) * cpu_count_per_worker)) p.cpu_affinity(cpu_list) logging.info("rank: {}, pid:{}, affinity to cpu {}".format(rank, pid, cpu_list)) def bind_cpus_with_list(cpu_list): pid = os.getpid() p = psutil.Process(pid) p.cpu_affinity(cpu_list) logging.info("pid:{}, affinity to cpu {}".format(pid, cpu_list)) def bind_cpus_on_ecos(rank, num_partition): pid = os.getpid() p = psutil.Process(pid) allowed_list = cpu_allowed_list() if rank == 0: print("cpu allowed list len:{}, {}".format(len(allowed_list), allowed_list)) cpu_count_per_worker = len(allowed_list) // num_partition cpu_list = allowed_list[int(rank * cpu_count_per_worker):int((rank + 1) * cpu_count_per_worker)] p.cpu_affinity(cpu_list) logging.info("rank: {}, pid:{}, affinity to cpu {}".format(rank, pid, cpu_list)) def cpu_allowed_list(): byte_info = subprocess.check_output("cat /proc/$$/status|grep Cpus_allowed_list|awk '{print $2}'", shell=True) cpu_list = byte_info.decode("utf-8").replace("\n", "").split(",") allowed_list = [] for item in cpu_list: ranges = [int(cpuid) for cpuid in item.split('-')] if len(ranges) == 1: allowed_list.append(ranges[0]) else: allowed_list += list(range(ranges[0], ranges[1] + 1)) return allowed_list ``` #### File: RangeDet/utils/train_utils.py ```python import mxnet as mx import numpy as np import time import warnings import math from functools import partial class Timer(object): def __init__(self): self.reset() def tic(self): self.start = time.time() def toc(self): self.time += time.time() - self.start self.count += 1 def get(self): return self.time / self.count def reset(self): self.time = 0 self.count = 0 class OneCycleScheduler(object): """ Reduce the learning rate according to a cosine function """ def __init__(self, max_update, lr_max=0.003, div_factor=10.0, pct_start=0.4, begin_update=0): assert isinstance(max_update, int) self.max_update = max_update self.begin_update = begin_update self.lr_max = lr_max self.base_lr = lr_max self.div_factor = div_factor self.pct_start = pct_start self.warmup_steps = int(max_update * self.pct_start) low_lr = self.lr_max / self.div_factor self.lr_phases = (partial(OneCycleScheduler.annealing_cos, low_lr, self.lr_max), partial(OneCycleScheduler.annealing_cos, self.lr_max, low_lr / 1e4)) print('lr_phases:', self.lr_phases) @staticmethod def annealing_cos(start, end, pct): # print(pct, start, end) "Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0." 
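        # Illustrative endpoints (comments only): pct=0 gives cos_out=2 and returns
        # `start`; pct=0.5 gives cos_out=1 and returns the midpoint (start+end)/2;
        # pct=1 gives cos_out=0 and returns `end`.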
cos_out = math.cos(math.pi * pct) + 1 return end + (start - end) / 2 * cos_out def __call__(self, num_update): if self.begin_update > 0: num_update += self.begin_update if num_update <= self.warmup_steps: self.base_lr = self.lr_phases[0](float(num_update) / float(self.warmup_steps)) elif num_update <= self.max_update: self.base_lr = self.lr_phases[1](float(num_update - self.warmup_steps) / float(self.max_update - self.warmup_steps)) return self.base_lr class OneCycleMomentumScheduler(object): """ Reduce the momentum according to a cosine function """ def __init__(self, max_update, moms=[0.95, 0.85], pct_start=0.4): assert isinstance(max_update, int) self.max_update = max_update self.moms = moms self.pct_start = pct_start self.warmup_steps = int(max_update * self.pct_start) self.lr_phases = (partial(OneCycleScheduler.annealing_cos, moms[0], moms[1]), partial(OneCycleScheduler.annealing_cos, moms[1], moms[0])) print('mom_phases:', self.lr_phases) @staticmethod def annealing_cos(start, end, pct): # print(pct, start, end) "Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0." cos_out = math.cos(math.pi * pct) + 1 return end + (start - end) / 2 * cos_out def __call__(self, num_update): if num_update <= self.warmup_steps: self.mom = self.lr_phases[0](float(num_update) / float(self.warmup_steps)) elif num_update <= self.max_update: self.mom = self.lr_phases[1](float(num_update - self.warmup_steps) / float(self.max_update - self.warmup_steps)) return self.mom def clip_global_norm(arrays, max_norm, check_isfinite=True): """Rescales NDArrays so that the sum of their 2-norm is smaller than `max_norm`. Parameters ---------- arrays : list of NDArray max_norm : float check_isfinite : bool, default True If True, check that the total_norm is finite (not nan or inf). This requires a blocking .asscalar() call. Returns ------- NDArray or float Total norm. Return type is NDArray of shape (1,) if check_isfinite is False. Otherwise a float is returned. """ def _norm(array): if array.stype == 'default': x = array.reshape((-1,)) return mx.ndarray.dot(x, x) return array.norm().square() assert len(arrays) > 0 ctx = arrays[0][0].context total_norm = mx.ndarray.add_n(*[_norm(arr[0]).as_in_context(ctx) for arr in arrays]) total_norm = mx.ndarray.sqrt(total_norm) if check_isfinite: if not np.isfinite(total_norm.asscalar()): warnings.warn( UserWarning('nan or inf is detected. ' 'Clipping results will be undefined.'), stacklevel=2) scale = max_norm / (total_norm + 1e-8) scale = mx.ndarray.min(mx.ndarray.concat(scale, mx.ndarray.ones(1, ctx=ctx), dim=0)) for arr in arrays: arr[0] *= scale.as_in_context(arr[0].context) if check_isfinite: return total_norm.asscalar() else: return total_norm @mx.optimizer.Optimizer.register class AdamW(mx.optimizer.Optimizer): """The Adam optimizer with weight decay regularization. Updates are applied by:: rescaled_grad = clip(grad * rescale_grad, clip_gradient) m = beta1 * m + (1 - beta1) * rescaled_grad v = beta2 * v + (1 - beta2) * (rescaled_grad**2) w = w - learning_rate * (m / (sqrt(v) + epsilon) + wd * w) Note that this is different from `mxnet.optimizer.Adam`, where L2 loss is added and accumulated in m and v. In AdamW, the weight decay term decoupled from gradient based update. This is also slightly different from the AdamW optimizer described in *Fixing Weight Decay Regularization in Adam*, where the schedule multiplier and learning rate is decoupled. The BERTAdam optimizer uses the same learning rate to apply gradients w.r.t. the loss and weight decay. 
This optimizer accepts the following parameters in addition to those accepted by :class:`mxnet.optimizer.Optimizer`. Parameters ---------- beta1 : float, optional Exponential decay rate for the first moment estimates. beta2 : float, optional Exponential decay rate for the second moment estimates. epsilon : float, optional Small value to avoid division by 0. """ def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.99, epsilon=1e-8, **kwargs): if 'clip_weight' in kwargs: self.clip_weight = kwargs.pop('clip_weight') super(AdamW, self).__init__(learning_rate=learning_rate, **kwargs) self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon def create_state(self, index, weight): # pylint-disable=unused-argument """Initialization for mean and var.""" return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # mean mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # variance def _get_mom(self): if isinstance(self.beta1, float): return self.beta1 else: return self.beta1(self.num_update) def update(self, index, weight, grad, state): """Update method.""" try: from mxnet.ndarray.contrib import adamw_update except ImportError: raise ImportError("Failed to import nd.contrib.adamw_update from MXNet. " "BERTAdam optimizer requires mxnet>=1.5.0b20181228. " "Please upgrade your MXNet version.") # print(type(weight), len(weight)) # assert(isinstance(weight, mx.nd.NDArray)) # assert(isinstance(grad, mx.nd.NDArray)) if isinstance(index, list): assert len(index) == 1 index = index[0] if isinstance(weight, list): assert len(weight) == 1 weight = weight[0] if isinstance(grad, list): assert len(grad) == 1 grad = grad[0] if isinstance(state, list): assert len(state) == 1 state = state[0] self._update_count(index) lr = self._get_lr(index) wd = self._get_wd(index) beta1 = self._get_mom() t = self._index_update_count[index] coef1 = 1. - beta1 ** t coef2 = 1. - self.beta2 ** t lr *= math.sqrt(coef2) / coef1 kwargs = {'beta1': beta1, 'beta2': self.beta2, 'epsilon': self.epsilon, 'rescale_grad': self.rescale_grad} if self.clip_gradient: kwargs['clip_gradient'] = self.clip_gradient mean, var = state adamw_update(weight, grad, mean, var, out=weight, lr=1, wd=wd, eta=lr, **kwargs) if self.clip_weight is not None and len(weight.shape) == 4: pass # if weight.abs().sum() > 0: # weight[:] = weight * 0.9 # weight[:] = mx.nd.clip(weight, self.clip_weight * -1, self.clip_weight) @mx.optimizer.Optimizer.register class AdamWS(mx.optimizer.Optimizer): def __init__(self, learning_rate=0.001, beta1=0.9, beta2=0.99, epsilon=1e-8, **kwargs): super(AdamWS, self).__init__(learning_rate=learning_rate, **kwargs) self.beta1 = beta1 self.beta2 = beta2 self.epsilon = epsilon def create_state(self, index, weight): # pylint-disable=unused-argument """Initialization for mean and var.""" return (mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype), # mean mx.nd.zeros(weight.shape, weight.context, dtype=weight.dtype)) # variance def _get_mom(self): if isinstance(self.beta1, float): return self.beta1 else: return self.beta1(self.num_update) def update(self, index, weight, grad, state): """Update method.""" try: from mxnet.ndarray.contrib import adamw_update except ImportError: raise ImportError("Failed to import nd.contrib.adamw_update from MXNet. " "BERTAdam optimizer requires mxnet>=1.5.0b20181228. 
" "Please upgrade your MXNet version.") # print(type(weight), len(weight)) # assert(isinstance(weight, mx.nd.NDArray)) # assert(isinstance(grad, mx.nd.NDArray)) if isinstance(index, list): assert len(index) == 1 index = index[0] if isinstance(weight, list): assert len(weight) == 1 weight = weight[0] if isinstance(grad, list): assert len(grad) == 1 grad = grad[0] if isinstance(state, list): assert len(state) == 1 state = state[0] self._update_count(index) lr = self._get_lr(index) wd = self._get_wd(index) beta1 = self._get_mom() t = self._index_update_count[index] coef1 = 1. - beta1 ** t coef2 = 1. - self.beta2 ** t lr *= math.sqrt(coef2) / coef1 kwargs = {'beta1': beta1, 'beta2': self.beta2, 'epsilon': self.epsilon, 'rescale_grad': self.rescale_grad} if self.clip_gradient: kwargs['clip_gradient'] = self.clip_gradient mean, var = state adamw_update(weight, grad, mean, var, out=weight, lr=1, wd=wd, eta=lr, **kwargs) # print(weight.shape) if len(weight.shape) == 4: weight_mean = weight.mean(keepdims=True, axis=(1, 2, 3)) weight_std = ((weight - weight_mean) ** 2).mean(keepdims=True, axis=(1, 2, 3)) ** 0.5 + 1e-10 weight[:] = (weight - weight_mean) / weight_std ```
{ "source": "jie311/reproductionSTDC-Seg", "score": 2 }
#### File: jie311/reproductionSTDC-Seg/evaluation.py ```python from logger import setup_logger from models.model_stages import BiSeNet from cityscapes import CityScapes import torch import torch.nn as nn from torch.utils.data import DataLoader import torch.nn.functional as F import torch.distributed as dist import os import os.path as osp import logging import time import numpy as np from tqdm import tqdm import math # miou算法 class MscEvalV0(object): def __init__(self, scale=0.5, ignore_label=255): self.ignore_label = ignore_label self.scale = scale # 传递net网络,dl数据集,n_classes种类 def __call__(self, net, dl, n_classes): ## evaluate # hist矩阵 hist = torch.zeros(n_classes, n_classes).cuda().detach() if dist.is_initialized() and dist.get_rank() != 0: diter = enumerate(dl) else: diter = enumerate(tqdm(dl)) for i, (imgs, label) in diter: # 这里在测试中莫名奇妙的包了一层1, N, _, H, W = label.shape label = label.squeeze(1).cuda() # 转换一下维度 # 如上述所言 size = label.size()[-2:] imgs = imgs.cuda() N, C, H, W = imgs.size() new_hw = [int(H * self.scale), int(W * self.scale)] imgs = F.interpolate(imgs, new_hw, mode='bilinear', align_corners=True) logits = net(imgs)[0] logits = F.interpolate(logits, size=size, mode='bilinear', align_corners=True) probs = torch.softmax(logits, dim=1) preds = torch.argmax(probs, dim=1) keep = label != self.ignore_label hist += torch.bincount( label[keep] * n_classes + preds[keep], minlength=n_classes ** 2 ).view(n_classes, n_classes).float() if dist.is_initialized(): dist.all_reduce(hist, dist.ReduceOp.SUM) ious = hist.diag() / (hist.sum(dim=0) + hist.sum(dim=1) - hist.diag()) miou = ious.mean() return miou.item() # 验证函数,dspth为根目录 def evaluatev0(respth='./pretrained', dspth='./data', backbone='CatNetSmall', scale=0.75, use_boundary_2=False, use_boundary_4=False, use_boundary_8=False, use_boundary_16=False, use_conv_last=False): print('scale', scale) print('use_boundary_2', use_boundary_2) print('use_boundary_4', use_boundary_4) print('use_boundary_8', use_boundary_8) print('use_boundary_16', use_boundary_16) ## dataset batchsize = 5 n_workers = 2 dsval = CityScapes(dspth, mode='val') dl = DataLoader(dsval, batch_size=batchsize, shuffle=False, num_workers=n_workers, drop_last=False) n_classes = 19 print("backbone:", backbone) net = BiSeNet(backbone=backbone, n_classes=n_classes, use_boundary_2=use_boundary_2, use_boundary_4=use_boundary_4, use_boundary_8=use_boundary_8, use_boundary_16=use_boundary_16, use_conv_last=use_conv_last) net.load_state_dict(torch.load(respth)) net.cuda() net.eval() with torch.no_grad(): single_scale = MscEvalV0(scale=scale) mIOU = single_scale(net, dl, 19) logger = logging.getLogger() logger.info('mIOU is: %s\n', mIOU) class MscEval(object): def __init__(self, model, dataloader, scales=[0.5, 0.75, 1, 1.25, 1.5, 1.75], n_classes=19, lb_ignore=255, cropsize=1024, flip=True, *args, **kwargs): self.scales = scales self.n_classes = n_classes self.lb_ignore = lb_ignore self.flip = flip self.cropsize = cropsize ## dataloader self.dl = dataloader self.net = model def pad_tensor(self, inten, size): N, C, H, W = inten.size() outten = torch.zeros(N, C, size[0], size[1]).cuda() outten.requires_grad = False margin_h, margin_w = size[0] - H, size[1] - W hst, hed = margin_h // 2, margin_h // 2 + H wst, wed = margin_w // 2, margin_w // 2 + W outten[:, :, hst:hed, wst:wed] = inten return outten, [hst, hed, wst, wed] def eval_chip(self, crop): with torch.no_grad(): out = self.net(crop)[0] prob = F.softmax(out, 1) if self.flip: crop = torch.flip(crop, dims=(3,)) out = self.net(crop)[0] out 
= torch.flip(out, dims=(3,)) prob += F.softmax(out, 1) prob = torch.exp(prob) return prob def crop_eval(self, im): cropsize = self.cropsize stride_rate = 5 / 6. N, C, H, W = im.size() long_size, short_size = (H, W) if H > W else (W, H) if long_size < cropsize: im, indices = self.pad_tensor(im, (cropsize, cropsize)) prob = self.eval_chip(im) prob = prob[:, :, indices[0]:indices[1], indices[2]:indices[3]] else: stride = math.ceil(cropsize * stride_rate) if short_size < cropsize: if H < W: im, indices = self.pad_tensor(im, (cropsize, W)) else: im, indices = self.pad_tensor(im, (H, cropsize)) N, C, H, W = im.size() n_x = math.ceil((W - cropsize) / stride) + 1 n_y = math.ceil((H - cropsize) / stride) + 1 prob = torch.zeros(N, self.n_classes, H, W).cuda() prob.requires_grad = False for iy in range(n_y): for ix in range(n_x): hed, wed = min(H, stride * iy + cropsize), min(W, stride * ix + cropsize) hst, wst = hed - cropsize, wed - cropsize chip = im[:, :, hst:hed, wst:wed] prob_chip = self.eval_chip(chip) prob[:, :, hst:hed, wst:wed] += prob_chip if short_size < cropsize: prob = prob[:, :, indices[0]:indices[1], indices[2]:indices[3]] return prob def scale_crop_eval(self, im, scale): N, C, H, W = im.size() new_hw = [int(H * scale), int(W * scale)] im = F.interpolate(im, new_hw, mode='bilinear', align_corners=True) prob = self.crop_eval(im) prob = F.interpolate(prob, (H, W), mode='bilinear', align_corners=True) return prob def compute_hist(self, pred, lb): n_classes = self.n_classes ignore_idx = self.lb_ignore keep = np.logical_not(lb == ignore_idx) merge = pred[keep] * n_classes + lb[keep] hist = np.bincount(merge, minlength=n_classes ** 2) hist = hist.reshape((n_classes, n_classes)) return hist def evaluate(self): ## evaluate n_classes = self.n_classes hist = np.zeros((n_classes, n_classes), dtype=np.float32) dloader = tqdm(self.dl) if dist.is_initialized() and not dist.get_rank() == 0: dloader = self.dl for i, (imgs, label) in enumerate(dloader): N, _, H, W = label.shape probs = torch.zeros((N, self.n_classes, H, W)) probs.requires_grad = False imgs = imgs.cuda() for sc in self.scales: # prob = self.scale_crop_eval(imgs, sc) prob = self.eval_chip(imgs) probs += prob.detach().cpu() probs = probs.data.numpy() preds = np.argmax(probs, axis=1) hist_once = self.compute_hist(preds, label.data.numpy().squeeze(1)) hist = hist + hist_once IOUs = np.diag(hist) / (np.sum(hist, axis=0) + np.sum(hist, axis=1) - np.diag(hist)) mIOU = np.mean(IOUs) return mIOU def evaluate(respth='./resv1_catnet/pths/', dspth='./data'): ## logger logger = logging.getLogger() ## model logger.info('\n') logger.info('====' * 20) logger.info('evaluating the model ...\n') logger.info('setup and restore model') n_classes = 19 net = BiSeNet(n_classes=n_classes) net.load_state_dict(torch.load(respth)) net.cuda() net.eval() ## dataset batchsize = 5 n_workers = 2 dsval = CityScapes(dspth, mode='val') dl = DataLoader(dsval, batch_size=batchsize, shuffle=False, num_workers=n_workers, drop_last=False) ## evaluator logger.info('compute the mIOU') evaluator = MscEval(net, dl, scales=[1], flip=False) ## eval mIOU = evaluator.evaluate() logger.info('mIOU is: {:.6f}'.format(mIOU)) if __name__ == "__main__": log_dir = 'evaluation_logs/' if not os.path.exists(log_dir): os.makedirs(log_dir) setup_logger(log_dir) # STDC1-Seg50 mIoU 0.7222 # evaluatev0('./checkpoints/STDC1-Seg/model_maxmIOU50.pth', dspth='./data', backbone='STDCNet813', scale=0.5, # use_boundary_2=False, use_boundary_4=False, use_boundary_8=True, use_boundary_16=False) # 
STDC1-Seg75 mIoU 0.7450 # evaluatev0('./checkpoints/STDC1-Seg/model_maxmIOU75.pth', dspth='./data', backbone='STDCNet813', scale=0.75, # use_boundary_2=False, use_boundary_4=False, use_boundary_8=True, use_boundary_16=False) # STDC2-Seg50 mIoU 0.7424 # evaluatev0('./checkpoints/STDC2-Seg/model_maxmIOU50.pth', dspth='./data', backbone='STDCNet1446', scale=0.5, # use_boundary_2=False, use_boundary_4=False, use_boundary_8=True, use_boundary_16=False) # STDC2-Seg75 mIoU 0.7704 # evaluatev0('./checkpoints/STDC2-Seg/model_maxmIOU75.pth', dspth='./data', backbone='STDCNet1446', scale=0.75, # use_boundary_2=False, use_boundary_4=False, use_boundary_8=True, use_boundary_16=False) # evaluate the re-trained model evaluatev0('./checkpoints/train_STDC2-Seg/pths/model_final.pth', dspth='./data', backbone='STDCNet1446', scale=0.75, use_boundary_2=False, use_boundary_4=False, use_boundary_8=True, use_boundary_16=False) ```
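For reference, a minimal self-contained sketch (not part of the repository; the toy label and prediction values are made up for illustration) of the bincount-based confusion matrix and mIoU computation that `MscEvalV0` performs:

```python
import torch

n_classes = 3
label = torch.tensor([0, 0, 1, 2, 2, 255])  # ground truth; 255 marks ignored pixels
pred = torch.tensor([0, 1, 1, 2, 0, 0])     # predicted classes

# the flat index gt * n_classes + pred counts each (gt, pred) pair once;
# reshaping the counts gives the n_classes x n_classes confusion matrix
keep = label != 255
hist = torch.bincount(
    label[keep] * n_classes + pred[keep],
    minlength=n_classes ** 2).view(n_classes, n_classes).float()

# per-class IoU = TP / (TP + FP + FN): the diagonal holds true positives,
# row sums are ground-truth counts, column sums are prediction counts
ious = hist.diag() / (hist.sum(dim=0) + hist.sum(dim=1) - hist.diag())
print(ious)         # tensor([0.3333, 0.5000, 0.5000])
print(ious.mean())  # tensor(0.4444)
```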
{ "source": "jie311/TraDeS", "score": 2 }
#### File: lib/model/losses.py ```python from __future__ import absolute_import from __future__ import division from __future__ import print_function import torch import torch.nn as nn from .utils import _tranpose_and_gather_feat, _nms, _topk, _tranpose_and_gather_feat_1d import torch.nn.functional as F from utils.image import draw_umich_gaussian def _slow_neg_loss(pred, gt): '''focal loss from CornerNet''' # keep the masks boolean: indexing a tensor with a float mask is an error in PyTorch pos_inds = gt.eq(1) neg_inds = gt.lt(1) neg_weights = torch.pow(1 - gt[neg_inds], 4) loss = 0 pos_pred = pred[pos_inds] neg_pred = pred[neg_inds] pos_loss = torch.log(pos_pred) * torch.pow(1 - pos_pred, 2) neg_loss = torch.log(1 - neg_pred) * torch.pow(neg_pred, 2) * neg_weights num_pos = pos_inds.float().sum() pos_loss = pos_loss.sum() neg_loss = neg_loss.sum() if pos_pred.nelement() == 0: loss = loss - neg_loss else: loss = loss - (pos_loss + neg_loss) / num_pos return loss def _neg_loss(pred, gt): ''' Reimplemented focal loss. Exactly the same as CornerNet. Runs faster and costs a little bit more memory Arguments: pred (batch x c x h x w) gt (batch x c x h x w) ''' pos_inds = gt.eq(1).float() neg_inds = gt.lt(1).float() neg_weights = torch.pow(1 - gt, 4) loss = 0 pos_loss = torch.log(pred) * torch.pow(1 - pred, 2) * pos_inds neg_loss = torch.log(1 - pred) * torch.pow(pred, 2) * neg_weights * neg_inds num_pos = pos_inds.float().sum() pos_loss = pos_loss.sum() neg_loss = neg_loss.sum() if num_pos == 0: loss = loss - neg_loss else: loss = loss - (pos_loss + neg_loss) / num_pos return loss def _only_neg_loss(pred, gt): gt = torch.pow(1 - gt, 4) neg_loss = torch.log(1 - pred) * torch.pow(pred, 2) * gt return neg_loss.sum() class FastFocalLoss(nn.Module): ''' Reimplemented focal loss, exactly the same as the CornerNet version. Faster and costs much less memory. 
''' def __init__(self, opt=None): super(FastFocalLoss, self).__init__() self.only_neg_loss = _only_neg_loss def forward(self, out, target, ind, mask, cat): ''' Arguments: out, target: B x C x H x W ind, mask: B x M cat (category id for peaks): B x M ''' # print(out.shape,mask.shape) neg_loss = self.only_neg_loss(out, target) pos_pred_pix = _tranpose_and_gather_feat(out, ind) # B x M x C pos_pred = pos_pred_pix.gather(2, cat.unsqueeze(2)) # B x M num_pos = mask.sum() pos_loss = torch.log(pos_pred) * torch.pow(1 - pos_pred, 2) * \ mask.unsqueeze(2) pos_loss = pos_loss.sum() if num_pos == 0: return - neg_loss return - (pos_loss + neg_loss) / num_pos class CostVolumeLoss1D(nn.Module): ''' L_CVA: Please refer to TraDeS for details ''' def __init__(self): super(CostVolumeLoss1D, self).__init__() self.only_neg_loss = _only_neg_loss def forward(self, out, target, ind, mask, cat): ''' Arguments: out, target: B x C x H ind, mask: B x M cat (category id for peaks): B x M ''' out = F.softmax(out * (1-target), dim=2) pos_pred_pix = _tranpose_and_gather_feat_1d(out, ind) # B x M x C pos_pred = pos_pred_pix.gather(2, cat.unsqueeze(2)) # B x M num_pos = mask.sum() pos_loss = torch.log(pos_pred) * torch.pow(1 - pos_pred, 2) * \ mask.unsqueeze(2) pos_loss = pos_loss.sum() if num_pos == 0: return - pos_loss return - (pos_loss) / num_pos def _reg_loss(regr, gt_regr, mask): ''' L1 regression loss Arguments: regr (batch x max_objects x dim) gt_regr (batch x max_objects x dim) mask (batch x max_objects) ''' num = mask.float().sum() mask = mask.unsqueeze(2).expand_as(gt_regr).float() regr = regr * mask gt_regr = gt_regr * mask regr_loss = nn.functional.smooth_l1_loss(regr, gt_regr, reduction='sum') regr_loss = regr_loss / (num + 1e-4) return regr_loss class RegWeightedL1Loss(nn.Module): def __init__(self): super(RegWeightedL1Loss, self).__init__() def forward(self, output, mask, ind, target): pred = _tranpose_and_gather_feat(output, ind) # loss = F.l1_loss(pred * mask, target * mask, reduction='elementwise_mean') loss = F.l1_loss(pred * mask, target * mask, reduction='sum') loss = loss / (mask.sum() + 1e-4) return loss class WeightedBCELoss(nn.Module): def __init__(self): super(WeightedBCELoss, self).__init__() self.bceloss = torch.nn.BCEWithLogitsLoss(reduction='none') def forward(self, output, mask, ind, target): # output: B x F x H x W # ind: B x M # mask: B x M x F # target: B x M x F pred = _tranpose_and_gather_feat(output, ind) # B x M x F loss = mask * self.bceloss(pred, target) loss = loss.sum() / (mask.sum() + 1e-4) return loss class BinRotLoss(nn.Module): def __init__(self): super(BinRotLoss, self).__init__() def forward(self, output, mask, ind, rotbin, rotres): pred = _tranpose_and_gather_feat(output, ind) loss = compute_rot_loss(pred, rotbin, rotres, mask) return loss def compute_res_loss(output, target): # 'elementwise_mean' was removed in PyTorch 1.0; 'mean' is the equivalent reduction return F.smooth_l1_loss(output, target, reduction='mean') def compute_bin_loss(output, target, mask): mask = mask.expand_as(output) output = output * mask.float() return F.cross_entropy(output, target, reduction='mean') def compute_rot_loss(output, target_bin, target_res, mask): # output: (B, 128, 8) [bin1_cls[0], bin1_cls[1], bin1_sin, bin1_cos, # bin2_cls[0], bin2_cls[1], bin2_sin, bin2_cos] # target_bin: (B, 128, 2) [bin1_cls, bin2_cls] # target_res: (B, 128, 2) [bin1_res, bin2_res] # mask: (B, 128, 1) output = output.view(-1, 8) target_bin = target_bin.view(-1, 2) target_res = target_res.view(-1, 2) mask = mask.view(-1, 1) loss_bin1 = compute_bin_loss(output[:, 0:2], 
target_bin[:, 0], mask) loss_bin2 = compute_bin_loss(output[:, 4:6], target_bin[:, 1], mask) loss_res = torch.zeros_like(loss_bin1) if target_bin[:, 0].nonzero().shape[0] > 0: idx1 = target_bin[:, 0].nonzero()[:, 0] valid_output1 = torch.index_select(output, 0, idx1.long()) valid_target_res1 = torch.index_select(target_res, 0, idx1.long()) loss_sin1 = compute_res_loss( valid_output1[:, 2], torch.sin(valid_target_res1[:, 0])) loss_cos1 = compute_res_loss( valid_output1[:, 3], torch.cos(valid_target_res1[:, 0])) loss_res += loss_sin1 + loss_cos1 if target_bin[:, 1].nonzero().shape[0] > 0: idx2 = target_bin[:, 1].nonzero()[:, 0] valid_output2 = torch.index_select(output, 0, idx2.long()) valid_target_res2 = torch.index_select(target_res, 0, idx2.long()) loss_sin2 = compute_res_loss( valid_output2[:, 6], torch.sin(valid_target_res2[:, 1])) loss_cos2 = compute_res_loss( valid_output2[:, 7], torch.cos(valid_target_res2[:, 1])) loss_res += loss_sin2 + loss_cos2 return loss_bin1 + loss_bin2 + loss_res def dice_loss(input, target): smooth = 1. iflat = input.contiguous().view(-1) tflat = target.contiguous().view(-1) intersection = (iflat * tflat).sum() return 1 - ((2. * intersection + smooth) /((iflat*iflat).sum() + (tflat*tflat).sum() + smooth)) class DiceLoss(nn.Module): def __init__(self,feat_channel): super(DiceLoss, self).__init__() self.feat_channel=feat_channel def forward(self, seg_feat, conv_weight, mask, ind, target, batch_num_obj): mask_loss=0. batch_size = seg_feat.size(0) weight = _tranpose_and_gather_feat(conv_weight, ind) # (2, 256, 169) h,w = seg_feat.size(-2),seg_feat.size(-1) x, y = ind % w, ind // w # integer division: ind / w would yield float row indices x_range = torch.arange(w).float().to(device=seg_feat.device) y_range = torch.arange(h).float().to(device=seg_feat.device) y_grid, x_grid = torch.meshgrid([y_range, x_range]) for i in range(batch_size): num_obj = batch_num_obj.detach().cpu().numpy() num_obj = int(num_obj[i]) conv1w,conv1b,conv2w,conv2b,conv3w,conv3b= \ torch.split(weight[i,:num_obj],[(self.feat_channel+2)*self.feat_channel,self.feat_channel, self.feat_channel**2,self.feat_channel, self.feat_channel,1],dim=-1) y_rel_coord = (y_grid[None,None] - y[i,:num_obj].unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).float())/128. x_rel_coord = (x_grid[None,None] - x[i,:num_obj].unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).float())/128. feat = seg_feat[i][None].repeat([num_obj,1,1,1]) feat = torch.cat([feat,x_rel_coord, y_rel_coord],dim=1).view(1,-1,h,w) conv1w=conv1w.contiguous().view(-1,self.feat_channel+2,1,1) conv1b=conv1b.contiguous().flatten() feat = F.conv2d(feat,conv1w,conv1b,groups=num_obj).relu() conv2w=conv2w.contiguous().view(-1,self.feat_channel,1,1) conv2b=conv2b.contiguous().flatten() feat = F.conv2d(feat,conv2w,conv2b,groups=num_obj).relu() conv3w=conv3w.contiguous().view(-1,self.feat_channel,1,1) conv3b=conv3b.contiguous().flatten() feat = F.conv2d(feat,conv3w,conv3b,groups=num_obj).sigmoid().squeeze() true_mask = mask[i,:num_obj,None,None].float() mask_loss+=dice_loss(feat*true_mask,target[i][:num_obj]*true_mask) return mask_loss/batch_size ``` #### File: eval/detection/utils.py ```python from typing import List, Optional def category_to_detection_name(category_name: str) -> Optional[str]: """ Default label mapping from nuScenes to nuScenes detection classes. Note that pedestrian does not include personal_mobility, stroller and wheelchair. :param category_name: Generic nuScenes class. :return: nuScenes detection class. 
""" detection_mapping = { 'movable_object.barrier': 'barrier', 'vehicle.bicycle': 'bicycle', 'vehicle.bus.bendy': 'bus', 'vehicle.bus.rigid': 'bus', 'vehicle.car': 'car', 'vehicle.construction': 'construction_vehicle', 'vehicle.motorcycle': 'motorcycle', 'human.pedestrian.adult': 'pedestrian', 'human.pedestrian.child': 'pedestrian', 'human.pedestrian.construction_worker': 'pedestrian', 'human.pedestrian.police_officer': 'pedestrian', 'movable_object.trafficcone': 'traffic_cone', 'vehicle.trailer': 'trailer', 'vehicle.truck': 'truck' } if category_name in detection_mapping: return detection_mapping[category_name] else: return None def detection_name_to_rel_attributes(detection_name: str) -> List[str]: """ Returns a list of relevant attributes for a given detection class. :param detection_name: The detection class. :return: List of relevant attributes. """ if detection_name in ['pedestrian']: rel_attributes = ['pedestrian.moving', 'pedestrian.sitting_lying_down', 'pedestrian.standing'] elif detection_name in ['bicycle', 'motorcycle']: rel_attributes = ['cycle.with_rider', 'cycle.without_rider'] elif detection_name in ['car', 'bus', 'construction_vehicle', 'trailer', 'truck']: rel_attributes = ['vehicle.moving', 'vehicle.parked', 'vehicle.stopped'] elif detection_name in ['barrier', 'traffic_cone']: # Classes without attributes: barrier, traffic_cone. rel_attributes = [] else: raise ValueError('Error: %s is not a valid detection class.' % detection_name) return rel_attributes ``` #### File: prediction/tests/test_dataclasses.py ```python import unittest import numpy as np from nuscenes.eval.prediction.data_classes import Prediction class TestPrediction(unittest.TestCase): def test(self): prediction = Prediction('instance', 'sample', np.ones((2, 2, 2)), np.zeros(2)) self.assertEqual(prediction.number_of_modes, 2) self.assertDictEqual(prediction.serialize(), {'instance': 'instance', 'sample': 'sample', 'prediction': [[[1, 1], [1, 1]], [[1, 1], [1, 1]]], 'probabilities': [0, 0]}) ``` #### File: nuscenes-devkit/map_expansion/arcline_path_utils.py ```python import math from typing import Dict, Any, Tuple, List import numpy as np # (x, y, yaw) in global frame Pose = Tuple[float, float, float] ArcLinePath = Dict[str, Any] def principal_value(angle_in_radians: float) -> float: """ Ensures the angle is within [-pi, pi). :param angle_in_radians: Angle in radians. :return: Scaled angle in radians. """ interval_min = -math.pi two_pi = 2 * math.pi scaled_angle = (angle_in_radians - interval_min) % two_pi + interval_min return scaled_angle def compute_segment_sign(arcline_path: ArcLinePath) -> Tuple[int, int, int]: """ Compute the sign of an arcline path based on its shape. :param arcline_path: arcline path record. :return: Tuple of signs for all three parts of the path. 0 if straight, -1 if right, 1 if left. """ shape = arcline_path['shape'] segment_sign = [0, 0, 0] if shape in ("LRL", "LSL", "LSR"): segment_sign[0] = 1 else: segment_sign[0] = -1 if shape == "RLR": segment_sign[1] = 1 elif shape == "LRL": segment_sign[1] = -1 else: segment_sign[1] = 0 if shape in ("LRL", "LSL", "RSL"): segment_sign[2] = 1 else: segment_sign[2] = -1 return segment_sign[0], segment_sign[1], segment_sign[2] def get_transformation_at_step(pose: Pose, step: float) -> Pose: """ Get the affine transformation at s meters along the path. :param pose: Pose represented as tuple (x, y, yaw). :param step: Length along the arcline path in range (0, length_of_arcline_path]. :return: Transformation represented as pose tuple. 
""" theta = pose[2] * step ctheta = math.cos(theta) stheta = math.sin(theta) if abs(pose[2]) < 1e-6: return pose[0] * step, pose[1] * step, theta else: new_x = (pose[1] * (ctheta - 1.0) + pose[0] * stheta) / pose[2] new_y = (pose[0] * (1.0 - ctheta) + pose[1] * stheta) / pose[2] return new_x, new_y, theta def apply_affine_transformation(pose: Pose, transformation: Pose) -> Pose: """ Apply affine transformation to pose. :param pose: Starting pose. :param transformation: Affine transformation represented as a pose tuple. :return: Pose tuple - the result of applying the transformation to the starting pose. """ new_x = math.cos(pose[2]) * transformation[0] - math.sin(pose[2]) * transformation[1] + pose[0] new_y = math.sin(pose[2]) * transformation[0] + math.cos(pose[2]) * transformation[1] + pose[1] new_yaw = principal_value(pose[2] + transformation[2]) return new_x, new_y, new_yaw def _get_lie_algebra(segment_sign: Tuple[int, int, int], radius: float) -> List[Tuple[float, float, float]]: """ Gets the Lie algebra for an arcline path. :param segment_sign: Tuple of signs for each segment in the arcline path. :param radius: Radius of curvature of the arcline path. :return: List of lie algebra poses. """ return [(1.0, 0.0, segment_sign[0] / radius), (1.0, 0.0, segment_sign[1] / radius), (1.0, 0.0, segment_sign[2] / radius)] def pose_at_length(arcline_path: ArcLinePath, pos: float) -> Tuple[float, float, float]: """ Retrieves pose at l meters along the arcline path. :param arcline_path: Arcline path object. :param pos: Get the pose this many meters along the path. :return: Pose tuple. """ path_length = sum(arcline_path['segment_length']) assert 1e-6 <= pos pos = max(0.0, min(pos, path_length)) result = arcline_path['start_pose'] segment_sign = compute_segment_sign(arcline_path) break_points = _get_lie_algebra(segment_sign, arcline_path['radius']) for i in range(len(break_points)): length = arcline_path['segment_length'][i] if pos <= length: transformation = get_transformation_at_step(break_points[i], pos) result = apply_affine_transformation(result, transformation) break transformation = get_transformation_at_step(break_points[i], length) result = apply_affine_transformation(result, transformation) pos -= length return result def discretize(arcline_path: ArcLinePath, resolution_meters: float) -> List[Pose]: """ Discretize an arcline path. :param arcline_path: Arcline path record. :param resolution_meters: How finely to discretize the path. :return: List of pose tuples. 
""" path_length = sum(arcline_path['segment_length']) radius = arcline_path['radius'] n_points = int(max(math.ceil(path_length / resolution_meters) + 1.5, 2)) resolution_meters = path_length / (n_points - 1) discretization = [] cumulative_length = [arcline_path['segment_length'][0], arcline_path['segment_length'][0] + arcline_path['segment_length'][1], path_length + resolution_meters] segment_sign = compute_segment_sign(arcline_path) poses = _get_lie_algebra(segment_sign, radius) temp_pose = arcline_path['start_pose'] g_i = 0 g_s = 0.0 for step in range(n_points): step_along_path = step * resolution_meters if step_along_path > cumulative_length[g_i]: temp_pose = pose_at_length(arcline_path, step_along_path) g_s = step_along_path g_i += 1 transformation = get_transformation_at_step(poses[g_i], step_along_path - g_s) new_pose = apply_affine_transformation(temp_pose, transformation) discretization.append(new_pose) return discretization def discretize_lane(lane: List[ArcLinePath], resolution_meters: float) -> List[Pose]: """ Discretizes a lane and returns list of all the poses alone the lane. :param lane: Lanes are represented as a list of arcline paths. :param resolution_meters: How finely to discretize the lane. Smaller values ensure curved lanes are properly represented. :return: List of pose tuples along the lane. """ pose_list = [] for path in lane: poses = discretize(path, resolution_meters) for pose in poses: pose_list.append(pose) return pose_list def length_of_lane(lane: List[ArcLinePath]) -> float: """ Calculates the length of a lane in meters. :param lane: Lane. :return: Length of lane in meters. """ # Meters return sum(sum(path['segment_length']) for path in lane) def project_pose_to_lane(pose: Pose, lane: List[ArcLinePath], resolution_meters: float = 0.5) -> Tuple[Pose, float]: """ Find the closest pose on a lane to a query pose and additionally return the distance along the lane for this pose. Note that this function does not take the heading of the query pose into account. :param pose: Query pose. :param lane: Will find the closest pose on this lane. :param resolution_meters: How finely to discretize the lane. :return: Tuple of the closest pose and the distance along the lane """ discretized_lane = discretize_lane(lane, resolution_meters=resolution_meters) xy_points = np.array(discretized_lane)[:, :2] closest_pose_index = np.linalg.norm(xy_points - pose[:2], axis=1).argmin() closest_pose = discretized_lane[closest_pose_index] distance_along_lane = closest_pose_index * 0.5 return closest_pose, distance_along_lane def _find_index(distance_along_lane: float, lengths: List[float]) -> int: """ Helper function for finding of path along lane corresponding to the distance_along_lane. :param distance_along_lane: Distance along the lane (in meters). :param lengths: Cumulative distance at each end point along the paths in the lane. :return: Index of path. """ if len(lengths) == 1: return 0 else: return min(index for index, length in enumerate(lengths) if distance_along_lane <= length) def get_curvature_at_distance_along_lane(distance_along_lane: float, lane: List[ArcLinePath]) -> float: """ Computes the unsigned curvature (1 / meters) at a distance along a lane. :param distance_along_lane: Distance along the lane to calculate the curvature at. :param lane: Lane to query. :return: Curvature, always non negative. 
""" total_length_at_segments = np.cumsum([sum(path['segment_length']) for path in lane]) segment_index = _find_index(distance_along_lane, total_length_at_segments) path = lane[segment_index] path_length = path['segment_length'] if segment_index > 0: distance_along_path = distance_along_lane - total_length_at_segments[segment_index - 1] else: distance_along_path = distance_along_lane segment_index = _find_index(distance_along_path, np.cumsum(path_length)) segment_shape = path['shape'][segment_index] # Straight lanes have no curvature if segment_shape == 'S': return 0 else: return 1 / path['radius'] ``` #### File: prediction/input_representation/agents.py ```python import colorsys from typing import Any, Dict, List, Tuple, Callable import cv2 import numpy as np from pyquaternion import Quaternion from nuscenes.prediction import PredictHelper from nuscenes.prediction.helper import quaternion_yaw from nuscenes.prediction.input_representation.interface import AgentRepresentation from nuscenes.prediction.input_representation.utils import convert_to_pixel_coords, get_crops, get_rotation_matrix History = Dict[str, List[Dict[str, Any]]] def pixels_to_box_corners(row_pixel: int, column_pixel: int, length_in_pixels: float, width_in_pixels: float, yaw_in_radians: float) -> np.ndarray: """ Computes four corners of 2d bounding box for agent. The coordinates of the box are in pixels. :param row_pixel: Row pixel of the agent. :param column_pixel: Column pixel of the agent. :param length_in_pixels: Length of the agent. :param width_in_pixels: Width of the agent. :param yaw_in_radians: Yaw of the agent (global coordinates). :return: numpy array representing the four corners of the agent. """ # cv2 has the convention where they flip rows and columns so it matches # the convention of x and y on a coordinate plane # Also, a positive angle is a clockwise rotation as opposed to counterclockwise # so that is why we negate the rotation angle coord_tuple = ((column_pixel, row_pixel), (length_in_pixels, width_in_pixels), -yaw_in_radians * 180 / np.pi) box = cv2.boxPoints(coord_tuple) return box def get_track_box(annotation: Dict[str, Any], center_coordinates: Tuple[float, float], center_pixels: Tuple[float, float], resolution: float = 0.1) -> np.ndarray: """ Get four corners of bounding box for agent in pixels. :param annotation: The annotation record of the agent. :param center_coordinates: (x, y) coordinates in global frame of the center of the image. :param center_pixels: (row_index, column_index) location of the center of the image in pixel coordinates. :param resolution: Resolution pixels/meter of the image. """ assert resolution > 0 location = annotation['translation'][:2] yaw_in_radians = quaternion_yaw(Quaternion(annotation['rotation'])) row_pixel, column_pixel = convert_to_pixel_coords(location, center_coordinates, center_pixels, resolution) width = annotation['size'][0] / resolution length = annotation['size'][1] / resolution # Width and length are switched here so that we can draw them along the x-axis as # opposed to the y. This makes rotation easier. return pixels_to_box_corners(row_pixel, column_pixel, length, width, yaw_in_radians) def reverse_history(history: History) -> History: """ Reverse history so that most distant observations are first. We do this because we want to draw more recent bounding boxes on top of older ones. :param history: result of get_past_for_sample PredictHelper method. :return: History with the values reversed. 
""" return {token: anns[::-1] for token, anns in history.items()} def add_present_time_to_history(current_time: List[Dict[str, Any]], history: History) -> History: """ Adds the sample annotation records from the current time to the history object. :param current_time: List of sample annotation records from the current time. Result of get_annotations_for_sample method of PredictHelper. :param history: Result of get_past_for_sample method of PredictHelper. :return: History with values from current_time appended. """ for annotation in current_time: token = annotation['instance_token'] if token in history: # We append because we've reversed the history history[token].append(annotation) else: history[token] = [annotation] return history def fade_color(color: Tuple[int, int, int], step: int, total_number_of_steps: int) -> Tuple[int, int, int]: """ Fades a color so that past observations are darker in the image. :param color: Tuple of ints describing an RGB color. :param step: The current time step. :param total_number_of_steps: The total number of time steps the agent has in the image. :return: Tuple representing faded rgb color. """ LOWEST_VALUE = 0.4 if step == total_number_of_steps: return color hsv_color = colorsys.rgb_to_hsv(*color) increment = (float(hsv_color[2])/255. - LOWEST_VALUE) / total_number_of_steps new_value = LOWEST_VALUE + step * increment new_rgb = colorsys.hsv_to_rgb(float(hsv_color[0]), float(hsv_color[1]), new_value * 255.) return new_rgb def default_colors(category_name: str) -> Tuple[int, int, int]: """ Maps a category name to an rgb color (without fading). :param category_name: Name of object category for the annotation. :return: Tuple representing rgb color. """ if 'vehicle' in category_name: return 255, 255, 0 # yellow elif 'object' in category_name: return 204, 0, 204 # violet elif 'human' in category_name or 'animal' in category_name: return 255, 153, 51 # orange else: raise ValueError(f"Cannot map {category_name} to a color.") def draw_agent_boxes(center_agent_annotation: Dict[str, Any], center_agent_pixels: Tuple[float, float], agent_history: History, base_image: np.ndarray, get_color: Callable[[str], Tuple[int, int, int]], resolution: float = 0.1) -> None: """ Draws past sequence of agent boxes on the image. :param center_agent_annotation: Annotation record for the agent that is in the center of the image. :param center_agent_pixels: Pixel location of the agent in the center of the image. :param agent_history: History for all agents in the scene. :param base_image: Image to draw the agents in. :param get_color: Mapping from category_name to RGB tuple. :param resolution: Size of the image in pixels / meter. :return: None. """ agent_x, agent_y = center_agent_annotation['translation'][:2] for instance_token, annotations in agent_history.items(): num_points = len(annotations) for i, annotation in enumerate(annotations): box = get_track_box(annotation, (agent_x, agent_y), center_agent_pixels, resolution) if instance_token == center_agent_annotation['instance_token']: color = (255, 0, 0) else: color = get_color(annotation['category_name']) # Don't fade the colors if there is no history if num_points > 1: color = fade_color(color, i, num_points - 1) cv2.fillPoly(base_image, pts=[np.int0(box)], color=color) class AgentBoxesWithFadedHistory(AgentRepresentation): """ Represents the past sequence of agent states as a three-channel image with faded 2d boxes. 
""" def __init__(self, helper: PredictHelper, seconds_of_history: float = 2, frequency_in_hz: float = 2, resolution: float = 0.1, # meters / pixel meters_ahead: float = 40, meters_behind: float = 10, meters_left: float = 25, meters_right: float = 25, color_mapping: Callable[[str], Tuple[int, int, int]] = None): self.helper = helper self.seconds_of_history = seconds_of_history self.frequency_in_hz = frequency_in_hz if not resolution > 0: raise ValueError(f"Resolution must be positive. Received {resolution}.") self.resolution = resolution self.meters_ahead = meters_ahead self.meters_behind = meters_behind self.meters_left = meters_left self.meters_right = meters_right if not color_mapping: color_mapping = default_colors self.color_mapping = color_mapping def make_representation(self, instance_token: str, sample_token: str) -> np.ndarray: """ Draws agent boxes with faded history into a black background. :param instance_token: Instance token. :param sample_token: Sample token. :return: np.ndarray representing a 3 channel image. """ # Taking radius around track before to ensure all actors are in image buffer = max([self.meters_ahead, self.meters_behind, self.meters_left, self.meters_right]) * 2 image_side_length = int(buffer/self.resolution) # We will center the track in the image central_track_pixels = (image_side_length / 2, image_side_length / 2) base_image = np.zeros((image_side_length, image_side_length, 3)) history = self.helper.get_past_for_sample(sample_token, self.seconds_of_history, in_agent_frame=False, just_xy=False) history = reverse_history(history) present_time = self.helper.get_annotations_for_sample(sample_token) history = add_present_time_to_history(present_time, history) center_agent_annotation = self.helper.get_sample_annotation(instance_token, sample_token) draw_agent_boxes(center_agent_annotation, central_track_pixels, history, base_image, resolution=self.resolution, get_color=self.color_mapping) center_agent_yaw = quaternion_yaw(Quaternion(center_agent_annotation['rotation'])) rotation_mat = get_rotation_matrix(base_image.shape, center_agent_yaw) rotated_image = cv2.warpAffine(base_image, rotation_mat, (base_image.shape[1], base_image.shape[0])) row_crop, col_crop = get_crops(self.meters_ahead, self.meters_behind, self.meters_left, self.meters_right, self.resolution, image_side_length) return rotated_image[row_crop, col_crop].astype('uint8') ``` #### File: prediction/models/physics.py ```python import abc from typing import Tuple import numpy as np from pyquaternion import Quaternion from nuscenes.eval.common.utils import quaternion_yaw from nuscenes.eval.prediction.data_classes import Prediction from nuscenes.prediction import PredictHelper KinematicsData = Tuple[float, float, float, float, float, float, float, float, float, float] def _kinematics_from_tokens(helper: PredictHelper, instance: str, sample: str) -> KinematicsData: """ Returns the 2D position, velocity and acceleration vectors from the given track records, along with the speed, yaw rate, (scalar) acceleration (magnitude), and heading. :param helper: Instance of PredictHelper. :instance: Token of instance. :sample: Token of sample. :return: KinematicsData. 
""" annotation = helper.get_sample_annotation(instance, sample) x, y, _ = annotation['translation'] yaw = quaternion_yaw(Quaternion(annotation['rotation'])) velocity = helper.get_velocity_for_agent(instance, sample) acceleration = helper.get_acceleration_for_agent(instance, sample) yaw_rate = helper.get_heading_change_rate_for_agent(instance, sample) if np.isnan(velocity): velocity = 0.0 if np.isnan(acceleration): acceleration = 0.0 if np.isnan(yaw_rate): yaw_rate = 0.0 hx, hy = np.cos(yaw), np.sin(yaw) vx, vy = velocity * hx, velocity * hy ax, ay = acceleration * hx, acceleration * hy return x, y, vx, vy, ax, ay, velocity, yaw_rate, acceleration, yaw def _constant_velocity_heading_from_kinematics(kinematics_data: KinematicsData, sec_from_now: float, sampled_at: int) -> np.ndarray: """ Computes a constant velocity baseline for given kinematics data, time window and frequency. :param kinematics_data: KinematicsData for agent. :param sec_from_now: How many future seconds to use. :param sampled_at: Number of predictions to make per second. """ x, y, vx, vy, _, _, _, _, _, _ = kinematics_data preds = [] time_step = 1.0 / sampled_at for time in np.arange(time_step, sec_from_now + time_step, time_step): preds.append((x + time * vx, y + time * vy)) return np.array(preds) def _constant_acceleration_and_heading(kinematics_data: KinematicsData, sec_from_now: float, sampled_at: int) -> np.ndarray: """ Computes a baseline prediction for the given time window and frequency, under the assumption that the acceleration and heading are constant. :param kinematics_data: KinematicsData for agent. :param sec_from_now: How many future seconds to use. :param sampled_at: Number of predictions to make per second. """ x, y, vx, vy, ax, ay, _, _, _, _ = kinematics_data preds = [] time_step = 1.0 / sampled_at for time in np.arange(time_step, sec_from_now + time_step, time_step): half_time_squared = 0.5 * time * time preds.append((x + time * vx + half_time_squared * ax, y + time * vy + half_time_squared * ay)) return np.array(preds) def _constant_speed_and_yaw_rate(kinematics_data: KinematicsData, sec_from_now: float, sampled_at: int) -> np.ndarray: """ Computes a baseline prediction for the given time window and frequency, under the assumption that the (scalar) speed and yaw rate are constant. :param kinematics_data: KinematicsData for agent. :param sec_from_now: How many future seconds to use. :param sampled_at: Number of predictions to make per second. """ x, y, vx, vy, _, _, speed, yaw_rate, _, yaw = kinematics_data preds = [] time_step = 1.0 / sampled_at distance_step = time_step * speed yaw_step = time_step * yaw_rate for _ in np.arange(time_step, sec_from_now + time_step, time_step): x += distance_step * np.cos(yaw) y += distance_step * np.sin(yaw) preds.append((x, y)) yaw += yaw_step return np.array(preds) def _constant_magnitude_accel_and_yaw_rate(kinematics_data: KinematicsData, sec_from_now: float, sampled_at: int) -> np.ndarray: """ Computes a baseline prediction for the given time window and frequency, under the assumption that the rates of change of speed and yaw are constant. :param kinematics_data: KinematicsData for agent. :param sec_from_now: How many future seconds to use. :param sampled_at: Number of predictions to make per second. 
""" x, y, vx, vy, _, _, speed, yaw_rate, accel, yaw = kinematics_data preds = [] time_step = 1.0 / sampled_at speed_step = time_step * accel yaw_step = time_step * yaw_rate for _ in np.arange(time_step, sec_from_now + time_step, time_step): distance_step = time_step * speed x += distance_step * np.cos(yaw) y += distance_step * np.sin(yaw) preds.append((x, y)) speed += speed_step yaw += yaw_step return np.array(preds) class Baseline(abc.ABC): def __init__(self, sec_from_now: float, helper: PredictHelper): """ Inits Baseline. :param sec_from_now: How many seconds into the future to make the prediction. :param helper: Instance of PredictHelper. """ assert sec_from_now % 0.5 == 0, f"Parameter sec from now must be divisible by 0.5. Received {sec_from_now}." self.helper = helper self.sec_from_now = sec_from_now self.sampled_at = 2 # 2 Hz between annotations. @abc.abstractmethod def __call__(self, token: str) -> Prediction: pass class ConstantVelocityHeading(Baseline): """ Makes predictions according to constant velocity and heading model. """ def __call__(self, token: str) -> Prediction: """ Makes prediction. :param token: string of format {instance_token}_{sample_token}. """ instance, sample = token.split("_") kinematics = _kinematics_from_tokens(self.helper, instance, sample) cv_heading = _constant_velocity_heading_from_kinematics(kinematics, self.sec_from_now, self.sampled_at) # Need the prediction to have 2d. return Prediction(instance, sample, np.expand_dims(cv_heading, 0), np.array([1])) class PhysicsOracle(Baseline): """ Makes several physics-based predictions and picks the one closest to the ground truth. """ def __call__(self, token) -> Prediction: """ Makes prediction. :param token: string of format {instance_token}_{sample_token}. """ instance, sample = token.split("_") kinematics = _kinematics_from_tokens(self.helper, instance, sample) ground_truth = self.helper.get_future_for_agent(instance, sample, self.sec_from_now, in_agent_frame=False) assert ground_truth.shape[0] == int(self.sec_from_now * self.sampled_at), ("Ground truth does not correspond " f"to {self.sec_from_now} seconds.") path_funs = [ _constant_acceleration_and_heading, _constant_magnitude_accel_and_yaw_rate, _constant_speed_and_yaw_rate, _constant_velocity_heading_from_kinematics ] paths = [path_fun(kinematics, self.sec_from_now, self.sampled_at) for path_fun in path_funs] # Select the one with the least l2 error, averaged (or equivalently, summed) over all # points of the path. This is (proportional to) the Frobenius norm of the difference # between the path (as an n x 2 matrix) and the ground truth. oracle = sorted(paths, key=lambda path: np.linalg.norm(np.array(path) - ground_truth, ord="fro"))[0] # Need the prediction to have 2d. 
return Prediction(instance, sample, np.expand_dims(oracle, 0), np.array([1])) ``` #### File: prediction/tests/test_backbone.py ```python import unittest import torch from torchvision.models.resnet import BasicBlock, Bottleneck from nuscenes.prediction.models.backbone import ResNetBackbone, MobileNetBackbone class TestBackBones(unittest.TestCase): def count_layers(self, model): if isinstance(model[4][0], BasicBlock): n_convs = 2 elif isinstance(model[4][0], Bottleneck): n_convs = 3 else: raise ValueError("Backbone layer block not supported!") return sum([len(model[i]) for i in range(4, 8)]) * n_convs + 2 def test_resnet(self): rn_18 = ResNetBackbone('resnet18') rn_34 = ResNetBackbone('resnet34') rn_50 = ResNetBackbone('resnet50') rn_101 = ResNetBackbone('resnet101') rn_152 = ResNetBackbone('resnet152') tensor = torch.ones((1, 3, 100, 100)) self.assertEqual(rn_18(tensor).shape[1], 512) self.assertEqual(rn_34(tensor).shape[1], 512) self.assertEqual(rn_50(tensor).shape[1], 2048) self.assertEqual(rn_101(tensor).shape[1], 2048) self.assertAlmostEqual(rn_152(tensor).shape[1], 2048) self.assertEqual(self.count_layers(list(rn_18.backbone.children())), 18) self.assertEqual(self.count_layers(list(rn_34.backbone.children())), 34) self.assertEqual(self.count_layers(list(rn_50.backbone.children())), 50) self.assertEqual(self.count_layers(list(rn_101.backbone.children())), 101) self.assertEqual(self.count_layers(list(rn_152.backbone.children())), 152) with self.assertRaises(ValueError): ResNetBackbone('resnet51') def test_mobilenet(self): mobilenet = MobileNetBackbone('mobilenet_v2') tensor = torch.ones((1, 3, 100, 100)) self.assertEqual(mobilenet(tensor).shape[1], 1280) ``` #### File: prediction/tests/test_mtp_loss.py ```python import math import unittest import torch from nuscenes.prediction.models import mtp class TestMTPLoss(unittest.TestCase): """ Test each component of MTPLoss as well as the __call__ method. """ def test_get_trajectories_and_modes(self): loss_n_modes_5 = mtp.MTPLoss(5, 0, 0) loss_n_modes_1 = mtp.MTPLoss(1, 0, 0) xy_pred = torch.arange(60).view(1, -1).repeat(1, 5).view(-1, 60) mode_pred = torch.arange(5).view(1, -1) prediction_bs_1 = torch.cat([xy_pred.reshape(1, -1), mode_pred], dim=1) prediction_bs_2 = prediction_bs_1.repeat(2, 1) # Testing many modes with batch size 1. traj, modes = loss_n_modes_5._get_trajectory_and_modes(prediction_bs_1) self.assertTrue(torch.allclose(traj, xy_pred.unsqueeze(0).reshape(1, 5, 30, 2))) self.assertTrue(torch.allclose(modes, mode_pred)) # Testing many modes with batch size > 1. traj, modes = loss_n_modes_5._get_trajectory_and_modes(prediction_bs_2) self.assertTrue(torch.allclose(traj, xy_pred.repeat(1, 2).unsqueeze(0).reshape(2, 5, 30, 2))) self.assertTrue(torch.allclose(modes, mode_pred.repeat(2, 1))) xy_pred = torch.arange(60).view(1, -1).repeat(1, 1).view(-1, 60) mode_pred = torch.arange(1).view(1, -1) prediction_bs_1 = torch.cat([xy_pred.reshape(1, -1), mode_pred], dim=1) prediction_bs_2 = prediction_bs_1.repeat(2, 1) # Testing one mode with batch size 1. traj, modes = loss_n_modes_1._get_trajectory_and_modes(prediction_bs_1) self.assertTrue(torch.allclose(traj, xy_pred.unsqueeze(0).reshape(1, 1, 30, 2))) self.assertTrue(torch.allclose(modes, mode_pred)) # Testing one mode with batch size > 1. 
traj, modes = loss_n_modes_1._get_trajectory_and_modes(prediction_bs_2) self.assertTrue(torch.allclose(traj, xy_pred.repeat(1, 2).unsqueeze(0).reshape(2, 1, 30, 2))) self.assertTrue(torch.allclose(modes, mode_pred.repeat(2, 1))) def test_angle_between_trajectories(self): def make_trajectory(last_point): traj = torch.zeros((12, 2)) traj[-1] = torch.Tensor(last_point) return traj loss = mtp.MTPLoss(0, 0, 0) # test angle is 0. self.assertEqual(loss._angle_between(make_trajectory([0, 0]), make_trajectory([0, 0])), 0.) self.assertEqual(loss._angle_between(make_trajectory([15, 15]), make_trajectory([15, 15])), 0.) # test angle is 15. self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]), make_trajectory([math.sqrt(3)/2, 0.5])), 15., places=4) # test angle is 30. self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]), make_trajectory([math.sqrt(3)/2, 0.5])), 30., places=4) # test angle is 45. self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]), make_trajectory([0, 1])), 45., places=4) # test angle is 90. self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 1]), make_trajectory([-1, 1])), 90., places=4) self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]), make_trajectory([0, 1])), 90., places=4) # test angle is 180. self.assertAlmostEqual(loss._angle_between(make_trajectory([1, 0]), make_trajectory([-1, 0])), 180., places=4) self.assertAlmostEqual(loss._angle_between(make_trajectory([0, 1]), make_trajectory([0, -1])), 180., places=4) self.assertAlmostEqual(loss._angle_between(make_trajectory([3, 1]), make_trajectory([-3, -1])), 180., places=4) def test_compute_best_mode_nothing_below_threshold(self): angles = [(90, 0), (80, 1), (70, 2)] target = None traj = None loss = mtp.MTPLoss(3, 0, 5) self.assertTrue(loss._compute_best_mode(angles, target, traj) in {0, 1, 2}) loss = mtp.MTPLoss(3, 0, 65) self.assertTrue(loss._compute_best_mode(angles, target, traj) in {0, 1, 2}) def test_compute_best_mode_only_one_below_threshold(self): angles = [(30, 1), (3, 0), (25, 2)] target = torch.ones((1, 6, 2)) trajectory = torch.zeros((3, 6, 2)) loss = mtp.MTPLoss(3, 0, 5) self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0) def test_compute_best_mode_multiple_below_threshold(self): angles = [(2, 2), (4, 1), (10, 0)] target = torch.ones((1, 6, 2)) trajectory = torch.zeros((3, 6, 2)) trajectory[1] = 1 loss = mtp.MTPLoss(3, 0, 5) self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 1) def test_compute_best_mode_only_one_mode(self): angles = [(25, 0)] target = torch.ones((1, 6, 2)) trajectory = torch.zeros((1, 6, 2)) loss = mtp.MTPLoss(1, 0, 5) self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0) trajectory[0] = 1 self.assertEqual(loss._compute_best_mode(angles, target, trajectory), 0) def test_loss_single_mode(self): targets = torch.zeros((16, 1, 30, 2)) targets[:, :, :, 1] = torch.arange(start=0, end=3, step=0.1) predictions = torch.ones((16, 61)) predictions[:, :60] = targets[0, 0, :, :].reshape(-1, 60) predictions[:, 60] = 1/10 loss = mtp.MTPLoss(1, 1, angle_threshold_degrees=20) # Only regression loss in single mode case. self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), 0, places=4) # Now the best mode differs by 1 from the ground truth. # Smooth l1 loss subtracts 0.5 from l1 norm if diff >= 1. 
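# Illustrative aside (not part of the original test): smooth L1 equals 0.5 * x**2 for |x| < 1 and |x| - 0.5 otherwise, so a uniform offset of exactly 1 on every regressed coordinate contributes 1 - 0.5 = 0.5 per element, making the mean regression loss below exactly 0.5. 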
predictions[:, :60] += 1 self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), 0.5, places=4) # In this case, one element has perfect regression, the others are off by 1. predictions[1, :60] -= 1 self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), (15/16)*0.5, places=4) def test_loss_many_modes(self): targets = torch.zeros((16, 1, 30, 2)) targets[:, :, :, 1] = torch.arange(start=0, end=3, step=0.1) predictions = torch.ones((16, 610)) predictions[:, 540:600] = targets[0, 0, :, :].reshape(-1, 60) predictions[:, -10:] = 1/10 loss = mtp.MTPLoss(10, 1, angle_threshold_degrees=20) # Since one mode exactly matches gt, loss should only be classification error. self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), -math.log(1/10), places=4) # Now the best mode differs by 1 from the ground truth. # Smooth l1 loss subtracts 0.5 from l1 norm if diff >= 1. predictions[:, 540:600] += 1 self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), -math.log(1/10) + 0.5, places=4) # In this case, one element has perfect regression, the others are off by 1. predictions[1, 540:600] -= 1 self.assertAlmostEqual(float(loss(predictions, targets).detach().numpy()), -math.log(1/10) + (15/16)*0.5, places=4) ``` #### File: nuscenes-devkit/tests/test_nuscenes.py ```python import os import unittest from nuscenes import NuScenes class TestNuScenes(unittest.TestCase): def test_load(self): """ Loads up NuScenes. This is intended to simply run the NuScenes class to check for import errors, typos, etc. """ assert 'NUSCENES' in os.environ, 'Set NUSCENES env. variable to enable tests.' nusc = NuScenes(version='v1.0-mini', dataroot=os.environ['NUSCENES'], verbose=False) # Trivial assert statement self.assertEqual(nusc.table_root, os.path.join(os.environ['NUSCENES'], 'v1.0-mini')) if __name__ == '__main__': unittest.main() ```
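As a quick illustration of the physics baselines defined earlier in this entry, here is a minimal sketch (toy kinematics values assumed for illustration; no nuScenes data or devkit required) of the constant velocity and heading rollout that `_constant_velocity_heading_from_kinematics` performs:

```python
import numpy as np

# assumed toy kinematics: position (1, 2) m, velocity (3, 0) m/s
x, y, vx, vy = 1.0, 2.0, 3.0, 0.0
sec_from_now, sampled_at = 6.0, 2  # 2 Hz sampling, as in the Baseline class

time_step = 1.0 / sampled_at
preds = np.array([(x + t * vx, y + t * vy)
                  for t in np.arange(time_step, sec_from_now + time_step, time_step)])

print(preds.shape)  # (12, 2): one (x, y) point every 0.5 s
print(preds[0])     # [2.5 2. ] after the first half second
print(preds[-1])    # [19.  2.] after the full 6 s
```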
{ "source": "jie311/vedadet", "score": 2 }
#### File: trainval/tinaface/filter_widerface_val.py ```python import os import argparse import tqdm import numpy as np import xml.etree.ElementTree as ET from scipy.io import loadmat def parse_args(): parser = argparse.ArgumentParser( description='MMDet test (and eval) a model') parser.add_argument('--gt_path', help='path of mat file of ground truth') parser.add_argument('--ann_path', help='path of generated xml files') args = parser.parse_args() return args def get_gt_boxes(gt_dir): """ gt dir: (wider_face_val.mat, wider_easy_val.mat, wider_medium_val.mat, wider_hard_val.mat)""" gt_mat = loadmat(os.path.join(gt_dir, 'wider_face_val.mat')) hard_mat = loadmat(os.path.join(gt_dir, 'wider_hard_val.mat')) medium_mat = loadmat(os.path.join(gt_dir, 'wider_medium_val.mat')) easy_mat = loadmat(os.path.join(gt_dir, 'wider_easy_val.mat')) facebox_list = gt_mat['face_bbx_list'] event_list = gt_mat['event_list'] file_list = gt_mat['file_list'] hard_gt_list = hard_mat['gt_list'] medium_gt_list = medium_mat['gt_list'] easy_gt_list = easy_mat['gt_list'] return facebox_list, event_list, file_list, hard_gt_list, medium_gt_list, easy_gt_list def parse_xml(xml_path, keep_index, gt_boxes): tree = ET.parse(xml_path) root = tree.getroot() objs = root.findall('object') keep_index = keep_index.reshape(-1) for i in range(len(objs)): obj = objs[i] gt_box = gt_boxes[i] bnd_box = obj.find('bndbox') bbox = np.array([ int(float(bnd_box.find('xmin').text)), int(float(bnd_box.find('ymin').text)), int(float(bnd_box.find('xmax').text)), int(float(bnd_box.find('ymax').text)) ]) bbox[2:] -= bbox[:2] assert np.sum(bbox == gt_box) == 4 if i + 1 in keep_index and (bbox[2] > 0 and bbox[3] > 0): obj.find('difficult').text = '0' else: obj.find('difficult').text = '1' tree.write(xml_path) def main(): args = parse_args() (facebox_list, event_list, file_list, hard_gt_list, medium_gt_list, easy_gt_list) = get_gt_boxes(args.gt_path) event_num = len(event_list) settings = ['easy', 'medium', 'hard'] setting_gts = [easy_gt_list, medium_gt_list, hard_gt_list] for setting_id in range(0, 3): # different setting gt_list = setting_gts[setting_id] count_face = 0 # [hard, medium, easy] pbar = tqdm.tqdm(range(event_num)) for i in pbar: pbar.set_description('Processing {}'.format(settings[setting_id])) img_list = file_list[i][0] sub_gt_list = gt_list[i][0] gt_bbx_list = facebox_list[i][0] for j in range(len(img_list)): xml_path = os.path.join(args.ann_path, str(img_list[j][0][0]) + '.xml') gt_boxes = gt_bbx_list[j][0].astype('float') keep_index = sub_gt_list[j][0] count_face += len(keep_index) parse_xml(xml_path, keep_index, gt_boxes) if __name__ == '__main__': main() ``` #### File: modules/bricks/hsigmoid.py ```python import torch.nn as nn from vedacore.misc import registry @registry.register_module('activation') class HSigmoid(nn.Module): """Hard Sigmoid Module. Apply the hard sigmoid function: Hsigmoid(x) = min(max((x + 1) / 2, 0), 1) Returns: Tensor: The output tensor. """ def __init__(self): super(HSigmoid, self).__init__() def forward(self, x): x = (x + 1) / 2 return x.clamp_(0, 1) ``` #### File: modules/bricks/scale.py ```python import torch import torch.nn as nn class Scale(nn.Module): """A learnable scale parameter. This layer scales the input by a learnable factor. It multiplies a learnable scale parameter of shape (1,) with input of any shape. Args: scale (float): Initial value of scale factor. 
Default: 1.0 """ def __init__(self, scale=1.0): super(Scale, self).__init__() self.scale = nn.Parameter(torch.tensor(scale, dtype=torch.float)) def forward(self, x): return x * self.scale ``` #### File: vedacore/optimizers/builder.py ```python import copy import inspect import torch from vedacore.misc import build_from_cfg, registry def register_torch_optimizers(): torch_optimizers = [] for module_name in dir(torch.optim): if module_name.startswith('__'): continue _optim = getattr(torch.optim, module_name) if inspect.isclass(_optim) and issubclass(_optim, torch.optim.Optimizer): registry.register_module('optimizer')(_optim) torch_optimizers.append(module_name) return torch_optimizers register_torch_optimizers() def build_optimizer_constructor(cfg): return build_from_cfg(cfg, registry, 'optimizer_builder') def build_optimizer(model, cfg): optimizer_cfg = copy.deepcopy(cfg) constructor_type = optimizer_cfg.pop('constructor', 'DefaultOptimizerConstructor') paramwise_cfg = optimizer_cfg.pop('paramwise_cfg', None) optim_constructor = build_optimizer_constructor( dict( typename=constructor_type, optimizer_cfg=optimizer_cfg, paramwise_cfg=paramwise_cfg)) optimizer = optim_constructor(model) return optimizer ``` #### File: vedacore/parallel/dist_utils.py ```python import functools import os import torch import torch.distributed as dist import torch.multiprocessing as mp def init_dist(launcher, backend='nccl', **kwargs): if mp.get_start_method(allow_none=True) is None: mp.set_start_method('spawn') if launcher == 'pytorch': _init_dist_pytorch(backend, **kwargs) else: raise ValueError(f'Invalid launcher type: {launcher}') def _init_dist_pytorch(backend, **kwargs): # TODO: use local_rank instead of rank % num_gpus rank = int(os.environ['RANK']) num_gpus = torch.cuda.device_count() torch.cuda.set_device(rank % num_gpus) dist.init_process_group(backend=backend, **kwargs) def get_dist_info(): if dist.is_available(): initialized = dist.is_initialized() else: initialized = False if initialized: rank = dist.get_rank() world_size = dist.get_world_size() else: rank = 0 world_size = 1 return rank, world_size def master_only(func): @functools.wraps(func) def wrapper(*args, **kwargs): rank, _ = get_dist_info() if rank == 0: return func(*args, **kwargs) return wrapper ``` #### File: criteria/losses/builder.py ```python from vedacore.misc import build_from_cfg, registry def build_loss(cfg): return build_from_cfg(cfg, registry, 'loss') ``` #### File: vedadet/criteria/point_anchor_criterion.py ```python import torch from vedacore.misc import multi_apply, registry from vedadet.bridge import build_meshgrid from vedadet.misc.bbox import distance2bbox from .base_criterion import BaseCriterion from .losses import build_loss @registry.register_module('criterion') class PointAnchorCriterion(BaseCriterion): """FCOS-style anchor-free criterion that assigns targets to points instead of anchor boxes. Args: num_classes (int): Number of categories excluding the background category. meshgrid (dict): Config dict for the point meshgrid generator. strides (list[int]): Downsample strides of the multi-level feature maps. regress_ranges (tuple[tuple[int, int]]): Regress range of points on each feature map level. center_sampling (bool): If true, only points close to a gt box center count as positives. Default: False center_sample_radius (float): Radius (in units of stride) for center sampling. Default: 1.5 loss_cls (dict): Config of classification loss. loss_bbox (dict): Config of localization loss. loss_centerness (dict): Config of centerness loss. """ INF = 1e8 def __init__(self, num_classes, meshgrid, strides, regress_ranges, center_sampling=False, center_sample_radius=1.5, loss_cls=dict( typename='FocalLoss', use_sigmoid=True, gamma=2.0, alpha=0.25, loss_weight=1.0), loss_bbox=dict(typename='IoULoss', loss_weight=1.0), loss_centerness=dict( typename='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)): super().__init__() self.num_classes = num_classes use_sigmoid_cls = loss_cls.get('use_sigmoid', False) assert use_sigmoid_cls is True self.meshgrid = build_meshgrid(meshgrid) self.strides = strides self.regress_ranges = regress_ranges self.center_sampling = center_sampling self.center_sample_radius = center_sample_radius self.cls_out_channels = num_classes if self.cls_out_channels <= 0: raise ValueError(f'num_classes={num_classes} is too small') self.background_label = num_classes self.loss_cls = build_loss(loss_cls) self.loss_bbox = build_loss(loss_bbox) self.loss_centerness = build_loss(loss_centerness) def loss(self, feats, img_metas, gt_labels, gt_bboxes, gt_bboxes_ignore=None): """ Adapted from https://github.com/open-mmlab/mmdetection """ cls_scores = feats[0] featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] device = cls_scores[0].device dtype = cls_scores[0].dtype anchor_mesh = self.meshgrid.gen_anchor_mesh(featmap_sizes, img_metas, dtype, device) target_labels, target_bboxes = self.get_targets( anchor_mesh, gt_labels, gt_bboxes) losses = self._loss(anchor_mesh, target_labels, target_bboxes, feats) loss, log_vars = self._parse_losses(losses) return dict(loss=loss, log_vars=log_vars) def _loss(self, all_level_points, labels, bbox_targets, feats): """ Adapted from https://github.com/open-mmlab/mmdetection """ assert len(feats) == 3 cls_scores, bbox_preds, centernesses = feats assert len(cls_scores) == len(bbox_preds) # == len(centernesses) num_imgs = cls_scores[0].size(0) # flatten cls_scores, bbox_preds and centerness flatten_cls_scores = [ cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels) for cls_score in cls_scores ] flatten_bbox_preds = [ bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) for bbox_pred in bbox_preds ] flatten_centerness = [ centerness.permute(0, 2, 3, 1).reshape(-1) for centerness in centernesses ] flatten_cls_scores = torch.cat(flatten_cls_scores) flatten_bbox_preds = torch.cat(flatten_bbox_preds) flatten_centerness = torch.cat(flatten_centerness) flatten_labels = torch.cat(labels) flatten_bbox_targets = torch.cat(bbox_targets) # repeat points to align with bbox_preds flatten_points = torch.cat( [points.repeat(num_imgs, 1) for points in all_level_points]) # FG cat_id: [0, num_classes -1], BG cat_id: num_classes bg_class_ind = self.num_classes pos_inds = ((flatten_labels >= 0) & (flatten_labels < bg_class_ind)).nonzero( as_tuple=False).reshape(-1) num_pos = len(pos_inds) loss_cls = self.loss_cls( flatten_cls_scores, flatten_labels, avg_factor=num_pos + num_imgs) # avoid num_pos being 0 pos_bbox_preds = flatten_bbox_preds[pos_inds] pos_centerness = flatten_centerness[pos_inds] if num_pos > 0: pos_bbox_targets = flatten_bbox_targets[pos_inds] pos_centerness_targets = self.centerness_target(pos_bbox_targets) pos_points = flatten_points[pos_inds] pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds) pos_decoded_target_preds = distance2bbox(pos_points, pos_bbox_targets) loss_bbox = self.loss_bbox( pos_decoded_bbox_preds, 
pos_decoded_target_preds,
                weight=pos_centerness_targets,
                avg_factor=pos_centerness_targets.sum())
            loss_centerness = self.loss_centerness(pos_centerness,
                                                   pos_centerness_targets)
        else:
            loss_bbox = pos_bbox_preds.sum()
            loss_centerness = pos_centerness.sum()

        return dict(
            loss_bbox=loss_bbox,
            loss_cls=loss_cls,
            loss_centerness=loss_centerness)

    def get_targets(self, points, gt_labels_list, gt_bboxes_list):
        """Compute regression, classification and centerness targets for points
        in multiple images.

        Adapted from https://github.com/open-mmlab/mmdetection

        Args:
            points (list[Tensor]): Points of each fpn level, each has shape
                (num_points, 2).
            gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image,
                each has shape (num_gt, 4).
            gt_labels_list (list[Tensor]): Ground truth labels of each box,
                each has shape (num_gt,).

        Returns:
            tuple:
                concat_lvl_labels (list[Tensor]): Labels of each level. \
                concat_lvl_bbox_targets (list[Tensor]): BBox targets of each \
                    level.
        """
        assert len(points) == len(self.regress_ranges)
        num_levels = len(points)
        # expand regress ranges to align with points
        expanded_regress_ranges = [
            points[i].new_tensor(self.regress_ranges[i])[None].expand_as(
                points[i]) for i in range(num_levels)
        ]
        # concat all levels points and regress ranges
        concat_regress_ranges = torch.cat(expanded_regress_ranges, dim=0)
        concat_points = torch.cat(points, dim=0)

        # the number of points per img, per lvl
        num_points = [center.size(0) for center in points]

        # get labels and bbox_targets of each image
        labels_list, bbox_targets_list = multi_apply(
            self._get_target_single,
            gt_bboxes_list,
            gt_labels_list,
            points=concat_points,
            regress_ranges=concat_regress_ranges,
            num_points_per_lvl=num_points)

        # split to per img, per level
        labels_list = [labels.split(num_points, 0) for labels in labels_list]
        bbox_targets_list = [
            bbox_targets.split(num_points, 0)
            for bbox_targets in bbox_targets_list
        ]

        # concat per level across images
        concat_lvl_labels = []
        concat_lvl_bbox_targets = []
        for i in range(num_levels):
            concat_lvl_labels.append(
                torch.cat([labels[i] for labels in labels_list]))
            bbox_targets = torch.cat(
                [bbox_targets[i] for bbox_targets in bbox_targets_list])
            # if self.norm_on_bbox:
            #     bbox_targets = bbox_targets / self.strides[i]
            concat_lvl_bbox_targets.append(bbox_targets)
        return concat_lvl_labels, concat_lvl_bbox_targets

    def _get_target_single(self, gt_bboxes, gt_labels, points, regress_ranges,
                           num_points_per_lvl):
        """Compute regression and classification targets for a single image.
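
        Args:
            gt_bboxes (Tensor): Ground truth bboxes of the image, with shape
                (num_gts, 4).
            gt_labels (Tensor): Ground truth labels of each box, with shape
                (num_gts,).
            points (Tensor): Points of all fpn levels concatenated, with shape
                (num_points, 2).
            regress_ranges (Tensor): Regress range of each point, with shape
                (num_points, 2).
            num_points_per_lvl (list[int]): Number of points of each fpn level.

        Returns:
            tuple: labels (Tensor) with shape (num_points,) and bbox_targets
                (Tensor) with shape (num_points, 4).
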
Adapted from https://github.com/open-mmlab/mmdetection """ # TODO: adapt to the API in Bridge class num_points = points.size(0) num_gts = gt_labels.size(0) if num_gts == 0: return gt_labels.new_full((num_points,), self.background_label), \ gt_bboxes.new_zeros((num_points, 4)) areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * ( gt_bboxes[:, 3] - gt_bboxes[:, 1]) # TODO: figure out why these two are different # areas = areas[None].expand(num_points, num_gts) areas = areas[None].repeat(num_points, 1) regress_ranges = regress_ranges[:, None, :].expand( num_points, num_gts, 2) gt_bboxes = gt_bboxes[None].expand(num_points, num_gts, 4) xs, ys = points[:, 0], points[:, 1] xs = xs[:, None].expand(num_points, num_gts) ys = ys[:, None].expand(num_points, num_gts) left = xs - gt_bboxes[..., 0] right = gt_bboxes[..., 2] - xs top = ys - gt_bboxes[..., 1] bottom = gt_bboxes[..., 3] - ys bbox_targets = torch.stack((left, top, right, bottom), -1) if self.center_sampling: # condition1: inside a `center bbox` radius = self.center_sample_radius center_xs = (gt_bboxes[..., 0] + gt_bboxes[..., 2]) / 2 center_ys = (gt_bboxes[..., 1] + gt_bboxes[..., 3]) / 2 center_gts = torch.zeros_like(gt_bboxes) stride = center_xs.new_zeros(center_xs.shape) # project the points on current lvl back to the `original` sizes lvl_begin = 0 for lvl_idx, num_points_lvl in enumerate(num_points_per_lvl): lvl_end = lvl_begin + num_points_lvl stride[lvl_begin:lvl_end] = self.strides[lvl_idx] * radius lvl_begin = lvl_end x_mins = center_xs - stride y_mins = center_ys - stride x_maxs = center_xs + stride y_maxs = center_ys + stride center_gts[..., 0] = torch.where(x_mins > gt_bboxes[..., 0], x_mins, gt_bboxes[..., 0]) center_gts[..., 1] = torch.where(y_mins > gt_bboxes[..., 1], y_mins, gt_bboxes[..., 1]) center_gts[..., 2] = torch.where(x_maxs > gt_bboxes[..., 2], gt_bboxes[..., 2], x_maxs) center_gts[..., 3] = torch.where(y_maxs > gt_bboxes[..., 3], gt_bboxes[..., 3], y_maxs) cb_dist_left = xs - center_gts[..., 0] cb_dist_right = center_gts[..., 2] - xs cb_dist_top = ys - center_gts[..., 1] cb_dist_bottom = center_gts[..., 3] - ys center_bbox = torch.stack( (cb_dist_left, cb_dist_top, cb_dist_right, cb_dist_bottom), -1) inside_gt_bbox_mask = center_bbox.min(-1)[0] > 0 else: # condition1: inside a gt bbox inside_gt_bbox_mask = bbox_targets.min(-1)[0] > 0 # condition2: limit the regression range for each location max_regress_distance = bbox_targets.max(-1)[0] inside_regress_range = ( (max_regress_distance >= regress_ranges[..., 0]) & (max_regress_distance <= regress_ranges[..., 1])) # if there are still more than one objects for a location, # we choose the one with minimal area areas[inside_gt_bbox_mask == 0] = PointAnchorCriterion.INF areas[inside_regress_range == 0] = PointAnchorCriterion.INF min_area, min_area_inds = areas.min(dim=1) labels = gt_labels[min_area_inds] labels[min_area == PointAnchorCriterion.INF] = self.background_label # set as BG bbox_targets = bbox_targets[range(num_points), min_area_inds] return labels, bbox_targets def centerness_target(self, pos_bbox_targets): """Compute centerness targets. Adapted from https://github.com/open-mmlab/mmdetection Args: pos_bbox_targets (Tensor): BBox targets of positive bboxes in shape (num_pos, 4) Returns: Tensor: Centerness target. 
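
        The target of each positive location is computed from its
        (left, top, right, bottom) regression targets as
        sqrt((min(l, r) / max(l, r)) * (min(t, b) / max(t, b))),
        which is 1 at the box center and decays towards the box borders;
        e.g. targets (1, 2, 4, 2) give sqrt(0.25 * 1.0) = 0.5.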
""" # only calculate pos centerness targets, otherwise there may be nan left_right = pos_bbox_targets[:, [0, 2]] top_bottom = pos_bbox_targets[:, [1, 3]] centerness_targets = ( left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * ( top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0]) return torch.sqrt(centerness_targets) ``` #### File: vedadet/engines/builder.py ```python from vedacore.misc import build_from_cfg, registry def build_engine(cfg): return build_from_cfg(cfg, registry, 'engine') ``` #### File: vedadet/engines/infer_engine.py ```python import torch from vedacore.misc import registry from vedadet.bridge import build_converter, build_meshgrid from vedadet.misc.bbox import bbox2result, multiclass_nms from .base_engine import BaseEngine @registry.register_module('engine') class InferEngine(BaseEngine): def __init__(self, model, meshgrid, converter, num_classes, use_sigmoid, test_cfg): super().__init__(model) self.meshgrid = build_meshgrid(meshgrid) self.converter = build_converter(converter) if use_sigmoid: self.cls_out_channels = num_classes else: self.cls_out_channels = num_classes + 1 self.test_cfg = test_cfg def extract_feats(self, img): feats = self.model(img, train=False) return feats def _get_raw_dets(self, img, img_metas): """ Args: img(torch.Tensor): shape N*3*H*W, N is batch size img_metas(list): len(img_metas) = N Returns: dets(list): len(dets) is the batch size, len(dets[ii]) = #classes, dets[ii][jj] is an np.array whose shape is N*5 """ feats = self.extract_feats(img) featmap_sizes = [feat.shape[-2:] for feat in feats[0]] dtype = feats[0][0].dtype device = feats[0][0].device anchor_mesh = self.meshgrid.gen_anchor_mesh(featmap_sizes, img_metas, dtype, device) # bboxes, scores, score_factor dets = self.converter.get_bboxes(anchor_mesh, img_metas, *feats) return dets def _simple_infer(self, img, img_metas): """ Args: img(torch.Tensor): shape N*3*H*W, N is batch size img_metas(list): len(img_metas) = N Returns: dets(list): len(dets) is the batch size, len(dets[ii]) = #classes, dets[ii][jj] is an np.array whose shape is N*5 """ dets = self._get_raw_dets(img, img_metas) batch_size = len(dets) result_list = [] for ii in range(batch_size): bboxes, scores, centerness = dets[ii] det_bboxes, det_labels = multiclass_nms( bboxes, scores, self.test_cfg.score_thr, self.test_cfg.nms, self.test_cfg.max_per_img, score_factors=centerness) bbox_result = bbox2result(det_bboxes, det_labels, self.cls_out_channels) result_list.append(bbox_result) return result_list def _aug_infer(self, img_list, img_metas_list): assert len(img_list) == len(img_metas_list) dets = [] ntransforms = len(img_list) for idx in range(len(img_list)): img = img_list[idx] img_metas = img_metas_list[idx] tdets = self._get_raw_dets(img, img_metas) dets.append(tdets) batch_size = len(dets[0]) nclasses = len(dets[0][0]) merged_dets = [] for ii in range(batch_size): single_image = [] for kk in range(nclasses): single_class = [] for jj in range(ntransforms): single_class.append(dets[jj][ii][kk]) single_image.append(torch.cat(single_class, axis=0)) merged_dets.append(single_image) result_list = [] for ii in range(batch_size): bboxes, scores, centerness = merged_dets[ii] det_bboxes, det_labels = multiclass_nms( bboxes, scores, self.test_cfg.score_thr, self.test_cfg.nms, self.test_cfg.max_per_img, score_factors=centerness) bbox_result = bbox2result(det_bboxes, det_labels, self.cls_out_channels) result_list.append(bbox_result) return result_list def infer(self, img, img_metas): if len(img) == 1: return 
self._simple_infer(img[0], img_metas[0]) else: return self._aug_infer(img, img_metas) ``` #### File: vedadet/engines/train_engine.py ```python from vedacore.misc import registry from vedacore.optimizers import build_optimizer from vedadet.criteria import build_criterion from .base_engine import BaseEngine @registry.register_module('engine') class TrainEngine(BaseEngine): def __init__(self, model, criterion, optimizer): super().__init__(model) self.criterion = build_criterion(criterion) self.optimizer = build_optimizer(self.model, optimizer) def extract_feats(self, img): feats = self.model(img, train=True) return feats def forward(self, data): return self.forward_impl(**data) def forward_impl(self, img, img_metas, gt_labels, gt_bboxes, gt_bboxes_ignore=None): feats = self.extract_feats(img) losses = self.criterion.loss(feats, img_metas, gt_labels, gt_bboxes, gt_bboxes_ignore) return losses ``` #### File: models/heads/base_dense_head.py ```python import torch.nn as nn from abc import ABCMeta class BaseDenseHead(nn.Module, metaclass=ABCMeta): """Base class for DenseHeads.""" def __init__(self): super(BaseDenseHead, self).__init__() ```
{ "source": "jie311/vega", "score": 2 }
#### File: algorithms/hpo/pbt_conf.py ```python from vega.common import ConfigSerializable class PBTPolicyConfig(ConfigSerializable): """PBT Policy Config.""" config_count = 16 each_epochs = 3 total_rungs = 200 @classmethod def rules(cls): """Return rules for checking.""" rules_PBTPolicyConfig = {"config_count": {"type": int}, "each_epochs": {"type": int}, "total_rungs": {"type": int} } return rules_PBTPolicyConfig class PBTConfig(ConfigSerializable): """PBT Config.""" policy = PBTPolicyConfig objective_keys = 'accuracy' @classmethod def rules(cls): """Return rules for checking.""" rules_BoConfig = {"policy": {"type": dict}, "objective_keys": {"type": (list, str)} } return rules_BoConfig @classmethod def get_config(cls): """Get sub config.""" return { "policy": cls.policy } ``` #### File: algorithms/hpo/pbt_hpo.py ```python import os import copy import shutil from vega.algorithms.hpo.sha_base.pbt import PBT from vega.common import ClassFactory, ClassType from vega.common import FileOps from vega.algorithms.hpo.hpo_base import HPOBase from .pbt_conf import PBTConfig import numpy as np import logging @ClassFactory.register(ClassType.SEARCH_ALGORITHM) class PBTHpo(HPOBase): """An Hpo of PBT.""" config = PBTConfig() def __init__(self, search_space=None, **kwargs): """Init PBTHpo.""" self.search_space = search_space super(PBTHpo, self).__init__(search_space, **kwargs) self.hyperparameter_list = self.get_hyperparameters(self.config.policy.config_count) self.hpo = PBT(self.config.policy.config_count, self.config.policy.each_epochs, self.config.policy.total_rungs, self.local_base_path, paras_list=self.hyperparameter_list) def get_hyperparameters(self, num): """Use the trained model to propose a set of params from SearchSpace. :param int num: number of random samples from hyperparameter space. :return: list of random sampled config from hyperparameter space. :rtype: list. """ params_list = [] for _ in range(num): parameters = self.search_space.get_sample_space() if parameters is None: return None predictions = np.random.rand(parameters.shape[0], 1) index = np.argmax(predictions) param = self.search_space.decode(parameters[index, :]) params_list.append(param) return params_list def search(self): """Search an id and hps from hpo.""" sample = self.hpo.propose() if sample is None: return None re_hps = {} sample = copy.deepcopy(sample) sample_id = sample.get('config_id') cur_configs = sample.get('configs') all_configs = sample.get("all_configs") rung_id = sample.get('rung_id') checkpoint_path = FileOps.join_path(self.local_base_path, 'cache', str(sample_id), 'checkpoint') FileOps.make_dir(checkpoint_path) if os.path.exists(checkpoint_path): re_hps['trainer.checkpoint_path'] = checkpoint_path if 'epoch' in sample: re_hps['trainer.epochs'] = sample.get('epoch') re_hps.update(cur_configs) re_hps['trainer.all_configs'] = all_configs logging.info("Current rung [ {} /{}] ".format(rung_id, self.config.policy.total_rungs)) return dict(worker_id=sample_id, encoded_desc=re_hps, rung_id=rung_id) def update(self, record): """Update current performance into hpo score board. 
:param record: evaluation record reported by a worker, including its
            worker_id and performance
        """
        super().update(record)
        config_id = str(record.get('worker_id'))
        step_name = record.get('step_name')
        worker_result_path = self.get_local_worker_path(step_name, config_id)
        new_worker_result_path = FileOps.join_path(self.local_base_path, 'cache', config_id, 'checkpoint')
        FileOps.make_dir(worker_result_path)
        FileOps.make_dir(new_worker_result_path)
        if os.path.exists(new_worker_result_path):
            shutil.rmtree(new_worker_result_path)
        shutil.copytree(worker_result_path, new_worker_result_path)
```
#### File: sha_base/tuner/double_gaussian.py
```python
from sklearn.mixture import GaussianMixture
import numpy as np


class DoubleMultiGaussian(object):
    """Double multivariate Gaussian model fitted to high- and low-scoring samples.

    :param gamma: fraction of top-scoring samples used to fit the first Gaussian.
    :type gamma: float
    """

    def __init__(self, gamma=0.25):
        """Init DoubleMultiGaussian."""
        self.gamma = gamma
        self.means_ = None
        self.covariances_ = None

    def fit(self, X, y):
        """Split X by descending y and fit a Gaussian model to each part."""
        X_sorted = X[np.argsort(-y)]
        if X.shape[0] < 4:
            gaussian_high = GaussianMixture().fit(X_sorted)
            gaussian_low = gaussian_high
        else:
            point_segmentation = max(2, int(self.gamma * X.shape[0]))
            gaussian_high = GaussianMixture().fit(X_sorted[:point_segmentation])
            gaussian_low = GaussianMixture().fit(X_sorted[point_segmentation:])
        self.means_ = [gaussian_high.means_[0], gaussian_low.means_[0]]
        self.covariances_ = [gaussian_high.covariances_[0], gaussian_low.covariances_[0]]
```
#### File: nas/auto_lane/auto_lane_trainer_callback.py
```python
import logging
from vega.common import ClassFactory, ClassType
from vega.common import FileOps
from vega.trainer.callbacks import Callback

logger = logging.getLogger(__name__)


@ClassFactory.register(ClassType.CALLBACK)
class AutoLaneTrainerCallback(Callback):
    """Construct the trainer of Auto Lane."""

    disable_callbacks = ['ProgressLogger', 'MetricsEvaluator', "ModelStatistics"]

    def logger_patch(self):
        """Patch the default logger."""
        worker_path = self.trainer.get_local_worker_path()
        worker_spec_log_file = FileOps.join_path(worker_path, 'current_worker.log')
        logger = logging.getLogger(__name__)
        for hdlr in logger.handlers:
            logger.removeHandler(hdlr)
        for hdlr in logging.root.handlers:
            logging.root.removeHandler(hdlr)
        logger.addHandler(logging.FileHandler(worker_spec_log_file))
        logger.addHandler(logging.StreamHandler())
        logger.setLevel(logging.INFO)
        logging.root = logger

    def before_train(self, logs=None):
        """Be called before the whole train process."""
        self.trainer.config.call_metrics_on_train = False
        self.cfg = self.trainer.config
        self.worker_id = self.trainer.worker_id
        self.local_base_path = self.trainer.local_base_path
        self.local_output_path = self.trainer.local_output_path

        self.result_path = FileOps.join_path(self.trainer.local_base_path, "result")
        FileOps.make_dir(self.result_path)
        self.logger_patch()

    def make_batch(self, batch):
        """Make batch for each training step."""
        image = batch.pop('image').cuda(non_blocking=True).float()
        return image, batch

    def train_step(self, batch):
        """Replace the default train_step function."""
        self.trainer.model.train()
        image, train_item_spec = batch
        gt_loc = train_item_spec.pop('gt_loc').cuda(non_blocking=True).float()
        gt_cls = train_item_spec.pop('gt_cls').cuda(non_blocking=True).float()

        self.trainer.optimizer.zero_grad()
        model_out = self.trainer.model(input=image,
                                       gt_loc=gt_loc,
                                       gt_cls=gt_cls,
                                       forward_switch='train',
                                       **train_item_spec)
        loss_pos = model_out['loss_pos']
        loss_neg = model_out['loss_neg']
        loss_loc = model_out['loss_loc']
        loss = loss_loc +
loss_pos + loss_neg if self.trainer.use_amp: raise NotImplementedError('Amp is not implemented in algorithm auto lane.') loss.backward() self.trainer.optimizer.step() return {'loss': loss.item(), 'cls_pos_loss': loss_pos.item(), 'cls_neg_loss': loss_neg.item(), 'loc_loss': loss_loc.item(), 'train_batch_output': None} # def before_valid(self, logs=None): # """Be called before a batch validation.""" # epochs = self.params['epochs'] def valid_step(self, batch): """Be called on each batch validing.""" self.trainer.model.eval() image, valid_item_spec = batch results = self.trainer.model(input=image, forward_switch='valid', **valid_item_spec) return {'valid_batch_output': results} ``` #### File: nas/dnet_nas/dnet_nas_codec.py ```python import copy import numpy as np from vega.common import ClassType, ClassFactory from vega.core.search_algs.codec import Codec @ClassFactory.register(ClassType.CODEC) class DnetNasCodec(Codec): """DnetNasCodec. :param codec_name: name of current Codec. :type codec_name: str :param search_space: input search_space. :type search_space: SearchSpace """ def __init__(self, search_space=None, **kwargs): """Init DnetNasCodec.""" super(DnetNasCodec, self).__init__(search_space, **kwargs) def encode(self, sample_desc, is_random=False): """Encode. :param sample_desc: a sample desc to encode. :type sample_desc: dict :param is_random: if use random to encode, default is False. :type is_random: bool :return: an encoded sample. :rtype: dict """ code_length = sample_desc['network.backbone.code_length'] base_channel = sample_desc['network.backbone.base_channel'] final_channel = sample_desc['network.backbone.final_channel'] down_sample = sample_desc['network.backbone.downsample'] block_coding = sample_desc['block_coding'] macro_coding = ['1' for _ in range(code_length)] channel_times = int(np.log2(final_channel // base_channel)) while True: variant_num = down_sample + channel_times variant_positions = np.random.permutation(code_length)[0:variant_num] variant_positions.sort() down_indexes = np.random.permutation(variant_num)[0:down_sample] down_indexes.sort() down_positions = variant_positions[down_indexes] adjacent_positions = set(down_positions) & set(down_positions + 1) if len(adjacent_positions) > 0: continue break variant_positions = list(variant_positions) down_positions = list(down_positions) for i in variant_positions: macro_coding[i] = '2' if i in down_positions: macro_coding[i] = '-' macro_coding = ''.join(macro_coding) code = {} code['network.backbone.block_coding'] = block_coding code['network.backbone.base_channel'] = base_channel code['network.backbone.macro_coding'] = macro_coding sample = {'code': code} return sample def decode(self, sample): """Decode. :param sample: input sample to decode. :type sample: dict :return: return a decoded sample desc. 
:rtype: dict """ if 'code' not in sample: raise ValueError('No code to decode in sample:{}'.format(sample)) code = sample.pop('code') desc = copy.deepcopy(sample) block_coding = code['network.backbone.block_coding'] base_channel = code['network.backbone.base_channel'] macro_coding = code['network.backbone.macro_coding'] desc['network.backbone.encoding'] = f'{block_coding}_{base_channel}_{macro_coding}' return desc ``` #### File: modnas/callback/base.py ```python from modnas.core.event import event_on, event_off from modnas.utils.logging import get_logger class CallbackBase(): """Base callback class.""" logger = get_logger('callback') priority = 0 def __init__(self, handler_conf=None) -> None: self.handlers = None self.bind_handlers(handler_conf) def bind_handlers(self, handler_conf): """Bind event handlers.""" handlers = {} for ev, conf in handler_conf.items(): prio = None if isinstance(conf, (list, tuple)): h = conf[0] if len(conf) > 1: prio = conf[1] else: h = conf event_on(ev, h, self.priority if prio is None else prio) handlers[ev] = h self.handlers = handlers def unbind_handlers(self): """Un-bind event handlers.""" for ev, h in self.handlers.items(): event_off(ev, h) ``` #### File: callback/predefined/trainer_reporter.py ```python from functools import partial from modnas.registry.callback import register from modnas.utils import format_dict, AverageMeter from ..base import CallbackBase @register class TrainerReporter(CallbackBase): """Trainer statistics reporter class.""" priority = -1 def __init__(self, interval=0.2, format_fn=None): super().__init__({ 'after:TrainerBase.train_step': partial(self.report_step, 'train'), 'after:TrainerBase.valid_step': partial(self.report_step, 'valid'), 'after:TrainerBase.train_epoch': self.report_epoch, 'after:TrainerBase.valid_epoch': self.report_epoch, 'after:TrainerBase.loss': self.on_loss, }) self.interval = interval self.format_fn = format_fn self.last_batch_size = 1 self.stats = None def init_stats(self, keys): """Initialize statistics.""" self.stats = {k: AverageMeter() for k in keys} def reset(self): """Reset statistics.""" self.stats = None self.last_batch_size = 1 def on_loss(self, ret, trainer, output, data, model): """Record batch size in each loss call.""" self.last_batch_size = len(data[-1]) def report_epoch(self, ret, *args, **kwargs): """Log statistics report in each epoch.""" ret = ret or {} if self.stats: ret.update({k: v.avg for k, v in self.stats.items()}) self.reset() return None if not ret else ret def report_step(self, proc, ret, trainer, estim, model, epoch, tot_epochs, step, tot_steps): """Log statistics report in each step.""" if step >= tot_steps: return if step == 0: self.reset() cur_step = epoch * tot_steps + step interval = self.interval if interval and interval < 1: interval = int(interval * tot_steps) stats = ret.copy() if isinstance(ret, dict) else {} stats = {k: v for k, v in stats.items() if isinstance(v, (int, float))} stats_len = stats.pop('N', self.last_batch_size) if self.stats is None and stats: self.init_stats(stats.keys()) writer = trainer.writer for k, v in stats.items(): self.stats[k].update(v, n=stats_len) if writer is not None: writer.add_scalar('/'.join(['trainer', proc, k]), v, cur_step) if interval is None or (interval != 0 and (step + 1) % interval == 0) or step + 1 == tot_steps: fmt_info = format_dict({k: v.avg for k, v in self.stats.items()}, fmt_val=self.format_fn) trainer.logger.info('{}: [{:3d}/{}] {}'.format(proc.title(), step + 1, tot_steps, fmt_info)) ``` #### File: 
data_provider/predefined/default.py ```python from ..base import DataProviderBase from modnas.registry.data_provider import register @register class DefaultDataProvider(DataProviderBase): """Default DataProvider with dataloader.""" def __init__(self, train_loader, valid_loader): super().__init__() self.train_loader = train_loader self.valid_loader = valid_loader self.train_iter = None self.valid_iter = None self.no_valid_warn = True self.reset_train_iter() self.reset_valid_iter() def get_next_train_batch(self): """Return the next train batch.""" if self.train_loader is None: self.logger.error('no train loader') return None try: trn_batch = next(self.get_train_iter()) except StopIteration: self.reset_train_iter() trn_batch = next(self.get_train_iter()) return trn_batch def get_next_valid_batch(self): """Return the next validate batch.""" if self.valid_loader is None: if self.no_valid_warn: self.logger.warning('no valid loader, returning training batch instead') self.no_valid_warn = False return self.get_next_train_batch() try: val_batch = next(self.get_valid_iter()) except StopIteration: self.reset_valid_iter() val_batch = next(self.get_valid_iter()) return val_batch def get_train_iter(self): """Return train iterator.""" return self.train_iter def get_valid_iter(self): """Return validate iterator.""" return self.valid_iter def reset_train_iter(self): """Reset train iterator.""" self.train_iter = None if self.train_loader is None else iter(self.train_loader) def reset_valid_iter(self): """Reset validate iterator.""" self.valid_iter = None if self.valid_loader is None else iter(self.valid_loader) def get_num_train_batch(self, epoch): """Return number of train batches in current epoch.""" return 0 if self.train_loader is None else len(self.train_loader) def get_num_valid_batch(self, epoch): """Return number of validate batches in current epoch.""" return 0 if self.valid_loader is None else len(self.valid_loader) ``` #### File: estim/dist_backend/base.py ```python import threading class RemoteBase(): """Distributed remote client class.""" def __init__(self): super().__init__() self.on_done = None self.on_failed = None def call(self, func, *args, on_done=None, on_failed=None, **kwargs): """Call function on remote client with callbacks.""" self.on_done = on_done self.on_failed = on_failed self.th_rpc = threading.Thread(target=self.rpc, args=(func,) + args, kwargs=kwargs) self.th_rpc.start() def close(self): """Close the remote client.""" raise NotImplementedError def rpc(self, func, *args, **kwargs): """Call function on remote client.""" raise NotImplementedError def on_rpc_done(self, ret): """Invoke callback when remote call finishes.""" self.ret = ret self.on_done(ret) def on_rpc_failed(self, ret): """Invoke callback when remote call fails.""" self.on_failed(ret) class WorkerBase(): """Distributed remote worker (server) class.""" def run(self, estim): """Run worker.""" raise NotImplementedError def close(self): """Close worker.""" raise NotImplementedError ``` #### File: estim/predefined/hptune.py ```python import copy import itertools import traceback import multiprocessing as mp import yaml from ..base import EstimBase from modnas.utils.config import Config from modnas.utils.wrapper import run from modnas.registry.estim import register def _default_trial_runner(conn, trial_args): ret = run(**(yaml.load(trial_args, Loader=yaml.SafeLoader) or {})) conn.send(ret) @register class HPTuneEstim(EstimBase): """Hyperparameter-tuning Estimator class.""" def __init__(self, measure_fn=None, batch_size=1, 
early_stopping=None, trial_config=None, trial_args=None, *args, **kwargs): super().__init__(*args, **kwargs) self.measure_fn = measure_fn or self._default_measure_fn self.batch_size = batch_size self.early_stopping = early_stopping self.trial_config = trial_config self.trial_args = trial_args self.best_hparams = None self.best_score = None self.best_iter = 0 self.trial_index = 0 self.is_succ = False def _default_measure_fn(self, hp, **kwargs): trial_config = copy.deepcopy(Config.load(self.trial_config)) Config.apply(trial_config, hp) trial_args = dict(copy.deepcopy(self.trial_args)) trial_args['name'] = '{}_{}'.format(trial_args.get('name', 'trial'), self.trial_index) trial_args['config'] = trial_config.to_dict() ctx = mp.get_context('spawn') p_con, c_con = ctx.Pipe() proc = ctx.Process(target=_default_trial_runner, args=(c_con, yaml.dump(trial_args))) proc.start() proc.join() if not p_con.poll(0): return 0 ret = p_con.recv() ret = ret.get('final', list(ret.values())[-1]) return ret.get('best_score', list(ret.values())[0]) def step(self, hp): """Return evaluation results of a parameter set.""" self.trial_index += 1 logger = self.logger config = self.config fn_args = config.get('trial_args', {}) try: score = self.measure_fn(hp, **fn_args) self.is_succ = True except RuntimeError: score = 0 logger.info('trial {} failed with error: {}'.format(self.trial_index, traceback.format_exc())) result = { 'score': score, } logger.info('Evaluate hparam: {} -> {}'.format(hp, result)) return result def run_epoch(self, optim, epoch, tot_epochs): """Run Estimator routine for one epoch.""" batch_size = self.batch_size early_stopping = self.early_stopping if tot_epochs != -1 and epoch >= tot_epochs: return {'stop': True} if not optim.has_next(): self.logger.info('HPTune: all finished') return {'stop': True} inputs = optim.next(batch_size) self.clear_buffer() for hp in inputs: res = self.stepped(hp) self.wait_done() for hp, res, _ in self.buffer(): score = res['score'] if self.best_score is None or score > self.best_score: self.best_score = score self.best_iter = epoch optim.step(self) if early_stopping is not None and epoch >= self.best_iter + early_stopping: self.logger.info('HPTune: early stopped: {}'.format(epoch)) return {'stop': True} def run(self, optim): """Run Estimator routine.""" config = self.config tot_epochs = config.epochs for epoch in itertools.count(self.cur_epoch + 1): if (self.run_epoch(optim, epoch, tot_epochs) or {}).get('stop'): break if not self.is_succ: raise RuntimeError('All trials failed') ``` #### File: metrics/torch/traversal.py ```python from ..base import MetricsBase from modnas.registry.metrics import register, build from modnas.arch_space.mixed_ops import MixedOp @register class MixedOpTraversalMetrics(MetricsBase): """Mixed operator traversal metrics class.""" def __init__(self, metrics): super().__init__() self.metrics = build(metrics) def __call__(self, estim): """Return metrics output.""" mt = 0 for m in estim.model.mixed_ops(): for p, op in zip(m.prob(), m.candidates()): mt = mt + self.metrics(op) * p return mt @register class ModuleTraversalMetrics(MetricsBase): """Module traversal metrics class.""" def __init__(self, metrics): super().__init__() self.metrics = build(metrics) def __call__(self, estim): """Return metrics output.""" mt = 0 for m in estim.model.modules(): if not isinstance(m, MixedOp): mt = mt + self.metrics(m) else: for p, op in zip(m.prob(), m.candidates()): mt = mt + self.metrics(op) * p return mt ``` #### File: modnas/registry/__init__.py ```python import 
sys import importlib.util from functools import partial from .registry import registry def register(_reg_path, builder, _reg_id=None): """Register class as name.""" if _reg_id is None: _reg_id = builder.__qualname__ registry.register(builder, _reg_path, _reg_id) return builder def get_builder(_reg_path, _reg_id): """Return class builder by name.""" return registry.get(_reg_path, _reg_id) def parse_spec(spec): """Return parsed id and arguments from build spec.""" if isinstance(spec, dict): return spec['type'], spec.get('args', {}) if isinstance(spec, (tuple, list)) and isinstance(spec[0], str): return spec[0], {} if len(spec) < 2 else spec[1] if isinstance(spec, str): return spec, {} raise ValueError('Invalid build spec: {}'.format(spec)) def to_spec(reg_id, kwargs): """Return build spec from id and arguments.""" return { 'type': reg_id, 'args': kwargs } def build(_reg_path, _spec, *args, **kwargs): """Instantiate class by name.""" reg_id, sp_kwargs = parse_spec(_spec) kwargs.update(sp_kwargs) return registry.get(_reg_path, reg_id)(*args, **kwargs) def register_as(_reg_path, _reg_id=None): """Return a registration decorator.""" def reg_builder(func): register(_reg_path, func, _reg_id) return func return reg_builder def get_registry_utils(_reg_path): """Return registration utilities.""" _register = partial(register, _reg_path) _get_builder = partial(get_builder, _reg_path) _build = partial(build, _reg_path) _register_as = partial(register_as, _reg_path) return _reg_path, _register, _get_builder, _build, _register_as def _get_registry_name(path): return '.'.join(path[path.index('modnas') + 2:]) class RegistryModule(): """Registry as a module.""" def __init__(self, fullname): path = fullname.split('.') registry_name = _get_registry_name(path) self.__package__ = fullname self.__path__ = path self.__name__ = registry_name self.__loader__ = None self.__spec__ = None self.reg_path, self.register, self.get_builder, self.build, self.register_as = get_registry_utils(registry_name) def __getattr__(self, attr): """Return builder by attribute name.""" if attr in self.__dict__: return self.__dict__.get(attr) return self.get_builder(attr) class RegistryImporter(): """Create new Registry using import hooks (PEP 302).""" def find_spec(self, fullname, path, target=None): """Handle registry imports.""" if 'modnas.registry' in fullname: return importlib.util.spec_from_loader(fullname, self) def load_module(self, fullname): """Create and find registry by import path.""" path = fullname.split('.') reg_path, reg_id = path[:-1], path[-1] reg_fullname = '.'.join(reg_path) registry_name = _get_registry_name(reg_path) if reg_fullname in sys.modules and len(registry_name): mod = get_builder(registry_name, reg_id) sys.modules[fullname] = mod return mod mod = sys.modules.get(fullname, RegistryModule(fullname)) sys.modules[fullname] = mod return mod sys.meta_path.append(RegistryImporter()) ``` #### File: datasets/common/avazu.py ```python import numpy as np import logging from .utils.avazu_util import AVAZUDataset from .dataset import Dataset from vega.common import FileOps from vega.datasets.conf.avazu import AvazuConfig from vega.common import ClassFactory, ClassType @ClassFactory.register(ClassType.DATASET) class AvazuDataset(Dataset): """This is a class for Avazu dataset. 
:param train: if the mode is train or not, defaults to True
    :type train: bool
    :param cfg: the config the dataset needs, defaults to None; if cfg is None,
        the default config will be used. The default config file is a yml file
        with the same name as the class.
    :type cfg: yml, py or dict
    """

    config = AvazuConfig()

    def __init__(self, **kwargs):
        """Construct the AvazuDataset class."""
        super(AvazuDataset, self).__init__(**kwargs)
        self.args.data_path = FileOps.download_dataset(self.args.data_path)
        logging.info("init avazu dataset finished.")

    @property
    def data_loader(self):
        """Dataloader attribute which is a unified interface to generate the data.

        :return: a batch data
        :rtype: dict, list, optional
        """
        return AvazuLoader(args=self.args,
                           gen_type=self.mode,
                           batch_size=self.args.batch_size,
                           random_sample=self.args.random_sample,
                           shuffle_block=self.args.shuffle_block,
                           dir_path=self.args.data_path)


class AvazuLoader(AVAZUDataset):
    """Avazu dataset's data loader."""

    def __init__(self, args=None, gen_type="train", batch_size=2000, random_sample=False,
                 shuffle_block=False, dir_path="./"):
        """Construct avazu_loader class."""
        self.args = args
        AVAZUDataset.__init__(self, dir_path=dir_path)
        self.gen_type = gen_type
        self.batch_size = batch_size
        self.random_sample = random_sample
        self.shuffle_block = shuffle_block

    def __iter__(self):
        """Iterate method for AvazuLoader."""
        return self.batch_generator(gen_type=self.gen_type,
                                    batch_size=self.batch_size,
                                    random_sample=self.random_sample,
                                    shuffle_block=self.shuffle_block)

    def __len__(self):
        """Calculate the length of the avazu dataset, i.e., the number of batches."""
        if self.gen_type == "train":
            return int(np.ceil(1.0 * self.args.train_size / self.args.batch_size))
        else:
            return int(np.ceil(1.0 * self.args.test_size / self.args.batch_size))
```
#### File: datasets/common/mrpc.py
```python
import logging
import os
import csv
from vega.datasets.common.dataset import Dataset
from vega.common import ClassFactory, ClassType
from ..conf.mrpc import MrpcConfig
from vega.common.config import Config
from pytorch_pretrained_bert import BertTokenizer


@ClassFactory.register(ClassType.DATASET)
class MrpcDataset(Dataset):
    """MRPC data set (GLUE version)."""

    config = MrpcConfig()

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        label_list = self.get_labels()
        tokenizer = BertTokenizer.from_pretrained(self.args.vocab_file, do_lower_case=self.args.do_lower_case)
        if tokenizer is None:
            raise ValueError("Tokenizer can't be None.")
        if self.mode == 'train':
            examples = self.get_train_examples(self.args.data_path)
        elif self.mode == 'val':
            examples = self.get_val_examples(self.args.data_path)
        else:
            examples = self.get_test_examples(self.args.data_path)
        self.examples = self.convert_examples_to_features(examples, label_list, self.args.max_seq_length, tokenizer)

    def __getitem__(self, idx):
        """Get item."""
        example = self.examples[idx]
        input_ids = example.get('input_ids')
        input_mask = example.get('input_mask')
        segment_ids = example.get('segment_ids')
        label_ids = example.get('label_id')
        if self.transforms is not None:
            input_ids, input_mask, segment_ids, label_ids = self.transforms(input_ids, input_mask, segment_ids,
                                                                            label_ids)
        target = label_ids
        data = dict(input_ids=input_ids, attention_mask=input_mask, token_type_ids=segment_ids, labels=label_ids)
        return data, target

    def __len__(self):
        """Get the length of the dataset."""
        return len(self.examples)

    def get_train_examples(self, data_dir):
        """See base class."""
        return
self._create_examples(self._read_tsv(os.path.join(data_dir, "train.tsv")), "train")

    def get_val_examples(self, data_dir):
        """See base class."""
        return self._create_examples(self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev")

    def get_test_examples(self, data_dir):
        """See base class."""
        return self._create_examples(self._read_tsv(os.path.join(data_dir, "test.tsv")), "test")

    def get_labels(self):
        """See base class."""
        return ["0", "1"]

    def _create_examples(self, lines, set_type):
        """Create examples for the training, dev and test sets."""
        examples = []
        for (i, line) in enumerate(lines):
            if i == 0:
                continue
            guid = "%s-%s" % (set_type, i)
            text_a = line[3]
            text_b = line[4]
            label = None if set_type == "test" else line[0]
            examples.append(Config(dict(guid=guid, text_a=text_a, text_b=text_b, label=label)))
        return examples

    @classmethod
    def _read_tsv(cls, input_file, quotechar=None):
        """Read a tab separated value file."""
        with open(input_file, "r", encoding="utf-8-sig") as f:
            return list(csv.reader(f, delimiter="\t", quotechar=quotechar))

    def convert_examples_to_features(self, examples, label_list, max_seq_length, tokenizer):
        """Load a data file into a list of `InputBatch`s."""
        label_map = {label: i for i, label in enumerate(label_list)}
        features = []
        for (ex_index, example) in enumerate(examples):
            tokens_a = tokenizer.tokenize(example.text_a)
            tokens_b = None
            if example.text_b:
                tokens_b = tokenizer.tokenize(example.text_b)
                # Modifies `tokens_a` and `tokens_b` in place so that the total
                # length is less than the specified length.
                # Account for [CLS], [SEP], [SEP] with "- 3"
                _truncate_seq_pair(tokens_a, tokens_b, max_seq_length - 3)
            else:
                # Account for [CLS] and [SEP] with "- 2"
                if len(tokens_a) > max_seq_length - 2:
                    tokens_a = tokens_a[:(max_seq_length - 2)]

            # The convention in BERT is:
            # (a) For sequence pairs:
            #  tokens:   [CLS] is this jack ##son ##ville ? [SEP] no it is not . [SEP]
            #  type_ids: 0 0 0 0 0 0 0 0 1 1 1 1 1 1
            # (b) For single sequences:
            #  tokens:   [CLS] the dog is hairy . [SEP]
            #  type_ids: 0 0 0 0 0 0 0
            #
            # Where "type_ids" are used to indicate whether this is the first
            # sequence or the second sequence. The embedding vectors for `type=0` and
            # `type=1` were learned during pre-training and are added to the wordpiece
            # embedding vector (and position vector). This is not *strictly* necessary
            # since the [SEP] token unambiguously separates the sequences, but it makes
            # it easier for the model to learn the concept of sequences.
            #
            # For classification tasks, the first vector (corresponding to [CLS]) is
            # used as the "sentence vector". Note that this only makes sense because
            # the entire model is fine-tuned.
            tokens = ["[CLS]"] + tokens_a + ["[SEP]"]
            segment_ids = [0] * len(tokens)

            if tokens_b:
                tokens += tokens_b + ["[SEP]"]
                segment_ids += [1] * (len(tokens_b) + 1)

            input_ids = tokenizer.convert_tokens_to_ids(tokens)

            # The mask has 1 for real tokens and 0 for padding tokens. Only real
            # tokens are attended to.
            input_mask = [1] * len(input_ids)

            # Zero-pad up to the sequence length.
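            # For example (hypothetical values): with max_seq_length=8 and five
            # real tokens, padding == [0, 0, 0], so input_mask becomes
            # [1, 1, 1, 1, 1, 0, 0, 0] and input_ids/segment_ids are padded likewise.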
padding = [0] * (max_seq_length - len(input_ids))
            input_ids += padding
            input_mask += padding
            segment_ids += padding

            assert len(input_ids) == max_seq_length
            assert len(input_mask) == max_seq_length
            assert len(segment_ids) == max_seq_length
            label_id = label_map[example.label]
            if ex_index < 5:
                logging.info("*** Example ***")
                logging.info("guid: %s" % (example.guid))
                logging.info("tokens: %s" % " ".join([str(x) for x in tokens]))
                logging.info("input_ids: %s" % " ".join([str(x) for x in input_ids]))
                logging.info("input_mask: %s" % " ".join([str(x) for x in input_mask]))
                logging.info(
                    "segment_ids: %s" % " ".join([str(x) for x in segment_ids]))
                logging.info("label: %s (id = %d)" % (example.label, label_id))

            features.append(Config(
                dict(input_ids=input_ids,
                     input_mask=input_mask,
                     segment_ids=segment_ids,
                     label_id=label_id)))
        return features


def _truncate_seq_pair(tokens_a, tokens_b, max_length):
    """Truncate a sequence pair in place to the maximum length."""
    while True:
        total_length = len(tokens_a) + len(tokens_b)
        if total_length <= max_length:
            break
        if len(tokens_a) > len(tokens_b):
            tokens_a.pop()
        else:
            tokens_b.pop()
```
#### File: datasets/pytorch/adapter.py
```python
from torch.utils import data as torch_data
from .samplers import DistributedSampler
from torch.utils.data.sampler import SubsetRandomSampler
import numpy as np


class TorchAdapter(object):
    """Adapter of the dataset for PyTorch.

    The adapter provides several basic attributes like dataloader, sampler and
    collate function.
    """

    def __init__(self, dataset):
        self.dataset = dataset
        self.args = dataset.args
        self.sampler = self._init_sampler()
        self.collate_fn = dataset.collate_fn

    @property
    def sampler(self):
        """Sampler function which can replace sampler."""
        return self._sampler

    @sampler.setter
    def sampler(self, value):
        """Set function of sampler."""
        self._sampler = value

    def _init_sampler(self):
        """Initialize sampler method.

        :return: a DistributedSampler if distributed, a SubsetRandomSampler if
            train_portion is set, else None
        :rtype: an object or None
        """
        if self.dataset.world_size > 1:
            self.args.shuffle = False
            sampler = DistributedSampler(self.dataset,
                                         num_replicas=self.dataset.world_size,
                                         rank=self.dataset.rank,
                                         shuffle=self.args.shuffle)
        elif not hasattr(self.args, "train_portion"):
            sampler = None
        elif self.dataset.mode == 'test' or self.args.train_portion == 1:
            sampler = None
        else:
            self.args.shuffle = False
            num_train = len(self.dataset)
            indices = list(range(num_train))
            split = int(np.floor(self.args.train_portion * num_train))
            if self.dataset.mode == 'train':
                sampler = SubsetRandomSampler(indices[:split])
            elif self.dataset.mode == 'val':
                sampler = SubsetRandomSampler(indices[split:num_train])
            else:
                raise ValueError('the mode should be train, val or test')
        return sampler

    @property
    def loader(self):
        """Dataloader attribute which is a unified interface to generate the data.
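
        If the wrapped dataset defines its own ``data_loader`` attribute, it is
        returned directly; otherwise a ``torch.utils.data.DataLoader`` is built
        with the configured sampler and collate function.
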
:return: a batch data
        :rtype: dict, list, optional
        """
        if hasattr(self.dataset, "data_loader"):
            return self.dataset.data_loader
        data_loader = torch_data.DataLoader(dataset=self.dataset,
                                            batch_size=self.args.batch_size,
                                            shuffle=self.args.shuffle,
                                            num_workers=self.args.num_workers,
                                            pin_memory=self.args.pin_memory,
                                            sampler=self.sampler,
                                            drop_last=self.args.drop_last,
                                            collate_fn=self.collate_fn)
        return data_loader
```
#### File: datasets/tensorflow/imagenet.py
```python
import os
import tensorflow as tf
from official.r1.resnet.imagenet_preprocessing import preprocess_image
from vega.common import ClassFactory, ClassType
from vega.common import FileOps
from ..common.dataset import Dataset
from vega.datasets.conf.imagenet import ImagenetConfig


@ClassFactory.register(ClassType.DATASET)
class Imagenet(Dataset):
    """This is a class for the Imagenet dataset.

    :param data_dir: Imagenet data directory
    :type data_dir: str
    :param image_size: input image size
    :type image_size: int
    :param batch_size: batch size
    :type batch_size: int
    :param mode: dataset mode, train or val
    :type mode: str
    :param fp16: whether to use fp16
    :type fp16: bool, default False
    :param num_parallel_batches: number of parallel batches
    :type num_parallel_batches: int, default 8
    :param drop_remainder: whether to drop the data remainder
    :type drop_remainder: bool, default False
    :param transpose_input: whether to transpose the input dimension
    :type transpose_input: bool, default false
    """

    config = ImagenetConfig()

    def __init__(self, **kwargs):
        """Init Imagenet."""
        super(Imagenet, self).__init__(**kwargs)
        self.data_path = FileOps.download_dataset(self.args.data_path)
        self.fp16 = self.args.fp16
        self.num_parallel_batches = self.args.num_parallel_batches
        self.image_size = self.args.image_size
        self.drop_remainder = self.args.drop_last
        if self.data_path == 'null' or not self.data_path:
            self.data_path = None
        self.num_parallel_calls = self.args.num_parallel_calls

    def _record_parser(self, raw_record):
        """Parse dataset function."""
        features_dict = {
            'image/encoded': tf.FixedLenFeature((), tf.string, ''),
            'image/class/label': tf.FixedLenFeature([], tf.int64, -1),
        }
        parsed = tf.parse_single_example(raw_record, features_dict)
        image_buffer = parsed['image/encoded']
        bbox = tf.constant([0.0, 0.0, 1.0, 1.0], dtype=tf.float32, shape=[1, 1, 4])
        image = preprocess_image(image_buffer=image_buffer,
                                 bbox=bbox,
                                 output_height=self.image_size,
                                 output_width=self.image_size,
                                 num_channels=3,
                                 is_training=self.train)
        image = tf.cast(image, dtype=tf.float16 if self.fp16 else tf.float32)
        label = tf.cast(parsed['image/class/label'], dtype=tf.int32) - 1
        return image, label

    def _read_raw_data(self, data_file):
        """Read raw data."""
        dataset = tf.data.TFRecordDataset(data_file, buffer_size=8 * 1024 ** 2)
        return dataset

    def input_fn(self):
        """Define input_fn used by Tensorflow Estimator."""
        data_files = os.path.join(
            self.data_path, 'train/train-*' if self.mode == 'train' else 'val/val-*')
        dataset = tf.data.Dataset.list_files(data_files, shuffle=False)
        if self.world_size > 1:
            dataset = dataset.shard(self.world_size, self.rank)
        if self.mode == 'train':
            dataset = dataset.shuffle(buffer_size=1024)
            dataset = dataset.repeat()
        dataset = dataset.apply(tf.contrib.data.parallel_interleave(
            self._read_raw_data, cycle_length=self.num_parallel_calls, sloppy=True))
        dataset = dataset.apply(
            tf.contrib.data.map_and_batch(
                self._record_parser, batch_size=self.args.batch_size,
                num_parallel_batches=self.num_parallel_batches,
                drop_remainder=self.drop_remainder))
        dataset =
dataset.prefetch(tf.contrib.data.AUTOTUNE)
        return dataset
```
#### File: transforms/pytorch/PBATransformer.py
```python
import numpy as np
from ..Cutout import Cutout
from vega.common import ClassFactory, ClassType


@ClassFactory.register(ClassType.TRANSFORM)
class PBATransformer(object):
    """Applies PBATransformer to 'img'.

    The PBATransformer operation combines 15 transformer operations to convert the image.

    :param para_array: parameters of the operation specified as an Array.
    :type para_array: array
    """

    transforms = dict()

    def __init__(self, para_array, operation_names, **kwargs):
        """Construct the PBATransformer class."""
        self.para_array = para_array
        self.operation_names = operation_names
        self.split_policy(self.para_array)

    def split_policy(self, raw_policys):
        """Decode raw_policys, get the name, probability and level of each operation.

        :param raw_policys: raw policys which were read from the .csv file
        :type raw_policys: array
        """
        split = len(raw_policys) // 2
        if split % 2 == 1:
            raise ValueError(
                'raw_policys is illegal, the length of raw_policys should be an even number!')
        self.policys = self.decode_policys(raw_policys[:split])
        self.policys += self.decode_policys(raw_policys[split:])

    def decode_policys(self, raw_policys):
        """Decode raw_policys, get the name, probability and level of each operation.

        :param raw_policys: raw policys which haven't been decoded, a list of int numbers
        :type raw_policys: list
        :return: policys which have been decoded, a list of [name, probability, level] of each operation
        :rtype: list
        """
        policys = []
        for i, operation_name in enumerate(self.operation_names):
            policys.append((operation_name, int(raw_policys[2 * i]) / 10., int(raw_policys[2 * i + 1])))
        return policys

    def __call__(self, img):
        """Call function of PBATransformer.

        :param img: input image
        :type img: numpy or tensor
        :return: the image after transform
        :rtype: numpy or tensor
        """
        count = np.random.choice([0, 1, 2], p=[0.2, 0.3, 0.5])
        policys = self.policys
        np.random.shuffle(policys)
        whether_cutout = [0, 0]
        for policy in policys:
            if count == 0:
                break
            if len(policy) != 3:
                raise ValueError(
                    'policy is illegal, policy should be (op, prob, mag)!')
            op, prob, mag = policy
            if np.random.random() > prob:
                continue
            else:
                count -= 1
            if op == "Cutout":
                if whether_cutout[0] == 0:
                    whether_cutout[0] = mag
                else:
                    whether_cutout[1] = mag
                continue
            operation = ClassFactory.get_cls(ClassType.TRANSFORM, op)
            current_operation = operation(mag)
            img = current_operation(img)
        from torchvision.transforms import functional as F
        if F._is_pil_image(img):
            img = F.to_tensor(img)
            img = Cutout(8)(img)
        for i in whether_cutout:
            if i:
                img = Cutout(i)(img)
        return img
```
#### File: evaluator/tools/evaluate_davinci_bolt.py
```python
import os
import requests
import logging
import subprocess
import pickle
import numpy as np


def evaluate(backend, hardware, remote_host, model, weight, test_data, input_shape=None,
             reuse_model=False, job_id=None, quantize=False, repeat_times=1):
    """Evaluate interface of the EvaluateService.
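
    The model and test data are uploaded to the evaluate service over HTTP.
    If the first request fails, the evaluation is retried up to four more
    times, reusing the already-uploaded model on the service side.
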
:param backend: the backend can be one of "tensorflow", "caffe", "pytorch" and "mindspore"
    :type backend: str
    :param hardware: the hardware can be one of "Davinci", "Bolt" and "Kirin990_npu"
    :type hardware: str
    :param remote_host: the remote host ip and port of the evaluate service
    :type remote_host: str
    :param model: model file, .pb file for tensorflow and .prototxt for caffe, and a model class for Pytorch
    :type model: str or Class
    :param weight: .caffemodel file for caffe
    :type weight: str
    :param test_data: binary file, .data or .bin
    :type test_data: str
    :return: the latency in Davinci or Bolt
    :rtype: float
    """
    if backend not in ["tensorflow", "caffe", "pytorch", "mindspore"]:
        raise ValueError("The backend only supports tensorflow, caffe, pytorch and mindspore.")

    if hardware not in ["Davinci", "Bolt", "Kirin990_npu"]:
        raise ValueError("The hardware only supports Davinci, Bolt and Kirin990_npu.")

    if input_shape is None:
        raise ValueError("The input shape must be provided.")

    if not reuse_model:
        base_save_dir = os.path.dirname(test_data)
        model, weight, backend = preprocessing_model(backend, hardware, model, weight, input_shape,
                                                     base_save_dir, quantize, test_data)
        model_file = open(model, "rb")
        data_file = open(test_data, "rb")
        if backend == "caffe":
            weight_file = open(weight, "rb")
            upload_data = {"model_file": model_file, "weight_file": weight_file, "data_file": data_file}
        else:
            upload_data = {"model_file": model_file, "data_file": data_file}
    else:
        data_file = open(test_data, "rb")
        upload_data = {"data_file": data_file}

    evaluate_config = {"backend": backend, "hardware": hardware, "remote_host": remote_host,
                       "reuse_model": reuse_model, "job_id": job_id, "repeat_times": repeat_times}
    if backend == 'tensorflow':
        shape_list = [str(s) for s in input_shape]
        shape_cfg = {"input_shape": "Placeholder:" + ",".join(shape_list)}
        evaluate_config.update(shape_cfg)
    if backend == "tensorflow" and hardware == "Kirin990_npu":
        out_node_name = _get_pb_out_nodes(model)
        out_node_cfg = {"out_nodes": out_node_name}
        evaluate_config.update(out_node_cfg)

    evaluate_result = requests.post(remote_host, files=upload_data, data=evaluate_config,
                                    proxies={"http": None}).json()
    # evaluate_result = requests.get(remote_host, proxies={"http": None}).json()
    if evaluate_result.get("status") != "sucess":
        logging.warning("Evaluate failed and will try again, the status is {}, the timestamp is {}".format(
            evaluate_result.get("status"), evaluate_result.get("timestamp")))
        evaluate_config["reuse_model"] = True
        upload_data = {"data_file": open(test_data, "rb")}
        retry_times = 4
        for i in range(retry_times):
            evaluate_result = requests.post(remote_host, files=upload_data, data=evaluate_config,
                                            proxies={"http": None}).json()
            if evaluate_result.get("status") == "sucess":
                logging.info("Evaluate succeeded! The latency is {}.".format(evaluate_result["latency"]))
                break
            else:
                if i == 3:
                    logging.error(
                        "Evaluate failed, the status is {}, the timestamp is {}, the retry times is {}.".format(
                            evaluate_result.get("status"), evaluate_result.get("timestamp"), i + 1))
                else:
                    logging.warning(
                        "Evaluate failed, the status is {}, the timestamp is {}, the retry times is {}.".format(
                            evaluate_result.get("status"), evaluate_result.get("timestamp"), i + 1))
    else:
        logging.info("Evaluate succeeded! The latency is {}.".format(evaluate_result["latency"]))
    return evaluate_result


def preprocessing_model(backend, hardware, model, weight, input_shape, base_save_dir, quantize, test_data):
    """Preprocess the model.

    :param backend: the backend can be one of "tensorflow", "caffe", "pytorch" and "mindspore".
:type backend: str :param hardware: the backend can be one of "Davinci", "Bolt" :type hardware: str :param model: model file, .pb file for tensorflow and .prototxt for caffe, and a model class for Pytorch :type model: str or Class :param weight: .caffemodel file for caffe :type weight: str :param input_shape: the shape of input data :type input_shape: list :param base_save_dir: the save dir of the preprocessed model and weight :type base_save_dir: str """ if backend == "pytorch": if hardware == "Bolt": from .pytorch2onnx import pytorch2onnx model = pytorch2onnx(model, input_shape) else: model_file = os.path.join(base_save_dir, "torch_model.pkl") shape_file = os.path.join(base_save_dir, "input_shape.pkl") with open(model_file, "wb") as f: pickle.dump(model, f) with open(shape_file, "wb") as f: pickle.dump(input_shape, f) env = os.environ.copy() abs_path = os.path.abspath(__file__) cur_dir = os.path.dirname(abs_path) shell_file = os.path.join(cur_dir, "pytorch2caffe.sh") command_line = ["bash", shell_file, cur_dir, model_file, shape_file] try: subprocess.check_output(command_line, env=env) except subprocess.CalledProcessError as exc: logging.error("convert torch model to caffe model failed.\ the return code is: {}.".format(exc.returncode)) model = os.path.join(base_save_dir, "torch2caffe.prototxt") weight = os.path.join(base_save_dir, "torch2caffe.caffemodel") backend = "caffe" elif backend == "tensorflow": pb_model_file = os.path.join(base_save_dir, "tf_model.pb") if os.path.exists(pb_model_file): os.remove(pb_model_file) freeze_graph(model, weight, pb_model_file, input_shape, quantize, test_data) model = pb_model_file elif backend == "mindspore": from mindspore.train.serialization import export from mindspore import Tensor fake_input = np.random.random(input_shape).astype(np.float32) save_name = os.path.join(base_save_dir, "ms2air.air") export(model, Tensor(fake_input), file_name=save_name, file_format='AIR') model = save_name return model, weight, backend def freeze_graph(model, weight_file, output_graph_file, input_shape, quantize, test_data): """Freeze the tensorflow graph. :param model: the tensorflow model :type model: str :param output_graph_file: the file to save the freeze graph model :type output_graph_file: str """ import tensorflow as tf from tensorflow.python.framework import graph_util with tf.Graph().as_default(): input_holder_shape = (None,) + tuple(input_shape[1:]) input_holder = tf.placeholder(dtype=tf.float32, shape=input_holder_shape) model.training = False output = model(input_holder) if isinstance(output, tuple): output_name = [output[0].name.split(":")[0]] else: output_name = [output.name.split(":")[0]] with tf.Session() as sess: sess.run(tf.global_variables_initializer()) # if weight_file is None, only latency can be evaluated if weight_file is not None: saver = tf.train.Saver() last_weight_file = tf.train.latest_checkpoint(weight_file) if last_weight_file: saver.restore(sess, last_weight_file) constant_graph = graph_util.convert_variables_to_constants(sess, sess.graph_def, output_name) with tf.gfile.FastGFile(output_graph_file, mode='wb') as f: f.write(constant_graph.SerializeToString()) if quantize: from .quantize_model import quantize_model quantize_model(output_graph_file, test_data, input_holder, output) def _get_pb_out_nodes(pb_file): """Get the out nodes of pb model. 
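
    The name of the last node in the graph def is taken as the output node (an
    assumption that holds for the sequential models exported here) and is
    returned with a ':0' tensor suffix.
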
:param pb_file: the pb model file :type pb_file: str """ import tensorflow as tf new_graph = tf.Graph() with new_graph.as_default(): with tf.gfile.FastGFile(pb_file, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) tf.import_graph_def(graph_def, name='') tensor_name_list = [tensor.name for tensor in new_graph.as_graph_def().node] out_node = tensor_name_list[-1] out_node_name = str(out_node) + ":0" return out_node_name ``` #### File: vega/vega/__init__.py ```python __version__ = "1.5.0" import sys if sys.version_info < (3, 6): sys.exit('Sorry, Python < 3.6 is not supported.') from .common.backend_register import * from .common.class_factory import ClassFactory, ClassType from .core import run, init_cluster_args, module_existed from .trainer.trial_agent import TrialAgent def network(name, **kwargs): """Return network.""" return ClassFactory.get_cls(ClassType.NETWORK, name)(**kwargs) def dataset(name, **kwargs): """Return dataset.""" return ClassFactory.get_cls(ClassType.DATASET, name)(**kwargs) def trainer(name="Trainer", **kwargs): """Return trainer.""" return ClassFactory.get_cls(ClassType.TRAINER, name)(**kwargs) ``` #### File: metrics/pytorch/detection_metric.py ```python import os import json from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval from vega.common import ClassFactory, ClassType from vega.metrics.pytorch.metrics import MetricBase from vega.common.task_ops import TaskOps @ClassFactory.register(ClassType.METRIC, alias='coco') class CocoMetric(MetricBase): """Save and summary metric from mdc dataset using coco tools.""" __metric_name__ = "coco" def __init__(self, anno_path=None, category=None): self.anno_path = anno_path or os.path.join(TaskOps().local_output_path, 'instances.json') self.category = category or [] self.result_record = [] @property def objective(self): """Define reward mode, default is max.""" return {'mAP': 'MAX', 'AP50': 'MAX', 'AP_small': 'MAX', 'AP_medium': 'MAX', 'AP_large': 'MAX'} def __call__(self, output, targets, *args, **kwargs): """Append input into result record cache. :param output: output data :param target: target data :return: """ if isinstance(output, dict): return None coco_results = [] for id, prediction in enumerate(output): boxes = xyxy2xywh(prediction['boxes']) scores = prediction["scores"].tolist() labels = prediction["labels"].tolist() img_id = targets[id]['image_id'].tolist()[0] for idx, box in enumerate(boxes): data = {} data['image_id'] = img_id data['bbox'] = box data['score'] = scores[idx] data['category_id'] = labels[idx] coco_results.append(data) self.result_record.extend(coco_results) return None def reset(self): """Reset states for new evaluation after each epoch.""" self.result_record = [] def summary(self): """Summary all record from result cache, and get performance.""" if not self.result_record: return {"mAP": -1, "AP_small": -1, "AP_medium": -1, "AP_large": -1} det_json_file = os.path.join(TaskOps().local_output_path, 'det_json_file.json') with open(det_json_file, 'w') as f: json.dump(self.result_record, f) eval_result = self.print_scores(det_json_file, self.anno_path) ap_result = eval_result.pop('AP(bbox)') ap_result = list(ap_result) ap_result = { "mAP": ap_result[0] * 100, "AP50": ap_result[1] * 100, "AP_small": ap_result[3] * 100, "AP_medium": ap_result[4] * 100, "AP_large": ap_result[5] * 100 } if eval_result: ap_result.update(eval_result) return ap_result def print_scores(self, det_json_file, json_file): """Print scores. 
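
        COCO bbox evaluation is run once over all detections, and then once
        per category in ``self.category`` to collect the per-class AP50 scores.
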
:param det_json_file: dest json file :param json_file: gt json file :return: """ ret = {} coco = COCO(json_file) cocoDt = coco.loadRes(det_json_file) cocoEval = COCOeval(coco, cocoDt, 'bbox') cocoEval.evaluate() cocoEval.accumulate() cocoEval.summarize() ret['AP(bbox)'] = cocoEval.stats for id, item in enumerate(self.category): cocoEval = COCOeval(coco, cocoDt, 'bbox') cocoEval.params.catIds = [id + 1] # cocoEval.params.iouThrs = [0.5] cocoEval.evaluate() cocoEval.accumulate() cocoEval.summarize() if len(cocoEval.stats) > 0: ret[item] = cocoEval.stats[1] * 100 return ret def xyxy2xywh(boxes): """Transform the bbox coordinate to [x,y ,w,h]. :param bbox: the predict bounding box coordinate :type bbox: list :return: [x,y ,w,h] :rtype: list """ xmin, ymin, xmax, ymax = boxes.unbind(1) import torch return torch.stack((xmin, ymin, xmax - xmin, ymax - ymin), dim=1).tolist() ``` #### File: vega/model_zoo/__init__.py ```python from .model_zoo import ModelZoo def register_modelzoo(backend): """Import and register modelzoo automatically.""" if backend != "pytorch": return from .torch_vision_model import import_all_torchvision_models import logging try: import_all_torchvision_models() except Exception as e: logging.warn("Failed to import torchvision models, msg={}".format(str(e))) ``` #### File: modules/blocks/blocks.py ```python from vega.common import ClassType, ClassFactory from vega.modules.module import Module from vega.modules.connections import Add from vega.modules.operators import ops @ClassFactory.register(ClassType.NETWORK) class ShortCut(Module): """Create Shortcut SearchSpace.""" def __init__(self, inchannel, outchannel, expansion, stride=1, norm_layer={"norm_type": 'BN'}): """Create ShortCut layer. :param inchannel: input channel. :type inchannel: int :param outchannel: output channel. :type outchannel: int :param expansion: expansion :type expansion: int :param stride: the number to jump, default 1 :type stride: int """ super(ShortCut, self).__init__() if stride != 1 or inchannel != outchannel * expansion: self.conv1 = ops.Conv2d(in_channels=inchannel, out_channels=outchannel * expansion, kernel_size=1, stride=stride, bias=False) self.batch = build_norm_layer(features=outchannel * expansion, **norm_layer) else: self.identity = ops.Identity() @ClassFactory.register(ClassType.NETWORK) class BottleConv(Module): """Create BottleConv Searchspace.""" def __init__(self, inchannel, outchannel, expansion, groups, base_width, stride=1, norm_layer={"norm_type": 'BN'}, Conv2d='Conv2d'): """Create BottleConv layer. :param inchannel: input channel. :type inchannel: int :param outchannel: output channel. 
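        The requested width is rescaled internally to
        int(outchannel * (base_width / 64.)) * groups, following the ResNeXt
        width rule, before the 1x1-3x3-1x1 convolution stack is built.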
:type outchannel: int :param expansion: expansion :type expansion: int :param stride: the number to jump, default 1 :type stride: int """ super(BottleConv, self).__init__() outchannel = int(outchannel * (base_width / 64.)) * groups self.conv1 = build_conv_layer(in_channels=inchannel, out_channels=outchannel, kernel_size=1, stride=1, bias=False, Conv2d=Conv2d) self.batch1 = build_norm_layer(features=outchannel, **norm_layer) self.relu1 = ops.Relu(inplace=True) self.conv2 = build_conv_layer(in_channels=outchannel, out_channels=outchannel, kernel_size=3, stride=stride, padding=1, groups=groups, bias=False, Conv2d=Conv2d) self.batch2 = build_norm_layer(features=outchannel, **norm_layer) self.relu2 = ops.Relu(inplace=True) self.conv3 = build_conv_layer(in_channels=outchannel, out_channels=outchannel * expansion, kernel_size=1, stride=1, bias=False, Conv2d=Conv2d) self.batch3 = build_norm_layer(features=outchannel * expansion, **norm_layer) @ClassFactory.register(ClassType.NETWORK) class BasicConv(Module): """Create BasicConv Searchspace.""" def __init__(self, inchannel, outchannel, groups=1, base_width=64, stride=1, norm_layer={"norm_type": 'BN'}, Conv2d='Conv2d'): """Create BasicConv layer. :param inchannel: input channel. :type inchannel: int :param outchannel: output channel. :type outchannel: int :param stride: the number to jump, default 1 :type stride: int """ super(BasicConv, self).__init__() self.conv = build_conv_layer(in_channels=inchannel, out_channels=outchannel, kernel_size=3, stride=stride, padding=1, groups=groups, bias=False, Conv2d=Conv2d) self.batch = build_norm_layer(features=outchannel, **norm_layer) self.relu = ops.Relu(inplace=True) self.conv2 = build_conv_layer(in_channels=outchannel, out_channels=outchannel, kernel_size=3, stride=1, padding=1, groups=groups, bias=False, Conv2d=Conv2d) self.batch2 = build_norm_layer(features=outchannel, **norm_layer) @ClassFactory.register(ClassType.NETWORK) class SmallInputInitialBlock(Module): """Create SmallInputInitialBlock SearchSpace.""" def __init__(self, init_plane): """Create SmallInputInitialBlock layer. :param init_plane: input channel. :type init_plane: int """ super(SmallInputInitialBlock, self).__init__() self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=3, stride=1, padding=1, bias=False) self.bn = ops.BatchNorm2d(num_features=init_plane) self.relu = ops.Relu() @ClassFactory.register(ClassType.NETWORK) class InitialBlock(Module): """Create InitialBlock SearchSpace.""" def __init__(self, init_plane): """Create InitialBlock layer. :param init_plane: input channel. :type init_plane: int """ super(InitialBlock, self).__init__() self.conv = ops.Conv2d(in_channels=3, out_channels=init_plane, kernel_size=7, stride=2, padding=3, bias=False) self.batch = ops.BatchNorm2d(num_features=init_plane) self.relu = ops.Relu() self.maxpool2d = ops.MaxPool2d(kernel_size=3, stride=2, padding=1) @ClassFactory.register(ClassType.NETWORK) class BasicBlock(Module): """Create BasicBlock SearchSpace.""" expansion = 1 def __init__(self, inchannel, outchannel, groups=1, base_width=64, stride=1, norm_layer={"norm_type": 'BN'}, Conv2d='Conv2d'): """Create BasicBlock layers. :param inchannel: input channel. :type inchannel: int :param outchannel: output channel. 
:type outchannel: int :param stride: the number to jump, default 1 :type stride: int """ super(BasicBlock, self).__init__() base_conv = BasicConv(inchannel=inchannel, outchannel=outchannel, stride=stride, groups=groups, base_width=base_width, norm_layer=norm_layer, Conv2d=Conv2d) shortcut = ShortCut(inchannel=inchannel, outchannel=outchannel, expansion=self.expansion, stride=stride, norm_layer=norm_layer) self.block = Add(base_conv, shortcut) self.relu = ops.Relu() @ClassFactory.register(ClassType.NETWORK) class BottleneckBlock(Module): """Create BottleneckBlock SearchSpace.""" expansion = 4 def __init__(self, inchannel, outchannel, groups=1, base_width=64, stride=1, norm_layer={"norm_type": 'BN'}, Conv2d='Conv2d'): """Create BottleneckBlock layers. :param inchannel: input channel. :type inchannel: int :param outchannel: output channel. :type outchannel: int :param stride: the number to jump, default 1 :type stride: int """ super(BottleneckBlock, self).__init__() bottle_conv = BottleConv(inchannel=inchannel, outchannel=outchannel, expansion=self.expansion, stride=stride, groups=groups, base_width=base_width, norm_layer=norm_layer, Conv2d=Conv2d) shortcut = ShortCut(inchannel=inchannel, outchannel=outchannel, expansion=self.expansion, stride=stride, norm_layer=norm_layer) self.block = Add(bottle_conv, shortcut) self.relu = ops.Relu() @ClassFactory.register(ClassType.NETWORK) class PruneBasicBlock(Module): """Basic block class in prune resnet.""" expansion = 1 def __init__(self, inchannel, outchannel, innerchannel, stride=1): """Init PruneBasicBlock.""" super(PruneBasicBlock, self).__init__() conv_block = PruneBasicConv(inchannel, outchannel, innerchannel, stride) shortcut = ShortCut(inchannel, outchannel, self.expansion, stride) self.block = Add(conv_block, shortcut) self.relu3 = ops.Relu() @ClassFactory.register(ClassType.NETWORK) class PruneBasicConv(Module): """Create PruneBasicConv Searchspace.""" def __init__(self, in_planes, planes, inner_plane, stride=1): """Create BottleConv layer.""" super(PruneBasicConv, self).__init__() self.conv1 = ops.Conv2d( in_planes, inner_plane, kernel_size=3, stride=stride, padding=1, bias=False) self.bn1 = ops.BatchNorm2d(inner_plane) self.relu = ops.Relu() self.conv2 = ops.Conv2d(inner_plane, planes, kernel_size=3, stride=1, padding=1, bias=False) self.bn2 = ops.BatchNorm2d(planes) self.relu2 = ops.Relu() @ClassFactory.register(ClassType.NETWORK) class TextConvBlock(Module): """Create Conv Block in text CNN.""" def __init__(self, in_channels=1, out_channels=16, kernel_size=(3, 3)): super(TextConvBlock, self).__init__() self.conv1 = ops.Conv2d(in_channels, out_channels=out_channels, kernel_size=kernel_size) self.squeeze = ops.Squeeze(3) self.relu = ops.Relu() self.max_pool = ops.GlobalMaxPool1d() self.squeeze2 = ops.Squeeze(-1) def build_norm_layer(features, norm_type='BN', **kwargs): """Build norm layers according to their type. :param features: input tensor. :param norm_type: type of norm layer. :param **kwargs: other optional parameters. 
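    :Example:

    >>> bn = build_norm_layer(64)  # BatchNorm2d with default settings
    >>> gn = build_norm_layer(64, norm_type='GN', num_groups=8)  # GN requires num_groups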
""" if norm_type == 'BN': return ops.BatchNorm2d(features, **kwargs) elif norm_type == 'GN': assert 'num_groups' in kwargs.keys(), 'num_groups is required for group normalization' num_groups = kwargs.pop('num_groups') return ops.GroupNorm(num_groups, features, **kwargs) elif norm_type == 'Sync': return ops.SyncBatchNorm(features, **kwargs) else: raise ValueError('norm type {} is not defined'.format(norm_type)) def build_conv_layer(in_channels, out_channels, kernel_size, bias, Conv2d, padding=0, groups=1, stride=1): """Build conv layers according to their type. :param features: input tensor. :param norm_type: type of norm layer. :param **kwargs: other optional parameters. """ if Conv2d == 'Conv2d': return ops.Conv2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups, bias=bias) elif Conv2d == 'ConvWS2d': return ops.ConvWS2d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, stride=stride, padding=padding, groups=groups, bias=bias) ``` #### File: vega/modules/graph_utils.py ```python import logging from dag import DAG from collections import OrderedDict import re from .nodes import Node from .nodes import Sequential, Add def graph2desc(graph): """Parse graph to Network Desc.""" ops = get_ops_from_graph(graph) dag = ops2dag(ops) desc = Dag2Module(dag, ops).parse() logging.info("Success to create desc form graph") logging.debug(desc) return desc def get_ops_from_graph(graph): """Get ops from graph and convert Node.""" ops = graph.get_operations() merged_ops = OrderedDict() for op in ops: support_ops_name = None scope_name = None for _support_ops_name in Node.__support_ops__: if re.findall(_support_ops_name, op.name) or op.name.endswith(_support_ops_name): support_ops_name = _support_ops_name if op.type in Node.__support_ops_types__: support_ops_name = op.name scope_name = op.name if not support_ops_name: continue scope_name = scope_name or op.name[:op.name.index(support_ops_name)] all_ops_in_scope = [op for op in ops if op.name.startswith(scope_name + '/') or op.name == scope_name] if not all_ops_in_scope and len(op.inputs) == 0: continue inputs = op.inputs if inputs and inputs[0].op.type == 'Identity': all_ops_in_scope.insert(0, inputs) inputs = op.inputs[0].op.inputs type_name = op.type if op.type != 'Const' else op.name.split('/')[-1] if op.type == 'Const': continue node = Node(inputs=inputs, outputs=op.outputs[0], type_name=type_name, op_name=op.name, op_list=all_ops_in_scope) merged_ops[node.op_name] = node if op.name.endswith('Softmax'): break return merged_ops def ops2dag(merged_ops): """Load ops dict into dag.""" dag = DAG() dot = DagGraphVisual() dot.node(name='root', label='root') outs = {op['outputs'].name: op for name, op in merged_ops.items() if op['outputs'] is not None} outs = {k.replace('Conv2D:0', 'BiasAdd:0'): v for k, v in outs.items()} for name, node in merged_ops.items(): inps = node['inputs'] pre_node_name = 'root' dag.add_node_if_not_exists(name) dot.node(name=name, label=name) if inps is not None: for inp in inps: pre_node = outs.get(inp.name) if pre_node is not None: pre_node_name = pre_node.op_name dag.add_edge(pre_node_name, name) dot.edge(pre_node_name, name) else: dag.add_edge(pre_node_name, name) dot.edge(pre_node_name, name) dot.show() return dag class Dag2Module(object): """Parse dag to module desc.""" def __init__(self, dag, ops): self.g = dag.graph self.ops = ops self.e = self._convert_edge_list() self.muti_edges = [k for k, v in self.g.items() if len(v) > 1] 
self.muti_node = [k for k, v in self.e.items() if len(v) > 1] def parse(self): """Parse graph to Sequential desc.""" result = Sequential() while self.g: k, v = self.g.popitem(False) if self._is_connection_node(k): continue result.append(self.ops.get(k)) if self._is_branch_node(k): branch_seq = [] for _ in v: seq = self._parse_branch_graph(self.g) branch_seq.append(seq) branch = Add(*branch_seq) result.append(branch) return result.to_json() def _convert_edge_list(self): e = OrderedDict() for node, edge in self.g.items(): for v in edge: e[v] = [node] if v not in e else e[v] + [node] return e def _is_branch_node(self, node): return node in self.muti_edges def _is_connection_node(self, node): if not node: return False if isinstance(node, set): return node.issubset(self.muti_node) else: return node in self.muti_node def _parse_branch_graph(self, g): seq = Sequential() k, v = g.popitem(False) if self._is_connection_node(k): return seq seq.append(self.ops.get(k)) while not self._is_connection_node(v): k, v = g.popitem(False) seq.append(self.ops.get(k)) return seq class DagGraphVisual(object): """Dag Graph Visual.""" def __init__(self, show_dag=False): if show_dag: from graphviz import Digraph self.dot = Digraph(name="Root", comment="network", format="png") else: self.dot = None def node(self, name, label): """Add node to dot.""" if self.dot: self.dot.node(name=name, label=label, color='green') def edge(self, pre_node_name, name): """Add edge to dot.""" if self.dot: self.dot.edge(pre_node_name, name) def show(self): """Show dot.""" if self.dot: self.dot.view() ``` #### File: modules/loss/mean_loss.py ```python from vega.modules.module import Module from vega.common import ClassType, ClassFactory @ClassFactory.register(ClassType.LOSS) class MeanLoss(Module): """MeanLoss Loss for data.""" def __init__(self): super(MeanLoss, self).__init__() def call(self, inputs, targets): """Compute loss, mean() to average on multi-gpu.""" return inputs.mean() ``` #### File: operators/functions/pytorch_to_ms.py ```python import os import torch import logging from mindspore.train.serialization import save_checkpoint, load_checkpoint from mindspore import Tensor import numpy as np import uuid def pytorch2mindspore(pth_file): """Convert pytorch weight to mindspore checkpoint.""" torch_para_dict = torch.load(pth_file) torch_weight_list = [] torch_paras_name_list = [] ms_params_list = [] ms_para_name_list = [] for index, name in enumerate(torch_para_dict): torch_paras_name_list.append(name) torch_weight = torch_para_dict[name] # if name == "fc.weight": # ms_name = "fc.linear.weight" # elif name == "fc.bias": # ms_name = "fc.linear.bias" if name.endswith("weight"): name = name[:name.rfind("weight")] ms_name = "backbone." + name + "conv2d.weight" elif name.endswith('bias'): name = name[:name.rfind('bias')] ms_name = "backbone." + name + 'batch_norm.beta' elif name.endswith('.running_mean'): # fix batch_norm name old_name_gamma = ms_para_name_list[index - 2] new_name_gamma = old_name_gamma[:old_name_gamma.rfind('conv2d.weight')] + "batch_norm.gamma" ms_para_name_list[index - 2] = new_name_gamma name = name[:name.rfind('.running_mean')] ms_name = "backbone." + name + '.batch_norm.moving_mean' elif name.endswith('.running_var'): name = name[:name.rfind('.running_var')] ms_name = "backbone." 
+ name + '.batch_norm.moving_variance'
        elif name.endswith(".num_batches_tracked"):
            ms_name = name
        torch_weight_list.append(torch_weight)
        ms_para_name_list.append(ms_name)
    for index, name in enumerate(ms_para_name_list):
        logging.debug('========================py_name: {}'.format(torch_paras_name_list[index]))
        logging.debug('========================ms_name: {}'.format(name))
        param_dict = {}
        param_dict['name'] = name
        parameter = torch_weight_list[index]
        param_dict['data'] = Tensor(parameter.detach().numpy())
        ms_params_list.append(param_dict)

    save_path = os.path.dirname(pth_file)
    save_file_name = os.path.join(save_path, "torch2ms_" + uuid.uuid1().hex[:8] + ".ckpt")
    save_checkpoint(ms_params_list, save_file_name)
    return save_file_name


def pytorch2mindspore_extend(pth_file, model):
    """Convert torchvision weights to vega weights for MindSpore."""
    init_para_dict = torch.load(pth_file)
    init_names_list = []
    init_weights_list = []
    for name in init_para_dict:
        init_names_list.append(name)
        init_weights_list.append(init_para_dict[name])

    vega_names_list = []
    vega_weights_list = []
    valid_names_list = []
    for name in model.parameters_dict():
        if not name.endswith("num_batches_tracked"):
            vega_names_list.append(name)

    for index, name in enumerate(vega_names_list):
        init_name = init_names_list[index]
        # match conv weights, batch-norm statistics and affine parameters by suffix
        if name.endswith("weight") and ("conv" in name or "downsample" in name or "down_sample" in name) \
                and init_name.endswith("weight") \
                and ("conv" in init_name or "downsample" in init_name or "down_sample" in init_name):
            valid_names_list.append(name)
            vega_weights_list.append(init_weights_list[index])
        elif name.endswith("moving_mean") and init_name.endswith("running_mean"):
            valid_names_list.append(name)
            vega_weights_list.append(init_weights_list[index])
        elif name.endswith("moving_variance") and init_name.endswith("running_var"):
            valid_names_list.append(name)
            vega_weights_list.append(init_weights_list[index])
        elif name.endswith("gamma") and init_name.endswith("weight") and (
                "bn" in init_name or "downsample" in init_name or "down_sample" in init_name):
            valid_names_list.append(name)
            vega_weights_list.append(init_weights_list[index])
        elif name.endswith("beta") and init_name.endswith("bias") and (
                "bn" in init_name or "downsample" in init_name or "down_sample" in init_name):
            valid_names_list.append(name)
            vega_weights_list.append(init_weights_list[index])
        else:
            continue

    ms_params_list = []
    for index, name in enumerate(valid_names_list):
        param_dict = {}
        param_dict['name'] = name
        parameter = vega_weights_list[index]
        param_dict['data'] = Tensor(parameter.detach().numpy())
        ms_params_list.append(param_dict)
    save_path = os.path.dirname(pth_file)
    save_file_name = os.path.join(save_path, "torch2ms_" + uuid.uuid1().hex[:8] + ".ckpt")
    save_checkpoint(ms_params_list, save_file_name)
    return save_file_name


def adaptive_weight(ckpt_file, ms_model):
    """Adapt checkpoint weights whose shapes differ from the network's parameters."""
    parameter_dict = load_checkpoint(ckpt_file)
    net_parameter = ms_model.parameters_and_names()
    new_ms_params_list = []
    for index, paras in enumerate(net_parameter):
        net_para_name = paras[0]
        net_para_shape = paras[1].data.shape
        if net_para_name in parameter_dict:
            init_weight = parameter_dict[net_para_name].data
            init_para_shape = init_weight.shape
            if net_para_shape != init_para_shape:
                if "conv" in net_para_name:
                    new_weight = _adaptive_conv(init_weight, net_para_shape)
                elif "batch_norm" in net_para_name:
                    new_weight = _adaptive_bn(init_weight, net_para_shape)
                else:
                    continue
                logging.debug("parameter shape not match,para name: {}, init_shape:{}, 
net_para_shape:{}". format(net_para_name, init_para_shape, net_para_shape)) param_dict = {} param_dict['name'] = net_para_name param_dict['data'] = init_weight if net_para_shape == init_para_shape else new_weight new_ms_params_list.append(param_dict) # parameter_dict[net_para_name].data = new_weight save_path = os.path.dirname(ckpt_file) save_file_name = os.path.join(save_path, "adaptive_" + uuid.uuid1().hex[:8] + ".ckpt") save_checkpoint(new_ms_params_list, save_file_name) if ckpt_file.startswith("torch2ms_"): os.remove(ckpt_file) return save_file_name def _adaptive_conv(init_weight, new_shape): new_weight = init_weight.asnumpy() init_shape = init_weight.shape if init_shape[0] >= new_shape[0]: new_weight = new_weight[0:new_shape[0]] else: new_weight = np.tile(new_weight, (int(new_shape[0] / init_shape[0]), 1, 1, 1)) if init_shape[1] >= new_shape[1]: new_weight = new_weight[:, 0:new_shape[1]] else: new_weight = np.tile(new_weight, (1, int(new_shape[1] / init_shape[1]), 1, 1)) return Tensor(new_weight) def _adaptive_bn(init_weight, new_shape): new_weight = init_weight.asnumpy() init_shape = init_weight.shape if init_shape[0] >= new_shape[0]: new_weight = new_weight[0:new_shape[0]] else: new_weight = np.tile(new_weight, int(new_shape[0] / init_shape[0])) return Tensor(new_weight) ``` #### File: customs/fis/layers.py ```python from itertools import combinations import logging import torch import torch.nn as nn def generate_pair_index(n, order=2, selected_pairs=None): """Return enumeration of feature combination pair index. :param n: number of valid features, usually equals to `input_dim4lookup` :type n: int :param order: order of interaction. defaults to 2 :type order: int :param selected_pairs: specifying selected pair of index :type selected_pairs: sequence of tuples, optional :return: a list of tuple, each containing feature index :rtype: list of tuple :Example: >>> generate_pair_index(5, 2) >>> [(0, 0, 0, 0, 1, 1, 1, 2, 2, 3), (1, 2, 3, 4, 2, 3, 4, 3, 4, 4)] >>> generate_pair_index(5, 3) >>> [(0, 0, 0, 0, 0, 0, 1, 1, 1, 2), (1, 1, 1, 2, 2, 3, 2, 2, 3, 3), (2, 3, 4, 3, 4, 4, 3, 4, 4, 4)] >>> generate_pair_index(5, 2, [(0,1),(1,3),(2,3)]) >>> [(0, 1, 2), (1, 3, 3)] """ if n < 2: raise ValueError("undefined. please ensure n >= 2") pairs = list(combinations(range(n), order)) if selected_pairs is not None and len(selected_pairs) > 0: valid_pairs = set(selected_pairs) pairs = list(filter(lambda x: x in valid_pairs, pairs)) logging.info("Using following selected feature pairs \n{}".format(pairs)) if len(pairs) != len(selected_pairs): logging.warning("Pair number {} != specified pair number {}".format(len(pairs), len(selected_pairs))) return list(zip(*pairs)) class LinearLayer(torch.nn.Module): """Logistic Regression module.""" def __init__(self, input_dim): """Class of LinearLayer. :param input_dim: feature space of dataset :type input_dim: int """ super(LinearLayer, self).__init__() self.w = torch.nn.Embedding(input_dim, 1) torch.nn.init.xavier_uniform_(self.w.weight.data) self.bias = torch.nn.Parameter(torch.zeros(1)) def forward(self, feature_id, feature_val=None): """Logit = W^T*X + bias. 
:param feature_id: a batch of feature id, tensor of size ``(batch_size, input_dim4lookup)`` :type feature_id: torch.int :param feature_val: a batch of feature value, defaults to None :type feature_val: torch.float, optional :return: logit of LR :rtype: torch.float """ if feature_val is None: return torch.sum(self.w(feature_id), dim=1) + self.bias return torch.sum(self.w(feature_id).squeeze(2) * feature_val, dim=1) + self.bias class EmbeddingLayer(torch.nn.Module): """Embedding module. It is a sparse to dense operation that lookup embedding for given features. """ def __init__(self, input_dim, embed_dim): """Class of EmbeddingLayer. :param input_dim: feature space of dataset :type input_dim: int :param embed_dim: length of each feature's latent vector aka embedding vector :type embed_dim: int """ super(EmbeddingLayer, self).__init__() # todo: add padding_idx = 0 self.embedding = torch.nn.Embedding(input_dim, embed_dim) torch.nn.init.xavier_uniform_(self.embedding.weight.data) def forward(self, feature_id, feature_val=None): """Forward function. :param feature_id: a batch of feature id, tensor of size ``(batch_size, input_dim4lookup)`` :type feature_id: torch.int :param feature_val: a batch of feature value, defaults to None :type feature_val: torch.float, optional :return: embedding tensor of size ``(batch_size, input_dim4lookup, embed_dim)`` :rtype: torch.float """ if feature_val is None: return self.embedding(feature_id) return self.embedding(feature_id) * feature_val.unsqueeze(-1) class FactorizationMachineLayer(torch.nn.Module): """Factorization Machines module. https://www.csie.ntu.edu.tw/~b97053/paper/Rendle2010FM.pdf. :param reduce_sum: whether to sum interaction score of all feature pairs, defaults to `True` :type reduce_sum: bool, optional """ def __init__(self, reduce_sum=True): super(FactorizationMachineLayer, self).__init__() self.reduce_sum = reduce_sum def forward(self, embed_matrix): """Y = sum {<emebd_i, embed_j>}. :param embed_matrix: a batch of embedding features, tensor of size ``(batch_size, input_dim4lookup, embed_dim)`` :type embed_matrix: torch.float :return: FM layer's score. :rtype: torch.float, size ``(batch_size, 1)``(`reduce_sum==True`) or size ``(batch_size, embed_dim)``(`reduce_sum==False`) """ square_of_sum = torch.sum(embed_matrix, dim=1) ** 2 sum_of_square = torch.sum(embed_matrix ** 2, dim=1) ix = square_of_sum - sum_of_square if self.reduce_sum: ix = torch.sum(ix, dim=1, keepdim=True) return 0.5 * ix class NormalizedWeightedFMLayer(torch.nn.Module): """NormalizedWeightedFMLayer module for autogate.""" def __init__(self, input_dim4lookup, alpha_init_mean=0.5, alpha_init_radius=0.001, alpha_activation='tanh', selected_pairs=None, reduce_sum=True): """ Autogate key component, learning to identify & select useful feature interactions with the help of `alpha`. :param input_dim4lookup: feature number in `feature_id`, usually equals to number of non-zero features. 
:type input_dim4lookup: int :param alpha_init_mean: mean of initialization value for `alpha`, defaults to 0.5 :type alpha_init_mean: float, optional :param alpha_init_radius: radius of initialization range for `alpha`, defaults to 0.001 :type alpha_init_radius: float, optional :param alpha_activation: activation function for `alpha`, one of 'tanh' or 'identity', defaults to 'tanh' :type alpha_activation: str, optional :param selected_pairs: use selected feature pairs (denoted by their index in given arrangement), defaults to [] :type selected_pairs: list of tuple, optional :param reduce_sum: whether to sum interaction score of feature pairs, defaults to `True` :type reduce_sum: bool, optional """ super(NormalizedWeightedFMLayer, self).__init__() self.reduce_sum = reduce_sum self.register_buffer('pair_indexes', torch.tensor(generate_pair_index(input_dim4lookup, 2, selected_pairs))) interaction_pair_number = len(self.pair_indexes[0]) self._alpha = torch.nn.Parameter( torch.empty(interaction_pair_number).uniform_( alpha_init_mean - alpha_init_radius, alpha_init_mean + alpha_init_radius), requires_grad=True) self.activate = nn.Tanh() if alpha_activation == 'tanh' else nn.Identity() logging.info("using activation {}".format(self.activate)) self.batch_norm = torch.nn.BatchNorm1d(interaction_pair_number, affine=False, momentum=0.01, eps=1e-3) def forward(self, embed_matrix): """Y = sum{alpha_i_j * BatchNorm(<e_i, e_j>)}. :param embed_matrix: a batch of embedding features, tensor of size ``(batch_size, input_dim4lookup, embed_dim)`` :type embed_matrix: torch.float :return: normalized weighted FM layer's score :rtype: torch.float, size ``(batch_size, 1)``(`reduce_sum==True`) or size ``(batch_size, embed_dim)``(`reduce_sum==False`) """ feat_i, feat_j = self.pair_indexes embed_i = torch.index_select(embed_matrix, 1, feat_i) embed_j = torch.index_select(embed_matrix, 1, feat_j) embed_product = torch.sum(torch.mul(embed_i, embed_j), dim=2) normed_emded_product = self.batch_norm(embed_product) weighted_embed_product = torch.mul(normed_emded_product, self.activate(self._alpha.unsqueeze(0))) if self.reduce_sum: return torch.sum(weighted_embed_product, dim=1, keepdim=True) return weighted_embed_product class MultiLayerPerceptron(torch.nn.Module): """MultiLayerPerceptron module.""" def __init__(self, input_dim, hidden_dims, dropout_prob, add_output_layer=True, batch_norm=False, layer_norm=False): """ Multi Layer Perceptron module. :param input_dim: feature space of dataset :type input_dim: int :param hidden_dims: width of each hidden layer, from bottom to top :type hidden_dims: list of int :param dropout_prob: dropout probability of all hidden layer :type dropout_prob: float :param add_output_layer: whether to add an output layer for binary classification, defaults to `True` :type add_output_layer: bool, optional :param batch_norm: applies batch normalization before activation, defaults to `False` :type batch_norm: bool, optional :param layer_norm: applies layer normalization before activation, defaults to `False` :type layer_norm: bool, optional """ if batch_norm and layer_norm: logging.warning("batch norm and layer norm are not supposed to work together! 
be careful...") super(MultiLayerPerceptron, self).__init__() layers = list() for hidden_dim in hidden_dims: layers.append(torch.nn.Linear(input_dim, hidden_dim, bias=True)) # default init: uniform if batch_norm: layers.append(torch.nn.BatchNorm1d(hidden_dim)) if layer_norm: layers.append(torch.nn.LayerNorm(hidden_dim)) layers.append(torch.nn.ReLU()) layers.append(torch.nn.Dropout(p=dropout_prob)) input_dim = hidden_dim if add_output_layer: layers.append(torch.nn.Linear(input_dim, 1)) self.mlp = torch.nn.Sequential(*layers) def forward(self, embed_matrix): """Forward function. :param embed_matrix: a batch of embedding features, tensor of size ``(batch_size, input_dim4lookup, embed_dim)`` :type embed_matrix: torch.float :return: MLP module's score :rtype: torch.float, size ``(batch_size, 1)``(`add_output_layer==True`) or size ``(batch_size, hidden_dims[-1])``(`add_output_layer==False`) """ return self.mlp(embed_matrix) class FeatureGroupLayer(torch.nn.Module): """FeatureGroupLayer module.""" def __init__(self, input_dim4lookup, embed_dim, bucket_num, temperature, lambda_c, epsilon=1e-20): """ Autogroup key component, applies differentiable feature group selection. :param input_dim4lookup: feature number in `feature_id`, usually equals to number of non-zero features :type input_dim4lookup: int :param embed_dim: length of each feature's latent vector(embedding vector) :type embed_dim: int :param bucket_num: number of hash buckets :type bucket_num: int :param temperature: temperature in Gumbel-Softmax :type temperature: float (0,1] :param lambda_c: compensation coefficient for feature interaction score :type lambda_c: float [0,1] :param epsilon: term added to the denominator to improve numerical stabilit, defaults to 1e-20 :type epsilon: float, optional """ super(FeatureGroupLayer, self).__init__() # params for gumbel-softmax sampling self.structure_logits = torch.nn.Parameter( torch.empty(input_dim4lookup, bucket_num, 2).uniform_(-0.001, 0.001), requires_grad=True) # (input_dim4lookup, bucket_num, 2) self.hash_wt = torch.nn.Parameter( torch.nn.init.xavier_uniform_( torch.empty(input_dim4lookup, bucket_num) ), requires_grad=True) # embed weight. formula (6) if temperature <= 0: raise ValueError("temperature supposed to be in range (0,1])") self.register_buffer('temperature', torch.tensor(temperature)) self.register_buffer('lambda_c', torch.tensor(lambda_c)) self.register_buffer('epsilon', torch.tensor(epsilon)) self.register_buffer('noise', torch.zeros(self.structure_logits.shape, dtype=torch.float)) self.register_buffer('mask_choice', torch.tensor([[1.], [0.]])) self.softmax = torch.nn.Softmax(dim=-1) self.bn = torch.nn.BatchNorm1d(input_dim4lookup * embed_dim, affine=False) def forward(self, embed_matrix, order, fix_structure): """Calculate grouped features' interaction score in specified `order`. :param embed_matrix: a batch of embedding features, tensor of size ``(batch_size, input_dim4lookup, embed_dim)`` :type embed_matrix: torch.float :param order: order of feature interaction to be calculated. 
:type order: int :param fix_structure: whether to fix structure params during forward calculation :type fix_structure: bool :return: grouped features' interaction score :rtype: torch.float, size ``(batch, bucket_num*embed_dim)``(`order==1`), or size ``(batch, bucket_num)``(`order>1`) """ if order < 1: raise ValueError("`order` should be a positive integer.") # bn for embed embed_matrix = self.bn( embed_matrix.view( -1, embed_matrix.shape[1] * embed_matrix.shape[2] )).view(*embed_matrix.shape) choices = self._differentiable_sampling(fix_structure) # [input_dim4lookup, bucket_num] # high order fm formula comb_tmp = torch.matmul( torch.transpose(embed_matrix, 1, 2), torch.mul(choices, self.hash_wt) ) # [batch, k, bucket_num] comb = torch.pow(comb_tmp, order) # compensation if lambda_c != 0 compensation = self.lambda_c * torch.matmul( torch.pow( torch.transpose(embed_matrix, 1, 2), order), # [batch, k, input_dim4lookup] torch.pow(self.hash_wt, order) # [input_dim4lookup, bucket_num] ) # [batch, k, bucket_num] comp_comb = comb - compensation if order == 1: return torch.reshape(comp_comb, (-1, comp_comb.shape[1] * comp_comb.shape[2])) reduced_comb = torch.sum(comp_comb, dim=1) return torch.reshape(reduced_comb, (-1, reduced_comb.shape[1])) def _differentiable_sampling(self, fix_structure): """Use Gumbel-Softmax trick to take argmax, while keeping differentiate w.r.t soft sample y. :param fix_structure: whether to fix structure params during forward calculation :type fix_structure: bool :return: """ if fix_structure: logits = self.structure_logits else: noise = self.noise.uniform_(0.0, 1.0) logits = self.structure_logits - torch.log(-torch.log(noise + self.epsilon) + self.epsilon) y = self.softmax(logits / self.temperature) y_hard = torch.eq(y, torch.max(y, -1, keepdim=True).values).type(y.dtype) output = torch.matmul( (y_hard - y).detach() + y, self.mask_choice) # [input_dim4lookup, bucket_num, 1] return output.squeeze(-1) ``` #### File: construct/predefined/init.py ```python import numpy as np from modnas.registry.construct import register from modnas.core.param_space import ParamSpace @register class DefaultInitConstructor(): """Constructor that initializes the architecture space.""" def __init__(self, seed=None): self.seed = seed def __call__(self, model): """Run constructor.""" ParamSpace().reset() seed = self.seed if seed: np.random.seed(seed) return model ``` #### File: pytorch/gan/fully_basic_blocks.py ```python from torch import nn import torch.nn.functional as F # 7 PRIMITIVES = [ 'none', 'skip_connect', 'conv_1x1', 'conv_3x3', 'conv_5x5', 'dil_conv_3x3', 'dil_conv_5x5' ] # 3 PRIMITIVES_up = [ 'nearest', 'bilinear', 'ConvTranspose' ] # 6 PRIMITIVES_down = [ 'avg_pool', 'max_pool', 'conv_3x3', 'conv_5x5', 'dil_conv_3x3', 'dil_conv_5x5' ] # ------------------------------------------------------------------------------------------------------------------- # OPS = { 'none': lambda in_ch, out_ch, stride, sn, act: Zero(), 'skip_connect': lambda in_ch, out_ch, stride, sn, act: Identity(), 'conv_1x1': lambda in_ch, out_ch, stride, sn, act: Conv(in_ch, out_ch, 1, stride, 0, sn, act), 'conv_3x3': lambda in_ch, out_ch, stride, sn, act: Conv(in_ch, out_ch, 3, stride, 1, sn, act), 'conv_5x5': lambda in_ch, out_ch, stride, sn, act: Conv(in_ch, out_ch, 5, stride, 2, sn, act), 'dil_conv_3x3': lambda in_ch, out_ch, stride, sn, act: DilConv(in_ch, out_ch, 3, stride, 2, 2, sn, act), 'dil_conv_5x5': lambda in_ch, out_ch, stride, sn, act: DilConv(in_ch, out_ch, 5, stride, 4, 2, sn, act) } OPS_down = { 
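    # every factory shares the (in_ch, out_ch, stride, sn, act) signature so that
    # MixedDown can instantiate any candidate uniformly; Pool ignores the
    # stride/sn/act arguments it receives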
'avg_pool': lambda in_ch, out_ch, stride, sn, act: Pool(in_ch, out_ch, mode='Avg'), 'max_pool': lambda in_ch, out_ch, stride, sn, act: Pool(in_ch, out_ch, mode='Max'), 'conv_3x3': lambda in_ch, out_ch, stride, sn, act: Conv(in_ch, out_ch, 3, stride, 1, sn, act), 'conv_5x5': lambda in_ch, out_ch, stride, sn, act: Conv(in_ch, out_ch, 5, stride, 2, sn, act), 'dil_conv_3x3': lambda in_ch, out_ch, stride, sn, act: DilConv(in_ch, out_ch, 3, stride, 2, 2, sn, act), 'dil_conv_5x5': lambda in_ch, out_ch, stride, sn, act: DilConv(in_ch, out_ch, 5, stride, 4, 2, sn, act) } UPS = { 'nearest': lambda in_ch, out_ch: Up(in_ch, out_ch, mode='nearest'), 'bilinear': lambda in_ch, out_ch: Up(in_ch, out_ch, mode='bilinear'), 'ConvTranspose': lambda in_ch, out_ch: Up(in_ch, out_ch, mode='convT') } # ------------------------------------------------------------------------------------------------------------------- # class Conv(nn.Module): """Conv class.""" def __init__(self, in_ch, out_ch, kernel_size, stride, padding, sn, act): super(Conv, self).__init__() if sn: self.conv = nn.utils.spectral_norm( nn.Conv2d(in_ch, out_ch, kernel_size, stride=stride, padding=padding)) else: self.conv = nn.Conv2d(in_ch, out_ch, kernel_size, stride=stride, padding=padding) if act: self.op = nn.Sequential(nn.ReLU(), self.conv) else: self.op = nn.Sequential(self.conv) def forward(self, x): """call.""" return self.op(x) class DilConv(nn.Module): """DilConv class.""" def __init__(self, in_ch, out_ch, kernel_size, stride, padding, dilation, sn, act): super(DilConv, self).__init__() if sn: self.dilconv = nn.utils.spectral_norm( nn.Conv2d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation)) else: self.dilconv = \ nn.Conv2d(in_ch, out_ch, kernel_size=kernel_size, stride=stride, padding=padding, dilation=dilation) if act: self.op = nn.Sequential(nn.ReLU(), self.dilconv) else: self.op = nn.Sequential(self.dilconv) def forward(self, x): """call.""" return self.op(x) class Identity(nn.Module): """Identity class.""" def __init__(self): super(Identity, self).__init__() def forward(self, x): """call.""" return x class Zero(nn.Module): """Zero class.""" def __init__(self): super(Zero, self).__init__() def forward(self, x): """call.""" return x.mul(0.) 
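# Within a MixedOp, 'none' (Zero) lets the search effectively drop an edge while
# keeping tensor shapes intact, and 'skip_connect' (Identity) provides a plain
# residual path, so all candidate outputs remain shape-compatible when summed.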
class Up(nn.Module): """Up class.""" def __init__(self, in_ch, out_ch, mode=None): super(Up, self).__init__() self.up_mode = mode if self.up_mode == 'convT': self.convT = nn.Sequential( nn.ReLU(), nn.ConvTranspose2d( in_ch, in_ch, kernel_size=3, stride=2, padding=1, output_padding=1, groups=in_ch, bias=False), nn.Conv2d(in_ch, out_ch, kernel_size=1, padding=0, bias=False) ) else: self.c = nn.Sequential( nn.ReLU(), nn.Conv2d(in_ch, out_ch, kernel_size=1) ) def forward(self, x): """call.""" if self.up_mode == 'convT': return self.convT(x) else: return self.c(F.interpolate(x, scale_factor=2, mode=self.up_mode)) class Pool(nn.Module): """Pool class.""" def __init__(self, in_ch, out_ch, mode=None): super(Pool, self).__init__() if mode == 'Avg': self.pool = nn.AvgPool2d(kernel_size=2, stride=2, padding=0) elif mode == 'Max': self.pool = nn.MaxPool2d( kernel_size=2, stride=2, padding=0, dilation=1) def forward(self, x): """call.""" return self.pool(x) class MixedOp(nn.Module): """MixedOp class.""" def __init__(self, in_ch, out_ch, stride, sn, act, primitives): super(MixedOp, self).__init__() self.ops = nn.ModuleList() for primitive in primitives: op = OPS[primitive](in_ch, out_ch, stride, sn, act) self.ops.append(op) def forward(self, x): """call.""" return sum(op(x) for op in self.ops) class MixedUp(nn.Module): """MixedUp class.""" def __init__(self, in_ch, out_ch, primitives): super(MixedUp, self).__init__() self.ups = nn.ModuleList() for primitive in primitives: up = UPS[primitive](in_ch, out_ch) self.ups.append(up) def forward(self, x): """call.""" return sum(up(x) for up in self.ups) class MixedDown(nn.Module): """MixedDown class.""" def __init__(self, in_ch, out_ch, stride, sn, act, primitives): super(MixedDown, self).__init__() self.ops = nn.ModuleList() for primitive in primitives: op = OPS_down[primitive](in_ch, out_ch, stride, sn, act) self.ops.append(op) def forward(self, x): """call.""" return sum(op(x) for op in self.ops) class Cell(nn.Module): """Cell class.""" def __init__(self, in_channels, out_channels, up_mode, genotype, num_skip_in=0, norm=None): super(Cell, self).__init__() self.up0 = MixedUp(in_channels, out_channels, [ PRIMITIVES_up[genotype[0]]]) self.up1 = MixedUp(in_channels, out_channels, [ PRIMITIVES_up[genotype[1]]]) if genotype[2] > 0: self.c0 = MixedOp(out_channels, out_channels, 1, False, True, [PRIMITIVES[genotype[2]]]) if genotype[3] > 0: self.c1 = MixedOp(out_channels, out_channels, 1, False, True, [PRIMITIVES[genotype[3]]]) if genotype[4] > 0: self.c2 = MixedOp(out_channels, out_channels, 1, False, True, [PRIMITIVES[genotype[4]]]) if genotype[5] > 0: self.c3 = MixedOp(out_channels, out_channels, 1, False, True, [PRIMITIVES[genotype[5]]]) if genotype[6] > 0: self.c4 = MixedOp(out_channels, out_channels, 1, False, True, [PRIMITIVES[genotype[6]]]) self.up_mode = up_mode self.norm = norm # no norm if norm: if norm == 'bn': self.n1 = nn.BatchNorm2d(in_channels) self.n2 = nn.BatchNorm2d(out_channels) elif norm == 'in': self.n1 = nn.InstanceNorm2d(in_channels) self.n2 = nn.InstanceNorm2d(out_channels) else: raise NotImplementedError(norm) # cross scale skip self.skip_in_ops = None if num_skip_in: self.skip_in_ops = nn.ModuleList( [nn.Conv2d(in_channels, out_channels, kernel_size=1) for _ in range(num_skip_in)] ) def forward(self, x, skip_ft=None): """call.""" node0 = self.up0(x) node1 = self.up1(x) _, _, ht, wt = node0.size() # for different topologies if hasattr(self, 'c0'): node2 = self.c0(node0) if hasattr(self, 'c1'): node2 = node2 + self.c1(node1) else: node2 = 
self.c1(node1) # skip out feat h_skip_out = node2 # skip in feat if self.skip_in_ops: assert len(self.skip_in_ops) == len(skip_ft) for ft, skip_in_op in zip(skip_ft, self.skip_in_ops): node2 += skip_in_op(F.interpolate(ft, size=(ht, wt), mode=self.up_mode)) # for different topologies if hasattr(self, 'c2'): node3 = self.c2(node0) if hasattr(self, 'c3'): node3 = node3 + self.c3(node1) if hasattr(self, 'c4'): node3 = node3 + self.c4(node2) else: if hasattr(self, 'c4'): node3 = node3 + self.c4(node2) else: if hasattr(self, 'c3'): node3 = self.c3(node1) if hasattr(self, 'c4'): node3 = node3 + self.c4(node2) else: node3 = self.c4(node2) return h_skip_out, node3 def _downsample(x): """Downsample with Avg Pooling.""" return nn.AvgPool2d(kernel_size=2)(x) class OptimizedDisBlock(nn.Module): """OptimizedDisBlock class.""" def __init__(self, in_channels, out_channels, ksize=3, pad=1, activation=nn.ReLU()): super(OptimizedDisBlock, self).__init__() self.activation = activation self.c1 = nn.Conv2d(in_channels, out_channels, kernel_size=ksize, padding=pad) self.c2 = nn.Conv2d(out_channels, out_channels, kernel_size=ksize, padding=pad) self.c_sc = nn.Conv2d(in_channels, out_channels, kernel_size=1, padding=0) self.c1 = nn.utils.spectral_norm(self.c1) self.c2 = nn.utils.spectral_norm(self.c2) self.c_sc = nn.utils.spectral_norm(self.c_sc) def residual(self, x): """call.""" h = x h = self.c1(h) h = self.activation(h) h = self.c2(h) h = _downsample(h) return h def shortcut(self, x): """call.""" return self.c_sc(_downsample(x)) def forward(self, x): """call.""" return self.residual(x) + self.shortcut(x) class DisCell(nn.Module): """DisCell class.""" def __init__(self, in_channels, out_channels, hidden_channels=None, activation=nn.ReLU(), genotype=None): super(DisCell, self).__init__() if genotype[5] >= 0: self.down0 = MixedDown(in_channels, out_channels, 2, True, True, [ PRIMITIVES_down[genotype[5]]]) self.down1 = MixedDown(in_channels, out_channels, 2, True, True, [ PRIMITIVES_down[genotype[6]]]) if genotype[0] > 0: self.c0 = MixedOp(out_channels, out_channels, 1, True, True, [PRIMITIVES[genotype[0]]]) if genotype[1] > 0: self.c1 = MixedOp(out_channels, out_channels, 1, True, True, [PRIMITIVES[genotype[1]]]) if genotype[2] > 0: self.c2 = MixedOp(out_channels, out_channels, 1, True, True, [PRIMITIVES[genotype[2]]]) if genotype[3] > 0: self.c3 = MixedOp(out_channels, out_channels, 1, True, True, [PRIMITIVES[genotype[3]]]) if genotype[4] > 0: self.c4 = MixedOp(out_channels, out_channels, 1, True, False, [PRIMITIVES[genotype[4]]]) def forward(self, x): """call.""" node0 = x node1 = self.c0(node0) if hasattr(self, 'c1'): node2 = self.c1(node0) if hasattr(self, 'c2'): node2 = node2 + self.c2(node1) else: node2 = self.c2(node1) if hasattr(self, 'c3'): node3 = self.c3(node1) if hasattr(self, 'c4'): node3 = node3 + self.c4(node0) else: node3 = self.c4(node0) if hasattr(self, 'down0'): node4 = self.down0(node2) + self.down1(node3) else: node4 = node2 + node3 return node4 ``` #### File: pytorch/losses/smooth_l1_loss.py ```python import torch from vega.modules.module import Module from .reduce_loss import weighted_loss from vega.common import ClassType, ClassFactory @weighted_loss def smooth_l1_loss(pred, target, beta=1.0): """Smooth l1 loss. 
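    With d = |pred - target|, the elementwise loss is 0.5 * d ** 2 / beta when
    d < beta and d - 0.5 * beta otherwise, i.e. quadratic near zero and linear
    for large errors.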
:param pred: predict :param target: target :param beta: beta :return: loss """ assert beta > 0 assert pred.size() == target.size() and target.numel() > 0 diff = torch.abs(pred - target) loss = torch.where(diff < beta, 0.5 * diff * diff / beta, diff - 0.5 * beta) return loss @ClassFactory.register(ClassType.NETWORK) class SmoothL1Loss(Module): """Smooth L1 Loss.""" def __init__(self, desc): """Init smooth l1 loss. :param desc: config dict """ super(SmoothL1Loss, self).__init__() self.beta = desc['beta'] if 'beta' in desc else 1.0 self.reduction = desc['reduction'] if 'reduction' in desc else 'mean' self.loss_weight = desc['loss_weight'] if 'loss_weight' in desc else 1.0 def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): """Forward compute. :param pred: predict :param target: target :param weight: weight :param avg_factor: avg factor :param reduction_override: reduce override :return: loss """ reduction = ( reduction_override if reduction_override else self.reduction) if target.numel() > 0: loss_bbox = self.loss_weight * smooth_l1_loss( pred, target, weight, beta=self.beta, reduction=reduction, avg_factor=avg_factor, **kwargs) return loss_bbox else: return torch.FloatTensor([0.0]).cuda() ``` #### File: pytorch/losses/sum_loss.py ```python import torch from torch import nn from collections import OrderedDict from vega.common import ClassType, ClassFactory import os import pickle import logging @ClassFactory.register(ClassType.LOSS) class SumLoss(nn.Module): """Calculate sum of input losses.""" def __init__(self): """Init SumLoss.""" super(SumLoss, self).__init__() def forward(self, input, target=None): """Calculate sum of input losses. :param input: dict of losses. :type input: dict :param target: `target` Tensor, default None. :type target: type torch.Tensor :return: return sum of losses. :rtype: torch.Tensor """ losses = input if not isinstance(losses, dict) and not isinstance(losses, OrderedDict): return None log_vars = OrderedDict() for loss_name, loss_value in losses.items(): if isinstance(loss_value, torch.Tensor): log_vars[loss_name] = loss_value.mean() elif isinstance(loss_value, list): log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) else: raise TypeError( '{} is not a tensor or list of tensors'.format(loss_name)) init_loss = [_value for _key, _value in log_vars.items() if 'loss' in _key] if hasattr(self, "dynamic_loss_weight"): # save the init loss loss_save = [float(_value.detach().cpu().numpy()) for _value in init_loss] save_file = os.path.join(self.save_path, "muti_loss.pkl") with open(save_file, "wb") as f: pickle.dump(loss_save, f) if len(self.dynamic_loss_weight) != len(init_loss): logging.error("The length of the loss must be same with the length of the weight, but got {} and {}" .format(len(init_loss), len(self.dynamic_loss_weight))) weighted_loss = [self.dynamic_loss_weight[i] * init_loss[i] for i in range(len(init_loss))] sum_loss = sum(weighted_loss) else: sum_loss = sum(init_loss) # Debug """ if loss > 100: logging.error(str(losses)) import os os._exit() """ return sum_loss def adaptive_muti_loss(self, save_path, weight): """Set adaptive muti loss params.""" self.save_path = save_path self.dynamic_loss_weight = weight ``` #### File: pytorch/necks/ffm.py ```python import torch import torch.nn as nn from ..blocks.layer_creator import LayerCreator from vega.common import ClassType, ClassFactory class ConvPack(nn.Module): """ConvPack. 
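    Packs a convolution built by LayerCreator together with optional
    normalization and activation, applied in conv -> norm -> act order.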
:param in_channels: input feature map channel num
    :type in_channels: int
    :param out_channels: output feature map channel num
    :type out_channels: int
    :param kernel_size: convolution kernel size
    :type kernel_size: int
    :param stride: stride, default 1
    :type stride: int
    :param padding: padding, default 0
    :type padding: int
    :param dilation: dilation, default 1
    :type dilation: int
    :param groups: group num, default 1
    :type groups: int
    :param bias: whether to add a learnable bias; 'auto' adds one only when no norm layer is configured
    :type bias: bool or str
    :param conv_cfg: conv config for LayerCreator
    :type conv_cfg: dict
    :param norm_cfg: norm config, or None to skip normalization
    :type norm_cfg: dict
    :param activation: activation name, or None to skip activation
    :type activation: str
    :param inplace: whether the activation works in-place
    :type inplace: bool
    """

    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1,
                 groups=1, bias='auto', conv_cfg=None, norm_cfg=None, activation='relu', inplace=True):
        super().__init__()
        self.conv_cfg = conv_cfg
        self.norm_cfg = norm_cfg
        self.activation = activation
        self.inplace = inplace
        self.with_norm = norm_cfg is not None
        self.with_activation = activation is not None
        if bias == 'auto':
            # add a bias only when no norm layer follows the convolution
            bias = not self.with_norm
        self.with_bias = bias
        conv_creator = LayerCreator(**conv_cfg)
        self.conv = conv_creator.create_layer(
            in_channels,
            out_channels,
            kernel_size,
            stride=stride,
            padding=padding,
            dilation=dilation,
            groups=groups,
            bias=bias)
        if self.with_norm:
            norm_channels = out_channels
            norm_creator = LayerCreator(**norm_cfg)
            norm = norm_creator.create_layer(num_features=norm_channels)
            self.norm_name = norm_creator.get_name()
            self.add_module(self.norm_name, norm)
        if self.with_activation:
            # only ReLU is created here, regardless of the `activation` name
            act_cfg = {'type': 'ReLU'}
            act_creator = LayerCreator(**act_cfg)
            self.activate = act_creator.create_layer(inplace=inplace)

    def norm(self, x):
        """Apply norm."""
        x = getattr(self, self.norm_name)(x)
        return x

    def forward(self, x, activate=True, norm=True):
        """Forward compute.

        :param x: input feature map
        :type x: tensor
        :param activate: whether activate or not
        :type activate: bool
        :param norm: whether norm or not
        :type norm: bool
        :return: output feature map
        :rtype: tensor
        """
        x = self.conv(x)
        if norm and self.with_norm:
            x = self.norm(x)
        if activate and self.with_activation:
            x = self.activate(x)
        return x


class FeatureFusionNetwork(nn.Module):
    """The Core of FeatureFusionNetwork.
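    The fusion topology is decoded from `feature_fusion_arch_str`: groups of
    digits separated by '-', e.g. '01-23' fuses input levels (0, 1) and (2, 3).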
:param out_channels: out_channels :type out_channels: int :param num_outs: num_outs :type num_outs: int :param start_level: start_level :type start_level: int :param end_level: end_level :type end_level: int :param in_channels: in_channels :type in_channels: int :param add_extra_convs: add_extra_convs :type add_extra_convs: bool :param extra_convs_on_inputs: extra_convs_on_inputs :type extra_convs_on_inputs: bool :param relu_before_extra_convs: relu_before_extra_convs :type relu_before_extra_convs: bool :param conv_cfg: conv_cfg :type conv_cfg: dict :param norm_cfg: norm_cfg :type norm_cfg: dict :param activation: activation :type activation: dict :param feature_fusion_arch_str: feature_fusion_arch_str :type feature_fusion_arch_str: atr """ def __init__(self, out_channels=128, num_outs=4, start_level=0, end_level=-1, in_channels=None, add_extra_convs=False, extra_convs_on_inputs=True, relu_before_extra_convs=False, conv_cfg=None, norm_cfg=None, activation=None, feature_fusion_arch_str=None): super(FeatureFusionNetwork, self).__init__() if conv_cfg is None: conv_cfg = {'type': 'Conv'} self.in_channels = in_channels self.out_channels = out_channels self.num_ins = len(in_channels) self.num_outs = num_outs self.activation = activation self.relu_before_extra_convs = relu_before_extra_convs if end_level == -1: self.backbone_end_level = self.num_ins else: self.backbone_end_level = end_level self.start_level = start_level self.end_level = end_level self.add_extra_convs = add_extra_convs self.extra_convs_on_inputs = extra_convs_on_inputs self.lateral_convs = nn.ModuleList() self.fpn_convs = nn.ModuleList() self.feature_fusion_arch_str = feature_fusion_arch_str self.c34_maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) self.c24_maxpool = nn.MaxPool2d(kernel_size=5, stride=4, padding=1) for i in range(self.start_level, self.backbone_end_level): l_conv = ConvPack( in_channels[i], out_channels, 1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, activation=self.activation, inplace=False) fpn_conv = ConvPack( out_channels * 2, out_channels * 2, 3, padding=1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, activation=self.activation, inplace=False) self.lateral_convs.append(l_conv) self.fpn_convs.append(fpn_conv) extra_levels = num_outs - self.backbone_end_level + self.start_level if add_extra_convs and extra_levels >= 1: for i in range(extra_levels): if i == 0 and self.extra_convs_on_inputs: in_channels = self.in_channels[self.backbone_end_level - 1] else: in_channels = out_channels extra_fpn_conv = ConvPack( in_channels, out_channels, 3, stride=2, padding=1, conv_cfg=conv_cfg, norm_cfg=norm_cfg, activation=self.activation, inplace=False) self.fpn_convs.append(extra_fpn_conv) def decoder_ffm_arch(self): """Decode ffm arch.""" feature_fusion_arch = [] block_arch = [] for i in self.feature_fusion_arch_str: if i == '-': feature_fusion_arch.append(block_arch) block_arch = [] else: block_arch.append(int(i)) feature_fusion_arch.append(block_arch) return feature_fusion_arch def forward(self, inputs): """Forward method.""" build_out = [] fpn_arch = self.decoder_ffm_arch() for i in range(len(fpn_arch)): input1, input2 = fpn_arch[i][0], fpn_arch[i][1] laterals = [self.lateral_convs[input1](inputs[input1]), self.lateral_convs[input2](inputs[input2])] # sum of the two input if input1 == 0: laterals[0] = self.c24_maxpool(laterals[0]) elif input1 == 1: laterals[0] = self.c34_maxpool(laterals[0]) if input2 == 0: laterals[1] = self.c24_maxpool(laterals[1]) elif input2 == 1: laterals[1] = self.c34_maxpool(laterals[1]) 
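            # level 0 is pooled 4x (c24_maxpool) and level 1 pooled 2x (c34_maxpool),
            # so both laterals match the level-2 feature size before being fused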
build_out.append(self.fpn_convs[i](torch.cat((laterals[0], laterals[1]), 1))) outs = torch.cat((inputs[2], torch.cat((build_out[0], build_out[1]), 1)), 1) return outs def PseudoFeatureFusionNetwork(feature_map_list): """Pseudo FeatureFusionNetwork, just get the third layer of target featuremap.""" return feature_map_list[2] def ArchChannels2Module(feature_fusion_arch_code, in_channels): """Ffn warpper.""" if feature_fusion_arch_code != '-': return FeatureFusionNetwork(in_channels=in_channels, out_channels=64, num_outs=4, feature_fusion_arch_str=feature_fusion_arch_code) else: return PseudoFeatureFusionNetwork @ClassFactory.register(ClassType.NETWORK) class FeatureFusionModule(nn.Module): """FeatureFusionModule backbone. :param desc: Description of ResNetVariantDet. :type desc: NetworkDesc """ def __init__(self, desc): super(FeatureFusionModule, self).__init__() self.in_channels = desc["in_channels"][0:4] self.feature_fusion_arch_code = desc["arch_code"] self.num_ins = len(self.in_channels) self.neck = ArchChannels2Module(self.feature_fusion_arch_code, self.in_channels) def forward(self, inputs): """Get the result of ffm.""" out = self.neck(inputs[0:4]) return out def init_weights(self): """Initialize ffm weight.""" if self.feature_fusion_arch_code != '-': self.neck.init_weights() ``` #### File: tensorflow/necks/mask_rcnn_box.py ```python from object_detection.predictors.heads import box_head from object_detection.predictors import mask_rcnn_box_predictor from object_detection.predictors.heads import class_head from vega.common import ClassType, ClassFactory from vega.networks.tensorflow.utils.hyperparams import scope_generator @ClassFactory.register(ClassType.NETWORK) class MaskRCNNBox(object): """Mask RCNN Box.""" def __init__(self, desc): """Init MaskRCNNBox. 
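        A background slot is appended to `num_classes` when
        `add_background_class` is left at its default of True.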
:param desc: config dict
        """
        self.model = None
        self.num_classes = desc.num_classes
        self.add_background_class = desc.add_background_class if 'add_background_class' in desc else True
        self.num_class_slots = (self.num_classes + 1) if self.add_background_class else self.num_classes
        self.use_dropout = desc.use_dropout if 'use_dropout' in desc else False
        self.dropout_keep_prob = desc.dropout_keep_prob if 'dropout_keep_prob' in desc else 1.0
        self.box_code_size = desc.box_code_size if 'box_code_size' in desc else 4
        self.share_box_across_classes = desc.share_box_across_classes if 'share_box_across_classes' in desc else False
        self.fc_hyperparams = scope_generator.get_hyper_params_scope(
            desc.fc_hyperparams)

    def get_real_model(self, training):
        """Get the real model of MaskRCNNBox, building it on first use."""
        if self.model:
            return self.model
        else:
            self.box_prediction_head = box_head.MaskRCNNBoxHead(
                is_training=training,
                num_classes=self.num_classes,
                fc_hyperparams_fn=self.fc_hyperparams,
                use_dropout=self.use_dropout,
                dropout_keep_prob=self.dropout_keep_prob,
                box_code_size=self.box_code_size,
                share_box_across_classes=self.share_box_across_classes)
            self.class_prediction_head = class_head.MaskRCNNClassHead(
                is_training=training,
                num_class_slots=self.num_class_slots,
                fc_hyperparams_fn=self.fc_hyperparams,
                use_dropout=self.use_dropout,
                dropout_keep_prob=self.dropout_keep_prob)
            third_stage_heads = {}
            self.model = mask_rcnn_box_predictor.MaskRCNNBoxPredictor(
                is_training=training,
                num_classes=self.num_classes,
                box_prediction_head=self.box_prediction_head,
                class_prediction_head=self.class_prediction_head,
                third_stage_heads=third_stage_heads)
            return self.model

    def __call__(self, features, labels, training):
        """Forward function of MaskRCNNBox."""
        return self.get_real_model(training).predict(features, labels)
```
#### File: utils/hyperparams/regularizer.py
```python
import tf_slim as slim
from vega.common import ClassType, ClassFactory


@ClassFactory.register(ClassType.NETWORK)
class Regularizer(object):
    """Regularizer."""

    def __init__(self, desc):
        """Init Regularizer.
        :param desc: config dict
        """
        self.model = None
        self.type = desc.type if 'type' in desc else None
        self.weight = desc.weight

    def get_real_model(self):
        """Get real model of regularizer."""
        if self.model:
            return self.model
        else:
            if self.type == 'l1_regularizer':
                self.model = slim.l1_regularizer(scale=float(self.weight))
            elif self.type == 'l2_regularizer':
                self.model = slim.l2_regularizer(scale=float(self.weight))
            else:
                raise ValueError(
                    'Unknown regularizer type: {}'.format(self.type))
        return self.model
```
#### File: vega/quota/target_terminate.py
```python
from vega.common import ClassFactory, ClassType
from .filter_terminate_base import FilterTerminateBase


@ClassFactory.register(ClassType.QUOTA)
class TargetTerminate(FilterTerminateBase):
    """Determine whether the target is satisfied."""

    def __init__(self):
        super(TargetTerminate, self).__init__()
        self.target_type = self.target_config.type
        self.target_value = self.target_config.value

    def is_halted(self, *args, **kwargs):
        """Halt or not."""
        if self.target_type is None or self.target_value is None:
            return False
        valid_metric = kwargs[self.target_type]
        return valid_metric > self.target_value
```
#### File: vega/quota/valid_filter.py
```python
import logging
from vega.common import ClassFactory, ClassType
from .filter_terminate_base import FilterTerminateBase


@ClassFactory.register(ClassType.QUOTA)
class ValidFilter(FilterTerminateBase):
    """Valid Filter class."""

    def __init__(self):
        super(ValidFilter, self).__init__()
        self.dataloader = None

    def is_filtered(self, desc=None):
        """Filter function of latency."""
        try:
            if not self.dataloader:
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
                self.dataset = dataset_cls()
                from vega.datasets import Adapter
                self.dataloader = Adapter(self.dataset).loader
            model, count_input = self.get_model_input(desc)
            model(count_input)
            return False
        except Exception as e:
            encoding = desc['backbone']['encoding']
            logging.info(f"Invalid encoding: {encoding}, message: {str(e)}")
            return True
```
#### File: vega/tools/verify_cluster.py
```python
import os
import subprocess
import uuid
import time
import psutil
import shutil
import signal
import json
from dask.distributed import Client
from vega.common import argment_parser
from vega.common.general import General
from vega.common.utils import get_available_port


def _parse_args():
    parser = argment_parser("Verify cluster.")
    parser.add_argument("-m", "--master", default=None, type=str, required=True,
                        help="master node IP")
    parser.add_argument("-s", "--slaves", dest="slaves", nargs="+", required=True,
                        help="slaves node IP, e.g.
-s 192.168.0.2 192.168.0.3")
    parser.add_argument("-n", "--nfs_folder", default=None, type=str, required=True,
                        help="shared NFS folder")
    parser.add_argument("-j", "--json", action='store_true',
                        help="silent mode, print result in json format")
    args = parser.parse_args()
    return args


_json = None
_port = None


def _print(value):
    global _json
    if not _json:
        print(value)


def _call(cmd, **kwargs):
    global _json
    if _json:
        return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
    else:
        return subprocess.call(cmd, **kwargs)


def _check_output(cmd):
    global _json
    if _json:
        return subprocess.check_output(cmd, stderr=subprocess.PIPE).decode("utf-8")
    else:
        return subprocess.check_output(cmd).decode("utf-8")


def _popen(cmd):
    global _json
    if _json:
        return subprocess.Popen(cmd, close_fds=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    else:
        return subprocess.Popen(cmd, close_fds=True)


def _verify_ip(args):
    _print("*" * 32)
    _print("Start verifying IP.")
    for slave in args.slaves:
        msg = f"Failed to access slave ({slave})."
        try:
            result = _call(["ping", "-c", "4", slave])
        except Exception:
            raise Exception(msg)
        if result != 0:
            raise Exception(msg)
        msg = f"Failed to log in to slave ({slave}) without a password."
        try:
            result = _call([
                "ssh", "-o", "NumberOfPasswordPrompts=0", "-o", "StrictHostKeyChecking=yes",
                f"{slave}", "\"/bin/echo\""])
        except Exception:
            raise Exception(msg)
        if result != 0:
            raise Exception(msg)
    _print("Pass.")


def _verify_nfs(args):
    _print("*" * 32)
    _print("Start verifying NFS.")
    if not os.path.exists(args.nfs_folder):
        raise Exception(f"Shared NFS folder({args.nfs_folder}) does not exist.")
    for slave in args.slaves:
        temp_folder = os.path.join(args.nfs_folder, uuid.uuid1().hex)
        msg = f"Shared NFS folder ({slave}:{args.nfs_folder}) cannot be accessed."
        try:
            result = _call(["ssh", slave, f"mkdir {temp_folder}"])
        except Exception:
            raise Exception(msg)
        if result != 0:
            raise Exception(msg)
        try:
            result = _call(["ssh", slave, f"rm -r {temp_folder}"])
        except Exception:
            raise Exception(msg)
        if result != 0:
            raise Exception(msg)
    _print("Pass.")


def _verify_pkg(args):
    _print("*" * 32)
    _print("Start verifying packages.")
    # python
    main_output = _check_output([General.python_command, "--version"])
    for slave in args.slaves:
        slave_output = _check_output(["ssh", slave, General.python_command, "--version"])
        if main_output != slave_output:
            raise Exception(f"Python version is different.\nmaster:\n{main_output}\nslave:\n{slave_output}.")
    # main packages
    pkgs = ["noah-vega", "distributed", "torch"]
    for pkg in pkgs:
        main_output = _check_output(["pip3", "show", pkg])
        properties = main_output.split("\n")
        main_version = ""
        for prop in properties:
            if "Version:" in prop:
                main_version = prop
        if main_version == "":
            raise Exception(f"Package ({pkg}) is missing.")
        for slave in args.slaves:
            slave_output = _check_output(["ssh", slave, "pip3", "show", pkg])
            properties = slave_output.split("\n")
            slave_version = ""
            for prop in properties:
                if "Version:" in prop:
                    slave_version = prop
            if main_version != slave_version:
                raise Exception(f"Package is different.\n\nmaster:\n{main_output}\n\nslave:\n{slave_output}.")
    _print("Pass.")


def _kill_existed_dask(args):
    pids = psutil.pids()
    dask_pids = []
    for pid in pids:
        try:
            process = psutil.Process(pid)
            pname = process.name()
            if "dask-scheduler" in pname or "dask-worker" in pname:
                dask_pids.append(pid)
        except Exception:
            pass
    if dask_pids:
        _print("Found existing dask scheduler or dask worker processes.")
        _input = input("Do you want to kill the dask processes and continue to verify?
[Y/n]: ")
        if _input.upper() in ["N", "NO"]:
            _print("Cluster verification canceled.")
            os._exit(0)
        elif _input.upper() not in ["", "Y", "YES"]:
            _print("Input Error.")
            os._exit(0)
        for pid in dask_pids:
            os.kill(int(pid), signal.SIGKILL)
        time.sleep(10)


def _init_dask_scheduler(args):
    _print("Start verifying scheduler.")
    global _port
    _port = str(get_available_port())
    try:
        result = _popen(["dask-scheduler", "--port", _port])
    except Exception:
        raise Exception("Failed to start dask scheduler.")
    if not isinstance(result, subprocess.Popen):
        _print("Failed to start dask scheduler.")
        _print("Please run the command in the CLI, and resolve the problems.")
        _print(f"dask-scheduler --port {_port}")
        raise Exception("Failed to start dask scheduler.")
    time.sleep(5)
    _print("Pass.")


def _verify_local(args):
    global _port
    _print(f"Start verifying local worker, IP:{args.master}, port: {_port}.")
    try:
        result = _popen(["dask-worker", f"{args.master}:{_port}"])
    except Exception:
        raise Exception("Cannot start local dask-worker.")
    if not isinstance(result, subprocess.Popen):
        raise Exception("Cannot start local dask-worker.")
    time.sleep(5)
    _print("Pass.")
    _print("Test local dask Client.")
    cmd = f"{General.python_command} -c \"from dask.distributed import Client;"\
          f"client=Client('{args.master}:{_port}');client.close()\""
    try:
        result = _call(cmd, shell=True)
    except Exception:
        raise Exception("Cannot start local dask client.")
    if result != 0:
        raise Exception("Cannot start local dask client.")
    _print("Pass.")


def _verify_client(args):
    global _port
    _print("Start verifying slave workers.")
    for slave in args.slaves:
        _print(f"Start verifying slave({slave}) worker.")
        try:
            result = _popen(["ssh", slave, f"{shutil.which('dask-worker')} {args.master}:{_port}"])
        except Exception:
            raise Exception(f"Cannot start slave({slave}) dask-worker.")
        if not isinstance(result, subprocess.Popen):
            raise Exception(f"Cannot start slave({slave}) dask-worker.")
        time.sleep(5)
        _print("Pass.")
        _print(f"Test slave({slave}) dask Client.")
        cmd = f"{General.python_command} -c \"from dask.distributed import Client;"\
              f"client=Client('{args.master}:{_port}');client.close()\""
        try:
            result = _call(cmd, shell=True, env=os.environ)
        except Exception:
            raise Exception(f"Cannot start slave({slave}) dask client.")
        if result != 0:
            raise Exception(f"Cannot start slave({slave}) dask client.")
        time.sleep(5)
        _print("Pass.")
    _print("Pass.")


def _stop_dask_scheduler(args):
    global _port
    _print("Stopping scheduler.")
    client = Client(f"{args.master}:{_port}")
    try:
        client.shutdown()
        client.close()
        del client
        time.sleep(8)
    except Exception:
        _print("Failed to stop scheduler, please stop it manually.")


def _verify_dask(args):
    _print("*" * 32)
    # _kill_existed_dask(args)
    _init_dask_scheduler(args)
    _verify_local(args)
    _verify_client(args)
    _stop_dask_scheduler(args)
    _print("Pass.")


def _verify_cluster():
    args = _parse_args()
    global _json
    _json = args.json
    try:
        _verify_ip(args)
        _verify_nfs(args)
        _verify_pkg(args)
        _verify_dask(args)
        _print("All cluster check items have passed.")
        if args.json:
            print(json.dumps({"status": "success"}, indent=4))
    except Exception as e:
        _print("")
        _print(f"Exception:\n\n{str(e)}")
        if args.json:
            print(json.dumps({"status": "error", "message": str(e)}, indent=4))


if __name__ == "__main__":
    _verify_cluster()
```
#### File: trainer/callbacks/runtime_callback.py
```python
from .callback import Callback
from vega.common import ClassFactory, ClassType
from vega.metrics.runtime_estimate import RuntimeEstimator


@ClassFactory.register(ClassType.CALLBACK)
class
RuntimeCallback(Callback): """Running time estimate callback.""" def __init__(self): super(RuntimeCallback, self).__init__() self.remain_time = dict() self.whole_time = dict() self.priority = 210 def before_train(self, logs=None): """Define runtime type and mark train start time.""" epochs = self.trainer.epochs self.rt_est = RuntimeEstimator(types='train', max_steps=epochs) train_steps = self.trainer.batch_num_train self.rt_est.add_runtime_est(type='epoch', max_step=train_steps) self.rt_est.mark_start_time('train', step=0) def before_epoch(self, epoch, logs=None): """Mark epoch start time.""" self.rt_est.mark_start_time('epoch', step=0) def after_epoch(self, epoch, logs=None): """Obtain estimated running time after epoch.""" self.remain_time['train'] = self.rt_est.remaining_time('train', step=epoch + 1) using_time = self.rt_est.using_time('train') self.whole_time['train'] = self.remain_time['train'] + using_time logs.update({'runtime': {'remain_time': self.remain_time, 'whole_time': self.whole_time}}) self.trainer.runtime = self.whole_time['train'] def after_train_step(self, batch_index, logs=None): """Obtain estimated running time after step.""" self.remain_time['epoch'] = self.rt_est.remaining_time('epoch', step=batch_index + 1) using_time = self.rt_est.using_time('epoch') self.whole_time['epoch'] = self.remain_time['epoch'] + using_time def after_train(self, logs=None): """Restore train time in trainer.""" if 'train' not in self.whole_time: self.after_epoch(0, logs) self.trainer.runtime = self.whole_time['train'] ``` #### File: modules/conf/optim.py ```python from vega.common import ConfigSerializable class OptimConfig(ConfigSerializable): """Default Optim Config.""" _class_type = "trainer.optimizer" _exclude_keys = ['type'] _update_all_attrs = True type = 'Adam' params = {"lr": 0.1} @classmethod def from_dict(cls, data, skip_check=True): """Restore config from a dictionary or a file.""" cls = super(OptimConfig, cls).from_dict(data, skip_check) if "params" not in data: cls.params = {} return cls @classmethod def rules(cls): """Return rules for checking.""" rules = {"type": {"type": str}, "params": {"type": dict}} return rules class OptimMappingDict(object): """Optimizer Mapping Dictionary.""" type_mapping_dict = dict( SGD=dict(torch='SGD', tf='MomentumOptimizer', ms='Momentum'), Adam=dict(torch='Adam', tf='AdamOptimizer', ms='Adam'), RMSProp=dict(torch='RMSProp', tf='RMSPropOptimizer', ms='RMSProp') ) params_mapping_dict = dict( SGD=dict( lr=dict(torch='lr', tf='learning_rate', ms='learning_rate'), momentum=dict(torch='momentum', tf='momentum', ms='momentum'), weight_decay=dict(torch='weight_decay', tf='weight_decay', ms='weight_decay'), ), Adam=dict( lr=dict(torch='lr', tf='learning_rate', ms='learning_rate'), weight_decay=dict(torch='weight_decay', tf='weight_decay', ms='weight_decay'), ), RMSProp=dict( lr=dict(torch='lr', tf='learning_rate', ms='learning_rate'), weight_decay=dict(torch='weight_decay', tf='weight_decay', ms='weight_decay'), ) ) ``` #### File: modules/optimizer/multi_optimizer.py ```python from collections import OrderedDict from vega.common import ClassFactory, ClassType from vega.trainer.modules.lr_schedulers import LrScheduler from vega.common.config import Config from .optim import Optimizer from ..conf.optim import OptimConfig @ClassFactory.register(ClassType.OPTIMIZER) class MultiOptimizers(object): """Register and call multi-optimizer class.""" config = OptimConfig() def __init__(self, config=None): """Initialize.""" self.is_multi_opt = True if config is not 
None:
            self.config = Config(config)
        self._opts = OrderedDict()

    def __call__(self, model=None, distributed=False):
        """Call Optimizer class."""
        for config in self.config:
            name = config.get('model')
            sub_model = getattr(model, config.get('model'))
            sub_opt = Optimizer(config)(sub_model, distributed)
            sub_lr = None
            sub_loss = None
            if config.get('lr_scheduler'):
                sub_lr = LrScheduler(config=config.get('lr_scheduler'))(sub_opt)
            if config.get('loss'):
                sub_loss = ClassFactory.get_instance(ClassType.LOSS, config.get('loss'))
            self._opts[name] = dict(opt=sub_opt, lr=sub_lr, loss=sub_loss, model=sub_model)
        return self

    def get_opts(self):
        """Get opts."""
        return self._opts.items()
```
#### File: vega/trainer/trainer_base.py
```python
import glob
import logging
import vega
from vega.common import FileOps, init_log
from vega.common.class_factory import ClassFactory, ClassType
from vega.common.config import Config
from vega.trainer.callbacks import CallbackList
from vega.trainer.conf import TrainerConfig
from vega.trainer.distributed_worker import DistributedWorker
from vega.trainer.utils import WorkerTypes
from vega.datasets import Adapter
from vega.common.general import General


class TrainerBase(DistributedWorker):
    """Trainer base class."""

    def __init__(self, model=None, id=None, hps=None, load_ckpt_flag=False,
                 model_desc=None, multi_task=None, **kwargs):
        super().__init__()

        self.config = TrainerConfig()
        self.worker_type = WorkerTypes.TRAINER
        if id is not None:
            self._worker_id = id
        self.actions_list = self.config.actions_list
        # Data member list of Trainer
        self.is_chief = True
        self.epochs = self.config.epochs
        self.do_validation = True
        self.auto_save_ckpt = True
        self.auto_save_perf = True
        self.save_ext_model = self.config.save_ext_model
        self.skip_train = False
        self.valid_interval = self.config.valid_interval
        self.hps = hps
        self.model = model
        self.model_desc = model_desc
        self.optimizer = None
        self.lr_scheduler = None
        self.loss = None
        self.use_syncbn = self.config.syncbn
        self.use_amp = self.config.amp
        self.train_metrics = None
        self.valid_metrics = None
        self.call_metrics_on_train = self.config.call_metrics_on_train
        self.train_verbose = self.config.train_verbose
        self.valid_verbose = self.config.valid_verbose
        self.train_report_steps = self.config.train_report_steps
        self.valid_report_steps = self.config.valid_report_steps
        self.multi_task = multi_task
        self.train_loader = None
        self.valid_loader = None
        self.train_step = None
        self.valid_step = None
        self.make_batch = None
        self.model_fn = None
        self.train_input_fn = None
        self.valid_input_fn = None
        self.callbacks = None
        self.performance = None
        self.best_performance = None
        self.runtime = None
        self.load_checkpoint = False
        self.load_weights_file = self.config.load_weights_file
        self._resume_training = False
        self.ext_model = None
        self._start_epoch = 0
        self.visual_data = {}
        self.load_ckpt_flag = load_ckpt_flag
        self.distributed = self.config.distributed
        if vega.is_gpu_device():
            self.distributed = not General._parallel and self.distributed
        # Used by TimmTrainerCallbacks since it builds its trainer in
        # the before_train callback
        self.lazy_built = self.config.lazy_built
        # Indicate whether the necessary components of a trainer
        # have been built for running
        self._world_size = 1
        self._rank_id = 0
        self._local_rank_id = 0
        self._next_rung = False
        self.config.kwargs = kwargs
        self.checkpoint_file_name = 'checkpoint.pth'
        self.model_pickle_file_name = 'model.pkl'
        worker_path = self.get_local_worker_path()
        self.model_path = FileOps.join_path(worker_path, self.model_pickle_file_name)
self.checkpoint_file = FileOps.join_path(worker_path, self.checkpoint_file_name) if self.multi_task: self.weights_file = FileOps.join_path(worker_path, "model_{}.pth".format(self.multi_task)) else: self.weights_file = FileOps.join_path(worker_path, "model_{}.pth".format(self.worker_id)) self.loss_input = kwargs.get('loss_input', None) self.gpu_nums = kwargs.get('gpu_nums', 1) self.use_unsupervised_pretrain = self.config.use_unsupervised_pretrain if TrainerConfig.model_desc is None: TrainerConfig.model_desc = model_desc self.standalone = General.cluster.master_ip is None or General.message_port is None def train_process(self): """Whole train process of the TrainWorker specified in config. After training, the model and validation results are saved to local_worker_path and s3_path. """ init_log(level=General.logger.level, log_file=f"{self.step_name}_worker_{self.worker_id}.log", log_path=self.local_log_path) if self.standalone: logging.info("Standalone mode. The result data will not be sent to server through report.") self._set_default_funcs() self._set_condition() self._init_callbacks() self.callbacks.init_trainer() if not self.lazy_built: self.build() self._train_loop() return self.model def build(self): """Build the trainer by assembling the necessary components.""" logging.debug("Trainer Config: {}".format(self.config)) self._init_hps() self.do_validation = self.config.with_valid self.use_syncbn = self.config.syncbn if self.use_syncbn and vega.is_torch_backend(): import apex self.model = apex.parallel.convert_syncbn_model(self.model) if not self.train_loader: self.train_loader = self._init_dataloader(mode='train') if not self.valid_loader: self.valid_loader = self._init_dataloader(mode='val') self.batch_num_train = len(self.train_loader) self.batch_num_valid = len(self.valid_loader) def train(self, inputs, labels): """Train model.""" pass def predict(self, input): """Inference model.""" pass def save(self, file_name): """Save model.""" pass def load(self, model_name, by_name): """Load model.""" pass def set_weights(self, weights): """Set weight with memory tensor.""" pass def get_weights(self): """Get the weights.""" pass def _train_epoch(self): pass def _valid_epoch(self): pass def _set_default_funcs(self): pass def _set_condition(self): pass def _init_tf_estimator(self): pass def _init_horovod_setting(self): """Init horovod setting.""" self.is_chief = True def _init_hps(self, hps=None): """Load hps from file.""" # load config if hps is not None: pass elif self.config.hps_file is not None: desc_file = self.config.hps_file.replace("{local_base_path}", self.local_base_path) hps = Config(desc_file) if "trainer" in hps: if "epochs" in hps["trainer"]: hps["trainer"].pop("epochs") if "checkpoint_path" in hps["trainer"]: hps["trainer"].pop("checkpoint_path") elif self.config.hps_folder is not None: folder = self.config.hps_folder.replace("{local_base_path}", self.local_base_path) pattern = FileOps.join_path(folder, "hps_*.json") desc_file = glob.glob(pattern)[0] hps = Config(desc_file) if "trainer" in hps: if "epochs" in hps["trainer"]: hps["trainer"].pop("epochs") if "checkpoint_path" in hps["trainer"]: hps["trainer"].pop("checkpoint_path") # merge config if not self.hps: self.hps = hps elif hps: hps.from_dict(self.hps) self.hps = hps # set config if self.hps and self.hps.get('trainer'): self.config.from_dict(self.hps.get('trainer')) self.load_checkpoint = self.config.load_checkpoint self.epochs = self.config.epochs def _init_minimize_op(self, loss, global_step, var_list=None): """Init loss 
minimize operation, including the loss scale method."""
        loss_scale = self.config.loss_scale if self.use_amp else 1.
        if loss_scale != 1:
            scaled_grad_vars = self.optimizer.compute_gradients(loss * loss_scale, var_list=var_list)
            unscaled_grad_vars = []
            for grad, var in scaled_grad_vars:
                unscaled_grad_vars.append((grad, var) if grad is None else (grad / loss_scale, var))
            minimize_op = self.optimizer.apply_gradients(unscaled_grad_vars, global_step)
        else:
            grad_vars = self.optimizer.compute_gradients(loss, var_list=var_list)
            minimize_op = self.optimizer.apply_gradients(grad_vars, global_step)
        return minimize_op

    def _init_metrics(self, metrics=None):
        """Init metrics."""
        if metrics is not None:
            return metrics
        else:
            if vega.is_torch_backend():
                from vega.metrics.pytorch.metrics import Metrics
            elif vega.is_tf_backend():
                from vega.metrics.tensorflow.metrics import Metrics
            elif vega.is_ms_backend():
                from vega.metrics.mindspore.metrics import Metrics
            return Metrics()

    def _init_dataloader(self, mode, loader=None, transforms=None):
        """Init dataloader."""
        if loader is not None:
            return loader
        if mode == "train" and self.hps is not None and self.hps.get("dataset") is not None:
            if self.hps.get("dataset") and self.hps.get("dataset").get('type'):
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET, self.hps.get("dataset").get('type'))
            else:
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
            dataset = dataset_cls(mode=mode, hps=self.hps.get("dataset"))
        elif self.hps:
            if self.hps.get("dataset") and self.hps.get("dataset").get('type'):
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET, self.hps.get("dataset").get('type'))
                dataset = dataset_cls(mode=mode, hps=self.hps.get("dataset"))
            else:
                dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
                dataset = dataset_cls(mode=mode)
        else:
            dataset_cls = ClassFactory.get_cls(ClassType.DATASET)
            dataset = dataset_cls(mode=mode)
        if transforms is not None:
            dataset.transforms = transforms
        if self.distributed and mode == "train":
            dataset.set_distributed(self._world_size, self._rank_id)
        # adapt the dataset to specific backend
        dataloader = Adapter(dataset).loader
        return dataloader

    def _train_loop(self):
        """Do the training with data, callbacks and step functions etc."""
        # Allow users to build the trainer in the before_train() callback, but they
        # should set lazy_built to True in the configuration file
        self.callbacks.before_train()
        if self.skip_train:
            return

        if self.use_unsupervised_pretrain and vega.is_torch_backend():
            from .simclr.transforms import TransformsSimCLR
            from .simclr.train import simclr_train
            train_loader = self._init_dataloader(mode="train", transforms=TransformsSimCLR())
            self.model = simclr_train(self.model, train_loader)

        while True:
            repeat_time = 1 if vega.is_ms_backend() else self.epochs
            repeat_time = 1 if vega.is_tf_backend() and self.config.train_in_once else repeat_time
            for epoch in range(self._start_epoch, repeat_time):
                epoch_logs = {'train_num_batches': self.batch_num_train}
                if self.do_validation:
                    epoch_logs.update({'valid_num_batches': self.batch_num_valid})
                self.callbacks.before_epoch(epoch, epoch_logs)
                if self.config.with_train:
                    self._train_epoch()
                if self.do_validation and self._should_run_validation(epoch):
                    self._valid_epoch()
                self.callbacks.after_epoch(epoch)
            self.callbacks.after_train()
            if not self._next_rung:
                break

        if self.distributed:
            self._shutdown_distributed()

    def _should_run_validation(self, epoch):
        # Zero valid_interval means the trainer's _valid_loop is not run,
        # and users may provide _valid_loop in other callbacks
        if self.valid_interval == 0:
            return False
else: return epoch % self.valid_interval == 0 or (epoch + 1) == self.epochs def _init_callbacks(self): disables = [] customs = self.config.callbacks or [] if customs and not isinstance(customs, list): customs = [customs] if not self.config.model_statistics: disables.append('ModelStatistics') self.callbacks = CallbackList(customs, disables) self.callbacks.set_trainer(self) def _metric_average(self, val, name): """Do metric average. :param val: input value :param name: metric name :return: """ import torch import horovod.torch as hvd tensor = torch.tensor(val) avg_tensor = hvd.allreduce(tensor, name=name) return avg_tensor.item() def _backup(self): """Backup result worker folder.""" if self.need_backup is True and self.backup_base_path is not None: backup_worker_path = FileOps.join_path( self.backup_base_path, self.get_worker_subpath()) FileOps.copy_folder( self.get_local_worker_path(self.step_name, self.worker_id), backup_worker_path) def _shutdown_distributed(self): if vega.is_npu_device() and self.distributed and vega.is_tf_backend(): self.sess.run(self.npu_shutdown) self.sess.close() ```
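The `_train_loop` in trainer_base.py above drives a fixed hook protocol on its callbacks. Below is a minimal, hypothetical sketch of that ordering; only the hook names (`before_train`, `before_epoch`, `after_epoch`, `after_train`) and the validation rule come from the file above, while `PrintingCallback` and `toy_train_loop` are illustrations, not Vega APIs.
```python
# Hypothetical sketch of the hook order TrainerBase._train_loop drives.
class PrintingCallback:
    def before_train(self, logs=None):
        print("before_train")

    def before_epoch(self, epoch, logs=None):
        print(f"before_epoch {epoch}, train batches: {logs['train_num_batches']}")

    def after_epoch(self, epoch, logs=None):
        print(f"after_epoch {epoch}")

    def after_train(self, logs=None):
        print("after_train")


def toy_train_loop(callback, epochs=3, valid_interval=2):
    callback.before_train()
    for epoch in range(epochs):
        callback.before_epoch(epoch, {"train_num_batches": 10})
        # Mirrors _should_run_validation: validate on multiples of
        # valid_interval and always on the final epoch.
        validate = valid_interval != 0 and (
            epoch % valid_interval == 0 or (epoch + 1) == epochs)
        print(f"  train epoch {epoch}, validate={validate}")
        callback.after_epoch(epoch)
    callback.after_train()


toy_train_loop(PrintingCallback())
```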
{ "source": "jie311/YOLOP", "score": 2 }
#### File: lib/utils/utils.py
```python
import os
import logging
import time
from collections import namedtuple
from pathlib import Path

import torch
import torch.optim as optim
import torch.nn as nn
import numpy as np
from torch.utils.data import DataLoader
from prefetch_generator import BackgroundGenerator
from contextlib import contextmanager
import re


def clean_str(s):
    # Cleans a string by replacing special characters with underscore _
    return re.sub(pattern="[|@#!¡·$€%&()=?¿^*;:,¨´><+]", repl="_", string=s)


def create_logger(cfg, cfg_path, phase='train', rank=-1):
    # set up logger dir
    dataset = cfg.DATASET.DATASET
    dataset = dataset.replace(':', '_')
    model = cfg.MODEL.NAME
    cfg_path = os.path.basename(cfg_path).split('.')[0]

    if rank in [-1, 0]:
        time_str = time.strftime('%Y-%m-%d-%H-%M')
        log_file = '{}_{}_{}.log'.format(cfg_path, time_str, phase)
        # set up tensorboard_log_dir
        tensorboard_log_dir = Path(cfg.LOG_DIR) / dataset / model / \
            (cfg_path + '_' + time_str)
        final_output_dir = tensorboard_log_dir
        if not tensorboard_log_dir.exists():
            print('=> creating {}'.format(tensorboard_log_dir))
            tensorboard_log_dir.mkdir(parents=True)

        final_log_file = tensorboard_log_dir / log_file
        head = '%(asctime)-15s %(message)s'
        logging.basicConfig(filename=str(final_log_file),
                            format=head)
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        console = logging.StreamHandler()
        logging.getLogger('').addHandler(console)

        return logger, str(final_output_dir), str(tensorboard_log_dir)
    else:
        return None, None, None


def select_device(logger=None, device='', batch_size=None):
    # device = 'cpu' or '0' or '0,1,2,3'
    cpu_request = device.lower() == 'cpu'
    if device and not cpu_request:  # if device requested other than 'cpu'
        os.environ['CUDA_VISIBLE_DEVICES'] = device  # set environment variable
        assert torch.cuda.is_available(), 'CUDA unavailable, invalid device %s requested' % device  # check availability

    cuda = False if cpu_request else torch.cuda.is_available()
    if cuda:
        c = 1024 ** 2  # bytes to MB
        ng = torch.cuda.device_count()
        if ng > 1 and batch_size:  # check that batch_size is compatible with device_count
            assert batch_size % ng == 0, 'batch-size %g not multiple of GPU count %g' % (batch_size, ng)
        x = [torch.cuda.get_device_properties(i) for i in range(ng)]
        s = f'Using torch {torch.__version__} '
        for i in range(0, ng):
            if i == 1:
                s = ' ' * len(s)
            if logger:
                logger.info("%sCUDA:%g (%s, %dMB)" % (s, i, x[i].name, x[i].total_memory / c))
    else:
        if logger:
            logger.info(f'Using torch {torch.__version__} CPU')

    if logger:
        logger.info('')  # skip a line
    return torch.device('cuda:0' if cuda else 'cpu')


def get_optimizer(cfg, model):
    optimizer = None
    if cfg.TRAIN.OPTIMIZER == 'sgd':
        optimizer = optim.SGD(
            filter(lambda p: p.requires_grad, model.parameters()),
            lr=cfg.TRAIN.LR0,
            momentum=cfg.TRAIN.MOMENTUM,
            weight_decay=cfg.TRAIN.WD,
            nesterov=cfg.TRAIN.NESTEROV
        )
    elif cfg.TRAIN.OPTIMIZER == 'adam':
        optimizer = optim.Adam(
            filter(lambda p: p.requires_grad, model.parameters()),
            # model.parameters(),
            lr=cfg.TRAIN.LR0,
            betas=(cfg.TRAIN.MOMENTUM, 0.999)
        )

    return optimizer


def save_checkpoint(epoch, name, model, optimizer, output_dir, filename, is_best=False):
    model_state = model.module.state_dict() if is_parallel(model) else model.state_dict()
    checkpoint = {
        'epoch': epoch,
        'model': name,
        'state_dict': model_state,
        # 'best_state_dict': model.module.state_dict(),
        # 'perf': perf_indicator,
        'optimizer': optimizer.state_dict(),
    }
    torch.save(checkpoint, os.path.join(output_dir, filename))
    if is_best and 'state_dict' in checkpoint:
torch.save(checkpoint['best_state_dict'], os.path.join(output_dir, 'model_best.pth')) def initialize_weights(model): for m in model.modules(): t = type(m) if t is nn.Conv2d: pass # nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') elif t is nn.BatchNorm2d: m.eps = 1e-3 m.momentum = 0.03 elif t in [nn.Hardswish, nn.LeakyReLU, nn.ReLU, nn.ReLU6]: # elif t in [nn.LeakyReLU, nn.ReLU, nn.ReLU6]: m.inplace = True def xyxy2xywh(x): # Convert nx4 boxes from [x1, y1, x2, y2] to [x, y, w, h] where xy1=top-left, xy2=bottom-right y = x.clone() if isinstance(x, torch.Tensor) else np.copy(x) y[:, 0] = (x[:, 0] + x[:, 2]) / 2 # x center y[:, 1] = (x[:, 1] + x[:, 3]) / 2 # y center y[:, 2] = x[:, 2] - x[:, 0] # width y[:, 3] = x[:, 3] - x[:, 1] # height return y def is_parallel(model): return type(model) in (nn.parallel.DataParallel, nn.parallel.DistributedDataParallel) def time_synchronized(): torch.cuda.synchronize() if torch.cuda.is_available() else None return time.time() class DataLoaderX(DataLoader): """prefetch dataloader""" def __iter__(self): return BackgroundGenerator(super().__iter__()) @contextmanager def torch_distributed_zero_first(local_rank: int): """ Decorator to make all processes in distributed training wait for each local_master to do something. """ if local_rank not in [-1, 0]: torch.distributed.barrier() yield if local_rank == 0: torch.distributed.barrier() ``` #### File: jie311/YOLOP/test_onnx.py ```python import os import cv2 import torch import argparse import onnxruntime as ort import numpy as np from lib.core.general import non_max_suppression def resize_unscale(img, new_shape=(640, 640), color=114): shape = img.shape[:2] # current shape [height, width] if isinstance(new_shape, int): new_shape = (new_shape, new_shape) canvas = np.zeros((new_shape[0], new_shape[1], 3)) canvas.fill(color) # Scale ratio (new / old) new_shape(h,w) r = min(new_shape[0] / shape[0], new_shape[1] / shape[1]) # Compute padding new_unpad = int(round(shape[1] * r)), int(round(shape[0] * r)) # w,h new_unpad_w = new_unpad[0] new_unpad_h = new_unpad[1] pad_w, pad_h = new_shape[1] - new_unpad_w, new_shape[0] - new_unpad_h # wh padding dw = pad_w // 2 # divide padding into 2 sides dh = pad_h // 2 if shape[::-1] != new_unpad: # resize img = cv2.resize(img, new_unpad, interpolation=cv2.INTER_AREA) canvas[dh:dh + new_unpad_h, dw:dw + new_unpad_w, :] = img return canvas, r, dw, dh, new_unpad_w, new_unpad_h # (dw,dh) def infer_yolop(weight="yolop-640-640.onnx", img_path="./inference/images/7dd9ef45-f197db95.jpg"): ort.set_default_logger_severity(4) onnx_path = f"./weights/{weight}" ort_session = ort.InferenceSession(onnx_path) print(f"Load {onnx_path} done!") outputs_info = ort_session.get_outputs() inputs_info = ort_session.get_inputs() for ii in inputs_info: print("Input: ", ii) for oo in outputs_info: print("Output: ", oo) print("num outputs: ", len(outputs_info)) save_det_path = f"./pictures/detect_onnx.jpg" save_da_path = f"./pictures/da_onnx.jpg" save_ll_path = f"./pictures/ll_onnx.jpg" save_merge_path = f"./pictures/output_onnx.jpg" img_bgr = cv2.imread(img_path) height, width, _ = img_bgr.shape # convert to RGB img_rgb = img_bgr[:, :, ::-1].copy() # resize & normalize canvas, r, dw, dh, new_unpad_w, new_unpad_h = resize_unscale(img_rgb, (640, 640)) img = canvas.copy().astype(np.float32) # (3,640,640) RGB img /= 255.0 img[:, :, 0] -= 0.485 img[:, :, 1] -= 0.456 img[:, :, 2] -= 0.406 img[:, :, 0] /= 0.229 img[:, :, 1] /= 0.224 img[:, :, 2] /= 0.225 img = img.transpose(2, 0, 1) img = 
np.expand_dims(img, 0) # (1, 3,640,640) # inference: (1,n,6) (1,2,640,640) (1,2,640,640) det_out, da_seg_out, ll_seg_out = ort_session.run( ['det_out', 'drive_area_seg', 'lane_line_seg'], input_feed={"images": img} ) det_out = torch.from_numpy(det_out).float() boxes = non_max_suppression(det_out)[0] # [n,6] [x1,y1,x2,y2,conf,cls] boxes = boxes.cpu().numpy().astype(np.float32) if boxes.shape[0] == 0: print("no bounding boxes detected.") return # scale coords to original size. boxes[:, 0] -= dw boxes[:, 1] -= dh boxes[:, 2] -= dw boxes[:, 3] -= dh boxes[:, :4] /= r print(f"detect {boxes.shape[0]} bounding boxes.") img_det = img_rgb[:, :, ::-1].copy() for i in range(boxes.shape[0]): x1, y1, x2, y2, conf, label = boxes[i] x1, y1, x2, y2, label = int(x1), int(y1), int(x2), int(y2), int(label) img_det = cv2.rectangle(img_det, (x1, y1), (x2, y2), (0, 255, 0), 2, 2) cv2.imwrite(save_det_path, img_det) # select da & ll segment area. da_seg_out = da_seg_out[:, :, dh:dh + new_unpad_h, dw:dw + new_unpad_w] ll_seg_out = ll_seg_out[:, :, dh:dh + new_unpad_h, dw:dw + new_unpad_w] da_seg_mask = np.argmax(da_seg_out, axis=1)[0] # (?,?) (0|1) ll_seg_mask = np.argmax(ll_seg_out, axis=1)[0] # (?,?) (0|1) print(da_seg_mask.shape) print(ll_seg_mask.shape) color_area = np.zeros((new_unpad_h, new_unpad_w, 3), dtype=np.uint8) color_area[da_seg_mask == 1] = [0, 255, 0] color_area[ll_seg_mask == 1] = [255, 0, 0] color_seg = color_area # convert to BGR color_seg = color_seg[..., ::-1] color_mask = np.mean(color_seg, 2) img_merge = canvas[dh:dh + new_unpad_h, dw:dw + new_unpad_w, :] img_merge = img_merge[:, :, ::-1] # merge: resize to original size img_merge[color_mask != 0] = \ img_merge[color_mask != 0] * 0.5 + color_seg[color_mask != 0] * 0.5 img_merge = img_merge.astype(np.uint8) img_merge = cv2.resize(img_merge, (width, height), interpolation=cv2.INTER_LINEAR) for i in range(boxes.shape[0]): x1, y1, x2, y2, conf, label = boxes[i] x1, y1, x2, y2, label = int(x1), int(y1), int(x2), int(y2), int(label) img_merge = cv2.rectangle(img_merge, (x1, y1), (x2, y2), (0, 255, 0), 2, 2) # da: resize to original size da_seg_mask = da_seg_mask * 255 da_seg_mask = da_seg_mask.astype(np.uint8) da_seg_mask = cv2.resize(da_seg_mask, (width, height), interpolation=cv2.INTER_LINEAR) # ll: resize to original size ll_seg_mask = ll_seg_mask * 255 ll_seg_mask = ll_seg_mask.astype(np.uint8) ll_seg_mask = cv2.resize(ll_seg_mask, (width, height), interpolation=cv2.INTER_LINEAR) cv2.imwrite(save_merge_path, img_merge) cv2.imwrite(save_da_path, da_seg_mask) cv2.imwrite(save_ll_path, ll_seg_mask) print("detect done.") if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--weight', type=str, default="yolop-640-640.onnx") parser.add_argument('--img', type=str, default="./inference/images/9aa94005-ff1d4c9a.jpg") args = parser.parse_args() infer_yolop(weight=args.weight, img_path=args.img) """ PYTHONPATH=. python3 ./test_onnx.py --weight yolop-640-640.onnx --img test.jpg """ ```
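The box rescaling at the end of `infer_yolop` undoes the letterbox transform that `resize_unscale` applies. A standalone check of that arithmetic, as a plain-Python sketch (no cv2 needed; the 720x1280 frame size is just an example):
```python
def letterbox_params(h, w, new_h=640, new_w=640):
    # Same scale/padding math as resize_unscale above.
    r = min(new_h / h, new_w / w)
    new_unpad_w, new_unpad_h = int(round(w * r)), int(round(h * r))
    dw = (new_w - new_unpad_w) // 2  # left padding
    dh = (new_h - new_unpad_h) // 2  # top padding
    return r, dw, dh, new_unpad_w, new_unpad_h


# A 720x1280 frame scales by r=0.5 to 360x640, padded 140 px top and bottom.
r, dw, dh, uw, uh = letterbox_params(720, 1280)
assert (r, dw, dh, uw, uh) == (0.5, 0, 140, 640, 360)

# Mapping a canvas-space corner back to the original image, exactly as
# infer_yolop does for each detected box: subtract the padding, divide by r.
x1_canvas, y1_canvas = 100, 200
print((x1_canvas - dw) / r, (y1_canvas - dh) / r)  # 200.0 120.0
```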
{ "source": "jie311/yolox_keypoint_segment", "score": 3 }
#### File: datasets/plate_kp/yolotovoc.py
```python
from xml.dom.minidom import Document
from lxml.etree import Element, SubElement, tostring
import pprint
from xml.dom.minidom import parseString
import cv2, os


class XmlMaker:

    def __init__(self, txtpath, xmlpath):
        self.txtPath = txtpath
        self.xmlPath = xmlpath
        self.txtList = []
        self.keypoint = True
        self.color = ['blue', 'green']

    def readtxt(self):
        jpg = []
        txtfile = open(self.txtPath, "r", encoding='gbk', errors='ignore')
        self.txtList = txtfile.readlines()
        for i in self.txtList:
            jpg = i.strip().split(" ")[0]
            if self.keypoint:
                try:
                    keypoints = i.strip().split(" ")[1]
                    xys = i.strip().split(" ")[2:]
                except:
                    xys = []
            else:
                xys = i.strip().split(" ")[1:]

            node_root = Element('annotation')
            node_folder = SubElement(node_root, 'folder')
            node_folder.text = 'VOC2012'
            node_filename = SubElement(node_root, 'filename')
            node_filename.text = jpg
            img = cv2.imread(jpg)
            try:
                shape = img.shape
            except:
                print('skip ', jpg)
                continue
            node_size = SubElement(node_root, 'size')
            node_width = SubElement(node_size, 'width')
            node_width.text = str(shape[1])

            node_height = SubElement(node_size, 'height')
            node_height.text = str(shape[0])

            node_depth = SubElement(node_size, 'depth')
            node_depth.text = '3'

            for xy in xys:
                list_xy = xy.split(",")
                x_min = list_xy[0]
                y_min = list_xy[1]
                x_max = list_xy[2]
                y_max = list_xy[3]
                classes = list_xy[4]
                node_object = SubElement(node_root, 'object')
                node_name = SubElement(node_object, 'name')
                node_name.text = self.color[int(classes)]
                node_difficult = SubElement(node_object, 'difficult')
                node_difficult.text = '0'
                node_bndbox = SubElement(node_object, 'bndbox')
                node_xmin = SubElement(node_bndbox, 'xmin')
                node_xmin.text = str(x_min)
                node_ymin = SubElement(node_bndbox, 'ymin')
                node_ymin.text = str(y_min)
                node_xmax = SubElement(node_bndbox, 'xmax')
                node_xmax.text = str(x_max)
                node_ymax = SubElement(node_bndbox, 'ymax')
                node_ymax.text = str(y_max)
                if self.keypoint:
                    node_keypoint = SubElement(node_object, 'keypoints')
                    node_keypoint.text = keypoints

            xml = tostring(node_root, pretty_print=True)  # pretty-print, adding line breaks where needed
            xml_name = jpg.split("/")[-1][:-4] + ".xml"
            print(xml_name)
            with open(self.xmlPath + "/" + xml_name, "wb") as f:
                f.write(xml)
                f.close()


if __name__ == "__main__":
    read = XmlMaker("train.txt", "images")
    read.readtxt()
    os.rename('./images', './Annotations')
```
#### File: yolox/models/darknet.py
```python
from torch import nn

from .network_blocks import BaseConv, CSPLayer, DWConv, Focus, ResLayer, SPPBottleneck


class Darknet(nn.Module):
    # number of blocks from dark2 to dark5.
    depth2blocks = {21: [1, 2, 2, 1], 53: [2, 8, 8, 4]}

    def __init__(
        self,
        depth,
        in_channels=3,
        stem_out_channels=32,
        out_features=("dark3", "dark4", "dark5"),
    ):
        """
        Args:
            depth (int): depth of darknet used in model, usually use [21, 53] for this param.
            in_channels (int): number of input channels, for example, use 3 for RGB image.
            stem_out_channels (int): number of output channels of darknet stem.
                It decides channels of darknet layer2 to layer5.
            out_features (Tuple[str]): desired output layer name.
        """
        super().__init__()
        assert out_features, "please provide output features of Darknet"
        self.out_features = out_features
        self.stem = nn.Sequential(
            BaseConv(in_channels, stem_out_channels, ksize=3, stride=1, act="lrelu"),
            *self.make_group_layer(stem_out_channels, num_blocks=1, stride=2),
        )
        in_channels = stem_out_channels * 2  # 64

        num_blocks = Darknet.depth2blocks[depth]
        # create darknet with `stem_out_channels` and `num_blocks` layers.
# to make model structure more clear, we don't use `for` statement in python. self.dark2 = nn.Sequential( *self.make_group_layer(in_channels, num_blocks[0], stride=2) ) in_channels *= 2 # 128 self.dark3 = nn.Sequential( *self.make_group_layer(in_channels, num_blocks[1], stride=2) ) in_channels *= 2 # 256 self.dark4 = nn.Sequential( *self.make_group_layer(in_channels, num_blocks[2], stride=2) ) in_channels *= 2 # 512 self.dark5 = nn.Sequential( *self.make_group_layer(in_channels, num_blocks[3], stride=2), *self.make_spp_block([in_channels, in_channels * 2], in_channels * 2), ) def make_group_layer(self, in_channels: int, num_blocks: int, stride: int = 1): "starts with conv layer then has `num_blocks` `ResLayer`" return [ BaseConv(in_channels, in_channels * 2, ksize=3, stride=stride, act="lrelu"), *[(ResLayer(in_channels * 2)) for _ in range(num_blocks)], ] def make_spp_block(self, filters_list, in_filters): m = nn.Sequential( *[ BaseConv(in_filters, filters_list[0], 1, stride=1, act="lrelu"), BaseConv(filters_list[0], filters_list[1], 3, stride=1, act="lrelu"), SPPBottleneck( in_channels=filters_list[1], out_channels=filters_list[0], activation="lrelu", ), BaseConv(filters_list[0], filters_list[1], 3, stride=1, act="lrelu"), BaseConv(filters_list[1], filters_list[0], 1, stride=1, act="lrelu"), ] ) return m def forward(self, x): outputs = {} x = self.stem(x) outputs["stem"] = x x = self.dark2(x) outputs["dark2"] = x x = self.dark3(x) outputs["dark3"] = x x = self.dark4(x) outputs["dark4"] = x x = self.dark5(x) outputs["dark5"] = x return {k: v for k, v in outputs.items() if k in self.out_features} class CSPDarknet(nn.Module): def __init__( self, img_channel, dep_mul, wid_mul, out_features=("dark3", "dark4", "dark5"), depthwise=False, act="silu", ): super().__init__() assert out_features, "please provide output features of Darknet" self.out_features = out_features Conv = DWConv if depthwise else BaseConv base_channels = int(wid_mul * 64) # 64 base_depth = max(round(dep_mul * 3), 1) # 3 # stem self.stem = Focus(img_channel, base_channels, ksize=3, act=act) # dark2 self.dark2 = nn.Sequential( Conv(base_channels, base_channels * 2, 3, 2, act=act), CSPLayer( base_channels * 2, base_channels * 2, n=base_depth, depthwise=depthwise, act=act, ), ) # dark3 self.dark3 = nn.Sequential( Conv(base_channels * 2, base_channels * 4, 3, 2, act=act), CSPLayer( base_channels * 4, base_channels * 4, n=base_depth * 3, depthwise=depthwise, act=act, ), ) # dark4 self.dark4 = nn.Sequential( Conv(base_channels * 4, base_channels * 8, 3, 2, act=act), CSPLayer( base_channels * 8, base_channels * 8, n=base_depth * 3, depthwise=depthwise, act=act, ), ) # dark5 self.dark5 = nn.Sequential( Conv(base_channels * 8, base_channels * 16, 3, 2, act=act), SPPBottleneck(base_channels * 16, base_channels * 16, activation=act), CSPLayer( base_channels * 16, base_channels * 16, n=base_depth, shortcut=False, depthwise=depthwise, act=act, ), ) def forward(self, x): outputs = {} x = self.stem(x) outputs["stem"] = x x = self.dark2(x) outputs["dark2"] = x x = self.dark3(x) outputs["dark3"] = x x = self.dark4(x) outputs["dark4"] = x x = self.dark5(x) outputs["dark5"] = x return {k: v for k, v in outputs.items() if k in self.out_features} ``` #### File: yolox/models/yolo_pafpn.py ```python import torch import torch.nn as nn from .darknet import CSPDarknet from .coatnet import coatnet_0, coatnet_2 from .network_blocks import BaseConv, CSPLayer, DWConv class YOLOPAFPN(nn.Module): """ YOLOv3 model. 
Darknet 53 is the default backbone of this model. """ def __init__( self, img_channel=3, depth=1.0, width=1.0, in_features=("dark3", "dark4", "dark5"), in_channels=[256, 512, 1024], depthwise=False, act="silu", backbone_name='CSPDarknet', input_size=(320, 320) ): super().__init__() if backbone_name == 'CoAtNet': self.backbone = coatnet_2(img_shape=input_size, img_channel=img_channel, dep_mul=depth, wid_mul=width, out_features=in_features) else: self.backbone = CSPDarknet(img_channel, depth, width, depthwise=depthwise, act=act, out_features=in_features) self.in_features = in_features self.in_channels = in_channels Conv = DWConv if depthwise else BaseConv self.upsample = nn.Upsample(scale_factor=2, mode="nearest") self.lateral_conv0 = BaseConv( int(in_channels[-1] * width), int(in_channels[-2] * width), 1, 1, act=act ) self.C3_p4 = CSPLayer( int(2 * in_channels[-2] * width), int(in_channels[-2] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) # cat self.reduce_conv1 = BaseConv( int(in_channels[-2] * width), int(in_channels[-3] * width), 1, 1, act=act ) self.C3_p3 = CSPLayer( int(2 * in_channels[-3] * width), int(in_channels[-3] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) # bottom-up conv self.bu_conv2 = Conv( int(in_channels[-3] * width), int(in_channels[-3] * width), 3, 2, act=act ) self.C3_n3 = CSPLayer( int(2 * in_channels[-3] * width), int(in_channels[-2] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) # bottom-up conv self.bu_conv1 = Conv( int(in_channels[-2] * width), int(in_channels[-2] * width), 3, 2, act=act ) self.C3_n4 = CSPLayer( int(2 * in_channels[-2] * width), int(in_channels[-1] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) if len(self.in_channels) == 4: self.reduce_conv2 = BaseConv( int(in_channels[-3] * width), int(in_channels[-4] * width), 1, 1, act=act ) self.C3_p2 = CSPLayer( int(2 * in_channels[-4] * width), int(in_channels[-4] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) self.bu_conv3 = Conv( int(in_channels[-4] * width), int(in_channels[-4] * width), 3, 2, act=act ) self.C3_n2 = CSPLayer( int(2 * in_channels[-4] * width), int(in_channels[-3] * width), round(3 * depth), False, depthwise=depthwise, act=act, ) def forward(self, input): """ Args: inputs: input images. Returns: Tuple[Tensor]: FPN feature. 
""" # backbone out_features = self.backbone(input) features = [out_features[f] for f in self.in_features] if len(features) == 3: [x2, x1, x0] = features # 尺寸从大到小 fpn_out0 = self.lateral_conv0(x0) # in:512,10,10 out:v,10,10 f_out0 = self.upsample(fpn_out0) # in:256,10,10 out:256,20,20 f_out0 = torch.cat([f_out0, x1], 1) # in:256,20,20 out:512,20,20 f_out0 = self.C3_p4(f_out0) # in:512,20,20 out:256,20,20 fpn_out1 = self.reduce_conv1(f_out0) # in:256,20,20 out:128,20,20 f_out1 = self.upsample(fpn_out1) # in:128,20,20 out:128,40,40 f_out1 = torch.cat([f_out1, x2], 1) # in::128,40,40 out:256,40,40 pan_out2 = self.C3_p3(f_out1) # in:256,40,40 out:128,40,40 p_out1 = self.bu_conv2(pan_out2) # in:128,40,40 out:128,20,20 p_out1 = torch.cat([p_out1, fpn_out1], 1) # int:128,20,20 out:256,20,20 pan_out1 = self.C3_n3(p_out1) # in:256,20,20 out:256,20,20 p_out0 = self.bu_conv1(pan_out1) # in:256,20,20 out:256,10,10 p_out0 = torch.cat([p_out0, fpn_out0], 1) # in:256,10,10 out:512,10,10 pan_out0 = self.C3_n4(p_out0) # in:512,10,10 out:512,10,10 outputs = (pan_out2, pan_out1, pan_out0) else: [x3, x2, x1, x0] = features # 尺寸从大到小 fpn_out0 = self.lateral_conv0(x0) # in:512,10,10 out:v,10,10 f_out0 = self.upsample(fpn_out0) # in:256,10,10 out:256,20,20 f_out0 = torch.cat([f_out0, x1], 1) # in:256,20,20 out:512,20,20 f_out0 = self.C3_p4(f_out0) # in:512,20,20 out:256,20,20 fpn_out1 = self.reduce_conv1(f_out0) # in:256,20,20 out:128,20,20 f_out1 = self.upsample(fpn_out1) # in:128,20,20 out:128,40,40 f_out1 = torch.cat([f_out1, x2], 1) # in::128,40,40 out:256,40,40 f_out1 = self.C3_p3(f_out1) # in:256,40,40 out:128,40,40 fpn_out2 = self.reduce_conv2(f_out1) # in:128,40,40 out:64,40,40 f_out2 = self.upsample(fpn_out2) # in:64,40,40 out:64,80,80 f_out2 = torch.cat([f_out2, x3], 1) # in::64,80,80 out:128,80,80 pan_out3 = self.C3_p2(f_out2) # in:128,80,80 out:64,80,80 p_out2 = self.bu_conv3(pan_out3) # in:64,80,80 out:64,40,40 p_out2 = torch.cat([p_out2, fpn_out2], 1) # int:64,40,40 out:128,40,40 pan_out2 = self.C3_n2(p_out2) # in:128,40,40 out:128,40,40 p_out1 = self.bu_conv2(pan_out2) # in:128,40,40 out:128,20,20 p_out1 = torch.cat([p_out1, fpn_out1], 1) # int:128,20,20 out:256,20,20 pan_out1 = self.C3_n3(p_out1) # in:256,20,20 out:256,20,20 p_out0 = self.bu_conv1(pan_out1) # in:256,20,20 out:256,10,10 p_out0 = torch.cat([p_out0, fpn_out0], 1) # in:256,10,10 out:512,10,10 pan_out0 = self.C3_n4(p_out0) # in:512,10,10 out:512,10,10 outputs = (pan_out3, pan_out2, pan_out1, pan_out0) return outputs ```
{ "source": "jie8357IOII/airflow-dynamic-etl", "score": 2 }
#### File: airflow-dynamic-etl/etl/sample.py ```python import errno import logging import os import random import sys import time from etl_register import ETLCrawler class sample_ETL(ETLCrawler): execute_cron_time = "00 20 * * *" @ETLCrawler.register_extract(1) def extract_something(self, ds, **task_kwargs): print('I am something') @ETLCrawler.register_extract(2) def extract_otherthing(self, ds, **task_kwargs): print('otherthing') @ETLCrawler.register_extract(3) def extract_anotherthing(self, ds, **task_kwargs): print('anotherthing') @ETLCrawler.register_transform(1) def transform1(self, *args, **kwargs): print('transform1 by order 1') @ETLCrawler.register_transform(3) def transform2(self, *args, **kwargs): print('transform2 by order 3') @ETLCrawler.register_transform(2) def transform3(self, *args, **kwargs): print('transform3 by order 2') @ETLCrawler.register_load(1) def load(self, *args, **kwargs): print('do load') ```
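`etl_register` itself is not included in this snippet, so the semantics of `register_extract`/`register_transform`/`register_load` have to be inferred from their use above. One plausible, minimal sketch of such an order-keyed decorator registry; every internal detail here is an assumption:
```python
class ETLCrawlerSketch:
    """Hypothetical stand-in for etl_register.ETLCrawler."""

    _extracts = []

    @classmethod
    def register_extract(cls, order):
        # Store (order, function) pairs; the decorator leaves the method usable.
        def decorator(func):
            cls._extracts.append((order, func))
            return func
        return decorator

    def run_extracts(self, ds=None, **task_kwargs):
        # Run steps in ascending registration order, independent of the
        # order they appear in the subclass body.
        for _, func in sorted(self._extracts, key=lambda pair: pair[0]):
            func(self, ds, **task_kwargs)


class DemoETL(ETLCrawlerSketch):
    @ETLCrawlerSketch.register_extract(2)
    def extract_b(self, ds, **task_kwargs):
        print("extract_b (order 2)")

    @ETLCrawlerSketch.register_extract(1)
    def extract_a(self, ds, **task_kwargs):
        print("extract_a (order 1)")


DemoETL().run_extracts(ds="2024-01-01")  # prints extract_a, then extract_b
```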
{ "source": "jieatelement/quickstart-aws-industrial-machine-connectivity", "score": 2 }
#### File: source/AssetModelIngestion/sitewiseUtils.py ```python from datetime import datetime, date import json import logging import pprint import time import boto3 from botocore.exceptions import ClientError log = logging.getLogger('assetModelConverter') log.setLevel(logging.DEBUG) class SitewiseUtils: """ """ def __init__(self): self.sitewise = boto3.client('iotsitewise') self.unsupportedDataTypes = ['Template'] self.dataTypeTable = { "Int8": "INTEGER", "Int16": "INTEGER", "Int32": "INTEGER", "Int64": "INTEGER", "Float": "DOUBLE", "Double": "DOUBLE", "Boolean": "BOOLEAN", "String": "STRING", "DateTime": "INTEGER" } self.pollWaitTime = 0.5 @staticmethod def jsonSerial(obj): if isinstance(obj, (datetime, date)): return obj.isoformat() raise TypeError("Type %s not serializable" % type(obj)) def waitForActiveAssetModel(self, assetModelId): modelDescription = self.sitewise.describe_asset_model( assetModelId=assetModelId ) while modelDescription['assetModelStatus']['state'] != 'ACTIVE': time.sleep(self.pollWaitTime) modelDescription = self.sitewise.describe_asset_model( assetModelId=assetModelId ) return modelDescription def createAssetModel(self, modelName, modelMetrics=None, modelHierarchies=None): log.info('Creating model {}'.format(modelName)) modelProperties = [] if not modelHierarchies: modelHierarchies = [] if modelMetrics: for metric in modelMetrics: if metric['dataType'] in self.unsupportedDataTypes: continue modelProperties.append({ 'name': metric['name'], 'dataType': self.dataTypeTable.get(metric['dataType']), 'type': { 'measurement': {} } }) model = self.sitewise.create_asset_model( assetModelName=modelName, assetModelProperties=modelProperties, assetModelHierarchies=modelHierarchies, ) return self.waitForActiveAssetModel(assetModelId=model['assetModelId']) def listAssetModels(self): paginator = self.sitewise.get_paginator('list_asset_models') pageIterator = paginator.paginate() modelList = [] for page in pageIterator: for assetModel in page['assetModelSummaries']: modelList.append(assetModel) return modelList def describeAssetModel(self, assetModelId): try: return self.sitewise.describe_asset_model(assetModelId=assetModelId) except self.sitewise.exceptions.ResourceNotFoundException: return None def updateAssetModel(self, assetModelId, hierarchies=None, overwriteData=False): log.info(f'Updating AssetModelId {assetModelId}') myModelData = self.describeAssetModel(assetModelId=assetModelId) myHierarchies = myModelData['assetModelHierarchies'] if hierarchies is not None: if overwriteData: myHierarchies = hierarchies else: myHierarchies += hierarchies response = self.sitewise.update_asset_model( assetModelName=myModelData['assetModelName'], assetModelId=assetModelId, assetModelHierarchies=myHierarchies, ) modelDescription = self.waitForActiveAssetModel(assetModelId=assetModelId) return modelDescription def deleteAssetModel(self, assetModelId): log.info(f'Deleting AssetModelId {assetModelId}') self.sitewise.delete_asset_model(assetModelId=assetModelId) modelDescription = self.describeAssetModel(assetModelId=assetModelId) while modelDescription and modelDescription['assetModelStatus']['state'] == 'DELETING': time.sleep(1) modelDescription = self.describeAssetModel(assetModelId=assetModelId) def createAsset(self, assetName, assetModelId): log.info(f"Creating asset {assetName}") asset = self.sitewise.create_asset( assetName=assetName, assetModelId=assetModelId ) assetDescription = self.sitewise.describe_asset( assetId=asset['assetId'] ) while assetDescription['assetStatus']['state'] != 
'ACTIVE': # log.info('Wait 1 second until asset is active') time.sleep(1) assetDescription = self.sitewise.describe_asset( assetId=asset['assetId'] ) return assetDescription def updateAssetProperties(self, assetId, assetProperties, assetAliases): for propRef in assetProperties: self.sitewise.update_asset_property( assetId=assetId, propertyId=propRef['id'], propertyNotificationState='ENABLED', propertyAlias=assetAliases[propRef['name']] ) def listAssets(self, assetModelId): paginator = self.sitewise.get_paginator('list_assets') pageIterator = paginator.paginate(assetModelId=assetModelId) assetList = [] for page in pageIterator: for asset in page['assetSummaries']: assetList.append(asset) return assetList def listAssociatedAssets(self, assetId, hierarchyId): paginator = self.sitewise.get_paginator('list_associated_assets') pageIterator = paginator.paginate(assetId=assetId, hierarchyId=hierarchyId) assocAssetsList = [] for page in pageIterator: # print(json.dumps(page, indent=4, sort_keys=True, default=self.jsonSerial)) for asset in page['assetSummaries']: assocAssetsList.append(asset) return assocAssetsList def describeAsset(self, assetId): try: return self.sitewise.describe_asset(assetId=assetId) except self.sitewise.exceptions.ResourceNotFoundException: return None def deleteAsset(self, assetId): log.info(f'Deleting AssetId {assetId}') self.sitewise.delete_asset(assetId=assetId) assetDescription = self.describeAsset(assetId=assetId) while assetDescription and assetDescription['assetStatus']['state'] == 'DELETING': time.sleep(1) assetDescription = self.describeAsset(assetId=assetId) def disassociateAllAssets(self): assetModelsList = self.listAssetModels() for model in assetModelsList: assetsList = self.listAssets(model['id']) for asset in assetsList: for hierRef in asset['hierarchies']: assocAssetsList = self.listAssociatedAssets(asset['id'], hierRef['id']) for assocRef in assocAssetsList: self.sitewise.disassociate_assets( assetId=asset['id'], hierarchyId=hierRef['id'], childAssetId=assocRef['id'], ) def deleteAllAssets(self): self.disassociateAllAssets() assetModelsList = self.listAssetModels() for model in assetModelsList: assetsList = self.listAssets(model['id']) for asset in assetsList: self.deleteAsset(asset['id']) def deleteAllModels(self): self.deleteAllAssets() assetModelsList = self.listAssetModels() for model in assetModelsList: self.updateAssetModel(assetModelId=model['id'], hierarchies=[], overwriteData=True) for model in assetModelsList: self.deleteAssetModel(model['id']) ``` #### File: tests/unit/test_logs.py ```python import mock from chalice import logs from chalice.awsclient import TypedAWSClient from six import StringIO def message(log_message, log_stream_name='logStreamName'): return { 'logStreamName': log_stream_name, 'message': log_message, } def test_can_retrieve_all_logs(): client = mock.Mock(spec=TypedAWSClient) log_message = message('first') client.iter_log_events.return_value = [log_message] retriever = logs.LogRetriever(client, 'loggroup') messages = list(retriever.retrieve_logs()) expected = log_message.copy() # We also inject a logShortId. 
expected['logShortId'] = 'logStreamName' assert messages == [expected] def test_can_support_max_entries(): client = mock.Mock(spec=TypedAWSClient) client.iter_log_events.return_value = [message('first'), message('second')] retriever = logs.LogRetriever(client, 'loggroup') messages = list(retriever.retrieve_logs(max_entries=1)) assert len(messages) == 1 assert messages[0]['message'] == 'first' def test_can_exclude_lambda_messages(): client = mock.Mock(spec=TypedAWSClient) client.iter_log_events.return_value = [ message('START RequestId: id Version: $LATEST'), message('END RequestId: id'), message('REPORT RequestId: id Duration: 0.42 ms ' 'Billed Duration: 100 ms ' 'Memory Size: 128 MB Max Memory Used: 19 MB'), message('Not a lambda message'), ] retriever = logs.LogRetriever(client, 'loggroup') messages = list(retriever.retrieve_logs(include_lambda_messages=False)) assert len(messages) == 1 assert messages[0]['message'] == 'Not a lambda message' def test_can_parse_short_id(): log_message = message( 'Log Message', '2017/04/28/[$LATEST]fc219a0d613b40e9b5c58e6b8fd2320c' ) client = mock.Mock(spec=TypedAWSClient) client.iter_log_events.return_value = [log_message] retriever = logs.LogRetriever(client, 'loggroup') messages = list(retriever.retrieve_logs(include_lambda_messages=False)) assert len(messages) == 1 assert messages[0]['logShortId'] == 'fc219a' def test_can_create_from_arn(): retriever = logs.LogRetriever.create_from_lambda_arn( mock.sentinel.client, 'arn:aws:lambda:us-east-1:123:function:my-function' ) assert isinstance(retriever, logs.LogRetriever) def test_can_display_logs(): retriever = mock.Mock(spec=logs.LogRetriever) retriever.retrieve_logs.return_value = [ {'timestamp': 'NOW', 'logShortId': 'shortId', 'message': 'One'}, {'timestamp': 'NOW', 'logShortId': 'shortId', 'message': 'Two'}, {'timestamp': 'NOW', 'logShortId': 'shortId', 'message': 'Three'}, ] stream = StringIO() logs.display_logs(retriever, max_entries=None, include_lambda_messages=True, stream=stream) assert stream.getvalue().splitlines() == [ 'NOW shortId One', 'NOW shortId Two', 'NOW shortId Three', ] ``` #### File: source/StackCleanup/lambda_function.py ```python import os import json import logging import threading import pprint import boto3 import cfnresponse import time import sys from botocore.exceptions import ClientError from cleanupQuicksight import CleanupQuicksight from sitewiseUtils import SitewiseUtils s3 = boto3.resource("s3") greengrass = boto3.client("greengrass") sitewise = boto3.client("iotsitewise") stackName = os.environ['stackName'] region = os.environ['AWS_REGION'] def lambda_handler(event, context): # make sure we send a failure to CloudFormation if the function # is going to timeout timer = threading.Timer((context.get_remaining_time_in_millis() / 1000.00) - 0.5, timeout, args=[event, context]) timer.start() print('Received event: %s' % json.dumps(event)) status = cfnresponse.SUCCESS try: if event['RequestType'] == 'Delete': delete_greengrass = event['ResourceProperties']['delete_greengrass'] if (delete_greengrass == 'Yes'): group_names = event['ResourceProperties']['group_names'] for group_name in group_names: reset_greengrass_deployment(group_name) buckets = event['ResourceProperties']['buckets'] clear_s3_buckets(buckets) delete_sitewise_portal() CleanupQuicksight(region=region, stackName=stackName).cleanup() delete_models = event['ResourceProperties']['delete_models'] if (delete_models == 'Yes'): sitewiseUtils = SitewiseUtils() sitewiseUtils.deleteAllModels() except Exception as e: 
        logging.error('Exception: %s' % e, exc_info=True)
        status = cfnresponse.FAILED
    finally:
        timer.cancel()
        cfnresponse.send(event, context, status, {}, None)


def delete_sitewise_portal():
    portalsList = sitewise.list_portals()
    try:
        for portal in portalsList['portalSummaries']:
            print(portal)
            if stackName in portal['name']:
                projectList = sitewise.list_projects(
                    portalId=portal['id'],
                )
                for project in projectList['projectSummaries']:
                    dashboardList = sitewise.list_dashboards(
                        projectId=project['id']
                    )
                    for dashboard in dashboardList['dashboardSummaries']:
                        deleteDashboard = sitewise.delete_dashboard(
                            dashboardId=dashboard['id']
                        )
                        time.sleep(2)
                    deleteProject = sitewise.delete_project(
                        projectId=project['id']
                    )
                    time.sleep(2)
                accessList = sitewise.list_access_policies(
                    resourceType='PORTAL',
                    resourceId=portal['id'],
                )
                for accessPolicy in accessList['accessPolicySummaries']:
                    deleteAccessPolicy = sitewise.delete_access_policy(
                        accessPolicyId=accessPolicy['id'],
                    )
                    time.sleep(2)
                deletePortal = sitewise.delete_portal(
                    portalId=portal['id']
                )
                print(deletePortal)
                time.sleep(2)
    except Exception as e:
        print("Error:", e)


def clear_s3_buckets(buckets):
    for bucket in buckets:
        bucket_resource = s3.Bucket(bucket)
        bucket_resource.objects.all().delete()


def reset_greengrass_deployment(group_name):
    group_id = None
    groups = greengrass.list_groups()
    if 'Groups' in groups:
        payload = groups['Groups']
        while 'NextToken' in groups:
            groups = greengrass.list_groups(
                NextToken=groups['NextToken'])
            payload.extend(groups['Groups'])
        for group in payload:
            if group['Name'] == group_name:
                group_id = group['Id']
                break
    # Guard against a missing group: reset_deployments would otherwise be
    # called with an undefined group_id and raise a NameError.
    if group_id is None:
        logging.warning('Greengrass group %s not found, skipping reset', group_name)
        return
    response = greengrass.reset_deployments(
        Force=True,
        GroupId=group_id)


def timeout(event, context):
    logging.error('Execution is about to time out, sending failure response to CloudFormation')
    cfnresponse.send(event, context, cfnresponse.FAILED, {}, None)
```
#### File: source/unify_common/secrets_manager.py
```python
import json
import logging

import boto3
from botocore.exceptions import ClientError

logger = logging.getLogger(__name__)


class Secret():
    def __init__(self, user_id, password, org_id, cluster):
        self.user_id = user_id
        self.password = password
        self.org_id = org_id
        self.cluster = cluster


class SecretsManager:
    def __init__(self, region_name, name):
        self.name = name
        session = boto3.session.Session()
        self._client = session.client(
            service_name='secretsmanager',
            region_name=region_name
        )

    def get_value(self):
        if self.name is None:
            raise ValueError
        try:
            kwargs = {'SecretId': self.name}
            response = self._client.get_secret_value(**kwargs)
            logger.info("Got value for secret %s.", self.name)
        except ClientError:
            logger.exception("Couldn't get value for secret %s.", self.name)
            raise
        try:
            secret = json.loads(response['SecretString'])
            user_id = secret['user_id']
            password = secret['password']
            org_id = secret['org_id']
            cluster = secret['cluster']
        except Exception:
            logger.exception("Secret does not have correct setting")
            raise
        return Secret(user_id, password, org_id, cluster)

    def get_random_password(self, pw_length):
        try:
            response = self._client.get_random_password(
                PasswordLength=pw_length)
            password = response['RandomPassword']
            logger.info("Got random password.")
        except ClientError:
            logger.exception("Couldn't get random password.")
            raise
        else:
            return password

    def put_value(self, secret_value):
        if self.name is None:
            raise ValueError
        try:
            kwargs = {'SecretId': self.name}
            if isinstance(secret_value, str):
                kwargs['SecretString'] = secret_value
            elif isinstance(secret_value, bytes):
                kwargs['SecretBinary'] = secret_value
            response = self._client.put_secret_value(**kwargs)
            logger.info("Value put in secret %s.", self.name)
        except ClientError:
            logger.exception("Couldn't put value in secret %s.", self.name)
            raise
        else:
            return response
```
#### File: source/UnifySourceIngest/utils.py
```python
import re
import codecs

split_char = '-'


def parse_file_name(s3_file):
    """Derive site and server names from an S3 key whose file name follows
    the '<site>-<server>-<rest>.<ext>' convention; fall back to 'default'
    for both when the name does not match."""
    site_name = 'default'
    server_name = 'default'
    file_name = s3_file.split('/')[-1]
    # Raw string avoids the invalid '\.' escape warning in the regex.
    matched = re.match(r".+{}.+{}.+\..+".format(split_char, split_char), file_name)
    if matched:
        site_name, server_name = file_name.split(split_char)[:2]
    return site_name, server_name, file_name


def check_for_bom(filename):
    """Return True if the file starts with a UTF-8 byte order mark."""
    with open(filename, 'rb') as binFile:
        rawData = binFile.read(64)
        if rawData.startswith(codecs.BOM_UTF8):
            return True
    return False
```
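The naming convention above packs the site and server into the object key, so a quick sanity check of `parse_file_name` helps when wiring up the ingest trigger. A minimal sketch — the keys shown are made-up examples, not paths from the project:

```python
# Hypothetical S3 keys used only to illustrate parse_file_name's contract.
for key in ['ingest/siteA-srv01-metrics.csv', 'plain.csv']:
    site, server, name = parse_file_name(key)
    print(site, server, name)
# -> siteA srv01 siteA-srv01-metrics.csv
# -> default default plain.csv
```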
{ "source": "jiebinzhuang/insgraph-flask", "score": 2 }
#### File: insgraph-flask/insgraph/case.py ```python import json import os import shutil from flask import ( Blueprint, request, send_from_directory, make_response) from insgraph.db import get_db from insgraph.utils import httputil bp = Blueprint('caseManagement', __name__, url_prefix='/caseManagement') @bp.route('/getProjectList', methods=['GET']) def getProjectList(): print("getProjectList") cds = get_db().execute( 'SELECT projecct_code FROM project ' ).fetchall() project_list = [] for cd in cds: print(cd[0]) project_list.append(cd[0]) content = json.dumps(project_list) resp = httputil.Response_headers(content) return resp ``` #### File: insgraph-flask/insgraph/__init__.py ```python import os from flask import Response,Flask, request from flask_cors import CORS from insgraph import util, instagram def create_app(test_config=None): """Create and configure an instance of the Flask application.""" app = Flask(__name__, instance_relative_config=True) print("zhuangjb flask start.....:"+__name__) CORS(app) app.config.from_mapping( # a default secret that should be overridden by instance config SECRET_KEY='dev', # store the database in the instance folder DATABASE=os.path.join(app.instance_path, 'insgraph.sqlite'), ) if test_config is None: # load the instance config, if it exists, when not testing app.config.from_pyfile('config.py', silent=True) else: # load the test config if passed in app.config.update(test_config) # ensure the instance folder exists try: os.makedirs(app.instance_path) except OSError: pass @app.route('/hello') def hello(): return 'Hello, World!' @app.before_request def option_replay(): if request.method =='OPTIONS': resp = Response('') print('xxx') resp.headers['Access-Control-Allow-Origin'] = '*' resp.headers['Access-Control-Allow-Headers'] = '*' resp.headers['Access-Control-Request-Method'] = request.headers['Access-Control-Request-Method'] return resp # @app.after_request # def set_allow_origin(resp): # h = resp.headers # if request.method != 'OPTIONS' and 'Origin' in request.headers: # h['Access-Control-Allow-Origin'] = request.headers['Origin'] # register the database commands from insgraph import db db.init_app(app) # apply the blueprints to the app from insgraph import auth, user,case app.register_blueprint(auth.bp) app.register_blueprint(user.bp) app.register_blueprint(case.bp) app.register_blueprint(instagram.bp) # make url_for('index') == url_for('blog.index') # in another app, you might define a separate main index here with # app.route, while giving the blog blueprint a url_prefix, but for # the tutorial the blog will be the main index app.add_url_rule('/', endpoint='index') return app ``` #### File: insgraph-flask/insgraph/instagram.py ```python import json import sys from flask import ( Blueprint, request) from selenium import webdriver from selenium.webdriver import DesiredCapabilities from selenium.webdriver.chrome.options import Options from insgraph.util import extractor from insgraph.util.extractor import extract_posts from insgraph.util.settings import Settings from insgraph.util.util import web_adress_navigator from insgraph.util.zjb_extractor import zjb_extract_tag_posts, zjb_search, zjb_extract_postlist from insgraph.util.zjb_extractor_posts import zjb_extract_post_info from insgraph.utils import httputil from .util.account import login from .util.chromedriver import init_chromedriver bp = Blueprint('instagram', __name__, url_prefix='/instagram') chrome_options = Options() chromeOptions = webdriver.ChromeOptions() prefs = 
{'profile.managed_default_content_settings.images': 2, 'disk-cache-size': 4096} chromeOptions.add_experimental_option("prefs", prefs) chrome_options.add_argument('--dns-prefetch-disable') chrome_options.add_argument('--no-sandbox') chrome_options.add_argument('--lang=en-US') chrome_options.add_argument('--headless') chrome_options.add_argument("--proxy-server=socks5://127.0.0.1:1080") chrome_options.add_experimental_option('prefs', {'intl.accept_languages': 'en-US'}) capabilities = DesiredCapabilities.CHROME try: browser = init_chromedriver(chrome_options, capabilities) except Exception as exc: print(exc) sys.exit() @bp.route('/getUserInfo', methods=['GET']) def getUserInfo(): username = request.args.get("username") print('getUserInfo---Extracting information from ' + username) try: if len(Settings.login_username) != 0: login(browser, Settings.login_username, Settings.login_password) information = extractor.extract_userinfo(browser, username) except: print("Error with user " + username) sys.exit(1) # print(information) content = json.dumps(information) resp = httputil.Response_headers(content) return resp @bp.route('/getPostList', methods=['GET']) def getPostList(): username = request.args.get("username") amount = request.args.get("amount") if amount == 0 or amount is None: amount = 2 print('getPostList---Extracting information from ' + username) try: from insgraph.util.settings import Settings if len(Settings.login_username) != 0: login(browser, Settings.login_username, Settings.login_password) post_infos = extract_posts(browser, username, int(amount)) except: print("Error with user " + username) sys.exit(1) content = json.dumps(post_infos) resp = httputil.Response_headers(content) return resp @bp.route('/getTagList', methods=['GET']) def getTagList(): tagname = request.args.get("tagname") amount = request.args.get("amount") if amount == 0 or amount is None: amount = 12 try: post_infos = zjb_extract_tag_posts(browser, tagname, int(amount)) except: print("Error with user " + tagname) sys.exit(1) content = json.dumps(post_infos) resp = httputil.Response_headers(content) return resp @bp.route('/search', methods=['GET']) def search(): searchcontent = request.args.get("content") if searchcontent=="" or searchcontent is None: return "" try: result= zjb_search(browser, searchcontent) except: print("Error with searchcontent " + searchcontent) sys.exit(1) content = json.dumps(result) resp = httputil.Response_headers(content) return resp @bp.route('/getPostPreList', methods=['GET']) def getPostPreList(): tagname = request.args.get("tagname") amount = request.args.get("amount") if amount == 0 or amount is None: amount = 12 try: user_link = "https://www.instagram.com/explore/tags/{}/".format(tagname) web_adress_navigator(browser, user_link) post_infos = zjb_extract_postlist(browser,int(amount)) except: print("Error with user " + tagname) sys.exit(1) content = json.dumps(post_infos) resp = httputil.Response_headers(content) return resp @bp.route('/getUserPostIndex', methods=['GET']) def getUserPostIndex(): username = request.args.get("username") amount = request.args.get("amount") if amount == 0 or amount is None: amount = 12 try: user_link = "https://www.instagram.com/{}/".format(username) web_adress_navigator(browser, user_link) post_infos = zjb_extract_postlist(browser,int(amount)) except: print("Error with user " + username) sys.exit(1) content = json.dumps(post_infos) resp = httputil.Response_headers(content) return resp @bp.route('/getPostByUrl', methods=['GET']) def getPostByUrl(): url = 
request.args.get("url") post_infos = [] print('getPostList---Extracting information from ' + url) try: imgs, imgdesc,\ likes, commentscount, views, video_url = zjb_extract_post_info( browser, url) post_infos.append({ 'imgs': imgs, 'imgdesc': imgdesc, 'likes': likes, 'views': views, 'url': url, 'comments': commentscount, 'video_url': video_url }) except: print("Error with user " + url) sys.exit(1) content = json.dumps(post_infos) resp = httputil.Response_headers(content) return resp ``` #### File: insgraph-flask/insgraph/user.py ```python import json from flask import ( Blueprint, flash, redirect, request, session, url_for ) from insgraph import util from insgraph.db import get_db from insgraph.utils import httputil bp = Blueprint('user', __name__, url_prefix='/user') @bp.route('/info', methods=['GET', 'POST']) def login(): print("user info") token = request.args.get("token") db = get_db() error = None user = db.execute( 'SELECT * FROM user WHERE id = ?', (token,) ).fetchone() if user is None: error = 'Incorrect username.' if error is None: # store the user id in a new session and return to the index # 默认全部是管理员 角色后面再实现 response = { 'roles': ['admin'], 'token': token, 'avatar': 'https://wpimg.wallstcn.com/f778738c-e4f8-4870-b634-56703b4acafe.gif', 'name': user['username'] } content = json.dumps(response) return util.Response_headers(content) # return httputil.success(user['id']) flash(error) return httputil.error(error) @bp.route('/logout') def logout(): """Clear the current session, including the stored user id.""" session.clear() return redirect(url_for('index')) ``` #### File: insgraph/util/zjb_extractor.py ```python import math from time import sleep from selenium.common.exceptions import NoSuchElementException from selenium.webdriver.common.keys import Keys from insgraph.util.extractor_posts import extract_post_info from .exceptions import PageNotFound404, NoInstaProfilePageFound from .instalogger import logger from .settings import Settings from .util import web_adress_navigator def extract_user_posts(browser, num_of_posts_to_do): """Get all posts from user""" links2 = [] preview_imgs = {} try: body_elem = browser.find_element_by_tag_name('body') previouslen = 0 breaking = 0 print("number of posts to do: ", num_of_posts_to_do) num_of_posts_to_scroll = 12 * math.ceil(num_of_posts_to_do / 12) print("Getting first", num_of_posts_to_scroll, "posts but checking ", num_of_posts_to_do, " posts only, if you want to change this limit, change limit_amount value in crawl_profile.py\n") while (len(links2) < num_of_posts_to_do): prev_divs = browser.find_elements_by_tag_name('main') links_elems = [div.find_elements_by_tag_name('a') for div in prev_divs] links = sum([[link_elem.get_attribute('href') for link_elem in elems] for elems in links_elems], []) for elems in links_elems: for link_elem in elems: href = link_elem.get_attribute('href') try: if "/p/" in href: try: img = link_elem.find_element_by_tag_name('img') src = img.get_attribute('src') preview_imgs[href] = src except NoSuchElementException: print("img exception 132") continue except Exception as err: print(err) for link in links: if "/p/" in link: if (len(links2) < num_of_posts_to_do): links2.append(link) # links2 = list(set(links2)) print("Scrolling profile ", len(links2), "/", num_of_posts_to_scroll) body_elem.send_keys(Keys.END) sleep(Settings.sleep_time_between_post_scroll) ##remove bellow part to never break the scrolling script before reaching the num_of_posts if (len(links2) == previouslen): breaking += 1 print("breaking in ", 4 - 
breaking, "...\nIf you believe this is only caused by slow internet, increase sleep time 'sleep_time_between_post_scroll' in settings.py") else: breaking = 0 if breaking > 3: print("Not getting any more posts, ending scrolling") sleep(2) break previouslen = len(links2) ## except NoSuchElementException as err: logger.error('Something went terribly wrong') post_infos = [] counter = 1 # into user_commented_total_list I will add all username links who commented on any post of this user user_commented_total_list = [] for postlink in links2: print("\n", counter, "/", len(links2)) counter = counter + 1 try: caption, location_url, location_name, location_id, lat, lng, imgs, \ imgdesc, tags, likes, commentscount, date, user_commented_list, user_comments, \ mentions, user_liked_post, views, video_url = extract_post_info( browser, postlink) location = { 'location_url': location_url, 'location_name': location_name, 'location_id': location_id, 'latitude': lat, 'longitude': lng, } post_infos.append({ 'caption': caption, 'location': location, 'imgs': imgs, 'imgdesc': imgdesc, 'preview_img': preview_imgs.get(postlink, None), 'date': date, 'tags': tags, 'likes': { 'count': likes, 'list': user_liked_post }, 'views': views, 'url': postlink, 'comments': { 'count': commentscount, 'list': user_comments }, 'mentions': mentions, 'video_url': video_url }) user_commented_total_list = user_commented_total_list + user_commented_list except NoSuchElementException as err: logger.error("Could not get information from post: " + postlink) logger.error(err) except: logger.error("Could not get information from post: " + postlink) return post_infos, user_commented_total_list def zjb_extract_tag_posts(browser, tagname, limit_amount): # print 222 # print username logger.info("Extracting extract_posts from " + tagname) # print 123 isprivate = False try: user_link = "https://www.instagram.com/explore/tags/{}/".format(tagname) web_adress_navigator(browser, user_link) except PageNotFound404 as e: raise NoInstaProfilePageFound(e) try: post_infos, user_commented_total_list = extract_user_posts(browser, limit_amount) except: logger.error("Couldn't get user posts.") return post_infos def zjb_search(browser, content): logger.info("Extracting extract_posts from " + content) try: user_link = "https://www.instagram.com/web/search/topsearch/?context=blended&query=" + content + "&rank_token=0.<PASSWORD>&include_reel=true" response = browser.get(user_link) jsonEle = browser.find_element_by_tag_name('pre') jsonEle.text logger.info(jsonEle.text) return jsonEle.text except PageNotFound404 as e: raise NoInstaProfilePageFound(e) return "" def zjb_extract_postlist(browser, num_of_posts_to_do): """Get all posts from user""" links2 = [] post_infos = [] try: body_elem = browser.find_element_by_tag_name('body') previouslen = 0 breaking = 0 print("number of posts to do: ", num_of_posts_to_do) num_of_posts_to_scroll = 12 * math.ceil(num_of_posts_to_do / 12) print("Getting first", num_of_posts_to_scroll, "posts but checking ", num_of_posts_to_do, " posts only, if you want to change this limit, change limit_amount value in crawl_profile.py\n") while (len(links2) < num_of_posts_to_do): prev_divs = browser.find_elements_by_tag_name('main') links_elems = [div.find_elements_by_tag_name('a') for div in prev_divs] links = sum([[link_elem.get_attribute('href') for link_elem in elems] for elems in links_elems], []) for elems in links_elems: for link_elem in elems: href = link_elem.get_attribute('href') try: if "/p/" in href: try: img = 
link_elem.find_element_by_tag_name('img') src = img.get_attribute('src') post_info = {} post_info["href"] = href post_info["preview_img"] = src try: span = link_elem.find_element_by_tag_name('span') post_info["video"] = "true" except NoSuchElementException: print("video exception 132") post_info["video"] = "false" post_infos.append(post_info) except NoSuchElementException: print("img exception 132") continue except Exception as err: print(err) for link in links: if "/p/" in link: if (len(links2) < num_of_posts_to_do): links2.append(link) # links2 = list(set(links2)) print("Scrolling profile ", len(links2), "/", num_of_posts_to_scroll) body_elem.send_keys(Keys.END) sleep(Settings.sleep_time_between_post_scroll) ##remove bellow part to never break the scrolling script before reaching the num_of_posts if (len(links2) == previouslen): breaking += 1 print("breaking in ", 4 - breaking, "...\nIf you believe this is only caused by slow internet, increase sleep time 'sleep_time_between_post_scroll' in settings.py") else: breaking = 0 if breaking > 3: print("Not getting any more posts, ending scrolling") sleep(2) break previouslen = len(links2) ## except NoSuchElementException as err: logger.error('Something went terribly wrong') return post_infos # def zjb_get_post_detail(browser,url): ```
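The scroll loops in `extract_user_posts` and `zjb_extract_postlist` share one stall-detection idea: press END, wait, and stop once the collected link count stops growing for a few rounds. A stripped-down sketch of that pattern — the function name, parameters, and timings here are illustrative, not part of the project:

```python
from time import sleep
from selenium.webdriver.common.keys import Keys


def scroll_until_stalled(browser, collect, target, max_stalls=3, pause=1):
    """Press END until `collect()` stops growing or `target` items exist.
    `collect` is any callable returning the list gathered so far."""
    body = browser.find_element_by_tag_name('body')
    stalls, previous = 0, 0
    while len(collect()) < target and stalls <= max_stalls:
        body.send_keys(Keys.END)
        sleep(pause)
        current = len(collect())
        # Reset the stall counter whenever new items appear.
        stalls = stalls + 1 if current == previous else 0
        previous = current
    return collect()
```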
{ "source": "jiecchen/StreamingCC", "score": 2 }
#### File: python/streamingcc/bloom_filter.py
```python
import streamingcc._bloom_filter as _bloom_filter

# Alias for the bloom filter exposed through the compiled extension module.
bloom_filter = _bloom_filter.Pybloom_filter


class BF:
    """Thin Python wrapper around the compiled bloom filter."""

    def __init__(self, c):
        # c sizes the underlying filter; see the Pybloom_filter binding.
        self.bf = bloom_filter(c)

    def add(self, x):
        """Insert item x into the filter."""
        self.bf.Add(x)

    def has(self, x):
        """Return True if x may have been added; bloom filters can report
        false positives but never false negatives."""
        return self.bf.Has(x)
```
#### File: python/streamingcc/hyper_loglog.py
```python
import streamingcc._hyper_loglog as _hyper_loglog

# Alias for the HyperLogLog sketch exposed through the compiled extension module.
hyper_loglog = _hyper_loglog.Pyhyper_loglog


class HL:
    """Thin Python wrapper around the compiled HyperLogLog sketch, used to
    estimate the number of distinct items in a stream."""

    def __init__(self):
        self.hl = hyper_loglog()

    def update(self, x):
        """Feed one stream item into the sketch."""
        self.hl.Update(x)

    def estimate(self):
        """Return the current estimate of the distinct-item count."""
        return self.hl.Estimate()
```
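Both wrappers follow the same update-then-query shape. A minimal usage sketch, assuming the compiled `_bloom_filter` and `_hyper_loglog` extensions are built and importable; whether items must be integers or arbitrary hashables depends on the binding signatures, so integers are used here as the safe case:

```python
from streamingcc.bloom_filter import BF
from streamingcc.hyper_loglog import HL

bf = BF(1000)        # capacity argument; assumed to size the filter
bf.add(42)
print(bf.has(42))    # True
print(bf.has(7))     # False (with high probability)

hl = HL()
for x in range(10000):
    hl.update(x % 1000)
print(hl.estimate())  # roughly 1000, within HyperLogLog's error bounds
```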
{ "source": "jiechen2358/VirtualFittingRoom", "score": 2 }
#### File: samples/fashion/fashion.py ```python import os import sys import random import math import re import time import numpy as np import cv2 import matplotlib import matplotlib.pyplot as plt import io import lmdb import sqlite3 import pandas as pd import json import argparse from PIL import Image from IPython.display import display from pycocotools.coco import COCO from pycocotools import mask as maskUtils # Root directory of the project ROOT_DIR = os.path.abspath("../../") # Import Mask RCNN sys.path.append(ROOT_DIR) from mrcnn.config import Config from mrcnn import utils import mrcnn.model as modellib from mrcnn import visualize from mrcnn.model import log parser = argparse.ArgumentParser(description='option for Fashion Segmentation') parser.add_argument('-t', '--test', action='store_true', help='perform test') args = parser.parse_args() # Items used # 1:bag|2:belt|3:boots|4:footwear|5:outer|6:dress|7:sunglasses|8:pants|9:top|10:shorts|11:skirt|12:headwear|13:scarf/tie subset = ['bag', 'belt', 'outer', 'dress', 'pants', 'top', 'shorts', 'skirt', 'scarf/tie'] class TrainConfig(Config): """ Configuration for training on the toy shapes dataset. Derives from the base Config class and overrides values specific to the toy shapes dataset. """ NAME = "fashion" # Train on 1 GPU and 8 images per GPU. We can put multiple images on each # GPU because the images are small. Batch size is 8 (GPUs * images/GPU). GPU_COUNT = 1 IMAGES_PER_GPU = 8 # Number of classes (including background) NUM_CLASSES = 1 + 13 # background + 13 shapes # Use small images for faster training. Set the limits of the small side # the large side, and that determines the image shape. IMAGE_MIN_DIM = 256 IMAGE_MAX_DIM = 256 # Use smaller anchors because our image and objects are small RPN_ANCHOR_SCALES = (8, 16, 32, 64, 128) # anchor side in pixels # Reduce training ROIs per image because the images are small and have # few objects. Aim to allow ROI sampling to pick 33% positive ROIs. TRAIN_ROIS_PER_IMAGE = 32 # Use a small epoch since the data is simple STEPS_PER_EPOCH = 100 # use small validation steps since the epoch is small VALIDATION_STEPS = 5 class TestConfig(TrainConfig): GPU_COUNT = 1 IMAGES_PER_GPU = 1 class PhotoData(object): def __init__(self, path): self.env = lmdb.open( path, map_size=2 ** 36, readonly=True, lock=False ) def __iter__(self): with self.env.begin() as t: with t.cursor() as c: for key, value in c: yield key, value def __getitem__(self, index): key = str(index).encode('ascii') with self.env.begin() as t: data = t.get(key) if not data: return None with io.BytesIO(data) as f: image = Image.open(f) image.load() return image def __len__(self): return self.env.stat()['entries'] class FashionDataset(utils.Dataset): def load_fashion(self, count=5, start=0, class_ids=None): if args.test: print('load data for testing.') else: print('load data for training.') if not class_ids: class_ids = sorted(coco.getCatIds()) if class_ids: all_ids = [] for id in class_ids: all_ids.extend(list(coco.getImgIds(catIds=[id]))) # Remove duplicates all_ids = list(set(all_ids)) else: # All images all_ids = list(coco.imgs.keys()) random.seed(2) random.shuffle(all_ids) # Add classes all_class_ids = sorted(coco.getCatIds()) for i in all_class_ids: print('{}:{}'.format(i, coco.loadCats(i)[0]['name'])) self.add_class("fashion", i, coco.loadCats(i)[0]['name']) image_ids = [] print('number of images: ' + str(count)) # Since we have 50K+ images and we only use several Thousand images # There will be no overlaps. 
If you plan to use 50k+, please redefine retrival logic. if args.test: image_ids = all_ids[-(count+start):] else: image_ids = all_ids[:count+start] # Add images for i in image_ids: self.add_image( "fashion", image_id=i, path=None, width=coco.imgs[i]["width"], height=coco.imgs[i]["height"], annotations=coco.loadAnns(coco.getAnnIds( imgIds=[i], catIds=class_ids, iscrowd=None))) def load_image(self, image_id): imgId = self.image_info[image_id]['id'] image = photo_data[imgId] try: out = np.array(image.getdata()).astype(np.int32).reshape((image.size[1], image.size[0], 3)) except: # This handles GrayScaleImage out = np.array(image.getdata()).astype(np.int32).reshape((image.size[1], image.size[0])) out = np.stack((out,)*3, axis=-1) return out def image_reference(self, image_id): """Return the shapes data of the image.""" pass def load_mask(self, image_id): """Load instance masks for the given image. Different datasets use different ways to store masks. This function converts the different mask format to one format in the form of a bitmap [height, width, instances]. Returns: masks: A bool array of shape [height, width, instance count] with one mask per instance. class_ids: a 1D array of class IDs of the instance masks. """ # If not a COCO image, delegate to parent class. image_info = self.image_info[image_id] instance_masks = [] class_ids = [] annotations = self.image_info[image_id]["annotations"] # Build mask of shape [height, width, instance_count] and list # of class IDs that correspond to each channel of the mask. for annotation in annotations: class_id = annotation['category_id'] if class_id: m = self.annToMask(annotation, image_info["height"], image_info["width"]) # Some objects are so small that they're less than 1 pixel area # and end up rounded out. Skip those objects. if m.max() < 1: continue # Is it a crowd? If so, use a negative class ID. if annotation['iscrowd']: # Use negative class ID for crowds class_id *= -1 # For crowd masks, annToMask() sometimes returns a mask # smaller than the given dimensions. If so, resize it. if m.shape[0] != image_info["height"] or m.shape[1] != image_info["width"]: m = np.ones([image_info["height"], image_info["width"]], dtype=bool) instance_masks.append(m) class_ids.append(class_id) # Pack instance masks into an array if class_ids: mask = np.stack(instance_masks, axis=2).astype(np.bool) class_ids = np.array(class_ids, dtype=np.int32) return mask, class_ids else: # Call super class to return an empty mask return super(FashionDataset, self).load_mask(image_id) def annToRLE(self, ann, height, width): """ Convert annotation which can be polygons, uncompressed RLE to RLE. :return: binary mask (numpy 2D array) """ segm = ann['segmentation'] if isinstance(segm, list): # polygon -- a single object might consist of multiple parts # we merge all parts into one mask rle code rles = maskUtils.frPyObjects(segm, height, width) rle = maskUtils.merge(rles) elif isinstance(segm['counts'], list): # uncompressed RLE rle = maskUtils.frPyObjects(segm, height, width) else: # rle rle = ann['segmentation'] return rle def annToMask(self, ann, height, width): """ Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask. :return: binary mask (numpy 2D array) """ rle = self.annToRLE(ann, height, width) m = maskUtils.decode(rle) return m def train(init_with='coco'): # imagenet, coco, or last ''' Perform training. 
''' # Load and display random samples # image_ids = np.random.choice(dataset_train.image_ids, 1) # for image_id in image_ids: # image = dataset_train.load_image(image_id) # mask, class_ids = dataset_train.load_mask(image_id) # visualize.display_top_masks(image, mask, class_ids, dataset_train.class_names) # Create model in training mode model = modellib.MaskRCNN(mode="training", config=config, model_dir=MODEL_DIR) # Which weights to start with? if init_with == "imagenet": model.load_weights(model.get_imagenet_weights(), by_name=True) elif init_with == "coco": # Load weights trained on MS COCO, but skip layers that # are different due to the different number of classes # See README for instructions to download the COCO weights model.load_weights(COCO_MODEL_PATH, by_name=True, exclude=["mrcnn_class_logits", "mrcnn_bbox_fc", "mrcnn_bbox", "mrcnn_mask"]) elif init_with == "last": # Load the last model you trained and continue training model.load_weights(model.find_last(), by_name=True) epoch_1 = 100 epoch_2 = epoch_1 + 100 # Train the head branches # Passing layers="heads" freezes all layers except the head # layers. You can also pass a regular expression to select # which layers to train by name pattern. print("Training network heads") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE, epochs=epoch_1, layers='heads') # Fine tune all layers # Passing layers="all" trains all layers. You can also # pass a regular expression to select which layers to # train by name pattern. print("Fine tune all layers") model.train(dataset_train, dataset_val, learning_rate=config.LEARNING_RATE / 10, epochs=epoch_2, layers="all") print("Training completed.") def test(): ''' Perform testing. ''' # Recreate the model in inference mode model = modellib.MaskRCNN(mode="inference", config=test_config, model_dir=MODEL_DIR) # Get path to saved weights # Either set a specific path or find last trained weights # model_path = os.path.join(ROOT_DIR, ".h5 file name here") model_path = model.find_last() # Load trained weights print("Loading weights from ", model_path) model.load_weights(model_path, by_name=True) APs = [] # Test on a random image image_ids = np.random.choice(dataset_val.image_ids, 10) # image_ids = dataset_val.image_ids for image_id in image_ids: print('=== Test on image: ' + str(image_id)) print('=> Load Ground truth:') original_image, image_meta, gt_class_id, gt_bbox, gt_mask = \ modellib.load_image_gt(dataset_val, test_config, image_id, use_mini_mask=False) log("original_image", original_image) log("image_meta", image_meta) log("gt_class_id", gt_class_id) log("gt_bbox", gt_bbox) log("gt_mask", gt_mask) print('=> Result:') results = model.detect([original_image], verbose=1) r = results[0] # Compute AP AP, precisions, recalls, overlaps = \ utils.compute_ap(gt_bbox, gt_class_id, gt_mask, r["rois"], r["class_ids"], r["scores"], r['masks']) APs.append(AP) print('AP for image ' + str(image_id) + ': ', AP) print("mAP: ", np.mean(APs)) print("Test Complete.") # Directory to save logs and trained model MODEL_DIR = os.path.join(ROOT_DIR, "logs") # Local path to trained weights file COCO_MODEL_PATH = os.path.join(ROOT_DIR, "mask_rcnn_coco.h5") # Download COCO trained weights from Releases if needed if not os.path.exists(COCO_MODEL_PATH): utils.download_trained_weights(COCO_MODEL_PATH) # Load photo data from lmdb photo_data = PhotoData(r'..' + os.path.sep + '..' + os.path.sep + '..' 
+ os.path.sep + 'photos.lmdb') print('Length of photo data from Paperdoll lmdb:', len(photo_data)) if __name__ == "__main__": json_file = r'..' + os.path.sep + '..' + os.path.sep + '..' + os.path.sep + 'modanet2018_instances_train.json' d = json.load(open(json_file)) coco = COCO(json_file) subset_ids = sorted(coco.getCatIds(catNms=subset)) config = TrainConfig() config.display() test_config = TestConfig() test_config.display() # Training dataset train_count = 20000 dataset_train = FashionDataset() dataset_train.load_fashion(train_count, start=0, class_ids=subset_ids) dataset_train.prepare() # Validation dataset val_count = 50 dataset_val = FashionDataset() dataset_val.load_fashion(val_count, start=0, class_ids=subset_ids) dataset_val.prepare() if args.test: print("Perform testing ...") test() else: print("Perform training ...") train() ```
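Once training has produced a checkpoint, a single-image inference pass reuses the same pieces the script already imports. A minimal sketch, assuming `fashion.py` above has been run so that `dataset_val`, `test_config`, and `MODEL_DIR` exist and a trained weights file is present:

```python
# Sketch: visualize detections for one validation image using the
# globals set up in fashion.py (dataset_val, test_config, MODEL_DIR).
model = modellib.MaskRCNN(mode="inference", config=test_config, model_dir=MODEL_DIR)
model.load_weights(model.find_last(), by_name=True)

image_id = dataset_val.image_ids[0]
image = dataset_val.load_image(image_id)
r = model.detect([image], verbose=0)[0]
visualize.display_instances(image, r['rois'], r['masks'], r['class_ids'],
                            dataset_val.class_names, r['scores'])
```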
{ "source": "jiechencn/jiechenme.github.io", "score": 2 }
#### File: jiechenme.github.io/lab/SQLView.py
```python
import re
import shutil
import sys
import codecs
from java.util import Date
from java.text import SimpleDateFormat
from java.lang import System
from java.io import File
#import time as pytime

#####################################################################################################
__script_name = 'SQLView'
__script_version = '1.0'
__script_author = '<EMAIL>'
__script_copyright = '(C) Oracle'

__k_conn = '_C'
__k_state = '_S'
__k_result = '_R'
__r_newrow = '_NR_'
__t_start = '_START'
__t_end = '_END'

#__dt_format = 'MMM d, yyyy, h:mm:ss,S a z'
__dt_format = 'MMM d, yyyy h:mm:ss,SSS a z'
__s_dummysql = '_DUMMY_'
__fileSize = 50000  # KB
__prompt_cur = ' >>'  # prompt cursor space
__sleep = 1  # polling interval in seconds when waiting for the log flush
__flush_max_wait = 300  # seconds
__flush_time_buffer_gap = 20  # seconds of buffer to cover WebLogic's log flush delay

#####################################################################################################
wlsVersion = ''
pyVersion = ''
wlsInstance = 'Offline'
rptStart = ''
rptEnd = ''
dfLogTimestamp = SimpleDateFormat(__dt_format)  # by default
sqlConns = {}
sqlStatementConnMap = {}  # {'C1000S1001':'sql1', 'C1000S1002':'sql2', 'C2000S2001':'sql3'}
sqlStatementTime = {}  # {'S1001':['20170102','20170103'], 'S1002':['20170105','20170106']}
sqlStatementsParas = {}  # {'S1001':['setInt(1, 6018285)', 'setString(2, 'hello')'], 'S1002':['setInt(1, 6018299)', 'setString(2, 'world')'], 'S2001':[]}
sqlStatementResultMap = {}  # {'S1001':'R001', 'S1002':'0', 'S2001':'1'}
sqlStatementResultReverseMap = {}  # {'R1001':'S001', 'R1002':'S0001', 'R2001':'S0001'} for bulk select results only
sqlStatementResults = {}  # {'R001':['_NR_', '704', '2000007951', '_NR_', '704', '2000007952']}
sqlResultIndexes = {}  # {'R001':[1,2,3,4,5]}

#####################################################################################################
def elapsed(t0, t1):
    try:
        unix0 = time2unix(t0)
        unix1 = time2unix(t1)
        return str(unix1 - unix0)
    except:
        return 'N/A'

def time2unix(tt):
    return dfLogTimestamp.parse(tt).getTime()

# get connection ID(s)
def getConns(oneline):
    try:
        #1. prepared: yes
        #2. simplestatement: yes
        #3. callable: yes
        #4. metadata: yes
        #1. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf0f] prepareStatement(select * from ...
        #2. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf0c] CreateStatement()>
        #3. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf11] prepareCall(select * from ...
        #4. weblogic.jdbc.wrapper.PoolConnection_oracle_jdbc_driver_T4CConnection@bee9] prepareStatement(SELECT t1.JPS_ATTRS_ID, t1.ATTRNAME,
weblogic.jdbc.wrapper.PoolConnection_oracle_jdbc_driver_T4CConnection@bee9] prepareStatement(SELECT t1.JPS_ATTRS_ID, t1.ATTRNAME, patternPrepare = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] prepareStatement\((.*)\)>' patternCall = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] prepareCall\((.*)\)>' patternSimpleSt = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] CreateStatement\(\)>' patternMatch = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] (prepareStatement|CreateStatement|prepareCall)\((.*)\)>' matchObj = re.match(patternMatch, oneline, re.M|re.I) if matchObj: connID = matchObj.group(3) sqlBody = matchObj.group(5) if sqlBody=='': sqlBody = __s_dummysql sqlConns[__k_conn + connID] = sqlBody return True else: return False except Exception: raise # get mappings between connection ID(s) and statements def getStatementConnMap(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #x. simplestatment: yes: specially process sql #1. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf0f] prepareStatement returns weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@bf10> #2. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf0c] CreateStatement returns weblogic.jdbc.wrapper.Statement_oracle_jdbc_driver_OracleStatementWrapper@bf0d> #3. weblogic.jdbc.wrapper.JTSConnection_oracle_jdbc_driver_T4CConnection@bf11] prepareCall returns weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@bf12> #4. weblogic.jdbc.wrapper.PoolConnection_oracle_jdbc_driver_T4CConnection@bee9] prepareStatement returns weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@beeb> #x. 
weblogic.jdbc.wrapper.Statement_oracle_jdbc_driver_OracleStatementWrapper@bf0d] executeQuery(selec * from agileuser)> patternStatement = r'(.*)<(.*)> <Debug> <JDBCSQL> (.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] (p|c)re(.*) returns weblogic\.jdbc\.wrapper\.(.*)Statement_oracle_jdbc_driver_Oracl(.*)StatementWrapper(.*)>' patternPrepare = r'(.*)<(.*)> <Debug> <JDBCSQL> (.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] prepare(.*)returns weblogic\.jdbc\.wrapper\.(.*)Statement_oracle_jdbc_driver_Oracle(.*)StatementWrapper(.*)>' patternSimpleSt = r'(.*)<(.*)> <Debug> <JDBCSQL> (.*)weblogic\.jdbc\.wrapper\.(.*)Connection_oracle_jdbc_driver_T4CConnection(.*)\] CreateStatement returns weblogic\.jdbc\.wrapper\.Statement_oracle_jdbc_driver_OracleStatementWrapper(.*)>' patternSimpleStSQL = r'(.*)<(.*)> <Debug> <JDBCSQL> (.*)weblogic\.jdbc\.wrapper\.Statement_oracle_jdbc_driver_OracleStatementWrapper(.*)\] executeQuery\((.*)\)>' matchObj = re.match(patternStatement , oneline, re.M|re.I) if matchObj: startTime = matchObj.group(2) connID = matchObj.group(5) stateID = matchObj.group(10) tmpSQL = sqlConns.get(__k_conn + connID, __s_dummysql) sqlConns[__k_conn + connID + __k_state + stateID ] = tmpSQL sqlConns[__k_conn + connID] = __s_dummysql; del sqlConns[__k_conn + connID] sqlStatementConnMap[__k_state + stateID] = __k_conn + connID sTime = [] sTime.append(startTime) sqlStatementTime[__k_state + stateID] = sTime return True else:# get simplestatement's SQL matchObj = re.match(patternSimpleStSQL , oneline, re.M|re.I) if matchObj: stateID = matchObj.group(4) curSQL = matchObj.group(5) cconnID = sqlStatementConnMap.get(__k_state + stateID, '') sqlConns[cconnID + __k_state + stateID ] = curSQL sqlConns[cconnID] = __s_dummysql; del sqlConns[cconnID] return True else: return False except Exception: #print(sqlConns) raise # get statements' end time def getStatementEndTime(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #1. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@bf10] close returns> #2. weblogic.jdbc.wrapper.Statement_oracle_jdbc_driver_OracleStatementWrapper@bf0d] close returns> #3. weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@bf12] close returns> #4. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@beeb] close returns> patternState = r'(.*)<(.*)> <Debug> <JDBCSQL> (.*)weblogic\.jdbc\.wrapper(.*)Statement_oracle_jdbc_driver(.*)StatementWrapper(.*)\] close returns>' matchObj = re.match(patternState , oneline, re.M|re.I) if matchObj: endTime = matchObj.group(2) stateID = matchObj.group(6) sTime = sqlStatementTime.get(__k_state + stateID, []) sTime.append(endTime) sqlStatementTime[__k_state + stateID] = sTime return True else: return False except Exception: raise # get statement's parameter def getStatementsParas(oneline): try: #1. prepared: yes #2. simplestatment: no #3. callable: yes #4. metadata: yes #1. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@bf08] setInt(2, 2)> #3. weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@bf12] setTimeStamp(1, 2018-01-31 08:31:30.596)> #3b.weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@31bbf] registerOutParameter(1, 4)> #4. 
weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@beeb] setString(1, cn=globalpolicy)> patternMatch = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Statement_oracle_jdbc_driver_Oracle(.*)StatementWrapper(.*)] (.*)\(([0-9]+),(.*)\)>' patternStatePara = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Statement_oracle_jdbc_driver_Oracle(.*)StatementWrapper(.*)] set(.*)\((.*),(.*)\)>' patternCallRegisterPara = r'(.*)weblogic\.jdbc\.wrapper\.(.*)Statement_oracle_jdbc_driver_Oracle(.*)StatementWrapper(.*)] registerOutParameter\((.*)>' matchObj = re.match(patternMatch, oneline, re.M|re.I) if matchObj: stateID = matchObj.group(4) ptype = matchObj.group(5) pOrder = matchObj.group(6) pvalue = matchObj.group(7) #sqlStatements[__k_state + stateID] = __k_conn + connID #print connID paras = [] paras = sqlStatementsParas.get(__k_state + stateID, []) paras.append(ptype + '(' + pOrder + ', ' + pvalue + ')') sqlStatementsParas[__k_state + stateID] = paras return True else: return False except Exception: raise #get resultsets def getStatementResultMap(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #1a. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@1ad7a] executeQuery returns weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@1ad7b> #1b. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@beeb] executeQuery returns weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@beec> #2. weblogic.jdbc.wrapper.Statement_oracle_jdbc_driver_OracleStatementWrapper@bf0d] executeQuery returns weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@bf0e> #3. weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@bf12] executeQuery returns weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@bf13> #1/4. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@beee] executeQuery returns weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@beef> # for bulk select, one statement has multiple resultID, so only record the first recordID patternSelectResultMap = r'(.*)weblogic\.jdbc\.wrapper(.*)Statement_oracle_jdbc_driver_Ora(.*)StatementWrapper(.*)\] executeQuery returns weblogic\.jdbc\.wrapper\.ResultSet_oracle_jdbc_driver(.*)ResultS(.*)@(.*)>' matchObj = re.match(patternSelectResultMap , oneline, re.M|re.I) if matchObj: stateID = matchObj.group(4) resultID = '@' + matchObj.group(7) if sqlStatementResultMap.get(__k_state + stateID, '') == '': sqlStatementResultMap[__k_state + stateID] = __k_result + resultID #print sqlStatementResultMap sqlStatementResultReverseMap[__k_result + resultID] = __k_state + stateID return True else: return False except Exception: raise #get executeUpdate result (insert/update/delete) def getAddDelUpdResultMap(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #1/4. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@bf03] executeUpdate returns 1> #2/4. weblogic.jdbc.wrapper.Statement_oracle_jdbc_driver_OracleStatementWrapper@bf03] executeUpdate returns 1> #2b. weblogic.jdbc.wrapper.PreparedStatement_oracle_jdbc_driver_OraclePreparedStatementWrapper@31bbb] executeBatch returns [I@623312d5> #3. 
weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@bf03] executeUpdate returns 1> patternAddDelUpdResult = r'(.*)weblogic\.jdbc\.wrapper(.*)Statement_oracle_jdbc_driver_Ora(.*)StatementWrapper(.*)\] execute(.*) returns (.*)>' matchObj = re.match(patternAddDelUpdResult, oneline, re.M|re.I) if matchObj: stateID = matchObj.group(4) result = matchObj.group(6) sqlStatementResultMap[__k_state + stateID] = result return True else: return False except Exception: raise def getFirstResultIDforBulkSelect(curResultID): try: sid = sqlStatementResultReverseMap.get(curResultID, '') rid = sqlStatementResultMap.get(sid) return rid except Exception: raise # get next token of new row from resultset def getResultNextToken(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #1/2/3/4. weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@1ad7b] next returns true> #3 . weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@31bbd] next returns true> #1/2/3/4. weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@beec] next returns true> patternResultNextToken = r'(.*)weblogic\.jdbc\.wrapper\.(.*)@(.*)\] next returns true>' matchObj = re.match(patternResultNextToken, oneline, re.M|re.I) if matchObj: resultID = '@' + matchObj.group(3) results = [] firstRID = getFirstResultIDforBulkSelect(__k_result + resultID) results = sqlStatementResults.get(firstRID, []) results.append(__r_newrow) sqlResultIndexes[firstRID] = [] #new row begins, clean old column indexes sqlStatementResults[firstRID] = results return True else: return False except Exception: raise #get resultset column is column is duplicated def isResultColumnDuplicated(oneline): try: # weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@beec] getString(1)> # weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@beec] getString(1)> # weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@beec] getInt(2)> # return: # 1: duplicated # 2: not duplicated # 3: not matched patternResultIndex = r'(.*)weblogic\.jdbc\.wrapper\.(.*)@(.*)\] get(.*)\((\d+)\)>' matchObj = re.match(patternResultIndex, oneline, re.M|re.I) if matchObj: resultID = '@' + matchObj.group(3) index = matchObj.group(5) firstRID = getFirstResultIDforBulkSelect(__k_result + resultID) indexes = sqlResultIndexes.get(firstRID, []) if index in indexes: # need to remove duplicated column, 1 if duplicated return 1 indexes.append(index) sqlResultIndexes[firstRID] = indexes return 2 else: return 3 except Exception: raise #get each data from resultset def getResultNextValue(oneline): try: #1. prepared: yes #2. simplestatment: yes #3. callable: yes #4. metadata: yes #1/2/3/4. weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@beec] getString returns hello> #1/2/3/4. weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_OracleResultSetImpl@beec] getString returns hello> #3. weblogic.jdbc.wrapper.CallableStatement_oracle_jdbc_driver_OracleCallableStatementWrapper@31bbd] getInt returns 6014965> #4. 
weblogic.jdbc.wrapper.ResultSetMetaData_oracle_jdbc_driver_OracleResultSetMetaData@beed] getColumnCount returns 4 #patternResultValue = r'(.*)weblogic\.jdbc\.wrapper\.ResultSet(.*)oracle_jdbc_driver_(.*)ResultS(.*)@(.*)\] get(.*) returns (.*)>' #Note: filter out: weblogic.jdbc.wrapper.ResultSet_oracle_jdbc_driver_ForwardOnlyResultSet@beec] getMetaData returns weblogic.jdbc.wrapper.ResultSetMetaData_oracle_jdbc_driver_OracleResultSetMetaData@beed> patternResultValue = r'(.*)weblogic\.jdbc\.wrapper\.(.*)@(.*)\] get(.*) returns(.*)>' matchObj = re.match(patternResultValue, oneline, re.M|re.I) if matchObj: if (matchObj.group(4)=='MetaData'): return False resultID = '@' + matchObj.group(3) value = matchObj.group(5) if value[:1]==' ': value = value[0-len(value)+1:] results = [] firstRID = getFirstResultIDforBulkSelect(__k_result + resultID) results = sqlStatementResults.get(firstRID, []) results.append(value) sqlStatementResults[firstRID] = results #print value return True else: return False except Exception: raise def info(action, result): System.out.println(__prompt_cur + ' ' + str(action) + " " + str(result)) def showProgress(s): System.out.print(s) def flushed(userStopUnixTime, logModifiedUnixTime): diff = logModifiedUnixTime - userStopUnixTime if (diff >= __flush_time_buffer_gap * 1000): return True else: curUnix = Date().getTime() if (curUnix >= userStopUnixTime + __flush_max_wait*1000): return True return False def collectJDBCSQL(): global wlsInstance global wlsVersion global rptStart global rptEnd try: wlsVersion = version info('Weblogic Version:', wlsVersion) info('Note:', 'You must run this script on the Weblogic Managed Server which you connect.') info('', '') connect() edit() startEdit() serverNames=cmo.getServers() allServers = [] for name in serverNames: curServerName = name.getName() allServers.append(curServerName) #allServers.append('agile-server2') #allServers.append('agile-server3') #allServers.append('agile-server4') info('Find following Weblogic instance(s):', len(allServers)) info('', '') for i in range(len(allServers)): srv = allServers[i] info(' ' + str(i) + ':', srv) info('', '') info('Type the number to select the correct Weblogic instance to connect, or type x to exit.', '') user_sel = '' while user_sel == '': user_sel = raw_input(__prompt_cur + ' Your choice: ') if user_sel.lower()=='x': save() activate() disconnect() info('User quits.', 'Bye') exit() wlsInstance = allServers[int(user_sel)] cd('/Servers/'+ wlsInstance + '/Log/' + wlsInstance) #ls() sqlLogFile = get('FileName') info('Get log file:', sqlLogFile) sqlLogOrigSize = get('FileMinSize') info('Get log size:', str(sqlLogOrigSize)) logDTFormatStr = get('DateFormatPattern') info('Get log date format:', str(logDTFormatStr)) set('FileMinSize', __fileSize) info('Set log size:', str(__fileSize)) set('DateFormatPattern', __dt_format) info('Set log date format:', __dt_format) cd('/Servers/' + wlsInstance + '/ServerDebug/' + wlsInstance) set('DebugJDBCSQL','true') info('Set DebugJDBCSQL:', 'true') save() activate() sqlLogFilePath = os.getcwd() + '/../servers/' + wlsInstance + '/' + sqlLogFile rptStart = dfLogTimestamp.format(Date()) info('It is collecting SQL data. 
Press Enter after collected.', '') raw_input(__prompt_cur + ' ') dtRpt = Date() rptEnd = dfLogTimestamp.format(dtRpt) ## info(__script_name + ' is waiting for Weblogic to flush log, please hold on...', '') #pytime.sleep(__sleep) jfile = File(sqlLogFilePath) showProgress(__prompt_cur + ' ') while True: jfmodifiedUnix = jfile.lastModified() rpt_endtime_unix = dtRpt.getTime() dtCurrent = Date() if (flushed(rpt_endtime_unix, jfmodifiedUnix)): break showProgress('.') Thread.sleep(__sleep * 1000) showProgress('\n') sqlLogFilePathCopy = sqlLogFilePath + '.' + __script_name shutil.copyfile(sqlLogFilePath, sqlLogFilePathCopy) # copy jdbc log file info('Copy ' + sqlLogFile + ' to', sqlLogFilePathCopy) ## ## revert back to original setting edit() startEdit() info('Get server:', wlsInstance) cd('/Servers/'+ wlsInstance + '/Log/' + wlsInstance) set('FileMinSize', sqlLogOrigSize) info('Reset log size:', str(sqlLogOrigSize)) set('DateFormatPattern', logDTFormatStr) info('Reset log date format:', str(logDTFormatStr)) cd('/Servers/' + wlsInstance + '/ServerDebug/' + wlsInstance) set('DebugJDBCSQL','false') info('Reset DebugJDBCSQL:', 'false') save() activate() disconnect() return sqlLogFilePathCopy #rpt_endtime = pytime.strftime(__dt_format, pytime.localtime()) except Exception: save() activate() disconnect() raise def createHTMLReport(jdbcLogFile): global wlsInstance global wlsVersion global rptStart global rptEnd info('Generating HTML Report...', '') f = codecs.open(jdbcLogFile, 'r', encoding='utf-8') try: lines = f.read().splitlines() except: f = open(jdbcLogFile, 'r') lines = f.read().splitlines() f.close() curDupCol = 3 preDupCol = 3 for line in lines: #print(line) line.strip() if line=='': continue if (getConns(line)==False): if (getStatementEndTime(line)==False): if (getStatementConnMap(line)==False): if (getStatementsParas(line)==False): if (getStatementResultMap(line)==False): if (getAddDelUpdResultMap(line)==False): if (getResultNextToken(line)==False): curDupCol = isResultColumnDuplicated(line); if (curDupCol==1 or curDupCol==2): preDupCol = curDupCol continue; if (curDupCol==3 and preDupCol==2): getResultNextValue(line) htmlText = [] htmlText.append('<html lang="en-US">') htmlText.append('<head>') htmlText.append('<meta charset="utf-8">') htmlText.append('<title>' + __script_name + ' Report for '+ wlsInstance +'</title>') htmlText.append('<style type="text/css">') htmlText.append('body.awr {font:bold 10pt Arial,Helvetica,Geneva,sans-serif;color:black; background:White;}') htmlText.append('h1.awr {font:bold 20pt Arial,Helvetica,Geneva,sans-serif;color:#336699;background-color:White;border-bottom:1px solid #cccc99;margin-top:0pt; margin-bottom:0pt;padding:0px 0px 0px 0px;}') htmlText.append('th.awrnobg {font:bold 8pt Arial,Helvetica,Geneva,sans-serif; color:green; background:white;padding-left:4px; padding-right:4px;padding-bottom:2px}') htmlText.append('th.awrbg {font:bold 8pt Arial,Helvetica,Geneva,sans-serif; color:White; background:#0066CC;padding-left:4px; padding-right:4px;padding-bottom:2px}') htmlText.append('td.awrnc {font:8pt Arial,Helvetica,Geneva,sans-serif;color:black;background:White;vertical-align:top;}') htmlText.append('td.awrnc2 {font:8pt Arial,Helvetica,Geneva,sans-serif;color:black;background:#dedede;vertical-align:top;}') htmlText.append('table.tdiff {border_collapse: collapse; }') htmlText.append('table.xTBResult {border-collapse: collapse;}') htmlText.append('td.xTDResult {border:#ccc solid 1px;font:8pt 
Arial,Helvetica,Geneva,sans-serif;color:black;background:White;vertical-align:top;}') htmlText.append('</style>') htmlText.append('</head>') htmlText.append('<body class="awr">') htmlText.append('<h1 class="awr">') htmlText.append(__script_name + ' Report for '+ wlsInstance ) htmlText.append('</h1>') htmlText.append('<p/>') htmlText.append('<table border="0" width="100%" class="tdiff">') htmlText.append('<tr><th class="awrbg">' + __script_name + ' Version</th><th class="awrbg">Jython Version</th><th class="awrbg">Weblogic Instance</th><th class="awrbg">Weblogic Version</th><th class="awrbg">Report Start</th><th class="awrbg">Report End</th></tr>') htmlText.append('<tr><td class="awrnc">' + __script_version + '</td><td class="awrnc">'+ pyVersion +'</td><td class="awrnc">'+ wlsInstance +'</td><td class="awrnc">'+ wlsVersion +'</td><td class="awrnc">'+ rptStart +'</td><td class="awrnc">'+ rptEnd +'</td></tr>') htmlText.append('</table>') htmlText.append('<p/>') htmlText.append('<p/>') htmlText.append('<h3 class="awr"><a class="awr"></a>Report Summary</h3>') htmlText.append('<table border="0" width="100%" class="tdiff">') htmlText.append('<tr><th class="awrbg">Connection</th><th class="awrbg">Statement</th><th class="awrbg">Resultset</th><th class="awrbg">Start Time</th><th class="awrbg">End Time</th><th class="awrbg">Elapsed(ms)</th></tr>') sqlDetails = [] #print(sqlConns) #print(sqlStatementConnMap) #print(sqlStatementsParas) #print(sqlStatementResults) #print(sqlStatementTime) cssTd2 = 'awrnc' keyOfStatement = sqlStatementConnMap.keys() keyOfStatement.sort() for sid in keyOfStatement: #for sid in sqlStatementConnMap.keys(): cid = sqlStatementConnMap.get(sid, '') sql = sqlConns.get(cid + sid, '') rid = sqlStatementResultMap.get(sid, sid) seTime = sqlStatementTime.get(sid, ['0','0']) paras = sqlStatementsParas.get(sid, '') resultAmout = 0 rows = '0' if rid.startswith(__k_state): # for callablestatement, once case that resultset is not used. So sql result is saved in statement directly rid = rid.replace(__k_state, __k_result) if rid.startswith(__k_result): rows = sqlStatementResults.get(rid, '0') #if select returns no result, there is no matched record. So use default 0 if isinstance(rows, str): # is '0' or '1' or '2' .... 
resultAmout = rows else: #resultAmout = len(rows) pass else: resultAmout = rid if len(seTime)==0: seTime = ['',''] else: if len(seTime)==1: seTime.append('') sqlResult = [] trFlag = 0 callableResRow = 0 if isinstance(rows, list): # if it is R@xxxx for r in range(len(rows)): rvalue = rows[r] #print(rvalue) if rvalue==__r_newrow: # new row if ((trFlag!=0) and (trFlag%2==1)): sqlResult.append('</tr>') sqlResult.append('<tr>') trFlag += 1 else: sqlResult.append('<td class="xTDResult">' + rvalue + '</td>') callableResRow = 1 resultAmout = trFlag else: sqlResult.append('<tr>') sqlResult.append('<td class="xTDResult">Rows: ' + resultAmout + '</td>') if (callableResRow == 1 and resultAmout==0): resultAmout = callableResRow #cssTd2 = 'awrnc' htmlText.append('<tr><td class="'+ cssTd2 +'">' + cid + '</td><td class="'+ cssTd2 +'"><a href="#' + sid + '">' + sid + '</a></td><td class="'+ cssTd2 +'">' + str(resultAmout) + '</td><td class="'+ cssTd2 +'">'+ seTime[0] + '</td><td class="'+ cssTd2 +'">'+ seTime[1] + '</td><td class="'+ cssTd2 +'">' + elapsed(seTime[0], seTime[1]) + '</td></tr>') if cssTd2=='awrnc': cssTd2 = 'awrnc2' else: cssTd2 = 'awrnc' sqlDetails.append('<p></p>') sqlDetails.append('<table width="100%" class="tdiff">') sqlDetails.append('<tr><th class="awrnobg" align="left"><a name="' + sid + '">' + sid + '</a></th><th class="awrbg">Value</th></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">SQL Body</td><td class="awrnc">' + sql + '</td></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">Parameter</td><td class="awrnc">' + '<br> '.join(paras) + '</td></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">Start Time</td><td class="awrnc">' + seTime[0] + '</td></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">End Time</td><td class="awrnc">' + seTime[1] + '</td></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">Elapsed(ms)</td><td class="awrnc">' + elapsed(seTime[0], seTime[1]) + '</td></tr>') sqlDetails.append('<tr><td scope="row" width="80" class="awrnc">Result('+ str(resultAmout) +')</td><td class="awrnc">') sqlDetails.append('<table width="100%" class="xTBResult xTDResult">') sqlDetails += sqlResult sqlDetails.append('</tr>') sqlDetails.append('</table>') sqlDetails.append('</td>') sqlDetails.append('</tr>') sqlDetails.append('</table>') htmlText.append('</table>') htmlText.append('<p/>') htmlText.append('<p/>') htmlText.append('<h3 class="awr"><a class="awr"></a>SQL Detail</h3>') htmlText += sqlDetails htmlText.append('<p/>') htmlText.append('<p/>') htmlText.append('<a class="awr" href="#top">Back to Top</a>') htmlText.append('<p/>End of Report') htmlText.append('</body>') htmlText.append('</html>') reportFilePath = os.getcwd() + "/" + __script_name + "-" + wlsInstance + ".html" f = codecs.open(reportFilePath, 'w', encoding='utf-8') try: f.writelines(htmlText) except: f = open(reportFilePath, 'w') f.writelines(htmlText) f.close() return reportFilePath ##################################################################################################### info('Welcome to ' + __script_name, __script_version) pyVersion = sys.version info('Jython Version:', pyVersion) info('Type the number to select mode', "") info('','') info(' 0:', " Online mode") info(' 1:', " Offline mode (for Oracle Support only)") info('','') user_sel = raw_input(__prompt_cur + ' Your choice: ') if user_sel.lower()=='0': sqlLogFilePath = collectJDBCSQL() else: info('Please copy the JDBC log file to directory: ', 
os.getcwd()) user_input_filename = '' while user_input_filename=='': info('','') user_input_filename = raw_input(__prompt_cur + ' JDBC log file name: ') sqlLogFilePath = os.getcwd() + "/" + user_input_filename reportFilePath = createHTMLReport(sqlLogFilePath) info('','') info('HTML Report file:', reportFilePath) info('JDBC Log file :', sqlLogFilePath) info('','') info('Script quits.', 'Bye') ##################################################################################################### exit() ```
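
The report above computes each statement's duration with an `elapsed()` helper that is defined earlier in the script and not shown here. As a standalone illustration only — the helper name and the timestamp format are assumptions, not taken from the original — the computation could look like this:

```python
# Hypothetical sketch: elapsed milliseconds between two timestamp strings,
# assuming a 'YYYY-MM-DD HH:MM:SS.mmm' rendering (an assumption, not the
# script's actual format).
import time

def elapsed_ms(start, end, fmt='%Y-%m-%d %H:%M:%S'):
    if not start or not end:
        return ''
    # strptime cannot parse the millisecond suffix, so split it off first
    s_main, s_ms = (start.split('.') + ['0'])[:2]
    e_main, e_ms = (end.split('.') + ['0'])[:2]
    s = time.mktime(time.strptime(s_main, fmt)) * 1000 + int(s_ms)
    e = time.mktime(time.strptime(e_main, fmt)) * 1000 + int(e_ms)
    return str(int(e - s))
```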
{ "source": "jiechencyz/pyroSAR", "score": 2 }
#### File: pyroSAR/pyroSAR/_dev_config.py ```python import os import warnings from distutils.spawn import find_executable __LOCAL__ = ['sensor', 'projection', 'orbit', 'polarizations', 'acquisition_mode', 'start', 'stop', 'product', 'spacing', 'samples', 'lines'] class Storage(dict): """ Dict class with point access to store the lookups, pattern and URLs Attributes ---------- STORAGE.LOOKUP : Storage All lookup table merged in a Storage class instance: * snap : SNAP process. * attributes : Attributes for different sensors. STORAGE.URL : dict (with point access) All URLs for DEMs, orbit files etc.: * dem : URL to download specific DEMs: * strm3 * ace2 * strm3_FTP * strm1HGT * ace * orbit : URL to download the orbit files: * ers1 * ers2 * s1_poe * s1_pres * doris * auxcal : URL to download the auxcal data: * s1 * envisat * ers Note ---- There may be additional attributes not listed above depending of the specific solver. Since this class is essentially a subclass of dict with attribute accessors, one can see which attributes are available using the `keys()` method. """ def __getattr__(self, name): try: return self[name] except KeyError: raise AttributeError(name) __setattr__ = dict.__setitem__ __delattr__ = dict.__delitem__ def __repr__(self): if self.keys(): m = max(map(len, list(self.keys()))) + 1 return '\n'.join([k.rjust(m) + ': ' + repr(v) for k, v in sorted(self.items())]) else: return self.__class__.__name__ + "()" def __dir__(self): return list(self.keys()) # ============================================================================== # LOOKUP # ============================================================================== snap_suffix = {'Apply-Orbit-File': 'Orb', 'Calibration': 'Cal', 'Cross-Correlation': '', 'LinearToFromdB': 'dB', 'Remove-GRD-Border-Noise': 'bnr', 'SAR-Simulation': 'Sim', 'SARSim-Terrain-Correction': 'TC', 'Subset': '', 'Terrain-Correction': 'TC', 'Terrain-Flattening': 'TF', 'Read': '', 'Write': ''} snap = Storage(suffix=snap_suffix) LOOKUP = Storage(snap=snap, attributes={'sensor': 'TEXT', 'orbit': 'TEXT', 'acquisition_mode': 'TEXT', 'start': 'TEXT', 'stop': 'TEXT', 'product': 'TEXT', 'samples': 'INTEGER', 'lines': 'INTEGER', 'outname_base': 'TEXT PRIMARY KEY', 'scene': 'TEXT', 'hh': 'INTEGER', 'vv': 'INTEGER', 'hv': 'INTEGER', 'vh': 'INTEGER'}) # ============================================================================== # URL # ============================================================================== dem = Storage(ace2='http://step.esa.int/auxdata/dem/ACE2/5M/', ace='http://step.esa.int/auxdata/dem/ACE30/', srtm3_FTP='xftp.jrc.it', srtm3='http://srtm.csi.cgiar.org/SRT-ZIP/SRTM_V41/SRTM_Data_GeoTiff/', srtm1Hgt='http://step.esa.int/auxdata/dem/SRTMGL1/', ) orbit = Storage(doris='http://step.esa.int/auxdata/orbits/Doris/vor', ers1='http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS1', ers2='http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS2', s1_poe='http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/', s1_res='http://step.esa.int/auxdata/orbits/Sentinel-1/RESORB/') auxcal = Storage(s1='http://step.esa.int/auxdata/auxcal/S1/', envisat='http://step.esa.int/auxdata/auxcal/ENVISAT/', ers='http://step.esa.int/auxdata/auxcal/ERS/') URL = Storage(dem=dem, orbit=orbit, auxcal=auxcal) # ============================================================================== # Merge # ============================================================================== STORAGE = Storage(URL=URL, LOOKUP=LOOKUP) # 
============================================================================== # Class Definitions # ============================================================================== class ExamineExe(object): def __init__(self): # todo: Update Docstrings pass @staticmethod def examine(name): executable_list = [] if isinstance(name, (tuple, list)): for item in name: executable_temp = find_executable(item) is not None executable_list.append(executable_temp) # Check True values True_values = [item for item in executable_list if item] # True_values = executable[np.where(executable is True)] if len(True_values) > 1: raise ValueError( "There are more than one instances installed. Define which one you want to use with self.set_path(...)") else: status = any(item == True for item in executable_list) try: temp_loc = [item for item, executable_list in enumerate(executable_list) if executable_list][0] return status, os.path.abspath(find_executable(name[temp_loc])) except IndexError: # raise ValueError("One of the executables {0} must be installed.".format(name)) warnings.warn( "One of the executables {0} should be installed. You can download it from http://step.esa.int/main/toolboxes/snap/ or you can specify a path with snap_config.set_path(path_to_snap)".format( name), UserWarning) # return status, os.path.abspath(find_executable(name[temp_loc])) else: status = find_executable(name) is not None if status is False: warnings.warn("The executables {0} must be installed. You can download it from http://step.esa.int/main/toolboxes/snap/ or you can specify a path with snap_config.set_path(path_to_snap)".format(name), UserWarning) # raise ValueError("The executables {0} must be installed.".format(name)) else: return status, os.path.abspath(find_executable(name)) ``` #### File: pyroSAR/snap/auxil.py ```python import sys if sys.version_info >= (3, 0): from io import StringIO from urllib.request import urlopen from urllib.error import HTTPError else: from cStringIO import StringIO from urllib2 import urlopen, HTTPError import os import re import shutil import subprocess as sp import xml.etree.ElementTree as ET import zipfile as zf from ftplib import FTP from time import strftime, gmtime from xml.dom import minidom from os.path import expanduser import pyroSAR from pyroSAR import identify from pyroSAR.ancillary import dissolve, finder from pyroSAR.spatial import gdal_translate from .._dev_config import LOOKUP, ExamineExe def parse_recipe(name): name = name if name.endswith('.xml') else name + '.xml' absname = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'recipes', name) with open(absname, 'r') as workflow: tree = ET.fromstring(workflow.read()) return tree def parse_node(name): name = name if name.endswith('.xml') else name + '.xml' absname = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'recipes', 'nodes', name) with open(absname, 'r') as workflow: tree = ET.fromstring(workflow.read()) return tree def parse_suffix(workflow): nodes = workflow.findall('node') suffix = '_'.join(filter(None, [LOOKUP.snap.suffix[x] for x in [y.attrib['id'] for y in nodes]])) return suffix def insert_node(workflow, predecessor_id, node): predecessor = workflow.find('.//node[@id="{}"]'.format(predecessor_id)) position = list(workflow).index(predecessor) + 1 workflow.insert(position, node) newnode = workflow[position] newnode.find('.//sources/sourceProduct').attrib['refid'] = predecessor.attrib['id'] workflow[position + 1].find('.//sources/sourceProduct').attrib['refid'] = newnode.attrib['id'] def 
write_recipe(recipe, outfile): outfile = outfile if outfile.endswith('.xml') else outfile + '.xml' rough_string = ET.tostring(recipe, 'utf-8') reparsed = minidom.parseString(rough_string) with open(outfile, 'w') as out: out.write(reparsed.toprettyxml(indent='\t', newl='')) def getOrbitContentVersions(contentVersion): return dict( [re.split('\s*=\s*', x.strip('\r')) for x in contentVersion.read().split('\n') if re.search('^[0-9]{4}', x)]) class GetAuxdata: def __init__(self, datasets, scenes): self.datasets = datasets self.scenes = [identify(scene) if isinstance(scene, str) else scene for scene in scenes] self.sensors = list(set([scene.sensor for scene in scenes])) try: self.auxDataPath = os.path.join(os.environ['HOME'], '.snap/auxdata') except KeyError: self.auxDataPath = os.path.join(os.environ['USERPROFILE'], '.snap/auxdata') def srtm_1sec_hgt(self): pass # Wird nicht benutzt? # for dataset in self.datasets: # files = [x.replace('hgt', 'SRTMGL1.hgt.zip') for x in # list(set(dissolve([scene.getHGT() for scene in self.scenes])))] def getAuxdata(datasets, scenes): auxDataPath = os.path.join(expanduser("~"), '.snap/auxdata') scenes = [identify(scene) if isinstance(scene, str) else scene for scene in scenes] sensors = list(set([scene.sensor for scene in scenes])) for dataset in datasets: if dataset == 'SRTM 1Sec HGT': files = [x.replace('hgt', 'SRTMGL1.hgt.zip') for x in list(set(dissolve([scene.getHGT() for scene in scenes])))] for file in files: infile = os.path.join('http://step.esa.int/auxdata/dem/SRTMGL1', file) outfile = os.path.join(auxDataPath, 'dem/SRTM 1Sec HGT', file) if not os.path.isfile(outfile): print(infile) try: input = urlopen(infile) except HTTPError: print('-> not available') continue with open(outfile, 'wb') as output: output.write(input.read()) input.close() elif dataset == 'POEORB': for sensor in sensors: if re.search('S1[AB]', sensor): dates = [(scene.start[:4], scene.start[4:6]) for scene in scenes] years = list(set([x[0] for x in dates])) remote_contentVersion = urlopen( 'http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/remote_contentVersion.txt') versions_remote = getOrbitContentVersions(remote_contentVersion) for year in years: dir_orb = os.path.join(auxDataPath, 'Orbits/Sentinel-1/POEORB', year) if not os.path.isdir(dir_orb): os.makedirs(dir_orb) contentVersionFile = os.path.join(dir_orb, 'contentVersion.txt') if os.path.isfile(contentVersionFile): contentVersion = open(contentVersionFile, 'r+') versions_local = getOrbitContentVersions(contentVersion) else: contentVersion = open(contentVersionFile, 'w') versions_local = {} combine = dict(set(versions_local.items()) & set(versions_remote.items())) dates_select = [x for x in dates if x[0] == year] months = list(set([x[1] for x in dates_select])) orb_ids = sorted( [x for x in ['{}-{}.zip'.format(year, month) for month in months] if not x in combine]) if len(orb_ids) > 0: contentVersion.write('#\n#{}\n'.format(strftime('%a %b %d %H:%M:%S %Z %Y', gmtime()))) for orb_id in orb_ids: orb_remote = urlopen( 'http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/{}'.format(orb_id)) orb_remote_stream = zf.ZipFile(StringIO(orb_remote.read()), 'r') orb_remote.close() targets = [x for x in orb_remote_stream.namelist() if not os.path.isfile(os.path.join(dir_orb, x))] orb_remote_stream.extractall(dir_orb, targets) orb_remote_stream.close() versions_local[orb_id] = versions_remote[orb_id] for key, val in versions_local.iteritems(): contentVersion.write('{}={}\n'.format(key, val)) contentVersion.close() 
remote_contentVersion.close() else: print('not implemented yet') elif dataset == 'Delft Precise Orbits': path_server = 'dutlru2.lr.tudelft.nl' subdirs = {'ASAR:': 'ODR.ENVISAT1/eigen-cg03c', 'ERS1': 'ODR.ERS-1/dgm-e04', 'ERS2': 'ODR.ERS-2/dgm-e04'} ftp = FTP(path_server) ftp.login() for sensor in sensors: if sensor in subdirs.keys(): path_target = os.path.join('pub/orbits', subdirs[sensor]) path_local = os.path.join(auxDataPath, 'Orbits/Delft Precise Orbits', subdirs[sensor]) ftp.cwd(path_target) for item in ftp.nlst(): ftp.retrbinary('RETR ' + item, open(os.path.join(path_local, item), 'wb').write) ftp.quit() else: print('not implemented yet') def gpt(xmlfile): """ wrapper for ESA SNAP Graph Processing Tool GPT input is a readily formatted workflow xml file as created by function geocode in module snap.util """ with open(xmlfile, 'r') as infile: workflow = ET.fromstring(infile.read()) write = workflow.find('.//node[@id="Write"]') outname = write.find('.//parameters/file').text outdir = os.path.dirname(outname) format = write.find('.//parameters/formatName').text infile = workflow.find('.//node[@id="Read"]/parameters/file').text if format == 'GeoTiff-BigTIFF': cmd = ['gpt', # '-Dsnap.dataio.reader.tileWidth=*', # '-Dsnap.dataio.reader.tileHeight=1', '-Dsnap.dataio.bigtiff.tiling.width=256', '-Dsnap.dataio.bigtiff.tiling.height=256', # '-Dsnap.dataio.bigtiff.compression.type=LZW', # '-Dsnap.dataio.bigtiff.compression.quality=0.75', xmlfile] else: cmd = ['gpt', xmlfile] proc = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) out, err = proc.communicate() if proc.returncode != 0: print('failed: ', os.path.basename(infile)) if os.path.isfile(outname + '.tif'): os.remove(outname + '.tif') elif os.path.isdir(outname): shutil.rmtree(outname) raise RuntimeError(err) if format == 'ENVI': id = pyroSAR.identify(infile) suffix = parse_suffix(workflow) for item in finder(outname, ['*.img']): pol = re.search('[HV]{2}', item).group() name_new = os.path.join(outdir, '{}_{}_{}.tif'.format(id.outname_base(), pol, suffix)) translateoptions = {'options': ['-q', '-co', 'INTERLEAVE=BAND', '-co', 'TILED=YES'], 'format': 'GTiff'} gdal_translate(item, name_new, translateoptions) shutil.rmtree(outname) class ExamineSnap(ExamineExe): """ Class to check if snap is installed. This will be called with snap.__init__ as snap_config. 
If you are running multiple snap versions or the package can not find the snap executable, you can set an path via: snap_config.set_path("path") """ def __init__(self, snap_executable=('snap64.exe', 'snap32.exe', 'snap.exe', 'snap')): super(ExamineSnap, self).__init__() try: self.status, self.path = self.examine(snap_executable) if os.path.islink(self.path): self.path = os.path.realpath(self.path) self.auxdatapath = os.path.join(expanduser("~"), '.snap/auxdata') self.__get_etc() self.__read_config() except TypeError: pass def __get_etc(self): try: self.etc = os.path.join(os.path.dirname(os.path.dirname(self.path)), 'etc') self.auxdata = os.listdir(self.etc) self.config_path = os.path.join(self.etc, [s for s in self.auxdata if "snap.auxdata.properties" in s][0]) except OSError: raise AssertionError("ETC directory is not existent.") def set_path(self, path): self.path = os.path.abspath(path) self.__get_etc() def __read_config(self): with open(self.config_path) as config: self.config = [] for line in config: self.config.append(line) ``` #### File: pyroSAR/tests/test_snap.py ```python import os from pyroSAR.snap import geocode def test_geocode(): scene = 'pyroSAR/tests/data/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip' geocode(scene, 'pyroSAR/tests/data', test=True) os.remove('pyroSAR/tests/data/S1A__IW___A_20150222T170750_bnr_Orb_Cal_TF_TC_dB_proc.xml') ``` #### File: pyroSAR/pyroSAR/xml_util.py ```python import os import re import ast import xml.etree.ElementTree as ET class XMLHandler(object): def __init__(self, xml): errormessage = 'xmlfile must be a string pointing to an existing file, ' \ 'a string or bytes object from which an xml can be parsed or a file object' if 'readline' in dir(xml): self.infile = xml.name if hasattr(xml, 'name') else None xml.seek(0) self.text = xml.read() xml.seek(0) elif isinstance(xml, (bytes, str)): if os.path.isfile(xml): self.infile = xml with open(xml, 'r') as infile: self.text = infile.read() else: try: tree = ET.fromstring(xml) self.infile = None self.text = str(xml) del tree except ET.ParseError: raise IOError(errormessage) else: raise IOError(errormessage) defs = re.findall('xmlns:[a-z0-9]+="[^"]*"', self.text) dictstring = '{{{}}}'.format(re.sub(r'xmlns:([a-z0-9]*)=', r'"\1":', ', '.join(defs))) self.namespaces = ast.literal_eval(dictstring) def restoreNamespaces(self): for key, val in self.namespaces.items(): val_new = val.split('/')[-1] self.text = self.text.replace(key, val_new) def write(self, outname, mode): with open(outname, mode) as out: out.write(self.text) def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): return def getNamespaces(xmlfile): with XMLHandler(xmlfile) as xml: return xml.namespaces ```
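
The `ExamineSnap` docstring above describes overriding the detected executable with `set_path`. A hypothetical usage sketch (not part of pyroSAR; the install path below is a placeholder):

```python
# Hypothetical sketch: locate the SNAP executable, or point pyroSAR at a
# specific installation when auto-detection fails or several are installed.
from pyroSAR.snap.auxil import ExamineSnap

snap_config = ExamineSnap()
if getattr(snap_config, 'status', False):
    print('snap found at: {}'.format(snap_config.path))
else:
    # placeholder path for illustration only
    snap_config.set_path('/opt/snap/bin/snap')
```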
{ "source": "Jie-Cheng/iv-data-converter", "score": 2 }
#### File: Jie-Cheng/iv-data-converter/data_table.py ```python from PySide2.QtWidgets import QWidget, QSplitter, QComboBox, QHBoxLayout, QLabel from PySide2.QtWidgets import QVBoxLayout, QGridLayout, QTableWidget from PySide2.QtWidgets import QSizePolicy, QTableWidgetItem, QTableView from PySide2.QtCore import QAbstractTableModel, QModelIndex, Qt from PySide2.QtGui import QStandardItemModel, QColor class DataModel(QAbstractTableModel): def __init__(self, data=None): QAbstractTableModel.__init__(self) self.row_count = 0 self.column_count = 0 self.header_labels = [] if (data is not None): self.load_data(data) def load_data(self, data): self.row_count = data.shape[0] self.column_count = data.shape[1] + 1 self.header_labels = ["Index"] + list(data.columns) self.raw_data = data.to_numpy() def rowCount(self, parent=QModelIndex()): return self.row_count def columnCount(self, parent=QModelIndex()): return self.column_count def headerData(self, section, orientation, role): if (role != Qt.DisplayRole): return None if (orientation == Qt.Horizontal): return self.header_labels[section] else: return None def data(self, index, role=Qt.DisplayRole): column = index.column() row = index.row() if (role == Qt.DisplayRole): if (column == 0): return "{:d}".format(row + 1) else: return "{:7.5e}".format(self.raw_data[row, column-1]) elif (role == Qt.BackgroundRole): return QColor(Qt.white) elif (role == Qt.TextAlignmentRole): return Qt.AlignRight return None class DataTabel(QWidget): def __init__(self, parent=None): QWidget.__init__(self, parent) self.combo_file = QComboBox() label1 = QLabel("Input:") self.data_models = {} self.table_view = QTableView() layout1 = QHBoxLayout() layout1.addWidget(label1) layout1.addWidget(self.combo_file) layout1.setStretch(1, 1) self.layout2 = QVBoxLayout() self.layout2.addLayout(layout1) self.layout2.addWidget(self.table_view) self.setLayout(self.layout2) self.combo_file.currentIndexChanged.connect(self.update_model) def initialize(self, data_sets): self.combo_file.clear() for (file_name, data_frame) in data_sets.items(): self.combo_file.addItem(file_name) model = DataModel(data_frame) self.data_models[file_name] = model if (self.combo_file.count() != 0): self.combo_file.setCurrentIndex(self.combo_file.count()-1) self.update_model() def update_model(self): if (self.combo_file.currentText() in self.data_models): self.table_view.setModel(self.data_models[self.combo_file.currentText()]) else: self.table_view.setModel(DataModel()) def current_file(self): return self.combo_file.currentText() ``` #### File: Jie-Cheng/iv-data-converter/main_window.py ```python import sys, os from PySide2.QtWidgets import QApplication, QMainWindow, QFileDialog, QAction, QMessageBox from PySide2.QtWidgets import QLabel, QSplitter from PySide2.QtCore import QDir, QObject, QFileInfo, QSettings, QStandardPaths from PySide2.QtGui import QKeySequence, QIcon import pandas as pd from reader import read_file from data_table import * from output_widget import * class MainWindow(QMainWindow): def __init__(self): QMainWindow.__init__(self) self.setWindowTitle("Lab-Data-Converter") self.create_actions() self.setup_menu() self.settings = QSettings("UCB", "Lab-Data-Converter") self.data_sets = {} self.output_widget = OutputWidget(self) self.data_table = DataTabel(self) self.splitter1 = QSplitter() self.splitter1.setOrientation(Qt.Vertical) self.splitter1.addWidget(self.data_table) self.splitter1.addWidget(self.output_widget) self.splitter1.setSizes([200, 100]) #self.splitter1.setStretchFactor(0, 8) 
#self.splitter1.setStretchFactor(1, 4) self.setCentralWidget(self.splitter1) QDir.setCurrent(QStandardPaths.standardLocations( QStandardPaths.DocumentsLocation)[-1]) if (self.settings.value("work_dir")): try: QDir.setCurrent(self.settings.value("work_dir")) except: pass def create_actions(self): self.open_act = QAction("Open", self) self.open_act.setShortcuts(QKeySequence.Open) self.open_act.setStatusTip("Open one or multiple files"); self.open_act.triggered.connect(self.open_file) self.close_act = QAction("Close", self) self.close_act.setShortcuts(QKeySequence.Close) self.close_act.setStatusTip("Close selected files"); self.close_act.triggered.connect(self.close_file) def setup_menu(self): self.menuBar().addAction(self.open_act) self.menuBar().addAction(self.close_act) def open_file(self): files = QFileDialog.getOpenFileNames(self, "Open file", QDir.currentPath(), "Files (*.csv *.txt)") for file in files[0]: if (file not in self.data_sets): data = read_file(file) if (data is not None): self.data_sets[file] = data self.settings.setValue("work_dir", QFileInfo(file).absolutePath()) self.output_widget.add_to_list(file) else: QMessageBox.critical(self, "Main window", "File " + file[0] + " has an unknown format!") self.data_table.initialize(self.data_sets) def close_file(self): files_to_close = self.output_widget.files_selected() for file_to_close in files_to_close: self.data_sets.pop(file_to_close, None) if (not self.output_widget.remove_from_list(file_to_close)): raise Exception("{} doesn't exist in checkboxes!".format(file_to_close)) self.data_table.initialize(self.data_sets) def on_convert(self): export_dialog = QFileDialog() outfile = export_dialog.getSaveFileName(self, "Export to csv file", QDir.currentPath() + "/preview.csv", "csv (*.csv)") info = self.output_widget.output_info() min_rows = min([data.shape[0] for data in self.data_sets.values()]) if (info["first_line"] < 1 or info["first_line"] > min_rows or info["last_line"] < 1 or info["last_line"] > min_rows or info["first_line"] > info["last_line"]): QMessageBox.critical(self, "Row range", "Invalid row range!") else: self.write_file(outfile[0], info["files"], info["first_line"], info["last_line"]) def write_file(self, out_file, in_files, first, last): res = pd.DataFrame() for in_file in in_files: data = self.data_sets[in_file] base_name = QFileInfo(in_file).baseName() if base_name in res: QMessageBox.warning(self, "Export", "Identical file name! Use full path instead.") base_name = in_file for col in data.columns: res[base_name+":"+col] = data[col][first-1:last] res.to_csv(out_file, index=False) QMessageBox.information(self, "Export", "Selected rows have been written to {}.".format(out_file)) if __name__ == "__main__": app = QApplication([]) app.setWindowIcon(QIcon("images/main_icon.png")) # A workaround to fix the missing taskbar icon issue when running this code # in Windows shell. # Another pitfall is that when packaging with PyInstaller, images/main_icon.png # must be manually put in the output folder. if (os.name == "nt"): import ctypes app_id = u"UCB.Lab-Data-Converter.1.0.0" ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(app_id) window = MainWindow() window.show() sys.exit(app.exec_()) ```
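
The `DataModel` above is a standard `QAbstractTableModel` subclass (`rowCount`/`columnCount`/`headerData`/`data`). A minimal sketch, not part of the project, exercising it on its own with a small pandas DataFrame:

```python
# Hypothetical sketch: wire DataModel into a bare QTableView, outside the
# full MainWindow; column names and values are illustrative.
import sys
import pandas as pd
from PySide2.QtWidgets import QApplication, QTableView
from data_table import DataModel

app = QApplication([])
df = pd.DataFrame({"V": [0.1, 0.2], "I": [1.0e-6, 2.0e-6]})
view = QTableView()
view.setModel(DataModel(df))  # shows Index, V, I columns with 7.5e formatting
view.show()
sys.exit(app.exec_())
```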
{ "source": "jiechuanjiang/GENE", "score": 2 }
#### File: jiechuanjiang/GENE/vae_model.py ```python import numpy as np from keras.layers import Input, Dense, Lambda from keras.models import Model from keras import backend as K from keras.losses import mse from keras.optimizers import Adam def sampling(args): mu, log_sigma = args batch = K.shape(mu)[0] dim = K.int_shape(mu)[1] epsilon = K.random_normal(shape=(batch, dim)) return mu + K.exp(0.5 * log_sigma) * epsilon def vae_loss(log_sigma, mu): def my_loss(y_true, y_pred): recon = mse(y_true, y_pred) kl = 0.5 *K.sum(K.exp(log_sigma) + K.square(mu) - 1. - log_sigma, axis=1) return recon + 0.001*kl return my_loss def kl_loss(log_sigma, mu): def my_kl_loss(y_true, y_pred): return 0.5 * K.sum(K.exp(log_sigma) + K.square(mu) - 1. - log_sigma, axis=1) return my_kl_loss def recon_loss(y_true, y_pred): return mse(y_true, y_pred) def build_vae(state_space, latent_dim): s = Input(shape=(state_space, )) h = Dense(64, activation='relu')(s) h = Dense(64, activation='relu')(h) mu = Dense(latent_dim)(h) log_sigma = Dense(latent_dim)(h) z = Lambda(sampling)([mu, log_sigma]) p = Lambda(lambda x: K.exp(K.sum(-0.5*(x**2 + np.log(2*np.pi)), axis=1)))(mu) encoder = Model(s, [mu, log_sigma, z, p]) latent_inputs = Input(shape=(latent_dim,)) h = Dense(64, activation='relu')(latent_inputs) h = Dense(64, activation='relu')(h) outputs = Dense(state_space,activation='tanh')(h) decoder = Model(latent_inputs, outputs) vae = Model(s, decoder(encoder(s)[2])) vae.compile(optimizer=Adam(0.0003),loss=vae_loss(log_sigma, mu), metrics = [kl_loss(log_sigma, mu), recon_loss]) return encoder, decoder, vae ```
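
A short, hypothetical training sketch (dimensions and data are illustrative assumptions) showing how the three models returned by `build_vae` fit together — the VAE is trained as an autoencoder, and the encoder's fourth output is the density estimate `p(s)` under the latent prior:

```python
# Hypothetical usage sketch for build_vae; not part of the repository.
import numpy as np
from vae_model import build_vae

state_space, latent_dim = 8, 2
encoder, decoder, vae = build_vae(state_space, latent_dim)

# the decoder ends in tanh, so reconstruction targets are kept in [-1, 1]
states = np.random.uniform(-1, 1, size=(1024, state_space)).astype('float32')
vae.fit(states, states, batch_size=64, epochs=5, verbose=0)

mu, log_sigma, z, p = encoder.predict(states[:4])
print(p)  # approximate density of each state under the standard-normal prior
```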
{ "source": "JiecongYANG/voc", "score": 3 }
#### File: tests/stdlib/test_math.py ```python import unittest from ..utils import TranspileTestCase class MathModuleTests(TranspileTestCase): def test_trunc_positive(self): self.assertCodeExecution(""" import math x = 3.1 y = 0.9 z = 2 print(math.trunc(x)) print(math.trunc(y)) print(math.trunc(z)) """) def test_trunc_negative(self): self.assertCodeExecution(""" import math x = -1.1 y = -0.9999999999999999999999999999999999 z = -2 print(math.trunc(x)) print(math.trunc(y)) print(math.trunc(z)) """) @unittest.expectedFailure def test_trunc_exception(self): self.assertCodeExecution(""" import math print(math.trunc('not a number')) """) def test_fabs(self): self.assertCodeExecution(""" import math x1 = 0.0 x2 = 0.9 x3 = -1.1 x4 = -1 x5 = 10 x6 = 0 print(math.fabs(x1)) print(math.fabs(x2)) print(math.fabs(x3)) print(math.fabs(x4)) print(math.fabs(x5)) print(math.fabs(x6)) """) @unittest.expectedFailure def test_fabs_incorrect(self): self.assertCodeExecution(""" import math x7 = "incorrect input" print(math.fabs(x7)) """) ####################################################### # factorial def test_factorial(self): self.assertCodeExecution(""" from math import factorial x = 2 y = 3 z = 4 w = 5 print(factorial(x)) print(factorial(y)) print(factorial(z)) print(factorial(w)) """) @unittest.expectedFailure def test_factorial_fail(self): self.assertCodeExecution(""" from math import factorial x = -1 y = 0 z = 2.4 w = 'a' print(factorial(x)) print(factorial(y)) print(factorial(z)) """) ```
{ "source": "JieDiscovery/ChatWhileWatching", "score": 4 }
#### File: ChatWhileWatching/gitsandbox/lower.py ```python import argparse import os # Credit: http://stackoverflow.com/questions/11540854/ \ # file-as-command-line-argument-for-argparse-error-message-if-argument-is-not-va def is_valid_file(parser, arg): if not os.path.exists(arg): parser.error("The file %s does not exist!" % arg) else: return open(arg, 'rb+') # return an open file handle parser = argparse.ArgumentParser(description='lower utility') parser.add_argument('-f', '--file', dest='file', help='input file', metavar="FILE", type=lambda x: is_valid_file(parser, x)) options = parser.parse_args() while 1: c = options.file.read(1) if not c: break options.file.seek(-1,1) options.file.write(c.lower()) options.file.close() ``` #### File: sibyl/protocol/sibyl_client_tcp_bin_protocol.py ```python from twisted.internet.protocol import Protocol class SibylClientTcpBinProtocol(Protocol): """ The class implementing the Sibyl TCP binary client protocol. It has the following attribute: .. attribute:: proxy The reference to the SibylCientProxy (instance of the :py:class:`~sibyl.main.sibyl_client_proxy.SibylClientProxy` class). .. warning:: All interactions between the client protocol and the user interface *must* go through the SibylClientProxy. In other words you must call one of the methods of :py:class:`~sibyl.main.sibyl_client_proxy.SibylClientProxy` whenever you would like the user interface to do something. .. note:: You must not instantiate this class. This is done by the code called by the main function. .. note:: You have to implement this class. You may add any attribute and method that you see fit to this class. You must implement two methods: :py:meth:`~sibyl.main.protocol.sibyl_cliend_udp_text_protocol.sendRequest` and :py:meth:`~sibyl.main.protocol.sibyl_cliend_udp_text_protocol.dataReceived`. See the corresponding documentation below. """ def __init__(self, sibylProxy): """The implementation of the UDP Text Protocol. Args: sibylClientProxy: the instance of the client proxy, this is the only way to interact with the user interface; """ self.clientProxy = sibylProxy def connectionMade(self): """ The Graphical User Interface (GUI) needs this function to know when to display the request window. DO NOT MODIFY IT. """ self.clientProxy.connectionSuccess() def sendRequest(self, line): """Called by the controller to send the request The :py:class:`~sibyl.main.sibyl_client_proxy.SibylClientProxy` calls this method when the user clicks on the "Send Question" button. Args: line (string): the text of the question .. warning:: You must implement this method. You must not change the parameters, as the controller calls it. """ pass def dataReceived(self, line): """Called by Twisted whenever a data is received Twisted calls this method whenever it has received at least one byte from the corresponding TCP connection. Args: line (bytes): the data received (can be of any length greater than one); .. warning:: You must implement this method. You must not change the parameters, as Twisted calls it. """ pass ```
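
The docstrings above require implementing `sendRequest` and `dataReceived`. A hypothetical sketch only — the real Sibyl binary wire format is fixed by the course specification, which is not reproduced in this file; a 2-byte big-endian length prefix and the proxy callback name are assumptions:

```python
# Hypothetical sketch: one possible way to fill in the two required methods,
# assuming a 2-byte big-endian length prefix on both question and answer.
import struct
from twisted.internet.protocol import Protocol

class LengthPrefixedSibylClient(Protocol):
    def __init__(self, sibylProxy):
        self.clientProxy = sibylProxy
        self._buffer = b''

    def sendRequest(self, line):
        payload = line.encode('utf-8')
        self.transport.write(struct.pack('!H', len(payload)) + payload)

    def dataReceived(self, data):
        self._buffer += data
        if len(self._buffer) < 2:
            return  # wait for the complete length prefix
        (length,) = struct.unpack('!H', self._buffer[:2])
        if len(self._buffer) < 2 + length:
            return  # wait for the complete payload
        answer = self._buffer[2:2 + length].decode('utf-8')
        self._buffer = self._buffer[2 + length:]
        # proxy callback name is an assumption; use the real SibylClientProxy API
        self.clientProxy.responseReceived(answer)
```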
{ "source": "JieDiscovery/dataframe-export", "score": 3 }
#### File: JieDiscovery/dataframe-export/dataframe-export.py
```python
import pandas as pd
import pandas.io.formats.excel
import sys


def main(df, **kwargs):
    """
    Transform a dataframe into a csv or xlsx file, which is saved under /tmp/
    Usage: main(df, sep=None, file_name='data.xlsx', encoding='utf-8', create_xlsx_file=True, sheet_name='Overall')
    :param df: dataframe which needs to be transformed
    :arg string file_name: name of the file generated
    :arg string sheet_name: name of the worksheet generated
    :arg boolean create_xlsx_file: if True, transform df into an xlsx file; otherwise, into a csv file.
    :return: nothing; this function generates a file instead of returning any value
    """
    file_name = kwargs.get('file_name').lstrip("/")
    header = kwargs.get('header', True)
    if isinstance(header, basestring):
        header = header.upper() == 'TRUE'
    create_xlsx_file = kwargs.get('create_xlsx_file', False)
    if isinstance(create_xlsx_file, basestring):
        create_xlsx_file = create_xlsx_file.upper() == 'TRUE'
    sheet_name = kwargs.get('sheet_name', None)
    # set the name of the worksheet, which is "Sheet1" by default
    sheet_name = (sheet_name if sheet_name else "Sheet1")
    sep = kwargs.get('sep', "\t")
    encoding = kwargs.get('encoding', "UTF-8")
    if create_xlsx_file:
        # transform DataFrame df into an xlsx file
        pandas.io.formats.excel.ExcelFormatter.header_style = None
        panda_df = df.toPandas()
        writer = pd.ExcelWriter("/tmp/" + file_name)
        panda_df.to_excel(
            writer,
            sheet_name=sheet_name,
            index=kwargs.get("index"),
            header=header,
            na_rep='',
            float_format=kwargs.get("float_format"),
            columns=kwargs.get('columns', None),
            index_label=kwargs.get("index_label"),
            startrow=kwargs.get('startrow', 0),
            startcol=kwargs.get('startcol', 0),
            engine=kwargs.get('engine', None),
            merge_cells=kwargs.get('merge_cells', None),
            encoding=encoding,
            verbose=kwargs.get('verbose', True),
            freeze_panes=kwargs.get('freeze_panes')
        )
        i = 0
        # to make the generated .xlsx easier to read, adjust each column's
        # width to the max length of the cell contents in that column
        for column_name, column_items in panda_df.items():
            column_width = max(column_items.astype(str).map(len).max(), len(str(column_name)) * 0.91) * 0.98
            worksheet = writer.sheets[sheet_name]
            worksheet.set_column(i, i, column_width)
            i += 1
        writer.save()
    else:
        # transform DataFrame df into a csv file
        df.coalesce(1).toPandas().to_csv(
            sep=sep,
            header=header,
            index=kwargs.get("index", True),
            encoding=encoding,
            path_or_buf='/tmp/' + file_name,
            float_format=kwargs.get("float_format"),
            na_rep=kwargs.get('na_rep', ""),
            columns=kwargs.get("columns"),
            index_label=kwargs.get("index_label"),
            mode='w',
            compression='infer',
            quoting=kwargs.get("quoting"),
            quotechar=kwargs.get("quotechar", '"'),
            line_terminator=kwargs.get("line_terminator"),
            chunksize=kwargs.get("chunksize"),
            tupleize_cols=kwargs.get("tupleize_cols"),
            date_format=kwargs.get("date_format"),
            doublequote=kwargs.get("doublequote", True),
            escapechar=kwargs.get("escapechar"),
            decimal=kwargs.get("decimal", '.')
        )
    return


if __name__=='__main__':
    main(sys.argv[1],  # df
         **dict(arg.split('=') for arg in sys.argv[2:]))  # kwargs
```
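
A hypothetical usage sketch (the Spark session and column names are assumptions; the module is imported via `importlib` because its filename contains a hyphen):

```python
# Hypothetical sketch: exporting a small Spark DataFrame as /tmp/data.xlsx.
from pyspark.sql import SparkSession
import importlib

spark = SparkSession.builder.getOrCreate()
df = spark.createDataFrame([(1, "a"), (2, "b")], ["id", "label"])

# a hyphen in "dataframe-export.py" blocks a plain import statement
exporter = importlib.import_module("dataframe-export")
exporter.main(df, file_name="data.xlsx", create_xlsx_file=True, sheet_name="Overall")
```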
{ "source": "jiedou/study", "score": 3 }
#### File: study/python/decorate.py
```python
def decorate_f(func):
    def inner():
        print("before f() is called")
        func()
        print("after f() is called")
    return inner

@decorate_f
def f():
    print("f() is called")

if __name__=="__main__":
    f()
```
#### File: study/python/property02.py
```python
class Test:
    def __init__(self,val=0):
        print("init")
        self.__val=val
    def get_val(self):
        print("get_val")
        return self.__val
    def set_val(self,val):
        print("set_val:",val)
        ## the attribute val serves as the public interface to the private variable
        self.__val=val
    val=property(get_val,set_val)

a=Test()
a.val=1  # equivalent to calling set_val
print(a.val)  # equivalent to calling get_val
```
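
For comparison, the same read/write attribute can be declared with the `@property` decorator syntax; this block is an added illustration, not part of the original file:

```python
# Equivalent formulation of property02.py using the @property decorator
# instead of the property(get_val, set_val) call.
class Test2:
    def __init__(self, val=0):
        self.__val = val

    @property
    def val(self):
        print("get_val")
        return self.__val

    @val.setter
    def val(self, val):
        print("set_val:", val)
        self.__val = val

b = Test2()
b.val = 1     # triggers the setter
print(b.val)  # triggers the getter
```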
{ "source": "jieeeeeeeeeee/SatelliteSurfaceReconstruction", "score": 2 }
#### File: ssr/gdal_utility/pan_sharpening.py ```python import os import subprocess from ssr.utility.logging_extension import logger from ssr.utility.os_extension import get_corresponding_files_in_directories from ssr.utility.os_extension import mkdir_safely from ssr.gdal_utility.run_gdal import run_gdal_cmd def perform_pan_sharpening( pan_ifp, msi_ifp, ofp, resampling_algorithm="cubic" ): # https://gdal.org/programs/gdal_pansharpen.html # https://gis.stackexchange.com/questions/270476/pansharpening-using-gdal-tools # GDAL pan sharpening algorithm = weighted Brovey algorithm ext = os.path.splitext(ofp)[1] of = ext[1:] call_params = ["python C:/Users/Administrator/anaconda3/envs/colmap_sat/Lib/site-packages/osgeo/scripts/gdal_pansharpen.py"] call_params += ["-of", of, "-r", resampling_algorithm] call_params += [pan_ifp, msi_ifp, ofp] logger.vinfo("call_params", call_params) # for windows call_params = ' '.join(call_params) print(call_params) run_gdal_cmd(call_params) #sharp_process = subprocess.Popen(call_params) # sharp_process.wait() def perform_pan_sharpening_for_folder( pan_idp, msi_idp, odp, resampling_algorithm="cubic" ): # The created files are using the PAN (AND NOT THE MSI) STEM (i.e. P1BS # instead of M1BS) so they can directly be used to replace the original # PAN images mkdir_safely(odp) def get_correspondence_callback(pan_fn): # PAN example name: 0001_WV03_15JAN05_135727-P1BS-500497282040_01_P001.png # MSI example name: 0001_WV03_15JAN05_135727-M1BS-500497282040_01_P001.png pan_parts = pan_fn.split("-P1BS-", 1) msi_fn = pan_parts[0] + "-M1BS-" + pan_parts[1] return msi_fn pan_list, msi_list = get_corresponding_files_in_directories( pan_idp, msi_idp, get_correspondence_callback=get_correspondence_callback, ) for pan_ifp, msi_ifp in zip(pan_list, msi_list): msi_sharpened_ofp = os.path.join(odp, os.path.basename(pan_ifp)) perform_pan_sharpening( pan_ifp, msi_ifp, msi_sharpened_ofp, resampling_algorithm=resampling_algorithm, ) if __name__ == "__main__": # ========================= Single File ======================= # pan_ifp = '/path/to/0001_WV03_15JAN05_135727-P1BS-500497282040_01_P001_PAN.png' # msi_ifp = '/path/to/0001_WV03_15JAN05_135727-M1BS-500497282040_01_P001_MSI.png' # ofp = 'path/to/0001_WV03_15JAN05_135727-P1BS-500497282040_01_P001_SHARPENED.png' # # perform_pan_sharpening( # pan_ifp, # msi_ifp, # ofp, # resampling_algorithm='cubic') # ========================= Single File ======================= #pan_idp = "/path/to/pan" #msi_idp = "/path/to/msi" #odp = "path/to/pansharped" # #perform_pan_sharpening_for_folder( # pan_idp, msi_idp, odp, resampling_algorithm="cubic" #) # ========================= Single File ======================= pan_ifp = r"D:\experiment\site1-5-13\ssr\pan\images\0000_WV03_15APR02_134716-P1BS-500276959010_02_P001.png" msi_ifp = r'D:\experiment\site1-5-13\ssr\msi\images\0000_WV03_15APR02_134716-M1BS-500276959010_02_P001.png' ofp = r'D:\experiment\site1-5-13\ssr\sharpened_with_skew/a.png' perform_pan_sharpening( pan_ifp, msi_ifp, ofp, resampling_algorithm='cubic') ``` #### File: ssr/gdal_utility/run_gdal.py ```python import logging import subprocess import shlex def run_gdal_cmd(cmd, disable_log=False, input=None): logging.info("logging run_cmd") if not disable_log: logging.info("Running subprocess: {}".format(cmd)) try: #for windows #process = subprocess.Popen( # shlex.split(cmd), # stdout=subprocess.PIPE, # stderr=subprocess.STDOUT, # stdin=subprocess.PIPE, #) process = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, 
            stderr=subprocess.STDOUT,
            stdin=subprocess.PIPE,
        )
        if input is not None:
            # interacting with a short-running subprocess
            output = process.communicate(input=input.encode())[0]
            if not disable_log:
                logging.info(output.decode())
            else:
                process.wait()
        else:
            # interacting with a long-running subprocess
            if not disable_log:
                while True:
                    output = process.stdout.readline().decode()
                    if output == "" and process.poll() is not None:
                        break
                    if output:
                        logging.info(output)
            else:
                process.wait()
    except (OSError, subprocess.CalledProcessError) as exception:
        logging.error("Exception occurred: {}, cmd: {}".format(exception, cmd))
        logging.error("Subprocess failed")
        exit(-1)
    else:
        if not disable_log:
            # no exception was raised
            logging.info("Subprocess finished")
```
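
A hypothetical batch invocation of `perform_pan_sharpening_for_folder` defined above; the directory paths mirror the single-file example in the module's `__main__` block and are placeholders:

```python
# Hypothetical sketch: pan-sharpen every PAN/MSI pair in a folder, writing the
# results under a new output directory using the PAN file stems.
from ssr.gdal_utility.pan_sharpening import perform_pan_sharpening_for_folder

perform_pan_sharpening_for_folder(
    pan_idp=r"D:\experiment\site1-5-13\ssr\pan\images",
    msi_idp=r"D:\experiment\site1-5-13\ssr\msi\images",
    odp=r"D:\experiment\site1-5-13\ssr\sharpened",
    resampling_algorithm="cubic",
)
```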
{ "source": "jieeshen/TF_toys", "score": 3 }
#### File: TF_toys/TFqueue/TF_fifo.py ```python import tensorflow as tf import numpy as np N_SAMPES=10 N_THREADS=10 def main(): all_x = 10 * np.random.randn(N_SAMPES,4)+1 all_y = np.random.randint(0,2,size=N_SAMPES) print [all_x, all_y] # tf.random_shuffle([all_x,all_y]) # queue = tf.FIFOQueue(capacity=1000, dtypes=[tf.float32,tf.int32],shapes=[[8],[]]) queue = tf.RandomShuffleQueue(capacity=10, min_after_dequeue=0, dtypes=[tf.float32,tf.int32],shapes=[[4],[]]) enqueue_op = queue.enqueue_many([all_x,all_y]) data_sample, label_sample = queue.dequeue_many(2) # label_sample=tf.Print(label_sample, label_sample=[queue.size()],message="left in queue: ") qr=tf.train.QueueRunner(queue, [enqueue_op] * N_THREADS) tf.train.add_queue_runner(qr) with tf.Session() as sess: coord=tf.train.Coordinator() enqueue_threads = qr.create_threads(sess, coord=coord, start=True) for epoch in xrange(3): for step in xrange(10/2): # if coord.should_stop(): # break one_data, one_label = sess.run([data_sample,label_sample]) print step, print(one_data), print(one_label) coord.request_stop() coord.join(enqueue_threads) if __name__ == '__main__': main() ```
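
For comparison (a hypothetical addition, not in the original): the same shuffled batching can be expressed with the higher-level `tf.train.shuffle_batch`, which creates the queue and its `QueueRunner` internally:

```python
# Hypothetical sketch using the same TF 1.x queue APIs at a higher level.
import tensorflow as tf
import numpy as np

all_x = (10 * np.random.randn(10, 4) + 1).astype(np.float32)
all_y = np.random.randint(0, 2, size=10).astype(np.int32)

x, y = tf.train.slice_input_producer([all_x, all_y])
x_batch, y_batch = tf.train.shuffle_batch([x, y], batch_size=2,
                                          capacity=10, min_after_dequeue=0)

with tf.Session() as sess:
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess, coord=coord)
    print(sess.run([x_batch, y_batch]))
    coord.request_stop()
    coord.join(threads)
```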
{ "source": "Jie-Fang/Fleet", "score": 2 }
#### File: examples/distribute_ctr/model.py ```python import math import paddle.fluid as fluid from distribute_base import FleetRunnerBase from argument import params_args class CTR(FleetRunnerBase): """ DNN for Click-Through Rate prediction help: https://github.com/PaddlePaddle/models/tree/develop/PaddleRec/ctr """ def input_data(self, params): dense_feature_dim = params.dense_feature_dim self.dense_input = fluid.layers.data( name="dense_input", shape=[dense_feature_dim], dtype='float32') self.sparse_input_ids = [ fluid.layers.data( name="C" + str(i), shape=[1], lod_level=1, dtype='int64') for i in range(1, 27) ] self.label = fluid.layers.data( name='label', shape=[1], dtype='int64') self._words = [self.dense_input ] + self.sparse_input_ids + [self.label] return self._words def net(self, inputs, params): sparse_feature_dim = params.sparse_feature_dim embedding_size = params.embedding_size words = inputs def embedding_layer(input): return fluid.layers.embedding( input=input, is_sparse=True, is_distributed=False, size=[sparse_feature_dim, embedding_size], param_attr=fluid.ParamAttr( name="SparseFeatFactors", initializer=fluid.initializer.Uniform())) sparse_embed_seq = list(map(embedding_layer, words[1:-1])) concated = fluid.layers.concat( sparse_embed_seq + words[0:1], axis=1) fc1 = fluid.layers.fc(input=concated, size=400, act='relu', param_attr=fluid.ParamAttr(initializer=fluid.initializer.Normal( scale=1 / math.sqrt(concated.shape[1])))) fc2 = fluid.layers.fc(input=fc1, size=400, act='relu', param_attr=fluid.ParamAttr( initializer=fluid.initializer.Normal( scale=1 / math.sqrt(fc1.shape[1])))) fc3 = fluid.layers.fc(input=fc2, size=400, act='relu', param_attr=fluid.ParamAttr( initializer=fluid.initializer.Normal( scale=1 / math.sqrt(fc2.shape[1])))) predict = fluid.layers.fc(input=fc3, size=2, act='softmax', param_attr=fluid.ParamAttr(initializer=fluid.initializer.Normal( scale=1 / math.sqrt(fc3.shape[1])))) cost = fluid.layers.cross_entropy(input=predict, label=words[-1]) avg_cost = fluid.layers.reduce_sum(cost) accuracy = fluid.layers.accuracy(input=predict, label=words[-1]) auc_var, batch_auc_var, auc_states = \ fluid.layers.auc(input=predict, label=words[-1], num_thresholds=2 ** 12, slide_steps=20) return avg_cost, auc_var, batch_auc_var def py_reader(self, params): py_reader = fluid.layers.create_py_reader_by_data( capacity=64, feed_list=self._words, name='py_reader', use_double_buffer=False) return py_reader def dataset_reader(self, inputs, params): dataset = fluid.DatasetFactory().create_dataset() dataset.set_use_var([self.dense_input] + self.sparse_input_ids + [self.label]) pipe_command = "python dataset_generator.py" dataset.set_pipe_command(pipe_command) dataset.set_batch_size(params.batch_size) thread_num = int(params.cpu_num) dataset.set_thread(thread_num) return dataset if __name__ == '__main__': params = params_args() model = CTR() model.runtime_main(params) ```
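
The `dataset_reader` above pipes training data through `python dataset_generator.py`. A hypothetical sketch of what that generator might look like — the Criteo-style field layout and the hash bucket size are assumptions, not taken from this repository:

```python
# Hypothetical dataset_generator.py for the pipe command above; feature names
# (dense_input, C1..C26, label) match the model's input_data definition.
import paddle.fluid.incubate.data_generator as dg

class CTRDataset(dg.MultiSlotDataGenerator):
    def generate_sample(self, line):
        def reader():
            fields = line.rstrip('\n').split('\t')
            label = [int(fields[0])]
            dense = [float(x) if x else 0.0 for x in fields[1:14]]
            # bucket size is an illustrative assumption
            sparse = [[hash(fields[i]) % 1000001] for i in range(14, 40)]
            yield [('dense_input', dense)] + \
                  [('C' + str(i - 13), sparse[i - 14]) for i in range(14, 40)] + \
                  [('label', label)]
        return reader

if __name__ == '__main__':
    CTRDataset().run_from_stdin()
```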
{ "source": "jiefangjun/PyScript", "score": 3 }
#### File: jiefangjun/PyScript/nmapScan.py
```python
import nmap
import argparse


def nmapScan(tgtHost, tgtPort):
    nmScan = nmap.PortScanner()
    nmScan.scan(tgtHost, tgtPort)
    state = nmScan[tgtHost]['tcp'][int(tgtPort)]['state']
    print("[*] " + tgtHost + " tcp/" + tgtPort + " " + state)


def main():
    parser = argparse.ArgumentParser(description='nmapScan powered by python')
    parser.add_argument('-H', metavar='host', help='specify target host')
    parser.add_argument('-P', metavar='ports', help='specify target ports separated by commas')

    args = parser.parse_args()
    tgtHost = args.H
    tgtPorts = str(args.P).split(',')
    for tgtPort in tgtPorts:
        nmapScan(tgtHost, tgtPort)


if __name__ == '__main__':
    main()
```
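
An example invocation (host and ports are placeholders; requires the python-nmap package and the nmap binary on PATH):

```python
# Example command line for the script above:
#   python nmapScan.py -H 192.168.1.50 -P 21,22,80,443
# which prints one line per port, e.g.:
#   [*] 192.168.1.50 tcp/22 open
```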
{ "source": "Jie-Fang/trt-samples-for-hackathon-cn", "score": 2 }
#### File: cookbook/00-MNISTData/loadMnistData.py ```python import cv2 import gzip import numpy as np # http://yann.lecun.com/exdb/mnist/, https://storage.googleapis.com/cvdf-datasets/mnist/ class MnistData(): def __init__(self, dataPath, isOneHot = False): with open(dataPath+'train-images-idx3-ubyte.gz', 'rb') as f: self.trainImage = self.extractImage(f) with open(dataPath+'train-labels-idx1-ubyte.gz', 'rb') as f: self.trainLabel = self.extractLabel(f) with open(dataPath+'t10k-images-idx3-ubyte.gz', 'rb') as f: self.testImage = self.extractImage(f) with open(dataPath+'t10k-labels-idx1-ubyte.gz', 'rb') as f: self.testLabel = self.extractLabel(f, isOneHot=isOneHot) self.isOneHot = isOneHot if self.isOneHot: self.trainLabel = self.convertToOneHot(self.trainLabel) self.testLabel = self.convertToOneHot(self.testLabel) else: self.trainLabel = self.trainLabel.astype(np.float32) self.testLabel = self.testLabel.astype(np.float32) def getBatch(self, batchSize, isTrain): if isTrain: index = np.random.choice(len(self.trainImage), batchSize, True) return self.trainImage[index], self.trainLabel[index] else: index = np.random.choice(len(self.testImage), batchSize, True) return self.testImage[index], self.testLabel[index] def read4Byte(self, byteStream): dt = np.dtype(np.uint32).newbyteorder('>') return np.frombuffer(byteStream.read(4), dtype=dt)[0] def extractImage(self, f): print('Extracting', f.name) with gzip.GzipFile(fileobj=f) as byteStream: if self.read4Byte(byteStream) != 2051: raise ValueError("Failed reading file!") nImage = self.read4Byte(byteStream) rows = self.read4Byte(byteStream) cols = self.read4Byte(byteStream) buf = byteStream.read(rows * cols * nImage) return np.frombuffer(buf, dtype = np.uint8).astype(np.float32).reshape(nImage, rows, cols, 1)/255 def extractLabel(self, f, isOneHot=False, nClass=10): print('Extracting', f.name) with gzip.GzipFile(fileobj=f) as byteStream: if self.read4Byte(byteStream) != 2049: raise ValueError("Failed reading file!") nLabel = self.read4Byte(byteStream) buf = byteStream.read(nLabel) return np.frombuffer(buf, dtype=np.uint8) def convertToOneHot(self, labelIndex, nClass=10): nLabel = labelIndex.shape[0] res = np.zeros((nLabel, nClass), dtype=np.float32) offset = np.arange(nLabel) * nClass res.flat[offset + labelIndex] = 1 return res def saveImage(self, count, outputPath, isTrain): if self.isOneHot: return image, label = ([self.testImage, self.testLabel],[self.trainImage, self.trainLabel])[isTrain] for i in range(min(count,10000)): cv2.imwrite(outputPath+str(i).zfill(5)+"-"+str(label[i])+".jpg", (image[i]*255).astype(np.uint8)) ``` #### File: CCLPlugin/TRT6-StaticShape/testCCLPlugin.py ```python import ctypes import numpy as np import tensorrt as trt import pycuda.autoinit import pycuda.driver as cuda soFilePath = "./CCLPlugin.so" height = 384 width = 640 np.random.seed(97) def getCCLPlugin(): for c in trt.get_plugin_registry().plugin_creator_list: if c.name == 'CCLPlugin': p0 = trt.PluginField("minPixelScore", np.array([0.7], dtype=np.float32), trt.PluginFieldType.FLOAT32) p1 = trt.PluginField("minLinkScore", np.array([0.7], dtype=np.float32), trt.PluginFieldType.FLOAT32) p2 = trt.PluginField("minArea", np.array([10], dtype=np.int32), trt.PluginFieldType.INT32) p3 = trt.PluginField("maxcomponentCount", np.array([65536], dtype=np.int32), trt.PluginFieldType.INT32) return c.create_plugin(c.name, trt.PluginFieldCollection([p0, p1, p2, p3])) return None def buildEngine(logger): builder = trt.Builder(logger) builder.max_batch_size = 1 
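    # Note: this sample uses the TensorRT 6 era implicit-batch builder API, where
    # workspace size and fp16 mode are set on the builder itself rather than on
    # an IBuilderConfig object (compare with the testMask2DPlugin sample below).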
builder.max_workspace_size = 3 << 30 builder.fp16_mode = False network = builder.create_network() inputT0 = network.add_input('pixelScore', trt.float32, (height, width)) inputT1 = network.add_input('linkScore', trt.float32, (8, height, width)) cclLayer = network.add_plugin_v2([inputT0, inputT1], getCCLPlugin()) network.mark_output(cclLayer.get_output(0)) network.mark_output(cclLayer.get_output(1)) return builder.build_cuda_engine(network) def run(): logger = trt.Logger(trt.Logger.ERROR) trt.init_libnvinfer_plugins(logger, '') ctypes.cdll.LoadLibrary(soFilePath) engine = buildEngine(logger) if engine == None: print("Failed building engine!") return None print("Succeeded building engine!") context = engine.create_execution_context() stream = cuda.Stream() inputH0 = np.ascontiguousarray(np.random.rand(height, width).reshape(-1)) inputH1 = np.ascontiguousarray(np.random.rand(8, height, width).reshape(-1)) inputD0 = cuda.mem_alloc(inputH0.nbytes) inputD1 = cuda.mem_alloc(inputH1.nbytes) outputH0 = np.empty(context.get_binding_shape(2), dtype=trt.nptype(engine.get_binding_dtype(2))) outputH1 = np.empty(context.get_binding_shape(3), dtype=trt.nptype(engine.get_binding_dtype(3))) outputD0 = cuda.mem_alloc(outputH0.nbytes) outputD1 = cuda.mem_alloc(outputH1.nbytes) cuda.memcpy_htod_async(inputD0, inputH0, stream) cuda.memcpy_htod_async(inputD1, inputH1, stream) stream.synchronize() context.execute_async(1, [int(inputD0), int(inputD1), int(outputD0), int(outputD1)], stream.handle) stream.synchronize() cuda.memcpy_dtoh_async(outputH0, outputD0, stream) cuda.memcpy_dtoh_async(outputH1, outputD1, stream) stream.synchronize() print(np.shape(outputH0), np.shape(outputH1)) #print(outputH0) #print(outputH1) if __name__ == '__main__': run() print("test finish!") ``` #### File: PluginReposity/Mask2DPlugin/testMask2DPlugin.py ```python import ctypes import numpy as np import tensorrt as trt import pycuda.autoinit import pycuda.driver as cuda npToNumber = {np.float32: 0, np.float16: 1, np.int8: 2, np.int32: 3} soFilePath = "./Mask2DPlugin.so" globalMask2DTrueValue = 5 globalMask2DFalseValue = -5 np.random.seed(97) def mask2DCPU(inputH0, inputH1, inputH2, mask2DTrueValue, mask2DFalseValue): outputH0CPU = np.full([inputH0.shape[0], 1, *(inputH0.shape[2:])], mask2DFalseValue, dtype=np.float32) for j in range(inputH2.shape[0]): outputH0CPU[j, 0, :inputH1[j], :inputH2[j]] = mask2DTrueValue return outputH0CPU def getMask2DPlugin(datatype, mask2DTrueValue, mask2DFalseValue): for c in trt.get_plugin_registry().plugin_creator_list: if c.name == 'Mask2DPlugin': p0 = trt.PluginField("datatype", np.array([npToNumber[datatype]], dtype=np.int32), trt.PluginFieldType.INT32) p1 = trt.PluginField("mask2DTrueValue", np.array([mask2DTrueValue], dtype=np.float32), trt.PluginFieldType.FLOAT32) p2 = trt.PluginField("mask2DFalseValue", np.array([mask2DFalseValue], dtype=np.float32), trt.PluginFieldType.FLOAT32) return c.create_plugin(c.name, trt.PluginFieldCollection([p0, p1, p2])) return None def buildEngine(logger, outDatatype): builder = trt.Builder(logger) network = builder.create_network(1) profile = builder.create_optimization_profile() config = builder.create_builder_config() config.max_workspace_size = 1 << 30 config.flags = int(outDatatype == np.float16) inputT0 = network.add_input('inputT0', trt.float32, [-1, -1, -1, -1]) profile.set_shape(inputT0.name, [1, 1, 1, 1], [4, 3, 30, 40], [9, 12, 30, 40]) inputT1 = network.add_input('inputT1', trt.int32, [-1]) profile.set_shape(inputT1.name, [1], [4], [9]) inputT2 = 
network.add_input('inputT2', trt.int32, [-1]) profile.set_shape(inputT2.name, [1], [4], [9]) config.add_optimization_profile(profile) pluginLayer = network.add_plugin_v2([inputT0, inputT1, inputT2], getMask2DPlugin(outDatatype, globalMask2DTrueValue, globalMask2DFalseValue)) network.mark_output(pluginLayer.get_output(0)) return builder.build_engine(network, config) def run(inDim, outDatatype): print("test", inDim, outDatatype) logger = trt.Logger(trt.Logger.ERROR) trt.init_libnvinfer_plugins(logger, '') ctypes.cdll.LoadLibrary(soFilePath) engine = buildEngine(logger, outDatatype) if engine == None: print("Failed building engine!") return None print("Succeeded building engine!") context = engine.create_execution_context() context.set_binding_shape(0, inDim) context.set_binding_shape(1, inDim[:1]) context.set_binding_shape(2, inDim[:1]) #print("Bind0->",engine.get_binding_shape(0),context.get_binding_shape(0)); #print("Bind1->",engine.get_binding_shape(1),context.get_binding_shape(1)); #print("Bind2->",engine.get_binding_shape(2),context.get_binding_shape(2)); print("All bind:", context.all_binding_shapes_specified) stream = cuda.Stream() data0 = np.full(inDim, 1, dtype=np.float32) data1 = np.random.randint(1, inDim[2], inDim[:1], dtype=np.int32) data2 = np.random.randint(1, inDim[3], inDim[:1], dtype=np.int32) inputH0 = np.ascontiguousarray(data0) inputD0 = cuda.mem_alloc(inputH0.nbytes) inputH1 = np.ascontiguousarray(data1) inputD1 = cuda.mem_alloc(inputH1.nbytes) inputH2 = np.ascontiguousarray(data2) inputD2 = cuda.mem_alloc(inputH2.nbytes) outputH0 = np.empty(context.get_binding_shape(3), dtype=trt.nptype(engine.get_binding_dtype(3))) outputD0 = cuda.mem_alloc(outputH0.nbytes) cuda.memcpy_htod_async(inputD0, inputH0, stream) cuda.memcpy_htod_async(inputD1, inputH1, stream) cuda.memcpy_htod_async(inputD2, inputH2, stream) context.execute_async_v2([int(inputD0), int(inputD1), int(inputD2), int(outputD0)], stream.handle) cuda.memcpy_dtoh_async(outputH0, outputD0, stream) stream.synchronize() outputH0CPU = mask2DCPU(inputH0, inputH1, inputH2, globalMask2DTrueValue, globalMask2DFalseValue) #print("InputH0->",inputH0.shape, engine.get_binding_dtype(0)) #print(inputH0) #print("InputH1->",inputH1.shape, engine.get_binding_dtype(1)) #print(inputH1) #print("InputH2->",inputH2.shape, engine.get_binding_dtype(2)) #print(inputH2) #print("OutputH0->",outputH0.shape, engine.get_binding_dtype(3)) #print(outputH0) #print("OutputH0CPU->",outputH0CPU.shape) #print(outputH0CPU) print("Check result:", ["True" if np.all(outputH0 == outputH0CPU) else "False"][0]) if __name__ == '__main__': np.set_printoptions(precision=4, linewidth=200, suppress=True) cuda.Device(0).make_context() run([4, 3, 30, 40], np.float32) run([4, 3, 30, 40], np.float16) cuda.Context.pop() print("test finish!") ``` #### File: PluginReposity/ReversePlugin/testReversePlugin.py ```python import ctypes import numpy as np import tensorrt as trt import pycuda.autoinit import pycuda.driver as cuda npToTrt = {np.int8: trt.int8, np.float16: trt.float16, np.int32: trt.int32, np.float32: trt.float32} soFilePath = "./ReversePlugin.so" def reverseCPU(inputH0, inputH1): outputH0CPU = np.zeros_like(inputH0) for i in range(inputH0.shape[0]): validWidth = inputH1[i] for k in range(validWidth): outputH0CPU[i, validWidth - 1 - k, :] = inputH0[i, k, :] return outputH0CPU def cleanTrash(outputH0, inputH1): # clean the trash data in the output of GPU sh = outputH0.shape for i in range(sh[0]): outputH0[i, inputH1[i]:, :] = 0 return outputH0 def 
getReversePlugin(): for c in trt.get_plugin_registry().plugin_creator_list: if c.name == 'ReversePlugin': return c.create_plugin(c.name, trt.PluginFieldCollection([])) return None def buildEngine(logger, inDatatype, nDimIn): builder = trt.Builder(logger) network = builder.create_network(1) profile = builder.create_optimization_profile() config = builder.create_builder_config() config.max_workspace_size = 1 << 30 config.flags = int(inDatatype == np.float16) inputT0 = network.add_input('inputT0', npToTrt[inDatatype], [-1, -1, -1]) profile.set_shape(inputT0.name, (1, 1, 1), (2, 4, 3), (4, 9, 12)) inputT1 = network.add_input('inputT1', trt.int32, [-1]) profile.set_shape(inputT1.name, [1], [4], [9]) config.add_optimization_profile(profile) pluginLayer = network.add_plugin_v2([inputT0, inputT1], getReversePlugin()) network.mark_output(pluginLayer.get_output(0)) return builder.build_engine(network, config) def run(inDim, inDatatype): print("test", inDim, inDatatype) logger = trt.Logger(trt.Logger.ERROR) trt.init_libnvinfer_plugins(logger, '') ctypes.cdll.LoadLibrary(soFilePath) engine = buildEngine(logger, inDatatype, len(inDim)) if engine == None: print("Failed building engine!") return None print("Succeeded building engine!") context = engine.create_execution_context() context.set_binding_shape(0, inDim) context.set_binding_shape(1, inDim[:1]) #print("Bind0->",engine.get_binding_shape(0),context.get_binding_shape(0)) #print("Bind1->",engine.get_binding_shape(1),context.get_binding_shape(1)) #print("Bind2->",engine.get_binding_shape(2),context.get_binding_shape(2)) #print("All bind:",context.all_binding_shapes_specified) stream = cuda.Stream() data0 = np.arange(np.prod(inDim), dtype=inDatatype).reshape(inDim) data1 = np.arange(1, inDim[0] + 1, dtype=np.int32) data1[data1 > inDim[1]] = inDim[1] inputH0 = np.ascontiguousarray(data0) inputD0 = cuda.mem_alloc(inputH0.nbytes) inputH1 = np.ascontiguousarray(data1) inputD1 = cuda.mem_alloc(inputH1.nbytes) outputH0 = np.empty(context.get_binding_shape(2), dtype=trt.nptype(engine.get_binding_dtype(2))) outputD0 = cuda.mem_alloc(outputH0.nbytes) cuda.memcpy_htod_async(inputD0, inputH0, stream) cuda.memcpy_htod_async(inputD1, inputH1, stream) context.execute_async_v2([int(inputD0), int(inputD1), int(outputD0)], stream.handle) cuda.memcpy_dtoh_async(outputH0, outputD0, stream) stream.synchronize() outputH0CPU = reverseCPU(inputH0, inputH1) #print("InputH0->",inputH0.shape, engine.get_binding_dtype(0)) #print(inputH0) #print("InputH1->",inputH1.shape, engine.get_binding_dtype(1)) #print(inputH1) #print("OutputH0->",outputH0.shape, engine.get_binding_dtype(2)) #print(cleanTrash(outputH0,inputH1)) #print("OutputH0CPU->",outputH0CPU.shape) #print(outputH0CPU) print("Check result:", ["True" if np.all(cleanTrash(outputH0, inputH1) == outputH0CPU) else "False"][0]) if __name__ == '__main__': np.set_printoptions(precision=4, linewidth=200, suppress=True) cuda.Device(0).make_context() run([2, 4, 3], np.int32) run([4, 9, 12], np.int32) run([2, 4, 3], np.float32) run([4, 9, 3], np.float32) run([2, 4, 3], np.float16) run([4, 9, 12], np.float16) cuda.Context.pop() print("test finish!") ``` #### File: PluginReposity/WherePlugin/testWherePlugin.py ```python import os import ctypes import numpy as np import tensorrt as trt import pycuda.autoinit import pycuda.driver as cuda soFilePath = "./WherePlugin.so" usingFp16 = False def whereCPU(condition, inputX, inputY): return inputX * condition + inputY * (1 - condition) def getWherePlugin(): for c in 
trt.get_plugin_registry().plugin_creator_list: if c.name == 'WherePlugin': return c.create_plugin(c.name, trt.PluginFieldCollection([])) return None def buildEngine(logger, nRow, nCol): builder = trt.Builder(logger) builder.max_batch_size = 4 builder.max_workspace_size = 3 << 30 builder.fp16_mode = usingFp16 network = builder.create_network() tensor1 = network.add_input('condition', trt.int32, (nRow, nCol)) tensor2 = network.add_input('inputX', trt.float32, (nRow, nCol)) tensor3 = network.add_input('inputY', trt.float32, (nRow, nCol)) whereLayer = network.add_plugin_v2([tensor1, tensor2, tensor3], getWherePlugin()) network.mark_output(whereLayer.get_output(0)) return builder.build_cuda_engine(network) def run(batchSize, nRow, nCol): print("test", batchSize, nRow, nCol) logger = trt.Logger(trt.Logger.ERROR) trt.init_libnvinfer_plugins(logger, '') ctypes.cdll.LoadLibrary(soFilePath) engine = buildEngine(logger, nRow, nCol) if engine == None: print("Failed building engine!") return None print("Succeeded building engine!") context = engine.create_execution_context() stream = cuda.Stream() condition = np.array(np.random.randint(0, 2, [batchSize, nRow, nCol]), dtype=np.int32) inputX = np.full([batchSize, nRow, nCol], 1, dtype=np.float32) inputY = np.full([batchSize, nRow, nCol], -1, dtype=np.float32) inputH0 = np.ascontiguousarray(condition.reshape(-1)) inputH1 = np.ascontiguousarray(inputX.reshape(-1)) inputH2 = np.ascontiguousarray(inputY.reshape(-1)) inputD0 = cuda.mem_alloc(inputH0.nbytes) inputD1 = cuda.mem_alloc(inputH1.nbytes) inputD2 = cuda.mem_alloc(inputH2.nbytes) outputH0 = np.empty((batchSize, ) + tuple(engine.get_binding_shape(3)), dtype=trt.nptype(engine.get_binding_dtype(3))) outputD0 = cuda.mem_alloc(outputH0.nbytes) cuda.memcpy_htod_async(inputD0, inputH0, stream) cuda.memcpy_htod_async(inputD1, inputH1, stream) cuda.memcpy_htod_async(inputD2, inputH2, stream) context.execute_async(batchSize, [int(inputD0), int(inputD1), int(inputD2), int(outputD0)], stream.handle) cuda.memcpy_dtoh_async(outputH0, outputD0, stream) stream.synchronize() outputH0CPU = whereCPU(condition, inputX, inputY) print("Check result:", ["True" if np.all(outputH0 == outputH0CPU) else "False"][0]) if __name__ == '__main__': np.set_printoptions(precision=4, linewidth=200, suppress=True) run(4, 5, 4) run(4, 20, 9) run(4, 200, 10) print("test finish!") ``` #### File: 06-PluginAndParser/pyTorch-LayerNorm/testLayerNormPlugin.py ```python import os import ctypes import numpy as np from time import time_ns import tensorrt as trt from cuda import cudart soFilePath = "./LayerNorm.so" nTime = 30 nIn, cIn, hIn, wIn = 2, 3, 4, 5 npDataType = np.float32 globalEpsilon = 1e-5 np.random.seed(97) def check(a, b, weak=False): if weak: return np.all(np.abs(a - b) < globalEpsilon) else: return np.all(a == b) def layerNormCPU(bufferH, epsilon): _x = bufferH[0] _0 = np.mean(_x.reshape(_x.shape[0], -1), 1)[:, np.newaxis, np.newaxis, np.newaxis] _1 = _x - _0 _2 = _1 * _1 _3 = np.mean(_2.reshape(_x.shape[0], -1), 1)[:, np.newaxis, np.newaxis, np.newaxis] _4 = _3 + epsilon _5 = np.sqrt(_4) _6 = 1 / _5 # 1/sqrt(...) _7 = _1 * _6 # (x-μ)/sqrt(...) 
#### File: 06-PluginAndParser/pyTorch-LayerNorm/testLayerNormPlugin.py
```python
import os
import ctypes
import numpy as np
from time import time_ns
import tensorrt as trt
from cuda import cudart

soFilePath = "./LayerNorm.so"
nTime = 30
nIn, cIn, hIn, wIn = 2, 3, 4, 5
npDataType = np.float32
globalEpsilon = 1e-5
np.random.seed(97)

def check(a, b, weak=False):
    if weak:
        return np.all(np.abs(a - b) < globalEpsilon)
    else:
        return np.all(a == b)

def layerNormCPU(bufferH, epsilon):
    _x = bufferH[0]
    _0 = np.mean(_x.reshape(_x.shape[0], -1), 1)[:, np.newaxis, np.newaxis, np.newaxis]
    _1 = _x - _0
    _2 = _1 * _1
    _3 = np.mean(_2.reshape(_x.shape[0], -1), 1)[:, np.newaxis, np.newaxis, np.newaxis]
    _4 = _3 + epsilon
    _5 = np.sqrt(_4)
    _6 = 1 / _5  # 1/sqrt(...)
    _7 = _1 * _6  # (x-μ)/sqrt(...)
    return [_7]

def getLayerNormPlugin():
    for c in trt.get_plugin_registry().plugin_creator_list:
        #print(c.name)
        if c.name == 'LayerNorm':
            p0 = trt.PluginField('epsilon', np.float32(globalEpsilon), trt.PluginFieldType.FLOAT32)
            return c.create_plugin(c.name, trt.PluginFieldCollection([p0]))
    return None

if __name__ == '__main__':
    os.system("rm -f ./*.plan")
    np.set_printoptions(precision=4, linewidth=200, suppress=True)
    testCase = "fp%s" % ('16' if npDataType == np.float16 else '32')
    print("Test <%s>" % testCase)

    logger = trt.Logger(trt.Logger.ERROR)
    trt.init_libnvinfer_plugins(logger, '')
    ctypes.cdll.LoadLibrary(soFilePath)

    trtFile = "./model-" + testCase + ".plan"
    if os.path.isfile(trtFile):
        with open(trtFile, 'rb') as f:
            engineStr = f.read()
            engine = trt.Runtime(logger).deserialize_cuda_engine(engineStr)
        if engine is None:
            print("Failed loading engine!")
            exit()
        print("Succeeded loading engine!")
    else:
        builder = trt.Builder(logger)
        network = builder.create_network(1 << 0)
        config = builder.create_builder_config()
        config.max_workspace_size = 1 << 30
        config.flags = 1 << int(trt.BuilderFlag.FP16) if npDataType == np.float16 else 0

        inputTensorList = []
        trtDataType = trt.float16 if npDataType == np.float16 else trt.float32
        inputTensorList.append(network.add_input('inputT', trtDataType, [-1, -1, -1, -1]))

        profile = builder.create_optimization_profile()
        profile.set_shape('inputT', [1, 1, 1, 1], [nIn, cIn, hIn, wIn], [nIn * 2, cIn * 2, hIn * 2, wIn * 2])
        config.add_optimization_profile(profile)

        pluginLayer = network.add_plugin_v2(inputTensorList, getLayerNormPlugin())
        pluginLayer.get_output(0).dtype = trtDataType
        network.mark_output(pluginLayer.get_output(0))

        engineString = builder.build_serialized_network(network, config)
        if engineString is None:
            print("Failed building engine!")
            exit()
        print("Succeeded building engine!")
        with open(trtFile, 'wb') as f:
            f.write(engineString)
        engine = trt.Runtime(logger).deserialize_cuda_engine(engineString)

    context = engine.create_execution_context()
    context.set_binding_shape(0, [nIn, cIn, hIn, wIn])
    print("Binding all? %s" % (["No", "Yes"][int(context.all_binding_shapes_specified)]))

    _, stream = cudart.cudaStreamCreate()
    nInput = np.sum([engine.binding_is_input(i) for i in range(engine.num_bindings)])
    nOutput = engine.num_bindings - nInput
    for i in range(engine.num_bindings):
        print("input ->" if engine.binding_is_input(i) else "output->", engine.get_binding_dtype(i), engine.get_binding_shape(i), context.get_binding_shape(i))

    #data = np.random.rand(nIn,cIn,hIn,wIn).astype(np.float32)
    data = np.arange(nIn * cIn * hIn * wIn).reshape(nIn, cIn, hIn, wIn).astype(npDataType)
    bufferH = []
    bufferH.append(data)
    bufferH.append(np.empty(context.get_binding_shape(1), dtype=trt.nptype(engine.get_binding_dtype(1))))
    bufferD = []
    for i in range(engine.num_bindings):
        bufferD.append(cudart.cudaMalloc(bufferH[i].nbytes)[1])

    for i in range(nInput):
        cudart.cudaMemcpyAsync(bufferD[i], np.ascontiguousarray(bufferH[i].reshape(-1)).ctypes.data, bufferH[i].nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
    context.execute_async_v2(bufferD, stream)
    for i in range(nInput, nInput + nOutput):
        cudart.cudaMemcpyAsync(bufferH[i].ctypes.data, bufferD[i], bufferH[i].nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    cudart.cudaStreamSynchronize(stream)

    resCPU = layerNormCPU(bufferH, globalEpsilon)
    print("check result:", check(resCPU[0], bufferH[-1], True))
    print("Test <%s> finish!" % testCase)
```
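`layerNormCPU` normalizes each leading-axis slice over all remaining axes; the same computation collapses to two numpy calls, which is a convenient cross-check of the step-by-step version (pure numpy, written here as an illustration):

```python
import numpy as np

x = np.arange(2 * 3 * 4 * 5, dtype=np.float32).reshape(2, 3, 4, 5)
eps = 1e-5

mean = x.mean(axis=(1, 2, 3), keepdims=True)
var = x.var(axis=(1, 2, 3), keepdims=True)  # mean of squared deviations, i.e. the _2/_3 steps
out = (x - mean) / np.sqrt(var + eps)       # matches layerNormCPU within float tolerance
```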
#### File: 09-Advance/MultiStream/MultiStream.py
```python
import os
import ctypes
import numpy as np
from cuda import cudart
from time import time
import tensorrt as trt

np.random.seed(97)

# HtoD-bound
nIn, cIn, hIn, wIn = 8, 64, 256, 256
cOut, hW, wW = 1, 3, 3
# Calculation-bound
#nIn,cIn,hIn,wIn = 8,64,128,128
#cOut,hW,wW = 64,9,9
# DtoH-bound
#nIn,cIn,hIn,wIn = 8,64,128,128
#cOut,hW,wW = 256,3,3

trtFile = "./engin.plan"

def getEngine():
    logger = trt.Logger(trt.Logger.ERROR)
    if os.path.isfile(trtFile):
        with open(trtFile, 'rb') as f:
            engine = trt.Runtime(logger).deserialize_cuda_engine(f.read())
        if engine is None:
            print("Failed loading engine!")
            return
        print("Succeeded loading engine!")
    else:
        builder = trt.Builder(logger)
        network = builder.create_network(1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))
        profile = builder.create_optimization_profile()
        config = builder.create_builder_config()
        config.max_workspace_size = 6 << 30

        inputT0 = network.add_input('inputT0', trt.DataType.FLOAT, [-1, cIn, hIn, wIn])
        profile.set_shape(inputT0.name, (1, cIn, hIn, wIn), (nIn, cIn, hIn, wIn), (nIn * 2, cIn, hIn, wIn))
        config.add_optimization_profile(profile)

        weight = np.random.rand(cOut, cIn, hW, wW).astype(np.float32) * 2 - 1
        bias = np.random.rand(cOut).astype(np.float32) * 2 - 1
        _0 = network.add_convolution_nd(inputT0, cOut, [hW, wW], weight, bias)
        _0.padding_nd = (hW // 2, wW // 2)
        _1 = network.add_activation(_0.get_output(0), trt.ActivationType.RELU)
        network.mark_output(_1.get_output(0))

        engineString = builder.build_serialized_network(network, config)
        if engineString is None:
            print("Failed building engine!")
            return
        print("Succeeded building engine!")
        with open(trtFile, 'wb') as f:
            f.write(engineString)
        engine = trt.Runtime(logger).deserialize_cuda_engine(engineString)
    return engine

def run1(engine):
    context = engine.create_execution_context()
    context.set_binding_shape(0, [nIn, cIn, hIn, wIn])
    _, stream = cudart.cudaStreamCreate()

    data = np.random.rand(nIn * cIn * hIn * wIn).astype(np.float32).reshape(nIn, cIn, hIn, wIn)
    inputH0 = np.ascontiguousarray(data.reshape(-1))
    outputH0 = np.empty(context.get_binding_shape(1), dtype=trt.nptype(engine.get_binding_dtype(1)))
    _, inputD0 = cudart.cudaMallocAsync(inputH0.nbytes, stream)
    _, outputD0 = cudart.cudaMallocAsync(outputH0.nbytes, stream)

    # one complete inference pass (warm-up)
    cudart.cudaMemcpyAsync(inputD0, inputH0.ctypes.data, inputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
    context.execute_async_v2([int(inputD0), int(outputD0)], stream)
    cudart.cudaMemcpyAsync(outputH0.ctypes.data, outputD0, outputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    cudart.cudaStreamSynchronize(stream)

    # time the HtoD data copy
    for i in range(10):
        cudart.cudaMemcpyAsync(inputD0, inputH0.ctypes.data, inputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
    trtTimeStart = time()
    for i in range(30):
        cudart.cudaMemcpyAsync(inputD0, inputH0.ctypes.data, inputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
    cudart.cudaStreamSynchronize(stream)
    trtTimeEnd = time()
    print("%6.3fms - 1 stream, DataCopyHtoD" % ((trtTimeEnd - trtTimeStart) / 30 * 1000))

    # time inference
    for i in range(10):
        context.execute_async_v2([int(inputD0), int(outputD0)], stream)
    trtTimeStart = time()
    for i in range(30):
        context.execute_async_v2([int(inputD0), int(outputD0)], stream)
    cudart.cudaStreamSynchronize(stream)
    trtTimeEnd = time()
    print("%6.3fms - 1 stream, Inference" % ((trtTimeEnd - trtTimeStart) / 30 * 1000))
    # time the DtoH data copy
    for i in range(10):
        cudart.cudaMemcpyAsync(outputH0.ctypes.data, outputD0, outputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    trtTimeStart = time()
    for i in range(30):
        cudart.cudaMemcpyAsync(outputH0.ctypes.data, outputD0, outputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    cudart.cudaStreamSynchronize(stream)
    trtTimeEnd = time()
    print("%6.3fms - 1 stream, DataCopyDtoH" % ((trtTimeEnd - trtTimeStart) / 30 * 1000))

    # time the whole pipeline (copy in + inference + copy out)
    for i in range(10):
        context.execute_async_v2([int(inputD0), int(outputD0)], stream)
    trtTimeStart = time()
    for i in range(30):
        cudart.cudaMemcpyAsync(inputD0, inputH0.ctypes.data, inputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
        context.execute_async_v2([int(inputD0), int(outputD0)], stream)
        cudart.cudaMemcpyAsync(outputH0.ctypes.data, outputD0, outputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    cudart.cudaStreamSynchronize(stream)
    trtTimeEnd = time()
    print("%6.3fms - 1 stream, DataCopy + Inference" % ((trtTimeEnd - trtTimeStart) / 30 * 1000))

    cudart.cudaStreamDestroy(stream)
    cudart.cudaFree(inputD0)
    cudart.cudaFree(outputD0)

def run2(engine):
    context = engine.create_execution_context()
    context.set_binding_shape(0, [nIn, cIn, hIn, wIn])
    _, stream0 = cudart.cudaStreamCreate()
    _, stream1 = cudart.cudaStreamCreate()
    _, event0 = cudart.cudaEventCreate()
    _, event1 = cudart.cudaEventCreate()

    data = np.random.rand(nIn * cIn * hIn * wIn).astype(np.float32).reshape(nIn, cIn, hIn, wIn)
    inputSize = trt.volume(context.get_binding_shape(0)) * np.array([0], dtype=trt.nptype(engine.get_binding_dtype(0))).nbytes
    outputSize = trt.volume(context.get_binding_shape(1)) * np.array([0], dtype=trt.nptype(engine.get_binding_dtype(1))).nbytes
    _, inputH0 = cudart.cudaHostAlloc(inputSize, cudart.cudaHostAllocWriteCombined)
    _, inputH1 = cudart.cudaHostAlloc(inputSize, cudart.cudaHostAllocWriteCombined)
    _, outputH0 = cudart.cudaHostAlloc(outputSize, cudart.cudaHostAllocWriteCombined)
    _, outputH1 = cudart.cudaHostAlloc(outputSize, cudart.cudaHostAllocWriteCombined)
    _, inputD0 = cudart.cudaMallocAsync(inputSize, stream0)
    _, inputD1 = cudart.cudaMallocAsync(inputSize, stream1)
    _, outputD0 = cudart.cudaMallocAsync(outputSize, stream0)
    _, outputD1 = cudart.cudaMallocAsync(outputSize, stream1)

    # time the whole pipeline with two streams
    for i in range(10):
        context.execute_async_v2([int(inputD0), int(outputD0)], stream0)
    trtTimeStart = time()
    cudart.cudaEventRecord(event1, stream1)

    for i in range(30):
        inputH, outputH = [inputH1, outputH1] if i & 1 else [inputH0, outputH0]
        inputD, outputD = [inputD1, outputD1] if i & 1 else [inputD0, outputD0]
        eventBefore, eventAfter = [event0, event1] if i & 1 else [event1, event0]
        stream = stream1 if i & 1 else stream0

        cudart.cudaMemcpyAsync(inputD, inputH, inputSize, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
        cudart.cudaStreamWaitEvent(stream, eventBefore, cudart.cudaEventWaitDefault)
        context.execute_async_v2([int(inputD), int(outputD)], stream)
        cudart.cudaEventRecord(eventAfter, stream)
        cudart.cudaMemcpyAsync(outputH, outputD, outputSize, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)

    '''# the same loop with the even/odd iterations written out separately
    for i in range(30 // 2):
        cudart.cudaMemcpyAsync(inputD0, inputH0, inputSize, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream0)
        cudart.cudaStreamWaitEvent(stream0, event1, cudart.cudaEventWaitDefault)
        context.execute_async_v2([int(inputD0), int(outputD0)], stream0)
        cudart.cudaEventRecord(event0, stream0)
        cudart.cudaMemcpyAsync(outputH0, outputD0, outputSize, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream0)
        cudart.cudaMemcpyAsync(inputD1, inputH1, inputSize, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream1)
        cudart.cudaStreamWaitEvent(stream1, event0, cudart.cudaEventWaitDefault)
        context.execute_async_v2([int(inputD1), int(outputD1)], stream1)
        cudart.cudaEventRecord(event1, stream1)
        cudart.cudaMemcpyAsync(outputH1, outputD1, outputSize, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream1)
    '''
    cudart.cudaEventSynchronize(event1)
    trtTimeEnd = time()
    print("%6.3fms - 2 stream, DataCopy + Inference" % ((trtTimeEnd - trtTimeStart) / 30 * 1000))

if __name__ == '__main__':
    #os.system("rm -rf ./*.plan")
    cudart.cudaDeviceSynchronize()
    engine = getEngine()  # build or load the engine
    run1(engine)          # single-stream inference
    run2(engine)          # dual-stream inference
```
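The timing loops above use host-side `time()`, which also counts Python launch overhead around the asynchronous calls. A sketch of the same measurement with CUDA events (a drop-in for the inference loop in `run1`, reusing its `context`, `stream` and buffers, and assuming the same cuda-python bindings imported above):

```python
# GPU-side timing of the 30 inference launches in run1
_, evStart = cudart.cudaEventCreate()
_, evStop = cudart.cudaEventCreate()

cudart.cudaEventRecord(evStart, stream)
for i in range(30):
    context.execute_async_v2([int(inputD0), int(outputD0)], stream)
cudart.cudaEventRecord(evStop, stream)
cudart.cudaEventSynchronize(evStop)

_, ms = cudart.cudaEventElapsedTime(evStart, evStop)  # elapsed milliseconds between the two events
print("%6.3fms - 1 stream, Inference (event timed)" % (ms / 30))
```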
#### File: 09-Advance/Refit/Refit.py
```python
import os
import numpy as np
from cuda import cudart
import tensorrt as trt

nIn, cIn, hIn, wIn = 1, 1, 6, 9  # input tensor, NCHW
cOut, hW, wW = 1, 3, 3
data = np.tile(np.arange(1, 1 + hW * wW, dtype=np.float32).reshape(hW, wW), (cIn, hIn // hW, wIn // wW)).reshape(cIn, hIn, wIn)  # input tensor
weight = np.power(10, range(4, -5, -1), dtype=np.float32).reshape(cOut, hW, wW)  # convolution kernel
bias = np.zeros(cOut, dtype=np.float32)  # convolution bias
trtFile = "./model.plan"

def run(nRunTime):
    logger = trt.Logger(trt.Logger.ERROR)
    if os.path.isfile(trtFile):
        with open(trtFile, 'rb') as f:
            engine = trt.Runtime(logger).deserialize_cuda_engine(f.read())
        if engine is None:
            print("Failed loading engine!")
            return
        print("Succeeded loading engine!")
    else:
        builder = trt.Builder(logger)
        network = builder.create_network(1 << int(trt.NetworkDefinitionCreationFlag.EXPLICIT_BATCH))
        config = builder.create_builder_config()
        config.flags = 1 << int(trt.BuilderFlag.REFIT)

        inputT0 = network.add_input('inputT0', trt.DataType.FLOAT, (nIn, cIn, hIn, wIn))
        fakeWeight = np.zeros([cOut, cIn, hW, wW], dtype=np.float32)
        fakeBias = np.zeros([cOut], dtype=np.float32)
        convolutionLayer = network.add_convolution_nd(inputT0, cOut, (hW, wW), fakeWeight, fakeBias)
        convolutionLayer.name = 'conv'
        network.mark_output(convolutionLayer.get_output(0))

        engineString = builder.build_serialized_network(network, config)
        if engineString is None:
            print("Failed building engine!")
            return
        print("Succeeded building engine!")
        with open(trtFile, 'wb') as f:
            f.write(engineString)
        engine = trt.Runtime(logger).deserialize_cuda_engine(engineString)

    if nRunTime == 0:
        print("Do not refit!")
    else:
        print("Refit!")
        refitter = trt.Refitter(engine, logger)
        refitter.set_weights("conv", trt.WeightsRole.KERNEL, weight)  # every weight must be updated, otherwise refit reports an error
        refitter.set_weights("conv", trt.WeightsRole.BIAS, bias)

        [missingLayer, weightRole] = refitter.get_missing()
        for layer, role in zip(missingLayer, weightRole):
            print("[", layer, "-", role, "]")

        if not refitter.refit_cuda_engine():
            print("Failed Refitting engine!")
            return

    context = engine.create_execution_context()
    _, stream = cudart.cudaStreamCreate()

    inputH0 = np.ascontiguousarray(data.reshape(-1))
    outputH0 = np.empty(context.get_binding_shape(1), dtype=trt.nptype(engine.get_binding_dtype(1)))
    _, inputD0 = cudart.cudaMallocAsync(inputH0.nbytes, stream)
    _, outputD0 = cudart.cudaMallocAsync(outputH0.nbytes, stream)

    cudart.cudaMemcpyAsync(inputD0, inputH0.ctypes.data, inputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyHostToDevice, stream)
    context.execute_async_v2([int(inputD0), int(outputD0)], stream)
    cudart.cudaMemcpyAsync(outputH0.ctypes.data, outputD0, outputH0.nbytes, cudart.cudaMemcpyKind.cudaMemcpyDeviceToHost, stream)
    cudart.cudaStreamSynchronize(stream)

    print("data:", data.shape)
    print(data)
    print("outputH0:", outputH0.shape)
    print(outputH0)

    cudart.cudaStreamDestroy(stream)
    cudart.cudaFree(inputD0)
    cudart.cudaFree(outputD0)

if __name__ == '__main__':
    os.system('rm ./*.plan')
    np.set_printoptions(precision=8, linewidth=200, suppress=True)
    cudart.cudaDeviceSynchronize()
    run(0)
    run(1)
```
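After `run(1)` the engine holds the real weights, so its output should match a plain 'same'-padded sliding-window product of the 6x9 input with the 3x3 power-of-ten kernel. A numpy cross-check (hypothetical helper `xcorr2d_same`, assuming TensorRT's convolution layer computes cross-correlation, as cuDNN-style "convolutions" do):

```python
import numpy as np

def xcorr2d_same(img, ker):
    # zero-padded 'same' cross-correlation
    kh, kw = ker.shape
    ph, pw = kh // 2, kw // 2
    padded = np.pad(img, ((ph, ph), (pw, pw)))
    out = np.empty_like(img)
    for i in range(img.shape[0]):
        for j in range(img.shape[1]):
            out[i, j] = np.sum(padded[i:i + kh, j:j + kw] * ker)
    return out

data2d = np.tile(np.arange(1, 10, dtype=np.float32).reshape(3, 3), (2, 3))  # the 6x9 input
kernel = np.power(10, range(4, -5, -1), dtype=np.float32).reshape(3, 3)     # the refit kernel
print(xcorr2d_same(data2d, kernel))  # compare against outputH0 printed by run(1)
```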
{ "source": "jiefangxuanyan/tensorflow", "score": 2 }
#### File: autograph/converters/control_flow.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import gast

from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct import ast_util
from tensorflow.contrib.autograph.pyct import parser
from tensorflow.contrib.autograph.pyct import templates
from tensorflow.contrib.autograph.pyct import transformer
from tensorflow.contrib.autograph.pyct.static_analysis import cfg
from tensorflow.contrib.autograph.pyct.static_analysis.annos import NodeAnno


class SymbolNamer(object):
  """Describes the interface for ControlFlowTransformer's namer."""

  def new_symbol(self, name_root, reserved_locals):
    """Generate a new unique symbol.

    Args:
      name_root: String, used as stem in the new name.
      reserved_locals: Set(string), additional local symbols that are
        reserved and which should not be used.

    Returns:
      String.
    """
    raise NotImplementedError()


class ControlFlowTransformer(transformer.Base):
  """Transforms control flow structures like loops and conditionals."""

  def _create_cond_branch(self, body_name, aliased_orig_names,
                          aliased_new_names, body, returns):
    if aliased_orig_names:
      template = """
        def body_name():
          aliased_new_names, = aliased_orig_names,
          body
          return (returns,)
      """
      return templates.replace(
          template,
          body_name=body_name,
          body=body,
          aliased_orig_names=aliased_orig_names,
          aliased_new_names=aliased_new_names,
          returns=returns)
    else:
      template = """
        def body_name():
          body
          return (returns,)
      """
      return templates.replace(
          template, body_name=body_name, body=body, returns=returns)

  def _create_cond_expr(self, results, test, body_name, orelse_name):
    if results is not None:
      template = """
        results = ag__.utils.run_cond(test, body_name, orelse_name)
      """
      return templates.replace(
          template,
          test=test,
          results=results,
          body_name=body_name,
          orelse_name=orelse_name)
    else:
      template = """
        ag__.utils.run_cond(test, body_name, orelse_name)
      """
      return templates.replace(
          template, test=test, body_name=body_name, orelse_name=orelse_name)

  def visit_If(self, node):
    self.generic_visit(node)

    body_scope = anno.getanno(node, NodeAnno.BODY_SCOPE)
    orelse_scope = anno.getanno(node, NodeAnno.ORELSE_SCOPE)
    body_defs = body_scope.created | body_scope.modified
    orelse_defs = orelse_scope.created | orelse_scope.modified
    live = anno.getanno(node, 'live_out')

    # We'll need to check if we're closing over variables that are defined
    # elsewhere in the function
    # NOTE: we can only detect syntactic closure in the scope
    # of the code passed in. If the AutoGraph'd function itself closes
    # over other variables, this analysis won't take that into account.
    defined = anno.getanno(node, 'defined_in')

    # We only need to return variables that are
    # - modified by one or both branches
    # - live (or has a live parent) at the end of the conditional
    modified = []
    for def_ in body_defs | orelse_defs:
      def_with_parents = set((def_,)) | def_.support_set
      if live & def_with_parents:
        modified.append(def_)

    # We need to check if live created variables are balanced
    # in both branches
    created = live & (body_scope.created | orelse_scope.created)

    # The if statement is illegal if there are variables that are created,
    # that are also live, but both branches don't create them.
    if created:
      if created != (body_scope.created & live):
        raise ValueError(
            'The main branch does not create all live symbols that the else '
            'branch does.')
      if created != (orelse_scope.created & live):
        raise ValueError(
            'The else branch does not create all live symbols that the main '
            'branch does.')

    # Alias the closure variables inside the conditional functions
    # to avoid errors caused by the local variables created in the branch
    # functions.
    # We will alias variables independently for body and orelse scope,
    # because different branches might write different variables.
    aliased_body_orig_names = tuple(body_scope.modified - body_scope.created)
    aliased_orelse_orig_names = tuple(orelse_scope.modified -
                                      orelse_scope.created)
    aliased_body_new_names = tuple(
        self.context.namer.new_symbol(s.ssf(), body_scope.referenced)
        for s in aliased_body_orig_names)
    aliased_orelse_new_names = tuple(
        self.context.namer.new_symbol(s.ssf(), orelse_scope.referenced)
        for s in aliased_orelse_orig_names)

    alias_body_map = dict(zip(aliased_body_orig_names, aliased_body_new_names))
    alias_orelse_map = dict(
        zip(aliased_orelse_orig_names, aliased_orelse_new_names))

    node_body = ast_util.rename_symbols(node.body, alias_body_map)
    node_orelse = ast_util.rename_symbols(node.orelse, alias_orelse_map)

    if not modified:
      # When the cond would return no value, we leave the cond called without
      # results. That in turn should trigger the side effect guards. The
      # branch functions will return a dummy value that ensures cond
      # actually has some return value as well.
      results = None
    elif len(modified) == 1:
      results = modified[0]
    else:
      results = gast.Tuple([s.ast() for s in modified], None)

    body_name = self.context.namer.new_symbol('if_true', body_scope.referenced)
    orelse_name = self.context.namer.new_symbol('if_false',
                                                orelse_scope.referenced)

    if modified:

      def build_returns(aliased_names, alias_map, scope):
        """Builds list of return variables for a branch of a conditional."""
        returns = []
        for s in modified:
          if s in aliased_names:
            returns.append(alias_map[s])
          else:
            if s not in scope.created | defined:
              raise ValueError(
                  'Attempting to return variable "%s" from the true branch of '
                  'a conditional, but it was not closed over, or created in '
                  'this branch.' % str(s))
            else:
              returns.append(s)
        return tuple(returns)

      body_returns = build_returns(aliased_body_orig_names, alias_body_map,
                                   body_scope)
      orelse_returns = build_returns(aliased_orelse_orig_names,
                                     alias_orelse_map, orelse_scope)

    else:
      body_returns = orelse_returns = templates.replace('tf.ones(())')[0].value

    body_def = self._create_cond_branch(
        body_name,
        aliased_orig_names=tuple(aliased_body_orig_names),
        aliased_new_names=tuple(aliased_body_new_names),
        body=node_body,
        returns=body_returns)
    orelse_def = self._create_cond_branch(
        orelse_name,
        aliased_orig_names=tuple(aliased_orelse_orig_names),
        aliased_new_names=tuple(aliased_orelse_new_names),
        body=node_orelse,
        returns=orelse_returns)
    cond_expr = self._create_cond_expr(results, node.test, body_name,
                                       orelse_name)

    return body_def + orelse_def + cond_expr

  def visit_While(self, node):
    self.generic_visit(node)

    body_scope = anno.getanno(node, NodeAnno.BODY_SCOPE)
    body_closure = body_scope.modified - body_scope.created
    all_referenced = body_scope.referenced

    cond_scope = anno.getanno(node, NodeAnno.COND_SCOPE)
    cond_closure = set()
    for s in cond_scope.referenced:
      for root in s.support_set:
        if root not in body_scope.created:
          cond_closure.add(root)

    state = list(body_closure)
    if not state:
      # TODO(mdan): Implement this properly.
      # To complete this statement, we need to check whether any variable
      # created inside the body scope is used before being modified outside the
      # scope. This should be done during activity analysis, and in general
      # should cover the case where variables may not be initialized.
      raise ValueError('cannot convert while loop: no outputs')

    state_ssf = [
        self.context.namer.new_symbol(s.ssf(), all_referenced) for s in state
    ]
    ssf_map = {
        name: ssf
        for name, ssf in zip(state, state_ssf)
        if str(name) != ssf
    }

    if len(state) == 1:
      state = state[0]
      state_ssf = state_ssf[0]
      state_ast_tuple = state
    else:
      state_ast_tuple = gast.Tuple([n.ast() for n in state], None)

    node_body = ast_util.rename_symbols(node.body, ssf_map)
    test = ast_util.rename_symbols(node.test, ssf_map)

    template = """
      def test_name(state_ssf):
        return test
      def body_name(state_ssf):
        body
        return state_ssf,
      state_ast_tuple = ag__.while_stmt(
          test_name, body_name, (state,), (extra_deps,))
    """
    node = templates.replace(
        template,
        state=state,
        state_ssf=state_ssf,
        state_ast_tuple=state_ast_tuple,
        test_name=self.context.namer.new_symbol('loop_test',
                                                body_scope.referenced),
        test=test,
        body_name=self.context.namer.new_symbol('loop_body',
                                                body_scope.referenced),
        body=node_body,
        extra_deps=tuple(s.ast() for s in cond_closure),
    )

    return node

  def visit_For(self, node):
    self.generic_visit(node)

    body_scope = anno.getanno(node, NodeAnno.BODY_SCOPE)
    body_closure = body_scope.modified - body_scope.created
    all_referenced = body_scope.referenced

    state = list(body_closure)

    state_ssf = [
        self.context.namer.new_symbol(s.ssf(), all_referenced) for s in state
    ]
    ssf_map = {
        name: ssf
        for name, ssf in zip(state, state_ssf)
        if str(name) != ssf
    }

    if len(state) == 1:
      state = state[0]
      state_ssf = state_ssf[0]
      state_ast_tuple = state
    else:
      state_ast_tuple = gast.Tuple([n.ast() for n in state], None)

    node_body = ast_util.rename_symbols(node.body, ssf_map)
    if anno.hasanno(node, 'extra_test'):
      extra_test = anno.getanno(node, 'extra_test')
      extra_test = ast_util.rename_symbols(extra_test, ssf_map)
    else:
      extra_test = parser.parse_expression('True')

    template = """
      def extra_test_name(state_ssf):
        return extra_test_expr
      def body_name(iterate, state_ssf):
        body
        return state_ssf,
      state_ast_tuple = ag__.for_stmt(
          iter_, extra_test_name, body_name, (state,))
    """
    node = templates.replace(
        template,
        state=state,
        state_ssf=state_ssf,
        state_ast_tuple=state_ast_tuple,
        iter_=node.iter,
        iterate=node.target,
        extra_test_name=self.context.namer.new_symbol('extra_test',
                                                      all_referenced),
        extra_test_expr=extra_test,
        body_name=self.context.namer.new_symbol('loop_body', all_referenced),
        body=node_body)

    return node


def transform(node, context):
  cfg.run_analyses(node, cfg.Liveness(context))
  cfg.run_analyses(node, cfg.Defined(context))
  node = ControlFlowTransformer(context).visit(node)
  return node
```
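To make the templates concrete: for a simple `while` loop the transformer emits a test function, a body function, and a call to `ag__.while_stmt`, roughly as sketched below (symbol names are illustrative; the real ones come from `context.namer.new_symbol`):

```python
# what the user wrote
def f(x):
    while x > 0:
        x = x - 1
    return x

# the approximate shape of the generated code (illustrative only)
def f(x):
    def loop_test(x_1):
        return x_1 > 0

    def loop_body(x_1):
        x_1 = x_1 - 1
        return x_1,

    x = ag__.while_stmt(loop_test, loop_body, (x,), ())
    return x
```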
#### File: pyct/static_analysis/cfg_test.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import functools

import gast

from tensorflow.contrib.autograph.pyct import anno
from tensorflow.contrib.autograph.pyct import context
from tensorflow.contrib.autograph.pyct import parser
from tensorflow.contrib.autograph.pyct import qual_names
from tensorflow.contrib.autograph.pyct.static_analysis import cfg
from tensorflow.python.platform import test


class CFGTest(test.TestCase):

  def _parse_and_analyze(self, test_fn, namespace, arg_types=None):
    arg_types = arg_types or {}
    node, source = parser.parse_entity(test_fn)
    ctx = context.EntityContext(
        namer=None,
        source_code=source,
        source_file=None,
        namespace=namespace,
        arg_values=None,
        arg_types=arg_types,
        owner_type=None,
        recursive=True)
    node = qual_names.resolve(node)
    return node, ctx

  def _check_anno_matches(self, node, anno_name, var_names):
    if isinstance(var_names, str):
      var_names = (var_names,)
    qual_vars = set()
    for var_name in var_names:
      if isinstance(var_name, str):
        if '[' in var_name or ']' in var_name:
          raise ValueError('Annotation matching not supported with subscript.')
        if '.' not in var_name:
          qual_vars.add(qual_names.QN(var_name))
        else:
          attrs = var_name.split('.')
          this_qn = functools.reduce(qual_names.QN, attrs[1:],
                                     qual_names.QN(attrs[0]))
          qual_vars.add(this_qn)
    self.assertEqual(anno.getanno(node, anno_name), qual_vars)

  def test_reaching(self):

    def f(x):
      print(x)
      while True:
        x = x
        x = x
      return x

    node, ctx = self._parse_and_analyze(f, {})
    cfg.run_analyses(node, cfg.ReachingDefinitions(ctx))
    body = node.body[0].body
    # Only the argument reaches the expression
    def_in = anno.getanno(body[0], 'definitions_in')
    # One element, x, from arguments
    self.assertEqual(set(type(d[1]) for d in def_in), set((gast.arguments,)))

    while_body = body[1].body
    def_in = anno.getanno(while_body[0], 'definitions_in')
    # One definition, two possible sources.
    # - One from an assignment (if the loop is entered)
    # - The other from the arguments (if loop is not entered)
    self.assertEqual(
        set(type(d[1]) for d in def_in), set((gast.arguments, gast.Assign)))

    def_in = anno.getanno(while_body[1], 'definitions_in')
    # If we've reached this line, the only reaching definition of x is the
    # Assign node in previous line
    self.assertEqual(set(type(d[1]) for d in def_in), set((gast.Assign,)))

    def_in = anno.getanno(body[2], 'definitions_in')
    # Same situation as while_body[0]
    self.assertEqual(
        set(type(d[1]) for d in def_in), set((gast.arguments, gast.Assign)))

  def test_defined(self):

    def f(x):
      if x:
        y = 2  # pylint: disable=unused-variable
      return x

    node, ctx = self._parse_and_analyze(f, {})
    cfg.run_analyses(node, cfg.Defined(ctx))
    body = node.body[0].body
    # only x is for sure defined at the end
    self._check_anno_matches(body[1], 'defined_in', 'x')
    # at the end of the if body both x and y are defined
    if_body = body[0].body
    self._check_anno_matches(if_body[0], 'defined_out', ('x', 'y'))

  def _get_live_annotated_fnbody(self, f):
    node, ctx = self._parse_and_analyze(f, {})
    cfg.run_analyses(node, cfg.Liveness(ctx))
    body = node.body[0].body
    return body

  def test_live_straightline(self):

    def f1(x):
      a = g(x)  # pylint: disable=undefined-variable
      b = h(a)  # pylint: disable=undefined-variable, unused-variable
      return x

    body = self._get_live_annotated_fnbody(f1)
    self._check_anno_matches(body[1], 'live_in', ('a', 'h', 'x'))
    self._check_anno_matches(body[2], 'live_in', ('x'))
    self._check_anno_matches(body[0], 'live_in', ('g', 'h', 'x'))
    self._check_anno_matches(body[2], 'live_out', ())

  def test_live_stacked_conds_with_else(self):

    def f2(x, a):  # pylint: disable=unused-argument
      if a > 0:  # x should not be live
        x = 0
      if a > 1:
        x = 1
      else:
        x = 2

    body = self._get_live_annotated_fnbody(f2)
    self._check_anno_matches(body[0], 'live_in', ('a'))
    self._check_anno_matches(body[1], 'live_in', ('a'))
  def test_live_stacked_conds(self):

    def f3(x, a):
      if a > 0:  # x and a should be live
        x = 0
      if a > 1:  # x and a should be live_in
        x = 1
      return x  # x should be live

    body = self._get_live_annotated_fnbody(f3)
    self._check_anno_matches(body[0], 'live_in', ('a', 'x'))
    self._check_anno_matches(body[1], 'live_in', ('a', 'x'))
    self._check_anno_matches(body[2], 'live_in', ('x'))

  def test_live_possibly_unused_cond(self):

    def f4(x, a):
      if a > 0:  # x should be live
        x = 0
      x += 1

    body = self._get_live_annotated_fnbody(f4)
    self._check_anno_matches(body[0], 'live_in', ('x', 'a'))
    self._check_anno_matches(body[1], 'live_in', ('x'))

  def test_live_attribute_in_cond(self):

    def f5(x, a):
      if a > 0:  # x.y should be live
        x.y = 0
      return x.y

    body = self._get_live_annotated_fnbody(f5)
    self._check_anno_matches(body[0], 'live_in', ('x', 'x.y', 'a'))

  def test_live_noop(self):

    def f6(x):
      return x  # should this cause x.* to be live?

    body = self._get_live_annotated_fnbody(f6)
    self._check_anno_matches(body[0], 'live_in', ('x'))

  def test_live_loop(self):

    def f7(x, n):
      for i in range(n):
        x += i
      return x

    body = self._get_live_annotated_fnbody(f7)
    self._check_anno_matches(body[0], 'live_in', ('x', 'n', 'range'))
    self._check_anno_matches(body[1], 'live_in', ('x'))

  def test_live_context_manager(self):

    def f8(x, f):
      with f:
        x += 1

    body = self._get_live_annotated_fnbody(f8)
    self._check_anno_matches(body[0], 'live_in', ('f', 'x'))

  def test_node_equality(self):
    node_a = gast.parse('y = x').body[0]
    node_b = gast.parse('y = x').body[0]
    self.assertNotEqual(node_a, node_b)

  def test_nested_functions_defined(self):

    def f(x):
      y = x * 2

      def g(z):
        return z + y

      return g(x)

    node, ctx = self._parse_and_analyze(f, {})
    cfg.run_analyses(node, cfg.Defined(ctx))

    body = node.body[0].body
    self.assertEqual(
        anno.getanno(body[2], 'defined_in'),
        frozenset(map(qual_names.QN, ('g', 'x', 'y'))))

    # TODO(alexbw): CFG analysis doesn't currently cross FunctionDef boundaries.
    # NOTE: 'z' is easy to find, but 'y' is not identified as
    # defined, because CFG analysis is applied with each function separately.
    # fndef_body = body[1].body
    # self.assertEqual(
    #     anno.getanno(fndef_body[0], 'defined_in'),
    #     frozenset(map(qual_names.QN, ('z', 'y'))))

  def test_nested_functions_dont_leak_definitions(self):

    def f(x):
      print(x)

      def g():
        y = 2
        return y

      return g()  # y is not defined here

    node, ctx = self._parse_and_analyze(f, {})
    cfg.run_analyses(node, cfg.Defined(ctx))
    body = node.body[0].body
    self.assertEqual(
        anno.getanno(body[2], 'defined_in'),
        frozenset(map(qual_names.QN, ('x', 'g'))))

  def test_loop_else(self):

    # Disabling useless-else-on-loop error, because 'break' and 'continue'
    # canonicalization are a separate analysis pass, and here we test
    # the CFG analysis in isolation.
    def for_orelse(x):
      y = 0
      for i in range(len(x)):
        x += i
      else:  # pylint: disable=useless-else-on-loop
        y = 1
      return x, y

    def while_orelse(x, i):
      y = 0
      while x < 10:
        x += i
      else:  # pylint: disable=useless-else-on-loop
        y = 1
      return x, y

    for f in (for_orelse, while_orelse):
      node, ctx = self._parse_and_analyze(f, {})
      cfg.run_analyses(node, cfg.ReachingDefinitions(ctx))
      body = node.body[0].body
      return_node = body[-1]
      reaching_defs = anno.getanno(return_node, 'definitions_in')

      # Y could be defined by Assign(Num(0)) or Assign(Num(1))
      # X could be defined as an argument or an AugAssign.
      y_defs = [node for var, node in reaching_defs if str(var) == 'y']
      x_defs = [node for var, node in reaching_defs if str(var) == 'x']

      self.assertEqual(set((gast.Assign,)), set(type(def_) for def_ in y_defs))
      self.assertEqual(set((0, 1)), set(def_.value.n for def_ in y_defs))
      self.assertEqual(len(y_defs), 2)

      self.assertEqual(
          set((gast.arguments, gast.AugAssign)),
          set(type(def_) for def_ in x_defs))
      self.assertEqual(len(x_defs), 2)


if __name__ == '__main__':
  test.main()
```
#### File: python/ops/shape.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import contextlib

from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import util as distribution_util


class _DistributionShape(object):
  """Manage and manipulate `Distribution` shape.

  #### Terminology

  Recall that a `Tensor` has:
  - `shape`: size of `Tensor` dimensions,
  - `ndims`: size of `shape`; number of `Tensor` dimensions,
  - `dims`: indexes into `shape`; useful for transpose, reduce.

  `Tensor`s sampled from a `Distribution` can be partitioned by `sample_dims`,
  `batch_dims`, and `event_dims`. To understand the semantics of these
  dimensions, consider when two of the three are fixed and the remaining
  is varied:
  - `sample_dims`: indexes independent draws from identical
                   parameterizations of the `Distribution`.
  - `batch_dims`:  indexes independent draws from non-identical
                   parameterizations of the `Distribution`.
  - `event_dims`:  indexes event coordinates from one sample.

  The `sample`, `batch`, and `event` dimensions constitute the entirety of a
  `Distribution` `Tensor`'s shape. The dimensions are always in `sample`,
  `batch`, `event` order.

  #### Purpose

  This class partitions `Tensor` notions of `shape`, `ndims`, and `dims` into
  `Distribution` notions of `sample,` `batch,` and `event` dimensions. That
  is, it computes any of:

  ```
  sample_shape     batch_shape     event_shape
  sample_dims      batch_dims      event_dims
  sample_ndims     batch_ndims     event_ndims
  ```

  for a given `Tensor`, e.g., the result of
  `Distribution.sample(sample_shape=...)`.

  For a given `Tensor`, this class computes the above table using minimal
  information: `batch_ndims` and `event_ndims`.

  #### Examples

  We show examples of distribution shape semantics.

  - Sample dimensions:
    Computing summary statistics, i.e., the average is a reduction over sample
    dimensions.

    ```python
    sample_dims = [0]
    tf.reduce_mean(Normal(loc=1.3, scale=1.).sample_n(1000),
                   axis=sample_dims)  # ~= 1.3
    ```

  - Batch dimensions:
    Monte Carlo estimation of a marginal probability:
    Average over batch dimensions where batch dimensions are associated with
    random draws from a prior.
    E.g., suppose we want to find the Monte Carlo estimate of the marginal
    distribution of a `Normal` with a random `Laplace` location:

    ```
    P(X=x) = integral P(X=x|y) P(Y=y) dy
          ~= 1/n sum_{i=1}^n P(X=x|y_i),   y_i ~iid Laplace(0,1)
           = tf.reduce_mean(Normal(loc=Laplace(0., 1.).sample_n(n=1000),
                                   scale=tf.ones(1000)).prob(x),
                            axis=batch_dims)
    ```

    The `Laplace` distribution generates a `Tensor` of shape `[1000]`. When
    fed to a `Normal`, this is interpreted as 1000 different locations, i.e.,
    1000 non-identical Normals. Therefore a single call to `prob(x)` yields
    1000 probabilities, one for every location. The average over this batch
    yields the marginal.

  - Event dimensions:
    Computing the determinant of the Jacobian of a function of a random
    variable involves a reduction over event dimensions.
    E.g., Jacobian of the transform `Y = g(X) = exp(X)`:

    ```python
    tf.div(1., tf.reduce_prod(x, event_dims))
    ```

  We show examples using this class.

  Write `S, B, E` for `sample_shape`, `batch_shape`, and `event_shape`.

  ```python
  # 150 iid samples from one multivariate Normal with two degrees of freedom.
  mu = [0., 0]
  sigma = [[1., 0], [0, 1]]
  mvn = MultivariateNormal(mu, sigma)
  rand_mvn = mvn.sample(sample_shape=[3, 50])
  shaper = DistributionShape(batch_ndims=0, event_ndims=1)
  S, B, E = shaper.get_shape(rand_mvn)
  # S = [3, 50]
  # B = []
  # E = [2]

  # 12 iid samples from one Wishart with 2x2 events.
  sigma = [[1., 0], [2, 1]]
  wishart = Wishart(df=5, scale=sigma)
  rand_wishart = wishart.sample(sample_shape=[3, 4])
  shaper = DistributionShape(batch_ndims=0, event_ndims=2)
  S, B, E = shaper.get_shape(rand_wishart)
  # S = [3, 4]
  # B = []
  # E = [2, 2]

  # 100 iid samples from two, non-identical trivariate Normal distributions.
  mu = ...  # shape(2, 3)
  sigma = ...  # shape(2, 3, 3)
  X = MultivariateNormal(mu, sigma).sample(shape=[4, 25])
  # S = [4, 25]
  # B = [2]
  # E = [3]
  ```

  #### Argument Validation

  When `validate_args=False`, checks that cannot be done during graph
  construction are performed at graph execution. This may result in a
  performance degradation because data must be switched from GPU to CPU.

  For example, when `validate_args=False` and `event_ndims` is a
  non-constant `Tensor`, it is checked to be a non-negative integer at graph
  execution. (Same for `batch_ndims`). Constant `Tensor`s and non-`Tensor`
  arguments are always checked for correctness since this can be done for
  "free," i.e., during graph construction.
  """

  def __init__(self,
               batch_ndims=None,
               event_ndims=None,
               validate_args=False,
               name="DistributionShape"):
    """Construct `DistributionShape` with fixed `batch_ndims`, `event_ndims`.

    `batch_ndims` and `event_ndims` are fixed throughout the lifetime of a
    `Distribution`. They may only be known at graph execution.

    If both `batch_ndims` and `event_ndims` are python scalars (rather than
    either being a `Tensor`), functions in this class automatically perform
    sanity checks during graph construction.

    Args:
      batch_ndims: `Tensor`. Number of `dims` (`rank`) of the batch portion of
        indexes of a `Tensor`. A "batch" is a non-identical distribution, i.e,
        Normal with different parameters.
      event_ndims: `Tensor`. Number of `dims` (`rank`) of the event portion of
        indexes of a `Tensor`. An "event" is what is sampled from a
        distribution, i.e., a trivariate Normal has an event shape of [3] and
        a 4 dimensional Wishart has an event shape of [4, 4].
      validate_args: Python `bool`, default `False`. When `True`,
        non-`tf.constant` `Tensor` arguments are checked for correctness.
        (`tf.constant` arguments are always checked.)
      name: Python `str`. The name prepended to Ops created by this class.

    Raises:
      ValueError: if either `batch_ndims` or `event_ndims` are: `None`,
        negative, not `int32`.
    """
    if batch_ndims is None:
      raise ValueError("batch_ndims cannot be None")
    if event_ndims is None:
      raise ValueError("event_ndims cannot be None")
    self._batch_ndims = batch_ndims
    self._event_ndims = event_ndims
    self._validate_args = validate_args
    with ops.name_scope(name):
      self._name = name
      with ops.name_scope("init"):
        self._batch_ndims = self._assert_non_negative_int32_scalar(
            ops.convert_to_tensor(
                batch_ndims, name="batch_ndims"))
        self._batch_ndims_static, self._batch_ndims_is_0 = (
            self._introspect_ndims(self._batch_ndims))
        self._event_ndims = self._assert_non_negative_int32_scalar(
            ops.convert_to_tensor(
                event_ndims, name="event_ndims"))
        self._event_ndims_static, self._event_ndims_is_0 = (
            self._introspect_ndims(self._event_ndims))

  @property
  def name(self):
    """Name given to ops created by this class."""
    return self._name

  @property
  def batch_ndims(self):
    """Returns number of dimensions corresponding to non-identical draws."""
    return self._batch_ndims

  @property
  def event_ndims(self):
    """Returns number of dimensions needed to index a sample's coordinates."""
    return self._event_ndims

  @property
  def validate_args(self):
    """Returns True if graph-runtime `Tensor` checks are enabled."""
    return self._validate_args

  def get_ndims(self, x, name="get_ndims"):
    """Get `Tensor` number of dimensions (rank).

    Args:
      x: `Tensor`.
      name: Python `str`. The name to give this op.

    Returns:
      ndims: Scalar number of dimensions associated with a `Tensor`.
    """
    with self._name_scope(name, values=[x]):
      x = ops.convert_to_tensor(x, name="x")
      ndims = x.get_shape().ndims
      if ndims is None:
        return array_ops.rank(x, name="ndims")
      return ops.convert_to_tensor(ndims, dtype=dtypes.int32, name="ndims")

  def get_sample_ndims(self, x, name="get_sample_ndims"):
    """Returns number of dimensions corresponding to iid draws ("sample").

    Args:
      x: `Tensor`.
      name: Python `str`. The name to give this op.

    Returns:
      sample_ndims: `Tensor` (0D, `int32`).

    Raises:
      ValueError: if `sample_ndims` is calculated to be negative.
    """
    with self._name_scope(name, values=[x]):
      ndims = self.get_ndims(x, name=name)
      if self._is_all_constant_helper(ndims, self.batch_ndims,
                                      self.event_ndims):
        ndims = tensor_util.constant_value(ndims)
        sample_ndims = (ndims - self._batch_ndims_static -
                        self._event_ndims_static)
        if sample_ndims < 0:
          raise ValueError(
              "expected batch_ndims(%d) + event_ndims(%d) <= ndims(%d)" %
              (self._batch_ndims_static, self._event_ndims_static, ndims))
        return ops.convert_to_tensor(sample_ndims, name="sample_ndims")
      else:
        with ops.name_scope(name="sample_ndims"):
          sample_ndims = ndims - self.batch_ndims - self.event_ndims
          if self.validate_args:
            sample_ndims = control_flow_ops.with_dependencies(
                [check_ops.assert_non_negative(sample_ndims)], sample_ndims)
        return sample_ndims

  def get_dims(self, x, name="get_dims"):
    """Returns dimensions indexing `sample_shape`, `batch_shape`, `event_shape`.

    Example:

    ```python
    x = ...  # Tensor with shape [4, 3, 2, 1]
    sample_dims, batch_dims, event_dims = _DistributionShape(
        batch_ndims=2, event_ndims=1).get_dims(x)
    # sample_dims == [0]
    # batch_dims == [1, 2]
    # event_dims == [3]
    # Note that these are not the shape parts, but rather indexes into shape.
    ```

    Args:
      x: `Tensor`.
      name: Python `str`. The name to give this op.

    Returns:
      sample_dims: `Tensor` (1D, `int32`).
      batch_dims: `Tensor` (1D, `int32`).
      event_dims: `Tensor` (1D, `int32`).
    """
    with self._name_scope(name, values=[x]):
      def make_dims(start_sum, size, name):
        """Closure to make dims range."""
        start_sum = start_sum if start_sum else [
            array_ops.zeros([], dtype=dtypes.int32, name="zero")]
        if self._is_all_constant_helper(size, *start_sum):
          start = sum(tensor_util.constant_value(s) for s in start_sum)
          stop = start + tensor_util.constant_value(size)
          return ops.convert_to_tensor(
              list(range(start, stop)), dtype=dtypes.int32, name=name)
        else:
          start = sum(start_sum)
          return math_ops.range(start, start + size)
      sample_ndims = self.get_sample_ndims(x, name=name)
      return (make_dims([], sample_ndims, name="sample_dims"),
              make_dims([sample_ndims], self.batch_ndims, name="batch_dims"),
              make_dims([sample_ndims, self.batch_ndims],
                        self.event_ndims, name="event_dims"))

  def get_shape(self, x, name="get_shape"):
    """Returns `Tensor`'s shape partitioned into `sample`, `batch`, `event`.

    Args:
      x: `Tensor`.
      name: Python `str`. The name to give this op.

    Returns:
      sample_shape: `Tensor` (1D, `int32`).
      batch_shape: `Tensor` (1D, `int32`).
      event_shape: `Tensor` (1D, `int32`).
    """
    with self._name_scope(name, values=[x]):
      x = ops.convert_to_tensor(x, name="x")
      def slice_shape(start_sum, size, name):
        """Closure to slice out shape."""
        start_sum = start_sum if start_sum else [
            array_ops.zeros([], dtype=dtypes.int32, name="zero")]
        if (x.get_shape().ndims is not None and
            self._is_all_constant_helper(size, *start_sum)):
          start = sum(tensor_util.constant_value(s) for s in start_sum)
          stop = start + tensor_util.constant_value(size)
          slice_ = x.get_shape()[start:stop].as_list()
          if all(s is not None for s in slice_):
            return ops.convert_to_tensor(slice_, dtype=dtypes.int32, name=name)
        return array_ops.slice(array_ops.shape(x), [sum(start_sum)], [size])
      sample_ndims = self.get_sample_ndims(x, name=name)
      return (slice_shape([], sample_ndims,
                          name="sample_shape"),
              slice_shape([sample_ndims], self.batch_ndims,
                          name="batch_shape"),
              slice_shape([sample_ndims, self.batch_ndims], self.event_ndims,
                          name="event_shape"))

  # TODO(jvdillon): Make remove expand_batch_dim and make expand_batch_dim=False
  # the default behavior.
  def make_batch_of_event_sample_matrices(
      self, x, expand_batch_dim=True,
      name="make_batch_of_event_sample_matrices"):
    """Reshapes/transposes `Distribution` `Tensor` from S+B+E to B_+E_+S_.

    Where:
    - `B_ = B if B or not expand_batch_dim else [1]`,
    - `E_ = E if E else [1]`,
    - `S_ = [tf.reduce_prod(S)]`.

    Args:
      x: `Tensor`.
      expand_batch_dim: Python `bool`. If `True` the batch dims will be
        expanded such that `batch_ndims >= 1`.
      name: Python `str`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
    """
    with self._name_scope(name, values=[x]):
      x = ops.convert_to_tensor(x, name="x")
      # x.shape: S+B+E
      sample_shape, batch_shape, event_shape = self.get_shape(x)
      event_shape = distribution_util.pick_vector(
          self._event_ndims_is_0, [1], event_shape)
      if expand_batch_dim:
        batch_shape = distribution_util.pick_vector(
            self._batch_ndims_is_0, [1], batch_shape)
      new_shape = array_ops.concat([[-1], batch_shape, event_shape], 0)
      x = array_ops.reshape(x, shape=new_shape)
      # x.shape: [prod(S)]+B_+E_
      x = distribution_util.rotate_transpose(x, shift=-1)
      # x.shape: B_+E_+[prod(S)]
      return x, sample_shape

  # TODO(jvdillon): Make remove expand_batch_dim and make expand_batch_dim=False
  # the default behavior.
  def undo_make_batch_of_event_sample_matrices(
      self, x, sample_shape, expand_batch_dim=True,
      name="undo_make_batch_of_event_sample_matrices"):
    """Reshapes/transposes `Distribution` `Tensor` from B_+E_+S_ to S+B+E.

    Where:
    - `B_ = B if B or not expand_batch_dim else [1]`,
    - `E_ = E if E else [1]`,
    - `S_ = [tf.reduce_prod(S)]`.

    This function "reverses" `make_batch_of_event_sample_matrices`.

    Args:
      x: `Tensor` of shape `B_+E_+S_`.
      sample_shape: `Tensor` (1D, `int32`).
      expand_batch_dim: Python `bool`. If `True` the batch dims will be
        expanded such that `batch_ndims>=1`.
      name: Python `str`. The name to give this op.

    Returns:
      x: `Tensor`. Input transposed/reshaped to `S+B+E`.
    """
    with self._name_scope(name, values=[x, sample_shape]):
      x = ops.convert_to_tensor(x, name="x")
      # x.shape: _B+_E+[prod(S)]
      sample_shape = ops.convert_to_tensor(sample_shape, name="sample_shape")
      x = distribution_util.rotate_transpose(x, shift=1)
      # x.shape: [prod(S)]+_B+_E
      if self._is_all_constant_helper(self.batch_ndims, self.event_ndims):
        if self._batch_ndims_is_0 or self._event_ndims_is_0:
          squeeze_dims = []
          if self._event_ndims_is_0:
            squeeze_dims += [-1]
          if self._batch_ndims_is_0 and expand_batch_dim:
            squeeze_dims += [1]
          if squeeze_dims:
            x = array_ops.squeeze(x, axis=squeeze_dims)
            # x.shape: [prod(S)]+B+E
        _, batch_shape, event_shape = self.get_shape(x)
      else:
        s = (x.get_shape().as_list() if x.get_shape().is_fully_defined()
             else array_ops.shape(x))
        batch_shape = s[1:1 + self.batch_ndims]
        # Since sample_dims=1 and is left-most, we add 1 to the number of
        # batch_ndims to get the event start dim.
        event_start = array_ops.where(
            math_ops.logical_and(expand_batch_dim, self._batch_ndims_is_0),
            2, 1 + self.batch_ndims)
        event_shape = s[event_start:event_start + self.event_ndims]
      new_shape = array_ops.concat([sample_shape, batch_shape, event_shape], 0)
      x = array_ops.reshape(x, shape=new_shape)
      # x.shape: S+B+E
      return x

  @contextlib.contextmanager
  def _name_scope(self, name=None, values=None):
    """Helper function to standardize op scope."""
    with ops.name_scope(self.name):
      with ops.name_scope(name, values=(
          (values or []) + [self.batch_ndims, self.event_ndims])) as scope:
        yield scope

  def _is_all_constant_helper(self, *args):
    """Helper which returns True if all inputs are constant_value."""
    return all(tensor_util.constant_value(x) is not None for x in args)

  def _assert_non_negative_int32_scalar(self, x):
    """Helper which ensures that input is a non-negative, int32, scalar."""
    x = ops.convert_to_tensor(x, name="x")
    if x.dtype.base_dtype != dtypes.int32.base_dtype:
      raise TypeError("%s.dtype=%s is not %s" %
                      (x.name, x.dtype, dtypes.int32))
    x_value_static = tensor_util.constant_value(x)
    if x.get_shape().ndims is not None and x_value_static is not None:
      if x.get_shape().ndims != 0:
        raise ValueError("%s.ndims=%d is not 0 (scalar)" %
                         (x.name, x.get_shape().ndims))
      if x_value_static < 0:
        raise ValueError("%s.value=%d cannot be negative" %
                         (x.name, x_value_static))
      return x
    if self.validate_args:
      x = control_flow_ops.with_dependencies([
          check_ops.assert_rank(x, 0),
          check_ops.assert_non_negative(x)], x)
    return x

  def _introspect_ndims(self, ndims):
    """Helper to establish some properties of input ndims args."""
    if self._is_all_constant_helper(ndims):
      return (tensor_util.constant_value(ndims),
              tensor_util.constant_value(ndims) == 0)
    return None, math_ops.equal(ndims, 0)
```
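Stripped of the graph-mode bookkeeping, the partition `get_shape` computes is just a split of the rank into sample, batch, and event parts. A pure-Python sketch of the fully static case (hypothetical helper, for intuition only):

```python
def split_sbe(shape, batch_ndims, event_ndims):
    # sample dims are whatever rank remains after batch and event dims
    sample_ndims = len(shape) - batch_ndims - event_ndims
    assert sample_ndims >= 0, "batch_ndims + event_ndims exceeds rank"
    return (shape[:sample_ndims],
            shape[sample_ndims:sample_ndims + batch_ndims],
            shape[sample_ndims + batch_ndims:])

print(split_sbe((3, 50, 2), 0, 1))     # ((3, 50), (), (2,)) -- the MVN docstring example
print(split_sbe((4, 25, 2, 3), 1, 1))  # ((4, 25), (2,), (3,))
```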