code
stringlengths 20
1.05M
| apis
sequence | extract_api
stringlengths 75
5.24M
|
---|---|---|
import unittest
import tempfile
import os
from archive_manager import ArchiveManager, ArchiveManagerException
from module import get_config
from fallocate import fallocate
import shutil
import subprocess
import time
class ArchiveManagerTestCase(unittest.TestCase):
"""Tests for cloud archive manager"""
def setUp(self):
self.test_dir = tempfile.mkdtemp(dir="/var/tmp")
def tearDown(self):
shutil.rmtree(self.test_dir)
def generic_archive(self, backup_extension=".tar.gz"):
"""create a generic archive"""
self.create_test_files(backup_extension)
filename = self.create_test_config()
cfg = get_config(filename)
cfg['backup_root'] = self.test_dir
cfg['backup_extension'] = backup_extension
verbose = None
archive = ArchiveManager(cfg, verbose)
return archive
def test_custom_backup_extension(self):
"""Test that custom backup extensions work"""
backup_extension = ".tgz"
archive = self.generic_archive(backup_extension)
self.assertEqual(archive.backup_extension, backup_extension)
for d in archive.backup_dirs:
files = archive.get_files(d)
self.assertEqual(len(files), 100)
last_file = "99-test%s" % backup_extension
first_file = "0-test%s" % backup_extension
self.assertEqual(files[-1], last_file)
self.assertEqual(files[0], first_file)
# Delete one file
try:
archive.delete_oldest(d, files)
except:
self.fail("delete oldest failed")
self.assertEqual(len(files), 99)
filename = "98-test%s" % backup_extension
self.assertEqual(files[-1], filename)
def test_newest_oldest(self):
"""Ensure the first file is the oldest and the last file is the newest"""
archive = self.generic_archive()
for dir in archive.backup_dirs:
files = archive.get_files(dir)
oldest_file = files[-1]
filepath = os.path.join(archive.backup_root, dir, oldest_file)
oldest_file_mtime = os.stat(filepath).st_mtime
newest_file = files[0]
filepath = os.path.join(archive.backup_root, dir, newest_file)
newest_file_mtime = os.stat(filepath).st_mtime
self.assertGreater(newest_file_mtime, oldest_file_mtime)
def test_bad_file_names(self):
"""Test that only the backup_extension files are picked up"""
archive = self.generic_archive()
# FIXME: a better way to create these kind of files?
bad_filename = "shouldntexist"
file_extension = []
file_extension.append('.tar.gz')
file_extension.append('.tgz')
file_extension.append('.zip')
file_extension.append('.txt')
file_extension.append('.rar')
file_extension.append('.rpm')
# If the custom backup extension is in the list, remove it
try:
file_extension.remove(archive.backup_extension)
except:
pass
for dir in archive.backup_dirs:
# Create bad files
for f in file_extension:
bf = "%s%s" % (bad_filename, f)
filepath = os.path.join(archive.backup_root, dir, bf)
with open(filepath, "w+") as f:
size = 1024
with open(filepath, "w+b") as f:
fallocate(f, 0, size)
f.close()
files = archive.get_files(dir)
self.assertEqual(len(files), 100)
# Bad files should not be included in the files list
for f in file_extension:
bf = "%s%s" % (bad_filename, f)
self.assertNotIn(bf, files)
def test_delete_oldest(self):
"""Test that when deleted_oldest is called that the oldest file is deleted"""
archive = self.generic_archive()
for dir in archive.backup_dirs:
files = archive.get_files(dir)
try:
archive.delete_oldest(dir, files)
except:
self.fail("delete oldest failed")
self.assertEqual(len(files), 99)
self.assertEqual(files[-1], "98-test.tar.gz")
# Try listing the files again and deleting another file
files = archive.get_files(dir)
try:
archive.delete_oldest(dir, files)
except:
self.fail("delete oldest failed")
self.assertEqual(len(files), 98)
self.assertEqual(files[-1], "97-test.tar.gz")
def test_get_files(self):
archive = self.generic_archive()
for dir in archive.backup_dirs:
files = archive.get_files(dir)
oldest_file = files[-1]
newest_file = files[0]
self.assertEqual(len(files), 100)
self.assertEqual(oldest_file, "99-test.tar.gz")
self.assertEqual(newest_file, "0-test.tar.gz")
def test_get_size(self):
archive = self.generic_archive()
for dir in archive.backup_dirs:
size = archive.get_size(dir)
self.assertEqual(size, 5171200)
def test_keep_max_files(self):
archive = self.generic_archive()
for dir in archive.backup_dirs:
try:
archive.keep_max_files(dir)
except:
self.fail("should not fail")
files = archive.get_files(dir)
self.assertEqual(len(files), 36)
def test_delete_until_max_dir_size(self):
"""Delete until we have a minimum amount of files, which takes precedence over size"""
self.create_test_files()
filename = self.create_test_config()
cfg = get_config(filename)
cfg['max_dir_size'] = 3145728
cfg['backup_root'] = self.test_dir
verbose = None
archive = ArchiveManager(cfg, verbose)
for dir in archive.backup_dirs:
try:
archive.delete_until_size_or_min(dir)
except ArchiveManagerException as err:
self.fail("ERROR: %s\n" % str(err))
# I guess do this here, as the actual files is in the function
# FIXME: this suggests this is not the right way to do it
files = archive.get_files(dir)
self.assertEqual(len(files), 77)
self.assertEqual(files[-1], '76-test.tar.gz')
self.assertEqual(files[0], '0-test.tar.gz')
def test_min_larger_than_max(self):
self.create_test_files()
filename = self.create_test_config()
cfg = get_config(filename)
cfg['max_dir_size'] = 3145728
cfg['backup_root'] = self.test_dir
# Max should be larger than min, here we set it to be smaller
cfg['max_num_backup_files'] = 80
cfg['min_num_backup_files'] = 90
# Should fail
with self.assertRaises(ArchiveManagerException):
verbose = None
unused_archive = ArchiveManager(cfg, verbose)
# Now set min/max properly
cfg['max_num_backup_files'] = 90
cfg['min_num_backup_files'] = 80
try:
verbose = None
unused_archive = ArchiveManager(cfg, verbose)
except ArchiveManagerException as err:
self.fail("ERROR: %s\n" % str(err))
def create_test_config(self):
filename = os.path.join(self.test_dir, "config.ini")
try:
fh = open(filename, "w+")
# NOTE: no backup_root setting, everything but
config_string = ("---\n"
"max_num_backup_files: 36\n"
"min_num_backup_files: 24\n"
"max_dir_size: 5000000000\n"
"backup_root: '/backup'\n"
"backup_dirs:\n"
" - 'backups-1'\n"
" - 'backups-2'\n")
fh.write(config_string)
fh.close()
except:
# TODO: handle error
pass
return filename
def create_test_files(self, backup_extension=".tar.gz"):
filename = self.create_test_config()
cfg = get_config(filename)
cfg['backup_root'] = self.test_dir
for dir in cfg['backup_dirs']:
full_dir = os.path.join(cfg['backup_root'], dir)
if not os.path.exists(full_dir):
os.makedirs(full_dir)
for i in range(100):
filename = str(i) + "-test%s" % backup_extension
filepath = os.path.join(full_dir, filename)
# Note the use of fallocate here to create files quickly that have a
# given size.
with open(filepath, "w+") as f:
size = (i+1)*1024
with open(filepath, "w+b") as f:
fallocate(f, 0, size)
f.close()
# Here we are setting the mtime so that cloud archive manager
# has some newer and older files to use, as far as its concerned
stat = os.stat(filepath)
mtime = stat.st_mtime
atime = stat.st_atime
# Make each file one hour older...though this will make file 99
# the oldest file which is a bit counter intuitive...
new_mtime = mtime - i*3600
os.utime(filepath,(atime, new_mtime))
def test_read_bad_config(self):
filename = self.create_test_config()
cfg = get_config(filename)
# Remove backup_root to test failing archive creation
del cfg['backup_root']
with self.assertRaises(ArchiveManagerException):
verbose = None
unused_archive = ArchiveManager(cfg, verbose)
def test_read_good_config(self):
self.create_test_files()
filename = self.create_test_config()
cfg = get_config(filename)
cfg['backup_root'] = self.test_dir
verbose = None
try:
archive = ArchiveManager(cfg, verbose)
except ArchiveManagerException as err:
self.fail("ERROR: %s\n" % str(err))
backup_dirs_test = []
backup_dirs_test.append("backups-1")
backup_dirs_test.append("backups-2")
self.assertEqual(archive.backup_root, self.test_dir)
self.assertEqual(archive.max_num_backup_files, 36)
self.assertEqual(archive.min_num_backup_files, 24)
self.assertEqual(archive.max_dir_size, 5000000000)
self.assertEqual(archive.backup_dirs, backup_dirs_test)
self.assertEqual(archive.backup_dirs[0], 'backups-1')
self.assertEqual(archive.backup_dirs[1], 'backups-2')
if __name__ == '__main__':
unittest.main() | [
"os.path.exists",
"module.get_config",
"fallocate.fallocate",
"os.makedirs",
"os.path.join",
"os.utime",
"tempfile.mkdtemp",
"archive_manager.ArchiveManager",
"shutil.rmtree",
"unittest.main",
"os.stat"
] | [((10969, 10984), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10982, 10984), False, 'import unittest\n'), ((358, 390), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'dir': '"""/var/tmp"""'}), "(dir='/var/tmp')\n", (374, 390), False, 'import tempfile\n'), ((424, 452), 'shutil.rmtree', 'shutil.rmtree', (['self.test_dir'], {}), '(self.test_dir)\n', (437, 452), False, 'import shutil\n'), ((660, 680), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (670, 680), False, 'from module import get_config\n'), ((816, 844), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (830, 844), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((5885, 5905), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (5895, 5905), False, 'from module import get_config\n'), ((6029, 6057), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (6043, 6057), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((6765, 6785), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (6775, 6785), False, 'from module import get_config\n'), ((7567, 7608), 'os.path.join', 'os.path.join', (['self.test_dir', '"""config.ini"""'], {}), "(self.test_dir, 'config.ini')\n", (7579, 7608), False, 'import os\n'), ((8394, 8414), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (8404, 8414), False, 'from module import get_config\n'), ((9752, 9772), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (9762, 9772), False, 'from module import get_config\n'), ((10140, 10160), 'module.get_config', 'get_config', (['filename'], {}), '(filename)\n', (10150, 10160), False, 'from module import get_config\n'), ((2101, 2152), 'os.path.join', 'os.path.join', (['archive.backup_root', 'dir', 'oldest_file'], {}), '(archive.backup_root, dir, oldest_file)\n', (2113, 2152), False, 'import os\n'), ((2271, 2322), 'os.path.join', 'os.path.join', (['archive.backup_root', 'dir', 'newest_file'], {}), '(archive.backup_root, dir, newest_file)\n', (2283, 2322), False, 'import os\n'), ((7164, 7192), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (7178, 7192), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((7389, 7417), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (7403, 7417), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((8521, 8558), 'os.path.join', 'os.path.join', (["cfg['backup_root']", 'dir'], {}), "(cfg['backup_root'], dir)\n", (8533, 8558), False, 'import os\n'), ((9980, 10008), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (9994, 10008), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((10265, 10293), 'archive_manager.ArchiveManager', 'ArchiveManager', (['cfg', 'verbose'], {}), '(cfg, verbose)\n', (10279, 10293), False, 'from archive_manager import ArchiveManager, ArchiveManagerException\n'), ((2185, 2202), 'os.stat', 'os.stat', (['filepath'], {}), '(filepath)\n', (2192, 2202), False, 'import os\n'), ((2355, 2372), 'os.stat', 'os.stat', (['filepath'], {}), '(filepath)\n', (2362, 2372), False, 'import os\n'), ((3318, 3360), 'os.path.join', 'os.path.join', (['archive.backup_root', 'dir', 'bf'], {}), '(archive.backup_root, dir, bf)\n', (3330, 3360), False, 'import os\n'), ((8578, 8602), 'os.path.exists', 'os.path.exists', (['full_dir'], {}), '(full_dir)\n', (8592, 8602), False, 'import os\n'), ((8620, 8641), 'os.makedirs', 'os.makedirs', (['full_dir'], {}), '(full_dir)\n', (8631, 8641), False, 'import os\n'), ((8767, 8799), 'os.path.join', 'os.path.join', (['full_dir', 'filename'], {}), '(full_dir, filename)\n', (8779, 8799), False, 'import os\n'), ((9314, 9331), 'os.stat', 'os.stat', (['filepath'], {}), '(filepath)\n', (9321, 9331), False, 'import os\n'), ((9618, 9656), 'os.utime', 'os.utime', (['filepath', '(atime, new_mtime)'], {}), '(filepath, (atime, new_mtime))\n', (9626, 9656), False, 'import os\n'), ((3518, 3539), 'fallocate.fallocate', 'fallocate', (['f', '(0)', 'size'], {}), '(f, 0, size)\n', (3527, 3539), False, 'from fallocate import fallocate\n'), ((9079, 9100), 'fallocate.fallocate', 'fallocate', (['f', '(0)', 'size'], {}), '(f, 0, size)\n', (9088, 9100), False, 'from fallocate import fallocate\n')] |
import filecmp
import importlib
import os
import sys
import pytest
from pytest import fixture
tests_path = os.path.dirname(__file__)
examples_path = os.path.normpath(
os.path.join(tests_path, os.pardir, "examples"))
def withraises(method):
# Decorator to wrap a test utility method. This allows the same
# (wrapped) test utility method to be called for positive and
# negative (error-raising) tests. The positive/negative switch is
# triggered by the "raises" kwarg.
def withraises_wrapper(self, *args, raises=None, **kwargs):
if raises:
with pytest.raises(raises) as excinfo:
method(self, *args, **kwargs)
error = excinfo.value
self.error = error
return error
return method(self, *args, **kwargs)
return withraises_wrapper
class OutputFilesTest:
@property
def dir_path(self):
# Return the dir path of the module that subclassed this class.
try:
dir_path = self._dir_path
except AttributeError:
module = sys.modules[self.__module__]
dir_path = os.path.dirname(module.__file__)
self._dir_path = dir_path
return dir_path
@fixture(autouse=True)
def _setup_user_module(self, monkeypatch):
# Setup the cwd and import path for the module that subclassed
# this class.
monkeypatch.chdir(self.dir_path)
monkeypatch.syspath_prepend(self.dir_path)
try:
del sys.modules[self.user_module_name]
except KeyError:
pass
self.user_module = importlib.import_module(self.user_module_name)
def _redirect_copy(self, input_path, output_path):
# A patch for doxhooks.fileio.copy.
test_output_path = os.path.join(self.test_output_root, output_path)
return self._fileio_copy(input_path, test_output_path)
def _redirect_open_output(self, path, *args, **kwargs):
# A patch for doxhooks.fileio.open_output.
test_output_path = os.path.join(self.test_output_root, path)
return self._fileio_open_output(test_output_path, *args, **kwargs)
@fixture(autouse=True)
def _setup_output_tmpdir(self, monkeypatch, tmpdir):
# Patch doxhooks.fileio to prefix output paths with a tmpdir path.
self.test_output_root = tmpdir.strpath
fileio = importlib.import_module("doxhooks.fileio")
fileio.add_output_roots(self.test_output_root)
self._fileio_copy = fileio.copy
monkeypatch.setattr(fileio, "copy", self._redirect_copy)
self._fileio_open_output = fileio.open_output
monkeypatch.setattr(fileio, "open_output", self._redirect_open_output)
def _raise_error(self, error):
raise error
def assert_the_output_files_match_the_established_output_files(self):
for path, directories, files in os.walk(
self.output_directory, onerror=self._raise_error):
test_path = os.path.join(self.test_output_root, path)
entries = os.listdir(test_path)
entries.sort()
test_directories = []
test_files = []
for entry in entries:
entry_path = os.path.join(test_path, entry)
if os.path.isfile(entry_path):
test_files.append(entry)
else:
test_directories.append(entry)
assert test_directories == directories
assert test_files == files
for file in files:
file_path = os.path.join(path, file)
test_file_path = os.path.join(test_path, file)
files_are_the_same = filecmp.cmp(
file_path, test_file_path, shallow=False)
assert files_are_the_same, file + " are not the same."
class OutputTestDriver(OutputFilesTest):
def test_the_output_files_match_the_established_output_files(self):
self.user_module.main()
self.assert_the_output_files_match_the_established_output_files()
class ExampleTestDriver(OutputTestDriver):
user_module_name = "example"
output_directory = "www"
class SystemTestDriver(OutputTestDriver):
user_module_name = "feature"
output_directory = "output"
| [
"os.listdir",
"importlib.import_module",
"os.path.join",
"os.path.isfile",
"os.path.dirname",
"pytest.raises",
"pytest.fixture",
"filecmp.cmp",
"os.walk"
] | [((110, 135), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (125, 135), False, 'import os\n'), ((174, 221), 'os.path.join', 'os.path.join', (['tests_path', 'os.pardir', '"""examples"""'], {}), "(tests_path, os.pardir, 'examples')\n", (186, 221), False, 'import os\n'), ((1228, 1249), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1235, 1249), False, 'from pytest import fixture\n'), ((2164, 2185), 'pytest.fixture', 'fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (2171, 2185), False, 'from pytest import fixture\n'), ((1616, 1662), 'importlib.import_module', 'importlib.import_module', (['self.user_module_name'], {}), '(self.user_module_name)\n', (1639, 1662), False, 'import importlib\n'), ((1790, 1838), 'os.path.join', 'os.path.join', (['self.test_output_root', 'output_path'], {}), '(self.test_output_root, output_path)\n', (1802, 1838), False, 'import os\n'), ((2041, 2082), 'os.path.join', 'os.path.join', (['self.test_output_root', 'path'], {}), '(self.test_output_root, path)\n', (2053, 2082), False, 'import os\n'), ((2383, 2425), 'importlib.import_module', 'importlib.import_module', (['"""doxhooks.fileio"""'], {}), "('doxhooks.fileio')\n", (2406, 2425), False, 'import importlib\n'), ((2890, 2947), 'os.walk', 'os.walk', (['self.output_directory'], {'onerror': 'self._raise_error'}), '(self.output_directory, onerror=self._raise_error)\n', (2897, 2947), False, 'import os\n'), ((2991, 3032), 'os.path.join', 'os.path.join', (['self.test_output_root', 'path'], {}), '(self.test_output_root, path)\n', (3003, 3032), False, 'import os\n'), ((3055, 3076), 'os.listdir', 'os.listdir', (['test_path'], {}), '(test_path)\n', (3065, 3076), False, 'import os\n'), ((592, 613), 'pytest.raises', 'pytest.raises', (['raises'], {}), '(raises)\n', (605, 613), False, 'import pytest\n'), ((1127, 1159), 'os.path.dirname', 'os.path.dirname', (['module.__file__'], {}), '(module.__file__)\n', (1142, 1159), False, 'import os\n'), ((3229, 3259), 'os.path.join', 'os.path.join', (['test_path', 'entry'], {}), '(test_path, entry)\n', (3241, 3259), False, 'import os\n'), ((3279, 3305), 'os.path.isfile', 'os.path.isfile', (['entry_path'], {}), '(entry_path)\n', (3293, 3305), False, 'import os\n'), ((3576, 3600), 'os.path.join', 'os.path.join', (['path', 'file'], {}), '(path, file)\n', (3588, 3600), False, 'import os\n'), ((3634, 3663), 'os.path.join', 'os.path.join', (['test_path', 'file'], {}), '(test_path, file)\n', (3646, 3663), False, 'import os\n'), ((3702, 3755), 'filecmp.cmp', 'filecmp.cmp', (['file_path', 'test_file_path'], {'shallow': '(False)'}), '(file_path, test_file_path, shallow=False)\n', (3713, 3755), False, 'import filecmp\n')] |
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.utils.translation import ugettext_lazy as _
class CountManager(models.Manager):
def get_for_object(self, obj):
ctype = ContentType.objects.get_for_model(obj)
return self.get_or_create(content_type=ctype, object_id=obj.id)[0]
def get_for_model(self, model):
ctype = ContentType.objects.get_for_model(model)
return self.filter(content_type=ctype)
def increment(self, ctype_id, object_id):
ctype = ContentType.objects.get(id=ctype_id)
counter, created = self.get_or_create(content_type=ctype, object_id=object_id)
counter.count += 1
counter.save()
return counter
def inrement_for_object(self, obj):
counter = self.get_for_object(obj)
counter.counter += 1
counter.save()
return counter
class ViewCounter(models.Model):
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField(_('Object ID'))
object = generic.GenericForeignKey('content_type', 'object_id')
count = models.PositiveIntegerField(_('Counter'), default = 0)
objects = CountManager()
def get_object_title(self):
return unicode(self.object)
get_object_title.short_description = _('Object title')
def get_content_type(self):
return self.content_type
get_content_type.short_description = _('Content type')
class Meta:
verbose_name = _('View Counter')
verbose_name_plural = _('View Counters')
unique_together = (('content_type', 'object_id'),)
ordering = ('-count',)
def __unicode__(self):
return _(u'Counter for %(object)s = %(count)d') % dict(object = self.object, count = self.count)
class RedirCounter( models.Model ):
title = models.CharField( _('Title'), max_length = 40, blank=True)
url = models.CharField(_('Redirect URL'), max_length=255, unique = True)
count = models.PositiveIntegerField( _('Counter'), default = 0 )
def __unicode__(self):
return self.title
class Meta:
verbose_name = _('Download Counter')
verbose_name_plural = _('Download Counters')
class Referer( models.Model ):
counter = models.ForeignKey(RedirCounter, related_name = 'referers')
url = models.CharField(_('URL'), max_length = 255)
count = models.PositiveIntegerField( _('Counter'), default = 0 )
update_date = models.DateTimeField( editable = False, auto_now = True )
def __unicode__(self):
return _(u'To %(counter)s from %(url)s - %(count)s') % dict(
counter = self.counter, url = self.url, count = self.count)
class Meta:
verbose_name = _('Referer')
verbose_name_plural = _('Referers')
ordering = ('-update_date',)
get_latest_by = 'update_date'
| [
"django.utils.translation.ugettext_lazy",
"django.contrib.contenttypes.models.ContentType.objects.get_for_model",
"django.contrib.contenttypes.models.ContentType.objects.get",
"django.db.models.ForeignKey",
"django.contrib.contenttypes.generic.GenericForeignKey",
"django.db.models.DateTimeField"
] | [((1007, 1037), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ContentType'], {}), '(ContentType)\n', (1024, 1037), False, 'from django.db import models\n'), ((1111, 1165), 'django.contrib.contenttypes.generic.GenericForeignKey', 'generic.GenericForeignKey', (['"""content_type"""', '"""object_id"""'], {}), "('content_type', 'object_id')\n", (1136, 1165), False, 'from django.contrib.contenttypes import generic\n'), ((1373, 1390), 'django.utils.translation.ugettext_lazy', '_', (['"""Object title"""'], {}), "('Object title')\n", (1374, 1390), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1499, 1516), 'django.utils.translation.ugettext_lazy', '_', (['"""Content type"""'], {}), "('Content type')\n", (1500, 1516), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2316, 2372), 'django.db.models.ForeignKey', 'models.ForeignKey', (['RedirCounter'], {'related_name': '"""referers"""'}), "(RedirCounter, related_name='referers')\n", (2333, 2372), False, 'from django.db import models\n'), ((2517, 2568), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'editable': '(False)', 'auto_now': '(True)'}), '(editable=False, auto_now=True)\n', (2537, 2568), False, 'from django.db import models\n'), ((280, 318), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['obj'], {}), '(obj)\n', (313, 318), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((447, 487), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['model'], {}), '(model)\n', (480, 487), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((598, 634), 'django.contrib.contenttypes.models.ContentType.objects.get', 'ContentType.objects.get', ([], {'id': 'ctype_id'}), '(id=ctype_id)\n', (621, 634), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1082, 1096), 'django.utils.translation.ugettext_lazy', '_', (['"""Object ID"""'], {}), "('Object ID')\n", (1083, 1096), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1206, 1218), 'django.utils.translation.ugettext_lazy', '_', (['"""Counter"""'], {}), "('Counter')\n", (1207, 1218), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1557, 1574), 'django.utils.translation.ugettext_lazy', '_', (['"""View Counter"""'], {}), "('View Counter')\n", (1558, 1574), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1605, 1623), 'django.utils.translation.ugettext_lazy', '_', (['"""View Counters"""'], {}), "('View Counters')\n", (1606, 1623), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1914, 1924), 'django.utils.translation.ugettext_lazy', '_', (['"""Title"""'], {}), "('Title')\n", (1915, 1924), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1982, 1999), 'django.utils.translation.ugettext_lazy', '_', (['"""Redirect URL"""'], {}), "('Redirect URL')\n", (1983, 1999), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2073, 2085), 'django.utils.translation.ugettext_lazy', '_', (['"""Counter"""'], {}), "('Counter')\n", (2074, 2085), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2195, 2216), 'django.utils.translation.ugettext_lazy', '_', (['"""Download Counter"""'], {}), "('Download Counter')\n", (2196, 2216), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2247, 2269), 'django.utils.translation.ugettext_lazy', '_', (['"""Download Counters"""'], {}), "('Download Counters')\n", (2248, 2269), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2402, 2410), 'django.utils.translation.ugettext_lazy', '_', (['"""URL"""'], {}), "('URL')\n", (2403, 2410), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2471, 2483), 'django.utils.translation.ugettext_lazy', '_', (['"""Counter"""'], {}), "('Counter')\n", (2472, 2483), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2784, 2796), 'django.utils.translation.ugettext_lazy', '_', (['"""Referer"""'], {}), "('Referer')\n", (2785, 2796), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2827, 2840), 'django.utils.translation.ugettext_lazy', '_', (['"""Referers"""'], {}), "('Referers')\n", (2828, 2840), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1757, 1797), 'django.utils.translation.ugettext_lazy', '_', (['u"""Counter for %(object)s = %(count)d"""'], {}), "(u'Counter for %(object)s = %(count)d')\n", (1758, 1797), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2618, 2663), 'django.utils.translation.ugettext_lazy', '_', (['u"""To %(counter)s from %(url)s - %(count)s"""'], {}), "(u'To %(counter)s from %(url)s - %(count)s')\n", (2619, 2663), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
""" Few tests for annlowlevel helpers
"""
from pypy.rpython.test.tool import BaseRtypingTest, LLRtypeMixin, OORtypeMixin
from pypy.rpython.lltypesystem.rstr import mallocstr, mallocunicode
from pypy.rpython.lltypesystem import lltype
from pypy.rpython.ootypesystem import ootype
from pypy.rpython.annlowlevel import hlstr, llstr, oostr
from pypy.rpython.annlowlevel import hlunicode, llunicode
from pypy.rpython import annlowlevel
class TestLLType(BaseRtypingTest, LLRtypeMixin):
def test_hlstr(self):
s = mallocstr(3)
s.chars[0] = "a"
s.chars[1] = "b"
s.chars[2] = "c"
assert hlstr(s) == "abc"
def test_llstr(self):
s = llstr("abc")
assert len(s.chars) == 3
assert s.chars[0] == "a"
assert s.chars[1] == "b"
assert s.chars[2] == "c"
def test_llstr_compile(self):
def f(arg):
s = llstr(hlstr(arg))
return len(s.chars)
res = self.interpret(f, [self.string_to_ll("abc")])
assert res == 3
def test_hlunicode(self):
s = mallocunicode(3)
s.chars[0] = u"a"
s.chars[1] = u"b"
s.chars[2] = u"c"
assert hlunicode(s) == u"abc"
def test_llunicode(self):
s = llunicode(u"abc")
assert len(s.chars) == 3
assert s.chars[0] == u"a"
assert s.chars[1] == u"b"
assert s.chars[2] == u"c"
def test_llunicode_compile(self):
def f(arg):
s = llunicode(hlunicode(arg))
return len(s.chars)
res = self.interpret(f, [self.unicode_to_ll(u"abc")])
assert res == 3
def test_cast_instance_to_base_ptr(self):
class X(object):
pass
x = X()
ptr = annlowlevel.cast_instance_to_base_ptr(x)
assert lltype.typeOf(ptr) == annlowlevel.base_ptr_lltype()
y = annlowlevel.cast_base_ptr_to_instance(X, ptr)
assert y is x
class TestOOType(BaseRtypingTest, OORtypeMixin):
def test_hlstr(self):
s = ootype.make_string("abc")
assert hlstr(s) == "abc"
def test_oostr(self):
s = oostr("abc")
assert ootype.typeOf(s) == ootype.String
assert s._str == "abc"
def test_oostr_compile(self):
def f(arg):
s = oostr(hlstr(arg))
return s.ll_strlen()
res = self.interpret(f, [self.string_to_ll("abc")])
assert res == 3
def test_cast_instance_to_base_obj(self):
class X(object):
pass
x = X()
obj = annlowlevel.cast_instance_to_base_obj(x)
assert lltype.typeOf(obj) == annlowlevel.base_obj_ootype()
y = annlowlevel.cast_base_ptr_to_instance(X, obj)
assert y is x
| [
"pypy.rpython.annlowlevel.base_obj_ootype",
"pypy.rpython.lltypesystem.rstr.mallocstr",
"pypy.rpython.annlowlevel.cast_instance_to_base_obj",
"pypy.rpython.lltypesystem.lltype.typeOf",
"pypy.rpython.annlowlevel.hlstr",
"pypy.rpython.annlowlevel.cast_base_ptr_to_instance",
"pypy.rpython.annlowlevel.hlunicode",
"pypy.rpython.annlowlevel.base_ptr_lltype",
"pypy.rpython.lltypesystem.rstr.mallocunicode",
"pypy.rpython.annlowlevel.oostr",
"pypy.rpython.annlowlevel.cast_instance_to_base_ptr",
"pypy.rpython.annlowlevel.llstr",
"pypy.rpython.ootypesystem.ootype.make_string",
"pypy.rpython.annlowlevel.llunicode",
"pypy.rpython.ootypesystem.ootype.typeOf"
] | [((522, 534), 'pypy.rpython.lltypesystem.rstr.mallocstr', 'mallocstr', (['(3)'], {}), '(3)\n', (531, 534), False, 'from pypy.rpython.lltypesystem.rstr import mallocstr, mallocunicode\n'), ((682, 694), 'pypy.rpython.annlowlevel.llstr', 'llstr', (['"""abc"""'], {}), "('abc')\n", (687, 694), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n'), ((1080, 1096), 'pypy.rpython.lltypesystem.rstr.mallocunicode', 'mallocunicode', (['(3)'], {}), '(3)\n', (1093, 1096), False, 'from pypy.rpython.lltypesystem.rstr import mallocstr, mallocunicode\n'), ((1256, 1273), 'pypy.rpython.annlowlevel.llunicode', 'llunicode', (['u"""abc"""'], {}), "(u'abc')\n", (1265, 1273), False, 'from pypy.rpython.annlowlevel import hlunicode, llunicode\n'), ((1748, 1788), 'pypy.rpython.annlowlevel.cast_instance_to_base_ptr', 'annlowlevel.cast_instance_to_base_ptr', (['x'], {}), '(x)\n', (1785, 1788), False, 'from pypy.rpython import annlowlevel\n'), ((1868, 1913), 'pypy.rpython.annlowlevel.cast_base_ptr_to_instance', 'annlowlevel.cast_base_ptr_to_instance', (['X', 'ptr'], {}), '(X, ptr)\n', (1905, 1913), False, 'from pypy.rpython import annlowlevel\n'), ((2025, 2050), 'pypy.rpython.ootypesystem.ootype.make_string', 'ootype.make_string', (['"""abc"""'], {}), "('abc')\n", (2043, 2050), False, 'from pypy.rpython.ootypesystem import ootype\n'), ((2123, 2135), 'pypy.rpython.annlowlevel.oostr', 'oostr', (['"""abc"""'], {}), "('abc')\n", (2128, 2135), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n'), ((2542, 2582), 'pypy.rpython.annlowlevel.cast_instance_to_base_obj', 'annlowlevel.cast_instance_to_base_obj', (['x'], {}), '(x)\n', (2579, 2582), False, 'from pypy.rpython import annlowlevel\n'), ((2662, 2707), 'pypy.rpython.annlowlevel.cast_base_ptr_to_instance', 'annlowlevel.cast_base_ptr_to_instance', (['X', 'obj'], {}), '(X, obj)\n', (2699, 2707), False, 'from pypy.rpython import annlowlevel\n'), ((625, 633), 'pypy.rpython.annlowlevel.hlstr', 'hlstr', (['s'], {}), '(s)\n', (630, 633), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n'), ((1190, 1202), 'pypy.rpython.annlowlevel.hlunicode', 'hlunicode', (['s'], {}), '(s)\n', (1199, 1202), False, 'from pypy.rpython.annlowlevel import hlunicode, llunicode\n'), ((1804, 1822), 'pypy.rpython.lltypesystem.lltype.typeOf', 'lltype.typeOf', (['ptr'], {}), '(ptr)\n', (1817, 1822), False, 'from pypy.rpython.lltypesystem import lltype\n'), ((1826, 1855), 'pypy.rpython.annlowlevel.base_ptr_lltype', 'annlowlevel.base_ptr_lltype', ([], {}), '()\n', (1853, 1855), False, 'from pypy.rpython import annlowlevel\n'), ((2066, 2074), 'pypy.rpython.annlowlevel.hlstr', 'hlstr', (['s'], {}), '(s)\n', (2071, 2074), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n'), ((2151, 2167), 'pypy.rpython.ootypesystem.ootype.typeOf', 'ootype.typeOf', (['s'], {}), '(s)\n', (2164, 2167), False, 'from pypy.rpython.ootypesystem import ootype\n'), ((2598, 2616), 'pypy.rpython.lltypesystem.lltype.typeOf', 'lltype.typeOf', (['obj'], {}), '(obj)\n', (2611, 2616), False, 'from pypy.rpython.lltypesystem import lltype\n'), ((2620, 2649), 'pypy.rpython.annlowlevel.base_obj_ootype', 'annlowlevel.base_obj_ootype', ([], {}), '()\n', (2647, 2649), False, 'from pypy.rpython import annlowlevel\n'), ((904, 914), 'pypy.rpython.annlowlevel.hlstr', 'hlstr', (['arg'], {}), '(arg)\n', (909, 914), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n'), ((1494, 1508), 'pypy.rpython.annlowlevel.hlunicode', 'hlunicode', (['arg'], {}), '(arg)\n', (1503, 1508), False, 'from pypy.rpython.annlowlevel import hlunicode, llunicode\n'), ((2293, 2303), 'pypy.rpython.annlowlevel.hlstr', 'hlstr', (['arg'], {}), '(arg)\n', (2298, 2303), False, 'from pypy.rpython.annlowlevel import hlstr, llstr, oostr\n')] |
import setuptools
with open("README.md", "r") as readme:
readme = readme.read()
with open("requirements.txt", "r") as requirements:
requirements = requirements.read()
setuptools.setup(
name="udls", # Replace with your own username
version="1.7",
author="<NAME>",
author_email="<EMAIL>",
description="Base class and presets for fast dataset creation inside IRCAM",
long_description=readme,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points={
"console_scripts":
["resample = udls.resample:main", "duration = udls.duration:main"]
},
install_requires=requirements.split("\n"),
python_requires='>=3.7',
)
| [
"setuptools.find_packages"
] | [((489, 515), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (513, 515), False, 'import setuptools\n')] |
import unittest
import string
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal
from views_storage.serializers import csv, parquet, pickle, serializer, json
class TestSerializers(unittest.TestCase):
def assert_serializer_identity(self, dataframe, ser: serializer.Serializer):
assert_frame_equal(dataframe, ser.deserialize(ser.serialize(dataframe)))
def test_serializers_identity(self):
"""
This tests whether the listed serializers return an identical dataframe
when the DF is serialized and un-serialized.
"""
df = pd.DataFrame(np.random.rand(10,10), columns = list(string.ascii_lowercase[:10]))
for ser in csv.Csv, parquet.Parquet, pickle.Pickle:
self.assert_serializer_identity(df, ser())
ser = json.Json()
d = {
"a":1,
"2":[1,2,3],
"3": None,
"z": {"foo": 5.5}
}
self.assertEqual(d, ser.deserialize(ser.serialize(d)))
| [
"views_storage.serializers.json.Json",
"numpy.random.rand"
] | [((821, 832), 'views_storage.serializers.json.Json', 'json.Json', ([], {}), '()\n', (830, 832), False, 'from views_storage.serializers import csv, parquet, pickle, serializer, json\n'), ((623, 645), 'numpy.random.rand', 'np.random.rand', (['(10)', '(10)'], {}), '(10, 10)\n', (637, 645), True, 'import numpy as np\n')] |
# flake8: noqa
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
# https://seaborn.pydata.org/generated/seaborn.set_context.html
# https://seaborn.pydata.org/generated/seaborn.set_style.html
sns.set_style("white")
sns.set_context("paper", font_scale=0.5)
from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression
from numpy_ml.linear_models.lm import LinearRegression
from sklearn.model_selection import train_test_split
def random_regression_problem(n_ex, n_in, n_out, d=3, intercept=0, std=1, seed=0):
coef = np.random.uniform(0, 50, size=d)
coef[-1] = intercept
y = []
X = np.random.uniform(-100, 100, size=(n_ex, n_in))
for x in X:
val = np.polyval(coef, x) + np.random.normal(0, std)
y.append(val)
y = np.array(y)
X_train, X_test, y_train, y_test = train_test_split(
X, y, test_size=0.3, random_state=seed
)
return X_train, y_train, X_test, y_test, coef
def plot_regression():
np.random.seed(12345)
fig, axes = plt.subplots(4, 4)
for i, ax in enumerate(axes.flatten()):
n_in = 1
n_out = 1
d = np.random.randint(1, 5)
n_ex = np.random.randint(5, 500)
std = np.random.randint(0, 1000)
intercept = np.random.rand() * np.random.randint(-300, 300)
X_train, y_train, X_test, y_test, coefs = random_regression_problem(
n_ex, n_in, n_out, d=d, intercept=intercept, std=std, seed=i
)
LR = LinearRegression(fit_intercept=True)
LR.fit(X_train, y_train)
y_pred = LR.predict(X_test)
loss = np.mean((y_test.flatten() - y_pred.flatten()) ** 2)
d = 3
best_loss = np.inf
for gamma in np.linspace(1e-10, 1, 100):
for c0 in np.linspace(-1, 1000, 100):
kernel = "PolynomialKernel(d={}, gamma={}, c0={})".format(d, gamma, c0)
KR_poly = KernelRegression(kernel=kernel)
KR_poly.fit(X_train, y_train)
y_pred_poly = KR_poly.predict(X_test)
loss_poly = np.mean((y_test.flatten() - y_pred_poly.flatten()) ** 2)
if loss_poly <= best_loss:
KR_poly_best = kernel
best_loss = loss_poly
print("Best kernel: {} || loss: {:.4f}".format(KR_poly_best, best_loss))
KR_poly = KernelRegression(kernel=KR_poly_best)
KR_poly.fit(X_train, y_train)
KR_rbf = KernelRegression(kernel="RBFKernel(sigma=1)")
KR_rbf.fit(X_train, y_train)
y_pred_rbf = KR_rbf.predict(X_test)
loss_rbf = np.mean((y_test.flatten() - y_pred_rbf.flatten()) ** 2)
xmin = min(X_test) - 0.1 * (max(X_test) - min(X_test))
xmax = max(X_test) + 0.1 * (max(X_test) - min(X_test))
X_plot = np.linspace(xmin, xmax, 100)
y_plot = LR.predict(X_plot)
y_plot_poly = KR_poly.predict(X_plot)
y_plot_rbf = KR_rbf.predict(X_plot)
ax.scatter(X_test, y_test, alpha=0.5)
ax.plot(X_plot, y_plot, label="OLS", alpha=0.5)
ax.plot(
X_plot, y_plot_poly, label="KR (poly kernel, d={})".format(d), alpha=0.5
)
ax.plot(X_plot, y_plot_rbf, label="KR (rbf kernel)", alpha=0.5)
ax.legend()
# ax.set_title(
# "MSE\nLR: {:.2f} KR (poly): {:.2f}\nKR (rbf): {:.2f}".format(
# loss, loss_poly, loss_rbf
# )
# )
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
plt.tight_layout()
plt.savefig("img/kr_plots.png", dpi=300)
plt.close("all")
def plot_knn():
np.random.seed(12345)
fig, axes = plt.subplots(4, 4)
for i, ax in enumerate(axes.flatten()):
n_in = 1
n_out = 1
d = np.random.randint(1, 5)
n_ex = np.random.randint(5, 500)
std = np.random.randint(0, 1000)
intercept = np.random.rand() * np.random.randint(-300, 300)
X_train, y_train, X_test, y_test, coefs = random_regression_problem(
n_ex, n_in, n_out, d=d, intercept=intercept, std=std, seed=i
)
LR = LinearRegression(fit_intercept=True)
LR.fit(X_train, y_train)
y_pred = LR.predict(X_test)
loss = np.mean((y_test.flatten() - y_pred.flatten()) ** 2)
knn_1 = KNN(k=1, classifier=False, leaf_size=10, weights="uniform")
knn_1.fit(X_train, y_train)
y_pred_1 = knn_1.predict(X_test)
loss_1 = np.mean((y_test.flatten() - y_pred_1.flatten()) ** 2)
knn_5 = KNN(k=5, classifier=False, leaf_size=10, weights="uniform")
knn_5.fit(X_train, y_train)
y_pred_5 = knn_5.predict(X_test)
loss_5 = np.mean((y_test.flatten() - y_pred_5.flatten()) ** 2)
knn_10 = KNN(k=10, classifier=False, leaf_size=10, weights="uniform")
knn_10.fit(X_train, y_train)
y_pred_10 = knn_10.predict(X_test)
loss_10 = np.mean((y_test.flatten() - y_pred_10.flatten()) ** 2)
xmin = min(X_test) - 0.1 * (max(X_test) - min(X_test))
xmax = max(X_test) + 0.1 * (max(X_test) - min(X_test))
X_plot = np.linspace(xmin, xmax, 100)
y_plot = LR.predict(X_plot)
y_plot_1 = knn_1.predict(X_plot)
y_plot_5 = knn_5.predict(X_plot)
y_plot_10 = knn_10.predict(X_plot)
ax.scatter(X_test, y_test, alpha=0.5)
ax.plot(X_plot, y_plot, label="OLS", alpha=0.5)
ax.plot(X_plot, y_plot_1, label="KNN (k=1)", alpha=0.5)
ax.plot(X_plot, y_plot_5, label="KNN (k=5)", alpha=0.5)
ax.plot(X_plot, y_plot_10, label="KNN (k=10)", alpha=0.5)
ax.legend()
# ax.set_title(
# "MSE\nLR: {:.2f} KR (poly): {:.2f}\nKR (rbf): {:.2f}".format(
# loss, loss_poly, loss_rbf
# )
# )
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
plt.tight_layout()
plt.savefig("img/knn_plots.png", dpi=300)
plt.close("all")
def plot_gp():
np.random.seed(12345)
sns.set_context("paper", font_scale=0.65)
X_test = np.linspace(-10, 10, 100)
X_train = np.array([-3, 0, 7, 1, -9])
y_train = np.sin(X_train)
fig, axes = plt.subplots(2, 2)
alphas = [0, 1e-10, 1e-5, 1]
for ix, (ax, alpha) in enumerate(zip(axes.flatten(), alphas)):
G = GPRegression(kernel="RBFKernel", alpha=alpha)
G.fit(X_train, y_train)
y_pred, conf = G.predict(X_test)
ax.plot(X_train, y_train, "rx", label="observed")
ax.plot(X_test, np.sin(X_test), label="true fn")
ax.plot(X_test, y_pred, "--", label="MAP (alpha={})".format(alpha))
ax.fill_between(X_test, y_pred + conf, y_pred - conf, alpha=0.1)
ax.set_xticks([])
ax.set_yticks([])
sns.despine()
ax.legend()
plt.tight_layout()
plt.savefig("img/gp_alpha.png", dpi=300)
plt.close("all")
def plot_gp_dist():
np.random.seed(12345)
sns.set_context("paper", font_scale=0.95)
X_test = np.linspace(-10, 10, 100)
X_train = np.array([-3, 0, 7, 1, -9])
y_train = np.sin(X_train)
fig, axes = plt.subplots(1, 3)
G = GPRegression(kernel="RBFKernel", alpha=0)
G.fit(X_train, y_train)
y_pred_prior = G.sample(X_test, 3, "prior")
y_pred_posterior = G.sample(X_test, 3, "posterior_predictive")
for prior_sample in y_pred_prior:
axes[0].plot(X_test, prior_sample.ravel(), lw=1)
axes[0].set_title("Prior samples")
axes[0].set_xticks([])
axes[0].set_yticks([])
for post_sample in y_pred_posterior:
axes[1].plot(X_test, post_sample.ravel(), lw=1)
axes[1].plot(X_train, y_train, "ko", ms=1.2)
axes[1].set_title("Posterior samples")
axes[1].set_xticks([])
axes[1].set_yticks([])
y_pred, conf = G.predict(X_test)
axes[2].plot(X_test, np.sin(X_test), lw=1, label="true function")
axes[2].plot(X_test, y_pred, lw=1, label="MAP estimate")
axes[2].fill_between(X_test, y_pred + conf, y_pred - conf, alpha=0.1)
axes[2].plot(X_train, y_train, "ko", ms=1.2, label="observed")
axes[2].legend(fontsize="x-small")
axes[2].set_title("Posterior mean")
axes[2].set_xticks([])
axes[2].set_yticks([])
fig.set_size_inches(6, 2)
plt.tight_layout()
plt.savefig("img/gp_dist.png", dpi=300)
plt.close("all")
| [
"numpy.random.rand",
"seaborn.set_style",
"numpy.array",
"numpy.sin",
"seaborn.despine",
"numpy_ml.nonparametric.GPRegression",
"matplotlib.pyplot.close",
"numpy.linspace",
"numpy.polyval",
"numpy.random.seed",
"numpy.random.normal",
"matplotlib.pyplot.savefig",
"sklearn.model_selection.train_test_split",
"seaborn.set_context",
"numpy_ml.nonparametric.KernelRegression",
"numpy_ml.nonparametric.KNN",
"numpy_ml.linear_models.lm.LinearRegression",
"numpy.random.randint",
"matplotlib.pyplot.tight_layout",
"numpy.random.uniform",
"matplotlib.pyplot.subplots"
] | [((216, 238), 'seaborn.set_style', 'sns.set_style', (['"""white"""'], {}), "('white')\n", (229, 238), True, 'import seaborn as sns\n'), ((239, 279), 'seaborn.set_context', 'sns.set_context', (['"""paper"""'], {'font_scale': '(0.5)'}), "('paper', font_scale=0.5)\n", (254, 279), True, 'import seaborn as sns\n'), ((557, 589), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(50)'], {'size': 'd'}), '(0, 50, size=d)\n', (574, 589), True, 'import numpy as np\n'), ((635, 682), 'numpy.random.uniform', 'np.random.uniform', (['(-100)', '(100)'], {'size': '(n_ex, n_in)'}), '(-100, 100, size=(n_ex, n_in))\n', (652, 682), True, 'import numpy as np\n'), ((790, 801), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (798, 801), True, 'import numpy as np\n'), ((842, 898), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.3)', 'random_state': 'seed'}), '(X, y, test_size=0.3, random_state=seed)\n', (858, 898), False, 'from sklearn.model_selection import train_test_split\n'), ((992, 1013), 'numpy.random.seed', 'np.random.seed', (['(12345)'], {}), '(12345)\n', (1006, 1013), True, 'import numpy as np\n'), ((1030, 1048), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(4)', '(4)'], {}), '(4, 4)\n', (1042, 1048), True, 'import matplotlib.pyplot as plt\n'), ((3517, 3535), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (3533, 3535), True, 'import matplotlib.pyplot as plt\n'), ((3540, 3580), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""img/kr_plots.png"""'], {'dpi': '(300)'}), "('img/kr_plots.png', dpi=300)\n", (3551, 3580), True, 'import matplotlib.pyplot as plt\n'), ((3585, 3601), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (3594, 3601), True, 'import matplotlib.pyplot as plt\n'), ((3624, 3645), 'numpy.random.seed', 'np.random.seed', (['(12345)'], {}), '(12345)\n', (3638, 3645), True, 'import numpy as np\n'), ((3662, 3680), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(4)', '(4)'], {}), '(4, 4)\n', (3674, 3680), True, 'import matplotlib.pyplot as plt\n'), ((5881, 5899), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5897, 5899), True, 'import matplotlib.pyplot as plt\n'), ((5904, 5945), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""img/knn_plots.png"""'], {'dpi': '(300)'}), "('img/knn_plots.png', dpi=300)\n", (5915, 5945), True, 'import matplotlib.pyplot as plt\n'), ((5950, 5966), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (5959, 5966), True, 'import matplotlib.pyplot as plt\n'), ((5988, 6009), 'numpy.random.seed', 'np.random.seed', (['(12345)'], {}), '(12345)\n', (6002, 6009), True, 'import numpy as np\n'), ((6014, 6055), 'seaborn.set_context', 'sns.set_context', (['"""paper"""'], {'font_scale': '(0.65)'}), "('paper', font_scale=0.65)\n", (6029, 6055), True, 'import seaborn as sns\n'), ((6070, 6095), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(100)'], {}), '(-10, 10, 100)\n', (6081, 6095), True, 'import numpy as np\n'), ((6110, 6137), 'numpy.array', 'np.array', (['[-3, 0, 7, 1, -9]'], {}), '([-3, 0, 7, 1, -9])\n', (6118, 6137), True, 'import numpy as np\n'), ((6152, 6167), 'numpy.sin', 'np.sin', (['X_train'], {}), '(X_train)\n', (6158, 6167), True, 'import numpy as np\n'), ((6185, 6203), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (6197, 6203), True, 'import matplotlib.pyplot as plt\n'), ((6800, 6818), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (6816, 6818), True, 'import matplotlib.pyplot as plt\n'), ((6823, 6863), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""img/gp_alpha.png"""'], {'dpi': '(300)'}), "('img/gp_alpha.png', dpi=300)\n", (6834, 6863), True, 'import matplotlib.pyplot as plt\n'), ((6868, 6884), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (6877, 6884), True, 'import matplotlib.pyplot as plt\n'), ((6911, 6932), 'numpy.random.seed', 'np.random.seed', (['(12345)'], {}), '(12345)\n', (6925, 6932), True, 'import numpy as np\n'), ((6937, 6978), 'seaborn.set_context', 'sns.set_context', (['"""paper"""'], {'font_scale': '(0.95)'}), "('paper', font_scale=0.95)\n", (6952, 6978), True, 'import seaborn as sns\n'), ((6993, 7018), 'numpy.linspace', 'np.linspace', (['(-10)', '(10)', '(100)'], {}), '(-10, 10, 100)\n', (7004, 7018), True, 'import numpy as np\n'), ((7033, 7060), 'numpy.array', 'np.array', (['[-3, 0, 7, 1, -9]'], {}), '([-3, 0, 7, 1, -9])\n', (7041, 7060), True, 'import numpy as np\n'), ((7075, 7090), 'numpy.sin', 'np.sin', (['X_train'], {}), '(X_train)\n', (7081, 7090), True, 'import numpy as np\n'), ((7108, 7126), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {}), '(1, 3)\n', (7120, 7126), True, 'import matplotlib.pyplot as plt\n'), ((7135, 7176), 'numpy_ml.nonparametric.GPRegression', 'GPRegression', ([], {'kernel': '"""RBFKernel"""', 'alpha': '(0)'}), "(kernel='RBFKernel', alpha=0)\n", (7147, 7176), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((8233, 8251), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (8249, 8251), True, 'import matplotlib.pyplot as plt\n'), ((8256, 8295), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""img/gp_dist.png"""'], {'dpi': '(300)'}), "('img/gp_dist.png', dpi=300)\n", (8267, 8295), True, 'import matplotlib.pyplot as plt\n'), ((8300, 8316), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (8309, 8316), True, 'import matplotlib.pyplot as plt\n'), ((1140, 1163), 'numpy.random.randint', 'np.random.randint', (['(1)', '(5)'], {}), '(1, 5)\n', (1157, 1163), True, 'import numpy as np\n'), ((1179, 1204), 'numpy.random.randint', 'np.random.randint', (['(5)', '(500)'], {}), '(5, 500)\n', (1196, 1204), True, 'import numpy as np\n'), ((1219, 1245), 'numpy.random.randint', 'np.random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (1236, 1245), True, 'import numpy as np\n'), ((1488, 1524), 'numpy_ml.linear_models.lm.LinearRegression', 'LinearRegression', ([], {'fit_intercept': '(True)'}), '(fit_intercept=True)\n', (1504, 1524), False, 'from numpy_ml.linear_models.lm import LinearRegression\n'), ((1724, 1750), 'numpy.linspace', 'np.linspace', (['(1e-10)', '(1)', '(100)'], {}), '(1e-10, 1, 100)\n', (1735, 1750), True, 'import numpy as np\n'), ((2360, 2397), 'numpy_ml.nonparametric.KernelRegression', 'KernelRegression', ([], {'kernel': 'KR_poly_best'}), '(kernel=KR_poly_best)\n', (2376, 2397), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((2454, 2499), 'numpy_ml.nonparametric.KernelRegression', 'KernelRegression', ([], {'kernel': '"""RBFKernel(sigma=1)"""'}), "(kernel='RBFKernel(sigma=1)')\n", (2470, 2499), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((2800, 2828), 'numpy.linspace', 'np.linspace', (['xmin', 'xmax', '(100)'], {}), '(xmin, xmax, 100)\n', (2811, 2828), True, 'import numpy as np\n'), ((3772, 3795), 'numpy.random.randint', 'np.random.randint', (['(1)', '(5)'], {}), '(1, 5)\n', (3789, 3795), True, 'import numpy as np\n'), ((3811, 3836), 'numpy.random.randint', 'np.random.randint', (['(5)', '(500)'], {}), '(5, 500)\n', (3828, 3836), True, 'import numpy as np\n'), ((3851, 3877), 'numpy.random.randint', 'np.random.randint', (['(0)', '(1000)'], {}), '(0, 1000)\n', (3868, 3877), True, 'import numpy as np\n'), ((4120, 4156), 'numpy_ml.linear_models.lm.LinearRegression', 'LinearRegression', ([], {'fit_intercept': '(True)'}), '(fit_intercept=True)\n', (4136, 4156), False, 'from numpy_ml.linear_models.lm import LinearRegression\n'), ((4310, 4369), 'numpy_ml.nonparametric.KNN', 'KNN', ([], {'k': '(1)', 'classifier': '(False)', 'leaf_size': '(10)', 'weights': '"""uniform"""'}), "(k=1, classifier=False, leaf_size=10, weights='uniform')\n", (4313, 4369), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((4535, 4594), 'numpy_ml.nonparametric.KNN', 'KNN', ([], {'k': '(5)', 'classifier': '(False)', 'leaf_size': '(10)', 'weights': '"""uniform"""'}), "(k=5, classifier=False, leaf_size=10, weights='uniform')\n", (4538, 4594), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((4761, 4821), 'numpy_ml.nonparametric.KNN', 'KNN', ([], {'k': '(10)', 'classifier': '(False)', 'leaf_size': '(10)', 'weights': '"""uniform"""'}), "(k=10, classifier=False, leaf_size=10, weights='uniform')\n", (4764, 4821), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((5119, 5147), 'numpy.linspace', 'np.linspace', (['xmin', 'xmax', '(100)'], {}), '(xmin, xmax, 100)\n', (5130, 5147), True, 'import numpy as np\n'), ((6316, 6361), 'numpy_ml.nonparametric.GPRegression', 'GPRegression', ([], {'kernel': '"""RBFKernel"""', 'alpha': 'alpha'}), "(kernel='RBFKernel', alpha=alpha)\n", (6328, 6361), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n'), ((6760, 6773), 'seaborn.despine', 'sns.despine', ([], {}), '()\n', (6771, 6773), True, 'import seaborn as sns\n'), ((7818, 7832), 'numpy.sin', 'np.sin', (['X_test'], {}), '(X_test)\n', (7824, 7832), True, 'import numpy as np\n'), ((713, 732), 'numpy.polyval', 'np.polyval', (['coef', 'x'], {}), '(coef, x)\n', (723, 732), True, 'import numpy as np\n'), ((735, 759), 'numpy.random.normal', 'np.random.normal', (['(0)', 'std'], {}), '(0, std)\n', (751, 759), True, 'import numpy as np\n'), ((1266, 1282), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (1280, 1282), True, 'import numpy as np\n'), ((1285, 1313), 'numpy.random.randint', 'np.random.randint', (['(-300)', '(300)'], {}), '(-300, 300)\n', (1302, 1313), True, 'import numpy as np\n'), ((1774, 1800), 'numpy.linspace', 'np.linspace', (['(-1)', '(1000)', '(100)'], {}), '(-1, 1000, 100)\n', (1785, 1800), True, 'import numpy as np\n'), ((3898, 3914), 'numpy.random.rand', 'np.random.rand', ([], {}), '()\n', (3912, 3914), True, 'import numpy as np\n'), ((3917, 3945), 'numpy.random.randint', 'np.random.randint', (['(-300)', '(300)'], {}), '(-300, 300)\n', (3934, 3945), True, 'import numpy as np\n'), ((6518, 6532), 'numpy.sin', 'np.sin', (['X_test'], {}), '(X_test)\n', (6524, 6532), True, 'import numpy as np\n'), ((1916, 1947), 'numpy_ml.nonparametric.KernelRegression', 'KernelRegression', ([], {'kernel': 'kernel'}), '(kernel=kernel)\n', (1932, 1947), False, 'from numpy_ml.nonparametric import GPRegression, KNN, KernelRegression\n')] |
from typing import List
from fastapi import FastAPI, Header
from fastapi.responses import PlainTextResponse, JSONResponse
import json
from pydantic import BaseModel
from typing import List
import datetime
from fibonacci import fib
app = FastAPI()
@app.get("/serialize")
def getSerialized():
return {"message": "Hello, World!"}
@app.get("/serialize/big")
def getSerializedBig():
resp = {
"family": "Elephantidae",
"scientificClassification": {
"kingdom": "Animalia",
"phylum": "Chordata",
"class": "Mammalia",
"order": "Proboscidea",
"superfamily": "Elephantoidea",
"classifier": {
"name": "<NAME>",
"born": {
"year": 1800,
"month": "February",
"day": 12,
"city": "Walsall",
"country": "England"
},
"died": {
"year": 1875,
"month": "March",
"day": 7,
"city": "London",
"country": "England"
},
"publications": []
}
}
}
for i in range(0, 50):
resp["scientificClassification"]["classifier"]["publications"].append({
"year": 1821 + i,
"related": True,
"description": "Some discovery in " + str(1821 + i)
})
return resp
@app.get("/plain/text")
def getPlainText():
return PlainTextResponse("Hello, World!")
@app.get("/query/{userId}/tools/{offset}")
def getQueryResult(userId: int, offset: int, model: str, factor: str, length: int, width: int, allow: bool, x_api_key: str = Header(str), x_session_id: str = Header(str)):
if userId == 300 and \
offset == 10 and \
x_api_key == "<KEY>" and \
x_session_id == "jhg723bf" and \
model == "Dozer" and \
factor == "ATX" and \
length == 800 and \
width == 800 and \
allow:
return {"id": 6000, "foundAt": datetime.datetime.now().isoformat()}
return JSONResponse({"message": "Error: Check your query arguments again."}, 404)
class Address(BaseModel):
street: str
number: int
city: str
class ObjectIn(BaseModel):
name: str
addresses: List[Address]
oldTown: bool
class ObjectOut(BaseModel):
name: str
addresses: List[Address]
oldTown: bool
createdAt: str
id: int
@app.post("/insert", response_model=ObjectOut, status_code=201)
def insertObject(body: ObjectIn):
resp = json.loads(body.json())
resp["createdAt"] = datetime.datetime.now().isoformat()
resp["id"] = 300
return resp
@app.get("/calculate")
def getCalculated():
return {"fibonacci": fib(27)}
| [
"fastapi.Header",
"fastapi.responses.PlainTextResponse",
"fastapi.FastAPI",
"fibonacci.fib",
"fastapi.responses.JSONResponse",
"datetime.datetime.now"
] | [((238, 247), 'fastapi.FastAPI', 'FastAPI', ([], {}), '()\n', (245, 247), False, 'from fastapi import FastAPI, Header\n'), ((1552, 1586), 'fastapi.responses.PlainTextResponse', 'PlainTextResponse', (['"""Hello, World!"""'], {}), "('Hello, World!')\n", (1569, 1586), False, 'from fastapi.responses import PlainTextResponse, JSONResponse\n'), ((1757, 1768), 'fastapi.Header', 'Header', (['str'], {}), '(str)\n', (1763, 1768), False, 'from fastapi import FastAPI, Header\n'), ((1790, 1801), 'fastapi.Header', 'Header', (['str'], {}), '(str)\n', (1796, 1801), False, 'from fastapi import FastAPI, Header\n'), ((2185, 2259), 'fastapi.responses.JSONResponse', 'JSONResponse', (["{'message': 'Error: Check your query arguments again.'}", '(404)'], {}), "({'message': 'Error: Check your query arguments again.'}, 404)\n", (2197, 2259), False, 'from fastapi.responses import PlainTextResponse, JSONResponse\n'), ((2849, 2856), 'fibonacci.fib', 'fib', (['(27)'], {}), '(27)\n', (2852, 2856), False, 'from fibonacci import fib\n'), ((2705, 2728), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2726, 2728), False, 'import datetime\n'), ((2136, 2159), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2157, 2159), False, 'import datetime\n')] |
"""empty message
Revision ID: 30fe0e82d100
Revises: d21b7288ec04
Create Date: 2017-09-14 15:47:02.032628
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '30fe0e82d100'
down_revision = 'd21b7288ec04'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('session', sa.Column('location', sa.TEXT(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('session', 'location')
# ### end Alembic commands ###
| [
"alembic.op.drop_column",
"sqlalchemy.TEXT"
] | [((582, 619), 'alembic.op.drop_column', 'op.drop_column', (['"""session"""', '"""location"""'], {}), "('session', 'location')\n", (596, 619), False, 'from alembic import op\n'), ((431, 440), 'sqlalchemy.TEXT', 'sa.TEXT', ([], {}), '()\n', (438, 440), True, 'import sqlalchemy as sa\n')] |
import pytest
from .common import GRAINS_EXPECTATIONS
pytestmark = pytest.mark.usefixtures("master")
def pytest_generate_tests(metafunc):
functions = [
'test_get_osarch',
'test_get_os',
'test_get_oscodename',
'test_get_os_family',
'test_get_osfullname',
'test_get_osrelease',
'test_get_osrelease_info'
]
expectations = GRAINS_EXPECTATIONS
tags = set(metafunc.config.getini('TAGS'))
tag = set(tags).intersection(set(expectations)).pop()
if metafunc.function.__name__ in functions and tag:
metafunc.parametrize(
'expected', [expectations[tag]], ids=lambda it: tag)
def test_get_cpuarch(minion):
assert minion.salt_call('grains.get', 'cpuarch') == 'x86_64'
def test_get_os(minion, expected):
key = 'os'
assert minion.salt_call('grains.get', key) == expected[key]
def test_get_items(minion):
assert minion.salt_call('grains.get', 'items') == ''
def test_get_os_family(minion, expected):
key = 'os_family'
assert minion.salt_call('grains.get', key) == expected[key]
def test_get_oscodename(minion, expected):
key = 'oscodename'
assert minion.salt_call('grains.get', key) == expected[key]
def test_get_osfullname(minion, expected):
key = 'osfullname'
assert minion.salt_call('grains.get', key) == expected[key]
def test_get_osarch(minion, expected):
assert minion.salt_call('grains.get', 'osarch') == expected.get('osarch', 'x86_64')
def test_get_osrelease(minion, expected):
key = 'osrelease'
assert minion.salt_call('grains.get', key) == expected[key]
def test_get_osrelease_info(minion, expected):
key = 'osrelease_info'
assert minion.salt_call('grains.get', 'osrelease_info') == expected[key]
@pytest.mark.skiptags('products-next', 'ubuntu', 'devel')
def test_salt_version(minion):
rpm_version = str(minion['container'].run('rpm -q salt --queryformat "%{VERSION}"').decode())
assert minion.salt_call('grains.get', 'saltversion') == rpm_version
| [
"pytest.mark.skiptags",
"pytest.mark.usefixtures"
] | [((69, 102), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""master"""'], {}), "('master')\n", (92, 102), False, 'import pytest\n'), ((1776, 1832), 'pytest.mark.skiptags', 'pytest.mark.skiptags', (['"""products-next"""', '"""ubuntu"""', '"""devel"""'], {}), "('products-next', 'ubuntu', 'devel')\n", (1796, 1832), False, 'import pytest\n')] |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<selected_id>[0-9]+)/dashboard/$', views.admin_dashboard, name='admin_dashboard'),
url(r'^(?P<selected_id>[0-9]+)/get_user/$', views.admin_get_user, name='admin_get_user'),
url(r'^(?P<selected_id>[0-9]+)/delete_user/$', views.admin_delete_user, name='admin_delete_user'),
url(r'^(?P<selected_id>[0-9]+)/update_user/$', views.admin_update_user, name='admin_update_user'),
url(r'^(?P<selected_id>[0-9]+)/get_all_users/$', views.admin_get_all_users, name='admin_get_all_users'),
url(r'^(?P<selected_id>[0-9]+)/analytics/$', views.admin_analytics, name='admin_analytics'),
]
| [
"django.conf.urls.url"
] | [((74, 169), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/dashboard/$"""', 'views.admin_dashboard'], {'name': '"""admin_dashboard"""'}), "('^(?P<selected_id>[0-9]+)/dashboard/$', views.admin_dashboard, name=\n 'admin_dashboard')\n", (77, 169), False, 'from django.conf.urls import url\n'), ((171, 263), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/get_user/$"""', 'views.admin_get_user'], {'name': '"""admin_get_user"""'}), "('^(?P<selected_id>[0-9]+)/get_user/$', views.admin_get_user, name=\n 'admin_get_user')\n", (174, 263), False, 'from django.conf.urls import url\n'), ((265, 366), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/delete_user/$"""', 'views.admin_delete_user'], {'name': '"""admin_delete_user"""'}), "('^(?P<selected_id>[0-9]+)/delete_user/$', views.admin_delete_user, name\n ='admin_delete_user')\n", (268, 366), False, 'from django.conf.urls import url\n'), ((368, 469), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/update_user/$"""', 'views.admin_update_user'], {'name': '"""admin_update_user"""'}), "('^(?P<selected_id>[0-9]+)/update_user/$', views.admin_update_user, name\n ='admin_update_user')\n", (371, 469), False, 'from django.conf.urls import url\n'), ((471, 577), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/get_all_users/$"""', 'views.admin_get_all_users'], {'name': '"""admin_get_all_users"""'}), "('^(?P<selected_id>[0-9]+)/get_all_users/$', views.admin_get_all_users,\n name='admin_get_all_users')\n", (474, 577), False, 'from django.conf.urls import url\n'), ((580, 675), 'django.conf.urls.url', 'url', (['"""^(?P<selected_id>[0-9]+)/analytics/$"""', 'views.admin_analytics'], {'name': '"""admin_analytics"""'}), "('^(?P<selected_id>[0-9]+)/analytics/$', views.admin_analytics, name=\n 'admin_analytics')\n", (583, 675), False, 'from django.conf.urls import url\n')] |
import libs
import basetype
import operator
import ast
import xutils
import sys
import xutils
import parser
def calcClassMatchingDigree(sourceCls, targetCls):
# print('calcClassMatchingDigree', sourceCls, targetCls)
assert sourceCls is not None, (sourceCls, targetCls)
if targetCls is None:
# default case
return sys.maxint
if sourceCls is targetCls:
# print('calcClassMatchingDigree exact match', sourceCls, targetCls)
return 0
minDegree = sys.maxint
for base in sourceCls.bases:
degree = calcClassMatchingDigree(base.getTarget(), targetCls)
if degree == 0:
minDegree = degree + 1
# print('calcClassMatchingDigree exact match in base', sourceCls, base, targetCls)
break
elif degree < minDegree:
minDegree = degree + 1
# print('calcClassMatchingDigree result', sourceCls, targetCls, minDegree)
return minDegree
def calcClassFuncMatchingDigree(sourceCls, funcname):
# print('calcClassFuncMatchingDigree', sourceCls, funcname)
if sourceCls.hasSymbol(funcname):
func = sourceCls.findLocalSymbol(funcname)
if func.info.dispatched is False:
# print('calcClassFuncMatchingDigree hasSymbol', sourceCls, funcname, func)
return 0, func
minDegree = sys.maxint
minSymbol = None
for base in sourceCls.bases:
degree, symbol = calcClassFuncMatchingDigree(base.getTarget(), funcname)
if degree == 0:
minDegree = degree + 1
minSymbol = symbol
# print('calcClassFuncMatchingDigree hasSymbol in direct base', sourceCls, base, minDegree, funcname)
break
if degree < minDegree:
minDegree = degree + 1
minSymbol = symbol
# print('calcClassFuncMatchingDigree hasSymbol in more direct base', sourceCls, base, degree, minDegree, funcname)
# print('calcClassFuncMatchingDigree return', sourceCls, minDegree, minSymbol, funcname)
return minDegree, minSymbol
class GlobalScope(object):
def __init__(self, unit):
self.unit = unit
# def has(self, name):
# return name in self.values
def getVar(self, name):
# print('GlobalScope.getVar', name, self.unit.name, self.unit.pkg.fullpath)
assert False, (self, name)
var = self.unit.findSymbol(name)
if var.isVar() and var.owner.isUnit():
# global var
return var
# assert False, (self, name, var)
return None
def setValue(self, name, val):
assert name in self.values, (name, val, self.values)
assert False
def getValue(self, name):
# print('GlobalScope.getValue', name, self.values)
assert False
pass
class EvaluatorScope(object):
def __init__(self):
self.vars = {}
def __repr__(self):
return 'EvaluatorScope(%s)' % self.vars
# def has(self, name):
# return name in self.vars
def addVar(self, name, var):
# print('EvaluatorScope.addVar', name, var, self)
assert name not in self.vars, (name, var, self.vars)
self.vars[name] = var
def getVar(self, name):
# print('EvaluatorScope.getVar', name, self.vars)
return self.vars.get(name)
def setValue(self, name, val):
assert name in self.vars, (name, val, self.vars)
if name == 'text':
assert isinstance(val, str), (name, val)
# print('EvaluatorScope.setValue', name, val)
self.vars[name].setValue(val)
def getValue(self, name):
# print('EvaluatorScope.getValue', name, self.vars)
return self.vars.get(name).getValue()
class VarHolder(ast.AstNode):
def __init__(self, var, value):
assert var is not None and value is not None, ('VarHolder.init', var, value)
self.var = var
self.value = value
# print('VarHolder.new', var.name, var, val, self)
# assert var.getType() != ast.makePrimitiveType('int') or not isinstance(value, str)
def setValue(self, val):
assert val is not None, ('VarHolder.setValue', self.var, self.value, val)
oldval = self.value
self.value = val
# print('VarHolder.setValue', self.var.name, self.var, self.value, val, self, oldval)
def getValue(self):
# print('VarHolder.getValue', self.var.name, self.var, self.value)
return self.value
def __repr__(self):
return 'VarHolder:%s:%s:val=%s' % (self.var, ast.formatId(self), self.value)
FlowFlags = xutils.Enum('FlowFlags', 'NORMAL', 'RETURN', 'BREAK', 'CONTINUE')
class EvaluatorStack(object):
def __init__(self, f, args, this, env, nilVal):
assert this is None or isinstance(this, ClassDefEvaluator), this
self.func = f
self.args = args
self.this = this
self.unit = f.getOwnerUnit()
# self.global_scope = GlobalScope(self.unit)# if env is None else None
self.scopes = [this, self.unit.globalScope] if this else [self.unit.globalScope]
self.flowFlag = FlowFlags.NORMAL
self.returnValue = nilVal
# env is the variable context for closure
self.env = env
def __repr__(self):
return 'EvaluatorStack(%s)' % self.func.shortname()
def addVar(self, name, val):
# print('EvaluatorStack.addVar', name, val, self, self.this, self.scopes)
return self.scopes[0].addVar(name, val)
def getVar(self, name):
# print('EvaluatorStack.getVar', name)
for scope in self.scopes:
# print('EvaluatorStack.getVar scope', name, scope, type(scope), self, self.scopes)
var = scope.getVar(name)
if var:
# print('EvaluatorStack.getVar scope found', name, var, scope)
return var
if self.env:
# print('EvaluatorStack.getVar search in env', name, self.env, self.env.scopes, self, self.scopes)
return self.env.getVar(name)
assert False, (name, self.scopes, self.env)
return None
def getValue(self, name):
return self.getVar(name).getValue()
def setValue(self, name, val):
return self.getVar(name).setValue(val)
class ExprEvaluator(ast.AstVisitor):
def __init__(self, interpreter):
self.interpreter = interpreter
self.name = 'exprEvaluator'
self.opname = 'evalExprEvaluation'
def evalExprEvaluation_AstNode(self, node):
node.visitChildren(self)
return node
def evalExprEvaluation_ExprEvaluation(self, expreval):
# print('evalExprEvaluation_ExprEvaluation', expreval, expreval.expr, expreval.getOwnerFunc(), expreval.expr.getOwner())
return expreval.expr
def evalExprEvaluation_Param(self, param):
param.type = param.type.visit(self)
# print('evalExprEvaluation_Param type', param.type)
return param
def evalExprEvaluation_TupleType(self, tupletype):
tupletype.elementTypes = [t.visit(self) for t in tupletype.elementTypes]
return tupletype
def evalExprEvaluation_FuncSpec(self, spec):
for param in spec.params:
param.visit(self)
if spec.returnType is not None:
spec.returnType = spec.returnType.visit(self)
# print('evalExprEvaluation_FuncSpec returnType', spec.returnType)
return spec
class NilValue(ast.SimpleNode):
def __init__(self):
ast.SimpleNode.__init__(self)
def evaluateAttr(self, visitor, attr):
assert False, ('NilValue.evaluateAttr', self, visitor, attr)
class Interpreter(ast.AstVisitor):
def __init__(self):
self.name = 'interpreter'
self.opname = 'evaluate'
self.stacks = []
self.ops = {'==' : operator.eq, '>':operator.gt, '!=':operator.ne,
'<':operator.lt, '>=':operator.ge, '<=':operator.le, 'not':operator.not_,
'in':self.op_in,
'+':operator.add,'-':operator.sub,'*':operator.mul,'/':operator.div,'%':operator.mod}
self.assign_ops ={'+=':operator.iadd, '-=':operator.isub, '=':self.op_assign}
self.implicit_args_stack = []
self.nilValue = NilValue()
self.logger = xutils.createLogger('Interpreter')
def op_assign(self, var, val):
return val
def op_not_in(self, left, right):
# self.logger.debug('op_not_in', left, right)
return left not in right
def op_in(self, left, right):
# self.logger.debug('op_in', left, right)
return left in right
def getValue(self, name):
# self.logger.debug('Interpreter.getValue', name)
return self.getCurrentStack().getValue(name)
def setValue(self, name, val):
# self.logger.debug('Interpreter.setValue', name, val)
return self.getCurrentStack().setValue(name, val)
def addVar(self, name, val):
# self.logger.debug('Interpreter.addVar', name, val)
return self.getCurrentStack().addVar(name, val)
def getVar(self, name):
return self.getCurrentStack().getVar(name)
def getCurrentStack(self):
return self.stacks[0]
def pushStack(self, f, args, this, env):
# self.logger.debug('pushStack', f, args, this, len(self.stacks))
stack = EvaluatorStack(f, args, this, env, self.nilValue)
self.stacks.insert(0, stack)
return stack
def popStack(self):
# self.logger.debug('popStack', len(self.stacks))
del self.stacks[0]
def popScope(self):
del self.getCurrentStack().scopes[0]
def pushScope(self):
scope = EvaluatorScope()
self.getCurrentStack().scopes.insert(0, scope)
# self.logger.debug('pushScope', scope, self.getCurrentStack().scopes)
return scope
def printStack(self):
self.logger.error('printStack', len(self.stacks))
i = 0
for stack in self.stacks[::-1]:
funcname = stack.func.shortname()
self.logger.error('stack', i, funcname, stack.args, stack.this, stack.unit)
i += 1
self.logger.error('printStack end.', i)
def evaluateGlobalVar(self, units):
# self.logger.debug('evaluateGlobalVar', units)
for unit in units:
# self.logger.debug('evaluateGlobalVar unit', unit, unit.ast)
unit.ast.evaluateGlobalVar(self)
def evaluateGlobalVar_SingleVarDef(self, var):
if var.cls is None:
# global var
# self.logger.debug('evaluateGlobalVar_SingleVarDef global var', var)
initialval = var.initial.visit(self) if var.initial else self.nilValue
self.logger.error('evaluateGlobalVar_SingleVarDef var initial', var, initialval)
v = VarHolder(var.name, initialval)
unit = var.getOwnerUnit()
unit.globalScope.addVar(var.name, v)
def evaluateGlobalVar_MultipleVarDef(self, var):
for v in var.vars:
v.evaluateGlobalVar(self)
def evaluateGlobalVar_FuncDef(self, func):
pass
def evaluateGlobalVar_TraitDef(self, cls):
pass
def evaluateGlobalVar_ClassDef(self, cls):
pass
def evaluateGlobalVar_EnumDef(self, enumdef):
pass
def evaluateGlobalVar_ExtensionDef(self, extdef):
pass
def evaluateGlobalVar_CodeUnit(self, unit):
unit.globalScope = EvaluatorScope()
for d in unit.definitions:
d.evaluateGlobalVar(self)
def execute(self, entryfunc, args):
self.logger.error('execute', self, entryfunc)
try:
func = self.project.pkg.resolveSymbol(entryfunc)
# self.logger.error('execute', self, entryfunc, func)
return self.evalFunc(func, None, [args], {})
except Exception as e:
# etype, evalue, etb = sys.exc_info()
# print('execute error:', e, etype, evalue, etb, dir(etb))
self.logger.error('execute error:', e)
xutils.printException(e, 50)
self.printStack()
return e
def evaluate_AstNode(self, node):
assert False, ('evaluate_AstNode', node, node.getOwnerFunc())
def evaluate_ExprEvaluation(self, expreval):
assert False, expreval
def evaluate_EmbeddedCode(self, ec):
text = ec.code.visit(self)
expr = parser.exprParser.parse(text)
# self.logger.debug('evaluate_EmbeddedCode', ec, text, expr)
expr.setOwner(ec.getOwner())
self.project.visitNewItem(expr)
ret = expr.visit(self)
# self.logger.debug('evaluate_EmbeddedCode ret', ec, text, expr, ret)
return ret
def evaluate_EmbeddedStatement(self, ebd):
# self.logger.debug('evaluate_EmbeddedStatement', ebd, ebd.getOwnerFunc())
astConstructor = libs.GmlAstConstructor(self, True)
ret = astConstructor.visit(ebd.statement)
# self.logger.debug('evaluate_EmbeddedStatement', ec, ret)
return ret
def evaluate_EmbeddedExpr(self, ebd):
# self.logger.debug('evaluate_EmbeddedExpr', ebd, ebd.getOwnerFunc())
astConstructor = libs.GmlAstConstructor(self, True)
ret = astConstructor.visit(ebd.expr)
# self.logger.debug('evaluate_EmbeddedExpr', ec, ret)
return ret
def evaluateNil_UserTypeClass(self, utc):
return self.nilValue
def callConstructor(self, cls, args, named_args, topclsvar):
args = [arg.evaluateParam(self) for arg in args]
named_args = dict([(arg.name, arg.value.evaluateParam(self)) for arg in named_args])
return self.evalConstructor(cls, args, named_args, topclsvar)
def evalConstructor(self, cls, args, named_args, topclsvar):
# self.logger.debug('evalConstructor', cls.name, cls, args, cls.constructors, self.getThis())
clsvar = ClassDefEvaluator(cls, self)
clsvar.evalConstructor(self, args, named_args, topclsvar)
for name, arg in named_args.iteritems():
# self.logger.debug('evalConstructor arg', name, arg, clsvar, cls)
if name == 'text':
assert isinstance(arg, str), (clsvar, name, arg)
clsvar.setValue(name, arg)
return clsvar
def evaluate_FuncDef(self, f):
# self.logger.debug('evaluate_FuncDef', f, f.getOwnerClass(), f.cls, self.getThis(), f.body)
if f.cls and f.cls.singleton and f.name == 'instance':
assert len(f.body.statements) == 0
if not hasattr(f.cls, 'singleton_instance') or f.cls.singleton_instance is None:
f.cls.singleton_instance = self.evalConstructor(f.cls, [], {}, None)
return f.cls.singleton_instance
# assert len(f.spec.params) == 1
assert self.getCurrentStack().flowFlag == FlowFlags.NORMAL
f.body.visit(self)
return self.getCurrentStack().returnValue
def createVar(self, var):
# self.logger.debug('createVar', var, var.initial, self.getThis(), var.getOwnerFunc(), var.getOwnerClass())
val = self.nilValue
if var.initial and var.cls is None:
val = var.initial.visit(self)
# self.logger.debug('createVar initial', var, val, var.getOwnerFunc())
assert val is not None, ('createVar initial', var, var.initial, self.getThis(), var.getOwnerFunc(), var.getOwnerClass(), val)
else:
val = var.getType().getTypeClass().evaluateNil(self)
assert val is not None, ('createVar', var, var.initial, self.getThis(), var.getOwnerFunc(), var.getOwnerClass(), val)
# self.logger.debug('createVar initial none', var, val, var.getOwnerFunc())
# self.logger.debug('createVar initial ok', var, val)
if var.name == 'text':
assert isinstance(val, str), (var, val, var.getOwnerFunc(), var.getOwnerClass())
varholder = VarHolder(var, val)
# self.logger.debug('createVar ret', var, val, varholder)
return varholder
def createTupleVar(self, var):
# self.logger.debug('createTupleVar', var, var.initial, self.getThis(), var.getOwnerFunc(), var.getOwnerClass())
vals = self.nilValue
if var.initial and var.cls is None:
vals = var.initial.visit(self)
# self.logger.debug('createVar initial', var, val, var.getOwnerFunc())
else:
vals = var.getType().getTypeClass().evaluateNil(self)
assert len(vals) == len(var.vars)
holders = []
for i in range(len(vals)):
holders.append(VarHolder(var.vars[i], vals[i]))
return holders
def constructVar(self, var):
# self.logger.debug('constructVar', var, var.initial, self.getThis())
val = self.nilValue
if var.initial:
val = var.initial.visit(self)
# self.logger.debug('constructVar setValue', var.name, var, val)
self.setValue(var.name, val)
# return val
def evaluate_SingleVarDef(self, var):
# self.logger.debug('evaluate_SingleVarDef', var)
val = self.createVar(var)
self.addVar(var.name, val)
# self.logger.debug('evaluate_SingleVarDef', var, val)
return val
def evaluate_TupleVarDef(self, var):
# self.logger.debug('evaluate_SingleVarDef', var)
vals = self.createTupleVar(var)
for val in vals:
self.addVar(val.var.name, val)
# self.logger.debug('evaluate_SingleVarDef', var, val)
return vals
def evalStatementBlock(self, stmtblock):
assert not self.is_func_finished()
# self.logger.debug('evaluate_StatementBlock', stmtblock)
ret = stmtblock.body.visit(self)
return ret
def evaluate_Break(self, stmt):
# assert False, (stmt, stmt.owner, stmt.getOwnerFunc(), stmt.getOwnerClass())
self.set_loop_break()
def evaluate_Continue(self, stmt):
# assert False, (stmt, stmt.getOwnerFunc(), stmt.getOwnerClass())
self.set_loop_continue()
def checkFlowAbort(self):
flowFlag = self.getFlowFlag()
if flowFlag == FlowFlags.RETURN:
return True
if flowFlag == FlowFlags.BREAK:
self.getCurrentStack().flowFlag = FlowFlags.NORMAL
return True
if flowFlag == FlowFlags.CONTINUE:
self.getCurrentStack().flowFlag = FlowFlags.NORMAL
return False
def evaluate_ForStatement(self, stmt):
# self.logger.debug('evaluate_ForStatement', stmt, len(stmt.stmtblock.statements), stmt.inits)
scope = self.pushScope()
if stmt.init:
stmt.init.visit(self)
while stmt.condition is None or stmt.condition.visit(self):
self.pushScope()
stmt.body.visit(self)
needAbort = self.checkFlowAbort()
if needAbort:
self.popScope()
break
if stmt.step:
stmt.step.visit(self)
self.popScope()
# assert False, (self, stmt)
self.popScope()
def evaluate_ForEachStatement(self, stmt):
# self.logger.debug('evaluate_ForEachStatement', stmt, stmt.collection, len(stmt.body.statements))
scope = self.pushScope()
stmt.item.visit(self)
coll = stmt.collection.visit(self)
# self.logger.debug('evaluate_ForEachStatement coll', coll, stmt.item)
for item in coll:
self.pushScope()
scope.setValue(stmt.item.name, item)
# self.logger.debug('evaluate_ForEachStatement item', item, stmt.item.name)
stmt.body.visit(self)
self.popScope()
self.popScope()
def evaluate_ForEachDictStatement(self, stmt):
# self.logger.debug('evaluate_ForEachDictStatement', stmt, stmt.collection, len(stmt.body.statements))
scope = self.pushScope()
stmt.key.visit(self)
stmt.value.visit(self)
coll = stmt.collection.visit(self)
for key, val in coll.iteritems():
scope.setValue(stmt.key.name, key)
scope.setValue(stmt.value.name, val)
# self.logger.debug('evaluate_ForEachDictStatement item', key, val, stmt.key.name, stmt.value.name)
stmt.body.visit(self)
self.popScope()
def evaluateCall(self, callinfo):
assert False
return
def evaluate_CallStatement(self, callstmt):
callstmt.call.visit(self)
def evaluate_ArgumentPlaceholder(self, param):
# self.logger.debug('evaluate_ArgumentPlaceholder', self, param, param.sequence, self.implicit_args_stack[0])
return self.implicit_args_stack[0][param.sequence]
def evaluateVar_Identifier(self, identifier):
# self.logger.debug('evaluateVar_Identifier', identifier, self)
return self.getVar(identifier.name)
def evaluateVar_AttrRef(self, attr):
# self.logger.debug('evaluateVar_AttrRef', attr, self)
obj = attr.object.visit(self)
# self.logger.debug('evaluateVar_AttrRef obj', attr, self, obj)
return obj.evaluateAttrVar(self, attr)
def evaluateVar_Subscript(self, subscript):
coll = subscript.collection.visit(self)
# self.logger.debug('evaluateVar_Subscript', subscript, self, coll)
assert False
return self.getVar(identifier.name)
def evalAssignment(self, op, var, val):
# self.logger.debug('evalAssignment', op, var, val)
varholder = var.evaluateVar(self)
oldval = var.visit(self)
newval = op(oldval, val)
# self.logger.debug('evalAssignment op', op, var, oldval, val, newval)
varholder.setValue(newval)
def evaluate_AssertStatement(self, stmt):
# self.logger.debug('evaluate_AssertStatement', stmt.expr, stmt.msg, stmt.getOwnerFunc())
expr = stmt.expr.visit(self)
if stmt.msg:
assert expr, (stmt.msg.visit(self), stmt.getOwnerFunc(), stmt.getOwnerClass())
else:
assert expr, (stmt.getOwnerFunc(), stmt.getOwnerClass())
def evaluate_Assignment(self, assign):
# self.logger.debug('evaluate_Assignment', assign, assign.getOwnerFunc())
values = [val.visit(self) for val in assign.values]
for i in range(len(assign.targets)):
target = assign.targets[i]
# self.logger.debug('evaluate_Assignment target', i, target, assign.values[i], assign.op)
if isinstance(target, ast.Subscript):
if assign.op == '=':
val = assign.values[i]
coll = target.collection.visit(self)
# self.logger.debug('evaluate_Assignment subscript key', coll, target.key, val)
key = target.key.visit(self)
# self.logger.debug('evaluate_Assignment val', coll, key, target.key, val)
newval = values[i]
# self.logger.debug('evaluate_Assignment set', coll, key, val, newval)
# assert False, (assign, assign.variable.collection.getTypeClass().eval_set)
target.collection.getTypeClass().eval_set(coll, key, newval)
continue
op = self.assign_ops[assign.op]
# self.logger.debug('evaluate_Assignment normal', op, val, target, values[i])
self.evalAssignment(op, target, values[i])
def evaluate_Subscript(self, subscript):
# self.logger.debug('evaluate_Subscript', subscript, subscript.collection, subscript.collection.getTarget(), subscript.key)
coll = subscript.collection.visit(self)
key = subscript.key.visit(self)
# self.logger.debug('evaluate_Subscript coll', subscript, coll, key, coll[key])
if isinstance(coll, list):
assert isinstance(key, int) and key < len(coll), ('evaluate_Subscript invalid list coll', subscript, subscript.collection, coll, key, len(coll), subscript.getOwnerFunc())
elif isinstance(coll, dict):
assert key in coll, ('evaluate_Subscript invalid dict coll', subscript, subscript.collection, coll, key, subscript.getOwnerFunc())
elif isinstance(coll, str):
assert isinstance(key, int) and key < len(coll), ('evaluate_Subscript invalid str coll', subscript, subscript.collection, coll, key, subscript.getOwnerFunc())
else:
assert False, ('evaluate_Subscript invalid coll', subscript, subscript.collection, coll, key, subscript.getOwnerFunc())
return coll[key]
def evaluate_Slicing(self, slicing):
# self.logger.debug('evaluate_Slicing', slicing)
seq = slicing.collection.visit(self)
start = slicing.start.visit(self) if slicing.start and slicing.start != self.nilValue else None
stop = slicing.stop.visit(self) if slicing.stop and slicing.stop != self.nilValue else None
# self.logger.debug('evaluate_Subscript coll', seq, start, stop)
return seq[start:stop]
def evaluate_AttrRef(self, attr):
# self.logger.debug('evaluate_AttrRef', attr, attr.object, attr.ref, attr.getOwnerFunc())
obj = attr.object.visit(self)
# # self.logger.debug('evaluate_AttrRef obj', attr, attr.object, obj, attr.getOwnerFunc())
return obj.evaluateAttr(self, attr)
def evaluate_TypeCast(self, typecast):
src = typecast.source.visit(self)
if src is None or src == self.nilValue:
return self.nilValue
srctype = src.cls
dsttype = typecast.type.getTarget()
# self.logger.debug('evaluate_TypeCast', typecast, typecast.source, typecast.type, srctype, dsttype, src)
if ast.isSubClass(srctype, dsttype):
return src
return self.nilValue
def evaluateAttr_UserTypeClass(self, utc, attr):
assert False
if attr.ref in utc.cls.classes:
assert False
return utc.cls.classes[attr.ref]
if attr.ref in utc.cls.functions:
assert False, (self, attr.ref, utc.cls.name)
return utc.cls.functions[attr.ref]
assert False, (self, utc, attr)
def evaluateAttr1_AstNode(self, node, attr):
# self.logger.debug('evaluateAttr_SingleVarDef', var, attr)
return node.getTypeClass().evaluateAttr(self, attr)
def evaluateAttr1_SingleVarDef(self, var, attr):
# self.logger.debug('evaluateAttr_SingleVarDef', var, attr)
return var.getTypeClass().evaluateAttr(self, attr)
def evaluateAttr1_Param(self, param, attr):
# self.logger.debug('evaluateAttr_Param', param)
return param.getTypeClass().evaluateAttr(self, attr)
def evaluateAttr1_Identifier(self, identifier, attr):
# self.logger.debug('evaluateAttr_Identifier', identifier, attr, attr.getTarget())
target = identifier.getTarget()
return target.evaluateIdentifierAttr(self, attr)
if isinstance(target, (ast.ClassDef, ast.EnumDef, basetype.LibClass)):
# self.logger.debug('evaluateAttr_Identifier target', attr, target, attr.target)
# assert not isinstance(attr.target, ast.FuncDef), (identifier, target, attr, attr.target)
if isinstance(attr.target, ast.FuncDef):
return attr.target
return attr.target.visit(self)
obj = identifier.visit(self)
# self.logger.debug('evaluateAttr_Identifier normal', attr, target, obj)
assert obj and isinstance(obj, ast.AstNode), ('evaluateAttr_Identifier obj is nil', identifier, obj, attr, attr.getOwnerFunc(), target)
return obj.evaluateAttr(self, attr)
def evaluateIdentifierAttr_AstNode(self, node, attr):
identifier = attr.object
obj = identifier.visit(self)
# self.logger.debug('evaluateAttr_Identifier normal', attr, target, obj)
assert obj and isinstance(obj, ast.AstNode), ('evaluateAttr_Identifier obj is nil', identifier, obj, attr, attr.getOwnerFunc(), target)
return obj.evaluateAttr(self, attr)
def evaluateIdentifierAttr_EnumDef(self, enumdef, attr):
return attr.target.visit(self)
def evaluateAttr_LibClass(self, cls, attr):
return attr.target# if isinstance(attr.target, ast.LibFuncBase) else attr.target.visit(self)
def evaluateAttr_ClassDef(self, cls, attr):
return attr.target# if isinstance(attr.target, (ast.FuncDef, ast.FuncProto)) else attr.target.visit(self)
def evaluateAttr_ScriptClass(self, cls, attr):
return attr.target# if isinstance(attr.target, (ast.FuncDef, ast.FuncProto)) else attr.target.visit(self)
def evaluateAttr_ClassDef(self, cls, attr):
# self.logger.debug('evaluateAttr_ClassDef', attr)
assert False, (cls, attr, attr.getOwnerFunc())
ret = cls.findLocalSymbol(attr.ref)
assert ret, (cls, attr)
return ret
def evaluate_NamedExpressionItem(self, item):
# self.logger.debug('evaluate_NamedExpressionItem', item.value)
# return item.value.value if isinstance(item.value, xutils.EnumItem) else item.value
return item.value.visit(self)
def evaluateAttr_EnumDef(self, enumdef, attr):
# self.logger.debug('evaluateAttr_EnumDef', attr, enumdef.symbols.get(attr.ref).value, enumdef.symbols)
return enumdef.symbols.get(attr.ref).visit(self)
def evaluate_Closure(self, c):
# self.logger.debug('evaluate_Closure', c)
# assert False, (c, c.owner, c.getOwnerFunc())
assert c.stack, ('evaluate_Closure nil stack', c, c.owner, c.getOwnerFunc())
ret = c.body.visit(self)
return self.getCurrentStack().returnValue
def evaluate_Call(self, callinfo):
# self.logger.debug('evaluate_Call', callinfo, callinfo.owner, callinfo.getOwnerFunc(), callinfo.getOwnerClass())
ret = callinfo.caller.evaluateCall(self, callinfo)
# self.logger.debug('evaluate_Call ret', callinfo, callinfo.caller, ret)
return ret
def evaluateCall_GenericExpr(self, expr, callinfo):
ret = expr.getTarget().evaluateCall(self, callinfo)
# self.logger.debug('evaluateCall_GenericExpr ret', expr, callinfo, expr.getTarget(), ret)
return ret
def evaluateCall_Closure(self, closure, callinfo):
# self.logger.debug('evaluateCall_Closure', closure, callinfo)
args = [arg.visit(self) for arg in callinfo.args]
stack = self.pushStack(closure, args, None, closure.stack)
scope = self.pushScope()
self.implicit_args_stack.insert(0, args)
ret = closure.visit(self)
del self.implicit_args_stack[0]
self.popScope()
self.popStack()
return ret
def evaluateCall_AttrRef(self, attr, callinfo):
# self.logger.debug('evaluateCall_AttrRef', attr, callinfo, attr.getTarget(), attr.getOwnerFunc())
target = attr.getTarget()
if isinstance(target, (ast.FuncDef, ast.FuncProto)):
if (not target.spec.static) or target.injection_cls:
# self.logger.debug('evaluateCall_AttrRef func start', attr, callinfo, attr.getTarget(), target.injection_cls, target.spec.static)
caller = callinfo.caller.visit(self)
# self.logger.debug('evaluateCall_AttrRef func', attr, callinfo, attr.getTarget(), caller, target.injection_cls)
return caller.evaluateCall(self, callinfo)
if isinstance(target, libs.ScriptFunc) and (not target.spec.static):
# self.logger.debug('evaluateCall_AttrRef script func start', attr, callinfo, attr.getTarget())
obj = callinfo.caller.object.visit(self)
callerfunc = getattr(obj, 'ScriptMethod_' + callinfo.caller.ref)
assert callerfunc, (callinfo, callinfo.caller.object, obj, callerfunc)
# self.logger.debug('evaluateCall_AttrRef script func', attr, callinfo, attr.getTarget(), callerfunc)
args = [arg.evaluateParam(self) for arg in callinfo.args]
named_args = dict([(arg.name, arg.value.visit(self)) for arg in callinfo.namedArgs])
return callerfunc(*args, **named_args)
# self.logger.debug('evaluateCall_AttrRef attr target', attr, callinfo, target)
ret = target.evaluateCall(self, callinfo)
# self.logger.debug('evaluateCall_AttrRef ret', attr, callinfo, attr.getTarget(), ret)
return ret
def evaluateCall_ClassDef(self, cls, callinfo):
# self.logger.debug('evaluateCall_ClassDef', cls.name, callinfo.args, callinfo.getOwnerFunc(), self.getThis())
return self.callConstructor(cls, callinfo.args, callinfo.namedArgs, None)
def callFunc(self, func, this, args, named_args):
# self.logger.debug('callFunc', func, this, args, named_args, self.getThis())
if func.injection_cls:
assert this, (func, this, args)
args.insert(0, this)
# self.logger.debug('callFunc evaluateParam', func, this, args, named_args)
args = [arg.evaluateParam(self) for arg in args]
named_args = dict([(arg.name, arg.value.evaluateParam(self)) for arg in named_args])
ret = self.evalFunc(func, this, args, named_args)
# self.logger.debug('callFunc end', func, this, args, named_args, ret)
return ret
def evalFunc(self, func, this, args, named_args):
# self.logger.debug('evalFunc', func, this, args, named_args, self.getThis(), func.getOwnerClass(), func.getOwnerUnit())
assert len(args) == len(func.spec.params), (args, func.name, func.spec, this, func.spec.static, func, func.getOwnerClass(), func.getOwnerUnit())
# assert func.name != 'cacheName', (func, this, args, func.injection_cls)
self.prepareEvalFunc(func, args, this)
if func.info.type == ast.FuncType.constructor:
for var in func.cls.vars:
self.constructVar(var)
ret = func.visit(self)
self.popStack()
# self.logger.debug('evalFunc end', func, this, args, named_args, ret)
return ret
def prepareEvalFunc(self, f, args, this):
stack = self.pushStack(f, args, this, None)
scope = self.pushScope()
assert scope
# self.logger.debug('prepareEvalFunc', f, this, stack, scope, len(args), len(f.spec.params), len(self.stacks), args)
if len(args) == len(f.spec.params):
for i in range(len(args)):
# self.logger.debug('prepareEvalFunc arg', i, f, args[i], f.spec.params[i])
# assert not isinstance(args[i], ast.AstNode) or isinstance(args[i], ClassDefEvaluator), (f, args[i], f.spec.params[i])
scope.addVar(f.spec.params[i].name, VarHolder(f.spec.params[i], args[i]))
else:
assert False
def evaluateCall_FuncDef(self, f, callinfo):
# self.logger.debug('evaluateCall_FuncDef', f, callinfo.caller, callinfo.args, f.injection_cls)
this = None
args = callinfo.args
if (not f.spec.static) or f.injection_cls:
if isinstance(callinfo.caller, ast.AttrRef):
this = callinfo.caller.object.visit(self)
# self.logger.debug('evaluateCall_FuncDef new this', f, this, callinfo.caller.object)
else:
this = self.getCurrentStack().this
# self.logger.debug('evaluateCall_FuncDef old this', f, this, callinfo.caller)
return self.callFunc(f, this, args, callinfo.namedArgs)
def evaluateCall_AstNode(self, node, callinfo):
assert False, (node, callinfo, callinfo.caller, callinfo.getOwnerFunc())
def evaluateCall_Identifier(self, identifier, callinfo):
# self.logger.debug('evaluateCall_Identifier', identifier, identifier.getTarget(), callinfo, self.getThis(), callinfo.getOwnerFunc())
target = identifier.getTarget()
if isinstance(target, ast.ClassDef) or isinstance(target, basetype.LibClass):
return target.evaluateCall(self, callinfo)
if isinstance(target, ast.UserType):
assert target.getTarget() and not isinstance(target.getTarget(), ast.UserType)
return target.getTarget().evaluateCall(self, callinfo)
if isinstance(target, ast.FuncDef) or isinstance(target, ast.FuncProto) or isinstance(target, basetype.LibFunc):
if target.spec.static or target.cls is None:
# self.logger.debug('evaluateCall_Identifier static func', identifier, target, callinfo)
return target.evaluateCall(self, callinfo)
this = self.getThis()
if this is None:
return target.evaluateCall(self, callinfo)
func = this.getVar(identifier.name)
assert func, ('evaluateCall_Identifier FuncDef or FuncProto or LibFunc', identifier, identifier.getTarget(), callinfo, self.getThis(), callinfo.getOwnerFunc())
return func.evaluateCall(self, callinfo)
# self.logger.debug('evaluateCall_Identifier var func', identifier, target, callinfo, self.getValue(identifier.name))
return self.getValue(identifier.name).evaluateCall(self, callinfo)
def evaluateListComprehension(self, listcomp, i):
# self.logger.debug('evaluateListComprehension', listcomp.expr, listcomp.fors[0].source, i)
if i >= len(listcomp.fors):
return [listcomp.expr.visit(self)]
listfor = listcomp.fors[i]
coll = listfor.source.visit(self)
ret = []
# self.logger.debug('evaluateListComprehension coll', coll, i, len(listcomp.fors))
for item in coll:
scope = self.pushScope()
# self.logger.debug('evaluateListComprehension item', coll, i, item, scope, listfor.name)
self.addVar(listfor.variable.name, VarHolder(listfor.variable, item))
cond = listfor.condition.visit(self) if listfor.condition else True
if cond:
ret.extend(self.evaluateListComprehension(listcomp, i + 1))
self.popScope()
return ret
def evaluate_ListComprehension(self, listcomp):
# self.logger.debug('evaluate_ListComprehension', listcomp, listcomp.expr)
ret = self.evaluateListComprehension(listcomp, 0)
# self.logger.debug('evaluate_ListComprehension ret', listcomp, listcomp.expr, ret)
return ret
def evaluate_StringEvaluation(self, se):
# self.logger.debug('evaluate_StringEvaluation', se, se.evaluation, se.getOwnerFunc())
return se.literal.visit(self) if se.evaluation is None else se.evaluation.visit(self)
def evaluate_PrimitiveLiteral(self, literal):
# self.logger.debug('evaluate_PrimitiveLiteral', literal, literal.value, literal.text)
return literal.value
def evaluate_TupleLiteral(self, literal):
# non-layzy evaluation
# self.logger.debug('evaluate_TupleLiteral', literal.values)
return tuple([val.visit(self) for val in literal.values])
def evaluate_ListLiteral(self, literal):
# non-layzy evaluation
# self.logger.debug('evaluate_ListLiteral', literal.values)
return [val.visit(self) for val in literal.values]
# return literal.values
def evaluate_DictLiteral(self, literal):
# non-layzy evaluation
# self.logger.debug('evaluate_ListLiteral', literal.values)
return dict([(item.key.visit(self), item.value.visit(self)) for item in literal.values])
# return literal.values
def getThis(self):
# self.logger.debug('getThis', self, self.getCurrentStack(), self.stacks)
return self.getCurrentStack().this if len(self.stacks) > 0 else None
def evaluate_CaseBlock(self, caseblock):
assert False, caseblock
def evaluate_CaseEntryExpr(self, caseexpr):
return caseexpr.expr.visit(self)
def evaluate_CaseEntryStmt(self, casestmt):
return casestmt.body.visit(self)
def evaluate_Identifier(self, identifier):
# self.logger.debug('evaluate_Identifier', identifier, identifier.getTarget(), identifier.getOwner(), identifier.getOwnerFunc(), self.getThis())
target = identifier.getTarget()
if isinstance(target, (ast.ClassDef, ast.LibClassBase, ast.EnumDef)):
return target
if isinstance(target, basetype.LibLiteral):
return target.value
if isinstance(target, ast.FuncDef) or isinstance(target, ast.FuncProto) or isinstance(target, basetype.LibFunc):
if target.spec.static:
return target
assert self.getThis()
return self.getThis().getValue(target.name)
ret = self.getValue(identifier.name)
# self.logger.debug('evaluate_Identifier ret', ret, identifier, identifier.getTarget(), identifier.getOwner(), identifier.getOwnerFunc(), identifier.getOwner().getOwner(), identifier.getOwner().getOwner().getOwner(), identifier.getOwner().getOwner().getOwner().getOwner())
return ret
def evaluate_Nil(self, nil):
return self.nilValue
def evaluate_This(self, this):
# self.logger.debug('evaluate_This', this.owner, self.getThis())
# assert not this.owner.getOwnerFunc().spec.static, (this, this.getOwnerFunc())
assert self.getThis(), (this, this.getOwnerFunc(), this.getOwnerClass())
return self.getThis()
def evaluate_IfElseExpr(self, expr):
cond = expr.condition.visit(self)
# self.logger.debug('evaluate_IfElseExpr', expr, expr.condition, expr.truePart, expr.falsePart, cond)
assert cond is not None, ('evaluate_IfElseExpr', expr, expr.condition, cond)
if cond and cond != self.nilValue:
return expr.truePart.visit(self)
return expr.falsePart.visit(self)
def evaluate_ExprList(self, exprlist):
exprs = [expr.visit(self) for expr in exprlist.exprs]
# self.logger.debug('evaluate_ExprList', exprs, exprlist)
return exprs
def evaluate_UnaryOp(self, expr):
# self.logger.debug('evaluate_UnaryOp', expr)
operand = expr.operand.visit(self)
return expr.operand.getTypeClass().evaluateUnaryOp(self, expr.op, operand)
def evaluate_BinaryOp(self, expr):
# self.logger.debug('evaluate_BinaryOp', expr, expr.getOwnerFunc(), expr.left, expr.left.getTypeClass(), expr.right, expr.right.getTypeClass())
if expr.op == 'not-in':
left = expr.left.visit(self)
right = expr.right.visit(self)
return expr.right.getTypeClass().eval_not_contains(right, left)
if expr.op == 'in':
left = expr.left.visit(self)
right = expr.right.visit(self)
return expr.right.getTypeClass().eval_contains(right, left)
assert expr.left.getTypeClass(), ('evaluate_BinaryOp invalid left', expr.left, expr.right, expr.op, 'types', expr.left.getType(), expr.right.getType())
# self.logger.debug('evaluate_BinaryOp normal', expr, expr.getOwnerFunc(), expr.left, expr.left.getTypeClass(), expr.right, expr.right.getTypeClass())
return expr.left.getTypeClass().evaluateBinaryOp(self, expr.op, expr.left, expr.right)
def evaluate_Return(self, stmt):
# self.logger.debug('evaluate_Return', stmt, stmt.values[0] if len(stmt.values) > 0 else None)
ret = stmt.value.visit(self) if stmt.value is not None else self.nilValue
# assert ret is not None, ('evaluate_Return', stmt, stmt.value, ret)
if ret is None:
ret = self.nilValue
# self.logger.debug('evaluate_Return ret', stmt, ret)
self.set_func_finished(ret)
def resetFlowFlag(self):
self.getCurrentStack().flowFlag = FlowFlags.NORMAL
def getFlowFlag(self):
return self.getCurrentStack().flowFlag
def is_func_finished(self):
return self.getCurrentStack().flowFlag == FlowFlags.RETURN
def set_func_finished(self, ret):
# self.logger.debug('set_func_finished', self, ret)
self.getCurrentStack().flowFlag = FlowFlags.RETURN
self.getCurrentStack().returnValue = ret
def set_loop_break(self):
# self.logger.debug('set_loop_break', self)
self.getCurrentStack().flowFlag = FlowFlags.BREAK
def set_loop_continue(self):
# self.logger.debug('set_loop_continue', self)
self.getCurrentStack().flowFlag = FlowFlags.CONTINUE
def matchClasses(self, cls, entries, funcname, argcount):
# self.logger.debug('matchClasses start', cls, funcname)
matchingDegree, matchingEntry = calcClassFuncMatchingDigree(cls, funcname) if 1 == argcount else (None, None)
if matchingDegree == 0:
# self.logger.debug('matchClasses exact entry', cls, funcname, matchingEntry, entry)
return matchingEntry
for entry in entries:
targetcls = None
if entry and entry.pattern:
targetcls = entry.pattern.getTarget() if isinstance(entry.pattern, ast.Identifier) else entry.pattern.getType().getTypeClass()
degree = self.calcClassMatchingDigree(cls, targetcls)
if matchingDegree is None or matchingDegree > degree:
# self.logger.debug('matchClasses check entry', cls, funcname, matchingDegree, degree, matchingEntry, entry, matchingEntry.pattern if matchingEntry else None, entry.pattern)
matchingDegree = degree
matchingEntry = entry
if matchingEntry:
# self.logger.debug('matchClasses found entry', cls, funcname, matchingDegree, matchingEntry)
return matchingEntry
return None
def calcClassMatchingDigree(self, cls, targetcls):
return calcClassMatchingDigree(cls, targetcls)
def evaluate_SwitchCaseExpr(self, expr):
# self.logger.debug('evaluate_SwitchCaseExpr', expr)
if isinstance(expr.entries[0], ast.CaseEntryExpr):
matchexpr = expr.expr.visit(self)
# self.logger.debug('evaluate_SwitchCaseExpr CaseEntryExpr', expr.getOwnerFunc(), expr.entries[0], expr.entries[0].pattern, expr.expr, matchexpr)
matchingDegree = sys.maxint
matchingEntry = None
for entry in expr.entries:
targetcls = entry.pattern.variable.getType().getTarget()
degree = calcClassMatchingDigree(matchexpr.cls, targetcls)
if degree < matchingDegree:
# self.logger.debug('evaluate_SwitchCaseExpr try match entry', matchingDegree, degree, matchexpr.cls, targetcls, expr.expr)
matchingDegree = degree
matchingEntry = entry
# self.logger.debug('evaluate_SwitchCaseExpr final match', matchingDegree, degree, matchexpr.cls, targetcls, expr.expr, matchingEntry.pattern.variable if matchingEntry else None)
assert matchingEntry, (matchingDegree, degree, matchexpr.cls, targetcls, expr.expr, matchingEntry)
var = self.getVar(expr.expr.name)
self.addVar(matchingEntry.pattern.variable.name, var)
return matchingEntry.expr.visit(self)
else:
assert False, (stmt, stmt.entries[0])
def evaluate_UsingStatement(self, stmt):
scope = self.pushScope()
stmt.variable.visit(self)
stmt.body.visit(self)
self.popScope()
def evaluate_SwitchCaseStatement(self, stmt):
# self.logger.debug('evaluate_SwitchCaseStatement', stmt, stmt.getOwnerFunc(), stmt.getOwnerClass())
if isinstance(stmt.entries[0], ast.CaseEntryStmt):
matchexpr = stmt.expr.visit(self)
# self.logger.debug('evaluate_SwitchCaseStatement CaseEntryStmt', stmt, stmt.entries[0], stmt.entries[0].pattern, stmt.expr, matchexpr)
matchingDegree = sys.maxint
matchingEntry = None
for entry in stmt.entries:
targetcls = entry.pattern.variable.getType().getTarget()
degree = calcClassMatchingDigree(matchexpr.cls, targetcls)
if degree < matchingDegree:
# self.logger.debug('evaluate_SwitchCaseStatement try match entry', matchingDegree, degree, matchexpr.cls, targetcls, stmt.expr)
matchingDegree = degree
matchingEntry = entry
# self.logger.debug('evaluate_SwitchCaseStatement final match', matchingDegree, degree, matchexpr.cls, targetcls, stmt.expr)
assert matchingEntry, (matchingDegree, degree, matchexpr.cls, targetcls, stmt.expr, matchingEntry, stmt.getOwnerFunc(), stmt.getOwnerClass())
var = self.getVar(stmt.expr.name)
self.addVar(matchingEntry.pattern.variable.name, var)
matchingEntry.body.visit(self)
else:
assert False, (stmt, stmt.entries[0])
def evaluate_StatementBlock(self, stmtblock):
scope = self.pushScope()
stmtblock.body.visit(self)
self.popScope()
def evaluate_StatementBody(self, stmtbody):
# self.logger.debug('evaluate_StatementBody', stmtbody, len(stmtbody.statements))
for stmt in stmtbody.statements:
# self.logger.debug('evaluate_StatementBody stmt', stmt)
stmt.visit(self)
flowFlag = self.getFlowFlag()
if flowFlag != FlowFlags.NORMAL:
# self.logger.debug('evaluate_StatementBody break', stmt, flowFlag)
break
def evaluate_IfStatement(self, stmt):
# self.logger.debug('evaluate_IfStatement', len(stmt.branches))
for branch in stmt.branches:
scope = self.pushScope()
cond = branch.condition.visit(self)
# self.logger.debug('evaluate_IfStatement branch', branch.condition, cond)
assert cond is not None, ('evaluate_IfStatement', expr, expr.condition, cond)
if cond and cond != self.nilValue:
branch.body.visit(self)
self.popScope()
# self.logger.debug('evaluate_IfStatement branch match ret', branch.condition, cond, branch.body)
return
self.popScope()
# self.logger.debug('evaluate_IfStatement else', len(stmt.branches))
if stmt.elseBranch:
stmt.elseBranch.visit(self)
def evaluateNil_ClassDef(self, cls):
return self.nilValue
def evaluateNil_EnumDef(self, enumdef):
return enumdef.items[0].value
def evaluateUnaryOp_ClassDef(self, cls, op, operand):
assert op in ['not']
return operand is None
def evaluateBinaryOp_ClassDef(self, cls, op, left, right):
# self.logger.debug('evaluateBinaryOp_ClassDef', cls, op, left, right)
left = left.visit(self)
right = right.visit(self)
# self.logger.debug('evaluateBinaryOp_ClassDef', cls, op, left, right)
assert op in ['==', '!=', 'and', 'or']
if op == '==':
if right is None:
return left is None
return left == right
if op == '!=':
if right is None:
return left is not None
return left != right
if op == 'and':
return left and right
if op == 'or':
return left or right
assert False, (op, left, right)
def evaluateBinaryOp_EnumDef(self, cls, op, left, right):
# self.logger.debug('evaluateBinaryOp_ClassDef', cls, op, left, right)
left = left.visit(self)
right = right.visit(self)
# self.logger.debug('evaluateBinaryOp_ClassDef', cls, op, left, right)
assert op in ['==', '!=', 'and', 'or']
if op == '==':
if right is None:
return left is None
return left == right
if op == '!=':
if right is None:
return left is not None
return left != right
if op == 'and':
return left and right
if op == 'or':
return left or right
assert False, (op, left, right)
class FuncDefEvaluator(ast.AstNode):
def __init__(self, f, clsvar):
self.func = f
self.clsvar = clsvar
def __repr__(self):
return 'FuncDefEvaluator:%s%s:this=%s:%s' % (self.func.cls.name + '.' if self.func.cls else '', self.func.name, self.clsvar, ast.formatId(self))
def getValue(self):
return self
def evaluateCall(self, visitor, callinfo):
# visitor.logger.debug('FuncDefEvaluator.evaluateCall', self, self.clsvar, callinfo.caller, visitor.getThis(), callinfo.args)
# assert visitor.getThis() == self.clsvar
this = None
if (not self.func.spec.static) or self.func.injection_cls:
if isinstance(callinfo.caller, ast.AttrRef):
this = callinfo.caller.object.visit(visitor)
# visitor.logger.debug('FuncDefEvaluator.evaluateCall new this', self.func, this, callinfo.caller.object)
else:
this = visitor.getThis()
# visitor.logger.debug('FuncDefEvaluator.evaluateCall old this', self.func, this, callinfo.caller)
# assert (visitor.getThis() is not None and not self.func.spec.static) or (visitor.getThis() is None and self.func.spec.static), (visitor.getThis(), self.func)
return visitor.callFunc(self.func, self.clsvar, callinfo.args, dict(callinfo.namedArgs))
class ClassDefEvaluator(ast.AstNode):
def __init__(self, cls, visitor):
self.cls = cls
self.visitor = visitor
self.vars = {}
self.bases = [ClassDefEvaluator(base.getTarget(), visitor) for base in cls.bases]
for var in cls.vars:
# print('ClassDefEvaluator.add', var.name, var, self.cls)
innervar = visitor.createVar(var)
# print('ClassDefEvaluator.add ok', var.name, var, self.cls, innervar)
self.vars[var.name] = innervar
# print('ClassDefEvaluator.init', self)
def __repr__(self):
name = self.vars.get('name')
return 'ClassDefEvaluator:%s%s:%s' % (self.cls.name, '(%s)' % name.getValue() if name else '', ast.formatId(self))
def getLocalVar(self, name, clsvar):
# assert False, (self, self.cls, name)
# print('ClassDefEvaluator.getLocalVar', name, self, self.cls, clsvar, self.vars)
obj = self.vars.get(name)
if obj:
# print('ClassDefEvaluator.getLocalVar var obj', name, self, self.cls, clsvar, obj)
return obj
obj = self.cls.symbols.get(name)
# print('ClassDefEvaluator.getLocalVar found symbol', name, self, self.cls, self.vars, obj)
if isinstance(obj, ast.FuncDef):
# print('ClassDefEvaluator.getLocalVar found function', name, self, self.cls, self.vars, obj)
return FuncDefEvaluator(obj, clsvar or self)
assert obj is None, (self, name, clsvar, obj)
return None
def getVar(self, name, clsvar=None):
# print('ClassDefEvaluator.getVar search bases', name, self, self.vars)
if clsvar is None:
clsvar = self
nodes = [self]
while len(nodes) > 0:
node = nodes.pop(0)
ret = node.getLocalVar(name, clsvar)
if ret:
return ret
for base in node.bases:
nodes.append(base)
return None
def getValue(self, name):
# print('ClassDefEvaluator.getValue', name, self)
return self.getVar(name).getValue()
def setValue(self, name, val):
if self.getValue(name) == [] and val is None:
assert False, ('Class.setValue', self, name, val, self.getValue(name))
self.getVar(name).setValue(val)
def evaluateAttr(self, visitor, attr):
# visitor.logger.debug('ClassDefEvaluator.evaluateAttr', attr.ref, self, attr)
obj = self.getVar(attr.ref)
if obj:
return obj.getValue()
visitor.logger.debug('ClassDefEvaluator.evaluateAttr cls', attr.ref, self, self.cls, attr, obj)
return self.cls.evaluateAttr(visitor, attr)
def evaluateAttrVar(self, visitor, attr):
# visitor.logger.debug('ClassDefEvaluator.evaluateAttrVar', attr.ref, self, attr)
return self.getVar(attr.ref)
def evalConstructor(self, visitor, args, named_args, topclsvar):
# visitor.logger.debug('ClassDefEvaluator.evalConstructor', self, visitor, args, named_args, self.bases)
clsvar = topclsvar if topclsvar else self
for b in self.bases:
b.evalConstructor(visitor, [], {}, clsvar)
visitor.evalFunc(self.cls.constructors[0], clsvar, args, named_args)
| [
"xutils.Enum",
"ast.isSubClass",
"xutils.printException",
"parser.exprParser.parse",
"xutils.createLogger",
"ast.formatId",
"libs.GmlAstConstructor",
"ast.SimpleNode.__init__"
] | [((4553, 4618), 'xutils.Enum', 'xutils.Enum', (['"""FlowFlags"""', '"""NORMAL"""', '"""RETURN"""', '"""BREAK"""', '"""CONTINUE"""'], {}), "('FlowFlags', 'NORMAL', 'RETURN', 'BREAK', 'CONTINUE')\n", (4564, 4618), False, 'import xutils\n'), ((7436, 7465), 'ast.SimpleNode.__init__', 'ast.SimpleNode.__init__', (['self'], {}), '(self)\n', (7459, 7465), False, 'import ast\n'), ((8187, 8221), 'xutils.createLogger', 'xutils.createLogger', (['"""Interpreter"""'], {}), "('Interpreter')\n", (8206, 8221), False, 'import xutils\n'), ((12271, 12300), 'parser.exprParser.parse', 'parser.exprParser.parse', (['text'], {}), '(text)\n', (12294, 12300), False, 'import parser\n'), ((12730, 12764), 'libs.GmlAstConstructor', 'libs.GmlAstConstructor', (['self', '(True)'], {}), '(self, True)\n', (12752, 12764), False, 'import libs\n'), ((13046, 13080), 'libs.GmlAstConstructor', 'libs.GmlAstConstructor', (['self', '(True)'], {}), '(self, True)\n', (13068, 13080), False, 'import libs\n'), ((25483, 25515), 'ast.isSubClass', 'ast.isSubClass', (['srctype', 'dsttype'], {}), '(srctype, dsttype)\n', (25497, 25515), False, 'import ast\n'), ((4508, 4526), 'ast.formatId', 'ast.formatId', (['self'], {}), '(self)\n', (4520, 4526), False, 'import ast\n'), ((11912, 11940), 'xutils.printException', 'xutils.printException', (['e', '(50)'], {}), '(e, 50)\n', (11933, 11940), False, 'import xutils\n'), ((51933, 51951), 'ast.formatId', 'ast.formatId', (['self'], {}), '(self)\n', (51945, 51951), False, 'import ast\n'), ((53722, 53740), 'ast.formatId', 'ast.formatId', (['self'], {}), '(self)\n', (53734, 53740), False, 'import ast\n')] |
import os
from keras import layers, optimizers, models
from keras.applications.resnet import ResNet152, preprocess_input
from keras.layers import *
from keras.models import Model
train_dir = os.path.join(r'D:\jupyterlab\training')
validation_dir = os.path.join(r'D:\jupyterlab\val')
from keras.callbacks import ReduceLROnPlateau,EarlyStopping
Reduce=ReduceLROnPlateau(
monitor ='val_loss',#监测的值,可以是accuracy,val_loss,val_accuracy
factor=0.1,#缩放学习率的值,学习率将以lr = lr*factor的形式被减少
patience=2,#当patience个epoch过去而模型性能不提升时,学习率减少的动作会被触发
verbose=1,
mode='auto',#‘auto’,‘min’,‘max’之一 默认‘auto’就行
cooldown=0,#学习率减少后,会经过cooldown个epoch才重新进行正常操作
min_lr=0 #学习率最小值,能缩小到的下限
)
early_stop = EarlyStopping(
monitor='val_loss',
patience=10
)
resnet152 = ResNet152(weights='imagenet', include_top=False, input_shape=(300,300, 3))
model = models.Sequential()
model.add(resnet152)
model.add(GlobalAveragePooling2D())
model.add(Dense(1024))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(256))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dropout(0.2))
# model.add(layers.Flatten())
# model.add(Dropout(0.5))
model.add(layers.Dense(1, activation='sigmoid'))
resnet152.trainable = False
#冻结一个层意味着将其排除在训练之外,即其权重将永远不会更新
optimizer = optimizers.RMSprop(lr=1e-4)
def get_lr_metric(optimizer):
def lr(y_true, y_pred):
return optimizer.lr
return lr
lr_metric = get_lr_metric(optimizer)
model.compile(loss='binary_crossentropy', optimizer = optimizers.RMSprop(lr=1e-4), metrics=['acc',lr_metric])
from keras.preprocessing.image import ImageDataGenerator
batch_size = 64
train_datagen = ImageDataGenerator(
rotation_range=45,#随机旋转
width_shift_range=0.2,#是图像在水平上平移的范围
height_shift_range=0.2,#垂直方向上平移的范围
shear_range=0.2,#随机错切变换的角度
zoom_range=0.2,#随机缩放的范围
horizontal_flip=True,#随机将图像水平翻转
preprocessing_function=preprocess_input#归一化
)
val_datagen = ImageDataGenerator(preprocessing_function=preprocess_input)
train_generator = train_datagen.flow_from_directory(
train_dir,
target_size=(300,300),
batch_size=batch_size,
class_mode='binary')
validation_generator = val_datagen.flow_from_directory(
validation_dir,
target_size=(300,300),
batch_size=batch_size,
class_mode='binary')
model.save('cat-dog-resnet152-epoch30.h5') | [
"keras.callbacks.ReduceLROnPlateau",
"os.path.join",
"keras.preprocessing.image.ImageDataGenerator",
"keras.models.Sequential",
"keras.callbacks.EarlyStopping",
"keras.layers.Dense",
"keras.optimizers.RMSprop",
"keras.applications.resnet.ResNet152"
] | [((195, 235), 'os.path.join', 'os.path.join', (['"""D:\\\\jupyterlab\\\\training"""'], {}), "('D:\\\\jupyterlab\\\\training')\n", (207, 235), False, 'import os\n'), ((252, 287), 'os.path.join', 'os.path.join', (['"""D:\\\\jupyterlab\\\\val"""'], {}), "('D:\\\\jupyterlab\\\\val')\n", (264, 287), False, 'import os\n'), ((354, 465), 'keras.callbacks.ReduceLROnPlateau', 'ReduceLROnPlateau', ([], {'monitor': '"""val_loss"""', 'factor': '(0.1)', 'patience': '(2)', 'verbose': '(1)', 'mode': '"""auto"""', 'cooldown': '(0)', 'min_lr': '(0)'}), "(monitor='val_loss', factor=0.1, patience=2, verbose=1,\n mode='auto', cooldown=0, min_lr=0)\n", (371, 465), False, 'from keras.callbacks import ReduceLROnPlateau, EarlyStopping\n'), ((701, 747), 'keras.callbacks.EarlyStopping', 'EarlyStopping', ([], {'monitor': '"""val_loss"""', 'patience': '(10)'}), "(monitor='val_loss', patience=10)\n", (714, 747), False, 'from keras.callbacks import ReduceLROnPlateau, EarlyStopping\n'), ((771, 846), 'keras.applications.resnet.ResNet152', 'ResNet152', ([], {'weights': '"""imagenet"""', 'include_top': '(False)', 'input_shape': '(300, 300, 3)'}), "(weights='imagenet', include_top=False, input_shape=(300, 300, 3))\n", (780, 846), False, 'from keras.applications.resnet import ResNet152, preprocess_input\n'), ((855, 874), 'keras.models.Sequential', 'models.Sequential', ([], {}), '()\n', (872, 874), False, 'from keras import layers, optimizers, models\n'), ((1326, 1355), 'keras.optimizers.RMSprop', 'optimizers.RMSprop', ([], {'lr': '(0.0001)'}), '(lr=0.0001)\n', (1344, 1355), False, 'from keras import layers, optimizers, models\n'), ((1694, 1882), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'rotation_range': '(45)', 'width_shift_range': '(0.2)', 'height_shift_range': '(0.2)', 'shear_range': '(0.2)', 'zoom_range': '(0.2)', 'horizontal_flip': '(True)', 'preprocessing_function': 'preprocess_input'}), '(rotation_range=45, width_shift_range=0.2,\n height_shift_range=0.2, shear_range=0.2, zoom_range=0.2,\n horizontal_flip=True, preprocessing_function=preprocess_input)\n', (1712, 1882), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1980, 2039), 'keras.preprocessing.image.ImageDataGenerator', 'ImageDataGenerator', ([], {'preprocessing_function': 'preprocess_input'}), '(preprocessing_function=preprocess_input)\n', (1998, 2039), False, 'from keras.preprocessing.image import ImageDataGenerator\n'), ((1215, 1252), 'keras.layers.Dense', 'layers.Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (1227, 1252), False, 'from keras import layers, optimizers, models\n'), ((1546, 1575), 'keras.optimizers.RMSprop', 'optimizers.RMSprop', ([], {'lr': '(0.0001)'}), '(lr=0.0001)\n', (1564, 1575), False, 'from keras import layers, optimizers, models\n')] |
"""
Main image processing class
Must be initialized with an image path
"""
from threading import Thread
from tkinter import Variable
import pytesseract
import numpy as np
from .helper import get_boxes
from gui.components.loading_popup.loading_popup import LoadingPopup
class TableExtractor:
"""
Runs OCR on table image
Methods:
+ extract_table = run the extraction, return an array of extracted text
- correct_bounding = change bounding boxes from (x,y,w,h) -> (x1,y1,x2,y2)
- run_tesseract = uses tesseract with a custom config
Attributes:
+ data
+ root
- image
"""
def __init__(self, gui_root):
pytesseract.pytesseract.tesseract_cmd = r"bin\Tesseract-OCR\tesseract.exe"
self.data = Variable(value=None)
self.__image = None
self.root = gui_root
def extract_table(self, img: np.ndarray, bounding_box: list) -> None:
"""
Run table extraction and return array of shape(rows, columns)
"""
def thread_work(bounding_box):
loading = LoadingPopup(
self.root,
title="Running OCR",
desc="Extracting table data, please wait",
)
bounding_box = self.__correct_bounding(bounding_box)
self.__image = img[
int(bounding_box[1]) : int(bounding_box[3] + bounding_box[1]),
int(bounding_box[0]) : int(bounding_box[2] + bounding_box[0]),
]
processed_image, bounding_boxes = get_boxes(self.__image)
load_length = len(bounding_boxes)
load_i = 0
row = []
for i in bounding_boxes:
for j in i:
if len(j) == 0:
row.append(["", -2])
else:
col = []
for k in j:
y, x, w, h = (
k[0],
k[1],
k[2],
k[3],
)
cropped_img = processed_image[x : x + h, y : y + w]
col = self.__run_tesseract(cropped_img)
row.append(col)
loading.change_progress(load_i * 100 / load_length)
load_i += 1
arr = np.array(row)
loading.change_progress(100)
self.data.set(
value=arr.reshape(
(len(bounding_boxes), len(bounding_boxes[0]), 2)
).tolist()
)
ocr_thread = Thread(target=thread_work, args=[bounding_box])
ocr_thread.start()
def __correct_bounding(self, box: list) -> list:
x, y, x2, y2 = 0, 0, 0, 0
if int(box[2]) < 0:
x = int(box[0]) + int(box[2])
x2 = abs(int(box[2]))
box[0] = x
box[2] = x2
if int(box[3]) < 0:
y = int(box[1]) + int(box[3])
y2 = abs(int(box[3]))
box[1] = y
box[3] = y2
return box
def __run_tesseract(self, image: np.ndarray) -> list:
tesseract_config = """-c tessedit_char_whitelist=
"01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz.-/ '"
--psm 7 --oem 1"""
out = pytesseract.image_to_data(
image,
lang="eng",
config=tesseract_config,
output_type=pytesseract.Output.DICT,
)
ind = np.where(np.array(out.get("conf")) != "-1")
text = ""
conf = 0
if len(ind[0]) >= 1:
for i in ind[0]:
text = " ".join([text, out.get("text")[i]])
conf += float(out.get("conf")[i])
conf = conf / len(ind[0])
if text == "" and conf == 0:
conf = -2 # this denotes a empty space predition
return [text, conf]
| [
"tkinter.Variable",
"numpy.array",
"gui.components.loading_popup.loading_popup.LoadingPopup",
"pytesseract.image_to_data",
"threading.Thread"
] | [((799, 819), 'tkinter.Variable', 'Variable', ([], {'value': 'None'}), '(value=None)\n', (807, 819), False, 'from tkinter import Variable\n'), ((2708, 2755), 'threading.Thread', 'Thread', ([], {'target': 'thread_work', 'args': '[bounding_box]'}), '(target=thread_work, args=[bounding_box])\n', (2714, 2755), False, 'from threading import Thread\n'), ((3438, 3548), 'pytesseract.image_to_data', 'pytesseract.image_to_data', (['image'], {'lang': '"""eng"""', 'config': 'tesseract_config', 'output_type': 'pytesseract.Output.DICT'}), "(image, lang='eng', config=tesseract_config,\n output_type=pytesseract.Output.DICT)\n", (3463, 3548), False, 'import pytesseract\n'), ((1108, 1200), 'gui.components.loading_popup.loading_popup.LoadingPopup', 'LoadingPopup', (['self.root'], {'title': '"""Running OCR"""', 'desc': '"""Extracting table data, please wait"""'}), "(self.root, title='Running OCR', desc=\n 'Extracting table data, please wait')\n", (1120, 1200), False, 'from gui.components.loading_popup.loading_popup import LoadingPopup\n'), ((2459, 2472), 'numpy.array', 'np.array', (['row'], {}), '(row)\n', (2467, 2472), True, 'import numpy as np\n')] |
import pytest
from unittest import mock, result
from django.urls import reverse
from mixer.backend.django import mixer
from dataset.models import Dataset
from cueSearch.elasticSearch import ESQueryingUtils
from cueSearch.elasticSearch import ESIndexingUtils
from cueSearch.services import GlobalDimensionServices
from cueSearch.elasticSearch.utils import Utils
@pytest.mark.django_db(transaction=True)
def test_elastic_search_indexing(client, mocker):
"""
Method to create test dataset
"""
ESIndexingUtils.deleteAllIndex()
connection = mixer.blend("dataset.connection")
testDataset = mixer.blend(
"dataset.dataset",
name="orders",
id=1,
dimensions='["Brand", "Color", "State"]',
metrics='["Orders", "OrderAmount", "OrderQuantity"]',
granularity="day",
timestampColumn="TestDate",
sql="Select * from testTable",
)
mockResponse = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.runAllIndexDimension",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse.start()
path = reverse("createDataset")
data = {
"name": "demo_dataset",
"sql": "SELECT * from TEST_TABLE",
"connectionId": connection.id,
"metrics": ["Amount", "Quantity"],
"dimensions": ["Category", "Region"],
"timestamp": "CreatedAt",
"granularity": "day",
"isNonRollup": False,
}
response = client.post(path, data=data, content_type="application/json")
# create dimension for testing
dataset = Dataset.objects.get(id=1)
mockResponse.start()
path = reverse("globalDimensionCreate")
gd_data = {
"name": "test",
"dimensionalValues": [
{
"datasetId": dataset.id,
"dataset": "Returns",
"dimension": "WarehouseCode",
}
],
}
response = client.post(path, gd_data, content_type="application/json")
assert response.data["success"] == True
assert response.status_code == 200
globalDimsId = GlobalDimensionServices.getGlobalDimensions()
globalDimensionId = globalDimsId.data[0]["values"][0]["id"]
# Publishing global dimension by id
path = reverse("pubGlobalDimension")
payload = {"id": globalDimensionId, "published": True}
response = client.post(path, payload)
mockResponse.stop()
# Creating a index value
res = {"success": True, "data": ["TestData", "TestDataOne"]}
mockResponse = mocker.patch(
"cueSearch.elasticSearch.utils.Utils.getDimensionalValuesForDimension",
new=mock.MagicMock(autospec=True, return_value=res),
)
ESIndexingUtils.indexGlobalDimensionsDataForSearchSuggestion()
mockResponse.stop()
# Deeply testing of global dimension indexing
dataIndex = Utils.getDimensionalValuesForDimension(dataset.id, "Brand")
assert dataIndex["data"] == ["TestData", "TestDataOne"]
query = "TestData"
result = ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion(query=query)
count = 0
while not result:
count += 1
result = ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion(
query=query
)
if count == 10:
result = []
return result
expectedResult = [
{
"value": "TestData",
"user_entity_identifier": "test",
"id": globalDimensionId,
"type": "GLOBALDIMENSION",
},
{
"value": "TestDataOne",
"user_entity_identifier": "test",
"id": globalDimensionId,
"type": "GLOBALDIMENSION",
},
]
assert result == expectedResult
@pytest.mark.django_db(transaction=True)
def testIndexGlobalDimensinData(client, mocker):
"""Method to test index global dimension data"""
ESIndexingUtils.deleteAllIndex()
connection = mixer.blend("dataset.connection")
testDataset = mixer.blend(
"dataset.dataset",
name="orders",
id=1,
dimensions='["Brand", "Color", "State"]',
metrics='["Orders", "OrderAmount", "OrderQuantity"]',
granularity="day",
timestampColumn="TestDate",
sql="Select * from testTable",
)
mockResponse = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.runAllIndexDimension",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse.start()
path = reverse("createDataset")
data = {
"name": "demo_dataset",
"sql": "SELECT * from TEST_TABLE",
"connectionId": connection.id,
"metrics": ["Amount", "Quantity"],
"dimensions": ["Category", "Region"],
"timestamp": "CreatedAt",
"granularity": "day",
"isNonRollup": False,
}
response = client.post(path, data=data, content_type="application/json")
# create dimension for testing
dataset = Dataset.objects.get(id=1)
mockResponse.start()
path = reverse("globalDimensionCreate")
gd_data = {
"name": "test",
"dimensionalValues": [
{
"datasetId": dataset.id,
"dataset": "Returns",
"dimension": "WarehouseCode",
}
],
}
response = client.post(path, gd_data, content_type="application/json")
assert response.data["success"] == True
assert response.status_code == 200
globalDimsId = GlobalDimensionServices.getGlobalDimensions()
globalDimensionId = globalDimsId.data[0]["values"][0]["id"]
# Publishing global dimension by id
path = reverse("pubGlobalDimension")
payload = {"id": globalDimensionId, "published": True}
response = client.post(path, payload)
mockResponse.stop()
##################### Global dimension data index ######################
# Creating a index value
res = {"success": True, "data": ["TestData", "TestDataOne"]}
mockResponse = mocker.patch(
"cueSearch.elasticSearch.utils.Utils.getDimensionalValuesForDimension",
new=mock.MagicMock(autospec=True, return_value=res),
)
ESIndexingUtils.indexGlobalDimensionsData()
mockResponse.stop()
query = "TestData"
result = ESQueryingUtils.findGlobalDimensionResults(query=query)
count = 0
while not result:
count += 1
result = ESQueryingUtils.findGlobalDimensionResults(query=query)
if count == 10:
result = []
return result
expectedResults = [
{
"value": "TestData",
"dimension": "WarehouseCode",
"globalDimensionName": "test",
"user_entity_identifier": "test",
"id": globalDimensionId,
"dataset": "orders",
"datasetId": testDataset.id,
"type": "GLOBALDIMENSION",
}
]
assert result == expectedResults
@pytest.mark.django_db(transaction=True)
def testNonGlobalDimensionDataIndex(client, mocker):
"""Method to test non global dimension index"""
ESIndexingUtils.deleteAllIndex()
connection = mixer.blend("dataset.connection")
testDataset = mixer.blend(
"dataset.dataset",
name="orders",
id=1,
dimensions='["Brand", "Color", "State"]',
metrics='["Orders", "OrderAmount", "OrderQuantity"]',
granularity="day",
timestampColumn="TestDate",
sql="Select * from testTable",
)
mockResponse = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.runAllIndexDimension",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse.start()
path = reverse("createDataset")
data = {
"name": "demo_dataset",
"sql": "SELECT * from TEST_TABLE",
"connectionId": connection.id,
"metrics": ["Amount", "Quantity"],
"dimensions": ["Category", "Region"],
"timestamp": "CreatedAt",
"granularity": "day",
"isNonRollup": False,
}
response = client.post(path, data=data, content_type="application/json")
# create dimension for testing
dataset = Dataset.objects.get(id=1)
mockResponse.start()
path = reverse("globalDimensionCreate")
gd_data = {
"name": "test",
"dimensionalValues": [
{
"datasetId": dataset.id,
"dataset": "Returns",
"dimension": "WarehouseCode",
}
],
}
response = client.post(path, gd_data, content_type="application/json")
assert response.data["success"] == True
assert response.status_code == 200
globalDimsId = GlobalDimensionServices.getGlobalDimensions()
globalDimensionId = globalDimsId.data[0]["values"][0]["id"]
# Publishing global dimension by id
path = reverse("pubGlobalDimension")
payload = {"id": globalDimensionId, "published": True}
response = client.post(path, payload)
mockResponse.stop()
################################ Global dimension data index #################
listToIndex = [
{"dataset": "Test data", "datasetId": 1, "dimension": "Brand"},
{"dataset": "Test data", "datasetId": 1, "dimension": "WarehouseCode"},
]
res = {"success": True, "data": listToIndex}
mockResponse = mocker.patch(
"cueSearch.services.globalDimension.GlobalDimensionServices.nonGlobalDimensionForIndexing",
new=mock.MagicMock(autospec=True, return_value=res),
)
mockResponse.start()
ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion()
mockResponse.stop()
query = "TestData"
result = ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion(
query=query
)
count = 0
while not result:
count += 1
result = ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion(
query=query
)
if count == 10:
result = []
return result
expectedResult = [
{
"value": "TestData",
"user_entity_identifier": "Test data_Brand",
"id": "Brand_TestData_1",
"datasetId": testDataset.id,
"globalDimensionId": "Brand_TestData_1",
"type": "DATASETDIMENSION",
},
{
"value": "TestData",
"user_entity_identifier": "Test data_WarehouseCode",
"id": "WarehouseCode_TestData_1",
"datasetId": testDataset.id,
"globalDimensionId": "WarehouseCode_TestData_1",
"type": "DATASETDIMENSION",
},
{
"value": "TestDataOne",
"user_entity_identifier": "Test data_Brand",
"id": "Brand_TestDataOne_1",
"datasetId": testDataset.id,
"globalDimensionId": "Brand_TestDataOne_1",
"type": "DATASETDIMENSION",
},
{
"value": "TestDataOne",
"user_entity_identifier": "Test data_WarehouseCode",
"id": "WarehouseCode_TestDataOne_1",
"datasetId": testDataset.id,
"globalDimensionId": "WarehouseCode_TestDataOne_1",
"type": "DATASETDIMENSION",
},
]
assert result == expectedResult
@pytest.mark.django_db(transaction=True)
def testFindNonGlobalDimensionSuggestionResult(client, mocker):
"""Method to test non global dimension index"""
ESIndexingUtils.deleteAllIndex()
connection = mixer.blend("dataset.connection")
testDataset = mixer.blend(
"dataset.dataset",
name="orders",
id=1,
dimensions='["Brand", "Color", "State"]',
metrics='["Orders", "OrderAmount", "OrderQuantity"]',
granularity="day",
timestampColumn="TestDate",
sql="Select * from testTable",
)
mockResponse = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.runAllIndexDimension",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse.start()
path = reverse("createDataset")
data = {
"name": "demo_dataset",
"sql": "SELECT * from TEST_TABLE",
"connectionId": connection.id,
"metrics": ["Amount", "Quantity"],
"dimensions": ["Category", "Region"],
"timestamp": "CreatedAt",
"granularity": "day",
"isNonRollup": False,
}
response = client.post(path, data=data, content_type="application/json")
# create dimension for testing
dataset = Dataset.objects.get(id=1)
mockResponse.start()
path = reverse("globalDimensionCreate")
gd_data = {
"name": "test",
"dimensionalValues": [
{
"datasetId": dataset.id,
"dataset": "Returns",
"dimension": "WarehouseCode",
}
],
}
response = client.post(path, gd_data, content_type="application/json")
assert response.data["success"] == True
assert response.status_code == 200
globalDimsId = GlobalDimensionServices.getGlobalDimensions()
globalDimensionId = globalDimsId.data[0]["values"][0]["id"]
# Publishing global dimension by id
path = reverse("pubGlobalDimension")
payload = {"id": globalDimensionId, "published": True}
response = client.post(path, payload)
mockResponse.stop()
listToIndex = [
{"dataset": "Test data", "datasetId": 1, "dimension": "Brand"},
{"dataset": "Test data", "datasetId": 1, "dimension": "WarehouseCode"},
]
res = {"success": True, "data": listToIndex}
mockResponse = mocker.patch(
"cueSearch.services.globalDimension.GlobalDimensionServices.nonGlobalDimensionForIndexing",
new=mock.MagicMock(autospec=True, return_value=res),
)
mockResponse.start()
ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion()
mockResponse.stop()
expectedResults = [
{
"value": "TestData",
"dimension": "Brand",
"globalDimensionName": "Test data_Brand",
"user_entity_identifier": "Test data_Brand",
"id": "Brand_TestData_1",
"dataset": "Test data",
"datasetId": testDataset.id,
"type": "DATASETDIMENSION",
},
{
"value": "TestData",
"dimension": "WarehouseCode",
"globalDimensionName": "Test data_WarehouseCode",
"user_entity_identifier": "Test data_WarehouseCode",
"id": "WarehouseCode_TestData_1",
"dataset": "Test data",
"datasetId": testDataset.id,
"type": "DATASETDIMENSION",
},
]
query = "TestData"
result = ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion(
query=query
)
# breakpoint()
count = 0
while not result:
count += 1
result = ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion(
query=query
)
if count == 10:
result = []
return result
assert result == expectedResults
################################ Delete all indexes ##############
# @pytest.mark.django_db(transaction=True)
# def testFindNonGlobalDimensionResultData(client, mocker):
# """Method to test non global dimension index"""
# ESIndexingUtils.deleteAllIndex()
# connection = mixer.blend("dataset.connection")
# testDataset = mixer.blend(
# "dataset.dataset",
# name="orders",
# id=1,
# dimensions='["Brand", "Color", "State"]',
# metrics='["Orders", "OrderAmount", "OrderQuantity"]',
# granularity="day",
# timestampColumn="TestDate",
# sql="Select * from testTable",
# )
# mockResponse = mocker.patch(
# "cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.runAllIndexDimension",
# new=mock.MagicMock(autospec=True, return_value=True),
# )
# mockResponse.start()
# path = reverse("createDataset")
# data = {
# "name": "demo_dataset",
# "sql": "SELECT * from TEST_TABLE",
# "connectionId": connection.id,
# "metrics": ["Amount", "Quantity"],
# "dimensions": ["Category", "Region"],
# "timestamp": "CreatedAt",
# "granularity": "day",
# "isNonRollup": False,
# }
# response = client.post(path, data=data, content_type="application/json")
# # create dimension for testing
# dataset = Dataset.objects.get(id=1)
# mockResponse.start()
# path = reverse("globalDimensionCreate")
# gd_data = {
# "name": "test",
# "dimensionalValues": [
# {
# "datasetId": dataset.id,
# "dataset": "Returns",
# "dimension": "WarehouseCode",
# }
# ],
# }
# response = client.post(path, gd_data, content_type="application/json")
# assert response.data["success"] == True
# assert response.status_code == 200
# globalDimsId = GlobalDimensionServices.getGlobalDimensions()
# globalDimensionId = globalDimsId.data[0]["values"][0]["id"]
# # Publishing global dimension by id
# path = reverse("pubGlobalDimension")
# payload = {"id": globalDimensionId, "published": True}
# response = client.post(path, payload)
# mockResponse.stop()
# listToIndex = [
# {"dataset": "Test data", "datasetId": 1, "dimension": "Brand"},
# {"dataset": "Test data", "datasetId": 1, "dimension": "WarehouseCode"},
# ]
# res = {"success": True, "data": listToIndex}
# mockResponse = mocker.patch(
# "cueSearch.services.globalDimension.GlobalDimensionServices.nonGlobalDimensionForIndexing",
# new=mock.MagicMock(autospec=True, return_value=res),
# )
# mockResponse.start()
# ESIndexingUtils.indexNonGlobalDimensionsData()
# mockResponse.stop()
# expectedResults = [
# {
# "value": "TestData",
# "dimension": "Brand",
# "globalDimensionName": "Test data_Brand",
# "user_entity_identifier": "Test data_Brand",
# "id": "Brand_TestData_1",
# "dataset": "Test data",
# "datasetId": testDataset.id,
# "type": "DATASETDIMENSION",
# },
# {
# "value": "TestData",
# "dimension": "WarehouseCode",
# "globalDimensionName": "Test data_WarehouseCode",
# "user_entity_identifier": "Test data_WarehouseCode",
# "id": "WarehouseCode_TestData_1",
# "dataset": "Test data",
# "datasetId": testDataset.id,
# "type": "DATASETDIMENSION",
# },
# ]
# query = "TestData"
# result = ESQueryingUtils.findNonGlobalDimensionResults(query=query)
# count = 0
# while not result:
# count += 1
# result = ESQueryingUtils.findNonGlobalDimensionResults(query=query)
# if count == 10:
# result = []
# return result
# assert result == expectedResults
# ################################ Delete all indexes ##############
def testRunAllIndexing(client, mocker):
mockResponse = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.indexGlobalDimensionsDataForSearchSuggestion",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse.start()
mockResponse1 = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.indexGlobalDimensionsData",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse1.start()
mockResponse2 = mocker.patch(
"cueSearch.elasticSearch.elastic_search_indexing.ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion",
new=mock.MagicMock(autospec=True, return_value=True),
)
mockResponse2.start()
ESIndexingUtils.runAllIndexDimension()
mockResponse.stop()
mockResponse1.stop()
mockResponse2.stop()
| [
"cueSearch.elasticSearch.ESIndexingUtils.indexGlobalDimensionsDataForSearchSuggestion",
"dataset.models.Dataset.objects.get",
"cueSearch.elasticSearch.ESIndexingUtils.deleteAllIndex",
"cueSearch.elasticSearch.utils.Utils.getDimensionalValuesForDimension",
"cueSearch.elasticSearch.ESIndexingUtils.runAllIndexDimension",
"cueSearch.elasticSearch.ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion",
"cueSearch.elasticSearch.ESIndexingUtils.indexGlobalDimensionsData",
"unittest.mock.MagicMock",
"cueSearch.elasticSearch.ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion",
"mixer.backend.django.mixer.blend",
"cueSearch.services.GlobalDimensionServices.getGlobalDimensions",
"django.urls.reverse",
"cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResults",
"pytest.mark.django_db",
"cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion"
] | [((364, 403), 'pytest.mark.django_db', 'pytest.mark.django_db', ([], {'transaction': '(True)'}), '(transaction=True)\n', (385, 403), False, 'import pytest\n'), ((3772, 3811), 'pytest.mark.django_db', 'pytest.mark.django_db', ([], {'transaction': '(True)'}), '(transaction=True)\n', (3793, 3811), False, 'import pytest\n'), ((6974, 7013), 'pytest.mark.django_db', 'pytest.mark.django_db', ([], {'transaction': '(True)'}), '(transaction=True)\n', (6995, 7013), False, 'import pytest\n'), ((11330, 11369), 'pytest.mark.django_db', 'pytest.mark.django_db', ([], {'transaction': '(True)'}), '(transaction=True)\n', (11351, 11369), False, 'import pytest\n'), ((508, 540), 'cueSearch.elasticSearch.ESIndexingUtils.deleteAllIndex', 'ESIndexingUtils.deleteAllIndex', ([], {}), '()\n', (538, 540), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((558, 591), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.connection"""'], {}), "('dataset.connection')\n", (569, 591), False, 'from mixer.backend.django import mixer\n'), ((610, 849), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.dataset"""'], {'name': '"""orders"""', 'id': '(1)', 'dimensions': '"""["Brand", "Color", "State"]"""', 'metrics': '"""["Orders", "OrderAmount", "OrderQuantity"]"""', 'granularity': '"""day"""', 'timestampColumn': '"""TestDate"""', 'sql': '"""Select * from testTable"""'}), '(\'dataset.dataset\', name=\'orders\', id=1, dimensions=\n \'["Brand", "Color", "State"]\', metrics=\n \'["Orders", "OrderAmount", "OrderQuantity"]\', granularity=\'day\',\n timestampColumn=\'TestDate\', sql=\'Select * from testTable\')\n', (621, 849), False, 'from mixer.backend.django import mixer\n'), ((1140, 1164), 'django.urls.reverse', 'reverse', (['"""createDataset"""'], {}), "('createDataset')\n", (1147, 1164), False, 'from django.urls import reverse\n'), ((1608, 1633), 'dataset.models.Dataset.objects.get', 'Dataset.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (1627, 1633), False, 'from dataset.models import Dataset\n'), ((1670, 1702), 'django.urls.reverse', 'reverse', (['"""globalDimensionCreate"""'], {}), "('globalDimensionCreate')\n", (1677, 1702), False, 'from django.urls import reverse\n'), ((2122, 2167), 'cueSearch.services.GlobalDimensionServices.getGlobalDimensions', 'GlobalDimensionServices.getGlobalDimensions', ([], {}), '()\n', (2165, 2167), False, 'from cueSearch.services import GlobalDimensionServices\n'), ((2284, 2313), 'django.urls.reverse', 'reverse', (['"""pubGlobalDimension"""'], {}), "('pubGlobalDimension')\n", (2291, 2313), False, 'from django.urls import reverse\n'), ((2719, 2781), 'cueSearch.elasticSearch.ESIndexingUtils.indexGlobalDimensionsDataForSearchSuggestion', 'ESIndexingUtils.indexGlobalDimensionsDataForSearchSuggestion', ([], {}), '()\n', (2779, 2781), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((2872, 2931), 'cueSearch.elasticSearch.utils.Utils.getDimensionalValuesForDimension', 'Utils.getDimensionalValuesForDimension', (['dataset.id', '"""Brand"""'], {}), "(dataset.id, 'Brand')\n", (2910, 2931), False, 'from cueSearch.elasticSearch.utils import Utils\n'), ((3029, 3103), 'cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (3090, 3103), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((3918, 3950), 'cueSearch.elasticSearch.ESIndexingUtils.deleteAllIndex', 'ESIndexingUtils.deleteAllIndex', ([], {}), '()\n', (3948, 3950), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((3968, 4001), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.connection"""'], {}), "('dataset.connection')\n", (3979, 4001), False, 'from mixer.backend.django import mixer\n'), ((4020, 4259), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.dataset"""'], {'name': '"""orders"""', 'id': '(1)', 'dimensions': '"""["Brand", "Color", "State"]"""', 'metrics': '"""["Orders", "OrderAmount", "OrderQuantity"]"""', 'granularity': '"""day"""', 'timestampColumn': '"""TestDate"""', 'sql': '"""Select * from testTable"""'}), '(\'dataset.dataset\', name=\'orders\', id=1, dimensions=\n \'["Brand", "Color", "State"]\', metrics=\n \'["Orders", "OrderAmount", "OrderQuantity"]\', granularity=\'day\',\n timestampColumn=\'TestDate\', sql=\'Select * from testTable\')\n', (4031, 4259), False, 'from mixer.backend.django import mixer\n'), ((4550, 4574), 'django.urls.reverse', 'reverse', (['"""createDataset"""'], {}), "('createDataset')\n", (4557, 4574), False, 'from django.urls import reverse\n'), ((5018, 5043), 'dataset.models.Dataset.objects.get', 'Dataset.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (5037, 5043), False, 'from dataset.models import Dataset\n'), ((5080, 5112), 'django.urls.reverse', 'reverse', (['"""globalDimensionCreate"""'], {}), "('globalDimensionCreate')\n", (5087, 5112), False, 'from django.urls import reverse\n'), ((5532, 5577), 'cueSearch.services.GlobalDimensionServices.getGlobalDimensions', 'GlobalDimensionServices.getGlobalDimensions', ([], {}), '()\n', (5575, 5577), False, 'from cueSearch.services import GlobalDimensionServices\n'), ((5694, 5723), 'django.urls.reverse', 'reverse', (['"""pubGlobalDimension"""'], {}), "('pubGlobalDimension')\n", (5701, 5723), False, 'from django.urls import reverse\n'), ((6206, 6249), 'cueSearch.elasticSearch.ESIndexingUtils.indexGlobalDimensionsData', 'ESIndexingUtils.indexGlobalDimensionsData', ([], {}), '()\n', (6247, 6249), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((6310, 6365), 'cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResults', 'ESQueryingUtils.findGlobalDimensionResults', ([], {'query': 'query'}), '(query=query)\n', (6352, 6365), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((7123, 7155), 'cueSearch.elasticSearch.ESIndexingUtils.deleteAllIndex', 'ESIndexingUtils.deleteAllIndex', ([], {}), '()\n', (7153, 7155), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((7174, 7207), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.connection"""'], {}), "('dataset.connection')\n", (7185, 7207), False, 'from mixer.backend.django import mixer\n'), ((7226, 7465), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.dataset"""'], {'name': '"""orders"""', 'id': '(1)', 'dimensions': '"""["Brand", "Color", "State"]"""', 'metrics': '"""["Orders", "OrderAmount", "OrderQuantity"]"""', 'granularity': '"""day"""', 'timestampColumn': '"""TestDate"""', 'sql': '"""Select * from testTable"""'}), '(\'dataset.dataset\', name=\'orders\', id=1, dimensions=\n \'["Brand", "Color", "State"]\', metrics=\n \'["Orders", "OrderAmount", "OrderQuantity"]\', granularity=\'day\',\n timestampColumn=\'TestDate\', sql=\'Select * from testTable\')\n', (7237, 7465), False, 'from mixer.backend.django import mixer\n'), ((7756, 7780), 'django.urls.reverse', 'reverse', (['"""createDataset"""'], {}), "('createDataset')\n", (7763, 7780), False, 'from django.urls import reverse\n'), ((8224, 8249), 'dataset.models.Dataset.objects.get', 'Dataset.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (8243, 8249), False, 'from dataset.models import Dataset\n'), ((8286, 8318), 'django.urls.reverse', 'reverse', (['"""globalDimensionCreate"""'], {}), "('globalDimensionCreate')\n", (8293, 8318), False, 'from django.urls import reverse\n'), ((8738, 8783), 'cueSearch.services.GlobalDimensionServices.getGlobalDimensions', 'GlobalDimensionServices.getGlobalDimensions', ([], {}), '()\n', (8781, 8783), False, 'from cueSearch.services import GlobalDimensionServices\n'), ((8900, 8929), 'django.urls.reverse', 'reverse', (['"""pubGlobalDimension"""'], {}), "('pubGlobalDimension')\n", (8907, 8929), False, 'from django.urls import reverse\n'), ((9597, 9662), 'cueSearch.elasticSearch.ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion', 'ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion', ([], {}), '()\n', (9660, 9662), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((9724, 9801), 'cueSearch.elasticSearch.ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (9788, 9801), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((11490, 11522), 'cueSearch.elasticSearch.ESIndexingUtils.deleteAllIndex', 'ESIndexingUtils.deleteAllIndex', ([], {}), '()\n', (11520, 11522), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((11541, 11574), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.connection"""'], {}), "('dataset.connection')\n", (11552, 11574), False, 'from mixer.backend.django import mixer\n'), ((11593, 11832), 'mixer.backend.django.mixer.blend', 'mixer.blend', (['"""dataset.dataset"""'], {'name': '"""orders"""', 'id': '(1)', 'dimensions': '"""["Brand", "Color", "State"]"""', 'metrics': '"""["Orders", "OrderAmount", "OrderQuantity"]"""', 'granularity': '"""day"""', 'timestampColumn': '"""TestDate"""', 'sql': '"""Select * from testTable"""'}), '(\'dataset.dataset\', name=\'orders\', id=1, dimensions=\n \'["Brand", "Color", "State"]\', metrics=\n \'["Orders", "OrderAmount", "OrderQuantity"]\', granularity=\'day\',\n timestampColumn=\'TestDate\', sql=\'Select * from testTable\')\n', (11604, 11832), False, 'from mixer.backend.django import mixer\n'), ((12123, 12147), 'django.urls.reverse', 'reverse', (['"""createDataset"""'], {}), "('createDataset')\n", (12130, 12147), False, 'from django.urls import reverse\n'), ((12591, 12616), 'dataset.models.Dataset.objects.get', 'Dataset.objects.get', ([], {'id': '(1)'}), '(id=1)\n', (12610, 12616), False, 'from dataset.models import Dataset\n'), ((12653, 12685), 'django.urls.reverse', 'reverse', (['"""globalDimensionCreate"""'], {}), "('globalDimensionCreate')\n", (12660, 12685), False, 'from django.urls import reverse\n'), ((13105, 13150), 'cueSearch.services.GlobalDimensionServices.getGlobalDimensions', 'GlobalDimensionServices.getGlobalDimensions', ([], {}), '()\n', (13148, 13150), False, 'from cueSearch.services import GlobalDimensionServices\n'), ((13267, 13296), 'django.urls.reverse', 'reverse', (['"""pubGlobalDimension"""'], {}), "('pubGlobalDimension')\n", (13274, 13296), False, 'from django.urls import reverse\n'), ((13879, 13944), 'cueSearch.elasticSearch.ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion', 'ESIndexingUtils.indexNonGlobalDimensionsDataForSearchSuggestion', ([], {}), '()\n', (13942, 13944), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((14776, 14853), 'cueSearch.elasticSearch.ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (14840, 14853), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((20017, 20055), 'cueSearch.elasticSearch.ESIndexingUtils.runAllIndexDimension', 'ESIndexingUtils.runAllIndexDimension', ([], {}), '()\n', (20053, 20055), False, 'from cueSearch.elasticSearch import ESIndexingUtils\n'), ((3177, 3251), 'cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (3238, 3251), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((6438, 6493), 'cueSearch.elasticSearch.ESQueryingUtils.findGlobalDimensionResults', 'ESQueryingUtils.findGlobalDimensionResults', ([], {'query': 'query'}), '(query=query)\n', (6480, 6493), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((9888, 9965), 'cueSearch.elasticSearch.ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (9952, 9965), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((14959, 15036), 'cueSearch.elasticSearch.ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', 'ESQueryingUtils.findNonGlobalDimensionResultsForSearchSuggestion', ([], {'query': 'query'}), '(query=query)\n', (15023, 15036), False, 'from cueSearch.elasticSearch import ESQueryingUtils\n'), ((1048, 1096), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (1062, 1096), False, 'from unittest import mock, result\n'), ((2660, 2707), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': 'res'}), '(autospec=True, return_value=res)\n', (2674, 2707), False, 'from unittest import mock, result\n'), ((4458, 4506), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (4472, 4506), False, 'from unittest import mock, result\n'), ((6147, 6194), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': 'res'}), '(autospec=True, return_value=res)\n', (6161, 6194), False, 'from unittest import mock, result\n'), ((7664, 7712), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (7678, 7712), False, 'from unittest import mock, result\n'), ((9513, 9560), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': 'res'}), '(autospec=True, return_value=res)\n', (9527, 9560), False, 'from unittest import mock, result\n'), ((12031, 12079), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (12045, 12079), False, 'from unittest import mock, result\n'), ((13795, 13842), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': 'res'}), '(autospec=True, return_value=res)\n', (13809, 13842), False, 'from unittest import mock, result\n'), ((19451, 19499), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (19465, 19499), False, 'from unittest import mock, result\n'), ((19679, 19727), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (19693, 19727), False, 'from unittest import mock, result\n'), ((19930, 19978), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'autospec': '(True)', 'return_value': '(True)'}), '(autospec=True, return_value=True)\n', (19944, 19978), False, 'from unittest import mock, result\n')] |
import numpy as np
def load_mesh():
import csv
f = open('tet.csv')
reader = csv.reader(f, delimiter=' ')
_eind = []
for line in reader:
_eind += [int(x) for x in line]
_nv = max(_eind) + 1
_ne = len(_eind) // 4
return (
_nv, _ne,
np.asarray(np.arange(0, len(_eind) + 4, 4), dtype='int32'),
np.asarray(_eind, dtype='int32')
)
| [
"numpy.asarray",
"csv.reader"
] | [((90, 118), 'csv.reader', 'csv.reader', (['f'], {'delimiter': '""" """'}), "(f, delimiter=' ')\n", (100, 118), False, 'import csv\n'), ((356, 388), 'numpy.asarray', 'np.asarray', (['_eind'], {'dtype': '"""int32"""'}), "(_eind, dtype='int32')\n", (366, 388), True, 'import numpy as np\n')] |
import numpy as np
class Car:
"""
The car object has the following attributes:
1. Track - the associated track
2. current position: Current position on the track. Floating point coordinate.
3. integer_position_: defined as current_position.astype('int16')
4. Bool collided_on_last
5. sensors: LIDAR type function
6. init_speed_
7. max_speed
8. sampling frequency: The frequency of play.
9. speed: Current speed
10. car_angle: current car angle as define below.
"""
def __init__(self, track, max_speed, sampling_frequency):
"""
track: The track as a track object
max_speed: Max attainable speed
sampling_frequency:
car angle is defined as the angle of the (unimplemented) velocity vector from
the standard mathematical X axis, i.e.,
y ^
|
|
|
-------> x
Positive is ACW.
"""
self.track=track
self.last_checkpoint=self.track.spawn_at[np.random.choice(range(len(self.track.spawn_at)))]
self.next_checkpoint=self.track.next_checkpoint(self.last_checkpoint)
self.time_in_current_sector=0
# self.possible_position=spawn_at
self.current_position=self.last_checkpoint
self.integer_position_=self.current_position.astype('int16')
self.collided_on_last=None
self.init_speed__=0
self.speed=self.init_speed__
self.max_speed=max_speed
self.sampling_frequency=sampling_frequency
self.car_angle=np.random.uniform(0, 1)*2*np.pi
self.sensors=np.zeros((8,))
self.load_sensors()
return
def duplicate(self):
"""
Returns: A car object, completely mimicing the current car object in every way.
"""
car=Car(self.track, self.max_speed)
car.last_checkpoint=np.zeros((2, ))+self.last_checkpoint
car.next_checkpoint=car.track.next_checkpoint(car.last_checkpoint)
car.time_in_current_sector=self.time_in_current_sector
car.current_position=car.current_position*0+self.current_position
car.integer_position_=car.current_position.astype('int16')
car.collided_on_last=self.collided_on_last
car.speed=self.speed
car.car_angle=self.car_angle
car.sampling_frequency=self.sampling_frequency
car.load_sensors()
return car
def re_spawn(self):
"""
Re-spawn the current car at a legal position. Consult spawn_at attribute of associated track.
Set all car state attributes.
"""
self.speed=self.init_speed__
self.last_checkpoint=self.track.spawn_at[np.random.choice(range(len(self.track.spawn_at)))]
self.next_checkpoint=self.track.next_checkpoint(self.last_checkpoint)
self.time_in_current_sector=0
self.current_position=self.last_checkpoint
self.integer_position_=self.current_position.astype('int16')
self.collided_on_last=None
self.load_sensors()
self.speed=self.init_speed__
self.car_angle=np.random.uniform(0, 1)*2*np.pi
return
def execute_forces(self, f1, f2, max_magnitudes=50):
"""
Execute the forces.
Update car state attributes:
speed
car_angle
collided_on_last
current_position, integer_position
sensors
f1 is the force in the vertical direction
f2 is the force in the horizrontal direction
^
|
| this is f1
------> this is f2
f1 is expected between -1, 1
f2 is expected between -1, 1
"""
f1=max_magnitudes*f1
f2=max_magnitudes*f2
if self.speed==0:
if f1==0 or f2==0:
if f1==0 and f2==0:
self.time_in_current_sector=self.time_in_current_sector+1.0/self.sampling_frequency
return
elif f1!=0 and f2==0:
self.car_angle=np.pi/2
if f1<0:
self.car_angle=3*np.pi/2
else:
self.car_angle=0
if f2<0:
self.car_angle=self.car_angle+np.pi
else:
abs_angle=np.arctan(abs(f1/f2))
if f1>0 and f2>0:
self.car_angle=abs_angle
elif f1>0 and f2<0:
self.car_angle=np.pi-abs_angle
elif f1<0 and f2<0:
self.car_angle=np.pi+abs_angle
else:
self.car_angle=2*np.pi-abs_angle
self.speed=min(
abs(self.speed+(f2*np.cos(self.car_angle)+f1*np.sin(self.car_angle))*(1.0/self.sampling_frequency)),
self.max_speed
)
delta_angle=0
if self.speed!=0:
delta_angle=np.arctan( (f1*np.cos(self.car_angle)-f2*np.sin(self.car_angle))
/
(self.speed*self.sampling_frequency)
)
self.car_angle=np.mod(self.car_angle+delta_angle, 2*np.pi)
movement=np.asarray([
-1*self.speed*np.sin(self.car_angle)*1.0/self.sampling_frequency,
self.speed*np.cos(self.car_angle)*1.0/self.sampling_frequency])
if max(abs(movement))==0:
print("Zero Movement recorded. Speed is: ", self.speed, " sampling_frequency is: ", self.sampling_frequency)
old_position=np.zeros((2,))+self.current_position
old_int_position=np.zeros((2,))+self.integer_position_
self.current_position=self.current_position+movement
self.integer_position_=self.current_position.astype('int16')
cond=(
np.min(self.current_position)<0 or
(self.current_position>=self.track.track.shape).any() or
self.track.track[self.integer_position_[0], self.integer_position_[1]]!=1
)
if cond:
for distance in range(0, int(np.ceil(self.max_speed*self.sampling_frequency))+1):
movement=np.asarray([-distance*np.sin(self.car_angle), distance*np.cos(self.car_angle)])
temp_pos=old_int_position+movement
temp_pos_int=temp_pos.astype('int16')
if min(temp_pos_int)>=0 and (temp_pos_int<self.track.track.shape).all():
if self.track.track[temp_pos_int[0], temp_pos_int[1]]==1:
self.current_position=self.current_position*0+temp_pos
self.integer_position_=self.current_position.astype('int16')
else:
break
else:
break
self.speed=0
self.collided_on_last=True
else:
self.collided_on_last=False
self.load_sensors()
## ------------- timing calculation---------
dx, dy=self.current_position[0]-self.next_checkpoint[0], self.current_position[1]-self.next_checkpoint[1]
cnd=(
np.sqrt(dx**2+dy**2)<self.track.min_checkpoint_distance and
(old_position[1]<self.next_checkpoint[1] and self.current_position[1]>=self.next_checkpoint[1]
or
old_position[1]>self.next_checkpoint[1] and self.current_position[1]<=self.next_checkpoint[1]
)
)
if cnd:
self.last_checkpoint=self.next_checkpoint
self.next_checkpoint=self.track.next_checkpoint(self.last_checkpoint)
self.time_in_current_sector=0
else:
self.time_in_current_sector=self.time_in_current_sector+1.0/self.sampling_frequency
return
def load_sensors(self):
"""
sensors will be at
0, 30, 60, 90, -30
-60, -90, 180 (directly backward)
degrees
"""
angles=[0, np.pi/6, np.pi/3, np.pi/2, -np.pi/6, -np.pi/3, -np.pi/2, np.pi]
temp_data=np.zeros((8, ))
for angle_index in range(len(angles)):
cur_angle=np.mod(self.car_angle+angles[angle_index], 2*np.pi)
for distance in range(1, 101):
r, c=int(self.integer_position_[0]-distance*np.sin(cur_angle)), int(self.integer_position_[1]+distance*np.cos(cur_angle))
if min(r, c)<0 or r>=self.track.track.shape[0] or c>=self.track.track.shape[1]:
temp_data[angle_index]=distance-1
break
if self.track.track[r, c]==0:
temp_data[angle_index]=distance-1
break
if distance==100:
temp_data[angle_index]=distance
self.sensors[0]=temp_data[7]
self.sensors[4]=temp_data[0]
self.sensors[5]=temp_data[1]
self.sensors[6]=temp_data[2]
self.sensors[7]=temp_data[3]
self.sensors[1]=temp_data[6]
self.sensors[2]=temp_data[5]
self.sensors[3]=temp_data[4]
return
| [
"numpy.ceil",
"numpy.sqrt",
"numpy.zeros",
"numpy.random.uniform",
"numpy.cos",
"numpy.min",
"numpy.sin",
"numpy.mod"
] | [((1665, 1679), 'numpy.zeros', 'np.zeros', (['(8,)'], {}), '((8,))\n', (1673, 1679), True, 'import numpy as np\n'), ((5274, 5321), 'numpy.mod', 'np.mod', (['(self.car_angle + delta_angle)', '(2 * np.pi)'], {}), '(self.car_angle + delta_angle, 2 * np.pi)\n', (5280, 5321), True, 'import numpy as np\n'), ((8152, 8166), 'numpy.zeros', 'np.zeros', (['(8,)'], {}), '((8,))\n', (8160, 8166), True, 'import numpy as np\n'), ((1941, 1955), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (1949, 1955), True, 'import numpy as np\n'), ((5681, 5695), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (5689, 5695), True, 'import numpy as np\n'), ((5743, 5757), 'numpy.zeros', 'np.zeros', (['(2,)'], {}), '((2,))\n', (5751, 5757), True, 'import numpy as np\n'), ((8237, 8292), 'numpy.mod', 'np.mod', (['(self.car_angle + angles[angle_index])', '(2 * np.pi)'], {}), '(self.car_angle + angles[angle_index], 2 * np.pi)\n', (8243, 8292), True, 'import numpy as np\n'), ((1603, 1626), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (1620, 1626), True, 'import numpy as np\n'), ((3212, 3235), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (3229, 3235), True, 'import numpy as np\n'), ((5938, 5967), 'numpy.min', 'np.min', (['self.current_position'], {}), '(self.current_position)\n', (5944, 5967), True, 'import numpy as np\n'), ((7234, 7260), 'numpy.sqrt', 'np.sqrt', (['(dx ** 2 + dy ** 2)'], {}), '(dx ** 2 + dy ** 2)\n', (7241, 7260), True, 'import numpy as np\n'), ((6196, 6245), 'numpy.ceil', 'np.ceil', (['(self.max_speed * self.sampling_frequency)'], {}), '(self.max_speed * self.sampling_frequency)\n', (6203, 6245), True, 'import numpy as np\n'), ((5112, 5134), 'numpy.cos', 'np.cos', (['self.car_angle'], {}), '(self.car_angle)\n', (5118, 5134), True, 'import numpy as np\n'), ((5138, 5160), 'numpy.sin', 'np.sin', (['self.car_angle'], {}), '(self.car_angle)\n', (5144, 5160), True, 'import numpy as np\n'), ((5375, 5397), 'numpy.sin', 'np.sin', (['self.car_angle'], {}), '(self.car_angle)\n', (5381, 5397), True, 'import numpy as np\n'), ((5451, 5473), 'numpy.cos', 'np.cos', (['self.car_angle'], {}), '(self.car_angle)\n', (5457, 5473), True, 'import numpy as np\n'), ((6296, 6318), 'numpy.sin', 'np.sin', (['self.car_angle'], {}), '(self.car_angle)\n', (6302, 6318), True, 'import numpy as np\n'), ((6329, 6351), 'numpy.cos', 'np.cos', (['self.car_angle'], {}), '(self.car_angle)\n', (6335, 6351), True, 'import numpy as np\n'), ((4897, 4919), 'numpy.cos', 'np.cos', (['self.car_angle'], {}), '(self.car_angle)\n', (4903, 4919), True, 'import numpy as np\n'), ((4923, 4945), 'numpy.sin', 'np.sin', (['self.car_angle'], {}), '(self.car_angle)\n', (4929, 4945), True, 'import numpy as np\n'), ((8392, 8409), 'numpy.sin', 'np.sin', (['cur_angle'], {}), '(cur_angle)\n', (8398, 8409), True, 'import numpy as np\n'), ((8451, 8468), 'numpy.cos', 'np.cos', (['cur_angle'], {}), '(cur_angle)\n', (8457, 8468), True, 'import numpy as np\n')] |
################# LocoDrone Python Control ##################
# Written by: <NAME>
# Filename: locodrone.py
##############################################################
# Import LocoRobo Libraries For Communication with Robot
from errors import DroneBindException, \
SerialClosedException, \
DroneUnbindException, \
DroneReconnectException, \
SerialMessageException, DroneInitializationException
from control_codes import OutgoingMessageType, \
IncomingMessageType, InitializationFailureType, OperatingModeType, ControlSourceType
import math
import serial
import time
DEBUG = True
# Accelerometer tilt range, to be mapped to PWM range
MAX_TILT_ANGLE = 60
# Time in seconds for the fly up and land methods
FLY_UP_TIME = 12
LAND_TIME = 7
# Pre-set control values for programmable flight routines
FLY_UP_THROTTLE = 215
LAND_THROTTLE = 127
DRIVE_THROTTLE = 210
MOVE_FORWARD_VALUE = 150
MOVE_BACKWARD_VALUE = 100
TURN_RIGHT_VALUE = 150
TURN_LEFT_VALUE = 100
class LocoDrone:
def __init__(self, port, baudrate):
self.port = port
self.baudrate = baudrate
self.serial = None
self.bound = False
self.throttle = 0
self.yaw = 127
self.pitch = 127
self.roll = 127
self.yawTrim = 64
self.pitchTrim = 64
self.rollTrim = 64
self.flyDrive = 0 # 0 = Fly, 15 = Drive
def open_serial(self):
self.serial = serial.Serial(self.port, self.baudrate, timeout=10)
time.sleep(5)
print("Serial Port Opened!")
print('Checking Communication...')
self.ping()
print('Communication Ok')
def bind(self):
self.verify_serial()
print("Binding to drone...")
self.serial.write([OutgoingMessageType.Bind, 0])
status = self.read_response_status(IncomingMessageType.BindSuccess,
IncomingMessageType.BindFailure)
if status:
self.bound = True
time.sleep(2)
print("Bound!")
self.set_throttle(0)
else:
raise DroneBindException('Drone failed to bind')
def unbind(self):
print("Unbinding from drone...")
self.serial.write([OutgoingMessageType.Unbind, 0])
status = self.read_response_status(IncomingMessageType.UnbindSuccess,
IncomingMessageType.UnbindFailure)
if status:
self.bound = False
else:
raise DroneUnbindException('Drone failed to unbind')
def reconnect(self):
print("Re-Connecting with drone...")
self.serial.write([OutgoingMessageType.Reconnect, 0])
status = self.read_response_status(IncomingMessageType.ReconnectSuccess ,IncomingMessageType.ReconnectFailure)
if status:
self.bound = True
print("Drone Reconnected!")
else:
raise DroneReconnectException('Drone failed to reconnect')
def fly_mode(self):
print("Selecting Fly Mode.")
self.serial.write([OutgoingMessageType.OperatingMode, 1, OperatingModeType.Fly])
self.flyDrive = 0
def drive_mode(self):
print("Selecting Drive Mode.")
self.serial.write([OutgoingMessageType.OperatingMode, 1, OperatingModeType.Drive])
self.flyDrive = 15
def joystick_control(self):
print("Selecting Joystick Control.")
self.serial.write([OutgoingMessageType.ControlSource, 1, ControlSourceType.Joystick])
def accelerometer_control(self):
print("Selecting Accelerometer Control.")
self.serial.write([OutgoingMessageType.ControlSource, 1, ControlSourceType.Accelerometer])
def serial_control(self):
print("Selecting Serial Control.")
self.serial.write([OutgoingMessageType.ControlSource, 1, ControlSourceType.Serial])
def get_throttle(self):
return self.throttle
def get_yaw(self):
return self.yaw
def get_roll(self):
return self.roll
def get_pitch(self):
return self.pitch
def get_yaw_trim(self):
return self.yawTrim
def get_pitch_trim(self):
return self.pitchTrim
def get_roll_trim(self):
return self.rollTrim
def set_throttle(self, value):
self.throttle = max(0, min(255, value))
def set_yaw(self, value):
self.yaw = max(0, min(255, value))
def set_pitch(self, value):
self.pitch = max(0, min(255, value))
def set_roll(self, value):
self.roll = max(0, min(255, value))
def set_yaw_trim(self, value):
self.yawTrim = max(0, min(128, value))
def set_pitch_trim(self, value):
self.pitchTrim = max(0, min(128, value))
def set_roll_trim(self, value):
self.rollTrim = max(0, min(128, value))
def reset_payload(self):
self.set_throttle(0)
self.set_yaw(127)
self.set_pitch(127)
self.set_roll(127)
def update_payload(self):
payload = [OutgoingMessageType.ControlPayload,
8,
self.throttle,
self.yaw,
self.yawTrim,
self.pitch,
self.roll,
self.pitchTrim,
self.rollTrim,
self.flyDrive]
self.serial.write(payload)
if DEBUG:
print(payload)
def ping(self):
self.serial.write([OutgoingMessageType.Echo, 0])
self.read_response_status(IncomingMessageType.Ping,
None)
def request(self, data_request_code):
self.serial.write([data_request_code, 0])
def read_response_status(self, success_code, failure_code):
control = self.read_byte()
length = self.read_byte()
msg = None
if length > 0:
msg = self.serial.read(length)
if control == success_code:
return True
if control == failure_code:
return False
if control == IncomingMessageType.InitializationFailure:
self.handle_initialization_failure(msg)
if control == IncomingMessageType.Debug:
print('Incoming Debug Message', msg.decode('utf-8'))
return self.read_response_status(success_code, failure_code)
else:
raise SerialMessageException('Unexpected serial message {}'.format(control))
def handle_initialization_failure(self, message):
if len(message) == 0:
if message[0] == InitializationFailureType.AccelerometerFailure:
raise DroneInitializationException('Accelerometer Failed to Initialize')
if message[0] == InitializationFailureType.JoystickFailure:
raise DroneInitializationException('Joysticks Failed to Initialize')
raise DroneInitializationException('Unknown Failed to Initialize')
def read_byte(self):
b = self.serial.read()
#print(b)
return list(b)[0]
def verify_serial(self):
pass
# if self.serial is None or not self.serial.is_open:
# raise SerialClosedException('Serial is not open')
# Time-based programmable fly functions
def fly_up(self):
timeout = time.time() + FLY_UP_TIME
while time.time() < timeout:
self.set_throttle(FLY_UP_THROTTLE)
self.set_pitch(127)
self.set_roll(127)
self.set_yaw(127)
self.update_payload()
time.sleep(0.1)
def land(self):
timeout = time.time() + LAND_TIME
while time.time() < timeout:
self.set_throttle(LAND_THROTTLE)
self.set_pitch(127)
self.set_roll(127)
self.set_yaw(127)
self.update_payload()
time.sleep(0.1)
def move_forward(self, seconds):
timeout = time.time() + seconds
while time.time() < timeout:
self.set_throttle(DRIVE_THROTTLE)
self.set_pitch(MOVE_FORWARD_VALUE)
self.set_roll(127)
self.set_yaw(127)
self.update_payload()
time.sleep(0.1)
def move_backward(self, seconds):
timeout = time.time() + seconds
while time.time() < timeout:
self.set_throttle(MOVE_BACKWARD_VALUE)
self.set_pitch(127)
self.set_roll(127)
self.set_yaw(127)
self.update_payload()
time.sleep(0.1)
def turn_left(self, seconds):
timeout = time.time() + seconds
while time.time() < timeout:
self.set_throttle(DRIVE_THROTTLE)
self.set_pitch(127)
self.set_roll(127)
self.set_yaw(TURN_LEFT_VALUE)
self.update_payload()
time.sleep(0.1)
def turn_right(self, seconds):
timeout = time.time() + seconds
while time.time() < timeout:
self.set_throttle(DRIVE_THROTTLE)
self.set_pitch(127)
self.set_roll(127)
self.set_yaw(TURN_RIGHT_VALUE)
self.update_payload()
time.sleep(0.1)
| [
"errors.DroneBindException",
"time.sleep",
"errors.DroneInitializationException",
"serial.Serial",
"time.time",
"errors.DroneUnbindException",
"errors.DroneReconnectException"
] | [((1518, 1569), 'serial.Serial', 'serial.Serial', (['self.port', 'self.baudrate'], {'timeout': '(10)'}), '(self.port, self.baudrate, timeout=10)\n', (1531, 1569), False, 'import serial\n'), ((1579, 1592), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1589, 1592), False, 'import time\n'), ((6987, 7047), 'errors.DroneInitializationException', 'DroneInitializationException', (['"""Unknown Failed to Initialize"""'], {}), "('Unknown Failed to Initialize')\n", (7015, 7047), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n'), ((2089, 2102), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (2099, 2102), False, 'import time\n'), ((2196, 2238), 'errors.DroneBindException', 'DroneBindException', (['"""Drone failed to bind"""'], {}), "('Drone failed to bind')\n", (2214, 2238), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n'), ((2603, 2649), 'errors.DroneUnbindException', 'DroneUnbindException', (['"""Drone failed to unbind"""'], {}), "('Drone failed to unbind')\n", (2623, 2649), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n'), ((3053, 3105), 'errors.DroneReconnectException', 'DroneReconnectException', (['"""Drone failed to reconnect"""'], {}), "('Drone failed to reconnect')\n", (3076, 3105), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n'), ((7403, 7414), 'time.time', 'time.time', ([], {}), '()\n', (7412, 7414), False, 'import time\n'), ((7443, 7454), 'time.time', 'time.time', ([], {}), '()\n', (7452, 7454), False, 'import time\n'), ((7652, 7667), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7662, 7667), False, 'import time\n'), ((7707, 7718), 'time.time', 'time.time', ([], {}), '()\n', (7716, 7718), False, 'import time\n'), ((7745, 7756), 'time.time', 'time.time', ([], {}), '()\n', (7754, 7756), False, 'import time\n'), ((7952, 7967), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7962, 7967), False, 'import time\n'), ((8024, 8035), 'time.time', 'time.time', ([], {}), '()\n', (8033, 8035), False, 'import time\n'), ((8060, 8071), 'time.time', 'time.time', ([], {}), '()\n', (8069, 8071), False, 'import time\n'), ((8283, 8298), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (8293, 8298), False, 'import time\n'), ((8357, 8368), 'time.time', 'time.time', ([], {}), '()\n', (8366, 8368), False, 'import time\n'), ((8393, 8404), 'time.time', 'time.time', ([], {}), '()\n', (8402, 8404), False, 'import time\n'), ((8606, 8621), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (8616, 8621), False, 'import time\n'), ((8676, 8687), 'time.time', 'time.time', ([], {}), '()\n', (8685, 8687), False, 'import time\n'), ((8712, 8723), 'time.time', 'time.time', ([], {}), '()\n', (8721, 8723), False, 'import time\n'), ((8932, 8947), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (8942, 8947), False, 'import time\n'), ((9003, 9014), 'time.time', 'time.time', ([], {}), '()\n', (9012, 9014), False, 'import time\n'), ((9039, 9050), 'time.time', 'time.time', ([], {}), '()\n', (9048, 9050), False, 'import time\n'), ((9260, 9275), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (9270, 9275), False, 'import time\n'), ((6748, 6814), 'errors.DroneInitializationException', 'DroneInitializationException', (['"""Accelerometer Failed to Initialize"""'], {}), "('Accelerometer Failed to Initialize')\n", (6776, 6814), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n'), ((6909, 6971), 'errors.DroneInitializationException', 'DroneInitializationException', (['"""Joysticks Failed to Initialize"""'], {}), "('Joysticks Failed to Initialize')\n", (6937, 6971), False, 'from errors import DroneBindException, SerialClosedException, DroneUnbindException, DroneReconnectException, SerialMessageException, DroneInitializationException\n')] |
"""
Custom integration to integrate PortHuTV with Home Assistant.
For more details about this integration, please refer to
https://github.com/vorostamas/portHuTV-homeassistant
"""
import asyncio
import logging
from datetime import timedelta
from datetime import datetime
from dateutil import tz
import pytz
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from custom_components.porthutv.const import (
CONF_TV_CHANNEL_ID,
CONF_TV_CHANNEL_NAME,
DOMAIN,
PLATFORMS,
STARTUP_MESSAGE,
CONF_TIME_ZONE,
)
from custom_components.porthutv.schedules import get_schedules, get_attributes
SCAN_INTERVAL = timedelta(minutes=5)
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass: HomeAssistant, config: Config):
"""Set up this integration using YAML is not supported."""
return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Set up this integration using UI."""
if hass.data.get(DOMAIN) is None:
hass.data.setdefault(DOMAIN, {})
_LOGGER.info(STARTUP_MESSAGE)
channel_id = entry.data.get(CONF_TV_CHANNEL_ID)
channel_name = entry.data.get(CONF_TV_CHANNEL_NAME)
coordinator = PortHuTvDataUpdateCoordinator(
hass, channel_id=channel_id, channel_name=channel_name
)
await coordinator.async_refresh()
if not coordinator.last_update_success:
raise ConfigEntryNotReady
hass.data[DOMAIN][entry.entry_id] = coordinator
for platform in PLATFORMS:
if entry.options.get(platform, True):
coordinator.platforms.append(platform)
hass.async_add_job(
hass.config_entries.async_forward_entry_setup(entry, platform)
)
entry.add_update_listener(async_reload_entry)
return True
class PortHuTvDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching data from the API."""
def __init__(self, hass, channel_id, channel_name):
"""Initialize."""
self.platforms = []
self.channel_id = channel_id
self.channel_name = channel_name
self.hass = hass
super().__init__(hass, _LOGGER, name=DOMAIN, update_interval=SCAN_INTERVAL)
async def _async_update_data(self):
"""Update data via library."""
try:
_LOGGER.debug("Channel Name: %s", self.channel_name)
(
actual_show,
previous_show,
next_show,
schedule,
) = await self.hass.async_add_executor_job(get_attributes, self.channel_id)
_LOGGER.debug("Actual show: %s", actual_show.get("title"))
data = {
"channel_name": self.channel_name,
"actual_show_title": actual_show.get("title"),
"next_show_title": next_show.get("title"),
"previous_show_title": previous_show.get("title"),
"schedule": schedule,
}
return data
except Exception as exception:
raise UpdateFailed(exception)
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Handle removal of an entry."""
coordinator = hass.data[DOMAIN][entry.entry_id]
unloaded = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, platform)
for platform in PLATFORMS
if platform in coordinator.platforms
]
)
)
if unloaded:
hass.data[DOMAIN].pop(entry.entry_id)
return unloaded
async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Reload config entry."""
await async_unload_entry(hass, entry)
await async_setup_entry(hass, entry)
| [
"logging.getLogger",
"homeassistant.helpers.update_coordinator.UpdateFailed",
"datetime.timedelta"
] | [((826, 846), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(5)'}), '(minutes=5)\n', (835, 846), False, 'from datetime import timedelta\n'), ((858, 885), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (875, 885), False, 'import logging\n'), ((3235, 3258), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['exception'], {}), '(exception)\n', (3247, 3258), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n')] |
"""
Python file that contains the class for our ElevRepository in which we store our "Elev" instances
"""
from Domain.elev import Elev
from Exceptions.exceptii import ExistentElev
class Repository:
def __init__(self , filename):
self.filename = filename
self.elevi = self.loadfromfile()
def loadfromfile(self):
"""
Functie care incarca din fisierul text "filename" obiectele "Elev" in memorie,in lista self.elevi
:return: list_elevi - list of "Elev" instances
"""
try:
f = open(self.filename , "r")
except IOError:
return []
list_elevi = []
with open(self.filename , "r") as f:
for linie in f:
if linie != "\n":
atrs = linie.split(";")
elev = Elev(atrs[0] , atrs[1] , int(atrs[2]) , atrs[3])
list_elevi.append(elev)
return list_elevi
def storetofile(self , list_elevi):
"""
Functie care incarca in fisierul text "filename" obiecte "Elev: din memorie
"""
try:
f = open(self.filename , "w")
except IOError:
return []
with open(self.filename, "w") as f:
for elev in list_elevi:
string_line = elev.getnume() + ";" + elev.getprenume() + ";" + str(elev.getnrcls()) + ";" + elev.getnumecls() + ";"
f.write(string_line)
f.write("\n")
def addelev(self , elev:Elev):
"""
Functie care adauga un elev in lista de elevi "Elev" instance
:param elev - "Elev" object
:raises: ExistentElev - daca deja exista elevul pe care dorim sa il adaugam in lista
"""
for e in self.elevi:
if e.getnume() == elev.getnume() and e.getprenume() == elev.getprenume() and e.getnrcls() == elev.getnrcls() and e.getnumecls() == elev.getnumecls():
raise ExistentElev("Elevul pe care doriti sa il adaugati deja exista in lista!")
self.elevi.append(elev)
self.storetofile(self.elevi)
def getall(self):
"""
Functie care returneaza intreaga lista de elevi, "Elev instance"
:return: list_elevi - list of "Elev" instances
"""
return self.elevi | [
"Exceptions.exceptii.ExistentElev"
] | [((1951, 2025), 'Exceptions.exceptii.ExistentElev', 'ExistentElev', (['"""Elevul pe care doriti sa il adaugati deja exista in lista!"""'], {}), "('Elevul pe care doriti sa il adaugati deja exista in lista!')\n", (1963, 2025), False, 'from Exceptions.exceptii import ExistentElev\n')] |
#!/usr/bin/env python
"""This program parses all the .c files in the current directory and finds the header files
each file depends on. Having done that, it appends those dependencies to Makefile.Depends."""
import re, os, os.path, string, sys
incpatt = """^\s*[#]\s*include\s+["']?([A-Za-z._0-9]+)['"]?\s+.*"""
# searches for file "name" in a specified list of directories
# and returns the absolute name
def findINCmatch(name):
global INCDirs
global INCcache
if INCcache.has_key(name): return INCcache[name]
for d in INCDirs:
dname = os.path.join(d,name)
if os.path.isfile(dname):
INCcache[name] = dname
return dname
INCcache[name]=""
return ""
# Given a filename computes the include files it depends on
# returns the include files as a list of strings
def depends(filename):
"""Handle one file"""
incs = {}
if not os.path.isfile(filename): return []
for x in file(filename).readlines():
m = incRE.match(x)
if m:
incs[m.group(1)] = 1
continue
return [findINCmatch(x) for x in incs.keys()]
##################
def main():
global incRE
global INCcache
global filelist, INCDirs
# setup global variables
incRE = re.compile(incpatt,re.I)
# Process all the files
INCcache = {}
filelist = os.listdir(".") # All files in current directory
INCDirs = [""] # List of directories which contain include files. "" is current directory
toProcess = [] # List of files to generate dependency info for
# Process argument list and isolate filenames , inc dirs, arbitrary flags
for arg in sys.argv[1:]: # for each option given
if not arg: continue
if arg[0] == "-" and len(arg) > 2 and arg[1] == "I": # option -I
INCDirs.append(arg[2:])
elif arg[0] != "-": # not an option so must be a filename to process
if arg[0] != '*': toProcess.append(arg) # unprocessed wild card means nu such files
else:
pass # Ignore all other options
ofd = file("Makefile.Depend","a")
ofd.write("\n###Automatically appended dependencies of C files on headers###\n")
for filename in toProcess:
basename,ext = os.path.splitext(filename)
incs = depends(filename)
for h in incs: # Add more files to process if required
if (h in filelist) and (h not in toProcess): # new header file in current directory
toProcess.append(h)
incpart = " ".join(incs)
ofd.write("%s.o: %s\n" % (basename, incpart))
ofd.close()
if __name__=="__main__":
main()
| [
"os.listdir",
"re.compile",
"os.path.join",
"os.path.splitext",
"os.path.isfile"
] | [((1256, 1281), 're.compile', 're.compile', (['incpatt', 're.I'], {}), '(incpatt, re.I)\n', (1266, 1281), False, 'import re, os, os.path, string, sys\n'), ((1342, 1357), 'os.listdir', 'os.listdir', (['"""."""'], {}), "('.')\n", (1352, 1357), False, 'import re, os, os.path, string, sys\n'), ((565, 586), 'os.path.join', 'os.path.join', (['d', 'name'], {}), '(d, name)\n', (577, 586), False, 'import re, os, os.path, string, sys\n'), ((597, 618), 'os.path.isfile', 'os.path.isfile', (['dname'], {}), '(dname)\n', (611, 618), False, 'import re, os, os.path, string, sys\n'), ((899, 923), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (913, 923), False, 'import re, os, os.path, string, sys\n'), ((2215, 2241), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (2231, 2241), False, 'import re, os, os.path, string, sys\n')] |
from analytic import Fetcher, RepoAnalyzer, ContributorAnalyzer, Homepage
import sys
sys.path.append("./PyGithub")
from github import Github
import getpass
from datetime import datetime
username = input("Github Username: ")
password = getpass.getpass()
account = Github(username, password)
repo_name = input("Name of Repository: ")
repository = account.get_user().get_repo(name=repo_name)
fetcher = Fetcher(repository)
repo = fetcher.get_repository()
repo_analyzer = RepoAnalyzer(repo)
homepage = Homepage(repo)
contr_analyzer = ContributorAnalyzer(repository=repo, username='koaning')
# test repo analyzer
# print(repo_analyzer.weekly_code_frequency())
# print(repo_analyzer.language_distribution())
# print(repo_analyzer.weekly_commit_frequency())
# print(repo_analyzer.top_contributors_monthly())
# print(repo_analyzer.get_most_fire_files())
# test homepage
# print(homepage.get_repo_name())
# print(homepage.get_description())
# print(homepage.get_topics())
# print(homepage.get_commits())
# test contributor analyzer
# print(contr_analyzer.get_commits())
# print(contr_analyzer.get_name())
# print(contr_analyzer.get_added_lines())
# print(contr_analyzer.get_deleted_lines())
# print(contr_analyzer.weekly_commits_stats())
# print(contr_analyzer.get_favorite_files()) | [
"analytic.Fetcher",
"github.Github",
"analytic.RepoAnalyzer",
"analytic.Homepage",
"getpass.getpass",
"analytic.ContributorAnalyzer",
"sys.path.append"
] | [((85, 114), 'sys.path.append', 'sys.path.append', (['"""./PyGithub"""'], {}), "('./PyGithub')\n", (100, 114), False, 'import sys\n'), ((237, 254), 'getpass.getpass', 'getpass.getpass', ([], {}), '()\n', (252, 254), False, 'import getpass\n'), ((266, 292), 'github.Github', 'Github', (['username', 'password'], {}), '(username, password)\n', (272, 292), False, 'from github import Github\n'), ((402, 421), 'analytic.Fetcher', 'Fetcher', (['repository'], {}), '(repository)\n', (409, 421), False, 'from analytic import Fetcher, RepoAnalyzer, ContributorAnalyzer, Homepage\n'), ((470, 488), 'analytic.RepoAnalyzer', 'RepoAnalyzer', (['repo'], {}), '(repo)\n', (482, 488), False, 'from analytic import Fetcher, RepoAnalyzer, ContributorAnalyzer, Homepage\n'), ((500, 514), 'analytic.Homepage', 'Homepage', (['repo'], {}), '(repo)\n', (508, 514), False, 'from analytic import Fetcher, RepoAnalyzer, ContributorAnalyzer, Homepage\n'), ((532, 588), 'analytic.ContributorAnalyzer', 'ContributorAnalyzer', ([], {'repository': 'repo', 'username': '"""koaning"""'}), "(repository=repo, username='koaning')\n", (551, 588), False, 'from analytic import Fetcher, RepoAnalyzer, ContributorAnalyzer, Homepage\n')] |
from __future__ import division
import sys
import os
from math import pi
import numpy as np
import pandas as pd
from collections import Counter
from multiprocessing import Pool, cpu_count
import matplotlib.pyplot as plt
import seaborn as sns
sns.set(style='darkgrid', font_scale=1.5)
# =========
# FUNCTIONS
# =========
def create_folder(complete_path):
"""
Function to create a folder.
Parameter
---------
complete_path : str
Complete path of the new folder.
Returns
-------
Create the new folder.
"""
if not os.path.exists(complete_path):
os.makedirs(complete_path)
return 0
def clean_cuisine_names(cuisine_names):
"""
String manipulation of cuisine names.
Parameter:
---------
cuisine_names : list
List containg the cuisine names.
Returns:
-------
clean_names : list
List with the with the new names.
"""
clean_names = []
for i, name in enumerate(cuisine_names):
new_name = name.title()
if new_name.find('_') > 0:
new_name = new_name.replace('_', ' ')
clean_names.append(new_name)
return clean_names
def parallel_counting(data):
"""
Auxiliary function for parallel counting.
"""
return data.map(Counter).sum()
def ingredients_counter(data):
"""
Function to count the ingredients in parallel fashion.
Parameter:
---------
data : pandas series
Pandas Series object with the ingredients for counting.
Returns:
-------
ingredients_count : pandas series
Series with count for each ingredient.
Note:
----
The ingredients are returned in descending order
"""
# Let's make this counter process parallel
# using the 'multiprocessing' library
cores = cpu_count()
# separate data into chunks for the parallel processing
data_chunks = np.array_split(data, cores)
pool = Pool(cores)
counter_list = pool.map(parallel_counting, data_chunks)
pool.close()
ingredients_count = pd.Series(sum(counter_list, \
Counter())).sort_values(ascending=False)
return ingredients_count
if __name__ == '__main__':
# =======
# FOLDERS
# =======
package_path = os.path.dirname(os.getcwd())
data_path = os.path.join(package_path, 'data')
# create folder for figures
fig_path = os.path.join(package_path, 'figures')
create_folder(fig_path)
# =========
# LOAD DATA
# =========
input_file = os.path.join(data_path, 'train.json')
df = pd.read_json(input_file)
# get the total number of recipes
n_recipes = df.shape[0]
print('>> Data <<')
print(' The training dataset has %i recipes.\n' % (n_recipes))
# ========
# CUISINES
# ========
cuisine = df['cuisine'].value_counts()
n_cuisines = cuisine.nunique()
print('>> Cuisines <<')
print(' This dataset has %i different cuisines.' % n_cuisines)
cuisine_names = list(cuisine.index)
cuisine_values = list(cuisine.values)
cuisine_clean_names = clean_cuisine_names(cuisine_names)
# cuisines bar plot
fig_file = os.path.join(fig_path, 'cuisines_barplot.pdf')
plt.figure(figsize=(10, 7))
sns.barplot(x=cuisine_values,
y=cuisine_clean_names,
edgecolor=(0, 0, 0),
linewidth=1)
plt.xlabel('Counts')
plt.ylabel('Cuisines')
plt.savefig(fig_file, bbox_inches='tight', dpi=1200)
plt.close()
# cuisines pie chart
fig_file = os.path.join(fig_path, 'cuisines_piechart.pdf')
top_cuisines = 5
short_cuisine_values = cuisine_values[0:top_cuisines]
short_cuisine_values.append(sum(cuisine_values[top_cuisines:]))
short_cuisine_names = cuisine_clean_names[0:top_cuisines]
short_cuisine_names.append(u'Others')
plt.figure(figsize=(7, 7))
explode = list(np.zeros(top_cuisines)) # explode the last slice ('Others')
explode.append(0.08)
wedgeprops={"edgecolor":"k", 'linewidth': 1} # edges properties
plt.pie(short_cuisine_values, labels=short_cuisine_names, startangle=30,
autopct='%1.1f%%', explode=explode, wedgeprops=wedgeprops)
plt.title('Cuisines')
plt.tight_layout()
plt.axis('equal')
plt.savefig(fig_file, bbox_inches='tight', dpi=1200)
plt.close()
# ===========
# INGREDIENTS
# ===========
df['n_ingredients'] = df['ingredients'].str.len()
# string manipulation of cuisine names
cuisine_clean_names = clean_cuisine_names(df.cuisine.unique())
# box plot number of ingredients
fig_file = os.path.join(fig_path, 'ingredients_boxplot.pdf')
plt.figure(figsize=(16, 6))
ax = sns.boxplot(x='cuisine', y='n_ingredients', data=df)
plt.ylabel('Number of Ingredients')
plt.xlabel('Cuisine')
plt.xticks(plt.xticks()[0], cuisine_clean_names)
ax.set_xticklabels(ax.get_xticklabels(), rotation=35)
plt.savefig(fig_file, bbox_inches='tight', dpt=1200)
plt.close()
# counting ingredients from the entire dataset
ingredients_count = ingredients_counter(df['ingredients'])
# getting the top ingredients in the whole dataset
top_common = 15
top_ingredients_names = list(ingredients_count[:top_common].index)
top_ingredients_values = list(ingredients_count[:top_common].values)
# string manipulation of cuisine names
cuisine_clean_names = clean_cuisine_names(top_ingredients_names)
# top ingredients barplot
fig_file = os.path.join(fig_path, 'top_ingredients_barplot.pdf')
plt.figure(figsize=(10,7))
sns.barplot(x=top_ingredients_values,
y=cuisine_clean_names,
edgecolor=(0,0,0),
linewidth=1)
plt.ylabel('Ingredients')
plt.xlabel('Counts')
plt.title('Top %i Most Used Ingredients' % int(top_common))
plt.savefig(fig_file, bbox_inches='tight', dpi=1200)
plt.close()
| [
"matplotlib.pyplot.ylabel",
"multiprocessing.cpu_count",
"numpy.array_split",
"os.path.exists",
"seaborn.set",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.close",
"matplotlib.pyplot.axis",
"pandas.read_json",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.title",
"os.makedirs",
"os.path.join",
"matplotlib.pyplot.pie",
"os.getcwd",
"seaborn.boxplot",
"collections.Counter",
"matplotlib.pyplot.figure",
"numpy.zeros",
"multiprocessing.Pool",
"matplotlib.pyplot.tight_layout",
"seaborn.barplot"
] | [((246, 287), 'seaborn.set', 'sns.set', ([], {'style': '"""darkgrid"""', 'font_scale': '(1.5)'}), "(style='darkgrid', font_scale=1.5)\n", (253, 287), True, 'import seaborn as sns\n'), ((1816, 1827), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (1825, 1827), False, 'from multiprocessing import Pool, cpu_count\n'), ((1907, 1934), 'numpy.array_split', 'np.array_split', (['data', 'cores'], {}), '(data, cores)\n', (1921, 1934), True, 'import numpy as np\n'), ((1947, 1958), 'multiprocessing.Pool', 'Pool', (['cores'], {}), '(cores)\n', (1951, 1958), False, 'from multiprocessing import Pool, cpu_count\n'), ((2301, 2335), 'os.path.join', 'os.path.join', (['package_path', '"""data"""'], {}), "(package_path, 'data')\n", (2313, 2335), False, 'import os\n'), ((2384, 2421), 'os.path.join', 'os.path.join', (['package_path', '"""figures"""'], {}), "(package_path, 'figures')\n", (2396, 2421), False, 'import os\n'), ((2516, 2553), 'os.path.join', 'os.path.join', (['data_path', '"""train.json"""'], {}), "(data_path, 'train.json')\n", (2528, 2553), False, 'import os\n'), ((2564, 2588), 'pandas.read_json', 'pd.read_json', (['input_file'], {}), '(input_file)\n', (2576, 2588), True, 'import pandas as pd\n'), ((3157, 3203), 'os.path.join', 'os.path.join', (['fig_path', '"""cuisines_barplot.pdf"""'], {}), "(fig_path, 'cuisines_barplot.pdf')\n", (3169, 3203), False, 'import os\n'), ((3209, 3236), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (3219, 3236), True, 'import matplotlib.pyplot as plt\n'), ((3241, 3331), 'seaborn.barplot', 'sns.barplot', ([], {'x': 'cuisine_values', 'y': 'cuisine_clean_names', 'edgecolor': '(0, 0, 0)', 'linewidth': '(1)'}), '(x=cuisine_values, y=cuisine_clean_names, edgecolor=(0, 0, 0),\n linewidth=1)\n', (3252, 3331), True, 'import seaborn as sns\n'), ((3380, 3400), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Counts"""'], {}), "('Counts')\n", (3390, 3400), True, 'import matplotlib.pyplot as plt\n'), ((3405, 3427), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Cuisines"""'], {}), "('Cuisines')\n", (3415, 3427), True, 'import matplotlib.pyplot as plt\n'), ((3432, 3484), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fig_file'], {'bbox_inches': '"""tight"""', 'dpi': '(1200)'}), "(fig_file, bbox_inches='tight', dpi=1200)\n", (3443, 3484), True, 'import matplotlib.pyplot as plt\n'), ((3489, 3500), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3498, 3500), True, 'import matplotlib.pyplot as plt\n'), ((3542, 3589), 'os.path.join', 'os.path.join', (['fig_path', '"""cuisines_piechart.pdf"""'], {}), "(fig_path, 'cuisines_piechart.pdf')\n", (3554, 3589), False, 'import os\n'), ((3846, 3872), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(7, 7)'}), '(figsize=(7, 7))\n', (3856, 3872), True, 'import matplotlib.pyplot as plt\n'), ((4051, 4186), 'matplotlib.pyplot.pie', 'plt.pie', (['short_cuisine_values'], {'labels': 'short_cuisine_names', 'startangle': '(30)', 'autopct': '"""%1.1f%%"""', 'explode': 'explode', 'wedgeprops': 'wedgeprops'}), "(short_cuisine_values, labels=short_cuisine_names, startangle=30,\n autopct='%1.1f%%', explode=explode, wedgeprops=wedgeprops)\n", (4058, 4186), True, 'import matplotlib.pyplot as plt\n'), ((4199, 4220), 'matplotlib.pyplot.title', 'plt.title', (['"""Cuisines"""'], {}), "('Cuisines')\n", (4208, 4220), True, 'import matplotlib.pyplot as plt\n'), ((4225, 4243), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4241, 4243), True, 'import matplotlib.pyplot as plt\n'), ((4248, 4265), 'matplotlib.pyplot.axis', 'plt.axis', (['"""equal"""'], {}), "('equal')\n", (4256, 4265), True, 'import matplotlib.pyplot as plt\n'), ((4270, 4322), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fig_file'], {'bbox_inches': '"""tight"""', 'dpi': '(1200)'}), "(fig_file, bbox_inches='tight', dpi=1200)\n", (4281, 4322), True, 'import matplotlib.pyplot as plt\n'), ((4327, 4338), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (4336, 4338), True, 'import matplotlib.pyplot as plt\n'), ((4612, 4661), 'os.path.join', 'os.path.join', (['fig_path', '"""ingredients_boxplot.pdf"""'], {}), "(fig_path, 'ingredients_boxplot.pdf')\n", (4624, 4661), False, 'import os\n'), ((4666, 4693), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(16, 6)'}), '(figsize=(16, 6))\n', (4676, 4693), True, 'import matplotlib.pyplot as plt\n'), ((4703, 4755), 'seaborn.boxplot', 'sns.boxplot', ([], {'x': '"""cuisine"""', 'y': '"""n_ingredients"""', 'data': 'df'}), "(x='cuisine', y='n_ingredients', data=df)\n", (4714, 4755), True, 'import seaborn as sns\n'), ((4760, 4795), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Number of Ingredients"""'], {}), "('Number of Ingredients')\n", (4770, 4795), True, 'import matplotlib.pyplot as plt\n'), ((4800, 4821), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Cuisine"""'], {}), "('Cuisine')\n", (4810, 4821), True, 'import matplotlib.pyplot as plt\n'), ((4937, 4989), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fig_file'], {'bbox_inches': '"""tight"""', 'dpt': '(1200)'}), "(fig_file, bbox_inches='tight', dpt=1200)\n", (4948, 4989), True, 'import matplotlib.pyplot as plt\n'), ((4994, 5005), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5003, 5005), True, 'import matplotlib.pyplot as plt\n'), ((5500, 5553), 'os.path.join', 'os.path.join', (['fig_path', '"""top_ingredients_barplot.pdf"""'], {}), "(fig_path, 'top_ingredients_barplot.pdf')\n", (5512, 5553), False, 'import os\n'), ((5559, 5586), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 7)'}), '(figsize=(10, 7))\n', (5569, 5586), True, 'import matplotlib.pyplot as plt\n'), ((5590, 5689), 'seaborn.barplot', 'sns.barplot', ([], {'x': 'top_ingredients_values', 'y': 'cuisine_clean_names', 'edgecolor': '(0, 0, 0)', 'linewidth': '(1)'}), '(x=top_ingredients_values, y=cuisine_clean_names, edgecolor=(0, \n 0, 0), linewidth=1)\n', (5601, 5689), True, 'import seaborn as sns\n'), ((5735, 5760), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Ingredients"""'], {}), "('Ingredients')\n", (5745, 5760), True, 'import matplotlib.pyplot as plt\n'), ((5765, 5785), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Counts"""'], {}), "('Counts')\n", (5775, 5785), True, 'import matplotlib.pyplot as plt\n'), ((5854, 5906), 'matplotlib.pyplot.savefig', 'plt.savefig', (['fig_file'], {'bbox_inches': '"""tight"""', 'dpi': '(1200)'}), "(fig_file, bbox_inches='tight', dpi=1200)\n", (5865, 5906), True, 'import matplotlib.pyplot as plt\n'), ((5911, 5922), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5920, 5922), True, 'import matplotlib.pyplot as plt\n'), ((565, 594), 'os.path.exists', 'os.path.exists', (['complete_path'], {}), '(complete_path)\n', (579, 594), False, 'import os\n'), ((604, 630), 'os.makedirs', 'os.makedirs', (['complete_path'], {}), '(complete_path)\n', (615, 630), False, 'import os\n'), ((2272, 2283), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2281, 2283), False, 'import os\n'), ((3892, 3914), 'numpy.zeros', 'np.zeros', (['top_cuisines'], {}), '(top_cuisines)\n', (3900, 3914), True, 'import numpy as np\n'), ((4837, 4849), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {}), '()\n', (4847, 4849), True, 'import matplotlib.pyplot as plt\n'), ((2095, 2104), 'collections.Counter', 'Counter', ([], {}), '()\n', (2102, 2104), False, 'from collections import Counter\n')] |
#
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# -*- encoding: utf-8 -*-
#
import random
import mock
import testtools
import uuid
from cgtsclient import exc
from cgtsclient.v1 import ilvg
class ILvgTest(testtools.TestCase):
def test__find_ilvg_numeric(self):
mock_cc = mock.MagicMock()
mock_ihost = mock.MagicMock()
fake_id = str(random.randrange(1, 9))
ilvg._find_ilvg(mock_cc, mock_ihost, fake_id)
mock_cc.ilvg.get.assert_called_with(fake_id)
mock_cc.ilvg.list.assert_not_called()
def test__find_ilvg_uuid(self):
mock_cc = mock.MagicMock()
mock_ihost = mock.MagicMock()
fake_id = str(uuid.uuid4())
fake_name = "fake_ilvg"
mock_cc.ilvg.list.return_value = [
ilvg.ilvg(mock.MagicMock, info={
"uuid": fake_id, "lvm_vg_name": fake_name
})
]
ilvg_found = ilvg._find_ilvg(mock_cc, mock_ihost, fake_id)
mock_cc.ilvg.list.assert_called_with(mock_ihost.uuid)
self.assertEqual(fake_id, ilvg_found.uuid)
def test__find_ilvg_uuid_not_found(self):
mock_cc = mock.MagicMock()
mock_ihost = mock.MagicMock()
fake_id = str(uuid.uuid4())
mock_cc.ilvg.list.return_value = []
self.assertRaisesRegexp(
exc.CommandError,
"Local volume group not found by name or uuid: %s" % fake_id,
ilvg._find_ilvg,
mock_cc,
mock_ihost,
fake_id
)
mock_cc.ilvg.list.assert_called_with(mock_ihost.uuid)
def test__find_ilvg_name(self):
mock_cc = mock.MagicMock()
mock_ihost = mock.MagicMock()
fake_id = str(uuid.uuid4())
fake_name = "fake_ilvg"
mock_cc.ilvg.list.return_value = [
ilvg.ilvg(mock.MagicMock, info={
"uuid": fake_id, "lvm_vg_name": fake_name
})
]
ilvg_found = ilvg._find_ilvg(mock_cc, mock_ihost, fake_name)
mock_cc.ilvg.list.assert_called_with(mock_ihost.uuid)
self.assertEqual(fake_name, ilvg_found.lvm_vg_name)
def test__find_ilvg_name_not_found(self):
mock_cc = mock.MagicMock()
mock_ihost = mock.MagicMock()
fake_name = "fake_lvg_name"
mock_cc.ilvg.list.return_value = []
self.assertRaisesRegexp(
exc.CommandError,
"Local volume group not found by name or uuid: %s" % fake_name,
ilvg._find_ilvg,
mock_cc,
mock_ihost,
fake_name
)
mock_cc.ilvg.list.assert_called_with(mock_ihost.uuid)
| [
"cgtsclient.v1.ilvg.ilvg",
"random.randrange",
"cgtsclient.v1.ilvg._find_ilvg",
"uuid.uuid4",
"mock.MagicMock"
] | [((330, 346), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (344, 346), False, 'import mock\n'), ((368, 384), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (382, 384), False, 'import mock\n'), ((440, 485), 'cgtsclient.v1.ilvg._find_ilvg', 'ilvg._find_ilvg', (['mock_cc', 'mock_ihost', 'fake_id'], {}), '(mock_cc, mock_ihost, fake_id)\n', (455, 485), False, 'from cgtsclient.v1 import ilvg\n'), ((641, 657), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (655, 657), False, 'import mock\n'), ((679, 695), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (693, 695), False, 'import mock\n'), ((957, 1002), 'cgtsclient.v1.ilvg._find_ilvg', 'ilvg._find_ilvg', (['mock_cc', 'mock_ihost', 'fake_id'], {}), '(mock_cc, mock_ihost, fake_id)\n', (972, 1002), False, 'from cgtsclient.v1 import ilvg\n'), ((1182, 1198), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1196, 1198), False, 'import mock\n'), ((1220, 1236), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1234, 1236), False, 'import mock\n'), ((1676, 1692), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1690, 1692), False, 'import mock\n'), ((1714, 1730), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1728, 1730), False, 'import mock\n'), ((1992, 2039), 'cgtsclient.v1.ilvg._find_ilvg', 'ilvg._find_ilvg', (['mock_cc', 'mock_ihost', 'fake_name'], {}), '(mock_cc, mock_ihost, fake_name)\n', (2007, 2039), False, 'from cgtsclient.v1 import ilvg\n'), ((2228, 2244), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2242, 2244), False, 'import mock\n'), ((2266, 2282), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (2280, 2282), False, 'import mock\n'), ((408, 430), 'random.randrange', 'random.randrange', (['(1)', '(9)'], {}), '(1, 9)\n', (424, 430), False, 'import random\n'), ((718, 730), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (728, 730), False, 'import uuid\n'), ((819, 894), 'cgtsclient.v1.ilvg.ilvg', 'ilvg.ilvg', (['mock.MagicMock'], {'info': "{'uuid': fake_id, 'lvm_vg_name': fake_name}"}), "(mock.MagicMock, info={'uuid': fake_id, 'lvm_vg_name': fake_name})\n", (828, 894), False, 'from cgtsclient.v1 import ilvg\n'), ((1259, 1271), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1269, 1271), False, 'import uuid\n'), ((1753, 1765), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1763, 1765), False, 'import uuid\n'), ((1854, 1929), 'cgtsclient.v1.ilvg.ilvg', 'ilvg.ilvg', (['mock.MagicMock'], {'info': "{'uuid': fake_id, 'lvm_vg_name': fake_name}"}), "(mock.MagicMock, info={'uuid': fake_id, 'lvm_vg_name': fake_name})\n", (1863, 1929), False, 'from cgtsclient.v1 import ilvg\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*
#/// DEPENDENCIES
import typing, aiofiles
import discord #python3.7 -m pip install -U discord.py
import logging, json, re
import traceback, sys
from util.priz_err import *
from util import embedify, getPre, dbman
from discord.ext import commands
from discord.ext.commands import Bot, MissingPermissions, has_permissions
bot = commands.Bot(command_prefix=getPre.getPre)
##///---------------------///##
##/// BOT DEFINITIONS ///##
##///---------------------///##
def escape(st):
for regex in [r"(\_)", r"(\*)", r"(\~)", r"(\#)",
r"(\@)", r"(\|)", r"(\`)", r"(\.)", r"(\:)"]:
st = re.sub(regex, r" \1", str(st.encode("utf-8"))[2:-1])
return st
async def log(bot, head, text):
chnl = bot.get_channel(569698278271090728)
msgs = await chnl.send(embed=embedify.embedify(desc=f'''```md\n#] {head}!\n> {text}```'''))
return msgs
##///---------------------///##
##/// BOT EVENTS ///##
##///---------------------///##
@bot.listen()
async def on_error(event, *args, **kwargs):
print(event, *args, kwargs)
@bot.listen()
async def on_command_error(ctx, error):
"""
>>> ERROR HANDLER <<<
Only for discord errors, not syntax, out of bounds, etc
"""
if isinstance(error, commands.CommandNotFound):
return await ctx.invoke(ctx.bot.get_command('text'), convo=ctx.message.content[2:])
try:
if ctx.guild and not dbman.get('com', ctx.command.name, id=ctx.guild.id):
return await ctx.send('```diff\n-] THIS COMMAND ISNT ENABLED```')
except:
pass
try:
errr = error.original
except:
errr = error
if issubclass(type(errr), PrizmError):
await handler(ctx.bot, "COMMAND FAILURE", errr, ctx=ctx, found=True)
return await ctx.send(f"""```md
#] PRIZM {errr.typ} ;[
=] Something wrong happened internally
> More info about the issue can be found below
``````{errr.syntax}
{errr.message}```""")
st = str(type(errr)).split('.')[-1][:-2]
found = False
typ, obj, tb = sys.exc_info()
errors = {
'DiscordException': "Unknown",
'LoginFailue': 'Verification',
'NoMoreItems': 'Iter',
'Forbidden': 'Forbidden',
'NotFound': 'NotFound',
'InvalidData': 'Invalid',
'InvalidArgument': 'InvalidArg',
'GatewayNotFound': 'Gateway',
'ConnectionClosed': 'Connection',
'OpusError': 'Opus',
'Opus': 'Opus',
'CommandError': 'Com',
'ConversionError': 'Conversion',
'MissingRequiredArgument': 'MissingArgs',
'ArgumentParsingError': 'Parse',
'UnexpectedQuoteError': 'BadQuotes',
'InvalidEndOfQuoteStringError': 'BadQuotes',
'ExpectedClosingQuoteError': 'MissingQuotes',
'BadArgument': 'BadArgs',
'BadUnionArgument': 'BadArgs',
'PrivateMessageOnly': 'DMsOnly',
'NoPrivateMessage': 'GuildOnly',
'CheckFailure': 'Checks',
'CommandNotFound': 'WtfHowDidWeGetHere', #This shouldn't ever happen
'DisabledCommand': 'Disabled',
'CommandInvokeError': 'Invoke',
'TooManyArguments': 'TooManyArgs',
'UserInputError': 'Input',
'CommandOnCooldown': 'Cooldown',
'NotOwner': 'Forbidden',
'MissingPermissions': 'MissingPerms',
'BotMissingPermissions': 'PrizmPerms',
'MissingRole': 'MissingRole',
'BotMissingRole': 'PrizmRole',
'MissingAnyRole': 'MissingRole',
'BotMissingAnyRole': 'PrizmRole',
'NSFWChannelRequired': 'Nsfw',
'ExtensionError': 'Ext',
'ExtensionAlreadyLoaded': 'ExtLoaded',
'ExtensionNotLoaded': 'ExtUnloaded',
'NoEntryPointError': 'Entry',
'ExtensionFailed': 'ExtFailed',
'ExtensionNotFound': 'ExtNotFound'
}
if st in errors:
await ctx.send(f'''```md
#] PRIZM {errors[st]}Error ;[
=] This is most likely an issue with what you did
> More info about the issue can be found below
``````diff
-] {errr}```''')
found = True
await handler(ctx.bot, "COMMAND FAILURE", errr, ctx=ctx, found=found)
async def handler(bot, ex_type, ex, event=None, message=None, ctx = None, found=False):
if message is None and event is not None and hasattr(event, "message"):
message = event.message
if message is None and ctx is not None:
message = ctx.message
try:
tb = "".join(traceback.format_tb(ex.__traceback__)).replace('`','\u200b`')
async with aiofiles.open("txt/tb.txt", "w+") as tbfile:
await tbfile.write(tb)
await bot.get_channel(569698278271090728).send(
embed=embedify.embedify(title='AN ERROR OCCURED ;[',
desc = '```md\n#] SEE BELOW FOR DETAILS```',
fields = [['`EXCEPTION ---`',
f"```diff\n-] {type(ex)} '{str(ex)}'```",
False],
['`ARGS --------`',
'```'+str(ctx.args)+'```',
False],
['`KWARGS ------`',
'```'+json.dumps(ctx.kwargs,indent=4)+'```',
False],
['`EVENT INFO --`',
'```'+str(event)+'```',
False],
['`COMMAND -----`',
f"""```;]{ctx.command.name} in #{'Private Message' if isinstance(ctx.channel, discord.abc.PrivateChannel) else f"{ctx.channel.name} [`{ctx.channel.id}`]"} by {str(ctx.author)} [`{ctx.author.id}`]```""",
False],
['`MESSAGE -----`',
'```'+message.content+'```',
False],
['`TRACEBACK ---`',
'```'+(tb if len(tb) <= 1024 else 'IN ATTACHED FILE')+'```',
False]]
),
file = discord.File('txt/tb.txt') if len(tb) > 1024 else None
)
if not found:
await ctx.send(f"""```md
#] PRIZM {str(type(ex)).split("'")[1]} ;[
=] You found a bug, thank you ;]
> More info about the issue can be found below
``````diff
-] {escape(str(ex))}
=] Traceback is available in the attached file
```""", file=discord.File("txt/tb.txt"))
except Exception as ex:
msgs = await log(bot, "SOMETHING IS SERIOUSLY FUCKED UP", f"ERROR // {ex}")
##///---------------------///##
##/// OTHER STUFF ///##
##///---------------------///##
def setup(bot):
print('+LIS')
bot.add_listener(on_command_error)
bot.add_listener(on_error)
print('GOOD')
def teardown(bot):
print('-LIS')
bot.remove_listener('on_command_error')
bot.remove_listener('on_error')
print('GOOD')
| [
"util.embedify.embedify",
"traceback.format_tb",
"util.dbman.get",
"discord.ext.commands.Bot",
"json.dumps",
"aiofiles.open",
"sys.exc_info",
"discord.File"
] | [((393, 435), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': 'getPre.getPre'}), '(command_prefix=getPre.getPre)\n', (405, 435), False, 'from discord.ext import commands\n'), ((2090, 2104), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (2102, 2104), False, 'import traceback, sys\n'), ((4546, 4579), 'aiofiles.open', 'aiofiles.open', (['"""txt/tb.txt"""', '"""w+"""'], {}), "('txt/tb.txt', 'w+')\n", (4559, 4579), False, 'import typing, aiofiles\n'), ((862, 921), 'util.embedify.embedify', 'embedify.embedify', ([], {'desc': 'f"""```md\n#] {head}!\n> {text}```"""'}), '(desc=f"""```md\n#] {head}!\n> {text}```""")\n', (879, 921), False, 'from util import embedify, getPre, dbman\n'), ((1468, 1519), 'util.dbman.get', 'dbman.get', (['"""com"""', 'ctx.command.name'], {'id': 'ctx.guild.id'}), "('com', ctx.command.name, id=ctx.guild.id)\n", (1477, 1519), False, 'from util import embedify, getPre, dbman\n'), ((4465, 4502), 'traceback.format_tb', 'traceback.format_tb', (['ex.__traceback__'], {}), '(ex.__traceback__)\n', (4484, 4502), False, 'import traceback, sys\n'), ((6011, 6037), 'discord.File', 'discord.File', (['"""txt/tb.txt"""'], {}), "('txt/tb.txt')\n", (6023, 6037), False, 'import discord\n'), ((6350, 6376), 'discord.File', 'discord.File', (['"""txt/tb.txt"""'], {}), "('txt/tb.txt')\n", (6362, 6376), False, 'import discord\n'), ((5173, 5205), 'json.dumps', 'json.dumps', (['ctx.kwargs'], {'indent': '(4)'}), '(ctx.kwargs, indent=4)\n', (5183, 5205), False, 'import logging, json, re\n')] |
from pathlib import Path
from typing import Dict
import pandas as pd
from tabulate import tabulate
from python import *
from python.pipeline.pipeline import PipelineStage
from python.util.util import read_dataframe
class AnchorsIntoOneFileStage(PipelineStage):
"""
Writes all hyperlink anchor texts into a single plaintext file (great for manual inspection).
"""
def __init__(self, pos, config, config_global, logger):
super().__init__(pos, config, config_global, logger)
self.page_infos_file = self.sentences_file = self.hyperlinks_file = None
self.minimum_content_chars_for_article_status = config.get("minimum_content_chars_for_article_status", None)
def requires_files(self, provided: Dict[str, Path]):
self.page_infos_file = provided[PAGE_INFOS]
self.sentences_file = provided[SENTENCES]
self.hyperlinks_file = provided[HYPERLINKS]
def run(self, live_objects: dict):
# load all the files
page_infos = read_dataframe(self.page_infos_file) # type: pd.DataFrame
sentences = read_dataframe(self.sentences_file) # type: pd.DataFrame
hyperlinks = read_dataframe(self.hyperlinks_file) # type: pd.DataFrame
if self.minimum_content_chars_for_article_status is None:
# if no threshold is given: plot distribution of text length (unit: number of characters) for all extracted pages
ax = page_infos[CONTENT_CHARS].plot(kind="hist", bins=30, loglog=True)
fig = ax.get_figure()
fig.savefig(self.stage_disk_location / "characters_per_article.png")
fig.clf()
else:
# identify non-article webpages (those with insufficient amount of text)
# based on the above plot, we define that a webpage needs to have at least 1000 chars of content to count as an article
too_short_articles = page_infos[CONTENT_CHARS] < self.minimum_content_chars_for_article_status
non_article_webpages = page_infos.loc[~too_short_articles] # TODO consider using this
self.logger.info(f"{too_short_articles.value_counts().get(True, 0)} of {len(too_short_articles)} pages had less than {self.minimum_content_chars_for_article_status} characters of textual content and are therefore not considered as news articles.")
# obtain link anchor text
df = pd.merge(sentences, hyperlinks, on=[URL_NORMALIZED, SENTENCE_IDX])
df[ANCHOR_TEXT] = df.apply(lambda v: v[SENTENCE][v[CHARS_START]:v[CHARS_END]], axis=1)
# just extract the spans and sort them and print them
self.logger.info("Writing hyperlink anchor texts to a file (for reference)...")
with open(Path(self.stage_disk_location) / "link_anchors.txt", "w") as f:
pretty_df = tabulate(df[[ANCHOR_TEXT, TO_URL_NORMALIZED]].sort_values(ANCHOR_TEXT), headers="keys", showindex=False)
f.write(pretty_df)
component = AnchorsIntoOneFileStage
| [
"python.util.util.read_dataframe",
"pandas.merge",
"pathlib.Path"
] | [((1002, 1038), 'python.util.util.read_dataframe', 'read_dataframe', (['self.page_infos_file'], {}), '(self.page_infos_file)\n', (1016, 1038), False, 'from python.util.util import read_dataframe\n'), ((1085, 1120), 'python.util.util.read_dataframe', 'read_dataframe', (['self.sentences_file'], {}), '(self.sentences_file)\n', (1099, 1120), False, 'from python.util.util import read_dataframe\n'), ((1170, 1206), 'python.util.util.read_dataframe', 'read_dataframe', (['self.hyperlinks_file'], {}), '(self.hyperlinks_file)\n', (1184, 1206), False, 'from python.util.util import read_dataframe\n'), ((2392, 2458), 'pandas.merge', 'pd.merge', (['sentences', 'hyperlinks'], {'on': '[URL_NORMALIZED, SENTENCE_IDX]'}), '(sentences, hyperlinks, on=[URL_NORMALIZED, SENTENCE_IDX])\n', (2400, 2458), True, 'import pandas as pd\n'), ((2723, 2753), 'pathlib.Path', 'Path', (['self.stage_disk_location'], {}), '(self.stage_disk_location)\n', (2727, 2753), False, 'from pathlib import Path\n')] |
from django.contrib.auth.models import User
from django.urls import reverse
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.test import APIClient, APITestCase
from course.models import Course, ShoppingCart
from course.serializers import CourseSerializer
class Course_Test(APITestCase):
def setUp(self):
self.client = APIClient()
self.user = User.objects.create_user(username='testUser', password='<PASSWORD>')
self.course = Course.objects.create(subject="MATH", number=323, term='201909', description='test course')
self.data = {
'username': 'testUser',
'password': '<PASSWORD>'
}
self.client.post(reverse('login'), data=self.data)
token = Token.objects.get(user=self.user)
self.client.credentials(HTTP_AUTHORIZATION='Token ' + token.key)
def test_course_create(self):
url = reverse('course_create')
data = {
'subject': 'MATH',
'number': 324,
'term': '201909',
'description': 'test course'
}
response = self.client.post(url, data=data)
self.assertEqual(Course.objects.count(), 2)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = response.json()
self.assertEqual(response['subject'], 'MATH')
self.assertEqual(response['number'], 324)
self.assertEqual(response['term'], '201909')
self.assertEqual(response['description'], 'test course')
self.assertEqual(response['time_chosed'], 0)
self.assertFalse(response['isAvailable'])
self.assertEqual(response['price'], 0)
self.assertIsNone(response['tutor'], 0)
def test_get_courses(self):
self.client.credentials(HTTP_AUTHORIZATION='')
url = reverse('course_list')
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = response.json()
course_json = CourseSerializer(self.course)
self.assertEqual(response[0], course_json.data)
def test_course_detail(self):
"""
Retrieve a course
"""
url = reverse('course_detail', args=[1])
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = response.json()
course_json = CourseSerializer(self.course)
self.assertEqual(response, course_json.data)
"""
Update a course
"""
data = {
"tutor": 1
}
response = self.client.patch(url, data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = response.json()
self.course.tutor = self.user
course_json = CourseSerializer(self.course)
self.assertEqual(response, course_json.data)
"""
Delete a course
"""
self.assertEqual(Course.objects.count(), 1)
self.client.delete(url)
self.assertEqual(Course.objects.count(), 0)
def test_cart_create(self):
url = reverse('cart_list')
data = {
'number': 1,
'total': 50,
'user': self.user.id,
'course': self.course.id
}
response = self.client.post(url, data)
self.assertEqual(response.status_code,status.HTTP_201_CREATED)
response = response.json()
self.assertEqual(response['number'],1)
self.assertEqual(response['total'],50)
self.assertEqual(response['user'],self.user.id)
self.assertEqual(response['course'],self.course.id)
self.assertEqual(ShoppingCart.objects.count(),1)
def test_get_carts(self):
url = reverse('cart_list')
data = {
'user': 1,
'course': self.course.id
}
self.client.post(url, data)
response = self.client.get(url)
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = response.json()[0]
self.assertEqual(response['user'], self.user.id)
self.assertEqual(response['course'], self.course.id)
self.assertEqual(ShoppingCart.objects.count(), 1)
| [
"rest_framework.authtoken.models.Token.objects.get",
"course.models.ShoppingCart.objects.count",
"course.models.Course.objects.count",
"rest_framework.test.APIClient",
"course.models.Course.objects.create",
"django.urls.reverse",
"django.contrib.auth.models.User.objects.create_user",
"course.serializers.CourseSerializer"
] | [((388, 399), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (397, 399), False, 'from rest_framework.test import APIClient, APITestCase\n'), ((420, 488), 'django.contrib.auth.models.User.objects.create_user', 'User.objects.create_user', ([], {'username': '"""testUser"""', 'password': '"""<PASSWORD>"""'}), "(username='testUser', password='<PASSWORD>')\n", (444, 488), False, 'from django.contrib.auth.models import User\n'), ((511, 606), 'course.models.Course.objects.create', 'Course.objects.create', ([], {'subject': '"""MATH"""', 'number': '(323)', 'term': '"""201909"""', 'description': '"""test course"""'}), "(subject='MATH', number=323, term='201909',\n description='test course')\n", (532, 606), False, 'from course.models import Course, ShoppingCart\n'), ((783, 816), 'rest_framework.authtoken.models.Token.objects.get', 'Token.objects.get', ([], {'user': 'self.user'}), '(user=self.user)\n', (800, 816), False, 'from rest_framework.authtoken.models import Token\n'), ((939, 963), 'django.urls.reverse', 'reverse', (['"""course_create"""'], {}), "('course_create')\n", (946, 963), False, 'from django.urls import reverse\n'), ((1853, 1875), 'django.urls.reverse', 'reverse', (['"""course_list"""'], {}), "('course_list')\n", (1860, 1875), False, 'from django.urls import reverse\n'), ((2040, 2069), 'course.serializers.CourseSerializer', 'CourseSerializer', (['self.course'], {}), '(self.course)\n', (2056, 2069), False, 'from course.serializers import CourseSerializer\n'), ((2225, 2259), 'django.urls.reverse', 'reverse', (['"""course_detail"""'], {'args': '[1]'}), "('course_detail', args=[1])\n", (2232, 2259), False, 'from django.urls import reverse\n'), ((2424, 2453), 'course.serializers.CourseSerializer', 'CourseSerializer', (['self.course'], {}), '(self.course)\n', (2440, 2453), False, 'from course.serializers import CourseSerializer\n'), ((2816, 2845), 'course.serializers.CourseSerializer', 'CourseSerializer', (['self.course'], {}), '(self.course)\n', (2832, 2845), False, 'from course.serializers import CourseSerializer\n'), ((3131, 3151), 'django.urls.reverse', 'reverse', (['"""cart_list"""'], {}), "('cart_list')\n", (3138, 3151), False, 'from django.urls import reverse\n'), ((3766, 3786), 'django.urls.reverse', 'reverse', (['"""cart_list"""'], {}), "('cart_list')\n", (3773, 3786), False, 'from django.urls import reverse\n'), ((733, 749), 'django.urls.reverse', 'reverse', (['"""login"""'], {}), "('login')\n", (740, 749), False, 'from django.urls import reverse\n'), ((1197, 1219), 'course.models.Course.objects.count', 'Course.objects.count', ([], {}), '()\n', (1217, 1219), False, 'from course.models import Course, ShoppingCart\n'), ((2973, 2995), 'course.models.Course.objects.count', 'Course.objects.count', ([], {}), '()\n', (2993, 2995), False, 'from course.models import Course, ShoppingCart\n'), ((3057, 3079), 'course.models.Course.objects.count', 'Course.objects.count', ([], {}), '()\n', (3077, 3079), False, 'from course.models import Course, ShoppingCart\n'), ((3689, 3717), 'course.models.ShoppingCart.objects.count', 'ShoppingCart.objects.count', ([], {}), '()\n', (3715, 3717), False, 'from course.models import Course, ShoppingCart\n'), ((4198, 4226), 'course.models.ShoppingCart.objects.count', 'ShoppingCart.objects.count', ([], {}), '()\n', (4224, 4226), False, 'from course.models import Course, ShoppingCart\n')] |
import argparse
import os.path as osp
import numpy as np
from env_frontend import MantsinenBasic
from common.server_utils import is_server_running
from time import sleep
from stable_baselines.ppo.ppod import PPOD as ppo
from stable_baselines.ppo.policies import PPOPolicy as policy
from stable_baselines.common.vec_env.mevea_vec_env import MeveaVecEnv
from common.data_utils import get_test_waypoints
from config import *
def make_env(env_class, *args):
fn = lambda: env_class(*args)
return fn
if __name__ == '__main__':
# process arguments
parser = argparse.ArgumentParser()
parser.add_argument('-w', '--waypoints', help='Text file with waypoints.', default='example_waypoints.txt')
parser.add_argument('-m', '--model', help='Model directory.', default='models/mevea/mantsinen/ppo')
parser.add_argument('-c', '--checkpoint', help='Checkpoint', default='best', choices=['first', 'last', 'best'])
parser.add_argument('-v', '--video', help='Record video?', type=bool)
args = parser.parse_args()
chkpt_dir = args.model
# check that server is running
while not is_server_running(server):
print('Start the server: python3 env_server.py')
sleep(sleep_interval)
# extract waypoints
waypoints = get_test_waypoints(args.waypoints)
last_dist_max = np.linalg.norm(waypoints[-1] - waypoints[-2])
n_stay_max = np.inf
# create environment
env_fns = [make_env(
MantsinenBasic,
0,
model_path,
model_dir,
signal_dir,
server,
waypoints,
nsteps,
lookback,
use_inputs,
use_outputs,
action_scale,
tstep,
n_stay_max,
last_dist_max,
bonus
)]
env = MeveaVecEnv(env_fns)
# load model and run it in demo mode
try:
model = ppo(policy, env, policy_kwargs=dict(net_arch=[256, 256, dict(vf=[64, 64]), dict(pi=[64, 64])]), batch_size=batch_size, n_steps=nsteps,
model_path=chkpt_dir, chkpt_name=args.checkpoint, tensorboard_log='tensorboard_log', verbose=1)
if args.video:
cp_name = osp.basename(args.checkpoint)
video_fname = f"{args.trajectory.split('.csv')[0]}_{cp_name.split('.zip')[0]}.mp4"
video_fpath = osp.join(video_output, 'ppo', video_fname)
print(f'Recording to {video_fpath}')
else:
video_fpath = None
model.demo(video_file=video_fpath)
except Exception as e:
print(e) | [
"stable_baselines.common.vec_env.mevea_vec_env.MeveaVecEnv",
"common.server_utils.is_server_running",
"argparse.ArgumentParser",
"common.data_utils.get_test_waypoints",
"os.path.join",
"time.sleep",
"os.path.basename",
"numpy.linalg.norm"
] | [((571, 596), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (594, 596), False, 'import argparse\n'), ((1269, 1303), 'common.data_utils.get_test_waypoints', 'get_test_waypoints', (['args.waypoints'], {}), '(args.waypoints)\n', (1287, 1303), False, 'from common.data_utils import get_test_waypoints\n'), ((1324, 1369), 'numpy.linalg.norm', 'np.linalg.norm', (['(waypoints[-1] - waypoints[-2])'], {}), '(waypoints[-1] - waypoints[-2])\n', (1338, 1369), True, 'import numpy as np\n'), ((1761, 1781), 'stable_baselines.common.vec_env.mevea_vec_env.MeveaVecEnv', 'MeveaVecEnv', (['env_fns'], {}), '(env_fns)\n', (1772, 1781), False, 'from stable_baselines.common.vec_env.mevea_vec_env import MeveaVecEnv\n'), ((1113, 1138), 'common.server_utils.is_server_running', 'is_server_running', (['server'], {}), '(server)\n', (1130, 1138), False, 'from common.server_utils import is_server_running\n'), ((1205, 1226), 'time.sleep', 'sleep', (['sleep_interval'], {}), '(sleep_interval)\n', (1210, 1226), False, 'from time import sleep\n'), ((2146, 2175), 'os.path.basename', 'osp.basename', (['args.checkpoint'], {}), '(args.checkpoint)\n', (2158, 2175), True, 'import os.path as osp\n'), ((2297, 2339), 'os.path.join', 'osp.join', (['video_output', '"""ppo"""', 'video_fname'], {}), "(video_output, 'ppo', video_fname)\n", (2305, 2339), True, 'import os.path as osp\n')] |
import click
from flask import current_app, g
from flask.cli import with_appcontext
from pymongo import MongoClient
from pymongo.errors import PyMongoError
from controllers.config import Config
def get_db():
if 'dbclient' not in g:
if current_app.config['DBTYPE'] == 'mongoDB':
try:
g.dbclient = MongoClient(Config['MONGO_URL'])
except PyMongoError:
print("MongoDB connection failed.")
if 'dbclient' in g:
g.pop('dbclient', None)
return None
g.db = g.dbclient[Config.DB_NAME]
return g.db
def close_db(e=None):
if current_app.config['DBTYPE'] == 'mongoDB':
g.pop('db', None)
dbclient = g.pop('dbclient', None)
if dbclient is not None:
dbclient.close()
def init_db():
db = get_db()
# with current_app.open_resource('schema.sql') as f:
# db.executescript(f.read().decode('utf8'))
def init_app(app):
app.teardown_appcontext(close_db)
app.cli.add_command(init_db_command)
@click.command('init-db')
@with_appcontext
def init_db_command():
"""Clear the existing data and create new tables."""
init_db()
click.echo('Initialized the database.')
| [
"click.echo",
"pymongo.MongoClient",
"click.command",
"flask.g.pop"
] | [((1079, 1103), 'click.command', 'click.command', (['"""init-db"""'], {}), "('init-db')\n", (1092, 1103), False, 'import click\n'), ((1219, 1258), 'click.echo', 'click.echo', (['"""Initialized the database."""'], {}), "('Initialized the database.')\n", (1229, 1258), False, 'import click\n'), ((709, 726), 'flask.g.pop', 'g.pop', (['"""db"""', 'None'], {}), "('db', None)\n", (714, 726), False, 'from flask import current_app, g\n'), ((746, 769), 'flask.g.pop', 'g.pop', (['"""dbclient"""', 'None'], {}), "('dbclient', None)\n", (751, 769), False, 'from flask import current_app, g\n'), ((339, 371), 'pymongo.MongoClient', 'MongoClient', (["Config['MONGO_URL']"], {}), "(Config['MONGO_URL'])\n", (350, 371), False, 'from pymongo import MongoClient\n'), ((513, 536), 'flask.g.pop', 'g.pop', (['"""dbclient"""', 'None'], {}), "('dbclient', None)\n", (518, 536), False, 'from flask import current_app, g\n')] |
from pybloom import BloomFilter
if __name__ == '__main__':
f = BloomFilter(10, error_rate=0.0000003, backend='redis',
redis_connection='redis://localhost:6379/0') # 10000000000
for i in range(10):
f.add(i)
assert i in f
print(f.false_positive_probability, 11 in f)
| [
"pybloom.BloomFilter"
] | [((68, 168), 'pybloom.BloomFilter', 'BloomFilter', (['(10)'], {'error_rate': '(3e-07)', 'backend': '"""redis"""', 'redis_connection': '"""redis://localhost:6379/0"""'}), "(10, error_rate=3e-07, backend='redis', redis_connection=\n 'redis://localhost:6379/0')\n", (79, 168), False, 'from pybloom import BloomFilter\n')] |
import sys
import json
import transformers
import csv
import re
import random
def normtext(t):
t=re.sub("<i>|</i>|<br ?/>"," ",t)
t=re.sub("[0-9]","N",t)
return t
def proc_text(d):
d["text"]=" ".join(normtext(d[f]) for f in ("name","synopsis","contentdescription")).strip()
def csv2dict(inp):
r=csv.DictReader(inp,dialect="excel-tab",fieldnames="filmiri,year,name,synopsis,contentdescription".split(","))
for i,d in enumerate(r):
if d["synopsis"]=="None" or d["contentdescription"]=="None":
continue
proc_text(d)
decade=int(d["year"])//10*10
new_d={"sentence":d["text"], "label":str(decade), "id":str(i)}
yield new_d
if __name__=="__main__":
a=[]
for d in csv2dict(sys.stdin):
a.append(d)
random.shuffle(a)
for d in a:
print(json.dumps(d,ensure_ascii=False,sort_keys=True))
| [
"re.sub",
"json.dumps",
"random.shuffle"
] | [((102, 136), 're.sub', 're.sub', (['"""<i>|</i>|<br ?/>"""', '""" """', 't'], {}), "('<i>|</i>|<br ?/>', ' ', t)\n", (108, 136), False, 'import re\n'), ((141, 164), 're.sub', 're.sub', (['"""[0-9]"""', '"""N"""', 't'], {}), "('[0-9]', 'N', t)\n", (147, 164), False, 'import re\n'), ((802, 819), 'random.shuffle', 'random.shuffle', (['a'], {}), '(a)\n', (816, 819), False, 'import random\n'), ((850, 899), 'json.dumps', 'json.dumps', (['d'], {'ensure_ascii': '(False)', 'sort_keys': '(True)'}), '(d, ensure_ascii=False, sort_keys=True)\n', (860, 899), False, 'import json\n')] |
from iguanas.rule_optimisation import DirectSearchOptimiser
from iguanas.metrics import FScore, AlertsPerDay
from iguanas.rules import Rules
from iguanas.warnings import RulesNotOptimisedWarning
from iguanas.exceptions import RulesNotOptimisedError
import pytest
import numpy as np
import pandas as pd
@pytest.fixture
def _create_data():
np.random.seed(0)
X = pd.DataFrame({
'A': np.random.randint(0, 10, 10000),
'B': np.random.randint(0, 100, 10000),
'C': np.random.uniform(0, 1, 10000),
'D': [True, False] * 5000,
'E': ['yes', 'no'] * 5000,
'AllNa': [np.nan] * 10000,
'ZeroVar': [1] * 10000
})
X.loc[10000] = [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]
X['A'] = X['A'].astype('Int64')
X['B'] = X['B'].astype('Int64')
X['D'] = X['D'].astype('boolean')
y = pd.Series(np.random.randint(0, 2, 10001))
sample_weight = pd.Series(
np.where((X['A'] > 7).fillna(False) & (y == 0), 100, 1))
return X, y, sample_weight
@pytest.fixture
def _create_inputs():
rule_lambdas = {
'integer': lambda **kwargs: "(X['A']>{A})".format(**kwargs),
'float': lambda **kwargs: "(X['C']>{C})".format(**kwargs),
'categoric': lambda **kwargs: "(X['E']=='yes')".format(**kwargs),
'boolean': lambda **kwargs: "(X['D']==True)".format(**kwargs),
'is_na': lambda **kwargs: "(X['A']>{A})|(X['A'].isna())".format(**kwargs),
'mixed': lambda **kwargs: "((X['A']>{A})&(X['C']>{C})&(X['E']=='yes')&(X['D']==True))|(X['C']>{C%0})".format(**kwargs),
'missing_col': lambda **kwargs: "(X['Z']>{Z})".format(**kwargs),
'all_na': lambda **kwargs: "(X['AllNa']>{AllNa})".format(**kwargs),
'zero_var': lambda **kwargs: "(X['ZeroVar']>{ZeroVar})".format(**kwargs),
'already_optimal': lambda **kwargs: "(X['A']>={A})".format(**kwargs),
'float_with_zero_var': lambda **kwargs: "(X['C']>{C})&(X['ZeroVar']>={ZeroVar})".format(**kwargs),
'float_with_all_na_greater': lambda **kwargs: "(X['C']>{C})&(X['AllNa']>{AllNa})".format(**kwargs),
'float_with_all_na_is_na': lambda **kwargs: "(X['C']>{C})&(X['AllNa'].isna())".format(**kwargs),
'multi_zero_var': lambda **kwargs: "((X['C']>{C})&(X['ZeroVar']>={ZeroVar}))|((X['A']>{A})&(X['ZeroVar']>={ZeroVar%0}))".format(**kwargs),
}
lambda_kwargs = {
'integer': {'A': 9},
'float': {'C': 1.5},
'categoric': {},
'boolean': {},
'is_na': {'A': 9},
'mixed': {'A': 1, 'C': 1.5, 'C%0': 2.5},
'missing_col': {'Z': 1},
'all_na': {'AllNa': 5},
'zero_var': {'ZeroVar': 1},
'already_optimal': {'A': 0},
'float_with_zero_var': {'C': 1.5, 'ZeroVar': 1},
'float_with_all_na_greater': {'C': 1.5, 'AllNa': 1},
'float_with_all_na_is_na': {'C': 1.5},
'multi_zero_var': {'C': 1.5, 'ZeroVar': 1, 'A': 9, 'ZeroVar%0': 1}
}
return rule_lambdas, lambda_kwargs
@pytest.fixture
def _expected_rule_strings():
opt_rule_strings = {
'Nelder-Mead': {
'integer': "(X['A']>8.549999999999999)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>8.549999999999999)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'Powell': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'CG': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'BFGS': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'L-BFGS-B': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'TNC': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'COBYLA': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'SLSQP': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
},
'trust-constr': {
'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"
}
}
return opt_rule_strings
@pytest.fixture
def _expected_rule_strings_x0_bounds():
opt_rule_strings = {'Nelder-Mead': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.0003641365574362787)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>5.966666666666709)&(X['C']>0.6372486347111281)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.0003641365574362787)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.0003641365574362787)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0))|((X['A']>6.537012970447561)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'Powell': {'integer': "(X['A']>0.5015693034320075)",
'float': "(X['C']>0.0010925322093125105)",
'is_na': "(X['A']>0.5015693034320075)|(X['A'].isna())",
'mixed': "((X['A']>8.99973693287774)&(X['C']>0.9999237523200665)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.0014302632042792506)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.0010925322093125105)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.0010925322093125105)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.0008760021490056879)&(X['ZeroVar']>=1.0))|((X['A']>2.1232419197928096)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'CG': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'BFGS': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'L-BFGS-B': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'TNC': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'COBYLA': {'integer': "(X['A']>-0.5)",
'float': "(X['C']>-0.4998339204302188)",
'is_na': "(X['A']>-0.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>-0.4998339204302188)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>-0.20694070161676625)&(X['ZeroVar']>=0.29289321881345254)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>-0.4998339204302188)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>-0.077968673622132)&(X['ZeroVar']>=0.4218652468080868))|((X['A']>4.407184364166293)&(X['ZeroVar']>=0.4317521033490438))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'SLSQP': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'trust-constr': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"}
}
return opt_rule_strings
@ pytest.fixture
def _expected_rule_strings_weighted():
return {
'Nelder-Mead': {'integer': "(X['A']>8.549999999999999)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>8.549999999999999)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'Powell': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'CG': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'BFGS': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'L-BFGS-B': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'TNC': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'COBYLA': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'SLSQP': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'trust-constr': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"}
}
@ pytest.fixture
def _expected_rule_strings_weighted_x0_bounds():
return {
'Nelder-Mead': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5056366460650756)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.544030630550376)&(X['C']>0.5062001821281391)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5059645205154579)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5056366460650756)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5056366460650756)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0))|((X['A']>6.537012970447561)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'Powell': {'integer': "(X['A']>0.5015693034320075)",
'float': "(X['C']>0.14610333258470667)",
'is_na': "(X['A']>0.5015693034320075)|(X['A'].isna())",
'mixed': "((X['A']>8.677776699611885)&(X['C']>0.9914509400603203)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.21861873480767938)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.14610333258470667)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.14610333258470667)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.000621823691021652)&(X['ZeroVar']>=1.0))|((X['A']>2.1240360255929973)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'CG': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=0.9999608585886306))|((X['A']>4.5)&(X['ZeroVar']>=1.0000330891590339))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'BFGS': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=0.9999608585886306))|((X['A']>4.5)&(X['ZeroVar']>=1.0000330891590339))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'L-BFGS-B': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'TNC': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'COBYLA': {'integer': "(X['A']>-0.5)",
'float': "(X['C']>-0.4998339204302188)",
'is_na': "(X['A']>-0.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>-0.4998339204302188)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.15012050213720127)&(X['ZeroVar']>=-0.07234657931438221)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>-0.4998339204302188)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>-0.10251065272063686)&(X['ZeroVar']>=0.3973232677095818))|((X['A']>4.381746741970505)&(X['ZeroVar']>=2.50948766391042))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'SLSQP': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'trust-constr': {'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5001660795697812)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.5)&(X['C']>0.5001660795697812)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5001660795697812)",
'already_optimal': "(X['A']>=0)",
'float_with_zero_var': "(X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.5001660795697812)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.5001660795697812)&(X['ZeroVar']>=1.0))|((X['A']>4.5)&(X['ZeroVar']>=1.0))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"}
}
@ pytest.fixture
def _expected_rule_strings_unlabelled():
return {
'Nelder-Mead': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=0.00025)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'Powell': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=177.53363890059836)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'CG': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=5.125898936452442e-05)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'BFGS': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=5.125898936452442e-05)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'L-BFGS-B': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=3.595679261195249e-06)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'TNC': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=1.4901161193847656e-08)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'COBYLA': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=10.0)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'SLSQP': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=11935095848.960007)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'trust-constr': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=2.7020533257768187)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"}
}
@ pytest.fixture
def _expected_rule_strings_unlabelled_x0_bounds():
return {
'Nelder-Mead': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>7.775353965311879)&(X['C']>0.9884794975944149)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.9884919578072813)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>0.999968022582126)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'Powell': {'integer': "(X['A']>9)",
'float': "(X['C']>0.9882521688325012)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>7.7943442656079)&(X['C']>0.896293283056058)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.9993343463105192)",
'already_optimal': "(X['A']>=8.203192518090468)",
'float_with_zero_var': "(X['C']>0.9882521688325012)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>0.9882521688325012)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'CG': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'BFGS': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'L-BFGS-B': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'TNC': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'COBYLA': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=9.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'SLSQP': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"},
'trust-constr': {'integer': "(X['A']>9)",
'float': "(X['C']>1.5)",
'is_na': "(X['A']>9)|(X['A'].isna())",
'mixed': "((X['A']>1)&(X['C']>1.5)&(X['E']=='yes')&(X['D']==True))|(X['C']>2.5)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>1.5)&(X['ZeroVar']>=1)",
'float_with_all_na_greater': "(X['C']>1.5)&(X['AllNa']>1)",
'float_with_all_na_is_na': "(X['C']>1.5)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>1.5)&(X['ZeroVar']>=1))|((X['A']>9)&(X['ZeroVar']>=1))",
'categoric': "(X['E']=='yes')",
'boolean': "(X['D']==True)"}
}
@ pytest.fixture
def _expected_X_rules_mean():
X_rules_means = {
'Nelder-Mead': {
'integer': 0.10428957104289571,
'float': 0.0,
'is_na': 0.10438956104389562,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'Powell': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'CG': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'BFGS': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'L-BFGS-B': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'TNC': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'COBYLA': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'SLSQP': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
},
'trust-constr': {
'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004
}
}
return X_rules_means
@ pytest.fixture
def _expected_X_rules_mean_x0_bounds():
X_rules_means = {
'Nelder-Mead': {'integer': 0.5061493850614939,
'float': 0.9998000199980002,
'is_na': 0.5062493750624938,
'mixed': 0.9998000199980002,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.9998000199980002,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.9998000199980002,
'multi_zero_var': 0.9998000199980002,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'Powell': {'integer': 0.9032096790320968,
'float': 0.9988001199880012,
'is_na': 0.9033096690330967,
'mixed': 0.9986001399860014,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.9988001199880012,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.9988001199880012,
'multi_zero_var': 0.9996000399960004,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'CG': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'BFGS': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'L-BFGS-B': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'TNC': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'COBYLA': {'integer': 0.9999000099990001,
'float': 0.9999000099990001,
'is_na': 1.0,
'mixed': 0.9999000099990001,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.9999000099990001,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.9999000099990001,
'multi_zero_var': 0.9999000099990001,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'SLSQP': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'trust-constr': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004}
}
return X_rules_means
@ pytest.fixture
def _expected_X_rules_mean_weighted():
return {
'Nelder-Mead': {'integer': 0.10428957104289571,
'float': 0.0,
'is_na': 0.10438956104389562,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'Powell': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'CG': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'BFGS': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'L-BFGS-B': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'TNC': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'COBYLA': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'SLSQP': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'trust-constr': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004}
}
@ pytest.fixture
def _expected_X_rules_mean_weighted_x0_bounds():
return {
'Nelder-Mead': {'integer': 0.5061493850614939,
'float': 0.49375062493750627,
'is_na': 0.5062493750624938,
'mixed': 0.49365063493650635,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49375062493750627,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49375062493750627,
'multi_zero_var': 0.9998000199980002,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'Powell': {'integer': 0.9032096790320968,
'float': 0.8528147185281472,
'is_na': 0.9033096690330967,
'mixed': 0.7774222577742226,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.8528147185281472,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.8528147185281472,
'multi_zero_var': 0.9997000299970003,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'CG': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.49865013498650135,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'BFGS': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.49865013498650135,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'L-BFGS-B': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'TNC': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'COBYLA': {'integer': 0.9999000099990001,
'float': 0.9999000099990001,
'is_na': 1.0,
'mixed': 0.9999000099990001,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.8487151284871512,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.9999000099990001,
'multi_zero_var': 0.9999000099990001,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'SLSQP': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'trust-constr': {'integer': 0.5061493850614939,
'float': 0.49865013498650135,
'is_na': 0.5062493750624938,
'mixed': 0.49865013498650135,
'already_optimal': 0.9999000099990001,
'float_with_zero_var': 0.49865013498650135,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.49865013498650135,
'multi_zero_var': 0.7498250174982501,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004}
}
@ pytest.fixture
def _expected_X_rules_mean_unlabelled():
return {
'Nelder-Mead': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9032096790320968,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'Powell': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'CG': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9032096790320968,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'BFGS': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9032096790320968,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'L-BFGS-B': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9032096790320968,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'TNC': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.9032096790320968,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'COBYLA': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'SLSQP': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'trust-constr': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.7034296570342966,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004}
}
@ pytest.fixture
def _expected_X_rules_mean_unlabelled_x0_bounds():
return {
'Nelder-Mead': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0098990100989901,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 9.999000099990002e-05,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'Powell': {'integer': 0.0,
'float': 0.0098990100989901,
'is_na': 9.999000099990002e-05,
'mixed': 0.009799020097990201,
'already_optimal': 0.10428957104289571,
'float_with_zero_var': 0.0098990100989901,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0098990100989901,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'CG': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'BFGS': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'L-BFGS-B': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'TNC': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'COBYLA': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'SLSQP': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004},
'trust-constr': {'integer': 0.0,
'float': 0.0,
'is_na': 9.999000099990002e-05,
'mixed': 0.0,
'already_optimal': 0.5061493850614939,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0,
'categoric': 0.49995000499950004,
'boolean': 0.49995000499950004}
}
@ pytest.fixture
def _exp_orig_rule_performances():
return {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
}
@ pytest.fixture
def _exp_orig_rule_performances_weighted():
return {
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
}
@ pytest.fixture
def _exp_orig_rule_performances_unlabelled():
return {
'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -980100.0,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0
}
@ pytest.fixture
def _exp_opt_rule_performances():
return {
'Nelder-Mead': {
'already_optimal': 0.6657771847898598,
'integer': 0.18332504558262888,
'is_na': 0.18329466357308585,
'float': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'Powell': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'CG': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'BFGS': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'L-BFGS-B': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'TNC': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'COBYLA': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'SLSQP': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
},
'trust-constr': {
'already_optimal': 0.6657771847898598,
'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0
}
}
@ pytest.fixture
def _exp_opt_rule_performances_x0_bounds():
return {
'Nelder-Mead': {'integer': 0.4988062077198568,
'float': 0.6658216025085062,
'is_na': 0.4987565900726152,
'mixed': 0.6658216025085062,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.6658216025085062,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.6658216025085062,
'multi_zero_var': 0.6658216025085062},
'Powell': {'integer': 0.6422306211224418,
'float': 0.66506442352627,
'is_na': 0.6421848260125499,
'mixed': 0.6650196968685318,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.66506442352627,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.66506442352627,
'multi_zero_var': 0.6655101087609262},
'CG': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696},
'BFGS': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696},
'L-BFGS-B': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696},
'TNC': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696},
'COBYLA': {'integer': 0.6657771847898598,
'float': 0.6657771847898598,
'is_na': 0.6657327729971316,
'mixed': 0.6657771847898598,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.6657771847898598,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.6657771847898598,
'multi_zero_var': 0.6657771847898598},
'SLSQP': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696},
'trust-constr': {'integer': 0.4988062077198568,
'float': 0.5005512679162072,
'is_na': 0.4987565900726152,
'mixed': 0.5005512679162072,
'already_optimal': 0.6657771847898598,
'float_with_zero_var': 0.5005512679162072,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.5005512679162072,
'multi_zero_var': 0.6008487468972696}
}
@ pytest.fixture
def _exp_opt_rule_performances_weighted():
return {
'Nelder-Mead': {'integer': 0.020277579157728768,
'float': 0.0,
'is_na': 0.020277207392197127,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'Powell': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'CG': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'BFGS': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'L-BFGS-B': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'TNC': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'COBYLA': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'SLSQP': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0},
'trust-constr': {'integer': 0.0,
'float': 0.0,
'is_na': 0.0,
'mixed': 0.0,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.0,
'multi_zero_var': 0.0}
}
@ pytest.fixture
def _exp_opt_rule_performances_weighted_x0_bounds():
return {
'Nelder-Mead': {'integer': 0.044601398352576996,
'float': 0.08315554286290255,
'is_na': 0.044601001610048124,
'mixed': 0.08315693982461446,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08315554286290255,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08315554286290255,
'multi_zero_var': 0.08504111456691237},
'Powell': {'integer': 0.07737844641675759,
'float': 0.08649740043410227,
'is_na': 0.07737778159635708,
'mixed': 0.08528603220316795,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08649740043410227,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08649740043410227,
'multi_zero_var': 0.08500775431600115},
'CG': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.08286183610147839},
'BFGS': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.08286183610147839},
'L-BFGS-B': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.06533455226154716},
'TNC': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.06533455226154716},
'COBYLA': {'integer': 0.08504038992467365,
'float': 0.08504038992467365,
'is_na': 0.0850396652947843,
'mixed': 0.08504038992467365,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.0865677707782971,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08504038992467365,
'multi_zero_var': 0.08504038992467365},
'SLSQP': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.06533455226154716},
'trust-constr': {'integer': 0.044601398352576996,
'float': 0.08286183610147839,
'is_na': 0.044601001610048124,
'mixed': 0.08286183610147839,
'already_optimal': 0.08504038992467365,
'float_with_zero_var': 0.08286183610147839,
'float_with_all_na_greater': 0.0,
'float_with_all_na_is_na': 0.08286183610147839,
'multi_zero_var': 0.06533455226154716}
}
@ pytest.fixture
def _exp_opt_rule_performances_unlabelled():
return {
'Nelder-Mead': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -797984.8899999999,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'Powell': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -100.0,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'CG': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -797984.8899999999,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'BFGS': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -797984.8899999999,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'L-BFGS-B': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -797984.8899999999,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'TNC': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -797984.8899999999,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'COBYLA': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -100.0,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'SLSQP': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -100.0,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'trust-constr': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -480942.25,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0}
}
@ pytest.fixture
def _exp_opt_rule_performances_unlabelled_x0_bounds():
return {
'Nelder-Mead': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -0.009999999999999929,
'already_optimal': -246214.44,
'float_with_zero_var': -98.01,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'Powell': {'integer': -100.0,
'float': -0.009999999999999929,
'is_na': -98.01,
'mixed': -0.039999999999999716,
'already_optimal': -8892.49,
'float_with_zero_var': -0.009999999999999929,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -0.009999999999999929,
'multi_zero_var': -100.0},
'CG': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'BFGS': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'L-BFGS-B': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'TNC': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'COBYLA': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -100.0,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'SLSQP': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0},
'trust-constr': {'integer': -100.0,
'float': -100.0,
'is_na': -98.01,
'mixed': -100.0,
'already_optimal': -246214.44,
'float_with_zero_var': -100.0,
'float_with_all_na_greater': -100.0,
'float_with_all_na_is_na': -100.0,
'multi_zero_var': -100.0}
}
@ pytest.fixture
def _instantiate(_create_data, _create_inputs):
X, _, _ = _create_data
rule_lambdas, lambda_kwargs = _create_inputs
f1 = FScore(beta=1)
with pytest.warns(UserWarning) as warnings:
ro = DirectSearchOptimiser(
rule_lambdas=rule_lambdas, lambda_kwargs=lambda_kwargs,
method='Nelder-mead',
x0=DirectSearchOptimiser.create_x0(X, lambda_kwargs),
bounds=DirectSearchOptimiser.create_bounds(X, lambda_kwargs),
metric=f1.fit
)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `x0` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `x0` for these rules" in warnings
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `bounds` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `bounds` for these rules" in warnings
return ro
@ pytest.fixture
def _instantiate_unlabelled(_create_data, _create_inputs):
X, _, _ = _create_data
rule_lambdas, lambda_kwargs = _create_inputs
apd = AlertsPerDay(10, 10)
with pytest.warns(UserWarning) as warnings:
ro = DirectSearchOptimiser(
rule_lambdas=rule_lambdas, lambda_kwargs=lambda_kwargs,
method='Nelder-mead',
x0=DirectSearchOptimiser.create_x0(X, lambda_kwargs),
bounds=DirectSearchOptimiser.create_bounds(X, lambda_kwargs),
metric=apd.fit
)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `x0` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `x0` for these rules" in warnings
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `bounds` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `bounds` for these rules" in warnings
return ro
def test_fit(_create_data, _create_inputs, _expected_rule_strings,
_expected_X_rules_mean, _exp_orig_rule_performances,
_exp_opt_rule_performances):
f1 = FScore(1)
X, y, _ = _create_data
exp_rule_strings = _expected_rule_strings
exp_X_rules_mean = _expected_X_rules_mean
exp_orig_rule_performances = _exp_orig_rule_performances
exp_opt_rule_performances = _exp_opt_rule_performances
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, y, None, f1.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, None, None
)
def test_fit_transform(_create_data, _create_inputs, _expected_rule_strings,
_expected_X_rules_mean, _exp_orig_rule_performances,
_exp_opt_rule_performances):
f1 = FScore(1)
X, y, _ = _create_data
exp_rule_strings = _expected_rule_strings
exp_X_rules_mean = _expected_X_rules_mean
exp_orig_rule_performances = _exp_orig_rule_performances
exp_opt_rule_performances = _exp_opt_rule_performances
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, y, None, f1.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, None, None, fit_transform=True
)
def test_fit_with_x0_and_bounds(_create_data, _create_inputs,
_expected_rule_strings_x0_bounds,
_expected_X_rules_mean_x0_bounds,
_exp_orig_rule_performances,
_exp_opt_rule_performances_x0_bounds):
f1 = FScore(1)
X, y, _ = _create_data
exp_rule_strings = _expected_rule_strings_x0_bounds
exp_X_rules_mean = _expected_X_rules_mean_x0_bounds
exp_orig_rule_performances = _exp_orig_rule_performances
exp_opt_rule_performances = _exp_opt_rule_performances_x0_bounds
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, y, None, f1.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, True, True
)
def test_fit_weighted(_create_data, _create_inputs,
_expected_rule_strings_weighted,
_expected_X_rules_mean_weighted,
_exp_orig_rule_performances_weighted,
_exp_opt_rule_performances_weighted):
f1 = FScore(1)
X, y, sample_weight = _create_data
exp_rule_strings = _expected_rule_strings_weighted
exp_X_rules_mean = _expected_X_rules_mean_weighted
exp_orig_rule_performances = _exp_orig_rule_performances_weighted
exp_opt_rule_performances = _exp_opt_rule_performances_weighted
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, y, sample_weight, f1.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, None, None
)
def test_fit_weighted_with_x0_and_bounds(_create_data, _create_inputs,
_expected_rule_strings_weighted_x0_bounds,
_expected_X_rules_mean_weighted_x0_bounds,
_exp_orig_rule_performances_weighted,
_exp_opt_rule_performances_weighted_x0_bounds):
f1 = FScore(1)
X, y, sample_weight = _create_data
exp_rule_strings = _expected_rule_strings_weighted_x0_bounds
exp_X_rules_mean = _expected_X_rules_mean_weighted_x0_bounds
exp_orig_rule_performances = _exp_orig_rule_performances_weighted
exp_opt_rule_performances = _exp_opt_rule_performances_weighted_x0_bounds
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, y, sample_weight, f1.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, True, True
)
def test_fit_unlabelled(_create_data, _create_inputs,
_expected_rule_strings_unlabelled,
_expected_X_rules_mean_unlabelled,
_exp_orig_rule_performances_unlabelled,
_exp_opt_rule_performances_unlabelled):
apd = AlertsPerDay(10, 10)
X, y, _ = _create_data
exp_rule_strings = _expected_rule_strings_unlabelled
exp_X_rules_mean = _expected_X_rules_mean_unlabelled
exp_orig_rule_performances = _exp_orig_rule_performances_unlabelled
exp_opt_rule_performances = _exp_opt_rule_performances_unlabelled
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, None, None, apd.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, None, None
)
def test_fit_unlabelled_with_x0_and_bounds(_create_data, _create_inputs,
_expected_rule_strings_unlabelled_x0_bounds,
_expected_X_rules_mean_unlabelled_x0_bounds,
_exp_orig_rule_performances_unlabelled,
_exp_opt_rule_performances_unlabelled_x0_bounds):
apd = AlertsPerDay(10, 10)
X, y, _ = _create_data
exp_rule_strings = _expected_rule_strings_unlabelled_x0_bounds
exp_X_rules_mean = _expected_X_rules_mean_unlabelled_x0_bounds
exp_orig_rule_performances = _exp_orig_rule_performances_unlabelled
exp_opt_rule_performances = _exp_opt_rule_performances_unlabelled_x0_bounds
rule_lambdas, lambda_kwargs = _create_inputs
_fit(
rule_lambdas, lambda_kwargs, X, None, None, apd.fit, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, True, True
)
def test_transform(_instantiate):
ro = _instantiate
X = pd.DataFrame({
'A': [1, 2, 0, 1, 0, 2]
})
exp_X_rules = pd.DataFrame({
'Rule': [0, 1, 0, 0, 0, 1]
})
ro.rule_strings = {'Rule': "(X['A']>1)"}
X_rules = ro.transform(X)
assert all(X_rules == exp_X_rules)
def test_create_bounds(_create_data, _create_inputs):
exp_bounds = {
'integer': [(0.0, 9.0)],
'float': [(0.0003641365574362787, 0.9999680225821261)],
'is_na': [(0.0, 9.0)],
'mixed': [(0.0, 9.0),
(0.0003641365574362787, 0.9999680225821261),
(0.0003641365574362787, 0.9999680225821261)],
'all_na': [(0.0, 0.0)],
'zero_var': [(1.0, 1.0)],
'already_optimal': [(0.0, 9.0)],
'float_with_zero_var': [(0.0003641365574362787, 0.9999680225821261),
(1.0, 1.0)],
'float_with_all_na_greater': [(0.0003641365574362787, 0.9999680225821261),
(0.0, 0.0)],
'float_with_all_na_is_na': [(0.0003641365574362787, 0.9999680225821261)],
'multi_zero_var': [(0.0003641365574362787, 0.9999680225821261),
(1.0, 1.0),
(0.0, 9.0),
(1.0, 1.0)]
}
X, _, _ = _create_data
_, lambda_kwargs = _create_inputs
with pytest.warns(UserWarning) as warnings:
bounds = DirectSearchOptimiser.create_bounds(X, lambda_kwargs)
assert bounds == exp_bounds
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `bounds` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `bounds` for these rules" in warnings
def test_create_x0(_create_data, _create_inputs):
exp_x0 = {
'integer': np.array([4.5]),
'float': np.array([0.50016608]),
'is_na': np.array([4.5]),
'mixed': np.array([4.5, 0.50016608, 0.50016608]),
'all_na': np.array([0.]),
'zero_var': np.array([1.]),
'already_optimal': np.array([4.5]),
'float_with_zero_var': np.array([0.50016608, 1.]),
'float_with_all_na_greater': np.array([0.50016608, 0.]),
'float_with_all_na_is_na': np.array([0.50016608]),
'multi_zero_var': np.array([0.50016608, 1., 4.5, 1.])
}
X, _, _ = _create_data
_, lambda_kwargs = _create_inputs
with pytest.warns(UserWarning) as warnings:
x0 = DirectSearchOptimiser.create_x0(X, lambda_kwargs)
for rule_name in x0.keys():
np.testing.assert_array_almost_equal(
x0[rule_name], exp_x0[rule_name]
)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `x0` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `x0` for these rules" in warnings
def test_create_initial_simplexes(_create_data, _create_inputs):
exp_simplexes = {
'Origin-based': {
'integer': {'initial_simplex': np.array([[0.],
[9.]])},
'float': {'initial_simplex': np.array([[3.64136557e-04],
[9.99968023e-01]])},
'is_na': {'initial_simplex': np.array([[0.],
[9.]])},
'mixed': {'initial_simplex': np.array([[0.00000000e+00, 3.64136557e-04, 3.64136557e-04],
[9.00000000e+00, 0.00000000e+00,
0.00000000e+00],
[0.00000000e+00, 9.99968023e-01,
0.00000000e+00],
[0.00000000e+00, 0.00000000e+00, 9.99968023e-01]])},
'all_na': {'initial_simplex': np.array([[0.],
[0.]])},
'zero_var': {'initial_simplex': np.array([[1.],
[1.]])},
'already_optimal': {'initial_simplex': np.array([[0.],
[9.]])},
'float_with_zero_var': {'initial_simplex': np.array([[3.64136557e-04, 1.00000000e+00],
[9.99968023e-01,
0.00000000e+00],
[0.00000000e+00, 1.00000000e+00]])},
'float_with_all_na_greater': {'initial_simplex': np.array([[3.64136557e-04, 0.00000000e+00],
[9.99968023e-01,
0.00000000e+00],
[0.00000000e+00, 0.00000000e+00]])},
'float_with_all_na_is_na': {'initial_simplex': np.array([[3.64136557e-04],
[9.99968023e-01]])},
'multi_zero_var': {'initial_simplex': np.array([[3.64136557e-04, 1.00000000e+00, 0.00000000e+00, 1.00000000e+00],
[9.99968023e-01, 0.00000000e+00,
0.00000000e+00, 0.00000000e+00],
[0.00000000e+00, 1.00000000e+00,
0.00000000e+00, 0.00000000e+00],
[0.00000000e+00, 0.00000000e+00,
9.00000000e+00, 0.00000000e+00],
[0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 1.00000000e+00]])}
},
'Minimum-based': {
'integer': {'initial_simplex': np.array([[0.],
[9.]])},
'float': {'initial_simplex': np.array([[3.64136557e-04],
[9.99968023e-01]])},
'is_na': {'initial_simplex': np.array([[0.],
[9.]])},
'mixed': {'initial_simplex': np.array([[0.00000000e+00, 3.64136557e-04, 3.64136557e-04],
[9.00000000e+00, 3.64136557e-04,
3.64136557e-04],
[0.00000000e+00, 9.99968023e-01,
3.64136557e-04],
[0.00000000e+00, 3.64136557e-04, 9.99968023e-01]])},
'all_na': {'initial_simplex': np.array([[0.],
[0.]])},
'zero_var': {'initial_simplex': np.array([[1.],
[1.]])},
'already_optimal': {'initial_simplex': np.array([[0.],
[9.]])},
'float_with_zero_var': {'initial_simplex': np.array([[3.64136557e-04, 1.00000000e+00],
[9.99968023e-01,
1.00000000e+00],
[3.64136557e-04, 1.00000000e+00]])},
'float_with_all_na_greater': {'initial_simplex': np.array([[3.64136557e-04, 0.00000000e+00],
[9.99968023e-01,
0.00000000e+00],
[3.64136557e-04, 0.00000000e+00]])},
'float_with_all_na_is_na': {'initial_simplex': np.array([[3.64136557e-04],
[9.99968023e-01]])},
'multi_zero_var': {'initial_simplex': np.array([[3.64136557e-04, 1.00000000e+00, 0.00000000e+00, 1.00000000e+00],
[9.99968023e-01, 1.00000000e+00,
0.00000000e+00, 1.00000000e+00],
[3.64136557e-04, 1.00000000e+00,
0.00000000e+00, 1.00000000e+00],
[3.64136557e-04, 1.00000000e+00,
9.00000000e+00, 1.00000000e+00],
[3.64136557e-04, 1.00000000e+00, 0.00000000e+00, 1.00000000e+00]])}
},
'Random-based': {
'integer': {'initial_simplex': np.array([[4.94426086],
[6.443141]])},
'float': {'initial_simplex': np.array([[0.54950884],
[0.71598511]])},
'is_na': {'initial_simplex': np.array([[4.94426086],
[6.443141]])},
'mixed': {'initial_simplex': np.array([[4.94426086, 0.42427461, 0.96460846],
[6.443141, 0.64664804,
0.38403706],
[5.43029526, 0.43821543,
0.79256697],
[4.9088526, 0.89267531, 0.52957824]])},
'all_na': {'initial_simplex': np.array([[0.],
[0.]])},
'zero_var': {'initial_simplex': np.array([[1.],
[1.]])},
'already_optimal': {'initial_simplex': np.array([[4.94426086],
[6.443141]])},
'float_with_zero_var': {'initial_simplex': np.array([[0.54950884, 1.],
[0.71598511, 1.],
[0.60349127, 1.]])},
'float_with_all_na_greater': {'initial_simplex': np.array([[0.54950884, 0.],
[0.71598511, 0.],
[0.60349127, 0.]])},
'float_with_all_na_is_na': {'initial_simplex': np.array([[0.54950884],
[0.71598511]])},
'multi_zero_var': {'initial_simplex': np.array([[0.54950884, 1., 7.13265087, 1.],
[0.71598511, 1.,
4.76481433, 1.],
[0.60349127, 1.,
5.11751345, 1.],
[0.54557615, 1.,
8.33870011, 1.],
[0.42427461, 1., 0.63996385, 1.]])}
}
}
X, _, _ = _create_data
_, lambda_kwargs = _create_inputs
for shape in ['Origin-based', 'Minimum-based', 'Random-based']:
with pytest.warns(UserWarning) as warnings:
initial_simplex = DirectSearchOptimiser.create_initial_simplexes(
X, lambda_kwargs, shape
)
for rule_name in initial_simplex.keys():
np.testing.assert_array_almost_equal(
initial_simplex[rule_name]['initial_simplex'],
exp_simplexes[shape][rule_name]['initial_simplex']
)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `initial_simplex` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `initial_simplex` for these rules" in warnings
with pytest.raises(
ValueError,
match='`shape` must be either "Origin-based", "Minimum-based" or "Random-based"'):
DirectSearchOptimiser.create_initial_simplexes(
X, lambda_kwargs, 'ERROR'
)
def test_optimise_rules(_instantiate, _create_inputs, _create_data):
X, y, _ = _create_data
# exp_rule_strings are different to ones seen in test_fit* since there's
# no comparison to the original rules in this test
exp_rule_strings = {
'integer': "(X['A']>4.5)",
'float': "(X['C']>0.0003641365574362787)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>5.966666666666709)&(X['C']>0.6372486347111281)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.0003641365574362787)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>0.5001660795697812)&(X['AllNa']>0.0)",
'float_with_all_na_is_na': "(X['C']>0.0003641365574362787)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0))|((X['A']>6.537012970447561)&(X['ZeroVar']>=1.0))"
}
ro = _instantiate
rule_lambdas, lambda_kwargs = _create_inputs
rules_to_drop = [
'missing_col', 'categoric', 'boolean', 'all_na', 'zero_var'
]
rule_lambdas_ = {
rule_name: rule_lambda for rule_name, rule_lambda in rule_lambdas.items() if rule_name not in rules_to_drop}
lambda_kwargs_ = {
rule_name: lambda_kwarg for rule_name, lambda_kwarg in lambda_kwargs.items() if rule_name not in rules_to_drop}
opt_rule_strings = ro._optimise_rules(
rule_lambdas_, lambda_kwargs_, X, y, None
)
assert opt_rule_strings == exp_rule_strings
def test_optimise_rules_weighted(_instantiate, _create_inputs, _create_data):
X, y, sample_weight = _create_data
exp_rule_strings = {
'integer': "(X['A']>4.5)",
'float': "(X['C']>0.5056366460650756)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>4.544030630550376)&(X['C']>0.5062001821281391)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.5059645205154579)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>0.5056366460650756)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>0.5001660795697812)&(X['AllNa']>0.0)",
'float_with_all_na_is_na': "(X['C']>0.5056366460650756)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.0003641365574362787)&(X['ZeroVar']>=1.0))|((X['A']>6.537012970447561)&(X['ZeroVar']>=1.0))"
}
ro = _instantiate
rule_lambdas, lambda_kwargs = _create_inputs
rules_to_drop = [
'missing_col', 'categoric', 'boolean', 'all_na', 'zero_var'
]
rule_lambdas_ = {
rule_name: rule_lambda for rule_name, rule_lambda in rule_lambdas.items() if rule_name not in rules_to_drop}
lambda_kwargs_ = {
rule_name: lambda_kwarg for rule_name, lambda_kwarg in lambda_kwargs.items() if rule_name not in rules_to_drop}
opt_rule_strings = ro._optimise_rules(
rule_lambdas_, lambda_kwargs_, X, y, sample_weight)
assert opt_rule_strings == exp_rule_strings
def test_optimise_rules_unlabelled(_instantiate_unlabelled, _create_inputs, _create_data):
X, _, _ = _create_data
exp_rule_strings = {
'integer': "(X['A']>4.5)",
'float': "(X['C']>0.9999680225821261)",
'is_na': "(X['A']>4.5)|(X['A'].isna())",
'mixed': "((X['A']>7.775353965311879)&(X['C']>0.9884794975944149)&(X['E']=='yes')&(X['D']==True))|(X['C']>0.9884919578072813)",
'already_optimal': "(X['A']>=4.5)",
'float_with_zero_var': "(X['C']>0.999968022582126)&(X['ZeroVar']>=1.0)",
'float_with_all_na_greater': "(X['C']>0.5001660795697812)&(X['AllNa']>0.0)",
'float_with_all_na_is_na': "(X['C']>0.9999680225821261)&(X['AllNa'].isna())",
'multi_zero_var': "((X['C']>0.9999680225821261)&(X['ZeroVar']>=1.0))|((X['A']>4.70320544242861)&(X['ZeroVar']>=1.0))"
}
ro = _instantiate_unlabelled
rule_lambdas, lambda_kwargs = _create_inputs
rules_to_drop = [
'missing_col', 'categoric', 'boolean', 'all_na', 'zero_var'
]
rule_lambdas_ = {
rule_name: rule_lambda for rule_name, rule_lambda in rule_lambdas.items() if rule_name not in rules_to_drop}
lambda_kwargs_ = {
rule_name: lambda_kwarg for rule_name, lambda_kwarg in lambda_kwargs.items() if rule_name not in rules_to_drop}
opt_rule_strings = ro._optimise_rules(
rule_lambdas_, lambda_kwargs_, X, None, None
)
assert opt_rule_strings == exp_rule_strings
def test_optimise_rules_numpy(_instantiate, _create_data):
X, y, _ = _create_data
exp_rule_strings = {
'already_optimal': "(X['A'].to_numpy(na_value=np.nan)>=4.5)",
'float': "(X['C'].to_numpy(na_value=np.nan)>0.0003641365574362787)",
'integer': "(X['A'].to_numpy(na_value=np.nan)>4.5)",
'is_na': "(X['A'].to_numpy(na_value=np.nan)>4.5)|(pd.isna(X['A'].to_numpy(na_value=np.nan)))",
'mixed': "((X['A'].to_numpy(na_value=np.nan)>5.966666666666709)&(X['C'].to_numpy(na_value=np.nan)>0.6372486347111281)&(X['E'].to_numpy(na_value=np.nan)=='yes')&(X['D'].to_numpy(na_value=np.nan)==True))|(X['C'].to_numpy(na_value=np.nan)>0.0003641365574362787)"
}
ro = _instantiate
rules = Rules(rule_strings=exp_rule_strings)
rule_lambdas = rules.as_rule_lambdas(as_numpy=True, with_kwargs=True)
lambda_kwargs = rules.lambda_kwargs
opt_rule_strings = ro._optimise_rules(
rule_lambdas, lambda_kwargs, X, y, None
)
assert opt_rule_strings == exp_rule_strings
def test_optimise_single_rule(_create_inputs, _instantiate, _create_data):
rule_lambdas, lambda_kwargs = _create_inputs
X, y, _ = _create_data
exp_result = 'integer', "(X['A']>4.5)"
ro = _instantiate
rule_name = 'integer'
result = ro._optimise_single_rule(
rule_name=rule_name, rule_lambda=rule_lambdas[rule_name],
lambda_kwargs=lambda_kwargs, X=X, y=y, sample_weight=None
)
assert result == exp_result
def test_return_kwargs_for_minimize(_instantiate):
exp_kwargs = {
'x0': np.array([4.5]),
'jac': None,
'hess': None,
'hessp': None,
'bounds': [(0, 9)],
'constraints': (),
'tol': None,
'callback': None,
'options': None
}
ro = _instantiate
minimize_kwargs = ro._return_kwargs_for_minimize('integer')
assert minimize_kwargs == exp_kwargs
ro.x0 = None
ro.constraints = None
exp_kwargs['x0'] = np.array(
list(ro.orig_lambda_kwargs['integer'].values()))
exp_kwargs['constraints'] = ()
minimize_kwargs = ro._return_kwargs_for_minimize('integer')
assert minimize_kwargs == exp_kwargs
ro.options = []
with pytest.raises(TypeError, match='`options` must be a dictionary with each element aligning with a rule.'):
ro._return_kwargs_for_minimize('integer')
def test_return_opt_param_for_rule(_instantiate):
ro = _instantiate
param = ro._return_opt_param_for_rule('constraints', None, 'integer')
assert param == ()
param = ro._return_opt_param_for_rule('x0', None, 'integer')
assert param == np.array(list(ro.orig_lambda_kwargs['integer'].values()))
param = ro._return_opt_param_for_rule('tol', None, 'integer')
assert param is None
param = ro._return_opt_param_for_rule('tol', {'integer': 0.1}, 'integer')
assert param == 0.1
with pytest.raises(TypeError, match='`options` must be a dictionary with each element aligning with a rule.'):
ro._return_opt_param_for_rule('options', [], 'integer')
def test_param_base_calc(_instantiate, _create_data, _create_inputs):
def _bounds_func(X_min, X_max):
X_min = np.where(np.isnan(X_min), 0, X_min)
X_max = np.where(np.isnan(X_max), 0, X_max)
return list(zip(X_min, X_max))
exp_bounds = {
'integer': [(0.0, 9.0)],
'float': [(0.0003641365574362787, 0.9999680225821261)],
'is_na': [(0.0, 9.0)],
'mixed': [(0.0, 9.0),
(0.0003641365574362787, 0.9999680225821261),
(0.0003641365574362787, 0.9999680225821261)],
'all_na': [(0.0, 0.0)],
'zero_var': [(1.0, 1.0)],
'already_optimal': [(0.0, 9.0)],
'float_with_zero_var': [(0.0003641365574362787, 0.9999680225821261),
(1.0, 1.0)],
'float_with_all_na_greater': [(0.0003641365574362787, 0.9999680225821261),
(0.0, 0.0)],
'float_with_all_na_is_na': [(0.0003641365574362787, 0.9999680225821261)],
'multi_zero_var': [(0.0003641365574362787, 0.9999680225821261),
(1.0, 1.0),
(0.0, 9.0),
(1.0, 1.0)]
}
ro = _instantiate
X, _, _ = _create_data
_, lambda_kwargs = _create_inputs
with pytest.warns(UserWarning) as warnings:
bounds = ro._param_base_calc(
X, lambda_kwargs, 'bounds', _bounds_func
)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `bounds` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `bounds` for these rules" in warnings
assert bounds == exp_bounds
def test_errors(_create_data, _instantiate):
X, y, _ = _create_data
ro = _instantiate
with pytest.raises(TypeError, match='`X` must be a pandas.core.frame.DataFrame. Current type is list.'):
ro.fit(X=[], y=y)
with pytest.raises(TypeError, match='`y` must be a pandas.core.series.Series. Current type is list.'):
ro.fit(X=X, y=[])
with pytest.raises(TypeError, match='`sample_weight` must be a pandas.core.series.Series. Current type is list.'):
ro.fit(X=X, y=y, sample_weight=[])
X = pd.DataFrame({'ZeroVar': [0, 0, 0]})
y = pd.Series([0, 1, 0])
with pytest.raises(RulesNotOptimisedError, match='There are no optimisable rules in the set'):
with pytest.warns(RulesNotOptimisedWarning, match='Rules `zero_var` have all zero variance features based on the dataset `X` - unable to optimise these rules'):
ro.fit(X=X, y=y)
def _fit(rule_lambdas, lambda_kwargs, X, y, sample_weight, metric, exp_rule_strings,
exp_X_rules_mean, exp_orig_rule_performances,
exp_opt_rule_performances, x0, bounds, fit_transform=False):
methods = [
'Nelder-Mead',
'Powell',
'CG',
'BFGS',
'L-BFGS-B',
'TNC',
'COBYLA',
'SLSQP',
'trust-constr'
]
if x0:
with pytest.warns(UserWarning) as warnings:
x0 = DirectSearchOptimiser.create_x0(X, lambda_kwargs)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `x0` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `x0` for these rules" in warnings
if bounds:
with pytest.warns(UserWarning) as warnings:
bounds = DirectSearchOptimiser.create_bounds(X, lambda_kwargs)
warnings = [w.message.args[0] for w in warnings]
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to calculate `bounds` for these rules" in warnings
assert "Rules `missing_col` use features that are missing from `X` - unable to calculate `bounds` for these rules" in warnings
for method in methods:
with pytest.warns(RulesNotOptimisedWarning) as warnings:
ro = DirectSearchOptimiser(
rule_lambdas=rule_lambdas,
lambda_kwargs=lambda_kwargs,
metric=metric,
x0=x0,
bounds=bounds,
num_cores=2,
verbose=1,
method=method,
)
assert ro.__repr__() == 'DirectSearchOptimiser object with 14 rules to optimise'
if fit_transform:
X_rules = ro.fit_transform(
X=X, y=y, sample_weight=sample_weight)
else:
X_rules = ro.fit(X=X, y=y, sample_weight=sample_weight)
assert ro.__repr__() == 'DirectSearchOptimiser object with 9 optimised rules and 2 unoptimisable rules'
assert ro.rule_strings == ro.rules.rule_strings == exp_rule_strings[method]
assert ro.rule_names == list(exp_rule_strings[method].keys())
assert X_rules.mean().to_dict() == exp_X_rules_mean[method]
assert ro.orig_rule_performances == exp_orig_rule_performances
assert ro.opt_rule_performances == exp_opt_rule_performances[method]
assert ro.rule_names_missing_features == ['missing_col']
assert ro.rule_names_no_opt_conditions == ['categoric', 'boolean']
assert ro.rule_names_zero_var_features == ['all_na', 'zero_var']
# Assert warnings
warnings = [w.message.args[0] for w in warnings]
assert "Rules `missing_col` use features that are missing from `X` - unable to optimise or apply these rules" in warnings
assert "Rules `categoric`, `boolean` have no optimisable conditions - unable to optimise these rules" in warnings
assert "Rules `all_na`, `zero_var` have all zero variance features based on the dataset `X` - unable to optimise these rules" in warnings
| [
"pandas.Series",
"iguanas.rule_optimisation.DirectSearchOptimiser.create_x0",
"numpy.testing.assert_array_almost_equal",
"iguanas.rule_optimisation.DirectSearchOptimiser",
"iguanas.rule_optimisation.DirectSearchOptimiser.create_initial_simplexes",
"iguanas.metrics.AlertsPerDay",
"iguanas.rules.Rules",
"numpy.random.randint",
"iguanas.metrics.FScore",
"numpy.random.seed",
"numpy.array",
"iguanas.rule_optimisation.DirectSearchOptimiser.create_bounds",
"pandas.DataFrame",
"pytest.raises",
"numpy.random.uniform",
"numpy.isnan",
"pytest.warns"
] | [((344, 361), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (358, 361), True, 'import numpy as np\n'), ((101761, 101775), 'iguanas.metrics.FScore', 'FScore', ([], {'beta': '(1)'}), '(beta=1)\n', (101767, 101775), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((102907, 102927), 'iguanas.metrics.AlertsPerDay', 'AlertsPerDay', (['(10)', '(10)'], {}), '(10, 10)\n', (102919, 102927), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((104082, 104091), 'iguanas.metrics.FScore', 'FScore', (['(1)'], {}), '(1)\n', (104088, 104091), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((104787, 104796), 'iguanas.metrics.FScore', 'FScore', (['(1)'], {}), '(1)\n', (104793, 104796), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((105633, 105642), 'iguanas.metrics.FScore', 'FScore', (['(1)'], {}), '(1)\n', (105639, 105642), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((106445, 106454), 'iguanas.metrics.FScore', 'FScore', (['(1)'], {}), '(1)\n', (106451, 106454), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((107409, 107418), 'iguanas.metrics.FScore', 'FScore', (['(1)'], {}), '(1)\n', (107415, 107418), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((108297, 108317), 'iguanas.metrics.AlertsPerDay', 'AlertsPerDay', (['(10)', '(10)'], {}), '(10, 10)\n', (108309, 108317), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((109282, 109302), 'iguanas.metrics.AlertsPerDay', 'AlertsPerDay', (['(10)', '(10)'], {}), '(10, 10)\n', (109294, 109302), False, 'from iguanas.metrics import FScore, AlertsPerDay\n'), ((109926, 109965), 'pandas.DataFrame', 'pd.DataFrame', (["{'A': [1, 2, 0, 1, 0, 2]}"], {}), "({'A': [1, 2, 0, 1, 0, 2]})\n", (109938, 109965), True, 'import pandas as pd\n'), ((109998, 110040), 'pandas.DataFrame', 'pd.DataFrame', (["{'Rule': [0, 1, 0, 0, 0, 1]}"], {}), "({'Rule': [0, 1, 0, 0, 0, 1]})\n", (110010, 110040), True, 'import pandas as pd\n'), ((128184, 128220), 'iguanas.rules.Rules', 'Rules', ([], {'rule_strings': 'exp_rule_strings'}), '(rule_strings=exp_rule_strings)\n', (128189, 128220), False, 'from iguanas.rules import Rules\n'), ((132832, 132868), 'pandas.DataFrame', 'pd.DataFrame', (["{'ZeroVar': [0, 0, 0]}"], {}), "({'ZeroVar': [0, 0, 0]})\n", (132844, 132868), True, 'import pandas as pd\n'), ((132877, 132897), 'pandas.Series', 'pd.Series', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (132886, 132897), True, 'import pandas as pd\n'), ((870, 900), 'numpy.random.randint', 'np.random.randint', (['(0)', '(2)', '(10001)'], {}), '(0, 2, 10001)\n', (887, 900), True, 'import numpy as np\n'), ((101785, 101810), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (101797, 101810), False, 'import pytest\n'), ((102937, 102962), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (102949, 102962), False, 'import pytest\n'), ((111243, 111268), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (111255, 111268), False, 'import pytest\n'), ((111299, 111352), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_bounds', 'DirectSearchOptimiser.create_bounds', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (111334, 111352), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((111803, 111818), 'numpy.array', 'np.array', (['[4.5]'], {}), '([4.5])\n', (111811, 111818), True, 'import numpy as np\n'), ((111837, 111859), 'numpy.array', 'np.array', (['[0.50016608]'], {}), '([0.50016608])\n', (111845, 111859), True, 'import numpy as np\n'), ((111878, 111893), 'numpy.array', 'np.array', (['[4.5]'], {}), '([4.5])\n', (111886, 111893), True, 'import numpy as np\n'), ((111912, 111951), 'numpy.array', 'np.array', (['[4.5, 0.50016608, 0.50016608]'], {}), '([4.5, 0.50016608, 0.50016608])\n', (111920, 111951), True, 'import numpy as np\n'), ((111971, 111986), 'numpy.array', 'np.array', (['[0.0]'], {}), '([0.0])\n', (111979, 111986), True, 'import numpy as np\n'), ((112007, 112022), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (112015, 112022), True, 'import numpy as np\n'), ((112050, 112065), 'numpy.array', 'np.array', (['[4.5]'], {}), '([4.5])\n', (112058, 112065), True, 'import numpy as np\n'), ((112098, 112125), 'numpy.array', 'np.array', (['[0.50016608, 1.0]'], {}), '([0.50016608, 1.0])\n', (112106, 112125), True, 'import numpy as np\n'), ((112163, 112190), 'numpy.array', 'np.array', (['[0.50016608, 0.0]'], {}), '([0.50016608, 0.0])\n', (112171, 112190), True, 'import numpy as np\n'), ((112226, 112248), 'numpy.array', 'np.array', (['[0.50016608]'], {}), '([0.50016608])\n', (112234, 112248), True, 'import numpy as np\n'), ((112276, 112313), 'numpy.array', 'np.array', (['[0.50016608, 1.0, 4.5, 1.0]'], {}), '([0.50016608, 1.0, 4.5, 1.0])\n', (112284, 112313), True, 'import numpy as np\n'), ((112392, 112417), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (112404, 112417), False, 'import pytest\n'), ((112444, 112493), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_x0', 'DirectSearchOptimiser.create_x0', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (112475, 112493), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((122754, 122866), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': '"""`shape` must be either "Origin-based", "Minimum-based" or "Random-based\\""""'}), '(ValueError, match=\n \'`shape` must be either "Origin-based", "Minimum-based" or "Random-based"\')\n', (122767, 122866), False, 'import pytest\n'), ((122896, 122969), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_initial_simplexes', 'DirectSearchOptimiser.create_initial_simplexes', (['X', 'lambda_kwargs', '"""ERROR"""'], {}), "(X, lambda_kwargs, 'ERROR')\n", (122942, 122969), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((129019, 129034), 'numpy.array', 'np.array', (['[4.5]'], {}), '([4.5])\n', (129027, 129034), True, 'import numpy as np\n'), ((129663, 129772), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""`options` must be a dictionary with each element aligning with a rule."""'}), "(TypeError, match=\n '`options` must be a dictionary with each element aligning with a rule.')\n", (129676, 129772), False, 'import pytest\n'), ((130335, 130444), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""`options` must be a dictionary with each element aligning with a rule."""'}), "(TypeError, match=\n '`options` must be a dictionary with each element aligning with a rule.')\n", (130348, 130444), False, 'import pytest\n'), ((131798, 131823), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (131810, 131823), False, 'import pytest\n'), ((132403, 132506), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""`X` must be a pandas.core.frame.DataFrame. Current type is list."""'}), "(TypeError, match=\n '`X` must be a pandas.core.frame.DataFrame. Current type is list.')\n", (132416, 132506), False, 'import pytest\n'), ((132538, 132639), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""`y` must be a pandas.core.series.Series. Current type is list."""'}), "(TypeError, match=\n '`y` must be a pandas.core.series.Series. Current type is list.')\n", (132551, 132639), False, 'import pytest\n'), ((132671, 132789), 'pytest.raises', 'pytest.raises', (['TypeError'], {'match': '"""`sample_weight` must be a pandas.core.series.Series. Current type is list."""'}), "(TypeError, match=\n '`sample_weight` must be a pandas.core.series.Series. Current type is list.'\n )\n", (132684, 132789), False, 'import pytest\n'), ((132907, 133000), 'pytest.raises', 'pytest.raises', (['RulesNotOptimisedError'], {'match': '"""There are no optimisable rules in the set"""'}), "(RulesNotOptimisedError, match=\n 'There are no optimisable rules in the set')\n", (132920, 133000), False, 'import pytest\n'), ((398, 429), 'numpy.random.randint', 'np.random.randint', (['(0)', '(10)', '(10000)'], {}), '(0, 10, 10000)\n', (415, 429), True, 'import numpy as np\n'), ((444, 476), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)', '(10000)'], {}), '(0, 100, 10000)\n', (461, 476), True, 'import numpy as np\n'), ((491, 521), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(1)', '(10000)'], {}), '(0, 1, 10000)\n', (508, 521), True, 'import numpy as np\n'), ((112542, 112612), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['x0[rule_name]', 'exp_x0[rule_name]'], {}), '(x0[rule_name], exp_x0[rule_name])\n', (112578, 112612), True, 'import numpy as np\n'), ((121953, 121978), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (121965, 121978), False, 'import pytest\n'), ((122022, 122093), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_initial_simplexes', 'DirectSearchOptimiser.create_initial_simplexes', (['X', 'lambda_kwargs', 'shape'], {}), '(X, lambda_kwargs, shape)\n', (122068, 122093), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((130639, 130654), 'numpy.isnan', 'np.isnan', (['X_min'], {}), '(X_min)\n', (130647, 130654), True, 'import numpy as np\n'), ((130691, 130706), 'numpy.isnan', 'np.isnan', (['X_max'], {}), '(X_max)\n', (130699, 130706), True, 'import numpy as np\n'), ((133010, 133174), 'pytest.warns', 'pytest.warns', (['RulesNotOptimisedWarning'], {'match': '"""Rules `zero_var` have all zero variance features based on the dataset `X` - unable to optimise these rules"""'}), "(RulesNotOptimisedWarning, match=\n 'Rules `zero_var` have all zero variance features based on the dataset `X` - unable to optimise these rules'\n )\n", (133022, 133174), False, 'import pytest\n'), ((133617, 133642), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (133629, 133642), False, 'import pytest\n'), ((133673, 133722), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_x0', 'DirectSearchOptimiser.create_x0', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (133704, 133722), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((134083, 134108), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (134095, 134108), False, 'import pytest\n'), ((134143, 134196), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_bounds', 'DirectSearchOptimiser.create_bounds', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (134178, 134196), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((134577, 134615), 'pytest.warns', 'pytest.warns', (['RulesNotOptimisedWarning'], {}), '(RulesNotOptimisedWarning)\n', (134589, 134615), False, 'import pytest\n'), ((134646, 134808), 'iguanas.rule_optimisation.DirectSearchOptimiser', 'DirectSearchOptimiser', ([], {'rule_lambdas': 'rule_lambdas', 'lambda_kwargs': 'lambda_kwargs', 'metric': 'metric', 'x0': 'x0', 'bounds': 'bounds', 'num_cores': '(2)', 'verbose': '(1)', 'method': 'method'}), '(rule_lambdas=rule_lambdas, lambda_kwargs=\n lambda_kwargs, metric=metric, x0=x0, bounds=bounds, num_cores=2,\n verbose=1, method=method)\n', (134667, 134808), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((101977, 102026), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_x0', 'DirectSearchOptimiser.create_x0', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (102008, 102026), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((102047, 102100), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_bounds', 'DirectSearchOptimiser.create_bounds', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (102082, 102100), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((103129, 103178), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_x0', 'DirectSearchOptimiser.create_x0', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (103160, 103178), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((103199, 103252), 'iguanas.rule_optimisation.DirectSearchOptimiser.create_bounds', 'DirectSearchOptimiser.create_bounds', (['X', 'lambda_kwargs'], {}), '(X, lambda_kwargs)\n', (103234, 103252), False, 'from iguanas.rule_optimisation import DirectSearchOptimiser\n'), ((113121, 113145), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (113129, 113145), True, 'import numpy as np\n'), ((113240, 113283), 'numpy.array', 'np.array', (['[[0.000364136557], [0.999968023]]'], {}), '([[0.000364136557], [0.999968023]])\n', (113248, 113283), True, 'import numpy as np\n'), ((113381, 113405), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (113389, 113405), True, 'import numpy as np\n'), ((113498, 113619), 'numpy.array', 'np.array', (['[[0.0, 0.000364136557, 0.000364136557], [9.0, 0.0, 0.0], [0.0, 0.999968023,\n 0.0], [0.0, 0.0, 0.999968023]]'], {}), '([[0.0, 0.000364136557, 0.000364136557], [9.0, 0.0, 0.0], [0.0, \n 0.999968023, 0.0], [0.0, 0.0, 0.999968023]])\n', (113506, 113619), True, 'import numpy as np\n'), ((114010, 114034), 'numpy.array', 'np.array', (['[[0.0], [0.0]]'], {}), '([[0.0], [0.0]])\n', (114018, 114034), True, 'import numpy as np\n'), ((114131, 114155), 'numpy.array', 'np.array', (['[[1.0], [1.0]]'], {}), '([[1.0], [1.0]])\n', (114139, 114155), True, 'import numpy as np\n'), ((114261, 114285), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (114269, 114285), True, 'import numpy as np\n'), ((114402, 114467), 'numpy.array', 'np.array', (['[[0.000364136557, 1.0], [0.999968023, 0.0], [0.0, 1.0]]'], {}), '([[0.000364136557, 1.0], [0.999968023, 0.0], [0.0, 1.0]])\n', (114410, 114467), True, 'import numpy as np\n'), ((114774, 114839), 'numpy.array', 'np.array', (['[[0.000364136557, 0.0], [0.999968023, 0.0], [0.0, 0.0]]'], {}), '([[0.000364136557, 0.0], [0.999968023, 0.0], [0.0, 0.0]])\n', (114782, 114839), True, 'import numpy as np\n'), ((115162, 115205), 'numpy.array', 'np.array', (['[[0.000364136557], [0.999968023]]'], {}), '([[0.000364136557], [0.999968023]])\n', (115170, 115205), True, 'import numpy as np\n'), ((115330, 115474), 'numpy.array', 'np.array', (['[[0.000364136557, 1.0, 0.0, 1.0], [0.999968023, 0.0, 0.0, 0.0], [0.0, 1.0, \n 0.0, 0.0], [0.0, 0.0, 9.0, 0.0], [0.0, 0.0, 0.0, 1.0]]'], {}), '([[0.000364136557, 1.0, 0.0, 1.0], [0.999968023, 0.0, 0.0, 0.0], [\n 0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 9.0, 0.0], [0.0, 0.0, 0.0, 1.0]])\n', (115338, 115474), True, 'import numpy as np\n'), ((116176, 116200), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (116184, 116200), True, 'import numpy as np\n'), ((116295, 116338), 'numpy.array', 'np.array', (['[[0.000364136557], [0.999968023]]'], {}), '([[0.000364136557], [0.999968023]])\n', (116303, 116338), True, 'import numpy as np\n'), ((116436, 116460), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (116444, 116460), True, 'import numpy as np\n'), ((116553, 116723), 'numpy.array', 'np.array', (['[[0.0, 0.000364136557, 0.000364136557], [9.0, 0.000364136557, \n 0.000364136557], [0.0, 0.999968023, 0.000364136557], [0.0, \n 0.000364136557, 0.999968023]]'], {}), '([[0.0, 0.000364136557, 0.000364136557], [9.0, 0.000364136557, \n 0.000364136557], [0.0, 0.999968023, 0.000364136557], [0.0, \n 0.000364136557, 0.999968023]])\n', (116561, 116723), True, 'import numpy as np\n'), ((117065, 117089), 'numpy.array', 'np.array', (['[[0.0], [0.0]]'], {}), '([[0.0], [0.0]])\n', (117073, 117089), True, 'import numpy as np\n'), ((117186, 117210), 'numpy.array', 'np.array', (['[[1.0], [1.0]]'], {}), '([[1.0], [1.0]])\n', (117194, 117210), True, 'import numpy as np\n'), ((117316, 117340), 'numpy.array', 'np.array', (['[[0.0], [9.0]]'], {}), '([[0.0], [9.0]])\n', (117324, 117340), True, 'import numpy as np\n'), ((117457, 117533), 'numpy.array', 'np.array', (['[[0.000364136557, 1.0], [0.999968023, 1.0], [0.000364136557, 1.0]]'], {}), '([[0.000364136557, 1.0], [0.999968023, 1.0], [0.000364136557, 1.0]])\n', (117465, 117533), True, 'import numpy as np\n'), ((117829, 117905), 'numpy.array', 'np.array', (['[[0.000364136557, 0.0], [0.999968023, 0.0], [0.000364136557, 0.0]]'], {}), '([[0.000364136557, 0.0], [0.999968023, 0.0], [0.000364136557, 0.0]])\n', (117837, 117905), True, 'import numpy as np\n'), ((118217, 118260), 'numpy.array', 'np.array', (['[[0.000364136557], [0.999968023]]'], {}), '([[0.000364136557], [0.999968023]])\n', (118225, 118260), True, 'import numpy as np\n'), ((118385, 118567), 'numpy.array', 'np.array', (['[[0.000364136557, 1.0, 0.0, 1.0], [0.999968023, 1.0, 0.0, 1.0], [\n 0.000364136557, 1.0, 0.0, 1.0], [0.000364136557, 1.0, 9.0, 1.0], [\n 0.000364136557, 1.0, 0.0, 1.0]]'], {}), '([[0.000364136557, 1.0, 0.0, 1.0], [0.999968023, 1.0, 0.0, 1.0], [\n 0.000364136557, 1.0, 0.0, 1.0], [0.000364136557, 1.0, 9.0, 1.0], [\n 0.000364136557, 1.0, 0.0, 1.0]])\n', (118393, 118567), True, 'import numpy as np\n'), ((119230, 119266), 'numpy.array', 'np.array', (['[[4.94426086], [6.443141]]'], {}), '([[4.94426086], [6.443141]])\n', (119238, 119266), True, 'import numpy as np\n'), ((119363, 119401), 'numpy.array', 'np.array', (['[[0.54950884], [0.71598511]]'], {}), '([[0.54950884], [0.71598511]])\n', (119371, 119401), True, 'import numpy as np\n'), ((119496, 119532), 'numpy.array', 'np.array', (['[[4.94426086], [6.443141]]'], {}), '([[4.94426086], [6.443141]])\n', (119504, 119532), True, 'import numpy as np\n'), ((119627, 119796), 'numpy.array', 'np.array', (['[[4.94426086, 0.42427461, 0.96460846], [6.443141, 0.64664804, 0.38403706],\n [5.43029526, 0.43821543, 0.79256697], [4.9088526, 0.89267531, 0.52957824]]'], {}), '([[4.94426086, 0.42427461, 0.96460846], [6.443141, 0.64664804, \n 0.38403706], [5.43029526, 0.43821543, 0.79256697], [4.9088526, \n 0.89267531, 0.52957824]])\n', (119635, 119796), True, 'import numpy as np\n'), ((120094, 120118), 'numpy.array', 'np.array', (['[[0.0], [0.0]]'], {}), '([[0.0], [0.0]])\n', (120102, 120118), True, 'import numpy as np\n'), ((120215, 120239), 'numpy.array', 'np.array', (['[[1.0], [1.0]]'], {}), '([[1.0], [1.0]])\n', (120223, 120239), True, 'import numpy as np\n'), ((120345, 120381), 'numpy.array', 'np.array', (['[[4.94426086], [6.443141]]'], {}), '([[4.94426086], [6.443141]])\n', (120353, 120381), True, 'import numpy as np\n'), ((120500, 120567), 'numpy.array', 'np.array', (['[[0.54950884, 1.0], [0.71598511, 1.0], [0.60349127, 1.0]]'], {}), '([[0.54950884, 1.0], [0.71598511, 1.0], [0.60349127, 1.0]])\n', (120508, 120567), True, 'import numpy as np\n'), ((120758, 120825), 'numpy.array', 'np.array', (['[[0.54950884, 0.0], [0.71598511, 0.0], [0.60349127, 0.0]]'], {}), '([[0.54950884, 0.0], [0.71598511, 0.0], [0.60349127, 0.0]])\n', (120766, 120825), True, 'import numpy as np\n'), ((121026, 121064), 'numpy.array', 'np.array', (['[[0.54950884], [0.71598511]]'], {}), '([[0.54950884], [0.71598511]])\n', (121034, 121064), True, 'import numpy as np\n'), ((121186, 121384), 'numpy.array', 'np.array', (['[[0.54950884, 1.0, 7.13265087, 1.0], [0.71598511, 1.0, 4.76481433, 1.0], [\n 0.60349127, 1.0, 5.11751345, 1.0], [0.54557615, 1.0, 8.33870011, 1.0],\n [0.42427461, 1.0, 0.63996385, 1.0]]'], {}), '([[0.54950884, 1.0, 7.13265087, 1.0], [0.71598511, 1.0, 4.76481433,\n 1.0], [0.60349127, 1.0, 5.11751345, 1.0], [0.54557615, 1.0, 8.33870011,\n 1.0], [0.42427461, 1.0, 0.63996385, 1.0]])\n', (121194, 121384), True, 'import numpy as np\n'), ((122193, 122333), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (["initial_simplex[rule_name]['initial_simplex']", "exp_simplexes[shape][rule_name]['initial_simplex']"], {}), "(initial_simplex[rule_name][\n 'initial_simplex'], exp_simplexes[shape][rule_name]['initial_simplex'])\n", (122229, 122333), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
import logging
import time
def Procesar():
print("Procesar")
trabajos=[]
try:
while True:
#print (".")
buscar=db(db.tbl_control_maestro.estado=='I').select()
for trabajo in buscar:
print("Asignado")
trabajo.update_record(estado="P")
db.commit()
print ('trabajo:', trabajo.args)
datos=eval(trabajo.args)
print (datos)
GenerarPDF(datos["archivo"],datos['modulo'])
trabajo.update_record(estado="F")
db.commit()
print ("Fin Generar PDF")
time.sleep(10)
db.commit()
except KeyboardInterrupt:
print ("finalizado. por ctl+c ")
if __name__=="__main__":
logging.basicConfig(level=logging.DEBUG, format='[%(levelname)s] (%(threadName)-10s) %(message)s',)
Procesar()
print ("fin ")
| [
"logging.basicConfig",
"time.sleep"
] | [((830, 933), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""[%(levelname)s] (%(threadName)-10s) %(message)s"""'}), "(level=logging.DEBUG, format=\n '[%(levelname)s] (%(threadName)-10s) %(message)s')\n", (849, 933), False, 'import logging\n'), ((688, 702), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (698, 702), False, 'import time\n')] |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.auth.decorators import login_required
from django.conf import settings
from scrum.views import *
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^$', login_required(ProjectListView.as_view()), name='projectlist'),
url(r'^sprint/(?P<pk>[0-9]+)/$', login_required(SprintView.as_view()), name='sprint'),
url(r'^sprint/(?P<pk>[0-9]+)/close/$', close_sprint, name='closesprint'),
url(r'^project/(?P<pk>[0-9]+)/$', login_required(WhiteBoardView.as_view()), name='project'),
url(r'^project/(?P<pk_project>[0-9]+)/update/$', login_required(update_project), name='updateproject'),
url(r'^project/(?P<pk_project>[0-9]+)/story/add/$', login_required(add_story), name='addstory'),
url(r'^project/[0-9]+/story/(?P<pk_story>[0-9]+)/$', login_required(update_story), name='updatestory'),
url(r'^project/(?P<pk_project>[0-9]+)/task/add/$', login_required(add_task), name='addtask'),
url(r'^project/[0-9]+/task/(?P<pk_task>[0-9]+)/$', login_required(update_task), name='updatetask'),
url(r'^project/[0-9]+/sprint-task/add/$', login_required(add_sprint_task), name='updatetask'),
url(r'^project/(?P<pk>[0-9]+)/sprint/add/$', login_required(add_sprint), name='sprintadd'),
url(r'^project/add/$', login_required(add_project), name='addproject'),
url(r'^task/(?P<pk_task>[0-9]+)/update-status/$', login_required(update_task), name='updatetaskstatus'),
url(r'^login/?$', 'django.contrib.auth.views.login', {'template_name': 'scrum/registration/login.html', }, name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', {'template_name': 'scrum/registration/logged_out.html', }, name='logout'),
)
urlpatterns += patterns('',
(r'^static/(.*)$', 'django.views.static.serve', {
'document_root': settings.STATIC_ROOT
}),
)
| [
"django.conf.urls.url",
"django.conf.urls.include",
"django.conf.urls.patterns",
"django.contrib.auth.decorators.login_required",
"django.contrib.admin.autodiscover"
] | [((205, 225), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (223, 225), False, 'from django.contrib import admin\n'), ((1824, 1930), 'django.conf.urls.patterns', 'patterns', (['""""""', "('^static/(.*)$', 'django.views.static.serve', {'document_root': settings.\n STATIC_ROOT})"], {}), "('', ('^static/(.*)$', 'django.views.static.serve', {\n 'document_root': settings.STATIC_ROOT}))\n", (1832, 1930), False, 'from django.conf.urls import patterns, include, url\n'), ((476, 547), 'django.conf.urls.url', 'url', (['"""^sprint/(?P<pk>[0-9]+)/close/$"""', 'close_sprint'], {'name': '"""closesprint"""'}), "('^sprint/(?P<pk>[0-9]+)/close/$', close_sprint, name='closesprint')\n", (479, 547), False, 'from django.conf.urls import patterns, include, url\n'), ((1551, 1672), 'django.conf.urls.url', 'url', (['"""^login/?$"""', '"""django.contrib.auth.views.login"""', "{'template_name': 'scrum/registration/login.html'}"], {'name': '"""login"""'}), "('^login/?$', 'django.contrib.auth.views.login', {'template_name':\n 'scrum/registration/login.html'}, name='login')\n", (1554, 1672), False, 'from django.conf.urls import patterns, include, url\n'), ((1677, 1805), 'django.conf.urls.url', 'url', (['"""^logout/$"""', '"""django.contrib.auth.views.logout"""', "{'template_name': 'scrum/registration/logged_out.html'}"], {'name': '"""logout"""'}), "('^logout/$', 'django.contrib.auth.views.logout', {'template_name':\n 'scrum/registration/logged_out.html'}, name='logout')\n", (1680, 1805), False, 'from django.conf.urls import patterns, include, url\n'), ((274, 298), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (281, 298), False, 'from django.conf.urls import patterns, include, url\n'), ((700, 730), 'django.contrib.auth.decorators.login_required', 'login_required', (['update_project'], {}), '(update_project)\n', (714, 730), False, 'from django.contrib.auth.decorators import login_required\n'), ((811, 836), 'django.contrib.auth.decorators.login_required', 'login_required', (['add_story'], {}), '(add_story)\n', (825, 836), False, 'from django.contrib.auth.decorators import login_required\n'), ((913, 941), 'django.contrib.auth.decorators.login_required', 'login_required', (['update_story'], {}), '(update_story)\n', (927, 941), False, 'from django.contrib.auth.decorators import login_required\n'), ((1019, 1043), 'django.contrib.auth.decorators.login_required', 'login_required', (['add_task'], {}), '(add_task)\n', (1033, 1043), False, 'from django.contrib.auth.decorators import login_required\n'), ((1117, 1144), 'django.contrib.auth.decorators.login_required', 'login_required', (['update_task'], {}), '(update_task)\n', (1131, 1144), False, 'from django.contrib.auth.decorators import login_required\n'), ((1212, 1243), 'django.contrib.auth.decorators.login_required', 'login_required', (['add_sprint_task'], {}), '(add_sprint_task)\n', (1226, 1243), False, 'from django.contrib.auth.decorators import login_required\n'), ((1314, 1340), 'django.contrib.auth.decorators.login_required', 'login_required', (['add_sprint'], {}), '(add_sprint)\n', (1328, 1340), False, 'from django.contrib.auth.decorators import login_required\n'), ((1388, 1415), 'django.contrib.auth.decorators.login_required', 'login_required', (['add_project'], {}), '(add_project)\n', (1402, 1415), False, 'from django.contrib.auth.decorators import login_required\n'), ((1491, 1518), 'django.contrib.auth.decorators.login_required', 'login_required', (['update_task'], {}), '(update_task)\n', (1505, 1518), False, 'from django.contrib.auth.decorators import login_required\n')] |
# Generated by Django 2.2.5 on 2019-11-11 07:01
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Aeon_Citadel', '0010_auto_20191111_1446'),
]
operations = [
migrations.RemoveField(
model_name='journey',
name='current_event',
),
]
| [
"django.db.migrations.RemoveField"
] | [((232, 298), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""journey"""', 'name': '"""current_event"""'}), "(model_name='journey', name='current_event')\n", (254, 298), False, 'from django.db import migrations\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\windows\models\gui\MainWindow.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QApplication
from models.processing.Process import Process
from models.gui.DialogFactory import DialogFactory
from models.visualizer.Visualizer import Visualizer
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.setEnabled(True)
MainWindow.resize(700, 500)
MainWindow.setMaximumSize(QtCore.QSize(700, 500))
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.gridLayoutWidget = QtWidgets.QWidget(self.centralwidget)
self.gridLayoutWidget.setGeometry(QtCore.QRect(40, 50, 601, 81))
self.gridLayoutWidget.setObjectName("gridLayoutWidget")
self.gridLayout = QtWidgets.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setHorizontalSpacing(10)
self.gridLayout.setVerticalSpacing(20)
self.gridLayout.setObjectName("gridLayout")
self.pushButton_search = QtWidgets.QPushButton(self.gridLayoutWidget)
self.pushButton_search.setObjectName("pushButton_search")
self.gridLayout.addWidget(self.pushButton_search, 1, 1, 1, 1)
self.label = QtWidgets.QLabel(self.gridLayoutWidget)
font = QtGui.QFont()
font.setPointSize(21)
self.label.setFont(font)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.lineEdit_path = QtWidgets.QLineEdit(self.gridLayoutWidget)
self.lineEdit_path.setReadOnly(True)
self.lineEdit_path.setObjectName("lineEdit_path")
self.gridLayout.addWidget(self.lineEdit_path, 1, 0, 1, 1)
self.pushButton_process = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_process.setGeometry(QtCore.QRect(40, 200, 301, 51))
self.pushButton_process.setObjectName("pushButton_process")
self.pushButton_show_raw = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_show_raw.setGeometry(QtCore.QRect(360, 200, 281, 23))
self.pushButton_show_raw.setObjectName("pushButton_show_raw")
self.pushButton_show_processed = QtWidgets.QPushButton(self.centralwidget)
self.pushButton_show_processed.setGeometry(QtCore.QRect(360, 230, 281, 23))
self.pushButton_show_processed.setObjectName("pushButton_show_processed")
self.textEdit_description = QtWidgets.QTextEdit(self.centralwidget)
self.textEdit_description.setGeometry(QtCore.QRect(40, 280, 601, 191))
self.textEdit_description.setReadOnly(True)
self.textEdit_description.setObjectName("textEdit_description")
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
# Edições manuais
self.pushButton_search.clicked.connect(self.select_file)
self.pushButton_process.clicked.connect(self.process_file)
self.pushButton_show_processed.clicked.connect(self.show_processed)
self.pushButton_show_raw.clicked.connect(self.show_raw)
# Fim edições manuais
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Reconhecimento Espacial Qualitativo"))
self.pushButton_search.setText(_translate("MainWindow", "Buscar"))
self.label.setText(_translate("MainWindow", "Escolha um arquivo:"))
self.pushButton_process.setText(_translate("MainWindow", "Processar Imagem"))
self.pushButton_show_raw.setText(_translate("MainWindow", "Mostrar imagem original"))
self.pushButton_show_processed.setText(_translate("MainWindow", "Mostrar imagem processada"))
def select_file(self):
dialog = QtWidgets.QFileDialog()
filename, _ = dialog.getOpenFileName(None,
"Escolha um arquivo",
"",
"Arquivo de imagem (*.jpg)");
self.lineEdit_path.setText(filename)
def process_file(self):
filename = self.lineEdit_path.text()
if filename == '':
file_not_selected = DialogFactory(title='Erro',
message='Nenhum arquivo selecionado',
type='critical',
ok_button=True)
file_not_selected.show()
file_not_selected.wait()
return
self.textEdit_description.append("Processando...")
QApplication.processEvents()
self.p = Process(filename)
description = self.p.run()
if description:
self.textEdit_description.append("Processado com sucesso")
self.textEdit_description.append(description)
else:
failed_dialog = DialogFactory(title='Erro',
message='Um erro desconhecido aconteceu!',
type='critical',
ok_button=True)
failed_dialog.show()
failed_dialog.wait()
return
def show_processed(self):
try:
self.p.show()
except Exception as e:
failed_dialog = DialogFactory(title='Erro',
message='Imagem não foi processada ainda!',
type='critical',
ok_button=True)
failed_dialog.show()
failed_dialog.wait()
def show_raw(self):
filename = self.lineEdit_path.text()
if filename == '':
file_not_selected = DialogFactory(title='Erro',
message='Nenhum arquivo selecionado',
type='critical',
ok_button=True)
file_not_selected.show()
file_not_selected.wait()
return
v = Visualizer(filename)
v.show()
| [
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QTextEdit",
"models.gui.DialogFactory.DialogFactory",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtGui.QFont",
"models.processing.Process.Process",
"PyQt5.QtCore.QMetaObject.connectSlotsByName",
"PyQt5.QtWidgets.QFileDialog",
"models.visualizer.Visualizer.Visualizer",
"PyQt5.QtCore.QRect",
"PyQt5.QtWidgets.QGridLayout",
"PyQt5.QtWidgets.QStatusBar",
"PyQt5.QtWidgets.QLabel",
"PyQt5.QtWidgets.QApplication.processEvents",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtCore.QSize"
] | [((818, 847), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (835, 847), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((938, 975), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['self.centralwidget'], {}), '(self.centralwidget)\n', (955, 975), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1139, 1183), 'PyQt5.QtWidgets.QGridLayout', 'QtWidgets.QGridLayout', (['self.gridLayoutWidget'], {}), '(self.gridLayoutWidget)\n', (1160, 1183), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1420, 1464), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.gridLayoutWidget'], {}), '(self.gridLayoutWidget)\n', (1441, 1464), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1622, 1661), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.gridLayoutWidget'], {}), '(self.gridLayoutWidget)\n', (1638, 1661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1677, 1690), 'PyQt5.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (1688, 1690), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1883, 1925), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.gridLayoutWidget'], {}), '(self.gridLayoutWidget)\n', (1902, 1925), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2129, 2170), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2150, 2170), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2350, 2391), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2371, 2391), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2581, 2622), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2602, 2622), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2825, 2864), 'PyQt5.QtWidgets.QTextEdit', 'QtWidgets.QTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2844, 2864), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3149, 3181), 'PyQt5.QtWidgets.QStatusBar', 'QtWidgets.QStatusBar', (['MainWindow'], {}), '(MainWindow)\n', (3169, 3181), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3665, 3714), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (3702, 3714), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4389, 4412), 'PyQt5.QtWidgets.QFileDialog', 'QtWidgets.QFileDialog', ([], {}), '()\n', (4410, 4412), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((5241, 5269), 'PyQt5.QtWidgets.QApplication.processEvents', 'QApplication.processEvents', ([], {}), '()\n', (5267, 5269), False, 'from PyQt5.QtWidgets import QApplication\n'), ((5296, 5313), 'models.processing.Process.Process', 'Process', (['filename'], {}), '(filename)\n', (5303, 5313), False, 'from models.processing.Process import Process\n'), ((6807, 6827), 'models.visualizer.Visualizer.Visualizer', 'Visualizer', (['filename'], {}), '(filename)\n', (6817, 6827), False, 'from models.visualizer.Visualizer import Visualizer\n'), ((765, 787), 'PyQt5.QtCore.QSize', 'QtCore.QSize', (['(700)', '(500)'], {}), '(700, 500)\n', (777, 787), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1018, 1047), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(50)', '(601)', '(81)'], {}), '(40, 50, 601, 81)\n', (1030, 1047), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2215, 2245), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(200)', '(301)', '(51)'], {}), '(40, 200, 301, 51)\n', (2227, 2245), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2437, 2468), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(360)', '(200)', '(281)', '(23)'], {}), '(360, 200, 281, 23)\n', (2449, 2468), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2674, 2705), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(360)', '(230)', '(281)', '(23)'], {}), '(360, 230, 281, 23)\n', (2686, 2705), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2911, 2942), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(40)', '(280)', '(601)', '(191)'], {}), '(40, 280, 601, 191)\n', (2923, 2942), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((4835, 4938), 'models.gui.DialogFactory.DialogFactory', 'DialogFactory', ([], {'title': '"""Erro"""', 'message': '"""Nenhum arquivo selecionado"""', 'type': '"""critical"""', 'ok_button': '(True)'}), "(title='Erro', message='Nenhum arquivo selecionado', type=\n 'critical', ok_button=True)\n", (4848, 4938), False, 'from models.gui.DialogFactory import DialogFactory\n'), ((5566, 5674), 'models.gui.DialogFactory.DialogFactory', 'DialogFactory', ([], {'title': '"""Erro"""', 'message': '"""Um erro desconhecido aconteceu!"""', 'type': '"""critical"""', 'ok_button': '(True)'}), "(title='Erro', message='Um erro desconhecido aconteceu!', type\n ='critical', ok_button=True)\n", (5579, 5674), False, 'from models.gui.DialogFactory import DialogFactory\n'), ((6456, 6559), 'models.gui.DialogFactory.DialogFactory', 'DialogFactory', ([], {'title': '"""Erro"""', 'message': '"""Nenhum arquivo selecionado"""', 'type': '"""critical"""', 'ok_button': '(True)'}), "(title='Erro', message='Nenhum arquivo selecionado', type=\n 'critical', ok_button=True)\n", (6469, 6559), False, 'from models.gui.DialogFactory import DialogFactory\n'), ((6018, 6126), 'models.gui.DialogFactory.DialogFactory', 'DialogFactory', ([], {'title': '"""Erro"""', 'message': '"""Imagem não foi processada ainda!"""', 'type': '"""critical"""', 'ok_button': '(True)'}), "(title='Erro', message='Imagem não foi processada ainda!',\n type='critical', ok_button=True)\n", (6031, 6126), False, 'from models.gui.DialogFactory import DialogFactory\n')] |
from distutils.core import setup
from distutils.extension import Extension
from Cython.Build import cythonize
extensions = [
Extension("*", ["*.pyx"])
]
setup(
ext_modules=cythonize(extensions)
)
| [
"Cython.Build.cythonize",
"distutils.extension.Extension"
] | [((131, 156), 'distutils.extension.Extension', 'Extension', (['"""*"""', "['*.pyx']"], {}), "('*', ['*.pyx'])\n", (140, 156), False, 'from distutils.extension import Extension\n'), ((183, 204), 'Cython.Build.cythonize', 'cythonize', (['extensions'], {}), '(extensions)\n', (192, 204), False, 'from Cython.Build import cythonize\n')] |
# ___________________________________________________________________________
#
# Prescient
# Copyright 2020 National Technology & Engineering Solutions of Sandia, LLC
# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S.
# Government retains certain rights in this software.
# This software is distributed under the Revised BSD License.
# ___________________________________________________________________________
import math
def round_small_values(x, p=1e-6):
# Rounds values that are within (-1e-6, 1e-6) to 0.
try:
if math.fabs(x) < p:
return 0.0
return x
except:
raise RuntimeError("Utility function round_small_values failed on input=%s, p=%f" % (str(x), p))
| [
"math.fabs"
] | [((567, 579), 'math.fabs', 'math.fabs', (['x'], {}), '(x)\n', (576, 579), False, 'import math\n')] |
import requests
import bs4
import collections
WeatherReport = collections.namedtuple('WeatherReport',
'loc, condition, temp, scale')
def main():
print_the_header()
city = input('What city do you want the weather for?(London) ').lower()
html = get_html_from_web(city)
report = get_weather_from_html(html)
# display forecast
print('The weather in {} is {} and temperature is {} {}'.format(
report.loc,
report.condition,
report.temp,
report.scale
))
def print_the_header():
print('-------------------------------')
print(' WEATHER CLIENT APP')
print('-------------------------------')
def get_html_from_web(city):
url = f'https://www.wunderground.com/weather-forecast/sk/{city}'
response = requests.get(url)
# print(url)
# print(response.status_code)
return response.text
def get_weather_from_html(html):
soup = bs4.BeautifulSoup(html, 'html.parser')
# print(soup.prettify()) used for figuring out what the HTML looks like
loc = soup.find(id='location').find(class_='city-nav-header').get_text()
condition = soup.find(class_='small-3 columns').find(class_='wx-value').get_text()
temp = soup.find(id='curTemp').find(class_='wx-data').find(class_='wx-value').get_text()
scale = soup.find(id='curTemp').find(class_='wx-data').find(class_='wx-unit').get_text()
loc = cleanup_text(loc)
condition = cleanup_text(condition)
temp = cleanup_text(temp)
scale = cleanup_text(scale)
report = WeatherReport(loc=loc, condition=condition, temp=temp, scale=scale)
return report
def cleanup_text(text: str):
if not text:
return text
text = text.strip()
return text
if __name__ == '__main__':
main()
| [
"bs4.BeautifulSoup",
"collections.namedtuple",
"requests.get"
] | [((63, 133), 'collections.namedtuple', 'collections.namedtuple', (['"""WeatherReport"""', '"""loc, condition, temp, scale"""'], {}), "('WeatherReport', 'loc, condition, temp, scale')\n", (85, 133), False, 'import collections\n'), ((823, 840), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (835, 840), False, 'import requests\n'), ((964, 1002), 'bs4.BeautifulSoup', 'bs4.BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (981, 1002), False, 'import bs4\n')] |
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from django.conf import settings
from geonode import get_version
from geonode.catalogue import default_catalogue_backend
from django.contrib.sites.models import Site
from geonode.notifications_helper import has_notifications
from geonode.base.models import Configuration
def resource_urls(request):
"""Global values to pass to templates"""
site = Site.objects.get_current()
defaults = dict(
STATIC_URL=settings.STATIC_URL,
CATALOGUE_BASE_URL=default_catalogue_backend()['URL'],
ACCOUNT_OPEN_SIGNUP=settings.ACCOUNT_OPEN_SIGNUP,
ACCOUNT_APPROVAL_REQUIRED=settings.ACCOUNT_APPROVAL_REQUIRED,
VERSION=get_version(),
SITE_NAME=site.name,
SITE_DOMAIN=site.domain,
SITEURL=settings.SITEURL,
INSTALLED_APPS=settings.INSTALLED_APPS,
THEME_ACCOUNT_CONTACT_EMAIL=settings.THEME_ACCOUNT_CONTACT_EMAIL,
TINYMCE_DEFAULT_CONFIG=settings.TINYMCE_DEFAULT_CONFIG,
DEBUG_STATIC=getattr(
settings,
"DEBUG_STATIC",
False),
PROXY_URL=getattr(
settings,
'PROXY_URL',
'/proxy/?url='),
DISPLAY_SOCIAL=getattr(
settings,
'DISPLAY_SOCIAL',
False),
DISPLAY_COMMENTS=getattr(
settings,
'DISPLAY_COMMENTS',
False),
DISPLAY_RATINGS=getattr(
settings,
'DISPLAY_RATINGS',
False),
DISPLAY_WMS_LINKS=getattr(
settings,
'DISPLAY_WMS_LINKS',
True),
CREATE_LAYER=getattr(
settings,
'CREATE_LAYER',
True),
TWITTER_CARD=getattr(
settings,
'TWITTER_CARD',
False),
TWITTER_SITE=getattr(
settings,
'TWITTER_SITE',
'@GeoNode'),
TWITTER_HASHTAGS=getattr(
settings,
'TWITTER_HASHTAGS',
[]),
OPENGRAPH_ENABLED=getattr(
settings,
'OPENGRAPH_ENABLED',
False),
ADMIN_MODERATE_UPLOADS=getattr(
settings,
'ADMIN_MODERATE_UPLOADS',
False),
TOPICCATEGORY_MANDATORY=getattr(
settings,
'TOPICCATEGORY_MANDATORY',
False),
GROUP_MANDATORY_RESOURCES=getattr(
settings,
'GROUP_MANDATORY_RESOURCES',
False),
GROUP_PRIVATE_RESOURCES=getattr(
settings,
'GROUP_PRIVATE_RESOURCES',
False),
RESOURCE_PUBLISHING=getattr(
settings,
'RESOURCE_PUBLISHING',
False),
HAYSTACK_SEARCH=getattr(
settings,
'HAYSTACK_SEARCH',
False),
SKIP_PERMS_FILTER=getattr(
settings,
'SKIP_PERMS_FILTER',
False),
HAYSTACK_FACET_COUNTS=getattr(
settings,
'HAYSTACK_FACET_COUNTS',
False),
CLIENT_RESULTS_LIMIT=getattr(
settings,
'CLIENT_RESULTS_LIMIT',
10),
API_LIMIT_PER_PAGE=getattr(
settings,
'API_LIMIT_PER_PAGE',
20),
SRID_DETAIL=getattr(
settings,
'SRID',
dict()).get(
'DETAIL',
'never'),
LICENSES_ENABLED=getattr(
settings,
'LICENSES',
dict()).get(
'ENABLED',
False),
LICENSES_DETAIL=getattr(
settings,
'LICENSES',
dict()).get(
'DETAIL',
'never'),
LICENSES_METADATA=getattr(
settings,
'LICENSES',
dict()).get(
'METADATA',
'never'),
USE_GEOSERVER=getattr(settings, 'USE_GEOSERVER', False),
USE_NOTIFICATIONS=has_notifications,
USE_MONITORING='geonode.monitoring' in settings.INSTALLED_APPS and settings.MONITORING_ENABLED,
DEFAULT_ANONYMOUS_VIEW_PERMISSION=getattr(settings, 'DEFAULT_ANONYMOUS_VIEW_PERMISSION', False),
DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION=getattr(settings, 'DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION', False),
EXIF_ENABLED=getattr(
settings,
"EXIF_ENABLED",
False),
FAVORITE_ENABLED=getattr(
settings,
"FAVORITE_ENABLED",
False),
SEARCH_FILTERS=getattr(
settings,
'SEARCH_FILTERS',
False
),
THESAURI_FILTERS=[t['name'] for t in [settings.THESAURUS, ] if
t.get('filter')] if hasattr(settings, 'THESAURUS') else None,
MAP_CLIENT_USE_CROSS_ORIGIN_CREDENTIALS=getattr(
settings, 'MAP_CLIENT_USE_CROSS_ORIGIN_CREDENTIALS', False
),
SHOW_PROFILE_EMAIL=getattr(
settings,
"SHOW_PROFILE_EMAIL",
False
),
OGC_SERVER=getattr(settings, 'OGC_SERVER', None),
DELAYED_SECURITY_SIGNALS=getattr(settings, 'DELAYED_SECURITY_SIGNALS', False),
READ_ONLY_MODE=getattr(Configuration.load(), 'read_only', False)
)
return defaults
| [
"geonode.get_version",
"geonode.catalogue.default_catalogue_backend",
"django.contrib.sites.models.Site.objects.get_current",
"geonode.base.models.Configuration.load"
] | [((1196, 1222), 'django.contrib.sites.models.Site.objects.get_current', 'Site.objects.get_current', ([], {}), '()\n', (1220, 1222), False, 'from django.contrib.sites.models import Site\n'), ((1492, 1505), 'geonode.get_version', 'get_version', ([], {}), '()\n', (1503, 1505), False, 'from geonode import get_version\n'), ((1312, 1339), 'geonode.catalogue.default_catalogue_backend', 'default_catalogue_backend', ([], {}), '()\n', (1337, 1339), False, 'from geonode.catalogue import default_catalogue_backend\n'), ((6034, 6054), 'geonode.base.models.Configuration.load', 'Configuration.load', ([], {}), '()\n', (6052, 6054), False, 'from geonode.base.models import Configuration\n')] |
# -*- coding: utf-8 -*-
#
# Monad documentation build configuration file.
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../'))
sys.path.insert(0, PROJECT_DIR)
import monad
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.viewcode',
]
source_suffix = '.rst'
master_doc = 'index'
project = u'Monad'
copyright = u'2012-2015, <NAME>'
version = '%d.%d' % monad.__version__
release = monad.VERSION
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = 'agogo'
# use RTD new theme
RTD_NEW_THEME = True
htmlhelp_basename = 'Monaddoc'
latex_documents = [
('index', 'Monad.tex', u'Monad Documentation',
u'Philip Xu', 'manual'),
]
man_pages = [
('index', 'monad', u'Monad Documentation',
[u'Philip Xu'], 1)
]
texinfo_documents = [
('index', 'Monad', u'Monad Documentation',
u'Philip Xu', 'Monad', monad.__doc__,
'Miscellaneous'),
]
| [
"os.path.dirname",
"sys.path.insert"
] | [((174, 205), 'sys.path.insert', 'sys.path.insert', (['(0)', 'PROJECT_DIR'], {}), '(0, PROJECT_DIR)\n', (189, 205), False, 'import sys\n'), ((139, 164), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (154, 164), False, 'import os\n')] |
import argparse
from typing import Any, List, Sequence
import torch
from torch import nn
from cassle.distillers.base import base_distill_wrapper
from cassle.losses.barlow import barlow_loss_func
def decorrelative_distill_wrapper(Method=object):
class DecorrelativeDistillWrapper(base_distill_wrapper(Method)):
def __init__(
self,
distill_lamb: float,
distill_proj_hidden_dim: int,
distill_barlow_lamb: float,
distill_scale_loss: float,
**kwargs
):
super().__init__(**kwargs)
output_dim = kwargs["output_dim"]
self.distill_lamb = distill_lamb
self.distill_barlow_lamb = distill_barlow_lamb
self.distill_scale_loss = distill_scale_loss
self.distill_predictor = nn.Sequential(
nn.Linear(output_dim, distill_proj_hidden_dim),
nn.BatchNorm1d(distill_proj_hidden_dim),
nn.ReLU(),
nn.Linear(distill_proj_hidden_dim, output_dim),
)
@staticmethod
def add_model_specific_args(
parent_parser: argparse.ArgumentParser,
) -> argparse.ArgumentParser:
parser = parent_parser.add_argument_group("contrastive_distiller")
parser.add_argument("--distill_lamb", type=float, default=1)
parser.add_argument("--distill_proj_hidden_dim", type=int, default=2048)
parser.add_argument("--distill_barlow_lamb", type=float, default=5e-3)
parser.add_argument("--distill_scale_loss", type=float, default=0.1)
return parent_parser
@property
def learnable_params(self) -> List[dict]:
"""Adds distill predictor parameters to the parent's learnable parameters.
Returns:
List[dict]: list of learnable parameters.
"""
extra_learnable_params = [
{"params": self.distill_predictor.parameters()},
]
return super().learnable_params + extra_learnable_params
def training_step(self, batch: Sequence[Any], batch_idx: int) -> torch.Tensor:
out = super().training_step(batch, batch_idx)
z1, z2 = out["z"]
frozen_z1, frozen_z2 = out["frozen_z"]
p1 = self.distill_predictor(z1)
p2 = self.distill_predictor(z2)
distill_loss = (
barlow_loss_func(
p1,
frozen_z1,
lamb=self.distill_barlow_lamb,
scale_loss=self.distill_scale_loss,
)
+ barlow_loss_func(
p2,
frozen_z2,
lamb=self.distill_barlow_lamb,
scale_loss=self.distill_scale_loss,
)
) / 2
self.log(
"train_decorrelative_distill_loss", distill_loss, on_epoch=True, sync_dist=True
)
return out["loss"] + self.distill_lamb * distill_loss
return DecorrelativeDistillWrapper
| [
"torch.nn.ReLU",
"torch.nn.BatchNorm1d",
"torch.nn.Linear",
"cassle.losses.barlow.barlow_loss_func",
"cassle.distillers.base.base_distill_wrapper"
] | [((286, 314), 'cassle.distillers.base.base_distill_wrapper', 'base_distill_wrapper', (['Method'], {}), '(Method)\n', (306, 314), False, 'from cassle.distillers.base import base_distill_wrapper\n'), ((859, 905), 'torch.nn.Linear', 'nn.Linear', (['output_dim', 'distill_proj_hidden_dim'], {}), '(output_dim, distill_proj_hidden_dim)\n', (868, 905), False, 'from torch import nn\n'), ((923, 962), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', (['distill_proj_hidden_dim'], {}), '(distill_proj_hidden_dim)\n', (937, 962), False, 'from torch import nn\n'), ((980, 989), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (987, 989), False, 'from torch import nn\n'), ((1007, 1053), 'torch.nn.Linear', 'nn.Linear', (['distill_proj_hidden_dim', 'output_dim'], {}), '(distill_proj_hidden_dim, output_dim)\n', (1016, 1053), False, 'from torch import nn\n'), ((2457, 2560), 'cassle.losses.barlow.barlow_loss_func', 'barlow_loss_func', (['p1', 'frozen_z1'], {'lamb': 'self.distill_barlow_lamb', 'scale_loss': 'self.distill_scale_loss'}), '(p1, frozen_z1, lamb=self.distill_barlow_lamb, scale_loss=\n self.distill_scale_loss)\n', (2473, 2560), False, 'from cassle.losses.barlow import barlow_loss_func\n'), ((2673, 2776), 'cassle.losses.barlow.barlow_loss_func', 'barlow_loss_func', (['p2', 'frozen_z2'], {'lamb': 'self.distill_barlow_lamb', 'scale_loss': 'self.distill_scale_loss'}), '(p2, frozen_z2, lamb=self.distill_barlow_lamb, scale_loss=\n self.distill_scale_loss)\n', (2689, 2776), False, 'from cassle.losses.barlow import barlow_loss_func\n')] |
import copy
class PathManager(object):
def __init__(self):
self.debug=False #shows the path commands and the stack
# self.debug=True
self.path_stack=[]
# pops back to 'stop_at'
# then pops back one further and returns the track-ref of this track for replaying
# if stop-at is not found returns ''
def back_to(self,stop_at,):
if self.debug: print('pathmanager command - back_to: ',stop_at)
for page in self.path_stack:
if page[0] == stop_at:
break
else:
return ''
# found, so pop until we reach it
while self.path_stack[len(self.path_stack)-1][0] != stop_at:
self.path_stack.pop()
track_to_play = self.path_stack[len(self.path_stack)-1][0]
self.path_stack.pop()
return track_to_play
# pops back 'number' tracks or to 'stop_at' whichever is first
# then pops back one further and returns the track-ref of this track for replaying
# if stop-at is not found and everything is popped the stack is left empty and the first track is returned
def back_by(self,stop_at,back_by_text='1000'):
if self.debug: print('pathmanager command - back by: ',back_by_text,' or stop at: ',stop_at)
back_by=int(back_by_text)
count=0
while self.path_stack != []:
top = self.path_stack.pop()
if top[0] == stop_at or count == back_by-1:
break
count=count+1
# go back 1 if not empty
if self.path_stack != []:
top=self.path_stack.pop()
track_to_play = top[0]
if self.debug:
print(' removed for playing: ',track_to_play)
return track_to_play
def append(self,page):
if self.debug: print('pathmanager command - append: ',page)
self.path_stack.append([page])
def empty(self):
self.path_stack=[]
# sibling - just pop the media track so sibling is appended and can go back to page track
def pop_for_sibling(self):
if self.debug: print('pathmanger command - pop for sibling: ')
self.path_stack.pop()
def pretty_path(self):
path= '\nPath now is:'
for page in self.path_stack:
path += "\n " + page[0]
print()
return path
# *******************
# Extract links
# ***********************
def parse_links(self,links_text,allowed_list):
links=[]
lines = links_text.split('\n')
num_lines=0
for line in lines:
if line.strip() == "":
continue
num_lines+=1
error_text,link=self.parse_link(line,allowed_list)
if error_text != "":
return 'error',error_text,links
links.append(copy.deepcopy(link))
# print "\nreading"
# print links
return 'normal','',links
def parse_link(self,line,allowed_list):
fields = line.split()
if len(fields)<2 or len(fields)>3:
return "incorrect number of fields in command",['','','']
symbol=fields[0]
operation=fields[1]
if operation in allowed_list or operation[0:4] == 'omx-' or operation[0:6] == 'mplay-'or operation[0:5] == 'uzbl-':
if len(fields) == 3:
arg=fields[2]
else:
arg=''
return '',[symbol,operation,arg]
else:
return "unknown operation: "+operation,['','','']
def merge_links(self,current_links,track_links):
for track_link in track_links:
for link in current_links:
if track_link[0] == link[0]:
# link exists so overwrite
link[1]=track_link[1]
link[2]=track_link[2]
break
else:
# new link so append it
current_links.append(track_link)
# print "\n merging"
# print current_links
def find_link(self,symbol,links):
for link in links:
# print link
if symbol == link[0]:
return True,link[1],link[2]
return False,'',''
| [
"copy.deepcopy"
] | [((2876, 2895), 'copy.deepcopy', 'copy.deepcopy', (['link'], {}), '(link)\n', (2889, 2895), False, 'import copy\n')] |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nitro.resource.base.base_resource import base_resource
from nitro.resource.base.base_resource import base_response
from nitro.service.options import options
from nitro.exception.nitro_exception import nitro_exception
from nitro.util.nitro_util import nitro_util
class lsnstatic(base_resource) :
"""Configuration for static mapping resource."""
def __init__(self) :
self._name = ""
self._transportprotocol = ""
self._subscrip = ""
self._subscrport = 0
self._network6 = ""
self._td = 0
self._natip = ""
self._natport = 0
self._destip = ""
self._dsttd = 0
self._nattype = ""
self._status = ""
self.___count = 0
@property
def name(self) :
"""Name for the LSN static mapping entry. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Cannot be changed after the LSN group is created. The following requirement applies only to the NetScaler CLI: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "lsn static1" or 'lsn static1').<br/>Minimum length = 1<br/>Maximum length = 127."""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name for the LSN static mapping entry. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Cannot be changed after the LSN group is created. The following requirement applies only to the NetScaler CLI: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "lsn static1" or 'lsn static1').<br/>Minimum length = 1<br/>Maximum length = 127
:param name:
"""
try :
self._name = name
except Exception as e:
raise e
@property
def transportprotocol(self) :
"""Protocol for the LSN mapping entry.<br/>Possible values = TCP, UDP, ICMP."""
try :
return self._transportprotocol
except Exception as e:
raise e
@transportprotocol.setter
def transportprotocol(self, transportprotocol) :
"""Protocol for the LSN mapping entry.<br/>Possible values = TCP, UDP, ICMP
:param transportprotocol:
"""
try :
self._transportprotocol = transportprotocol
except Exception as e:
raise e
@property
def subscrip(self) :
"""IPv4(NAT44 & DS-Lite)/IPv6(NAT64) address of an LSN subscriber for the LSN static mapping entry."""
try :
return self._subscrip
except Exception as e:
raise e
@subscrip.setter
def subscrip(self, subscrip) :
"""IPv4(NAT44 & DS-Lite)/IPv6(NAT64) address of an LSN subscriber for the LSN static mapping entry.
:param subscrip:
"""
try :
self._subscrip = subscrip
except Exception as e:
raise e
@property
def subscrport(self) :
"""Port of the LSN subscriber for the LSN mapping entry.<br/>Maximum length = 65535."""
try :
return self._subscrport
except Exception as e:
raise e
@subscrport.setter
def subscrport(self, subscrport) :
"""Port of the LSN subscriber for the LSN mapping entry.<br/>Maximum length = 65535
:param subscrport:
"""
try :
self._subscrport = subscrport
except Exception as e:
raise e
@property
def network6(self) :
"""B4 address in DS-Lite setup.<br/>Minimum length = 1."""
try :
return self._network6
except Exception as e:
raise e
@network6.setter
def network6(self, network6) :
"""B4 address in DS-Lite setup.<br/>Minimum length = 1
:param network6:
"""
try :
self._network6 = network6
except Exception as e:
raise e
@property
def td(self) :
"""ID of the traffic domain to which the subscriber belongs.
If you do not specify an ID, the subscriber is assumed to be a part of the default traffic domain.<br/>Default value: 0<br/>Maximum length = 4094.
"""
try :
return self._td
except Exception as e:
raise e
@td.setter
def td(self, td) :
"""ID of the traffic domain to which the subscriber belongs.
If you do not specify an ID, the subscriber is assumed to be a part of the default traffic domain.<br/>Default value: 0<br/>Maximum length = 4094
:param td:
"""
try :
self._td = td
except Exception as e:
raise e
@property
def natip(self) :
"""IPv4 address, already existing on the NetScaler ADC as type LSN, to be used as NAT IP address for this mapping entry."""
try :
return self._natip
except Exception as e:
raise e
@natip.setter
def natip(self, natip) :
"""IPv4 address, already existing on the NetScaler ADC as type LSN, to be used as NAT IP address for this mapping entry.
:param natip:
"""
try :
self._natip = natip
except Exception as e:
raise e
@property
def natport(self) :
"""NAT port for this LSN mapping entry."""
try :
return self._natport
except Exception as e:
raise e
@natport.setter
def natport(self, natport) :
"""NAT port for this LSN mapping entry.
:param natport:
"""
try :
self._natport = natport
except Exception as e:
raise e
@property
def destip(self) :
"""Destination IP address for the LSN mapping entry."""
try :
return self._destip
except Exception as e:
raise e
@destip.setter
def destip(self, destip) :
"""Destination IP address for the LSN mapping entry.
:param destip:
"""
try :
self._destip = destip
except Exception as e:
raise e
@property
def dsttd(self) :
"""ID of the traffic domain through which the destination IP address for this LSN mapping entry is reachable from the NetScaler ADC.
If you do not specify an ID, the destination IP address is assumed to be reachable through the default traffic domain, which has an ID of 0.<br/>Default value: 0<br/>Maximum length = 4094.
"""
try :
return self._dsttd
except Exception as e:
raise e
@dsttd.setter
def dsttd(self, dsttd) :
"""ID of the traffic domain through which the destination IP address for this LSN mapping entry is reachable from the NetScaler ADC.
If you do not specify an ID, the destination IP address is assumed to be reachable through the default traffic domain, which has an ID of 0.<br/>Default value: 0<br/>Maximum length = 4094
:param dsttd:
"""
try :
self._dsttd = dsttd
except Exception as e:
raise e
@property
def nattype(self) :
"""Type of sessions to be displayed.<br/>Possible values = NAT44, DS-Lite."""
try :
return self._nattype
except Exception as e:
raise e
@nattype.setter
def nattype(self, nattype) :
"""Type of sessions to be displayed.<br/>Possible values = NAT44, DS-Lite
:param nattype:
"""
try :
self._nattype = nattype
except Exception as e:
raise e
@property
def status(self) :
"""The status of the Mapping. Status could be Inactive, if mapping addition failed due to already existing dynamic/static mapping, port allocation failure.<br/>Possible values = ACTIVE, INACTIVE."""
try :
return self._status
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
"""converts nitro response into object and returns the object array in case of get request.
:param service:
:param response:
"""
try :
result = service.payload_formatter.string_to_resource(lsnstatic_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.lsnstatic
except Exception as e :
raise e
def _get_object_name(self) :
"""Returns the value of object identifier argument"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
"""Use this API to add lsnstatic.
:param client:
:param resource:
"""
try :
if type(resource) is not list :
addresource = lsnstatic()
addresource.name = resource.name
addresource.transportprotocol = resource.transportprotocol
addresource.subscrip = resource.subscrip
addresource.subscrport = resource.subscrport
addresource.network6 = resource.network6
addresource.td = resource.td
addresource.natip = resource.natip
addresource.natport = resource.natport
addresource.destip = resource.destip
addresource.dsttd = resource.dsttd
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ lsnstatic() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].name = resource[i].name
addresources[i].transportprotocol = resource[i].transportprotocol
addresources[i].subscrip = resource[i].subscrip
addresources[i].subscrport = resource[i].subscrport
addresources[i].network6 = resource[i].network6
addresources[i].td = resource[i].td
addresources[i].natip = resource[i].natip
addresources[i].natport = resource[i].natport
addresources[i].destip = resource[i].destip
addresources[i].dsttd = resource[i].dsttd
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
"""Use this API to delete lsnstatic.
:param client:
:param resource:
"""
try :
if type(resource) is not list :
deleteresource = lsnstatic()
if type(resource) != type(deleteresource):
deleteresource.name = resource
else :
deleteresource.name = resource.name
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ lsnstatic() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ lsnstatic() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
"""Use this API to fetch all the lsnstatic resources that are configured on netscaler.
:param client:
:param name: (Default value = "")
:param option_: (Default value = "")
"""
try :
if not name :
obj = lsnstatic()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = lsnstatic()
obj.name = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [lsnstatic() for _ in range(len(name))]
obj = [lsnstatic() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = lsnstatic()
obj[i].name = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_args(cls, client, args) :
"""Use this API to fetch all the lsnstatic resources that are configured on netscaler.
# This uses lsnstatic_args which is a way to provide additional arguments while fetching the resources.
:param client:
:param args:
"""
try :
obj = lsnstatic()
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(args)
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
"""Use this API to fetch filtered set of lsnstatic resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
:param client:
:param filter_:
"""
try :
obj = lsnstatic()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
"""Use this API to count the lsnstatic resources configured on NetScaler.
:param client:
"""
try :
obj = lsnstatic()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
"""Use this API to count filtered the set of lsnstatic resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
:param client:
:param filter_:
"""
try :
obj = lsnstatic()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class Transportprotocol:
""" """
TCP = "TCP"
UDP = "UDP"
ICMP = "ICMP"
class Status:
""" """
ACTIVE = "ACTIVE"
INACTIVE = "INACTIVE"
class Nattype:
""" """
NAT44 = "NAT44"
DS_Lite = "DS-Lite"
class lsnstatic_response(base_response) :
""" """
def __init__(self, length=1) :
self.lsnstatic = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.lsnstatic = [lsnstatic() for _ in range(length)]
| [
"nitro.util.nitro_util.nitro_util.object_to_string_withoutquotes",
"nitro.service.options.options"
] | [((15012, 15021), 'nitro.service.options.options', 'options', ([], {}), '()\n', (15019, 15021), False, 'from nitro.service.options import options\n'), ((15049, 15096), 'nitro.util.nitro_util.nitro_util.object_to_string_withoutquotes', 'nitro_util.object_to_string_withoutquotes', (['args'], {}), '(args)\n', (15090, 15096), False, 'from nitro.util.nitro_util import nitro_util\n'), ((15577, 15586), 'nitro.service.options.options', 'options', ([], {}), '()\n', (15584, 15586), False, 'from nitro.service.options import options\n'), ((15994, 16003), 'nitro.service.options.options', 'options', ([], {}), '()\n', (16001, 16003), False, 'from nitro.service.options import options\n'), ((16597, 16606), 'nitro.service.options.options', 'options', ([], {}), '()\n', (16604, 16606), False, 'from nitro.service.options import options\n')] |
from setuptools import setup
import importlib
VERSION = '0.0.6'
NAME = 'packular'
setup(
name = NAME,
version = VERSION,
description = "JS/CSS/Partials packing and referencing",
long_description = importlib.import_module(NAME).__doc__,
license = 'BSD',
author = "<NAME>",
author_email = '<EMAIL>',
url = 'https://github.com/johaness/%s' % (NAME,),
zip_safe = True,
py_modules = [NAME,],
entry_points = {
'console_scripts': [
'%s=%s:main' % (NAME, NAME,),
],
},
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
],
)
| [
"importlib.import_module"
] | [((231, 260), 'importlib.import_module', 'importlib.import_module', (['NAME'], {}), '(NAME)\n', (254, 260), False, 'import importlib\n')] |
"""# Wired JS Button
See https://github.com/rough-stuff/wired-elements/blob/master/docs/wired-button.md
"""
import param
from ...shared.widgets.button import ButtonBase
from .wired_widget import WiredWidget
class WiredButton(WiredWidget, ButtonBase): # pylint: disable=too-many-ancestors
"""# Wired JS Button
See https://github.com/rough-stuff/wired-elements/blob/master/docs/wired-button.md"""
elevation = param.Integer(default=1, bounds=(1, 5))
_template = """
<wired-button id="component" onclick="${script('click')}">${name}</wired-button>
"""
__javascript_modules__ = [
"https://unpkg.com/[email protected]/lib/wired-button.js?module"
]
_scripts = {
**ButtonBase._scripts,
"render": ButtonBase._scripts["render"]
+ "\n"
+ """
component.elevation=data.elevation
""",
"elevation": "component.elevation=data.elevation",
}
@classmethod
def example(cls):
return cls(name="Run Pipeline", tooltip="Trains the model", button_type="primary")
| [
"param.Integer"
] | [((441, 480), 'param.Integer', 'param.Integer', ([], {'default': '(1)', 'bounds': '(1, 5)'}), '(default=1, bounds=(1, 5))\n', (454, 480), False, 'import param\n')] |
import os
import numpy as np
import matplotlib.pyplot as plt
import cv2
import json
import glob
from Mask.config import Config
import Mask.utils as utils
import Mask.model as modellib
import Mask.visualize as visualize
from coco import CocoConfig
np.set_printoptions(threshold=np.inf)
dir_path = os.path.dirname(os.path.realpath(__file__))
MODEL_DIR = dir_path + "/models/"
MODEL_PATH = input("Insert the path of your trained model [ Like models/moles.../mask_rcnn_moles_0090.h5 ]: ")
if os.path.isfile(MODEL_PATH) == False:
raise Exception(MODEL_PATH + " Does not exists")
path_data = input("Insert the path of Data [ Link /home/../ISIC-Archive-Downloader/Data/ ] : ")
if not os.path.exists(path_data):
raise Exception(path_data + " Does not exists")
config = CocoConfig()
model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=config)
model.load_weights(MODEL_PATH, by_name=True)
class_names = ["BG", "malignant", "benign"]
all_desc_path = glob.glob(path_data + "Descriptions/ISIC_*")
for filename in os.listdir(path_data+"Descriptions/"):
data = json.load(open(path_data+"/Descriptions/"+filename))
img = cv2.imread(path_data+"Images/"+filename+".jpeg")
img = cv2.resize(img, (128, 128))
## ground truth of the class
print(data["meta"]["clinical"]["benign_malignant"])
r = model.detect([img])[0]
visualize.display_instances(img, r['rois'], r['masks'], r['class_ids'],
class_names, r['scores']) | [
"os.path.exists",
"os.listdir",
"Mask.model.MaskRCNN",
"os.path.realpath",
"os.path.isfile",
"coco.CocoConfig",
"Mask.visualize.display_instances",
"cv2.resize",
"cv2.imread",
"glob.glob",
"numpy.set_printoptions"
] | [((248, 285), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.inf'}), '(threshold=np.inf)\n', (267, 285), True, 'import numpy as np\n'), ((773, 785), 'coco.CocoConfig', 'CocoConfig', ([], {}), '()\n', (783, 785), False, 'from coco import CocoConfig\n'), ((795, 866), 'Mask.model.MaskRCNN', 'modellib.MaskRCNN', ([], {'mode': '"""inference"""', 'model_dir': 'MODEL_DIR', 'config': 'config'}), "(mode='inference', model_dir=MODEL_DIR, config=config)\n", (812, 866), True, 'import Mask.model as modellib\n'), ((974, 1018), 'glob.glob', 'glob.glob', (["(path_data + 'Descriptions/ISIC_*')"], {}), "(path_data + 'Descriptions/ISIC_*')\n", (983, 1018), False, 'import glob\n'), ((1035, 1074), 'os.listdir', 'os.listdir', (["(path_data + 'Descriptions/')"], {}), "(path_data + 'Descriptions/')\n", (1045, 1074), False, 'import os\n'), ((314, 340), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (330, 340), False, 'import os\n'), ((490, 516), 'os.path.isfile', 'os.path.isfile', (['MODEL_PATH'], {}), '(MODEL_PATH)\n', (504, 516), False, 'import os\n'), ((684, 709), 'os.path.exists', 'os.path.exists', (['path_data'], {}), '(path_data)\n', (698, 709), False, 'import os\n'), ((1148, 1202), 'cv2.imread', 'cv2.imread', (["(path_data + 'Images/' + filename + '.jpeg')"], {}), "(path_data + 'Images/' + filename + '.jpeg')\n", (1158, 1202), False, 'import cv2\n'), ((1207, 1234), 'cv2.resize', 'cv2.resize', (['img', '(128, 128)'], {}), '(img, (128, 128))\n', (1217, 1234), False, 'import cv2\n'), ((1365, 1466), 'Mask.visualize.display_instances', 'visualize.display_instances', (['img', "r['rois']", "r['masks']", "r['class_ids']", 'class_names', "r['scores']"], {}), "(img, r['rois'], r['masks'], r['class_ids'],\n class_names, r['scores'])\n", (1392, 1466), True, 'import Mask.visualize as visualize\n')] |
import click
import pandas as pd
import math
import numpy as np
import re
import itertools
from datetime import datetime,date
from sklearn.preprocessing import OneHotEncoder,LabelBinarizer,LabelEncoder,MinMaxScaler
import logging
from data.preprocessing import load_file,save_file,translate_df
from features.tools import create_encoder
import settings
@click.command()
def main():
""" Build features for classification and prediction models
"""
try:
logger = settings.get_logger(__name__)
logger.info("*** Build features for classification and prediction models ***")
logger.info("Load raw sales file...")
file_name = "p2_raw"
sales = load_file(file_name, index='Product')
#product description
logger.info("Load product description file...")
file_name = "product_7cerf.txt"
products = pd.read_csv(settings.raw_path+file_name, sep='\t',encoding="utf8")
products = products.drop_duplicates(["Key_lvl2","Description"]).set_index(["Key_lvl2"])
products.index.names = ["Product"]
sales_desc = products.join(sales,how="inner")[products.columns]
unbalanced = ["Key_lvl1","Description","Key_lvl7","Product Status"]
sales_desc.drop(unbalanced,axis=1,inplace=True)
en_sales_desc = translate_df(sales_desc,columns=["Key_lvl3","Key_lvl4","Key_lvl5","Key_lvl6"])
keep_features = ["Key_lvl3","Color","Size","Launch Date","Age Group","Sales Season","Tag Price"]
dataframe = en_sales_desc[keep_features].copy()
# add number of clients
logger.info("Load clients count file...")
p2c = load_file("p2c1_count",index="Product").astype(np.float64)
p2c.columns = ["Nstore"]
# store_counts = prp.load_file("store_counts",index="Product")
#add number of clients by week
logger.info("Load number of clients by week...")
p2cc = load_file("p1cc",index="Product").iloc[:,:5]
dataframe = dataframe.join(p2cc,how="left").fillna(0)
dataframe = dataframe.join(p2c,how="left").fillna(0)
dataframe["Missing"] = 0
features_list = ["Color","Size","Ldate","Age Group","Person","Pname","Ptype","Tprice","Currency","Sales Season"]+list(p2cc.columns)
raw_df = dataframe[dataframe.Nstore!=0.].copy()
logger.info("Feature engineering...")
numeric_features = ["Tprice"] + list(p2cc.columns)#Nstore
features = extract_features(raw_df, non_categorical =numeric_features)
features_df = features[features_list]
filename = "clf_features"
logger.info("==> Saving features file to <<%s>> ..."%filename)
save_file(features_df,filename,type_="P",index = True)
logger.info("Dataset %s succefully made !"%(features_df.shape[0]))
logger.info("Creating encoders...")
categorical_features = ["Color","Size","Age Group","Ldate","Person","Pname","Ptype","Currency","Sales Season"]
create_encoder(features_df,le_name="prd_le", ohe_name="prd_ohe", scaler_name="prd_scaler", categorical_features=categorical_features,numeric_features=numeric_features)
logger.info("Encoders created...")
except Exception as err:
logger.error(err)
def extract_features(rdf, non_categorical):
data_frame = rdf.copy()
data_frame["Person"] = data_frame["Key_lvl3"].map(lambda x: GetInfo(x,0))
data_frame["Pname"] = data_frame["Key_lvl3"].map(lambda x: GetInfo(x,1))
data_frame["Ptype"] = data_frame["Key_lvl3"].map(lambda x: GetInfo(x,2))
data_frame["Price"] = data_frame["Key_lvl3"].map(lambda x: GetInfo(x,3))
# season = rdf["Sales Season"].min()
# data_frame["Launch Date"] = data_frame["Launch Date"].map(lambda x: date_to_week(x,season)).astype(str)
data_frame["Ldate"] = data_frame["Launch Date"].map(lambda x: get_week_number(x)).astype(str)
data_frame.drop(["Key_lvl3"],axis=1,inplace = True)
data_frame["Tprice"] = data_frame["Tag Price"]
# data_frame["Age Group"] = data_frame["Age Group"].map(lambda x: _redefine_age(x))
for num_fearture in non_categorical:
values = data_frame[num_fearture].values.astype(np.float64).reshape((-1,1))
scaled = values# MinMaxScaler().fit_transform(values)
data_frame[num_fearture] = scaled.astype(np.float64)
data_frame["Currency"] = data_frame["Price"].map(lambda x: re.findall(r'[^\d\.]',x)[0] if (len(re.findall(r'[^\d\.]',x))>0) else "Y")
#missing values
data_frame.Person.fillna("Female")
data_frame.Pname.fillna("One-Piece Pants Inside")
#reduce colors:
data_frame.pipe(_reduce_colors)
# data_frame["Currency"] ="bella ciao"
return data_frame
def _reduce_size(s):
return {
"":""
}[s]
def _reduce_colors(df):
df.loc[df.Color.str.contains("Grey"),"Color"]="Grey"
df.loc[df.Color.str.contains("Gray"),"Color"]="Grey"
df.loc[df.Color.str.contains("Blue"),"Color"]="Blue"
df.loc[df.Color.str.contains("Cyan"),"Color"]="Blue"
df.loc[df.Color.str.contains("Navy"),"Color"]="Blue"
df.loc[df.Color.str.contains("Red"),"Color"]="Red"
df.loc[df.Color.str.contains("No Color"),"Color"]="Other"
df.loc[df.Color.str.contains("Green"),"Color"]="Green"
df.loc[df.Color.str.contains("Pink"),"Color"]="Pink"
df.loc[df.Color.str.contains("Purple"),"Color"]="Pink"
df.loc[df.Color.str.contains("Rose"),"Color"]="Pink"
df.loc[df.Color.str.contains("Pink"),"Color"]="Pink"
df.loc[df.Color.str.contains("Brown"),"Color"]="Brown"
df.loc[df.Color.str.contains("Cameo"),"Color"]="Brown"
df.loc[df.Color.str.contains("Coffee"),"Color"]="Brown"
df.loc[df.Color.str.contains("Sheer Beige"),"Color"]="Brown"
df.loc[df.Color.str.contains("Black"),"Color"]="Black"
return df
def _discretize_client(nb_client):
if nb_client>1000: return 7
if nb_client>500: return 6
if nb_client>150: return 5
if nb_client>50: return 4
if nb_client>10: return 3
if nb_client>5: return 2
return 1
def _get_price(s):
"""Get the price from the key_lvl3 using a regex
Arguments:
s {Key} -- The Key_lvl3
Returns:
str -- Returns the price tag + the currrency (etiher $ or (Y)uan)
"""
try:
regex = r"^[^\d\$]*(\$?\s?\d{1,3}\.?\d{0,2}\D{0,5}$)"
matches = re.findall(regex,s)
price = matches[0].replace(" ","").upper().replace("RMB","YUAN").replace("YUAN","Y").replace("%","Y").strip()
return price
except Exception as ex:
raise ex
def _first_week_of_season(season,year):
"""Returns the first week number of a given season
Arguments:
season {str} -- the season
year {int} -- the year to considerate
Returns:
int -- the week number
"""
return {
"Autumn":date(year,9,21).isocalendar()[1],
"Winter":date(year,12,21).isocalendar()[1],
"Spring":date(year,3,21).isocalendar()[1],
"Summer":date(year,6,21).isocalendar()[1]
}[season]
def date_to_week(d,season):
try:
the_date = datetime.strptime(d,"%d/%m/%Y")
first_week = _first_week_of_season(season,the_date.year)
if(d=="01/01/1900"): return 1
week_number = the_date.isocalendar()[1]
return (week_number - first_week)+1
except Exception as err:
print(err)
return d
def get_week_number(d):
the_date = datetime.strptime(d,"%d/%m/%Y")
return the_date.isocalendar()[1]
def GetInfo(key3,order,sep = " -"):
try:
splits = key3.split(sep)
if len(splits)<4:
if order == 3: res = _get_price(key3).strip()
if order == 2: res = "Thin"
if order == 0: res="Female"
if order == 1: res = "One-piece pants inside"
else:
if order == 3: res = _get_price(key3).strip()
else: res = splits[order].strip()
return str(_redefine_group(res)).title()
except Exception:
print("An error occured (%d,%s)"%(order,key3))
return None
def _redefine_group(key):
key = key.title()
dico = {
"Boy":"Boys",
"Pregnant Woman" : "Pregnant",
"Pregnant Women" : "Pregnant",
"Women" : "Female",
"Male" : "Men"
}
return dico[key] if key in dico else key
def _redefine_age(age):
dico ={
"4-6":"Young",
"7-9":"Young",
"10-15":"Young",
"18-28":"Adult",
"29-38":"Adult",
"39-48":"Senior"
}
return dico[age]
if __name__ == '__main__':
main() | [
"data.preprocessing.translate_df",
"pandas.read_csv",
"datetime.datetime.strptime",
"data.preprocessing.load_file",
"data.preprocessing.save_file",
"features.tools.create_encoder",
"settings.get_logger",
"datetime.date",
"re.findall",
"click.command"
] | [((357, 372), 'click.command', 'click.command', ([], {}), '()\n', (370, 372), False, 'import click\n'), ((7454, 7486), 'datetime.datetime.strptime', 'datetime.strptime', (['d', '"""%d/%m/%Y"""'], {}), "(d, '%d/%m/%Y')\n", (7471, 7486), False, 'from datetime import datetime, date\n'), ((483, 512), 'settings.get_logger', 'settings.get_logger', (['__name__'], {}), '(__name__)\n', (502, 512), False, 'import settings\n'), ((692, 729), 'data.preprocessing.load_file', 'load_file', (['file_name'], {'index': '"""Product"""'}), "(file_name, index='Product')\n", (701, 729), False, 'from data.preprocessing import load_file, save_file, translate_df\n'), ((875, 944), 'pandas.read_csv', 'pd.read_csv', (['(settings.raw_path + file_name)'], {'sep': '"""\t"""', 'encoding': '"""utf8"""'}), "(settings.raw_path + file_name, sep='\\t', encoding='utf8')\n", (886, 944), True, 'import pandas as pd\n'), ((1311, 1397), 'data.preprocessing.translate_df', 'translate_df', (['sales_desc'], {'columns': "['Key_lvl3', 'Key_lvl4', 'Key_lvl5', 'Key_lvl6']"}), "(sales_desc, columns=['Key_lvl3', 'Key_lvl4', 'Key_lvl5',\n 'Key_lvl6'])\n", (1323, 1397), False, 'from data.preprocessing import load_file, save_file, translate_df\n'), ((2696, 2751), 'data.preprocessing.save_file', 'save_file', (['features_df', 'filename'], {'type_': '"""P"""', 'index': '(True)'}), "(features_df, filename, type_='P', index=True)\n", (2705, 2751), False, 'from data.preprocessing import load_file, save_file, translate_df\n'), ((3001, 3178), 'features.tools.create_encoder', 'create_encoder', (['features_df'], {'le_name': '"""prd_le"""', 'ohe_name': '"""prd_ohe"""', 'scaler_name': '"""prd_scaler"""', 'categorical_features': 'categorical_features', 'numeric_features': 'numeric_features'}), "(features_df, le_name='prd_le', ohe_name='prd_ohe',\n scaler_name='prd_scaler', categorical_features=categorical_features,\n numeric_features=numeric_features)\n", (3015, 3178), False, 'from features.tools import create_encoder\n'), ((6371, 6391), 're.findall', 're.findall', (['regex', 's'], {}), '(regex, s)\n', (6381, 6391), False, 'import re\n'), ((7122, 7154), 'datetime.datetime.strptime', 'datetime.strptime', (['d', '"""%d/%m/%Y"""'], {}), "(d, '%d/%m/%Y')\n", (7139, 7154), False, 'from datetime import datetime, date\n'), ((1651, 1691), 'data.preprocessing.load_file', 'load_file', (['"""p2c1_count"""'], {'index': '"""Product"""'}), "('p2c1_count', index='Product')\n", (1660, 1691), False, 'from data.preprocessing import load_file, save_file, translate_df\n'), ((1926, 1960), 'data.preprocessing.load_file', 'load_file', (['"""p1cc"""'], {'index': '"""Product"""'}), "('p1cc', index='Product')\n", (1935, 1960), False, 'from data.preprocessing import load_file, save_file, translate_df\n'), ((4415, 4441), 're.findall', 're.findall', (['"""[^\\\\d\\\\.]"""', 'x'], {}), "('[^\\\\d\\\\.]', x)\n", (4425, 4441), False, 'import re\n'), ((4451, 4477), 're.findall', 're.findall', (['"""[^\\\\d\\\\.]"""', 'x'], {}), "('[^\\\\d\\\\.]', x)\n", (4461, 4477), False, 'import re\n'), ((6864, 6881), 'datetime.date', 'date', (['year', '(9)', '(21)'], {}), '(year, 9, 21)\n', (6868, 6881), False, 'from datetime import datetime, date\n'), ((6915, 6933), 'datetime.date', 'date', (['year', '(12)', '(21)'], {}), '(year, 12, 21)\n', (6919, 6933), False, 'from datetime import datetime, date\n'), ((6967, 6984), 'datetime.date', 'date', (['year', '(3)', '(21)'], {}), '(year, 3, 21)\n', (6971, 6984), False, 'from datetime import datetime, date\n'), ((7018, 7035), 'datetime.date', 'date', (['year', '(6)', '(21)'], {}), '(year, 6, 21)\n', (7022, 7035), False, 'from datetime import datetime, date\n')] |
#!/usr/bin/env python
# @author <NAME> / <NAME>
from model import Model
from tkcalendar import DateEntry
import datetime
class OrderScheduleModel(Model):
def __init__(self):
self.date_entries = {}
def load_month(self, month, year):
sql = 'SELECT order_id, start_date, end_date FROM order WHERE MONTH(start_date) = {} AND YEAR(start_date) = {}'
cursor = self.execute_sql(sql.format(month, year))
#date_entries = []
row = cursor.fetchone()
while row:
date = datetime.date(row[1])
message = self.__make_order_message(row[0], row[1], row[2])
entry = (date, message)
self.date_entries.append(entry)
def __make_order_message(self, order_id, start_date, end_date):
return "Order id: {}\nStart date: {}\nEnd date: {}".format(order_id, start_date, end_date)
def add_employee_to_event(self, date, name, surname):
get_order = 'SELECT order_id FROM "Order" WHERE start_date = \'{}\''
order_id_cur = self.execute_sql(get_order.format(date))
order_id = order_id_cur.fetchone()
get_employee = 'SELECT employee_id FROM employee WHERE name = \'{}\' AND surname = \'{}\''
employee_id_cur = self.execute_sql(get_employee.format(name, surname))
employee_id = employee_id_cur.fetchone()
sql = 'INSERT INTO employees_for_order VALUES ({}, {})'
self.execute_sql(sql.format(employee_id.employee_id, order_id.order_id)).commit()
def delete_order(self, date):
sql = 'DELETE FROM "Order" WHERE start_date = \'{}\''
self.execute_sql(sql.format(date)).commit
def show_order_info(self, date):
get_address = """SELECT address_id
FROM "Order"
WHERE start_date = \'{}\'"""
adress_id_cur = self.execute_sql(get_address.format(date))
adress_id = adress_id_cur.fetchone()
get_address_info = """SELECT a.postal_code, a.street_name, a.building_number, a.apartment_number, c.name, c.district
FROM address AS a
JOIN city AS c
ON (c.city_id = a.city_id)
WHERE a.address_id = {}"""
address_info_cur = self.execute_sql(get_address_info.format(adress_id[0]))
return address_info_cur
def get_date_and_type(self):
sql = """SELECT e.event_name, year(o.start_date) AS year, month(o.start_date) AS month, day(o.start_date) AS day
FROM event_type AS e
JOIN "Order" AS o
ON (o.event_type_id = e.event_type_id) """
cursor = self.execute_sql(sql)
return cursor | [
"datetime.date"
] | [((531, 552), 'datetime.date', 'datetime.date', (['row[1]'], {}), '(row[1])\n', (544, 552), False, 'import datetime\n')] |
"""
In this example we use the pysid library to estimate a SIMO armax model
"""
#Import Libraries
from numpy import concatenate, dot, zeros, sqrt
from numpy.random import rand, randn #To generate the experiment
from scipy.signal import lfilter #To generate the data
from pysid import armax #To estimate an arx model
#True System
#Number of inputs
nu = 1
#Number of outputs
ny = 2
#Orders
na = [[2, 2], [2, 2]] #This variable must be (ny x ny)
nb = [1, 1] #This variable must be (ny x nu)
nk = [1, 1] #This variable must be (ny x nu)
nc = [2, 2] #This variable must be (ny x 1)
#with the following true parameters
A1o = [1, -1.2, 0.36]
A12o = [0, 0.09, -0.1]
A2o = [1, -1.6, 0.64]
A21o = [0, 0.2, -0.01]
B1o = [0, 0.5, 0.4]
B2o = [0, 0.2,-0.3]
C1o = [1, 0.8,-0.1]
C2o = [1, 0.9,-0.2]
#True parameter vector
thetao = [-1.2, 0.36, 0.5, 0.4, 0.2, -0.3, 0.8, -0.1]
#Generate the experiment
#The true system is generates by the following relation:
# S: y(t) = Go(q)*u(t) + Ho(q)*e(t),
#with u(t) the input and e white noise.
#Number of Samples
N = 400
#Take u as uniform
u = -sqrt(3) + 2*sqrt(3)*rand(N, nu)
#Generate gaussian white noise with standat deviation 0.01
e = 0.01*randn(N, ny)
#Calculate the y through S (ARX: G(q) = B(q)/A(q) and H(q) = 1/A(q))
#Calculate the y through S (ARX: G(q) = B(q)/A(q) and H(q) = 1/A(q))
y1 = zeros((N, 1))
y2 = zeros((N, 1))
v1 = lfilter(C1o, [1], e[:,0:1], axis=0)
v2 = lfilter(C2o, [1], e[:,1:2], axis=0)
#Simulate the true process
for i in range(2, N):
y1[i] = -dot(A1o[1:3] ,y1[i-2:i][::-1]) - dot(A12o[1:3],y2[i-2:i][::-1]) + dot(B1o[1:3], u[i-2:i, 0][::-1])
y2[i] = -dot(A21o[1:3], y1[i-2:i][::-1]) - dot(A2o[1:3], y2[i-2:i][::-1]) + dot(B2o[1:3], u[i-2:i, 0][::-1])
y = concatenate((y1+v1, y2+v2), axis=1)
#Estimate the model and get only the parameters
A, B, C = armax(na, nb, nc, nk, u, y)
| [
"numpy.sqrt",
"numpy.random.rand",
"numpy.zeros",
"scipy.signal.lfilter",
"numpy.dot",
"numpy.concatenate",
"pysid.armax",
"numpy.random.randn"
] | [((1407, 1420), 'numpy.zeros', 'zeros', (['(N, 1)'], {}), '((N, 1))\n', (1412, 1420), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1426, 1439), 'numpy.zeros', 'zeros', (['(N, 1)'], {}), '((N, 1))\n', (1431, 1439), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1445, 1481), 'scipy.signal.lfilter', 'lfilter', (['C1o', '[1]', 'e[:, 0:1]'], {'axis': '(0)'}), '(C1o, [1], e[:, 0:1], axis=0)\n', (1452, 1481), False, 'from scipy.signal import lfilter\n'), ((1486, 1522), 'scipy.signal.lfilter', 'lfilter', (['C2o', '[1]', 'e[:, 1:2]'], {'axis': '(0)'}), '(C2o, [1], e[:, 1:2], axis=0)\n', (1493, 1522), False, 'from scipy.signal import lfilter\n'), ((1800, 1839), 'numpy.concatenate', 'concatenate', (['(y1 + v1, y2 + v2)'], {'axis': '(1)'}), '((y1 + v1, y2 + v2), axis=1)\n', (1811, 1839), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1894, 1921), 'pysid.armax', 'armax', (['na', 'nb', 'nc', 'nk', 'u', 'y'], {}), '(na, nb, nc, nk, u, y)\n', (1899, 1921), False, 'from pysid import armax\n'), ((1251, 1263), 'numpy.random.randn', 'randn', (['N', 'ny'], {}), '(N, ny)\n', (1256, 1263), False, 'from numpy.random import rand, randn\n'), ((1151, 1158), 'numpy.sqrt', 'sqrt', (['(3)'], {}), '(3)\n', (1155, 1158), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1171, 1182), 'numpy.random.rand', 'rand', (['N', 'nu'], {}), '(N, nu)\n', (1175, 1182), False, 'from numpy.random import rand, randn\n'), ((1650, 1684), 'numpy.dot', 'dot', (['B1o[1:3]', 'u[i - 2:i, 0][::-1]'], {}), '(B1o[1:3], u[i - 2:i, 0][::-1])\n', (1653, 1684), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1763, 1797), 'numpy.dot', 'dot', (['B2o[1:3]', 'u[i - 2:i, 0][::-1]'], {}), '(B2o[1:3], u[i - 2:i, 0][::-1])\n', (1766, 1797), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1163, 1170), 'numpy.sqrt', 'sqrt', (['(3)'], {}), '(3)\n', (1167, 1170), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1617, 1650), 'numpy.dot', 'dot', (['A12o[1:3]', 'y2[i - 2:i][::-1]'], {}), '(A12o[1:3], y2[i - 2:i][::-1])\n', (1620, 1650), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1730, 1762), 'numpy.dot', 'dot', (['A2o[1:3]', 'y2[i - 2:i][::-1]'], {}), '(A2o[1:3], y2[i - 2:i][::-1])\n', (1733, 1762), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1584, 1616), 'numpy.dot', 'dot', (['A1o[1:3]', 'y1[i - 2:i][::-1]'], {}), '(A1o[1:3], y1[i - 2:i][::-1])\n', (1587, 1616), False, 'from numpy import concatenate, dot, zeros, sqrt\n'), ((1696, 1729), 'numpy.dot', 'dot', (['A21o[1:3]', 'y1[i - 2:i][::-1]'], {}), '(A21o[1:3], y1[i - 2:i][::-1])\n', (1699, 1729), False, 'from numpy import concatenate, dot, zeros, sqrt\n')] |
# Generated by Django 2.2 on 2019-04-14 07:52
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0031_auto_20190412_2352'),
]
operations = [
migrations.RenameModel(
old_name='PyconKorea',
new_name='Schedule',
),
]
| [
"django.db.migrations.RenameModel"
] | [((221, 287), 'django.db.migrations.RenameModel', 'migrations.RenameModel', ([], {'old_name': '"""PyconKorea"""', 'new_name': '"""Schedule"""'}), "(old_name='PyconKorea', new_name='Schedule')\n", (243, 287), False, 'from django.db import migrations\n')] |
# -------------------------------------------------------------------------------
# Licence:
# Copyright (c) 2012-2021 <NAME>
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
#
# Name: rasterlike.py
# Purpose:
#
# Author: <NAME>, <NAME>
#
# Created: 16/06/2021
# -------------------------------------------------------------------------------
import os
import shutil
import site
import glob
import tempfile
import datetime
import numpy as np
from osgeo import osr, ogr
from osgeo import gdal, gdalconst
def now():
return datetime.datetime.now()
def total_seconds_from(t):
return (datetime.datetime.now() - t).total_seconds()
def printf( text, verbose = False):
"""
printf - print just if verbose
"""
if verbose:
print(f"{text}")
def done(text, t, verbose = False):
"""
done - print the total seconds elapsed from t
"""
if verbose:
seconds = total_seconds_from(t)
print(f"{text} done in {seconds}s.")
def normpath(pathname):
"""
normpath
"""
if not pathname:
return ""
return os.path.normpath(pathname.replace("\\", "/")).replace("\\", "/")
def justpath(pathname, n=1):
"""
justpath
"""
for j in range(n):
(pathname, _) = os.path.split(normpath(pathname))
if pathname == "":
return "."
return normpath(pathname)
def juststem(pathname):
"""
juststem
"""
pathname = os.path.basename(normpath(pathname))
(root, _) = os.path.splitext(pathname)
return root
def justext(pathname):
"""
justext
"""
pathname = os.path.basename(normpath(pathname))
(_, ext) = os.path.splitext(pathname)
return ext.lstrip(".")
def forceext(pathname, newext):
"""
forceext
"""
(root, _) = os.path.splitext(normpath(pathname))
pathname = root + ("." + newext if len(newext.strip()) > 0 else "")
return normpath(pathname)
def tempfilename(prefix="tmp", suffix=""):
"""
tempfilename
"""
return "%s/%s%s%s" % (tempfile.gettempdir(), prefix, datetime.datetime.now().timestamp(), suffix)
def mkdirs(pathname):
"""
mkdirs - create a folder
mkdirs("hello/world)
mkdirs("hello/world/file.tif) #file.tif must exists
"""
if not os.path.isdir(pathname):
try:
if os.path.isfile(pathname):
pathname = justpath(pathname)
os.makedirs(pathname)
except:
pass
return os.path.isdir(pathname)
return True
def __Numpy2GTiff__(arr, geotransform, projection, filename, save_nodata_as=-9999):
"""
__Numpy2GTiff__
"""
GDT = {
'uint8': gdal.GDT_Byte,
'uint16': gdal.GDT_UInt16,
'uint32': gdal.GDT_UInt32,
'int16': gdal.GDT_Int16,
'int32': gdal.GDT_Int32,
'float32': gdal.GDT_Float32,
'float64': gdal.GDT_Float64
}
if isinstance(arr, np.ndarray):
rows, cols = arr.shape
if rows > 0 and cols > 0:
dtype = str(arr.dtype).lower()
fmt = GDT[dtype] if dtype in GDT else gdal.GDT_Float64
mkdirs(justpath(filename))
CO = ["BIGTIFF=YES", "TILED=YES", "BLOCKXSIZE=256", "BLOCKYSIZE=256", 'COMPRESS=LZW']
driver = gdal.GetDriverByName("GTiff")
dataset = driver.Create(filename, cols, rows, 1, fmt, CO)
if (geotransform != None):
dataset.SetGeoTransform(geotransform)
if (projection != None):
dataset.SetProjection(projection)
dataset.GetRasterBand(1).SetNoDataValue(save_nodata_as)
dataset.GetRasterBand(1).WriteArray(arr)
dataset = None
return filename
return None
def SetGDALEnv():
"""
SetGDALEnv
"""
os.environ["__PROJ_LIB__"] = os.environ["PROJ_LIB"] if "PROJ_LIB" in os.environ else ""
os.environ["__GDAL_DATA__"] = os.environ["GDAL_DATA"] if "GDAL_DATA" in os.environ else ""
os.environ["PROJ_LIB"] = find_PROJ_LIB()
os.environ["GDAL_DATA"] = find_GDAL_DATA()
def RestoreGDALEnv():
"""
RestoreGDALEnv
"""
if "__PROJ_LIB__" in os.environ:
os.environ["PROJ_LIB"] = os.environ["__PROJ_LIB__"]
if "__GDAL_DATA__" in os.environ:
os.environ["GDAL_DATA"] = os.environ["__GDAL_DATA__"]
def find_PROJ_LIB():
"""
find_PROJ_LIB - the path of proj_lib
"""
pathnames = []
roots = site.getsitepackages()
for root in roots:
pathnames += glob.glob(root + "/osgeo/**/proj.db", recursive=True)
if len(pathnames):
break
return justpath(pathnames[0]) if len(pathnames) else ""
def find_GDAL_DATA():
"""
find_GDAL_DATA - the path of GDAL_DATA
"""
pathnames = []
roots = site.getsitepackages()
for root in roots:
pathnames += glob.glob(root + "/osgeo/**/gt_datum.csv", recursive=True)
if len(pathnames):
break
return justpath(pathnames[0]) if len(pathnames) else ""
def gdalwarp(filelist, fileout=None, dstSRS="", cutline="", cropToCutline=False, tap=False, multithread=False, pixelsize=(0, 0), verbose=False):
"""
gdalwarp
"""
filelist = [filelist] if isinstance(filelist,str) else filelist
fileout = fileout if fileout else tempfilename(suffix=".tif")
kwargs = {
"format": "GTiff",
"outputType": gdalconst.GDT_Float32,
"creationOptions": ["BIGTIFF=YES", "TILED=YES", "BLOCKXSIZE=256", "BLOCKYSIZE=256", "COMPRESS=LZW"],
"dstNodata": -9999,
"resampleAlg": gdalconst.GRIORA_Bilinear,
"multithread": multithread
}
if pixelsize[0] > 0 and pixelsize[1] > 0:
kwargs["xRes"] = pixelsize[0]
kwargs["yRes"] = pixelsize[1]
if dstSRS:
kwargs["dstSRS"] = dstSRS
if os.path.isfile(cutline):
kwargs["cropToCutline"] = cropToCutline
kwargs["cutlineDSName"] = cutline
kwargs["cutlineLayer"] = juststem(cutline)
# gdal.Warp depends on PROJ_LIB and GDAL_DATA --------------------------
# os.environ["PROJ_LIB"] = ..../site-packages/osgeo/data/proj
# patch PROJ_LIB - save it before and restore after gdalwarp
PROJ_LIB = os.environ["PROJ_LIB"] if "PROJ_LIB" in os.environ else ""
GDAL_DATA = os.environ["GDAL_DATA"] if "GDAL_DATA" in os.environ else ""
# print(find_PROJ_LIB())
os.environ["PROJ_LIB"] = find_PROJ_LIB()
# print(find_GDAL_DATA())
os.environ["GDAL_DATA"] = find_GDAL_DATA()
gdal.Warp(fileout, filelist, **kwargs)
if PROJ_LIB:
os.environ["PROJ_LIB"] = PROJ_LIB
if GDAL_DATA:
os.environ["GDAL_DATA"] = GDAL_DATA
# ----------------------------------------------------------------------
return fileout
def GetPixelSize(filename):
"""
GetPixelSize
"""
dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
if dataset:
gt = dataset.GetGeoTransform()
_, px, _, _, _, py = gt
dataset = None
return px, abs(py)
return 0, 0
def SamePixelSize(filename1, filename2, decimals=-1):
"""
SamePixelSize
"""
size1 = GetPixelSize(filename1)
size2 = GetPixelSize(filename2)
if decimals >= 0:
size1 = [round(item, decimals) for item in size1]
size2 = [round(item, decimals) for item in size2]
return size1 == size2
def GetEmptyLike(filename, dtype=np.float32, default=np.nan):
"""
GetMetadata
"""
dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
if dataset:
m, n = dataset.RasterYSize, dataset.RasterXSize
prj = dataset.GetProjection()
gt = dataset.GetGeoTransform()
dataset = None
res = np.empty( (m,n), dtype = dtype)
res.fill(default)
return res, gt, prj
return np.array([np.nan]), None, None
def GetArea(filename):
"""
GetArea
"""
dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
if dataset:
m, n = dataset.RasterYSize, dataset.RasterXSize
gt = dataset.GetGeoTransform()
_, px, _, _, _, py = gt
dataset = None
return m*n*px*abs(py)
return 0
def GetExtent(filename):
"""
GetExtent
"""
ext = justext(filename).lower()
if ext == "tif":
dataset = gdal.Open(filename, gdalconst.GA_ReadOnly)
if dataset:
"{xmin} {ymin} {xmax} {ymax}"
m, n = dataset.RasterYSize, dataset.RasterXSize
gt = dataset.GetGeoTransform()
xmin, px, _, ymax, _, py = gt
xmax = xmin + n * px
ymin = ymax + m * py
ymin, ymax = min(ymin, ymax), max(ymin, ymax)
dataset = None
return xmin, ymin, xmax, ymax
elif ext in ("shp", "dbf"):
filename = forceext(filename, "shp")
driver = ogr.GetDriverByName("ESRI Shapefile")
dataset = driver.Open(filename, 0)
if dataset:
layer = dataset.GetLayer()
extent = layer.GetExtent()
dataset = None
xmin, xmax, ymin, ymax = extent
return xmin, ymin, xmax, ymax
return 0, 0, 0, 0
def SameExtent(filename1, filename2, decimals=-1):
"""
SameExtent
"""
extent1 = GetExtent(filename1)
extent2 = GetExtent(filename2)
if decimals >= 0:
extent1 = [round(item, decimals) for item in extent1]
extent2 = [round(item, decimals) for item in extent2]
return extent1 == extent2
def GetSpatialRef(filename):
"""
GetSpatialRef
"""
if isinstance(filename, osr.SpatialReference):
srs = filename
elif isinstance(filename, int):
srs = osr.SpatialReference()
srs.ImportFromEPSG(filename)
elif isinstance(filename, str) and filename.lower().startswith("epsg:"):
code = int(filename.split(":")[1])
srs = osr.SpatialReference()
srs.ImportFromEPSG(code)
elif isinstance(filename, str) and os.path.isfile(filename) and filename.lower().endswith(".shp"):
ds = ogr.OpenShared(filename)
if ds:
srs = ds.GetLayer().GetSpatialRef()
ds = None
elif isinstance(filename, str) and os.path.isfile(filename) and filename.lower().endswith(".tif"):
ds = gdal.Open(filename, gdalconst.GA_ReadOnly)
if ds:
wkt = ds.GetProjection()
srs = osr.SpatialReference()
srs.ImportFromWkt(wkt)
ds = None
else:
srs = osr.SpatialReference()
return srs
def SameSpatialRef(filename1, filename2):
"""
SameSpatialRef
"""
srs1 = GetSpatialRef(filename1)
srs2 = GetSpatialRef(filename2)
if srs1 and srs2:
return srs1.IsSame(srs2)
return None
def Rectangle(minx, miny, maxx, maxy):
"""
Rectangle
"""
ring = ogr.Geometry(ogr.wkbLinearRing)
ring.AddPoint_2D(minx, miny)
ring.AddPoint_2D(maxx, miny)
ring.AddPoint_2D(maxx, maxy)
ring.AddPoint_2D(minx, maxy)
ring.AddPoint_2D(minx, miny)
# Create polygon
poly = ogr.Geometry(ogr.wkbPolygon)
poly.AddGeometry(ring)
return poly
def RectangleFromFileExtent(filename):
"""
RectangleFromFileExtent
"""
minx, miny, maxx, maxy = GetExtent(filename)
return Rectangle(minx, miny, maxx, maxy) if minx else None
def ShapeExtentFrom(filetif, fileshp=""):
"""
ShapeExtentFrom
"""
fileshp = fileshp if fileshp else tempfilename(prefix="rect_",suffix=".shp")
# Write rest to Shapefile
driver = ogr.GetDriverByName("ESRI Shapefile")
if os.path.exists(fileshp):
driver.DeleteDataSource(fileshp)
ds = driver.CreateDataSource(fileshp)
layer = ds.CreateLayer(fileshp, srs=GetSpatialRef(filetif), geom_type=ogr.wkbPolygon)
featureDefn = layer.GetLayerDefn()
feature = ogr.Feature(featureDefn)
rect = RectangleFromFileExtent(filetif)
feature.SetGeometry(rect)
layer.CreateFeature(feature)
feature, layer, ds = None, None, None
return fileshp
def RasterLike(filetif, filetpl, fileout=None, verbose=False):
"""
RasterLike: adatta un raster al raster template ( dem ) ricampionando, riproiettando estendendo/clippando il file raster se necessario.
"""
t0 = now()
if SameSpatialRef(filetif, filetpl) and SamePixelSize(filetif, filetpl, decimals=2) and SameExtent(filetif, filetpl, decimals=3):
fileout = fileout if fileout else tempfilename(suffix=".tif")
if fileout != filetif:
#Copy the file with the fileout name
shutil.copy2(filetif, fileout)
return fileout
return filetif
# Special case where filetif is bigger than filetpl so we make crop first an then resampling
if GetArea(filetif) >= 4 * GetArea(filetpl):
#1) Crop
printf("1) Crop...",verbose)
file_rect = ShapeExtentFrom(filetpl)
file_warp1 = gdalwarp(filetif, cutline=file_rect, cropToCutline=True, dstSRS=GetSpatialRef(filetpl))
#2) Resampling and refine the extent
printf("2) Resampling...",verbose)
fileout = gdalwarp(file_warp1, fileout, pixelsize=GetPixelSize(filetpl), cutline=file_rect, cropToCutline=True)
os.unlink(file_warp1)
os.unlink(file_rect)
done("gdalwarp",t0,verbose)
return fileout
printf("1) gdalwarp for resampling...",verbose)
file_warp1 = gdalwarp(filetif, dstSRS=GetSpatialRef(filetpl), pixelsize=GetPixelSize(filetpl))
#tif_minx, tif_miny, tif_maxx, tif_maxy = GetExtent(file_warp1)
#tpl_minx, tpl_miny, tpl_maxx, tpl_maxy = GetExtent(filetpl)
## create tif and template rectangles
## to detect intersections
#tif_rectangle = Rectangle(tif_minx, tif_miny, tif_maxx, tif_maxy)
#tpl_rectangle = Rectangle(tpl_minx, tpl_miny, tpl_maxx, tpl_maxy)
tif_rectangle = RectangleFromFileExtent(file_warp1)
tpl_rectangle = RectangleFromFileExtent(filetpl)
if tif_rectangle.Intersects(tpl_rectangle):
#file_rect = tempfilename(suffix=".shp")
#spatialRefSys = GetSpatialRef(filetpl)
#file_rect = CreateRectangleShape(tpl_minx, tpl_miny, tpl_maxx, tpl_maxy,srs=spatialRefSys,fileshp=file_rect)
file_rect = ShapeExtentFrom(filetpl)
printf("2) gdalwarp for crop...",verbose)
gdalwarp(file_warp1, fileout, cutline=file_rect, cropToCutline=True,
dstSRS=GetSpatialRef(filetpl), pixelsize=GetPixelSize(filetpl))
os.unlink(file_rect)
else:
#GDAL2Numpy cause access to disk
#wdata, gt, prj = GDAL2Numpy(filetpl, band=1, dtype=np.float32, load_nodata_as=np.nan)
#wdata.fill(np.nan)
#__Numpy2GTiff__(wdata, gt, prj, fileout)
wdata, gt, prj = GetEmptyLike(filetpl)
__Numpy2GTiff__(wdata, gt, prj, fileout)
os.unlink(file_warp1)
done("gdalwarp",t0,verbose)
return fileout if os.path.exists(fileout) else None
| [
"osgeo.gdal.Open",
"numpy.array",
"os.path.exists",
"osgeo.gdal.Warp",
"shutil.copy2",
"os.path.isdir",
"numpy.empty",
"os.unlink",
"glob.glob",
"osgeo.ogr.Geometry",
"os.path.splitext",
"os.path.isfile",
"osgeo.ogr.OpenShared",
"osgeo.ogr.Feature",
"os.makedirs",
"osgeo.osr.SpatialReference",
"datetime.datetime.now",
"tempfile.gettempdir",
"osgeo.ogr.GetDriverByName",
"site.getsitepackages",
"osgeo.gdal.GetDriverByName"
] | [((1107, 1130), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1128, 1130), False, 'import datetime\n'), ((2054, 2080), 'os.path.splitext', 'os.path.splitext', (['pathname'], {}), '(pathname)\n', (2070, 2080), False, 'import os\n'), ((2217, 2243), 'os.path.splitext', 'os.path.splitext', (['pathname'], {}), '(pathname)\n', (2233, 2243), False, 'import os\n'), ((4999, 5021), 'site.getsitepackages', 'site.getsitepackages', ([], {}), '()\n', (5019, 5021), False, 'import site\n'), ((5339, 5361), 'site.getsitepackages', 'site.getsitepackages', ([], {}), '()\n', (5359, 5361), False, 'import site\n'), ((6377, 6400), 'os.path.isfile', 'os.path.isfile', (['cutline'], {}), '(cutline)\n', (6391, 6400), False, 'import os\n'), ((7059, 7097), 'osgeo.gdal.Warp', 'gdal.Warp', (['fileout', 'filelist'], {}), '(fileout, filelist, **kwargs)\n', (7068, 7097), False, 'from osgeo import gdal, gdalconst\n'), ((7392, 7434), 'osgeo.gdal.Open', 'gdal.Open', (['filename', 'gdalconst.GA_ReadOnly'], {}), '(filename, gdalconst.GA_ReadOnly)\n', (7401, 7434), False, 'from osgeo import gdal, gdalconst\n'), ((8024, 8066), 'osgeo.gdal.Open', 'gdal.Open', (['filename', 'gdalconst.GA_ReadOnly'], {}), '(filename, gdalconst.GA_ReadOnly)\n', (8033, 8066), False, 'from osgeo import gdal, gdalconst\n'), ((8448, 8490), 'osgeo.gdal.Open', 'gdal.Open', (['filename', 'gdalconst.GA_ReadOnly'], {}), '(filename, gdalconst.GA_ReadOnly)\n', (8457, 8490), False, 'from osgeo import gdal, gdalconst\n'), ((11352, 11383), 'osgeo.ogr.Geometry', 'ogr.Geometry', (['ogr.wkbLinearRing'], {}), '(ogr.wkbLinearRing)\n', (11364, 11383), False, 'from osgeo import osr, ogr\n'), ((11581, 11609), 'osgeo.ogr.Geometry', 'ogr.Geometry', (['ogr.wkbPolygon'], {}), '(ogr.wkbPolygon)\n', (11593, 11609), False, 'from osgeo import osr, ogr\n'), ((12054, 12091), 'osgeo.ogr.GetDriverByName', 'ogr.GetDriverByName', (['"""ESRI Shapefile"""'], {}), "('ESRI Shapefile')\n", (12073, 12091), False, 'from osgeo import osr, ogr\n'), ((12099, 12122), 'os.path.exists', 'os.path.exists', (['fileshp'], {}), '(fileshp)\n', (12113, 12122), False, 'import os\n'), ((12350, 12374), 'osgeo.ogr.Feature', 'ogr.Feature', (['featureDefn'], {}), '(featureDefn)\n', (12361, 12374), False, 'from osgeo import osr, ogr\n'), ((15323, 15344), 'os.unlink', 'os.unlink', (['file_warp1'], {}), '(file_warp1)\n', (15332, 15344), False, 'import os\n'), ((2830, 2853), 'os.path.isdir', 'os.path.isdir', (['pathname'], {}), '(pathname)\n', (2843, 2853), False, 'import os\n'), ((3037, 3060), 'os.path.isdir', 'os.path.isdir', (['pathname'], {}), '(pathname)\n', (3050, 3060), False, 'import os\n'), ((5066, 5119), 'glob.glob', 'glob.glob', (["(root + '/osgeo/**/proj.db')"], {'recursive': '(True)'}), "(root + '/osgeo/**/proj.db', recursive=True)\n", (5075, 5119), False, 'import glob\n'), ((5406, 5464), 'glob.glob', 'glob.glob', (["(root + '/osgeo/**/gt_datum.csv')"], {'recursive': '(True)'}), "(root + '/osgeo/**/gt_datum.csv', recursive=True)\n", (5415, 5464), False, 'import glob\n'), ((8253, 8282), 'numpy.empty', 'np.empty', (['(m, n)'], {'dtype': 'dtype'}), '((m, n), dtype=dtype)\n', (8261, 8282), True, 'import numpy as np\n'), ((8350, 8368), 'numpy.array', 'np.array', (['[np.nan]'], {}), '([np.nan])\n', (8358, 8368), True, 'import numpy as np\n'), ((8832, 8874), 'osgeo.gdal.Open', 'gdal.Open', (['filename', 'gdalconst.GA_ReadOnly'], {}), '(filename, gdalconst.GA_ReadOnly)\n', (8841, 8874), False, 'from osgeo import gdal, gdalconst\n'), ((13728, 13749), 'os.unlink', 'os.unlink', (['file_warp1'], {}), '(file_warp1)\n', (13737, 13749), False, 'import os\n'), ((13758, 13778), 'os.unlink', 'os.unlink', (['file_rect'], {}), '(file_rect)\n', (13767, 13778), False, 'import os\n'), ((14977, 14997), 'os.unlink', 'os.unlink', (['file_rect'], {}), '(file_rect)\n', (14986, 14997), False, 'import os\n'), ((15399, 15422), 'os.path.exists', 'os.path.exists', (['fileout'], {}), '(fileout)\n', (15413, 15422), False, 'import os\n'), ((2593, 2614), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (2612, 2614), False, 'import tempfile\n'), ((2883, 2907), 'os.path.isfile', 'os.path.isfile', (['pathname'], {}), '(pathname)\n', (2897, 2907), False, 'import os\n'), ((2967, 2988), 'os.makedirs', 'os.makedirs', (['pathname'], {}), '(pathname)\n', (2978, 2988), False, 'import os\n'), ((3832, 3861), 'osgeo.gdal.GetDriverByName', 'gdal.GetDriverByName', (['"""GTiff"""'], {}), "('GTiff')\n", (3852, 3861), False, 'from osgeo import gdal, gdalconst\n'), ((9371, 9408), 'osgeo.ogr.GetDriverByName', 'ogr.GetDriverByName', (['"""ESRI Shapefile"""'], {}), "('ESRI Shapefile')\n", (9390, 9408), False, 'from osgeo import osr, ogr\n'), ((10206, 10228), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (10226, 10228), False, 'from osgeo import osr, ogr\n'), ((13075, 13105), 'shutil.copy2', 'shutil.copy2', (['filetif', 'fileout'], {}), '(filetif, fileout)\n', (13087, 13105), False, 'import shutil\n'), ((1171, 1194), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1192, 1194), False, 'import datetime\n'), ((10401, 10423), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (10421, 10423), False, 'from osgeo import osr, ogr\n'), ((2624, 2647), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2645, 2647), False, 'import datetime\n'), ((10497, 10521), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (10511, 10521), False, 'import os\n'), ((10574, 10598), 'osgeo.ogr.OpenShared', 'ogr.OpenShared', (['filename'], {}), '(filename)\n', (10588, 10598), False, 'from osgeo import osr, ogr\n'), ((10720, 10744), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (10734, 10744), False, 'import os\n'), ((10797, 10839), 'osgeo.gdal.Open', 'gdal.Open', (['filename', 'gdalconst.GA_ReadOnly'], {}), '(filename, gdalconst.GA_ReadOnly)\n', (10806, 10839), False, 'from osgeo import gdal, gdalconst\n'), ((11010, 11032), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (11030, 11032), False, 'from osgeo import osr, ogr\n'), ((10910, 10932), 'osgeo.osr.SpatialReference', 'osr.SpatialReference', ([], {}), '()\n', (10930, 10932), False, 'from osgeo import osr, ogr\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 24 13:10:27 2020
@author: Oliver
"""
import os
import numpy as np
import scipy.io
import pandas as pd
import matplotlib.pyplot as plt
from pathlib import Path
from scipy.signal import savgol_filter, find_peaks
database = pd.DataFrame(columns=['condition', 'name', 'ecg'])
from libraries.io import FileWizard
path1 = 'C:/Users/Oliver/Documents/FYP/code/database/MLII/'
fw = FileWizard()
database = fw.start(path1, database)
from libraries.noise_removal import BaselineNoiseRemover
# DC Notch filter to remove baseline noise from all signals
bnr = BaselineNoiseRemover(c = -0.99)
ecg_waves = database['ecg'].tolist()
ecg_filt = []
for wave in ecg_waves:
filt = bnr.fit(wave)
ecg_filt.append(filt)
database['ecg'] = pd.Series(ecg_filt)
from libraries.feature_extraction import LargeFrequencyExtractor
lfe = LargeFrequencyExtractor()
database = lfe.fit(database)
# Multilevel discrete decomposition to extract large frequencies from time series
from libraries.feature_extraction import PeakExtractor
LISTS2 = ['3 AFL', '4 AFIB', '5 SVTA', '6 WPW',
'7 PVC', '8 Bigeminy', '9 Trigeminy', '10 VT', '11 IVR',
'12 VFL', '13 Fusion', '14 LBBBB', '15 RBBBB', '16 SDHB', '17 PR']
for item in LISTS2:
database = database[database['condition'] != item]
thresh = 20
pe = PeakExtractor(c=thresh)
database = pe.fit(database)
examples = database[database['condition'] == '1 NSR']
example1 = examples.iloc[1]
peaks1 = example1['peaks']
position1 = example1['peak position']
ecg1 = example1['coefficient 4']
from libraries.feature_extraction import MidPointExtractor
mpe = MidPointExtractor()
database = mpe.fit(database)
ecg = database.iloc[0]
print(ecg['midpoints'])
qrs_db = pd.DataFrame(columns=['condition', 'name', 'ecg'])
for i in range(0, len(database)):
subject = database.iloc[i]
midpoint = subject['midpoints']
ecg = subject['coefficient 4']
condition = subject['condition']
name = subject['name']
x = midpoint[0,:]
for j in range(0, len(x)):
if j == 0:
wavelet = ecg[0:int(x[j])]
else:
wavelet = ecg[int(x[j - 1]):int(x[j])]
print(j, wavelet)
new_row = {'wavelet':wavelet, 'condition':condition, 'partof':name}
qrs_db = qrs_db.append(new_row, ignore_index=True)
examples = qrs_db[qrs_db['condition'] == '1 NSR']
for i in range(0, 50):
e1 = examples.loc[i]
plt.plot(e1['wavelet'])
e1 = qrs_db.loc[15]
e2 = qrs_db.loc[22]
e1w = e1['wavelet']
e2w = e2['wavelet']
heights = []
high, pos1 = find_peaks(e1w, height=20)
low, pos2 = find_peaks(-e1w, height=20)
height1 = (high - low)
pos1 = pos1['peak_heights']
pos1 = pos1[0]
pos2 = pos2['peak_heights']
pos2 = pos2[0]
height2 = (pos1 - pos2)
height = (high + (low * -1))
heights.append(height)
| [
"pandas.Series",
"libraries.feature_extraction.PeakExtractor",
"libraries.io.FileWizard",
"matplotlib.pyplot.plot",
"libraries.feature_extraction.MidPointExtractor",
"scipy.signal.find_peaks",
"pandas.DataFrame",
"libraries.feature_extraction.LargeFrequencyExtractor",
"libraries.noise_removal.BaselineNoiseRemover"
] | [((270, 320), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['condition', 'name', 'ecg']"}), "(columns=['condition', 'name', 'ecg'])\n", (282, 320), True, 'import pandas as pd\n'), ((425, 437), 'libraries.io.FileWizard', 'FileWizard', ([], {}), '()\n', (435, 437), False, 'from libraries.io import FileWizard\n'), ((605, 634), 'libraries.noise_removal.BaselineNoiseRemover', 'BaselineNoiseRemover', ([], {'c': '(-0.99)'}), '(c=-0.99)\n', (625, 634), False, 'from libraries.noise_removal import BaselineNoiseRemover\n'), ((783, 802), 'pandas.Series', 'pd.Series', (['ecg_filt'], {}), '(ecg_filt)\n', (792, 802), True, 'import pandas as pd\n'), ((876, 901), 'libraries.feature_extraction.LargeFrequencyExtractor', 'LargeFrequencyExtractor', ([], {}), '()\n', (899, 901), False, 'from libraries.feature_extraction import LargeFrequencyExtractor\n'), ((1356, 1379), 'libraries.feature_extraction.PeakExtractor', 'PeakExtractor', ([], {'c': 'thresh'}), '(c=thresh)\n', (1369, 1379), False, 'from libraries.feature_extraction import PeakExtractor\n'), ((1656, 1675), 'libraries.feature_extraction.MidPointExtractor', 'MidPointExtractor', ([], {}), '()\n', (1673, 1675), False, 'from libraries.feature_extraction import MidPointExtractor\n'), ((1763, 1813), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['condition', 'name', 'ecg']"}), "(columns=['condition', 'name', 'ecg'])\n", (1775, 1813), True, 'import pandas as pd\n'), ((2605, 2631), 'scipy.signal.find_peaks', 'find_peaks', (['e1w'], {'height': '(20)'}), '(e1w, height=20)\n', (2615, 2631), False, 'from scipy.signal import savgol_filter, find_peaks\n'), ((2644, 2671), 'scipy.signal.find_peaks', 'find_peaks', (['(-e1w)'], {'height': '(20)'}), '(-e1w, height=20)\n', (2654, 2671), False, 'from scipy.signal import savgol_filter, find_peaks\n'), ((2455, 2478), 'matplotlib.pyplot.plot', 'plt.plot', (["e1['wavelet']"], {}), "(e1['wavelet'])\n", (2463, 2478), True, 'import matplotlib.pyplot as plt\n')] |
import os
import hiero
from avalon import api
def file_extensions():
return [".hrox"]
def has_unsaved_changes():
# There are no methods for querying unsaved changes to a project, so
# enforcing to always save.
return True
def save_file(filepath):
project = hiero.core.projects()[-1]
if project:
project.saveAs(filepath)
else:
project = hiero.core.newProject()
project.saveAs(filepath)
def open_file(filepath):
hiero.core.openProject(filepath)
return True
def current_file():
current_file = hiero.core.projects()[-1].path()
normalised = os.path.normpath(current_file)
# Unsaved current file
if normalised == "":
return None
return normalised
def work_root():
return os.path.normpath(api.Session["AVALON_WORKDIR"]).replace("\\", "/")
| [
"os.path.normpath",
"hiero.core.newProject",
"hiero.core.openProject",
"hiero.core.projects"
] | [((476, 508), 'hiero.core.openProject', 'hiero.core.openProject', (['filepath'], {}), '(filepath)\n', (498, 508), False, 'import hiero\n'), ((616, 646), 'os.path.normpath', 'os.path.normpath', (['current_file'], {}), '(current_file)\n', (632, 646), False, 'import os\n'), ((285, 306), 'hiero.core.projects', 'hiero.core.projects', ([], {}), '()\n', (304, 306), False, 'import hiero\n'), ((388, 411), 'hiero.core.newProject', 'hiero.core.newProject', ([], {}), '()\n', (409, 411), False, 'import hiero\n'), ((773, 820), 'os.path.normpath', 'os.path.normpath', (["api.Session['AVALON_WORKDIR']"], {}), "(api.Session['AVALON_WORKDIR'])\n", (789, 820), False, 'import os\n'), ((566, 587), 'hiero.core.projects', 'hiero.core.projects', ([], {}), '()\n', (585, 587), False, 'import hiero\n')] |
# coding: utf-8
from fabric.api import run, local, env, settings, abort
from fabric.contrib.console import confirm
from taskset import TaskSet, task_method
from fab_deploy.utils import upload_config_template
class Hg(TaskSet):
BRANCH_OPTION = 'HG_BRANCH'
@task_method
def init(self):
run('hg init')
@task_method
def up(self, branch):
run('hg up -C ' + branch)
@task_method
def push(self, branch=None):
with settings(warn_only=True):
res = local('hg push ssh://%s/src/%s/ --new-branch' % (env.hosts[0], env.conf.INSTANCE_NAME))
if res.failed:
if not confirm("Error occured during push. Continue anyway?", default=False):
abort("Aborting.")
@task_method
def configure(self):
upload_config_template('hgrc', env.conf.SRC_DIR + '/.hg/hgrc',
skip_unexistent=True)
instance = Hg()
__all__ = instance.expose_to_current_module() | [
"fabric.contrib.console.confirm",
"fabric.api.run",
"fabric.api.settings",
"fab_deploy.utils.upload_config_template",
"fabric.api.abort",
"fabric.api.local"
] | [((307, 321), 'fabric.api.run', 'run', (['"""hg init"""'], {}), "('hg init')\n", (310, 321), False, 'from fabric.api import run, local, env, settings, abort\n'), ((374, 399), 'fabric.api.run', 'run', (["('hg up -C ' + branch)"], {}), "('hg up -C ' + branch)\n", (377, 399), False, 'from fabric.api import run, local, env, settings, abort\n'), ((807, 895), 'fab_deploy.utils.upload_config_template', 'upload_config_template', (['"""hgrc"""', "(env.conf.SRC_DIR + '/.hg/hgrc')"], {'skip_unexistent': '(True)'}), "('hgrc', env.conf.SRC_DIR + '/.hg/hgrc',\n skip_unexistent=True)\n", (829, 895), False, 'from fab_deploy.utils import upload_config_template\n'), ((464, 488), 'fabric.api.settings', 'settings', ([], {'warn_only': '(True)'}), '(warn_only=True)\n', (472, 488), False, 'from fabric.api import run, local, env, settings, abort\n'), ((508, 600), 'fabric.api.local', 'local', (["('hg push ssh://%s/src/%s/ --new-branch' % (env.hosts[0], env.conf.\n INSTANCE_NAME))"], {}), "('hg push ssh://%s/src/%s/ --new-branch' % (env.hosts[0], env.conf.\n INSTANCE_NAME))\n", (513, 600), False, 'from fabric.api import run, local, env, settings, abort\n'), ((646, 715), 'fabric.contrib.console.confirm', 'confirm', (['"""Error occured during push. Continue anyway?"""'], {'default': '(False)'}), "('Error occured during push. Continue anyway?', default=False)\n", (653, 715), False, 'from fabric.contrib.console import confirm\n'), ((737, 755), 'fabric.api.abort', 'abort', (['"""Aborting."""'], {}), "('Aborting.')\n", (742, 755), False, 'from fabric.api import run, local, env, settings, abort\n')] |
# saved_state.py
# <NAME>
# August 1, 2017
# Routines to save and restore some stage between audit stages.
import json
import os
import OpenAuditTool
import utils
def write_initial_saved_state(e):
"""
Write the first saved-state, after the election-spec has been read."
"""
initial_stage_time = "0000-00-00-00-00-00" # stage_time for initial saved-state
e.sn_tp[initial_stage_time] = {}
for pbcid in e.pbcids:
# no sampling done yet
e.sn_tp[initial_stage_time][pbcid] = 0
e.plan_tp[initial_stage_time] = {}
for pbcid in e.pbcids:
e.plan_tp[initial_stage_time][pbcid] = int(e.max_audit_rate_p[pbcid])
e.status_tm[initial_stage_time] = {}
for mid in e.mids:
# initial contest state
e.status_tm[initial_stage_time][mid] = e.initial_status_m[mid]
ss = {} # saved state dict, to be written out
ss["stage_time"] = initial_stage_time
ss["sn_tp"] = e.sn_tp # sample sizes, by stage and pbcid
ss["status_tm"] = e.status_tm # measurement statuses, by stage and mid
ss["plan_tp"] = e.plan_tp # plan for next stage of audit
write_state(e, ss)
def write_intermediate_saved_state(e):
"""
Write an intermediate saved-state,
after the election-spec has been read and the first audit stage done.
"""
ss = {} # saved state dict, to be written out
ss["stage_time"] = e.stage_time
ss["sn_tp"] = e.sn_tp # sample sizes, by stage and pbcid
ss["status_tm"] = e.status_tm # measurement statuses, by stage and mid
ss["plan_tp"] = e.plan_tp # plan for next stage of audit
write_state(e, ss)
def write_state(e, ss):
"""
Save some state to 3-audit/34-audit-output/audit-output-saved-state.json
Data ss saved is needed in the next audit stage.
ss is a dict with the saved-state information, including
the stage_time.
"""
dirpath = os.path.join(OpenAuditTool.ELECTIONS_ROOT,
e.election_dirname,
"3-audit",
"34-audit-output")
os.makedirs(dirpath, exist_ok=True)
filename = os.path.join(dirpath,
"audit-output-saved-state-"+ss["stage_time"]+".json")
with open(filename, "w") as file:
json.dump(ss, file, indent=2)
def read_saved_state(e):
"""
Read state from latest 3-audit/34-audit-output/audit-output-saved-state.json
"""
dirpath = os.path.join(OpenAuditTool.ELECTIONS_ROOT,
e.election_dirname,
"3-audit",
"34-audit-output")
filename = utils.greatest_name(dirpath,
"audit-output-saved-state",
".json")
file_pathname = os.path.join(dirpath, filename)
file = open(file_pathname, "r")
e.saved_state = json.load(file)
if __name__ == "__main__":
pass
| [
"os.makedirs",
"utils.greatest_name",
"os.path.join",
"json.load",
"json.dump"
] | [((2015, 2111), 'os.path.join', 'os.path.join', (['OpenAuditTool.ELECTIONS_ROOT', 'e.election_dirname', '"""3-audit"""', '"""34-audit-output"""'], {}), "(OpenAuditTool.ELECTIONS_ROOT, e.election_dirname, '3-audit',\n '34-audit-output')\n", (2027, 2111), False, 'import os\n'), ((2193, 2228), 'os.makedirs', 'os.makedirs', (['dirpath'], {'exist_ok': '(True)'}), '(dirpath, exist_ok=True)\n', (2204, 2228), False, 'import os\n'), ((2244, 2323), 'os.path.join', 'os.path.join', (['dirpath', "('audit-output-saved-state-' + ss['stage_time'] + '.json')"], {}), "(dirpath, 'audit-output-saved-state-' + ss['stage_time'] + '.json')\n", (2256, 2323), False, 'import os\n'), ((2565, 2661), 'os.path.join', 'os.path.join', (['OpenAuditTool.ELECTIONS_ROOT', 'e.election_dirname', '"""3-audit"""', '"""34-audit-output"""'], {}), "(OpenAuditTool.ELECTIONS_ROOT, e.election_dirname, '3-audit',\n '34-audit-output')\n", (2577, 2661), False, 'import os\n'), ((2754, 2819), 'utils.greatest_name', 'utils.greatest_name', (['dirpath', '"""audit-output-saved-state"""', '""".json"""'], {}), "(dirpath, 'audit-output-saved-state', '.json')\n", (2773, 2819), False, 'import utils\n'), ((2910, 2941), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (2922, 2941), False, 'import os\n'), ((2999, 3014), 'json.load', 'json.load', (['file'], {}), '(file)\n', (3008, 3014), False, 'import json\n'), ((2395, 2424), 'json.dump', 'json.dump', (['ss', 'file'], {'indent': '(2)'}), '(ss, file, indent=2)\n', (2404, 2424), False, 'import json\n')] |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch_geometric.nn as pyg_nn
import torch_geometric.utils as pyg_utils
from rlkit.torch.networks import Mlp
from network_utils import get_activation, build_conv_model
class GNNNet(torch.nn.Module):
def __init__(self,
pre_graph_builder,
node_dim,
conv_type='GSage',
num_conv_layers=3,
hidden_activation=None,
output_activation=None,
):
super(GNNNet, self).__init__()
print('gnn')
# graph builder
self.pre_graph_builder = pre_graph_builder
# convs
self.node_input_dim = pre_graph_builder.output_dim
self.node_dim = node_dim
self.conv_type = conv_type
self.num_conv_layers = num_conv_layers
self.convs = self.build_convs(self.node_input_dim, self.node_dim, self.num_conv_layers)
self.hidden_activations = nn.ModuleList([get_activation(hidden_activation) for l in range(num_conv_layers)])
self.output_activation = get_activation(output_activation)
def build_convs(self, node_input_dim, node_dim, num_conv_layers):
convs = nn.ModuleList()
conv = build_conv_model(self.conv_type, node_input_dim, node_dim)
convs.append(conv)
for l in range(1,num_conv_layers):
conv = build_conv_model(self.conv_type, node_dim, node_dim)
convs.append(conv)
return convs
def forward(self, obs, valid_mask=None):
batch_size = obs.shape[0]
x, edge_index = self.pre_graph_builder(obs, valid_mask)
for l, conv in enumerate(self.convs):
# self.check_input(x, edge_index)
x = conv(x, edge_index)
x = self.hidden_activations[l](x)
x = x.reshape(batch_size,-1,self.node_dim)
x = self.output_activation(x)
return x
def check_input(self, xs, edge_index):
Os = {}
for indx in range(edge_index.shape[1]):
i=edge_index[1,indx].detach().numpy()
j=edge_index[0,indx].detach().numpy()
xi=xs[i].detach().numpy()
xj=list(xs[j].detach().numpy())
if str(i) not in Os.keys():
Os[str(i)] = {'x_j':[]}
Os[str(i)]['x_i'] = xi
Os[str(i)]['x_j'] += xj
import matplotlib.pyplot as plt
plt.figure()
plt.subplot(1,2,1)
for i in Os.keys():
plt.plot(Os[str(i)]['x_i'],label=str(i))
plt.title('x_i')
plt.legend()
plt.subplot(1,2,2)
for i in Os.keys():
plt.plot(Os[str(i)]['x_j'],label=str(i))
plt.title('x_j')
plt.legend()
plt.show()
| [
"matplotlib.pyplot.title",
"torch.nn.ModuleList",
"network_utils.get_activation",
"matplotlib.pyplot.figure",
"network_utils.build_conv_model",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((1122, 1155), 'network_utils.get_activation', 'get_activation', (['output_activation'], {}), '(output_activation)\n', (1136, 1155), False, 'from network_utils import get_activation, build_conv_model\n'), ((1252, 1267), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (1265, 1267), True, 'import torch.nn as nn\n'), ((1283, 1341), 'network_utils.build_conv_model', 'build_conv_model', (['self.conv_type', 'node_input_dim', 'node_dim'], {}), '(self.conv_type, node_input_dim, node_dim)\n', (1299, 1341), False, 'from network_utils import get_activation, build_conv_model\n'), ((2450, 2462), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2460, 2462), True, 'import matplotlib.pyplot as plt\n'), ((2471, 2491), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (2482, 2491), True, 'import matplotlib.pyplot as plt\n'), ((2608, 2620), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2618, 2620), True, 'import matplotlib.pyplot as plt\n'), ((2629, 2649), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2640, 2649), True, 'import matplotlib.pyplot as plt\n'), ((2766, 2778), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (2776, 2778), True, 'import matplotlib.pyplot as plt\n'), ((2787, 2797), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2795, 2797), True, 'import matplotlib.pyplot as plt\n'), ((1431, 1483), 'network_utils.build_conv_model', 'build_conv_model', (['self.conv_type', 'node_dim', 'node_dim'], {}), '(self.conv_type, node_dim, node_dim)\n', (1447, 1483), False, 'from network_utils import get_activation, build_conv_model\n'), ((2583, 2599), 'matplotlib.pyplot.title', 'plt.title', (['"""x_i"""'], {}), "('x_i')\n", (2592, 2599), True, 'import matplotlib.pyplot as plt\n'), ((2741, 2757), 'matplotlib.pyplot.title', 'plt.title', (['"""x_j"""'], {}), "('x_j')\n", (2750, 2757), True, 'import matplotlib.pyplot as plt\n'), ((1021, 1054), 'network_utils.get_activation', 'get_activation', (['hidden_activation'], {}), '(hidden_activation)\n', (1035, 1054), False, 'from network_utils import get_activation, build_conv_model\n')] |
import numpy as np
import matplotlib.pyplot as plt
# Simple data to display in various forms
x = np.linspace(0, 2 * np.pi, 400)
y = np.sin(x ** 2)
# row and column sharing
f, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2, sharex='col', sharey='row')
ax1.plot(x, y)
ax1.set_title('Sharing x per column, y per row')
ax2.scatter(x, y)
ax3.scatter(x, 2 * y ** 2 - 1, color='r')
ax4.plot(x, 2 * y ** 2 - 1, color='r')
plt.show()
| [
"numpy.sin",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((98, 128), 'numpy.linspace', 'np.linspace', (['(0)', '(2 * np.pi)', '(400)'], {}), '(0, 2 * np.pi, 400)\n', (109, 128), True, 'import numpy as np\n'), ((133, 147), 'numpy.sin', 'np.sin', (['(x ** 2)'], {}), '(x ** 2)\n', (139, 147), True, 'import numpy as np\n'), ((204, 250), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {'sharex': '"""col"""', 'sharey': '"""row"""'}), "(2, 2, sharex='col', sharey='row')\n", (216, 250), True, 'import matplotlib.pyplot as plt\n'), ((415, 425), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (423, 425), True, 'import matplotlib.pyplot as plt\n')] |
import os
from base64 import b64decode as decode
h = os.environ['QHOME']
l = decode(os.environ['QLIC_KC'])
with open(os.path.join(h, 'kc.lic'), 'wb') as f:
f.write(l)
| [
"os.path.join",
"base64.b64decode"
] | [((78, 107), 'base64.b64decode', 'decode', (["os.environ['QLIC_KC']"], {}), "(os.environ['QLIC_KC'])\n", (84, 107), True, 'from base64 import b64decode as decode\n'), ((118, 143), 'os.path.join', 'os.path.join', (['h', '"""kc.lic"""'], {}), "(h, 'kc.lic')\n", (130, 143), False, 'import os\n')] |
import sys
import time
def progress_bar():
for i in range(1, 101):
print("\r", end="")
print("Progress: {}%: ".format(i), "▋" * (i // 2), end="") # ▋ * -
time.sleep(0.05)
sys.stdout.flush()
time.sleep(0.05)
if __name__ == '__main__':
progress_bar() | [
"sys.stdout.flush",
"time.sleep"
] | [((211, 229), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (227, 229), False, 'import sys\n'), ((231, 247), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (241, 247), False, 'import time\n'), ((191, 207), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (201, 207), False, 'import time\n')] |
import sys
sys.path.append('../')
import numpy as np
import os
import pickle
from pathlib import Path
import matplotlib.pyplot as plt
import matplotlib.colors
from matplotlib import collections as mc
from py_diff_stokes_flow.env.amplifier_env_2d import AmplifierEnv2d
from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder
if __name__ == '__main__':
# This script assumes the data folder exists.
data_folder = Path('amplifier')
cnt = 0
while True:
data_file_name = data_folder / '{:04d}.data'.format(cnt)
if not os.path.exists(data_file_name):
cnt -= 1
break
cnt += 1
data_file_name = data_folder / '{:04d}.data'.format(cnt)
opt_history = pickle.load(open(data_file_name, 'rb'))
# Setting up the environment.
folder = Path('draw_design')
create_folder(folder, exist_ok=True)
seed = 42
env = AmplifierEnv2d(seed, folder)
create_folder(folder / 'init_design', exist_ok=True)
def draw_init_design(design_params, file_name, draw_control_points=False):
_, info = env.solve(design_params, False, { 'solver': 'eigen' })
u = info[0]['velocity_field']
node_nums = env.node_nums()
sdf = np.zeros(node_nums)
for i in range(node_nums[0]):
for j in range(node_nums[1]):
sdf[i, j] = info[0]['scene'].GetSignedDistance((i, j))
if sdf[i, j] >= 0:
u[i, j] = 0
# Draw design parameters.
plt.rc('font', size=20)
plt.rc('axes', titlesize=24)
plt.rc('axes', labelsize=24)
plt.rc('xtick', labelsize=18)
plt.rc('ytick', labelsize=18)
plt.rc('legend', fontsize=20)
plt.rc('figure', titlesize=20)
face_color = ndarray([247 / 255, 247 / 255, 247 / 255])
plt.rcParams['figure.facecolor'] = face_color
plt.rcParams['axes.facecolor'] = face_color
padding = 5
fig = plt.figure(figsize=(12, 10))
ax = fig.add_subplot(111)
lines = []
# Draw horizontal boundaries.
cell_nums = [n - 1 for n in node_nums]
for i in range(cell_nums[0]):
v0 = ndarray([i, 0])
v1 = ndarray([i + 1, 0])
lines.append((v0, v1))
v0 = ndarray([i, cell_nums[1]])
v1 = ndarray([i + 1, cell_nums[1]])
lines.append((v0, v1))
def intercept(d0, d1):
# (0, d0), (t, 0), (1, d1).
# t / -d0 = 1 / (d1 - d0).
return -d0 / (d1 - d0)
# Draw vertical boundaries.
thin_lines = []
for j in range(cell_nums[1]):
for i in [0, cell_nums[0]]:
d0 = sdf[i, j]
d1 = sdf[i, j + 1]
v0 = ndarray([i, j])
v1 = ndarray([i, j + 1])
if d0 >= 0 and d1 >= 0:
lines.append((v0, v1))
elif d0 * d1 < 0:
t = intercept(d0, d1)
vt = (1 - t) * v0 + t * v1
if d0 > 0:
lines.append((v0, vt))
thin_lines.append((vt, v1))
else:
lines.append((vt, v1))
thin_lines.append((v0, vt))
else:
thin_lines.append((v0, v1))
# Draw the interface.
intercepts = []
for i in range(node_nums[0]):
ds = set()
for j in range(cell_nums[1]):
d0 = sdf[i, j]
d1 = sdf[i, j + 1]
if d0 * d1 <= 0:
ds.add(j + intercept(d0, d1))
ds = sorted(tuple(ds))
assert len(ds) == 2
intercepts.append(ds)
interfaces = []
for i in range(cell_nums[0]):
for k in [0, 1]:
v0 = ndarray([i, intercepts[i][k]])
v1 = ndarray([i + 1, intercepts[i + 1][k]])
interfaces.append((v0, v1))
if draw_control_points:
shape_params, _ = env._variables_to_shape_params(design_params)
shape_params = shape_params.reshape((8, 2))
control_lines = []
for i in range(3):
v0 = shape_params[i]
v1 = shape_params[i + 1]
control_lines.append((v0, v1))
v0 = shape_params[4 + i]
v1 = shape_params[4 + i + 1]
control_lines.append((v0, v1))
ax.add_collection(mc.LineCollection(control_lines, colors='tab:orange', linestyles='-.', linewidth=2.0))
ax.add_collection(mc.LineCollection(interfaces, colors='k', linewidth=4.0))
ax.add_collection(mc.LineCollection(thin_lines, colors='k', linewidth=1.0))
ax.add_collection(mc.LineCollection(lines, colors='k', linewidth=4.0))
ax.set_xticks([])
ax.set_yticks([])
ax.set_xlim([-padding, cell_nums[0] + padding])
ax.set_ylim([-padding, cell_nums[1] + padding])
ax.axis('off')
fig.savefig(folder / file_name)
plt.close()
# Draw the signed distance.
create_folder(folder / 'signed_dist', exist_ok=True)
def draw_signed_distance(design_params, file_name):
_, info = env.solve(design_params, False, { 'solver': 'eigen' })
node_nums = env.node_nums()
sdf = np.zeros(node_nums)
for i in range(node_nums[0]):
for j in range(node_nums[1]):
sdf[i, j] = info[0]['scene'].GetSignedDistance((i, j))
# Draw design parameters.
plt.rc('font', size=20)
plt.rc('axes', titlesize=24)
plt.rc('axes', labelsize=24)
plt.rc('xtick', labelsize=18)
plt.rc('ytick', labelsize=18)
plt.rc('legend', fontsize=20)
plt.rc('figure', titlesize=20)
face_color = ndarray([247 / 255, 247 / 255, 247 / 255])
plt.rcParams['figure.facecolor'] = face_color
plt.rcParams['axes.facecolor'] = face_color
padding = 5
fig = plt.figure(figsize=(12, 10))
ax = fig.add_subplot(111)
# Draw horizontal boundaries.
cell_nums = [n - 1 for n in node_nums]
nx, ny = node_nums
Y, X = np.meshgrid(np.arange(ny), np.arange(nx))
Z = np.zeros((nx, ny))
cs = ax.contour(X, Y, sdf, 20)
ax.clabel(cs, fontsize=10, inline=1)
ax.set_aspect('equal')
ax.grid(True)
lines = []
# Draw horizontal boundaries.
cell_nums = [n - 1 for n in node_nums]
for i in range(cell_nums[0]):
v0 = ndarray([i, 0])
v1 = ndarray([i + 1, 0])
lines.append((v0, v1))
v0 = ndarray([i, cell_nums[1]])
v1 = ndarray([i + 1, cell_nums[1]])
lines.append((v0, v1))
def intercept(d0, d1):
# (0, d0), (t, 0), (1, d1).
# t / -d0 = 1 / (d1 - d0).
return -d0 / (d1 - d0)
# Draw vertical boundaries.
thin_lines = []
for j in range(cell_nums[1]):
for i in [0, cell_nums[0]]:
d0 = sdf[i, j]
d1 = sdf[i, j + 1]
v0 = ndarray([i, j])
v1 = ndarray([i, j + 1])
if d0 >= 0 and d1 >= 0:
lines.append((v0, v1))
elif d0 * d1 < 0:
t = intercept(d0, d1)
vt = (1 - t) * v0 + t * v1
if d0 > 0:
lines.append((v0, vt))
thin_lines.append((vt, v1))
else:
lines.append((vt, v1))
thin_lines.append((v0, vt))
else:
thin_lines.append((v0, v1))
# Draw the interface.
intercepts = []
for i in range(node_nums[0]):
ds = set()
for j in range(cell_nums[1]):
d0 = sdf[i, j]
d1 = sdf[i, j + 1]
if d0 * d1 <= 0:
ds.add(j + intercept(d0, d1))
ds = sorted(tuple(ds))
assert len(ds) == 2
intercepts.append(ds)
interfaces = []
for i in range(cell_nums[0]):
for k in [0, 1]:
v0 = ndarray([i, intercepts[i][k]])
v1 = ndarray([i + 1, intercepts[i + 1][k]])
interfaces.append((v0, v1))
ax.add_collection(mc.LineCollection(interfaces, colors='k', linewidth=4.0))
ax.add_collection(mc.LineCollection(thin_lines, colors='k', linewidth=1.0))
ax.add_collection(mc.LineCollection(lines, colors='k', linewidth=4.0))
ax.set_xticks([])
ax.set_yticks([])
ax.set_xlim([-padding, cell_nums[0] + padding])
ax.set_ylim([-padding, cell_nums[1] + padding])
ax.axis('off')
fig.savefig(folder / file_name)
plt.close()
# Draw init guesses.
sample_num = 8
theta = [np.random.uniform(env.lower_bound(), env.upper_bound()) for _ in range(sample_num)]
fps = 10
for k in range(sample_num - 1):
xk0 = theta[k]
xk1 = theta[k + 1]
for i in range(fps):
t = i / fps
xk = (1 - t) * xk0 + t * xk1
draw_init_design(xk, 'init_design/{:04d}.png'.format(k * fps + i), draw_control_points=True)
if k == 0 and i == 0:
draw_init_design(xk, '{:04d}.png'.format(k * fps + i), draw_control_points=False)
draw_signed_distance(xk, 'signed_dist/{:04d}.png'.format(k * fps + i)) | [
"os.path.exists",
"py_diff_stokes_flow.env.amplifier_env_2d.AmplifierEnv2d",
"py_diff_stokes_flow.common.common.ndarray",
"pathlib.Path",
"numpy.arange",
"matplotlib.collections.LineCollection",
"matplotlib.pyplot.close",
"numpy.zeros",
"matplotlib.pyplot.figure",
"sys.path.append",
"py_diff_stokes_flow.common.common.create_folder",
"matplotlib.pyplot.rc"
] | [((11, 33), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (26, 33), False, 'import sys\n'), ((448, 465), 'pathlib.Path', 'Path', (['"""amplifier"""'], {}), "('amplifier')\n", (452, 465), False, 'from pathlib import Path\n'), ((829, 848), 'pathlib.Path', 'Path', (['"""draw_design"""'], {}), "('draw_design')\n", (833, 848), False, 'from pathlib import Path\n'), ((853, 889), 'py_diff_stokes_flow.common.common.create_folder', 'create_folder', (['folder'], {'exist_ok': '(True)'}), '(folder, exist_ok=True)\n', (866, 889), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((914, 942), 'py_diff_stokes_flow.env.amplifier_env_2d.AmplifierEnv2d', 'AmplifierEnv2d', (['seed', 'folder'], {}), '(seed, folder)\n', (928, 942), False, 'from py_diff_stokes_flow.env.amplifier_env_2d import AmplifierEnv2d\n'), ((948, 1000), 'py_diff_stokes_flow.common.common.create_folder', 'create_folder', (["(folder / 'init_design')"], {'exist_ok': '(True)'}), "(folder / 'init_design', exist_ok=True)\n", (961, 1000), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((5166, 5218), 'py_diff_stokes_flow.common.common.create_folder', 'create_folder', (["(folder / 'signed_dist')"], {'exist_ok': '(True)'}), "(folder / 'signed_dist', exist_ok=True)\n", (5179, 5218), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((1241, 1260), 'numpy.zeros', 'np.zeros', (['node_nums'], {}), '(node_nums)\n', (1249, 1260), True, 'import numpy as np\n'), ((1522, 1545), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'size': '(20)'}), "('font', size=20)\n", (1528, 1545), True, 'import matplotlib.pyplot as plt\n'), ((1554, 1582), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'titlesize': '(24)'}), "('axes', titlesize=24)\n", (1560, 1582), True, 'import matplotlib.pyplot as plt\n'), ((1591, 1619), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'labelsize': '(24)'}), "('axes', labelsize=24)\n", (1597, 1619), True, 'import matplotlib.pyplot as plt\n'), ((1628, 1657), 'matplotlib.pyplot.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': '(18)'}), "('xtick', labelsize=18)\n", (1634, 1657), True, 'import matplotlib.pyplot as plt\n'), ((1666, 1695), 'matplotlib.pyplot.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': '(18)'}), "('ytick', labelsize=18)\n", (1672, 1695), True, 'import matplotlib.pyplot as plt\n'), ((1704, 1733), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': '(20)'}), "('legend', fontsize=20)\n", (1710, 1733), True, 'import matplotlib.pyplot as plt\n'), ((1742, 1772), 'matplotlib.pyplot.rc', 'plt.rc', (['"""figure"""'], {'titlesize': '(20)'}), "('figure', titlesize=20)\n", (1748, 1772), True, 'import matplotlib.pyplot as plt\n'), ((1794, 1836), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[247 / 255, 247 / 255, 247 / 255]'], {}), '([247 / 255, 247 / 255, 247 / 255])\n', (1801, 1836), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((1978, 2006), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 10)'}), '(figsize=(12, 10))\n', (1988, 2006), True, 'import matplotlib.pyplot as plt\n'), ((5117, 5128), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5126, 5128), True, 'import matplotlib.pyplot as plt\n'), ((5398, 5417), 'numpy.zeros', 'np.zeros', (['node_nums'], {}), '(node_nums)\n', (5406, 5417), True, 'import numpy as np\n'), ((5612, 5635), 'matplotlib.pyplot.rc', 'plt.rc', (['"""font"""'], {'size': '(20)'}), "('font', size=20)\n", (5618, 5635), True, 'import matplotlib.pyplot as plt\n'), ((5644, 5672), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'titlesize': '(24)'}), "('axes', titlesize=24)\n", (5650, 5672), True, 'import matplotlib.pyplot as plt\n'), ((5681, 5709), 'matplotlib.pyplot.rc', 'plt.rc', (['"""axes"""'], {'labelsize': '(24)'}), "('axes', labelsize=24)\n", (5687, 5709), True, 'import matplotlib.pyplot as plt\n'), ((5718, 5747), 'matplotlib.pyplot.rc', 'plt.rc', (['"""xtick"""'], {'labelsize': '(18)'}), "('xtick', labelsize=18)\n", (5724, 5747), True, 'import matplotlib.pyplot as plt\n'), ((5756, 5785), 'matplotlib.pyplot.rc', 'plt.rc', (['"""ytick"""'], {'labelsize': '(18)'}), "('ytick', labelsize=18)\n", (5762, 5785), True, 'import matplotlib.pyplot as plt\n'), ((5794, 5823), 'matplotlib.pyplot.rc', 'plt.rc', (['"""legend"""'], {'fontsize': '(20)'}), "('legend', fontsize=20)\n", (5800, 5823), True, 'import matplotlib.pyplot as plt\n'), ((5832, 5862), 'matplotlib.pyplot.rc', 'plt.rc', (['"""figure"""'], {'titlesize': '(20)'}), "('figure', titlesize=20)\n", (5838, 5862), True, 'import matplotlib.pyplot as plt\n'), ((5884, 5926), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[247 / 255, 247 / 255, 247 / 255]'], {}), '([247 / 255, 247 / 255, 247 / 255])\n', (5891, 5926), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((6068, 6096), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 10)'}), '(figsize=(12, 10))\n', (6078, 6096), True, 'import matplotlib.pyplot as plt\n'), ((6312, 6330), 'numpy.zeros', 'np.zeros', (['(nx, ny)'], {}), '((nx, ny))\n', (6320, 6330), True, 'import numpy as np\n'), ((8943, 8954), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8952, 8954), True, 'import matplotlib.pyplot as plt\n'), ((574, 604), 'os.path.exists', 'os.path.exists', (['data_file_name'], {}), '(data_file_name)\n', (588, 604), False, 'import os\n'), ((2200, 2215), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, 0]'], {}), '([i, 0])\n', (2207, 2215), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((2233, 2252), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, 0]'], {}), '([i + 1, 0])\n', (2240, 2252), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((2305, 2331), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, cell_nums[1]]'], {}), '([i, cell_nums[1]])\n', (2312, 2331), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((2349, 2379), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, cell_nums[1]]'], {}), '([i + 1, cell_nums[1]])\n', (2356, 2379), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((4660, 4716), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['interfaces'], {'colors': '"""k"""', 'linewidth': '(4.0)'}), "(interfaces, colors='k', linewidth=4.0)\n", (4677, 4716), True, 'from matplotlib import collections as mc\n'), ((4744, 4800), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['thin_lines'], {'colors': '"""k"""', 'linewidth': '(1.0)'}), "(thin_lines, colors='k', linewidth=1.0)\n", (4761, 4800), True, 'from matplotlib import collections as mc\n'), ((4828, 4879), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['lines'], {'colors': '"""k"""', 'linewidth': '(4.0)'}), "(lines, colors='k', linewidth=4.0)\n", (4845, 4879), True, 'from matplotlib import collections as mc\n'), ((6270, 6283), 'numpy.arange', 'np.arange', (['ny'], {}), '(ny)\n', (6279, 6283), True, 'import numpy as np\n'), ((6285, 6298), 'numpy.arange', 'np.arange', (['nx'], {}), '(nx)\n', (6294, 6298), True, 'import numpy as np\n'), ((6628, 6643), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, 0]'], {}), '([i, 0])\n', (6635, 6643), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((6661, 6680), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, 0]'], {}), '([i + 1, 0])\n', (6668, 6680), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((6733, 6759), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, cell_nums[1]]'], {}), '([i, cell_nums[1]])\n', (6740, 6759), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((6777, 6807), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, cell_nums[1]]'], {}), '([i + 1, cell_nums[1]])\n', (6784, 6807), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((8486, 8542), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['interfaces'], {'colors': '"""k"""', 'linewidth': '(4.0)'}), "(interfaces, colors='k', linewidth=4.0)\n", (8503, 8542), True, 'from matplotlib import collections as mc\n'), ((8570, 8626), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['thin_lines'], {'colors': '"""k"""', 'linewidth': '(1.0)'}), "(thin_lines, colors='k', linewidth=1.0)\n", (8587, 8626), True, 'from matplotlib import collections as mc\n'), ((8654, 8705), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['lines'], {'colors': '"""k"""', 'linewidth': '(4.0)'}), "(lines, colors='k', linewidth=4.0)\n", (8671, 8705), True, 'from matplotlib import collections as mc\n'), ((2787, 2802), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, j]'], {}), '([i, j])\n', (2794, 2802), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((2824, 2843), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, j + 1]'], {}), '([i, j + 1])\n', (2831, 2843), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((3896, 3926), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, intercepts[i][k]]'], {}), '([i, intercepts[i][k]])\n', (3903, 3926), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((3948, 3986), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, intercepts[i + 1][k]]'], {}), '([i + 1, intercepts[i + 1][k]])\n', (3955, 3986), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((4546, 4635), 'matplotlib.collections.LineCollection', 'mc.LineCollection', (['control_lines'], {'colors': '"""tab:orange"""', 'linestyles': '"""-."""', 'linewidth': '(2.0)'}), "(control_lines, colors='tab:orange', linestyles='-.',\n linewidth=2.0)\n", (4563, 4635), True, 'from matplotlib import collections as mc\n'), ((7215, 7230), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, j]'], {}), '([i, j])\n', (7222, 7230), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((7252, 7271), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, j + 1]'], {}), '([i, j + 1])\n', (7259, 7271), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((8324, 8354), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i, intercepts[i][k]]'], {}), '([i, intercepts[i][k]])\n', (8331, 8354), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n'), ((8376, 8414), 'py_diff_stokes_flow.common.common.ndarray', 'ndarray', (['[i + 1, intercepts[i + 1][k]]'], {}), '([i + 1, intercepts[i + 1][k]])\n', (8383, 8414), False, 'from py_diff_stokes_flow.common.common import ndarray, print_error, create_folder\n')] |
from flask import Blueprint, render_template
adventure_bp = Blueprint('adventure', __name__, template_folder='templates', static_folder='static', url_prefix='/adventure')
@adventure_bp.route('')
def adventure():
return render_template('adventure.html')
| [
"flask.render_template",
"flask.Blueprint"
] | [((61, 176), 'flask.Blueprint', 'Blueprint', (['"""adventure"""', '__name__'], {'template_folder': '"""templates"""', 'static_folder': '"""static"""', 'url_prefix': '"""/adventure"""'}), "('adventure', __name__, template_folder='templates', static_folder\n ='static', url_prefix='/adventure')\n", (70, 176), False, 'from flask import Blueprint, render_template\n'), ((226, 259), 'flask.render_template', 'render_template', (['"""adventure.html"""'], {}), "('adventure.html')\n", (241, 259), False, 'from flask import Blueprint, render_template\n')] |
from abc import ABC
from screws.numerical._2d_space.partial_derivative import NumericalPartialDerivative_xy
class NumericalJacobian_xy_22(ABC):
"""
For a mapping: ``x = Phi_x(r, s), y = Phi_y(r, s)``,
``self._func_(r, s) = (Phi_x(r, s), Phi_y(r, s))``, we compute its Jacobian numerically:
``(( dx/dr, dx/ds ), ( dy/dr, dy/ds ))``.
"""
def __init__(self, func22):
""" """
self._func22_ = func22
def ___PRIVATE_evaluate_func22_for_x_rs___(self, r, s):
return self._func22_(r, s)[0]
def ___PRIVATE_evaluate_func22_for_y_rs___(self, r, s):
return self._func22_(r, s)[1]
def scipy_derivative(self, r, s, dr_ds=1e-8, n=1, order=3):
xr, xs = NumericalPartialDerivative_xy(self.___PRIVATE_evaluate_func22_for_x_rs___,
r, s, dx_dy=dr_ds, n=n, order=order).scipy_total
yr, ys = NumericalPartialDerivative_xy(self.___PRIVATE_evaluate_func22_for_y_rs___,
r, s, dx_dy=dr_ds, n=n, order=order).scipy_total
return ((xr, xs),
(yr, ys))
| [
"screws.numerical._2d_space.partial_derivative.NumericalPartialDerivative_xy"
] | [((720, 835), 'screws.numerical._2d_space.partial_derivative.NumericalPartialDerivative_xy', 'NumericalPartialDerivative_xy', (['self.___PRIVATE_evaluate_func22_for_x_rs___', 'r', 's'], {'dx_dy': 'dr_ds', 'n': 'n', 'order': 'order'}), '(self.___PRIVATE_evaluate_func22_for_x_rs___,\n r, s, dx_dy=dr_ds, n=n, order=order)\n', (749, 835), False, 'from screws.numerical._2d_space.partial_derivative import NumericalPartialDerivative_xy\n'), ((908, 1023), 'screws.numerical._2d_space.partial_derivative.NumericalPartialDerivative_xy', 'NumericalPartialDerivative_xy', (['self.___PRIVATE_evaluate_func22_for_y_rs___', 'r', 's'], {'dx_dy': 'dr_ds', 'n': 'n', 'order': 'order'}), '(self.___PRIVATE_evaluate_func22_for_y_rs___,\n r, s, dx_dy=dr_ds, n=n, order=order)\n', (937, 1023), False, 'from screws.numerical._2d_space.partial_derivative import NumericalPartialDerivative_xy\n')] |
import pprint
class SymbolTable:
def __init__(self):
self.ct = {}
self.st = {}
def printTables(self):
pp = pprint.PrettyPrinter(indent=4)
pp.pprint(self.ct)
pp.pprint(self.st)
# clear the subroutine-level symbol table on
# encountering a new subroutine declaration
def startSubroutine(self):
self.st.clear()
return
# returns number of variables of given kind
def varCount(self, kind):
ans = 0
for symbol in self.ct:
if self.ct[symbol]['kind'] == kind:
ans += 1
for symbol in self.st:
if self.st[symbol]['kind'] == kind:
ans += 1
return ans
def define(self, name, type, kind):
# this function is going to be used when variables are declared:
# compileVarDec, compileClassVarDec, compileParameterList
# compileSubroutine (for adding current object in methods)
index = self.varCount(kind)
# name => identifier
# type => int / char / bool / className
# kind => static / field / arg / local (var)
# index => needs to be computed for every KIND
# scope => class / subroutine
if kind in ['STATIC', 'FIELD']:
self.ct[name] = {
'type': type,
'kind': kind,
'index': index
}
elif kind in ['VAR', 'ARG']:
self.st[name] = {
'type': type,
'kind': kind,
'index': index
}
def kindOf(self, name):
if name in self.ct:
return self.ct[name]['kind']
elif name in self.st:
return self.st[name]['kind']
def typeOf(self, name):
if name in self.ct:
return self.ct[name]['type']
elif name in self.st:
return self.st[name]['type']
def indexOf(self, name):
if name in self.ct:
return self.ct[name]['index']
elif name in self.st:
return self.st[name]['index']
| [
"pprint.PrettyPrinter"
] | [((142, 172), 'pprint.PrettyPrinter', 'pprint.PrettyPrinter', ([], {'indent': '(4)'}), '(indent=4)\n', (162, 172), False, 'import pprint\n')] |
import os
import logging
from data_loader import Dataset
from model.patcher import Patcher
import argparse
import torch
import math
import random
from transformers import BertModel, AdamW
from transformers.optimization import get_constant_schedule_with_warmup
from utils.tokenizer import Tokenizer
from utils.recoder import Statistic
from utils.patch_handler import Patch_handler
from tqdm import tqdm
import numpy as np
import logging
logging.getLogger("train.py").setLevel(logging.INFO)
def handle_a_batch(step, model, batch, recoder, args):
discriminator_loss, detector_loss, decoder_loss = None, None, None
data = {"input_ids": batch.input_ids, "masks": batch.attention_mask, "token_type_ids": batch.token_type_ids}
encoder_outputs = model("encode", data)
if args.discriminating:
data = {}
data["first_hiddens"] = encoder_outputs[1]
data["target_tfs"] = batch.target_tfs
predict_tf_logits, discriminator_loss = model("discriminate", data)
predict_tfs = predict_tf_logits > args.discriminating_threshold
recoder.update_discriminator(step + 1,
discriminator_loss.mean().item(),
predict_tfs.cpu().tolist(),
batch.target_tfs.cpu().tolist())
if args.detecting:
data = {}
data["masks"] = batch.attention_mask[:, 1:]
data["encoder_output"] = encoder_outputs[0][:, 1:, :]
data["target_labels"] = batch.target_labels[:, 1:]
labeling_output, detector_loss = model("detect", data)
predict_labels = torch.softmax(labeling_output, dim=-1).argmax(dim=-1)
list_predict_labels = [labels[mask].cpu().tolist() for labels, mask in zip(predict_labels, data["masks"])]
list_target_labels = [labels[mask].cpu().tolist() for labels, mask in zip(data["target_labels"], data["masks"])]
recoder.update_detector(list_predict_labels, list_target_labels, batch.examples)
if args.discriminating:
detector_loss = detector_loss[batch.error_example_mask].mean()
else:
detector_loss = detector_loss.mean()
recoder.update_detector_loss(step + 1, detector_loss.item())
if args.correcting:
start_pos = batch.target_starts
end_pos = batch.target_ends
patch_ids = batch.target_ids
if patch_ids is not None:
patch_ids = patch_ids[:, :args.max_decode_step]
encoder_output = encoder_outputs[0]
patch_start_states = encoder_output[start_pos[0], start_pos[1]]
patch_end_states = encoder_output[end_pos[0], end_pos[1]]
patch_mid_states = []
for batch_idx, start_pos, end_pos in zip(start_pos[0], start_pos[1], end_pos[1]):
if start_pos + 1 == end_pos:
if gpu_num > 1:
patch_mid_states.append(model.module.corrector.emtpy_state)
else:
patch_mid_states.append(model.corrector.emtpy_state)
else:
patch_mid_states.append(torch.mean(encoder_output[batch_idx, start_pos + 1:end_pos], dim=0))
patch_mid_states = torch.stack(patch_mid_states)
data = {}
data["patch_start_states"] = patch_start_states
data["patch_end_states"] = patch_end_states
data["patch_mid_states"] = patch_mid_states
data["patch_ids"] = patch_ids
data["length"] = patch_ids.size(-1)
_, corrector_loss = model("correct", data)
recoder.update_corrector(corrector_loss.mean().item())
losses = [discriminator_loss, detector_loss, corrector_loss]
return losses
def train(model: Patcher, train_data: Dataset, valid_data: Dataset, model_save_dir: str, gpu_num: int, recoder: Statistic,
args):
if args.freeze:
for name, value in model.encoder.named_parameters():
value.requires_grad = False
param_optimizer = list(model.named_parameters())
no_decay = ["bias", "LayerNorm.bias", "LayerNorm.weight", 'gamma', 'beta']
optimizer_grouped_parameters = [{
'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay) and p.requires_grad],
'weight_decay':
args.decay
}, {
'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay) and p.requires_grad],
'weight_decay':
0.0
}]
total_train_steps = train_data.get_batch_num() * args.epoch
warmup_steps = int(args.warmup * total_train_steps)
optimizer = AdamW(optimizer_grouped_parameters, lr=args.lr, betas=(0.9, 0.98), eps=1e-6, weight_decay=0.01)
scheduler = get_constant_schedule_with_warmup(optimizer, warmup_steps)
current_step = 0
decay_ratio = None
for i in range(args.epoch):
model.train()
recoder.reset("train", i + 1)
train_gen = train_data.generator()
step_num = train_data.get_batch_num()
process_bar = tqdm(enumerate(train_gen), total=step_num, desc="Training in epoch %d/%d" % (i + 1, args.epoch))
for step, batch in process_bar:
losses = handle_a_batch(step, model, batch, recoder, args)
optimizer.zero_grad()
loss = sum(filter(lambda x: x is not None, losses))
if gpu_num > 1:
loss.mean().backward()
else:
loss.backward()
process_bar.set_postfix(recoder.get_current_log())
optimizer.step()
scheduler.step()
current_step += 1
recoder.save()
if valid_data is not None:
with torch.no_grad():
model.eval()
recoder.reset("valid", i + 1)
valid_gen = valid_data.generator()
step_num = valid_data.get_batch_num()
process_bar = tqdm(enumerate(valid_gen), total=step_num, desc="Validing in epoch %d/%d" % (i + 1, args.epoch))
for step, batch in process_bar:
handle_a_batch(step, model, batch, recoder, args)
# discriminator_loss, detector_loss, predict_labels, decoder_loss = outputs
process_bar.set_postfix(recoder.get_current_log())
recoder.save()
if gpu_num > 1:
model.module.save(model_save_dir, i + 1)
else:
model.save(model_save_dir, i + 1)
tokenizer.save(model_save_dir, i+1)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--bert_dir", type=str, required=True)
parser.add_argument("--output_dir", type=str, required=True)
parser.add_argument("--train_file", type=str, required=True)
parser.add_argument("--valid_file", type=str)
parser.add_argument("--epoch", type=int, default=3)
parser.add_argument("--batch_size", type=int, default=32)
parser.add_argument("--lr", type=float, default=3e-5)
parser.add_argument("--gpus", type=int, nargs='+', default=None)
parser.add_argument("-lower_case", default=False, action="store_true")
parser.add_argument("-only_wrong", default=False, action="store_true")
parser.add_argument("-discriminating", default=True, action="store_false")
parser.add_argument("--discriminating_threshold", default=0.5, type=float)
parser.add_argument("-detecting", default=True, action="store_false")
parser.add_argument("-use_crf", default=True, action="store_false")
parser.add_argument("-use_lstm", default=False, action="store_true")
parser.add_argument("-dir_del", default=False, action="store_true")
parser.add_argument("-correcting", default=True, action="store_false")
parser.add_argument("-use_detect_out", default=False, action="store_true")
parser.add_argument("--max_decode_step", default=4, type=int)
parser.add_argument("-freeze", default=False, action="store_true")
parser.add_argument("--truncate", type=int, default=512)
parser.add_argument("--warmup", type=float, default=0.05)
parser.add_argument("--decay", type=float, default=1e-2)
args = parser.parse_args()
random.seed(123)
np.random.seed(123)
torch.manual_seed(123)
if not (args.correcting or args.detecting or args.discriminating):
raise ValueError("Cannot set discriminating, detecting and correcting to False at same time.")
if args.gpus:
gpu_num = len(args.gpus)
os.environ["CUDA_VISIBLE_DEVICES"] = ",".join([str(i) for i in args.gpus])
else:
gpu_num = 0
if not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
with open(os.path.join(args.output_dir, "cmd"), "w") as f:
f.write(str(args))
log_dir = os.path.join(args.output_dir, "log")
model_save_dir = os.path.join(args.output_dir, "model")
tokenizer = Tokenizer(args.bert_dir, args.lower_case)
patch_handler = Patch_handler(tokenizer.PATCH_EMPTY_ID, args.dir_del)
recoder = Statistic(log_dir,
args.discriminating,
args.detecting,
args.correcting,
max_decode_step=args.max_decode_step,
patch_handler=patch_handler)
model = Patcher(args.bert_dir,
discriminating=args.discriminating,
detecting=args.detecting,
correcting=args.correcting,
use_crf=args.use_crf,
use_lstm=args.use_lstm)
if gpu_num == 1:
model = model.cuda()
if gpu_num > 1:
model = torch.nn.DataParallel(model).cuda()
train_data = Dataset(args.train_file,
args.batch_size,
inference=False,
tokenizer=tokenizer,
discriminating=args.discriminating,
detecting=args.detecting,
correcting=args.correcting,
dir_del=args.dir_del,
only_wrong=args.only_wrong,
truncate=args.truncate)
if args.valid_file:
valid_data = Dataset(args.valid_file,
args.batch_size,
inference=False,
tokenizer=tokenizer,
discriminating=args.discriminating,
detecting=args.detecting,
correcting=args.correcting,
dir_del=args.dir_del,
only_wrong=args.only_wrong,
truncate=args.truncate)
else:
valid_data = None
train(model, train_data, valid_data, model_save_dir, gpu_num, recoder, args)
| [
"logging.getLogger",
"torch.softmax",
"os.path.exists",
"argparse.ArgumentParser",
"torch.mean",
"transformers.AdamW",
"data_loader.Dataset",
"model.patcher.Patcher",
"numpy.random.seed",
"utils.patch_handler.Patch_handler",
"torch.manual_seed",
"utils.tokenizer.Tokenizer",
"os.makedirs",
"utils.recoder.Statistic",
"torch.stack",
"os.path.join",
"torch.nn.DataParallel",
"transformers.optimization.get_constant_schedule_with_warmup",
"random.seed",
"torch.no_grad"
] | [((4624, 4725), 'transformers.AdamW', 'AdamW', (['optimizer_grouped_parameters'], {'lr': 'args.lr', 'betas': '(0.9, 0.98)', 'eps': '(1e-06)', 'weight_decay': '(0.01)'}), '(optimizer_grouped_parameters, lr=args.lr, betas=(0.9, 0.98), eps=\n 1e-06, weight_decay=0.01)\n', (4629, 4725), False, 'from transformers import BertModel, AdamW\n'), ((4736, 4794), 'transformers.optimization.get_constant_schedule_with_warmup', 'get_constant_schedule_with_warmup', (['optimizer', 'warmup_steps'], {}), '(optimizer, warmup_steps)\n', (4769, 4794), False, 'from transformers.optimization import get_constant_schedule_with_warmup\n'), ((6555, 6580), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6578, 6580), False, 'import argparse\n'), ((8184, 8200), 'random.seed', 'random.seed', (['(123)'], {}), '(123)\n', (8195, 8200), False, 'import random\n'), ((8205, 8224), 'numpy.random.seed', 'np.random.seed', (['(123)'], {}), '(123)\n', (8219, 8224), True, 'import numpy as np\n'), ((8229, 8251), 'torch.manual_seed', 'torch.manual_seed', (['(123)'], {}), '(123)\n', (8246, 8251), False, 'import torch\n'), ((8780, 8816), 'os.path.join', 'os.path.join', (['args.output_dir', '"""log"""'], {}), "(args.output_dir, 'log')\n", (8792, 8816), False, 'import os\n'), ((8838, 8876), 'os.path.join', 'os.path.join', (['args.output_dir', '"""model"""'], {}), "(args.output_dir, 'model')\n", (8850, 8876), False, 'import os\n'), ((8893, 8934), 'utils.tokenizer.Tokenizer', 'Tokenizer', (['args.bert_dir', 'args.lower_case'], {}), '(args.bert_dir, args.lower_case)\n', (8902, 8934), False, 'from utils.tokenizer import Tokenizer\n'), ((8955, 9008), 'utils.patch_handler.Patch_handler', 'Patch_handler', (['tokenizer.PATCH_EMPTY_ID', 'args.dir_del'], {}), '(tokenizer.PATCH_EMPTY_ID, args.dir_del)\n', (8968, 9008), False, 'from utils.patch_handler import Patch_handler\n'), ((9023, 9166), 'utils.recoder.Statistic', 'Statistic', (['log_dir', 'args.discriminating', 'args.detecting', 'args.correcting'], {'max_decode_step': 'args.max_decode_step', 'patch_handler': 'patch_handler'}), '(log_dir, args.discriminating, args.detecting, args.correcting,\n max_decode_step=args.max_decode_step, patch_handler=patch_handler)\n', (9032, 9166), False, 'from utils.recoder import Statistic\n'), ((9295, 9463), 'model.patcher.Patcher', 'Patcher', (['args.bert_dir'], {'discriminating': 'args.discriminating', 'detecting': 'args.detecting', 'correcting': 'args.correcting', 'use_crf': 'args.use_crf', 'use_lstm': 'args.use_lstm'}), '(args.bert_dir, discriminating=args.discriminating, detecting=args.\n detecting, correcting=args.correcting, use_crf=args.use_crf, use_lstm=\n args.use_lstm)\n', (9302, 9463), False, 'from model.patcher import Patcher\n'), ((9693, 9950), 'data_loader.Dataset', 'Dataset', (['args.train_file', 'args.batch_size'], {'inference': '(False)', 'tokenizer': 'tokenizer', 'discriminating': 'args.discriminating', 'detecting': 'args.detecting', 'correcting': 'args.correcting', 'dir_del': 'args.dir_del', 'only_wrong': 'args.only_wrong', 'truncate': 'args.truncate'}), '(args.train_file, args.batch_size, inference=False, tokenizer=\n tokenizer, discriminating=args.discriminating, detecting=args.detecting,\n correcting=args.correcting, dir_del=args.dir_del, only_wrong=args.\n only_wrong, truncate=args.truncate)\n', (9700, 9950), False, 'from data_loader import Dataset\n'), ((436, 465), 'logging.getLogger', 'logging.getLogger', (['"""train.py"""'], {}), "('train.py')\n", (453, 465), False, 'import logging\n'), ((8606, 8637), 'os.path.exists', 'os.path.exists', (['args.output_dir'], {}), '(args.output_dir)\n', (8620, 8637), False, 'import os\n'), ((8647, 8675), 'os.makedirs', 'os.makedirs', (['args.output_dir'], {}), '(args.output_dir)\n', (8658, 8675), False, 'import os\n'), ((10207, 10464), 'data_loader.Dataset', 'Dataset', (['args.valid_file', 'args.batch_size'], {'inference': '(False)', 'tokenizer': 'tokenizer', 'discriminating': 'args.discriminating', 'detecting': 'args.detecting', 'correcting': 'args.correcting', 'dir_del': 'args.dir_del', 'only_wrong': 'args.only_wrong', 'truncate': 'args.truncate'}), '(args.valid_file, args.batch_size, inference=False, tokenizer=\n tokenizer, discriminating=args.discriminating, detecting=args.detecting,\n correcting=args.correcting, dir_del=args.dir_del, only_wrong=args.\n only_wrong, truncate=args.truncate)\n', (10214, 10464), False, 'from data_loader import Dataset\n'), ((3225, 3254), 'torch.stack', 'torch.stack', (['patch_mid_states'], {}), '(patch_mid_states)\n', (3236, 3254), False, 'import torch\n'), ((8690, 8726), 'os.path.join', 'os.path.join', (['args.output_dir', '"""cmd"""'], {}), "(args.output_dir, 'cmd')\n", (8702, 8726), False, 'import os\n'), ((1618, 1656), 'torch.softmax', 'torch.softmax', (['labeling_output'], {'dim': '(-1)'}), '(labeling_output, dim=-1)\n', (1631, 1656), False, 'import torch\n'), ((5692, 5707), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (5705, 5707), False, 'import torch\n'), ((9640, 9668), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {}), '(model)\n', (9661, 9668), False, 'import torch\n'), ((3125, 3192), 'torch.mean', 'torch.mean', (['encoder_output[batch_idx, start_pos + 1:end_pos]'], {'dim': '(0)'}), '(encoder_output[batch_idx, start_pos + 1:end_pos], dim=0)\n', (3135, 3192), False, 'import torch\n')] |
#!/usr/bin/env python
import sys
import os
import subprocess
import string
def getData(lines, energies, meanf):
for line in lines:
if line.count(b'%%'):
print(line)
words=line.split()
energy = words[5].decode()
energies.append(energy.replace(',', ''))
if line.count(b'mean') and line.count(b'movable'):
words=line.decode("ascii").split(",")
meanf.append(words[0].split("(")[-1])
meanf.append(words[1])
meanf.append(words[2].split(")")[0])
print("{} {} {}".format(meanf[0],meanf[1],meanf[2]))
break
print("Test ProjectedMatrices...")
nargs=len(sys.argv)
mpicmd = sys.argv[1]+" "+sys.argv[2]+" "+sys.argv[3]
for i in range(4,nargs-6):
mpicmd = mpicmd + " "+sys.argv[i]
print("MPI run command: {}".format(mpicmd))
exe = sys.argv[nargs-6]
inp1 = sys.argv[nargs-5]
inp2 = sys.argv[nargs-4]
coords = sys.argv[nargs-3]
print("coordinates file: %s"%coords)
lrs = sys.argv[-2]
#create links to potentials files
dst = 'pseudo.D_ONCV_PBE_SG15'
src = sys.argv[-1] + '/' + dst
if not os.path.exists(dst):
print("Create link to %s"%dst)
os.symlink(src, dst)
#run quench
command1 = "{} {} -c {} -i {} -l {}".format(mpicmd,exe,inp1,coords,lrs)
print("Run command: {}".format(command1))
output1 = subprocess.check_output(command1,shell=True)
lines1=output1.split(b'\n')
#analyse output of quench1
energies1=[]
meanf1=[]
getData(lines1, energies1, meanf1)
#run 2nd quench
command2 = "{} {} -c {} -i {} -l {}".format(mpicmd,exe,inp2,coords,lrs)
print("Run command: {}".format(command2))
output2 = subprocess.check_output(command2,shell=True)
lines2=output2.split(b'\n')
#analyse output of quench2
energies2=[]
meanf2=[]
getData(lines2, energies2, meanf2)
if len(energies1) != len(energies2):
print("Energies1:")
print(energies1)
print("Energies2:")
print(energies2)
sys.exit(1)
print("Check energies...")
tol = 1.e-6
for i in range(len(energies1)):
energy1=eval(energies1[i])
energy2=eval(energies2[i])
diff=energy2-energy1
if abs(diff)>tol:
print("Energies differ: {} vs {} !!!".format(energies1[i],energies2[i]))
sys.exit(1)
print("Check mean forces...")
if (eval(meanf2[0])-eval(meanf1[0]))>tol:
print("mean F values in x-direction: {}, {}".format(meanf1[0],meanf2[0]))
sys.exit(1)
if (eval(meanf2[1])-eval(meanf1[1]))>tol:
print("mean F values in y-direction: {}, {}".format(meanf1[1],meanf2[1]))
sys.exit(1)
if (eval(meanf2[2])-eval(meanf1[2]))>tol:
print("mean F values in z-direction: {}, {}".format(meanf1[2],meanf2[2]))
sys.exit(1)
print("Test SUCCESSFUL!")
sys.exit(0)
| [
"subprocess.check_output",
"os.path.exists",
"os.symlink",
"sys.exit"
] | [((1288, 1333), 'subprocess.check_output', 'subprocess.check_output', (['command1'], {'shell': '(True)'}), '(command1, shell=True)\n', (1311, 1333), False, 'import subprocess\n'), ((1588, 1633), 'subprocess.check_output', 'subprocess.check_output', (['command2'], {'shell': '(True)'}), '(command2, shell=True)\n', (1611, 1633), False, 'import subprocess\n'), ((2601, 2612), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2609, 2612), False, 'import sys\n'), ((1074, 1093), 'os.path.exists', 'os.path.exists', (['dst'], {}), '(dst)\n', (1088, 1093), False, 'import os\n'), ((1130, 1150), 'os.symlink', 'os.symlink', (['src', 'dst'], {}), '(src, dst)\n', (1140, 1150), False, 'import os\n'), ((1869, 1880), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1877, 1880), False, 'import sys\n'), ((2298, 2309), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2306, 2309), False, 'import sys\n'), ((2430, 2441), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2438, 2441), False, 'import sys\n'), ((2562, 2573), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2570, 2573), False, 'import sys\n'), ((2135, 2146), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2143, 2146), False, 'import sys\n')] |
# bca4abm
# See full license in LICENSE.txt.
import logging
import os
import pandas as pd
import numpy as np
import openmatrix as omx
from activitysim.core import config
from activitysim.core import inject
from activitysim.core import tracing
from activitysim.core import pipeline
from bca4abm import bca4abm as bca
from ..util.misc import missing_columns, add_summary_results
logger = logging.getLogger(__name__)
"""
Aggregate trips processor
"""
@inject.injectable()
def aggregate_trips_spec():
return bca.read_assignment_spec('aggregate_trips.csv')
def read_aggregate_trips_manifest(data_dir, model_settings):
manifest_fname = model_settings.get('aggregate_trips_manifest', 'aggregate_data_manifest.csv')
fname = os.path.join(data_dir, manifest_fname)
# strings that might be empty and hence misconstrued as nans
converters = {
'toll_file_name': str,
'toll_units': str,
}
manifest = pd.read_csv(fname, header=0, comment='#', converters=converters)
column_map = "aggregate_trips_manifest_column_map"
if column_map in model_settings:
manifest.rename(columns=model_settings[column_map], inplace=True)
assert not missing_columns(manifest, list(model_settings[column_map].values()))
return manifest
def get_omx_matrix(matrix_dir, omx_file_name, omx_key, close_after_read=True):
if not omx_file_name:
return 0.0
# print "reading %s / %s '%s'" % (matrix_dir, omx_file_name, omx_key)
omx_file_name = os.path.join(matrix_dir, omx_file_name)
omx_file = omx.open_file(omx_file_name, 'r')
matrix = omx_file[omx_key][:, :]
if close_after_read:
# print "closing %s / %s '%s'" % (matrix_dir, omx_file_name, omx_key)
omx_file.close()
return matrix
@inject.step()
def aggregate_trips_processor(
aggregate_trips_spec,
settings, data_dir):
"""
Compute aggregate trips benefits
The data manifest contains a list of trip count files (one for base, one for build)
along with their their corresponding in-vehicle-time (ivt), operating cost (aoc),
and toll skims.
Since the skims are all aligned numpy arrays , we can express their benefit calculation as
vector computations in the aggregate_trips_spec
"""
trace_label = 'aggregate_trips'
model_settings = config.read_model_settings('aggregate_trips.yaml')
aggregate_trips_manifest = read_aggregate_trips_manifest(data_dir, model_settings)
logger.info("Running %s" % trace_label)
locals_dict = config.get_model_constants(model_settings)
locals_dict.update(config.setting('globals'))
results = None
for row in aggregate_trips_manifest.itertuples(index=True):
matrix_dir = os.path.join(data_dir, "base-data")
locals_dict['base_trips'] = \
get_omx_matrix(matrix_dir, row.trip_file_name, row.trip_table_name)
locals_dict['base_ivt'] = \
get_omx_matrix(matrix_dir, row.ivt_file_name, row.ivt_table_name)
locals_dict['base_aoc'] = \
get_omx_matrix(matrix_dir, row.aoc_file_name, row.aoc_table_name)
locals_dict['base_toll'] = \
get_omx_matrix(matrix_dir, row.toll_file_name, row.toll_table_name)
matrix_dir = os.path.join(data_dir, "build-data")
locals_dict['build_trips'] = \
get_omx_matrix(matrix_dir, row.trip_file_name, row.trip_table_name)
locals_dict['build_ivt'] = \
get_omx_matrix(matrix_dir, row.ivt_file_name, row.ivt_table_name)
locals_dict['build_aoc'] = \
get_omx_matrix(matrix_dir, row.aoc_file_name, row.aoc_table_name)
locals_dict['build_toll'] = \
get_omx_matrix(matrix_dir, row.toll_file_name, row.toll_table_name)
locals_dict['aoc_units'] = row.aoc_units
locals_dict['toll_units'] = row.toll_units
locals_dict['vot'] = row.vot
row_results = bca.scalar_assign_variables(assignment_expressions=aggregate_trips_spec,
locals_dict=locals_dict)
assigned_column_names = row_results.columns.values
row_results.insert(loc=0, column='description', value=row.description)
row_results.insert(loc=0, column='manifest_idx', value=row.Index)
if results is None:
results = row_results
else:
results = results.append(row_results, ignore_index=True)
results.reset_index(inplace=True)
add_summary_results(results, summary_column_names=assigned_column_names,
prefix='AT_', spec=aggregate_trips_spec)
# for troubleshooting, write table with benefits for each row in manifest
pipeline.replace_table("aggregate_trips_benefits", results)
| [
"logging.getLogger",
"bca4abm.bca4abm.scalar_assign_variables",
"activitysim.core.config.setting",
"pandas.read_csv",
"activitysim.core.config.get_model_constants",
"activitysim.core.pipeline.replace_table",
"os.path.join",
"activitysim.core.inject.injectable",
"openmatrix.open_file",
"activitysim.core.inject.step",
"bca4abm.bca4abm.read_assignment_spec",
"activitysim.core.config.read_model_settings"
] | [((390, 417), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (407, 417), False, 'import logging\n'), ((457, 476), 'activitysim.core.inject.injectable', 'inject.injectable', ([], {}), '()\n', (474, 476), False, 'from activitysim.core import inject\n'), ((1778, 1791), 'activitysim.core.inject.step', 'inject.step', ([], {}), '()\n', (1789, 1791), False, 'from activitysim.core import inject\n'), ((516, 563), 'bca4abm.bca4abm.read_assignment_spec', 'bca.read_assignment_spec', (['"""aggregate_trips.csv"""'], {}), "('aggregate_trips.csv')\n", (540, 563), True, 'from bca4abm import bca4abm as bca\n'), ((739, 777), 'os.path.join', 'os.path.join', (['data_dir', 'manifest_fname'], {}), '(data_dir, manifest_fname)\n', (751, 777), False, 'import os\n'), ((942, 1006), 'pandas.read_csv', 'pd.read_csv', (['fname'], {'header': '(0)', 'comment': '"""#"""', 'converters': 'converters'}), "(fname, header=0, comment='#', converters=converters)\n", (953, 1006), True, 'import pandas as pd\n'), ((1503, 1542), 'os.path.join', 'os.path.join', (['matrix_dir', 'omx_file_name'], {}), '(matrix_dir, omx_file_name)\n', (1515, 1542), False, 'import os\n'), ((1558, 1591), 'openmatrix.open_file', 'omx.open_file', (['omx_file_name', '"""r"""'], {}), "(omx_file_name, 'r')\n", (1571, 1591), True, 'import openmatrix as omx\n'), ((2338, 2388), 'activitysim.core.config.read_model_settings', 'config.read_model_settings', (['"""aggregate_trips.yaml"""'], {}), "('aggregate_trips.yaml')\n", (2364, 2388), False, 'from activitysim.core import config\n'), ((2541, 2583), 'activitysim.core.config.get_model_constants', 'config.get_model_constants', (['model_settings'], {}), '(model_settings)\n', (2567, 2583), False, 'from activitysim.core import config\n'), ((4698, 4757), 'activitysim.core.pipeline.replace_table', 'pipeline.replace_table', (['"""aggregate_trips_benefits"""', 'results'], {}), "('aggregate_trips_benefits', results)\n", (4720, 4757), False, 'from activitysim.core import pipeline\n'), ((2607, 2632), 'activitysim.core.config.setting', 'config.setting', (['"""globals"""'], {}), "('globals')\n", (2621, 2632), False, 'from activitysim.core import config\n'), ((2740, 2775), 'os.path.join', 'os.path.join', (['data_dir', '"""base-data"""'], {}), "(data_dir, 'base-data')\n", (2752, 2775), False, 'import os\n'), ((3261, 3297), 'os.path.join', 'os.path.join', (['data_dir', '"""build-data"""'], {}), "(data_dir, 'build-data')\n", (3273, 3297), False, 'import os\n'), ((3926, 4027), 'bca4abm.bca4abm.scalar_assign_variables', 'bca.scalar_assign_variables', ([], {'assignment_expressions': 'aggregate_trips_spec', 'locals_dict': 'locals_dict'}), '(assignment_expressions=aggregate_trips_spec,\n locals_dict=locals_dict)\n', (3953, 4027), True, 'from bca4abm import bca4abm as bca\n')] |
from __future__ import annotations
from uuid import UUID
from magiclinks.utils import generate_timeflake, get_url_path
def test_generate_timeflake():
assert isinstance(generate_timeflake(), UUID)
assert generate_timeflake() != generate_timeflake()
def test_get_url_path_with_name():
url_name = 'no_login'
url = get_url_path(url_name)
assert url == '/no-login/'
def test_get_url_path_with_path():
url_name = '/test/'
url = get_url_path(url_name)
assert url == '/test/'
| [
"magiclinks.utils.get_url_path",
"magiclinks.utils.generate_timeflake"
] | [((333, 355), 'magiclinks.utils.get_url_path', 'get_url_path', (['url_name'], {}), '(url_name)\n', (345, 355), False, 'from magiclinks.utils import generate_timeflake, get_url_path\n'), ((458, 480), 'magiclinks.utils.get_url_path', 'get_url_path', (['url_name'], {}), '(url_name)\n', (470, 480), False, 'from magiclinks.utils import generate_timeflake, get_url_path\n'), ((176, 196), 'magiclinks.utils.generate_timeflake', 'generate_timeflake', ([], {}), '()\n', (194, 196), False, 'from magiclinks.utils import generate_timeflake, get_url_path\n'), ((215, 235), 'magiclinks.utils.generate_timeflake', 'generate_timeflake', ([], {}), '()\n', (233, 235), False, 'from magiclinks.utils import generate_timeflake, get_url_path\n'), ((239, 259), 'magiclinks.utils.generate_timeflake', 'generate_timeflake', ([], {}), '()\n', (257, 259), False, 'from magiclinks.utils import generate_timeflake, get_url_path\n')] |
import unittest
# Below is the interface for Iterator, which is already defined for you.
class Iterator:
def __init__(self, nums):
"""
Initializes an iterator object to the beginning of a list.
:type nums: List[int]
"""
self.nums = nums
self.curr = -1
def hasNext(self):
"""
Returns true if the iteration has more elements.
:rtype: bool
"""
return self.curr + 1 < len(self.nums)
def next(self):
"""
Returns the next element in the iteration.
:rtype: int
"""
self.curr += 1
return self.nums[self.curr]
class PeekingIterator:
def __init__(self, iterator):
"""
Initialize your data structure here.
:type iterator: Iterator
"""
self.it = iterator
# We need to introduce a hasNext variable if None is a valid item in the iterator
self.next_item = iterator.next() if iterator.hasNext() else None
def peek(self):
"""
Returns the next element in the iteration without advancing the iterator.
:rtype: int
"""
return self.next_item
def next(self):
"""
:rtype: int
"""
next_item = self.next_item
self.next_item = self.it.next() if self.it.hasNext() else None
return next_item
def hasNext(self):
"""
:rtype: bool
"""
return self.next_item is not None
# Your PeekingIterator object will be instantiated and called as such:
# iter = PeekingIterator(Iterator(nums))
# while iter.hasNext():
# val = iter.peek() # Get the next element but not advance the iterator.
# iter.next() # Should return the same value as [val].
class Test(unittest.TestCase):
def test(self):
self._test([0, 1, 2, 3])
def _test(self, nums):
it = PeekingIterator(Iterator(nums))
for num in nums:
self.assertEqual(True, it.hasNext())
self.assertEqual(num, it.peek())
self.assertEqual(num, it.next())
self.assertEqual(False, it.hasNext())
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
] | [((2175, 2190), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2188, 2190), False, 'import unittest\n')] |
import pyautogui as gui
from pyautogui import Window
import logging
from helpers.location import get_button_location
from time import sleep
def leave_meeting(window: Window) -> None:
l_coords = get_button_location("icons/leave_button", region=(0, 49, 1919, 74))
if l_coords is None:
logging.warning("Could not find leave button location! Attempting to force quit...")
window.close()
if window.isActive: # There can be a confirmation box telling if you want to leave
leave_confirm_coords = get_button_location("icons/confirm_leave")
if leave_confirm_coords is None:
raise ValueError("Could not find the confirmation of leave button!")
lc_x, lc_y = leave_confirm_coords
gui.click(lc_x, lc_y)
return
l_x, l_y = l_coords
gui.click(l_x, l_y)
def close_participants_tab(x: int, y: int) -> None:
logging.debug('attempting to close participant list')
gui.click(x, y)
gui.move(100, 100, 0)
def open_participants_tab(x: int, y: int) -> None:
logging.debug('attempting to open participant list')
gui.click(x, y) # Click on it to view members, etc
sleep(0.8) # sleep to let the list render
| [
"helpers.location.get_button_location",
"pyautogui.move",
"logging.debug",
"logging.warning",
"time.sleep",
"pyautogui.click"
] | [((202, 269), 'helpers.location.get_button_location', 'get_button_location', (['"""icons/leave_button"""'], {'region': '(0, 49, 1919, 74)'}), "('icons/leave_button', region=(0, 49, 1919, 74))\n", (221, 269), False, 'from helpers.location import get_button_location\n'), ((836, 855), 'pyautogui.click', 'gui.click', (['l_x', 'l_y'], {}), '(l_x, l_y)\n', (845, 855), True, 'import pyautogui as gui\n'), ((914, 967), 'logging.debug', 'logging.debug', (['"""attempting to close participant list"""'], {}), "('attempting to close participant list')\n", (927, 967), False, 'import logging\n'), ((972, 987), 'pyautogui.click', 'gui.click', (['x', 'y'], {}), '(x, y)\n', (981, 987), True, 'import pyautogui as gui\n'), ((992, 1013), 'pyautogui.move', 'gui.move', (['(100)', '(100)', '(0)'], {}), '(100, 100, 0)\n', (1000, 1013), True, 'import pyautogui as gui\n'), ((1071, 1123), 'logging.debug', 'logging.debug', (['"""attempting to open participant list"""'], {}), "('attempting to open participant list')\n", (1084, 1123), False, 'import logging\n'), ((1128, 1143), 'pyautogui.click', 'gui.click', (['x', 'y'], {}), '(x, y)\n', (1137, 1143), True, 'import pyautogui as gui\n'), ((1184, 1194), 'time.sleep', 'sleep', (['(0.8)'], {}), '(0.8)\n', (1189, 1194), False, 'from time import sleep\n'), ((303, 392), 'logging.warning', 'logging.warning', (['"""Could not find leave button location! Attempting to force quit..."""'], {}), "(\n 'Could not find leave button location! Attempting to force quit...')\n", (318, 392), False, 'import logging\n'), ((538, 580), 'helpers.location.get_button_location', 'get_button_location', (['"""icons/confirm_leave"""'], {}), "('icons/confirm_leave')\n", (557, 580), False, 'from helpers.location import get_button_location\n'), ((770, 791), 'pyautogui.click', 'gui.click', (['lc_x', 'lc_y'], {}), '(lc_x, lc_y)\n', (779, 791), True, 'import pyautogui as gui\n')] |
### Truss Interactive Report Functions ###
### April 2021 ###
# Import packages
import os
import glob
import pandas as pd
import ipywidgets as widgets
# 1. Function to get trades
def Get_Trades():
# Get files names
all_files = glob.glob('*.csv')
# Remove receipt files
all_files.remove('BR Receipt.csv')
all_files.remove('US Futures Receipt.csv')
all_files.remove('US Equities Receipt.csv')
all_files.remove('All Trades Receipt.csv')
# Create list of dfs
list_of_dfs = [pd.read_csv(filename, index_col=None, error_bad_lines=False) for filename in all_files]
# Return output
return(list_of_dfs)
# 2. Set table negative values as red
def color_negative_red(x):
color = 'red' if x < 0 else 'black'
return 'color: %s' % color
# 3. Create submit button
def Button_Sumbmit():
# Create button
output = widgets.Button(description='Submmit',
tooltip = 'OK',
icon='check',
button_style = 'success')
# Return output
return(output)
# 5. Create clean button
def Button_Clean():
# Create button
output = widgets.Button(description='Clean',
tooltip = 'OK',
icon='remove',
button_style = 'danger')
# Return output
return(output)
# 6. Display red and bold text
def prRed(skk): print('\033[1m' + "\033[91m {}\033[00m" .format(skk))
| [
"ipywidgets.Button",
"glob.glob",
"pandas.read_csv"
] | [((238, 256), 'glob.glob', 'glob.glob', (['"""*.csv"""'], {}), "('*.csv')\n", (247, 256), False, 'import glob\n'), ((876, 969), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Submmit"""', 'tooltip': '"""OK"""', 'icon': '"""check"""', 'button_style': '"""success"""'}), "(description='Submmit', tooltip='OK', icon='check',\n button_style='success')\n", (890, 969), True, 'import ipywidgets as widgets\n'), ((1194, 1285), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Clean"""', 'tooltip': '"""OK"""', 'icon': '"""remove"""', 'button_style': '"""danger"""'}), "(description='Clean', tooltip='OK', icon='remove',\n button_style='danger')\n", (1208, 1285), True, 'import ipywidgets as widgets\n'), ((515, 575), 'pandas.read_csv', 'pd.read_csv', (['filename'], {'index_col': 'None', 'error_bad_lines': '(False)'}), '(filename, index_col=None, error_bad_lines=False)\n', (526, 575), True, 'import pandas as pd\n')] |
#!/usr/bin/env python
import time
import cv2
if __name__ == '__main__' :
# Start default camera
video = cv2.VideoCapture("test.MOV");
# Find OpenCV version
(major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
# With webcam get(CV_CAP_PROP_FPS) does not work.
# Let's see for ourselves.
if int(major_ver) < 3 :
fps = video.get(cv2.cv.CV_CAP_PROP_FPS)
print(f"Frames per second using video.get(cv2.cv.CV_CAP_PROP_FPS): {fps}")
else :
fps = video.get(cv2.CAP_PROP_FPS)
print(f"Frames per second using video.get(cv2.CAP_PROP_FPS) : {fps}")
print(fps)
count = 0
start = time.time()
while video.isOpened():
ret, frame = video.read()
if ret:
count += 1
else:
break
end = time.time()
elapsed = end - start
# Release video
video.release()
print(f"Total number of frame: {count}, elapsed time: {elapsed}")
print(f"fps: {count//elapsed}") | [
"cv2.__version__.split",
"time.time",
"cv2.VideoCapture"
] | [((115, 143), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""test.MOV"""'], {}), "('test.MOV')\n", (131, 143), False, 'import cv2\n'), ((215, 241), 'cv2.__version__.split', 'cv2.__version__.split', (['"""."""'], {}), "('.')\n", (236, 241), False, 'import cv2\n'), ((665, 676), 'time.time', 'time.time', ([], {}), '()\n', (674, 676), False, 'import time\n'), ((825, 836), 'time.time', 'time.time', ([], {}), '()\n', (834, 836), False, 'import time\n')] |
import math
import random
from collections import namedtuple
from itertools import count
import gym
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from torch.autograd import Variable
env = gym.make('CartPole-v0')
state_dim = env.observation_space.shape[0]
out_dim = env.action_space.n
Transition = namedtuple('Transition',
('state', 'action', 'next_state', 'reward'))
BATCH_SIZE = 32
GAMMA = 0.9
INITIAL_EPSILON = 0.5
FINAL_EPSILON = 0.01
CAPACITY = 10000
torch.manual_seed(1234)
use_cuda = torch.cuda.is_available()
if use_cuda:
torch.cuda.manual_seed(1234)
if use_cuda:
byteTensor = torch.cuda.ByteTensor
tensor = torch.cuda.FloatTensor
longTensor = torch.cuda.LongTensor
else:
byteTensor = torch.ByteTensor
tensor = torch.Tensor
longTensor = torch.LongTensor
class DQN(nn.Module):
def __init__(self, state_dim, out_dim, capacity, bsz, epsilon):
super().__init__()
self.steps_done = 0
self.position = 0
self.pool = []
self.capacity = capacity
self.bsz = bsz
self.epsilon = epsilon
self.fc1 = nn.Linear(state_dim, 32)
self.fc2 = nn.Linear(32, out_dim)
self.fc1.weight.data.uniform_(-.1, .1)
self.fc2.weight.data.uniform_(-.1, .1)
def forward(self, x):
x = F.relu(self.fc1(x))
return self.fc2(x)
def action(self, state):
self.epsilon -= (INITIAL_EPSILON - FINAL_EPSILON) / 10000
if random.random() > self.epsilon:
return self(Variable(state, volatile=True)).data.max(1)[1].view(1, 1)
else:
return longTensor([[random.randrange(2)]])
def push(self, *args):
if len(self) < self.capacity:
self.pool.append(None)
self.pool[self.position] = Transition(*args)
self.position = (self.position + 1) % self.capacity
def sample(self):
return random.sample(self.pool, self.bsz)
def __len__(self):
return len(self.pool)
dqn = DQN(state_dim, out_dim, CAPACITY, BATCH_SIZE, INITIAL_EPSILON)
if use_cuda:
dqn = dqn.cuda()
optimizer = optim.Adam(dqn.parameters(), lr=0.0001)
def optimize_model():
if len(dqn) < BATCH_SIZE:
return
transitions = dqn.sample()
batch = Transition(*zip(*transitions))
non_final_mask = byteTensor(
tuple(map(lambda x: x is not None, batch.next_state)))
non_final_next_states = Variable(
torch.cat([s for s in batch.next_state if s is not None]), volatile=True)
next_state_values = Variable(torch.zeros(BATCH_SIZE).type(tensor))
next_state_values[non_final_mask] = dqn(non_final_next_states).max(1)[0]
next_state_values.volatile = False
state_batch = Variable(torch.cat(batch.state))
action_batch = Variable(torch.cat(batch.action))
reward_batch = Variable(torch.cat(batch.reward))
state_action_values = dqn(state_batch).gather(1, action_batch)
expected_state_action_values = (next_state_values * GAMMA) + reward_batch
loss = F.smooth_l1_loss(state_action_values, expected_state_action_values)
optimizer.zero_grad()
loss.backward()
optimizer.step()
perfect = 0
for _ in range(10000):
state = env.reset()
state = torch.from_numpy(state).type(tensor).view(1, -1)
for t in count():
action = dqn.action(state)
next_state, reward, done, _ = env.step(action[0, 0])
next_state = torch.from_numpy(
next_state).type(tensor).view(1, -1)
if done:
next_state = None
reward = tensor([reward])
dqn.push(state, action, next_state, reward)
state = next_state
optimize_model()
if done:
if t > perfect:
print(t)
perfect = t
break
| [
"torch.manual_seed",
"random.sample",
"collections.namedtuple",
"random.randrange",
"torch.from_numpy",
"torch.nn.functional.smooth_l1_loss",
"random.random",
"torch.cuda.is_available",
"itertools.count",
"torch.nn.Linear",
"torch.zeros",
"torch.cuda.manual_seed",
"torch.autograd.Variable",
"gym.make",
"torch.cat"
] | [((240, 263), 'gym.make', 'gym.make', (['"""CartPole-v0"""'], {}), "('CartPole-v0')\n", (248, 263), False, 'import gym\n'), ((351, 420), 'collections.namedtuple', 'namedtuple', (['"""Transition"""', "('state', 'action', 'next_state', 'reward')"], {}), "('Transition', ('state', 'action', 'next_state', 'reward'))\n", (361, 420), False, 'from collections import namedtuple\n'), ((535, 558), 'torch.manual_seed', 'torch.manual_seed', (['(1234)'], {}), '(1234)\n', (552, 558), False, 'import torch\n'), ((570, 595), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (593, 595), False, 'import torch\n'), ((613, 641), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['(1234)'], {}), '(1234)\n', (635, 641), False, 'import torch\n'), ((3075, 3142), 'torch.nn.functional.smooth_l1_loss', 'F.smooth_l1_loss', (['state_action_values', 'expected_state_action_values'], {}), '(state_action_values, expected_state_action_values)\n', (3091, 3142), True, 'import torch.nn.functional as F\n'), ((3347, 3354), 'itertools.count', 'count', ([], {}), '()\n', (3352, 3354), False, 'from itertools import count\n'), ((1173, 1197), 'torch.nn.Linear', 'nn.Linear', (['state_dim', '(32)'], {}), '(state_dim, 32)\n', (1182, 1197), True, 'import torch.nn as nn\n'), ((1217, 1239), 'torch.nn.Linear', 'nn.Linear', (['(32)', 'out_dim'], {}), '(32, out_dim)\n', (1226, 1239), True, 'import torch.nn as nn\n'), ((1965, 1999), 'random.sample', 'random.sample', (['self.pool', 'self.bsz'], {}), '(self.pool, self.bsz)\n', (1978, 1999), False, 'import random\n'), ((2497, 2554), 'torch.cat', 'torch.cat', (['[s for s in batch.next_state if s is not None]'], {}), '([s for s in batch.next_state if s is not None])\n', (2506, 2554), False, 'import torch\n'), ((2786, 2808), 'torch.cat', 'torch.cat', (['batch.state'], {}), '(batch.state)\n', (2795, 2808), False, 'import torch\n'), ((2838, 2861), 'torch.cat', 'torch.cat', (['batch.action'], {}), '(batch.action)\n', (2847, 2861), False, 'import torch\n'), ((2891, 2914), 'torch.cat', 'torch.cat', (['batch.reward'], {}), '(batch.reward)\n', (2900, 2914), False, 'import torch\n'), ((1530, 1545), 'random.random', 'random.random', ([], {}), '()\n', (1543, 1545), False, 'import random\n'), ((2604, 2627), 'torch.zeros', 'torch.zeros', (['BATCH_SIZE'], {}), '(BATCH_SIZE)\n', (2615, 2627), False, 'import torch\n'), ((3285, 3308), 'torch.from_numpy', 'torch.from_numpy', (['state'], {}), '(state)\n', (3301, 3308), False, 'import torch\n'), ((1690, 1709), 'random.randrange', 'random.randrange', (['(2)'], {}), '(2)\n', (1706, 1709), False, 'import random\n'), ((3473, 3501), 'torch.from_numpy', 'torch.from_numpy', (['next_state'], {}), '(next_state)\n', (3489, 3501), False, 'import torch\n'), ((1586, 1616), 'torch.autograd.Variable', 'Variable', (['state'], {'volatile': '(True)'}), '(state, volatile=True)\n', (1594, 1616), False, 'from torch.autograd import Variable\n')] |
"""schlag package setup.
"""
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="schlag",
version="0.0.1",
author="<NAME>",
author_email="<EMAIL>",
description="Schlager lyrics generation using transformers.",
long_description=long_description,
long_description_content_type="text/markdown",
packages=["schlag"],
python_requires=">=3.8",
)
| [
"setuptools.setup"
] | [((117, 414), 'setuptools.setup', 'setuptools.setup', ([], {'name': '"""schlag"""', 'version': '"""0.0.1"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'description': '"""Schlager lyrics generation using transformers."""', 'long_description': 'long_description', 'long_description_content_type': '"""text/markdown"""', 'packages': "['schlag']", 'python_requires': '""">=3.8"""'}), "(name='schlag', version='0.0.1', author='<NAME>',\n author_email='<EMAIL>', description=\n 'Schlager lyrics generation using transformers.', long_description=\n long_description, long_description_content_type='text/markdown',\n packages=['schlag'], python_requires='>=3.8')\n", (133, 414), False, 'import setuptools\n')] |
from datetime import datetime
from flask import request
from flask_utils.error_handler import handle_not_found
from flask_utils.ms_request import reset_entity_cache
from flask_utils.search_params import parse_search_params
from . import logger
class GenericResourceClass:
def __init__(
self, repository, schema, list_schema, name, entity_name_key, result_key
):
self.repository = repository
self.schema = schema
self.list_schema = list_schema
self.name = name
self.entity_name_key = entity_name_key
self.result_key = result_key
def get(self, entity_id, **kwargs):
entity = self.repository.get(entity_id, **kwargs)
handle_not_found(entity, self.name, entity_id)
return self.schema.dump(entity)
def delete(self, entity_id, validator=None):
entity = self.repository.get(entity_id)
handle_not_found(entity, self.name, entity_id)
if validator is not None:
validator.validate(entity)
self.repository.delete(entity_id)
self.log_entity_change(
entity_id, getattr(entity, self.entity_name_key, ""), entity, "deleted"
)
reset_entity_cache(name=self.name, _id=entity_id)
return None, 204
def update(self, entity_id, validator=None, **kwargs):
entity_json = request.get_json()
for key, value in kwargs.items():
entity_json[key] = value
updated_entity = self.schema.load(entity_json)
existing_entity = self.repository.get(entity_id)
handle_not_found(existing_entity, self.name, entity_id)
updated_entity.updated_at = datetime.now()
if validator is not None:
validator.validate(updated_entity, existing_entity)
self.repository.update(entity_id, updated_entity)
self.log_entity_change(
entity_id,
getattr(existing_entity, self.entity_name_key, ""),
updated_entity,
"updated",
)
reset_entity_cache(name=self.name, _id=entity_id)
return None, 204
def create(self, validator=None, **kwargs):
entity_json = request.get_json()
for key, value in kwargs.items():
entity_json[key] = value
new_entity = self.schema.load(entity_json)
new_entity.created_at = datetime.now()
new_entity.updated_at = datetime.now()
if validator is not None:
validator.validate(new_entity)
created_entity = self.repository.create(new_entity)
self.log_entity_change(
created_entity.id,
getattr(created_entity, self.entity_name_key, ""),
created_entity,
"created",
)
return created_entity.id, 201
def search(self, **kwargs):
search_term, page, items_per_page, only_active = parse_search_params(request)
entities, count = self.repository.search(
search_term=search_term,
page=page,
items_per_page=items_per_page,
only_active=only_active,
**kwargs
)
result = {"count": count, self.result_key: self.list_schema.dump(entities)}
return result
def log_entity_change(self, entity_id, entity_name, new_entity, action):
username = request.headers.get("username", default="")
logger.log_info(
"{} {} ({}) {} by {}".format(
self.name, entity_id, entity_name, action, username
),
{"id": entity_id, "data": self.schema.dump(new_entity)},
)
| [
"flask_utils.error_handler.handle_not_found",
"flask_utils.search_params.parse_search_params",
"flask_utils.ms_request.reset_entity_cache",
"datetime.datetime.now",
"flask.request.get_json",
"flask.request.headers.get"
] | [((704, 750), 'flask_utils.error_handler.handle_not_found', 'handle_not_found', (['entity', 'self.name', 'entity_id'], {}), '(entity, self.name, entity_id)\n', (720, 750), False, 'from flask_utils.error_handler import handle_not_found\n'), ((897, 943), 'flask_utils.error_handler.handle_not_found', 'handle_not_found', (['entity', 'self.name', 'entity_id'], {}), '(entity, self.name, entity_id)\n', (913, 943), False, 'from flask_utils.error_handler import handle_not_found\n'), ((1193, 1242), 'flask_utils.ms_request.reset_entity_cache', 'reset_entity_cache', ([], {'name': 'self.name', '_id': 'entity_id'}), '(name=self.name, _id=entity_id)\n', (1211, 1242), False, 'from flask_utils.ms_request import reset_entity_cache\n'), ((1350, 1368), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1366, 1368), False, 'from flask import request\n'), ((1568, 1623), 'flask_utils.error_handler.handle_not_found', 'handle_not_found', (['existing_entity', 'self.name', 'entity_id'], {}), '(existing_entity, self.name, entity_id)\n', (1584, 1623), False, 'from flask_utils.error_handler import handle_not_found\n'), ((1660, 1674), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1672, 1674), False, 'from datetime import datetime\n'), ((2019, 2068), 'flask_utils.ms_request.reset_entity_cache', 'reset_entity_cache', ([], {'name': 'self.name', '_id': 'entity_id'}), '(name=self.name, _id=entity_id)\n', (2037, 2068), False, 'from flask_utils.ms_request import reset_entity_cache\n'), ((2165, 2183), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2181, 2183), False, 'from flask import request\n'), ((2346, 2360), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2358, 2360), False, 'from datetime import datetime\n'), ((2393, 2407), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2405, 2407), False, 'from datetime import datetime\n'), ((2860, 2888), 'flask_utils.search_params.parse_search_params', 'parse_search_params', (['request'], {}), '(request)\n', (2879, 2888), False, 'from flask_utils.search_params import parse_search_params\n'), ((3313, 3356), 'flask.request.headers.get', 'request.headers.get', (['"""username"""'], {'default': '""""""'}), "('username', default='')\n", (3332, 3356), False, 'from flask import request\n')] |
##############################################################################
#
# Copyright (c) 2008 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Visible Source
# License, Version 1.0 (ZVSL). A copy of the ZVSL should accompany this
# distribution.
#
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""tracelog tests
"""
__docformat__ = "reStructuredText"
import datetime
import doctest
import os
import re
import unittest
import manuel.doctest
import manuel.footnote
import manuel.testing
import zope.testing.renormalizing
from zc.zservertracelog.fseek import FSeekTest # noqa
from zc.zservertracelog.tracereport import seconds_difference
here = os.path.dirname(os.path.abspath(__file__))
optionflags = (
doctest.NORMALIZE_WHITESPACE
| doctest.ELLIPSIS
| doctest.REPORT_ONLY_FIRST_FAILURE
)
checker = zope.testing.renormalizing.RENormalizing([
# normalize the channel id and iso8601 timestamp
(re.compile(r'-?\d+ \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{6}'),
'23418928 2008-08-26 10:55:00.000000'),
(re.compile(r'^usage: '), 'Usage: '),
(re.compile(r'options:'), 'Options:'),
])
def _null_app(environ, start_response):
pass
class FauxApplication(object):
"""Fake WSGI application. Doesn't need to do much!"""
app_hook = None
def __call__(self, environ, start_response):
app = self.app_hook or _null_app
return app(environ, start_response)
class TestHelpers(unittest.TestCase):
def test_seconds_difference(self):
dt1 = datetime.datetime(2019, 2, 23, 14, 5, 54, 451)
dt2 = dt1 + datetime.timedelta(minutes=15, seconds=3, microseconds=42)
self.assertEqual(seconds_difference(dt2, dt1), 15 * 60 + 3 + 0.000042)
def setUp(test):
test.globs['FauxApplication'] = FauxApplication
def analysis_setUp(test):
test.globs['sample_log'] = here + '/samples/trace.log'
def test_suite():
m = manuel.doctest.Manuel(
optionflags=optionflags,
checker=checker,
)
m += manuel.footnote.Manuel()
return unittest.TestSuite([
manuel.testing.TestSuite(m, 'README.rst', setUp=setUp),
doctest.DocFileTest(
'tracereport.rst',
checker=checker,
setUp=analysis_setUp),
unittest.defaultTestLoader.loadTestsFromName(__name__),
])
| [
"datetime.datetime",
"re.compile",
"doctest.DocFileTest",
"zc.zservertracelog.tracereport.seconds_difference",
"os.path.abspath",
"datetime.timedelta",
"unittest.defaultTestLoader.loadTestsFromName"
] | [((997, 1022), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1012, 1022), False, 'import os\n'), ((1844, 1890), 'datetime.datetime', 'datetime.datetime', (['(2019)', '(2)', '(23)', '(14)', '(5)', '(54)', '(451)'], {}), '(2019, 2, 23, 14, 5, 54, 451)\n', (1861, 1890), False, 'import datetime\n'), ((1251, 1320), 're.compile', 're.compile', (['"""-?\\\\d+ \\\\d{4}-\\\\d{2}-\\\\d{2} \\\\d{2}:\\\\d{2}:\\\\d{2}.\\\\d{6}"""'], {}), "('-?\\\\d+ \\\\d{4}-\\\\d{2}-\\\\d{2} \\\\d{2}:\\\\d{2}:\\\\d{2}.\\\\d{6}')\n", (1261, 1320), False, 'import re\n'), ((1368, 1390), 're.compile', 're.compile', (['"""^usage: """'], {}), "('^usage: ')\n", (1378, 1390), False, 'import re\n'), ((1410, 1432), 're.compile', 're.compile', (['"""options:"""'], {}), "('options:')\n", (1420, 1432), False, 'import re\n'), ((1911, 1969), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': '(15)', 'seconds': '(3)', 'microseconds': '(42)'}), '(minutes=15, seconds=3, microseconds=42)\n', (1929, 1969), False, 'import datetime\n'), ((1995, 2023), 'zc.zservertracelog.tracereport.seconds_difference', 'seconds_difference', (['dt2', 'dt1'], {}), '(dt2, dt1)\n', (2013, 2023), False, 'from zc.zservertracelog.tracereport import seconds_difference\n'), ((2460, 2537), 'doctest.DocFileTest', 'doctest.DocFileTest', (['"""tracereport.rst"""'], {'checker': 'checker', 'setUp': 'analysis_setUp'}), "('tracereport.rst', checker=checker, setUp=analysis_setUp)\n", (2479, 2537), False, 'import doctest\n'), ((2584, 2638), 'unittest.defaultTestLoader.loadTestsFromName', 'unittest.defaultTestLoader.loadTestsFromName', (['__name__'], {}), '(__name__)\n', (2628, 2638), False, 'import unittest\n')] |
import pytest
import datetime
from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest
from swaptacular_debtor import procedures
def test_create_debtor(db_session):
debtor = procedures.create_debtor(user_id=666)
debtor = Debtor.query.filter_by(debtor_id=debtor.debtor_id).one()
assert len(debtor.operator_list) == 1
assert len(debtor.branch_list) == 1
assert len(debtor.coordinator_list) == 1
assert len(debtor.account_list) == 1
def test_prepare_direct_transfer(db_session):
@db.execute_atomic
def transfer():
debtor = procedures.create_debtor(user_id=666)
account = Account(debtor=debtor, creditor_id=777, balance=2000, avl_balance=2000)
assert account in db_session
db_session.add(account)
assert account in db_session
return procedures.prepare_direct_transfer(account, 888, 1500)
assert transfer.amount == 1500
with pytest.raises(procedures.InsufficientFunds):
procedures.prepare_direct_transfer((transfer.debtor_id, transfer.sender_creditor_id), 888, 1500)
@db.atomic
def test_get_account(db_session):
debtor = procedures.create_debtor(user_id=666)
account = procedures._get_account((debtor.debtor_id, 777))
assert account
assert account.balance == 0
assert procedures._get_account((debtor.debtor_id, 777))
account.balance = 10
a = procedures._get_account(account)
assert a.balance == 10
def test_cancel_prepared_transfer(db_session):
debtor = procedures.create_debtor(user_id=666)
debtor = Debtor.query.filter_by(debtor_id=debtor.debtor_id).one()
account = Account(debtor=debtor, creditor_id=777, balance=3000, avl_balance=3000)
db_session.add(account)
transfer = procedures.prepare_direct_transfer(account, recipient_creditor_id=888, amount=500)
a = Account.query.filter_by(debtor_id=debtor.debtor_id, creditor_id=777).one()
assert a.balance == 3000
assert a.avl_balance == 2500
procedures.cancel_creditor_prepared_transfer(transfer)
a = Account.query.filter_by(debtor_id=debtor.debtor_id, creditor_id=777).one()
assert a.balance == 3000
assert a.avl_balance == 3000
with pytest.raises(procedures.InvalidPreparedTransfer):
procedures.cancel_creditor_prepared_transfer(transfer)
| [
"swaptacular_debtor.procedures.prepare_direct_transfer",
"swaptacular_debtor.procedures._get_account",
"swaptacular_debtor.models.Account",
"swaptacular_debtor.models.Debtor.query.filter_by",
"pytest.raises",
"swaptacular_debtor.procedures.create_debtor",
"swaptacular_debtor.procedures.cancel_creditor_prepared_transfer",
"swaptacular_debtor.models.Account.query.filter_by"
] | [((212, 249), 'swaptacular_debtor.procedures.create_debtor', 'procedures.create_debtor', ([], {'user_id': '(666)'}), '(user_id=666)\n', (236, 249), False, 'from swaptacular_debtor import procedures\n'), ((1154, 1191), 'swaptacular_debtor.procedures.create_debtor', 'procedures.create_debtor', ([], {'user_id': '(666)'}), '(user_id=666)\n', (1178, 1191), False, 'from swaptacular_debtor import procedures\n'), ((1206, 1254), 'swaptacular_debtor.procedures._get_account', 'procedures._get_account', (['(debtor.debtor_id, 777)'], {}), '((debtor.debtor_id, 777))\n', (1229, 1254), False, 'from swaptacular_debtor import procedures\n'), ((1317, 1365), 'swaptacular_debtor.procedures._get_account', 'procedures._get_account', (['(debtor.debtor_id, 777)'], {}), '((debtor.debtor_id, 777))\n', (1340, 1365), False, 'from swaptacular_debtor import procedures\n'), ((1399, 1431), 'swaptacular_debtor.procedures._get_account', 'procedures._get_account', (['account'], {}), '(account)\n', (1422, 1431), False, 'from swaptacular_debtor import procedures\n'), ((1521, 1558), 'swaptacular_debtor.procedures.create_debtor', 'procedures.create_debtor', ([], {'user_id': '(666)'}), '(user_id=666)\n', (1545, 1558), False, 'from swaptacular_debtor import procedures\n'), ((1643, 1714), 'swaptacular_debtor.models.Account', 'Account', ([], {'debtor': 'debtor', 'creditor_id': '(777)', 'balance': '(3000)', 'avl_balance': '(3000)'}), '(debtor=debtor, creditor_id=777, balance=3000, avl_balance=3000)\n', (1650, 1714), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n'), ((1758, 1844), 'swaptacular_debtor.procedures.prepare_direct_transfer', 'procedures.prepare_direct_transfer', (['account'], {'recipient_creditor_id': '(888)', 'amount': '(500)'}), '(account, recipient_creditor_id=888,\n amount=500)\n', (1792, 1844), False, 'from swaptacular_debtor import procedures\n'), ((1990, 2044), 'swaptacular_debtor.procedures.cancel_creditor_prepared_transfer', 'procedures.cancel_creditor_prepared_transfer', (['transfer'], {}), '(transfer)\n', (2034, 2044), False, 'from swaptacular_debtor import procedures\n'), ((596, 633), 'swaptacular_debtor.procedures.create_debtor', 'procedures.create_debtor', ([], {'user_id': '(666)'}), '(user_id=666)\n', (620, 633), False, 'from swaptacular_debtor import procedures\n'), ((652, 723), 'swaptacular_debtor.models.Account', 'Account', ([], {'debtor': 'debtor', 'creditor_id': '(777)', 'balance': '(2000)', 'avl_balance': '(2000)'}), '(debtor=debtor, creditor_id=777, balance=2000, avl_balance=2000)\n', (659, 723), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n'), ((845, 899), 'swaptacular_debtor.procedures.prepare_direct_transfer', 'procedures.prepare_direct_transfer', (['account', '(888)', '(1500)'], {}), '(account, 888, 1500)\n', (879, 899), False, 'from swaptacular_debtor import procedures\n'), ((944, 987), 'pytest.raises', 'pytest.raises', (['procedures.InsufficientFunds'], {}), '(procedures.InsufficientFunds)\n', (957, 987), False, 'import pytest\n'), ((997, 1098), 'swaptacular_debtor.procedures.prepare_direct_transfer', 'procedures.prepare_direct_transfer', (['(transfer.debtor_id, transfer.sender_creditor_id)', '(888)', '(1500)'], {}), '((transfer.debtor_id, transfer.\n sender_creditor_id), 888, 1500)\n', (1031, 1098), False, 'from swaptacular_debtor import procedures\n'), ((2199, 2248), 'pytest.raises', 'pytest.raises', (['procedures.InvalidPreparedTransfer'], {}), '(procedures.InvalidPreparedTransfer)\n', (2212, 2248), False, 'import pytest\n'), ((2258, 2312), 'swaptacular_debtor.procedures.cancel_creditor_prepared_transfer', 'procedures.cancel_creditor_prepared_transfer', (['transfer'], {}), '(transfer)\n', (2302, 2312), False, 'from swaptacular_debtor import procedures\n'), ((263, 313), 'swaptacular_debtor.models.Debtor.query.filter_by', 'Debtor.query.filter_by', ([], {'debtor_id': 'debtor.debtor_id'}), '(debtor_id=debtor.debtor_id)\n', (285, 313), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n'), ((1572, 1622), 'swaptacular_debtor.models.Debtor.query.filter_by', 'Debtor.query.filter_by', ([], {'debtor_id': 'debtor.debtor_id'}), '(debtor_id=debtor.debtor_id)\n', (1594, 1622), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n'), ((1849, 1917), 'swaptacular_debtor.models.Account.query.filter_by', 'Account.query.filter_by', ([], {'debtor_id': 'debtor.debtor_id', 'creditor_id': '(777)'}), '(debtor_id=debtor.debtor_id, creditor_id=777)\n', (1872, 1917), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n'), ((2053, 2121), 'swaptacular_debtor.models.Account.query.filter_by', 'Account.query.filter_by', ([], {'debtor_id': 'debtor.debtor_id', 'creditor_id': '(777)'}), '(debtor_id=debtor.debtor_id, creditor_id=777)\n', (2076, 2121), False, 'from swaptacular_debtor.models import db, Debtor, Account, Withdrawal, WithdrawalRequest\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2020-2021 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2021 Alibaba Group Holding Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import copy
import itertools
import logging
import os
DEP_MISSING_ERROR = '''
Dependencies {dep} cannot be found, please try again after:
pip3 install {dep}
'''
try:
import clang.cindex
from clang.cindex import CursorKind, TranslationUnit
except ImportError:
raise RuntimeError(DEP_MISSING_ERROR.format(dep='libclang'))
try:
import parsec
except ImportError:
raise RuntimeError(DEP_MISSING_ERROR.format(dep='parsec'))
###############################################################################
#
# parse codegen spec
#
# __attribute__((annotate("codegen"))): meta codegen
# __attribute__((annotate("codegen:Type"))): member type: Type member_
# __attribute__((annotate("codegen:Type*"))): member type: std::shared_ptr<Type> member_
# __attribute__((annotate("codegen:[Type*]"))): list member type: std::vector<Type> member_
# __attribute__((annotate("codegen:{Type}"))): set member type: std::set<Type> member_
# __attribute__((annotate("codegen:{Type*}"))): set member type: std::set<std::shared_ptr<Type>> member_
# __attribute__((annotate("codegen:{int32_t: Type}"))): dict member type: std::map<int32_t, Type> member_
# __attribute__((annotate("codegen:{int32_t: Type*}"))): dict member type: std::map<int32_t, std::shared_ptr<Type>> member_
#
# FIXME(hetao): parse the codegen spec directly from the type signature of the member variable
#
class CodeGenKind:
def __init__(self, kind='meta', element_type=None):
self.kind = kind
if element_type is None:
self.element_type = None
self.star = ''
else:
if isinstance(element_type[0], tuple):
self.element_type = (element_type[0][0], element_type[1][0])
self.star = element_type[1][1]
else:
self.element_type = element_type[0]
self.star = element_type[1]
if self.star:
self.deref = ''
else:
self.deref = '*'
@property
def is_meta(self):
return self.kind == 'meta'
@property
def is_plain(self):
return self.kind == 'plain'
@property
def is_set(self):
return self.kind == 'set'
@property
def is_list(self):
return self.kind == 'list'
@property
def is_dlist(self):
return self.kind == 'dlist'
@property
def is_dict(self):
return self.kind == 'dict'
def __repr__(self):
star_str = '*' if self.star else ''
if self.is_meta:
return 'meta'
if self.is_plain:
return '%s%s' % (self.element_type, star_str)
if self.is_list:
return '[%s%s]' % (self.element_type, star_str)
if self.is_dlist:
return '[[%s%s]]' % (self.element_type, star_str)
if self.is_set:
return '{%s%s}' % (self.element_type, star_str)
if self.is_dict:
return '{%s: %s%s}' % (self.element_type[0], self.element_type[1], star_str)
raise RuntimeError('Invalid codegen kind: %s' % self.kind)
name_pattern = parsec.spaces() >> parsec.regex(
r'[_a-zA-Z][_a-zA-Z0-9<>, ]*(::[_a-zA-Z][_a-zA-Z0-9<>, ]*)*') << parsec.spaces()
star_pattern = parsec.spaces() >> parsec.optional(parsec.string('*'), '') << parsec.spaces()
parse_meta = parsec.spaces().parsecmap(lambda _: CodeGenKind('meta'))
parse_plain = (parsec.spaces() >>
(name_pattern + star_pattern) << parsec.spaces()).parsecmap(lambda value: CodeGenKind('plain', value))
parse_list = (parsec.string('[') >>
(name_pattern + star_pattern) << parsec.string(']')).parsecmap(lambda value: CodeGenKind('list', value))
parse_dlist = (
parsec.string('[[') >>
(name_pattern + star_pattern) << parsec.string(']]')).parsecmap(lambda value: CodeGenKind('dlist', value))
parse_set = (parsec.string('{') >>
(name_pattern + star_pattern) << parsec.string('}')).parsecmap(lambda value: CodeGenKind('set', value))
parse_dict = (parsec.string('{') >> parsec.separated((name_pattern + star_pattern), parsec.string(':'), 2, 2) <<
parsec.string('}')).parsecmap(lambda values: CodeGenKind('dict', tuple(values)))
codegen_spec_parser = parse_dict ^ parse_set ^ parse_dlist ^ parse_list ^ parse_plain ^ parse_meta
def parse_codegen_spec(kind):
if kind.startswith('vineyard'):
kind = kind[len('vineyard'):]
if kind.startswith('codegen'):
kind = kind[len('codegen'):]
if kind.startswith(':'):
kind = kind[1:]
return codegen_spec_parser.parse(kind)
###############################################################################
#
# dump the AST for debugging
#
def dump_ast(node, indent, saw, base_indent=4, include_refs=False):
def is_std_ns(node):
return node.kind == CursorKind.NAMESPACE and node.spelling == 'std'
k = node.kind # type: clang.cindex.CursorKind
# skip printting UNEXPOSED_*
if not k.is_unexposed():
tpl = '{indent}{kind}{name}{type_name}'
if node.spelling:
name = ' s: %s' % node.spelling
else:
name = ''
if node.type and node.type.spelling:
type_name = ', t: %s' % node.type.spelling
else:
type_name = ''
# FIXME: print opcode or literal
print(tpl.format(indent=' ' * indent, kind=k.name, name=name, type_name=type_name))
saw.add(node.hash)
if include_refs:
if node.referenced is not None and node.referenced.hash not in saw:
dump_ast(node.referenced, indent + base_indent, saw, base_indent, include_refs)
# FIXME: skip auto generated decls
skip = len([c for c in node.get_children() if indent == 0 and is_std_ns(c)])
for c in node.get_children():
if not skip:
dump_ast(c, indent + base_indent, saw, base_indent, include_refs)
if indent == 0 and is_std_ns(c):
skip -= 1
saw.remove(node.hash)
class ParseOption:
Default = 0x0
DetailedPreprocessingRecord = 0x01
Incomplete = 0x02
PrecompiledPreamble = 0x04
CacheCompletionResults = 0x08
ForSerialization = 0x10
CXXChainedPCH = 0x20
SkipFunctionBodies = 0x40
IncludeBriefCommentsInCodeCompletion = 0x80
CreatePreambleOnFirstParse = 0x100
KeepGoing = 0x200
SingleFileParse = 0x400
LimitSkipFunctionBodiesToPreamble = 0x800
IncludeAttributedTypes = 0x1000
VisitImplicitAttributes = 0x2000
IgnoreNonErrorsFromIncludedFiles = 0x4000
RetainExcludedConditionalBlocks = 0x8000
###############################################################################
#
# AST utils
#
def check_serialize_attribute(node):
for child in node.get_children():
if child.kind == CursorKind.ANNOTATE_ATTR:
for attr_kind in ['vineyard', 'no-vineyard', 'codegen']:
if child.spelling.startswith(attr_kind):
return child.spelling
return None
def check_if_class_definition(node):
for child in node.get_children():
if child.kind in [
CursorKind.CXX_BASE_SPECIFIER, CursorKind.CXX_ACCESS_SPEC_DECL, CursorKind.CXX_METHOD,
CursorKind.FIELD_DECL
]:
return True
return False
def filter_the_module(root, filepath):
children = []
for child in root.get_children():
if child.location and child.location.file and \
child.location.file.name == filepath:
children.append(child)
return children
def traverse(node, to_reflect, to_include, namespaces=None):
''' Traverse the AST tree.
'''
if node.kind in [CursorKind.CLASS_DECL, CursorKind.CLASS_TEMPLATE, CursorKind.STRUCT_DECL]:
# codegen for all top-level classes (definitions, not declarations) in the given file.
if check_if_class_definition(node):
attr = check_serialize_attribute(node)
if attr is None or 'no-vineyard' not in attr:
to_reflect.append(('vineyard', namespaces, node))
if node.kind == CursorKind.INCLUSION_DIRECTIVE:
to_include.append(node)
if node.kind in [CursorKind.TRANSLATION_UNIT, CursorKind.NAMESPACE]:
if node.kind == CursorKind.NAMESPACE:
if namespaces is None:
namespaces = []
else:
namespaces = copy.copy(namespaces)
namespaces.append(node.spelling)
for child in node.get_children():
traverse(child, to_reflect, to_include, namespaces=namespaces)
def find_fields(definition):
fields, using_alias, first_mmeber_offset, has_post_construct = [], [], -1, False
for child in definition.get_children():
if first_mmeber_offset == -1:
if child.kind not in [
CursorKind.TEMPLATE_TYPE_PARAMETER, CursorKind.CXX_BASE_SPECIFIER, CursorKind.ANNOTATE_ATTR
]:
first_mmeber_offset = child.extent.start.offset
if child.kind == CursorKind.FIELD_DECL:
attr = check_serialize_attribute(child)
if attr:
fields.append((attr, child))
continue
if child.kind == CursorKind.TYPE_ALIAS_DECL:
using_alias.append((child.spelling, child.extent))
continue
if not has_post_construct and \
child.kind == CursorKind.CXX_METHOD and child.spelling == 'PostConstruct':
for body in child.get_children():
if body.kind == CursorKind.CXX_OVERRIDE_ATTR:
has_post_construct = True
return fields, using_alias, first_mmeber_offset, has_post_construct
def check_class(node):
template_parameters = []
for child in node.get_children():
if child.kind == CursorKind.TEMPLATE_TYPE_PARAMETER:
template_parameters.append((child.spelling, child.extent))
return node.spelling, template_parameters
def generate_template_header(ts):
if not ts:
return ''
ps = []
for t in ts:
if t.startswith('typename'):
ps.append(t)
else:
ps.append('typename %s' % t)
return 'template<{ps}>'.format(ps=', '.join(ps))
def generate_template_type(name, ts):
if not ts:
return name
return '{name}<{ps}>'.format(name=name, ps=', '.join(ts))
def parse_compilation_database(build_directory):
# check if the file exists first to suppress the clang warning.
compile_commands_json = os.path.join(build_directory, 'compile_commands.json')
if not os.path.isfile(compile_commands_json) or not os.access(compile_commands_json, os.R_OK):
return None
try:
return clang.cindex.CompilationDatabase.fromDirectory(build_directory)
except clang.cindex.CompilationDatabaseError:
return None
def validate_and_strip_input_file(source):
if not os.path.isfile(source) or not os.access(source, os.R_OK):
return None, 'File not exists'
with open(source, 'r') as fp:
content = fp.read().splitlines(keepends=False)
# TODO: valid and remove the first line
return '\n'.join(content), ''
def strip_flags(flags):
stripped_flags = []
for flag in flags:
if flag == '-c' or flag.startswith('-O') or flags == '-Werror':
continue
stripped_flags.append(flag)
return stripped_flags
def resolve_include(inc_node, system_includes, includes):
inc_name = inc_node.spelling
if not inc_name.endswith('.vineyard.h'): # os.path.splitext won't work
return None
mod_name = inc_name[:-len(".vineyard.h")] + ".vineyard-mod"
for inc in itertools.chain(system_includes, includes):
target = os.path.join(inc, mod_name)
if os.path.isfile(target) and os.access(target, os.R_OK):
return os.path.join(inc, inc_name)
return None
def parse_module(root_directory,
source,
target=None,
system_includes=None,
includes=None,
extra_flags=None,
build_directory=None,
delayed=True,
parse_only=True,
verbose=False):
# prepare inputs
content, message = validate_and_strip_input_file(source)
if content is None:
raise RuntimeError('Invalid input: %s' % message)
unsaved_files = [(source, content)]
# NB:
# `-nostdinc` and `-nostdinc++`: to avoid libclang find incorrect gcc installation.
# `-Wunused-private-field`: we skip parsing the function bodies.
base_flags = [
'-x',
'c++',
'-std=c++14',
'-nostdinc',
'-nostdinc++',
'-Wno-unused-private-field',
]
# prepare flags
flags = None
compliation_db = parse_compilation_database(build_directory)
if compliation_db is not None:
commands = compliation_db.getCompileCommands(source)
if commands is not None and len(commands) > 0:
# strip flags
flags = strip_flags(list(commands[0].arguments)[1:-1])
# NB: even use compilation database we still needs to include the
# system includes, since we `-nostdinc{++}`.
if system_includes:
for inc in system_includes.split(';'):
flags.append('-isystem')
flags.append(inc)
if extra_flags:
flags.extend(extra_flags)
if flags is None:
flags = []
if system_includes:
for inc in system_includes.split(';'):
flags.append('-isystem')
flags.append(inc)
if includes:
for inc in includes.split(';'):
flags.append('-I%s' % inc)
if extra_flags:
flags.extend(extra_flags)
if delayed:
flags.append('-fdelayed-template-parsing')
else:
flags.append('-fno-delayed-template-parsing')
# parse
index = clang.cindex.Index.create()
options = ParseOption.Default \
| ParseOption.DetailedPreprocessingRecord \
| ParseOption.SkipFunctionBodies \
| ParseOption.IncludeAttributedTypes \
| ParseOption.KeepGoing
if parse_only:
options |= ParseOption.SingleFileParse
parse_flags = base_flags + flags
unit = index.parse(source, unsaved_files=unsaved_files, args=parse_flags, options=options)
if not parse_only:
for diag in unit.diagnostics:
if verbose or (diag.location and diag.location.file and \
diag.location.file.name == source):
logging.warning(diag)
# traverse
modules = filter_the_module(unit.cursor, source)
to_reflect, to_include = [], []
for module in modules:
if verbose:
dump_ast(module, 0, set())
traverse(module, to_reflect, to_include)
return content, to_reflect, to_include, parse_flags
def parse_deps(root_directory,
source,
target=None,
system_includes=None,
includes=None,
extra_flags=None,
build_directory=None,
delayed=True,
verbose=False):
_, _, to_include, parse_flags = parse_module(root_directory=root_directory,
source=source,
target=target,
system_includes=system_includes,
includes=includes,
extra_flags=extra_flags,
build_directory=build_directory,
delayed=delayed,
parse_only=True,
verbose=verbose)
logging.info('Generating for %s ...', os.path.basename(source))
# analyze include directories from parse flags
i, include_in_flags = 0, []
while i < len(parse_flags):
if parse_flags[i].startswith('-I'):
if parse_flags[i][2:]:
include_in_flags.append(parse_flags[i][2:])
else:
include_in_flags.append(parse_flags[i + 1])
i += 1
if parse_flags[i] == '-isystem':
include_in_flags.append(parse_flags[i + 1])
i += 1
i += 1
for inc in to_include:
header = resolve_include(inc, [], include_in_flags)
if header is not None:
print('Depends:%s' % header.strip())
| [
"itertools.chain",
"parsec.string",
"os.access",
"os.path.join",
"parsec.regex",
"logging.warning",
"copy.copy",
"os.path.isfile",
"os.path.basename",
"parsec.spaces"
] | [((4501, 4516), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4514, 4516), False, 'import parsec\n'), ((4595, 4610), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4608, 4610), False, 'import parsec\n'), ((11773, 11827), 'os.path.join', 'os.path.join', (['build_directory', '"""compile_commands.json"""'], {}), "(build_directory, 'compile_commands.json')\n", (11785, 11827), False, 'import os\n'), ((12921, 12963), 'itertools.chain', 'itertools.chain', (['system_includes', 'includes'], {}), '(system_includes, includes)\n', (12936, 12963), False, 'import itertools\n'), ((4399, 4414), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4412, 4414), False, 'import parsec\n'), ((4418, 4491), 'parsec.regex', 'parsec.regex', (['"""[_a-zA-Z][_a-zA-Z0-9<>, ]*(::[_a-zA-Z][_a-zA-Z0-9<>, ]*)*"""'], {}), "('[_a-zA-Z][_a-zA-Z0-9<>, ]*(::[_a-zA-Z][_a-zA-Z0-9<>, ]*)*')\n", (4430, 4491), False, 'import parsec\n'), ((4533, 4548), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4546, 4548), False, 'import parsec\n'), ((4625, 4640), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4638, 4640), False, 'import parsec\n'), ((12982, 13009), 'os.path.join', 'os.path.join', (['inc', 'mod_name'], {}), '(inc, mod_name)\n', (12994, 13009), False, 'import os\n'), ((17265, 17289), 'os.path.basename', 'os.path.basename', (['source'], {}), '(source)\n', (17281, 17289), False, 'import os\n'), ((4568, 4586), 'parsec.string', 'parsec.string', (['"""*"""'], {}), "('*')\n", (4581, 4586), False, 'import parsec\n'), ((4765, 4780), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4778, 4780), False, 'import parsec\n'), ((4918, 4936), 'parsec.string', 'parsec.string', (['"""]"""'], {}), "(']')\n", (4931, 4936), False, 'import parsec\n'), ((5070, 5089), 'parsec.string', 'parsec.string', (['"""]]"""'], {}), "(']]')\n", (5083, 5089), False, 'import parsec\n'), ((5225, 5243), 'parsec.string', 'parsec.string', (['"""}"""'], {}), "('}')\n", (5238, 5243), False, 'import parsec\n'), ((5423, 5441), 'parsec.string', 'parsec.string', (['"""}"""'], {}), "('}')\n", (5436, 5441), False, 'import parsec\n'), ((11839, 11876), 'os.path.isfile', 'os.path.isfile', (['compile_commands_json'], {}), '(compile_commands_json)\n', (11853, 11876), False, 'import os\n'), ((11884, 11925), 'os.access', 'os.access', (['compile_commands_json', 'os.R_OK'], {}), '(compile_commands_json, os.R_OK)\n', (11893, 11925), False, 'import os\n'), ((12161, 12183), 'os.path.isfile', 'os.path.isfile', (['source'], {}), '(source)\n', (12175, 12183), False, 'import os\n'), ((12191, 12217), 'os.access', 'os.access', (['source', 'os.R_OK'], {}), '(source, os.R_OK)\n', (12200, 12217), False, 'import os\n'), ((13021, 13043), 'os.path.isfile', 'os.path.isfile', (['target'], {}), '(target)\n', (13035, 13043), False, 'import os\n'), ((13048, 13074), 'os.access', 'os.access', (['target', 'os.R_OK'], {}), '(target, os.R_OK)\n', (13057, 13074), False, 'import os\n'), ((13095, 13122), 'os.path.join', 'os.path.join', (['inc', 'inc_name'], {}), '(inc, inc_name)\n', (13107, 13122), False, 'import os\n'), ((4698, 4713), 'parsec.spaces', 'parsec.spaces', ([], {}), '()\n', (4711, 4713), False, 'import parsec\n'), ((4849, 4867), 'parsec.string', 'parsec.string', (['"""["""'], {}), "('[')\n", (4862, 4867), False, 'import parsec\n'), ((5010, 5029), 'parsec.string', 'parsec.string', (['"""[["""'], {}), "('[[')\n", (5023, 5029), False, 'import parsec\n'), ((5157, 5175), 'parsec.string', 'parsec.string', (['"""{"""'], {}), "('{')\n", (5170, 5175), False, 'import parsec\n'), ((5310, 5328), 'parsec.string', 'parsec.string', (['"""{"""'], {}), "('{')\n", (5323, 5328), False, 'import parsec\n'), ((9659, 9680), 'copy.copy', 'copy.copy', (['namespaces'], {}), '(namespaces)\n', (9668, 9680), False, 'import copy\n'), ((15910, 15931), 'logging.warning', 'logging.warning', (['diag'], {}), '(diag)\n', (15925, 15931), False, 'import logging\n'), ((5380, 5398), 'parsec.string', 'parsec.string', (['""":"""'], {}), "(':')\n", (5393, 5398), False, 'import parsec\n')] |
from weatbag import words
from weatbag.utils import transfer
class Tile:
def __init__(self):
self.contents = {'berries': 1}
def describe(self):
print("The path crosses a small stream.")
if self.contents['berries']:
print("There are some berries hanging from a tree. "
"They look edible. "
"If you're hungry enough, anyway.")
def action(self, player, do):
if (do[0] in words.take) and ('berries' in do):
self.take_berries(player)
else:
print("Sorry, I don't understand.")
def take_berries(self, player):
if transfer('berries', self.contents, player.inventory):
print("Reaching up, you pick the berries.")
else:
print("There are no berries here.")
def leave(self,player,direction):
if direction == 's':
print("You decide the path is too boring for you,\n"
"so you follow the stream south, looking for adventure...")
input()
return True
else:
return True
| [
"weatbag.utils.transfer"
] | [((654, 706), 'weatbag.utils.transfer', 'transfer', (['"""berries"""', 'self.contents', 'player.inventory'], {}), "('berries', self.contents, player.inventory)\n", (662, 706), False, 'from weatbag.utils import transfer\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from decimal import Decimal
import autoslug.fields
class Migration(migrations.Migration):
dependencies = [
('crm', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('slug', autoslug.fields.AutoSlugField(populate_from='name', editable=False)),
('base_rate', models.DecimalField(verbose_name='hourly base rate', decimal_places=2, max_digits=8)),
('flat_fee', models.DecimalField(verbose_name='flat fee', decimal_places=2, max_digits=10)),
('tax_rate', models.DecimalField(choices=[(Decimal('0.06'), 'low'), (Decimal('0.21'), 'high')], verbose_name='tax rate', decimal_places=2, max_digits=4, default=Decimal('0.21'))),
('client', models.ForeignKey(to='crm.Client', on_delete=models.PROTECT)),
],
),
migrations.AlterField(
model_name='contact',
name='city',
field=models.CharField(verbose_name='city', max_length=255, blank=True),
),
migrations.AlterField(
model_name='contact',
name='postal_code',
field=models.CharField(verbose_name='postal code', max_length=10, blank=True),
),
migrations.AlterUniqueTogether(
name='project',
unique_together=set([('client', 'slug')]),
),
]
| [
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DecimalField",
"django.db.models.CharField",
"decimal.Decimal"
] | [((1275, 1340), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""city"""', 'max_length': '(255)', 'blank': '(True)'}), "(verbose_name='city', max_length=255, blank=True)\n", (1291, 1340), False, 'from django.db import models, migrations\n'), ((1468, 1539), 'django.db.models.CharField', 'models.CharField', ([], {'verbose_name': '"""postal code"""', 'max_length': '(10)', 'blank': '(True)'}), "(verbose_name='postal code', max_length=10, blank=True)\n", (1484, 1539), False, 'from django.db import models, migrations\n'), ((383, 476), 'django.db.models.AutoField', 'models.AutoField', ([], {'primary_key': '(True)', 'serialize': '(False)', 'auto_created': '(True)', 'verbose_name': '"""ID"""'}), "(primary_key=True, serialize=False, auto_created=True,\n verbose_name='ID')\n", (399, 476), False, 'from django.db import models, migrations\n'), ((500, 531), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (516, 531), False, 'from django.db import models, migrations\n'), ((659, 747), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'verbose_name': '"""hourly base rate"""', 'decimal_places': '(2)', 'max_digits': '(8)'}), "(verbose_name='hourly base rate', decimal_places=2,\n max_digits=8)\n", (678, 747), False, 'from django.db import models, migrations\n'), ((775, 852), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'verbose_name': '"""flat fee"""', 'decimal_places': '(2)', 'max_digits': '(10)'}), "(verbose_name='flat fee', decimal_places=2, max_digits=10)\n", (794, 852), False, 'from django.db import models, migrations\n'), ((1078, 1138), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'to': '"""crm.Client"""', 'on_delete': 'models.PROTECT'}), "(to='crm.Client', on_delete=models.PROTECT)\n", (1095, 1138), False, 'from django.db import models, migrations\n'), ((1032, 1047), 'decimal.Decimal', 'Decimal', (['"""0.21"""'], {}), "('0.21')\n", (1039, 1047), False, 'from decimal import Decimal\n'), ((914, 929), 'decimal.Decimal', 'Decimal', (['"""0.06"""'], {}), "('0.06')\n", (921, 929), False, 'from decimal import Decimal\n'), ((940, 955), 'decimal.Decimal', 'Decimal', (['"""0.21"""'], {}), "('0.21')\n", (947, 955), False, 'from decimal import Decimal\n')] |
# -*- coding:utf-8 -*-
from src.Update.Conf.config import *
from src.Update.SystemTools.ConfFileRead import configFileRead
class JudgeNeedUpdate():
def __init__(self):
try:
self.configFileReadTools = configFileRead.ConfigFileRead()
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.connect((SERVER_IP, SERVER_MES_PORT))
print(SERVER_IP)
except socket.error as msg:
# 打开错误日志文件
wrongFile = open('data/wrongMessage.dat', 'a+')
# 获取当前时间
currentTime = str(datetime.datetime.strptime(time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()),
'%Y-%m-%d-%H-%M-%S'))
# 生成报错的错误信息
wrongMessage = {
'|currentTime': currentTime,
'|file': 'VersionControlSystem-GetNewFile-judgeNeedUpdate',
'|wrongMessage': msg
}
# 存入文件
wrongFile.write(str(wrongMessage))
# 增加换行符
wrongFile.write('\n')
wrongFile.close()
def judge(self):
# return True
version = self.configFileReadTools.readFile('VERSION', 'version')
print(str(version))
# 请求码为100时 返回最新版本号
code = '100'.encode('utf-8')
self.s.send(code)
returnData = self.s.recv(1024)
print(returnData)
if version == returnData:
return False, returnData
else:
return True, returnData
pass
| [
"src.Update.SystemTools.ConfFileRead.configFileRead.ConfigFileRead"
] | [((226, 257), 'src.Update.SystemTools.ConfFileRead.configFileRead.ConfigFileRead', 'configFileRead.ConfigFileRead', ([], {}), '()\n', (255, 257), False, 'from src.Update.SystemTools.ConfFileRead import configFileRead\n')] |
from pprint import pprint
import pymongo
client = pymongo.MongoClient('localhost', 27017)
db = client.test
posts = db.get_collection('restaurants')
# 1
print('COUNT')
print(posts.count_documents({}))
print('-----------------------------------------------------')
# 2
print('Print first document')
pprint(posts.find_one())
print('-----------------------------------------------------')
# 3
print("Select 3 places from 'borough': 'Bronx'")
print(posts.count_documents({'borough': 'Bronx'}))
for p in posts.find({'borough': 'Bronx'}, limit=3):
pprint(p)
print('-----------------------------------------------------')
# 4
print('List of distinct values for key among all documents in this collection.')
pprint(posts.distinct('cuisine'))
print('-----------------------------------------------------')
# 5
print('Count all cuisines')
cuisine = posts.distinct('cuisine')
cuisine_count = {
cu: posts.count_documents({'cuisine': cu})
for cu in cuisine
}
pprint(cuisine_count)
| [
"pymongo.MongoClient",
"pprint.pprint"
] | [((52, 91), 'pymongo.MongoClient', 'pymongo.MongoClient', (['"""localhost"""', '(27017)'], {}), "('localhost', 27017)\n", (71, 91), False, 'import pymongo\n'), ((966, 987), 'pprint.pprint', 'pprint', (['cuisine_count'], {}), '(cuisine_count)\n', (972, 987), False, 'from pprint import pprint\n'), ((552, 561), 'pprint.pprint', 'pprint', (['p'], {}), '(p)\n', (558, 561), False, 'from pprint import pprint\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from conans import ConanFile, tools
from os.path import join, dirname, realpath
class LibTensorflowConan(ConanFile):
name = "libtensorflow"
version = "1.13.1"
settings = "os", "arch"
options = {"gpu": [True, False]}
default_options = {"gpu": False}
topics = ("conan", "tensorflow", "libtensorflow")
homepage = "https://github.com/tensorflow/tensorflow"
url = "http://github.com/hardsetting/conan-libtensorflow"
license = "Apache License 2.0"
description = "Tensorflow C API library."
exports = ["LICENSE.md"]
def configure(self):
if self.settings.os != "Linux" and self.settings.os != "Macos":
raise ConanInvalidConfiguration("This library is only supported on Linux and Macos")
def triplet_name(self):
if self.settings.os == "Linux":
osname = "linux"
elif self.settings.os == "Macos":
osname = "darwin"
gpuname = "gpu" if self.options.gpu else "cpu"
return "libtensorflow-%s-%s-%s-%s" % (gpuname, osname, str(self.settings.arch), str(self.version))
def package(self):
if 'TF_ROOT' in os.environ:
tensorflow_location = os.environ['TF_ROOT']
elif 'CONAN_USER_HOME' in os.environ:
# we pass this variable through docker
tensorflow_location = os.environ['CONAN_USER_HOME'] + "/Develop/jetson/tensorflow/"
else:
tensorflow_location = os.environ['HOME'] + "/Develop/jetson/tensorflow/"
# tensorflow_location = dirname(realpath(__file__))
# raise RuntimeError('Please specifiy TF_ROOT in your environment.')
prefix = join(tensorflow_location, self.triplet_name())
print('Prefix: ', prefix)
# copy the non-symlink versions first to avoid conan error when copying symlinks first
self.copy(pattern="*.so."+str(self.version), dst="lib", src=prefix+"/lib", symlinks=True)
self.copy(pattern="*.so.1", dst="lib", src=prefix+"/lib", symlinks=True)
self.copy(pattern="*.so", dst="lib", src=prefix+"/lib", symlinks=True)
self.copy(pattern="*", dst="include", src=prefix+"/include", symlinks=True)
# self.copy("lib/*.so")
# self.copy("include/*")
def package_info(self):
self.cpp_info.libs = ['tensorflow', 'tensorflow_framework']
self.cpp_info.includedirs.append(os.path.join("include"))
| [
"os.path.join"
] | [((2432, 2455), 'os.path.join', 'os.path.join', (['"""include"""'], {}), "('include')\n", (2444, 2455), False, 'import os\n')] |
# -*- coding: utf-8 -*-
from apscheduler.schedulers.background import BackgroundScheduler
import sys
from modules.check import new_clock, second_clock
from config import client, Telegram_user_id,aria2
from pyrogram.handlers import MessageHandler,CallbackQueryHandler
from pyrogram import filters
from modules.pixiv import start_download_pixiv,start_download_id,start_download_pixivtg,start_download_pixivphoto,start_download_pixivtele
from modules.control import send_telegram_file,start_http_download,start_download,start_http_downloadtg,check_upload
from modules.call import start_pause,start_remove,start_Resume,start_benzi_down,start_download_video
from modules.moretg import get_telegram_file,get_file_id,sendfile_by_id
from modules.picacg import seach_main
from modules.rclone import start_rclonecopy,start_rclonelsd,start_rclonels,start_rclonecopyurl
from modules.video import start_get_video_info
import hashlib
import os
#import md5 #Python2里的引用
import socket
# 获取本机计算机名称
hostname = socket.gethostname()
# 获取本机ip
ip = socket.gethostbyname(hostname)
print(ip)
async def chexk_group(_, client, query):
print(query)
try:
info=await client.get_chat_member(chat_id=int(Telegram_user_id),user_id=query.from_user.id)
print(info)
sys.stdout.flush()
return True
except:
return False
async def test(client, message):
print(client)
print(message)
message.reply_text(message.text)
await client.send_message(chat_id=int(Telegram_user_id), text="test")
def start_bot():
#scheduler = BlockingScheduler()
scheduler = BackgroundScheduler()
scheduler.add_job(new_clock, "interval", seconds=60)
scheduler.add_job(second_clock, "interval", seconds=60)
scheduler.start()
print("开启监控")
sys.stdout.flush()
print("开始bot")
print(Telegram_user_id)
sys.stdout.flush()
aria2.listen_to_notifications(on_download_complete=check_upload, threaded=True)
start_message_handler = MessageHandler(
test,
#filters=filters.command("start") & filters.user(int(Telegram_user_id))
filters=filters.command("start") & filters.create(chexk_group)
)
pixivuser_message_handler = MessageHandler(
start_download_pixiv,
filters=filters.command("pixivuser") & filters.user(int(Telegram_user_id))
)
pixivid_message_handler = MessageHandler(
start_download_id,
filters=filters.command("pixivpid") & filters.user(int(Telegram_user_id))
)
magfile_message_handler = MessageHandler(
send_telegram_file,
filters=filters.command("magfile") & filters.user(int(Telegram_user_id))
)
http_download_message_handler = MessageHandler(
start_http_download,
filters=filters.command("mirror") & filters.user(int(Telegram_user_id))
)
magnet_download_message_handler = MessageHandler(
start_download,
filters=filters.command("magnet") & filters.user(int(Telegram_user_id))
)
telegram_file_message_handler = MessageHandler(
get_telegram_file,
filters=filters.command("downtgfile") & filters.user(int(Telegram_user_id))
)
seach_main_file_message_handler = MessageHandler(
seach_main,
filters=filters.command("search") & filters.user(int(Telegram_user_id))
)
start_download_idtg_message_handler = MessageHandler(
start_download_pixivtg,
filters=filters.command("pixivusertg") & filters.user(int(Telegram_user_id))
)
start_http_downloadtg_message_handler = MessageHandler(
start_http_downloadtg,
filters=filters.command("mirrortg") & filters.user(int(Telegram_user_id))
)
start_rclonecopy_message_handler = MessageHandler(
start_rclonecopy,
filters=filters.command("rclonecopy") & filters.user(int(Telegram_user_id))
)
start_rclonelsd_message_handler = MessageHandler(
start_rclonelsd,
filters=filters.command("rclonelsd") & filters.user(int(Telegram_user_id))
)
start_rclone_message_handler = MessageHandler(
start_rclonels,
filters=filters.command("rclone") & filters.user(int(Telegram_user_id))
)
start_rclonecopyurl_message_handler = MessageHandler(
start_rclonecopyurl,
filters=filters.command("rclonecopyurl") & filters.user(int(Telegram_user_id))
)
get_file_id_message_handler = MessageHandler(
get_file_id,
filters=filters.command("getfileid") & filters.user(int(Telegram_user_id))
)
sendfile_by_id_message_handler = MessageHandler(
sendfile_by_id,
filters=filters.command("getfile") & filters.user(int(Telegram_user_id))
)
start_download_pixivphoto_message_handler = MessageHandler(
start_download_pixivphoto,
filters=filters.command("pixivuserphoto") & filters.user(int(Telegram_user_id))
)
start_download_pixivtele_message_handler = MessageHandler(
start_download_pixivtele,
filters=filters.command("pixivusertele") & filters.user(int(Telegram_user_id))
)
start_get_video_info_message_handler = MessageHandler(
start_get_video_info,
filters=filters.command("video") & filters.user(int(Telegram_user_id))
)
start_Resume_handler = CallbackQueryHandler(
callback=start_Resume,
filters=filters.create(lambda _, __, query: "Resume" in query.data )
)
start_pause_handler = CallbackQueryHandler(
callback=start_pause,
filters=filters.create(lambda _, __, query: "Pause" in query.data )
)
start_remove_handler = CallbackQueryHandler(
callback=start_remove,
filters=filters.create(lambda _, __, query: "Remove" in query.data )
)
start_benzi_down_handler = CallbackQueryHandler(
callback=start_benzi_down,
filters=filters.create(lambda _, __, query: "down" in query.data )
)
start_download_video_handler = CallbackQueryHandler(
callback=start_download_video,
filters=filters.create(lambda _, __, query: "video" in query.data )
)
client.add_handler(start_download_video_handler, group=0)
client.add_handler(start_Resume_handler, group=0)
client.add_handler(start_pause_handler, group=0)
client.add_handler(start_remove_handler, group=0)
client.add_handler(start_benzi_down_handler, group=0)
client.add_handler(start_message_handler,group=1)
client.add_handler(pixivuser_message_handler,group=1)
client.add_handler(pixivid_message_handler,group=1)
client.add_handler(magfile_message_handler,group=3)
client.add_handler(http_download_message_handler,group=1)
client.add_handler(magnet_download_message_handler, group=1)
client.add_handler(telegram_file_message_handler, group=1)
client.add_handler(seach_main_file_message_handler, group=1)
client.add_handler(start_download_idtg_message_handler, group=1)
client.add_handler(start_http_downloadtg_message_handler, group=1)
client.add_handler(start_rclonecopy_message_handler , group=1)
client.add_handler(start_rclonelsd_message_handler, group=1)
client.add_handler(start_rclone_message_handler, group=1)
client.add_handler(start_rclonecopyurl_message_handler, group=1)
client.add_handler(get_file_id_message_handler, group=1)
client.add_handler(sendfile_by_id_message_handler, group=1)
client.add_handler(start_download_pixivphoto_message_handler, group=1)
client.add_handler(start_download_pixivtele_message_handler, group=1)
client.add_handler(start_get_video_info_message_handler, group=1)
client.run()
if __name__ == '__main__':
start_bot()
| [
"config.client.run",
"socket.gethostbyname",
"config.aria2.listen_to_notifications",
"pyrogram.filters.command",
"config.client.add_handler",
"pyrogram.filters.create",
"sys.stdout.flush",
"socket.gethostname",
"apscheduler.schedulers.background.BackgroundScheduler"
] | [((993, 1013), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1011, 1013), False, 'import socket\n'), ((1028, 1058), 'socket.gethostbyname', 'socket.gethostbyname', (['hostname'], {}), '(hostname)\n', (1048, 1058), False, 'import socket\n'), ((1595, 1616), 'apscheduler.schedulers.background.BackgroundScheduler', 'BackgroundScheduler', ([], {}), '()\n', (1614, 1616), False, 'from apscheduler.schedulers.background import BackgroundScheduler\n'), ((1780, 1798), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1796, 1798), False, 'import sys\n'), ((1850, 1868), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1866, 1868), False, 'import sys\n'), ((1873, 1952), 'config.aria2.listen_to_notifications', 'aria2.listen_to_notifications', ([], {'on_download_complete': 'check_upload', 'threaded': '(True)'}), '(on_download_complete=check_upload, threaded=True)\n', (1902, 1952), False, 'from config import client, Telegram_user_id, aria2\n'), ((6119, 6176), 'config.client.add_handler', 'client.add_handler', (['start_download_video_handler'], {'group': '(0)'}), '(start_download_video_handler, group=0)\n', (6137, 6176), False, 'from config import client, Telegram_user_id, aria2\n'), ((6181, 6230), 'config.client.add_handler', 'client.add_handler', (['start_Resume_handler'], {'group': '(0)'}), '(start_Resume_handler, group=0)\n', (6199, 6230), False, 'from config import client, Telegram_user_id, aria2\n'), ((6235, 6283), 'config.client.add_handler', 'client.add_handler', (['start_pause_handler'], {'group': '(0)'}), '(start_pause_handler, group=0)\n', (6253, 6283), False, 'from config import client, Telegram_user_id, aria2\n'), ((6288, 6337), 'config.client.add_handler', 'client.add_handler', (['start_remove_handler'], {'group': '(0)'}), '(start_remove_handler, group=0)\n', (6306, 6337), False, 'from config import client, Telegram_user_id, aria2\n'), ((6342, 6395), 'config.client.add_handler', 'client.add_handler', (['start_benzi_down_handler'], {'group': '(0)'}), '(start_benzi_down_handler, group=0)\n', (6360, 6395), False, 'from config import client, Telegram_user_id, aria2\n'), ((6401, 6451), 'config.client.add_handler', 'client.add_handler', (['start_message_handler'], {'group': '(1)'}), '(start_message_handler, group=1)\n', (6419, 6451), False, 'from config import client, Telegram_user_id, aria2\n'), ((6455, 6509), 'config.client.add_handler', 'client.add_handler', (['pixivuser_message_handler'], {'group': '(1)'}), '(pixivuser_message_handler, group=1)\n', (6473, 6509), False, 'from config import client, Telegram_user_id, aria2\n'), ((6513, 6565), 'config.client.add_handler', 'client.add_handler', (['pixivid_message_handler'], {'group': '(1)'}), '(pixivid_message_handler, group=1)\n', (6531, 6565), False, 'from config import client, Telegram_user_id, aria2\n'), ((6569, 6621), 'config.client.add_handler', 'client.add_handler', (['magfile_message_handler'], {'group': '(3)'}), '(magfile_message_handler, group=3)\n', (6587, 6621), False, 'from config import client, Telegram_user_id, aria2\n'), ((6626, 6684), 'config.client.add_handler', 'client.add_handler', (['http_download_message_handler'], {'group': '(1)'}), '(http_download_message_handler, group=1)\n', (6644, 6684), False, 'from config import client, Telegram_user_id, aria2\n'), ((6688, 6748), 'config.client.add_handler', 'client.add_handler', (['magnet_download_message_handler'], {'group': '(1)'}), '(magnet_download_message_handler, group=1)\n', (6706, 6748), False, 'from config import client, Telegram_user_id, aria2\n'), ((6753, 6811), 'config.client.add_handler', 'client.add_handler', (['telegram_file_message_handler'], {'group': '(1)'}), '(telegram_file_message_handler, group=1)\n', (6771, 6811), False, 'from config import client, Telegram_user_id, aria2\n'), ((6816, 6876), 'config.client.add_handler', 'client.add_handler', (['seach_main_file_message_handler'], {'group': '(1)'}), '(seach_main_file_message_handler, group=1)\n', (6834, 6876), False, 'from config import client, Telegram_user_id, aria2\n'), ((6881, 6945), 'config.client.add_handler', 'client.add_handler', (['start_download_idtg_message_handler'], {'group': '(1)'}), '(start_download_idtg_message_handler, group=1)\n', (6899, 6945), False, 'from config import client, Telegram_user_id, aria2\n'), ((6950, 7016), 'config.client.add_handler', 'client.add_handler', (['start_http_downloadtg_message_handler'], {'group': '(1)'}), '(start_http_downloadtg_message_handler, group=1)\n', (6968, 7016), False, 'from config import client, Telegram_user_id, aria2\n'), ((7021, 7082), 'config.client.add_handler', 'client.add_handler', (['start_rclonecopy_message_handler'], {'group': '(1)'}), '(start_rclonecopy_message_handler, group=1)\n', (7039, 7082), False, 'from config import client, Telegram_user_id, aria2\n'), ((7088, 7148), 'config.client.add_handler', 'client.add_handler', (['start_rclonelsd_message_handler'], {'group': '(1)'}), '(start_rclonelsd_message_handler, group=1)\n', (7106, 7148), False, 'from config import client, Telegram_user_id, aria2\n'), ((7153, 7210), 'config.client.add_handler', 'client.add_handler', (['start_rclone_message_handler'], {'group': '(1)'}), '(start_rclone_message_handler, group=1)\n', (7171, 7210), False, 'from config import client, Telegram_user_id, aria2\n'), ((7215, 7279), 'config.client.add_handler', 'client.add_handler', (['start_rclonecopyurl_message_handler'], {'group': '(1)'}), '(start_rclonecopyurl_message_handler, group=1)\n', (7233, 7279), False, 'from config import client, Telegram_user_id, aria2\n'), ((7284, 7340), 'config.client.add_handler', 'client.add_handler', (['get_file_id_message_handler'], {'group': '(1)'}), '(get_file_id_message_handler, group=1)\n', (7302, 7340), False, 'from config import client, Telegram_user_id, aria2\n'), ((7345, 7404), 'config.client.add_handler', 'client.add_handler', (['sendfile_by_id_message_handler'], {'group': '(1)'}), '(sendfile_by_id_message_handler, group=1)\n', (7363, 7404), False, 'from config import client, Telegram_user_id, aria2\n'), ((7409, 7479), 'config.client.add_handler', 'client.add_handler', (['start_download_pixivphoto_message_handler'], {'group': '(1)'}), '(start_download_pixivphoto_message_handler, group=1)\n', (7427, 7479), False, 'from config import client, Telegram_user_id, aria2\n'), ((7484, 7553), 'config.client.add_handler', 'client.add_handler', (['start_download_pixivtele_message_handler'], {'group': '(1)'}), '(start_download_pixivtele_message_handler, group=1)\n', (7502, 7553), False, 'from config import client, Telegram_user_id, aria2\n'), ((7558, 7623), 'config.client.add_handler', 'client.add_handler', (['start_get_video_info_message_handler'], {'group': '(1)'}), '(start_get_video_info_message_handler, group=1)\n', (7576, 7623), False, 'from config import client, Telegram_user_id, aria2\n'), ((7628, 7640), 'config.client.run', 'client.run', ([], {}), '()\n', (7638, 7640), False, 'from config import client, Telegram_user_id, aria2\n'), ((1267, 1285), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1283, 1285), False, 'import sys\n'), ((5355, 5414), 'pyrogram.filters.create', 'filters.create', (["(lambda _, __, query: 'Resume' in query.data)"], {}), "(lambda _, __, query: 'Resume' in query.data)\n", (5369, 5414), False, 'from pyrogram import filters\n'), ((5521, 5579), 'pyrogram.filters.create', 'filters.create', (["(lambda _, __, query: 'Pause' in query.data)"], {}), "(lambda _, __, query: 'Pause' in query.data)\n", (5535, 5579), False, 'from pyrogram import filters\n'), ((5687, 5746), 'pyrogram.filters.create', 'filters.create', (["(lambda _, __, query: 'Remove' in query.data)"], {}), "(lambda _, __, query: 'Remove' in query.data)\n", (5701, 5746), False, 'from pyrogram import filters\n'), ((5863, 5920), 'pyrogram.filters.create', 'filters.create', (["(lambda _, __, query: 'down' in query.data)"], {}), "(lambda _, __, query: 'down' in query.data)\n", (5877, 5920), False, 'from pyrogram import filters\n'), ((6044, 6102), 'pyrogram.filters.create', 'filters.create', (["(lambda _, __, query: 'video' in query.data)"], {}), "(lambda _, __, query: 'video' in query.data)\n", (6058, 6102), False, 'from pyrogram import filters\n'), ((2110, 2134), 'pyrogram.filters.command', 'filters.command', (['"""start"""'], {}), "('start')\n", (2125, 2134), False, 'from pyrogram import filters\n'), ((2137, 2164), 'pyrogram.filters.create', 'filters.create', (['chexk_group'], {}), '(chexk_group)\n', (2151, 2164), False, 'from pyrogram import filters\n'), ((2266, 2294), 'pyrogram.filters.command', 'filters.command', (['"""pixivuser"""'], {}), "('pixivuser')\n", (2281, 2294), False, 'from pyrogram import filters\n'), ((2430, 2457), 'pyrogram.filters.command', 'filters.command', (['"""pixivpid"""'], {}), "('pixivpid')\n", (2445, 2457), False, 'from pyrogram import filters\n'), ((2593, 2619), 'pyrogram.filters.command', 'filters.command', (['"""magfile"""'], {}), "('magfile')\n", (2608, 2619), False, 'from pyrogram import filters\n'), ((2764, 2789), 'pyrogram.filters.command', 'filters.command', (['"""mirror"""'], {}), "('mirror')\n", (2779, 2789), False, 'from pyrogram import filters\n'), ((2928, 2953), 'pyrogram.filters.command', 'filters.command', (['"""magnet"""'], {}), "('magnet')\n", (2943, 2953), False, 'from pyrogram import filters\n'), ((3094, 3123), 'pyrogram.filters.command', 'filters.command', (['"""downtgfile"""'], {}), "('downtgfile')\n", (3109, 3123), False, 'from pyrogram import filters\n'), ((3258, 3283), 'pyrogram.filters.command', 'filters.command', (['"""search"""'], {}), "('search')\n", (3273, 3283), False, 'from pyrogram import filters\n'), ((3435, 3465), 'pyrogram.filters.command', 'filters.command', (['"""pixivusertg"""'], {}), "('pixivusertg')\n", (3450, 3465), False, 'from pyrogram import filters\n'), ((3618, 3645), 'pyrogram.filters.command', 'filters.command', (['"""mirrortg"""'], {}), "('mirrortg')\n", (3633, 3645), False, 'from pyrogram import filters\n'), ((3787, 3816), 'pyrogram.filters.command', 'filters.command', (['"""rclonecopy"""'], {}), "('rclonecopy')\n", (3802, 3816), False, 'from pyrogram import filters\n'), ((3957, 3985), 'pyrogram.filters.command', 'filters.command', (['"""rclonelsd"""'], {}), "('rclonelsd')\n", (3972, 3985), False, 'from pyrogram import filters\n'), ((4122, 4147), 'pyrogram.filters.command', 'filters.command', (['"""rclone"""'], {}), "('rclone')\n", (4137, 4147), False, 'from pyrogram import filters\n'), ((4296, 4328), 'pyrogram.filters.command', 'filters.command', (['"""rclonecopyurl"""'], {}), "('rclonecopyurl')\n", (4311, 4328), False, 'from pyrogram import filters\n'), ((4461, 4489), 'pyrogram.filters.command', 'filters.command', (['"""getfileid"""'], {}), "('getfileid')\n", (4476, 4489), False, 'from pyrogram import filters\n'), ((4627, 4653), 'pyrogram.filters.command', 'filters.command', (['"""getfile"""'], {}), "('getfile')\n", (4642, 4653), False, 'from pyrogram import filters\n'), ((4814, 4847), 'pyrogram.filters.command', 'filters.command', (['"""pixivuserphoto"""'], {}), "('pixivuserphoto')\n", (4829, 4847), False, 'from pyrogram import filters\n'), ((5006, 5038), 'pyrogram.filters.command', 'filters.command', (['"""pixivusertele"""'], {}), "('pixivusertele')\n", (5021, 5038), False, 'from pyrogram import filters\n'), ((5189, 5213), 'pyrogram.filters.command', 'filters.command', (['"""video"""'], {}), "('video')\n", (5204, 5213), False, 'from pyrogram import filters\n')] |
from setuptools import setup
setup(name='lynxy',
version='0.1',
description='blah-blah',
url='http://github.com/imminfo/lynxy',
author='<NAME>',
author_email='<EMAIL>',
license='Apache v2',
packages=['lynxy'],
zip_safe=False) | [
"setuptools.setup"
] | [((30, 236), 'setuptools.setup', 'setup', ([], {'name': '"""lynxy"""', 'version': '"""0.1"""', 'description': '"""blah-blah"""', 'url': '"""http://github.com/imminfo/lynxy"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""Apache v2"""', 'packages': "['lynxy']", 'zip_safe': '(False)'}), "(name='lynxy', version='0.1', description='blah-blah', url=\n 'http://github.com/imminfo/lynxy', author='<NAME>', author_email=\n '<EMAIL>', license='Apache v2', packages=['lynxy'], zip_safe=False)\n", (35, 236), False, 'from setuptools import setup\n')] |
import json
import os
import pathlib
from urllib.request import urlopen
import pytest
from contracts_lib_py.contract_handler import ContractHandler
from contracts_lib_py.utils import get_account
from contracts_lib_py.web3_provider import Web3Provider
from nevermined_gateway.util import get_config, init_account_envvars
def get_resource_path(dir_name, file_name):
base = os.path.realpath(__file__).split(os.path.sep)[1:-1]
if dir_name:
return pathlib.Path(os.path.join(os.path.sep, *base, dir_name, file_name))
else:
return pathlib.Path(os.path.join(os.path.sep, *base, file_name))
@pytest.fixture(autouse=True)
def env_setup(monkeypatch):
"""Set test environment variables so that we can run the tests without having
to set them.
"""
provider_keyfile = pathlib.Path(__file__).parent / "resources/data/publisher_key_file.json"
rsa_priv_keyfile = pathlib.Path(__file__).parent / "resources/data/rsa_priv_key.pem"
rsa_pub_keyfile = pathlib.Path(__file__).parent / "resources/data/rsa_pub_key.pem"
monkeypatch.setenv("PROVIDER_ADDRESS", "0x00bd138abd70e2f00903268f3db08f2d25677c9e")
monkeypatch.setenv("PROVIDER_PASSWORD", "<PASSWORD>")
monkeypatch.setenv("PROVIDER_KEYFILE", provider_keyfile.as_posix())
monkeypatch.setenv("RSA_PRIVKEY_FILE", rsa_priv_keyfile.as_posix())
monkeypatch.setenv("RSA_PUBKEY_FILE", rsa_pub_keyfile.as_posix())
monkeypatch.setenv("ESTUARY_GATEWAY", "https://shuttle-4.estuary.tech")
monkeypatch.setenv("IPFS_GATEWAY", "https://dweb.link/ipfs/:cid")
@pytest.fixture
def client():
# This import is done here so that the `env_setup` fixture is called before we
# initialize the flask app.
from nevermined_gateway.run import app
client = app.test_client()
yield client
@pytest.fixture(autouse=True)
def setup_all():
config = get_config()
Web3Provider.get_web3(config.keeper_url)
ContractHandler.artifacts_path = os.path.expanduser(
'~/.nevermined/nevermined-contracts/artifacts')
init_account_envvars()
@pytest.fixture
def provider_account():
return get_account(0)
@pytest.fixture
def consumer_account():
os.environ['PARITY_ADDRESS1'] = '0x068ed00cf0441e4829d9784fcbe7b9e26d4bd8d0'
os.environ['PARITY_PASSWORD1'] = '<PASSWORD>'
os.environ['PARITY_KEYFILE1'] = 'tests/resources/data/consumer_key_file.json'
return get_account(1)
@pytest.fixture
def publisher_account():
os.environ['PARITY_ADDRESS2'] = '0xa99d43d86a0758d5632313b8fa3972b6088a21bb'
os.environ['PARITY_PASSWORD2'] = '<PASSWORD>'
os.environ['PARITY_KEYFILE2'] = 'tests/resources/data/publisher2_key_file.json'
return get_account(2)
def get_sample_ddo():
return json.loads(urlopen(
'https://raw.githubusercontent.com/nevermined-io/docs/master/docs/architecture/specs'
'/examples/access/v0.1/ddo1.json').read().decode(
'utf-8'))
| [
"nevermined_gateway.run.app.test_client",
"nevermined_gateway.util.get_config",
"pathlib.Path",
"os.path.join",
"os.path.realpath",
"nevermined_gateway.util.init_account_envvars",
"contracts_lib_py.utils.get_account",
"pytest.fixture",
"urllib.request.urlopen",
"os.path.expanduser",
"contracts_lib_py.web3_provider.Web3Provider.get_web3"
] | [((617, 645), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (631, 645), False, 'import pytest\n'), ((1802, 1830), 'pytest.fixture', 'pytest.fixture', ([], {'autouse': '(True)'}), '(autouse=True)\n', (1816, 1830), False, 'import pytest\n'), ((1764, 1781), 'nevermined_gateway.run.app.test_client', 'app.test_client', ([], {}), '()\n', (1779, 1781), False, 'from nevermined_gateway.run import app\n'), ((1861, 1873), 'nevermined_gateway.util.get_config', 'get_config', ([], {}), '()\n', (1871, 1873), False, 'from nevermined_gateway.util import get_config, init_account_envvars\n'), ((1878, 1918), 'contracts_lib_py.web3_provider.Web3Provider.get_web3', 'Web3Provider.get_web3', (['config.keeper_url'], {}), '(config.keeper_url)\n', (1899, 1918), False, 'from contracts_lib_py.web3_provider import Web3Provider\n'), ((1956, 2022), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.nevermined/nevermined-contracts/artifacts"""'], {}), "('~/.nevermined/nevermined-contracts/artifacts')\n", (1974, 2022), False, 'import os\n'), ((2036, 2058), 'nevermined_gateway.util.init_account_envvars', 'init_account_envvars', ([], {}), '()\n', (2056, 2058), False, 'from nevermined_gateway.util import get_config, init_account_envvars\n'), ((2112, 2126), 'contracts_lib_py.utils.get_account', 'get_account', (['(0)'], {}), '(0)\n', (2123, 2126), False, 'from contracts_lib_py.utils import get_account\n'), ((2393, 2407), 'contracts_lib_py.utils.get_account', 'get_account', (['(1)'], {}), '(1)\n', (2404, 2407), False, 'from contracts_lib_py.utils import get_account\n'), ((2677, 2691), 'contracts_lib_py.utils.get_account', 'get_account', (['(2)'], {}), '(2)\n', (2688, 2691), False, 'from contracts_lib_py.utils import get_account\n'), ((476, 529), 'os.path.join', 'os.path.join', (['os.path.sep', '*base', 'dir_name', 'file_name'], {}), '(os.path.sep, *base, dir_name, file_name)\n', (488, 529), False, 'import os\n'), ((569, 612), 'os.path.join', 'os.path.join', (['os.path.sep', '*base', 'file_name'], {}), '(os.path.sep, *base, file_name)\n', (581, 612), False, 'import os\n'), ((804, 826), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (816, 826), False, 'import pathlib\n'), ((900, 922), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (912, 922), False, 'import pathlib\n'), ((988, 1010), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (1000, 1010), False, 'import pathlib\n'), ((379, 405), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (395, 405), False, 'import os\n'), ((2738, 2873), 'urllib.request.urlopen', 'urlopen', (['"""https://raw.githubusercontent.com/nevermined-io/docs/master/docs/architecture/specs/examples/access/v0.1/ddo1.json"""'], {}), "(\n 'https://raw.githubusercontent.com/nevermined-io/docs/master/docs/architecture/specs/examples/access/v0.1/ddo1.json'\n )\n", (2745, 2873), False, 'from urllib.request import urlopen\n')] |
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from OpenGL.GLUT import *
from OpenGL.GLU import *
from OpenGL.GL import *
from math import sqrt
from compas.geometry import centroid_points
from compas_viewers.viewer import Viewer
from compas_viewers.core.drawing import draw_points
from compas_viewers.core.drawing import draw_lines
from compas.utilities import color_to_colordict
from compas.utilities import color_to_rgb
__author__ = '<NAME>'
__copyright__ = 'Copyright 2014, Block Research Group - ETH Zurich'
__license__ = 'MIT License'
__email__ = '<<EMAIL>>'
__all__ = ['NetworkViewer']
class NetworkViewer(Viewer):
"""An OpenGL viewer for networks.
Parameters
----------
network : Network
The network object.
width : int
Optional. The viewport width. Default is ``1280``.
height : int
Optional. The viewport height. Default is ``800``.
Example
-------
.. code-block:: python
import compas
from compas.datastructures.network import Network
from compas.datastructures.network.viewer import NetworkViewer
network = Network.from_obj(compas.get('lines.obj'))
network.add_edge(0, 14)
network.add_edge(15, 10)
network.add_edge(21, 24)
viewer = NetworkViewer(network, 600, 600)
viewer.grid_on = False
viewer.setup()
viewer.show()
"""
def __init__(self, network, width=1280, height=800):
super(NetworkViewer, self).__init__(width=width, height=height)
self.default_vertexcolor = (0, 0, 0)
self.default_edgecolor = (0, 0, 0)
self.default_facecolor = (0, 0, 0)
self.vertices_on = True
self.edges_on = True
self.faces_on = False
self.vertexcolor = None
self.edgecolor = None
self.facecolor = None
self.vertexlabel = None
self.edgelabel = None
self.facelabel = None
self.vertexsize = None
self.edgewidth = None
self.network = network
self.center()
# --------------------------------------------------------------------------
# helpers (temp)
# --------------------------------------------------------------------------
def center(self):
xyz = [self.network.vertex_coordinates(key) for key in self.network.vertices()]
cx, cy, cz = centroid_points(xyz)
for key, attr in self.network.vertices(True):
attr['x'] -= cx
attr['y'] -= cy
# --------------------------------------------------------------------------
# main drawing functionality
# --------------------------------------------------------------------------
def display(self):
points = []
vcolor = self.network.attributes['color.vertex']
vcolor = vcolor or self.default_vertexcolor
vcolor = color_to_rgb(vcolor, True)
for key, attr in self.network.vertices(True):
points.append({
'pos' : (attr['x'], attr['y'], attr['z']),
'size' : 6.0,
'color': vcolor,
})
lines = []
ecolor = self.network.attributes['color.vertex']
ecolor = ecolor or self.default_edgecolor
ecolor = color_to_rgb(ecolor, True)
for u, v in self.network.edges():
lines.append({
'start': self.network.vertex_coordinates(u),
'end' : self.network.vertex_coordinates(v),
'color': ecolor,
'width': 1.0
})
# loads = []
# for key, attr in self.network.vertices(True):
# if attr['is_fixed']:
# continue
# if 'p' in attr:
# p = attr['p']
# l = sqrt(p[0] ** 2 + p[1] ** 2 + p[2] ** 2)
# if l:
# start = self.network.vertex_coordinates(key)
# end = [start[i] + p[i] for i in range(3)]
# loads.append({
# 'start': start,
# 'end' : end,
# 'color': (0, 1.0, 0),
# 'width': 3.0
# })
draw_points(points)
draw_lines(lines)
# draw_lines(loads)
# --------------------------------------------------------------------------
# keyboard functionality
# --------------------------------------------------------------------------
def keypress(self, key, x, y):
"""Assign network functionality to keys.
"""
if key == 'c':
self.screenshot(os.path.join(compas.TEMP, 'screenshots/network-viewer_screenshot.jpg'))
def special(self, key, x, y):
"""Define the meaning of pressing function keys.
"""
pass
# ==============================================================================
# Main
# ==============================================================================
if __name__ == '__main__':
import compas
from compas.datastructures import Network
network = Network.from_obj(compas.get('saddle.obj'))
viewer = NetworkViewer(network, 600, 600)
viewer.setup()
viewer.show()
| [
"compas.get",
"compas.geometry.centroid_points",
"compas_viewers.core.drawing.draw_lines",
"compas.utilities.color_to_rgb",
"compas_viewers.core.drawing.draw_points"
] | [((2467, 2487), 'compas.geometry.centroid_points', 'centroid_points', (['xyz'], {}), '(xyz)\n', (2482, 2487), False, 'from compas.geometry import centroid_points\n'), ((2964, 2990), 'compas.utilities.color_to_rgb', 'color_to_rgb', (['vcolor', '(True)'], {}), '(vcolor, True)\n', (2976, 2990), False, 'from compas.utilities import color_to_rgb\n'), ((3354, 3380), 'compas.utilities.color_to_rgb', 'color_to_rgb', (['ecolor', '(True)'], {}), '(ecolor, True)\n', (3366, 3380), False, 'from compas.utilities import color_to_rgb\n'), ((4308, 4327), 'compas_viewers.core.drawing.draw_points', 'draw_points', (['points'], {}), '(points)\n', (4319, 4327), False, 'from compas_viewers.core.drawing import draw_points\n'), ((4336, 4353), 'compas_viewers.core.drawing.draw_lines', 'draw_lines', (['lines'], {}), '(lines)\n', (4346, 4353), False, 'from compas_viewers.core.drawing import draw_lines\n'), ((5207, 5231), 'compas.get', 'compas.get', (['"""saddle.obj"""'], {}), "('saddle.obj')\n", (5217, 5231), False, 'import compas\n')] |
# pylint: disable=redefined-outer-name,no-member
import json
import os
import shutil
import subprocess
import time
import pytest
from parse import parse
from sqlalchemy import create_engine
from alembic_utils.testbase import TEST_VERSIONS_ROOT, reset_event_listener_registry
PYTEST_DB = "postgresql://alem_user:password@localhost:5680/alem_db"
@pytest.fixture(scope="session")
def maybe_start_pg() -> None:
"""Creates a postgres 12 docker container that can be connected
to using the PYTEST_DB connection string"""
container_name = "alembic_utils_pg"
image = "postgres:12"
connection_template = "postgresql://{user}:{pw}@{host}:{port:d}/{db}"
conn_args = parse(connection_template, PYTEST_DB)
# Don't attempt to instantiate a container if
# we're on CI
if "GITHUB_SHA" in os.environ:
yield
return
try:
is_running = (
subprocess.check_output(
["docker", "inspect", "-f", "{{.State.Running}}", container_name]
)
.decode()
.strip()
== "true"
)
except subprocess.CalledProcessError:
# Can't inspect container if it isn't running
is_running = False
if is_running:
yield
return
subprocess.call(
[
"docker",
"run",
"--rm",
"--name",
container_name,
"-p",
f"{conn_args['port']}:5432",
"-d",
"-e",
f"POSTGRES_DB={conn_args['db']}",
"-e",
f"POSTGRES_PASSWORD={conn_args['pw']}",
"-e",
f"POSTGRES_USER={conn_args['user']}",
"--health-cmd",
"pg_isready",
"--health-interval",
"3s",
"--health-timeout",
"3s",
"--health-retries",
"15",
image,
]
)
# Wait for postgres to become healthy
for _ in range(10):
out = subprocess.check_output(["docker", "inspect", container_name])
inspect_info = json.loads(out)[0]
health_status = inspect_info["State"]["Health"]["Status"]
if health_status == "healthy":
break
else:
time.sleep(1)
else:
raise Exception("Could not reach postgres comtainer. Check docker installation")
yield
# subprocess.call(["docker", "stop", container_name])
return
@pytest.fixture(scope="session")
def raw_engine(maybe_start_pg: None):
"""sqlalchemy engine fixture"""
eng = create_engine(PYTEST_DB)
yield eng
eng.dispose()
@pytest.fixture(scope="function")
def engine(raw_engine):
"""Engine that has been reset between tests"""
def run_cleaners():
reset_event_listener_registry()
raw_engine.execute("drop schema public cascade; create schema public;")
raw_engine.execute('drop schema if exists "DEV" cascade; create schema "DEV";')
# Remove any migrations that were left behind
TEST_VERSIONS_ROOT.mkdir(exist_ok=True, parents=True)
shutil.rmtree(TEST_VERSIONS_ROOT)
TEST_VERSIONS_ROOT.mkdir(exist_ok=True, parents=True)
run_cleaners()
yield raw_engine
run_cleaners()
| [
"subprocess.check_output",
"json.loads",
"alembic_utils.testbase.TEST_VERSIONS_ROOT.mkdir",
"parse.parse",
"sqlalchemy.create_engine",
"time.sleep",
"subprocess.call",
"shutil.rmtree",
"pytest.fixture",
"alembic_utils.testbase.reset_event_listener_registry"
] | [((351, 382), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (365, 382), False, 'import pytest\n'), ((2462, 2493), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (2476, 2493), False, 'import pytest\n'), ((2638, 2670), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (2652, 2670), False, 'import pytest\n'), ((687, 724), 'parse.parse', 'parse', (['connection_template', 'PYTEST_DB'], {}), '(connection_template, PYTEST_DB)\n', (692, 724), False, 'from parse import parse\n'), ((1276, 1655), 'subprocess.call', 'subprocess.call', (['[\'docker\', \'run\', \'--rm\', \'--name\', container_name, \'-p\',\n f"{conn_args[\'port\']}:5432", \'-d\', \'-e\',\n f"POSTGRES_DB={conn_args[\'db\']}", \'-e\',\n f"POSTGRES_PASSWORD={conn_args[\'pw\']}", \'-e\',\n f"POSTGRES_USER={conn_args[\'user\']}", \'--health-cmd\', \'pg_isready\',\n \'--health-interval\', \'3s\', \'--health-timeout\', \'3s\', \'--health-retries\',\n \'15\', image]'], {}), '([\'docker\', \'run\', \'--rm\', \'--name\', container_name, \'-p\',\n f"{conn_args[\'port\']}:5432", \'-d\', \'-e\',\n f"POSTGRES_DB={conn_args[\'db\']}", \'-e\',\n f"POSTGRES_PASSWORD={conn_args[\'pw\']}", \'-e\',\n f"POSTGRES_USER={conn_args[\'user\']}", \'--health-cmd\', \'pg_isready\',\n \'--health-interval\', \'3s\', \'--health-timeout\', \'3s\', \'--health-retries\',\n \'15\', image])\n', (1291, 1655), False, 'import subprocess\n'), ((2578, 2602), 'sqlalchemy.create_engine', 'create_engine', (['PYTEST_DB'], {}), '(PYTEST_DB)\n', (2591, 2602), False, 'from sqlalchemy import create_engine\n'), ((2013, 2075), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'inspect', container_name]"], {}), "(['docker', 'inspect', container_name])\n", (2036, 2075), False, 'import subprocess\n'), ((2779, 2810), 'alembic_utils.testbase.reset_event_listener_registry', 'reset_event_listener_registry', ([], {}), '()\n', (2808, 2810), False, 'from alembic_utils.testbase import TEST_VERSIONS_ROOT, reset_event_listener_registry\n'), ((3041, 3094), 'alembic_utils.testbase.TEST_VERSIONS_ROOT.mkdir', 'TEST_VERSIONS_ROOT.mkdir', ([], {'exist_ok': '(True)', 'parents': '(True)'}), '(exist_ok=True, parents=True)\n', (3065, 3094), False, 'from alembic_utils.testbase import TEST_VERSIONS_ROOT, reset_event_listener_registry\n'), ((3103, 3136), 'shutil.rmtree', 'shutil.rmtree', (['TEST_VERSIONS_ROOT'], {}), '(TEST_VERSIONS_ROOT)\n', (3116, 3136), False, 'import shutil\n'), ((3145, 3198), 'alembic_utils.testbase.TEST_VERSIONS_ROOT.mkdir', 'TEST_VERSIONS_ROOT.mkdir', ([], {'exist_ok': '(True)', 'parents': '(True)'}), '(exist_ok=True, parents=True)\n', (3169, 3198), False, 'from alembic_utils.testbase import TEST_VERSIONS_ROOT, reset_event_listener_registry\n'), ((2099, 2114), 'json.loads', 'json.loads', (['out'], {}), '(out)\n', (2109, 2114), False, 'import json\n'), ((2267, 2280), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2277, 2280), False, 'import time\n'), ((903, 997), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'inspect', '-f', '{{.State.Running}}', container_name]"], {}), "(['docker', 'inspect', '-f', '{{.State.Running}}',\n container_name])\n", (926, 997), False, 'import subprocess\n')] |
from scipy.spatial import distance as dist
from collections import OrderedDict
import numpy as np
import cv2
from color import Color
class ColorLabeler:
def __init__(self):
# initialize the colors dictionary, containing the color
# name as the key and the RGB tuple as the value
colors = OrderedDict({
Color.beige: (0xB8, 0x86, 0x0B),
Color.white: (0xFF, 0xFF, 0xFF),
Color.red: (0xFF, 0x00, 0x00),
Color.dark_orange: (0xFF, 0x8C, 0x00),
Color.green: (0x00, 0x80, 0x00),
Color.yellow: (0xFF, 0xFF, 0x00),
# "DarkGreen": (0x00, 0x64, 0x00),
# "LightGreen": (0x90, 0xEE, 0x90),
# "Gray": (0x80, 0x80, 0x80),
# "Orange": (0xFF, 0xA5, 0x00),
# "LightGray": (0xD3, 0xD3, 0xD3),
# "AliceBlue": (0xF0, 0xF8, 0xFF),
# "AntiqueWhite": (0xFA, 0xEB, 0xD7),
# "Aqua": (0x00, 0xFF, 0xFF),
# "Aquamarine": (0x7F, 0xFF, 0xD4),
# "Azure": (0xF0, 0xFF, 0xFF),
# "Beige": (0xF5, 0xF5, 0xDC),
# "Bisque": (0xFF, 0xE4, 0xC4),
# "Black": (0x00, 0x00, 0x00),
# "BlanchedAlmond": (0xFF, 0xEB, 0xCD),
# "Blue": (0x00, 0x00, 0xFF),
# "BlueViolet": (0x8A, 0x2B, 0xE2),
# "Brown": (0xA5, 0x2A, 0x2A),
# "BurlyWood": (0xDE, 0xB8, 0x87),
# "CadetBlue": (0x5F, 0x9E, 0xA0),
# "Chartreuse": (0x7F, 0xFF, 0x00),
# "Chocolate": (0xD2, 0x69, 0x1E),
# "Coral": (0xFF, 0x7F, 0x50),
# "CornflowerBlue": (0x64, 0x95, 0xED),
# "Cornsilk": (0xFF, 0xF8, 0xDC),
# "Crimson": (0xDC, 0x14, 0x3C),
# "Cyan": (0x00, 0xFF, 0xFF),
# "DarkBlue": (0x00, 0x00, 0x8B),
# "DarkCyan": (0x00, 0x8B, 0x8B),
# "DarkGray": (0xA9, 0xA9, 0xA9),
# "DarkGrey": (0xA9, 0xA9, 0xA9),
# "DarkKhaki": (0xBD, 0xB7, 0x6B),
# "DarkMagenta": (0x8B, 0x00, 0x8B),
# "DarkOliveGreen": (0x55, 0x6B, 0x2F),
# "DarkOrchid": (0x99, 0x32, 0xCC),
# "DarkRed": (0x8B, 0x00, 0x00),
# "DarkSalmon": (0xE9, 0x96, 0x7A),
# "DarkSeaGreen": (0x8F, 0xBC, 0x8F),
# "DarkSlateBlue": (0x48, 0x3D, 0x8B),
# "DarkSlateGray": (0x2F, 0x4F, 0x4F),
# "DarkSlateGrey": (0x2F, 0x4F, 0x4F),
# "DarkTurquoise": (0x00, 0xCE, 0xD1),
# "DarkViolet": (0x94, 0x00, 0xD3),
# "DeepPink": (0xFF, 0x14, 0x93),
# "DeepSkyBlue": (0x00, 0xBF, 0xFF),
# "DimGray": (0x69, 0x69, 0x69),
# "DimGrey": (0x69, 0x69, 0x69),
# "DodgerBlue": (0x1E, 0x90, 0xFF),
# "FireBrick": (0xB2, 0x22, 0x22),
# "FloralWhite": (0xFF, 0xFA, 0xF0),
# "ForestGreen": (0x22, 0x8B, 0x22),
# "Fuchsia": (0xFF, 0x00, 0xFF),
# "Gainsboro": (0xDC, 0xDC, 0xDC),
# "GhostWhite": (0xF8, 0xF8, 0xFF),
# "Gold": (0xFF, 0xD7, 0x00),
# "GoldenRod": (0xDA, 0xA5, 0x20),
# "Grey": (0x80, 0x80, 0x80),
# "GreenYellow": (0xAD, 0xFF, 0x2F),
# "HoneyDew": (0xF0, 0xFF, 0xF0),
# "HotPink": (0xFF, 0x69, 0xB4),
# "IndianRed": (0xCD, 0x5C, 0x5C),
# "Indigo": (0x4B, 0x00, 0x82),
# "Ivory": (0xFF, 0xFF, 0xF0),
# "Khaki": (0xF0, 0xE6, 0x8C),
# "Lavender": (0xE6, 0xE6, 0xFA),
# "LavenderBlush": (0xFF, 0xF0, 0xF5),
# "LawnGreen": (0x7C, 0xFC, 0x00),
# "LemonChiffon": (0xFF, 0xFA, 0xCD),
# "LightBlue": (0xAD, 0xD8, 0xE6),
# "LightCoral": (0xF0, 0x80, 0x80),
# "LightCyan": (0xE0, 0xFF, 0xFF),
# "LightGoldenRodYellow": (0xFA, 0xFA, 0xD2),
# "LightGrey": (0xD3, 0xD3, 0xD3),
# "LightPink": (0xFF, 0xB6, 0xC1),
# "LightSalmon": (0xFF, 0xA0, 0x7A),
# "LightSeaGreen": (0x20, 0xB2, 0xAA),
# "LightSkyBlue": (0x87, 0xCE, 0xFA),
# "LightSlateGray": (0x77, 0x88, 0x99),
# "LightSlateGrey": (0x77, 0x88, 0x99),
# "LightSteelBlue": (0xB0, 0xC4, 0xDE),
# "LightYellow": (0xFF, 0xFF, 0xE0),
# "Lime": (0x00, 0xFF, 0x00),
# "LimeGreen": (0x32, 0xCD, 0x32),
# "Linen": (0xFA, 0xF0, 0xE6),
# "Magenta": (0xFF, 0x00, 0xFF),
# "Maroon": (0x80, 0x00, 0x00),
# "MediumAquaMarine": (0x66, 0xCD, 0xAA),
# "MediumBlue": (0x00, 0x00, 0xCD),
# "MediumOrchid": (0xBA, 0x55, 0xD3),
# "MediumPurple": (0x93, 0x70, 0xDB),
# "MediumSeaGreen": (0x3C, 0xB3, 0x71),
# "MediumSlateBlue": (0x7B, 0x68, 0xEE),
# "MediumSpringGreen": (0x00, 0xFA, 0x9A),
# "MediumTurquoise": (0x48, 0xD1, 0xCC),
# "MediumVioletRed": (0xC7, 0x15, 0x85),
# "MidnightBlue": (0x19, 0x19, 0x70),
# "MintCream": (0xF5, 0xFF, 0xFA),
# "MistyRose": (0xFF, 0xE4, 0xE1),
# "Moccasin": (0xFF, 0xE4, 0xB5),
# "NavajoWhite": (0xFF, 0xDE, 0xAD),
# "Navy": (0x00, 0x00, 0x80),
# "OldLace": (0xFD, 0xF5, 0xE6),
# "Olive": (0x80, 0x80, 0x00),
# "OliveDrab": (0x6B, 0x8E, 0x23),
# "OrangeRed": (0xFF, 0x45, 0x00),
# "Orchid": (0xDA, 0x70, 0xD6),
# "PaleGoldenRod": (0xEE, 0xE8, 0xAA),
# "PaleGreen": (0x98, 0xFB, 0x98),
# "PaleTurquoise": (0xAF, 0xEE, 0xEE),
# "PaleVioletRed": (0xDB, 0x70, 0x93),
# "PapayaWhip": (0xFF, 0xEF, 0xD5),
# "PeachPuff": (0xFF, 0xDA, 0xB9),
# "Peru": (0xCD, 0x85, 0x3F),
# "Pink": (0xFF, 0xC0, 0xCB),
# "Plum": (0xDD, 0xA0, 0xDD),
# "PowderBlue": (0xB0, 0xE0, 0xE6),
# "Purple": (0x80, 0x00, 0x80),
# "RebeccaPurple": (0x66, 0x33, 0x99),
# "RosyBrown": (0xBC, 0x8F, 0x8F),
# "RoyalBlue": (0x41, 0x69, 0xE1),
# "SaddleBrown": (0x8B, 0x45, 0x13),
# "Salmon": (0xFA, 0x80, 0x72),
# "SandyBrown": (0xF4, 0xA4, 0x60),
# "SeaGreen": (0x2E, 0x8B, 0x57),
# "SeaShell": (0xFF, 0xF5, 0xEE),
# "Sienna": (0xA0, 0x52, 0x2D),
# "Silver": (0xC0, 0xC0, 0xC0),
# "SkyBlue": (0x87, 0xCE, 0xEB),
# "SlateBlue": (0x6A, 0x5A, 0xCD),
# "SlateGray": (0x70, 0x80, 0x90),
# "SlateGrey": (0x70, 0x80, 0x90),
# "Snow": (0xFF, 0xFA, 0xFA),
# "SpringGreen": (0x00, 0xFF, 0x7F),
# "SteelBlue": (0x46, 0x82, 0xB4),
# "Tan": (0xD2, 0xB4, 0x8C),
# "Teal": (0x00, 0x80, 0x80),
# "Thistle": (0xD8, 0xBF, 0xD8),
# "Tomato": (0xFF, 0x63, 0x47),
# "Turquoise": (0x40, 0xE0, 0xD0),
# "Violet": (0xEE, 0x82, 0xEE),
# "Wheat": (0xF5, 0xDE, 0xB3),
# "WhiteSmoke": (0xF5, 0xF5, 0xF5),
# "YellowGreen": (0x9A, 0xCD, 0x32)
})
# allocate memory for the L*a*b* image, then initialize
# the color names list
self.lab = np.zeros((len(colors), 1, 3), dtype="uint8")
self.colorNames = []
# loop over the colors dictionary
for (i, (name, rgb)) in enumerate(colors.items()):
# update the L*a*b* array and the color names list
self.lab[i] = rgb
self.colorNames.append(name)
# convert the L*a*b* array from the RGB color space
# to L*a*b*
self.lab = cv2.cvtColor(self.lab, cv2.COLOR_RGB2LAB)
def label(self, image, c):
try:
# construct a mask for the contour, then compute the
# average L*a*b* value for the masked region
mask = np.zeros(image.shape[:2], dtype="uint8")
cv2.drawContours(mask, [c], -1, 255, -1)
mask = cv2.erode(mask, None, iterations=2)
mean = cv2.mean(image, mask=mask)[:3]
# initialize the minimum distance found thus far
minDist = (np.inf, None)
# loop over the known L*a*b* color values
for (i, row) in enumerate(self.lab):
# compute the distance between the current L*a*b*
# color value and the mean of the image
d = dist.euclidean(row[0], mean)
# if the distance is smaller than the current distance,
# then update the bookkeeping variable
if d < minDist[0]:
minDist = (d, i)
# return the name of the color with the smallest distance
return self.colorNames[minDist[1]]
except Exception:
return "Unknown"
| [
"collections.OrderedDict",
"cv2.drawContours",
"cv2.erode",
"numpy.zeros",
"cv2.cvtColor",
"scipy.spatial.distance.euclidean",
"cv2.mean"
] | [((319, 512), 'collections.OrderedDict', 'OrderedDict', (['{Color.beige: (184, 134, 11), Color.white: (255, 255, 255), Color.red: (255,\n 0, 0), Color.dark_orange: (255, 140, 0), Color.green: (0, 128, 0),\n Color.yellow: (255, 255, 0)}'], {}), '({Color.beige: (184, 134, 11), Color.white: (255, 255, 255),\n Color.red: (255, 0, 0), Color.dark_orange: (255, 140, 0), Color.green:\n (0, 128, 0), Color.yellow: (255, 255, 0)})\n', (330, 512), False, 'from collections import OrderedDict\n'), ((7820, 7861), 'cv2.cvtColor', 'cv2.cvtColor', (['self.lab', 'cv2.COLOR_RGB2LAB'], {}), '(self.lab, cv2.COLOR_RGB2LAB)\n', (7832, 7861), False, 'import cv2\n'), ((8048, 8088), 'numpy.zeros', 'np.zeros', (['image.shape[:2]'], {'dtype': '"""uint8"""'}), "(image.shape[:2], dtype='uint8')\n", (8056, 8088), True, 'import numpy as np\n'), ((8101, 8141), 'cv2.drawContours', 'cv2.drawContours', (['mask', '[c]', '(-1)', '(255)', '(-1)'], {}), '(mask, [c], -1, 255, -1)\n', (8117, 8141), False, 'import cv2\n'), ((8161, 8196), 'cv2.erode', 'cv2.erode', (['mask', 'None'], {'iterations': '(2)'}), '(mask, None, iterations=2)\n', (8170, 8196), False, 'import cv2\n'), ((8216, 8242), 'cv2.mean', 'cv2.mean', (['image'], {'mask': 'mask'}), '(image, mask=mask)\n', (8224, 8242), False, 'import cv2\n'), ((8592, 8620), 'scipy.spatial.distance.euclidean', 'dist.euclidean', (['row[0]', 'mean'], {}), '(row[0], mean)\n', (8606, 8620), True, 'from scipy.spatial import distance as dist\n')] |
# Copyright 2006-2018 <NAME>
"""
Read genePreds from mysql queries.
"""
from pycbio.db import mysqlOps
from pycbio.hgdata.genePred import GenePred
class GenePredMySqlReader(object):
"""Read genePreds from a mysql query"""
def __init__(self, conn, query, queryArgs=None):
self.conn = conn
self.query = query
self.queryArgs = queryArgs
def __iter__(self):
cur = self.conn.cursor()
try:
cur.execute(self.query, self.queryArgs)
colIdxMap = mysqlOps.cursorColIdxMap(cur)
for row in cur:
yield GenePred(row, dbColIdxMap=colIdxMap)
finally:
cur.close()
| [
"pycbio.db.mysqlOps.cursorColIdxMap",
"pycbio.hgdata.genePred.GenePred"
] | [((515, 544), 'pycbio.db.mysqlOps.cursorColIdxMap', 'mysqlOps.cursorColIdxMap', (['cur'], {}), '(cur)\n', (539, 544), False, 'from pycbio.db import mysqlOps\n'), ((595, 631), 'pycbio.hgdata.genePred.GenePred', 'GenePred', (['row'], {'dbColIdxMap': 'colIdxMap'}), '(row, dbColIdxMap=colIdxMap)\n', (603, 631), False, 'from pycbio.hgdata.genePred import GenePred\n')] |
from setuptools import setup, find_packages
def get_requirements():
with open('requirements.txt') as f:
return f.read().splitlines()
def get_long_description():
with open('README.md') as f:
rv = f.read()
return rv
setup(
name='Flask-ACL',
version='0.1.0',
description='Access control lists for Flask.',
long_description=get_long_description(),
url='http://github.com/mikeboers/Flask-ACL',
author='<NAME>',
author_email='<EMAIL>',
license='BSD-3',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=get_requirements(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
],
)
| [
"setuptools.find_packages"
] | [((526, 541), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (539, 541), False, 'from setuptools import setup, find_packages\n')] |
from typing import Dict as _Dict
from typing import Tuple, Union
from libsa4py.cst_transformers import TypeQualifierResolver
from .representations import Bar
from ..test_imports import TestImports
import typing as t
import typing
from collections.abc import Sequence
from builtins import str
import numpy as np
import builtins
d: _Dict = {}
l: t.List[str] = []
q_v: typing.List[builtins.int] = [10]
l_n: t.List[Tuple[int, int]]
t_e: t.Tuple[typing.Any, ...] = ()
u_d: Union[t.List[Tuple[str, int]], t.Tuple[t.Any], t.Tuple[t.List[t.Tuple[t.Set[int]]]]] = None
c: t.Callable[..., t.List] = None
c_h: t.Callable[[t.List, t.Dict], int] = None
t_a: t.Type[t.List] = t.List
tqr: TypeQualifierResolver = TypeQualifierResolver()
lt: t.Literal["123"] = "123"
s: [] = [1, 2]
N: Union[t.List, None] = []
rl: Bar = Bar()
b: True = True
relative_i: TestImports = TestImports()
class Foo:
foo_seq: Sequence = []
def __init__(self, x: t.Tuple, y: t.Pattern=None):
class Delta:
pass
self.x: Tuple = x
n: np.int = np.int(12)
d: Delta = Delta()
def bar(self) -> np.array:
return np.array([1, 2, 3])
def shadow_qn(self):
sq: str = 'Shadow qualified name'
for str in sq:
print(str)
u: "Foo" = Foo(t_e)
foo: Foo = Foo(t_e)
foo_t: Tuple[Foo, TypeQualifierResolver] = (Foo(t_e), TypeQualifierResolver())
| [
"numpy.array",
"libsa4py.cst_transformers.TypeQualifierResolver",
"numpy.int"
] | [((699, 722), 'libsa4py.cst_transformers.TypeQualifierResolver', 'TypeQualifierResolver', ([], {}), '()\n', (720, 722), False, 'from libsa4py.cst_transformers import TypeQualifierResolver\n'), ((1359, 1382), 'libsa4py.cst_transformers.TypeQualifierResolver', 'TypeQualifierResolver', ([], {}), '()\n', (1380, 1382), False, 'from libsa4py.cst_transformers import TypeQualifierResolver\n'), ((1044, 1054), 'numpy.int', 'np.int', (['(12)'], {}), '(12)\n', (1050, 1054), True, 'import numpy as np\n'), ((1129, 1148), 'numpy.array', 'np.array', (['[1, 2, 3]'], {}), '([1, 2, 3])\n', (1137, 1148), True, 'import numpy as np\n')] |
from django.db import models
from app.settings import SECRET_KEY
import hashlib
from django.db.models.signals import pre_save
from django.dispatch import receiver
from lms.utils import check_email
from django.core.exceptions import ValidationError
class User(models.Model):
email = models.EmailField(max_length=100, unique=True)
password_hash = models.CharField(max_length=300, blank=True)
ROLE = [
('admin','admin'),
('customer','customer'),
('agent', 'agent')
]
role = models.CharField(max_length=10, default='customer', choices=ROLE)
is_active = models.BooleanField(default=False)
def __str__(self):
return self.email
def save_password_hash(self, password):
hash_obj = hashlib.sha256()
hash_obj.update(str(password).encode('utf-8'))
hash_obj.update(str(SECRET_KEY).encode('utf-8'))
self.password_hash = hash_obj.hexdigest()
def check_password(self, password):
hash_obj = hashlib.sha256()
hash_obj.update(str(password).encode('utf-8'))
hash_obj.update(str(SECRET_KEY).encode('utf-8'))
return self.password_hash == hash_obj.hexdigest()
def create_user(self, email, password, role="customer"):
self.email = email
self.validate_unique()
self.save_password_hash(password=password)
self.role = role
if role == "customer":
self.is_active = True
self.save()
class Loan(models.Model):
created_for = models.ForeignKey(User, on_delete=models.CASCADE, related_name='customer')
created_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name='agent')
approved_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name='admin',blank=True, null=True)
is_approved = models.BooleanField(default=False)
principal_amount = models.IntegerField(default=0)
interest_rate = models.FloatField(default=0.0)
tenure_months = models.IntegerField(default=0)
emi = models.FloatField(default=0.0)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
STATUS = [
('new','new'),
('rejected','rejected'),
('approved', 'approved')
]
status = models.CharField(max_length=10, default='new', choices=STATUS)
class EditLoanHistory(models.Model):
edit_history_of = models.ForeignKey(Loan, on_delete=models.CASCADE,blank=True, null=True)
created_for = models.ForeignKey(User, on_delete=models.CASCADE, related_name='customer_edit_loan')
created_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name='agent_edit_loan')
approved_by = models.ForeignKey(User, on_delete=models.CASCADE, related_name='admin_edit_loan',blank=True, null=True)
is_approved = models.BooleanField(default=False)
principal_amount = models.IntegerField(default=0)
interest_rate = models.FloatField(default=0.0)
tenure_months = models.IntegerField(default=0)
emi = models.FloatField(default=0.0)
created_at = models.DateTimeField()
STATUS = [
('new','new'),
('rejected','rejected'),
('approved', 'approved')
]
status = models.CharField(max_length=10, default='new', choices=STATUS)
instance_created_at = models.DateTimeField(auto_now_add=True)
@receiver(pre_save, sender=Loan)
def validate_loan_data(instance, sender, **kwargs):
if instance.principal_amount < 1 or instance.interest_rate < 0 or instance.interest_rate > 100 or instance.tenure_months < 0:
raise ValidationError("Invalid value in loan db.") | [
"django.db.models.EmailField",
"hashlib.sha256",
"django.db.models.FloatField",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.core.exceptions.ValidationError",
"django.db.models.DateTimeField",
"django.db.models.BooleanField",
"django.dispatch.receiver",
"django.db.models.CharField"
] | [((3341, 3372), 'django.dispatch.receiver', 'receiver', (['pre_save'], {'sender': 'Loan'}), '(pre_save, sender=Loan)\n', (3349, 3372), False, 'from django.dispatch import receiver\n'), ((286, 332), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(100)', 'unique': '(True)'}), '(max_length=100, unique=True)\n', (303, 332), False, 'from django.db import models\n'), ((353, 397), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)', 'blank': '(True)'}), '(max_length=300, blank=True)\n', (369, 397), False, 'from django.db import models\n'), ((515, 580), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'default': '"""customer"""', 'choices': 'ROLE'}), "(max_length=10, default='customer', choices=ROLE)\n", (531, 580), False, 'from django.db import models\n'), ((597, 631), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (616, 631), False, 'from django.db import models\n'), ((1509, 1583), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""customer"""'}), "(User, on_delete=models.CASCADE, related_name='customer')\n", (1526, 1583), False, 'from django.db import models\n'), ((1601, 1672), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""agent"""'}), "(User, on_delete=models.CASCADE, related_name='agent')\n", (1618, 1672), False, 'from django.db import models\n'), ((1691, 1789), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""admin"""', 'blank': '(True)', 'null': '(True)'}), "(User, on_delete=models.CASCADE, related_name='admin',\n blank=True, null=True)\n", (1708, 1789), False, 'from django.db import models\n'), ((1803, 1837), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1822, 1837), False, 'from django.db import models\n'), ((1861, 1891), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1880, 1891), False, 'from django.db import models\n'), ((1912, 1942), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (1929, 1942), False, 'from django.db import models\n'), ((1963, 1993), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (1982, 1993), False, 'from django.db import models\n'), ((2004, 2034), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (2021, 2034), False, 'from django.db import models\n'), ((2052, 2091), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2072, 2091), False, 'from django.db import models\n'), ((2109, 2144), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2129, 2144), False, 'from django.db import models\n'), ((2272, 2334), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'default': '"""new"""', 'choices': 'STATUS'}), "(max_length=10, default='new', choices=STATUS)\n", (2288, 2334), False, 'from django.db import models\n'), ((2395, 2467), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Loan'], {'on_delete': 'models.CASCADE', 'blank': '(True)', 'null': '(True)'}), '(Loan, on_delete=models.CASCADE, blank=True, null=True)\n', (2412, 2467), False, 'from django.db import models\n'), ((2485, 2574), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""customer_edit_loan"""'}), "(User, on_delete=models.CASCADE, related_name=\n 'customer_edit_loan')\n", (2502, 2574), False, 'from django.db import models\n'), ((2587, 2673), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""agent_edit_loan"""'}), "(User, on_delete=models.CASCADE, related_name=\n 'agent_edit_loan')\n", (2604, 2673), False, 'from django.db import models\n'), ((2687, 2796), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""admin_edit_loan"""', 'blank': '(True)', 'null': '(True)'}), "(User, on_delete=models.CASCADE, related_name=\n 'admin_edit_loan', blank=True, null=True)\n", (2704, 2796), False, 'from django.db import models\n'), ((2809, 2843), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (2828, 2843), False, 'from django.db import models\n'), ((2867, 2897), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2886, 2897), False, 'from django.db import models\n'), ((2918, 2948), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (2935, 2948), False, 'from django.db import models\n'), ((2969, 2999), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (2988, 2999), False, 'from django.db import models\n'), ((3010, 3040), 'django.db.models.FloatField', 'models.FloatField', ([], {'default': '(0.0)'}), '(default=0.0)\n', (3027, 3040), False, 'from django.db import models\n'), ((3058, 3080), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (3078, 3080), False, 'from django.db import models\n'), ((3208, 3270), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(10)', 'default': '"""new"""', 'choices': 'STATUS'}), "(max_length=10, default='new', choices=STATUS)\n", (3224, 3270), False, 'from django.db import models\n'), ((3298, 3337), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3318, 3337), False, 'from django.db import models\n'), ((746, 762), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (760, 762), False, 'import hashlib\n'), ((989, 1005), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (1003, 1005), False, 'import hashlib\n'), ((3569, 3613), 'django.core.exceptions.ValidationError', 'ValidationError', (['"""Invalid value in loan db."""'], {}), "('Invalid value in loan db.')\n", (3584, 3613), False, 'from django.core.exceptions import ValidationError\n')] |
import setuptools
import pyhpo
with open("README.rst", "r") as fh:
long_description = fh.read()
PACKAGES = (
'pyhpo',
)
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Healthcare Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Topic :: Scientific/Engineering :: Medical Science Apps.',
'Topic :: Software Development :: Libraries :: Python Modules',
]
setuptools.setup(
name='pyhpo',
version=pyhpo.__version__,
author=pyhpo.__author__,
author_email="<EMAIL>",
description="A Python package to work with the HPO Ontology",
long_description=long_description,
long_description_content_type="text/x-rst",
url="https://github.com/anergictcell/pyhpo",
packages=PACKAGES,
classifiers=CLASSIFIERS,
python_requires='>=3.6',
include_package_data=True,
extras_require={
'pandas': ['pandas'],
'scipy': ['scipy'],
'all': ['pandas', 'scipy']
}
)
| [
"setuptools.setup"
] | [((865, 1374), 'setuptools.setup', 'setuptools.setup', ([], {'name': '"""pyhpo"""', 'version': 'pyhpo.__version__', 'author': 'pyhpo.__author__', 'author_email': '"""<EMAIL>"""', 'description': '"""A Python package to work with the HPO Ontology"""', 'long_description': 'long_description', 'long_description_content_type': '"""text/x-rst"""', 'url': '"""https://github.com/anergictcell/pyhpo"""', 'packages': 'PACKAGES', 'classifiers': 'CLASSIFIERS', 'python_requires': '""">=3.6"""', 'include_package_data': '(True)', 'extras_require': "{'pandas': ['pandas'], 'scipy': ['scipy'], 'all': ['pandas', 'scipy']}"}), "(name='pyhpo', version=pyhpo.__version__, author=pyhpo.\n __author__, author_email='<EMAIL>', description=\n 'A Python package to work with the HPO Ontology', long_description=\n long_description, long_description_content_type='text/x-rst', url=\n 'https://github.com/anergictcell/pyhpo', packages=PACKAGES, classifiers\n =CLASSIFIERS, python_requires='>=3.6', include_package_data=True,\n extras_require={'pandas': ['pandas'], 'scipy': ['scipy'], 'all': [\n 'pandas', 'scipy']})\n", (881, 1374), False, 'import setuptools\n')] |
import tensorflow as tf
from utils import mkdir_p
from PGGAN import PGGAN
from utils import CelebA, CelebA_HQ
flags = tf.app.flags
import os
os.environ['CUDA_VISIBLE_DEVICES']='0'
flags.DEFINE_string("OPER_NAME", "Experiment_6_30_1", "the name of experiments")
flags.DEFINE_integer("OPER_FLAG", 0, "Flag of opertion: 0 is for training ")
flags.DEFINE_string("path" , '?', "Path of training data, for example /home/hehe/")
flags.DEFINE_integer("batch_size", 16, "Batch size")
flags.DEFINE_integer("sample_size", 512, "Size of sample")
flags.DEFINE_integer("max_iters", 40000, "Maxmization of training number")
flags.DEFINE_float("learn_rate", 0.001, "Learning rate for G and D networks")
flags.DEFINE_integer("lam_gp", 10, "Weight of gradient penalty term")
flags.DEFINE_float("lam_eps", 0.001, "Weight for the epsilon term")
flags.DEFINE_integer("flag", 11, "FLAG of gan training process")
flags.DEFINE_boolean("use_wscale", True, "Using the scale of weight")
flags.DEFINE_boolean("celeba", True, "Whether using celeba or using CelebA-HQ")
FLAGS = flags.FLAGS
if __name__ == "__main__":
root_log_dir = "./output/{}/logs/".format(FLAGS.OPER_NAME)
mkdir_p(root_log_dir)
if FLAGS.celeba:
data_In = CelebA(FLAGS.path)
else:
data_In = CelebA_HQ(FLAGS.path)
print ("the num of dataset", len(data_In.image_list))
if FLAGS.OPER_FLAG == 0:
fl = [1,2,2,3,3,4,4,5,5,6,6]
r_fl = [1,1,2,2,3,3,4,4,5,5,6]
for i in range(FLAGS.flag):
t = False if (i % 2 == 0) else True
pggan_checkpoint_dir_write = "./output/{}/model_pggan_{}/{}/".format(FLAGS.OPER_NAME, FLAGS.OPER_FLAG, fl[i])
sample_path = "./output/{}/{}/sample_{}_{}".format(FLAGS.OPER_NAME, FLAGS.OPER_FLAG, fl[i], t)
mkdir_p(pggan_checkpoint_dir_write)
mkdir_p(sample_path)
pggan_checkpoint_dir_read = "./output/{}/model_pggan_{}/{}/".format(FLAGS.OPER_NAME, FLAGS.OPER_FLAG, r_fl[i])
pggan = PGGAN(batch_size=FLAGS.batch_size, max_iters=FLAGS.max_iters,
model_path=pggan_checkpoint_dir_write, read_model_path=pggan_checkpoint_dir_read,
data=data_In, sample_size=FLAGS.sample_size,
sample_path=sample_path, log_dir=root_log_dir, learn_rate=FLAGS.learn_rate, lam_gp=FLAGS.lam_gp, lam_eps=FLAGS.lam_eps, PG= fl[i],
t=t, use_wscale=FLAGS.use_wscale, is_celeba=FLAGS.celeba)
pggan.build_model_PGGan()
pggan.train()
| [
"utils.mkdir_p",
"utils.CelebA_HQ",
"utils.CelebA",
"PGGAN.PGGAN"
] | [((1159, 1180), 'utils.mkdir_p', 'mkdir_p', (['root_log_dir'], {}), '(root_log_dir)\n', (1166, 1180), False, 'from utils import mkdir_p\n'), ((1221, 1239), 'utils.CelebA', 'CelebA', (['FLAGS.path'], {}), '(FLAGS.path)\n', (1227, 1239), False, 'from utils import CelebA, CelebA_HQ\n'), ((1268, 1289), 'utils.CelebA_HQ', 'CelebA_HQ', (['FLAGS.path'], {}), '(FLAGS.path)\n', (1277, 1289), False, 'from utils import CelebA, CelebA_HQ\n'), ((1783, 1818), 'utils.mkdir_p', 'mkdir_p', (['pggan_checkpoint_dir_write'], {}), '(pggan_checkpoint_dir_write)\n', (1790, 1818), False, 'from utils import mkdir_p\n'), ((1831, 1851), 'utils.mkdir_p', 'mkdir_p', (['sample_path'], {}), '(sample_path)\n', (1838, 1851), False, 'from utils import mkdir_p\n'), ((1996, 2393), 'PGGAN.PGGAN', 'PGGAN', ([], {'batch_size': 'FLAGS.batch_size', 'max_iters': 'FLAGS.max_iters', 'model_path': 'pggan_checkpoint_dir_write', 'read_model_path': 'pggan_checkpoint_dir_read', 'data': 'data_In', 'sample_size': 'FLAGS.sample_size', 'sample_path': 'sample_path', 'log_dir': 'root_log_dir', 'learn_rate': 'FLAGS.learn_rate', 'lam_gp': 'FLAGS.lam_gp', 'lam_eps': 'FLAGS.lam_eps', 'PG': 'fl[i]', 't': 't', 'use_wscale': 'FLAGS.use_wscale', 'is_celeba': 'FLAGS.celeba'}), '(batch_size=FLAGS.batch_size, max_iters=FLAGS.max_iters, model_path=\n pggan_checkpoint_dir_write, read_model_path=pggan_checkpoint_dir_read,\n data=data_In, sample_size=FLAGS.sample_size, sample_path=sample_path,\n log_dir=root_log_dir, learn_rate=FLAGS.learn_rate, lam_gp=FLAGS.lam_gp,\n lam_eps=FLAGS.lam_eps, PG=fl[i], t=t, use_wscale=FLAGS.use_wscale,\n is_celeba=FLAGS.celeba)\n', (2001, 2393), False, 'from PGGAN import PGGAN\n')] |
import MultiCal
import TUGmTRL
import skrf as rf
import numpy as np
c0 = 299792458 # speed of light in vacuum (m/s)
def correct_sw(NW, gamma_f, gamma_r):
# correct for switch-terms
# gamma_f forward (source by port-1)
# gamma_r reverse (source by port-2)
G21 = gamma_f.s.squeeze()
G12 = gamma_r.s.squeeze()
freq = NW.frequency
SS = []
for S,g21,g12 in zip(NW.s,G21,G12):
S11 = (S[0,0]-S[0,1]*S[1,0]*g21)/(1-S[0,1]*S[1,0]*g21*g12)
S12 = (S[0,1]-S[0,0]*S[0,1]*g12)/(1-S[0,1]*S[1,0]*g21*g12)
S21 = (S[1,0]-S[1,1]*S[1,0]*g21)/(1-S[0,1]*S[1,0]*g21*g12)
S22 = (S[1,1]-S[0,1]*S[1,0]*g12)/(1-S[0,1]*S[1,0]*g21*g12)
SS.append([[S11,S12],[S21,S22]])
SS = np.array(SS)
return rf.Network(frequency=freq, s=SS)
class mTRL:
"""
Multiline TRL calibration.
Two algorithms implemented here:
1. The classical mTRL from NIST (MultiCal) [2,3]
2. Improved implementation based on [1]
[1] <NAME>, <NAME>, <NAME>, "Improving the Reliability
of the Multiline TRL Calibration Algorithm," 98th ARFTG Conference, Jan. 2022
[2] <NAME>, <NAME> and <NAME>, "Multiline TRL revealed,"
60th ARFTG Conference Digest, Fall 2002, pp. 131-155
[3] <NAME>, "A multiline method of network analyzer calibration",
IEEE Transactions on Microwave Theory and Techniques,
vol. 39, no. 7, pp. 1205-1215, July 1991.
"""
def __init__(self, lines, line_lengths, reflect,
reflect_est=[-1], reflect_offset=[0], ereff_est=1+0j, switch_term=None):
"""
mTRL initializer.
Parameters
--------------
lines : list of :class:`~skrf.network.Network`
Measured lines. The first one is defined as Thru,
and by default calibration is defined in its middel.
line_lengths : list of float
Lengths of the line. In the same order as the paramter 'lines'
reflect : list of :class:`~skrf.network.Network`
Measured reflect standards (2-port device)
reflect_est : list of float
Estimated reflection coefficient of the reflect standard.
In the same order as the parameter 'reflect'.
E.g., if you have a short : [-1]
reflect_offset : list of float
Offsets of the reflect standards from the reference plane (mid of Thru standard)
Negative: towards the port
Positive: away from port
Units in meters.
ereff_est : complex
Estimated effective permittivity.
switch_term : list of :class:`~skrf.network.Network`
list of 1-port networks. Holds 2 elements:
1. network for forward switch term.
2. network for reverse switch term.
"""
self.lines = lines
self.line_lengths = line_lengths
self.reflect = reflect
self.reflect_est = reflect_est
self.reflect_offset = reflect_offset
self.ereff_est = ereff_est
self.switch_term = switch_term
# correct for switch terms
if self.switch_term is not None:
self.lines = [correct_sw(NT, switch_term[0], switch_term[1]) for NT in self.lines]
self.reflect = [correct_sw(NT, switch_term[0], switch_term[1]) for NT in self.reflect]
def run_multical(self):
# MultiCal
print('\nMultiCal mTRL in progress:')
f = self.lines[0].frequency.f
# measurements
T_lines = [ rf.s2t(x.s) for x in self.lines ]
S_short = [ x.s for x in self.reflect ]
line_lengths = self.line_lengths
reflect_est = self.reflect_est
reflect_offset = self.reflect_offset
# initial arrays to fill
gamma_full = []
X_full = []
K_full = []
# initial estimate
ereff_0 = self.ereff_est
gamma_0 = 2*np.pi*f[0]/c0*np.sqrt(-ereff_0)
gamma_0 = abs(gamma_0.real) + 1j*abs(gamma_0.imag)
# perform the calibration
for inx, ff in enumerate(f):
meas_lines_T = [ x[inx] for x in T_lines ]
meas_reflect_S = [ x[inx] for x in S_short ]
X, K, gamma = MultiCal.mTRL(meas_lines_T, line_lengths, meas_reflect_S,
gamma_0, reflect_est, reflect_offset)
if inx+1 < len(f):
gamma_0 = gamma.real + 1j*gamma.imag*f[inx+1]/ff
X_full.append(X)
K_full.append(K)
gamma_full.append(gamma)
print(f'Frequency: {(ff*1e-9).round(4)} GHz done!', end='\r', flush=True)
self.X = np.array(X_full)
self.K = np.array(K_full)
self.gamma = np.array(gamma_full)
self.error_coef()
def run_tug(self):
# TUG mTRL
print('\nTUG mTRL in progress:')
f = self.lines[0].frequency.f
# measurements
T_lines = [ rf.s2t(x.s) for x in self.lines ]
S_short = [ x.s for x in self.reflect ]
line_lengths = self.line_lengths
reflect_est = self.reflect_est
reflect_offset = self.reflect_offset
# initial arrays to fill
gamma_full = []
X_full = []
K_full = []
abs_lambda_full = []
# initial estimate
ereff_0 = self.ereff_est
# perform the calibration
for inx, ff in enumerate(f):
meas_lines_T = [ x[inx] for x in T_lines ]
meas_reflect_S = [ x[inx] for x in S_short ]
X, K, ereff_0, gamma, abs_lambda = TUGmTRL.mTRL(meas_lines_T, line_lengths,
meas_reflect_S, ereff_0, reflect_est, reflect_offset, ff)
X_full.append(X)
K_full.append(K)
gamma_full.append(gamma)
abs_lambda_full.append(abs_lambda)
print(f'Frequency: {(ff*1e-9).round(4)} GHz done!', end='\r', flush=True)
self.X = np.array(X_full)
self.K = np.array(K_full)
self.gamma = np.array(gamma_full)
self.abs_lambda = np.array(abs_lambda_full)
self.error_coef()
def apply_cal(self, NW, left=True):
# apply calibration to a 1-port or 2-port network.
# NW: the network to be calibrated (1- or 2-port).
# left: boolean: define which port to use when 1-port network is given
# if left is True, left port is used; otherwise right port is used.
nports = np.sqrt(len(NW.port_tuples)).astype('int') # number of ports
# if 1-port, convert to 2-port (later convert back to 1-port)
if nports < 2:
NW = rf.two_port_reflect(NW)
if self.switch_term is not None:
NW = correct_sw(NW, self.switch_term[0], self.switch_term[1])
# apply cal
S_cal = []
for x,k,s in zip(self.X, self.K, NW.s):
xinv = np.linalg.pinv(x)
M_ = np.array([-s[0,0]*s[1,1]+s[0,1]*s[1,0], -s[1,1], s[0,0], 1])
T_ = xinv@M_
s21_cal = k*s[1,0]/T_[-1]
T_ = T_/T_[-1]
S_cal.append([[T_[2], (T_[0]-T_[2]*T_[1])/s21_cal],[s21_cal, -T_[1]]])
S_cal = np.array(S_cal)
freq = NW.frequency
# revert to 1-port device if the input was a 1-port device
if nports < 2:
if left: # left port
S_cal = S_cal[:,0,0]
else: # right port
S_cal = S_cal[:,1,1]
return rf.Network(frequency=freq, s=S_cal)
def error_coef(self):
# return the 3 error terms of each port
X = self.X
self.coefs = {}
# forward errors
self.coefs['ERF'] = X[:,2,2] - X[:,2,3]*X[:,3,2]
self.coefs['EDF'] = X[:,2,3]
self.coefs['ESF'] = -X[:,3,2]
# reverse errors
self.coefs['ERR'] = X[:,1,1] - X[:,3,1]*X[:,1,3]
self.coefs['EDR'] = -X[:,1,3]
self.coefs['ESR'] = X[:,3,1]
def shift_plane(self, d=0):
# shift calibration plane by distance d
# negative: shift toward port
# positive: shift away from port
# e.g., if your Thru has a length of L,
# then d=-L/2 to shift the plane backward
X_new = []
K_new = []
for x,k,g in zip(self.X, self.K, self.gamma):
z = np.exp(-g*d)
KX_new = k*[email protected]([z**2, 1, 1, 1/z**2])
X_new.append(KX_new/KX_new[-1,-1])
K_new.append(KX_new[-1,-1])
self.X = np.array(X_new)
self.K = np.array(K_new)
def renorm_impedance(self, Z_new, Z0=50):
# re-normalize reference calibration impedance
# by default, the ref impedance is the characteristic
# impedance of the line standards.
# Z_new: new ref. impedance (can be array if frequency dependent)
# Z0: old ref. impedance (can be array if frequency dependent)
# ensure correct array dimensions (if not, you get an error!)
N = len(self.K)
Z_new = Z_new*np.ones(N)
Z0 = Z0*np.ones(N)
G = (Z_new-Z0)/(Z_new+Z0)
X_new = []
K_new = []
for x,k,g in zip(self.X, self.K, G):
KX_new = k*[email protected]([[1, -g],[-g, 1]],[[1, g],[g, 1]])/(1-g**2)
X_new.append(KX_new/KX_new[-1,-1])
K_new.append(KX_new[-1,-1])
self.X = np.array(X_new)
self.K = np.array(K_new)
| [
"MultiCal.mTRL",
"skrf.Network",
"numpy.sqrt",
"numpy.linalg.pinv",
"numpy.ones",
"TUGmTRL.mTRL",
"numpy.diag",
"numpy.exp",
"numpy.array",
"skrf.s2t",
"numpy.kron",
"skrf.two_port_reflect"
] | [((761, 773), 'numpy.array', 'np.array', (['SS'], {}), '(SS)\n', (769, 773), True, 'import numpy as np\n'), ((786, 818), 'skrf.Network', 'rf.Network', ([], {'frequency': 'freq', 's': 'SS'}), '(frequency=freq, s=SS)\n', (796, 818), True, 'import skrf as rf\n'), ((4987, 5003), 'numpy.array', 'np.array', (['X_full'], {}), '(X_full)\n', (4995, 5003), True, 'import numpy as np\n'), ((5022, 5038), 'numpy.array', 'np.array', (['K_full'], {}), '(K_full)\n', (5030, 5038), True, 'import numpy as np\n'), ((5061, 5081), 'numpy.array', 'np.array', (['gamma_full'], {}), '(gamma_full)\n', (5069, 5081), True, 'import numpy as np\n'), ((6415, 6431), 'numpy.array', 'np.array', (['X_full'], {}), '(X_full)\n', (6423, 6431), True, 'import numpy as np\n'), ((6450, 6466), 'numpy.array', 'np.array', (['K_full'], {}), '(K_full)\n', (6458, 6466), True, 'import numpy as np\n'), ((6489, 6509), 'numpy.array', 'np.array', (['gamma_full'], {}), '(gamma_full)\n', (6497, 6509), True, 'import numpy as np\n'), ((6537, 6562), 'numpy.array', 'np.array', (['abs_lambda_full'], {}), '(abs_lambda_full)\n', (6545, 6562), True, 'import numpy as np\n'), ((7694, 7709), 'numpy.array', 'np.array', (['S_cal'], {}), '(S_cal)\n', (7702, 7709), True, 'import numpy as np\n'), ((8011, 8046), 'skrf.Network', 'rf.Network', ([], {'frequency': 'freq', 's': 'S_cal'}), '(frequency=freq, s=S_cal)\n', (8021, 8046), True, 'import skrf as rf\n'), ((9117, 9132), 'numpy.array', 'np.array', (['X_new'], {}), '(X_new)\n', (9125, 9132), True, 'import numpy as np\n'), ((9151, 9166), 'numpy.array', 'np.array', (['K_new'], {}), '(K_new)\n', (9159, 9166), True, 'import numpy as np\n'), ((10020, 10035), 'numpy.array', 'np.array', (['X_new'], {}), '(X_new)\n', (10028, 10035), True, 'import numpy as np\n'), ((10054, 10069), 'numpy.array', 'np.array', (['K_new'], {}), '(K_new)\n', (10062, 10069), True, 'import numpy as np\n'), ((3759, 3770), 'skrf.s2t', 'rf.s2t', (['x.s'], {}), '(x.s)\n', (3765, 3770), True, 'import skrf as rf\n'), ((4209, 4226), 'numpy.sqrt', 'np.sqrt', (['(-ereff_0)'], {}), '(-ereff_0)\n', (4216, 4226), True, 'import numpy as np\n'), ((4516, 4615), 'MultiCal.mTRL', 'MultiCal.mTRL', (['meas_lines_T', 'line_lengths', 'meas_reflect_S', 'gamma_0', 'reflect_est', 'reflect_offset'], {}), '(meas_lines_T, line_lengths, meas_reflect_S, gamma_0,\n reflect_est, reflect_offset)\n', (4529, 4615), False, 'import MultiCal\n'), ((5296, 5307), 'skrf.s2t', 'rf.s2t', (['x.s'], {}), '(x.s)\n', (5302, 5307), True, 'import skrf as rf\n'), ((5989, 6091), 'TUGmTRL.mTRL', 'TUGmTRL.mTRL', (['meas_lines_T', 'line_lengths', 'meas_reflect_S', 'ereff_0', 'reflect_est', 'reflect_offset', 'ff'], {}), '(meas_lines_T, line_lengths, meas_reflect_S, ereff_0,\n reflect_est, reflect_offset, ff)\n', (6001, 6091), False, 'import TUGmTRL\n'), ((7122, 7145), 'skrf.two_port_reflect', 'rf.two_port_reflect', (['NW'], {}), '(NW)\n', (7141, 7145), True, 'import skrf as rf\n'), ((7393, 7410), 'numpy.linalg.pinv', 'np.linalg.pinv', (['x'], {}), '(x)\n', (7407, 7410), True, 'import numpy as np\n'), ((7429, 7501), 'numpy.array', 'np.array', (['[-s[0, 0] * s[1, 1] + s[0, 1] * s[1, 0], -s[1, 1], s[0, 0], 1]'], {}), '([-s[0, 0] * s[1, 1] + s[0, 1] * s[1, 0], -s[1, 1], s[0, 0], 1])\n', (7437, 7501), True, 'import numpy as np\n'), ((8927, 8941), 'numpy.exp', 'np.exp', (['(-g * d)'], {}), '(-g * d)\n', (8933, 8941), True, 'import numpy as np\n'), ((9660, 9670), 'numpy.ones', 'np.ones', (['N'], {}), '(N)\n', (9667, 9670), True, 'import numpy as np\n'), ((9691, 9701), 'numpy.ones', 'np.ones', (['N'], {}), '(N)\n', (9698, 9701), True, 'import numpy as np\n'), ((8966, 9001), 'numpy.diag', 'np.diag', (['[z ** 2, 1, 1, 1 / z ** 2]'], {}), '([z ** 2, 1, 1, 1 / z ** 2])\n', (8973, 9001), True, 'import numpy as np\n'), ((9859, 9904), 'numpy.kron', 'np.kron', (['[[1, -g], [-g, 1]]', '[[1, g], [g, 1]]'], {}), '([[1, -g], [-g, 1]], [[1, g], [g, 1]])\n', (9866, 9904), True, 'import numpy as np\n')] |
import h5py
import numpy as np
'''
def loading_data(path):
print('******************************************************')
print('dataset:{0}'.format(path))
print('******************************************************')
file = h5py.File(path)
images = file['IAll'][:].transpose(0,3,2,1)
labels = file['LAll'][:].transpose(1,0)
tags = file['YAll'][:].transpose(1,0)
file.close()
return images, tags, labels
'''
def loading_data(dataname):
if dataname == 'flickr':
path = '../Data/raw_mir.mat'
elif dataname == 'nuswide':
path = '../Data/raw_nus.mat'
elif dataname == 'coco':
path = '../Data/raw_coco.mat'
else:
print('Dataname Error!')
f = h5py.File(path, 'r')
X, Y, L = {}, {}, {}
X['train'] = f['I_tr'][:].transpose(3, 0, 1, 2)
Y['train'] = f['T_tr'][:].T
L['train'] = f['L_tr'][:].T
X['query'] = f['I_te'][:].transpose(3, 0, 1, 2)
Y['query'] = f['T_te'][:].T
L['query'] = f['L_te'][:].T
X['retrieval'] = f['I_db'][:].transpose(3, 0, 1, 2)
Y['retrieval'] = f['T_db'][:].T
L['retrieval'] = f['L_db'][:].T
f.close()
return X, Y, L
def split_data(images, tags, labels, QUERY_SIZE, TRAINING_SIZE, DATABASE_SIZE):
X = {}
index_all = np.random.permutation(QUERY_SIZE+DATABASE_SIZE)
ind_Q = index_all[0:QUERY_SIZE]
ind_T = index_all[QUERY_SIZE:TRAINING_SIZE + QUERY_SIZE]
ind_R = index_all[QUERY_SIZE:DATABASE_SIZE + QUERY_SIZE]
X['query'] = images[ind_Q, :, :, :]
X['train'] = images[ind_T, :, :, :]
X['retrieval'] = images[ind_R, :, :, :]
Y = {}
Y['query'] = tags[ind_Q, :]
Y['train'] = tags[ind_T, :]
Y['retrieval'] = tags[ind_R, :]
L = {}
L['query'] = labels[ind_Q, :]
L['train'] = labels[ind_T, :]
L['retrieval'] = labels[ind_R, :]
return X, Y, L
| [
"numpy.random.permutation",
"h5py.File"
] | [((708, 728), 'h5py.File', 'h5py.File', (['path', '"""r"""'], {}), "(path, 'r')\n", (717, 728), False, 'import h5py\n'), ((1254, 1303), 'numpy.random.permutation', 'np.random.permutation', (['(QUERY_SIZE + DATABASE_SIZE)'], {}), '(QUERY_SIZE + DATABASE_SIZE)\n', (1275, 1303), True, 'import numpy as np\n')] |
from flask import Flask, request, jsonify, make_response, abort
from http import HTTPStatus
import logging
import tohil
from . import message_handlers
APP_NAME = 'piaware-configurator'
app = Flask(__name__)
app.url_map.strict_slashes = False
@app.before_first_request
def before_first_request_func():
""" Initialization of Tcl package dependencies and logger
"""
tohil.eval('package require fa_piaware_config')
tohil.eval('package require fa_sudo')
tohil.eval('package require fa_sysinfo')
tohil.eval('::fa_piaware_config::new_combined_config piawareConfig')
@app.route('/configurator/', methods=["POST"])
def configurator():
"""Endpoint to serve POST requests. Only accepts content-type application/json.
Returns a Flask Response object
"""
json_payload = validate_json(request)
response = process_json(json_payload)
return response
def validate_json(request):
"""Validate request to ensure it is json
Return the json data if valid
"""
# Check content type
if not request.is_json:
response = make_response(jsonify(success=False,
error="content-type must be application/json"),
HTTPStatus.UNSUPPORTED_MEDIA_TYPE)
abort(response)
try:
_ = request.data.decode("utf-8")
except UnicodeDecodeError:
response = make_response(jsonify(sucess=False,
error="Data must be UTF-8 encoded"),
HTTPStatus.BAD_REQUEST)
abort(response)
# Check for valid json
if not isinstance(request.get_json(silent=True), dict):
response = make_response(jsonify(sucess=False,
error="Invalid json in request"),
HTTPStatus.BAD_REQUEST)
abort(response)
return request.get_json()
def validate_form(request):
"""Validate request to ensure it's application/x-www-form-urlencoded
Return form data as a dict
"""
if request.content_type != 'application/x-www-form-urlencoded':
response = make_response(jsonify(success=False, error="content-type must be application/x-www-form-urlencoded"), HTTPStatus.UNSUPPORTED_MEDIA_TYPE)
abort(response)
return request.form.to_dict()
def process_json(json_payload):
""" Process json payload and call approriate piaware-config handler functions
Returns a Flask Response object
"""
try:
request = json_payload['request']
except KeyError:
response = make_response(jsonify(success=False, error="Missing request field"), HTTPStatus.BAD_REQUEST)
abort(response)
app.logger.info(f'Incoming request: {request}')
app.logger.debug(f'{json_payload}')
if request == 'piaware_config_read':
json_response, status_code = message_handlers.handle_read_config_request(json_payload)
elif request == 'piaware_config_write':
json_response, status_code = message_handlers.handle_write_config_request(json_payload)
elif request == 'get_device_info':
json_response, status_code = message_handlers.handle_get_device_info_request()
elif request == 'get_device_state':
json_response, status_code = message_handlers.handle_get_device_state_request()
elif request == 'get_wifi_networks':
json_response, status_code = message_handlers.handle_get_wifi_networks_request()
elif request == 'set_wifi_config':
json_response, status_code = message_handlers.handle_set_wifi_config_request(json_payload)
else:
app.logger.error(f'Unrecognized request: {request}')
json_response, status_code = {"success": False, "error": "Unsupported request"}, HTTPStatus.BAD_REQUEST
app.logger.debug(f'Response: {json_response}')
return make_response(jsonify(json_response), status_code)
class ContextFilter(logging.Filter):
"""
This is a filter which injects contextual information into the log.
"""
def filter(self, record):
record.APP_NAME = APP_NAME
return True
def setup_production_logger():
""" Set up logger properly when deployed behind Gunicorn WSGI server
"""
# Get Gunicorn logger
gunicorn_logger = logging.getLogger('gunicorn.error')
# Use Gunicorn logger handlers and log level
app.logger.handlers = gunicorn_logger.handlers
app.logger.setLevel(gunicorn_logger.level)
# Modify log message format to include app name
formatter = logging.Formatter('%(asctime)s - %(APP_NAME)s - [%(levelname)s] - %(message)s')
filter = ContextFilter()
app.logger.handlers[0].setFormatter(formatter)
app.logger.handlers[0].addFilter(filter)
| [
"logging.getLogger",
"flask.Flask",
"logging.Formatter",
"flask.request.data.decode",
"tohil.eval",
"flask.request.get_json",
"flask.request.form.to_dict",
"flask.abort",
"flask.jsonify"
] | [((194, 209), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (199, 209), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((381, 428), 'tohil.eval', 'tohil.eval', (['"""package require fa_piaware_config"""'], {}), "('package require fa_piaware_config')\n", (391, 428), False, 'import tohil\n'), ((433, 470), 'tohil.eval', 'tohil.eval', (['"""package require fa_sudo"""'], {}), "('package require fa_sudo')\n", (443, 470), False, 'import tohil\n'), ((475, 515), 'tohil.eval', 'tohil.eval', (['"""package require fa_sysinfo"""'], {}), "('package require fa_sysinfo')\n", (485, 515), False, 'import tohil\n'), ((520, 588), 'tohil.eval', 'tohil.eval', (['"""::fa_piaware_config::new_combined_config piawareConfig"""'], {}), "('::fa_piaware_config::new_combined_config piawareConfig')\n", (530, 588), False, 'import tohil\n'), ((1886, 1904), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1902, 1904), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((2309, 2331), 'flask.request.form.to_dict', 'request.form.to_dict', ([], {}), '()\n', (2329, 2331), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((4266, 4301), 'logging.getLogger', 'logging.getLogger', (['"""gunicorn.error"""'], {}), "('gunicorn.error')\n", (4283, 4301), False, 'import logging\n'), ((4519, 4598), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(APP_NAME)s - [%(levelname)s] - %(message)s"""'], {}), "('%(asctime)s - %(APP_NAME)s - [%(levelname)s] - %(message)s')\n", (4536, 4598), False, 'import logging\n'), ((1279, 1294), 'flask.abort', 'abort', (['response'], {}), '(response)\n', (1284, 1294), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1317, 1345), 'flask.request.data.decode', 'request.data.decode', (['"""utf-8"""'], {}), "('utf-8')\n", (1336, 1345), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1858, 1873), 'flask.abort', 'abort', (['response'], {}), '(response)\n', (1863, 1873), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((2281, 2296), 'flask.abort', 'abort', (['response'], {}), '(response)\n', (2286, 2296), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((3854, 3876), 'flask.jsonify', 'jsonify', (['json_response'], {}), '(json_response)\n', (3861, 3876), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1099, 1168), 'flask.jsonify', 'jsonify', ([], {'success': '(False)', 'error': '"""content-type must be application/json"""'}), "(success=False, error='content-type must be application/json')\n", (1106, 1168), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1567, 1582), 'flask.abort', 'abort', (['response'], {}), '(response)\n', (1572, 1582), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1633, 1662), 'flask.request.get_json', 'request.get_json', ([], {'silent': '(True)'}), '(silent=True)\n', (1649, 1662), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1704, 1758), 'flask.jsonify', 'jsonify', ([], {'sucess': '(False)', 'error': '"""Invalid json in request"""'}), "(sucess=False, error='Invalid json in request')\n", (1711, 1758), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((2150, 2241), 'flask.jsonify', 'jsonify', ([], {'success': '(False)', 'error': '"""content-type must be application/x-www-form-urlencoded"""'}), "(success=False, error=\n 'content-type must be application/x-www-form-urlencoded')\n", (2157, 2241), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((2686, 2701), 'flask.abort', 'abort', (['response'], {}), '(response)\n', (2691, 2701), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((1410, 1467), 'flask.jsonify', 'jsonify', ([], {'sucess': '(False)', 'error': '"""Data must be UTF-8 encoded"""'}), "(sucess=False, error='Data must be UTF-8 encoded')\n", (1417, 1467), False, 'from flask import Flask, request, jsonify, make_response, abort\n'), ((2599, 2652), 'flask.jsonify', 'jsonify', ([], {'success': '(False)', 'error': '"""Missing request field"""'}), "(success=False, error='Missing request field')\n", (2606, 2652), False, 'from flask import Flask, request, jsonify, make_response, abort\n')] |
"""
Created on 21 Jun 2019
@author: <NAME> (<EMAIL>)
"""
import sys
from PIL import ImageFont
from scs_core.data.datetime import LocalizedDatetime, ISO8601
from scs_core.data.queue_report import QueueReport, QueueStatus
from scs_core.position.gps_datum import GPSDatum
from scs_core.sys.system_id import SystemID
from scs_display.display.text_display import TextDisplay
from scs_host.sys.host import Host
from scs_host.sys.hostname import Hostname
from scs_host.sys.nmcli import NMCLi
# --------------------------------------------------------------------------------------------------------------------
class SystemDisplay(object):
"""
classdocs
"""
__QUEUE_STATE = {
QueueStatus.NONE: "FAULT",
QueueStatus.INHIBITED: "PUBLISHING INHIBITED",
QueueStatus.STARTING: "STARTING",
QueueStatus.CONNECTING: "CONNECTING",
QueueStatus.WAITING_FOR_DATA: "PREPARING DATA",
QueueStatus.PUBLISHING: "PUBLISHING DATA",
QueueStatus.CLEARING: "PUBLISHING DATA",
QueueStatus.QUEUING: "QUEUING DATA"
}
__FONT = ImageFont.load_default()
# ----------------------------------------------------------------------------------------------------------------
@staticmethod
def system_tag():
id = SystemID.load(Host)
return id.message_tag()
@staticmethod
def system_hostname():
hostname = Hostname.find()
return hostname.operational
@staticmethod
def formatted_datetime(datetime):
if datetime is None:
return ""
iso = ISO8601.construct(datetime)
if iso is None:
return ""
return "%s %s %s" % (iso.date, iso.time, iso.timezone)
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def construct(cls, device_name, status_message, show_time, psu_report_class,
psu_report_filename, queue_report_filename, gps_report_filename):
tag = cls.system_tag()
hostname = cls.system_hostname()
return cls(device_name, tag, hostname, status_message, show_time, psu_report_class,
psu_report_filename, queue_report_filename, gps_report_filename)
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, device_name, tag, hostname, status_message, show_time, psu_report_class,
psu_report_filename, queue_report_filename, gps_report_filename):
"""
Constructor
"""
self.__device_name = device_name # string
self.__tag = tag # string
self.__hostname = hostname # string
self.__status_message = status_message # string
self.__show_time = show_time # bool
self.__psu_report_class = psu_report_class # PSUStatus implementation
self.__psu_report_filename = psu_report_filename # string
self.__queue_report_filename = queue_report_filename # string
self.__gps_report_filename = gps_report_filename # string
self.__display = TextDisplay(self.__FONT)
# ----------------------------------------------------------------------------------------------------------------
def clean(self):
self.__display.clean()
def update(self):
# time...
display_datetime = self.__show_time and Host.time_is_synchronized()
datetime = LocalizedDatetime.now() if display_datetime else None
# network...
nmcli = NMCLi.find()
homes = {} if nmcli is None else nmcli.connections
# message...
message = self.__status_message
# PSU...
if self.__psu_report_filename:
psu_report = self.__psu_report_class.load(self.__psu_report_filename)
batt_percent = None if psu_report is None else psu_report.batt_percent
else:
batt_percent = None
# MQTT queue...
if self.__queue_report_filename:
queue_report = QueueReport.load(self.__queue_report_filename)
queue_message = self.__QUEUE_STATE[queue_report.queue_state()]
message += ':' + queue_message
# GPS quality...
if self.__gps_report_filename:
gps_report = GPSDatum.load(self.__gps_report_filename)
gps_quality = gps_report.quality
message += ' GPS:' + str(gps_quality)
return self.render(datetime, BattDisplay(batt_percent), homes, message)
def clear(self):
return self.render(None, '', {}, self.__status_message)
def render(self, datetime, batt, homes, message):
self.__display.set_text(0, self.__device_name, True)
self.__display.set_text(1, self.formatted_datetime(datetime), True)
self.__display.set_text(2, batt, True)
self.__display.set_text(3, " tag: %s" % self.__tag)
self.__display.set_text(4, " host: %s" % self.__hostname)
self.__display.set_text(5, "")
self.__display.set_text(6, "")
self.__display.set_text(7, "")
count = 0
for port, network in homes.items():
self.__display.set_text(6 + count, "%5s: %s" % (port, network))
count += 1
if count > 1: # maximum 2 homes
break
self.__display.set_text(8, "")
self.__display.set_text(9, message, True)
return self.__display.render()
# ----------------------------------------------------------------------------------------------------------------
@property
def status_message(self):
return self.__status_message
@status_message.setter
def status_message(self, status_message):
self.__status_message = status_message
# ----------------------------------------------------------------------------------------------------------------
def print(self, file=sys.stdout):
self.__display.print_buffer(file)
def __str__(self, *args, **kwargs):
psu_report_class_name = self.__psu_report_class.__name__
return "SystemDisplay:{device_name:%s, tag:%s, hostname:%s status_message:%s, show_time:%s, " \
"psu_report_class:%s, psu_report_filename:%s, queue_report_filename:%s, " \
"gps_report_filename:%s, display:%s}" % \
(self.__device_name, self.__tag, self.__hostname, self.__status_message, self.__show_time,
psu_report_class_name, self.__psu_report_filename, self.__queue_report_filename,
self.__gps_report_filename, self.__display)
# --------------------------------------------------------------------------------------------------------------------
class BattDisplay(object):
"""
classdocs
"""
__BARS = {
95: '||||||||||',
85: '-|||||||||',
75: '--||||||||',
65: '---|||||||',
55: '----||||||',
45: '-----|||||',
35: '------||||',
25: '-------|||',
15: '--------||',
5: '---------|',
0: '----------'
}
# ----------------------------------------------------------------------------------------------------------------
def __init__(self, level):
self.__level = level
def __str__(self, *args, **kwargs):
if self.__level is None:
return ''
for level in self.__BARS.keys():
if self.__level > level:
return self.__BARS[level]
return self.__BARS[0]
| [
"PIL.ImageFont.load_default",
"scs_display.display.text_display.TextDisplay",
"scs_core.data.datetime.LocalizedDatetime.now",
"scs_core.data.queue_report.QueueReport.load",
"scs_core.position.gps_datum.GPSDatum.load",
"scs_host.sys.hostname.Hostname.find",
"scs_host.sys.host.Host.time_is_synchronized",
"scs_core.data.datetime.ISO8601.construct",
"scs_host.sys.nmcli.NMCLi.find",
"scs_core.sys.system_id.SystemID.load"
] | [((1167, 1191), 'PIL.ImageFont.load_default', 'ImageFont.load_default', ([], {}), '()\n', (1189, 1191), False, 'from PIL import ImageFont\n'), ((1367, 1386), 'scs_core.sys.system_id.SystemID.load', 'SystemID.load', (['Host'], {}), '(Host)\n', (1380, 1386), False, 'from scs_core.sys.system_id import SystemID\n'), ((1486, 1501), 'scs_host.sys.hostname.Hostname.find', 'Hostname.find', ([], {}), '()\n', (1499, 1501), False, 'from scs_host.sys.hostname import Hostname\n'), ((1663, 1690), 'scs_core.data.datetime.ISO8601.construct', 'ISO8601.construct', (['datetime'], {}), '(datetime)\n', (1680, 1690), False, 'from scs_core.data.datetime import LocalizedDatetime, ISO8601\n'), ((3510, 3534), 'scs_display.display.text_display.TextDisplay', 'TextDisplay', (['self.__FONT'], {}), '(self.__FONT)\n', (3521, 3534), False, 'from scs_display.display.text_display import TextDisplay\n'), ((3938, 3950), 'scs_host.sys.nmcli.NMCLi.find', 'NMCLi.find', ([], {}), '()\n', (3948, 3950), False, 'from scs_host.sys.nmcli import NMCLi\n'), ((3799, 3826), 'scs_host.sys.host.Host.time_is_synchronized', 'Host.time_is_synchronized', ([], {}), '()\n', (3824, 3826), False, 'from scs_host.sys.host import Host\n'), ((3846, 3869), 'scs_core.data.datetime.LocalizedDatetime.now', 'LocalizedDatetime.now', ([], {}), '()\n', (3867, 3869), False, 'from scs_core.data.datetime import LocalizedDatetime, ISO8601\n'), ((4433, 4479), 'scs_core.data.queue_report.QueueReport.load', 'QueueReport.load', (['self.__queue_report_filename'], {}), '(self.__queue_report_filename)\n', (4449, 4479), False, 'from scs_core.data.queue_report import QueueReport, QueueStatus\n'), ((4689, 4730), 'scs_core.position.gps_datum.GPSDatum.load', 'GPSDatum.load', (['self.__gps_report_filename'], {}), '(self.__gps_report_filename)\n', (4702, 4730), False, 'from scs_core.position.gps_datum import GPSDatum\n')] |
#!/usr/bin/python3
import system_calls
popular_syscalls = {}
syscalls = system_calls.syscalls()
for syscall_name in syscalls.names():
counter = 0
for arch in syscalls.archs():
try:
syscalls.get(syscall_name, arch)
counter += 1
except system_calls.NotSupportedSystemCall:
pass
try:
popular_syscalls[counter].append(syscall_name)
except KeyError:
popular_syscalls[counter] = []
popular_syscalls[counter].append(syscall_name)
amount_of_archs = len(syscalls.archs())
for amount in range(1, amount_of_archs + 1):
try:
tmp = popular_syscalls[amount]
print(f"System calls supported on {amount} of {amount_of_archs} "
"architectures:")
for syscall in popular_syscalls[amount]:
print(f"\t{syscall}")
print("\n")
except KeyError:
pass
| [
"system_calls.syscalls"
] | [((75, 98), 'system_calls.syscalls', 'system_calls.syscalls', ([], {}), '()\n', (96, 98), False, 'import system_calls\n')] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.