import os
import shutil
import subprocess
import tempfile
import unittest
import git
import testutils
class TestChanges(unittest.TestCase):
def _output(self, command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return proc.communicate()[0].strip()
def test_changes_version(self):
# expect
self.assertRegexpMatches(self._output('git changes -v'.split()), 'git-changes \\d+\\.\\d+\\.\\d+')
self.assertRegexpMatches(self._output('git changes --version'.split()), 'git-changes \\d+\\.\\d+\\.\\d+')
def test_changes_help(self):
# expect
self.assertTrue(self._output('git changes -h'.split()))
self.assertTrue(self._output('git changes --help'.split()))
class TestChangesView(unittest.TestCase):
def setUp(self):
self.dirpath = tempfile.mkdtemp()
os.chdir(self.dirpath)
self.repo = git.Repo.init(self.dirpath)
testutils.init_local_config(self.repo)
subprocess.call('touch README.md'.split())
with open('README.md', 'w') as a_file:
a_file.write('readme\n')
subprocess.call('git add -A'.split())
# initial commit
pyenv = os.environ.copy()
pyenv['GIT_COMMITTER_DATE'] = '2016-02-15T00:00:00Z'
pyenv['GIT_AUTHOR_DATE'] = '2016-02-15T00:00:00Z'
subprocess.call(['git', 'commit', '--quiet', '-m', 'Initial commit'], env=pyenv)
self.commit0_log = subprocess.check_output('git rev-parse --short HEAD'.split()).strip() + ' Initial commit'
# edit readme
with open('README.md', 'a') as a_file:
a_file.write('readme\n')
pyenv['GIT_COMMITTER_DATE'] = '2016-02-16T00:00:00Z'
pyenv['GIT_AUTHOR_DATE'] = '2016-02-16T00:00:00Z'
subprocess.call(['git', 'commit', '--quiet', '-a', '-m', 'edit readme'], env=pyenv)
self.commit1_log = subprocess.check_output('git rev-parse --short HEAD'.split()).strip() + ' edit readme'
# add changelog
subprocess.call('touch CHANGELOG.md'.split())
subprocess.call('git add -A'.split())
pyenv['GIT_COMMITTER_DATE'] = '2016-02-17T00:00:00Z'
pyenv['GIT_AUTHOR_DATE'] = '2016-02-17T00:00:00Z'
subprocess.call(['git', 'commit', '--quiet', '-m', 'add changelog'], env=pyenv)
self.commit2_log = subprocess.check_output('git rev-parse --short HEAD'.split()).strip() + ' add changelog'
# edit changelog
with open('CHANGELOG.md', 'w') as a_file:
a_file.write('changelog\n')
pyenv['GIT_COMMITTER_DATE'] = '2016-02-18T00:00:00Z'
pyenv['GIT_AUTHOR_DATE'] = '2016-02-18T00:00:00Z'
subprocess.call(['git', 'commit', '--quiet', '-a', '-m', 'edit changelog'], env=pyenv)
self.commit3_log = subprocess.check_output('git rev-parse --short HEAD'.split()).strip() + ' edit changelog'
def tearDown(self):
shutil.rmtree(self.dirpath)
def test_view(self):
# expect: no changes when there are none
self.assertFalse(self.repo.git.changes())
self.assertFalse(self.repo.git.changes('--log'))
self.assertFalse(self.repo.git.changes('-l'))
self.assertFalse(self.repo.git.changes('view'))
self.assertFalse(self.repo.git.changes('view', '--log'))
self.assertFalse(self.repo.git.changes('view', '-l'))
def test_view_noAssociation_defaultOverridden(self):
# given
self.repo.git.config('git-changes.default-commit-ish', str(self.repo.rev_parse('HEAD^')))
# expect
self.assertEqual(self.commit3_log, self.repo.git.changes())
self.assertEqual(self.commit3_log, self.repo.git.changes('--log'))
self.assertEqual(self.commit3_log, self.repo.git.changes('-l'))
self.assertEqual(self.commit3_log, self.repo.git.changes('view'))
self.assertEqual(self.commit3_log, self.repo.git.changes('view', '--log'))
self.assertEqual(self.commit3_log, self.repo.git.changes('view', '-l'))
def test_view_withCommittish(self):
# expect: changes when viewing with a commit-ish
self.assertFalse(self.repo.git.changes('HEAD'))
self.assertFalse(self.repo.git.changes('view', 'HEAD'))
self.assertEqual(self.commit3_log, self.repo.git.changes('HEAD^'))
self.assertEqual(self.commit3_log, self.repo.git.changes('view', 'HEAD^'))
self.assertEqual(self.commit3_log, self.repo.git.changes('view', 'HEAD^', '--', '*md'))
self.assertFalse(self.repo.git.changes('view', 'HEAD^', '--', '*py'))
self.assertEqual(os.linesep.join([self.commit3_log, self.commit2_log]), self.repo.git.changes('HEAD^^'))
self.assertEqual(os.linesep.join([self.commit3_log, self.commit2_log]), self.repo.git.changes('view', 'HEAD^^'))
def test_view_upstream(self):
# given: a local upstream branch
self.repo.git.branch('upstream-branch', 'HEAD^')
self.repo.git.branch('--set-upstream-to=upstream-branch')
# given: an association
self.repo.git.config('git-changes.associations.master.with', self.repo.rev_parse('HEAD^^'))
# expect
self.assertEqual(self.commit3_log, self.repo.git.changes('-u'))
self.assertEqual(self.commit3_log, self.repo.git.changes('--upstream'))
self.assertNotEqual(self.repo.git.changes(), self.repo.git.changes('--upstream'))
def test_view_count(self):
# expect:
self.assertEqual('0', self.repo.git.changes('view', '-c'))
self.assertEqual('0', self.repo.git.changes('view', '--count'))
self.assertEqual('1', self.repo.git.changes('HEAD^', '-c'))
self.assertEqual('1', self.repo.git.changes('view', 'HEAD^', '-c'))
self.assertEqual('1', self.repo.git.changes('HEAD^', '--count'))
self.assertEqual('1', self.repo.git.changes('view', 'HEAD^', '--count'))
self.assertEqual('1', self.repo.git.changes('view', 'HEAD^', '--count', '--', '*md'))
self.assertEqual('0', self.repo.git.changes('view', 'HEAD^', '--count', '--', '*py'))
def test_view_stat(self):
# expect:
self.assertIn('1 insertion', self.repo.git.changes('HEAD^', '-s'))
self.assertIn('1 insertion', self.repo.git.changes('view', 'HEAD^', '-s'))
self.assertIn('1 insertion', self.repo.git.changes('HEAD^', '--stat'))
self.assertIn('1 insertion', self.repo.git.changes('view', 'HEAD^', '--stat'))
self.assertIn('1 insertion', self.repo.git.changes('view', 'HEAD^', '--stat', '--', '*md'))
self.assertFalse(self.repo.git.changes('view', '--stat'))
self.assertFalse(self.repo.git.changes('view', 'HEAD^', '--stat', '--', '*py'))
def test_view_diff(self):
# expect:
self.assertIn('diff --git a/CHANGELOG.md b/CHANGELOG.md', self.repo.git.changes('HEAD^', '-d'))
self.assertIn('diff --git a/CHANGELOG.md b/CHANGELOG.md', self.repo.git.changes('view', 'HEAD^', '-d'))
self.assertIn('diff --git a/CHANGELOG.md b/CHANGELOG.md', self.repo.git.changes('HEAD^', '--diff'))
self.assertIn('diff --git a/CHANGELOG.md b/CHANGELOG.md', self.repo.git.changes('view', 'HEAD^', '--diff'))
self.assertIn('diff --git a/CHANGELOG.md b/CHANGELOG.md', self.repo.git.changes('view', 'HEAD^', '--diff', '--', '*md'))
self.assertFalse(self.repo.git.changes('view', '--diff'))
self.assertFalse(self.repo.git.changes('view', 'HEAD^', '--diff', '--', '*py'))
def test_view_inverse(self):
# given
self.repo.git.config('git-changes.default-commit-ish', str(self.repo.rev_parse('HEAD^')))
# expect
expected = os.linesep.join([self.commit2_log, self.commit1_log, self.commit0_log])
self.assertEqual(expected, self.repo.git.changes('view', '--inverse'))
self.assertEqual(expected, self.repo.git.changes('view', '-i'))
self.assertEqual(self.commit0_log, self.repo.git.changes('view', '-i', 'HEAD^^^'))
def test_view_useAssociation_changesExist(self):
# given
self.repo.git.config('git-changes.associations.master.with', self.repo.rev_parse('HEAD^'))
# expect
self.assertEqual(self.commit3_log, self.repo.git.changes())
self.assertEqual(self.commit3_log, self.repo.git.changes('view'))
def test_view_useAssociation_noChangesExist(self):
# given
self.repo.git.config('git-changes.associations.master.with', self.repo.rev_parse('HEAD'))
# expect
self.assertFalse(self.repo.git.changes())
self.assertFalse(self.repo.git.changes('view'))
def test_view_overrideDefaultView(self):
# given:
self.repo.git.config('git-changes.default-view', 'count')
# expect:
self.assertEqual('0', self.repo.git.changes('view'))
self.assertEqual('1', self.repo.git.changes('HEAD^', '-c'))
class TestChangesAssociate(unittest.TestCase):
def setUp(self):
self.dirpath = tempfile.mkdtemp()
os.chdir(self.dirpath)
self.repo = git.Repo.init(self.dirpath)
testutils.init_local_config(self.repo)
# initial commit
open('README.md', 'w').close()
self.repo.index.add(['README.md'])
self.repo.index.commit('Initial commit')
# add changelog
open('CHANGELOG.md', 'w').close()
self.repo.index.add(['CHANGELOG.md'])
self.repo.index.commit('Add changelog')
def tearDown(self):
shutil.rmtree(self.dirpath)
def test_associate(self):
# when
output = self.repo.git.changes('associate', 'HEAD^')
# then
sha = str(self.repo.rev_parse('HEAD^'))
self.assertEqual('master has been associated with {}'.format(sha), output)
self.assertEqual(sha, self.repo.git.config('git-changes.associations.master.with'))
def test_associate_quiet(self):
# when
output = self.repo.git.changes('associate', '--quiet', 'HEAD^')
# then
self.assertFalse(output)
self.assertEqual(
str(self.repo.rev_parse('HEAD^')),
self.repo.git.config('git-changes.associations.master.with')
)
def test_associate_withUpstream(self):
# given: a local upstream branch
self.repo.git.branch('upstream-branch', 'HEAD^')
self.repo.git.branch('--set-upstream-to=upstream-branch')
# when
output = self.repo.git.changes('associate', '--upstream')
# then
self.assertEqual('master has been associated with refs/heads/upstream-branch', output)
self.assertEqual('refs/heads/upstream-branch', self.repo.git.config('git-changes.associations.master.with'))
def test_associate_withUpstream_quiet(self):
# given: a local upstream branch
self.repo.git.branch('upstream-branch', 'HEAD^')
self.repo.git.branch('--set-upstream-to=upstream-branch')
# when
output = self.repo.git.changes('associate', '--upstream', '--quiet')
# then
self.assertFalse(output)
self.assertEqual('refs/heads/upstream-branch', self.repo.git.config('git-changes.associations.master.with'))
def test_associate_get_emptyRepository(self):
# given
shutil.rmtree(self.dirpath)
self.dirpath = tempfile.mkdtemp()
os.chdir(self.dirpath)
self.repo = git.Repo.init(self.dirpath)
# expect:
self.assertEqual('warn: repository is empty', self.repo.git.changes('associate'))
def test_associate_get_noAssociationExists_defaultUnspecified(self):
# expect
self.assertFalse(self.repo.git.changes('associate'))
def test_associate_get_noAssociationExists_defaultUnspecified_verbose(self):
# expect
self.assertEqual('refs/heads/master', self.repo.git.changes('associate', '--verbose'))
self.assertEqual('refs/heads/master', self.repo.git.changes('associate', '-V'))
def test_associate_get_noAssociationExists_defaultOverridden(self):
# given: overridden default commit-ish
self.repo.git.config('--local', 'git-changes.default-commit-ish', self.repo.rev_parse('HEAD'))
# expect
self.assertFalse(self.repo.git.changes('associate'))
def test_associate_get_noAssociationExists_defaultOverridden_verbose(self):
# given: overridden default commit-ish
self.repo.git.config('--local', 'git-changes.default-commit-ish', self.repo.rev_parse('HEAD'))
# expect
self.assertEqual(str(self.repo.rev_parse('HEAD')), self.repo.git.changes('associate', '--verbose'))
self.assertEqual(str(self.repo.rev_parse('HEAD')), self.repo.git.changes('associate', '-V'))
def test_associate_get_cannotBeUsedWithQuiet(self):
# when
output = subprocess.Popen(
'git changes associate --quiet'.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
# then
self.assertEqual('usage: git changes associate [-h] [-u] [-V] [COMMIT-ISH [-q]]\n', output[0])
self.assertEqual('git changes: error: argument -q/--quiet: not allowed without positional argument committish '
'or option -u/--upstream\n', output[1])
def test_associate_filesNotSupported(self):
# when
output = subprocess.Popen(
'git changes associate -- file.txt'.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
# then
self.assertEqual('usage: git changes associate [-h] [-u] [-V] [COMMIT-ISH [-q]]\n', output[0])
self.assertEqual('git changes: error: argument FILES: only supported for view sub-command\n', output[1])
class TestChangesUnassociate(unittest.TestCase):
def _output(self, command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return proc.communicate()[0].strip()
def setUp(self):
self.dirpath = tempfile.mkdtemp()
os.chdir(self.dirpath)
self.repo = git.Repo.init(self.dirpath)
testutils.init_local_config(self.repo)
subprocess.call('touch README.md'.split())
with open('README.md', 'w') as a_file:
a_file.write('readme\n')
subprocess.call('git add -A'.split())
# initial commit
subprocess.call(['git', 'commit', '--quiet', '-m', 'Initial commit'])
# edit readme
with open('README.md', 'a') as a_file:
a_file.write('readme\n')
subprocess.call(['git', 'commit', '--quiet', '-a', '-m', 'edit readme'])
# associate master
head = self.repo.rev_parse('HEAD')
self.repo.git.config('git-changes.associations.master.with', head)
# associate a valid branch
self.repo.git.branch('valid-branch')
self.repo.git.config('git-changes.associations.valid-branch.with', head)
# associate a non-existent branch
self.repo.git.config('git-changes.associations.stale-branch.with', head)
def tearDown(self):
shutil.rmtree(self.dirpath)
def test_unassociate_currentBranch(self):
# when
output = self.repo.git.changes('unassociate')
# then
self.assertFalse(output)
self.assertFalse(self._output('git config git-changes.associations.master.with'.split()))
self.assertTrue(self.repo.git.config('git-changes.associations.valid-branch.with'))
self.assertTrue(self.repo.git.config('git-changes.associations.stale-branch.with'))
def test_unassociate_all(self):
# when
output = self.repo.git.changes('unassociate', '--all')
# then
self.assertEqual("""Removed association 'master'
Removed association 'valid-branch'
Removed association 'stale-branch'""", output)
self.assertFalse(self._output('git config git-changes.associations.master.with'.split()))
self.assertFalse(self._output('git config git-changes.associations.valid-branch.with'.split()))
self.assertFalse(self._output('git config git-changes.associations.stale-branch.with'.split()))
def test_unassociate_prune(self):
# when
output = self.repo.git.changes('unassociate', '--prune')
# then
self.assertEqual("Removed association 'stale-branch'", output)
self.assertTrue(self.repo.git.config('git-changes.associations.master.with'))
self.assertTrue(self.repo.git.config('git-changes.associations.valid-branch.with'))
self.assertFalse(self._output('git config git-changes.associations.stale-branch.with'.split()))
def test_unassociate_quiet_currentBranch(self):
# when
output = self.repo.git.changes('unassociate', '--quiet')
# then
self.assertFalse(output)
self.assertFalse(self._output('git config git-changes.associations.master.with'.split()))
self.assertTrue(self.repo.git.config('git-changes.associations.valid-branch.with'))
self.assertTrue(self.repo.git.config('git-changes.associations.stale-branch.with'))
def test_unassociate_quiet_all(self):
# when
output = self.repo.git.changes('unassociate', '--quiet', '--all')
# then
self.assertFalse(output)
self.assertFalse(self._output('git config git-changes.associations.master.with'.split()))
self.assertFalse(self._output('git config git-changes.associations.valid-branch.with'.split()))
self.assertFalse(self._output('git config git-changes.associations.stale-branch.with'.split()))
def test_unassociate_quiet_prune(self):
# when
output = self.repo.git.changes('unassociate', '--quiet', '--prune')
# then
self.assertFalse(output)
self.assertTrue(self.repo.git.config('git-changes.associations.master.with'))
self.assertTrue(self.repo.git.config('git-changes.associations.valid-branch.with'))
self.assertFalse(self._output('git config git-changes.associations.stale-branch.with'.split()))
def test_unassociate_dryRun(self):
# expect
self.assertEqual(
"Would unassociate 'master' from '{}'".format(self.repo.rev_parse('HEAD')),
self.repo.git.changes('unassociate', '--dry-run')
)
self.assertEqual("""Would remove association 'master'
Would remove association 'valid-branch'
Would remove association 'stale-branch'""", self.repo.git.changes('unassociate', '--dry-run', '--all'))
self.assertEqual(
"Would remove association 'stale-branch'",
self.repo.git.changes('unassociate', '--dry-run', '--prune')
)
def test_unassociate_dryRunDoesNotSupportQuiet(self):
# when
output = subprocess.Popen(
'git changes unassociate --dry-run --quiet'.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
# then
# TODO: usage should print to STDOUT
self.assertFalse(output[0])
self.assertEqual("""usage: git changes unassociate [-h] [-a | -p] [-q | -d]
git changes unassociate: error: argument -q/--quiet: not allowed with argument -d/--dry-run
""", output[1])
def test_unassociate_filesNotSupported(self):
# when
output = subprocess.Popen(
'git changes unassociate -- file.txt'.split(),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate()
# then
self.assertEqual('usage: git changes unassociate [-h] [-a | -p] [-q | -d]\n', output[0])
self.assertEqual('git changes: error: argument FILES: only supported for view sub-command\n', output[1])
|
|
# python -m unittest discover
import unittest
from datetime import datetime
from tasks.issue_score_calculator import ScoreCalculator
class TestScore(unittest.TestCase):
def test_each_contribution(self):
scorer = ScoreCalculator(data={
'contributors': [
{ 'login': 'user1', 'contributions': 10 }
],
'issue': {
'user': {
'login': 'user1'
}
}
})
scorer.each_contribution(add=2, max_contribution=100)
self.assertEquals(scorer.score, 20)
scorer = ScoreCalculator(data={
'contributors': [
{ 'login': 'user1', 'contributions': 10000 }
],
'issue': {
'user': {
'login': 'user1'
}
}
})
scorer.each_contribution(add=2, max_contribution=100)
self.assertEquals(scorer.score, 100)
scorer = ScoreCalculator(data={
'contributors': [
{ 'login': 'user2', 'contributions': 10000 }
],
'issue': {
'user': {
'login': 'user1'
}
}
})
scorer.each_contribution(add=2, max_contribution=100)
self.assertEquals(scorer.score, 0)
scorer = ScoreCalculator(data={
'contributors': [
{ 'login': 'abe', 'contributions': 10000 }
],
'issue': {
'user': {
'login': 'abe'
}
},
'org_members': ['abe', 'jeb', 'rocky']
})
scorer.each_contribution(add=2, max_contribution=100)
self.assertEquals(scorer.score, 0)
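        # Derived from the assertions above (not from the implementation): the score
        # appears to be min(add * author_contributions, max_contribution), and 0 when
        # the issue author is not a listed contributor or is an org member.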
def test_short_title_text(self):
scorer = ScoreCalculator(data={
'issue': {
'title': '123'
}
})
scorer.short_title_text(subtract=50, short_title_text_length=5)
self.assertEquals(scorer.score, -50)
scorer = ScoreCalculator(data={
'issue': {
'title': '123456'
}
})
scorer.short_title_text(subtract=50, short_title_text_length=5)
self.assertEquals(scorer.score, 0)
def test_short_body_text(self):
scorer = ScoreCalculator(data=setup_data('123'))
scorer.short_body_text(subtract=50, short_body_text_length=5)
self.assertEquals(scorer.score, -50)
scorer = ScoreCalculator(data=setup_data('123456'))
scorer.short_body_text(subtract=50, short_body_text_length=5)
self.assertEquals(scorer.score, 0)
def test_every_x_characters_in_body(self):
scorer = ScoreCalculator(data=setup_data('1234567890'))
scorer.every_x_characters_in_body(add=2, x=1)
self.assertEquals(scorer.score, 20)
scorer = ScoreCalculator(data=setup_data('1234567890'))
scorer.every_x_characters_in_body(add=2, x=5)
self.assertEquals(scorer.score, 4)
scorer = ScoreCalculator(data=setup_data('1234567890'))
scorer.every_x_characters_in_body(add=2, x=5, max=3)
self.assertEquals(scorer.score, 3)
scorer = ScoreCalculator(data=setup_data(''))
scorer.every_x_characters_in_body(add=2, x=5)
self.assertEquals(scorer.score, 0)
def test_every_x_characters_in_comments(self):
scorer = ScoreCalculator(data={
'issue_comments': [
{ 'body': '1234567890', 'user': { 'login': 'rocky' } },
{ 'body': '1234567890', 'user': { 'login': 'steve' } },
{ 'body': '1234567890', 'user': { 'login': 'bill' } },
{ 'body': '1234567890', 'user': { 'login': 'abe' } },
],
'org_members': ['abe', 'jeb', 'rocky']
})
scorer.every_x_characters_in_comments(add=3, x=2)
self.assertEquals(scorer.score, 30)
scorer = ScoreCalculator(data={
'issue_comments': [
{ 'body': '1234567890', 'user': { 'login': 'rocky' } },
{ 'body': '1234567890', 'user': { 'login': 'steve' } },
{ 'body': '1234567890', 'user': { 'login': 'bill' } },
{ 'body': '1234567890', 'user': { 'login': 'abe' } },
],
'org_members': ['abe', 'jeb', 'rocky']
})
scorer.every_x_characters_in_comments(add=3, x=2, max=15)
self.assertEquals(scorer.score, 15)
scorer = ScoreCalculator(data={})
scorer.every_x_characters_in_comments(add=3, x=2)
self.assertEquals(scorer.score, 0)
def test_code_demos(self):
scorer = ScoreCalculator(data=setup_data('''
http://codepen.io/agesef HTTPS://jsbin http://plnkr.co HTTP://www.jsfiddle.com/asdfsag/asesd
'''))
scorer.code_demos(add=2)
self.assertEquals(scorer.score, 8)
scorer = ScoreCalculator(data={
'issue': {
'body': 'http://plnkr.co'
},
'issue_comments': [
{ 'body': 'http://codepen.io/agesef' },
{ 'body': 'http://codepen.io/agesef' },
{ 'body': 'http://jsbin.io/agesef' },
{ 'body': 'http://jsbin.io/agesef' },
]
})
scorer.code_demos(add=3)
self.assertEquals(scorer.score, 9)
scorer = ScoreCalculator(data={})
scorer.code_demos(add=2)
self.assertEquals(scorer.score, 0)
def test_daily_decay_since_creation(self):
scorer = ScoreCalculator(data={
'issue': {
'created_at': '2000-01-01T00:00:00Z'
}
})
d = scorer.daily_decay_since_creation(exp=1.5, start=50, now=datetime(2000, 1, 11))
self.assertEquals(d['days_since_creation'], 10)
self.assertEquals(d['start'], 50)
self.assertEquals(d['score'], 18)
d = scorer.daily_decay_since_creation(exp=1.5, start=50, now=datetime(2000, 1, 13))
self.assertEquals(d['days_since_creation'], 12)
self.assertEquals(d['score'], 8)
d = scorer.daily_decay_since_creation(exp=1.5, start=50, now=datetime(2000, 1, 21))
self.assertEquals(d['days_since_creation'], 20)
self.assertEquals(d['score'], 0.0)
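        # Derived from the expected values here (and in the updated_at variant below),
        # not from the implementation: the decay appears consistent with
        # score = max(start - days ** exp, 0), truncated to an integer.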
def test_daily_decay_since_last_update(self):
scorer = ScoreCalculator(data={
'issue': {
'updated_at': '2000-01-01T00:00:00Z'
}
})
d = scorer.daily_decay_since_last_update(exp=1.5, start=50, now=datetime(2000, 1, 11))
self.assertEquals(d['days_since_update'], 10)
self.assertEquals(d['start'], 50)
self.assertEquals(d['score'], 18)
d = scorer.daily_decay_since_last_update(exp=1.5, start=50, now=datetime(2000, 1, 13))
self.assertEquals(d['days_since_update'], 12)
self.assertEquals(d['score'], 8)
d = scorer.daily_decay_since_last_update(exp=1.5, start=50, now=datetime(2000, 1, 21))
self.assertEquals(d['days_since_update'], 20)
self.assertEquals(d['score'], 0.0)
def test_high_priority(self):
scorer = ScoreCalculator(data={
'issue': {
'labels': [
{
"name": "bug",
}
]
}
})
scorer.high_priority(add=2)
self.assertEquals(scorer.score, 0)
scorer = ScoreCalculator(data={
'issue': {
'labels': [
{
"name": "high priority",
}
]
}
})
scorer.high_priority(add=2)
self.assertEquals(scorer.score, 2)
def test_awaiting_reply(self):
scorer = ScoreCalculator(data={
'issue': {
'labels': [
{
"name": "bug",
}
]
}
})
scorer.awaiting_reply(subtract=2)
self.assertEquals(scorer.score, 0)
scorer = ScoreCalculator(data={
'issue': {
'labels': [
{
"name": "needs reply",
}
]
}
})
scorer.awaiting_reply(subtract=2)
self.assertEquals(scorer.score, -2)
def test_each_unique_commenter(self):
scorer = ScoreCalculator(data={ 'issue_comments': [
{ 'user': { 'login': 'dude1' } },
{ 'user': { 'login': 'dude2' } },
{ 'user': { 'login': 'creator' } },
{ 'user': { 'login': 'creator' } },
{ 'user': { 'login': 'dude3' } },
], 'issue': { 'user': { 'login': 'creator' } } })
scorer.each_unique_commenter(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data={ 'issue_comments': [
{'user': { 'login': 'dude1' } },
{'user': { 'login': 'dude1' } },
{'user': { 'login': 'dude1' } },
] })
scorer.each_unique_commenter(add=2)
self.assertEquals(scorer.score, 2)
scorer = ScoreCalculator(data={ 'issue_comments': [
{ 'user': { 'login': 'dude1' } },
{ 'user': { 'login': 'dude2' } },
{ 'user': { 'login': 'abe' } },
{ 'user': { 'login': 'jeb' } },
{ 'user': { 'login': 'dude2' } },
], 'org_members': ['abe', 'jeb', 'rocky'] })
scorer.each_unique_commenter(add=2)
self.assertEquals(scorer.score, 4)
scorer = ScoreCalculator(data={})
scorer.each_unique_commenter(add=2)
self.assertEquals(scorer.score, 0)
def test_each_comment(self):
scorer = ScoreCalculator(data={ 'issue_comments': [1,2,3] })
scorer.each_comment(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data={ 'issue_comments': [1] })
scorer.each_comment(add=2)
self.assertEquals(scorer.score, 2)
scorer = ScoreCalculator(data={})
scorer.each_comment(add=2)
self.assertEquals(scorer.score, 0)
def test_code_snippets(self):
scorer = ScoreCalculator(data=setup_data('```line1\nline2\nline3```'))
scorer.code_snippets(add=50, per_line=100, line_max=300)
self.assertEquals(scorer.score, 350)
scorer = ScoreCalculator(data=setup_data('whatever text \n```line1\nline2\nline3``` more whatever ```line4```'))
scorer.code_snippets(add=10, per_line=1)
self.assertEquals(scorer.score, 14)
scorer = ScoreCalculator(data=setup_data('Hellow!\n im code!\n im code!\n im code!'))
scorer.code_snippets(add=10, per_line=1)
self.assertEquals(scorer.score, 13)
scorer = ScoreCalculator(data=setup_data('Hellow!\n im code!\n im code!\nwhatever```im code!```'))
scorer.code_snippets(add=10, per_line=1, line_max=2)
self.assertEquals(scorer.score, 12)
scorer = ScoreCalculator(data=setup_data('blah blah'))
scorer.code_snippets(add=2, per_line=1)
self.assertEquals(scorer.score, 0)
scorer = ScoreCalculator(data=setup_data('hello\n me code\n mecode', issue_comments=[
{ 'body': 'nothing' },
{ 'body': '```code\ncode\ncode```' },
{ 'body': '```code\ncode\ncode``` text ```code\ncode\ncode```' }
]))
scorer.code_snippets(add=10, per_line=1, line_max=100)
self.assertEquals(scorer.score, 21)
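        # Derived from the assertions above, not from the implementation: when any code
        # lines are found (inside ``` fences or as indented lines, in the body and comments),
        # the score appears to be add + min(per_line * code_line_count, line_max); with no
        # code lines it stays 0.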
def test_videos(self):
scorer = ScoreCalculator(data=setup_data('''
https://www.dropbox.com/s/gxe6kl1bwzcvcxf/IMG_0229.MOV?dl=0
https://www.dropbox.com/s/gxe6kl1bwzcvcxf/IMG_0229.MOV?dl=1
https://www.dropbox.com/s/gxe6kl1bwzcvcxf/IMG_0229.avi?dl=3
'''))
scorer.videos(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data=setup_data('''



'''))
scorer.videos(add=2)
self.assertEquals(scorer.score, 4)
scorer = ScoreCalculator(data={})
scorer.videos(add=2)
self.assertEquals(scorer.score, 0)
def test_images(self):
scorer = ScoreCalculator(data=setup_data('''
<img src="http://hellow.jpg?h=49"> <img src="hi2"> <img src="https://asdf.png">
<img src="https://asdf.png"> <img src="https://asdf.jpeg">
'''))
scorer.images(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data=setup_data('''



'''))
scorer.images(add=2)
self.assertEquals(scorer.score, 4)
scorer = ScoreCalculator(data=setup_data('''

''', issue_comments=[
{ 'body': 'nothing' },
{ 'body': '<img src="https://asdf.jpeg">' },
{ 'body': '<img src="https://asdf.jpeg">' },
{ 'body': '<img src="https://asdf.gif">' },
{ 'body': '' }
]))
scorer.images(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data={})
scorer.images(add=2)
self.assertEquals(scorer.score, 0)
def test_forum_links(self):
scorer = ScoreCalculator(data=setup_data('''
http://forum.ionicframework.com http://forum.ionicframework.com
'''))
scorer.forum_links(add=2, forum_url='forum.ionicframework.com')
self.assertEquals(scorer.score, 2)
scorer = ScoreCalculator(data=setup_data('''
whatever text
'''))
scorer.forum_links(add=2, forum_url='forum.ionicframework.com')
self.assertEquals(scorer.score, 0)
def test_links(self):
scorer = ScoreCalculator(data=setup_data('''
http://awesome.com https://awesome.com
http://image.png http://image.jpeg
''', issue_comments=[
{ 'body': 'nothing' },
{ 'body': 'http://asdfasdf' },
{ 'body': 'http://asdfasdf' },
{ 'body': 'https://newlink.com' },
{ 'body': 'https://awesome.com' },
{ 'body': 'https://forum.ionicframework.com/post' },
]))
scorer.links(add=2)
self.assertEquals(scorer.score, 8)
scorer = ScoreCalculator(data=setup_data('''
whatever text
'''))
scorer.links(add=2)
self.assertEquals(scorer.score, 0)
def test_issue_references(self):
scorer = ScoreCalculator(data=setup_data('''
I need help yo. Just like issue #123 and issue #456 #456 #456. 10 34534 2323423 5434
'''))
scorer.issue_references(add=2)
self.assertEquals(scorer.score, 4)
scorer = ScoreCalculator(data=setup_data('''
This is similar to issue #432 but not #issue.
''', issue_comments=[
{ 'body': 'nothing' },
{ 'body': 'Whatever #654 #643 #643 #643' }
]))
scorer.issue_references(add=2)
self.assertEquals(scorer.score, 6)
scorer = ScoreCalculator(data=setup_data('''
2323423
'''))
scorer.issue_references(add=2)
self.assertEquals(scorer.score, 0)
def setup_data(body, login='tester', issue_comments=None, org_members=None):
    # Use None defaults instead of mutable default arguments ({} / []) so the
    # defaults cannot be shared and mutated across calls.
    if issue_comments is None:
        issue_comments = {}
    if org_members is None:
        org_members = []
    return {
        'issue': {
            'body': body
        },
        'user': {
            'login': login
        },
        'issue_comments': issue_comments,
        'org_members': org_members
    }
|
|
# Copyright (c) 2014 Ahmed H. Ismail
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from spdx import checksum
from spdx import document
from spdx import package
from spdx import version
from spdx.parsers.builderexceptions import CardinalityError
from spdx.parsers.builderexceptions import OrderError
from spdx.parsers.builderexceptions import SPDXValueError
from spdx.parsers import tagvaluebuilders
from spdx.parsers import validations
class DocBuilder(object):
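    # Matches document version values of the form "SPDX-<major>.<minor>", e.g. "SPDX-2.1".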
VERS_STR_REGEX = re.compile(r"SPDX-(\d+)\.(\d+)", re.UNICODE)
def __init__(self):
# FIXME: this state does not make sense
self.reset_document()
def set_doc_version(self, doc, value):
"""
Set the document version.
Raise SPDXValueError if malformed value.
Raise CardinalityError if already defined.
"""
if not self.doc_version_set:
self.doc_version_set = True
m = self.VERS_STR_REGEX.match(value)
if m is None:
raise SPDXValueError("Document::Version")
else:
doc.version = version.Version(
major=int(m.group(1)), minor=int(m.group(2))
)
return True
else:
raise CardinalityError("Document::Version")
def set_doc_data_lic(self, doc, res):
"""
Set the document data license.
Raise SPDXValueError if malformed value.
Raise CardinalityError if already defined.
"""
if not self.doc_data_lics_set:
self.doc_data_lics_set = True
# TODO: what is this split?
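            # The data license value typically arrives as a resource URI
            # (e.g. ".../licenses/CC0-1.0"); the license identifier is taken as
            # its last "/"-separated segment.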
res_parts = res.split("/")
if len(res_parts) != 0:
identifier = res_parts[-1]
doc.data_license = document.License.from_identifier(identifier)
else:
raise SPDXValueError("Document::License")
else:
raise CardinalityError("Document::License")
def set_doc_name(self, doc, name):
"""
Set the document name.
Raise CardinalityError if already defined.
"""
if not self.doc_name_set:
doc.name = name
self.doc_name_set = True
return True
else:
raise CardinalityError("Document::Name")
def set_doc_spdx_id(self, doc, doc_spdx_id_line):
"""
Set the document SPDX Identifier.
        Raise SPDXValueError if malformed value.
Raise CardinalityError if already defined.
"""
if not self.doc_spdx_id_set:
if validations.validate_doc_spdx_id(doc_spdx_id_line):
doc.spdx_id = doc_spdx_id_line
self.doc_spdx_id_set = True
return True
else:
raise SPDXValueError("Document::SPDXID")
else:
raise CardinalityError("Document::SPDXID")
def set_doc_comment(self, doc, comment):
"""
Set document comment.
Raise CardinalityError if comment already set.
"""
if not self.doc_comment_set:
self.doc_comment_set = True
doc.comment = comment
else:
raise CardinalityError("Document::Comment")
def set_doc_namespace(self, doc, namespace):
"""
Set the document namespace.
Raise SPDXValueError if malformed value.
Raise CardinalityError if already defined.
"""
if not self.doc_namespace_set:
self.doc_namespace_set = True
if validations.validate_doc_namespace(namespace):
doc.namespace = namespace
return True
else:
raise SPDXValueError("Document::Namespace")
else:
raise CardinalityError("Document::Comment")
def reset_document(self):
"""
        Reset the internal state to allow building a new document.
"""
# FIXME: this state does not make sense
self.doc_version_set = False
self.doc_comment_set = False
self.doc_namespace_set = False
self.doc_data_lics_set = False
self.doc_name_set = False
self.doc_spdx_id_set = False
class ExternalDocumentRefBuilder(tagvaluebuilders.ExternalDocumentRefBuilder):
def set_chksum(self, doc, chk_sum):
"""
        Set the external document reference's checksum.
        chk_sum - The checksum value as a string.
        Raise SPDXValueError if the value is empty.
"""
if chk_sum:
doc.ext_document_references[-1].check_sum = checksum.Algorithm(
"SHA1", chk_sum
)
else:
raise SPDXValueError("ExternalDocumentRef::Checksum")
class EntityBuilder(tagvaluebuilders.EntityBuilder):
def __init__(self):
super(EntityBuilder, self).__init__()
def create_entity(self, doc, value):
if self.tool_re.match(value):
return self.build_tool(doc, value)
elif self.person_re.match(value):
return self.build_person(doc, value)
elif self.org_re.match(value):
return self.build_org(doc, value)
else:
raise SPDXValueError("Entity")
class CreationInfoBuilder(tagvaluebuilders.CreationInfoBuilder):
def __init__(self):
super(CreationInfoBuilder, self).__init__()
def set_creation_comment(self, doc, comment):
"""
Set creation comment.
Raise CardinalityError if comment already set.
Raise SPDXValueError if not free form text.
"""
if not self.creation_comment_set:
self.creation_comment_set = True
doc.creation_info.comment = comment
return True
else:
raise CardinalityError("CreationInfo::Comment")
class PackageBuilder(tagvaluebuilders.PackageBuilder):
def __init__(self):
super(PackageBuilder, self).__init__()
def set_pkg_chk_sum(self, doc, chk_sum):
"""
Set the package check sum, if not already set.
chk_sum - A string
Raise CardinalityError if already defined.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_chk_sum_set:
self.package_chk_sum_set = True
doc.packages[-1].check_sum = checksum.Algorithm("SHA1", chk_sum)
else:
raise CardinalityError("Package::CheckSum")
def set_pkg_source_info(self, doc, text):
"""
Set the package's source information, if not already set.
text - Free form text.
Raise CardinalityError if already defined.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_source_info_set:
self.package_source_info_set = True
doc.packages[-1].source_info = text
return True
else:
raise CardinalityError("Package::SourceInfo")
def set_pkg_verif_code(self, doc, code):
"""
Set the package verification code, if not already set.
code - A string.
Raise CardinalityError if already defined.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_verif_set:
self.package_verif_set = True
doc.packages[-1].verif_code = code
else:
raise CardinalityError("Package::VerificationCode")
def set_pkg_excl_file(self, doc, filename):
"""
Set the package's verification code excluded file.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
doc.packages[-1].add_exc_file(filename)
def set_pkg_license_comment(self, doc, text):
"""
Set the package's license comment.
Raise OrderError if no package previously defined.
Raise CardinalityError if already set.
"""
self.assert_package_exists()
if not self.package_license_comment_set:
self.package_license_comment_set = True
doc.packages[-1].license_comment = text
return True
else:
raise CardinalityError("Package::LicenseComment")
def set_pkg_attribution_text(self, doc, text):
"""
Set the package's attribution text.
"""
self.assert_package_exists()
doc.packages[-1].attribution_text = text
return True
def set_pkg_cr_text(self, doc, text):
"""
        Set the package's copyright text.
Raise OrderError if no package previously defined.
Raise CardinalityError if already set.
"""
self.assert_package_exists()
if not self.package_cr_text_set:
self.package_cr_text_set = True
doc.packages[-1].cr_text = text
else:
raise CardinalityError("Package::CopyrightText")
def set_pkg_summary(self, doc, text):
"""
Set the package summary.
Raise CardinalityError if summary already set.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_summary_set:
self.package_summary_set = True
doc.packages[-1].summary = text
else:
raise CardinalityError("Package::Summary")
def set_pkg_desc(self, doc, text):
"""
Set the package's description.
Raise CardinalityError if description already set.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_desc_set:
self.package_desc_set = True
doc.packages[-1].description = text
else:
raise CardinalityError("Package::Description")
def set_pkg_comment(self, doc, text):
"""
Set the package's comment.
Raise CardinalityError if comment already set.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not self.package_comment_set:
self.package_comment_set = True
doc.packages[-1].comment = text
else:
raise CardinalityError("Package::Comment")
def set_pkg_ext_ref_category(self, doc, category):
"""
        Set the package's external reference category.
Raise OrderError if no package previously defined.
Raise SPDXValueError if malformed value.
"""
self.assert_package_exists()
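        # The category typically arrives as an RDF-style value such as
        # "...referenceCategory_packageManager"; keep only the part after the last "_"
        # and normalize "packagemanager" to the "PACKAGE-MANAGER" spelling the
        # validation expects.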
category = category.split("_")[-1]
if category.lower() == "packagemanager":
category = "PACKAGE-MANAGER"
if validations.validate_pkg_ext_ref_category(category):
if (
len(doc.packages[-1].pkg_ext_refs)
and doc.packages[-1].pkg_ext_refs[-1].category is None
):
doc.packages[-1].pkg_ext_refs[-1].category = category
else:
doc.packages[-1].add_pkg_ext_refs(
package.ExternalPackageRef(category=category)
)
else:
raise SPDXValueError("ExternalRef::Category")
def set_pkg_ext_ref_type(self, doc, typ):
"""
Set the package's external reference type.
Raise OrderError if no package previously defined.
Raise SPDXValueError if malformed value.
"""
self.assert_package_exists()
if "#" in typ:
typ = typ.split("#")[-1]
else:
typ = typ.split("/")[-1]
if validations.validate_pkg_ext_ref_type(typ):
if (
len(doc.packages[-1].pkg_ext_refs)
and doc.packages[-1].pkg_ext_refs[-1].pkg_ext_ref_type is None
):
doc.packages[-1].pkg_ext_refs[-1].pkg_ext_ref_type = typ
else:
doc.packages[-1].add_pkg_ext_refs(
package.ExternalPackageRef(pkg_ext_ref_type=typ)
)
else:
raise SPDXValueError("ExternalRef::Type")
def set_pkg_ext_ref_comment(self, doc, comment):
"""
Set the package's external reference comment.
Raise CardinalityError if comment already set.
Raise OrderError if no package previously defined.
"""
self.assert_package_exists()
if not len(doc.packages[-1].pkg_ext_refs):
raise OrderError("Package::ExternalRef")
if not self.pkg_ext_comment_set:
self.pkg_ext_comment_set = True
doc.packages[-1].pkg_ext_refs[-1].comment = comment
return True
else:
raise CardinalityError("ExternalRef::Comment")
class FileBuilder(tagvaluebuilders.FileBuilder):
def __init__(self):
super(FileBuilder, self).__init__()
def set_file_chksum(self, doc, chk_sum):
"""
Set the file check sum, if not already set.
chk_sum - A string
Raise CardinalityError if already defined.
        Raise OrderError if no package or file previously defined.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_chksum_set:
self.file_chksum_set = True
self.file(doc).chk_sum = checksum.Algorithm("SHA1", chk_sum)
return True
else:
raise CardinalityError("File::CheckSum")
else:
raise OrderError("File::CheckSum")
def set_file_license_comment(self, doc, text):
"""
Raise OrderError if no package or file defined.
Raise CardinalityError if more than one per file.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_license_comment_set:
self.file_license_comment_set = True
self.file(doc).license_comment = text
return True
else:
raise CardinalityError("File::LicenseComment")
else:
raise OrderError("File::LicenseComment")
def set_file_attribution_text(self, doc, text):
"""
Set the file's attribution text.
"""
if self.has_package(doc) and self.has_file(doc):
self.assert_package_exists()
self.file(doc).attribution_text = text
return True
def set_file_copyright(self, doc, text):
"""
Raise OrderError if no package or file defined.
Raise CardinalityError if more than one.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_copytext_set:
self.file_copytext_set = True
self.file(doc).copyright = text
return True
else:
raise CardinalityError("File::CopyRight")
else:
raise OrderError("File::CopyRight")
def set_file_comment(self, doc, text):
"""
Raise OrderError if no package or no file defined.
Raise CardinalityError if more than one comment set.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_comment_set:
self.file_comment_set = True
self.file(doc).comment = text
return True
else:
raise CardinalityError("File::Comment")
else:
raise OrderError("File::Comment")
def set_file_notice(self, doc, text):
"""
Raise OrderError if no package or file defined.
Raise CardinalityError if more than one.
"""
if self.has_package(doc) and self.has_file(doc):
if not self.file_notice_set:
self.file_notice_set = True
self.file(doc).notice = tagvaluebuilders.str_from_text(text)
return True
else:
raise CardinalityError("File::Notice")
else:
raise OrderError("File::Notice")
class SnippetBuilder(tagvaluebuilders.SnippetBuilder):
def __init__(self):
super(SnippetBuilder, self).__init__()
def set_snippet_lic_comment(self, doc, lic_comment):
"""
Set the snippet's license comment.
Raise OrderError if no snippet previously defined.
Raise CardinalityError if already set.
"""
self.assert_snippet_exists()
if not self.snippet_lic_comment_set:
self.snippet_lic_comment_set = True
doc.snippet[-1].license_comment = lic_comment
else:
CardinalityError("Snippet::licenseComments")
def set_snippet_comment(self, doc, comment):
"""
Set general comments about the snippet.
Raise OrderError if no snippet previously defined.
Raise CardinalityError if comment already set.
"""
self.assert_snippet_exists()
if not self.snippet_comment_set:
self.snippet_comment_set = True
doc.snippet[-1].comment = comment
return True
else:
raise CardinalityError("Snippet::comment")
def set_snippet_attribution_text(self, doc, text):
"""
Set the snippet's attribution text.
"""
self.assert_snippet_exists()
doc.snippet[-1].attribution_text = text
return True
def set_snippet_copyright(self, doc, copyright):
"""
Set the snippet's copyright text.
Raise OrderError if no snippet previously defined.
Raise CardinalityError if already set.
"""
self.assert_snippet_exists()
if not self.snippet_copyright_set:
self.snippet_copyright_set = True
doc.snippet[-1].copyright = copyright
else:
raise CardinalityError("Snippet::copyrightText")
class ReviewBuilder(tagvaluebuilders.ReviewBuilder):
def __init__(self):
super(ReviewBuilder, self).__init__()
def add_review_comment(self, doc, comment):
"""
Set the review comment.
Raise CardinalityError if already set.
Raise OrderError if no reviewer defined before.
"""
if len(doc.reviews) != 0:
if not self.review_comment_set:
self.review_comment_set = True
doc.reviews[-1].comment = comment
return True
else:
raise CardinalityError("ReviewComment")
else:
raise OrderError("ReviewComment")
class AnnotationBuilder(tagvaluebuilders.AnnotationBuilder):
def __init__(self):
super(AnnotationBuilder, self).__init__()
def add_annotation_comment(self, doc, comment):
"""
Set the annotation comment.
Raise CardinalityError if already set.
Raise OrderError if no annotator defined before.
"""
if len(doc.annotations) != 0:
if not self.annotation_comment_set:
self.annotation_comment_set = True
doc.annotations[-1].comment = comment
return True
else:
raise CardinalityError("AnnotationComment")
else:
raise OrderError("AnnotationComment")
def add_annotation_type(self, doc, annotation_type):
"""
Set the annotation type.
Raise CardinalityError if already set.
Raise OrderError if no annotator defined before.
"""
if len(doc.annotations) != 0:
if not self.annotation_type_set:
if annotation_type.endswith("annotationType_other"):
self.annotation_type_set = True
doc.annotations[-1].annotation_type = "OTHER"
return True
elif annotation_type.endswith("annotationType_review"):
self.annotation_type_set = True
doc.annotations[-1].annotation_type = "REVIEW"
return True
else:
raise SPDXValueError("Annotation::AnnotationType")
else:
raise CardinalityError("Annotation::AnnotationType")
else:
raise OrderError("Annotation::AnnotationType")
class RelationshipBuilder(tagvaluebuilders.RelationshipBuilder):
def __init__(self):
super(RelationshipBuilder, self).__init__()
def add_relationship_comment(self, doc, comment):
"""
Set the relationship comment.
Raise CardinalityError if already set.
        Raise OrderError if no relationship defined before.
"""
if len(doc.relationships) != 0:
if not self.relationship_comment_set:
self.relationship_comment_set = True
doc.relationships[-1].comment = comment
return True
else:
raise CardinalityError("RelationshipComment")
else:
raise OrderError("RelationshipComment")
class Builder(
DocBuilder,
EntityBuilder,
CreationInfoBuilder,
PackageBuilder,
FileBuilder,
SnippetBuilder,
ReviewBuilder,
ExternalDocumentRefBuilder,
AnnotationBuilder,
RelationshipBuilder,
):
def __init__(self):
super(Builder, self).__init__()
# FIXME: this state does not make sense
self.reset()
def reset(self):
"""
Reset builder's state for building new documents.
Must be called between usage with different documents.
"""
# FIXME: this state does not make sense
self.reset_creation_info()
self.reset_document()
self.reset_package()
self.reset_file_stat()
self.reset_reviews()
self.reset_annotations()
self.reset_relationship()
|
|
'''
Created on 11 May 2012
@author: Jeff
'''
import os
import unittest
from google.appengine.ext import testbed, ndb
from actions import register_new_user, get_profiles, _get_my_profile, \
NoUserException, ActionException, NoUserNameException, get_my_profile_name, \
set_playing, create_role
from model import Profile, SiteMasterProfile, DataModelException
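# setCurrentUser/logoutCurrentUser simulate App Engine login state by setting the
# USER_EMAIL / USER_ID / USER_IS_ADMIN environment variables read by the users API
# (backed here by testbed's user stub).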
def setCurrentUser(email, user_id, is_admin=False):
os.environ['USER_EMAIL'] = email or ''
os.environ['USER_ID'] = user_id or ''
os.environ['USER_IS_ADMIN'] = '1' if is_admin else '0'
def logoutCurrentUser():
setCurrentUser(None, None)
class Test(unittest.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_user_stub()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
def tearDown(self):
self.testbed.deactivate()
def testNotLoggedIn(self):
'''Should just fail with a NoUserException'''
self.assertRaises(NoUserException, _get_my_profile)
def testBlankProfileNameSignup(self):
setCurrentUser("[email protected]", "Administrator")
register_new_user('SiteAdmin')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser1")
self.assertRaises(DataModelException, register_new_user, '')
self.assertRaises(ActionException, register_new_user, None)
def testFirstSignup(self):
setCurrentUser("[email protected]", "Administrator")
'''Should fail repeatedly with a NoUserNameException'''
self.assertRaises(NoUserNameException, _get_my_profile)
self.assertRaises(NoUserNameException, _get_my_profile)
'''Should create the Site Administrator with its own place for template Roles'''
register_new_user('SiteAdmin')
'''Should have a SiteAdmin now'''
self.assertIsInstance(_get_my_profile(), SiteMasterProfile)
self.assertEquals(SiteMasterProfile.query().count(), 1)
self.assertEquals(Profile.query().count(), 1)
'''Should be called whatever it was set up as'''
self.assertEquals('SiteAdmin', get_my_profile_name())
'''Check number of entities'''
self.assertEquals(1, SiteMasterProfile.query().count())
'''Should just fail with a NoUserException if logged out'''
logoutCurrentUser()
self.assertRaises(NoUserException, _get_my_profile)
def testNextSignup(self):
setCurrentUser("[email protected]", "Administrator")
self.assertRaises(NoUserNameException, _get_my_profile)
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('SiteAdmin')
self.assertIsInstance(_get_my_profile(), Profile)
self.assertEquals('SiteAdmin', get_my_profile_name())
self.assertEquals(1, SiteMasterProfile.query().count())
self.assertEquals(1, Profile.query().count())
logoutCurrentUser()
self.assertRaises(NoUserException, _get_my_profile)
setCurrentUser("[email protected]", "NewUser1")
self.assertRaises(NoUserNameException, _get_my_profile)
self.assertRaises(NoUserNameException, _get_my_profile)
'''Should create a GameMaster for this new profile with its own Place for private conversations'''
register_new_user('NewUserOne')
        '''Should have a profile now'''
self.assertIsInstance(_get_my_profile(), Profile)
'''Should be called whatever it was set up as'''
self.assertEquals('NewUserOne', get_my_profile_name())
self.assertEquals(2, Profile.query().count())
logoutCurrentUser()
self.assertRaises(NoUserException, _get_my_profile)
def testManySignups(self):
setCurrentUser("[email protected]", "Administrator")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('SiteAdmin')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser1")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('NewUserOne')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser2")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('NewUserTwo')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser3")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('NewUserThree')
logoutCurrentUser()
setCurrentUser("[email protected]", "AnotherUser1")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('AnotherUserOne')
logoutCurrentUser()
setCurrentUser("[email protected]", "AnotherUser2")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('AnotherUserTwo')
logoutCurrentUser()
setCurrentUser("[email protected]", "AnotherUser3")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('AnotherUserThree')
self.assertEqual(len(get_profiles("Another")), 3)
self.assertEqual(len(get_profiles("another")), 3)
self.assertEqual(len(get_profiles("new")), 3)
def testChangingSomeoneElsesProfile(self):
setCurrentUser("[email protected]", "Administrator")
register_new_user('SiteAdmin')
site_admin_id = _get_my_profile().key.urlsafe()
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser1")
register_new_user('NewUserOne')
new_user_1_id = _get_my_profile().key.urlsafe()
role_1_id = create_role("Test Role 1")
set_playing(role_1_id)
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser2")
        ''' This code simulates an attack against the Model, using the Google Account ID to load another user's Profile '''
profile = ndb.Key(urlsafe=new_user_1_id).get()
profile.playing = ndb.Key(urlsafe=role_1_id)
self.assertRaises(DataModelException, profile.put)
profile = ndb.Key(urlsafe=site_admin_id).get()
profile.playing = ndb.Key(urlsafe=role_1_id)
self.assertRaises(DataModelException, profile.put)
register_new_user('NewUserTwo')
        ''' This code simulates an attack against the Model, using the Google Account ID to load another user's Profile '''
profile = ndb.Key(urlsafe=new_user_1_id).get()
profile.playing = ndb.Key(urlsafe=role_1_id)
self.assertRaises(DataModelException, profile.put)
profile = ndb.Key(urlsafe=site_admin_id).get()
profile.playing = ndb.Key(urlsafe=role_1_id)
self.assertRaises(DataModelException, profile.put)
def testNameUsed(self):
setCurrentUser("[email protected]", "Administrator")
register_new_user('SiteAdmin')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser1")
register_new_user('NewUserOne')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser2")
register_new_user('NewUserTwo')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser3")
self.assertRaises(ActionException, register_new_user, 'NewUserOne')
def testProfilePreviouslyRegistered(self):
setCurrentUser("[email protected]", "Administrator")
register_new_user('SiteAdmin')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser1")
register_new_user('NewUserOne')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser2")
register_new_user('NewUserTwo')
logoutCurrentUser()
setCurrentUser("[email protected]", "NewUser3")
self.assertRaises(ActionException, register_new_user, 'NewUserOne')
def testLoginAndOut(self):
setCurrentUser("[email protected]", "Administrator")
self.assertRaises(NoUserNameException, _get_my_profile)
logoutCurrentUser()
setCurrentUser("[email protected]", "Administrator")
self.assertRaises(NoUserNameException, _get_my_profile)
register_new_user('AnotherUserTwo')
logoutCurrentUser()
setCurrentUser("[email protected]", "Administrator")
self.assertIsInstance(_get_my_profile(), Profile)
def testGetProfiles(self):
self.assertRaises(ActionException, get_profiles, None)
self.assertEqual(len(get_profiles('a')), 0)
setCurrentUser("[email protected]", "Administrator")
register_new_user('Admin')
logoutCurrentUser()
setCurrentUser("[email protected]", "User1")
register_new_user('User1')
logoutCurrentUser()
self.assertEqual(len(get_profiles('a')), 1)
self.assertEqual(len(get_profiles('u')), 1)
self.assertEqual(len(get_profiles('A')), 1)
self.assertEqual(len(get_profiles('U')), 1)
if __name__ == "__main__":
unittest.main()
|
|
from java.lang import String
import random
from threading import Timer
import math
#OPTIONS
faceRecognizer=False
joystick=False
randomMove=False
headTracking=True
lastName="christian"
leftPort="COM20"
rightPort="COM21"
webgui=Runtime.create("WebGui","WebGui")
webgui.autoStartBrowser(False)
webgui.startService()
#start speech recognition and AI
wksr=Runtime.createAndStart("webkitspeechrecognition","WebkitSpeechRecognition")
pinocchio = Runtime.createAndStart("pinocchio", "ProgramAB")
pinocchio.startSession("christian", "pinocchio")
htmlfilter=Runtime.createAndStart("htmlfilter","HtmlFilter")
mouth=Runtime.createAndStart("i01.mouth","AcapelaSpeech")
wksr.addListener("publishText","python","heard")
pinocchio.addTextListener(htmlfilter)
htmlfilter.addTextListener(mouth)
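# Text pipeline: recognized speech is published to the Python heard() callback,
# which asks ProgramAB (pinocchio) for a response; responses pass through the
# HtmlFilter and are finally spoken by the AcapelaSpeech mouth.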
#Start inMoov
i01 = Runtime.createAndStart("i01", "InMoov")
i01.startMouth()
i01.startMouthControl(leftPort)
#Head
i01.head.jaw.setMinMax(60,90)
i01.mouthControl.setmouth(60,90)
i01.head.jaw.setRest(90)
i01.head.neck.map(0,180,25,180)
i01.head.neck.setRest(90)
i01.head.rothead.map(0,180,25,170)
i01.head.rothead.setRest(115)
i01.head.eyeY.setMinMax(85,150)
#i01.head.eyeY.map(0,180,80,100)
i01.head.eyeY.setRest(115)
i01.head.eyeX.setMinMax(75,115)
#i01.head.eyeX.map(0,180,70,100)
i01.head.eyeX.setRest(95)
#Left Arm
i01.startLeftArm(leftPort)
i01.leftArm.shoulder.map(0,180,30,100)
i01.leftArm.omoplate.map(0,180,10,75)
i01.leftArm.rotate.map(0,180,46,160)
i01.leftArm.shoulder.setRest(30)
i01.leftArm.omoplate.setRest(15)
i01.leftArm.rotate.setRest(90)
#Right Arm
i01.startRightArm(rightPort)
i01.rightArm.shoulder.map(0,180,00,180)
i01.rightArm.omoplate.map(0,180,10,70)
i01.rightArm.rotate.map(0,180,46,160)
i01.rightArm.bicep.map(0,180,5,82)
i01.rightArm.shoulder.setRest(20)
i01.rightArm.omoplate.setRest(15)
i01.rightArm.rotate.setRest(90)
i01.rightArm.bicep.setRest(10)
i01.rest()
# OpenCV
opencv = i01.startOpenCV()
opencv.setCameraIndex(2)
opencv.capture()
opencvR = Runtime.createAndStart("opencvR","OpenCV")
opencvR.setCameraIndex(1)
opencvR.capture()
pinocchio.addListener("publishResponse","python","randomMoveAction")
def headTrackingInit():
i01.startEyesTracking(leftPort)
i01.startHeadTracking(leftPort)
i01.headTracking.faceDetect()
i01.eyesTracking.faceDetect()
#i01.autoPowerDownOnInactivity(120)
def randomMoveAction(data):
if randomMove:
#print "test1"
i01.setHeadSpeed(1.0,1.0)
neck=i01.head.neck.getPos()+random.randint(-10,10)
rothead=i01.head.rothead.getPos()+random.randint(-20,20)
if neck<45 or neck>135:
neck=90
if rothead<45 or rothead>135:
rothead=90
i01.moveHead(neck,rothead)
i01.setHeadSpeed(1.0,1.0)
else:
print "test2"
def heard(data):
global lastName
if(faceRecognizer):
lastName=fr.getLastRecognizedName()
if((lastName+"-pinocchio" not in pinocchio.getSessionNames())):
mouth.speak("Hello "+lastName)
sleep(2)
pinocchio.getResponse(lastName,data)
def joystickInit():
joystickId = 3
global uberjoy
global controllerButtonMap
global controllerButtonMapTrigger
global controllerButtonReverse
global controllerButtonTrigger
global controllerButtonTriggerState
uberjoy = Runtime.createAndStart("uberjoy", "Joystick")
uberjoy.setController(joystickId)
uberjoy.startPolling()
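    # Axis-to-servo wiring: controllerButtonMap drives the default servo for each
    # joystick axis, controllerButtonMapTrigger holds the alternate servo used while
    # the matching trigger button (10/11) is held, controllerButtonReverse flips the
    # axis direction, and controllerButtonTriggerState tracks which triggers are down.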
controllerButtonMap={"x":i01.leftArm.rotate,"y":i01.leftArm.bicep,"z":i01.rightArm.rotate,"rz":i01.rightArm.bicep,"4":i01.head.neck,"5":i01.head.neck,"6":i01.head.rothead,"7":i01.head.rothead}
controllerButtonMapTrigger={"x":i01.leftArm.omoplate,"y":i01.leftArm.shoulder,"z":i01.rightArm.omoplate,"rz":i01.rightArm.shoulder}
controllerButtonReverse={"x":True,"y":True,"z":False,"rz":True,"4":True,"5":False,"6":True,"7":False}
controllerButtonTrigger={"x":"10","y":"10","z":"11","rz":"11"}
controllerButtonTriggerState={"10":False,"11":False}
for button,servo in controllerButtonMap.iteritems():
servo.setSpeedControlOnUC(False)
uberjoy.addListener("publishInput", "python", "joystickOnPublishInput")
def joystickOnPublishInput(data):
global controllerButtonTriggerState
if(controllerButtonReverse.get(data.id)):
data.value*=-1
if(controllerButtonTriggerState.has_key(data.id)):
print "trigger button pressed"
for k,v in controllerButtonTrigger.iteritems():
if v==data.id:
if controllerButtonTriggerState.get(data.id):
controllerButtonMapTrigger.get(k).stop()
else:
controllerButtonMap.get(k).stop()
controllerButtonTriggerState[data.id]=bool(data.value)
return
if(controllerButtonMap.has_key(data.id)):
servotmp=[None]
if(controllerButtonMapTrigger.has_key(data.id)):
print "found trigger "+data.id+" = "+ controllerButtonMapTrigger.get(data.id).getName()
if(controllerButtonTriggerState.get(controllerButtonTrigger.get(data.id))):
servotmp[0]=controllerButtonMapTrigger.get(data.id)
print "using alt servo: "+servotmp[0].getName()
else:
servotmp[0]=controllerButtonMap.get(data.id)
print "using normal servo: "+ servotmp[0].getName()
else:
servotmp[0]=controllerButtonMap.get(data.id)
print "using normal servo: "+ servotmp[0].getName()
servo=servotmp[0]
print servo.getName()
absValue = math.fabs(data.value)
if (absValue < 0.300):
servo.stop()
return
absValue = absValue-0.01
servo.setSpeed(absValue)
delay = int((1-absValue) * 200)+25
servo.stop()
if (data.value > 0.0):
#servo.sweep(servo.getPos(), int(servo.getMax()), delay, 1, True)
servo.sweep(servo.getPos(), 180, delay, 1, True)
else:
servo.sweep(0, servo.getPos(), delay, -1, True)
if (headTracking):
headTrackingInit()
if(joystick):
joystickInit()
headTilt=Runtime.createAndStart("headTilt","Servo")
headTilt.attach(i01.arduinos.get(leftPort).getName(),30)
headTilt.setMinMax(30,180)
headTilt.setRest(105)
def power_up():
headTilt.attach()
def power_down():
headTilt.detach()
i01.beginCheckingOnInactivity(60)
i01.startPIR(leftPort,23)
pinocchio.getResponse("Initialize Pinocchio")
|
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
From the month of edge formation, find the SR before, at the time of formation, and after.
"""
from collections import defaultdict
import codecs
import os
import json
import numpy as np
from igraph import *
IN_DIR = "../../../DATA/General/"
os.chdir(IN_DIR)
F_IN = "mention/edge_formation_deletion_MOs.dat"
F_OUT = "mention/edge_formation_REL_ST_stats_STRICT.dat"
MONTHS = ["5", "6", "7", "8", "9", "10", "11"]
#########################
# read from a file that is an edge list with weights
#########################
def read_in_MO_graph(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
print G.summary()
return G
def read_in_MO_graph_MUTUAL_UNW(MO):
G = Graph.Read_Ncol('mention/' + MO + '_MENT_weight_dir_self_loops', directed=True, weights=True)
G.to_undirected(mode="mutual", combine_edges='ignore')
print G.summary()
return G
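# read_in_MO_graph keeps the full directed, weighted mention graph of a month,
# while read_in_MO_graph_MUTUAL_UNW keeps only reciprocated (mutual) mention
# edges collapsed to an undirected graph; the latter is what the "strong
# contacts" variant below operates on.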
def extract_edge_formation_REL_ST_with_STDEV_POP():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
            # skip edges that were later deleted within the observed window (months 6-10)
if MO_deletion >= 6 and MO_deletion <= 10:
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=IN, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=IN, weights='weight')
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL POP %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_ACT():
MO_MENT = defaultdict(int)
for MO in MONTHS:
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.strength(nA[0].index, mode=OUT, weights='weight')
except IndexError:
popA = 0
try:
popB = G.strength(nB[0].index, mode=OUT, weights='weight')
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST ACT %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_MUTUAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
#MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
            # keep only edges whose recorded deletion month is later than 11
if (MO_deletion <= 11):
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST MUTUAL CONTACTS %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print stdev_bef, stdev_at, stdev_aft
def extract_edge_formation_REL_ST_with_STDEV_TOTAL_UNW():
MO_MENT = defaultdict(int)
for MO in MONTHS:
# strong
#MO_MENT[MO] = read_in_MO_graph_MUTUAL_UNW(MO).copy()
# weak
MO_MENT[MO] = read_in_MO_graph(MO).copy()
output_file = open(F_OUT, 'w')
cnt = 0
TOT_BEFORE = []
TOT_FORMATION = []
TOT_AFTER = []
with codecs.open(F_IN,'r', encoding='utf8') as input_file:
for line in input_file:
(userA, userB, MO_formation, MO_deletion) = line.split()
MO_formation = int(MO_formation)
MO_deletion = int(MO_deletion)
if MO_formation == 4 or MO_formation >= 10:
continue
            # keep only edges whose recorded deletion month is later than 11
if (MO_deletion <= 11):
continue
cnt += 1
userA = int(userA)
userB = int(userB)
if userA < userB:
u1 = userA
u2 = userB
else:
u1 = userB
u2 = userA
MO_prior = MONTHS[int(MO_formation)-1-5]
MO_prior = str(MO_prior)
G = MO_MENT[MO_prior]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
prior = abs(popA + popB)
MO_formation = str(MO_formation)
G = MO_MENT[MO_formation]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
print u1, u2, MO_formation
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
print u2, u1, MO_formation
formation = abs(popA + popB)
MO_after = MONTHS[int(MO_formation)+1-5]
MO_after = str(MO_after)
G = MO_MENT[MO_after]
nA = G.vs.select(name = str(u1))
nB = G.vs.select(name = str(u2))
try:
popA = G.degree(nA[0].index)
except IndexError:
popA = 0
try:
popB = G.degree(nB[0].index)
except IndexError:
popB = 0
after = abs(popA + popB)
TOT_AFTER.append(after)
TOT_FORMATION.append(formation)
TOT_BEFORE.append(prior)
output_file.write(str(u1) + '\t' + str(u2) + '\t' + str(MO_formation) + '\t' + \
str(prior)+ '\t' + str(formation)+ '\t' + str(after) + '\n')
print "processed %d edges " % cnt
cnt = float(cnt)
TOT_BEFORE = np.array(TOT_BEFORE)
TOT_FORMATION = np.array(TOT_FORMATION)
TOT_AFTER = np.array(TOT_AFTER)
avg_bef = np.mean(TOT_BEFORE)
stdev_bef = np.std(TOT_BEFORE, dtype=np.float64)
avg_at = np.mean(TOT_FORMATION)
stdev_at = np.std(TOT_FORMATION, dtype=np.float64)
avg_aft = np.mean(TOT_AFTER)
stdev_aft = np.std(TOT_AFTER, dtype=np.float64)
print "Average REL ST MUTUAL CONTACTS %f and stdev %f before, at the time %f, %f and after %f, %f edges formation " % \
(avg_bef, stdev_bef, avg_at, stdev_at, avg_aft, stdev_aft)
print avg_bef, avg_at, avg_aft
print stdev_bef, stdev_at, stdev_aft
print 'Strong contacts sum'
extract_edge_formation_REL_ST_with_STDEV_MUTUAL_UNW()
print 'Total contacts, including weak sum'
extract_edge_formation_REL_ST_with_STDEV_TOTAL_UNW()
|
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from mongoengine import ValidationError
from six.moves import http_client
from st2api.controllers import resource
from st2common import log as logging
from st2common.exceptions.apivalidation import ValueValidationException
from st2common.models.api.policy import PolicyTypeAPI, PolicyAPI
from st2common.models.db.policy import PolicyTypeReference
from st2common.persistence.policy import PolicyType, Policy
from st2common.validators.api.misc import validate_not_part_of_system_pack
from st2common.exceptions.db import StackStormDBObjectNotFoundError
from st2common.rbac.types import PermissionType
from st2common.rbac.backends import get_rbac_backend
from st2common.router import abort
from st2common.router import Response
LOG = logging.getLogger(__name__)
class PolicyTypeController(resource.ResourceController):
model = PolicyTypeAPI
access = PolicyType
mandatory_include_fields_retrieve = ["id", "name", "resource_type"]
supported_filters = {"resource_type": "resource_type"}
query_options = {"sort": ["resource_type", "name"]}
def get_one(self, ref_or_id, requester_user):
return self._get_one(ref_or_id, requester_user=requester_user)
def get_all(
self,
exclude_attributes=None,
include_attributes=None,
sort=None,
offset=0,
limit=None,
requester_user=None,
**raw_filters,
):
return self._get_all(
exclude_fields=exclude_attributes,
include_fields=include_attributes,
sort=sort,
offset=offset,
limit=limit,
raw_filters=raw_filters,
requester_user=requester_user,
)
def _get_one(self, ref_or_id, requester_user):
instance = self._get_by_ref_or_id(ref_or_id=ref_or_id)
permission_type = PermissionType.POLICY_TYPE_VIEW
rbac_utils = get_rbac_backend().get_utils_class()
rbac_utils.assert_user_has_resource_db_permission(
user_db=requester_user,
resource_db=instance,
permission_type=permission_type,
)
result = self.model.from_model(instance)
return result
def _get_all(
self,
exclude_fields=None,
include_fields=None,
sort=None,
offset=0,
limit=None,
query_options=None,
from_model_kwargs=None,
raw_filters=None,
requester_user=None,
):
resp = super(PolicyTypeController, self)._get_all(
exclude_fields=exclude_fields,
include_fields=include_fields,
sort=sort,
offset=offset,
limit=limit,
query_options=query_options,
from_model_kwargs=from_model_kwargs,
raw_filters=raw_filters,
requester_user=requester_user,
)
return resp
def _get_by_ref_or_id(self, ref_or_id):
if PolicyTypeReference.is_reference(ref_or_id):
resource_db = self._get_by_ref(resource_ref=ref_or_id)
else:
resource_db = self._get_by_id(resource_id=ref_or_id)
if not resource_db:
            msg = 'PolicyType with a reference or id "%s" not found.' % (ref_or_id)
raise StackStormDBObjectNotFoundError(msg)
return resource_db
def _get_by_id(self, resource_id):
try:
resource_db = self.access.get_by_id(resource_id)
except Exception:
resource_db = None
return resource_db
def _get_by_ref(self, resource_ref):
try:
ref = PolicyTypeReference.from_string_reference(ref=resource_ref)
except Exception:
return None
resource_db = self.access.query(
name=ref.name, resource_type=ref.resource_type
).first()
return resource_db
class PolicyController(resource.ContentPackResourceController):
model = PolicyAPI
access = Policy
supported_filters = {
"pack": "pack",
"resource_ref": "resource_ref",
"policy_type": "policy_type",
}
query_options = {"sort": ["pack", "name"]}
def get_all(
self,
exclude_attributes=None,
include_attributes=None,
sort=None,
offset=0,
limit=None,
requester_user=None,
**raw_filters,
):
return self._get_all(
exclude_fields=exclude_attributes,
include_fields=include_attributes,
sort=sort,
offset=offset,
limit=limit,
raw_filters=raw_filters,
requester_user=requester_user,
)
def get_one(self, ref_or_id, requester_user):
permission_type = PermissionType.POLICY_VIEW
return self._get_one(
ref_or_id, permission_type=permission_type, requester_user=requester_user
)
def post(self, instance, requester_user):
"""
Create a new policy.
Handles requests:
POST /policies/
"""
permission_type = PermissionType.POLICY_CREATE
rbac_utils = get_rbac_backend().get_utils_class()
rbac_utils.assert_user_has_resource_api_permission(
user_db=requester_user,
resource_api=instance,
permission_type=permission_type,
)
op = "POST /policies/"
db_model = self.model.to_model(instance)
LOG.debug("%s verified object: %s", op, db_model)
db_model = self.access.add_or_update(db_model)
LOG.debug("%s created object: %s", op, db_model)
LOG.audit(
"Policy created. Policy.id=%s" % (db_model.id),
extra={"policy_db": db_model},
)
exec_result = self.model.from_model(db_model)
return Response(json=exec_result, status=http_client.CREATED)
def put(self, instance, ref_or_id, requester_user):
op = "PUT /policies/%s/" % ref_or_id
db_model = self._get_by_ref_or_id(ref_or_id=ref_or_id)
LOG.debug("%s found object: %s", op, db_model)
permission_type = PermissionType.POLICY_MODIFY
rbac_utils = get_rbac_backend().get_utils_class()
rbac_utils.assert_user_has_resource_db_permission(
user_db=requester_user,
resource_db=db_model,
permission_type=permission_type,
)
db_model_id = db_model.id
try:
validate_not_part_of_system_pack(db_model)
except ValueValidationException as e:
LOG.exception("%s unable to update object from system pack.", op)
abort(http_client.BAD_REQUEST, six.text_type(e))
if not getattr(instance, "pack", None):
instance.pack = db_model.pack
try:
db_model = self.model.to_model(instance)
db_model.id = db_model_id
db_model = self.access.add_or_update(db_model)
except (ValidationError, ValueError) as e:
LOG.exception("%s unable to update object: %s", op, db_model)
abort(http_client.BAD_REQUEST, six.text_type(e))
return
LOG.debug("%s updated object: %s", op, db_model)
LOG.audit(
"Policy updated. Policy.id=%s" % (db_model.id),
extra={"policy_db": db_model},
)
exec_result = self.model.from_model(db_model)
return Response(json=exec_result, status=http_client.OK)
def delete(self, ref_or_id, requester_user):
"""
Delete a policy.
Handles requests:
POST /policies/1?_method=delete
DELETE /policies/1
DELETE /policies/mypack.mypolicy
"""
op = "DELETE /policies/%s/" % ref_or_id
db_model = self._get_by_ref_or_id(ref_or_id=ref_or_id)
LOG.debug("%s found object: %s", op, db_model)
permission_type = PermissionType.POLICY_DELETE
rbac_utils = get_rbac_backend().get_utils_class()
rbac_utils.assert_user_has_resource_db_permission(
user_db=requester_user,
resource_db=db_model,
permission_type=permission_type,
)
try:
validate_not_part_of_system_pack(db_model)
except ValueValidationException as e:
LOG.exception("%s unable to delete object from system pack.", op)
abort(http_client.BAD_REQUEST, six.text_type(e))
try:
self.access.delete(db_model)
except Exception as e:
LOG.exception("%s unable to delete object: %s", op, db_model)
abort(http_client.INTERNAL_SERVER_ERROR, six.text_type(e))
return
LOG.debug("%s deleted object: %s", op, db_model)
LOG.audit(
"Policy deleted. Policy.id=%s" % (db_model.id),
extra={"policy_db": db_model},
)
# return None
return Response(status=http_client.NO_CONTENT)
policy_type_controller = PolicyTypeController()
policy_controller = PolicyController()
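# Module-level controller singletons; the st2api routing layer is expected to
# dispatch /policytypes and /policies requests to these instances.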
|
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
# Copyright (c) 2018-2019 NVIDIA CORPORATION. All rights reserved.
import torch
from torch.nn import functional as F
from maskrcnn_benchmark.layers import smooth_l1_loss
from maskrcnn_benchmark.structures.bounding_box import BoxList
from maskrcnn_benchmark.modeling.box_coder import BoxCoder
from maskrcnn_benchmark.modeling.matcher import Matcher
from maskrcnn_benchmark.structures.boxlist_ops import boxlist_iou, boxlist_iou_batched
from maskrcnn_benchmark.modeling.balanced_positive_negative_sampler import (
BalancedPositiveNegativeSampler
)
from maskrcnn_benchmark.modeling.utils import cat
from torch.nn.utils.rnn import pad_sequence
class FastRCNNLossComputation(object):
"""
Computes the loss for Faster R-CNN.
Also supports FPN
"""
def __init__(
self,
proposal_matcher,
fg_bg_sampler,
box_coder,
cls_agnostic_bbox_reg=False
):
"""
Arguments:
proposal_matcher (Matcher)
fg_bg_sampler (BalancedPositiveNegativeSampler)
box_coder (BoxCoder)
"""
self.proposal_matcher = proposal_matcher
self.fg_bg_sampler = fg_bg_sampler
self.box_coder = box_coder
self.cls_agnostic_bbox_reg = cls_agnostic_bbox_reg
def match_targets_to_proposals(self, proposal, target):
match_quality_matrix = boxlist_iou(target, proposal)
matched_idxs = self.proposal_matcher(match_quality_matrix)
# Fast RCNN only need "labels" field for selecting the targets
target = target.copy_with_fields("labels")
# get the targets corresponding GT for each proposal
# NB: need to clamp the indices because we can have a single
# GT in the image, and matched_idxs can be -2, which goes
# out of bounds
matched_targets = target[matched_idxs.clamp(min=0)]
matched_targets.add_field("matched_idxs", matched_idxs)
return matched_targets
def match_targets_to_proposals_batched(self, proposal, target):
match_quality_matrix = boxlist_iou_batched(target, proposal)
matched_idxs = self.proposal_matcher(match_quality_matrix, batched=1)
# Fast RCNN only need "labels" field for selecting the targets
# how to do this for batched case?
# target = target.copy_with_fields("labels")
return matched_idxs
def prepare_targets(self, proposals, targets):
labels = []
regression_targets = []
matched_idxs = []
for proposals_per_image, targets_per_image in zip(proposals, targets):
matched_targets_per_image = self.match_targets_to_proposals(
proposals_per_image, targets_per_image
)
matched_idxs_per_image = matched_targets_per_image.get_field("matched_idxs")
labels_per_image = matched_targets_per_image.get_field("labels")
labels_per_image = labels_per_image.to(dtype=torch.int64)
# Label background (below the low threshold)
bg_inds = matched_idxs_per_image == Matcher.BELOW_LOW_THRESHOLD
labels_per_image.masked_fill_(bg_inds, 0)
# Label ignore proposals (between low and high thresholds)
ignore_inds = matched_idxs_per_image == Matcher.BETWEEN_THRESHOLDS
            labels_per_image.masked_fill_(ignore_inds, -1)  # -1 is ignored by sampler
# compute regression targets
regression_targets_per_image = self.box_coder.encode(
matched_targets_per_image.bbox, proposals_per_image.bbox
)
labels.append(labels_per_image)
regression_targets.append(regression_targets_per_image)
matched_idxs.append(matched_idxs_per_image)
return labels, regression_targets, matched_idxs
def prepare_targets_batched(self, proposals, targets, target_labels):
num_images = proposals.size(0)
matched_idxs = self.match_targets_to_proposals_batched(proposals, targets)
img_idx = torch.arange(num_images, device = proposals.device)[:, None]
labels = target_labels[img_idx, matched_idxs.clamp(min=0)]
labels = labels.to(dtype=torch.int64)
bg_inds = matched_idxs == Matcher.BELOW_LOW_THRESHOLD
labels.masked_fill_(bg_inds, 0)
ignore_inds = matched_idxs == Matcher.BETWEEN_THRESHOLDS
labels.masked_fill_(ignore_inds, -1)
matched_targets = targets[img_idx, matched_idxs.clamp(min=0)]
regression_targets = self.box_coder.encode(
matched_targets.view(-1,4), proposals.view(-1,4)
)
return labels, regression_targets.view(num_images, -1, 4), matched_idxs
def subsample(self, proposals, targets):
"""
This method performs the positive/negative sampling, and return
the sampled proposals.
Note: this function keeps a state.
Arguments:
proposals (list[BoxList])
targets (list[BoxList])
"""
num_images = len(proposals[0])
target_boxes = pad_sequence([target.bbox for target in targets], batch_first = True, padding_value=-1)
target_labels = pad_sequence([target.get_field("labels") for target in targets], batch_first = True, padding_value = -1)
prop_boxes, prop_scores, image_sizes = proposals[0], proposals[1], proposals[2]
labels, regression_targets, matched_idxs = self.prepare_targets_batched(prop_boxes, target_boxes, target_labels)
# scores is used as a mask, -1 means box is invalid
if num_images == 1:
sampled_pos_inds, sampled_neg_inds = self.fg_bg_sampler(labels, is_rpn=0, objectness=prop_scores)
# when num_images=1, sampled pos inds only has 1 item, so avoid copy in torch.cat
pos_inds_per_image = [torch.nonzero(sampled_pos_inds[0]).squeeze(1)]
neg_inds_per_image = [torch.nonzero(sampled_neg_inds[0]).squeeze(1)]
else:
sampled_pos_inds, sampled_neg_inds, num_pos_samples, num_neg_samples = self.fg_bg_sampler(labels, is_rpn=0, objectness=prop_scores)
pos_inds_per_image = sampled_pos_inds.split(list(num_pos_samples))
neg_inds_per_image = sampled_neg_inds.split(list(num_neg_samples))
prop_boxes = prop_boxes.view(-1,4)
regression_targets = regression_targets.view(-1,4)
labels = labels.view(-1)
matched_idxs = matched_idxs.view(-1)
result_proposals = []
for i in range(num_images):
inds = torch.cat([pos_inds_per_image[i], neg_inds_per_image[i]])
box = BoxList(prop_boxes[inds], image_size = image_sizes[i])
box.add_field("matched_idxs", matched_idxs[inds])
box.add_field("regression_targets", regression_targets[inds])
box.add_field("labels", labels[inds])
result_proposals.append(box)
self._proposals = result_proposals
return result_proposals
def __call__(self, class_logits, box_regression):
"""
Computes the loss for Faster R-CNN.
This requires that the subsample method has been called beforehand.
Arguments:
class_logits (list[Tensor])
box_regression (list[Tensor])
Returns:
classification_loss (Tensor)
box_loss (Tensor)
"""
class_logits = cat(class_logits, dim=0)
box_regression = cat(box_regression, dim=0)
device = class_logits.device
if not hasattr(self, "_proposals"):
raise RuntimeError("subsample needs to be called before")
proposals = self._proposals
labels = cat([proposal.get_field("labels") for proposal in proposals], dim=0)
regression_targets = cat(
[proposal.get_field("regression_targets") for proposal in proposals], dim=0
)
classification_loss = F.cross_entropy(class_logits, labels)
# get indices that correspond to the regression targets for
# the corresponding ground truth labels, to be used with
# advanced indexing
sampled_pos_inds_subset = torch.nonzero(labels > 0).squeeze(1)
labels_pos = labels.index_select(0, sampled_pos_inds_subset)
if self.cls_agnostic_bbox_reg:
map_inds = torch.tensor([4, 5, 6, 7], device=device)
else:
map_inds = 4 * labels_pos[:, None] + torch.tensor(
[0, 1, 2, 3], device=device)
index_select_indices=((sampled_pos_inds_subset[:,None]) * box_regression.size(1) + map_inds).view(-1)
box_regression_sampled=box_regression.view(-1).index_select(0, index_select_indices).view(map_inds.shape[0],
map_inds.shape[1])
regression_targets_sampled = regression_targets.index_select(0, sampled_pos_inds_subset)
box_loss = smooth_l1_loss(
box_regression_sampled,
regression_targets_sampled,
size_average=False,
beta=1,
)
box_loss = box_loss / labels.numel()
return classification_loss, box_loss
def make_roi_box_loss_evaluator(cfg):
matcher = Matcher(
cfg.MODEL.ROI_HEADS.FG_IOU_THRESHOLD,
cfg.MODEL.ROI_HEADS.BG_IOU_THRESHOLD,
allow_low_quality_matches=False,
)
bbox_reg_weights = cfg.MODEL.ROI_HEADS.BBOX_REG_WEIGHTS
box_coder = BoxCoder(weights=bbox_reg_weights)
fg_bg_sampler = BalancedPositiveNegativeSampler(
cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE, cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION
)
cls_agnostic_bbox_reg = cfg.MODEL.CLS_AGNOSTIC_BBOX_REG
loss_evaluator = FastRCNNLossComputation(
matcher,
fg_bg_sampler,
box_coder,
cls_agnostic_bbox_reg
)
return loss_evaluator
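# Illustrative usage of the evaluator above (a sketch; assumes a configured
# maskrcnn_benchmark `cfg` plus per-batch proposals, targets, class_logits and
# box_regression are available):
#   loss_evaluator = make_roi_box_loss_evaluator(cfg)
#   proposals = loss_evaluator.subsample(proposals, targets)  # stores state
#   cls_loss, box_loss = loss_evaluator(class_logits, box_regression)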
|
|
# -*- coding: utf-8 -*-
'''
Interaction with Mercurial repositories
=======================================
Before using hg over ssh, make sure the remote host fingerprint already exists
in ~/.ssh/known_hosts, and the remote host has this host's public key.
.. code-block:: yaml
https://bitbucket.org/example_user/example_repo:
hg.latest:
- rev: tip
- target: /tmp/example_repo
'''
# Import python libs
import logging
import os
import shutil
# Import salt libs
import salt.utils
from salt.states.git import _fail, _neutral_test
log = logging.getLogger(__name__)
if salt.utils.is_windows():
HG_BINARY = "hg.exe"
else:
HG_BINARY = "hg"
def __virtual__():
'''
Only load if hg is available
'''
return __salt__['cmd.has_exec'](HG_BINARY)
def latest(name,
rev=None,
target=None,
clean=False,
runas=None,
user=None,
force=False,
opts=False):
'''
Make sure the repository is cloned to the given directory and is up to date
name
Address of the remote repository as passed to "hg clone"
rev
The remote branch, tag, or revision hash to clone/pull
target
Name of the target directory where repository is about to be cloned
clean
Force a clean update with -C (Default: False)
runas
Name of the user performing repository management operations
.. deprecated:: 0.17.0
user
Name of the user performing repository management operations
        .. versionadded:: 0.17.0
force
Force hg to clone into pre-existing directories (deletes contents)
opts
Include additional arguments and options to the hg command line
'''
ret = {'name': name, 'result': True, 'comment': '', 'changes': {}, 'state_stdout': ''}
salt.utils.warn_until(
'Hydrogen',
'Please remove \'runas\' support at this stage. \'user\' support was '
'added in 0.17.0',
_dont_call_warnings=True
)
if runas:
# Warn users about the deprecation
ret.setdefault('warnings', []).append(
'The \'runas\' argument is being deprecated in favor of \'user\', '
'please update your state files.'
)
if user is not None and runas is not None:
# user wins over runas but let warn about the deprecation.
ret.setdefault('warnings', []).append(
'Passed both the \'runas\' and \'user\' arguments. Please don\'t. '
'\'runas\' is being ignored in favor of \'user\'.'
)
runas = None
elif runas is not None:
# Support old runas usage
user = runas
runas = None
if not target:
return _fail(ret, '"target option is required')
is_repository = (
os.path.isdir(target) and
os.path.isdir('{0}/.hg'.format(target)))
if is_repository:
ret = _update_repo(ret, target, clean, user, rev, opts)
else:
if os.path.isdir(target):
fail = _handle_existing(ret, target, force)
if fail is not None:
return fail
else:
log.debug(
'target {0} is not found, "hg clone" is required'.format(
target))
if __opts__['test']:
return _neutral_test(
ret,
'Repository {0} is about to be cloned to {1}'.format(
name, target))
_clone_repo(ret, target, name, user, rev, opts)
return ret
def _update_repo(ret, target, clean, user, rev, opts):
'''
Update the repo to a given revision. Using clean passes -C to the hg up
'''
log.debug(
'target {0} is found, '
'"hg pull && hg up is probably required"'.format(target)
)
current_rev = __salt__['hg.revision'](target, user=user, state_ret=ret)
if not current_rev:
return _fail(
ret,
'Seems that {0} is not a valid hg repo'.format(target))
if __opts__['test']:
test_result = (
'Repository {0} update is probably required (current '
'revision is {1})').format(target, current_rev)
return _neutral_test(
ret,
test_result)
pull_out = __salt__['hg.pull'](target, user=user, opts=opts, state_ret=ret)
if rev:
__salt__['hg.update'](target, rev, force=clean, user=user, state_ret=ret)
else:
__salt__['hg.update'](target, 'tip', force=clean, user=user, state_ret=ret)
new_rev = __salt__['hg.revision'](cwd=target, user=user, state_ret=ret)
if current_rev != new_rev:
revision_text = '{0} => {1}'.format(current_rev, new_rev)
log.info(
'Repository {0} updated: {1}'.format(
target, revision_text)
)
ret['comment'] = 'Repository {0} updated.'.format(target)
ret['changes']['revision'] = revision_text
elif 'error:' in pull_out:
return _fail(
ret,
'An error was thrown by hg:\n{0}'.format(pull_out)
)
return ret
def _handle_existing(ret, target, force):
not_empty = os.listdir(target)
if not not_empty:
log.debug(
'target {0} found, but directory is empty, automatically '
'deleting'.format(target))
shutil.rmtree(target)
elif force:
log.debug(
'target {0} found and is not empty. Since force option is'
' in use, deleting anyway.'.format(target))
shutil.rmtree(target)
else:
return _fail(ret, 'Directory exists, and is not empty')
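# Note: _handle_existing returns None when the pre-existing target directory was
# removed (empty directory, or force=True) and a failure dict otherwise; latest()
# only aborts the clone when it receives a non-None value.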
def _clone_repo(ret, target, name, user, rev, opts):
result = __salt__['hg.clone'](target, name, user=user, opts=opts)
if not os.path.isdir(target):
return _fail(ret, result)
if rev:
__salt__['hg.update'](target, rev, user=user, state_ret=ret)
new_rev = __salt__['hg.revision'](cwd=target, user=user, state_ret=ret)
message = 'Repository {0} cloned to {1}'.format(name, target)
log.info(message)
ret['comment'] = message
ret['changes']['new'] = name
ret['changes']['revision'] = new_rev
return ret
|
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import pipes
import sys
import time
from telemetry.core import exceptions
from telemetry.core import forwarders
from telemetry.core import util
from telemetry.core.backends import adb_commands
from telemetry.core.backends import browser_backend
from telemetry.core.backends.chrome import chrome_browser_backend
from telemetry.core.platform import android_platform_backend as \
android_platform_backend_module
from telemetry.core.forwarders import android_forwarder
util.AddDirToPythonPath(util.GetChromiumSrcDir(), 'build', 'android')
from pylib.device import device_errors # pylint: disable=F0401
from pylib.device import intent # pylint: disable=F0401
class AndroidBrowserBackendSettings(object):
def __init__(self, activity, cmdline_file, package, pseudo_exec_name,
supports_tab_control):
self.activity = activity
self._cmdline_file = cmdline_file
self.package = package
self.pseudo_exec_name = pseudo_exec_name
self.supports_tab_control = supports_tab_control
def GetCommandLineFile(self, is_user_debug_build): # pylint: disable=W0613
return self._cmdline_file
def GetDevtoolsRemotePort(self, adb):
raise NotImplementedError()
@property
def profile_ignore_list(self):
# Don't delete lib, since it is created by the installer.
return ['lib']
class ChromeBackendSettings(AndroidBrowserBackendSettings):
# Stores a default Preferences file, re-used to speed up "--page-repeat".
_default_preferences_file = None
def GetCommandLineFile(self, is_user_debug_build):
if is_user_debug_build:
return '/data/local/tmp/chrome-command-line'
else:
return '/data/local/chrome-command-line'
def __init__(self, package):
super(ChromeBackendSettings, self).__init__(
activity='com.google.android.apps.chrome.Main',
cmdline_file=None,
package=package,
pseudo_exec_name='chrome',
supports_tab_control=True)
def GetDevtoolsRemotePort(self, adb):
return 'localabstract:chrome_devtools_remote'
class ContentShellBackendSettings(AndroidBrowserBackendSettings):
def __init__(self, package):
super(ContentShellBackendSettings, self).__init__(
activity='org.chromium.content_shell_apk.ContentShellActivity',
cmdline_file='/data/local/tmp/content-shell-command-line',
package=package,
pseudo_exec_name='content_shell',
supports_tab_control=False)
def GetDevtoolsRemotePort(self, adb):
return 'localabstract:content_shell_devtools_remote'
class ChromeShellBackendSettings(AndroidBrowserBackendSettings):
def __init__(self, package):
super(ChromeShellBackendSettings, self).__init__(
activity='org.chromium.chrome.shell.ChromeShellActivity',
cmdline_file='/data/local/tmp/chrome-shell-command-line',
package=package,
pseudo_exec_name='chrome_shell',
supports_tab_control=False)
def GetDevtoolsRemotePort(self, adb):
return 'localabstract:chrome_shell_devtools_remote'
class WebviewBackendSettings(AndroidBrowserBackendSettings):
def __init__(self, package,
activity='org.chromium.telemetry_shell.TelemetryActivity',
cmdline_file='/data/local/tmp/webview-command-line'):
super(WebviewBackendSettings, self).__init__(
activity=activity,
cmdline_file=cmdline_file,
package=package,
pseudo_exec_name='webview',
supports_tab_control=False)
def GetDevtoolsRemotePort(self, adb):
    # The DevTools socket name for WebView depends on the activity's PID.
retries = 0
timeout = 1
pid = None
while True:
pids = adb.ExtractPid(self.package)
if len(pids) > 0:
pid = pids[-1]
break
time.sleep(timeout)
retries += 1
timeout *= 2
if retries == 4:
logging.critical('android_browser_backend: Timeout while waiting for '
'activity %s:%s to come up',
self.package,
self.activity)
raise exceptions.BrowserGoneException(self.browser,
'Timeout waiting for PID.')
return 'localabstract:webview_devtools_remote_%s' % str(pid)
class WebviewShellBackendSettings(WebviewBackendSettings):
def __init__(self, package):
super(WebviewShellBackendSettings, self).__init__(
activity='org.chromium.android_webview.shell.AwShellActivity',
cmdline_file='/data/local/tmp/android-webview-command-line',
package=package)
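# Each *BackendSettings class above pins down the activity name, command-line
# file location and DevTools socket naming convention for one Android browser
# flavour; AndroidBrowserBackend below is parameterised with one such instance.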
class AndroidBrowserBackend(chrome_browser_backend.ChromeBrowserBackend):
"""The backend for controlling a browser instance running on Android."""
def __init__(self, android_platform_backend, browser_options,
backend_settings, use_rndis_forwarder, output_profile_path,
extensions_to_load, target_arch):
assert isinstance(android_platform_backend,
android_platform_backend_module.AndroidPlatformBackend)
super(AndroidBrowserBackend, self).__init__(
android_platform_backend,
supports_tab_control=backend_settings.supports_tab_control,
supports_extensions=False, browser_options=browser_options,
output_profile_path=output_profile_path,
extensions_to_load=extensions_to_load)
if len(extensions_to_load) > 0:
raise browser_backend.ExtensionsNotSupportedException(
'Android browser does not support extensions.')
# Initialize fields so that an explosion during init doesn't break in Close.
self._backend_settings = backend_settings
self._saved_cmdline = ''
self._target_arch = target_arch
self._saved_sslflag = ''
# TODO(tonyg): This is flaky because it doesn't reserve the port that it
# allocates. Need to fix this.
self._port = adb_commands.AllocateTestServerPort()
# TODO(wuhu): Move to network controller backend.
self.platform_backend.InstallTestCa()
# Kill old browser.
self._KillBrowser()
if self._adb.device().old_interface.CanAccessProtectedFileContents():
if self.browser_options.profile_dir:
self.platform_backend.PushProfile(
self._backend_settings.package,
self.browser_options.profile_dir)
elif not self.browser_options.dont_override_profile:
self.platform_backend.RemoveProfile(
self._backend_settings.package,
self._backend_settings.profile_ignore_list)
self._forwarder_factory = android_forwarder.AndroidForwarderFactory(
self._adb, use_rndis_forwarder)
if self.browser_options.netsim or use_rndis_forwarder:
assert use_rndis_forwarder, 'Netsim requires RNDIS forwarding.'
self.wpr_port_pairs = forwarders.PortPairs(
http=forwarders.PortPair(0, 80),
https=forwarders.PortPair(0, 443),
dns=forwarders.PortPair(0, 53))
# Set the debug app if needed.
self.platform_backend.SetDebugApp(self._backend_settings.package)
@property
def _adb(self):
return self.platform_backend.adb
def _KillBrowser(self):
self.platform_backend.KillApplication(self._backend_settings.package)
def _SetUpCommandLine(self):
def QuoteIfNeeded(arg):
# Properly escape "key=valueA valueB" to "key='valueA valueB'"
# Values without spaces, or that seem to be quoted are left untouched.
# This is required so CommandLine.java can parse valueB correctly rather
# than as a separate switch.
params = arg.split('=', 1)
if len(params) != 2:
return arg
key, values = params
if ' ' not in values:
return arg
if values[0] in '"\'' and values[-1] == values[0]:
return arg
return '%s=%s' % (key, pipes.quote(values))
args = [self._backend_settings.pseudo_exec_name]
args.extend(self.GetBrowserStartupArgs())
content = ' '.join(QuoteIfNeeded(arg) for arg in args)
cmdline_file = self._backend_settings.GetCommandLineFile(
self._adb.IsUserBuild())
try:
# Save the current command line to restore later, except if it appears to
# be a Telemetry created one. This is to prevent a common bug where
# --host-resolver-rules borks people's browsers if something goes wrong
# with Telemetry.
self._saved_cmdline = ''.join(self._adb.device().ReadFile(cmdline_file))
if '--host-resolver-rules' in self._saved_cmdline:
self._saved_cmdline = ''
self._adb.device().WriteFile(cmdline_file, content, as_root=True)
except device_errors.CommandFailedError:
logging.critical('Cannot set Chrome command line. '
'Fix this by flashing to a userdebug build.')
sys.exit(1)
def _RestoreCommandLine(self):
cmdline_file = self._backend_settings.GetCommandLineFile(
self._adb.IsUserBuild())
self._adb.device().WriteFile(cmdline_file, self._saved_cmdline,
as_root=True)
def Start(self):
self._SetUpCommandLine()
self._adb.device().RunShellCommand('logcat -c')
if self.browser_options.startup_url:
url = self.browser_options.startup_url
elif self.browser_options.profile_dir:
url = None
else:
# If we have no existing tabs start with a blank page since default
# startup with the NTP can lead to race conditions with Telemetry
url = 'about:blank'
self.platform_backend.DismissCrashDialogIfNeeded()
self._adb.device().StartActivity(
intent.Intent(package=self._backend_settings.package,
activity=self._backend_settings.activity,
action=None, data=url, category=None),
blocking=True)
self.platform_backend.ForwardHostToDevice(
self._port, self._backend_settings.GetDevtoolsRemotePort(self._adb))
try:
self._WaitForBrowserToComeUp()
except exceptions.BrowserGoneException:
logging.critical('Failed to connect to browser.')
if not self._adb.device().old_interface.CanAccessProtectedFileContents():
logging.critical(
'Resolve this by either: '
'(1) Flashing to a userdebug build OR '
'(2) Manually enabling web debugging in Chrome at '
'Settings > Developer tools > Enable USB Web debugging.')
sys.exit(1)
except:
import traceback
traceback.print_exc()
self.Close()
raise
finally:
self._RestoreCommandLine()
def GetBrowserStartupArgs(self):
args = super(AndroidBrowserBackend, self).GetBrowserStartupArgs()
args.append('--enable-remote-debugging')
args.append('--disable-fre')
args.append('--disable-external-intent-requests')
return args
@property
def forwarder_factory(self):
return self._forwarder_factory
@property
def adb(self):
return self._adb
@property
def pid(self):
pids = self._adb.ExtractPid(self._backend_settings.package)
if not pids:
raise exceptions.BrowserGoneException(self.browser)
return int(pids[0])
@property
def browser_directory(self):
return None
@property
def profile_directory(self):
return self._backend_settings.profile_dir
@property
def package(self):
return self._backend_settings.package
@property
def activity(self):
return self._backend_settings.activity
def __del__(self):
self.Close()
def Close(self):
super(AndroidBrowserBackend, self).Close()
self.platform_backend.RemoveTestCa()
self._KillBrowser()
if self._output_profile_path:
self.platform_backend.PullProfile(
self._backend_settings.package, self._output_profile_path)
def IsBrowserRunning(self):
return self.platform_backend.IsAppRunning(self._backend_settings.package)
def GetRemotePort(self, local_port):
return local_port
def GetStandardOutput(self):
return self.platform_backend.GetStandardOutput()
def GetStackTrace(self):
return self.platform_backend.GetStackTrace(self._target_arch)
@property
def should_ignore_certificate_errors(self):
return not self.platform_backend.is_test_ca_installed
|
|
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import six
from six.moves import urllib
from swift.common import bufferedhttp
from swift.common import exceptions
from swift.common import http
def encode_missing(object_hash, ts_data, ts_meta=None, ts_ctype=None):
"""
Returns a string representing the object hash, its data file timestamp
and the delta forwards to its metafile and content-type timestamps, if
non-zero, in the form:
``<hash> <ts_data> [m:<hex delta to ts_meta>[,t:<hex delta to ts_ctype>]]``
The decoder for this line is
:py:func:`~swift.obj.ssync_receiver.decode_missing`
"""
msg = ('%s %s'
% (urllib.parse.quote(object_hash),
urllib.parse.quote(ts_data.internal)))
if ts_meta and ts_meta != ts_data:
delta = ts_meta.raw - ts_data.raw
msg = '%s m:%x' % (msg, delta)
if ts_ctype and ts_ctype != ts_data:
delta = ts_ctype.raw - ts_data.raw
msg = '%s,t:%x' % (msg, delta)
return msg
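# For example, an object with only a data file encodes as "<hash> <ts_data>";
# if a newer .meta file exists, " m:<hex delta>" is appended, and a newer
# content-type timestamp additionally appends ",t:<hex delta>" (both deltas are
# taken relative to the data file timestamp, as computed above).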
def decode_wanted(parts):
"""
Parse missing_check line parts to determine which parts of local
diskfile were wanted by the receiver.
The encoder for parts is
:py:func:`~swift.obj.ssync_receiver.encode_wanted`
"""
wanted = {}
key_map = dict(d='data', m='meta')
if parts:
# receiver specified data and/or meta wanted, so use those as
# conditions for sending PUT and/or POST subrequests
for k in key_map:
if k in parts[0]:
wanted[key_map[k]] = True
if not wanted:
# assume legacy receiver which will only accept PUTs. There is no
# way to send any meta file content without morphing the timestamp
# of either the data or the metadata, so we just send data file
# content to a legacy receiver. Once the receiver gets updated we
# will be able to send it the meta file content.
wanted['data'] = True
return wanted
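# For example, decode_wanted(['dm']) yields {'data': True, 'meta': True},
# decode_wanted(['d']) yields {'data': True}, and an empty parts list falls back
# to {'data': True} for legacy receivers that only understand PUTs.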
class Sender(object):
"""
Sends SSYNC requests to the object server.
These requests are eventually handled by
:py:mod:`.ssync_receiver` and full documentation about the
process is there.
"""
def __init__(self, daemon, node, job, suffixes, remote_check_objs=None):
self.daemon = daemon
self.df_mgr = self.daemon._diskfile_mgr
self.node = node
self.job = job
self.suffixes = suffixes
self.connection = None
self.response = None
self.response_buffer = ''
self.response_chunk_left = 0
# available_map has an entry for each object in given suffixes that
# is available to be sync'd; each entry is a hash => dict of timestamps
# of data file or tombstone file and/or meta file
self.available_map = {}
        # When remote_check_objs is given in job, ssync_sender tries only to
# make sure those objects exist or not in remote.
self.remote_check_objs = remote_check_objs
# send_map has an entry for each object that the receiver wants to
# be sync'ed; each entry maps an object hash => dict of wanted parts
self.send_map = {}
self.failures = 0
def __call__(self):
"""
Perform ssync with remote node.
:returns: a 2-tuple, in the form (success, can_delete_objs) where
success is a boolean and can_delete_objs is the map of
objects that are in sync with the receiver. Each entry in
can_delete_objs maps a hash => timestamp of data file or
tombstone file
"""
if not self.suffixes:
return True, {}
try:
# Double try blocks in case our main error handler fails.
try:
# The general theme for these functions is that they should
# raise exceptions.MessageTimeout for client timeouts and
# exceptions.ReplicationException for common issues that will
# abort the replication attempt and log a simple error. All
# other exceptions will be logged with a full stack trace.
self.connect()
self.missing_check()
if self.remote_check_objs is None:
self.updates()
can_delete_obj = self.available_map
else:
# when we are initialized with remote_check_objs we don't
# *send* any requested updates; instead we only collect
# what's already in sync and safe for deletion
in_sync_hashes = (set(self.available_map.keys()) -
set(self.send_map.keys()))
can_delete_obj = dict((hash_, self.available_map[hash_])
for hash_ in in_sync_hashes)
if not self.failures:
return True, can_delete_obj
else:
return False, {}
except (exceptions.MessageTimeout,
exceptions.ReplicationException) as err:
self.daemon.logger.error(
'%s:%s/%s/%s %s', self.node.get('replication_ip'),
self.node.get('replication_port'), self.node.get('device'),
self.job.get('partition'), err)
except Exception:
# We don't want any exceptions to escape our code and possibly
# mess up the original replicator code that called us since it
# was originally written to shell out to rsync which would do
# no such thing.
self.daemon.logger.exception(
'%s:%s/%s/%s EXCEPTION in ssync.Sender',
self.node.get('replication_ip'),
self.node.get('replication_port'),
self.node.get('device'), self.job.get('partition'))
finally:
self.disconnect()
except Exception:
# We don't want any exceptions to escape our code and possibly
# mess up the original replicator code that called us since it
# was originally written to shell out to rsync which would do
# no such thing.
# This particular exception handler does the minimal amount as it
# would only get called if the above except Exception handler
# failed (bad node or job data).
self.daemon.logger.exception('EXCEPTION in ssync.Sender')
return False, {}
def connect(self):
"""
Establishes a connection and starts an SSYNC request
with the object server.
"""
with exceptions.MessageTimeout(
self.daemon.conn_timeout, 'connect send'):
self.connection = bufferedhttp.BufferedHTTPConnection(
'%s:%s' % (self.node['replication_ip'],
self.node['replication_port']))
self.connection.putrequest('SSYNC', '/%s/%s' % (
self.node['device'], self.job['partition']))
self.connection.putheader('Transfer-Encoding', 'chunked')
self.connection.putheader('X-Backend-Storage-Policy-Index',
int(self.job['policy']))
# a sync job must use the node's index for the frag_index of the
# rebuilt fragments instead of the frag_index from the job which
# will be rebuilding them
frag_index = self.node.get('index', self.job.get('frag_index'))
if frag_index is None:
# replication jobs will not have a frag_index key;
# reconstructor jobs with only tombstones will have a
# frag_index key explicitly set to the value of None - in both
# cases on the wire we write the empty string which
# ssync_receiver will translate to None
frag_index = ''
self.connection.putheader('X-Backend-Ssync-Frag-Index',
frag_index)
# a revert job to a handoff will not have a node index
self.connection.putheader('X-Backend-Ssync-Node-Index',
self.node.get('index', ''))
self.connection.endheaders()
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'connect receive'):
self.response = self.connection.getresponse()
if self.response.status != http.HTTP_OK:
err_msg = self.response.read()[:1024]
raise exceptions.ReplicationException(
'Expected status %s; got %s (%s)' %
(http.HTTP_OK, self.response.status, err_msg))
def readline(self):
"""
Reads a line from the SSYNC response body.
httplib has no readline and will block on read(x) until x is
read, so we have to do the work ourselves. A bit of this is
taken from Python's httplib itself.
"""
data = self.response_buffer
self.response_buffer = ''
while '\n' not in data and len(data) < self.daemon.network_chunk_size:
if self.response_chunk_left == -1: # EOF-already indicator
break
if self.response_chunk_left == 0:
line = self.response.fp.readline()
i = line.find(';')
if i >= 0:
line = line[:i] # strip chunk-extensions
try:
self.response_chunk_left = int(line.strip(), 16)
except ValueError:
# close the connection as protocol synchronisation is
# probably lost
self.response.close()
raise exceptions.ReplicationException('Early disconnect')
if self.response_chunk_left == 0:
self.response_chunk_left = -1
break
chunk = self.response.fp.read(min(
self.response_chunk_left,
self.daemon.network_chunk_size - len(data)))
if not chunk:
# close the connection as protocol synchronisation is
# probably lost
self.response.close()
raise exceptions.ReplicationException('Early disconnect')
self.response_chunk_left -= len(chunk)
if self.response_chunk_left == 0:
self.response.fp.read(2) # discard the trailing \r\n
data += chunk
if '\n' in data:
data, self.response_buffer = data.split('\n', 1)
data += '\n'
return data
def missing_check(self):
"""
Handles the sender-side of the MISSING_CHECK step of a
SSYNC request.
Full documentation of this can be found at
:py:meth:`.Receiver.missing_check`.
"""
# First, send our list.
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'missing_check start'):
msg = ':MISSING_CHECK: START\r\n'
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
hash_gen = self.df_mgr.yield_hashes(
self.job['device'], self.job['partition'],
self.job['policy'], self.suffixes,
frag_index=self.job.get('frag_index'))
if self.remote_check_objs is not None:
hash_gen = six.moves.filter(
lambda path_objhash_timestamps:
path_objhash_timestamps[1] in
self.remote_check_objs, hash_gen)
for path, object_hash, timestamps in hash_gen:
self.available_map[object_hash] = timestamps
with exceptions.MessageTimeout(
self.daemon.node_timeout,
'missing_check send line'):
msg = '%s\r\n' % encode_missing(object_hash, **timestamps)
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'missing_check end'):
msg = ':MISSING_CHECK: END\r\n'
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
# Now, retrieve the list of what they want.
while True:
with exceptions.MessageTimeout(
self.daemon.http_timeout, 'missing_check start wait'):
line = self.readline()
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':MISSING_CHECK: START':
break
elif line:
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
while True:
with exceptions.MessageTimeout(
self.daemon.http_timeout, 'missing_check line wait'):
line = self.readline()
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':MISSING_CHECK: END':
break
parts = line.split()
if parts:
self.send_map[parts[0]] = decode_wanted(parts[1:])
def updates(self):
"""
Handles the sender-side of the UPDATES step of an SSYNC
request.
Full documentation of this can be found at
:py:meth:`.Receiver.updates`.
"""
# First, send all our subrequests based on the send_map.
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'updates start'):
msg = ':UPDATES: START\r\n'
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
for object_hash, want in self.send_map.items():
object_hash = urllib.parse.unquote(object_hash)
try:
df = self.df_mgr.get_diskfile_from_hash(
self.job['device'], self.job['partition'], object_hash,
self.job['policy'], frag_index=self.job.get('frag_index'))
except exceptions.DiskFileNotExist:
continue
url_path = urllib.parse.quote(
'/%s/%s/%s' % (df.account, df.container, df.obj))
try:
df.open()
if want.get('data'):
# EC reconstructor may have passed a callback to build an
# alternative diskfile - construct it using the metadata
# from the data file only.
df_alt = self.job.get(
'sync_diskfile_builder', lambda *args: df)(
self.job, self.node, df.get_datafile_metadata())
self.send_put(url_path, df_alt)
if want.get('meta') and df.data_timestamp != df.timestamp:
self.send_post(url_path, df)
except exceptions.DiskFileDeleted as err:
if want.get('data'):
self.send_delete(url_path, err.timestamp)
except exceptions.DiskFileError:
# DiskFileErrors are expected while opening the diskfile,
# before any data is read and sent. Since there is no partial
# state on the receiver it's ok to ignore this diskfile and
# continue. The diskfile may however be deleted after a
# successful ssync since it remains in the send_map.
pass
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'updates end'):
msg = ':UPDATES: END\r\n'
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
# Now, read their response for any issues.
while True:
with exceptions.MessageTimeout(
self.daemon.http_timeout, 'updates start wait'):
line = self.readline()
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':UPDATES: START':
break
elif line:
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
while True:
with exceptions.MessageTimeout(
self.daemon.http_timeout, 'updates line wait'):
line = self.readline()
if not line:
raise exceptions.ReplicationException('Early disconnect')
line = line.strip()
if line == ':UPDATES: END':
break
elif line:
raise exceptions.ReplicationException(
'Unexpected response: %r' % line[:1024])
def send_delete(self, url_path, timestamp):
"""
Sends a DELETE subrequest with the given information.
"""
msg = ['DELETE ' + url_path, 'X-Timestamp: ' + timestamp.internal]
msg = '\r\n'.join(msg) + '\r\n\r\n'
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'send_delete'):
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
def send_put(self, url_path, df):
"""
        Sends a PUT subrequest for the url_path using the source df
        (DiskFile); the Content-Length header is taken from the diskfile.
"""
msg = ['PUT ' + url_path, 'Content-Length: ' + str(df.content_length)]
# Sorted to make it easier to test.
for key, value in sorted(df.get_datafile_metadata().items()):
if key not in ('name', 'Content-Length'):
msg.append('%s: %s' % (key, value))
msg = '\r\n'.join(msg) + '\r\n\r\n'
with exceptions.MessageTimeout(self.daemon.node_timeout, 'send_put'):
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
bytes_read = 0
for chunk in df.reader():
bytes_read += len(chunk)
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'send_put chunk'):
self.connection.send('%x\r\n%s\r\n' % (len(chunk), chunk))
if bytes_read != df.content_length:
# Since we may now have partial state on the receiver we have to
# prevent the receiver finalising what may well be a bad or
# partially written diskfile. Unfortunately we have no other option
# than to pull the plug on this ssync session. If ssync supported
# multiphase PUTs like the proxy uses for EC we could send a bad
# etag in a footer of this subrequest, but that is not supported.
raise exceptions.ReplicationException(
'Sent data length does not match content-length')
def send_post(self, url_path, df):
metadata = df.get_metafile_metadata()
if metadata is None:
return
msg = ['POST ' + url_path]
# Sorted to make it easier to test.
for key, value in sorted(metadata.items()):
msg.append('%s: %s' % (key, value))
msg = '\r\n'.join(msg) + '\r\n\r\n'
with exceptions.MessageTimeout(self.daemon.node_timeout, 'send_post'):
self.connection.send('%x\r\n%s\r\n' % (len(msg), msg))
def disconnect(self):
"""
Closes down the connection to the object server once done
with the SSYNC request.
"""
if not self.connection:
return
try:
with exceptions.MessageTimeout(
self.daemon.node_timeout, 'disconnect'):
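                # A zero-length chunk terminates the chunked transfer encoding,
                # signalling the receiver that the SSYNC stream is complete.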
self.connection.send('0\r\n\r\n')
except (Exception, exceptions.Timeout):
pass # We're okay with the above failing.
self.connection.close()
|
|
import luigi
import subprocess
import glob
import os
import sys
from time import strftime, gmtime
#############################################################################################################
#This pipeline automates genome downloading and processing from CGHub. The bottom of the file is the head of the graph.
#############################################################################################################
__author__ = "David Liu"
__version__ = 1.0
"""
Global variables
"""
#Configure things here.
output_dir = os.path.abspath("./all_outputs")
download_dir = os.path.abspath("./all_downloads")
subprocess.call(["mkdir", output_dir])
subprocess.call(["mkdir", download_dir])
pipeline_output = luigi.Parameter(is_global = True, default = output_dir)
pipeline_downloads = luigi.Parameter(is_global = True, default = download_dir)
#############################################################################################################
#Deletes the BAM files after everything is finished.
#############################################################################################################
class deleteBAMFiles(luigi.Task):
prefix = luigi.Parameter()
pipeline_output = pipeline_output
pipeline_downloads = pipeline_downloads
time_began = strftime("Time began: %a, %d %b %Y %H:%M:%S", gmtime())
this_download_dir = ""
this_out_dir = ""
def requires(self):
self.this_out_dir = os.path.join(self.pipeline_output, self.prefix)
self.this_download_dir = os.path.join(self.pipeline_downloads, self.prefix)
subprocess.call(["mkdir", self.this_out_dir])
subprocess.call(["mkdir", self.this_download_dir])
return {"RunTHetA":RunTHetA(prefix = self.prefix, out_dir = self.this_out_dir, download_dir = self.this_download_dir),
"virtualSNPArray": virtualSNPArray(prefix = self.prefix, out_dir = self.this_out_dir, download_dir = self.this_download_dir),
"intervalCountingPipeline": intervalCountingPipeline(prefix = self.prefix, out_dir = self.this_out_dir, download_dir = self.this_download_dir)}
def run(self):
# subprocess.call(["rm", "-rf", self.this_download_dir])
file = open(os.path.join(self.this_out_dir, "job_summary.txt"), "w")
file.write("Sample barcode: " + self.prefix + "\n")
file.write("Sample analysis_uri_id " + prefix_to_id[self.prefix] + "\n")
file.write(self.time_began + "\n")
file.write(strftime("Time finished: %a, %d %b %Y %H:%M:%S", gmtime()))
file.close()
def output(self):
return luigi.LocalTarget(os.path.join(self.this_out_dir, "job_summary.txt"))
#############################################################################################################
#The virtualSNPArray depends on the intervalCountingPipeline and the SNP filter.
#############################################################################################################
class virtualSNPArray(luigi.Task):
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
self.this_out_dir = os.path.join(self.out_dir, "SNP_array")
subprocess.call(["mkdir", self.this_out_dir])
return {'countAndFilterSNPs': countAndFilterSNPs(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir),
'intervalCountingPipeline': intervalCountingPipeline(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir)}
def run(self):
#run theta
subprocess.call(["touch", os.path.join(self.this_out_dir, "virtualSNPArray.txt")])
def output(self):
return luigi.LocalTarget(os.path.join(self.this_out_dir, "virtualSNPArray.txt"))
#############################################################################################################
#RunTHetA depends on the intervalCountingPipeline and BICSeq.
#############################################################################################################
class RunTHetA(luigi.Task):
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
        theta_dir = os.path.join(self.out_dir, "THetA")
        subprocess.call(["mkdir", theta_dir])
        subprocess.call(["mkdir", os.path.join(theta_dir, "THetA_input")])
        self.this_out_dir = theta_dir
return {'BICSeq': BICSeq(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir),
'intervalCountingPipeline': intervalCountingPipeline(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir)}
def run(self):
#Get bicseg location
        bicseq_output_loc = os.path.join(self.out_dir, "BICSeq/output", self.prefix + ".bicseg")
subprocess.call(["./pipeline/scripts/RunTHetA.sh", bicseq_output_loc, self.prefix, self.this_out_dir, READ_DEPTH_FILE_LOC])
subprocess.call(["touch", os.path.join(self.this_out_dir, "THetA_complete.txt")])
def output(self):
return luigi.LocalTarget(os.path.join(self.this_out_dir, "THetA_complete.txt"))
#############################################################################################################
#Define the main workflow dependency graph.
#############################################################################################################
#############################################################################################################
#Once the BAM file is downloaded, we run three processes. The intervalCountingPipeline, BAMtoGASV, and count SNPs
#############################################################################################################
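# Dependency graph (arrows point from prerequisite to dependent task):
#   downloadGenome -> BAMtoGASV -> BICSeq -> RunTHetA
#   downloadGenome -> countAndFilterSNPs -> virtualSNPArray
#   BAMtoGASV -> intervalCountingPipeline -> {RunTHetA, virtualSNPArray}
#   {RunTHetA, virtualSNPArray, intervalCountingPipeline} -> deleteBAMFiles -> TriggerAll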
"""
Java program that counts SNPs. Python code that filters them.
"""
class countAndFilterSNPs(luigi.Task):
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
snp_dir = os.path.join(self.out_dir, "SNP_array")
subprocess.call(["mkdir", snp_dir])
self.this_out_dir = os.path.join(snp_dir, "Processed_SNPs")
subprocess.call(["mkdir", self.this_out_dir])
return downloadGenome(self.prefix)
def run(self):
#Write the parameters file.
#Make the file
#Write SNP_FILE = path/to/snp/file
#OUTPUT_PREFIX = out_dir/output
#Write all bam files
# subprocess.call(["java - classpath SOMESTUFF"])
#Filter and grouping code
subprocess.call(["touch", os.path.join(self.this_out_dir, "countAndFilterSNPs.txt")])
def output(self):
# return glob.glob("OUT_DIR/output.withCounts")
return luigi.LocalTarget(os.path.join(self.this_out_dir, "countAndFilterSNPs.txt"))
"""
THetA stuff
"""
class BAMtoGASV(luigi.Task):
#Also runs GASV.
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
self.this_out_dir = os.path.join(self.out_dir, "BAMtoGASV_output")
subprocess.call(["mkdir", self.this_out_dir])
return downloadGenome(self.prefix)
def run(self):
#Run BAMtoGASV
        normal_dir = os.path.join(self.this_out_dir, "NORMAL")
        tumor_dir = os.path.join(self.this_out_dir, "TUMOR")
subprocess.call(["mkdir", normal_dir])
subprocess.call(["mkdir", tumor_dir])
#Run on normal
subprocess.call(["./pipeline/scripts/runBAMtoGASV.sh", normal_dir, PATHTONORMALBAMFILE, "NORMAL"])
#move files
# subprocess.call(["mv", "*.gasv.in", normal_dir])
# subprocess.call(["mv", "*.info", normal_dir])
# subprocess.call(["mv", "*.deletion", normal_dir])
# subprocess.call(["mv", "*.divergent", normal_dir])
# subprocess.call(["mv", "*.insertion", normal_dir])
# subprocess.call(["mv", "*.inversion", normal_dir])
# subprocess.call(["mv", "*.translocation", normal_dir])
# subprocess.call(["mv", "*.concordant", normal_dir])
#Run on tumor
subprocess.call(["./pipeline/scripts/runBAMtoGASV.sh", tumor_dir, PATHTOTUMORBAMFILE, "TUMOR"])
#Move files
# subprocess.call(["mv", "*.gasv.in", normal_dir])
# subprocess.call(["mv", "*.info", tumor_dir])
# subprocess.call(["mv", "*.deletion", tumor_dir])
# subprocess.call(["mv", "*.divergent", tumor_dir])
# subprocess.call(["mv", "*.insertion", tumor_dir])
# subprocess.call(["mv", "*.inversion", tumor_dir])
# subprocess.call(["mv", "*.translocation", tumor_dir])
# subprocess.call(["mv", "*.concordant", tumor_dir])
subprocess.call(["touch", os.path.join(self.this_out_dir, "BAMtoGASVfinished.txt")])
def output(self):
# return luigi.LocalTarget("path/to/output/stuffz")
return luigi.LocalTarget(os.path.join(self.this_out_dir, "BAMtoGASVfinished.txt"))
#BICSeq
class BICSeq(luigi.Task):
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
self.this_out_dir = os.path.join(self.out_dir, "BICSeq")
subprocess.call(["mkdir", self.this_out_dir])
return BAMtoGASV(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir)
def run(self):
# Takes concordant files as input
        normal_conc = os.path.join(self.out_dir, "BAMtoGASV_output", "NORMAL_", self.prefix + ".concordant")
        tumor_conc = os.path.join(self.out_dir, "BAMtoGASV_output", "TUMOR_", self.prefix + ".concordant")
        bicseq_input_loc = self.this_out_dir  # To be created
#Run script
subprocess.call(["./pipeline/scripts/runBICseq.sh", self.this_out_dir, tumor_conc, normal_conc, bicseq_input_loc, self.prefix])
#Remove input file
subprocess.call(["rm", "-f", os.path.join(this_out_dir, "*.input")])
#done
subprocess.call(["touch", os.path.join(self.this_out_dir, "BICSeqDone.txt")])
def output(self):
return luigi.LocalTarget(os.path.join(self.this_out_dir, "BICSeqDone.txt"))
# #Returns the interval files
# class BICSeqToTHetA(luigi.Task):
# prefix = luigi.Parameter()
# out_dir = luigi.Parameter()
# download_dir = luigi.Parameter()
# this_out_dir = ""
# def requires(self):
# theta_dir = os.path.join(self.out_dir, "THetA")
# self.this_out_dir = os.path.join(theta_dir, "THetA_input")
# subprocess.call(["mkdir", self.this_out_dir])
# return BICSeq(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir)
# def run(self):
# subprocess.call(["touch", os.path.join(self.this_out_dir, "BICSeqToTHetA.txt")])
# def output(self):
# return luigi.LocalTarget(os.path.join(self.this_out_dir, "BICSeqToTHetA.txt"))
"""
The C++ code that counts the 50kb bins.
"""
class intervalCountingPipeline(luigi.Task):
prefix = luigi.Parameter()
out_dir = luigi.Parameter()
download_dir = luigi.Parameter()
this_out_dir = ""
def requires(self):
return BAMtoGASV(prefix = self.prefix, out_dir = self.out_dir, download_dir = self.download_dir)
def run(self):
self.this_out_dir = os.path.join(self.out_dir, "intervalPipeline")
subprocess.call(["mkdir", self.this_out_dir])
        parameter_file_path = os.path.abspath(os.path.join(self.this_out_dir, "parameters.txt"))
with open(parameter_file_path, "w") as parameter_file:
#MAKE SURE YOU USE ABS PATHS!
parameter_file.write("IntervalFile: "+ os.path.abspath(os.path.join(self.this_out_dir, "intervals.txt")))
parameter_file.write("Software: PREGO")
parameter_file.write("ConcordantFile: " + NORMAL CONCORDANT FILE)
parameter_file.write("Concordantfile: " + TUMOR CONCORDANT FILE)
if subprocess.call(["./pipeline/scripts/runIntervalPipeline.sh", self.this_out_dir, parameter_file_path]) != 0:
sys.exit()
subprocess.call(["touch", os.path.join(self.this_out_dir, "intervalsDone.txt")])
def output(self):
#????????????????
return luigi.LocalTarget(os.path.join(self.this_out_dir, "intervalsDone.txt"))
# def complete(self):
# return True
#############################################################################################################
#Head of the stream. Download a file from CGHub.
#############################################################################################################
class downloadGenome(luigi.Task):
global samples
prefix = luigi.Parameter()
pipeline_downloads = pipeline_downloads
download_dir = ""
def run(self):
self.download_dir = os.path.join(self.pipeline_downloads, self.prefix)
normal_dir = os.path.join(self.download_dir, "NORMAL")
tumor_dir = os.path.join(self.download_dir, "TUMOR")
        subprocess.call(["mkdir", normal_dir])
        subprocess.call(["mkdir", tumor_dir])
#Download normal
# subprocess.call(["./CGHub/runGeneTorrentNew.bash", samples[prefix][normal_aurid], self.download_dir])
#Download tumor
# subprocess.call(["./CGHub/runGeneTorrentNew.bash", samples[prefix][tumor_aurid], self.download_dir])
subprocess.call(["touch", os.path.join(self.download_dir, self.prefix + "downloadComplete.txt")])
def output(self):
return luigi.LocalTarget(os.path.join(self.download_dir, self.prefix + "downloadComplete.txt"))
#############################################################################################################
#Run Pipeline
#############################################################################################################
class TriggerAll(luigi.Task):
global tasks_to_run
def requires(self):
for task in tasks_to_run:
yield task
def run(self):
subprocess.call(["touch", "pipelineComplete.txt"])
def output(self):
return luigi.LocalTarget("pipelineComplete.txt")
#Create tasks_to_run from the names in the file.
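# seq_info.txt is expected to hold one tab-separated record per sample:
#   <analysis_uri_id>\t<sample_barcode_prefix>
# Only the first seven records are used (the loop below breaks on the 8th line).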
tasks_to_run = []
global prefix_to_id
prefix_to_id = {}
with open("seq_info.txt", "r") as seq_file:
count = 0
    for line in seq_file:
count += 1
if count == 8:
break
line = line.strip()
components = line.split("\t")
#0: id, 1:prefix
#Add to the dictionary
try:
prefix_to_id[components[1]] = components[0]
tasks_to_run.append(deleteBAMFiles(components[1]))
except:
continue
#Make a dictionary for reference sequences
#Dictionary of prefix ->
# {
# norm_dir
# tumor_dir
# ref_assem
# norm_aurid
# tumor_aurid
# }
global samples
samples = {}  # placeholder: populate with the per-prefix reference/download info described above
# print tasks_to_run
luigi.build([TriggerAll()], workers=1)
|
|
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.modules.storage.netapp.netapp_e_iscsi_interface import IscsiInterface
from units.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
import mock
class IscsiInterfaceTest(ModuleTestCase):
REQUIRED_PARAMS = {
'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1',
'state': 'disabled',
'name': 1,
'controller': 'A',
}
REQ_FUNC = 'ansible.modules.storage.netapp.netapp_e_iscsi_interface.request'
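    # Dotted path of the module's request() helper; the tests patch it with
    # mock.patch so that no real HTTP calls reach the storage array's API.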
def _set_args(self, args=None):
module_args = self.REQUIRED_PARAMS.copy()
if args is not None:
module_args.update(args)
set_module_args(module_args)
def test_validate_params(self):
"""Ensure we can pass valid parameters to the module"""
# Provide a range of valid values for each
for controller in ['A', 'B']:
for i in range(1, 10):
for mtu in [1500, 2500, 9000]:
self._set_args(dict(
state='disabled',
name=i,
controller=controller,
mtu=mtu,
))
iface = IscsiInterface()
def test_invalid_params(self):
"""Ensure that our input validation catches invalid parameters"""
# Currently a 'C' controller is invalid
self._set_args(dict(
state='disabled',
name=1,
controller="C",
))
with self.assertRaises(AnsibleFailJson) as result:
iface = IscsiInterface()
# Each of these mtu values are invalid
for mtu in [500, 1499, 9001]:
self._set_args({
'state': 'disabled',
'name': 1,
'controller': 'A',
'mtu': mtu
})
with self.assertRaises(AnsibleFailJson) as result:
iface = IscsiInterface()
def test_interfaces(self):
"""Validate that we are processing the interface list properly"""
self._set_args()
interfaces = [
dict(interfaceType='iscsi',
iscsi=dict()),
dict(interfaceType='iscsi',
iscsi=dict()),
dict(interfaceType='fc', )
]
# Ensure we filter out anything without an interfaceType of iscsi
expected = [iface['iscsi'] for iface in interfaces if iface['interfaceType'] == 'iscsi']
# We expect a single call to the API: retrieve the list of interfaces from the objectGraph.
with mock.patch(self.REQ_FUNC, return_value=(200, interfaces)):
iface = IscsiInterface()
interfaces = iface.interfaces
self.assertEquals(interfaces, expected)
def test_interfaces_fail(self):
"""Ensure we fail gracefully on an error to retrieve the interfaces"""
self._set_args()
with self.assertRaises(AnsibleFailJson) as result:
# Simulate a failed call to the API
with mock.patch(self.REQ_FUNC, side_effect=Exception("Failure")):
iface = IscsiInterface()
interfaces = iface.interfaces
def test_fetch_target_interface_bad_channel(self):
"""Ensure we fail correctly when a bad channel is provided"""
self._set_args()
        interfaces = [dict(channel=1, controllerId='1')]
with self.assertRaisesRegexp(AnsibleFailJson, r".*?channels include.*"):
with mock.patch.object(IscsiInterface, 'interfaces', return_value=interfaces):
iface = IscsiInterface()
interfaces = iface.fetch_target_interface()
def test_make_update_body_dhcp(self):
"""Ensure the update body generates correctly for a transition from static to dhcp"""
self._set_args(dict(state='enabled',
config_method='dhcp')
)
iface = dict(id='1',
ipv4Enabled=False,
ipv4Data=dict(ipv4AddressData=dict(ipv4Address="0.0.0.0",
ipv4SubnetMask="0.0.0.0",
ipv4GatewayAddress="0.0.0.0", ),
ipv4AddressConfigMethod='configStatic', ),
interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ),
)
# Test a transition from static to dhcp
inst = IscsiInterface()
update, body = inst.make_update_body(iface)
self.assertTrue(update, msg="An update was expected!")
self.assertEquals(body['settings']['ipv4Enabled'][0], True)
self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configDhcp')
def test_make_update_body_static(self):
"""Ensure the update body generates correctly for a transition from dhcp to static"""
iface = dict(id='1',
ipv4Enabled=False,
ipv4Data=dict(ipv4AddressConfigMethod='configDhcp',
ipv4AddressData=dict(ipv4Address="0.0.0.0",
ipv4SubnetMask="0.0.0.0",
ipv4GatewayAddress="0.0.0.0", ), ),
interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ), )
self._set_args(dict(state='enabled',
config_method='static',
address='10.10.10.10',
subnet_mask='255.255.255.0',
gateway='1.1.1.1'))
inst = IscsiInterface()
update, body = inst.make_update_body(iface)
self.assertTrue(update, msg="An update was expected!")
self.assertEquals(body['settings']['ipv4Enabled'][0], True)
self.assertEquals(body['settings']['ipv4AddressConfigMethod'][0], 'configStatic')
self.assertEquals(body['settings']['ipv4Address'][0], '10.10.10.10')
self.assertEquals(body['settings']['ipv4SubnetMask'][0], '255.255.255.0')
self.assertEquals(body['settings']['ipv4GatewayAddress'][0], '1.1.1.1')
CONTROLLERS = dict(A='1', B='2')
def test_update_bad_controller(self):
"""Ensure a bad controller fails gracefully"""
self._set_args(dict(controller='B'))
inst = IscsiInterface()
with self.assertRaises(AnsibleFailJson) as result:
with mock.patch.object(inst, 'get_controllers', return_value=dict(A='1')) as get_controllers:
inst()
@mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
def test_update(self, get_controllers):
"""Validate the good path"""
self._set_args()
inst = IscsiInterface()
with self.assertRaises(AnsibleExitJson):
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
inst()
request.assert_called_once()
@mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
def test_update_not_required(self, get_controllers):
"""Ensure we don't trigger the update if one isn't required or if check mode is enabled"""
self._set_args()
# make_update_body will report that no change is required, so we should see no call to the API.
inst = IscsiInterface()
with self.assertRaises(AnsibleExitJson) as result:
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
with mock.patch.object(inst, 'make_update_body', return_value=(False, {})):
inst()
request.assert_not_called()
self.assertFalse(result.exception.args[0]['changed'], msg="No change was expected.")
# Since check_mode is enabled, we will run everything normally, but not make a request to the API
# to perform the actual change.
inst = IscsiInterface()
inst.check_mode = True
with self.assertRaises(AnsibleExitJson) as result:
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
inst()
request.assert_not_called()
self.assertTrue(result.exception.args[0]['changed'], msg="A change was expected.")
@mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
def test_update_fail_busy(self, get_controllers):
"""Ensure we fail correctly on receiving a busy response from the API."""
self._set_args()
inst = IscsiInterface()
with self.assertRaisesRegexp(AnsibleFailJson, r".*?busy.*") as result:
with mock.patch(self.REQ_FUNC, return_value=(422, dict(retcode="3"))) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
inst()
request.assert_called_once()
@mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
@mock.patch.object(IscsiInterface, 'make_update_body', return_value=(True, {}))
def test_update_fail(self, get_controllers, make_body):
"""Ensure we fail correctly on receiving a normal failure from the API."""
self._set_args()
inst = IscsiInterface()
# Test a 422 error with a non-busy status
with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
with mock.patch(self.REQ_FUNC, return_value=(422, mock.MagicMock())) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
inst()
request.assert_called_once()
# Test a 401 (authentication) error
with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
with mock.patch(self.REQ_FUNC, return_value=(401, mock.MagicMock())) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
inst()
request.assert_called_once()
# Test with a connection failure
with self.assertRaisesRegexp(AnsibleFailJson, r".*?Connection failure.*") as result:
with mock.patch(self.REQ_FUNC, side_effect=Exception()) as request:
with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
inst()
request.assert_called_once()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################
# Originally From https://github.com/mdipierro/kryten
# modified by fsp
# created by Massimo Di Pierro
# license: http://opensource.org/licenses/BSD-2-Clause
#
# Commands in shell:
#   SPACE   execute and next line
#   q       quit
#   p       previous
#   n       next without execute
#   x       quickly execute and move to next line
# Commands in editor:
#   SPACE   next line
#   UP, DOWN for moving highlighted line
#   q       quit
#   b       previous
#   n       next
#   s       save partial
#   x       quickly execute and exit
#   (i      interactive mode - not supported, sorry)
##############################################################
QUOTE1 = """
## ## ######## ## ## ######## ######## ## ##
## ## ## ## ## ## ## ## ### ##
## ## ## ## #### ## ## #### ##
##### ######## ## ## ###### ## ## ##
## ## ## ## ## ## ## ## ####
## ## ## ## ## ## ## ## ###
## ## ## ## ## ## ######## ## ##
> That's easy for you to say, Mr David, you're a human
"""
QUOTE2 = """
> Oh, it's not the end for me, sir, it's just the beginning.
> I have served my human masters, now I can look forward to my reward in silicon heaven.
"""
DEMO = """
# the line below should print hello world
print 'hello world'
# the line below should print 'hello world'
echo 'hello world'
# the line below should print 'hello world'
$ echo 'hello world'
# the lines below should be hidden but print 'hello world'
% this line should be hidden
## title
### paragraph
#### subparagraph
@@SHELL@@
echo 'these are hidden shell commands'
@@END@@
@@PYTHON@@
print 'this is hidden python code'
@@END@@
@@TEXT@@
this will be markmin code to be used verbatim
@@END@@
@@READ@@
this text must be read aloud
@@END@@
@@UPDATE@@ test.test.py
print 'hello world'
@@END@@
quit
"""
import platform
import difflib
import time
import os
import sys
import logging
import types
import re
import optparse
import glob
import traceback
import cStringIO
import pickle
import subprocess
import math
import random
import readline
import rlcompleter
import curses
import termios
import shutil
import fcntl
import struct
BORDER = 6
TEMPFILENAME = '/tmp/kryten.txt'
CORRECTIONS = {'web2py':'web-2-pie',
'(':', ',
')':', ',
'@':' at ',
'[':' of ',
'+':' plus ',
'-':' minus ',
'_':' underscore ',
'*':' times ', '/':' over ', '\\':' backslash ', '=':' equal ', '^':' to power ', '&':', and, ',
' sin ':' sign ',
' cos ':' cosign ',
' tan ':' tangent ',
' exp ':' exponential '}
regex_shell = re.compile('^[\w/]+\s*(?![,\=\[\(\:])')
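# Heuristic: a line is treated as a shell command if it starts with '/', '.',
# '~' or '$', or if it starts with a bare word that is not immediately followed
# by ',', '=', '[', '(' or ':' and is not a Python keyword. For example,
# "ls -la" is dispatched to the shell while "print 'hello'" and "x=1" are
# executed as Python.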
def is_shell_command(command):
    PYCOMMANDS = ('for','if','def','class','elif','else','pass','try','except','finally','from','import','print','raise','del','while','with','lambda','and','or','continue','break','yield','as','assert','exec','is','global','not')
if command[:1] in ('/','.','~','$'): return True
return regex_shell.match(command) and not command.split(' ')[0] in PYCOMMANDS
def say(text):
for key, value in CORRECTIONS.items(): text = text.replace(key,value)
text = text.replace('"','\\"').replace("'","\\'")
if platform.mac_ver()[0]:
command = 'say "%s"' % text
else: #assume linux
command = 'espeak -s 120 "%s"' % text
os.system(command)
def press_key():
fd = sys.stdin.fileno()
termios.tcflush(fd, termios.TCIFLUSH)
old = termios.tcgetattr(fd)
new = termios.tcgetattr(fd)
new[3] = new[3] & ~termios.ICANON & ~termios.ECHO
new[6][termios.VMIN] = 1
new[6][termios.VTIME] = 0
termios.tcsetattr(fd, termios.TCSANOW, new)
key = None
try:
key = os.read(fd, 1)
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, old)
return key
class Document:
def __init__(self, screen, filename, options={}):
self.filename = filename
code = open(filename,'r').read()
self.screen = screen
self.lines = code.split('\n')+['']
self.speak = options.speak
self.delay = options.delay
self.nonblock = options.nonblock
self.highlight = 0
self.before = 12
self.history = []
self.diff = []
def save(self,filename=None):
if not filename:
filename = self.filename
open(filename,'w').write('\n'.join(self.lines).strip())
def up(self):
self.moveto(self.highlight-1)
def down(self):
self.moveto(self.highlight+1)
def pause(self,n=1):
time.sleep(self.delay*random.random()*n)
def revert(self):
if len(self.history):
command,r,old = self.history[-1]
del self.history[-1]
self.moveto(r)
if command == 'delete_line':
self.lines.insert(r,old)
elif command == 'insert_line':
del self.lines[r]
else:
self.lines[r] = old
self.render()
def insert_line(self,r,text):
while len(self.lines)<r+1: self.lines.append('')
self.history.append(('insert_line',r,self.lines[r]))
self.moveto(r)
self.lines.insert(r,'')
self.type(r,0,text)
def insert_text(self,r,c,text):
while len(self.lines)<r+1: self.lines.append('')
self.history.append(('insert_text',r,self.lines[r]))
self.moveto(r)
self.type(r,c,text)
def type(self,r,c,text):
for k,x in enumerate(text):
line = self.lines[r]
pre = line[:c+k]
self.lines[r] = pre+' '*(c+k-len(pre))+x+line[c+k:]
self.render()
if text[:k].strip(): self.pause(2)
if text[k]==' ': self.pause(2)
if self.speak and text.lstrip()[:2]=='# ': say(text.lstrip()[2:])
def delete_line(self,r):
self.moveto(r)
self.history.append(('delete_line',r,self.lines[r]))
del self.lines[r]
self.render()
def delete_text(self,r,c,length):
self.moveto(r)
self.history.append(('delete_text',r,self.lines[r]))
for i in range(length):
line = self.lines[r]
self.lines[r] = line[:c]+line[c+1:]
self.render()
self.pause(1)
def moveto(self,r):
r = max(0,min(r,len(self.lines)-1))
while r>self.highlight:
self.highlight+=1
self.render()
self.pause(1)
while r<self.highlight:
self.highlight-=1
self.render()
self.pause(1)
def render(self):
screen = self.screen
screen.clear()
ROWS, COLS = screen.getmaxyx()
COLS = COLS - BORDER - 1
ROWS = ROWS - 1
header = 'File: %s | Row: %s | Step: %s/%s | Speed: %s' % \
(self.filename, self.highlight, len(self.history), len(self.diff), self.delay)
screen.addstr(0,0,header+' '*(BORDER+COLS-len(header)), curses.A_REVERSE)
r = 1
i = min(max(0,self.highlight + r - 1 - self.before),len(self.lines)-1)
while r<ROWS and i<len(self.lines):
k=0
line = self.lines[i]
while r<ROWS:
if k==0:
label = ' '*(BORDER-len(str(i+1)))+str(i+1)
screen.addstr(r,0,label, curses.A_REVERSE)
else:
screen.addstr(r,0,' '*BORDER, curses.A_REVERSE)
if i==self.highlight:
screen.addstr(r,BORDER,line[:COLS-1],curses.A_BOLD)
else:
screen.addstr(r,BORDER,line[:COLS-1])
r=r+1
if len(line)<=COLS: break
line=line[COLS:]
k=1
i=i+1
screen.refresh()
def compute_diff(output,input):
lines1=open(output,'r').read().split('\n')
lines2=open(input,'r').read().split('\n')
diff = difflib.ndiff(lines1,lines2)
commands=[]
k=0
for line in diff:
if line.startswith(' '):
k+=1
elif line.startswith('-'):
commands.append(('delete_line',k,0,line[2:]))
elif line.startswith('+'):
commands.append(('insert_line',k,0,line[2:]))
k+=1
return commands
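# compute_diff() diffs the current contents of `output` against the target
# `input` and returns a list of ('insert_line' | 'delete_line', row, col, text)
# commands that the editor replays one step at a time.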
def editor(options):
output = options.output
input = options.input or output
if not os.path.exists(output):
open(output,'w').write('')
if input and not os.path.exists(input):
open(input,'w').write('')
diff = compute_diff(output,input)
delay = options.delay
nonblock = options.nonblock
if delay>0 and nonblock:
time.sleep(min(delay*5,2))
screen = curses.initscr()
curses.noecho()
curses.cbreak()
screen.keypad(1)
save = True
try:
d=Document(screen, output, options=options)
d.render()
d.step = 0
d.diff = diff
while True:
if nonblock:
if d.step>=len(diff):
char=ord('x')
else:
char = 32
else:
char = screen.getch()
if char==curses.KEY_UP:
d.up()
elif char==curses.KEY_DOWN:
d.down()
elif (char==curses.KEY_RIGHT or char==32) and d.step<len(diff):
command, r, c, text = diff[d.step]
if command=='insert_line':
d.insert_line(r,text)
elif command=='delete_line':
d.delete_line(r)
elif command=='insert_text':
d.insert_text(r,c,text)
elif command=='delete_text':
d.delete_text(r,c,int(text))
d.step+=1
# d.delete(random.randint(0,20),random.randint(0,20),10)
elif char==curses.KEY_LEFT or char==ord('b'):
d.revert()
d.step=max(0,d.step-1)
elif char==ord('+'):
d.delay*=2
elif char==ord('-'):
d.delay/=2
elif char==ord('n'):
d.delay=0
elif char==ord('x'):
break
elif char==ord('q'):
save = False
break
elif char==ord('s'):
if input!=output: d.save(output)
if input!=output and save:
shutil.copyfile(input,output)
finally:
if delay>0 and nonblock:
time.sleep(max(delay*5,2))
screen.keypad(0)
curses.nocbreak()
curses.echo()
curses.endwin()
curses.nl()
def fast_print(input, pad=' | '):
input = str(input).strip()
if not input: return
for line in input.split('\n'):
sys.stdout.write(pad+line+'\n')
time.sleep(0.02)
def set_color(color):
ansi_foreground = curses.tigetstr('setaf')
if(ansi_foreground):
code = getattr(curses, 'COLOR_%s' % color.upper())
sys.stdout.write(curses.tparm(ansi_foreground, code))
def slow_print(prefix,postfix,options):
if not postfix:
sys.stdout.write('\n')
else:
for i in range(len(postfix)+1):
if i>0: sys.stdout.write(curses.tigetstr('cuu1'))
set_color('yellow')
sys.stdout.write(prefix)
if postfix[:2]=='# ': set_color('cyan')
else: set_color('green')
sys.stdout.write(postfix[:i]+'\n')
time.sleep(options.delay*2*random.random())
if not options.nonblock and postfix and i==0:
press_key()
if postfix[i:i+1]==' ':
time.sleep(options.delay*2)
set_color('white')
if options.speak and postfix[:2]=='# ': say(postfix[2:])
def get_long_text(fileobj):
code = ''
while True:
line = fileobj.readline()
if line.startswith('@@END@@'): break
else: code=code+line
return code
def read_command(i,fileobj,options):
command = ''
multiline = False
while True:
prompt = '%i' % i
prompt = '.'*(5-len(prompt))+prompt+'> '
if fileobj:
new_line = fileobj.readline()
if new_line == None:
return '', 'quit'
new_line = new_line.rstrip()
postfix=new_line.replace('@@UPDATE@@','edit')
if not postfix.startswith('@@') and not postfix.startswith('%'):
line_width = len(postfix) + len(prompt)
(console_width, console_height) = getTerminalSize()
h = '# ' if postfix.startswith('# ') else ''
while line_width > console_width:
shift = console_width-len(prompt)-1
if postfix.startswith('`'):
slow_print(prompt,postfix[2:shift],options)
else:
slow_print(prompt,postfix[:shift],options)
postfix = h+postfix[shift:]
line_width -= shift
if postfix.startswith('`'):
slow_print(prompt,postfix[2:],options)
else:
slow_print(prompt,postfix,options)
time.sleep(options.delay)
else:
if not multiline:
new_line = raw_input(prompt)
else:
new_line = raw_input('.'*len(prompt))
spaces = len(new_line)-len(new_line.lstrip())
if not new_line.strip():
break
if new_line.strip().endswith(':') or \
new_line.strip().count('"""') % 2 == 1 or \
new_line.strip().count("'''") % 2 == 1 or \
new_line.strip().endswith(',') or \
new_line.strip().endswith('\\'):
multiline = True
command = command +'\n'+new_line
if not multiline:
break
if fileobj and command.strip() and not options.nonblock:
key = press_key()
else:
key = None
return key, command.lstrip()
_env = {}
OS_COMMANDS = ['ls','rm','echo','mkdir','rmdir','find','open','say','cp','mv',
'ssh','scp','ftp','grep','set','export','hg','diff','patch',
'wget','curl','zip','unzip','tar','python','ps','kill','nohup','sudo','make']
def getTerminalSize():
env = os.environ
def ioctl_GWINSZ(fd):
try:
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,'1234'))
except:
return
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (env.get('LINES', 25), env.get('COLUMNS', 80))
### Use get(key[, default]) instead of a try/catch
#try:
# cr = (env['LINES'], env['COLUMNS'])
#except:
# cr = (25, 80)
return int(cr[1]), int(cr[0])
def actor(command,code,history,options):
try: meta,other= command.split(' ',1)
except: meta, other = command, ''
if meta=='#':
pass
elif meta=='`':
pass
elif meta=='quit' and not other.startswith('='):
return False
elif meta=='cd' and not other.startswith('='):
os.chdir(other)
elif meta=='save' and not other.startswith('='):
pass
elif meta=='load' and not other.startswith('='):
pass
elif meta=='edit' and not other.startswith('='):
raise RuntimeError
elif meta=='commit' and not other.startswith('='):
raise RuntimeError
elif is_shell_command(command):
ocommand = command
if meta.startswith('$'): command=command[1:]
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
if not command.endswith('&'):
output = proc.communicate()[0]
history.append((ocommand,output))
fast_print(output)
elif meta=='@@SHELL@@':
command = code
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
output = proc.communicate()[0]
history.append((command,output))
elif meta=='@@TEXT@@':
pass
elif meta=='@@READ@@':
say(code)
elif meta=='@@UPDATE@@':
options.input = TEMPFILENAME
open(options.input,'w').write(code)
        options.output = other.strip()  # the filename that follows @@UPDATE@@
if sys.argv[0].startswith('/'):
path = sys.argv[0]
else:
path = '%s/%s' % (options.path,sys.argv[0])
os.system('%s -d %s %s %s -i %s -o %s' % (path,options.delay,
'-n' if options.nonblock else '',
'-s' if options.speak else '',
options.input,options.output))
else:
if meta == '@@PYTHON@@':
command = code
STDOUT = sys.stdout
STDERR = sys.stderr
sys.stdout = sys.stderr = cStringIO.StringIO()
exec(command,_env)
output = sys.stdout.getvalue()
sys.stdout, sys.stderr = STDOUT, STDERR
history.append((command,output))
fast_print(output)
return True
def typist(command,code,history,options):
try: meta,other= command.split(' ',1)
except: meta, other = command, ''
if meta=='#':
history.append(('#',other))
elif meta=='quit' and not other.startswith('='):
return False
elif meta=='cd' and not other.startswith('='):
os.chdir(other)
history.append((command,''))
elif meta=='save' and not other.startswith('='):
if not other:
other = raw_input('filename: ')
pickle.dump(history,open(other+'.pickle','wb'))
f = open(other+'.play','wb')
for k in history:
f.write(k[0]+'\n')
if k[0].startswith('@@UPDATE@@ '):
f.write(k[1]+'@@END@@\n\n')
f.write('quit\n')
elif meta=='load' and not other.startswith('='):
        history = pickle.load(open(other, 'rb'))
elif meta=='edit' and not other.startswith('='):
filename = command[5:]
if not os.path.exists(filename):
open(filename,'w').write('')
os.system('emacs %s' % filename)
lastfilename = filename
elif meta=='commit' and not other.startswith('='):
filename = command[7:]
code = open(filename,'rb').read()
history.append(('@@UPDATE@@ %s' % filename,code))
elif is_shell_command(command):
ocommand = command
if meta.startswith('$'): command=command[1:]
proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
if not command.endswith('&'):
output = proc.communicate()[0]
history.append((ocommand,output))
fast_print(output)
elif meta=='@@UPDATE@@':
raise RuntimeError
else:
if meta == '@@PYTHON@@':
raise RuntimeError
command = code
STDOUT = sys.stdout
STDERR = sys.stderr
sys.stdout = sys.stderr = cStringIO.StringIO()
exec(command,_env)
output = sys.stdout.getvalue()
sys.stdout, sys.stderr = STDOUT, STDERR
history.append((command,output))
fast_print(output)
return True
def actor_markmin(command,code,history,options):
# this needs work
if command.startswith('%'):
pass
elif command.startswith('#'):
options.markmin_file.write(command[1:]+'\n')
elif command=='quit':
return False
elif code:
if command.startswith('@@TEXT@@'):
options.markmin_file.write(code+'\n\n')
if command.startswith('@@READ@@'):
options.markmin_file.write(code+'\n\n')
        if command.startswith('@@UPDATE@@'):
            # the filename follows the @@UPDATE@@ marker
            other = command.split(' ', 1)[1] if ' ' in command else ''
            options.markmin_file.write('\nFILE: %s\n' % other)
            options.markmin_file.write('``\n%s\n``\n\n' % code.strip())
elif is_shell_command(command):
options.markmin_file.write('``\n$ %s\n``:shell\n\n' % command)
else:
options.markmin_file.write('``\n%s\n``:python\n\n' % command)
return True
class Lines:
def __init__(self,filename):
self.lines = open(filename,'rb').readlines()
self.i = 0
self.history = []
def readline(self):
if self.i>=len(self.lines):
return None
self.i += 1
return self.lines[self.i-1]
def play(options,actor=actor):
curses.setupterm()
options.path = os.getcwd()
filename = options.play
delay = options.delay
nonblock = options.nonblock
i=0
counters = {}
history = []
STDOUT = sys.stdout
STDERR = sys.stderr
if not filename: #interactive
readline.set_completer(rlcompleter.Completer(_env).complete)
readline.parse_and_bind('tab:complete')
fileobj=None
else:
fileobj=Lines(filename)
while True:
try:
# key, command = None, None # in case error below
key, command = read_command(i,fileobj,options)
code = ''
if command == None:
break
elif command:
i+=1
if delay != options.delay:
options.delay = delay
if not command.strip() or command.lstrip().startswith('## '):
continue
elif command.startswith('@@'):
code = get_long_text(fileobj)
if not actor(command,code,history,options):
break
except KeyboardInterrupt:
sys.stdout, sys.stderr = STDOUT, STDERR
if raw_input('\n\nkill kryten (y/n)?')[0].lower()=='y':
if not options.input: print QUOTE2
set_color('white')
sys.exit(0)
except EOFError:
sys.stdout.write('\n')
sys.stdout, sys.stderr = STDOUT, STDERR
except:
sys.stdout, sys.stderr = STDOUT, STDERR
set_color('red')
input = traceback.format_exc()
if command: history.append((command,input))
fast_print(input)
set_color('white')
if options.debug: press_key()
def main():
usage = '''
    play (record mode)
    play [options] -p filename.play (playback mode)
    play [options] -i input -o output (editor mode)
'''
version= "0.1"
parser = optparse.OptionParser(usage, None, optparse.Option, version)
parser.add_option("-n", "--nonblock", dest="nonblock",default=False,
action='store_true',
help="do not ask for intput")
parser.add_option("-d", "--delay", dest="delay",default='0.05',
help="typing delay")
parser.add_option("-p", "--play", dest="play",default=None,
help="file to play (play mode)")
parser.add_option("-i", "--input", dest="input",default=None,
help="input file (editor mode)")
parser.add_option("-o", "--output", dest="output",default=None,
help="output file (editor mode)")
parser.add_option("-s", "--speak", dest="speak",default=False, action="store_true",
help="read comments, mac only")
parser.add_option("-D", "--debug", dest="debug",default=False, action="store_true",
help="stops on tracbacks")
parser.add_option("-m", "--markmin", dest="markmin",default=None,
help="saves markmin output")
(options, args) = parser.parse_args()
options.delay=float(options.delay) # important!
if not options.input: print QUOTE1
if options.output:
editor(options)
elif options.markmin:
options.markmin_file = open(options.markmin,'w')
play(options,actor_markmin)
elif options.play:
play(options,actor)
else:
play(options,typist)
if __name__=='__main__':
main()
|
|
# -*- coding: utf-8 -*-
"""
Some utilities and common stuff for tests
"""
import os
from boussole.conf.model import Settings
from boussole.inspector import ScssInspector
from boussole.watcher import SassLibraryEventHandler, SassProjectEventHandler
class DummyBaseEvent(object):
"""
Dummy event base to pass to almost all handler event methods
"""
event_type = "boussole-dummy"
is_directory = False
def __init__(self, src, dst=None):
self.src_path = src
self.dest_path = dst or src
class DummyCreatedEvent(DummyBaseEvent):
"""
Dummy event to pass to handler event 'on_created' method
"""
event_type = "created"
class DummyModifiedEvent(DummyBaseEvent):
"""
Dummy event to pass to handler event 'on_modified' method
"""
event_type = "modified"
class DummyDeletedEvent(DummyBaseEvent):
"""
Dummy event to pass to handler event 'on_deleted' method
"""
event_type = "deleted"
class DummyMoveEvent(DummyBaseEvent):
"""
Dummy event to pass to handler event 'on_moved' method
"""
event_type = "moved"
class DummyBaseHandler(object):
"""
Fake watchdog event handler
Reproduce some behavior from watchdog event handler to ease tests
"""
def __init__(self, *args, **kwargs):
self.patterns = kwargs.pop('patterns')
self.ignore_patterns = kwargs.pop('ignore_patterns')
self.ignore_directories = kwargs.pop('ignore_directories')
self.case_sensitive = kwargs.pop('case_sensitive')
super(DummyBaseHandler, self).__init__(*args, **kwargs)
def on_moved(self, event):
self.on_any_event(event)
super(DummyBaseHandler, self).on_moved(event)
def on_created(self, event):
self.on_any_event(event)
super(DummyBaseHandler, self).on_created(event)
def on_modified(self, event):
self.on_any_event(event)
super(DummyBaseHandler, self).on_modified(event)
def on_deleted(self, event):
self.on_any_event(event)
super(DummyBaseHandler, self).on_deleted(event)
class UnitTestableLibraryEventHandler(DummyBaseHandler, SassLibraryEventHandler):
"""
Testable watch event handler for library sources
"""
pass
class UnitTestableProjectEventHandler(DummyBaseHandler, SassProjectEventHandler):
"""
Testable watch event handler for project sources
"""
pass
def join_basedir(basedir):
"""
Return a shortcut function to join basedir to given path
"""
def proceed_joining(path):
return os.path.join(basedir, path)
return proceed_joining
def start_env(basedir):
"""
Initialize a basedir path, a dummy Settings object, Inspector object and
Watcher options needed for handler testing.
"""
join_basedir_curry = join_basedir(basedir.strpath)
inspector = ScssInspector()
minimal_conf = {
'SOURCES_PATH': basedir.join('sass').strpath,
'TARGET_PATH': basedir.join('css').strpath,
'LIBRARY_PATHS': [basedir.join('lib').strpath],
}
settings = Settings(initial=minimal_conf)
watcher_opts = {
'patterns': ['*.scss', '*.sass'],
'ignore_patterns': ['*.part'],
'ignore_directories': False,
'case_sensitive': True,
}
return join_basedir_curry, inspector, settings, watcher_opts
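# Typical use in a test (a sketch; assumes a py.path.local ``tmpdir`` fixture
# such as pytest's):
#   join_basedir_curry, inspector, settings, watcher_opts = start_env(tmpdir)
#   build_scss_sample_structure(settings, tmpdir)
#   # ...then instantiate UnitTestableProjectEventHandler with the settings,
#   # inspector and watcher_opts, and feed it Dummy*Event instances.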
def build_scss_sample_structure(settings_object, basedir):
"""
Build Scss sample files structure for handler testing
"""
# Write needed dirs
os.makedirs(settings_object.SOURCES_PATH)
os.makedirs(settings_object.TARGET_PATH)
os.makedirs(os.path.join(settings_object.LIBRARY_PATHS[0], "components"))
# Write a minimal main Sass source importing partial
source = "\n".join((
"""/* Main sample */""",
"""@import "toinclude";""",
"""#content{ color: red; }""",
))
with open(basedir.join('sass/main.scss').strpath, 'w') as f:
f.write(source)
# Write a main Sass source importing minimal source
source = "\n".join((
"""/* Main importing sample */""",
"""@import "main";""",
))
with open(basedir.join('sass/main_importing.scss').strpath, 'w') as f:
f.write(source)
# Write a main Sass source importing library component and partial source
source = "\n".join((
"""/* Main importing library */""",
"""@import "toinclude";""",
"""@import "components/buttons";""",
))
with open(basedir.join('sass/main_usinglib.scss').strpath, 'w') as f:
f.write(source)
# Write a partial Sass source to include
source = "\n".join((
"""/* Partial source to include */""",
""".included-partial{ color: gold !important; }""",
))
with open(basedir.join('sass/_toinclude.scss').strpath, 'w') as f:
f.write(source)
# Write a partial Sass source to ignore
source = "\n".join((
"""/* Partial source to ignore because not included */""",
""".toignore-partial{ font-weight: bold; }""",
))
with open(basedir.join('sass/_notincluded.scss').strpath, 'w') as f:
f.write(source)
# Write a main source within library directory
source = "\n".join((
"""/* Main source for library */""",
"""@import "components/buttons";""",
))
with open(basedir.join('lib/libmain.scss').strpath, 'w') as f:
f.write(source)
# Write a partial source within library directory
source = "\n".join((
"""/* Buttons component */""",
""".button{ display: block; border: 1px solid black; padding: 5px; }""",
))
with open(basedir.join('lib/components/_buttons.scss').strpath, 'w') as f:
f.write(source)
def build_sass_sample_structure(settings_object, basedir):
"""
Build Sass (indented syntax) sample files structure for handler testing
"""
# Write needed dirs
os.makedirs(settings_object.SOURCES_PATH)
os.makedirs(settings_object.TARGET_PATH)
os.makedirs(os.path.join(settings_object.LIBRARY_PATHS[0], "components"))
# Write a minimal main Sass source importing partial
source = "\n".join((
"""/* Main sample */""",
"""@import toinclude""",
"""#content""",
""" color: red""",
"",
))
with open(basedir.join('sass/main.sass').strpath, 'w') as f:
f.write(source)
# Write a main Sass source importing minimal source
source = "\n".join((
"""/* Main importing sample */""",
"""@import main""",
))
with open(basedir.join('sass/main_importing.sass').strpath, 'w') as f:
f.write(source)
# Write a main Sass source importing library component and partial source
source = "\n".join((
"""/* Main importing library */""",
"""@import toinclude""",
"""@import components/buttons""",
))
with open(basedir.join('sass/main_usinglib.sass').strpath, 'w') as f:
f.write(source)
# Write a partial Sass source to include
source = "\n".join((
"""/* Partial source to include */""",
""".included-partial""",
""" color: gold !important""",
"",
))
with open(basedir.join('sass/_toinclude.sass').strpath, 'w') as f:
f.write(source)
# Write a partial Sass source to ignore
source = "\n".join((
"""/* Partial source to ignore because not included */""",
""".toignore-partial""",
""" font-weight: bold""",
"",
))
with open(basedir.join('sass/_notincluded.sass').strpath, 'w') as f:
f.write(source)
# Write a main source within library directory
source = "\n".join((
"""/* Main source for library */""",
"""@import components/buttons""",
))
with open(basedir.join('lib/libmain.sass').strpath, 'w') as f:
f.write(source)
# Write a partial source within library directory
source = "\n".join((
"""/* Buttons component */""",
""".button""",
""" display: block""",
""" border: 1px solid black""",
""" padding: 5px""",
"",
))
with open(basedir.join('lib/components/_buttons.sass').strpath, 'w') as f:
f.write(source)
|
|
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.web import html
import urllib, time
from twisted.python import log
from twisted.internet import defer
from buildbot import interfaces
from buildbot.status.web.base import HtmlResource, BuildLineMixin, \
path_to_build, path_to_slave, path_to_builder, path_to_change, \
path_to_root, ICurrentBox, build_get_class, \
map_branches, path_to_authzfail, ActionResource, \
getRequestCharset
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.forcesched import ValidationError
from buildbot.status.web.build import BuildsResource, StatusResourceBuild
from buildbot import util
import collections
class ForceAction(ActionResource):
@defer.inlineCallbacks
def force(self, req, builderNames):
master = self.getBuildmaster(req)
owner = self.getAuthz(req).getUsernameFull(req)
schedulername = req.args.get("forcescheduler", ["<unknown>"])[0]
if schedulername == "<unknown>":
defer.returnValue((path_to_builder(req, self.builder_status),
"forcescheduler arg not found"))
return
args = {}
# decode all of the args
encoding = getRequestCharset(req)
for name, argl in req.args.iteritems():
if name == "checkbox":
# damn html's ungeneric checkbox implementation...
for cb in argl:
args[cb.decode(encoding)] = True
else:
args[name] = [ arg.decode(encoding) for arg in argl ]
for sch in master.allSchedulers():
if schedulername == sch.name:
try:
yield sch.force(owner, builderNames, **args)
msg = ""
except ValidationError, e:
msg = html.escape(e.message.encode('ascii','ignore'))
break
# send the user back to the builder page
defer.returnValue(msg)
class ForceAllBuildsActionResource(ForceAction):
def __init__(self, status, selectedOrAll):
self.status = status
self.selectedOrAll = selectedOrAll
self.action = "forceAllBuilds"
@defer.inlineCallbacks
def performAction(self, req):
authz = self.getAuthz(req)
res = yield authz.actionAllowed('forceAllBuilds', req)
if not res:
defer.returnValue(path_to_authzfail(req))
return
if self.selectedOrAll == 'all':
builderNames = None
elif self.selectedOrAll == 'selected':
builderNames = [b for b in req.args.get("selected", []) if b]
msg = yield self.force(req, builderNames)
# back to the welcome page
defer.returnValue((path_to_root(req) + "builders", msg))
class StopAllBuildsActionResource(ActionResource):
def __init__(self, status, selectedOrAll):
self.status = status
self.selectedOrAll = selectedOrAll
self.action = "stopAllBuilds"
@defer.inlineCallbacks
def performAction(self, req):
authz = self.getAuthz(req)
res = yield authz.actionAllowed('stopAllBuilds', req)
if not res:
defer.returnValue(path_to_authzfail(req))
return
builders = None
if self.selectedOrAll == 'all':
builders = self.status.getBuilderNames()
elif self.selectedOrAll == 'selected':
builders = [b for b in req.args.get("selected", []) if b]
for bname in builders:
builder_status = self.status.getBuilder(bname)
(state, current_builds) = builder_status.getState()
if state != "building":
continue
for b in current_builds:
build_status = builder_status.getBuild(b.number)
if not build_status:
continue
build = StatusResourceBuild(build_status)
build.stop(req, auth_ok=True)
# go back to the welcome page
defer.returnValue(path_to_root(req))
class CancelAllPendingBuildsActionResource(ActionResource):
def __init__(self, status, selectedOrAll):
self.status = status
self.selectedOrAll = selectedOrAll
self.action = 'cancelAllPendingBuilds'
@defer.inlineCallbacks
def performAction(self, req):
authz = self.getAuthz(req)
res = yield authz.actionAllowed('cancelAllPendingBuilds', req)
if not res:
defer.returnValue(path_to_authzfail(req))
return
builders = None
if self.selectedOrAll == 'all':
builders = self.status.getBuilderNames()
elif self.selectedOrAll == 'selected':
builders = [b for b in req.args.get("selected", []) if b]
c = interfaces.IControl(self.getBuildmaster(req))
for bname in builders:
authz = self.getAuthz(req)
builder_control = c.getBuilder(bname)
brcontrols = yield builder_control.getPendingBuildRequestControls()
for build_req in brcontrols:
log.msg("Cancelling %s" % build_req)
build_req.cancel()
# go back to the welcome page
defer.returnValue(path_to_root(req))
class PingBuilderActionResource(ActionResource):
def __init__(self, builder_status):
self.builder_status = builder_status
self.action = "pingBuilder"
@defer.inlineCallbacks
def performAction(self, req):
log.msg("web ping of builder '%s'" % self.builder_status.getName())
res = yield self.getAuthz(req).actionAllowed('pingBuilder', req,
self.builder_status)
if not res:
log.msg("..but not authorized")
defer.returnValue(path_to_authzfail(req))
return
c = interfaces.IControl(self.getBuildmaster(req))
bc = c.getBuilder(self.builder_status.getName())
bc.ping()
# send the user back to the builder page
defer.returnValue(path_to_builder(req, self.builder_status))
class ForceBuildActionResource(ForceAction):
def __init__(self, builder_status):
self.builder_status = builder_status
self.action = "forceBuild"
@defer.inlineCallbacks
def performAction(self, req):
# check if this is allowed
res = yield self.getAuthz(req).actionAllowed(self.action, req,
self.builder_status)
if not res:
log.msg("..but not authorized")
defer.returnValue(path_to_authzfail(req))
return
builderName = self.builder_status.getName()
msg = yield self.force(req, [builderName])
# send the user back to the builder page
defer.returnValue((path_to_builder(req, self.builder_status), msg))
def buildForceContextForField(req, default_props, sch, field, master, buildername):
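    # Populate default_props with template-ready defaults for one force-scheduler
    # field. Keys follow the pattern "<scheduler name>.<field fullName>" (list
    # fields also get a "<key>.choices" entry), and values supplied in the
    # request override the field's own default.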
pname = "%s.%s"%(sch.name, field.fullName)
default = field.default
if "list" in field.type:
choices = field.getChoices(master, sch, buildername)
if choices:
default = choices[0]
default_props[pname+".choices"] = choices
default = req.args.get(pname, [default])[0]
if "bool" in field.type:
default = "checked" if default else ""
elif isinstance(default, unicode):
# filter out unicode chars, and html stuff
default = html.escape(default.encode('utf-8','ignore'))
default_props[pname] = default
if "nested" in field.type:
for subfield in field.fields:
buildForceContextForField(req, default_props, sch, subfield, master, buildername)
def buildForceContext(cxt, req, master, buildername=None):
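    # Collect the ForceSchedulers that apply to `buildername` (all builders when
    # None) and their per-field defaults into the template context, so the
    # force-build form can be rendered with prefilled values.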
force_schedulers = {}
default_props = collections.defaultdict(str)
for sch in master.allSchedulers():
if isinstance(sch, ForceScheduler) and (buildername is None or(buildername in sch.builderNames)):
force_schedulers[sch.name] = sch
for field in sch.all_fields:
buildForceContextForField(req, default_props, sch, field, master, buildername)
cxt['force_schedulers'] = force_schedulers
cxt['default_props'] = default_props
# /builders/$builder
class StatusResourceBuilder(HtmlResource, BuildLineMixin):
addSlash = True
def __init__(self, builder_status, numbuilds=20):
HtmlResource.__init__(self)
self.builder_status = builder_status
self.numbuilds = numbuilds
def getPageTitle(self, request):
return "Buildbot: %s" % self.builder_status.getName()
def builder(self, build, req):
b = {}
b['num'] = build.getNumber()
b['link'] = path_to_build(req, build)
when = build.getETA()
if when is not None:
b['when'] = util.formatInterval(when)
b['when_time'] = time.strftime("%H:%M:%S",
time.localtime(time.time() + when))
step = build.getCurrentStep()
# TODO: is this necessarily the case?
if not step:
b['current_step'] = "[waiting for Lock]"
else:
if step.isWaitingForLocks():
b['current_step'] = "%s [waiting for Lock]" % step.getName()
else:
b['current_step'] = step.getName()
b['stop_url'] = path_to_build(req, build) + '/stop'
return b
@defer.inlineCallbacks
def content(self, req, cxt):
b = self.builder_status
cxt['name'] = b.getName()
cxt['description'] = b.getDescription()
req.setHeader('Cache-Control', 'no-cache')
slaves = b.getSlaves()
connected_slaves = [s for s in slaves if s.isConnected()]
cxt['current'] = [self.builder(x, req) for x in b.getCurrentBuilds()]
cxt['pending'] = []
statuses = yield b.getPendingBuildRequestStatuses()
for pb in statuses:
changes = []
source = yield pb.getSourceStamp()
submitTime = yield pb.getSubmitTime()
bsid = yield pb.getBsid()
properties = yield \
pb.master.db.buildsets.getBuildsetProperties(bsid)
if source.changes:
for c in source.changes:
changes.append({ 'url' : path_to_change(req, c),
'who' : c.who,
'revision' : c.revision,
'repo' : c.repository })
cxt['pending'].append({
'when': time.strftime("%b %d %H:%M:%S",
time.localtime(submitTime)),
'delay': util.formatInterval(util.now() - submitTime),
'id': pb.brid,
'changes' : changes,
'num_changes' : len(changes),
'properties' : properties,
})
numbuilds = cxt['numbuilds'] = int(req.args.get('numbuilds', [self.numbuilds])[0])
recent = cxt['recent'] = []
for build in b.generateFinishedBuilds(num_builds=int(numbuilds)):
recent.append(self.get_line_values(req, build, False))
sl = cxt['slaves'] = []
connected_slaves = 0
for slave in slaves:
s = {}
sl.append(s)
s['link'] = path_to_slave(req, slave)
s['name'] = slave.getName()
c = s['connected'] = slave.isConnected()
s['paused'] = slave.isPaused()
s['admin'] = unicode(slave.getAdmin() or '', 'utf-8')
if c:
connected_slaves += 1
cxt['connected_slaves'] = connected_slaves
cxt['authz'] = self.getAuthz(req)
cxt['builder_url'] = path_to_builder(req, b)
buildForceContext(cxt, req, self.getBuildmaster(req), b.getName())
template = req.site.buildbot_service.templates.get_template("builder.html")
defer.returnValue(template.render(**cxt))
def ping(self, req):
return PingBuilderActionResource(self.builder_status)
def getChild(self, path, req):
if path == "force":
return ForceBuildActionResource(self.builder_status)
if path == "ping":
return self.ping(req)
if path == "cancelbuild":
return CancelChangeResource(self.builder_status)
if path == "stopchange":
return StopChangeResource(self.builder_status)
if path == "builds":
return BuildsResource(self.builder_status)
return HtmlResource.getChild(self, path, req)
class CancelChangeResource(ActionResource):
def __init__(self, builder_status):
ActionResource.__init__(self)
self.builder_status = builder_status
@defer.inlineCallbacks
def performAction(self, req):
try:
request_id = req.args.get("id", [None])[0]
if request_id == "all":
cancel_all = True
else:
cancel_all = False
request_id = int(request_id)
except:
request_id = None
authz = self.getAuthz(req)
if request_id:
c = interfaces.IControl(self.getBuildmaster(req))
builder_control = c.getBuilder(self.builder_status.getName())
brcontrols = yield builder_control.getPendingBuildRequestControls()
for build_req in brcontrols:
if cancel_all or (build_req.brid == request_id):
log.msg("Cancelling %s" % build_req)
res = yield authz.actionAllowed('cancelPendingBuild', req,
build_req)
if res:
build_req.cancel()
else:
defer.returnValue(path_to_authzfail(req))
return
if not cancel_all:
break
defer.returnValue(path_to_builder(req, self.builder_status))
class StopChangeMixin(object):
@defer.inlineCallbacks
def stopChangeForBuilder(self, req, builder_status, auth_ok=False):
try:
request_change = req.args.get("change", [None])[0]
request_change = int(request_change)
except:
request_change = None
authz = self.getAuthz(req)
if request_change:
c = interfaces.IControl(self.getBuildmaster(req))
builder_control = c.getBuilder(builder_status.getName())
brcontrols = yield builder_control.getPendingBuildRequestControls()
build_controls = dict((x.brid, x) for x in brcontrols)
build_req_statuses = yield \
builder_status.getPendingBuildRequestStatuses()
for build_req in build_req_statuses:
ss = yield build_req.getSourceStamp()
if not ss.changes:
continue
for change in ss.changes:
if change.number == request_change:
control = build_controls[build_req.brid]
log.msg("Cancelling %s" % control)
res = yield authz.actionAllowed('stopChange', req, control)
if (auth_ok or res):
control.cancel()
else:
defer.returnValue(False)
return
defer.returnValue(True)
class StopChangeResource(StopChangeMixin, ActionResource):
def __init__(self, builder_status):
ActionResource.__init__(self)
self.builder_status = builder_status
@defer.inlineCallbacks
def performAction(self, req):
"""Cancel all pending builds that include a given numbered change."""
success = yield self.stopChangeForBuilder(req, self.builder_status)
if not success:
defer.returnValue(path_to_authzfail(req))
else:
defer.returnValue(path_to_builder(req, self.builder_status))
class StopChangeAllResource(StopChangeMixin, ActionResource):
def __init__(self, status):
ActionResource.__init__(self)
self.status = status
@defer.inlineCallbacks
def performAction(self, req):
"""Cancel all pending builds that include a given numbered change."""
authz = self.getAuthz(req)
res = yield authz.actionAllowed('stopChange', req)
if not res:
defer.returnValue(path_to_authzfail(req))
return
for bname in self.status.getBuilderNames():
builder_status = self.status.getBuilder(bname)
res = yield self.stopChangeForBuilder(req, builder_status, auth_ok=True)
if not res:
defer.returnValue(path_to_authzfail(req))
return
defer.returnValue(path_to_root(req))
# /builders/_all
class StatusResourceAllBuilders(HtmlResource, BuildLineMixin):
def __init__(self, status):
HtmlResource.__init__(self)
self.status = status
def getChild(self, path, req):
if path == "forceall":
return self.forceall(req)
if path == "stopall":
return self.stopall(req)
if path == "stopchangeall":
return StopChangeAllResource(self.status)
if path == "cancelpendingall":
return CancelAllPendingBuildsActionResource(self.status, 'all')
return HtmlResource.getChild(self, path, req)
def forceall(self, req):
return ForceAllBuildsActionResource(self.status, 'all')
def stopall(self, req):
return StopAllBuildsActionResource(self.status, 'all')
# /builders/_selected
class StatusResourceSelectedBuilders(HtmlResource, BuildLineMixin):
def __init__(self, status):
HtmlResource.__init__(self)
self.status = status
def getChild(self, path, req):
if path == "forceselected":
return self.forceselected(req)
if path == "stopselected":
return self.stopselected(req)
if path == "cancelpendingselected":
return CancelAllPendingBuildsActionResource(self.status, 'selected')
return HtmlResource.getChild(self, path, req)
def forceselected(self, req):
return ForceAllBuildsActionResource(self.status, 'selected')
def stopselected(self, req):
return StopAllBuildsActionResource(self.status, 'selected')
# /builders
class BuildersResource(HtmlResource):
pageTitle = "Builders"
addSlash = True
def __init__(self, numbuilds=20):
HtmlResource.__init__(self)
self.numbuilds = numbuilds
@defer.inlineCallbacks
def content(self, req, cxt):
status = self.getStatus(req)
encoding = getRequestCharset(req)
builders = req.args.get("builder", status.getBuilderNames())
branches = [ b.decode(encoding)
for b in req.args.get("branch", [])
if b ]
# get counts of pending builds for each builder
brstatus_ds = []
brcounts = {}
def keep_count(statuses, builderName):
brcounts[builderName] = len(statuses)
for builderName in builders:
builder_status = status.getBuilder(builderName)
d = builder_status.getPendingBuildRequestStatuses()
d.addCallback(keep_count, builderName)
brstatus_ds.append(d)
yield defer.gatherResults(brstatus_ds)
cxt['branches'] = branches
bs = cxt['builders'] = []
building = 0
online = 0
base_builders_url = path_to_root(req) + "builders/"
for bn in builders:
bld = { 'link': base_builders_url + urllib.quote(bn, safe=''),
'name': bn }
bs.append(bld)
builder = status.getBuilder(bn)
builds = list(builder.generateFinishedBuilds(map_branches(branches),
num_builds=1))
if builds:
b = builds[0]
bld['build_url'] = (bld['link'] + "/builds/%d" % b.getNumber())
label = None
all_got_revisions = b.getAllGotRevisions()
                # If there is exactly one got_revision, try to use it as the label.
if len(all_got_revisions) == 1:
label = all_got_revisions[all_got_revisions.keys()[0]]
if not label or len(str(label)) > 20:
label = "#%d" % b.getNumber()
bld['build_label'] = label
bld['build_text'] = " ".join(b.getText())
bld['build_css_class'] = build_get_class(b)
current_box = ICurrentBox(builder).getBox(status, brcounts)
bld['current_box'] = current_box.td()
builder_status = builder.getState()[0]
if builder_status == "building":
building += 1
online += 1
elif builder_status != "offline":
online += 1
cxt['authz'] = self.getAuthz(req)
cxt['num_building'] = building
cxt['num_online'] = online
buildForceContext(cxt, req, self.getBuildmaster(req))
template = req.site.buildbot_service.templates.get_template("builders.html")
defer.returnValue(template.render(**cxt))
def getChild(self, path, req):
s = self.getStatus(req)
if path in s.getBuilderNames():
builder_status = s.getBuilder(path)
return StatusResourceBuilder(builder_status, self.numbuilds)
if path == "_all":
return StatusResourceAllBuilders(self.getStatus(req))
if path == "_selected":
return StatusResourceSelectedBuilders(self.getStatus(req))
return HtmlResource.getChild(self, path, req)
|
|
# Copyright 2011 OpenStack Foundation
# Copyright 2013 Rackspace Hosting
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
OpenStack Client interface. Handles the REST calls and responses.
"""
from __future__ import print_function
import logging
from keystoneauth1 import adapter
from oslo_utils import importutils
import requests
import six.moves.urllib.parse as urlparse
from troveclient.apiclient import client
from troveclient import exceptions
from troveclient import service_catalog
try:
import eventlet as sleep_lib
except ImportError:
import time as sleep_lib
try:
import json
except ImportError:
import simplejson as json
# Python 2.5 compat fix
if not hasattr(urlparse, 'parse_qsl'):
import cgi
urlparse.parse_qsl = cgi.parse_qsl
osprofiler_web = importutils.try_import("osprofiler.web")
class TroveClientMixin(object):
def get_database_api_version_from_endpoint(self):
magic_tuple = urlparse.urlsplit(self.management_url)
scheme, netloc, path, query, frag = magic_tuple
v = path.split("/")[1]
valid_versions = ['v1.0']
if v not in valid_versions:
msg = "Invalid client version '%s'. must be one of: %s" % (
(v, ', '.join(valid_versions)))
raise exceptions.UnsupportedVersion(msg)
return v[1:]
class HTTPClient(TroveClientMixin):
USER_AGENT = 'python-troveclient'
def __init__(self, user, password, projectid, auth_url, insecure=False,
timeout=None, tenant_id=None, proxy_tenant_id=None,
proxy_token=None, region_name=None,
endpoint_type='publicURL', service_type=None,
service_name=None, database_service_name=None, retries=None,
http_log_debug=False, cacert=None, bypass_url=None,
auth_system='keystone', auth_plugin=None):
if auth_system and auth_system != 'keystone' and not auth_plugin:
raise exceptions.AuthSystemNotFound(auth_system)
if not auth_url and auth_system and auth_system != 'keystone':
auth_url = auth_plugin.get_auth_url()
if not auth_url:
raise exceptions.EndpointNotFound()
self.user = user
self.password = password
self.projectid = projectid
self.tenant_id = tenant_id
self.auth_url = auth_url.rstrip('/') if auth_url else auth_url
self.version = 'v1'
self.region_name = region_name
self.endpoint_type = endpoint_type
self.service_type = service_type
self.service_name = service_name
self.database_service_name = database_service_name
self.retries = int(retries or 0)
self.http_log_debug = http_log_debug
self.management_url = None
self.auth_token = None
self.proxy_token = proxy_token
self.proxy_tenant_id = proxy_tenant_id
self.timeout = timeout
self.bypass_url = bypass_url
self.auth_system = auth_system
self.auth_plugin = auth_plugin
if insecure:
self.verify_cert = False
else:
if cacert:
self.verify_cert = cacert
else:
self.verify_cert = True
self.auth_system = auth_system
self.auth_plugin = auth_plugin
self._logger = logging.getLogger(__name__)
if self.http_log_debug and not self._logger.handlers:
ch = logging.StreamHandler()
self._logger.setLevel(logging.DEBUG)
self._logger.addHandler(ch)
if hasattr(requests, 'logging'):
requests.logging.getLogger(requests.__name__).addHandler(ch)
def http_log_req(self, args, kwargs):
if not self.http_log_debug:
return
string_parts = ['curl -i']
for element in args:
if element in ('GET', 'POST', 'DELETE', 'PUT'):
string_parts.append(' -X %s' % element)
else:
string_parts.append(' %s' % element)
for element in kwargs['headers']:
header = ' -H "%s: %s"' % (element, kwargs['headers'][element])
string_parts.append(header)
if 'data' in kwargs:
string_parts.append(" -d '%s'" % (kwargs['data']))
self._logger.debug("\nREQ: %s\n" % "".join(string_parts))
def http_log_resp(self, resp):
if not self.http_log_debug:
return
self._logger.debug(
"RESP: [%s] %s\nRESP BODY: %s\n",
resp.status_code,
resp.headers,
resp.text)
def request(self, url, method, **kwargs):
        kwargs.setdefault('headers', {})
kwargs['headers']['User-Agent'] = self.USER_AGENT
kwargs['headers']['Accept'] = 'application/json'
if osprofiler_web:
kwargs['headers'].update(osprofiler_web.get_trace_id_headers())
if 'body' in kwargs:
kwargs['headers']['Content-Type'] = 'application/json'
kwargs['data'] = json.dumps(kwargs['body'])
del kwargs['body']
if self.timeout:
kwargs.setdefault('timeout', self.timeout)
self.http_log_req((url, method,), kwargs)
resp = requests.request(
method,
url,
verify=self.verify_cert,
**kwargs)
self.http_log_resp(resp)
if resp.text:
try:
body = json.loads(resp.text)
            except ValueError:
                body = None
else:
body = None
if resp.status_code >= 400:
raise exceptions.from_response(resp, body, url)
return resp, body
def _cs_request(self, url, method, **kwargs):
auth_attempts = 0
attempts = 0
backoff = 1
while True:
attempts += 1
if not self.management_url or not self.auth_token:
self.authenticate()
kwargs.setdefault('headers', {})['X-Auth-Token'] = self.auth_token
if self.projectid:
kwargs['headers']['X-Auth-Project-Id'] = self.projectid
try:
resp, body = self.request(self.management_url + url, method,
**kwargs)
return resp, body
except exceptions.BadRequest:
if attempts > self.retries:
raise
except exceptions.Unauthorized:
if auth_attempts > 0:
raise
self._logger.debug("Unauthorized, reauthenticating.")
self.management_url = self.auth_token = None
# First reauth. Discount this attempt.
attempts -= 1
auth_attempts += 1
continue
except exceptions.ClientException as e:
if attempts > self.retries:
raise
if 500 <= e.code <= 599:
pass
else:
raise
except requests.exceptions.ConnectionError as e:
# Catch a connection refused from requests.request
self._logger.debug("Connection refused: %s" % e)
msg = 'Unable to establish connection: %s' % e
raise exceptions.ConnectionRefused(msg)
self._logger.debug(
"Failed attempt(%s of %s), retrying in %s seconds" %
(attempts, self.retries, backoff))
sleep_lib.sleep(backoff)
backoff *= 2
def get(self, url, **kwargs):
return self._cs_request(url, 'GET', **kwargs)
def patch(self, url, **kwargs):
return self._cs_request(url, 'PATCH', **kwargs)
def post(self, url, **kwargs):
return self._cs_request(url, 'POST', **kwargs)
def put(self, url, **kwargs):
return self._cs_request(url, 'PUT', **kwargs)
def delete(self, url, **kwargs):
return self._cs_request(url, 'DELETE', **kwargs)
def _extract_service_catalog(self, url, resp, body, extract_token=True):
"""See what the auth service told us and process the response.
We may get redirected to another site, fail or actually get
back a service catalog with a token and our endpoints.
"""
        if resp.status_code == 200:  # content must always be present
try:
self.auth_url = url
self.service_catalog = \
service_catalog.ServiceCatalog(body)
if extract_token:
self.auth_token = self.service_catalog.get_token()
management_url = self.service_catalog.url_for(
attr='region',
filter_value=self.region_name,
endpoint_type=self.endpoint_type,
service_type=self.service_type,
service_name=self.service_name,
database_service_name=self.database_service_name)
self.management_url = management_url.rstrip('/')
return None
except exceptions.AmbiguousEndpoints:
print("Found more than one valid endpoint. Use a more "
"restrictive filter")
raise
except KeyError:
raise exceptions.AuthorizationFailure()
except exceptions.EndpointNotFound:
print("Could not find any suitable endpoint. Correct region?")
raise
elif resp.status_code == 305:
            return resp.headers['location']
else:
raise exceptions.from_response(resp, body, url)
def _fetch_endpoints_from_auth(self, url):
"""We have a token, but don't know the final endpoint for
the region. We have to go back to the auth service and
ask again. This request requires an admin-level token
        to work; the proxy token supplied could be from a low-level end user.
        We can't get this from the keystone service endpoint; we have to use
the admin endpoint.
This will overwrite our admin token with the user token.
"""
# GET ...:5001/v2.0/tokens/#####/endpoints
url = '/'.join([url, 'tokens', '%s?belongsTo=%s'
% (self.proxy_token, self.proxy_tenant_id)])
self._logger.debug("Using Endpoint URL: %s" % url)
resp, body = self.request(url, "GET",
headers={'X-Auth-Token': self.auth_token})
return self._extract_service_catalog(url, resp, body,
extract_token=False)
def authenticate(self):
magic_tuple = urlparse.urlsplit(self.auth_url)
scheme, netloc, path, query, frag = magic_tuple
port = magic_tuple.port
if port is None:
port = 80
path_parts = path.split('/')
for part in path_parts:
if len(part) > 0 and part[0] == 'v':
self.version = part
break
# TODO(sandy): Assume admin endpoint is 35357 for now.
        # Ideally this should be provided by the service catalog.
new_netloc = netloc.replace(':%d' % port, ':%d' % (35357,))
admin_url = urlparse.urlunsplit((scheme, new_netloc,
path, query, frag))
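        # Illustrative example of the rewrite above (assumed URL, not taken
        # from this module):
        #   auth_url  = "http://keystone.example.com:5000/v2.0"
        #   admin_url = "http://keystone.example.com:35357/v2.0"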
auth_url = self.auth_url
if self.version == "v2.0":
while auth_url:
if not self.auth_system or self.auth_system == 'keystone':
auth_url = self._v2_auth(auth_url)
else:
auth_url = self._plugin_auth(auth_url)
# Are we acting on behalf of another user via an
# existing token? If so, our actual endpoints may
# be different than that of the admin token.
if self.proxy_token:
self._fetch_endpoints_from_auth(admin_url)
# Since keystone no longer returns the user token
# with the endpoints any more, we need to replace
# our service account token with the user token.
self.auth_token = self.proxy_token
else:
try:
while auth_url:
auth_url = self._v1_auth(auth_url)
            # In some configurations trove redirects to the v2.0 keystone
            # endpoint; the new location contains only the hostname and
            # port, not the real endpoint.
except exceptions.AuthorizationFailure:
if auth_url.find('v2.0') < 0:
auth_url = auth_url + '/v2.0'
self._v2_auth(auth_url)
# Allows for setting an endpoint not defined in the catalog
if self.bypass_url is not None and self.bypass_url != '':
self.management_url = self.bypass_url
def _plugin_auth(self, auth_url):
return self.auth_plugin.authenticate(self, auth_url)
def _v1_auth(self, url):
if self.proxy_token:
raise exceptions.NoTokenLookupException()
headers = {'X-Auth-User': self.user,
'X-Auth-Key': self.password}
if self.projectid:
headers['X-Auth-Project-Id'] = self.projectid
resp, body = self.request(url, 'GET', headers=headers)
if resp.status_code in (200, 204): # in some cases we get No Content
try:
mgmt_header = 'x-server-management-url'
self.management_url = resp.headers[mgmt_header].rstrip('/')
self.auth_token = resp.headers['x-auth-token']
self.auth_url = url
except (KeyError, TypeError):
raise exceptions.AuthorizationFailure()
elif resp.status_code == 305:
return resp.headers['location']
else:
raise exceptions.from_response(resp, body, url)
def _v2_auth(self, url):
"""Authenticate against a v2.0 auth service."""
body = {"auth": {
"passwordCredentials": {"username": self.user,
"password": self.password}}}
if self.projectid:
body['auth']['tenantName'] = self.projectid
elif self.tenant_id:
body['auth']['tenantId'] = self.tenant_id
self._authenticate(url, body)
def _authenticate(self, url, body):
"""Authenticate and extract the service catalog."""
token_url = url + "/tokens"
# Make sure we follow redirects when trying to reach Keystone
resp, body = self.request(
token_url,
"POST",
body=body,
allow_redirects=True)
return self._extract_service_catalog(url, resp, body)
class SessionClient(adapter.LegacyJsonAdapter, TroveClientMixin):
def __init__(self, session, auth, **kwargs):
self.database_service_name = kwargs.pop('database_service_name', None)
super(SessionClient, self).__init__(session=session,
auth=auth,
**kwargs)
# FIXME(jamielennox): this is going to cause an authentication request
# on client init. This is different to how the other clients work.
endpoint = self.get_endpoint()
if not endpoint:
raise exceptions.EndpointNotFound()
self.management_url = endpoint.rstrip('/')
def request(self, url, method, **kwargs):
raise_exc = kwargs.pop('raise_exc', True)
resp, body = super(SessionClient, self).request(url,
method,
raise_exc=False,
**kwargs)
if raise_exc and resp.status_code >= 400:
raise exceptions.from_response(resp, body, url)
return resp, body
def _construct_http_client(username=None, password=None, project_id=None,
auth_url=None, insecure=False, timeout=None,
proxy_tenant_id=None, proxy_token=None,
region_name=None, endpoint_type='publicURL',
service_type='database',
service_name=None, database_service_name=None,
retries=None,
http_log_debug=False,
auth_system='keystone', auth_plugin=None,
cacert=None, bypass_url=None, tenant_id=None,
session=None,
**kwargs):
if session:
try:
kwargs.setdefault('interface', endpoint_type)
except KeyError:
pass
return SessionClient(session=session,
service_type=service_type,
service_name=service_name,
region_name=region_name,
database_service_name=database_service_name,
connect_retries=retries,
**kwargs)
else:
return HTTPClient(username,
password,
projectid=project_id,
auth_url=auth_url,
insecure=insecure,
timeout=timeout,
tenant_id=tenant_id,
proxy_token=proxy_token,
proxy_tenant_id=proxy_tenant_id,
region_name=region_name,
endpoint_type=endpoint_type,
service_type=service_type,
service_name=service_name,
database_service_name=database_service_name,
retries=retries,
http_log_debug=http_log_debug,
cacert=cacert,
bypass_url=bypass_url,
auth_system=auth_system,
auth_plugin=auth_plugin,
)
def get_version_map():
return {
'1.0': 'troveclient.v1.client.Client',
}
def Client(version, *args, **kwargs):
version_map = get_version_map()
client_class = client.BaseClient.get_class('database',
version, version_map)
return client_class(*args, **kwargs)
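# Illustrative use of the version-dispatching factory above. The keystone
# session setup shown is an assumption and not part of this module:
#
#   from keystoneauth1 import session as ks_session
#   sess = ks_session.Session(auth=some_auth_plugin)
#   trove = Client('1.0', session=sess, service_type='database')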
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for debugger functionalities in tf.compat.v1.Session with grpc:// URLs.
This test file focuses on the grpc:// debugging of local (non-distributed)
tf.Sessions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.lib import debug_utils
from tensorflow.python.debug.lib import grpc_debug_test_server
from tensorflow.python.debug.lib import session_debug_testlib
from tensorflow.python.debug.wrappers import framework
from tensorflow.python.debug.wrappers import grpc_wrapper
from tensorflow.python.debug.wrappers import hooks
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import monitored_session
class GrpcDebugServerTest(test_util.TensorFlowTestCase):
def testRepeatedRunServerRaisesException(self):
(_, _, _, server_thread,
server) = grpc_debug_test_server.start_server_on_separate_thread(
poll_server=True)
    # The server is started asynchronously; it needs to be polled until its
    # state has become "started".
with self.assertRaisesRegexp(
ValueError, "Server has already started running"):
server.run_server()
server.stop_server().wait()
server_thread.join()
def testRepeatedStopServerRaisesException(self):
(_, _, _, server_thread,
server) = grpc_debug_test_server.start_server_on_separate_thread(
poll_server=True)
server.stop_server().wait()
server_thread.join()
with self.assertRaisesRegexp(ValueError, "Server has already stopped"):
server.stop_server().wait()
def testRunServerAfterStopRaisesException(self):
(_, _, _, server_thread,
server) = grpc_debug_test_server.start_server_on_separate_thread(
poll_server=True)
server.stop_server().wait()
server_thread.join()
with self.assertRaisesRegexp(ValueError, "Server has already stopped"):
server.run_server()
def testStartServerWithoutBlocking(self):
(_, _, _, server_thread,
server) = grpc_debug_test_server.start_server_on_separate_thread(
poll_server=True, blocking=False)
# The thread that starts the server shouldn't block, so we should be able to
# join it before stopping the server.
server_thread.join()
server.stop_server().wait()
@test_util.run_v1_only("b/120545219")
class SessionDebugGrpcTest(session_debug_testlib.SessionDebugTestBase):
@classmethod
def setUpClass(cls):
session_debug_testlib.SessionDebugTestBase.setUpClass()
(cls._server_port, cls._debug_server_url, cls._server_dump_dir,
cls._server_thread,
cls._server) = grpc_debug_test_server.start_server_on_separate_thread()
@classmethod
def tearDownClass(cls):
# Stop the test server and join the thread.
cls._server.stop_server().wait()
cls._server_thread.join()
session_debug_testlib.SessionDebugTestBase.tearDownClass()
def setUp(self):
# Override the dump root as the test server's dump directory.
self._dump_root = self._server_dump_dir
def tearDown(self):
if os.path.isdir(self._server_dump_dir):
shutil.rmtree(self._server_dump_dir)
session_debug_testlib.SessionDebugTestBase.tearDown(self)
def _debug_urls(self, run_number=None):
return ["grpc://localhost:%d" % self._server_port]
def _debug_dump_dir(self, run_number=None):
if run_number is None:
return self._dump_root
else:
return os.path.join(self._dump_root, "run_%d" % run_number)
def testConstructGrpcDebugWrapperSessionWithInvalidTypeRaisesException(self):
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
with self.assertRaisesRegexp(
TypeError, "Expected type str or list in grpc_debug_server_addresses"):
grpc_wrapper.GrpcDebugWrapperSession(sess, 1337)
def testConstructGrpcDebugWrapperSessionWithInvalidTypeRaisesException2(self):
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
with self.assertRaisesRegexp(
TypeError, "Expected type str in list grpc_debug_server_addresses"):
grpc_wrapper.GrpcDebugWrapperSession(sess, ["localhost:1337", 1338])
def testUseInvalidWatchFnTypeWithGrpcDebugWrapperSessionRaisesException(self):
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
with self.assertRaises(TypeError):
grpc_wrapper.GrpcDebugWrapperSession(
sess, "localhost:%d" % self._server_port, watch_fn="foo")
def testGrpcDebugWrapperSessionWithoutWatchFnWorks(self):
u = variables.VariableV1(2.1, name="u")
v = variables.VariableV1(20.0, name="v")
w = math_ops.multiply(u, v, name="w")
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
sess.run(u.initializer)
sess.run(v.initializer)
sess = grpc_wrapper.GrpcDebugWrapperSession(
sess, "localhost:%d" % self._server_port)
w_result = sess.run(w)
self.assertAllClose(42.0, w_result)
dump = debug_data.DebugDumpDir(self._dump_root)
self.assertLessEqual(5, dump.size)
self.assertAllClose([2.1], dump.get_tensors("u", 0, "DebugIdentity"))
self.assertAllClose([2.1], dump.get_tensors("u/read", 0, "DebugIdentity"))
self.assertAllClose([20.0], dump.get_tensors("v", 0, "DebugIdentity"))
self.assertAllClose([20.0], dump.get_tensors("v/read", 0, "DebugIdentity"))
self.assertAllClose([42.0], dump.get_tensors("w", 0, "DebugIdentity"))
def testGrpcDebugWrapperSessionWithWatchFnWorks(self):
def watch_fn(feeds, fetch_keys):
del feeds, fetch_keys
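      # The tuple returned below is interpreted as (debug_ops,
      # node_name_regex_whitelist, op_type_regex_whitelist), so only tensors
      # whose node names match ".*/read" are watched. (This reading is inferred
      # from the WatchOptions-based test further down, not stated here.)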
return ["DebugIdentity", "DebugNumericSummary"], r".*/read", None
u = variables.VariableV1(2.1, name="u")
v = variables.VariableV1(20.0, name="v")
w = math_ops.multiply(u, v, name="w")
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
sess.run(u.initializer)
sess.run(v.initializer)
sess = grpc_wrapper.GrpcDebugWrapperSession(
sess, "localhost:%d" % self._server_port, watch_fn=watch_fn)
w_result = sess.run(w)
self.assertAllClose(42.0, w_result)
dump = debug_data.DebugDumpDir(self._dump_root)
self.assertEqual(4, dump.size)
self.assertAllClose([2.1], dump.get_tensors("u/read", 0, "DebugIdentity"))
self.assertEqual(
14, len(dump.get_tensors("u/read", 0, "DebugNumericSummary")[0]))
self.assertAllClose([20.0], dump.get_tensors("v/read", 0, "DebugIdentity"))
self.assertEqual(
14, len(dump.get_tensors("v/read", 0, "DebugNumericSummary")[0]))
def testGrpcDebugHookWithStatelessWatchFnWorks(self):
# Perform some set up. Specifically, construct a simple TensorFlow graph and
# create a watch function for certain ops.
def watch_fn(feeds, fetch_keys):
del feeds, fetch_keys
return framework.WatchOptions(
debug_ops=["DebugIdentity", "DebugNumericSummary"],
node_name_regex_whitelist=r".*/read",
op_type_regex_whitelist=None,
tolerate_debug_op_creation_failures=True)
u = variables.VariableV1(2.1, name="u")
v = variables.VariableV1(20.0, name="v")
w = math_ops.multiply(u, v, name="w")
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
sess.run(u.initializer)
sess.run(v.initializer)
# Create a hook. One could use this hook with say a tflearn Estimator.
# However, we use a HookedSession in this test to avoid depending on the
# internal implementation of Estimators.
grpc_debug_hook = hooks.GrpcDebugHook(
["localhost:%d" % self._server_port], watch_fn=watch_fn)
sess = monitored_session._HookedSession(sess, [grpc_debug_hook])
# Run the hooked session. This should stream tensor data to the GRPC
# endpoints.
w_result = sess.run(w)
# Verify that the hook monitored the correct tensors.
self.assertAllClose(42.0, w_result)
dump = debug_data.DebugDumpDir(self._dump_root)
self.assertEqual(4, dump.size)
self.assertAllClose([2.1], dump.get_tensors("u/read", 0, "DebugIdentity"))
self.assertEqual(
14, len(dump.get_tensors("u/read", 0, "DebugNumericSummary")[0]))
self.assertAllClose([20.0], dump.get_tensors("v/read", 0, "DebugIdentity"))
self.assertEqual(
14, len(dump.get_tensors("v/read", 0, "DebugNumericSummary")[0]))
def testTensorBoardDebugHookWorks(self):
u = variables.VariableV1(2.1, name="u")
v = variables.VariableV1(20.0, name="v")
w = math_ops.multiply(u, v, name="w")
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
sess.run(u.initializer)
sess.run(v.initializer)
grpc_debug_hook = hooks.TensorBoardDebugHook(
["localhost:%d" % self._server_port])
sess = monitored_session._HookedSession(sess, [grpc_debug_hook])
# Activate watch point on a tensor before calling sess.run().
self._server.request_watch("u/read", 0, "DebugIdentity")
self.assertAllClose(42.0, sess.run(w))
dump = debug_data.DebugDumpDir(self._dump_root)
self.assertAllClose([2.1], dump.get_tensors("u/read", 0, "DebugIdentity"))
# Check that the server has received the stack trace.
self.assertTrue(self._server.query_op_traceback("u"))
self.assertTrue(self._server.query_op_traceback("u/read"))
self.assertTrue(self._server.query_op_traceback("v"))
self.assertTrue(self._server.query_op_traceback("v/read"))
self.assertTrue(self._server.query_op_traceback("w"))
# Check that the server has received the python file content.
# Query an arbitrary line to make sure that is the case.
with open(__file__, "rt") as this_source_file:
first_line = this_source_file.readline().strip()
self.assertEqual(
first_line, self._server.query_source_file_line(__file__, 1))
self._server.clear_data()
# Call sess.run() again, and verify that this time the traceback and source
    # code are not sent, because the graph version is not newer.
self.assertAllClose(42.0, sess.run(w))
with self.assertRaises(ValueError):
self._server.query_op_traceback("delta_1")
with self.assertRaises(ValueError):
self._server.query_source_file_line(__file__, 1)
def testTensorBoardDebugHookDisablingTracebackSourceCodeSendingWorks(self):
u = variables.VariableV1(2.1, name="u")
v = variables.VariableV1(20.0, name="v")
w = math_ops.multiply(u, v, name="w")
sess = session.Session(
config=session_debug_testlib.no_rewrite_session_config())
sess.run(variables.global_variables_initializer())
grpc_debug_hook = hooks.TensorBoardDebugHook(
["localhost:%d" % self._server_port],
send_traceback_and_source_code=False)
sess = monitored_session._HookedSession(sess, [grpc_debug_hook])
# Activate watch point on a tensor before calling sess.run().
self._server.request_watch("u/read", 0, "DebugIdentity")
self.assertAllClose(42.0, sess.run(w))
# Check that the server has _not_ received any tracebacks, as a result of
# the disabling above.
with self.assertRaisesRegexp(
ValueError, r"Op .*u/read.* does not exist"):
self.assertTrue(self._server.query_op_traceback("u/read"))
with self.assertRaisesRegexp(
ValueError, r".* has not received any source file"):
self._server.query_source_file_line(__file__, 1)
  def testConstructGrpcDebugHookWithOrWithoutGrpcInUrlWorks(self):
hooks.GrpcDebugHook(["grpc://foo:42424"])
hooks.GrpcDebugHook(["foo:42424"])
class SessionDebugConcurrentTest(
session_debug_testlib.DebugConcurrentRunCallsTest):
@classmethod
def setUpClass(cls):
session_debug_testlib.SessionDebugTestBase.setUpClass()
(cls._server_port, cls._debug_server_url, cls._server_dump_dir,
cls._server_thread,
cls._server) = grpc_debug_test_server.start_server_on_separate_thread()
@classmethod
def tearDownClass(cls):
# Stop the test server and join the thread.
cls._server.stop_server().wait()
cls._server_thread.join()
session_debug_testlib.SessionDebugTestBase.tearDownClass()
def setUp(self):
self._num_concurrent_runs = 3
self._dump_roots = []
for i in range(self._num_concurrent_runs):
self._dump_roots.append(
os.path.join(self._server_dump_dir, "thread%d" % i))
def tearDown(self):
ops.reset_default_graph()
if os.path.isdir(self._server_dump_dir):
shutil.rmtree(self._server_dump_dir)
def _get_concurrent_debug_urls(self):
urls = []
for i in range(self._num_concurrent_runs):
urls.append(self._debug_server_url + "/thread%d" % i)
return urls
@test_util.run_v1_only("b/120545219")
class SessionDebugGrpcGatingTest(test_util.TensorFlowTestCase):
"""Test server gating of debug ops."""
@classmethod
def setUpClass(cls):
(cls._server_port_1, cls._debug_server_url_1, _, cls._server_thread_1,
cls._server_1) = grpc_debug_test_server.start_server_on_separate_thread(
dump_to_filesystem=False)
(cls._server_port_2, cls._debug_server_url_2, _, cls._server_thread_2,
cls._server_2) = grpc_debug_test_server.start_server_on_separate_thread(
dump_to_filesystem=False)
cls._servers_and_threads = [(cls._server_1, cls._server_thread_1),
(cls._server_2, cls._server_thread_2)]
@classmethod
def tearDownClass(cls):
for server, thread in cls._servers_and_threads:
server.stop_server().wait()
thread.join()
def tearDown(self):
ops.reset_default_graph()
self._server_1.clear_data()
self._server_2.clear_data()
def testToggleEnableTwoDebugWatchesNoCrosstalkBetweenDebugNodes(self):
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v_1 = variables.VariableV1(50.0, name="v_1")
v_2 = variables.VariableV1(-50.0, name="v_1")
delta_1 = constant_op.constant(5.0, name="delta_1")
delta_2 = constant_op.constant(-5.0, name="delta_2")
inc_v_1 = state_ops.assign_add(v_1, delta_1, name="inc_v_1")
inc_v_2 = state_ops.assign_add(v_2, delta_2, name="inc_v_2")
sess.run([v_1.initializer, v_2.initializer])
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugIdentity(gated_grpc=true)",
"DebugNumericSummary(gated_grpc=true)"],
debug_urls=[self._debug_server_url_1])
for i in xrange(4):
self._server_1.clear_data()
if i % 2 == 0:
self._server_1.request_watch("delta_1", 0, "DebugIdentity")
self._server_1.request_watch("delta_2", 0, "DebugIdentity")
self._server_1.request_unwatch("delta_1", 0, "DebugNumericSummary")
self._server_1.request_unwatch("delta_2", 0, "DebugNumericSummary")
else:
self._server_1.request_unwatch("delta_1", 0, "DebugIdentity")
self._server_1.request_unwatch("delta_2", 0, "DebugIdentity")
self._server_1.request_watch("delta_1", 0, "DebugNumericSummary")
self._server_1.request_watch("delta_2", 0, "DebugNumericSummary")
sess.run([inc_v_1, inc_v_2],
options=run_options, run_metadata=run_metadata)
# Watched debug tensors are:
# Run 0: delta_[1,2]:0:DebugIdentity
# Run 1: delta_[1,2]:0:DebugNumericSummary
# Run 2: delta_[1,2]:0:DebugIdentity
# Run 3: delta_[1,2]:0:DebugNumericSummary
self.assertEqual(2, len(self._server_1.debug_tensor_values))
if i % 2 == 0:
self.assertAllClose(
[5.0],
self._server_1.debug_tensor_values["delta_1:0:DebugIdentity"])
self.assertAllClose(
[-5.0],
self._server_1.debug_tensor_values["delta_2:0:DebugIdentity"])
else:
self.assertAllClose(
[[1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 5.0, 5.0, 5.0,
0.0, 1.0, 0.0]],
self._server_1.debug_tensor_values[
"delta_1:0:DebugNumericSummary"])
self.assertAllClose(
[[1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, -5.0, -5.0, -5.0,
0.0, 1.0, 0.0]],
self._server_1.debug_tensor_values[
"delta_2:0:DebugNumericSummary"])
def testToggleWatchesOnCoreMetadata(self):
(_, debug_server_url, _, server_thread,
server) = grpc_debug_test_server.start_server_on_separate_thread(
dump_to_filesystem=False,
toggle_watch_on_core_metadata=[("toggled_1", 0, "DebugIdentity"),
("toggled_2", 0, "DebugIdentity")])
self._servers_and_threads.append((server, server_thread))
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v_1 = variables.VariableV1(50.0, name="v_1")
v_2 = variables.VariableV1(-50.0, name="v_1")
# These two nodes have names that match those in the
# toggle_watch_on_core_metadata argument used when calling
# start_server_on_separate_thread().
toggled_1 = constant_op.constant(5.0, name="toggled_1")
toggled_2 = constant_op.constant(-5.0, name="toggled_2")
inc_v_1 = state_ops.assign_add(v_1, toggled_1, name="inc_v_1")
inc_v_2 = state_ops.assign_add(v_2, toggled_2, name="inc_v_2")
sess.run([v_1.initializer, v_2.initializer])
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugIdentity(gated_grpc=true)"],
debug_urls=[debug_server_url])
for i in xrange(4):
server.clear_data()
sess.run([inc_v_1, inc_v_2],
options=run_options, run_metadata=run_metadata)
if i % 2 == 0:
self.assertEqual(2, len(server.debug_tensor_values))
self.assertAllClose(
[5.0],
server.debug_tensor_values["toggled_1:0:DebugIdentity"])
self.assertAllClose(
[-5.0],
server.debug_tensor_values["toggled_2:0:DebugIdentity"])
else:
self.assertEqual(0, len(server.debug_tensor_values))
def testToggleEnableTwoDebugWatchesNoCrosstalkBetweenServers(self):
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v = variables.VariableV1(50.0, name="v")
delta = constant_op.constant(5.0, name="delta")
inc_v = state_ops.assign_add(v, delta, name="inc_v")
sess.run(v.initializer)
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugIdentity(gated_grpc=true)"],
debug_urls=[self._debug_server_url_1, self._debug_server_url_2])
for i in xrange(4):
self._server_1.clear_data()
self._server_2.clear_data()
if i % 2 == 0:
self._server_1.request_watch("delta", 0, "DebugIdentity")
self._server_2.request_watch("v", 0, "DebugIdentity")
else:
self._server_1.request_unwatch("delta", 0, "DebugIdentity")
self._server_2.request_unwatch("v", 0, "DebugIdentity")
sess.run(inc_v, options=run_options, run_metadata=run_metadata)
if i % 2 == 0:
self.assertEqual(1, len(self._server_1.debug_tensor_values))
self.assertEqual(1, len(self._server_2.debug_tensor_values))
self.assertAllClose(
[5.0],
self._server_1.debug_tensor_values["delta:0:DebugIdentity"])
self.assertAllClose(
[50 + 5.0 * i],
self._server_2.debug_tensor_values["v:0:DebugIdentity"])
else:
self.assertEqual(0, len(self._server_1.debug_tensor_values))
self.assertEqual(0, len(self._server_2.debug_tensor_values))
def testToggleBreakpointsWorks(self):
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v_1 = variables.VariableV1(50.0, name="v_1")
v_2 = variables.VariableV1(-50.0, name="v_2")
delta_1 = constant_op.constant(5.0, name="delta_1")
delta_2 = constant_op.constant(-5.0, name="delta_2")
inc_v_1 = state_ops.assign_add(v_1, delta_1, name="inc_v_1")
inc_v_2 = state_ops.assign_add(v_2, delta_2, name="inc_v_2")
sess.run([v_1.initializer, v_2.initializer])
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.watch_graph(
run_options,
sess.graph,
debug_ops=["DebugIdentity(gated_grpc=true)"],
debug_urls=[self._debug_server_url_1])
for i in xrange(4):
self._server_1.clear_data()
if i in (0, 2):
# Enable breakpoint at delta_[1,2]:0:DebugIdentity in runs 0 and 2.
self._server_1.request_watch(
"delta_1", 0, "DebugIdentity", breakpoint=True)
self._server_1.request_watch(
"delta_2", 0, "DebugIdentity", breakpoint=True)
else:
# Disable the breakpoint in runs 1 and 3.
self._server_1.request_unwatch("delta_1", 0, "DebugIdentity")
self._server_1.request_unwatch("delta_2", 0, "DebugIdentity")
output = sess.run([inc_v_1, inc_v_2],
options=run_options, run_metadata=run_metadata)
self.assertAllClose([50.0 + 5.0 * (i + 1), -50 - 5.0 * (i + 1)], output)
if i in (0, 2):
# During runs 0 and 2, the server should have received the published
# debug tensor delta:0:DebugIdentity. The breakpoint should have been
          # unblocked by EventReply responses from the server.
self.assertAllClose(
[5.0],
self._server_1.debug_tensor_values["delta_1:0:DebugIdentity"])
self.assertAllClose(
[-5.0],
self._server_1.debug_tensor_values["delta_2:0:DebugIdentity"])
# After the runs, the server should have properly registered the
          # breakpoints due to the request_watch calls.
self.assertSetEqual({("delta_1", 0, "DebugIdentity"),
("delta_2", 0, "DebugIdentity")},
self._server_1.breakpoints)
else:
# After the end of runs 1 and 3, the server has received the requests
# to disable the breakpoint at delta:0:DebugIdentity.
self.assertSetEqual(set(), self._server_1.breakpoints)
def testTensorBoardDebuggerWrapperToggleBreakpointsWorks(self):
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v_1 = variables.VariableV1(50.0, name="v_1")
v_2 = variables.VariableV1(-50.0, name="v_2")
delta_1 = constant_op.constant(5.0, name="delta_1")
delta_2 = constant_op.constant(-5.0, name="delta_2")
inc_v_1 = state_ops.assign_add(v_1, delta_1, name="inc_v_1")
inc_v_2 = state_ops.assign_add(v_2, delta_2, name="inc_v_2")
sess.run([v_1.initializer, v_2.initializer])
# The TensorBoardDebugWrapperSession should add a DebugIdentity debug op
# with attribute gated_grpc=True for every tensor in the graph.
sess = grpc_wrapper.TensorBoardDebugWrapperSession(
sess, self._debug_server_url_1)
for i in xrange(4):
self._server_1.clear_data()
if i in (0, 2):
# Enable breakpoint at delta_[1,2]:0:DebugIdentity in runs 0 and 2.
self._server_1.request_watch(
"delta_1", 0, "DebugIdentity", breakpoint=True)
self._server_1.request_watch(
"delta_2", 0, "DebugIdentity", breakpoint=True)
else:
# Disable the breakpoint in runs 1 and 3.
self._server_1.request_unwatch("delta_1", 0, "DebugIdentity")
self._server_1.request_unwatch("delta_2", 0, "DebugIdentity")
output = sess.run([inc_v_1, inc_v_2])
self.assertAllClose([50.0 + 5.0 * (i + 1), -50 - 5.0 * (i + 1)], output)
if i in (0, 2):
# During runs 0 and 2, the server should have received the published
# debug tensor delta:0:DebugIdentity. The breakpoint should have been
          # unblocked by EventReply responses from the server.
self.assertAllClose(
[5.0],
self._server_1.debug_tensor_values["delta_1:0:DebugIdentity"])
self.assertAllClose(
[-5.0],
self._server_1.debug_tensor_values["delta_2:0:DebugIdentity"])
# After the runs, the server should have properly registered the
# breakpoints.
else:
# After the end of runs 1 and 3, the server has received the requests
# to disable the breakpoint at delta:0:DebugIdentity.
self.assertSetEqual(set(), self._server_1.breakpoints)
if i == 0:
# Check that the server has received the stack trace.
self.assertTrue(self._server_1.query_op_traceback("delta_1"))
self.assertTrue(self._server_1.query_op_traceback("delta_2"))
self.assertTrue(self._server_1.query_op_traceback("inc_v_1"))
self.assertTrue(self._server_1.query_op_traceback("inc_v_2"))
# Check that the server has received the python file content.
# Query an arbitrary line to make sure that is the case.
with open(__file__, "rt") as this_source_file:
first_line = this_source_file.readline().strip()
self.assertEqual(
first_line, self._server_1.query_source_file_line(__file__, 1))
else:
# In later Session.run() calls, the traceback shouldn't have been sent
# because it is already sent in the 1st call. So calling
# query_op_traceback() should lead to an exception, because the test
# debug server clears the data at the beginning of every iteration.
with self.assertRaises(ValueError):
self._server_1.query_op_traceback("delta_1")
with self.assertRaises(ValueError):
self._server_1.query_source_file_line(__file__, 1)
def testTensorBoardDebuggerWrapperDisablingTracebackSourceSendingWorks(self):
with session.Session(
config=session_debug_testlib.no_rewrite_session_config()) as sess:
v_1 = variables.VariableV1(50.0, name="v_1")
v_2 = variables.VariableV1(-50.0, name="v_2")
delta_1 = constant_op.constant(5.0, name="delta_1")
delta_2 = constant_op.constant(-5.0, name="delta_2")
inc_v_1 = state_ops.assign_add(v_1, delta_1, name="inc_v_1")
inc_v_2 = state_ops.assign_add(v_2, delta_2, name="inc_v_2")
sess.run(variables.global_variables_initializer())
# Disable the sending of traceback and source code.
sess = grpc_wrapper.TensorBoardDebugWrapperSession(
sess, self._debug_server_url_1, send_traceback_and_source_code=False)
for i in xrange(4):
self._server_1.clear_data()
if i == 0:
self._server_1.request_watch(
"delta_1", 0, "DebugIdentity", breakpoint=True)
output = sess.run([inc_v_1, inc_v_2])
self.assertAllClose([50.0 + 5.0 * (i + 1), -50 - 5.0 * (i + 1)], output)
# No op traceback or source code should have been received by the debug
# server due to the disabling above.
with self.assertRaisesRegexp(
ValueError, r"Op .*delta_1.* does not exist"):
self.assertTrue(self._server_1.query_op_traceback("delta_1"))
with self.assertRaisesRegexp(
ValueError, r".* has not received any source file"):
self._server_1.query_source_file_line(__file__, 1)
def testGetGrpcDebugWatchesReturnsCorrectAnswer(self):
with session.Session() as sess:
v = variables.VariableV1(50.0, name="v")
delta = constant_op.constant(5.0, name="delta")
inc_v = state_ops.assign_add(v, delta, name="inc_v")
sess.run(v.initializer)
# Before any debugged runs, the server should be aware of no debug
# watches.
self.assertEqual([], self._server_1.gated_grpc_debug_watches())
run_metadata = config_pb2.RunMetadata()
run_options = config_pb2.RunOptions(output_partition_graphs=True)
debug_utils.add_debug_tensor_watch(
run_options, "delta", output_slot=0,
debug_ops=["DebugNumericSummary(gated_grpc=true)"],
debug_urls=[self._debug_server_url_1])
debug_utils.add_debug_tensor_watch(
run_options, "v", output_slot=0,
debug_ops=["DebugIdentity"],
debug_urls=[self._debug_server_url_1])
sess.run(inc_v, options=run_options, run_metadata=run_metadata)
# After the first run, the server should have noted the debug watches
# for which gated_grpc == True, but not the ones with gated_grpc == False.
self.assertEqual(1, len(self._server_1.gated_grpc_debug_watches()))
debug_watch = self._server_1.gated_grpc_debug_watches()[0]
self.assertEqual("delta", debug_watch.node_name)
self.assertEqual(0, debug_watch.output_slot)
self.assertEqual("DebugNumericSummary", debug_watch.debug_op)
@test_util.run_v1_only("b/120545219")
class DelayedDebugServerTest(test_util.TensorFlowTestCase):
def testDebuggedSessionRunWorksWithDelayedDebugServerStartup(self):
"""Test debugged Session.run() tolerates delayed debug server startup."""
ops.reset_default_graph()
# Start a debug server asynchronously, with a certain amount of delay.
(debug_server_port, _, _, server_thread,
debug_server) = grpc_debug_test_server.start_server_on_separate_thread(
server_start_delay_sec=2.0, dump_to_filesystem=False)
with self.cached_session() as sess:
a_init = constant_op.constant(42.0, name="a_init")
a = variables.VariableV1(a_init, name="a")
def watch_fn(fetches, feeds):
del fetches, feeds
return framework.WatchOptions(debug_ops=["DebugIdentity"])
sess = grpc_wrapper.GrpcDebugWrapperSession(
sess, "localhost:%d" % debug_server_port, watch_fn=watch_fn)
sess.run(a.initializer)
self.assertAllClose(
[42.0], debug_server.debug_tensor_values["a_init:0:DebugIdentity"])
debug_server.stop_server().wait()
server_thread.join()
if __name__ == "__main__":
googletest.main()
|
|
import json
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.paginator import Paginator, InvalidPage
from django.db import models
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
Relationship = models.get_model('relationships', 'relationship')
FOLLOWING_PER_PAGE = getattr(settings, 'RELATIONSHIPS_FOLLOWING_PER_PAGE', 20)
FOLLOWERS_PER_PAGE = getattr(settings, 'RELATIONSHIPS_FOLLOWERS_PER_PAGE', 20)
def following(request, username,
template_name='relationships/relationship_following.html',
flat=True):
from_user = get_object_or_404(User, username=username)
following_ids = Relationship.objects.get_friends_for_user(from_user, flat=flat)
following = User.objects.filter(pk__in=following_ids)
paginator = Paginator(following, FOLLOWING_PER_PAGE)
try:
page = paginator.page(int(request.GET.get('page', 1)))
except InvalidPage:
raise Http404("No such page.")
return render_to_response(template_name, {
'person': from_user,
'page': page,
'paginator': paginator,
}, context_instance=RequestContext(request))
def followers(request, username,
template_name='relationships/relationship_followers.html',
flat=True):
to_user = get_object_or_404(User, username=username)
    followers_ids = Relationship.objects.get_followers_for_user(to_user, flat=flat)
followers = User.objects.filter(pk__in=followers_ids)
paginator = Paginator(followers, FOLLOWERS_PER_PAGE)
try:
page = paginator.page(int(request.GET.get('page', 1)))
except InvalidPage:
raise Http404("No such page.")
return render_to_response(template_name, {
'person': to_user,
'page': page,
'paginator': paginator,
}, context_instance=RequestContext(request))
@login_required
def follow(request, username,
template_name='relationships/relationship_add_confirm.html',
success_template_name='relationships/relationship_add_success.html',
content_type='text/html'):
"""
Allows a user to follow another user.
Templates: ``relationships/relationship_add_confirm.html`` and ``relationships/relationship_add_success.html``
Context:
to_user
User object
"""
to_user = get_object_or_404(User, username=username)
from_user = request.user
next = request.GET.get('next', None)
if request.method == 'POST':
relationship, created = Relationship.objects.get_or_create(from_user=from_user, to_user=to_user)
if request.is_ajax():
response = {
'success': 'Success',
'to_user': {
'username': to_user.username,
'user_id': to_user.pk
},
'from_user': {
'username': from_user.username,
'user_id': from_user.pk
}
}
return HttpResponse(json.dumps(response), content_type="application/json")
if next:
return HttpResponseRedirect(next)
template_name = success_template_name
context = {
'to_user': to_user,
'next': next
}
return render_to_response(template_name, context, context_instance=RequestContext(request), content_type=content_type)
@login_required
def unfollow(request, username,
template_name='relationships/relationship_delete_confirm.html',
success_template_name='relationships/relationship_delete_success.html',
content_type='text/html'):
"""
Allows a user to stop following another user.
Templates: ``relationships/relationship_delete_confirm.html`` and ``relationships/relationship_delete_success.html``
Context:
to_user
User object
"""
to_user = get_object_or_404(User, username=username)
from_user = request.user
next = request.GET.get('next', None)
if request.method == 'POST':
relationship = get_object_or_404(Relationship, to_user=to_user, from_user=from_user)
relationship.delete()
if request.is_ajax():
response = {
'success': 'Success',
'to_user': {
'username': to_user.username,
'user_id': to_user.pk
},
'from_user': {
'username': from_user.username,
'user_id': from_user.pk
}
}
return HttpResponse(json.dumps(response), content_type="application/json")
if next:
return HttpResponseRedirect(next)
template_name = success_template_name
context = {
'to_user': to_user,
'next': next
}
return render_to_response(template_name, context, context_instance=RequestContext(request), content_type=content_type)
@login_required
def block(request, username,
template_name='relationships/block_confirm.html',
success_template_name='relationships/block_success.html',
content_type='text/html'):
"""
Allows a user to block another user.
Templates: ``relationships/block_confirm.html`` and ``relationships/block_success.html``
Context:
user_to_block
User object
"""
user_to_block = get_object_or_404(User, username=username)
user = request.user
next = request.GET.get('next', None)
if request.method == 'POST':
relationship, created = Relationship.objects.get_or_create(to_user=user_to_block, from_user=user)
relationship.is_blocked = True
relationship.save()
if request.is_ajax():
response = {'success': 'Success'}
return HttpResponse(json.dumps(response), content_type="application/json")
if next:
return HttpResponseRedirect(next)
template_name = success_template_name
context = {
'user_to_block': user_to_block,
'next': next
}
return render_to_response(template_name, context, context_instance=RequestContext(request), content_type=content_type)
@login_required
def unblock(request, username,
template_name='relationships/block_delete_confirm.html',
success_template_name='relationships/block_delete_success.html',
content_type='text/html'):
"""
Allows a user to stop blocking another user.
Templates: ``relationships/block_delete_confirm.html`` and ``relationships/block_delete_success.html``
Context:
user_to_block
User object
"""
user_to_block = get_object_or_404(User, username=username)
user = request.user
if request.method == 'POST':
relationship = get_object_or_404(Relationship, to_user=user_to_block, from_user=user, is_blocked=True)
relationship.delete()
if request.is_ajax():
response = {'success': 'Success'}
return HttpResponse(json.dumps(response), content_type="application/json")
else:
template_name = success_template_name
context = {'user_to_block': user_to_block}
return render_to_response(template_name, context, context_instance=RequestContext(request), content_type=content_type)
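# --- Illustrative addition (not from the original package) ---
# A minimal URLconf sketch showing how the views above could be wired up.
# The regexes and URL names are assumptions, and the old patterns()/string-view
# style is used only because it matches the Django era of this module
# (render_to_response with context_instance).
from django.conf.urls import patterns, url

urlpatterns = patterns('relationships.views',
    url(r'^(?P<username>[\w.@+-]+)/following/$', 'following', name='relationship_following'),
    url(r'^(?P<username>[\w.@+-]+)/followers/$', 'followers', name='relationship_followers'),
    url(r'^(?P<username>[\w.@+-]+)/follow/$', 'follow', name='relationship_follow'),
    url(r'^(?P<username>[\w.@+-]+)/unfollow/$', 'unfollow', name='relationship_unfollow'),
    url(r'^(?P<username>[\w.@+-]+)/block/$', 'block', name='relationship_block'),
    url(r'^(?P<username>[\w.@+-]+)/unblock/$', 'unblock', name='relationship_unblock'),
)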
|
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Test the conversion to/from astropy.table
"""
import io
import os
import pathlib
import pytest
import numpy as np
from astropy.config import set_temp_config, reload_config
from astropy.utils.data import get_pkg_data_filename, get_pkg_data_fileobj
from astropy.io.votable.table import parse, writeto
from astropy.io.votable import tree, conf
from astropy.io.votable.exceptions import VOWarning, W39, E25
from astropy.table import Column, Table
from astropy.units import Unit
from astropy.utils.exceptions import AstropyDeprecationWarning
def test_table(tmpdir):
# Read the VOTABLE
votable = parse(get_pkg_data_filename('data/regression.xml'))
table = votable.get_first_table()
astropy_table = table.to_table()
for name in table.array.dtype.names:
assert np.all(astropy_table.mask[name] == table.array.mask[name])
votable2 = tree.VOTableFile.from_table(astropy_table)
t = votable2.get_first_table()
field_types = [
('string_test', {'datatype': 'char', 'arraysize': '*'}),
('string_test_2', {'datatype': 'char', 'arraysize': '10'}),
('unicode_test', {'datatype': 'unicodeChar', 'arraysize': '*'}),
('fixed_unicode_test', {'datatype': 'unicodeChar', 'arraysize': '10'}),
('string_array_test', {'datatype': 'char', 'arraysize': '4'}),
('unsignedByte', {'datatype': 'unsignedByte'}),
('short', {'datatype': 'short'}),
('int', {'datatype': 'int'}),
('long', {'datatype': 'long'}),
('double', {'datatype': 'double'}),
('float', {'datatype': 'float'}),
('array', {'datatype': 'long', 'arraysize': '2*'}),
('bit', {'datatype': 'bit'}),
('bitarray', {'datatype': 'bit', 'arraysize': '3x2'}),
('bitvararray', {'datatype': 'bit', 'arraysize': '*'}),
('bitvararray2', {'datatype': 'bit', 'arraysize': '3x2*'}),
('floatComplex', {'datatype': 'floatComplex'}),
('doubleComplex', {'datatype': 'doubleComplex'}),
('doubleComplexArray', {'datatype': 'doubleComplex', 'arraysize': '*'}),
('doubleComplexArrayFixed', {'datatype': 'doubleComplex', 'arraysize': '2'}),
('boolean', {'datatype': 'bit'}),
('booleanArray', {'datatype': 'bit', 'arraysize': '4'}),
('nulls', {'datatype': 'int'}),
('nulls_array', {'datatype': 'int', 'arraysize': '2x2'}),
('precision1', {'datatype': 'double'}),
('precision2', {'datatype': 'double'}),
('doublearray', {'datatype': 'double', 'arraysize': '*'}),
('bitarray2', {'datatype': 'bit', 'arraysize': '16'})]
for field, type in zip(t.fields, field_types):
name, d = type
assert field.ID == name
assert field.datatype == d['datatype'], f'{name} expected {d["datatype"]} but get {field.datatype}' # noqa
if 'arraysize' in d:
assert field.arraysize == d['arraysize']
# W39: Bit values can not be masked
with pytest.warns(W39):
writeto(votable2, os.path.join(str(tmpdir), "through_table.xml"))
def test_read_through_table_interface(tmpdir):
with get_pkg_data_fileobj('data/regression.xml', encoding='binary') as fd:
t = Table.read(fd, format='votable', table_id='main_table')
assert len(t) == 5
# Issue 8354
assert t['float'].format is None
fn = os.path.join(str(tmpdir), "table_interface.xml")
# W39: Bit values can not be masked
with pytest.warns(W39):
t.write(fn, table_id='FOO', format='votable')
with open(fn, 'rb') as fd:
t2 = Table.read(fd, format='votable', table_id='FOO')
assert len(t2) == 5
def test_read_through_table_interface2():
with get_pkg_data_fileobj('data/regression.xml', encoding='binary') as fd:
t = Table.read(fd, format='votable', table_id='last_table')
assert len(t) == 0
def test_pass_kwargs_through_table_interface():
# Table.read() should pass on keyword arguments meant for parse()
filename = get_pkg_data_filename('data/nonstandard_units.xml')
t = Table.read(filename, format='votable', unit_format='generic')
assert t['Flux1'].unit == Unit("erg / (Angstrom cm2 s)")
def test_names_over_ids():
with get_pkg_data_fileobj('data/names.xml', encoding='binary') as fd:
votable = parse(fd)
table = votable.get_first_table().to_table(use_names_over_ids=True)
assert table.colnames == [
'Name', 'GLON', 'GLAT', 'RAdeg', 'DEdeg', 'Jmag', 'Hmag', 'Kmag',
'G3.6mag', 'G4.5mag', 'G5.8mag', 'G8.0mag', '4.5mag', '8.0mag',
'Emag', '24mag', 'f_Name']
def test_explicit_ids():
with get_pkg_data_fileobj('data/names.xml', encoding='binary') as fd:
votable = parse(fd)
table = votable.get_first_table().to_table(use_names_over_ids=False)
assert table.colnames == [
'col1', 'col2', 'col3', 'col4', 'col5', 'col6', 'col7', 'col8', 'col9',
'col10', 'col11', 'col12', 'col13', 'col14', 'col15', 'col16', 'col17']
def test_table_read_with_unnamed_tables():
"""
Issue #927
"""
with get_pkg_data_fileobj('data/names.xml', encoding='binary') as fd:
t = Table.read(fd, format='votable')
assert len(t) == 1
def test_votable_path_object():
"""
Testing when votable is passed as pathlib.Path object #4412.
"""
fpath = pathlib.Path(get_pkg_data_filename('data/names.xml'))
table = parse(fpath).get_first_table().to_table()
assert len(table) == 1
assert int(table[0][3]) == 266
def test_from_table_without_mask():
t = Table()
c = Column(data=[1, 2, 3], name='a')
t.add_column(c)
output = io.BytesIO()
t.write(output, format='votable')
def test_write_with_format():
t = Table()
c = Column(data=[1, 2, 3], name='a')
t.add_column(c)
output = io.BytesIO()
t.write(output, format='votable', tabledata_format="binary")
obuff = output.getvalue()
assert b'VOTABLE version="1.4"' in obuff
assert b'BINARY' in obuff
assert b'TABLEDATA' not in obuff
output = io.BytesIO()
t.write(output, format='votable', tabledata_format="binary2")
obuff = output.getvalue()
assert b'VOTABLE version="1.4"' in obuff
assert b'BINARY2' in obuff
assert b'TABLEDATA' not in obuff
def test_empty_table():
votable = parse(get_pkg_data_filename('data/empty_table.xml'))
table = votable.get_first_table()
astropy_table = table.to_table() # noqa
def test_no_field_not_empty_table():
votable = parse(get_pkg_data_filename('data/no_field_not_empty_table.xml'))
table = votable.get_first_table()
assert len(table.fields) == 0
assert len(table.infos) == 1
def test_no_field_not_empty_table_exception():
with pytest.raises(E25):
parse(get_pkg_data_filename('data/no_field_not_empty_table.xml'), verify='exception')
def test_binary2_masked_strings():
"""
Issue #8995
"""
# Read a VOTable which sets the null mask bit for each empty string value.
votable = parse(get_pkg_data_filename('data/binary2_masked_strings.xml'))
table = votable.get_first_table()
astropy_table = table.to_table()
# Ensure string columns have no masked values and can be written out
assert not np.any(table.array.mask['epoch_photometry_url'])
output = io.BytesIO()
astropy_table.write(output, format='votable')
class TestVerifyOptions:
# Start off by checking the default (ignore)
def test_default(self):
parse(get_pkg_data_filename('data/gemini.xml'))
# Then try the various explicit options
def test_verify_ignore(self):
parse(get_pkg_data_filename('data/gemini.xml'), verify='ignore')
def test_verify_warn(self):
with pytest.warns(VOWarning) as w:
parse(get_pkg_data_filename('data/gemini.xml'), verify='warn')
assert len(w) == 24
def test_verify_exception(self):
with pytest.raises(VOWarning):
parse(get_pkg_data_filename('data/gemini.xml'), verify='exception')
# Make sure the pedantic option still works for now (pending deprecation)
def test_pedantic_false(self):
with pytest.warns(VOWarning) as w:
parse(get_pkg_data_filename('data/gemini.xml'), pedantic=False)
assert len(w) == 24
def test_pedantic_true(self):
with pytest.raises(VOWarning):
parse(get_pkg_data_filename('data/gemini.xml'), pedantic=True)
# Make sure that the default behavior can be set via configuration items
def test_conf_verify_ignore(self):
with conf.set_temp('verify', 'ignore'):
parse(get_pkg_data_filename('data/gemini.xml'))
def test_conf_verify_warn(self):
with conf.set_temp('verify', 'warn'):
with pytest.warns(VOWarning) as w:
parse(get_pkg_data_filename('data/gemini.xml'))
assert len(w) == 24
def test_conf_verify_exception(self):
with conf.set_temp('verify', 'exception'):
with pytest.raises(VOWarning):
parse(get_pkg_data_filename('data/gemini.xml'))
# And make sure the old configuration item will keep working
def test_conf_pedantic_false(self, tmpdir):
with set_temp_config(tmpdir.strpath):
with open(tmpdir.join('astropy').join('astropy.cfg').strpath, 'w') as f:
f.write('[io.votable]\npedantic = False')
reload_config('astropy.io.votable')
with pytest.warns(VOWarning) as w:
parse(get_pkg_data_filename('data/gemini.xml'))
assert len(w) == 24
def test_conf_pedantic_true(self, tmpdir):
with set_temp_config(tmpdir.strpath):
with open(tmpdir.join('astropy').join('astropy.cfg').strpath, 'w') as f:
f.write('[io.votable]\npedantic = True')
reload_config('astropy.io.votable')
with pytest.raises(VOWarning):
parse(get_pkg_data_filename('data/gemini.xml'))
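# --- Illustrative addition (not part of the original test module) ---
# A minimal round-trip sketch distilled from the tests above: write an
# in-memory Table to a VOTable document and read it back through the same
# unified I/O interface exercised by test_from_table_without_mask and
# test_read_through_table_interface.
import io

from astropy.table import Table

t = Table({'a': [1, 2, 3]})
buf = io.BytesIO()
t.write(buf, format='votable')           # Table -> VOTable bytes
buf.seek(0)
t2 = Table.read(buf, format='votable')   # VOTable bytes -> Table
assert len(t2) == 3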
|
|
#!/usr/bin/env python
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This tool creates an html visualization of a TensorFlow Lite graph.
Example usage:
python visualize.py foo.tflite foo.html
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import sys
from tensorflow.python.platform import resource_loader
# Schema to use for flatbuffers
_SCHEMA = "third_party/tensorflow/lite/schema/schema.fbs"
# TODO(angerson): fix later when rules are simplified..
_SCHEMA = resource_loader.get_path_to_datafile("../schema/schema.fbs")
_BINARY = resource_loader.get_path_to_datafile("../../../flatbuffers/flatc")
# Account for different package positioning internal vs. external.
if not os.path.exists(_BINARY):
_BINARY = resource_loader.get_path_to_datafile(
"../../../../flatbuffers/flatc")
if not os.path.exists(_SCHEMA):
raise RuntimeError("Sorry, schema file cannot be found at %r" % _SCHEMA)
if not os.path.exists(_BINARY):
raise RuntimeError("Sorry, flatc is not available at %r" % _BINARY)
# A CSS description for making the visualizer
_CSS = """
<html>
<head>
<style>
body {font-family: sans-serif; background-color: #ffaa00;}
table {background-color: #eeccaa;}
th {background-color: black; color: white;}
h1 {
  background-color: #ffaa00;
padding:5px;
color: black;
}
div {
border-radius: 5px;
background-color: #ffeecc;
padding:5px;
margin:5px;
}
.tooltip {color: blue;}
.tooltip .tooltipcontent {
visibility: hidden;
color: black;
background-color: yellow;
padding: 5px;
border-radius: 4px;
position: absolute;
z-index: 1;
}
.tooltip:hover .tooltipcontent {
visibility: visible;
}
.edges line {
stroke: #333333;
}
.nodes text {
color: black;
pointer-events: none;
font-family: sans-serif;
font-size: 11px;
}
</style>
<script src="https://d3js.org/d3.v4.min.js"></script>
</head>
<body>
"""
_D3_HTML_TEMPLATE = """
<script>
// Build graph data
var graph = %s;
var svg = d3.select("#subgraph%d");
var width = svg.attr("width");
var height = svg.attr("height");
var color = d3.scaleOrdinal(d3.schemeCategory20);
var simulation = d3.forceSimulation()
.force("link", d3.forceLink().id(function(d) {return d.id;}))
.force("charge", d3.forceManyBody())
.force("center", d3.forceCenter(0.5 * width, 0.5 * height));
function buildGraph() {
var edge = svg.append("g").attr("class", "edges").selectAll("line")
.data(graph.edges).enter().append("line")
// Make the node group
var node = svg.selectAll(".nodes")
.data(graph.nodes)
.enter().append("g")
.attr("class", "nodes")
.call(d3.drag()
.on("start", function(d) {
if(!d3.event.active) simulation.alphaTarget(1.0).restart();
d.fx = d.x;d.fy = d.y;
})
.on("drag", function(d) {
d.fx = d3.event.x; d.fy = d3.event.y;
})
.on("end", function(d) {
if (!d3.event.active) simulation.alphaTarget(0);
d.fx = d.fy = null;
}));
// Within the group, draw a circle for the node position and text
// on the side.
node.append("circle")
.attr("r", "5px")
.attr("fill", function(d) { return color(d.group); })
node.append("text")
.attr("dx", 8).attr("dy", 5).text(function(d) { return d.name; });
// Setup force parameters and update position callback
simulation.nodes(graph.nodes).on("tick", forceSimulationUpdated);
simulation.force("link").links(graph.edges);
function forceSimulationUpdated() {
// Update edges.
edge.attr("x1", function(d) {return d.source.x;})
.attr("y1", function(d) {return d.source.y;})
.attr("x2", function(d) {return d.target.x;})
.attr("y2", function(d) {return d.target.y;});
// Update node positions
node.attr("transform", function(d) { return "translate(" + d.x + "," + d.y + ")"; });
}
}
buildGraph()
</script>
"""
class OpCodeMapper(object):
"""Maps an opcode index to an op name."""
def __init__(self, data):
self.code_to_name = {}
for idx, d in enumerate(data["operator_codes"]):
self.code_to_name[idx] = d["builtin_code"]
def __call__(self, x):
if x not in self.code_to_name:
s = "<UNKNOWN>"
else:
s = self.code_to_name[x]
return "%s (opcode=%d)" % (s, x)
class DataSizeMapper(object):
"""For buffers, report the number of bytes."""
def __call__(self, x):
if x is not None:
return "%d bytes" % len(x)
else:
return "--"
class TensorMapper(object):
"""Maps a list of tensor indices to a tooltip hoverable indicator of more."""
def __init__(self, subgraph_data):
self.data = subgraph_data
def __call__(self, x):
html = ""
html += "<span class='tooltip'><span class='tooltipcontent'>"
for i in x:
tensor = self.data["tensors"][i]
html += str(i) + " "
html += tensor["name"] + " "
html += str(tensor["type"]) + " "
html += (repr(tensor["shape"]) if "shape" in tensor else "[]") + "<br>"
html += "</span>"
html += repr(x)
html += "</span>"
return html
def GenerateGraph(subgraph_idx, g, opcode_mapper):
"""Produces the HTML required to have a d3 visualization of the dag."""
def TensorName(idx):
return "t%d" % idx
def OpName(idx):
return "o%d" % idx
edges = []
nodes = []
first = {}
pixel_mult = 50 # TODO(aselle): multiplier for initial placement
for op_index, op in enumerate(g["operators"]):
for tensor_input_position, tensor_index in enumerate(op["inputs"]):
if tensor_index not in first:
first[tensor_index] = (
op_index * pixel_mult,
tensor_input_position * pixel_mult - pixel_mult / 2)
edges.append({
"source": TensorName(tensor_index),
"target": OpName(op_index)
})
for tensor_index in op["outputs"]:
edges.append({
"target": TensorName(tensor_index),
"source": OpName(op_index)
})
nodes.append({
"id": OpName(op_index),
"name": opcode_mapper(op["opcode_index"]),
"group": 2,
"x": pixel_mult,
"y": op_index * pixel_mult
})
for tensor_index, tensor in enumerate(g["tensors"]):
initial_y = (
first[tensor_index] if tensor_index in first else len(g["operators"]))
nodes.append({
"id": TensorName(tensor_index),
"name": "%s (%d)" % (tensor["name"], tensor_index),
"group": 1,
"x": 2,
"y": initial_y
})
graph_str = json.dumps({"nodes": nodes, "edges": edges})
html = _D3_HTML_TEMPLATE % (graph_str, subgraph_idx)
return html
def GenerateTableHtml(items, keys_to_print, display_index=True):
"""Given a list of object values and keys to print, make an HTML table.
Args:
items: Items to print an array of dicts.
keys_to_print: (key, display_fn). `key` is a key in the object. i.e.
items[0][key] should exist. display_fn is the mapping function on display.
i.e. the displayed html cell will have the string returned by
`mapping_fn(items[0][key])`.
display_index: add a column which is the index of each row in `items`.
Returns:
An html table.
"""
html = ""
# Print the list of items
html += "<table><tr>\n"
html += "<tr>\n"
if display_index:
html += "<th>index</th>"
for h, mapper in keys_to_print:
html += "<th>%s</th>" % h
html += "</tr>\n"
for idx, tensor in enumerate(items):
html += "<tr>\n"
if display_index:
html += "<td>%d</td>" % idx
# print tensor.keys()
for h, mapper in keys_to_print:
val = tensor[h] if h in tensor else None
val = val if mapper is None else mapper(val)
html += "<td>%s</td>\n" % val
html += "</tr>\n"
html += "</table>\n"
return html
def CreateHtmlFile(tflite_input, html_output):
"""Given a tflite model in `tflite_input` file, produce html description."""
  # Convert the model into a JSON flatbuffer using flatc (build it if it
  # doesn't exist).
if not os.path.exists(tflite_input):
raise RuntimeError("Invalid filename %r" % tflite_input)
if tflite_input.endswith(".tflite") or tflite_input.endswith(".bin"):
# Run convert
cmd = (
_BINARY + " -t "
"--strict-json --defaults-json -o /tmp {schema} -- {input}".format(
input=tflite_input, schema=_SCHEMA))
print(cmd)
os.system(cmd)
real_output = ("/tmp/" + os.path.splitext(
os.path.split(tflite_input)[-1])[0] + ".json")
data = json.load(open(real_output))
elif tflite_input.endswith(".json"):
data = json.load(open(tflite_input))
else:
raise RuntimeError("Input file was not .tflite or .json")
html = ""
html += _CSS
html += "<h1>TensorFlow Lite Model</h2>"
data["filename"] = tflite_input # Avoid special case
toplevel_stuff = [("filename", None), ("version", None), ("description",
None)]
html += "<table>\n"
for key, mapping in toplevel_stuff:
if not mapping:
mapping = lambda x: x
html += "<tr><th>%s</th><td>%s</td></tr>\n" % (key, mapping(data.get(key)))
html += "</table>\n"
# Spec on what keys to display
buffer_keys_to_display = [("data", DataSizeMapper())]
operator_keys_to_display = [("builtin_code", None), ("custom_code", None),
("version", None)]
for subgraph_idx, g in enumerate(data["subgraphs"]):
# Subgraph local specs on what to display
html += "<div class='subgraph'>"
tensor_mapper = TensorMapper(g)
opcode_mapper = OpCodeMapper(data)
op_keys_to_display = [("inputs", tensor_mapper), ("outputs", tensor_mapper),
("builtin_options", None), ("opcode_index",
opcode_mapper)]
tensor_keys_to_display = [("name", None), ("type", None), ("shape", None),
("buffer", None), ("quantization", None)]
html += "<h2>Subgraph %d</h2>\n" % subgraph_idx
# Inputs and outputs.
html += "<h3>Inputs/Outputs</h3>\n"
html += GenerateTableHtml(
[{
"inputs": g["inputs"],
"outputs": g["outputs"]
}], [("inputs", tensor_mapper), ("outputs", tensor_mapper)],
display_index=False)
# Print the tensors.
html += "<h3>Tensors</h3>\n"
html += GenerateTableHtml(g["tensors"], tensor_keys_to_display)
# Print the ops.
html += "<h3>Ops</h3>\n"
html += GenerateTableHtml(g["operators"], op_keys_to_display)
# Visual graph.
html += "<svg id='subgraph%d' width='960' height='1600'></svg>\n" % (
subgraph_idx,)
html += GenerateGraph(subgraph_idx, g, opcode_mapper)
html += "</div>"
# Buffers have no data, but maybe in the future they will
html += "<h2>Buffers</h2>\n"
html += GenerateTableHtml(data["buffers"], buffer_keys_to_display)
# Operator codes
html += "<h2>Operator Codes</h2>\n"
html += GenerateTableHtml(data["operator_codes"], operator_keys_to_display)
html += "</body></html>\n"
open(html_output, "w").write(html)
def main(argv):
try:
tflite_input = argv[1]
html_output = argv[2]
except IndexError:
print("Usage: %s <input tflite> <output html>" % (argv[0]))
else:
CreateHtmlFile(tflite_input, html_output)
if __name__ == "__main__":
main(sys.argv)
|
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import frappe.model.meta
from frappe.model.dynamic_links import get_dynamic_link_map
import frappe.defaults
from frappe.utils.file_manager import remove_all
from frappe.utils.password import delete_all_passwords_for
from frappe import _
from frappe.model.naming import revert_series_if_last
from frappe.utils.global_search import delete_for_document
from six import string_types, integer_types
def delete_doc(doctype=None, name=None, force=0, ignore_doctypes=None, for_reload=False,
ignore_permissions=False, flags=None, ignore_on_trash=False, ignore_missing=True):
"""
	Deletes a doc (dt, dn) after validating that it is not submitted and not linked in a live record
"""
if not ignore_doctypes: ignore_doctypes = []
# get from form
if not doctype:
doctype = frappe.form_dict.get('dt')
name = frappe.form_dict.get('dn')
names = name
if isinstance(name, string_types) or isinstance(name, integer_types):
names = [name]
for name in names or []:
# already deleted..?
if not frappe.db.exists(doctype, name):
if not ignore_missing:
raise frappe.DoesNotExistError
else:
return False
# delete passwords
delete_all_passwords_for(doctype, name)
doc = None
if doctype=="DocType":
if for_reload:
try:
doc = frappe.get_doc(doctype, name)
except frappe.DoesNotExistError:
pass
else:
doc.run_method("before_reload")
else:
doc = frappe.get_doc(doctype, name)
update_flags(doc, flags, ignore_permissions)
check_permission_and_not_submitted(doc)
frappe.db.sql("delete from `tabCustom Field` where dt = %s", name)
frappe.db.sql("delete from `tabCustom Script` where dt = %s", name)
frappe.db.sql("delete from `tabProperty Setter` where doc_type = %s", name)
frappe.db.sql("delete from `tabReport` where ref_doctype=%s", name)
frappe.db.sql("delete from `tabCustom DocPerm` where parent=%s", name)
delete_from_table(doctype, name, ignore_doctypes, None)
else:
doc = frappe.get_doc(doctype, name)
if not for_reload:
update_flags(doc, flags, ignore_permissions)
check_permission_and_not_submitted(doc)
if not ignore_on_trash:
doc.run_method("on_trash")
doc.flags.in_delete = True
doc.run_method('on_change')
frappe.enqueue('frappe.model.delete_doc.delete_dynamic_links', doctype=doc.doctype, name=doc.name,
async=False if frappe.flags.in_test else True)
# check if links exist
if not force:
check_if_doc_is_linked(doc)
check_if_doc_is_dynamically_linked(doc)
update_naming_series(doc)
delete_from_table(doctype, name, ignore_doctypes, doc)
doc.run_method("after_delete")
# delete attachments
remove_all(doctype, name, from_delete=True)
# delete global search entry
delete_for_document(doc)
if doc and not for_reload:
add_to_deleted_document(doc)
if not frappe.flags.in_patch:
try:
doc.notify_update()
insert_feed(doc)
except ImportError:
pass
# delete user_permissions
frappe.defaults.clear_default(parenttype="User Permission", key=doctype, value=name)
def add_to_deleted_document(doc):
'''Add this document to Deleted Document table. Called after delete'''
if doc.doctype != 'Deleted Document' and frappe.flags.in_install != 'frappe':
frappe.get_doc(dict(
doctype='Deleted Document',
deleted_doctype=doc.doctype,
deleted_name=doc.name,
data=doc.as_json(),
owner=frappe.session.user
)).db_insert()
def update_naming_series(doc):
if doc.meta.autoname:
if doc.meta.autoname.startswith("naming_series:") \
and getattr(doc, "naming_series", None):
revert_series_if_last(doc.naming_series, doc.name)
elif doc.meta.autoname.split(":")[0] not in ("Prompt", "field", "hash"):
revert_series_if_last(doc.meta.autoname, doc.name)
def delete_from_table(doctype, name, ignore_doctypes, doc):
if doctype!="DocType" and doctype==name:
frappe.db.sql("delete from `tabSingles` where doctype=%s", name)
else:
frappe.db.sql("delete from `tab{0}` where name=%s".format(doctype), name)
# get child tables
if doc:
tables = [d.options for d in doc.meta.get_table_fields()]
else:
def get_table_fields(field_doctype):
return frappe.db.sql_list("""select options from `tab{}` where fieldtype='Table'
and parent=%s""".format(field_doctype), doctype)
tables = get_table_fields("DocField")
if not frappe.flags.in_install=="frappe":
tables += get_table_fields("Custom Field")
# delete from child tables
for t in list(set(tables)):
if t not in ignore_doctypes:
frappe.db.sql("delete from `tab%s` where parenttype=%s and parent = %s" % (t, '%s', '%s'), (doctype, name))
def update_flags(doc, flags=None, ignore_permissions=False):
if ignore_permissions:
if not flags: flags = {}
flags["ignore_permissions"] = ignore_permissions
if flags:
doc.flags.update(flags)
def check_permission_and_not_submitted(doc):
# permission
if (not doc.flags.ignore_permissions
and frappe.session.user!="Administrator"
and (
not doc.has_permission("delete")
or (doc.doctype=="DocType" and not doc.custom))):
frappe.msgprint(_("User not allowed to delete {0}: {1}")
.format(doc.doctype, doc.name), raise_exception=frappe.PermissionError)
# check if submitted
if doc.docstatus == 1:
frappe.msgprint(_("{0} {1}: Submitted Record cannot be deleted.").format(_(doc.doctype), doc.name),
raise_exception=True)
def check_if_doc_is_linked(doc, method="Delete"):
"""
	Raises an exception if the given doc (dt, dn) is linked in another record.
"""
from frappe.model.rename_doc import get_link_fields
link_fields = get_link_fields(doc.doctype)
link_fields = [[lf['parent'], lf['fieldname'], lf['issingle']] for lf in link_fields]
for link_dt, link_field, issingle in link_fields:
if not issingle:
for item in frappe.db.get_values(link_dt, {link_field:doc.name},
["name", "parent", "parenttype", "docstatus"], as_dict=True):
linked_doctype = item.parenttype if item.parent else link_dt
if linked_doctype in ("Communication", "ToDo", "DocShare", "Email Unsubscribe", 'File', 'Version', "Activity Log"):
# don't check for communication and todo!
continue
if not item:
continue
elif (method != "Delete" or item.docstatus == 2) and (method != "Cancel" or item.docstatus != 1):
# don't raise exception if not
# linked to a non-cancelled doc when deleting or to a submitted doc when cancelling
continue
elif link_dt == doc.doctype and (item.parent or item.name) == doc.name:
# don't raise exception if not
# linked to same item or doc having same name as the item
continue
else:
reference_docname = item.parent or item.name
raise_link_exists_exception(doc, linked_doctype, reference_docname)
else:
if frappe.db.get_value(link_dt, None, link_field) == doc.name:
raise_link_exists_exception(doc, link_dt, link_dt)
def check_if_doc_is_dynamically_linked(doc, method="Delete"):
'''Raise `frappe.LinkExistsError` if the document is dynamically linked'''
for df in get_dynamic_link_map().get(doc.doctype, []):
if df.parent in ("Communication", "ToDo", "DocShare", "Email Unsubscribe", "Activity Log", 'File', 'Version'):
# don't check for communication and todo!
continue
meta = frappe.get_meta(df.parent)
if meta.issingle:
# dynamic link in single doc
refdoc = frappe.db.get_singles_dict(df.parent)
if (refdoc.get(df.options)==doc.doctype
and refdoc.get(df.fieldname)==doc.name
and ((method=="Delete" and refdoc.docstatus < 2)
or (method=="Cancel" and refdoc.docstatus==1))
):
# raise exception only if
				# linked to a non-cancelled doc when deleting
# or linked to a submitted doc when cancelling
raise_link_exists_exception(doc, df.parent, df.parent)
else:
# dynamic link in table
df["table"] = ", parent, parenttype, idx" if meta.istable else ""
for refdoc in frappe.db.sql("""select name, docstatus{table} from `tab{parent}` where
{options}=%s and {fieldname}=%s""".format(**df), (doc.doctype, doc.name), as_dict=True):
if ((method=="Delete" and refdoc.docstatus < 2) or (method=="Cancel" and refdoc.docstatus==1)):
# raise exception only if
					# linked to a non-cancelled doc when deleting
# or linked to a submitted doc when cancelling
reference_doctype = refdoc.parenttype if meta.istable else df.parent
reference_docname = refdoc.parent if meta.istable else refdoc.name
at_position = "at Row: {0}".format(refdoc.idx) if meta.istable else ""
raise_link_exists_exception(doc, reference_doctype, reference_docname, at_position)
def raise_link_exists_exception(doc, reference_doctype, reference_docname, row=''):
doc_link = '<a href="#Form/{0}/{1}">{1}</a>'.format(doc.doctype, doc.name)
reference_link = '<a href="#Form/{0}/{1}">{1}</a>'.format(reference_doctype, reference_docname)
#hack to display Single doctype only once in message
if reference_doctype == reference_docname:
reference_doctype = ''
frappe.throw(_('Cannot delete or cancel because {0} {1} is linked with {2} {3} {4}')
.format(doc.doctype, doc_link, reference_doctype, reference_link, row), frappe.LinkExistsError)
def delete_dynamic_links(doctype, name):
delete_doc("ToDo", frappe.db.sql_list("""select name from `tabToDo`
where reference_type=%s and reference_name=%s""", (doctype, name)),
ignore_permissions=True, force=True)
frappe.db.sql('''delete from `tabEmail Unsubscribe`
where reference_doctype=%s and reference_name=%s''', (doctype, name))
# delete shares
frappe.db.sql("""delete from `tabDocShare`
where share_doctype=%s and share_name=%s""", (doctype, name))
# delete versions
frappe.db.sql('delete from tabVersion where ref_doctype=%s and docname=%s', (doctype, name))
# delete comments
frappe.db.sql("""delete from `tabCommunication`
where
communication_type = 'Comment'
and reference_doctype=%s and reference_name=%s""", (doctype, name))
# unlink communications
frappe.db.sql("""update `tabCommunication`
set reference_doctype=null, reference_name=null
where
communication_type = 'Communication'
and reference_doctype=%s
and reference_name=%s""", (doctype, name))
# unlink secondary references
frappe.db.sql("""update `tabCommunication`
set link_doctype=null, link_name=null
where link_doctype=%s and link_name=%s""", (doctype, name))
# unlink feed
frappe.db.sql("""update `tabCommunication`
set timeline_doctype=null, timeline_name=null
where timeline_doctype=%s and timeline_name=%s""", (doctype, name))
# unlink activity_log reference_doctype
frappe.db.sql("""update `tabActivity Log`
set reference_doctype=null, reference_name=null
where
reference_doctype=%s
and reference_name=%s""", (doctype, name))
# unlink activity_log timeline_doctype
frappe.db.sql("""update `tabActivity Log`
set timeline_doctype=null, timeline_name=null
where timeline_doctype=%s and timeline_name=%s""", (doctype, name))
def insert_feed(doc):
from frappe.utils import get_fullname
if frappe.flags.in_install or frappe.flags.in_import or getattr(doc, "no_feed_on_delete", False):
return
frappe.get_doc({
"doctype": "Communication",
"communication_type": "Comment",
"comment_type": "Deleted",
"reference_doctype": doc.doctype,
"subject": "{0} {1}".format(_(doc.doctype), doc.name),
"full_name": get_fullname(doc.owner)
}).insert(ignore_permissions=True)
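# --- Illustrative addition (not part of the original module) ---
# A hedged usage sketch of delete_doc() from a standalone script. The site
# name and document name are hypothetical placeholders; in normal operation
# this function is invoked by the framework itself (desk UI, hooks, patches).
import frappe
from frappe.model.delete_doc import delete_doc

frappe.init(site="site1.local")   # hypothetical bench site
frappe.connect()
try:
    delete_doc("ToDo", "TODO-0001", ignore_permissions=True)  # hypothetical name
    frappe.db.commit()
finally:
    frappe.destroy()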
|
|
#!/usr/bin/env python
import sys
try:
import json
except ImportError:
import simplejson as json
from optparse import OptionParser
import time
import logging
from twisted.words.protocols import irc
from twisted.internet import protocol, reactor
from twisted.web.server import Site
from twisted.web import server, http
from twisted.web.resource import Resource
from twisted.web.static import File
from twisted.web.error import NoResource
auth_user = auth_password = None
def setup_logger(logger):
console_handler = logging.StreamHandler()
format = "[%(levelname)s] - %(asctime)s - %(message)s"
console_handler.setFormatter(logging.Formatter(format))
logger.addHandler(console_handler)
class LogBot(irc.IRCClient):
"""IRC bot that sends updates to the web handler.
"""
def _get_nickname(self):
return self.factory.nickname
nickname = property(_get_nickname)
def signedOn(self):
self.join(self.factory.channel)
logger.info("Connected to server as %s" % self.nickname)
def joined(self, channel):
logger.info("Joined channel %s" % channel)
def privmsg(self, user, channel, message):
logger.info("Message from %s: %s" % (user.split('!')[0], message))
action = {
'command': 'privmsg',
'user': user,
'message': message,
'timestamp': int(time.time()),
}
self.factory.add_to_history(action)
self.factory.web_resource.update(action)
def userJoined(self, user, channel):
logger.info("%s joined" % user.split('!')[0])
action = {
'command': 'userjoined',
'user': user,
'timestamp': int(time.time()),
}
self.factory.add_to_history(action)
self.factory.web_resource.update(action)
def userLeft(self, user, channel):
logger.info("%s left" % user.split('!')[0])
action = {
'command': 'userleft',
'user': user,
'timestamp': int(time.time()),
}
self.factory.add_to_history(action)
self.factory.web_resource.update(action)
def userQuit(self, user, message):
logger.info("%s quit" % user.split('!')[0])
action = {
'command': 'userquit',
'user': user,
'timestamp': int(time.time()),
}
self.factory.add_to_history(action)
self.factory.web_resource.update(action)
class LogBotFactory(protocol.ClientFactory):
protocol = LogBot
def __init__(self, channel, web_resource, nickname='LogBot',
history_file='irc-history.log', history_cache_size=500):
self.channel = channel
self.web_resource = web_resource
self.nickname = nickname
self.history_file = history_file
self.history_cache_size = history_cache_size
self.history = self.load_history()
def load_history(self):
"""Load history from a JSON history file, where each line should be
valid JSON and correspond to a single action.
"""
history = []
try:
for line in open(self.history_file):
history.append(line)
# Ensure that we're not loading more than is necessary
if len(history) > self.history_cache_size:
history.pop(0)
# Convert the JSON strings to actions (dicts)
history = [json.loads(line) for line in history]
except:
pass
return history
def add_to_history(self, action):
"""Add an item to the history, this will also be saved to disk.
"""
self.history.append(action)
while len(self.history) > self.history_cache_size:
self.history.pop(0)
open(self.history_file, 'a').write(json.dumps(action) + '\n')
def clientConnectionLost(self, connector, reason):
logger.warn("Lost connection (%s)" % reason.getErrorMessage())
logger.warn("Reconnecting to server...")
connector.connect()
def clientConnectionFailed(self, connector, reason):
logger.error("Could not connect (%s)" % reason.getErrorMessage())
sys.exit(1)
def escape_html(text):
"""Produce entities within text."""
html_escape_table = {
"&": "&",
'"': """,
"'": "'",
">": ">",
"<": "<",
}
return "".join(html_escape_table.get(c,c) for c in text)
def authenticate(func):
"""Decorator to restrict access to pages to authenticated clients.
"""
def auth_func(self, request, *args, **kwargs):
user = request.getUser()
password = request.getPassword()
if auth_user and auth_password:
if user != auth_user or password != auth_password:
request.setResponseCode(http.UNAUTHORIZED)
realm = 'basic realm="IRC Viewer"'
request.setHeader('WWW-authenticate', realm)
return ''
return func(self, request, *args, **kwargs)
return auth_func
def prepare_action(action):
"""Prepare an action for sending to the client - escape quotes, etc.
"""
action = action.copy()
# Select only the *name* part of the user's name
if 'user' in action:
action['user'] = action['user'].split('!')[0]
for key, value in action.items():
if isinstance(key, basestring):
key = escape_html(key)
if isinstance(value, basestring):
value = escape_html(value)
action[key] = value
return action
class IrcLogUpdate(Resource):
"""A Twisted web resource that uses long-polling to send IRC updates to
clients as JSON.
"""
isLeaf = True
def __init__(self):
self.clients = []
Resource.__init__(self)
@authenticate
def render_GET(self, request):
request.setHeader('Content-Type', 'application/json')
# Add the client to the client list; the response will be returned when
# a new action occurs
self.clients.append(request)
request.notifyFinish().addErrback(self.lost_client, request)
return server.NOT_DONE_YET
def lost_client(self, err, client):
"""Remove the client in the event of a disconnect or error.
"""
if client in self.clients:
self.clients.remove(client)
def update(self, action):
"""Update all waiting clients with a new action.
"""
payload = json.dumps(prepare_action(action))
clients = self.clients[:]
self.clients = []
for client in clients:
client.write(payload)
client.finish()
class IrcLogHistory(Resource):
"""A Twisted web resource that returns IRC history as JSON.
"""
isLeaf = True
def __init__(self, log_bot_factory):
self.log_bot_factory = log_bot_factory
@authenticate
def render_GET(self, request):
request.setHeader('Content-Type', 'application/json')
history = []
for action in self.log_bot_factory.history:
history.append(prepare_action(action))
return json.dumps(history)
# Set up logging
logger = logging.getLogger('ircviewer')
setup_logger(logger)
def main():
    # auth_user/auth_password are read by the authenticate() decorator, so they
    # must be rebound as module globals here; plain assignment below would only
    # create locals and basic-auth protection would never take effect.
    global auth_user, auth_password
    usage = 'usage: %prog [options] irc_host[:port] channel'
parser = OptionParser(usage)
parser.add_option('-p', '--port', help='Port to run the HTTP server on',
default=8080, type='int')
parser.add_option('-n', '--nick', help='IRC bot nickname', type='str',
default='IRCViewer')
parser.add_option('-a', '--auth', help='Authentication (user:password)',
type='str')
options, args = parser.parse_args()
# Parse the connection string with format host[:port]
try:
irc_host, irc_port = args[0], 6667
if ':' in irc_host:
irc_host, irc_port = args[0].split(':')
irc_port = int(irc_port)
except:
parser.error('invalid IRC host/port, use format hostname[:port]')
# Ensure the channel is present
try:
channel = '#' + args[1].strip('#')
except:
parser.error('invalid or missing IRC channel')
# Set up authentication details if provided
if options.auth is not None:
if len(options.auth.split(':')) != 2:
parser.error("invalid auth details, use format user:password")
auth_user, auth_password = options.auth.split(':')
if not auth_user or not auth_password:
parser.error("invalid auth details, use format user:password")
logger.setLevel(logging.INFO)
# Set up the web resources for displaying the logs
web_resource = Resource()
update_web_resource = IrcLogUpdate()
web_resource.putChild("", File('main.html'))
web_resource.putChild("update.js", update_web_resource)
web_resource.putChild("static", File("static"))
web_factory = Site(web_resource)
reactor.listenTCP(options.port, web_factory)
log_bot_factory = LogBotFactory(channel, update_web_resource,
nickname=options.nick)
reactor.connectTCP(irc_host, irc_port, log_bot_factory)
web_resource.putChild("history.js", IrcLogHistory(log_bot_factory))
logger.info('Starting HTTP server on port %d' % options.port)
reactor.run()
if __name__ == '__main__':
main()
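# --- Illustrative addition (not part of the original script) ---
# A minimal Python 2 client sketch for the two JSON endpoints registered above:
# "history.js" returns the cached history, while "update.js" long-polls until
# the bot sees the next IRC action. The host/port are hypothetical and no HTTP
# basic auth is assumed.
import json
import urllib2

base = 'http://localhost:8080'
history = json.loads(urllib2.urlopen(base + '/history.js').read())
print 'loaded %d historical actions' % len(history)
# This request blocks until a new privmsg/join/part/quit arrives.
action = json.loads(urllib2.urlopen(base + '/update.js').read())
print 'new action:', action.get('command')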
|
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# The idea for this module (but no code) was borrowed from the
# quantities (http://pythonhosted.org/quantities/) package.
"""Helper functions for Quantity.
In particular, this implements the logic that determines scaling and result
units for a given ufunc, given input units.
"""
from fractions import Fraction
import numpy as np
from . import UFUNC_HELPERS, UNSUPPORTED_UFUNCS
from astropy.units.core import (
UnitsError, UnitConversionError, UnitTypeError,
dimensionless_unscaled, get_current_unit_registry,
unit_scale_converter)
def _d(unit):
if unit is None:
return dimensionless_unscaled
else:
return unit
def get_converter(from_unit, to_unit):
"""Like Unit._get_converter, except returns None if no scaling is needed,
i.e., if the inferred scale is unity."""
converter = from_unit._get_converter(to_unit)
return None if converter is unit_scale_converter else converter
def get_converters_and_unit(f, unit1, unit2):
converters = [None, None]
# By default, we try adjusting unit2 to unit1, so that the result will
# be unit1 as well. But if there is no second unit, we have to try
# adjusting unit1 (to dimensionless, see below).
if unit2 is None:
if unit1 is None:
# No units for any input -- e.g., np.add(a1, a2, out=q)
return converters, dimensionless_unscaled
changeable = 0
# swap units.
unit2 = unit1
unit1 = None
elif unit2 is unit1:
# ensure identical units is fast ("==" is slow, so avoid that).
return converters, unit1
else:
changeable = 1
# Try to get a converter from unit2 to unit1.
if unit1 is None:
try:
converters[changeable] = get_converter(unit2,
dimensionless_unscaled)
except UnitsError:
# special case: would be OK if unitless number is zero, inf, nan
converters[1-changeable] = False
return converters, unit2
else:
return converters, dimensionless_unscaled
else:
try:
converters[changeable] = get_converter(unit2, unit1)
except UnitsError:
raise UnitConversionError(
"Can only apply '{}' function to quantities "
"with compatible dimensions"
.format(f.__name__))
return converters, unit1
# SINGLE ARGUMENT UFUNC HELPERS
#
# The functions below take a single argument, which is the quantity upon which
# the ufunc is being used. The output of the helper function should be two
# values: a list with a single converter to be used to scale the input before
# it is being passed to the ufunc (or None if no conversion is needed), and
# the unit the output will be in.
def helper_onearg_test(f, unit):
return ([None], None)
def helper_invariant(f, unit):
return ([None], _d(unit))
def helper_square(f, unit):
return ([None], unit ** 2 if unit is not None else dimensionless_unscaled)
def helper_reciprocal(f, unit):
return ([None], unit ** -1 if unit is not None else dimensionless_unscaled)
one_half = 0.5 # faster than Fraction(1, 2)
one_third = Fraction(1, 3)
def helper_sqrt(f, unit):
return ([None], unit ** one_half if unit is not None
else dimensionless_unscaled)
def helper_cbrt(f, unit):
return ([None], (unit ** one_third if unit is not None
else dimensionless_unscaled))
def helper_modf(f, unit):
if unit is None:
return [None], (dimensionless_unscaled, dimensionless_unscaled)
try:
return ([get_converter(unit, dimensionless_unscaled)],
(dimensionless_unscaled, dimensionless_unscaled))
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"dimensionless quantities"
.format(f.__name__))
def helper__ones_like(f, unit):
return [None], dimensionless_unscaled
def helper_dimensionless_to_dimensionless(f, unit):
if unit is None:
return [None], dimensionless_unscaled
try:
return ([get_converter(unit, dimensionless_unscaled)],
dimensionless_unscaled)
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"dimensionless quantities"
.format(f.__name__))
def helper_dimensionless_to_radian(f, unit):
from astropy.units.si import radian
if unit is None:
return [None], radian
try:
return [get_converter(unit, dimensionless_unscaled)], radian
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"dimensionless quantities"
.format(f.__name__))
def helper_degree_to_radian(f, unit):
from astropy.units.si import degree, radian
try:
return [get_converter(unit, degree)], radian
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"quantities with angle units"
.format(f.__name__))
def helper_radian_to_degree(f, unit):
from astropy.units.si import degree, radian
try:
return [get_converter(unit, radian)], degree
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"quantities with angle units"
.format(f.__name__))
def helper_radian_to_dimensionless(f, unit):
from astropy.units.si import radian
try:
return [get_converter(unit, radian)], dimensionless_unscaled
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"quantities with angle units"
.format(f.__name__))
def helper_frexp(f, unit):
if not unit.is_unity():
raise UnitTypeError("Can only apply '{}' function to "
"unscaled dimensionless quantities"
.format(f.__name__))
return [None], (None, None)
# TWO ARGUMENT UFUNC HELPERS
#
# The functions below take two arguments. The output of the helper function
# should be two values: a tuple of two converters to be used to scale the
# inputs before being passed to the ufunc (None if no conversion is needed),
# and the unit the output will be in.
def helper_multiplication(f, unit1, unit2):
return [None, None], _d(unit1) * _d(unit2)
def helper_division(f, unit1, unit2):
return [None, None], _d(unit1) / _d(unit2)
def helper_power(f, unit1, unit2):
# TODO: find a better way to do this, currently need to signal that one
# still needs to raise power of unit1 in main code
if unit2 is None:
return [None, None], False
try:
return [None, get_converter(unit2, dimensionless_unscaled)], False
except UnitsError:
raise UnitTypeError("Can only raise something to a "
"dimensionless quantity")
def helper_ldexp(f, unit1, unit2):
if unit2 is not None:
raise TypeError("Cannot use ldexp with a quantity "
"as second argument.")
else:
return [None, None], _d(unit1)
def helper_copysign(f, unit1, unit2):
# if first arg is not a quantity, just return plain array
if unit1 is None:
return [None, None], None
else:
return [None, None], unit1
def helper_heaviside(f, unit1, unit2):
try:
converter2 = (get_converter(unit2, dimensionless_unscaled)
if unit2 is not None else None)
except UnitsError:
raise UnitTypeError("Can only apply 'heaviside' function with a "
"dimensionless second argument.")
return ([None, converter2], dimensionless_unscaled)
def helper_two_arg_dimensionless(f, unit1, unit2):
try:
converter1 = (get_converter(unit1, dimensionless_unscaled)
if unit1 is not None else None)
converter2 = (get_converter(unit2, dimensionless_unscaled)
if unit2 is not None else None)
except UnitsError:
raise UnitTypeError("Can only apply '{}' function to "
"dimensionless quantities"
.format(f.__name__))
return ([converter1, converter2], dimensionless_unscaled)
# This used to be a separate function that just called get_converters_and_unit.
# Using it directly saves a few us; keeping the clearer name.
helper_twoarg_invariant = get_converters_and_unit
def helper_twoarg_comparison(f, unit1, unit2):
converters, _ = get_converters_and_unit(f, unit1, unit2)
return converters, None
def helper_twoarg_invtrig(f, unit1, unit2):
from astropy.units.si import radian
converters, _ = get_converters_and_unit(f, unit1, unit2)
return converters, radian
def helper_twoarg_floor_divide(f, unit1, unit2):
converters, _ = get_converters_and_unit(f, unit1, unit2)
return converters, dimensionless_unscaled
def helper_divmod(f, unit1, unit2):
converters, result_unit = get_converters_and_unit(f, unit1, unit2)
return converters, (dimensionless_unscaled, result_unit)
def helper_clip(f, unit1, unit2, unit3):
# Treat the array being clipped as primary.
converters = [None]
if unit1 is None:
result_unit = dimensionless_unscaled
try:
converters += [(None if unit is None else
get_converter(unit, dimensionless_unscaled))
for unit in (unit2, unit3)]
except UnitsError:
raise UnitConversionError(
"Can only apply '{}' function to quantities with "
"compatible dimensions".format(f.__name__))
else:
result_unit = unit1
for unit in unit2, unit3:
try:
converter = get_converter(_d(unit), result_unit)
except UnitsError:
if unit is None:
# special case: OK if unitless number is zero, inf, nan
converters.append(False)
else:
raise UnitConversionError(
"Can only apply '{}' function to quantities with "
"compatible dimensions".format(f.__name__))
else:
converters.append(converter)
return converters, result_unit
# list of ufuncs:
# https://numpy.org/doc/stable/reference/ufuncs.html#available-ufuncs
UNSUPPORTED_UFUNCS |= {
np.bitwise_and, np.bitwise_or, np.bitwise_xor, np.invert, np.left_shift,
np.right_shift, np.logical_and, np.logical_or, np.logical_xor,
np.logical_not, np.isnat, np.gcd, np.lcm}
# SINGLE ARGUMENT UFUNCS
# ufuncs that do not care about the unit and do not return a Quantity
# (but rather a boolean, or -1, 0, or +1 for np.sign).
onearg_test_ufuncs = (np.isfinite, np.isinf, np.isnan, np.sign, np.signbit)
for ufunc in onearg_test_ufuncs:
UFUNC_HELPERS[ufunc] = helper_onearg_test
# ufuncs that return a value with the same unit as the input
invariant_ufuncs = (np.absolute, np.fabs, np.conj, np.conjugate, np.negative,
np.spacing, np.rint, np.floor, np.ceil, np.trunc,
np.positive)
for ufunc in invariant_ufuncs:
UFUNC_HELPERS[ufunc] = helper_invariant
# ufuncs that require dimensionless input and give dimensionless output
dimensionless_to_dimensionless_ufuncs = (np.exp, np.expm1, np.exp2, np.log,
np.log10, np.log2, np.log1p)
# Default numpy does not ship an "erf" ufunc, but some versions hacked by
# intel do. This is bad, since it means code written for that numpy will
# not run on non-hacked numpy. But still, we might as well support it.
if isinstance(getattr(np.core.umath, 'erf', None), np.ufunc):
dimensionless_to_dimensionless_ufuncs += (np.core.umath.erf,)
for ufunc in dimensionless_to_dimensionless_ufuncs:
UFUNC_HELPERS[ufunc] = helper_dimensionless_to_dimensionless
# ufuncs that require dimensionless input and give output in radians
dimensionless_to_radian_ufuncs = (np.arccos, np.arcsin, np.arctan, np.arccosh,
np.arcsinh, np.arctanh)
for ufunc in dimensionless_to_radian_ufuncs:
UFUNC_HELPERS[ufunc] = helper_dimensionless_to_radian
# ufuncs that require input in degrees and give output in radians
degree_to_radian_ufuncs = (np.radians, np.deg2rad)
for ufunc in degree_to_radian_ufuncs:
UFUNC_HELPERS[ufunc] = helper_degree_to_radian
# ufuncs that require input in radians and give output in degrees
radian_to_degree_ufuncs = (np.degrees, np.rad2deg)
for ufunc in radian_to_degree_ufuncs:
UFUNC_HELPERS[ufunc] = helper_radian_to_degree
# ufuncs that require input in radians and give dimensionless output
radian_to_dimensionless_ufuncs = (np.cos, np.sin, np.tan, np.cosh, np.sinh,
np.tanh)
for ufunc in radian_to_dimensionless_ufuncs:
UFUNC_HELPERS[ufunc] = helper_radian_to_dimensionless
# ufuncs handled as special cases
UFUNC_HELPERS[np.sqrt] = helper_sqrt
UFUNC_HELPERS[np.square] = helper_square
UFUNC_HELPERS[np.reciprocal] = helper_reciprocal
UFUNC_HELPERS[np.cbrt] = helper_cbrt
UFUNC_HELPERS[np.core.umath._ones_like] = helper__ones_like
UFUNC_HELPERS[np.modf] = helper_modf
UFUNC_HELPERS[np.frexp] = helper_frexp
# TWO ARGUMENT UFUNCS
# two argument ufuncs that require dimensionless input and give
# dimensionless output
two_arg_dimensionless_ufuncs = (np.logaddexp, np.logaddexp2)
for ufunc in two_arg_dimensionless_ufuncs:
UFUNC_HELPERS[ufunc] = helper_two_arg_dimensionless
# two argument ufuncs that return a value with the same unit as the input
twoarg_invariant_ufuncs = (np.add, np.subtract, np.hypot, np.maximum,
np.minimum, np.fmin, np.fmax, np.nextafter,
np.remainder, np.mod, np.fmod)
for ufunc in twoarg_invariant_ufuncs:
UFUNC_HELPERS[ufunc] = helper_twoarg_invariant
# two argument ufuncs that need compatible inputs and return a boolean
twoarg_comparison_ufuncs = (np.greater, np.greater_equal, np.less,
np.less_equal, np.not_equal, np.equal)
for ufunc in twoarg_comparison_ufuncs:
UFUNC_HELPERS[ufunc] = helper_twoarg_comparison
# two argument ufuncs that do inverse trigonometry
twoarg_invtrig_ufuncs = (np.arctan2,)
# another private function in numpy; use getattr in case it disappears
if isinstance(getattr(np.core.umath, '_arg', None), np.ufunc):
twoarg_invtrig_ufuncs += (np.core.umath._arg,)
for ufunc in twoarg_invtrig_ufuncs:
UFUNC_HELPERS[ufunc] = helper_twoarg_invtrig
# ufuncs handled as special cases
UFUNC_HELPERS[np.multiply] = helper_multiplication
if isinstance(getattr(np, 'matmul', None), np.ufunc):
UFUNC_HELPERS[np.matmul] = helper_multiplication
UFUNC_HELPERS[np.divide] = helper_division
UFUNC_HELPERS[np.true_divide] = helper_division
UFUNC_HELPERS[np.power] = helper_power
UFUNC_HELPERS[np.ldexp] = helper_ldexp
UFUNC_HELPERS[np.copysign] = helper_copysign
UFUNC_HELPERS[np.floor_divide] = helper_twoarg_floor_divide
UFUNC_HELPERS[np.heaviside] = helper_heaviside
UFUNC_HELPERS[np.float_power] = helper_power
UFUNC_HELPERS[np.divmod] = helper_divmod
# Check for clip ufunc; note that np.clip is a wrapper function, not the ufunc.
if isinstance(getattr(np.core.umath, 'clip', None), np.ufunc):
UFUNC_HELPERS[np.core.umath.clip] = helper_clip
del ufunc
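# --- Illustrative addition (not part of the original module) ---
# A hedged illustration of the helper contract described in the comments above:
# each helper returns the converters to apply to the inputs plus the unit of
# the result. The lookup normally happens inside Quantity.__array_ufunc__;
# calling a helper directly is done here only for exposition.
import numpy as np

from astropy import units as u
from astropy.units.quantity_helper import UFUNC_HELPERS

# np.add is registered as "two-arg invariant": the second input is converted
# to the first input's unit and the result carries that unit.
converters, result_unit = UFUNC_HELPERS[np.add](np.add, u.m, u.km)
assert result_unit == u.m
assert converters[0] is None            # first input is already in metres
assert converters[1](1.0) == 1000.0     # km -> m scaling for the second input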
|
|
#!/usr/bin/env python2
import base64  # used to decode the flag at the end of the script
from pwn import *
r = remote('140.112.31.96', 10120)
# warmup
r.recvuntil('= ')
m1 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
r.sendline(m1)
# round 1
# 1 -> a, 2 -> b ...
print 'Round 1'
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
m1 = []
for tok in c1.split():
res = ''
for i in range(0, len(tok), 2):
res += chr(ord('a')+int(tok[i:i+2])-1)
m1.append(res)
m1 = ' '.join(m1)
print 'c1 = ', c1
print 'm1 = ', m1
r.sendline(m1)
r.recvuntil('FLAG_PIECES: ')
flag = r.recvline().rstrip('\n\r')
# round 2
# Caesar cipher
print ''
print 'Round 2'
r.recvuntil('m1 = ')
m1 = r.recvline().rstrip('\n\r')
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil('c2 = ')
c2 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
def rot(msg, k):
m = ''
for c in msg:
if c == ' ':
m += ' '
elif c.islower():
m += chr((ord(c) - ord('a') + k + 26) % 26 + ord('a'))
else:
m += chr((ord(c) - ord('A') + k + 26) % 26 + ord('A'))
return m
k = ord(c1[0]) - ord(m1[0])
m2 = rot(c2, -k)
print 'c1 = ', c1
print 'm1 = ', m1
print 'c2 = ', c2
print 'm2 = ', m2
r.sendline(m2)
r.recvuntil('FLAG_PIECES: ')
flag += r.recvline().rstrip('\n\r')
# round 3
# Caesar cipher again
print ''
print 'Round 3'
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
for k in range(26):
print k, rot(c1, k)
k = int(raw_input('Choose a k: '))
m1 = rot(c1, k)
print 'c1 = ', c1
print 'm1 = ', m1
r.sendline(m1)
r.recvuntil('FLAG_PIECES: ')
flag += r.recvline().rstrip('\n\r')
# round 4
# character substitution
print ''
print 'Round 4'
r.recvuntil('m1 = ')
m1 = r.recvline().rstrip('\n\r')
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil('c2 = ')
c2 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
mapping = {' ': ' '}
for i in range(len(c1)):
mapping[c1[i].upper()] = m1[i].upper()
mapping[c1[i].lower()] = m1[i].lower()
m2 = ''
ambiguous = []
for i in range(len(c2)):
if mapping.has_key(c2[i]):
m2 += mapping[c2[i]]
else:
m2 += '-'
ambiguous += c2[i].lower()
ambiguous = set(ambiguous)
print 'c1 = ', c1
print 'm1 = ', m1
print 'c2 = ', c2
if len(ambiguous) > 0:
print 'm2 = ', m2
print 'Solve ambiguity:'
for c in ambiguous:
m = raw_input(c + ': ').rstrip('\n\r')
mapping[c] = m
mapping[c.upper()] = m.upper()
m2 = ''
for i in range(len(c2)):
m2 += mapping[c2[i]]
print 'm2 = ', m2
r.sendline(m2)
r.recvuntil('FLAG_PIECES: ')
flag += r.recvline().rstrip('\n\r')
# round 5
# m[i] = c[(k * i) mod len(m)], where k is coprime to len(m)
print ''
print 'Round 5'
r.recvuntil('m1 = ')
m1 = r.recvline().rstrip('\n\r')
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil('c2 = ')
c2 = r.recvline().rstrip('\n\r')
r.recvuntil(': ')
assert(len(m1) == len(c1))
assert(len(c2) == len(c1))
def round5(cipher_text, k):
p = 0
m = ''
l = len(cipher_text)
if k < 0:
k += l
while k < 0:
k += len(cipher_text)
for i in range(len(cipher_text)):
m += cipher_text[p]
p = p + k
if p >= l:
p -= l
return m
def gcd(p, q):
if q == 0:
return p
return gcd(q, p % q)
def coprime(p, q):
return gcd(p, q) == 1
def inv(q, p):
"""
calculate q^-1 mod p
"""
for i in range(p):
if q * i % p == 1:
return i
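# e.g. inv(3, 26) == 9 since 3 * 9 % 26 == 1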
l = len(m1)
k = 0
for i in range(l):
if coprime(i, l):
if round5(m1, i) == c1:
k = i
break
assert(k != 0)
m2 = round5(c2, inv(k, l))
print 'c1 = ', c1
print 'm1 = ', m1
print 'c2 = ', c2
print 'm2 = ', m2
print 'k, inv(k) = ', (k, inv(k, l))
r.sendline(m2)
r.recvuntil('FLAG_PIECES: ')
flag += r.recvline().rstrip('\n\r')
# round 6
# rail fence cipher
print ''
print 'Round 6'
r.recvuntil('m1 = ')
m1 = r.recvline().rstrip('\n\r')
r.recvuntil('c1 = ')
c1 = r.recvline().rstrip('\n\r')
r.recvuntil('c2 = ')
c2 = r.recvline().rstrip('\r\n')
r.recvuntil(': ')
def rail_fence_encrypt(m, k):
assert(k >= 3)
bucket = [''] * k
p = 0
d = 1
for i in range(len(m)):
bucket[p] += m[i]
p += d
if p == 0:
d = 1
elif p == k - 1:
d = -1
return ''.join(bucket)
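# e.g. rail_fence_encrypt('WEAREDISCOVERED', 3) == 'WECRERDSOEEAIVD'
# (the zigzag rows, read top to bottom)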
def rail_fence_decrypt(c, k):
t = k * 2 - 2
l = len(c)
cycles = int(l / t)
p = 0
    m = [''] * l
for j in range(k):
for i in range(cycles+1):
if t * i + j < l:
m[t * i + j] = c[p]
p += 1
if j != 0 and j != k-1 and t * i + t - j < l:
m[t * i + t - j] = c[p]
p += 1
return ''.join(m)
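# decryption refills the zigzag positions rail by rail: rail j owns indices
# t*i + j and, for the middle rails, t*i + t - j, where t = 2*k - 2 is the cycle length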
k = 0
for i in range(3, len(m1)):
if rail_fence_encrypt(m1, i) == c1:
k = i
break
assert(k != 0)
m2 = rail_fence_decrypt(c2, k)
print 'c1 = ', c1
print 'm1 = ', m1
print 'c2 = ', c2
print 'm2 = ', m2
print 'k = ', k
r.sendline(m2)
r.recvuntil('FLAG_PIECES: ')
flag += r.recvline().rstrip('\n\r')
r.close()
print ''
print 'flag = ', flag
flag = base64.b64decode(flag)
with open('flag4.png', 'wb') as f:
f.write(flag)
|
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import collections
from multiprocessing import managers
import os
import pickle
import threading
import futurist
from oslo_utils import excutils
from oslo_utils import reflection
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
from six.moves import queue as compat_queue
from taskflow import logging
from taskflow import task as task_atom
from taskflow.types import failure
from taskflow.types import notifier
from taskflow.utils import threading_utils
# Execution and reversion events.
EXECUTED = 'executed'
REVERTED = 'reverted'
# See http://bugs.python.org/issue1457119 for why this is so complex...
_PICKLE_ERRORS = [pickle.PickleError, TypeError]
try:
import cPickle as _cPickle
_PICKLE_ERRORS.append(_cPickle.PickleError)
except ImportError:
pass
_PICKLE_ERRORS = tuple(_PICKLE_ERRORS)
_SEND_ERRORS = (IOError, EOFError)
_UPDATE_PROGRESS = task_atom.EVENT_UPDATE_PROGRESS
# Message types/kind sent from worker/child processes...
_KIND_COMPLETE_ME = 'complete_me'
_KIND_EVENT = 'event'
LOG = logging.getLogger(__name__)
def _execute_retry(retry, arguments):
try:
result = retry.execute(**arguments)
except Exception:
result = failure.Failure()
return (EXECUTED, result)
def _revert_retry(retry, arguments):
try:
result = retry.revert(**arguments)
except Exception:
result = failure.Failure()
return (REVERTED, result)
def _execute_task(task, arguments, progress_callback=None):
with notifier.register_deregister(task.notifier,
_UPDATE_PROGRESS,
callback=progress_callback):
try:
task.pre_execute()
result = task.execute(**arguments)
except Exception:
# NOTE(imelnikov): wrap current exception with Failure
# object and return it.
result = failure.Failure()
finally:
task.post_execute()
return (EXECUTED, result)
def _revert_task(task, arguments, result, failures, progress_callback=None):
arguments = arguments.copy()
arguments[task_atom.REVERT_RESULT] = result
arguments[task_atom.REVERT_FLOW_FAILURES] = failures
with notifier.register_deregister(task.notifier,
_UPDATE_PROGRESS,
callback=progress_callback):
try:
task.pre_revert()
result = task.revert(**arguments)
except Exception:
# NOTE(imelnikov): wrap current exception with Failure
# object and return it.
result = failure.Failure()
finally:
task.post_revert()
return (REVERTED, result)
class _ViewableSyncManager(managers.SyncManager):
"""Manager that exposes its state as methods."""
def is_shutdown(self):
return self._state.value == managers.State.SHUTDOWN
def is_running(self):
return self._state.value == managers.State.STARTED
class _Channel(object):
"""Helper wrapper around a multiprocessing queue used by a worker."""
def __init__(self, queue, identity):
self._queue = queue
self._identity = identity
self._sent_messages = collections.defaultdict(int)
self._pid = None
@property
def sent_messages(self):
return self._sent_messages
def put(self, message):
        # NOTE(harlowja): this is done late in execution to ensure that this
        # happens in the child process and not the parent process (where the
        # constructor is called).
if self._pid is None:
self._pid = os.getpid()
message.update({
'sent_on': timeutils.utcnow(),
'sender': {
'pid': self._pid,
'id': self._identity,
},
})
if 'body' not in message:
message['body'] = {}
try:
self._queue.put(message)
except _PICKLE_ERRORS:
LOG.warn("Failed serializing message %s", message, exc_info=True)
return False
except _SEND_ERRORS:
LOG.warn("Failed sending message %s", message, exc_info=True)
return False
else:
self._sent_messages[message['kind']] += 1
return True
class _WaitWorkItem(object):
"""The piece of work that will executed by a process executor.
This will call the target function, then wait until the tasks emitted
events/items have been depleted before offically being finished.
NOTE(harlowja): this is done so that the task function will *not* return
until all of its notifications have been proxied back to its originating
task. If we didn't do this then the executor would see this task as done
and then potentially start tasks that are successors of the task that just
finished even though notifications are still left to be sent from the
previously finished task...
"""
def __init__(self, channel, barrier,
func, task, *args, **kwargs):
self._channel = channel
self._barrier = barrier
self._func = func
self._task = task
self._args = args
self._kwargs = kwargs
def _on_finish(self):
sent_events = self._channel.sent_messages.get(_KIND_EVENT, 0)
if sent_events:
message = {
'created_on': timeutils.utcnow(),
'kind': _KIND_COMPLETE_ME,
}
if self._channel.put(message):
watch = timeutils.StopWatch()
watch.start()
self._barrier.wait()
LOG.blather("Waited %s seconds until task '%s' %s emitted"
" notifications were depleted", watch.elapsed(),
self._task, sent_events)
def __call__(self):
args = self._args
kwargs = self._kwargs
try:
return self._func(self._task, *args, **kwargs)
finally:
self._on_finish()
class _EventSender(object):
"""Sends event information from a child worker process to its creator."""
def __init__(self, channel):
self._channel = channel
def __call__(self, event_type, details):
message = {
'created_on': timeutils.utcnow(),
'kind': _KIND_EVENT,
'body': {
'event_type': event_type,
'details': details,
},
}
self._channel.put(message)
class _Target(object):
"""An immutable helper object that represents a target of a message."""
def __init__(self, task, barrier, identity):
self.task = task
self.barrier = barrier
self.identity = identity
# Counters used to track how many message 'kinds' were proxied...
self.dispatched = collections.defaultdict(int)
def __repr__(self):
return "<%s at 0x%x targeting '%s' with identity '%s'>" % (
reflection.get_class_name(self), id(self),
self.task, self.identity)
class _Dispatcher(object):
"""Dispatches messages received from child worker processes."""
    # When the run() method is busy (typically in a thread) this periodicity
    # controls how long the thread sleeps when there is no active work to
    # dispatch.
_SPIN_PERIODICITY = 0.01
def __init__(self, dispatch_periodicity=None):
if dispatch_periodicity is None:
dispatch_periodicity = self._SPIN_PERIODICITY
if dispatch_periodicity <= 0:
raise ValueError("Provided dispatch periodicity must be greater"
" than zero and not '%s'" % dispatch_periodicity)
self._targets = {}
self._dead = threading.Event()
self._dispatch_periodicity = dispatch_periodicity
self._stop_when_empty = False
def register(self, identity, target):
self._targets[identity] = target
def deregister(self, identity):
try:
target = self._targets.pop(identity)
except KeyError:
pass
else:
            # Just in case, set the barrier to unblock any worker...
target.barrier.set()
if LOG.isEnabledFor(logging.BLATHER):
LOG.blather("Dispatched %s messages %s to target '%s' during"
" the lifetime of its existence in the dispatcher",
sum(six.itervalues(target.dispatched)),
dict(target.dispatched), target)
def reset(self):
self._stop_when_empty = False
self._dead.clear()
if self._targets:
leftover = set(six.iterkeys(self._targets))
while leftover:
self.deregister(leftover.pop())
def interrupt(self):
self._stop_when_empty = True
self._dead.set()
def _dispatch(self, message):
if LOG.isEnabledFor(logging.BLATHER):
LOG.blather("Dispatching message %s (it took %s seconds"
" for it to arrive for processing after being"
" sent)", message,
timeutils.delta_seconds(message['sent_on'],
timeutils.utcnow()))
try:
kind = message['kind']
sender = message['sender']
body = message['body']
except (KeyError, ValueError, TypeError):
LOG.warn("Badly formatted message %s received", message,
exc_info=True)
return
target = self._targets.get(sender['id'])
if target is None:
            # Must have been removed...
return
if kind == _KIND_COMPLETE_ME:
target.dispatched[kind] += 1
target.barrier.set()
elif kind == _KIND_EVENT:
task = target.task
target.dispatched[kind] += 1
task.notifier.notify(body['event_type'], body['details'])
else:
LOG.warn("Unknown message '%s' found in message from sender"
" %s to target '%s'", kind, sender, target)
def run(self, queue):
watch = timeutils.StopWatch(duration=self._dispatch_periodicity)
while (not self._dead.is_set() or
(self._stop_when_empty and self._targets)):
watch.restart()
leftover = watch.leftover()
while leftover:
try:
message = queue.get(timeout=leftover)
except compat_queue.Empty:
break
else:
self._dispatch(message)
leftover = watch.leftover()
leftover = watch.leftover()
if leftover:
self._dead.wait(leftover)
class SerialRetryExecutor(object):
"""Executes and reverts retries."""
def __init__(self):
self._executor = futurist.SynchronousExecutor()
def start(self):
"""Prepare to execute retries."""
self._executor.restart()
def stop(self):
"""Finalize retry executor."""
self._executor.shutdown()
def execute_retry(self, retry, arguments):
"""Schedules retry execution."""
fut = self._executor.submit(_execute_retry, retry, arguments)
fut.atom = retry
return fut
def revert_retry(self, retry, arguments):
"""Schedules retry reversion."""
fut = self._executor.submit(_revert_retry, retry, arguments)
fut.atom = retry
return fut
@six.add_metaclass(abc.ABCMeta)
class TaskExecutor(object):
"""Executes and reverts tasks.
    This class takes a task and its arguments and executes or reverts it.
    It encapsulates knowledge on how the task should be executed or reverted:
    right now, on a separate thread, on another machine, etc.
"""
@abc.abstractmethod
def execute_task(self, task, task_uuid, arguments,
progress_callback=None):
"""Schedules task execution."""
@abc.abstractmethod
def revert_task(self, task, task_uuid, arguments, result, failures,
progress_callback=None):
"""Schedules task reversion."""
def start(self):
"""Prepare to execute tasks."""
def stop(self):
"""Finalize task executor."""
class SerialTaskExecutor(TaskExecutor):
"""Executes tasks one after another."""
def __init__(self):
self._executor = futurist.SynchronousExecutor()
def start(self):
self._executor.restart()
def stop(self):
self._executor.shutdown()
def execute_task(self, task, task_uuid, arguments, progress_callback=None):
fut = self._executor.submit(_execute_task,
task, arguments,
progress_callback=progress_callback)
fut.atom = task
return fut
def revert_task(self, task, task_uuid, arguments, result, failures,
progress_callback=None):
fut = self._executor.submit(_revert_task,
task, arguments, result, failures,
progress_callback=progress_callback)
fut.atom = task
return fut
class ParallelTaskExecutor(TaskExecutor):
"""Executes tasks in parallel.
Submits tasks to an executor which should provide an interface similar
    to concurrent.futures.Executor.
"""
#: Options this executor supports (passed in from engine options).
OPTIONS = frozenset(['max_workers'])
def __init__(self, executor=None, max_workers=None):
self._executor = executor
self._max_workers = max_workers
self._own_executor = executor is None
@abc.abstractmethod
def _create_executor(self, max_workers=None):
"""Called when an executor has not been provided to make one."""
def _submit_task(self, func, task, *args, **kwargs):
fut = self._executor.submit(func, task, *args, **kwargs)
fut.atom = task
return fut
def execute_task(self, task, task_uuid, arguments, progress_callback=None):
return self._submit_task(_execute_task, task, arguments,
progress_callback=progress_callback)
def revert_task(self, task, task_uuid, arguments, result, failures,
progress_callback=None):
return self._submit_task(_revert_task, task, arguments, result,
failures, progress_callback=progress_callback)
def start(self):
if self._own_executor:
self._executor = self._create_executor(
max_workers=self._max_workers)
def stop(self):
if self._own_executor:
self._executor.shutdown(wait=True)
self._executor = None
class ParallelThreadTaskExecutor(ParallelTaskExecutor):
"""Executes tasks in parallel using a thread pool executor."""
def _create_executor(self, max_workers=None):
return futurist.ThreadPoolExecutor(max_workers=max_workers)
class ParallelProcessTaskExecutor(ParallelTaskExecutor):
"""Executes tasks in parallel using a process pool executor.
    NOTE(harlowja): this executor executes tasks in external processes, which
    implies that tasks sent to those external processes must be pickleable
    (since that is how multiprocessing works, sending pickled objects back
    and forth) and that the bound handlers (for progress updating in
    particular) are proxied correctly from the external process back to the
    parent process, to ensure that callbacks registered in the parent are
    executed on events in the child.
"""
#: Options this executor supports (passed in from engine options).
OPTIONS = frozenset(['max_workers', 'dispatch_periodicity'])
def __init__(self, executor=None, max_workers=None,
dispatch_periodicity=None):
super(ParallelProcessTaskExecutor, self).__init__(
executor=executor, max_workers=max_workers)
self._manager = _ViewableSyncManager()
self._dispatcher = _Dispatcher(
dispatch_periodicity=dispatch_periodicity)
# Only created after starting...
self._worker = None
self._queue = None
def _create_executor(self, max_workers=None):
return futurist.ProcessPoolExecutor(max_workers=max_workers)
def start(self):
if threading_utils.is_alive(self._worker):
raise RuntimeError("Worker thread must be stopped via stop()"
" before starting/restarting")
super(ParallelProcessTaskExecutor, self).start()
# These don't seem restartable; make a new one...
if self._manager.is_shutdown():
self._manager = _ViewableSyncManager()
if not self._manager.is_running():
self._manager.start()
self._dispatcher.reset()
self._queue = self._manager.Queue()
self._worker = threading_utils.daemon_thread(self._dispatcher.run,
self._queue)
self._worker.start()
def stop(self):
self._dispatcher.interrupt()
super(ParallelProcessTaskExecutor, self).stop()
if threading_utils.is_alive(self._worker):
self._worker.join()
self._worker = None
self._queue = None
self._dispatcher.reset()
self._manager.shutdown()
self._manager.join()
def _rebind_task(self, task, clone, channel, progress_callback=None):
# Creates and binds proxies for all events the task could receive
# so that when the clone runs in another process that this task
        # can receive the same notifications (thus making it look like
        # the notifications are transparently happening in this process).
needed = set()
for (event_type, listeners) in task.notifier.listeners_iter():
if listeners:
needed.add(event_type)
if progress_callback is not None:
needed.add(_UPDATE_PROGRESS)
if needed:
sender = _EventSender(channel)
for event_type in needed:
clone.notifier.register(event_type, sender)
def _submit_task(self, func, task, *args, **kwargs):
"""Submit a function to run the given task (with given args/kwargs).
NOTE(harlowja): Adjust all events to be proxies instead since we want
those callbacks to be activated in this process, not in the child,
also since typically callbacks are functors (or callables) we can
not pickle those in the first place...
To make sure people understand how this works, the following is a
lengthy description of what is going on here, read at will:
So to ensure that we are proxying task triggered events that occur
in the executed subprocess (which will be created and used by the
thing using the multiprocessing based executor) we need to establish
a link between that process and this process that ensures that when a
event is triggered in that task in that process that a corresponding
event is triggered on the original task that was requested to be ran
in this process.
To accomplish this we have to create a copy of the task (without
any listeners) and then reattach a new set of listeners that will
now instead of calling the desired listeners just place messages
for this process (a dispatcher thread that is created in this class)
to dispatch to the original task (using a common queue + per task
sender identity/target that is used and associated to know which task
        to proxy back to, since it is possible that there may be *many*
        subprocesses running at the same time, each running a different task
and using the same common queue to submit messages back to).
Once the subprocess task has finished execution, the executor will
then trigger a callback that will remove the task + target from the
dispatcher (which will stop any further proxying back to the original
task).
"""
progress_callback = kwargs.pop('progress_callback', None)
clone = task.copy(retain_listeners=False)
identity = uuidutils.generate_uuid()
target = _Target(task, self._manager.Event(), identity)
channel = _Channel(self._queue, identity)
self._rebind_task(task, clone, channel,
progress_callback=progress_callback)
def register():
if progress_callback is not None:
task.notifier.register(_UPDATE_PROGRESS, progress_callback)
self._dispatcher.register(identity, target)
def deregister():
if progress_callback is not None:
task.notifier.deregister(_UPDATE_PROGRESS, progress_callback)
self._dispatcher.deregister(identity)
register()
work = _WaitWorkItem(channel, target.barrier,
func, clone, *args, **kwargs)
try:
fut = self._executor.submit(work)
except RuntimeError:
with excutils.save_and_reraise_exception():
deregister()
fut.atom = task
fut.add_done_callback(lambda fut: deregister())
return fut
|
|
from sympy.integrals.transforms import (mellin_transform,
inverse_mellin_transform, laplace_transform, inverse_laplace_transform,
fourier_transform, inverse_fourier_transform,
sine_transform, inverse_sine_transform,
cosine_transform, inverse_cosine_transform,
hankel_transform, inverse_hankel_transform,
LaplaceTransform, FourierTransform, SineTransform, CosineTransform,
InverseLaplaceTransform, InverseFourierTransform, InverseSineTransform, InverseCosineTransform,
HankelTransform, InverseHankelTransform)
from sympy import (
gamma, exp, oo, Heaviside, symbols, Symbol, re, factorial, pi,
cos, S, And, sin, sqrt, I, log, tan, hyperexpand, meijerg,
EulerGamma, erf, besselj, bessely, besseli, besselk,
exp_polar, polar_lift, unpolarify, Function, expint, expand_mul,
combsimp, trigsimp)
from sympy.utilities.pytest import XFAIL, slow, skip
from sympy.abc import x, s, a, b, c, d
nu, beta, rho = symbols('nu beta rho')
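# Evaluated transforms below return 3-tuples: mellin_transform gives
# (F(s), (a, b), cond) with (a, b) the fundamental strip, while
# laplace_transform gives (F(s), a, cond) with a the half-plane of convergence.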
def test_undefined_function():
from sympy import Function, MellinTransform
f = Function('f')
assert mellin_transform(f(x), x, s) == MellinTransform(f(x), x, s)
assert mellin_transform(f(x) + exp(-x), x, s) == \
(MellinTransform(f(x), x, s) + gamma(s), (0, oo), True)
assert laplace_transform(2*f(x), x, s) == 2*LaplaceTransform(f(x), x, s)
# TODO test derivative and other rules when implemented
def test_free_symbols():
from sympy import Function
f = Function('f')
assert mellin_transform(f(x), x, s).free_symbols == set([s])
assert mellin_transform(f(x)*a, x, s).free_symbols == set([s, a])
def test_as_integral():
from sympy import Function, Integral
f = Function('f')
assert mellin_transform(f(x), x, s).rewrite('Integral') == \
Integral(x**(s - 1)*f(x), (x, 0, oo))
assert fourier_transform(f(x), x, s).rewrite('Integral') == \
Integral(f(x)*exp(-2*I*pi*s*x), (x, -oo, oo))
assert laplace_transform(f(x), x, s).rewrite('Integral') == \
Integral(f(x)*exp(-s*x), (x, 0, oo))
assert str(inverse_mellin_transform(f(s), s, x, (a, b)).rewrite('Integral')) \
== "Integral(x**(-s)*f(s), (s, _c - oo*I, _c + oo*I))"
assert str(inverse_laplace_transform(f(s), s, x).rewrite('Integral')) == \
"Integral(f(s)*exp(s*x), (s, _c - oo*I, _c + oo*I))"
assert inverse_fourier_transform(f(s), s, x).rewrite('Integral') == \
Integral(f(s)*exp(2*I*pi*s*x), (s, -oo, oo))
# NOTE this is stuck in risch because meijerint cannot handle it
@slow
@XFAIL
def test_mellin_transform_fail():
skip("Risch takes forever.")
from sympy import Max, Min
MT = mellin_transform
bpos = symbols('b', positive=True)
bneg = symbols('b', negative=True)
expr = (sqrt(x + b**2) + b)**a/sqrt(x + b**2)
# TODO does not work with bneg, argument wrong. Needs changes to matching.
assert MT(expr.subs(b, -bpos), x, s) == \
((-1)**(a + 1)*2**(a + 2*s)*bpos**(a + 2*s - 1)*gamma(a + s)
*gamma(1 - a - 2*s)/gamma(1 - s),
(-re(a), -re(a)/2 + S(1)/2), True)
expr = (sqrt(x + b**2) + b)**a
assert MT(expr.subs(b, -bpos), x, s) == \
(
2**(a + 2*s)*a*bpos**(a + 2*s)*gamma(-a - 2*
s)*gamma(a + s)/gamma(-s + 1),
(-re(a), -re(a)/2), True)
# Test exponent 1:
assert MT(expr.subs({b: -bpos, a: 1}), x, s) == \
(-bpos**(2*s + 1)*gamma(s)*gamma(-s - S(1)/2)/(2*sqrt(pi)),
(-1, -S(1)/2), True)
def test_mellin_transform():
from sympy import Max, Min, Ne
MT = mellin_transform
bpos = symbols('b', positive=True)
# 8.4.2
assert MT(x**nu*Heaviside(x - 1), x, s) == \
(-1/(nu + s), (-oo, -re(nu)), True)
assert MT(x**nu*Heaviside(1 - x), x, s) == \
(1/(nu + s), (-re(nu), oo), True)
assert MT((1 - x)**(beta - 1)*Heaviside(1 - x), x, s) == \
(gamma(beta)*gamma(s)/gamma(beta + s), (0, oo), re(-beta) < 0)
assert MT((x - 1)**(beta - 1)*Heaviside(x - 1), x, s) == \
(gamma(beta)*gamma(1 - beta - s)/gamma(1 - s),
(-oo, -re(beta) + 1), re(-beta) < 0)
assert MT((1 + x)**(-rho), x, s) == \
(gamma(s)*gamma(rho - s)/gamma(rho), (0, re(rho)), True)
# TODO also the conditions should be simplified
assert MT(abs(1 - x)**(-rho), x, s) == (
cos(pi*(rho/2 - s))*gamma(s)*gamma(rho - s)/(cos(pi*rho/2)*gamma(rho)),
(0, re(rho)), And(re(rho) - 1 < 0, re(rho) < 1))
mt = MT((1 - x)**(beta - 1)*Heaviside(1 - x)
+ a*(x - 1)**(beta - 1)*Heaviside(x - 1), x, s)
    assert mt[1:] == ((0, -re(beta) + 1), True)
assert MT((x**a - b**a)/(x - b), x, s)[0] == \
pi*b**(a + s - 1)*sin(pi*a)/(sin(pi*s)*sin(pi*(a + s)))
assert MT((x**a - bpos**a)/(x - bpos), x, s) == \
(pi*bpos**(a + s - 1)*sin(pi*a)/(sin(pi*s)*sin(pi*(a + s))),
(Max(-re(a), 0), Min(1 - re(a), 1)), True)
expr = (sqrt(x + b**2) + b)**a
assert MT(expr.subs(b, bpos), x, s) == \
(-a*(2*bpos)**(a + 2*s)*gamma(s)*gamma(-a - 2*s)/gamma(-a - s + 1),
(0, -re(a)/2), True)
expr = (sqrt(x + b**2) + b)**a/sqrt(x + b**2)
assert MT(expr.subs(b, bpos), x, s) == \
(2**(a + 2*s)*bpos**(a + 2*s - 1)*gamma(s)
*gamma(1 - a - 2*s)/gamma(1 - a - s),
(0, -re(a)/2 + S(1)/2), True)
# 8.4.2
assert MT(exp(-x), x, s) == (gamma(s), (0, oo), True)
assert MT(exp(-1/x), x, s) == (gamma(-s), (-oo, 0), True)
# 8.4.5
assert MT(log(x)**4*Heaviside(1 - x), x, s) == (24/s**5, (0, oo), True)
assert MT(log(x)**3*Heaviside(x - 1), x, s) == (6/s**4, (-oo, 0), True)
assert MT(log(x + 1), x, s) == (pi/(s*sin(pi*s)), (-1, 0), True)
assert MT(log(1/x + 1), x, s) == (pi/(s*sin(pi*s)), (0, 1), True)
assert MT(log(abs(1 - x)), x, s) == (pi/(s*tan(pi*s)), (-1, 0), True)
assert MT(log(abs(1 - 1/x)), x, s) == (pi/(s*tan(pi*s)), (0, 1), True)
# TODO we cannot currently do these (needs summation of 3F2(-1))
# this also implies that they cannot be written as a single g-function
# (although this is possible)
mt = MT(log(x)/(x + 1), x, s)
assert mt[1:] == ((0, 1), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
mt = MT(log(x)**2/(x + 1), x, s)
assert mt[1:] == ((0, 1), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
mt = MT(log(x)/(x + 1)**2, x, s)
assert mt[1:] == ((0, 2), True)
assert not hyperexpand(mt[0], allow_hyper=True).has(meijerg)
# 8.4.14
assert MT(erf(sqrt(x)), x, s) == \
(-gamma(s + S(1)/2)/(sqrt(pi)*s), (-S(1)/2, 0), True)
def test_mellin_transform_bessel():
from sympy import Max, Min, hyper, meijerg
MT = mellin_transform
# 8.4.19
assert MT(besselj(a, 2*sqrt(x)), x, s) == \
(gamma(a/2 + s)/gamma(a/2 - s + 1), (-re(a)/2, S(3)/4), True)
assert MT(sin(sqrt(x))*besselj(a, sqrt(x)), x, s) == \
(2**a*gamma(-2*s + S(1)/2)*gamma(a/2 + s + S(1)/2)/(
gamma(-a/2 - s + 1)*gamma(a - 2*s + 1)), (
-re(a)/2 - S(1)/2, S(1)/4), True)
assert MT(cos(sqrt(x))*besselj(a, sqrt(x)), x, s) == \
(2**a*gamma(a/2 + s)*gamma(-2*s + S(1)/2)/(
gamma(-a/2 - s + S(1)/2)*gamma(a - 2*s + 1)), (
-re(a)/2, S(1)/4), True)
assert MT(besselj(a, sqrt(x))**2, x, s) == \
(gamma(a + s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - s)*gamma(1 + a - s)),
(-re(a), S(1)/2), True)
assert MT(besselj(a, sqrt(x))*besselj(-a, sqrt(x)), x, s) == \
(gamma(s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - a - s)*gamma(1 + a - s)),
(0, S(1)/2), True)
# NOTE: prudnikov gives the strip below as (1/2 - re(a), 1). As far as
# I can see this is wrong (since besselj(z) ~ 1/sqrt(z) for z large)
assert MT(besselj(a - 1, sqrt(x))*besselj(a, sqrt(x)), x, s) == \
(gamma(1 - s)*gamma(a + s - S(1)/2)
/ (sqrt(pi)*gamma(S(3)/2 - s)*gamma(a - s + S(1)/2)),
(S(1)/2 - re(a), S(1)/2), True)
assert MT(besselj(a, sqrt(x))*besselj(b, sqrt(x)), x, s) == \
(4**s*gamma(1 - 2*s)*gamma((a + b)/2 + s)
/ (gamma(1 - s + (b - a)/2)*gamma(1 - s + (a - b)/2)
*gamma( 1 - s + (a + b)/2)),
(-(re(a) + re(b))/2, S(1)/2), True)
assert MT(besselj(a, sqrt(x))**2 + besselj(-a, sqrt(x))**2, x, s)[1:] == \
((Max(re(a), -re(a)), S(1)/2), True)
# Section 8.4.20
assert MT(bessely(a, 2*sqrt(x)), x, s) == \
(-cos(pi*(a/2 - s))*gamma(s - a/2)*gamma(s + a/2)/pi,
(Max(-re(a)/2, re(a)/2), S(3)/4), True)
assert MT(sin(sqrt(x))*bessely(a, sqrt(x)), x, s) == \
(-4**s*sin(pi*(a/2 - s))*gamma(S(1)/2 - 2*s)
* gamma((1 - a)/2 + s)*gamma((1 + a)/2 + s)
/ (sqrt(pi)*gamma(1 - s - a/2)*gamma(1 - s + a/2)),
(Max(-(re(a) + 1)/2, (re(a) - 1)/2), S(1)/4), True)
assert MT(cos(sqrt(x))*bessely(a, sqrt(x)), x, s) == \
(-4**s*cos(pi*(a/2 - s))*gamma(s - a/2)*gamma(s + a/2)*gamma(S(1)/2 - 2*s)
/ (sqrt(pi)*gamma(S(1)/2 - s - a/2)*gamma(S(1)/2 - s + a/2)),
(Max(-re(a)/2, re(a)/2), S(1)/4), True)
assert MT(besselj(a, sqrt(x))*bessely(a, sqrt(x)), x, s) == \
(-cos(pi*s)*gamma(s)*gamma(a + s)*gamma(S(1)/2 - s)
/ (pi**S('3/2')*gamma(1 + a - s)),
(Max(-re(a), 0), S(1)/2), True)
assert MT(besselj(a, sqrt(x))*bessely(b, sqrt(x)), x, s) == \
(-4**s*cos(pi*(a/2 - b/2 + s))*gamma(1 - 2*s)
* gamma(a/2 - b/2 + s)*gamma(a/2 + b/2 + s)
/ (pi*gamma(a/2 - b/2 - s + 1)*gamma(a/2 + b/2 - s + 1)),
(Max((-re(a) + re(b))/2, (-re(a) - re(b))/2), S(1)/2), True)
# NOTE bessely(a, sqrt(x))**2 and bessely(a, sqrt(x))*bessely(b, sqrt(x))
# are a mess (no matter what way you look at it ...)
assert MT(bessely(a, sqrt(x))**2, x, s)[1:] == \
((Max(-re(a), 0, re(a)), S(1)/2), True)
# Section 8.4.22
# TODO we can't do any of these (delicate cancellation)
# Section 8.4.23
assert MT(besselk(a, 2*sqrt(x)), x, s) == \
(gamma(
s - a/2)*gamma(s + a/2)/2, (Max(-re(a)/2, re(a)/2), oo), True)
assert MT(besselj(a, 2*sqrt(2*sqrt(x)))*besselk(
a, 2*sqrt(2*sqrt(x))), x, s) == (4**(-s)*gamma(2*s)*
gamma(a/2 + s)/(2*gamma(a/2 - s + 1)), (Max(0, -re(a)/2), oo), True)
# TODO bessely(a, x)*besselk(a, x) is a mess
assert MT(besseli(a, sqrt(x))*besselk(a, sqrt(x)), x, s) == \
(gamma(s)*gamma(
a + s)*gamma(-s + S(1)/2)/(2*sqrt(pi)*gamma(a - s + 1)),
(Max(-re(a), 0), S(1)/2), True)
assert MT(besseli(b, sqrt(x))*besselk(a, sqrt(x)), x, s) == \
(2**(2*s - 1)*gamma(-2*s + 1)*gamma(-a/2 + b/2 + s)* \
gamma(a/2 + b/2 + s)/(gamma(-a/2 + b/2 - s + 1)* \
gamma(a/2 + b/2 - s + 1)), (Max(-re(a)/2 - re(b)/2, \
re(a)/2 - re(b)/2), S(1)/2), True)
# TODO products of besselk are a mess
mt = MT(exp(-x/2)*besselk(a, x/2), x, s)
mt0 = combsimp((trigsimp(combsimp(mt[0].expand(func=True)))))
assert mt0 == 2*pi**(S(3)/2)*cos(pi*s)*gamma(-s + S(1)/2)/(
(cos(2*pi*a) - cos(2*pi*s))*gamma(-a - s + 1)*gamma(a - s + 1))
assert mt[1:] == ((Max(-re(a), re(a)), oo), True)
# TODO exp(x/2)*besselk(a, x/2) [etc] cannot currently be done
# TODO various strange products of special orders
def test_expint():
from sympy import E1, expint, Max, re, lerchphi, Symbol, simplify, Si, Ci, Ei
aneg = Symbol('a', negative=True)
u = Symbol('u', polar=True)
assert mellin_transform(E1(x), x, s) == (gamma(s)/s, (0, oo), True)
assert inverse_mellin_transform(gamma(s)/s, s, x,
(0, oo)).rewrite(expint).expand() == E1(x)
assert mellin_transform(expint(a, x), x, s) == \
(gamma(s)/(a + s - 1), (Max(1 - re(a), 0), oo), True)
# XXX IMT has hickups with complicated strips ...
assert simplify(unpolarify(
inverse_mellin_transform(gamma(s)/(aneg + s - 1), s, x,
(1 - aneg, oo)).rewrite(expint).expand(func=True))) == \
expint(aneg, x)
assert mellin_transform(Si(x), x, s) == \
(-2**s*sqrt(pi)*gamma(s/2 + S(1)/2)/(
2*s*gamma(-s/2 + 1)), (-1, 0), True)
assert inverse_mellin_transform(-2**s*sqrt(pi)*gamma((s + 1)/2)
/(2*s*gamma(-s/2 + 1)), s, x, (-1, 0)) \
== Si(x)
assert mellin_transform(Ci(sqrt(x)), x, s) == \
(-2**(2*s - 1)*sqrt(pi)*gamma(s)/(s*gamma(-s + S(1)/2)), (0, 1), True)
assert inverse_mellin_transform(
-4**s*sqrt(pi)*gamma(s)/(2*s*gamma(-s + S(1)/2)),
s, u, (0, 1)).expand() == Ci(sqrt(u))
# TODO LT of Si, Shi, Chi is a mess ...
assert laplace_transform(Ci(x), x, s) == (-log(1 + s**2)/2/s, 0, True)
assert laplace_transform(expint(a, x), x, s) == \
(lerchphi(s*polar_lift(-1), 1, a), 0, S(0) < re(a))
assert laplace_transform(expint(1, x), x, s) == (log(s + 1)/s, 0, True)
assert laplace_transform(expint(2, x), x, s) == \
((s - log(s + 1))/s**2, 0, True)
assert inverse_laplace_transform(-log(1 + s**2)/2/s, s, u).expand() == \
Heaviside(u)*Ci(u)
assert inverse_laplace_transform(log(s + 1)/s, s, x).rewrite(expint) == \
Heaviside(x)*E1(x)
assert inverse_laplace_transform((s - log(s + 1))/s**2, s,
x).rewrite(expint).expand() == \
(expint(2, x)*Heaviside(x)).rewrite(Ei).rewrite(expint).expand()
def test_inverse_mellin_transform():
from sympy import (sin, simplify, expand_func, powsimp, Max, Min, expand,
powdenest, powsimp, exp_polar, combsimp, cos, cot)
IMT = inverse_mellin_transform
assert IMT(gamma(s), s, x, (0, oo)) == exp(-x)
assert IMT(gamma(-s), s, x, (-oo, 0)) == exp(-1/x)
assert simplify(IMT(s/(2*s**2 - 2), s, x, (2, oo))) == \
(x**2 + 1)*Heaviside(1 - x)/(4*x)
# test passing "None"
assert IMT(1/(s**2 - 1), s, x, (-1, None)) == \
-x*Heaviside(-x + 1)/2 - Heaviside(x - 1)/(2*x)
assert IMT(1/(s**2 - 1), s, x, (None, 1)) == \
-x*Heaviside(-x + 1)/2 - Heaviside(x - 1)/(2*x)
# test expansion of sums
assert IMT(gamma(s) + gamma(s - 1), s, x, (1, oo)) == (x + 1)*exp(-x)/x
# test factorisation of polys
r = symbols('r', real=True)
assert IMT(1/(s**2 + 1), s, exp(-x), (None, oo)
).subs(x, r).rewrite(sin).simplify() \
== sin(r)*Heaviside(1 - exp(-r))
# test multiplicative substitution
_a, _b = symbols('a b', positive=True)
assert IMT(_b**(-s/_a)*factorial(s/_a)/s, s, x, (0, oo)) == exp(-_b*x**_a)
assert IMT(factorial(_a/_b + s/_b)/(_a + s), s, x, (-_a, oo)) == x**_a*exp(-x**_b)
def simp_pows(expr):
return simplify(powsimp(expand_mul(expr, deep=False), force=True)).replace(exp_polar, exp)
# Now test the inverses of all direct transforms tested above
# Section 8.4.2
assert IMT(-1/(nu + s), s, x, (-oo, None)) == x**nu*Heaviside(x - 1)
assert IMT(1/(nu + s), s, x, (None, oo)) == x**nu*Heaviside(1 - x)
assert simp_pows(IMT(gamma(beta)*gamma(s)/gamma(s + beta), s, x, (0, oo))) \
== (1 - x)**(beta - 1)*Heaviside(1 - x)
assert simp_pows(IMT(gamma(beta)*gamma(1 - beta - s)/gamma(1 - s),
s, x, (-oo, None))) \
== (x - 1)**(beta - 1)*Heaviside(x - 1)
assert simp_pows(IMT(gamma(s)*gamma(rho - s)/gamma(rho), s, x, (0, None))) \
== (1/(x + 1))**rho
assert simp_pows(IMT(d**c*d**(s - 1)*sin(pi*c)
*gamma(s)*gamma(s + c)*gamma(1 - s)*gamma(1 - s - c)/pi,
s, x, (Max(-re(c), 0), Min(1 - re(c), 1)))) \
== (x**c - d**c)/(x - d)
assert simplify(IMT(1/sqrt(pi)*(-c/2)*gamma(s)*gamma((1 - c)/2 - s)
*gamma(-c/2 - s)/gamma(1 - c - s),
s, x, (0, -re(c)/2))) == \
(1 + sqrt(x + 1))**c
assert simplify(IMT(2**(a + 2*s)*b**(a + 2*s - 1)*gamma(s)*gamma(1 - a - 2*s)
/gamma(1 - a - s), s, x, (0, (-re(a) + 1)/2))) == \
b**(a - 1)*(sqrt(1 + x/b**2) + 1)**(a - 1)*(b**2*sqrt(1 + x/b**2) +
b**2 + x)/(b**2 + x)
assert simplify(IMT(-2**(c + 2*s)*c*b**(c + 2*s)*gamma(s)*gamma(-c - 2*s)
/ gamma(-c - s + 1), s, x, (0, -re(c)/2))) == \
b**c*(sqrt(1 + x/b**2) + 1)**c
# Section 8.4.5
assert IMT(24/s**5, s, x, (0, oo)) == log(x)**4*Heaviside(1 - x)
assert expand(IMT(6/s**4, s, x, (-oo, 0)), force=True) == \
log(x)**3*Heaviside(x - 1)
assert IMT(pi/(s*sin(pi*s)), s, x, (-1, 0)) == log(x + 1)
assert IMT(pi/(s*sin(pi*s/2)), s, x, (-2, 0)) == log(x**2 + 1)
assert IMT(pi/(s*sin(2*pi*s)), s, x, (-S(1)/2, 0)) == log(sqrt(x) + 1)
assert IMT(pi/(s*sin(pi*s)), s, x, (0, 1)) == log(1 + 1/x)
# TODO
def mysimp(expr):
from sympy import expand, logcombine, powsimp
return expand(
powsimp(logcombine(expr, force=True), force=True, deep=True),
force=True).replace(exp_polar, exp)
assert mysimp(mysimp(IMT(pi/(s*tan(pi*s)), s, x, (-1, 0)))) in [
log(1 - x)*Heaviside(1 - x) + log(x - 1)*Heaviside(x - 1),
log(x)*Heaviside(x - 1) + log(1 - 1/x)*Heaviside(x - 1) + log(-x +
1)*Heaviside(-x + 1)]
# test passing cot
assert mysimp(IMT(pi*cot(pi*s)/s, s, x, (0, 1))) in [
log(1/x - 1)*Heaviside(1 - x) + log(1 - 1/x)*Heaviside(x - 1),
-log(x)*Heaviside(-x + 1) + log(1 - 1/x)*Heaviside(x - 1) + log(-x +
1)*Heaviside(-x + 1), ]
# 8.4.14
assert IMT(-gamma(s + S(1)/2)/(sqrt(pi)*s), s, x, (-S(1)/2, 0)) == \
erf(sqrt(x))
# 8.4.19
assert simplify(IMT(gamma(a/2 + s)/gamma(a/2 - s + 1), s, x, (-re(a)/2, S(3)/4))) \
== besselj(a, 2*sqrt(x))
assert simplify(IMT(2**a*gamma(S(1)/2 - 2*s)*gamma(s + (a + 1)/2)
/ (gamma(1 - s - a/2)*gamma(1 - 2*s + a)),
s, x, (-(re(a) + 1)/2, S(1)/4))) == \
sin(sqrt(x))*besselj(a, sqrt(x))
assert simplify(IMT(2**a*gamma(a/2 + s)*gamma(S(1)/2 - 2*s)
/ (gamma(S(1)/2 - s - a/2)*gamma(1 - 2*s + a)),
s, x, (-re(a)/2, S(1)/4))) == \
cos(sqrt(x))*besselj(a, sqrt(x))
# TODO this comes out as an amazing mess, but simplifies nicely
assert simplify(IMT(gamma(a + s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - s)*gamma(1 + a - s)),
s, x, (-re(a), S(1)/2))) == \
besselj(a, sqrt(x))**2
assert simplify(IMT(gamma(s)*gamma(S(1)/2 - s)
/ (sqrt(pi)*gamma(1 - s - a)*gamma(1 + a - s)),
s, x, (0, S(1)/2))) == \
besselj(-a, sqrt(x))*besselj(a, sqrt(x))
assert simplify(IMT(4**s*gamma(-2*s + 1)*gamma(a/2 + b/2 + s)
/ (gamma(-a/2 + b/2 - s + 1)*gamma(a/2 - b/2 - s + 1)
*gamma(a/2 + b/2 - s + 1)),
s, x, (-(re(a) + re(b))/2, S(1)/2))) == \
besselj(a, sqrt(x))*besselj(b, sqrt(x))
# Section 8.4.20
# TODO this can be further simplified!
assert simplify(IMT(-2**(2*s)*cos(pi*a/2 - pi*b/2 + pi*s)*gamma(-2*s + 1) *
gamma(a/2 - b/2 + s)*gamma(a/2 + b/2 + s) /
(pi*gamma(a/2 - b/2 - s + 1)*gamma(a/2 + b/2 - s + 1)),
s, x,
(Max(-re(a)/2 - re(b)/2, -re(a)/2 + re(b)/2), S(1)/2))) == \
besselj(a, sqrt(x))*-(besselj(-b, sqrt(x)) -
besselj(b, sqrt(x))*cos(pi*b))/sin(pi*b)
# TODO more
# for coverage
assert IMT(pi/cos(pi*s), s, x, (0, S(1)/2)) == sqrt(x)/(x + 1)
def test_laplace_transform():
from sympy import (fresnels, fresnelc, hyper)
LT = laplace_transform
    a, b, c = symbols('a b c', positive=True)
t = symbols('t')
w = Symbol("w")
f = Function("f")
# Test unevaluated form
assert laplace_transform(f(t), t, w) == LaplaceTransform(f(t), t, w)
assert inverse_laplace_transform(
f(w), w, t, plane=0) == InverseLaplaceTransform(f(w), w, t, 0)
# test a bug
spos = symbols('s', positive=True)
assert LT(exp(t), t, spos)[:2] == (1/(spos - 1), True)
# basic tests from wikipedia
assert LT((t - a)**b*exp(-c*(t - a))*Heaviside(t - a), t, s) == \
((s + c)**(-b - 1)*exp(-a*s)*gamma(b + 1), -c, True)
assert LT(t**a, t, s) == (s**(-a - 1)*gamma(a + 1), 0, True)
assert LT(Heaviside(t), t, s) == (1/s, 0, True)
assert LT(Heaviside(t - a), t, s) == (exp(-a*s)/s, 0, True)
assert LT(1 - exp(-a*t), t, s) == (a/(s*(a + s)), 0, True)
assert LT((exp(2*t) - 1)*exp(-b - t)*Heaviside(t)/2, t, s, noconds=True) \
== exp(-b)/(s**2 - 1)
assert LT(exp(t), t, s)[:2] == (1/(s - 1), 1)
assert LT(exp(2*t), t, s)[:2] == (1/(s - 2), 2)
assert LT(exp(a*t), t, s)[:2] == (1/(s - a), a)
assert LT(log(t/a), t, s) == ((log(a*s) + EulerGamma)/s/-1, 0, True)
assert LT(erf(t), t, s) == ((-erf(s/2) + 1)*exp(s**2/4)/s, 0, True)
assert LT(sin(a*t), t, s) == (a/(a**2 + s**2), 0, True)
assert LT(cos(a*t), t, s) == (s/(a**2 + s**2), 0, True)
# TODO would be nice to have these come out better
assert LT(
exp(-a*t)*sin(b*t), t, s) == (b/(b**2 + (a + s)**2), -a, True)
assert LT(exp(-a*t)*cos(b*t), t, s) == \
((a + s)/(b**2 + (a + s)**2), -a, True)
# TODO sinh, cosh have delicate cancellation
assert LT(besselj(0, t), t, s) == (1/sqrt(1 + s**2), 0, True)
assert LT(besselj(1, t), t, s) == (1 - 1/sqrt(1 + 1/s**2), 0, True)
# TODO general order works, but is a *mess*
# TODO besseli also works, but is an even greater mess
# test a bug in conditions processing
# TODO the auxiliary condition should be recognised/simplified
assert LT(exp(t)*cos(t), t, s)[:-1] in [
((s - 1)/(s**2 - 2*s + 2), -oo),
((s - 1)/((s - 1)**2 + 1), -oo),
]
# Fresnel functions
assert laplace_transform(fresnels(t), t, s) == \
((-sin(s**2/(2*pi))*fresnels(s/pi) + sin(s**2/(2*pi))/2 -
cos(s**2/(2*pi))*fresnelc(s/pi) + cos(s**2/(2*pi))/2)/s, 0, True)
assert laplace_transform(fresnelc(t), t, s) == (
(sin(s**2/(2*pi))*fresnelc(s/pi)/s - cos(s**2/(2*pi))*fresnels(s/pi)/s
+ sqrt(2)*cos(s**2/(2*pi) + pi/4)/(2*s), 0, True))
def test_inverse_laplace_transform():
from sympy import (expand, sinh, cosh, besselj, besseli, exp_polar,
unpolarify, simplify, factor_terms)
ILT = inverse_laplace_transform
    a, b, c = symbols('a b c', positive=True)
t = symbols('t')
def simp_hyp(expr):
return factor_terms(expand_mul(expr)).rewrite(sin)
# just test inverses of all of the above
assert ILT(1/s, s, t) == Heaviside(t)
assert ILT(1/s**2, s, t) == t*Heaviside(t)
assert ILT(1/s**5, s, t) == t**4*Heaviside(t)/24
assert ILT(exp(-a*s)/s, s, t) == Heaviside(t - a)
assert ILT(exp(-a*s)/(s + b), s, t) == exp(b*(a - t))*Heaviside(-a + t)
assert ILT(a/(s**2 + a**2), s, t) == sin(a*t)*Heaviside(t)
assert ILT(s/(s**2 + a**2), s, t) == cos(a*t)*Heaviside(t)
# TODO is there a way around simp_hyp?
assert simp_hyp(ILT(a/(s**2 - a**2), s, t)) == sinh(a*t)*Heaviside(t)
assert simp_hyp(ILT(s/(s**2 - a**2), s, t)) == cosh(a*t)*Heaviside(t)
assert ILT(a/((s + b)**2 + a**2), s, t) == exp(-b*t)*sin(a*t)*Heaviside(t)
assert ILT(
(s + b)/((s + b)**2 + a**2), s, t) == exp(-b*t)*cos(a*t)*Heaviside(t)
# TODO sinh/cosh shifted come out a mess. also delayed trig is a mess
# TODO should this simplify further?
assert ILT(exp(-a*s)/s**b, s, t) == \
(t - a)**(b - 1)*Heaviside(t - a)/gamma(b)
assert ILT(exp(-a*s)/sqrt(1 + s**2), s, t) == \
Heaviside(t - a)*besselj(0, a - t) # note: besselj(0, x) is even
# XXX ILT turns these branch factor into trig functions ...
assert simplify(ILT(a**b*(s + sqrt(s**2 - a**2))**(-b)/sqrt(s**2 - a**2),
s, t).rewrite(exp)) == \
Heaviside(t)*besseli(b, a*t)
assert ILT(a**b*(s + sqrt(s**2 + a**2))**(-b)/sqrt(s**2 + a**2),
s, t).rewrite(exp) == \
Heaviside(t)*besselj(b, a*t)
assert ILT(1/(s*sqrt(s + 1)), s, t) == Heaviside(t)*erf(sqrt(t))
# TODO can we make erf(t) work?
assert ILT(1/(s**2*(s**2 + 1)),s,t) == (t - sin(t))*Heaviside(t)
def test_fourier_transform():
from sympy import simplify, expand, expand_complex, factor, expand_trig
FT = fourier_transform
IFT = inverse_fourier_transform
def simp(x):
return simplify(expand_trig(expand_complex(expand(x))))
def sinc(x):
return sin(pi*x)/(pi*x)
k = symbols('k', real=True)
f = Function("f")
# TODO for this to work with real a, need to expand abs(a*x) to abs(a)*abs(x)
a = symbols('a', positive=True)
b = symbols('b', positive=True)
posk = symbols('posk', positive=True)
# Test unevaluated form
assert fourier_transform(f(x), x, k) == FourierTransform(f(x), x, k)
assert inverse_fourier_transform(
f(k), k, x) == InverseFourierTransform(f(k), k, x)
# basic examples from wikipedia
assert simp(FT(Heaviside(1 - abs(2*a*x)), x, k)) == sinc(k/a)/a
# TODO IFT is a *mess*
assert simp(FT(Heaviside(1 - abs(a*x))*(1 - abs(a*x)), x, k)) == sinc(k/a)**2/a
# TODO IFT
assert factor(FT(exp(-a*x)*Heaviside(x), x, k), extension=I) == \
1/(a + 2*pi*I*k)
# NOTE: the ift comes out in pieces
assert IFT(1/(a + 2*pi*I*x), x, posk,
noconds=False) == (exp(-a*posk), True)
assert IFT(1/(a + 2*pi*I*x), x, -posk,
noconds=False) == (0, True)
assert IFT(1/(a + 2*pi*I*x), x, symbols('k', negative=True),
noconds=False) == (0, True)
# TODO IFT without factoring comes out as meijer g
assert factor(FT(x*exp(-a*x)*Heaviside(x), x, k), extension=I) == \
1/(a + 2*pi*I*k)**2
assert FT(exp(-a*x)*sin(b*x)*Heaviside(x), x, k) == \
b/(b**2 + (a + 2*I*pi*k)**2)
assert FT(exp(-a*x**2), x, k) == sqrt(pi)*exp(-pi**2*k**2/a)/sqrt(a)
assert IFT(sqrt(pi/a)*exp(-(pi*k)**2/a), k, x) == exp(-a*x**2)
assert FT(exp(-a*abs(x)), x, k) == 2*a/(a**2 + 4*pi**2*k**2)
# TODO IFT (comes out as meijer G)
# TODO besselj(n, x), n an integer > 0 actually can be done...
# TODO are there other common transforms (no distributions!)?
def test_sine_transform():
from sympy import sinh, cosh, EulerGamma
t = symbols("t")
w = symbols("w")
a = symbols("a")
f = Function("f")
# Test unevaluated form
assert sine_transform(f(t), t, w) == SineTransform(f(t), t, w)
assert inverse_sine_transform(
f(w), w, t) == InverseSineTransform(f(w), w, t)
assert sine_transform(1/sqrt(t), t, w) == 1/sqrt(w)
assert inverse_sine_transform(1/sqrt(w), w, t) == 1/sqrt(t)
assert sine_transform(
(1/sqrt(t))**3, t, w) == sqrt(w)*gamma(S(1)/4)/(2*gamma(S(5)/4))
assert sine_transform(t**(-a), t, w) == 2**(
-a + S(1)/2)*w**(a - 1)*gamma(-a/2 + 1)/gamma((a + 1)/2)
assert inverse_sine_transform(2**(-a + S(
1)/2)*w**(a - 1)*gamma(-a/2 + 1)/gamma(a/2 + S(1)/2), w, t) == t**(-a)
assert sine_transform(
exp(-a*t), t, w) == sqrt(2)*w/(sqrt(pi)*(a**2 + w**2))
assert inverse_sine_transform(
sqrt(2)*w/(sqrt(pi)*(a**2 + w**2)), w, t) == exp(-a*t)
assert sine_transform(
log(t)/t, t, w) == -sqrt(2)*sqrt(pi)*(log(w**2) + 2*EulerGamma)/4
assert sine_transform(
t*exp(-a*t**2), t, w) == sqrt(2)*w*exp(-w**2/(4*a))/(4*a**(S(3)/2))
assert inverse_sine_transform(
sqrt(2)*w*exp(-w**2/(4*a))/(4*a**(S(3)/2)), w, t) == t*exp(-a*t**2)
def test_cosine_transform():
from sympy import sinh, cosh, Si, Ci
t = symbols("t")
w = symbols("w")
a = symbols("a")
f = Function("f")
# Test unevaluated form
assert cosine_transform(f(t), t, w) == CosineTransform(f(t), t, w)
assert inverse_cosine_transform(
f(w), w, t) == InverseCosineTransform(f(w), w, t)
assert cosine_transform(1/sqrt(t), t, w) == 1/sqrt(w)
assert inverse_cosine_transform(1/sqrt(w), w, t) == 1/sqrt(t)
assert cosine_transform(1/(
a**2 + t**2), t, w) == sqrt(2)*sqrt(pi)*exp(-a*w)/(2*a)
assert cosine_transform(t**(
-a), t, w) == 2**(-a + S(1)/2)*w**(a - 1)*gamma((-a + 1)/2)/gamma(a/2)
assert inverse_cosine_transform(2**(-a + S(
1)/2)*w**(a - 1)*gamma(-a/2 + S(1)/2)/gamma(a/2), w, t) == t**(-a)
assert cosine_transform(
exp(-a*t), t, w) == sqrt(2)*a/(sqrt(pi)*(a**2 + w**2))
assert inverse_cosine_transform(
sqrt(2)*a/(sqrt(pi)*(a**2 + w**2)), w, t) == exp(-a*t)
assert cosine_transform(exp(-a*sqrt(t))*cos(a*sqrt(
t)), t, w) == a*exp(-a**2/(2*w))/(2*w**(S(3)/2))
assert cosine_transform(1/(a + t), t, w) == sqrt(2)*(
(-2*Si(a*w) + pi)*sin(a*w)/2 - cos(a*w)*Ci(a*w))/sqrt(pi)
assert inverse_cosine_transform(sqrt(2)*meijerg(((S(1)/2, 0), ()), (
(S(1)/2, 0, 0), (S(1)/2,)), a**2*w**2/4)/(2*pi), w, t) == 1/(a + t)
assert cosine_transform(1/sqrt(a**2 + t**2), t, w) == sqrt(2)*meijerg(
((S(1)/2,), ()), ((0, 0), (S(1)/2,)), a**2*w**2/4)/(2*sqrt(pi))
assert inverse_cosine_transform(sqrt(2)*meijerg(((S(1)/2,), ()), ((0, 0), (S(1)/2,)), a**2*w**2/4)/(2*sqrt(pi)), w, t) == 1/(t*sqrt(a**2/t**2 + 1))
def test_hankel_transform():
from sympy import sinh, cosh, gamma, sqrt, exp
r = Symbol("r")
k = Symbol("k")
nu = Symbol("nu")
m = Symbol("m")
a = symbols("a")
assert hankel_transform(1/r, r, k, 0) == 1/k
assert inverse_hankel_transform(1/k, k, r, 0) == 1/r
assert hankel_transform(
1/r**m, r, k, 0) == 2**(-m + 1)*k**(m - 2)*gamma(-m/2 + 1)/gamma(m/2)
assert inverse_hankel_transform(
2**(-m + 1)*k**(m - 2)*gamma(-m/2 + 1)/gamma(m/2), k, r, 0) == r**(-m)
assert hankel_transform(1/r**m, r, k, nu) == (
2*2**(-m)*k**(m - 2)*gamma(-m/2 + nu/2 + 1)/gamma(m/2 + nu/2))
assert inverse_hankel_transform(2**(-m + 1)*k**(
m - 2)*gamma(-m/2 + nu/2 + 1)/gamma(m/2 + nu/2), k, r, nu) == r**(-m)
assert hankel_transform(r**nu*exp(-a*r), r, k, nu) == \
2**(nu + 1)*a*k**(-nu - 3)*(a**2/k**2 + 1)**(-nu - S(
3)/2)*gamma(nu + S(3)/2)/sqrt(pi)
assert inverse_hankel_transform(
2**(nu + 1)*a*k**(-nu - 3)*(a**2/k**2 + 1)**(-nu - S(3)/2)*gamma(
nu + S(3)/2)/sqrt(pi), k, r, nu) == r**nu*exp(-a*r)
def test_issue_7181():
    assert mellin_transform(1/(1 - x), x, s) is not None
|
|
import io
import json
import tempfile
import zipfile
from uuid import uuid4
from django.core.files.uploadedfile import SimpleUploadedFile
from model_bakery import baker
from datetime import date, timedelta
from unittest.mock import patch, PropertyMock, Mock
from django.conf import settings
from django.contrib import messages
from django.contrib.messages import get_messages
from django.core import mail
from django.core.handlers.wsgi import WSGIRequest
from django.http import HttpResponse
from django.test import TestCase, RequestFactory
from django.urls import reverse
from .utils import assertMessage, get_static_image_file_as_upload
from ..models import Sponsorship, Contract, SponsorshipBenefit, SponsorBenefit, SponsorEmailNotificationTemplate, \
GenericAsset, ImgAsset, TextAsset
from ..forms import SponsorshipReviewAdminForm, SponsorshipsListForm, SignedSponsorshipReviewAdminForm, SendSponsorshipNotificationForm
from sponsors.views_admin import send_sponsorship_notifications_action, export_assets_as_zipfile
from sponsors.use_cases import SendSponsorshipNotificationUseCase
class RollbackSponsorshipToEditingAdminViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.sponsorship = baker.make(
Sponsorship,
status=Sponsorship.APPROVED,
submited_by=self.user,
_fill_optional=True,
)
self.url = reverse(
"admin:sponsors_sponsorship_rollback_to_edit", args=[self.sponsorship.pk]
)
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(
response, "sponsors/admin/rollback_sponsorship_to_editing.html"
)
self.assertEqual(context["sponsorship"], self.sponsorship)
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPLIED
) # did not update
def test_rollback_sponsorship_to_applied_on_post(self):
data = {"confirm": "yes"}
response = self.client.post(self.url, data=data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.APPLIED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Sponsorship is now editable!", messages.SUCCESS)
def test_do_not_rollback_if_invalid_post(self):
response = self.client.post(self.url, data={})
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(
response, "sponsors/admin/rollback_sponsorship_to_editing.html"
)
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPLIED
) # did not update
response = self.client.post(self.url, data={"confirm": "invalid"})
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(
response, "sponsors/admin/rollback_sponsorship_to_editing.html"
)
self.assertNotEqual(self.sponsorship.status, Sponsorship.APPLIED)
def test_404_if_sponsorship_does_not_exist(self):
self.sponsorship.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
def test_message_user_if_rejecting_invalid_sponsorship(self):
self.sponsorship.status = Sponsorship.FINALIZED
self.sponsorship.save()
data = {"confirm": "yes"}
response = self.client.post(self.url, data=data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.FINALIZED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(
msg, "Can't rollback to edit a Finalized sponsorship.", messages.ERROR
)
class RejectedSponsorshipAdminViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.sponsorship = baker.make(
Sponsorship,
status=Sponsorship.APPLIED,
submited_by=self.user,
_fill_optional=True,
)
self.url = reverse(
"admin:sponsors_sponsorship_reject", args=[self.sponsorship.pk]
)
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/reject_application.html")
self.assertEqual(context["sponsorship"], self.sponsorship)
self.assertNotEqual(
self.sponsorship.status, Sponsorship.REJECTED
) # did not update
def test_reject_sponsorship_on_post(self):
data = {"confirm": "yes"}
response = self.client.post(self.url, data=data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertTrue(mail.outbox)
self.assertEqual(self.sponsorship.status, Sponsorship.REJECTED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Sponsorship was rejected!", messages.SUCCESS)
def test_do_not_reject_if_invalid_post(self):
response = self.client.post(self.url, data={})
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/reject_application.html")
self.assertNotEqual(
self.sponsorship.status, Sponsorship.REJECTED
) # did not update
response = self.client.post(self.url, data={"confirm": "invalid"})
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/reject_application.html")
self.assertNotEqual(self.sponsorship.status, Sponsorship.REJECTED)
def test_404_if_sponsorship_does_not_exist(self):
self.sponsorship.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
def test_message_user_if_rejecting_invalid_sponsorship(self):
self.sponsorship.status = Sponsorship.FINALIZED
self.sponsorship.save()
data = {"confirm": "yes"}
response = self.client.post(self.url, data=data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.FINALIZED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Can't reject a Finalized sponsorship.", messages.ERROR)
class ApproveSponsorshipAdminViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.sponsorship = baker.make(
Sponsorship, status=Sponsorship.APPLIED, _fill_optional=True
)
self.url = reverse(
"admin:sponsors_sponsorship_approve", args=[self.sponsorship.pk]
)
today = date.today()
self.package = baker.make("sponsors.SponsorshipPackage")
self.data = {
"confirm": "yes",
"start_date": today,
"end_date": today + timedelta(days=100),
"package": self.package.pk,
"sponsorship_fee": 500,
}
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
form = context["form"]
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertEqual(context["sponsorship"], self.sponsorship)
self.assertIsInstance(form, SponsorshipReviewAdminForm)
self.assertEqual(form.initial["package"], self.sponsorship.package)
self.assertEqual(form.initial["start_date"], self.sponsorship.start_date)
self.assertEqual(form.initial["end_date"], self.sponsorship.end_date)
self.assertEqual(
form.initial["sponsorship_fee"], self.sponsorship.sponsorship_fee
)
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
def test_approve_sponsorship_on_post(self):
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.APPROVED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Sponsorship was approved!", messages.SUCCESS)
def test_do_not_approve_if_no_confirmation_in_the_post(self):
self.data.pop("confirm")
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
self.data["confirm"] = "invalid"
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(self.sponsorship.status, Sponsorship.APPROVED)
def test_do_not_approve_if_form_with_invalid_data(self):
self.data = {"confirm": "yes"}
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
self.assertTrue(response.context["form"].errors)
def test_404_if_sponsorship_does_not_exist(self):
self.sponsorship.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
def test_message_user_if_approving_invalid_sponsorship(self):
self.sponsorship.status = Sponsorship.FINALIZED
self.sponsorship.save()
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.FINALIZED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Can't approve a Finalized sponsorship.", messages.ERROR)
class ApproveSignedSponsorshipAdminViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.sponsorship = baker.make(
Sponsorship, status=Sponsorship.APPLIED, _fill_optional=True
)
self.url = reverse(
"admin:sponsors_sponsorship_approve_existing_contract", args=[self.sponsorship.pk]
)
today = date.today()
self.package = baker.make("sponsors.SponsorshipPackage")
self.data = {
"confirm": "yes",
"start_date": today,
"end_date": today + timedelta(days=100),
"package": self.package.pk,
"sponsorship_fee": 500,
"signed_contract": io.BytesIO(b"Signed contract")
}
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
form = context["form"]
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertEqual(context["sponsorship"], self.sponsorship)
self.assertIsInstance(form, SignedSponsorshipReviewAdminForm)
self.assertEqual(form.initial["package"], self.sponsorship.package)
self.assertEqual(form.initial["start_date"], self.sponsorship.start_date)
self.assertEqual(form.initial["end_date"], self.sponsorship.end_date)
self.assertEqual(
form.initial["sponsorship_fee"], self.sponsorship.sponsorship_fee
)
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
def test_approve_sponsorship_and_execute_contract_on_post(self):
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
contract = self.sponsorship.contract
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.FINALIZED)
self.assertEqual(contract.status, Contract.EXECUTED)
self.assertEqual(contract.signed_document.read(), b"Signed contract")
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Signed sponsorship was approved!", messages.SUCCESS)
def test_do_not_approve_if_no_confirmation_in_the_post(self):
self.data.pop("confirm")
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
self.data["confirm"] = "invalid"
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(self.sponsorship.status, Sponsorship.APPROVED)
def test_do_not_approve_if_form_with_invalid_data(self):
self.data = {"confirm": "yes"}
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/approve_application.html")
self.assertNotEqual(
self.sponsorship.status, Sponsorship.APPROVED
) # did not update
self.assertTrue(response.context["form"].errors)
def test_404_if_sponsorship_does_not_exist(self):
self.sponsorship.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
def test_message_user_if_approving_invalid_sponsorship(self):
self.sponsorship.status = Sponsorship.FINALIZED
self.sponsorship.save()
response = self.client.post(self.url, data=self.data)
self.sponsorship.refresh_from_db()
expected_url = reverse(
"admin:sponsors_sponsorship_change", args=[self.sponsorship.pk]
)
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.sponsorship.status, Sponsorship.FINALIZED)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Can't approve a Finalized sponsorship.", messages.ERROR)
class SendContractViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.contract = baker.make_recipe("sponsors.tests.empty_contract")
self.url = reverse(
"admin:sponsors_contract_send", args=[self.contract.pk]
)
self.data = {
"confirm": "yes",
}
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
self.assertTemplateUsed(response, "sponsors/admin/send_contract.html")
self.assertEqual(context["contract"], self.contract)
@patch.object(
Sponsorship, "verified_emails", PropertyMock(return_value=["[email protected]"])
)
def test_send_contract_on_post(self):
response = self.client.post(self.url, data=self.data)
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
self.contract.refresh_from_db()
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertTrue(self.contract.document.name)
self.assertEqual(1, len(mail.outbox))
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "Contract was sent!", messages.SUCCESS)
@patch.object(
Sponsorship, "verified_emails", PropertyMock(return_value=["[email protected]"])
)
def test_display_error_message_to_user_if_invalid_status(self):
self.contract.status = Contract.AWAITING_SIGNATURE
self.contract.save()
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(0, len(mail.outbox))
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(
msg,
"Contract with status Awaiting Signature can't be sent.",
messages.ERROR,
)
def test_do_not_send_if_no_confirmation_in_the_post(self):
self.data.pop("confirm")
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/send_contract.html")
self.assertFalse(self.contract.document.name)
self.data["confirm"] = "invalid"
response = self.client.post(self.url, data=self.data)
self.assertTemplateUsed(response, "sponsors/admin/send_contract.html")
self.assertFalse(self.contract.document.name)
self.assertEqual(0, len(mail.outbox))
def test_404_if_contract_does_not_exist(self):
self.contract.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
class ExecuteContractViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.contract = baker.make_recipe("sponsors.tests.empty_contract", status=Contract.AWAITING_SIGNATURE)
self.url = reverse(
"admin:sponsors_contract_execute", args=[self.contract.pk]
)
self.data = {
"confirm": "yes",
"signed_document": SimpleUploadedFile("contract.txt", b"Contract content"),
}
def tearDown(self):
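# Remove any signed document uploaded during a test so temporary files don't
# accumulate; the contract may already have been deleted by the test itself.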
try:
self.contract.refresh_from_db()
if self.contract.signed_document:
self.contract.signed_document.delete()
except Contract.DoesNotExist:
pass
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
self.assertTemplateUsed(response, "sponsors/admin/execute_contract.html")
self.assertEqual(context["contract"], self.contract)
def test_execute_contract_on_post(self):
response = self.client.post(self.url, data=self.data)
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
self.contract.refresh_from_db()
msg = list(get_messages(response.wsgi_request))[0]
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.contract.status, Contract.EXECUTED)
assertMessage(msg, "Contract was executed!", messages.SUCCESS)
def test_display_error_message_to_user_if_invalid_status(self):
self.contract.status = Contract.OUTDATED
self.contract.save()
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
msg = list(get_messages(response.wsgi_request))[0]
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.contract.status, Contract.OUTDATED)
assertMessage(
msg,
"Contract with status Outdated can't be executed.",
messages.ERROR,
)
def test_do_not_execute_contract_if_no_confirmation_in_the_post(self):
self.data.pop("confirm")
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/execute_contract.html")
self.assertEqual(self.contract.status, Contract.AWAITING_SIGNATURE)
self.data["confirm"] = "invalid"
response = self.client.post(self.url, data=self.data)
self.assertTemplateUsed(response, "sponsors/admin/execute_contract.html")
self.contract.refresh_from_db()
self.assertEqual(self.contract.status, Contract.AWAITING_SIGNATURE)
def test_display_error_message_to_user_if_no_signed_document(self):
self.data.pop("signed_document")
response = self.client.post(self.url, data=self.data)
context = response.context
self.assertTemplateUsed(response, "sponsors/admin/execute_contract.html")
self.assertEqual(context["contract"], self.contract)
self.assertTrue(context["error_msg"])
def test_404_if_contract_does_not_exist(self):
self.contract.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
class NullifyContractViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.contract = baker.make_recipe("sponsors.tests.empty_contract", status=Contract.AWAITING_SIGNATURE)
self.url = reverse(
"admin:sponsors_contract_nullify", args=[self.contract.pk]
)
self.data = {
"confirm": "yes",
}
def test_display_confirmation_form_on_get(self):
response = self.client.get(self.url)
context = response.context
self.assertTemplateUsed(response, "sponsors/admin/nullify_contract.html")
self.assertEqual(context["contract"], self.contract)
def test_nullify_contract_on_post(self):
response = self.client.post(self.url, data=self.data)
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
self.contract.refresh_from_db()
msg = list(get_messages(response.wsgi_request))[0]
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.contract.status, Contract.NULLIFIED)
assertMessage(msg, "Contract was nullified!", messages.SUCCESS)
def test_display_error_message_to_user_if_invalid_status(self):
self.contract.status = Contract.DRAFT
self.contract.save()
expected_url = reverse(
"admin:sponsors_contract_change", args=[self.contract.pk]
)
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
msg = list(get_messages(response.wsgi_request))[0]
self.assertRedirects(response, expected_url, fetch_redirect_response=True)
self.assertEqual(self.contract.status, Contract.DRAFT)
assertMessage(
msg,
"Contract with status Draft can't be nullified.",
messages.ERROR,
)
def test_do_not_nullify_contract_if_no_confirmation_in_the_post(self):
self.data.pop("confirm")
response = self.client.post(self.url, data=self.data)
self.contract.refresh_from_db()
self.assertTemplateUsed(response, "sponsors/admin/nullify_contract.html")
self.assertEqual(self.contract.status, Contract.AWAITING_SIGNATURE)
self.data["confirm"] = "invalid"
response = self.client.post(self.url, data=self.data)
self.assertTemplateUsed(response, "sponsors/admin/nullify_contract.html")
self.contract.refresh_from_db()
self.assertEqual(self.contract.status, Contract.AWAITING_SIGNATURE)
def test_404_if_contract_does_not_exist(self):
self.contract.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
class UpdateRelatedSponsorshipsTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.benefit = baker.make(SponsorshipBenefit)
self.sponsor_benefit = baker.make(
SponsorBenefit,
sponsorship_benefit=self.benefit,
sponsorship__sponsor__name="Foo",
added_by_user=True, # to make sure we keep previous fields
)
self.url = reverse(
"admin:sponsors_sponsorshipbenefit_update_related", args=[self.benefit.pk]
)
self.data = {"sponsorships": [self.sponsor_benefit.sponsorship.pk]}
def test_display_form_from_benefit_on_get(self):
response = self.client.get(self.url)
context = response.context
self.assertTemplateUsed(response, "sponsors/admin/update_related_sponsorships.html")
self.assertEqual(context["benefit"], self.benefit)
self.assertIsInstance(context["form"], SponsorshipsListForm)
self.assertEqual(context["form"].sponsorship_benefit, self.benefit)
def test_list_related_sponsorships_with_initial(self):
baker.make(Sponsorship) # unrelated-sponsorship
other_sponsor_benefit = baker.make(
SponsorBenefit,
sponsorship_benefit=self.benefit,
sponsorship__sponsor__name="Bar",
)
response = self.client.get(self.url)
initial = response.context["form"].initial
self.assertEqual(2, len(initial["sponsorships"]))
self.assertIn(self.sponsor_benefit.sponsorship.pk, initial["sponsorships"])
self.assertIn(other_sponsor_benefit.sponsorship.pk, initial["sponsorships"])
def test_bad_request_if_invalid_post_data(self):
self.data["sponsorships"] = []
response = self.client.post(self.url, data=self.data)
self.assertTrue(response.context["form"].errors)
def test_redirect_back_to_benefit_page_if_success(self):
redirect_url = reverse(
"admin:sponsors_sponsorshipbenefit_change", args=[self.benefit.pk]
)
response = self.client.post(self.url, data=self.data)
self.assertRedirects(response, redirect_url)
msg = list(get_messages(response.wsgi_request))[0]
assertMessage(msg, "1 related sponsorships updated!", messages.SUCCESS)
def test_update_selected_sponsorships_only(self):
other_sponsor_benefit = baker.make(
SponsorBenefit,
sponsorship_benefit=self.benefit,
sponsorship__sponsor__name="Bar",
name=self.benefit.name,
description=self.benefit.description,
)
prev_name, prev_description = self.benefit.name, self.benefit.description
self.benefit.name = 'New name'
self.benefit.description = 'New description'
self.benefit.save()
response = self.client.post(self.url, data=self.data)
self.sponsor_benefit.refresh_from_db()
self.assertEqual(self.sponsor_benefit.name, "New name")
self.assertEqual(self.sponsor_benefit.description, "New description")
self.assertTrue(self.sponsor_benefit.added_by_user)
# make sure sponsor benefit from unselected sponsorships wasn't deleted
other_sponsor_benefit.refresh_from_db()
self.assertEqual(other_sponsor_benefit.name, prev_name)
self.assertEqual(other_sponsor_benefit.description, prev_description)
def test_404_if_benefit_does_not_exist(self):
self.benefit.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
class PreviewContractViewTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.contract = baker.make_recipe(
"sponsors.tests.empty_contract", sponsorship__start_date=date.today()
)
self.url = reverse(
"admin:sponsors_contract_preview", args=[self.contract.pk]
)
@patch("sponsors.views_admin.render_contract_to_pdf_response")
def test_render_pdf_by_default(self, mocked_render):
response = HttpResponse()
mocked_render.return_value = response
r = self.client.get(self.url)
self.assertEqual(r, response)
self.assertEqual(r.get("X-Frame-Options"), "SAMEORIGIN")
self.assertEqual(mocked_render.call_count, 1)
self.assertEqual(mocked_render.call_args[0][1], self.contract)
self.assertIsInstance(mocked_render.call_args[0][0], WSGIRequest)
@patch("sponsors.views_admin.render_contract_to_docx_response")
def test_render_docx_if_specified_in_the_querystring(self, mocked_render):
response = HttpResponse()
mocked_render.return_value = response
r = self.client.get(self.url + "?format=docx")
self.assertEqual(r, response)
self.assertEqual(r.get("X-Frame-Options"), "SAMEORIGIN")
self.assertEqual(mocked_render.call_count, 1)
self.assertEqual(mocked_render.call_args[0][1], self.contract)
self.assertIsInstance(mocked_render.call_args[0][0], WSGIRequest)
class PreviewSponsorEmailNotificationTemplateTests(TestCase):
def setUp(self):
self.user = baker.make(
settings.AUTH_USER_MODEL, is_staff=True, is_superuser=True
)
self.client.force_login(self.user)
self.sponsor_notification = baker.make(SponsorEmailNotificationTemplate, content="{{'content'|upper}}")
self.url = self.sponsor_notification.preview_content_url
def test_display_content_on_response(self):
response = self.client.get(self.url)
self.assertEqual(200, response.status_code)
self.assertEqual(b"CONTENT", response.content)
def test_404_if_template_does_not_exist(self):
self.sponsor_notification.delete()
response = self.client.get(self.url)
self.assertEqual(response.status_code, 404)
def test_login_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.client.logout()
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url)
def test_staff_required(self):
login_url = reverse("admin:login")
redirect_url = f"{login_url}?next={self.url}"
self.user.is_staff = False
self.user.save()
self.client.force_login(self.user)
r = self.client.get(self.url)
self.assertRedirects(r, redirect_url, fetch_redirect_response=False)
#######################
### TEST CUSTOM ACTIONS
class SendSponsorshipNotificationTests(TestCase):
def setUp(self):
self.request_factory = RequestFactory()
baker.make(Sponsorship, _quantity=3, sponsor__name='foo')
self.sponsorship = Sponsorship.objects.all()[0]
baker.make('sponsors.EmailTargetable', sponsor_benefit__sponsorship=self.sponsorship)
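# Only this first sponsorship gets an EmailTargetable benefit, so it is the
# single entry expected in "to_notify"; the other two should land in "to_ignore".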
self.queryset = Sponsorship.objects.all()
self.user = baker.make("users.User")
@patch("sponsors.views_admin.render")
def test_render_template_and_context_as_expected(self, mocked_render):
mocked_render.return_value = "HTTP Response"
request = self.request_factory.post("/", data={})
request.user = self.user
resp = send_sponsorship_notifications_action(Mock(), request, self.queryset)
self.assertEqual("HTTP Response", resp)
self.assertEqual(1, mocked_render.call_count)
ret_request, template = mocked_render.call_args[0]
context = mocked_render.call_args[1]["context"]
self.assertEqual(request, ret_request)
self.assertEqual("sponsors/admin/send_sponsors_notification.html", template)
self.assertEqual([self.sponsorship], list(context["to_notify"]))
self.assertEqual(2, len(context["to_ignore"]))
self.assertNotIn(self.sponsorship, context["to_ignore"])
self.assertIsInstance(context["form"], SendSponsorshipNotificationForm)
@patch("sponsors.views_admin.render")
def test_render_form_error_if_invalid(self, mocked_render):
mocked_render.return_value = "HTTP Response"
request = self.request_factory.post("/", data={"confirm": "yes"})
request.user = self.user
resp = send_sponsorship_notifications_action(Mock(), request, self.queryset)
context = mocked_render.call_args[1]["context"]
form = context["form"]
self.assertIn("contact_types", form.errors)
@patch.object(SendSponsorshipNotificationUseCase, "build")
def test_call_use_case_and_redirect_with_success(self, mock_build):
notification = baker.make("SponsorEmailNotificationTemplate")
mocked_uc = Mock(SendSponsorshipNotificationUseCase, autospec=True)
mock_build.return_value = mocked_uc
data = {"confirm": "yes", "notification": notification.pk, "contact_types": ["primary"]}
request = self.request_factory.post("/", data=data)
request.user = self.user
resp = send_sponsorship_notifications_action(Mock(), request, self.queryset)
expected_url = reverse("admin:sponsors_sponsorship_changelist")
self.assertEqual(302, resp.status_code)
self.assertEqual(expected_url, resp["Location"])
mock_build.assert_called_once_with()
self.assertEqual(1, mocked_uc.execute.call_count)
kwargs = mocked_uc.execute.call_args[1]
self.assertEqual(request, kwargs["request"])
self.assertEqual(notification, kwargs["notification"])
self.assertEqual(list(self.queryset), list(kwargs["sponsorships"]))
self.assertEqual(["primary"], kwargs["contact_types"])
class ExportAssetsAsZipTests(TestCase):
def setUp(self):
self.request_factory = RequestFactory()
self.request = self.request_factory.get("/")
self.request.user = baker.make("users.User")
self.sponsorship = baker.make(Sponsorship, sponsor__name='Sponsor Name')
self.ModelAdmin = Mock()
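# Stand-in for the admin instance; these tests only inspect its message_user() calls.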
self.text_asset = TextAsset.objects.create(
uuid=uuid4(),
content_object=self.sponsorship,
internal_name="text_input",
)
self.img_asset = ImgAsset.objects.create(
uuid=uuid4(),
content_object=self.sponsorship.sponsor,
internal_name="img_input",
)
def test_display_same_page_with_warning_message_if_no_query(self):
queryset = GenericAsset.objects.none()
response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
self.assertEqual(302, response.status_code)
self.assertEqual(self.request.path, response["Location"])
msg = "You have to select at least one asset to export."
self.ModelAdmin.message_user.assert_called_once_with(self.request, msg, messages.WARNING)
def test_display_same_page_with_warning_message_if_any_asset_without_value(self):
self.text_asset.value = "Foo"
self.text_asset.save()
queryset = GenericAsset.objects.all()
response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
self.assertEqual(302, response.status_code)
self.assertEqual(self.request.path, response["Location"])
msg = "1 assets from the selection doesn't have data to export. Please review your selection!"
self.ModelAdmin.message_user.assert_called_once_with(self.request, msg, messages.WARNING)
def test_response_is_configured_to_be_zip_file(self):
self.text_asset.value = "foo"
self.img_asset.value = SimpleUploadedFile(name='test_image.jpg', content=b"content", content_type='image/jpeg')
self.text_asset.save()
self.img_asset.save()
queryset = GenericAsset.objects.all()
response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
self.assertEqual("application/x-zip-compressed", response["Content-Type"])
self.assertEqual("attachment; filename=assets.zip", response["Content-Disposition"])
def test_zip_file_organize_assets_within_sponsors_directories(self):
self.text_asset.value = "foo"
self.img_asset.value = get_static_image_file_as_upload("psf-logo.png")
self.text_asset.save()
self.img_asset.save()
queryset = GenericAsset.objects.all()
response = export_assets_as_zipfile(self.ModelAdmin, self.request, queryset)
content = io.BytesIO(response.content)
with zipfile.ZipFile(content, "r") as zip_file:
self.assertEqual(2, len(zip_file.infolist()))
with zip_file.open("Sponsor Name/text_input.txt") as cur_file:
self.assertEqual("foo", cur_file.read().decode())
with zip_file.open("Sponsor Name/img_input.png") as cur_file:
self.assertEqual(self.img_asset.value.read(), cur_file.read())
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import argparse
import time
import json
from collections import defaultdict
import logging
logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)
from speech2text import log_kv
from text2stats import IBM_TRANSCRIPT_STATS_FILENAME, GOOGLE_TRANSCRIPT_STATS_FILENAME
from text2stats import IBM_PROCESSED_STATS_FILENAME, GOOGLE_PROCESSED_STATS_FILENAME
from tally_audio import RESULT_FILENAME, time_string_to_decimal_minutes
GOOGLE_PATH = "google_stt/"
IBM_PATH = "ibm_stt/"
BASE_PATH = "/temp/stt/AudioJournals/"
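# Prefixes stripped from transcript index keys so IBM and Google entries can
# be compared by their shared audio base path.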
"""
Run Dependencies:
- text2stats.py
- tally_audio.py
Compares IBM vs Google on the basis of the following summary stats:
- Number of transcripts generated
- Portion of IBM transcripts that have Google transcript, and vice versa.
- The transcription rate: ratio of number of transcripts to the number of audio files processed
- Transcript words per minute of audio
- Processing time (seconds) per minute of audio
Counts total number of audio files.
"""
def load_json(path):
result = {}
log_kv("Loading", path)
if os.path.exists(path):
with open(path) as file1:
result = json.load(file1)
else:
logging.error("Not exist: %s", path)
return result
def calc_transcript_counts(ibm_stats_path, google_stats_path):
"""
:param ibm_stats_path: to file containing index of IBM transcripts
:param google_stats_path: to file containing index of Google transcripts
:return:
Counts number of IBM transcripts
Counts number of Google transcripts
Counts portion of IBM transcripts within processed folders that have Google transcript
Counts portion of Google transcripts within processed folders that have IBM transcript
"""
ibm_stats = load_json(ibm_stats_path)
google_stats = load_json(google_stats_path)
log_kv("Number of IBM Transcripts", len(ibm_stats))
log_kv("Numberof Google Transcripts", len(google_stats))
i_set = set([os.path.dirname(x).replace(IBM_PATH,'').replace(BASE_PATH,'') for x in ibm_stats])
g_set = set([os.path.dirname(x).replace(GOOGLE_PATH,'').replace(BASE_PATH,'') for x in google_stats])
print
i_top_level_folders = sorted(set([xx.split("/")[0] for xx in i_set]))
print "IBM folders in %s :\n%s " % (ibm_stats_path, i_top_level_folders)
print
g_top_level_folders = sorted(set([xx.split("/")[0] for xx in g_set]))
print "Google folders in %s :\n%s" % (google_stats_path, g_top_level_folders)
i_count = 0
i_in_g = 0
for xx in i_set:
if xx.split("/")[0] not in g_top_level_folders:
continue
i_count += 1
if xx in g_set:
i_in_g += 1
i_portion = float(i_in_g) / i_count
g_count = 0
g_in_i = 0
for xx in g_set:
if xx.split("/")[0] not in i_top_level_folders:
continue
g_count += 1
if xx in i_set:
g_in_i += 1
g_portion = float(g_in_i) / g_count
print;print
print "IBM also in Google: %d/%d (%.2f) " % (i_in_g, i_count, i_portion)
print "Google also in IBM: %d/%d (%.2f) " % (g_in_i, g_count, g_portion)
def calc_stat_per_minute(stats, audio_stats, prefix, suffix, suffix2, fieldname,
out_fieldname=None, duration_threshold = (3.0/60)):
"""
:param stats: dict keyed by transcript path; each value holds the field named by fieldname
:param audio_stats: dict keyed by audio base path; each value contains field "duration"
:param prefix: string to be stripped from each key of stats
:param suffix: string to be stripped from each key of stats
:param suffix2: string to be stripped from each key of stats
:param fieldname: name of the numeric field in stats to normalize by audio duration
:param out_fieldname: name of the output field; defaults to fieldname + "_per_minute"
:param duration_threshold: minimum minutes of audio duration required to admit item into sample.
:return: dict mapping base path to {out_fieldname: fieldname value per minute of audio}
"""
if not out_fieldname:
out_fieldname = fieldname + "_per_minute"
word_count_stats = defaultdict(dict)
no_duration = 0
count = 0
skipped = 0
sum_stat = 0
sum_duration = 0
for transcript in stats:
count += 1
# Strip down to filepath
basepath = transcript.replace(prefix, '').replace(suffix,'').replace(suffix2,'')
if basepath in audio_stats:
stat = stats[transcript].get(fieldname)
duration = time_string_to_decimal_minutes(audio_stats[basepath]["duration"])
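# Skip items missing a duration or stat, or shorter than the threshold
# (duration_threshold is in minutes; the default 3.0/60 is about 3 seconds of audio).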
if duration and stat and duration > duration_threshold:
stat_per_minute = float(stat)/duration
word_count_stats[basepath][out_fieldname] = stat_per_minute
sum_stat += stat
sum_duration += duration
logging.debug("%.0f %s/minute", stat_per_minute, fieldname)
else:
skipped += 1
no_duration += 0 if duration else 1
print
if sum_duration > 0:
logging.info("Average %s per minute: %.1f", fieldname, (sum_stat/sum_duration))
else:
logging.error("Expected sum duration > 0")
if skipped > 0:
logging.warn("Skipped %d", skipped)
if no_duration > 0:
logging.warn("Had no duration: %d", no_duration)
return word_count_stats
def calc_transcript_words_per_minute(ibm_stats_path, google_stats_path, ibm_pstats_path, google_pstats_path, audio_stats_path):
ibm_stats = load_json(ibm_stats_path)
google_stats = load_json(google_stats_path)
ibm_pstats = load_json(ibm_pstats_path)
google_pstats = load_json(google_pstats_path)
audio_stats = load_json(audio_stats_path)
count_processed_ibm = len(ibm_pstats)
count_processed_google = len(google_pstats)
count_transcribed_ibm = len(ibm_stats)
count_transcribed_google = len(google_stats)
print
if count_processed_ibm < count_transcribed_ibm :
logging.error("count_processed_ibm < count_transcribed_ibm")
log_kv("IBM Transcribed/Processed", "%d/%d" % (count_transcribed_ibm, count_processed_ibm))
if count_processed_google < count_transcribed_google:
logging.error("count_processed_google < count_transcribed_google")
log_kv("Google Transcribed/Processed", "%d/%d" % (count_transcribed_google, count_processed_google))
print
log_kv("Num audio files", len(audio_stats))
print
print "==============================================================="
print "Calculating number of IBM transcript words per minute of audio"
print "==============================================================="
suffix = ".out/hypotheses.txt.dictated"
suffix2 = ".out/hypotheses.txt"
prefix = "ibm_stt/"
i_words_per_min = calc_stat_per_minute(ibm_stats, audio_stats, prefix, suffix, suffix2, "word_count")
print "==============================================================="
print "IBM wpm tallied: %d" % len(i_words_per_min)
print "==============================================================="
print
print "==============================================================="
print "Calculating IBM processing time per minute of audio"
print "==============================================================="
i_proc_per_min = calc_stat_per_minute(ibm_pstats, audio_stats, prefix, suffix, suffix2, "transcribe_seconds")
print "==============================================================="
print "IBM ppm tallied: %d" % len(i_proc_per_min)
print "==============================================================="
print
print "==============================================================="
print "Calculating number of Google transcript words per minute of audio"
print "==============================================================="
suffix = ".out/transcript.txt.dictated"
suffix2 = ".out/transcript.txt"
prefix = "google_stt/"
g_words_per_min = calc_stat_per_minute(google_stats, audio_stats, prefix, suffix, suffix2, "word_count")
print "==============================================================="
print "Google wpm tallied: %d" % len(g_words_per_min)
print "==============================================================="
print
print "==============================================================="
print "Calculating Google processing time per minute of audio"
print "==============================================================="
g_proc_per_min = calc_stat_per_minute(google_pstats, audio_stats, prefix, suffix, suffix2, "transcribe_seconds")
print "==============================================================="
print "Google ppm tallied: %d" % len(g_proc_per_min)
print "==============================================================="
print
print
if __name__ == '__main__':
start_time = time.time()
parser = argparse.ArgumentParser(description='Compare Google STT vs IBM STT')
parser.add_argument('--folder','-f', action='store', default='/tmp/transcription/text2stats', help='text2stats.py output directory')
parser.add_argument('--verbose','-v', action='store_true', help='Spew logs profusely.')
args = parser.parse_args()
if args.verbose:
print "Relies on the following intermediate result files under %s :" % args.folder
print ", ".join([IBM_TRANSCRIPT_STATS_FILENAME, GOOGLE_TRANSCRIPT_STATS_FILENAME, IBM_PROCESSED_STATS_FILENAME,
GOOGLE_PROCESSED_STATS_FILENAME, RESULT_FILENAME])
log_kv("Running", __file__)
log_kv("From", os.path.dirname(os.path.realpath(__file__)))
folder = args.folder
log_kv("--folder", folder)
path = os.path.realpath(folder)
if not os.path.isdir(path):
raise IOError("Path not found: %s" % path)
ibm_stats_path = os.path.join(path, IBM_TRANSCRIPT_STATS_FILENAME)
google_stats_path = os.path.join(path, GOOGLE_TRANSCRIPT_STATS_FILENAME)
ibm_pstats_path = os.path.join(path, IBM_PROCESSED_STATS_FILENAME)
google_pstats_path = os.path.join(path, GOOGLE_PROCESSED_STATS_FILENAME)
audio_stats_path = os.path.join(path, RESULT_FILENAME)
calc_transcript_counts(ibm_stats_path, google_stats_path)
calc_transcript_words_per_minute(ibm_stats_path, google_stats_path, ibm_pstats_path, google_pstats_path, audio_stats_path)
log_kv("Done", __file__)
print("(%.2f sec)" % (time.time() - start_time))
# python compare.py -f /tmp/transcription/text2stats
|
|
#!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import logging
import os
import platform
import subprocess
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import pynacl.platform
import pynacl.file_tools
import buildbot_lib
import packages
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
NACL_DIR = os.path.dirname(SCRIPT_DIR)
TOOLCHAIN_BUILD_DIR = os.path.join(NACL_DIR, 'toolchain_build')
TOOLCHAIN_BUILD_OUT_DIR = os.path.join(TOOLCHAIN_BUILD_DIR, 'out')
TEMP_PACKAGES_FILE = os.path.join(TOOLCHAIN_BUILD_OUT_DIR, 'packages.txt')
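# toolchain_build writes the list of packages it produced to this file; it is
# later handed to packages.UploadPackages() on the buildbot/trybot.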
BUILD_DIR = os.path.join(NACL_DIR, 'build')
PACKAGE_VERSION_DIR = os.path.join(BUILD_DIR, 'package_version')
PACKAGE_VERSION_SCRIPT = os.path.join(PACKAGE_VERSION_DIR, 'package_version.py')
# As this is a buildbot script, we want verbose logging. Note however, that
# toolchain_build has its own log settings, controlled by its CLI flags.
logging.getLogger().setLevel(logging.DEBUG)
parser = argparse.ArgumentParser(description='PNaCl toolchain buildbot script')
group = parser.add_mutually_exclusive_group()
group.add_argument('--buildbot', action='store_true',
help='Buildbot mode (build and archive the toolchain)')
group.add_argument('--trybot', action='store_true',
help='Trybot mode (build but do not archive the toolchain)')
args = parser.parse_args()
host_os = buildbot_lib.GetHostPlatform()
# This is a minimal context, not useful for running tests yet, but enough for
# basic Step handling.
context = buildbot_lib.BuildContext()
buildbot_lib.SetDefaultContextAttributes(context)
context['pnacl'] = True
status = buildbot_lib.BuildStatus(context)
toolchain_install_dir = os.path.join(
NACL_DIR,
'toolchain',
'%s_%s' % (host_os, pynacl.platform.GetArch()),
'pnacl_newlib')
toolchain_build_cmd = [
sys.executable,
os.path.join(
NACL_DIR, 'toolchain_build', 'toolchain_build_pnacl.py'),
'--verbose', '--sync', '--clobber', '--build-64bit-host',
'--install', toolchain_install_dir,
]
# Sync the git repos used by build.sh
with buildbot_lib.Step('Sync build.sh repos', status, halt_on_fail=True):
buildbot_lib.Command(context, toolchain_build_cmd + ['--legacy-repo-sync'])
# Clean out any installed toolchain parts that were built by previous bot runs.
with buildbot_lib.Step('Sync TC install dir', status):
pynacl.file_tools.RemoveDirectoryIfPresent(toolchain_install_dir)
buildbot_lib.Command(
context,
[sys.executable, PACKAGE_VERSION_SCRIPT,
'--packages', 'pnacl_newlib', 'sync', '--extract'])
# Run checkdeps so that the PNaCl toolchain trybots catch mistakes that would
# cause the normal NaCl bots to fail.
with buildbot_lib.Step('checkdeps', status):
buildbot_lib.Command(
context,
[sys.executable,
os.path.join(NACL_DIR, 'tools', 'checkdeps', 'checkdeps.py')])
# Test the pinned toolchain. Since we don't yet have main waterfall
# Windows or mac bots, we need to test the full assembled toolchain here.
if host_os == 'win' or host_os == 'mac' or not pynacl.platform.IsArch64Bit():
with buildbot_lib.Step('Test NaCl-pinned toolchain', status,
halt_on_fail=False):
buildbot_lib.SCons(context, args=['smoke_tests'], parallel=True)
buildbot_lib.SCons(context, args=['large_tests'], parallel=False)
buildbot_lib.SCons(context, args=['pnacl_generate_pexe=0',
'nonpexe_tests'], parallel=True)
# toolchain_build outputs its own buildbot annotations, so don't use
# buildbot_lib.Step to run it here.
try:
gsd_arg = []
if args.buildbot:
gsd_arg = ['--buildbot']
elif args.trybot:
gsd_arg = ['--trybot']
cmd = toolchain_build_cmd + gsd_arg + ['--packages-file', TEMP_PACKAGES_FILE]
logging.info('Running: ' + ' '.join(cmd))
subprocess.check_call(cmd)
if args.buildbot or args.trybot:
# Don't upload packages from the 32-bit linux bot to avoid racing on
# uploading the same packages as the 64-bit linux bot
if host_os != 'linux' or pynacl.platform.IsArch64Bit():
packages.UploadPackages(TEMP_PACKAGES_FILE, args.trybot)
except subprocess.CalledProcessError:
# Ignore any failures and keep going (but make the bot stage red).
print '@@@STEP_FAILURE@@@'
sys.stdout.flush()
# Since mac and windows bots don't build target libraries or run tests yet,
# run a basic sanity check that tests the host components (LLVM, binutils,
# gold plugin)
if host_os == 'win' or host_os == 'mac':
with buildbot_lib.Step('Test host binaries and gold plugin', status,
halt_on_fail=False):
buildbot_lib.Command(
context,
[sys.executable,
os.path.join('tests', 'gold_plugin', 'gold_plugin_test.py'),
'--toolchaindir', toolchain_install_dir])
if host_os != 'win':
# TODO(dschuff): Fix windows regression test runner (upstream in the LLVM
# codebase or locally in the way we build LLVM) ASAP
with buildbot_lib.Step('LLVM Regression (toolchain_build)', status):
llvm_test = [sys.executable,
os.path.join(NACL_DIR, 'pnacl', 'scripts', 'llvm-test.py'),
'--llvm-regression',
'--verbose']
buildbot_lib.Command(context, llvm_test)
with buildbot_lib.Step('Update cygwin/check bash', status, halt_on_fail=True):
# Update cygwin if necessary.
if host_os == 'win':
if sys.platform == 'cygwin':
print 'This script does not support running from inside cygwin!'
sys.exit(1)
subprocess.check_call(os.path.join(SCRIPT_DIR, 'cygwin_env.bat'))
print os.environ['PATH']
paths = os.environ['PATH'].split(os.pathsep)
# Put path to cygwin tools at the beginning, so cygwin tools like python
# and cmake will supersede others (which do not understand cygwin paths)
paths = [os.path.join(NACL_DIR, 'cygwin', 'bin')] + paths
print paths
os.environ['PATH'] = os.pathsep.join(paths)
print os.environ['PATH']
bash = os.path.join(NACL_DIR, 'cygwin', 'bin', 'bash')
else:
# Assume bash is in the path
bash = 'bash'
try:
print 'Bash version:'
sys.stdout.flush()
subprocess.check_call([bash , '--version'])
except subprocess.CalledProcessError:
print 'Bash not found in path!'
raise buildbot_lib.StepFailed()
# Now we run the PNaCl buildbot script. It in turn runs the PNaCl build.sh
# script and runs scons tests.
# TODO(dschuff): re-implement the test-running portion of buildbot_pnacl.sh
# using buildbot_lib, and use them here and in the non-toolchain builder.
buildbot_shell = os.path.join(NACL_DIR, 'buildbot', 'buildbot_pnacl.sh')
# Because patching mangles the shell script on the trybots, fix it up here
# so we can have working windows trybots.
def FixCRLF(f):
with open(f, 'rb') as script:
data = script.read().replace('\r\n', '\n')
with open(f, 'wb') as script:
script.write(data)
if host_os == 'win':
FixCRLF(buildbot_shell)
FixCRLF(os.path.join(NACL_DIR, 'pnacl', 'build.sh'))
FixCRLF(os.path.join(NACL_DIR, 'pnacl', 'scripts', 'common-tools.sh'))
# Generate flags for buildbot_pnacl.sh
if host_os == 'linux':
arg_os = 'linux'
# TODO(dschuff): Figure out if it makes sense to import the utilities from
# build/ into scripts from buildbot/ or only use things from buildbot_lib,
# or unify them in some way.
arch = 'x8664' if platform.machine() == 'x86_64' else 'x8632'
elif host_os == 'mac':
arg_os = 'mac'
arch = 'x8632'
elif host_os == 'win':
arg_os = 'win'
arch = 'x8664'
else:
print 'Unrecognized platform: ', host_os
sys.exit(1)
if args.buildbot:
trybot_mode = 'false'
elif args.trybot:
trybot_mode = 'true'
platform_arg = 'mode-buildbot-tc-' + arch + '-' + arg_os
command = [bash,
buildbot_shell,
platform_arg,
trybot_mode]
logging.info('Running: ' + ' '.join(command))
subprocess.check_call(command)
|
|
#!/usr/bin/env python
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp>
# Based on openvswitch agent.
#
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Isaku Yamahata
import httplib
import socket
import sys
import time
import eventlet
eventlet.monkey_patch()
from oslo.config import cfg
from ryu.app import client
from ryu.app import conf_switch_key
from ryu.app import rest_nw_id
from neutron.agent.linux import ip_lib
from neutron.agent.linux import ovs_lib
from neutron.agent import rpc as agent_rpc
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import config as common_config
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron import context as q_context
from neutron.extensions import securitygroup as ext_sg
from neutron.openstack.common import log
from neutron.plugins.ryu.common import config # noqa
LOG = log.getLogger(__name__)
# This is copied from nova.flags._get_my_ip()
# Agent shouldn't depend on nova module
def _get_my_ip():
"""Return the actual ip of the local machine.
This code figures out what source address would be used if some traffic
were to be sent out to some well known address on the Internet. In this
case, a Google DNS server is used, but the specific address does not
matter much. No traffic is actually sent.
"""
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, _port) = csock.getsockname()
csock.close()
return addr
def _get_ip_from_nic(nic):
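"""Return the first global-scope IPv4 address on nic, or None if there is none."""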
ip_wrapper = ip_lib.IPWrapper()
dev = ip_wrapper.device(nic)
addrs = dev.addr.list(scope='global')
for addr in addrs:
if addr['ip_version'] == 4:
return addr['cidr'].split('/')[0]
def _get_ip(cfg_ip_str, cfg_interface_str):
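"""Resolve an IP with the precedence: explicit config option, first IPv4
address of the configured interface, then the host's default outbound address."""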
ip = None
try:
ip = getattr(cfg.CONF.OVS, cfg_ip_str)
except (cfg.NoSuchOptError, cfg.NoSuchGroupError):
pass
if ip:
return ip
iface = None
try:
iface = getattr(cfg.CONF.OVS, cfg_interface_str)
except (cfg.NoSuchOptError, cfg.NoSuchGroupError):
pass
if iface:
ip = _get_ip_from_nic(iface)
if ip:
return ip
LOG.warning(_('Could not get IPv4 address from %(nic)s: %(cfg)s'),
{'nic': iface, 'cfg': cfg_interface_str})
return _get_my_ip()
def _get_tunnel_ip():
return _get_ip('tunnel_ip', 'tunnel_interface')
def _get_ovsdb_ip():
return _get_ip('ovsdb_ip', 'ovsdb_interface')
class OVSBridge(ovs_lib.OVSBridge):
def __init__(self, br_name, root_helper):
ovs_lib.OVSBridge.__init__(self, br_name, root_helper)
self.datapath_id = None
def find_datapath_id(self):
self.datapath_id = self.get_datapath_id()
def set_manager(self, target):
self.run_vsctl(["set-manager", target])
def get_ofport(self, name):
return self.db_get_val("Interface", name, "ofport")
def _get_ports(self, get_port):
ports = []
port_names = self.get_port_name_list()
for name in port_names:
if self.get_ofport(name) < 0:
continue
port = get_port(name)
if port:
ports.append(port)
return ports
def _get_external_port(self, name):
# exclude vif ports
external_ids = self.db_get_map("Interface", name, "external_ids")
if external_ids:
return
# exclude tunnel ports
options = self.db_get_map("Interface", name, "options")
if "remote_ip" in options:
return
ofport = self.get_ofport(name)
return ovs_lib.VifPort(name, ofport, None, None, self)
def get_external_ports(self):
return self._get_ports(self._get_external_port)
class VifPortSet(object):
def __init__(self, int_br, ryu_rest_client):
super(VifPortSet, self).__init__()
self.int_br = int_br
self.api = ryu_rest_client
def setup(self):
for port in self.int_br.get_external_ports():
LOG.debug(_('External port %s'), port)
self.api.update_port(rest_nw_id.NW_ID_EXTERNAL,
port.switch.datapath_id, port.ofport)
class RyuPluginApi(agent_rpc.PluginApi,
sg_rpc.SecurityGroupServerRpcApiMixin):
def get_ofp_rest_api_addr(self, context):
LOG.debug(_("Get Ryu rest API address"))
return self.call(context,
self.make_msg('get_ofp_rest_api'),
topic=self.topic)
class RyuSecurityGroupAgent(sg_rpc.SecurityGroupAgentRpcMixin):
def __init__(self, context, plugin_rpc, root_helper):
self.context = context
self.plugin_rpc = plugin_rpc
self.root_helper = root_helper
self.init_firewall()
class OVSNeutronOFPRyuAgent(n_rpc.RpcCallback,
sg_rpc.SecurityGroupAgentRpcCallbackMixin):
RPC_API_VERSION = '1.1'
def __init__(self, integ_br, tunnel_ip, ovsdb_ip, ovsdb_port,
polling_interval, root_helper):
super(OVSNeutronOFPRyuAgent, self).__init__()
self.polling_interval = polling_interval
self._setup_rpc()
self.sg_agent = RyuSecurityGroupAgent(self.context,
self.plugin_rpc,
root_helper)
self._setup_integration_br(root_helper, integ_br, tunnel_ip,
ovsdb_port, ovsdb_ip)
def _setup_rpc(self):
self.topic = topics.AGENT
self.plugin_rpc = RyuPluginApi(topics.PLUGIN)
self.context = q_context.get_admin_context_without_session()
self.endpoints = [self]
consumers = [[topics.PORT, topics.UPDATE],
[topics.SECURITY_GROUP, topics.UPDATE]]
self.connection = agent_rpc.create_consumers(self.endpoints,
self.topic,
consumers)
def _setup_integration_br(self, root_helper, integ_br,
tunnel_ip, ovsdb_port, ovsdb_ip):
self.int_br = OVSBridge(integ_br, root_helper)
self.int_br.find_datapath_id()
rest_api_addr = self.plugin_rpc.get_ofp_rest_api_addr(self.context)
if not rest_api_addr:
raise n_exc.Invalid(_("Ryu rest API port isn't specified"))
LOG.debug(_("Going to ofp controller mode %s"), rest_api_addr)
ryu_rest_client = client.OFPClient(rest_api_addr)
self.vif_ports = VifPortSet(self.int_br, ryu_rest_client)
self.vif_ports.setup()
sc_client = client.SwitchConfClient(rest_api_addr)
sc_client.set_key(self.int_br.datapath_id,
conf_switch_key.OVS_TUNNEL_ADDR, tunnel_ip)
# Currently Ryu supports only tcp methods. (ssl isn't supported yet)
self.int_br.set_manager('ptcp:%d' % ovsdb_port)
sc_client.set_key(self.int_br.datapath_id, conf_switch_key.OVSDB_ADDR,
'tcp:%s:%d' % (ovsdb_ip, ovsdb_port))
def port_update(self, context, **kwargs):
LOG.debug(_("Port update received"))
port = kwargs.get('port')
vif_port = self.int_br.get_vif_port_by_id(port['id'])
if not vif_port:
return
if ext_sg.SECURITYGROUPS in port:
self.sg_agent.refresh_firewall()
def _update_ports(self, registered_ports):
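"""Compare the bridge's current VIF port set against the previously seen set;
return None when nothing changed, otherwise the current/added/removed sets."""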
ports = self.int_br.get_vif_port_set()
if ports == registered_ports:
return
added = ports - registered_ports
removed = registered_ports - ports
return {'current': ports,
'added': added,
'removed': removed}
def _process_devices_filter(self, port_info):
if 'added' in port_info:
self.sg_agent.prepare_devices_filter(port_info['added'])
if 'removed' in port_info:
self.sg_agent.remove_devices_filter(port_info['removed'])
def daemon_loop(self):
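"""Poll the integration bridge for VIF port changes and refresh security
group filters, sleeping so each iteration takes at least polling_interval."""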
ports = set()
while True:
start = time.time()
try:
port_info = self._update_ports(ports)
if port_info:
LOG.debug(_("Agent loop has new device"))
self._process_devices_filter(port_info)
ports = port_info['current']
except Exception:
LOG.exception(_("Error in agent event loop"))
elapsed = max(time.time() - start, 0)
if (elapsed < self.polling_interval):
time.sleep(self.polling_interval - elapsed)
else:
LOG.debug(_("Loop iteration exceeded interval "
"(%(polling_interval)s vs. %(elapsed)s)!"),
{'polling_interval': self.polling_interval,
'elapsed': elapsed})
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging(cfg.CONF)
integ_br = cfg.CONF.OVS.integration_bridge
polling_interval = cfg.CONF.AGENT.polling_interval
root_helper = cfg.CONF.AGENT.root_helper
tunnel_ip = _get_tunnel_ip()
LOG.debug(_('tunnel_ip %s'), tunnel_ip)
ovsdb_port = cfg.CONF.OVS.ovsdb_port
LOG.debug(_('ovsdb_port %s'), ovsdb_port)
ovsdb_ip = _get_ovsdb_ip()
LOG.debug(_('ovsdb_ip %s'), ovsdb_ip)
try:
agent = OVSNeutronOFPRyuAgent(integ_br, tunnel_ip, ovsdb_ip,
ovsdb_port, polling_interval,
root_helper)
except httplib.HTTPException as e:
LOG.error(_("Initialization failed: %s"), e)
sys.exit(1)
LOG.info(_("Ryu initialization on the node is done. "
"Agent initialized successfully, now running..."))
agent.daemon_loop()
sys.exit(0)
if __name__ == "__main__":
main()
|
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Notification module.
Provides Manager.send_async, which sends notifications; and Manager.query, which
queries the current status of notifications.
Notifications are transported by email. Every message you send consumes email
quota. A message is a single payload delivered to a single user. We do not
provide the entire interface email does (no CC, BCC, attachments, or HTML
bodies). Note that messages are not sent when you call Manager.send_async(), but
instead enqueued and sent later -- usually within a minute.
This module has several advantages over using App Engine's mail.send_mail()
directly.
First, we queue and retry sending messages. This happens on two levels: first,
send_async() adds items to a task queue, which retries if there are transient
failures (like the datastore being slow, or you enqueueing more messages than
App Engine's mail service can send in a minute). Second, we provide a cron that
retries message delivery for several days, so if you exhaust your daily mail
quota today we'll try again tomorrow.
The second major advantage is that we keep a record of messages sent, so you can
do analytics on them. We provide a base set of dashboards in the admin UI
showing both overall and recent notification state.
For users who are sending mail occasionally, this module smoothes away some of
the gotchas of App Engine's mail service. However, App Engine is not optimized
to be a bulk mail delivery service, so if you need to send amounts of mail in
excess of App Engine's max daily quota (1.7M messages) or minute-by-minute quota
(5k messages), you should consider using a third-party mail delivery service.
We provide a second module that allows your users to opt out of receiving email.
We strongly encourage use of that module so you don't spam people. See
modules/unsubscribe/unsubscribe.py. The general pattern for using these modules
is:
from modules.notifications import notifications
from modules.unsubscribe import unsubscribe
from google.appengine.api import users
user = users.get_current_user()
if user and not unsubscribe.has_unsubscribed(user.email):
notifications.Manager.send_async(
user.email, '[email protected]', 'intent', 'subject', 'body'
)
"""
__author__ = [
'[email protected] (John Cox)'
]
import datetime
import logging
from models import counters
from models import custom_modules
from models import entities
from models import services
from models import transforms
from models import utils
from modules import dashboard
from google.appengine.api import mail
from google.appengine.api import mail_errors
from google.appengine.api import taskqueue
from google.appengine.datastore import datastore_rpc
from google.appengine.ext import db
from google.appengine.ext import deferred
_LOG = logging.getLogger('modules.notifications.notifications')
logging.basicConfig()
_APP_ENGINE_MAIL_FATAL_ERRORS = frozenset([
mail_errors.BadRequestError, mail_errors.InvalidSenderError,
])
_ENQUEUED_BUFFER_MULTIPLIER = 1.5
_KEY_DELIMITER = ':'
_MAX_ENQUEUED_HOURS = 3
_MAX_RETRY_DAYS = 3
# Number of times past which recoverable failure of send_mail() calls becomes
# hard failure. Used as a brake on runaway queues. Should be larger than the
# expected cap on the number of retries imposed by taskqueue.
_RECOVERABLE_FAILURE_CAP = 20
_SECONDS_PER_HOUR = 60 * 60
_SECONDS_PER_DAY = 24 * _SECONDS_PER_HOUR
_USECS_PER_SECOND = 10 ** 6
COUNTER_RETENTION_POLICY_RUN = counters.PerfCounter(
'gcb-notifications-retention-policy-run',
'number of times a retention policy was run'
)
COUNTER_SEND_ASYNC_FAILED_BAD_ARGUMENTS = counters.PerfCounter(
'gcb-notifications-send-async-failed-bad-arguments',
'number of times send_async failed because arguments were bad'
)
COUNTER_SEND_ASYNC_FAILED_DATASTORE_ERROR = counters.PerfCounter(
'gcb-notifications-send-async-failed-datastore-error',
'number of times send_async failed because of datastore error'
)
COUNTER_SEND_ASYNC_START = counters.PerfCounter(
'gcb-notifications-send-async-called',
'number of times send_async has been called'
)
COUNTER_SEND_ASYNC_SUCCESS = counters.PerfCounter(
'gcb-notifications-send-async-success',
'number of times send_async succeeded'
)
COUNTER_SEND_MAIL_TASK_FAILED = counters.PerfCounter(
'gcb-notifications-send-mail-task-failed',
'number of times the send mail task failed, but could be retried'
)
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY = counters.PerfCounter(
'gcb-notifications-send-mail-task-failed-permanently',
'number of times the send mail task failed permanently'
)
COUNTER_SEND_MAIL_TASK_FAILURE_CAP_EXCEEDED = counters.PerfCounter(
'gcb-notifications-send-mail-task-recoverable-failure-cap-exceeded',
'number of times the recoverable failure cap was exceeded'
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_CALLED = counters.PerfCounter(
'gcb-notifications-send-mail-task-record-failure-called',
'number of times _record_failure was called in the send mail task'
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_FAILED = counters.PerfCounter(
'gcb-notifications-send-mail-task-record-failure-failed',
'number of times _record_failure failed in the send mail task'
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_SUCCESS = counters.PerfCounter(
'gcb-notifications-send-mail-task-record-failure-success',
'number of times _record_failure succeeded in the send mail task'
)
COUNTER_SEND_MAIL_TASK_SENT = counters.PerfCounter(
'gcb-notifications-send-mail-task-sent',
'number of times the send mail task called send_mail successfully'
)
COUNTER_SEND_MAIL_TASK_SKIPPED = counters.PerfCounter(
'gcb-notifications-send-mail-task-skipped',
'number of times send mail task skipped sending mail'
)
COUNTER_SEND_MAIL_TASK_STARTED = counters.PerfCounter(
'gcb-notifications-send-mail-task-started',
'number of times the send mail task was dequeued and started')
COUNTER_SEND_MAIL_TASK_SUCCESS = counters.PerfCounter(
'gcb-notifications-send-mail-task-success',
'number of times send mail task completed successfully'
)
# TODO(johncox): remove suppression once stubs are implemented.
# pylint: disable=unused-argument
def _dt_to_epoch_usec(dt):
"""Converts datetime (assumed UTC) to epoch microseconds."""
return int((_USECS_PER_SECOND) * (
dt - datetime.datetime.utcfromtimestamp(0)).total_seconds())
def _epoch_usec_to_dt(usec):
"""Converts microseconds since epoch int to datetime (UTC, no tzinfo)."""
return (
datetime.datetime.utcfromtimestamp(0) +
datetime.timedelta(microseconds=usec)
)
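# Illustrative round trip (not from the original source):
#   _dt_to_epoch_usec(datetime.datetime(1970, 1, 2)) == 86400 * _USECS_PER_SECOND
#   _epoch_usec_to_dt(86400 * _USECS_PER_SECOND) == datetime.datetime(1970, 1, 2)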
class Error(Exception):
"""Base error class."""
class NotificationTooOldError(Error):
"""Recorded on a notification by cron when it's too old to re-enqueue."""
class RetentionPolicy(object):
"""Retention policy for notification data.
Notification data is spread between the Notification and Payload objects (see
below). Two parts of this data may be large:
Notification.audit_trail, and Payload.body.
We allow clients to specify a retention policy when calling
Manager.send_async(). This retention policy is a bundle of logic applied after
we know a notification has been sent. How and when the retention policy is run
is up to the implementation; we make no guarantees except that once the
notification is sent we will attempt run() at least once, and if it mutates
its input we will attempt to apply those mutations at least once.
Practically, it can be used to prevent retention of data in the datastore that
is of no use to the client, even for audit purposes.
Note that 'retention' here has nothing to do with broader user data privacy
and retention concerns -- this is purely about responsible resource usage.
"""
# String. Name used to identify the retention policy (in the datastore, for
# example).
NAME = None
@classmethod
def run(cls, notification, payload):
"""Runs the policy, transforming notification and payload in place.
run does not apply mutations to the backing datastore entities; it merely
mutates the in-memory objects, which we will later attempt to persist.
Your transforms must not touch protected fields on notification or payload;
those are used by the subsystem, and changing them can violate constraints
and cause unpredictable behavior and data corruption.
Args:
notification: Notification. The notification to process.
payload: Payload. The payload to process.
"""
pass
class RetainAll(RetentionPolicy):
"""Policy that retains all data."""
NAME = 'all'
class RetainAuditTrail(RetentionPolicy):
"""Policy that blanks Payload.body but not Notification.audit_trail."""
NAME = 'audit_trail'
@classmethod
def run(cls, unused_notification, payload):
payload.body = None
# Dict of string -> RetentionPolicy where key is the policy's NAME. All
# available retention policies.
_RETENTION_POLICIES = {
RetainAll.NAME: RetainAll,
RetainAuditTrail.NAME: RetainAuditTrail,
}
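# Illustrative sketch only (not part of this module): a hypothetical custom
# policy that blanks both the body and the audit trail once the notification
# has been sent. To be selectable via Manager.send_async() it would also need
# an entry in _RETENTION_POLICIES above; it is deliberately left unregistered.
class _ExampleRetainNothing(RetentionPolicy):
    """Hypothetical policy that blanks Payload.body and Notification.audit_trail."""

    NAME = 'example_nothing'

    @classmethod
    def run(cls, notification, payload):
        # Only unprotected fields are touched; the subsystem persists these
        # mutations after the send completes.
        notification.audit_trail = None
        payload.body = None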
class Status(object):
"""DTO for email status."""
FAILED = 'failed'
PENDING = 'pending'
SUCCEEDED = 'succeeded'
_STATES = frozenset((FAILED, PENDING, SUCCEEDED))
def __init__(self, to, sender, intent, enqueue_date, state):
assert state in self._STATES
self.enqueue_date = enqueue_date
self.intent = intent
self.sender = sender
self.state = state
self.to = to
@classmethod
def from_notification(cls, notification):
state = cls.PENDING
# Treating as module-protected. pylint: disable=protected-access
if notification._fail_date:
state = cls.FAILED
elif notification._done_date:
state = cls.SUCCEEDED
return cls(
notification.to, notification.sender, notification.intent,
notification.enqueue_date, state
)
def __eq__(self, other):
return (
self.enqueue_date == other.enqueue_date and
self.intent == other.intent and
self.sender == other.sender and
self.state == other.state and
self.to == other.to
)
def __str__(self):
return (
'Status - to: %(to)s, from: %(sender)s, intent: %(intent)s, '
'enqueued: %(enqueue_date)s, state: %(state)s' % {
'enqueue_date': self.enqueue_date,
'intent': self.intent,
'sender': self.sender,
'state': self.state,
'to': self.to,
})
def _accumulate_statuses(notification, results):
for_user = results.get(notification.to, [])
for_user.append(Status.from_notification(notification))
results[notification.to] = for_user
class Manager(object):
"""Manages state and operation of the notifications subsystem."""
# Treating access as module-protected. pylint: disable=protected-access
@classmethod
def query(cls, to, intent):
"""Gets the Status of notifications queued previously via send_async().
Serially performs one datastore query per user in the to list.
Args:
to: list of string. The recipients of the notification.
intent: string. Short string identifier of the intent of the notification
(for example, 'invitation' or 'reminder').
Returns:
Dict of to string -> [Status, sorted by descending enqueue date].
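Example (illustrative; the address and intent here are hypothetical):
    statuses = Manager.query(['[email protected]'], 'invitation')
    for status in statuses.get('[email protected]', []):
        print status.state  # 'pending', 'succeeded', or 'failed'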
"""
results = {}
for address in to:
mapper = utils.QueryMapper(cls._get_query_query(address, intent))
mapper.run(_accumulate_statuses, results)
return results
@classmethod
def send_async(
cls, to, sender, intent, body, subject, audit_trail=None,
retention_policy=None):
"""Asyncronously sends a notification via email.
Args:
to: string. Recipient email address. Must have a valid form, but we cannot
know that the address can actually be delivered to.
sender: string. Email address of the sender of the notification. Must be a
valid sender for the App Engine deployment at the time the deferred
send_mail() call actually executes (meaning it cannot be the email
address of the user currently in session, because the user will not be
in session at call time). See
https://developers.google.com/appengine/docs/python/mail/emailmessagefields.
intent: string. Short string identifier of the intent of the notification
(for example, 'invitation' or 'reminder'). Each kind of notification
you are sending should have its own intent. Used when creating keys in
the index; values that cause the resulting key to be >500B will fail.
May not contain a colon.
body: string. The data payload of the notification. Must fit in a
datastore entity.
subject: string. Subject line for the notification.
audit_trail: JSON-serializable object. An optional audit trail that, when
used with the default retention policy, will be retained even after
the body is scrubbed from the datastore.
retention_policy: RetentionPolicy. The retention policy to use for data
after a Notification has been sent. By default, we retain the
audit_trail but not the body.
Returns:
(notification_key, payload_key). A 2-tuple of datastore keys for the
created notification and payload.
Raises:
Exception: if values delegated to model initializers are invalid.
ValueError: if to or sender are malformed according to App Engine (note
that well-formed values do not guarantee success).
"""
COUNTER_SEND_ASYNC_START.inc()
enqueue_date = datetime.datetime.utcnow()
retention_policy = (
retention_policy if retention_policy else RetainAuditTrail)
for email in (to, sender):
if not mail.is_email_valid(email):
COUNTER_SEND_ASYNC_FAILED_BAD_ARGUMENTS.inc()
raise ValueError('Malformed email address: "%s"' % email)
if retention_policy.NAME not in _RETENTION_POLICIES:
COUNTER_SEND_ASYNC_FAILED_BAD_ARGUMENTS.inc()
raise ValueError('Invalid retention policy: ' + str(retention_policy))
try:
notification, payload = cls._make_unsaved_models(
audit_trail, body, enqueue_date, intent, retention_policy.NAME,
sender, subject, to,
)
except Exception, e:
COUNTER_SEND_ASYNC_FAILED_BAD_ARGUMENTS.inc()
raise e
cls._mark_enqueued(notification, enqueue_date)
try:
notification_key, payload_key = cls._save_notification_and_payload(
notification, payload,
)
except Exception, e:
COUNTER_SEND_ASYNC_FAILED_DATASTORE_ERROR.inc()
raise e
deferred.defer(
cls._transactional_send_mail_task, notification_key, payload_key,
_retry_options=cls._get_retry_options())
COUNTER_SEND_ASYNC_SUCCESS.inc()
return notification_key, payload_key
@classmethod
def _make_unsaved_models(
cls, audit_trail, body, enqueue_date, intent, retention_policy, sender,
subject, to):
notification = Notification(
audit_trail=audit_trail, enqueue_date=enqueue_date, intent=intent,
_retention_policy=retention_policy, sender=sender, subject=subject,
to=to,
)
payload = Payload(
body=body, enqueue_date=enqueue_date, intent=intent, to=to,
_retention_policy=retention_policy,
)
return notification, payload
@classmethod
@db.transactional(xg=True)
def _save_notification_and_payload(cls, notification, payload):
return db.put([notification, payload])
@classmethod
def _send_mail_task(
cls, notification_key, payload_key, test_send_mail_fn=None):
exception = None
failed_permanently = False
now = datetime.datetime.utcnow()
notification, payload = db.get([notification_key, payload_key])
send_mail_fn = test_send_mail_fn if test_send_mail_fn else mail.send_mail
sent = False
COUNTER_SEND_MAIL_TASK_STARTED.inc()
if not notification:
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()
raise deferred.PermanentTaskFailure(
'Notification missing: ' + str(notification_key)
)
if not payload:
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()
raise deferred.PermanentTaskFailure(
'Payload missing: ' + str(payload_key)
)
policy = _RETENTION_POLICIES.get(notification._retention_policy)
if not policy:
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()
raise deferred.PermanentTaskFailure(
'Unknown retention policy: ' + notification._retention_policy
)
if (cls._done(notification) or cls._failed(notification) or
cls._sent(notification)):
COUNTER_SEND_MAIL_TASK_SKIPPED.inc()
COUNTER_SEND_MAIL_TASK_SUCCESS.inc()
return
if notification._recoverable_failure_count > _RECOVERABLE_FAILURE_CAP:
message = (
'Recoverable failure cap (%s) exceeded for notification with '
'key %s'
) % (_RECOVERABLE_FAILURE_CAP, str(notification.key()))
_LOG.error(message)
permanent_failure = deferred.PermanentTaskFailure(message)
try:
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_CALLED.inc()
cls._record_failure(
notification, payload, permanent_failure, dt=now, permanent=True,
policy=policy
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_SUCCESS.inc()
# Must be vague. pylint: disable=broad-except
except Exception, e:
_LOG.error(
cls._get_record_failure_error_message(notification, payload, e)
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_FAILED.inc()
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()
COUNTER_SEND_MAIL_TASK_FAILURE_CAP_EXCEEDED.inc()
raise permanent_failure
try:
send_mail_fn(
notification.sender, notification.to, notification.subject,
payload.body
)
sent = True
# Must be vague. pylint: disable=broad-except
except Exception, exception:
failed_permanently = cls._is_send_mail_error_permanent(exception)
if not failed_permanently:
try:
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_CALLED.inc()
cls._record_failure(notification, payload, exception)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_SUCCESS.inc()
# Must be vague. pylint: disable=broad-except
except Exception, e:
_LOG.error(
cls._get_record_failure_error_message(
notification, payload, exception
)
)
COUNTER_SEND_MAIL_TASK_RECORD_FAILURE_FAILED.inc()
_LOG.error(
('Recoverable error encountered when processing notification task; '
'will retry. Error was: ' + str(exception))
)
COUNTER_SEND_MAIL_TASK_FAILED.inc()
# Set by except: clause above. pylint: disable=raising-bad-type
raise exception
if sent:
cls._mark_sent(notification, now)
if failed_permanently:
cls._mark_failed(notification, now, exception, permanent=True)
if sent or failed_permanently:
policy.run(notification, payload)
cls._mark_done(notification, now)
db.put([notification, payload])
COUNTER_RETENTION_POLICY_RUN.inc()
if sent:
COUNTER_SEND_MAIL_TASK_SENT.inc()
elif failed_permanently:
COUNTER_SEND_MAIL_TASK_FAILED_PERMANENTLY.inc()
COUNTER_SEND_MAIL_TASK_SUCCESS.inc()
@classmethod
@db.transactional(
propagation=datastore_rpc.TransactionOptions.INDEPENDENT, xg=True)
def _record_failure(
cls, notification, payload, exception, dt=None, permanent=False,
policy=None):
"""Marks failure data on entities in an external transaction.
IMPORTANT: because we're using datastore_rpc.TransactionOptions.INDEPENDENT,
mutations on notification and payload here are *not* transactionally
consistent in the caller. Consequently, callers must not read or mutate them
after calling this method.
The upside is that this allows us to record failure data on entities inside
a transaction, and that transaction can throw without rolling back these
mutations.
Args:
notification: Notification. The notification to mutate.
payload: Payload. The payload to mutate.
exception: Exception. The exception that prompted the mutation.
dt: datetime. The notification._fail_date and notification._done_date to
record if permanent is True.
permanent: boolean. If True, the notification will be marked done and
the retention policy will be run.
policy: RetentionPolicy. The retention policy to apply if permanent was
True.
Returns:
(notification_key, payload_key) 2-tuple.
"""
notification._recoverable_failure_count += 1
cls._mark_failed(notification, dt, exception, permanent=permanent)
if permanent:
assert dt and policy
cls._mark_done(notification, dt)
policy.run(notification, payload)
COUNTER_RETENTION_POLICY_RUN.inc()
return db.put([notification, payload])
@classmethod
def _get_record_failure_error_message(cls, notification, payload, exception):
return (
'Unable to record failure for notification with key %s and payload '
'with key %s; encountered %s error with text: "%s"') % (
str(notification.key()), str(payload.key()),
exception.__class__.__name__, str(exception))
@classmethod
def _transactional_send_mail_task(cls, notification_key, payload_key):
# Can't use decorator because of taskqueue serialization.
db.run_in_transaction_options(
db.create_transaction_options(xg=True), cls._send_mail_task,
notification_key, payload_key)
@classmethod
def _done(cls, notification):
return bool(notification._done_date)
@classmethod
def _failed(cls, notification):
return bool(notification._fail_date)
@classmethod
def _get_in_process_notifications_query(cls):
return Notification.all(
).filter(
'%s =' % Notification._done_date.name, None
).order(
'-' + Notification.enqueue_date.name
)
@classmethod
def _get_query_query(cls, to, intent):
return Notification.all(
).filter(
Notification.to.name, to
).filter(
Notification.intent.name, intent
).order(
'-' + Notification.enqueue_date.name
)
@classmethod
def _get_last_exception_value(cls, exception):
return {
'type': '%s.%s' % (
exception.__class__.__module__, exception.__class__.__name__),
'string': str(exception),
}
@classmethod
def _get_retry_options(cls):
# Retry up to once every hour with exponential backoff; limit tasks to
# three hours; cron will re-enqueue them for days. This is because the
# purpose of the queue is retrying in case of transient errors (datastore or
# send_mail burbles), and the purpose of the cron is retrying in case of
# longer errors (quota exhaustion).
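# Illustrative arithmetic (not in the original comment): with
# min_backoff_seconds=1 and max_doublings=12, the backoff ceiling is
# 1 * 2**12 = 4096s, which already exceeds both max_backoff_seconds (3600s)
# and the three-hour task age limit, so retries saturate at the hourly cap
# rather than undershooting it.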
return taskqueue.TaskRetryOptions(
min_backoff_seconds=1, max_backoff_seconds=_SECONDS_PER_HOUR,
max_doublings=12, # Overflows task age limit -- don't want underflow.
task_age_limit=cls._get_task_age_limit_seconds(),
)
@classmethod
def _get_task_age_limit_seconds(cls):
return _MAX_ENQUEUED_HOURS * _SECONDS_PER_HOUR
@classmethod
def _is_too_old_to_reenqueue(cls, dt, now):
return now - dt > datetime.timedelta(days=_MAX_RETRY_DAYS)
@classmethod
def _is_send_mail_error_permanent(cls, exception):
return type(exception) in _APP_ENGINE_MAIL_FATAL_ERRORS
@classmethod
def _is_still_enqueued(cls, notification, dt):
"""Whether or not an item is still on the deferred queue.
This isn't exact -- we can't query the queue. We can know how long items can
be on the queue, so we can make a guess. Our guess errs toward false
positives: there is clock skew between datastore and taskqueue, and false
negatives are terrible because they cause multiple messages to get sent.
Consequently, we
consider items that were last enqueued slightly too long ago to still be on
the queue. This can cause re-enqueueing of some items to get delayed by one
cron interval. We ameliorate this a bit by checking for side-effects of the
dequeue (_done|fail|send_date set).
Args:
notification: Notification. The notification to check status of.
dt: datetime, assumed UTC. The datetime to check enqueued status at.
Returns:
Boolean. False if the item has never been enqueued, or was enqueued long
enough ago we're sure it's no longer on the queue, or has already been
processed (indicating it's been enqueued and dequeued). True otherwise.
"""
if (notification._done_date or notification._fail_date or
notification._send_date) or not notification._last_enqueue_date:
return False
# Err on the side of treating the item as still enqueued by padding the age
# limit with the buffer multiplier (see docstring above).
return cls._get_task_age_limit_seconds() * _ENQUEUED_BUFFER_MULTIPLIER > (
(dt - notification._last_enqueue_date).total_seconds()
)
@classmethod
def _mark_done(cls, notification, dt):
notification._done_date = dt
@classmethod
def _mark_enqueued(cls, notification, dt):
notification._last_enqueue_date = dt
@classmethod
def _mark_failed(cls, notification, dt, exception, permanent=False):
notification._last_exception = cls._get_last_exception_value(exception)
if permanent:
notification._fail_date = dt
@classmethod
def _mark_sent(cls, notification, dt):
notification._send_date = dt
@classmethod
def _sent(cls, notification):
return bool(notification._send_date)
class _IntentProperty(db.StringProperty):
"""Property that holds intent strings."""
@classmethod
def _get_message(cls, value):
return 'Intent "%s" cannot contain "%s"' % (value, _KEY_DELIMITER)
@classmethod
def check(cls, value):
if _KEY_DELIMITER in value:
raise ValueError(cls._get_message(value))
def validate(self, value):
value = super(_IntentProperty, self).validate(value)
try:
self.check(value)
except ValueError:
raise db.BadValueError(self._get_message(value))
return value
class _SerializedProperty(db.Property):
"""Custom property that stores JSON-serialized data."""
def __init__(self, *args, **kwargs):
# Disallow indexing and explicitly set indexed=False. If indexed is unset it
# defaults to True; if True, it imposes a 500 byte limit on the value, and
# longer values throw during db.put(). We want to support larger values
# rather than searching, and we do not want this to be a TextProperty
# because the underlying type is not db.Text.
if kwargs.get('indexed'):
raise ValueError('_SerializedProperty does not support indexing')
kwargs['indexed'] = False
super(_SerializedProperty, self).__init__(*args, **kwargs)
def get_value_for_datastore(self, model_instance):
return transforms.dumps(super(
_SerializedProperty, self
).get_value_for_datastore(model_instance))
def make_value_from_datastore(self, value):
return transforms.loads(value)
def validate(self, value):
value = super(_SerializedProperty, self).validate(value)
try:
transforms.dumps(value)
except TypeError, e:
raise db.BadValueError(
'%s is not JSON-serializable; error was "%s"' % (value, e))
return value
class _Model(entities.BaseEntity):
"""Abstract base model that handles key calculation."""
# String. Template used in key generation.
_KEY_TEMPLATE = (
'(%(kind)s%(delim)s%(to)s%(delim)s%(intent)s%(delim)s%(enqueue_date)s)'
)
# When the record was enqueued in client code.
enqueue_date = db.DateTimeProperty(required=True)
# String indicating the intent of the notification. Intents are used to group
# and index notifications. Used in key formation; may not contain a colon.
intent = _IntentProperty(required=True)
# Email address used to compose the To:. May house only one value. Subject to
# the restrictions of the underlying App Engine mail library; see the to field
# in
# https://developers.google.com/appengine/docs/python/mail/emailmessagefields.
to = db.StringProperty(required=True)
# When the record was last changed.
_change_date = db.DateTimeProperty(auto_now=True, required=True)
# RetentionPolicy.NAME string. Identifier for the retention policy for the
# Payload.
_retention_policy = db.StringProperty(
required=True, choices=_RETENTION_POLICIES.keys())
def __init__(self, *args, **kwargs):
assert 'key_name' not in kwargs, 'Setting key_name manually not supported'
kwargs['key_name'] = self.key_name(
self._require_kwarg('to', kwargs),
self._require_kwarg('intent', kwargs),
self._require_kwarg('enqueue_date', kwargs))
super(_Model, self).__init__(*args, **kwargs)
@classmethod
def key_name(cls, to, intent, enqueue_date):
_IntentProperty.check(intent)
return cls._KEY_TEMPLATE % {
'delim': _KEY_DELIMITER,
'enqueue_date': _dt_to_epoch_usec(enqueue_date),
'intent': intent,
'kind': cls.kind().lower(),
'to': to,
}
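# Illustrative example (hypothetical values): for the Notification kind,
# key_name('[email protected]', 'invitation', datetime.datetime(1970, 1, 2))
# returns '(notification:[email protected]:invitation:86400000000)'.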
@classmethod
def safe_key(cls, db_key, transform_fn):
_, unsafe_to, intent, serialized_dt = cls._split_key_name(db_key.name())
return db.Key.from_path(
cls.kind(), cls.key_name(
transform_fn(unsafe_to), intent,
_epoch_usec_to_dt(int(serialized_dt))))
@classmethod
def _split_key_name(cls, key_name):
return key_name[1:-1].split(_KEY_DELIMITER)
def _require_kwarg(self, name, kwargs):
"""Gets kwarg with given name or dies."""
value = kwargs.get(name)
assert value, 'Missing required property: ' + name
return value
class Notification(_Model):
# Audit trail of JSON-serializable data. By default Payload.body is deleted
# when it is no longer needed. If you need information for audit purposes,
# pass it here, and the default retention policy will keep it.
audit_trail = _SerializedProperty()
# Email address used to compose the From:. Subject to the sender restrictions
# of the underlying App Engine mail library; see the sender field in
# https://developers.google.com/appengine/docs/python/mail/emailmessagefields.
sender = db.StringProperty(required=True)
# Subject line of the notification.
subject = db.TextProperty(required=True)
# When processing the record fully finished, meaning that the record will
# never be processed by the notification subsystem again. None if the record
# is still in flight. Indicates that the record has either succeeded or failed
# and its retention policy has been applied.
_done_date = db.DateTimeProperty()
# When processing of the record failed and will no longer be retried. None if
# this has not happened. Does not indicate the retention policy has been
# applied; see _done_date.
_fail_date = db.DateTimeProperty()
# When the notification was last placed on the deferred queue.
_last_enqueue_date = db.DateTimeProperty()
# JSON representation of the last recordable exception encountered while
# processing the notification. Format is
# {'type': type_str, 'string': str(exception)}.
_last_exception = _SerializedProperty()
# Number of recoverable failures we've had for this notification.
_recoverable_failure_count = db.IntegerProperty(required=True, default=0)
# When a send_mail call finished for the record and we recorded it in the
# datastore. May be None if this has not yet happened. Does not indicate the
# retention policy has been applied; see _done_date.
_send_date = db.DateTimeProperty()
_PROPERTY_EXPORT_BLACKLIST = [audit_trail, _last_exception, subject]
def for_export(self, transform_fn):
model = super(Notification, self).for_export(transform_fn)
model.to = transform_fn(model.to)
model.sender = transform_fn(model.sender)
return model
class Payload(_Model):
"""The data payload of a Notification.
We extract this data from Notification to increase the total size budget
available to the user, which is capped at 1MB/entity.
"""
# Body of the payload.
body = db.TextProperty()
_PROPERTY_EXPORT_BLACKLIST = [body]
def __init__(self, *args, **kwargs):
super(Payload, self).__init__(*args, **kwargs)
_IntentProperty().validate(kwargs.get('intent'))
custom_module = None
def register_module():
"""Registers the module with the Registry."""
def on_module_enabled():
dashboard.filer.ALLOWED_ASSET_TEXT_BASES = (
dashboard.filer.ALLOWED_ASSET_TEXT_BASES.union(
['views/notifications']))
def on_module_disabled():
dashboard.filer.ALLOWED_ASSET_TEXT_BASES = (
dashboard.filer.ALLOWED_ASSET_TEXT_BASES.difference(
['views/notifications']))
global custom_module
# Avert circular dependency. pylint: disable=g-import-not-at-top
from modules.notifications import cron
from modules.notifications import stats
stats.register_analytic()
cron_handlers = [(
'/cron/process_pending_notifications',
cron.ProcessPendingNotificationsHandler
)]
custom_module = custom_modules.Module(
'Notifications', 'Student notification management system.', cron_handlers,
[],
notify_module_disabled=on_module_disabled,
notify_module_enabled=on_module_enabled
)
class Service(services.Notifications):
def enabled(self):
return custom_module.enabled
def query(self, to, intent):
return Manager.query(to, intent)
def send_async(
self, to, sender, intent, body, subject, audit_trail=None,
retention_policy=None):
return Manager.send_async(
to, sender, intent, body, subject, audit_trail=audit_trail,
retention_policy=retention_policy)
services.notifications = Service()
return custom_module
|
|
import numpy as np
from scipy import ndimage as ndi
from ..measure import block_reduce
from ._geometric import (warp, SimilarityTransform, AffineTransform,
_convert_warp_input, _clip_warp_output,
_to_ndimage_mode)
def resize(image, output_shape, order=1, mode='constant', cval=0, clip=True,
preserve_range=False):
"""Resize image to match a certain size.
Performs interpolation to up-size or down-size images. For down-sampling
N-dimensional images by applying the arithmetic sum or mean, see
`skimage.measure.local_sum` and `skimage.transform.downscale_local_mean`,
respectively.
Parameters
----------
image : ndarray
Input image.
output_shape : tuple or ndarray
Size of the generated output image `(rows, cols[, dim])`. If `dim` is
not provided, the number of channels is preserved. In case the number
of input channels does not equal the number of output channels a
3-dimensional interpolation is applied.
Returns
-------
resized : ndarray
Resized version of the input.
Other parameters
----------------
order : int, optional
The order of the spline interpolation, default is 1. The order has to
be in the range 0-5. See `skimage.transform.warp` for detail.
mode : {'constant', 'edge', 'symmetric', 'reflect', 'wrap'}, optional
Points outside the boundaries of the input are filled according
to the given mode. Modes match the behaviour of `numpy.pad`.
cval : float, optional
Used in conjunction with mode 'constant', the value outside
the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image.
This is enabled by default, since higher order interpolation may
produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input
image is converted according to the conventions of `img_as_float`.
Note
----
Modes 'reflect' and 'symmetric' are similar, but differ in whether the edge
pixels are duplicated during the reflection. As an example, if an array
has values [0, 1, 2] and was padded to the right by four values using
symmetric, the result would be [0, 1, 2, 2, 1, 0, 0], while for reflect it
would be [0, 1, 2, 1, 0, 1, 2].
Examples
--------
>>> from skimage import data
>>> from skimage.transform import resize
>>> image = data.camera()
>>> resize(image, (100, 100)).shape
(100, 100)
"""
rows, cols = output_shape[0], output_shape[1]
orig_rows, orig_cols = image.shape[0], image.shape[1]
row_scale = float(orig_rows) / rows
col_scale = float(orig_cols) / cols
# 3-dimensional interpolation
if len(output_shape) == 3 and (image.ndim == 2
or output_shape[2] != image.shape[2]):
ndi_mode = _to_ndimage_mode(mode)
dim = output_shape[2]
if image.ndim == 2:
image = image[:, :, np.newaxis]
orig_dim = image.shape[2]
dim_scale = float(orig_dim) / dim
map_rows, map_cols, map_dims = np.mgrid[:rows, :cols, :dim]
map_rows = row_scale * (map_rows + 0.5) - 0.5
map_cols = col_scale * (map_cols + 0.5) - 0.5
map_dims = dim_scale * (map_dims + 0.5) - 0.5
coord_map = np.array([map_rows, map_cols, map_dims])
image = _convert_warp_input(image, preserve_range)
out = ndi.map_coordinates(image, coord_map, order=order,
mode=ndi_mode, cval=cval)
_clip_warp_output(image, out, order, mode, cval, clip)
else: # 2-dimensional interpolation
if rows == 1 and cols == 1:
tform = AffineTransform(translation=(orig_cols / 2.0 - 0.5,
orig_rows / 2.0 - 0.5))
else:
# 3 control points necessary to estimate exact AffineTransform
src_corners = np.array([[1, 1], [1, rows], [cols, rows]]) - 1
dst_corners = np.zeros(src_corners.shape, dtype=np.double)
# take into account that 0th pixel is at position (0.5, 0.5)
dst_corners[:, 0] = col_scale * (src_corners[:, 0] + 0.5) - 0.5
dst_corners[:, 1] = row_scale * (src_corners[:, 1] + 0.5) - 0.5
tform = AffineTransform()
tform.estimate(src_corners, dst_corners)
out = warp(image, tform, output_shape=output_shape, order=order,
mode=mode, cval=cval, clip=clip,
preserve_range=preserve_range)
return out
def rescale(image, scale, order=1, mode='constant', cval=0, clip=True,
preserve_range=False):
"""Scale image by a certain factor.
Performs interpolation to upscale or down-scale images. For down-sampling
N-dimensional images with integer factors by applying the arithmetic sum or
mean, see `skimage.measure.local_sum` and
`skimage.transform.downscale_local_mean`, respectively.
Parameters
----------
image : ndarray
Input image.
scale : {float, tuple of floats}
Scale factors. Separate scale factors can be defined as
`(row_scale, col_scale)`.
Returns
-------
scaled : ndarray
Scaled version of the input.
Other parameters
----------------
order : int, optional
The order of the spline interpolation, default is 1. The order has to
be in the range 0-5. See `skimage.transform.warp` for detail.
mode : {'constant', 'edge', 'symmetric', 'reflect', 'wrap'}, optional
Points outside the boundaries of the input are filled according
to the given mode. Modes match the behaviour of `numpy.pad`.
cval : float, optional
Used in conjunction with mode 'constant', the value outside
the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image.
This is enabled by default, since higher order interpolation may
produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input
image is converted according to the conventions of `img_as_float`.
Examples
--------
>>> from skimage import data
>>> from skimage.transform import rescale
>>> image = data.camera()
>>> rescale(image, 0.1).shape
(51, 51)
>>> rescale(image, 0.5).shape
(256, 256)
"""
try:
row_scale, col_scale = scale
except TypeError:
row_scale = col_scale = scale
orig_rows, orig_cols = image.shape[0], image.shape[1]
rows = np.round(row_scale * orig_rows)
cols = np.round(col_scale * orig_cols)
output_shape = (rows, cols)
return resize(image, output_shape, order=order, mode=mode, cval=cval,
clip=clip, preserve_range=preserve_range)
def rotate(image, angle, resize=False, center=None, order=1, mode='constant',
cval=0, clip=True, preserve_range=False):
"""Rotate image by a certain angle around its center.
Parameters
----------
image : ndarray
Input image.
angle : float
Rotation angle in degrees in counter-clockwise direction.
resize : bool, optional
Determine whether the shape of the output image will be automatically
calculated, so the complete rotated image exactly fits. Default is
False.
center : iterable of length 2
The rotation center. If ``center=None``, the image is rotated around
its center, i.e. ``center=(rows / 2 - 0.5, cols / 2 - 0.5)``.
Returns
-------
rotated : ndarray
Rotated version of the input.
Other parameters
----------------
order : int, optional
The order of the spline interpolation, default is 1. The order has to
be in the range 0-5. See `skimage.transform.warp` for detail.
mode : {'constant', 'edge', 'symmetric', 'reflect', 'wrap'}, optional
Points outside the boundaries of the input are filled according
to the given mode. Modes match the behaviour of `numpy.pad`.
cval : float, optional
Used in conjunction with mode 'constant', the value outside
the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image.
This is enabled by default, since higher order interpolation may
produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input
image is converted according to the conventions of `img_as_float`.
Examples
--------
>>> from skimage import data
>>> from skimage.transform import rotate
>>> image = data.camera()
>>> rotate(image, 2).shape
(512, 512)
>>> rotate(image, 2, resize=True).shape
(530, 530)
>>> rotate(image, 90, resize=True).shape
(512, 512)
"""
rows, cols = image.shape[0], image.shape[1]
# rotation around center
if center is None:
center = np.array((cols, rows)) / 2. - 0.5
else:
center = np.asarray(center)
tform1 = SimilarityTransform(translation=-center)
tform2 = SimilarityTransform(rotation=np.deg2rad(angle))
tform3 = SimilarityTransform(translation=center)
tform = tform1 + tform2 + tform3
output_shape = None
if resize:
# determine shape of output image
corners = np.array([[1, 1], [1, rows], [cols, rows], [cols, 1]])
corners = tform(corners - 1)
minc = corners[:, 0].min()
minr = corners[:, 1].min()
maxc = corners[:, 0].max()
maxr = corners[:, 1].max()
out_rows = maxr - minr + 1
out_cols = maxc - minc + 1
output_shape = np.ceil((out_rows, out_cols))
# fit output image in new shape
translation = ((cols - out_cols) / 2., (rows - out_rows) / 2.)
tform4 = SimilarityTransform(translation=translation)
tform = tform4 + tform
return warp(image, tform, output_shape=output_shape, order=order,
mode=mode, cval=cval, clip=clip, preserve_range=preserve_range)
def downscale_local_mean(image, factors, cval=0, clip=True):
"""Down-sample N-dimensional image by local averaging.
The image is padded with `cval` if it is not perfectly divisible by the
integer factors.
In contrast to the 2-D interpolation in `skimage.transform.resize` and
`skimage.transform.rescale` this function may be applied to N-dimensional
images and calculates the local mean of elements in each block of size
`factors` in the input image.
Parameters
----------
image : ndarray
N-dimensional input image.
factors : array_like
Array containing down-sampling integer factor along each axis.
cval : float, optional
Constant padding value if image is not perfectly divisible by the
integer factors.
Returns
-------
image : ndarray
Down-sampled image with same number of dimensions as input image.
Examples
--------
>>> a = np.arange(15).reshape(3, 5)
>>> a
array([[ 0, 1, 2, 3, 4],
[ 5, 6, 7, 8, 9],
[10, 11, 12, 13, 14]])
>>> downscale_local_mean(a, (2, 3))
array([[ 3.5, 4. ],
[ 5.5, 4.5]])
"""
return block_reduce(image, factors, np.mean, cval)
def _swirl_mapping(xy, center, rotation, strength, radius):
x, y = xy.T
x0, y0 = center
rho = np.sqrt((x - x0) ** 2 + (y - y0) ** 2)
# Ensure that the transformation decays to approximately 1/1000-th
# within the specified radius.
radius = radius / 5 * np.log(2)
theta = rotation + strength * \
np.exp(-rho / radius) + \
np.arctan2(y - y0, x - x0)
xy[..., 0] = x0 + rho * np.cos(theta)
xy[..., 1] = y0 + rho * np.sin(theta)
return xy
def swirl(image, center=None, strength=1, radius=100, rotation=0,
output_shape=None, order=1, mode='constant', cval=0, clip=True,
preserve_range=False):
"""Perform a swirl transformation.
Parameters
----------
image : ndarray
Input image.
center : (row, column) tuple or (2,) ndarray, optional
Center coordinate of transformation.
strength : float, optional
The amount of swirling applied.
radius : float, optional
The extent of the swirl in pixels. The effect dies out
rapidly beyond `radius`.
rotation : float, optional
Additional rotation applied to the image.
Returns
-------
swirled : ndarray
Swirled version of the input.
Other parameters
----------------
output_shape : tuple (rows, cols), optional
Shape of the output image generated. By default the shape of the input
image is preserved.
order : int, optional
The order of the spline interpolation, default is 1. The order has to
be in the range 0-5. See `skimage.transform.warp` for detail.
mode : {'constant', 'edge', 'symmetric', 'reflect', 'wrap'}, optional
Points outside the boundaries of the input are filled according
to the given mode. Modes match the behaviour of `numpy.pad`.
cval : float, optional
Used in conjunction with mode 'constant', the value outside
the image boundaries.
clip : bool, optional
Whether to clip the output to the range of values of the input image.
This is enabled by default, since higher order interpolation may
produce values outside the given input range.
preserve_range : bool, optional
Whether to keep the original range of values. Otherwise, the input
image is converted according to the conventions of `img_as_float`.
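Examples
--------
Illustrative only; the default ``output_shape`` preserves the input shape.
>>> from skimage import data
>>> from skimage.transform import swirl
>>> image = data.camera()
>>> swirl(image, strength=10, radius=120).shape
(512, 512)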
"""
if center is None:
center = np.array(image.shape)[:2] / 2
warp_args = {'center': center,
'rotation': rotation,
'strength': strength,
'radius': radius}
return warp(image, _swirl_mapping, map_args=warp_args,
output_shape=output_shape, order=order, mode=mode, cval=cval,
clip=clip, preserve_range=preserve_range)
|
|
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import sessionmaker, scoped_session, column_property, deferred
from sqlalchemy.sql.expression import and_
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy import Column, String, Integer, Boolean, DateTime, Table
from sqlalchemy.orm import relationship, backref, remote, foreign
from sqlalchemy.sql.schema import ForeignKey
from sqlalchemy.dialects import postgresql as pg
from sqlalchemy.ext.hybrid import hybrid_property
from mongosql import MongoSqlBase
from flask_jsontools import JsonSerializableBase
from flask import json
class MongoJsonSerializableBase(JsonSerializableBase):
""" Declarative Base mixin to allow objects serialization
Defines interfaces utilized by :cls:ApiJSONEncoder
"""
mongo_project_properties = None
join_project_properties = None
def _project_join(self, obj, project):
if getattr(obj, '__json__', None):
data = obj.__json__()
else:
data = json.loads(json.dumps(obj))
for name, include in project.items():
if include:
data[name] = getattr(obj, name)
return data
def __json__(self, exluded_keys=set()):
data = super(MongoJsonSerializableBase, self).__json__(exluded_keys)
if self.mongo_project_properties:
for name, include in self.mongo_project_properties.items():
if isinstance(include, dict):
if name in data:
obj = data[name]
if isinstance(obj, list):
data[name] = [self._project_join(i, include) for i in obj]
else:
data[name] = self._project_join(obj, include)
else:
if include:
data[name] = getattr(self, name)
return data
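# Illustrative (assumed) usage of the mixin above: projection settings are
# attached to an instance, and __json__() then includes the named properties:
#   user.mongo_project_properties = {'user_calculated': 1}
#   user.__json__()  # now also contains the 'user_calculated' property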
Base = declarative_base(cls=(MongoSqlBase, MongoJsonSerializableBase))
class User(Base):
__tablename__ = 'u'
id = Column(Integer, primary_key=True)
name = Column(String)
tags = Column(pg.ARRAY(String)) # ARRAY field
age = Column(Integer)
# Maps an SQL expression as a column
age_in_10 = column_property(age + 10, deferred=True)
master_id = Column(ForeignKey('u.id', ondelete='SET NULL'), nullable=True)
master = relationship(lambda: User, remote_side=lambda: User.id, foreign_keys=master_id)
@property
def user_calculated(self):
return self.age + 10
def __repr__(self):
return 'User(id={}, name={!r})'.format(self.id, self.name)
class Article(Base):
__tablename__ = 'a'
id = Column(Integer, primary_key=True)
uid = Column(Integer, ForeignKey(User.id))
title = Column(String)
theme = Column(String)
data = Column(pg.JSON) # JSON field
user = relationship(User, backref=backref('articles'))
@property
def calculated(self):
return len(self.title) + self.uid
@calculated.setter
def calculated(self, value):
self.title += value
@hybrid_property
def hybrid(self):
return self.id > 10 and self.user.age > 18
@hybrid.expression
def hybrid(cls):
return and_(cls.id > 10, cls.user.has(User.age > 18))
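# Illustrative note (standard SQLAlchemy hybrid behaviour, not specific to
# this repo): the property works per instance (article.hybrid -> bool) and
# in queries via the expression above, e.g.
#   session.query(Article).filter(Article.hybrid)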
def __repr__(self):
return 'Article(id={}, uid={!r}, title={!r})'.format(self.id, self.uid, self.title)
class Comment(Base):
__tablename__ = 'c'
id = Column(Integer, primary_key=True)
aid = Column(Integer, ForeignKey(Article.id))
uid = Column(Integer, ForeignKey(User.id))
text = Column(String)
article = relationship(Article, backref=backref("comments"))
user = relationship(User, backref=backref("comments"))
@property
def comment_calc(self):
return self.text[-3:]
def __repr__(self):
return 'Comment(id={}, aid={}, uid={})'.format(self.id, self.aid, self.uid)
class Role(Base):
__tablename__ = 'r'
id = Column(Integer, primary_key=True)
uid = Column(Integer, ForeignKey(User.id))
title = Column(String)
description = Column(String)
user = relationship(User, backref=backref("roles"))
class Edit(Base):
__tablename__ = 'e'
id = Column(Integer, primary_key=True)
uid = Column(Integer, ForeignKey(User.id))
cuid = Column(Integer, ForeignKey(User.id))
description = Column(String)
user = relationship(User, foreign_keys=uid)
creator = relationship(User, foreign_keys=cuid)
class CustomStrategies(Base):
__tablename__ = 'd'
id = Column(Integer, primary_key=True)
# ColumnLoader
login = Column(String)
# DeferredColumnLoader
password = deferred(Column(String))
# Models with many columns, many properties, many foreign keys
# Nothing special; just easier to test 100 things in one test when you have a superabundance of fields
class ManyFieldsModel(Base):
""" A table with many, many columns
Goal: convenience to test many filters in one query
"""
# A model with many fields for testing huge filters
__tablename__ = 'm'
id = Column(Integer, primary_key=True)
# Integers
a = Column(Integer)
b = Column(Integer)
c = Column(Integer)
d = Column(Integer)
e = Column(Integer)
f = Column(Integer)
g = Column(Integer)
h = Column(Integer)
i = Column(Integer)
j = Column(Integer)
k = Column(Integer)
# Arrays
aa = Column(pg.ARRAY(String))
bb = Column(pg.ARRAY(String))
cc = Column(pg.ARRAY(String))
dd = Column(pg.ARRAY(String))
ee = Column(pg.ARRAY(String))
ff = Column(pg.ARRAY(String))
gg = Column(pg.ARRAY(String))
hh = Column(pg.ARRAY(String))
ii = Column(pg.ARRAY(String))
jj = Column(pg.ARRAY(String))
kk = Column(pg.ARRAY(String))
# JSONs
j_a = Column(pg.JSON)
j_b = Column(pg.JSON)
j_c = Column(pg.JSON)
j_d = Column(pg.JSON)
j_e = Column(pg.JSON)
j_f = Column(pg.JSON)
j_g = Column(pg.JSON)
j_h = Column(pg.JSON)
j_i = Column(pg.JSON)
j_j = Column(pg.JSON)
j_k = Column(pg.JSON)
class ManyPropertiesModel(Base):
""" A table with many properties """
__tablename__ = 'mp'
id = Column(Integer, ForeignKey("gw.id"), primary_key=True)
@property
def p_readonly(self):
return 1
@property
def p_writable(self):
return 2
@p_writable.setter
def p_writable(self, v):
return v
@property
def _p_invisible(self):
return None
@hybrid_property
def hp_readonly(self):
return self.id
@hybrid_property
def hp_writable(self):
return self.id
@hp_writable.setter
def hp_writable(self, v):
self.id = v
class ManyForeignKeysModel(Base):
""" A table with many foreign keys """
__tablename__ = 'mf'
id = Column(Integer, ForeignKey("gw.id"), primary_key=True)
name = Column(String)
fname = Column(String)
lname = Column(String)
# One-to-One relationships
user_1_id = Column(Integer, ForeignKey("u.id"))
user_2_id = Column(Integer, ForeignKey("u.id"))
user_3_id = Column(Integer, ForeignKey("u.id"))
user_1 = relationship(User, foreign_keys=user_1_id)
user_2 = relationship(User, foreign_keys=user_2_id)
user_3 = relationship(User, foreign_keys=user_3_id)
# Test many-to-many relationships
# MongoSQL should be able to work with intermediate M2M tables when loading relationships
class GirlWatcherFavorites(Base):
""" The M2M intermediate table """
__tablename__ = 'gwf'
gw_id = Column(Integer, ForeignKey("gw.id"), primary_key=True)
user_id = Column(Integer, ForeignKey("u.id"), primary_key=True)
best = Column(Boolean)
class GirlWatcher(Base):
""" Complex joins, custom conditions, many-to-many
Goal: test how MongoSql handles many-to-many relationships
"""
__tablename__ = 'gw'
id = Column(Integer, primary_key=True)
name = Column(String)
age = Column(Integer)
favorite_id = Column(Integer, ForeignKey(User.id))
favorite = relationship(User, foreign_keys=favorite_id)
best = relationship(User, uselist=True, viewonly=True,
secondary=GirlWatcherFavorites.__table__,
primaryjoin= and_(id == GirlWatcherFavorites.gw_id,
GirlWatcherFavorites.best == True),
secondaryjoin= GirlWatcherFavorites.user_id == User.id,
)
good = relationship(User, uselist=True, viewonly=True,
secondary=GirlWatcherFavorites.__table__,
primaryjoin= and_(id == GirlWatcherFavorites.gw_id,
GirlWatcherFavorites.best == False),
secondaryjoin= GirlWatcherFavorites.user_id == User.id,
)
best_names = association_proxy('best', 'name')
good_names = association_proxy('good', 'name')
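# Illustrative, based on content_samples() below: GirlWatcher #1 ends up with
# good_names == ['b'] and best_names == ['c'], while GirlWatcher #2 gets
# good_names == ['a', 'c'] and best_names == ['b'].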
class GirlWatcherManager(Base):
""" Someone to relate GirlWatcher to """
__tablename__ = 'gwm'
id = Column(Integer, primary_key=True)
name = Column(String)
girlwatcher_id = Column(Integer, ForeignKey(GirlWatcher.id))
girlwatcher = relationship(GirlWatcher, foreign_keys=girlwatcher_id, backref=backref('manager'))
# Test how MongoSQL sees mixins
# All these columns are supposed to be visible and manageable
class CreationTimeMixin:
""" Inheritance tests: a mixin """
ctime = Column(DateTime, doc="Creation time")
@declared_attr
def cuid(cls):
return Column(Integer, ForeignKey(User.id, ondelete='SET NULL'),
nullable=True, doc="Created by")
@declared_attr
def cuser(cls):
return relationship('User', remote_side=User.id,
foreign_keys='{}.cuid'.format(cls.__name__), doc="Created by")
class SpecialMixin:
@property
def get_42(self):
return 42
@hybrid_property
def hyb_big_id(self):
return self.id > 1000
@hyb_big_id.expression
def hyb_big_id(cls):
return and_(cls.id > 1000)
# Test how MongoSQL deals with inheritance and polymorphic identity
# MongoSQL shall have no trouble working with inherited fields
class CarArticle(Article, CreationTimeMixin, SpecialMixin):
""" Inheritance tests: inherit attrs """
__tablename__ = 'ia'
id = Column(Integer, ForeignKey(Article.id), primary_key=True)
car = relationship('Cars', back_populates='article')
class Cars(Base):
""" Inheritance tests: joined table inheritance + mixin """
__tablename__ = 'ic' # inheritance: cars
id = Column(Integer, primary_key=True)
type = Column(String(50))
make = Column(String(50))
model = Column(String(50))
horses = Column(Integer)
article_id = Column(ForeignKey(CarArticle.id))
article = relationship(CarArticle, back_populates='car')
__mapper_args__ = {
'polymorphic_identity': 'car',
'polymorphic_on': type
}
class GasolineCar(Cars):
""" Inheritance tests: joined table inheritance """
__tablename__ = 'icg'
id = Column(Integer, ForeignKey(Cars.id), primary_key=True)
engine_volume = Column(Integer)
__mapper_args__ = {
'polymorphic_identity': 'gasoline',
}
class ElectricCar(Cars):
""" Inheritance tests: joined table inheritance """
__tablename__ = 'ice'
id = Column(Integer, ForeignKey(Cars.id), primary_key=True)
batt_capacity = Column(Integer)
__mapper_args__ = {
'polymorphic_identity': 'electric',
}
class ConfiguredLazyloadModel(Base):
""" A model with relationhips configured to lazy=joined """
__tablename__ = 'll'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey("u.id"))
article_id = Column(Integer, ForeignKey("a.id"))
# lazy
user = relationship(User, foreign_keys=user_id, lazy='joined')
article = relationship(Article, foreign_keys=article_id, lazy='joined')
# not lazy
comment_id = Column(Integer, ForeignKey("c.id"))
comment = relationship(Comment, foreign_keys=comment_id)
class DecoratedJSONB(sqlalchemy.types.TypeDecorator):
# A decorated JSONB. MongoSQL bags might fail to detect it?
impl = pg.JSONB
class DecoratedARRAY(sqlalchemy.types.TypeDecorator):
# A decorated ARRAY. MongoSQL bags might fail to detect it?
impl = pg.ARRAY
class CollectionOfSpecialCases(Base):
__tablename__ = 'ww'
id = Column(Integer, primary_key=True)
# Decorated fields. MongoPropertyBags might not detect them successfully.
decorated_jsonb = Column(DecoratedJSONB)
decorated_mutable_jsonb = Column(MutableDict.as_mutable(DecoratedJSONB))
decorated_array = Column(DecoratedARRAY(Integer))
def init_database(autoflush=True):
""" Init DB
:rtype: (sqlalchemy.engine.Engine, sqlalchemy.orm.Session)
"""
engine = create_engine('postgresql://postgres:postgres@localhost/test_mongosql', echo=False)
Session = sessionmaker(autocommit=autoflush, autoflush=autoflush, bind=engine)
return engine, Session
def create_all(engine):
""" Create all tables """
Base.metadata.create_all(bind=engine)
def drop_all(engine):
""" Drop all tables """
Base.metadata.drop_all(bind=engine)
def content_samples():
""" Generate content samples """
return [[
User(id=1, name='a', age=18, tags=['1', 'a']),
User(id=2, name='b', age=18, tags=['2', 'a', 'b']),
User(id=3, name='c', age=16, tags=['3', 'a', 'b', 'c']),
Article(id=10, uid=1, title='10', data={'rating': 5 , 'o': {'a': True}}),
Article(id=11, uid=1, title='11', data={'rating': 5.5, 'o': {'a': True}}),
Article(id=12, uid=1, title='12', data={'rating': 6 , 'o': {'a': False}}),
Article(id=20, uid=2, title='20', data={'rating': 4.5, 'o': {'a': False}}),
Article(id=21, uid=2, title='21', data={'rating': 4 , 'o': {'z': True}}),
Article(id=30, uid=3, title='30', data={ 'o': {'z': False}}),
Comment(id=100, aid=10, uid=1, text='10-a'),
Comment(id=101, aid=10, uid=2, text='10-b'),
Comment(id=102, aid=10, uid=3, text='10-c'),
Comment(id=103, aid=11, uid=1, text='11-a'),
Comment(id=104, aid=11, uid=2, text='11-b'),
Comment(id=105, aid=12, uid=1, text='12-a'),
Comment(id=106, aid=20, uid=1, text='20-a-ONE'),
Comment(id=107, aid=20, uid=1, text='20-a-TWO'),
Comment(id=108, aid=21, uid=1, text='21-a'),
GirlWatcher(id=1, name='Fred', age=65, favorite_id=3),
GirlWatcher(id=2, name='Ban', age=55, favorite_id=2),
GirlWatcherManager(id=1, name='Mr. One', girlwatcher_id=1),
GirlWatcherManager(id=2, name='Mr. Two', girlwatcher_id=2),
], [
# GirlWatcher #1: good ['b'], best ['c']
GirlWatcherFavorites(gw_id=1, user_id=2, best=False),
GirlWatcherFavorites(gw_id=1, user_id=3, best=True),
# GirlWatcher #2: good ['a', 'c'], best ['b']
GirlWatcherFavorites(gw_id=2, user_id=1, best=False),
GirlWatcherFavorites(gw_id=2, user_id=2, best=True),
GirlWatcherFavorites(gw_id=2, user_id=3, best=False),
]]
def content_samples_random(n_users, n_articles_per_user, n_comments_per_article):
""" Generate lots of users with lots of articles with lots of comments """
ret = []
for i in range(n_users):
ret.append(User(name='X', age=50,
articles=[
Article(title='X'*20,
comments=[
Comment(text='X'*100)
for ic in range(n_comments_per_article)
])
for ia in range(n_articles_per_user)
]))
return ret
def get_big_db_for_benchmarks(n_users, n_articles_per_user, n_comments_per_article):
# Connect, create tables
engine, Session = init_database(autoflush=True)
drop_all(engine)
create_all(engine)
# Fill DB
ssn = Session()
ssn.begin()
ssn.add_all(content_samples_random(n_users, n_articles_per_user, n_comments_per_article))
ssn.commit()
# Done
return engine, Session
def get_empty_db(autoflush=True):
# Connect, create tables
engine, Session = init_database(autoflush=autoflush)
drop_all(engine)
create_all(engine)
return engine, Session
def get_working_db_for_tests(autoflush=True):
# Connect, create tables
engine, Session = get_empty_db(autoflush=autoflush)
# Fill DB
ssn = Session()
for entities_list in content_samples():
if autoflush:
ssn.begin()
ssn.add_all(entities_list)
ssn.commit()
# Done
return engine, Session
if __name__ == '__main__':
# Developer's playground!
import logging
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('parso').setLevel(logging.INFO) # noisy!
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
engine, Session = get_working_db_for_tests()
# Useful imports and variables
ssn = Session()
from util import stmt2sql, q2sql
from mongosql import MongoQuery
from sqlalchemy import inspect, func
from sqlalchemy.orm import Query
from sqlalchemy.orm.base import instance_state
from sqlalchemy.orm import Load, defaultload, joinedload, lazyload, immediateload, selectinload
from sqlalchemy.orm import raiseload, noload, load_only, defer, undefer
from sqlalchemy.orm import aliased, contains_eager, contains_alias
print('\n'*10)
from IPython import embed ; embed()
|
|
# Copyright 2007-2009 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.SeqIO support for the "genbank" and "embl" file formats.
You are expected to use this module via the Bio.SeqIO functions.
Note that internally this module calls Bio.GenBank to do the actual
parsing of both GenBank and EMBL files.
See also:
International Nucleotide Sequence Database Collaboration
http://www.insdc.org/
GenBank
http://www.ncbi.nlm.nih.gov/Genbank/
EMBL Nucleotide Sequence Database
http://www.ebi.ac.uk/embl/
DDBJ (DNA Data Bank of Japan)
http://www.ddbj.nig.ac.jp/
"""
from Bio.Seq import UnknownSeq
from Bio.GenBank.Scanner import GenBankScanner, EmblScanner
from Bio import Alphabet
from Interfaces import SequentialSequenceWriter
from Bio import SeqFeature
# NOTE
# ====
# The "brains" for parsing GenBank and EMBL files (and any
# other flat file variants from the INSDC in future) is in
# Bio.GenBank.Scanner (plus the _FeatureConsumer in Bio.GenBank)
# However, all the writing code is in this file.
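# Typical usage goes through Bio.SeqIO rather than calling these iterators
# directly, e.g. (illustrative; "example.gbk" is a hypothetical file):
#   from Bio import SeqIO
#   for record in SeqIO.parse(open("example.gbk"), "genbank"):
#       print record.id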
def GenBankIterator(handle):
"""Breaks up a Genbank file into SeqRecord objects.
Every section from the LOCUS line to the terminating // becomes
a single SeqRecord with associated annotation and features.
Note that for genomes or chromosomes, there is typically only
one record."""
#This calls a generator function:
return GenBankScanner(debug=0).parse_records(handle)
def EmblIterator(handle):
"""Breaks up an EMBL file into SeqRecord objects.
Every section from the ID line to the terminating // becomes
a single SeqRecord with associated annotation and features.
Note that for genomes or chromosomes, there is typically only
one record."""
#This calls a generator function:
return EmblScanner(debug=0).parse_records(handle)
def GenBankCdsFeatureIterator(handle, alphabet=Alphabet.generic_protein):
"""Breaks up a Genbank file into SeqRecord objects for each CDS feature.
Every section from the LOCUS line to the terminating // can contain
many CDS features. These are returned with the stated amino acid
translation sequence (if given).
"""
#This calls a generator function:
return GenBankScanner(debug=0).parse_cds_features(handle, alphabet)
def EmblCdsFeatureIterator(handle, alphabet=Alphabet.generic_protein):
"""Breaks up a EMBL file into SeqRecord objects for each CDS feature.
Every section from the LOCUS line to the terminating // can contain
many CDS features. These are returned as with the stated amino acid
translation sequence (if given).
"""
#This calls a generator function:
return EmblScanner(debug=0).parse_cds_features(handle, alphabet)
def _insdc_feature_position_string(pos, offset=0):
"""Build a GenBank/EMBL position string (PRIVATE).
Use offset=1 to add one to convert a start position from python counting.
"""
if isinstance(pos, SeqFeature.ExactPosition):
return "%i" % (pos.position+offset)
elif isinstance(pos, SeqFeature.WithinPosition):
return "(%i.%i)" % (pos.position + offset,
pos.position + pos.extension + offset)
elif isinstance(pos, SeqFeature.BetweenPosition):
return "(%i^%i)" % (pos.position + offset,
pos.position + pos.extension + offset)
elif isinstance(pos, SeqFeature.BeforePosition):
return "<%i" % (pos.position + offset)
elif isinstance(pos, SeqFeature.AfterPosition):
return ">%i" % (pos.position + offset)
elif isinstance(pos, SeqFeature.OneOfPosition):
return "one-of(%s)" \
% ",".join([_insdc_feature_position_string(p,offset) \
for p in pos.position_choices])
elif isinstance(pos, SeqFeature.AbstractPosition):
raise NotImplementedError("Please report this as a bug in Biopython.")
else:
raise ValueError("Expected a SeqFeature position object.")
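# Illustrative sketch (not part of the original module): how the helper
# above renders the different fuzzy position types.  The constructor
# signatures used here (ExactPosition(pos), BeforePosition(pos),
# WithinPosition(pos, extension)) are those of the old Bio.SeqFeature API
# that this module was written against.
def _demo_position_strings():
    """Return (rendered, expected) string pairs for a few positions (demo only)."""
    return [
        (_insdc_feature_position_string(SeqFeature.ExactPosition(11), +1), "12"),
        (_insdc_feature_position_string(SeqFeature.BeforePosition(11), +1), "<12"),
        (_insdc_feature_position_string(SeqFeature.AfterPosition(11), +1), ">12"),
        (_insdc_feature_position_string(SeqFeature.WithinPosition(11, 3), +1), "(12.15)"),
    ]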
def _insdc_location_string_ignoring_strand_and_subfeatures(feature):
if feature.ref:
ref = "%s:" % feature.ref
else:
ref = ""
assert not feature.ref_db
if feature.location.start==feature.location.end \
and isinstance(feature.location.end, SeqFeature.ExactPosition):
#Special case, 12^13 gets mapped to location 12:12
#(a zero length slice, meaning the point between two letters)
return "%s%i^%i" % (ref, feature.location.end.position,
feature.location.end.position+1)
else:
#Typical case, e.g. 12..15 gets mapped to 11:15
return ref \
+ _insdc_feature_position_string(feature.location.start, +1) \
+ ".." + \
_insdc_feature_position_string(feature.location.end)
def _insdc_feature_location_string(feature):
"""Build a GenBank/EMBL location string from a SeqFeature (PRIVATE)."""
# Have a choice of how to show joins on the reverse complement strand,
# complement(join(1,10),(20,100)) vs join(complement(20,100),complement(1,10))
# Notice that the order of the entries gets flipped!
#
# GenBank and EMBL would both now use complement(join(1,10),(20,100))
# which is shorter at least.
#
# In the above situations, we expect the parent feature and the two children
# to all be marked as strand==-1, and in the order 0:10 then 19:100.
#
# Also need to consider dual-strand examples like these from the Arabidopsis
# thaliana chloroplast NC_000932: join(complement(69611..69724),139856..140650)
# gene ArthCp047, GeneID:844801 or its CDS which is even better due to a splice:
# join(complement(69611..69724),139856..140087,140625..140650)
# protein NP_051038.1 GI:7525057
#
if not feature.sub_features:
#Non-recursive.
#assert feature.location_operator == "", \
# "%s has no subfeatures but location_operator %s" \
# % (repr(feature), feature.location_operator)
location = _insdc_location_string_ignoring_strand_and_subfeatures(feature)
if feature.strand == -1:
location = "complement(%s)" % location
return location
# As noted above, treat reverse complement strand features carefully:
if feature.strand == -1:
for f in feature.sub_features:
assert f.strand == -1
return "complement(%s(%s))" \
% (feature.location_operator,
",".join(_insdc_location_string_ignoring_strand_and_subfeatures(f) \
for f in feature.sub_features))
#if feature.strand == +1:
# for f in feature.sub_features:
# assert f.strand == +1
#This covers typical forward strand features, and also an evil mixed strand:
assert feature.location_operator != ""
return "%s(%s)" % (feature.location_operator,
",".join([_insdc_feature_location_string(f) \
for f in feature.sub_features]))
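# Illustrative sketch (not part of the original module): a simple
# reverse-strand feature comes out wrapped in complement(...).  The
# SeqFeature/FeatureLocation arguments follow the old Bio.SeqFeature API
# assumed by this module.
def _demo_location_string():
    """Return the INSDC location string for a reverse-strand feature (demo only)."""
    feature = SeqFeature.SeqFeature(SeqFeature.FeatureLocation(0, 10),
                                    type="gene", strand=-1)
    return _insdc_feature_location_string(feature)  # expected "complement(1..10)"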
class GenBankWriter(SequentialSequenceWriter):
HEADER_WIDTH = 12
MAX_WIDTH = 80
QUALIFIER_INDENT = 21
def _write_single_line(self, tag, text):
"""Used in the 'header' of each GenBank record."""
assert len(tag) < self.HEADER_WIDTH
assert len(text) < self.MAX_WIDTH - self.HEADER_WIDTH, \
"Annotation %s too long for %s line" % (repr(text), tag)
self.handle.write("%s%s\n" % (tag.ljust(self.HEADER_WIDTH),
text.replace("\n"," ")))
def _write_multi_line(self, tag, text):
"""Used in the 'header' of each GenBank record."""
#TODO - Do the line splitting while preserving white space?
max_len = self.MAX_WIDTH - self.HEADER_WIDTH
assert len(tag) < self.HEADER_WIDTH
text = text.strip()
if len(text) < max_len:
self._write_single_line(tag, text)
return
words = text.split()
assert max([len(w) for w in words]) < max_len, \
"Your description cannot be broken into nice lines!"
text = ""
while words and len(text) + 1 + len(words[0]) < max_len:
text += " " + words.pop(0)
text = text.strip()
assert len(text) < max_len
self._write_single_line(tag, text)
while words:
text = ""
while words and len(text) + 1 + len(words[0]) < max_len:
text += " " + words.pop(0)
text = text.strip()
assert len(text) < max_len
self._write_single_line("", text)
assert not words
def _write_multi_entries(self, tag, text_list):
#used for DBLINK and any similar later line types.
#If the list of strings is empty, nothing is written.
for i, text in enumerate(text_list):
if i==0:
self._write_single_line(tag, text)
else:
self._write_single_line("", text)
def _get_date(self, record) :
default = "01-JAN-1980"
try :
date = record.annotations["date"]
except KeyError :
return default
#Cope with a list of one string:
if isinstance(date, list) and len(date)==1 :
date = date[0]
#TODO - allow a Python date object
if not isinstance(date, str) or len(date) != 11 \
or date[2] != "-" or date[6] != "-" \
or not date[:2].isdigit() or not date[7:].isdigit() \
or int(date[:2]) > 31 \
or date[3:6] not in ["JAN","FEB","MAR","APR","MAY","JUN",
"JUL","AUG","SEP","OCT","NOV","DEC"] :
#TODO - Check is a valid date (e.g. not 31 Feb)
return default
return date
def _write_the_first_line(self, record):
"""Write the LOCUS line."""
locus = record.name
if not locus or locus == "<unknown name>":
locus = record.id
if not locus or locus == "<unknown id>":
locus = self._get_annotation_str(record, "accession", just_first=True)
if len(locus) > 16:
raise ValueError("Locus identifier %s is too long" % repr(locus))
if len(record) > 99999999999:
#Currently GenBank only officially supports up to 350000, but
#the length field can take eleven digits
raise ValueError("Sequence too long!")
#Get the base alphabet (underneath any Gapped or StopCodon encoding)
a = Alphabet._get_base_alphabet(record.seq.alphabet)
if not isinstance(a, Alphabet.Alphabet):
raise TypeError("Invalid alphabet")
elif isinstance(a, Alphabet.ProteinAlphabet):
units = "aa"
elif isinstance(a, Alphabet.NucleotideAlphabet):
units = "bp"
else:
#Must be something like NucleotideAlphabet or
#just the generic Alphabet (default for fasta files)
raise ValueError("Need a Nucleotide or Protein alphabet")
#Get the molecule type
#TODO - record this explicitly in the parser?
if isinstance(a, Alphabet.ProteinAlphabet):
mol_type = ""
elif isinstance(a, Alphabet.DNAAlphabet):
mol_type = "DNA"
elif isinstance(a, Alphabet.RNAAlphabet):
mol_type = "RNA"
else:
#Must be something like NucleotideAlphabet or
#just the generic Alphabet (default for fasta files)
raise ValueError("Need a DNA, RNA or Protein alphabet")
try:
division = record.annotations["data_file_division"]
except KeyError:
division = "UNK"
if division not in ["PRI","ROD","MAM","VRT","INV","PLN","BCT",
"VRL","PHG","SYN","UNA","EST","PAT","STS",
"GSS","HTG","HTC","ENV","CON"]:
division = "UNK"
assert len(units) == 2
assert len(division) == 3
#TODO - date
#TODO - mol_type
line = "LOCUS %s %s %s %s %s %s\n" \
% (locus.ljust(16),
str(len(record)).rjust(11),
units,
mol_type.ljust(6),
division,
self._get_date(record))
assert len(line) == 79+1, repr(line) #plus one for new line
assert line[12:28].rstrip() == locus, \
'LOCUS line does not contain the locus at the expected position:\n' + line
assert line[28:29] == " "
assert line[29:40].lstrip() == str(len(record)), \
'LOCUS line does not contain the length at the expected position:\n' + line
#Tests copied from Bio.GenBank.Scanner
assert line[40:44] in [' bp ', ' aa '] , \
'LOCUS line does not contain size units at expected position:\n' + line
assert line[44:47] in [' ', 'ss-', 'ds-', 'ms-'], \
'LOCUS line does not have valid strand type (Single stranded, ...):\n' + line
assert line[47:54].strip() == "" \
or line[47:54].strip().find('DNA') != -1 \
or line[47:54].strip().find('RNA') != -1, \
'LOCUS line does not contain valid sequence type (DNA, RNA, ...):\n' + line
assert line[54:55] == ' ', \
'LOCUS line does not contain space at position 55:\n' + line
assert line[55:63].strip() in ['','linear','circular'], \
'LOCUS line does not contain valid entry (linear, circular, ...):\n' + line
assert line[63:64] == ' ', \
'LOCUS line does not contain space at position 64:\n' + line
assert line[67:68] == ' ', \
'LOCUS line does not contain space at position 68:\n' + line
assert line[70:71] == '-', \
'LOCUS line does not contain - at position 71 in date:\n' + line
assert line[74:75] == '-', \
'LOCUS line does not contain - at position 75 in date:\n' + line
self.handle.write(line)
def _get_annotation_str(self, record, key, default=".", just_first=False):
"""Get an annotation dictionary entry (as a string).
Some entries are lists, in which case if just_first=True the first entry
is returned. If just_first=False (default) this verifies there is only
one entry before returning it."""
try:
answer = record.annotations[key]
except KeyError:
return default
if isinstance(answer, list):
if not just_first : assert len(answer) == 1
return str(answer[0])
else:
return str(answer)
def _write_comment(self, record):
#This is a bit complicated due to the range of possible
#ways people might have done their annotation...
#Currently the parser uses a single string with newlines.
#A list of lines is also reasonable.
#A single (long) string is perhaps the most natural of all.
#This means we may need to deal with line wrapping.
comment = record.annotations["comment"]
if isinstance(comment, basestring):
lines = comment.split("\n")
elif isinstance(comment, list) or isinstance(comment, tuple):
lines = comment
else:
raise ValueError("Could not understand comment annotation")
self._write_multi_line("COMMENT",lines[0])
for line in lines[1:]:
self._write_multi_line("",line)
def _write_contig(self, record):
#TODO - Merge this with _write_multi_line method?
#It would need the addition of the comma splitting logic...
#are there any other cases where that would be sensible?
max_len = self.MAX_WIDTH - self.HEADER_WIDTH
contig = record.annotations.get("contig","")
if isinstance(contig, list) or isinstance(contig, tuple):
contig = "".join(contig)
contig = self.clean(contig)
i=0
while contig:
if len(contig) > max_len:
#Split lines at the commas
pos = contig[:max_len-1].rfind(",")
if pos==-1:
raise ValueError("Could not break up CONTIG")
text, contig = contig[:pos+1], contig[pos+1:]
else:
text, contig = contig, ""
if i==0:
self._write_single_line("CONTIG",text)
else:
self._write_single_line("",text)
i+=1
def _write_sequence(self, record):
#Loosely based on code from Howard Salis
#TODO - Force lower case?
LETTERS_PER_LINE = 60
SEQUENCE_INDENT = 9
if isinstance(record.seq, UnknownSeq):
#We have already recorded the length, and there is no need
#to record a long sequence of NNNNNNN...NNN or whatever.
if "contig" in record.annotations:
self._write_contig(record)
else:
self.handle.write("ORIGIN\n")
return
data = self._get_seq_string(record) #Catches sequence being None
seq_len = len(data)
self.handle.write("ORIGIN\n")
for line_number in range(0,seq_len,LETTERS_PER_LINE):
self.handle.write(str(line_number+1).rjust(SEQUENCE_INDENT))
for words in range(line_number,min(line_number+LETTERS_PER_LINE,seq_len),10):
self.handle.write(" %s" % data[words:words+10])
self.handle.write("\n")
def write_record(self, record):
"""Write a single record to the output file."""
handle = self.handle
self._write_the_first_line(record)
accession = self._get_annotation_str(record, "accession",
record.id.split(".",1)[0],
just_first=True)
acc_with_version = accession
if record.id.startswith(accession+"."):
try:
acc_with_version = "%s.%i" \
% (accession, int(record.id.split(".",1)[1]))
except ValueError:
pass
gi = self._get_annotation_str(record, "gi", just_first=True)
descr = record.description
if descr == "<unknown description>" : descr = "."
self._write_multi_line("DEFINITION", descr)
self._write_single_line("ACCESSION", accession)
if gi != ".":
self._write_single_line("VERSION", "%s GI:%s" % (acc_with_version,gi))
else:
self._write_single_line("VERSION", "%s" % (acc_with_version))
#The NCBI only expect two types of link so far,
#e.g. "Project:28471" and "Trace Assembly Archive:123456"
#TODO - Filter the dbxrefs list to just these?
self._write_multi_entries("DBLINK", record.dbxrefs)
try:
#List of strings
keywords = "; ".join(record.annotations["keywords"])
except KeyError:
keywords = "."
self._write_multi_line("KEYWORDS", keywords)
if "segment" in record.annotations:
#Deal with SEGMENT line found only in segmented records,
#e.g. AH000819
segment = record.annotations["segment"]
if isinstance(segment, list):
assert len(segment)==1, segment
segment = segment[0]
self._write_single_line("SEGMENT", segment)
self._write_multi_line("SOURCE", \
self._get_annotation_str(record, "source"))
#The ORGANISM line MUST be a single line, as any continuation is the taxonomy
org = self._get_annotation_str(record, "organism")
if len(org) > self.MAX_WIDTH - self.HEADER_WIDTH:
org = org[:self.MAX_WIDTH - self.HEADER_WIDTH-4]+"..."
self._write_single_line(" ORGANISM", org)
try:
#List of strings
taxonomy = "; ".join(record.annotations["taxonomy"])
except KeyError:
taxonomy = "."
self._write_multi_line("", taxonomy)
#TODO - References...
if "comment" in record.annotations:
self._write_comment(record)
handle.write("FEATURES Location/Qualifiers\n")
for feature in record.features:
self._write_feature(feature)
self._write_sequence(record)
handle.write("//\n")
def _write_feature_qualifier(self, key, value=None, quote=None):
if not value:
self.handle.write("%s/%s\n" % (" "*self.QUALIFIER_INDENT, key))
return
#Quick hack with no line wrapping, may be useful for testing:
#self.handle.write('%s/%s="%s"\n' % (" "*self.QUALIFIER_INDENT, key, value))
if quote is None:
#Try to mimic unwritten rules about when quotes can be left out:
if isinstance(value, int) or isinstance(value, long):
quote = False
else:
quote = True
if quote:
line = '%s/%s="%s"' % (" "*self.QUALIFIER_INDENT, key, value)
else:
line = '%s/%s=%s' % (" "*self.QUALIFIER_INDENT, key, value)
if len(line) < self.MAX_WIDTH:
self.handle.write(line+"\n")
return
while line.lstrip():
if len(line) < self.MAX_WIDTH:
self.handle.write(line+"\n")
return
#Insert line break...
for index in range(min(len(line)-1,self.MAX_WIDTH),self.QUALIFIER_INDENT+1,-1):
if line[index]==" " : break
if line[index] != " ":
#No nice place to break...
index = self.MAX_WIDTH
self.handle.write(line[:index] + "\n")
line = " "*self.QUALIFIER_INDENT + line[index:].lstrip()
def _wrap_location(self, location):
"""Split a feature location into lines (break at commas)."""
#TODO - Rewrite this not to recurse!
length = self.MAX_WIDTH - self.QUALIFIER_INDENT
if len(location) <= length:
return location
index = location[:length].rfind(",")
if index == -1:
#No good place to split (!)
import warnings
warnings.warn("Couldn't split location:\n%s" % location)
return location
return location[:index+1] + "\n" + \
" "*self.QUALIFIER_INDENT + self._wrap_location(location[index+1:])
def _write_feature(self, feature):
"""Write a single SeqFeature object to features table."""
assert feature.type, feature
#TODO - Line wrapping for long locations!
location = _insdc_feature_location_string(feature)
line = (" %s " % feature.type)[:self.QUALIFIER_INDENT] \
+ self._wrap_location(location) + "\n"
self.handle.write(line)
#Now the qualifiers...
for key, values in feature.qualifiers.iteritems():
if isinstance(values, list) or isinstance(values, tuple):
for value in values:
self._write_feature_qualifier(key, value)
elif values:
#String, int, etc
self._write_feature_qualifier(key, values)
else:
#e.g. a /pseudo entry
self._write_feature_qualifier(key)
if __name__ == "__main__":
print "Quick self test"
import os
from StringIO import StringIO
def compare_record(old, new):
if old.id != new.id and old.name != new.name:
raise ValueError("'%s' or '%s' vs '%s' or '%s' records" \
% (old.id, old.name, new.id, new.name))
if len(old.seq) != len(new.seq):
raise ValueError("%i vs %i" % (len(old.seq), len(new.seq)))
if str(old.seq).upper() != str(new.seq).upper():
if len(old.seq) < 200:
raise ValueError("'%s' vs '%s'" % (old.seq, new.seq))
else:
raise ValueError("'%s...' vs '%s...'" % (old.seq[:100], new.seq[:100]))
if old.features and new.features:
return compare_features(old.features, new.features)
#Just insist on at least one word in common:
if (old.description or new.description) \
and not set(old.description.split()).intersection(new.description.split()):
raise ValueError("%s versus %s" \
% (repr(old.description), repr(new.description)))
#TODO - check annotation
if "contig" in old.annotations:
assert old.annotations["contig"] == \
new.annotations["contig"]
return True
def compare_records(old_list, new_list):
"""Check two lists of SeqRecords agree, raises a ValueError if mismatch."""
if len(old_list) != len(new_list):
raise ValueError("%i vs %i records" % (len(old_list), len(new_list)))
for old, new in zip(old_list, new_list):
if not compare_record(old,new):
return False
return True
def compare_feature(old, new, ignore_sub_features=False):
"""Check two SeqFeatures agree."""
if old.type != new.type:
raise ValueError("Type %s versus %s" % (old.type, new.type))
if old.location.nofuzzy_start != new.location.nofuzzy_start \
or old.location.nofuzzy_end != new.location.nofuzzy_end:
raise ValueError("%s versus %s:\n%s\nvs:\n%s" \
% (old.location, new.location, str(old), str(new)))
if old.strand != new.strand:
raise ValueError("Different strand:\n%s\nvs:\n%s" % (str(old), str(new)))
if old.location.start != new.location.start:
raise ValueError("Start %s versus %s:\n%s\nvs:\n%s" \
% (old.location.start, new.location.start, str(old), str(new)))
if old.location.end != new.location.end:
raise ValueError("End %s versus %s:\n%s\nvs:\n%s" \
% (old.location.end, new.location.end, str(old), str(new)))
if not ignore_sub_features:
if len(old.sub_features) != len(new.sub_features):
raise ValueError("Different sub features")
for a,b in zip(old.sub_features, new.sub_features):
if not compare_feature(a,b):
return False
#This only checks key shared qualifiers
#Would a white list be easier?
#for key in ["name","gene","translation","codon_table","codon_start","locus_tag"]:
for key in set(old.qualifiers.keys()).intersection(new.qualifiers.keys()):
if key in ["db_xref","protein_id","product","note"]:
#EMBL and GenBank files use different references/notes/etc
continue
if old.qualifiers[key] != new.qualifiers[key]:
raise ValueError("Qualifier mis-match for %s:\n%s\n%s" \
% (key, old.qualifiers[key], new.qualifiers[key]))
return True
def compare_features(old_list, new_list, ignore_sub_features=False):
"""Check two lists of SeqFeatures agree, raises a ValueError if mismatch."""
if len(old_list) != len(new_list):
raise ValueError("%i vs %i features" % (len(old_list), len(new_list)))
for old, new in zip(old_list, new_list):
#This assumes they are in the same order
if not compare_feature(old,new,ignore_sub_features):
return False
return True
def check_genbank_writer(records):
handle = StringIO()
GenBankWriter(handle).write_file(records)
handle.seek(0)
records2 = list(GenBankIterator(handle))
assert compare_records(records, records2)
for filename in os.listdir("../../Tests/GenBank"):
if not filename.endswith(".gbk") and not filename.endswith(".gb"):
continue
print filename
handle = open("../../Tests/GenBank/%s" % filename)
records = list(GenBankIterator(handle))
handle.close()
check_genbank_writer(records)
for filename in os.listdir("../../Tests/EMBL"):
if not filename.endswith(".embl"):
continue
print filename
handle = open("../../Tests/EMBL/%s" % filename)
records = list(EmblIterator(handle))
handle.close()
check_genbank_writer(records)
from Bio import SeqIO
for filename in os.listdir("../../Tests/SwissProt"):
if not filename.startswith("sp"):
continue
print filename
handle = open("../../Tests/SwissProt/%s" % filename)
records = list(SeqIO.parse(handle,"swiss"))
handle.close()
check_genbank_writer(records)
|
|
# Natural Language Toolkit: Internal utility functions
#
# Copyright (C) 2001-2012 NLTK Project
# Author: Steven Bird <[email protected]>
# Edward Loper <[email protected]>
# Nitin Madnani <[email protected]>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function
import subprocess
import os
import os.path
import re
import warnings
import textwrap
import types
import sys
import stat
# Use the c version of ElementTree, which is faster, if possible:
try: from xml.etree import cElementTree as ElementTree
except ImportError: from xml.etree import ElementTree
from nltk import __file__
######################################################################
# Regular Expression Processing
######################################################################
def convert_regexp_to_nongrouping(pattern):
"""
Convert all grouping parentheses in the given regexp pattern to
non-grouping parentheses, and return the result. E.g.:
>>> from nltk.internals import convert_regexp_to_nongrouping
>>> convert_regexp_to_nongrouping('ab(c(x+)(z*))?d')
'ab(?:c(?:x+)(?:z*))?d'
:type pattern: str
:rtype: str
"""
# Sanity check: back-references are not allowed!
for s in re.findall(r'\\.|\(\?P=', pattern):
if s[1] in '0123456789' or s == '(?P=':
raise ValueError('Regular expressions with back-references '
'are not supported: %r' % pattern)
# This regexp substitution function replaces the string '('
# with the string '(?:', but otherwise makes no changes.
def subfunc(m):
return re.sub('^\((\?P<[^>]*>)?$', '(?:', m.group())
# Scan through the regular expression. If we see any backslashed
# characters, ignore them. If we see a named group, then
# replace it with "(?:". If we see any open parens that are part
# of an extension group, ignore those too. But if we see
# any other open paren, replace it with "(?:".
return re.sub(r'''(?x)
\\. | # Backslashed character
\(\?P<[^>]*> | # Named group
\(\? | # Extension group
\( # Grouping parenthesis''', subfunc, pattern)
##########################################################################
# Java Via Command-Line
##########################################################################
_java_bin = None
_java_options = []
# [xx] add classpath option to config_java?
def config_java(bin=None, options=None, verbose=True):
"""
Configure nltk's java interface, by letting nltk know where it can
find the Java binary, and what extra options (if any) should be
passed to Java when it is run.
:param bin: The full path to the Java binary. If not specified,
then nltk will search the system for a Java binary; and if
one is not found, it will raise a ``LookupError`` exception.
:type bin: str
:param options: A list of options that should be passed to the
Java binary when it is called. A common value is
``'-Xmx512m'``, which tells the Java binary to increase
the maximum heap size to 512 megabytes. If no options are
specified, then do not modify the options list.
:type options: list(str)
"""
global _java_bin, _java_options
_java_bin = find_binary('java', bin, env_vars=['JAVAHOME', 'JAVA_HOME'], verbose=verbose)
if options is not None:
if isinstance(options, basestring):
options = options.split()
_java_options = list(options)
def java(cmd, classpath=None, stdin=None, stdout=None, stderr=None,
blocking=True):
"""
Execute the given java command, by opening a subprocess that calls
Java. If java has not yet been configured, it will be configured
by calling ``config_java()`` with no arguments.
:param cmd: The java command that should be called, formatted as
a list of strings. Typically, the first string will be the name
of the java class; and the remaining strings will be arguments
for that java class.
:type cmd: list(str)
:param classpath: A ``':'`` separated list of directories, JAR
archives, and ZIP archives to search for class files.
:type classpath: str
:param stdin, stdout, stderr: Specify the executed programs'
standard input, standard output and standard error file
handles, respectively. Valid values are ``subprocess.PIPE``,
an existing file descriptor (a positive integer), an existing
file object, and None. ``subprocess.PIPE`` indicates that a
new pipe to the child should be created. With None, no
redirection will occur; the child's file handles will be
inherited from the parent. Additionally, stderr can be
``subprocess.STDOUT``, which indicates that the stderr data
from the applications should be captured into the same file
handle as for stdout.
:param blocking: If ``false``, then return immediately after
spawning the subprocess. In this case, the return value is
the ``Popen`` object, and not a ``(stdout, stderr)`` tuple.
:return: If ``blocking=True``, then return a tuple ``(stdout,
stderr)``, containing the stdout and stderr outputs generated
by the java command if the ``stdout`` and ``stderr`` parameters
were set to ``subprocess.PIPE``; or None otherwise. If
``blocking=False``, then return a ``subprocess.Popen`` object.
:raise OSError: If the java command returns a nonzero return code.
"""
if stdin == 'pipe': stdin = subprocess.PIPE
if stdout == 'pipe': stdout = subprocess.PIPE
if stderr == 'pipe': stderr = subprocess.PIPE
if isinstance(cmd, basestring):
raise TypeError('cmd should be a list of strings')
# Make sure we know where a java binary is.
if _java_bin is None:
config_java()
# Set up the classpath.
if classpath is None:
classpath = NLTK_JAR
else:
classpath += os.path.pathsep + NLTK_JAR
# Construct the full command string.
cmd = list(cmd)
cmd = ['-cp', classpath] + cmd
cmd = [_java_bin] + _java_options + cmd
# Call java via a subprocess
p = subprocess.Popen(cmd, stdin=stdin, stdout=stdout, stderr=stderr)
if not blocking: return p
(stdout, stderr) = p.communicate()
# Check the return code.
if p.returncode != 0:
print(stderr)
raise OSError('Java command failed!')
return (stdout, stderr)
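# Illustrative sketch (not part of the original module): a typical blocking
# call captures stdout/stderr through subprocess.PIPE.  The class name and
# jar path below are hypothetical placeholders.
def _demo_java_call(jar_path='/path/to/some.jar'):
    """Run a hypothetical Java class and return its stdout (demo only)."""
    stdout, stderr = java(['com.example.Main', '--help'],
                          classpath=jar_path,
                          stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    return stdout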
#: The location of the NLTK jar file, which is used to communicate
#: with external Java packages (such as Mallet) that do not have
#: a sufficiently powerful native command-line interface.
NLTK_JAR = os.path.abspath(os.path.join(os.path.split(__file__)[0],
'nltk.jar'))
if 0:
#config_java(options='-Xmx512m')
# Write:
#java('weka.classifiers.bayes.NaiveBayes',
# ['-d', '/tmp/names.model', '-t', '/tmp/train.arff'],
# classpath='/Users/edloper/Desktop/weka/weka.jar')
# Read:
(a,b) = java(['weka.classifiers.bayes.NaiveBayes',
'-l', '/tmp/names.model', '-T', '/tmp/test.arff',
'-p', '0'],#, '-distribution'],
classpath='/Users/edloper/Desktop/weka/weka.jar')
######################################################################
# Parsing
######################################################################
class ParseError(ValueError):
"""
Exception raised by parse_* functions when they fail.
:param position: The index in the input string where an error occurred.
:param expected: What was expected when an error occurred.
"""
def __init__(self, expected, position):
ValueError.__init__(self, expected, position)
self.expected = expected
self.position = position
def __str__(self):
return 'Expected %s at %s' % (self.expected, self.position)
_STRING_START_RE = re.compile(r"[uU]?[rR]?(\"\"\"|\'\'\'|\"|\')")
def parse_str(s, start_position):
"""
If a Python string literal begins at the specified position in the
given string, then return a tuple ``(val, end_position)``
containing the value of the string literal and the position where
it ends. Otherwise, raise a ``ParseError``.
"""
# Read the open quote, and any modifiers.
m = _STRING_START_RE.match(s, start_position)
if not m: raise ParseError('open quote', start_position)
quotemark = m.group(1)
# Find the close quote.
_STRING_END_RE = re.compile(r'\\|%s' % quotemark)
position = m.end()
while True:
match = _STRING_END_RE.search(s, position)
if not match: raise ParseError('close quote', position)
if match.group(0) == '\\': position = match.end()+1
else: break
# Parse it, using eval. Strings with invalid escape sequences
# might raise ValueError.
try:
return eval(s[start_position:match.end()]), match.end()
except ValueError as e:
raise ParseError('valid string (%s)' % e, start_position)
_PARSE_INT_RE = re.compile(r'-?\d+')
def parse_int(s, start_position):
"""
If an integer begins at the specified position in the given
string, then return a tuple ``(val, end_position)`` containing the
value of the integer and the position where it ends. Otherwise,
raise a ``ParseError``.
"""
m = _PARSE_INT_RE.match(s, start_position)
if not m: raise ParseError('integer', start_position)
return int(m.group()), m.end()
_PARSE_NUMBER_VALUE = re.compile(r'-?(\d*)([.]?\d*)?')
def parse_number(s, start_position):
"""
If an integer or float begins at the specified position in the
given string, then return a tuple ``(val, end_position)``
containing the value of the number and the position where it ends.
Otherwise, raise a ``ParseError``.
"""
m = _PARSE_NUMBER_VALUE.match(s, start_position)
if not m or not (m.group(1) or m.group(2)):
raise ParseError('number', start_position)
if m.group(2): return float(m.group()), m.end()
else: return int(m.group()), m.end()
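# Illustrative sketch (not part of the original module): each parse_*
# helper scans from a given offset and returns (value, end_position).
def _demo_parsing():
    """Return example results from parse_str, parse_int and parse_number (demo only)."""
    return [
        parse_str('x = "hello"', 4),     # ('hello', 11)
        parse_int('width: 42px', 7),     # (42, 9)
        parse_number('pi = 3.14', 5),    # (3.14, 9)
    ]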
######################################################################
# Check if a method has been overridden
######################################################################
def overridden(method):
"""
:return: True if ``method`` overrides some method with the same
name in a base class. This is typically used when defining
abstract base classes or interfaces, to allow subclasses to define
either of two related methods:
>>> class EaterI:
... '''Subclass must define eat() or batch_eat().'''
... def eat(self, food):
... if overridden(self.batch_eat):
... return self.batch_eat([food])[0]
... else:
... raise NotImplementedError()
... def batch_eat(self, foods):
... return [self.eat(food) for food in foods]
:type method: instance method
"""
# [xx] breaks on classic classes!
if isinstance(method, types.MethodType) and method.im_class is not None:
name = method.__name__
funcs = [cls.__dict__[name]
for cls in _mro(method.im_class)
if name in cls.__dict__]
return len(funcs) > 1
else:
raise TypeError('Expected an instance method.')
def _mro(cls):
"""
Return the method resolution order for ``cls`` -- i.e., a list
containing ``cls`` and all its base classes, in the order in which
they would be checked by ``getattr``. For new-style classes, this
is just cls.__mro__. For classic classes, this can be obtained by
a depth-first left-to-right traversal of ``__bases__``.
"""
if isinstance(cls, type):
return cls.__mro__
else:
mro = [cls]
for base in cls.__bases__: mro.extend(_mro(base))
return mro
######################################################################
# Deprecation decorator & base class
######################################################################
# [xx] dedent msg first if it comes from a docstring.
def _add_epytext_field(obj, field, message):
"""Add an epytext @field to a given object's docstring."""
indent = ''
# If we already have a docstring, then add a blank line to separate
# it from the new field, and check its indentation.
if obj.__doc__:
obj.__doc__ = obj.__doc__.rstrip()+'\n\n'
indents = re.findall(r'(?<=\n)[ ]+(?!\s)', obj.__doc__.expandtabs())
if indents: indent = min(indents)
# If we don't have a docstring, add an empty one.
else:
obj.__doc__ = ''
obj.__doc__ += textwrap.fill('@%s: %s' % (field, message),
initial_indent=indent,
subsequent_indent=indent+' ')
def deprecated(message):
"""
A decorator used to mark functions as deprecated. This will cause
a warning to be printed when the function is used. Usage:
>>> from nltk.internals import deprecated
>>> @deprecated('Use foo() instead')
... def bar(x):
... print x/10
"""
def decorator(func):
msg = ("Function %s() has been deprecated. %s"
% (func.__name__, message))
msg = '\n' + textwrap.fill(msg, initial_indent=' ',
subsequent_indent=' ')
def newFunc(*args, **kwargs):
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
return func(*args, **kwargs)
# Copy the old function's name, docstring, & dict
newFunc.__dict__.update(func.__dict__)
newFunc.__name__ = func.__name__
newFunc.__doc__ = func.__doc__
newFunc.__deprecated__ = True
# Add a @deprecated field to the docstring.
_add_epytext_field(newFunc, 'deprecated', message)
return newFunc
return decorator
class Deprecated(object):
"""
A base class used to mark deprecated classes. A typical usage is to
alert users that the name of a class has changed:
>>> from nltk.internals import Deprecated
>>> class NewClassName(object):
... pass # All logic goes here.
...
>>> class OldClassName(Deprecated, NewClassName):
... "Use NewClassName instead."
The docstring of the deprecated class will be used in the
deprecation warning message.
"""
def __new__(cls, *args, **kwargs):
# Figure out which class is the deprecated one.
dep_cls = None
for base in _mro(cls):
if Deprecated in base.__bases__:
dep_cls = base; break
assert dep_cls, 'Unable to determine which base is deprecated.'
# Construct an appropriate warning.
doc = (dep_cls.__doc__ or '').strip()
# If there's a @deprecated field, strip off the field marker.
doc = re.sub(r'\A\s*@deprecated:', r'', doc)
# Strip off any indentation.
doc = re.sub(r'(?m)^\s*', '', doc)
# Construct a 'name' string.
name = 'Class %s' % dep_cls.__name__
if cls != dep_cls:
name += ' (base class for %s)' % cls.__name__
# Put it all together.
msg = '%s has been deprecated. %s' % (name, doc)
# Wrap it.
msg = '\n' + textwrap.fill(msg, initial_indent=' ',
subsequent_indent=' ')
warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
# Do the actual work of __new__.
return object.__new__(cls, *args, **kwargs)
##########################################################################
# COUNTER, FOR UNIQUE NAMING
##########################################################################
class Counter:
"""
A counter that auto-increments each time its value is read.
"""
def __init__(self, initial_value=0):
self._value = initial_value
def get(self):
self._value += 1
return self._value
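# Illustrative sketch (not part of the original module): each read of the
# counter yields the next value, which is what makes it useful for
# generating unique names.
def _demo_counter():
    """Return the first three values produced by a fresh Counter (demo only)."""
    counter = Counter()
    return [counter.get(), counter.get(), counter.get()]  # [1, 2, 3]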
##########################################################################
# Search for files/binaries
##########################################################################
def find_file(filename, env_vars=(), searchpath=(),
file_names=None, url=None, verbose=True):
"""
Search for a file to be used by nltk.
:param filename: The name or path of the file.
:param env_vars: A list of environment variable names to check.
:param file_names: A list of alternative file names to check.
:param searchpath: List of directories to search.
:param url: URL presented to user for download help.
:param verbose: Whether or not to print path when a file is found.
"""
if file_names is None: file_names = [filename]
assert isinstance(filename, basestring)
assert not isinstance(file_names, basestring)
assert not isinstance(searchpath, basestring)
if isinstance(env_vars, basestring):
env_vars = env_vars.split()
# File exists, no magic
if os.path.isfile(filename):
if verbose: print('[Found %s: %s]' % (filename, filename))
return filename
for alternative in file_names:
path_to_file = os.path.join(filename, alternative)
if os.path.isfile(path_to_file):
if verbose: print('[Found %s: %s]' % (filename, path_to_file))
return path_to_file
path_to_file = os.path.join(filename, 'file', alternative)
if os.path.isfile(path_to_file):
if verbose: print('[Found %s: %s]' % (filename, path_to_file))
return path_to_file
# Check environment variables
for env_var in env_vars:
if env_var in os.environ:
path_to_file = os.environ[env_var]
if os.path.isfile(path_to_file):
if verbose: print('[Found %s: %s]' % (filename, path_to_file))
return path_to_file
else:
for alternative in file_names:
path_to_file = os.path.join(os.environ[env_var],
alternative)
if os.path.isfile(path_to_file):
if verbose: print('[Found %s: %s]'%(filename, path_to_file))
return path_to_file
path_to_file = os.path.join(os.environ[env_var], 'file',
alternative)
if os.path.isfile(path_to_file):
if verbose: print('[Found %s: %s]'%(filename, path_to_file))
return path_to_file
# Check the path list.
for directory in searchpath:
for alternative in file_names:
path_to_file = os.path.join(directory, alternative)
if os.path.isfile(path_to_file):
return path_to_file
# If we're on a POSIX system, then try using the 'which' command
# to find the file.
if os.name == 'posix':
for alternative in file_names:
try:
p = subprocess.Popen(['which', alternative], stdout=subprocess.PIPE)
stdout, stderr = p.communicate()
path = stdout.strip()
if path.endswith(alternative) and os.path.exists(path):
if verbose: print('[Found %s: %s]' % (filename, path))
return path
except (KeyboardInterrupt, SystemExit):
raise
except:
pass
msg = ("NLTK was unable to find the %s file!" "\nUse software specific "
"configuration parameters" % filename)
if env_vars: msg += ' or set the %s environment variable' % env_vars[0]
msg += '.'
if searchpath:
msg += '\n\n Searched in:'
msg += ''.join('\n - %s' % d for d in searchpath)
if url: msg += ('\n\n For more information on %s, see:\n <%s>' %
(filename, url))
div = '='*75
raise LookupError('\n\n%s\n%s\n%s' % (div, msg, div))
def find_binary(name, path_to_bin=None, env_vars=(), searchpath=(),
binary_names=None, url=None, verbose=True):
"""
Search for a file to be used by nltk.
:param name: The name or path of the file.
:param path_to_bin: The user-supplied binary location (deprecated)
:param env_vars: A list of environment variable names to check.
:param file_names: A list of alternative file names to check.
:param searchpath: List of directories to search.
:param url: URL presented to user for download help.
:param verbose: Whether or not to print path when a file is found.
"""
return find_file(path_to_bin or name, env_vars, searchpath, binary_names,
url, verbose)
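# Illustrative sketch (not part of the original module): a typical lookup
# checks an explicit path, then environment variables, then the system
# search path, raising LookupError (with download hints) on failure.
def _demo_find_java(verbose=False):
    """Return the path to a java binary, or None if none is found (demo only)."""
    try:
        return find_binary('java', env_vars=['JAVAHOME', 'JAVA_HOME'],
                           verbose=verbose)
    except LookupError:
        return None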
##########################################################################
# Find Java JAR files
# TODO: Add support for jar names specified as regular expressions
##########################################################################
def find_jar(name, path_to_jar=None, env_vars=(),
searchpath=(), url=None, verbose=True):
"""
Search for a jar that is used by nltk.
:param name: The name of the jar file
:param path_to_jar: The user-supplied jar location, or None.
:param env_vars: A list of environment variable names to check
in addition to the CLASSPATH variable which is
checked by default.
:param searchpath: List of directories to search.
"""
assert isinstance(name, basestring)
assert not isinstance(searchpath, basestring)
if isinstance(env_vars, basestring):
env_vars = env_vars.split()
# Make sure we check the CLASSPATH first
env_vars = ['CLASSPATH'] + list(env_vars)
# If an explicit location was given, then check it, and return it if
# it's present; otherwise, complain.
if path_to_jar is not None:
if os.path.isfile(path_to_jar):
return path_to_jar
raise ValueError('Could not find %s jar file at %s' %
(name, path_to_jar))
# Check environment variables
for env_var in env_vars:
if env_var in os.environ:
if env_var == 'CLASSPATH':
classpath = os.environ['CLASSPATH']
for cp in classpath.split(os.path.pathsep):
if os.path.isfile(cp) and os.path.basename(cp) == name:
if verbose: print('[Found %s: %s]' % (name, cp))
return cp
else:
path_to_jar = os.environ[env_var]
if os.path.isfile(path_to_jar) and os.path.basename(path_to_jar) == name:
if verbose: print('[Found %s: %s]' % (name, path_to_jar))
return path_to_jar
# Check the path list.
for directory in searchpath:
path_to_jar = os.path.join(directory, name)
if os.path.isfile(path_to_jar):
if verbose: print('[Found %s: %s]' % (name, path_to_jar))
return path_to_jar
# If nothing was found, raise an error
msg = ("NLTK was unable to find %s!" % name)
if env_vars: msg += ' Set the %s environment variable' % env_vars[0]
msg = textwrap.fill(msg+'.', initial_indent=' ',
subsequent_indent=' ')
if searchpath:
msg += '\n\n Searched in:'
msg += ''.join('\n - %s' % d for d in searchpath)
if url: msg += ('\n\n For more information on %s, see:\n <%s>' %
(name, url))
div = '='*75
raise LookupError('\n\n%s\n%s\n%s' % (div, msg, div))
##########################################################################
# Import Stdlib Module
##########################################################################
def import_from_stdlib(module):
"""
When python is run from within the nltk/ directory tree, the
current directory is included at the beginning of the search path.
Unfortunately, that means that modules within nltk can sometimes
shadow standard library modules. As an example, the stdlib
'inspect' module will attempt to import the stdlib 'tokenize'
module, but will end up importing NLTK's 'tokenize' module
instead (causing the import to fail).
"""
old_path = sys.path
sys.path = [d for d in sys.path if d not in ('', '.')]
m = __import__(module)
sys.path = old_path
return m
##########################################################################
# Abstract declaration
##########################################################################
def abstract(func):
"""
A decorator used to mark methods as abstract. I.e., methods that
are marked by this decorator must be overridden by subclasses. If
an abstract method is called (either in the base class or in a
subclass that does not override the base class method), it will
raise ``NotImplementedError``.
"""
# Avoid problems caused by nltk.tokenize shadowing the stdlib tokenize:
inspect = import_from_stdlib('inspect')
# Read the function's signature.
args, varargs, varkw, defaults = inspect.getargspec(func)
# Create a new function with the same signature (minus defaults)
# that raises NotImplementedError.
msg = '%s is an abstract method.' % func.__name__
signature = inspect.formatargspec(args, varargs, varkw, ())
exec ('def newfunc%s: raise NotImplementedError(%r)' % (signature, msg))
# Substitute in the defaults after-the-fact, since eval(repr(val))
# may not work for some default values.
newfunc.func_defaults = func.func_defaults
# Copy the name and docstring
newfunc.__name__ = func.__name__
newfunc.__doc__ = func.__doc__
newfunc.__abstract__ = True
_add_epytext_field(newfunc, "note", "This method is abstract.")
# Return the function.
return newfunc
##########################################################################
# Wrapper for ElementTree Elements
##########################################################################
class ElementWrapper(object):
"""
A wrapper around ElementTree Element objects whose main purpose is
to provide nicer __repr__ and __str__ methods. In addition, any
of the wrapped Element's methods that return other Element objects
are overridden to wrap those values before returning them.
This makes Elements more convenient to work with in
interactive sessions and doctests, at the expense of some
efficiency.
"""
# Prevent double-wrapping:
def __new__(cls, etree):
"""
Create and return a wrapper around a given Element object.
If ``etree`` is an ``ElementWrapper``, then ``etree`` is
returned as-is.
"""
if isinstance(etree, ElementWrapper):
return etree
else:
return object.__new__(ElementWrapper, etree)
def __init__(self, etree):
"""
Initialize a new Element wrapper for ``etree``. If
``etree`` is a string, then it will be converted to an
Element object using ``ElementTree.fromstring()`` first.
"""
if isinstance(etree, basestring):
etree = ElementTree.fromstring(etree)
self.__dict__['_etree'] = etree
def unwrap(self):
"""
Return the Element object wrapped by this wrapper.
"""
return self._etree
##////////////////////////////////////////////////////////////
#{ String Representation
##////////////////////////////////////////////////////////////
def __repr__(self):
s = ElementTree.tostring(self._etree)
if len(s) > 60:
e = s.rfind('<')
if (len(s)-e) > 30: e = -20
s = '%s...%s' % (s[:30], s[e:])
return '<Element %r>' % s
def __str__(self):
"""
:return: the result of applying ``ElementTree.tostring()`` to
the wrapped Element object.
"""
return ElementTree.tostring(self._etree).rstrip()
##////////////////////////////////////////////////////////////
#{ Element interface Delegation (pass-through)
##////////////////////////////////////////////////////////////
def __getattr__(self, attrib):
return getattr(self._etree, attrib)
def __setattr__(self, attr, value):
return setattr(self._etree, attr, value)
def __delattr__(self, attr):
return delattr(self._etree, attr)
def __setitem__(self, index, element):
self._etree[index] = element
def __delitem__(self, index):
del self._etree[index]
def __setslice__(self, start, stop, elements):
self._etree[start:stop] = elements
def __delslice__(self, start, stop):
del self._etree[start:stop]
def __len__(self):
return len(self._etree)
##////////////////////////////////////////////////////////////
#{ Element interface Delegation (wrap result)
##////////////////////////////////////////////////////////////
def __getitem__(self, index):
return ElementWrapper(self._etree[index])
def __getslice__(self, start, stop):
return [ElementWrapper(elt) for elt in self._etree[start:stop]]
def getchildren(self):
return [ElementWrapper(elt) for elt in self._etree]
def getiterator(self, tag=None):
return (ElementWrapper(elt)
for elt in self._etree.getiterator(tag))
def makeelement(self, tag, attrib):
return ElementWrapper(self._etree.makeelement(tag, attrib))
def find(self, path):
elt = self._etree.find(path)
if elt is None: return elt
else: return ElementWrapper(elt)
def findall(self, path):
return [ElementWrapper(elt) for elt in self._etree.findall(path)]
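# Illustrative sketch (not part of the original module): wrapping an XML
# string gives Element-like access with readable reprs; results of find()
# and indexing are themselves wrapped.
def _demo_element_wrapper():
    """Return the text of the first <p> child of a small document (demo only)."""
    doc = ElementWrapper('<doc><p>hello</p></doc>')
    return doc.find('p').text  # 'hello'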
######################################################################
# Helper for Handling Slicing
######################################################################
def slice_bounds(sequence, slice_obj, allow_step=False):
"""
Given a slice, return the corresponding (start, stop) bounds,
taking into account None indices and negative indices. The
following guarantees are made for the returned start and stop values:
- 0 <= start <= len(sequence)
- 0 <= stop <= len(sequence)
- start <= stop
:raise ValueError: If ``slice_obj.step`` is not None.
:param allow_step: If true, then the slice object may have a
non-None step. If it does, then return a tuple
(start, stop, step).
"""
start, stop = (slice_obj.start, slice_obj.stop)
# If allow_step is true, then include the step in our return
# value tuple.
if allow_step:
step = slice_obj.step
if step is None: step = 1
# Use a recursive call without allow_step to find the slice
# bounds. If step is negative, then the roles of start and
# stop (in terms of default values, etc), are swapped.
if step < 0:
start, stop = slice_bounds(sequence, slice(stop, start))
else:
start, stop = slice_bounds(sequence, slice(start, stop))
return start, stop, step
# Otherwise, make sure that no non-default step value is used.
elif slice_obj.step not in (None, 1):
raise ValueError('slices with steps are not supported by %s' %
sequence.__class__.__name__)
# Supply default offsets.
if start is None: start = 0
if stop is None: stop = len(sequence)
# Handle negative indices.
if start < 0: start = max(0, len(sequence)+start)
if stop < 0: stop = max(0, len(sequence)+stop)
# Make sure stop doesn't go past the end of the list. Note that
# we avoid calculating len(sequence) if possible, because for lazy
# sequences, calculating the length of a sequence can be expensive.
if stop > 0:
try: sequence[stop-1]
except IndexError: stop = len(sequence)
# Make sure start isn't past stop.
start = min(start, stop)
# That's all folks!
return start, stop
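# Illustrative sketch (not part of the original module): negative and
# out-of-range slice indices are normalised to concrete bounds.
def _demo_slice_bounds():
    """Return normalised bounds for a few slices of a 5-element list (demo only)."""
    seq = [10, 20, 30, 40, 50]
    return [
        slice_bounds(seq, slice(-3, None)),                          # (2, 5)
        slice_bounds(seq, slice(1, 100)),                            # (1, 5)
        slice_bounds(seq, slice(None, None, -1), allow_step=True),   # (0, 5, -1)
    ]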
######################################################################
# Permission Checking
######################################################################
def is_writable(path):
# Ensure that it exists.
if not os.path.exists(path):
return False
# If we're on a posix system, check its permissions.
if hasattr(os, 'getuid'):
statdata = os.stat(path)
perm = stat.S_IMODE(statdata.st_mode)
# is it world-writable?
if (perm & 0002):
return True
# do we own it?
elif statdata.st_uid == os.getuid() and (perm & 0200):
return True
# are we in a group that can write to it?
elif statdata.st_gid == os.getgid() and (perm & 0020):
return True
# otherwise, we can't write to it.
else:
return False
# Otherwise, we'll assume it's writable.
# [xx] should we do other checks on other platforms?
return True
|
|
# Generated from /Users/xudong/git/HTTPIDL/Grammar/HTTPIDL.g4 by ANTLR 4.7
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write(u"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3")
buf.write(u"+\u00de\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t")
buf.write(u"\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write(u"\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4")
buf.write(u"\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30")
buf.write(u"\t\30\4\31\t\31\4\32\t\32\3\2\3\2\7\2\67\n\2\f\2\16\2")
buf.write(u":\13\2\3\2\5\2=\n\2\3\3\3\3\5\3A\n\3\3\3\3\3\3\3\3\3")
buf.write(u"\7\3G\n\3\f\3\16\3J\13\3\3\3\3\3\3\4\3\4\3\4\3\4\3\5")
buf.write(u"\3\5\3\5\3\5\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\b\3\b")
buf.write(u"\3\t\3\t\5\tb\n\t\7\td\n\t\f\t\16\tg\13\t\3\n\3\n\5\n")
buf.write(u"k\n\n\3\13\3\13\3\13\3\f\6\fq\n\f\r\f\16\fr\3\r\3\r\3")
buf.write(u"\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3")
buf.write(u"\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3")
buf.write(u"\r\3\r\3\r\3\r\3\r\3\r\3\r\5\r\u0098\n\r\3\16\3\16\3")
buf.write(u"\16\3\17\3\17\5\17\u009f\n\17\3\17\7\17\u00a2\n\17\f")
buf.write(u"\17\16\17\u00a5\13\17\5\17\u00a7\n\17\3\17\3\17\3\20")
buf.write(u"\3\20\3\20\3\21\3\21\3\21\3\21\5\21\u00b2\n\21\3\21\3")
buf.write(u"\21\3\22\3\22\5\22\u00b8\n\22\3\23\3\23\3\23\3\23\5\23")
buf.write(u"\u00be\n\23\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3")
buf.write(u"\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26")
buf.write(u"\3\26\5\26\u00d4\n\26\3\27\3\27\3\30\3\30\3\31\3\31\3")
buf.write(u"\32\3\32\3\32\2\2\33\2\4\6\b\n\f\16\20\22\24\26\30\32")
buf.write(u"\34\36 \"$&(*,.\60\62\2\4\3\2\5\r\3\2\20\33\2\u00ff\2")
buf.write(u"<\3\2\2\2\4>\3\2\2\2\6M\3\2\2\2\bQ\3\2\2\2\nU\3\2\2\2")
buf.write(u"\fY\3\2\2\2\16]\3\2\2\2\20e\3\2\2\2\22j\3\2\2\2\24l\3")
buf.write(u"\2\2\2\26p\3\2\2\2\30\u0097\3\2\2\2\32\u0099\3\2\2\2")
buf.write(u"\34\u009c\3\2\2\2\36\u00aa\3\2\2\2 \u00ad\3\2\2\2\"\u00b7")
buf.write(u"\3\2\2\2$\u00bd\3\2\2\2&\u00bf\3\2\2\2(\u00c3\3\2\2\2")
buf.write(u"*\u00d3\3\2\2\2,\u00d5\3\2\2\2.\u00d7\3\2\2\2\60\u00d9")
buf.write(u"\3\2\2\2\62\u00db\3\2\2\2\64\67\5\4\3\2\65\67\5\b\5\2")
buf.write(u"\66\64\3\2\2\2\66\65\3\2\2\2\67:\3\2\2\28\66\3\2\2\2")
buf.write(u"89\3\2\2\29=\3\2\2\2:8\3\2\2\2;=\7\2\2\3<8\3\2\2\2<;")
buf.write(u"\3\2\2\2=\3\3\2\2\2>@\7\3\2\2?A\5\6\4\2@?\3\2\2\2@A\3")
buf.write(u"\2\2\2AB\3\2\2\2BC\5\20\t\2CH\7\21\2\2DG\5\n\6\2EG\5")
buf.write(u"\f\7\2FD\3\2\2\2FE\3\2\2\2GJ\3\2\2\2HF\3\2\2\2HI\3\2")
buf.write(u"\2\2IK\3\2\2\2JH\3\2\2\2KL\7\22\2\2L\5\3\2\2\2MN\7\23")
buf.write(u"\2\2NO\5\62\32\2OP\7\24\2\2P\7\3\2\2\2QR\7\4\2\2RS\5")
buf.write(u"\60\31\2ST\5\34\17\2T\t\3\2\2\2UV\5\16\b\2VW\7\16\2\2")
buf.write(u"WX\5\34\17\2X\13\3\2\2\2YZ\5\16\b\2Z[\7\17\2\2[\\\5\34")
buf.write(u"\17\2\\\r\3\2\2\2]^\t\2\2\2^\17\3\2\2\2_a\7\20\2\2`b")
buf.write(u"\5\22\n\2a`\3\2\2\2ab\3\2\2\2bd\3\2\2\2c_\3\2\2\2dg\3")
buf.write(u"\2\2\2ec\3\2\2\2ef\3\2\2\2f\21\3\2\2\2ge\3\2\2\2hk\5")
buf.write(u"\24\13\2ik\5\26\f\2jh\3\2\2\2ji\3\2\2\2k\23\3\2\2\2l")
buf.write(u"m\7\25\2\2mn\5\62\32\2n\25\3\2\2\2oq\5\30\r\2po\3\2\2")
buf.write(u"\2qr\3\2\2\2rp\3\2\2\2rs\3\2\2\2s\27\3\2\2\2t\u0098\7")
buf.write(u"\3\2\2u\u0098\7\4\2\2v\u0098\7\5\2\2w\u0098\7\6\2\2x")
buf.write(u"\u0098\7\7\2\2y\u0098\7\b\2\2z\u0098\7\t\2\2{\u0098\7")
buf.write(u"\n\2\2|\u0098\7\13\2\2}\u0098\7\f\2\2~\u0098\7\r\2\2")
buf.write(u"\177\u0098\7\16\2\2\u0080\u0098\7\17\2\2\u0081\u0098")
buf.write(u"\7\22\2\2\u0082\u0098\7\25\2\2\u0083\u0098\7\26\2\2\u0084")
buf.write(u"\u0098\7\27\2\2\u0085\u0098\7\23\2\2\u0086\u0098\7\24")
buf.write(u"\2\2\u0087\u0098\7\30\2\2\u0088\u0098\7\34\2\2\u0089")
buf.write(u"\u0098\7\35\2\2\u008a\u0098\7\36\2\2\u008b\u0098\7\37")
buf.write(u"\2\2\u008c\u0098\7 \2\2\u008d\u0098\7!\2\2\u008e\u0098")
buf.write(u"\7\"\2\2\u008f\u0098\7#\2\2\u0090\u0098\7$\2\2\u0091")
buf.write(u"\u0098\7%\2\2\u0092\u0098\7&\2\2\u0093\u0098\7\'\2\2")
buf.write(u"\u0094\u0098\7*\2\2\u0095\u0098\5\32\16\2\u0096\u0098")
buf.write(u"\7+\2\2\u0097t\3\2\2\2\u0097u\3\2\2\2\u0097v\3\2\2\2")
buf.write(u"\u0097w\3\2\2\2\u0097x\3\2\2\2\u0097y\3\2\2\2\u0097z")
buf.write(u"\3\2\2\2\u0097{\3\2\2\2\u0097|\3\2\2\2\u0097}\3\2\2\2")
buf.write(u"\u0097~\3\2\2\2\u0097\177\3\2\2\2\u0097\u0080\3\2\2\2")
buf.write(u"\u0097\u0081\3\2\2\2\u0097\u0082\3\2\2\2\u0097\u0083")
buf.write(u"\3\2\2\2\u0097\u0084\3\2\2\2\u0097\u0085\3\2\2\2\u0097")
buf.write(u"\u0086\3\2\2\2\u0097\u0087\3\2\2\2\u0097\u0088\3\2\2")
buf.write(u"\2\u0097\u0089\3\2\2\2\u0097\u008a\3\2\2\2\u0097\u008b")
buf.write(u"\3\2\2\2\u0097\u008c\3\2\2\2\u0097\u008d\3\2\2\2\u0097")
buf.write(u"\u008e\3\2\2\2\u0097\u008f\3\2\2\2\u0097\u0090\3\2\2")
buf.write(u"\2\u0097\u0091\3\2\2\2\u0097\u0092\3\2\2\2\u0097\u0093")
buf.write(u"\3\2\2\2\u0097\u0094\3\2\2\2\u0097\u0095\3\2\2\2\u0097")
buf.write(u"\u0096\3\2\2\2\u0098\31\3\2\2\2\u0099\u009a\7\33\2\2")
buf.write(u"\u009a\u009b\t\3\2\2\u009b\33\3\2\2\2\u009c\u00a6\7\21")
buf.write(u"\2\2\u009d\u009f\5\36\20\2\u009e\u009d\3\2\2\2\u009e")
buf.write(u"\u009f\3\2\2\2\u009f\u00a7\3\2\2\2\u00a0\u00a2\5 \21")
buf.write(u"\2\u00a1\u00a0\3\2\2\2\u00a2\u00a5\3\2\2\2\u00a3\u00a1")
buf.write(u"\3\2\2\2\u00a3\u00a4\3\2\2\2\u00a4\u00a7\3\2\2\2\u00a5")
buf.write(u"\u00a3\3\2\2\2\u00a6\u009e\3\2\2\2\u00a6\u00a3\3\2\2")
buf.write(u"\2\u00a7\u00a8\3\2\2\2\u00a8\u00a9\7\22\2\2\u00a9\35")
buf.write(u"\3\2\2\2\u00aa\u00ab\5\"\22\2\u00ab\u00ac\7\32\2\2\u00ac")
buf.write(u"\37\3\2\2\2\u00ad\u00ae\5\"\22\2\u00ae\u00b1\5,\27\2")
buf.write(u"\u00af\u00b0\7\31\2\2\u00b0\u00b2\5.\30\2\u00b1\u00af")
buf.write(u"\3\2\2\2\u00b1\u00b2\3\2\2\2\u00b2\u00b3\3\2\2\2\u00b3")
buf.write(u"\u00b4\7\32\2\2\u00b4!\3\2\2\2\u00b5\u00b8\5$\23\2\u00b6")
buf.write(u"\u00b8\5*\26\2\u00b7\u00b5\3\2\2\2\u00b7\u00b6\3\2\2")
buf.write(u"\2\u00b8#\3\2\2\2\u00b9\u00ba\7%\2\2\u00ba\u00be\5&\24")
buf.write(u"\2\u00bb\u00bc\7&\2\2\u00bc\u00be\5(\25\2\u00bd\u00b9")
buf.write(u"\3\2\2\2\u00bd\u00bb\3\2\2\2\u00be%\3\2\2\2\u00bf\u00c0")
buf.write(u"\7\26\2\2\u00c0\u00c1\5\"\22\2\u00c1\u00c2\7\27\2\2\u00c2")
buf.write(u"\'\3\2\2\2\u00c3\u00c4\7\26\2\2\u00c4\u00c5\5*\26\2\u00c5")
buf.write(u"\u00c6\7\30\2\2\u00c6\u00c7\5\"\22\2\u00c7\u00c8\7\27")
buf.write(u"\2\2\u00c8)\3\2\2\2\u00c9\u00d4\7\34\2\2\u00ca\u00d4")
buf.write(u"\7\35\2\2\u00cb\u00d4\7\36\2\2\u00cc\u00d4\7\37\2\2\u00cd")
buf.write(u"\u00d4\7 \2\2\u00ce\u00d4\7!\2\2\u00cf\u00d4\7\"\2\2")
buf.write(u"\u00d0\u00d4\7#\2\2\u00d1\u00d4\7$\2\2\u00d2\u00d4\5")
buf.write(u"\60\31\2\u00d3\u00c9\3\2\2\2\u00d3\u00ca\3\2\2\2\u00d3")
buf.write(u"\u00cb\3\2\2\2\u00d3\u00cc\3\2\2\2\u00d3\u00cd\3\2\2")
buf.write(u"\2\u00d3\u00ce\3\2\2\2\u00d3\u00cf\3\2\2\2\u00d3\u00d0")
buf.write(u"\3\2\2\2\u00d3\u00d1\3\2\2\2\u00d3\u00d2\3\2\2\2\u00d4")
buf.write(u"+\3\2\2\2\u00d5\u00d6\5\62\32\2\u00d6-\3\2\2\2\u00d7")
buf.write(u"\u00d8\5\26\f\2\u00d8/\3\2\2\2\u00d9\u00da\5\62\32\2")
buf.write(u"\u00da\61\3\2\2\2\u00db\u00dc\7*\2\2\u00dc\63\3\2\2\2")
buf.write(u"\24\668<@FHaejr\u0097\u009e\u00a3\u00a6\u00b1\u00b7\u00bd")
buf.write(u"\u00d3")
return buf.getvalue()
class HTTPIDL ( Parser ):
grammarFileName = "HTTPIDL.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ u"<INVALID>", u"'MESSAGE'", u"'STRUCT'", u"'GET'",
u"'HEAD'", u"'TRACE'", u"'CONNECT'", u"'OPTIONS'",
u"'POST'", u"'PUT'", u"'PATCH'", u"'DELETE'", u"'REQUEST'",
u"'RESPONSE'", u"'/'", u"'{'", u"'}'", u"'('", u"')'",
u"'$'", u"'<'", u"'>'", u"','", u"'='", u"';'", u"'\\'",
u"'INT32'", u"'UINT32'", u"'INT64'", u"'UINT64'", u"'BOOL'",
u"'DOUBLE'", u"'STRING'", u"'FILE'", u"'BLOB'", u"'ARRAY'",
u"'DICT'" ]
symbolicNames = [ u"<INVALID>", u"MESSAGE", u"STRUCT", u"GET", u"HEAD",
u"TRACE", u"CONNECT", u"OPTIONS", u"POST", u"PUT",
u"PATCH", u"DELETE", u"REQUEST", u"RESPONSE", u"SLASH",
u"LCURLY", u"RCURLY", u"LPAREN", u"RPAREN", u"DOLLAR",
u"LABRACKET", u"RABRACKET", u"COMMA", u"ASSIGN", u"SEMICOLON",
u"ESCAPE", u"INT32", u"UINT32", u"INT64", u"UINT64",
u"BOOL", u"DOUBLE", u"STRING", u"FILE", u"BLOB", u"ARRAY",
u"DICT", u"COMMENT", u"NL", u"WS", u"IDENT", u"ANYCHAR" ]
RULE_entry = 0
RULE_message = 1
RULE_messageName = 2
RULE_struct = 3
RULE_request = 4
RULE_response = 5
RULE_method = 6
RULE_uri = 7
RULE_uriPathComponent = 8
RULE_parameterInUri = 9
RULE_string = 10
RULE_stringElement = 11
RULE_escaped = 12
RULE_structBody = 13
RULE_singleParameter = 14
RULE_parameterMap = 15
RULE_paramType = 16
RULE_genericType = 17
RULE_arrayGenericParam = 18
RULE_dictGenericParam = 19
RULE_baseType = 20
RULE_key = 21
RULE_value = 22
RULE_structName = 23
RULE_identifier = 24
ruleNames = [ u"entry", u"message", u"messageName", u"struct", u"request",
u"response", u"method", u"uri", u"uriPathComponent",
u"parameterInUri", u"string", u"stringElement", u"escaped",
u"structBody", u"singleParameter", u"parameterMap", u"paramType",
u"genericType", u"arrayGenericParam", u"dictGenericParam",
u"baseType", u"key", u"value", u"structName", u"identifier" ]
EOF = Token.EOF
MESSAGE=1
STRUCT=2
GET=3
HEAD=4
TRACE=5
CONNECT=6
OPTIONS=7
POST=8
PUT=9
PATCH=10
DELETE=11
REQUEST=12
RESPONSE=13
SLASH=14
LCURLY=15
RCURLY=16
LPAREN=17
RPAREN=18
DOLLAR=19
LABRACKET=20
RABRACKET=21
COMMA=22
ASSIGN=23
SEMICOLON=24
ESCAPE=25
INT32=26
UINT32=27
INT64=28
UINT64=29
BOOL=30
DOUBLE=31
STRING=32
FILE=33
BLOB=34
ARRAY=35
DICT=36
COMMENT=37
NL=38
WS=39
IDENT=40
ANYCHAR=41
def __init__(self, input, output=sys.stdout):
super(HTTPIDL, self).__init__(input, output=output)
self.checkVersion("4.7")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class EntryContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.EntryContext, self).__init__(parent, invokingState)
self.parser = parser
def message(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.MessageContext)
else:
return self.getTypedRuleContext(HTTPIDL.MessageContext,i)
def struct(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.StructContext)
else:
return self.getTypedRuleContext(HTTPIDL.StructContext,i)
def EOF(self):
return self.getToken(HTTPIDL.EOF, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_entry
def entry(self):
localctx = HTTPIDL.EntryContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_entry)
self._la = 0 # Token type
try:
self.state = 58
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,2,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 54
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==HTTPIDL.MESSAGE or _la==HTTPIDL.STRUCT:
self.state = 52
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [HTTPIDL.MESSAGE]:
self.state = 50
self.message()
pass
elif token in [HTTPIDL.STRUCT]:
self.state = 51
self.struct()
pass
else:
raise NoViableAltException(self)
self.state = 56
self._errHandler.sync(self)
_la = self._input.LA(1)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 57
self.match(HTTPIDL.EOF)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class MessageContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.MessageContext, self).__init__(parent, invokingState)
self.parser = parser
def MESSAGE(self):
return self.getToken(HTTPIDL.MESSAGE, 0)
def uri(self):
return self.getTypedRuleContext(HTTPIDL.UriContext,0)
def LCURLY(self):
return self.getToken(HTTPIDL.LCURLY, 0)
def RCURLY(self):
return self.getToken(HTTPIDL.RCURLY, 0)
def messageName(self):
return self.getTypedRuleContext(HTTPIDL.MessageNameContext,0)
def request(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.RequestContext)
else:
return self.getTypedRuleContext(HTTPIDL.RequestContext,i)
def response(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.ResponseContext)
else:
return self.getTypedRuleContext(HTTPIDL.ResponseContext,i)
def getRuleIndex(self):
return HTTPIDL.RULE_message
def message(self):
localctx = HTTPIDL.MessageContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_message)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 60
self.match(HTTPIDL.MESSAGE)
self.state = 62
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==HTTPIDL.LPAREN:
self.state = 61
self.messageName()
self.state = 64
self.uri()
self.state = 65
self.match(HTTPIDL.LCURLY)
self.state = 70
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.GET) | (1 << HTTPIDL.HEAD) | (1 << HTTPIDL.TRACE) | (1 << HTTPIDL.CONNECT) | (1 << HTTPIDL.OPTIONS) | (1 << HTTPIDL.POST) | (1 << HTTPIDL.PUT) | (1 << HTTPIDL.PATCH) | (1 << HTTPIDL.DELETE))) != 0):
self.state = 68
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
if la_ == 1:
self.state = 66
self.request()
pass
elif la_ == 2:
self.state = 67
self.response()
pass
self.state = 72
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 73
self.match(HTTPIDL.RCURLY)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class MessageNameContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.MessageNameContext, self).__init__(parent, invokingState)
self.parser = parser
def LPAREN(self):
return self.getToken(HTTPIDL.LPAREN, 0)
def identifier(self):
return self.getTypedRuleContext(HTTPIDL.IdentifierContext,0)
def RPAREN(self):
return self.getToken(HTTPIDL.RPAREN, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_messageName
def messageName(self):
localctx = HTTPIDL.MessageNameContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_messageName)
try:
self.enterOuterAlt(localctx, 1)
self.state = 75
self.match(HTTPIDL.LPAREN)
self.state = 76
self.identifier()
self.state = 77
self.match(HTTPIDL.RPAREN)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StructContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.StructContext, self).__init__(parent, invokingState)
self.parser = parser
def STRUCT(self):
return self.getToken(HTTPIDL.STRUCT, 0)
def structName(self):
return self.getTypedRuleContext(HTTPIDL.StructNameContext,0)
def structBody(self):
return self.getTypedRuleContext(HTTPIDL.StructBodyContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_struct
def struct(self):
localctx = HTTPIDL.StructContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_struct)
try:
self.enterOuterAlt(localctx, 1)
self.state = 79
self.match(HTTPIDL.STRUCT)
self.state = 80
self.structName()
self.state = 81
self.structBody()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class RequestContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.RequestContext, self).__init__(parent, invokingState)
self.parser = parser
def method(self):
return self.getTypedRuleContext(HTTPIDL.MethodContext,0)
def REQUEST(self):
return self.getToken(HTTPIDL.REQUEST, 0)
def structBody(self):
return self.getTypedRuleContext(HTTPIDL.StructBodyContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_request
def request(self):
localctx = HTTPIDL.RequestContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_request)
try:
self.enterOuterAlt(localctx, 1)
self.state = 83
self.method()
self.state = 84
self.match(HTTPIDL.REQUEST)
self.state = 85
self.structBody()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ResponseContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ResponseContext, self).__init__(parent, invokingState)
self.parser = parser
def method(self):
return self.getTypedRuleContext(HTTPIDL.MethodContext,0)
def RESPONSE(self):
return self.getToken(HTTPIDL.RESPONSE, 0)
def structBody(self):
return self.getTypedRuleContext(HTTPIDL.StructBodyContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_response
def response(self):
localctx = HTTPIDL.ResponseContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_response)
try:
self.enterOuterAlt(localctx, 1)
self.state = 87
self.method()
self.state = 88
self.match(HTTPIDL.RESPONSE)
self.state = 89
self.structBody()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class MethodContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.MethodContext, self).__init__(parent, invokingState)
self.parser = parser
def GET(self):
return self.getToken(HTTPIDL.GET, 0)
def POST(self):
return self.getToken(HTTPIDL.POST, 0)
def DELETE(self):
return self.getToken(HTTPIDL.DELETE, 0)
def PUT(self):
return self.getToken(HTTPIDL.PUT, 0)
def PATCH(self):
return self.getToken(HTTPIDL.PATCH, 0)
def HEAD(self):
return self.getToken(HTTPIDL.HEAD, 0)
def TRACE(self):
return self.getToken(HTTPIDL.TRACE, 0)
def CONNECT(self):
return self.getToken(HTTPIDL.CONNECT, 0)
def OPTIONS(self):
return self.getToken(HTTPIDL.OPTIONS, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_method
def method(self):
localctx = HTTPIDL.MethodContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_method)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 91
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.GET) | (1 << HTTPIDL.HEAD) | (1 << HTTPIDL.TRACE) | (1 << HTTPIDL.CONNECT) | (1 << HTTPIDL.OPTIONS) | (1 << HTTPIDL.POST) | (1 << HTTPIDL.PUT) | (1 << HTTPIDL.PATCH) | (1 << HTTPIDL.DELETE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class UriContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.UriContext, self).__init__(parent, invokingState)
self.parser = parser
def SLASH(self, i=None):
if i is None:
return self.getTokens(HTTPIDL.SLASH)
else:
return self.getToken(HTTPIDL.SLASH, i)
def uriPathComponent(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.UriPathComponentContext)
else:
return self.getTypedRuleContext(HTTPIDL.UriPathComponentContext,i)
def getRuleIndex(self):
return HTTPIDL.RULE_uri
def uri(self):
localctx = HTTPIDL.UriContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_uri)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 99
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==HTTPIDL.SLASH:
self.state = 93
self.match(HTTPIDL.SLASH)
self.state = 95
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.MESSAGE) | (1 << HTTPIDL.STRUCT) | (1 << HTTPIDL.GET) | (1 << HTTPIDL.HEAD) | (1 << HTTPIDL.TRACE) | (1 << HTTPIDL.CONNECT) | (1 << HTTPIDL.OPTIONS) | (1 << HTTPIDL.POST) | (1 << HTTPIDL.PUT) | (1 << HTTPIDL.PATCH) | (1 << HTTPIDL.DELETE) | (1 << HTTPIDL.REQUEST) | (1 << HTTPIDL.RESPONSE) | (1 << HTTPIDL.RCURLY) | (1 << HTTPIDL.LPAREN) | (1 << HTTPIDL.RPAREN) | (1 << HTTPIDL.DOLLAR) | (1 << HTTPIDL.LABRACKET) | (1 << HTTPIDL.RABRACKET) | (1 << HTTPIDL.COMMA) | (1 << HTTPIDL.ESCAPE) | (1 << HTTPIDL.INT32) | (1 << HTTPIDL.UINT32) | (1 << HTTPIDL.INT64) | (1 << HTTPIDL.UINT64) | (1 << HTTPIDL.BOOL) | (1 << HTTPIDL.DOUBLE) | (1 << HTTPIDL.STRING) | (1 << HTTPIDL.FILE) | (1 << HTTPIDL.BLOB) | (1 << HTTPIDL.ARRAY) | (1 << HTTPIDL.DICT) | (1 << HTTPIDL.COMMENT) | (1 << HTTPIDL.IDENT) | (1 << HTTPIDL.ANYCHAR))) != 0):
self.state = 94
self.uriPathComponent()
self.state = 101
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class UriPathComponentContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.UriPathComponentContext, self).__init__(parent, invokingState)
self.parser = parser
def parameterInUri(self):
return self.getTypedRuleContext(HTTPIDL.ParameterInUriContext,0)
def string(self):
return self.getTypedRuleContext(HTTPIDL.StringContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_uriPathComponent
def uriPathComponent(self):
localctx = HTTPIDL.UriPathComponentContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_uriPathComponent)
try:
self.state = 104
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,8,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 102
self.parameterInUri()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 103
self.string()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterInUriContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ParameterInUriContext, self).__init__(parent, invokingState)
self.parser = parser
def DOLLAR(self):
return self.getToken(HTTPIDL.DOLLAR, 0)
def identifier(self):
return self.getTypedRuleContext(HTTPIDL.IdentifierContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_parameterInUri
def parameterInUri(self):
localctx = HTTPIDL.ParameterInUriContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_parameterInUri)
try:
self.enterOuterAlt(localctx, 1)
self.state = 106
self.match(HTTPIDL.DOLLAR)
self.state = 107
self.identifier()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StringContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.StringContext, self).__init__(parent, invokingState)
self.parser = parser
def stringElement(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.StringElementContext)
else:
return self.getTypedRuleContext(HTTPIDL.StringElementContext,i)
def getRuleIndex(self):
return HTTPIDL.RULE_string
def string(self):
localctx = HTTPIDL.StringContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_string)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 110
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 109
self.stringElement()
self.state = 112
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.MESSAGE) | (1 << HTTPIDL.STRUCT) | (1 << HTTPIDL.GET) | (1 << HTTPIDL.HEAD) | (1 << HTTPIDL.TRACE) | (1 << HTTPIDL.CONNECT) | (1 << HTTPIDL.OPTIONS) | (1 << HTTPIDL.POST) | (1 << HTTPIDL.PUT) | (1 << HTTPIDL.PATCH) | (1 << HTTPIDL.DELETE) | (1 << HTTPIDL.REQUEST) | (1 << HTTPIDL.RESPONSE) | (1 << HTTPIDL.RCURLY) | (1 << HTTPIDL.LPAREN) | (1 << HTTPIDL.RPAREN) | (1 << HTTPIDL.DOLLAR) | (1 << HTTPIDL.LABRACKET) | (1 << HTTPIDL.RABRACKET) | (1 << HTTPIDL.COMMA) | (1 << HTTPIDL.ESCAPE) | (1 << HTTPIDL.INT32) | (1 << HTTPIDL.UINT32) | (1 << HTTPIDL.INT64) | (1 << HTTPIDL.UINT64) | (1 << HTTPIDL.BOOL) | (1 << HTTPIDL.DOUBLE) | (1 << HTTPIDL.STRING) | (1 << HTTPIDL.FILE) | (1 << HTTPIDL.BLOB) | (1 << HTTPIDL.ARRAY) | (1 << HTTPIDL.DICT) | (1 << HTTPIDL.COMMENT) | (1 << HTTPIDL.IDENT) | (1 << HTTPIDL.ANYCHAR))) != 0)):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StringElementContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.StringElementContext, self).__init__(parent, invokingState)
self.parser = parser
def MESSAGE(self):
return self.getToken(HTTPIDL.MESSAGE, 0)
def STRUCT(self):
return self.getToken(HTTPIDL.STRUCT, 0)
def GET(self):
return self.getToken(HTTPIDL.GET, 0)
def HEAD(self):
return self.getToken(HTTPIDL.HEAD, 0)
def TRACE(self):
return self.getToken(HTTPIDL.TRACE, 0)
def CONNECT(self):
return self.getToken(HTTPIDL.CONNECT, 0)
def OPTIONS(self):
return self.getToken(HTTPIDL.OPTIONS, 0)
def POST(self):
return self.getToken(HTTPIDL.POST, 0)
def PUT(self):
return self.getToken(HTTPIDL.PUT, 0)
def PATCH(self):
return self.getToken(HTTPIDL.PATCH, 0)
def DELETE(self):
return self.getToken(HTTPIDL.DELETE, 0)
def REQUEST(self):
return self.getToken(HTTPIDL.REQUEST, 0)
def RESPONSE(self):
return self.getToken(HTTPIDL.RESPONSE, 0)
def RCURLY(self):
return self.getToken(HTTPIDL.RCURLY, 0)
def DOLLAR(self):
return self.getToken(HTTPIDL.DOLLAR, 0)
def LABRACKET(self):
return self.getToken(HTTPIDL.LABRACKET, 0)
def RABRACKET(self):
return self.getToken(HTTPIDL.RABRACKET, 0)
def LPAREN(self):
return self.getToken(HTTPIDL.LPAREN, 0)
def RPAREN(self):
return self.getToken(HTTPIDL.RPAREN, 0)
def COMMA(self):
return self.getToken(HTTPIDL.COMMA, 0)
def INT32(self):
return self.getToken(HTTPIDL.INT32, 0)
def UINT32(self):
return self.getToken(HTTPIDL.UINT32, 0)
def INT64(self):
return self.getToken(HTTPIDL.INT64, 0)
def UINT64(self):
return self.getToken(HTTPIDL.UINT64, 0)
def BOOL(self):
return self.getToken(HTTPIDL.BOOL, 0)
def DOUBLE(self):
return self.getToken(HTTPIDL.DOUBLE, 0)
def STRING(self):
return self.getToken(HTTPIDL.STRING, 0)
def FILE(self):
return self.getToken(HTTPIDL.FILE, 0)
def BLOB(self):
return self.getToken(HTTPIDL.BLOB, 0)
def ARRAY(self):
return self.getToken(HTTPIDL.ARRAY, 0)
def DICT(self):
return self.getToken(HTTPIDL.DICT, 0)
def COMMENT(self):
return self.getToken(HTTPIDL.COMMENT, 0)
def IDENT(self):
return self.getToken(HTTPIDL.IDENT, 0)
def escaped(self):
return self.getTypedRuleContext(HTTPIDL.EscapedContext,0)
def ANYCHAR(self):
return self.getToken(HTTPIDL.ANYCHAR, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_stringElement
def stringElement(self):
localctx = HTTPIDL.StringElementContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_stringElement)
try:
self.state = 149
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [HTTPIDL.MESSAGE]:
self.enterOuterAlt(localctx, 1)
self.state = 114
self.match(HTTPIDL.MESSAGE)
pass
elif token in [HTTPIDL.STRUCT]:
self.enterOuterAlt(localctx, 2)
self.state = 115
self.match(HTTPIDL.STRUCT)
pass
elif token in [HTTPIDL.GET]:
self.enterOuterAlt(localctx, 3)
self.state = 116
self.match(HTTPIDL.GET)
pass
elif token in [HTTPIDL.HEAD]:
self.enterOuterAlt(localctx, 4)
self.state = 117
self.match(HTTPIDL.HEAD)
pass
elif token in [HTTPIDL.TRACE]:
self.enterOuterAlt(localctx, 5)
self.state = 118
self.match(HTTPIDL.TRACE)
pass
elif token in [HTTPIDL.CONNECT]:
self.enterOuterAlt(localctx, 6)
self.state = 119
self.match(HTTPIDL.CONNECT)
pass
elif token in [HTTPIDL.OPTIONS]:
self.enterOuterAlt(localctx, 7)
self.state = 120
self.match(HTTPIDL.OPTIONS)
pass
elif token in [HTTPIDL.POST]:
self.enterOuterAlt(localctx, 8)
self.state = 121
self.match(HTTPIDL.POST)
pass
elif token in [HTTPIDL.PUT]:
self.enterOuterAlt(localctx, 9)
self.state = 122
self.match(HTTPIDL.PUT)
pass
elif token in [HTTPIDL.PATCH]:
self.enterOuterAlt(localctx, 10)
self.state = 123
self.match(HTTPIDL.PATCH)
pass
elif token in [HTTPIDL.DELETE]:
self.enterOuterAlt(localctx, 11)
self.state = 124
self.match(HTTPIDL.DELETE)
pass
elif token in [HTTPIDL.REQUEST]:
self.enterOuterAlt(localctx, 12)
self.state = 125
self.match(HTTPIDL.REQUEST)
pass
elif token in [HTTPIDL.RESPONSE]:
self.enterOuterAlt(localctx, 13)
self.state = 126
self.match(HTTPIDL.RESPONSE)
pass
elif token in [HTTPIDL.RCURLY]:
self.enterOuterAlt(localctx, 14)
self.state = 127
self.match(HTTPIDL.RCURLY)
pass
elif token in [HTTPIDL.DOLLAR]:
self.enterOuterAlt(localctx, 15)
self.state = 128
self.match(HTTPIDL.DOLLAR)
pass
elif token in [HTTPIDL.LABRACKET]:
self.enterOuterAlt(localctx, 16)
self.state = 129
self.match(HTTPIDL.LABRACKET)
pass
elif token in [HTTPIDL.RABRACKET]:
self.enterOuterAlt(localctx, 17)
self.state = 130
self.match(HTTPIDL.RABRACKET)
pass
elif token in [HTTPIDL.LPAREN]:
self.enterOuterAlt(localctx, 18)
self.state = 131
self.match(HTTPIDL.LPAREN)
pass
elif token in [HTTPIDL.RPAREN]:
self.enterOuterAlt(localctx, 19)
self.state = 132
self.match(HTTPIDL.RPAREN)
pass
elif token in [HTTPIDL.COMMA]:
self.enterOuterAlt(localctx, 20)
self.state = 133
self.match(HTTPIDL.COMMA)
pass
elif token in [HTTPIDL.INT32]:
self.enterOuterAlt(localctx, 21)
self.state = 134
self.match(HTTPIDL.INT32)
pass
elif token in [HTTPIDL.UINT32]:
self.enterOuterAlt(localctx, 22)
self.state = 135
self.match(HTTPIDL.UINT32)
pass
elif token in [HTTPIDL.INT64]:
self.enterOuterAlt(localctx, 23)
self.state = 136
self.match(HTTPIDL.INT64)
pass
elif token in [HTTPIDL.UINT64]:
self.enterOuterAlt(localctx, 24)
self.state = 137
self.match(HTTPIDL.UINT64)
pass
elif token in [HTTPIDL.BOOL]:
self.enterOuterAlt(localctx, 25)
self.state = 138
self.match(HTTPIDL.BOOL)
pass
elif token in [HTTPIDL.DOUBLE]:
self.enterOuterAlt(localctx, 26)
self.state = 139
self.match(HTTPIDL.DOUBLE)
pass
elif token in [HTTPIDL.STRING]:
self.enterOuterAlt(localctx, 27)
self.state = 140
self.match(HTTPIDL.STRING)
pass
elif token in [HTTPIDL.FILE]:
self.enterOuterAlt(localctx, 28)
self.state = 141
self.match(HTTPIDL.FILE)
pass
elif token in [HTTPIDL.BLOB]:
self.enterOuterAlt(localctx, 29)
self.state = 142
self.match(HTTPIDL.BLOB)
pass
elif token in [HTTPIDL.ARRAY]:
self.enterOuterAlt(localctx, 30)
self.state = 143
self.match(HTTPIDL.ARRAY)
pass
elif token in [HTTPIDL.DICT]:
self.enterOuterAlt(localctx, 31)
self.state = 144
self.match(HTTPIDL.DICT)
pass
elif token in [HTTPIDL.COMMENT]:
self.enterOuterAlt(localctx, 32)
self.state = 145
self.match(HTTPIDL.COMMENT)
pass
elif token in [HTTPIDL.IDENT]:
self.enterOuterAlt(localctx, 33)
self.state = 146
self.match(HTTPIDL.IDENT)
pass
elif token in [HTTPIDL.ESCAPE]:
self.enterOuterAlt(localctx, 34)
self.state = 147
self.escaped()
pass
elif token in [HTTPIDL.ANYCHAR]:
self.enterOuterAlt(localctx, 35)
self.state = 148
self.match(HTTPIDL.ANYCHAR)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class EscapedContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.EscapedContext, self).__init__(parent, invokingState)
self.parser = parser
def ESCAPE(self, i=None):
if i is None:
return self.getTokens(HTTPIDL.ESCAPE)
else:
return self.getToken(HTTPIDL.ESCAPE, i)
def SLASH(self):
return self.getToken(HTTPIDL.SLASH, 0)
def LCURLY(self):
return self.getToken(HTTPIDL.LCURLY, 0)
def RCURLY(self):
return self.getToken(HTTPIDL.RCURLY, 0)
def DOLLAR(self):
return self.getToken(HTTPIDL.DOLLAR, 0)
def LABRACKET(self):
return self.getToken(HTTPIDL.LABRACKET, 0)
def RABRACKET(self):
return self.getToken(HTTPIDL.RABRACKET, 0)
def LPAREN(self):
return self.getToken(HTTPIDL.LPAREN, 0)
def RPAREN(self):
return self.getToken(HTTPIDL.RPAREN, 0)
def COMMA(self):
return self.getToken(HTTPIDL.COMMA, 0)
def ASSIGN(self):
return self.getToken(HTTPIDL.ASSIGN, 0)
def SEMICOLON(self):
return self.getToken(HTTPIDL.SEMICOLON, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_escaped
def escaped(self):
localctx = HTTPIDL.EscapedContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_escaped)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 151
self.match(HTTPIDL.ESCAPE)
self.state = 152
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.SLASH) | (1 << HTTPIDL.LCURLY) | (1 << HTTPIDL.RCURLY) | (1 << HTTPIDL.LPAREN) | (1 << HTTPIDL.RPAREN) | (1 << HTTPIDL.DOLLAR) | (1 << HTTPIDL.LABRACKET) | (1 << HTTPIDL.RABRACKET) | (1 << HTTPIDL.COMMA) | (1 << HTTPIDL.ASSIGN) | (1 << HTTPIDL.SEMICOLON) | (1 << HTTPIDL.ESCAPE))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StructBodyContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.StructBodyContext, self).__init__(parent, invokingState)
self.parser = parser
def LCURLY(self):
return self.getToken(HTTPIDL.LCURLY, 0)
def RCURLY(self):
return self.getToken(HTTPIDL.RCURLY, 0)
def singleParameter(self):
return self.getTypedRuleContext(HTTPIDL.SingleParameterContext,0)
def parameterMap(self, i=None):
if i is None:
return self.getTypedRuleContexts(HTTPIDL.ParameterMapContext)
else:
return self.getTypedRuleContext(HTTPIDL.ParameterMapContext,i)
def getRuleIndex(self):
return HTTPIDL.RULE_structBody
def structBody(self):
localctx = HTTPIDL.StructBodyContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_structBody)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 154
self.match(HTTPIDL.LCURLY)
self.state = 164
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
if la_ == 1:
self.state = 156
self._errHandler.sync(self)
_la = self._input.LA(1)
if (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.INT32) | (1 << HTTPIDL.UINT32) | (1 << HTTPIDL.INT64) | (1 << HTTPIDL.UINT64) | (1 << HTTPIDL.BOOL) | (1 << HTTPIDL.DOUBLE) | (1 << HTTPIDL.STRING) | (1 << HTTPIDL.FILE) | (1 << HTTPIDL.BLOB) | (1 << HTTPIDL.ARRAY) | (1 << HTTPIDL.DICT) | (1 << HTTPIDL.IDENT))) != 0):
self.state = 155
self.singleParameter()
pass
elif la_ == 2:
self.state = 161
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << HTTPIDL.INT32) | (1 << HTTPIDL.UINT32) | (1 << HTTPIDL.INT64) | (1 << HTTPIDL.UINT64) | (1 << HTTPIDL.BOOL) | (1 << HTTPIDL.DOUBLE) | (1 << HTTPIDL.STRING) | (1 << HTTPIDL.FILE) | (1 << HTTPIDL.BLOB) | (1 << HTTPIDL.ARRAY) | (1 << HTTPIDL.DICT) | (1 << HTTPIDL.IDENT))) != 0):
self.state = 158
self.parameterMap()
self.state = 163
self._errHandler.sync(self)
_la = self._input.LA(1)
pass
self.state = 166
self.match(HTTPIDL.RCURLY)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SingleParameterContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.SingleParameterContext, self).__init__(parent, invokingState)
self.parser = parser
def paramType(self):
return self.getTypedRuleContext(HTTPIDL.ParamTypeContext,0)
def SEMICOLON(self):
return self.getToken(HTTPIDL.SEMICOLON, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_singleParameter
def singleParameter(self):
localctx = HTTPIDL.SingleParameterContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_singleParameter)
try:
self.enterOuterAlt(localctx, 1)
self.state = 168
self.paramType()
self.state = 169
self.match(HTTPIDL.SEMICOLON)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParameterMapContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ParameterMapContext, self).__init__(parent, invokingState)
self.parser = parser
def paramType(self):
return self.getTypedRuleContext(HTTPIDL.ParamTypeContext,0)
def key(self):
return self.getTypedRuleContext(HTTPIDL.KeyContext,0)
def SEMICOLON(self):
return self.getToken(HTTPIDL.SEMICOLON, 0)
def ASSIGN(self):
return self.getToken(HTTPIDL.ASSIGN, 0)
def value(self):
return self.getTypedRuleContext(HTTPIDL.ValueContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_parameterMap
def parameterMap(self):
localctx = HTTPIDL.ParameterMapContext(self, self._ctx, self.state)
self.enterRule(localctx, 30, self.RULE_parameterMap)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 171
self.paramType()
self.state = 172
self.key()
self.state = 175
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==HTTPIDL.ASSIGN:
self.state = 173
self.match(HTTPIDL.ASSIGN)
self.state = 174
self.value()
self.state = 177
self.match(HTTPIDL.SEMICOLON)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ParamTypeContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ParamTypeContext, self).__init__(parent, invokingState)
self.parser = parser
def genericType(self):
return self.getTypedRuleContext(HTTPIDL.GenericTypeContext,0)
def baseType(self):
return self.getTypedRuleContext(HTTPIDL.BaseTypeContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_paramType
def paramType(self):
localctx = HTTPIDL.ParamTypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 32, self.RULE_paramType)
try:
self.state = 181
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [HTTPIDL.ARRAY, HTTPIDL.DICT]:
self.enterOuterAlt(localctx, 1)
self.state = 179
self.genericType()
pass
elif token in [HTTPIDL.INT32, HTTPIDL.UINT32, HTTPIDL.INT64, HTTPIDL.UINT64, HTTPIDL.BOOL, HTTPIDL.DOUBLE, HTTPIDL.STRING, HTTPIDL.FILE, HTTPIDL.BLOB, HTTPIDL.IDENT]:
self.enterOuterAlt(localctx, 2)
self.state = 180
self.baseType()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class GenericTypeContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.GenericTypeContext, self).__init__(parent, invokingState)
self.parser = parser
def ARRAY(self):
return self.getToken(HTTPIDL.ARRAY, 0)
def arrayGenericParam(self):
return self.getTypedRuleContext(HTTPIDL.ArrayGenericParamContext,0)
def DICT(self):
return self.getToken(HTTPIDL.DICT, 0)
def dictGenericParam(self):
return self.getTypedRuleContext(HTTPIDL.DictGenericParamContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_genericType
def genericType(self):
localctx = HTTPIDL.GenericTypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_genericType)
try:
self.state = 187
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [HTTPIDL.ARRAY]:
self.enterOuterAlt(localctx, 1)
self.state = 183
self.match(HTTPIDL.ARRAY)
self.state = 184
self.arrayGenericParam()
pass
elif token in [HTTPIDL.DICT]:
self.enterOuterAlt(localctx, 2)
self.state = 185
self.match(HTTPIDL.DICT)
self.state = 186
self.dictGenericParam()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ArrayGenericParamContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ArrayGenericParamContext, self).__init__(parent, invokingState)
self.parser = parser
def LABRACKET(self):
return self.getToken(HTTPIDL.LABRACKET, 0)
def paramType(self):
return self.getTypedRuleContext(HTTPIDL.ParamTypeContext,0)
def RABRACKET(self):
return self.getToken(HTTPIDL.RABRACKET, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_arrayGenericParam
def arrayGenericParam(self):
localctx = HTTPIDL.ArrayGenericParamContext(self, self._ctx, self.state)
self.enterRule(localctx, 36, self.RULE_arrayGenericParam)
try:
self.enterOuterAlt(localctx, 1)
self.state = 189
self.match(HTTPIDL.LABRACKET)
self.state = 190
self.paramType()
self.state = 191
self.match(HTTPIDL.RABRACKET)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class DictGenericParamContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.DictGenericParamContext, self).__init__(parent, invokingState)
self.parser = parser
def LABRACKET(self):
return self.getToken(HTTPIDL.LABRACKET, 0)
def baseType(self):
return self.getTypedRuleContext(HTTPIDL.BaseTypeContext,0)
def COMMA(self):
return self.getToken(HTTPIDL.COMMA, 0)
def paramType(self):
return self.getTypedRuleContext(HTTPIDL.ParamTypeContext,0)
def RABRACKET(self):
return self.getToken(HTTPIDL.RABRACKET, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_dictGenericParam
def dictGenericParam(self):
localctx = HTTPIDL.DictGenericParamContext(self, self._ctx, self.state)
self.enterRule(localctx, 38, self.RULE_dictGenericParam)
try:
self.enterOuterAlt(localctx, 1)
self.state = 193
self.match(HTTPIDL.LABRACKET)
self.state = 194
self.baseType()
self.state = 195
self.match(HTTPIDL.COMMA)
self.state = 196
self.paramType()
self.state = 197
self.match(HTTPIDL.RABRACKET)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BaseTypeContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.BaseTypeContext, self).__init__(parent, invokingState)
self.parser = parser
def INT32(self):
return self.getToken(HTTPIDL.INT32, 0)
def UINT32(self):
return self.getToken(HTTPIDL.UINT32, 0)
def INT64(self):
return self.getToken(HTTPIDL.INT64, 0)
def UINT64(self):
return self.getToken(HTTPIDL.UINT64, 0)
def BOOL(self):
return self.getToken(HTTPIDL.BOOL, 0)
def DOUBLE(self):
return self.getToken(HTTPIDL.DOUBLE, 0)
def STRING(self):
return self.getToken(HTTPIDL.STRING, 0)
def FILE(self):
return self.getToken(HTTPIDL.FILE, 0)
def BLOB(self):
return self.getToken(HTTPIDL.BLOB, 0)
def structName(self):
return self.getTypedRuleContext(HTTPIDL.StructNameContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_baseType
def baseType(self):
localctx = HTTPIDL.BaseTypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 40, self.RULE_baseType)
try:
self.state = 209
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [HTTPIDL.INT32]:
self.enterOuterAlt(localctx, 1)
self.state = 199
self.match(HTTPIDL.INT32)
pass
elif token in [HTTPIDL.UINT32]:
self.enterOuterAlt(localctx, 2)
self.state = 200
self.match(HTTPIDL.UINT32)
pass
elif token in [HTTPIDL.INT64]:
self.enterOuterAlt(localctx, 3)
self.state = 201
self.match(HTTPIDL.INT64)
pass
elif token in [HTTPIDL.UINT64]:
self.enterOuterAlt(localctx, 4)
self.state = 202
self.match(HTTPIDL.UINT64)
pass
elif token in [HTTPIDL.BOOL]:
self.enterOuterAlt(localctx, 5)
self.state = 203
self.match(HTTPIDL.BOOL)
pass
elif token in [HTTPIDL.DOUBLE]:
self.enterOuterAlt(localctx, 6)
self.state = 204
self.match(HTTPIDL.DOUBLE)
pass
elif token in [HTTPIDL.STRING]:
self.enterOuterAlt(localctx, 7)
self.state = 205
self.match(HTTPIDL.STRING)
pass
elif token in [HTTPIDL.FILE]:
self.enterOuterAlt(localctx, 8)
self.state = 206
self.match(HTTPIDL.FILE)
pass
elif token in [HTTPIDL.BLOB]:
self.enterOuterAlt(localctx, 9)
self.state = 207
self.match(HTTPIDL.BLOB)
pass
elif token in [HTTPIDL.IDENT]:
self.enterOuterAlt(localctx, 10)
self.state = 208
self.structName()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class KeyContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.KeyContext, self).__init__(parent, invokingState)
self.parser = parser
def identifier(self):
return self.getTypedRuleContext(HTTPIDL.IdentifierContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_key
def key(self):
localctx = HTTPIDL.KeyContext(self, self._ctx, self.state)
self.enterRule(localctx, 42, self.RULE_key)
try:
self.enterOuterAlt(localctx, 1)
self.state = 211
self.identifier()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ValueContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.ValueContext, self).__init__(parent, invokingState)
self.parser = parser
def string(self):
return self.getTypedRuleContext(HTTPIDL.StringContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_value
def value(self):
localctx = HTTPIDL.ValueContext(self, self._ctx, self.state)
self.enterRule(localctx, 44, self.RULE_value)
try:
self.enterOuterAlt(localctx, 1)
self.state = 213
self.string()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StructNameContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.StructNameContext, self).__init__(parent, invokingState)
self.parser = parser
def identifier(self):
return self.getTypedRuleContext(HTTPIDL.IdentifierContext,0)
def getRuleIndex(self):
return HTTPIDL.RULE_structName
def structName(self):
localctx = HTTPIDL.StructNameContext(self, self._ctx, self.state)
self.enterRule(localctx, 46, self.RULE_structName)
try:
self.enterOuterAlt(localctx, 1)
self.state = 215
self.identifier()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class IdentifierContext(ParserRuleContext):
def __init__(self, parser, parent=None, invokingState=-1):
super(HTTPIDL.IdentifierContext, self).__init__(parent, invokingState)
self.parser = parser
def IDENT(self):
return self.getToken(HTTPIDL.IDENT, 0)
def getRuleIndex(self):
return HTTPIDL.RULE_identifier
def identifier(self):
localctx = HTTPIDL.IdentifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 48, self.RULE_identifier)
try:
self.enterOuterAlt(localctx, 1)
self.state = 217
self.match(HTTPIDL.IDENT)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
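# Editor's sketch: a minimal driver for this generated parser (not part of the
# ANTLR output). The antlr4 Python runtime and the companion generated lexer
# module "HTTPIDLLexer" are assumed to be importable; both names are
# assumptions made for illustration only.
def _example_parse_httpidl(source_text):
    from antlr4 import InputStream, CommonTokenStream
    from HTTPIDLLexer import HTTPIDLLexer  # assumed companion generated lexer
    lexer = HTTPIDLLexer(InputStream(source_text))
    parser = HTTPIDL(CommonTokenStream(lexer))
    return parser.entry()  # 'entry' is the grammar's start rule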
|
|
import random
import unittest
from ..models import CaseData
from ..exceptions import PropertyExpectedException
from .fixtures import get_default_case_data
class TestCaseData(unittest.TestCase):
def test_total_income_calculation(self):
default_data = get_default_case_data(
you__income__earnings=0,
you__income__self_employment_drawings=0,
you__income__benefits=0,
you__income__tax_credits=0,
you__income__child_benefits=0,
you__income__maintenance_received=0,
you__income__pension=60,
you__income__other_income=0,
)
cd = CaseData(**default_data)
ti = cd.total_income
income = cd.you.income
gross_income_orig = 0
for prop in income.PROPERTY_META.keys():
part = getattr(income, prop, 0)
gross_income_orig += part
self.assertEqual(gross_income_orig, ti)
def test_total_income_calculation_with_partner(self):
combined_income = 31710
default_data = get_default_case_data(
you__income__earnings=10000,
you__income__self_employment_drawings=10,
you__income__benefits=20,
you__income__tax_credits=30,
you__income__child_benefits=40,
you__income__maintenance_received=50,
you__income__pension=60,
you__income__other_income=4000,
partner__income__earnings=10000,
partner__income__self_employment_drawings=100,
partner__income__benefits=200,
partner__income__tax_credits=300,
partner__income__child_benefits=0,
partner__income__maintenance_received=400,
partner__income__pension=500,
partner__income__other_income=6000,
facts__has_partner=True,
)
cd = CaseData(**default_data)
ti = cd.total_income
income = cd.you.income
gross_income_orig = (
income.earnings
+ income.self_employment_drawings
+ income.benefits
+ income.tax_credits
+ income.child_benefits
+ income.maintenance_received
+ income.pension
+ income.other_income
)
gross_income_orig += (
cd.partner.income.earnings
+ cd.partner.income.self_employment_drawings
+ cd.partner.income.benefits
+ cd.partner.income.tax_credits
+ cd.partner.income.child_benefits
+ cd.partner.income.maintenance_received
+ cd.partner.income.pension
+ cd.partner.income.other_income
)
self.assertEqual(gross_income_orig, ti)
self.assertEqual(combined_income, ti)
def test_bad_property_set_exception(self):
cdd = get_default_case_data(foo="bar", bar__baz=24)
with self.assertRaises(PropertyExpectedException):
CaseData(**cdd)
def test_getattr_raises_if_accessing_invalid_prop(self):
with self.assertRaises(AttributeError):
cd = CaseData()
cd.foo
def test_get_total_income_no_partner(self):
cdd = get_default_case_data(
you__income__earnings=265700,
you__income__self_employment_drawings=10,
you__income__benefits=20,
you__income__tax_credits=30,
you__income__child_benefits=40,
you__income__maintenance_received=50,
you__income__pension=60,
you__income__other_income=0,
)
cd = CaseData(**cdd)
self.assertFalse(cd.facts.has_partner)
self.assertEqual(265910, cd.total_income)
# TODO: fix this to check nested properties
# def test_provide_partner_earnings_required_partner_other_income(self):
# with self.assertRaises(PropertyExpectedException):
# cdd = get_default_case_data(
# you__income__earnings=1,
# you__income__other_income=1,
# partner__income__earnings=1,
# facts__has_partner=True
# )
# cd = CaseData(**cdd)
# cd.total_income
def test_get_total_income_with_partner(self):
cdd = get_default_case_data(
you__income__earnings=265700,
you__income__self_employment_drawings=10,
you__income__benefits=20,
you__income__tax_credits=30,
you__income__child_benefits=40,
you__income__maintenance_received=50,
you__income__pension=60,
you__income__other_income=0,
partner__income__earnings=100,
partner__income__self_employment_drawings=100,
partner__income__benefits=200,
partner__income__tax_credits=300,
partner__income__child_benefits=0,
partner__income__maintenance_received=400,
partner__income__pension=500,
partner__income__other_income=2,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(267512, cd.total_income)
def test_is_partner_disputed_true(self):
cdd = get_default_case_data(facts__has_partner=True, facts__is_partner_opponent=True)
cd = CaseData(**cdd)
self.assertTrue(cd.facts.has_disputed_partner)
def test_is_partner_disputed_false(self):
cdd = get_default_case_data(facts__has_partner=False, facts__is_partner_opponent=True)
cd = CaseData(**cdd)
self.assertFalse(cd.facts.has_disputed_partner)
def test_is_partner_disputed_not_opponent(self):
cdd = get_default_case_data(facts__has_partner=True, facts__is_partner_opponent=False)
cd = CaseData(**cdd)
self.assertFalse(cd.facts.has_disputed_partner)
def test_is_partner_disputed_no_partner_not_opponent(self):
cdd = get_default_case_data(facts__has_partner=False, facts__is_partner_opponent=False)
cd = CaseData(**cdd)
self.assertFalse(cd.facts.has_disputed_partner)
def test_get_non_disputed_liquid_capital(self):
cdd = get_default_case_data(
you__savings__bank_balance=0,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
)
cd = CaseData(**cdd)
self.assertEqual(0, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_only(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
)
cd = CaseData(**cdd)
self.assertEqual(10000, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_credit_balance(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
you__savings__credit_balance=10,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
)
cd = CaseData(**cdd)
self.assertEqual(10010, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_valuable(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
you__savings__credit_balance=0,
you__savings__asset_balance=1000,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
)
cd = CaseData(**cdd)
self.assertEqual(11000, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_investment_balance(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=5000,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
)
cd = CaseData(**cdd)
self.assertEqual(15000, cd.non_disputed_liquid_capital)
# TODO: Fix invalid state check
# def test_inconsistent_state(self):
# cdd = get_default_case_data(
# you__savings__bank_balance=10000,
# you__savings__credit_balance=0,
# you__savings__asset_balance=0,
# you__savings__investment_balance=0,
# partner__savings__bank_balance=10000,
# partner__savings__credit_balance=0,
# partner__savings__asset_balance=0,
# partner__savings__investment_balance=0,
# facts__has_partner=False,
# )
# with self.assertRaises(InvalidStateException):
# cd = CaseData(**cdd)
def test_get_non_disputed_liquid_capital_savings_with_partner(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=1,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(10001, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_with_partner_credit_balance(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
            you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=20,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(10020, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_with_partner_savings(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
            you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=10,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(10010, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_with_partner_valuables(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
            you__savings__credit_balance=0,
you__savings__asset_balance=5000,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(15000, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_with_partner_investment_balance(self):
cdd = get_default_case_data(
you__savings__bank_balance=10000,
            you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=100,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(10100, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_only_partner_savings(self):
cdd = get_default_case_data(
you__savings__bank_balance=0,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=100,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(100, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_only_partner_credit_balance(self):
cdd = get_default_case_data(
you__savings__bank_balance=0,
you__savings__credit_balance=200,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=0,
partner__savings__credit_balance=0,
partner__savings__asset_balance=0,
partner__savings__investment_balance=0,
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(200, cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_random_values_no_partner(self):
for i in range(0, 500):
            # poor man's QuickCheck: randomised inputs to exercise the sum
steps = [random.randint(0, 50000)]
for n in range(3):
step = random.randint(0, steps[-1])
steps.append(step)
cdd = get_default_case_data(
you__savings__bank_balance=steps[0],
you__savings__credit_balance=steps[1],
you__savings__asset_balance=steps[2],
you__savings__investment_balance=steps[3],
)
cd = CaseData(**cdd)
self.assertEqual(sum(steps), cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_random_values_with_partner(self):
for i in range(0, 500):
            # poor man's QuickCheck: randomised inputs to exercise the sum
steps = [random.randint(0, 50000)]
for n in range(7):
step = random.randint(0, steps[-1])
steps.append(step)
cdd = get_default_case_data(
you__savings__bank_balance=steps[0],
you__savings__credit_balance=steps[1],
you__savings__asset_balance=steps[2],
you__savings__investment_balance=steps[3],
partner__savings__bank_balance=steps[4],
partner__savings__credit_balance=steps[5],
partner__savings__asset_balance=steps[6],
partner__savings__investment_balance=steps[7],
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(sum(steps), cd.non_disputed_liquid_capital)
def test_get_non_disputed_liquid_capital_savings_random_values_only_partner(self):
for i in range(0, 500):
            # poor man's QuickCheck: randomised inputs to exercise the sum
steps = [random.randint(0, 50000)]
for n in range(3):
step = random.randint(0, steps[-1])
steps.append(step)
cdd = get_default_case_data(
you__savings__bank_balance=0,
you__savings__credit_balance=0,
you__savings__asset_balance=0,
you__savings__investment_balance=0,
partner__savings__bank_balance=steps[0],
partner__savings__credit_balance=steps[1],
partner__savings__asset_balance=steps[2],
partner__savings__investment_balance=steps[3],
facts__has_partner=True,
)
cd = CaseData(**cdd)
self.assertEqual(sum(steps), cd.non_disputed_liquid_capital)
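# Editor's sketch: a standalone reference for the aggregation these tests
# exercise. It is an illustration of the expected behaviour, not the real
# CaseData implementation; the field list mirrors the kwargs used above.
def _total_income_reference(you_income, partner_income=None, has_partner=False):
    fields = (
        "earnings", "self_employment_drawings", "benefits", "tax_credits",
        "child_benefits", "maintenance_received", "pension", "other_income",
    )
    total = sum(you_income.get(field, 0) for field in fields)
    if has_partner and partner_income:
        total += sum(partner_income.get(field, 0) for field in fields)
    return total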
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import struct
import array
import string
import re
from google.pyglib.gexcept import AbstractMethod
import httplib
__all__ = ['ProtocolMessage', 'Encoder', 'Decoder',
'ProtocolBufferDecodeError',
'ProtocolBufferEncodeError',
'ProtocolBufferReturnError']
URL_RE = re.compile('^(https?)://([^/]+)(/.*)$')
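# Matches absolute http(s) URLs; sendCommand() below uses it to follow 302
# redirects via the Location response header.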
class ProtocolMessage:
def __init__(self, contents=None):
raise AbstractMethod
def Clear(self):
raise AbstractMethod
def IsInitialized(self, debug_strs=None):
raise AbstractMethod
def Encode(self):
try:
return self._CEncode()
except AbstractMethod:
e = Encoder()
self.Output(e)
return e.buffer().tostring()
def _CEncode(self):
raise AbstractMethod
def ParseFromString(self, s):
self.Clear()
self.MergeFromString(s)
return
def MergeFromString(self, s):
try:
self._CMergeFromString(s)
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
except AbstractMethod:
a = array.array('B')
a.fromstring(s)
d = Decoder(a, 0, len(a))
self.Merge(d)
return
def _CMergeFromString(self, s):
raise AbstractMethod
def __getstate__(self):
return self.Encode()
def __setstate__(self, contents_):
self.__init__(contents=contents_)
def sendCommand(self, server, url, response, follow_redirects=1,
secure=0, keyfile=None, certfile=None):
data = self.Encode()
if secure:
if keyfile and certfile:
conn = httplib.HTTPSConnection(server, key_file=keyfile,
cert_file=certfile)
else:
conn = httplib.HTTPSConnection(server)
else:
conn = httplib.HTTPConnection(server)
conn.putrequest("POST", url)
conn.putheader("Content-Length", "%d" %len(data))
conn.endheaders()
conn.send(data)
resp = conn.getresponse()
if follow_redirects > 0 and resp.status == 302:
m = URL_RE.match(resp.getheader('Location'))
if m:
protocol, server, url = m.groups()
return self.sendCommand(server, url, response,
follow_redirects=follow_redirects - 1,
secure=(protocol == 'https'),
keyfile=keyfile,
certfile=certfile)
if resp.status != 200:
raise ProtocolBufferReturnError(resp.status)
if response is not None:
response.ParseFromString(resp.read())
return response
def sendSecureCommand(self, server, keyfile, certfile, url, response,
follow_redirects=1):
return self.sendCommand(server, url, response,
follow_redirects=follow_redirects,
secure=1, keyfile=keyfile, certfile=certfile)
def __str__(self, prefix="", printElemNumber=0):
raise AbstractMethod
def ToASCII(self):
return self._CToASCII(ProtocolMessage._SYMBOLIC_FULL_ASCII)
def ToCompactASCII(self):
return self._CToASCII(ProtocolMessage._NUMERIC_ASCII)
def ToShortASCII(self):
return self._CToASCII(ProtocolMessage._SYMBOLIC_SHORT_ASCII)
_NUMERIC_ASCII = 0
_SYMBOLIC_SHORT_ASCII = 1
_SYMBOLIC_FULL_ASCII = 2
def _CToASCII(self, output_format):
raise AbstractMethod
def ParseASCII(self, ascii_string):
raise AbstractMethod
def ParseASCIIIgnoreUnknown(self, ascii_string):
raise AbstractMethod
def Output(self, e):
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferEncodeError, '\n\t'.join(dbg)
self.OutputUnchecked(e)
return
def OutputUnchecked(self, e):
raise AbstractMethod
def Parse(self, d):
self.Clear()
self.Merge(d)
return
def Merge(self, d):
self.TryMerge(d)
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
return
def TryMerge(self, d):
raise AbstractMethod
def CopyFrom(self, pb):
if (pb == self): return
self.Clear()
self.MergeFrom(pb)
def MergeFrom(self, pb):
raise AbstractMethod
def lengthVarInt32(self, n):
return self.lengthVarInt64(n)
def lengthVarInt64(self, n):
if n < 0:
return 10
result = 0
while 1:
result += 1
n >>= 7
if n == 0:
break
return result
def lengthString(self, n):
return self.lengthVarInt32(n) + n
def DebugFormat(self, value):
return "%s" % value
def DebugFormatInt32(self, value):
if (value <= -2000000000 or value >= 2000000000):
return self.DebugFormatFixed32(value)
return "%d" % value
def DebugFormatInt64(self, value):
if (value <= -2000000000 or value >= 2000000000):
return self.DebugFormatFixed64(value)
return "%d" % value
def DebugFormatString(self, value):
def escape(c):
o = ord(c)
if o == 10: return r"\n"
if o == 39: return r"\'"
if o == 34: return r'\"'
if o == 92: return r"\\"
if o >= 127 or o < 32: return "\\%03o" % o
return c
return '"' + "".join([escape(c) for c in value]) + '"'
def DebugFormatFloat(self, value):
return "%ff" % value
def DebugFormatFixed32(self, value):
if (value < 0): value += (1L<<32)
return "0x%x" % value
def DebugFormatFixed64(self, value):
if (value < 0): value += (1L<<64)
return "0x%x" % value
def DebugFormatBool(self, value):
if value:
return "true"
else:
return "false"
class Encoder:
NUMERIC = 0
DOUBLE = 1
STRING = 2
STARTGROUP = 3
ENDGROUP = 4
FLOAT = 5
MAX_TYPE = 6
def __init__(self):
self.buf = array.array('B')
return
def buffer(self):
return self.buf
def put8(self, v):
if v < 0 or v >= (1<<8): raise ProtocolBufferEncodeError, "u8 too big"
self.buf.append(v & 255)
return
def put16(self, v):
if v < 0 or v >= (1<<16): raise ProtocolBufferEncodeError, "u16 too big"
self.buf.append((v >> 0) & 255)
self.buf.append((v >> 8) & 255)
return
def put32(self, v):
if v < 0 or v >= (1L<<32): raise ProtocolBufferEncodeError, "u32 too big"
self.buf.append((v >> 0) & 255)
self.buf.append((v >> 8) & 255)
self.buf.append((v >> 16) & 255)
self.buf.append((v >> 24) & 255)
return
def put64(self, v):
if v < 0 or v >= (1L<<64): raise ProtocolBufferEncodeError, "u64 too big"
self.buf.append((v >> 0) & 255)
self.buf.append((v >> 8) & 255)
self.buf.append((v >> 16) & 255)
self.buf.append((v >> 24) & 255)
self.buf.append((v >> 32) & 255)
self.buf.append((v >> 40) & 255)
self.buf.append((v >> 48) & 255)
self.buf.append((v >> 56) & 255)
return
def putVarInt32(self, v):
if v >= (1L << 31) or v < -(1L << 31):
raise ProtocolBufferEncodeError, "int32 too big"
self.putVarInt64(v)
return
def putVarInt64(self, v):
if v >= (1L << 63) or v < -(1L << 63):
raise ProtocolBufferEncodeError, "int64 too big"
if v < 0:
v += (1L << 64)
self.putVarUint64(v)
return
def putVarUint64(self, v):
if v < 0 or v >= (1L << 64):
raise ProtocolBufferEncodeError, "uint64 too big"
while 1:
bits = v & 127
v >>= 7
if (v != 0):
bits |= 128
self.buf.append(bits)
if v == 0:
break
return
def putFloat(self, v):
a = array.array('B')
a.fromstring(struct.pack("f", v))
self.buf.extend(a)
return
def putDouble(self, v):
a = array.array('B')
a.fromstring(struct.pack("d", v))
self.buf.extend(a)
return
def putBoolean(self, v):
if v:
self.buf.append(1)
else:
self.buf.append(0)
return
def putPrefixedString(self, v):
self.putVarInt32(len(v))
a = array.array('B')
a.fromstring(v)
self.buf.extend(a)
return
def putRawString(self, v):
a = array.array('B')
a.fromstring(v)
self.buf.extend(a)
class Decoder:
def __init__(self, buf, idx, limit):
self.buf = buf
self.idx = idx
self.limit = limit
return
def avail(self):
return self.limit - self.idx
def buffer(self):
return self.buf
def pos(self):
return self.idx
def skip(self, n):
if self.idx + n > self.limit: raise ProtocolBufferDecodeError, "truncated"
self.idx += n
return
def skipData(self, tag):
t = tag & 7
if t == Encoder.NUMERIC:
self.getVarInt64()
elif t == Encoder.DOUBLE:
self.skip(8)
elif t == Encoder.STRING:
n = self.getVarInt32()
self.skip(n)
elif t == Encoder.STARTGROUP:
while 1:
t = self.getVarInt32()
if (t & 7) == Encoder.ENDGROUP:
break
else:
self.skipData(t)
if (t - Encoder.ENDGROUP) != (tag - Encoder.STARTGROUP):
raise ProtocolBufferDecodeError, "corrupted"
elif t == Encoder.ENDGROUP:
raise ProtocolBufferDecodeError, "corrupted"
elif t == Encoder.FLOAT:
self.skip(4)
else:
raise ProtocolBufferDecodeError, "corrupted"
def get8(self):
if self.idx >= self.limit: raise ProtocolBufferDecodeError, "truncated"
c = self.buf[self.idx]
self.idx += 1
return c
def get16(self):
if self.idx + 2 > self.limit: raise ProtocolBufferDecodeError, "truncated"
c = self.buf[self.idx]
d = self.buf[self.idx + 1]
self.idx += 2
return (d << 8) | c
def get32(self):
if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
c = self.buf[self.idx]
d = self.buf[self.idx + 1]
e = self.buf[self.idx + 2]
f = long(self.buf[self.idx + 3])
self.idx += 4
return (f << 24) | (e << 16) | (d << 8) | c
def get64(self):
if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
c = self.buf[self.idx]
d = self.buf[self.idx + 1]
e = self.buf[self.idx + 2]
f = long(self.buf[self.idx + 3])
g = long(self.buf[self.idx + 4])
h = long(self.buf[self.idx + 5])
i = long(self.buf[self.idx + 6])
j = long(self.buf[self.idx + 7])
self.idx += 8
return ((j << 56) | (i << 48) | (h << 40) | (g << 32) | (f << 24)
| (e << 16) | (d << 8) | c)
def getVarInt32(self):
v = self.getVarInt64()
if v >= (1L << 31) or v < -(1L << 31):
raise ProtocolBufferDecodeError, "corrupted"
return v
def getVarInt64(self):
result = self.getVarUint64()
if result >= (1L << 63):
result -= (1L << 64)
return result
def getVarUint64(self):
result = long(0)
shift = 0
while 1:
if shift >= 64: raise ProtocolBufferDecodeError, "corrupted"
b = self.get8()
result |= (long(b & 127) << shift)
shift += 7
if (b & 128) == 0:
if result >= (1L << 64): raise ProtocolBufferDecodeError, "corrupted"
return result
return result
def getFloat(self):
if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
a = self.buf[self.idx:self.idx+4]
self.idx += 4
return struct.unpack("f", a)[0]
def getDouble(self):
if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
a = self.buf[self.idx:self.idx+8]
self.idx += 8
return struct.unpack("d", a)[0]
def getBoolean(self):
b = self.get8()
if b != 0 and b != 1: raise ProtocolBufferDecodeError, "corrupted"
return b
def getPrefixedString(self):
length = self.getVarInt32()
if self.idx + length > self.limit:
raise ProtocolBufferDecodeError, "truncated"
r = self.buf[self.idx : self.idx + length]
self.idx += length
return r.tostring()
def getRawString(self):
r = self.buf[self.idx:self.limit]
self.idx = self.limit
return r.tostring()
class ProtocolBufferDecodeError(Exception): pass
class ProtocolBufferEncodeError(Exception): pass
class ProtocolBufferReturnError(Exception): pass
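# Minimal round-trip sketch (illustrative only, not part of the original module):
# encode a couple of varints with Encoder and read them back with Decoder.
if __name__ == '__main__':
  enc = Encoder()
  enc.putVarInt32(300)
  enc.putVarInt64(-1)
  buf = enc.buffer()
  dec = Decoder(buf, 0, len(buf))
  assert dec.getVarInt32() == 300
  assert dec.getVarInt64() == -1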
|
|
# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Lisa Hsu
import sys
import m5
from m5.defines import buildEnv
from m5.objects import *
from Benchmarks import *
import CpuConfig
import MemConfig
def _listCpuTypes(option, opt, value, parser):
CpuConfig.print_cpu_list()
sys.exit(0)
def _listMemTypes(option, opt, value, parser):
MemConfig.print_mem_list()
sys.exit(0)
def addCommonOptions(parser):
# system options
parser.add_option("--list-cpu-types",
action="callback", callback=_listCpuTypes,
help="List available CPU types")
parser.add_option("--cpu-type", type="choice", default="atomic",
choices=CpuConfig.cpu_names(),
help = "type of cpu to run with")
parser.add_option("--checker", action="store_true");
parser.add_option("-n", "--num-cpus", type="int", default=1)
parser.add_option("--sys-voltage", action="store", type="string",
default='1.0V',
help = """Top-level voltage for blocks running at system
power supply""")
parser.add_option("--sys-clock", action="store", type="string",
default='1GHz',
help = """Top-level clock for blocks running at system
speed""")
parser.add_option("--cpu-clock", action="store", type="string",
default='2GHz',
help="Clock for blocks running at CPU speed")
parser.add_option("--smt", action="store_true", default=False,
help = """
Only used if multiple programs are specified. If true,
then the number of threads per cpu is same as the
number of programs.""")
# Memory Options
parser.add_option("--list-mem-types",
action="callback", callback=_listMemTypes,
help="List available memory types")
parser.add_option("--mem-type", type="choice", default="simple_mem",
choices=MemConfig.mem_names(),
help = "type of memory to use")
parser.add_option("--mem-channels", type="int", default=1,
help = "number of memory channels")
parser.add_option("--mem-size", action="store", type="string",
default="512MB",
help="Specify the physical memory size (single memory)")
# Cache Options
parser.add_option("--caches", action="store_true")
parser.add_option("--l2cache", action="store_true")
parser.add_option("--fastmem", action="store_true")
parser.add_option("--num-dirs", type="int", default=1)
parser.add_option("--num-l2caches", type="int", default=1)
parser.add_option("--num-l3caches", type="int", default=1)
parser.add_option("--l1d_size", type="string", default="64kB")
parser.add_option("--l1i_size", type="string", default="32kB")
parser.add_option("--l2_size", type="string", default="2MB")
parser.add_option("--l3_size", type="string", default="16MB")
parser.add_option("--l1d_assoc", type="int", default=2)
parser.add_option("--l1i_assoc", type="int", default=2)
parser.add_option("--l2_assoc", type="int", default=8)
parser.add_option("--l3_assoc", type="int", default=16)
parser.add_option("--cacheline_size", type="int", default=64)
# Enable Ruby
parser.add_option("--ruby", action="store_true")
# Run duration options
parser.add_option("-m", "--abs-max-tick", type="int", default=m5.MaxTick,
metavar="TICKS", help="Run to absolute simulated tick " \
"specified including ticks from a restored checkpoint")
parser.add_option("--rel-max-tick", type="int", default=None,
metavar="TICKS", help="Simulate for specified number of" \
" ticks relative to the simulation start tick (e.g. if " \
"restoring a checkpoint)")
parser.add_option("--maxtime", type="float", default=None,
help="Run to the specified absolute simulated time in " \
"seconds")
parser.add_option("-I", "--maxinsts", action="store", type="int",
default=None, help="""Total number of instructions to
simulate (default: run forever)""")
parser.add_option("--work-item-id", action="store", type="int",
help="the specific work id for exit & checkpointing")
parser.add_option("--work-begin-cpu-id-exit", action="store", type="int",
help="exit when work starts on the specified cpu")
parser.add_option("--work-end-exit-count", action="store", type="int",
help="exit at specified work end count")
parser.add_option("--work-begin-exit-count", action="store", type="int",
help="exit at specified work begin count")
parser.add_option("--init-param", action="store", type="int", default=0,
help="""Parameter available in simulation with m5
initparam""")
# Simpoint options
parser.add_option("--simpoint-profile", action="store_true",
help="Enable basic block profiling for SimPoints")
parser.add_option("--simpoint-interval", type="int", default=10000000,
help="SimPoint interval in num of instructions")
# Checkpointing options
    # Note that performing checkpointing via python script files will override
    # checkpoint instructions built into binaries.
parser.add_option("--take-checkpoints", action="store", type="string",
help="<M,N> take checkpoints at tick M and every N ticks thereafter")
parser.add_option("--max-checkpoints", action="store", type="int",
help="the maximum number of checkpoints to drop", default=5)
parser.add_option("--checkpoint-dir", action="store", type="string",
help="Place all checkpoints in this absolute directory")
parser.add_option("-r", "--checkpoint-restore", action="store", type="int",
help="restore from checkpoint <N>")
parser.add_option("--checkpoint-at-end", action="store_true",
help="take a checkpoint at end of run")
parser.add_option("--work-begin-checkpoint-count", action="store", type="int",
help="checkpoint at specified work begin count")
parser.add_option("--work-end-checkpoint-count", action="store", type="int",
help="checkpoint at specified work end count")
parser.add_option("--work-cpus-checkpoint-count", action="store", type="int",
help="checkpoint and exit when active cpu count is reached")
parser.add_option("--restore-with-cpu", action="store", type="choice",
default="atomic", choices=CpuConfig.cpu_names(),
help = "cpu type for restoring from a checkpoint")
# CPU Switching - default switch model goes from a checkpoint
# to a timing simple CPU with caches to warm up, then to detailed CPU for
# data measurement
parser.add_option("--repeat-switch", action="store", type="int",
default=None,
help="switch back and forth between CPUs with period <N>")
parser.add_option("-s", "--standard-switch", action="store", type="int",
default=None,
help="switch from timing to Detailed CPU after warmup period of <N>")
parser.add_option("-p", "--prog-interval", type="str",
help="CPU Progress Interval")
# Fastforwarding and simpoint related materials
parser.add_option("-W", "--warmup-insts", action="store", type="int",
default=None,
help="Warmup period in total instructions (requires --standard-switch)")
parser.add_option("--bench", action="store", type="string", default=None,
help="base names for --take-checkpoint and --checkpoint-restore")
parser.add_option("-F", "--fast-forward", action="store", type="string",
default=None,
help="Number of instructions to fast forward before switching")
parser.add_option("-S", "--simpoint", action="store_true", default=False,
help="""Use workload simpoints as an instruction offset for
                      --checkpoint-restore or --take-checkpoints.""")
parser.add_option("--at-instruction", action="store_true", default=False,
help="""Treat value of --checkpoint-restore or --take-checkpoint as a
number of instructions.""")
def addSEOptions(parser):
# Benchmark options
parser.add_option("-c", "--cmd", default="",
help="The binary to run in syscall emulation mode.")
parser.add_option("-o", "--options", default="",
help="""The options to pass to the binary, use " "
around the entire string""")
parser.add_option("-i", "--input", default="",
help="Read stdin from a file.")
parser.add_option("--output", default="",
help="Redirect stdout to a file.")
parser.add_option("--errout", default="",
help="Redirect stderr to a file.")
def addFSOptions(parser):
# Simulation options
parser.add_option("--timesync", action="store_true",
help="Prevent simulated time from getting ahead of real time")
# System options
parser.add_option("--kernel", action="store", type="string")
parser.add_option("--script", action="store", type="string")
parser.add_option("--frame-capture", action="store_true",
help="Stores changed frame buffers from the VNC server to compressed "\
"files in the gem5 output directory")
if buildEnv['TARGET_ISA'] == "arm":
parser.add_option("--bare-metal", action="store_true",
help="Provide the raw system without the linux specific bits")
parser.add_option("--machine-type", action="store", type="choice",
choices=ArmMachineType.map.keys(), default="RealView_PBX")
parser.add_option("--dtb-filename", action="store", type="string",
help="Specifies device tree blob file to use with device-tree-"\
"enabled kernels")
parser.add_option("--enable-context-switch-stats-dump", \
action="store_true", help="Enable stats dump at context "\
"switches and dump tasks file (required for Streamline)")
# Benchmark options
parser.add_option("--dual", action="store_true",
help="Simulate two systems attached with an ethernet link")
parser.add_option("-b", "--benchmark", action="store", type="string",
dest="benchmark",
help="Specify the benchmark to run. Available benchmarks: %s"\
% DefinedBenchmarks)
# Metafile options
parser.add_option("--etherdump", action="store", type="string", dest="etherdump",
help="Specify the filename to dump a pcap capture of the" \
"ethernet traffic")
# Disk Image Options
parser.add_option("--disk-image", action="store", type="string", default=None,
help="Path to the disk image to use.")
|
|
# Copyright 2015 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from manila.common import constants
from manila import context
from manila import db
from manila.message import message_levels
def _create_db_row(method, default_values, custom_values):
override_defaults = custom_values.pop('override_defaults', None)
if override_defaults:
default_values = custom_values
else:
default_values.update(copy.deepcopy(custom_values))
return method(context.get_admin_context(), default_values)
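# For example (illustrative): create_share(size=10) merges {'size': 10} into the
# defaults defined below, while create_share(override_defaults=True, size=10)
# writes a row built from the caller's values only.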
def create_share_group(**kwargs):
"""Create a share group object."""
share_group = {
'share_network_id': None,
'share_server_id': None,
'user_id': 'fake',
'project_id': 'fake',
'status': constants.STATUS_CREATING,
'host': 'fake_host'
}
return _create_db_row(db.share_group_create, share_group, kwargs)
def create_share_group_snapshot(share_group_id, **kwargs):
"""Create a share group snapshot object."""
snapshot = {
'share_group_id': share_group_id,
'user_id': 'fake',
'project_id': 'fake',
'status': constants.STATUS_CREATING,
}
return _create_db_row(db.share_group_snapshot_create, snapshot, kwargs)
def create_share_group_snapshot_member(share_group_snapshot_id, **kwargs):
"""Create a share group snapshot member object."""
member = {
'share_proto': "NFS",
'size': 0,
'share_instance_id': None,
'user_id': 'fake',
'project_id': 'fake',
'status': 'creating',
'share_group_snapshot_id': share_group_snapshot_id,
}
return _create_db_row(
db.share_group_snapshot_member_create, member, kwargs)
def create_share_access(**kwargs):
share_access = {
'id': 'fake_id',
'access_type': 'ip',
'access_to': 'fake_ip_address'
}
return _create_db_row(db.share_access_create, share_access, kwargs)
def create_share(**kwargs):
"""Create a share object."""
share = {
'share_proto': "NFS",
'size': 0,
'snapshot_id': None,
'share_network_id': None,
'share_server_id': None,
'user_id': 'fake',
'project_id': 'fake',
'metadata': {},
'availability_zone': 'fake_availability_zone',
'status': constants.STATUS_CREATING,
'host': 'fake_host',
'is_soft_deleted': False
}
return _create_db_row(db.share_create, share, kwargs)
def create_share_without_instance(**kwargs):
share = {
'share_proto': "NFS",
'size': 0,
'snapshot_id': None,
'share_network_id': None,
'share_server_id': None,
'user_id': 'fake',
'project_id': 'fake',
'metadata': {},
'availability_zone': 'fake_availability_zone',
'status': constants.STATUS_CREATING,
'host': 'fake_host',
'is_soft_deleted': False
}
share.update(copy.deepcopy(kwargs))
return db.share_create(context.get_admin_context(), share, False)
def create_share_instance(**kwargs):
"""Create a share instance object."""
return db.share_instance_create(context.get_admin_context(),
kwargs.pop('share_id'), kwargs)
def create_share_replica(**kwargs):
"""Create a share replica object."""
if 'share_id' not in kwargs:
share = create_share()
kwargs['share_id'] = share['id']
return db.share_instance_create(context.get_admin_context(),
kwargs.pop('share_id'), kwargs)
def create_snapshot(**kwargs):
"""Create a snapshot object."""
with_share = kwargs.pop('with_share', False)
share = None
if with_share:
share = create_share(status=constants.STATUS_AVAILABLE,
size=kwargs.get('size', 0))
snapshot = {
'share_proto': "NFS",
'size': 0,
'share_id': share['id'] if with_share else None,
'user_id': 'fake',
'project_id': 'fake',
'status': 'creating',
'provider_location': 'fake',
}
snapshot.update(kwargs)
return db.share_snapshot_create(context.get_admin_context(), snapshot)
def create_snapshot_instance(snapshot_id, **kwargs):
"""Create a share snapshot instance object."""
snapshot_instance = {
'provider_location': 'fake_provider_location',
'progress': '0%',
'status': constants.STATUS_CREATING,
}
snapshot_instance.update(kwargs)
return db.share_snapshot_instance_create(
context.get_admin_context(), snapshot_id, snapshot_instance)
def create_snapshot_instance_export_locations(snapshot_id, **kwargs):
"""Create a snapshot instance export location object."""
export_location = {
'share_snapshot_instance_id': snapshot_id,
}
export_location.update(kwargs)
return db.share_snapshot_instance_export_location_create(
context.get_admin_context(), export_location)
def create_access(**kwargs):
"""Create an access rule object."""
state = kwargs.pop('state', constants.ACCESS_STATE_QUEUED_TO_APPLY)
access = {
'access_type': 'fake_type',
'access_to': 'fake_IP',
'share_id': kwargs.pop('share_id', None) or create_share()['id'],
}
access.update(kwargs)
share_access_rule = _create_db_row(db.share_access_create, access, kwargs)
for mapping in share_access_rule.instance_mappings:
db.share_instance_access_update(
context.get_admin_context(), share_access_rule['id'],
mapping.share_instance_id, {'state': state})
return share_access_rule
def create_snapshot_access(**kwargs):
"""Create a snapshot access rule object."""
access = {
'access_type': 'fake_type',
'access_to': 'fake_IP',
'share_snapshot_id': None,
}
return _create_db_row(db.share_snapshot_access_create, access, kwargs)
def create_share_server(**kwargs):
"""Create a share server object."""
backend_details = kwargs.pop('backend_details', {})
srv = {
'host': 'host1',
'status': constants.STATUS_ACTIVE
}
share_srv = _create_db_row(db.share_server_create, srv, kwargs)
if backend_details:
db.share_server_backend_details_set(
context.get_admin_context(), share_srv['id'], backend_details)
return db.share_server_get(context.get_admin_context(),
share_srv['id'])
def create_share_type(**kwargs):
"""Create a share type object"""
share_type = {
'name': 'fake_type',
'is_public': True,
}
return _create_db_row(db.share_type_create, share_type, kwargs)
def create_share_group_type(**kwargs):
"""Create a share group type object"""
share_group_type = {
'name': 'fake_group_type',
'is_public': True,
}
return _create_db_row(db.share_group_type_create, share_group_type,
kwargs)
def create_share_network(**kwargs):
"""Create a share network object."""
net = {
'user_id': 'fake',
'project_id': 'fake',
'status': 'active',
'name': 'whatever',
'description': 'fake description',
}
return _create_db_row(db.share_network_create, net, kwargs)
def create_share_network_subnet(**kwargs):
"""Create a share network subnet object."""
subnet = {
'id': 'fake_sns_id',
'neutron_net_id': 'fake-neutron-net',
'neutron_subnet_id': 'fake-neutron-subnet',
'network_type': 'vlan',
'segmentation_id': 1000,
'cidr': '10.0.0.0/24',
'ip_version': 4,
'availability_zone_id': 'fake_zone_id',
'share_network_id': 'fake_sn_id',
'gateway': None,
'mtu': None
}
return _create_db_row(db.share_network_subnet_create, subnet, kwargs)
def create_security_service(**kwargs):
share_network_id = kwargs.pop('share_network_id', None)
service = {
'type': "FAKE",
'project_id': 'fake-project-id',
}
service_ref = _create_db_row(db.security_service_create, service, kwargs)
if share_network_id:
db.share_network_add_security_service(context.get_admin_context(),
share_network_id,
service_ref['id'])
return service_ref
def create_message(**kwargs):
message_dict = {
'action': 'fake_Action',
'project_id': 'fake-project-id',
'message_level': message_levels.ERROR,
}
return _create_db_row(db.message_create, message_dict, kwargs)
|
|
import io
import json
import pytest
import aiohttpretty
from waterbutler.core import streams
from waterbutler.core import exceptions
from waterbutler.providers.figshare import metadata
from waterbutler.providers.figshare import provider
from waterbutler.providers.figshare.path import FigsharePath
from waterbutler.providers.figshare.settings import PRIVATE_IDENTIFIER, MAX_PAGE_SIZE
from tests.providers.figshare.fixtures import (crud_fixtures,
error_fixtures,
root_provider_fixtures)
@pytest.fixture
def auth():
return {
'name': 'cat',
'email': '[email protected]',
'callback_url': 'http://sup.com/api/v1/project/v8s9q/waterbutler/logs/',
'id': 'fakey',
}
@pytest.fixture
def credentials():
return {
'token': 'freddie',
}
@pytest.fixture
def project_settings():
return {
'container_type': 'project',
'container_id': '13423',
}
@pytest.fixture
def article_settings():
return {
'container_type': 'article',
'container_id': '4037952',
}
@pytest.fixture
def project_provider(auth, credentials, project_settings):
return provider.FigshareProvider(auth, credentials, project_settings)
@pytest.fixture
def article_provider(auth, credentials, article_settings):
return provider.FigshareProvider(auth, credentials, article_settings)
@pytest.fixture
def file_content():
return b'sleepy'
@pytest.fixture
def file_like(file_content):
return io.BytesIO(file_content)
@pytest.fixture
def file_stream(file_like):
return streams.FileStreamReader(file_like)
class TestPolymorphism:
# These should not be passing but are
async def test_project_provider(self, project_settings, project_provider):
assert isinstance(project_provider, provider.FigshareProjectProvider)
assert project_provider.project_id == project_settings['container_id']
async def test_article_provider(self, article_settings, article_provider):
assert isinstance(article_provider, provider.FigshareArticleProvider)
assert article_provider.article_id == article_settings['container_id']
class TestProjectV1ValidatePath:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_folder_article(self, project_provider, root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
file_id = str(item['id'])
path = '/{}/'.format(file_id)
article_list_url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(False, *article_segments)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=item)
result = await project_provider.validate_v1_path(path)
expected = FigsharePath('/{}/'.format(item['title']),
_ids=(project_provider.container_id, file_id),
folder=True,
is_public=False)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_folder_article_bad_path(self, project_provider,
root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
file_id = str(item['id'])
path = '/{}'.format(file_id)
article_list_url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(False, *article_segments)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=item)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.validate_v1_path(path)
assert e.value.code == 404
assert aiohttpretty.has_call(method='GET', uri=article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_folder_article_bad_type(self, project_provider,
root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
file_id = str(item['id'])
path = '/{}/'.format(file_id)
item['defined_type'] = 5
article_list_url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(False, *article_segments)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=item)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.validate_v1_path(path)
assert e.value.code == 404
assert aiohttpretty.has_call(method='GET', uri=article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_validate_v1_path_root(self, project_provider):
path = '/'
result = await project_provider.validate_v1_path(path)
expected = FigsharePath(path, _ids=('', ), folder=True, is_public=False)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_validate_v1_path_invalid_path(self, article_provider):
with pytest.raises(exceptions.InvalidPathError) as e:
await article_provider.validate_v1_path('/this/is/an/invalid/path')
assert e.value.code == 400
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_invalid_path(self, project_provider):
path = 'whatever'
with pytest.raises(exceptions.InvalidPathError) as e:
await project_provider.validate_v1_path(path)
assert e.value.code == 400
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_file_article(self, project_provider, root_provider_fixtures):
file_item = root_provider_fixtures['file_metadata']
item = root_provider_fixtures['file_article_metadata']
file_id = str(item['files'][0]['id'])
article_id = str(item['id'])
path = '/{}/{}'.format(article_id, file_id)
article_list_url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(False, *article_segments, 'files', file_id)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=file_item)
result = await project_provider.validate_v1_path(path)
expected = FigsharePath('/{}/{}'.format(item['title'], file_item['name']),
_ids=(project_provider.container_id, file_id),
folder=False, is_public=False)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_file_article_public(self, project_provider,
root_provider_fixtures):
file_item = root_provider_fixtures['file_metadata_public']
item = root_provider_fixtures['file_article_metadata']
file_id = str(file_item['id'])
article_id = str(item['id'])
path = '/{}/{}'.format(article_id, file_id)
article_list_url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(True, *article_segments, 'files', file_id)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['public_list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=file_item)
result = await project_provider.validate_v1_path(path)
expected = FigsharePath('/{}/{}'.format(item['title'], file_item['name']),
_ids=(project_provider.container_id, file_id),
folder=False, is_public=False)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_validate_v1_path_file_article_bad_path(self, project_provider,
root_provider_fixtures):
file_item = root_provider_fixtures['file_metadata']
item = root_provider_fixtures['file_article_metadata']
file_id = str(item['files'][0]['id'])
article_id = str(item['id'])
path = '/{}/{}/'.format(article_id, file_id)
article_list_url = project_provider.build_url(False,
*project_provider.root_path_parts, 'articles')
article_segments = (*project_provider.root_path_parts, 'articles', str(item['id']))
article_url = project_provider.build_url(False, *article_segments, 'files', file_id)
aiohttpretty.register_json_uri('GET', article_list_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_list_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', article_url, body=file_item)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.validate_v1_path(path)
assert e.value.code == 404
class TestArticleV1ValidatePath:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_validate_v1_path_root(self, article_provider):
path = '/'
result = await article_provider.validate_v1_path(path)
expected = FigsharePath(path, _ids=('', ), folder=True, is_public=False)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_validate_v1_path(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = item['id']
path = '/' + str(file_id)
url = article_provider.build_url(False, *article_provider.root_path_parts, 'files',
str(file_id))
aiohttpretty.register_json_uri('GET', url, body=item)
result = await article_provider.validate_v1_path(path)
expected = FigsharePath('/' + item['name'], _ids=('', file_id), folder=False,
is_public=False)
assert result == expected
class TestProjectMetadata:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_contents(self, project_provider, root_provider_fixtures):
project_articles = root_provider_fixtures['list_project_articles']
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
file_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
str(project_articles[0]['id']))
folder_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
str(project_articles[1]['id']))
aiohttpretty.register_json_uri('GET', list_articles_url, body=project_articles,
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', file_metadata_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_json_uri('GET', folder_metadata_url,
body=root_provider_fixtures['folder_article_metadata'])
        path = FigsharePath('/', _ids=('', ), folder=True)
result = await project_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=list_articles_url,
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
assert aiohttpretty.has_call(method='GET', uri=file_metadata_url)
assert aiohttpretty.has_call(method='GET', uri=folder_metadata_url)
assert result == [
metadata.FigshareFileMetadata(root_provider_fixtures['file_article_metadata'],
root_provider_fixtures['file_article_metadata']['files'][0]),
metadata.FigshareFolderMetadata(root_provider_fixtures['folder_article_metadata'])
]
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_metadata_invalid_path(self, project_provider):
path = FigsharePath('/testfolder/test/test/text.txt',
_ids=('1', '2', '3', '4', '5'), folder=True, is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.metadata(path)
assert e.value.code == 404
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_metadata_bad_response(self, project_provider):
path = FigsharePath('/testfolder/test/', _ids=('1', '2', '3'), folder=True, is_public=False)
url = project_provider.build_url(path.is_public, *project_provider.root_path_parts,
'articles', path.parts[1].identifier)
aiohttpretty.register_json_uri('GET', url, status=404)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.metadata(path)
assert e.value.code == 404
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_file_article_contents(self, project_provider, root_provider_fixtures):
article_metadata = root_provider_fixtures['file_article_metadata']
file_metadata = root_provider_fixtures['file_metadata']
root_parts = project_provider.root_path_parts
article_id = str(article_metadata['id'])
article_name = article_metadata['title']
file_id = str(file_metadata['id'])
file_name = file_metadata['name']
file_article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
aiohttpretty.register_json_uri('GET', file_article_metadata_url, body=article_metadata)
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, file_id), folder=False, is_public=False)
result = await project_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=file_article_metadata_url)
expected = metadata.FigshareFileMetadata(article_metadata, file_metadata)
assert result == expected
assert str(result.id) == file_id
assert result.name == file_name
assert result.path == '/{}/{}'.format(article_id, file_id)
assert result.materialized_path == '/{}/{}'.format(article_name, file_name)
assert str(result.article_id) == article_id
assert result.article_name == article_name
assert result.size == file_metadata['size']
assert result.is_public == (PRIVATE_IDENTIFIER not in article_metadata['url'])
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_article_contents_error(self, project_provider,
root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
item['files'] = []
root_parts = project_provider.root_path_parts
article_id = str(item['id'])
article_name = item['title']
file_id = str(root_provider_fixtures['folder_file_metadata']['id'])
file_name = root_provider_fixtures['folder_file_metadata']['name']
folder_article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=item)
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, file_id), folder=False, is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_article_type_error(self, project_provider,
root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
item['defined_type'] = 5
root_parts = project_provider.root_path_parts
article_id = str(item['id'])
article_name = item['title']
folder_article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=item)
path = FigsharePath('/{}'.format(article_name), _ids=('', article_id), folder=True,
is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_article_contents_file(self, project_provider,
root_provider_fixtures):
item = root_provider_fixtures['folder_article_metadata']
root_parts = project_provider.root_path_parts
article_id = str(item['id'])
article_name = item['title']
folder_article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=item)
path = FigsharePath('/{}'.format(article_name), _ids=('', article_id), folder=True,
is_public=False)
result = await project_provider.metadata(path)
expected = [metadata.FigshareFileMetadata(item, raw_file=item['files'][0])]
assert result == expected
assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_article_contents(self, project_provider, root_provider_fixtures):
article_metadata = root_provider_fixtures['folder_article_metadata']
file_metadata = root_provider_fixtures['folder_file_metadata']
root_parts = project_provider.root_path_parts
article_id = str(article_metadata['id'])
article_name = article_metadata['title']
file_id = str(file_metadata['id'])
file_name = file_metadata['name']
folder_article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=article_metadata)
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, file_id), folder=False, is_public=False)
result = await project_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
expected = metadata.FigshareFileMetadata(article_metadata, file_metadata)
assert result == expected
assert str(result.id) == file_id
assert result.name == file_name
assert result.path == '/{}/{}'.format(article_id, file_id)
assert result.materialized_path == '/{}/{}'.format(article_name, file_name)
assert str(result.article_id) == article_id
assert result.article_name == article_name
assert result.size == file_metadata['size']
assert result.is_public == (PRIVATE_IDENTIFIER not in article_metadata['url'])
assert result.extra['hashes']['md5'] == '03dee7cf60f17a8453ccd2f51cbbbd86'
class TestArticleMetadata:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_file_contents(self, article_provider, root_provider_fixtures):
article_metadata = root_provider_fixtures['folder_article_metadata']
file_metadata = root_provider_fixtures['folder_file_metadata']
root_parts = article_provider.root_path_parts
article_id = str(article_metadata['id'])
article_name = article_metadata['title']
file_id = str(file_metadata['id'])
file_name = file_metadata['name']
folder_article_metadata_url = article_provider.build_url(False, *root_parts)
file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=article_metadata)
path = FigsharePath('/{}'.format(file_name), _ids=('', file_id), folder=False,
is_public=False)
result = await article_provider.metadata(path)
assert aiohttpretty.has_call(method='GET', uri=folder_article_metadata_url)
expected = metadata.FigshareFileMetadata(article_metadata, file_metadata)
assert result == expected
assert str(result.id) == file_id
assert result.name == file_name
assert result.path == '/{}/{}'.format(article_id, file_id)
assert result.materialized_path == '/{}/{}'.format(article_name, file_name)
assert result.article_name == article_name
assert result.size == file_metadata['size']
assert result.is_public == (PRIVATE_IDENTIFIER not in article_metadata['url'])
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_root_contents(self, article_provider, root_provider_fixtures):
article_metadata = root_provider_fixtures['folder_article_metadata']
file_metadata = root_provider_fixtures['folder_file_metadata']
root_parts = article_provider.root_path_parts
article_id = str(article_metadata['id'])
file_id = str(file_metadata['id'])
folder_article_metadata_url = article_provider.build_url(False, *root_parts)
file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)
aiohttpretty.register_json_uri('GET', folder_article_metadata_url, body=article_metadata)
aiohttpretty.register_json_uri('GET', file_metadata_url, body=file_metadata)
path = FigsharePath('/', _ids=(file_id, ), folder=True, is_public=False)
result = await article_provider.metadata(path)
expected = [metadata.FigshareFileMetadata(article_metadata, file_metadata)]
assert result == expected
class TestProjectCRUD:
"""Due to a bug in aiohttpretty, the file stream is not being read from on file upload for the
Figshare provider. Because the file stream isn't read, the stream hash calculator never gets
any data, and the computed md5sum is always that of the empty string. To work around this, the
fixtures currently include the empty md5 in the metadata. Once aiohttpretty is fixed, the
metadata can be reverted to deliver the actual content hash."""
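    # A sketch of the workaround described above (assumption: the fixtures pin the
    # empty-string digest): because the mocked stream is never consumed, the
    # provider hashes zero bytes, i.e. hashlib.md5(b'').hexdigest()
    # == 'd41d8cd98f00b204e9800998ecf8427e', which is the value currently baked
    # into the upload fixtures.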
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_upload(self, file_stream, project_provider,
root_provider_fixtures, crud_fixtures):
file_name = 'barricade.gif'
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
validate_article_url = project_provider.build_url(False, *root_parts, 'articles', file_name)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_uri('GET', validate_article_url, status=404)
path = await project_provider.validate_path('/' + file_name)
path = FigsharePath('/' + file_name, _ids=('', ''), folder=False, is_public=False)
article_id = str(crud_fixtures['upload_article_metadata']['id'])
file_metadata = root_provider_fixtures['get_file_metadata']
create_article_url = project_provider.build_url(False, *root_parts, 'articles')
create_file_url = project_provider.build_url(False, 'articles', article_id, 'files')
file_url = project_provider.build_url(False, 'articles', article_id, 'files',
str(file_metadata['id']))
get_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
upload_url = file_metadata['upload_url']
aiohttpretty.register_json_uri('POST', create_article_url,
body=crud_fixtures['create_article_metadata'], status=201)
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url,
body=file_metadata)
aiohttpretty.register_json_uri('GET', upload_url,
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['upload_article_metadata'])
# md5 hash calculation is being hacked around. see test class docstring
result, created = await project_provider.upload(file_stream, path)
expected = metadata.FigshareFileMetadata(
crud_fixtures['upload_article_metadata'],
crud_fixtures['upload_article_metadata']['files'][0],
)
assert aiohttpretty.has_call(
method='POST',
uri=create_article_url,
data=json.dumps({
'title': 'barricade.gif',
})
)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_upload_checksum_mismatch(self, project_provider,
root_provider_fixtures,
crud_fixtures, file_stream):
file_name = 'barricade.gif'
item = root_provider_fixtures['get_file_metadata']
root_parts = project_provider.root_path_parts
path = FigsharePath('/' + file_name, _ids=('', ''), folder=False, is_public=False)
article_id = str(crud_fixtures['checksum_mismatch_article_metadata']['id'])
create_article_url = project_provider.build_url(False, *root_parts, 'articles')
create_file_url = project_provider.build_url(False, 'articles', article_id, 'files')
file_url = project_provider.build_url(False, 'articles', article_id, 'files',
str(item['id']))
get_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
upload_url = item['upload_url']
aiohttpretty.register_json_uri('POST', create_article_url,
body=crud_fixtures['create_article_metadata'], status=201)
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url, body=item)
aiohttpretty.register_json_uri('GET', upload_url,
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['checksum_mismatch_article_metadata'])
with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
await project_provider.upload(file_stream, path)
assert aiohttpretty.has_call(
method='POST',
uri=create_article_url,
data=json.dumps({
'title': 'barricade.gif',
})
)
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert aiohttpretty.has_call(method='GET', uri=file_url)
assert aiohttpretty.has_call(method='GET', uri=upload_url)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=file_url)
assert aiohttpretty.has_call(method='GET', uri=get_article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_upload(self, file_stream,
project_provider,
root_provider_fixtures,
crud_fixtures):
file_name = 'barricade.gif'
article_id = str(root_provider_fixtures['list_project_articles'][1]['id'])
article_name = root_provider_fixtures['list_project_articles'][1]['title']
root_parts = project_provider.root_path_parts
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, ''), folder=False, is_public=False)
file_metadata = root_provider_fixtures['get_file_metadata']
create_file_url = project_provider.build_url(False, 'articles', article_id, 'files')
file_url = project_provider.build_url(False, 'articles', article_id, 'files',
str(file_metadata['id']))
get_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
upload_url = file_metadata['upload_url']
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url, body=file_metadata)
aiohttpretty.register_json_uri('GET', upload_url,
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['upload_folder_article_metadata'])
# md5 hash calculation is being hacked around. see test class docstring
result, created = await project_provider.upload(file_stream, path)
expected = metadata.FigshareFileMetadata(
crud_fixtures['upload_folder_article_metadata'],
crud_fixtures['upload_folder_article_metadata']['files'][0],
)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_upload_undefined_type(self, file_stream,
project_provider,
root_provider_fixtures,
crud_fixtures):
file_name = 'barricade.gif'
article_id = str(root_provider_fixtures['list_project_articles'][1]['id'])
article_name = root_provider_fixtures['list_project_articles'][1]['title']
changed_metadata = crud_fixtures['upload_folder_article_metadata']
changed_metadata['defined_type'] = 5
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
aiohttpretty.register_json_uri('POST', list_articles_url, status=201,
body=crud_fixtures['create_upload_article_metadata'])
path = FigsharePath('/{}/{}'.format(article_name, file_name), _ids=('', article_id, ''),
folder=False, is_public=False)
file_metadata = root_provider_fixtures['get_file_metadata']
create_file_url = project_provider.build_url(False, 'articles', article_id, 'files')
file_url = project_provider.build_url(False, 'articles', article_id, 'files',
str(file_metadata['id']))
get_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
upload_url = file_metadata['upload_url']
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url, body=file_metadata)
aiohttpretty.register_json_uri('GET', upload_url,
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url, body=changed_metadata)
result, created = await project_provider.upload(file_stream, path)
expected = metadata.FigshareFileMetadata(
crud_fixtures['upload_folder_article_metadata'],
crud_fixtures['upload_folder_article_metadata']['files'][0],
)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_upload_update_error(self, file_stream, project_provider):
path = FigsharePath('/testfolder/whatever.txt',
_ids=('512415', '123325', '8890481'),
folder=False, is_public=False)
with pytest.raises(exceptions.UnsupportedOperationError) as e:
await project_provider.upload(file_stream, path)
assert e.value.code == 403
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_article_download(self, project_provider, root_provider_fixtures):
article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
file_id = str(root_provider_fixtures['file_article_metadata']['files'][0]['id'])
article_name = str(root_provider_fixtures['list_project_articles'][0]['title'])
file_name = str(root_provider_fixtures['file_article_metadata']['files'][0]['name'])
body = b'castle on a cloud'
root_parts = project_provider.root_path_parts
file_metadata_url = project_provider.build_url(False, *root_parts, 'articles', article_id,
'files', file_id)
article_metadata_url = project_provider.build_url(False, *root_parts, 'articles',
article_id)
download_url = root_provider_fixtures['file_metadata']['download_url']
aiohttpretty.register_json_uri('GET', file_metadata_url,
body=root_provider_fixtures['file_metadata'])
aiohttpretty.register_json_uri('GET', article_metadata_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_uri('GET', download_url,
params={'token': project_provider.token},
body=body, auto_length=True)
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, file_id),
folder=False, is_public=False)
result = await project_provider.download(path)
content = await result.read()
assert content == body
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_file_delete(self, project_provider, root_provider_fixtures):
file_id = str(root_provider_fixtures['file_metadata']['id'])
file_name = root_provider_fixtures['file_metadata']['name']
article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
article_name = str(root_provider_fixtures['list_project_articles'][0]['title'])
root_parts = project_provider.root_path_parts
file_url = project_provider.build_url(False, *root_parts,
'articles', article_id, 'files', file_id)
file_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
aiohttpretty.register_json_uri('GET', file_url,
body=root_provider_fixtures['file_metadata'])
aiohttpretty.register_json_uri('GET', file_article_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_uri('DELETE', file_article_url, status=204)
path = FigsharePath('/{}/{}'.format(article_name, file_name),
_ids=('', article_id, file_id),
folder=False, is_public=False)
result = await project_provider.delete(path)
assert result is None
assert aiohttpretty.has_call(method='DELETE', uri=file_article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_file_delete_folder_type(self, project_provider, root_provider_fixtures):
item = root_provider_fixtures['file_article_metadata']
item['defined_type'] = 4
file_id = str(root_provider_fixtures['file_metadata']['id'])
article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
root_parts = project_provider.root_path_parts
path = FigsharePath('/{}/{}'.format(article_id, file_id),
_ids=('', article_id, file_id), folder=False)
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
file_article_url = project_provider.build_url(False, 'articles', path.parts[1]._id,
'files', path.parts[2]._id)
get_file_article_url = project_provider.build_url(False,
*root_parts, 'articles', article_id)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', get_file_article_url, body=item)
aiohttpretty.register_uri('DELETE', file_article_url, status=204)
result = await project_provider.delete(path)
assert result is None
assert aiohttpretty.has_call(method='DELETE', uri=file_article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_file_delete_bad_path(self, project_provider, root_provider_fixtures):
file_id = str(root_provider_fixtures['file_metadata']['id'])
article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
file_name = str(root_provider_fixtures['file_metadata']['name'])
article_name = str(root_provider_fixtures['list_project_articles'][0]['title'])
path = FigsharePath('/{}/{}'.format(article_name, file_name), _ids=('',), folder=False)
root_parts = project_provider.root_path_parts
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.delete(path)
assert e.value.code == 404
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_folder_delete(self, project_provider, root_provider_fixtures):
article_id = str(root_provider_fixtures['list_project_articles'][1]['id'])
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
folder_article_url = project_provider.build_url(False, *root_parts, 'articles', article_id)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', folder_article_url,
body=root_provider_fixtures['folder_article_metadata'])
aiohttpretty.register_uri('DELETE', folder_article_url, status=204)
path = FigsharePath('/{}'.format(article_id), _ids=('', article_id), folder=True)
result = await project_provider.delete(path)
assert result is None
assert aiohttpretty.has_call(method='DELETE', uri=folder_article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_delete_root_confirm_error(self, project_provider):
path = FigsharePath('/', _ids=('11241213', ), folder=True, is_public=False)
with pytest.raises(exceptions.DeleteError) as e:
await project_provider.delete(path)
assert e.value.code == 400
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_delete_root(self, project_provider, root_provider_fixtures):
path = FigsharePath('/', _ids=('11241213', ), folder=True, is_public=False)
item = root_provider_fixtures['list_project_articles']
list_articles_url = project_provider.build_url(False,
*project_provider.root_path_parts,
'articles')
delete_url_1 = project_provider.build_url(False, *project_provider.root_path_parts,
'articles', str(item[0]['id']))
delete_url_2 = project_provider.build_url(False, *project_provider.root_path_parts,
'articles', str(item[1]['id']))
aiohttpretty.register_json_uri('DELETE', delete_url_1, status=204)
aiohttpretty.register_json_uri('DELETE', delete_url_2, status=204)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
await project_provider.delete(path, 1)
assert aiohttpretty.has_call(method='DELETE', uri=delete_url_2)
assert aiohttpretty.has_call(method='DELETE', uri=delete_url_1)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_delete_errors(self, project_provider):
path = FigsharePath('/test.txt', _ids=('11241213', '123123'), folder=False, is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.delete(path)
assert e.value.code == 404
path = FigsharePath('/test/test.txt', _ids=('11241213', '123123', '123123'),
folder=True, is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await project_provider.delete(path)
assert e.value.code == 404
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_create_folder(self, project_provider, root_provider_fixtures, crud_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
path = FigsharePath('/folder2/', _ids=('', file_id), folder=True)
create_url = project_provider.build_url(False,
*project_provider.root_path_parts, 'articles')
metadata_url = crud_fixtures['create_article_metadata']['location']
aiohttpretty.register_json_uri('POST', create_url,
body=crud_fixtures['create_article_metadata'], status=201)
aiohttpretty.register_json_uri('GET', metadata_url,
body=root_provider_fixtures['folder_article_metadata'])
        result = await project_provider.create_folder(path)
        assert result.kind == 'folder'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_create_folder_invalid_path(self, project_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
path = FigsharePath('/folder2/folder3/folder4/folder5',
_ids=('', file_id, file_id, file_id), folder=True)
with pytest.raises(exceptions.CreateFolderError) as e:
await project_provider.create_folder(path)
assert e.value.code == 400
class TestArticleCRUD:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_upload(self, file_stream, article_provider,
root_provider_fixtures, crud_fixtures):
file_name = 'barricade.gif'
file_id = str(root_provider_fixtures['get_file_metadata']['id'])
root_parts = article_provider.root_path_parts
path = FigsharePath('/' + file_name, _ids=('', ''), folder=False, is_public=False)
create_file_url = article_provider.build_url(False, *root_parts, 'files')
file_url = article_provider.build_url(False, *root_parts, 'files', file_id)
get_article_url = article_provider.build_url(False, *root_parts)
upload_url = root_provider_fixtures['get_file_metadata']['upload_url']
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url,
body=root_provider_fixtures['get_file_metadata'])
aiohttpretty.register_json_uri('GET',
root_provider_fixtures['get_file_metadata']['upload_url'],
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['upload_folder_article_metadata'])
# md5 hash calculation is being hacked around. see test class docstring
result, created = await article_provider.upload(file_stream, path)
expected = metadata.FigshareFileMetadata(
crud_fixtures['upload_folder_article_metadata'],
crud_fixtures['upload_folder_article_metadata']['files'][0],
)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_download(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
file_name = str(item['name'])
body = b'castle on a cloud'
root_parts = article_provider.root_path_parts
article_metadata_url = article_provider.build_url(False, *root_parts)
download_url = item['download_url']
aiohttpretty.register_json_uri('GET', article_metadata_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_uri('GET', download_url, params={'token': article_provider.token},
body=body, auto_length=True)
path = FigsharePath('/{}'.format(file_name), _ids=('', file_id),
folder=False, is_public=False)
result = await article_provider.download(path)
content = await result.read()
assert content == body
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_download_range(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
file_name = str(item['name'])
body = b'castle on a cloud'
root_parts = article_provider.root_path_parts
article_metadata_url = article_provider.build_url(False, *root_parts)
download_url = item['download_url']
aiohttpretty.register_json_uri('GET', article_metadata_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_uri('GET', download_url, params={'token': article_provider.token},
body=body[0:2], auto_length=True, status=206)
path = FigsharePath('/{}'.format(file_name), _ids=('', file_id),
folder=False, is_public=False)
result = await article_provider.download(path, range=(0, 1))
assert result.partial
content = await result.read()
assert content == b'ca'
assert aiohttpretty.has_call(method='GET', uri=download_url,
headers={'Range': 'bytes=0-1',
'Authorization': 'token freddie'},
params={'token': 'freddie'})
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_download_path_not_file(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
path = FigsharePath('/testfolder/', _ids=('', ), folder=True, is_public=False)
with pytest.raises(exceptions.NotFoundError) as e:
await article_provider.download(path)
assert e.value.code == 404
assert e.value.message == 'Could not retrieve file or directory /{}'.format(path.path)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_download_no_downloadurl(self, article_provider, error_fixtures):
item = error_fixtures['file_metadata_missing_download_url']
file_id = str(item['id'])
path = FigsharePath('/{}'.format(file_id), _ids=('', file_id), folder=False)
root_parts = article_provider.root_path_parts
file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)
article_metadata_url = article_provider.build_url(False, *root_parts)
missing_download_url = error_fixtures['file_article_metadata_missing_download_url']
aiohttpretty.register_json_uri('GET', file_metadata_url, body=item)
aiohttpretty.register_json_uri('GET', article_metadata_url, body=missing_download_url)
with pytest.raises(exceptions.DownloadError) as e:
await article_provider.download(path)
assert e.value.code == 403
assert e.value.message == 'Download not available'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_upload_checksum_mismatch(self, file_stream, article_provider,
root_provider_fixtures, crud_fixtures):
file_name = 'barricade.gif'
item = root_provider_fixtures['get_file_metadata']
file_id = str(item['id'])
root_parts = article_provider.root_path_parts
validate_file_url = article_provider.build_url(False, *root_parts, 'files', file_name)
aiohttpretty.register_uri('GET', validate_file_url, status=404)
path = FigsharePath('/' + file_name, _ids=('', file_id), folder=False, is_public=False)
create_file_url = article_provider.build_url(False, *root_parts, 'files')
file_url = article_provider.build_url(False, *root_parts, 'files', file_id)
get_article_url = article_provider.build_url(False, *root_parts)
upload_url = item['upload_url']
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url, body=item)
aiohttpretty.register_json_uri('GET', item['upload_url'],
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['checksum_mismatch_folder_article_metadata'])
with pytest.raises(exceptions.UploadChecksumMismatchError) as exc:
await article_provider.upload(file_stream, path)
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert aiohttpretty.has_call(method='GET', uri=file_url)
assert aiohttpretty.has_call(method='GET', uri=item['upload_url'])
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=file_url)
assert aiohttpretty.has_call(method='GET', uri=get_article_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_file_delete_root_no_confirm(self, article_provider):
path = FigsharePath('/', _ids=('11241213', ), folder=True, is_public=False)
with pytest.raises(exceptions.DeleteError) as e:
await article_provider.delete(path)
assert e.value.code == 400
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_file_delete_root(self, article_provider, root_provider_fixtures):
path = FigsharePath('/', _ids=('11241213', ), folder=True, is_public=False)
item = root_provider_fixtures['file_article_metadata']
list_articles_url = article_provider.build_url(False, *article_provider.root_path_parts)
delete_url = article_provider.build_url(False, *article_provider.root_path_parts,
'files', str(item['files'][0]['id']))
aiohttpretty.register_json_uri('DELETE', delete_url, status=204)
aiohttpretty.register_json_uri('GET', list_articles_url, body=item)
await article_provider.delete(path, 1)
assert aiohttpretty.has_call(method='DELETE', uri=delete_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_create_folder(self, article_provider):
path = '/'
with pytest.raises(exceptions.CreateFolderError) as e:
await article_provider.create_folder(path)
assert e.value.code == 400
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_file_delete(self, article_provider, root_provider_fixtures):
file_id = str(root_provider_fixtures['file_metadata']['id'])
file_name = root_provider_fixtures['file_metadata']['name']
file_url = article_provider.build_url(False, *article_provider.root_path_parts, 'files',
file_id)
aiohttpretty.register_uri('DELETE', file_url, status=204)
path = FigsharePath('/{}'.format(file_name), _ids=('', file_id), folder=False)
result = await article_provider.delete(path)
assert result is None
assert aiohttpretty.has_call(method='DELETE', uri=file_url)
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_download_404(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
path = FigsharePath('/{}'.format(file_id), _ids=('', file_id), folder=False)
root_parts = article_provider.root_path_parts
file_metadata_url = article_provider.build_url(False, *root_parts, 'files', file_id)
article_metadata_url = article_provider.build_url(False, *root_parts)
download_url = item['download_url']
aiohttpretty.register_json_uri('GET', file_metadata_url, body=item)
aiohttpretty.register_json_uri('GET', article_metadata_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_uri('GET', download_url, params={'token': article_provider.token},
status=404, auto_length=True)
with pytest.raises(exceptions.DownloadError) as e:
await article_provider.download(path)
assert e.value.code == 404
assert e.value.message == 'Download not available'
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_upload_root(self, file_stream, article_provider,
root_provider_fixtures, crud_fixtures):
file_name = 'barricade.gif'
file_id = str(root_provider_fixtures['get_file_metadata']['id'])
root_parts = article_provider.root_path_parts
item = crud_fixtures["upload_folder_article_metadata"]
item['defined_type'] = 5
validate_file_url = article_provider.build_url(False, *root_parts, 'files', file_name)
aiohttpretty.register_uri('GET', validate_file_url, status=404)
path = FigsharePath('/1234/94813', _ids=('1234', '94813'), folder=False, is_public=False)
create_file_url = article_provider.build_url(False, *root_parts, 'files')
file_url = article_provider.build_url(False, *root_parts, 'files', file_id)
get_article_url = article_provider.build_url(False, *root_parts)
upload_url = root_provider_fixtures['get_file_metadata']['upload_url']
parent_url = article_provider.build_url(False, *root_parts,
'articles', path.parent.identifier)
aiohttpretty.register_json_uri('GET', parent_url, body=item)
aiohttpretty.register_json_uri('POST', create_file_url,
body=crud_fixtures['create_file_metadata'], status=201)
aiohttpretty.register_json_uri('GET', file_url,
body=root_provider_fixtures['get_file_metadata'])
aiohttpretty.register_json_uri('GET',
root_provider_fixtures['get_file_metadata']['upload_url'],
body=root_provider_fixtures['get_upload_metadata'])
aiohttpretty.register_uri('PUT', '{}/1'.format(upload_url), status=200)
aiohttpretty.register_uri('POST', file_url, status=202)
aiohttpretty.register_json_uri('GET', get_article_url,
body=crud_fixtures['upload_folder_article_metadata'])
result, created = await article_provider.upload(file_stream, path)
expected = metadata.FigshareFileMetadata(
crud_fixtures['upload_folder_article_metadata'],
crud_fixtures['upload_folder_article_metadata']['files'][0],
)
assert aiohttpretty.has_call(method='PUT', uri='{}/1'.format(upload_url))
assert aiohttpretty.has_call(method='POST', uri=create_file_url)
assert result == expected
class TestRevalidatePath:
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_revalidate_path(self, project_provider, root_provider_fixtures):
file_article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
folder_article_id = str(root_provider_fixtures['list_project_articles'][1]['id'])
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
file_article_url = project_provider.build_url(False, *root_parts, 'articles',
file_article_id)
folder_article_url = project_provider.build_url(False, *root_parts, 'articles',
folder_article_id)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', file_article_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_json_uri('GET', folder_article_url,
body=root_provider_fixtures['folder_article_metadata'])
        path = FigsharePath('/', _ids=('', ), folder=True)
result = await project_provider.revalidate_path(path, '{}'.format('file'), folder=False)
assert result.is_dir is False
assert result.name == 'file'
assert result.identifier == str(
root_provider_fixtures['file_article_metadata']['files'][0]['id'])
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_revalidate_path_duplicate_folder(self, project_provider,
root_provider_fixtures):
file_article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
folder_article_id = str(root_provider_fixtures['list_project_articles'][1]['id'])
folder_article_name = root_provider_fixtures['list_project_articles'][1]['title']
root_parts = project_provider.root_path_parts
list_articles_url = project_provider.build_url(False, *root_parts, 'articles')
file_article_url = project_provider.build_url(False, *root_parts, 'articles',
file_article_id)
folder_article_url = project_provider.build_url(False, *root_parts, 'articles',
folder_article_id)
aiohttpretty.register_json_uri('GET', list_articles_url,
body=root_provider_fixtures['list_project_articles'],
params={'page': '1', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', list_articles_url, body=[],
params={'page': '2', 'page_size': str(MAX_PAGE_SIZE)})
aiohttpretty.register_json_uri('GET', file_article_url,
body=root_provider_fixtures['file_article_metadata'])
aiohttpretty.register_json_uri('GET', folder_article_url,
body=root_provider_fixtures['folder_article_metadata'])
        path = FigsharePath('/', _ids=('', ), folder=True)
result = await project_provider.revalidate_path(path, folder_article_name, folder=True)
assert result.is_dir is True
assert result.name == 'folder_article'
assert result.identifier == folder_article_id
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_project_revalidate_path_not_root(self, project_provider, root_provider_fixtures):
file_article_id = str(root_provider_fixtures['list_project_articles'][0]['id'])
path = FigsharePath('/folder1/', _ids=('', file_article_id), folder=True)
root_parts = project_provider.root_path_parts
file_article_url = project_provider.build_url(False, *root_parts, 'articles',
file_article_id)
aiohttpretty.register_json_uri('GET', file_article_url,
body=root_provider_fixtures['file_article_metadata'])
result = await project_provider.revalidate_path(path, '{}'.format('file'), folder=False)
assert result.is_dir is False
assert result.name == 'file'
assert result.identifier == str(
root_provider_fixtures['file_article_metadata']['files'][0]['id'])
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_revalidate_path_bad_path(self, article_provider, root_provider_fixtures):
item = root_provider_fixtures['file_metadata']
file_id = str(item['id'])
path = FigsharePath('/fodler1/folder2/', _ids=('', '', file_id), folder=True)
with pytest.raises(exceptions.NotFoundError) as e:
await article_provider.revalidate_path(path, 'childname', folder=True)
assert e.value.code == 404
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test_article_revalidate_path_file(self, article_provider, crud_fixtures):
item = crud_fixtures["upload_folder_article_metadata"]
file_id = str(item['files'][0]['id'])
file_name = item['files'][0]['name']
path = FigsharePath('/' + str(file_name), _ids=('', file_id), folder=True)
urn_parts = (*article_provider.root_path_parts, (path.identifier))
url = article_provider.build_url(False, *urn_parts)
aiohttpretty.register_json_uri('GET', url, body=item)
result = await article_provider.revalidate_path(path, item['files'][0]['name'],
folder=False)
expected = path.child(item['files'][0]['name'], _id=file_id, folder=False,
parent_is_folder=False)
assert result == expected
class TestMisc:
def test_path_from_metadata(self, project_provider, root_provider_fixtures):
file_article_metadata = root_provider_fixtures['file_article_metadata']
fig_metadata = metadata.FigshareFileMetadata(file_article_metadata)
        path = FigsharePath('/', _ids=('', ), folder=True)
item = file_article_metadata['files'][0]
expected = FigsharePath('/' + item['name'], _ids=('', item['id']), folder=False)
result = project_provider.path_from_metadata(path, fig_metadata)
assert result == expected
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__get_article_metadata_returns_none(self, project_provider,
root_provider_fixtures):
file_id = root_provider_fixtures['file_article_metadata']['id']
item = {'defined_type': 5, 'files': None, 'id': file_id}
url = project_provider.build_url(False, *project_provider.root_path_parts,
'articles', str(file_id))
aiohttpretty.register_json_uri('GET', url, body=item)
result = await project_provider._get_article_metadata(str(file_id), False)
assert result is None
@pytest.mark.asyncio
@pytest.mark.aiohttpretty
async def test__get_file_upload_url_error(self, project_provider):
article_id = '12345'
file_id = '871947'
url = project_provider.build_url(False, 'articles', article_id, 'files', file_id)
aiohttpretty.register_json_uri('GET', url, status=404)
with pytest.raises(exceptions.ProviderError) as e:
await project_provider._get_file_upload_url(article_id, file_id)
assert e.value.code == 500
@pytest.mark.asyncio
async def test_revisions(self, project_provider):
result = await project_provider.revisions('/')
expected = [metadata.FigshareFileRevisionMetadata()]
assert result == expected
def test_can_duplicate_names(self, project_provider):
assert project_provider.can_duplicate_names() is False
def test_base_figshare_provider_fileset(self, auth, credentials):
settings = {
'container_type': 'fileset',
'container_id': '13423',
}
test_provider = provider.FigshareArticleProvider(auth, credentials, settings)
assert test_provider.container_type == 'article'
def test_base_figshare_provider_invalid_setting(self, auth, credentials):
bad_settings = {
'container_type': 'not_a_project',
'container_id': '13423',
}
with pytest.raises(exceptions.ProviderError) as e:
provider.FigshareProjectProvider(auth, credentials, bad_settings)
assert e.value.message == '{} is not a valid container type.'.format(
bad_settings['container_type'])
def test_figshare_provider_invalid_setting(self, auth, credentials):
bad_settings = {
'container_type': 'not_a_project',
'container_id': '13423',
}
with pytest.raises(exceptions.ProviderError) as e:
provider.FigshareProvider(auth, credentials, bad_settings)
assert e.value.message == 'Invalid "container_type" {0}'.format(
bad_settings['container_type'])
|
|
import unicodedata
from collections import defaultdict
from itertools import zip_longest
from .porter import Stemmer
def _normalize(s):
return unicodedata.normalize("NFKD", s)
def _check_type(s):
if not isinstance(s, str):
raise TypeError("expected str or unicode, got %s" % type(s).__name__)
def levenshtein_distance(s1, s2):
_check_type(s1)
_check_type(s2)
if s1 == s2:
return 0
rows = len(s1) + 1
cols = len(s2) + 1
if not s1:
return cols - 1
if not s2:
return rows - 1
prev = None
cur = range(cols)
for r in range(1, rows):
prev, cur = cur, [r] + [0] * (cols - 1)
for c in range(1, cols):
deletion = prev[c] + 1
insertion = cur[c - 1] + 1
edit = prev[c - 1] + (0 if s1[r - 1] == s2[c - 1] else 1)
cur[c] = min(edit, deletion, insertion)
return cur[-1]
def _jaro_winkler(s1, s2, long_tolerance, winklerize):
_check_type(s1)
_check_type(s2)
s1_len = len(s1)
s2_len = len(s2)
if not s1_len or not s2_len:
return 0.0
min_len = min(s1_len, s2_len)
search_range = max(s1_len, s2_len)
search_range = (search_range // 2) - 1
if search_range < 0:
search_range = 0
s1_flags = [False] * s1_len
s2_flags = [False] * s2_len
# looking only within search range, count & flag matched pairs
common_chars = 0
for i, s1_ch in enumerate(s1):
low = max(0, i - search_range)
hi = min(i + search_range, s2_len - 1)
for j in range(low, hi + 1):
if not s2_flags[j] and s2[j] == s1_ch:
s1_flags[i] = s2_flags[j] = True
common_chars += 1
break
# short circuit if no characters match
if not common_chars:
return 0.0
# count transpositions
k = trans_count = 0
for i, s1_f in enumerate(s1_flags):
if s1_f:
for j in range(k, s2_len):
if s2_flags[j]:
k = j + 1
break
if s1[i] != s2[j]:
trans_count += 1
trans_count //= 2
# adjust for similarities in nonmatched characters
common_chars = float(common_chars)
weight = (
(
common_chars / s1_len
+ common_chars / s2_len
+ (common_chars - trans_count) / common_chars
)
) / 3
# winkler modification: continue to boost if strings are similar
if winklerize and weight > 0.7:
# adjust for up to first 4 chars in common
j = min(min_len, 4)
i = 0
while i < j and s1[i] == s2[i]:
i += 1
if i:
weight += i * 0.1 * (1.0 - weight)
        # optionally adjust for long strings
        # after the agreeing prefix, at least two more characters must match,
        # and the matched characters must be at least half of the remaining
        # characters
if (
long_tolerance
and min_len > 4
and common_chars > i + 1
and 2 * common_chars >= min_len + i
):
weight += (1.0 - weight) * (
float(common_chars - i - 1) / float(s1_len + s2_len - i * 2 + 2)
)
return weight
def jaro_similarity(s1, s2):
return _jaro_winkler(s1, s2, False, False) # noqa
def jaro_winkler_similarity(s1, s2, long_tolerance=False):
return _jaro_winkler(s1, s2, long_tolerance, True) # noqa
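# Illustrative usage sketch (an addition for clarity, not part of the original
# module): the Winkler prefix boost can only raise the base Jaro score, so the
# boosted similarity is never lower than the plain one. The helper name and
# sample strings are arbitrary examples.
def _demo_jaro_winkler():
    for a, b in [("martha", "marhta"), ("dwayne", "duane")]:
        base = jaro_similarity(a, b)
        boosted = jaro_winkler_similarity(a, b)
        assert boosted >= base  # the shared-prefix boost never lowers the score
        print(a, b, round(base, 3), round(boosted, 3))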
def damerau_levenshtein_distance(s1, s2):
_check_type(s1)
_check_type(s2)
len1 = len(s1)
len2 = len(s2)
infinite = len1 + len2
# character array
da = defaultdict(int)
# distance matrix
score = [[0] * (len2 + 2) for x in range(len1 + 2)]
score[0][0] = infinite
for i in range(0, len1 + 1):
score[i + 1][0] = infinite
score[i + 1][1] = i
for i in range(0, len2 + 1):
score[0][i + 1] = infinite
score[1][i + 1] = i
for i in range(1, len1 + 1):
db = 0
for j in range(1, len2 + 1):
i1 = da[s2[j - 1]]
j1 = db
cost = 1
if s1[i - 1] == s2[j - 1]:
cost = 0
db = j
score[i + 1][j + 1] = min(
score[i][j] + cost,
score[i + 1][j] + 1,
score[i][j + 1] + 1,
score[i1][j1] + (i - i1 - 1) + 1 + (j - j1 - 1),
)
da[s1[i - 1]] = i
return score[len1 + 1][len2 + 1]
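# Illustrative sketch (an addition, not from the original module): the Damerau
# variant counts an adjacent transposition as a single edit, whereas plain
# Levenshtein needs two substitutions for the same swap.
def _demo_damerau_vs_levenshtein():
    assert levenshtein_distance("ab", "ba") == 2
    assert damerau_levenshtein_distance("ab", "ba") == 1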
def soundex(s):
_check_type(s)
if not s:
return ""
s = _normalize(s)
s = s.upper()
replacements = (
("BFPV", "1"),
("CGJKQSXZ", "2"),
("DT", "3"),
("L", "4"),
("MN", "5"),
("R", "6"),
)
result = [s[0]]
count = 1
# find would-be replacement for first character
for lset, sub in replacements:
if s[0] in lset:
last = sub
break
else:
last = None
for letter in s[1:]:
for lset, sub in replacements:
if letter in lset:
if sub != last:
result.append(sub)
count += 1
last = sub
break
else:
if letter != "H" and letter != "W":
# leave last alone if middle letter is H or W
last = None
if count == 4:
break
result += "0" * (4 - count)
return "".join(result)
def hamming_distance(s1, s2):
_check_type(s1)
_check_type(s2)
# ensure length of s1 >= s2
if len(s2) > len(s1):
s1, s2 = s2, s1
# distance is difference in length + differing chars
distance = len(s1) - len(s2)
for i, c in enumerate(s2):
if c != s1[i]:
distance += 1
return distance
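# Illustrative sketch (an addition, not from the original module): with
# unequal lengths, the length difference itself counts toward the Hamming
# distance, on top of position-wise mismatches over the shorter string.
def _demo_hamming():
    assert hamming_distance("karolin", "kathrin") == 3
    assert hamming_distance("abc", "abcde") == 2  # no mismatches + 2 extra chars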
def nysiis(s):
_check_type(s)
if not s:
return ""
s = s.upper()
key = []
# step 1 - prefixes
if s.startswith("MAC"):
s = "MCC" + s[3:]
elif s.startswith("KN"):
s = s[1:]
elif s.startswith("K"):
s = "C" + s[1:]
elif s.startswith(("PH", "PF")):
s = "FF" + s[2:]
elif s.startswith("SCH"):
s = "SSS" + s[3:]
# step 2 - suffixes
if s.endswith(("IE", "EE")):
s = s[:-2] + "Y"
elif s.endswith(("DT", "RT", "RD", "NT", "ND")):
s = s[:-2] + "D"
# step 3 - first character of key comes from name
key.append(s[0])
# step 4 - translate remaining chars
i = 1
len_s = len(s)
while i < len_s:
ch = s[i]
if ch == "E" and i + 1 < len_s and s[i + 1] == "V":
ch = "AF"
i += 1
elif ch in "AEIOU":
ch = "A"
elif ch == "Q":
ch = "G"
elif ch == "Z":
ch = "S"
elif ch == "M":
ch = "N"
elif ch == "K":
if i + 1 < len(s) and s[i + 1] == "N":
ch = "N"
else:
ch = "C"
elif ch == "S" and s[i + 1 : i + 3] == "CH":
ch = "SS"
i += 2
elif ch == "P" and i + 1 < len(s) and s[i + 1] == "H":
ch = "F"
i += 1
elif ch == "H" and (
s[i - 1] not in "AEIOU"
or (i + 1 < len(s) and s[i + 1] not in "AEIOU")
or (i + 1 == len(s))
):
if s[i - 1] in "AEIOU":
ch = "A"
else:
ch = s[i - 1]
elif ch == "W" and s[i - 1] in "AEIOU":
ch = s[i - 1]
if ch[-1] != key[-1][-1]:
key.append(ch)
i += 1
key = "".join(key)
# step 5 - remove trailing S
if key.endswith("S") and key != "S":
key = key[:-1]
# step 6 - replace AY w/ Y
if key.endswith("AY"):
key = key[:-2] + "Y"
# step 7 - remove trailing A
if key.endswith("A") and key != "A":
key = key[:-1]
# step 8 was already done
return key
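# Illustrative sketch (an addition, not from the original module): the NYSIIS
# prefix rules normalize spelling variants before encoding, e.g. a leading
# "KN" is reduced to "N", so both spellings below produce the same key.
def _demo_nysiis():
    assert nysiis("KNIGHT") == nysiis("NIGHT")
    assert nysiis("KNIGHT").startswith("N")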
def match_rating_codex(s):
_check_type(s)
# we ignore spaces
s = s.upper().replace(" ", "")
codex = []
prev = None
first = True
for c in s:
# starting character
# or consonant not preceded by same consonant
if first or (c not in "AEIOU" and c != prev):
codex.append(c)
prev = c
first = False
# just use first/last 3
if len(codex) > 6:
return "".join(codex[:3] + codex[-3:])
else:
return "".join(codex)
def match_rating_comparison(s1, s2):
codex1 = match_rating_codex(s1)
codex2 = match_rating_codex(s2)
len1 = len(codex1)
len2 = len(codex2)
res1 = []
res2 = []
# length differs by 3 or more, no result
if abs(len1 - len2) >= 3:
return None
# get minimum rating based on sums of codexes
lensum = len1 + len2
if lensum <= 4:
min_rating = 5
elif lensum <= 7:
min_rating = 4
elif lensum <= 11:
min_rating = 3
else:
min_rating = 2
# strip off common prefixes
for c1, c2 in zip_longest(codex1, codex2):
if c1 != c2:
if c1:
res1.append(c1)
if c2:
res2.append(c2)
unmatched_count1 = unmatched_count2 = 0
for c1, c2 in zip_longest(reversed(res1), reversed(res2)):
if c1 != c2:
if c1:
unmatched_count1 += 1
if c2:
unmatched_count2 += 1
return (6 - max(unmatched_count1, unmatched_count2)) >= min_rating
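# Illustrative sketch (an addition, not from the original module): the match
# rating comparison returns True/False against a length-based threshold, and
# None when the two codices differ in length by three or more.
def _demo_match_rating():
    assert match_rating_comparison("Byrne", "Boern") is True
    assert match_rating_comparison("Tim", "Timothy") is None  # codex lengths differ by 3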
def metaphone(s):
_check_type(s)
result = []
s = _normalize(s.lower())
# skip first character if s starts with these
if s.startswith(("kn", "gn", "pn", "wr", "ae")):
s = s[1:]
i = 0
while i < len(s):
c = s[i]
next = s[i + 1] if i < len(s) - 1 else "*****"
nextnext = s[i + 2] if i < len(s) - 2 else "*****"
# skip doubles except for cc
if c == next and c != "c":
i += 1
continue
if c in "aeiou":
if i == 0 or s[i - 1] == " ":
result.append(c)
elif c == "b":
if (not (i != 0 and s[i - 1] == "m")) or next:
result.append("b")
elif c == "c":
if next == "i" and nextnext == "a" or next == "h":
result.append("x")
i += 1
elif next in "iey":
result.append("s")
i += 1
else:
result.append("k")
elif c == "d":
if next == "g" and nextnext in "iey":
result.append("j")
i += 2
else:
result.append("t")
elif c in "fjlmnr":
result.append(c)
elif c == "g":
if next in "iey":
result.append("j")
elif next == "h" and nextnext and nextnext not in "aeiou":
i += 1
elif next == "n" and not nextnext:
i += 1
else:
result.append("k")
elif c == "h":
if i == 0 or next in "aeiou" or s[i - 1] not in "aeiou":
result.append("h")
elif c == "k":
if i == 0 or s[i - 1] != "c":
result.append("k")
elif c == "p":
if next == "h":
result.append("f")
i += 1
else:
result.append("p")
elif c == "q":
result.append("k")
elif c == "s":
if next == "h":
result.append("x")
i += 1
elif next == "i" and nextnext in "oa":
result.append("x")
i += 2
else:
result.append("s")
elif c == "t":
if next == "i" and nextnext in "oa":
result.append("x")
elif next == "h":
result.append("0")
i += 1
elif next != "c" or nextnext != "h":
result.append("t")
elif c == "v":
result.append("f")
elif c == "w":
if i == 0 and next == "h":
i += 1
result.append("w")
elif next in "aeiou":
result.append("w")
elif c == "x":
if i == 0:
if next == "h" or (next == "i" and nextnext in "oa"):
result.append("x")
else:
result.append("s")
else:
result.append("k")
result.append("s")
elif c == "y":
if next in "aeiou":
result.append("y")
elif c == "z":
result.append("s")
elif c == " ":
if len(result) > 0 and result[-1] != " ":
result.append(" ")
i += 1
return "".join(result).upper()
def porter_stem(s):
_check_type(s)
return Stemmer(s).stem()
|
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import unittest
from sqlalchemy.exc import IntegrityError
from woodbox.access_control.record import And, Or, OpSwitch, IsOwner, IsUser1, HasRole, InRecordACL
from woodbox.db import db
from woodbox.models.record_acl_model import RecordACLModel, make_record_acl
from woodbox.models.user_model import WBRoleModel, WBUserModel
from woodbox.tests.flask_test_case import FlaskTestCase
class MyModel(db.Model):
id = db.Column(db.Integer, db.Sequence('my_model_id_seq'), primary_key=True)
owner_id = db.Column(db.Integer, db.ForeignKey('wb_user_model.id'), nullable=False)
owner = db.relationship('WBUserModel', foreign_keys='MyModel.owner_id')
title = db.Column(db.String(256), unique=False, nullable=True)
class RecordAccessTestCase(FlaskTestCase):
def setUp(self):
super(RecordAccessTestCase, self).setUp()
with self.app.test_request_context('/'):
db.initialize()
# Create some roles
self.r1 = WBRoleModel(rolename='admin')
db.session.add(self.r1)
self.r2 = WBRoleModel(rolename='manager')
db.session.add(self.r2)
self.r3 = WBRoleModel(rolename='user')
db.session.add(self.r3)
db.session.commit()
# Create some users
self.u1 = WBUserModel(username='a', password='a', roles=[self.r1])
db.session.add(self.u1)
self.u2 = WBUserModel(username='b', password='a', roles=[self.r2])
db.session.add(self.u2)
self.u3 = WBUserModel(username='c', password='a', roles=[self.r3])
db.session.add(self.u3)
db.session.commit()
self.u1 = self.u1.id
self.u2 = self.u2.id
self.u3 = self.u3.id
self.r1 = self.r1.id
self.r2 = self.r2.id
self.r3 = self.r3.id
# Create some data
self.d1 = MyModel(title='a', owner_id=self.u1)
db.session.add(self.d1)
self.d2 = MyModel(title='a', owner_id=self.u1)
db.session.add(self.d2)
self.d3 = MyModel(title='a', owner_id=self.u2)
db.session.add(self.d3)
self.d4 = MyModel(title='a', owner_id=self.u3)
db.session.add(self.d4)
db.session.commit()
self.d1 = self.d1.id
self.d2 = self.d2.id
self.d3 = self.d3.id
self.d4 = self.d4.id
# Add some access control records
anon = WBRoleModel.get_anonymous_role_id()
db.session.add_all(make_record_acl(record_types=['My'],
record_ids=[self.d1, self.d2, self.d3, self.d4],
user_role_ids=[self.r1],
permissions=['read', 'update', 'delete']))
db.session.add_all(make_record_acl(record_types=['My'],
record_ids=[self.d1, self.d2, self.d3, self.d4],
user_role_ids=[self.r2],
permissions=['read']))
db.session.add_all(make_record_acl(record_types=['My'],
record_ids=[self.d3, self.d4],
user_role_ids=[self.r2],
permissions=['update', 'delete']))
db.session.add_all(make_record_acl(record_types=['My'],
record_ids=[self.d1, self.d2, self.d3, self.d4],
user_role_ids=[self.r3],
permissions=['read']))
db.session.add_all(make_record_acl(record_types=['My'],
record_ids=[self.d3, self.d4],
user_role_ids=[anon],
permissions=['read']))
db.session.commit()
def test_is_owner_1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = IsOwner()
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_is_owner_3(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = IsOwner()
query = ac.alter_query('read', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_is_user1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = IsUser1()
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_not_is_user1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = IsUser1()
query = ac.alter_query('read', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_is_owner_or_is_user1_1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = Or(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_is_owner_or_is_user1_2(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = Or(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_is_owner_or_is_user1_3(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = Or(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_is_owner_and_is_user1_1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = And(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_is_owner_and_is_user1_2(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = And(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_is_owner_and_is_user1_3(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = And(IsOwner(), IsUser1())
query = ac.alter_query('read', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_has_role_anonymous(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = HasRole([WBRoleModel.anonymous_role_name])
query = ac.alter_query('read', query, None, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_has_role_admin(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = HasRole(['admin'])
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_not_has_role_admin(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = HasRole(['admin'])
query = ac.alter_query('read', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_in_record_acl_1(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('update', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('delete', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_in_record_acl_2(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('read', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('update', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('delete', query, self.u2, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
def test_in_record_acl_3(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('read', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('update', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('delete', query, self.u3, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_in_record_acl_none(self):
with self.app.test_request_context('/'):
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('read', query, None, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('update', query, None, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
query = MyModel.query
ac = InRecordACL()
query = ac.alter_query('delete', query, None, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertNotIn(self.d1, ids)
self.assertNotIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
def test_op_switch(self):
with self.app.test_request_context('/'):
query = MyModel.query
is_owner = IsOwner()
ac = OpSwitch(update_ac=is_owner, delete_ac=is_owner)
query = ac.alter_query('read', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertIn(self.d3, ids)
self.assertIn(self.d4, ids)
query = MyModel.query
is_owner = IsOwner()
ac = OpSwitch(update_ac=is_owner, delete_ac=is_owner)
query = ac.alter_query('update', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
query = MyModel.query
is_owner = IsOwner()
ac = OpSwitch(update_ac=is_owner, delete_ac=is_owner)
query = ac.alter_query('delete', query, self.u1, 'My', MyModel)
items = query.values(MyModel.id)
ids = [i[0] for i in items]
self.assertIn(self.d1, ids)
self.assertIn(self.d2, ids)
self.assertNotIn(self.d3, ids)
self.assertNotIn(self.d4, ids)
|
|
"""Tests for the Roku component."""
from http import HTTPStatus
import re
from socket import gaierror as SocketGIAError
from homeassistant.components.roku.const import DOMAIN
from homeassistant.components.ssdp import (
ATTR_SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME,
ATTR_UPNP_SERIAL,
)
from homeassistant.const import CONF_HOST, CONF_ID, CONF_NAME
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry, load_fixture
from tests.test_util.aiohttp import AiohttpClientMocker
NAME = "Roku 3"
NAME_ROKUTV = '58" Onn Roku TV'
HOST = "192.168.1.160"
SSDP_LOCATION = "http://192.168.1.160/"
UPNP_FRIENDLY_NAME = "My Roku 3"
UPNP_SERIAL = "1GU48T017973"
MOCK_SSDP_DISCOVERY_INFO = {
ATTR_SSDP_LOCATION: SSDP_LOCATION,
ATTR_UPNP_FRIENDLY_NAME: UPNP_FRIENDLY_NAME,
ATTR_UPNP_SERIAL: UPNP_SERIAL,
}
HOMEKIT_HOST = "192.168.1.161"
MOCK_HOMEKIT_DISCOVERY_INFO = {
CONF_NAME: "onn._hap._tcp.local.",
CONF_HOST: HOMEKIT_HOST,
"properties": {
CONF_ID: "2d:97:da:ee:dc:99",
},
}
def mock_connection(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
power: bool = True,
media_state: str = "close",
error: bool = False,
server_error: bool = False,
) -> None:
"""Mock the Roku connection."""
roku_url = f"http://{host}:8060"
if error:
mock_connection_error(
aioclient_mock=aioclient_mock, device=device, app=app, host=host
)
return
if server_error:
mock_connection_server_error(
aioclient_mock=aioclient_mock, device=device, app=app, host=host
)
return
info_fixture = f"roku/{device}-device-info.xml"
if not power:
info_fixture = f"roku/{device}-device-info-power-off.xml"
aioclient_mock.get(
f"{roku_url}/query/device-info",
text=load_fixture(info_fixture),
headers={"Content-Type": "text/xml"},
)
apps_fixture = "roku/apps.xml"
if device == "rokutv":
apps_fixture = "roku/apps-tv.xml"
aioclient_mock.get(
f"{roku_url}/query/apps",
text=load_fixture(apps_fixture),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/active-app",
text=load_fixture(f"roku/active-app-{app}.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/tv-active-channel",
text=load_fixture("roku/rokutv-tv-active-channel.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/tv-channels",
text=load_fixture("roku/rokutv-tv-channels.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.get(
f"{roku_url}/query/media-player",
text=load_fixture(f"roku/media-player-{media_state}.xml"),
headers={"Content-Type": "text/xml"},
)
aioclient_mock.post(
re.compile(f"{roku_url}/keypress/.*"),
text="OK",
)
aioclient_mock.post(
re.compile(f"{roku_url}/launch/.*"),
text="OK",
)
aioclient_mock.post(f"{roku_url}/search", text="OK")
def mock_connection_error(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
) -> None:
"""Mock the Roku connection error."""
roku_url = f"http://{host}:8060"
aioclient_mock.get(f"{roku_url}/query/device-info", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/apps", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/active-app", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/tv-active-channel", exc=SocketGIAError)
aioclient_mock.get(f"{roku_url}/query/tv-channels", exc=SocketGIAError)
aioclient_mock.post(re.compile(f"{roku_url}/keypress/.*"), exc=SocketGIAError)
aioclient_mock.post(re.compile(f"{roku_url}/launch/.*"), exc=SocketGIAError)
aioclient_mock.post(f"{roku_url}/search", exc=SocketGIAError)
def mock_connection_server_error(
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
) -> None:
"""Mock the Roku server error."""
roku_url = f"http://{host}:8060"
aioclient_mock.get(
f"{roku_url}/query/device-info", status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.get(
f"{roku_url}/query/apps", status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.get(
f"{roku_url}/query/active-app", status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.get(
f"{roku_url}/query/tv-active-channel", status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.get(
f"{roku_url}/query/tv-channels", status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.post(
re.compile(f"{roku_url}/keypress/.*"), status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.post(
re.compile(f"{roku_url}/launch/.*"), status=HTTPStatus.INTERNAL_SERVER_ERROR
)
aioclient_mock.post(f"{roku_url}/search", status=HTTPStatus.INTERNAL_SERVER_ERROR)
async def setup_integration(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
device: str = "roku3",
app: str = "roku",
host: str = HOST,
unique_id: str = UPNP_SERIAL,
error: bool = False,
power: bool = True,
media_state: str = "close",
server_error: bool = False,
skip_entry_setup: bool = False,
) -> MockConfigEntry:
"""Set up the Roku integration in Home Assistant."""
entry = MockConfigEntry(domain=DOMAIN, unique_id=unique_id, data={CONF_HOST: host})
entry.add_to_hass(hass)
if not skip_entry_setup:
mock_connection(
aioclient_mock,
device,
app=app,
host=host,
error=error,
power=power,
media_state=media_state,
server_error=server_error,
)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry
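# Illustrative usage sketch (an addition, not part of the original module):
# integration test modules typically call setup_integration() with Home
# Assistant's shared hass and aioclient_mock fixtures; the helper below only
# demonstrates that pattern and is not used by the component tests themselves.
async def _example_setup_usage(
    hass: HomeAssistant, aioclient_mock: AiohttpClientMocker
) -> None:
    """Set up the integration with the default roku3 mocks and check the entry."""
    entry = await setup_integration(hass, aioclient_mock, device="roku3")
    assert entry.data[CONF_HOST] == HOST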
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Script to test TF-TRT INT8 conversion without calibration on Mnist model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.tensorrt.python import trt_convert
# pylint: disable=unused-import
from tensorflow.contrib.tensorrt.python.ops import trt_engine_op
# pylint: enable=unused-import
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import data
from tensorflow.python import keras
from tensorflow.python.estimator.estimator import Estimator
from tensorflow.python.estimator.model_fn import EstimatorSpec
from tensorflow.python.estimator.model_fn import ModeKeys
from tensorflow.python.estimator.run_config import RunConfig
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import importer
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.keras.datasets import mnist
from tensorflow.python.layers import layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import metrics
from tensorflow.python.ops import nn
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.summary import summary
from tensorflow.python.training import saver
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.checkpoint_management import latest_checkpoint
from tensorflow.python.training.training_util import get_global_step
INPUT_NODE_NAME = 'input'
OUTPUT_NODE_NAME = 'output'
class QuantizationAwareTrainingMNISTTest(test_util.TensorFlowTestCase):
def _BuildGraph(self, x):
def _Quantize(x, r):
x = gen_array_ops.quantize_and_dequantize_v2(x, -r, r)
return x
def _DenseLayer(x, num_inputs, num_outputs, quantization_range, name):
"""Dense layer with quantized outputs.
Args:
x: input to the dense layer
num_inputs: number of input columns of x
num_outputs: number of output columns
quantization_range: the min/max range for quantization
name: name of the variable scope
Returns:
The output of the layer.
"""
with variable_scope.variable_scope(name):
kernel = variable_scope.get_variable(
'kernel',
shape=[num_inputs, num_outputs],
dtype=dtypes.float32,
initializer=keras.initializers.glorot_uniform())
bias = variable_scope.get_variable(
'bias',
shape=[num_outputs],
dtype=dtypes.float32,
initializer=keras.initializers.zeros())
x = math_ops.matmul(x, kernel)
x = _Quantize(x, quantization_range)
x = nn.bias_add(x, bias)
x = _Quantize(x, quantization_range)
return x
x = _Quantize(x, 1)
# Conv + Bias + Relu6
x = layers.conv2d(x, filters=32, kernel_size=3, use_bias=True)
x = nn.relu6(x)
# Conv + Bias + Relu6
x = layers.conv2d(x, filters=64, kernel_size=3, use_bias=True)
x = nn.relu6(x)
# Reduce
x = math_ops.reduce_mean(x, [1, 2])
x = _Quantize(x, 6)
# FC1
x = _DenseLayer(x, 64, 512, 6, name='dense')
x = nn.relu6(x)
# FC2
x = _DenseLayer(x, 512, 10, 25, name='dense_1')
x = array_ops.identity(x, name=OUTPUT_NODE_NAME)
return x
def _GetGraphDef(self, use_trt, max_batch_size, model_dir):
"""Get the frozen mnist GraphDef.
Args:
use_trt: whether use TF-TRT to convert the graph.
max_batch_size: the max batch size to apply during TF-TRT conversion.
model_dir: the model directory to load the checkpoints.
Returns:
The frozen mnist GraphDef.
"""
graph = ops.Graph()
with self.session(graph=graph) as sess:
with graph.device('/GPU:0'):
x = array_ops.placeholder(
shape=(None, 28, 28, 1), dtype=dtypes.float32, name=INPUT_NODE_NAME)
self._BuildGraph(x)
# Load weights
mnist_saver = saver.Saver()
checkpoint_file = latest_checkpoint(model_dir)
mnist_saver.restore(sess, checkpoint_file)
# Freeze
graph_def = graph_util.convert_variables_to_constants(
sess, sess.graph_def, output_node_names=[OUTPUT_NODE_NAME])
# Convert with TF-TRT
if use_trt:
logging.info('Number of nodes before TF-TRT conversion: %d',
len(graph_def.node))
graph_def = trt_convert.create_inference_graph(
graph_def,
outputs=[OUTPUT_NODE_NAME],
max_batch_size=max_batch_size,
precision_mode='INT8',
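          # 4096 << 19 bytes == 2 GiB of TensorRT workspace.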
max_workspace_size_bytes=4096 << 19,
minimum_segment_size=2,
use_calibration=False,
)
logging.info('Number of nodes after TF-TRT conversion: %d',
len(graph_def.node))
num_engines = len(
[1 for n in graph_def.node if str(n.op) == 'TRTEngineOp'])
self.assertEqual(1, num_engines)
return graph_def
def _Run(self, is_training, use_trt, batch_size, num_epochs, model_dir):
"""Train or evaluate the model.
Args:
is_training: whether to train or evaluate the model. In training mode,
quantization will be simulated where the quantize_and_dequantize_v2 are
placed.
use_trt: if true, use TRT INT8 mode for evaluation, which will perform
real quantization. Otherwise use native TensorFlow which will perform
simulated quantization. Ignored if is_training is True.
batch_size: batch size.
num_epochs: how many epochs to train. Ignored if is_training is False.
model_dir: where to save or load checkpoint.
Returns:
The Estimator evaluation result.
"""
# Get dataset
train_data, test_data = mnist.load_data()
def _PreprocessFn(x, y):
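      # Cast to float, add a channel dimension, and rescale pixels from [0, 255] to [-1, 1].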
x = math_ops.cast(x, dtypes.float32)
x = array_ops.expand_dims(x, axis=2)
x = 2.0 * (x / 255.0) - 1.0
y = math_ops.cast(y, dtypes.int32)
return x, y
def _EvalInputFn():
mnist_x, mnist_y = test_data
dataset = data.Dataset.from_tensor_slices((mnist_x, mnist_y))
dataset = dataset.apply(
data.experimental.map_and_batch(
map_func=_PreprocessFn,
batch_size=batch_size,
num_parallel_calls=8))
dataset = dataset.repeat(count=1)
iterator = dataset.make_one_shot_iterator()
features, labels = iterator.get_next()
return features, labels
def _TrainInputFn():
mnist_x, mnist_y = train_data
dataset = data.Dataset.from_tensor_slices((mnist_x, mnist_y))
dataset = dataset.shuffle(2 * len(mnist_x))
dataset = dataset.apply(
data.experimental.map_and_batch(
map_func=_PreprocessFn,
batch_size=batch_size,
num_parallel_calls=8))
dataset = dataset.repeat(count=num_epochs)
iterator = dataset.make_one_shot_iterator()
features, labels = iterator.get_next()
return features, labels
def _ModelFn(features, labels, mode):
if is_training:
logits_out = self._BuildGraph(features)
else:
graph_def = self._GetGraphDef(use_trt, batch_size, model_dir)
logits_out = importer.import_graph_def(
graph_def,
input_map={INPUT_NODE_NAME: features},
return_elements=[OUTPUT_NODE_NAME + ':0'],
name='')[0]
loss = losses.sparse_softmax_cross_entropy(
labels=labels, logits=logits_out)
summary.scalar('loss', loss)
classes_out = math_ops.argmax(logits_out, axis=1, name='classes_out')
accuracy = metrics.accuracy(
labels=labels, predictions=classes_out, name='acc_op')
summary.scalar('accuracy', accuracy[1])
if mode == ModeKeys.EVAL:
return EstimatorSpec(
mode, loss=loss, eval_metric_ops={'accuracy': accuracy})
elif mode == ModeKeys.TRAIN:
optimizer = AdamOptimizer(learning_rate=1e-2)
train_op = optimizer.minimize(loss, global_step=get_global_step())
return EstimatorSpec(mode, loss=loss, train_op=train_op)
config_proto = config_pb2.ConfigProto()
config_proto.gpu_options.allow_growth = True
estimator = Estimator(
model_fn=_ModelFn,
model_dir=model_dir if is_training else None,
config=RunConfig(session_config=config_proto))
if is_training:
estimator.train(_TrainInputFn)
results = estimator.evaluate(_EvalInputFn)
logging.info('accuracy: %s', str(results['accuracy']))
return results
# To generate the checkpoint, set a different model_dir and call self._Run()
  # with is_training=True and a sufficiently large num_epochs, e.g.:
# model_dir = '/tmp/quantization_mnist'
# self._Run(
# is_training=True,
# use_trt=False,
# batch_size=128,
# num_epochs=100,
# model_dir=model_dir)
def testEval(self):
if not trt_convert.is_tensorrt_enabled():
return
model_dir = test.test_src_dir_path('contrib/tensorrt/test/testdata')
accuracy_tf_native = self._Run(
is_training=False,
use_trt=False,
batch_size=128,
num_epochs=None,
model_dir=model_dir)['accuracy']
logging.info('accuracy_tf_native: %f', accuracy_tf_native)
self.assertAllClose(accuracy_tf_native, 0.9662)
if trt_convert.get_linked_tensorrt_version()[0] < 5:
return
accuracy_tf_trt = self._Run(
is_training=False,
use_trt=True,
batch_size=128,
num_epochs=None,
model_dir=model_dir)['accuracy']
logging.info('accuracy_tf_trt: %f', accuracy_tf_trt)
self.assertAllClose(accuracy_tf_trt, 0.9677)
if __name__ == '__main__':
test.main()
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import os.path
from bson.objectid import ObjectId
from girder import logger
from girder.api import access
from girder.api.describe import autoDescribeRoute, Description
from girder.api.rest import filtermodel, RestException, \
boundHandler, getCurrentUser
from girder.constants import AccessType, TokenScope
from girder.plugins.worker import utils as workerUtils
# from girder.utility.model_importer import ModelImporter
from ..constants import JobStatus
def addItemRoutes(item):
routes = createRoutes(item)
item.route('GET', (':id', 'video'), routes['getVideoMetadata'])
item.route('PUT', (':id', 'video'), routes['processVideo'])
item.route('DELETE', (':id', 'video'), routes['deleteProcessedVideo'])
item.route('GET', (':id', 'video', 'frame'), routes['getVideoFrame'])
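# A minimal sketch (not part of this module) of how these routes might be
# registered from a Girder plugin's load() hook; the load(info) signature and
# info['apiRoot'] layout follow the usual Girder plugin convention and are
# assumptions here:
#
#     def load(info):
#         addItemRoutes(info['apiRoot'].item)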
def createRoutes(item):
@autoDescribeRoute(
Description('Return video metadata if it exists.')
.param('id', 'Id of the item.', paramType='path')
.errorResponse()
.errorResponse('Read access was denied on the item.', 403)
)
@access.public
@boundHandler(item)
def getVideoMetadata(self, id, params):
return {
'a': 1,
'b': 2
}
@autoDescribeRoute(
Description('Create a girder-worker job to process the given video.')
.param('id', 'Id of the item.', paramType='path')
.param('fileId', 'Id of the file to use as the video.', required=False)
.param('force', 'Force the creation of a new job.', required=False,
dataType='boolean', default=False)
.errorResponse()
.errorResponse('Read access was denied on the item.', 403)
)
@access.public
@boundHandler(item)
def processVideo(self, id, params):
force = params['force']
user, userToken = getCurrentUser(True)
itemModel = self.model('item')
fileModel = self.model('file')
tokenModel = self.model('token')
jobModel = self.model('job', 'jobs')
item = itemModel.load(id, user=user, level=AccessType.READ)
itemVideoData = item.get('video', {})
jobId = itemVideoData.get('jobId')
itemAlreadyProcessed = False
job = None
if jobId is not None:
job = jobModel.load(jobId, level=AccessType.READ, user=user)
if not force:
if job is not None:
status = job['status']
if status not in (
None, JobStatus.ERROR, JobStatus.CANCELED):
itemAlreadyProcessed = True
if itemAlreadyProcessed:
result = {
'video': {
'jobCreated': False,
'message': 'Processing job already created.'
}
}
result.update(job)
return result
# if user provided fileId, use that one
fileId = params.get('fileId')
if fileId is not None:
# ensure the provided fileId is valid
inputFile = fileModel.findOne({
'itemId': ObjectId(id), '_id': ObjectId(fileId)})
if inputFile is None:
raise RestException(
'Item with id=%s has no such file with id=%s' %
(id, fileId))
else:
# User did not provide a fileId.
#
# If we're *re*running a processing job (force=True), look
# for the fileId used by the old job.
if force and job:
fileId = job.get('meta', {}).get('video', {}).get('fileId')
if fileId:
# ensure the provided fileId is valid, but in this case,
# don't raise an exception if it is not -- just discard the
# fileId and move on
inputFile = fileModel.findOne({
'itemId': ObjectId(id), '_id': ObjectId(fileId)})
if inputFile is None:
fileId = None
# if we *still* don't have a fileId, just grab the first one found under
# the given item.
if fileId is None:
inputFile = fileModel.findOne({'itemId': ObjectId(id)})
# if there *are* no files, bail
if inputFile is None:
                raise RestException('item %s has no files' % id)
fileId = inputFile['_id']
# if we are *re*running a processing job (force=True), remove all files
# from this item that were created by the last processing job...
#
# ...unless (for some reason) the user is running the job against that
# particular file (this is almost certainly user error, but for now,
# we'll just keep the file around).
if force:
fileIdList = itemVideoData.get('createdFiles', [])
for f in fileIdList:
if f == fileId:
continue
theFile = fileModel.load(
f, level=AccessType.WRITE, user=user)
if theFile:
fileModel.remove(theFile)
itemVideoData['createdFiles'] = []
# begin construction of the actual job
if not userToken:
# It seems like we should be able to use a token without USER_AUTH
# in its scope, but I'm not sure how.
userToken = tokenModel.createToken(
user, days=1, scope=TokenScope.USER_AUTH)
jobTitle = 'Video Processing'
job = jobModel.createJob(
title=jobTitle,
type='video',
user=user,
handler='worker_handler'
)
jobToken = jobModel.createJobToken(job)
job['kwargs'] = job.get('kwargs', {})
job['kwargs']['task'] = {
'mode': 'docker',
# TODO(opadron): replace this once we have a maintained
# image on dockerhub
'docker_image': 'ffmpeg_local',
'progress_pipe': True,
'a': 'b',
'pull_image': False,
'inputs': [
{
'id': 'input',
'type': 'string',
'format': 'text',
'target': 'filepath'
}
],
'outputs': [
{
'id': '_stdout',
'type': 'string',
'format': 'text',
'target': 'memory'
},
{
'id': '_stderr',
'type': 'string',
'format': 'text',
'target': 'memory'
},
{
'id': 'source',
                'type': 'string',
'format': 'text',
'target': 'filepath',
'path': '/mnt/girder_worker/data/source.webm'
},
{
'id': 'meta',
                'type': 'string',
'format': 'text',
'target': 'filepath',
'path': '/mnt/girder_worker/data/meta.json'
},
]
}
_, itemExt = os.path.splitext(item['name'])
job['kwargs']['inputs'] = {
'input': workerUtils.girderInputSpec(
inputFile,
resourceType='file',
token=userToken,
name='input' + itemExt,
dataType='string',
dataFormat='text'
)
}
job['kwargs']['outputs'] = {
'_stdout': workerUtils.girderOutputSpec(
item,
parentType='item',
token=userToken,
name='processing_stdout.txt',
dataType='string',
dataFormat='text',
reference='videoPlugin'
),
'_stderr': workerUtils.girderOutputSpec(
item,
parentType='item',
token=userToken,
name='processing_stderr.txt',
dataType='string',
dataFormat='text',
reference='videoPlugin'
),
'source': workerUtils.girderOutputSpec(
item,
parentType='item',
token=userToken,
name='source.webm',
dataType='string',
dataFormat='text',
reference='videoPlugin'
),
'meta': workerUtils.girderOutputSpec(
item,
parentType='item',
token=userToken,
name='meta.json',
dataType='string',
dataFormat='text',
reference='videoPlugin'
),
}
job['kwargs']['jobInfo'] = workerUtils.jobInfoSpec(
job=job,
token=jobToken,
logPrint=True)
job['meta'] = job.get('meta', {})
job['meta']['video_plugin'] = {
'itemId': id,
'fileId': fileId
}
job = jobModel.save(job)
jobModel.scheduleJob(job)
itemVideoData['jobId'] = str(job['_id'])
item['video'] = itemVideoData
itemModel.save(item)
result = {
'video': {
'jobCreated': True,
'message': 'Processing job created.'
}
}
result.update(job)
return result
@autoDescribeRoute(
Description('Delete the processed results from the given video.')
.param('id', 'Id of the item.', paramType='path')
.errorResponse()
.errorResponse('Write access was denied on the item.', 403)
)
@access.public
@boundHandler(item)
    def deleteProcessedVideo(self, id, params):
pass
@autoDescribeRoute(
Description('Get a single frame from the given video.')
.param('id', 'Id of the item.', paramType='path')
.param('time', 'Point in time from which to sample the frame.',
required=True)
.errorResponse()
.errorResponse('Read access was denied on the item.', 403)
)
@access.public
@boundHandler(item)
    def getVideoFrame(self, id, params):
pass
return {
'getVideoMetadata': getVideoMetadata,
'processVideo': processVideo,
'deleteProcessedVideo': deleteProcessedVideo,
'getVideoFrame': getVideoFrame
}
|
|
# Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Author: Sylvain Afchain <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import logging
import webob.exc
from neutron.api.extensions import ExtensionMiddleware
from neutron.api.extensions import PluginAwareExtensionManager
from neutron.common import config
from neutron import context
import neutron.extensions
from neutron.extensions import metering
from neutron.plugins.common import constants
from neutron.services.metering import metering_plugin
from neutron.tests.unit import test_db_plugin
LOG = logging.getLogger(__name__)
DB_METERING_PLUGIN_KLASS = (
"neutron.services.metering."
"metering_plugin.MeteringPlugin"
)
extensions_path = ':'.join(neutron.extensions.__path__)
class MeteringPluginDbTestCaseMixin(object):
def _create_metering_label(self, fmt, name, description, **kwargs):
data = {'metering_label': {'name': name,
'tenant_id': kwargs.get('tenant_id',
'test_tenant'),
'description': description}}
req = self.new_create_request('metering-labels', data,
fmt)
if kwargs.get('set_context') and 'tenant_id' in kwargs:
# create a specific auth context for this request
req.environ['neutron.context'] = (
context.Context('', kwargs['tenant_id'],
is_admin=kwargs.get('is_admin', True)))
return req.get_response(self.ext_api)
def _make_metering_label(self, fmt, name, description, **kwargs):
res = self._create_metering_label(fmt, name, description, **kwargs)
if res.status_int >= 400:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(fmt, res)
def _create_metering_label_rule(self, fmt, metering_label_id, direction,
remote_ip_prefix, excluded, **kwargs):
data = {'metering_label_rule':
{'metering_label_id': metering_label_id,
'tenant_id': kwargs.get('tenant_id', 'test_tenant'),
'direction': direction,
'excluded': excluded,
'remote_ip_prefix': remote_ip_prefix}}
req = self.new_create_request('metering-label-rules',
data, fmt)
if kwargs.get('set_context') and 'tenant_id' in kwargs:
# create a specific auth context for this request
req.environ['neutron.context'] = (
context.Context('', kwargs['tenant_id']))
return req.get_response(self.ext_api)
def _make_metering_label_rule(self, fmt, metering_label_id, direction,
remote_ip_prefix, excluded, **kwargs):
res = self._create_metering_label_rule(fmt, metering_label_id,
direction, remote_ip_prefix,
excluded, **kwargs)
if res.status_int >= 400:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(fmt, res)
@contextlib.contextmanager
def metering_label(self, name='label', description='desc',
fmt=None, no_delete=False, **kwargs):
if not fmt:
fmt = self.fmt
metering_label = self._make_metering_label(fmt, name,
description, **kwargs)
try:
yield metering_label
finally:
if not no_delete:
self._delete('metering-labels',
metering_label['metering_label']['id'])
@contextlib.contextmanager
def metering_label_rule(self, metering_label_id=None, direction='ingress',
remote_ip_prefix='10.0.0.0/24',
excluded='false', fmt=None, no_delete=False):
if not fmt:
fmt = self.fmt
metering_label_rule = self._make_metering_label_rule(fmt,
metering_label_id,
direction,
remote_ip_prefix,
excluded)
try:
yield metering_label_rule
finally:
if not no_delete:
self._delete('metering-label-rules',
metering_label_rule['metering_label_rule']['id'])
class MeteringPluginDbTestCase(test_db_plugin.NeutronDbPluginV2TestCase,
MeteringPluginDbTestCaseMixin):
fmt = 'json'
resource_prefix_map = dict(
(k.replace('_', '-'), constants.COMMON_PREFIXES[constants.METERING])
for k in metering.RESOURCE_ATTRIBUTE_MAP.keys()
)
def setUp(self, plugin=None):
service_plugins = {'metering_plugin_name': DB_METERING_PLUGIN_KLASS}
super(MeteringPluginDbTestCase, self).setUp(
plugin=plugin,
service_plugins=service_plugins
)
self.plugin = metering_plugin.MeteringPlugin()
ext_mgr = PluginAwareExtensionManager(
extensions_path,
{constants.METERING: self.plugin}
)
app = config.load_paste_app('extensions_test_app')
self.ext_api = ExtensionMiddleware(app, ext_mgr=ext_mgr)
def test_create_metering_label(self):
name = 'my label'
description = 'my metering label'
keys = [('name', name,), ('description', description)]
with self.metering_label(name, description) as metering_label:
for k, v, in keys:
self.assertEqual(metering_label['metering_label'][k], v)
def test_delete_metering_label(self):
name = 'my label'
description = 'my metering label'
with self.metering_label(name, description,
no_delete=True) as metering_label:
metering_label_id = metering_label['metering_label']['id']
self._delete('metering-labels', metering_label_id, 204)
def test_list_metering_label(self):
name = 'my label'
description = 'my metering label'
with contextlib.nested(
self.metering_label(name, description),
self.metering_label(name, description)) as metering_label:
self._test_list_resources('metering-label', metering_label)
def test_create_metering_label_rule(self):
name = 'my label'
description = 'my metering label'
with self.metering_label(name, description) as metering_label:
metering_label_id = metering_label['metering_label']['id']
direction = 'egress'
remote_ip_prefix = '192.168.0.0/24'
excluded = True
keys = [('metering_label_id', metering_label_id),
('direction', direction),
('excluded', excluded),
('remote_ip_prefix', remote_ip_prefix)]
with self.metering_label_rule(metering_label_id,
direction,
remote_ip_prefix,
excluded) as label_rule:
for k, v, in keys:
self.assertEqual(label_rule['metering_label_rule'][k], v)
def test_delete_metering_label_rule(self):
name = 'my label'
description = 'my metering label'
with self.metering_label(name, description) as metering_label:
metering_label_id = metering_label['metering_label']['id']
direction = 'egress'
remote_ip_prefix = '192.168.0.0/24'
excluded = True
with self.metering_label_rule(metering_label_id,
direction,
remote_ip_prefix,
excluded,
no_delete=True) as label_rule:
rule_id = label_rule['metering_label_rule']['id']
self._delete('metering-label-rules', rule_id, 204)
def test_list_metering_label_rule(self):
name = 'my label'
description = 'my metering label'
with self.metering_label(name, description) as metering_label:
metering_label_id = metering_label['metering_label']['id']
direction = 'egress'
remote_ip_prefix = '192.168.0.0/24'
excluded = True
with contextlib.nested(
self.metering_label_rule(metering_label_id,
direction,
remote_ip_prefix,
excluded),
self.metering_label_rule(metering_label_id,
'ingress',
remote_ip_prefix,
excluded)) as metering_label_rule:
self._test_list_resources('metering-label-rule',
metering_label_rule)
def test_create_metering_label_rules(self):
name = 'my label'
description = 'my metering label'
with self.metering_label(name, description) as metering_label:
metering_label_id = metering_label['metering_label']['id']
direction = 'egress'
remote_ip_prefix = '192.168.0.0/24'
excluded = True
with contextlib.nested(
self.metering_label_rule(metering_label_id,
direction,
remote_ip_prefix,
excluded),
self.metering_label_rule(metering_label_id,
direction,
'0.0.0.0/0',
False)) as metering_label_rule:
self._test_list_resources('metering-label-rule',
metering_label_rule)
def test_create_metering_label_rule_two_labels(self):
name1 = 'my label 1'
name2 = 'my label 2'
description = 'my metering label'
with self.metering_label(name1, description) as metering_label1:
metering_label_id1 = metering_label1['metering_label']['id']
with self.metering_label(name2, description) as metering_label2:
metering_label_id2 = metering_label2['metering_label']['id']
direction = 'egress'
remote_ip_prefix = '192.168.0.0/24'
excluded = True
with contextlib.nested(
self.metering_label_rule(metering_label_id1,
direction,
remote_ip_prefix,
excluded),
self.metering_label_rule(metering_label_id2,
direction,
remote_ip_prefix,
excluded)) as metering_label_rule:
self._test_list_resources('metering-label-rule',
metering_label_rule)
class TestMeteringDbXML(MeteringPluginDbTestCase):
fmt = 'xml'
|
|
"""KDE of Temps."""
import calendar
from datetime import date, datetime
import pandas as pd
from pyiem.plot.util import fitbox
from pyiem.plot import figure
from pyiem.util import get_autoplot_context, get_sqlalchemy_conn
from pyiem.exceptions import NoDataFound
from matplotlib.ticker import MaxNLocator
from scipy.stats import gaussian_kde
import numpy as np
from sqlalchemy import text
PDICT = {
"high": "High Temperature [F]",
"low": "Low Temperature [F]",
"avgt": "Average Temperature [F]",
}
MDICT = dict(
[
("all", "No Month/Time Limit"),
("spring", "Spring (MAM)"),
("fall", "Fall (SON)"),
("winter", "Winter (DJF)"),
("summer", "Summer (JJA)"),
("jan", "January"),
("feb", "February"),
("mar", "March"),
("apr", "April"),
("may", "May"),
("jun", "June"),
("jul", "July"),
("aug", "August"),
("sep", "September"),
("oct", "October"),
("nov", "November"),
("dec", "December"),
]
)
def get_description():
"""Return a dict describing how to call this plotter"""
desc = {}
desc["cache"] = 3600
desc["data"] = True
desc[
"description"
] = """This autoplot generates some metrics on the distribution of temps
over a given period of years. The plotted distribution in the upper panel
    uses a Gaussian kernel density estimate.
"""
desc["arguments"] = [
dict(
type="station",
name="station",
default="IATDSM",
label="Select station:",
network="IACLIMATE",
),
dict(
type="select",
options=PDICT,
name="v",
default="high",
label="Daily Variable to Plot:",
),
dict(
type="select",
name="month",
default="all",
label="Month Limiter",
options=MDICT,
),
dict(
type="year",
min=1880,
name="sy1",
default=1981,
label="Inclusive Start Year for First Period of Years:",
),
dict(
type="year",
min=1880,
name="ey1",
default=2010,
label="Inclusive End Year for First Period of Years:",
),
dict(
type="year",
min=1880,
name="sy2",
default=1991,
label="Inclusive Start Year for Second Period of Years:",
),
dict(
type="year",
min=1880,
name="ey2",
default=2020,
label="Inclusive End Year for Second Period of Years:",
),
]
return desc
def get_df(ctx, period):
"""Get our data."""
table = "alldata_%s" % (ctx["station"][:2])
month = ctx["month"]
ctx["mlabel"] = f"{month.capitalize()} Season"
if month == "all":
months = range(1, 13)
ctx["mlabel"] = "All Year"
elif month == "fall":
months = [9, 10, 11]
elif month == "winter":
months = [12, 1, 2]
elif month == "spring":
months = [3, 4, 5]
elif month == "summer":
months = [6, 7, 8]
else:
ts = datetime.strptime("2000-" + month + "-01", "%Y-%b-%d")
months = [ts.month]
ctx["mlabel"] = calendar.month_name[ts.month]
with get_sqlalchemy_conn("coop") as conn:
df = pd.read_sql(
text(
f"SELECT high, low, (high+low)/2. as avgt from {table} WHERE "
"day >= :d1 and day <= :d2 and station = :station "
"and high is not null "
"and low is not null and month in :months"
),
conn,
params={
"d1": date(ctx[f"sy{period}"], 1, 1),
"d2": date(ctx[f"ey{period}"], 12, 31),
"station": ctx["station"],
"months": tuple(months),
},
)
return df
def f2s(value):
"""HAAAAAAAAAAAAACK."""
return ("%.5f" % value).rstrip("0").rstrip(".")
def plotter(fdict):
"""Go"""
ctx = get_autoplot_context(fdict, get_description())
df1 = get_df(ctx, "1")
df2 = get_df(ctx, "2")
if df1.empty or df2.empty:
raise NoDataFound("Failed to find data for query!")
kern1 = gaussian_kde(df1[ctx["v"]])
kern2 = gaussian_kde(df2[ctx["v"]])
xpos = np.arange(
min([df1[ctx["v"]].min(), df2[ctx["v"]].min()]),
max([df1[ctx["v"]].max(), df2[ctx["v"]].max()]) + 1,
dtype="i",
)
period1 = "%s-%s" % (ctx["sy1"], ctx["ey1"])
period2 = "%s-%s" % (ctx["sy2"], ctx["ey2"])
label1 = "%s-%s %s" % (ctx["sy1"], ctx["ey1"], ctx["v"])
label2 = "%s-%s %s" % (ctx["sy2"], ctx["ey2"], ctx["v"])
df = pd.DataFrame({label1: kern1(xpos), label2: kern2(xpos)}, index=xpos)
fig = figure(apctx=ctx)
title = "[%s] %s %s Distribution\n%s vs %s over %s" % (
ctx["station"],
ctx["_nt"].sts[ctx["station"]]["name"],
PDICT[ctx["v"]],
period2,
period1,
ctx["mlabel"],
)
fitbox(fig, title, 0.12, 0.9, 0.91, 0.99)
ax = fig.add_axes([0.12, 0.38, 0.75, 0.52])
C1 = "blue"
C2 = "red"
alpha = 0.4
ax.plot(
df.index.values,
df[label1],
lw=2,
c=C1,
label=rf"{label1} - $\mu$={df1[ctx['v']].mean():.2f}",
zorder=4,
)
ax.fill_between(xpos, 0, df[label1], color=C1, alpha=alpha, zorder=3)
ax.axvline(df1[ctx["v"]].mean(), color=C1)
ax.plot(
df.index.values,
df[label2],
lw=2,
c=C2,
label=rf"{label2} - $\mu$={df2[ctx['v']].mean():.2f}",
zorder=4,
)
ax.fill_between(xpos, 0, df[label2], color=C2, alpha=alpha, zorder=3)
ax.axvline(df2[ctx["v"]].mean(), color=C2)
ax.set_ylabel("Guassian Kernel Density Estimate")
ax.legend(loc="best")
ax.grid(True)
ax.xaxis.set_major_locator(MaxNLocator(20))
# Sub ax
ax2 = fig.add_axes([0.12, 0.1, 0.75, 0.22])
delta = df[label2] - df[label1]
ax2.plot(df.index.values, delta)
dam = delta.abs().max() * 1.1
ax2.set_ylim(0 - dam, dam)
ax2.set_xlabel(PDICT[ctx["v"]])
ax2.set_ylabel("%s minus\n%s" % (period2, period1))
ax2.grid(True)
ax2.fill_between(xpos, 0, delta, where=delta > 0, color=C2, alpha=alpha)
ax2.fill_between(xpos, 0, delta, where=delta < 0, color=C1, alpha=alpha)
ax2.axhline(0, ls="--", lw=2, color="k")
ax2.xaxis.set_major_locator(MaxNLocator(20))
# Percentiles
levels = [0.001, 0.005, 0.01, 0.05, 0.1, 0.25, 0.5, 0.75]
levels.extend([0.9, 0.95, 0.99, 0.995, 0.999])
p1 = df1[ctx["v"]].describe(percentiles=levels)
p2 = df2[ctx["v"]].describe(percentiles=levels)
y = 0.8
fig.text(0.88, y, "Percentile", rotation=70)
fig.text(0.91, y, period1, rotation=70)
fig.text(0.945, y, period2, rotation=70)
for ptile in levels:
y -= 0.03
val = f2s(ptile * 100.0)
fig.text(0.88, y, val)
fig.text(0.91, y, "%.1f" % (p1[f"{val}%"],))
fig.text(0.95, y, "%.1f" % (p2[f"{val}%"],))
return fig, df
if __name__ == "__main__":
plotter(dict())
|
|
"""Applications models tests"""
from functools import reduce
from operator import or_, itemgetter
from unittest.mock import PropertyMock
from django.core.exceptions import ValidationError
from django.core.files.uploadedfile import SimpleUploadedFile
from django.db import models
import pytest
from django_fsm import TransitionNotAllowed
from applications.constants import (
VALID_SUBMISSION_TYPE_CHOICES,
REVIEW_STATUS_REJECTED,
LETTER_TYPE_REJECTED,
LETTER_TYPE_APPROVED,
)
from applications.models import ApplicationStepSubmission, APP_SUBMISSION_MODELS
from applications.factories import (
BootcampRunApplicationStepFactory,
ApplicationStepSubmissionFactory,
BootcampApplicationFactory,
ApplicationStepFactory,
)
from applications.constants import AppStates
from ecommerce.test_utils import create_test_application, create_test_order
from klasses.constants import ENROLL_CHANGE_STATUS_REFUNDED
from klasses.factories import (
BootcampFactory,
BootcampRunFactory,
InstallmentFactory,
PersonalPriceFactory,
)
from klasses.models import Installment, PersonalPrice, BootcampRunEnrollment
# pylint: disable=redefined-outer-name,unused-argument
from main.features import NOVOED_INTEGRATION
pytestmark = pytest.mark.django_db
@pytest.fixture
def application():
"""An application for testing"""
yield create_test_application()
@pytest.fixture
def user(application):
"""A user with social auth"""
yield application.user
@pytest.fixture
def bootcamp_run(application):
"""
Creates a purchasable bootcamp run. Bootcamp run price is at least $200, in two installments
"""
yield application.bootcamp_run
@pytest.fixture()
def patched_novoed_tasks(mocker):
"""Patched novoed-related tasks"""
return mocker.patch("klasses.api.novoed_tasks")
PAYMENT = 123
def test_submission_types():
"""
The list of valid submission types should match the list of models that are defined as valid submission types,
and the choices for the submissions' content type should be limited to that list of models
"""
assert len(APP_SUBMISSION_MODELS) == len(VALID_SUBMISSION_TYPE_CHOICES)
# The choices for ApplicationStep.submission_type should match the models
# that we have defined as valid submission models
assert {model_cls._meta.model_name for model_cls in APP_SUBMISSION_MODELS} == set(
map(itemgetter(0), VALID_SUBMISSION_TYPE_CHOICES)
)
# Build an OR query with every valid submission model
expected_content_type_limit = reduce(
or_,
(
models.Q(app_label="applications", model=model_cls._meta.model_name)
for model_cls in APP_SUBMISSION_MODELS
), # pylint: disable=protected-access
)
assert (
ApplicationStepSubmission._meta.get_field("content_type").get_limit_choices_to()
== expected_content_type_limit
) # pylint: disable=protected-access
def test_bootcamp_application_with_no_steps_file_submission():
"""
    A bootcamp application with no submission steps should move straight to AWAITING_PAYMENT when a resume is added
"""
bootcamp_application = BootcampApplicationFactory(
state=AppStates.AWAITING_RESUME.value
)
resume_file = SimpleUploadedFile("resume.pdf", b"file_content")
bootcamp_application.add_resume(resume_file=resume_file)
assert bootcamp_application.state == AppStates.AWAITING_PAYMENT.value
@pytest.mark.parametrize(
"file_name,state,expected",
[
("resume.pdf", AppStates.AWAITING_RESUME.value, True),
("resume.pdf", AppStates.AWAITING_SUBMISSION_REVIEW.value, True),
("resume.pdf", AppStates.AWAITING_USER_SUBMISSIONS.value, True),
("resume.pdf", AppStates.AWAITING_PAYMENT.value, False),
("resume", AppStates.AWAITING_RESUME.value, False),
("resume.doc", AppStates.AWAITING_RESUME.value, True),
("resume.docx", AppStates.AWAITING_SUBMISSION_REVIEW.value, True),
("resume.png", AppStates.AWAITING_RESUME.value, False),
],
)
def test_bootcamp_application_resume_file_validation(file_name, state, expected):
"""
    add_resume should only succeed when the application state allows a resume and the file extension is permitted
"""
bootcamp_application = BootcampApplicationFactory(state=state)
resume_file = SimpleUploadedFile(file_name, b"file_content")
application_step = ApplicationStepFactory(
bootcamp=bootcamp_application.bootcamp_run.bootcamp
)
BootcampRunApplicationStepFactory(
bootcamp_run=bootcamp_application.bootcamp_run,
application_step=application_step,
)
if expected:
new_state = (
AppStates.AWAITING_USER_SUBMISSIONS.value
if state == AppStates.AWAITING_RESUME.value
else state
)
bootcamp_application.add_resume(resume_file=resume_file)
assert bootcamp_application.state == new_state
else:
with pytest.raises((ValidationError, TransitionNotAllowed)):
bootcamp_application.add_resume(resume_file=resume_file)
assert bootcamp_application.state == state
def test_bootcamp_application_complete(settings, patched_novoed_tasks):
"""
BootcampApplication.complete should create an enrollment and call a task to enroll the user in the course on
NovoEd
"""
settings.FEATURES[NOVOED_INTEGRATION] = True
novoed_course_stub = "course-stub"
bootcamp_application = BootcampApplicationFactory.create(
state=AppStates.AWAITING_PAYMENT.value,
bootcamp_run__novoed_course_stub=novoed_course_stub,
)
assert not bootcamp_application.user.profile.can_skip_application_steps
bootcamp_application.complete()
assert BootcampRunEnrollment.objects.filter(
user=bootcamp_application.user,
bootcamp_run=bootcamp_application.bootcamp_run,
active=True,
).exists()
assert bootcamp_application.user.profile.can_skip_application_steps
patched_novoed_tasks.enroll_users_in_novoed_course.delay.assert_called_once_with(
user_ids=[bootcamp_application.user.id], novoed_course_stub=novoed_course_stub
)
@pytest.mark.django_db
def test_application_await_further_payment(mocker):
"""
BootcampApplication.await_further_payment should call an API function to deactivate an enrollment
"""
patched_deactivate = mocker.patch("applications.models.deactivate_run_enrollment")
bootcamp_application = BootcampApplicationFactory(state=AppStates.COMPLETE.value)
bootcamp_application.await_further_payment()
patched_deactivate.assert_called_once_with(
user=bootcamp_application.user,
bootcamp_run=bootcamp_application.bootcamp_run,
change_status=None,
)
@pytest.mark.django_db
def test_application_refund(mocker):
"""
BootcampApplication.refund should call an API function to deactivate an enrollment
"""
patched_deactivate = mocker.patch("applications.models.deactivate_run_enrollment")
bootcamp_application = BootcampApplicationFactory(state=AppStates.COMPLETE.value)
bootcamp_application.refund()
patched_deactivate.assert_called_once_with(
user=bootcamp_application.user,
bootcamp_run=bootcamp_application.bootcamp_run,
change_status=ENROLL_CHANGE_STATUS_REFUNDED,
)
@pytest.mark.django_db
def test_is_ready_for_payment():
"""
is_ready_for_payment should return true if all application steps are submitted
and reviewed
"""
bootcamp_run = BootcampRunFactory()
submission = ApplicationStepSubmissionFactory.create(
bootcamp_application__bootcamp_run=bootcamp_run,
run_application_step__bootcamp_run=bootcamp_run,
is_approved=True,
)
bootcamp_application = submission.bootcamp_application
assert bootcamp_application.is_ready_for_payment() is True
application_step = BootcampRunApplicationStepFactory.create(
bootcamp_run=bootcamp_run, application_step__bootcamp=bootcamp_run.bootcamp
)
submission_not_approved = ApplicationStepSubmissionFactory.create(
is_pending=True,
bootcamp_application=bootcamp_application,
run_application_step=application_step,
)
assert bootcamp_application.is_ready_for_payment() is False
submission_not_approved.review_status = REVIEW_STATUS_REJECTED
submission_not_approved.save()
assert bootcamp_application.is_ready_for_payment() is False
@pytest.mark.django_db
def test_bootcamp_run_application_step_validation():
"""
A BootcampRunApplicationStep object should raise an exception if it is saved when the bootcamp of the bootcamp run
and step are not the same.
"""
bootcamps = BootcampFactory.create_batch(2)
step = BootcampRunApplicationStepFactory.create(
application_step__bootcamp=bootcamps[0], bootcamp_run__bootcamp=bootcamps[0]
)
step.bootcamp_run.bootcamp = bootcamps[1]
with pytest.raises(ValidationError):
step.save()
step.bootcamp_run.bootcamp = bootcamps[0]
step.save()
def test_app_step_submission_validation():
"""
An ApplicationStepSubmission object should raise an exception if it is saved when the bootcamp run of the
application and the step are not the same.
"""
bootcamp_runs = BootcampRunFactory.create_batch(2)
submission = ApplicationStepSubmissionFactory.create(
bootcamp_application__bootcamp_run=bootcamp_runs[0],
run_application_step__bootcamp_run=bootcamp_runs[0],
)
submission.bootcamp_application.bootcamp_run = bootcamp_runs[1]
with pytest.raises(ValidationError):
submission.save()
submission.bootcamp_application.bootcamp_run = bootcamp_runs[0]
submission.save()
def test_get_total_paid(application):
"""
get_total_paid should look through all fulfilled orders for the payment for a particular user
"""
# Multiple payments should be added together
create_test_order(application, PAYMENT, fulfilled=True)
next_payment = 50
create_test_order(application, next_payment, fulfilled=True)
assert application.total_paid == PAYMENT + next_payment
def test_get_total_paid_unfulfilled(application):
"""Unfulfilled orders should be ignored"""
create_test_order(application, 45, fulfilled=False)
assert application.total_paid == 0
def test_get_total_paid_other_run(application):
"""Orders for other bootcamp runs should be ignored"""
other_application = create_test_application()
create_test_order(other_application, 50, fulfilled=True)
assert application.total_paid == 0
def test_get_total_paid_no_payments(application):
"""If there are no payments get_total_paid should return 0"""
assert application.total_paid == 0
@pytest.mark.parametrize(
"run_price,personal_price,expected_price",
[[10, None, 10], [10, 5, 5], [10, 25, 25]],
) # pylint: disable=too-many-arguments
def test_price(
application, bootcamp_run, user, run_price, personal_price, expected_price
):
"""
BootcampApplication.price should return the personal price for the run, or else the full price
"""
Installment.objects.all().delete()
for _ in range(2):
InstallmentFactory.create(amount=run_price / 2, bootcamp_run=bootcamp_run)
if personal_price is not None:
# this price should be ignored
PersonalPriceFactory.create(bootcamp_run=bootcamp_run)
# this price should be used
PersonalPrice.objects.create(
bootcamp_run=bootcamp_run, user=user, price=personal_price
)
# this price should be ignored
PersonalPriceFactory.create(bootcamp_run=bootcamp_run)
assert application.price == expected_price
@pytest.mark.parametrize(
"price,total_paid,expected_fully_paid",
[[10, 10, True], [10, 9, False], [10, 11, True]],
) # pylint: disable=too-many-arguments
def test_is_paid_in_full(mocker, application, price, total_paid, expected_fully_paid):
"""
is_paid_in_full should return true if the payments match or exceed the price of the run
"""
price_mock = mocker.patch(
"applications.models.BootcampApplication.price", new_callable=PropertyMock
)
price_mock.return_value = price
total_paid_mock = mocker.patch(
"applications.models.BootcampApplication.total_paid", new_callable=PropertyMock
)
total_paid_mock.return_value = total_paid
assert application.is_paid_in_full is expected_fully_paid
@pytest.mark.parametrize("ready_for_payment", [True, False])
def test_applicant_letter_approved(mocker, application, ready_for_payment):
"""If all submissions are approved, an applicant letter should be sent"""
application.state = AppStates.AWAITING_SUBMISSION_REVIEW.value
application.save()
create_patched = mocker.patch(
"applications.tasks.create_and_send_applicant_letter.delay"
)
ready_patched = mocker.patch(
"applications.models.BootcampApplication.is_ready_for_payment",
return_value=ready_for_payment,
)
application.approve_submission()
ready_patched.assert_called_once_with()
if ready_for_payment:
create_patched.assert_called_once_with(
application_id=application.id, letter_type=LETTER_TYPE_APPROVED
)
else:
assert create_patched.called is False
def test_applicant_letter_rejected(mocker, application):
"""If any submission is rejected, an applicant letter should be sent"""
application.state = AppStates.AWAITING_SUBMISSION_REVIEW.value
application.save()
create_patched = mocker.patch(
"applications.tasks.create_and_send_applicant_letter.delay"
)
application.reject_submission()
create_patched.assert_called_once_with(
application_id=application.id, letter_type=LETTER_TYPE_REJECTED
)
|
|
"""
A simple Python Geojson file reader and writer.
Author: Karim Bahgat, 2014
Contact: [email protected]
License: MIT License
"""
try:
import simplejson as json
except:
import json
class Geometry:
"""
A geometry instance.
Can be created from args, or without any to create an empty one from scratch.
| __option__ | __description__
| --- | ---
| obj | another geometry instance, an object with the __geo_interface__ or a geojson dictionary of the Geometry type
| type/coordinates/bbox | if obj isn't specified, type, coordinates, and optionally bbox can be set as arguments
Has several geometrical attribute values.
| __attribute__ | __description__
| --- | ---
| type | as specified when constructed
| coordinates | as specified when constructed
| bbox | if bbox wasn't specified when constructed then it is calculated on-the-fly
"""
def __init__(self, obj=None, type=None, coordinates=None, bbox=None):
if isinstance(obj, Geometry):
self._data = obj._data.copy()
elif hasattr(obj, "__geo_interface__"):
self._data = obj.__geo_interface__
elif isinstance(obj, dict):
self._data = obj
elif type and coordinates:
_data = {"type":type,"coordinates":coordinates}
if bbox: _data.update({"bbox":bbox})
self._data = _data
else:
# empty geometry dictionary
self._data = {}
def __setattr__(self, name, value):
"""Set a class attribute like obj.attr = value"""
try: self._data[name] = value # all attribute setting will directly be redirected to adding or changing the geojson dictionary entries
except AttributeError: self.__dict__[name] = value # except for first time when the _data attribute has to be set
@property
def __geo_interface__(self):
return self._data
# Attributes
@property
def type(self):
return self._data["type"]
@property
def bbox(self):
if self._data.get("bbox"): return self._data["bbox"]
else:
if self.type == "Point":
x,y = self._data["coordinates"]
return [x,y,x,y]
elif self.type in ("MultiPoint","LineString"):
coordsgen = (point for point in self._data["coordinates"])
elif self.type == "MultiLineString":
coordsgen = (point for line in self._data["coordinates"] for point in line)
elif self.type == "Polygon":
coordsgen = (point for point in self._data["coordinates"][0]) # only the first exterior polygon should matter for bbox, not any of the holes
elif self.type == "MultiPolygon":
coordsgen = (point for polygon in self._data["coordinates"] for point in polygon[0]) # only the first exterior polygon should matter for bbox, not any of the holes
firstpoint = next(coordsgen)
_xmin = _xmax = firstpoint[0]
_ymin = _ymax = firstpoint[1]
for _x,_y in coordsgen:
if _x < _xmin: _xmin = _x
elif _x > _xmax: _xmax = _x
if _y < _ymin: _ymin = _y
elif _y > _ymax: _ymax = _y
return _xmin,_ymin,_xmax,_ymax
@property
def coordinates(self):
return self._data["coordinates"]
# Methods
def validate(self):
"""
Validates that the geometry is correctly formatted, and raises an error if not
"""
# first validate geometry type
if not self.type in ("Point","MultiPoint","LineString","MultiLineString","Polygon","MultiPolygon"):
raise Exception('Invalid geometry type. Must be one of: "Point","MultiPoint","LineString","MultiLineString","Polygon","MultiPolygon"')
# then validate coordinate data type
coords = self._data["coordinates"]
if not isinstance(coords, (list,tuple)): raise Exception("Coordinates must be a list or tuple type")
# then validate coordinate structures
if self.type == "Point":
if not len(coords) == 1: raise Exception("Point must be one coordinate")
elif self.type in ("MultiPoint","LineString"):
if not len(coords) > 1: raise Exception("MultiPoint and LineString must have more than one coordinates")
elif self.type == "MultiLineString":
if not len(coords) > 1: raise Exception("MultiLineString must have more than one LineString member")
for line in coords:
if not len(line) > 1: raise Exception("All LineStrings in a MultiLineString must have more than one coordinate")
elif self.type == "Polygon":
for exterior_or_holes in coords:
if not len(exterior_or_holes) >= 3: raise Exception("The exterior and all holes in a Polygon must have at least 3 coordinates")
elif self.type == "MultiPolygon":
if not len(coords) > 1: raise Exception("MultiPolygon must have more than one Polygon member")
for eachmulti in coords:
for exterior_or_holes in eachmulti:
if not len(exterior_or_holes) >= 3: raise Exception("The exterior and all holes in all Polygons of a MultiPolygon must have at least 3 coordinates")
# validation successful
return True
class Feature:
"""
A feature instance.
| __option__ | __description__
| --- | ---
| obj | another feature instance, an object with the __geo_interface__ or a geojson dictionary of the Feature type
| geometry/properties | if obj isn't specified, geometry and properties can be set as arguments directly, with geometry being anything that the Geometry instance can accept, and properties being an optional dictionary.
Attributes include:
| __attribute__ | __description__
| --- | ---
| geometry | a Geometry instance
| properties | a properties dictionary
"""
    def __init__(self, obj=None, geometry=None, properties=None):
if isinstance(obj, Feature):
self.geometry = Geometry(obj.geometry)
self.properties = obj.properties.copy()
elif isinstance(obj, dict):
self.geometry = Geometry(obj["geometry"])
self.properties = obj["properties"]
elif geometry:
self.geometry = Geometry(geometry)
            self.properties = properties if properties is not None else {}
@property
def __geo_interface__(self):
geojdict = {"type":"Feature",
"geometry":self.geometry.__geo_interface__,
"properties":self.properties }
return geojdict
def validate(self):
"""
Validates that the feature is correctly formatted, and raises an error if not
"""
if not isinstance(self.properties, dict): raise Exception("The 'properties' value of a geojson feature must be a dictionary type")
self.geometry.validate()
class GeojsonFile:
"""
An instance of a geojson file. Can load from data or from a file,
which can then be read or edited.
Call without any arguments to create an empty geojson file
so you can construct it from scratch.
Note: In order for a geojson dict to be considered a file,
it cannot just be single geometries, so this class always
saves them as the toplevel FeatureCollection type,
and requires the files it loads to be the same.
| __option__ | __description__
| --- | ---
| filepath | the path of a geojson file to load (optional).
| data | a complete geojson dictionary to load (optional).
Has the following attributes:
| __attribute__ | __description__
| --- | ---
| crs | The geojson formatted dictionary of the file's coordinate reference system
| bbox | The bounding box surrounding all geometries in the file. You may need to call .update_bbox() to make sure this one is up-to-date.
| common_attributes | Collects and returns a list of attributes/properties/fields common to all features
"""
def __init__(self, filepath=None, data=None, **kwargs):
if filepath:
data = self._loadfilepath(filepath, **kwargs)
if self._validate(data):
self._data = self._prepdata(data)
elif data:
if self._validate(data):
self._data = self._prepdata(data)
else:
self._data = dict([("type","FeatureCollection"),
("features",[]),
("crs",{"type":"name",
"properties":{"name":"urn:ogc:def:crs:OGC:2:84"}}) ])
def __len__(self):
return len(self._data["features"])
def __setattr__(self, name, value):
"""Set a class attribute like obj.attr = value"""
try: self._data[name] = value # all attribute setting will directly be redirected to adding or changing the geojson dictionary entries
except AttributeError: self.__dict__[name] = value # except for first time when the _data attribute has to be set
def __getitem__(self, index):
"""Get a feature based on its index, like geojfile[7]"""
return Feature(self._data["features"][index])
def __setitem__(self, index, feature):
"""Replace a feature based on its index with a new one (must have __geo_interface__ property),
like geojfile[7] = newfeature
"""
self._data["features"][index] = feature.__geo_interface__
def __iter__(self):
"""Iterates through and returns a record list and a
shape instance for each feature in the file.
"""
for featuredict in self._data["features"]:
yield Feature(featuredict)
@property
def __geo_interface__(self):
return self._data
# Attributes
@property
def crs(self):
type = self._data["crs"]["type"]
if type not in ("name","link"): raise Exception("invalid crs type: must be either name or link")
return self._data["crs"]
@property
def bbox(self):
if self._data.get("bbox"):
return self._data["bbox"]
else: return None # file must be new and therefore has no feature geometries that can be used to calculate bbox
@property
def common_attributes(self):
"""
Collect and return a list of attributes/properties/fields common to all features
"""
features = self._data["features"]
if not features: return []
        elif len(features) == 1: return list(features[0]["properties"].keys())
else:
fields = set(features[0]["properties"].keys())
for feature in features[1:]:
fields.intersection_update(feature["properties"].keys())
return list(fields)
# Methods
def getfeature(self, index):
return Feature(self._data["features"][index])
def addfeature(self, feature):
feature = Feature(feature)
self._data["features"].append(feature.__geo_interface__)
def insertfeature(self, index, feature):
feature = Feature(feature)
self._data["features"].insert(index, feature.__geo_interface__)
def replacefeature(self, replaceindex, newfeature):
newfeature = Feature(newfeature)
self._data["features"][replaceindex] = newfeature.__geo_interface__
def removefeature(self, index):
self._data["features"].pop(index)
def define_crs(self, type, name=None, link=None, link_type=None):
"""
Defines the coordinate reference system for the geojson file.
Note: for link crs, only online urls are supported
        (no auxiliary crs files)
| __option__ | __description__
| --- | ---
| type | the type of crs, either "name" or "link"
| name | the crs name as an OGC formatted crs string (eg "urn:ogc:def:crs:..."), required if type is "name"
| link | the crs online url link address, required if type is "link"
| link_type | the type of crs link, optional if type is "link"
"""
if not type in ("name","link"): raise Exception("type must be either 'name' or 'link'")
self.crs = {"type":type, "properties":{} }
if type == "name":
if not name: raise Exception("name argument must be given")
self.crs["properties"]["name"] = name
elif type == "link":
if not link: raise Exception("link argument must be given")
self.crs["properties"]["href"] = link
if link_type:
self.crs["properties"]["type"] = link_type
def update_bbox(self):
"""
Recalculates the bbox region for the entire shapefile.
Useful after adding and/or removing features.
Note: No need to use this method just for saving, because saving
automatically updates the bbox.
"""
firstfeature = Feature(self._data["features"][0])
        # Geometry.bbox returns (xmin, ymin, xmax, ymax)
        xmin,ymin,xmax,ymax = firstfeature.geometry.bbox
        for _featuredict in self._data["features"][1:]:
            _xmin,_ymin,_xmax,_ymax = Feature(_featuredict).geometry.bbox
            if _xmin < xmin: xmin = _xmin
            if _xmax > xmax: xmax = _xmax
            if _ymin < ymin: ymin = _ymin
            if _ymax > ymax: ymax = _ymax
self._data["bbox"] = [xmin,ymin,xmax,ymax]
def add_unique_id(self):
"""
Adds a unique id property to each feature.
Note: Results in error if any of the features already
have an "id" field
"""
uid = 0
for feature in self._data["features"]:
if feature["properties"].get("id"):
raise Exception("one of the features already had an id field")
feature["properties"]["id"] = uid
uid += 1
def add_all_bboxes(self):
"""
Calculates and adds a bbox attribute to all feature geometries
"""
for feature in self._data["features"]:
if not feature["geometry"].get("bbox"):
feature["geometry"]["bbox"] = Feature(feature).geometry.bbox
def save(self, savepath, **kwargs):
"""
Saves the geojson instance to file.
Note: to save with a different text encoding use the 'encoding' argument.
| __option__ | __description__
| --- | ---
| savepath | filepath to save the file
"""
self.update_bbox()
tempfile = open(savepath,"w")
json.dump(self._data, tempfile, **kwargs)
tempfile.close()
# Internal Methods
def _validate(self, data):
"""Checks that the geojson data is a feature collection, and that it
contains a proper "features" attribute, and returns True if so."""
if not data["type"] == "FeatureCollection":
raise ValueError("The geojson data needs to be a feature collection")
if data.get("features"):
if not isinstance(data["features"], list):
raise ValueError("The features property needs to be a list")
return True
else: raise ValueError("The FeatureCollection needs to contain a 'features' property")
def _loadfilepath(self, filepath, **kwargs):
"""This loads a geojson file into a geojson python
dictionary using the json module.
Note: to load with a different text encoding use the encoding argument.
"""
data = json.load(open(filepath,"rU"), **kwargs)
return data
def _prepdata(self, data):
"""Adds potentially missing items to the geojson dictionary"""
# if missing, compute and add bbox
if not data.get("bbox"):
firstfeature = Feature(data["features"][0])
            # Geometry.bbox returns (xmin, ymin, xmax, ymax)
            xmin,ymin,xmax,ymax = firstfeature.geometry.bbox
            for _featuredict in data["features"][1:]:
                _xmin,_ymin,_xmax,_ymax = Feature(_featuredict).geometry.bbox
                if _xmin < xmin: xmin = _xmin
                if _xmax > xmax: xmax = _xmax
                if _ymin < ymin: ymin = _ymin
                if _ymax > ymax: ymax = _ymax
data["bbox"] = [xmin,ymin,xmax,ymax]
# if missing, set crs to default crs (WGS84), see http://geojson.org/geojson-spec.html
if not data.get("crs"):
data["crs"] = {"type":"name",
"properties":{"name":"urn:ogc:def:crs:OGC:2:84"}}
return data
# User functions
def load(filepath=None, data=None, **kwargs):
"""
Loads a geojson file or dictionary, validates it, and returns a
GeojsonFile instance.
Note: In order for a geojson dict to be considered a file,
it cannot just be single geometries, so this class always
saves them as the toplevel FeatureCollection type,
and requires the files it loads to be the same.
Note: to load with a different text encoding use the 'encoding' argument.
| __option__ | __description__
| --- | ---
| filepath | the path of a geojson file to load (optional).
| data | a complete geojson dictionary to load (optional).
"""
return GeojsonFile(filepath, data, **kwargs)
def new():
"""
Creates and returns a new empty geojson file instance.
"""
return GeojsonFile()
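# A minimal usage sketch (illustrative only; the import name and file name are
# assumptions, not part of this module):
#
#     import pygeoj
#     geojfile = pygeoj.new()
#     geojfile.addfeature({"type": "Feature",
#                          "geometry": {"type": "Point", "coordinates": [10.0, 20.0]},
#                          "properties": {"name": "example"}})
#     geojfile.add_all_bboxes()
#     geojfile.save("example.geojson")  # save() recomputes the top-level bbox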
|
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for XLA Concat Op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.compiler.tests import xla_test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import googletest
class ConcatTest(xla_test.XLATestCase):
def testHStack(self):
with self.cached_session():
p1 = array_ops.placeholder(dtypes.float32, shape=[4, 4])
p2 = array_ops.placeholder(dtypes.float32, shape=[4, 4])
with self.test_scope():
c = array_ops.concat([p1, p2], 0)
params = {
p1: np.random.rand(4, 4).astype("f"),
p2: np.random.rand(4, 4).astype("f")
}
result = c.eval(feed_dict=params)
self.assertEqual(result.shape, c.get_shape())
self.assertAllEqual(result[:4, :], params[p1])
self.assertAllEqual(result[4:, :], params[p2])
def testVStack(self):
with self.cached_session():
p1 = array_ops.placeholder(dtypes.float32, shape=[4, 4])
p2 = array_ops.placeholder(dtypes.float32, shape=[4, 4])
with self.test_scope():
c = array_ops.concat([p1, p2], 1)
params = {
p1: np.random.rand(4, 4).astype("f"),
p2: np.random.rand(4, 4).astype("f")
}
result = c.eval(feed_dict=params)
self.assertEqual(result.shape, c.get_shape())
self.assertAllEqual(result[:, :4], params[p1])
self.assertAllEqual(result[:, 4:], params[p2])
def testInt32(self):
with self.cached_session():
p1 = np.random.rand(2, 3).astype("i")
p2 = np.random.rand(2, 3).astype("i")
x1 = constant_op.constant(p1)
x2 = constant_op.constant(p2)
with self.test_scope():
c = array_ops.concat([x1, x2], 0)
result = c.eval()
self.assertAllEqual(result[:2, :], p1)
self.assertAllEqual(result[2:, :], p2)
def _testRandom(self, dtype):
# Random dims of rank 5
shape = np.random.randint(1, 5, size=5)
# Random number of tensors, but always > 1.
num_tensors = np.random.randint(2, 10)
# Random dim to concat on
concat_dim = np.random.randint(5)
params = {}
if dtype == dtypes.bfloat16:
dtype_feed = dtypes.float32
else:
dtype_feed = dtype
with self.cached_session():
p = []
for i in np.arange(num_tensors):
input_shape = shape
input_shape[concat_dim] = np.random.randint(1, 5)
placeholder = array_ops.placeholder(dtype_feed, shape=input_shape)
p.append(placeholder)
t = dtype_feed.as_numpy_dtype
params[placeholder] = np.random.rand(*input_shape).astype(t)
if dtype != dtype_feed:
concat_inputs = [math_ops.cast(p_i, dtype) for p_i in p]
else:
concat_inputs = p
with self.test_scope():
c = array_ops.concat(concat_inputs, concat_dim)
if dtype != dtype_feed:
c = math_ops.cast(c, dtype_feed)
result = c.eval(feed_dict=params)
self.assertEqual(result.shape, c.get_shape())
cur_offset = 0
for i in np.arange(num_tensors):
# The index into the result is the ':' along all dimensions
# except the concat_dim. slice(0, size) is used for ':', and
# a list of slices is used to index into result.
ind = [slice(0, params[p[i]].shape[j]) for j in np.arange(5)]
ind[concat_dim] = slice(cur_offset,
cur_offset + params[p[i]].shape[concat_dim])
cur_offset += params[p[i]].shape[concat_dim]
if dtype == dtype_feed:
self.assertAllEqual(result[ind], params[p[i]])
else:
self.assertAllClose(result[ind], params[p[i]], 0.01)
def testRandom(self):
self._testRandom(dtypes.float32)
self._testRandom(dtypes.int32)
def _testGradientsSimple(self):
with self.cached_session():
inp = []
inp_tensors = []
with self.test_scope():
for x in [1, 2, 6]:
shape = [10, x, 2]
t = np.random.rand(*shape).astype("f")
inp.append(t)
inp_tensors.append(
constant_op.constant(
[float(y) for y in t.flatten()],
shape=shape,
dtype=dtypes.float32))
c = array_ops.concat(inp_tensors, 1)
output_shape = [10, 9, 2]
grad_inp = np.random.rand(*output_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=output_shape)
grad = gradients_impl.gradients([c], inp_tensors, [grad_tensor])
concated_grad = array_ops.concat(grad, 1)
result = concated_grad.eval()
self.assertAllEqual(result, grad_inp)
def testGradientsSimpleAll(self):
self._testGradientsSimple()
def _testGradientsFirstDim(self):
with self.cached_session():
inp = []
inp_tensors = []
with self.test_scope():
for x in [1, 2, 6]:
shape = [x, 10, 2]
t = np.random.rand(*shape).astype("f")
inp.append(t)
inp_tensors.append(
constant_op.constant(
[float(y) for y in t.flatten()],
shape=shape,
dtype=dtypes.float32))
c = array_ops.concat(inp_tensors, 0)
output_shape = [9, 10, 2]
grad_inp = np.random.rand(*output_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=output_shape)
grad = gradients_impl.gradients([c], inp_tensors, [grad_tensor])
concated_grad = array_ops.concat(grad, 0)
result = concated_grad.eval()
self.assertAllEqual(result, grad_inp)
def testGradientsFirstDimAll(self):
self._testGradientsFirstDim()
def _testGradientsLastDim(self):
with self.cached_session():
inp = []
inp_tensors = []
with self.test_scope():
for x in [1, 2, 6]:
shape = [10, 2, x]
t = np.random.rand(*shape).astype("f")
inp.append(t)
inp_tensors.append(
constant_op.constant(
[float(y) for y in t.flatten()],
shape=shape,
dtype=dtypes.float32))
c = array_ops.concat(inp_tensors, 2)
output_shape = [10, 2, 9]
grad_inp = np.random.rand(*output_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=output_shape)
grad = gradients_impl.gradients([c], inp_tensors, [grad_tensor])
concated_grad = array_ops.concat(grad, 2)
result = concated_grad.eval()
self.assertAllEqual(result, grad_inp)
def testGradientsLastDimAll(self):
self._testGradientsLastDim()
def _RunAndVerifyGradientsRandom(self):
# Random dims of rank 5
input_shape = np.random.randint(1, 5, size=5)
# Random number of tensors
num_tensors = np.random.randint(1, 10)
# Random dim to concat on
concat_dim = np.random.randint(5)
concat_dim_sizes = np.random.randint(1, 5, size=num_tensors)
with self.cached_session():
inp = []
inp_tensors = []
with self.test_scope():
for x in concat_dim_sizes:
shape = input_shape
shape[concat_dim] = x
t = np.random.rand(*shape).astype("f")
inp.append(t)
inp_tensors.append(
constant_op.constant(
[float(y) for y in t.flatten()],
shape=shape,
dtype=dtypes.float32))
c = array_ops.concat(inp_tensors, concat_dim)
output_shape = input_shape
output_shape[concat_dim] = concat_dim_sizes.sum()
grad_inp = np.random.rand(*output_shape).astype("f")
grad_tensor = constant_op.constant(
[float(x) for x in grad_inp.flatten()], shape=output_shape)
grad = gradients_impl.gradients([c], inp_tensors, [grad_tensor])
concated_grad = array_ops.concat(grad, concat_dim)
result = concated_grad.eval()
self.assertAllEqual(result, grad_inp)
def testGradientsRandom(self):
for _ in range(5):
self._RunAndVerifyGradientsRandom()
# Re-enable once zero-element Retvals are handled correctly.
def DISABLED_testZeroSize(self):
# Verify that concat doesn't crash and burn for zero size inputs
np.random.seed(7)
with self.cached_session() as sess:
with self.test_scope():
for shape0 in (), (2,):
axis = len(shape0)
for shape1 in (), (3,):
for n0 in 0, 1, 2:
for n1 in 0, 1, 2:
x0 = np.random.randn(*(shape0 + (n0,) + shape1))
x1 = np.random.randn(*(shape0 + (n1,) + shape1))
correct = np.concatenate([x0, x1], axis=axis)
# TODO(irving): Make tf.concat handle map, then drop list().
xs = list(map(constant_op.constant, [x0, x1]))
c = array_ops.concat(xs, axis)
self.assertAllEqual(c.eval(), correct)
# Check gradients
dc = np.random.randn(*c.get_shape().as_list())
dxs = sess.run(gradients_impl.gradients(c, xs, dc))
self.assertAllEqual(dc, np.concatenate(dxs, axis=axis))
def testConcatTuple(self):
c1 = np.random.rand(4, 4).astype(np.float32)
c2 = np.random.rand(4, 4).astype(np.float32)
with self.cached_session():
with self.test_scope():
concat_list_t = array_ops.concat([c1, c2], 0)
concat_tuple_t = array_ops.concat((c1, c2), 0)
self.assertAllEqual(concat_list_t.eval(), concat_tuple_t.eval())
def testConcatNoScalars(self):
with self.cached_session():
with self.test_scope():
scalar = constant_op.constant(7)
dim = array_ops.placeholder(dtypes.int32)
with self.assertRaisesRegexp(
ValueError, r"Can't concatenate scalars \(use tf\.stack instead\)"):
array_ops.concat([scalar, scalar, scalar], dim)
# The purpose of this is to ensure that XLA on GPU will not run out of memory
# with too many arguments.
def testConcatLargeNumberOfTensors(self):
with self.cached_session():
with self.test_scope():
for concat_dim in range(2):
params = {}
p = []
shape = np.array([7, 13])
num_tensors = 1001
for i in np.arange(num_tensors):
input_shape = shape
placeholder = array_ops.placeholder(
dtypes.float32, shape=input_shape)
p.append(placeholder)
params[placeholder] = np.random.rand(*input_shape).astype(
np.float32)
concat_inputs = p
c = array_ops.concat(concat_inputs, concat_dim)
result = c.eval(feed_dict=params)
self.assertEqual(result.shape, c.get_shape())
cur_offset = 0
for i in np.arange(num_tensors):
# The index into the result is the ':' along all dimensions
# except the concat_dim. slice(0, size) is used for ':', and
# a list of slices is used to index into result.
index = [slice(0, params[p[i]].shape[j]) for j in np.arange(2)]
index[concat_dim] = slice(
cur_offset, cur_offset + params[p[i]].shape[concat_dim])
cur_offset += params[p[i]].shape[concat_dim]
self.assertAllEqual(result[index], params[p[i]])
class ConcatOffsetTest(xla_test.XLATestCase):
def testBasic(self):
with self.cached_session() as sess:
with self.test_scope():
cdim = constant_op.constant(1, dtypes.int32)
s0 = constant_op.constant([2, 3, 5], dtypes.int32)
s1 = constant_op.constant([2, 7, 5], dtypes.int32)
s2 = constant_op.constant([2, 20, 5], dtypes.int32)
off = gen_array_ops.concat_offset(cdim, [s0, s1, s2])
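        # concat_offset returns, for each input shape, the starting coordinate
        # of that input within the concatenated output; only the concat
        # dimension (cdim=1 here) accumulates: 0, 3, 3 + 7 = 10.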
ans = sess.run(off)
self.assertAllEqual(ans, [[0, 0, 0], [0, 3, 0], [0, 10, 0]])
class PackTest(xla_test.XLATestCase):
def testBasic(self):
with self.cached_session() as sess:
with self.test_scope():
s0 = constant_op.constant([2, 3, 5], dtypes.int32)
s1 = constant_op.constant([2, 7, 5], dtypes.int32)
s2 = constant_op.constant([2, 20, 5], dtypes.int32)
packed = array_ops.stack([s0, s1, s2])
ans = sess.run(packed)
self.assertAllEqual(ans, [[2, 3, 5], [2, 7, 5], [2, 20, 5]])
def testScalars(self):
with self.cached_session() as sess:
with self.test_scope():
s0 = constant_op.constant(2, dtypes.int32)
s1 = constant_op.constant(3, dtypes.int32)
s2 = constant_op.constant(5, dtypes.int32)
packed = array_ops.stack([s0, s1, s2])
ans = sess.run(packed)
self.assertAllEqual(ans, [2, 3, 5])
def testEmpty(self):
with self.cached_session() as sess:
with self.test_scope():
s0 = constant_op.constant([[]], dtypes.int32)
s1 = constant_op.constant([[]], dtypes.int32)
s2 = constant_op.constant([[]], dtypes.int32)
packed = array_ops.stack([s0, s1, s2])
ans = sess.run(packed)
self.assertAllEqual(ans, [[[]], [[]], [[]]])
if __name__ == "__main__":
googletest.main()
|
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ragged_array_ops.where."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from tensorflow.python.framework import test_util
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_test_util
from tensorflow.python.ops.ragged import ragged_where_op
from tensorflow.python.platform import googletest
@test_util.run_all_in_graph_and_eager_modes
class RaggedWhereOpTest(ragged_test_util.RaggedTensorTestCase,
parameterized.TestCase):
@parameterized.parameters([
#=========================================================================
# Docstring Examples
#=========================================================================
dict( # shape=[D1, (D2)]
condition=ragged_factory_ops.constant_value(
[[True, False, True], [False, True]]),
expected=[[0, 0], [0, 2], [1, 1]]),
dict( # shape=[D1, (D2)]
condition=ragged_factory_ops.constant_value(
[[True, False, True], [False, True]]),
x=ragged_factory_ops.constant_value(
[['A', 'B', 'C'], ['D', 'E']]),
y=ragged_factory_ops.constant_value(
[['a', 'b', 'c'], ['d', 'e']]),
expected=ragged_factory_ops.constant_value(
[[b'A', b'b', b'C'], [b'd', b'E']])),
dict( # shape=[D1, (D2)]
condition=ragged_factory_ops.constant_value([True, False]),
x=ragged_factory_ops.constant_value([['A', 'B', 'C'], ['D', 'E']]),
y=ragged_factory_ops.constant_value([['a', 'b', 'c'], ['d', 'e']]),
expected=ragged_factory_ops.constant_value(
[[b'A', b'B', b'C'], [b'd', b'e']])),
#=========================================================================
# Coordinate-retrieval mode
#=========================================================================
dict( # shape=[D1]
condition=[True, False, True, False, True],
expected=[[0], [2], [4]]),
dict( # shape=[D1, D2]
condition=[[True, False], [False, True]],
expected=[[0, 0], [1, 1]]),
dict( # shape=[D1, (D2)]
condition=ragged_factory_ops.constant_value(
[[True, False, True], [False, True]]),
expected=[[0, 0], [0, 2], [1, 1]]),
dict( # shape=[D1, (D2), (D3)]
condition=ragged_factory_ops.constant_value([
[[True, False, True], [False, True]],
[[True], [], [False], [False, True, False]]
]),
expected=[[0, 0, 0], [0, 0, 2], [0, 1, 1],
[1, 0, 0], [1, 3, 1]]),
dict( # shape=[D1, (D2), D3]
condition=ragged_factory_ops.constant_value([
[[True, False], [False, True]],
[[True, False], [False, False], [True, False], [False, True]]
], ragged_rank=1),
expected=[[0, 0, 0], [0, 1, 1],
[1, 0, 0], [1, 2, 0], [1, 3, 1]]),
dict( # shape=[D1, (D2), (D3), (D4)]
condition=ragged_factory_ops.constant_value([
[[[], [True]]],
[[[True, False, True], [False, True]],
[[True], [], [False], [False, True, False]]]
]),
expected=[[0, 0, 1, 0],
[1, 0, 0, 0], [1, 0, 0, 2], [1, 0, 1, 1],
[1, 1, 0, 0], [1, 1, 3, 1]]),
#=========================================================================
# Elementwise value-selection mode
#=========================================================================
dict( # shape=[]
condition=True, x='A', y='a', expected=b'A'),
dict( # shape=[]
condition=False, x='A', y='a', expected=b'a'),
dict( # shape=[D1]
condition=[True, False, True],
x=['A', 'B', 'C'],
y=['a', 'b', 'c'],
expected=[b'A', b'b', b'C']),
dict( # shape=[D1, D2]
condition=[[True, False], [False, True]],
x=[['A', 'B'], ['D', 'E']],
y=[['a', 'b'], ['d', 'e']],
expected=[[b'A', b'b'], [b'd', b'E']]),
dict( # shape=[D1, (D2)]
condition=ragged_factory_ops.constant_value(
[[True, False, True], [False, True]]),
x=ragged_factory_ops.constant_value([['A', 'B', 'C'], ['D', 'E']]),
y=ragged_factory_ops.constant_value([['a', 'b', 'c'], ['d', 'e']]),
expected=ragged_factory_ops.constant_value(
[[b'A', b'b', b'C'], [b'd', b'E']])),
dict( # shape=[D1, (D2), D3]
condition=ragged_factory_ops.constant_value([
[[True, False], [False, True]],
[[True, False], [False, False], [True, False], [False, True]]
], ragged_rank=1),
x=ragged_factory_ops.constant_value([
[['A', 'B'], ['C', 'D']],
[['E', 'F'], ['G', 'H'], ['I', 'J'], ['K', 'L']]
], ragged_rank=1),
y=ragged_factory_ops.constant_value([
[['a', 'b'], ['c', 'd']],
[['e', 'f'], ['g', 'h'], ['i', 'j'], ['k', 'l']]
], ragged_rank=1),
expected=ragged_factory_ops.constant_value([
[[b'A', b'b'], [b'c', b'D']],
[[b'E', b'f'], [b'g', b'h'], [b'I', b'j'], [b'k', b'L']]
], ragged_rank=1)),
dict( # shape=[D1, (D2), (D3), (D4)]
condition=ragged_factory_ops.constant_value([
[[[], [True]]],
[[[True, False, True], [False, True]],
[[True], [], [False], [False, True, False]]]
]),
x=ragged_factory_ops.constant_value([
[[[], ['A']]],
[[['B', 'C', 'D'], ['E', 'F']],
[['G'], [], ['H'], ['I', 'J', 'K']]]
]),
y=ragged_factory_ops.constant_value([
[[[], ['a']]],
[[['b', 'c', 'd'], ['e', 'f']],
[['g'], [], ['h'], ['i', 'j', 'k']]]
]),
expected=ragged_factory_ops.constant_value([
[[[], [b'A']]],
[[[b'B', b'c', b'D'], [b'e', b'F']],
[[b'G'], [], [b'h'], [b'i', b'J', b'k']]]
])),
#=========================================================================
# Elementwise row-selection mode
#=========================================================================
dict( # shape=[D1, D2]
condition=[True, False, True],
x=[['A', 'B'], ['C', 'D'], ['E', 'F']],
y=[['a', 'b'], ['c', 'd'], ['e', 'f']],
expected=[[b'A', b'B'], [b'c', b'd'], [b'E', b'F']]),
dict( # shape=[D1, (D2)]
condition=[True, False, True],
x=ragged_factory_ops.constant_value(
[['A', 'B', 'C'], ['D', 'E'], ['F', 'G']]),
y=ragged_factory_ops.constant_value(
[['a', 'b'], ['c'], ['d', 'e']]),
expected=ragged_factory_ops.constant_value(
[[b'A', b'B', b'C'], [b'c'], [b'F', b'G']])),
dict( # shape=[D1, (D2), (D3), (D4)]
condition=ragged_factory_ops.constant_value([True, False]),
x=ragged_factory_ops.constant_value([
[[[], ['A']]],
[[['B', 'C', 'D'], ['E', 'F']],
[['G'], [], ['H'], ['I', 'J', 'K']]]
]),
y=ragged_factory_ops.constant_value([[[['a']]], [[['b']]]]),
expected=ragged_factory_ops.constant_value(
[[[[], [b'A']]], [[[b'b']]]])),
]) # pyformat: disable
def testRaggedWhere(self, condition, expected, x=None, y=None):
result = ragged_where_op.where(condition, x, y)
self.assertRaggedEqual(result, expected)
@parameterized.parameters([
dict(
condition=[True, False],
x=[1, 2],
error=ValueError,
message='x and y must be either both None or both non-None'),
dict(
condition=ragged_factory_ops.constant_value([[True, False, True],
[False, True]]),
x=ragged_factory_ops.constant_value([['A', 'B', 'C'], ['D', 'E']]),
y=[['a', 'b'], ['d', 'e']],
error=ValueError,
message='Input shapes do not match.'),
])
def testRaggedWhereErrors(self, condition, error, message, x=None, y=None):
with self.assertRaisesRegexp(error, message):
ragged_where_op.where(condition, x, y)
if __name__ == '__main__':
googletest.main()
|
|
# Natural Language Toolkit: Combinatory Categorial Grammar
#
# Copyright (C) 2001-2013 NLTK Project
# Author: Graeme Gange <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import unicode_literals
from nltk.compat import python_2_unicode_compatible
from nltk.ccg.api import FunctionalCategory
class UndirectedBinaryCombinator(object):
"""
Abstract class for representing a binary combinator.
Merely defines functions for checking if the function and argument
are able to be combined, and what the resulting category is.
Note that as no assumptions are made as to direction, the unrestricted
combinators can perform all backward, forward and crossed variations
of the combinators; these restrictions must be added in the rule
class.
"""
def can_combine(self, function, argument):
raise NotImplementedError()
    def combine(self, function, argument):
raise NotImplementedError()
class DirectedBinaryCombinator(object):
"""
Wrapper for the undirected binary combinator.
It takes left and right categories, and decides which is to be
the function, and which the argument.
It then decides whether or not they can be combined.
"""
def can_combine(self, left, right):
raise NotImplementedError()
def combine(self, left, right):
raise NotImplementedError()
@python_2_unicode_compatible
class ForwardCombinator(DirectedBinaryCombinator):
'''
Class representing combinators where the primary functor is on the left.
Takes an undirected combinator, and a predicate which adds constraints
restricting the cases in which it may apply.
'''
def __init__(self, combinator, predicate, suffix=''):
self._combinator = combinator
self._predicate = predicate
self._suffix = suffix
def can_combine(self, left, right):
return (self._combinator.can_combine(left,right) and
self._predicate(left,right))
def combine(self, left, right):
for cat in self._combinator.combine(left,right):
yield cat
def __str__(self):
return ">%s%s" % (self._combinator, self._suffix)
@python_2_unicode_compatible
class BackwardCombinator(DirectedBinaryCombinator):
'''
The backward equivalent of the ForwardCombinator class.
'''
def __init__(self, combinator, predicate, suffix=''):
self._combinator = combinator
self._predicate = predicate
self._suffix = suffix
def can_combine(self, left, right):
return (self._combinator.can_combine(right, left) and
self._predicate(left,right))
def combine(self, left, right):
for cat in self._combinator.combine(right, left):
yield cat
def __str__(self):
return "<%s%s" % (self._combinator, self._suffix)
@python_2_unicode_compatible
class UndirectedFunctionApplication(UndirectedBinaryCombinator):
"""
Class representing function application.
Implements rules of the form:
X/Y Y -> X (>)
And the corresponding backwards application rule
"""
def can_combine(self, function, argument):
if not function.is_function():
return False
return not function.arg().can_unify(argument) is None
def combine(self,function,argument):
if not function.is_function():
return
subs = function.arg().can_unify(argument)
if subs is None:
return
yield function.res().substitute(subs)
def __str__(self):
return ''
# Predicates for function application.
# Ensures the left functor takes an argument on the right
def forwardOnly(left,right):
return left.dir().is_forward()
# Ensures the right functor takes an argument on the left
def backwardOnly(left,right):
return right.dir().is_backward()
# Application combinator instances
ForwardApplication = ForwardCombinator(UndirectedFunctionApplication(),
forwardOnly)
BackwardApplication = BackwardCombinator(UndirectedFunctionApplication(),
backwardOnly)
@python_2_unicode_compatible
class UndirectedComposition(UndirectedBinaryCombinator):
"""
Functional composition (harmonic) combinator.
Implements rules of the form
X/Y Y/Z -> X/Z (B>)
And the corresponding backwards and crossed variations.
"""
def can_combine(self, function, argument):
# Can only combine two functions, and both functions must
# allow composition.
if not (function.is_function() and argument.is_function()):
return False
if function.dir().can_compose() and argument.dir().can_compose():
return not function.arg().can_unify(argument.res()) is None
return False
def combine(self, function, argument):
if not (function.is_function() and argument.is_function()):
return
if function.dir().can_compose() and argument.dir().can_compose():
subs = function.arg().can_unify(argument.res())
if not subs is None:
yield FunctionalCategory(function.res().substitute(subs),
argument.arg().substitute(subs),argument.dir())
def __str__(self):
return 'B'
# Predicates for restricting application of straight composition.
def bothForward(left,right):
return left.dir().is_forward() and right.dir().is_forward()
def bothBackward(left,right):
return left.dir().is_backward() and right.dir().is_backward()
# Predicates for crossed composition
def crossedDirs(left,right):
return left.dir().is_forward() and right.dir().is_backward()
def backwardBxConstraint(left,right):
# The functors must be crossed inwards
if not crossedDirs(left, right):
return False
# Permuting combinators must be allowed
    if not (left.dir().can_cross() and right.dir().can_cross()):
return False
# The resulting argument category is restricted to be primitive
return left.arg().is_primitive()
# Straight composition combinators
ForwardComposition = ForwardCombinator(UndirectedComposition(),
forwardOnly)
BackwardComposition = BackwardCombinator(UndirectedComposition(),
backwardOnly)
# Backward crossed composition
BackwardBx = BackwardCombinator(UndirectedComposition(),backwardBxConstraint,
suffix='x')
@python_2_unicode_compatible
class UndirectedSubstitution(UndirectedBinaryCombinator):
"""
Substitution (permutation) combinator.
Implements rules of the form
Y/Z (X\Y)/Z -> X/Z (<Sx)
And other variations.
"""
def can_combine(self, function, argument):
if function.is_primitive() or argument.is_primitive():
return False
# These could potentially be moved to the predicates, as the
# constraints may not be general to all languages.
if function.res().is_primitive():
return False
if not function.arg().is_primitive():
return False
if not (function.dir().can_compose() and argument.dir().can_compose()):
return False
return (function.res().arg() == argument.res()) and (function.arg() == argument.arg())
def combine(self,function,argument):
if self.can_combine(function,argument):
yield FunctionalCategory(function.res().res(),argument.arg(),argument.dir())
def __str__(self):
return 'S'
# Predicate for forward substitution
def forwardSConstraint(left, right):
if not bothForward(left, right):
return False
return left.res().dir().is_forward() and left.arg().is_primitive()
# Predicate for backward crossed substitution
def backwardSxConstraint(left,right):
    if not (left.dir().can_cross() and right.dir().can_cross()):
return False
if not bothForward(left, right):
return False
return right.res().dir().is_backward() and right.arg().is_primitive()
# Instances of substitution combinators
ForwardSubstitution = ForwardCombinator(UndirectedSubstitution(),
forwardSConstraint)
BackwardSx = BackwardCombinator(UndirectedSubstitution(),
backwardSxConstraint,'x')
# Retrieves the left-most functional category.
# i.e., (N\N)/(S/NP) => N\N
def innermostFunction(categ):
while categ.res().is_function():
categ = categ.res()
return categ
@python_2_unicode_compatible
class UndirectedTypeRaise(UndirectedBinaryCombinator):
'''
Undirected combinator for type raising.
'''
def can_combine(self,function,arg):
# The argument must be a function.
# The restriction that arg.res() must be a function
# merely reduces redundant type-raising; if arg.res() is
# primitive, we have:
# X Y\X =>(<T) Y/(Y\X) Y\X =>(>) Y
# which is equivalent to
# X Y\X =>(<) Y
if not (arg.is_function() and arg.res().is_function()):
return False
arg = innermostFunction(arg)
        subs = function.can_unify(arg.arg())
if subs is not None:
return True
return False
def combine(self,function,arg):
if not (function.is_primitive() and
arg.is_function() and arg.res().is_function()):
return
# Type-raising matches only the innermost application.
arg = innermostFunction(arg)
subs = function.can_unify(arg.arg())
if subs is not None:
xcat = arg.res().substitute(subs)
yield FunctionalCategory(xcat,
FunctionalCategory(xcat,function,arg.dir()),
-(arg.dir()))
def __str__(self):
return 'T'
# Predicates for type-raising
# The direction of the innermost category must be towards
# the primary functor.
# The restriction that the variable must be primitive is not
# common to all versions of CCGs; some authors have other restrictions.
def forwardTConstraint(left,right):
arg = innermostFunction(right)
return arg.dir().is_backward() and arg.res().is_primitive()
def backwardTConstraint(left,right):
arg = innermostFunction(left)
return arg.dir().is_forward() and arg.res().is_primitive()
# Instances of type-raising combinators
ForwardT = ForwardCombinator(UndirectedTypeRaise(), forwardTConstraint)
BackwardT = BackwardCombinator(UndirectedTypeRaise(), backwardTConstraint)
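# A minimal usage sketch, kept as comments because categories are normally
# built from a lexicon (e.g. via nltk.ccg.lexicon) rather than by hand.
# Given a category `np` for "NP" and a category `s_np` for "S\NP", backward
# application would be checked and applied as:
#
#     if BackwardApplication.can_combine(np, s_np):
#         results = list(BackwardApplication.combine(np, s_np))
#
# Each combinator yields zero or more result categories; a CCG chart parser
# collects these results when building edges.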
|
|
"""
Test suite for _osx_support: shared OS X support functions.
"""
import os
import platform
import stat
import sys
import unittest
from test.support import os_helper
import _osx_support
@unittest.skipUnless(sys.platform.startswith("darwin"), "requires OS X")
class Test_OSXSupport(unittest.TestCase):
def setUp(self):
self.maxDiff = None
self.prog_name = 'bogus_program_xxxx'
self.temp_path_dir = os.path.abspath(os.getcwd())
self.env = os_helper.EnvironmentVarGuard()
self.addCleanup(self.env.__exit__)
for cv in ('CFLAGS', 'LDFLAGS', 'CPPFLAGS',
'BASECFLAGS', 'BLDSHARED', 'LDSHARED', 'CC',
'CXX', 'PY_CFLAGS', 'PY_LDFLAGS', 'PY_CPPFLAGS',
'PY_CORE_CFLAGS', 'PY_CORE_LDFLAGS'):
if cv in self.env:
self.env.unset(cv)
def add_expected_saved_initial_values(self, config_vars, expected_vars):
# Ensure that the initial values for all modified config vars
# are also saved with modified keys.
expected_vars.update(('_OSX_SUPPORT_INITIAL_'+ k,
config_vars[k]) for k in config_vars
if config_vars[k] != expected_vars[k])
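        # For example, if CC is changed from 'gcc-test -pthreads' to
        # 'clang -pthreads', the expected result also contains
        # {'_OSX_SUPPORT_INITIAL_CC': 'gcc-test -pthreads'}.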
def test__find_executable(self):
if self.env['PATH']:
self.env['PATH'] = self.env['PATH'] + ':'
self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
os_helper.unlink(self.prog_name)
self.assertIsNone(_osx_support._find_executable(self.prog_name))
self.addCleanup(os_helper.unlink, self.prog_name)
with open(self.prog_name, 'w') as f:
f.write("#!/bin/sh\n/bin/echo OK\n")
os.chmod(self.prog_name, stat.S_IRWXU)
self.assertEqual(self.prog_name,
_osx_support._find_executable(self.prog_name))
def test__read_output(self):
if self.env['PATH']:
self.env['PATH'] = self.env['PATH'] + ':'
self.env['PATH'] = self.env['PATH'] + os.path.abspath(self.temp_path_dir)
os_helper.unlink(self.prog_name)
self.addCleanup(os_helper.unlink, self.prog_name)
with open(self.prog_name, 'w') as f:
f.write("#!/bin/sh\n/bin/echo ExpectedOutput\n")
os.chmod(self.prog_name, stat.S_IRWXU)
self.assertEqual('ExpectedOutput',
_osx_support._read_output(self.prog_name))
def test__find_build_tool(self):
out = _osx_support._find_build_tool('cc')
self.assertTrue(os.path.isfile(out),
'cc not found - check xcode-select')
def test__get_system_version(self):
self.assertTrue(platform.mac_ver()[0].startswith(
_osx_support._get_system_version()))
def test__remove_original_values(self):
config_vars = {
'CC': 'gcc-test -pthreads',
}
expected_vars = {
'CC': 'clang -pthreads',
}
cv = 'CC'
newvalue = 'clang -pthreads'
_osx_support._save_modified_value(config_vars, cv, newvalue)
self.assertNotEqual(expected_vars, config_vars)
_osx_support._remove_original_values(config_vars)
self.assertEqual(expected_vars, config_vars)
def test__save_modified_value(self):
config_vars = {
'CC': 'gcc-test -pthreads',
}
expected_vars = {
'CC': 'clang -pthreads',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
cv = 'CC'
newvalue = 'clang -pthreads'
_osx_support._save_modified_value(config_vars, cv, newvalue)
self.assertEqual(expected_vars, config_vars)
def test__save_modified_value_unchanged(self):
config_vars = {
'CC': 'gcc-test -pthreads',
}
expected_vars = config_vars.copy()
cv = 'CC'
newvalue = 'gcc-test -pthreads'
_osx_support._save_modified_value(config_vars, cv, newvalue)
self.assertEqual(expected_vars, config_vars)
def test__supports_universal_builds(self):
import platform
mac_ver_tuple = tuple(int(i) for i in
platform.mac_ver()[0].split('.')[0:2])
self.assertEqual(mac_ver_tuple >= (10, 4),
_osx_support._supports_universal_builds())
def test__find_appropriate_compiler(self):
compilers = (
('gcc-test', 'i686-apple-darwin11-llvm-gcc-4.2'),
('clang', 'clang version 3.1'),
)
config_vars = {
'CC': 'gcc-test -pthreads',
'CXX': 'cc++-test',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-test -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-test -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
expected_vars = {
'CC': 'clang -pthreads',
'CXX': 'clang++',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'clang -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'clang -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
for c_name, c_output in compilers:
os_helper.unlink(c_name)
self.addCleanup(os_helper.unlink, c_name)
with open(c_name, 'w') as f:
f.write("#!/bin/sh\n/bin/echo " + c_output)
os.chmod(c_name, stat.S_IRWXU)
self.assertEqual(expected_vars,
_osx_support._find_appropriate_compiler(
config_vars))
def test__remove_universal_flags(self):
config_vars = {
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
expected_vars = {
'CFLAGS': '-fno-strict-aliasing -g -O3 ',
'LDFLAGS': ' -g',
'CPPFLAGS': '-I. ',
'BLDSHARED': 'gcc-4.0 -bundle -g',
'LDSHARED': 'gcc-4.0 -bundle -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
self.assertEqual(expected_vars,
_osx_support._remove_universal_flags(
config_vars))
def test__remove_universal_flags_alternate(self):
# bpo-38360: also test the alternate single-argument form of -isysroot
config_vars = {
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot/Developer/SDKs/MacOSX10.4u.sdk -g',
}
expected_vars = {
'CFLAGS': '-fno-strict-aliasing -g -O3 ',
'LDFLAGS': ' -g',
'CPPFLAGS': '-I. ',
'BLDSHARED': 'gcc-4.0 -bundle -g',
'LDSHARED': 'gcc-4.0 -bundle -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
self.assertEqual(expected_vars,
_osx_support._remove_universal_flags(
config_vars))
def test__remove_unsupported_archs(self):
config_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
expected_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch i386 ',
'LDFLAGS': ' -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
suffix = (':' + self.env['PATH']) if self.env['PATH'] else ''
self.env['PATH'] = os.path.abspath(self.temp_path_dir) + suffix
c_name = 'clang'
os_helper.unlink(c_name)
self.addCleanup(os_helper.unlink, c_name)
# exit status 255 means no PPC support in this compiler chain
with open(c_name, 'w') as f:
f.write("#!/bin/sh\nexit 255")
os.chmod(c_name, stat.S_IRWXU)
self.assertEqual(expected_vars,
_osx_support._remove_unsupported_archs(
config_vars))
def test__override_all_archs(self):
self.env['ARCHFLAGS'] = '-arch x86_64'
config_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.4u.sdk -g',
}
expected_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch x86_64',
'LDFLAGS': ' -g -arch x86_64',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.4u.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -g -arch x86_64',
'LDSHARED': 'gcc-4.0 -bundle -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk -g -arch x86_64',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
self.assertEqual(expected_vars,
_osx_support._override_all_archs(
config_vars))
def test__check_for_unavailable_sdk(self):
config_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.1.sdk',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot /Developer/SDKs/MacOSX10.1.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.1.sdk -g',
}
expected_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
' ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. ',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
' -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
self.assertEqual(expected_vars,
_osx_support._check_for_unavailable_sdk(
config_vars))
def test__check_for_unavailable_sdk_alternate(self):
# bpo-38360: also test the alternate single-argument form of -isysroot
config_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
'-isysroot/Developer/SDKs/MacOSX10.1.sdk',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. -isysroot/Developer/SDKs/MacOSX10.1.sdk',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
'-isysroot/Developer/SDKs/MacOSX10.1.sdk -g',
}
expected_vars = {
'CC': 'clang',
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
' ',
'LDFLAGS': '-arch ppc -arch i386 -g',
'CPPFLAGS': '-I. ',
'BLDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 -g',
'LDSHARED': 'gcc-4.0 -bundle -arch ppc -arch i386 '
' -g',
}
self.add_expected_saved_initial_values(config_vars, expected_vars)
self.assertEqual(expected_vars,
_osx_support._check_for_unavailable_sdk(
config_vars))
def test_get_platform_osx(self):
# Note, get_platform_osx is currently tested more extensively
# indirectly by test_sysconfig and test_distutils
config_vars = {
'CFLAGS': '-fno-strict-aliasing -g -O3 -arch ppc -arch i386 '
'-isysroot /Developer/SDKs/MacOSX10.1.sdk',
'MACOSX_DEPLOYMENT_TARGET': '10.6',
}
result = _osx_support.get_platform_osx(config_vars, ' ', ' ', ' ')
self.assertEqual(('macosx', '10.6', 'fat'), result)
if __name__ == "__main__":
unittest.main()
|
|
"""
pyText2Pdf - Python script to convert plain text files into Adobe
Acrobat PDF files.
Version 1.2
Author: Anand B Pillai <abpillai at lycos dot com>
Keywords: python, tools, converter, pdf, text2pdf, adobe, acrobat,
processing.
Copyright (C) 2003-2004 Free Software Foundation, Inc.
This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
This file is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Emacs; see the file COPYING. If not, write to
the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
Commentary:
Modification History:
Mon Feb 17 12:20:13 2003 Changed option parsing algorithm to use
getopt. Use __main__ calling convention.
Bug in FF character fixed.
Thu Apr 10 11:26:58 2003 Modified to use python style strings
and function objects.
July 1 2003 Fixed help string errors. Added the
Creator property.
Feb 25 2004 Rewrote argument parser to remove
duplicate code.Use string.join() instead
of concatenation. Modified sys.exit()
calls to print messages.
Code:
"""
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/189858
import sys, os
import string
import time
import getopt
LF_EXTRA=0
LINE_END='\015'
# form feed character (^L)
FF=chr(12)
ENCODING_STR = """\
/Encoding <<
/Differences [ 0 /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /space /exclam
/quotedbl /numbersign /dollar /percent /ampersand
/quoteright /parenleft /parenright /asterisk /plus /comma
/hyphen /period /slash /zero /one /two /three /four /five
/six /seven /eight /nine /colon /semicolon /less /equal
/greater /question /at /A /B /C /D /E /F /G /H /I /J /K /L
/M /N /O /P /Q /R /S /T /U /V /W /X /Y /Z /bracketleft
/backslash /bracketright /asciicircum /underscore
/quoteleft /a /b /c /d /e /f /g /h /i /j /k /l /m /n /o /p
/q /r /s /t /u /v /w /x /y /z /braceleft /bar /braceright
/asciitilde /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/dotlessi /grave /acute /circumflex /tilde /macron /breve
/dotaccent /dieresis /.notdef /ring /cedilla /.notdef
/hungarumlaut /ogonek /caron /space /exclamdown /cent
/sterling /currency /yen /brokenbar /section /dieresis
/copyright /ordfeminine /guillemotleft /logicalnot /hyphen
/registered /macron /degree /plusminus /twosuperior
/threesuperior /acute /mu /paragraph /periodcentered
/cedilla /onesuperior /ordmasculine /guillemotright
/onequarter /onehalf /threequarters /questiondown /Agrave
/Aacute /Acircumflex /Atilde /Adieresis /Aring /AE
/Ccedilla /Egrave /Eacute /Ecircumflex /Edieresis /Igrave
/Iacute /Icircumflex /Idieresis /Eth /Ntilde /Ograve
/Oacute /Ocircumflex /Otilde /Odieresis /multiply /Oslash
/Ugrave /Uacute /Ucircumflex /Udieresis /Yacute /Thorn
/germandbls /agrave /aacute /acircumflex /atilde /adieresis
/aring /ae /ccedilla /egrave /eacute /ecircumflex
/edieresis /igrave /iacute /icircumflex /idieresis /eth
/ntilde /ograve /oacute /ocircumflex /otilde /odieresis
/divide /oslash /ugrave /uacute /ucircumflex /udieresis
/yacute /thorn /ydieresis ]
>>
"""
PROG_HELP = """\
%(progname)s [options] [filename]
%(progname)s makes a 7-bit clean PDF file from any input file.
It reads from a named file and writes the PDF to the output file specified by
the user, or, if none is given, to the input file name with '.pdf' appended.
Author: Anand B Pillai.
Copyright (C) 2003-2004 Free Software Foundation, http://www.fsf.org
There are various options as follows:
-h\t\tshow this message\n
-o/-O\t\tdirect output to this file
-f<font>\tuse PostScript <font> (must be in standard 14, default: Courier)
-I\t\tuse ISOLatin1Encoding
-s<size>\tuse font at given pointsize (default 10) points\n
-v<dist>\tuse given line spacing (default 12) points
-l<lines>\tlines per page (default 60, determined automatically\n\t\tif unspecified)
-c<chars>\tmaximum characters per line (default 80)
-t<spaces>\tspaces per tab character (default 4)
-F\t\thonor formfeed characters (^L)
\t\t(i.e., treat formfeed characters as page breaks)\n
-A4\t\tuse A4 paper (default Letter)
-A3\t\tuse A3 paper (default Letter)
-x<width>\tindependent paper width in points
-y<height>\tindependent paper height in points
-2\t\tformat in 2 columns
-L\t\tlandscape mode
Note that where one variable is implied by two options, the second option
takes precedence for that variable. (e.g. -A4 -y500)
In landscape mode, page width and height are simply swapped over before
formatting, no matter how or when they were defined.
"""
class pyText2Pdf:
def __init__(self):
# version number
self._version="1.1.1"
# iso encoding flag
self._IsoEnc=0
# formfeeds flag
self._doFFs=0
self._progname="PyText2Pdf"
self._appname = "".join((self._progname, " Version ", str(self._version)))
# default font
self._font="/Courier"
# default font size
self._ptSize=10
# default vert space
self._vertSpace=12
self._lines=0
# number of characters in a row
self._cols=80
self._columns=1
# page ht
self._pageHt=792
# page wd
self._pageWd=612
# input file
self._ifile=""
# output file
self._ofile=""
# default tab width
self._tab=4
# input file descriptor
self._ifs=None
# output file descriptor
self._ofs=None
# landscape flag
self._landscape=0
# marker objects
self._curobj = 5
self._pageObs = [0]
self._locations = [0,0,0,0,0,0]
self._pageNo=0
# file position marker
self._fpos=0
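        # Bookkeeping: _locations records the byte offset of every PDF object
        # written so far (used to build the xref table in WriteRest),
        # _pageObs records the object number of each page object (used for
        # the /Kids array), and _fpos tracks the current output position.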
def argsCallBack(self, argslist, listoftuples=False):
""" Callback function called by argument parser.
Helps to remove duplicate code """
x = 0
while x<len(argslist):
item = argslist[x]
if listoftuples:
o, a = item
else:
o = item
if o == '-h':
self.ShowHelp()
elif o == '-I':
self._IsoEnc=1
elif o == '-F':
self._doFFs=1
elif o == '-2':
self._columns=2
elif o == '-L':
self._landscape=1
            if o in ('-f', '-A', '-s', '-v', '-l', '-c', '-t', '-x', '-y', '-o', '-O'):
if not listoftuples:
x += 1
try:
a = argslist[x]
except:
msg = "Argument error for option " + o
sys.exit(msg)
if a == "" or a[0] == "-":
msg = "Error: argument error for option " + o
sys.exit(msg)
elif o == '-f':
self._font='/' + a
elif o == '-A':
if a == '3':
self._pageWd=842
self._pageHt=1190
elif a =='4':
self._pageWd=595
self._pageHt=842
else:
psz=o[1]+a
print self._progname, ': ignoring unknown paper size ', psz
elif o == '-s':
self._ptSize=int(a)
if self._ptSize<1:
self._ptSize=1
elif o == '-v':
self._vertSpace=int(a)
if self._vertSpace<1:
self._vertSpace=1
elif o == '-l':
self._lines=int(a)
if self._lines<1:
self._lines=1
elif o == '-c':
self._cols=int(a)
if self._cols<4:
self._cols=4
elif o == '-t':
self._tab=int(a)
if self._tab<1:
self._tab=1
elif o == '-x':
self._pageWd=int(a)
if self._pageWd<72:
self._pageWd=72
elif o == '-y':
self._pageHt=int(a)
if self._pageHt<72:
self._pageHt=72
elif o in ('-o', '-O'):
self._ofile=a
else:
print self._progname, ': ignoring invalid switch: ', o
x += 1
def parseArgs(self):
if len(sys.argv) == 1:
self.ShowHelp()
arguments=sys.argv[1:]
        optlist, args = getopt.getopt(arguments, 'hIF2Lf:A:s:v:l:c:t:x:y:o:O:')
# input file is the first element in arg list
# or last element in options list (in case of an error!)
if len(args):
self._ifile=args[0]
        else:
            # no positional argument: fall back to the argument of the last
            # option, as described in the comment above
            l=len(optlist)
            tup=optlist[l-1]
            self._ifile=tup[1]
# parse options list
if len(optlist):
self.argsCallBack( optlist, listoftuples=True )
else:
self.argsCallBack( args )
if self._landscape:
print 'Landscape option on...'
if self._columns==2:
print 'Printing in two columns...'
if self._doFFs:
print 'Ignoring form feed character...'
if self._IsoEnc:
print 'Using ISO Latin Encoding...'
print 'Using font', self._font[1:], ' size =', self._ptSize
def writestr(self, str):
""" Write string to output file descriptor.
All output operations go through this function.
We keep the current file position also here"""
# update current file position
self._fpos += len(str)
for x in range(0, len(str)):
if str[x] == '\n':
self._fpos += LF_EXTRA
try:
self._ofs.write(str)
except IOError, e:
print e
return -1
return 0
def Convert(self):
""" Perform the actual conversion """
if self._landscape:
# swap page width & height
tmp = self._pageHt
self._pageHt = self._pageWd
self._pageWd = tmp
if self._lines==0:
self._lines = (self._pageHt - 72)/self._vertSpace
if self._lines < 1:
self._lines=1
try:
self._ifs=open(self._ifile)
except IOError, (strerror, errno):
print 'Error: Could not open file to read --->', self._ifile
sys.exit(3)
if self._ofile=="":
self._ofile=self._ifile + '.pdf'
try:
self._ofs = open(self._ofile, 'wb')
except IOError, (strerror, errno):
print 'Error: Could not open file to write --->', self._ofile
sys.exit(3)
print 'Input file =>', self._ifile
print 'Writing pdf file', self._ofile, '...'
self.WriteHeader(self._ifile)
self.WritePages()
self.WriteRest()
print 'Wrote file', self._ofile
self._ifs.close()
self._ofs.close()
return 0
def WriteHeader(self, title):
"""Write the PDF header"""
ws = self.writestr
t=time.localtime()
timestr=str(time.strftime("D:%Y%m%d%H%M%S", t))
ws("%PDF-1.4\n")
self._locations[1] = self._fpos
ws("1 0 obj\n")
ws("<<\n")
buf = "".join(("/Creator (", self._appname, " By Anand B Pillai )\n"))
ws(buf)
buf = "".join(("/CreationDate (", timestr, ")\n"))
ws(buf)
buf = "".join(("/Producer (", self._appname, "(\\251 Free Software Foundation, 2004))\n"))
ws(buf)
if title:
buf = "".join(("/Title (", title, ")\n"))
ws(buf)
ws(">>\n")
ws("endobj\n")
self._locations[2] = self._fpos
ws("2 0 obj\n")
ws("<<\n")
ws("/Type /Catalog\n")
ws("/Pages 3 0 R\n")
ws(">>\n")
ws("endobj\n")
self._locations[4] = self._fpos
ws("4 0 obj\n")
ws("<<\n")
buf = "".join(("/BaseFont ", str(self._font), " /Encoding /WinAnsiEncoding /Name /F1 /Subtype /Type1 /Type /Font >>\n"))
ws(buf)
if self._IsoEnc:
ws(ENCODING_STR)
ws(">>\n")
ws("endobj\n")
self._locations[5] = self._fpos
ws("5 0 obj\n")
ws("<<\n")
ws(" /Font << /F1 4 0 R >>\n")
ws(" /ProcSet [ /PDF /Text ]\n")
ws(">>\n")
ws("endobj\n")
def StartPage(self):
""" Start a page of data """
ws = self.writestr
self._pageNo += 1
self._curobj += 1
self._locations.append(self._fpos)
self._locations[self._curobj]=self._fpos
self._pageObs.append(self._curobj)
self._pageObs[self._pageNo] = self._curobj
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
ws("<<\n")
ws("/Type /Page\n")
ws("/Parent 3 0 R\n")
ws("/Resources 5 0 R\n")
self._curobj += 1
buf = "".join(("/Contents ", str(self._curobj), " 0 R\n"))
ws(buf)
ws(">>\n")
ws("endobj\n")
self._locations.append(self._fpos)
self._locations[self._curobj] = self._fpos
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
ws("<<\n")
buf = "".join(("/Length ", str(self._curobj + 1), " 0 R\n"))
ws(buf)
ws(">>\n")
ws("stream\n")
strmPos = self._fpos
ws("BT\n");
buf = "".join(("/F1 ", str(self._ptSize), " Tf\n"))
ws(buf)
buf = "".join(("1 0 0 1 50 ", str(self._pageHt - 40), " Tm\n"))
ws(buf)
buf = "".join((str(self._vertSpace), " TL\n"))
ws(buf)
return strmPos
def EndPage(self, streamStart):
"""End a page of data """
ws = self.writestr
ws("ET\n")
streamEnd = self._fpos
ws("endstream\n")
ws("endobj\n")
self._curobj += 1
self._locations.append(self._fpos)
self._locations[self._curobj] = self._fpos
buf = "".join((str(self._curobj), " 0 obj\n"))
ws(buf)
buf = "".join((str(streamEnd - streamStart), '\n'))
ws(buf)
ws('endobj\n')
def WritePages(self):
"""Write pages as PDF"""
ws = self.writestr
beginstream=0
lineNo, charNo=0,0
ch, column=0,0
padding,i=0,0
atEOF=0
while not atEOF:
beginstream = self.StartPage()
column=1
while column <= self._columns:
column += 1
atFF=0
atBOP=0
lineNo=0
while lineNo < self._lines and not atFF and not atEOF:
lineNo += 1
ws("(")
charNo=0
while charNo < self._cols:
charNo += 1
ch = self._ifs.read(1)
cond = ((ch != '\n') and not(ch==FF and self._doFFs) and (ch != ''))
if not cond:
break
if ord(ch) >= 32 and ord(ch) <= 127:
if ch == '(' or ch == ')' or ch == '\\':
ws("\\")
ws(ch)
else:
if ord(ch) == 9:
padding =self._tab - ((charNo - 1) % self._tab)
for i in range(padding):
ws(" ")
charNo += (padding -1)
else:
if ch != FF:
# write \xxx form for dodgy character
buf = "".join(('\\', ch))
ws(buf)
else:
# dont print anything for a FF
charNo -= 1
ws(")'\n")
if ch == FF:
atFF=1
if lineNo == self._lines:
atBOP=1
if atBOP:
pos=0
ch = self._ifs.read(1)
pos= self._ifs.tell()
if ch == FF:
ch = self._ifs.read(1)
pos=self._ifs.tell()
# python's EOF signature
if ch == '':
atEOF=1
else:
# push position back by one char
self._ifs.seek(pos-1)
elif atFF:
ch = self._ifs.read(1)
pos=self._ifs.tell()
if ch == '':
atEOF=1
else:
self._ifs.seek(pos-1)
if column < self._columns:
buf = "".join(("1 0 0 1 ",
str((self._pageWd/2 + 25)),
" ",
str(self._pageHt - 40),
" Tm\n"))
ws(buf)
self.EndPage(beginstream)
def WriteRest(self):
"""Finish the file"""
ws = self.writestr
self._locations[3] = self._fpos
ws("3 0 obj\n")
ws("<<\n")
ws("/Type /Pages\n")
buf = "".join(("/Count ", str(self._pageNo), "\n"))
ws(buf)
buf = "".join(("/MediaBox [ 0 0 ", str(self._pageWd), " ", str(self._pageHt), " ]\n"))
ws(buf)
ws("/Kids [ ")
for i in range(1, self._pageNo+1):
buf = "".join((str(self._pageObs[i]), " 0 R "))
ws(buf)
ws("]\n")
ws(">>\n")
ws("endobj\n")
xref = self._fpos
ws("xref\n")
buf = "".join(("0 ", str((self._curobj) + 1), "\n"))
ws(buf)
buf = "".join(("0000000000 65535 f ", str(LINE_END)))
ws(buf)
for i in range(1, self._curobj + 1):
val = self._locations[i]
buf = "".join((string.zfill(str(val), 10), " 00000 n ", str(LINE_END)))
ws(buf)
ws("trailer\n")
ws("<<\n")
buf = "".join(("/Size ", str(self._curobj + 1), "\n"))
ws(buf)
ws("/Root 2 0 R\n")
ws("/Info 1 0 R\n")
ws(">>\n")
ws("startxref\n")
buf = "".join((str(xref), "\n"))
ws(buf)
ws("%%EOF\n")
def ShowHelp(self):
"""Show help on this program"""
sys.exit( PROG_HELP % {'progname': self._progname} )
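# Example invocation (assuming this script is saved as pyText2Pdf.py and run
# under Python 2, which its print statements and except syntax require):
#
#   python pyText2Pdf.py -f Helvetica -s 11 -v 13 -o notes.pdf notes.txt
#
# This renders notes.txt in 11-point Helvetica with 13-point line spacing
# and writes the result to notes.pdf.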
def main():
pdfclass=pyText2Pdf()
pdfclass.parseArgs()
pdfclass.Convert()
if __name__ == "__main__":
main()
|
|
"""
Spin-projected MP2
"""
import h5py
import numpy as np
import scipy.linalg as slg
from frankenstein import sgscf
from frankenstein.tools.spscf_utils import (sp2block, get_grid, get_Rbeta)
from frankenstein.tools.perf_utils import TIMER
from frankenstein.mp.romp2 import lorentz_regularization
from pyscf import scf, mp, lib
logger = lib.logger
def kernel(mp):
spin_proj = mp.spin_proj
ngrid = mp.ngrid
grid = mp.grid
frozen = mp.frozen
logger.note(mp, "spin_proj : %d", spin_proj)
logger.note(mp, "ngrid : %d", ngrid)
logger.note(mp, "grid : %s", grid)
logger.note(mp, "nfz : %d\n", frozen)
eris = ao2mo(mp)
timer = TIMER(3)
betas, ys, ws = get_grid(spin_proj, ngrid, grid)
ws *= ys
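    # Spin-projection quadrature: the projected MP2 correction is assembled
    # as a weighted ratio over the rotation angles beta,
    #     E_SPMP2 = sum_i w_i * e(beta_i) / sum_i w_i * D(beta_i),
    # where e(beta) is the energy numerator and D(beta) the overlap (norm)
    # denominator returned by get_emp2_beta for each grid point.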
Ds = np.zeros(ngrid)
es = np.zeros(ngrid)
for i,beta in enumerate(betas):
es[i], Ds[i] = get_emp2_beta(mp._scf, eris, beta, timer, 0)
logger.note(mp, " %d/%d: beta = %.6f e = %.10g D = %.10g" +
("\n" if i == ngrid-1 else ""), i+1, ngrid, beta, es[i], Ds[i])
espmp2 = np.sum(ws * es) / np.sum(ws * Ds)
if mp.verbose >= 3: timer.report(["emp2 (prep)", "emp2 (1+2)", "emp2 (3)"])
return espmp2
def make_ovov(eris, antisymm=False):
no = eris.no
nv = eris.nv
if hasattr(eris,"feri"):
eris.feri.create_dataset("OVov", shape=(eris.ovOV.shape[::-1]),
dtype=eris.ovOV.dtype)
eris.OVov = eris.feri["OVov"]
        # OVov is just the transpose of ovOV; fill it block by block over the
        # beta-spin occupied index (a single loop suffices).
        for i in range(no[1]):
            eris.OVov[i*nv[1]:(i+1)*nv[1]] = \
                eris.ovOV[:,i*nv[1]:(i+1)*nv[1]].T
else:
eris.OVov = eris.ovOV.T
eris.Vovov = [eris.ovov, eris.OVOV, eris.ovOV, eris.OVov]
# anti-symmetrize same-spin component
if antisymm:
for s in [0,1]:
for i in range(no[s]):
Vi = eris.Vovov[s][i*nv[s]:(i+1)*nv[s]].reshape(
nv[s],no[s],nv[s])
Vi -= Vi.transpose(2,1,0)
eris.Vovov[s][i*nv[s]:(i+1)*nv[s]] = Vi.reshape(nv[s],-1)
def make_t2(eris, antisymm=False, alpha=0.):
incore = not hasattr(eris,"feri")
no = eris.no
nv = eris.nv
moe = eris.mo_energy
nov = [no[s]*nv[s] for s in [0,1]]
Vovov = eris.Vovov
dtype = Vovov[0].dtype
eia = [moe[s][:no[s]].reshape(-1,1)-moe[s][no[s]:] for s in [0,1]]
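    # eia[s][i, a] = eps_i - eps_a for spin s; the first-order amplitudes
    # built below are t2 = V / (eps_i + eps_j - eps_a - eps_b), with the
    # denominator optionally Lorentz-regularized through `alpha`.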
if incore:
# allocate memory
eris.taa = np.empty(eris.ovov.shape, dtype=dtype)
eris.tbb = np.empty(eris.OVOV.shape, dtype=dtype)
eris.tab = np.empty(eris.ovOV.shape, dtype=dtype)
eris.tba = None
eris.tovov = [eris.taa, eris.tbb, eris.tab, eris.tba]
# xform
for s in [0,1]:
for i in range(no[s]):
denom = lorentz_regularization(
eia[s][i].reshape(-1,1,1) + eia[s], alpha)
t2i = Vovov[s][i*nv[s]:(i+1)*nv[s]].reshape(
nv[s],no[s],nv[s]) / denom
if antisymm: t2i -= t2i.transpose(2,1,0)
eris.tovov[s][i*nv[s]:(i+1)*nv[s]] = t2i.reshape(nv[s],nov[s])
for i in range(no[0]):
denom = lorentz_regularization(
eia[0][i].reshape(-1,1,1) + eia[1], alpha)
t2i = Vovov[2][i*nv[0]:(i+1)*nv[0]].reshape(
nv[0],no[1],nv[1]) / denom
            eris.tovov[2][i*nv[0]:(i+1)*nv[0]] = t2i.reshape(nv[0],nov[1])
eris.tovov[3] = eris.tovov[2].T
else:
# allocate disk
for s in [0,1]:
eris.feri.create_dataset("%d%d"%(s,s),
shape=eris.Vovov[s].shape, dtype=dtype)
eris.feri.create_dataset("%d%d"%(s,1-s),
shape=eris.Vovov[2+s].shape, dtype=dtype)
eris.tovov = [eris.feri["00"], eris.feri["11"],
eris.feri["01"], eris.feri["10"]]
# xform
for s in [0,1]:
for i in range(no[s]):
denom = lorentz_regularization(
eia[s][i].reshape(-1,1,1) + eia[s], alpha)
t2i = Vovov[s][i*nv[s]:(i+1)*nv[s]].reshape(
nv[s],no[s],nv[s]) / denom
if antisymm: t2i -= t2i.transpose(2,1,0)
eris.tovov[s][i*nv[s]:(i+1)*nv[s]] = t2i.reshape(nv[s],nov[s])
for i in range(no[0]):
denom = lorentz_regularization(
eia[0][i].reshape(-1,1,1) + eia[1], alpha)
t2i = Vovov[2][i*nv[0]:(i+1)*nv[0]].reshape(
nv[0],no[1],nv[1]) / denom
            eris.tovov[2][i*nv[0]:(i+1)*nv[0]] = t2i.reshape(nv[0],nov[1])
            eris.tovov[3][:,i*nv[0]:(i+1)*nv[0]] = t2i.reshape(nv[0],nov[1]).T
def get_pseudo_cano_mo(mf):
n = mf.nao
no = mf.no
Co = mf.mo_coeff_occ
Cv = mf.mo_coeff_vir
rdm1 = mf.rdm1
Fao = mf.fock
# pseudo canonicalization
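    # Pseudo canonicalization rotates the occupied and virtual orbitals
    # separately so that the occupied-occupied and virtual-virtual Fock
    # blocks of each spin become diagonal; the reference energy and density
    # are untouched, but orbital-energy denominators become well defined.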
logger.note(mf, "%s", "pseudo canonicalizing SPHF orbitals...\n")
Cop = [None]*2
Cvp = [None]*2
for s in [0,1]:
Foo = Co[s].T@Fao[s]@Co[s]
if np.allclose(np.diag(np.diag(Foo)), Foo):
uoo = np.eye(Co[s].shape[1])
else:
uoo = np.linalg.eigh(Foo)[1]
Cop[s] = Co[s] @ uoo
Fvv = Cv[s].T@Fao[s]@Cv[s]
if np.allclose(np.diag(np.diag(Fvv)), Fvv):
uvv = np.eye(Cv[s].shape[1])
else:
uvv = np.linalg.eigh(Fvv)[1]
Cvp[s] = Cv[s] @ uvv
Cp = np.asarray([np.hstack([Cop[s],Cvp[s]]) for s in [0,1]])
return Cp
def dress_eris(mp, eris, C):
""" C is assumed to be pseudo canonicalized
"""
mf = mp._scf
eris.n = mf.nao
eris.s1e = mf.s1e
Co = [C[s][:,:mf.no[s]] for s in [0,1]]
Cv = [C[s][:,mf.no[s]:] for s in [0,1]]
rdm1 = mf.rdm1
Fao = mf.fock
eris.euhf = sum([np.trace((mf.h1e+Fao[s])@rdm1[s]) for s in [0,1]]) * 0.5
Fmo = np.asarray([C[s].T @ Fao[s] @ C[s] for s in [0,1]])
nfz = 0 if mp.frozen is None else mp.frozen
no = mf.no
eris.mo_energy = np.asarray([np.diag(Fmo[s])[nfz:] for s in [0,1]])
eris.mo_coeff_occ = [C[s][:,nfz:no[s]] for s in [0,1]]
eris.mo_coeff_vir = [C[s][:,no[s]:] for s in [0,1]]
eris.no = [eris.mo_coeff_occ[s].shape[1] for s in [0,1]]
eris.nv = [eris.mo_coeff_vir[s].shape[1] for s in [0,1]]
eris.Fuhfov = [Fmo[s][nfz:no[s],no[s]:] for s in [0,1]]
logger.note(mf, "MO energy after pseudo canonicalization:")
mf.dumpmoe(moe=eris.mo_energy, no=eris.no)
hl_gap = [eris.mo_energy[s][eris.no[s]]-eris.mo_energy[s][eris.no[s]-1]
for s in [0,1]]
logger.note(mf, "HOMO-LUMO gap: % .5g % .5g\n", *hl_gap)
def ao2mo(mp):
    """ Note that we antisymm the same-spin components of both ERI and T2
    """
    timer = TIMER(3)
timer.start(0)
mf = mp._scf
Cp = get_pseudo_cano_mo(mf) # core if not frozen in pseudo cano
mp.mo_occ = np.asarray([
[1 if i < mf.no[s] else 0 for i in range(mf.nao)] for s in [0,1]])
eris = mp.ao2mo(mo_coeff=Cp) # ovov, OVOV, ovOV
dress_eris(mp, eris, Cp) # +: n, no, nv, mo_coeff(occ/vir),
# mo_energy, s1e, Fuhfov, euhf
timer.stop(0)
timer.start(1)
make_ovov(eris, antisymm=True) # +: Vovov = [ovov, OVOV, ovOV, OVov]
timer.stop(1)
timer.start(2)
make_t2(eris, antisymm=False, alpha=mp.alpha)
# +: tovov = [taa, tbb, tab, tba]
timer.stop(2)
if mp.verbose >= 3: timer.report(["ERI", "OVov", "t2"])
return eris
def xform_Vvov(Vvov, A, B, ex=False):
"""
If ex == False : sum_ia Vbia Aki Bac --> Vbkc
If ex == True : sum_ia (Vbia-Vaib) Aki Bac --> Vbkc
"""
no = A.shape[1]
nv = B.shape[0]
V = np.einsum("bia,ki,ac->bkc", Vvov.reshape(-1,no,nv), A, B, optimize=True)
if ex:
V -= np.einsum("aib,ki,ac->bkc", Vvov.reshape(-1,no,nv), A, B,
optimize=True)
return V
def get_emp2_beta_0pi(mf, eris, mode, timer, timer_start):
""" mode = 0 --> beta = 0
mode = 1 --> beta = pi
"""
assert(mode in [0,1])
if mode == 0:
timer.start(timer_start)
n = eris.n
no = eris.no
nv = eris.nv
Vovov = eris.Vovov
tovov = eris.tovov
timer.stop(timer_start)
timer.start(timer_start+2)
es = eo = 0.
for s in [0,1]:
for i in range(no[s]):
ti = tovov[s][i*nv[s]:(i+1)*nv[s]]
Vi = Vovov[s][i*nv[s]:(i+1)*nv[s]]
es += np.sum(ti*Vi)
s = 0
for i in range(no[s]):
ti = tovov[2+s][i*nv[s]:(i+1)*nv[s]]
Vi = Vovov[2+s][i*nv[s]:(i+1)*nv[s]]
eo += np.sum(ti*Vi)
e = es*0.25 + eo
D = 1.
timer.stop(timer_start+2)
elif mode == 1:
raise ValueError("Not ready yet!")
return e, D
def get_emp2_beta(mf, eris, beta, timer, timer_start):
if beta < 1.E-6:
return get_emp2_beta_0pi(mf, eris, 0, timer, timer_start)
# if np.abs(beta-np.pi) < 1.E-6:
# return get_emp2_beta_0pi(mf, eris, 1, timer, timer_start)
timer.start(timer_start)
n = eris.n
no = eris.no
nv = eris.nv
s1e = eris.s1e
s1e_ = slg.block_diag(s1e,s1e)
Co = eris.mo_coeff_occ
Cv = eris.mo_coeff_vir
Co_ = slg.block_diag(*Co)
Cv_ = slg.block_diag(*Cv)
esp = mf.e_scf
euhf = eris.euhf
Fov = slg.block_diag(*eris.Fuhfov)
Fov_ = sp2block(Fov, no[0], nv[0])
R = get_Rbeta(n, beta)
Soo = Co_.T @ s1e_ @ R @ Co_
D = np.linalg.det(Soo)
Too = np.linalg.inv(Soo)
Too_ = sp2block(Too, no[0])
Lov = Too @ Co_.T @ s1e_ @ R @ Cv_
Lov_ = sp2block(Lov, no[0], nv[0])
Lvo = Cv_.T @ s1e_ @ R @ Co_ @ Too
Lvo_ = sp2block(Lvo, nv[0], no[0])
Kvv = Cv_.T @ s1e_ @ R @ (Cv_ - Co_@Lov)
Kvv_ = sp2block(Kvv, nv[0], nv[0])
TFK = Too @ Fov @ Kvv
TFK_ = sp2block(TFK, no[0], nv[0])
Vovov = eris.Vovov
tovov = eris.tovov
timer.stop(timer_start)
timer.start(timer_start+1)
e1 = 0.
# same spin
Lovss = [Lov_[s,s].ravel() for s in [0,1]]
e1 += 0.5 * sum([np.sum(
np.asarray([tovov[s][k*nv[s]:(k+1)*nv[s]] @ Lovss[s]
for k in range(no[s])]) * Lov_[s,s]) for s in [0,1]])
# oppo spin
s = 0
e1 += np.sum(
np.asarray([tovov[2+s][k*nv[s]:(k+1)*nv[s]] @ Lovss[1-s]
for k in range(no[s])]) * Lov_[s,s])
LovsbsT = Lov_[1-s,s].T.ravel()
e1 -= np.sum(
np.asarray([LovsbsT @ tovov[2+s][k*nv[s]:(k+1)*nv[s]].
reshape(nv[s],no[1-s],nv[1-s]).reshape(-1,nv[1-s])
for k in range(no[s])]) * Lov_[s,1-s])
e1 *= D
e2 = 0.
Lovtt = Lovss
e2 += sum([np.sum(
np.asarray([tovov[t][l*nv[t]:(l+1)*nv[t]] @ Lovtt[t]
for l in range(no[t])]) * TFK_[t,t]) for t in [0,1]])
t = 0
Movtt = [TFK_[t1,t1].ravel() for t1 in [0,1]]
MovtbtT = [TFK_[1-t1,t1].T.ravel() for t1 in [0,1]]
LovtbtT = [Lov_[1-t1,t1].T.ravel() for t1 in [0,1]]
tM1 = np.zeros([no[t],nv[t]])
tM2 = np.zeros([no[t],nv[1-t]])
tL1 = np.zeros([no[t],nv[t]])
tL2 = np.zeros([no[t],nv[1-t]])
for k in range(no[t]):
tk = tovov[2+t][k*nv[t]:(k+1)*nv[t]]
tM1[k] = tk @ Movtt[1-t]
tM2[k] = MovtbtT[t] @ tk.reshape(
nv[t],no[1-t],nv[1-t]).reshape(-1,nv[1-t])
tL1[k] = tk @ Lovtt[1-t]
tL2[k] = LovtbtT[t] @ tk.reshape(
nv[t],no[1-t],nv[1-t]).reshape(-1,nv[1-t])
e2 += np.sum(tM1*Lov_[t,t]) - np.sum(tM2*Lov_[t,1-t]) + \
np.sum(tL1*TFK_[t,t]) - np.sum(tL2*TFK_[t,1-t])
e2 *= D
e2 += e1 * np.sum([np.trace(Fov_[s,s] @ Lvo_[s,s]) for s in [0,1]])
tM1 = tM2 = tL1 = tL2 = None  # release intermediates
timer.stop(timer_start+1)
timer.start(timer_start+2)
e3 = np.zeros(3)
# e3[0]
# both same spin
for s in [0,1]:
for t in [0,1]:
tVK = np.zeros([no[s],no[s]])
for i in range(no[s]):
Vvov = xform_Vvov(Vovov[s][i*nv[s]:(i+1)*nv[s]],
Too_[t,s], Kvv_[s,t], ex=False).reshape(nv[s],-1)
tVK[i] = np.asarray([np.trace(
(tovov[t][k*nv[t]:(k+1)*nv[t]] @ Vvov.T) @ Kvv_[s,t])
for k in range(no[t])])
e3[0] += 0.25 * np.trace(tVK @ Too_[t,s])
# V same spin + t2 oppo spin
t = 0
for s in [0,1]:
tVK = np.zeros([no[s],no[t]])
for i in range(no[s]):
Vvov = xform_Vvov(Vovov[s][i*nv[s]:(i+1)*nv[s]],
Too_[1-t,s], Kvv_[s,1-t], ex=False).reshape(nv[s],-1)
tVK[i] = np.asarray([np.trace(
(tovov[2+t][k*nv[t]:(k+1)*nv[t]] @ Vvov.T) @ Kvv_[s,t])
for k in range(no[t])])
e3[0] += np.trace(tVK @ Too_[t,s])
# V oppo spin + t2 same spin
s = 0
for t in [0,1]:
tVK = np.zeros([no[s],no[t]])
for i in range(no[s]):
Vvov = xform_Vvov(Vovov[2+s][i*nv[s]:(i+1)*nv[s]],
Too_[t,1-s], Kvv_[1-s,t], ex=False).reshape(nv[s],-1)
tVK[i] = np.asarray([np.trace(
(tovov[t][k*nv[t]:(k+1)*nv[t]] @ Vvov.T) @ Kvv_[s,t])
for k in range(no[t])])
e3[0] += np.trace(tVK @ Too_[t,s])
# both oppo spin
s = t = 0
tVK = np.zeros([no[s],no[t]])
for i in range(no[s]):
Vvov = xform_Vvov(Vovov[2+s][i*nv[s]:(i+1)*nv[s]],
Too_[1-t,1-s], Kvv_[1-s,1-t], ex=False).reshape(nv[s],-1)
tVK[i] = np.asarray([np.trace(
(tovov[2+t][k*nv[t]:(k+1)*nv[t]] @ Vvov.T) @ Kvv_[s,t])
for k in range(no[t])])
e3[0] += np.trace(tVK @ Too_[t,s])
for i in range(no[s]):
Vvvo = xform_Vvov(Vovov[2+s][i*nv[s]:(i+1)*nv[s]],
Too_[1-t,1-s], Kvv_[1-s,t],
ex=False).transpose(0,2,1).reshape(nv[s],-1) # iacl
tVK[i] = np.asarray([np.sum(
(Vvvo @ tovov[2+t][k*nv[t]:(k+1)*nv[t]].
reshape(nv[t],no[1-t],nv[1-t]).reshape(-1,nv[1-t])) *
Kvv_[s,1-t]) for k in range(no[t])])
e3[0] -= np.trace(tVK @ Too_[t,s])
tVK = np.zeros([no[s],no[1-t]])
for i in range(no[s]):
Vvvo = xform_Vvov(Vovov[2+s][i*nv[s]:(i+1)*nv[s]],
Too_[t,1-s], Kvv_[1-s,1-t],
ex=False).transpose(0,2,1).reshape(nv[s],-1) # iadk
tVK[i] = np.asarray([np.sum(
(Vvvo @ tovov[3+t][k*nv[1-t]:(k+1)*nv[1-t]].
reshape(nv[1-t],no[t],nv[t]).reshape(-1,nv[t])) *
Kvv_[s,t]) for k in range(no[1-t])])
e3[0] -= np.trace(tVK @ Too_[1-t,s])
for i in range(no[s]):
Vvov = xform_Vvov(Vovov[2+s][i*nv[s]:(i+1)*nv[s]],
Too_[t,1-s], Kvv_[1-s,t], ex=False).reshape(nv[s],-1) # iakc
tVK[i] = np.asarray([np.trace(
(tovov[3+t][k*nv[1-t]:(k+1)*nv[1-t]] @ Vvov.T)
@ Kvv_[s,1-t]) for k in range(no[1-t])])
e3[0] += np.trace(tVK @ Too_[1-t,s])
e3[0] *= D
# e3[1]
# compute some intermediates
Lvoss = [Lvo_[s,s].ravel() for s in [0,1]]
Lvosbs = [Lvo_[1-s,s].ravel() for s in [0,1]]
LvossT = [Lvo_[s,s].T.ravel() for s in [0,1]]
VL = [np.asarray([Lvoss[s] @ Vovov[s][j*nv[s]:(j+1)*nv[s]].
reshape(nv[s],no[s],nv[s]).reshape(-1,nv[s]) for j in range(no[s])])
for s in [0,1]]
VL1 = [np.asarray([Lvosbs[s] @ Vovov[3-s][j*nv[1-s]:(j+1)*nv[1-s]].
reshape(nv[1-s],no[s],nv[s]).reshape(-1,nv[s]) for j in range(no[1-s])])
for s in [0,1]]
VL2 = [np.asarray([Vovov[2+s][i*nv[s]:(i+1)*nv[s]] @ LvossT[1-s]
for i in range(no[s])]) for s in [0,1]]
Lvoss = Lvosbs = LvossT = None
LovttT = [Lov_[t,t].T.ravel() for t in [0,1]]
LovttbT = [Lov_[t,1-t].T.ravel() for t in [0,1]]
Lovtt = [Lov_[t,t].ravel() for t in [0,1]]
tL = [np.asarray([LovttT[t] @ tovov[t][l*nv[t]:(l+1)*nv[t]].
reshape(nv[t],no[t],nv[t]).reshape(-1,nv[t]) for l in range(no[t])]).T
for t in [0,1]]
tL1 = [np.asarray([LovttbT[t] @ tovov[3-t][l*nv[1-t]:(l+1)*nv[1-t]].
reshape(nv[1-t],no[t],nv[t]).reshape(-1,nv[t])
for l in range(no[1-t])]).T for t in [0,1]]
tL2 = [np.asarray([tovov[3-t][l*nv[1-t]:(l+1)*nv[1-t]] @ Lovtt[t]
for l in range(no[1-t])]).T for t in [0,1]]
LovttT = LovttbT = Lovtt = None
KtL12 = [[Kvv_[s,t]@tL1[t] - Kvv_[s,1-t]@tL2[t]
for t in [0,1]] for s in [0,1]]
TVL12 = [[Too_[1-t,1-s]@VL1[s] - Too_[1-t,s]@VL2[s]
for s in [0,1]] for t in [0,1]]
# both same spin
e3[1] += sum([sum([np.trace(Too_[t,s]@VL[s]@Kvv_[s,t]@tL[t])
for t in [0,1]]) for s in [0,1]])
# V same spin + t2 oppo spin
e3[1] += sum([np.trace(
VL[s] @ sum([KtL12[s][t] @ Too_[1-t,s] for t in [0,1]]))
for s in [0,1]])
# V oppo spin + t2 same spin
e3[1] += sum([np.trace(
tL[1-t] @ sum([TVL12[t][s] @ Kvv_[s,1-t] for s in [0,1]]))
for t in [0,1]])
# both oppo spin
e3[1] += sum([sum([np.trace(
KtL12[s][t] @ TVL12[t][s])
for t in [0,1]]) for s in [0,1]])
e3[1] *= D
# e3[2]
# same spin
LvossT = [Lvo_[s,s].T.ravel() for s in [0,1]]
e3[2] += sum([
np.asarray([Vovov[s][i*nv[s]:(i+1)*nv[s]] @ LvossT[s]
for i in range(no[s])]).ravel() @ LvossT[s] for s in [0,1]])
# oppo spin
e3[2] += sum([(-np.trace(VL1[s] @ Lvo_[s,1-s]) +
np.trace(VL2[s] @ Lvo_[s,s])) for s in [0,1]])
e3[2] *= 0.5 * e1
e3 = np.sum(e3)
timer.stop(timer_start+2)
emp2 = (euhf-esp) * e1 + e2 + e3
return emp2, D
class SPMP2(mp.ump2.UMP2):
def __init__(self, mf,
# args for spin projection
spin_proj=None, ngrid=None, grid=None,
# args for frozen core
frozen=0,
# args for regularization
alpha=0.):
if not isinstance(mf, scf.uhf.UHF):
raise ValueError("Input mf must be a derived class of scf.uhf.UHF.")
if ((spin_proj is None and not hasattr(mf, "spin_proj")) or
(ngrid is None and not hasattr(mf, "ngrid"))):
raise ValueError("Input mf is not a valid SPSCF object.")
mp.ump2.UMP2.__init__(self, mf, frozen=frozen)
self.alpha = alpha
self.nao = mf.nao
self.no = mf.no
self.nv = mf.nv
self.spin_proj = spin_proj
if self.spin_proj is None: self.spin_proj = mf.spin_proj
self.ngrid = ngrid
if self.ngrid is None: self.ngrid = mf.ngrid
self.grid = grid
if self.grid is None: self.grid = mf.grid
self.e_corr = None
def kernel(self):
self.e_corr = kernel(self)
return self.e_corr
@property
def e_tot(self):
return self.e_corr + self._scf.e_tot
if __name__ == "__main__":
import sys
try:
geom = sys.argv[1]
basis = sys.argv[2]
except IndexError:
print("Usage: geom, basis")
sys.exit(1)
from frankenstein.tools.pyscf_utils import get_pymol
pymol = get_pymol(geom, basis, verbose=3)
pymol.max_memory = 4000
pymol.verbose = 4
mf = scf.RHF(pymol)
mf.kernel()
e0 = mf.e_tot
dm0 = mf.make_rdm1()
mo_coeff0 = np.asarray([mf.mo_coeff, mf.mo_coeff.copy()])
eri = mf._eri
mmp = mp.MP2(mf, frozen=2)
mmp.kernel()
mf = sgscf.UHF(pymol)
mf._eri = eri
mf.guess_mix = 0.3
mf.kernel(mo_coeff0=mo_coeff0)
mo_coeff0 = mf.mo_coeff
frozen = 0
mmp = SPMP2(mf, ngrid=6, spin_proj=0, grid="eq", frozen=frozen)
mmp.kernel()
mmp = mp.UMP2(mf, frozen=frozen)
mmp.kernel()
if mf.S2 < 0.05:
from frankenstein.tools.scf_utils import homo_lumo_mix
homo_lumo_mix(mo_coeff0[0], mf.no[0], 0.5)
mf = sgscf.SPHF(pymol)
mf._eri = eri
mf.spin_proj = 0
mf.ngrid = 6
mf.grid = "gauss"
mf.kernel(mo_coeff0=mo_coeff0)
e1 = mf.e_tot
mmp = SPMP2(mf, frozen=frozen)
print(mmp.kernel())
|
|
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from http import HTTPStatus
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_utils import strutils
import webob
from cinder.api import common
from cinder.api import extensions
from cinder.api import microversions as mv
from cinder.api.openstack import wsgi
from cinder.api.schemas import admin_actions
from cinder.api import validation
from cinder import backup
from cinder import db
from cinder import exception
from cinder.i18n import _
from cinder import objects
from cinder import volume
from cinder.volume import volume_utils
LOG = logging.getLogger(__name__)
class AdminController(wsgi.Controller):
"""Abstract base class for AdminControllers."""
collection = None # api collection to extend
# FIXME(clayg): this will be hard to keep up-to-date
# Concrete classes can expand or over-ride
def __init__(self, *args, **kwargs):
super(AdminController, self).__init__(*args, **kwargs)
# singular name of the resource
self.resource_name = self.collection.rstrip('s')
self.volume_api = volume.API()
self.backup_api = backup.API()
def _update(self, *args, **kwargs):
raise NotImplementedError()
def _get(self, *args, **kwargs):
raise NotImplementedError()
def _delete(self, *args, **kwargs):
raise NotImplementedError()
def validate_update(self, req, body):
raise NotImplementedError()
def _notify_reset_status(self, context, id, message):
raise NotImplementedError()
def authorize(self, context, action_name, target_obj=None):
context.authorize(
'volume_extension:%(resource)s_admin_actions:%(action)s' %
{'resource': self.resource_name,
'action': action_name}, target_obj=target_obj)
def _remove_worker(self, context, id):
# Remove the cleanup worker from the DB when we change a resource
# status since it renders useless the entry.
res = db.worker_destroy(context, resource_type=self.collection.title(),
resource_id=id)
if res:
LOG.debug('Worker entry for %s with id %s has been deleted.',
self.collection, id)
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-reset_status')
def _reset_status(self, req, id, body):
"""Reset status on the resource."""
def _clean_volume_attachment(context, id):
attachments = (
db.volume_attachment_get_all_by_volume_id(context, id))
for attachment in attachments:
db.volume_detached(context.elevated(), id, attachment.id)
db.volume_admin_metadata_delete(context.elevated(), id,
'attached_mode')
context = req.environ['cinder.context']
update = self.validate_update(req, body=body)
msg = "Updating %(resource)s '%(id)s' with '%(update)r'"
LOG.debug(msg, {'resource': self.resource_name, 'id': id,
'update': update})
self._notify_reset_status(context, id, 'reset_status.start')
# Not found exception will be handled at the wsgi level
self._update(context, id, update)
self._remove_worker(context, id)
if update.get('attach_status') == 'detached':
_clean_volume_attachment(context, id)
self._notify_reset_status(context, id, 'reset_status.end')
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-force_delete')
def _force_delete(self, req, id, body):
"""Delete a resource, bypassing the check that it must be available."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
resource = self._get(context, id)
self.authorize(context, 'force_delete', target_obj=resource)
self._delete(context, resource, force=True)
class VolumeAdminController(AdminController):
"""AdminController for Volumes."""
collection = 'volumes'
def _notify_reset_status(self, context, id, message):
volume = objects.Volume.get_by_id(context, id)
volume_utils.notify_about_volume_usage(context, volume,
message)
def _update(self, *args, **kwargs):
db.volume_update(*args, **kwargs)
def _get(self, *args, **kwargs):
return self.volume_api.get(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.volume_api.delete(*args, **kwargs)
@validation.schema(admin_actions.reset)
def validate_update(self, req, body):
update = {}
body = body['os-reset_status']
status = body.get('status', None)
attach_status = body.get('attach_status', None)
migration_status = body.get('migration_status', None)
if status:
update['status'] = status.lower()
if attach_status:
update['attach_status'] = attach_status.lower()
if migration_status:
update['migration_status'] = migration_status.lower()
if update['migration_status'] == 'none':
update['migration_status'] = None
return update
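# Illustrative request body for this action (example values, not taken from
# the schema):
#   {"os-reset_status": {"status": "error",
#                        "attach_status": "detached",
#                        "migration_status": "none"}}
# Values are lower-cased above, and a migration_status of 'none' is mapped to
# None.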
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-reset_status')
def _reset_status(self, req, id, body):
"""Reset status on the volume."""
def _clean_volume_attachment(context, id):
attachments = (
db.volume_attachment_get_all_by_volume_id(context, id))
for attachment in attachments:
db.volume_detached(context.elevated(), id, attachment.id)
db.volume_admin_metadata_delete(context.elevated(), id,
'attached_mode')
# any exceptions raised will be handled at the wsgi level
update = self.validate_update(req, body=body)
context = req.environ['cinder.context']
volume = objects.Volume.get_by_id(context, id)
self.authorize(context, 'reset_status', target_obj=volume)
# at this point, we still don't know if we're going to
# reset the volume's state. Need to check what the caller
# is requesting first.
if update.get('status') in ('deleting', 'error_deleting',
'detaching'):
msg = _("Cannot reset-state to %s"
% update.get('status'))
raise webob.exc.HTTPBadRequest(explanation=msg)
if update.get('status') == 'in-use':
attachments = (
db.volume_attachment_get_all_by_volume_id(context, id))
if not attachments:
msg = _("Cannot reset-state to in-use "
"because volume does not have any attachments.")
raise webob.exc.HTTPBadRequest(explanation=msg)
msg = "Updating %(resource)s '%(id)s' with '%(update)r'"
LOG.debug(msg, {'resource': self.resource_name, 'id': id,
'update': update})
self._notify_reset_status(context, id, 'reset_status.start')
self._update(context, id, update)
self._remove_worker(context, id)
if update.get('attach_status') == 'detached':
_clean_volume_attachment(context, id)
self._notify_reset_status(context, id, 'reset_status.end')
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-force_detach')
@validation.schema(admin_actions.force_detach)
def _force_detach(self, req, id, body):
"""Roll back a bad detach after the volume been disconnected."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self._get(context, id)
self.authorize(context, 'force_detach', target_obj=volume)
connector = body['os-force_detach'].get('connector', None)
try:
self.volume_api.terminate_connection(context, volume, connector)
except exception.VolumeBackendAPIException:
msg = _("Unable to terminate volume connection from backend.")
raise webob.exc.HTTPInternalServerError(explanation=msg)
attachment_id = body['os-force_detach'].get('attachment_id', None)
try:
self.volume_api.detach(context, volume, attachment_id)
except messaging.RemoteError as error:
if error.exc_type in ['VolumeAttachmentNotFound',
'InvalidVolume']:
msg = _("Error force detaching volume - %(err_type)s: "
"%(err_msg)s") % {'err_type': error.exc_type,
'err_msg': error.value}
raise webob.exc.HTTPBadRequest(explanation=msg)
else:
# There are also few cases where force-detach call could fail
# due to db or volume driver errors. These errors shouldn't
# be exposed to the user and in such cases it should raise
# 500 error.
raise
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-migrate_volume')
@validation.schema(admin_actions.migrate_volume, mv.BASE_VERSION,
mv.get_prior_version(mv.VOLUME_MIGRATE_CLUSTER))
@validation.schema(admin_actions.migrate_volume_v316,
mv.VOLUME_MIGRATE_CLUSTER)
def _migrate_volume(self, req, id, body):
"""Migrate a volume to the specified host."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self._get(context, id)
self.authorize(context, 'migrate_volume', target_obj=volume)
params = body['os-migrate_volume']
cluster_name, host = common.get_cluster_host(req, params,
mv.VOLUME_MIGRATE_CLUSTER)
force_host_copy = strutils.bool_from_string(params.get(
'force_host_copy', False), strict=True)
lock_volume = strutils.bool_from_string(params.get(
'lock_volume', False), strict=True)
self.volume_api.migrate_volume(context, volume, host, cluster_name,
force_host_copy, lock_volume)
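# Illustrative request body for this action (example values; 'cluster' is
# only accepted from the VOLUME_MIGRATE_CLUSTER microversion onwards):
#   {"os-migrate_volume": {"host": "node2@lvm#pool",
#                          "force_host_copy": false,
#                          "lock_volume": false}}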
@wsgi.action('os-migrate_volume_completion')
@validation.schema(admin_actions.migrate_volume_completion)
def _migrate_volume_completion(self, req, id, body):
"""Complete an in-progress migration."""
context = req.environ['cinder.context']
# Not found exception will be handled at the wsgi level
volume = self._get(context, id)
self.authorize(context, 'migrate_volume_completion', target_obj=volume)
params = body['os-migrate_volume_completion']
new_volume_id = params['new_volume']
# Not found exception will be handled at the wsgi level
new_volume = self._get(context, new_volume_id)
error = params.get('error', False)
ret = self.volume_api.migrate_volume_completion(context, volume,
new_volume, error)
return {'save_volume_id': ret}
class SnapshotAdminController(AdminController):
"""AdminController for Snapshots."""
collection = 'snapshots'
def _notify_reset_status(self, context, id, message):
snapshot = objects.Snapshot.get_by_id(context, id)
volume_utils.notify_about_snapshot_usage(context, snapshot,
message)
@validation.schema(admin_actions.reset_status_snapshot)
def validate_update(self, req, body):
status = body['os-reset_status']['status']
update = {'status': status.lower()}
return update
def _update(self, *args, **kwargs):
context = args[0]
snapshot_id = args[1]
fields = args[2]
snapshot = objects.Snapshot.get_by_id(context, snapshot_id)
self.authorize(context, 'reset_status', target_obj=snapshot)
snapshot.update(fields)
snapshot.save()
def _get(self, *args, **kwargs):
return self.volume_api.get_snapshot(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.volume_api.delete_snapshot(*args, **kwargs)
class BackupAdminController(AdminController):
"""AdminController for Backups."""
collection = 'backups'
def _notify_reset_status(self, context, id, message):
backup = objects.Backup.get_by_id(context, id)
volume_utils.notify_about_backup_usage(context, backup,
message)
def _get(self, *args, **kwargs):
return self.backup_api.get(*args, **kwargs)
def _delete(self, *args, **kwargs):
return self.backup_api.delete(*args, **kwargs)
@wsgi.response(HTTPStatus.ACCEPTED)
@wsgi.action('os-reset_status')
@validation.schema(admin_actions.reset_status_backup)
def _reset_status(self, req, id, body):
"""Reset status on the resource."""
context = req.environ['cinder.context']
status = body['os-reset_status']['status']
update = {'status': status.lower()}
msg = "Updating %(resource)s '%(id)s' with '%(update)r'"
LOG.debug(msg, {'resource': self.resource_name, 'id': id,
'update': update})
self._notify_reset_status(context, id, 'reset_status.start')
# Not found exception will be handled at the wsgi level
self.backup_api.reset_status(context=context, backup_id=id,
status=update['status'])
class Admin_actions(extensions.ExtensionDescriptor):
"""Enable admin actions."""
name = "AdminActions"
alias = "os-admin-actions"
updated = "2012-08-25T00:00:00+00:00"
def get_controller_extensions(self):
exts = []
for class_ in (VolumeAdminController, SnapshotAdminController,
BackupAdminController):
controller = class_()
extension = extensions.ControllerExtension(
self, class_.collection, controller)
exts.append(extension)
return exts
|
|
#!/usr/bin/env python
from __future__ import print_function, division, unicode_literals, absolute_import
import sys
import pytest
from espwrap.base import batch, MIMETYPE_HTML, MIMETYPE_TEXT
from espwrap.adaptors.noop import NoopMassEmail
if sys.version_info < (3,):
range = xrange
def generate_recipients(count=10):
for x in range(count):
yield {
'name': 'Test Recip {}'.format(x),
'email': 'test+{}@something.com'.format(x),
}
def test_batch():
tester = ['lol']*500
res = list(batch(tester, 5))
assert len(res) == 100
lengths = [len(x) for x in res]
assert max(lengths) == 5
def test_instantiatable():
assert NoopMassEmail()
def test_add_recipient():
me = NoopMassEmail()
me.add_recipient(
name='Tester Dude', email='[email protected]',
merge_vars={'EXAMPLE': 'A TEST REPLACEMENT'}
)
recips = me.get_recipients()
assert len(recips) == 1
assert recips[0].get('merge_vars') is not None
def test_add_recipients():
'''
Test that a list of recipients can be added in bulk, assigning a default
empty dict of merge vars when not provided
'''
me = NoopMassEmail()
me.add_recipients([
{
'email': '[email protected]',
'name': 'Some test dude',
'merge_vars': {'SOMETHING': 'test'},
},
{
'email': '[email protected]',
'name': 'Some test dude',
},
])
recips = me.get_recipients()
assert len(recips) == 2
recips = [x for x in me.get_recipients() if x.get('merge_vars') == {}]
assert len(recips) == 1
def test_can_lazily_add_recipients_and_solidify():
gen_count = 20
me = NoopMassEmail()
me.add_recipients(generate_recipients(gen_count))
recips = me.get_raw_recipients()
assert hasattr(recips, '__iter__') and not hasattr(recips, '__len__')
recips_list = me.solidify_recipients()
assert len(recips_list) == gen_count
def test_no_global_merge_vars_by_default():
me = NoopMassEmail()
assert not me.get_global_merge_vars()
def test_add_global_merge_vars():
me = NoopMassEmail()
me.add_global_merge_vars(FIRST='server', SECOND_ONE_IS_BEST='client')
assert len(me.get_global_merge_vars().items()) == 2
assert 'FIRST' in me.get_global_merge_vars().keys()
assert 'SECOND_ONE_IS_BEST' in me.get_global_merge_vars().keys()
def test_clear_global_merge_vars():
me = NoopMassEmail()
me.add_global_merge_vars(FIRST='server', SECOND='client')
me.clear_global_merge_vars()
assert not me.get_global_merge_vars()
def test_no_tags_by_default():
me = NoopMassEmail()
assert len(me.get_tags()) == 0
def test_add_tags():
me = NoopMassEmail()
# Make sure dupes are eliminated
me.add_tags('test', 'mode', 'test')
assert len(me.get_tags()) == 2
def test_clear_tags():
me = NoopMassEmail()
me.add_tags('test', 'mode')
assert len(me.get_tags()) == 2
me.clear_tags()
assert len(me.get_tags()) == 0
def test_set_body_and_get_body():
me = NoopMassEmail()
msg = '<h1>Hello!</h1>'
me.set_body(msg)
assert me.get_body().get(MIMETYPE_HTML) == msg
assert me.get_body(mimetype=MIMETYPE_HTML) == msg
with pytest.raises(AttributeError) as e:
me.get_body(mimetype=MIMETYPE_TEXT)
assert 'mimetype' in str(e.value)
def test_set_body_with_mimetype():
'''
Test that setting a body will set the default (HTML), but this mimetype
can be overridden with an argument (e.g. for plain text)
'''
me = NoopMassEmail()
msg_text = 'Tester Test'
msg_html = '<h1>Tester Test HTML</h1>'
me.set_body(msg_html)
me.set_body(msg_text, mimetype=MIMETYPE_TEXT)
assert me.get_body(mimetype=MIMETYPE_HTML) == msg_html
assert me.get_body(mimetype=MIMETYPE_TEXT) == msg_text
def test_from_addr():
me = NoopMassEmail()
addr = '[email protected]'
me.set_from_addr(addr)
assert me.get_from_addr() == addr
def test_reply_to_addr():
me = NoopMassEmail()
addr = '[email protected]'
me.set_reply_to_addr(addr)
assert me.get_reply_to_addr() == addr
def test_subject():
me = NoopMassEmail()
sub = 'Testing'
me.set_subject(sub)
assert me.get_subject() == sub
def test_validate():
me = NoopMassEmail()
with pytest.raises(Exception) as e:
me.validate()
assert 'address and subject' in str(e.value)
me.set_subject('something')
me.set_from_addr('[email protected]')
me.validate()
def test_webhook_data():
me = NoopMassEmail()
sub = 'Testing'
me.set_webhook_data(sub)
assert me.get_webhook_data() == sub
def test_click_tracking():
me = NoopMassEmail()
assert not me.get_click_tracking_status()
me.enable_click_tracking()
assert me.get_click_tracking_status() is True
me.disable_click_tracking()
assert me.get_click_tracking_status() is False
def test_open_tracking():
me = NoopMassEmail()
assert not me.get_open_tracking_status()
me.enable_open_tracking()
assert me.get_open_tracking_status() is True
me.disable_open_tracking()
assert me.get_open_tracking_status() is False
def test_importance():
me = NoopMassEmail()
me.set_importance(True)
assert me.get_importance() is True
me.set_importance(False)
assert me.get_importance() is False
def test_ip_pool():
me = NoopMassEmail()
pool = 'abc_group'
me.set_ip_pool(pool)
assert me.get_ip_pool() == pool
def test_template_name():
me = NoopMassEmail()
template_name = 'test template'
me.set_template_name(template_name)
assert me.get_template_name() == template_name
def test_send():
me = NoopMassEmail()
with pytest.raises(NotImplementedError):
me.send()
def test_delimiters():
'''
By default, we assume the ESP cannot hot-swap variable delimiters the way
SendGrid can, so we raise NotImplementedError and call it a day.
'''
me = NoopMassEmail()
with pytest.raises(NotImplementedError):
me.set_variable_delimiters(start='*|', end='|*')
with pytest.raises(NotImplementedError):
me.get_variable_delimiters()
|
|
from coda.compiler import genbase
from coda import types
import os
import re
from abc import abstractmethod
reUpperFirst = re.compile('(.)([A-Z][a-z]+)')
reUpperRest = re.compile('([a-z0-9])([A-Z])')
def toUpperUnderscore(s):
s = reUpperFirst.sub(r'\1_\2', s)
return reUpperRest.sub(r'\1_\2', s).upper()
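# Illustrative examples (comments only):
#   toUpperUnderscore('FieldName')    -> 'FIELD_NAME'
#   toUpperUnderscore('HTTPResponse') -> 'HTTP_RESPONSE'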
def getQualName(decl):
'''The qualified name of a type, including enclosing types but not including namespace. This
should only be used to reference types that are defined in the current namespace.'''
name = decl.getName()
while decl.getEnclosingType():
decl = decl.getEnclosingType()
name = decl.getName() + '::' + name
return name
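# Illustrative example (hypothetical declarations): for a struct 'Inner'
# nested inside 'Outer', getQualName returns 'Outer::Inner', while
# getEnclosingQualName below returns 'Outer::'.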
def getEnclosingQualName(decl):
'''The qualified name of the type enclosing a declaration, including enclosing types but not
including namespace. This should only be used to reference types that are defined in the
current namespace.'''
name = ''
while decl.getEnclosingType():
decl = decl.getEnclosingType()
name = decl.getName() + '::' + name
return name
# Set of types that end with a right angle bracket '>'
HAVE_TYPE_ARGS = set([
types.TypeKind.STRING,
types.TypeKind.BYTES,
types.TypeKind.LIST,
types.TypeKind.SET,
types.TypeKind.MAP,
types.TypeKind.MODIFIED])
class TypeNameFormatter:
'''Return the name of a type with sufficient qualifiers that
it can be referred to from within the current namespace.'''
def __init__(self, gen):
self.gen = gen
self.fd = None  # set via setFile() before formatting types
def __call__(self, struct, useActual=True):
'''@type struct: coda.descriptors.StructType'''
name = struct.getName()
while struct.getEnclosingType():
struct = struct.getEnclosingType()
name = struct.getName() + '::' + name
fd = struct.getFile()
if fd and fd is not self.fd:
package = self.gen.getFilePackage(fd)
if package:
return package + '::' + name
return name
def setFile(self, fd):
self.fd = fd
class CppTypeFormatter(genbase.AbstractTypeTransform):
'''Transform a type expression into its C++ representation.'''
def __init__(self, nameFormatter):
super().__init__()
self.nameFormatter = nameFormatter
def visitType(self, ty, *args):
raise AssertionError('Type not handled: ' + str(ty))
def visitPrimitiveType(self, ty, const, ref):
'@type ty: coda.types.AbstractType'
return self.visitType(ty)
def visitBooleanType(self, ty, const, ref):
return 'bool'
def visitIntegerType(self, ty, const, ref):
return 'int{0}_t'.format(ty.getBits())
def visitFloatType(self, ty, const, ref):
return 'float'
def visitDoubleType(self, ty, const, ref):
return 'double'
def visitStringType(self, ty, const, ref):
return self.refType('std::string', const, ref)
def visitBytesType(self, ty, const, ref):
return self.refType('std::string', const, ref)
def visitListType(self, ty, const, ref):
return self.refType(
'std::vector<{0}>'.format(self(ty.getElementType(), False, False)),
const, ref)
def visitSetType(self, ty, const, ref):
if ty.getElementType().typeId() == types.TypeKind.ENUM:
return self.refType(
'std::unordered_set<{0}, coda::runtime::EnumHash<{0}> >'.format(
self(ty.getElementType(), False, False)),
const, ref)
else:
return self.refType(
'std::unordered_set<{0}>'.format(
self(ty.getElementType(), False, False)),
const, ref)
def visitMapType(self, ty, const, ref):
params = '{0}, {1}'.format(
self(ty.getKeyType(), False, False),
self(ty.getValueType(), False, False))
if ty.getKeyType().typeId() == types.TypeKind.ENUM:
params += ', coda::runtime::EnumHash<{0}>'.format(
self(ty.getKeyType(), False, False))
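# Assumed intent of the check below: keep a space before the closing '>' so
# that pre-C++11 compilers do not tokenize the nested '>>' as a right shift.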
if params.endswith('>'):
params += ' '
return self.refType('std::unordered_map<{0}>'.format(params), const, ref)
def visitStructType(self, ty, const, ref):
return self.pointerType(self.nameFormatter(ty), const)
def visitEnumType(self, ty, const, ref):
return self.nameFormatter(ty)
def visitModifiedType(self, ty, const, ref):
return self(ty.getElementType(), const or ty.isConst(), ref)
def pointerType(self, name, const):
if const:
name = 'const ' + name
return name + '*'
def refType(self, name, const, ref):
if const:
name = 'const ' + name
if ref:
name += '&'
return name
class TypeDescriptorFormatter(genbase.AbstractTypeTransform):
'''Transform a type into a reference to that type's descriptor.'''
def __init__(self, nameFormatter):
super().__init__()
self.nameFormatter = nameFormatter
def visitType(self, ty, *args):
raise AssertionError('Type not handled: ' + str(ty))
def visitBooleanType(self, ty):
return 'coda::types::Boolean'
def visitIntegerType(self, ty):
return 'coda::types::Integer{0}'.format(ty.getBits())
def visitFloatType(self, ty):
return 'coda::types::Float'
def visitDoubleType(self, ty):
return 'coda::types::Double'
def visitStringType(self, ty):
return 'coda::types::String'
def visitBytesType(self, ty):
return 'coda::types::Bytes'
def visitListType(self, ty):
if ty.getElementType().typeId() in HAVE_TYPE_ARGS:
return 'coda::types::List<{0} >'.format(self(ty.getElementType()))
return 'coda::types::List<{0}>'.format(self(ty.getElementType()))
def visitSetType(self, ty):
if ty.getElementType().typeId() in HAVE_TYPE_ARGS:
return 'coda::types::Set<{0} >'.format(self(ty.getElementType()))
return 'coda::types::Set<{0}>'.format(self(ty.getElementType()))
def visitMapType(self, ty):
if ty.getValueType().typeId() in HAVE_TYPE_ARGS:
return 'coda::types::Map<{0}, {1} >'.format(self(ty.getKeyType()), self(ty.getValueType()))
return 'coda::types::Map<{0}, {1}>'.format(self(ty.getKeyType()), self(ty.getValueType()))
def visitStructType(self, ty):
return self.nameFormatter(ty)
def visitEnumType(self, ty):
return 'coda::types::Enum<{0}_DESCRIPTOR>'.format(self.nameFormatter(ty))
def visitModifiedType(self, ty):
return 'coda::types::Modified<{0}, {1}, {2}>'.format(
self(ty.getElementType()),
('true' if ty.isConst() else 'false'),
('true' if ty.isShared() else 'false'))
class DefaultValueProducer(genbase.AbstractTypeTransform):
'''Transform a type into its default value.'''
def __init__(self, nameFormatter):
super().__init__()
self.nameFormatter = nameFormatter
def visitType(self, ty, *args):
raise AssertionError('Type not handled: ' + str(ty))
def visitBooleanType(self, ty, options):
return 'false'
def visitIntegerType(self, ty, options):
return '0'
def visitFloatType(self, ty, options):
return '0'
def visitDoubleType(self, ty, options):
return '0'
def visitStringType(self, ty, options):
return None
def visitBytesType(self, ty, options):
return None
def visitListType(self, ty, options):
return None
def visitSetType(self, ty, options):
return None
def visitMapType(self, ty, options):
return None
def visitStructType(self, ty, options):
if options.isNullable():
return 'NULL'
else:
return '&{0}::DEFAULT_INSTANCE'.format(getQualName(ty))
def visitEnumType(self, ty, options):
# TODO: get first member
if len(ty.getValues()) > 0:
value = ty.getValues()[0]
prefix = toUpperUnderscore(ty.getName())
return '{0}_{1}'.format(prefix, value.getName())
return '({0}) 0'.format(self.nameFormatter(ty))
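# Illustrative example (hypothetical enum): for 'enum Color { RED, GREEN }'
# the default produced above is 'COLOR_RED'; an enum with no values falls
# back to a zero cast such as '(Color) 0'.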
def visitModifiedType(self, ty, options):
return self(ty.getElementType(), options)
class ValueFormatter(genbase.AbstractTypeTransform):
'''Transform a typed expression into its C++ representation.'''
def __init__(self, typeFormatter):
super().__init__()
self.formatType = typeFormatter
def visitType(self, ty, *args):
raise AssertionError('Type not handled: ' + str(ty))
def visitBooleanType(self, ty, value):
return 'true' if value else 'false'
def visitIntegerType(self, ty, value):
return repr(value)
def visitFloatType(self, ty, value):
return repr(value)
def visitDoubleType(self, ty, value):
return repr(value)
def visitStringType(self, ty, value):
# TODO: Escapes
return '"' + value + '"'
def visitBytesType(self, ty, value):
return repr(value)
def visitListType(self, ty, value):
result = ['coda::descriptors::StaticListBuilder<{0}>()'.format(
self.formatType(ty.getElementType(), False, False))]
for el in value:
result.append('add(' + self(ty.getElementType(), el) + ')')
result.append('build()')
return '.'.join(result)
# return '[' + ', '.join(self(ty.getElementType(), el) for el in value) + ']'
def visitSetType(self, ty, value):
assert False, 'Not implemented: set constants'
def visitMapType(self, ty, value):
assert False, 'This should not be called'
# return '{' + ', '.join(
# self(ty.getKeyType(), k) + ': ' +
# self(ty.getValueType(), value[k]) for k in sorted(value.keys())) + '}'
def visitStructType(self, ty, value):
return repr(value)
def visitEnumType(self, ty, value):
return repr(value)
class AbstractCppGenerator(genbase.CodeGenerator):
'''Abstract base class for the C++ code generators for CODA classes.'''
def __init__(self, optionScope, options):
super().__init__(optionScope, options)
self.formatTypeName = TypeNameFormatter(self)
self.formatType = CppTypeFormatter(self.formatTypeName)
self.defaultValueOf = DefaultValueProducer(self.formatTypeName)
self.describeType = TypeDescriptorFormatter(self.formatTypeName)
self.formatValue = ValueFormatter(self.formatType)
self.typeKinds = {}
self.computeTypeKindValues()
# TODO: Verify package name
def beginFile(self, fd, fileOptions):
self.formatTypeName.setFile(fd)
self.fileOptions = fileOptions
@abstractmethod
def getPathPrefix(self):
return None
def calcSourcePath(self, fd, options, decl, extension=None):
'''@type fd: coda.descriptors.FileDescriptor
@type options: FileOptions
Method to calculate the file path to write a descriptor to.'''
if options.filepath:
path = options.filepath
elif options.package:
path = os.path.join(*options.package.split('::'))
else:
path = fd.getDirectory()
prefix = self.getPathPrefix()
if prefix:
path = os.path.join(prefix, path)
if decl:
# This is the case for each type being in its own file
# FIXME: I don't think this will actually work
path = os.path.join(path, fd.getName(), decl.getName())
else:
# This is the case where all types are in a single file
pass
if not extension:
extension = self.fileExtension()
return path + extension
def calcHeaderPath(self, fd, options, decl):
'''@type fd: coda.descriptors.FileDescriptor
@type options: FileOptions
Method to calculate the file path to write a descriptor to.'''
if options.filepath:
path = options.filepath
elif options.package:
path = os.path.join(*options.package.split('::'))
else:
path = fd.getDirectory()
if decl:
# This is the case for each type being in its own file
# FIXME: I don't think this will actually work
path = os.path.join(path, fd.getName(), decl.getName())
else:
# This is the case where all types are in a single file
pass
return path + '.h'
def calcHeaderGuard(self, fd, options, decl):
if options.filepath:
parts = options.filepath.upper().split('/')
elif options.package:
parts = [name.upper() for name in options.package.split('::')]
else:
parts = []
if decl:
# This is the case for each type being in its own file
# FIXME: I don't think this will actually work
parts.append(decl.getName().upper())
else:
# This is the case where all types are in a single file
pass
return '_'.join(parts)
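# Illustrative example (hypothetical options): filepath 'coda/runtime/object'
# with no per-type file yields the guard 'CODA_RUNTIME_OBJECT'.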
RESERVED_WORDS = frozenset([
'class', 'struct', 'extern', 'auto',
'if', 'for', 'while', 'switch',
'default',
'void', 'int', 'unsigned', 'float', 'double', 'long', 'short', 'char',
'bool', 'signed', 'const'
])
def requiresOutOfLineConstructor(self, struct):
'''@type struct: coda.descriptors.StructType'''
for field in struct.getFields():
ftype = types.unmodified(field.getType())
if (ftype.typeId() == types.TypeKind.STRUCT and not field.getOptions().isNullable()):
return True
return False
@staticmethod
def varName(name):
if name in AbstractCppGenerator.RESERVED_WORDS:
return 'v_' + name
else:
return name
def writeBanner(self, msg, *args):
self.writeLn('// ' + '=' * 76)
self.writeLnFmt('// {0}', msg.format(*args))
self.writeLn('// ' + '=' * 76)
self.writeLn()
def capitalize(self, s):
'@type s: string'
return s[0].upper() + s[1:]
def formatPackage(self, packageName):
return '::'.join(packageName.split('.'))
def computeTypeKindValues(self):
for value in types.TypeKind.DESCRIPTOR.getValues():
self.typeKinds[value.getValue()] = value.getName()
def formatTypeKind(self, kind):
return 'coda::descriptors::TYPE_KIND_' + self.typeKinds[int(kind)]
class CppHeaderGenerator(AbstractCppGenerator):
'''C++ header file code generator for CODA classes.'''
def getPathPrefix(self):
return self.options.getOption('prefix_h')
def fileExtension(self):
return '.h'
def getFileOutputDir(self):
return self.headerOutputDir
def genHeader(self, fd):
'''@type fd: coda.descriptors.FileDescriptor'''
self.writeBanner('Generated by codagen from {0}.coda. DO NOT EDIT!',
fd.getName())
guard = self.calcHeaderGuard(fd, self.fileOptions, None)
self.writeLnFmt('#ifndef {0}', guard)
self.writeLnFmt('#define {0} 1', guard)
self.writeLn()
def genImports(self, fd):
'''@type fd: coda.descriptors.FileDescriptor'''
self.writeLn('#ifndef CODA_RUNTIME_OBJECT_H')
self.writeLn(' #include "coda/runtime/object.h"')
self.writeLn('#endif')
self.writeLn()
self.writeLn('#include <bitset>')
self.writeLn('#include <stdint.h>')
for imp in self.getScopedOption(fd.getOptions().getImports(), ()):
self.writeLnFmt('#include "{0}"', imp)
self.writeLn()
for ns in self.fileOptions.package.split('::'):
self.writeLnFmt('namespace {0} {{', ns)
self.writeLn()
# Compute the list of types that need to be forward-declared.
forward = self.getForwardDeclarations(fd)
if forward:
self.writeLn('// Forward declarations')
for ty in forward:
self.writeLnFmt('class {0};', ty)
self.writeLn()
def beginStruct(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
self.writeBanner('{0}', struct.getName())
if struct.getBaseType():
baseType = struct.getBaseType().getName()
else:
baseType = 'coda::runtime::Object'
mixinType = genbase.getScopedOption(
struct.getOptions().getMixin(),
self.optionScope)
if mixinType:
baseType += ', public ' + mixinType
self.writeLnFmt('class {0} : public {1} {{', struct.getName(), baseType)
self.writeLn('public:')
presentable = self.getPresentableFields(struct)
if presentable:
self.indent()
self.writeLn('enum FieldPresentBits {')
self.indent()
for field in presentable:
self.writeLnFmt('HAS_{0},', toUpperUnderscore(field.getName()))
self.unindent()
self.writeLn('};')
self.writeLn()
self.unindent()
def endStruct(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
if struct.getFields():
self.writeLn()
self.indent()
self.writeLn('static coda::descriptors::StructDescriptor DESCRIPTOR;')
self.writeLnFmt('static {0} DEFAULT_INSTANCE;', struct.getName())
if struct.hasTypeId() and False:
self.writeLn('static const uint32_t TYPE_ID = {0};'.format(struct.getTypeId()))
else:
self.writeLn('static const uint32_t TYPE_ID;')
self.unindent()
if struct.getFields():
self.writeLn()
self.writeLn('private:')
self.indent()
presentable = self.getPresentableFields(struct)
if len(presentable) > 0:
self.writeLnFmt('std::bitset<{0}> fieldsPresent;', len(presentable))
for field in struct.getFields():
self.writeLnFmt('{0} _{1};',
self.formatType(field.getType(), False, False), field.getName())
self.writeLn()
if len(presentable) > 0:
self.writeLn('bool isFieldPresent(size_t index) const {')
self.writeLn(' return fieldsPresent[index];')
self.writeLn('}')
self.writeLn()
self.writeLn('void setFieldPresent(size_t index, bool present) {')
self.writeLn(' fieldsPresent[index] = present;')
self.writeLn('}')
self.writeLn()
for field in struct.getFields():
self.writeLnFmt('static coda::descriptors::FieldDescriptor Field_{0};', field.getName())
self.writeLn('static coda::descriptors::FieldDescriptor* Fields[];')
self.unindent()
self.writeLn('};')
self.writeLn()
def genStructConstructor(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
if self.requiresOutOfLineConstructor(struct):
self.writeLnFmt('{0}();', struct.getName())
else:
self.writeLnFmt('{0}()', struct.getName())
self.indent()
delim = ':'
for field in struct.getFields():
value = self.defaultValueOf(field.getType(), field.getOptions())
if value:
self.writeLnFmt('{0} _{1}({2})', delim, field.getName(), value)
delim = ','
self.unindent()
self.writeLn('{}')
self.writeLn()
self.writeLnFmt('{0}(const {0}& _src)', struct.getName())
self.indent()
delim = ':'
if struct.getBaseType():
self.writeLnFmt(': {0}(_src)', struct.getBaseType().getName())
delim = ','
for field in struct.getFields():
self.writeLnFmt('{0} _{1}(_src._{1})', delim, field.getName())
delim = ','
self.unindent()
self.writeLn('{}')
def genStructStdMethods(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
self.writeLn()
self.writeLn('coda::descriptors::StructDescriptor* descriptor() const {')
self.indent()
self.writeLn('return &DESCRIPTOR;')
self.unindent()
self.writeLn('}')
self.writeLn()
# New instance
# self.writeLnFmt('static {0}* newInstance() {{', struct.getName())
# self.indent()
# self.writeLnFmt('return new {0}();', struct.getName())
# self.unindent()
# self.writeLn('}')
# self.writeLn()
# Clone method
self.writeLn('coda::runtime::Object* clone() const {')
self.indent()
self.writeLnFmt('return new {0}(*this);', struct.getName())
self.unindent()
self.writeLn('}')
self.writeLn()
# Equality comparison
if struct.getFields():
self.writeLn('bool equals(const coda::runtime::Object* other) const;')
# Hashing
if struct.getFields():
self.writeLn('size_t hashValue() const;')
# Freezing and clearing
if struct.getFields():
self.writeLn('void clear();')
self.writeLn('void freezeImpl();')
self.writeLn('void deleteRecursiveImpl(Object** freeList);')
# Serialization
if struct.getBaseType():
self.writeLn('void beginWrite(coda::io::Encoder* encoder) const;')
if self.getWriteableFields(struct) or struct.getBaseType():
self.writeLn('void endWrite(coda::io::Encoder* encoder) const;')
def beginEnum(self, fd, enum):
self.writeLnFmt('enum {0} {{', enum.getName())
self.indent()
prefix = toUpperUnderscore(enum.getName())
for value in enum.getValues():
self.writeLnFmt('{0}_{1} = {2},', prefix, value.getName(), value.getValue())
self.unindent()
self.writeLn('};')
if not enum.hasEnclosingType():
self.writeLnFmt('extern coda::descriptors::EnumDescriptor {0}_DESCRIPTOR;', enum.getName())
def endEnum(self, fd, enum):
'''@type fd: coda.descriptors.FileDescriptor'''
self.writeLn()
def genFieldAccessors(self, fd, struct, field):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType
@type field: coda.descriptors.StructType.Field'''
fty = types.unmodified(field.getType())
if self.isFieldPresentable(field):
self.genFieldPresentGetter(fd, struct, field)
self.genFieldGetter(fd, struct, field)
if fty.typeId() in self.MUTABLE_TYPES:
self.genFieldMutableGetter(fd, struct, field)
if fty.typeId() == types.TypeKind.MAP:
self.genFieldPutter(fd, struct, field)
self.genFieldSetter(fd, struct, field)
self.genFieldClear(fd, struct, field)
def genFieldPresentGetter(self, fd, struct, field):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType
@type field: coda.descriptors.StructType.Field'''
self.writeLn()
self.writeLnFmt('bool has{0}() const {{',
self.capitalize(field.getName()))
self.indent()
self.writeLnFmt('return fieldsPresent.test(HAS_{0});',
toUpperUnderscore(field.getName()))
self.unindent()
self.writeLn('}')
def genFieldGetter(self, fd, struct, field):
self.writeLn()
if field.getType().typeId() == types.TypeKind.BOOL:
self.writeLnFmt('bool is{0}() const {{', self.capitalize(field.getName()))
else:
self.writeLnFmt('{0} get{1}() const {{',
self.formatType(field.getType(), True, True),
self.capitalize(field.getName()))
self.indent()
self.writeLnFmt('return _{0};', field.getName())
self.unindent()
self.writeLn('}')
def genFieldMutableGetter(self, fd, struct, field):
self.writeLn()
self.writeLnFmt('{0} getMutable{1}() {{',
self.formatType(field.getType(), False, True),
self.capitalize(field.getName()))
self.indent()
self.writeLn('checkMutable();')
if self.isFieldPresentable(field):
self.writeLnFmt('fieldsPresent.set(HAS_{0});',
toUpperUnderscore(field.getName()))
self.writeLnFmt('return _{0};', field.getName())
self.unindent()
self.writeLn('}')
def genFieldSetter(self, fd, struct, field):
name = self.varName(field.getName())
ftype = types.unmodified(field.getType())
self.writeLn()
self.writeLnFmt('{0} set{1}({2} {3}) {{',
struct.getName() + '&',
self.capitalize(field.getName()),
self.formatType(field.getType(), ftype.typeId() != types.TypeKind.STRUCT, True),
name)
self.indent()
self.writeLn('checkMutable();')
if self.isFieldPresentable(field):
self.writeLnFmt('fieldsPresent.set(HAS_{0});',
toUpperUnderscore(field.getName()))
self.writeLnFmt('_{0} = {1};', field.getName(), name)
self.writeLn('return *this;')
self.unindent()
self.writeLn('}')
def genFieldPutter(self, fd, struct, field):
ftype = types.unmodified(field.getType())
self.writeLn()
self.writeLnFmt('{0} put{1}({2} key, {3} value) {{',
struct.getName() + '&',
self.capitalize(field.getName()),
self.formatType(field.getType().getKeyType(), True, True),
self.formatType(field.getType().getValueType(),
ftype.getValueType().typeId() != types.TypeKind.STRUCT, True))
self.indent()
self.writeLn('checkMutable();')
if self.isFieldPresentable(field):
self.writeLnFmt('fieldsPresent.set(HAS_{0});',
toUpperUnderscore(field.getName()))
self.writeLnFmt('_{0}[key] = value;', field.getName())
self.writeLn('return *this;')
self.unindent()
self.writeLn('}')
def genFieldClear(self, fd, struct, field):
self.writeLn()
self.writeLnFmt('{0} clear{1}() {{',
struct.getName() + '&',
self.capitalize(field.getName()))
self.indent()
self.writeLn('checkMutable();')
value = self.defaultValueOf(field.getType(), field.getOptions())
if self.isFieldPresentable(field):
self.writeLnFmt('fieldsPresent.reset(HAS_{0});',
toUpperUnderscore(field.getName()))
if value:
self.writeLnFmt('_{0} = {1};', field.getName(), value)
else:
self.writeLnFmt('_{0}.clear();', field.getName())
self.writeLn('return *this;')
self.unindent()
self.writeLn('}')
def genMethod(self, fd, struct, method):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType
@type method: coda.descriptors.StructType.Method'''
params = ['{0} {1}'.format(self.formatType(p.getType(), True, True), self.varName(p.getName()))
for p in method.getParams()]
self.writeLn()
self.writeLnFmt('{0} {1}({2}){3};',
self.formatType(method.getType().getReturnType(), True, False),
method.getName(),
', '.join(params),
' const' if method.getOptions().isConst() else '')
def endFile(self, fd):
'@type fd: coda.descriptors.FileDescriptor'
self.writeLn('extern coda::descriptors::StaticFileDescriptor FILE;')
self.writeLn()
for ns in self.fileOptions.package.split('::'):
self.writeLnFmt('}} // namespace {0}', ns)
self.writeLn()
self.writeLnFmt('#endif // {0}',
self.calcHeaderGuard(fd, self.fileOptions, None))
self.writeLn()
def getForwardDeclarations(self, fd):
forward = set()
defined = set()
def checkDefined(ty):
if ty.typeId() == types.TypeKind.STRUCT:
while ty.getEnclosingType():
ty = ty.getEnclosingType()
if ty.getFile() == fd and id(ty) not in defined:
forward.add(ty.getName())
elif (ty.typeId() == types.TypeKind.LIST or
ty.typeId() == types.TypeKind.SET):
checkDefined(ty.getElementType())
elif ty.typeId() == types.TypeKind.MAP:
checkDefined(ty.getKeyType())
checkDefined(ty.getValueType())
elif ty.typeId() == types.TypeKind.MODIFIED:
checkDefined(ty.getElementType())
def findTypes(struct):
defined.add(id(struct))
for st in struct.getStructs():
findTypes(st)
for field in struct.getFields():
checkDefined(field.getType())
for st in fd.getStructs():
findTypes(st)
return sorted(forward)
class CppGenerator(AbstractCppGenerator):
'''C++ source file code generator for CODA classes.'''
def fileExtension(self):
return '.cpp'
def getPathPrefix(self):
return self.options.getOption('prefix_cpp')
def beginFile(self, fd, fileOptions):
super().beginFile(fd, fileOptions)
self.registerUniqueOptions(fd)
def genHeader(self, fd):
self.writeBanner('Generated by codagen from {0}.coda. DO NOT EDIT!',
fd.getName())
def genImports(self, fd):
'''@type fd: coda.descriptors.FileDescriptor'''
self.writeLn('#include "coda/types.h"')
self.writeLn('#include "coda/io/codec.h"')
self.writeLnFmt('#include "{0}"',
self.calcHeaderPath(fd, self.fileOptions, None))
for imp in self.getScopedOption(fd.getOptions().getImports(), ()):
self.writeLnFmt('#include "{0}"', imp)
self.writeLn('#include "coda/runtime/descriptors_static.h"')
self.writeLn()
def beginDecls(self, fd):
'''@type fd: coda.descriptors.FileDescriptor'''
for ns in self.fileOptions.package.split('::'):
self.writeLnFmt('namespace {0} {{', ns)
self.writeLn()
# Table of all options
if self.getUniqueOptions():
index = 0
for opts in self.getUniqueOptions():
self.writeLnFmt('coda::descriptors::{0} _options{1} = {2};',
opts.descriptor().getName(), index, self.formatOptions(opts))
index += 1
self.writeLn()
def beginStruct(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
self.writeBanner('{0}', struct.getName())
structQualName = getQualName(struct)
# Table of field definitions
presentable = ()
if struct.getFields():
presentable = self.getPresentableFields(struct)
for field in struct.getFields():
self.writeLnFmt(
'coda::descriptors::FieldDescriptor {0}::Field_{1}(',
structQualName, field.getName())
self.indent(2)
self.writeLnFmt('"{0}", {1},', field.getName(), field.getId())
if field.getType().typeId() == types.TypeKind.ENUM:
self.writeLnFmt('{0}_DESCRIPTOR,', field.getType().getName())
else:
self.writeLnFmt('{0}::DESCRIPTOR,', self.describeType(field.getType()))
optionId = self.optionIdOf(field.getOptions())
if optionId >= 0:
self.writeLnFmt('_options{0},', optionId)
else:
self.writeLn('coda::descriptors::FieldOptions::DEFAULT_INSTANCE,')
self.writeLnFmt('CODA_OFFSET_OF({0}, _{1}),', structQualName, field.getName())
if self.isFieldPresentable(field):
self.writeLnFmt('{0}::HAS_{1});', structQualName, toUpperUnderscore(field.getName()))
else:
self.writeLnFmt('(size_t)-1);')
self.unindent(2)
self.writeLn()
if struct.getStructs():
structArray = '{0}_Structs'.format(struct.getName())
self.writeLnFmt(
'static coda::descriptors::StructDescriptor* {0}_Structs[] = {{',
struct.getName())
self.indent()
for st in struct.getStructs():
self.writeLnFmt('&{0}::DESCRIPTOR,', getQualName(st))
self.unindent()
self.writeLn('};')
self.writeLn()
else:
structArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::StructDescriptor*>()'
if struct.getEnums():
enumArray = '{0}_Enums'.format(struct.getName())
self.writeLnFmt(
'static coda::descriptors::EnumDescriptor* {0}_Enums[] = {{',
struct.getName())
self.indent()
for en in struct.getEnums():
self.writeLnFmt('&{0}::DESCRIPTOR,', getQualName(en))
self.unindent()
self.writeLn('};')
self.writeLn()
else:
enumArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::EnumDescriptor*>()'
if struct.getFields():
fieldArray = '{0}::Fields'.format(struct.getName())
self.writeLnFmt('coda::descriptors::FieldDescriptor* {0}::Fields[] = {{', structQualName)
self.indent()
for field in struct.getFields():
self.writeLnFmt('&{0}::Field_{1},', getQualName(struct), field.getName())
self.unindent()
self.writeLn('};')
self.writeLn()
else:
fieldArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::FieldDescriptor*>()'
self.writeLnFmt('const uint32_t {0}::TYPE_ID = {1};', getQualName(struct), struct.getTypeId())
self.writeLn()
self.writeLnFmt('coda::descriptors::StructDescriptor {0}::DESCRIPTOR(', getQualName(struct))
self.indent()
self.writeLnFmt('"{0}",', structQualName)
self.writeLnFmt('{0},', struct.getTypeId())
self.writeLnFmt('&{0}::DEFAULT_INSTANCE,', getQualName(struct))
self.writeLn('FILE,')
if struct.getEnclosingType():
self.writeLnFmt('&{0}::DESCRIPTOR,', getQualName(struct.getEnclosingType()))
else:
self.writeLn('NULL,')
if struct.getBaseType():
self.writeLnFmt('&{0}::DESCRIPTOR,', getQualName(struct.getBaseType()))
else:
self.writeLn('NULL,')
optionId = self.optionIdOf(struct.getOptions())
if optionId >= 0:
self.writeLnFmt('_options{0},', optionId)
else:
self.writeLn('coda::descriptors::StructOptions::DEFAULT_INSTANCE,')
self.writeLn(structArray + ',')
self.writeLn(enumArray + ',')
self.writeLn(fieldArray + ',')
self.writeLnFmt('&coda::descriptors::StaticObjectBuilder<{0}>::create,', structQualName)
if len(presentable) > 0:
self.writeLnFmt('(coda::descriptors::PresenceGetter) &{0}::isFieldPresent,',
getQualName(struct))
self.writeLnFmt('(coda::descriptors::PresenceSetter) &{0}::setFieldPresent',
getQualName(struct))
else:
self.writeLn('NULL, NULL')
self.unindent()
self.writeLn(');')
self.writeLn()
self.unindent()
self.writeLnFmt('{0} {0}::DEFAULT_INSTANCE;', getQualName(struct))
self.writeLn()
def endStruct(self, fd, struct):
self.indent()
def genStructConstructor(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
if self.requiresOutOfLineConstructor(struct):
self.writeLnFmt('{0}::{1}()', getQualName(struct), struct.getName())
self.indent()
delim = ':'
for field in struct.getFields():
value = self.defaultValueOf(field.getType(), field.getOptions())
if value:
self.writeLnFmt('{0} _{1}({2})', delim, field.getName(), value)
delim = ','
self.unindent()
self.writeLn('{')
self.indent()
self.unindent()
self.writeLn('}')
self.writeLn()
def genStructStdMethods(self, fd, struct):
'''@type fd: coda.descriptors.FileDescriptor
@type struct: coda.descriptors.StructType'''
structQualName = getQualName(struct)
# Compute the name of the nearest ancestor class that has fields.
base = struct.getBaseType()
while base and not base.getFields():
base = base.getBaseType()
if base:
baseName = self.formatTypeName(base)
else:
baseName = 'coda::runtime::Object'
# Equality comparison
if struct.getFields():
self.writeLnFmt('bool {0}::equals(const coda::runtime::Object* other) const {{',
structQualName)
self.indent()
self.writeIndent()
self.write('return ')
if struct.getBaseType():
self.writeFmt('{0}::equals(other)',
self.formatTypeName(struct.getBaseType()))
else:
self.write('other != NULL && descriptor() == other->descriptor()')
self.indent()
for field in struct.getFields():
self.write(' &&\n')
self.writeIndent()
self.writeIndent()
self.write('_{0} == (({1}*) other)->_{0}'.format(
field.getName(), struct.getName()))
self.write(';\n')
self.unindent()
self.unindent()
self.writeLn('}')
self.writeLn()
# Hashing
if struct.getFields():
self.writeLnFmt('size_t {0}::hashValue() const {{', structQualName)
self.indent()
self.writeLnFmt('size_t hash = {0}::hashValue();', baseName)
for field in struct.getFields():
if field.getType().typeId() == types.TypeKind.ENUM:
hashExpr = 'coda::runtime::EnumHash<{0}>()(_{1})'.format(
self.formatType(field.getType(), False, False),
field.getName())
else:
hashExpr = 'coda::runtime::hash(_{0})'.format(field.getName())
self.writeLnFmt('coda::runtime::hash_combine(hash, {0});', hashExpr)
self.writeLn('return hash;')
self.unindent()
self.writeLn('}')
self.writeLn()
# Freezing and clearing
if struct.getFields():
self.writeLnFmt('void {0}::freezeImpl() {{', structQualName)
self.indent()
if base:
self.writeLnFmt('{0}::freezeImpl();', baseName)
for field in struct.getFields():
fname = field.getName()
ftype = field.getType()
self.genValueFreeze('_' + fname, ftype, field.getOptions())
self.unindent()
self.writeLn('}')
self.writeLn()
self.writeLnFmt('void {0}::clear() {{', structQualName)
self.indent()
if base:
self.writeLnFmt('{0}::clear();', baseName)
for field in struct.getFields():
fname = field.getName()
ftype = field.getType()
self.genValueClear('_' + fname, field)
self.unindent()
self.writeLn('}')
self.writeLn()
self.writeLnFmt('void {0}::deleteRecursiveImpl(Object** queue) {{', structQualName)
self.indent()
if base:
self.writeLnFmt('{0}::deleteRecursiveImpl(queue);', baseName)
for field in struct.getFields():
fname = field.getName()
ftype = field.getType()
self.genValueDelete('_' + fname, ftype, field.getOptions())
self.unindent()
self.writeLn('}')
self.writeLn()
# Serialization
if struct.getBaseType():
self.writeLnFmt(
'void {0}::beginWrite(coda::io::Encoder* encoder) const {{', structQualName)
self.indent()
if base:
self.writeLnFmt('{0}::beginWrite(encoder);', baseName)
self.writeLnFmt('encoder->writeSubtypeHeader("{0}", {1});',
struct.getName(), self.formatTypeKind(struct.getTypeId()))
self.unindent()
self.writeLn('}')
self.writeLn()
writeableFields = self.getWriteableFields(struct)
if writeableFields or struct.getBaseType():
self.writeLnFmt(
'void {0}::endWrite(coda::io::Encoder* encoder) const {{', structQualName)
self.indent()
for field in writeableFields:
fty = field.getType()
if isinstance(fty, types.CollectionType):
self.writeLnFmt('if (!_{0}.empty()) {{', field.getName())
else:
self.writeLnFmt('if (has{0}()) {{', self.capitalize(field.getName()))
self.indent()
self.writeLnFmt(
'encoder->writeFieldHeader("{0}", {1});',
field.getName(), field.getId())
self.genValueWrite('_' + field.getName(), fty, field.getOptions())
self.unindent()
self.writeLn('}')
if struct.getBaseType():
self.writeLn('encoder->writeEndSubtype();')
if base:
self.writeLnFmt('{0}::endWrite(encoder);', baseName)
self.unindent()
self.writeLn('}')
self.writeLn()
def isFreezableType(self, ty):
if ty.typeId() == types.TypeKind.STRUCT:
return True
elif ty.typeId() == types.TypeKind.LIST or ty.typeId() == types.TypeKind.SET:
return self.isFreezableType(ty.getElementType())
elif ty.typeId() == types.TypeKind.MODIFIED:
return self.isFreezableType(ty.getElementType())
elif ty.typeId() == types.TypeKind.MAP:
return self.isFreezableType(ty.getKeyType()) or self.isFreezableType(ty.getValueType())
else:
return False
def genValueFreeze(self, var, ftype, options, level=0):
fkind = ftype.typeId()
iteratorName = 'it' if level == 0 else 'i' + str(level)
if fkind == types.TypeKind.LIST:
if self.isFreezableType(ftype.getElementType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueFreeze('(*' + iteratorName + ')', ftype.getElementType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.SET:
if self.isFreezableType(ftype.getElementType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueFreeze('(*' + iteratorName + ')', ftype.getElementType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.MAP:
if self.isFreezableType(ftype.getKeyType()) or self.isFreezableType(ftype.getValueType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
if self.isFreezableType(ftype.getKeyType()):
self.genValueFreeze(iteratorName + '->first', ftype.getKeyType(), options, level + 1)
if self.isFreezableType(ftype.getValueType()):
self.genValueFreeze(iteratorName + '->second', ftype.getValueType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.MODIFIED:
self.genValueFreeze(var, ftype.getElementType(), options, level)
elif fkind == types.TypeKind.STRUCT:
if options.isNullable():
self.writeLnFmt('if ({0} && {0}->isMutable()) {{', var)
else:
self.writeLnFmt('if ({0}->isMutable()) {{', var)
self.indent()
self.writeLnFmt('{0}->freeze();', var)
self.unindent()
self.writeLn("}")
def genValueClear(self, var, field):
fname = field.getName()
ftype = field.getType()
while ftype.typeId() == types.TypeKind.MODIFIED:
ftype = ftype.getElementType()
fkind = ftype.typeId()
if fkind == types.TypeKind.LIST or fkind == types.TypeKind.SET or fkind == types.TypeKind.MAP:
self.writeLnFmt('{0}.clear();', var)
else:
value = self.defaultValueOf(field.getType(), field.getOptions())
if value:
self.writeLnFmt('{0} = {1};', var, value)
def genValueDelete(self, var, ftype, options, level=0):
fkind = ftype.typeId()
iteratorName = 'it' if level == 0 else 'i' + str(level)
if fkind == types.TypeKind.LIST:
if self.isFreezableType(ftype.getElementType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueDelete('(*' + iteratorName + ')', ftype.getElementType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.SET:
if self.isFreezableType(ftype.getElementType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueDelete('(*' + iteratorName + ')', ftype.getElementType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.MAP:
if self.isFreezableType(ftype.getKeyType()) or self.isFreezableType(ftype.getValueType()):
typeName = self.formatType(ftype, False, False)
self.beginForLoop(typeName, var, iteratorName)
if self.isFreezableType(ftype.getKeyType()):
self.genValueDelete(iteratorName + '->first', ftype.getKeyType(), options, level + 1)
if self.isFreezableType(ftype.getValueType()):
self.genValueDelete(iteratorName + '->second', ftype.getValueType(), options, level + 1)
self.endForLoop()
elif fkind == types.TypeKind.MODIFIED:
self.genValueDelete(var, ftype.getElementType(), options, level)
elif fkind == types.TypeKind.STRUCT:
defaultVal = self.defaultValueOf(ftype, options)
self.writeLnFmt('if ({0} != {1}) {{', var, defaultVal)
self.indent()
self.writeLnFmt('{0}->queueForDelete(queue);', var)
self.unindent()
self.writeLn("}")
def genValueWrite(self, var, fty, options, level=0):
iteratorName = 'it' if level == 0 else 'i' + str(level)
fkind = fty.typeId()
fixedParam = ', true' if options.isFixed() else ''
if fkind == types.TypeKind.BOOL:
self.writeLnFmt("encoder->writeBoolean({0});", var)
elif fkind == types.TypeKind.INTEGER:
if options.isFixed():
self.writeLnFmt("encoder->writeFixed{1}({0});", var, fty.getBits())
else:
self.writeLnFmt("encoder->writeInteger({0});", var)
elif fkind == types.TypeKind.FLOAT:
self.writeLnFmt("encoder->writeFloat({0});", var)
elif fkind == types.TypeKind.DOUBLE:
self.writeLnFmt("encoder->writeDouble({0});", var)
elif fkind == types.TypeKind.STRING:
self.writeLnFmt("encoder->writeString({0});", var)
elif fkind == types.TypeKind.BYTES:
self.writeLnFmt("encoder->writeBytes({0});", var)
elif fkind == types.TypeKind.LIST:
self.writeLnFmt(
"encoder->writeBeginList({1}, {0}.size(){2});",
var, self.formatTypeKind(fty.getElementType().typeId()), fixedParam)
typeName = self.formatType(fty, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueWrite('*' + iteratorName, fty.getElementType(), options, level + 1)
self.endForLoop()
self.writeLn("encoder->writeEndList();")
elif fkind == types.TypeKind.SET:
self.writeLnFmt(
"encoder->writeBeginSet({1}, {0}.size(){2});",
var, self.formatTypeKind(fty.getElementType().typeId()), fixedParam)
typeName = self.formatType(fty, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueWrite('*' + iteratorName, fty.getElementType(), options, level + 1)
self.endForLoop()
self.writeLn("encoder->writeEndSet();")
elif fkind == types.TypeKind.MAP:
self.writeLnFmt("encoder->writeBeginMap({1}, {2}, {0}.size());", var,
self.formatTypeKind(fty.getKeyType().typeId()),
self.formatTypeKind(fty.getValueType().typeId()))
typeName = self.formatType(fty, False, False)
self.beginForLoop(typeName, var, iteratorName)
self.genValueWrite(iteratorName + '->first', fty.getKeyType(), options, level + 1)
self.genValueWrite(iteratorName + '->second', fty.getValueType(), options, level + 1)
self.endForLoop()
self.writeLn("encoder->writeEndMap();")
elif fkind == types.TypeKind.STRUCT:
self.writeLnFmt("encoder->writeStruct({0}, true);", var)
elif fkind == types.TypeKind.ENUM:
self.writeLnFmt("encoder->writeInteger((int32_t) {0});", var)
elif fkind == types.TypeKind.MODIFIED:
if fty.isShared():
assert isinstance(fty.getElementType(), types.StructType)
self.writeLnFmt("encoder->writeStruct({0}, true);", var)
else:
self.genValueWrite(var, fty.getElementType(), options)
else:
assert False, 'Illegal type kind: ' + str(fkind)
def formatOptions(self, opts):
def setOptionValues(opts, struct):
if struct.getBaseType():
setOptionValues(opts, struct.getBaseType())
for field in struct.getFields():
if field.isPresent(opts):
value = field.getValue(opts)
ftype = field.getType()
if ftype.typeId() == types.TypeKind.LIST or ftype.typeId() == types.TypeKind.SET:
for v in value:
result.append('add{0}({1})'.format(
self.capitalize(field.getName()),
self.formatValue(ftype.getElementType(), v)))
if ftype.typeId() == types.TypeKind.MAP:
for k, v in value.items():
result.append('put{0}({1}, {2})'.format(
self.capitalize(field.getName()),
self.formatValue(ftype.getKeyType(), k),
self.formatValue(ftype.getValueType(), v)))
else:
result.append('set{0}({1})'.format(
self.capitalize(field.getName()),
self.formatValue(ftype, value)))
result = []
result.append('coda::descriptors::{0}()'.format(opts.descriptor().getName()))
setOptionValues(opts, opts.descriptor())
return 'coda::descriptors::freeze(' + '.'.join(result) + ')'
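# For reference, the expression produced above chains setter calls onto a
# freshly constructed options object and freezes it; with an illustrative
# boolean option field it would look like:
#   coda::descriptors::freeze(coda::descriptors::StructOptions().setShared(true))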
def beginEnum(self, fd, enum):
self.writeBanner('{0}', enum.getName())
if enum.getValues():
enumPrefix = getQualName(enum).replace('::', '_')
enumScope = getEnclosingQualName(enum)
valueArray = '{0}_Values'.format(getQualName(enum))
for val in enum.getValues():
self.writeLnFmt(
'static coda::descriptors::EnumDescriptor::Value {0}_Value_{1}("{1}", {2}{3}_{4});',
enumPrefix, val.getName(), enumScope, toUpperUnderscore(enum.getName()), val.getName())
self.writeLn()
self.writeLnFmt('static coda::descriptors::EnumDescriptor::Value* {0}[] = {{', valueArray)
self.indent()
for val in enum.getValues():
self.writeLnFmt('&{0}_Value_{1},', enumPrefix, val.getName())
self.unindent()
self.writeLn('};')
self.writeLn()
else:
valueArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::EnumDescriptor::Value*>()'
self.writeLnFmt('coda::descriptors::EnumDescriptor {0}_DESCRIPTOR(', enum.getName())
self.indent(2)
self.writeLnFmt('"{0}",', enum.getName())
optionId = self.optionIdOf(enum.getOptions())
if optionId >= 0:
self.writeLnFmt('_options{0},', optionId)
else:
self.writeLn('coda::descriptors::EnumOptions::DEFAULT_INSTANCE,')
self.writeLnFmt('{0}', valueArray)
self.unindent(2)
self.writeLn(');')
def endEnum(self, fd, enum):
'''@type fd: coda.descriptors.FileDescriptor'''
self.writeLn()
def endFile(self, fd):
'@type fd: coda.descriptors.FileDescriptor'
self.writeBanner('FILE')
if fd.getStructs():
structArray = 'FILE_Structs'
self.writeLn('static coda::descriptors::StructDescriptor* FILE_Structs[] = {')
self.indent()
for st in fd.getStructs():
self.writeLnFmt('&{0}::DESCRIPTOR,', st.getName())
self.unindent()
self.writeLn('};')
self.writeLn()
else:
structArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::StructDescriptor*>()'
if fd.getEnums():
enumArray = 'FILE_Enums'
self.writeLn('static coda::descriptors::EnumDescriptor* FILE_Enums[] = {')
self.indent()
for en in fd.getEnums():
self.writeLnFmt('&{0}_DESCRIPTOR,', en.getName())
self.unindent()
self.writeLn('};')
self.writeLn()
else:
enumArray = 'coda::descriptors::StaticArrayRef<coda::descriptors::EnumDescriptor*>()'
self.writeLn('coda::descriptors::StaticFileDescriptor FILE(')
self.indent(2)
self.writeLnFmt('"{0}",', fd.getName())
self.writeLnFmt('"{0}",', fd.getPackage())
optionId = self.optionIdOf(fd.getOptions())
if optionId >= 0:
self.writeLnFmt('_options{0},', optionId)
else:
self.writeLn('coda::descriptors::FileOptions::DEFAULT_INSTANCE,')
self.writeLnFmt('{0},', structArray)
self.writeLnFmt('{0}', enumArray)
self.unindent(2)
self.writeLn(');')
self.writeLn()
for ns in self.fileOptions.package.split('::'):
self.writeLnFmt('}} // namespace {0}', ns)
def beginForLoop(self, typeName, varName, iteratorName):
self.writeLnFmt(
"for ({0}::const_iterator {2} = {1}.begin(), {2}End = {1}.end(); {2} != {2}End; ++{2}) {{",
typeName, varName, iteratorName)
self.indent()
def endForLoop(self):
self.unindent()
self.writeLn("}")
def createGenerators(options):
return (CppHeaderGenerator('cpp', options), CppGenerator('cpp', options))
|
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import threading
import time
from oslo_utils import fixture
from oslo_utils import timeutils
from taskflow.engines.worker_based import executor
from taskflow.engines.worker_based import protocol as pr
from taskflow import task as task_atom
from taskflow import test
from taskflow.test import mock
from taskflow.tests import utils as test_utils
from taskflow.types import failure
class TestWorkerTaskExecutor(test.MockTestCase):
def setUp(self):
super(TestWorkerTaskExecutor, self).setUp()
self.task = test_utils.DummyTask()
self.task_uuid = 'task-uuid'
self.task_args = {'a': 'a'}
self.task_result = 'task-result'
self.task_failures = {}
self.timeout = 60
self.broker_url = 'broker-url'
self.executor_uuid = 'executor-uuid'
self.executor_exchange = 'executor-exchange'
self.executor_topic = 'test-topic1'
self.proxy_started_event = threading.Event()
# patch classes
self.proxy_mock, self.proxy_inst_mock = self.patchClass(
executor.proxy, 'Proxy')
self.request_mock, self.request_inst_mock = self.patchClass(
executor.pr, 'Request', autospec=False)
# other mocking
self.proxy_inst_mock.start.side_effect = self._fake_proxy_start
self.proxy_inst_mock.stop.side_effect = self._fake_proxy_stop
self.request_inst_mock.uuid = self.task_uuid
self.request_inst_mock.expired = False
self.request_inst_mock.task_cls = self.task.name
self.message_mock = mock.MagicMock(name='message')
self.message_mock.properties = {'correlation_id': self.task_uuid,
'type': pr.RESPONSE}
def _fake_proxy_start(self):
self.proxy_started_event.set()
while self.proxy_started_event.is_set():
time.sleep(0.01)
def _fake_proxy_stop(self):
self.proxy_started_event.clear()
def executor(self, reset_master_mock=True, **kwargs):
executor_kwargs = dict(uuid=self.executor_uuid,
exchange=self.executor_exchange,
topics=[self.executor_topic],
url=self.broker_url)
executor_kwargs.update(kwargs)
ex = executor.WorkerTaskExecutor(**executor_kwargs)
if reset_master_mock:
self.resetMasterMock()
return ex
def test_creation(self):
ex = self.executor(reset_master_mock=False)
master_mock_calls = [
mock.call.Proxy(self.executor_uuid, self.executor_exchange,
on_wait=ex._on_wait,
url=self.broker_url, transport=mock.ANY,
transport_options=mock.ANY,
retry_options=mock.ANY,
type_handlers=mock.ANY),
mock.call.proxy.dispatcher.type_handlers.update(mock.ANY),
]
self.assertEqual(self.master_mock.mock_calls, master_mock_calls)
def test_on_message_response_state_running(self):
response = pr.Response(pr.RUNNING)
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
expected_calls = [
mock.call.transition_and_log_error(pr.RUNNING, logger=mock.ANY),
]
self.assertEqual(expected_calls, self.request_inst_mock.mock_calls)
def test_on_message_response_state_progress(self):
response = pr.Response(pr.EVENT,
event_type=task_atom.EVENT_UPDATE_PROGRESS,
details={'progress': 1.0})
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
expected_calls = [
mock.call.notifier.notify(task_atom.EVENT_UPDATE_PROGRESS,
{'progress': 1.0}),
]
self.assertEqual(expected_calls, self.request_inst_mock.mock_calls)
def test_on_message_response_state_failure(self):
a_failure = failure.Failure.from_exception(Exception('test'))
failure_dict = a_failure.to_dict()
response = pr.Response(pr.FAILURE, result=failure_dict)
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
self.assertEqual(len(ex._requests_cache), 0)
expected_calls = [
mock.call.transition_and_log_error(pr.FAILURE, logger=mock.ANY),
mock.call.set_result(result=test_utils.FailureMatcher(a_failure))
]
self.assertEqual(expected_calls, self.request_inst_mock.mock_calls)
def test_on_message_response_state_success(self):
response = pr.Response(pr.SUCCESS, result=self.task_result,
event='executed')
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
expected_calls = [
mock.call.transition_and_log_error(pr.SUCCESS, logger=mock.ANY),
mock.call.set_result(result=self.task_result, event='executed')
]
self.assertEqual(expected_calls, self.request_inst_mock.mock_calls)
def test_on_message_response_unknown_state(self):
response = pr.Response(state='<unknown>')
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
self.assertEqual(self.request_inst_mock.mock_calls, [])
def test_on_message_response_unknown_task(self):
self.message_mock.properties['correlation_id'] = '<unknown>'
response = pr.Response(pr.RUNNING)
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
self.assertEqual(self.request_inst_mock.mock_calls, [])
def test_on_message_response_no_correlation_id(self):
self.message_mock.properties = {'type': pr.RESPONSE}
response = pr.Response(pr.RUNNING)
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
ex._process_response(response.to_dict(), self.message_mock)
self.assertEqual(self.request_inst_mock.mock_calls, [])
def test_on_wait_task_not_expired(self):
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
self.assertEqual(len(ex._requests_cache), 1)
ex._on_wait()
self.assertEqual(len(ex._requests_cache), 1)
def test_on_wait_task_expired(self):
now = timeutils.utcnow()
f = self.useFixture(fixture.TimeFixture(override_time=now))
self.request_inst_mock.expired = True
self.request_inst_mock.created_on = now
f.advance_time_seconds(120)
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
self.assertEqual(len(ex._requests_cache), 1)
ex._on_wait()
self.assertEqual(len(ex._requests_cache), 0)
def test_remove_task_non_existent(self):
ex = self.executor()
ex._requests_cache[self.task_uuid] = self.request_inst_mock
self.assertEqual(len(ex._requests_cache), 1)
del ex._requests_cache[self.task_uuid]
self.assertEqual(len(ex._requests_cache), 0)
# delete non-existent
try:
del ex._requests_cache[self.task_uuid]
except KeyError:
pass
self.assertEqual(len(ex._requests_cache), 0)
def test_execute_task(self):
ex = self.executor()
ex._finder._add(self.executor_topic, [self.task.name])
ex.execute_task(self.task, self.task_uuid, self.task_args)
expected_calls = [
mock.call.Request(self.task, self.task_uuid, 'execute',
self.task_args, self.timeout),
mock.call.request.transition_and_log_error(pr.PENDING,
logger=mock.ANY),
mock.call.proxy.publish(self.request_inst_mock,
self.executor_topic,
reply_to=self.executor_uuid,
correlation_id=self.task_uuid)
]
self.assertEqual(expected_calls, self.master_mock.mock_calls)
def test_revert_task(self):
ex = self.executor()
ex._finder._add(self.executor_topic, [self.task.name])
ex.revert_task(self.task, self.task_uuid, self.task_args,
self.task_result, self.task_failures)
expected_calls = [
mock.call.Request(self.task, self.task_uuid, 'revert',
self.task_args, self.timeout,
failures=self.task_failures,
result=self.task_result),
mock.call.request.transition_and_log_error(pr.PENDING,
logger=mock.ANY),
mock.call.proxy.publish(self.request_inst_mock,
self.executor_topic,
reply_to=self.executor_uuid,
correlation_id=self.task_uuid)
]
self.assertEqual(expected_calls, self.master_mock.mock_calls)
def test_execute_task_topic_not_found(self):
ex = self.executor()
ex.execute_task(self.task, self.task_uuid, self.task_args)
expected_calls = [
mock.call.Request(self.task, self.task_uuid, 'execute',
self.task_args, self.timeout),
]
self.assertEqual(self.master_mock.mock_calls, expected_calls)
def test_execute_task_publish_error(self):
self.proxy_inst_mock.publish.side_effect = Exception('Woot!')
ex = self.executor()
ex._finder._add(self.executor_topic, [self.task.name])
ex.execute_task(self.task, self.task_uuid, self.task_args)
expected_calls = [
mock.call.Request(self.task, self.task_uuid, 'execute',
self.task_args, self.timeout),
mock.call.request.transition_and_log_error(pr.PENDING,
logger=mock.ANY),
mock.call.proxy.publish(self.request_inst_mock,
self.executor_topic,
reply_to=self.executor_uuid,
correlation_id=self.task_uuid),
mock.call.request.transition_and_log_error(pr.FAILURE,
logger=mock.ANY),
mock.call.request.set_result(mock.ANY)
]
self.assertEqual(expected_calls, self.master_mock.mock_calls)
def test_start_stop(self):
ex = self.executor()
ex.start()
# make sure proxy thread started
self.assertTrue(self.proxy_started_event.wait(test_utils.WAIT_TIMEOUT))
# stop executor
ex.stop()
self.master_mock.assert_has_calls([
mock.call.proxy.start(),
mock.call.proxy.wait(),
mock.call.proxy.stop()
], any_order=True)
def test_start_already_running(self):
ex = self.executor()
ex.start()
# make sure proxy thread started
self.assertTrue(self.proxy_started_event.wait(test_utils.WAIT_TIMEOUT))
# start executor again
ex.start()
# stop executor
ex.stop()
self.master_mock.assert_has_calls([
mock.call.proxy.start(),
mock.call.proxy.wait(),
mock.call.proxy.stop()
], any_order=True)
def test_stop_not_running(self):
self.executor().stop()
self.assertEqual(self.master_mock.mock_calls, [])
def test_stop_not_alive(self):
self.proxy_inst_mock.start.side_effect = None
# start executor
ex = self.executor()
ex.start()
# stop executor
ex.stop()
# since proxy thread is already done - stop is not called
self.master_mock.assert_has_calls([
mock.call.proxy.start(),
mock.call.proxy.wait()
], any_order=True)
def test_restart(self):
ex = self.executor()
ex.start()
# make sure thread started
self.assertTrue(self.proxy_started_event.wait(test_utils.WAIT_TIMEOUT))
# restart executor
ex.stop()
ex.start()
# make sure thread started
self.assertTrue(self.proxy_started_event.wait(test_utils.WAIT_TIMEOUT))
# stop executor
ex.stop()
self.master_mock.assert_has_calls([
mock.call.proxy.start(),
mock.call.proxy.wait(),
mock.call.proxy.stop(),
mock.call.proxy.start(),
mock.call.proxy.wait(),
mock.call.proxy.stop()
], any_order=True)
|
|
# -*- coding: utf-8 -*-
""" Sahana Eden Document Library
@copyright: 2011-2016 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3DocumentLibrary",
"S3DocSitRepModel",
"S3CKEditorModel",
"doc_image_represent",
"doc_document_list_layout",
)
import json
import os
from gluon import *
from gluon.storage import Storage
from ..s3 import *
# =============================================================================
class S3DocumentLibrary(S3Model):
names = ("doc_entity",
"doc_document",
"doc_document_id",
"doc_image",
)
def model(self):
T = current.T
db = current.db
s3 = current.response.s3
person_comment = self.pr_person_comment
person_id = self.pr_person_id
location_id = self.gis_location_id
organisation_id = self.org_organisation_id
messages = current.messages
NONE = messages["NONE"]
UNKNOWN_OPT = messages.UNKNOWN_OPT
# Shortcuts
configure = self.configure
crud_strings = s3.crud_strings
define_table = self.define_table
folder = current.request.folder
super_link = self.super_link
# ---------------------------------------------------------------------
# Document-referencing entities
#
entity_types = Storage(asset_asset=T("Asset"),
cap_resource=T("CAP Resource"),
cms_post=T("Post"),
cr_shelter=T("Shelter"),
deploy_mission=T("Mission"),
doc_sitrep=T("Situation Report"),
event_incident=T("Incident"),
event_incident_report=T("Incident Report"),
hms_hospital=T("Hospital"),
hrm_human_resource=T("Human Resource"),
inv_adj=T("Stock Adjustment"),
inv_warehouse=T("Warehouse"),
# @ToDo: Deprecate
irs_ireport=T("Incident Report"),
pr_group=T("Team"),
project_project=T("Project"),
project_activity=T("Project Activity"),
project_framework=T("Project Framework"),
project_task=T("Task"),
org_office=T("Office"),
org_facility=T("Facility"),
org_group=T("Organization Group"),
req_req=T("Request"),
# @ToDo: Deprecate
stats_people=T("People"),
vulnerability_document=T("Vulnerability Document"),
vulnerability_risk=T("Risk"),
vulnerability_evac_route=T("Evacuation Route"),
)
tablename = "doc_entity"
self.super_entity(tablename, "doc_id", entity_types)
# Components
doc_id = "doc_id"
self.add_components(tablename,
doc_document = doc_id,
doc_image = doc_id,
)
# ---------------------------------------------------------------------
# Documents
#
tablename = "doc_document"
define_table(tablename,
# Instance
self.stats_source_superlink,
# Component not instance
super_link(doc_id, "doc_entity"),
# @ToDo: Remove since Site Instances are doc entities?
super_link("site_id", "org_site"),
Field("file", "upload",
autodelete = True,
length = current.MAX_FILENAME_LENGTH,
represent = self.doc_file_represent,
# upload folder needs to be visible to the download() function as well as the upload
uploadfolder = os.path.join(folder,
"uploads"),
),
Field("mime_type",
readable = False,
writable = False,
),
Field("name", length=128,
# Allow Name to be added onvalidation
requires = IS_EMPTY_OR(IS_LENGTH(128)),
label = T("Name")
),
Field("url",
label = T("URL"),
represent = lambda url: \
url and A(url, _href=url) or NONE,
requires = IS_EMPTY_OR(IS_URL()),
),
Field("has_been_indexed", "boolean",
default = False,
readable = False,
writable = False,
),
person_id(
# Enable when-required
label = T("Author"),
readable = False,
writable = False,
comment = person_comment(T("Author"),
T("The Author of this Document (optional)"))
),
organisation_id(# Enable when-required
readable = False,
writable = False,
),
s3_date(label = T("Date Published"),
),
# @ToDo: Move location to link table
location_id(# Enable when-required
readable = False,
writable = False,
),
s3_comments(),
Field("checksum",
readable = False,
writable = False,
),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Reference Document"),
title_display = T("Document Details"),
title_list = T("Documents"),
title_update = T("Edit Document"),
label_list_button = T("List Documents"),
label_delete_button = T("Delete Document"),
msg_record_created = T("Document added"),
msg_record_modified = T("Document updated"),
msg_record_deleted = T("Document deleted"),
msg_list_empty = T("No Documents found")
)
# Search Method
# Resource Configuration
if current.deployment_settings.get_base_solr_url():
onaccept = self.document_onaccept
ondelete = self.document_ondelete
else:
onaccept = None
ondelete = None
configure(tablename,
context = {"organisation": "organisation_id",
"person": "person_id",
"site": "site_id",
},
deduplicate = self.document_duplicate,
list_layout = doc_document_list_layout,
onaccept = onaccept,
ondelete = ondelete,
onvalidation = self.document_onvalidation,
super_entity = "stats_source",
)
# Reusable field
represent = doc_DocumentRepresent(lookup = tablename,
fields = ("name", "file", "url"),
labels = "%(name)s",
show_link = True)
document_id = S3ReusableField("document_id", "reference %s" % tablename,
label = T("Document"),
ondelete = "CASCADE",
represent = represent,
requires = IS_ONE_OF(db,
"doc_document.id",
represent),
)
# ---------------------------------------------------------------------
# Images
#
# @ToDo: Field to determine which is the default image to use for
# e.g. a Map popup (like the profile picture)
# readable/writable=False except in the cases where-needed
#
doc_image_type_opts = {1: T("Photograph"),
2: T("Map"),
3: T("Document Scan"),
99: T("other")
}
tablename = "doc_image"
define_table(tablename,
# Component not instance
super_link(doc_id, "doc_entity"),
super_link("pe_id", "pr_pentity"), # @ToDo: Remove & make Persons doc entities instead?
super_link("site_id", "org_site"), # @ToDo: Remove since Site Instances are doc entities?
Field("file", "upload",
autodelete = True,
length = current.MAX_FILENAME_LENGTH,
represent = doc_image_represent,
requires = IS_EMPTY_OR(
IS_IMAGE(extensions=(s3.IMAGE_EXTENSIONS)),
# Distinguish from prepop
null = "",
),
# upload folder needs to be visible to the download() function as well as the upload
uploadfolder = os.path.join(folder,
"uploads",
"images"),
widget = S3ImageCropWidget((600, 600)),
),
Field("mime_type",
readable = False,
writable = False,
),
Field("name", length=128,
label = T("Name"),
# Allow Name to be added onvalidation
requires = IS_EMPTY_OR(IS_LENGTH(128)),
),
Field("url",
label = T("URL"),
requires = IS_EMPTY_OR(IS_URL()),
),
Field("type", "integer",
default = 1,
label = T("Image Type"),
represent = lambda opt: \
doc_image_type_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(doc_image_type_opts,
zero=None),
),
person_id(label = T("Author"),
),
organisation_id(),
s3_date(label = T("Date Taken"),
),
# @ToDo: Move location to link table
location_id(),
s3_comments(),
Field("checksum",
readable = False,
writable = False,
),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Photo"),
title_display = T("Photo Details"),
title_list = T("Photos"),
title_update = T("Edit Photo"),
label_list_button = T("List Photos"),
label_delete_button = T("Delete Photo"),
msg_record_created = T("Photo added"),
msg_record_modified = T("Photo updated"),
msg_record_deleted = T("Photo deleted"),
msg_list_empty = T("No Photos found"))
# Resource Configuration
configure(tablename,
deduplicate = self.document_duplicate,
onvalidation = lambda form: \
self.document_onvalidation(form, document=False)
)
# ---------------------------------------------------------------------
# Pass model-global names to response.s3
#
return dict(doc_document_id = document_id,
)
# -------------------------------------------------------------------------
def defaults(self):
""" Safe defaults if the module is disabled """
document_id = S3ReusableField("document_id", "integer",
readable=False, writable=False)
return dict(doc_document_id = document_id,
)
# -------------------------------------------------------------------------
@staticmethod
def doc_file_represent(file):
""" File representation """
if file:
try:
# Read the filename from the file
filename = current.db.doc_document.file.retrieve(file)[0]
except IOError:
return current.T("File not found")
else:
return A(filename,
_href=URL(c="default", f="download", args=[file]))
else:
return current.messages["NONE"]
# -------------------------------------------------------------------------
@staticmethod
def document_duplicate(item):
""" Import item de-duplication """
data = item.data
query = None
file = data.get("file")
if file:
table = item.table
query = (table.file == file)
else:
url = data.get("url")
if url:
table = item.table
query = (table.url == url)
if query:
duplicate = current.db(query).select(table.id,
limitby=(0, 1)).first()
if duplicate:
item.id = duplicate.id
item.method = item.METHOD.UPDATE
# -------------------------------------------------------------------------
@staticmethod
def document_onvalidation(form, document=True):
""" Form validation for both, documents and images """
form_vars = form.vars
doc = form_vars.file
if doc is None:
# If this is a prepop, then file not in form
# Interactive forms with empty doc has this as "" not None
return
if not document:
encoded_file = form_vars.get("imagecrop-data", None)
if encoded_file:
# S3ImageCropWidget
import base64
import uuid
metadata, encoded_file = encoded_file.split(",")
filename, datatype, enctype = metadata.split(";")
f = Storage()
f.filename = uuid.uuid4().hex + filename
import cStringIO
f.file = cStringIO.StringIO(base64.decodestring(encoded_file))
doc = form_vars.file = f
if not form_vars.name:
form_vars.name = filename
if not hasattr(doc, "file") and not doc and not form_vars.url:
if document:
msg = current.T("Either file upload or document URL required.")
else:
msg = current.T("Either file upload or image URL required.")
form.errors.file = msg
form.errors.url = msg
if hasattr(doc, "file"):
name = form_vars.name
if not name:
# Use the filename
form_vars.name = doc.filename
else:
id = current.request.post_vars.id
if id:
if document:
tablename = "doc_document"
else:
tablename = "doc_image"
db = current.db
table = db[tablename]
record = db(table.id == id).select(table.file,
limitby=(0, 1)).first()
if record:
name = form_vars.name
if not name:
# Use the filename
form_vars.name = table.file.retrieve(record.file)[0]
# Do a checksum on the file to see if it's a duplicate
#import cgi
#if isinstance(doc, cgi.FieldStorage) and doc.filename:
# f = doc.file
# form_vars.checksum = doc_checksum(f.read())
# f.seek(0)
# if not form_vars.name:
# form_vars.name = doc.filename
#if form_vars.checksum is not None:
# # Duplicate allowed if original version is deleted
# query = ((table.checksum == form_vars.checksum) & \
# (table.deleted == False))
# result = db(query).select(table.name,
# limitby=(0, 1)).first()
# if result:
# doc_name = result.name
# form.errors["file"] = "%s %s" % \
# (T("This file already exists on the server as"), doc_name)
# -------------------------------------------------------------------------
@staticmethod
def document_onaccept(form):
"""
Build a full-text index
"""
form_vars = form.vars
doc = form_vars.file
table = current.db.doc_document
document = json.dumps(dict(filename=doc,
name=table.file.retrieve(doc)[0],
id=form_vars.id,
))
current.s3task.async("document_create_index",
args = [document])
# -------------------------------------------------------------------------
@staticmethod
def document_ondelete(row):
"""
Remove the full-text index
"""
db = current.db
table = db.doc_document
record = db(table.id == row.id).select(table.file,
limitby=(0, 1)).first()
document = json.dumps(dict(filename=record.file,
id=row.id,
))
current.s3task.async("document_delete_index",
args = [document])
# =============================================================================
def doc_image_represent(filename):
"""
Represent an image as a clickable thumbnail
@param filename: name of the image file
"""
if not filename:
return current.messages["NONE"]
return DIV(A(IMG(_src=URL(c="default", f="download",
args=filename),
_height=40),
_class="zoom",
_href=URL(c="default", f="download",
args=filename)))
# @todo: implement/activate the JavaScript for this:
#import uuid
#anchor = "zoom-media-image-%s" % uuid.uuid4()
#return DIV(A(IMG(_src=URL(c="default", f="download",
#args=filename),
#_height=40),
#_class="zoom",
#_href="#%s" % anchor),
#DIV(IMG(_src=URL(c="default", f="download",
#args=filename),
#_width=600),
#_id="%s" % anchor,
#_class="hide"))
# =============================================================================
def doc_checksum(docstr):
""" Calculate a checksum for a file """
import hashlib
converted = hashlib.sha1(docstr).hexdigest()
return converted
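# Example usage (illustrative): doc_checksum(open(filepath, "rb").read()) returns
# a 40-character hexadecimal SHA-1 digest that can be compared against
# table.checksum for duplicate detection.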
# =============================================================================
def doc_document_list_layout(list_id, item_id, resource, rfields, record):
"""
Default dataList item renderer for Documents, e.g. on the HRM Profile
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
@param rfields: the S3ResourceFields to render
@param record: the record as dict
"""
record_id = record["doc_document.id"]
item_class = "thumbnail"
raw = record._row
title = record["doc_document.name"]
file = raw["doc_document.file"] or ""
url = raw["doc_document.url"] or ""
date = record["doc_document.date"]
comments = raw["doc_document.comments"] or ""
if file:
try:
doc_name = current.s3db.doc_document.file.retrieve(file)[0]
except (IOError, TypeError):
doc_name = current.messages["NONE"]
doc_url = URL(c="default", f="download",
args=[file])
body = P(ICON("attachment"),
" ",
SPAN(A(doc_name,
_href=doc_url,
)
),
" ",
_class="card_1_line",
)
elif url:
body = P(ICON("link"),
" ",
SPAN(A(url,
_href=url,
)),
" ",
_class="card_1_line",
)
else:
# Shouldn't happen!
body = P(_class="card_1_line")
# Edit Bar
permit = current.auth.s3_has_permission
table = current.s3db.doc_document
if permit("update", table, record_id=record_id):
edit_btn = A(ICON("edit"),
_href=URL(c="doc", f="document",
args=[record_id, "update.popup"],
vars={"refresh": list_id,
"record": record_id}),
_class="s3_modal",
_title=current.T("Edit Document"),
)
else:
edit_btn = ""
if permit("delete", table, record_id=record_id):
delete_btn = A(ICON("delete"),
_class="dl-item-delete",
)
else:
delete_btn = ""
edit_bar = DIV(edit_btn,
delete_btn,
_class="edit-bar fright",
)
# Render the item
item = DIV(DIV(ICON("icon"),
SPAN(" %s" % title,
_class="card-title"),
edit_bar,
_class="card-header",
),
DIV(DIV(DIV(body,
P(SPAN(comments),
" ",
_class="card_manylines",
),
_class="media",
),
_class="media-body",
),
_class="media",
),
_class=item_class,
_id=item_id,
)
return item
# =============================================================================
class doc_DocumentRepresent(S3Represent):
""" Representation of Documents """
def link(self, k, v, row=None):
"""
Represent a (key, value) as hypertext link.
@param k: the key (doc_document.id)
@param v: the representation of the key
@param row: the row with this key
"""
if row:
try:
filename = row["doc_document.file"]
url = row["doc_document.url"]
except AttributeError:
return v
else:
if filename:
url = URL(c="default", f="download", args=filename)
return A(v, _href=url)
elif url:
return A(v, _href=url)
return v
# =============================================================================
class S3DocSitRepModel(S3Model):
"""
Situation Reports
"""
names = ("doc_sitrep",
"doc_sitrep_id",
)
def model(self):
T = current.T
# ---------------------------------------------------------------------
# Situation Reports
# - can be aggregated by OU
#
tablename = "doc_sitrep"
self.define_table(tablename,
self.super_link("doc_id", "doc_entity"),
Field("name", length=128,
label = T("Name"),
),
Field("description", "text",
label = T("Description"),
represent = lambda body: XML(body),
widget = s3_richtext_widget,
),
self.org_organisation_id(),
self.gis_location_id(
widget = S3LocationSelector(show_map = False),
),
s3_date(default = "now",
),
s3_comments(),
*s3_meta_fields())
# CRUD strings
current.response.s3.crud_strings[tablename] = Storage(
label_create = T("Add Situation Report"),
title_display = T("Situation Report Details"),
title_list = T("Situation Reports"),
title_update = T("Edit Situation Report"),
title_upload = T("Import Situation Reports"),
label_list_button = T("List Situation Reports"),
label_delete_button = T("Delete Situation Report"),
msg_record_created = T("Situation Report added"),
msg_record_modified = T("Situation Report updated"),
msg_record_deleted = T("Situation Report deleted"),
msg_list_empty = T("No Situation Reports currently registered"))
crud_form = S3SQLCustomForm("name",
"description",
"organisation_id",
"location_id",
"date",
S3SQLInlineComponent(
"document",
name = "document",
label = T("Attachments"),
fields = [("", "file")],
),
"comments",
)
if current.deployment_settings.get_org_branches():
org_filter = S3HierarchyFilter("organisation_id",
leafonly = False,
)
else:
org_filter = S3OptionsFilter("organisation_id",
#filter = True,
#header = "",
)
filter_widgets = [org_filter,
S3LocationFilter(),
S3DateFilter("date"),
]
self.configure(tablename,
crud_form = crud_form,
filter_widgets = filter_widgets,
list_fields = ["date",
"event_sitrep.incident_id",
"location_id$L1",
"location_id$L2",
"location_id$L3",
"organisation_id",
"name",
(T("Attachments"), "document.file"),
"comments",
],
super_entity = "doc_entity",
)
# Components
self.add_components(tablename,
event_sitrep = {"name": "event_sitrep",
"joinby": "sitrep_id",
},
event_incident = {"link": "event_sitrep",
"joinby": "sitrep_id",
"key": "incident_id",
"actuate": "hide",
"multiple": "False",
#"autocomplete": "name",
"autodelete": False,
},
)
represent = S3Represent(lookup=tablename)
sitrep_id = S3ReusableField("sitrep_id", "reference %s" % tablename,
label = T("Situation Report"),
ondelete = "RESTRICT",
represent = represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(current.db, "doc_sitrep.id",
represent,
orderby="doc_sitrep.name",
sort=True)),
sortby = "name",
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(doc_sitrep_id = sitrep_id,
)
# -------------------------------------------------------------------------
@staticmethod
def defaults():
"""
Return safe defaults in case the model has been deactivated.
"""
dummy = S3ReusableField("dummy_id", "integer",
readable = False,
writable = False)
return dict(doc_sitrep_id = lambda **attr: dummy("sitrep_id"),
)
# =============================================================================
class S3CKEditorModel(S3Model):
"""
Storage for Images used by CKEditor
- and hence the s3_richtext_widget
Based on https://github.com/timrichardson/web2py_ckeditor4
"""
names = ("doc_ckeditor",
"doc_filetype",
)
def model(self):
#T = current.T
# ---------------------------------------------------------------------
# Images
#
tablename = "doc_ckeditor"
self.define_table(tablename,
Field("title", length=255),
Field("filename", length=255),
Field("flength", "integer"),
Field("mime_type", length=128),
Field("upload", "upload",
#uploadfs = self.settings.uploadfs,
requires = [IS_NOT_EMPTY(),
IS_LENGTH(maxsize=10485760, # 10 Mb
minsize=0)],
),
*s3_meta_fields())
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
#
return dict(doc_filetype = self.doc_filetype,
)
# -------------------------------------------------------------------------
@staticmethod
def doc_filetype(filename):
"""
Takes a filename and returns a category based on the file type.
Categories: word, excel, powerpoint, flash, pdf, image, video, audio, archive, other.
"""
parts = os.path.splitext(filename)
if len(parts) < 2:
return "other"
else:
ext = parts[1][1:].lower()
if ext in ("png", "jpg", "jpeg", "gif"):
return "image"
elif ext in ("avi", "mp4", "m4v", "ogv", "wmv", "mpg", "mpeg"):
return "video"
elif ext in ("mp3", "m4a", "wav", "ogg", "aiff"):
return "audio"
elif ext in ("zip", "7z", "tar", "gz", "tgz", "bz2", "rar"):
return "archive"
elif ext in ("doc", "docx", "dot", "dotx", "rtf"):
return "word"
elif ext in ("xls", "xlsx", "xlt", "xltx", "csv"):
return "excel"
elif ext in ("ppt", "pptx"):
return "powerpoint"
elif ext in ("flv", "swf"):
return "flash"
elif ext == "pdf":
return "pdf"
else:
return "other"
# END =========================================================================
|
|
import collections, re
import hindkit as k
from generate_GOADB import Glyph
DO_HACK_FOR_CORE_TEXT = True
DO_OUTPUT_INDIC1 = True
DO_OUTPUT_INDIC2 = True
DO_OUTPUT_USE = True
PREFIX = "dv"
def indent(line):
return " " + line
def comment(line):
return "# " + line
def add_prefix(name):
return PREFIX + name
def remove_prefix(name):
if name.startswith("dv"):
name = name[2:]
return name
class Font(object):
def __init__(self):
self.features = collections.OrderedDict()
def add_feature(self, tag):
self.features[tag] = Feature(tag)
def dump(self):
lines = []
for f in list(self.features.values()):
lines.extend(f.dump())
return lines
@property
def lookups(self):
lookups = collections.OrderedDict()
for feature in list(self.features.values()):
    for label, lookup in list(feature.lookups.items()):
        lookups[label] = lookup
return lookups
class Feature(object):
def __init__(self, tag):
self.tag = tag
self.lookups = collections.OrderedDict()
def register_lookup(self, label):
self.lookups[label] = Lookup(label)
def dump(self):
lines = []
lines.append("feature {} {{".format(self.tag))
for l in list(self.lookups.values()):
lines.extend(list(map(indent, l.dump())))
lines.append("}} {};".format(self.tag))
return lines
class Lookup(object):
def __init__(self, label, determinate = False):
self.label = label
self.determinate = determinate
self.rules = []
def add_rule(self, input_components, output_components, text=None, is_commented=False):
self.rules.append(Rule(input_components, output_components, text=text, is_commented=is_commented))
rule = self.rules[-1]
# return rule
def dump(self):
lines = []
lines.append("lookup {} {{".format(self.label))
lines.extend([indent(r.dump()) for r in self.rules])
lines.append("}} {};".format(self.label))
if not self.rules:
lines = list(map(comment, lines))
return lines
class Rule(object):
def __init__(self, input_components, output_components, text=None, is_commented=False):
self.input_components = input_components
self.output_components = output_components
self._text = text
self.is_commented = is_commented
@property
def text(self):
if self._text is None:
text = "sub {} by {};".format(
" ".join(self.input_components),
" ".join(self.output_components),
)
else:
text = self._text
return text
@property
def is_empty(self):
is_empty = False
if not self.input_components:
is_empty = True
return is_empty
def dump(self):
dump = self.text
if self.is_empty:
dump = " ".join(self.output_components)
self.is_commented = True
if self.is_commented:
dump = "# " + dump
return dump
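# Minimal sketch (not part of this script's pipeline; glyph names are
# illustrative) showing how Font/Feature/Lookup/Rule compose into feature code:
def _feature_dump_demo():
    font = Font()
    font.add_feature("akhn")
    font.features["akhn"].register_lookup("akhn")
    font.features["akhn"].lookups["akhn"].add_rule(
        ["dvKA", "dvVirama", "dvSSA"], ["dvK_SSA"])
    # "\n".join(font.dump()) yields:
    #   feature akhn {
    #       lookup akhn {
    #           sub dvKA dvVirama dvSSA by dvK_SSA;
    #       } akhn;
    #   } akhn;
    return font.dump()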
PLAIN_CONSONANTS_LETTERS = [i + "A" for i in k.constants.CONSONANT_STEMS]
AKHAND_LIGATURES = ["K_SSA", "J_NYA"]
BASIC_FORMS = PLAIN_CONSONANTS_LETTERS + AKHAND_LIGATURES
NUKTA_FORMS = [i + "xA" for i in k.constants.CONSONANT_STEMS]
RAKAR_FORMS = [i[:-1] + "_RA" for i in BASIC_FORMS]
RAKAR_NUKTA_FORMS = [i[:-1] + "_RA" for i in NUKTA_FORMS]
HALF_FORMS = [i[:-1] for i in BASIC_FORMS]
HALF_NUKTA_FORMS = [i[:-1] for i in NUKTA_FORMS]
HALF_RAKAR_FORMS = [i[:-1] for i in RAKAR_FORMS]
HALF_RAKAR_NUKTA_FORMS = [i[:-1] for i in RAKAR_NUKTA_FORMS]
CANONICAL_FORMED_GLYPHS = BASIC_FORMS + NUKTA_FORMS + RAKAR_FORMS + RAKAR_NUKTA_FORMS + HALF_FORMS + HALF_NUKTA_FORMS + HALF_RAKAR_FORMS + HALF_RAKAR_NUKTA_FORMS
def get_components(name, mode="indic2"):
g = Glyph(name)
global PREFIX
PREFIX = g.prefix
stem = g.stem
input_pieces = g.stem_pieces
suffixes = g.suffixes
components = input_pieces
lookup = None
if PREFIX != "dv":
pass
elif suffixes != [""]:
pass
elif stem == "Reph":
lookup = "rphf"
components = ["RA", "Virama"]
elif stem == "RAc2":
if mode == "indic2":
lookup = "blwf_new"
components = ["Virama", "RA"]
elif mode == "indic1":
lookup = "blwf_old"
components = ["RA", "Virama"]
elif stem == "Eyelash":
lookup = "akhn_eyelash"
components = ["RA'", "Virama'", "zerowidthjoiner"]
elif stem in PLAIN_CONSONANTS_LETTERS:
pass
elif stem in AKHAND_LIGATURES:
lookup = "akhn"
if stem == "K_SSA":
components = ["KA", "Virama", "SSA"]
elif stem == "J_NYA":
components = ["JA", "Virama", "NYA"]
elif stem in NUKTA_FORMS:
lookup = "nukt"
components = [stem[:-2] + "A", "Nukta"]
elif stem in HALF_FORMS + HALF_NUKTA_FORMS:
lookup = "half"
components = [stem + "A", "Virama"]
elif stem in RAKAR_FORMS + RAKAR_NUKTA_FORMS:
if mode == "indic2":
lookup = "rkrf_new"
components = [stem[:-3] + "A", "Virama", "RA"]
elif mode == "indic1":
lookup = "vatu_old"
components = [stem[:-3] + "A", "RAc2"]
elif stem in HALF_RAKAR_FORMS + HALF_RAKAR_NUKTA_FORMS:
if mode == "indic2":
lookup = "half_new"
components = [stem + "A", "Virama"]
elif mode == "indic1":
lookup = "vatu_old"
components = [stem[:-2], "RAc2"]
components = list(map(add_prefix, components))
return components, lookup
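# Illustrative expectation (assuming Glyph parses "dvK_SSA" into prefix "dv",
# stem "K_SSA", and no suffixes -- not verified here):
#   get_components("dvK_SSA") -> (["dvKA", "dvVirama", "dvSSA"], "akhn")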
def ligate(components, reference):
output = [components[0]]
for a in components[1:]:
b = output[-1]
b_a = b + "_" + a
if b_a in reference:
output.pop()
output.append(b_a)
else:
output.append(a)
return output
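# Illustrative behaviour with abstract component names (not real glyph names):
# ligate greedily joins adjacent components whenever the joined name is in the
# reference set, e.g.
#   ligate(["A", "B", "C"], {"A_B"})          -> ["A_B", "C"]
#   ligate(["A", "B", "C"], {"A_B", "A_B_C"}) -> ["A_B_C"]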
def generate_cjct(components, formed_glyphs):
is_cjct = False
output = []
for a in components:
is_different = False
if "dv" + a not in formed_glyphs:
if a in RAKAR_FORMS + RAKAR_NUKTA_FORMS + HALF_FORMS + HALF_NUKTA_FORMS + HALF_RAKAR_FORMS + HALF_RAKAR_NUKTA_FORMS:
if not is_cjct:
is_cjct = True
output = {
"cjct_new": output[:],
"pres_old": output[:],
}
if a in HALF_FORMS + HALF_NUKTA_FORMS:
extension = [a + "A", "Virama"]
elif a in RAKAR_FORMS + RAKAR_NUKTA_FORMS:
extension = [a[:-3] + "A", "RAc2"]
elif a in HALF_RAKAR_FORMS + HALF_RAKAR_NUKTA_FORMS:
is_different = True
if "dv" + a + "A" in formed_glyphs:
extension_indic2 = [a + "A", "Virama"]
else:
extension_indic2 = [a[:-2] + "A", "RAc2", "Virama"]
if "dv" + a[:-2] in formed_glyphs:
extension_indic1 = [a[:-2], "RAc2"]
else:
extension_indic1 = [a[:-2] + "A", "Virama", "RAc2"]
else:
extension = [a]
else:
extension = [a]
if is_different:
extension_cjct_new = extension_indic2
extension_pres_old = extension_indic1
else:
extension_cjct_new = extension
extension_pres_old = extension
if is_cjct:
output["cjct_new"].extend(extension_cjct_new)
output["pres_old"].extend(extension_pres_old)
else:
output.extend(extension)
return is_cjct, output
|
|
""" Tools for using spherical harmonic models to fit diffusion data
References
----------
Aganj, I., et. al. 2009. ODF Reconstruction in Q-Ball Imaging With Solid
Angle Consideration.
Descoteaux, M., et. al. 2007. Regularized, fast, and robust analytical
Q-ball imaging.
Tristan-Vega, A., et. al. 2010. A new methodology for estimation of fiber
populations in white matter of the brain with Funk-Radon transform.
Tristan-Vega, A., et. al. 2009. Estimation of fiber orientation probability
density functions in high angular resolution diffusion imaging.
"""
"""
Note about the Transpose:
In the literature the matrix representation of these methods is often written
as Y = Bx where B is some design matrix and Y and x are column vectors. In our
case the input data, a dwi stored as a nifti file for example, is stored as row
vectors (ndarrays) of the form (x, y, z, n), where n is the number of diffusion
directions. We could transpose and reshape the data to be (n, x*y*z), so that
we could directly plug it into the above equation. However, I have chosen to
keep the data as is and implement the relevant equations rewritten in the
following form: Y.T = x.T B.T, or in python syntax data = np.dot(sh_coef, B.T)
where data is Y.T and sh_coef is x.T.
"""
import numpy as np
from numpy import concatenate, diag, diff, empty, eye, sqrt, unique, dot
from numpy.linalg import pinv, svd
from numpy.random import randint
from dipy.reconst.odf import OdfModel, OdfFit
from scipy.special import sph_harm, lpn, lpmv, gammaln
from dipy.core.sphere import Sphere
import dipy.core.gradients as grad
from dipy.sims.voxel import single_tensor, all_tensor_evecs
from dipy.core.geometry import cart2sphere
from dipy.core.onetime import auto_attr
from dipy.reconst.cache import Cache
from distutils.version import StrictVersion
import scipy
if StrictVersion(scipy.version.short_version) >= StrictVersion('0.15.0'):
SCIPY_15_PLUS = True
else:
SCIPY_15_PLUS = False
def _copydoc(obj):
def bandit(f):
f.__doc__ = obj.__doc__
return f
return bandit
def forward_sdeconv_mat(r_rh, n):
""" Build forward spherical deconvolution matrix
Parameters
----------
r_rh : ndarray
Rotational harmonics coefficients for the single fiber response
function. Each element `rh[i]` is associated with spherical harmonics
of degree `2*i`.
n : ndarray
The degree of spherical harmonic function associated with each row of
the deconvolution matrix. Only even degrees are allowed
Returns
-------
R : ndarray (N, N)
Deconvolution matrix with shape (N, N)
"""
if np.any(n % 2):
raise ValueError("n has odd degrees, expecting only even degrees")
return np.diag(r_rh[n // 2])
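# Example (illustrative values): for an sh_order-4 basis the degree vector is
# n = [0, 2, 2, 2, 2, 2, 4, 4, 4, 4, 4, 4, 4, 4, 4], so r_rh = [r0, r2, r4]
# yields diag([r0, r2, r2, r2, r2, r2, r4, ..., r4]) with shape (15, 15).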
def sh_to_rh(r_sh, m, n):
""" Spherical harmonics (SH) to rotational harmonics (RH)
Calculate the rotational harmonic decomposition up to
harmonic order `m`, degree `n` for an axially and antipodally
symmetric function. Note that all ``m != 0`` coefficients
will be ignored as axial symmetry is assumed. Hence, there
will be ``(sh_order/2 + 1)`` non-zero coefficients.
Parameters
----------
r_sh : ndarray (N,)
ndarray of SH coefficients for the single fiber response function.
These coefficients must correspond to the real spherical harmonic
functions produced by `shm.real_sph_harm`.
m : ndarray (N,)
The order of the spherical harmonic function associated with each
coefficient.
n : ndarray (N,)
The degree of the spherical harmonic function associated with each
coefficient.
Returns
-------
r_rh : ndarray (``(sh_order + 1)*(sh_order + 2)/2``,)
Rotational harmonics coefficients representing the input `r_sh`
See Also
--------
shm.real_sph_harm, shm.real_sym_sh_basis
References
----------
.. [1] Tournier, J.D., et al. NeuroImage 2007. Robust determination of the
fibre orientation distribution in diffusion MRI: Non-negativity
constrained super-resolved spherical deconvolution
"""
mask = m == 0
# The delta function at theta = phi = 0 is known to have zero coefficients
# where m != 0, therefore we need only compute the coefficients at m=0.
dirac_sh = gen_dirac(0, n[mask], 0, 0)
r_rh = r_sh[mask] / dirac_sh
return r_rh
def gen_dirac(m, n, theta, phi):
""" Generate Dirac delta function orientated in (theta, phi) on the sphere
The spherical harmonics (SH) representation of this Dirac is returned as
coefficients to spherical harmonic functions produced by
`shm.real_sph_harm`.
Parameters
----------
m : ndarray (N,)
The order of the spherical harmonic function associated with each
coefficient.
n : ndarray (N,)
The degree of the spherical harmonic function associated with each
coefficient.
theta : float [0, 2*pi]
The azimuthal (longitudinal) coordinate.
phi : float [0, pi]
The polar (colatitudinal) coordinate.
See Also
--------
shm.real_sph_harm, shm.real_sym_sh_basis
Returns
-------
dirac : ndarray
SH coefficients representing the Dirac function. The shape of this is
`(m + 2) * (m + 1) / 2`.
"""
return real_sph_harm(m, n, theta, phi)
def spherical_harmonics(m, n, theta, phi):
r""" Compute spherical harmonics
This may take scalar or array arguments. The inputs will be broadcasted
against each other.
Parameters
----------
m : int ``|m| <= n``
The order of the harmonic.
n : int ``>= 0``
The degree of the harmonic.
theta : float [0, 2*pi]
The azimuthal (longitudinal) coordinate.
phi : float [0, pi]
The polar (colatitudinal) coordinate.
Returns
-------
y_mn : complex float
The harmonic $Y^m_n$ sampled at `theta` and `phi`.
Notes
-----
This is a faster implementation of scipy.special.sph_harm for
scipy version < 0.15.0.
"""
if SCIPY_15_PLUS:
return sph_harm(m, n, theta, phi)
x = np.cos(phi)
val = lpmv(m, n, x).astype(complex)
val *= np.sqrt((2 * n + 1) / 4.0 / np.pi)
val *= np.exp(0.5 * (gammaln(n - m + 1) - gammaln(n + m + 1)))
val = val * np.exp(1j * m * theta)
return val
def real_sph_harm(m, n, theta, phi):
r""" Compute real spherical harmonics.
Where the real harmonic $Y^m_n$ is defined to be:
Real($Y^m_n$) * sqrt(2) if m > 0
$Y^m_n$ if m == 0
Imag($Y^m_n$) * sqrt(2) if m < 0
This may take scalar or array arguments. The inputs will be broadcasted
against each other.
Parameters
----------
m : int ``|m| <= n``
The order of the harmonic.
n : int ``>= 0``
The degree of the harmonic.
theta : float [0, 2*pi]
The azimuthal (longitudinal) coordinate.
phi : float [0, pi]
The polar (colatitudinal) coordinate.
Returns
--------
y_mn : real float
The real harmonic $Y^m_n$ sampled at `theta` and `phi`.
See Also
--------
scipy.special.sph_harm
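Examples
--------
A small sanity check: sampling all basis functions up to degree 4 at a
single point yields one value per coefficient.
>>> m, n = sph_harm_ind_list(4)
>>> real_sph_harm(m, n, 0.0, 0.0).shape
(15,)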
"""
# dipy uses a convention for theta and phi that is reversed with respect to
# function signature of scipy.special.sph_harm
sh = spherical_harmonics(np.abs(m), n, phi, theta)
real_sh = np.where(m > 0, sh.imag, sh.real)
real_sh *= np.where(m == 0, 1., np.sqrt(2))
return real_sh
def real_sym_sh_mrtrix(sh_order, theta, phi):
"""
Compute real spherical harmonics as in mrtrix, where the real harmonic
$Y^m_n$ is defined to be::
Real($Y^m_n$) if m > 0
$Y^m_n$ if m == 0
Imag($Y^|m|_n$) if m < 0
This may take scalar or array arguments. The inputs will be broadcasted
against each other.
Parameters
-----------
sh_order : int
The maximum degree of the spherical harmonic basis.
theta : float [0, pi]
The polar (colatitudinal) coordinate.
phi : float [0, 2*pi]
The azimuthal (longitudinal) coordinate.
Returns
--------
y_mn : real float
The real harmonic $Y^m_n$ sampled at `theta` and `phi` as
implemented in mrtrix. Warning: this is the basis of Tournier et al.
(2004, 2007), which is slightly different from the default dipy basis.
m : array
The order of the harmonics.
n : array
The degree of the harmonics.
"""
m, n = sph_harm_ind_list(sh_order)
phi = np.reshape(phi, [-1, 1])
theta = np.reshape(theta, [-1, 1])
m = -m
real_sh = real_sph_harm(m, n, theta, phi)
real_sh /= np.where(m == 0, 1., np.sqrt(2))
return real_sh, m, n
def real_sym_sh_basis(sh_order, theta, phi):
"""Samples a real symmetric spherical harmonic basis at point on the sphere
Samples the basis functions up to order `sh_order` at points on the sphere
given by `theta` and `phi`. The basis functions are defined here the same
way as in fibernavigator [1]_ where the real harmonic $Y^m_n$ is defined to
be:
Imag($Y^m_n$) * sqrt(2) if m > 0
$Y^m_n$ if m == 0
Real($Y^|m|_n$) * sqrt(2) if m < 0
This may take scalar or array arguments. The inputs will be broadcasted
against each other.
Parameters
-----------
sh_order : int
even int > 0, max spherical harmonic degree
theta : float [0, 2*pi]
The azimuthal (longitudinal) coordinate.
phi : float [0, pi]
The polar (colatitudinal) coordinate.
Returns
--------
y_mn : real float
The real harmonic $Y^m_n$ sampled at `theta` and `phi`
m : array
The order of the harmonics.
n : array
The degree of the harmonics.
References
----------
.. [1] http://code.google.com/p/fibernavigator/
"""
m, n = sph_harm_ind_list(sh_order)
phi = np.reshape(phi, [-1, 1])
theta = np.reshape(theta, [-1, 1])
real_sh = real_sph_harm(m, n, theta, phi)
return real_sh, m, n
sph_harm_lookup = {None: real_sym_sh_basis,
"mrtrix": real_sym_sh_mrtrix,
"fibernav": real_sym_sh_basis}
def sph_harm_ind_list(sh_order):
"""
Returns the degree (n) and order (m) of all the symmetric spherical
harmonics of degree less than or equal to `sh_order`. The results, `m_list`
and `n_list`, are arrays of length k, where k depends on `sh_order`. They can be
passed to :func:`real_sph_harm`.
Parameters
----------
sh_order : int
even int > 0, max degree to return
Returns
-------
m_list : array
orders of even spherical harmonics
n_list : array
degrees of even spherical harmonics
See also
--------
real_sph_harm
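Examples
--------
For ``sh_order=2`` there are six coefficients:
>>> m_list, n_list = sph_harm_ind_list(2)
>>> n_list.tolist()
[0, 2, 2, 2, 2, 2]
>>> m_list.tolist()
[0, -2, -1, 0, 1, 2]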
"""
if sh_order % 2 != 0:
raise ValueError('sh_order must be an even integer >= 0')
n_range = np.arange(0, sh_order + 1, 2, dtype=int)
n_list = np.repeat(n_range, n_range * 2 + 1)
ncoef = (sh_order + 2) * (sh_order + 1) // 2
offset = 0
m_list = empty(ncoef, 'int')
for ii in n_range:
m_list[offset:offset + 2 * ii + 1] = np.arange(-ii, ii + 1)
offset = offset + 2 * ii + 1
# m_list and n_list are flat arrays of length ncoef, one entry per coefficient
return (m_list, n_list)
def order_from_ncoef(ncoef):
"""
Given a number n of coefficients, calculate back the sh_order
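For example, an order 4 series has (4 + 2) * (4 + 1) / 2 = 15 coefficients:
>>> order_from_ncoef(15)
4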
"""
# Solve the quadratic equation derived from :
# ncoef = (sh_order + 2) * (sh_order + 1) / 2
return int(-3 + np.sqrt(9 - 4 * (2 - 2 * ncoef))) // 2
def smooth_pinv(B, L):
"""Regularized psudo-inverse
Computes a regularized least square inverse of B
Parameters
----------
B : array_like (n, m)
Matrix to be inverted
L : array_like (m,)
Regularization weights, one per column of `B`.
Returns
-------
inv : ndarray (m, n)
regularized least square inverse of B
Notes
-----
In the literature this inverse is often written $(B^{T}B+L^{2})^{-1}B^{T}$.
However, here this inverse is implemented using the pseudo-inverse because it
is more numerically stable than the direct implementation of the matrix
product.
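Examples
--------
A minimal sketch showing the shape of the result:
>>> B = np.array([[1., 0.], [0., 1.], [1., 1.]])
>>> smooth_pinv(B, np.zeros(2)).shape
(2, 3)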
"""
L = diag(L)
inv = pinv(concatenate((B, L)))
return inv[:, :len(B)]
def lazy_index(index):
"""Produces a lazy index
Returns a slice that can be used for indexing an array, if no slice can be
made index is returned as is.
"""
index = np.array(index)
assert index.ndim == 1
if index.dtype.kind == 'b':
index = index.nonzero()[0]
if len(index) == 1:
return slice(index[0], index[0] + 1)
step = unique(diff(index))
if len(step) != 1 or step[0] == 0:
return index
else:
return slice(index[0], index[-1] + 1, step[0])
def _gfa_sh(coef, sh0_index=0):
"""The gfa of the odf, computed from the spherical harmonic coefficients
This is a private function because it only works for coefficients of
normalized sh bases.
Parameters
----------
coef : array
The coefficients, using a normalized sh basis, that represent each odf.
sh0_index : int
The index of the coefficient associated with the 0th order sh harmonic.
Returns
-------
gfa_values : array
The gfa of each odf.
"""
coef_sq = coef**2
numer = coef_sq[..., sh0_index]
denom = (coef_sq).sum(-1)
# The sum of the square of the coefficients being zero is the same as all
# the coefficients being zero
allzero = denom == 0
# By adding 1 to numer and denom where both are 0, we prevent 0/0
numer = numer + allzero
denom = denom + allzero
return np.sqrt(1. - (numer / denom))
class SphHarmModel(OdfModel, Cache):
"""To be subclassed by all models that return a SphHarmFit when fit."""
def sampling_matrix(self, sphere):
"""The matrix needed to sample ODFs from coefficients of the model.
Parameters
----------
sphere : Sphere
Points used to sample ODF.
Returns
-------
sampling_matrix : array
The size of the matrix will be (N, M) where N is the number of
vertices on sphere and M is the number of coefficients needed by
the model.
"""
sampling_matrix = self.cache_get("sampling_matrix", sphere)
if sampling_matrix is None:
sh_order = self.sh_order
theta = sphere.theta
phi = sphere.phi
sampling_matrix, m, n = real_sym_sh_basis(sh_order, theta, phi)
self.cache_set("sampling_matrix", sphere, sampling_matrix)
return sampling_matrix
class QballBaseModel(SphHarmModel):
"""To be subclassed by Qball type models."""
def __init__(self, gtab, sh_order, smooth=0.006, min_signal=1.,
assume_normed=False):
"""Creates a model that can be used to fit or sample diffusion data
Parameters
----------
gtab : GradientTable
Diffusion gradients used to acquire data
sh_order : even int >= 0
the spherical harmonic order of the model
smooth : float between 0 and 1, optional
The regularization parameter of the model
min_signal : float, > 0, optional
During fitting, all signal values less than `min_signal` are
clipped to `min_signal`. This is done primarily to avoid values
less than or equal to zero when taking logs.
assume_normed : bool, optional
If True, clipping and normalization of the data with respect to the
mean B0 signal are skipped during model fitting. This is an advanced
feature and should be used with care.
See Also
--------
normalize_data
"""
SphHarmModel.__init__(self, gtab)
self._where_b0s = lazy_index(gtab.b0s_mask)
self._where_dwi = lazy_index(~gtab.b0s_mask)
self.assume_normed = assume_normed
self.min_signal = min_signal
x, y, z = gtab.gradients[self._where_dwi].T
r, theta, phi = cart2sphere(x, y, z)
B, m, n = real_sym_sh_basis(sh_order, theta[:, None], phi[:, None])
L = -n * (n + 1)
legendre0 = lpn(sh_order, 0)[0]
F = legendre0[n]
self.sh_order = sh_order
self.B = B
self.m = m
self.n = n
self._set_fit_matrix(B, L, F, smooth)
def _set_fit_matrix(self, *args):
"""Should be set in a subclass and is called by __init__"""
msg = "User must implement this method in a subclass"
raise NotImplementedError(msg)
def fit(self, data, mask=None):
"""Fits the model to diffusion data and returns the model fit"""
# Normalize the data and fit coefficients
if not self.assume_normed:
data = normalize_data(data, self._where_b0s, self.min_signal)
# Compute coefficients using abstract method
coef = self._get_shm_coef(data)
# Apply the mask to the coefficients
if mask is not None:
mask = np.asarray(mask, dtype=bool)
coef *= mask[..., None]
return SphHarmFit(self, coef, mask)
class SphHarmFit(OdfFit):
"""Diffusion data fit to a spherical harmonic model"""
def __init__(self, model, shm_coef, mask):
self.model = model
self._shm_coef = shm_coef
self.mask = mask
@property
def shape(self):
return self._shm_coef.shape[:-1]
def __getitem__(self, index):
"""Allowing indexing into fit"""
# Index shm_coefficients
if isinstance(index, tuple):
coef_index = index + (Ellipsis,)
else:
coef_index = index
new_coef = self._shm_coef[coef_index]
# Index mask
if self.mask is not None:
new_mask = self.mask[index]
assert new_mask.shape == new_coef.shape[:-1]
else:
new_mask = None
return SphHarmFit(self.model, new_coef, new_mask)
def odf(self, sphere):
"""Samples the odf function on the points of a sphere
Parameters
----------
sphere : Sphere
The points on which to sample the odf.
Returns
-------
values : ndarray
The value of the odf on each point of `sphere`.
"""
B = self.model.sampling_matrix(sphere)
return dot(self._shm_coef, B.T)
@auto_attr
def gfa(self):
return _gfa_sh(self._shm_coef, 0)
@property
def shm_coeff(self):
"""The spherical harmonic coefficients of the odf
Make this a property for now, if there is a usecase for modifying
the coefficients we can add a setter or expose the coefficients more
directly
"""
return self._shm_coef
def predict(self, gtab=None, S0=1.0):
"""
Predict the diffusion signal from the model coefficients.
Parameters
----------
gtab : a GradientTable class instance
The directions and bvalues on which prediction is desired
S0 : float array
The mean non-diffusion-weighted signal in each voxel.
Default: 1.0 in all voxels
"""
if not hasattr(self.model, 'predict'):
msg = "This model does not have prediction implemented yet"
raise NotImplementedError(msg)
return self.model.predict(self.shm_coeff, gtab, S0)
class CsaOdfModel(QballBaseModel):
"""Implementation of Constant Solid Angle reconstruction method.
References
----------
.. [1] Aganj, I., et. al. 2009. ODF Reconstruction in Q-Ball Imaging With
Solid Angle Consideration.
"""
min = .001
max = .999
_n0_const = .5 / np.sqrt(np.pi)
def _set_fit_matrix(self, B, L, F, smooth):
"""The fit matrix, is used by fit_coefficients to return the
coefficients of the odf"""
invB = smooth_pinv(B, sqrt(smooth) * L)
L = L[:, None]
F = F[:, None]
self._fit_matrix = (F * L) / (8 * np.pi) * invB
def _get_shm_coef(self, data, mask=None):
"""Returns the coefficients of the model"""
data = data[..., self._where_dwi]
data = data.clip(self.min, self.max)
loglog_data = np.log(-np.log(data))
sh_coef = dot(loglog_data, self._fit_matrix.T)
sh_coef[..., 0] = self._n0_const
return sh_coef
class OpdtModel(QballBaseModel):
"""Implementation of Orientation Probability Density Transform
reconstruction method.
References
----------
.. [1] Tristan-Vega, A., et. al. 2010. A new methodology for estimation of
fiber populations in white matter of the brain with Funk-Radon
transform.
.. [2] Tristan-Vega, A., et. al. 2009. Estimation of fiber orientation
probability density functions in high angular resolution diffusion
imaging.
"""
def _set_fit_matrix(self, B, L, F, smooth):
invB = smooth_pinv(B, sqrt(smooth) * L)
L = L[:, None]
F = F[:, None]
delta_b = F * L * invB
delta_q = 4 * F * invB
self._fit_matrix = delta_b, delta_q
def _get_shm_coef(self, data, mask=None):
"""Returns the coefficients of the model"""
delta_b, delta_q = self._fit_matrix
return _slowadc_formula(data[..., self._where_dwi], delta_b, delta_q)
def _slowadc_formula(data, delta_b, delta_q):
"""formula used in SlowAdcOpdfModel"""
logd = -np.log(data)
return dot(logd * (1.5 - logd) * data, delta_q.T) - dot(data, delta_b.T)
class QballModel(QballBaseModel):
"""Implementation of regularized Qball reconstruction method.
References
----------
.. [1] Descoteaux, M., et. al. 2007. Regularized, fast, and robust
analytical Q-ball imaging.
"""
def _set_fit_matrix(self, B, L, F, smooth):
invB = smooth_pinv(B, sqrt(smooth) * L)
F = F[:, None]
self._fit_matrix = F * invB
def _get_shm_coef(self, data, mask=None):
"""Returns the coefficients of the model"""
return dot(data[..., self._where_dwi], self._fit_matrix.T)
def normalize_data(data, where_b0, min_signal=1., out=None):
"""Normalizes the data with respect to the mean b0
"""
if out is None:
out = np.array(data, dtype='float32', copy=True)
else:
if out.dtype.kind != 'f':
raise ValueError("out must be floating point")
out[:] = data
out.clip(min_signal, out=out)
b0 = out[..., where_b0].mean(-1)
out /= b0[..., None]
return out
def hat(B):
"""Returns the hat matrix for the design matrix B
"""
U, S, V = svd(B, False)
H = dot(U, U.T)
return H
def lcr_matrix(H):
"""Returns a matrix for computing leveraged, centered residuals from data
If r = (d - Hd), the leveraged, centered residuals are lcr = (r/l) - mean(r/l).
Returns the matrix R such that lcr = Rd.
"""
if H.ndim != 2 or H.shape[0] != H.shape[1]:
raise ValueError('H should be a square matrix')
leverages = sqrt(1 - H.diagonal())
leverages = leverages[:, None]
R = (eye(len(H)) - H) / leverages
return R - R.mean(0)
def bootstrap_data_array(data, H, R, permute=None):
"""Applies the Residual Bootstraps to the data given H and R
data must be normalized, i.e. 0 < data <= 1.
This function, and the bootstrap_data_voxel function, calculate
residual-bootstrap samples given a Hat matrix and a Residual matrix. These
samples can be used for non-parametric statistics or for bootstrap
probabilistic tractography:
References
----------
.. [1] J. I. Berman, et al., "Probabilistic streamline q-ball tractography
using the residual bootstrap" 2008.
.. [2] HA Haroon, et al., "Using the model-based residual bootstrap to
quantify uncertainty in fiber orientations from Q-ball analysis"
2009.
.. [3] B. Jeurissen, et al., "Probabilistic Fiber Tracking Using the
Residual Bootstrap with Constrained Spherical Deconvolution" 2011.
"""
if permute is None:
permute = randint(data.shape[-1], size=data.shape[-1])
assert R.shape == H.shape
assert len(permute) == R.shape[-1]
R = R[permute]
data = dot(data, (H + R).T)
return data
def bootstrap_data_voxel(data, H, R, permute=None):
"""Like bootstrap_data_array but faster when for a single voxel
data must be 1d and normalized
"""
if permute is None:
permute = randint(data.shape[-1], size=data.shape[-1])
r = dot(data, R.T)
boot_data = dot(data, H.T)
boot_data += r[permute]
return boot_data
class ResidualBootstrapWrapper(object):
"""Returns a residual bootstrap sample of the signal_object when indexed
Wraps a signal_object; this signal object can be an interpolator. When
indexed, the wrapper indexes the signal_object to get the signal.
The wrapper then samples the residual bootstrap distribution of the signal and
returns that sample.
"""
def __init__(self, signal_object, B, where_dwi, min_signal=1.):
"""Builds a ResidualBootstrapWapper
Given some linear model described by B, the design matrix, and a
signal_object, returns an object which can sample the residual
bootstrap distribution of the signal. We assume that the signals are
normalized, so we clip the bootstrap samples to be between `min_signal`
and 1.
Parameters
----------
signal_object : some object that can be indexed
This object should return diffusion weighted signals when indexed.
B : ndarray, ndim=2
The design matrix of the spherical harmonics model used to fit the
data. This is the model that will be used to compute the residuals
and sample the residual bootstrap distribution
where_dwi :
indexing object to find diffusion weighted signals from signal
min_signal : float
The lowest allowable signal.
"""
self._signal_object = signal_object
self._H = hat(B)
self._R = lcr_matrix(self._H)
self._min_signal = min_signal
self._where_dwi = where_dwi
self.data = signal_object.data
self.voxel_size = signal_object.voxel_size
def __getitem__(self, index):
"""Indexes self._signal_object and bootstraps the result"""
signal = self._signal_object[index].copy()
dwi_signal = signal[self._where_dwi]
boot_signal = bootstrap_data_voxel(dwi_signal, self._H, self._R)
boot_signal.clip(self._min_signal, 1., out=boot_signal)
signal[self._where_dwi] = boot_signal
return signal
def sf_to_sh(sf, sphere, sh_order=4, basis_type=None, smooth=0.0):
"""Spherical function to spherical harmonics (SH).
Parameters
----------
sf : ndarray
Values of a function on the given `sphere`.
sphere : Sphere
The points on which the sf is defined.
sh_order : int, optional
Maximum SH order in the SH fit. For `sh_order`, there will be
``(sh_order + 1) * (sh_order + 2) / 2`` SH coefficients (default 4).
basis_type : {None, 'mrtrix', 'fibernav'}
``None`` for the default dipy basis,
``mrtrix`` for the MRtrix basis, and
``fibernav`` for the FiberNavigator basis
(default ``None``).
smooth : float, optional
Lambda-regularization in the SH fit (default 0.0).
Returns
-------
sh : ndarray
SH coefficients representing the input function.
"""
sph_harm_basis = sph_harm_lookup.get(basis_type)
if sph_harm_basis is None:
raise ValueError("Invalid basis name.")
B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi)
L = -n * (n + 1)
invB = smooth_pinv(B, sqrt(smooth) * L)
sh = np.dot(sf, invB.T)
return sh
def sh_to_sf(sh, sphere, sh_order, basis_type=None):
"""Spherical harmonics (SH) to spherical function (SF).
Parameters
----------
sh : ndarray
SH coefficients representing a spherical function.
sphere : Sphere
The points on which to sample the spherical function.
sh_order : int
Maximum SH order of the SH representation. For `sh_order`, there will be
``(sh_order + 1) * (sh_order + 2) / 2`` SH coefficients.
basis_type : {None, 'mrtrix', 'fibernav'}
``None`` for the default dipy basis,
``mrtrix`` for the MRtrix basis, and
``fibernav`` for the FiberNavigator basis
(default ``None``).
Returns
-------
sf : ndarray
Spherical function values on the `sphere`.
"""
sph_harm_basis = sph_harm_lookup.get(basis_type)
if sph_harm_basis is None:
raise ValueError("Invalid basis name.")
B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi)
sf = np.dot(sh, B.T)
return sf
def sh_to_sf_matrix(sphere, sh_order, basis_type=None, return_inv=True, smooth=0):
""" Matrix that transforms Spherical harmonics (SH) to spherical
function (SF).
Parameters
----------
sphere : Sphere
The points on which to sample the spherical function.
sh_order : int
Maximum SH order of the SH representation. For `sh_order`, there will be
``(sh_order + 1) * (sh_order + 2) / 2`` SH coefficients.
basis_type : {None, 'mrtrix', 'fibernav'}
``None`` for the default dipy basis,
``mrtrix`` for the MRtrix basis, and
``fibernav`` for the FiberNavigator basis
(default ``None``).
return_inv : bool
If True then the inverse of the matrix is also returned
smooth : float, optional
Lambda-regularization in the SH fit (default 0.0).
Returns
-------
B : ndarray
Matrix that transforms spherical harmonics to spherical function
``sf = np.dot(sh, B)``.
invB : ndarray
Inverse of B.
"""
sph_harm_basis = sph_harm_lookup.get(basis_type)
if sph_harm_basis is None:
raise ValueError("Invalid basis name.")
B, m, n = sph_harm_basis(sh_order, sphere.theta, sphere.phi)
if return_inv:
L = -n * (n + 1)
invB = smooth_pinv(B, np.sqrt(smooth) * L)
return B.T, invB.T
return B.T
|
|
from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
# Python stdlib imports
import pytest
from openpyxl.compat import zip
# package imports
from .. named_range import split_named_range, NamedRange, NamedValue
from openpyxl.workbook.names.named_range import read_named_ranges
from openpyxl.utils.exceptions import NamedRangeException
from openpyxl.reader.excel import load_workbook
class DummyWS:
def __init__(self, title):
self.title = title
def __str__(self):
return self.title
class DummyWB:
def __init__(self, ws):
self.ws = ws
self.worksheets = [ws]
def __getitem__(self, key):
if key == self.ws.title:
return self.ws
@pytest.mark.parametrize("range_string",
[
"'My Sheet'!$D$8",
"Sheet1!$A$1",
"[1]Sheet1!$A$1",
"[1]!B2range",
])
def test_check_range(range_string):
from .. named_range import refers_to_range
assert refers_to_range(range_string) is True
@pytest.mark.parametrize("range_string, result",
[
("'My Sheet'!$D$8", [('My Sheet', '$D$8'), ]),
("Sheet1!$A$1", [('Sheet1', '$A$1')]),
("[1]Sheet1!$A$1", [('[1]Sheet1', '$A$1')]),
("[1]!B2range", [('[1]', '')]),
("Sheet1!$C$5:$C$7,Sheet1!$C$9:$C$11,Sheet1!$E$5:$E$7,Sheet1!$E$9:$E$11,Sheet1!$D$8",
[('Sheet1', '$C$5:$C$7'),
('Sheet1', '$C$9:$C$11'),
('Sheet1', '$E$5:$E$7'),
('Sheet1', '$E$9:$E$11'),
('Sheet1', '$D$8')
]),
])
def test_split(range_string, result):
assert list(split_named_range(range_string)) == result
@pytest.mark.parametrize("range_string, external",
[
("'My Sheet'!$D$8", False),
("Sheet1!$A$1", False),
("[1]Sheet1!$A$1", True),
("[1]!B2range", True),
])
def test_external_range(range_string, external):
from .. named_range import external_range
assert external_range(range_string) is external
def test_dict_interface():
xlrange = NamedValue("a range", "the value")
assert dict(xlrange) == {'name':"a range"}
def test_range_scope():
xlrange = NamedValue("Pi", 3.14)
xlrange.scope = 1
assert dict(xlrange) == {'name': 'Pi', 'localSheetId': 1}
def test_destinations_string():
ws = DummyWS('Sheet1')
xlrange = NamedRange("TRAP_2", [
(ws, '$C$5:$C$7'),
(ws, '$C$9:$C$11'),
(ws, '$E$5:$E$7'),
(ws, '$E$9:$E$11'),
(ws, '$D$8')
])
assert xlrange.value == "'Sheet1'!$C$5:$C$7,'Sheet1'!$C$9:$C$11,'Sheet1'!$E$5:$E$7,'Sheet1'!$E$9:$E$11,'Sheet1'!$D$8"
def test_split_no_quotes():
assert [('HYPOTHESES', '$B$3:$L$3'), ] == list(split_named_range('HYPOTHESES!$B$3:$L$3'))
def test_bad_range_name():
with pytest.raises(NamedRangeException):
list(split_named_range('HYPOTHESES$B$3'))
def test_range_name_worksheet_special_chars(datadir):
ws = DummyWS('My Sheeet with a , and \'')
datadir.chdir()
with open('workbook_namedrange.xml') as src:
content = src.read()
named_ranges = list(read_named_ranges(content, DummyWB(ws)))
assert len(named_ranges) == 1
assert isinstance(named_ranges[0], NamedRange)
assert [(ws, '$U$16:$U$24'), (ws, '$V$28:$V$36')] == named_ranges[0].destinations
def test_read_named_ranges(datadir):
ws = DummyWS('My Sheeet')
datadir.chdir()
with open("workbook.xml") as src:
content = src.read()
named_ranges = read_named_ranges(content, DummyWB(ws))
assert ["My Sheeet!$D$8"] == [str(range) for range in named_ranges]
def test_read_named_ranges_missing_sheet(datadir):
ws = DummyWS('NOT My Sheeet')
datadir.chdir()
with open("workbook.xml") as src:
content = src.read()
named_ranges = read_named_ranges(content, DummyWB(ws))
assert list(named_ranges) == []
def test_read_external_ranges(datadir):
datadir.chdir()
ws = DummyWS("Sheet1")
wb = DummyWB(ws)
with open("workbook_external_range.xml") as src:
xml = src.read()
named_ranges = list(read_named_ranges(xml, wb))
assert len(named_ranges) == 4
expected = [
("B1namedrange", "'Sheet1'!$A$1"),
("references_external_workbook", "[1]Sheet1!$A$1"),
("references_nr_in_ext_wb", "[1]!B2range"),
("references_other_named_range", "B1namedrange"),
]
for xlr, target in zip(named_ranges, expected):
assert (xlr.name, xlr.value) == target
ranges_counts = (
(4, 'TEST_RANGE'),
(3, 'TRAP_1'),
(13, 'TRAP_2')
)
@pytest.mark.parametrize("count, range_name", ranges_counts)
def test_oddly_shaped_named_ranges(datadir, count, range_name):
datadir.chdir()
wb = load_workbook('merge_range.xlsx')
ws = wb.worksheets[0]
assert len(ws.get_named_range(range_name)) == count
def test_merged_cells_named_range(datadir):
datadir.chdir()
wb = load_workbook('merge_range.xlsx')
ws = wb.worksheets[0]
cell = ws.get_named_range('TRAP_3')[0]
assert 'B15' == cell.coordinate
assert 10 == cell.value
def test_print_titles(Workbook):
wb = Workbook()
ws1 = wb.create_sheet()
ws2 = wb.create_sheet()
scope1 = ws1.parent.worksheets.index(ws1)
scope2 = ws2.parent.worksheets.index(ws2)
ws1.add_print_title(2)
ws2.add_print_title(3, rows_or_cols='cols')
def mystr(nr):
return ','.join(['%s!%s' % (sheet.title, name) for sheet, name in nr.destinations])
actual_named_ranges = set([(nr.name, nr.scope, mystr(nr)) for nr in wb.get_named_ranges()])
expected_named_ranges = set([('_xlnm.Print_Titles', scope1, 'Sheet1!$1:$2'),
('_xlnm.Print_Titles', scope2, 'Sheet2!$A:$C')])
assert(actual_named_ranges == expected_named_ranges)
@pytest.mark.usefixtures("datadir")
class TestNameRefersToValue:
def __init__(self, datadir):
datadir.join("genuine").chdir()
self.wb = load_workbook('NameWithValueBug.xlsx')
self.ws = self.wb["Sheet1"]
def test_has_ranges(self):
ranges = self.wb.get_named_ranges()
assert ['MyRef', 'MySheetRef', 'MySheetRef', 'MySheetValue', 'MySheetValue',
'MyValue'] == [range.name for range in ranges]
def test_workbook_has_normal_range(self):
normal_range = self.wb.get_named_range("MyRef")
assert normal_range.name == "MyRef"
assert normal_range.destinations == [(self.ws, '$A$1')]
assert normal_range.scope is None
def test_workbook_has_value_range(self):
value_range = self.wb.get_named_range("MyValue")
assert "MyValue" == value_range.name
assert "9.99" == value_range.value
def test_worksheet_range(self):
range = self.ws.get_named_range("MyRef")
assert range.coordinate == "A1"
def test_worksheet_range_error_on_value_range(self):
with pytest.raises(NamedRangeException):
self.ws.get_named_range("MyValue")
def test_handles_scope(self):
scoped = [
("MySheetRef", "Sheet1"), ("MySheetRef", "Sheet2"),
("MySheetValue", "Sheet1"), ("MySheetValue", "Sheet2"),
]
no_scoped = ["MyRef", "MyValue"]
ranges = self.wb.get_named_ranges()
assert [(r.name, r.scope.title) for r in ranges if r.scope is not None] == scoped
assert [r.name for r in ranges if r.scope is None] == no_scoped
def test_can_be_saved(self, tmpdir):
tmpdir.chdir()
FNAME = "foo.xlsx"
self.wb.save(FNAME)
wbcopy = load_workbook(FNAME)
ranges = wbcopy.get_named_ranges()
names = ["MyRef", "MySheetRef", "MySheetRef", "MySheetValue", "MySheetValue", "MyValue"]
assert [r.name for r in ranges] == names
values = ['3.33', '14.4', '9.99']
assert [r.value for r in ranges if hasattr(r, 'value')] == values
@pytest.mark.parametrize("value",
[
"OFFSET(rep!$AK$1,0,0,COUNT(rep!$AK$1),1)",
"VLOOKUP(Country!$E$3, Table_Currencies[#All], 2, 9)"
])
def test_formula_names(value):
from .. named_range import FORMULA_REGEX
assert FORMULA_REGEX.match(value)
@pytest.mark.parametrize("value",
[
"OFFSET(rep!$AK$1,0,0,COUNT(rep!$AK$1),1)",
"VLOOKUP(Country!$E$3, Table_Currencies[#All], 2, 9)"
])
def test_formula_not_range(value):
from .. named_range import refers_to_range
assert refers_to_range(value) is None
@pytest.mark.parametrize("value, result",
[
('_xlnm.Print_Titles', True),
('myname', False),
]
)
def test_discarded_ranges(value, result):
from ..named_range import DISCARDED_RANGES
m = DISCARDED_RANGES.match(value) is not None
assert m is result
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import subprocess
from typing import Optional
from pinotdb import connect
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.hooks.dbapi_hook import DbApiHook
class PinotAdminHook(BaseHook):
"""
This hook is a wrapper around the pinot-admin.sh script.
For now, only a small subset of its subcommands is implemented,
which are required to ingest offline data into Apache Pinot
(i.e., AddSchema, AddTable, CreateSegment, and UploadSegment).
Their command options are based on Pinot v0.1.0.
Unfortunately, as of v0.1.0, pinot-admin.sh always exits with
status code 0. To address this behavior, users can use the
pinot_admin_system_exit flag. If its value is set to false,
this hook evaluates the result based on the output message
instead of the status code. This behavior of Pinot is expected
to be improved in the next release, which will include the
following PR: https://github.com/apache/incubator-pinot/pull/4110
:param conn_id: The name of the connection to use.
:type conn_id: str
:param cmd_path: The filepath to the pinot-admin.sh executable
:type cmd_path: str
:param pinot_admin_system_exit: If true, the result is evaluated based on the status code.
Otherwise, the result is evaluated as a failure if "Error" or
"Exception" is in the output message.
:type pinot_admin_system_exit: bool
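A minimal usage sketch (the connection id shown is the default one; the
file paths are placeholders):

.. code-block:: python

    hook = PinotAdminHook(conn_id="pinot_admin_default",
                          pinot_admin_system_exit=True)
    hook.add_schema("/path/to/schema.json")
    hook.add_table("/path/to/table_config.json")
    hook.upload_segment("/path/to/segment_dir", table_name="myTable")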
"""
def __init__(self,
conn_id="pinot_admin_default",
cmd_path="pinot-admin.sh",
pinot_admin_system_exit=False):
super().__init__()
conn = self.get_connection(conn_id)
self.host = conn.host
self.port = str(conn.port)
self.cmd_path = conn.extra_dejson.get("cmd_path", cmd_path)
self.pinot_admin_system_exit = conn.extra_dejson.get("pinot_admin_system_exit",
pinot_admin_system_exit)
self.conn = conn
def get_conn(self):
return self.conn
def add_schema(self, schema_file: str, with_exec: Optional[bool] = True):
"""
Add a Pinot schema by running the AddSchema command
:param schema_file: Pinot schema file
:type schema_file: str
:param with_exec: Whether to pass the ``-exec`` flag so the command is executed immediately
:type with_exec: Optional[bool]
"""
cmd = ["AddSchema"]
cmd += ["-controllerHost", self.host]
cmd += ["-controllerPort", self.port]
cmd += ["-schemaFile", schema_file]
if with_exec:
cmd += ["-exec"]
self.run_cli(cmd)
def add_table(self, file_path: str, with_exec: Optional[bool] = True):
"""
Add a Pinot table by running the AddTable command
:param file_path: Pinot table configuration file
:type file_path: str
:param with_exec: Whether to pass the ``-exec`` flag so the command is executed immediately
:type with_exec: Optional[bool]
"""
cmd = ["AddTable"]
cmd += ["-controllerHost", self.host]
cmd += ["-controllerPort", self.port]
cmd += ["-filePath", file_path]
if with_exec:
cmd += ["-exec"]
self.run_cli(cmd)
# pylint: disable=too-many-arguments
def create_segment(self,
generator_config_file=None,
data_dir=None,
segment_format=None,
out_dir=None,
overwrite=None,
table_name=None,
segment_name=None,
time_column_name=None,
schema_file=None,
reader_config_file=None,
enable_star_tree_index=None,
star_tree_index_spec_file=None,
hll_size=None,
hll_columns=None,
hll_suffix=None,
num_threads=None,
post_creation_verification=None,
retry=None):
"""
Create a Pinot segment by running the CreateSegment command
"""
cmd = ["CreateSegment"]
if generator_config_file:
cmd += ["-generatorConfigFile", generator_config_file]
if data_dir:
cmd += ["-dataDir", data_dir]
if segment_format:
cmd += ["-format", segment_format]
if out_dir:
cmd += ["-outDir", out_dir]
if overwrite:
cmd += ["-overwrite", overwrite]
if table_name:
cmd += ["-tableName", table_name]
if segment_name:
cmd += ["-segmentName", segment_name]
if time_column_name:
cmd += ["-timeColumnName", time_column_name]
if schema_file:
cmd += ["-schemaFile", schema_file]
if reader_config_file:
cmd += ["-readerConfigFile", reader_config_file]
if enable_star_tree_index:
cmd += ["-enableStarTreeIndex", enable_star_tree_index]
if star_tree_index_spec_file:
cmd += ["-starTreeIndexSpecFile", star_tree_index_spec_file]
if hll_size:
cmd += ["-hllSize", hll_size]
if hll_columns:
cmd += ["-hllColumns", hll_columns]
if hll_suffix:
cmd += ["-hllSuffix", hll_suffix]
if num_threads:
cmd += ["-numThreads", num_threads]
if post_creation_verification:
cmd += ["-postCreationVerification", post_creation_verification]
if retry:
cmd += ["-retry", retry]
self.run_cli(cmd)
def upload_segment(self, segment_dir, table_name=None):
"""
Upload a segment by running the UploadSegment command
:param segment_dir: Local directory containing the segment to upload
:param table_name: Name of the table to push the segment to
:return:
"""
cmd = ["UploadSegment"]
cmd += ["-controllerHost", self.host]
cmd += ["-controllerPort", self.port]
cmd += ["-segmentDir", segment_dir]
if table_name:
cmd += ["-tableName", table_name]
self.run_cli(cmd)
def run_cli(self, cmd: list, verbose: Optional[bool] = True):
"""
Run command with pinot-admin.sh
:param cmd: The command, given as a list of arguments, to be run by the pinot-admin.sh script
:type cmd: list
:param verbose: Whether to log the command and its output
:type verbose: Optional[bool]
"""
command = [self.cmd_path]
command.extend(cmd)
env = None
if self.pinot_admin_system_exit:
env = os.environ.copy()
java_opts = "-Dpinot.admin.system.exit=true " + os.environ.get("JAVA_OPTS", "")
env.update({"JAVA_OPTS": java_opts})
if verbose:
self.log.info(" ".join(command))
sub_process = subprocess.Popen(
command,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
close_fds=True,
env=env)
stdout = ""
if sub_process.stdout:
for line in iter(sub_process.stdout.readline, b''):
stdout += line.decode("utf-8")
if verbose:
self.log.info(line.decode("utf-8").strip())
sub_process.wait()
# As of Pinot v0.1.0, either of "Error: ..." or "Exception caught: ..."
# is expected to be in the output messages. See:
# https://github.com/apache/incubator-pinot/blob/release-0.1.0/pinot-tools/src/main/java/org/apache/pinot/tools/admin/PinotAdministrator.java#L98-L101
if ((self.pinot_admin_system_exit and sub_process.returncode) or
("Error" in stdout or "Exception" in stdout)):
raise AirflowException(stdout)
return stdout
class PinotDbApiHook(DbApiHook):
"""
Connect to pinot db (https://github.com/apache/incubator-pinot) to issue pql
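Example (a sketch; it assumes a broker connection named
``pinot_broker_default`` is configured):

.. code-block:: python

    hook = PinotDbApiHook(pinot_broker_conn_id="pinot_broker_default")
    records = hook.get_records("SELECT COUNT(*) FROM myTable")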
"""
conn_name_attr = 'pinot_broker_conn_id'
default_conn_name = 'pinot_broker_default'
supports_autocommit = False
def get_conn(self):
"""
Establish a connection to pinot broker through pinot dbapi.
"""
conn = self.get_connection(self.pinot_broker_conn_id) # pylint: disable=no-member
pinot_broker_conn = connect(
host=conn.host,
port=conn.port,
path=conn.extra_dejson.get('endpoint', '/pql'),
scheme=conn.extra_dejson.get('schema', 'http')
)
self.log.info('Get the connection to pinot '
'broker on %s', conn.host)
return pinot_broker_conn
def get_uri(self):
"""
Get the connection uri for pinot broker.
e.g: http://localhost:9000/pql
"""
conn = self.get_connection(getattr(self, self.conn_name_attr))
host = conn.host
if conn.port is not None:
host += ':{port}'.format(port=conn.port)
conn_type = 'http' if not conn.conn_type else conn.conn_type
endpoint = conn.extra_dejson.get('endpoint', 'pql')
return '{conn_type}://{host}/{endpoint}'.format(
conn_type=conn_type, host=host, endpoint=endpoint)
def get_records(self, sql):
"""
Executes the sql and returns a set of records.
:param sql: the sql statement to be executed (str) or a list of
sql statements to execute
:type sql: str
"""
with self.get_conn() as cur:
cur.execute(sql)
return cur.fetchall()
def get_first(self, sql):
"""
Executes the sql and returns the first resulting row.
:param sql: the sql statement to be executed (str) or a list of
sql statements to execute
:type sql: str or list
"""
with self.get_conn() as cur:
cur.execute(sql)
return cur.fetchone()
def set_autocommit(self, conn, autocommit):
raise NotImplementedError()
def insert_rows(self, table, rows, target_fields=None, commit_every=1000):
raise NotImplementedError()
|
|
"""watershed.py - watershed algorithm
This module implements a watershed algorithm that apportions pixels into
marked basins. The algorithm uses a priority queue to hold the pixels
with the metric for the priority queue being pixel value, then the time
of entry into the queue - this settles ties in favor of the closest marker.
Some ideas taken from
Soille, "Automated Basin Delineation from Digital Elevation Models Using
Mathematical Morphology", Signal Processing 20 (1990) 171-182.
The most important insight in the paper is that entry time onto the queue
solves two problems: a pixel should be assigned to the neighbor with the
largest gradient or, if there is no gradient, pixels on a plateau should
be split between markers on opposite sides.
Originally part of CellProfiler, code licensed under both GPL and BSD licenses.
Website: http://www.cellprofiler.org
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2011 Broad Institute
All rights reserved.
Original author: Lee Kamentsky
"""
import numpy as np
from scipy import ndimage as ndi
from . import _watershed
from ..util import crop, regular_seeds
def _validate_inputs(image, markers, mask):
"""Ensure that all inputs to watershed have matching shapes and types.
Parameters
----------
image : array
The input image.
markers : int or array of int
The marker image.
mask : array, or None
A boolean mask, True where we want to compute the watershed.
Returns
-------
image, markers, mask : arrays
The validated and formatted arrays. Image will have dtype float64,
markers int32, and mask int8. If ``None`` was given for the mask,
it is a volume of all 1s.
Raises
------
ValueError
If the shapes of the given arrays don't match.
"""
if not isinstance(markers, (np.ndarray, list, tuple)):
# not array-like, assume int
markers = regular_seeds(image.shape, markers)
elif markers.shape != image.shape:
raise ValueError("Markers (shape %s) must have same shape "
"as image (shape %s)" % (markers.ndim, image.ndim))
if mask is not None and mask.shape != image.shape:
raise ValueError("mask must have same shape as image")
if mask is None:
# Use a complete `True` mask if none is provided
mask = np.ones(image.shape, bool)
return (image.astype(np.float64),
markers.astype(np.int32),
mask.astype(np.int8))
def _validate_connectivity(image_dim, connectivity, offset):
"""Convert any valid connectivity to a structuring element and offset.
Parameters
----------
image_dim : int
The number of dimensions of the input image.
connectivity : int, array, or None
The neighborhood connectivity. An integer is interpreted as in
``scipy.ndimage.generate_binary_structure``, as the maximum number
of orthogonal steps to reach a neighbor. An array is directly
interpreted as a structuring element and its shape is validated against
the input image shape. ``None`` is interpreted as a connectivity of 1.
offset : tuple of int, or None
The coordinates of the center of the structuring element.
Returns
-------
c_connectivity : array of bool
The structuring element corresponding to the input `connectivity`.
offset : array of int
The offset corresponding to the center of the structuring element.
Raises
------
ValueError:
If the image dimension and the connectivity or offset dimensions don't
match.
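Examples
--------
An integer connectivity of 1 gives the 4-connected cross in 2-D:
>>> struct, offset = _validate_connectivity(2, 1, None)
>>> struct.astype(int).tolist()
[[0, 1, 0], [1, 1, 1], [0, 1, 0]]
>>> offset.tolist()
[1, 1]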
"""
if connectivity is None:
connectivity = 1
if np.isscalar(connectivity):
c_connectivity = ndi.generate_binary_structure(image_dim, connectivity)
else:
c_connectivity = np.array(connectivity, bool)
if c_connectivity.ndim != image_dim:
raise ValueError("Connectivity dimension must be same as image")
if offset is None:
if any([x % 2 == 0 for x in c_connectivity.shape]):
raise ValueError("Connectivity array must have an unambiguous "
"center")
offset = np.array(c_connectivity.shape) // 2
return c_connectivity, offset
def _compute_neighbors(image, structure, offset):
"""Compute neighborhood as an array of linear offsets into the image.
These are sorted according to Euclidean distance from the center (given
by `offset`), ensuring that immediate neighbors are visited first.
"""
structure[tuple(offset)] = 0 # ignore the center; it's not a neighbor
locations = np.transpose(np.nonzero(structure))
sqdistances = np.sum((locations - offset)**2, axis=1)
neighborhood = (np.ravel_multi_index(locations.T, image.shape) -
np.ravel_multi_index(offset, image.shape)).astype(np.int32)
sorted_neighborhood = neighborhood[np.argsort(sqdistances)]
return sorted_neighborhood
def watershed(image, markers, connectivity=1, offset=None, mask=None,
compactness=0):
"""Find watershed basins in `image` flooded from given `markers`.
Parameters
----------
image: ndarray (2-D, 3-D, ...) of integers
Data array where the lowest value points are labeled first.
markers: int, or ndarray of int, same shape as `image`
The desired number of markers, or an array marking the basins with the
values to be assigned in the label matrix. Zero means not a marker.
connectivity: ndarray, optional
An array with the same number of dimensions as `image` whose
non-zero elements indicate neighbors for connection.
Following the scipy convention, default is a one-connected array of
the dimension of the image.
offset: array_like of shape image.ndim, optional
offset of the connectivity (one offset per dimension)
mask: ndarray of bools or 0s and 1s, optional
Array of same shape as `image`. Only points at which mask == True
will be labeled.
compactness : float, optional
Use compact watershed [3]_ with given compactness parameter.
Higher values result in more regularly-shaped watershed basins.
Returns
-------
out: ndarray
A labeled matrix of the same type and shape as markers
See also
--------
skimage.segmentation.random_walker: random walker segmentation
A segmentation algorithm based on anisotropic diffusion, usually
slower than the watershed but with good results on noisy data and
boundaries with holes.
Notes
-----
This function implements a watershed algorithm [1]_ [2]_ that apportions
pixels into marked basins. The algorithm uses a priority queue to hold
the pixels with the metric for the priority queue being pixel value, then
the time of entry into the queue - this settles ties in favor of the
closest marker.
Some ideas taken from
Soille, "Automated Basin Delineation from Digital Elevation Models Using
Mathematical Morphology", Signal Processing 20 (1990) 171-182
The most important insight in the paper is that entry time onto the queue
solves two problems: a pixel should be assigned to the neighbor with the
largest gradient or, if there is no gradient, pixels on a plateau should
be split between markers on opposite sides.
This implementation converts all arguments to specific, lowest common
denominator types, then passes these to a C algorithm.
Markers can be determined manually, or automatically using for example
the local minima of the gradient of the image, or the local maxima of the
distance function to the background for separating overlapping objects
(see example).
References
----------
.. [1] http://en.wikipedia.org/wiki/Watershed_%28image_processing%29
.. [2] http://cmm.ensmp.fr/~beucher/wtshed.html
.. [3] Peer Neubert & Peter Protzel (2014). Compact Watershed and
Preemptive SLIC: On Improving Trade-offs of Superpixel Segmentation
Algorithms. ICPR 2014, pp 996-1001. DOI:10.1109/ICPR.2014.181
https://www.tu-chemnitz.de/etit/proaut/forschung/rsrc/cws_pSLIC_ICPR.pdf
Examples
--------
The watershed algorithm is useful to separate overlapping objects.
We first generate an initial image with two overlapping circles:
>>> x, y = np.indices((80, 80))
>>> x1, y1, x2, y2 = 28, 28, 44, 52
>>> r1, r2 = 16, 20
>>> mask_circle1 = (x - x1)**2 + (y - y1)**2 < r1**2
>>> mask_circle2 = (x - x2)**2 + (y - y2)**2 < r2**2
>>> image = np.logical_or(mask_circle1, mask_circle2)
Next, we want to separate the two circles. We generate markers at the
maxima of the distance to the background:
>>> from scipy import ndimage as ndi
>>> distance = ndi.distance_transform_edt(image)
>>> from skimage.feature import peak_local_max
>>> local_maxi = peak_local_max(distance, labels=image,
... footprint=np.ones((3, 3)),
... indices=False)
>>> markers = ndi.label(local_maxi)[0]
Finally, we run the watershed on the image and markers:
>>> labels = watershed(-distance, markers, mask=image)
The algorithm works also for 3-D images, and can be used for example to
separate overlapping spheres.
"""
image, markers, mask = _validate_inputs(image, markers, mask)
connectivity, offset = _validate_connectivity(image.ndim, connectivity,
offset)
# pad the image, markers, and mask so that we can use the mask to
# keep from running off the edges
pad_width = [(p, p) for p in offset]
image = np.pad(image, pad_width, mode='constant')
mask = np.pad(mask, pad_width, mode='constant').ravel()
output = np.pad(markers, pad_width, mode='constant')
flat_neighborhood = _compute_neighbors(image, connectivity, offset)
marker_locations = np.flatnonzero(output).astype(np.int32)
image_strides = np.array(image.strides, dtype=np.int32) // image.itemsize
_watershed.watershed_raveled(image.ravel(),
marker_locations, flat_neighborhood,
mask, image_strides, compactness,
output.ravel())
output = crop(output, pad_width, copy=True)
return output
|
|
# Copyright 2013-2020 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import functools
import typing as T
from pathlib import Path
from .. import mlog
from .. import mesonlib
from ..environment import Environment
from .base import DependencyException, ExternalDependency, PkgConfigDependency
from .misc import threads_factory
# On windows 3 directory layouts are supported:
# * The default layout (versioned) installed:
# - $BOOST_ROOT/include/boost-x_x/boost/*.hpp
# - $BOOST_ROOT/lib/*.lib
# * The non-default layout (system) installed:
# - $BOOST_ROOT/include/boost/*.hpp
# - $BOOST_ROOT/lib/*.lib
# * The pre-built binaries from sf.net:
# - $BOOST_ROOT/boost/*.hpp
# - $BOOST_ROOT/lib<arch>-<compiler>/*.lib where arch=32/64 and compiler=msvc-14.1
#
# Note that we should also try to support:
# mingw-w64 / Windows : libboost_<module>-mt.a (location = <prefix>/mingw64/lib/)
# libboost_<module>-mt.dll.a
#
# The `modules` argument accepts library names. This is because every module that
# has libraries to link against also has multiple options regarding how to
# link. See for example:
# * http://www.boost.org/doc/libs/1_65_1/libs/test/doc/html/boost_test/usage_variants.html
# * http://www.boost.org/doc/libs/1_65_1/doc/html/stacktrace/configuration_and_build.html
# * http://www.boost.org/doc/libs/1_65_1/libs/math/doc/html/math_toolkit/main_tr1.html
# **On Unix**, official packaged versions of boost libraries follow the following schemes:
#
# Linux / Debian: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Linux / Red Hat: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Linux / OpenSuse: libboost_<module>.so -> libboost_<module>.so.1.66.0
# Win / Cygwin: libboost_<module>.dll.a (location = /usr/lib)
# libboost_<module>.a
# cygboost_<module>_1_64.dll (location = /usr/bin)
# Win / VS: boost_<module>-vc<ver>-mt[-gd]-<arch>-1_67.dll (location = C:/local/boost_1_67_0)
# Mac / homebrew: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /usr/local/lib)
# Mac / macports: libboost_<module>.dylib + libboost_<module>-mt.dylib (location = /opt/local/lib)
#
# It's not clear that any other abi tags (e.g. -gd) are used in official packages.
#
# On Linux systems, boost libs have multithreading support enabled, but without the -mt tag.
#
# Boost documentation recommends using complex abi tags like "-lboost_regex-gcc34-mt-d-1_36".
# (See http://www.boost.org/doc/libs/1_66_0/more/getting_started/unix-variants.html#library-naming)
# However, it's not clear that any Unix distribution follows this scheme.
# Furthermore, the boost documentation for unix above uses examples from windows like
# "libboost_regex-vc71-mt-d-x86-1_34.lib", so apparently the abi tags may be more aimed at windows.
#
# We follow the following strategy for finding modules:
# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
# B) Foreach candidate
# 1. Look for the boost headers (boost/version.hpp)
# 2. Find all boost libraries
# 2.1 Add all libraries in lib*
# 2.2 Filter out non boost libraries
# 2.3 Filter the remaining libraries based on the meson requirements (static/shared, etc.)
# 2.4 Ensure that all libraries have the same boost tag (and are thus compatible)
# 3. Select the libraries matching the requested modules
@functools.total_ordering
class BoostIncludeDir():
def __init__(self, path: Path, version_int: int):
self.path = path
self.version_int = version_int
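# Decode BOOST_VERSION-style integers: e.g. version_int 107100 gives
# version '1.71.0' and version_lib '1_71' (illustrative values only).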
major = int(self.version_int / 100000)
minor = int((self.version_int / 100) % 1000)
patch = int(self.version_int % 100)
self.version = '{}.{}.{}'.format(major, minor, patch)
self.version_lib = '{}_{}'.format(major, minor)
def __repr__(self) -> str:
return '<BoostIncludeDir: {} -- {}>'.format(self.version, self.path)
def __lt__(self, other: T.Any) -> bool:
if isinstance(other, BoostIncludeDir):
return (self.version_int, self.path) < (other.version_int, other.path)
return NotImplemented
@functools.total_ordering
class BoostLibraryFile():
# Python libraries are special because of the included
# minor version in the module name.
boost_python_libs = ['boost_python', 'boost_numpy']
reg_python_mod_split = re.compile(r'(boost_[a-zA-Z]+)([0-9]*)')
reg_abi_tag = re.compile(r'^s?g?y?d?p?n?$')
reg_ver_tag = re.compile(r'^[0-9_]+$')
def __init__(self, path: Path):
self.path = path
self.name = self.path.name
# Initialize default properties
self.static = False
self.toolset = ''
self.arch = ''
self.version_lib = ''
self.mt = True
self.runtime_static = False
self.runtime_debug = False
self.python_debug = False
self.debug = False
self.stlport = False
self.deprecated_iostreams = False
# Post process the library name
name_parts = self.name.split('.')
self.basename = name_parts[0]
self.suffixes = name_parts[1:]
self.vers_raw = [x for x in self.suffixes if x.isdigit()]
self.suffixes = [x for x in self.suffixes if not x.isdigit()]
self.nvsuffix = '.'.join(self.suffixes) # Used for detecting the library type
self.nametags = self.basename.split('-')
self.mod_name = self.nametags[0]
if self.mod_name.startswith('lib'):
self.mod_name = self.mod_name[3:]
# Set library version if possible
if len(self.vers_raw) >= 2:
self.version_lib = '{}_{}'.format(self.vers_raw[0], self.vers_raw[1])
# Detecting library type
if self.nvsuffix in ['so', 'dll', 'dll.a', 'dll.lib', 'dylib']:
self.static = False
elif self.nvsuffix in ['a', 'lib']:
self.static = True
else:
raise DependencyException('Unable to process library extension "{}" ({})'.format(self.nvsuffix, self.path))
# boost_.lib is the dll import library
if self.basename.startswith('boost_') and self.nvsuffix == 'lib':
self.static = False
# Process tags
tags = self.nametags[1:]
# Filter out the python version tag and fix modname
if self.is_python_lib():
tags = self.fix_python_name(tags)
if not tags:
return
# Without any tags, mt is assumed; however, an absence of mt in the name
# with tags present indicates that the lib was built without mt support
self.mt = False
for i in tags:
if i == 'mt':
self.mt = True
elif len(i) == 3 and i[1:] in ['32', '64']:
self.arch = i
elif BoostLibraryFile.reg_abi_tag.match(i):
self.runtime_static = 's' in i
self.runtime_debug = 'g' in i
self.python_debug = 'y' in i
self.debug = 'd' in i
self.stlport = 'p' in i
self.deprecated_iostreams = 'n' in i
elif BoostLibraryFile.reg_ver_tag.match(i):
self.version_lib = i
else:
self.toolset = i
def __repr__(self) -> str:
return '<LIB: {} {:<32} {}>'.format(self.abitag, self.mod_name, self.path)
def __lt__(self, other: T.Any) -> bool:
if isinstance(other, BoostLibraryFile):
return (
self.mod_name, self.static, self.version_lib, self.arch,
not self.mt, not self.runtime_static,
not self.debug, self.runtime_debug, self.python_debug,
self.stlport, self.deprecated_iostreams,
self.name,
) < (
other.mod_name, other.static, other.version_lib, other.arch,
not other.mt, not other.runtime_static,
not other.debug, other.runtime_debug, other.python_debug,
other.stlport, other.deprecated_iostreams,
other.name,
)
return NotImplemented
def __eq__(self, other: T.Any) -> bool:
if isinstance(other, BoostLibraryFile):
return self.name == other.name
return NotImplemented
def __hash__(self) -> int:
return hash(self.name)
@property
def abitag(self) -> str:
abitag = ''
abitag += 'S' if self.static else '-'
abitag += 'M' if self.mt else '-'
abitag += ' '
abitag += 's' if self.runtime_static else '-'
abitag += 'g' if self.runtime_debug else '-'
abitag += 'y' if self.python_debug else '-'
abitag += 'd' if self.debug else '-'
abitag += 'p' if self.stlport else '-'
abitag += 'n' if self.deprecated_iostreams else '-'
abitag += ' ' + (self.arch or '???')
abitag += ' ' + (self.toolset or '?')
abitag += ' ' + (self.version_lib or 'x_xx')
return abitag
def is_boost(self) -> bool:
return any([self.name.startswith(x) for x in ['libboost_', 'boost_']])
def is_python_lib(self) -> bool:
return any([self.mod_name.startswith(x) for x in BoostLibraryFile.boost_python_libs])
def fix_python_name(self, tags: T.List[str]) -> T.List[str]:
# Handle the boost_python naming madness.
# See https://github.com/mesonbuild/meson/issues/4788 for some distro
# specific naming variations.
other_tags = [] # type: T.List[str]
# Split the current modname into the base name and the version
m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
cur_name = m_cur.group(1)
cur_vers = m_cur.group(2)
# Update the current version string if the new version string is longer
def update_vers(new_vers: str) -> None:
nonlocal cur_vers
new_vers = new_vers.replace('_', '')
new_vers = new_vers.replace('.', '')
if not new_vers.isdigit():
return
if len(new_vers) > len(cur_vers):
cur_vers = new_vers
for i in tags:
if i.startswith('py'):
update_vers(i[2:])
elif i.isdigit():
update_vers(i)
elif len(i) >= 3 and i[0].isdigit() and i[2].isdigit() and i[1] == '.':
update_vers(i)
else:
other_tags += [i]
self.mod_name = cur_name + cur_vers
return other_tags
def mod_name_matches(self, mod_name: str) -> bool:
if self.mod_name == mod_name:
return True
if not self.is_python_lib():
return False
m_cur = BoostLibraryFile.reg_python_mod_split.match(self.mod_name)
m_arg = BoostLibraryFile.reg_python_mod_split.match(mod_name)
if not m_cur or not m_arg:
return False
if m_cur.group(1) != m_arg.group(1):
return False
cur_vers = m_cur.group(2)
arg_vers = m_arg.group(2)
# Always assume python 2 if nothing is specified
if not arg_vers:
arg_vers = '2'
return cur_vers.startswith(arg_vers)
def version_matches(self, version_lib: str) -> bool:
# If no version tag is present, assume that it fits
if not self.version_lib or not version_lib:
return True
return self.version_lib == version_lib
def arch_matches(self, arch: str) -> bool:
# If no arch tag is present, assume that it fits
if not self.arch or not arch:
return True
return self.arch == arch
def vscrt_matches(self, vscrt: str) -> bool:
# If no vscrt tag present, assume that it fits ['/MD', '/MDd', '/MT', '/MTd']
if not vscrt:
return True
if vscrt in ['/MD', '-MD']:
return not self.runtime_static and not self.runtime_debug
elif vscrt in ['/MDd', '-MDd']:
return not self.runtime_static and self.runtime_debug
elif vscrt in ['/MT', '-MT']:
return (self.runtime_static or not self.static) and not self.runtime_debug
elif vscrt in ['/MTd', '-MTd']:
return (self.runtime_static or not self.static) and self.runtime_debug
mlog.warning('Boost: unknown vscrt tag {}. This may cause the compilation to fail. Please consider reporting this as a bug.'.format(vscrt), once=True)
return True
def get_compiler_args(self) -> T.List[str]:
args = [] # type: T.List[str]
if self.mod_name in boost_libraries:
libdef = boost_libraries[self.mod_name] # type: BoostLibrary
if self.static:
args += libdef.static
else:
args += libdef.shared
if self.mt:
args += libdef.multi
else:
args += libdef.single
return args
def get_link_args(self) -> T.List[str]:
return [self.path.as_posix()]
class BoostDependency(ExternalDependency):
def __init__(self, environment: Environment, kwargs):
super().__init__('boost', environment, kwargs, language='cpp')
self.debug = environment.coredata.get_builtin_option('buildtype').startswith('debug')
self.multithreading = kwargs.get('threading', 'multi') == 'multi'
self.boost_root = None
self.explicit_static = 'static' in kwargs
# Extract and validate modules
self.modules = mesonlib.extract_as_list(kwargs, 'modules') # type: T.List[str]
for i in self.modules:
if not isinstance(i, str):
raise DependencyException('Boost module argument is not a string.')
if i.startswith('boost_'):
raise DependencyException('Boost modules must be passed without the boost_ prefix')
self.modules_found = [] # type: T.List[str]
self.modules_missing = [] # type: T.List[str]
# Do we need threads?
if 'thread' in self.modules:
if not self._add_sub_dependency(threads_factory(environment, self.for_machine, {})):
self.is_found = False
return
# Try figuring out the architecture tag
self.arch = environment.machines[self.for_machine].cpu_family
self.arch = boost_arch_map.get(self.arch, None)
# Prefer BOOST_INCLUDEDIR and BOOST_LIBRARYDIR if both are set
boost_manual_env = [x in os.environ for x in ['BOOST_INCLUDEDIR', 'BOOST_LIBRARYDIR']]
if all(boost_manual_env):
inc_dir = Path(os.environ['BOOST_INCLUDEDIR'])
lib_dir = Path(os.environ['BOOST_LIBRARYDIR'])
mlog.debug('Trying to find boost with:')
mlog.debug(' - BOOST_INCLUDEDIR = {}'.format(inc_dir))
mlog.debug(' - BOOST_LIBRARYDIR = {}'.format(lib_dir))
boost_inc_dir = None
for j in [inc_dir / 'version.hpp', inc_dir / 'boost' / 'version.hpp']:
if j.is_file():
boost_inc_dir = self._include_dir_from_version_header(j)
break
if not boost_inc_dir:
self.is_found = False
return
self.is_found = self.run_check([boost_inc_dir], [lib_dir])
return
elif any(boost_manual_env):
mlog.warning('Both BOOST_INCLUDEDIR *and* BOOST_LIBRARYDIR have to be set (one is not enough). Ignoring.')
# A) Detect potential boost root directories (uses also BOOST_ROOT env var)
roots = self.detect_roots()
roots = list(mesonlib.OrderedSet(roots))
# B) Foreach candidate
for j in roots:
# 1. Look for the boost headers (boost/version.hpp)
mlog.debug('Checking potential boost root {}'.format(j.as_posix()))
inc_dirs = self.detect_inc_dirs(j)
inc_dirs = sorted(inc_dirs, reverse=True) # Prefer the newer versions
# Early abort when boost is not found
if not inc_dirs:
continue
lib_dirs = self.detect_lib_dirs(j)
self.is_found = self.run_check(inc_dirs, lib_dirs)
if self.is_found:
self.boost_root = j
break
def run_check(self, inc_dirs: T.List[BoostIncludeDir], lib_dirs: T.List[Path]) -> bool:
mlog.debug(' - potential library dirs: {}'.format([x.as_posix() for x in lib_dirs]))
mlog.debug(' - potential include dirs: {}'.format([x.path.as_posix() for x in inc_dirs]))
# 2. Find all boost libraries
libs = [] # type: T.List[BoostLibraryFile]
for i in lib_dirs:
libs = self.detect_libraries(i)
if libs:
mlog.debug(' - found boost library dir: {}'.format(i))
# mlog.debug(' - raw library list:')
# for j in libs:
# mlog.debug(' - {}'.format(j))
break
libs = sorted(set(libs))
modules = ['boost_' + x for x in self.modules]
for inc in inc_dirs:
mlog.debug(' - found boost {} include dir: {}'.format(inc.version, inc.path))
f_libs = self.filter_libraries(libs, inc.version_lib)
mlog.debug(' - filtered library list:')
for j in f_libs:
mlog.debug(' - {}'.format(j))
# 3. Select the libraries matching the requested modules
not_found = [] # type: T.List[str]
selected_modules = [] # type: T.List[BoostLibraryFile]
for mod in modules:
found = False
for l in f_libs:
if l.mod_name_matches(mod):
selected_modules += [l]
found = True
break
if not found:
not_found += [mod]
# log the result
mlog.debug(' - found:')
comp_args = [] # type: T.List[str]
link_args = [] # type: T.List[str]
for j in selected_modules:
c_args = j.get_compiler_args()
l_args = j.get_link_args()
mlog.debug(' - {:<24} link={} comp={}'.format(j.mod_name, str(l_args), str(c_args)))
comp_args += c_args
link_args += l_args
comp_args = list(set(comp_args))
link_args = list(set(link_args))
self.modules_found = [x.mod_name for x in selected_modules]
self.modules_found = [x[6:] for x in self.modules_found]
self.modules_found = sorted(set(self.modules_found))
self.modules_missing = not_found
self.modules_missing = [x[6:] for x in self.modules_missing]
self.modules_missing = sorted(set(self.modules_missing))
# if we found all modules we are done
if not not_found:
self.version = inc.version
self.compile_args = ['-I' + inc.path.as_posix()]
self.compile_args += comp_args
self.compile_args += self._extra_compile_args()
self.compile_args = list(mesonlib.OrderedSet(self.compile_args))
self.link_args = link_args
mlog.debug(' - final compile args: {}'.format(self.compile_args))
mlog.debug(' - final link args: {}'.format(self.link_args))
return True
# in case we missed something log it and try again
mlog.debug(' - NOT found:')
for mod in not_found:
mlog.debug(' - {}'.format(mod))
return False
def detect_inc_dirs(self, root: Path) -> T.List[BoostIncludeDir]:
candidates = [] # type: T.List[Path]
inc_root = root / 'include'
candidates += [root / 'boost']
candidates += [inc_root / 'boost']
if inc_root.is_dir():
for i in inc_root.iterdir():
if not i.is_dir() or not i.name.startswith('boost-'):
continue
candidates += [i / 'boost']
candidates = [x for x in candidates if x.is_dir()]
candidates = [x / 'version.hpp' for x in candidates]
candidates = [x for x in candidates if x.exists()]
return [self._include_dir_from_version_header(x) for x in candidates]
def detect_lib_dirs(self, root: Path) -> T.List[Path]:
# First check the system library paths. Only consider those within the
# given root path
system_dirs_t = self.clib_compiler.get_library_dirs(self.env)
system_dirs = [Path(x) for x in system_dirs_t]
system_dirs = [x.resolve() for x in system_dirs if x.exists()]
system_dirs = [x for x in system_dirs if mesonlib.path_is_in_root(x, root)]
system_dirs = list(mesonlib.OrderedSet(system_dirs))
if system_dirs:
return system_dirs
# No system library paths were found --> fall back to manually looking
# for library dirs in root
dirs = [] # type: T.List[Path]
subdirs = [] # type: T.List[Path]
for i in root.iterdir():
if i.is_dir() and i.name.startswith('lib'):
dirs += [i]
# Some distros put libraries not directly inside /usr/lib but in /usr/lib/x86_64-linux-gnu
for i in dirs:
for j in i.iterdir():
if j.is_dir() and j.name.endswith('-linux-gnu'):
subdirs += [j]
# Filter out paths that don't match the target arch to avoid finding
# the wrong libraries. See https://github.com/mesonbuild/meson/issues/7110
if not self.arch:
return dirs + subdirs
arch_list_32 = ['32', 'i386']
arch_list_64 = ['64']
raw_list = dirs + subdirs
no_arch = [x for x in raw_list if not any([y in x.name for y in arch_list_32 + arch_list_64])]
matching_arch = [] # type: T.List[Path]
if '32' in self.arch:
matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_32])]
elif '64' in self.arch:
matching_arch = [x for x in raw_list if any([y in x.name for y in arch_list_64])]
return sorted(matching_arch) + sorted(no_arch)
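# Illustrative example (hypothetical layout): if the compiler reports no library
# dirs inside root '/usr', the manual scan could collect dirs = ['/usr/lib'] and
# subdirs = ['/usr/lib/x86_64-linux-gnu']; with self.arch == 'x64' the subdir
# matches ('64' appears in its name) and is returned before the arch-neutral '/usr/lib'.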
def filter_libraries(self, libs: T.List[BoostLibraryFile], lib_vers: str) -> T.List[BoostLibraryFile]:
# MSVC is very picky with the library tags
vscrt = ''
try:
crt_val = self.env.coredata.base_options['b_vscrt'].value
buildtype = self.env.coredata.builtins['buildtype'].value
vscrt = self.clib_compiler.get_crt_compile_args(crt_val, buildtype)[0]
except (KeyError, IndexError, AttributeError):
pass
libs = [x for x in libs if x.static == self.static or not self.explicit_static]
libs = [x for x in libs if x.mt == self.multithreading]
libs = [x for x in libs if x.version_matches(lib_vers)]
libs = [x for x in libs if x.arch_matches(self.arch)]
libs = [x for x in libs if x.vscrt_matches(vscrt)]
libs = [x for x in libs if x.nvsuffix != 'dll'] # Only link to import libraries
# Only filter by debug when we are building in release mode. Debug
# libraries are automatically preferred through sorting otherwise.
if not self.debug:
libs = [x for x in libs if not x.debug]
# Take the abitag from the first library and filter by it. This
# ensures that we have a set of libraries that are always compatible.
if not libs:
return []
abitag = libs[0].abitag
libs = [x for x in libs if x.abitag == abitag]
return libs
def detect_libraries(self, libdir: Path) -> T.List[BoostLibraryFile]:
libs = [] # type: T.List[BoostLibraryFile]
for i in libdir.iterdir():
if not i.is_file() or i.is_symlink():
continue
if not any([i.name.startswith(x) for x in ['libboost_', 'boost_']]):
continue
libs += [BoostLibraryFile(i)]
return [x for x in libs if x.is_boost()] # Filter out non-boost libraries
def detect_roots(self) -> T.List[Path]:
roots = [] # type: T.List[Path]
# Add roots from the environment
for i in ['BOOST_ROOT', 'BOOSTROOT']:
if i in os.environ:
raw_paths = os.environ[i].split(os.pathsep)
paths = [Path(x) for x in raw_paths]
if paths and any([not x.is_absolute() for x in paths]):
raise DependencyException('Paths in {} must be absolute'.format(i))
roots += paths
return roots # Do not add system paths if BOOST_ROOT is present
# Try getting the BOOST_ROOT from a boost.pc if it exists. This primarily
# allows BoostDependency to find boost from Conan. See #5438
try:
boost_pc = PkgConfigDependency('boost', self.env, {'required': False})
if boost_pc.found():
boost_root = boost_pc.get_pkgconfig_variable('prefix', {'default': None})
if boost_root:
roots += [Path(boost_root)]
except DependencyException:
pass
# Add roots from system paths
inc_paths = [Path(x) for x in self.clib_compiler.get_default_include_dirs()]
inc_paths = [x.parent for x in inc_paths if x.exists()]
inc_paths = [x.resolve() for x in inc_paths]
roots += inc_paths
# Add system paths
if self.env.machines[self.for_machine].is_windows():
# Where boost built from source actually installs it
c_root = Path('C:/Boost')
if c_root.is_dir():
roots += [c_root]
# Where boost documentation says it should be
prog_files = Path('C:/Program Files/boost')
# Where boost prebuilt binaries are
local_boost = Path('C:/local')
candidates = [] # type: T.List[Path]
if prog_files.is_dir():
candidates += [*prog_files.iterdir()]
if local_boost.is_dir():
candidates += [*local_boost.iterdir()]
roots += [x for x in candidates if x.name.lower().startswith('boost') and x.is_dir()]
else:
tmp = [] # type: T.List[Path]
# Homebrew
brew_boost = Path('/usr/local/Cellar/boost')
if brew_boost.is_dir():
tmp += [x for x in brew_boost.iterdir()]
# Add some default system paths
tmp += [Path('/opt/local')]
tmp += [Path('/usr/local/opt/boost')]
tmp += [Path('/usr/local')]
tmp += [Path('/usr')]
# Cleanup paths
tmp = [x for x in tmp if x.is_dir()]
tmp = [x.resolve() for x in tmp]
roots += tmp
return roots
def log_details(self) -> str:
res = ''
if self.modules_found:
res += 'found: ' + ', '.join(self.modules_found)
if self.modules_missing:
if res:
res += ' | '
res += 'missing: ' + ', '.join(self.modules_missing)
return res
def log_info(self) -> str:
if self.boost_root:
return self.boost_root.as_posix()
return ''
def _include_dir_from_version_header(self, hfile: Path) -> BoostIncludeDir:
# Extract the version with a regex. Using clib_compiler.get_define would
# also work, however, this is slower (since the compiler has to be
# invoked) and overkill since the layout of the header is always the same.
assert hfile.exists()
raw = hfile.read_text()
m = re.search(r'#define\s+BOOST_VERSION\s+([0-9]+)', raw)
if not m:
mlog.debug('Failed to extract version information from {}'.format(hfile))
return BoostIncludeDir(hfile.parents[1], 0)
return BoostIncludeDir(hfile.parents[1], int(m.group(1)))
def _extra_compile_args(self) -> T.List[str]:
# BOOST_ALL_DYN_LINK should not be required with the known defines below
return ['-DBOOST_ALL_NO_LIB'] # Disable automatic linking
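# A minimal usage sketch from a meson.build file (illustrative only; the module
# names are just an example):
#
#   boost_dep = dependency('boost', modules : ['filesystem', 'system'], static : false)
#   executable('demo', 'main.cpp', dependencies : boost_dep)
#
# The 'modules' keyword maps onto self.modules above (without the 'boost_'
# prefix) and 'static' drives the static/shared filtering in filter_libraries().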
# See https://www.boost.org/doc/libs/1_72_0/more/getting_started/unix-variants.html#library-naming
# See https://mesonbuild.com/Reference-tables.html#cpu-families
boost_arch_map = {
'aarch64': 'a64',
'arc': 'a32',
'arm': 'a32',
'ia64': 'i64',
'mips': 'm32',
'mips64': 'm64',
'ppc': 'p32',
'ppc64': 'p64',
'sparc': 's32',
'sparc64': 's64',
'x86': 'x32',
'x86_64': 'x64',
}
#### ---- BEGIN GENERATED ---- ####
# #
# Generated with tools/boost_names.py:
# - boost version: 1.73.0
# - modules found: 159
# - libraries found: 43
#
class BoostLibrary():
def __init__(self, name: str, shared: T.List[str], static: T.List[str], single: T.List[str], multi: T.List[str]):
self.name = name
self.shared = shared
self.static = static
self.single = single
self.multi = multi
class BoostModule():
def __init__(self, name: str, key: str, desc: str, libs: T.List[str]):
self.name = name
self.key = key
self.desc = desc
self.libs = libs
# dict of all known libraries with additional compile options
boost_libraries = {
'boost_atomic': BoostLibrary(
name='boost_atomic',
shared=['-DBOOST_ATOMIC_DYN_LINK=1'],
static=['-DBOOST_ATOMIC_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_chrono': BoostLibrary(
name='boost_chrono',
shared=['-DBOOST_CHRONO_DYN_LINK=1'],
static=['-DBOOST_CHRONO_STATIC_LINK=1'],
single=['-DBOOST_CHRONO_THREAD_DISABLED'],
multi=[],
),
'boost_container': BoostLibrary(
name='boost_container',
shared=['-DBOOST_CONTAINER_DYN_LINK=1'],
static=['-DBOOST_CONTAINER_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_context': BoostLibrary(
name='boost_context',
shared=['-DBOOST_CONTEXT_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_contract': BoostLibrary(
name='boost_contract',
shared=['-DBOOST_CONTRACT_DYN_LINK'],
static=['-DBOOST_CONTRACT_STATIC_LINK'],
single=['-DBOOST_CONTRACT_DISABLE_THREADS'],
multi=[],
),
'boost_coroutine': BoostLibrary(
name='boost_coroutine',
shared=['-DBOOST_COROUTINES_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_date_time': BoostLibrary(
name='boost_date_time',
shared=['-DBOOST_DATE_TIME_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_exception': BoostLibrary(
name='boost_exception',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_fiber': BoostLibrary(
name='boost_fiber',
shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_fiber_numa': BoostLibrary(
name='boost_fiber_numa',
shared=['-DBOOST_FIBERS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_filesystem': BoostLibrary(
name='boost_filesystem',
shared=['-DBOOST_FILESYSTEM_DYN_LINK=1'],
static=['-DBOOST_FILESYSTEM_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_graph': BoostLibrary(
name='boost_graph',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_iostreams': BoostLibrary(
name='boost_iostreams',
shared=['-DBOOST_IOSTREAMS_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_locale': BoostLibrary(
name='boost_locale',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_log': BoostLibrary(
name='boost_log',
shared=['-DBOOST_LOG_DYN_LINK=1'],
static=[],
single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_log_setup': BoostLibrary(
name='boost_log_setup',
shared=['-DBOOST_LOG_SETUP_DYN_LINK=1'],
static=[],
single=['-DBOOST_LOG_NO_THREADS'],
multi=[],
),
'boost_math_c99': BoostLibrary(
name='boost_math_c99',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99f': BoostLibrary(
name='boost_math_c99f',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_c99l': BoostLibrary(
name='boost_math_c99l',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1': BoostLibrary(
name='boost_math_tr1',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1f': BoostLibrary(
name='boost_math_tr1f',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_math_tr1l': BoostLibrary(
name='boost_math_tr1l',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_mpi': BoostLibrary(
name='boost_mpi',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_nowide': BoostLibrary(
name='boost_nowide',
shared=['-DBOOST_NOWIDE_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_prg_exec_monitor': BoostLibrary(
name='boost_prg_exec_monitor',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_program_options': BoostLibrary(
name='boost_program_options',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_random': BoostLibrary(
name='boost_random',
shared=['-DBOOST_RANDOM_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_regex': BoostLibrary(
name='boost_regex',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_serialization': BoostLibrary(
name='boost_serialization',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_addr2line': BoostLibrary(
name='boost_stacktrace_addr2line',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_backtrace': BoostLibrary(
name='boost_stacktrace_backtrace',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_basic': BoostLibrary(
name='boost_stacktrace_basic',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_noop': BoostLibrary(
name='boost_stacktrace_noop',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg': BoostLibrary(
name='boost_stacktrace_windbg',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_stacktrace_windbg_cached': BoostLibrary(
name='boost_stacktrace_windbg_cached',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_system': BoostLibrary(
name='boost_system',
shared=['-DBOOST_SYSTEM_DYN_LINK=1'],
static=['-DBOOST_SYSTEM_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_test_exec_monitor': BoostLibrary(
name='boost_test_exec_monitor',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_thread': BoostLibrary(
name='boost_thread',
shared=['-DBOOST_THREAD_BUILD_DLL=1', '-DBOOST_THREAD_USE_DLL=1'],
static=['-DBOOST_THREAD_BUILD_LIB=1', '-DBOOST_THREAD_USE_LIB=1'],
single=[],
multi=[],
),
'boost_timer': BoostLibrary(
name='boost_timer',
shared=['-DBOOST_TIMER_DYN_LINK=1'],
static=['-DBOOST_TIMER_STATIC_LINK=1'],
single=[],
multi=[],
),
'boost_type_erasure': BoostLibrary(
name='boost_type_erasure',
shared=['-DBOOST_TYPE_ERASURE_DYN_LINK'],
static=[],
single=[],
multi=[],
),
'boost_unit_test_framework': BoostLibrary(
name='boost_unit_test_framework',
shared=['-DBOOST_TEST_DYN_LINK=1'],
static=[],
single=[],
multi=[],
),
'boost_wave': BoostLibrary(
name='boost_wave',
shared=[],
static=[],
single=[],
multi=[],
),
'boost_wserialization': BoostLibrary(
name='boost_wserialization',
shared=[],
static=[],
single=[],
multi=[],
),
}
# #
#### ---- END GENERATED ---- ####
|
|
#!/usr/bin/env python
#
# Copyright 2005 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for Google Test test filters.
A user can specify which test(s) in a Google Test program to run via either
the GTEST_FILTER environment variable or the --gtest_filter flag.
This script tests such functionality by invoking
gtest_filter_unittest_ (a program written with Google Test) with different
environments and command line flags.
Note that test sharding may also influence which tests are filtered. Therefore,
we test that here also.
"""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import re
try:
from sets import Set as set # For Python 2.3 compatibility
except ImportError:
pass
import sys
import gtest_test_utils
# Constants.
# Checks if this platform can pass empty environment variables to child
# processes. We set an env variable to an empty string and invoke a python
# script in a subprocess to print whether the variable is STILL in
# os.environ. We then use 'eval' to parse the child's output so that an
# exception is thrown if the input is anything other than 'True' or 'False'.
os.environ['EMPTY_VAR'] = ''
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print(\'EMPTY_VAR\' in os.environ)'])
CAN_PASS_EMPTY_ENV = eval(child.output)
# Check if this platform can unset environment variables in child processes.
# We set an env variable to a non-empty string, unset it, and invoke
# a python script in a subprocess to print whether the variable
# is NO LONGER in os.environ.
# We use 'eval' to parse the child's output so that an exception
# is thrown if the input is neither 'True' nor 'False'.
os.environ['UNSET_VAR'] = 'X'
del os.environ['UNSET_VAR']
child = gtest_test_utils.Subprocess(
[sys.executable, '-c', 'import os; print(\'UNSET_VAR\' not in os.environ)'])
CAN_UNSET_ENV = eval(child.output)
# Checks if we should test with an empty filter. This doesn't
# make sense on platforms that cannot pass empty env variables (Win32)
# and on platforms that cannot unset variables (since we cannot tell
# the difference between "" and NULL -- Borland and Solaris < 5.10)
CAN_TEST_EMPTY_FILTER = (CAN_PASS_EMPTY_ENV and CAN_UNSET_ENV)
# The environment variable for specifying the test filters.
FILTER_ENV_VAR = 'GTEST_FILTER'
# The environment variables for test sharding.
TOTAL_SHARDS_ENV_VAR = 'GTEST_TOTAL_SHARDS'
SHARD_INDEX_ENV_VAR = 'GTEST_SHARD_INDEX'
SHARD_STATUS_FILE_ENV_VAR = 'GTEST_SHARD_STATUS_FILE'
# The command line flag for specifying the test filters.
FILTER_FLAG = 'gtest_filter'
# The command line flag for including disabled tests.
ALSO_RUN_DISABLED_TESTS_FLAG = 'gtest_also_run_disabled_tests'
# Command to run the gtest_filter_unittest_ program.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_filter_unittest_')
# Regex for determining whether parameterized tests are enabled in the binary.
PARAM_TEST_REGEX = re.compile(r'/ParamTest')
# Regex for parsing test case names from Google Test's output.
TEST_CASE_REGEX = re.compile(r'^\[\-+\] \d+ tests? from (\w+(/\w+)?)')
# Regex for parsing test names from Google Test's output.
TEST_REGEX = re.compile(r'^\[\s*RUN\s*\].*\.(\w+(/\w+)?)')
# The command line flag to tell Google Test to output the list of tests it
# will run.
LIST_TESTS_FLAG = '--gtest_list_tests'
# Indicates whether Google Test supports death tests.
SUPPORTS_DEATH_TESTS = 'HasDeathTest' in gtest_test_utils.Subprocess(
[COMMAND, LIST_TESTS_FLAG]).output
# Full names of all tests in gtest_filter_unittest_.
PARAM_TESTS = [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestX/1',
'SeqQ/ParamTest.TestY/0',
'SeqQ/ParamTest.TestY/1',
]
DISABLED_TESTS = [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
]
if SUPPORTS_DEATH_TESTS:
DEATH_TESTS = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
]
else:
DEATH_TESTS = []
# All the non-disabled tests.
ACTIVE_TESTS = [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS
param_tests_present = None
# Utilities.
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
def RunAndReturnOutput(args = None):
"""Runs the test program and returns its output."""
return gtest_test_utils.Subprocess([COMMAND] + (args or []),
env=environ).output
def RunAndExtractTestList(args = None):
"""Runs the test program and returns its exit code and a list of tests run."""
p = gtest_test_utils.Subprocess([COMMAND] + (args or []), env=environ)
tests_run = []
test_case = ''
test = ''
for line in p.output.split('\n'):
match = TEST_CASE_REGEX.match(line)
if match is not None:
test_case = match.group(1)
else:
match = TEST_REGEX.match(line)
if match is not None:
test = match.group(1)
tests_run.append(test_case + '.' + test)
return (tests_run, p.exit_code)
def InvokeWithModifiedEnv(extra_env, function, *args, **kwargs):
"""Runs the given function and arguments in a modified environment."""
try:
original_env = environ.copy()
environ.update(extra_env)
return function(*args, **kwargs)
finally:
environ.clear()
environ.update(original_env)
def RunWithSharding(total_shards, shard_index, command):
"""Runs a test program shard and returns exit code and a list of tests run."""
extra_env = {SHARD_INDEX_ENV_VAR: str(shard_index),
TOTAL_SHARDS_ENV_VAR: str(total_shards)}
return InvokeWithModifiedEnv(extra_env, RunAndExtractTestList, command)
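# Example (illustrative): RunWithSharding(3, 1, ['--gtest_filter=Bar*']) runs the
# binary with GTEST_TOTAL_SHARDS=3 and GTEST_SHARD_INDEX=1 in its environment, so
# only the second shard's share of the Bar* tests is executed and collected.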
# The unit test.
class GTestFilterUnitTest(gtest_test_utils.TestCase):
"""Tests the env variable or the command line flag to filter tests."""
# Utilities.
def AssertSetEqual(self, lhs, rhs):
"""Asserts that two sets are equal."""
for elem in lhs:
self.assert_(elem in rhs, '%s in %s' % (elem, rhs))
for elem in rhs:
self.assert_(elem in lhs, '%s in %s' % (elem, lhs))
def AssertPartitionIsValid(self, set_var, list_of_sets):
"""Asserts that list_of_sets is a valid partition of set_var."""
full_partition = []
for slice_var in list_of_sets:
full_partition.extend(slice_var)
self.assertEqual(len(set_var), len(full_partition))
self.assertEqual(set(set_var), set(full_partition))
def AdjustForParameterizedTests(self, tests_to_run):
"""Adjust tests_to_run in case value parameterized tests are disabled."""
global param_tests_present
if not param_tests_present:
return list(set(tests_to_run) - set(PARAM_TESTS))
else:
return tests_to_run
def RunAndVerify(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for a given filter."""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# First, tests using the environment variable.
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
tests_run = RunAndExtractTestList()[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, tests_to_run)
# pylint: enable-msg=C6403
# Next, tests using the command line flag.
if gtest_filter is None:
args = []
else:
args = ['--%s=%s' % (FILTER_FLAG, gtest_filter)]
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def RunAndVerifyWithSharding(self, gtest_filter, total_shards, tests_to_run,
args=None, check_exit_0=False):
"""Checks that binary runs correct tests for the given filter and shard.
Runs all shards of gtest_filter_unittest_ with the given filter, and
verifies that the right set of tests were run. The union of tests run
on each shard should be identical to tests_to_run, without duplicates.
Args:
gtest_filter: A filter to apply to the tests.
total_shards: A total number of shards to split test run into.
tests_to_run: A set of tests expected to run.
args: Arguments to pass to the test binary.
check_exit_0: When set to a true value, make sure that all shards
return 0.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Windows removes empty variables from the environment when passing it
# to a new process. This means it is impossible to pass an empty filter
# into a process using the environment variable. However, we can still
# test the case when the variable is not supplied (i.e., gtest_filter is
# None).
# pylint: disable-msg=C6403
if CAN_TEST_EMPTY_FILTER or gtest_filter != '':
SetEnvVar(FILTER_ENV_VAR, gtest_filter)
partition = []
for i in range(0, total_shards):
(tests_run, exit_code) = RunWithSharding(total_shards, i, args)
if check_exit_0:
self.assertEqual(0, exit_code)
partition.append(tests_run)
self.AssertPartitionIsValid(tests_to_run, partition)
SetEnvVar(FILTER_ENV_VAR, None)
# pylint: enable-msg=C6403
def RunAndVerifyAllowingDisabled(self, gtest_filter, tests_to_run):
"""Checks that the binary runs correct set of tests for the given filter.
Runs gtest_filter_unittest_ with the given filter, and enables
disabled tests. Verifies that the right set of tests were run.
Args:
gtest_filter: A filter to apply to the tests.
tests_to_run: A set of tests expected to run.
"""
tests_to_run = self.AdjustForParameterizedTests(tests_to_run)
# Construct the command line.
args = ['--%s' % ALSO_RUN_DISABLED_TESTS_FLAG]
if gtest_filter is not None:
args.append('--%s=%s' % (FILTER_FLAG, gtest_filter))
tests_run = RunAndExtractTestList(args)[0]
self.AssertSetEqual(tests_run, tests_to_run)
def setUp(self):
"""Sets up test case.
Determines whether value-parameterized tests are enabled in the binary and
sets the flags accordingly.
"""
global param_tests_present
if param_tests_present is None:
param_tests_present = PARAM_TEST_REGEX.search(
RunAndReturnOutput()) is not None
def testDefaultBehavior(self):
"""Tests the behavior of not specifying the filter."""
self.RunAndVerify(None, ACTIVE_TESTS)
def testDefaultBehaviorWithShards(self):
"""Tests the behavior without the filter, with sharding enabled."""
self.RunAndVerifyWithSharding(None, 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, 2, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) - 1, ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS), ACTIVE_TESTS)
self.RunAndVerifyWithSharding(None, len(ACTIVE_TESTS) + 1, ACTIVE_TESTS)
def testEmptyFilter(self):
"""Tests an empty filter."""
self.RunAndVerify('', [])
self.RunAndVerifyWithSharding('', 1, [])
self.RunAndVerifyWithSharding('', 2, [])
def testBadFilter(self):
"""Tests a filter that matches nothing."""
self.RunAndVerify('BadFilter', [])
self.RunAndVerifyAllowingDisabled('BadFilter', [])
def testFullName(self):
"""Tests filtering by full name."""
self.RunAndVerify('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyAllowingDisabled('FooTest.Xyz', ['FooTest.Xyz'])
self.RunAndVerifyWithSharding('FooTest.Xyz', 5, ['FooTest.Xyz'])
def testUniversalFilters(self):
"""Tests filters that match everything."""
self.RunAndVerify('*', ACTIVE_TESTS)
self.RunAndVerify('*.*', ACTIVE_TESTS)
self.RunAndVerifyWithSharding('*.*', len(ACTIVE_TESTS) - 3, ACTIVE_TESTS)
self.RunAndVerifyAllowingDisabled('*', ACTIVE_TESTS + DISABLED_TESTS)
self.RunAndVerifyAllowingDisabled('*.*', ACTIVE_TESTS + DISABLED_TESTS)
def testFilterByTestCase(self):
"""Tests filtering by test case name."""
self.RunAndVerify('FooTest.*', ['FooTest.Abc', 'FooTest.Xyz'])
BAZ_TESTS = ['BazTest.TestOne', 'BazTest.TestA', 'BazTest.TestB']
self.RunAndVerify('BazTest.*', BAZ_TESTS)
self.RunAndVerifyAllowingDisabled('BazTest.*',
BAZ_TESTS + ['BazTest.DISABLED_TestC'])
def testFilterByTest(self):
"""Tests filtering by test name."""
self.RunAndVerify('*.TestOne', ['BarTest.TestOne', 'BazTest.TestOne'])
def testFilterDisabledTests(self):
"""Select only the disabled tests to run."""
self.RunAndVerify('DISABLED_FoobarTest.Test1', [])
self.RunAndVerifyAllowingDisabled('DISABLED_FoobarTest.Test1',
['DISABLED_FoobarTest.Test1'])
self.RunAndVerify('*DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*DISABLED_*', DISABLED_TESTS)
self.RunAndVerify('*.DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('*.DISABLED_*', [
'BarTest.DISABLED_TestFour',
'BarTest.DISABLED_TestFive',
'BazTest.DISABLED_TestC',
'DISABLED_FoobarTest.DISABLED_Test2',
])
self.RunAndVerify('DISABLED_*', [])
self.RunAndVerifyAllowingDisabled('DISABLED_*', [
'DISABLED_FoobarTest.Test1',
'DISABLED_FoobarTest.DISABLED_Test2',
'DISABLED_FoobarbazTest.TestA',
])
def testWildcardInTestCaseName(self):
"""Tests using wildcard in the test case name."""
self.RunAndVerify('*a*.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB', ] + DEATH_TESTS + PARAM_TESTS)
def testWildcardInTestName(self):
"""Tests using wildcard in the test name."""
self.RunAndVerify('*.*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testFilterWithoutDot(self):
"""Tests a filter that has no '.' in it."""
self.RunAndVerify('*z*', [
'FooTest.Xyz',
'BazTest.TestOne',
'BazTest.TestA',
'BazTest.TestB',
])
def testTwoPatterns(self):
"""Tests filters that consist of two patterns."""
self.RunAndVerify('Foo*.*:*A*', [
'FooTest.Abc',
'FooTest.Xyz',
'BazTest.TestA',
])
# An empty pattern + a non-empty one
self.RunAndVerify(':*A*', ['FooTest.Abc', 'BazTest.TestA'])
def testThreePatterns(self):
"""Tests filters that consist of three patterns."""
self.RunAndVerify('*oo*:*A*:*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
'BazTest.TestA',
])
# The 2nd pattern is empty.
self.RunAndVerify('*oo*::*One', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BazTest.TestOne',
])
# The last 2 patterns are empty.
self.RunAndVerify('*oo*::', [
'FooTest.Abc',
'FooTest.Xyz',
])
def testNegativeFilters(self):
self.RunAndVerify('*-BazTest.TestOne', [
'FooTest.Abc',
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
'BazTest.TestA',
'BazTest.TestB',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('*-FooTest.Abc:BazTest.*', [
'FooTest.Xyz',
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
self.RunAndVerify('BarTest.*-BarTest.TestOne', [
'BarTest.TestTwo',
'BarTest.TestThree',
])
# Tests without leading '*'.
self.RunAndVerify('-FooTest.Abc:FooTest.Xyz:BazTest.*', [
'BarTest.TestOne',
'BarTest.TestTwo',
'BarTest.TestThree',
] + DEATH_TESTS + PARAM_TESTS)
# Value parameterized tests.
self.RunAndVerify('*/*', PARAM_TESTS)
# Value parameterized tests filtering by the sequence name.
self.RunAndVerify('SeqP/*', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
])
# Value parameterized tests filtering by the test name.
self.RunAndVerify('*/0', [
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestY/0',
'SeqQ/ParamTest.TestX/0',
'SeqQ/ParamTest.TestY/0',
])
def testFlagOverridesEnvVar(self):
"""Tests that the filter flag overrides the filtering env. variable."""
SetEnvVar(FILTER_ENV_VAR, 'Foo*')
args = ['--%s=%s' % (FILTER_FLAG, '*One')]
tests_run = RunAndExtractTestList(args)[0]
SetEnvVar(FILTER_ENV_VAR, None)
self.AssertSetEqual(tests_run, ['BarTest.TestOne', 'BazTest.TestOne'])
def testShardStatusFileIsCreated(self):
"""Tests that the shard file is created if specified in the environment."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
InvokeWithModifiedEnv(extra_env, RunAndReturnOutput)
finally:
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
def testShardStatusFileIsCreatedWithListTests(self):
"""Tests that the shard file is created with the "list_tests" flag."""
shard_status_file = os.path.join(gtest_test_utils.GetTempDir(),
'shard_status_file2')
self.assert_(not os.path.exists(shard_status_file))
extra_env = {SHARD_STATUS_FILE_ENV_VAR: shard_status_file}
try:
output = InvokeWithModifiedEnv(extra_env,
RunAndReturnOutput,
[LIST_TESTS_FLAG])
finally:
# This assertion ensures that Google Test enumerated the tests as
# opposed to running them.
self.assert_('[==========]' not in output,
'Unexpected output during test enumeration.\n'
'Please ensure that LIST_TESTS_FLAG is assigned the\n'
'correct flag value for listing Google Test tests.')
self.assert_(os.path.exists(shard_status_file))
os.remove(shard_status_file)
if SUPPORTS_DEATH_TESTS:
def testShardingWorksWithDeathTests(self):
"""Tests integration with death tests and sharding."""
gtest_filter = 'HasDeathTest.*:SeqP/*'
expected_tests = [
'HasDeathTest.Test1',
'HasDeathTest.Test2',
'SeqP/ParamTest.TestX/0',
'SeqP/ParamTest.TestX/1',
'SeqP/ParamTest.TestY/0',
'SeqP/ParamTest.TestY/1',
]
for flag in ['--gtest_death_test_style=threadsafe',
'--gtest_death_test_style=fast']:
self.RunAndVerifyWithSharding(gtest_filter, 3, expected_tests,
check_exit_0=True, args=[flag])
self.RunAndVerifyWithSharding(gtest_filter, 5, expected_tests,
check_exit_0=True, args=[flag])
if __name__ == '__main__':
gtest_test_utils.Main()
|
|
""" Lib to manage an ATM (Automatic Teller Machine).
Important classes: User, Persistence.
"""
from decimal import Decimal
import hashlib
import sqlite3
import os
from datetime import datetime
class User(object):
""" Bank user. Can log in and do some actions or just act as a passive object.
Another class must be used to persist these instances in local storage. """
ACTIONS = { # for later registration in history attribute
'TRANSFERING' : 'transferred', # money transferring between two users
'WITHDRAWING' : 'withdrew', # withdrawing the user's own money
'RECEIVING' : 'received an amount of', # receiving money from anyone/anywhere
}
__agency = ''
__account = ''
__password = '' # md5
__balance = 0
__history = []
__is_logged_in = False # must not be persisted
def __init__(self, agency, account, password, balance=None, history=None):
""" Constructor. Limited actions while it's not logged in.
Args:
agency (str): Agency identification code.
account (str): Account identification code.
password (str): Password MD5 hash, put None if it's unknown.
balance (num): Balance in $, put None if it's unknown.
history (list): A list of tuples representing balance transaction records,
put None if it's unknown or empty.
List format: [(register string, True if it's a saved register), ...]
"""
self.__agency = agency
self.__account = account
self.__password = password
if balance is not None:
self.__balance = balance
if history is not None:
self.__history = history
def log_in(self, password_str):
""" Access this existent bank account, authenticating by this password string.
Args:
password_str (str): A password in natural language.
Returns:
bool: True if it was successfully authenticated, False otherwise.
"""
self.__is_logged_in = self.__password == self.str_to_hash(password_str)
return self.__is_logged_in
def log_out(self):
""" Exit this bank account, ie, removes active rights to do some action. """
self.__is_logged_in = False
def deposit(self, amount, another_user=None):
""" Deposit cash in this account or in another user's account.
If something goes wrong, a fatal error will be triggered.
Args:
amount (num): amount of cash in $ to deposit.
another_user (User): if it's depositing in another user account then
put its instance here, otherwise leave it as None.
Returns:
bool: True if the operation has been a success (an error is raised instead of ever returning False).
"""
if another_user:
another_user.deposit(amount)
self.register_operation(self.ACTIONS['RECEIVING'], amount)
self.register_operation(self.ACTIONS['TRANSFERING'], amount, another_user)
else:
self.__balance = float(Decimal(str(self.__balance + amount)))
self.register_operation(self.ACTIONS['RECEIVING'], amount)
return True # False is never reached
def transfer_to(self, amount, another_user):
""" Transfer an amount of cash from this user to another one.
This instance must have enough balance to do so.
This is a private method, that requires previous authentication.
Args:
amount (num): Cash in $ to discount from this instance user
and increase in another user account.
another_user (User): Another user to receive the transferred amount of cash.
Returns:
bool: True if cash has been transferred from this instance to another, False otherwise.
"""
if self.__balance >= amount and self.__is_logged_in:
self.__balance = float(Decimal(str(self.__balance - amount)))
another_user.deposit(amount)
self.register_operation(self.ACTIONS['TRANSFERING'], amount, another_user)
return True
return False
def withdraw_cash(self, qtt_100s, qtt_50s, qtt_20s):
""" Withdraw cash. Those args should be obtained throught options_to_withdraw function.
Also, there are two limits: $1000,00 or the balance (the lower one).
This is a private method, that requires previous authentication.
Args:
qtt_100s (int): quantity of 100-dollar bills
qtt_50s (int): quantity of 50-dollar bills
qtt_20s (int): quantity of 20-dollar bills
Returns:
bool: True if the cash has been withdrawn, False otherwise.
"""
amount = PaperMoneyCounter().cash(qtt_100s, qtt_50s, qtt_20s)
if (self.__is_logged_in) and (amount <= self.__balance) and (amount <= 1000):
self.__balance = float(Decimal(str(self.__balance - amount)))
self.register_operation(self.ACTIONS['WITHDRAWING'], amount)
return True
return False
def options_to_withdraw(self, amount):
""" Check options to withdraw an amount of cash. Can't be more than $1000,00 and
should be 'printed' in 20, 50 and/or 100-dollar bills.
Args:
amount (num): Desired amount of cash to withdraw.
Returns:
None: If the requirements to withdraw weren't accomplished.
list: If the requirements to withdraw were accomplished, a list in the format
[[a, b, c], ...], where each sublist is an option to withdraw cash,
and reading as a: quantity of 100s, b: quantity of 50s,
c: quantity of 20-dollar bills available and a,b,c are int.
"""
counter = PaperMoneyCounter() # aux class
options = [] # options to withdraw
remaining_cash = 0 # aux var
if (amount % 20 == 0 or amount % 50 == 0) and (amount <= 1000): # is it allowed to withdraw?
# prioritizing 100-dollar bills
qtt_100s = counter.how_many_100s(amount)
remaining_cash = counter.remaining_cash_without_100s(amount)
qtt_50s = counter.how_many_50s(remaining_cash)
remaining_cash = counter.remaining_cash_without_50s(remaining_cash)
qtt_20s = counter.how_many_20s(remaining_cash)
remaining_cash = counter.remaining_cash_without_20s(remaining_cash)
if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:
options.append([int(qtt_100s), int(qtt_50s), int(qtt_20s)])
# prioritizing 50-dollar bills
qtt_100s = 0
qtt_50s = counter.how_many_50s(amount)
remaining_cash = counter.remaining_cash_without_50s(amount)
qtt_20s = counter.how_many_20s(remaining_cash)
remaining_cash = counter.remaining_cash_without_20s(remaining_cash)
if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:
option = [int(qtt_100s), int(qtt_50s), int(qtt_20s)]
if option not in options: # membership test also works when no option was added yet
options.append(option)
# prioritizing 20-dollar bills
qtt_100s = 0
qtt_50s = 0
qtt_20s = counter.how_many_20s(amount)
if counter.cash(qtt_100s, qtt_50s, qtt_20s) == amount:
option = [int(qtt_100s), int(qtt_50s), int(qtt_20s)]
if option not in options: # avoids indexing options[0]/options[1] when they may not exist
options.append(option)
return options
return None # if it wasn't allowed to withdraw
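# Worked example (illustrative): options_to_withdraw(200) yields
# [[2, 0, 0], [0, 4, 0], [0, 0, 10]] -- two 100s, four 50s, or ten 20s --
# because every prioritisation strategy can reach exactly $200.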
def register_operation(self, action, amount, user_to=None):
""" Register an operation, that this user is executing, in its own history list.
Args:
action (str): An adequate value from ACTIONS dictionary attribute.
amount (num): Amount of money being moved in this operation.
user_to (User): Another user as a target, eg: transfering money from this
user to the argumented user.
Returns:
str: Built operation string added to this user history,
in format 'd/m/y - [account]/[agency] [action] $[amount]'
or 'd/m/y - [account1]/[agency1] [action] $[amount] to [account2]/[agency2]'.
"""
now = datetime.now()
register = str(now.day) + "/" + str(now.month) + "/" + str(now.year) + ' - '
register += self.__account + '/' + self.__agency
register += ' ' + action + ' $' + str(amount)
if user_to:
register += ' to ' + user_to.get_account() + '/' + user_to.get_agency()
self.__history.append((register, False))
return register
def append_register(self, register):
""" Append an already saved register to this user history.
Args:
register (tuple): an item to append to the history attribute that, following
the constructor format, has already been saved.
Returns:
bool: True if has been appended, False otherwise.
"""
register_str, is_saved = register # pylint: disable=I0011,W0612
if is_saved:
self.__history.append(register)
return True
return False
def str_to_hash(self, param):
""" Generate a hash of a string param using md5 algorithm
Args:
param (str): The string content for hashing.
Returns:
str: A hash, generated by a md5 algorithm, using the parameter.
"""
param = param.encode('utf-8')
my_hash = hashlib.md5(param)
return my_hash.hexdigest()
def hash_password(self):
""" Hashes the password of this instance
(but... it's supposed to be already hashed!). """
self.__password = self.str_to_hash(self.__password)
def is_logged_in(self):
""" Check if user has been authenticated.
Returns:
bool: True if is logged in, False otherwise.
"""
return self.__is_logged_in
def get_balance(self):
""" Consult balance in $.
Returns:
num: This user balance, None for unauthorized operation.
"""
if self.is_logged_in():
return self.__balance
else:
return None
def get_agency(self):
""" Get agency id.
Returns:
str: User's agency.
"""
return self.__agency
def get_account(self):
""" Get account id.
Returns:
str: User's account.
"""
return self.__account
def get_history(self):
""" Get history of user's transactions.
Returns:
list: Just a copy of User's history in constructed format.
"""
return self.__history[:]
# ..............................................................
class PaperMoneyCounter(object):
""" Can do some counts about paper money. Aux class. """
def cash(self, qtt_100s, qtt_50s, qtt_20s):
""" Return how much money there is by assembling 100s, 50s and 20-dollar bills quantities.
"""
return (qtt_100s * 100) + (qtt_50s * 50) + (qtt_20s * 20)
def how_many_100s(self, amount):
""" Return how many 100-dollar bill can be printed from this amount of cash.
"""
return amount // 100
def remaining_cash_without_100s(self, amount):
""" Return how much cash remains after using the maximum quantity of 100-dollar bills.
"""
return amount % 100
def how_many_50s(self, amount):
""" Return how many 50-dollar bill can be printed from this amount of cash.
"""
return amount // 50
def remaining_cash_without_50s(self, amount):
""" Return how much cash remains after using the maximum quantity of 50-dollar bills.
"""
return amount % 50
def how_many_20s(self, amount):
""" Return how many 20-dollar bill can be printed from this amount of cash.
"""
return amount // 20
def remaining_cash_without_20s(self, amount):
""" Return how much cash remains after using the maximum quantity of 20-dollar bills.
"""
return amount % 20
# ..............................................................
class Persistence(object):
""" Data manager for ATM bank accounts. """
__DB = 'users.db'
__users = {}
def __init__(self):
""" Create an instance of Persistence, and also try to execute
an initial script for db installation. """
if not self.is_installed():
self.install()
else:
self.load_users()
def install(self):
""" Initialize database, create tables and add few rows. """
conn = sqlite3.connect(self.__DB)
cursor = conn.cursor()
# creating tables...
cursor.execute('''
CREATE TABLE users (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
agency TEXT NOT NULL,
account TEXT NOT NULL,
password TEXT NOT NULL,
balance REAL NOT NULL
);
''')
cursor.execute('''
CREATE TABLE history (
id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
register TEXT NOT NULL,
owner INTEGER NOT NULL
);
''')
# inserting a few users by default (there isn't a 'sign up' requirement for this app)...
hasher = User('', '', '')
users_data = [
('A1', '00000-0', hasher.str_to_hash('pass0'), 1500),
('A1', '11111-1', hasher.str_to_hash('pass1'), 400),
('A2', '22222-2', hasher.str_to_hash('pass2'), 260),
('A3', '33333-3', hasher.str_to_hash('pass3'), 380),
('A2', '44444-4', hasher.str_to_hash('pass4'), 240),
]
cursor.executemany('''
INSERT INTO users (agency, account, password, balance)
VALUES (?, ?, ?, ?);
''', users_data)
conn.commit()
conn.close()
self.load_users()
def is_installed(self):
""" Returns: True if database file already exists, False otherwise.
Doesn't guarantee that this file really is a database, ie, a valid file. """
return os.path.isfile(self.__DB)
def update_users(self):
""" Update all current users balance and history in database using list attribute.
There's basically no security against SQL injection, due to there's no espected
input string (the existents here are auto built by this script using numeric inputs) """
conn = sqlite3.connect(self.__DB)
cursor = conn.cursor()
users_data = []
unsaved_histories_data = []
for key, user in self.__users.items(): # here, key is actually the user's database id
users_data.append((user.get_balance(), key))
for register in user.get_history():
register_str, is_saved = register
if not is_saved:
unsaved_histories_data.append((register_str, key))
cursor.executemany('''
UPDATE users
SET balance=?
WHERE id=?;
''', users_data)
cursor.executemany('''
INSERT INTO history (register, owner)
VALUES (?, ?);
''', unsaved_histories_data)
conn.commit()
conn.close()
self.load_users() # RELOADING!!! Pew, pew, pew, pew, pew...
def load_users(self):
""" Load all database rows and put their data in list attribute. """
self.__users = {}
conn = sqlite3.connect(self.__DB)
cursor = conn.cursor()
cursor.execute('''
SELECT * FROM users;
''')
for row in cursor.fetchall():
self.__users[row[0]] = User(row[1], row[2], row[3], row[4])
cursor.execute('''
SELECT * FROM history;
''')
for row in cursor.fetchall():
self.__users[row[2]].append_register((row[1], True))
conn.close()
def find_user(self, agency=None, account=None):
""" Search for a registered user with these BOTH matching agency and account attributes.
Don't worry about SQL injection, this searching is executed with already loaded users,
so there's no use of SQL here.
Args:
agency (str): Agency name of wanted user (recommended: use upper case only).
account (str): Account name of wanted user (recommended: use upper case only).
Returns:
User: Found user, or None if there's no matching user.
"""
if agency is None or account is None:
return None
for i in self.__users:
user = self.__users[i]
if user.get_agency() == agency and user.get_account() == account:
return self.__users[i]
return None
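# A minimal usage sketch (illustrative; the credentials below are among those
# seeded by install() on first run):
#
#   db = Persistence()
#   user = db.find_user(agency='A1', account='00000-0')
#   if user and user.log_in('pass0'):
#       qtt_100s, qtt_50s, qtt_20s = user.options_to_withdraw(100)[0]
#       user.withdraw_cash(qtt_100s, qtt_50s, qtt_20s)
#       db.update_users()  # persist the new balance and history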
|
|
# -*- test-case-name: twisted.test.test_process -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
UNIX Process management.
Do NOT use this module directly - use reactor.spawnProcess() instead.
Maintainer: Itamar Shtull-Trauring
"""
from __future__ import division, absolute_import, print_function
from twisted.python.runtime import platform
if platform.isWindows():
raise ImportError(("twisted.internet.process does not work on Windows. "
"Use the reactor.spawnProcess() API instead."))
import errno
import gc
import os
import io
import select
import signal
import stat
import sys
import traceback
try:
import pty
except ImportError:
pty = None
try:
import fcntl, termios
except ImportError:
fcntl = None
from zope.interface import implementer
from twisted.python import log, failure
from twisted.python.util import switchUID
from twisted.python.compat import items, xrange, _PY3
from twisted.internet import fdesc, abstract, error
from twisted.internet.main import CONNECTION_LOST, CONNECTION_DONE
from twisted.internet._baseprocess import BaseProcess
from twisted.internet.interfaces import IProcessTransport
# Some people were importing this, which is incorrect, just keeping it
# here for backwards compatibility:
ProcessExitedAlready = error.ProcessExitedAlready
reapProcessHandlers = {}
def reapAllProcesses():
"""
Reap all registered processes.
"""
# Coerce this to a list, as reaping the process changes the dictionary and
# causes a "size changed during iteration" exception
for process in list(reapProcessHandlers.values()):
process.reapProcess()
def registerReapProcessHandler(pid, process):
"""
Register a process handler for the given pid, in case L{reapAllProcesses}
is called.
@param pid: the pid of the process.
@param process: a process handler.
"""
if pid in reapProcessHandlers:
raise RuntimeError("Try to register an already registered process.")
try:
auxPID, status = os.waitpid(pid, os.WNOHANG)
except:
log.msg('Failed to reap %d:' % pid)
log.err()
auxPID = None
if auxPID:
process.processEnded(status)
else:
# if auxPID is 0, there are children but none have exited
reapProcessHandlers[pid] = process
def unregisterReapProcessHandler(pid, process):
"""
Unregister a process handler previously registered with
L{registerReapProcessHandler}.
"""
if not (pid in reapProcessHandlers
and reapProcessHandlers[pid] == process):
raise RuntimeError("Try to unregister a process not registered.")
del reapProcessHandlers[pid]
def detectLinuxBrokenPipeBehavior():
"""
On some Linux versions, write-only pipes are detected as readable. This
function is here to check if this bug is present or not.
See L{ProcessWriter.doRead} for a more detailed explanation.
@return: C{True} if Linux pipe behaviour is broken.
@rtype: L{bool}
"""
r, w = os.pipe()
os.write(w, b'a')
reads, writes, exes = select.select([w], [], [], 0)
if reads:
# Linux < 2.6.11 says a write-only pipe is readable.
brokenPipeBehavior = True
else:
brokenPipeBehavior = False
os.close(r)
os.close(w)
return brokenPipeBehavior
brokenLinuxPipeBehavior = detectLinuxBrokenPipeBehavior()
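# Example (illustrative): on an affected kernel the select() call above reports
# the write end 'w' as readable simply because the pipe holds unread data, so
# brokenLinuxPipeBehavior ends up True; on current kernels it stays False.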
class ProcessWriter(abstract.FileDescriptor):
"""
(Internal) Helper class to write into a Process's input pipe.
I am a helper which describes a selectable asynchronous writer to a
process's input pipe, including stdin.
@ivar enableReadHack: A flag which determines how readability on this
write descriptor will be handled. If C{True}, then readability may
indicate the reader for this write descriptor has been closed (ie,
the connection has been lost). If C{False}, then readability events
are ignored.
"""
connected = 1
ic = 0
enableReadHack = False
def __init__(self, reactor, proc, name, fileno, forceReadHack=False):
"""
Initialize, specifying a Process instance to connect to.
"""
abstract.FileDescriptor.__init__(self, reactor)
fdesc.setNonBlocking(fileno)
self.proc = proc
self.name = name
self.fd = fileno
if not stat.S_ISFIFO(os.fstat(self.fileno()).st_mode):
# If the fd is not a pipe, then the read hack is never
# applicable. This case arises when ProcessWriter is used by
# StandardIO and stdout is redirected to a normal file.
self.enableReadHack = False
elif forceReadHack:
self.enableReadHack = True
else:
# Detect if this fd is actually a write-only fd. If it's
# valid to read, don't try to detect closing via read.
# This really only means that we cannot detect a TTY's write
# pipe being closed.
try:
os.read(self.fileno(), 0)
except OSError:
# It's a write-only pipe end, enable hack
self.enableReadHack = True
if self.enableReadHack:
self.startReading()
def fileno(self):
"""
Return the fileno() of my process's stdin.
"""
return self.fd
def writeSomeData(self, data):
"""
Write some data to the open process.
"""
rv = fdesc.writeToFD(self.fd, data)
if rv == len(data) and self.enableReadHack:
# If the send buffer is now empty and it is necessary to monitor
# this descriptor for readability to detect close, try detecting
# readability now.
self.startReading()
return rv
def write(self, data):
self.stopReading()
abstract.FileDescriptor.write(self, data)
def doRead(self):
"""
The only way a write pipe can become "readable" is at EOF, because the
child has closed it, and we're using a reactor which doesn't
distinguish between readable and closed (such as the select reactor).
Except that's not true on Linux < 2.6.11, which has the following
characteristics:
  - write pipe is completely empty => POLLOUT (writable in select)
  - write pipe is not completely empty => POLLIN (readable in select)
  - write pipe's reader closed => POLLIN|POLLERR (readable and writable
    in select)
That's what this funky code is for. If Linux were not broken, this
function could simply be "return CONNECTION_LOST".
BUG: We call select no matter which reactor is in use. If the reactor is
pollreactor and the fd is > 1024, this will fail (this only occurs on
broken versions of Linux, though).
"""
if self.enableReadHack:
if brokenLinuxPipeBehavior:
fd = self.fd
r, w, x = select.select([fd], [fd], [], 0)
if r and w:
return CONNECTION_LOST
else:
return CONNECTION_LOST
else:
self.stopReading()
def connectionLost(self, reason):
"""
See abstract.FileDescriptor.connectionLost.
"""
# At least on OS X 10.4, exiting while stdout is non-blocking can
# result in data loss. For some reason putting the file descriptor
# back into blocking mode seems to resolve this issue.
fdesc.setBlocking(self.fd)
abstract.FileDescriptor.connectionLost(self, reason)
self.proc.childConnectionLost(self.name, reason)
class ProcessReader(abstract.FileDescriptor):
"""
ProcessReader
I am a selectable representation of a process's output pipe, such as
stdout and stderr.
"""
connected = 1
def __init__(self, reactor, proc, name, fileno):
"""
Initialize, specifying a process to connect to.
"""
abstract.FileDescriptor.__init__(self, reactor)
fdesc.setNonBlocking(fileno)
self.proc = proc
self.name = name
self.fd = fileno
self.startReading()
def fileno(self):
"""
Return the fileno() of my process's output pipe.
"""
return self.fd
def writeSomeData(self, data):
# The only time this is actually called is after .loseConnection. Any
# actual write attempt would fail, so we must avoid that. This hack
# allows us to use .loseConnection on both readers and writers.
assert data == ""
return CONNECTION_LOST
def doRead(self):
"""
This is called when the pipe becomes readable.
"""
return fdesc.readFromFD(self.fd, self.dataReceived)
def dataReceived(self, data):
self.proc.childDataReceived(self.name, data)
def loseConnection(self):
if self.connected and not self.disconnecting:
self.disconnecting = 1
self.stopReading()
self.reactor.callLater(0, self.connectionLost,
failure.Failure(CONNECTION_DONE))
def connectionLost(self, reason):
"""
Close my end of the pipe, signal the Process (which signals the
ProcessProtocol).
"""
abstract.FileDescriptor.connectionLost(self, reason)
self.proc.childConnectionLost(self.name, reason)
class _BaseProcess(BaseProcess, object):
"""
Base class for Process and PTYProcess.
"""
status = None
pid = None
def reapProcess(self):
"""
Try to reap a process (without blocking) via waitpid.
This is called when sigchild is caught or a Process object loses its
"connection" (stdout is closed) This ought to result in reaping all
zombie processes, since it will be called twice as often as it needs
to be.
(Unfortunately, this is a slightly experimental approach, since
UNIX has no way to be really sure that your process is going to
go away w/o blocking. I don't want to block.)
"""
try:
try:
pid, status = os.waitpid(self.pid, os.WNOHANG)
except OSError as e:
if e.errno == errno.ECHILD:
# no child process
pid = None
else:
raise
except:
log.msg('Failed to reap %d:' % self.pid)
log.err()
pid = None
if pid:
self.processEnded(status)
unregisterReapProcessHandler(pid, self)
def _getReason(self, status):
exitCode = sig = None
if os.WIFEXITED(status):
exitCode = os.WEXITSTATUS(status)
else:
sig = os.WTERMSIG(status)
if exitCode or sig:
return error.ProcessTerminated(exitCode, sig, status)
return error.ProcessDone(status)
def signalProcess(self, signalID):
"""
Send the given signal C{signalID} to the process. A few signals ('HUP',
'STOP', 'INT', 'KILL', 'TERM') are translated from their string
representation to the corresponding integer value; any other value is
passed through unchanged.
@type signalID: C{str} or C{int}
"""
if signalID in ('HUP', 'STOP', 'INT', 'KILL', 'TERM'):
signalID = getattr(signal, 'SIG%s' % (signalID,))
if self.pid is None:
raise ProcessExitedAlready()
try:
os.kill(self.pid, signalID)
except OSError as e:
if e.errno == errno.ESRCH:
raise ProcessExitedAlready()
else:
raise
def _resetSignalDisposition(self):
# The Python interpreter ignores some signals, and our child
# process will inherit that behaviour. To have a child process
# that responds to signals normally, we need to reset our
# child process's signal handling (just) after we fork and
# before we execvpe.
for signalnum in xrange(1, signal.NSIG):
if signal.getsignal(signalnum) == signal.SIG_IGN:
# Reset signal handling to the default
signal.signal(signalnum, signal.SIG_DFL)
def _fork(self, path, uid, gid, executable, args, environment, **kwargs):
"""
Fork and then exec sub-process.
@param path: the path where to run the new process.
@type path: C{str}
@param uid: if defined, the uid used to run the new process.
@type uid: C{int}
@param gid: if defined, the gid used to run the new process.
@type gid: C{int}
@param executable: the executable to run in a new process.
@type executable: C{str}
@param args: arguments used to create the new process.
@type args: C{list}.
@param environment: environment used for the new process.
@type environment: C{dict}.
@param kwargs: keyword arguments to L{_setupChild} method.
"""
collectorEnabled = gc.isenabled()
gc.disable()
try:
self.pid = os.fork()
except:
# Still in the parent process
if collectorEnabled:
gc.enable()
raise
else:
if self.pid == 0:
# A return value of 0 from fork() indicates that we are now
# executing in the child process.
# Do not put *ANY* code outside the try block. The child
# process must either exec or _exit. If it gets outside this
# block (due to an exception that is not handled here, but
# which might be handled higher up), there will be two copies
# of the parent running in parallel, doing all kinds of damage.
# After each change to this code, review it to make sure there
# are no exit paths.
try:
# Stop debugging. If I am, I don't care anymore.
sys.settrace(None)
self._setupChild(**kwargs)
self._execChild(path, uid, gid, executable, args,
environment)
except:
# If there are errors, try to write something descriptive
# to stderr before exiting.
# The parent's stderr isn't *necessarily* fd 2 anymore, or
# even still available; however, even libc assumes that
# write(2, err) is a useful thing to attempt.
try:
stderr = os.fdopen(2, 'wb')
msg = ("Upon execvpe {0} {1} in environment id {2}"
"\n:").format(executable, str(args),
id(environment))
if _PY3:
# On Python 3, print_exc takes a text stream, but
# on Python 2 it still takes a byte stream. So on
# Python 3 we will wrap up the byte stream returned
# by os.fdopen using TextIOWrapper.
# We hard-code UTF-8 as the encoding here, rather
# than looking at something like
# getfilesystemencoding() or sys.stderr.encoding,
# because we want an encoding that will be able to
# encode the full range of code points. We are
# (most likely) talking to the parent process on
# the other end of this pipe and not the filesystem
# or the original sys.stderr, so there's no point
# in trying to match the encoding of one of those
# objects.
stderr = io.TextIOWrapper(stderr, encoding="utf-8")
stderr.write(msg)
traceback.print_exc(file=stderr)
stderr.flush()
for fd in xrange(3):
os.close(fd)
except:
# Handle all errors during the error-reporting process
# silently to ensure that the child terminates.
pass
# See comment above about making sure that we reach this line
# of code.
os._exit(1)
# we are now in parent process
if collectorEnabled:
gc.enable()
self.status = -1 # this records the exit status of the child
def _setupChild(self, *args, **kwargs):
"""
Setup the child process. Override in subclasses.
"""
raise NotImplementedError()
def _execChild(self, path, uid, gid, executable, args, environment):
"""
The exec() which is done in the forked child.
"""
if path:
os.chdir(path)
if uid is not None or gid is not None:
if uid is None:
uid = os.geteuid()
if gid is None:
gid = os.getegid()
# set the UID before I actually exec the process
os.setuid(0)
os.setgid(0)
switchUID(uid, gid)
os.execvpe(executable, args, environment)
def __repr__(self):
"""
String representation of a process.
"""
return "<%s pid=%s status=%s>" % (self.__class__.__name__,
self.pid, self.status)
class _FDDetector(object):
"""
This class contains the logic necessary to decide which of the available
system techniques should be used to detect the open file descriptors for
the current process. The chosen technique gets monkey-patched into the
_listOpenFDs method of this class so that the detection only needs to occur
once.
@ivar listdir: The implementation of listdir to use. This gets overwritten
by the test cases.
@ivar getpid: The implementation of getpid to use, returns the PID of the
running process.
@ivar openfile: The implementation of open() to use, by default the Python
builtin.
"""
# So that we can unit test this
listdir = os.listdir
getpid = os.getpid
openfile = open
def __init__(self):
self._implementations = [
self._procFDImplementation, self._devFDImplementation,
self._fallbackFDImplementation]
def _listOpenFDs(self):
"""
Return an iterable of file descriptors which I{may} be open in this
process.
This will try to return the fewest possible descriptors without missing
any.
"""
self._listOpenFDs = self._getImplementation()
return self._listOpenFDs()
def _getImplementation(self):
"""
Pick a method which gives correct results for C{_listOpenFDs} in this
runtime environment.
This involves a lot of very platform-specific checks, some of which may
be relatively expensive. Therefore the returned method should be saved
and re-used, rather than always calling this method to determine what it
is.
See the implementation for the details of how a method is selected.
"""
for impl in self._implementations:
try:
before = impl()
except:
continue
with self.openfile("/dev/null", "r"):
after = impl()
if before != after:
return impl
# If no implementation can detect the newly opened file above, then just
# return the last one. The last one should therefore always be one
# which makes a simple static guess which includes all possible open
# file descriptors, but perhaps also many other values which do not
# correspond to file descriptors. For example, the scheme implemented
# by _fallbackFDImplementation is suitable to be the last entry.
return impl
def _devFDImplementation(self):
"""
Simple implementation for systems where /dev/fd actually works.
See: http://www.freebsd.org/cgi/man.cgi?fdescfs
"""
dname = "/dev/fd"
result = [int(fd) for fd in self.listdir(dname)]
return result
def _procFDImplementation(self):
"""
Simple implementation for systems where /proc/pid/fd exists (we assume
it works).
"""
dname = "/proc/%d/fd" % (self.getpid(),)
return [int(fd) for fd in self.listdir(dname)]
def _fallbackFDImplementation(self):
"""
Fallback implementation where either the resource module can inform us
about the upper bound of how many FDs to expect, or where we just guess
a constant maximum if there is no resource module.
All possible file descriptors from 0 to that upper bound are returned
with no attempt to exclude invalid file descriptor values.
"""
try:
import resource
except ImportError:
maxfds = 1024
else:
# OS-X reports 9223372036854775808. That's a lot of fds to close.
# OS-X should get the /dev/fd implementation instead, so mostly
# this check probably isn't necessary.
maxfds = min(1024, resource.getrlimit(resource.RLIMIT_NOFILE)[1])
return range(maxfds)
detector = _FDDetector()
def _listOpenFDs():
"""
Use the global detector object to figure out which FD implementation to
use.
"""
return detector._listOpenFDs()
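# Illustrative note (not part of Twisted): on systems that expose /proc, the
# property _getImplementation() relies on can be observed directly -- listing
# the fd directory before and after opening a file yields different results,
# because the newly opened descriptor shows up in the second listing:
#
#     import os
#     before = set(os.listdir("/proc/%d/fd" % os.getpid()))
#     with open("/dev/null") as extra:
#         after = set(os.listdir("/proc/%d/fd" % os.getpid()))
#     assert before != after
#
# An implementation that failed this check would be skipped in favour of the
# next candidate, ending with _fallbackFDImplementation.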
@implementer(IProcessTransport)
class Process(_BaseProcess):
"""
An operating-system Process.
This represents an operating-system process with arbitrary input/output
pipes connected to it. Those pipes may represent standard input,
standard output, and standard error, or any other file descriptor.
On UNIX, this is implemented using fork(), exec(), pipe()
and fcntl(). These calls may not exist elsewhere so this
code is not cross-platform. (also, windows can only select
on sockets...)
"""
debug = False
debug_child = False
status = -1
pid = None
processWriterFactory = ProcessWriter
processReaderFactory = ProcessReader
def __init__(self,
reactor, executable, args, environment, path, proto,
uid=None, gid=None, childFDs=None):
"""
Spawn an operating-system process.
This is where the hard work of disconnecting all currently open
files / forking / executing the new process happens. (This is
executed automatically when a Process is instantiated.)
This will also run the subprocess as a given user ID and group ID, if
specified. (Implementation Note: this doesn't support all the arcane
nuances of setXXuid on UNIX: it will assume that either your effective
or real UID is 0.)
"""
if not proto:
assert 'r' not in childFDs.values()
assert 'w' not in childFDs.values()
_BaseProcess.__init__(self, proto)
self.pipes = {}
# keys are childFDs, we can sense them closing
# values are ProcessReader/ProcessWriters
helpers = {}
# keys are childFDs
# values are parentFDs
if childFDs is None:
childFDs = {0: "w", # we write to the child's stdin
1: "r", # we read from their stdout
2: "r", # and we read from their stderr
}
debug = self.debug
if debug: print("childFDs", childFDs)
_openedPipes = []
def pipe():
r, w = os.pipe()
_openedPipes.extend([r, w])
return r, w
# fdmap.keys() are filenos of pipes that are used by the child.
fdmap = {} # maps childFD to parentFD
try:
for childFD, target in items(childFDs):
if debug: print("[%d]" % childFD, target)
if target == "r":
# we need a pipe that the parent can read from
readFD, writeFD = pipe()
if debug: print("readFD=%d, writeFD=%d" % (readFD, writeFD))
fdmap[childFD] = writeFD # child writes to this
helpers[childFD] = readFD # parent reads from this
elif target == "w":
# we need a pipe that the parent can write to
readFD, writeFD = pipe()
if debug: print("readFD=%d, writeFD=%d" % (readFD, writeFD))
fdmap[childFD] = readFD # child reads from this
helpers[childFD] = writeFD # parent writes to this
else:
assert type(target) == int, '%r should be an int' % (target,)
fdmap[childFD] = target # parent ignores this
if debug: print("fdmap", fdmap)
if debug: print("helpers", helpers)
# the child only cares about fdmap.values()
self._fork(path, uid, gid, executable, args, environment, fdmap=fdmap)
except:
for pipe in _openedPipes:
os.close(pipe)
raise
# we are the parent process:
self.proto = proto
# arrange for the parent-side pipes to be read and written
for childFD, parentFD in items(helpers):
os.close(fdmap[childFD])
if childFDs[childFD] == "r":
reader = self.processReaderFactory(reactor, self, childFD,
parentFD)
self.pipes[childFD] = reader
if childFDs[childFD] == "w":
writer = self.processWriterFactory(reactor, self, childFD,
parentFD, forceReadHack=True)
self.pipes[childFD] = writer
try:
# the 'transport' is used for some compatibility methods
if self.proto is not None:
self.proto.makeConnection(self)
except:
log.err()
# The reactor might not be running yet. This might call back into
# processEnded synchronously, triggering an application-visible
# callback. That's probably not ideal. The replacement API for
# spawnProcess should improve upon this situation.
registerReapProcessHandler(self.pid, self)
def _setupChild(self, fdmap):
"""
fdmap[childFD] = parentFD
The child wants to end up with 'childFD' attached to what used to be
the parent's parentFD. As an example, a bash command run like
'command 2>&1' would correspond to an fdmap of {0:0, 1:1, 2:1}.
'command >foo.txt' would be {0:0, 1:os.open('foo.txt'), 2:2}.
This is accomplished in two steps::
1. close all file descriptors that aren't values of fdmap. This
means 0 .. maxfds (or just the open fds within that range, if
the platform supports '/proc/<pid>/fd').
2. for each childFD::
- if fdmap[childFD] == childFD, the descriptor is already in
place. Make sure the CLOEXEC flag is not set, then delete
the entry from fdmap.
- if childFD is in fdmap.values(), then the target descriptor
is busy. Use os.dup() to move it elsewhere, update all
fdmap[childFD] items that point to it, then close the
original. Then fall through to the next case.
- now fdmap[childFD] is not in fdmap.values(), and is free.
Use os.dup2() to move it to the right place, then close the
original.
"""
debug = self.debug_child
if debug:
errfd = sys.stderr
errfd.write("starting _setupChild\n")
destList = fdmap.values()
for fd in _listOpenFDs():
if fd in destList:
continue
if debug and fd == errfd.fileno():
continue
try:
os.close(fd)
except:
pass
# at this point, the only fds still open are the ones that need to
# be moved to their appropriate positions in the child (the targets
# of fdmap, i.e. fdmap.values() )
if debug: print("fdmap", fdmap, file=errfd)
for child in sorted(fdmap.keys()):
target = fdmap[child]
if target == child:
# fd is already in place
if debug: print("%d already in place" % target, file=errfd)
fdesc._unsetCloseOnExec(child)
else:
if child in fdmap.values():
# we can't replace child-fd yet, as some other mapping
# still needs the fd it wants to target. We must preserve
# that old fd by duping it to a new home.
newtarget = os.dup(child) # give it a safe home
if debug: print("os.dup(%d) -> %d" % (child, newtarget),
file=errfd)
os.close(child) # close the original
for c, p in items(fdmap):
if p == child:
fdmap[c] = newtarget # update all pointers
# now it should be available
if debug: print("os.dup2(%d,%d)" % (target, child), file=errfd)
os.dup2(target, child)
# At this point, the child has everything it needs. We want to close
# everything that isn't going to be used by the child, i.e.
# everything not in fdmap.keys(). The only remaining fds open are
# those in fdmap.values().
# Any given fd may appear in fdmap.values() multiple times, so we
# need to remove duplicates first.
old = []
for fd in fdmap.values():
if not fd in old:
if not fd in fdmap.keys():
old.append(fd)
if debug: print("old", old, file=errfd)
for fd in old:
os.close(fd)
self._resetSignalDisposition()
def writeToChild(self, childFD, data):
self.pipes[childFD].write(data)
def closeChildFD(self, childFD):
# For writer pipes, loseConnection tries to write the remaining data
# out to the pipe before closing it.
# If childFD is not in the list of pipes, assume that it is already
# closed.
if childFD in self.pipes:
self.pipes[childFD].loseConnection()
def pauseProducing(self):
for p in self.pipes.itervalues():
if isinstance(p, ProcessReader):
p.stopReading()
def resumeProducing(self):
for p in self.pipes.itervalues():
if isinstance(p, ProcessReader):
p.startReading()
# compatibility
def closeStdin(self):
"""
Call this to close standard input on this process.
"""
self.closeChildFD(0)
def closeStdout(self):
self.closeChildFD(1)
def closeStderr(self):
self.closeChildFD(2)
def loseConnection(self):
self.closeStdin()
self.closeStderr()
self.closeStdout()
def write(self, data):
"""
Call this to write to standard input on this process.
NOTE: This will silently lose data if there is no standard input.
"""
if 0 in self.pipes:
self.pipes[0].write(data)
def registerProducer(self, producer, streaming):
"""
Call this to register producer for standard input.
If there is no standard input producer.stopProducing() will
be called immediately.
"""
if 0 in self.pipes:
self.pipes[0].registerProducer(producer, streaming)
else:
producer.stopProducing()
def unregisterProducer(self):
"""
Call this to unregister producer for standard input."""
if 0 in self.pipes:
self.pipes[0].unregisterProducer()
def writeSequence(self, seq):
"""
Call this to write to standard input on this process.
NOTE: This will silently lose data if there is no standard input.
"""
if 0 in self.pipes:
self.pipes[0].writeSequence(seq)
def childDataReceived(self, name, data):
self.proto.childDataReceived(name, data)
def childConnectionLost(self, childFD, reason):
# this is called when one of the helpers (ProcessReader or
# ProcessWriter) notices their pipe has been closed
os.close(self.pipes[childFD].fileno())
del self.pipes[childFD]
try:
self.proto.childConnectionLost(childFD)
except:
log.err()
self.maybeCallProcessEnded()
def maybeCallProcessEnded(self):
# we don't call ProcessProtocol.processEnded until:
# the child has terminated, AND
# all writers have indicated an error status, AND
# all readers have indicated EOF
# This ensures that we've gathered all output from the process.
if self.pipes:
return
if not self.lostProcess:
self.reapProcess()
return
_BaseProcess.maybeCallProcessEnded(self)
@implementer(IProcessTransport)
class PTYProcess(abstract.FileDescriptor, _BaseProcess):
"""
An operating-system Process that uses PTY support.
"""
status = -1
pid = None
def __init__(self, reactor, executable, args, environment, path, proto,
uid=None, gid=None, usePTY=None):
"""
Spawn an operating-system process.
This is where the hard work of disconnecting all currently open
files / forking / executing the new process happens. (This is
executed automatically when a Process is instantiated.)
This will also run the subprocess as a given user ID and group ID, if
specified. (Implementation Note: this doesn't support all the arcane
nuances of setXXuid on UNIX: it will assume that either your effective
or real UID is 0.)
"""
if pty is None and not isinstance(usePTY, (tuple, list)):
# no pty module and we didn't get a pty to use
raise NotImplementedError(
"cannot use PTYProcess on platforms without the pty module.")
abstract.FileDescriptor.__init__(self, reactor)
_BaseProcess.__init__(self, proto)
if isinstance(usePTY, (tuple, list)):
masterfd, slavefd, _ = usePTY
else:
masterfd, slavefd = pty.openpty()
try:
self._fork(path, uid, gid, executable, args, environment,
masterfd=masterfd, slavefd=slavefd)
except:
if not isinstance(usePTY, (tuple, list)):
os.close(masterfd)
os.close(slavefd)
raise
# we are now in parent process:
os.close(slavefd)
fdesc.setNonBlocking(masterfd)
self.fd = masterfd
self.startReading()
self.connected = 1
self.status = -1
try:
self.proto.makeConnection(self)
except:
log.err()
registerReapProcessHandler(self.pid, self)
def _setupChild(self, masterfd, slavefd):
"""
Set up child process after C{fork()} but before C{exec()}.
This involves:
- closing C{masterfd}, since it is not used in the subprocess
- creating a new session with C{os.setsid}
- changing the controlling terminal of the process (and the new
session) to point at C{slavefd}
- duplicating C{slavefd} to standard input, output, and error
- closing all other open file descriptors (according to
L{_listOpenFDs})
- re-setting all signal handlers to C{SIG_DFL}
@param masterfd: The master end of a PTY file descriptors opened with
C{openpty}.
@type masterfd: L{int}
@param slavefd: The slave end of a PTY opened with C{openpty}.
@type slavefd: L{int}
"""
os.close(masterfd)
os.setsid()
fcntl.ioctl(slavefd, termios.TIOCSCTTY, '')
for fd in range(3):
if fd != slavefd:
os.close(fd)
os.dup2(slavefd, 0) # stdin
os.dup2(slavefd, 1) # stdout
os.dup2(slavefd, 2) # stderr
for fd in _listOpenFDs():
if fd > 2:
try:
os.close(fd)
except:
pass
self._resetSignalDisposition()
# PTYs do not have stdin/stdout/stderr. They only have in and out, just
# like sockets. You cannot close one without closing off the entire PTY.
def closeStdin(self):
pass
def closeStdout(self):
pass
def closeStderr(self):
pass
def doRead(self):
"""
Called when my standard output stream is ready for reading.
"""
return fdesc.readFromFD(
self.fd,
lambda data: self.proto.childDataReceived(1, data))
def fileno(self):
"""
This returns the file number of standard output on this process.
"""
return self.fd
def maybeCallProcessEnded(self):
# two things must happen before we call the ProcessProtocol's
# processEnded method. 1: the child process must die and be reaped
# (which calls our own processEnded method). 2: the child must close
# their stdin/stdout/stderr fds, causing the pty to close, causing
# our connectionLost method to be called. #2 can also be triggered
# by calling .loseConnection().
if self.lostProcess == 2:
_BaseProcess.maybeCallProcessEnded(self)
def connectionLost(self, reason):
"""
I call this to clean up when one or all of my connections have died.
"""
abstract.FileDescriptor.connectionLost(self, reason)
os.close(self.fd)
self.lostProcess += 1
self.maybeCallProcessEnded()
def writeSomeData(self, data):
"""
Write some data to the open process.
"""
return fdesc.writeToFD(self.fd, data)
|
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Salvatore Orlando, VMware
#
import logging
import mock
from oslo.config import cfg
import webob.exc as webexc
import webtest
from neutron.api import extensions
from neutron.common import exceptions as n_exc
from neutron import context
from neutron.db import api as db_api
from neutron.db import servicetype_db as st_db
from neutron.extensions import servicetype
from neutron.plugins.common import constants
from neutron.services import provider_configuration as provconf
from neutron.tests import base
from neutron.tests.unit import dummy_plugin as dp
from neutron.tests.unit import test_api_v2
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import test_extensions
from neutron.tests.unit import testlib_api
LOG = logging.getLogger(__name__)
DEFAULT_SERVICE_DEFS = [{'service_class': constants.DUMMY,
'plugin': dp.DUMMY_PLUGIN_NAME}]
_uuid = test_api_v2._uuid
_get_path = test_api_v2._get_path
class ServiceTypeManagerTestCase(base.BaseTestCase):
def setUp(self):
super(ServiceTypeManagerTestCase, self).setUp()
st_db.ServiceTypeManager._instance = None
self.manager = st_db.ServiceTypeManager.get_instance()
self.ctx = context.get_admin_context()
def test_service_provider_driver_not_unique(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas:driver'],
'service_providers')
prov = {'service_type': constants.LOADBALANCER,
'name': 'name2',
'driver': 'driver',
'default': False}
self.manager._load_conf()
self.assertRaises(
n_exc.Invalid, self.manager.conf.add_provider, prov)
def test_get_service_providers(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas:driver_path',
constants.DUMMY + ':dummy:dummy_dr'],
'service_providers')
ctx = context.get_admin_context()
provconf.parse_service_provider_opt()
self.manager._load_conf()
res = self.manager.get_service_providers(ctx)
self.assertEqual(len(res), 2)
res = self.manager.get_service_providers(
ctx,
filters=dict(service_type=[constants.DUMMY])
)
self.assertEqual(len(res), 1)
res = self.manager.get_service_providers(
ctx,
filters=dict(service_type=[constants.LOADBALANCER])
)
self.assertEqual(len(res), 1)
def test_multiple_default_providers_specified_for_service(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.LOADBALANCER +
':lbaas2:driver_path:default'],
'service_providers')
self.assertRaises(n_exc.Invalid, self.manager._load_conf)
def test_get_default_provider(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'],
'service_providers')
self.manager._load_conf()
# can pass None as a context
p = self.manager.get_default_service_provider(None,
constants.LOADBALANCER)
self.assertEqual(p, {'service_type': constants.LOADBALANCER,
'name': 'lbaas1',
'driver': 'driver_path',
'default': True})
self.assertRaises(
provconf.DefaultServiceProviderNotFound,
self.manager.get_default_service_provider,
None, constants.DUMMY
)
def test_add_resource_association(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'],
'service_providers')
self.manager._load_conf()
ctx = context.get_admin_context()
self.manager.add_resource_association(ctx,
constants.LOADBALANCER,
'lbaas1', '123-123')
self.assertEqual(ctx.session.
query(st_db.ProviderResourceAssociation).count(),
1)
assoc = ctx.session.query(st_db.ProviderResourceAssociation).one()
ctx.session.delete(assoc)
def test_invalid_resource_association(self):
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas1:driver_path:default',
constants.DUMMY +
':lbaas2:driver_path2'],
'service_providers')
self.manager._load_conf()
ctx = context.get_admin_context()
self.assertRaises(provconf.ServiceProviderNotFound,
self.manager.add_resource_association,
ctx, 'BLABLA_svc', 'name', '123-123')
class TestServiceTypeExtensionManager(object):
"""Mock extensions manager."""
def get_resources(self):
return (servicetype.Servicetype.get_resources() +
dp.Dummy.get_resources())
def get_actions(self):
return []
def get_request_extensions(self):
return []
class ServiceTypeExtensionTestCaseBase(testlib_api.WebTestCase):
fmt = 'json'
def setUp(self):
# This is needed because otherwise a failure will occur due to
# a nonexistent core_plugin
self.setup_coreplugin(test_db_plugin.DB_PLUGIN_KLASS)
cfg.CONF.set_override('service_plugins',
["%s.%s" % (dp.__name__,
dp.DummyServicePlugin.__name__)])
self.addCleanup(cfg.CONF.reset)
# Ensure existing ExtensionManager is not used
extensions.PluginAwareExtensionManager._instance = None
ext_mgr = TestServiceTypeExtensionManager()
self.ext_mdw = test_extensions.setup_extensions_middleware(ext_mgr)
self.api = webtest.TestApp(self.ext_mdw)
self.resource_name = servicetype.RESOURCE_NAME.replace('-', '_')
super(ServiceTypeExtensionTestCaseBase, self).setUp()
class ServiceTypeExtensionTestCase(ServiceTypeExtensionTestCaseBase):
def setUp(self):
self._patcher = mock.patch(
"neutron.db.servicetype_db.ServiceTypeManager",
autospec=True)
self.mock_mgr = self._patcher.start()
self.mock_mgr.get_instance.return_value = self.mock_mgr.return_value
super(ServiceTypeExtensionTestCase, self).setUp()
def test_service_provider_list(self):
instance = self.mock_mgr.return_value
res = self.api.get(_get_path('service-providers', fmt=self.fmt))
instance.get_service_providers.assert_called_with(mock.ANY,
filters={},
fields=[])
self.assertEqual(res.status_int, webexc.HTTPOk.code)
class ServiceTypeExtensionTestCaseXML(ServiceTypeExtensionTestCase):
fmt = 'xml'
class ServiceTypeManagerExtTestCase(ServiceTypeExtensionTestCaseBase):
"""Tests ServiceTypemanager as a public API."""
def setUp(self):
# Blank out service type manager instance
st_db.ServiceTypeManager._instance = None
cfg.CONF.set_override('service_provider',
[constants.LOADBALANCER +
':lbaas:driver_path',
constants.DUMMY + ':dummy:dummy_dr'],
'service_providers')
self.addCleanup(db_api.clear_db)
super(ServiceTypeManagerExtTestCase, self).setUp()
def _list_service_providers(self):
return self.api.get(_get_path('service-providers', fmt=self.fmt))
def test_list_service_providers(self):
res = self._list_service_providers()
self.assertEqual(res.status_int, webexc.HTTPOk.code)
data = self.deserialize(res)
self.assertIn('service_providers', data)
self.assertEqual(len(data['service_providers']), 2)
class ServiceTypeManagerExtTestCaseXML(ServiceTypeManagerExtTestCase):
fmt = 'xml'
|
|
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.core.logger import Logger
from resource_management.libraries.functions.default import default
from utils import get_value_from_jmx
class NAMENODE_STATE:
ACTIVE = "active"
STANDBY = "standby"
UNKNOWN = "unknown"
class NamenodeHAState:
"""
Represents the current state of the Namenode Hosts in High Availability Mode
"""
def __init__(self):
"""
Initializes all fields by querying the Namenode state.
Raises a ValueError if unable to construct the object.
"""
import params
self.name_service = default("/configurations/hdfs-site/dfs.nameservices", None)
if not self.name_service:
raise ValueError("Could not retrieve property dfs.nameservices")
nn_unique_ids_key = "dfs.ha.namenodes." + str(self.name_service)
# List of the nn unique ids
self.nn_unique_ids = default("/configurations/hdfs-site/" + nn_unique_ids_key, None)
if not self.nn_unique_ids:
raise ValueError("Could not retrieve property " + nn_unique_ids_key)
self.nn_unique_ids = self.nn_unique_ids.split(",")
self.nn_unique_ids = [x.strip() for x in self.nn_unique_ids]
policy = default("/configurations/hdfs-site/dfs.http.policy", "HTTP_ONLY")
self.encrypted = policy.upper() == "HTTPS_ONLY"
jmx_uri_fragment = ("https" if self.encrypted else "http") + "://{0}/jmx?qry=Hadoop:service=NameNode,name=NameNodeStatus"
namenode_http_fragment = "dfs.namenode.http-address.{0}.{1}"
namenode_https_fragment = "dfs.namenode.https-address.{0}.{1}"
# Dictionary where the key is the Namenode State (e.g., ACTIVE), and the value is a set of hostnames
self.namenode_state_to_hostnames = {}
# Dictionary from nn unique id name to a tuple of (http address, https address)
self.nn_unique_id_to_addresses = {}
for nn_unique_id in self.nn_unique_ids:
http_key = namenode_http_fragment.format(self.name_service, nn_unique_id)
https_key = namenode_https_fragment.format(self.name_service, nn_unique_id)
http_value = default("/configurations/hdfs-site/" + http_key, None)
https_value = default("/configurations/hdfs-site/" + https_key, None)
actual_value = https_value if self.encrypted else http_value
hostname = actual_value.split(":")[0].strip() if actual_value and ":" in actual_value else None
self.nn_unique_id_to_addresses[nn_unique_id] = (http_value, https_value)
try:
if not hostname:
raise Exception("Could not retrieve hostname from address " + actual_value)
jmx_uri = jmx_uri_fragment.format(actual_value)
state = get_value_from_jmx(jmx_uri, "State")
if not state:
raise Exception("Could not retrieve Namenode state from URL " + jmx_uri)
state = state.lower()
if state not in [NAMENODE_STATE.ACTIVE, NAMENODE_STATE.STANDBY]:
state = NAMENODE_STATE.UNKNOWN
if state in self.namenode_state_to_hostnames:
self.namenode_state_to_hostnames[state].add(hostname)
else:
hostnames = set([hostname, ])
self.namenode_state_to_hostnames[state] = hostnames
except:
Logger.error("Could not get namenode state for " + nn_unique_id)
def __str__(self):
return "Namenode HA State: {\n" + \
("IDs: %s\n" % ", ".join(self.nn_unique_ids)) + \
("Addresses: %s\n" % str(self.nn_unique_id_to_addresses)) + \
("States: %s\n" % str(self.namenode_state_to_hostnames)) + \
("Encrypted: %s\n" % str(self.encrypted)) + \
("Healthy: %s\n" % str(self.is_healthy())) + \
"}"
def is_encrypted(self):
"""
:return: Returns a bool indicating if HTTPS is enabled
"""
return self.encrypted
def get_nn_unique_ids(self):
"""
:return: Returns a list of the nn unique ids
"""
return self.nn_unique_ids
def get_nn_unique_id_to_addresses(self):
"""
:return: Returns a dictionary where the key is the nn unique id, and the value is a tuple of (http address, https address).
Each address is of the form hostname:port.
"""
return self.nn_unique_id_to_addresses
def get_address_for_nn_id(self, id):
"""
:param id: Namenode ID
:return: Returns the appropriate address (HTTP if no encryption, HTTPS otherwise) for the given namenode id.
"""
if id in self.nn_unique_id_to_addresses:
addresses = self.nn_unique_id_to_addresses[id]
if addresses and len(addresses) == 2:
return addresses[1] if self.encrypted else addresses[0]
return None
def get_address_for_host(self, hostname):
"""
:param hostname: Host name
:return: Returns the appropriate address (HTTP if no encryption, HTTPS otherwise) for the given host.
"""
for id, addresses in self.nn_unique_id_to_addresses.iteritems():
if addresses and len(addresses) == 2:
if ":" in addresses[0]:
nn_hostname = addresses[0].split(":")[0].strip()
if nn_hostname == hostname:
# Found the host
return addresses[1] if self.encrypted else addresses[0]
return None
def get_namenode_state_to_hostnames(self):
"""
:return: Returns a dictionary where the key is a member of NAMENODE_STATE, and the value is a set of hostnames.
"""
return self.namenode_state_to_hostnames
def get_address(self, namenode_state):
"""
@param namenode_state: Member of NAMENODE_STATE
:return: The address that corresponds to the first host with the given state.
"""
hosts = self.namenode_state_to_hostnames[namenode_state] if namenode_state in self.namenode_state_to_hostnames else []
if hosts and len(hosts) > 0:
hostname = list(hosts)[0]
return self.get_address_for_host(hostname)
return None
def is_active(self, host_name):
"""
:param host_name: Host name
:return: Return True if this is the active NameNode, otherwise, False.
"""
return self._is_in_state(host_name, NAMENODE_STATE.ACTIVE)
def is_standby(self, host_name):
"""
:param host_name: Host name
:return: Return True if this is the standby NameNode, otherwise, False.
"""
return self._is_in_state(host_name, NAMENODE_STATE.STANDBY)
def _is_in_state(self, host_name, state):
"""
:param host_name: Host name
:param state: State to check
:return: Return True if this NameNode is in the specified state, otherwise, False.
"""
mapping = self.get_namenode_state_to_hostnames()
if state in mapping:
hosts_in_state = mapping[state]
if hosts_in_state is not None and len(hosts_in_state) == 1 and next(iter(hosts_in_state)).lower() == host_name.lower():
return True
return False
def is_healthy(self):
"""
:return: Returns a bool indicating if exactly one ACTIVE and one STANDBY host exist.
"""
active_hosts = self.namenode_state_to_hostnames[NAMENODE_STATE.ACTIVE] if NAMENODE_STATE.ACTIVE in self.namenode_state_to_hostnames else []
standby_hosts = self.namenode_state_to_hostnames[NAMENODE_STATE.STANDBY] if NAMENODE_STATE.STANDBY in self.namenode_state_to_hostnames else []
return len(active_hosts) == 1 and len(standby_hosts) == 1
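# Illustrative example (hypothetical hostnames, not taken from any cluster):
# with a state mapping such as
#
#     {NAMENODE_STATE.ACTIVE:  set(["nn1.example.com"]),
#      NAMENODE_STATE.STANDBY: set(["nn2.example.com"])}
#
# is_healthy() returns True because there is exactly one ACTIVE and one
# STANDBY host. If both NameNodes report STANDBY (for example during a failed
# failover), the ACTIVE set is empty and is_healthy() returns False.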
|
|
# General Utilities for Systems Management Ultra Thin Layer
#
# Copyright 2017 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import math
from smtLayer import msgs
fiveGigSize = (1024 * 5)
modId = 'GUT'
def cvtToBlocks(rh, diskSize):
"""
Convert a disk storage value to a number of blocks.
Input:
Request Handle
Size of disk in bytes
Output:
Results structure:
overallRC - Overall return code for the function:
0 - Everything went ok
4 - Input validation error
rc - Return code causing the return. Same as overallRC.
rs - Reason code causing the return.
errno - Errno value causing the return. Always zero.
Converted value in blocks
"""
rh.printSysLog("Enter generalUtils.cvtToBlocks")
blocks = 0
results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0}
blocks = diskSize.strip().upper()
lastChar = blocks[-1]
if lastChar == 'G' or lastChar == 'M':
# Convert the bytes to blocks
byteSize = blocks[:-1]
if byteSize == '':
# The size of the disk is not valid.
msg = msgs.msg['0200'][1] % (modId, blocks)
rh.printLn("ES", msg)
results = msgs.msg['0200'][0]
else:
try:
if lastChar == 'M':
blocks = (float(byteSize) * 1024 * 1024) / 512
elif lastChar == 'G':
blocks = (float(byteSize) * 1024 * 1024 * 1024) / 512
blocks = str(int(math.ceil(blocks)))
except Exception:
# Failed to convert to a number of blocks.
msg = msgs.msg['0201'][1] % (modId, byteSize)
rh.printLn("ES", msg)
results = msgs.msg['0201'][0]
elif blocks.strip('1234567890'):
# Size is not an integer number of blocks.
msg = msgs.msg['0202'][1] % (modId, blocks)
rh.printLn("ES", msg)
results = msgs.msg['0202'][0]
rh.printSysLog("Exit generalUtils.cvtToBlocks, rc: " +
str(results['overallRC']))
return results, blocks
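# Worked example (arithmetic only; the Request Handle is omitted): a diskSize
# of '1G' converts to (1 * 1024 * 1024 * 1024) / 512 = 2097152, so cvtToBlocks
# returns the string '2097152'; '500M' gives (500 * 1024 * 1024) / 512 =
# 1024000 blocks.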
def cvtToCyl(rh, diskSize):
"""
Convert a disk storage value to a number of cylinders.
Input:
Request Handle
Size of disk in bytes
Output:
Results structure:
overallRC - Overall return code for the function:
0 - Everything went ok
4 - Input validation error
rc - Return code causing the return. Same as overallRC.
rs - Reason code causing the return.
errno - Errno value causing the return. Always zero.
Converted value in cylinders
"""
rh.printSysLog("Enter generalUtils.cvtToCyl")
cyl = 0
results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0}
cyl = diskSize.strip().upper()
lastChar = cyl[-1]
if lastChar == 'G' or lastChar == 'M':
# Convert the bytes to cylinders
byteSize = cyl[:-1]
if byteSize == '':
# The size of the disk is not valid.
msg = msgs.msg['0200'][1] % (modId, cyl)
rh.printLn("ES", msg)
results = msgs.msg['0200'][0]
else:
try:
if lastChar == 'M':
cyl = (float(byteSize) * 1024 * 1024) / 737280
elif lastChar == 'G':
cyl = (float(byteSize) * 1024 * 1024 * 1024) / 737280
cyl = str(int(math.ceil(cyl)))
except Exception:
# Failed to convert to a number of cylinders.
msg = msgs.msg['0203'][1] % (modId, byteSize)
rh.printLn("ES", msg)
results = msgs.msg['0203'][0]
elif cyl.strip('1234567890'):
# Size is not an integer value.
msg = msgs.msg['0204'][1] % (modId, cyl)
rh.printLn("ES", msg)
results = msgs.msg['0204'][0]
rh.printSysLog("Exit generalUtils.cvtToCyl, rc: " +
str(results['overallRC']))
return results, cyl
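# Worked example (arithmetic only): '1G' converts to
# (1 * 1024 * 1024 * 1024) / 737280 = 1456.35..., which math.ceil rounds up to
# 1457 cylinders (737280 is the bytes-per-cylinder constant used above).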
def cvtToMag(rh, size):
"""
Convert a size value to a number with a magnitude appended.
Input:
Request Handle
Size bytes
Output:
Converted value with a magnitude
"""
rh.printSysLog("Enter generalUtils.cvtToMag")
mSize = ''
size = size / (1024 * 1024)
if size > (1024 * 5):
# Size is greater than 5G. Using "G" magnitude.
size = size / 1024
mSize = "%.1fG" % size
else:
# Size is less than or equal to 5G. Using "M" magnitude.
mSize = "%.1fM" % size
rh.printSysLog("Exit generalUtils.cvtToMag, magSize: " + mSize)
return mSize
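# Worked example (arithmetic only): a size of 6442450944 bytes (6 GiB) becomes
# 6144 MiB, which is above the 5120 MiB threshold, so cvtToMag returns '6.0G';
# 1073741824 bytes (1 GiB) stays at or below the threshold and returns
# '1024.0M'.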
def getSizeFromPage(rh, page):
"""
Convert a size value from page to a number with a magnitude appended.
Input:
Request Handle
Size in page
Output:
Converted value with a magnitude
"""
rh.printSysLog("Enter generalUtils.getSizeFromPage")
bSize = float(page) * 4096
mSize = cvtToMag(rh, bSize)
rh.printSysLog("Exit generalUtils.getSizeFromPage, magSize: " + mSize)
return mSize
def parseCmdline(rh, posOpsList, keyOpsList):
"""
Parse the request command input.
Input:
Request Handle
Positional Operands List. This is a dictionary that contains
an array for each subfunction. The array contains an entry
(itself an array) for each positional operand.
That array contains:
- Human readable name of the operand,
- Property in the parms dictionary to hold the value,
- Is it required (True) or optional (False),
- Type of data (1: int, 2: string).
Keyword Operands List. This is a dictionary that contains
an item for each subfunction. The value for the subfunction is a
dictionary that contains a key for each recognized operand.
The value associated with the key is an array that contains
the following:
- the related ReqHandle.parms item that stores the value,
- how many values follow the keyword, and
- the type of data for those values (1: int, 2: string)
Output:
Request Handle updated with parsed input.
Return code - 0: ok, non-zero: error
"""
rh.printSysLog("Enter generalUtils.parseCmdline")
# Handle any positional operands on the line.
if rh.results['overallRC'] == 0 and rh.subfunction in posOpsList:
ops = posOpsList[rh.subfunction]
currOp = 0
# While we have operands on the command line AND
# we have more operands in the positional operand list.
while rh.argPos < rh.totalParms and currOp < len(ops):
key = ops[currOp][1] # key for rh.parms[]
opType = ops[currOp][3] # data type
if opType == 1:
# Handle an integer data type
try:
rh.parms[key] = int(rh.request[rh.argPos])
except ValueError:
# operand is not an integer
msg = msgs.msg['0001'][1] % (modId, rh.function,
rh.subfunction, (currOp + 1),
ops[currOp][0], rh.request[rh.argPos])
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0001'][0])
break
else:
rh.parms[key] = rh.request[rh.argPos]
currOp += 1
rh.argPos += 1
if (rh.argPos >= rh.totalParms and currOp < len(ops) and
ops[currOp][2] is True):
# Check for missing required operands.
msg = msgs.msg['0002'][1] % (modId, rh.function,
rh.subfunction, ops[currOp][0], (currOp + 1))
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0002'][0])
# Handle any keyword operands on the line.
if rh.results['overallRC'] == 0 and rh.subfunction in keyOpsList:
while rh.argPos < rh.totalParms:
if rh.request[rh.argPos] in keyOpsList[rh.subfunction]:
keyword = rh.request[rh.argPos]
rh.argPos += 1
ops = keyOpsList[rh.subfunction]
if keyword in ops:
key = ops[keyword][0]
opCnt = ops[keyword][1]
opType = ops[keyword][2]
if opCnt == 0:
# Keyword has no additional value
rh.parms[key] = True
else:
# Keyword has values following it.
storeIntoArray = False # Assume single word
if opCnt < 0:
storeIntoArray = True
# Property is a list of all of the remaining parms.
opCnt = rh.totalParms - rh.argPos
if opCnt == 0:
# Need at least 1 operand value
opCnt = 1
elif opCnt > 1:
storeIntoArray = True
if opCnt + rh.argPos > rh.totalParms:
# keyword is missing its related value operand
msg = msgs.msg['0003'][1] % (modId, rh.function,
rh.subfunction, keyword)
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0003'][0])
break
"""
Add the expected value to the property.
Take into account that there may be more than one.
"""
if storeIntoArray:
# Initialize the list.
rh.parms[key] = []
for i in range(0, opCnt):
if opType == 1:
# convert from string to int and save it.
try:
if not storeIntoArray:
rh.parms[key] = (
int(rh.request[rh.argPos]))
else:
rh.parms[key].append(int(
rh.request[rh.argPos]))
except ValueError:
# keyword value is not an integer
msg = (msgs.msg['0004'][1] %
(modId, rh.function, rh.subfunction,
keyword, rh.request[rh.argPos]))
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0004'][0])
break
else:
# Value is a string, save it.
if not storeIntoArray:
rh.parms[key] = rh.request[rh.argPos]
else:
rh.parms[key].append(rh.request[rh.argPos])
rh.argPos += 1
if rh.results['overallRC'] != 0:
# The loop above hit an error; break out of this loop as well.
break
else:
# keyword is not in the subfunction's keyword list
msg = msgs.msg['0005'][1] % (modId, rh.function,
rh.subfunction, keyword)
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0005'][0])
break
else:
# Subfunction does not support keywords
msg = (msgs.msg['0006'][1] % (modId, rh.function,
rh.subfunction, rh.request[rh.argPos]))
rh.printLn("ES", msg)
rh.updateResults(msgs.msg['0006'][0])
break
rh.printSysLog("Exit generalUtils.parseCmdLine, rc: " +
str(rh.results['overallRC']))
return rh.results['overallRC']
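# Illustrative sketch (hypothetical subfunction and operand names, shaped to
# match the structures described in parseCmdline's docstring):
#
#     posOpsList = {
#         'CREATE': [
#             ['Image name', 'image', True, 2],   # name, parms key, required, type
#             ['Disk size', 'size', False, 1],
#         ]}
#     keyOpsList = {
#         'CREATE': {
#             '--memory': ['memory', 1, 1],       # parms key, value count, type
#             '--labels': ['labels', -1, 2],      # negative count: consume the rest
#         }}
#
# With these tables, a request such as "... CREATE myimg 10 --memory 2048"
# would leave rh.parms as {'image': 'myimg', 'size': 10, 'memory': 2048}.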
|
|
#
# Copyright (c) SAS Institute, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Module for parsing package sections of xml files from the repository metadata.
"""
__all__ = ('PackageXmlMixIn', 'PackageCompare', )
import os
from rpath_xmllib import api1 as xmllib
from updatebot.lib import util
from repomd.errors import UnknownElementError, UnknownAttributeError
from repomd.xmlcommon import SlotNode
class PackageCompare(object):
def __str__(self):
epoch = self.epoch
if epoch is None:
epoch = ''
elif not isinstance(epoch, str):
epoch = str(epoch)
return '-'.join([self.name, epoch, self.version, self.release,
self.arch])
def __hash__(self):
# NOTE: We do not hash on epoch, because not all package objects will
# have epoch set. This will cause hash collisions that should be
# handled in __cmp__.
return hash((self.name, self.version, self.release, self.arch))
def __cmp__(self, other):
return util.packageCompareByName(self, other)
class _Package(SlotNode, PackageCompare):
"""
Python representation of package section of xml files from the repository
metadata.
"""
# R0902 - Too many instance attributes
# pylint: disable=R0902
__slots__ = ('name', 'arch', 'epoch', 'version', 'release',
'checksum', 'checksumType', 'summary', 'description',
'fileTimestamp', 'buildTimestamp', 'packageSize',
'installedSize', 'archiveSize', 'location', 'format',
'license', 'vendor', 'group', 'buildhost',
'sourcerpm', 'headerStart', 'headerEnd',
'licenseToConfirm', 'files')
# All attributes are defined in __init__ by iterating over __slots__,
# this confuses pylint.
# W0201 - Attribute $foo defined outside __init__
# pylint: disable=W0201
def addChild(self, child):
"""
Parse children of package element.
"""
# R0912 - Too many branches
# pylint: disable=R0912
# R0915 - Too many statements
# pylint: disable=R0915
# E0203 - Access to member 'files' before its definition line 84
# files is set to None by the superclasses __init__
# pylint: disable=E0203
n = child.getName()
if n == 'name':
self.name = child.finalize()
elif n == 'arch':
self.arch = child.finalize()
elif n == 'version':
self.epoch = child.getAttribute('epoch')
self.version = child.getAttribute('ver')
self.release = child.getAttribute('rel')
elif n == 'checksum':
self.checksum = child.finalize()
self.checksumType = child.getAttribute('type')
elif n == 'summary':
self.summary = child.finalize()
elif n == 'description':
self.description = child.finalize()
elif n == 'packager':
self.packager = child.finalize()
elif n == 'url':
self.url = child.finalize()
elif n == 'time':
self.fileTimestamp = child.getAttribute('file')
self.buildTimestamp = child.getAttribute('build')
elif n == 'size':
self.packageSize = child.getAttribute('package')
self.installedSize = child.getAttribute('installed')
self.archiveSize = child.getAttribute('archive')
elif n == 'location':
self.location = child.getAttribute('href')
elif n == 'file':
if self.files is None:
self.files = []
self.files.append(child.finalize())
elif child.getName() == 'format':
self.format = []
for node in child.iterChildren():
nn = node.getName()
if nn == 'rpm:license':
self.license = node.getText()
elif nn == 'rpm:vendor':
self.vendor = node.getText()
elif nn == 'rpm:group':
self.group = node.getText()
elif nn == 'rpm:buildhost':
self.buildhost = node.getText()
elif nn == 'rpm:sourcerpm':
self.sourcerpm = node.getText()
elif nn == 'rpm:header-range':
self.headerStart = node.getAttribute('start')
self.headerEnd = node.getAttribute('end')
elif nn in ('rpm:provides', 'rpm:requires',
'rpm:obsoletes', 'rpm:recommends',
'rpm:conflicts', 'suse:freshens',
'rpm:enhances', 'rpm:supplements',
'rpm:suggests', ):
self.format.append(node)
elif nn == 'file':
pass
else:
raise UnknownElementError(node)
elif n == 'pkgfiles':
pass
elif n == 'suse:license-to-confirm':
self.licenseToConfirm = child.finalize()
else:
raise UnknownElementError(child)
def __repr__(self):
return os.path.basename(self.location)
def __cmp__(self, other):
pkgcmp = PackageCompare.__cmp__(self, other)
if pkgcmp != 0:
return pkgcmp
# Compare arch before checksum to catch cases of multiple
# arch-specific packages that happen to have same content
# (e.g. SLES xorg-x11-fonts packages).
archcmp = cmp(self.arch, other.arch)
if archcmp != 0:
return archcmp
# Compare checksum only for equality, otherwise sorting will result in
# checksum ordering.
if (self.checksum and other.checksum and
self.checksumType == other.checksumType and
self.checksum == other.checksum):
return 0
# Compare on archiveSize for equality only. This is needed for rpms
# that have identical contents, but may have been rebuilt. Ideally we
# would use file checksums for this, but we don't have the payload
# contents available at this time.
if (self.archiveSize and other.archiveSize and
self.archiveSize == other.archiveSize):
return 0
return cmp(self.location, other.location)
def getNevra(self):
"""
Return the name, epoch, version, release, and arch of the package.
"""
return (self.name, self.epoch, self.version, self.release, self.arch)
def getConaryVersion(self):
"""
Get the conary version of a source package.
"""
assert self.arch == 'src'
filename = os.path.basename(self.location)
nameVerRelease = ".".join(filename.split(".")[:-2])
ver = "_".join(nameVerRelease.split("-")[-2:])
return ver
def getFileName(self):
"""
Returns the expected package file name.
"""
return '%s-%s-%s.%s.rpm' % (self.name, self.version,
self.release, self.arch)
class _RpmEntry(SlotNode):
"""
Parse any element that contains rpm:entry or suse:entry elements.
"""
__slots__ = ()
def addChild(self, child):
"""
Parse rpm:entry and suse:entry nodes.
"""
if child.getName() in ('rpm:entry', 'suse:entry'):
for attr, value in child.iterAttributes():
if attr == 'kind':
child.kind = value
elif attr == 'name':
child.name = value
elif attr == 'epoch':
child.epoch = value
elif attr == 'ver':
child.version = value
elif attr == 'rel':
child.release = value
elif attr == 'flags':
child.flags = value
elif attr == 'pre':
child.pre = value
else:
raise UnknownAttributeError(child, attr)
SlotNode.addChild(self, child)
else:
raise UnknownElementError(child)
class _RpmEntries(SlotNode):
"""
Class to represent all rpm:entry and suse:entry types.
"""
__slots__ = ('kind', 'name', 'epoch', 'version', 'release', 'flags',
'pre', )
class _RpmRequires(_RpmEntry):
"""
Parse rpm:requires children.
"""
__slots__ = ()
class _RpmRecommends(_RpmEntry):
"""
Parse rpm:recommends children.
"""
__slots__ = ()
class _RpmProvides(_RpmEntry):
"""
Parse rpm:provides children.
"""
__slots__ = ()
class _RpmObsoletes(_RpmEntry):
"""
Parse rpm:obsoletes children.
"""
__slots__ = ()
class _RpmConflicts(_RpmEntry):
"""
Parse rpm:conflicts children.
"""
__slots__ = ()
class _RpmEnhances(_RpmEntry):
"""
Parse rpm:enhances children.
"""
__slots__ = ()
class _RpmSupplements(_RpmEntry):
"""
Parse rpm:supplements children.
"""
__slots__ = ()
class _RpmSuggests(_RpmEntry):
"""
Parse rpm:suggests children.
"""
__slots__ = ()
class _SuseFreshens(_RpmEntry):
"""
Parse suse:freshens children.
"""
__slots__ = ()
class PackageXmlMixIn(object):
"""
Handle registering all types for parsing package elements.
"""
def _registerTypes(self):
"""
Setup databinder to parse xml.
"""
self._databinder.registerType(_Package, name='package')
self._databinder.registerType(xmllib.StringNode, name='name')
self._databinder.registerType(xmllib.StringNode, name='arch')
self._databinder.registerType(xmllib.StringNode, name='checksum')
self._databinder.registerType(xmllib.StringNode, name='summary')
self._databinder.registerType(xmllib.StringNode, name='description')
self._databinder.registerType(xmllib.StringNode, name='url')
self._databinder.registerType(_RpmEntries, name='entry',
namespace='rpm')
self._databinder.registerType(_RpmEntries, name='entry',
namespace='suse')
self._databinder.registerType(_RpmRequires, name='requires',
namespace='rpm')
self._databinder.registerType(_RpmRecommends, name='recommends',
namespace='rpm')
self._databinder.registerType(_RpmProvides, name='provides',
namespace='rpm')
self._databinder.registerType(_RpmObsoletes, name='obsoletes',
namespace='rpm')
self._databinder.registerType(_RpmConflicts, name='conflicts',
namespace='rpm')
self._databinder.registerType(_RpmEnhances, name='enhances',
namespace='rpm')
self._databinder.registerType(_RpmSupplements, name='supplements',
namespace='rpm')
self._databinder.registerType(_RpmSuggests, name='suggests',
namespace='rpm')
self._databinder.registerType(_SuseFreshens, name='freshens',
namespace='suse')
self._databinder.registerType(xmllib.StringNode,
name='license-to-confirm',
namespace='suse')
|
|
# Parts are courtesy of `Ben Hodgson <http://benhodgson.com/>`_.
# Python imports
import logging
import re
# Project imports
from classified.probe.base import Probe, isdigit
decimal_decoder = lambda s: int(s, 10)
decimal_encoder = lambda i: str(i)
def luhn_sum_mod_base(string, base=10, decoder=decimal_decoder):
# Adapted from http://en.wikipedia.org/wiki/Luhn_algorithm
digits = list(map(decoder, string))
return (sum(digits[::-2]) +
sum([sum(divmod(2*d, base)) for d in digits[-2::-2]])) % base
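# For example (consistent with the doctests below), the MasterCard test number
# '5105105105105100' already ends in its check digit, so
# luhn_sum_mod_base('5105105105105100') == 0 -- which is exactly the condition
# verify() tests for.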
def generate(string, base=10, encoder=decimal_encoder,
decoder=decimal_decoder):
'''
Calculates the Luhn mod N check character for the given input string. This
character should be appended to the input string to produce a valid Luhn
mod N string in the given base.
>>> value = '4205092350249'
>>> generate(value)
'1'
When operating in a base other than decimal, encoder and decoder callables
should be supplied. The encoder should take a single argument, an integer,
and return the character corresponding to that integer in the operating
base. Conversely, the decoder should take a string containing a single
character and return its integer value in the operating base. Note that
the mapping between values and characters defined by the encoder and
decoder should be one-to-one.
For example, when working in hexadecimal:
>>> hex_alphabet = '0123456789abcdef'
>>> hex_encoder = lambda i: hex_alphabet[i]
>>> hex_decoder = lambda s: hex_alphabet.index(s)
>>> value = 'a8b56f'
>>> generate(value, base=16, encoder=hex_encoder, decoder=hex_decoder)
'b'
>>> verify('a8b56fb', base=16, decoder=hex_decoder)
True
>>> verify('a8b56fc', base=16, decoder=hex_decoder)
False
'''
d = luhn_sum_mod_base(string+encoder(0), base=base, decoder=decoder)
if d != 0:
d = base - d
return encoder(d)
def verify(string, base=10, decoder=decimal_decoder):
'''
Verifies that the given string is a valid Luhn mod N string.
>>> verify('5105105105105100') # MasterCard test number
True
When operating in a base other than decimal, encoder and decoder callables
should be supplied. The encoder should take a single argument, an integer,
and return the character corresponding to that integer in the operating
base. Conversely, the decoder should take a string containing a single
character and return its integer value in the operating base. Note that
the mapping between values and characters defined by the encoder and
decoder should be one-to-one.
For example, 'b' is the correct check character for the hexadecimal string
'a8b56f':
>>> hex_decoder = lambda s: '0123456789abcdef'.index(s)
>>> verify('a8b56fb', base=16, decoder=hex_decoder)
True
Any other check digit (in this example: 'c'), will result in a failed
verification:
>>> verify('a8b56fc', base=16, decoder=hex_decoder)
False
'''
return luhn_sum_mod_base(string, base=base, decoder=decoder) == 0
def mask(card_number, keep=4):
'''
Mask a card number so it's suitable for printing.
'''
keep *= -1
return '*' * len(card_number[:keep]) + card_number[keep:]
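# Illustrative usage sketch (added for clarity; not part of the original
# module). It shows how generate(), verify() and mask() compose for a decimal
# card number; '5105105105105100' is the MasterCard test number already used
# in the verify() doctest above.
def _example_luhn_usage():
    partial = '510510510510510'     # test number without its check character
    check = generate(partial)       # Luhn check character, '0' for this input
    full = partial + check
    assert verify(full)             # the completed number passes the check
    return mask(full)               # '************5100'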
class PAN(Probe):
'''
Scan for Primary Account Number (PAN) data in (text) files.
'''
target = ('text/*',)
format = '{filename}[{line:d}]: {company} {card_number_masked}'
ignore = list('\x00-:\r\n')
_check = {
'American Express': dict(
length = [15],
prefix = re.compile(r'^3[47]'),
),
'Diners Club EnRoute': dict(
length = [15],
prefix = re.compile(r'^(?:2014|2149)'),
),
'Diners Club Carte Blanche': dict(
length = [14],
prefix = re.compile(r'^30[1-5]'),
),
'Diners Club International': dict(
length = [14],
prefix = re.compile(r'^36'),
),
'Diners Club America': dict(
length = [14],
prefix = re.compile(r'^5[45]'),
),
'Discover': dict(
length = [16],
prefix = re.compile(r'^6011'),
),
'InstaPayment': dict(
length = [16],
prefix = re.compile(r'^63[7-9]'),
),
'JCB': dict(
length = [16],
prefix = re.compile(r'^(?:3088|3096|3112|3158|3337|352[89]|35[3-7][0-9]|358[0-9])'),
),
'Laser': dict(
length = list(range(12, 20)),
prefix = re.compile(r'^(?:6304|6706|6771|6709)'),
),
'Maestro': dict(
length = list(range(12, 20)),
prefix = re.compile(r'^(?:5018|5020|5038|5893|6304|6759|676[1-3]|0604)'),
),
'MasterCard': dict(
length = [16],
prefix = re.compile(r'^5[1-5]'),
),
'VISA': dict(
length = [13, 16],
prefix = re.compile(r'^4'),
),
}
def __init__(self, config, *args, **kwargs):
super(PAN, self).__init__(config, *args, **kwargs)
# Also keep track of per prefix size checks
self._check_size = {}
for company, checks in self._check.items():
for length in checks['length']:
if length not in self._check_size:
self._check_size[length] = {}
self._check_size[length][company] = checks['prefix']
# Ignores, if configured
if self.config.has_option('probe:pan', 'ignore'):
self.ignore = [chr(int(char, 16)) for char in self.config.getlist('probe:pan', 'ignore')]
def luhn_check(self, card_number):
# Do the Luhn check
if verify(card_number):
return self.process_prefix(card_number)
def process_prefix(self, card_number):
length = len(card_number)
if length in self._check_size:
for company, prefix in self._check_size[length].items():
if prefix.match(card_number):
return company
def probe(self, item):
# Keep track of consecutive ranges of numbers, stripping out potential
# padding characters
digits = []
digits_min = min(self._check_size)
digits_max = max(self._check_size)
line = 0
hits = 0
try:
limit = self.config.getint('probe:pan', 'limit')
except self.config.Error:
limit = 0
prev = chr(0)
for text in item.open():
line += 1
for char in text:
# If we have a digit, append it to the digits list
if isdigit(char):
digits.append(int(char))
if len(digits) >= digits_max:
digits = digits[1:]
if len(digits) >= digits_min:
for x in range(digits_min, digits_max + 1):
card_number = ''.join(map(str, digits[:x]))
card_company = self.luhn_check(card_number)
if card_company is not None:
self.record(item,
raw=text,
line=line,
card_number=card_number,
card_number_masked=mask(card_number),
company=card_company,
)
# Rotate digits
digits = digits[x:]
# Keep track of hits
hits += 1
if limit and hits >= limit:
logging.debug('pan probe hit limit '
'of %d' % limit)
return
break
# We ignore dashes, new lines and carriage returns
elif char in self.ignore:
# .. if we have two successive ignored characters, reset
# the digits array
if prev in self.ignore:
digits = []
# Otherwise we'll reset the buffer
else:
digits = []
# Keep track of the previous character
prev = char
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras core layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.keras._impl import keras
from tensorflow.python.keras._impl.keras import testing_utils
from tensorflow.python.ops import init_ops
from tensorflow.python.platform import test
class CoreLayersTest(test.TestCase):
def test_masking(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Masking, kwargs={}, input_shape=(3, 2, 3))
def test_dropout(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Dropout, kwargs={'rate': 0.5}, input_shape=(3, 2))
with self.test_session():
testing_utils.layer_test(
keras.layers.Dropout,
kwargs={'rate': 0.5,
'noise_shape': [3, 1]},
input_shape=(3, 2))
# https://github.com/tensorflow/tensorflow/issues/14819
with self.test_session():
dropout = keras.layers.Dropout(0.5)
self.assertEqual(True, dropout.supports_masking)
with self.test_session():
testing_utils.layer_test(
keras.layers.SpatialDropout1D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4))
with self.test_session():
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 5))
with self.test_session():
testing_utils.layer_test(
keras.layers.SpatialDropout2D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 5))
with self.test_session():
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5},
input_shape=(2, 3, 4, 4, 5))
with self.test_session():
testing_utils.layer_test(
keras.layers.SpatialDropout3D,
kwargs={'rate': 0.5, 'data_format': 'channels_first'},
input_shape=(2, 3, 4, 4, 5))
def test_activation(self):
# with string argument
with self.test_session():
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': 'relu'},
input_shape=(3, 2))
# with function argument
with self.test_session():
testing_utils.layer_test(
keras.layers.Activation,
kwargs={'activation': keras.backend.relu},
input_shape=(3, 2))
def test_reshape(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (8, 1)},
input_shape=(3, 2, 4))
with self.test_session():
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(3, 2, 4))
with self.test_session():
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (1, -1)},
input_shape=(3, 2, 4))
with self.test_session():
testing_utils.layer_test(
keras.layers.Reshape,
kwargs={'target_shape': (-1, 1)},
input_shape=(None, None, 2))
def test_permute(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Permute, kwargs={'dims': (2, 1)}, input_shape=(3, 2, 4))
def test_flatten(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Flatten, kwargs={}, input_shape=(3, 2, 4))
def test_repeat_vector(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.RepeatVector, kwargs={'n': 3}, input_shape=(3, 2))
def test_lambda(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={'function': lambda x: x + 1},
input_shape=(3, 2))
with self.test_session():
testing_utils.layer_test(
keras.layers.Lambda,
kwargs={
'function': lambda x, a, b: x * a + b,
'arguments': {
'a': 0.6,
'b': 0.4
}
},
input_shape=(3, 2))
with self.test_session():
# test serialization with function
def f(x):
return x + 1
ld = keras.layers.Lambda(f)
config = ld.get_config()
ld = keras.layers.deserialize({
'class_name': 'Lambda',
'config': config
})
# test with lambda
ld = keras.layers.Lambda(
lambda x: keras.backend.concatenate([keras.backend.square(x), x]))
config = ld.get_config()
ld = keras.layers.Lambda.from_config(config)
def test_dense(self):
with self.test_session():
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 2))
with self.test_session():
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 2))
with self.test_session():
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(None, None, 2))
with self.test_session():
testing_utils.layer_test(
keras.layers.Dense, kwargs={'units': 3}, input_shape=(3, 4, 5, 2))
# Test regularization
with self.test_session():
layer = keras.layers.Dense(
3,
kernel_regularizer=keras.regularizers.l1(0.01),
bias_regularizer='l1',
activity_regularizer='l2',
name='dense_reg')
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(3, len(layer.losses))
# Test constraints
with self.test_session():
k_constraint = keras.constraints.max_norm(0.01)
b_constraint = keras.constraints.max_norm(0.01)
layer = keras.layers.Dense(
3, kernel_constraint=k_constraint, bias_constraint=b_constraint)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(layer.kernel.constraint, k_constraint)
self.assertEqual(layer.bias.constraint, b_constraint)
def test_eager_dense(self):
with context.eager_mode():
l = keras.layers.Dense(units=3,
kernel_initializer=init_ops.zeros_initializer())
self.assertAllEqual(l(constant_op.constant([[1.0]])), [[0., 0., 0.]])
def test_activity_regularization(self):
with self.test_session():
layer = keras.layers.ActivityRegularization(l1=0.1)
layer(keras.backend.variable(np.ones((2, 4))))
self.assertEqual(1, len(layer.losses))
_ = layer.get_config()
if __name__ == '__main__':
test.main()
|
|
import sys
sys.path.append('C:/Users/dmccloskey-sbrg/Documents/GitHub/SBaaS_base')
from SBaaS_base.postgresql_settings import postgresql_settings
from SBaaS_base.postgresql_orm import postgresql_orm
# read in the settings file
filename = 'C:/Users/dmccloskey-sbrg/Google Drive/SBaaS_settings/settings_metabolomics.ini';
pg_settings = postgresql_settings(filename);
# connect to the database from the settings file
pg_orm = postgresql_orm();
pg_orm.set_sessionFromSettings(pg_settings.database_settings);
session = pg_orm.get_session();
engine = pg_orm.get_engine();
# your app...
# SBaaS paths:
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_base')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_LIMS')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_quantification')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_physiology')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_MFA')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_visualization')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_models')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_thermodynamics')
sys.path.append(pg_settings.datadir_settings['github']+'/SBaaS_COBRA')
# SBaaS dependencies paths:
sys.path.append(pg_settings.datadir_settings['github']+'/io_utilities')
sys.path.append(pg_settings.datadir_settings['github']+'/quantification_analysis')
sys.path.append(pg_settings.datadir_settings['github']+'/matplotlib_utilities')
sys.path.append(pg_settings.datadir_settings['github']+'/thermodynamics')
sys.path.append(pg_settings.datadir_settings['github']+'/sampling')
sys.path.append(pg_settings.datadir_settings['github']+'/component-contribution')
sys.path.append(pg_settings.datadir_settings['github']+'/molmass')
sys.path.append(pg_settings.datadir_settings['github']+'/python_statistics')
sys.path.append(pg_settings.datadir_settings['github']+'/r_statistics')
sys.path.append(pg_settings.datadir_settings['github']+'/listDict')
sys.path.append(pg_settings.datadir_settings['github']+'/ddt_python')
#make the measuredData table
from SBaaS_thermodynamics.stage03_quantification_measuredData_execute import stage03_quantification_measuredData_execute
exmeasuredData01 = stage03_quantification_measuredData_execute(session,engine,pg_settings.datadir_settings)
exmeasuredData01.initialize_dataStage03_quantification_measuredData();
exmeasuredData01.initialize_supportedTables();
exmeasuredData01.initialize_tables()
#make the COBRA table
from SBaaS_models.models_COBRA_execute import models_COBRA_execute
exCOBRA01 = models_COBRA_execute(session,engine,pg_settings.datadir_settings);
exCOBRA01.initialize_supportedTables();
exCOBRA01.initialize_tables()
##load '150526_iDM2015'
#exCOBRA01.import_dataStage02PhysiologyModel_json(
# model_id_I='151026_iDM2015_irreversible',
# date_I='2015-10-26 00:00:00',
# model_json=pg_settings.datadir_settings['workspace_data']+ '/models/150526_iDM2015.json'
# );
#pre-load the models
#thermomodels = exCOBRA01.get_models(model_ids_I=["iJO1366"]);
thermomodels = exCOBRA01.get_models(model_ids_I=["iJO1366_ALEWt_irreversible"]);
#make the measuredData table
from SBaaS_thermodynamics.stage03_quantification_measuredData_execute import stage03_quantification_measuredData_execute
exmeasuredData01 = stage03_quantification_measuredData_execute(session,engine,pg_settings.datadir_settings)
exmeasuredData01.initialize_supportedTables();
exmeasuredData01.initialize_tables()
##reset previous experimental data imports
#exmeasuredData01.reset_dataStage03_quantification_metabolomicsData('IndustrialStrains03');
##transfer measured metabolomics data from data_stage01_quantification_averagesMIGeo
#exmeasuredData01.execute_makeMetabolomicsData_intracellular('IndustrialStrains03');
##import exometabolomic information (i.e., media)
#exmeasuredData01.import_dataStage03QuantificationMetabolomicsData_add(
# pg_settings.datadir_settings['workspace_data']+'/_input/170930_data_stage03_quantification_metabolomicsData_glcM901.csv');
#make the otherData table
from SBaaS_thermodynamics.stage03_quantification_otherData_execute import stage03_quantification_otherData_execute
exotherData01 = stage03_quantification_otherData_execute(session,engine,pg_settings.datadir_settings)
exotherData01.initialize_supportedTables();
exotherData01.initialize_tables()
##reset previous experimental data imports
#exotherData01.reset_dataStage03_quantification_otherData('IndustrialStrains03');
## import the pH, ionic strength, and temperature for the simulation
#exotherData01.import_dataStage03QuantificationOtherData_add(
# pg_settings.datadir_settings['workspace_data']+'/_input/170930_data_stage03_quantification_otherData01.csv');
#make the simulatedData table
from SBaaS_thermodynamics.stage03_quantification_simulatedData_execute import stage03_quantification_simulatedData_execute
exsimData01 = stage03_quantification_simulatedData_execute(session,engine,pg_settings.datadir_settings)
exsimData01.initialize_supportedTables();
exsimData01.initialize_tables()
##reset previous experiments
#exsimData01.reset_dataStage03_quantification_simulatedData('IndustrialStrains03')
## perform FVA and single reaction deletion simulations
#exsimData01.execute_makeSimulatedData('IndustrialStrains03',models_I=thermomodels)
#make the dG_f table
from SBaaS_thermodynamics.stage03_quantification_dG_f_execute import stage03_quantification_dG_f_execute
exdGf01 = stage03_quantification_dG_f_execute(session,engine,pg_settings.datadir_settings)
exdGf01.initialize_supportedTables();
exdGf01.initialize_tables()
##reset previous dG_f adjustments
#exdGf01.reset_dataStage03_quantification_dG_f('IndustrialStrains03');
## adjust dG0 compound formation energies to the in vivo dG compound formation energies
## i.e, to the specified pH, ionic strength and temperature
#exdGf01.execute_adjust_dG_f('IndustrialStrains03',models_I=thermomodels);
#make the dG_r table
from SBaaS_thermodynamics.stage03_quantification_dG_r_execute import stage03_quantification_dG_r_execute
exdGr01 = stage03_quantification_dG_r_execute(session,engine,pg_settings.datadir_settings)
exdGr01.initialize_supportedTables();
exdGr01.initialize_tables()
## reset previous analyses
#exdGr01.reset_dataStage03_quantification_dG_r_all('IndustrialStrains03');
## calculate the in vivo dG reaction energies from adjusted dG_f and metabolomics values
## 1. dG0_r values are first calculated from the dG_f values
## 2. dG_r values are calculated from the dG0_r values and measured data
## 3. A thermodynamic consistency check is performed based on
## FVA, SRA, and dG_r values
#exdGr01.execute_calculate_dG_r('IndustrialStrains03',models_I=thermomodels);
#exdGr01.reset_dataStage03_quantification_dG_r_comparison(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible'
# );
#perform a thermodynamic comparison (pairwise b/w all strains)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_MG1655_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_BL21_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_C_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_Crooks_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_DH5a_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_W_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#exdGr01.execute_compare_dG_r(
# analysis_id_I='IndustrialStrains03_iJO1366_ALEWt_irreversible',
# simulation_id_base_I='IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_W3110_0',
# simulation_ids_I=[],
# models_I=thermomodels,
# measured_concentration_coverage_criteria_I=0.5,
# measured_dG_f_coverage_criteria_I=0.99)
#make the tfba table
from SBaaS_thermodynamics.stage03_quantification_tfba_execute import stage03_quantification_tfba_execute
tfba01 = stage03_quantification_tfba_execute(session,engine,pg_settings.datadir_settings)
tfba01.initialize_supportedTables();
tfba01.initialize_tables()
simulation_ids = ["IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_BL21_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_C_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_Crooks_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_DH5a_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_MG1655_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_W_0",
"IndustrialStrains03_iJO1366_ALEWt_irreversible_EColi_W3110_0",
"IndustrialStrains03_iJO1366_EColi_BL21_0",
"IndustrialStrains03_iJO1366_EColi_C_0",
"IndustrialStrains03_iJO1366_EColi_Crooks_0",
"IndustrialStrains03_iJO1366_EColi_DH5a_0",
"IndustrialStrains03_iJO1366_EColi_MG1655_0",
"IndustrialStrains03_iJO1366_EColi_W_0",
"IndustrialStrains03_iJO1366_EColi_W3110_0"]
for simulation_id in simulation_ids:
print("Running simulation_id: " + simulation_id)
tfba01.execute_tfva(simulation_id,
thermomodels,
data_dir_I = '',rxn_ids_I=[],
inconsistent_dG_f_I=[],
inconsistent_concentrations_I=[],
inconsistent_tcc_I=[],
measured_concentration_coverage_criteria_I=0.5,
measured_dG_f_coverage_criteria_I=0.99,
solver_I='glpk')
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import models, migrations
import datetime
import tests.models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
# ('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='All',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.IntegerField(null=True)),
],
),
migrations.CreateModel(
name='Brand',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='BrandT',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='CacheOnSaveModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=32)),
],
),
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=128)),
],
),
migrations.CreateModel(
name='Contained',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=30)),
],
),
migrations.CreateModel(
name='DbAgnostic',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='DbBinded',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
),
migrations.CreateModel(
name='Extra',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.IntegerField(unique=True, db_column='custom_column_name')),
],
),
migrations.CreateModel(
name='GenericContainer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField()),
('name', models.CharField(max_length=30)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
],
),
migrations.CreateModel(
name='Label',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.CharField(default='', max_length=127, blank=True)),
],
),
migrations.CreateModel(
name='Labeling',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.IntegerField()),
('brand', models.ForeignKey(to='tests.BrandT')),
],
),
migrations.CreateModel(
name='LabelT',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('text', models.CharField(default='', max_length=127, blank=True)),
],
),
migrations.CreateModel(
name='Local',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.IntegerField(null=True)),
],
),
migrations.CreateModel(
name='Media',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
],
),
migrations.CreateModel(
name='Point',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('x', models.DecimalField(default=0.0, max_digits=8, decimal_places=6, blank=True)),
],
),
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=128)),
('visible', models.BooleanField(default=True)),
('category', models.ForeignKey(related_name='posts', to='tests.Category')),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=32)),
],
),
migrations.CreateModel(
name='ProductReview',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('status', models.IntegerField()),
('product', models.ForeignKey(related_name='reviews', to='tests.Product', null=True)),
],
),
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('tag', models.IntegerField()),
('user', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('title', models.CharField(max_length=128)),
],
),
migrations.CreateModel(
name='Weird',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('date_field', models.DateField(default=datetime.date(2000, 1, 1))),
('datetime_field', models.DateTimeField(default=datetime.datetime(2000, 1, 1, 10, 10))),
('time_field', models.TimeField(default=datetime.time(10, 10))),
('list_field', tests.models.IntegerArrayField(default=list)),
('custom_field', tests.models.CustomField(default=tests.models.custom_value_default)),
('binary_field', models.BinaryField()),
],
),
migrations.CreateModel(
name='Movie',
fields=[
('media_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='tests.Media')),
('year', models.IntegerField()),
],
bases=('tests.media',),
),
migrations.CreateModel(
name='PremiumBrand',
fields=[
('brand_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='tests.Brand')),
('extra', models.CharField(default='', max_length=127, blank=True)),
],
bases=('tests.brand',),
),
migrations.AddField(
model_name='labeling',
name='label',
field=models.ForeignKey(to='tests.LabelT'),
),
migrations.AddField(
model_name='extra',
name='post',
field=models.OneToOneField(to='tests.Post'),
),
migrations.AddField(
model_name='extra',
name='to_tag',
field=models.ForeignKey(to='tests.Extra', to_field='tag', null=True),
),
migrations.AddField(
model_name='brandt',
name='labels',
field=models.ManyToManyField(related_name='brands', through='tests.Labeling', to='tests.LabelT'),
),
migrations.AddField(
model_name='brand',
name='labels',
field=models.ManyToManyField(related_name='brands', to='tests.Label'),
),
migrations.CreateModel(
name='VideoProxy',
fields=[
],
options={
'proxy': True,
},
bases=('tests.video',),
),
]
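# The optional models below are only registered when their backends are
# available: ArrayField requires django.contrib.postgres, and the Geometry
# model is only created when CACHEOPS_DB selects the postgis backend.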
from funcy import suppress
with suppress(ImportError):
import django.contrib.postgres.fields
operations.append(
migrations.CreateModel(
name='TaggedPost',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('tags', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(), size=None)),
],
)
)
if os.environ.get('CACHEOPS_DB') == 'postgis':
import django.contrib.gis.db.models.fields
operations.append(
migrations.CreateModel(
name='Geometry',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('point', django.contrib.gis.db.models.fields.PointField(blank=True, default=None, dim=3, geography=True, null=True, srid=4326)),
],
)
)
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import datetime
import unittest
from unittest import mock
from unittest.mock import ANY, patch
import pytest
from airflow.exceptions import AirflowException
from airflow.models.dag import DAG
from airflow.providers.google.cloud.utils import mlengine_operator_utils
from airflow.version import version
DEFAULT_DATE = datetime.datetime(2017, 6, 6)
TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}"
class TestCreateEvaluateOps(unittest.TestCase):
INPUT_MISSING_ORIGIN = {
'dataFormat': 'TEXT',
'inputPaths': ['gs://legal-bucket/fake-input-path/*'],
'outputPath': 'gs://legal-bucket/fake-output-path',
'region': 'us-east1',
'versionName': 'projects/test-project/models/test_model/versions/test_version',
}
SUCCESS_MESSAGE_MISSING_INPUT = {
'jobId': 'eval_test_prediction',
'predictionOutput': {
'outputPath': 'gs://fake-output-path',
'predictionCount': 5000,
'errorCount': 0,
'nodeHours': 2.78,
},
'state': 'SUCCEEDED',
}
def setUp(self):
super().setUp()
self.dag = DAG(
'test_dag',
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'end_date': DEFAULT_DATE,
'project_id': 'test-project',
'region': 'us-east1',
'model_name': 'test_model',
'version_name': 'test_version',
},
schedule_interval='@daily',
)
self.metric_fn = lambda x: (0.1,)
self.metric_fn_encoded = mlengine_operator_utils.base64.b64encode(
mlengine_operator_utils.dill.dumps(self.metric_fn, recurse=True)
).decode()
def test_successful_run(self):
input_with_model = self.INPUT_MISSING_ORIGIN.copy()
pred, summary, validate = mlengine_operator_utils.create_evaluate_ops(
task_prefix='eval-test',
batch_prediction_job_id='eval-test-prediction',
data_format=input_with_model['dataFormat'],
input_paths=input_with_model['inputPaths'],
prediction_path=input_with_model['outputPath'],
metric_fn_and_keys=(self.metric_fn, ['err']),
validate_fn=(lambda x: f"err={x['err']:.1f}"),
dag=self.dag,
py_interpreter="python3",
)
with patch('airflow.providers.google.cloud.operators.mlengine.MLEngineHook') as mock_mlengine_hook:
success_message = self.SUCCESS_MESSAGE_MISSING_INPUT.copy()
success_message['predictionInput'] = input_with_model
hook_instance = mock_mlengine_hook.return_value
hook_instance.create_job.return_value = success_message
result = pred.execute(None)
mock_mlengine_hook.assert_called_once_with(
'google_cloud_default',
None,
impersonation_chain=None,
)
hook_instance.create_job.assert_called_once_with(
project_id='test-project',
job={
'jobId': 'eval_test_prediction',
'predictionInput': input_with_model,
},
use_existing_job_fn=ANY,
)
assert success_message['predictionOutput'] == result
with patch(
'airflow.providers.google.cloud.operators.dataflow.DataflowHook'
) as mock_dataflow_hook, patch(
'airflow.providers.google.cloud.operators.dataflow.BeamHook'
) as mock_beam_hook:
dataflow_hook_instance = mock_dataflow_hook.return_value
dataflow_hook_instance.start_python_dataflow.return_value = None
beam_hook_instance = mock_beam_hook.return_value
summary.execute(None)
mock_dataflow_hook.assert_called_once_with(
gcp_conn_id='google_cloud_default',
delegate_to=None,
poll_sleep=10,
drain_pipeline=False,
cancel_timeout=600,
wait_until_finished=None,
impersonation_chain=None,
)
mock_beam_hook.assert_called_once_with(runner="DataflowRunner")
beam_hook_instance.start_python_pipeline.assert_called_once_with(
variables={
'prediction_path': 'gs://legal-bucket/fake-output-path',
'labels': {'airflow-version': TEST_VERSION},
'metric_keys': 'err',
'metric_fn_encoded': self.metric_fn_encoded,
'project': 'test-project',
'region': 'us-central1',
'job_name': mock.ANY,
},
py_file=mock.ANY,
py_options=[],
py_interpreter='python3',
py_requirements=['apache-beam[gcp]>=2.14.0'],
py_system_site_packages=False,
process_line_callback=mock.ANY,
)
dataflow_hook_instance.wait_for_done.assert_called_once_with(
job_name=mock.ANY, location='us-central1', job_id=mock.ANY, multiple_jobs=False
)
with patch('airflow.providers.google.cloud.utils.mlengine_operator_utils.GCSHook') as mock_gcs_hook:
hook_instance = mock_gcs_hook.return_value
hook_instance.download.return_value = '{"err": 0.9, "count": 9}'
result = validate.execute({})
hook_instance.download.assert_called_once_with(
'legal-bucket', 'fake-output-path/prediction.summary.json'
)
assert 'err=0.9' == result
def test_failures(self):
def create_test_dag(dag_id):
dag = DAG(
dag_id,
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE,
'end_date': DEFAULT_DATE,
'project_id': 'test-project',
'region': 'us-east1',
},
schedule_interval='@daily',
)
return dag
input_with_model = self.INPUT_MISSING_ORIGIN.copy()
other_params_but_models = {
'task_prefix': 'eval-test',
'batch_prediction_job_id': 'eval-test-prediction',
'data_format': input_with_model['dataFormat'],
'input_paths': input_with_model['inputPaths'],
'prediction_path': input_with_model['outputPath'],
'metric_fn_and_keys': (self.metric_fn, ['err']),
'validate_fn': (lambda x: f"err={x['err']:.1f}"),
}
with pytest.raises(AirflowException, match='Missing model origin'):
mlengine_operator_utils.create_evaluate_ops(
dag=create_test_dag('test_dag_1'), **other_params_but_models
)
with pytest.raises(AirflowException, match='Ambiguous model origin'):
mlengine_operator_utils.create_evaluate_ops(
dag=create_test_dag('test_dag_2'),
model_uri='abc',
model_name='cde',
**other_params_but_models,
)
with pytest.raises(AirflowException, match='Ambiguous model origin'):
mlengine_operator_utils.create_evaluate_ops(
dag=create_test_dag('test_dag_3'),
model_uri='abc',
version_name='vvv',
**other_params_but_models,
)
with pytest.raises(AirflowException, match='`metric_fn` param must be callable'):
params = other_params_but_models.copy()
params['metric_fn_and_keys'] = (None, ['abc'])
mlengine_operator_utils.create_evaluate_ops(
dag=create_test_dag('test_dag_4'), model_uri='gs://blah', **params
)
with pytest.raises(AirflowException, match='`validate_fn` param must be callable'):
params = other_params_but_models.copy()
params['validate_fn'] = None
mlengine_operator_utils.create_evaluate_ops(
dag=create_test_dag('test_dag_5'), model_uri='gs://blah', **params
)
|
|
import hashlib
import json
import os
import tarfile
import warnings
from django.db import models
from semantic_version.django_fields import VersionField
from .constants import MODULE_REGEX
from .storage import ForgeStorage
class AuthorManager(models.Manager):
def get_by_natural_key(self, name):
return self.get(name__iexact=name)
class Author(models.Model):
name = models.CharField(max_length=64, unique=True)
objects = AuthorManager()
def __unicode__(self):
return self.name
def natural_key(self):
return (self.name,)
@property
def v3(self):
return {
'username': self.name.lower(),
}
class ModuleManager(models.Manager):
def get_by_natural_key(self, author, name):
return self.get(author=Author.objects.get_by_natural_key(author),
name__iexact=name)
def get_for_full_name(self, full_name):
"""
Returns Module for the given full name, e.g., 'puppetlabs/stdlib'.
"""
parsed = self.parse_full_name(full_name)
if parsed:
author, name = parsed
return self.get(author__name__iexact=author, name=name)
else:
raise self.model.DoesNotExist
def parse_full_name(self, full_name):
"""
Return the module components given a full module name, or None.
"""
match = MODULE_REGEX.match(full_name)
if match:
return (match.group('author'), match.group('module'))
else:
return None
class Module(models.Model):
author = models.ForeignKey(Author)
name = models.CharField(max_length=128, db_index=True)
desc = models.TextField(db_index=True, blank=True)
tags = models.TextField(db_index=True, blank=True)
objects = ModuleManager()
class Meta:
unique_together = ('author', 'name')
def __unicode__(self):
return self.canonical_name
@classmethod
def parse_full_name(cls, full_name):
"""
Return the module components given a full module name, or None.
"""
warnings.warn('This classmethod is deprecated, please use the '
'manager method instead.', DeprecationWarning)
return cls.objects.parse_full_name(full_name)
@classmethod
def get_for_full_name(cls, full_name):
"""
Returns Module for the given full name, e.g., 'puppetlabs/stdlib'.
"""
warnings.warn('This classmethod is deprecated, please use the '
'manager method instead.', DeprecationWarning)
return cls.objects.get_for_full_name(full_name)
@property
def canonical_name(self):
return u'%s-%s' % (self.author.name.lower(), self.name.lower())
@property
def legacy_name(self):
return u'%s/%s' % (self.author.name.lower(), self.name.lower())
@property
def latest_release(self):
"""
Return the latest version, preferably one that isn't a pre-release.
"""
# First, try and get all non pre-release versions.
releases = dict((release.version, release)
for release in self.releases.all()
if not release.version.prerelease)
if not releases:
# If all pre-releases, get all of them or return None.
releases = dict((release.version, release)
for release in self.releases.all())
if not releases:
return None
latest_version = max(releases.keys())
return releases[latest_version]
def natural_key(self):
return (self.author.name, self.name)
@property
def tag_list(self):
return self.tags.split()
@property
def v3_base(self):
return {
'name': self.name.lower(),
'owner': self.author.v3,
}
@property
def v3(self):
v3_data = self.v3_base
current_release = self.latest_release
if current_release:
current_release = current_release.v3
v3_data.update({
'current_release': current_release,
'homepage_url': current_release['metadata'].get('project_page', ''),
})
return v3_data
def tarball_upload(instance, filename):
author = instance.module.author.name
return '/'.join([author[0].lower(), author, filename])
class Release(models.Model):
module = models.ForeignKey(Module, related_name='releases')
version = VersionField(db_index=True)
tarball = models.FileField(upload_to=tarball_upload,
storage=ForgeStorage())
class Meta:
unique_together = ('module', 'version')
def __unicode__(self):
return u'%s version %s' % (self.module, self.version)
@property
def file_md5(self):
# TODO: This will become an actual database field.
self.tarball.open()
file_md5 = hashlib.md5()
file_md5.update(self.tarball.read())
self.tarball.close()
return file_md5.hexdigest()
@property
def file_size(self):
return self.tarball.size
@property
def metadata_json(self):
with tarfile.open(self.tarball.path, mode='r:gz') as tf:
metadata_ti = None
for fname in tf.getnames():
if os.path.basename(fname) == 'metadata.json':
metadata_ti = tf.getmember(fname)
break
if metadata_ti is None:
raise Exception("Can't find metadata.json for release: %s" %
self)
metadata = tf.extractfile(metadata_ti.name).read()
for encoding in ('utf-8', 'latin-1'):
try:
return metadata.decode(encoding)
except UnicodeDecodeError:
continue
raise Exception("Can't find an encoding for metadata.json")
@property
def metadata(self):
return json.loads(self.metadata_json)
@property
def v3(self):
return {
'file_md5': self.file_md5,
'file_size': self.file_size,
'file_uri': self.tarball.url,
'metadata': self.metadata,
'module': self.module.v3_base,
'tags': self.module.tag_list,
'version': str(self.version),
}
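# Illustrative sketch (added for clarity; not part of the original models
# module). It shows how the manager helpers above are meant to be combined;
# 'puppetlabs/stdlib' is only an example full name.
def _example_module_lookup():
    parsed = Module.objects.parse_full_name('puppetlabs/stdlib')
    if parsed is None:
        return None
    author, name = parsed           # e.g. ('puppetlabs', 'stdlib')
    return Module.objects.get_for_full_name('puppetlabs/stdlib')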
|
|
from __future__ import absolute_import
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import numpy as np
import scipy.stats
class Link(object):
"""
A generic link function for one-parameter exponential
family, with call, inverse and deriv methods.
"""
def initialize(self, Y):
return np.asarray(Y).mean() * np.ones(Y.shape)
def __call__(self, p):
raise NotImplementedError
def inverse(self, z):
raise NotImplementedError
def deriv(self, p):
raise NotImplementedError
class Logit(Link):
"""
The logit transform as a link function:
g(x) = log(x / (1 - x))
g'(x) = 1 / (x * (1 - x))
g^(-1)(x) = exp(x)/(1 + exp(x))
"""
tol = 1.0e-10
def clean(self, p):
"""
Clip logistic values to range (tol, 1-tol)
INPUTS:
p -- probabilities
OUTPUTS: pclip
pclip -- clipped probabilities
"""
return np.clip(p, Logit.tol, 1. - Logit.tol)
def __call__(self, p):
"""
Logit transform
g(p) = log(p / (1 - p))
INPUTS:
p -- probabilities
OUTPUTS: z
z -- logit transform of p
"""
p = self.clean(p)
return np.log(p / (1. - p))
def inverse(self, z):
"""
Inverse logit transform
h(z) = exp(z)/(1+exp(z))
INPUTS:
z -- logit transform of p
OUTPUTS: p
p -- probabilities
"""
t = np.exp(z)
return t / (1. + t)
def deriv(self, p):
"""
Derivative of logit transform
g'(p) = 1 / (p * (1 - p))
INPUTS:
p -- probabilities
OUTPUTS: y
y -- derivative of logit transform of p
"""
p = self.clean(p)
return 1. / (p * (1 - p))
logit = Logit()
class Power(Link):
"""
The power transform as a link function:
g(x) = x**power
"""
def __init__(self, power=1.):
self.power = power
def __call__(self, x):
"""
Power transform
g(x) = x**self.power
INPUTS:
x -- mean parameters
OUTPUTS: z
z -- power transform of x
"""
return np.power(x, self.power)
def inverse(self, z):
"""
Inverse of power transform
g(x) = x**(1/self.power)
INPUTS:
z -- linear predictors in GLM
OUTPUTS: x
x -- mean parameters
"""
return np.power(z, 1. / self.power)
def deriv(self, x):
"""
Derivative of power transform
g'(x) = self.power * x**(self.power - 1)
INPUTS:
x -- mean parameters
OUTPUTS: z
z -- derivative of power transform of x
"""
return self.power * np.power(x, self.power - 1)
inverse = Power(power=-1.)
inverse.__doc__ = """
The inverse transform as a link function:
g(x) = 1 / x
"""
sqrt = Power(power=0.5)
sqrt.__doc__ = """
The square-root transform as a link function:
g(x) = sqrt(x)
"""
inverse_squared = Power(power=-2.)
inverse_squared.__doc__ = """
The inverse squared transform as a link function:
g(x) = 1 / x**2
"""
identity = Power(power=1.)
identity.__doc__ = """
The identity transform as a link function:
g(x) = x
"""
class Log(Link):
"""
The log transform as a link function:
g(x) = log(x)
"""
tol = 1.0e-10
def clean(self, x):
return np.clip(x, Log.tol, np.inf)
def __call__(self, x, **extra):
"""
Log transform
g(x) = log(x)
INPUTS:
x -- mean parameters
OUTPUTS: z
z -- log(x)
"""
x = self.clean(x)
return np.log(x)
def inverse(self, z):
"""
Inverse of log transform
g(x) = exp(x)
INPUTS:
z -- linear predictors in GLM
OUTPUTS: x
x -- exp(z)
"""
return np.exp(z)
def deriv(self, x):
"""
Derivative of log transform
g'(x) = 1 / x
INPUTS:
x -- mean parameters
OUTPUTS: z
z -- derivative of log transform of x
"""
x = self.clean(x)
return 1. / x
log = Log()
class CDFLink(Logit):
"""
Use the CDF of a scipy.stats distribution as a link function:
g(x) = dbn.ppf(x)
"""
def __init__(self, dbn=scipy.stats.norm):
self.dbn = dbn
def __call__(self, p):
"""
CDF link
g(p) = self.dbn.ppf(p)
INPUTS:
p -- mean parameters
OUTPUTS: z
z -- quantile (inverse CDF) transform of p
"""
p = self.clean(p)
return self.dbn.ppf(p)
def inverse(self, z):
"""
Inverse of CDF link
g^(-1)(z) = self.dbn.cdf(z)
INPUTS:
z -- linear predictors in GLM
OUTPUTS: p
p -- inverse of CDF link of z
"""
return self.dbn.cdf(z)
def deriv(self, p):
"""
Derivative of CDF link
g'(p) = 1 / self.dbn.pdf(self.dbn.ppf(p))
INPUTS:
p -- mean parameters
OUTPUTS: z
z -- derivative of CDF transform of p
"""
p = self.clean(p)
return 1. / self.dbn.pdf(self(p))
probit = CDFLink()
probit.__doc__ = """
The probit (standard normal CDF) transform as a link function:
g(x) = scipy.stats.norm.ppf(x)
"""
cauchy = CDFLink(dbn=scipy.stats.cauchy)
cauchy.__doc__ = """
The Cauchy (standard Cauchy CDF) transform as a link function:
g(x) = scipy.stats.cauchy.ppf(x)
"""
class CLogLog(Logit):
"""
The complementary log-log transform as a link function:
g(x) = log(-log(x))
"""
def __call__(self, p):
"""
C-Log-Log transform
g(p) = log(-log(p))
INPUTS:
p -- mean parameters
OUTPUTS: z
z -- log(-log(p))
"""
p = self.clean(p)
return np.log(-np.log(p))
def inverse(self, z):
"""
Inverse of C-Log-Log transform
g(z) = exp(-exp(z))
INPUTS:
z -- linear predictor scale
OUTPUTS: p
p -- mean parameters
"""
return np.exp(-np.exp(z))
def deriv(self, p):
"""
Derivative of C-Log-Log transform
g'(p) = -1 / (log(p) * p)
INPUTS:
p -- mean parameters
OUTPUTS: z
z -- - 1 / (log(p) * p)
"""
p = self.clean(p)
return -1. / (np.log(p) * p)
cloglog = CLogLog()
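# Illustrative round-trip check (added for clarity; not part of the original
# module). Each link instance defined above is callable, and its inverse()
# undoes __call__() up to the clipping tolerance.
def _example_link_roundtrip():
    p = np.array([0.1, 0.5, 0.9])
    assert np.allclose(logit.inverse(logit(p)), p)
    assert np.allclose(probit.inverse(probit(p)), p)
    assert np.allclose(cloglog.inverse(cloglog(p)), p)
    assert np.allclose(log.inverse(log(p)), p)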
|
|
from __future__ import unicode_literals
import os
from django.conf import settings
from django.contrib.auth.models import User
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import RequestFactory
from kgb import SpyAgency
from reviewboard.attachments.models import FileAttachment
from reviewboard.reviews.models import (ReviewRequest,
ReviewRequestDraft,
Review)
from reviewboard.reviews.ui.base import (FileAttachmentReviewUI,
register_ui,
unregister_ui)
from reviewboard.testing import TestCase
class InitReviewUI(FileAttachmentReviewUI):
supported_mimetypes = ['image/jpg']
def __init__(self, review_request, obj):
raise Exception
class SandboxReviewUI(FileAttachmentReviewUI):
supported_mimetypes = ['image/png']
def is_enabled_for(self, user=None, review_request=None,
file_attachment=None, **kwargs):
raise Exception
def get_comment_thumbnail(self, comment):
raise Exception
def get_comment_link_url(self, comment):
raise Exception
def get_comment_link_text(self, comment):
raise Exception
def get_extra_context(self, request):
raise Exception
def get_js_view_data(self):
raise Exception
class ConflictFreeReviewUI(FileAttachmentReviewUI):
supported_mimetypes = ['image/gif']
def serialize_comment(self, comment):
raise Exception
def get_js_model_data(self):
raise Exception
class SandboxTests(SpyAgency, TestCase):
"""Testing sandboxing extensions."""
fixtures = ['test_users']
def setUp(self):
super(SandboxTests, self).setUp()
register_ui(InitReviewUI)
register_ui(SandboxReviewUI)
register_ui(ConflictFreeReviewUI)
self.factory = RequestFactory()
filename = os.path.join(settings.STATIC_ROOT,
'rb', 'images', 'trophy.png')
with open(filename, 'rb') as f:
self.file = SimpleUploadedFile(f.name, f.read(),
content_type='image/png')
self.user = User.objects.get(username='doc')
self.review_request = ReviewRequest.objects.create(self.user, None)
self.file_attachment1 = FileAttachment.objects.create(
mimetype='image/jpg',
file=self.file)
self.file_attachment2 = FileAttachment.objects.create(
mimetype='image/png',
file=self.file)
self.file_attachment3 = FileAttachment.objects.create(
mimetype='image/gif',
file=self.file)
self.review_request.file_attachments.add(self.file_attachment1)
self.review_request.file_attachments.add(self.file_attachment2)
self.review_request.file_attachments.add(self.file_attachment3)
self.draft = ReviewRequestDraft.create(self.review_request)
def tearDown(self):
super(SandboxTests, self).tearDown()
unregister_ui(InitReviewUI)
unregister_ui(SandboxReviewUI)
unregister_ui(ConflictFreeReviewUI)
def test_init_review_ui(self):
"""Testing FileAttachmentReviewUI sandboxes __init__"""
self.spy_on(InitReviewUI.__init__)
self.file_attachment1.review_ui
self.assertTrue(InitReviewUI.__init__.called)
def test_is_enabled_for(self):
"""Testing FileAttachmentReviewUI sandboxes is_enabled_for"""
comment = "Comment"
self.spy_on(SandboxReviewUI.is_enabled_for)
review = Review.objects.create(review_request=self.review_request,
user=self.user)
review.file_attachment_comments.create(
file_attachment=self.file_attachment2,
text=comment)
self.client.login(username='doc', password='doc')
response = self.client.get('/r/%d/' % self.review_request.pk)
self.assertEqual(response.status_code, 200)
self.assertTrue(SandboxReviewUI.is_enabled_for.called)
def test_get_comment_thumbnail(self):
"""Testing FileAttachmentReviewUI sandboxes get_comment_thumbnail"""
comment = "Comment"
review = Review.objects.create(review_request=self.review_request,
user=self.user)
file_attachment_comment = review.file_attachment_comments.create(
file_attachment=self.file_attachment2,
text=comment)
review_ui = file_attachment_comment.review_ui
self.spy_on(review_ui.get_comment_thumbnail)
file_attachment_comment.thumbnail
self.assertTrue(review_ui.get_comment_thumbnail.called)
def test_get_comment_link_url(self):
"""Testing FileAttachmentReviewUI sandboxes get_comment_link_url"""
comment = "Comment"
review = Review.objects.create(review_request=self.review_request,
user=self.user)
file_attachment_comment = review.file_attachment_comments.create(
file_attachment=self.file_attachment2,
text=comment)
review_ui = file_attachment_comment.review_ui
self.spy_on(review_ui.get_comment_link_url)
file_attachment_comment.get_absolute_url()
self.assertTrue(review_ui.get_comment_link_url.called)
def test_get_comment_link_text(self):
"""Testing FileAttachmentReviewUI sandboxes get_comment_link_text"""
comment = "Comment"
review = Review.objects.create(review_request=self.review_request,
user=self.user)
file_attachment_comment = review.file_attachment_comments.create(
file_attachment=self.file_attachment2,
text=comment)
review_ui = file_attachment_comment.review_ui
self.spy_on(review_ui.get_comment_link_text)
file_attachment_comment.get_link_text()
self.assertTrue(review_ui.get_comment_link_text.called)
def test_get_extra_context(self):
"""Testing FileAttachmentReviewUI sandboxes get_extra_context"""
review_ui = self.file_attachment2.review_ui
request = self.factory.get('test')
request.user = self.user
self.spy_on(review_ui.get_extra_context)
review_ui.render_to_string(request=request)
self.assertTrue(review_ui.get_extra_context.called)
def test_get_js_model_data(self):
"""Testing FileAttachmentReviewUI sandboxes get_js_model_data"""
review_ui = self.file_attachment3.review_ui
request = self.factory.get('test')
request.user = self.user
self.spy_on(review_ui.get_js_model_data)
review_ui.render_to_response(request=request)
self.assertTrue(review_ui.get_js_model_data.called)
def test_get_js_view_data(self):
"""Testing FileAttachmentReviewUI sandboxes get_js_view_data"""
review_ui = self.file_attachment2.review_ui
request = self.factory.get('test')
request.user = self.user
self.spy_on(review_ui.get_js_view_data)
review_ui.render_to_response(request=request)
self.assertTrue(review_ui.get_js_view_data.called)
def test_serialize_comments(self):
"""Testing FileAttachmentReviewUI sandboxes serialize_comments"""
review_ui = self.file_attachment2.review_ui
self.spy_on(review_ui.serialize_comments)
review_ui.get_comments_json()
self.assertTrue(review_ui.serialize_comments.called)
def test_serialize_comment(self):
"""Testing FileAttachmentReviewUI sandboxes serialize_comment"""
comment = 'comment'
review_ui = self.file_attachment3.review_ui
request = self.factory.get('test')
request.user = self.user
review_ui.request = request
review = Review.objects.create(review_request=self.review_request,
user=self.user, public=True)
file_attachment_comment = review.file_attachment_comments.create(
file_attachment=self.file_attachment3,
text=comment)
self.spy_on(review_ui.serialize_comment)
serial_comments = review_ui.serialize_comments(
comments=[file_attachment_comment])
self.assertEqual(len(serial_comments), 0)
self.assertTrue(review_ui.serialize_comment.called)
|
|
# Copyright 2016 Virantha Ekanayake All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage:
photokeeper.py [options] SOURCE_DIR examine
photokeeper.py [options] SOURCE_DIR TARGET_DIR [dedupe] file
photokeeper.py [options] SOURCE_DIR [dedupe] flickr
photokeeper.py [options] SOURCE_DIR TARGET_DIR [dedupe] file flickr
photokeeper.py [options] SOURCE_DIR TARGET_DIR all
photokeeper.py --conf=FILE
photokeeper.py -h
Arguments:
SOURCE_DIR Source directory of photos
TARGET_DIR Where to copy the image files
all Run all steps in the flow (%s)
%s
Options:
-h --help show this message
-v --verbose show more information
-d --debug show even more information
--conf=FILE load options from file
"""
from docopt import docopt
import yaml
import sys, os, logging, shutil, datetime, pprint, filecmp
from collections import OrderedDict, defaultdict
from schema import Schema, And, Optional, Or, Use, SchemaError
import piexif, dateparser
from tqdm import tqdm
from photokeeper.flickr import Flickr
from photokeeper.filecopy import FileCopy
from photokeeper.version import __version__
from photokeeper.utils import ordered_load, merge_args
"""
.. automodule:: photokeeper
:private-members:
"""
class ImageFile(object):
def __init__(self, srcdir, filename, tgtbasedir, tgtdatedir, datetime_taken, exif_timestamp_missing=False):
self.srcdir = srcdir
self.filename = filename
self.tgtbasedir = tgtbasedir
self.tgtdatedir = tgtdatedir
self.datetime_taken = datetime_taken
self.dup = False
self.flickr_dup = False
self.exif_timestamp_missing = exif_timestamp_missing
#print("adding {} with datetime {}".format(filename, datetime_taken.strftime('%Y-%m-%d %H:%M:%S')))
pass
@property
def srcpath(self):
return os.path.join(self.srcdir, self.filename)
@property
def tgtpath(self):
return os.path.join(self.tgtbasedir, self.tgtdatedir, self.filename)
def is_duplicate(self, shallow_compare = True):
# First, see if there is a file already there
if not os.path.exists(self.tgtpath):
return False
elif os.path.getsize(self.srcpath) != os.path.getsize(self.tgtpath):
return False
#elif not filecmp.cmp(self.srcpath, self.tgtpath, shallow_compare): # This is too slow over a network share
# return False
return True
class PhotoKeeper(object):
"""
The main class. Performs the following functions:
"""
def __init__ (self):
"""
"""
self.args = None
self.flow = OrderedDict([ ('examine', 'Examine EXIF tags'),
('dedupe', 'Only select files not already present in target directory'),
('flickr', 'Upload to flickr'),
('file', 'Copy files'),
])
self.images = []
def get_options(self, argv):
"""
Parse the command-line options and set the following object properties:
:param argv: usually just sys.argv[1:]
:returns: Nothing
:ivar debug: Enable logging debug statements
:ivar verbose: Enable verbose logging
:ivar config: Dict of the config file
"""
padding = max([len(x) for x in self.flow.keys()]) # Find max length of flow step names for padding with white space
docstring = __doc__ % (#'|'.join(self.flow),
','.join(self.flow.keys()),
'\n'.join([' '+k+' '*(padding+4-len(k))+v for k,v in self.flow.items()]))
args = docopt(docstring, version=__version__)
# Load in default conf values from file if specified
if args['--conf']:
with open(args['--conf']) as f:
                conf_args = yaml.safe_load(f)
else:
conf_args = {}
args = merge_args(conf_args, args)
logging.debug (args)
schema = Schema({
'SOURCE_DIR': Or(os.path.isdir, error='Source directory does not exist'),
'TARGET_DIR': Or(lambda x: x is None, os.path.isdir, error='Destination directory does not exist'),
object: object
})
try:
args = schema.validate(args)
except SchemaError as e:
exit(e)
logging.debug (args)
if args['all'] == 0:
for f in list(self.flow):
if args[f] == 0: del self.flow[f]
logging.info("Doing flow steps: %s" % (','.join(self.flow.keys())))
self.src_dir = args['SOURCE_DIR']
self.tgt_dir = args['TARGET_DIR']
if self.tgt_dir:
assert os.path.abspath(self.src_dir) != os.path.abspath(self.tgt_dir), 'Target and source directories cannot be the same'
if args['--debug']:
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
elif args['--verbose']:
logging.basicConfig(level=logging.INFO, format='%(message)s')
self.args = args # Just save this for posterity
def get_file_count(self, _dir):
cnt = 0
for root, dirs, files in os.walk(_dir):
cnt += len(files)
return cnt
def examine_files(self, img_dir):
counts = defaultdict(int)
dt_format = '%Y-%m-%d'
pp = pprint.PrettyPrinter(indent=4)
raw_count = self.get_file_count(img_dir)
print("Examining {} files in {}".format(raw_count, img_dir))
with tqdm(total=raw_count, ncols=80, unit='file') as progress:
for root, dirs, files in os.walk(img_dir):
for fn in files:
if fn.startswith('.'): continue
filename = os.path.join(root, fn)
progress.update(1)
try:
exif_timestamp_missing = False
tags_dict = piexif.load(filename)
image_date = tags_dict['0th'][piexif.ImageIFD.DateTime]
# Why am I even using dateparser if it can't parse this??
image_datetime = dateparser.parse(image_date.decode('utf8'), date_formats=['%Y:%m:%d %H:%M:%S'])
except (KeyError, ValueError) as e:
logging.info('IGNORED: %s is not a JPG or TIFF' % (filename))
file_mod_time = os.path.getmtime(filename)
image_datetime = datetime.datetime.fromtimestamp(file_mod_time)
logging.info('Using %s ' % (image_datetime))
exif_timestamp_missing = True # Need to mark this since we don't have EXIF and Flickr doesn't honor file date for date-taken
image_datetime_text = image_datetime.strftime(dt_format)
counts[image_datetime_text] += 1
self.images.append(ImageFile(os.path.dirname(filename), os.path.basename(filename), self.tgt_dir, image_datetime_text, image_datetime, exif_timestamp_missing))
counts = dict(counts)
total = sum(counts.values())
print('Found images from {} days'.format(len(counts)))
pp.pprint (counts)
print('Total images: {}'.format(total))
def _get_unique_filename_suffix(self, filename):
dirname = os.path.dirname(filename)
fn_with_ext = os.path.basename(filename)
fn, ext = os.path.splitext(fn_with_ext)
suffix = 1
if not os.path.exists(filename): # Unique, no target filename conflict
return filename
else:
while os.path.exists(os.path.join(dirname, fn+'_'+str(suffix)+ext)):
suffix += 1
return (os.path.join(dirname, fn+'_'+str(suffix)+ext))
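    # Hedged illustration: if 'out/2016-02-15/img.jpg' and 'out/2016-02-15/img_1.jpg'
    # already exist on disk, _get_unique_filename_suffix('out/2016-02-15/img.jpg')
    # returns 'out/2016-02-15/img_2.jpg' (the suffix keeps incrementing until the
    # name is free).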
def all_images(self):
n = len(self.images)
with tqdm(total=n, ncols=80, unit='file') as progress:
for i, img in enumerate(self.images):
yield img
progress.update(1)
def go(self, argv):
"""
The main entry point into PhotoKeeper
#. Do something
#. Do something else
"""
# Read the command line options
self.get_options(argv)
self.examine_files(self.src_dir)
for photo_target, TargetClass in [('file', FileCopy), ('flickr', Flickr)]:
if photo_target in self.flow:
f = TargetClass()
if 'dedupe' in self.flow:
f.check_duplicates(self.all_images())
f.execute_copy(self.all_images())
def main():
script = PhotoKeeper()
script.go(sys.argv[1:])
if __name__ == '__main__':
main()
|
|
from __future__ import division
# at this stage in the book we haven't actually installed matplotlib,
# comment this out if you need to
from matplotlib import pyplot as plt
##########################
# #
# FINDING KEY CONNECTORS #
# #
##########################
users = [
{ "id": 0, "name": "Hero" },
{ "id": 1, "name": "Dunn" },
{ "id": 2, "name": "Sue" },
{ "id": 3, "name": "Chi" },
{ "id": 4, "name": "Thor" },
{ "id": 5, "name": "Clive" },
{ "id": 6, "name": "Hicks" },
{ "id": 7, "name": "Devin" },
{ "id": 8, "name": "Kate" },
{ "id": 9, "name": "Klein" },
{ "id": 10, "name": "Jen" }
]
friendships = [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3), (3, 4),
(4, 5), (5, 6), (5, 7), (6, 8), (7, 8), (8, 9)]
# first give each user an empty list
for user in users:
user["friends"] = []
# and then populate the lists with friendships
for i, j in friendships:
# this works because users[i] is the user whose id is i
users[i]["friends"].append(users[j]) # add i as a friend of j
users[j]["friends"].append(users[i]) # add j as a friend of i
def number_of_friends(user):
"""how many friends does _user_ have?"""
return len(user["friends"]) # length of friend_ids list
total_connections = sum(number_of_friends(user)
for user in users) # 24
num_users = len(users)
avg_connections = total_connections / num_users # 2.4
################################
# #
# DATA SCIENTISTS YOU MAY KNOW #
# #
################################
def friends_of_friend_ids_bad(user):
# "foaf" is short for "friend of a friend"
return [foaf["id"]
for friend in user["friends"] # for each of user's friends
for foaf in friend["friends"]] # get each of _their_ friends
from collections import Counter # not loaded by default
def not_the_same(user, other_user):
"""two users are not the same if they have different ids"""
return user["id"] != other_user["id"]
def not_friends(user, other_user):
"""other_user is not a friend if he's not in user["friends"];
that is, if he's not_the_same as all the people in user["friends"]"""
return all(not_the_same(friend, other_user)
for friend in user["friends"])
def friends_of_friend_ids(user):
return Counter(foaf["id"]
for friend in user["friends"] # for each of my friends
for foaf in friend["friends"] # count *their* friends
if not_the_same(user, foaf) # who aren't me
and not_friends(user, foaf)) # and aren't my friends
print friends_of_friend_ids(users[3]) # Counter({0: 2, 5: 1})
interests = [
(0, "Hadoop"), (0, "Big Data"), (0, "HBase"), (0, "Java"),
(0, "Spark"), (0, "Storm"), (0, "Cassandra"),
(1, "NoSQL"), (1, "MongoDB"), (1, "Cassandra"), (1, "HBase"),
(1, "Postgres"), (2, "Python"), (2, "scikit-learn"), (2, "scipy"),
(2, "numpy"), (2, "statsmodels"), (2, "pandas"), (3, "R"), (3, "Python"),
(3, "statistics"), (3, "regression"), (3, "probability"),
(4, "machine learning"), (4, "regression"), (4, "decision trees"),
(4, "libsvm"), (5, "Python"), (5, "R"), (5, "Java"), (5, "C++"),
(5, "Haskell"), (5, "programming languages"), (6, "statistics"),
(6, "probability"), (6, "mathematics"), (6, "theory"),
(7, "machine learning"), (7, "scikit-learn"), (7, "Mahout"),
(7, "neural networks"), (8, "neural networks"), (8, "deep learning"),
(8, "Big Data"), (8, "artificial intelligence"), (9, "Hadoop"),
(9, "Java"), (9, "MapReduce"), (9, "Big Data")
]
def data_scientists_who_like(target_interest):
return [user_id
for user_id, user_interest in interests
if user_interest == target_interest]
from collections import defaultdict
# keys are interests, values are lists of user_ids with that interest
user_ids_by_interest = defaultdict(list)
for user_id, interest in interests:
user_ids_by_interest[interest].append(user_id)
# keys are user_ids, values are lists of interests for that user_id
interests_by_user_id = defaultdict(list)
for user_id, interest in interests:
interests_by_user_id[user_id].append(interest)
def most_common_interests_with(user_id):
return Counter(interested_user_id
                   for interest in interests_by_user_id[user_id]
                   for interested_user_id in user_ids_by_interest[interest]
if interested_user_id != user_id)
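# Worked example for the function above (hand-computed from the interests list):
# most_common_interests_with(3) should give Counter({5: 2, 6: 2, 2: 1, 4: 1}),
# since user 3 shares "R" and "Python" with user 5, "statistics" and
# "probability" with user 6, "Python" with user 2, and "regression" with user 4.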
###########################
# #
# SALARIES AND EXPERIENCE #
# #
###########################
salaries_and_tenures = [(83000, 8.7), (88000, 8.1),
(48000, 0.7), (76000, 6),
(69000, 6.5), (76000, 7.5),
(60000, 2.5), (83000, 10),
(48000, 1.9), (63000, 4.2)]
def make_chart_salaries_by_tenure():
tenures = [tenure for salary, tenure in salaries_and_tenures]
salaries = [salary for salary, tenure in salaries_and_tenures]
plt.scatter(tenures, salaries)
plt.xlabel("Years Experience")
plt.ylabel("Salary")
plt.show()
# keys are years
# values are the salaries for each tenure
salary_by_tenure = defaultdict(list)
for salary, tenure in salaries_and_tenures:
salary_by_tenure[tenure].append(salary)
average_salary_by_tenure = {
tenure : sum(salaries) / len(salaries)
for tenure, salaries in salary_by_tenure.items()
}
def tenure_bucket(tenure):
if tenure < 2: return "less than two"
elif tenure < 5: return "between two and five"
else: return "more than five"
salary_by_tenure_bucket = defaultdict(list)
for salary, tenure in salaries_and_tenures:
bucket = tenure_bucket(tenure)
salary_by_tenure_bucket[bucket].append(salary)
average_salary_by_bucket = {
tenure_bucket : sum(salaries) / len(salaries)
for tenure_bucket, salaries in salary_by_tenure_bucket.iteritems()
}
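# Worked example (hand-computed from salaries_and_tenures): the bucketed
# averages come out to roughly
#   {'less than two': 48000.0,
#    'between two and five': 61500.0,
#    'more than five': 79166.67}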
#################
# #
# PAID_ACCOUNTS #
# #
#################
def predict_paid_or_unpaid(years_experience):
if years_experience < 3.0: return "paid"
elif years_experience < 8.5: return "unpaid"
else: return "paid"
######################
# #
# TOPICS OF INTEREST #
# #
######################
words_and_counts = Counter(word
for user, interest in interests
for word in interest.lower().split())
if __name__ == "__main__":
print
print "######################"
print "#"
print "# FINDING KEY CONNECTORS"
print "#"
print "######################"
print
print "total connections", total_connections
print "number of users", num_users
print "average connections", total_connections / num_users
print
# create a list (user_id, number_of_friends)
num_friends_by_id = [(user["id"], number_of_friends(user))
for user in users]
print "users sorted by number of friends:"
print sorted(num_friends_by_id,
key=lambda (user_id, num_friends): num_friends, # by number of friends
reverse=True) # largest to smallest
print
print "######################"
print "#"
print "# DATA SCIENTISTS YOU MAY KNOW"
print "#"
print "######################"
print
print "friends of friends bad for user 0:", friends_of_friend_ids_bad(users[0])
print "friends of friends for user 3:", friends_of_friend_ids(users[3])
print
print "######################"
print "#"
print "# SALARIES AND TENURES"
print "#"
print "######################"
print
print "average salary by tenure", average_salary_by_tenure
print "average salary by tenure bucket", average_salary_by_bucket
print
print "######################"
print "#"
print "# MOST COMMON WORDS"
print "#"
print "######################"
print
for word, count in words_and_counts.most_common():
if count > 1:
print word, count
|
|
"""
.. todo::
WRITEME
"""
import functools
from pylearn2.datasets.dataset import Dataset
from pylearn2.utils import wraps
import logging
import numpy
import warnings
import cPickle
# data are in pytables
try:
import tables
except ImportError:
warnings.warn("data are in pytables")
try:
import scipy.sparse
except ImportError:
warnings.warn("Couldn't import scipy.sparse")
import theano
import gzip
floatX = theano.config.floatX
logger = logging.getLogger(__name__)
from pylearn2.space import CompositeSpace, VectorSpace, IndexSpace
from pylearn2.utils import safe_zip
from pylearn2.utils.exc import reraise_as
from pylearn2.utils.iteration import (
FiniteDatasetIterator,
resolve_iterator_class
)
class CLICKSparseDataset(Dataset):
"""
SparseDataset is a class for representing datasets that can be
stored as a sparse matrix.
Parameters
----------
X_load_path : str or None, optional
the path to read the sparse dataset
from_scipy_sparse_dataset is not used if load_path is specified
X_from_scipy_sparse_dataset : matrix of type scipy.sparse or None, optional
In case load_path is not provided,
the sparse dataset is passed directly to the class by
using from_scipy_sparse_dataset parameter.
X_zipped_npy : bool, optional
used only when load_path is specified.
indicates whether the input matrix is zipped or not.
        defaults to False.
y_path : str or None, optional
the path of y.
    y_part : int or None, optional
        which day of data to use.
"""
_default_seed = (17, 2, 946)
def __init__(self, X_load_path=None,
X_from_scipy_sparse_dataset=None, X_zipped_npy=False,
y_path=None, y_labels=None, y_part=None, rng=_default_seed):
if X_load_path is not None:
if X_zipped_npy is True:
logger.info('... loading sparse data set from a zip npy file')
self.X = scipy.sparse.csr_matrix(
numpy.load(gzip.open(X_load_path)), dtype=floatX)
else:
logger.info('... loading sparse data set from a npy file')
loader = numpy.load(X_load_path)
self.X = scipy.sparse.csr_matrix((loader['data'], \
loader['indices'], loader['indptr']), \
shape = loader['shape'], dtype=floatX)
else:
logger.info('... building from given sparse dataset')
self.X = X_from_scipy_sparse_dataset
if not scipy.sparse.issparse(X_from_scipy_sparse_dataset):
msg = "from_scipy_sparse_dataset is not sparse : %s" \
% type(self.X)
raise TypeError(msg)
if y_path is not None:
logger.info('... loading y data set from a hdf5 file')
file_handler = tables.open_file(y_path, mode = "r")
y = file_handler.root.train.train_raw.y
assert y_part is not None
f = open('dayrows.pkl', 'r')
dayrows = cPickle.load(f)
f.close()
self.y = y[sum(dayrows[:y_part-1]):sum(dayrows[:y_part])]
self.y_labels = y_labels
X_source = 'features'
X_space = VectorSpace(dim=self.X.shape[1], sparse=True)
if y_path is None:
space = X_space
source = X_source
else:
if self.y.ndim == 1:
dim = 1
else:
dim = self.y.shape[-1]
if self.y_labels is not None:
y_space = IndexSpace(dim=dim, max_labels=self.y_labels)
else:
y_space = VectorSpace(dim=dim)
y_source = 'targets'
space = CompositeSpace((X_space, y_space))
source = (X_source, y_source)
self.data_specs = (space, source)
self.X_space = X_space
self._iter_mode = resolve_iterator_class('sequential')
self._iter_topo = False
self._iter_targets = False
self._iter_data_specs = (self.X_space, 'features')
@wraps(Dataset.iterator)
def iterator(self, mode=None, batch_size=None, num_batches=None,
topo=None, targets=None, rng=None, data_specs=None,
return_tuple=False):
if topo is not None or targets is not None:
warnings.warn("Usage of `topo` and `target` arguments are "
"being deprecated, and will be removed "
"around November 7th, 2013. `data_specs` "
"should be used instead. Here these two "
"arguments are not used",
stacklevel=2)
if data_specs is None:
data_specs = self._iter_data_specs
# If there is a view_converter, we have to use it to convert
# the stored data for "features" into one that the iterator
# can return.
space, source = data_specs
if isinstance(space, CompositeSpace):
sub_spaces = space.components
sub_sources = source
else:
sub_spaces = (space,)
sub_sources = (source,)
convert = []
for sp, src in safe_zip(sub_spaces, sub_sources):
if src == 'features' and \
getattr(self, 'view_converter', None) is not None:
conv_fn = (lambda batch, self=self, space=sp:
self.view_converter.get_formatted_batch(batch,
space))
else:
conv_fn = None
convert.append(conv_fn)
# TODO: Refactor
if mode is None:
if hasattr(self, '_iter_subset_class'):
mode = self._iter_subset_class
else:
raise ValueError('iteration mode not provided and no default '
'mode set for %s' % str(self))
else:
mode = resolve_iterator_class(mode)
if batch_size is None:
batch_size = getattr(self, '_iter_batch_size', None)
if num_batches is None:
num_batches = getattr(self, '_iter_num_batches', None)
if rng is None and mode.stochastic:
rng = self.rng
return FiniteDatasetIterator(self,
mode(self.X.shape[0],
batch_size,
num_batches,
rng),
data_specs=data_specs,
return_tuple=return_tuple,
convert=convert)
'''
def __iter__(self):
"""
.. todo::
WRITEME
"""
return self
def next(self):
"""
.. todo::
WRITEME
"""
indx = self.subset_iterator.next()
try:
mini_batch = self.X[indx]
except IndexError, e:
reraise_as(ValueError("Index out of range"+str(e)))
# the ind of minibatch goes beyond the boundary
return mini_batch
'''
def get_design_matrix(self):
"""
.. todo::
WRITEME
"""
return self.X
@wraps(Dataset.get_batch_design)
def get_batch_design(self, batch_size, include_labels=False):
"""Method inherited from Dataset"""
try:
idx = self.rng.randint(self.X.shape[0] - batch_size + 1)
except ValueError:
if batch_size > self.X.shape[0]:
reraise_as(ValueError("Requested %d examples from a dataset "
"containing only %d." %
(batch_size, self.X.shape[0])))
raise
rx = self.X[idx:idx + batch_size, :]
if include_labels:
if self.y is None:
return rx, None
ry = self.y[idx:idx + batch_size]
return rx, ry
#rx = numpy.cast[theano.config.floatX](rx)
return rx
@wraps(Dataset.get_batch_topo)
def get_batch_topo(self, batch_size):
"""Method inherited from Dataset"""
raise NotImplementedError('Not implemented for sparse dataset')
@functools.wraps(Dataset.get_num_examples)
def get_num_examples(self):
return self.X.shape[0]
def has_targets(self):
""" Returns true if the dataset includes targets """
return self.y is not None
def get_data_specs(self):
"""
Returns the data_specs specifying how the data is internally stored.
This is the format the data returned by `self.get_data()` will be.
"""
return self.data_specs
def get_data(self):
"""
Returns
-------
data : numpy matrix or 2-tuple of matrices
Returns all the data, as it is internally stored.
The definition and format of these data are described in
`self.get_data_specs()`.
"""
if self.y is None:
return self.X
else:
return (self.X, self.y)
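# Hedged usage sketch (illustration only; the shapes, density and sparse matrix
# below are made up, not from this module):
#
#   import scipy.sparse
#   X = scipy.sparse.random(1000, 50, density=0.01, format='csr', dtype=floatX)
#   ds = CLICKSparseDataset(X_from_scipy_sparse_dataset=X)
#   it = ds.iterator(mode='sequential', batch_size=100,
#                    data_specs=(ds.X_space, 'features'))
#   for batch in it:
#       pass  # each batch is a sparse feature matrix with 100 rows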
|
|
# pylint: skip-file
import json
import os
import pickle
import struct
import subprocess
import tempfile
import threading
from enum import IntEnum
from functools import wraps
import numpy as np
from lru import LRU
import _io
from tools.lib.cache import cache_path_for_file_path
from tools.lib.exceptions import DataUnreadableError
from common.file_helpers import atomic_write_in_dir
from tools.lib.filereader import FileReader
HEVC_SLICE_B = 0
HEVC_SLICE_P = 1
HEVC_SLICE_I = 2
class GOPReader:
def get_gop(self, num):
# returns (start_frame_num, num_frames, frames_to_skip, gop_data)
raise NotImplementedError
class DoNothingContextManager:
def __enter__(self):
return self
def __exit__(self, *x):
pass
class FrameType(IntEnum):
raw = 1
h265_stream = 2
def fingerprint_video(fn):
with FileReader(fn) as f:
header = f.read(4)
if len(header) == 0:
raise DataUnreadableError(f"{fn} is empty")
elif header == b"\x00\xc0\x12\x00":
return FrameType.raw
elif header == b"\x00\x00\x00\x01":
if 'hevc' in fn:
return FrameType.h265_stream
else:
raise NotImplementedError(fn)
else:
raise NotImplementedError(fn)
def ffprobe(fn, fmt=None):
cmd = ["ffprobe",
"-v", "quiet",
"-print_format", "json",
"-show_format", "-show_streams"]
if fmt:
cmd += ["-f", fmt]
cmd += [fn]
try:
ffprobe_output = subprocess.check_output(cmd)
except subprocess.CalledProcessError:
raise DataUnreadableError(fn)
return json.loads(ffprobe_output)
def vidindex(fn, typ):
vidindex_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "vidindex")
vidindex = os.path.join(vidindex_dir, "vidindex")
subprocess.check_call(["make"], cwd=vidindex_dir, stdout=open("/dev/null", "w"))
with tempfile.NamedTemporaryFile() as prefix_f, \
tempfile.NamedTemporaryFile() as index_f:
try:
subprocess.check_call([vidindex, typ, fn, prefix_f.name, index_f.name])
except subprocess.CalledProcessError:
raise DataUnreadableError(f"vidindex failed on file {fn}")
with open(index_f.name, "rb") as f:
index = f.read()
with open(prefix_f.name, "rb") as f:
prefix = f.read()
index = np.frombuffer(index, np.uint32).reshape(-1, 2)
assert index[-1, 0] == 0xFFFFFFFF
assert index[-1, 1] == os.path.getsize(fn)
return index, prefix
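# Hedged sketch of the layout vidindex() returns, inferred from the asserts
# above and from how _lookup_gop() consumes it below:
#   index[i]  == (slice_type, byte_offset_of_frame_i)  for i < len(index) - 1
#   index[-1] == (0xFFFFFFFF, total_file_size)         sentinel row
# so the encoded bytes of frame i live in the range index[i, 1]:index[i + 1, 1].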
def cache_fn(func):
@wraps(func)
def cache_inner(fn, *args, **kwargs):
if kwargs.pop('no_cache', None):
cache_path = None
else:
cache_prefix = kwargs.pop('cache_prefix', None)
cache_path = cache_path_for_file_path(fn, cache_prefix)
if cache_path and os.path.exists(cache_path):
with open(cache_path, "rb") as cache_file:
cache_value = pickle.load(cache_file)
else:
cache_value = func(fn, *args, **kwargs)
if cache_path:
with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file:
pickle.dump(cache_value, cache_file, -1)
return cache_value
return cache_inner
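# Hedged illustration of the decorator above: the first call to a wrapped
# function (e.g. index_stream(fn, "hevc")) computes the value and pickles it at
# cache_path_for_file_path(fn, cache_prefix); later calls with the same path
# just unpickle that cache, and passing no_cache=True bypasses the cache
# entirely.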
@cache_fn
def index_stream(fn, typ):
assert typ in ("hevc", )
with FileReader(fn) as f:
assert os.path.exists(f.name), fn
index, prefix = vidindex(f.name, typ)
probe = ffprobe(f.name, typ)
return {
'index': index,
'global_prefix': prefix,
'probe': probe
}
def index_videos(camera_paths, cache_prefix=None):
"""Requires that paths in camera_paths are contiguous and of the same type."""
if len(camera_paths) < 1:
raise ValueError("must provide at least one video to index")
frame_type = fingerprint_video(camera_paths[0])
for fn in camera_paths:
index_video(fn, frame_type, cache_prefix)
def index_video(fn, frame_type=None, cache_prefix=None):
cache_path = cache_path_for_file_path(fn, cache_prefix)
if os.path.exists(cache_path):
return
if frame_type is None:
    frame_type = fingerprint_video(fn)
if frame_type == FrameType.h265_stream:
index_stream(fn, "hevc", cache_prefix=cache_prefix)
else:
raise NotImplementedError("Only h265 supported")
def get_video_index(fn, frame_type, cache_prefix=None):
cache_path = cache_path_for_file_path(fn, cache_prefix)
if not os.path.exists(cache_path):
index_video(fn, frame_type, cache_prefix)
if not os.path.exists(cache_path):
return None
with open(cache_path, "rb") as cache_file:
return pickle.load(cache_file)
def read_file_check_size(f, sz, cookie):
buff = bytearray(sz)
bytes_read = f.readinto(buff)
assert bytes_read == sz, (bytes_read, sz)
return buff
def rgb24toyuv420(rgb):
yuv_from_rgb = np.array([[ 0.299 , 0.587 , 0.114 ],
[-0.14714119, -0.28886916, 0.43601035 ],
[ 0.61497538, -0.51496512, -0.10001026 ]])
img = np.dot(rgb.reshape(-1, 3), yuv_from_rgb.T).reshape(rgb.shape)
y_len = img.shape[0] * img.shape[1]
uv_len = y_len // 4
ys = img[:, :, 0]
us = (img[::2, ::2, 1] + img[1::2, ::2, 1] + img[::2, 1::2, 1] + img[1::2, 1::2, 1]) / 4 + 128
vs = (img[::2, ::2, 2] + img[1::2, ::2, 2] + img[::2, 1::2, 2] + img[1::2, 1::2, 2]) / 4 + 128
yuv420 = np.empty(y_len + 2 * uv_len, dtype=img.dtype)
yuv420[:y_len] = ys.reshape(-1)
yuv420[y_len:y_len + uv_len] = us.reshape(-1)
yuv420[y_len + uv_len:y_len + 2 * uv_len] = vs.reshape(-1)
return yuv420.clip(0, 255).astype('uint8')
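# Hedged shape check for the conversion above (illustrative only): for an RGB
# input of shape (H, W, 3) with even H and W, the planar output holds H*W luma
# bytes followed by H*W//4 U bytes and H*W//4 V bytes, e.g.
#   rgb24toyuv420(np.zeros((480, 640, 3), dtype=np.uint8)).shape == (460800,)
# since 480*640*3//2 == 460800.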
def decompress_video_data(rawdat, vid_fmt, w, h, pix_fmt):
# using a tempfile is much faster than proc.communicate for some reason
with tempfile.TemporaryFile() as tmpf:
tmpf.write(rawdat)
tmpf.seek(0)
threads = os.getenv("FFMPEG_THREADS", "0")
cuda = os.getenv("FFMPEG_CUDA", "0") == "1"
proc = subprocess.Popen(
["ffmpeg",
"-threads", threads,
"-hwaccel", "none" if not cuda else "cuda",
"-c:v", "hevc",
"-vsync", "0",
"-f", vid_fmt,
"-flags2", "showall",
"-i", "pipe:0",
"-threads", threads,
"-f", "rawvideo",
"-pix_fmt", pix_fmt,
"pipe:1"],
      stdin=tmpf, stdout=subprocess.PIPE, stderr=open("/dev/null", "w"))
# dat = proc.communicate()[0]
dat = proc.stdout.read()
if proc.wait() != 0:
raise DataUnreadableError("ffmpeg failed")
if pix_fmt == "rgb24":
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, h, w, 3)
elif pix_fmt == "yuv420p":
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, (h*w*3//2))
elif pix_fmt == "yuv444p":
ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, 3, h, w)
else:
raise NotImplementedError
return ret
class BaseFrameReader:
# properties: frame_type, frame_count, w, h
def __enter__(self):
return self
def __exit__(self, *args):
self.close()
def close(self):
pass
def get(self, num, count=1, pix_fmt="yuv420p"):
raise NotImplementedError
def FrameReader(fn, cache_prefix=None, readahead=False, readbehind=False, index_data=None):
frame_type = fingerprint_video(fn)
if frame_type == FrameType.raw:
return RawFrameReader(fn)
elif frame_type in (FrameType.h265_stream,):
if not index_data:
index_data = get_video_index(fn, frame_type, cache_prefix)
return StreamFrameReader(fn, frame_type, index_data, readahead=readahead, readbehind=readbehind)
else:
raise NotImplementedError(frame_type)
class RawData:
def __init__(self, f):
self.f = _io.FileIO(f, 'rb')
self.lenn = struct.unpack("I", self.f.read(4))[0]
    self.count = os.path.getsize(f) // (self.lenn+4)
def read(self, i):
self.f.seek((self.lenn+4)*i + 4)
return self.f.read(self.lenn)
class RawFrameReader(BaseFrameReader):
def __init__(self, fn):
# raw camera
self.fn = fn
self.frame_type = FrameType.raw
self.rawfile = RawData(self.fn)
self.frame_count = self.rawfile.count
self.w, self.h = 640, 480
def load_and_debayer(self, img):
img = np.frombuffer(img, dtype='uint8').reshape(960, 1280)
cimg = np.dstack([img[0::2, 1::2], ((img[0::2, 0::2].astype("uint16") + img[1::2, 1::2].astype("uint16")) >> 1).astype("uint8"), img[1::2, 0::2]])
return cimg
def get(self, num, count=1, pix_fmt="yuv420p"):
assert self.frame_count is not None
assert num+count <= self.frame_count
if pix_fmt not in ("yuv420p", "rgb24"):
raise ValueError(f"Unsupported pixel format {pix_fmt!r}")
app = []
for i in range(num, num+count):
dat = self.rawfile.read(i)
rgb_dat = self.load_and_debayer(dat)
if pix_fmt == "rgb24":
app.append(rgb_dat)
elif pix_fmt == "yuv420p":
app.append(rgb24toyuv420(rgb_dat))
else:
raise NotImplementedError
return app
class VideoStreamDecompressor:
def __init__(self, fn, vid_fmt, w, h, pix_fmt):
self.fn = fn
self.vid_fmt = vid_fmt
self.w = w
self.h = h
self.pix_fmt = pix_fmt
if pix_fmt == "yuv420p":
self.out_size = w*h*3//2 # yuv420p
elif pix_fmt in ("rgb24", "yuv444p"):
self.out_size = w*h*3
else:
raise NotImplementedError
self.proc = None
self.t = threading.Thread(target=self.write_thread)
self.t.daemon = True
def write_thread(self):
try:
with FileReader(self.fn) as f:
while True:
r = f.read(1024*1024)
if len(r) == 0:
break
self.proc.stdin.write(r)
finally:
self.proc.stdin.close()
def read(self):
threads = os.getenv("FFMPEG_THREADS", "0")
cuda = os.getenv("FFMPEG_CUDA", "0") == "1"
cmd = [
"ffmpeg",
"-threads", threads,
"-hwaccel", "none" if not cuda else "cuda",
"-c:v", "hevc",
# "-avioflags", "direct",
"-analyzeduration", "0",
"-probesize", "32",
"-flush_packets", "0",
# "-fflags", "nobuffer",
"-vsync", "0",
"-f", self.vid_fmt,
"-i", "pipe:0",
"-threads", threads,
"-f", "rawvideo",
"-pix_fmt", self.pix_fmt,
"pipe:1"
]
self.proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
try:
self.t.start()
while True:
dat = self.proc.stdout.read(self.out_size)
if len(dat) == 0:
break
assert len(dat) == self.out_size
if self.pix_fmt == "rgb24":
ret = np.frombuffer(dat, dtype=np.uint8).reshape((self.h, self.w, 3))
elif self.pix_fmt == "yuv420p":
ret = np.frombuffer(dat, dtype=np.uint8)
elif self.pix_fmt == "yuv444p":
ret = np.frombuffer(dat, dtype=np.uint8).reshape((3, self.h, self.w))
else:
assert False
yield ret
result_code = self.proc.wait()
assert result_code == 0, result_code
finally:
self.proc.kill()
self.t.join()
class StreamGOPReader(GOPReader):
def __init__(self, fn, frame_type, index_data):
assert frame_type == FrameType.h265_stream
self.fn = fn
self.frame_type = frame_type
self.frame_count = None
self.w, self.h = None, None
self.prefix = None
self.index = None
self.index = index_data['index']
self.prefix = index_data['global_prefix']
probe = index_data['probe']
self.prefix_frame_data = None
self.num_prefix_frames = 0
self.vid_fmt = "hevc"
i = 0
while i < self.index.shape[0] and self.index[i, 0] != HEVC_SLICE_I:
i += 1
self.first_iframe = i
assert self.first_iframe == 0
self.frame_count = len(self.index) - 1
self.w = probe['streams'][0]['width']
self.h = probe['streams'][0]['height']
def _lookup_gop(self, num):
frame_b = num
while frame_b > 0 and self.index[frame_b, 0] != HEVC_SLICE_I:
frame_b -= 1
frame_e = num + 1
while frame_e < (len(self.index) - 1) and self.index[frame_e, 0] != HEVC_SLICE_I:
frame_e += 1
offset_b = self.index[frame_b, 1]
offset_e = self.index[frame_e, 1]
return (frame_b, frame_e, offset_b, offset_e)
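  # Hedged illustration: for a stream whose slice types (index[:, 0]) look like
  #   I P P B I P P ...
  # _lookup_gop(5) walks back to the I-frame at index 4 and forward to the next
  # I-frame (or the sentinel row), returning that frame range plus the byte
  # offsets needed to read the whole group of pictures in one call.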
def get_gop(self, num):
frame_b, frame_e, offset_b, offset_e = self._lookup_gop(num)
assert frame_b <= num < frame_e
num_frames = frame_e - frame_b
with FileReader(self.fn) as f:
f.seek(offset_b)
rawdat = f.read(offset_e - offset_b)
if num < self.first_iframe:
assert self.prefix_frame_data
rawdat = self.prefix_frame_data + rawdat
rawdat = self.prefix + rawdat
skip_frames = 0
if num < self.first_iframe:
skip_frames = self.num_prefix_frames
return frame_b, num_frames, skip_frames, rawdat
class GOPFrameReader(BaseFrameReader):
#FrameReader with caching and readahead for formats that are group-of-picture based
def __init__(self, readahead=False, readbehind=False):
self.open_ = True
self.readahead = readahead
self.readbehind = readbehind
self.frame_cache = LRU(64)
if self.readahead:
self.cache_lock = threading.RLock()
self.readahead_last = None
self.readahead_len = 30
self.readahead_c = threading.Condition()
self.readahead_thread = threading.Thread(target=self._readahead_thread)
self.readahead_thread.daemon = True
self.readahead_thread.start()
else:
self.cache_lock = DoNothingContextManager()
def close(self):
if not self.open_:
return
self.open_ = False
if self.readahead:
self.readahead_c.acquire()
self.readahead_c.notify()
self.readahead_c.release()
self.readahead_thread.join()
def _readahead_thread(self):
while True:
self.readahead_c.acquire()
try:
if not self.open_:
break
self.readahead_c.wait()
finally:
self.readahead_c.release()
if not self.open_:
break
assert self.readahead_last
num, pix_fmt = self.readahead_last
if self.readbehind:
for k in range(num - 1, max(0, num - self.readahead_len), -1):
self._get_one(k, pix_fmt)
else:
for k in range(num, min(self.frame_count, num + self.readahead_len)):
self._get_one(k, pix_fmt)
def _get_one(self, num, pix_fmt):
assert num < self.frame_count
if (num, pix_fmt) in self.frame_cache:
return self.frame_cache[(num, pix_fmt)]
with self.cache_lock:
if (num, pix_fmt) in self.frame_cache:
return self.frame_cache[(num, pix_fmt)]
frame_b, num_frames, skip_frames, rawdat = self.get_gop(num)
ret = decompress_video_data(rawdat, self.vid_fmt, self.w, self.h, pix_fmt)
ret = ret[skip_frames:]
assert ret.shape[0] == num_frames
for i in range(ret.shape[0]):
self.frame_cache[(frame_b+i, pix_fmt)] = ret[i]
return self.frame_cache[(num, pix_fmt)]
def get(self, num, count=1, pix_fmt="yuv420p"):
assert self.frame_count is not None
if num + count > self.frame_count:
raise ValueError(f"{num + count} > {self.frame_count}")
if pix_fmt not in ("yuv420p", "rgb24", "yuv444p"):
raise ValueError(f"Unsupported pixel format {pix_fmt!r}")
ret = [self._get_one(num + i, pix_fmt) for i in range(count)]
if self.readahead:
self.readahead_last = (num+count, pix_fmt)
self.readahead_c.acquire()
self.readahead_c.notify()
self.readahead_c.release()
return ret
class StreamFrameReader(StreamGOPReader, GOPFrameReader):
def __init__(self, fn, frame_type, index_data, readahead=False, readbehind=False):
StreamGOPReader.__init__(self, fn, frame_type, index_data)
GOPFrameReader.__init__(self, readahead, readbehind)
def GOPFrameIterator(gop_reader, pix_fmt):
dec = VideoStreamDecompressor(gop_reader.fn, gop_reader.vid_fmt, gop_reader.w, gop_reader.h, pix_fmt)
yield from dec.read()
def FrameIterator(fn, pix_fmt, **kwargs):
fr = FrameReader(fn, **kwargs)
if isinstance(fr, GOPReader):
yield from GOPFrameIterator(fr, pix_fmt)
else:
for i in range(fr.frame_count):
yield fr.get(i, pix_fmt=pix_fmt)[0]
|
|
from contextlib import contextmanager
import datetime
from fabric import operations
from fabric.api import *
from fabric.contrib.console import confirm
from fabric.contrib.files import upload_template, append
import os.path
import posixpath
PROJ_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
CONF_DIR = os.path.abspath(os.path.join(PROJ_DIR, 'conf'))
env.user = "oc"
env.gunicorn_port = 9000
env.code_dir = '~/OpenCommunity/'
env.venv_command = '. bin/activate'
env.log_dir = '/var/log/opencommunity/'
env.clone_url = "https://github.com/hasadna/OpenCommunity.git"
env.backup_dir = '~/backups'
env.pip_version = "1.5.4"
@contextmanager
def virtualenv(path):
with cd(path):
with prefix(env.venv_command):
yield
@task
def qa_old():
env.user = "udi"
env.hosts = ['oc-dev.modelarity.com']
env.log_dir = '%slogs/' % env.code_dir
env.pidfile = '/home/udi/OpenCommunity/run/masterpid'
def qa():
env.vhost = '%s.qa.opencommunity.org.il' % env.user
env.redirect_host = 'www.%s' % env.vhost
env.hosts = ['qa.opencommunity.org.il']
env.ocuser = "oc_" + env.user
env.code_dir = '/home/%s/OpenCommunity/' % env.user
env.log_dir = '%slogs/' % env.code_dir
env.clone_url = "https://github.com/%s/OpenCommunity.git" % env.github_user
env.venv_command = '. venv/bin/activate'
env.venv_dir = '%svenv/' % env.code_dir
env.pidfile = '/home/%s/opencommunity.pid' % env.ocuser
@task
def udi():
env.gunicorn_port = 9010
env.user = 'udi'
env.github_user = 'nonZero'
qa()
@task
def amir():
env.gunicorn_port = 9011
env.user = 'amir'
env.github_user = 'amir99'
qa()
@task
def yaniv():
env.gunicorn_port = 9012
env.user = 'yaniv'
env.github_user = 'yaniv14'
qa()
@task
def paul():
env.gunicorn_port = 9013
env.user = 'paul'
env.github_user = 'pwalsh'
qa()
@task
def prod():
env.hosts = ['[email protected]']
env.redirect_host = 'opencommunity.org.il'
env.venv_command = '. ~/.virtualenvs/oc/bin/activate'
env.pidfile = '/home/oc/OpenCommunity/src/masterpid'
env.ocuser = "oc"
env.code_dir = '/home/%s/OpenCommunity/' % env.user
env.venv_dir = '%svenv/' % env.code_dir
@task
def enprod():
env.user = "en"
env.code_dir = '/home/%s/OpenCommunity/' % env.user
env.venv_dir = '%svenv/' % env.code_dir
env.venv_command = '. venv/bin/activate'
env.log_dir = '%slogs/' % env.code_dir
env.vhost = 'en.demos.org.il'
env.redirect_host = 'www.%s' % env.vhost
env.hosts = [env.vhost]
env.ocuser = "weben"
env.pidfile = '/home/%s/web.pid' % env.ocuser
env.gunicorn_port = 9001
@task
def host_type():
run('uname -s')
@task
def git_log():
with virtualenv(env.code_dir):
run("git log -n 1")
@task
def freeze():
with virtualenv(env.code_dir):
run("pip freeze")
@task
def reload_app():
with virtualenv(env.code_dir):
run("cd src && sudo kill -HUP `cat %s`" % env.pidfile)
@task
def upgrade_pip():
with virtualenv(env.code_dir):
run("pip install pip=={}".format(env.pip_version))
@task
def deploy(restart=True):
upgrade_pip()
with virtualenv(env.code_dir):
run("git pull")
run("pip install -r requirements.txt")
run("pip install -r deploy-requirements.txt")
run("cd src && python manage.py syncdb --noinput")
run("cd src && python manage.py migrate --merge --noinput")
run("cd src && python manage.py collectstatic --noinput")
run("git log -n 1 --format=\"%ai %h\" > static/version.txt")
run("git log -n 1 > static/version-full.txt")
if restart:
reload_app()
@task
def hard_reload():
run("sudo supervisorctl restart opencommunity")
@task
def very_hard_reload():
run("sudo service supervisor stop")
run("sudo service supervisor start")
@task
def log():
run("tail %s*" % env.log_dir)
APT_PACKAGES = [
'postgresql',
'nginx',
'supervisor',
'python',
'virtualenvwrapper',
'git',
'python-dev',
'libpq-dev',
'libjpeg-dev',
'libjpeg8',
'zlib1g-dev',
'libfreetype6',
'libfreetype6-dev',
'postfix',
'redis-server',
]
@task
def server_setup():
run("sudo apt-get update")
run("sudo apt-get upgrade -y")
run("sudo apt-get install -y %s" % " ".join(APT_PACKAGES))
@task
def create_ocuser_and_db():
run("sudo adduser %s --gecos '' --disabled-password" % env.ocuser)
run("sudo -iu postgres createuser %s -S -D -R" % env.ocuser)
run("sudo -iu postgres createdb %s -O %s" % (env.ocuser, env.ocuser))
run("sudo -iu postgres psql -c \"alter user %s with password '%s';\"" % (
env.ocuser, env.ocuser))
@task
def clone_project():
run("git clone %s %s" % (env.clone_url, env.code_dir))
with cd(env.code_dir):
run("virtualenv venv --prompt='(%s) '" % env.ocuser)
@task
def create_local_settings():
with cd(env.code_dir):
upload_template('ocd/local_settings.template',
env.code_dir + 'src/ocd/local_settings.py',
{'ocuser': env.ocuser, 'host': env.vhost},
use_jinja=True)
@task
def nginx_setup():
with cd(env.code_dir):
upload_template('nginx.conf.template',
env.code_dir + 'conf/nginx.conf',
{
'host': env.vhost,
'redirect_host': env.redirect_host,
'dir': env.code_dir,
'port': env.gunicorn_port,
}, use_jinja=True, template_dir=CONF_DIR)
nginx_conf1 = '/etc/nginx/sites-available/%s.conf' % env.ocuser
nginx_conf2 = '/etc/nginx/sites-enabled/%s.conf' % env.ocuser
# FIXME
# uncomment('/etc/nginx/nginx.conf',
# 'server_names_hash_bucket_size\s+64',
# use_sudo=True)
run('sudo ln -fs %sconf/nginx.conf %s' % (env.code_dir, nginx_conf1))
run('sudo ln -fs %s %s' % (nginx_conf1, nginx_conf2))
run('sudo service nginx configtest')
run('sudo service nginx start')
run('sudo service nginx reload')
@task
def gunicorn_setup():
with cd(env.code_dir):
upload_template('server.sh.template',
env.code_dir + 'server.sh',
{
'venv': env.venv_dir,
'port': env.gunicorn_port,
'pidfile': env.pidfile,
}, mode=0777, use_jinja=True, template_dir=PROJ_DIR)
@task
def supervisor_setup():
with cd(env.code_dir):
upload_template('supervisor.conf.template',
env.code_dir + 'conf/supervisor.conf',
{
'dir': env.code_dir,
'ocuser': env.ocuser,
'logdir': env.log_dir,
'venv_dir': env.venv_dir,
}, mode=0777, use_jinja=True, template_dir=CONF_DIR)
run(
'sudo ln -fs %sconf/supervisor.conf /etc/supervisor/conf.d/%s.conf'
% (env.code_dir, env.ocuser))
run("sudo supervisorctl reread")
run("sudo supervisorctl update")
run("sudo supervisorctl start %s" % env.ocuser)
@task
def project_setup():
with cd(env.code_dir):
run('mkdir -p uploads')
run('sudo chown %s uploads' % env.ocuser)
run('mkdir -p %s' % env.log_dir)
create_local_settings()
deploy(restart=False)
gunicorn_setup()
supervisor_setup()
nginx_setup()
@task
def initial_project_setup():
create_ocuser_and_db()
clone_project()
project_setup()
@task
def createsuperuser():
""" Creates a Django superuser for the project """
with virtualenv(env.code_dir):
run("cd src && python manage.py createsuperuser")
@task
def supervisor_status():
""" Show server's supoervisord status """
run("sudo supervisorctl status")
@task
def branch():
""" Shows current (and all) branchs """
with cd(env.code_dir):
run('git branch')
@task
def switch(branch):
""" fetches all branchs, and checkouts the specified git branch """
with cd(env.code_dir):
run('git fetch origin')
run('git checkout {}'.format(branch))
deploy()
@task
def showkeys():
""" Displays authorized public ssh keys for user """
with hide('stdout'):
keys = run('cat .ssh/authorized_keys')
print keys
@task
def push_key(key_file):
""" Appends an ssh public key file from the specified file
"""
with open(key_file) as f:
key_text = f.read()
append('~/.ssh/authorized_keys', key_text)
@task
def rebuild_index():
"""Rebuilds haystack search indexes for project"""
with virtualenv(env.code_dir):
run("cd src && python manage.py rebuild_index --noinput")
run("sudo chown -v {} whoosh_index whoosh_index/*".format(env.ocuser))
@task
def backup_db():
now = datetime.datetime.now()
filename = now.strftime("ocd-%Y-%m-%d-%H-%M.sql.gz")
fullpath = posixpath.join(env.backup_dir, filename)
run('sudo -u postgres pg_dump {} | gzip > {}'.format(
env.ocuser, fullpath))
operations.get(fullpath)
@task
def load_local_db_from_file(filename):
if not os.path.isfile(filename):
abort("Unknown file {}".format(filename))
if not confirm(
"DELETE local db and load from backup file {}?".format(filename)):
abort("Aborted.")
drop_command = "drop schema public cascade; create schema public;"
    local('''python -c "print '{}'" | python manage.py dbshell'''.format(
        drop_command))
cmd = "gunzip -c" if filename.endswith('.gz') else "cat"
local('{} {} | python manage.py dbshell'.format(cmd, filename))
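# Hedged usage sketch (Fabric 1.x-style invocation; task names are the ones
# defined above):
#
#   fab udi deploy        # deploy the 'udi' QA instance
#   fab prod deploy       # deploy production
#   fab prod backup_db    # dump the production DB and download the gzipped file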
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# coding: utf-8
# pylint: disable= arguments-differ
"""VGG, implemented in Gluon."""
from __future__ import division
__all__ = ['VGG',
'vgg11', 'vgg13', 'vgg16', 'vgg19',
'vgg11_bn', 'vgg13_bn', 'vgg16_bn', 'vgg19_bn',
'get_vgg']
import os
from ....context import cpu
from ....initializer import Xavier
from ...block import HybridBlock
from ... import nn
from .... import base
class VGG(HybridBlock):
r"""VGG model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
layers : list of int
Numbers of layers in each feature block.
filters : list of int
Numbers of filters in each feature block. List length should match the layers.
classes : int, default 1000
Number of classification classes.
batch_norm : bool, default False
Use batch normalization.
"""
def __init__(self, layers, filters, classes=1000, batch_norm=False, **kwargs):
super(VGG, self).__init__(**kwargs)
assert len(layers) == len(filters)
with self.name_scope():
self.features = self._make_features(layers, filters, batch_norm)
self.features.add(nn.Dense(4096, activation='relu',
weight_initializer='normal',
bias_initializer='zeros'))
self.features.add(nn.Dropout(rate=0.5))
self.features.add(nn.Dense(4096, activation='relu',
weight_initializer='normal',
bias_initializer='zeros'))
self.features.add(nn.Dropout(rate=0.5))
self.output = nn.Dense(classes,
weight_initializer='normal',
bias_initializer='zeros')
def _make_features(self, layers, filters, batch_norm):
featurizer = nn.HybridSequential(prefix='')
for i, num in enumerate(layers):
for _ in range(num):
featurizer.add(nn.Conv2D(filters[i], kernel_size=3, padding=1,
weight_initializer=Xavier(rnd_type='gaussian',
factor_type='out',
magnitude=2),
bias_initializer='zeros'))
if batch_norm:
featurizer.add(nn.BatchNorm())
featurizer.add(nn.Activation('relu'))
featurizer.add(nn.MaxPool2D(strides=2))
return featurizer
def hybrid_forward(self, F, x):
x = self.features(x)
x = self.output(x)
return x
# Specification
vgg_spec = {11: ([1, 1, 2, 2, 2], [64, 128, 256, 512, 512]),
13: ([2, 2, 2, 2, 2], [64, 128, 256, 512, 512]),
16: ([2, 2, 3, 3, 3], [64, 128, 256, 512, 512]),
19: ([2, 2, 4, 4, 4], [64, 128, 256, 512, 512])}
# Constructors
def get_vgg(num_layers, pretrained=False, ctx=cpu(),
root=os.path.join(base.data_dir(), 'models'), **kwargs):
r"""VGG model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
num_layers : int
        Number of layers for the variant of VGG. Options are 11, 13, 16, 19.
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default $MXNET_HOME/models
Location for keeping the model parameters.
"""
layers, filters = vgg_spec[num_layers]
net = VGG(layers, filters, **kwargs)
if pretrained:
from ..model_store import get_model_file
batch_norm_suffix = '_bn' if kwargs.get('batch_norm') else ''
net.load_parameters(get_model_file('vgg%d%s'%(num_layers, batch_norm_suffix),
root=root), ctx=ctx)
return net
def vgg11(**kwargs):
r"""VGG-11 model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_vgg(11, **kwargs)
def vgg13(**kwargs):
r"""VGG-13 model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_vgg(13, **kwargs)
def vgg16(**kwargs):
r"""VGG-16 model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_vgg(16, **kwargs)
def vgg19(**kwargs):
r"""VGG-19 model from the `"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
return get_vgg(19, **kwargs)
def vgg11_bn(**kwargs):
r"""VGG-11 model with batch normalization from the
`"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
kwargs['batch_norm'] = True
return get_vgg(11, **kwargs)
def vgg13_bn(**kwargs):
r"""VGG-13 model with batch normalization from the
`"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
kwargs['batch_norm'] = True
return get_vgg(13, **kwargs)
def vgg16_bn(**kwargs):
r"""VGG-16 model with batch normalization from the
`"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
kwargs['batch_norm'] = True
return get_vgg(16, **kwargs)
def vgg19_bn(**kwargs):
r"""VGG-19 model with batch normalization from the
`"Very Deep Convolutional Networks for Large-Scale Image Recognition"
<https://arxiv.org/abs/1409.1556>`_ paper.
Parameters
----------
pretrained : bool, default False
Whether to load the pretrained weights for model.
ctx : Context, default CPU
The context in which to load the pretrained weights.
root : str, default '$MXNET_HOME/models'
Location for keeping the model parameters.
"""
kwargs['batch_norm'] = True
return get_vgg(19, **kwargs)
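# Hedged usage sketch (shapes are illustrative, not from this file):
#
#   import mxnet as mx
#   net = vgg16(pretrained=False, classes=1000)
#   net.initialize()
#   out = net(mx.nd.zeros((1, 3, 224, 224)))   # out.shape == (1, 1000)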
|
|
"""Implementation of the WebSocket protocol.
`WebSockets <http://dev.w3.org/html5/websockets/>`_ allow for bidirectional
communication between the browser and server.
WebSockets are supported in the current versions of all major browsers,
although older versions that do not support WebSockets are still in use
(refer to http://caniuse.com/websockets for details).
This module implements the final version of the WebSocket protocol as
defined in `RFC 6455 <http://tools.ietf.org/html/rfc6455>`_. Certain
browser versions (notably Safari 5.x) implemented an earlier draft of
the protocol (known as "draft 76") and are not compatible with this module.
.. versionchanged:: 4.0
Removed support for the draft 76 protocol version.
"""
from __future__ import absolute_import, division, print_function, with_statement
# Author: Jacob Kristhammar, 2010
import base64
import collections
import hashlib
import os
import struct
import tornado.escape
import tornado.web
import zlib
from tornado.concurrent import TracebackFuture
from tornado.escape import utf8, native_str, to_unicode
from tornado import httpclient, httputil
from tornado.ioloop import IOLoop
from tornado.iostream import StreamClosedError
from tornado.log import gen_log, app_log
from tornado import simple_httpclient
from tornado.tcpclient import TCPClient
from tornado.util import _websocket_mask
try:
    from urllib.parse import urlparse  # py3
except ImportError:
    from urlparse import urlparse  # py2
try:
xrange # py2
except NameError:
xrange = range # py3
class WebSocketError(Exception):
pass
class WebSocketClosedError(WebSocketError):
"""Raised by operations on a closed connection.
.. versionadded:: 3.2
"""
pass
class WebSocketHandler(tornado.web.RequestHandler):
"""Subclass this class to create a basic WebSocket handler.
Override `on_message` to handle incoming messages, and use
`write_message` to send messages to the client. You can also
override `open` and `on_close` to handle opened and closed
connections.
See http://dev.w3.org/html5/websockets/ for details on the
JavaScript interface. The protocol is specified at
http://tools.ietf.org/html/rfc6455.
    Here is an example WebSocket handler that echoes all received messages
    back to the client::
class EchoWebSocket(websocket.WebSocketHandler):
def open(self):
print "WebSocket opened"
def on_message(self, message):
self.write_message(u"You said: " + message)
def on_close(self):
print "WebSocket closed"
WebSockets are not standard HTTP connections. The "handshake" is
HTTP, but after the handshake, the protocol is
message-based. Consequently, most of the Tornado HTTP facilities
are not available in handlers of this type. The only communication
methods available to you are `write_message()`, `ping()`, and
`close()`. Likewise, your request handler class should implement
    the `open()` method rather than ``get()`` or ``post()``.
If you map the handler above to ``/websocket`` in your application, you can
invoke it in JavaScript with::
var ws = new WebSocket("ws://localhost:8888/websocket");
ws.onopen = function() {
ws.send("Hello, world");
};
ws.onmessage = function (evt) {
alert(evt.data);
};
This script pops up an alert box that says "You said: Hello, world".
Web browsers allow any site to open a websocket connection to any other,
instead of using the same-origin policy that governs other network
access from javascript. This can be surprising and is a potential
security hole, so since Tornado 4.0 `WebSocketHandler` requires
applications that wish to receive cross-origin websockets to opt in
by overriding the `~WebSocketHandler.check_origin` method (see that
method's docs for details). Failure to do so is the most likely
cause of 403 errors when making a websocket connection.
When using a secure websocket connection (``wss://``) with a self-signed
certificate, the connection from a browser may fail because it wants
to show the "accept this certificate" dialog but has nowhere to show it.
You must first visit a regular HTML page using the same certificate
to accept it before the websocket connection will succeed.
"""
def __init__(self, application, request, **kwargs):
tornado.web.RequestHandler.__init__(self, application, request,
**kwargs)
self.ws_connection = None
self.close_code = None
self.close_reason = None
self.stream = None
@tornado.web.asynchronous
def get(self, *args, **kwargs):
self.open_args = args
self.open_kwargs = kwargs
# Upgrade header should be present and should be equal to WebSocket
if self.request.headers.get("Upgrade", "").lower() != 'websocket':
self.set_status(400)
self.finish("Can \"Upgrade\" only to \"WebSocket\".")
return
# Connection header should be upgrade. Some proxy servers/load balancers
# might mess with it.
headers = self.request.headers
connection = map(lambda s: s.strip().lower(), headers.get("Connection", "").split(","))
if 'upgrade' not in connection:
self.set_status(400)
self.finish("\"Connection\" must be \"Upgrade\".")
return
# Handle WebSocket Origin naming convention differences
# The difference between version 8 and 13 is that in 8 the
# client sends a "Sec-Websocket-Origin" header and in 13 it's
# simply "Origin".
if "Origin" in self.request.headers:
origin = self.request.headers.get("Origin")
else:
origin = self.request.headers.get("Sec-Websocket-Origin", None)
# If there was an origin header, check to make sure it matches
# according to check_origin. When the origin is None, we assume it
# did not come from a browser and that it can be passed on.
if origin is not None and not self.check_origin(origin):
self.set_status(403)
self.finish("Cross origin websockets not allowed")
return
self.stream = self.request.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
self.ws_connection = WebSocketProtocol13(
self, compression_options=self.get_compression_options())
self.ws_connection.accept_connection()
else:
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 426 Upgrade Required\r\n"
"Sec-WebSocket-Version: 8\r\n\r\n"))
self.stream.close()
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket.
The message may be either a string or a dict (which will be
encoded as json). If the ``binary`` argument is false, the
message will be sent as utf8; in binary mode any byte string
is allowed.
If the connection is already closed, raises `WebSocketClosedError`.
.. versionchanged:: 3.2
`WebSocketClosedError` was added (previously a closed connection
would raise an `AttributeError`)
"""
if self.ws_connection is None:
raise WebSocketClosedError()
if isinstance(message, dict):
message = tornado.escape.json_encode(message)
self.ws_connection.write_message(message, binary=binary)
def select_subprotocol(self, subprotocols):
"""Invoked when a new WebSocket requests specific subprotocols.
``subprotocols`` is a list of strings identifying the
subprotocols proposed by the client. This method may be
overridden to return one of those strings to select it, or
``None`` to not select a subprotocol. Failure to select a
subprotocol does not automatically abort the connection,
although clients may close the connection if none of their
proposed subprotocols was selected.
"""
return None
def get_compression_options(self):
"""Override to return compression options for the connection.
If this method returns None (the default), compression will
be disabled. If it returns a dict (even an empty one), it
will be enabled. The contents of the dict may be used to
control the memory and CPU usage of the compression,
but no such options are currently implemented.
.. versionadded:: 4.1
"""
return None
def open(self):
"""Invoked when a new WebSocket is opened.
The arguments to `open` are extracted from the `tornado.web.URLSpec`
regular expression, just like the arguments to
`tornado.web.RequestHandler.get`.
"""
pass
def on_message(self, message):
"""Handle incoming messages on the WebSocket
This method must be overridden.
"""
raise NotImplementedError
def ping(self, data):
"""Send ping frame to the remote end."""
if self.ws_connection is None:
raise WebSocketClosedError()
self.ws_connection.write_ping(data)
def on_pong(self, data):
"""Invoked when the response to a ping frame is received."""
pass
def on_close(self):
"""Invoked when the WebSocket is closed.
If the connection was closed cleanly and a status code or reason
phrase was supplied, these values will be available as the attributes
``self.close_code`` and ``self.close_reason``.
.. versionchanged:: 4.0
Added ``close_code`` and ``close_reason`` attributes.
"""
pass
def close(self, code=None, reason=None):
"""Closes this Web Socket.
Once the close handshake is successful the socket will be closed.
``code`` may be a numeric status code, taken from the values
defined in `RFC 6455 section 7.4.1
<https://tools.ietf.org/html/rfc6455#section-7.4.1>`_.
``reason`` may be a textual message about why the connection is
closing. These values are made available to the client, but are
not otherwise interpreted by the websocket protocol.
.. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments.
"""
if self.ws_connection:
self.ws_connection.close(code, reason)
self.ws_connection = None
def check_origin(self, origin):
"""Override to enable support for allowing alternate origins.
The ``origin`` argument is the value of the ``Origin`` HTTP
header, the url responsible for initiating this request. This
method is not called for clients that do not send this header;
such requests are always allowed (because all browsers that
implement WebSockets support this header, and non-browser
clients do not have the same cross-site security concerns).
Should return True to accept the request or False to reject it.
By default, rejects all requests with an origin on a host other
than this one.
This is a security protection against cross site scripting attacks on
browsers, since WebSockets are allowed to bypass the usual same-origin
policies and don't use CORS headers.
To accept all cross-origin traffic (which was the default prior to
Tornado 4.0), simply override this method to always return true::
def check_origin(self, origin):
return True
To allow connections from any subdomain of your site, you might
do something like::
def check_origin(self, origin):
parsed_origin = urllib.parse.urlparse(origin)
return parsed_origin.netloc.endswith(".mydomain.com")
.. versionadded:: 4.0
"""
parsed_origin = urlparse(origin)
origin = parsed_origin.netloc
origin = origin.lower()
host = self.request.headers.get("Host")
# Check to see that origin matches host directly, including ports
return origin == host
def set_nodelay(self, value):
"""Set the no-delay flag for this stream.
By default, small messages may be delayed and/or combined to minimize
the number of packets sent. This can sometimes cause 200-500ms delays
due to the interaction between Nagle's algorithm and TCP delayed
ACKs. To reduce this delay (at the expense of possibly increasing
bandwidth usage), call ``self.set_nodelay(True)`` once the websocket
connection is established.
See `.BaseIOStream.set_nodelay` for additional details.
.. versionadded:: 3.1
"""
self.stream.set_nodelay(value)
def on_connection_close(self):
if self.ws_connection:
self.ws_connection.on_connection_close()
self.ws_connection = None
self.on_close()
def send_error(self, *args, **kwargs):
if self.stream is None:
super(WebSocketHandler, self).send_error(*args, **kwargs)
else:
# If we get an uncaught exception during the handshake,
# we have no choice but to abruptly close the connection.
# TODO: for uncaught exceptions after the handshake,
# we can close the connection more gracefully.
self.stream.close()
def _wrap_method(method):
def _disallow_for_websocket(self, *args, **kwargs):
if self.stream is None:
method(self, *args, **kwargs)
else:
raise RuntimeError("Method not supported for Web Sockets")
return _disallow_for_websocket
for method in ["write", "redirect", "set_header", "set_cookie",
"set_status", "flush", "finish"]:
setattr(WebSocketHandler, method,
_wrap_method(getattr(WebSocketHandler, method)))
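# --- Illustrative sketch, not part of tornado.websocket -----------------------
# A minimal echo handler showing how the WebSocketHandler hooks defined above
# fit together; the permissive check_origin policy is an assumption made only
# for this example.
class _EchoWebSocketExample(WebSocketHandler):
    def check_origin(self, origin):
        # Example policy: accept any origin (the pre-Tornado-4.0 behaviour).
        return True

    def open(self):
        gen_log.info("echo websocket opened")

    def on_message(self, message):
        # Echo the payload straight back; binary frames arrive as byte strings.
        self.write_message(message, binary=isinstance(message, bytes))

    def on_close(self):
        gen_log.info("echo websocket closed (code=%r)", self.close_code)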
class WebSocketProtocol(object):
"""Base class for WebSocket protocol versions.
"""
def __init__(self, handler):
self.handler = handler
self.request = handler.request
self.stream = handler.stream
self.client_terminated = False
self.server_terminated = False
def _run_callback(self, callback, *args, **kwargs):
"""Runs the given callback with exception handling.
On error, aborts the websocket connection and returns False.
"""
try:
callback(*args, **kwargs)
except Exception:
app_log.error("Uncaught exception in %s",
self.request.path, exc_info=True)
self._abort()
def on_connection_close(self):
self._abort()
def _abort(self):
"""Instantly aborts the WebSocket connection by closing the socket"""
self.client_terminated = True
self.server_terminated = True
self.stream.close() # forcibly tear down the connection
self.close() # let the subclass cleanup
class _PerMessageDeflateCompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
# There is no symbolic constant for the minimum wbits value.
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._compressor = self._create_compressor()
else:
self._compressor = None
def _create_compressor(self):
return zlib.compressobj(-1, zlib.DEFLATED, -self._max_wbits)
def compress(self, data):
compressor = self._compressor or self._create_compressor()
data = (compressor.compress(data) +
compressor.flush(zlib.Z_SYNC_FLUSH))
assert data.endswith(b'\x00\x00\xff\xff')
return data[:-4]
class _PerMessageDeflateDecompressor(object):
def __init__(self, persistent, max_wbits):
if max_wbits is None:
max_wbits = zlib.MAX_WBITS
if not (8 <= max_wbits <= zlib.MAX_WBITS):
raise ValueError("Invalid max_wbits value %r; allowed range 8-%d",
max_wbits, zlib.MAX_WBITS)
self._max_wbits = max_wbits
if persistent:
self._decompressor = self._create_decompressor()
else:
self._decompressor = None
def _create_decompressor(self):
return zlib.decompressobj(-self._max_wbits)
def decompress(self, data):
decompressor = self._decompressor or self._create_decompressor()
return decompressor.decompress(data + b'\x00\x00\xff\xff')
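# --- Illustrative sketch, not part of tornado.websocket -----------------------
# Round-trip demo for the two permessage-deflate helpers above: compress()
# strips the trailing 0x00 0x00 0xff 0xff sync-flush marker and decompress()
# re-appends it, so a payload survives the trip unchanged.
def _per_message_deflate_roundtrip_example(payload=b"hello websocket"):
    compressor = _PerMessageDeflateCompressor(persistent=False, max_wbits=None)
    decompressor = _PerMessageDeflateDecompressor(persistent=False, max_wbits=None)
    compressed = compressor.compress(payload)
    assert decompressor.decompress(compressed) == payload
    return compressed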
class WebSocketProtocol13(WebSocketProtocol):
"""Implementation of the WebSocket protocol from RFC 6455.
This class supports versions 7 and 8 of the protocol in addition to the
final version 13.
"""
# Bit masks for the first byte of a frame.
FIN = 0x80
RSV1 = 0x40
RSV2 = 0x20
RSV3 = 0x10
RSV_MASK = RSV1 | RSV2 | RSV3
OPCODE_MASK = 0x0f
def __init__(self, handler, mask_outgoing=False,
compression_options=None):
WebSocketProtocol.__init__(self, handler)
self.mask_outgoing = mask_outgoing
self._final_frame = False
self._frame_opcode = None
self._masked_frame = None
self._frame_mask = None
self._frame_length = None
self._fragmented_message_buffer = None
self._fragmented_message_opcode = None
self._waiting = None
self._compression_options = compression_options
self._decompressor = None
self._compressor = None
self._frame_compressed = None
# The total uncompressed size of all messages received or sent.
# Unicode messages are encoded to utf8.
# Only for testing; subject to change.
self._message_bytes_in = 0
self._message_bytes_out = 0
# The total size of all packets received or sent. Includes
# the effect of compression, frame overhead, and control frames.
self._wire_bytes_in = 0
self._wire_bytes_out = 0
def accept_connection(self):
try:
self._handle_websocket_headers()
self._accept_connection()
except ValueError:
gen_log.debug("Malformed WebSocket request received", exc_info=True)
self._abort()
return
def _handle_websocket_headers(self):
"""Verifies all invariant- and required headers
If a header is missing or have an incorrect value ValueError will be
raised
"""
fields = ("Host", "Sec-Websocket-Key", "Sec-Websocket-Version")
if not all(map(lambda f: self.request.headers.get(f), fields)):
raise ValueError("Missing/Invalid WebSocket headers")
@staticmethod
def compute_accept_value(key):
"""Computes the value for the Sec-WebSocket-Accept header,
given the value for Sec-WebSocket-Key.
"""
sha1 = hashlib.sha1()
sha1.update(utf8(key))
sha1.update(b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11") # Magic value
return native_str(base64.b64encode(sha1.digest()))
def _challenge_response(self):
return WebSocketProtocol13.compute_accept_value(
self.request.headers.get("Sec-Websocket-Key"))
def _accept_connection(self):
subprotocol_header = ''
subprotocols = self.request.headers.get("Sec-WebSocket-Protocol", '')
subprotocols = [s.strip() for s in subprotocols.split(',')]
if subprotocols:
selected = self.handler.select_subprotocol(subprotocols)
if selected:
assert selected in subprotocols
subprotocol_header = "Sec-WebSocket-Protocol: %s\r\n" % selected
extension_header = ''
extensions = self._parse_extensions_header(self.request.headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
self._compression_options is not None):
# TODO: negotiate parameters if compression_options
# specifies limits.
self._create_compressors('server', ext[1])
if ('client_max_window_bits' in ext[1] and
ext[1]['client_max_window_bits'] is None):
# Don't echo an offered client_max_window_bits
# parameter with no value.
del ext[1]['client_max_window_bits']
extension_header = ('Sec-WebSocket-Extensions: %s\r\n' %
httputil._encode_header(
'permessage-deflate', ext[1]))
break
self.stream.write(tornado.escape.utf8(
"HTTP/1.1 101 Switching Protocols\r\n"
"Upgrade: websocket\r\n"
"Connection: Upgrade\r\n"
"Sec-WebSocket-Accept: %s\r\n"
"%s%s"
"\r\n" % (self._challenge_response(),
subprotocol_header, extension_header)))
self._run_callback(self.handler.open, *self.handler.open_args,
**self.handler.open_kwargs)
self._receive_frame()
def _parse_extensions_header(self, headers):
extensions = headers.get("Sec-WebSocket-Extensions", '')
if extensions:
return [httputil._parse_header(e.strip())
for e in extensions.split(',')]
return []
def _process_server_headers(self, key, headers):
"""Process the headers sent by the server to this client connection.
'key' is the websocket handshake challenge/response key.
"""
assert headers['Upgrade'].lower() == 'websocket'
assert headers['Connection'].lower() == 'upgrade'
accept = self.compute_accept_value(key)
assert headers['Sec-Websocket-Accept'] == accept
extensions = self._parse_extensions_header(headers)
for ext in extensions:
if (ext[0] == 'permessage-deflate' and
self._compression_options is not None):
self._create_compressors('client', ext[1])
else:
raise ValueError("unsupported extension %r", ext)
def _get_compressor_options(self, side, agreed_parameters):
"""Converts a websocket agreed_parameters set to keyword arguments
for our compressor objects.
"""
options = dict(
persistent=(side + '_no_context_takeover') not in agreed_parameters)
wbits_header = agreed_parameters.get(side + '_max_window_bits', None)
if wbits_header is None:
options['max_wbits'] = zlib.MAX_WBITS
else:
options['max_wbits'] = int(wbits_header)
return options
def _create_compressors(self, side, agreed_parameters):
# TODO: handle invalid parameters gracefully
allowed_keys = set(['server_no_context_takeover',
'client_no_context_takeover',
'server_max_window_bits',
'client_max_window_bits'])
for key in agreed_parameters:
if key not in allowed_keys:
raise ValueError("unsupported compression parameter %r" % key)
other_side = 'client' if (side == 'server') else 'server'
self._compressor = _PerMessageDeflateCompressor(
**self._get_compressor_options(side, agreed_parameters))
self._decompressor = _PerMessageDeflateDecompressor(
**self._get_compressor_options(other_side, agreed_parameters))
def _write_frame(self, fin, opcode, data, flags=0):
if fin:
finbit = self.FIN
else:
finbit = 0
frame = struct.pack("B", finbit | opcode | flags)
l = len(data)
if self.mask_outgoing:
mask_bit = 0x80
else:
mask_bit = 0
if l < 126:
frame += struct.pack("B", l | mask_bit)
elif l <= 0xFFFF:
frame += struct.pack("!BH", 126 | mask_bit, l)
else:
frame += struct.pack("!BQ", 127 | mask_bit, l)
if self.mask_outgoing:
mask = os.urandom(4)
data = mask + _websocket_mask(mask, data)
frame += data
self._wire_bytes_out += len(frame)
self.stream.write(frame)
def write_message(self, message, binary=False):
"""Sends the given message to the client of this Web Socket."""
if binary:
opcode = 0x2
else:
opcode = 0x1
message = tornado.escape.utf8(message)
assert isinstance(message, bytes)
self._message_bytes_out += len(message)
flags = 0
if self._compressor:
message = self._compressor.compress(message)
flags |= self.RSV1
try:
self._write_frame(True, opcode, message, flags=flags)
except StreamClosedError:
self._abort()
def write_ping(self, data):
"""Send ping frame."""
assert isinstance(data, bytes)
self._write_frame(True, 0x9, data)
def _receive_frame(self):
try:
self.stream.read_bytes(2, self._on_frame_start)
except StreamClosedError:
self._abort()
def _on_frame_start(self, data):
self._wire_bytes_in += len(data)
header, payloadlen = struct.unpack("BB", data)
self._final_frame = header & self.FIN
reserved_bits = header & self.RSV_MASK
self._frame_opcode = header & self.OPCODE_MASK
self._frame_opcode_is_control = self._frame_opcode & 0x8
if self._decompressor is not None:
self._frame_compressed = bool(reserved_bits & self.RSV1)
reserved_bits &= ~self.RSV1
if reserved_bits:
# client is using as-yet-undefined extensions; abort
self._abort()
return
self._masked_frame = bool(payloadlen & 0x80)
payloadlen = payloadlen & 0x7f
if self._frame_opcode_is_control and payloadlen >= 126:
# control frames must have payload < 126
self._abort()
return
try:
if payloadlen < 126:
self._frame_length = payloadlen
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
elif payloadlen == 126:
self.stream.read_bytes(2, self._on_frame_length_16)
elif payloadlen == 127:
self.stream.read_bytes(8, self._on_frame_length_64)
except StreamClosedError:
self._abort()
def _on_frame_length_16(self, data):
self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!H", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_frame_length_64(self, data):
self._wire_bytes_in += len(data)
self._frame_length = struct.unpack("!Q", data)[0]
try:
if self._masked_frame:
self.stream.read_bytes(4, self._on_masking_key)
else:
self.stream.read_bytes(self._frame_length, self._on_frame_data)
except StreamClosedError:
self._abort()
def _on_masking_key(self, data):
self._wire_bytes_in += len(data)
self._frame_mask = data
try:
self.stream.read_bytes(self._frame_length, self._on_masked_frame_data)
except StreamClosedError:
self._abort()
def _on_masked_frame_data(self, data):
# Don't touch _wire_bytes_in; we'll do it in _on_frame_data.
self._on_frame_data(_websocket_mask(self._frame_mask, data))
def _on_frame_data(self, data):
self._wire_bytes_in += len(data)
if self._frame_opcode_is_control:
# control frames may be interleaved with a series of fragmented
# data frames, so control frames must not interact with
# self._fragmented_*
if not self._final_frame:
# control frames must not be fragmented
self._abort()
return
opcode = self._frame_opcode
elif self._frame_opcode == 0: # continuation frame
if self._fragmented_message_buffer is None:
# nothing to continue
self._abort()
return
self._fragmented_message_buffer += data
if self._final_frame:
opcode = self._fragmented_message_opcode
data = self._fragmented_message_buffer
self._fragmented_message_buffer = None
else: # start of new data message
if self._fragmented_message_buffer is not None:
# can't start new message until the old one is finished
self._abort()
return
if self._final_frame:
opcode = self._frame_opcode
else:
self._fragmented_message_opcode = self._frame_opcode
self._fragmented_message_buffer = data
if self._final_frame:
self._handle_message(opcode, data)
if not self.client_terminated:
self._receive_frame()
def _handle_message(self, opcode, data):
if self.client_terminated:
return
if self._frame_compressed:
data = self._decompressor.decompress(data)
if opcode == 0x1:
# UTF-8 data
self._message_bytes_in += len(data)
try:
decoded = data.decode("utf-8")
except UnicodeDecodeError:
self._abort()
return
self._run_callback(self.handler.on_message, decoded)
elif opcode == 0x2:
# Binary data
self._message_bytes_in += len(data)
self._run_callback(self.handler.on_message, data)
elif opcode == 0x8:
# Close
self.client_terminated = True
if len(data) >= 2:
self.handler.close_code = struct.unpack('>H', data[:2])[0]
if len(data) > 2:
self.handler.close_reason = to_unicode(data[2:])
self.close()
elif opcode == 0x9:
# Ping
self._write_frame(True, 0xA, data)
elif opcode == 0xA:
# Pong
self._run_callback(self.handler.on_pong, data)
else:
self._abort()
def close(self, code=None, reason=None):
"""Closes the WebSocket connection."""
if not self.server_terminated:
if not self.stream.closed():
if code is None and reason is not None:
code = 1000 # "normal closure" status code
if code is None:
close_data = b''
else:
close_data = struct.pack('>H', code)
if reason is not None:
close_data += utf8(reason)
self._write_frame(True, 0x8, close_data)
self.server_terminated = True
if self.client_terminated:
if self._waiting is not None:
self.stream.io_loop.remove_timeout(self._waiting)
self._waiting = None
self.stream.close()
elif self._waiting is None:
# Give the client a few seconds to complete a clean shutdown,
# otherwise just close the connection.
self._waiting = self.stream.io_loop.add_timeout(
self.stream.io_loop.time() + 5, self._abort)
class WebSocketClientConnection(simple_httpclient._HTTPConnection):
"""WebSocket client connection.
This class should not be instantiated directly; use the
`websocket_connect` function instead.
"""
def __init__(self, io_loop, request, compression_options=None):
self.compression_options = compression_options
self.connect_future = TracebackFuture()
self.read_future = None
self.read_queue = collections.deque()
self.key = base64.b64encode(os.urandom(16))
scheme, sep, rest = request.url.partition(':')
scheme = {'ws': 'http', 'wss': 'https'}[scheme]
request.url = scheme + sep + rest
request.headers.update({
'Upgrade': 'websocket',
'Connection': 'Upgrade',
'Sec-WebSocket-Key': self.key,
'Sec-WebSocket-Version': '13',
})
if self.compression_options is not None:
# Always offer to let the server set our max_wbits (and even though
# we don't offer it, we will accept a client_no_context_takeover
# from the server).
# TODO: set server parameters for deflate extension
# if requested in self.compression_options.
request.headers['Sec-WebSocket-Extensions'] = (
'permessage-deflate; client_max_window_bits')
self.tcp_client = TCPClient(io_loop=io_loop)
super(WebSocketClientConnection, self).__init__(
io_loop, None, request, lambda: None, self._on_http_response,
104857600, self.tcp_client, 65536)
def close(self, code=None, reason=None):
"""Closes the websocket connection.
``code`` and ``reason`` are documented under
`WebSocketHandler.close`.
.. versionadded:: 3.2
.. versionchanged:: 4.0
Added the ``code`` and ``reason`` arguments.
"""
if self.protocol is not None:
self.protocol.close(code, reason)
self.protocol = None
def on_connection_close(self):
if not self.connect_future.done():
self.connect_future.set_exception(StreamClosedError())
self.on_message(None)
self.tcp_client.close()
super(WebSocketClientConnection, self).on_connection_close()
def _on_http_response(self, response):
if not self.connect_future.done():
if response.error:
self.connect_future.set_exception(response.error)
else:
self.connect_future.set_exception(WebSocketError(
"Non-websocket response"))
def headers_received(self, start_line, headers):
if start_line.code != 101:
return super(WebSocketClientConnection, self).headers_received(
start_line, headers)
self.headers = headers
self.protocol = WebSocketProtocol13(
self, mask_outgoing=True,
compression_options=self.compression_options)
self.protocol._process_server_headers(self.key, self.headers)
self.protocol._receive_frame()
if self._timeout is not None:
self.io_loop.remove_timeout(self._timeout)
self._timeout = None
self.stream = self.connection.detach()
self.stream.set_close_callback(self.on_connection_close)
# Once we've taken over the connection, clear the final callback
# we set on the http request. This deactivates the error handling
# in simple_httpclient that would otherwise interfere with our
# ability to see exceptions.
self.final_callback = None
self.connect_future.set_result(self)
def write_message(self, message, binary=False):
"""Sends a message to the WebSocket server."""
self.protocol.write_message(message, binary)
def read_message(self, callback=None):
"""Reads a message from the WebSocket server.
Returns a future whose result is the message, or None
if the connection is closed. If a callback argument
is given it will be called with the future when it is
ready.
"""
assert self.read_future is None
future = TracebackFuture()
if self.read_queue:
future.set_result(self.read_queue.popleft())
else:
self.read_future = future
if callback is not None:
self.io_loop.add_future(future, callback)
return future
def on_message(self, message):
if self.read_future is not None:
self.read_future.set_result(message)
self.read_future = None
else:
self.read_queue.append(message)
def on_pong(self, data):
pass
def websocket_connect(url, io_loop=None, callback=None, connect_timeout=None,
compression_options=None):
"""Client-side websocket support.
Takes a url and returns a Future whose result is a
`WebSocketClientConnection`.
``compression_options`` is interpreted in the same way as the
return value of `.WebSocketHandler.get_compression_options`.
.. versionchanged:: 3.2
Also accepts ``HTTPRequest`` objects in place of urls.
.. versionchanged:: 4.1
Added ``compression_options``.
"""
if io_loop is None:
io_loop = IOLoop.current()
if isinstance(url, httpclient.HTTPRequest):
assert connect_timeout is None
request = url
# Copy and convert the headers dict/object (see comments in
# AsyncHTTPClient.fetch)
request.headers = httputil.HTTPHeaders(request.headers)
else:
request = httpclient.HTTPRequest(url, connect_timeout=connect_timeout)
request = httpclient._RequestProxy(
request, httpclient.HTTPRequest._DEFAULTS)
conn = WebSocketClientConnection(io_loop, request, compression_options)
if callback is not None:
io_loop.add_future(conn.connect_future, callback)
return conn.connect_future
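# --- Illustrative sketch, not part of tornado.websocket -----------------------
# Typical client-side usage of websocket_connect from a coroutine. The URL is
# an assumption for the example and must point at a running websocket server.
def _websocket_connect_example(url="ws://localhost:8888/echo"):
    from tornado import gen
    from tornado.ioloop import IOLoop

    @gen.coroutine
    def _run():
        conn = yield websocket_connect(url)
        conn.write_message(u"hello")
        reply = yield conn.read_message()  # resolves to None once the peer closes
        conn.close()
        raise gen.Return(reply)

    return IOLoop.current().run_sync(_run)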
|
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#### Mozu Auth - Upload - GetKey ####
def get_mozu_authtoken(tenant_url):
import requests, json
# "http://requestb.in/q66719q6" #
auth_url = "https://home.staging.mozu.com/api/platform/applications/authtickets"
tenant_url = tenant_url
headers = {'Content-type': 'application/json',
'Accept-Encoding': 'gzip, deflate'}
auth_request = {'applicationId' : 'bluefly.product_images.1.0.0.release', 'sharedSecret' : '53de2fb67cb04a95af323693caa48ddb'}
auth_response = requests.post(auth_url, data=json.dumps(auth_request), headers=headers, verify=False)
# parse params from filepath
# TODO: 5) add Validation(regex) to prevent unwanted updates
##
print "Auth Response: %s" % auth_response.status_code
auth_response.raise_for_status()
auth = auth_response.json()
print "Auth Ticket: %s" % auth["accessToken"]
return auth["accessToken"]
# Upload and Return MozuID
def upload_productimgs_mozu(src_filepath, mz_imageid=None):
import requests, json
import os.path as path
tenant_url = "https://t11146.staging-sb.mozu.com/"
headers = {'Content-type': 'application/json', 'x-vol-app-claims' : get_mozu_authtoken(tenant_url), 'x-vol-tenant' : '11146', 'x-vol-master-catalog' : '1' }
#, 'x-vol-dataview-mode': 'Pending', # ??'x-vol-site' : '1', }
document_data_api = tenant_url + "/api/content/documentlists/files@mozu/documents"
bf_imageid = path.basename(src_filepath) #[:-1]
print bf_imageid, src_filepath
ext = bf_imageid.split('.')[-1]
document = ''
document_id = ''
document_payload = {'listFQN' : 'files@mozu', 'documentTypeFQN' : 'image@mozu', 'name' : bf_imageid, 'extension' : ext}
document_response = requests.post(document_data_api, data=json.dumps(document_payload), headers=headers, verify=False )
#document_response.raise_for_status()
if document_response.status_code < 400:
document = document_response.json()
try:
document_id = document["id"]
# colorstyle = bf_imageid[:9]
# insert_docid_db(db_name,document_id=document_id, bf_imageid=bf_imageid, colorstyle=colorstyle, img_number=sequence)
except KeyError:
document_response = requests.put(document_data_api, data=json.dumps(document_payload), headers=headers, verify=False)
document = document_response.json()
document_id = document["id"]
document_response.raise_for_status()
## create rest url with doc id from resp
document_content_api = tenant_url + "/api/content/documentlists/files@mozu/documents/" + document_id + "/content"
#files = {'media': open(src_filepath, 'rb')}
mimetype = "image/{}".format(ext.lower().replace('jpg','jpeg'))
headers["Content-type"] = mimetype
file_data = open(src_filepath, 'rb').read()
content_response = requests.put(document_content_api, data=file_data, headers=headers, verify=False)
print "document ID: %s" % document_id
print "document_payload: %s" % document_payload
print "Document content upload Response: %s" % content_response.text
#document_response.raise_for_status()
return document_id, content_response
#return bf_imageid, mz_imageid
elif document_response.status_code == 409:
mimetype = "image/{}".format(ext.lower().replace('jpg', 'jpeg'))
headers["Content-type"] = mimetype
print document_id, ' <-- DocId 409 Code Numero 1'
print 'LOCOS -->', locals()
mz_imageid = orcl_get_mz_imageid_bf_imageid(bf_imageid)
if mz_imageid is not None:
print 'Old MozuID Retrieved from ORCL', mz_imageid
documentUploadApi = tenant_url + "/api/content/documentlists/files@mozu/documents/" + mz_imageid + "/content"
# files = {'media': open("c:\mozu-dc-logo.png", "rb")};
file_data = open(src_filepath, 'rb').read()
headers["Content-type"] = mimetype #"image/png";
            content_response = requests.put(documentUploadApi, data=file_data, headers=headers, verify=False)
document = content_response.json()
document_id = document["id"]
print document_id, ' <-- DocId 409 Code'
print '409 Err --> Bluefly Filename Already in Mozu, if you are trying to update the image, this is not the way.\n\t%s' % src_filepath
## TODO: 1) On duplicate file in mozu, check PGSQL by Filename and compare stored MD5 with current MD5.
## TODO: 1A) If same MD5 skip and return MOZUID, else if different.....
## TODO 2) Update Mozu stored image using main_update_put(src_filepath), sending to an "update" endpoint(need to get uri)
## TODO: 3) Update PGSQL MOZUID + MD5
## TODO: 4) Bust image cache on updates in MOZU by forcing MEDIA_VERSION to increment -- Need API endpoint to PM or its going to be super hackey.
pass
else:
print 'MZID is None'
else:
print 'Failed with code --> ', document_response.status_code
# make initial table and update timestamp on modify as function and trigger of the function on the table
# def init_pg_mktble_fnc_trig():
# import psycopg2
# droptable = "DROP TABLE IF EXISTS MOZU_IMAGE;"
# createtbl = "CREATE TABLE IF NOT EXISTS MOZU_IMAGE (id serial PRIMARY KEY, bf_imageid varchar NOT NULL, mz_imageid varchar NOT NULL, md5checksum varchar, updated_at TIMESTAMP NOT NULL DEFAULT 'now'::timestamp, update_ct bigint NOT NULL DEFAULT 1, UNIQUE(bf_imageid));"
# # Auto Mod time Now Func and trig
# createfunc_nowonupdate = "CREATE OR REPLACE FUNCTION update_updated_at_column() RETURNS trigger BEGIN NEW.updated_at := SYSDATE; RETURN NEW; END;"
# createtrig_nowonupdate = "CREATE TRIGGER MOZU_IMAGE_updated_at_column BEFORE INSERT OR UPDATE ON MOZU_IMAGE FOR EACH ROW EXECUTE PROCEDURE update_updated_at_column();"
#
# create_timestamperfunc = "CREATE OR REPLACE FUNCTION trig_time_stamper() RETURNS trigger BEGIN NEW.updated_at := CURRENT_TIMESTAMP; RETURN NEW; END;"
# create_timestampertrig = "CREATE TRIGGER trig_1 BEFORE INSERT OR UPDATE ON MOZU_IMAGE FOR EACH ROW EXECUTE PROCEDURE trig_time_stamper(); OF updated_at"
#
# # Auto incr after modify
# #createfunc_tablehits = "CREATE SEQUENCE tablehits INCREMENT BY 1 MINVALUE 1;"
# # createfunc_incronupdate = "CREATE SEQUENCE update_ct INCREMENT BY 1 MINVALUE 1;
# #createfunc_incronupdate = "CREATE OR REPLACE FUNCTION incr_update_ct() RETURNS trigger BEGIN NEW.UPDATED_COUNT := nextval('update_ct'); RETURN NEW; END; $BODY$;"
# #createtrig_incronupdate = "CREATE TRIGGER MOZU_IMAGE_incr_update_ct BEFORE INSERT OR UPDATE ON MOZU_IMAGE FOR EACH ROW EXECUTE PROCEDURE incr_update_ct();"
# ## Below used if Table exists -- which it obviously should since I just called the mktble above
# # createfuncalter_incronupdate = "ALTER TABLE MOZU_IMAGE ALTER update_ct SET DEFAULT nextval('update_ct'); "
#
# conn = get_mzimg_oracle_connection()
# cur = conn
#
# # drop if exists to create a new one
# #cur.execute(droptable)
#conn.commit()
#
# cur.execute(createtbl)
## conn.commit()
#
# try:
# cur.execute(createfunc_nowonupdate)
# cur.execute(createtrig_nowonupdate)
# conn.commit()
# cur.execute(create_timestamperfunc)
# cur.execute(create_timestampertrig)
# conn.commit()
# cur.execute(createfunc_incronupdate)
# cur.execute(createtrig_incronupdate)
# conn.commit()
# cur.execute(createfuncalter_incronupdate)
# conn.commit()
# except psycopg2.ProgrammingError, e:
# print 'Passing Psycopg2 ProgErr...%s' % e
# pass
# finally:
# if conn:
# conn.commit()
# conn.close()
### Utility Funx - Get File Data
## util func calcs md5 of file
def md5_checksumer(src_filepath):
import hashlib
import os.path as path
if path.isfile(src_filepath):
filepath = path.abspath(src_filepath)
try:
_file = open(filepath, "rb")
content = _file.read()
_file.close()
md5 = hashlib.md5(content)
_hash = md5.hexdigest()
return _hash
except:
return False
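## Hedged alternative sketch (not called anywhere in this script): computes the
## same checksum as md5_checksumer but reads the file in chunks so large images
## don't have to be loaded into memory at once.
def md5_checksumer_chunked(src_filepath, chunk_size=1024 * 1024):
    import hashlib
    import os.path as path
    if not path.isfile(src_filepath):
        return False
    md5 = hashlib.md5()
    with open(path.abspath(src_filepath), 'rb') as _file:
        for chunk in iter(lambda: _file.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()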
## Extracts the image metadata from file
def get_exif_all_data(src_filepath):
import exiftool
with exiftool.ExifTool() as et:
metadata = et.get_metadata(src_filepath) # ['XMP:DateCreated'][:10].replace(':','-')
return metadata
### Generic Logger
def mr_logger(filepath,*args):
import datetime
current_dt = datetime.datetime.strftime(datetime.datetime.now(), '%Y-%m-%d')
logged_items = []
if len(args) > 0:
for arg in args:
logit = "{}\t{}\n".format(current_dt,arg)
logged_items.append(logit)
for i in logged_items:
with open(filepath, 'ab+') as f:
f.write(i)
return filepath
####################
### oracle Funcs
##
########################### Replaced By Alchemy ############################
def get_mzimg_oracle_connection():
    import sqlalchemy
    orcl_engine = sqlalchemy.create_engine('oracle+cx_oracle://MZIMG:[email protected]:1521/bfyqa1201')
    cur = orcl_engine.raw_connection().cursor()
    conn = orcl_engine.connect()
return conn, cur
# Store Key in pgsql
def orcl_insert_bf_imageid_mz_imageid(bf_imageid, mz_imageid, md5checksum=''):
# HERE IS THE IMPORTANT PART, by specifying a name for the cursor
# psycopg2 creates a server-side cursor, which prevents all of the
# records from being downloaded at once from the server
import datetime, sqlalchemy
dt = datetime.datetime.now()
upsert_timestamp = datetime.datetime.strftime(dt, "%Y-%m-%d %H:%M:%S")
upsert_timestamp = datetime.datetime.strftime(dt, "%m%d%Y")
try:
conn, cur = get_mzimg_oracle_connection()
#cur = conn
query = "INSERT INTO MOZU_IMAGE (bf_imageid, mz_imageid, md5checksum) VALUES ('{0}', '{1}', '{2}');".format(bf_imageid, mz_imageid, md5checksum)
print query, 'QUERY'
cur.execute(query)
#cur.execute("INSERT INTO MOZU_IMAGE(bf_imageid, mz_imageid, md5checksum, CREATED_DATE) VALUES(%s, %s, %s, TO_DATE('%s','MMDDYY'));", (bf_imageid, mz_imageid, md5checksum, upsert_timestamp))
## conn.commit()
conn.close()
except IndexError:
pass
#########
# Update
def orcl_update_bf_imageid_mz_imageid(bf_imageid, mz_imageid, md5checksum=''):
# HERE IS THE IMPORTANT PART, by specifying a name for the cursor
# psycopg2 creates a server-side cursor, which prevents all of the
# records from being downloaded at once from the server
import datetime
dt = datetime.datetime.now()
upsert_timestamp = datetime.datetime.strftime(dt, "%Y-%m-%d %H:%M:%S")
upsert_timestamp = datetime.datetime.strftime(dt, "%m%d%Y")
try:
conn, cur = get_mzimg_oracle_connection()
#cur = conn
# SET update_ct = update_ct + 1
cur.execute("""UPDATE MOZU_IMAGE
SET mz_imageid='{0}',
md5checksum='{1}',
MODIFIED_DATE=TO_DATE('{2}','MMDDYY'),
UPDATED_COUNT=(UPDATED_COUNT + 1)
WHERE bf_imageid='{3}';""".format(mz_imageid, md5checksum, upsert_timestamp, bf_imageid))
## conn.commit()
conn.close()
except IndexError:
pass
# Get mozu img ID from bfly file id
def orcl_get_mz_imageid_bf_imageid(bf_imageid):
conn, cur = get_mzimg_oracle_connection()
#cur = conn
print bf_imageid,' BFIMGID'
try:
query = """SELECT mz_imageid FROM MOZU_IMAGE WHERE bf_imageid='{0}';""".format(bf_imageid)
print query
res = cur.execute(query)
        mz_imageid = [r[0] for r in res]
        if mz_imageid:
            # return the stored mozu id for this filename (first match)
            return mz_imageid[0]
        else:
            return False
except TypeError:
return False
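# Hedged sketch (not wired into the upload flow): the same lookup written with
# SQLAlchemy bind parameters instead of interpolating values into the SQL string.
def orcl_get_mz_imageid_bf_imageid_bound(bf_imageid):
    import sqlalchemy
    conn, cur = get_mzimg_oracle_connection()
    query = sqlalchemy.text("SELECT mz_imageid FROM MOZU_IMAGE WHERE bf_imageid = :bf_imageid")
    rows = conn.execute(query, bf_imageid=bf_imageid).fetchall()
    conn.close()
    return rows[0][0] if rows else False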
# Get mozu img url
def orcl_get_mozuimageurl_bf_imageid(bf_imageid, destpath=None):
import requests
mozu_files_prefix = 'http://cdn-stg-sb.mozu.com/11146-m1/cms/files/'
mz_imageid = orcl_get_mz_imageid_bf_imageid(bf_imageid)
mozuimageurl = "{}{}".format(mozu_files_prefix, mz_imageid)
res = requests.get(mozuimageurl)
if res.status_code >= 400:
return ''
elif not destpath:
return res
else:
        with open(destpath, 'wb') as f:
f.write(res.content)
return destpath
# Validate new file before insert or perform update function on failed validation, due to duplicate key in DB
def orcl_validate_md5checksum(md5checksum, bf_imageid=None):
import requests
conn, cur = get_mzimg_oracle_connection()
#cur = conn
result = ''
if bf_imageid:
print 'Not NONE --', bf_imageid
cur.execute("SELECT bf_imageid FROM MOZU_IMAGE WHERE md5checksum = '{0}' AND bf_imageid = '{1}'".format(md5checksum, bf_imageid))
result = cur.fetchone()
else:
print 'NONE --', bf_imageid
cur.execute("SELECT bf_imageid FROM MOZU_IMAGE WHERE md5checksum = '{0}'".format(md5checksum))
result = cur.fetchone()
## If Value >1
print bf_imageid, result, '--- bf_imageid -- result'
#conn.commit()
conn.close()
if result:
return result
else: return ''
## Validate file name only
def orcl_validate_bf_imageid(bf_imageid=None):
import requests
conn, cur = get_mzimg_oracle_connection()
#cur = conn
result = ''
if bf_imageid is not None:
print 'Not NONE --', bf_imageid
cur.execute("SELECT mz_imageid FROM MOZU_IMAGE WHERE bf_imageid = '{0}'".format(bf_imageid))
result = cur.fetchone()
#conn.commit()
conn.close()
if result:
return result
else: return ''
# if result:
# try:
# mozu_files_prefix = 'http://cdn-stg-sb.mozu.com/11146-m1/cms/files/'
# mz_imageid = orcl_get_mz_imageid_bf_imageid(bf_imageid)
# mozuimageurl = "{}{}".format(mozu_files_prefix, mz_imageid)
# return bf_imageid, mozuimageurl,
# except TypeError:
# return ''
# else:
# return ''
###################### Now The Alchemy Way ####################
#######################
##########################################################
########################
# Converts image to jpeg
def magick_convert_to_jpeg(img, destdir=None):
import subprocess
ext = img.split('.')[-1]
outfile = img.split('/')[-1].split('.')[0] + ".jpg"
    if destdir:
        import os.path as path
        outfile = path.join(destdir, outfile)
subprocess.call([
'convert',
'-colorspace',
'RGB',
"-format",
ext,
img,
"-depth",
"16",
"-density",
"72x72",
# "-profile",
# "/usr/local/color_profiles/AdobeRGB1998.icc",
# "-colorspace",
# "RGB",
"-define",
"jpeg:dct-method\=float",
"-define",
"jpeg:sampling-factor\=4:2:2",
"-filter",
"LanczosSharp",
"-compress",
"JPEG",
'-quality',
'90%',
# "-profile",
# '/usr/local/color_profiles/sRGB.icm',
"-interlace",
"Plane",
"-colorspace",
'sRGB',
"-depth",
"8",
"-format",
"jpeg",
"-strip",
outfile
])
return outfile
########################
##########################################################
## --> cache bursting to incr the media_version ID# -- ###
def update_pm_photodate_incr_version_hack(src_filepath):
    import requests
    import os.path as path
if len(src_filepath) == 9 and src_filepath.isdigit():
colorstyle = src_filepath
else:
colorstyle = path.basename(src_filepath)[:9]
update_url = 'http://dmzimage01.l3.bluefly.com:8080/photo/{0}'.format(colorstyle)
data = {"sample_image": "Y", "photographed_date": "now"}
res = requests.put(update_url, data=data)
return res
##########################################################
def main_update_put(bf_imageid, mz_imageid,md5checksum):
## Finally Store New mozuiD and md5checksum
orcl_update_bf_imageid_mz_imageid(bf_imageid, mz_imageid,md5checksum)
return
##########################################################
# ### Main Combined Post or Get -- TODO: --> main_update_put(src_filepath)
# full uploading cmdline shell script, file as sys argv
def main_upload_post(src_filepath):
import os.path as path
##############################
# remove db setup funcs after init release into prod
# try:
# init_pg_mktble_fnc_trig()
# except:
# pass
##############################
## Convert it to jpg if not one (ie png, tiff, gif)
ext = src_filepath.split('.')[-1].lower().replace('jpeg','jpg')
src_basename = path.basename(src_filepath)
if ext == 'jpg':
pass
else:
src_filepath = magick_convert_to_jpeg(src_filepath, destdir=None)
if src_basename[:9].isdigit() and ext:
bf_imageid = path.basename(src_filepath) # .split('.')[0]
else:
bf_imageid = ''
md5checksum = md5_checksumer(src_filepath)
mz_imageid = ''
args = [
{'bf_imageid': bf_imageid },
{'mz_imageid': mz_imageid },
{'md5checksum': md5checksum }
]
    # 1A Validate md5 against the stored record (TODO: port to SQLAlchemy select()/where())
    if bf_imageid and orcl_validate_md5checksum(md5checksum, bf_imageid=bf_imageid):
        md5colorstyle_exists = md5checksum
    else:
        md5colorstyle_exists = ''
    # 1B Validate whether this filename is already stored (TODO: port to SQLAlchemy)
    bf_imageid_exists = orcl_validate_bf_imageid(bf_imageid=bf_imageid)
    if bf_imageid_exists and not md5colorstyle_exists:
        # filename already stored with a different checksum; reuse its mozu id for the update path
        mz_imageid = orcl_get_mz_imageid_bf_imageid(bf_imageid)
import json
json_insert = json.dumps(args)
## Finished collecting k/v data to send now send if md5colorstyle_exists returns False (meaning we dont have an image for this yet)
if not md5colorstyle_exists and not bf_imageid_exists:
try:
mz_imageid, content_response = upload_productimgs_mozu(src_filepath)
orcl_insert_bf_imageid_mz_imageid(bf_imageid, mz_imageid, md5checksum)
# 2 Insert # TODO: NEW # sqlalchemy table.insert()
printoutput = 'bf_imageid={}\tmz_imageid={}\tmd5checksum={}\n'.format(bf_imageid, mz_imageid, md5checksum).split()
mr_logger('/mnt/mozu_upload.txt', printoutput)
print printoutput, ' Line-420RESULT'
return mz_imageid, bf_imageid
except TypeError, e:
print '\n\t...', src_filepath, ' None TypeError --> ', e
pass
#finally:
# print('Completed ', bf_imageid, md5checksum)
elif bf_imageid_exists and not md5colorstyle_exists:
updated_mz_imageid, content_response = upload_productimgs_mozu(src_filepath, mz_imageid=mz_imageid)
orcl_update_bf_imageid_mz_imageid(bf_imageid, updated_mz_imageid, md5checksum=md5checksum)
# 3 Update # TODO: NEW # sqlalchemy table.update()
else:
print md5colorstyle_exists, ' \n\t<-- Duplicated - Passing -- Exists -- with --> ', bf_imageid
# Query/Display previous/currentDB info
def main_retrieve_get(**kwargs):
args_ct=len(kwargs.items())
bf_imageid = kwargs.get('bf_imageid')
mozu_files_prefix = 'http://cdn-stg-sb.mozu.com/11146-m1/cms/files/'
mz_imageid = orcl_get_mz_imageid_bf_imageid(bf_imageid)
mozuimageurl = "{}{}".format(mozu_files_prefix,mz_imageid)
print 'bf_imageid={}\nmz_imageid={}'.format(bf_imageid, mz_imageid)
return (mozuimageurl, bf_imageid,)
if __name__ == '__main__':
import sys
import os.path as path
ext = '.jpg'
if path.isfile(sys.argv[1]):
src_filepath = sys.argv[1]
ext = src_filepath.split('.')[-1]
result = main_upload_post(src_filepath)
print "Result --> ", result, src_filepath
elif sys.argv[1][:9].isdigit() and len(sys.argv[1]) < 20:
bf_imageid = sys.argv[1]
try:
destpath = sys.argv[2]
if path.isfile(destpath):
orcl_get_mozuimageurl_bf_imageid(bf_imageid, destpath=destpath)
elif path.isdir(destpath):
orcl_get_mozuimageurl_bf_imageid(bf_imageid, destpath=path.join(destpath, bf_imageid))
except IndexError:
destpath = ''
|
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.forms import (
CharField, DateField, FileField, Form, IntegerField, SplitDateTimeField,
ValidationError, formsets,
)
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.test import TestCase
from django.utils.encoding import force_text
class Choice(Form):
choice = CharField()
votes = IntegerField()
# A FormSet allows us to use multiple instances of the same form on one page. For now,
# the best way to create a FormSet is by using the formset_factory function.
ChoiceFormSet = formset_factory(Choice)
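# Illustrative data (not one of the original tests): the management-form keys a
# bound ChoiceFormSet expects when no prefix is given (the default prefix is
# "form"); with this data, ChoiceFormSet(_EXAMPLE_CHOICE_DATA).is_valid() is True.
_EXAMPLE_CHOICE_DATA = {
    'form-TOTAL_FORMS': '1',
    'form-INITIAL_FORMS': '0',
    'form-MIN_NUM_FORMS': '0',
    'form-MAX_NUM_FORMS': '1000',
    'form-0-choice': 'Calexico',
    'form-0-votes': '100',
}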
class FavoriteDrinkForm(Form):
name = CharField()
class BaseFavoriteDrinksFormSet(BaseFormSet):
def clean(self):
seen_drinks = []
for drink in self.cleaned_data:
if drink['name'] in seen_drinks:
raise ValidationError('You may only specify a drink once.')
seen_drinks.append(drink['name'])
class EmptyFsetWontValidate(BaseFormSet):
def clean(self):
raise ValidationError("Clean method called")
# Let's define a FormSet that takes a list of favorite drinks, but raises an
# error if there are any duplicates. Used in ``test_clean_hook``,
# ``test_regression_6926`` & ``test_regression_12878``.
FavoriteDrinksFormSet = formset_factory(FavoriteDrinkForm,
formset=BaseFavoriteDrinksFormSet, extra=3)
# Used in ``test_formset_splitdatetimefield``.
class SplitDateTimeForm(Form):
when = SplitDateTimeField(initial=datetime.datetime.now)
SplitDateTimeFormSet = formset_factory(SplitDateTimeForm)
class FormsFormsetTestCase(TestCase):
def make_choiceformset(self, formset_data=None, formset_class=ChoiceFormSet,
total_forms=None, initial_forms=0, max_num_forms=0, min_num_forms=0, **kwargs):
"""
Make a ChoiceFormset from the given formset_data.
The data should be given as a list of (choice, votes) tuples.
"""
kwargs.setdefault('prefix', 'choices')
kwargs.setdefault('auto_id', False)
if formset_data is None:
return formset_class(**kwargs)
if total_forms is None:
total_forms = len(formset_data)
def prefixed(*args):
args = (kwargs['prefix'],) + args
return '-'.join(args)
data = {
prefixed('TOTAL_FORMS'): str(total_forms),
prefixed('INITIAL_FORMS'): str(initial_forms),
prefixed('MAX_NUM_FORMS'): str(max_num_forms),
prefixed('MIN_NUM_FORMS'): str(min_num_forms),
}
for i, (choice, votes) in enumerate(formset_data):
data[prefixed(str(i), 'choice')] = choice
data[prefixed(str(i), 'votes')] = votes
return formset_class(data, **kwargs)
def test_basic_formset(self):
# A FormSet constructor takes the same arguments as Form. Let's create a FormSet
# for adding data. By default, it displays 1 blank form. It can display more,
# but we'll look at how to do so later.
formset = self.make_choiceformset()
self.assertHTMLEqual(str(formset), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="1000" />
<tr><th>Choice:</th><td><input type="text" name="choices-0-choice" /></td></tr>
<tr><th>Votes:</th><td><input type="number" name="choices-0-votes" /></td></tr>""")
# We treat FormSet pretty much like we would treat a normal Form. FormSet has an
# is_valid method, and a cleaned_data or errors attribute depending on whether all
# the forms passed validation. However, unlike a Form instance, cleaned_data and
# errors will be a list of dicts rather than just a single dict.
formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}])
# If a FormSet was not passed any data, its is_valid and has_changed
# methods should return False.
formset = self.make_choiceformset()
self.assertFalse(formset.is_valid())
self.assertFalse(formset.has_changed())
def test_formset_validation(self):
# FormSet instances can also have an error attribute if validation failed for
# any of the forms.
formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'votes': ['This field is required.']}])
def test_formset_has_changed(self):
        # A FormSet instance's has_changed method will be True if any data is
        # passed to its forms, even if the formset didn't validate.
blank_formset = self.make_choiceformset([('', '')])
self.assertFalse(blank_formset.has_changed())
# invalid formset test
invalid_formset = self.make_choiceformset([('Calexico', '')])
self.assertFalse(invalid_formset.is_valid())
self.assertTrue(invalid_formset.has_changed())
# valid formset test
valid_formset = self.make_choiceformset([('Calexico', '100')])
self.assertTrue(valid_formset.is_valid())
self.assertTrue(valid_formset.has_changed())
def test_formset_initial_data(self):
# We can also prefill a FormSet with existing data by providing an ``initial``
# argument to the constructor. ``initial`` should be a list of dicts. By default,
# an extra blank form is included.
initial = [{'choice': 'Calexico', 'votes': 100}]
formset = self.make_choiceformset(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>""")
# Let's simulate what would happen if we submitted this form.
formset = self.make_choiceformset([('Calexico', '100'), ('', '')], initial_forms=1)
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}, {}])
def test_second_form_partially_filled(self):
# But the second form was blank! Shouldn't we get some errors? No. If we display
# a form as blank, it's ok for it to be submitted as blank. If we fill out even
# one of the fields of a blank form though, it will be validated. We may want to
# required that at least x number of forms are completed, but we'll show how to
# handle that later.
formset = self.make_choiceformset([('Calexico', '100'), ('The Decemberists', '')], initial_forms=1)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{}, {'votes': ['This field is required.']}])
def test_delete_prefilled_data(self):
# If we delete data that was pre-filled, we should get an error. Simply removing
# data from form fields isn't the proper way to delete it. We'll see how to
# handle that case later.
formset = self.make_choiceformset([('', ''), ('', '')], initial_forms=1)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'votes': ['This field is required.'], 'choice': ['This field is required.']}, {}])
def test_displaying_more_than_one_blank_form(self):
# Displaying more than 1 blank form ###########################################
        # We can also display more than 1 empty form at a time. To do so, pass an
        # extra argument to formset_factory.
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>""")
# Since we displayed every form as blank, we will also accept them back as blank.
# This may seem a little strange, but later we will show how to require a minimum
# number of forms to be completed.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': '',
'choices-0-votes': '',
'choices-1-choice': '',
'choices-1-votes': '',
'choices-2-choice': '',
'choices-2-votes': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{}, {}, {}])
def test_min_num_displaying_more_than_one_blank_form(self):
        # We can also display more than 1 empty form by passing the min_num argument
        # to formset_factory. It will (essentially) increment the extra argument.
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=1)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
# Min_num forms are required; extra forms can be empty.
self.assertFalse(formset.forms[0].empty_permitted)
self.assertTrue(formset.forms[1].empty_permitted)
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>""")
def test_min_num_displaying_more_than_one_blank_form_with_zero_extra(self):
        # We can also display more than 1 empty form by passing the min_num argument to formset_factory.
ChoiceFormSet = formset_factory(Choice, extra=0, min_num=3)
formset = ChoiceFormSet(auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" /></li>
<li>Votes: <input type="number" name="choices-0-votes" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>""")
def test_single_form_completed(self):
# We can just fill out one of the forms.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-1-choice': '',
'choices-1-votes': '',
'choices-2-choice': '',
'choices-2-votes': '',
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'choice': 'Calexico'}, {}, {}])
def test_formset_validate_max_flag(self):
# If validate_max is set and max_num is less than TOTAL_FORMS in the
        # data, then validation fails. MAX_NUM_FORMS in the data is
# irrelevant here (it's output as a hint for the client but its
# value in the returned data is not checked)
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '2', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 1 or fewer forms.'])
def test_formset_validate_min_flag(self):
# If validate_min is set and min_num is more than TOTAL_FORMS in the
        # data, then validation fails. MIN_NUM_FORMS in the data is
# irrelevant here (it's output as a hint for the client but its
# value in the returned data is not checked)
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, min_num=3, validate_min=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 3 or more forms.'])
def test_second_form_partially_filled_2(self):
# And once again, if we try to partially complete a form, validation will fail.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-1-choice': 'The Decemberists',
'choices-1-votes': '', # missing value
'choices-2-choice': '',
'choices-2-votes': '',
}
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{}, {'votes': ['This field is required.']}, {}])
def test_more_initial_data(self):
# The extra argument also works when the formset is pre-filled with initial
# data.
initial = [{'choice': 'Calexico', 'votes': 100}]
ChoiceFormSet = formset_factory(Choice, extra=3)
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Choice: <input type="text" name="choices-1-choice" /></li>
<li>Votes: <input type="number" name="choices-1-votes" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Choice: <input type="text" name="choices-3-choice" /></li>
<li>Votes: <input type="number" name="choices-3-votes" /></li>""")
# Make sure retrieving an empty form works, and it shows up in the form list
self.assertTrue(formset.empty_form.empty_permitted)
self.assertHTMLEqual(formset.empty_form.as_ul(), """<li>Choice: <input type="text" name="choices-__prefix__-choice" /></li>
<li>Votes: <input type="number" name="choices-__prefix__-votes" /></li>""")
def test_formset_with_deletion(self):
# FormSets with deletion ######################################################
# We can easily add deletion ability to a FormSet with an argument to
# formset_factory. This will add a boolean field to each form instance. When
# that boolean field is True, the form will be in formset.deleted_forms
ChoiceFormSet = formset_factory(Choice, can_delete=True)
initial = [{'choice': 'Calexico', 'votes': 100}, {'choice': 'Fergie', 'votes': 900}]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Delete: <input type="checkbox" name="choices-0-DELETE" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Delete: <input type="checkbox" name="choices-1-DELETE" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Delete: <input type="checkbox" name="choices-2-DELETE" /></li>""")
# To delete something, we just need to set that form's special delete field to
# 'on'. Let's go ahead and delete Fergie.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '2', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-DELETE': '',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-DELETE': 'on',
'choices-2-choice': '',
'choices-2-votes': '',
'choices-2-DELETE': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertEqual([form.cleaned_data for form in formset.forms], [{'votes': 100, 'DELETE': False, 'choice': 'Calexico'}, {'votes': 900, 'DELETE': True, 'choice': 'Fergie'}, {}])
self.assertEqual([form.cleaned_data for form in formset.deleted_forms], [{'votes': 900, 'DELETE': True, 'choice': 'Fergie'}])
# If we fill a form with something and then we check the can_delete checkbox for
# that form, that form's errors should not make the entire formset invalid since
# it's going to be deleted.
class CheckForm(Form):
field = IntegerField(min_value=100)
data = {
'check-TOTAL_FORMS': '3', # the number of forms rendered
'check-INITIAL_FORMS': '2', # the number of forms with initial data
            'check-MIN_NUM_FORMS': '0',  # min number of forms
'check-MAX_NUM_FORMS': '0', # max number of forms
'check-0-field': '200',
'check-0-DELETE': '',
'check-1-field': '50',
'check-1-DELETE': 'on',
'check-2-field': '',
'check-2-DELETE': '',
}
CheckFormSet = formset_factory(CheckForm, can_delete=True)
formset = CheckFormSet(data, prefix='check')
self.assertTrue(formset.is_valid())
# If we remove the deletion flag now we will have our validation back.
data['check-1-DELETE'] = ''
formset = CheckFormSet(data, prefix='check')
self.assertFalse(formset.is_valid())
# Should be able to get deleted_forms from a valid formset even if a
# deleted form would have been invalid.
class Person(Form):
name = CharField()
PeopleForm = formset_factory(
form=Person,
can_delete=True)
p = PeopleForm(
{'form-0-name': '', 'form-0-DELETE': 'on', # no name!
'form-TOTAL_FORMS': 1, 'form-INITIAL_FORMS': 1,
'form-MIN_NUM_FORMS': 0, 'form-MAX_NUM_FORMS': 1})
self.assertTrue(p.is_valid())
self.assertEqual(len(p.deleted_forms), 1)
def test_formsets_with_ordering(self):
# FormSets with ordering ######################################################
# We can also add ordering ability to a FormSet with an argument to
# formset_factory. This will add an integer field to each form instance. When
# form validation succeeds, [form.cleaned_data for form in formset.forms] will have the data in the correct
# order specified by the ordering fields. If a number is duplicated in the set
# of ordering fields, for instance form 0 and form 3 are both marked as 1, then
        # the form index is used as a secondary ordering criterion. To put something
        # at the front of the list, you'd need to set its order to 0. (A small sketch
        # of this ordering rule follows this test.)
ChoiceFormSet = formset_factory(Choice, can_order=True)
initial = [{'choice': 'Calexico', 'votes': 100}, {'choice': 'Fergie', 'votes': 900}]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Order: <input type="number" name="choices-0-ORDER" value="1" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Order: <input type="number" name="choices-1-ORDER" value="2" /></li>
<li>Choice: <input type="text" name="choices-2-choice" /></li>
<li>Votes: <input type="number" name="choices-2-votes" /></li>
<li>Order: <input type="number" name="choices-2-ORDER" /></li>""")
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '2', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '0',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 500, 'ORDER': 0, 'choice': 'The Decemberists'},
{'votes': 100, 'ORDER': 1, 'choice': 'Calexico'},
{'votes': 900, 'ORDER': 2, 'choice': 'Fergie'},
])
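    # A minimal sketch (not part of the original test assertions) of the ordering
    # rule described in test_formsets_with_ordering above: forms sort by their
    # ORDER value, blank ORDER values sink below everything else, and the original
    # form index breaks ties. The (ORDER, index) pairs here are illustrative only.
    def _ordered_forms_key_sketch(self):
        entries = [(1, 0), (None, 1), (1, 2), (0, 3)]  # (ORDER, form index)
        ordered = sorted(entries, key=lambda p: (p[0] is None, p[0], p[1]))
        # expected result: [(0, 3), (1, 0), (1, 2), (None, 1)]
        return ordered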
def test_empty_ordered_fields(self):
# Ordering fields are allowed to be left blank, and if they *are* left blank,
# they will be sorted below everything else.
data = {
'choices-TOTAL_FORMS': '4', # the number of forms rendered
'choices-INITIAL_FORMS': '3', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '',
'choices-3-choice': 'Basia Bulat',
'choices-3-votes': '50',
'choices-3-ORDER': '',
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 100, 'ORDER': 1, 'choice': 'Calexico'},
{'votes': 900, 'ORDER': 2, 'choice': 'Fergie'},
{'votes': 500, 'ORDER': None, 'choice': 'The Decemberists'},
{'votes': 50, 'ORDER': None, 'choice': 'Basia Bulat'},
])
def test_ordering_blank_fieldsets(self):
# Ordering should work with blank fieldsets.
data = {
'choices-TOTAL_FORMS': '3', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
}
ChoiceFormSet = formset_factory(Choice, can_order=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [])
def test_formset_with_ordering_and_deletion(self):
# FormSets with ordering + deletion ###########################################
# Let's try throwing ordering and deletion into the same form.
ChoiceFormSet = formset_factory(Choice, can_order=True, can_delete=True)
initial = [
{'choice': 'Calexico', 'votes': 100},
{'choice': 'Fergie', 'votes': 900},
{'choice': 'The Decemberists', 'votes': 500},
]
formset = ChoiceFormSet(initial=initial, auto_id=False, prefix='choices')
form_output = []
for form in formset.forms:
form_output.append(form.as_ul())
self.assertHTMLEqual('\n'.join(form_output), """<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>
<li>Order: <input type="number" name="choices-0-ORDER" value="1" /></li>
<li>Delete: <input type="checkbox" name="choices-0-DELETE" /></li>
<li>Choice: <input type="text" name="choices-1-choice" value="Fergie" /></li>
<li>Votes: <input type="number" name="choices-1-votes" value="900" /></li>
<li>Order: <input type="number" name="choices-1-ORDER" value="2" /></li>
<li>Delete: <input type="checkbox" name="choices-1-DELETE" /></li>
<li>Choice: <input type="text" name="choices-2-choice" value="The Decemberists" /></li>
<li>Votes: <input type="number" name="choices-2-votes" value="500" /></li>
<li>Order: <input type="number" name="choices-2-ORDER" value="3" /></li>
<li>Delete: <input type="checkbox" name="choices-2-DELETE" /></li>
<li>Choice: <input type="text" name="choices-3-choice" /></li>
<li>Votes: <input type="number" name="choices-3-votes" /></li>
<li>Order: <input type="number" name="choices-3-ORDER" /></li>
<li>Delete: <input type="checkbox" name="choices-3-DELETE" /></li>""")
# Let's delete Fergie, and put The Decemberists ahead of Calexico.
data = {
'choices-TOTAL_FORMS': '4', # the number of forms rendered
'choices-INITIAL_FORMS': '3', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
'choices-0-ORDER': '1',
'choices-0-DELETE': '',
'choices-1-choice': 'Fergie',
'choices-1-votes': '900',
'choices-1-ORDER': '2',
'choices-1-DELETE': 'on',
'choices-2-choice': 'The Decemberists',
'choices-2-votes': '500',
'choices-2-ORDER': '0',
'choices-2-DELETE': '',
'choices-3-choice': '',
'choices-3-votes': '',
'choices-3-ORDER': '',
'choices-3-DELETE': '',
}
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
form_output = []
for form in formset.ordered_forms:
form_output.append(form.cleaned_data)
self.assertEqual(form_output, [
{'votes': 500, 'DELETE': False, 'ORDER': 0, 'choice': 'The Decemberists'},
{'votes': 100, 'DELETE': False, 'ORDER': 1, 'choice': 'Calexico'},
])
self.assertEqual([form.cleaned_data for form in formset.deleted_forms], [{'votes': 900, 'DELETE': True, 'ORDER': 2, 'choice': 'Fergie'}])
def test_invalid_deleted_form_with_ordering(self):
# Should be able to get ordered forms from a valid formset even if a
# deleted form would have been invalid.
class Person(Form):
name = CharField()
PeopleForm = formset_factory(form=Person, can_delete=True, can_order=True)
p = PeopleForm({
'form-0-name': '',
'form-0-DELETE': 'on', # no name!
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 1,
'form-MIN_NUM_FORMS': 0,
'form-MAX_NUM_FORMS': 1
})
self.assertTrue(p.is_valid())
self.assertEqual(p.ordered_forms, [])
def test_clean_hook(self):
# FormSet clean hook ##########################################################
# FormSets have a hook for doing extra validation that shouldn't be tied to any
# particular form. It follows the same pattern as the clean hook on Forms.
        # We start out with some duplicate data.
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Gin and Tonic',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertFalse(formset.is_valid())
# Any errors raised by formset.clean() are available via the
# formset.non_form_errors() method.
for error in formset.non_form_errors():
self.assertEqual(str(error), 'You may only specify a drink once.')
# Make sure we didn't break the valid case.
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Bloody Mary',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertTrue(formset.is_valid())
self.assertEqual(formset.non_form_errors(), [])
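    # A minimal sketch, assuming this is roughly what the FavoriteDrinksFormSet
    # exercised above does in its clean() hook (the real formset is defined near
    # the top of this module): formset-wide validation that isn't tied to any one
    # form raises ValidationError, which then surfaces via non_form_errors().
    class _DuplicateDrinkFormSetSketch(BaseFormSet):
        def clean(self):
            seen = []
            for drink in self.cleaned_data:
                if drink.get('name') in seen:
                    raise ValidationError('You may only specify a drink once.')
                seen.append(drink.get('name'))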
def test_limiting_max_forms(self):
# Limiting the maximum number of forms ########################################
# Base case for max_num.
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the extra parameter.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>
<tr><th><label for="id_form-2-name">Name:</label></th><td><input type="text" name="form-2-name" id="id_form-2-name" /></td></tr>""")
# If max_num is 0 then no form is rendered at all.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3, max_num=0)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), "")
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=5, max_num=2)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
# Ensure that max_num has no effect when extra is less than max_num.
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=2)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" id="id_form-0-name" /></td></tr>""")
def test_max_num_with_initial_data(self):
# max_num with initial data
# When not passed, max_num will take a high default value, leaving the
# number of forms only controlled by the value of the initial and extra
# parameters.
initial = [
{'name': 'Fernet and Coke'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" value="Fernet and Coke" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
def test_max_num_zero(self):
# If max_num is 0 then no form is rendered at all, regardless of extra,
# unless initial data is present. (This changed in the patch for bug
# 20084 -- previously max_num=0 trumped initial data)
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=0)
formset = LimitedFavoriteDrinkFormSet()
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), "")
# test that initial trumps max_num
initial = [
{'name': 'Fernet and Coke'},
{'name': 'Bloody Mary'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=0)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input id="id_form-0-name" name="form-0-name" type="text" value="Fernet and Coke" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input id="id_form-1-name" name="form-1-name" type="text" value="Bloody Mary" /></td></tr>""")
def test_more_initial_than_max_num(self):
# More initial forms than max_num now results in all initial forms
# being displayed (but no extra forms). This behavior was changed
# from max_num taking precedence in the patch for #20084
initial = [
{'name': 'Gin Tonic'},
{'name': 'Bloody Mary'},
{'name': 'Jack and Coke'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=1, max_num=2)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input id="id_form-0-name" name="form-0-name" type="text" value="Gin Tonic" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input id="id_form-1-name" name="form-1-name" type="text" value="Bloody Mary" /></td></tr>
<tr><th><label for="id_form-2-name">Name:</label></th><td><input id="id_form-2-name" name="form-2-name" type="text" value="Jack and Coke" /></td></tr>""")
# One form from initial and extra=3 with max_num=2 should result in the one
# initial form and one extra.
initial = [
{'name': 'Gin Tonic'},
]
LimitedFavoriteDrinkFormSet = formset_factory(FavoriteDrinkForm, extra=3, max_num=2)
formset = LimitedFavoriteDrinkFormSet(initial=initial)
form_output = []
for form in formset.forms:
form_output.append(str(form))
self.assertHTMLEqual('\n'.join(form_output), """<tr><th><label for="id_form-0-name">Name:</label></th><td><input type="text" name="form-0-name" value="Gin Tonic" id="id_form-0-name" /></td></tr>
<tr><th><label for="id_form-1-name">Name:</label></th><td><input type="text" name="form-1-name" id="id_form-1-name" /></td></tr>""")
def test_regression_6926(self):
# Regression test for #6926 ##################################################
# Make sure the management form has the correct prefix.
formset = FavoriteDrinksFormSet()
self.assertEqual(formset.management_form.prefix, 'form')
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MIN_NUM_FORMS': '0',
'form-MAX_NUM_FORMS': '0',
}
formset = FavoriteDrinksFormSet(data=data)
self.assertEqual(formset.management_form.prefix, 'form')
formset = FavoriteDrinksFormSet(initial={})
self.assertEqual(formset.management_form.prefix, 'form')
def test_regression_12878(self):
# Regression test for #12878 #################################################
data = {
'drinks-TOTAL_FORMS': '2', # the number of forms rendered
'drinks-INITIAL_FORMS': '0', # the number of forms with initial data
'drinks-MIN_NUM_FORMS': '0', # min number of forms
'drinks-MAX_NUM_FORMS': '0', # max number of forms
'drinks-0-name': 'Gin and Tonic',
'drinks-1-name': 'Gin and Tonic',
}
formset = FavoriteDrinksFormSet(data, prefix='drinks')
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['You may only specify a drink once.'])
def test_formset_iteration(self):
# Regression tests for #16455 -- formset instances are iterable
ChoiceFormset = formset_factory(Choice, extra=3)
formset = ChoiceFormset()
# confirm iterated formset yields formset.forms
forms = list(formset)
self.assertEqual(forms, formset.forms)
self.assertEqual(len(formset), len(forms))
# confirm indexing of formset
self.assertEqual(formset[0], forms[0])
try:
formset[3]
self.fail('Requesting an invalid formset index should raise an exception')
except IndexError:
pass
# Formsets can override the default iteration order
class BaseReverseFormSet(BaseFormSet):
def __iter__(self):
return reversed(self.forms)
def __getitem__(self, idx):
return super(BaseReverseFormSet, self).__getitem__(len(self) - idx - 1)
ReverseChoiceFormset = formset_factory(Choice, BaseReverseFormSet, extra=3)
reverse_formset = ReverseChoiceFormset()
# confirm that __iter__ modifies rendering order
# compare forms from "reverse" formset with forms from original formset
self.assertEqual(str(reverse_formset[0]), str(forms[-1]))
self.assertEqual(str(reverse_formset[1]), str(forms[-2]))
self.assertEqual(len(reverse_formset), len(forms))
def test_formset_nonzero(self):
"""
Formsets with no forms should still evaluate as true.
Regression test for #15722
"""
ChoiceFormset = formset_factory(Choice, extra=0)
formset = ChoiceFormset()
self.assertEqual(len(formset.forms), 0)
self.assertTrue(formset)
def test_formset_splitdatetimefield(self):
"""
Formset should also work with SplitDateTimeField(initial=datetime.datetime.now).
Regression test for #18709.
"""
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-0-when_0': '1904-06-16',
'form-0-when_1': '15:51:33',
}
formset = SplitDateTimeFormSet(data)
self.assertTrue(formset.is_valid())
def test_formset_error_class(self):
        # Regression tests for #16479 -- a formset's forms should use the error_class
        # passed to the formset instead of the default ErrorList
class CustomErrorList(ErrorList):
pass
formset = FavoriteDrinksFormSet(error_class=CustomErrorList)
self.assertEqual(formset.forms[0].error_class, CustomErrorList)
def test_formset_calls_forms_is_valid(self):
# Regression tests for #18574 -- make sure formsets call
# is_valid() on each form.
class AnotherChoice(Choice):
def is_valid(self):
self.is_valid_called = True
return super(AnotherChoice, self).is_valid()
AnotherChoiceFormSet = formset_factory(AnotherChoice)
data = {
'choices-TOTAL_FORMS': '1', # number of forms rendered
'choices-INITIAL_FORMS': '0', # number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
}
formset = AnotherChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertTrue(formset.is_valid())
self.assertTrue(all(form.is_valid_called for form in formset.forms))
def test_hard_limit_on_instantiated_forms(self):
"""A formset has a hard limit on the number of forms instantiated."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 2
ChoiceFormSet = formset_factory(Choice, max_num=1)
# someone fiddles with the mgmt form data...
formset = ChoiceFormSet(
{
'choices-TOTAL_FORMS': '4',
'choices-INITIAL_FORMS': '0',
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '4',
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
'choices-2-choice': 'Two',
'choices-2-votes': '2',
'choices-3-choice': 'Three',
'choices-3-votes': '3',
},
prefix='choices',
)
# But we still only instantiate 3 forms
self.assertEqual(len(formset.forms), 3)
# and the formset isn't valid
self.assertFalse(formset.is_valid())
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_increase_hard_limit(self):
"""Can increase the built-in forms limit via a higher max_num."""
# reduce the default limit of 1000 temporarily for testing
_old_DEFAULT_MAX_NUM = formsets.DEFAULT_MAX_NUM
try:
formsets.DEFAULT_MAX_NUM = 3
# for this form, we want a limit of 4
ChoiceFormSet = formset_factory(Choice, max_num=4)
formset = ChoiceFormSet(
{
'choices-TOTAL_FORMS': '4',
'choices-INITIAL_FORMS': '0',
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '4',
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
'choices-2-choice': 'Two',
'choices-2-votes': '2',
'choices-3-choice': 'Three',
'choices-3-votes': '3',
},
prefix='choices',
)
# Four forms are instantiated and no exception is raised
self.assertEqual(len(formset.forms), 4)
finally:
formsets.DEFAULT_MAX_NUM = _old_DEFAULT_MAX_NUM
def test_non_form_errors_run_full_clean(self):
# Regression test for #11160
# If non_form_errors() is called without calling is_valid() first,
# it should ensure that full_clean() is called.
class BaseCustomFormSet(BaseFormSet):
def clean(self):
raise ValidationError("This is a non-form error")
ChoiceFormSet = formset_factory(Choice, formset=BaseCustomFormSet)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertIsInstance(formset.non_form_errors(), ErrorList)
self.assertEqual(list(formset.non_form_errors()),
['This is a non-form error'])
def test_validate_max_ignores_forms_marked_for_deletion(self):
class CheckForm(Form):
field = IntegerField()
data = {
'check-TOTAL_FORMS': '2',
'check-INITIAL_FORMS': '0',
'check-MAX_NUM_FORMS': '1',
'check-0-field': '200',
'check-0-DELETE': '',
'check-1-field': '50',
'check-1-DELETE': 'on',
}
CheckFormSet = formset_factory(CheckForm, max_num=1, validate_max=True,
can_delete=True)
formset = CheckFormSet(data, prefix='check')
self.assertTrue(formset.is_valid())
def test_formset_total_error_count(self):
"""A valid formset should have 0 total errors."""
data = [ # formset_data, expected error count
([('Calexico', '100')], 0),
([('Calexico', '')], 1),
([('', 'invalid')], 2),
([('Calexico', '100'), ('Calexico', '')], 1),
([('Calexico', ''), ('Calexico', '')], 2),
]
for formset_data, expected_error_count in data:
formset = self.make_choiceformset(formset_data)
self.assertEqual(formset.total_error_count(), expected_error_count)
def test_formset_total_error_count_with_non_form_errors(self):
data = {
'choices-TOTAL_FORMS': '2', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MAX_NUM_FORMS': '2', # max number of forms - should be ignored
'choices-0-choice': 'Zero',
'choices-0-votes': '0',
'choices-1-choice': 'One',
'choices-1-votes': '1',
}
ChoiceFormSet = formset_factory(Choice, extra=1, max_num=1, validate_max=True)
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertEqual(formset.total_error_count(), 1)
data['choices-1-votes'] = ''
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertEqual(formset.total_error_count(), 2)
def test_html_safe(self):
formset = self.make_choiceformset()
self.assertTrue(hasattr(formset, '__html__'))
self.assertEqual(force_text(formset), formset.__html__())
data = {
'choices-TOTAL_FORMS': '1', # the number of forms rendered
'choices-INITIAL_FORMS': '0', # the number of forms with initial data
'choices-MIN_NUM_FORMS': '0', # min number of forms
'choices-MAX_NUM_FORMS': '0', # max number of forms
'choices-0-choice': 'Calexico',
'choices-0-votes': '100',
}
class Choice(Form):
choice = CharField()
votes = IntegerField()
ChoiceFormSet = formset_factory(Choice)
class FormsetAsFooTests(TestCase):
def test_as_table(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_table(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<tr><th>Choice:</th><td><input type="text" name="choices-0-choice" value="Calexico" /></td></tr>
<tr><th>Votes:</th><td><input type="number" name="choices-0-votes" value="100" /></td></tr>""")
def test_as_p(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_p(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<p>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></p>
<p>Votes: <input type="number" name="choices-0-votes" value="100" /></p>""")
def test_as_ul(self):
formset = ChoiceFormSet(data, auto_id=False, prefix='choices')
self.assertHTMLEqual(formset.as_ul(), """<input type="hidden" name="choices-TOTAL_FORMS" value="1" /><input type="hidden" name="choices-INITIAL_FORMS" value="0" /><input type="hidden" name="choices-MIN_NUM_FORMS" value="0" /><input type="hidden" name="choices-MAX_NUM_FORMS" value="0" />
<li>Choice: <input type="text" name="choices-0-choice" value="Calexico" /></li>
<li>Votes: <input type="number" name="choices-0-votes" value="100" /></li>""")
# Regression test for #11418 #################################################
class ArticleForm(Form):
title = CharField()
pub_date = DateField()
ArticleFormSet = formset_factory(ArticleForm)
class TestIsBoundBehavior(TestCase):
def test_no_data_raises_validation_error(self):
with self.assertRaises(ValidationError):
ArticleFormSet({}).is_valid()
def test_with_management_data_attrs_work_fine(self):
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
}
formset = ArticleFormSet(data)
self.assertEqual(0, formset.initial_form_count())
self.assertEqual(1, formset.total_form_count())
self.assertTrue(formset.is_bound)
self.assertTrue(formset.forms[0].is_bound)
self.assertTrue(formset.is_valid())
self.assertTrue(formset.forms[0].is_valid())
self.assertEqual([{}], formset.cleaned_data)
def test_form_errors_are_caught_by_formset(self):
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-0-title': 'Test',
'form-0-pub_date': '1904-06-16',
'form-1-title': 'Test',
'form-1-pub_date': '', # <-- this date is missing but required
}
formset = ArticleFormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual([{}, {'pub_date': ['This field is required.']}], formset.errors)
def test_empty_forms_are_unbound(self):
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-0-title': 'Test',
'form-0-pub_date': '1904-06-16',
}
unbound_formset = ArticleFormSet()
bound_formset = ArticleFormSet(data)
empty_forms = []
empty_forms.append(unbound_formset.empty_form)
empty_forms.append(bound_formset.empty_form)
# Empty forms should be unbound
self.assertFalse(empty_forms[0].is_bound)
self.assertFalse(empty_forms[1].is_bound)
# The empty forms should be equal.
self.assertHTMLEqual(empty_forms[0].as_p(), empty_forms[1].as_p())
class TestEmptyFormSet(TestCase):
def test_empty_formset_is_valid(self):
"""Test that an empty formset still calls clean()"""
EmptyFsetWontValidateFormset = formset_factory(FavoriteDrinkForm, extra=0, formset=EmptyFsetWontValidate)
formset = EmptyFsetWontValidateFormset(data={'form-INITIAL_FORMS': '0', 'form-TOTAL_FORMS': '0'}, prefix="form")
formset2 = EmptyFsetWontValidateFormset(data={'form-INITIAL_FORMS': '0', 'form-TOTAL_FORMS': '1', 'form-0-name': 'bah'}, prefix="form")
self.assertFalse(formset.is_valid())
self.assertFalse(formset2.is_valid())
def test_empty_formset_media(self):
"""Make sure media is available on empty formset, refs #19545"""
class MediaForm(Form):
class Media:
js = ('some-file.js',)
self.assertIn('some-file.js', str(formset_factory(MediaForm, extra=0)().media))
def test_empty_formset_is_multipart(self):
"""Make sure `is_multipart()` works with empty formset, refs #19545"""
class FileForm(Form):
file = FileField()
self.assertTrue(formset_factory(FileForm, extra=0)().is_multipart())
|
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###################################################################
# WARNING!
#
# Do not edit this file directly. This file should be generated by
# running the command "tox -e genopts" any time a config option
# has been added, changed, or removed.
###################################################################
import itertools
from cinder import objects
objects.register_all()
from cinder.api import common as cinder_api_common
from cinder.api.contrib import types_extra_specs as \
cinder_api_contrib_typesextraspecs
from cinder.api.middleware import auth as cinder_api_middleware_auth
from cinder.api.views import versions as cinder_api_views_versions
from cinder.backup import api as cinder_backup_api
from cinder.backup import chunkeddriver as cinder_backup_chunkeddriver
from cinder.backup import driver as cinder_backup_driver
from cinder.backup.drivers import ceph as cinder_backup_drivers_ceph
from cinder.backup.drivers import gcs as cinder_backup_drivers_gcs
from cinder.backup.drivers import glusterfs as cinder_backup_drivers_glusterfs
from cinder.backup.drivers import nfs as cinder_backup_drivers_nfs
from cinder.backup.drivers import posix as cinder_backup_drivers_posix
from cinder.backup.drivers import swift as cinder_backup_drivers_swift
from cinder.backup.drivers import tsm as cinder_backup_drivers_tsm
from cinder.backup import manager as cinder_backup_manager
from cinder.cmd import backup as cinder_cmd_backup
from cinder.cmd import volume as cinder_cmd_volume
from cinder.common import config as cinder_common_config
import cinder.compute
from cinder.compute import nova as cinder_compute_nova
from cinder import context as cinder_context
from cinder import coordination as cinder_coordination
from cinder.db import api as cinder_db_api
from cinder.db import base as cinder_db_base
from cinder.image import glance as cinder_image_glance
from cinder.image import image_utils as cinder_image_imageutils
from cinder.keymgr import conf_key_mgr as cinder_keymgr_confkeymgr
from cinder.message import api as cinder_message_api
from cinder import quota as cinder_quota
from cinder.scheduler import driver as cinder_scheduler_driver
from cinder.scheduler import host_manager as cinder_scheduler_hostmanager
from cinder.scheduler import manager as cinder_scheduler_manager
from cinder.scheduler import scheduler_options as \
cinder_scheduler_scheduleroptions
from cinder.scheduler.weights import capacity as \
cinder_scheduler_weights_capacity
from cinder.scheduler.weights import volume_number as \
cinder_scheduler_weights_volumenumber
from cinder import service as cinder_service
from cinder import service_auth as cinder_serviceauth
from cinder import ssh_utils as cinder_sshutils
from cinder.transfer import api as cinder_transfer_api
from cinder.volume import api as cinder_volume_api
from cinder.volume import driver as cinder_volume_driver
from cinder.volume.drivers.coprhd import common as \
cinder_volume_drivers_coprhd_common
from cinder.volume.drivers.coprhd import scaleio as \
cinder_volume_drivers_coprhd_scaleio
from cinder.volume.drivers.datacore import driver as \
cinder_volume_drivers_datacore_driver
from cinder.volume.drivers.datacore import iscsi as \
cinder_volume_drivers_datacore_iscsi
from cinder.volume.drivers.datera import datera_iscsi as \
cinder_volume_drivers_datera_dateraiscsi
from cinder.volume.drivers.dell_emc import ps as \
cinder_volume_drivers_dell_emc_ps
from cinder.volume.drivers.dell_emc.sc import storagecenter_common as \
cinder_volume_drivers_dell_emc_sc_storagecentercommon
from cinder.volume.drivers.dell_emc.scaleio import driver as \
cinder_volume_drivers_dell_emc_scaleio_driver
from cinder.volume.drivers.dell_emc.unity import driver as \
cinder_volume_drivers_dell_emc_unity_driver
from cinder.volume.drivers.dell_emc.vmax import common as \
cinder_volume_drivers_dell_emc_vmax_common
from cinder.volume.drivers.dell_emc.vnx import common as \
cinder_volume_drivers_dell_emc_vnx_common
from cinder.volume.drivers.dell_emc import xtremio as \
cinder_volume_drivers_dell_emc_xtremio
from cinder.volume.drivers.disco import disco as \
cinder_volume_drivers_disco_disco
from cinder.volume.drivers import drbdmanagedrv as \
cinder_volume_drivers_drbdmanagedrv
from cinder.volume.drivers.fujitsu import eternus_dx_common as \
cinder_volume_drivers_fujitsu_eternusdxcommon
from cinder.volume.drivers.fusionstorage import dsware as \
cinder_volume_drivers_fusionstorage_dsware
from cinder.volume.drivers import hgst as cinder_volume_drivers_hgst
from cinder.volume.drivers.hpe import hpe_3par_common as \
cinder_volume_drivers_hpe_hpe3parcommon
from cinder.volume.drivers.hpe import hpe_lefthand_iscsi as \
cinder_volume_drivers_hpe_hpelefthandiscsi
from cinder.volume.drivers.huawei import huawei_driver as \
cinder_volume_drivers_huawei_huaweidriver
from cinder.volume.drivers.ibm import flashsystem_common as \
cinder_volume_drivers_ibm_flashsystemcommon
from cinder.volume.drivers.ibm import flashsystem_fc as \
cinder_volume_drivers_ibm_flashsystemfc
from cinder.volume.drivers.ibm import flashsystem_iscsi as \
cinder_volume_drivers_ibm_flashsystemiscsi
from cinder.volume.drivers.ibm import gpfs as cinder_volume_drivers_ibm_gpfs
from cinder.volume.drivers.ibm.ibm_storage import ds8k_proxy as \
cinder_volume_drivers_ibm_ibm_storage_ds8kproxy
from cinder.volume.drivers.ibm.ibm_storage import ibm_storage as \
cinder_volume_drivers_ibm_ibm_storage_ibmstorage
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_common as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvccommon
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_fc as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvcfc
from cinder.volume.drivers.ibm.storwize_svc import storwize_svc_iscsi as \
cinder_volume_drivers_ibm_storwize_svc_storwizesvciscsi
from cinder.volume.drivers import infinidat as cinder_volume_drivers_infinidat
from cinder.volume.drivers.inspur.instorage import instorage_common as \
cinder_volume_drivers_inspur_instorage_instoragecommon
from cinder.volume.drivers.inspur.instorage import instorage_iscsi as \
cinder_volume_drivers_inspur_instorage_instorageiscsi
from cinder.volume.drivers.kaminario import kaminario_common as \
cinder_volume_drivers_kaminario_kaminariocommon
from cinder.volume.drivers.lenovo import lenovo_common as \
cinder_volume_drivers_lenovo_lenovocommon
from cinder.volume.drivers import lvm as cinder_volume_drivers_lvm
from cinder.volume.drivers.netapp import options as \
cinder_volume_drivers_netapp_options
from cinder.volume.drivers.nexenta import options as \
cinder_volume_drivers_nexenta_options
from cinder.volume.drivers import nfs as cinder_volume_drivers_nfs
from cinder.volume.drivers import nimble as cinder_volume_drivers_nimble
from cinder.volume.drivers.prophetstor import options as \
cinder_volume_drivers_prophetstor_options
from cinder.volume.drivers import pure as cinder_volume_drivers_pure
from cinder.volume.drivers import qnap as cinder_volume_drivers_qnap
from cinder.volume.drivers import quobyte as cinder_volume_drivers_quobyte
from cinder.volume.drivers import rbd as cinder_volume_drivers_rbd
from cinder.volume.drivers import remotefs as cinder_volume_drivers_remotefs
from cinder.volume.drivers.san.hp import hpmsa_common as \
cinder_volume_drivers_san_hp_hpmsacommon
from cinder.volume.drivers.san import san as cinder_volume_drivers_san_san
from cinder.volume.drivers import sheepdog as cinder_volume_drivers_sheepdog
from cinder.volume.drivers import solidfire as cinder_volume_drivers_solidfire
from cinder.volume.drivers import storpool as cinder_volume_drivers_storpool
from cinder.volume.drivers.synology import synology_common as \
cinder_volume_drivers_synology_synologycommon
from cinder.volume.drivers import tintri as cinder_volume_drivers_tintri
from cinder.volume.drivers.veritas_access import veritas_iscsi as \
cinder_volume_drivers_veritas_access_veritasiscsi
from cinder.volume.drivers.vmware import vmdk as \
cinder_volume_drivers_vmware_vmdk
from cinder.volume.drivers import vzstorage as cinder_volume_drivers_vzstorage
from cinder.volume.drivers.windows import iscsi as \
cinder_volume_drivers_windows_iscsi
from cinder.volume.drivers.windows import smbfs as \
cinder_volume_drivers_windows_smbfs
from cinder.volume.drivers import zadara as cinder_volume_drivers_zadara
from cinder.volume.drivers.zfssa import zfssaiscsi as \
cinder_volume_drivers_zfssa_zfssaiscsi
from cinder.volume.drivers.zfssa import zfssanfs as \
cinder_volume_drivers_zfssa_zfssanfs
from cinder.volume import manager as cinder_volume_manager
from cinder.wsgi import eventlet_server as cinder_wsgi_eventletserver
from cinder.zonemanager.drivers.brocade import brcd_fabric_opts as \
cinder_zonemanager_drivers_brocade_brcdfabricopts
from cinder.zonemanager.drivers.brocade import brcd_fc_zone_driver as \
cinder_zonemanager_drivers_brocade_brcdfczonedriver
from cinder.zonemanager.drivers.cisco import cisco_fabric_opts as \
cinder_zonemanager_drivers_cisco_ciscofabricopts
from cinder.zonemanager.drivers.cisco import cisco_fc_zone_driver as \
cinder_zonemanager_drivers_cisco_ciscofczonedriver
from cinder.zonemanager import fc_zone_manager as \
cinder_zonemanager_fczonemanager
def list_opts():
return [
('backend',
itertools.chain(
[cinder_cmd_volume.host_opt],
)),
('brcd_fabric_example',
itertools.chain(
cinder_zonemanager_drivers_brocade_brcdfabricopts.
brcd_zone_opts,
)),
('cisco_fabric_example',
itertools.chain(
cinder_zonemanager_drivers_cisco_ciscofabricopts.
cisco_zone_opts,
)),
('coordination',
itertools.chain(
cinder_coordination.coordination_opts,
)),
('DEFAULT',
itertools.chain(
cinder_api_common.api_common_opts,
cinder_api_contrib_typesextraspecs.extraspec_opts,
[cinder_api_middleware_auth.use_forwarded_for_opt],
cinder_api_views_versions.versions_opts,
cinder_backup_api.backup_api_opts,
cinder_backup_chunkeddriver.chunkedbackup_service_opts,
cinder_backup_driver.service_opts,
cinder_backup_drivers_ceph.service_opts,
cinder_backup_drivers_gcs.gcsbackup_service_opts,
cinder_backup_drivers_glusterfs.glusterfsbackup_service_opts,
cinder_backup_drivers_nfs.nfsbackup_service_opts,
cinder_backup_drivers_posix.posixbackup_service_opts,
cinder_backup_drivers_swift.swiftbackup_service_opts,
cinder_backup_drivers_tsm.tsm_opts,
cinder_backup_manager.backup_manager_opts,
[cinder_cmd_backup.backup_workers_opt],
[cinder_cmd_volume.cluster_opt],
cinder_common_config.core_opts,
cinder_common_config.global_opts,
cinder.compute.compute_opts,
cinder_context.context_opts,
cinder_db_api.db_opts,
[cinder_db_base.db_driver_opt],
cinder_image_glance.glance_opts,
cinder_image_glance.glance_core_properties_opts,
cinder_image_imageutils.image_helper_opts,
cinder_message_api.messages_opts,
cinder_quota.quota_opts,
cinder_scheduler_driver.scheduler_driver_opts,
cinder_scheduler_hostmanager.host_manager_opts,
[cinder_scheduler_manager.scheduler_driver_opt],
[cinder_scheduler_scheduleroptions.
scheduler_json_config_location_opt],
cinder_scheduler_weights_capacity.capacity_weight_opts,
cinder_scheduler_weights_volumenumber.
volume_number_weight_opts,
cinder_service.service_opts,
cinder_sshutils.ssh_opts,
cinder_transfer_api.volume_transfer_opts,
[cinder_volume_api.allow_force_upload_opt],
[cinder_volume_api.volume_host_opt],
[cinder_volume_api.volume_same_az_opt],
[cinder_volume_api.az_cache_time_opt],
cinder_volume_driver.volume_opts,
cinder_volume_driver.iser_opts,
cinder_volume_driver.nvmet_opts,
cinder_volume_drivers_datacore_driver.datacore_opts,
cinder_volume_drivers_datacore_iscsi.datacore_iscsi_opts,
cinder_volume_drivers_inspur_instorage_instoragecommon.
instorage_mcs_opts,
cinder_volume_drivers_inspur_instorage_instorageiscsi.
instorage_mcs_iscsi_opts,
cinder_volume_drivers_storpool.storpool_opts,
cinder_volume_drivers_veritas_access_veritasiscsi.VA_VOL_OPTS,
cinder_volume_manager.volume_manager_opts,
cinder_wsgi_eventletserver.socket_opts,
)),
('fc-zone-manager',
itertools.chain(
cinder_zonemanager_drivers_brocade_brcdfczonedriver.brcd_opts,
cinder_zonemanager_drivers_cisco_ciscofczonedriver.cisco_opts,
cinder_zonemanager_fczonemanager.zone_manager_opts,
)),
('key_manager',
itertools.chain(
cinder_keymgr_confkeymgr.key_mgr_opts,
)),
('service_user',
itertools.chain(
cinder_serviceauth.service_user_opts,
)),
('backend_defaults',
itertools.chain(
cinder_volume_driver.volume_opts,
cinder_volume_driver.iser_opts,
cinder_volume_driver.nvmet_opts,
cinder_volume_drivers_coprhd_common.volume_opts,
cinder_volume_drivers_coprhd_scaleio.scaleio_opts,
cinder_volume_drivers_datera_dateraiscsi.d_opts,
cinder_volume_drivers_dell_emc_ps.eqlx_opts,
cinder_volume_drivers_dell_emc_sc_storagecentercommon.
common_opts,
cinder_volume_drivers_dell_emc_scaleio_driver.scaleio_opts,
cinder_volume_drivers_dell_emc_unity_driver.UNITY_OPTS,
cinder_volume_drivers_dell_emc_vmax_common.vmax_opts,
cinder_volume_drivers_dell_emc_vnx_common.VNX_OPTS,
cinder_volume_drivers_dell_emc_xtremio.XTREMIO_OPTS,
cinder_volume_drivers_disco_disco.disco_opts,
cinder_volume_drivers_drbdmanagedrv.drbd_opts,
cinder_volume_drivers_fujitsu_eternusdxcommon.
FJ_ETERNUS_DX_OPT_opts,
cinder_volume_drivers_fusionstorage_dsware.volume_opts,
cinder_volume_drivers_hgst.hgst_opts,
cinder_volume_drivers_hpe_hpe3parcommon.hpe3par_opts,
cinder_volume_drivers_hpe_hpelefthandiscsi.hpelefthand_opts,
cinder_volume_drivers_huawei_huaweidriver.huawei_opts,
cinder_volume_drivers_ibm_flashsystemcommon.flashsystem_opts,
cinder_volume_drivers_ibm_flashsystemfc.flashsystem_fc_opts,
cinder_volume_drivers_ibm_flashsystemiscsi.
flashsystem_iscsi_opts,
cinder_volume_drivers_ibm_gpfs.gpfs_opts,
cinder_volume_drivers_ibm_gpfs.gpfs_remote_ssh_opts,
cinder_volume_drivers_ibm_ibm_storage_ds8kproxy.ds8k_opts,
cinder_volume_drivers_ibm_ibm_storage_ibmstorage.driver_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvccommon.
storwize_svc_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvcfc.
storwize_svc_fc_opts,
cinder_volume_drivers_ibm_storwize_svc_storwizesvciscsi.
storwize_svc_iscsi_opts,
cinder_volume_drivers_infinidat.infinidat_opts,
cinder_volume_drivers_kaminario_kaminariocommon.
kaminario_opts,
cinder_volume_drivers_lenovo_lenovocommon.common_opts,
cinder_volume_drivers_lenovo_lenovocommon.iscsi_opts,
cinder_volume_drivers_lvm.volume_opts,
cinder_volume_drivers_netapp_options.netapp_proxy_opts,
cinder_volume_drivers_netapp_options.netapp_connection_opts,
cinder_volume_drivers_netapp_options.netapp_transport_opts,
cinder_volume_drivers_netapp_options.netapp_basicauth_opts,
cinder_volume_drivers_netapp_options.netapp_cluster_opts,
cinder_volume_drivers_netapp_options.netapp_provisioning_opts,
cinder_volume_drivers_netapp_options.netapp_img_cache_opts,
cinder_volume_drivers_netapp_options.netapp_eseries_opts,
cinder_volume_drivers_netapp_options.netapp_nfs_extra_opts,
cinder_volume_drivers_netapp_options.netapp_san_opts,
cinder_volume_drivers_netapp_options.netapp_replication_opts,
cinder_volume_drivers_nexenta_options.NEXENTA_CONNECTION_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_ISCSI_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_DATASET_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_NFS_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_RRMGR_OPTS,
cinder_volume_drivers_nexenta_options.NEXENTA_EDGE_OPTS,
cinder_volume_drivers_nfs.nfs_opts,
cinder_volume_drivers_nimble.nimble_opts,
cinder_volume_drivers_prophetstor_options.DPL_OPTS,
cinder_volume_drivers_pure.PURE_OPTS,
cinder_volume_drivers_qnap.qnap_opts,
cinder_volume_drivers_quobyte.volume_opts,
cinder_volume_drivers_rbd.RBD_OPTS,
cinder_volume_drivers_remotefs.nas_opts,
cinder_volume_drivers_remotefs.volume_opts,
cinder_volume_drivers_san_hp_hpmsacommon.common_opts,
cinder_volume_drivers_san_hp_hpmsacommon.iscsi_opts,
cinder_volume_drivers_san_san.san_opts,
cinder_volume_drivers_sheepdog.sheepdog_opts,
cinder_volume_drivers_solidfire.sf_opts,
cinder_volume_drivers_synology_synologycommon.cinder_opts,
cinder_volume_drivers_tintri.tintri_opts,
cinder_volume_drivers_vmware_vmdk.vmdk_opts,
cinder_volume_drivers_vzstorage.vzstorage_opts,
cinder_volume_drivers_windows_iscsi.windows_opts,
cinder_volume_drivers_windows_smbfs.volume_opts,
cinder_volume_drivers_zadara.zadara_opts,
cinder_volume_drivers_zfssa_zfssaiscsi.ZFSSA_OPTS,
cinder_volume_drivers_zfssa_zfssanfs.ZFSSA_OPTS,
cinder_volume_manager.volume_backend_opts,
)),
('nova',
itertools.chain(
cinder_compute_nova.nova_opts,
cinder_compute_nova.nova_session_opts,
cinder_compute_nova.nova_auth_opts,
)),
]
|
|
#!/usr/bin/env python
import os
from collections import defaultdict
import numpy as np
from scipy.stats import scoreatpercentile
from astropy.stats import sigma_clip
from astropy.time import Time,TimeDelta
import fitsio
import matplotlib.pyplot as plt
from matplotlib import ticker
#from .ubercal import CalibrationObject,CalibrationObjectSet,ubercal_solve
from ubercal import CalibrationObject,CalibrationObjectSet,ubercal_solve,init_flatfields
nX,nY = 4096,4032
nX2 = nX//2
nY2 = nY//2
nCCD = 4
# divide nights into contiguous observing blocks
bok_runs = [
('20131222',),
('20140114', '20140115', '20140116', '20140117', '20140118',
'20140119',),
('20140120', '20140121', '20140123', '20140124', '20140126',
'20140127', '20140128', '20140129',),
('20140213', '20140214', '20140215', '20140216', '20140217',
'20140218', '20140219',),
('20140312', '20140313', '20140314', '20140315', '20140316',
'20140317', '20140318', '20140319',),
('20140413', '20140414', '20140415', '20140416', '20140417',
'20140418',),
('20140424', '20140425', '20140426', '20140427', '20140428',),
('20140512', '20140513', '20140514', '20140515', '20140516',
'20140517', '20140518',),
('20140609', '20140610', '20140611', '20140612', '20140613',),
('20140629', '20140630', '20140701', '20140702', '20140703',),
# '20140705',), # no observations, doesn't have a log
('20140710', '20140711', '20140713',
# '20140716', # no observations, doesn't have a log
'20140717', '20140718',),
]
# exclude clearly non-photometric nights
bad_night_list = ['20131222',]
def bok_run_index():
return {utd:run for run,utds in enumerate(bok_runs) for utd in utds}
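# Illustrative only: bok_run_index() inverts the bok_runs table above into a
# night -> run-number lookup, so for example '20131222' maps to run 0 and
# '20140114' (the first night of the second block) maps to run 1.
def _demo_run_index():
    runidx = bok_run_index()
    return runidx['20131222'], runidx['20140114']  # -> (0, 1)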
def get_mjd(utDate,utTime):
utStr = '-'.join([utDate[:4],utDate[4:6],utDate[6:]]) + ' ' + utTime
return Time(utStr,scale='utc').mjd
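# Illustrative only (the MJD value is quoted from memory, not recomputed): the
# UT date and time strings from the observing logs are glued into an ISO string
# and converted with astropy, so get_mjd('20140114','07:00:00.0') should come
# out near MJD 56671.29 (2014-01-14 07:00 UT, i.e. local midnight MST).
def _demo_get_mjd():
    return get_mjd('20140114', '07:00:00.0')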
def select_photometric_images(filt):
'''Add initial cut on photometricity, based on seeing values and zeropoints
calculated from the SExtractor catalogs.'''
zpmin = {'g':25.4}
iqdir = os.path.join(os.environ['BOK90PRIMEDIR'],'py')
iqlog = np.loadtxt(os.path.join(iqdir,'bokimagequality_%s.log'%filt),
dtype=[('utdate','S8'),('frame','i4'),
('sky','f4'),('seeing','f4'),('zeropoint','f4')])
isphoto = ( (iqlog['seeing'] < 2.5) &
(iqlog['zeropoint'] > zpmin[filt]) )
print 'iter1: rejected %d frames out of %d' % ((~isphoto).sum(),len(iqlog))
# require neighboring images to be photometric as well, to handle varying
# conditions
min_nphoto = 5
nphoto = np.zeros(len(iqlog),dtype=np.int)
for i in np.where(isphoto)[0]:
up,down = True,True
for j in range(1,min_nphoto+1):
if up:
if i+j<len(iqlog) and isphoto[i+j]:
nphoto[i] += 1
else:
up = False
if down:
if i-j>0 and isphoto[i-j]:
nphoto[i] += 1
else:
down = False
isphoto &= nphoto > min_nphoto
print 'iter2: rejected %d frames out of %d' % ((~isphoto).sum(),len(iqlog))
return iqlog,isphoto
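# A standalone copy of the neighbor-counting loop used above, factored out so it
# can be exercised on a toy boolean array (synthetic input, not the real
# image-quality log): a frame only survives the second pass if it sits inside a
# long enough run of photometric frames, so isolated good frames and frames at
# the edges of a run get rejected.
def _demo_neighbor_cut(isphoto, min_nphoto=5):
    nphoto = np.zeros(len(isphoto), dtype=int)
    for i in np.where(isphoto)[0]:
        up, down = True, True
        for j in range(1, min_nphoto+1):
            if up:
                if i+j < len(isphoto) and isphoto[i+j]:
                    nphoto[i] += 1
                else:
                    up = False
            if down:
                if i-j > 0 and isphoto[i-j]:
                    nphoto[i] += 1
                else:
                    down = False
    return isphoto & (nphoto > min_nphoto)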
def build_frame_list(filt,nightlyLogs=None):
'''Collapse the nightly observing logs into a master frame list containing
the relevant info for each observation, namely:
mjd,expTime,airmass observing parameters
nightIndex 0-indexed from the list of observing nights
nightFrameNum frame number from observing logs
'''
import boklog
refTimeUT = '07:00:00.0' # 7h UT = midnight MST
if nightlyLogs is None:
nightlyLogs = boklog.load_Bok_logs()
iqlog,isphoto = select_photometric_images(filt)
frameList = []
for night,utd in enumerate(sorted(nightlyLogs.keys())):
frames = nightlyLogs[utd]
ii = np.where((frames['filter']==filt) &
(frames['imType']=='object'))[0]
if len(ii)==0:
continue
mjds = np.array([get_mjd(utd,frames['utStart'][i]) for i in ii])
epochIndex = np.repeat(night,len(ii))
refTime = get_mjd(utd,refTimeUT)
dt = 24*(mjds-refTime)
jj = np.array([np.where((iqlog['utdate']==utd) &
(iqlog['frame']==i))[0][0]
for i in ii])
frameList.append((mjds,dt,
frames['expTime'][ii],frames['airmass'][ii],
epochIndex,ii,isphoto[jj]))
frameList = np.hstack(frameList)
frameList = np.core.records.fromarrays(frameList,
dtype=[('mjd','f8'),('dt','f4'),
('expTime','f4'),('airmass','f4'),
('nightIndex','i4'),('nightFrameNum','i4'),
('isPhoto','i2')])
return frameList
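# Illustrative only: the structured frameList array built above can be sliced
# with ordinary boolean masks, e.g. to pull out the photometric frames from one
# night (nightIndex counts from 0 over the sorted list of observing nights, and
# isPhoto carries the flag set in select_photometric_images).
def _demo_select_night(frameList, night=2):
    sel = (frameList['nightIndex'] == night) & (frameList['isPhoto'] > 0)
    return frameList['mjd'][sel], frameList['airmass'][sel]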
def collect_observations(filt,catpfx='sdssbright'):
import boklog
import bokcat
photdir = os.path.join(os.environ['BOK90PRIMEOUTDIR'],'catalogs_v2')
aperNum = -1
mag0 = 25.0
SNRcut = 20.0
utd2run = bok_run_index()
nightlyLogs = boklog.load_Bok_logs()
frameList = build_frame_list(filt,nightlyLogs)
objectList = defaultdict(list)
refcat = bokcat.load_targets('SDSSstars')
for night,utd in enumerate(sorted(nightlyLogs.keys())):
try:
catfn = '.'.join([catpfx,utd,filt,'cat','fits'])
fits = fitsio.FITS(os.path.join(photdir,catfn))
except ValueError:
continue
print catfn
runarr = np.repeat(utd2run[utd],100)# to avoid creating repeatedly
night_jj = np.where(frameList['nightIndex']==night)[0]
#
data = fits[1].read()
for starNum,i1,i2 in fits[2]['TINDEX','i1','i2'][:]:
good = ( (data['flags'][i1:i2,aperNum] == 0) &
(data['aperCounts'][i1:i2,aperNum] > 0) &
(data['aperCountsErr'][i1:i2,aperNum] <
(1/SNRcut)*data['aperCounts'][i1:i2,aperNum]) )
if good.sum() == 0:
continue
good = i1 + np.where(good)[0]
# XXX to catch a bug in the catalog - some objects appear multiple
# times in the same frame!
frameNums,jj = np.unique(data['frameNum'][good],return_index=True)
if len(frameNums) != len(good):
print 'WARNING: object with multiple instances ',
print starNum,data['frameNum'][good]
good = good[jj] # restrict to unique frames
jj = np.where(np.in1d(frameList['nightFrameNum'][night_jj],
data['frameNum'][good]))[0]
if len(jj) != len(good):
print good,jj
print frameList['nightFrameNum'][night_jj]
print data['frameNum'][good]
raise ValueError
jj = night_jj[jj]
expTime = frameList['expTime'][jj]
counts = data['aperCounts'][good,aperNum]
mags = mag0 - 2.5*np.log10(counts/expTime)
errs = (2.5/np.log(10))*data['aperCountsErr'][good,aperNum]/counts
ccdNums = data['ccdNum'][good]
ampNums = (data['x'][good]//nX2).astype(np.int) + \
2*(data['y'][good]//nY2).astype(np.int)
nightIndex = frameList['nightIndex'][jj]
runIndex = runarr[:len(good)]
refMag = np.repeat(refcat[filt][starNum],len(good))
objectList[starNum].append((mags,errs,ccdNums,ampNums,
data['x'][good],data['y'][good],
runIndex,nightIndex,jj,refMag))
for starNum in objectList:
arr = np.hstack(objectList[starNum])
objectList[starNum] = np.core.records.fromarrays(arr,
dtype=[('magADU','f4'),('errADU','f4'),
('ccdNum','i4'),('ampNum','i4'),
('x','f4'),('y','f4'),
('runIndex','i4'),('nightIndex','i4'),
('frameIndex','i4'),('refMag','f4')])
return frameList,objectList
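# Illustrative arithmetic for the instrumental magnitudes computed above (the
# counts and exposure time are made-up numbers): with mag0 = 25.0, a star that
# gives 10000 ADU in a 100 s exposure has a rate of 100 ADU/s, so
# m = 25.0 - 2.5*log10(100) = 20.0; the magnitude error used above is
# (2.5/ln 10) * (counts_err/counts), i.e. ~1.086 times the fractional error.
def _demo_instrumental_mag(counts=10000., expTime=100., mag0=25.0):
    return mag0 - 2.5*np.log10(counts/expTime)  # -> 20.0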
def cache_bok_data(frameList,objectList,fileName):
fits = fitsio.FITS(fileName,'rw')
indx = np.empty(len(objectList),
dtype=[('starNum','i4'),('i1','i4'),('i2','i4')])
i1 = 0
for i,starNum in enumerate(objectList):
if i==0:
fits.write(objectList[starNum])
else:
fits[-1].append(objectList[starNum])
indx['starNum'][i] = starNum
indx['i1'][i] = i1
indx['i2'][i] = i1 + len(objectList[starNum])
i1 += len(objectList[starNum])
fits.write(indx)
fits.write(frameList)
fits.close()
def load_cached_bok_data(fileName):
fits = fitsio.FITS(fileName)
data = fits[1].read()
indexes = fits[2].read()
frameList = fits[3].read()
objectList = {}
for starNum,i1,i2 in indexes:
objectList[starNum] = data[i1:i2]
return frameList,objectList
class SimFlatField(object):
def __init__(self,n=1,kind='gradient',dm=0.3):
#self.coeffs = np.random.rand(n,nCCD,2)
self.coeffs = np.array([[[0.0,0.0],
[0.0,1.0],
[1.0,0.0],
[1.0,1.0]]])
self.dims = (nY,nX)
self.dm = dm
if kind=='gradient':
self.flatfun = self._gradientfun
def _gradientfun(self,coeff,x,y):
norm = coeff.sum(axis=-1)
if np.isscalar(norm):
if norm>0:
norm **= -1
else:
norm[norm>0] **= -1
return ( (coeff[...,0]*(x/float(nX)) +
coeff[...,1]*(y/float(nY)))
* self.dm * norm )
def __call__(self,indices,x,y):
coeff = self.coeffs[indices]
return self.flatfun(coeff,x,y)
def make_image(self,indices):
Y,X = np.indices(self.dims)
return self.__call__(indices,X,Y)
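# Illustrative only: with the hard-coded coefficients above, the first CCD
# (coeffs [0,0]) carries no flat-field structure, while the last CCD (coeffs
# [1,1]) gets a diagonal gradient running from 0 at pixel (0,0) up to ~dm at the
# far corner, because the sum of the coefficients is normalized out.
def _demo_sim_flatfield(dm=0.3):
    ff = SimFlatField(kind='gradient', dm=dm)
    far_corner = ff((0, 3), np.array([nX-1]), np.array([nY-1]))
    return far_corner  # -> approximately dm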
def sim_init(a_init,k_init,objs,**kwargs):
a_range = kwargs.get('sim_a_range',0.3)
k_range = kwargs.get('sim_k_range',0.2)
fixed_mag = kwargs.get('sim_fixed_mag',18.)
fixed_err = kwargs.get('sim_fixed_err',0.03)
print 'SIMULATION: a_range=%.2f k_range=%.2f' % (a_range,k_range)
np.random.seed(1)
simdat = {}
simdat['a_true'] = a_range*np.random.random_sample(a_init.shape)
simdat['a_true'] -= np.median(simdat['a_true'])
simdat['k_true'] = k_range*np.random.random_sample(k_init.shape)
simdat['errMin'] = kwargs.get('errMin',0.01)
print 'SIMULATION: minimum rms %.3f' % simdat['errMin']
if kwargs.get('sim_userealmags',True):
simdat['mag'] = np.array([objs[i]['refMag'][0] for i in objs])
print 'SIMULATION: using real magnitudes'
else:
simdat['mag'] = np.repeat(fixed_mag,len(objs))
print 'SIMULATION: using fixed magnitude %.2f' % fixed_mag
if kwargs.get('sim_userealerrs',True):
simdat['err'] = np.array([np.median(objs[i]['errADU']) for i in objs])
print 'SIMULATION: using real errors'
else:
simdat['err'] = np.repeat(fixed_err,len(objs))
print 'SIMULATION: using fixed errors %.2f' % fixed_err
simdat['outlier_frac'] = kwargs.get('sim_outlierfrac',0.1)
print 'SIMULATION: fraction of outliers %g' % simdat['outlier_frac']
simdat['is_outlier'] = []
if kwargs.get('sim_addflatfield',True):
dm = kwargs.get('sim_flatfield_range',0.3)
simdat['flatfield'] = SimFlatField(n=1,kind='gradient',dm=dm)
print 'SIMULATION: applying %s flat field' % 'gradient'
print 'SIMULATION: maximum range dm=%.2f' % dm
else:
simdat['flatfield'] = lambda *args: 0
print 'SIMULATION: no flat field variation'
return simdat
def sim_initobject(i,obj,frames,simdat,rmcal):
x = frames['airmass'][obj['frameIndex']]
dt = frames['dt'][obj['frameIndex']]
dk_dt = rmcal.get_terms('dkdt',0) # using a fixed value
flatfield = simdat['flatfield']
flatIndex = (np.repeat(0,len(obj)),obj['ccdNum']-1)
mags = simdat['mag'][i] - (
simdat['a_true'][obj['nightIndex'],obj['ccdNum']-1]
- (simdat['k_true'][obj['nightIndex']] + dk_dt*dt)*x
+ flatfield(flatIndex,obj['x'],obj['y']) )
errs = np.repeat(simdat['err'][i],len(mags))
mags[:] += errs*np.random.normal(size=mags.shape)
if simdat['outlier_frac'] > 0:
is_outlier = np.random.poisson(simdat['outlier_frac'],len(mags))
ii = np.where(is_outlier)[0]
# start at 5sigma and decline as a power law with index 1.5
nsig_outlier = (np.random.pareto(1.5,len(ii)) + 1) * 5.0
sgn = np.choose(np.random.rand(len(ii)) > 0.5,[-1,1])
mags[ii] += sgn*nsig_outlier*errs[ii]
simdat['is_outlier'].append(is_outlier)
return CalibrationObject(mags,errs,errMin=simdat['errMin'])
def sim_finish(rmcal,simdat):
gk = np.where(~rmcal.params['k']['terms'].mask)
dk = (rmcal.params['k']['terms']-simdat['k_true'])[gk].flatten()
ga = np.where(~rmcal.params['a']['terms'].mask)
da = (rmcal.params['a']['terms']-simdat['a_true'])[ga].flatten()
median_a_offset = np.median(da)
#
plt.figure(figsize=(9,4))
plt.subplots_adjust(0.08,0.06,0.98,0.98,0.23,0.15)
ax1 = plt.subplot2grid((2,4),(0,0),colspan=3)
plt.axhline(0,c='gray')
plt.plot(dk)
plt.xlim(0,len(gk[0]))
plt.ylim(-0.5,0.5)
plt.ylabel(r'$\Delta(k)$',size=12)
#
ax2 = plt.subplot2grid((2,4),(1,0),colspan=3)
plt.axhline(0,c='gray')
plt.plot(da-median_a_offset)
plt.xlim(0,len(ga[0]))
plt.ylim(-0.8,0.8)
plt.ylabel(r'$\Delta(a)$',size=12)
#
dm = []
for i,obj in enumerate(rmcal):
mag,err = rmcal.get_object_phot(obj)
dm.append(obj.refMag - (mag - median_a_offset))
try:
is_outlier = simdat['is_outlier'][i].astype(np.bool)
is_masked = obj.mags.mask
# print '%4d %4d %4d %4d %4d' % (len(mag),np.sum(is_outlier),np.sum(is_outlier&is_masked),np.sum(is_outlier&~is_masked),np.sum(~is_outlier&is_masked))
except:
pass
dm = np.ma.concatenate(dm)
dm3 = sigma_clip(dm,sig=3,iters=1)
frac_sig3 = np.sum(dm3.mask & ~dm.mask) / float(np.sum(~dm.mask))
mm = 1000 # millimag
print
print '<da> = %.1f' % (mm*np.median(da))
print '<dk> = %.1f' % (mm*np.median(dk)),
print ' @AM=2.0 %.1f' % (mm*np.median(dk)*2)
print
print '%8s %8s %8s %8s %8s [millimag]' % \
('<dm>','sig','sig3','%(3sig)','sig0')
print '%8s %8s %8s %8s %8s' % tuple(['-'*6]*5)
print '%8.2f %8.2f %8.2f %8.2f %8.2f' % \
(mm*dm.mean(),mm*dm.std(),mm*dm3.std(),100*frac_sig3,0.0)
#
ax3 = plt.subplot2grid((2,4),(0,3),rowspan=2)
plt.hist(dm.data,50,(-0.2,0.2),edgecolor='none',color='r',normed=True)
plt.hist(dm.data[~dm.mask],50,(-0.2,0.2),edgecolor='none',
color='b',normed=True)
ax3.text(0.95,0.98,r'$\Delta(mag)$',size=12,ha='right',va='top',
transform=ax3.transAxes)
ax3.axvline(dm.mean(),c='purple')
ax3.axvline(dm.mean()-dm3.std(),c='purple',ls='--')
ax3.axvline(dm.mean()+dm3.std(),c='purple',ls='--')
plt.xlim(-0.2,0.2)
ax3.xaxis.set_major_locator(ticker.MultipleLocator(0.1))
for ax in [ax1,ax2,ax3]:
for tick in ax.xaxis.get_major_ticks()+ax.yaxis.get_major_ticks():
tick.label1.set_fontsize(9)
def cal_finish(rmcal):
dm = []
for i,obj in enumerate(rmcal):
mag,err = rmcal.get_object_phot(obj)
dm.append(obj.refMag - mag)
dm = np.ma.concatenate(dm)
dm3 = sigma_clip(dm,sig=3,iters=1)
median_dm = np.ma.median(dm3)
dm -= median_dm
dm3 -= median_dm
frac_sig3 = np.sum(dm3.mask & ~dm.mask) / float(np.sum(~dm.mask))
mm = 1000 # millimag
print
    print '%8s %8s %8s %8s %8s [millimag]' % \
            ('<dm>','sig','sig3','%(3sig)','sig0')
print '%8s %8s %8s %8s %8s' % tuple(['-'*6]*5)
print '%8.2f %8.2f %8.2f %8.2f %8.2f' % \
(mm*dm.mean(),mm*dm.std(),mm*dm3.std(),100*frac_sig3,0.0)
#
plt.figure()
plt.hist(dm,50,(-1,1))
#def reject_outliers(rmcal,**kwargs):
def reject_outliers(rmcal,simdat,**kwargs):
sig = kwargs.get('reject_sig',3.0)
iters = kwargs.get('reject_niter',2)
for i,obj in enumerate(rmcal):
mags,errs = rmcal.get_object_phot(obj)
clipped = sigma_clip(mags,sig=sig,iters=iters)
# need a verbose argument
# if clipped.mask.sum() > mags.mask.sum():
# print 'object %d rejected %d' % (i,(clipped.mask&~mags.mask).sum())
obj.update_mask(clipped.mask)
def fiducial_model(frames,objs,verbose=True,dosim=False,niter=1,**kwargs):
ndownsample = kwargs.get('downsample',1)
doflats = kwargs.get('doflats',True)
numCCDs = 4
numFrames = len(frames)
# identify nights to process
bok_nights = np.array([utd for run in bok_runs for utd in run])
numNights = len(bok_nights)
framesPerNight = np.array([np.sum(frames['nightIndex']==i)
for i in range(numNights)])
bad_nights = np.where( np.in1d(bok_nights,bad_night_list) |
(framesPerNight==0) )[0]
# initialize the a-term array to zeros, masking non-photometric nights
a_init = np.ma.array(np.zeros((numNights,numCCDs)),mask=False)
a_init[bad_nights] = np.ma.masked
# initialize the k-term array to zeros, masking non-photometric nights
k_init = np.ma.array(np.zeros(numNights),mask=False)
k_init[bad_nights] = np.ma.masked
# initialize the flat field arrays
if doflats:
flatfield_init = init_flatfields((numCCDs,),nX,nY,method='spline')
else:
flatfield_init = init_flatfields((numCCDs,),nX,nY,method='null')
# construct the container for the global ubercal parameters
rmcal = CalibrationObjectSet(a_init,k_init,frames['dt'],
frames['airmass'],flatfield_init)
    # currently using a fixed value for the time derivative of k, taken from P08
rmcal.set_fixed_dkdt(0)
#rmcal.set_fixed_dkdt(-0.7e-2/10) # given as mag/airmass/10h
#
if dosim:
simdat = sim_init(a_init,k_init,objs,**kwargs)
# loop over individual stars and set their particulars for each
# observation, then add them to the calibration set
for i,(starNum,obj) in enumerate(objs.items()):
if (starNum % ndownsample) != 0:
continue
if dosim:
calobj = sim_initobject(i,obj,frames,simdat,rmcal)
calobj.set_reference_mag(simdat['mag'][i])
else:
# construct a calibration object from the flux/err vectors
calobj = CalibrationObject(obj['magADU'],obj['errADU'])
# mask the pre-assigned non-photometric observations
# XXX before doing this, reasonable a and k values must be set
#calobj.update_mask(frames['isPhoto'][obj['frameIndex']]==0)
# set the catalog magnitude for this object
calobj.set_reference_mag(obj['refMag'][0])
calobj.set_xy(obj['x'],obj['y'])
# XXX should require all of these to be tuples of arrays for consistency
calobj.set_a_indices((obj['nightIndex'],obj['ccdNum']-1))
calobj.set_k_indices(obj['nightIndex'])
calobj.set_flat_indices((obj['ccdNum']-1,))
calobj.set_x_indices(obj['frameIndex'])
calobj.set_t_indices(obj['frameIndex'])
rmcal.add_object(calobj)
if verbose:
print 'number nights: ',np.sum(framesPerNight>0)
print 'number good nights: ', \
np.sum(np.any(~rmcal.params['a']['terms'].mask,axis=1))
print 'number frames: ',numFrames
print 'number objects: ',rmcal.num_objects()
print 'number observations: ',rmcal.num_observations()
print 'number parameters: ',rmcal.num_params()
# iteratively solve for the calibration parameters
for iternum in range(niter):
pars = ubercal_solve(rmcal,**kwargs)
rmcal.update_params(pars)
if doflats:
rmcal.update_flatfields()
if dosim:
sim_finish(rmcal,simdat)
if iternum < niter-1:
#reject_outliers(rmcal,**kwargs)
reject_outliers(rmcal,simdat,**kwargs) # XXX
if dosim:
return rmcal,simdat
return rmcal
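# Example (sketch): run the fiducial calibration on cached data, with a short
# simulation pass to validate the recovered a/k terms (file name and argument
# values are illustrative only).
#   frames,objs = load_cached_bok_data('bokrm_photcache.fits')
#   rmcal,simdat = fiducial_model(frames,objs,dosim=True,niter=2)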
def sim_make_residual_images(rmcal,binX=32,binY=32):
xBins = np.arange(0,nX+1,binX)
yBins = np.arange(0,nY+1,binY)
median_a_offset = 0
dmag = []
for i,obj in enumerate(rmcal):
mag,err = rmcal.get_object_phot(obj)
dmag.append(obj.refMag - (mag - median_a_offset))
dmag = np.concatenate(dmag)
xy = np.hstack( [ [rmcal.objs[i].xpos,rmcal.objs[i].ypos]
for i in range(rmcal.num_objects()) ] )
# XXX hack that last index in a_indices is ccdNum
ccds = np.concatenate( [ rmcal.objs[i].a_indices[-1]
for i in range(rmcal.num_objects()) ] )
ffmaps = []
for ccdNum in range(4):
ffmap = [[[] for xi in xBins] for yi in yBins]
ii = np.where(ccds==ccdNum)[0]
for xi,yi,dm in zip(np.digitize(xy[0,ii],xBins),
np.digitize(xy[1,ii],yBins),
dmag[ii]):
ffmap[yi][xi].append(dm)
for xi in range(len(xBins)):
for yi in range(len(yBins)):
if len(ffmap[yi][xi])==0:
ffmap[yi][xi] = np.nan
else:
ffmap[yi][xi] = np.median(ffmap[yi][xi])
ffmaps.append(np.array(ffmap))
return np.array(ffmaps)
def _init_fov_fig():
cmap = plt.get_cmap('jet')
cmap.set_bad('gray',1.)
plt.figure(figsize=(10,9))
plt.subplots_adjust(0.04,0.04,0.99,0.99,0.1,0.1)
def sim_show_residual_images(rmcal,**kwargs):
_init_fov_fig()
ffmaps = sim_make_residual_images(rmcal,**kwargs)
for ccdNum in range(1,5):
ffim = np.ma.array(ffmaps[ccdNum-1],mask=np.isnan(ffmaps[ccdNum-1]))
v1 = scoreatpercentile(ffim[~ffim.mask],10)
v2 = scoreatpercentile(ffim[~ffim.mask],90)
print ccdNum,ffim.mean(),ffim.std(),v1,v2
plt.subplot(2,2,ccdNum)
plt.imshow(ffim,vmin=v1,vmax=v2,
origin='lower',extent=[0,nX,0,nY],interpolation='nearest')
plt.colorbar()
def sim_show_fake_flatfields(simdat):
_init_fov_fig()
for i in range(4):
plt.subplot(2,2,i+1)
plt.imshow(simdat['flatfield'].make_image((0,i,)),
origin='lower',extent=[0,nX,0,nY])
plt.colorbar()
def show_fit_flatfields(rmcal):
_init_fov_fig()
for i,ff in enumerate(rmcal.flatfields):
plt.subplot(2,2,i+1)
plt.imshow(ff.make_image(res=64),
origin='lower',extent=[0,nX,0,nY])
plt.colorbar()
|
|
#!/usr/bin/env python3
#
# This file is part of Linux-on-LiteX-VexRiscv
#
# Copyright (c) 2019-2021, Linux-on-LiteX-VexRiscv Developers
# SPDX-License-Identifier: BSD-2-Clause
import os
import sys
import argparse
from litex.soc.integration.builder import Builder
from litex.soc.cores.cpu.vexriscv_smp import VexRiscvSMP
from litespi.modules import *
from litespi.opcodes import SpiNorFlashOpCodes as Codes
from soc_linux import SoCLinux
kB = 1024
# Board definition----------------------------------------------------------------------------------
class Board:
soc_kwargs = {"integrated_rom_size": 0x10000, "l2_size": 0}
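    # Subclasses override soc_kwargs and pass a set of capability strings; main()
    # below maps each capability ("ethernet", "sdcard", ...) onto the matching
    # SoCLinux helper when the SoC is built.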
def __init__(self, soc_cls=None, soc_capabilities={}, soc_constants={}, bitstream_ext=""):
self.soc_cls = soc_cls
self.soc_capabilities = soc_capabilities
self.soc_constants = soc_constants
self.bitstream_ext = bitstream_ext
def load(self, filename):
prog = self.platform.create_programmer()
prog.load_bitstream(filename)
def flash(self, filename):
prog = self.platform.create_programmer()
prog.flash(0, filename)
#---------------------------------------------------------------------------------------------------
# Xilinx Boards
#---------------------------------------------------------------------------------------------------
# Acorn support ------------------------------------------------------------------------------------
class Acorn(Board):
soc_kwargs = {"uart_name": "jtag_uart", "sys_clk_freq": int(150e6)}
def __init__(self):
from litex_boards.targets import acorn
Board.__init__(self, acorn.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"sata",
}, bitstream_ext=".bit")
# Acorn PCIe support -------------------------------------------------------------------------------
class AcornPCIe(Board):
soc_kwargs = {"with_pcie": True, "uart_name": "crossover", "sys_clk_freq": int(125e6)}
def __init__(self):
from litex_boards.targets import sqrl_acorn
Board.__init__(self, sqrl_acorn.BaseSoC, soc_capabilities={
# Communication
"serial",
"pcie",
}, bitstream_ext=".bit")
# Arty support -------------------------------------------------------------------------------------
class Arty(Board):
spiflash = S25FL128L(Codes.READ_1_1_1)
def __init__(self):
from litex_boards.targets import arty
Board.__init__(self, arty.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"spiflash",
"sdcard",
# GPIOs
"leds",
"rgb_led",
"switches",
# Buses
"spi",
"i2c",
# Monitoring
"xadc",
# 7-Series specific
"mmcm",
"icap_bitstream",
}, bitstream_ext=".bit")
class ArtyA7(Arty): pass
class ArtyS7(Board):
spiflash = S25FL128L(Codes.READ_1_1_1)
def __init__(self):
from litex_boards.targets import arty_s7
Board.__init__(self, arty_s7.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"spiflash",
# GPIOs
"leds",
"rgb_led",
"switches",
# Buses
"spi",
"i2c",
# Monitoring
"xadc",
# 7-Series specific
"mmcm",
"icap_bitstream",
}, bitstream_ext=".bit")
# NeTV2 support ------------------------------------------------------------------------------------
class NeTV2(Board):
spiflash = MX25L6436E(Codes.READ_1_1_1)
def __init__(self):
from litex_boards.targets import netv2
Board.__init__(self, netv2.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"spiflash",
"sdcard",
# GPIOs
"leds",
# Video
"framebuffer",
# Monitoring
"xadc",
}, bitstream_ext=".bit")
# Genesys2 support ---------------------------------------------------------------------------------
class Genesys2(Board):
def __init__(self):
from litex_boards.targets import genesys2
Board.__init__(self, genesys2.BaseSoC, soc_capabilities={
# Communication
"usb_fifo",
"ethernet",
# Storage
"sdcard",
}, bitstream_ext=".bit")
# KC705 support ---------------------------------------------------------------------------------
class KC705(Board):
soc_kwargs = {"uart_baudrate": 500e3} # 1Mbauds not supported by CP210x.
def __init__(self):
from litex_boards.targets import kc705
Board.__init__(self, kc705.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"sdcard",
#"sata",
# GPIOs
"leds",
# Monitoring
"xadc",
}, bitstream_ext=".bit")
# VC707 support ---------------------------------------------------------------------------------
class VC707(Board):
soc_kwargs = {"uart_baudrate": 500e3} # 1Mbauds not supported by CP210x.
def __init__(self):
from litex_boards.targets import vc707
Board.__init__(self, vc707.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"sdcard",
#"sata",
# GPIOs
"leds",
# Monitoring
"xadc",
}, bitstream_ext=".bit")
# KCU105 support -----------------------------------------------------------------------------------
class KCU105(Board):
soc_kwargs = {"uart_baudrate": 115.2e3} # FIXME: understand why not working with more.
def __init__(self):
from litex_boards.targets import kcu105
Board.__init__(self, kcu105.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"sdcard",
}, bitstream_ext=".bit")
# ZCU104 support -----------------------------------------------------------------------------------
class ZCU104(Board):
def __init__(self):
from litex_boards.targets import zcu104
Board.__init__(self, zcu104.BaseSoC, soc_capabilities={
# Communication
"serial",
}, bitstream_ext=".bit")
# Nexys4DDR support --------------------------------------------------------------------------------
class Nexys4DDR(Board):
def __init__(self):
from litex_boards.targets import nexys4ddr
Board.__init__(self, nexys4ddr.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"sdcard",
# Video
"framebuffer",
}, bitstream_ext=".bit")
# NexysVideo support -------------------------------------------------------------------------------
class NexysVideo(Board):
def __init__(self):
from litex_boards.targets import nexys_video
Board.__init__(self, nexys_video.BaseSoC, soc_capabilities={
# Communication
"usb_fifo",
# Storage
"sdcard",
# Video
"framebuffer",
}, bitstream_ext=".bit")
# MiniSpartan6 support -----------------------------------------------------------------------------
class MiniSpartan6(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import minispartan6
Board.__init__(self, minispartan6.BaseSoC, soc_capabilities={
# Communication
"usb_fifo",
# Storage
"sdcard",
# Video
"framebuffer",
}, bitstream_ext=".bit")
# Pipistrello support ------------------------------------------------------------------------------
class Pipistrello(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import pipistrello
Board.__init__(self, pipistrello.BaseSoC, soc_capabilities={
# Communication
"serial",
}, bitstream_ext=".bit")
# XCU1525 support ----------------------------------------------------------------------------------
class XCU1525(Board):
def __init__(self):
from litex_boards.targets import xcu1525
Board.__init__(self, xcu1525.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"sata",
}, bitstream_ext=".bit")
# AlveoU280 (ES1) support -------------------------------------------------------------------------------
class AlveoU280(Board):
soc_kwargs = {
"ddram_channel": 0, # pick board DRAM channel and clk
"with_pcie": False,
"driver": False,
"with_led_chaser": False,
"with_hbm": True, # will use HBM channel 0, no DRAM
"sys_clk_freq": 250e6 # 250MHz for HBM, 150MHz for DRAM
}
def __init__(self):
from litex_boards.targets import alveo_u280
Board.__init__(self, alveo_u280.BaseSoC, soc_capabilities={
# Communication
"serial"
}, bitstream_ext=".bit")
# AlveoU250 support -------------------------------------------------------------------------------
class AlveoU250(Board):
def __init__(self):
from litex_boards.targets import alveo_u250
Board.__init__(self, alveo_u250.BaseSoC, soc_capabilities={
# Communication
"serial"
}, bitstream_ext=".bit")
# SDS1104X-E support -------------------------------------------------------------------------------
class SDS1104XE(Board):
soc_kwargs = {"l2_size" : 8192} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import sds1104xe
Board.__init__(self, sds1104xe.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Video
"framebuffer",
}, bitstream_ext=".bit")
def load(self, filename):
prog = self.platform.create_programmer()
prog.load_bitstream(filename, device=1)
# QMTECH WuKong support ---------------------------------------------------------------------------
class Qmtech_WuKong(Board):
spiflash = S25FL128L(Codes.READ_1_1_1)
soc_kwargs = {
"uart_baudrate": 3e6,
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import qmtech_wukong
Board.__init__(self, qmtech_wukong.BaseSoC, soc_capabilities={
"leds",
# Communication
"serial",
"ethernet",
# Storage
"spiflash",
#"spisdcard",
# Video
#"video_terminal",
"framebuffer",
}, bitstream_ext=".bit")
# MNT RKX7 support ---------------------------------------------------------------------------------
class MNT_RKX7(Board):
soc_kwargs = {"uart_baudrate": 115200}
def __init__(self):
from litex_boards.targets import mnt_rkx7
Board.__init__(self, mnt_rkx7.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"spisdcard",
}, bitstream_ext=".bit")
#---------------------------------------------------------------------------------------------------
# Lattice Boards
#---------------------------------------------------------------------------------------------------
# Versa ECP5 support -------------------------------------------------------------------------------
class VersaECP5(Board):
spiflash = N25Q128A13(Codes.READ_1_1_1)
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import versa_ecp5
Board.__init__(self, versa_ecp5.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# Storage
"spiflash",
}, bitstream_ext=".bit")
# ULX3S support ------------------------------------------------------------------------------------
class ULX3S(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import ulx3s
Board.__init__(self, ulx3s.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"spisdcard",
# Video,
"framebuffer",
}, bitstream_ext=".svf")
# HADBadge support ---------------------------------------------------------------------------------
class HADBadge(Board):
spiflash = W25Q128JV(Codes.READ_1_1_1)
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import hadbadge
Board.__init__(self, hadbadge.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"spiflash",
}, bitstream_ext=".bit")
def load(self, filename):
os.system("dfu-util --alt 2 --download {} --reset".format(filename))
# OrangeCrab support -------------------------------------------------------------------------------
class OrangeCrab(Board):
soc_kwargs = {
"sys_clk_freq" : int(64e6), # Increase sys_clk_freq to 64MHz (48MHz default).
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import orangecrab
os.system("git clone https://github.com/litex-hub/valentyusb -b hw_cdc_eptri")
sys.path.append("valentyusb") # FIXME: do proper install of ValentyUSB.
Board.__init__(self, orangecrab.BaseSoC, soc_capabilities={
# Communication
"usb_acm",
# Buses
"i2c",
# Storage
"spisdcard",
}, bitstream_ext=".bit")
# Butterstick support ------------------------------------------------------------------------------
class ButterStick(Board):
soc_kwargs = {"uart_name": "jtag_uart"}
def __init__(self):
from litex_boards.targets import butterstick
Board.__init__(self, butterstick.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
}, bitstream_ext=".bit")
# Cam Link 4K support ------------------------------------------------------------------------------
class CamLink4K(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import camlink_4k
Board.__init__(self, camlink_4k.BaseSoC, soc_capabilities={
# Communication
"serial",
}, bitstream_ext=".bit")
def load(self, filename):
os.system("camlink configure {}".format(filename))
# TrellisBoard support -----------------------------------------------------------------------------
class TrellisBoard(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import trellisboard
Board.__init__(self, trellisboard.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"sdcard",
}, bitstream_ext=".bit")
# ECPIX5 support -----------------------------------------------------------------------------------
class ECPIX5(Board):
spiflash = IS25LP256D(Codes.READ_1_1_1)
soc_kwargs = {
"sys_clk_freq" : int(50e6),
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import ecpix5
Board.__init__(self, ecpix5.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
# GPIO
#"rgb_led",
# Storage
"sata",
"sdcard",
"spiflash",
}, bitstream_ext=".bit")
# Colorlight i5 support ------------------------------------------------------------------------------------
class Colorlight_i5(Board):
soc_kwargs = {
"sys_clk_freq" : int(50e6), # 48MHz default.
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import colorlight_i5
Board.__init__(self, colorlight_i5.BaseSoC, soc_capabilities={
# Communication
"serial",
"ethernet",
}, bitstream_ext=".bit")
# Icesugar Pro support ------------------------------------------------------------------------------------
class IcesugarPro(Board):
spiflash = W25Q256JV(Codes.READ_1_1_1)
soc_kwargs = {
"sys_clk_freq" : int(50e6), # 48MHz default.
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import muselab_icesugar_pro
Board.__init__(self, muselab_icesugar_pro.BaseSoC, soc_capabilities={
# Communication
"serial",
# GPIO
# pin collision with user_led
#"rgb_led",
# Storage
"sdcard",
# USRMCLK issue unsolved in litex_boards
#"spiflash",
}, bitstream_ext=".bit")
#---------------------------------------------------------------------------------------------------
# Intel Boards
#---------------------------------------------------------------------------------------------------
# De10Nano support ---------------------------------------------------------------------------------
class De10Nano(Board):
soc_kwargs = {
"with_mister_sdram": True, # Add MiSTer SDRAM extension.
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import de10nano
Board.__init__(self, de10nano.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"sdcard",
# GPIOs
"leds",
"switches",
}, bitstream_ext=".sof")
# De0Nano support ----------------------------------------------------------------------------------
class De0Nano(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import de0nano
Board.__init__(self, de0nano.BaseSoC, soc_capabilities={
# Communication
"serial",
}, bitstream_ext=".sof")
# De1-SoC support ----------------------------------------------------------------------------------
class De1SoC(Board):
soc_kwargs = {"l2_size" : 2048} # Use Wishbone and L2 for memory accesses.
def __init__(self):
from litex_boards.targets import de1soc
Board.__init__(self, de1soc.BaseSoC, soc_capabilities={
# Communication
"serial",
# GPIOs
"leds",
"switches",
}, bitstream_ext=".sof")
# QMTECH EP4CE15 support ---------------------------------------------------------------------------
class Qmtech_EP4CE15(Board):
soc_kwargs = {
"variant" : "ep4ce15",
"integrated_sram_size" : 0x800,
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import qmtech_ep4cex5
Board.__init__(self, qmtech_ep4cex5.BaseSoC, soc_capabilities={
# Communication
"serial",
# "leds",
}, bitstream_ext=".sof")
# ... and its bigger brother
class Qmtech_EP4CE55(Board):
soc_kwargs = {
"variant" : "ep4ce55",
"integrated_sram_size" : 0x800,
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import qmtech_ep4cex5
Board.__init__(self, qmtech_ep4cex5.BaseSoC, soc_capabilities={
# Communication
"serial",
# "leds",
}, bitstream_ext=".sof")
#---------------------------------------------------------------------------------------------------
# Efinix Boards
#---------------------------------------------------------------------------------------------------
class TrionT120BGA576DevKit(Board):
soc_kwargs = {
"integrated_sram_size" : 0x800,
"l2_size" : 2048, # Use Wishbone and L2 for memory accesses.
}
def __init__(self):
from litex_boards.targets import trion_t120_bga576_dev_kit
Board.__init__(self, trion_t120_bga576_dev_kit.BaseSoC, soc_capabilities={
# Communication
"serial",
# GPIOs
"leds",
}, bitstream_ext=".bit")
class TitaniumTi60F225DevKit(Board):
soc_kwargs = {
"with_hyperram" : True,
"sys_clk_freq" : 300e6,
}
def __init__(self):
from litex_boards.targets import titanium_ti60_f225_dev_kit
Board.__init__(self, titanium_ti60_f225_dev_kit.BaseSoC, soc_capabilities={
# Communication
"serial",
# Storage
"sdcard",
# GPIOs
"leds",
}, bitstream_ext=".bit")
#---------------------------------------------------------------------------------------------------
# Build
#---------------------------------------------------------------------------------------------------
supported_boards = {
# Xilinx
"acorn": Acorn,
"acorn_pcie": AcornPCIe,
"arty": Arty,
"arty_a7": ArtyA7,
"arty_s7": ArtyS7,
"netv2": NeTV2,
"genesys2": Genesys2,
"kc705": KC705,
"vc707": VC707,
"kcu105": KCU105,
"zcu104": ZCU104,
"nexys4ddr": Nexys4DDR,
"nexys_video": NexysVideo,
"minispartan6": MiniSpartan6,
"pipistrello": Pipistrello,
"xcu1525": XCU1525,
"alveo_u280": AlveoU280,#ES1
"alveo_u250": AlveoU250,
"qmtech_wukong": Qmtech_WuKong,
"sds1104xe": SDS1104XE,
"mnt_rkx7": MNT_RKX7,
# Lattice
"versa_ecp5": VersaECP5,
"ulx3s": ULX3S,
"hadbadge": HADBadge,
"orangecrab": OrangeCrab,
"butterstick": ButterStick,
"camlink_4k": CamLink4K,
"trellisboard": TrellisBoard,
"ecpix5": ECPIX5,
"colorlight_i5": Colorlight_i5,
"icesugar_pro": IcesugarPro,
# Altera/Intel
"de0nano": De0Nano,
"de10nano": De10Nano,
"de1soc": De1SoC,
"qmtech_ep4ce15": Qmtech_EP4CE15,
"qmtech_ep4ce55": Qmtech_EP4CE55,
# Efinix
"trion_t120_bga576_dev_kit" : TrionT120BGA576DevKit,
"titanium_ti60_f225_dev_kit": TitaniumTi60F225DevKit,
}
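# Example invocations (sketch; assumes this script is invoked as make.py, as in
# the upstream Linux-on-LiteX-VexRiscv repository):
#   ./make.py --board=arty --build
#   ./make.py --board=arty --load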
def main():
description = "Linux on LiteX-VexRiscv\n\n"
description += "Available boards:\n"
for name in supported_boards.keys():
description += "- " + name + "\n"
parser = argparse.ArgumentParser(description=description, formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--board", required=True, help="FPGA board")
parser.add_argument("--device", default=None, help="FPGA device")
parser.add_argument("--variant", default=None, help="FPGA board variant")
parser.add_argument("--toolchain", default=None, help="Toolchain use to build")
parser.add_argument("--build", action="store_true", help="Build bitstream")
parser.add_argument("--load", action="store_true", help="Load bitstream (to SRAM)")
parser.add_argument("--flash", action="store_true", help="Flash bitstream/images (to SPI Flash)")
parser.add_argument("--doc", action="store_true", help="Build documentation")
parser.add_argument("--local-ip", default="192.168.1.50", help="Local IP address")
parser.add_argument("--remote-ip", default="192.168.1.100", help="Remote IP address of TFTP server")
parser.add_argument("--spi-data-width", type=int, default=8, help="SPI data width (maximum transfered bits per xfer)")
parser.add_argument("--spi-clk-freq", type=int, default=1e6, help="SPI clock frequency")
parser.add_argument("--fdtoverlays", default="", help="Device Tree Overlays to apply")
VexRiscvSMP.args_fill(parser)
args = parser.parse_args()
# Board(s) selection ---------------------------------------------------------------------------
if args.board == "all":
board_names = list(supported_boards.keys())
else:
args.board = args.board.lower()
args.board = args.board.replace(" ", "_")
board_names = [args.board]
# Board(s) iteration ---------------------------------------------------------------------------
for board_name in board_names:
board = supported_boards[board_name]()
        soc_kwargs = dict(Board.soc_kwargs) # copy so per-board updates don't leak into the class default
soc_kwargs.update(board.soc_kwargs)
# CPU parameters ---------------------------------------------------------------------------
# Do memory accesses through Wishbone and L2 cache when L2 size is configured.
args.with_wishbone_memory = soc_kwargs["l2_size"] != 0
VexRiscvSMP.args_read(args)
# SoC parameters ---------------------------------------------------------------------------
if args.device is not None:
soc_kwargs.update(device=args.device)
if args.variant is not None:
soc_kwargs.update(variant=args.variant)
if args.toolchain is not None:
soc_kwargs.update(toolchain=args.toolchain)
if "crossover" in board.soc_capabilities:
soc_kwargs.update(uart_name="crossover")
if "usb_fifo" in board.soc_capabilities:
soc_kwargs.update(uart_name="usb_fifo")
if "usb_acm" in board.soc_capabilities:
soc_kwargs.update(uart_name="usb_acm")
if "ethernet" in board.soc_capabilities:
soc_kwargs.update(with_ethernet=True)
if "sata" in board.soc_capabilities:
soc_kwargs.update(with_sata=True)
if "video_terminal" in board.soc_capabilities:
soc_kwargs.update(with_video_terminal=True)
if "framebuffer" in board.soc_capabilities:
soc_kwargs.update(with_video_framebuffer=True)
# SoC creation -----------------------------------------------------------------------------
soc = SoCLinux(board.soc_cls, **soc_kwargs)
board.platform = soc.platform
# SoC constants ----------------------------------------------------------------------------
for k, v in board.soc_constants.items():
soc.add_constant(k, v)
# SoC peripherals --------------------------------------------------------------------------
if board_name in ["arty", "arty_a7"]:
from litex_boards.platforms.arty import _sdcard_pmod_io
board.platform.add_extension(_sdcard_pmod_io)
if board_name in ["orangecrab"]:
from litex_boards.platforms.orangecrab import feather_i2c
board.platform.add_extension(feather_i2c)
if "mmcm" in board.soc_capabilities:
soc.add_mmcm(2)
if "spiflash" in board.soc_capabilities:
soc.add_spi_flash(mode="1x", module=board.spiflash, with_master=False)
if "spisdcard" in board.soc_capabilities:
soc.add_spi_sdcard()
if "sdcard" in board.soc_capabilities:
soc.add_sdcard()
if "ethernet" in board.soc_capabilities:
soc.configure_ethernet(local_ip=args.local_ip, remote_ip=args.remote_ip)
#if "leds" in board.soc_capabilities:
# soc.add_leds()
if "rgb_led" in board.soc_capabilities:
soc.add_rgb_led()
if "switches" in board.soc_capabilities:
soc.add_switches()
if "spi" in board.soc_capabilities:
soc.add_spi(args.spi_data_width, args.spi_clk_freq)
if "i2c" in board.soc_capabilities:
soc.add_i2c()
if "xadc" in board.soc_capabilities:
soc.add_xadc()
if "icap_bitstream" in board.soc_capabilities:
soc.add_icap_bitstream()
# Build ------------------------------------------------------------------------------------
build_dir = os.path.join("build", board_name)
builder = Builder(soc,
            output_dir   = build_dir,
bios_options = ["TERM_MINI"],
csr_json = os.path.join(build_dir, "csr.json"),
csr_csv = os.path.join(build_dir, "csr.csv")
)
builder.build(run=args.build, build_name=board_name)
# DTS --------------------------------------------------------------------------------------
soc.generate_dts(board_name)
soc.compile_dts(board_name, args.fdtoverlays)
# DTB --------------------------------------------------------------------------------------
soc.combine_dtb(board_name, args.fdtoverlays)
# PCIe Driver ------------------------------------------------------------------------------
if "pcie" in board.soc_capabilities:
from litepcie.software import generate_litepcie_software
generate_litepcie_software(soc, os.path.join(builder.output_dir, "driver"))
# Load FPGA bitstream ----------------------------------------------------------------------
if args.load:
board.load(filename=os.path.join(builder.gateware_dir, soc.build_name + board.bitstream_ext))
# Flash bitstream/images (to SPI Flash) ----------------------------------------------------
if args.flash:
if board_name == "acorn_pcie":
board.flash(filename=os.path.join(builder.gateware_dir, soc.build_name + "_fallback.bin"))
else:
board.flash(filename=os.path.join(builder.gateware_dir, soc.build_name + board.bitstream_ext))
# Generate SoC documentation ---------------------------------------------------------------
if args.doc:
soc.generate_doc(board_name)
if __name__ == "__main__":
main()
|
|
#!/usr/bin/python
# -*- coding: utf-8; mode: python; indent-tabs-mode: t; tab-width:4 -*-
"""
::
    This program loads calibration data from a directory, processes it, and loads it into a connected device.
    Not for regular users!
    Maybe don't include this in the main package.
"""
from __future__ import print_function
import templates.ui_testing as testing
import numpy as np
from PyQt4 import QtGui,QtCore
import pyqtgraph as pg
import sys,functools,os,random,struct,time,string
class AppWindow(QtGui.QMainWindow, testing.Ui_MainWindow):
RESISTANCE_ERROR = 80
CCS_ERROR = 150 #150 ohms
CAPACITANCE_ERROR = 30e-12 #30pF
def __init__(self, parent=None,**kwargs):
super(AppWindow, self).__init__(parent)
self.setupUi(self)
try:
import scipy.optimize as optimize
except ImportError:
self.optimize = None
else:
self.optimize = optimize
try:
import scipy.fftpack as fftpack
except ImportError:
self.fftpack = None
else:
self.fftpack = fftpack
self.I=kwargs.get('I',None)
if not self.I.calibrated: QtGui.QMessageBox.about(self,'Error','Device Not Calibrated. Check.')
self.I.set_wave(1e3) #1KHz test
self.I.select_range('A1',8)
self.I.select_range('A2',8)
cap_and_pcs=self.I.read_bulk_flash(self.I.CAP_AND_PCS,5+8*4) #READY+calibration_string
if cap_and_pcs[:5]=='READY':
self.scalers = list(struct.unpack('8f',cap_and_pcs[5:]))
else:
#self.displayDialog('Cap and PCS calibration invalid')
self.scalers = [self.I.SOCKET_CAPACITANCE,1,1,1,1,1,1,1] #socket cap , C0,C1,C2,C3,PCS,SEN,CRRC
from expeyes.analyticsClass import analyticsClass
if self.I.timestamp:
self.setWindowTitle(self.I.generic_name + ' : '+self.I.H.version_string.decode("utf-8")+' : '+self.I.timestamp)
else:
self.setWindowTitle(self.I.generic_name + ' : Uncalibrated')
for a in range(50):
for b in range(3):
item = QtGui.QTableWidgetItem();self.tbl.setItem(a,b,item); item.setText('')
self.group1size = 4
self.group2size = self.group1size+2
self.tests = [
#group 1 begins
['CAP330',330e-12,self.CAP],
['WG-A1',1e3,self.WGA1],
['SQR1-IN2',5e3,self.SQR1IN2],
['SEN',1e3,self.SEN],
#['CCS',0.9976e3,self.CCS],
#group 2 begins
['SQR2-IN2',10e3,self.SQR2IN2],
['CAP 1uF',1e-6,self.CAP1uF],
        #group 0 begins. This is automatically executed on startup
['I2C scan',[],self.I2CScan],
['CAP_SOCK',42e-12,self.CAP_SOCK],
['A1[1x]','Calibration',self.A1CAL1],
['A1[32x]','Calibration',self.A1CAL],
['A2[1x]','Calibration',self.A2CAL1],
['A2[32x]','Calibration',self.A2CAL],
]
self.tbl.setVerticalHeaderLabels([row[0] for row in self.tests])
self.tbl.setHorizontalHeaderLabels(['Expected','read','','More'])
self.tbl.setColumnWidth(0, 90)
self.tbl.setColumnWidth(1, 120)
self.tbl.setColumnWidth(2, 100)
self.tbl.setColumnWidth(3, 80)
#Nominal values for calibration constants
self.CCS_SCALING=1
self.socket_cap = 0
self.RESISTANCE_SCALING=1
self.CR0=1;self.CR1=1;self.CR2=1;self.CR3=1
self.CRRC = 1.
self.G0Tests = {}
self.G1Tests = {}
self.G2Tests = {}
for n in range(len(self.tests)) :
self.tbl.item(n,0).setText(str(self.tests[n][1]))
################# make readback buttons ##############
fn = functools.partial(self.tests[n][2],n)
if n<self.group1size: self.G1Tests[self.tests[n][0]]=(fn)
elif n<self.group2size: self.G2Tests[self.tests[n][0]]=(fn)
else: self.G0Tests[self.tests[n][0]]=(fn)
item = QtGui.QPushButton();item.setText('test'); item.clicked.connect(fn)
self.tbl.setCellWidget(n, 2, item)
if len(self.tests[n])==4:
fn = functools.partial(self.tests[n][3],n)
item = QtGui.QPushButton();item.setText('Recal'); item.clicked.connect(fn)
self.tbl.setCellWidget(n, 3, item)
self.DACPLOT=pg.PlotWidget()
self.plot_area.addWidget(self.DACPLOT)
self.WPLOT=pg.PlotWidget()
self.plot_area.addWidget(self.WPLOT)
labelStyle = {'color': 'rgb(255,255,255)', 'font-size': '11pt'}
self.DACPLOT.setLabel('left','Error -->', units='V',**labelStyle)
self.DACPLOT.setLabel('bottom','Actual Voltage -->', units='V',**labelStyle)
self.DACPLOT.setYRange(-.02,.02)
self.WPLOT.setLabel('left','Voltage -->', units='V',**labelStyle)
        self.WPLOT.setLabel('bottom','time -->', units='s',**labelStyle)
self.WPLOT.setYRange(-3.3,3.3)
self.DacCurves={}
#self.rebuildLegend(self.DACPLOT)
for a in self.I.DAC.CHANS:
self.DacCurves[a] = self.addCurve(self.DACPLOT,a)
self.p1 = self.addCurve(self.DACPLOT,'tmp')
#self.rebuildLegend(self.WPLOT)
self.WCurve = self.addCurve(self.WPLOT,'WG')
self.eval_init()
def addCurve(self,plot,name,col=(255,0,0)):
C=pg.PlotCurveItem(name = name,pen = col)
plot.addItem(C)
return C
def setSuccess(self,item,val):
if val : item.setBackground(QtCore.Qt.green)
else:item.setBackground(QtCore.Qt.red)
def A1CAL1(self,row):
item = self.tbl.item(row,1)
source = self.I.analogInputSources['A1']
item.setText('%.2f'%(source.calibrationCorrection[0]))
if not source.calibrationError:
self.setSuccess(item,1)
else:
self.setSuccess(item,0)
def A1CAL(self,row):
item = self.tbl.item(row,1)
source = self.I.analogInputSources['A1']
item.setText('%.2f'%(source.calibrationCorrection[7]))
if not source.calibrationError:
self.setSuccess(item,1)
else:
self.setSuccess(item,0)
def A2CAL1(self,row):
item = self.tbl.item(row,1)
source = self.I.analogInputSources['A2']
item.setText('%.2f'%(source.calibrationCorrection[0]))
if not source.calibrationError:
self.setSuccess(item,1)
else:
self.setSuccess(item,0)
def A2CAL(self,row):
item = self.tbl.item(row,1)
source = self.I.analogInputSources['A2']
item.setText('%.2f'%(source.calibrationCorrection[7]))
if not source.calibrationError:
self.setSuccess(item,1)
else:
self.setSuccess(item,0)
def I2CScan(self,row):
res = self.I.I2C.scan()
item = self.tbl.item(row,1)
item.setText(str(res))
self.setSuccess(item,1)
def SQR1IN2(self,row):
frq = float(self.tbl.item(row,0).text() )
self.I.set_sqr1(frq)
res = self.I.get_freq('IN2',0.2)
item = self.tbl.item(row,1)
try:
item.setText('%.3e'%res)
if abs(res-frq)<20: self.setSuccess(item,1)
else: self.setSuccess(item,0)
except Exception as e:
print (e)
item.setText('failed'); self.setSuccess(item,0)
def SQR2IN2(self,row):
frq = float(self.tbl.item(row,0).text() )
self.I.set_sqr2(frq)
res = self.I.get_freq('IN2',0.2)
item = self.tbl.item(row,1)
try:
item.setText('%.3e'%res)
if abs(res-frq)<20: self.setSuccess(item,1)
else: self.setSuccess(item,0)
except Exception as e:
print (e)
item.setText('failed'); self.setSuccess(item,0)
def eval_init(self):
for a in self.G0Tests: self.G0Tests[a]()
def eval1(self):
for a in self.G1Tests: self.G1Tests[a]()
def eval2(self):
for a in self.G2Tests: self.G2Tests[a]()
def SEN(self,row):
res = self.I.get_resistance()
item = self.tbl.item(row,1)
if res!=np.inf:
item.setText(pg.siFormat(res, precision=3, suffix=u"\u03A9", space=True, error=None, minVal=1e-25, allowUnicode=True))
actual = float(self.tbl.item(row,0).text() )
#print (res, actual)
if abs(res-actual)<self.RESISTANCE_ERROR :
self.setSuccess(item,1) #resistance within error margins
self.RESISTANCE_SCALING = actual/res
else :
self.setSuccess(item,0)
else:
item.setText('Open')
self.setSuccess(item,0)
def CCS(self,row):
self.I.set_state(CCS=1)
time.sleep(0.1)
V = self.I.get_voltage('CCS')
        item = self.tbl.item(row,1)   # fetch the result cell up front so the 'Open' path can use it
        if V<2.5:
res = V/1.1e-3 #assume 1.1mA
print (V,res)
item.setText(pg.siFormat(res, precision=3, suffix=u"\u03A9", space=True, error=None, minVal=1e-25, allowUnicode=True))
actual = float(self.tbl.item(row,0).text() )
if abs(res-actual)<self.CCS_ERROR :
self.setSuccess(item,1) #resistance within error margins
self.CCS_SCALING = actual/res
else :
self.setSuccess(item,0)
else:
item.setText('Open')
self.setSuccess(item,0)
def get_capacitance(self,CR): #read capacitance using various current ranges
GOOD_VOLTS=[2.5,2.8]
CT=10
iterations = 0
start_time=time.time()
try:
while (time.time()-start_time)<1:
if CT>65000:
QtGui.QMessageBox.about(self,'Cap error','CT too high')
return 0
V,C = self.I.__get_capacitance__(CR,0,CT)
if V>GOOD_VOLTS[0] and V<GOOD_VOLTS[1]:
print ('Done',V,C)
return C
elif CT>30000 and V<0.1:
QtGui.QMessageBox.about(self,'Cap Error','Capacitance too high for this method')
return 0
elif V<GOOD_VOLTS[0] and V>0.01 and CT<30000:
if GOOD_VOLTS[0]/V >1.1 and iterations<10:
CT=int(CT*GOOD_VOLTS[0]/V)
iterations+=1
elif iterations==10:
return 0
else:
print ('Done',V,C,CT)
return C
except Exception as ex:
QtGui.QMessageBox.about(self,'error',ex.message)
def CAP_SOCK(self,row):
#cap = self.I.get_capacitance()
V = self.I.__get_capacitance_voltage__(1,0, 180)
Charge_Current = self.I.currents[1]
cap = (Charge_Current*180*1e-6/V )/self.I.currentScalers[1]
self.I.SOCKET_CAPACITANCE = cap
#print (V,cap)
item = self.tbl.item(row,1)
item.setText(pg.siFormat(cap, precision=3, suffix='F', space=True, minVal=1e-25, allowUnicode=True))
if abs(cap-float(self.tbl.item(row,0).text() ))<self.CAPACITANCE_ERROR :
self.setSuccess(item,1) #capacitance within error margins
self.socket_cap = cap
else : self.setSuccess(item,0)
    def CAP(self,row):
        actual = float(self.tbl.item(row,0).text() )
        item = self.tbl.item(row,1)   # fetch the result cell up front so failure paths can use it
        cap1 = self.get_capacitance(1)
        self.I.__charge_cap__(0,50000)
        cap2 = self.get_capacitance(2)
        if cap1 and cap2:
            item.setText('%s,%s'%(pg.siFormat(cap1, precision=4, suffix='F', space=True),pg.siFormat(cap2, precision=3, suffix='F', space=True)))
            self.CR1 = cap1/actual
            self.CR2 = cap2/actual
        else:
            QtGui.QMessageBox.about(self,'Cap error',"Capacitance invalid. \nIf a %sF capacitor is plugged correctly into CAP socket, this may be an issue."%actual)
        if cap1 and abs(cap1-actual)<self.CAPACITANCE_ERROR : self.setSuccess(item,1) #capacitance within error margins
        else :
            self.setSuccess(item,0)
            QtGui.QMessageBox.about(self,'Cap error',"Capacitance invalid. \nIf a %sF capacitor is plugged correctly into CAP socket, this may be an issue."%actual)
    def CAP1uF(self,row):
        actual = float(self.tbl.item(row,0).text() )
        item = self.tbl.item(row,1)   # fetch the result cell up front so the failure path can use it
        cap = self.I.capacitance_via_RC_discharge()
        if cap:
            item.setText('%s'%(pg.siFormat(cap, precision=4, suffix='F', space=True)))
            self.CRRC = actual/cap
        else:
            QtGui.QMessageBox.about(self,'Cap error',"Capacitance invalid. \nIf a 1uF capacitor is plugged correctly into CAP socket, this may be an issue.")
        if cap and abs(cap-actual)<0.1e-6: self.setSuccess(item,1) #capacitance within error margins
        else : self.setSuccess(item,0)
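    # __PVA__ (below) sweeps a DAC channel over rng=(start,stop,points), reads each
    # setpoint back on the named ADC, plots the error curve, and flags the row if
    # any point deviates by more than 20 mV.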
def __PVA__(self,DAC,ADC,row,rng):
actuals=[];read=[]
self.I.DAC.setVoltage(DAC,rng[0])
time.sleep(.1)
for a in np.linspace(*rng):
actuals.append( self.I.DAC.setVoltage(DAC,a) )
time.sleep(0.02)
read.append (self.I.get_average_voltage(ADC,samples=5) )
read = np.array(read)
actuals = np.array(actuals)
self.DacCurves[DAC].setData(actuals,read-actuals)
self.tbl.item(row,0).setText(string.join(['%.3f'%a for a in actuals],' '))
self.tbl.item(row,1).setText(string.join(['%.3f'%a for a in read-actuals],' '))
if np.any(abs(read-actuals)>20e-3):self.setSuccess(self.tbl.item(row,1),0)
else: self.setSuccess(self.tbl.item(row,1),1)
def PV1A1(self,row):
self.__PVA__('PV1','A1',row,[-4,4,20])
def PV2A2(self,row):
self.__PVA__('PV2','A2',row,[-2.5,2.5,20])
def __WA__(self,ADC,row):
self.I.set_wave(1e3) #1KHz test
x,y = self.I.capture1(ADC,1000,5)#get about five cycles
self.WCurve.setData(x,y)
self.tbl.item(row,0).setText('1 KHz')
try:
amp,frq,ph,off = self.sineFit(x,y)
self.tbl.item(row,1).setText(pg.siFormat(frq, precision=3, suffix='Hz', space=True)+','+pg.siFormat(amp, precision=3, suffix='V', space=True))
if abs(frq-1e3)>2:
self.setSuccess(self.tbl.item(row,1),0)
#print(frq)
else: self.setSuccess(self.tbl.item(row,1),1)
except:
self.tbl.item(row,1).setText('Check Connections')
self.setSuccess(self.tbl.item(row,1),0)
def WGA1(self,row):
self.__WA__('A1',row)
def correct(self):
self.scalers[0] = self.socket_cap
self.scalers[1] *= self.CR0;self.scalers[2]*=self.CR1;self.scalers[3]*=self.CR2;self.scalers[4]*=self.CR3;
self.scalers[5] *= self.CCS_SCALING #slope
self.scalers[6] *= self.RESISTANCE_SCALING
self.scalers[7] *= self.CRRC
#QtGui.QMessageBox.about(self,'info','loading %s\nPCS SCALING:%s\nCR0 : %.3f\nCR1 : %.3f\nCR2 : %.3f\nCR3 : %.3f\nRES : %.3f\nCap RC : %.3f\n'%(self.scalers,self.CCS_SCALING,self.CR0,self.CR1,self.CR2,self.CR3,self.RESISTANCE_SCALING,self.CRRC))
cap_and_pcs=self.I.write_bulk_flash(self.I.CAP_AND_PCS,self.I.__stoa__('READY'+struct.pack('8f',*self.scalers))) #READY+calibration_string
self.I.SOCKET_CAPACITANCE = self.scalers[0]
self.I.__calibrate_ctmu__(self.scalers[1:5])
self.I.resistanceScaling = self.scalers[6]
self.I.CAP_RC_SCALING = self.scalers[7]
#self.G2Tests['SEN']()
#self.G2Tests['PCS-CH3']()
def __del__(self):
print ('bye')
def closeEvent(self, evnt):
evnt.ignore()
self.askBeforeQuit()
def askBeforeQuit(self):
global app
reply = QtGui.QMessageBox.question(self, 'Warning', 'Save and Quit?\n\nloading %s\nPCS SCALING:%s\nCR0 : %.3f\nCR1 : %.3f\nCR2 : %.3f\nCR3 : %.3f\nRES : %.3f\nCap RC : %.3f\n'%(self.scalers,self.CCS_SCALING,self.CR0,self.CR1,self.CR2,self.CR3,self.RESISTANCE_SCALING,self.CRRC), QtGui.QMessageBox.No, QtGui.QMessageBox.Yes)
if reply == QtGui.QMessageBox.Yes:
self.running =False
self.finished=True
self.save()
app.quit()
else:
self.running =False
self.finished=True
print ('Did not save/upload calibration')
app.quit()
def save(self):
self.correct()
p = QtGui.QPixmap.grabWindow(self.tab1.winId())
from os.path import expanduser
home = expanduser("~")
path = os.path.join(home,'test '+self.I.timestamp+'.png')
p.save(path)
#QtGui.QMessageBox.about(self,'saved to ',path)
print ('saved to ',path)
def sineFunc(self,x, a1, a2, a3,a4):
return a4 + a1*np.sin(abs(a2*(2*np.pi))*x + a3)
def sineFit(self,xReal,yReal,**kwargs):
N=len(xReal)
xReal*=1e3 #convert mS to uS
OFFSET = (yReal.max()+yReal.min())/2.
yhat = self.fftpack.rfft(yReal-OFFSET)
idx = (yhat**2).argmax()
freqs = self.fftpack.rfftfreq(N, d = (xReal[1]-xReal[0])/(2*np.pi))
frequency = kwargs.get('freq',freqs[idx])
frequency/=(2*np.pi) #Convert angular velocity to freq
amplitude = kwargs.get('amp',(yReal.max()-yReal.min())/2.0)
phase=kwargs.get('phase',0) #.5*np.pi*((yReal[0]-offset)/amplitude)
guess = [amplitude, frequency, phase,0]
try:
(amplitude, frequency, phase,offset), pcov = self.optimize.curve_fit(self.sineFunc, xReal, yReal-OFFSET, guess)
offset+=OFFSET
ph = ((phase)*180/(np.pi))
if(frequency<0):
#print ('negative frq')
return False
if(amplitude<0):
ph-=180
if(ph<0):ph = (ph+720)%360
freq=1e6*abs(frequency)
amp=abs(amplitude)
pcov[0]*=1e6
#print (pcov)
            if(abs(pcov[-1][0])>1e-6):
                return False
return [amp, freq, offset,ph]
except:
return False
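    # Example (sketch): fit a captured waveform; sineFit returns
    # [amplitude, frequency, offset, phase] on success, or False on a bad fit.
    #   res = self.sineFit(x, y)
    #   if res: amp, freq, off, ph = res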
if __name__ == "__main__":
import expeyes.eyes17 as eyes
app = QtGui.QApplication(sys.argv)
myapp = AppWindow(I=eyes.open(verbose=True))
myapp.show()
sys.exit(app.exec_())
|
|
"""
(C) 2014-2019 Roman Sirokov and contributors
Licensed under BSD license
http://github.com/r0x0r/pywebview/
"""
import json
import logging
import webbrowser
import ctypes
from threading import Event, Semaphore
import Foundation
import AppKit
import WebKit
from PyObjCTools import AppHelper
from objc import _objc, nil, super, registerMetaDataForSelector
from webview import _debug, _user_agent, OPEN_DIALOG, FOLDER_DIALOG, SAVE_DIALOG, parse_file_type, windows
from webview.util import parse_api_js, default_html, js_bridge_call
from webview.js.css import disable_text_select
from webview.screen import Screen
from webview.window import FixPoint
settings = {}
# These lines allow loading of non-HTTPS resources, e.g. a local app served at http://127.0.0.1:5000
bundle = AppKit.NSBundle.mainBundle()
info = bundle.localizedInfoDictionary() or bundle.infoDictionary()
info['NSAppTransportSecurity'] = {'NSAllowsArbitraryLoads': Foundation.YES}
info['NSRequiresAquaSystemAppearance'] = Foundation.NO # Enable dark mode support for Mojave
# Dynamic library required by BrowserView.pyobjc_method_signature()
_objc_so = ctypes.cdll.LoadLibrary(_objc.__file__)
# Bridgesupport metadata for [WKWebView evaluateJavaScript:completionHandler:]
_eval_js_metadata = { 'arguments': { 3: { 'callable': { 'retval': { 'type': b'v' },
'arguments': { 0: { 'type': b'^v' }, 1: { 'type': b'@' }, 2: { 'type': b'@' }}}}}}
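# The metadata above is presumably registered via registerMetaDataForSelector()
# (imported above) before evaluateJavaScript:completionHandler: is first invoked,
# so PyObjC can marshal the completion-handler block; that call is not shown in
# this excerpt.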
# Fallbacks, in case these constants are not wrapped by PyObjC
try:
NSFullSizeContentViewWindowMask = AppKit.NSFullSizeContentViewWindowMask
except AttributeError:
NSFullSizeContentViewWindowMask = 1 << 15
try:
NSWindowTitleHidden = AppKit.NSWindowTitleHidden
except AttributeError:
NSWindowTitleHidden = 1
logger = logging.getLogger('pywebview')
logger.debug('Using Cocoa')
renderer = 'wkwebview'
class BrowserView:
instances = {}
app = AppKit.NSApplication.sharedApplication()
cascade_loc = Foundation.NSMakePoint(100.0, 0.0)
class AppDelegate(AppKit.NSObject):
def applicationShouldTerminate_(self, app):
for i in BrowserView.instances.values():
i.closing.set()
return Foundation.YES
class WindowDelegate(AppKit.NSObject):
def windowShouldClose_(self, window):
i = BrowserView.get_instance('window', window)
return BrowserView.should_close(i)
def windowWillClose_(self, notification):
# Delete the closed instance from the dict
i = BrowserView.get_instance('window', notification.object())
del BrowserView.instances[i.uid]
if i.pywebview_window in windows:
windows.remove(i.pywebview_window)
i.closed.set()
if BrowserView.instances == {}:
BrowserView.app.stop_(self)
def windowDidResize_(self, notification):
i = BrowserView.get_instance('window', notification.object())
size = i.window.frame().size
i.pywebview_window.events.resized.set(size.width, size.height)
def windowDidMiniaturize_(self, notification):
i = BrowserView.get_instance('window', notification.object())
i.pywebview_window.events.minimized.set()
def windowDidDeminiaturize_(self, notification):
i = BrowserView.get_instance('window', notification.object())
i.pywebview_window.events.restored.set()
def windowDidEnterFullScreen_(self, notification):
i = BrowserView.get_instance('window', notification.object())
i.pywebview_window.events.maximized.set()
def windowDidExitFullScreen_(self, notification):
i = BrowserView.get_instance('window', notification.object())
i.pywebview_window.events.restored.set()
class JSBridge(AppKit.NSObject):
def initWithObject_(self, window):
super(BrowserView.JSBridge, self).init()
self.window = window
return self
def userContentController_didReceiveScriptMessage_(self, controller, message):
func_name, param, value_id = json.loads(message.body())
if param is WebKit.WebUndefined.undefined():
param = None
js_bridge_call(self.window, func_name, param, value_id)
class BrowserDelegate(AppKit.NSObject):
# Display a JavaScript alert panel containing the specified message
def webView_runJavaScriptAlertPanelWithMessage_initiatedByFrame_completionHandler_(self, webview, message, frame, handler):
AppKit.NSRunningApplication.currentApplication().activateWithOptions_(AppKit.NSApplicationActivateIgnoringOtherApps)
alert = AppKit.NSAlert.alloc().init()
alert.setInformativeText_(message)
alert.runModal()
if not handler.__block_signature__:
handler.__block_signature__ = BrowserView.pyobjc_method_signature(b'v@')
handler()
# Display a JavaScript confirm panel containing the specified message
def webView_runJavaScriptConfirmPanelWithMessage_initiatedByFrame_completionHandler_(self, webview, message, frame, handler):
i = BrowserView.get_instance('webkit', webview)
ok = i.localization['global.ok']
cancel = i.localization['global.cancel']
if not handler.__block_signature__:
handler.__block_signature__ = BrowserView.pyobjc_method_signature(b'v@B')
if BrowserView.display_confirmation_dialog(ok, cancel, message):
handler(Foundation.YES)
else:
handler(Foundation.NO)
# Display an open panel for <input type="file"> element
def webView_runOpenPanelWithParameters_initiatedByFrame_completionHandler_(self, webview, param, frame, handler):
i = list(BrowserView.instances.values())[0]
files = i.create_file_dialog(OPEN_DIALOG, '', param.allowsMultipleSelection(), '', [], main_thread=True)
if not handler.__block_signature__:
handler.__block_signature__ = BrowserView.pyobjc_method_signature(b'v@@')
if files:
urls = [Foundation.NSURL.fileURLWithPath_(BrowserView.quote(i)) for i in files]
handler(urls)
else:
handler(nil)
# Open target="_blank" links in external browser
def webView_createWebViewWithConfiguration_forNavigationAction_windowFeatures_(self, webview, config, action, features):
if action.navigationType() == getattr(WebKit, 'WKNavigationTypeLinkActivated', 0):
webbrowser.open(action.request().URL().absoluteString(), 2, True)
return nil
# WKNavigationDelegate method, invoked when a navigation decision needs to be made
def webView_decidePolicyForNavigationAction_decisionHandler_(self, webview, action, handler):
# The event that might have triggered the navigation
event = AppKit.NSApp.currentEvent()
if not handler.__block_signature__:
handler.__block_signature__ = BrowserView.pyobjc_method_signature(b'v@i')
""" Disable back navigation on pressing the Delete key: """
# Check if the requested navigation action is Back/Forward
if action.navigationType() == getattr(WebKit, 'WKNavigationTypeBackForward', 2):
# Check if the event is a Delete key press (keyCode = 51)
if event and event.type() == AppKit.NSKeyDown and event.keyCode() == 51:
# If so, ignore the request and return
handler(getattr(WebKit, 'WKNavigationActionPolicyCancel', 0))
return
# Normal navigation, allow
handler(getattr(WebKit, 'WKNavigationActionPolicyAllow', 1))
# Show the webview when it finishes loading
def webView_didFinishNavigation_(self, webview, nav):
# Add the webview to the window if it's not yet the contentView
i = BrowserView.get_instance('webkit', webview)
if i:
if not webview.window():
i.window.setContentView_(webview)
i.window.makeFirstResponder_(webview)
script = parse_api_js(i.js_bridge.window, 'cocoa')
i.webkit.evaluateJavaScript_completionHandler_(script, lambda a,b: None)
if not i.text_select:
i.webkit.evaluateJavaScript_completionHandler_(disable_text_select, lambda a,b: None)
print_hook = 'window.print = function() { window.webkit.messageHandlers.browserDelegate.postMessage("print") };'
i.webkit.evaluateJavaScript_completionHandler_(print_hook, lambda a,b: None)
i.loaded.set()
# Handle JavaScript window.print()
def userContentController_didReceiveScriptMessage_(self, controller, message):
if message.body() == 'print':
i = BrowserView.get_instance('_browserDelegate', self)
BrowserView.print_webview(i.webkit)
class FileFilterChooser(AppKit.NSPopUpButton):
def initWithFilter_(self, file_filter):
super(BrowserView.FileFilterChooser, self).init()
self.filter = file_filter
self.addItemsWithTitles_([i[0] for i in self.filter])
self.setAction_('onChange:')
self.setTarget_(self)
return self
def onChange_(self, sender):
option = sender.indexOfSelectedItem()
self.window().setAllowedFileTypes_(self.filter[option][1])
class WebKitHost(WebKit.WKWebView):
def mouseDown_(self, event):
i = BrowserView.get_instance('webkit', self)
window = self.window()
if i.frameless and i.easy_drag:
windowFrame = window.frame()
if windowFrame is None:
raise RuntimeError('Failed to obtain screen')
self.initialLocation = window.convertBaseToScreen_(event.locationInWindow())
self.initialLocation.x -= windowFrame.origin.x
self.initialLocation.y -= windowFrame.origin.y
super(BrowserView.WebKitHost, self).mouseDown_(event)
def mouseDragged_(self, event):
i = BrowserView.get_instance('webkit', self)
window = self.window()
if i.frameless and i.easy_drag:
screenFrame = AppKit.NSScreen.mainScreen().frame()
if screenFrame is None:
raise RuntimeError('Failed to obtain screen')
windowFrame = window.frame()
if windowFrame is None:
raise RuntimeError('Failed to obtain frame')
currentLocation = window.convertBaseToScreen_(window.mouseLocationOutsideOfEventStream())
newOrigin = AppKit.NSMakePoint((currentLocation.x - self.initialLocation.x),
(currentLocation.y - self.initialLocation.y))
# Don't let the window be dragged above the top of the visible screen area
if (newOrigin.y + windowFrame.size.height) > \
(screenFrame.origin.y + screenFrame.size.height):
newOrigin.y = screenFrame.origin.y + \
(screenFrame.size.height - windowFrame.size.height)
window.setFrameOrigin_(newOrigin)
if event.modifierFlags() & getattr(AppKit, 'NSEventModifierFlagControl', 1 << 18):
i = BrowserView.get_instance('webkit', self)
if not _debug['mode']:
return
super(BrowserView.WebKitHost, self).mouseDown_(event)
def rightMouseDown_(self, event):
i = BrowserView.get_instance('webkit', self)
if _debug['mode']:
super(BrowserView.WebKitHost, self).rightMouseDown_(event)
def performKeyEquivalent_(self, theEvent):
"""
Handle common hotkey shortcuts as copy/cut/paste/undo/select all/quit
:param theEvent:
:return:
"""
# Fix arrow keys not responding in text inputs
keyCode_ = theEvent.keyCode()
UP, DOWN, LEFT, RIGHT, DELETE, PG_DWN, PG_UP = 126, 125, 123, 124, 117, 121, 116
if keyCode_ in (UP, DOWN, LEFT, RIGHT, DELETE, PG_DWN, PG_UP):
return False
if theEvent.type() == AppKit.NSKeyDown and theEvent.modifierFlags() & AppKit.NSCommandKeyMask:
responder = self.window().firstResponder()
keyCode = theEvent.keyCode()
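# The key codes below are macOS ANSI-layout virtual key codes (Events.h):
# 0 = A, 6 = Z, 7 = X, 8 = C, 9 = V, 12 = Q, 13 = W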
if responder is not None:
handled = False
range_ = responder.selectedRange()
hasSelectedText = len(range_) > 0
if keyCode == 7 and hasSelectedText : #cut
responder.cut_(self)
handled = True
elif keyCode == 8 and hasSelectedText: #copy
responder.copy_(self)
handled = True
elif keyCode == 9: # paste
responder.paste_(self)
handled = True
elif keyCode == 0: # select all
responder.selectAll_(self)
handled = True
elif keyCode == 6: # undo
if responder.undoManager().canUndo():
responder.undoManager().undo()
handled = True
elif keyCode == 12: # quit
BrowserView.app.stop_(self)
elif keyCode == 13: # w (close)
self.window().performClose_(theEvent)
handled = True
return handled
return True
def __init__(self, window):
BrowserView.instances[window.uid] = self
self.uid = window.uid
self.pywebview_window = window
self.js_bridge = None
self._file_name = None
self._file_name_semaphore = Semaphore(0)
self._current_url_semaphore = Semaphore(0)
self.closed = window.events.closed
self.closing = window.events.closing
self.shown = window.events.shown
self.loaded = window.events.loaded
self.confirm_close = window.confirm_close
self.title = window.title
self.text_select = window.text_select
self.is_fullscreen = False
self.hidden = window.hidden
self.minimized = window.minimized
self.localization = window.localization
rect = AppKit.NSMakeRect(0.0, 0.0, window.initial_width, window.initial_height)
window_mask = AppKit.NSTitledWindowMask | AppKit.NSClosableWindowMask | AppKit.NSMiniaturizableWindowMask
if window.resizable:
window_mask = window_mask | AppKit.NSResizableWindowMask
if window.frameless:
window_mask = window_mask | NSFullSizeContentViewWindowMask | AppKit.NSTexturedBackgroundWindowMask
# The allocated resources are retained because this instance is deleted
# explicitly when its window is closed
self.window = AppKit.NSWindow.alloc().\
initWithContentRect_styleMask_backing_defer_(rect, window_mask, AppKit.NSBackingStoreBuffered, False).retain()
self.window.setTitle_(window.title)
self.window.setMinSize_(AppKit.NSSize(window.min_size[0], window.min_size[1]))
self.window.setAnimationBehavior_(AppKit.NSWindowAnimationBehaviorDocumentWindow)
BrowserView.cascade_loc = self.window.cascadeTopLeftFromPoint_(BrowserView.cascade_loc)
frame = self.window.frame()
frame.size.width = window.initial_width
frame.size.height = window.initial_height
self.window.setFrame_display_(frame, True)
self.webkit = BrowserView.WebKitHost.alloc().initWithFrame_(rect).retain()
user_agent = settings.get('user_agent') or _user_agent
if user_agent:
self.webkit.setCustomUserAgent_(user_agent)
if window.initial_x is not None and window.initial_y is not None:
self.move(window.initial_x, window.initial_y)
else:
self.window.center()
if window.transparent:
self.window.setOpaque_(False)
self.window.setHasShadow_(False)
self.window.setBackgroundColor_(BrowserView.nscolor_from_hex(window.background_color, 0))
self.webkit.setValue_forKey_(True, 'drawsTransparentBackground')
else:
self.window.setBackgroundColor_(BrowserView.nscolor_from_hex(window.background_color))
self._browserDelegate = BrowserView.BrowserDelegate.alloc().init().retain()
self._windowDelegate = BrowserView.WindowDelegate.alloc().init().retain()
self._appDelegate = BrowserView.AppDelegate.alloc().init().retain()
BrowserView.app.setDelegate_(self._appDelegate)
self.webkit.setUIDelegate_(self._browserDelegate)
self.webkit.setNavigationDelegate_(self._browserDelegate)
self.window.setDelegate_(self._windowDelegate)
self.frameless = window.frameless
self.easy_drag = window.easy_drag
if window.frameless:
# Make content full size and titlebar transparent
self.window.setTitlebarAppearsTransparent_(True)
self.window.setTitleVisibility_(NSWindowTitleHidden)
self.window.standardWindowButton_(AppKit.NSWindowCloseButton).setHidden_(True)
self.window.standardWindowButton_(AppKit.NSWindowMiniaturizeButton).setHidden_(True)
self.window.standardWindowButton_(AppKit.NSWindowZoomButton).setHidden_(True)
else:
# Set the titlebar color (so that it does not change with the window color)
self.window.contentView().superview().subviews().lastObject().setBackgroundColor_(AppKit.NSColor.windowBackgroundColor())
if window.on_top:
self.window.setLevel_(AppKit.NSStatusWindowLevel)
try:
self.webkit.evaluateJavaScript_completionHandler_('', lambda a, b: None)
except TypeError:
registerMetaDataForSelector(b'WKWebView', b'evaluateJavaScript:completionHandler:', _eval_js_metadata)
config = self.webkit.configuration()
config.userContentController().addScriptMessageHandler_name_(self._browserDelegate, 'browserDelegate')
try:
config.preferences().setValue_forKey_(Foundation.NO, 'backspaceKeyNavigationEnabled')
except:
pass
if _debug['mode']:
config.preferences().setValue_forKey_(Foundation.YES, 'developerExtrasEnabled')
self.js_bridge = BrowserView.JSBridge.alloc().initWithObject_(window)
config.userContentController().addScriptMessageHandler_name_(self.js_bridge, 'jsBridge')
if window.real_url:
self.url = window.real_url
self.load_url(window.real_url)
elif window.html:
self.load_html(window.html, '')
else:
self.load_html(default_html, '')
if window.fullscreen:
self.toggle_fullscreen()
self.shown.set()
def first_show(self):
if not self.hidden:
self.window.makeKeyAndOrderFront_(self.window)
else:
self.hidden = False
if self.minimized:
self.minimize()
if not BrowserView.app.isRunning():
# Add the default Cocoa application menu
self._add_app_menu()
self._add_view_menu()
BrowserView.app.activateIgnoringOtherApps_(Foundation.YES)
AppHelper.installMachInterrupt()
BrowserView.app.run()
def show(self):
def _show():
self.window.makeKeyAndOrderFront_(self.window)
AppHelper.callAfter(_show)
def hide(self):
def _hide():
self.window.orderOut_(self.window)
AppHelper.callAfter(_hide)
def destroy(self):
AppHelper.callAfter(self.window.close)
def set_title(self, title):
def _set_title():
self.window.setTitle_(title)
AppHelper.callAfter(_set_title)
def toggle_fullscreen(self):
def toggle():
if self.is_fullscreen:
window_behaviour = 1 << 2 # NSWindowCollectionBehaviorManaged
else:
window_behaviour = 1 << 7 # NSWindowCollectionBehaviorFullScreenPrimary
self.window.setCollectionBehavior_(window_behaviour)
self.window.toggleFullScreen_(None)
AppHelper.callAfter(toggle)
self.is_fullscreen = not self.is_fullscreen
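# Note that the flag flips immediately on the calling thread, while the
# actual fullscreen toggle above runs asynchronously on the main thread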
def resize(self, width, height, fix_point):
def _resize():
frame = self.window.frame()
if fix_point & FixPoint.EAST:
# Keep the right of the window in the same place
frame.origin.x += frame.size.width - width
if fix_point & FixPoint.NORTH:
# Keep the top of the window in the same place
frame.origin.y += frame.size.height - height
frame.size.width = width
frame.size.height = height
self.window.setFrame_display_(frame, True)
AppHelper.callAfter(_resize)
def minimize(self):
self.window.miniaturize_(self)
def restore(self):
self.window.deminiaturize_(self)
def move(self, x, y):
screen = self.window.screen().frame()
flipped_y = screen.size.height - y
self.window.setFrameTopLeftPoint_(AppKit.NSPoint(x, flipped_y))
def get_current_url(self):
def get():
self._current_url = str(self.webkit.URL())
self._current_url_semaphore.release()
AppHelper.callAfter(get)
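# Block until get() has run on the main thread and released the semaphore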
self._current_url_semaphore.acquire()
return None if self._current_url == 'about:blank' else self._current_url
def load_url(self, url):
def load(url):
page_url = Foundation.NSURL.URLWithString_(BrowserView.quote(url))
req = Foundation.NSURLRequest.requestWithURL_(page_url)
self.webkit.loadRequest_(req)
self.loaded.clear()
self.url = url
AppHelper.callAfter(load, url)
def load_html(self, content, base_uri):
def load(content, url):
url = Foundation.NSURL.URLWithString_(BrowserView.quote(url))
self.webkit.loadHTMLString_baseURL_(content, url)
self.loaded.clear()
AppHelper.callAfter(load, content, base_uri)
def evaluate_js(self, script):
def eval():
self.webkit.evaluateJavaScript_completionHandler_(script, handler)
def handler(result, error):
JSResult.result = None if result is None else json.loads(result)
JSResult.result_semaphore.release()
class JSResult:
result = None
result_semaphore = Semaphore(0)
self.loaded.wait()
AppHelper.callAfter(eval)
JSResult.result_semaphore.acquire()
return JSResult.result
def create_file_dialog(self, dialog_type, directory, allow_multiple, save_filename, file_filter, main_thread=False):
def create_dialog(*args):
dialog_type = args[0]
if dialog_type == SAVE_DIALOG:
save_filename = args[2]
save_dlg = AppKit.NSSavePanel.savePanel()
save_dlg.setTitle_(self.localization['global.saveFile'])
if directory: # set initial directory
save_dlg.setDirectoryURL_(Foundation.NSURL.fileURLWithPath_(directory))
if save_filename: # set file name
save_dlg.setNameFieldStringValue_(save_filename)
if save_dlg.runModal() == AppKit.NSFileHandlingPanelOKButton:
self._file_name = save_dlg.filename()
else:
self._file_name = None
else:
allow_multiple = args[1]
open_dlg = AppKit.NSOpenPanel.openPanel()
# Enable the selection of files in the dialog.
open_dlg.setCanChooseFiles_(dialog_type != FOLDER_DIALOG)
# Enable the selection of directories in the dialog.
open_dlg.setCanChooseDirectories_(dialog_type == FOLDER_DIALOG)
# Enable / disable multiple selection
open_dlg.setAllowsMultipleSelection_(allow_multiple)
# Set allowed file extensions
if file_filter:
open_dlg.setAllowedFileTypes_(file_filter[0][1])
# Add a menu to choose between multiple file filters
if len(file_filter) > 1:
filter_chooser = BrowserView.FileFilterChooser.alloc().initWithFilter_(file_filter)
open_dlg.setAccessoryView_(filter_chooser)
open_dlg.setAccessoryViewDisclosed_(True)
if directory: # set initial directory
open_dlg.setDirectoryURL_(Foundation.NSURL.fileURLWithPath_(directory))
if open_dlg.runModal() == AppKit.NSFileHandlingPanelOKButton:
files = open_dlg.filenames()
self._file_name = tuple(files)
else:
self._file_name = None
if not main_thread:
self._file_name_semaphore.release()
if main_thread:
create_dialog(dialog_type, allow_multiple, save_filename)
else:
AppHelper.callAfter(create_dialog, dialog_type, allow_multiple, save_filename)
self._file_name_semaphore.acquire()
return self._file_name
def _add_app_menu(self):
"""
Create a default Cocoa menu that shows 'Services', 'Hide',
'Hide Others', 'Show All', and 'Quit'. Will append the application name
to some menu items if it's available.
"""
# Set the main menu for the application
mainMenu = AppKit.NSMenu.alloc().init()
self.app.setMainMenu_(mainMenu)
# Create an application menu and make it a submenu of the main menu
mainAppMenuItem = AppKit.NSMenuItem.alloc().init()
mainMenu.addItem_(mainAppMenuItem)
appMenu = AppKit.NSMenu.alloc().init()
mainAppMenuItem.setSubmenu_(appMenu)
appMenu.addItemWithTitle_action_keyEquivalent_(self._append_app_name(self.localization["cocoa.menu.about"]), "orderFrontStandardAboutPanel:", "")
appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
# Set the 'Services' menu for the app and create an app menu item
appServicesMenu = AppKit.NSMenu.alloc().init()
self.app.setServicesMenu_(appServicesMenu)
servicesMenuItem = appMenu.addItemWithTitle_action_keyEquivalent_(self.localization["cocoa.menu.services"], nil, "")
servicesMenuItem.setSubmenu_(appServicesMenu)
appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
# Append the 'Hide', 'Hide Others', and 'Show All' menu items
appMenu.addItemWithTitle_action_keyEquivalent_(self._append_app_name(self.localization["cocoa.menu.hide"]), "hide:", "h")
hideOthersMenuItem = appMenu.addItemWithTitle_action_keyEquivalent_(self.localization["cocoa.menu.hideOthers"], "hideOtherApplications:", "h")
hideOthersMenuItem.setKeyEquivalentModifierMask_(AppKit.NSAlternateKeyMask | AppKit.NSCommandKeyMask)
appMenu.addItemWithTitle_action_keyEquivalent_(self.localization["cocoa.menu.showAll"], "unhideAllApplications:", "")
appMenu.addItem_(AppKit.NSMenuItem.separatorItem())
# Append a 'Quit' menu item
appMenu.addItemWithTitle_action_keyEquivalent_(self._append_app_name(self.localization["cocoa.menu.quit"]), "terminate:", "q")
def _add_view_menu(self):
"""
Create a default View menu that shows 'Enter Full Screen'.
"""
mainMenu = self.app.mainMenu()
# Create a View menu and make it a submenu of the main menu
viewMenu = AppKit.NSMenu.alloc().init()
viewMenu.setTitle_(self.localization["cocoa.menu.view"])
viewMenuItem = AppKit.NSMenuItem.alloc().init()
viewMenuItem.setSubmenu_(viewMenu)
mainMenu.addItem_(viewMenuItem)
# TODO: localization of the Enter fullscreen string has no effect
fullScreenMenuItem = viewMenu.addItemWithTitle_action_keyEquivalent_(self.localization["cocoa.menu.fullscreen"], "toggleFullScreen:", "f")
fullScreenMenuItem.setKeyEquivalentModifierMask_(AppKit.NSControlKeyMask | AppKit.NSCommandKeyMask)
def _append_app_name(self, val):
"""
Append the application name to a string if it's available. If not, the
string is returned unchanged.
:param str val: The string to append to
:return: String with app name appended, or unchanged string
:rtype: str
"""
if "CFBundleName" in info:
val += " {}".format(info["CFBundleName"])
return val
@staticmethod
def nscolor_from_hex(hex_string, alpha=1.0):
"""
Convert given hex color to NSColor.
:hex_string: Hex code of the color as #RGB or #RRGGBB
"""
hex_string = hex_string[1:] # Remove leading hash
if len(hex_string) == 3:
hex_string = ''.join([c*2 for c in hex_string]) # 3-digit to 6-digit
hex_int = int(hex_string, 16)
rgb = (
(hex_int >> 16) & 0xff, # Red byte
(hex_int >> 8) & 0xff, # Green byte
(hex_int) & 0xff # Blue byte
)
rgb = [i / 255.0 for i in rgb] # Normalize to the 0.0-1.0 range
return AppKit.NSColor.colorWithSRGBRed_green_blue_alpha_(rgb[0], rgb[1], rgb[2], alpha)
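# For example, nscolor_from_hex('#ff8800') yields an sRGB NSColor of
# approximately (1.0, 0.533, 0.0) at full opacity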
@staticmethod
def get_instance(attr, value):
"""
Return a BrowserView instance by the :value of its given :attribute,
and None if no match is found.
"""
for i in list(BrowserView.instances.values()):
try:
if getattr(i, attr) == value:
return i
except AttributeError:
break
return None
@staticmethod
def display_confirmation_dialog(first_button, second_button, message):
AppKit.NSApplication.sharedApplication()
AppKit.NSRunningApplication.currentApplication().activateWithOptions_(AppKit.NSApplicationActivateIgnoringOtherApps)
alert = AppKit.NSAlert.alloc().init()
alert.addButtonWithTitle_(first_button)
alert.addButtonWithTitle_(second_button)
alert.setMessageText_(message)
alert.setAlertStyle_(AppKit.NSWarningAlertStyle)
if alert.runModal() == AppKit.NSAlertFirstButtonReturn:
return True
else:
return False
@staticmethod
def should_close(window):
quit = window.localization['global.quit']
cancel = window.localization['global.cancel']
msg = window.localization['global.quitConfirmation']
if not window.confirm_close or BrowserView.display_confirmation_dialog(quit, cancel, msg):
should_cancel = window.closing.set()
if should_cancel:
return Foundation.NO
else:
return Foundation.YES
else:
return Foundation.NO
@staticmethod
def print_webview(webview):
info = AppKit.NSPrintInfo.sharedPrintInfo().copy()
# default print settings used by Safari
info.setHorizontalPagination_(AppKit.NSFitPagination)
info.setHorizontallyCentered_(Foundation.NO)
info.setVerticallyCentered_(Foundation.NO)
imageableBounds = info.imageablePageBounds()
paperSize = info.paperSize()
if (Foundation.NSWidth(imageableBounds) > paperSize.width):
imageableBounds.origin.x = 0
imageableBounds.size.width = paperSize.width
if (Foundation.NSHeight(imageableBounds) > paperSize.height):
imageableBounds.origin.y = 0
imageableBounds.size.height = paperSize.height
info.setBottomMargin_(Foundation.NSMinY(imageableBounds))
info.setTopMargin_(paperSize.height - Foundation.NSMinY(imageableBounds) - Foundation.NSHeight(imageableBounds))
info.setLeftMargin_(Foundation.NSMinX(imageableBounds))
info.setRightMargin_(paperSize.width - Foundation.NSMinX(imageableBounds) - Foundation.NSWidth(imageableBounds))
# show the print panel
print_op = webview._printOperationWithPrintInfo_(info)
print_op.runOperationModalForWindow_delegate_didRunSelector_contextInfo_(webview.window(), nil, nil, nil)
@staticmethod
def pyobjc_method_signature(signature_str):
"""
Return a PyObjCMethodSignature object for given signature string.
:param signature_str: A byte string containing the type encoding for the method signature
:return: A method signature object, assignable to attributes like __block_signature__
:rtype: <type objc._method_signature>
"""
_objc_so.PyObjCMethodSignature_WithMetaData.restype = ctypes.py_object
return _objc_so.PyObjCMethodSignature_WithMetaData(ctypes.create_string_buffer(signature_str), None, False)
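# In the type encodings used above, 'v' is a void return, '@' an Objective-C
# object and 'i' an int, so b'v@@' and b'v@i' describe the completion-handler
# blocks that receive an object or an int argument respectively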
@staticmethod
def quote(string):
return string.replace(' ', '%20')
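# Note: only spaces are percent-encoded here, e.g. '/tmp/my file.html'
# becomes '/tmp/my%20file.html'; all other characters pass through unchanged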
def create_window(window):
global _debug
def create():
browser = BrowserView(window)
browser.first_show()
if window.uid == 'master':
create()
else:
AppHelper.callAfter(create)
def set_title(title, uid):
BrowserView.instances[uid].set_title(title)
def create_file_dialog(dialog_type, directory, allow_multiple, save_filename, file_types, uid):
file_filter = []
# Parse file_types to obtain allowed file extensions
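# e.g. for a file_types entry of 'Image Files (*.png;*.jpg)' the extensions
# list becomes ['png', 'jpg'], while a '*.*' wildcard maps to None (no
# restriction); the exact description text depends on parse_file_type()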
for s in file_types:
description, extensions = parse_file_type(s)
file_extensions = [i.lstrip('*.') for i in extensions.split(';') if i != '*.*']
file_filter.append([description, file_extensions or None])
i = BrowserView.instances[uid]
return i.create_file_dialog(dialog_type, directory, allow_multiple, save_filename, file_filter)
def load_url(url, uid):
BrowserView.instances[uid].load_url(url)
def load_html(content, base_uri, uid):
BrowserView.instances[uid].load_html(content, base_uri)
def destroy_window(uid):
BrowserView.instances[uid].destroy()
def hide(uid):
BrowserView.instances[uid].hide()
def show(uid):
BrowserView.instances[uid].show()
def toggle_fullscreen(uid):
BrowserView.instances[uid].toggle_fullscreen()
def set_on_top(uid, top):
def _set_on_top():
level = AppKit.NSStatusWindowLevel if top else AppKit.NSNormalWindowLevel
BrowserView.instances[uid].window.setLevel_(level)
AppHelper.callAfter(_set_on_top)
def resize(width, height, uid, fix_point):
BrowserView.instances[uid].resize(width, height, fix_point)
def minimize(uid):
BrowserView.instances[uid].minimize()
def restore(uid):
BrowserView.instances[uid].restore()
def move(x, y, uid):
AppHelper.callAfter(BrowserView.instances[uid].move, x, y)
def get_current_url(uid):
return BrowserView.instances[uid].get_current_url()
def evaluate_js(script, uid):
return BrowserView.instances[uid].evaluate_js(script)
def get_position(uid):
def _position(coordinates):
screen_frame = AppKit.NSScreen.mainScreen().frame()
if screen_frame is None:
raise RuntimeError('Failed to obtain screen')
window = BrowserView.instances[uid].window
frame = window.frame()
coordinates[0] = int(frame.origin.x)
coordinates[1] = int(screen_frame.size.height - frame.origin.y - frame.size.height)
semaphore.release()
coordinates = [None, None]
semaphore = Semaphore(0)
try:
_position(coordinates)
except:
AppHelper.callAfter(_position, coordinates)
semaphore.acquire()
return coordinates
def get_size(uid):
def _size(dimensions):
size = BrowserView.instances[uid].window.frame().size
dimensions[0] = size.width
dimensions[1] = size.height
semaphore.release()
dimensions = [None, None]
semaphore = Semaphore(0)
try:
_size(dimensions)
except:
AppHelper.callAfter(_size, dimensions)
semaphore.acquire()
return dimensions
def get_screens():
screens = [Screen(s.frame().size.width, s.frame().size.height) for s in AppKit.NSScreen.screens()]
return screens
# Copyright (c) 2012 OpenStack Foundation.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import copy
import mock
from neutron_lib.api import validators
from neutron_lib import constants as const
from neutron_lib import context
from neutron_lib.db import api as db_api
from neutron_lib.db import constants as db_const
from neutron_lib.plugins import directory
from oslo_config import cfg
import oslo_db.exception as exc
import testtools
import webob.exc
from neutron.common import exceptions as n_exc
from neutron.db import db_base_plugin_v2
from neutron.db import securitygroups_db
from neutron.extensions import securitygroup as ext_sg
from neutron.extensions import standardattrdescription
from neutron.tests import base
from neutron.tests.unit.db import test_db_base_plugin_v2
DB_PLUGIN_KLASS = ('neutron.tests.unit.extensions.test_securitygroup.'
'SecurityGroupTestPlugin')
LONG_NAME_OK = 'x' * (db_const.NAME_FIELD_SIZE)
LONG_NAME_NG = 'x' * (db_const.NAME_FIELD_SIZE + 1)
class SecurityGroupTestExtensionManager(object):
def get_resources(self):
# The 'description' attribute of security_group_rules is normally added by
# the standardattrdescription extension. Since the API router is not
# initialized in test code, add it manually here.
ext_res = (standardattrdescription.Standardattrdescription().
get_extended_resources("2.0"))
if ext_sg.SECURITYGROUPRULES in ext_res:
existing_sg_rule_attr_map = (
ext_sg.RESOURCE_ATTRIBUTE_MAP[ext_sg.SECURITYGROUPRULES])
sg_rule_attr_desc = ext_res[ext_sg.SECURITYGROUPRULES]
existing_sg_rule_attr_map.update(sg_rule_attr_desc)
if ext_sg.SECURITYGROUPS in ext_res:
existing_sg_attr_map = (
ext_sg.RESOURCE_ATTRIBUTE_MAP[ext_sg.SECURITYGROUPS])
sg_attr_desc = ext_res[ext_sg.SECURITYGROUPS]
existing_sg_attr_map.update(sg_attr_desc)
return ext_sg.Securitygroup.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
class SecurityGroupsTestCase(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
def _build_security_group(self, name, description, **kwargs):
data = {
'security_group': {
'name': name,
'tenant_id': kwargs.get(
'tenant_id', test_db_base_plugin_v2.TEST_TENANT_ID),
'description': description}}
return data
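# For example, _build_security_group('webservers', 'my webservers') returns
# {'security_group': {'name': 'webservers', 'tenant_id': <test tenant>,
# 'description': 'my webservers'}}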
def _create_security_group_response(self, fmt, data, **kwargs):
security_group_req = self.new_create_request('security-groups', data,
fmt)
if (kwargs.get('set_context') and 'tenant_id' in kwargs):
# create a specific auth context for this request
security_group_req.environ['neutron.context'] = (
context.Context('', kwargs['tenant_id']))
return security_group_req.get_response(self.ext_api)
def _create_security_group(self, fmt, name, description, **kwargs):
data = self._build_security_group(name, description, **kwargs)
return self._create_security_group_response(fmt, data, **kwargs)
def _build_security_group_rule(
self, security_group_id, direction, proto,
port_range_min=None, port_range_max=None,
remote_ip_prefix=None, remote_group_id=None,
tenant_id=test_db_base_plugin_v2.TEST_TENANT_ID,
ethertype=const.IPv4):
data = {'security_group_rule': {'security_group_id': security_group_id,
'direction': direction,
'protocol': proto,
'ethertype': ethertype,
'tenant_id': tenant_id}}
if port_range_min:
data['security_group_rule']['port_range_min'] = port_range_min
if port_range_max:
data['security_group_rule']['port_range_max'] = port_range_max
if remote_ip_prefix:
data['security_group_rule']['remote_ip_prefix'] = remote_ip_prefix
if remote_group_id:
data['security_group_rule']['remote_group_id'] = remote_group_id
return data
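# For example, _build_security_group_rule(sg_id, 'ingress',
# const.PROTO_NAME_TCP, '22', '22') returns a dict whose 'security_group_rule'
# entry carries the group id, direction, protocol, ethertype, tenant_id and
# only the optional keys (port range, remote prefix/group) that were supplied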
def _create_security_group_rule(self, fmt, rules, **kwargs):
security_group_rule_req = self.new_create_request(
'security-group-rules', rules, fmt)
if (kwargs.get('set_context') and 'tenant_id' in kwargs):
# create a specific auth context for this request
security_group_rule_req.environ['neutron.context'] = (
context.Context('', kwargs['tenant_id']))
return security_group_rule_req.get_response(self.ext_api)
def _make_security_group(self, fmt, name, description, **kwargs):
res = self._create_security_group(fmt, name, description, **kwargs)
if res.status_int >= webob.exc.HTTPBadRequest.code:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(fmt, res)
def _make_security_group_rule(self, fmt, rules, **kwargs):
res = self._create_security_group_rule(self.fmt, rules)
if res.status_int >= webob.exc.HTTPBadRequest.code:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(fmt, res)
@contextlib.contextmanager
def security_group(self, name='webservers', description='webservers',
fmt=None):
if not fmt:
fmt = self.fmt
security_group = self._make_security_group(fmt, name, description)
yield security_group
@contextlib.contextmanager
def security_group_rule(self, security_group_id='4cd70774-cc67-4a87-9b39-7'
'd1db38eb087',
direction='ingress', protocol=const.PROTO_NAME_TCP,
port_range_min='22', port_range_max='22',
remote_ip_prefix=None, remote_group_id=None,
fmt=None, ethertype=const.IPv4):
if not fmt:
fmt = self.fmt
rule = self._build_security_group_rule(security_group_id,
direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
remote_group_id,
ethertype=ethertype)
security_group_rule = self._make_security_group_rule(self.fmt, rule)
yield security_group_rule
def _delete_default_security_group_egress_rules(self, security_group_id):
"""Deletes default egress rules given a security group ID."""
res = self._list(
'security-group-rules',
query_params='security_group_id=%s' % security_group_id)
for r in res['security_group_rules']:
if (r['direction'] == 'egress' and not r['port_range_max'] and
not r['port_range_min'] and not r['protocol'] and
not r['remote_ip_prefix']):
self._delete('security-group-rules', r['id'])
def _assert_sg_rule_has_kvs(self, security_group_rule, expected_kvs):
"""Asserts that the sg rule has expected key/value pairs passed
in as expected_kvs dictionary
"""
for k, v in expected_kvs.items():
self.assertEqual(security_group_rule[k], v)
class SecurityGroupTestPlugin(db_base_plugin_v2.NeutronDbPluginV2,
securitygroups_db.SecurityGroupDbMixin):
"""Test plugin that implements necessary calls on create/delete port for
associating ports with security groups.
"""
__native_pagination_support = True
__native_sorting_support = True
supported_extension_aliases = ["security-group"]
def create_port(self, context, port):
tenant_id = port['port']['tenant_id']
default_sg = self._ensure_default_security_group(context, tenant_id)
if not validators.is_attr_set(port['port'].get(ext_sg.SECURITYGROUPS)):
port['port'][ext_sg.SECURITYGROUPS] = [default_sg]
with db_api.CONTEXT_WRITER.using(context):
sgids = self._get_security_groups_on_port(context, port)
port = super(SecurityGroupTestPlugin, self).create_port(context,
port)
self._process_port_create_security_group(context, port,
sgids)
return port
def update_port(self, context, id, port):
with db_api.CONTEXT_WRITER.using(context):
if ext_sg.SECURITYGROUPS in port['port']:
port['port'][ext_sg.SECURITYGROUPS] = (
self._get_security_groups_on_port(context, port))
# delete the port binding and read it with the new rules
self._delete_port_security_group_bindings(context, id)
port['port']['id'] = id
self._process_port_create_security_group(
context, port['port'],
port['port'].get(ext_sg.SECURITYGROUPS))
port = super(SecurityGroupTestPlugin, self).update_port(
context, id, port)
return port
def create_network(self, context, network):
self._ensure_default_security_group(context,
network['network']['tenant_id'])
return super(SecurityGroupTestPlugin, self).create_network(context,
network)
def get_ports(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
sorts = sorts or []
neutron_lports = super(SecurityGroupTestPlugin, self).get_ports(
context, filters, sorts=sorts, limit=limit, marker=marker,
page_reverse=page_reverse)
return neutron_lports
class SecurityGroupDBTestCase(SecurityGroupsTestCase):
def setUp(self, plugin=None, ext_mgr=None):
self._backup = copy.deepcopy(ext_sg.RESOURCE_ATTRIBUTE_MAP)
self.addCleanup(self._restore)
plugin = plugin or DB_PLUGIN_KLASS
ext_mgr = ext_mgr or SecurityGroupTestExtensionManager()
super(SecurityGroupDBTestCase,
self).setUp(plugin=plugin, ext_mgr=ext_mgr)
def _restore(self):
ext_sg.RESOURCE_ATTRIBUTE_MAP = self._backup
class TestSecurityGroups(SecurityGroupDBTestCase):
def test_create_security_group(self):
name = 'webservers'
description = 'my webservers'
keys = [('name', name,), ('description', description)]
with self.security_group(name, description) as security_group:
for k, v, in keys:
self.assertEqual(security_group['security_group'][k], v)
# Verify that default egress rules have been created
sg_rules = security_group['security_group']['security_group_rules']
self.assertEqual(2, len(sg_rules))
v4_rules = [r for r in sg_rules if r['ethertype'] == const.IPv4]
self.assertEqual(1, len(v4_rules))
v4_rule = v4_rules[0]
expected = {'direction': 'egress',
'ethertype': const.IPv4,
'remote_group_id': None,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v4_rule, expected)
v6_rules = [r for r in sg_rules if r['ethertype'] == const.IPv6]
self.assertEqual(1, len(v6_rules))
v6_rule = v6_rules[0]
expected = {'direction': 'egress',
'ethertype': const.IPv6,
'remote_group_id': None,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v6_rule, expected)
def test_create_security_group_bulk(self):
rule1 = self._build_security_group("sg_1", "sec_grp_1")
rule2 = self._build_security_group("sg_2", "sec_grp_2")
rules = {'security_groups': [rule1['security_group'],
rule2['security_group']]}
res = self._create_security_group_response(self.fmt, rules)
ret = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
self.assertEqual(2, len(ret['security_groups']))
def test_skip_duplicate_default_sg_error(self):
num_called = [0]
original_func = self.plugin.create_security_group
def side_effect(context, security_group, default_sg):
# can't always raise, or create_security_group will hang
self.assertTrue(default_sg)
self.assertLess(num_called[0], 2)
num_called[0] += 1
ret = original_func(context, security_group, default_sg)
if num_called[0] == 1:
return ret
# make another call to cause an exception.
# NOTE(yamamoto): raising the exception by ourselves
# doesn't update the session state appropriately.
self.assertRaises(exc.DBDuplicateEntry,
original_func, context, security_group,
default_sg)
with mock.patch.object(SecurityGroupTestPlugin,
'create_security_group',
side_effect=side_effect):
self.plugin.create_network(
context.get_admin_context(),
{'network': {'name': 'foo',
'admin_state_up': True,
'shared': False,
'tenant_id': 'bar'}})
def test_update_security_group(self):
with self.security_group() as sg:
data = {'security_group': {'name': 'new_name',
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_group']['id'])
res = self.deserialize(self.fmt, req.get_response(self.ext_api))
self.assertEqual(data['security_group']['name'],
res['security_group']['name'])
self.assertEqual(data['security_group']['description'],
res['security_group']['description'])
def test_update_security_group_name_to_default_fail(self):
with self.security_group() as sg:
data = {'security_group': {'name': 'default',
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_group']['id'])
req.environ['neutron.context'] = context.Context('', 'somebody')
res = req.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_update_default_security_group_name_fail(self):
with self.network():
res = self.new_list_request('security-groups')
sg = self.deserialize(self.fmt, res.get_response(self.ext_api))
data = {'security_group': {'name': 'new_name',
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_groups'][0]['id'])
req.environ['neutron.context'] = context.Context('', 'somebody')
res = req.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_update_default_security_group_with_description(self):
with self.network():
res = self.new_list_request('security-groups')
sg = self.deserialize(self.fmt, res.get_response(self.ext_api))
data = {'security_group': {'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_groups'][0]['id'])
res = self.deserialize(self.fmt, req.get_response(self.ext_api))
self.assertEqual(data['security_group']['description'],
res['security_group']['description'])
def test_update_security_group_with_max_name_length(self):
with self.security_group() as sg:
data = {'security_group': {'name': LONG_NAME_OK,
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_group']['id'])
res = self.deserialize(self.fmt, req.get_response(self.ext_api))
self.assertEqual(data['security_group']['name'],
res['security_group']['name'])
self.assertEqual(data['security_group']['description'],
res['security_group']['description'])
def test_update_security_group_with_too_long_name(self):
with self.security_group() as sg:
data = {'security_group': {'name': LONG_NAME_NG,
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_group']['id'])
res = req.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_update_security_group_with_boolean_type_name(self):
with self.security_group() as sg:
data = {'security_group': {'name': True,
'description': 'new_desc'}}
req = self.new_update_request('security-groups',
data,
sg['security_group']['id'])
res = req.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_check_default_security_group_description(self):
with self.network():
res = self.new_list_request('security-groups')
sg = self.deserialize(self.fmt, res.get_response(self.ext_api))
self.assertEqual('Default security group',
sg['security_groups'][0]['description'])
def test_default_security_group(self):
with self.network():
res = self.new_list_request('security-groups')
groups = self.deserialize(self.fmt, res.get_response(self.ext_api))
self.assertEqual(1, len(groups['security_groups']))
def test_create_default_security_group_fail(self):
name = 'default'
description = 'my webservers'
res = self._create_security_group(self.fmt, name, description)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_default_security_group_check_case_insensitive(self):
name = 'DEFAULT'
description = 'my webservers'
res = self._create_security_group(self.fmt, name, description)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_with_max_name_length(self):
description = 'my webservers'
res = self._create_security_group(self.fmt, LONG_NAME_OK, description)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_with_too_long_name(self):
description = 'my webservers'
res = self._create_security_group(self.fmt, LONG_NAME_NG, description)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_with_boolean_type_name(self):
description = 'my webservers'
res = self._create_security_group(self.fmt, True, description)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_list_security_groups(self):
with self.security_group(name='sg1', description='sg') as v1,\
self.security_group(name='sg2', description='sg') as v2,\
self.security_group(name='sg3', description='sg') as v3:
security_groups = (v1, v2, v3)
self._test_list_resources('security-group',
security_groups,
query_params='description=sg')
def test_list_security_groups_with_sort(self):
with self.security_group(name='sg1', description='sg') as sg1,\
self.security_group(name='sg2', description='sg') as sg2,\
self.security_group(name='sg3', description='sg') as sg3:
self._test_list_with_sort('security-group',
(sg3, sg2, sg1),
[('name', 'desc')],
query_params='description=sg')
def test_list_security_groups_with_pagination(self):
with self.security_group(name='sg1', description='sg') as sg1,\
self.security_group(name='sg2', description='sg') as sg2,\
self.security_group(name='sg3', description='sg') as sg3:
self._test_list_with_pagination('security-group',
(sg1, sg2, sg3),
('name', 'asc'), 2, 2,
query_params='description=sg')
def test_list_security_groups_with_pagination_reverse(self):
with self.security_group(name='sg1', description='sg') as sg1,\
self.security_group(name='sg2', description='sg') as sg2,\
self.security_group(name='sg3', description='sg') as sg3:
self._test_list_with_pagination_reverse(
'security-group', (sg1, sg2, sg3), ('name', 'asc'), 2, 2,
query_params='description=sg')
def test_create_security_group_rule_ethertype_invalid_as_number(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
ethertype = 2
rule = self._build_security_group_rule(
security_group_id, 'ingress', const.PROTO_NAME_TCP, '22',
'22', None, None, ethertype=ethertype)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_ethertype_invalid_for_protocol(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
rule = self._build_security_group_rule(
security_group_id, 'ingress', const.PROTO_NAME_IPV6_ICMP)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_invalid_ip_prefix(self):
name = 'webservers'
description = 'my webservers'
for bad_prefix in ['bad_ip', 256, "2001:db8:a::123/129", '172.30./24']:
with self.security_group(name, description) as sg:
sg_id = sg['security_group']['id']
remote_ip_prefix = bad_prefix
rule = self._build_security_group_rule(
sg_id,
'ingress',
const.PROTO_NAME_TCP,
'22', '22',
remote_ip_prefix)
res = self._create_security_group_rule(self.fmt, rule)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_invalid_ethertype_for_prefix(self):
name = 'webservers'
description = 'my webservers'
test_addr = {'192.168.1.1/24': 'IPv6',
'2001:db8:1234::/48': 'IPv4',
'192.168.2.1/24': 'BadEthertype'}
for remote_ip_prefix, ethertype in test_addr.items():
with self.security_group(name, description) as sg:
sg_id = sg['security_group']['id']
rule = self._build_security_group_rule(
sg_id,
'ingress',
const.PROTO_NAME_TCP,
'22', '22',
remote_ip_prefix,
None,
ethertype=ethertype)
res = self._create_security_group_rule(self.fmt, rule)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_with_unmasked_prefix(self):
name = 'webservers'
description = 'my webservers'
addr = {'10.1.2.3': {'mask': '32', 'ethertype': 'IPv4'},
'fe80::2677:3ff:fe7d:4c': {'mask': '128', 'ethertype': 'IPv6'}}
for ip in addr:
with self.security_group(name, description) as sg:
sg_id = sg['security_group']['id']
ethertype = addr[ip]['ethertype']
remote_ip_prefix = ip
rule = self._build_security_group_rule(
sg_id,
'ingress',
const.PROTO_NAME_TCP,
'22', '22',
remote_ip_prefix,
None,
ethertype=ethertype)
res = self._create_security_group_rule(self.fmt, rule)
self.assertEqual(res.status_int, 201)
res_sg = self.deserialize(self.fmt, res)
prefix = res_sg['security_group_rule']['remote_ip_prefix']
self.assertEqual(prefix, '%s/%s' % (ip, addr[ip]['mask']))
def test_create_security_group_rule_tcp_protocol_as_number(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
protocol = const.PROTO_NUM_TCP # TCP
rule = self._build_security_group_rule(
security_group_id, 'ingress', protocol, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_protocol_as_number(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
protocol = 2
rule = self._build_security_group_rule(
security_group_id, 'ingress', protocol)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_protocol_as_number_with_port(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
protocol = 111
rule = self._build_security_group_rule(
security_group_id, 'ingress', protocol, '70')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_protocol_as_number_range(self):
# This is a SG rule with a port range, but treated as a single
# port since min/max are the same.
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
protocol = 111
rule = self._build_security_group_rule(
security_group_id, 'ingress', protocol, '70', '70')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_protocol_as_number_range_bad(self):
# Only certain protocols support a SG rule with a port range
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
protocol = 111
rule = self._build_security_group_rule(
security_group_id, 'ingress', protocol, '70', '71')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_case_insensitive(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = 'TCP'
port_range_min = 22
port_range_max = 22
ethertype = 'ipV4'
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
ethertype=ethertype) as rule:
# the lower-case value will be returned
self.assertEqual(rule['security_group_rule']['protocol'],
protocol.lower())
self.assertEqual(rule['security_group_rule']['ethertype'],
const.IPv4)
def test_get_security_group(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
remote_group_id = sg['security_group']['id']
res = self.new_show_request('security-groups', remote_group_id)
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix):
group = self.deserialize(
self.fmt, res.get_response(self.ext_api))
sg_rule = group['security_group']['security_group_rules']
self.assertEqual(group['security_group']['id'],
remote_group_id)
self.assertEqual(3, len(sg_rule))
sg_rule = [r for r in sg_rule if r['direction'] == 'ingress']
for k, v, in keys:
self.assertEqual(sg_rule[0][k], v)
def test_get_security_group_on_port_from_wrong_tenant(self):
plugin = directory.get_plugin()
if not hasattr(plugin, '_get_security_groups_on_port'):
self.skipTest("plugin doesn't use the mixin with this method")
neutron_context = context.get_admin_context()
res = self._create_security_group(self.fmt, 'webservers', 'webservers',
tenant_id='bad_tenant')
sg1 = self.deserialize(self.fmt, res)
with testtools.ExpectedException(ext_sg.SecurityGroupNotFound):
plugin._get_security_groups_on_port(
neutron_context,
{'port': {'security_groups': [sg1['security_group']['id']],
'tenant_id': 'tenant'}}
)
def test_delete_security_group(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
remote_group_id = sg['security_group']['id']
self._delete('security-groups', remote_group_id,
webob.exc.HTTPNoContent.code)
def test_delete_default_security_group_admin(self):
with self.network():
res = self.new_list_request('security-groups')
sg = self.deserialize(self.fmt, res.get_response(self.ext_api))
self._delete('security-groups', sg['security_groups'][0]['id'],
webob.exc.HTTPNoContent.code)
def test_delete_default_security_group_nonadmin(self):
with self.network():
res = self.new_list_request('security-groups')
sg = self.deserialize(self.fmt, res.get_response(self.ext_api))
neutron_context = context.Context(
'', test_db_base_plugin_v2.TEST_TENANT_ID)
self._delete('security-groups', sg['security_groups'][0]['id'],
webob.exc.HTTPConflict.code,
neutron_context=neutron_context)
def test_security_group_list_creates_default_security_group(self):
neutron_context = context.Context(
'', test_db_base_plugin_v2.TEST_TENANT_ID)
sg = self._list('security-groups',
neutron_context=neutron_context).get('security_groups')
self.assertEqual(1, len(sg))
def test_security_group_port_create_creates_default_security_group(self):
res = self._create_network(self.fmt, 'net1', True,
tenant_id='not_admin',
set_context=True)
net1 = self.deserialize(self.fmt, res)
res = self._create_port(self.fmt, net1['network']['id'],
tenant_id='not_admin', set_context=True)
sg = self._list('security-groups').get('security_groups')
self.assertEqual(1, len(sg))
def test_default_security_group_rules(self):
with self.network():
res = self.new_list_request('security-groups')
groups = self.deserialize(self.fmt, res.get_response(self.ext_api))
self.assertEqual(len(groups['security_groups']), 1)
security_group_id = groups['security_groups'][0]['id']
res = self.new_list_request('security-group-rules')
rules = self.deserialize(self.fmt, res.get_response(self.ext_api))
self.assertEqual(len(rules['security_group_rules']), 4)
# Verify default rule for v4 egress
sg_rules = rules['security_group_rules']
rules = [
r for r in sg_rules
if r['direction'] == 'egress' and r['ethertype'] == const.IPv4
]
self.assertEqual(1, len(rules))
v4_egress = rules[0]
expected = {'direction': 'egress',
'ethertype': const.IPv4,
'remote_group_id': None,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v4_egress, expected)
# Verify default rule for v6 egress
rules = [
r for r in sg_rules
if r['direction'] == 'egress' and r['ethertype'] == const.IPv6
]
self.assertEqual(1, len(rules))
v6_egress = rules[0]
expected = {'direction': 'egress',
'ethertype': const.IPv6,
'remote_group_id': None,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v6_egress, expected)
# Verify default rule for v4 ingress
rules = [
r for r in sg_rules
if r['direction'] == 'ingress' and r['ethertype'] == const.IPv4
]
self.assertEqual(1, len(rules))
v4_ingress = rules[0]
expected = {'direction': 'ingress',
'ethertype': const.IPv4,
'remote_group_id': security_group_id,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v4_ingress, expected)
# Verify default rule for v6 ingress
rules = [
r for r in sg_rules
if r['direction'] == 'ingress' and r['ethertype'] == const.IPv6
]
self.assertEqual(1, len(rules))
v6_ingress = rules[0]
expected = {'direction': 'ingress',
'ethertype': const.IPv6,
'remote_group_id': security_group_id,
'remote_ip_prefix': None,
'protocol': None,
'port_range_max': None,
'port_range_min': None}
self._assert_sg_rule_has_kvs(v6_ingress, expected)
def test_create_security_group_rule_remote_ip_prefix(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_rule_group_id(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
with self.security_group(name, description) as sg2:
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_group_id = sg2['security_group']['id']
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
keys = [('remote_group_id', remote_group_id),
('security_group_id', security_group_id),
('direction', direction),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_group_id=remote_group_id
) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_rule_icmp_with_type_and_code(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_ICMP
# port_range_min (ICMP type) is greater than port_range_max
# (ICMP code) in order to confirm min <= max port check is
# not called for ICMP.
port_range_min = 8
port_range_max = 5
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_rule_icmp_with_type_only(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_ICMP
# ICMP type
port_range_min = 8
# ICMP code
port_range_max = None
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_rule_icmpv6_with_type_only(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
ethertype = const.IPv6
remote_ip_prefix = "2001::f401:56ff:fefe:d3dc/128"
protocol = const.PROTO_NAME_IPV6_ICMP
# ICMPV6 type
port_range_min = const.ICMPV6_TYPE_RA
# ICMPV6 code
port_range_max = None
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('ethertype', ethertype),
('protocol', protocol),
('port_range_min', port_range_min),
('port_range_max', port_range_max)]
with self.security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
None, None,
ethertype) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_rule_icmpv6_legacy_protocol_name(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
direction = "ingress"
ethertype = const.IPv6
remote_ip_prefix = "2001::f401:56ff:fefe:d3dc/128"
protocol = const.PROTO_NAME_IPV6_ICMP_LEGACY
keys = [('remote_ip_prefix', remote_ip_prefix),
('security_group_id', security_group_id),
('direction', direction),
('ethertype', ethertype),
('protocol', protocol)]
with self.security_group_rule(security_group_id, direction,
protocol, None, None,
remote_ip_prefix,
None, None,
ethertype) as rule:
for k, v, in keys:
self.assertEqual(rule['security_group_rule'][k], v)
def test_create_security_group_source_group_ip_and_ip_prefix(self):
security_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
remote_group_id = "9cd70774-cc67-4a87-9b39-7d1db38eb087"
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
remote_group_id)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_bad_security_group_id(self):
security_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_create_security_group_rule_bad_tenant(self):
with self.security_group() as sg:
rule = {'security_group_rule':
{'security_group_id': sg['security_group']['id'],
'direction': 'ingress',
'protocol': const.PROTO_NAME_TCP,
'port_range_min': '22',
'port_range_max': '22',
'tenant_id': "bad_tenant"}}
res = self._create_security_group_rule(self.fmt, rule,
tenant_id='bad_tenant',
set_context=True)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_create_security_group_rule_bad_tenant_remote_group_id(self):
with self.security_group() as sg:
res = self._create_security_group(self.fmt, 'webservers',
'webservers',
tenant_id='bad_tenant')
sg2 = self.deserialize(self.fmt, res)
rule = {'security_group_rule':
{'security_group_id': sg2['security_group']['id'],
'direction': 'ingress',
'protocol': const.PROTO_NAME_TCP,
'port_range_min': '22',
'port_range_max': '22',
'tenant_id': 'bad_tenant',
'remote_group_id': sg['security_group']['id']}}
res = self._create_security_group_rule(self.fmt, rule,
tenant_id='bad_tenant',
set_context=True)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_create_security_group_rule_bad_tenant_security_group_rule(self):
with self.security_group() as sg:
res = self._create_security_group(self.fmt, 'webservers',
'webservers',
tenant_id='bad_tenant')
self.deserialize(self.fmt, res)
rule = {'security_group_rule':
{'security_group_id': sg['security_group']['id'],
'direction': 'ingress',
'protocol': const.PROTO_NAME_TCP,
'port_range_min': '22',
'port_range_max': '22',
'tenant_id': 'bad_tenant'}}
res = self._create_security_group_rule(self.fmt, rule,
tenant_id='bad_tenant',
set_context=True)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_create_security_group_rule_bad_remote_group_id(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
remote_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_group_id=remote_group_id)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
def test_create_security_group_rule_duplicate_rules(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id) as sgr:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
self.assertIn(sgr['security_group_rule']['id'],
res.json['NeutronError']['message'])
def test_create_security_group_rule_duplicate_rules_diff_desc(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id) as sgr:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22')
rule['security_group_rule']['description'] = "description"
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
self.assertIn(sgr['security_group_rule']['id'],
res.json['NeutronError']['message'])
def test_create_security_group_rule_duplicate_rules_proto_name_num(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22')
self._create_security_group_rule(self.fmt, rule)
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NUM_TCP, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_duplicate_rules_proto_num_name(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NUM_UDP, '50', '100')
self._create_security_group_rule(self.fmt, rule)
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_UDP, '50', '100')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_min_port_greater_max(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
for protocol in [const.PROTO_NAME_TCP, const.PROTO_NAME_UDP,
const.PROTO_NUM_TCP, const.PROTO_NUM_UDP]:
rule = self._build_security_group_rule(
sg['security_group']['id'],
'ingress', protocol, '50', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code,
res.status_int)
def test_create_security_group_rule_ports_but_no_protocol(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress', None, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_port_range_min_only(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', None)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_port_range_max_only(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, None, '22')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_icmp_type_too_big(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_ICMP, '256', None)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_icmp_code_too_big(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_ICMP, '8', '256')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_icmp_with_code_only(self):
name = 'webservers'
description = 'my webservers'
with self.security_group(name, description) as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id):
for code in ['2', '0']:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_ICMP, None, code)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code,
res.status_int)
def test_list_ports_security_group(self):
with self.network() as n:
with self.subnet(n):
self._create_port(self.fmt, n['network']['id'])
req = self.new_list_request('ports')
res = req.get_response(self.api)
ports = self.deserialize(self.fmt, res)
port = ports['ports'][0]
self.assertEqual(len(port[ext_sg.SECURITYGROUPS]), 1)
self._delete('ports', port['id'])
def test_list_security_group_rules(self):
with self.security_group(name='sg') as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id,
direction='egress',
port_range_min=22,
port_range_max=22) as sgr1,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=23,
port_range_max=23) as sgr2,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=24,
port_range_max=24) as sgr3:
# Delete default rules as they would fail the following
# assertion at the end.
self._delete_default_security_group_egress_rules(
security_group_id)
q = 'direction=egress&security_group_id=' + security_group_id
self._test_list_resources('security-group-rule',
[sgr1, sgr2, sgr3],
query_params=q)
def test_list_security_group_rules_with_sort(self):
with self.security_group(name='sg') as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id,
direction='egress',
port_range_min=22,
port_range_max=22) as sgr1,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=23,
port_range_max=23) as sgr2,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=24,
port_range_max=24) as sgr3:
# Delete default rules as they would fail the following
# assertion at the end.
self._delete_default_security_group_egress_rules(
security_group_id)
q = 'direction=egress&security_group_id=' + security_group_id
self._test_list_with_sort('security-group-rule',
(sgr3, sgr2, sgr1),
[('port_range_max', 'desc')],
query_params=q)
def test_list_security_group_rules_with_pagination(self):
with self.security_group(name='sg') as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id,
direction='egress',
port_range_min=22,
port_range_max=22) as sgr1,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=23,
port_range_max=23) as sgr2,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=24,
port_range_max=24) as sgr3:
# Delete default rules as they would fail the following
# assertion at the end.
self._delete_default_security_group_egress_rules(
security_group_id)
q = 'direction=egress&security_group_id=' + security_group_id
self._test_list_with_pagination(
'security-group-rule', (sgr3, sgr2, sgr1),
('port_range_max', 'desc'), 2, 2,
query_params=q)
def test_list_security_group_rules_with_pagination_reverse(self):
with self.security_group(name='sg') as sg:
security_group_id = sg['security_group']['id']
with self.security_group_rule(security_group_id,
direction='egress',
port_range_min=22,
port_range_max=22) as sgr1,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=23,
port_range_max=23) as sgr2,\
self.security_group_rule(security_group_id,
direction='egress',
port_range_min=24,
port_range_max=24) as sgr3:
self._test_list_with_pagination_reverse(
'security-group-rule', (sgr3, sgr2, sgr1),
('port_range_max', 'desc'), 2, 2,
query_params='direction=egress')
def test_create_port_with_multiple_security_groups(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg1:
with self.security_group() as sg2:
res = self._create_port(
self.fmt, n['network']['id'],
security_groups=[sg1['security_group']['id'],
sg2['security_group']['id']])
port = self.deserialize(self.fmt, res)
self.assertEqual(2, len(
port['port'][ext_sg.SECURITYGROUPS]))
self._delete('ports', port['port']['id'])
def test_create_port_with_no_security_groups(self):
with self.network() as n:
with self.subnet(n):
res = self._create_port(self.fmt, n['network']['id'],
security_groups=[])
port = self.deserialize(self.fmt, res)
self.assertEqual([], port['port'][ext_sg.SECURITYGROUPS])
def test_update_port_with_security_group(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
res = self._create_port(self.fmt, n['network']['id'])
port = self.deserialize(self.fmt, res)
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name'],
ext_sg.SECURITYGROUPS:
[sg['security_group']['id']]}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = self.deserialize(self.fmt,
req.get_response(self.api))
self.assertEqual(res['port'][ext_sg.SECURITYGROUPS][0],
sg['security_group']['id'])
# Test update port without security group
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name']}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = self.deserialize(self.fmt,
req.get_response(self.api))
self.assertEqual(res['port'][ext_sg.SECURITYGROUPS][0],
sg['security_group']['id'])
self._delete('ports', port['port']['id'])
def test_update_port_with_multiple_security_groups(self):
with self.network() as n:
with self.subnet(n) as s:
with self.port(s) as port:
with self.security_group() as sg1:
with self.security_group() as sg2:
data = {'port': {ext_sg.SECURITYGROUPS:
[sg1['security_group']['id'],
sg2['security_group']['id']]}}
req = self.new_update_request(
'ports', data, port['port']['id'])
port = self.deserialize(
self.fmt, req.get_response(self.api))
self.assertEqual(
2, len(port['port'][ext_sg.SECURITYGROUPS]))
def test_update_port_remove_security_group_empty_list(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
res = self._create_port(self.fmt, n['network']['id'],
security_groups=(
[sg['security_group']['id']]))
port = self.deserialize(self.fmt, res)
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name'],
'security_groups': []}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = self.deserialize(self.fmt,
req.get_response(self.api))
self.assertEqual([],
res['port'].get(ext_sg.SECURITYGROUPS))
self._delete('ports', port['port']['id'])
def test_update_port_remove_security_group_none(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
res = self._create_port(self.fmt, n['network']['id'],
security_groups=(
[sg['security_group']['id']]))
port = self.deserialize(self.fmt, res)
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name'],
'security_groups': None}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = self.deserialize(self.fmt,
req.get_response(self.api))
self.assertEqual([],
res['port'].get(ext_sg.SECURITYGROUPS))
self._delete('ports', port['port']['id'])
def test_update_port_with_invalid_type_in_security_groups_param(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
res = self._create_port(self.fmt, n['network']['id'],
security_groups=(
[sg['security_group']['id']]))
port = self.deserialize(self.fmt, res)
data = {'port': {'fixed_ips': port['port']['fixed_ips'],
'name': port['port']['name'],
'security_groups': True}}
req = self.new_update_request('ports', data,
port['port']['id'])
res = req.get_response(self.api)
self.assertEqual(webob.exc.HTTPBadRequest.code,
res.status_int)
def test_create_port_with_bad_security_group(self):
with self.network() as n:
with self.subnet(n):
res = self._create_port(self.fmt, n['network']['id'],
security_groups=['bad_id'])
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_port_with_invalid_type_in_security_groups_param(self):
with self.network() as n:
with self.subnet(n):
res = self._create_port(self.fmt, n['network']['id'],
security_groups=True)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_delete_security_group_port_in_use(self):
with self.network() as n:
with self.subnet(n):
with self.security_group() as sg:
res = self._create_port(self.fmt, n['network']['id'],
security_groups=(
[sg['security_group']['id']]))
port = self.deserialize(self.fmt, res)
self.assertEqual(port['port'][ext_sg.SECURITYGROUPS][0],
sg['security_group']['id'])
# try to delete security group that's in use
res = self._delete('security-groups',
sg['security_group']['id'],
webob.exc.HTTPConflict.code)
# delete the blocking port
self._delete('ports', port['port']['id'])
def test_create_security_group_rule_bulk_native(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk "
"security_group_rule create")
with self.security_group() as sg:
rule1 = self._build_security_group_rule(sg['security_group']['id'],
'ingress',
const.PROTO_NAME_TCP, '22',
'22', '10.0.0.1/24')
rule2 = self._build_security_group_rule(sg['security_group']['id'],
'ingress',
const.PROTO_NAME_TCP, '23',
'23', '10.0.0.1/24')
rules = {'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
ret = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
self.assertEqual(2, len(ret['security_group_rules']))
def test_create_security_group_rule_bulk_emulated(self):
real_has_attr = hasattr
        # ensures the API chooses the emulation code path
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
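        # Pretending that `__native_bulk_support` is absent makes the API fall
        # back to emulated (one-by-one) bulk creation instead of the plugin's
        # native bulk path.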
with mock.patch('six.moves.builtins.hasattr',
new=fakehasattr):
with self.security_group() as sg:
rule1 = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22', '10.0.0.1/24')
rule2 = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '23', '23', '10.0.0.1/24')
rules = {'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]
}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_allow_all_ipv4(self):
with self.security_group() as sg:
rule = {'security_group_id': sg['security_group']['id'],
'direction': 'ingress',
'ethertype': const.IPv4,
'tenant_id': test_db_base_plugin_v2.TEST_TENANT_ID}
res = self._create_security_group_rule(
self.fmt, {'security_group_rule': rule})
rule = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_allow_all_ipv4_v6_bulk(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk "
"security_group_rule create")
with self.security_group() as sg:
rule_v4 = {'security_group_id': sg['security_group']['id'],
'direction': 'ingress',
'ethertype': const.IPv4,
'tenant_id': test_db_base_plugin_v2.TEST_TENANT_ID}
rule_v6 = {'security_group_id': sg['security_group']['id'],
'direction': 'ingress',
'ethertype': const.IPv6,
'tenant_id': test_db_base_plugin_v2.TEST_TENANT_ID}
rules = {'security_group_rules': [rule_v4, rule_v6]}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
def test_create_security_group_rule_duplicate_rule_in_post(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk "
"security_group_rule create")
with self.security_group() as sg:
rule = self._build_security_group_rule(sg['security_group']['id'],
'ingress',
const.PROTO_NAME_TCP, '22',
'22', '10.0.0.1/24')
rules = {'security_group_rules': [rule['security_group_rule'],
rule['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
rule = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_duplicate_rule_in_post_emulated(self):
real_has_attr = hasattr
        # ensures the API chooses the emulation code path
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
with mock.patch('six.moves.builtins.hasattr',
new=fakehasattr):
with self.security_group() as sg:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22', '10.0.0.1/24')
rules = {'security_group_rules': [rule['security_group_rule'],
rule['security_group_rule']]}
res = self._create_security_group_rule(self.fmt, rules)
rule = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_duplicate_rule_db(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk "
"security_group_rule create")
with self.security_group() as sg:
rule = self._build_security_group_rule(sg['security_group']['id'],
'ingress',
const.PROTO_NAME_TCP, '22',
'22', '10.0.0.1/24')
rules = {'security_group_rules': [rule]}
self._create_security_group_rule(self.fmt, rules)
res = self._create_security_group_rule(self.fmt, rules)
rule = self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_duplicate_rule_db_emulated(self):
real_has_attr = hasattr
        # ensures the API chooses the emulation code path
def fakehasattr(item, attr):
if attr.endswith('__native_bulk_support'):
return False
return real_has_attr(item, attr)
with mock.patch('six.moves.builtins.hasattr',
new=fakehasattr):
with self.security_group() as sg:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22', '10.0.0.1/24')
rules = {'security_group_rules': [rule]}
self._create_security_group_rule(self.fmt, rules)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_groups_native_quotas(self):
quota = 1
cfg.CONF.set_override('quota_security_group', quota, group='QUOTAS')
name = 'quota_test'
description = 'quota_test'
res = self._create_security_group(self.fmt, name, description)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
res = self._create_security_group(self.fmt, name, description)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rules_native_quotas(self):
name = 'quota_test'
description = 'quota_test'
with self.security_group(name, description) as sg:
            # set the quota to allow exactly one rule beyond the default
            # security group rules that already exist
sgr = self._list('security-group-rules').get(
'security_group_rules')
quota = len(sgr) + 1
cfg.CONF.set_override(
'quota_security_group_rule', quota, group='QUOTAS')
security_group_id = sg['security_group']['id']
rule = self._build_security_group_rule(
security_group_id, 'ingress',
const.PROTO_NAME_TCP, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.assertEqual(webob.exc.HTTPCreated.code, res.status_int)
rule = self._build_security_group_rule(
security_group_id, 'egress',
const.PROTO_NAME_TCP, '22', '22')
res = self._create_security_group_rule(self.fmt, rule)
self.assertEqual(webob.exc.HTTPConflict.code, res.status_int)
def test_create_security_group_rule_different_security_group_ids(self):
if self._skip_native_bulk:
self.skipTest("Plugin does not support native bulk "
"security_group_rule create")
with self.security_group() as sg1:
with self.security_group() as sg2:
rule1 = self._build_security_group_rule(
sg1['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '22', '22', '10.0.0.1/24')
rule2 = self._build_security_group_rule(
sg2['security_group']['id'], 'ingress',
const.PROTO_NAME_TCP, '23', '23', '10.0.0.1/24')
rules = {'security_group_rules': [rule1['security_group_rule'],
rule2['security_group_rule']]
}
res = self._create_security_group_rule(self.fmt, rules)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_with_invalid_ethertype(self):
security_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = const.PROTO_NAME_TCP
port_range_min = 22
port_range_max = 22
remote_group_id = "9cd70774-cc67-4a87-9b39-7d1db38eb087"
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
remote_group_id,
ethertype='IPv5')
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_with_invalid_protocol(self):
security_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = 'tcp/ip'
port_range_min = 22
port_range_max = 22
remote_group_id = "9cd70774-cc67-4a87-9b39-7d1db38eb087"
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
remote_group_id)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_with_invalid_tcp_or_udp_protocol(self):
security_group_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
direction = "ingress"
remote_ip_prefix = "10.0.0.0/24"
protocol = 'tcp'
port_range_min = 0
port_range_max = 80
remote_group_id = "9cd70774-cc67-4a87-9b39-7d1db38eb087"
rule = self._build_security_group_rule(security_group_id, direction,
protocol, port_range_min,
port_range_max,
remote_ip_prefix,
remote_group_id)
res = self._create_security_group_rule(self.fmt, rule)
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_port_with_non_uuid(self):
with self.network() as n:
with self.subnet(n):
res = self._create_port(self.fmt, n['network']['id'],
security_groups=['not_valid'])
self.deserialize(self.fmt, res)
self.assertEqual(webob.exc.HTTPBadRequest.code, res.status_int)
def test_create_security_group_rule_with_specific_id(self):
neutron_context = context.Context(
'', test_db_base_plugin_v2.TEST_TENANT_ID)
specified_id = "4cd70774-cc67-4a87-9b39-7d1db38eb087"
with self.security_group() as sg:
rule = self._build_security_group_rule(
sg['security_group']['id'], 'ingress', const.PROTO_NUM_TCP)
rule['security_group_rule'].update({'id': specified_id,
'port_range_min': None,
'port_range_max': None,
'remote_ip_prefix': None,
'remote_group_id': None})
result = self.plugin.create_security_group_rule(
neutron_context, rule)
self.assertEqual(specified_id, result['id'])
class TestConvertIPPrefixToCIDR(base.BaseTestCase):
def test_convert_bad_ip_prefix_to_cidr(self):
for val in ['bad_ip', 256, "2001:db8:a::123/129"]:
self.assertRaises(n_exc.InvalidCIDR,
ext_sg.convert_ip_prefix_to_cidr, val)
self.assertIsNone(ext_sg.convert_ip_prefix_to_cidr(None))
def test_convert_ip_prefix_no_netmask_to_cidr(self):
addr = {'10.1.2.3': '32', 'fe80::2677:3ff:fe7d:4c': '128'}
for k, v in addr.items():
self.assertEqual(ext_sg.convert_ip_prefix_to_cidr(k),
'%s/%s' % (k, v))
def test_convert_ip_prefix_with_netmask_to_cidr(self):
addresses = ['10.1.0.0/16', '10.1.2.3/32', '2001:db8:1234::/48']
for addr in addresses:
self.assertEqual(addr, ext_sg.convert_ip_prefix_to_cidr(addr))
class TestConvertProtocol(base.BaseTestCase):
def test_convert_numeric_protocol(self):
self.assertIsInstance(ext_sg.convert_protocol('2'), str)
def test_convert_bad_protocol(self):
for val in ['bad', '256', '-1']:
self.assertRaises(ext_sg.SecurityGroupRuleInvalidProtocol,
ext_sg.convert_protocol, val)
def test_convert_numeric_protocol_to_string(self):
self.assertIsInstance(ext_sg.convert_protocol(2), str)
class TestConvertEtherType(base.BaseTestCase):
def test_convert_unsupported_ethertype(self):
for val in ['ip', 'ip4', 'ip6', '']:
self.assertRaises(ext_sg.SecurityGroupRuleInvalidEtherType,
ext_sg.convert_ethertype_to_case_insensitive,
val)
|
|
#!/usr/bin/env python
"""Tests for grr.lib.aff4_objects.filestore."""
import hashlib
import os
import StringIO
import time
from grr.lib import action_mocks
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import events
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.aff4_objects import aff4_grr
from grr.lib.aff4_objects import filestore
from grr.lib.flows.general import file_finder
from grr.lib.flows.general import transfer
from grr.lib.rdfvalues import file_finder as rdf_file_finder
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import paths as rdf_paths
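# Minimal stand-in for a file store child used by FileStoreTest below: it only
# provides the pieces FileStore.AddFile touches (PRIORITY, PATH, AddFile, Get
# and a Schema with ACTIVE) and hands back an in-memory AFF4 stream that the
# parent store copies the source file into.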
class FakeStore(object):
PRIORITY = 99
PATH = rdfvalue.RDFURN("aff4:/files/temp")
def __init__(self, path, token):
self.dest_file = aff4.FACTORY.Create(
path, aff4.AFF4MemoryStream, mode="rw", token=token)
def AddFile(self, unused_blob_fd, sync=False):
_ = sync
return self.dest_file
def Get(self, _):
return True
class Schema(object):
ACTIVE = "unused"
class FileStoreTest(test_lib.AFF4ObjectTest):
"""Tests for file store functionality."""
def testFileAdd(self):
fs = aff4.FACTORY.Open(
filestore.FileStore.PATH, filestore.FileStore, token=self.token)
fake_store1 = FakeStore("aff4:/files/temp1", self.token)
fake_store2 = FakeStore("aff4:/files/temp2", self.token)
with utils.Stubber(fs, "OpenChildren", lambda: [fake_store1, fake_store2]):
src_fd = aff4.FACTORY.Create(
aff4.ROOT_URN.Add("temp").Add("src"),
aff4_grr.VFSBlobImage,
token=self.token,
mode="rw")
src_fd.SetChunksize(filestore.FileStore.CHUNK_SIZE)
src_data = "ABC" * filestore.FileStore.CHUNK_SIZE
src_data_fd = StringIO.StringIO(src_data)
src_fd.AppendContent(src_data_fd)
fs.AddFile(src_fd)
# Reset file pointers
src_fd.Seek(0)
fake_store1.dest_file.Seek(0)
fake_store2.dest_file.Seek(0)
# Check file content got written to both data stores.
self.assertEqual(src_data, fake_store1.dest_file.Read(-1))
self.assertEqual(src_data, fake_store2.dest_file.Read(-1))
def testGetByPriority(self):
priority1 = aff4.FACTORY.Create(
"aff4:/files/1", filestore.FileStore, mode="rw", token=self.token)
priority1.PRIORITY = 1
priority1.Set(priority1.Schema.ACTIVE(False))
priority2 = aff4.FACTORY.Create(
"aff4:/files/2", filestore.FileStore, mode="rw", token=self.token)
priority2.PRIORITY = 2
priority3 = aff4.FACTORY.Create(
"aff4:/files/3", filestore.FileStore, mode="rw", token=self.token)
priority3.PRIORITY = 3
fs = aff4.FACTORY.Open(
filestore.FileStore.PATH, filestore.FileStore, token=self.token)
with utils.Stubber(fs, "OpenChildren",
lambda: [priority3, priority1, priority2]):
child_list = list(fs.GetChildrenByPriority())
self.assertEqual(child_list[0].PRIORITY, 2)
self.assertEqual(child_list[1].PRIORITY, 3)
child_list = list(fs.GetChildrenByPriority(allow_external=False))
self.assertEqual(child_list[0].PRIORITY, 2)
class HashFileStoreTest(test_lib.AFF4ObjectTest):
"""Tests for hash file store functionality."""
def setUp(self):
super(HashFileStoreTest, self).setUp()
client_ids = self.SetupClients(1)
self.client_id = client_ids[0]
@staticmethod
def AddFileToFileStore(pathspec=None, client_id=None, token=None):
"""Adds file with given pathspec to the hash file store."""
if pathspec is None:
raise ValueError("pathspec can't be None")
if client_id is None:
raise ValueError("client_id can't be None")
urn = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_id)
client_mock = action_mocks.GetFileClientMock()
for _ in test_lib.TestFlowHelper(
transfer.GetFile.__name__,
client_mock,
token=token,
client_id=client_id,
pathspec=pathspec):
pass
auth_state = rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED
events.Events.PublishEvent(
"FileStore.AddFileToStore",
rdf_flows.GrrMessage(
payload=urn, auth_state=auth_state),
token=token)
worker = test_lib.MockWorker(token=token)
worker.Simulate()
return urn
def AddFile(self, path):
"""Add file with a subpath (relative to winexec_img.dd) to the store."""
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "winexec_img.dd"))
pathspec.Append(path=path, pathtype=rdf_paths.PathSpec.PathType.TSK)
return self.AddFileToFileStore(
pathspec, client_id=self.client_id, token=self.token)
def testListHashes(self):
self.AddFile("/Ext2IFS_1_10b.exe")
hashes = list(filestore.HashFileStore.ListHashes(token=self.token))
self.assertEqual(len(hashes), 5)
self.assertTrue(
filestore.FileStoreHash(
fingerprint_type="pecoff",
hash_type="md5",
hash_value="a3a3259f7b145a21c7b512d876a5da06") in hashes)
self.assertTrue(
filestore.FileStoreHash(
fingerprint_type="pecoff",
hash_type="sha1",
hash_value="019bddad9cac09f37f3941a7f285c79d3c7e7801") in hashes)
self.assertTrue(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a") in hashes)
self.assertTrue(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="sha1",
hash_value="7dd6bee591dfcb6d75eb705405302c3eab65e21a") in hashes)
self.assertTrue(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="sha256",
hash_value="0e8dc93e150021bb4752029ebbff51394aa36f06"
"9cf19901578e4f06017acdb5") in hashes)
def testListHashesWithAge(self):
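    # Ages passed to ListHashes are interpreted as microseconds since the
    # epoch: time.time() is stubbed to 42 (seconds), i.e. 42e6 microseconds,
    # so age=41e6 excludes the newly added hashes and age=43e6 includes them.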
with utils.Stubber(time, "time", lambda: 42):
self.AddFile("/Ext2IFS_1_10b.exe")
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=41e6))
self.assertEqual(len(hashes), 0)
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=43e6))
self.assertEqual(len(hashes), 5)
hashes = list(filestore.HashFileStore.ListHashes(token=self.token))
self.assertEqual(len(hashes), 5)
def testHashAgeUpdatedWhenNewHitAddedWithinAFF4IndexCacheAge(self):
# Check that there are no hashes.
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(41e6, 1e10)))
self.assertEqual(len(hashes), 0)
with utils.Stubber(time, "time", lambda: 42):
self.AddFileToFileStore(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "one_a")),
client_id=self.client_id,
token=self.token)
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(41e6, 1e10)))
self.assertTrue(hashes)
hits = list(
filestore.HashFileStore.GetClientsForHash(
hashes[0], token=self.token))
self.assertEqual(len(hits), 1)
latest_time = 42 + config_lib.CONFIG["AFF4.intermediate_cache_age"] - 1
with utils.Stubber(time, "time", lambda: latest_time):
self.AddFileToFileStore(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "a", "b", "c", "helloc.txt")),
client_id=self.client_id,
token=self.token)
    # Check that now we have two hits for the previously added hash.
hits = list(
filestore.HashFileStore.GetClientsForHash(
hashes[0], token=self.token))
self.assertEqual(len(hits), 2)
# Check that new hit doesn't affect hash age.
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(43e6, 1e10)))
self.assertFalse(hashes)
def testHashAgeUpdatedWhenNewHitAddedAfterAFF4IndexCacheAge(self):
# Check that there are no hashes.
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(41e6, 1e10)))
self.assertEqual(len(hashes), 0)
with utils.Stubber(time, "time", lambda: 42):
self.AddFileToFileStore(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "one_a")),
client_id=self.client_id,
token=self.token)
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(41e6, 1e10)))
self.assertTrue(hashes)
hits = list(
filestore.HashFileStore.GetClientsForHash(
hashes[0], token=self.token))
self.assertEqual(len(hits), 1)
latest_time = 42 + config_lib.CONFIG["AFF4.intermediate_cache_age"] + 1
with utils.Stubber(time, "time", lambda: latest_time):
self.AddFileToFileStore(
rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS,
path=os.path.join(self.base_path, "a", "b", "c", "helloc.txt")),
client_id=self.client_id,
token=self.token)
    # Check that now we have two hits for the previously added hash.
hits = list(
filestore.HashFileStore.GetClientsForHash(
hashes[0], token=self.token))
self.assertEqual(len(hits), 2)
# Check that new hit affects hash age.
hashes = list(
filestore.HashFileStore.ListHashes(
token=self.token, age=(43e6, 1e10)))
self.assertTrue(hashes)
def testGetClientsForHash(self):
self.AddFile("/Ext2IFS_1_10b.exe")
self.AddFile("/idea.dll")
hits = list(
filestore.HashFileStore.GetClientsForHash(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a"),
token=self.token))
self.assertListEqual(hits, [
self.client_id.Add("fs/tsk").Add(self.base_path)
.Add("winexec_img.dd/Ext2IFS_1_10b.exe")
])
def testGetClientsForHashWithAge(self):
with utils.Stubber(time, "time", lambda: 42):
self.AddFile("/Ext2IFS_1_10b.exe")
self.AddFile("/idea.dll")
hits = list(
filestore.HashFileStore.GetClientsForHash(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a"),
age=41e6,
token=self.token))
self.assertEqual(len(hits), 0)
hits = list(
filestore.HashFileStore.GetClientsForHash(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a"),
age=43e6,
token=self.token))
self.assertEqual(len(hits), 1)
hits = list(
filestore.HashFileStore.GetClientsForHash(
filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a"),
token=self.token))
self.assertEqual(len(hits), 1)
def testGetClientsForHashes(self):
self.AddFile("/Ext2IFS_1_10b.exe")
self.AddFile("/idea.dll")
hash1 = filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a")
hash2 = filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="sha1",
hash_value="e1f7e62b3909263f3a2518bbae6a9ee36d5b502b")
hits = dict(
filestore.HashFileStore.GetClientsForHashes(
[hash1, hash2], token=self.token))
self.assertEqual(len(hits), 2)
self.assertListEqual(hits[hash1], [
self.client_id.Add("fs/tsk").Add(self.base_path).Add(
"winexec_img.dd/Ext2IFS_1_10b.exe")
])
self.assertListEqual(hits[hash2], [
self.client_id.Add("fs/tsk").Add(self.base_path).Add(
"winexec_img.dd/idea.dll")
])
def testGetClientsForHashesWithAge(self):
with utils.Stubber(time, "time", lambda: 42):
self.AddFile("/Ext2IFS_1_10b.exe")
self.AddFile("/idea.dll")
hash1 = filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="md5",
hash_value="bb0a15eefe63fd41f8dc9dee01c5cf9a")
hash2 = filestore.FileStoreHash(
fingerprint_type="generic",
hash_type="sha1",
hash_value="e1f7e62b3909263f3a2518bbae6a9ee36d5b502b")
hits = dict(
filestore.HashFileStore.GetClientsForHashes(
[hash1, hash2], age=41e6, token=self.token))
self.assertEqual(len(hits), 0)
hits = dict(
filestore.HashFileStore.GetClientsForHashes(
[hash1, hash2], age=43e6, token=self.token))
self.assertEqual(len(hits), 2)
hits = dict(
filestore.HashFileStore.GetClientsForHashes(
[hash1, hash2], token=self.token))
self.assertEqual(len(hits), 2)
def testAttributesOfFileFoundInHashFileStoreAreSetCorrectly(self):
client_ids = self.SetupClients(2)
filename = os.path.join(self.base_path, "tcpip.sig")
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=filename)
urn1 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[0])
urn2 = aff4_grr.VFSGRRClient.PathspecToURN(pathspec, client_ids[1])
for client_id in client_ids:
client_mock = action_mocks.FileFinderClientMock()
for _ in test_lib.TestFlowHelper(
file_finder.FileFinder.__name__,
client_mock,
token=self.token,
client_id=client_id,
paths=[filename],
action=rdf_file_finder.FileFinderAction(
action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD)):
pass
# Running worker to make sure FileStore.AddFileToStore event is processed
# by the worker.
worker = test_lib.MockWorker(token=self.token)
worker.Simulate()
fd1 = aff4.FACTORY.Open(urn1, token=self.token)
self.assertTrue(isinstance(fd1, aff4_grr.VFSBlobImage))
fd2 = aff4.FACTORY.Open(urn2, token=self.token)
self.assertTrue(isinstance(fd2, filestore.FileStoreImage))
self.assertEqual(fd1.Get(fd1.Schema.STAT), fd2.Get(fd2.Schema.STAT))
self.assertEqual(fd1.Get(fd1.Schema.SIZE), fd2.Get(fd2.Schema.SIZE))
self.assertEqual(
fd1.Get(fd1.Schema.CONTENT_LAST), fd2.Get(fd2.Schema.CONTENT_LAST))
def testEmptyFileHasNoBackreferences(self):
    # First make sure we store backrefs for a non-empty file.
filename = os.path.join(self.base_path, "tcpip.sig")
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=filename)
self.AddFileToFileStore(
pathspec, client_id=self.client_id, token=self.token)
self.assertEqual(len(self._GetBackRefs(filename)), 3)
# Now use the empty file.
filename = os.path.join(self.base_path, "empty_file")
pathspec = rdf_paths.PathSpec(
pathtype=rdf_paths.PathSpec.PathType.OS, path=filename)
self.AddFileToFileStore(
pathspec, client_id=self.client_id, token=self.token)
self.assertEqual(len(self._GetBackRefs(filename)), 0)
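  # Collects the back-references stored under
  # aff4:/files/hash/generic/<algo>/<hexdigest> for the given file, i.e. the
  # "index:target:" entries pointing back at the files that produced each hash.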
def _GetBackRefs(self, filename):
res = []
for name, algo in [
("sha256", hashlib.sha256),
("sha1", hashlib.sha1),
("md5", hashlib.md5),
]:
h = algo()
h.update(open(filename, "rb").read())
urn = rdfvalue.RDFURN("aff4:/files/hash/generic/").Add(name)
urn = urn.Add(h.hexdigest())
for _, target, _ in data_store.DB.ResolvePrefix(
urn, "index:target:", token=self.token):
res.append(target)
return res
def main(argv):
# Run the full test suite
test_lib.GrrTestProgram(argv=argv)
if __name__ == "__main__":
flags.StartMain(main)
|
|
# occiput
# Stefano Pedemonte
# April 2014
# Harvard University, Martinos Center for Biomedical Imaging
# Boston, MA, USA
__all__ = ['vNAV_MPRage']
import Image
from occiput.Core import Image3D, Transform_Affine
import numpy
import nibabel
import dicom
import pylab
import os
import copy
from occiput.Core import transformations as tr
BOX_MIN = [-50.0,-50.0,-50.0]
BOX_MAX = [50.0,50.0,50.0]
THRESHOLD_MM = 1.1
LINE_COLOR = "#2f8dff"
def quaternion_to_rotation(q,axes='sxyz'):
return tr.euler_from_quaternion(q,axes)
def angle_axis_to_quaternion(angle_rad,axis):
f = numpy.sin(0.5*angle_rad)
quaternion = numpy.asarray( [numpy.cos(0.5*angle_rad),f*axis[0],f*axis[1],f*axis[2]] )
return quaternion
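# Worked example (illustrative only, not part of the original module): a
# rotation of pi/2 radians about the z axis gives
#   q = [cos(pi/4), 0, 0, sin(pi/4)] ~= [0.7071, 0.0, 0.0, 0.7071],
# i.e. a scalar-first (w, x, y, z) quaternion, which is the convention expected
# by tr.quaternion_matrix() used further below.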
def angle_axis_to_rotation(angle_rad,axis):
quaternion = angle_axis_to_quaternion(angle_rad,axis)
rotation = quaternion_to_rotation(quaternion)
return rotation
def affine_from_quaternion(q,axes='sxyz'):
pass
def quaternion_from_matrix(affine):
    return tr.quaternion_from_matrix(affine)
def rad_to_deg(rad):
return numpy.asarray(rad)*180.0/numpy.pi
def deg_to_rad(deg):
return numpy.asarray(deg)/180.0*numpy.pi
class vNAV_MPRage():
def __init__(self,path=None, from_dicom_comments=True, files_start_with=None, files_end_with=None):
self._n_time_points = 0
self._duration = []
self._motion = []
if path is not None:
self.load_data_files(path, from_dicom_comments, files_start_with, files_end_with)
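    # Typical usage (a sketch; the path below is hypothetical):
    #   nav = vNAV_MPRage("/path/to/vnav_dicom_series")
    #   events = nav.extract_motion_events(threshold=THRESHOLD_MM)
    #   figs = nav.plot_motion()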
def get_volume_dicom(self, index):
dcm_file = self._paths[index]
f = dicom.read_file(dcm_file)
return f
def get_motion_quaternion(self, index):
motion_affine = self.get_motion_affine(index)
return quaternion_from_matrix(motion_affine)
def get_motion_affine(self, index):
return self._motion[index]
def get_n_time_points(self):
return self._n_time_points
def get_duration(self,index):
        return self._duration[index]
def load_data_files(self, path, from_dicom_comments=True, files_start_with=None, files_end_with=None, exclude_files_end_with=['.dat','.txt','.py','.pyc','.nii','.gz'] ):
"""Load vNAV dicom files from given path and extract motion information.
If from_dicom_comments==True, use the information stored in the dicom comments.
If from_dicom_comments==False, use the information stored in the dicom MOCO field.
As of April 2014, these two express motion in different coordinate systems.
The dicom comments report absolute motion in scanner coordinates (origin is the magnetic iso-center of the scanner).
The dicom MOCO field is currently for Siemens use (??). """
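        # Assumed layout of the dicom comment string, inferred from the slicing
        # below (the vNAV comment format itself is not documented here): tokens
        # 1:5 hold the rotation as an angle (treated as radians) plus a 3-vector
        # axis, tokens 6:9 the translation in mm, and token 10 a frequency value;
        # token 0 is skipped.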
self._n_time_points = 0
self._duration = []
self._motion = []
self._paths = []
self._tx = []; self._ty = []; self._tz = [];
self._rx = []; self._ry = []; self._rz = [];
self._tx_comm = []; self._ty_comm = []; self._tz_comm = [];
self._rx_comm = []; self._ry_comm = []; self._rz_comm = [];
self._q0_comm = []; self._q1_comm = []; self._q2_comm = []; self._q3_comm = [];
self._a0_comm = []; self._a1_comm = []; self._a2_comm = []; self._a3_comm = [];
N=0
        # scan the given path and keep the files that look like vNAV dicom files
        files = os.listdir(path)
        # filter by optional name prefix/suffix and by excluded extensions
for file_name in files:
file_valid = True
if files_start_with is not None:
if not file_name.startswith(files_start_with):
file_valid = False
if files_end_with is not None:
if not file_name.endswith(files_end_with):
file_valid = False
for s in exclude_files_end_with:
if file_name.endswith(s):
file_valid = False
if file_valid:
full_path = path+os.sep+file_name
# read moco information from files
self._paths.append(full_path)
f = dicom.read_file(full_path)
t = f.get(0x00191025).value
r = f.get(0x00191026).value
self._tx.append(t[0]); self._ty.append(t[1]); self._tz.append(t[2]);
self._rx.append(r[0]); self._ry.append(r[1]); self._rz.append(r[2]);
motion_dicom_moco = []
# extract moco information stored in the dicom comment field
s = f.get(0x00204000).value
if N:
a = numpy.float32(s.split(' ')[1:5])
t = numpy.float32(s.split(' ')[6:9])
freq = numpy.float32(s.split(' ')[10])
r = angle_axis_to_rotation(a[0],a[1:4])
else:
t = numpy.float32([0,0,0])
r = numpy.float32([0,0,0])
a = numpy.float32([0,1,0,0]) #FIXME: is this right?
q = angle_axis_to_quaternion(a.copy()[0],a.copy()[1:4])
self._a0_comm.append(a[0]); self._a1_comm.append(a[1]); self._a2_comm.append(a[2]); self._a3_comm.append(a[3]);
self._tx_comm.append(t[0]); self._ty_comm.append(t[1]); self._tz_comm.append(t[2])
self._q0_comm.append(q[0]); self._q1_comm.append(q[1]); self._q2_comm.append(q[2]); self._q3_comm.append(q[3]);
self._rx_comm.append(r[0]); self._ry_comm.append(r[1]); self._rz_comm.append(r[2]);
tra_mat = tr.translation_matrix(t)
rot_mat = tr.quaternion_matrix(q)
motion_dicom_comments = numpy.dot(tra_mat,rot_mat)
#xaxis, yaxis, zaxis = [1, 0, 0], [0, 1, 0], [0, 0, 1]
#Rx = tr.rotation_matrix(r[0], xaxis)
#Ry = tr.rotation_matrix(r[1], yaxis)
#Rz = tr.rotation_matrix(r[2], zaxis)
#rot_mat = tr.concatenate_matrices(Rx, Ry, Rz)
#rot_mat = Ry.copy()
#motion_dicom_comments = numpy.dot(tra_mat,rot_mat)
#motion_dicom_comments = rot_mat.copy()
N += 1
if from_dicom_comments:
self._motion.append(motion_dicom_comments)
else:
self._motion.append(motion_dicom_moco)
acquisition_number = f.get(0x00200012).value
creation_time = f.get(0x00080013).value
# print "Acquisition number: ", acquisition_number
# print "Creation time: ",creation_time
self._n_time_points = N
def _draw_rectangle(self, axis, x, y, alpha=0.2, ec="gray", fc="gray"): #"CornflowerBlue"
axis.add_patch( pylab.Rectangle( (x[0],y[0]) , x[1]-x[0],y[1]-y[0], alpha=alpha, ec=ec, fc=fc, visible=True) )
#pylab.draw()
def _draw_rectangles(self, axis, windows, range_y):
for ii in range(len(windows)):
tt = windows[ii]
yy = (range_y[0],range_y[1])
self._draw_rectangle(axis,tt,yy)
def _draw_line(self, axis, x, range_y, color="#ff8d8d", linestyle="dashed", label=""):
axis.vlines(x, range_y[0], range_y[1], colors=color, linestyles=linestyle, label=label, visible=True)
def _draw_events(self, axis, time, range_y, events, color="#ff8d8d", linestyle="dashed", label=""):
for t_index in time:
if t_index:
if events[t_index-1]:
#print "Drawing line: ", t_index, range_y, color, linestyle
self._draw_line(axis, t_index, range_y, color, linestyle, label)
def plot_motion(self, save_to_file=None, display_dicom_comments=True, display_dicom_moco=False, range_mm=(-8,8), range_deg=(-7,7), extract_events_threshold=THRESHOLD_MM, method='box', box_min=BOX_MIN, box_max=BOX_MAX, min_duration=3, line_color=LINE_COLOR):
t = range(len(self._tx))
# make windows:
if extract_events_threshold is not None:
windows = []
events = self.extract_motion_events(method, extract_events_threshold, box_min, box_max)
t_index_start = 0
for t_index in t:
if t_index: #this excludes the possibility of a motion event at time 0
if events[t_index-1]:
t_index_end = t_index-1
if t_index_end-t_index_start > min_duration:
windows.append( (t_index_start, t_index_end) ) #end window with frame before a motion event
t_index_start = t_index+1 #start window with frame after a motion event
windows.append( (t_index_start, t[-1]) )
if display_dicom_moco:
fig1 = pylab.figure(1)
ax1 = fig1.add_subplot(311)
ax1.plot(t, self._tx, line_color)
ax1.grid(True)
ax1.set_ylim( range_mm )
ax1.set_ylabel('TX [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
ax1 = fig1.add_subplot(312)
ax1.plot(t, self._ty, line_color)
ax1.grid(True)
ax1.set_ylim( range_mm )
ax1.set_ylabel('TY [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
ax1 = fig1.add_subplot(313)
ax1.plot(t, self._tz, line_color)
ax1.grid(True)
ax1.set_ylim( range_mm )
ax1.set_ylabel('TZ [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
# if save_to_file is not None:
# pylab.savefig(save_to_file)
fig2 = pylab.figure(2)
ax1 = fig2.add_subplot(311)
ax1.plot(t, rad_to_deg(self._rx), line_color)
ax1.grid(True)
ax1.set_ylim( range_deg )
ax1.set_ylabel('RX [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
ax1 = fig2.add_subplot(312)
ax1.plot(t, rad_to_deg(self._ry), line_color)
ax1.grid(True)
ax1.set_ylim( range_deg )
ax1.set_ylabel('RY [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
ax1 = fig2.add_subplot(313)
ax1.plot(t, rad_to_deg(self._rz), line_color)
ax1.grid(True)
ax1.set_ylim( range_deg )
ax1.set_ylabel('RZ [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
# if save_to_file is not None:
# pylab.savefig(save_to_file)
else:
fig1=None
fig2=None
if display_dicom_comments:
fig3 = pylab.figure(3)
#mr = numpy.min([self._rx_comm,self._ry_comm,self._rz_comm])
#Mr = numpy.max([self._rx_comm,self._ry_comm,self._rz_comm])
#mt = numpy.min([self._tx_comm,self._ty_comm,self._tz_comm])
#Mt = numpy.max([self._tx_comm,self._ty_comm,self._tz_comm])
mr = range_deg[0]
Mr = range_deg[1]
mt = range_mm[0]
Mt = range_mm[1]
ax1 = fig3.add_subplot(311)
ax1.set_title("Rotation vs. vNAV frame number")
ax1.plot(t, rad_to_deg(self._rx_comm), line_color)
ax1.grid(True)
ax1.set_ylim( (mr,Mr) )
ax1.set_ylabel('RX [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
#print windows
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
ax1 = fig3.add_subplot(312)
# ax1.set_title("Rotation comments Y")
ax1.plot(t, rad_to_deg(self._ry_comm), line_color)
ax1.grid(True)
ax1.set_ylim( (mr,Mr) )
ax1.set_ylabel('RY [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
ax1 = fig3.add_subplot(313)
# ax1.set_title("Rotation comments Z")
ax1.plot(t, rad_to_deg(self._rz_comm), line_color)
ax1.grid(True)
ax1.set_ylim( (mr,Mr) )
ax1.set_ylabel('RZ [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_deg)
self._draw_events(ax1, t, range_deg, events)
# if save_to_file is not None:
# pylab.savefig(save_to_file)
fig4 = pylab.figure(4)
ax1 = fig4.add_subplot(311)
ax1.set_title("Translation vs. vNAV frame number")
ax1.plot(t, self._tx_comm, line_color)
ax1.grid(True)
ax1.set_ylim( (mt,Mt) )
ax1.set_ylabel('TX [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
ax1 = fig4.add_subplot(312)
# ax1.set_title("Translation comments Y")
ax1.plot(t, self._ty_comm, line_color)
ax1.grid(True)
ax1.set_ylim( (mt,Mt) )
ax1.set_ylabel('TY [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
ax1 = fig4.add_subplot(313)
# ax1.set_title("Translation comments Z")
ax1.plot(t, self._tz_comm, line_color)
ax1.grid(True)
ax1.set_ylim( (mt,Mt) )
ax1.set_ylabel('TZ [mm]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if extract_events_threshold is not None:
self._draw_rectangles(ax1,windows,range_mm)
self._draw_events(ax1, t, range_mm, events)
# if save_to_file is not None:
# pylab.savefig(save_to_file)
else:
fig3=None
fig4=None
pylab.show()
return fig1,fig2,fig3,fig4
def get_mean_displacement(self, index, method='box', box_min=BOX_MIN, box_max=BOX_MAX):
mat = self.get_motion_affine(index)
mat = Transform_Affine(mat)
if method=='box':
b = box_min
B = box_max
            corners = numpy.asarray([[b[0], b[1], b[2], 1], [B[0], b[1], b[2], 1],
                                     [b[0], B[1], b[2], 1], [B[0], B[1], b[2], 1],
                                     [b[0], b[1], B[2], 1], [B[0], b[1], B[2], 1],
                                     [b[0], B[1], B[2], 1], [B[0], B[1], B[2], 1]]).transpose()
corners_t = mat.left_multiply(corners)
corners_t[3,:]=0
corners[3,:] =0
#print "CORNERS: ", corners_t
#dist = numpy.sqrt(((corners-corners_t)**2).sum(0))
dist = (corners-corners_t).sum(0)
mean_displ = numpy.mean(dist)
else:
raise "Method to compute mean displacement is unknown. "
return mean_displ
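    # Note: get_mean_displacement() above averages the *signed* per-axis sums of
    # the corner displacements (the Euclidean version is left commented out),
    # whereas the two *_variation methods below use the Euclidean distance of
    # each transformed corner.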
def get_mean_displacement_variation(self, index, method='box', box_min=BOX_MIN, box_max=BOX_MAX):
mat = self.get_motion_affine(index)
if index > 0:
mat0 = self.get_motion_affine(index-1)
else:
mat0 = tr.identity_matrix()
mat = Transform_Affine(mat)
mat0 = Transform_Affine(mat0)
if method=='box':
b = box_min
B = box_max
            corners = numpy.asarray([[b[0], b[1], b[2], 1], [B[0], b[1], b[2], 1],
                                     [b[0], B[1], b[2], 1], [B[0], B[1], b[2], 1],
                                     [b[0], b[1], B[2], 1], [B[0], b[1], B[2], 1],
                                     [b[0], B[1], B[2], 1], [B[0], B[1], B[2], 1]]).transpose()
corners_t = mat.left_multiply(corners)
corners_t[3,:]=0
corners_t0 = mat0.left_multiply(corners)
corners_t0[3,:]=0
dist = numpy.sqrt(((corners_t-corners_t0)**2).sum(0))
#dist = (corners-corners_t).sum(0)
mean_displ = numpy.mean(dist)
else:
raise "Method to compute mean displacement is unknown. "
return mean_displ
def get_mean_displacement_variation_since_time(self, index_new, index_old, method='box', box_min=BOX_MIN, box_max=BOX_MAX):
mat = self.get_motion_affine(index_new)
mat0 = self.get_motion_affine(index_old)
mat = Transform_Affine(mat)
mat0 = Transform_Affine(mat0)
if method=='box':
b = box_min
B = box_max
            corners = numpy.asarray([[b[0], b[1], b[2], 1], [B[0], b[1], b[2], 1],
                                     [b[0], B[1], b[2], 1], [B[0], B[1], b[2], 1],
                                     [b[0], b[1], B[2], 1], [B[0], b[1], B[2], 1],
                                     [b[0], B[1], B[2], 1], [B[0], B[1], B[2], 1]]).transpose()
corners_t = mat.left_multiply(corners)
corners_t[3,:]=0
corners_t0 = mat0.left_multiply(corners)
corners_t0[3,:]=0
dist = numpy.sqrt(((corners_t-corners_t0)**2).sum(0))
#dist = (corners-corners_t).sum(0)
mean_displ = numpy.mean(dist)
else:
raise "Method to compute mean displacement is unknown. "
return mean_displ
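    # A frame is flagged as a motion event when the mean displacement of the
    # bounding-box corners since the last event exceeds `threshold` (in mm);
    # the reference frame is then reset to the event frame, so sustained slow
    # drift eventually triggers an event as well.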
def extract_motion_events(self, method='box', threshold=THRESHOLD_MM, box_min=BOX_MIN, box_max=BOX_MAX):
t = range(self.get_n_time_points())
is_event = numpy.zeros(len(t)-1)
t_index_old = 0
for t_index in t[1:]:
# mean_displ = self.get_mean_displacement_variation(t_index, method, box_min, box_max )
# if numpy.sqrt((mean_displ)**2) >= threshold:
mean_displ = self.get_mean_displacement_variation_since_time(t_index, t_index_old, method, box_min, box_max )
if numpy.sqrt((mean_displ)**2) >= threshold:
t_index_old = numpy.copy(t_index)
is_event[t_index-1] = 1
else:
is_event[t_index-1] = 0
return is_event
def plot_mean_displacement(self, method='box', box_min=BOX_MIN,box_max=BOX_MAX, save_to_file=None, plot_zero=False, extract_events_threshold=THRESHOLD_MM, plot_range=[None,None], line_color=LINE_COLOR ):
t = range(self.get_n_time_points())
mean_displ = numpy.zeros(len(t))
mean_displ_var = numpy.zeros(len(t))
mean_displ_var_since_event = numpy.zeros(len(t))
if extract_events_threshold is not None:
events = self.extract_motion_events(method, extract_events_threshold, box_min, box_max)
t_index_old = 0
for t_index in t:
mean_displ[t_index] = self.get_mean_displacement(t_index, method, box_min, box_max )
mean_displ_var[t_index] = self.get_mean_displacement_variation(t_index, method, box_min, box_max )
mean_displ_var_since_event[t_index] = self.get_mean_displacement_variation_since_time(t_index, t_index_old, method, box_min, box_max )
if extract_events_threshold is not None and t_index:
if events[t_index-1] == 1:
t_index_old = t_index-1
if not plot_zero:
t = t[1:]
mean_displ = mean_displ[1:]
mean_displ_var = mean_displ_var[1:]
mean_displ_var_since_event = mean_displ_var_since_event[1:]
# mean_displ[numpy.where(mean_displ==0)]=-1000
# mean_displ_var[numpy.where(mean_displ_var==0)]=-1000
# mean_displ_var_since_event[numpy.where(mean_displ_var_since_event==0)]=-1000
fig = pylab.figure(5)
ax1 = fig.add_subplot(311)
ax1.set_title("Mean displacement [mm]")
ax1.plot(t,mean_displ, line_color)
ax1.grid(True)
if plot_range[0] is None:
plot_range[0]=mean_displ.min()
if plot_range[1] is None:
plot_range[1]=mean_displ.max()
ax1.set_ylim( plot_range )
ax1.set_ylabel('disp [mm]')
if extract_events_threshold is not None:
ax1.hold(1)
E = events*mean_displ
E[numpy.where(E<0.1)]=-1000
ax1.plot(t,E,'r.')
#ax1.plot(t,0.5*(events*(mean_displ.max()-mean_displ.min())+mean_displ.min()),'r.')
#for label in ax1.get_xticklabels():
# label.set_color('r')
ax1 = fig.add_subplot(312)
ax1.set_title("Mean displacement delta ")
ax1.plot(t,mean_displ_var, line_color)
ax1.grid(True)
if plot_range[0] is None:
plot_range[0]=mean_displ_var.min()
if plot_range[1] is None:
plot_range[1]=mean_displ_var.max()
ax1.set_ylim( plot_range )
ax1.set_ylabel('delta [mm]')
if extract_events_threshold is not None:
ax1.hold(1)
E = events*mean_displ_var
E[numpy.where(E<0.1)]=-1000
ax1.plot(t,E,'r.')
#ax1.plot(t,0.5*(events*(mean_displ.max()-mean_displ.min())+mean_displ.min()),'r.')
#for label in ax1.get_xticklabels():
# label.set_color('r')
ax1 = fig.add_subplot(313)
ax1.set_title("Mean displacement event")
ax1.plot(t,mean_displ_var_since_event, line_color)
ax1.grid(True)
if plot_range[0] is None:
plot_range[0]=mean_displ_var_since_event.min()
if plot_range[1] is None:
plot_range[1]=mean_displ_var_since_event.max()
ax1.set_ylim( plot_range )
ax1.set_ylabel('event [mm]')
if extract_events_threshold is not None:
ax1.hold(1)
E = events*mean_displ_var_since_event
E[numpy.where(E<0.1)]=-1000
ax1.plot(t,E,'r.')
#ax1.plot(t,0.5*(events*(mean_displ.max()-mean_displ.min())+mean_displ.min()),'r.')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if save_to_file is not None:
pylab.savefig(save_to_file)
pylab.show()
return fig
def plot_quaternion(self, save_to_file=None, plot_range=[None,None], line_color=LINE_COLOR):
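# Two-panel figure: rotation angle (degrees) versus vNAV frame number, and
# the dot product between the (presumed) rotation axis of each frame and
# that of the first frame.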
# work on a copy so that the shared default plot_range list is not modified
plot_range = list(plot_range)
t = range(self.get_n_time_points())[1:]
s = rad_to_deg(numpy.asarray(self._a0_comm))[1:]
fig = pylab.figure(6)
ax1 = fig.add_subplot(211)
ax1.set_title("Rotation agnle [deg] vs. vNAV frame number")
ax1.plot(t,s, line_color)
ax1.grid(True)
if plot_range[0] is None:
plot_range[0]=s.min()
if plot_range[1] is None:
plot_range[1]=s.max()
ax1.set_ylim( plot_range )
ax1.set_ylabel('Rotation angle [deg]')
#for label in ax1.get_xticklabels():
# label.set_color('r')
arc = numpy.zeros(self.get_n_time_points())
# rotation axis of the first frame and of frame t, as 3-vectors
v0 = numpy.asarray([self._a1_comm[0], self._a2_comm[0], self._a3_comm[0]])
for t in range(self.get_n_time_points()):
vt = numpy.asarray([self._a1_comm[t], self._a2_comm[t], self._a3_comm[t]])
arc[t] = numpy.dot(numpy.transpose(v0), vt)
ax1 = fig.add_subplot(212)
ax1.set_title("Arc vs. vNAV frame number")
t = range(self.get_n_time_points())
ax1.plot(t,arc, line_color)
ax1.grid(True)
plot_range[0]=arc.min()-0.0001
plot_range[1]=arc.max()+0.0001
ax1.set_ylim( plot_range )
ax1.set_ylabel('Axis dot product (cosine of arc)')
#for label in ax1.get_xticklabels():
# label.set_color('r')
if save_to_file is not None:
pylab.savefig(save_to_file)
pylab.show()
return fig
def _repr_html_(self):
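# IPython/Jupyter rich-display hook: renders the mean-displacement figure as
# a side effect (no HTML string is returned).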
self.plot_mean_displacement()
def load_vnav_mprage(path, from_dicom_comments=True, files_start_with=None, files_end_with=None):
return vNAV_MPRage(path, from_dicom_comments, files_start_with, files_end_with)
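# Example usage (a minimal sketch; the DICOM directory path and the output
# file name below are placeholders, not values taken from this module):
#
#   vnav = load_vnav_mprage('/path/to/vnav_dicom_series')
#   events = vnav.extract_motion_events(threshold=THRESHOLD_MM)
#   vnav.plot_mean_displacement(save_to_file='mean_displacement.png')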