| code (stringlengths 2-1.05M) | repo_name (stringlengths 5-104) | path (stringlengths 4-251) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int32, 2-1.05M) |
|---|---|---|---|---|---|
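A minimal sketch of reading these columns, assuming the rows below are published as a Hugging Face `datasets` dataset; the dataset identifier `user/python-gpl-code` is a placeholder, not the real path:

```python
from datasets import load_dataset

# Placeholder identifier -- replace it with the actual dataset path.
ds = load_dataset("user/python-gpl-code", split="train")

for i, row in enumerate(ds):
    # Each row follows the schema above: code, repo_name, path, language, license, size.
    print(row["repo_name"], row["path"], row["license"], row["size"])
    print(row["code"][:80])   # first characters of the source file
    if i == 2:                # peek at only a few rows
        break
```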
# AUTOMATICALLY GENERATED FILE
# DO NOT EDIT
# Generated from outputs.png
icon_outputs = b'iVBORw0KGgoAAAANSUhEUgAAADAAAAAwCAYAAABXAvmHAAAA'\
b'BHNCSVQICAgIfAhkiAAAAAlwSFlzAAAN1wAADdcBQiibeAAA'\
b'CuNJREFUaIHVmltsXcd1hr+Z2ftceZVoi4xFS6KdSlHkooGr'\
b'JgpyqWQEKFwkqNvaemrhINVDCxfpS4HW7YP8UD+5ShEbiuJL'\
b'0LhGW0Bx/ZDGdYwiiA2bVS624jqOqBspUbLEm07Ec9vXWdOH'\
b'fXgkUhR5KJkPXcAAm+SamfXPWutfa8+mYhX57ksvTez6xCe2'\
b'Gt+jq9xFIZ/H8zy01hhjaDabvPXWW+zbtw+tNSJCkqaEYUi9'\
b'XqNarXH6zJnxr331q/esttetiLeawq9/Xdm6cWCAOI4xxuD5'\
b'PlopcrkcuVyORqPB5OQk5XIZAGstJk1J0pT5apVfnTjBlUpl'\
b'ZD2MB9CrKVQqFcbPnmViYoKJ8XFOjo1Rq9dwOABEhCNHjuCc'\
b'wzmHOIdYi01T0tQizq2X7UAHHujt7WPrtq3EcUIulyNJEur1'\
b'Ohs2bGjr+L7fflaLZq+v8dCBB8QJIq59ws4JWpsV5zho6QLr'\
b'7IHVAYi0jadlj9aLp8Ub77zJbNcGs16yaghdO/lrQ6nFgdLY'\
b'vuvGedz88Dc9dPjOgip+1hil6mEyOvP9A9O3ZD1r9MDCWOoB'\
b'EzSvGe5cZnkWPzekweaH/7FYcubAx4e7vrNrpP/ZjWX9xJav'\
b'HP7UrQLoyAPipM06ywHo/uDdZec5R3vegjRrVb/buG0Pfu7+'\
b'/vs+OcybPzn7tXd/OXkfX/7mM82q/v7sG4/V1wKgcw9cl8h6'\
b'SQipOF5s/DJPbamADatGa0H5Pl/Y8xveV76067MPfGbLoTs3'\
b'qX/Y/OVn7l0LgM5yQDJ+X0hGpZfH7ZY8L0TRIh17WdmwbIKg'\
b'SdAMERyDg/383t5dg73dxT8/fmJqq/8Hh5+bODX9Or96YvHJ'\
b'3CqABS8sWLM0hNpAr/2wSH+ppFFVh0FAEEbXwkwrfvu3tvob'\
b'+sq//4ux6W3lrq6R2e0v/vv0K386s5J9q4YQLITRNRBLWQgg'\
b'jmNmpqex1q5YviSdVjap6yAICIKYMIwIg5gwiEmsZXhzv/ni'\
b'79x936fv23Tw7g25x0ceevFGirtOVvUAgBW7mFiW4UfnHKVy'\
b'mUYQ8/LrP+fyzDzVepWr81XCIOTjD3z9nyQNsHGUS+Lg/iiK'\
b'iOKk7SSHa4ern/P4ze0b+3tK3l+cmLj6KW//dw+dmr/jNV57'\
b'MLolACLSYsQWE4ncoFMoFMgXCpw4fZE33znN5uEhujYOUto4'\
b'hHOOLTvk6yIWsYITy+DQx4jj9DoAZF4WyfJNhM2DJd/3+EJ3'\
b'ybvLG6/srj78z9+6ePTRD9cMwFqL1jpjIlixQXNOEBHK3SVy'\
b'+QKe52N8D6M9jDEordFaoxQkseAU7ZxZaFXEZmuICN1lzY4t'\
b'pRHl7F9fqoTbcg9/54XxmfNv8sYTaecesAKoRR3oTQHgCMOI'\
b'ZjPCYRAB48AYhRHQBrQGpTQKAbVQ96RF1RkAay1WLNam2NSq'\
b'uwf9XLngHskpu924Lc8FDz939OLRA5UOc0BQWrcZQ8TeXNlB'\
b'GEU0myFK+4hoPDF4vuCcRjvXAtAiA7cQ/7RaccnacWtJbYqz'\
b'ljRNiOMIlTY9L67c74Jwq9HFe+7+wxeOdJwDGQvJTZM4Mz7L'\
b'8jAIqTdCtJfH4eGU4LQDBaalpvS1Oe33CHHYNDv9KIwJg4hm'\
b'EFCvN5ifrzJ35SqNep1mEG4UU/5Lv2dwW8c5YDzTrk4rhpA4'\
b'qtUa5yen2TAg9PQIpbIjX4BcDjzPobXAwuk7hxUhTYU0scRJ'\
b'QhjGBEFAFEUEzWb2HAYkqSAOcAKS+qRxV8cAsgR2CCsDAAjD'\
b'iLkrNVKXJ4g05QCKJSgUHJ5n8VrJrFSWV04yEGItSSokFsRp'\
b'HAqlNcYsJL4CpZ3yyxXtd79skc5CKKsDGbWtFEJZNXY4ScG5'\
b'bHPPYHwfz/fxPB/f9/A80zZoodIbEVKlsjwTh/UM2hq0NqhW'\
b'vCXWprFVp1LtH7GR/t7cf//t5VUB2DQliWNMuYtavYHYlOHh'\
b'YbTSy7YUOEFsgnVZomtt8IyH7xn8nEfO9zMAKgOw8Ma3UMFF'\
b'BCuZ4ZmOxlpLtVazjUbzB5G13557+9LrcNRCBzRqPA9rhTAM'\
b'6evrpau7m2Kx2ObpZRCglKVZvcK8Udi4SbN2hUI+R84zeEZj'\
b'jOKurdvp7t3QMljQrdMXIxgrmNbhNOpVNzN96VK1Ov9vYZR8'\
b'u3r8hTPX79ZRCDmxxHGMtNy7kgxs7OOhL32Gk2cnCaKAMJwn'\
b'asR8cH72ZSQFiY3YZPeDf3zgrg0DmxARtLXgwEhmuNaKNI25'\
b'NDkuM1Mf/rRarT1zdebKK1z+z+bS/TqjUSBXyEO7obv5fcPG'\
b'vh7+6sAfMTc3x8S5CSYvXKDeaHDwh//yCADdO/pzxdxhrfUj'\
b'uZyPtRalaAFROKAyN8X5MyevXLgw8UoQREeaY0ePt8y4QTpm'\
b'IVo8LU5WvGhY9M6gVLt1aBug8xatref75Hy/lbgOrS1pEnP+'\
b'zPv2zNj747NTlw/HNj0anP7BhzffbQ0AFqrwzUJobm4u43Rr'\
b'aTab1Gs1mvUGjUY22qI9h3LWmIyNRASlFLX5Cu+/M5qc+OXP'\
b'X6vNX30+OCevwhvparZ1XInlul5lORkYGEBECIKAsZMn6evv'\
b'p1AsUCwWl6Nd8TyDMQZrA86OvefePfbjS+fGT/5rHNSejy/9'\
b'7FQndnUMIEnSa53iKuHz7LPPEscxs3NzfHHv797IVMo4EAmC'\
b'kLmZKX729o/kg/eO/c/Uh+e/lVyeeRnOh50a3zGA1KaL7j2d'\
b'ZP1KmqakNuP7NE2xIpRKJQC6u7qWL3jKc07Z+uiPf+h++vaP'\
b'Khcnz7waBbVvplPHjwMrdIm3AUCsJQpCnHOEYciFC5MMbhpk'\
b'5J57SJOE4TBsg9m/fz8nxsbIFwpcuDB5I4gro00Z/PSrk+On'\
b'Upz93yRJXqfy3oqJelsAlFIkSUI+n0dpTblUZnjzMIVCnjiO'\
b'SZKETzYaxEnS/jYwODREpVJBxKFQSyu2tVM/+S8Lr/ER3P6u'\
b'CkBrTZIkWLFEQZNCvkAcx1kz6bLQaXz+88RRlHWVSUIUhsRx'\
b'jBWL0mrZS4CPwviOABitCaOYixcv0tvXl3WMSYzWClD09vXx'\
b'99/4RhZCaUocx9nXmUaDOIpxIhiz8m32egLIO+emjVabrAjO'\
b'SqtHD7CpxZgI43nQ4v+09WUmCAKSNMXh8DyfJI6m1gvAsr5d'\
b'It3Xja7WKG3btu1ze/bsedzzPAYGBjh27Njp0dHRvwFCoNEa'\
b'daAK1FrjI5dOWGjZzffs2SN79+59vFAoMDIyQpIkU6Ojo//x'\
b'0Zu4snREo8tJsVhM+/v7KRQK3HHHHRSLxVXL/nrILQPwfd/2'\
b'9/dTLBYZGBigXC7//wJQLpfT/v5+fN+nVCqhtV7/L3rLSCdJ'\
b'zKFDh+5VSr0D9LR/qX2czoPOgUvBRigXL/quNDQ0dG7//v0j'\
b'Sql1A6ceffTRP/F9/2MrKe3cufPeffv2/dlaF+/q6uKpp576'\
b'uzRNl+1xlFJqYGDg4pNPPvnSWtdur3H8+PFzpVJpy0pK1Wr1'\
b'lhbv6elZVadarZ7cvXv3jlvagA5ywFpLvV6nXl/Tpysge4/o'\
b'6+tbVed2xBsaGmr/n8NSSZKEq1ev4nkeU1NrL6b5fJ4dO3Ys'\
b'f/3SEpFVbglWEe/w4cMv5nK5TUv/EEWRFwRBDkBrPdTd3f3A'\
b'Whfv7e3l6aeffp6bvJC31r6tNqMjFnrsscd2OOd+AeTXsvjO'\
b'nTtnZ2dnBw8ePHh7cbKC/B8rHMqx5Ahr7gAAAABJRU5ErkJg'\
b'gg=='
| gion86/awlsim | awlsim/gui/icons/outputs.py | Python | gpl-2.0 | 5,539 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from webkitpy.common.net.git_cl import GitCL
from webkitpy.common.system.executive_mock import MockExecutive2
from webkitpy.common.host_mock import MockHost
class GitCLTest(unittest.TestCase):
def test_run(self):
host = MockHost()
host.executive = MockExecutive2(output='mock-output')
git_cl = GitCL(host)
output = git_cl.run(['command'])
self.assertEqual(output, 'mock-output')
self.assertEqual(host.executive.calls, [['git', 'cl', 'command']])
def test_run_with_auth(self):
host = MockHost()
host.executive = MockExecutive2(output='mock-output')
git_cl = GitCL(host, auth_refresh_token_json='token.json')
git_cl.run(['upload'])
self.assertEqual(
host.executive.calls,
[['git', 'cl', 'upload', '--auth-refresh-token-json', 'token.json']])
def test_some_commands_not_run_with_auth(self):
host = MockHost()
host.executive = MockExecutive2(output='mock-output')
git_cl = GitCL(host, auth_refresh_token_json='token.json')
git_cl.run(['issue'])
self.assertEqual(host.executive.calls, [['git', 'cl', 'issue']])
def test_get_issue_number(self):
host = MockHost()
host.executive = MockExecutive2(output='Issue number: 12345 (http://crrev.com/12345)')
git_cl = GitCL(host)
self.assertEqual(git_cl.get_issue_number(), '12345')
def test_get_issue_number_none(self):
host = MockHost()
host.executive = MockExecutive2(output='Issue number: None (None)')
git_cl = GitCL(host)
self.assertEqual(git_cl.get_issue_number(), 'None')
def test_all_jobs_finished_empty(self):
self.assertTrue(GitCL.all_jobs_finished([]))
def test_all_jobs_finished_with_started_jobs(self):
self.assertFalse(GitCL.all_jobs_finished([
{
'builder_name': 'some-builder',
'status': 'COMPLETED',
'result': 'FAILURE',
},
{
'builder_name': 'some-builder',
'status': 'STARTED',
'result': None,
},
]))
def test_all_jobs_finished_only_completed_jobs(self):
self.assertTrue(GitCL.all_jobs_finished([
{
'builder_name': 'some-builder',
'status': 'COMPLETED',
'result': 'FAILURE',
},
{
'builder_name': 'some-builder',
'status': 'COMPLETED',
'result': 'SUCCESS',
},
]))
def test_has_failing_try_results_empty(self):
self.assertFalse(GitCL.has_failing_try_results([]))
def test_has_failing_try_results_only_success_and_started(self):
self.assertFalse(GitCL.has_failing_try_results([
{
'builder_name': 'some-builder',
'status': 'COMPLETED',
'result': 'SUCCESS',
},
{
'builder_name': 'some-builder',
'status': 'STARTED',
'result': None,
},
]))
def test_has_failing_try_results_with_failing_results(self):
self.assertTrue(GitCL.has_failing_try_results([
{
'builder_name': 'some-builder',
'status': 'COMPLETED',
'result': 'FAILURE',
},
]))
| geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/WebKit/Tools/Scripts/webkitpy/common/net/git_cl_unittest.py | Python | gpl-3.0 | 3,618 |
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL 3'
__copyright__ = '2009, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
from itertools import izip
from calibre.customize import Plugin as _Plugin
FONT_SIZES = [('xx-small', 1),
('x-small', None),
('small', 2),
('medium', 3),
('large', 4),
('x-large', 5),
('xx-large', 6),
(None, 7)]
class Plugin(_Plugin):
fbase = 12
fsizes = [5, 7, 9, 12, 13.5, 17, 20, 22, 24]
screen_size = (1600, 1200)
dpi = 100
def __init__(self, *args, **kwargs):
_Plugin.__init__(self, *args, **kwargs)
self.width, self.height = self.screen_size
fsizes = list(self.fsizes)
self.fkey = list(self.fsizes)
self.fsizes = []
for (name, num), size in izip(FONT_SIZES, fsizes):
self.fsizes.append((name, num, float(size)))
self.fnames = dict((name, sz) for name, _, sz in self.fsizes if name)
self.fnums = dict((num, sz) for _, num, sz in self.fsizes if num)
self.width_pts = self.width * 72./self.dpi
self.height_pts = self.height * 72./self.dpi
# Input profiles {{{
class InputProfile(Plugin):
author = 'Kovid Goyal'
supported_platforms = set(['windows', 'osx', 'linux'])
can_be_disabled = False
type = _('Input profile')
name = 'Default Input Profile'
short_name = 'default' # Used in the CLI so dont use spaces etc. in it
description = _('This profile tries to provide sane defaults and is useful '
'if you know nothing about the input document.')
class SonyReaderInput(InputProfile):
name = 'Sony Reader'
short_name = 'sony'
description = _('This profile is intended for the SONY PRS line. '
'The 500/505/600/700 etc.')
screen_size = (584, 754)
dpi = 168.451
fbase = 12
fsizes = [7.5, 9, 10, 12, 15.5, 20, 22, 24]
class SonyReader300Input(SonyReaderInput):
name = 'Sony Reader 300'
short_name = 'sony300'
description = _('This profile is intended for the SONY PRS 300.')
dpi = 200
class SonyReader900Input(SonyReaderInput):
author = 'John Schember'
name = 'Sony Reader 900'
short_name = 'sony900'
description = _('This profile is intended for the SONY PRS-900.')
screen_size = (584, 978)
class MSReaderInput(InputProfile):
name = 'Microsoft Reader'
short_name = 'msreader'
description = _('This profile is intended for the Microsoft Reader.')
screen_size = (480, 652)
dpi = 96
fbase = 13
fsizes = [10, 11, 13, 16, 18, 20, 22, 26]
class MobipocketInput(InputProfile):
name = 'Mobipocket Books'
short_name = 'mobipocket'
description = _('This profile is intended for the Mobipocket books.')
# Unfortunately MOBI books are not narrowly targeted, so this information is
# quite likely to be spurious
screen_size = (600, 800)
dpi = 96
fbase = 18
fsizes = [14, 14, 16, 18, 20, 22, 24, 26]
class HanlinV3Input(InputProfile):
name = 'Hanlin V3'
short_name = 'hanlinv3'
description = _('This profile is intended for the Hanlin V3 and its clones.')
# Screen size is a best guess
screen_size = (584, 754)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class HanlinV5Input(HanlinV3Input):
name = 'Hanlin V5'
short_name = 'hanlinv5'
description = _('This profile is intended for the Hanlin V5 and its clones.')
# Screen size is a best guess
screen_size = (584, 754)
dpi = 200
class CybookG3Input(InputProfile):
name = 'Cybook G3'
short_name = 'cybookg3'
description = _('This profile is intended for the Cybook G3.')
# Screen size is a best guess
screen_size = (600, 800)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class CybookOpusInput(InputProfile):
author = 'John Schember'
name = 'Cybook Opus'
short_name = 'cybook_opus'
description = _('This profile is intended for the Cybook Opus.')
# Screen size is a best guess
screen_size = (600, 800)
dpi = 200
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class KindleInput(InputProfile):
name = 'Kindle'
short_name = 'kindle'
description = _('This profile is intended for the Amazon Kindle.')
# Screen size is a best guess
screen_size = (525, 640)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class IlliadInput(InputProfile):
name = 'Illiad'
short_name = 'illiad'
description = _('This profile is intended for the Irex Illiad.')
screen_size = (760, 925)
dpi = 160.0
fbase = 12
fsizes = [7.5, 9, 10, 12, 15.5, 20, 22, 24]
class IRexDR1000Input(InputProfile):
author = 'John Schember'
name = 'IRex Digital Reader 1000'
short_name = 'irexdr1000'
description = _('This profile is intended for the IRex Digital Reader 1000.')
# Screen size is a best guess
screen_size = (1024, 1280)
dpi = 160
fbase = 16
fsizes = [12, 14, 16, 18, 20, 22, 24]
class IRexDR800Input(InputProfile):
author = 'Eric Cronin'
name = 'IRex Digital Reader 800'
short_name = 'irexdr800'
description = _('This profile is intended for the IRex Digital Reader 800.')
screen_size = (768, 1024)
dpi = 160
fbase = 16
fsizes = [12, 14, 16, 18, 20, 22, 24]
class NookInput(InputProfile):
author = 'John Schember'
name = 'Nook'
short_name = 'nook'
description = _('This profile is intended for the B&N Nook.')
# Screen size is a best guess
screen_size = (600, 800)
dpi = 167
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
input_profiles = [InputProfile, SonyReaderInput, SonyReader300Input,
SonyReader900Input, MSReaderInput, MobipocketInput, HanlinV3Input,
HanlinV5Input, CybookG3Input, CybookOpusInput, KindleInput, IlliadInput,
IRexDR1000Input, IRexDR800Input, NookInput]
input_profiles.sort(cmp=lambda x,y:cmp(x.name.lower(), y.name.lower()))
# }}}
class OutputProfile(Plugin):
author = 'Kovid Goyal'
supported_platforms = set(['windows', 'osx', 'linux'])
can_be_disabled = False
type = _('Output profile')
name = 'Default Output Profile'
short_name = 'default' # Used in the CLI so dont use spaces etc. in it
description = _('This profile tries to provide sane defaults and is useful '
'if you want to produce a document intended to be read at a '
'computer or on a range of devices.')
#: The image size for comics
comic_screen_size = (584, 754)
#: If True the MOBI renderer on the device supports MOBI indexing
supports_mobi_indexing = False
#: If True output should be optimized for a touchscreen interface
touchscreen = False
touchscreen_news_css = ''
#: A list of extra (beyond CSS 2.1) modules supported by the device
#: Format is a cssutils profile dictionary (see iPad for example)
extra_css_modules = []
#: If True, the date is appended to the title of downloaded news
periodical_date_in_title = True
#: Characters used in jackets and catalogs
ratings_char = u'*'
empty_ratings_char = u' '
#: Unsupported unicode characters to be replaced during preprocessing
unsupported_unicode_chars = []
#: Number of ems that the left margin of a blockquote is rendered as
mobi_ems_per_blockquote = 1.0
#: Special periodical formatting needed in EPUB
epub_periodical_format = None
class iPadOutput(OutputProfile):
name = 'iPad'
short_name = 'ipad'
description = _('Intended for the iPad and similar devices with a '
'resolution of 768x1024')
screen_size = (768, 1024)
comic_screen_size = (768, 1024)
dpi = 132.0
extra_css_modules = [
{
'name':'webkit',
'props': {'-webkit-border-bottom-left-radius':'{length}',
'-webkit-border-bottom-right-radius':'{length}',
'-webkit-border-top-left-radius':'{length}',
'-webkit-border-top-right-radius':'{length}',
'-webkit-border-radius': r'{border-width}(\s+{border-width}){0,3}|inherit',
},
'macros': {'border-width': '{length}|medium|thick|thin'}
}
]
ratings_char = u'\u2605' # filled star
empty_ratings_char = u'\u2606' # hollow star
touchscreen = True
# touchscreen_news_css {{{
touchscreen_news_css = u'''
/* hr used in articles */
.article_articles_list {
width:18%;
}
.article_link {
color: #593f29;
font-style: italic;
}
.article_next {
-webkit-border-top-right-radius:4px;
-webkit-border-bottom-right-radius:4px;
font-style: italic;
width:32%;
}
.article_prev {
-webkit-border-top-left-radius:4px;
-webkit-border-bottom-left-radius:4px;
font-style: italic;
width:32%;
}
.article_sections_list {
width:18%;
}
.articles_link {
font-weight: bold;
}
.sections_link {
font-weight: bold;
}
.caption_divider {
border:#ccc 1px solid;
}
.touchscreen_navbar {
background:#c3bab2;
border:#ccc 0px solid;
border-collapse:separate;
border-spacing:1px;
margin-left: 5%;
margin-right: 5%;
page-break-inside:avoid;
width: 90%;
-webkit-border-radius:4px;
}
.touchscreen_navbar td {
background:#fff;
font-family:Helvetica;
font-size:80%;
/* UI touchboxes use 8px padding */
padding: 6px;
text-align:center;
}
.touchscreen_navbar td a:link {
color: #593f29;
text-decoration: none;
}
/* Index formatting */
.publish_date {
text-align:center;
}
.divider {
border-bottom:1em solid white;
border-top:1px solid gray;
}
hr.caption_divider {
border-color:black;
border-style:solid;
border-width:1px;
}
/* Feed summary formatting */
.article_summary {
display:inline-block;
padding-bottom:0.5em;
}
.feed {
font-family:sans-serif;
font-weight:bold;
font-size:larger;
}
.feed_link {
font-style: italic;
}
.feed_next {
-webkit-border-top-right-radius:4px;
-webkit-border-bottom-right-radius:4px;
font-style: italic;
width:40%;
}
.feed_prev {
-webkit-border-top-left-radius:4px;
-webkit-border-bottom-left-radius:4px;
font-style: italic;
width:40%;
}
.feed_title {
text-align: center;
font-size: 160%;
}
.feed_up {
font-weight: bold;
width:20%;
}
.summary_headline {
font-weight:bold;
text-align:left;
}
.summary_byline {
text-align:left;
font-family:monospace;
}
.summary_text {
text-align:left;
}
'''
# }}}
class iPad3Output(iPadOutput):
screen_size = comic_screen_size = (2048, 1536)
dpi = 264.0
name = 'iPad 3'
short_name = 'ipad3'
description = _('Intended for the iPad 3 and similar devices with a '
'resolution of 1536x2048')
class TabletOutput(iPadOutput):
name = 'Tablet'
short_name = 'tablet'
description = _('Intended for generic tablet devices, does no resizing of images')
screen_size = (10000, 10000)
comic_screen_size = (10000, 10000)
class SamsungGalaxy(TabletOutput):
name = 'Samsung Galaxy'
short_name = 'galaxy'
description = _('Intended for the Samsung Galaxy and similar tablet devices with '
'a resolution of 600x1280')
screen_size = comic_screen_size = (600, 1280)
class NookHD(TabletOutput):
name = 'Nook HD+'
short_name = 'nook_hd_plus'
description = _('Intended for the Nook HD+ and similar tablet devices with '
'a resolution of 1280x1920')
screen_size = comic_screen_size = (1280, 1920)
class SonyReaderOutput(OutputProfile):
name = 'Sony Reader'
short_name = 'sony'
description = _('This profile is intended for the SONY PRS line. '
'The 500/505/600/700 etc.')
screen_size = (590, 775)
dpi = 168.451
fbase = 12
fsizes = [7.5, 9, 10, 12, 15.5, 20, 22, 24]
unsupported_unicode_chars = [u'\u201f', u'\u201b']
epub_periodical_format = 'sony'
# periodical_date_in_title = False
class KoboReaderOutput(OutputProfile):
name = 'Kobo Reader'
short_name = 'kobo'
description = _('This profile is intended for the Kobo Reader.')
screen_size = (536, 710)
comic_screen_size = (536, 710)
dpi = 168.451
fbase = 12
fsizes = [7.5, 9, 10, 12, 15.5, 20, 22, 24]
class SonyReader300Output(SonyReaderOutput):
author = 'John Schember'
name = 'Sony Reader 300'
short_name = 'sony300'
description = _('This profile is intended for the SONY PRS-300.')
dpi = 200
class SonyReader900Output(SonyReaderOutput):
author = 'John Schember'
name = 'Sony Reader 900'
short_name = 'sony900'
description = _('This profile is intended for the SONY PRS-900.')
screen_size = (600, 999)
comic_screen_size = screen_size
class SonyReaderT3Output(SonyReaderOutput):
author = 'Kovid Goyal'
name = 'Sony Reader T3'
short_name = 'sonyt3'
description = _('This profile is intended for the SONY PRS-T3.')
screen_size = (758, 934)
comic_screen_size = screen_size
class GenericEink(SonyReaderOutput):
name = 'Generic e-ink'
short_name = 'generic_eink'
description = _('Suitable for use with any e-ink device')
epub_periodical_format = None
class GenericEinkLarge(GenericEink):
name = 'Generic e-ink large'
short_name = 'generic_eink_large'
description = _('Suitable for use with any large screen e-ink device')
screen_size = (600, 999)
comic_screen_size = screen_size
class GenericEinkHD(GenericEink):
name = 'Generic e-ink HD'
short_name = 'generic_eink_hd'
description = _('Suitable for use with any modern high resolution e-ink device')
screen_size = (10000, 10000)
comic_screen_size = (10000, 10000)
class JetBook5Output(OutputProfile):
name = 'JetBook 5-inch'
short_name = 'jetbook5'
description = _('This profile is intended for the 5-inch JetBook.')
screen_size = (480, 640)
dpi = 168.451
class SonyReaderLandscapeOutput(SonyReaderOutput):
name = 'Sony Reader Landscape'
short_name = 'sony-landscape'
description = _('This profile is intended for the SONY PRS line. '
'The 500/505/700 etc, in landscape mode. Mainly useful '
'for comics.')
screen_size = (784, 1012)
comic_screen_size = (784, 1012)
class MSReaderOutput(OutputProfile):
name = 'Microsoft Reader'
short_name = 'msreader'
description = _('This profile is intended for the Microsoft Reader.')
screen_size = (480, 652)
dpi = 96
fbase = 13
fsizes = [10, 11, 13, 16, 18, 20, 22, 26]
class MobipocketOutput(OutputProfile):
name = 'Mobipocket Books'
short_name = 'mobipocket'
description = _('This profile is intended for the Mobipocket books.')
# Unfortunately MOBI books are not narrowly targeted, so this information is
# quite likely to be spurious
screen_size = (600, 800)
dpi = 96
fbase = 18
fsizes = [14, 14, 16, 18, 20, 22, 24, 26]
class HanlinV3Output(OutputProfile):
name = 'Hanlin V3'
short_name = 'hanlinv3'
description = _('This profile is intended for the Hanlin V3 and its clones.')
# Screen size is a best guess
screen_size = (584, 754)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class HanlinV5Output(HanlinV3Output):
name = 'Hanlin V5'
short_name = 'hanlinv5'
description = _('This profile is intended for the Hanlin V5 and its clones.')
dpi = 200
class CybookG3Output(OutputProfile):
name = 'Cybook G3'
short_name = 'cybookg3'
description = _('This profile is intended for the Cybook G3.')
# Screen size is a best guess
screen_size = (600, 800)
comic_screen_size = (600, 757)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class CybookOpusOutput(SonyReaderOutput):
author = 'John Schember'
name = 'Cybook Opus'
short_name = 'cybook_opus'
description = _('This profile is intended for the Cybook Opus.')
# Screen size is a best guess
dpi = 200
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
epub_periodical_format = None
class KindleOutput(OutputProfile):
name = 'Kindle'
short_name = 'kindle'
description = _('This profile is intended for the Amazon Kindle.')
# Screen size is a best guess
screen_size = (525, 640)
dpi = 168.451
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
supports_mobi_indexing = True
periodical_date_in_title = False
empty_ratings_char = u'\u2606'
ratings_char = u'\u2605'
mobi_ems_per_blockquote = 2.0
class KindleDXOutput(OutputProfile):
name = 'Kindle DX'
short_name = 'kindle_dx'
description = _('This profile is intended for the Amazon Kindle DX.')
# Screen size is a best guess
screen_size = (744, 1022)
dpi = 150.0
comic_screen_size = (771, 1116)
# comic_screen_size = (741, 1022)
supports_mobi_indexing = True
periodical_date_in_title = False
empty_ratings_char = u'\u2606'
ratings_char = u'\u2605'
mobi_ems_per_blockquote = 2.0
class KindlePaperWhiteOutput(KindleOutput):
name = 'Kindle PaperWhite'
short_name = 'kindle_pw'
description = _('This profile is intended for the Amazon Kindle PaperWhite 1 and 2')
# Screen size is a best guess
screen_size = (658, 940)
dpi = 212.0
comic_screen_size = screen_size
class KindleVoyageOutput(KindleOutput):
name = 'Kindle Voyage'
short_name = 'kindle_voyage'
description = _('This profile is intended for the Amazon Kindle Voyage')
# Screen size is currently just the spec size, actual renderable area will
# depend on someone with the device doing tests.
screen_size = (1080, 1430)
dpi = 300.0
comic_screen_size = screen_size
class KindlePaperWhite3Output(KindleVoyageOutput):
name = 'Kindle PaperWhite 3'
short_name = 'kindle_pw3'
description = _('This profile is intended for the Amazon Kindle PaperWhite 3 and above')
# Screen size is currently just the spec size, actual renderable area will
# depend on someone with the device doing tests.
screen_size = (1072, 1430)
dpi = 300.0
comic_screen_size = screen_size
class KindleFireOutput(KindleDXOutput):
name = 'Kindle Fire'
short_name = 'kindle_fire'
description = _('This profile is intended for the Amazon Kindle Fire.')
screen_size = (570, 1016)
dpi = 169.0
comic_screen_size = (570, 1016)
class IlliadOutput(OutputProfile):
name = 'Illiad'
short_name = 'illiad'
description = _('This profile is intended for the Irex Illiad.')
screen_size = (760, 925)
comic_screen_size = (760, 925)
dpi = 160.0
fbase = 12
fsizes = [7.5, 9, 10, 12, 15.5, 20, 22, 24]
class IRexDR1000Output(OutputProfile):
author = 'John Schember'
name = 'IRex Digital Reader 1000'
short_name = 'irexdr1000'
description = _('This profile is intended for the IRex Digital Reader 1000.')
# Screen size is a best guess
screen_size = (1024, 1280)
comic_screen_size = (996, 1241)
dpi = 160
fbase = 16
fsizes = [12, 14, 16, 18, 20, 22, 24]
class IRexDR800Output(OutputProfile):
author = 'Eric Cronin'
name = 'IRex Digital Reader 800'
short_name = 'irexdr800'
description = _('This profile is intended for the IRex Digital Reader 800.')
# Screen size is a best guess
screen_size = (768, 1024)
comic_screen_size = (768, 1024)
dpi = 160
fbase = 16
fsizes = [12, 14, 16, 18, 20, 22, 24]
class NookOutput(OutputProfile):
author = 'John Schember'
name = 'Nook'
short_name = 'nook'
description = _('This profile is intended for the B&N Nook.')
# Screen size is a best guess
screen_size = (600, 730)
comic_screen_size = (584, 730)
dpi = 167
fbase = 16
fsizes = [12, 12, 14, 16, 18, 20, 22, 24]
class NookColorOutput(NookOutput):
name = 'Nook Color'
short_name = 'nook_color'
description = _('This profile is intended for the B&N Nook Color.')
screen_size = (600, 900)
comic_screen_size = (594, 900)
dpi = 169
class PocketBook900Output(OutputProfile):
author = 'Chris Lockfort'
name = 'PocketBook Pro 900'
short_name = 'pocketbook_900'
description = _('This profile is intended for the PocketBook Pro 900 series of devices.')
screen_size = (810, 1180)
dpi = 150.0
comic_screen_size = screen_size
class PocketBookPro912Output(OutputProfile):
author = 'Daniele Pizzolli'
name = 'PocketBook Pro 912'
short_name = 'pocketbook_pro_912'
description = _('This profile is intended for the PocketBook Pro 912 series of devices.')
# According to http://download.pocketbook-int.com/user-guides/E_Ink/912/User_Guide_PocketBook_912(EN).pdf
screen_size = (825, 1200)
dpi = 155.0
comic_screen_size = screen_size
output_profiles = [
OutputProfile, SonyReaderOutput, SonyReader300Output, SonyReader900Output,
SonyReaderT3Output, MSReaderOutput, MobipocketOutput, HanlinV3Output,
HanlinV5Output, CybookG3Output, CybookOpusOutput, KindleOutput, iPadOutput,
iPad3Output, KoboReaderOutput, TabletOutput, SamsungGalaxy,
SonyReaderLandscapeOutput, KindleDXOutput, IlliadOutput, NookHD,
IRexDR1000Output, IRexDR800Output, JetBook5Output, NookOutput,
NookColorOutput, PocketBook900Output,
PocketBookPro912Output, GenericEink, GenericEinkLarge, GenericEinkHD,
KindleFireOutput, KindlePaperWhiteOutput, KindleVoyageOutput,
KindlePaperWhite3Output
]
output_profiles.sort(cmp=lambda x,y:cmp(x.name.lower(), y.name.lower()))
| hazrpg/calibre | src/calibre/customize/profiles.py | Python | gpl-3.0 | 26,074 |
# -*- coding: iso-8859-1 -*-
# Copyright (C) 2011 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
class MasteryAbility(object):
@staticmethod
def build_from_xml(elem):
f = MasteryAbility()
f.rank = int(elem.attrib['rank'])
f.rule = elem.attrib['rule'] if ('rule' in elem.attrib) else None
f.desc = elem.text
return f
class SkillCateg(object):
@staticmethod
def build_from_xml(elem):
f = SkillCateg()
f.id = elem.attrib['id']
f.name = elem.text
return f
def __str__(self):
return self.name
def __unicode__(self):
return self.name
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__()
class Skill(object):
@staticmethod
def build_from_xml(elem):
f = Skill()
f.name = elem.attrib['name']
f.id = elem.attrib['id']
f.trait = elem.attrib['trait']
f.type = elem.attrib['type']
f.tags = [f.type]
if elem.find('Tags'):
for se in elem.find('Tags').iter():
if se.tag == 'Tag':
f.tags.append(se.text)
f.mastery_abilities = []
if elem.find('MasteryAbilities'):
for se in elem.find('MasteryAbilities').iter():
if se.tag == 'MasteryAbility':
f.mastery_abilities.append(MasteryAbility.build_from_xml(se))
return f
def __str__(self):
return self.name or self.id
def __unicode__(self):
return self.name
def __eq__(self, obj):
return obj and obj.id == self.id
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return self.id.__hash__()
| tectronics/l5rcm | dal/skill.py | Python | gpl-3.0 | 2,619 |
'''
*** SHED SKIN Python-to-C++ Compiler ***
Copyright 2005-2013 Mark Dufour; License GNU GPL version 3 (See LICENSE)
graph.py: build constraint graph used in dataflow analysis
constraint graph: graph along which possible types 'flow' during an 'abstract execution' of a program (a dataflow analysis). consider the assignment statement 'a = b'. it follows that the set of possible types of b is smaller than or equal to that of a (a constraint). we can determine possible types of a, by 'flowing' the types from b to a, in other words, along the constraint.
constraint graph nodes are stored in gx.cnode, and the set of types of for each node in gx.types. nodes are identified by an AST Node, and two integers. the integers are used in py to duplicate parts of the constraint graph along two dimensions. in the initial constraint graph, these integers are always 0.
class ModuleVisitor: inherits visitor pattern from compiler.visitor.ASTVisitor, to recursively generate constraints for each syntactical Python construct. for example, the visitFor method is called in case of a for-loop. temporary variables are introduced in many places, to enable translation to a lower-level language.
parse_module(): locate module by name (e.g. 'os.path'), and use ModuleVisitor if not cached
'''
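# --- illustrative sketch (not part of the original module): the docstring above
# describes how possible types 'flow' along constraints such as 'a = b' until a
# fixed point is reached. the toy function below, which is never called, shows
# that idea with plain sets and a worklist; the names used here are placeholders
# and do not correspond to shed skin's actual data structures.
def _toy_constraint_flow_example():
    # possible types per node; 'b' starts out known, 'a' and 'c' are inferred
    types = {'a': set(), 'b': set(['int', 'str']), 'c': set()}
    # constraints (source, target): types flow from source into target,
    # as generated for the assignments 'a = b' and 'c = a'
    constraints = [('b', 'a'), ('a', 'c')]
    changed = True
    while changed:  # propagate until nothing changes (fixed point)
        changed = False
        for source, target in constraints:
            new = types[source] - types[target]
            if new:
                types[target] |= new
                changed = True
    return types  # all three nodes end up with {'int', 'str'}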
import copy
import os
import re
import sys
from compiler.ast import Const, AssTuple, AssList, From, Add, ListCompFor, \
UnaryAdd, Import, Bitand, Stmt, Assign, FloorDiv, Not, Mod, AssAttr, \
Keyword, GenExpr, LeftShift, AssName, Div, Or, Lambda, And, CallFunc, \
Global, Slice, RightShift, Sub, Getattr, Dict, Ellipsis, Mul, \
Subscript, Function as FunctionNode, Return, Power, Bitxor, Class as ClassNode, Name, List, \
Discard, Sliceobj, Tuple, Pass, UnarySub, Bitor, ListComp, TryExcept, With
from compiler.visitor import ASTVisitor
from error import error
from infer import inode, in_out, CNode, default_var, register_temp_var
from python import StaticClass, lookup_func, Function, is_zip2, \
lookup_class, is_method, is_literal, is_enum, lookup_var, assign_rec, \
Class, is_property_setter, is_fastfor, aug_msg, is_isinstance, \
Module, def_class, parse_file, find_module
# --- global variable mv
_mv = None
def setmv(mv):
global _mv
_mv = mv
return _mv
def getmv():
return _mv
class FakeGetattr3(Getattr):
pass
class FakeGetattr2(Getattr):
pass
class FakeGetattr(Getattr):
pass # XXX ugly
def check_redef(gx, node, s=None, onlybuiltins=False): # XXX to modvisitor, rewrite
if not getmv().module.builtin:
existing = [getmv().ext_classes, getmv().ext_funcs]
if not onlybuiltins:
existing += [getmv().classes, getmv().funcs]
for whatsit in existing:
if s is not None:
name = s
else:
name = node.name
if name in whatsit:
error("function/class redefinition is not supported", gx, node, mv=getmv())
# --- maintain inheritance relations between copied AST nodes
def inherit_rec(gx, original, copy, mv):
gx.inheritance_relations.setdefault(original, []).append(copy)
gx.inherited.add(copy)
gx.parent_nodes[copy] = original
for (a, b) in zip(original.getChildNodes(), copy.getChildNodes()):
inherit_rec(gx, a, b, mv)
def register_node(node, func):
if func:
func.registered.append(node)
def slice_nums(nodes):
nodes2 = []
x = 0
for i, n in enumerate(nodes):
if not n or (isinstance(n, Const) and n.value is None):
nodes2.append(Const(0))
else:
nodes2.append(n)
x |= (1 << i)
return [Const(x)] + nodes2
# --- module visitor; analyze program, build constraint graph
class ModuleVisitor(ASTVisitor):
def __init__(self, module, gx):
ASTVisitor.__init__(self)
self.module = module
self.gx = gx
self.classes = {}
self.funcs = {}
self.globals = {}
self.exc_names = {}
self.current_with_vars = []
self.lambdas = {}
self.imports = {}
self.fake_imports = {}
self.ext_classes = {}
self.ext_funcs = {}
self.lambdaname = {}
self.lwrapper = {}
self.tempcount = self.gx.tempcount
self.callfuncs = []
self.for_in_iters = []
self.listcomps = []
self.defaults = {}
self.importnodes = []
def dispatch(self, node, *args):
if (node, 0, 0) not in self.gx.cnode:
ASTVisitor.dispatch(self, node, *args)
def fake_func(self, node, objexpr, attrname, args, func):
if (node, 0, 0) in self.gx.cnode: # XXX
newnode = self.gx.cnode[node, 0, 0]
else:
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
fakefunc = CallFunc(Getattr(objexpr, attrname), args)
fakefunc.lineno = objexpr.lineno
self.visit(fakefunc, func)
self.add_constraint((inode(self.gx, fakefunc), newnode), func)
inode(self.gx, objexpr).fakefunc = fakefunc
return fakefunc
# simple heuristic for initial list split: count nesting depth, first constant child type
def list_type(self, node):
count = 0
child = node
while isinstance(child, (List, ListComp)):
if not child.getChildNodes():
return None
child = child.getChildNodes()[0]
count += 1
if isinstance(child, (UnarySub, UnaryAdd)):
child = child.expr
if isinstance(child, CallFunc) and isinstance(child.node, Name):
map = {'int': int, 'str': str, 'float': float}
if child.node.name in ('range'): # ,'xrange'):
count, child = count + 1, int
elif child.node.name in map:
child = map[child.node.name]
elif child.node.name in (cl.ident for cl in self.gx.allclasses) or child.node.name in getmv().classes: # XXX getmv().classes
child = child.node.name
else:
if count == 1:
return None
child = None
elif isinstance(child, Const):
child = type(child.value)
elif isinstance(child, Name) and child.name in ('True', 'False'):
child = bool
elif isinstance(child, Tuple):
child = tuple
elif isinstance(child, Dict):
child = dict
else:
if count == 1:
return None
child = None
self.gx.list_types.setdefault((count, child), len(self.gx.list_types) + 2)
# print 'listtype', node, self.gx.list_types[count, child]
return self.gx.list_types[count, child]
def instance(self, node, cl, func=None):
if (node, 0, 0) in self.gx.cnode: # XXX to create_node() func
newnode = self.gx.cnode[node, 0, 0]
else:
newnode = CNode(self.gx, node, parent=func, mv=getmv())
newnode.constructor = True
if cl.ident in ['int_', 'float_', 'str_', 'none', 'class_', 'bool_']:
self.gx.types[newnode] = set([(cl, cl.dcpa - 1)])
else:
if cl.ident == 'list' and self.list_type(node):
self.gx.types[newnode] = set([(cl, self.list_type(node))])
else:
self.gx.types[newnode] = set([(cl, cl.dcpa)])
def constructor(self, node, classname, func):
cl = def_class(self.gx, classname)
self.instance(node, cl, func)
default_var(self.gx, 'unit', cl)
if classname in ['list', 'tuple'] and not node.nodes:
self.gx.empty_constructors.add(node) # ifa disables those that flow to instance variable assignments
# --- internally flow binary tuples
if cl.ident == 'tuple2':
default_var(self.gx, 'first', cl)
default_var(self.gx, 'second', cl)
elem0, elem1 = node.nodes
self.visit(elem0, func)
self.visit(elem1, func)
self.add_dynamic_constraint(node, elem0, 'unit', func)
self.add_dynamic_constraint(node, elem1, 'unit', func)
self.add_dynamic_constraint(node, elem0, 'first', func)
self.add_dynamic_constraint(node, elem1, 'second', func)
return
# --- add dynamic children constraints for other types
if classname == 'dict': # XXX filter children
default_var(self.gx, 'unit', cl)
default_var(self.gx, 'value', cl)
for child in node.getChildNodes():
self.visit(child, func)
for (key, value) in node.items: # XXX filter
self.add_dynamic_constraint(node, key, 'unit', func)
self.add_dynamic_constraint(node, value, 'value', func)
else:
for child in node.nodes:
self.visit(child, func)
for child in self.filter_redundant_children(node):
self.add_dynamic_constraint(node, child, 'unit', func)
# --- for compound list/tuple/dict constructors, we only consider a single child node for each subtype
def filter_redundant_children(self, node):
done = set()
nonred = []
for child in node.nodes:
type = self.child_type_rec(child)
if not type or not type in done:
done.add(type)
nonred.append(child)
return nonred
# --- determine single constructor child node type, used by the above
def child_type_rec(self, node):
if isinstance(node, (UnarySub, UnaryAdd)):
node = node.expr
if isinstance(node, (List, Tuple)):
if isinstance(node, List):
cl = def_class(self.gx, 'list')
elif len(node.nodes) == 2:
cl = def_class(self.gx, 'tuple2')
else:
cl = def_class(self.gx, 'tuple')
merged = set()
for child in node.nodes:
merged.add(self.child_type_rec(child))
if len(merged) == 1:
return (cl, merged.pop())
elif isinstance(node, Const):
return (list(inode(self.gx, node).types())[0][0],)
# --- add dynamic constraint for constructor argument, e.g. '[expr]' becomes [].__setattr__('unit', expr)
def add_dynamic_constraint(self, parent, child, varname, func):
# print 'dynamic constr', child, parent
self.gx.assign_target[child] = parent
cu = Const(varname)
self.visit(cu, func)
fakefunc = CallFunc(FakeGetattr2(parent, '__setattr__'), [cu, child])
self.visit(fakefunc, func)
fakechildnode = CNode(self.gx, (child, varname), parent=func, mv=getmv()) # create separate 'fake' CNode per child, so we can have multiple 'callfuncs'
self.gx.types[fakechildnode] = set()
self.add_constraint((inode(self.gx, parent), fakechildnode), func) # add constraint from parent to fake child node. if parent changes, all fake child nodes change, and the callfunc for each child node is triggered
fakechildnode.callfuncs.append(fakefunc)
# --- add regular constraint to function
def add_constraint(self, constraint, func):
in_out(constraint[0], constraint[1])
self.gx.constraints.add(constraint)
while isinstance(func, Function) and func.listcomp:
func = func.parent # XXX
if isinstance(func, Function):
func.constraints.add(constraint)
def struct_unpack(self, rvalue, func):
if isinstance(rvalue, CallFunc):
if isinstance(rvalue.node, Getattr) and isinstance(rvalue.node.expr, Name) and rvalue.node.expr.name == 'struct' and rvalue.node.attrname == 'unpack' and lookup_var('struct', func, mv=self).imported: # XXX imported from where?
return True
elif isinstance(rvalue.node, Name) and rvalue.node.name == 'unpack' and 'unpack' in self.ext_funcs and not lookup_var('unpack', func, mv=self): # XXX imported from where?
return True
def struct_info(self, node, func):
if isinstance(node, Name):
var = lookup_var(node.name, func, mv=self) # XXX fwd ref?
if not var or len(var.const_assign) != 1:
error('non-constant format string', self.gx, node, mv=self)
error('assuming constant format string', self.gx, node, mv=self, warning=True)
fmt = var.const_assign[0].value
elif isinstance(node, Const):
fmt = node.value
else:
error('non-constant format string', self.gx, node, mv=self)
char_type = dict(['xx', 'cs', 'bi', 'Bi', '?b', 'hi', 'Hi', 'ii', 'Ii', 'li', 'Li', 'qi', 'Qi', 'ff', 'df', 'ss', 'ps'])
ordering = '@'
if fmt and fmt[0] in '@<>!=':
ordering, fmt = fmt[0], fmt[1:]
result = []
digits = ''
for i, c in enumerate(fmt):
if c.isdigit():
digits += c
elif c in char_type:
rtype = {'i': 'int', 's': 'str', 'b': 'bool', 'f': 'float', 'x': 'pad'}[char_type[c]]
if rtype == 'str' and c != 'c':
result.append((ordering, c, 'str', int(digits or '1')))
elif digits == '0':
result.append((ordering, c, rtype, 0))
else:
result.extend(int(digits or '1') * [(ordering, c, rtype, 1)])
digits = ''
else:
error('bad or unsupported char in struct format: ' + repr(c), self.gx, node, mv=self)
digits = ''
return result
def struct_faketuple(self, info):
result = []
for o, c, t, d in info:
if d != 0 or c == 's':
if t == 'int':
result.append(Const(1))
elif t == 'str':
result.append(Const(''))
elif t == 'float':
result.append(Const(1.0))
elif t == 'bool':
result.append(Name('True'))
return Tuple(result)
def visitExec(self, node, func=None):
error("'exec' is not supported", self.gx, node, mv=getmv())
def visitGenExpr(self, node, func=None):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
lc = ListComp(node.code.expr, [ListCompFor(qual.assign, qual.iter, qual.ifs, qual.lineno) for qual in node.code.quals], lineno=node.lineno)
register_node(lc, func)
self.gx.genexp_to_lc[node] = lc
self.visit(lc, func)
self.add_constraint((inode(self.gx, lc), newnode), func)
def visitStmt(self, node, func=None):
comments = []
for b in node.nodes:
if isinstance(b, Discard):
self.bool_test_add(b.expr)
if isinstance(b, Discard) and isinstance(b.expr, Const) and type(b.expr.value) == str:
comments.append(b.expr.value)
elif comments:
self.gx.comments[b] = comments
comments = []
self.visit(b, func)
def visitModule(self, node):
# --- bootstrap built-in classes
if self.module.ident == 'builtin':
for dummy in self.gx.builtins:
self.visit(ClassNode(dummy, [], None, Pass()))
if self.module.ident != 'builtin':
n = From('builtin', [('*', None)], None) # Python2.5+
getmv().importnodes.append(n)
self.visit(n)
# --- __name__
if self.module.ident != 'builtin':
namevar = default_var(self.gx, '__name__', None, mv=getmv())
self.gx.types[inode(self.gx, namevar)] = set([(def_class(self.gx, 'str_'), 0)])
self.forward_references(node)
# --- visit children
for child in node.getChildNodes():
if isinstance(child, Stmt):
getmv().importnodes.extend(n for n in child.nodes if isinstance(n, (Import, From)))
self.visit(child, None)
# --- register classes
for cl in getmv().classes.values():
self.gx.allclasses.add(cl)
# --- inheritance expansion
# determine base classes
for cl in self.classes.values():
for base in cl.node.bases:
if not (isinstance(base, Name) and base.name == 'object'):
ancestor = lookup_class(base, getmv())
cl.bases.append(ancestor)
ancestor.children.append(cl)
# for each base class, duplicate methods
for cl in self.classes.values():
for ancestor in cl.ancestors_upto(None)[1:]:
cl.staticmethods.extend(ancestor.staticmethods)
cl.properties.update(ancestor.properties)
for func in ancestor.funcs.values():
if not func.node or func.inherited:
continue
ident = func.ident
if ident in cl.funcs:
ident += ancestor.ident + '__'
# deep-copy AST function nodes
func_copy = copy.deepcopy(func.node)
inherit_rec(self.gx, func.node, func_copy, func.mv)
tempmv, mv = getmv(), func.mv
setmv(mv)
self.visitFunction(func_copy, cl, inherited_from=ancestor)
mv = tempmv
setmv(mv)
# maintain relation with original
self.gx.inheritance_relations.setdefault(func, []).append(cl.funcs[ident])
cl.funcs[ident].inherited = func.node
cl.funcs[ident].inherited_from = func
func_copy.name = ident
if ident == func.ident:
cl.funcs[ident + ancestor.ident + '__'] = cl.funcs[ident]
def stmt_nodes(self, node, cl):
result = []
for child in node.getChildNodes():
if isinstance(child, Stmt):
for n in child.nodes:
if isinstance(n, cl):
result.append(n)
return result
def forward_references(self, node):
getmv().classnodes = []
# classes
for n in self.stmt_nodes(node, ClassNode):
check_redef(self.gx, n)
getmv().classnodes.append(n)
newclass = Class(self.gx, n, getmv())
self.classes[n.name] = newclass
getmv().classes[n.name] = newclass
newclass.module = self.module
newclass.parent = StaticClass(newclass, getmv())
# methods
for m in self.stmt_nodes(n, FunctionNode):
if hasattr(m, 'decorators') and m.decorators and [dec for dec in m.decorators if is_property_setter(dec)]:
m.name = m.name + '__setter__'
if m.name in newclass.funcs: # and func.ident not in ['__getattr__', '__setattr__']: # XXX
error("function/class redefinition is not allowed", self.gx, m, mv=getmv())
func = Function(self.gx, m, newclass, mv=getmv())
newclass.funcs[func.ident] = func
self.set_default_vars(m, func)
# functions
getmv().funcnodes = []
for n in self.stmt_nodes(node, FunctionNode):
check_redef(self.gx, n)
getmv().funcnodes.append(n)
func = getmv().funcs[n.name] = Function(self.gx, n, mv=getmv())
self.set_default_vars(n, func)
# global variables XXX visitGlobal
for assname in self.local_assignments(node, global_=True):
default_var(self.gx, assname.name, None, mv=getmv())
def set_default_vars(self, node, func):
globals = set(self.get_globals(node))
for assname in self.local_assignments(node):
if assname.name not in globals:
default_var(self.gx, assname.name, func)
def get_globals(self, node):
if isinstance(node, Global):
result = node.names
else:
result = []
for child in node.getChildNodes():
result.extend(self.get_globals(child))
return result
def local_assignments(self, node, global_=False):
if global_ and isinstance(node, (ClassNode, FunctionNode)):
return []
elif isinstance(node, (ListComp, GenExpr)):
return []
elif isinstance(node, AssName):
result = [node]
else:
# Try-Excepts introduce a new small scope with the exception name,
# so we skip it here.
if isinstance(node, TryExcept):
children = list(node.body.getChildNodes())
for handler in node.handlers:
children.extend(handler[2].getChildNodes())
if node.else_:
children.extend(node.else_.getChildNodes())
elif isinstance(node, With):
children = node.body.getChildNodes()
else:
children = node.getChildNodes()
result = []
for child in children:
result.extend(self.local_assignments(child, global_))
return result
def visitImport(self, node, func=None):
if not node in getmv().importnodes:
error("please place all imports (no 'try:' etc) at the top of the file", self.gx, node, mv=getmv())
for (name, pseudonym) in node.names:
if pseudonym:
# --- import a.b as c: don't import a
self.import_module(name, pseudonym, node, False)
else:
self.import_modules(name, node, False)
def import_modules(self, name, node, fake):
# --- import a.b.c: import a, then a.b, then a.b.c
split = name.split('.')
module = getmv().module
for i in range(len(split)):
subname = '.'.join(split[:i + 1])
parent = module
module = self.import_module(subname, subname, node, fake)
if module.ident not in parent.mv.imports: # XXX
if not fake:
parent.mv.imports[module.ident] = module
return module
def import_module(self, name, pseudonym, node, fake):
module = self.analyze_module(name, pseudonym, node, fake)
if not fake:
var = default_var(self.gx, pseudonym or name, None, mv=getmv())
var.imported = True
self.gx.types[inode(self.gx, var)] = set([(module, 0)])
return module
def visitFrom(self, node, parent=None):
if not node in getmv().importnodes: # XXX use (func, node) as parent..
error("please place all imports (no 'try:' etc) at the top of the file", self.gx, node, mv=getmv())
if hasattr(node, 'level') and node.level:
error("relative imports are not supported", self.gx, node, mv=getmv())
if node.modname == '__future__':
for name, _ in node.names:
if name not in ['with_statement', 'print_function']:
error("future '%s' is not yet supported" % name, self.gx, node, mv=getmv())
return
module = self.import_modules(node.modname, node, True)
self.gx.from_module[node] = module
for name, pseudonym in node.names:
if name == '*':
self.ext_funcs.update(module.mv.funcs)
self.ext_classes.update(module.mv.classes)
for import_name, import_module in module.mv.imports.items():
var = default_var(self.gx, import_name, None, mv=getmv()) # XXX merge
var.imported = True
self.gx.types[inode(self.gx, var)] = set([(import_module, 0)])
self.imports[import_name] = import_module
for name, extvar in module.mv.globals.items():
if not extvar.imported and not name in ['__name__']:
var = default_var(self.gx, name, None, mv=getmv()) # XXX merge
var.imported = True
self.add_constraint((inode(self.gx, extvar), inode(self.gx, var)), None)
continue
path = module.path
pseudonym = pseudonym or name
if name in module.mv.funcs:
self.ext_funcs[pseudonym] = module.mv.funcs[name]
elif name in module.mv.classes:
self.ext_classes[pseudonym] = module.mv.classes[name]
elif name in module.mv.globals and not module.mv.globals[name].imported: # XXX
extvar = module.mv.globals[name]
var = default_var(self.gx, pseudonym, None, mv=getmv())
var.imported = True
self.add_constraint((inode(self.gx, extvar), inode(self.gx, var)), None)
elif os.path.isfile(os.path.join(path, name + '.py')) or \
os.path.isfile(os.path.join(path, name, '__init__.py')):
modname = '.'.join(module.name_list + [name])
self.import_module(modname, name, node, False)
else:
error("no identifier '%s' in module '%s'" % (name, node.modname), self.gx, node, mv=getmv())
def analyze_module(self, name, pseud, node, fake):
module = parse_module(name, self.gx, getmv().module, node)
if not fake:
self.imports[pseud] = module
else:
self.fake_imports[pseud] = module
return module
def visitFunction(self, node, parent=None, is_lambda=False, inherited_from=None):
if not getmv().module.builtin and (node.varargs or node.kwargs):
error('argument (un)packing is not supported', self.gx, node, mv=getmv())
if not parent and not is_lambda and node.name in getmv().funcs:
func = getmv().funcs[node.name]
elif isinstance(parent, Class) and not inherited_from and node.name in parent.funcs:
func = parent.funcs[node.name]
else:
func = Function(self.gx, node, parent, inherited_from, mv=getmv())
if inherited_from:
self.set_default_vars(node, func)
if not is_method(func):
if not getmv().module.builtin and not node in getmv().funcnodes and not is_lambda:
error("non-global function '%s'" % node.name, self.gx, node, mv=getmv())
if hasattr(node, 'decorators') and node.decorators:
for dec in node.decorators.nodes:
if isinstance(dec, Name) and dec.name == 'staticmethod':
parent.staticmethods.append(node.name)
elif isinstance(dec, Name) and dec.name == 'property':
parent.properties[node.name] = [node.name, None]
elif is_property_setter(dec):
parent.properties[dec.expr.name][1] = node.name
else:
error("unsupported type of decorator", self.gx, dec, mv=getmv())
if parent:
if not inherited_from and not func.ident in parent.staticmethods and (not func.formals or func.formals[0] != 'self'):
error("formal arguments of method must start with 'self'", self.gx, node, mv=getmv())
if not func.mv.module.builtin and func.ident in ['__new__', '__getattr__', '__setattr__', '__radd__', '__rsub__', '__rmul__', '__rdiv__', '__rtruediv__', '__rfloordiv__', '__rmod__', '__rdivmod__', '__rpow__', '__rlshift__', '__rrshift__', '__rand__', '__rxor__', '__ror__', '__iter__', '__call__', '__enter__', '__exit__', '__del__', '__copy__', '__deepcopy__']:
error("'%s' is not supported" % func.ident, self.gx, node, warning=True, mv=getmv())
if is_lambda:
self.lambdas[node.name] = func
# --- add unpacking statement for tuple formals
func.expand_args = {}
for i, formal in enumerate(func.formals):
if isinstance(formal, tuple):
tmp = self.temp_var((node, i), func)
func.formals[i] = tmp.name
fake_unpack = Assign([self.unpack_rec(formal)], Name(tmp.name))
func.expand_args[tmp.name] = fake_unpack
self.visit(fake_unpack, func)
func.defaults = node.defaults
for formal in func.formals:
var = default_var(self.gx, formal, func)
var.formal_arg = True
# --- flow return expressions together into single node
func.retnode = retnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[retnode] = set()
func.yieldnode = yieldnode = CNode(self.gx, (node, 'yield'), parent=func, mv=getmv())
self.gx.types[yieldnode] = set()
self.visit(node.code, func)
for i, default in enumerate(func.defaults):
if not is_literal(default):
self.defaults[default] = (len(self.defaults), func, i)
self.visit(default, None) # defaults are global
# --- add implicit 'return None' if no return expressions
if not func.returnexpr:
func.fakeret = Return(Name('None'))
self.visit(func.fakeret, func)
# --- register function
if isinstance(parent, Class):
if func.ident not in parent.staticmethods: # XXX use flag
default_var(self.gx, 'self', func)
if func.ident == '__init__' and '__del__' in parent.funcs: # XXX what if no __init__
self.visit(CallFunc(Getattr(Name('self'), '__del__'), []), func)
self.gx.gc_cleanup = True
parent.funcs[func.ident] = func
def unpack_rec(self, formal):
if isinstance(formal, str):
return AssName(formal, 'OP_ASSIGN')
else:
return AssTuple([self.unpack_rec(elem) for elem in formal])
def visitLambda(self, node, func=None):
lambdanr = len(self.lambdas)
name = '__lambda%d__' % lambdanr
fakenode = FunctionNode(None, name, node.argnames, node.defaults, node.flags, None, Return(node.code))
self.visit(fakenode, None, True)
f = self.lambdas[name]
f.lambdanr = lambdanr
self.lambdaname[node] = name
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set([(f, 0)])
newnode.copymetoo = True
def visitAnd(self, node, func=None):
self.visit_and_or(node, func)
def visitOr(self, node, func=None):
self.visit_and_or(node, func)
def visit_and_or(self, node, func):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
for child in node.getChildNodes():
if node in self.gx.bool_test_only:
self.bool_test_add(child)
self.visit(child, func)
self.add_constraint((inode(self.gx, child), newnode), func)
self.temp_var2(child, newnode, func)
def visitIf(self, node, func=None):
for test, code in node.tests:
if is_isinstance(test):
self.gx.filterstack.append(test.args)
self.bool_test_add(test)
faker = CallFunc(Name('bool'), [test])
self.visit(faker, func)
self.visit(code, func)
if is_isinstance(test):
self.gx.filterstack.pop()
if node.else_:
self.visit(node.else_, func)
def visitIfExp(self, node, func=None):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
for child in node.getChildNodes():
self.visit(child, func)
self.add_constraint((inode(self.gx, node.then), newnode), func)
self.add_constraint((inode(self.gx, node.else_), newnode), func)
def visitGlobal(self, node, func=None):
func.globals += node.names
def visitList(self, node, func=None):
self.constructor(node, 'list', func)
def visitDict(self, node, func=None):
self.constructor(node, 'dict', func)
if node.items: # XXX library bug
node.lineno = node.items[0][0].lineno
def visitNot(self, node, func=None):
self.bool_test_add(node.expr)
newnode = CNode(self.gx, node, parent=func, mv=getmv())
newnode.copymetoo = True
self.gx.types[newnode] = set([(def_class(self.gx, 'bool_'), 0)]) # XXX new type?
self.visit(node.expr, func)
def visitBackquote(self, node, func=None):
self.fake_func(node, node.expr, '__repr__', [], func)
def visitTuple(self, node, func=None):
if len(node.nodes) == 2:
self.constructor(node, 'tuple2', func)
else:
self.constructor(node, 'tuple', func)
def visitSubscript(self, node, func=None): # XXX merge __setitem__, __getitem__
if len(node.subs) > 1:
subscript = Tuple(node.subs)
else:
subscript = node.subs[0]
if isinstance(subscript, Ellipsis): # XXX also check at setitem
error('ellipsis is not supported', self.gx, node, mv=getmv())
if isinstance(subscript, Sliceobj):
self.slice(node, node.expr, subscript.nodes, func)
else:
if node.flags == 'OP_DELETE':
self.fake_func(node, node.expr, '__delitem__', [subscript], func)
elif len(node.subs) > 1:
self.fake_func(node, node.expr, '__getitem__', [subscript], func)
else:
ident = '__getitem__'
self.fake_func(node, node.expr, ident, [subscript], func)
def visitSlice(self, node, func=None):
self.slice(node, node.expr, [node.lower, node.upper, None], func)
def slice(self, node, expr, nodes, func, replace=None):
nodes2 = slice_nums(nodes)
if replace:
self.fake_func(node, expr, '__setslice__', nodes2 + [replace], func)
elif node.flags == 'OP_DELETE':
self.fake_func(node, expr, '__delete__', nodes2, func)
else:
self.fake_func(node, expr, '__slice__', nodes2, func)
def visitUnarySub(self, node, func=None):
self.fake_func(node, node.expr, '__neg__', [], func)
def visitUnaryAdd(self, node, func=None):
self.fake_func(node, node.expr, '__pos__', [], func)
def visitCompare(self, node, func=None):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
newnode.copymetoo = True
self.gx.types[newnode] = set([(def_class(self.gx, 'bool_'), 0)]) # XXX new type?
self.visit(node.expr, func)
msgs = {'<': 'lt', '>': 'gt', 'in': 'contains', 'not in': 'contains', '!=': 'ne', '==': 'eq', '<=': 'le', '>=': 'ge'}
left = node.expr
for op, right in node.ops:
self.visit(right, func)
msg = msgs.get(op)
if msg == 'contains':
self.fake_func(node, right, '__' + msg + '__', [left], func)
elif msg in ('lt', 'gt', 'le', 'ge'):
fakefunc = CallFunc(Name('__%s' % msg), [left, right])
fakefunc.lineno = left.lineno
self.visit(fakefunc, func)
elif msg:
self.fake_func(node, left, '__' + msg + '__', [right], func)
left = right
# tempvars, e.g. (t1=fun())
for term in node.ops[:-1]:
if not isinstance(term[1], (Name, Const)):
self.temp_var2(term[1], inode(self.gx, term[1]), func)
def visitBitand(self, node, func=None):
self.visitBitpair(node, aug_msg(node, 'and'), func)
def visitBitor(self, node, func=None):
self.visitBitpair(node, aug_msg(node, 'or'), func)
def visitBitxor(self, node, func=None):
self.visitBitpair(node, aug_msg(node, 'xor'), func)
def visitBitpair(self, node, msg, func=None):
CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[inode(self.gx, node)] = set()
left = node.nodes[0]
for i, right in enumerate(node.nodes[1:]):
faker = self.fake_func((left, i), left, msg, [right], func)
left = faker
self.add_constraint((inode(self.gx, faker), inode(self.gx, node)), func)
def visitAdd(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'add'), [node.right], func)
def visitInvert(self, node, func=None):
self.fake_func(node, node.expr, '__invert__', [], func)
def visitRightShift(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'rshift'), [node.right], func)
def visitLeftShift(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'lshift'), [node.right], func)
def visitAugAssign(self, node, func=None): # a[b] += c -> a[b] = a[b]+c, using tempvars to handle sidefx
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
clone = copy.deepcopy(node)
lnode = node.node
if isinstance(node.node, Name):
blah = AssName(clone.node.name, 'OP_ASSIGN')
elif isinstance(node.node, Getattr):
blah = AssAttr(clone.node.expr, clone.node.attrname, 'OP_ASSIGN')
elif isinstance(node.node, Subscript):
t1 = self.temp_var(node.node.expr, func)
a1 = Assign([AssName(t1.name, 'OP_ASSIGN')], node.node.expr)
self.visit(a1, func)
self.add_constraint((inode(self.gx, node.node.expr), inode(self.gx, t1)), func)
if len(node.node.subs) > 1:
subs = Tuple(node.node.subs)
else:
subs = node.node.subs[0]
t2 = self.temp_var(subs, func)
a2 = Assign([AssName(t2.name, 'OP_ASSIGN')], subs)
self.visit(a1, func)
self.visit(a2, func)
self.add_constraint((inode(self.gx, subs), inode(self.gx, t2)), func)
inode(self.gx, node).temp1 = t1.name
inode(self.gx, node).temp2 = t2.name
inode(self.gx, node).subs = subs
blah = Subscript(Name(t1.name), 'OP_APPLY', [Name(t2.name)])
lnode = Subscript(Name(t1.name), 'OP_APPLY', [Name(t2.name)])
else:
error('unsupported type of assignment', self.gx, node, mv=getmv())
if node.op == '-=':
blah2 = Sub((lnode, node.expr))
if node.op == '+=':
blah2 = Add((lnode, node.expr))
if node.op == '|=':
blah2 = Bitor((lnode, node.expr))
if node.op == '&=':
blah2 = Bitand((lnode, node.expr))
if node.op == '^=':
blah2 = Bitxor((lnode, node.expr))
if node.op == '**=':
blah2 = Power((lnode, node.expr))
if node.op == '<<=':
blah2 = LeftShift((lnode, node.expr))
if node.op == '>>=':
blah2 = RightShift((lnode, node.expr))
if node.op == '%=':
blah2 = Mod((lnode, node.expr))
if node.op == '*=':
blah2 = Mul((lnode, node.expr))
if node.op == '/=':
blah2 = Div((lnode, node.expr))
if node.op == '//=':
blah2 = FloorDiv((lnode, node.expr))
blah2.augment = True
assign = Assign([blah], blah2)
register_node(assign, func)
inode(self.gx, node).assignhop = assign
self.visit(assign, func)
def visitSub(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'sub'), [node.right], func)
def visitMul(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'mul'), [node.right], func)
def visitDiv(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'div'), [node.right], func)
def visitFloorDiv(self, node, func=None):
self.fake_func(node, node.left, aug_msg(node, 'floordiv'), [node.right], func)
def visitPower(self, node, func=None):
self.fake_func(node, node.left, '__pow__', [node.right], func)
def visitMod(self, node, func=None):
if isinstance(node.right, (Tuple, Dict)):
self.fake_func(node, node.left, '__mod__', [], func)
for child in node.right.getChildNodes():
self.visit(child, func)
if isinstance(node.right, Tuple):
self.fake_func(inode(self.gx, child), child, '__str__', [], func)
else:
self.fake_func(node, node.left, '__mod__', [node.right], func)
def visitPrintnl(self, node, func=None):
self.visitPrint(node, func)
def visitPrint(self, node, func=None):
pnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[pnode] = set()
for child in node.getChildNodes():
self.visit(child, func)
self.fake_func(inode(self.gx, child), child, '__str__', [], func)
def temp_var(self, node, func=None, looper=None, wopper=None, exc_name=False):
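        # Allocate a temporary variable for `node`, reusing an existing name when
        # one was already assigned to this node or to its parent node, and
        # register the variable with `func` so later passes can declare it.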
if node in self.gx.parent_nodes:
varname = self.tempcount[self.gx.parent_nodes[node]]
elif node in self.tempcount: # XXX investigate why this happens
varname = self.tempcount[node]
else:
varname = '__' + str(len(self.tempcount))
var = default_var(self.gx, varname, func, mv=getmv(), exc_name=exc_name)
var.looper = looper
var.wopper = wopper
self.tempcount[node] = varname
register_temp_var(var, func)
return var
def temp_var2(self, node, source, func):
tvar = self.temp_var(node, func)
self.add_constraint((source, inode(self.gx, tvar)), func)
return tvar
def temp_var_int(self, node, func):
var = self.temp_var(node, func)
self.gx.types[inode(self.gx, var)] = set([(def_class(self.gx, 'int_'), 0)])
inode(self.gx, var).copymetoo = True
return var
def visitRaise(self, node, func=None):
if node.expr1 is None or node.expr2 is not None or node.expr3 is not None:
error('unsupported raise syntax', self.gx, node, mv=getmv())
for child in node.getChildNodes():
self.visit(child, func)
def visitTryExcept(self, node, func=None):
self.visit(node.body, func)
for handler in node.handlers:
if not handler[0]:
continue
if isinstance(handler[0], Tuple):
pairs = [(n, handler[1]) for n in handler[0].nodes]
else:
pairs = [(handler[0], handler[1])]
for (h0, h1) in pairs:
if isinstance(h0, Name) and h0.name in ['int', 'float', 'str', 'class']:
continue # handle in lookup_class
cl = lookup_class(h0, getmv())
if not cl:
error("unknown or unsupported exception type", self.gx, h0, mv=getmv())
if isinstance(h1, AssName):
var = self.default_var(h1.name, func, exc_name=True)
else:
var = self.temp_var(h0, func, exc_name=True)
var.invisible = True
inode(self.gx, var).copymetoo = True
self.gx.types[inode(self.gx, var)] = set([(cl, 1)])
for handler in node.handlers:
self.visit(handler[2], func)
# else
if node.else_:
self.visit(node.else_, func)
self.temp_var_int(node.else_, func)
def visitTryFinally(self, node, func=None):
error("'try..finally' is not supported", self.gx, node, mv=getmv())
def visitYield(self, node, func):
func.isGenerator = True
func.yieldNodes.append(node)
self.visit(Return(CallFunc(Name('__iter'), [node.value])), func)
self.add_constraint((inode(self.gx, node.value), func.yieldnode), func)
def visitFor(self, node, func=None):
# --- iterable contents -> assign node
assnode = CNode(self.gx, node.assign, parent=func, mv=getmv())
self.gx.types[assnode] = set()
get_iter = CallFunc(Getattr(node.list, '__iter__'), [])
fakefunc = CallFunc(Getattr(get_iter, 'next'), [])
self.visit(fakefunc, func)
self.add_constraint((inode(self.gx, fakefunc), assnode), func)
# --- assign node -> variables XXX merge into assign_pair
if isinstance(node.assign, AssName):
# for x in..
lvar = self.default_var(node.assign.name, func)
self.add_constraint((assnode, inode(self.gx, lvar)), func)
elif isinstance(node.assign, AssAttr): # XXX experimental :)
# for expr.x in..
CNode(self.gx, node.assign, parent=func, mv=getmv())
self.gx.assign_target[node.assign.expr] = node.assign.expr # XXX multiple targets possible please
fakefunc2 = CallFunc(Getattr(node.assign.expr, '__setattr__'), [Const(node.assign.attrname), fakefunc])
self.visit(fakefunc2, func)
elif isinstance(node.assign, (AssTuple, AssList)):
# for (a,b, ..) in..
self.tuple_flow(node.assign, node.assign, func)
else:
error('unsupported type of assignment', self.gx, node, mv=getmv())
self.do_for(node, assnode, get_iter, func)
# --- for-else
if node.else_:
self.temp_var_int(node.else_, func)
self.visit(node.else_, func)
# --- loop body
self.gx.loopstack.append(node)
self.visit(node.body, func)
self.gx.loopstack.pop()
self.for_in_iters.append(node.list)
def do_for(self, node, assnode, get_iter, func):
# --- for i in range(..) XXX i should not be modified.. use tempcounter; two bounds
if is_fastfor(node):
self.temp_var2(node.assign, assnode, func)
self.temp_var2(node.list, inode(self.gx, node.list.args[0]), func)
if len(node.list.args) == 3 and not isinstance(node.list.args[2], Name) and not is_literal(node.list.args[2]): # XXX merge with ListComp
for arg in node.list.args:
if not isinstance(arg, Name) and not is_literal(arg): # XXX create func for better check
self.temp_var2(arg, inode(self.gx, arg), func)
# --- temp vars for list, iter etc.
else:
self.temp_var2(node, inode(self.gx, node.list), func)
self.temp_var2((node, 1), inode(self.gx, get_iter), func)
self.temp_var_int(node.list, func)
if is_enum(node) or is_zip2(node):
self.temp_var2((node, 2), inode(self.gx, node.list.args[0]), func)
if is_zip2(node):
self.temp_var2((node, 3), inode(self.gx, node.list.args[1]), func)
self.temp_var_int((node, 4), func)
self.temp_var((node, 5), func, looper=node.list)
if isinstance(node.list, CallFunc) and isinstance(node.list.node, Getattr):
self.temp_var((node, 6), func, wopper=node.list.node.expr)
self.temp_var2((node, 7), inode(self.gx, node.list.node.expr), func)
def bool_test_add(self, node):
if isinstance(node, (And, Or, Not)):
self.gx.bool_test_only.add(node)
def visitWhile(self, node, func=None):
self.gx.loopstack.append(node)
self.bool_test_add(node.test)
for child in node.getChildNodes():
self.visit(child, func)
self.gx.loopstack.pop()
if node.else_:
self.temp_var_int(node.else_, func)
self.visit(node.else_, func)
def visitWith(self, node, func=None):
if node.vars:
varnode = CNode(self.gx, node.vars, parent=func, mv=getmv())
self.gx.types[varnode] = set()
self.visit(node.expr, func)
self.add_constraint((inode(self.gx, node.expr), varnode), func)
lvar = self.default_var(node.vars.name, func)
self.add_constraint((varnode, inode(self.gx, lvar)), func)
else:
self.visit(node.expr, func)
for child in node.getChildNodes():
self.visit(child, func)
def visitListCompIf(self, node, func=None):
self.bool_test_add(node.test)
for child in node.getChildNodes():
self.visit(child, func)
def visitListComp(self, node, func=None):
# --- [expr for iter in list for .. if cond ..]
lcfunc = Function(self.gx, mv=getmv())
lcfunc.listcomp = True
lcfunc.ident = 'l.c.' # XXX
lcfunc.parent = func
for qual in node.quals:
# iter
assnode = CNode(self.gx, qual.assign, parent=func, mv=getmv())
self.gx.types[assnode] = set()
# list.unit->iter
get_iter = CallFunc(Getattr(qual.list, '__iter__'), [])
fakefunc = CallFunc(Getattr(get_iter, 'next'), [])
self.visit(fakefunc, lcfunc)
self.add_constraint((inode(self.gx, fakefunc), inode(self.gx, qual.assign)), lcfunc)
if isinstance(qual.assign, AssName): # XXX merge with visitFor
lvar = default_var(self.gx, qual.assign.name, lcfunc) # XXX str or Name?
self.add_constraint((inode(self.gx, qual.assign), inode(self.gx, lvar)), lcfunc)
else: # AssTuple, AssList
self.tuple_flow(qual.assign, qual.assign, lcfunc)
self.do_for(qual, assnode, get_iter, lcfunc)
# cond
for child in qual.ifs:
self.visit(child, lcfunc)
self.for_in_iters.append(qual.list)
# node type
if node in self.gx.genexp_to_lc.values(): # converted generator expression
self.instance(node, def_class(self.gx, '__iter'), func)
else:
self.instance(node, def_class(self.gx, 'list'), func)
# expr->instance.unit
self.visit(node.expr, lcfunc)
self.add_dynamic_constraint(node, node.expr, 'unit', lcfunc)
lcfunc.ident = 'list_comp_' + str(len(self.listcomps))
self.listcomps.append((node, lcfunc, func))
def visitReturn(self, node, func):
self.visit(node.value, func)
func.returnexpr.append(node.value)
if not (isinstance(node.value, Const) and node.value.value is None):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
if isinstance(node.value, Name):
func.retvars.append(node.value.name)
if func.retnode:
self.add_constraint((inode(self.gx, node.value), func.retnode), func)
def visitAssign(self, node, func=None):
# --- rewrite for struct.unpack XXX rewrite callfunc as tuple
if len(node.nodes) == 1:
lvalue, rvalue = node.nodes[0], node.expr
if self.struct_unpack(rvalue, func) and isinstance(lvalue, (AssList, AssTuple)) and not [n for n in lvalue.nodes if isinstance(n, (AssList, AssTuple))]:
self.visit(node.expr, func)
sinfo = self.struct_info(rvalue.args[0], func)
faketuple = self.struct_faketuple(sinfo)
self.visit(Assign(node.nodes, faketuple), func)
tvar = self.temp_var2(rvalue.args[1], inode(self.gx, rvalue.args[1]), func)
tvar_pos = self.temp_var_int(rvalue.args[0], func)
self.gx.struct_unpack[node] = (sinfo, tvar.name, tvar_pos.name)
return
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
# --- a,b,.. = c,(d,e),.. = .. = expr
for target_expr in node.nodes:
pairs = assign_rec(target_expr, node.expr)
for (lvalue, rvalue) in pairs:
# expr[expr] = expr
if isinstance(lvalue, Subscript) and not isinstance(lvalue.subs[0], Sliceobj):
self.assign_pair(lvalue, rvalue, func) # XXX use here generally, and in tuple_flow
# expr.attr = expr
elif isinstance(lvalue, AssAttr):
self.assign_pair(lvalue, rvalue, func)
# name = expr
elif isinstance(lvalue, AssName):
if (rvalue, 0, 0) not in self.gx.cnode: # XXX generalize
self.visit(rvalue, func)
self.visit(lvalue, func)
lvar = self.default_var(lvalue.name, func)
if isinstance(rvalue, Const):
lvar.const_assign.append(rvalue)
self.add_constraint((inode(self.gx, rvalue), inode(self.gx, lvar)), func)
# (a,(b,c), ..) = expr
elif isinstance(lvalue, (AssTuple, AssList)):
self.visit(rvalue, func)
self.tuple_flow(lvalue, rvalue, func)
# expr[a:b] = expr # XXX bla()[1:3] = [1]
elif isinstance(lvalue, Slice):
self.slice(lvalue, lvalue.expr, [lvalue.lower, lvalue.upper, None], func, rvalue)
# expr[a:b:c] = expr
elif isinstance(lvalue, Subscript) and isinstance(lvalue.subs[0], Sliceobj):
self.slice(lvalue, lvalue.expr, lvalue.subs[0].nodes, func, rvalue)
# temp vars
if len(node.nodes) > 1 or isinstance(node.expr, Tuple):
if isinstance(node.expr, Tuple):
if [n for n in node.nodes if isinstance(n, AssTuple)]:
for child in node.expr.nodes:
if (child, 0, 0) not in self.gx.cnode: # (a,b) = (1,2): (1,2) never visited
continue
if not isinstance(child, Const) and not (isinstance(child, Name) and child.name == 'None'):
self.temp_var2(child, inode(self.gx, child), func)
elif not isinstance(node.expr, Const) and not (isinstance(node.expr, Name) and node.expr.name == 'None'):
self.temp_var2(node.expr, inode(self.gx, node.expr), func)
def assign_pair(self, lvalue, rvalue, func):
# expr[expr] = expr
if isinstance(lvalue, Subscript) and not isinstance(lvalue.subs[0], Sliceobj):
if len(lvalue.subs) > 1:
subscript = Tuple(lvalue.subs)
else:
subscript = lvalue.subs[0]
fakefunc = CallFunc(Getattr(lvalue.expr, '__setitem__'), [subscript, rvalue])
self.visit(fakefunc, func)
inode(self.gx, lvalue.expr).fakefunc = fakefunc
if len(lvalue.subs) > 1:
inode(self.gx, lvalue.expr).faketuple = subscript
if not isinstance(lvalue.expr, Name):
self.temp_var2(lvalue.expr, inode(self.gx, lvalue.expr), func)
# expr.attr = expr
elif isinstance(lvalue, AssAttr):
CNode(self.gx, lvalue, parent=func, mv=getmv())
self.gx.assign_target[rvalue] = lvalue.expr
fakefunc = CallFunc(Getattr(lvalue.expr, '__setattr__'), [Const(lvalue.attrname), rvalue])
self.visit(fakefunc, func)
def default_var(self, name, func, exc_name=False):
if isinstance(func, Function) and name in func.globals:
return default_var(self.gx, name, None, mv=getmv(), exc_name=exc_name)
else:
return default_var(self.gx, name, func, mv=getmv(), exc_name=exc_name)
def tuple_flow(self, lvalue, rvalue, func=None):
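        # Model tuple/list unpacking: each element of `lvalue` receives the result
        # of a faked `rvalue.__getitem__(i)` call, recursing into nested
        # tuples/lists and handling attribute/subscript targets via assign_pair.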
self.temp_var2(lvalue, inode(self.gx, rvalue), func)
if isinstance(lvalue, (AssTuple, AssList)):
lvalue = lvalue.nodes
for (i, item) in enumerate(lvalue):
fakenode = CNode(self.gx, (item,), parent=func, mv=getmv()) # fake node per item, for multiple callfunc triggers
self.gx.types[fakenode] = set()
self.add_constraint((inode(self.gx, rvalue), fakenode), func)
fakefunc = CallFunc(FakeGetattr3(rvalue, '__getitem__'), [Const(i)])
fakenode.callfuncs.append(fakefunc)
self.visit(fakefunc, func)
self.gx.item_rvalue[item] = rvalue
if isinstance(item, AssName):
lvar = self.default_var(item.name, func)
self.add_constraint((inode(self.gx, fakefunc), inode(self.gx, lvar)), func)
elif isinstance(item, (Subscript, AssAttr)):
self.assign_pair(item, fakefunc, func)
elif isinstance(item, (AssTuple, AssList)): # recursion
self.tuple_flow(item, fakefunc, func)
else:
error('unsupported type of assignment', self.gx, item, mv=getmv())
def super_call(self, orig, parent):
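        # Detect a `super(Class, self).method(...)` call and return the first base
        # class of `Class`; other uses of super() are reported as errors, and
        # non-super calls fall through and return None.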
node = orig.node
while isinstance(parent, Function):
parent = parent.parent
if (isinstance(node.expr, CallFunc) and
node.attrname not in ('__getattr__', '__setattr__') and
isinstance(node.expr.node, Name) and
node.expr.node.name == 'super'):
if (len(node.expr.args) >= 2 and
isinstance(node.expr.args[1], Name) and node.expr.args[1].name == 'self'):
cl = lookup_class(node.expr.args[0], getmv())
if cl.node.bases:
return cl.node.bases[0]
error("unsupported usage of 'super'", self.gx, orig, mv=getmv())
def visitCallFunc(self, node, func=None): # XXX clean up!!
newnode = CNode(self.gx, node, parent=func, mv=getmv())
if isinstance(node.node, Getattr): # XXX import math; math.e
# rewrite super(..) call
base = self.super_call(node, func)
if base:
node.node = Getattr(copy.deepcopy(base), node.node.attrname)
node.args = [Name('self')] + node.args
# method call
if isinstance(node.node, FakeGetattr): # XXX butt ugly
self.visit(node.node, func)
elif isinstance(node.node, FakeGetattr2):
self.gx.types[newnode] = set() # XXX move above
self.callfuncs.append((node, func))
for arg in node.args:
inode(self.gx, arg).callfuncs.append(node) # this one too
return
elif isinstance(node.node, FakeGetattr3):
pass
else:
self.visitGetattr(node.node, func, callfunc=True)
inode(self.gx, node.node).callfuncs.append(node) # XXX iterative dataflow analysis: move there?
inode(self.gx, node.node).fakert = True
ident = node.node.attrname
inode(self.gx, node.node.expr).callfuncs.append(node) # XXX iterative dataflow analysis: move there?
if isinstance(node.node.expr, Name) and node.node.expr.name in getmv().imports and node.node.attrname == '__getattr__': # XXX analyze_callfunc
if node.args[0].value in getmv().imports[node.node.expr.name].mv.globals: # XXX bleh
self.add_constraint((inode(self.gx, getmv().imports[node.node.expr.name].mv.globals[node.args[0].value]), newnode), func)
elif isinstance(node.node, Name):
# direct call
ident = node.node.name
if ident == 'print':
ident = node.node.name = '__print' # XXX
if ident in ['hasattr', 'getattr', 'setattr', 'slice', 'type', 'Ellipsis']:
error("'%s' function is not supported" % ident, self.gx, node.node, mv=getmv())
if ident == 'dict' and [x for x in node.args if isinstance(x, Keyword)]:
error('unsupported method of initializing dictionaries', self.gx, node, mv=getmv())
if ident == 'isinstance':
error("'isinstance' is not supported; always returns True", self.gx, node, mv=getmv(), warning=True)
if lookup_var(ident, func, mv=getmv()):
self.visit(node.node, func)
inode(self.gx, node.node).callfuncs.append(node) # XXX iterative dataflow analysis: move there
else:
self.visit(node.node, func)
inode(self.gx, node.node).callfuncs.append(node) # XXX iterative dataflow analysis: move there
# --- arguments
if not getmv().module.builtin and (node.star_args or node.dstar_args):
error('argument (un)packing is not supported', self.gx, node, mv=getmv())
args = node.args[:]
if node.star_args:
args.append(node.star_args) # partially allowed in builtins
if node.dstar_args:
args.append(node.dstar_args)
for arg in args:
if isinstance(arg, Keyword):
arg = arg.expr
self.visit(arg, func)
inode(self.gx, arg).callfuncs.append(node) # this one too
# --- handle instantiation or call
constructor = lookup_class(node.node, getmv())
if constructor and (not isinstance(node.node, Name) or not lookup_var(node.node.name, func, mv=getmv())):
self.instance(node, constructor, func)
inode(self.gx, node).callfuncs.append(node) # XXX see above, investigate
else:
self.gx.types[newnode] = set()
self.callfuncs.append((node, func))
def visitClass(self, node, parent=None):
if not getmv().module.builtin and not node in getmv().classnodes:
error("non-global class '%s'" % node.name, self.gx, node, mv=getmv())
if len(node.bases) > 1:
error('multiple inheritance is not supported', self.gx, node, mv=getmv())
if not getmv().module.builtin:
for base in node.bases:
if not isinstance(base, (Name, Getattr)):
error("invalid expression for base class", self.gx, node, mv=getmv())
if isinstance(base, Name):
name = base.name
else:
name = base.attrname
cl = lookup_class(base, getmv())
if not cl:
error("no such class: '%s'" % name, self.gx, node, mv=getmv())
elif cl.mv.module.builtin and name not in ['object', 'Exception', 'tzinfo']:
if def_class(self.gx, 'Exception') not in cl.ancestors():
error("inheritance from builtin class '%s' is not supported" % name, self.gx, node, mv=getmv())
if node.name in getmv().classes:
newclass = getmv().classes[node.name] # set in visitModule, for forward references
else:
check_redef(self.gx, node) # XXX merge with visitModule
newclass = Class(self.gx, node, getmv())
self.classes[node.name] = newclass
getmv().classes[node.name] = newclass
newclass.module = self.module
newclass.parent = StaticClass(newclass, getmv())
# --- built-in functions
for cl in [newclass, newclass.parent]:
for ident in ['__setattr__', '__getattr__']:
func = Function(self.gx, mv=getmv())
func.ident = ident
func.parent = cl
if ident == '__setattr__':
func.formals = ['name', 'whatsit']
retexpr = Return(Name('None'))
self.visit(retexpr, func)
elif ident == '__getattr__':
func.formals = ['name']
cl.funcs[ident] = func
# --- built-in attributes
if 'class_' in getmv().classes or 'class_' in getmv().ext_classes:
var = default_var(self.gx, '__class__', newclass)
var.invisible = True
self.gx.types[inode(self.gx, var)] = set([(def_class(self.gx, 'class_'), def_class(self.gx, 'class_').dcpa)])
def_class(self.gx, 'class_').dcpa += 1
# --- staticmethod, property
skip = []
for child in node.code.getChildNodes():
if isinstance(child, Assign) and len(child.nodes) == 1:
lvalue, rvalue = child.nodes[0], child.expr
if isinstance(lvalue, AssName) and isinstance(rvalue, CallFunc) and isinstance(rvalue.node, Name) and rvalue.node.name in ['staticmethod', 'property']:
if rvalue.node.name == 'property':
if len(rvalue.args) == 1 and isinstance(rvalue.args[0], Name):
newclass.properties[lvalue.name] = rvalue.args[0].name, None
elif len(rvalue.args) == 2 and isinstance(rvalue.args[0], Name) and isinstance(rvalue.args[1], Name):
newclass.properties[lvalue.name] = rvalue.args[0].name, rvalue.args[1].name
else:
error("complex properties are not supported", self.gx, rvalue, mv=getmv())
else:
newclass.staticmethods.append(lvalue.name)
skip.append(child)
# --- children
for child in node.code.getChildNodes():
if child not in skip:
cl = self.classes[node.name]
if isinstance(child, FunctionNode):
self.visit(child, cl)
else:
cl.parent.static_nodes.append(child)
self.visit(child, cl.parent)
# --- __iadd__ etc.
if not newclass.mv.module.builtin or newclass.ident in ['int_', 'float_', 'str_', 'tuple', 'complex']:
msgs = ['add', 'mul'] # XXX mod, pow
if newclass.ident in ['int_', 'float_']:
msgs += ['sub', 'div', 'floordiv']
if newclass.ident in ['int_']:
msgs += ['lshift', 'rshift', 'and', 'xor', 'or']
for msg in msgs:
if not '__i' + msg + '__' in newclass.funcs:
self.visit(FunctionNode(None, '__i' + msg + '__', ['self', 'other'], [], 0, None, Stmt([Return(CallFunc(Getattr(Name('self'), '__' + msg + '__'), [Name('other')], None, None))])), newclass)
# --- __str__, __hash__ # XXX model in lib/builtin.py, other defaults?
if not newclass.mv.module.builtin and not '__str__' in newclass.funcs:
self.visit(FunctionNode(None, '__str__', ['self'], [], 0, None, Return(CallFunc(Getattr(Name('self'), '__repr__'), []))), newclass)
newclass.funcs['__str__'].invisible = True
if not newclass.mv.module.builtin and not '__hash__' in newclass.funcs:
self.visit(FunctionNode(None, '__hash__', ['self'], [], 0, None, Return(Const(0)), []), newclass)
newclass.funcs['__hash__'].invisible = True
def visitGetattr(self, node, func=None, callfunc=False):
if node.attrname in ['__doc__']:
error('%s attribute is not supported' % node.attrname, self.gx, node, mv=getmv())
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
fakefunc = CallFunc(FakeGetattr(node.expr, '__getattr__'), [Const(node.attrname)])
self.visit(fakefunc, func)
self.add_constraint((self.gx.cnode[fakefunc, 0, 0], newnode), func)
self.callfuncs.append((fakefunc, func))
if not callfunc:
self.fncl_passing(node, newnode, func)
def visitConst(self, node, func=None):
if type(node.value) == unicode:
error('unicode is not supported', self.gx, node, mv=getmv())
map = {int: 'int_', str: 'str_', float: 'float_', type(None): 'none', long: 'int_', complex: 'complex'} # XXX 'return' -> Return(Const(None))?
self.instance(node, def_class(self.gx, map[type(node.value)]), func)
def fncl_passing(self, node, newnode, func):
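        # Handle a function or class used as a value (referenced without being
        # called): seed `newnode` with the function/class type, wrapping builtins
        # in a generated lambda wrapper. Returns False if `node` does not name a
        # function or class.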
lfunc, lclass = lookup_func(node, getmv()), lookup_class(node, getmv())
if lfunc:
if lfunc.mv.module.builtin:
lfunc = self.builtin_wrapper(node, func)
elif lfunc.ident not in lfunc.mv.lambdas:
lfunc.lambdanr = len(lfunc.mv.lambdas)
lfunc.mv.lambdas[lfunc.ident] = lfunc
self.gx.types[newnode] = set([(lfunc, 0)])
elif lclass:
if lclass.mv.module.builtin:
lclass = self.builtin_wrapper(node, func)
else:
lclass = lclass.parent
self.gx.types[newnode] = set([(lclass, 0)])
else:
return False
newnode.copymetoo = True # XXX merge into some kind of 'seeding' function
return True
def visitName(self, node, func=None):
newnode = CNode(self.gx, node, parent=func, mv=getmv())
self.gx.types[newnode] = set()
if node.name == '__doc__':
error("'%s' attribute is not supported" % node.name, self.gx, node, mv=getmv())
if node.name in ['None', 'True', 'False']:
if node.name == 'None': # XXX also bools, remove def seed_nodes()
self.instance(node, def_class(self.gx, 'none'), func)
else:
self.instance(node, def_class(self.gx, 'bool_'), func)
return
if isinstance(func, Function) and node.name in func.globals:
var = default_var(self.gx, node.name, None, mv=getmv())
else:
var = lookup_var(node.name, func, mv=getmv())
if not var:
if self.fncl_passing(node, newnode, func):
pass
elif node.name in ['int', 'float', 'str']: # XXX
cl = self.ext_classes[node.name + '_']
self.gx.types[newnode] = set([(cl.parent, 0)])
newnode.copymetoo = True
else:
var = default_var(self.gx, node.name, None, mv=getmv())
if var:
self.add_constraint((inode(self.gx, var), newnode), func)
for a, b in self.gx.filterstack:
if var.name == a.name:
self.gx.filters[node] = lookup_class(b, getmv())
def builtin_wrapper(self, node, func):
node2 = CallFunc(copy.deepcopy(node), [Name(x) for x in 'abcde'])
l = Lambda(list('abcde'), [], 0, node2)
self.visit(l, func)
self.lwrapper[node] = self.lambdaname[l]
self.gx.lambdawrapper[node2] = self.lambdaname[l]
f = self.lambdas[self.lambdaname[l]]
f.lambdawrapper = True
inode(self.gx, node2).lambdawrapper = f
return f
def parse_module(name, gx, parent=None, node=None):
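    # Locate a module on disk (relative to the importing module or the library
    # dirs), return the cached Module if it was already parsed, otherwise parse
    # it and run a ModuleVisitor over its AST.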
# --- valid name?
if not re.match("^[a-zA-Z0-9_.]+$", name):
print ("*ERROR*:%s.py: module names should consist of letters, digits and underscores" % name)
sys.exit(1)
# --- create module
try:
if parent and parent.path != os.getcwd():
basepaths = [parent.path, os.getcwd()]
else:
basepaths = [os.getcwd()]
module_paths = basepaths + gx.libdirs
absolute_name, filename, relative_filename, builtin = find_module(gx, name, module_paths)
module = Module(absolute_name, filename, relative_filename, builtin, node)
except ImportError:
error('cannot locate module: ' + name, gx, node, mv=getmv())
# --- check cache
if module.name in gx.modules: # cached?
return gx.modules[module.name]
gx.modules[module.name] = module
# --- not cached, so parse
module.ast = parse_file(module.filename)
old_mv = getmv()
module.mv = mv = ModuleVisitor(module, gx)
setmv(mv)
mv.visit = mv.dispatch
mv.visitor = mv
mv.dispatch(module.ast)
module.import_order = gx.import_order
gx.import_order += 1
mv = old_mv
setmv(mv)
return module
| shedskin/shedskin | shedskin/graph.py | Python | gpl-3.0 | 71,909 |
#!/usr/bin/env python3
#
# PLASMA : Generate an indented asm code (pseudo-C) with colored syntax.
# Copyright (C) 2015 Joel
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
from time import time
from plasma.lib.ast import (Ast_Branch, Ast_Goto, Ast_Loop, Ast_If_cond,
Ast_IfGoto, Ast_Ifelse, Ast_AndIf, Ast_Comment)
from plasma.lib.utils import BRANCH_NEXT, BRANCH_NEXT_JUMP, debug__
from plasma.lib.exceptions import ExcIfelse
from plasma.lib.colors import pick_color
class Endpoint():
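    # An endpoint is an address where several paths meet: `ast` collects the
    # branches that already reached it, `unseen` the incoming links that have not
    # been visited yet, and `loop_start` the loop each branch came from.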
def __init__(self, ast, unseen, l_start):
self.ast = [ast]
self.unseen = unseen
self.loop_start = [l_start]
def rendezvous(self, ast, prev, l_start):
self.ast.append(ast)
self.loop_start.append(l_start)
if prev in self.unseen:
self.unseen.remove(prev)
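# Walk the ast and pick a color for every jump target (gotos, if-gotos and
# unconditional jumps), so the pseudo-code output highlights them consistently.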
def assign_colors(libarch, ctx, ast):
if isinstance(ast, Ast_Branch):
for n in ast.nodes:
if isinstance(n, list):
if libarch.utils.is_uncond_jump(n[0]) and \
n[0].operands[0].type == libarch.utils.OP_IMM and \
n[0].address in ctx.gph.link_out:
nxt = ctx.gph.link_out[n[0].address][BRANCH_NEXT]
pick_color(nxt)
else: # ast
assign_colors(libarch, ctx, n)
elif isinstance(ast, Ast_IfGoto) or isinstance(ast, Ast_Goto):
pick_color(ast.addr_jump)
elif isinstance(ast, Ast_Ifelse):
assign_colors(libarch, ctx, ast.br_next_jump)
assign_colors(libarch, ctx, ast.br_next)
elif isinstance(ast, Ast_Loop):
assign_colors(libarch, ctx, ast.branch)
def get_first_addr(ast):
# Assume that there are no Ast_Comment
if isinstance(ast, list):
return ast[0].address
if isinstance(ast, Ast_Branch):
if len(ast.nodes) > 0:
return get_first_addr(ast.nodes[0])
if isinstance(ast, Ast_Ifelse):
        # An Ast_Ifelse holds no instructions of its own here, so use the address of its jump instruction
return ast.jump_inst.address
if isinstance(ast, Ast_Loop):
if len(ast.branch.nodes) > 0:
return get_first_addr(ast.branch.nodes[0])
if isinstance(ast, Ast_Goto):
return ast.addr_jump
if isinstance(ast, Ast_IfGoto):
return ast.orig_jump.address
if isinstance(ast, Ast_AndIf):
return ast.orig_jump.address
if isinstance(ast, Ast_If_cond):
if len(ast.br.nodes) > 0:
return get_first_addr(ast.br.nodes[0])
return -1
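# Return the address of the node that follows `ast` inside its parent, climbing
# up the tree when `ast` is the last child at its level.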
def get_next_addr(ast):
par = ast.parent
if par is None:
return -1
i = ast.idx_in_parent + 1
# Get the next address of the parent ast
if i == len(par.nodes):
return get_next_addr(par)
return get_first_addr(par.nodes[i])
# Returns the first address of the current loop only if the i th ast
# is the last in the parent ast.
def is_last_in_loop(ast, i):
par = ast.parent
if par is None:
return -1
is_last = i == len(ast.nodes) - 1
a = ast.parent.nodes[ast.idx_in_parent]
if isinstance(a, Ast_Loop) and is_last:
return get_first_addr(a)
if not is_last:
return -1
return is_last_in_loop(par, ast.idx_in_parent)
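# Recursively drop trailing Ast_Goto nodes whose target is the address that
# would be executed next anyway (the goto is implied by the layout).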
def remove_all_unnecessary_goto(ast):
if isinstance(ast, Ast_Branch):
# Remove all last Ast_Goto, only if the previous is not an andif
if len(ast.nodes) > 0 and isinstance(ast.nodes[-1], Ast_Goto):
if len(ast.nodes) <= 1 or not isinstance(ast.nodes[-2], Ast_AndIf):
if not ast.nodes[-1].dont_remove:
nxt = get_next_addr(ast)
if ast.nodes[-1].addr_jump == nxt:
del ast.nodes[-1]
for n in ast.nodes:
if not isinstance(n, list):
remove_all_unnecessary_goto(n)
elif isinstance(ast, Ast_Ifelse):
remove_all_unnecessary_goto(ast.br_next)
remove_all_unnecessary_goto(ast.br_next_jump)
elif isinstance(ast, Ast_Loop):
if isinstance(ast.branch.nodes[-1], Ast_Goto):
if get_first_addr(ast) == ast.branch.nodes[-1].addr_jump:
del ast.branch.nodes[-1]
remove_all_unnecessary_goto(ast.branch)
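# Insert explicit Ast_Goto nodes wherever two consecutive ast nodes are not
# consecutive in the binary, so no implicit fall-through to a non-adjacent
# address remains.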
def fix_non_consecutives(ctx, ast):
if isinstance(ast, Ast_Branch):
idx_to_add = {}
for i, n in enumerate(ast.nodes):
if isinstance(n, list):
ad = n[0].address
if ad in ctx.gph.uncond_jumps_set or ad not in ctx.gph.link_out:
continue
nxt1 = ctx.gph.link_out[ad][BRANCH_NEXT]
if i == len(ast.nodes) - 1:
loop_start = is_last_in_loop(ast, i)
if loop_start != -1:
if nxt1 != loop_start:
idx_to_add[i + 1] = nxt1
continue
nxt2 = get_next_addr(ast)
else:
nxt2 = get_first_addr(ast.nodes[i + 1])
if nxt1 != nxt2:
idx_to_add[i + 1] = nxt1
else:
fix_non_consecutives(ctx, n)
if not idx_to_add:
return
# Add from the end of the nodes list
lst = list(idx_to_add.keys())
lst.sort()
for i in reversed(lst):
ast.nodes.insert(i, Ast_Goto(idx_to_add[i]))
elif isinstance(ast, Ast_Ifelse):
fix_non_consecutives(ctx, ast.br_next)
fix_non_consecutives(ctx, ast.br_next_jump)
elif isinstance(ast, Ast_Loop):
fix_non_consecutives(ctx, ast.branch)
# An endpoint is the first intersection between all paths which starts
# at entry. This is used to find the next statement just after a if-else.
def search_endpoint(ctx, ast, entry, l_set, l_prev_loop, l_start):
endp = __search_endpoint(ctx, ast, entry, l_set, l_prev_loop, l_start)
if endp == -1:
return -1
    # Check whether the endpoint we found lies inside a subloop: for an "if"
    # the end cannot be inside a loop, so return -1 in that case.
if l_prev_loop == -1:
l = ctx.gph.not_in_loop
else:
# l_set contains also subloops, here we just want the current loop
l = ctx.gph.loops_set[(l_prev_loop, l_start)]
if endp not in l:
return -1
return endp
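# Move every waiting address whose dependency set is now empty onto the stack
# and mark it as done.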
def __push_empty_waiting(stack, waiting, done):
for ad in list(waiting):
if len(waiting[ad]) > 0:
continue
del waiting[ad]
done.add(ad)
stack.append((-1, ad))
def __search_endpoint(ctx, ast, entry, l_set, l_prev_loop, l_start):
waiting = {}
visited = set()
done = set()
stack = []
first_nxt = []
for n in ctx.gph.link_out[entry]:
stack.append((entry, n))
first_nxt.append(n)
while 1:
while stack:
prev, ad = stack.pop(-1)
# Don't go outside the current loop : we want to search
# an if-endpoint.
if l_prev_loop != -1 and ad not in l_set:
continue
# If "ad" is in last_node_loop we are sure that the path
# will loop. So don't keep it if it's a subloop.
if ad in ctx.gph.last_node_loop and \
(l_prev_loop, l_start) not in ctx.gph.last_node_loop[ad]:
continue
            # If endpoint == loop start, the endpoint may be at the end of the loop.
            # If there are several incoming links and this is not a new loop, wait.
if ad not in done:
lkin = ctx.gph.link_in[ad]
if ad == l_start or len(lkin) > 1:
unseen = get_unseen_links_in(ad, l_set, l_prev_loop, l_start)
if len(unseen) > 1:
if ad in waiting:
if prev in waiting[ad]:
waiting[ad].remove(prev)
else:
unseen.remove(prev)
waiting[ad] = unseen
continue
if ad in visited:
continue
visited.add(ad)
if ad in ctx.gph.link_out:
for n in ctx.gph.link_out[ad]:
stack.append((ad, n))
if not waiting:
return -1
# Now the stack is empty, but there are still some waiting nodes.
__push_empty_waiting(stack, waiting, done)
        # If the stack is still empty but some nodes are still waiting, check
        # whether the pending paths can actually be taken. If not, drop the
        # corresponding dependency.
if not stack and waiting:
for ad in set(waiting):
for i in set(waiting[ad]):
if not ctx.gph.path_exists(entry, i, l_start):
waiting[ad].remove(i)
__push_empty_waiting(stack, waiting, done)
        # This means one node in waiting had no remaining dependencies left
        # and was moved onto the stack.
if len(stack) == 1 and not waiting:
endp = stack[0][1]
# Check to be sure, see tests/analyzer/ifexit.c
v = True
for n in first_nxt:
v &= ctx.gph.path_exists(n, endp, l_start)
if not v:
return -1
return endp
if not stack:
return -1
        # ...otherwise the `while 1` loop iterates again
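# Return the incoming links of `ad` that still have to be visited, ignoring
# links coming from a sub-loop that starts at `ad` and links coming from
# outside the current loop.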
def get_unseen_links_in(ad, l_set, l_prev_loop, l_start):
unseen = set(ctx.gph.link_in[ad])
# Is it the beginning of a loop ?
# Remove internal links to the beginning of the loop
if (l_start, ad) in ctx.gph.loops_all:
sub_loop = ctx.gph.loops_all[(l_start, ad)]
for prev in ctx.gph.link_in[ad]:
if prev in sub_loop and prev in unseen:
unseen.remove(prev)
if l_set is None:
return unseen
# Remove external jumps which are outside the current loop
for prev in ctx.gph.link_in[ad]:
if prev not in l_set and prev in unseen:
unseen.remove(prev)
return unseen
def remove_unnecessary_goto(ast, ad):
if len(ast.nodes) > 1:
if isinstance(ast.nodes[-1], Ast_Goto) and \
ast.nodes[-1].addr_jump == ad:
ast.nodes.pop(-1)
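# Every path to `ad` has been seen: continue on the waiting ast with the
# smallest nesting level, and give each of the other waiting asts an explicit
# goto so no path is lost.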
def rm_waiting(ctx, waiting, ad):
# Get the ast which has the smallest level
min_level_idx = -1
list_ast = waiting[ad].ast
list_loop_start = waiting[ad].loop_start
for i, a in enumerate(list_ast):
if (list_loop_start[i], ad) in ctx.gph.false_loops:
continue
if min_level_idx == -1 or a.level < list_ast[min_level_idx].level:
min_level_idx = i
if min_level_idx == -1:
print("errorD: this is a bug, please report")
sys.exit(1)
ast = list_ast[min_level_idx]
    # Add a goto to each of the other asts. If they turn out to be
    # unnecessary, they will be deleted later by remove_unnecessary_goto
    # or remove_all_unnecessary_goto.
for i, a in enumerate(list_ast):
if i == min_level_idx:
continue
if len(a.nodes) == 0:
a.add(Ast_Goto(ad))
continue
# The previous instruction has not `ad` as the next instruction
if isinstance(a.nodes[-1], list):
prev = a.nodes[-1][0].address
if prev in ctx.gph.uncond_jumps_set:
continue
if prev in ctx.gph.link_out:
n = ctx.gph.link_out[prev][BRANCH_NEXT]
if n != ad:
a.add(Ast_Goto(n))
continue
# The previous is a goto, skip it
if isinstance(a.nodes[-1], Ast_Goto):
continue
a.add(Ast_Goto(ad))
waiting[ad].ast.clear()
del waiting[ad]
return ast
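# Decide whether the traversal may go through `ad` now: returns the ast to
# continue on, or None when `ad` still has unseen incoming links and the
# current branch has to wait at this endpoint.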
def manage_endpoint(ctx, waiting, ast, prev, ad, l_set, l_prev_loop,
l_start, ad_is_visited):
if ad not in ctx.gph.link_in or len(ctx.gph.link_in[ad]) <= 1:
return ast
    # If ad_is_visited is False, this is an advance registration for a future
    # visit of this node; in that case `prev` is meaningless.
if not ad_is_visited:
if ad not in waiting:
unseen = get_unseen_links_in(ad, l_set, l_prev_loop, l_start)
waiting[ad] = Endpoint(ast, unseen, l_start)
return None
if ad in waiting:
waiting[ad].rendezvous(ast, prev, l_start)
if len(waiting[ad].unseen) != 0:
return None
ast = rm_waiting(ctx, waiting, ad)
return ast
unseen = get_unseen_links_in(ad, l_set, l_prev_loop, l_start)
if len(unseen) > 1:
unseen.remove(prev)
waiting[ad] = Endpoint(ast, unseen, l_start)
return None
return ast
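# Build the Ast of the current function with an iterative depth-first walk over
# the control-flow graph. Each stack entry is a tuple
# (ast, loops_stack, prev, curr, else_addr); `waiting` holds the endpoints where
# branches have to meet before the walk continues.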
def generate_ast(ctx__):
global ctx
ctx = ctx__
start = time()
ast = Ast_Branch()
ast.parent = None
stack = [(ast, [], -1, ctx.entry, -1)]
visited = set()
waiting = {}
ast_head = ast
fake_br = Ast_Branch()
fake_br.level = sys.maxsize
libarch = ctx.gctx.libarch
while stack or waiting:
if not stack and waiting:
if not ctx.gph.skipped_loops_analysis:
break
for ad in set(waiting):
waiting[ad].unseen.clear()
stack.append((fake_br, [], -1, ad, -1))
ast, loops_stack, prev, curr, else_addr = stack.pop(-1)
        # Check if we enter a false loop (see gotoinloop*)
if loops_stack:
_, _, l_start = loops_stack[-1]
else:
l_start = ctx.entry
blk = ctx.gph.nodes[curr]
# Exit the current loop
while loops_stack:
l_ast, l_prev_loop, l_start = loops_stack[-1]
l_set = ctx.gph.loops_all[(l_prev_loop, l_start)]
if curr not in l_set:
loops_stack.pop(-1)
ast = l_ast.parent
if curr not in ctx.gctx.db.reverse_symbols:
name = "break_0x%x" % curr
ctx.gctx.db.symbols[name] = curr
ctx.gctx.db.reverse_symbols[curr] = name
ctx.gctx.db.modified = True
else:
break
if not loops_stack:
l_prev_loop = -1
l_start = ctx.entry
l_set = None
level = ast.level
if curr not in visited:
# Check if we need to stop and wait on a node
a = manage_endpoint(ctx, waiting, ast, prev, curr, l_set,
l_prev_loop, l_start, True)
if a is None:
continue
ast = a
remove_unnecessary_goto(ast, curr)
            # Check if we enter a new loop
if (l_start, curr) in ctx.gph.loops_all:
if not ctx.gctx.is_interactive or curr in ctx.gctx.db.xrefs:
do = True
if curr in ctx.gctx.db.reverse_symbols:
if not ctx.gctx.db.reverse_symbols[curr].startswith("break_"):
do = False
if do:
name = "loop_0x%x" % curr
ctx.gctx.db.symbols[name] = curr
ctx.gctx.db.reverse_symbols[curr] = name
ctx.gctx.db.modified = True
level += 1
a = Ast_Loop()
a.level = level
a.parent = ast
a.idx_in_parent = len(ast.nodes)
a.branch.parent = ast
a.branch.level = level
a.branch.idx_in_parent = len(ast.nodes)
ast.add(a)
ast = a.branch
loops_stack.append((a, l_start, curr))
else_addr = -1
l_ast = a
l_set = ctx.gph.loops_all[(l_start, curr)]
l_prev_loop = l_start
l_start = curr
if (l_prev_loop, l_start) in ctx.gph.infinite_loop:
a.is_infinite = True
        # Here curr may have changed
if curr in visited:
if curr == l_start:
continue
if len(ast.nodes) > 0:
if isinstance(ast.nodes[-1], list):
prev = ast.nodes[-1][0].address
if prev not in ctx.gph.uncond_jumps_set:
ast.add(Ast_Goto(curr))
else:
ast.add(Ast_Goto(curr))
continue
visited.add(curr)
# Return instruction
if curr not in ctx.gph.link_out:
if curr != ctx.entry and \
(not ctx.gctx.is_interactive or curr in ctx.gctx.db.xrefs):
do = True
if curr in ctx.gctx.db.reverse_symbols:
if not ctx.gctx.db.reverse_symbols[curr].startswith("break_"):
do = False
if do:
name = "ret_0x%x" % curr
ctx.gctx.db.symbols[name] = curr
ctx.gctx.db.reverse_symbols[curr] = name
ctx.gctx.db.modified = True
ast.add(blk)
continue
nxt = ctx.gph.link_out[curr]
if curr in ctx.gctx.dis.jmptables:
ast.add(blk)
for n in nxt:
stack.append((ast, loops_stack, curr, n, else_addr))
elif len(nxt) == 2:
# We are on a conditional jump
prefetch = blk[1] if len(blk) == 2 else None
if loops_stack:
goto_set = False
c1 = nxt[BRANCH_NEXT] not in l_set
c2 = nxt[BRANCH_NEXT_JUMP] not in l_set
if c1 and c2:
raise ExcIfelse(curr)
if c1:
exit_loop = nxt[BRANCH_NEXT]
nxt_node_in_loop = nxt[BRANCH_NEXT_JUMP]
cond_id = libarch.utils.invert_cond(blk[0])
goto_set = True
if c2:
exit_loop = nxt[BRANCH_NEXT_JUMP]
nxt_node_in_loop = nxt[BRANCH_NEXT]
cond_id = libarch.utils.get_cond(blk[0])
goto_set = True
# goto to exit a loop
if goto_set:
stack.append((ast.parent, list(loops_stack), curr,
exit_loop, else_addr))
stack.append((ast, list(loops_stack), curr,
nxt_node_in_loop, else_addr))
a = Ast_IfGoto(blk[0], cond_id, exit_loop, prefetch)
a.parent = ast
a.level = level
a.idx_in_parent = len(ast.nodes)
ast.add(a)
continue
# and-if
if ctx.gctx.print_andif:
if else_addr == nxt[BRANCH_NEXT_JUMP]:
cond_id = libarch.utils.invert_cond(blk[0])
a = Ast_AndIf(blk[0], cond_id, nxt[BRANCH_NEXT], prefetch)
a.parent = ast
a.idx_in_parent = len(ast.nodes)
ast.add(a)
ast.add(Ast_Goto(nxt[BRANCH_NEXT]))
                    # Add a fake branch: when manage_endpoint later chooses a
                    # branch to continue from an endpoint (meaning all branches
                    # to that endpoint have been reached), it will never pick
                    # fake_br because its level is set to sys.maxsize, so the
                    # fake_br stays invisible in the output.
stack.append((fake_br, list(loops_stack), curr,
nxt[BRANCH_NEXT_JUMP], else_addr))
stack.append((ast, list(loops_stack), curr,
nxt[BRANCH_NEXT], else_addr))
continue
# and-if
if else_addr == nxt[BRANCH_NEXT]:
cond_id = libarch.utils.get_cond(blk[0])
a = Ast_AndIf(blk[0], cond_id, nxt[BRANCH_NEXT_JUMP], prefetch)
a.parent = ast
a.idx_in_parent = len(ast.nodes)
ast.add(a)
ast.add(Ast_Goto(nxt[BRANCH_NEXT_JUMP]))
stack.append((fake_br, list(loops_stack), curr,
nxt[BRANCH_NEXT], else_addr))
stack.append((ast, list(loops_stack), curr,
nxt[BRANCH_NEXT_JUMP], else_addr))
continue
# if-else
endpoint = search_endpoint(ctx, ast, curr, l_set, l_prev_loop, l_start)
force_inv_if = False
if curr in ctx.gctx.db.inverted_cond:
nxt = list(reversed(nxt))
force_inv_if = True
ast_if = Ast_Branch()
ast_if.parent = ast
ast_if.level = level + 1
ast_if.idx_in_parent = len(ast.nodes)
ast_else = Ast_Branch()
ast_else.parent = ast
ast_else.level = level + 1
ast_else.idx_in_parent = len(ast.nodes)
else_addr = nxt[BRANCH_NEXT_JUMP]
if endpoint != -1:
if (l_start, endpoint) not in ctx.gph.false_loops:
                    # If we have already seen this address (for example when the
                    # endpoint is the beginning of the current loop), don't
                    # re-add it to the waiting list.
if endpoint not in visited:
manage_endpoint(ctx, waiting, ast, -1, endpoint, l_set,
l_prev_loop, l_start, False)
else:
endpoint = -1
stack.append((ast_if, list(loops_stack), curr,
nxt[BRANCH_NEXT], else_addr))
if endpoint == -1:
# No endpoint, so it's not useful to have an else-branch
# -> the stack will continue on `ast`
a = Ast_Ifelse(blk[0], ast_else, ast_if, else_addr, prefetch, force_inv_if)
stack.append((ast, list(loops_stack), curr,
nxt[BRANCH_NEXT_JUMP], else_addr))
a.parent = ast
a.level = level + 1
a.idx_in_parent = len(ast.nodes)
ast.add(a)
ast.add(Ast_Goto(else_addr))
elif endpoint == else_addr:
# Branch ast_else will be empty
a = Ast_Ifelse(blk[0], ast_else, ast_if, endpoint, prefetch)
                # continue with the current ast in place of ast_else
                # -> this condition cannot be inverted in the visual mode
stack.append((ast, list(loops_stack), curr,
nxt[BRANCH_NEXT_JUMP], else_addr))
a.parent = ast
a.level = level + 1
a.idx_in_parent = len(ast.nodes)
ast.add(a)
ast.add(Ast_Goto(else_addr))
else:
a = Ast_Ifelse(blk[0], ast_else, ast_if, endpoint, prefetch, force_inv_if)
stack.append((ast_else, list(loops_stack), curr,
nxt[BRANCH_NEXT_JUMP], else_addr))
a.parent = ast
a.level = level + 1
a.idx_in_parent = len(ast.nodes)
ast.add(a)
ast.add(Ast_Goto(endpoint))
else:
ast.add(blk)
stack.append((ast, loops_stack, curr,
nxt[BRANCH_NEXT], else_addr))
ast = ast_head
remove_all_unnecessary_goto(ast)
fix_non_consecutives(ctx, ast)
elapsed = time()
elapsed = elapsed - start
debug__("Ast generated in %fs" % elapsed)
# Process ast
start = time()
for func in libarch.registered:
func(ctx, ast)
elapsed = time()
elapsed = elapsed - start
debug__("Functions for processing ast in %fs" % elapsed)
if ctx.gctx.color:
assign_colors(libarch, ctx, ast)
if waiting:
ast_head.nodes.insert(0, Ast_Comment(""))
ast_head.nodes.insert(0, Ast_Comment(""))
ast_head.nodes.insert(0,
Ast_Comment("WARNING: there is a bug, the output is incomplete !"))
ast_head.nodes.insert(0, Ast_Comment(""))
ast_head.nodes.insert(0, Ast_Comment(""))
return ast, False
return ast, True
| joelpx/plasma | plasma/lib/generate_ast.py | Python | gpl-3.0 | 25,145 |
import collections
class AsaList(object):
@classmethod
def flatten(cls, lst):
"""
Returns Generator of non-iterable values
"""
for x in lst:
if not isinstance(x, collections.Iterable):
yield x
else:
for x in AsaList.flatten(x):
yield x
| asascience-open/paegan | paegan/utils/asalist.py | Python | gpl-3.0 | 358 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .util import Specification
from . import compat
class Field(Specification):
"""
Field object for adding fields to a resource schema.
Currently this is built around the Tabular Data Package.
"""
SPECIFICATION = {'name': compat.str,
'title': compat.str,
'type': compat.str,
'format': compat.str,
'constraints': dict}
REQUIRED = ('name',)
class Constraints(Specification):
"""
Constraints object which can be added to a field in a resource schema
in order to represent the constraints put on that particular field.
"""
SPECIFICATION = {'required': bool,
'minLength': int,
'maxLength': int,
'unique': bool,
'pattern': compat.str,
'minimum': None,
'maximum': None}
class Reference(Specification):
"""
Reference object which can be added to a ForeignKey object to represent
the reference to the other datapackage.
"""
SPECIFICATION = {'datapackage': compat.str,
'resource': compat.str,
'fields': (compat.str, list)}
REQUIRED = ('fields',)
def __setattr__(self, attribute, value):
if attribute == 'fields':
            # We need to make sure all fields are represented by their
            # names if the value is a list
if type(value) == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
modified_value.append(single_value)
elif isinstance(single_value, Field):
modified_value.append(single_value.name)
else:
raise TypeError(
'Field type ({0}) is not supported'.format(
type(single_value)))
value = modified_value
elif type(value) == compat.str:
# We don't need to do anything with a str
pass
elif isinstance(value, Field):
# Set the name from the field as the value
value = value.name
else:
raise TypeError("Type of field ({0}) is not supported".format(
type(value)))
super(Reference, self).__setattr__(attribute, value)
class ForeignKey(Specification):
"""
ForeignKey object which can be added to a resource schema object to
represent a foreign key in another data package.
"""
SPECIFICATION = {'fields': (compat.str, list),
'reference': Reference}
REQUIRED = ('fields', 'reference')
def __setattr__(self, attribute, value):
        # If the attribute is 'reference' we need to check whether a fields
        # attribute exists and verify that the two are consistent, because
        # they must not contradict each other
if attribute == 'reference' and 'fields' in self:
fields = self['fields']
if type(fields) != type(value.fields):
raise TypeError(
'Reference fields must have the same type as fields')
if type(value.fields) == list:
if len(value.fields) != len(fields):
raise ValueError(
'Reference fields and fields are inconsistent')
if attribute == 'fields':
value_type = type(value)
            # We only want to store the names of the fields, so if the value
            # is a list we go through it, pull out the names and use them as
            # the value
if value_type == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
modified_value.append(single_value)
elif isinstance(single_value, Field):
modified_value.append(single_value.name)
else:
raise TypeError(
'Foreign key type ({0}) is not supported'.format(
type(single_value)))
value = modified_value
elif value_type == compat.str:
# We don't need to do anything if the value is a str
pass
elif isinstance(value, Field):
value = value.name
else:
raise TypeError("Type of field ({0}) is not supported".format(
value_type))
# Same check as before about inconsistencies but just the other
# way around
if 'reference' in self:
reference_fields = self['reference'].fields
if type(reference_fields) != value_type:
raise TypeError(
'Fields must have the same type as Reference fields')
if type(reference_fields) == list:
if len(reference_fields) != len(value):
raise ValueError(
'Reference fields and fields are inconsistent')
super(ForeignKey, self).__setattr__(attribute, value)
class Schema(Specification):
"""
Schema object which holds the representation of the schema for a
Tabular Data Package (using the JSON Table Schema protocol). The
schema can be used just like a dictionary which means it is ready
for json serialization and export as part of a data package
descriptor (when added to a resource).
"""
SPECIFICATION = {'fields': list,
'primaryKey': (compat.str, list),
'foreignKeys': list}
def __init__(self, *args, **kwargs):
        # We need to initialize an empty fields array (the spec requires the
        # field, but rather than demand it from the caller we create it)
self['fields'] = []
# We add the fields using the internal method so we can do
# validation of each field
self.add_fields(kwargs.pop('fields', []))
super(Schema, self).__init__(self, *args, **kwargs)
def __setattr__(self, attribute, value):
if attribute == 'primaryKey' and value is not None:
# Primary Keys must be a reference to existing fields so we
# need to check if the primary key is in the fields array
field_names = [f.name for f in self.get('fields', [])]
if type(value) == list:
modified_value = []
for single_value in value:
if type(single_value) == compat.str:
if single_value in field_names:
modified_value.append(single_value)
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
single_value))
elif isinstance(single_value, Field):
if single_value.name in field_names:
modified_value.append(single_value.name)
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
single_value.name))
else:
raise TypeError(
'primaryKey type ({0}) is not supported'.format(
type(single_value)))
value = modified_value
elif type(value) == compat.str:
if value not in field_names:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
value))
elif isinstance(value, Field):
if value.name in field_names:
value = value.name
else:
raise AttributeError(
"Unknown '{0}' cannot be primaryKey".format(
value.name))
else:
raise TypeError('Primary Key type ({0}) not supported'.format(
type(value)))
super(Schema, self).__setattr__(attribute, value)
def add_field(self, field):
"""
Adds a field to the resource schema
:param ~Field field: A Field instance containing the field to be
appended to the schema.
"""
if isinstance(field, Field):
self['fields'].append(field)
elif type(field) == dict:
self['fields'].append(Field(field))
else:
raise TypeError("Type of parameter field is not supported.")
def add_fields(self, fields):
"""
Adds fields to the resource schema
:param list fields: A list of Field instances which should be
appended (extend) to the resource schema fields.
"""
# We loop through the fields list to make sure all elements
# in the list are of the proper type
for field in fields:
self.add_field(field)
def add_foreign_key(self, foreign_key):
"""
Adds a foreign key to the resource schema.
:param ~ForeignKey foreign_key: A ForeignKey object which keeps
track of a foreign key relationship to another data package.
"""
# We can only accept ForeignKey objects
if not isinstance(foreign_key, ForeignKey):
raise TypeError("Foreign Key type is not supported")
# ForeignKey fields must be a schema field
field_names = [f.name for f in self.get('fields', [])]
for field in foreign_key.fields:
if field not in field_names:
raise ValueError(
"Foreign key field '{0}' is not in schema fields".format(
field))
# Append the ForeignKey to the foreignKeys object or create it if it
# doesn't exist
foreign_keys = dict.get(self, 'foreignKeys', [])
foreign_keys.append(foreign_key)
self['foreignKeys'] = foreign_keys
def add_foreign_keys(self, foreign_keys):
"""
Adds foreign keys to the resource schema
:param list foreign_keys: A list of ForeignKey instances which should
be appended (extend) to the resource schema fields or create a
foreignKeys attribute if it doesn't exist.
"""
# We loop through the foreign keys list to make sure all elements
# in the list are of the proper type and validate
for foreign_key in foreign_keys:
self.add_foreign_key(foreign_key)
| tryggvib/datapackage | datapackage/schema.py | Python | gpl-3.0 | 11,045 |
__author__ = 'Sun'
from sandbox.dynamic_title.creator.char_corpus import CharacterCorpus
import cPickle
import click
@click.command()
@click.argument("text_file", type=click.File(mode='r', encoding='gb18030'))
@click.argument("char_cropus_file", type=click.File(mode='wb'))
def make_char_corpus(text_file, char_cropus_file):
corpus = CharacterCorpus()
corpus.build(text_file)
cPickle.dump(corpus, char_cropus_file, protocol=cPickle.HIGHEST_PROTOCOL)
if __name__ == "__main__":
make_char_corpus() | rudaoshi/neural_machine | char_rnn/make_char_corpus.py | Python | gpl-3.0 | 520 |
# Part of Cosmos by OpenGenus Foundation
def replace_0_5_iterative(user_input):
modified = []
for i in user_input:
if i == "0":
modified.append("5")
else:
modified.append(i)
return "".join(modified)
def replace_0_5_pythonic(user_input):
return user_input.replace("0", "5")
user_input = input("Enter the number: ")
print("\n----- Iterative Approach -----")
new_str = replace_0_5_iterative(user_input)
print("Modified number: " + new_str)
print("\n----- Python Replace Approach -----")
new_str = replace_0_5_pythonic(user_input)
print("Modified number: " + new_str)
| OpenGenus/cosmos | code/mathematical_algorithms/src/replace_0_with_5/replace_0_with_5.py | Python | gpl-3.0 | 629 |
# -*- coding: utf-8 -*-
'''Core plugins unit tests'''
import os
import tempfile
import unittest
import time
from contextlib import contextmanager
from tempfile import mkdtemp
from shutil import rmtree
from hashlib import md5
import gzip_cache
@contextmanager
def temporary_folder():
"""creates a temporary folder, return it and delete it afterwards.
This allows to do something like this in tests:
>>> with temporary_folder() as d:
# do whatever you want
"""
tempdir = mkdtemp()
try:
yield tempdir
finally:
rmtree(tempdir)
class TestGzipCache(unittest.TestCase):
def test_should_compress(self):
user_exclude_types = ()
# Some filetypes should compress and others shouldn't.
self.assertTrue(gzip_cache.should_compress('foo.html', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('bar.css', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('baz.js', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('foo.txt', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.gz', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('bar.png', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('baz.mp3', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.mov', user_exclude_types))
user_exclude_types = ('.html', '.xyz')
self.assertFalse(gzip_cache.should_compress('foo.html', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('bar.xyz', user_exclude_types))
self.assertFalse(gzip_cache.should_compress('foo.gz', user_exclude_types))
self.assertTrue(gzip_cache.should_compress('baz.js', user_exclude_types))
def test_should_overwrite(self):
# Default to false if GZIP_CACHE_OVERWRITE is not set
settings = { }
self.assertFalse(gzip_cache.should_overwrite(settings))
settings = { 'GZIP_CACHE_OVERWRITE': False }
self.assertFalse(gzip_cache.should_overwrite(settings))
settings = { 'GZIP_CACHE_OVERWRITE': True }
self.assertTrue(gzip_cache.should_overwrite(settings))
def test_creates_gzip_file(self):
# A file matching the input filename with a .gz extension is created.
# The plugin walks over the output content after the finalized signal
# so it is safe to assume that the file exists (otherwise walk would
# not report it). Therefore, create a dummy file to use.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
with open(a_html_filename, 'w') as f:
f.write('A' * 24) # under this length, compressing is useless and create_gzip_file will not create any file
gzip_cache.create_gzip_file(a_html_filename, False)
self.assertTrue(os.path.exists(a_html_filename + '.gz'))
def test_creates_same_gzip_file(self):
# Should create the same gzip file from the same contents.
# gzip will create a slightly different file because it includes
# a timestamp in the compressed file by default. This can cause
# problems for some caching strategies.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
with open(a_html_filename, 'w') as f:
f.write('A' * 24) # under this length, compressing is useless and create_gzip_file will not create any file
a_gz_filename = a_html_filename + '.gz'
gzip_cache.create_gzip_file(a_html_filename, False)
gzip_hash = get_md5(a_gz_filename)
time.sleep(1)
gzip_cache.create_gzip_file(a_html_filename, False)
self.assertEqual(gzip_hash, get_md5(a_gz_filename))
def test_overwrites_gzip_file(self):
# A file matching the input filename with a .gz extension is not created.
# The plugin walks over the output content after the finalized signal
# so it is safe to assume that the file exists (otherwise walk would
# not report it). Therefore, create a dummy file to use.
with temporary_folder() as tempdir:
_, a_html_filename = tempfile.mkstemp(suffix='.html', dir=tempdir)
gzip_cache.create_gzip_file(a_html_filename, True)
self.assertFalse(os.path.exists(a_html_filename + '.gz'))
def get_md5(filepath):
with open(filepath, 'rb') as fh:
return md5(fh.read()).hexdigest()
| mikitex70/pelican-plugins | gzip_cache/test_gzip_cache.py | Python | agpl-3.0 | 4,627 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import base64
import logging
import re
from urllib import urlencode
from urlparse import urljoin
from openerp import tools
from openerp import SUPERUSER_ID
from openerp.osv import fields, osv
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class mail_mail(osv.Model):
""" Model holding RFC2822 email messages to send. This model also provides
facilities to queue and send new email messages. """
_name = 'mail.mail'
_description = 'Outgoing Mails'
_inherits = {'mail.message': 'mail_message_id'}
_order = 'id desc'
_columns = {
'mail_message_id': fields.many2one('mail.message', 'Message', required=True, ondelete='cascade'),
'mail_server_id': fields.many2one('ir.mail_server', 'Outgoing mail server', readonly=1),
'state': fields.selection([
('outgoing', 'Outgoing'),
('sent', 'Sent'),
('received', 'Received'),
('exception', 'Delivery Failed'),
('cancel', 'Cancelled'),
], 'Status', readonly=True),
'auto_delete': fields.boolean('Auto Delete',
help="Permanently delete this email after sending it, to save space"),
'references': fields.text('References', help='Message references, such as identifiers of previous messages', readonly=1),
'email_from': fields.char('From', help='Message sender, taken from user preferences.'),
'email_to': fields.text('To', help='Message recipients'),
'email_cc': fields.char('Cc', help='Carbon copy message recipients'),
'reply_to': fields.char('Reply-To', help='Preferred response address for the message'),
'body_html': fields.text('Rich-text Contents', help="Rich-text/HTML message"),
# Auto-detected based on create() - if 'mail_message_id' was passed then this mail is a notification
# and during unlink() we will not cascade delete the parent and its attachments
'notification': fields.boolean('Is Notification')
}
def _get_default_from(self, cr, uid, context=None):
this = self.pool.get('res.users').browse(cr, uid, uid, context=context)
if this.alias_domain:
return '%s@%s' % (this.alias_name, this.alias_domain)
elif this.email:
return this.email
raise osv.except_osv(_('Invalid Action!'), _("Unable to send email, please configure the sender's email address or alias."))
_defaults = {
'state': 'outgoing',
'email_from': lambda self, cr, uid, ctx=None: self._get_default_from(cr, uid, ctx),
}
def default_get(self, cr, uid, fields, context=None):
# protection for `default_type` values leaking from menu action context (e.g. for invoices)
# To remove when automatic context propagation is removed in web client
if context and context.get('default_type') and context.get('default_type') not in self._all_columns['type'].column.selection:
context = dict(context, default_type=None)
return super(mail_mail, self).default_get(cr, uid, fields, context=context)
def create(self, cr, uid, values, context=None):
if 'notification' not in values and values.get('mail_message_id'):
values['notification'] = True
return super(mail_mail, self).create(cr, uid, values, context=context)
def unlink(self, cr, uid, ids, context=None):
# cascade-delete the parent message for all mails that are not created for a notification
ids_to_cascade = self.search(cr, uid, [('notification', '=', False), ('id', 'in', ids)])
parent_msg_ids = [m.mail_message_id.id for m in self.browse(cr, uid, ids_to_cascade, context=context)]
res = super(mail_mail, self).unlink(cr, uid, ids, context=context)
self.pool.get('mail.message').unlink(cr, uid, parent_msg_ids, context=context)
return res
def mark_outgoing(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'outgoing'}, context=context)
def cancel(self, cr, uid, ids, context=None):
return self.write(cr, uid, ids, {'state': 'cancel'}, context=context)
def process_email_queue(self, cr, uid, ids=None, context=None):
"""Send immediately queued messages, committing after each
message is sent - this is not transactional and should
not be called during another transaction!
        :param list ids: optional list of email ids to send. If passed
no search is performed, and these ids are used
instead.
:param dict context: if a 'filters' key is present in context,
this value will be used as an additional
filter to further restrict the outgoing
messages to send (by default all 'outgoing'
messages are sent).
"""
if context is None:
context = {}
if not ids:
filters = ['&', ('state', '=', 'outgoing'), ('type', '=', 'email')]
if 'filters' in context:
filters.extend(context['filters'])
ids = self.search(cr, uid, filters, context=context)
res = None
try:
# Force auto-commit - this is meant to be called by
# the scheduler, and we can't allow rolling back the status
# of previously sent emails!
res = self.send(cr, uid, ids, auto_commit=True, context=context)
except Exception:
_logger.exception("Failed processing mail queue")
return res
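    # Illustrative call (editor's note): a scheduled cron job would typically
    # invoke this as, e.g.,
    #   self.pool.get('mail.mail').process_email_queue(cr, uid,
    #       context={'filters': [('date', '<=', limit_date)]})
    # where 'limit_date' is a placeholder; the 'filters' context key is
    # documented in the docstring above.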
def _postprocess_sent_message(self, cr, uid, mail, context=None):
"""Perform any post-processing necessary after sending ``mail``
successfully, including deleting it completely along with its
attachment if the ``auto_delete`` flag of the mail was set.
Overridden by subclasses for extra post-processing behaviors.
:param browse_record mail: the mail that was just sent
:return: True
"""
if mail.auto_delete:
# done with SUPERUSER_ID to avoid giving large unlink access rights
self.unlink(cr, SUPERUSER_ID, [mail.id], context=context)
return True
def send_get_mail_subject(self, cr, uid, mail, force=False, partner=None, context=None):
""" If subject is void and record_name defined: '<Author> posted on <Resource>'
:param boolean force: force the subject replacement
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if force or (not mail.subject and mail.model and mail.res_id):
return 'Re: %s' % (mail.record_name)
return mail.subject
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email body. The main purpose of this method
is to be inherited by Portal, to add a link for signing in, in
each notification email a partner receives.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = mail.body_html
# partner is a user, link to a related document (incentive to install portal)
if partner and partner.user_ids and mail.model and mail.res_id \
and self.check_access_rights(cr, partner.user_ids[0].id, 'read', raise_exception=False):
related_user = partner.user_ids[0]
try:
self.pool.get(mail.model).check_access_rule(cr, related_user.id, [mail.res_id], 'read', context=context)
base_url = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
# the parameters to encode for the query and fragment part of url
query = {'db': cr.dbname}
fragment = {
'login': related_user.login,
'model': mail.model,
'id': mail.res_id,
}
url = urljoin(base_url, "?%s#%s" % (urlencode(query), urlencode(fragment)))
text = _("""<p>Access this document <a href="%s">directly in OpenERP</a></p>""") % url
body = tools.append_content_to_html(body, ("<div><p>%s</p></div>" % text), plaintext=False)
except except_orm, e:
pass
return body
def send_get_mail_reply_to(self, cr, uid, mail, partner=None, context=None):
""" Return a specific ir_email reply_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
if mail.reply_to:
return mail.reply_to
email_reply_to = False
# if model and res_id: try to use ``message_get_reply_to`` that returns the document alias
if mail.model and mail.res_id and hasattr(self.pool.get(mail.model), 'message_get_reply_to'):
email_reply_to = self.pool.get(mail.model).message_get_reply_to(cr, uid, [mail.res_id], context=context)[0]
# no alias reply_to -> reply_to will be the email_from, only the email part
if not email_reply_to and mail.email_from:
emails = tools.email_split(mail.email_from)
if emails:
email_reply_to = emails[0]
# format 'Document name <email_address>'
if email_reply_to and mail.model and mail.res_id:
document_name = self.pool.get(mail.model).name_get(cr, SUPERUSER_ID, [mail.res_id], context=context)[0]
if document_name:
# sanitize document name
sanitized_doc_name = re.sub(r'[^\w+.]+', '-', document_name[1])
# generate reply to
email_reply_to = _('"Followers of %s" <%s>') % (sanitized_doc_name, email_reply_to)
return email_reply_to
def send_get_email_dict(self, cr, uid, mail, partner=None, context=None):
""" Return a dictionary for specific email values, depending on a
partner, or generic to the whole recipients given by mail.email_to.
:param browse_record mail: mail.mail browse_record
:param browse_record partner: specific recipient partner
"""
body = self.send_get_mail_body(cr, uid, mail, partner=partner, context=context)
subject = self.send_get_mail_subject(cr, uid, mail, partner=partner, context=context)
reply_to = self.send_get_mail_reply_to(cr, uid, mail, partner=partner, context=context)
body_alternative = tools.html2plaintext(body)
# generate email_to, heuristic:
# 1. if 'partner' is specified and there is a related document: Followers of 'Doc' <email>
# 2. if 'partner' is specified, but no related document: Partner Name <email>
        # 3. fallback on mail.email_to, which we split into a list of email addresses
if partner and mail.record_name:
sanitized_record_name = re.sub(r'[^\w+.]+', '-', mail.record_name)
email_to = [_('"Followers of %s" <%s>') % (sanitized_record_name, partner.email)]
elif partner:
email_to = ['%s <%s>' % (partner.name, partner.email)]
else:
email_to = tools.email_split(mail.email_to)
return {
'body': body,
'body_alternative': body_alternative,
'subject': subject,
'email_to': email_to,
'reply_to': reply_to,
}
def send(self, cr, uid, ids, auto_commit=False, recipient_ids=None, context=None):
""" Sends the selected emails immediately, ignoring their current
state (mails that have already been sent should not be passed
unless they should actually be re-sent).
Emails successfully delivered are marked as 'sent', and those
            that fail to be delivered are marked as 'exception', and the
corresponding error mail is output in the server logs.
:param bool auto_commit: whether to force a commit of the mail status
after sending each mail (meant only for scheduler processing);
should never be True during normal transactions (default: False)
:param list recipient_ids: specific list of res.partner recipients.
            If set, one email is sent to each partner. It is possible to
tune the sent email through ``send_get_mail_body`` and ``send_get_mail_subject``.
If not specified, one email is sent to mail_mail.email_to.
:return: True
"""
ir_mail_server = self.pool.get('ir.mail_server')
for mail in self.browse(cr, uid, ids, context=context):
try:
# handle attachments
attachments = []
for attach in mail.attachment_ids:
attachments.append((attach.datas_fname, base64.b64decode(attach.datas)))
# specific behavior to customize the send email for notified partners
email_list = []
if recipient_ids:
for partner in self.pool.get('res.partner').browse(cr, SUPERUSER_ID, recipient_ids, context=context):
email_list.append(self.send_get_email_dict(cr, uid, mail, partner=partner, context=context))
else:
email_list.append(self.send_get_email_dict(cr, uid, mail, context=context))
# build an RFC2822 email.message.Message object and send it without queuing
for email in email_list:
msg = ir_mail_server.build_email(
email_from = mail.email_from,
email_to = email.get('email_to'),
subject = email.get('subject'),
body = email.get('body'),
body_alternative = email.get('body_alternative'),
email_cc = tools.email_split(mail.email_cc),
reply_to = email.get('reply_to'),
attachments = attachments,
message_id = mail.message_id,
references = mail.references,
object_id = mail.res_id and ('%s-%s' % (mail.res_id, mail.model)),
subtype = 'html',
subtype_alternative = 'plain')
res = ir_mail_server.send_email(cr, uid, msg,
mail_server_id=mail.mail_server_id.id, context=context)
if res:
mail.write({'state': 'sent', 'message_id': res})
mail_sent = True
else:
mail.write({'state': 'exception'})
mail_sent = False
# /!\ can't use mail.state here, as mail.refresh() will cause an error
# see revid:[email protected] in 6.1
if mail_sent:
self._postprocess_sent_message(cr, uid, mail, context=context)
except Exception:
_logger.exception('failed sending mail.mail %s', mail.id)
mail.write({'state': 'exception'})
            if auto_commit:
cr.commit()
return True
| Johnzero/OE7 | openerp/addons/mail/mail_mail.py | Python | agpl-3.0 | 16,471 |
"""
Unit tests for user messages.
"""
import warnings
import ddt
from django.contrib.messages.middleware import MessageMiddleware
from django.test import RequestFactory, TestCase
from common.test.utils import normalize_repr
from openedx.core.djangolib.markup import HTML, Text
from common.djangoapps.student.tests.factories import UserFactory
from ..user_messages import PageLevelMessages, UserMessageType
TEST_MESSAGE = 'Test message'
@ddt.ddt
class UserMessagesTestCase(TestCase):
"""
Unit tests for page level user messages.
"""
def setUp(self):
super().setUp()
self.student = UserFactory.create()
self.request = RequestFactory().request()
self.request.session = {}
self.request.user = self.student
MessageMiddleware().process_request(self.request)
@ddt.data(
('Rock & Roll', '<div class="message-content">Rock & Roll</div>'),
(Text('Rock & Roll'), '<div class="message-content">Rock & Roll</div>'),
(HTML('<p>Hello, world!</p>'), '<div class="message-content"><p>Hello, world!</p></div>')
)
@ddt.unpack
def test_message_escaping(self, message, expected_message_html):
"""
Verifies that a user message is escaped correctly.
"""
PageLevelMessages.register_user_message(self.request, UserMessageType.INFO, message)
messages = list(PageLevelMessages.user_messages(self.request))
assert len(messages) == 1
assert messages[0].message_html == expected_message_html
@ddt.data(
(UserMessageType.ERROR, 'alert-danger', 'fa fa-warning'),
(UserMessageType.INFO, 'alert-info', 'fa fa-bullhorn'),
(UserMessageType.SUCCESS, 'alert-success', 'fa fa-check-circle'),
(UserMessageType.WARNING, 'alert-warning', 'fa fa-warning'),
)
@ddt.unpack
def test_message_icon(self, message_type, expected_css_class, expected_icon_class):
"""
Verifies that a user message returns the correct CSS and icon classes.
"""
PageLevelMessages.register_user_message(self.request, message_type, TEST_MESSAGE)
messages = list(PageLevelMessages.user_messages(self.request))
assert len(messages) == 1
assert messages[0].css_class == expected_css_class
assert messages[0].icon_class == expected_icon_class
@ddt.data(
(normalize_repr(PageLevelMessages.register_error_message), UserMessageType.ERROR),
(normalize_repr(PageLevelMessages.register_info_message), UserMessageType.INFO),
(normalize_repr(PageLevelMessages.register_success_message), UserMessageType.SUCCESS),
(normalize_repr(PageLevelMessages.register_warning_message), UserMessageType.WARNING),
)
@ddt.unpack
def test_message_type(self, register_message_function, expected_message_type):
"""
Verifies that each user message function returns the correct type.
"""
register_message_function(self.request, TEST_MESSAGE)
messages = list(PageLevelMessages.user_messages(self.request))
assert len(messages) == 1
assert messages[0].type == expected_message_type
def global_message_count(self):
"""
Count the number of times the global message appears in the user messages.
"""
expected_html = """<div class="message-content">I <3 HTML-escaping</div>"""
messages = list(PageLevelMessages.user_messages(self.request))
return len(list(msg for msg in messages if expected_html in msg.message_html))
def test_global_message_off_by_default(self):
"""Verifies feature toggle."""
with self.settings(
GLOBAL_NOTICE_ENABLED=False,
GLOBAL_NOTICE_MESSAGE="I <3 HTML-escaping",
GLOBAL_NOTICE_TYPE='WARNING'
):
# Missing when feature disabled
assert self.global_message_count() == 0
def test_global_message_persistent(self):
"""Verifies global message is always included, when enabled."""
with self.settings(
GLOBAL_NOTICE_ENABLED=True,
GLOBAL_NOTICE_MESSAGE="I <3 HTML-escaping",
GLOBAL_NOTICE_TYPE='WARNING'
):
# Present with no other setup
assert self.global_message_count() == 1
# Present when other messages are present
PageLevelMessages.register_user_message(self.request, UserMessageType.INFO, "something else")
assert self.global_message_count() == 1
def test_global_message_error_isolation(self):
"""Verifies that any setting errors don't break the page, or other messages."""
with self.settings(
GLOBAL_NOTICE_ENABLED=True,
GLOBAL_NOTICE_MESSAGE=ThrowingMarkup(), # force an error
GLOBAL_NOTICE_TYPE='invalid'
):
PageLevelMessages.register_user_message(self.request, UserMessageType.WARNING, "something else")
# Doesn't throw, or even interfere with other messages,
# when given invalid settings
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
messages = list(PageLevelMessages.user_messages(self.request))
assert len(w) == 1
assert str(w[0].message) == "Could not register global notice: Exception('Some random error')"
assert len(messages) == 1
assert "something else" in messages[0].message_html
class ThrowingMarkup:
"""Class that raises an exception if markupsafe tries to get HTML from it."""
def __html__(self):
raise Exception("Some random error")
| eduNEXT/edx-platform | openedx/core/djangoapps/util/tests/test_user_messages.py | Python | agpl-3.0 | 5,683 |
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from . import CuraProfileReader
from UM.i18n import i18nCatalog
catalog = i18nCatalog("cura")
def getMetaData():
return {
"plugin": {
"name": catalog.i18nc("@label", "Cura Profile Reader"),
"author": "Ultimaker",
"version": "1.0",
"description": catalog.i18nc("@info:whatsthis", "Provides support for importing Cura profiles."),
"api": 3
},
"profile_reader": [
{
"extension": "curaprofile",
"description": catalog.i18nc("@item:inlistbox", "Cura Profile")
}
]
}
def register(app):
return { "profile_reader": CuraProfileReader.CuraProfileReader() }
| senttech/Cura | plugins/CuraProfileReader/__init__.py | Python | agpl-3.0 | 807 |
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2017, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
import time
from django import forms
from django.conf import settings
from django.core.management import call_command
from django.http.response import HttpResponse, JsonResponse
from django.utils.translation import ugettext_lazy as _
from django.views.generic import FormView
from six import StringIO
from shuup.addons.manager import get_enabled_addons
from shuup.addons.reloader import get_reload_method_classes
from shuup.apps.settings import reload_apps
from shuup.utils.excs import Problem
from shuup.utils.iterables import first
class ReloadMethodForm(forms.Form):
def get_viable_reload_methods(self):
for klass in get_reload_method_classes():
rm = klass()
if rm.is_viable():
yield rm
def __init__(self, **kwargs):
super(ReloadMethodForm, self).__init__(**kwargs)
self.reload_methods = list(self.get_viable_reload_methods())
if not self.reload_methods:
raise Problem(_("There are no viable reload methods available. Please contact your system administrator."))
self.fields["reload_method"] = forms.ChoiceField(
choices=[(rm.identifier, rm.title) for rm in self.reload_methods],
label=_("Reload Method"),
initial=self.reload_methods[0].identifier,
widget=forms.RadioSelect
)
def get_selected_reload_method(self):
return first(rm for rm in self.reload_methods if rm.identifier == self.cleaned_data["reload_method"])
def finalize_installation_for_enabled_apps():
out = StringIO()
enabled_addons = get_enabled_addons(settings.SHUUP_ENABLED_ADDONS_FILE)
new_apps = [app for app in enabled_addons if app not in settings.INSTALLED_APPS]
if new_apps:
out.write("Enabling new addons: %s" % new_apps)
settings.INSTALLED_APPS += type(settings.INSTALLED_APPS)(new_apps)
reload_apps()
call_command("migrate", "--noinput", "--no-color", stdout=out)
call_command("collectstatic", "--noinput", "--no-color", stdout=out)
return out.getvalue()
class ReloadView(FormView):
template_name = "shuup/admin/addons/reload.jinja"
form_class = ReloadMethodForm
def form_valid(self, form):
reloader = form.get_selected_reload_method()
reloader.execute()
return HttpResponse(_("Reloading.")) # This might not reach the user...
def get(self, request, *args, **kwargs):
if request.GET.get("ping"):
return JsonResponse({"pong": time.time()})
elif request.GET.get("finalize"):
return JsonResponse({"message": finalize_installation_for_enabled_apps()})
return super(ReloadView, self).get(request, *args, **kwargs)
| suutari-ai/shoop | shuup/addons/admin_module/views/reload.py | Python | agpl-3.0 | 3,004 |
"""Offers a simple XML-RPC dispatcher for django_xmlrpc
Author::
Graham Binns
Credit must go to Brendan W. McAdams <[email protected]>, who
posted the original SimpleXMLRPCDispatcher to the Django wiki:
http://code.djangoproject.com/wiki/XML-RPC
New BSD License
===============
Copyright (c) 2007, Graham Binns http://launchpad.net/~codedragon
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the <ORGANIZATION> nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# This file is needed to run XMLRPC
from inspect import getargspec
from SimpleXMLRPCServer import SimpleXMLRPCDispatcher
from django.conf import settings
# If we need to debug, now we know
DEBUG = hasattr(settings, 'XMLRPC_DEBUG') and settings.XMLRPC_DEBUG
class DjangoXMLRPCDispatcher(SimpleXMLRPCDispatcher):
"""A simple XML-RPC dispatcher for Django.
    Subclasses SimpleXMLRPCServer.SimpleXMLRPCDispatcher for the purpose of
overriding certain built-in methods (it's nicer than monkey-patching them,
that's for sure).
"""
def system_methodSignature(self, method):
"""Returns the signature details for a specified method
method
The name of the XML-RPC method to get the details for
"""
# See if we can find the method in our funcs dict
# TODO: Handle this better: We really should return something more
# formal than an AttributeError
func = self.funcs[method]
try:
sig = func._xmlrpc_signature
        except AttributeError:
sig = {
'returns': 'string',
'args': ['string' for arg in getargspec(func)[0]],
}
return [sig['returns']] + sig['args']
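# Illustrative sketch (editor's addition, not part of the original module):
# a function exposed over XML-RPC can advertise its signature through the
# '_xmlrpc_signature' attribute read above. The function below is a
# placeholder used only for demonstration.
def _example_echo(text):
    return text
_example_echo._xmlrpc_signature = {'returns': 'string', 'args': ['string']}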
| gnowgi/gnowsys-studio | gstudio/xmlrpc/dispatcher.py | Python | agpl-3.0 | 3,078 |
# -*- encoding: utf-8 -*-
###########################################################################
# Module Writen to OpenERP, Open Source Management Solution
# Copyright (C) OpenERP Venezuela (<http://openerp.com.ve>).
# All Rights Reserved
###############Credits######################################################
# Coded by: Humberto Arocha [email protected]
# Angelica Barrios [email protected]
# Jordi Esteve <[email protected]>
# Planified by: Humberto Arocha
# Finance by: LUBCAN COL S.A.S http://www.lubcancol.com
# Audited by: Humberto Arocha [email protected]
#############################################################################
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
import wizard
| kailIII/emaresa | trunk.pe/account_financial_report/wizard/__init__.py | Python | agpl-3.0 | 1,490 |
# Copyright 2016-2017 Jairo Llopis <[email protected]>
# Copyright 2016 Tecnativa - Vicent Cubells
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
import logging
from lxml import etree, html
from odoo import api, models
_logger = logging.getLogger(__name__)
class IrFieldsConverter(models.AbstractModel):
_inherit = "ir.fields.converter"
@api.model
def text_from_html(self, html_content, max_words=None, max_chars=None,
ellipsis=u"…", fail=False):
"""Extract text from an HTML field in a generator.
:param str html_content:
HTML contents from where to extract the text.
:param int max_words:
Maximum amount of words allowed in the resulting string.
:param int max_chars:
Maximum amount of characters allowed in the resulting string. If
you apply this limit, beware that the last word could get cut in an
unexpected place.
:param str ellipsis:
Character(s) to be appended to the end of the resulting string if
it gets truncated after applying limits set in :param:`max_words`
or :param:`max_chars`. If you want nothing applied, just set an
empty string.
:param bool fail:
If ``True``, exceptions will be raised. Otherwise, an empty string
will be returned on failure.
"""
# Parse HTML
try:
doc = html.fromstring(html_content)
except (TypeError, etree.XMLSyntaxError, etree.ParserError):
if fail:
raise
else:
_logger.exception("Failure parsing this HTML:\n%s",
html_content)
return ""
# Get words
words = u"".join(doc.xpath("//text()")).split()
# Truncate words
suffix = max_words and len(words) > max_words
if max_words:
words = words[:max_words]
# Get text
text = u" ".join(words)
# Truncate text
suffix = suffix or max_chars and len(text) > max_chars
if max_chars:
text = text[:max_chars - (len(ellipsis) if suffix else 0)].strip()
# Append ellipsis if needed
if suffix:
text += ellipsis
return text
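# Illustrative sketch (editor's addition, not part of the original module):
# building a short preview from HTML with the converter above. The registry
# lookup via 'env' is the usual Odoo pattern and is assumed here.
def _example_preview(env):
    converter = env["ir.fields.converter"]
    # With max_words=1 the text is truncated after the first word and the
    # ellipsis is appended, e.g. u"Hello" followed by the ellipsis character.
    return converter.text_from_html("<p>Hello <b>world</b></p>", max_words=1)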
| brain-tec/server-tools | html_text/models/ir_fields_converter.py | Python | agpl-3.0 | 2,350 |
# Copyright (C) 2007, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""
Test the style of toggle and radio buttons inside a palette. The buttons
contain only an icon and should be rendered similarly to the toolbar
controls. Ticket #2855.
"""
from gi.repository import Gtk
from sugar3.graphics.palette import Palette
from sugar3.graphics.icon import Icon
from sugar3.graphics import style
import common
test = common.TestPalette()
palette = Palette('Test radio and toggle')
test.set_palette(palette)
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
toggle = Gtk.ToggleButton()
icon = Icon(icon_name='go-previous', pixel_size=style.STANDARD_ICON_SIZE)
toggle.set_image(icon)
box.pack_start(toggle, False, False, 0)
toggle.show()
radio = Gtk.RadioButton()
icon = Icon(icon_name='go-next', pixel_size=style.STANDARD_ICON_SIZE)
radio.set_image(icon)
radio.set_mode(False)
box.pack_start(radio, False, False, 0)
radio.show()
palette.set_content(box)
box.show()
if __name__ == '__main__':
common.main(test)
| quozl/sugar-toolkit-gtk3 | examples/ticket2855.py | Python | lgpl-2.1 | 1,719 |
# -*- Mode: Python -*-
# vi:si:et:sw=4:sts=4:ts=4
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008,2009 Fluendo, S.L.
# Copyright (C) 2010,2011 Flumotion Services, S.A.
# All rights reserved.
#
# This file may be distributed and/or modified under the terms of
# the GNU Lesser General Public License version 2.1 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.LGPL" in the source distribution for more information.
#
# Headers in this file shall remain intact.
import gobject
import gst
import gst.interfaces
from twisted.internet.threads import deferToThread
from twisted.internet import defer
from flumotion.common import gstreamer, errors, log, messages
from flumotion.common.i18n import N_, gettexter
from flumotion.twisted import defer as fdefer
from flumotion.worker.checks import check
__version__ = "$Rev$"
T_ = gettexter()
class BusResolution(fdefer.Resolution):
pipeline = None
signal_id = None
def cleanup(self):
if self.pipeline:
if self.signal_id:
self.pipeline.get_bus().remove_signal_watch()
self.pipeline.get_bus().disconnect(self.signal_id)
self.signal_id = None
self.pipeline.set_state(gst.STATE_NULL)
self.pipeline = None
def do_element_check(pipeline_str, element_name, check_proc, state=None,
set_state_deferred=False):
"""
Parse the given pipeline and set it to the given state.
When the bin reaches that state, perform the given check function on the
element with the given name.
@param pipeline_str: description of the pipeline used to test
@param element_name: name of the element being checked
@param check_proc: a function to call with the GstElement as argument.
@param state: an unused keyword parameter that will be removed when
support for GStreamer 0.8 is dropped.
@param set_state_deferred: a flag to say whether the set_state is run in
a deferToThread
@type set_state_deferred: bool
@returns: a deferred that will fire with the result of check_proc, or
fail.
@rtype: L{twisted.internet.defer.Deferred}
"""
def run_check(pipeline, resolution):
element = pipeline.get_by_name(element_name)
try:
retval = check_proc(element)
resolution.callback(retval)
except check.CheckProcError, e:
log.debug('check', 'CheckProcError when running %r: %r',
check_proc, e.data)
resolution.errback(errors.RemoteRunError(e.data))
except Exception, e:
log.debug('check', 'Unhandled exception while running %r: %r',
check_proc, e)
resolution.errback(errors.RemoteRunError(
log.getExceptionMessage(e)))
# set pipeline state to NULL so worker does not consume
# unnecessary resources
pipeline.set_state(gst.STATE_NULL)
def message_rcvd(bus, message, pipeline, resolution):
t = message.type
if t == gst.MESSAGE_STATE_CHANGED:
if message.src == pipeline:
old, new, pending = message.parse_state_changed()
if new == gst.STATE_PLAYING:
run_check(pipeline, resolution)
elif t == gst.MESSAGE_ERROR:
gerror, debug = message.parse_error()
# set pipeline state to NULL so worker does not consume
# unnecessary resources
pipeline.set_state(gst.STATE_NULL)
resolution.errback(errors.GStreamerGstError(
message.src, gerror, debug))
elif t == gst.MESSAGE_EOS:
resolution.errback(errors.GStreamerError(
"Unexpected end of stream"))
else:
log.debug('check', 'message: %s: %s:' % (
message.src.get_path_string(),
message.type.value_nicks[1]))
if message.structure:
log.debug('check', 'message: %s' %
message.structure.to_string())
else:
log.debug('check', 'message: (no structure)')
return True
resolution = BusResolution()
log.debug('check', 'parsing pipeline %s' % pipeline_str)
try:
pipeline = gst.parse_launch(pipeline_str)
log.debug('check', 'parsed pipeline %s' % pipeline_str)
except gobject.GError, e:
resolution.errback(errors.GStreamerError(e.message))
return resolution.d
bus = pipeline.get_bus()
bus.add_signal_watch()
signal_id = bus.connect('message', message_rcvd, pipeline, resolution)
resolution.signal_id = signal_id
resolution.pipeline = pipeline
log.debug('check', 'setting state to playing')
if set_state_deferred:
d = deferToThread(pipeline.set_state, gst.STATE_PLAYING)
def stateChanged(res):
return resolution.d
d.addCallback(stateChanged)
return d
else:
pipeline.set_state(gst.STATE_PLAYING)
return resolution.d
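# Illustrative sketch (editor's addition, not part of the original module):
# a minimal probe built on do_element_check(). The pipeline description and
# element name below are placeholders chosen only for demonstration.
def _example_videotestsrc_check():
    def get_width(element):
        # mirrors the caps inspection pattern used by check1394 below
        caps = element.get_pad('src').get_negotiated_caps()
        return caps.get_structure(0)['width']
    return do_element_check(
        'videotestsrc name=testsrc ! fakesink', 'testsrc', get_width)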
def check1394(mid, guid):
"""
Probe the firewire device.
Return a deferred firing a result.
The result is either:
    - successful, with a None value: no device found
    - successful, with a dictionary of width, height, and par as a num/den pair
- failed
@param mid: the id to set on the message.
@param guid: the id of the selected device.
@rtype: L{twisted.internet.defer.Deferred} of
L{flumotion.common.messages.Result}
"""
result = messages.Result()
def do_check(demux):
pad = demux.get_pad('video')
        if not pad or pad.get_negotiated_caps() is None:
raise errors.GStreamerError('Pipeline failed to negotiate?')
caps = pad.get_negotiated_caps()
s = caps.get_structure(0)
w = s['width']
h = s['height']
par = s['pixel-aspect-ratio']
# FIXME: not a good idea to reuse the result name which
# also exists in the parent context.
# pychecker should warn; however it looks like
# the parent result doesn't get stored as name,
# but instead with STORE_DEREF
result = dict(width=w, height=h, par=(par.num, par.denom))
log.debug('check', 'returning dict %r' % result)
return result
pipeline = \
'dv1394src guid=%s ! dvdemux name=demux .video ! fakesink' % guid
d = do_element_check(pipeline, 'demux', do_check)
def errbackResult(failure):
log.debug('check', 'returning failed Result, %r' % failure)
m = None
if failure.check(errors.GStreamerGstError):
source, gerror, debug = failure.value.args
log.debug('check', 'GStreamer GError: %s (debug: %s)' % (
gerror.message, debug))
if gerror.domain == "gst-resource-error-quark":
if gerror.code == int(gst.RESOURCE_ERROR_NOT_FOUND):
# dv1394src was fixed after gst-plugins-good 0.10.2
# to distinguish NOT_FOUND and OPEN_READ
version = gstreamer.get_plugin_version('1394')
if version >= (0, 10, 0, 0) and version <= (0, 10, 2, 0):
m = messages.Error(T_(
N_("Could not find or open the Firewire device. "
"Check the device node and its permissions.")))
else:
m = messages.Error(T_(
N_("No Firewire device found.")))
elif gerror.code == int(gst.RESOURCE_ERROR_OPEN_READ):
m = messages.Error(T_(
N_("Could not open Firewire device for reading. "
"Check permissions on the device.")))
if not m:
m = check.handleGStreamerDeviceError(failure, 'Firewire',
mid=mid)
if not m:
m = messages.Error(T_(N_("Could not probe Firewire device.")),
debug=check.debugFailure(failure))
m.id = mid
result.add(m)
return result
d.addCallback(check.callbackResult, result)
d.addErrback(errbackResult)
return d
| flumotion-mirror/flumotion | flumotion/worker/checks/gst010.py | Python | lgpl-2.1 | 8,378 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Libice(AutotoolsPackage):
"""libICE - Inter-Client Exchange Library."""
homepage = "http://cgit.freedesktop.org/xorg/lib/libICE"
url = "https://www.x.org/archive/individual/lib/libICE-1.0.9.tar.gz"
version('1.0.9', '95812d61df8139c7cacc1325a26d5e37')
depends_on('xproto', type='build')
depends_on('xtrans', type='build')
depends_on('[email protected]:', type='build')
depends_on('util-macros', type='build')
| TheTimmy/spack | var/spack/repos/builtin/packages/libice/package.py | Python | lgpl-2.1 | 1,709 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class FontIsasMisc(Package):
"""X.org isas-misc font."""
homepage = "http://cgit.freedesktop.org/xorg/font/isas-misc"
url = "https://www.x.org/archive/individual/font/font-isas-misc-1.0.3.tar.gz"
version('1.0.3', 'ecc3b6fbe8f5721ddf5c7fc66f73e76f')
depends_on('font-util')
depends_on('fontconfig', type='build')
depends_on('mkfontdir', type='build')
depends_on('bdftopcf', type='build')
depends_on('[email protected]:', type='build')
depends_on('util-macros', type='build')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make()
make('install')
# `make install` copies the files to the font-util installation.
# Create a fake directory to convince Spack that we actually
# installed something.
mkdir(prefix.lib)
| TheTimmy/spack | var/spack/repos/builtin/packages/font-isas-misc/package.py | Python | lgpl-2.1 | 2,105 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# -*- coding: utf-8 -*-
try:
import setuptools
except ImportError:
import ez_setup
ez_setup.use_setuptools()
import setuptools
setuptools.setup(
name='api',
version='0.1',
description='',
author='',
author_email='',
install_requires=[
"pecan",
],
test_suite='api',
zip_safe=False,
include_package_data=True,
packages=setuptools.find_packages(exclude=['ez_setup'])
)
| jiahaoliang/group-based-policy | gbpservice/tests/contrib/nfp_service/reference_configurator/api/setup.py | Python | apache-2.0 | 1,008 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
from nova import flags
import sqlalchemy
from migrate.versioning import api as versioning_api
try:
from migrate.versioning import exceptions as versioning_exceptions
except ImportError:
try:
# python-migration changed location of exceptions after 1.6.3
# See LP Bug #717467
from migrate import exceptions as versioning_exceptions
except ImportError:
sys.exit(_("python-migrate is not installed. Exiting."))
FLAGS = flags.FLAGS
def db_sync(version=None):
db_version()
repo_path = _find_migrate_repo()
return versioning_api.upgrade(FLAGS.sql_connection, repo_path, version)
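# Illustrative note (editor's addition): management commands such as
# "nova-manage db sync" typically call db_sync() with no argument to upgrade
# to the latest revision; passing an integer pins the schema to that version.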
def db_version():
repo_path = _find_migrate_repo()
try:
return versioning_api.db_version(FLAGS.sql_connection, repo_path)
except versioning_exceptions.DatabaseNotControlledError:
# If we aren't version controlled we may already have the database
# in the state from before we started version control, check for that
# and set up version_control appropriately
meta = sqlalchemy.MetaData()
engine = sqlalchemy.create_engine(FLAGS.sql_connection, echo=False)
meta.reflect(bind=engine)
try:
for table in ('auth_tokens', 'zones', 'export_devices',
'fixed_ips', 'floating_ips', 'instances',
'key_pairs', 'networks', 'projects', 'quotas',
'security_group_instance_association',
'security_group_rules', 'security_groups',
'services', 'migrations',
'users', 'user_project_association',
'user_project_role_association',
'user_role_association',
'volumes'):
assert table in meta.tables
return db_version_control(1)
except AssertionError:
return db_version_control(0)
def db_version_control(version=None):
repo_path = _find_migrate_repo()
versioning_api.version_control(FLAGS.sql_connection, repo_path, version)
return version
def _find_migrate_repo():
"""Get the path for the migrate repository."""
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
return path
| superstack/nova | nova/db/sqlalchemy/migration.py | Python | apache-2.0 | 3,168 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# Generated by Django 1.9 on 2016-05-03 02:48
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('podcasts', '0014_auto_20160503_0247'),
]
operations = [
migrations.RemoveField(
model_name='podcast',
name='tip_last_payout',
),
migrations.RemoveField(
model_name='podcast',
name='tip_last_payout_amount',
),
migrations.RemoveField(
model_name='podcast',
name='tip_value',
),
]
| AlmostBetterNetwork/podmaster-host | podcasts/migrations/0015_auto_20160503_0248.py | Python | apache-2.0 | 667 |
# -*- coding: utf-8 -*-
u"""Test auth.guest
:copyright: Copyright (c) 2019 RadiaSoft LLC. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
import pytest
def test_happy_path(auth_fc):
fc = auth_fc
from pykern import pkconfig, pkunit, pkio
from pykern.pkunit import pkok, pkre, pkeq
from pykern.pkdebug import pkdp
import re
fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type})
fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
fc.sr_auth_state(
avatarUrl=None,
displayName='Guest User',
guestIsOnlyMethod=False,
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=False,
method='guest',
needCompleteRegistration=False,
userName=None,
visibleMethods=['email'],
)
def test_timeout(auth_fc):
fc = auth_fc
from pykern import pkconfig, pkunit, pkio
from pykern import pkjson
from pykern.pkdebug import pkdp
from pykern.pkunit import pkok, pkre, pkeq, pkexcept
import re
r = fc.sr_get('authGuestLogin', {'simulation_type': fc.sr_sim_type}, redirect=False)
pkeq(200, r.status_code)
d = pkjson.load_any(r.data)
pkeq(True, d.authState.isLoggedIn)
fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
fc.sr_auth_state(
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=False,
)
fc.sr_get_json('adjustTime', params={'days': '2'})
fc.sr_auth_state(
isGuestUser=True,
isLoggedIn=True,
isLoginExpired=True,
)
with pkexcept('SRException.*guest-expired'):
fc.sr_post('listSimulations', {'simulationType': fc.sr_sim_type})
| mkeilman/sirepo | tests/auth/guest1_test.py | Python | apache-2.0 | 1,794 |
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example downloads activity tags for a given floodlight activity."""
import argparse
import sys
from apiclient import sample_tools
from oauth2client import client
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
'profile_id', type=int,
help='The ID of the profile to download tags for')
argparser.add_argument(
'activity_id', type=int,
help='The ID of the floodlight activity to download tags for')
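# Example invocation (editor's note; the numeric IDs are placeholders):
#   python download_floodlight_tag.py 12345 67890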
def main(argv):
# Authenticate and construct service.
service, flags = sample_tools.init(
argv, 'dfareporting', 'v2.0', __doc__, __file__, parents=[argparser],
scope=['https://www.googleapis.com/auth/dfareporting',
'https://www.googleapis.com/auth/dfatrafficking'])
profile_id = flags.profile_id
activity_id = flags.activity_id
try:
# Construct the request.
request = service.floodlightActivities().generatetag(
profileId=profile_id, floodlightActivityId=activity_id)
# Execute request and print response.
response = request.execute()
print response['floodlightActivityTag']
except client.AccessTokenRefreshError:
print ('The credentials have been revoked or expired, please re-run the '
'application to re-authorize')
if __name__ == '__main__':
main(sys.argv)
| falbassini/googleads-dfa-reporting-samples | python/v2.0/download_floodlight_tag.py | Python | apache-2.0 | 1,952 |
"""
Platform for Ecobee Thermostats.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.ecobee/
"""
import logging
import voluptuous as vol
from homeassistant.components import ecobee
from homeassistant.components.climate import (
DOMAIN, STATE_COOL, STATE_HEAT, STATE_AUTO, STATE_IDLE, ClimateDevice,
ATTR_TARGET_TEMP_LOW, ATTR_TARGET_TEMP_HIGH, SUPPORT_TARGET_TEMPERATURE,
SUPPORT_AWAY_MODE, SUPPORT_HOLD_MODE, SUPPORT_OPERATION_MODE,
SUPPORT_TARGET_HUMIDITY_LOW, SUPPORT_TARGET_HUMIDITY_HIGH,
SUPPORT_AUX_HEAT, SUPPORT_TARGET_TEMPERATURE_HIGH, SUPPORT_FAN_MODE,
SUPPORT_TARGET_TEMPERATURE_LOW, STATE_OFF)
from homeassistant.const import (
ATTR_ENTITY_ID, STATE_ON, ATTR_TEMPERATURE, TEMP_FAHRENHEIT)
import homeassistant.helpers.config_validation as cv
_CONFIGURING = {}
_LOGGER = logging.getLogger(__name__)
ATTR_FAN_MIN_ON_TIME = 'fan_min_on_time'
ATTR_RESUME_ALL = 'resume_all'
DEFAULT_RESUME_ALL = False
TEMPERATURE_HOLD = 'temp'
VACATION_HOLD = 'vacation'
AWAY_MODE = 'awayMode'
DEPENDENCIES = ['ecobee']
SERVICE_SET_FAN_MIN_ON_TIME = 'ecobee_set_fan_min_on_time'
SERVICE_RESUME_PROGRAM = 'ecobee_resume_program'
SET_FAN_MIN_ON_TIME_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Required(ATTR_FAN_MIN_ON_TIME): vol.Coerce(int),
})
RESUME_PROGRAM_SCHEMA = vol.Schema({
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
vol.Optional(ATTR_RESUME_ALL, default=DEFAULT_RESUME_ALL): cv.boolean,
})
SUPPORT_FLAGS = (SUPPORT_TARGET_TEMPERATURE | SUPPORT_AWAY_MODE |
SUPPORT_HOLD_MODE | SUPPORT_OPERATION_MODE |
SUPPORT_TARGET_HUMIDITY_LOW | SUPPORT_TARGET_HUMIDITY_HIGH |
SUPPORT_AUX_HEAT | SUPPORT_TARGET_TEMPERATURE_HIGH |
SUPPORT_TARGET_TEMPERATURE_LOW | SUPPORT_FAN_MODE)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Ecobee Thermostat Platform."""
if discovery_info is None:
return
data = ecobee.NETWORK
hold_temp = discovery_info['hold_temp']
_LOGGER.info(
"Loading ecobee thermostat component with hold_temp set to %s",
hold_temp)
devices = [Thermostat(data, index, hold_temp)
for index in range(len(data.ecobee.thermostats))]
add_entities(devices)
def fan_min_on_time_set_service(service):
"""Set the minimum fan on time on the target thermostats."""
entity_id = service.data.get(ATTR_ENTITY_ID)
fan_min_on_time = service.data[ATTR_FAN_MIN_ON_TIME]
if entity_id:
target_thermostats = [device for device in devices
if device.entity_id in entity_id]
else:
target_thermostats = devices
for thermostat in target_thermostats:
thermostat.set_fan_min_on_time(str(fan_min_on_time))
thermostat.schedule_update_ha_state(True)
def resume_program_set_service(service):
"""Resume the program on the target thermostats."""
entity_id = service.data.get(ATTR_ENTITY_ID)
resume_all = service.data.get(ATTR_RESUME_ALL)
if entity_id:
target_thermostats = [device for device in devices
if device.entity_id in entity_id]
else:
target_thermostats = devices
for thermostat in target_thermostats:
thermostat.resume_program(resume_all)
thermostat.schedule_update_ha_state(True)
hass.services.register(
DOMAIN, SERVICE_SET_FAN_MIN_ON_TIME, fan_min_on_time_set_service,
schema=SET_FAN_MIN_ON_TIME_SCHEMA)
hass.services.register(
DOMAIN, SERVICE_RESUME_PROGRAM, resume_program_set_service,
schema=RESUME_PROGRAM_SCHEMA)
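# Illustrative service payload (editor's note): with the schemas above, the
# ecobee_set_fan_min_on_time service accepts data such as
#   {"entity_id": "climate.living_room", "fan_min_on_time": 5}
# where the entity id is a placeholder.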
class Thermostat(ClimateDevice):
"""A thermostat class for Ecobee."""
def __init__(self, data, thermostat_index, hold_temp):
"""Initialize the thermostat."""
self.data = data
self.thermostat_index = thermostat_index
self.thermostat = self.data.ecobee.get_thermostat(
self.thermostat_index)
self._name = self.thermostat['name']
self.hold_temp = hold_temp
self.vacation = None
self._climate_list = self.climate_list
self._operation_list = ['auto', 'auxHeatOnly', 'cool',
'heat', 'off']
self._fan_list = ['auto', 'on']
self.update_without_throttle = False
def update(self):
"""Get the latest state from the thermostat."""
if self.update_without_throttle:
self.data.update(no_throttle=True)
self.update_without_throttle = False
else:
self.data.update()
self.thermostat = self.data.ecobee.get_thermostat(
self.thermostat_index)
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def name(self):
"""Return the name of the Ecobee Thermostat."""
return self.thermostat['name']
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_FAHRENHEIT
@property
def current_temperature(self):
"""Return the current temperature."""
return self.thermostat['runtime']['actualTemperature'] / 10.0
@property
def target_temperature_low(self):
"""Return the lower bound temperature we try to reach."""
if self.current_operation == STATE_AUTO:
return self.thermostat['runtime']['desiredHeat'] / 10.0
return None
@property
def target_temperature_high(self):
"""Return the upper bound temperature we try to reach."""
if self.current_operation == STATE_AUTO:
return self.thermostat['runtime']['desiredCool'] / 10.0
return None
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
if self.current_operation == STATE_AUTO:
return None
if self.current_operation == STATE_HEAT:
return self.thermostat['runtime']['desiredHeat'] / 10.0
if self.current_operation == STATE_COOL:
return self.thermostat['runtime']['desiredCool'] / 10.0
return None
@property
def fan(self):
"""Return the current fan status."""
if 'fan' in self.thermostat['equipmentStatus']:
return STATE_ON
return STATE_OFF
@property
def current_fan_mode(self):
"""Return the fan setting."""
return self.thermostat['runtime']['desiredFanMode']
@property
def current_hold_mode(self):
"""Return current hold mode."""
mode = self._current_hold_mode
return None if mode == AWAY_MODE else mode
@property
def fan_list(self):
"""Return the available fan modes."""
return self._fan_list
@property
def _current_hold_mode(self):
events = self.thermostat['events']
for event in events:
if event['running']:
if event['type'] == 'hold':
if event['holdClimateRef'] == 'away':
if int(event['endDate'][0:4]) - \
int(event['startDate'][0:4]) <= 1:
# A temporary hold from away climate is a hold
return 'away'
# A permanent hold from away climate
return AWAY_MODE
if event['holdClimateRef'] != "":
# Any other hold based on climate
return event['holdClimateRef']
# Any hold not based on a climate is a temp hold
return TEMPERATURE_HOLD
if event['type'].startswith('auto'):
# All auto modes are treated as holds
return event['type'][4:].lower()
if event['type'] == 'vacation':
self.vacation = event['name']
return VACATION_HOLD
return None
@property
def current_operation(self):
"""Return current operation."""
if self.operation_mode == 'auxHeatOnly' or \
self.operation_mode == 'heatPump':
return STATE_HEAT
return self.operation_mode
@property
def operation_list(self):
"""Return the operation modes list."""
return self._operation_list
@property
def operation_mode(self):
"""Return current operation ie. heat, cool, idle."""
return self.thermostat['settings']['hvacMode']
@property
def mode(self):
"""Return current mode, as the user-visible name."""
cur = self.thermostat['program']['currentClimateRef']
climates = self.thermostat['program']['climates']
current = list(filter(lambda x: x['climateRef'] == cur, climates))
return current[0]['name']
@property
def fan_min_on_time(self):
"""Return current fan minimum on time."""
return self.thermostat['settings']['fanMinOnTime']
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
# Move these to Thermostat Device and make them global
status = self.thermostat['equipmentStatus']
operation = None
if status == '':
operation = STATE_IDLE
elif 'Cool' in status:
operation = STATE_COOL
elif 'auxHeat' in status:
operation = STATE_HEAT
elif 'heatPump' in status:
operation = STATE_HEAT
else:
operation = status
return {
"actual_humidity": self.thermostat['runtime']['actualHumidity'],
"fan": self.fan,
"climate_mode": self.mode,
"operation": operation,
"climate_list": self.climate_list,
"fan_min_on_time": self.fan_min_on_time
}
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return self._current_hold_mode == AWAY_MODE
@property
def is_aux_heat_on(self):
"""Return true if aux heater."""
return 'auxHeat' in self.thermostat['equipmentStatus']
def turn_away_mode_on(self):
"""Turn away mode on by setting it on away hold indefinitely."""
if self._current_hold_mode != AWAY_MODE:
self.data.ecobee.set_climate_hold(self.thermostat_index, 'away',
'indefinite')
self.update_without_throttle = True
def turn_away_mode_off(self):
"""Turn away off."""
if self._current_hold_mode == AWAY_MODE:
self.data.ecobee.resume_program(self.thermostat_index)
self.update_without_throttle = True
def set_hold_mode(self, hold_mode):
"""Set hold mode (away, home, temp, sleep, etc.)."""
hold = self.current_hold_mode
if hold == hold_mode:
# no change, so no action required
return
if hold_mode == 'None' or hold_mode is None:
if hold == VACATION_HOLD:
self.data.ecobee.delete_vacation(
self.thermostat_index, self.vacation)
else:
self.data.ecobee.resume_program(self.thermostat_index)
else:
if hold_mode == TEMPERATURE_HOLD:
self.set_temp_hold(self.current_temperature)
else:
self.data.ecobee.set_climate_hold(
self.thermostat_index, hold_mode, self.hold_preference())
self.update_without_throttle = True
def set_auto_temp_hold(self, heat_temp, cool_temp):
"""Set temperature hold in auto mode."""
if cool_temp is not None:
cool_temp_setpoint = cool_temp
else:
cool_temp_setpoint = (
self.thermostat['runtime']['desiredCool'] / 10.0)
if heat_temp is not None:
heat_temp_setpoint = heat_temp
else:
heat_temp_setpoint = (
                self.thermostat['runtime']['desiredHeat'] / 10.0)
self.data.ecobee.set_hold_temp(self.thermostat_index,
cool_temp_setpoint, heat_temp_setpoint,
self.hold_preference())
_LOGGER.debug("Setting ecobee hold_temp to: heat=%s, is=%s, "
"cool=%s, is=%s", heat_temp,
isinstance(heat_temp, (int, float)), cool_temp,
isinstance(cool_temp, (int, float)))
self.update_without_throttle = True
def set_fan_mode(self, fan_mode):
"""Set the fan mode. Valid values are "on" or "auto"."""
if (fan_mode.lower() != STATE_ON) and (fan_mode.lower() != STATE_AUTO):
error = "Invalid fan_mode value: Valid values are 'on' or 'auto'"
_LOGGER.error(error)
return
cool_temp = self.thermostat['runtime']['desiredCool'] / 10.0
heat_temp = self.thermostat['runtime']['desiredHeat'] / 10.0
self.data.ecobee.set_fan_mode(self.thermostat_index, fan_mode,
cool_temp, heat_temp,
self.hold_preference())
_LOGGER.info("Setting fan mode to: %s", fan_mode)
def set_temp_hold(self, temp):
"""Set temperature hold in modes other than auto.
Ecobee API: It is good practice to set the heat and cool hold
temperatures to be the same, if the thermostat is in either heat, cool,
auxHeatOnly, or off mode. If the thermostat is in auto mode, an
additional rule is required. The cool hold temperature must be greater
than the heat hold temperature by at least the amount in the
heatCoolMinDelta property.
https://www.ecobee.com/home/developer/api/examples/ex5.shtml
"""
if self.current_operation == STATE_HEAT or self.current_operation == \
STATE_COOL:
heat_temp = temp
cool_temp = temp
else:
delta = self.thermostat['settings']['heatCoolMinDelta'] / 10
heat_temp = temp - delta
cool_temp = temp + delta
self.set_auto_temp_hold(heat_temp, cool_temp)
def set_temperature(self, **kwargs):
"""Set new target temperature."""
low_temp = kwargs.get(ATTR_TARGET_TEMP_LOW)
high_temp = kwargs.get(ATTR_TARGET_TEMP_HIGH)
temp = kwargs.get(ATTR_TEMPERATURE)
if self.current_operation == STATE_AUTO and \
(low_temp is not None or high_temp is not None):
self.set_auto_temp_hold(low_temp, high_temp)
elif temp is not None:
self.set_temp_hold(temp)
else:
_LOGGER.error(
"Missing valid arguments for set_temperature in %s", kwargs)
def set_humidity(self, humidity):
"""Set the humidity level."""
self.data.ecobee.set_humidity(self.thermostat_index, humidity)
def set_operation_mode(self, operation_mode):
"""Set HVAC mode (auto, auxHeatOnly, cool, heat, off)."""
self.data.ecobee.set_hvac_mode(self.thermostat_index, operation_mode)
self.update_without_throttle = True
def set_fan_min_on_time(self, fan_min_on_time):
"""Set the minimum fan on time."""
self.data.ecobee.set_fan_min_on_time(
self.thermostat_index, fan_min_on_time)
self.update_without_throttle = True
def resume_program(self, resume_all):
"""Resume the thermostat schedule program."""
self.data.ecobee.resume_program(
self.thermostat_index, 'true' if resume_all else 'false')
self.update_without_throttle = True
def hold_preference(self):
"""Return user preference setting for hold time."""
# Values returned from thermostat are 'useEndTime4hour',
# 'useEndTime2hour', 'nextTransition', 'indefinite', 'askMe'
default = self.thermostat['settings']['holdAction']
if default == 'nextTransition':
return default
# add further conditions if other hold durations should be
# supported; note that this should not include 'indefinite'
# as an indefinite away hold is interpreted as away_mode
return 'nextTransition'
@property
def climate_list(self):
"""Return the list of climates currently available."""
climates = self.thermostat['program']['climates']
return list(map((lambda x: x['name']), climates))
| PetePriority/home-assistant | homeassistant/components/ecobee/climate.py | Python | apache-2.0 | 16,743 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Keras Vis utils."""
from tensorflow.python import keras
from tensorflow.python.keras.utils import vis_utils
from tensorflow.python.lib.io import file_io
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class ModelToDotFormatTest(test.TestCase):
def test_plot_model_cnn(self):
model = keras.Sequential()
model.add(
keras.layers.Conv2D(
filters=2, kernel_size=(2, 3), input_shape=(3, 5, 5), name='conv'))
model.add(keras.layers.Flatten(name='flat'))
model.add(keras.layers.Dense(5, name='dense'))
dot_img_file = 'model_1.png'
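    # plot_model depends on pydot and graphviz; if they are unavailable it
    # raises ImportError, which the try/except below deliberately swallows
    # (presumably so the test is skipped in environments without them).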
try:
vis_utils.plot_model(
model, to_file=dot_img_file, show_shapes=True, show_dtype=True)
self.assertTrue(file_io.file_exists_v2(dot_img_file))
file_io.delete_file_v2(dot_img_file)
except ImportError:
pass
def test_plot_model_with_wrapped_layers_and_models(self):
inputs = keras.Input(shape=(None, 3))
lstm = keras.layers.LSTM(6, return_sequences=True, name='lstm')
x = lstm(inputs)
# Add layer inside a Wrapper
bilstm = keras.layers.Bidirectional(
keras.layers.LSTM(16, return_sequences=True, name='bilstm'))
x = bilstm(x)
# Add model inside a Wrapper
submodel = keras.Sequential(
[keras.layers.Dense(32, name='dense', input_shape=(None, 32))]
)
wrapped_dense = keras.layers.TimeDistributed(submodel)
x = wrapped_dense(x)
# Add shared submodel
outputs = submodel(x)
model = keras.Model(inputs, outputs)
dot_img_file = 'model_2.png'
try:
vis_utils.plot_model(
model,
to_file=dot_img_file,
show_shapes=True,
show_dtype=True,
expand_nested=True)
self.assertTrue(file_io.file_exists_v2(dot_img_file))
file_io.delete_file_v2(dot_img_file)
except ImportError:
pass
def test_plot_model_with_add_loss(self):
inputs = keras.Input(shape=(None, 3))
outputs = keras.layers.Dense(1)(inputs)
model = keras.Model(inputs, outputs)
model.add_loss(math_ops.reduce_mean(outputs))
dot_img_file = 'model_3.png'
try:
vis_utils.plot_model(
model,
to_file=dot_img_file,
show_shapes=True,
show_dtype=True,
expand_nested=True)
self.assertTrue(file_io.file_exists_v2(dot_img_file))
file_io.delete_file_v2(dot_img_file)
except ImportError:
pass
model = keras.Sequential([
keras.Input(shape=(None, 3)), keras.layers.Dense(1)])
model.add_loss(math_ops.reduce_mean(model.output))
dot_img_file = 'model_4.png'
try:
vis_utils.plot_model(
model,
to_file=dot_img_file,
show_shapes=True,
show_dtype=True,
expand_nested=True)
self.assertTrue(file_io.file_exists_v2(dot_img_file))
file_io.delete_file_v2(dot_img_file)
except ImportError:
pass
if __name__ == '__main__':
test.main()
| sarvex/tensorflow | tensorflow/python/keras/utils/vis_utils_test.py | Python | apache-2.0 | 3,671 |
"""
pygments.lexers.stata
~~~~~~~~~~~~~~~~~~~~~
Lexer for Stata
:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, default, include, words
from pygments.token import Comment, Keyword, Name, Number, \
String, Text, Operator
from pygments.lexers._stata_builtins import builtins_base, builtins_functions
__all__ = ['StataLexer']
class StataLexer(RegexLexer):
"""
For `Stata <http://www.stata.com/>`_ do files.
.. versionadded:: 2.2
"""
# Syntax based on
# - http://fmwww.bc.edu/RePEc/bocode/s/synlightlist.ado
# - https://github.com/isagalaev/highlight.js/blob/master/src/languages/stata.js
# - https://github.com/jpitblado/vim-stata/blob/master/syntax/stata.vim
name = 'Stata'
aliases = ['stata', 'do']
filenames = ['*.do', '*.ado']
mimetypes = ['text/x-stata', 'text/stata', 'application/x-stata']
flags = re.MULTILINE | re.DOTALL
tokens = {
'root': [
include('comments'),
include('strings'),
include('macros'),
include('numbers'),
include('keywords'),
include('operators'),
include('format'),
(r'.', Text),
],
# Comments are a complicated beast in Stata because they can be
# nested and there are a few corner cases with that. See:
# - github.com/kylebarron/language-stata/issues/90
# - statalist.org/forums/forum/general-stata-discussion/general/1448244
'comments': [
(r'(^//|(?<=\s)//)(?!/)', Comment.Single, 'comments-double-slash'),
(r'^\s*\*', Comment.Single, 'comments-star'),
(r'/\*', Comment.Multiline, 'comments-block'),
(r'(^///|(?<=\s)///)', Comment.Special, 'comments-triple-slash')
],
'comments-block': [
(r'/\*', Comment.Multiline, '#push'),
# this ends and restarts a comment block. but need to catch this so
            # that it doesn't start _another_ level of comment blocks
(r'\*/\*', Comment.Multiline),
(r'(\*/\s+\*(?!/)[^\n]*)|(\*/)', Comment.Multiline, '#pop'),
# Match anything else as a character inside the comment
(r'.', Comment.Multiline),
],
'comments-star': [
(r'///.*?\n', Comment.Single,
('#pop', 'comments-triple-slash')),
(r'(^//|(?<=\s)//)(?!/)', Comment.Single,
('#pop', 'comments-double-slash')),
(r'/\*', Comment.Multiline, 'comments-block'),
(r'.(?=\n)', Comment.Single, '#pop'),
(r'.', Comment.Single),
],
'comments-triple-slash': [
(r'\n', Comment.Special, '#pop'),
# A // breaks out of a comment for the rest of the line
(r'//.*?(?=\n)', Comment.Single, '#pop'),
(r'.', Comment.Special),
],
'comments-double-slash': [
(r'\n', Text, '#pop'),
(r'.', Comment.Single),
],
# `"compound string"' and regular "string"; note the former are
# nested.
'strings': [
(r'`"', String, 'string-compound'),
(r'(?<!`)"', String, 'string-regular'),
],
'string-compound': [
(r'`"', String, '#push'),
(r'"\'', String, '#pop'),
(r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
include('macros'),
(r'.', String)
],
'string-regular': [
(r'(")(?!\')|(?=\n)', String, '#pop'),
(r'\\\\|\\"|\\\$|\\`|\\\n', String.Escape),
include('macros'),
(r'.', String)
],
# A local is usually
# `\w{0,31}'
# `:extended macro'
# `=expression'
# `[rsen](results)'
# `(++--)scalar(++--)'
#
# However, there are all sorts of weird rules wrt edge
# cases. Instead of writing 27 exceptions, anything inside
# `' is a local.
#
# A global is more restricted, so we do follow rules. Note only
# locals explicitly enclosed ${} can be nested.
'macros': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
],
'macro-local': [
(r'`', Name.Variable, '#push'),
(r"'", Name.Variable, '#pop'),
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'.', Name.Variable), # fallback
],
'macro-global-nested': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, '#push'),
(r'\}', Name.Variable.Global, '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name'),
(r'`', Name.Variable, 'macro-local'),
(r'\w', Name.Variable.Global), # fallback
default('#pop'),
],
'macro-global-name': [
(r'\$(\{|(?=[$`]))', Name.Variable.Global, 'macro-global-nested', '#pop'),
(r'\$', Name.Variable.Global, 'macro-global-name', '#pop'),
(r'`', Name.Variable, 'macro-local', '#pop'),
(r'\w{1,32}', Name.Variable.Global, '#pop'),
],
# Built in functions and statements
'keywords': [
(words(builtins_functions, prefix = r'\b', suffix = r'(?=\()'),
Name.Function),
(words(builtins_base, prefix = r'(^\s*|\s)', suffix = r'\b'),
Keyword),
],
# http://www.stata.com/help.cgi?operators
'operators': [
(r'-|==|<=|>=|<|>|&|!=', Operator),
(r'\*|\+|\^|/|!|~|==|~=', Operator)
],
# Stata numbers
'numbers': [
# decimal number
(r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[i]?\b',
Number),
],
# Stata formats
'format': [
(r'%-?\d{1,2}(\.\d{1,2})?[gfe]c?', Name.Other),
(r'%(21x|16H|16L|8H|8L)', Name.Other),
(r'%-?(tc|tC|td|tw|tm|tq|th|ty|tg)\S{0,32}', Name.Other),
(r'%[-~]?\d{1,4}s', Name.Other),
]
}
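# A minimal usage sketch (the file name is hypothetical; assumes Pygments is
# installed in the usual way):
#     from pygments import highlight
#     from pygments.formatters import HtmlFormatter
#     with open('analysis.do') as f:
#         print(highlight(f.read(), StataLexer(), HtmlFormatter()))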
| sonntagsgesicht/regtest | .aux/venv/lib/python3.9/site-packages/pygments/lexers/stata.py | Python | apache-2.0 | 6,414 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from heat_integrationtests.common import test
LOG = logging.getLogger(__name__)
class CeilometerAlarmTest(test.HeatIntegrationTest):
"""Class is responsible for testing of ceilometer usage."""
def setUp(self):
super(CeilometerAlarmTest, self).setUp()
self.client = self.orchestration_client
self.template = self._load_template(__file__,
'test_ceilometer_alarm.yaml',
'templates')
def check_instance_count(self, stack_identifier, expected):
stack = self.client.stacks.get(stack_identifier)
actual = self._stack_output(stack, 'asg_size')
if actual != expected:
LOG.warn('check_instance_count exp:%d, act:%s' % (expected,
actual))
return actual == expected
def test_alarm(self):
"""Confirm we can create an alarm and trigger it."""
# 1. create the stack
stack_identifier = self.stack_create(template=self.template)
# 2. send ceilometer a metric (should cause the alarm to fire)
sample = {}
sample['counter_type'] = 'gauge'
sample['counter_name'] = 'test_meter'
sample['counter_volume'] = 1
sample['counter_unit'] = 'count'
sample['resource_metadata'] = {'metering.stack_id':
stack_identifier.split('/')[-1]}
sample['resource_id'] = 'shouldnt_matter'
self.metering_client.samples.create(**sample)
# 3. confirm we get a scaleup.
# Note: there is little point waiting more than 60s+time to scale up.
self.assertTrue(test.call_until_true(
120, 2, self.check_instance_count, stack_identifier, 2))
| rh-s/heat | heat_integrationtests/scenario/test_ceilometer_alarm.py | Python | apache-2.0 | 2,412 |
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
from six.moves import xrange
from nose.tools import assert_equal, assert_true
from matplotlib.testing.decorators import image_comparison, cleanup
from matplotlib.axes import Axes
import matplotlib.pyplot as plt
import numpy as np
@cleanup
def test_figure_label():
# pyplot figure creation, selection and closing with figure label and
# number
plt.close('all')
plt.figure('today')
plt.figure(3)
plt.figure('tomorrow')
plt.figure()
plt.figure(0)
plt.figure(1)
plt.figure(3)
assert_equal(plt.get_fignums(), [0, 1, 3, 4, 5])
assert_equal(plt.get_figlabels(), ['', 'today', '', 'tomorrow', ''])
plt.close(10)
plt.close()
plt.close(5)
plt.close('tomorrow')
assert_equal(plt.get_fignums(), [0, 1])
assert_equal(plt.get_figlabels(), ['', 'today'])
@cleanup
def test_fignum_exists():
# pyplot figure creation, selection and closing with fignum_exists
plt.figure('one')
plt.figure(2)
plt.figure('three')
plt.figure()
assert_equal(plt.fignum_exists('one'), True)
assert_equal(plt.fignum_exists(2), True)
assert_equal(plt.fignum_exists('three'), True)
assert_equal(plt.fignum_exists(4), True)
plt.close('one')
plt.close(4)
assert_equal(plt.fignum_exists('one'), False)
assert_equal(plt.fignum_exists(4), False)
@image_comparison(baseline_images=['figure_today'])
def test_figure():
# named figure support
fig = plt.figure('today')
ax = fig.add_subplot(111)
ax.set_title(fig.get_label())
ax.plot(list(xrange(5)))
# plot red line in a different figure.
plt.figure('tomorrow')
plt.plot([0, 1], [1, 0], 'r')
# Return to the original; make sure the red line is not there.
plt.figure('today')
plt.close('tomorrow')
@cleanup
def test_gca():
fig = plt.figure()
ax1 = fig.add_axes([0, 0, 1, 1])
assert_true(fig.gca(projection='rectilinear') is ax1)
assert_true(fig.gca() is ax1)
ax2 = fig.add_subplot(121, projection='polar')
assert_true(fig.gca() is ax2)
    assert_true(fig.gca(polar=True) is ax2)
ax3 = fig.add_subplot(122)
assert_true(fig.gca() is ax3)
# the final request for a polar axes will end up creating one
# with a spec of 111.
assert_true(fig.gca(polar=True) is not ax3)
assert_true(fig.gca(polar=True) is not ax2)
assert_equal(fig.gca().get_geometry(), (1, 1, 1))
fig.sca(ax1)
assert_true(fig.gca(projection='rectilinear') is ax1)
assert_true(fig.gca() is ax1)
@image_comparison(baseline_images=['figure_suptitle'])
def test_suptitle():
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
fig.suptitle('hello', color='r')
fig.suptitle('title', color='g', rotation='30')
@cleanup
def test_suptitle_fontproperties():
from matplotlib.font_manager import FontProperties
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
fps = FontProperties(size='large', weight='bold')
txt = fig.suptitle('fontprops title', fontproperties=fps)
assert_equal(txt.get_fontsize(), fps.get_size_in_points())
assert_equal(txt.get_weight(), fps.get_weight())
@image_comparison(baseline_images=['alpha_background'],
# only test png and svg. The PDF output appears correct,
# but Ghostscript does not preserve the background color.
extensions=['png', 'svg'],
savefig_kwarg={'facecolor': (0, 1, 0.4),
'edgecolor': 'none'})
def test_alpha():
# We want an image which has a background color and an
# alpha of 0.4.
fig = plt.figure(figsize=[2, 1])
fig.set_facecolor((0, 1, 0.4))
fig.patch.set_alpha(0.4)
import matplotlib.patches as mpatches
fig.patches.append(mpatches.CirclePolygon([20, 20],
radius=15,
alpha=0.6,
facecolor='red'))
@cleanup
def test_too_many_figures():
import warnings
with warnings.catch_warnings(record=True) as w:
for i in range(22):
fig = plt.figure()
assert len(w) == 1
def test_iterability_axes_argument():
# This is a regression test for matplotlib/matplotlib#3196. If one of the
# arguments returned by _as_mpl_axes defines __getitem__ but is not
    # iterable, this would raise an exception. This is because we check
# whether the arguments are iterable, and if so we try and convert them
# to a tuple. However, the ``iterable`` function returns True if
# __getitem__ is present, but some classes can define __getitem__ without
# being iterable. The tuple conversion is now done in a try...except in
# case it fails.
class MyAxes(Axes):
def __init__(self, *args, **kwargs):
kwargs.pop('myclass', None)
return Axes.__init__(self, *args, **kwargs)
class MyClass(object):
def __getitem__(self, item):
if item != 'a':
raise ValueError("item should be a")
def _as_mpl_axes(self):
return MyAxes, {'myclass': self}
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1, projection=MyClass())
plt.close(fig)
@cleanup
def test_set_fig_size():
fig = plt.figure()
# check figwidth
fig.set_figwidth(5)
assert_equal(fig.get_figwidth(), 5)
# check figheight
fig.set_figheight(1)
assert_equal(fig.get_figheight(), 1)
# check using set_size_inches
fig.set_size_inches(2, 4)
assert_equal(fig.get_figwidth(), 2)
assert_equal(fig.get_figheight(), 4)
# check using tuple to first argument
fig.set_size_inches((1, 3))
assert_equal(fig.get_figwidth(), 1)
assert_equal(fig.get_figheight(), 3)
@cleanup
def test_axes_remove():
fig, axes = plt.subplots(2, 2)
axes[-1, -1].remove()
for ax in axes.ravel()[:-1]:
assert ax in fig.axes
assert axes[-1, -1] not in fig.axes
assert_equal(len(fig.axes), 3)
def test_figaspect():
w, h = plt.figaspect(np.float64(2) / np.float64(1))
assert h / w == 2
w, h = plt.figaspect(2)
assert h / w == 2
w, h = plt.figaspect(np.zeros((1, 2)))
assert h / w == 0.5
w, h = plt.figaspect(np.zeros((2, 2)))
assert h / w == 1
if __name__ == "__main__":
import nose
nose.runmodule(argv=['-s', '--with-doctest'], exit=False)
| andyraib/data-storage | python_scripts/env/lib/python3.6/site-packages/matplotlib/tests/test_figure.py | Python | apache-2.0 | 6,508 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import sys
from setuptools import setup
from setuptools import find_packages
with io.open('README.md', 'rt', encoding='utf8') as f:
README = f.read()
if sys.argv[-1] == 'test':
os.system('python -sm unittest discover tests "*_test.py"')
sys.exit(0)
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist')
os.system('twine upload dist/*')
sys.exit(0)
VERSION = '0.0.7'
REQUIRES = ['google-api-python-client>=1.5.3', 'pandas>=0.22.0', 'fire>=0.1.3']
GITHUB_URL = 'https://github.com/condad/google-objects'
setup(
name='google_objects',
packages=find_packages(),
version=VERSION,
description="A simple OO wrapper around Google's python API client",
long_description=README,
long_description_content_type='text/markdown',
author='Connor Sullivan',
author_email='[email protected]',
install_requires=REQUIRES,
url=GITHUB_URL,
download_url='https://github.com/condad/google-objects/tarball/' + VERSION,
keywords=['google api', 'google sheets', 'google drive', 'google slides'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
entry_points={
'console_scripts': [
'sheets-cli = google_objects.cli:main',
],
},
)
| condad/google-objects | setup.py | Python | apache-2.0 | 1,680 |
"""OpenGLDemo.py -- A simple demo of using OpenGL with Cocoa
To build the demo program, run this line in Terminal.app:
$ python setup.py py2app -A
This creates a directory "dist" containing OpenGLDemo.app. (The
-A option causes the files to be symlinked to the .app bundle instead
of copied. This means you don't have to rebuild the app if you edit the
sources or nibs.)
This example requires PyOpenGL
"""
from Cocoa import *
from OpenGL.GL import *
from PyObjCTools import AppHelper
ClearColors = redIndex, greenIndex, blueIndex, alphaIndex = range(4)
class OpenGLDemoView(NSOpenGLView):
def awakeFromNib(self):
self.color_index = alphaIndex
def initWithFrame_(self, frame):
attribs = [
NSOpenGLPFANoRecovery,
NSOpenGLPFAWindow,
NSOpenGLPFAAccelerated,
NSOpenGLPFADoubleBuffer,
NSOpenGLPFAColorSize, 24,
NSOpenGLPFAAlphaSize, 8,
NSOpenGLPFADepthSize, 24,
NSOpenGLPFAStencilSize, 8,
NSOpenGLPFAAccumSize, 0,
]
fmt = NSOpenGLPixelFormat.alloc().initWithAttributes_(attribs)
self = super(OpenGLDemoView, self).initWithFrame_pixelFormat_(frame, fmt)
return self
@objc.IBAction
def setClearColor_(self, sender):
self.color_index = sender.tag()
self.setNeedsDisplay_(True)
def drawRect_(self, ((x, y), (w, h))):
glViewport(0, 0, w, h)
clear_color = [0.0]*4
clear_color[self.color_index] = 1.0
glClearColor(*clear_color)
glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT)
self.openGLContext().flushBuffer()
if __name__ == "__main__":
AppHelper.runEventLoop()
| albertz/music-player | mac/pyobjc-framework-Cocoa/Examples/OpenGL/OpenGLDemo/OpenGLDemo.py | Python | bsd-2-clause | 1,734 |
from PyQt4.QtCore import QSize
from PyQt4.QtGui import QVBoxLayout
# This is really really ugly, but the QDockWidget for some reason does not notice when
# its child widget becomes smaller...
# Therefore we manually set its minimum size when our own minimum size changes
class MyVBoxLayout(QVBoxLayout):
def __init__(self, parent=None):
QVBoxLayout.__init__(self, parent)
self._last_size = QSize(0, 0)
def setGeometry(self, r):
QVBoxLayout.setGeometry(self, r)
try:
wid = self.parentWidget().parentWidget()
new_size = self.minimumSize()
if new_size == self._last_size: return
self._last_size = new_size
twid = wid.titleBarWidget()
if twid is not None:
theight = twid.sizeHint().height()
else:
theight = 0
new_size += QSize(0, theight)
wid.setMinimumSize(new_size)
except Exception:
pass
| bitmingw/FindYourSister | sloth/sloth/gui/utils.py | Python | bsd-2-clause | 994 |
from Foundation import *
from PyObjCTools.TestSupport import *
class TestNSXMLNodeOptions (TestCase):
def testConstants(self):
self.assertEqual(NSXMLNodeOptionsNone, 0)
self.assertEqual(NSXMLNodeIsCDATA, 1 << 0)
self.assertEqual(NSXMLNodeExpandEmptyElement, 1 << 1)
self.assertEqual(NSXMLNodeCompactEmptyElement, 1 << 2)
self.assertEqual(NSXMLNodeUseSingleQuotes, 1 << 3)
self.assertEqual(NSXMLNodeUseDoubleQuotes, 1 << 4)
self.assertEqual(NSXMLDocumentTidyHTML, 1 << 9)
self.assertEqual(NSXMLDocumentTidyXML, 1 << 10)
self.assertEqual(NSXMLDocumentValidate, 1 << 13)
self.assertEqual(NSXMLNodeLoadExternalEntitiesAlways, 1 << 14)
self.assertEqual(NSXMLNodeLoadExternalEntitiesSameOriginOnly, 1 << 15)
self.assertEqual(NSXMLNodeLoadExternalEntitiesNever, 1 << 19)
self.assertEqual(NSXMLDocumentXInclude, 1 << 16)
self.assertEqual(NSXMLNodePrettyPrint, 1 << 17)
self.assertEqual(NSXMLDocumentIncludeContentTypeDeclaration, 1 << 18)
self.assertEqual(NSXMLNodePreserveNamespaceOrder, 1 << 20)
self.assertEqual(NSXMLNodePreserveAttributeOrder, 1 << 21)
self.assertEqual(NSXMLNodePreserveEntities, 1 << 22)
self.assertEqual(NSXMLNodePreservePrefixes, 1 << 23)
self.assertEqual(NSXMLNodePreserveCDATA, 1 << 24)
self.assertEqual(NSXMLNodePreserveWhitespace, 1 << 25)
self.assertEqual(NSXMLNodePreserveDTD, 1 << 26)
self.assertEqual(NSXMLNodePreserveCharacterReferences, 1 << 27)
self.assertEqual(NSXMLNodePreserveEmptyElements, (
NSXMLNodeExpandEmptyElement | NSXMLNodeCompactEmptyElement))
self.assertEqual(NSXMLNodePreserveQuotes, (NSXMLNodeUseSingleQuotes | NSXMLNodeUseDoubleQuotes))
self.assertEqual(NSXMLNodePreserveAll & 0xFFFFFFFF, 0xFFFFFFFF & (
NSXMLNodePreserveNamespaceOrder |
NSXMLNodePreserveAttributeOrder |
NSXMLNodePreserveEntities |
NSXMLNodePreservePrefixes |
NSXMLNodePreserveCDATA |
NSXMLNodePreserveEmptyElements |
NSXMLNodePreserveQuotes |
NSXMLNodePreserveWhitespace |
NSXMLNodePreserveDTD |
NSXMLNodePreserveCharacterReferences |
0xFFF00000))
if __name__ == "__main__":
main()
| albertz/music-player | mac/pyobjc-framework-Cocoa/PyObjCTest/test_nsxmlnodeoptions.py | Python | bsd-2-clause | 2,358 |
#!/usr/bin/env python
import os.path as path
import sys
root=path.abspath(path.dirname(__file__))
sys.path.insert(0,root)
| stiletto/bnw | bnw_shell.py | Python | bsd-2-clause | 122 |
from __future__ import absolute_import
from sentry.models import Activity
from .mail import ActivityMailDebugView
class DebugUnassignedEmailView(ActivityMailDebugView):
def get_activity(self, request, event):
return {"type": Activity.UNASSIGNED, "user": request.user}
| mvaled/sentry | src/sentry/web/frontend/debug/debug_unassigned_email.py | Python | bsd-3-clause | 284 |
# Django settings for celery_http_gateway project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
CARROT_BACKEND = "amqp"
CELERY_RESULT_BACKEND = "database"
BROKER_HOST = "localhost"
BROKER_VHOST = "/"
BROKER_USER = "guest"
BROKER_PASSWORD = "guest"
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_ENGINE = 'sqlite3'
# path to database file if using sqlite3.
DATABASE_NAME = 'development.db'
# Not used with sqlite3.
DATABASE_USER = ''
# Not used with sqlite3.
DATABASE_PASSWORD = ''
# Set to empty string for localhost. Not used with sqlite3.
DATABASE_HOST = ''
# Set to empty string for default. Not used with sqlite3.
DATABASE_PORT = ''
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '#1i=edpk55k3781$z-p%b#dbn&n+-rtt83pgz2o9o)v8g7(owq'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.load_template_source',
'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'celery_http_gateway.urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or
# "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'djcelery',
)
| frac/celery | examples/celery_http_gateway/settings.py | Python | bsd-3-clause | 2,931 |
import logging
logger = logging.getLogger(__name__)
logger.warning('DEPRECATED: pyface.grid, use pyface.ui.wx.grid instead.')
from pyface.ui.wx.grid.inverted_grid_model import *
| geggo/pyface | pyface/grid/inverted_grid_model.py | Python | bsd-3-clause | 180 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from string import Template
import optparse
import os
import sys
try:
grit_module_path = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'tools', 'grit')
sys.path.insert(0, grit_module_path)
from grit.format import data_pack as DataPack
except ImportError, e:
print 'ImportError: ', e
sys.exit(-1)
def is_ascii(s):
return all(ord(c) < 128 for c in s)
header_template = \
"""// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef MOJO_SERVICES_HTML_VIEWER_BLINK_RESOURCE_MAP_H_
#define MOJO_SERVICES_HTML_VIEWER_BLINK_RESOURCE_MAP_H_
#include <map>
namespace html_viewer {
class BlinkResourceMap {
public:
BlinkResourceMap();
const char* GetResource(int id, int* length);
private:
struct ResourceEntry {
const char* data;
int length;
ResourceEntry()
: data(nullptr)
, length(0) {
}
ResourceEntry(const char* data, int length)
: data(data)
, length(length) {
}
};
typedef std::map<int, ResourceEntry> ResourceMap;
ResourceMap resources_;
};
} // namespace html_viewer
#endif // MOJO_SERVICES_HTML_VIEWER_BLINK_RESOURCE_MAP_H_"""
cpp_template = \
"""// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "$header_file_name"
#include "base/macros.h"
namespace html_viewer {
$definitions
BlinkResourceMap::BlinkResourceMap()
{
$map_initializer
}
const char* BlinkResourceMap::GetResource(int id, int* length)
{
ResourceMap::iterator it = resources_.find(id);
if (it == resources_.end()) {
*length = 0;
return nullptr;
}
*length = it->second.length;
return it->second.data;
}
} // namespace html_viewer"""
def main():
parser = optparse.OptionParser(
usage='Usage: %prog --pak-file PAK_FILE --header HEADER --cpp CPP\n')
parser.add_option('-i', '--pak-file', action='store', dest='pak_file',
help='The .pak file to be extracted.')
parser.add_option('', '--header', action='store', dest='header_file',
help='Header file to be generated.')
parser.add_option('', '--cpp', action='store', dest='cpp_file',
help='C++ file to be generated.')
(options, _) = parser.parse_args()
if (not options.pak_file or not options.header_file or not options.cpp_file):
parser.print_help()
sys.exit(-1)
header_file = open(options.header_file, 'w+')
cpp_file = open(options.cpp_file, 'w+')
pak_contents = DataPack.ReadDataPack(options.pak_file)
resourceIds = []
header_contents = dict()
cpp_contents = dict()
definitions = []
for (resId, data) in pak_contents.resources.iteritems():
if not is_ascii(data):
continue
resourceIds.append(resId)
hex_values = ['0x{0:02x}'.format(ord(char)) for char in data]
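    # Chunk the hex byte literals into rows of 12 values so the generated C++
    # array initializer below stays readable.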
f = lambda A, n=12: [A[i:i+n] for i in range(0, len(A), n)]
hex_values_string = ',\n '.join(', '.join(x) for x in f(hex_values))
cpp_definition = \
'const char kResource%s[%d] = {\n %s \n};' % \
(str(resId), len(hex_values), hex_values_string)
definitions.append(cpp_definition)
header_file_contents = Template(header_template).substitute(header_contents)
header_file.write(header_file_contents)
header_file.close()
map_initializer = []
for resId in resourceIds:
insert_statement = \
'resources_.insert(std::pair<int, ResourceEntry>(\n' \
' %s, ResourceEntry(kResource%s, arraysize(kResource%s))));'
map_initializer.append( \
insert_statement % (str(resId), str(resId), str(resId)))
cpp_contents['definitions']= '\n'.join(definitions)
cpp_contents['header_file_name'] = os.path.basename(options.header_file)
cpp_contents['map_initializer'] = '\n '.join(map_initializer)
cpp_file_contents = Template(cpp_template).substitute(cpp_contents)
cpp_file.write(cpp_file_contents)
cpp_file.close()
if __name__ == '__main__':
main()
| CTSRD-SOAAP/chromium-42.0.2311.135 | mojo/services/html_viewer/generate_blink_resource_map.py | Python | bsd-3-clause | 4,259 |
'''This allows running a bit of code on couchdb docs.
The code should take a json python object, modify it, and hand it back.
Not quite that slick yet; need a way to pass in code or make this a decorator.
'''
import importlib
from harvester.collection_registry_client import Collection
from harvester.couchdb_init import get_couchdb
COUCHDB_VIEW = 'all_provider_docs/by_provider_name'
def run_on_couchdb_by_collection(func, collection_key=None):
    '''Run func on every doc in the given collection.
    If collection_key is None, try to grab all docs and modify them
    (this can take a long time and is not recommended).
    func is a function that takes a couchdb doc and returns it modified;
    it should return the new document, or None if no changes were made.
'''
_couchdb = get_couchdb()
v = _couchdb.view(COUCHDB_VIEW, include_docs='true', key=collection_key) \
if collection_key else _couchdb.view(COUCHDB_VIEW,
include_docs='true')
doc_ids = []
n = 0
for r in v:
n += 1
doc_new = func(r.doc)
        if doc_new and doc_new != r.doc:
_couchdb.save(doc_new)
doc_ids.append(r.doc['_id'])
if n % 100 == 0:
print '{} docs ran. Last doc:{}\n'.format(n, r.doc['_id'])
return doc_ids
def run_on_couchdb_doc(docid, func):
'''Run on a doc, by doc id'''
_couchdb = get_couchdb()
doc = _couchdb[docid]
mod_name, func_name = func.rsplit('.', 1)
fmod = importlib.import_module(mod_name)
ffunc = getattr(fmod, func_name)
doc_new = ffunc(doc)
if doc_new and doc_new != doc:
_couchdb.save(doc_new)
return True
return False
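# Example call (the doc id is hypothetical), running one of the transforms
# defined below on a single record:
#   run_on_couchdb_doc(
#       'abc123--000123',
#       'harvester.post_processing.run_transform_on_couchdb_docs.'
#       'update_collection_description')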
C_CACHE = {}
def update_collection_description(doc):
cjson = doc['originalRecord']['collection'][0]
# get collection description
if 'description' not in cjson:
if cjson['@id'] in C_CACHE:
c = C_CACHE[cjson['@id']]
else:
c = Collection(url_api=cjson['@id'])
C_CACHE[cjson['@id']] = c
description = c['description'] if c['description'] else c['name']
print('DOC: {} DESCRIP: {}'.format(
            doc['_id'], description.encode('utf8')))
doc['originalRecord']['collection'][0]['description'] = description
doc['sourceResource']['collection'][0]['description'] = description
return doc
def add_rights_and_type_to_collection(doc):
cjson = doc['originalRecord']['collection'][0]
# get collection description
if cjson['@id'] in C_CACHE:
c = C_CACHE[cjson['@id']]
else:
c = Collection(url_api=cjson['@id'])
C_CACHE[cjson['@id']] = c
doc['originalRecord']['collection'][0]['rights_status'] = c['rights_status']
doc['originalRecord']['collection'][0]['rights_statement'] = c['rights_statement']
doc['originalRecord']['collection'][0]['dcmi_type']=c['dcmi_type']
if 'collection' in doc['sourceResource']:
doc['sourceResource']['collection'][0]['rights_status'] = c['rights_status']
doc['sourceResource']['collection'][0]['rights_statement'] = c['rights_statement']
doc['sourceResource']['collection'][0]['dcmi_type'] = c['dcmi_type']
else:
doc['sourceResource']['collection'] = doc['originalRecord']['collection']
return doc
| ucldc/harvester | harvester/post_processing/run_transform_on_couchdb_docs.py | Python | bsd-3-clause | 3,271 |
# -*- coding: utf-8 -*-
import pytest
import six
from sqlalchemy_utils import Currency, i18n
@pytest.fixture
def set_get_locale():
i18n.get_locale = lambda: i18n.babel.Locale('en')
@pytest.mark.skipif('i18n.babel is None')
@pytest.mark.usefixtures('set_get_locale')
class TestCurrency(object):
def test_init(self):
assert Currency('USD') == Currency(Currency('USD'))
def test_hashability(self):
assert len(set([Currency('USD'), Currency('USD')])) == 1
def test_invalid_currency_code(self):
with pytest.raises(ValueError):
Currency('Unknown code')
def test_invalid_currency_code_type(self):
with pytest.raises(TypeError):
Currency(None)
@pytest.mark.parametrize(
('code', 'name'),
(
('USD', 'US Dollar'),
('EUR', 'Euro')
)
)
def test_name_property(self, code, name):
assert Currency(code).name == name
@pytest.mark.parametrize(
('code', 'symbol'),
(
('USD', u'$'),
('EUR', u'€')
)
)
def test_symbol_property(self, code, symbol):
assert Currency(code).symbol == symbol
def test_equality_operator(self):
assert Currency('USD') == 'USD'
assert 'USD' == Currency('USD')
assert Currency('USD') == Currency('USD')
def test_non_equality_operator(self):
assert Currency('USD') != 'EUR'
assert not (Currency('USD') != 'USD')
def test_unicode(self):
currency = Currency('USD')
assert six.text_type(currency) == u'USD'
def test_str(self):
currency = Currency('USD')
assert str(currency) == 'USD'
def test_representation(self):
currency = Currency('USD')
assert repr(currency) == "Currency('USD')"
| konstantinoskostis/sqlalchemy-utils | tests/primitives/test_currency.py | Python | bsd-3-clause | 1,823 |
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Various custom data types for use throughout the unexpected pass finder."""
from __future__ import print_function
import collections
import copy
import fnmatch
import logging
import six
FULL_PASS = 1
NEVER_PASS = 2
PARTIAL_PASS = 3
# Allow different unexpected pass finder implementations to register custom
# data types if necessary. These are set to the base versions at the end of the
# file.
Expectation = None
Result = None
BuildStats = None
TestExpectationMap = None
def SetExpectationImplementation(impl):
global Expectation
assert issubclass(impl, BaseExpectation)
Expectation = impl
def SetResultImplementation(impl):
global Result
assert issubclass(impl, BaseResult)
Result = impl
def SetBuildStatsImplementation(impl):
global BuildStats
assert issubclass(impl, BaseBuildStats)
BuildStats = impl
def SetTestExpectationMapImplementation(impl):
global TestExpectationMap
assert issubclass(impl, BaseTestExpectationMap)
TestExpectationMap = impl
class BaseExpectation(object):
"""Container for a test expectation.
Similar to typ's expectations_parser.Expectation class, but with unnecessary
data stripped out and made hashable.
The data contained in an Expectation is equivalent to a single line in an
expectation file.
"""
def __init__(self, test, tags, expected_results, bug=None):
self.test = test
self.tags = frozenset(tags)
self.bug = bug or ''
if isinstance(expected_results, str):
self.expected_results = frozenset([expected_results])
else:
self.expected_results = frozenset(expected_results)
# We're going to be making a lot of comparisons, and fnmatch is *much*
# slower (~40x from rough testing) than a straight comparison, so only use
# it if necessary.
if '*' in test:
self._comp = self._CompareWildcard
else:
self._comp = self._CompareNonWildcard
def __eq__(self, other):
return (isinstance(other, BaseExpectation) and self.test == other.test
and self.tags == other.tags
and self.expected_results == other.expected_results
and self.bug == other.bug)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.test, self.tags, self.expected_results, self.bug))
def _CompareWildcard(self, result_test_name):
return fnmatch.fnmatch(result_test_name, self.test)
def _CompareNonWildcard(self, result_test_name):
return result_test_name == self.test
def AppliesToResult(self, result):
"""Checks whether this expectation should have applied to |result|.
An expectation applies to a result if the test names match (including
wildcard expansion) and the expectation's tags are a subset of the result's
tags.
Args:
result: A Result instance to check against.
Returns:
True if |self| applies to |result|, otherwise False.
"""
assert isinstance(result, BaseResult)
return (self._comp(result.test) and self.tags <= result.tags)
def MaybeAppliesToTest(self, test_name):
"""Similar to AppliesToResult, but used to do initial filtering.
Args:
test_name: A string containing the name of a test.
Returns:
True if |self| could apply to a test named |test_name|, otherwise False.
"""
return self._comp(test_name)
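# A minimal matching sketch (test name and tags below are hypothetical): an
# expectation applies to a result when the test names match (with wildcard
# expansion) and the expectation's tags are a subset of the result's tags, e.g.
#   e = BaseExpectation('foo/bar*', ['win'], ['Failure'])
#   r = BaseResult('foo/bar_test', ['win', 'x86'], 'Failure', 'step_name', '1234')
#   e.AppliesToResult(r)  # -> True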
class BaseResult(object):
"""Container for a test result.
Contains the minimal amount of data necessary to describe/identify a result
from ResultDB for the purposes of the unexpected pass finder.
"""
def __init__(self, test, tags, actual_result, step, build_id):
"""
Args:
test: A string containing the name of the test.
tags: An iterable containing the typ tags for the result.
actual_result: The actual result of the test as a string.
step: A string containing the name of the step on the builder.
build_id: A string containing the Buildbucket ID for the build this result
came from.
"""
self.test = test
self.tags = frozenset(tags)
self.actual_result = actual_result
self.step = step
self.build_id = build_id
def __eq__(self, other):
return (isinstance(other, BaseResult) and self.test == other.test
and self.tags == other.tags
and self.actual_result == other.actual_result
and self.step == other.step and self.build_id == other.build_id)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash(
(self.test, self.tags, self.actual_result, self.step, self.build_id))
class BaseBuildStats(object):
"""Container for keeping track of a builder's pass/fail stats."""
def __init__(self):
self.passed_builds = 0
self.total_builds = 0
self.failure_links = frozenset()
@property
def failed_builds(self):
return self.total_builds - self.passed_builds
@property
def did_fully_pass(self):
return self.passed_builds == self.total_builds
@property
def did_never_pass(self):
return self.failed_builds == self.total_builds
def AddPassedBuild(self):
self.passed_builds += 1
self.total_builds += 1
def AddFailedBuild(self, build_id):
self.total_builds += 1
build_link = BuildLinkFromBuildId(build_id)
self.failure_links = frozenset([build_link]) | self.failure_links
def GetStatsAsString(self):
return '(%d/%d passed)' % (self.passed_builds, self.total_builds)
def NeverNeededExpectation(self, expectation): # pylint:disable=unused-argument
"""Returns whether the results tallied in |self| never needed |expectation|.
Args:
expectation: An Expectation object that |stats| is located under.
Returns:
True if all the results tallied in |self| would have passed without
|expectation| being present. Otherwise, False.
"""
return self.did_fully_pass
def AlwaysNeededExpectation(self, expectation): # pylint:disable=unused-argument
"""Returns whether the results tallied in |self| always needed |expectation.
Args:
expectation: An Expectation object that |stats| is located under.
Returns:
True if all the results tallied in |self| would have failed without
|expectation| being present. Otherwise, False.
"""
return self.did_never_pass
def __eq__(self, other):
return (isinstance(other, BuildStats)
and self.passed_builds == other.passed_builds
and self.total_builds == other.total_builds
and self.failure_links == other.failure_links)
def __ne__(self, other):
return not self.__eq__(other)
def BuildLinkFromBuildId(build_id):
return 'http://ci.chromium.org/b/%s' % build_id
# These explicit overrides could likely be replaced by using regular dicts with
# type hinting in Python 3. Based on https://stackoverflow.com/a/2588648, this
# should cover all cases where the dict can be modified.
class BaseTypedMap(dict):
"""A base class for typed dictionaries.
Any child classes that override __setitem__ will have any modifications to the
dictionary go through the type checking in __setitem__.
"""
def __init__(self, *args, **kwargs): # pylint:disable=super-init-not-called
self.update(*args, **kwargs)
def update(self, *args, **kwargs):
if args:
assert len(args) == 1
other = dict(args[0])
for k, v in other.items():
self[k] = v
for k, v in kwargs.items():
self[k] = v
def setdefault(self, key, value=None):
if key not in self:
self[key] = value
return self[key]
def _value_type(self):
raise NotImplementedError()
def IterToValueType(self, value_type):
"""Recursively iterates over contents until |value_type| is found.
Used to get rid of nested loops, instead using a single loop that
automatically iterates through all the contents at a certain depth.
Args:
value_type: The type to recurse to and then iterate over. For example,
"BuilderStepMap" would result in iterating over the BuilderStepMap
values, meaning that the returned generator would create tuples in the
form (test_name, expectation, builder_map).
Returns:
A generator that yields tuples. The length and content of the tuples will
vary depending on |value_type|. For example, using "BuilderStepMap" would
result in tuples of the form (test_name, expectation, builder_map), while
"BuildStats" would result in (test_name, expectation, builder_name,
step_name, build_stats).
"""
if self._value_type() == value_type:
for k, v in self.items():
yield k, v
else:
for k, v in self.items():
for nested_value in v.IterToValueType(value_type):
yield (k, ) + nested_value
def Merge(self, other_map, reference_map=None):
"""Merges |other_map| into self.
Args:
other_map: A BaseTypedMap whose contents will be merged into self.
reference_map: A dict containing the information that was originally in
self. Used for ensuring that a single expectation/builder/step
combination is only ever updated once. If None, a copy of self will be
used.
"""
assert isinstance(other_map, self.__class__)
# We should only ever encounter a single updated BuildStats for an
# expectation/builder/step combination. Use the reference map to determine
# if a particular BuildStats has already been updated or not.
reference_map = reference_map or copy.deepcopy(self)
for key, value in other_map.items():
if key not in self:
self[key] = value
else:
if isinstance(value, dict):
self[key].Merge(value, reference_map.get(key, {}))
else:
assert isinstance(value, BuildStats)
# Ensure we haven't updated this BuildStats already. If the reference
# map doesn't have a corresponding BuildStats, then base_map shouldn't
# have initially either, and thus it would have been added before
# reaching this point. Otherwise, the two values must match, meaning
# that base_map's BuildStats hasn't been updated yet.
reference_stats = reference_map.get(key, None)
assert reference_stats is not None
assert reference_stats == self[key]
self[key] = value
class BaseTestExpectationMap(BaseTypedMap):
"""Typed map for string types -> ExpectationBuilderMap.
This results in a dict in the following format:
{
expectation_file1 (str): {
expectation1 (data_types.Expectation): {
builder_name1 (str): {
step_name1 (str): stats1 (data_types.BuildStats),
step_name2 (str): stats2 (data_types.BuildStats),
...
},
builder_name2 (str): { ... },
},
expectation2 (data_types.Expectation): { ... },
...
},
expectation_file2 (str): { ... },
...
}
"""
def __setitem__(self, key, value):
assert IsStringType(key)
assert isinstance(value, ExpectationBuilderMap)
super(BaseTestExpectationMap, self).__setitem__(key, value)
def _value_type(self):
return ExpectationBuilderMap
def IterBuilderStepMaps(self):
"""Iterates over all BuilderStepMaps contained in the map.
Returns:
A generator yielding tuples in the form (expectation_file (str),
expectation (Expectation), builder_map (BuilderStepMap))
"""
return self.IterToValueType(BuilderStepMap)
def AddResultList(self, builder, results, expectation_files=None):
"""Adds |results| to |self|.
Args:
builder: A string containing the builder |results| came from. Should be
prefixed with something to distinguish between identically named CI
and try builders.
results: A list of data_types.Result objects corresponding to the ResultDB
data queried for |builder|.
expectation_files: An iterable of expectation file names that these
results could possibly apply to. If None, then expectations from all
known expectation files will be used.
Returns:
A list of data_types.Result objects who did not have a matching
expectation in |self|.
"""
failure_results = set()
pass_results = set()
unmatched_results = []
for r in results:
if r.actual_result == 'Pass':
pass_results.add(r)
else:
failure_results.add(r)
# Remove any cases of failure -> pass from the passing set. If a test is
# flaky, we get both pass and failure results for it, so we need to remove
# the any cases of a pass result having a corresponding, earlier failure
# result.
modified_failing_retry_results = set()
for r in failure_results:
modified_failing_retry_results.add(
Result(r.test, r.tags, 'Pass', r.step, r.build_id))
pass_results -= modified_failing_retry_results
# Group identically named results together so we reduce the number of
# comparisons we have to make.
all_results = pass_results | failure_results
grouped_results = collections.defaultdict(list)
for r in all_results:
grouped_results[r.test].append(r)
matched_results = self._AddGroupedResults(grouped_results, builder,
expectation_files)
unmatched_results = list(all_results - matched_results)
return unmatched_results
def _AddGroupedResults(self, grouped_results, builder, expectation_files):
"""Adds all results in |grouped_results| to |self|.
Args:
grouped_results: A dict mapping test name (str) to a list of
data_types.Result objects for that test.
builder: A string containing the name of the builder |grouped_results|
came from.
expectation_files: An iterable of expectation file names that these
results could possibly apply to. If None, then expectations from all
known expectation files will be used.
Returns:
A set of data_types.Result objects that had at least one matching
expectation.
"""
matched_results = set()
for test_name, result_list in grouped_results.items():
for ef, expectation_map in self.items():
if expectation_files is not None and ef not in expectation_files:
continue
for expectation, builder_map in expectation_map.items():
if not expectation.MaybeAppliesToTest(test_name):
continue
for r in result_list:
if expectation.AppliesToResult(r):
matched_results.add(r)
step_map = builder_map.setdefault(builder, StepBuildStatsMap())
stats = step_map.setdefault(r.step, BuildStats())
self._AddSingleResult(r, stats)
return matched_results
def _AddSingleResult(self, result, stats):
"""Adds |result| to |self|.
Args:
result: A data_types.Result object to add.
stats: A data_types.BuildStats object to add the result to.
"""
if result.actual_result == 'Pass':
stats.AddPassedBuild()
else:
stats.AddFailedBuild(result.build_id)
def SplitByStaleness(self):
"""Separates stored data based on expectation staleness.
Returns:
Three TestExpectationMaps (stale_dict, semi_stale_dict, active_dict). All
three combined contain the information of |self|. |stale_dict| contains
      entries for expectations that are no longer helpful,
|semi_stale_dict| contains entries for expectations that might be
removable or modifiable, but have at least one failed test run.
|active_dict| contains entries for expectations that are preventing
failures on all builders they're active on, and thus shouldn't be removed.
"""
stale_dict = TestExpectationMap()
semi_stale_dict = TestExpectationMap()
active_dict = TestExpectationMap()
# This initially looks like a good target for using
# TestExpectationMap's iterators since there are many nested loops.
# However, we need to reset state in different loops, and the alternative of
# keeping all the state outside the loop and resetting under certain
# conditions ends up being less readable than just using nested loops.
for expectation_file, expectation_map in self.items():
for expectation, builder_map in expectation_map.items():
# A temporary map to hold data so we can later determine whether an
# expectation is stale, semi-stale, or active.
tmp_map = {
FULL_PASS: BuilderStepMap(),
NEVER_PASS: BuilderStepMap(),
PARTIAL_PASS: BuilderStepMap(),
}
split_stats_map = builder_map.SplitBuildStatsByPass(expectation)
for builder_name, (fully_passed, never_passed,
partially_passed) in split_stats_map.items():
if fully_passed:
tmp_map[FULL_PASS][builder_name] = fully_passed
if never_passed:
tmp_map[NEVER_PASS][builder_name] = never_passed
if partially_passed:
tmp_map[PARTIAL_PASS][builder_name] = partially_passed
def _CopyPassesIntoBuilderMap(builder_map, pass_types):
for pt in pass_types:
for builder, steps in tmp_map[pt].items():
builder_map.setdefault(builder, StepBuildStatsMap()).update(steps)
# Handle the case of a stale expectation.
if not (tmp_map[NEVER_PASS] or tmp_map[PARTIAL_PASS]):
builder_map = stale_dict.setdefault(
expectation_file,
ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
_CopyPassesIntoBuilderMap(builder_map, [FULL_PASS])
# Handle the case of an active expectation.
elif not tmp_map[FULL_PASS]:
builder_map = active_dict.setdefault(
expectation_file,
ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
_CopyPassesIntoBuilderMap(builder_map, [NEVER_PASS, PARTIAL_PASS])
# Handle the case of a semi-stale expectation.
else:
# TODO(crbug.com/998329): Sort by pass percentage so it's easier to
# find problematic builders without highlighting.
builder_map = semi_stale_dict.setdefault(
expectation_file,
ExpectationBuilderMap()).setdefault(expectation, BuilderStepMap())
_CopyPassesIntoBuilderMap(builder_map,
[FULL_PASS, PARTIAL_PASS, NEVER_PASS])
return stale_dict, semi_stale_dict, active_dict
def FilterOutUnusedExpectations(self):
"""Filters out any unused Expectations from stored data.
An Expectation is considered unused if its corresponding dictionary is
empty. If removing Expectations results in a top-level test key having an
empty dictionary, that test entry will also be removed.
Returns:
A dict from expectation file name (str) to set of unused expectations
(str) from that file.
"""
logging.info('Filtering out unused expectations')
unused = collections.defaultdict(list)
unused_count = 0
for (expectation_file, expectation,
builder_map) in self.IterBuilderStepMaps():
if not builder_map:
unused[expectation_file].append(expectation)
unused_count += 1
for expectation_file, expectations in unused.items():
for e in expectations:
del self[expectation_file][e]
logging.debug('Found %d unused expectations', unused_count)
empty_files = []
for expectation_file, expectation_map in self.items():
if not expectation_map:
empty_files.append(expectation_file)
for empty in empty_files:
del self[empty]
logging.debug('Found %d empty files: %s', len(empty_files), empty_files)
return unused
class ExpectationBuilderMap(BaseTypedMap):
"""Typed map for Expectation -> BuilderStepMap."""
def __setitem__(self, key, value):
assert isinstance(key, BaseExpectation)
assert isinstance(value, self._value_type())
super(ExpectationBuilderMap, self).__setitem__(key, value)
def _value_type(self):
return BuilderStepMap
class BuilderStepMap(BaseTypedMap):
"""Typed map for string types -> StepBuildStatsMap."""
def __setitem__(self, key, value):
assert IsStringType(key)
assert isinstance(value, self._value_type())
super(BuilderStepMap, self).__setitem__(key, value)
def _value_type(self):
return StepBuildStatsMap
def SplitBuildStatsByPass(self, expectation):
"""Splits the underlying BuildStats data by passing-ness.
Args:
expectation: The Expectation that this BuilderStepMap is located under.
Returns:
A dict mapping builder name to a tuple (fully_passed, never_passed,
partially_passed). Each *_passed is a StepBuildStatsMap containing data
for the steps that either fully passed on all builds, never passed on any
builds, or passed some of the time.
"""
retval = {}
for builder_name, step_map in self.items():
fully_passed = StepBuildStatsMap()
never_passed = StepBuildStatsMap()
partially_passed = StepBuildStatsMap()
for step_name, stats in step_map.items():
if stats.NeverNeededExpectation(expectation):
assert step_name not in fully_passed
fully_passed[step_name] = stats
elif stats.AlwaysNeededExpectation(expectation):
assert step_name not in never_passed
never_passed[step_name] = stats
else:
assert step_name not in partially_passed
partially_passed[step_name] = stats
retval[builder_name] = (fully_passed, never_passed, partially_passed)
return retval
def IterBuildStats(self):
"""Iterates over all BuildStats contained in the map.
Returns:
A generator yielding tuples in the form (builder_name (str), step_name
(str), build_stats (BuildStats)).
"""
return self.IterToValueType(BuildStats)
class StepBuildStatsMap(BaseTypedMap):
"""Typed map for string types -> BuildStats"""
def __setitem__(self, key, value):
assert IsStringType(key)
assert isinstance(value, self._value_type())
super(StepBuildStatsMap, self).__setitem__(key, value)
def _value_type(self):
return BuildStats
def IsStringType(s):
return isinstance(s, six.string_types)
Expectation = BaseExpectation
Result = BaseResult
BuildStats = BaseBuildStats
TestExpectationMap = BaseTestExpectationMap
| nwjs/chromium.src | testing/unexpected_passes_common/data_types.py | Python | bsd-3-clause | 22,630 |
"""
42. Storing files according to a custom storage system
``FileField`` and its variations can take a ``storage`` argument to specify how
and where files should be stored.
"""
import random
import tempfile
from django.db import models
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
temp_storage_location = tempfile.mkdtemp()
temp_storage = FileSystemStorage(location=temp_storage_location)
class Storage(models.Model):
def custom_upload_to(self, filename):
return 'foo'
def random_upload_to(self, filename):
# This returns a different result each time,
# to make sure it only gets called once.
return '%s/%s' % (random.randint(100, 999), filename)
normal = models.FileField(storage=temp_storage, upload_to='tests')
custom = models.FileField(storage=temp_storage, upload_to=custom_upload_to)
random = models.FileField(storage=temp_storage, upload_to=random_upload_to)
default = models.FileField(storage=temp_storage, upload_to='tests', default='tests/default.txt')
| skevy/django | tests/modeltests/files/models.py | Python | bsd-3-clause | 1,085 |
#!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://dl.google.com/dl/android/maven2/com/google/firebase/firebase-messaging/21.0.1/firebase-messaging-21.0.1.aar'
_FILE_NAME = 'firebase-messaging-21.0.1.aar'
_FILE_VERSION = '21.0.1'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
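# CLI used by the 3pp recipe: the "latest" subcommand prints the pinned
# version, while "get_url" prints a JSON partial manifest for the pinned
# artifact (_3PP_VERSION is read from the environment, but the URL above is
# fixed).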
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
| nwjs/chromium.src | third_party/android_deps/libs/com_google_firebase_firebase_messaging/3pp/fetch.py | Python | bsd-3-clause | 1,389 |
from __future__ import absolute_import
import numpy.linalg as npla
from .numpy_wrapper import wrap_namespace, dot
from . import numpy_wrapper as anp
wrap_namespace(npla.__dict__, globals())
def atleast_2d_col(x):
# Promotes a 1D array into a column rather than a row.
return x if x.ndim > 1 else x[:,None]
# Some formulas are from
# "An extended collection of matrix derivative results
# for forward and reverse mode algorithmic differentiation"
# by Mike Giles
# https://people.maths.ox.ac.uk/gilesm/files/NA-08-01.pdf
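# Each defgrad call registers a vector-Jacobian product: given the output
# `ans` and the inputs, it returns a function mapping the upstream gradient g
# to the gradient w.r.t. the chosen argument (argument 0 unless an explicit
# argnum is given).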
inv.defgrad( lambda ans, x : lambda g : -dot(dot(ans.T, g), ans.T))
det.defgrad( lambda ans, x : lambda g : g * ans * inv(x).T)
slogdet.defgrad(lambda ans, x : lambda g : g[1] * inv(x).T)
solve.defgrad( lambda ans, a, b : lambda g : -dot(atleast_2d_col(solve(a.T, g)),
atleast_2d_col(ans).T))
solve.defgrad(lambda ans, a, b : lambda g : solve(a.T, g), argnum=1)
norm.defgrad( lambda ans, a : lambda g : dot(g, a/ans))
def make_grad_eigh(ans, x, UPLO='L'):
"""Gradient for eigenvalues and vectors of a symmetric matrix."""
N = x.shape[0]
w, v = ans # Eigenvalues, eigenvectors.
def eigh_grad(g):
wg, vg = g # Gradient w.r.t. eigenvalues, eigenvectors.
w_repeated = anp.repeat(w[:, anp.newaxis], N, 1)
off_diag = anp.ones((N, N)) - anp.eye(N)
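        # F[i, j] = 1 / (w[j] - w[i]) off the diagonal and 0 on the diagonal;
        # the added eye(N) in the denominator only prevents division by zero
        # on the diagonal, where off_diag zeroes the entry anyway.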
F = off_diag / (w_repeated.T - w_repeated + anp.eye(N))
dx = dot(v * wg + dot(v, F * dot(v.T, vg)), v.T)
if UPLO == 'U': # Reflect to account for symmetry.
return anp.triu(dx) + anp.tril(dx, -1).T
else:
return anp.tril(dx) + anp.triu(dx, 1).T
return eigh_grad
eigh.defgrad(make_grad_eigh)
| t1m0thy/autograd | autograd/numpy/linalg.py | Python | mit | 1,757 |
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields, get_model
class BarcodeIssueLine(Model):
_name = "barcode.issue.line"
_transient = True
_fields = {
"wizard_id": fields.Many2One("barcode.issue", "Wizard", required=True, on_delete="cascade"),
"product_id": fields.Many2One("product", "Product", required=True),
"qty": fields.Decimal("Qty", required=True),
"uom_id": fields.Many2One("uom", "UoM", required=True),
"qty2": fields.Decimal("Secondary Qty"),
"lot_id": fields.Many2One("stock.lot", "Lot / Serial Number"),
"container_from_id": fields.Many2One("stock.container", "From Container"),
"container_to_id": fields.Many2One("stock.container", "To Container"),
"location_from_id": fields.Many2One("stock.location", "From Location"),
"location_to_id": fields.Many2One("stock.location", "To Location"),
"related_id": fields.Reference([["sale.order", "Sales Order"], ["purchase.order", "Purchase Order"]], "Related To"),
"qty2": fields.Decimal("Qty2"),
"notes": fields.Text("Notes"),
}
BarcodeIssueLine.register()
| sidzan/netforce | netforce_stock/netforce_stock/models/barcode_issue_line.py | Python | mit | 2,232 |
from txaws.server.method import Method
from txaws.server.tests.fixtures import method
@method
class TestMethod(Method):
pass
| lud4ik/txAWS | txaws/server/tests/fixtures/importerror/amodule.py | Python | mit | 131 |
import clr
clr.AddReference('RevitAPI')
from Autodesk.Revit.DB import *
clr.AddReference("RevitServices")
import RevitServices
from RevitServices.Persistence import DocumentManager
from RevitServices.Transactions import TransactionManager
doc = DocumentManager.Instance.CurrentDBDocument
faminstances = UnwrapElement(IN[0])
booleans = []
TransactionManager.Instance.EnsureInTransaction(doc)
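# Flip the From/To room assignment of each family instance inside a single
# transaction, recording per-item success or failure.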
for item in faminstances:
try:
item.FlipFromToRoom()
booleans.append(True)
except:
booleans.append(False)
TransactionManager.Instance.TransactionTaskDone()
OUT = (faminstances,booleans) | andydandy74/ClockworkForDynamo | nodes/2.x/python/FamilyInstance.FlipFromToRoom.py | Python | mit | 612 |
import sys
from healthcareai.common.healthcareai_error import HealthcareAIError
def validate_pyodbc_is_loaded():
""" Simple check that alerts user if they are do not have pyodbc installed, which is not a requirement. """
if 'pyodbc' not in sys.modules:
raise HealthcareAIError('Using this function requires installation of pyodbc.')
def validate_sqlite3_is_loaded():
""" Simple check that alerts user if they are do not have sqlite installed, which is not a requirement. """
if 'sqlite3' not in sys.modules:
raise HealthcareAIError('Using this function requires installation of sqlite3.')
| HealthCatalystSLC/healthcareai-py | healthcareai/common/database_library_validators.py | Python | mit | 626 |
# 2014-12-18
# build by qianqians
# deletenonespacelstrip
def deleteNoneSpacelstrip(str):
    # Strip all leading newlines, tabs and NUL characters, then any leading
    # spaces, without relying on CPython returning the same object from lstrip.
    return str.lstrip('\n\t\0').lstrip(' ') | yinchunlong/abelkhan-1 | juggle/parser/deletenonespacelstrip.py | Python | mit | 843 |
'''OpenGL extension ARB.fragment_program
This module customises the behaviour of the
OpenGL.raw.GL.ARB.fragment_program to provide a more
Python-friendly API
Overview (from the spec)
Unextended OpenGL mandates a certain set of configurable per-
fragment computations defining texture application, texture
environment, color sum, and fog operations. Several extensions have
added further per-fragment computations to OpenGL. For example,
extensions have defined new texture environment capabilities
(ARB_texture_env_add, ARB_texture_env_combine, ARB_texture_env_dot3,
ARB_texture_env_crossbar), per-fragment depth comparisons
(ARB_depth_texture, ARB_shadow, ARB_shadow_ambient,
EXT_shadow_funcs), per-fragment lighting (EXT_fragment_lighting,
EXT_light_texture), and environment mapped bump mapping
(ATI_envmap_bumpmap).
Each such extension adds a small set of relatively inflexible per-
fragment computations.
This inflexibility is in contrast to the typical flexibility
provided by the underlying programmable floating point engines
(whether micro-coded fragment engines, DSPs, or CPUs) that are
traditionally used to implement OpenGL's texturing computations.
The purpose of this extension is to expose to the OpenGL application
writer a significant degree of per-fragment programmability for
computing fragment parameters.
For the purposes of discussing this extension, a fragment program is
a sequence of floating-point 4-component vector operations that
determines how a set of program parameters (not specific to an
individual fragment) and an input set of per-fragment parameters are
transformed to a set of per-fragment result parameters.
The per-fragment computations for standard OpenGL given a particular
set of texture and fog application modes (along with any state for
extensions defining per-fragment computations) is, in essence, a
fragment program. However, the sequence of operations is defined
implicitly by the current OpenGL state settings rather than defined
explicitly as a sequence of instructions.
This extension provides an explicit mechanism for defining fragment
program instruction sequences for application-defined fragment
programs. In order to define such fragment programs, this extension
defines a fragment programming model including a floating-point
4-component vector instruction set and a relatively large set of
floating-point 4-component registers.
The extension's fragment programming model is designed for efficient
hardware implementation and to support a wide variety of fragment
programs. By design, the entire set of existing fragment programs
defined by existing OpenGL per-fragment computation extensions can
be implemented using the extension's fragment programming model.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/fragment_program.txt
'''
from OpenGL import platform, constants, constant, arrays
from OpenGL import extensions, wrapper
from OpenGL.GL import glget
import ctypes
from OpenGL.raw.GL.ARB.fragment_program import *
### END AUTOGENERATED SECTION
from OpenGL.GL import glget
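# Register the enable flag with PyOpenGL's glget machinery so that glGet*
# queries for GL_FRAGMENT_PROGRAM_ARB know to return a single value.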
glget.addGLGetConstant( GL_FRAGMENT_PROGRAM_ARB, (1,) ) | D4wN/brickv | src/build_data/windows/OpenGL/GL/ARB/fragment_program.py | Python | gpl-2.0 | 3,247 |
import base64
import json
from pcs_test.tools.command_env.mock_node_communicator import (
place_multinode_call,
)
class FilesShortcuts:
def __init__(self, calls):
self.__calls = calls
def put_files(
self,
node_labels=None,
pcmk_authkey=None,
corosync_authkey=None,
corosync_conf=None,
pcs_disaster_recovery_conf=None,
pcs_settings_conf=None,
communication_list=None,
name="http.files.put_files",
):
# pylint: disable=too-many-arguments
"""
        Create a call for distributing the files to the nodes.
node_labels list -- create success responses from these nodes
pcmk_authkey bytes -- content of pacemaker authkey file
corosync_authkey bytes -- content of corosync authkey file
corosync_conf string -- content of corosync.conf
pcs_disaster_recovery_conf string -- content of pcs DR config
pcs_settings_conf string -- content of pcs_settings.conf
communication_list list -- create custom responses
name string -- the key of this call
"""
input_data = {}
output_data = {}
written_output_dict = dict(
code="written",
message="",
)
if pcmk_authkey:
file_id = "pacemaker_remote authkey"
input_data[file_id] = dict(
data=base64.b64encode(pcmk_authkey).decode("utf-8"),
type="pcmk_remote_authkey",
rewrite_existing=True,
)
output_data[file_id] = written_output_dict
if corosync_authkey:
file_id = "corosync authkey"
input_data[file_id] = dict(
data=base64.b64encode(corosync_authkey).decode("utf-8"),
type="corosync_authkey",
rewrite_existing=True,
)
output_data[file_id] = written_output_dict
if corosync_conf:
file_id = "corosync.conf"
input_data[file_id] = dict(
data=corosync_conf,
type="corosync_conf",
)
output_data[file_id] = written_output_dict
if pcs_disaster_recovery_conf:
file_id = "disaster-recovery config"
input_data[file_id] = dict(
data=base64.b64encode(pcs_disaster_recovery_conf).decode(
"utf-8"
),
type="pcs_disaster_recovery_conf",
rewrite_existing=True,
)
output_data[file_id] = written_output_dict
if pcs_settings_conf:
file_id = "pcs_settings.conf"
input_data[file_id] = dict(
data=pcs_settings_conf,
type="pcs_settings_conf",
rewrite_existing=True,
)
output_data[file_id] = written_output_dict
place_multinode_call(
self.__calls,
name,
node_labels,
communication_list,
action="remote/put_file",
param_list=[("data_json", json.dumps(input_data))],
output=json.dumps(dict(files=output_data)),
)
def remove_files(
self,
node_labels=None,
pcsd_settings=False,
pcs_disaster_recovery_conf=False,
communication_list=None,
name="http.files.remove_files",
):
"""
Create a call for removing the files on the nodes.
node_labels list -- create success responses from these nodes
pcsd_settings bool -- if True, remove file pcsd_settings
pcs_disaster_recovery_conf bool -- if True, remove pcs DR config
communication_list list -- create custom responses
name string -- the key of this call
"""
input_data = {}
output_data = {}
if pcsd_settings:
file_id = "pcsd settings"
input_data[file_id] = dict(type="pcsd_settings")
output_data[file_id] = dict(
code="deleted",
message="",
)
if pcs_disaster_recovery_conf:
file_id = "pcs disaster-recovery config"
input_data[file_id] = dict(type="pcs_disaster_recovery_conf")
output_data[file_id] = dict(
code="deleted",
message="",
)
place_multinode_call(
self.__calls,
name,
node_labels,
communication_list,
action="remote/remove_file",
param_list=[("data_json", json.dumps(input_data))],
output=json.dumps(dict(files=output_data)),
)
| feist/pcs | pcs_test/tools/command_env/config_http_files.py | Python | gpl-2.0 | 4,688 |
# orm/exc.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""SQLAlchemy ORM exceptions."""
from .. import exc as sa_exc
from .. import util
NO_STATE = (AttributeError, KeyError)
"""Exception types that may be raised by instrumentation implementations."""
class StaleDataError(sa_exc.SQLAlchemyError):
"""An operation encountered database state that is unaccounted for.
Conditions which cause this to happen include:
* A flush may have attempted to update or delete rows
and an unexpected number of rows were matched during
the UPDATE or DELETE statement. Note that when
version_id_col is used, rows in UPDATE or DELETE statements
are also matched against the current known version
identifier.
* A mapped object with version_id_col was refreshed,
and the version number coming back from the database does
not match that of the object itself.
    * An object is detached from its parent object, however
the object was previously attached to a different parent
identity which was garbage collected, and a decision
      cannot be made whether the new parent was really the most
recent "parent".
"""
ConcurrentModificationError = StaleDataError
class FlushError(sa_exc.SQLAlchemyError):
"""A invalid condition was detected during flush()."""
class UnmappedError(sa_exc.InvalidRequestError):
"""Base for exceptions that involve expected mappings not present."""
class ObjectDereferencedError(sa_exc.SQLAlchemyError):
"""An operation cannot complete due to an object being garbage
collected.
"""
class DetachedInstanceError(sa_exc.SQLAlchemyError):
"""An attempt to access unloaded attributes on a
mapped instance that is detached."""
code = "bhk3"
class UnmappedInstanceError(UnmappedError):
"""An mapping operation was requested for an unknown instance."""
@util.dependencies("sqlalchemy.orm.base")
def __init__(self, base, obj, msg=None):
if not msg:
try:
base.class_mapper(type(obj))
name = _safe_cls_name(type(obj))
msg = (
"Class %r is mapped, but this instance lacks "
"instrumentation. This occurs when the instance "
"is created before sqlalchemy.orm.mapper(%s) "
"was called." % (name, name)
)
except UnmappedClassError:
msg = _default_unmapped(type(obj))
if isinstance(obj, type):
msg += (
"; was a class (%s) supplied where an instance was "
"required?" % _safe_cls_name(obj)
)
UnmappedError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class UnmappedClassError(UnmappedError):
"""An mapping operation was requested for an unknown class."""
def __init__(self, cls, msg=None):
if not msg:
msg = _default_unmapped(cls)
UnmappedError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class ObjectDeletedError(sa_exc.InvalidRequestError):
"""A refresh operation failed to retrieve the database
row corresponding to an object's known primary key identity.
A refresh operation proceeds when an expired attribute is
accessed on an object, or when :meth:`_query.Query.get` is
used to retrieve an object which is, upon retrieval, detected
as expired. A SELECT is emitted for the target row
based on primary key; if no row is returned, this
exception is raised.
The true meaning of this exception is simply that
no row exists for the primary key identifier associated
with a persistent object. The row may have been
deleted, or in some cases the primary key updated
to a new value, outside of the ORM's management of the target
object.
"""
@util.dependencies("sqlalchemy.orm.base")
def __init__(self, base, state, msg=None):
if not msg:
msg = (
"Instance '%s' has been deleted, or its "
"row is otherwise not present." % base.state_str(state)
)
sa_exc.InvalidRequestError.__init__(self, msg)
def __reduce__(self):
return self.__class__, (None, self.args[0])
class UnmappedColumnError(sa_exc.InvalidRequestError):
"""Mapping operation was requested on an unknown column."""
class NoResultFound(sa_exc.InvalidRequestError):
"""A database result was required but none was found."""
class MultipleResultsFound(sa_exc.InvalidRequestError):
"""A single database result was required but more than one were found."""
class LoaderStrategyException(sa_exc.InvalidRequestError):
"""A loader strategy for an attribute does not exist."""
def __init__(
self,
applied_to_property_type,
requesting_property,
applies_to,
actual_strategy_type,
strategy_key,
):
if actual_strategy_type is None:
sa_exc.InvalidRequestError.__init__(
self,
"Can't find strategy %s for %s"
% (strategy_key, requesting_property),
)
else:
sa_exc.InvalidRequestError.__init__(
self,
'Can\'t apply "%s" strategy to property "%s", '
'which is a "%s"; this loader strategy is intended '
'to be used with a "%s".'
% (
util.clsname_as_plain_name(actual_strategy_type),
requesting_property,
util.clsname_as_plain_name(applied_to_property_type),
util.clsname_as_plain_name(applies_to),
),
)
def _safe_cls_name(cls):
try:
cls_name = ".".join((cls.__module__, cls.__name__))
except AttributeError:
cls_name = getattr(cls, "__name__", None)
if cls_name is None:
cls_name = repr(cls)
return cls_name
@util.dependencies("sqlalchemy.orm.base")
def _default_unmapped(base, cls):
try:
mappers = base.manager_of_class(cls).mappers
except NO_STATE:
mappers = {}
except TypeError:
mappers = {}
name = _safe_cls_name(cls)
if not mappers:
return "Class '%s' is not mapped" % name
| gltn/stdm | stdm/third_party/sqlalchemy/orm/exc.py | Python | gpl-2.0 | 6,616 |
# TODO: this must be a stub!
| vg/netsukuku | pyntk/ntk/sim/network/route.py | Python | gpl-2.0 | 29 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
script.skin.helper.service
Helper service and scripts for Kodi skins
mainmodule.py
All script methods provided by the addon
'''
import xbmc
import xbmcvfs
import xbmcgui
import xbmcaddon
from skinsettings import SkinSettings
from simplecache import SimpleCache
from utils import log_msg, KODI_VERSION
from utils import log_exception, get_current_content_type, ADDON_ID, recursive_delete_dir
from dialogselect import DialogSelect
from xml.dom.minidom import parse
from metadatautils import KodiDb, process_method_on_list
import urlparse
import sys
class MainModule:
'''mainmodule provides the script methods for the skinhelper addon'''
def __init__(self):
'''Initialization and main code run'''
self.win = xbmcgui.Window(10000)
self.addon = xbmcaddon.Addon(ADDON_ID)
self.kodidb = KodiDb()
self.cache = SimpleCache()
self.params = self.get_params()
log_msg("MainModule called with parameters: %s" % self.params)
action = self.params.get("action", "")
# launch module for action provided by this script
try:
getattr(self, action)()
except AttributeError:
log_exception(__name__, "No such action: %s" % action)
except Exception as exc:
log_exception(__name__, exc)
finally:
xbmc.executebuiltin("dialog.Close(busydialog)")
# do cleanup
self.close()
def close(self):
'''Cleanup Kodi Cpython instances on exit'''
self.cache.close()
del self.win
del self.addon
del self.kodidb
log_msg("MainModule exited")
@classmethod
def get_params(self):
'''extract the params from the called script path'''
params = {}
for arg in sys.argv[1:]:
paramname = arg.split('=')[0]
paramvalue = arg.replace(paramname + "=", "")
paramname = paramname.lower()
if paramname == "action":
paramvalue = paramvalue.lower()
params[paramname] = paramvalue
return params
def deprecated_method(self, newaddon):
'''
used when one of the deprecated methods is called
print warning in log and call the external script with the same parameters
'''
action = self.params.get("action")
log_msg("Deprecated method: %s. Please call %s directly" % (action, newaddon), xbmc.LOGWARNING)
paramstring = ""
for key, value in self.params.iteritems():
paramstring += ",%s=%s" % (key, value)
if xbmc.getCondVisibility("System.HasAddon(%s)" % newaddon):
xbmc.executebuiltin("RunAddon(%s%s)" % (newaddon, paramstring))
else:
# trigger install of the addon
if KODI_VERSION > 16:
xbmc.executebuiltin("InstallAddon(%s)" % newaddon)
else:
xbmc.executebuiltin("RunPlugin(plugin://%s)" % newaddon)
@staticmethod
def musicsearch():
'''helper to go directly to music search dialog'''
xbmc.executebuiltin("ActivateWindow(Music)")
xbmc.executebuiltin("SendClick(8)")
def setview(self):
'''sets the selected viewmode for the container'''
xbmc.executebuiltin("ActivateWindow(busydialog)")
content_type = get_current_content_type()
if not content_type:
content_type = "files"
current_view = xbmc.getInfoLabel("Container.Viewmode").decode("utf-8")
view_id, view_label = self.selectview(content_type, current_view)
current_forced_view = xbmc.getInfoLabel("Skin.String(SkinHelper.ForcedViews.%s)" % content_type)
if view_id is not None:
# also store forced view
if (content_type and current_forced_view and current_forced_view != "None" and
xbmc.getCondVisibility("Skin.HasSetting(SkinHelper.ForcedViews.Enabled)")):
xbmc.executebuiltin("Skin.SetString(SkinHelper.ForcedViews.%s,%s)" % (content_type, view_id))
xbmc.executebuiltin("Skin.SetString(SkinHelper.ForcedViews.%s.label,%s)" % (content_type, view_label))
self.win.setProperty("SkinHelper.ForcedView", view_id)
if not xbmc.getCondVisibility("Control.HasFocus(%s)" % current_forced_view):
xbmc.sleep(100)
xbmc.executebuiltin("Container.SetViewMode(%s)" % view_id)
xbmc.executebuiltin("SetFocus(%s)" % view_id)
else:
self.win.clearProperty("SkinHelper.ForcedView")
# set view
xbmc.executebuiltin("Container.SetViewMode(%s)" % view_id)
def selectview(self, content_type="other", current_view=None, display_none=False):
'''reads skinfile with all views to present a dialog to choose from'''
cur_view_select_id = None
label = ""
all_views = []
if display_none:
listitem = xbmcgui.ListItem(label="None")
listitem.setProperty("id", "None")
all_views.append(listitem)
# read the special skin views file
views_file = xbmc.translatePath('special://skin/extras/views.xml').decode("utf-8")
if xbmcvfs.exists(views_file):
doc = parse(views_file)
listing = doc.documentElement.getElementsByTagName('view')
itemcount = 0
for view in listing:
label = xbmc.getLocalizedString(int(view.attributes['languageid'].nodeValue))
viewid = view.attributes['value'].nodeValue
mediatypes = view.attributes['type'].nodeValue.lower().split(",")
if label.lower() == current_view.lower() or viewid == current_view:
cur_view_select_id = itemcount
if display_none:
cur_view_select_id += 1
if (("all" in mediatypes or content_type.lower() in mediatypes) and
(not "!" + content_type.lower() in mediatypes) and not
xbmc.getCondVisibility("Skin.HasSetting(SkinHelper.view.Disabled.%s)" % viewid)):
image = "special://skin/extras/viewthumbs/%s.jpg" % viewid
listitem = xbmcgui.ListItem(label=label, iconImage=image)
listitem.setProperty("viewid", viewid)
listitem.setProperty("icon", image)
all_views.append(listitem)
itemcount += 1
dialog = DialogSelect("DialogSelect.xml", "", listing=all_views,
windowtitle=self.addon.getLocalizedString(32012), richlayout=True)
dialog.autofocus_id = cur_view_select_id
dialog.doModal()
result = dialog.result
del dialog
if result:
viewid = result.getProperty("viewid")
label = result.getLabel().decode("utf-8")
return (viewid, label)
else:
return (None, None)
# pylint: disable-msg=too-many-local-variables
def enableviews(self):
'''show select dialog to enable/disable views'''
all_views = []
views_file = xbmc.translatePath('special://skin/extras/views.xml').decode("utf-8")
richlayout = self.params.get("richlayout", "") == "true"
if xbmcvfs.exists(views_file):
doc = parse(views_file)
listing = doc.documentElement.getElementsByTagName('view')
for view in listing:
view_id = view.attributes['value'].nodeValue
label = xbmc.getLocalizedString(int(view.attributes['languageid'].nodeValue))
desc = label + " (" + str(view_id) + ")"
image = "special://skin/extras/viewthumbs/%s.jpg" % view_id
listitem = xbmcgui.ListItem(label=label, label2=desc, iconImage=image)
listitem.setProperty("viewid", view_id)
if not xbmc.getCondVisibility("Skin.HasSetting(SkinHelper.view.Disabled.%s)" % view_id):
listitem.select(selected=True)
excludefromdisable = False
try:
excludefromdisable = view.attributes['excludefromdisable'].nodeValue == "true"
except Exception:
pass
if not excludefromdisable:
all_views.append(listitem)
dialog = DialogSelect(
"DialogSelect.xml",
"",
listing=all_views,
windowtitle=self.addon.getLocalizedString(32013),
multiselect=True, richlayout=richlayout)
dialog.doModal()
result = dialog.result
del dialog
if result:
for item in result:
view_id = item.getProperty("viewid")
if item.isSelected():
# view is enabled
xbmc.executebuiltin("Skin.Reset(SkinHelper.view.Disabled.%s)" % view_id)
else:
# view is disabled
xbmc.executebuiltin("Skin.SetBool(SkinHelper.view.Disabled.%s)" % view_id)
# pylint: enable-msg=too-many-local-variables
def setforcedview(self):
'''helper that sets a forced view for a specific content type'''
content_type = self.params.get("contenttype")
if content_type:
current_view = xbmc.getInfoLabel("Skin.String(SkinHelper.ForcedViews.%s)" % content_type)
if not current_view:
current_view = "0"
view_id, view_label = self.selectview(content_type, current_view, True)
if view_id or view_label:
xbmc.executebuiltin("Skin.SetString(SkinHelper.ForcedViews.%s,%s)" % (content_type, view_id))
xbmc.executebuiltin("Skin.SetString(SkinHelper.ForcedViews.%s.label,%s)" % (content_type, view_label))
@staticmethod
def get_youtube_listing(searchquery):
'''get items from youtube plugin by query'''
lib_path = u"plugin://plugin.video.youtube/kodion/search/query/?q=%s" % searchquery
return KodiDb().files(lib_path)
def searchyoutube(self):
'''helper to search youtube for the given title'''
xbmc.executebuiltin("ActivateWindow(busydialog)")
title = self.params.get("title", "")
window_header = self.params.get("header", "")
results = []
for media in self.get_youtube_listing(title):
if not media["filetype"] == "directory":
label = media["label"]
label2 = media["plot"]
image = ""
if media.get('art'):
if media['art'].get('thumb'):
image = (media['art']['thumb'])
listitem = xbmcgui.ListItem(label=label, label2=label2, iconImage=image)
listitem.setProperty("path", media["file"])
results.append(listitem)
# finished lookup - display listing with results
xbmc.executebuiltin("dialog.Close(busydialog)")
dialog = DialogSelect("DialogSelect.xml", "", listing=results, windowtitle=window_header,
multiselect=False, richlayout=True)
dialog.doModal()
result = dialog.result
del dialog
if result:
if xbmc.getCondVisibility(
"Window.IsActive(script-skin_helper_service-CustomInfo.xml) | "
"Window.IsActive(movieinformation)"):
xbmc.executebuiltin("Dialog.Close(movieinformation)")
xbmc.executebuiltin("Dialog.Close(script-skin_helper_service-CustomInfo.xml)")
xbmc.sleep(1000)
xbmc.executebuiltin('PlayMedia("%s")' % result.getProperty("path"))
del result
def getcastmedia(self):
'''helper to show a dialog with all media for a specific actor'''
xbmc.executebuiltin("ActivateWindow(busydialog)")
name = self.params.get("name", "")
window_header = self.params.get("name", "")
results = []
items = self.kodidb.castmedia(name)
items = process_method_on_list(self.kodidb.prepare_listitem, items)
for item in items:
if item["file"].startswith("videodb://"):
item["file"] = "ActivateWindow(Videos,%s,return)" % item["file"]
else:
item["file"] = 'PlayMedia("%s")' % item["file"]
results.append(self.kodidb.create_listitem(item, False))
# finished lookup - display listing with results
xbmc.executebuiltin("dialog.Close(busydialog)")
dialog = DialogSelect("DialogSelect.xml", "", listing=results, windowtitle=window_header, richlayout=True)
dialog.doModal()
result = dialog.result
del dialog
if result:
while xbmc.getCondVisibility("System.HasModalDialog"):
xbmc.executebuiltin("Action(Back)")
xbmc.sleep(300)
xbmc.executebuiltin(result.getfilename())
del result
def setfocus(self):
'''helper to set focus on a list or control'''
control = self.params.get("control")
fallback = self.params.get("fallback")
position = self.params.get("position", "0")
relativeposition = self.params.get("relativeposition")
if relativeposition:
position = int(relativeposition) - 1
count = 0
if control:
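            # Poll until the control takes focus; after 20 tries, or if the
            # control is hidden or empty, focus |fallback| instead (when given).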
while not xbmc.getCondVisibility("Control.HasFocus(%s)" % control):
if xbmc.getCondVisibility("Window.IsActive(busydialog)"):
xbmc.sleep(150)
continue
elif count == 20 or (xbmc.getCondVisibility(
"!Control.IsVisible(%s) | "
"!IntegerGreaterThan(Container(%s).NumItems,0)" % (control, control))):
if fallback:
xbmc.executebuiltin("Control.SetFocus(%s)" % fallback)
break
else:
xbmc.executebuiltin("Control.SetFocus(%s,%s)" % (control, position))
xbmc.sleep(50)
count += 1
def setwidgetcontainer(self):
'''helper that reports the current selected widget container/control'''
controls = self.params.get("controls", "").split("-")
if controls:
xbmc.sleep(50)
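            # Poll up to ten times for the first visible, non-empty container
            # and publish its id as the SkinHelper.WidgetContainer window
            # property; clear the property if none is found.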
for i in range(10):
for control in controls:
if xbmc.getCondVisibility("Control.IsVisible(%s) + IntegerGreaterThan(Container(%s).NumItems,0)"
% (control, control)):
self.win.setProperty("SkinHelper.WidgetContainer", control)
return
xbmc.sleep(50)
self.win.clearProperty("SkinHelper.WidgetContainer")
def saveskinimage(self):
'''let the user select an image and save it to addon_data for easy backup'''
skinstring = self.params.get("skinstring", "")
allow_multi = self.params.get("multi", "") == "true"
header = self.params.get("header", "")
value = SkinSettings().save_skin_image(skinstring, allow_multi, header)
if value:
xbmc.executebuiltin("Skin.SetString(%s,%s)" % (skinstring.encode("utf-8"), value.encode("utf-8")))
@staticmethod
def checkskinsettings():
'''performs check of all default skin settings and labels'''
SkinSettings().correct_skin_settings()
def setskinsetting(self):
'''allows the user to set a skin setting with a select dialog'''
setting = self.params.get("setting", "")
org_id = self.params.get("id", "")
if "$" in org_id:
org_id = xbmc.getInfoLabel(org_id).decode("utf-8")
header = self.params.get("header", "")
SkinSettings().set_skin_setting(setting=setting, window_header=header, original_id=org_id)
def setskinconstant(self):
'''allows the user to set a skin constant with a select dialog'''
setting = self.params.get("setting", "")
value = self.params.get("value", "")
header = self.params.get("header", "")
SkinSettings().set_skin_constant(setting, header, value)
def setskinconstants(self):
'''allows the skinner to set multiple skin constants'''
settings = self.params.get("settings", "").split("|")
values = self.params.get("values", "").split("|")
SkinSettings().set_skin_constants(settings, values)
def setskinshortcutsproperty(self):
'''allows the user to make a setting for skinshortcuts using the special skinsettings dialogs'''
setting = self.params.get("setting", "")
prop = self.params.get("property", "")
header = self.params.get("header", "")
SkinSettings().set_skinshortcuts_property(setting, header, prop)
def togglekodisetting(self):
'''toggle kodi setting'''
settingname = self.params.get("setting", "")
cur_value = xbmc.getCondVisibility("system.getbool(%s)" % settingname)
if cur_value:
new_value = "false"
else:
new_value = "true"
xbmc.executeJSONRPC(
'{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue","params":{"setting":"%s","value":%s}}' %
(settingname, new_value))
def setkodisetting(self):
'''set kodi setting'''
settingname = self.params.get("setting", "")
value = self.params.get("value", "")
is_int = False
try:
valueint = int(value)
is_int = True
del valueint
except Exception:
pass
if value.lower() == "true":
value = 'true'
elif value.lower() == "false":
value = 'false'
elif is_int:
value = '"%s"' % value
xbmc.executeJSONRPC('{"jsonrpc":"2.0", "id":1, "method":"Settings.SetSettingValue",\
"params":{"setting":"%s","value":%s}}' % (settingname, value))
def playtrailer(self):
'''auto play windowed trailer inside video listing'''
if not xbmc.getCondVisibility("Player.HasMedia | Container.Scrolling | Container.OnNext | "
"Container.OnPrevious | !IsEmpty(Window(Home).Property(traileractionbusy))"):
self.win.setProperty("traileractionbusy", "traileractionbusy")
widget_container = self.params.get("widgetcontainer", "")
trailer_mode = self.params.get("mode", "").replace("auto_", "")
allow_youtube = self.params.get("youtube", "") == "true"
if not trailer_mode:
trailer_mode = "windowed"
if widget_container:
widget_container_prefix = "Container(%s)." % widget_container
else:
widget_container_prefix = ""
li_title = xbmc.getInfoLabel("%sListItem.Title" % widget_container_prefix).decode('utf-8')
li_trailer = xbmc.getInfoLabel("%sListItem.Trailer" % widget_container_prefix).decode('utf-8')
if not li_trailer and allow_youtube:
youtube_result = self.get_youtube_listing("%s Trailer" % li_title)
if youtube_result:
li_trailer = youtube_result[0].get("file")
# always wait a bit to prevent trailer start playing when we're scrolling the list
xbmc.Monitor().waitForAbort(3)
if li_trailer and (li_title == xbmc.getInfoLabel("%sListItem.Title"
% widget_container_prefix).decode('utf-8')):
if trailer_mode == "fullscreen" and li_trailer:
xbmc.executebuiltin('PlayMedia("%s")' % li_trailer)
else:
xbmc.executebuiltin('PlayMedia("%s",1)' % li_trailer)
self.win.setProperty("TrailerPlaying", trailer_mode)
self.win.clearProperty("traileractionbusy")
def colorpicker(self):
'''legacy'''
self.deprecated_method("script.skin.helper.colorpicker")
def backup(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def restore(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def reset(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def colorthemes(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def createcolortheme(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def restorecolortheme(self):
'''legacy'''
self.deprecated_method("script.skin.helper.skinbackup")
def conditionalbackgrounds(self):
'''legacy'''
self.deprecated_method("script.skin.helper.backgrounds")
def splashscreen(self):
'''helper to show a user defined splashscreen in the skin'''
import time
splashfile = self.params.get("file", "")
duration = int(self.params.get("duration", 5))
if (splashfile.lower().endswith("jpg") or splashfile.lower().endswith("gif") or
splashfile.lower().endswith("png") or splashfile.lower().endswith("tiff")):
# this is an image file
self.win.setProperty("SkinHelper.SplashScreen", splashfile)
# for images we just wait for X seconds to close the splash again
start_time = time.time()
while (time.time() - start_time) <= duration:
xbmc.sleep(500)
else:
# for video or audio we have to wait for the player to finish...
xbmc.Player().play(splashfile, windowed=True)
xbmc.sleep(500)
while xbmc.getCondVisibility("Player.HasMedia"):
xbmc.sleep(150)
# replace startup window with home
startupwindow = xbmc.getInfoLabel("System.StartupWindow")
xbmc.executebuiltin("ReplaceWindow(%s)" % startupwindow)
autostart_playlist = xbmc.getInfoLabel("$ESCINFO[Skin.String(autostart_playlist)]")
if autostart_playlist:
xbmc.executebuiltin("PlayMedia(%s)" % autostart_playlist)
def videosearch(self):
'''show the special search dialog'''
xbmc.executebuiltin("ActivateWindow(busydialog)")
from resources.lib.searchdialog import SearchDialog
search_dialog = SearchDialog("script-skin_helper_service-CustomSearch.xml",
self.addon.getAddonInfo('path').decode("utf-8"), "Default", "1080i")
search_dialog.doModal()
del search_dialog
def showinfo(self):
'''shows our special videoinfo dialog'''
dbid = self.params.get("dbid", "")
dbtype = self.params.get("dbtype", "")
from infodialog import show_infodialog
show_infodialog(dbid, dbtype)
def deletedir(self):
'''helper to delete a directory, input can be normal filesystem path or vfs'''
del_path = self.params.get("path")
if del_path:
ret = xbmcgui.Dialog().yesno(heading=xbmc.getLocalizedString(122),
line1=u"%s[CR]%s" % (xbmc.getLocalizedString(125), del_path))
if ret:
success = recursive_delete_dir(del_path)
if success:
xbmcgui.Dialog().ok(heading=xbmc.getLocalizedString(19179),
line1=self.addon.getLocalizedString(32014))
else:
xbmcgui.Dialog().ok(heading=xbmc.getLocalizedString(16205),
                                    line1=self.addon.getLocalizedString(32015))
def overlaytexture(self):
'''legacy: helper to let the user choose a background overlay from a skin defined folder'''
skinstring = self.params.get("skinstring", "BackgroundOverlayTexture")
self.params["skinstring"] = skinstring
self.params["resourceaddon"] = "resource.images.backgroundoverlays"
self.params["customfolder"] = "special://skin/extras/bgoverlays/"
self.params["allowmulti"] = "false"
self.params["header"] = self.addon.getLocalizedString(32002)
self.selectimage()
def busytexture(self):
'''legacy: helper which lets the user select a busy spinner from predefined spinners in the skin'''
skinstring = self.params.get("skinstring", "SkinHelper.SpinnerTexture")
self.params["skinstring"] = skinstring
self.params["resourceaddon"] = "resource.images.busyspinners"
self.params["customfolder"] = "special://skin/extras/busy_spinners/"
self.params["allowmulti"] = "true"
self.params["header"] = self.addon.getLocalizedString(32006)
self.selectimage()
def selectimage(self):
'''helper which lets the user select an image or imagepath from resourceaddons or custom path'''
skinsettings = SkinSettings()
skinstring = self.params.get("skinstring", "")
skinshortcutsprop = self.params.get("skinshortcutsproperty", "")
current_value = self.params.get("currentvalue", "")
resource_addon = self.params.get("resourceaddon", "")
allow_multi = self.params.get("allowmulti", "false") == "true"
windowheader = self.params.get("header", "")
skinhelper_backgrounds = self.params.get("skinhelperbackgrounds", "false") == "true"
label, value = skinsettings.select_image(
skinstring, allow_multi=allow_multi, windowheader=windowheader, resource_addon=resource_addon,
skinhelper_backgrounds=skinhelper_backgrounds, current_value=current_value)
if label:
if skinshortcutsprop:
# write value to skinshortcuts prop
from skinshortcuts import set_skinshortcuts_property
set_skinshortcuts_property(skinshortcutsprop, value, label)
else:
# write the values to skin strings
if value.startswith("$INFO"):
# we got an dynamic image from window property
skinsettings.set_skin_variable(skinstring, value)
value = "$VAR[%s]" % skinstring
skinstring = skinstring.encode("utf-8")
label = label.encode("utf-8")
xbmc.executebuiltin("Skin.SetString(%s.label,%s)" % (skinstring, label))
xbmc.executebuiltin("Skin.SetString(%s.name,%s)" % (skinstring, label))
xbmc.executebuiltin("Skin.SetString(%s,%s)" % (skinstring, value))
xbmc.executebuiltin("Skin.SetString(%s.path,%s)" % (skinstring, value))
del skinsettings
def dialogok(self):
'''helper to show an OK dialog with a message'''
headertxt = self.params.get("header")
bodytxt = self.params.get("message")
if bodytxt.startswith(" "):
bodytxt = bodytxt[1:]
if headertxt.startswith(" "):
headertxt = headertxt[1:]
dialog = xbmcgui.Dialog()
dialog.ok(heading=headertxt, line1=bodytxt)
del dialog
def dialogyesno(self):
'''helper to show a YES/NO dialog with a message'''
headertxt = self.params.get("header")
bodytxt = self.params.get("message")
yesactions = self.params.get("yesaction", "").split("|")
noactions = self.params.get("noaction", "").split("|")
if bodytxt.startswith(" "):
bodytxt = bodytxt[1:]
if headertxt.startswith(" "):
headertxt = headertxt[1:]
if xbmcgui.Dialog().yesno(heading=headertxt, line1=bodytxt):
for action in yesactions:
xbmc.executebuiltin(action.encode("utf-8"))
else:
for action in noactions:
xbmc.executebuiltin(action.encode("utf-8"))
def textviewer(self):
'''helper to show a textviewer dialog with a message'''
headertxt = self.params.get("header", "")
bodytxt = self.params.get("message", "")
if bodytxt.startswith(" "):
bodytxt = bodytxt[1:]
if headertxt.startswith(" "):
headertxt = headertxt[1:]
xbmcgui.Dialog().textviewer(headertxt, bodytxt)
def fileexists(self):
'''helper to let the skinner check if a file exists
and write the outcome to a window prop or skinstring'''
filename = self.params.get("file")
skinstring = self.params.get("skinstring")
windowprop = self.params.get("winprop")
if xbmcvfs.exists(filename):
if windowprop:
self.win.setProperty(windowprop, "exists")
if skinstring:
xbmc.executebuiltin("Skin.SetString(%s,exists)" % skinstring)
else:
if windowprop:
self.win.clearProperty(windowprop)
if skinstring:
xbmc.executebuiltin("Skin.Reset(%s)" % skinstring)
def stripstring(self):
'''helper to allow the skinner to strip a string and write results to a skin string'''
splitchar = self.params.get("splitchar")
if splitchar.upper() == "[SPACE]":
splitchar = " "
skinstring = self.params.get("string")
if not skinstring:
skinstring = self.params.get("skinstring")
output = self.params.get("output")
index = self.params.get("index", 0)
skinstring = skinstring.split(splitchar)[int(index)]
self.win.setProperty(output, skinstring)
def getfilename(self, filename=""):
        '''helper to display a sanitized filename in the videoinfo dialog'''
output = self.params.get("output")
if not filename:
filename = xbmc.getInfoLabel("ListItem.FileNameAndPath")
if not filename:
filename = xbmc.getInfoLabel("ListItem.FileName")
if "filename=" in filename:
url_params = dict(urlparse.parse_qsl(filename))
filename = url_params.get("filename")
self.win.setProperty(output, filename)
def getplayerfilename(self):
'''helper to parse the filename from a plugin (e.g. emby) filename'''
filename = xbmc.getInfoLabel("Player.FileNameAndPath")
if not filename:
filename = xbmc.getInfoLabel("Player.FileName")
self.getfilename(filename)
def getpercentage(self):
'''helper to calculate the percentage of 2 numbers and write results to a skinstring'''
        total = int(self.params.get("total"))
        count = int(self.params.get("count"))
roundsteps = self.params.get("roundsteps")
skinstring = self.params.get("skinstring")
percentage = int(round((1.0 * count / total) * 100))
if roundsteps:
roundsteps = int(roundsteps)
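            # Round the percentage up to the next multiple of |roundsteps|.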
percentage = percentage + (roundsteps - percentage) % roundsteps
xbmc.executebuiltin("Skin.SetString(%s,%s)" % (skinstring, percentage))
def setresourceaddon(self):
'''helper to let the user choose a resource addon and set that as skin string'''
from resourceaddons import setresourceaddon
addontype = self.params.get("addontype", "")
skinstring = self.params.get("skinstring", "")
setresourceaddon(addontype, skinstring)
def checkresourceaddons(self):
'''allow the skinner to perform a basic check if some required resource addons are available'''
from resourceaddons import checkresourceaddons
addonslist = self.params.get("addonslist", [])
if addonslist:
addonslist = addonslist.split("|")
checkresourceaddons(addonslist)
| mrquim/mrquimrepo | script.skin.helper.service/resources/lib/main_module.py | Python | gpl-2.0 | 31,819 |
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import io
import os
from unittest import mock
from xml.etree import ElementTree
import fixtures
from testtools.matchers import HasLength
import snapcraft
from snapcraft import tests
from snapcraft.plugins import maven
class MavenPluginTestCase(tests.TestCase):
def setUp(self):
super().setUp()
class Options:
maven_options = []
maven_targets = ['']
self.options = Options()
self.project_options = snapcraft.ProjectOptions()
patcher = mock.patch('snapcraft.repo.Ubuntu')
self.ubuntu_mock = patcher.start()
self.addCleanup(patcher.stop)
@staticmethod
def _canonicalize_settings(settings):
with io.StringIO(settings) as f:
tree = ElementTree.parse(f)
for element in tree.iter():
if element.text is not None and element.text.isspace():
element.text = None
if element.tail is not None and element.tail.isspace():
element.tail = None
with io.StringIO() as f:
tree.write(
f, encoding='unicode',
default_namespace='http://maven.apache.org/SETTINGS/1.0.0')
return f.getvalue() + '\n'
def test_get_build_properties(self):
expected_build_properties = ['maven-options', 'maven-targets']
resulting_build_properties = maven.MavenPlugin.get_build_properties()
self.assertThat(resulting_build_properties,
HasLength(len(expected_build_properties)))
for property in expected_build_properties:
self.assertIn(property, resulting_build_properties)
def assertSettingsEqual(self, expected, observed):
print(repr(self._canonicalize_settings(expected)))
print(repr(self._canonicalize_settings(observed)))
self.assertEqual(
self._canonicalize_settings(expected),
self._canonicalize_settings(observed))
def test_schema(self):
schema = maven.MavenPlugin.schema()
properties = schema['properties']
self.assertTrue('maven-options' in properties,
'Expected "maven-options" to be included in '
'properties')
maven_options = properties['maven-options']
self.assertTrue(
'type' in maven_options,
'Expected "type" to be included in "maven-options"')
self.assertEqual(maven_options['type'], 'array',
'Expected "maven-options" "type" to be "array", but '
'it was "{}"'.format(maven_options['type']))
self.assertTrue(
'minitems' in maven_options,
'Expected "minitems" to be included in "maven-options"')
self.assertEqual(maven_options['minitems'], 1,
'Expected "maven-options" "minitems" to be 1, but '
'it was "{}"'.format(maven_options['minitems']))
self.assertTrue(
'uniqueItems' in maven_options,
'Expected "uniqueItems" to be included in "maven-options"')
self.assertTrue(
maven_options['uniqueItems'],
'Expected "maven-options" "uniqueItems" to be "True"')
maven_targets = properties['maven-targets']
self.assertTrue(
'type' in maven_targets,
'Expected "type" to be included in "maven-targets"')
self.assertEqual(maven_targets['type'], 'array',
'Expected "maven-targets" "type" to be "array", but '
'it was "{}"'.format(maven_targets['type']))
self.assertTrue(
'minitems' in maven_targets,
'Expected "minitems" to be included in "maven-targets"')
self.assertEqual(maven_targets['minitems'], 1,
'Expected "maven-targets" "minitems" to be 1, but '
'it was "{}"'.format(maven_targets['minitems']))
self.assertTrue(
'uniqueItems' in maven_targets,
'Expected "uniqueItems" to be included in "maven-targets"')
self.assertTrue(
maven_targets['uniqueItems'],
'Expected "maven-targets" "uniqueItems" to be "True"')
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build(self, run_mock):
env_vars = (
('http_proxy', None),
('https_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package']),
])
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_fail(self, run_mock):
env_vars = (
('http_proxy', None),
('https_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
os.makedirs(plugin.sourcedir)
self.assertRaises(RuntimeError, plugin.build)
run_mock.assert_has_calls([
mock.call(['mvn', 'package']),
])
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_war(self, run_mock):
env_vars = (
('http_proxy', None),
('https_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.war'), 'w').close()
run_mock.side_effect = side
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package']),
])
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_targets(self, run_mock):
env_vars = (
('http_proxy', None),
('https_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
opts = self.options
opts.maven_targets = ['child1', 'child2']
plugin = maven.MavenPlugin('test-part', opts,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir,
'child1', 'target'))
os.makedirs(os.path.join(plugin.builddir,
'child2', 'target'))
open(os.path.join(plugin.builddir,
'child1', 'target', 'child1.jar'), 'w').close()
open(os.path.join(plugin.builddir,
'child2', 'target', 'child2.jar'), 'w').close()
run_mock.side_effect = side
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package']),
])
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_http_proxy(self, run_mock):
env_vars = (
('http_proxy', 'http://localhost:3132'),
('https_proxy', None),
('no_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package', '-s', settings_path]),
])
self.assertTrue(
os.path.exists(settings_path),
'expected {!r} to exist'.format(settings_path))
with open(settings_path) as f:
settings_contents = f.read()
expected_contents = (
'<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
'1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
' <interactiveMode>false</interactiveMode>\n'
' <proxies>\n'
' <proxy>\n'
' <id>http_proxy</id>\n'
' <active>true</active>\n'
' <protocol>http</protocol>\n'
' <host>localhost</host>\n'
' <port>3132</port>\n'
' <nonProxyHosts>localhost</nonProxyHosts>\n'
' </proxy>\n'
' </proxies>\n'
'</settings>\n')
self.assertSettingsEqual(expected_contents, settings_contents)
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_http_proxy_and_no_proxy(self, run_mock):
env_vars = (
('http_proxy', 'http://localhost:3132'),
('https_proxy', None),
('no_proxy', 'internal'),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package', '-s', settings_path]),
])
self.assertTrue(
os.path.exists(settings_path),
'expected {!r} to exist'.format(settings_path))
with open(settings_path) as f:
settings_contents = f.read()
expected_contents = (
'<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
'1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
' <interactiveMode>false</interactiveMode>\n'
' <proxies>\n'
' <proxy>\n'
' <id>http_proxy</id>\n'
' <active>true</active>\n'
' <protocol>http</protocol>\n'
' <host>localhost</host>\n'
' <port>3132</port>\n'
' <nonProxyHosts>internal</nonProxyHosts>\n'
' </proxy>\n'
' </proxies>\n'
'</settings>\n')
self.assertSettingsEqual(expected_contents, settings_contents)
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_http_proxy_and_no_proxies(self, run_mock):
env_vars = (
('http_proxy', 'http://localhost:3132'),
('https_proxy', None),
('no_proxy', 'internal, pseudo-dmz'),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package', '-s', settings_path]),
])
self.assertTrue(
os.path.exists(settings_path),
'expected {!r} to exist'.format(settings_path))
with open(settings_path) as f:
settings_contents = f.read()
expected_contents = (
'<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
'1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
' <interactiveMode>false</interactiveMode>\n'
' <proxies>\n'
' <proxy>\n'
' <id>http_proxy</id>\n'
' <active>true</active>\n'
' <protocol>http</protocol>\n'
' <host>localhost</host>\n'
' <port>3132</port>\n'
' <nonProxyHosts>internal|pseudo-dmz</nonProxyHosts>\n'
' </proxy>\n'
' </proxies>\n'
'</settings>\n')
self.assertSettingsEqual(expected_contents, settings_contents)
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_http_and_https_proxy(self, run_mock):
env_vars = (
('http_proxy', 'http://localhost:3132'),
('https_proxy', 'http://localhost:3133'),
('no_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package', '-s', settings_path]),
])
self.assertTrue(
os.path.exists(settings_path),
'expected {!r} to exist'.format(settings_path))
with open(settings_path) as f:
settings_contents = f.read()
expected_contents = (
'<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
'1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
' <interactiveMode>false</interactiveMode>\n'
' <proxies>\n'
' <proxy>\n'
' <id>http_proxy</id>\n'
' <active>true</active>\n'
' <protocol>http</protocol>\n'
' <host>localhost</host>\n'
' <port>3132</port>\n'
' <nonProxyHosts>localhost</nonProxyHosts>\n'
' </proxy>\n'
' <proxy>\n'
' <id>https_proxy</id>\n'
' <active>true</active>\n'
' <protocol>https</protocol>\n'
' <host>localhost</host>\n'
' <port>3133</port>\n'
' <nonProxyHosts>localhost</nonProxyHosts>\n'
' </proxy>\n'
' </proxies>\n'
'</settings>\n')
self.assertSettingsEqual(expected_contents, settings_contents)
@mock.patch.object(maven.MavenPlugin, 'run')
def test_build_with_authenticated_proxies(self, run_mock):
env_vars = (
('http_proxy', 'http://user1:pass1@localhost:3132'),
('https_proxy', 'http://user2:pass2@localhost:3133'),
('no_proxy', None),
)
for v in env_vars:
self.useFixture(fixtures.EnvironmentVariable(v[0], v[1]))
plugin = maven.MavenPlugin('test-part', self.options,
self.project_options)
def side(l):
os.makedirs(os.path.join(plugin.builddir, 'target'))
open(os.path.join(plugin.builddir,
'target', 'dummy.jar'), 'w').close()
run_mock.side_effect = side
settings_path = os.path.join(plugin.partdir, 'm2', 'settings.xml')
os.makedirs(plugin.sourcedir)
plugin.build()
run_mock.assert_has_calls([
mock.call(['mvn', 'package', '-s', settings_path]),
])
self.assertTrue(
os.path.exists(settings_path),
'expected {!r} to exist'.format(settings_path))
with open(settings_path) as f:
settings_contents = f.read()
expected_contents = (
'<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"\n'
' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"\n'
' xsi:schemaLocation="http://maven.apache.org/SETTINGS/'
'1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">\n'
' <interactiveMode>false</interactiveMode>\n'
' <proxies>\n'
' <proxy>\n'
' <id>http_proxy</id>\n'
' <active>true</active>\n'
' <protocol>http</protocol>\n'
' <host>localhost</host>\n'
' <port>3132</port>\n'
' <username>user1</username>\n'
' <password>pass1</password>\n'
' <nonProxyHosts>localhost</nonProxyHosts>\n'
' </proxy>\n'
' <proxy>\n'
' <id>https_proxy</id>\n'
' <active>true</active>\n'
' <protocol>https</protocol>\n'
' <host>localhost</host>\n'
' <port>3133</port>\n'
' <username>user2</username>\n'
' <password>pass2</password>\n'
' <nonProxyHosts>localhost</nonProxyHosts>\n'
' </proxy>\n'
' </proxies>\n'
'</settings>\n')
self.assertSettingsEqual(expected_contents, settings_contents)
| jonathon-love/snapcraft | snapcraft/tests/test_plugin_maven.py | Python | gpl-3.0 | 19,419 |
#
# Contains unparsing procedures.
#
import sys
#-------------------------------------------
def unparseToC(vars, annot_body_code, indent, extra_indent):
'''Unparse to C/C++ code'''
if len(vars) == 0:
return annot_body_code
s = '\n'
s += indent + '#pragma disjoint ('
for i, v in enumerate(vars):
if i > 0:
s += ', '
s += '*' + __printAddressC(v.var_name, v.dimensions)
s += ') \n'
s += indent + 'if ((('
for i, v in enumerate(vars):
if i > 0:
s += '|'
s += '(int)(' + __printAddressC(v.var_name, v.dimensions) + ')'
s += ') & 0xF) == 0) {\n'
for v in vars:
s += indent + extra_indent
s += '__alignx(16,' + __printAddressC(v.var_name, v.dimensions) + ');\n'
s += annot_body_code.replace('\n', '\n' + extra_indent)
s += '\n'
s += indent + '} else {\n'
s += annot_body_code.replace('\n', '\n' + extra_indent)
s += '\n'
s += indent + '}\n'
s += indent
return s
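# For reference, a hypothetical variable A with dimensions [None, 'N'] yields
# roughly the following generated C code (annotation body elided):
#   #pragma disjoint (*A[N])
#   if ((((int)(A[N])) & 0xF) == 0) {
#       __alignx(16,A[N]);
#       ...
#   } else {
#       ...
#   }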
#-------------------------------------------
def unparseToFortran(vars, annot_body_code, indent, extra_indent):
'''Unparse to Fortran code'''
print 'error: Fortran is not yet supported in alignment module'
sys.exit(1)
#-------------------------------------------
def __printAddressC(var_name, dimensions):
'''Return the starting address location of the given variable (in C/C++)'''
dimensions = dimensions[:]
dimensions.remove(None)
s = str(var_name)
if len(dimensions) > 0:
s += '['
s += ']['.join(map(str, dimensions))
s += ']'
return s
#-------------------------------------------
def __printAddressFortran(var_name, dimensions):
'''Return the starting address location of the given variable (in Fortran)'''
dimensions = dimensions[:]
dimensions.remove(None)
s = str(var_name)
if len(dimensions) > 0:
s += '('
s += ','.join(map(str, dimensions))
s += ')'
return s
| tajkhan/pluto-pocc | annotations/module/align/unparser.py | Python | gpl-3.0 | 2,012 |
#!/usr/bin/env python
# -*- coding: utf-8; py-indent-offset:4 -*-
###############################################################################
#
# Copyright (C) 2015 Daniel Rodriguez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import testcommon
import backtrader as bt
import backtrader.indicators as btind
chkdatas = 1
chkvals = [
['4076.212366', '3655.193634', '3576.228000'],
['4178.117675', '3746.573475', '3665.633700'],
['3974.307056', '3563.813794', '3486.822300'],
]
chkmin = 30
chkind = btind.WMAEnvelope
def test_run(main=False):
datas = [testcommon.getdata(i) for i in range(chkdatas)]
testcommon.runtest(datas,
testcommon.TestStrategy,
main=main,
plot=main,
chkind=chkind,
chkmin=chkmin,
chkvals=chkvals)
if __name__ == '__main__':
test_run(main=True)
| china-quant/backtrader | tests/test_ind_wmaenvelope.py | Python | gpl-3.0 | 1,720 |
"""
Parses the results found for the ETW started on a machine,
downloads the results and stops the ETW.
All credit to pauldotcom-
http://pauldotcom.com/2012/07/post-exploitation-recon-with-e.html
Module built by @harmj0y
"""
import settings
from lib import command_methods
from lib import helpers
from lib import smb
class Module:
def __init__(self, targets=None, creds=None, args=None):
self.name = "ETW Data Download"
self.description = "Download data results from ETW and clean everything up."
# internal list() that holds one or more targets
self.targets = targets
# internal list() that holds one or more cred tuples
# [ (username, pw), (username2, pw2), ...]
self.creds = creds
# a state output file that will be written out by pillage.py
# ex- if you're querying domain users
self.output = ""
        # options requiring user interaction - format is {Option : [Value, Description]}
self.required_options = { "trigger_method" : ["wmis", "[wmis] or [winexe] for triggering"],
"flag" : ["cookies", "search for [cookies] or [post] parameters"]}
def run(self):
# assume single set of credentials
username, password = self.creds[0]
triggerMethod = self.required_options["trigger_method"][0]
flag = self.required_options["flag"][0]
for target in self.targets:
# stop the ETW
stopCMD = "logman stop Status32 -ets"
command_methods.executeCommand(target, username, password, stopCMD, triggerMethod)
            # search for cookies or POST parameters
if flag.lower() == "post":
flag = "POST"
moduleFile = "post_params.txt"
else:
flag = "cookie added"
moduleFile = "cookies.txt"
# check the ETW results for the specified flag, and delete the dump file
parseCmd = "wevtutil qe C:\\Windows\\Temp\\status32.etl /lf:true /f:Text | find /i \""+flag+"\""
# wait 20 seconds for everything to parse...if errors happen, increase this
parseResult = command_methods.executeResult(target, username, password, parseCmd, triggerMethod, pause=20)
# delete the trace file
delCmd = "del C:\\Windows\\Temp\\status32.etl"
command_methods.executeCommand(target, username, password, delCmd, triggerMethod)
if parseResult == "":
self.output += "[!] No ETW results for "+flag+" using creds '"+username+":"+password+"' on : " + target + "\n"
else:
# save the file off to the appropriate location
saveFile = helpers.saveModuleFile(self, target, moduleFile, parseResult)
self.output += "[*] ETW results for "+flag+" using creds '"+username+":"+password+"' on " + target + " stored at "+saveFile+"\n"
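        # For reference, with the default flag "cookies" the parse command
        # built above expands to:
        #   wevtutil qe C:\Windows\Temp\status32.etl /lf:true /f:Text | find /i "cookie added"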
| Exploit-install/Veil-Pillage | modules/enumeration/host/etw_results.py | Python | gpl-3.0 | 3,006 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api, _
import math
class MrpBom(models.Model):
_inherit = 'mrp.bom'
@api.model
def _bom_explode(self, bom, product, factor, properties=None, level=0,
routing_id=False, previous_products=None,
master_bom=None):
routing_id = bom.routing_id.id or routing_id
result, result2 = super(MrpBom, self)._bom_explode(
bom, product, factor, properties=properties, level=level,
routing_id=routing_id, previous_products=previous_products,
master_bom=master_bom)
result2 = self._get_workorder_operations(
result2, factor=factor, level=level, routing_id=routing_id)
return result, result2
def _get_routing_line_from_workorder(self, routing_id, seq, workcenter_id,
wo_name):
""" Returns first routing line from a given data if found
        @param routing_id: Routing id
        @param seq: Workorder sequence
        @param workcenter_id: Workcenter id
        @param wo_name: Workorder name
        @return: the matching routing line, or an empty recordset if none
        """
routing_line_obj = self.env['mrp.routing.workcenter']
domain = [('routing_id', '=', routing_id), ('sequence', '=', seq),
('workcenter_id', '=', workcenter_id)]
routing_lines = routing_line_obj.search(domain)
for rl in routing_lines:
if rl.name in wo_name:
return rl
return routing_line_obj
def _get_workorder_operations(self, result2, factor, level=0,
routing_id=False):
for work_order in result2:
if (work_order['sequence'] < level or
work_order.get('routing_wc_line')):
continue
seq = work_order['sequence'] - level
rl = self._get_routing_line_from_workorder(
routing_id, seq, work_order['workcenter_id'],
work_order['name'])
cycle = rl.cycle_nbr and int(math.ceil(factor / rl.cycle_nbr)) or 0
hour = rl.hour_nbr * cycle
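            # e.g. (hypothetical numbers) factor=10.0, cycle_nbr=4.0 and
            # hour_nbr=0.5 give cycle = ceil(10.0 / 4.0) = 3 and hour = 1.5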
default_wc_line = rl.op_wc_lines.filtered(lambda r: r.default)
work_order['cycle'] = cycle
work_order['hour'] = hour
work_order['time_start'] = default_wc_line.time_start or 0.0
work_order['time_stop'] = default_wc_line.time_stop or 0.0
work_order['routing_wc_line'] = rl.id
work_order['do_production'] = rl.do_production
return result2
@api.multi
@api.onchange('routing_id')
def onchange_routing_id(self):
for line in self.bom_line_ids:
line.operation = (self.routing_id.workcenter_lines and
self.routing_id.workcenter_lines[0])
if self.routing_id:
return {'warning': {
'title': _('Changing Routing'),
                'message': _("Changing the routing changes the operation in"
                             " which each component is consumed; by default"
                             " it is set to the first operation of the"
                             " routing")
}}
return {}
class MrpBomLine(models.Model):
_inherit = 'mrp.bom.line'
operation = fields.Many2one(
comodel_name='mrp.routing.workcenter', string='Consumed in')
| jorsea/odoomrp-wip | mrp_operations_extension/models/mrp_bom.py | Python | agpl-3.0 | 4,254 |
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Mesh.MeshWriter import MeshWriter
from UM.Math.Vector import Vector
from UM.Logger import Logger
from UM.Math.Matrix import Matrix
from UM.Application import Application
import UM.Scene.SceneNode
import Savitar
import numpy
MYPY = False
try:
if not MYPY:
import xml.etree.cElementTree as ET
except ImportError:
Logger.log("w", "Unable to load cElementTree, switching to slower version")
import xml.etree.ElementTree as ET
import zipfile
import UM.Application
class ThreeMFWriter(MeshWriter):
def __init__(self):
super().__init__()
self._namespaces = {
"3mf": "http://schemas.microsoft.com/3dmanufacturing/core/2015/02",
"content-types": "http://schemas.openxmlformats.org/package/2006/content-types",
"relationships": "http://schemas.openxmlformats.org/package/2006/relationships",
"cura": "http://software.ultimaker.com/xml/cura/3mf/2015/10"
}
self._unit_matrix_string = self._convertMatrixToString(Matrix())
self._archive = None
self._store_archive = False
def _convertMatrixToString(self, matrix):
result = ""
result += str(matrix._data[0, 0]) + " "
result += str(matrix._data[1, 0]) + " "
result += str(matrix._data[2, 0]) + " "
result += str(matrix._data[0, 1]) + " "
result += str(matrix._data[1, 1]) + " "
result += str(matrix._data[2, 1]) + " "
result += str(matrix._data[0, 2]) + " "
result += str(matrix._data[1, 2]) + " "
result += str(matrix._data[2, 2]) + " "
result += str(matrix._data[0, 3]) + " "
result += str(matrix._data[1, 3]) + " "
result += str(matrix._data[2, 3])
return result
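    # Note (added for clarity): the 12 numbers above are the upper three rows
    # of the 4x4 matrix listed column by column (rotation columns first,
    # translation last); an identity matrix serializes to
    # "1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0", the form passed to
    # Savitar's setTransformation() below.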
## Should we store the archive
# Note that if this is true, the archive will not be closed.
# The object that set this parameter is then responsible for closing it correctly!
def setStoreArchive(self, store_archive):
self._store_archive = store_archive
## Convenience function that converts an Uranium SceneNode object to a SavitarSceneNode
    #   \returns Savitar scene node.
def _convertUMNodeToSavitarNode(self, um_node, transformation = Matrix()):
if type(um_node) is not UM.Scene.SceneNode.SceneNode:
return None
savitar_node = Savitar.SceneNode()
node_matrix = um_node.getLocalTransformation()
matrix_string = self._convertMatrixToString(node_matrix.preMultiply(transformation))
savitar_node.setTransformation(matrix_string)
mesh_data = um_node.getMeshData()
if mesh_data is not None:
savitar_node.getMeshData().setVerticesFromBytes(mesh_data.getVerticesAsByteArray())
indices_array = mesh_data.getIndicesAsByteArray()
if indices_array is not None:
savitar_node.getMeshData().setFacesFromBytes(indices_array)
else:
savitar_node.getMeshData().setFacesFromBytes(numpy.arange(mesh_data.getVertices().size / 3, dtype=numpy.int32).tostring())
# Handle per object settings (if any)
stack = um_node.callDecoration("getStack")
if stack is not None:
changed_setting_keys = set(stack.getTop().getAllKeys())
# Ensure that we save the extruder used for this object.
if stack.getProperty("machine_extruder_count", "value") > 1:
changed_setting_keys.add("extruder_nr")
# Get values for all changed settings & save them.
for key in changed_setting_keys:
savitar_node.setSetting(key, str(stack.getProperty(key, "value")))
for child_node in um_node.getChildren():
savitar_child_node = self._convertUMNodeToSavitarNode(child_node)
if savitar_child_node is not None:
savitar_node.addChild(savitar_child_node)
return savitar_node
def getArchive(self):
return self._archive
def write(self, stream, nodes, mode = MeshWriter.OutputMode.BinaryMode):
self._archive = None # Reset archive
archive = zipfile.ZipFile(stream, "w", compression = zipfile.ZIP_DEFLATED)
try:
model_file = zipfile.ZipInfo("3D/3dmodel.model")
# Because zipfile is stupid and ignores archive-level compression settings when writing with ZipInfo.
model_file.compress_type = zipfile.ZIP_DEFLATED
# Create content types file
content_types_file = zipfile.ZipInfo("[Content_Types].xml")
content_types_file.compress_type = zipfile.ZIP_DEFLATED
content_types = ET.Element("Types", xmlns = self._namespaces["content-types"])
rels_type = ET.SubElement(content_types, "Default", Extension = "rels", ContentType = "application/vnd.openxmlformats-package.relationships+xml")
model_type = ET.SubElement(content_types, "Default", Extension = "model", ContentType = "application/vnd.ms-package.3dmanufacturing-3dmodel+xml")
# Create _rels/.rels file
relations_file = zipfile.ZipInfo("_rels/.rels")
relations_file.compress_type = zipfile.ZIP_DEFLATED
relations_element = ET.Element("Relationships", xmlns = self._namespaces["relationships"])
model_relation_element = ET.SubElement(relations_element, "Relationship", Target = "/3D/3dmodel.model", Id = "rel0", Type = "http://schemas.microsoft.com/3dmanufacturing/2013/01/3dmodel")
savitar_scene = Savitar.Scene()
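            # First step (note added for clarity): swap the Y and Z axes below,
            # since the 3MF format and Uranium's scene use different up-axis
            # conventions.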
transformation_matrix = Matrix()
transformation_matrix._data[1, 1] = 0
transformation_matrix._data[1, 2] = -1
transformation_matrix._data[2, 1] = 1
transformation_matrix._data[2, 2] = 0
global_container_stack = Application.getInstance().getGlobalContainerStack()
# Second step: 3MF defines the left corner of the machine as center, whereas cura uses the center of the
# build volume.
if global_container_stack:
translation_vector = Vector(x=global_container_stack.getProperty("machine_width", "value") / 2,
y=global_container_stack.getProperty("machine_depth", "value") / 2,
z=0)
translation_matrix = Matrix()
translation_matrix.setByTranslation(translation_vector)
transformation_matrix.preMultiply(translation_matrix)
root_node = UM.Application.Application.getInstance().getController().getScene().getRoot()
for node in nodes:
if node == root_node:
for root_child in node.getChildren():
savitar_node = self._convertUMNodeToSavitarNode(root_child, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
else:
savitar_node = self._convertUMNodeToSavitarNode(node, transformation_matrix)
if savitar_node:
savitar_scene.addSceneNode(savitar_node)
parser = Savitar.ThreeMFParser()
scene_string = parser.sceneToString(savitar_scene)
archive.writestr(model_file, scene_string)
archive.writestr(content_types_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(content_types))
archive.writestr(relations_file, b'<?xml version="1.0" encoding="UTF-8"?> \n' + ET.tostring(relations_element))
except Exception as e:
Logger.logException("e", "Error writing zip file")
return False
finally:
if not self._store_archive:
archive.close()
else:
self._archive = archive
return True
| Curahelper/Cura | plugins/3MFWriter/ThreeMFWriter.py | Python | agpl-3.0 | 7,998 |
# run with env, resource id and API key as arguments: python run_exporter.py cad id api_key
import exporter
#parse into data type files
import dataset_export
import update_datastore_content #enter key as an argument
from sys import argv
script, env, res_id, api_key = argv
with open(env + '.csv', 'w') as f:
csv_string = exporter.export('https://' + env + '.data.gov.bc.ca', 'columns.json')
f.write(csv_string)
if __name__ == '__main__':
dataset_export.export_type(env)
update_datastore_content.update_resource(env, res_id, api_key) | gjlawran/ckanext-bcgov | ckanext/bcgov/scripts/export/run_exporter.py | Python | agpl-3.0 | 518 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from glob import glob
class Cuda(Package):
"""CUDA is a parallel computing platform and programming model invented
by NVIDIA. It enables dramatic increases in computing performance by
harnessing the power of the graphics processing unit (GPU).
Note: This package does not currently install the drivers necessary
to run CUDA. These will need to be installed manually. See:
https://docs.nvidia.com/cuda/ for details."""
homepage = "https://developer.nvidia.com/cuda-zone"
version('9.2.88', 'dd6e33e10d32a29914b7700c7b3d1ca0', expand=False,
url="https://developer.nvidia.com/compute/cuda/9.2/Prod/local_installers/cuda_9.2.88_396.26_linux")
version('9.1.85', '67a5c3933109507df6b68f80650b4b4a', expand=False,
url="https://developer.nvidia.com/compute/cuda/9.1/Prod/local_installers/cuda_9.1.85_387.26_linux")
version('9.0.176', '7a00187b2ce5c5e350e68882f42dd507', expand=False,
url="https://developer.nvidia.com/compute/cuda/9.0/Prod/local_installers/cuda_9.0.176_384.81_linux-run")
version('8.0.61', '33e1bd980e91af4e55f3ef835c103f9b', expand=False,
url="https://developer.nvidia.com/compute/cuda/8.0/Prod2/local_installers/cuda_8.0.61_375.26_linux-run")
version('8.0.44', '6dca912f9b7e2b7569b0074a41713640', expand=False,
url="https://developer.nvidia.com/compute/cuda/8.0/prod/local_installers/cuda_8.0.44_linux-run")
version('7.5.18', '4b3bcecf0dfc35928a0898793cf3e4c6', expand=False,
url="http://developer.download.nvidia.com/compute/cuda/7.5/Prod/local_installers/cuda_7.5.18_linux.run")
version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False,
url="http://developer.download.nvidia.com/compute/cuda/6_5/rel/installers/cuda_6.5.14_linux_64.run")
def install(self, spec, prefix):
runfile = glob(join_path(self.stage.path, 'cuda*_linux*'))[0]
chmod = which('chmod')
chmod('+x', runfile)
runfile = which(runfile)
# Note: NVIDIA does not officially support many newer versions of
# compilers. For example, on CentOS 6, you must use GCC 4.4.7 or
# older. See:
# http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements
# https://gist.github.com/ax3l/9489132
# for details.
runfile(
'--silent', # disable interactive prompts
'--verbose', # create verbose log file
'--override', # override compiler version checks
'--toolkit', # install CUDA Toolkit
'--toolkitpath=%s' % prefix
)
| tmerrick1/spack | var/spack/repos/builtin/packages/cuda/package.py | Python | lgpl-2.1 | 3,901 |
#!/usr/bin/python
from ming import *
import sys
srcdir=sys.argv[1]
m = SWFMovie();
font = SWFFont(srcdir + "/../Media/test.ttf")
text = SWFText(1)
w = font.getStringWidth("The quick brown fox jumps over the lazy dog. 1234567890")
text.setFont(font)
text.setColor(0,0,0,255)
text.setHeight(20)
text.moveTo(w,0)
text.addString("|")
m.add(text)
m.nextFrame()
m.save("test03.swf")
| pombredanne/libming | test/Font/test03.py | Python | lgpl-2.1 | 384 |
"""Tests for certbot.plugins.disco."""
import unittest
import mock
import pkg_resources
import zope.interface
from certbot import errors
from certbot import interfaces
from certbot.plugins import standalone
from certbot.plugins import webroot
EP_SA = pkg_resources.EntryPoint(
"sa", "certbot.plugins.standalone",
attrs=("Authenticator",),
dist=mock.MagicMock(key="certbot"))
EP_WR = pkg_resources.EntryPoint(
"wr", "certbot.plugins.webroot",
attrs=("Authenticator",),
dist=mock.MagicMock(key="certbot"))
class PluginEntryPointTest(unittest.TestCase):
"""Tests for certbot.plugins.disco.PluginEntryPoint."""
def setUp(self):
self.ep1 = pkg_resources.EntryPoint(
"ep1", "p1.ep1", dist=mock.MagicMock(key="p1"))
self.ep1prim = pkg_resources.EntryPoint(
"ep1", "p2.ep2", dist=mock.MagicMock(key="p2"))
# nested
self.ep2 = pkg_resources.EntryPoint(
"ep2", "p2.foo.ep2", dist=mock.MagicMock(key="p2"))
# project name != top-level package name
self.ep3 = pkg_resources.EntryPoint(
"ep3", "a.ep3", dist=mock.MagicMock(key="p3"))
from certbot.plugins.disco import PluginEntryPoint
self.plugin_ep = PluginEntryPoint(EP_SA)
def test_entry_point_to_plugin_name(self):
from certbot.plugins.disco import PluginEntryPoint
names = {
self.ep1: "p1:ep1",
self.ep1prim: "p2:ep1",
self.ep2: "p2:ep2",
self.ep3: "p3:ep3",
EP_SA: "sa",
}
for entry_point, name in names.iteritems():
self.assertEqual(
name, PluginEntryPoint.entry_point_to_plugin_name(entry_point))
def test_description(self):
self.assertEqual(
"Automatically use a temporary webserver",
self.plugin_ep.description)
def test_description_with_name(self):
self.plugin_ep.plugin_cls = mock.MagicMock(description="Desc")
self.assertEqual(
"Desc (sa)", self.plugin_ep.description_with_name)
def test_ifaces(self):
self.assertTrue(self.plugin_ep.ifaces((interfaces.IAuthenticator,)))
self.assertFalse(self.plugin_ep.ifaces((interfaces.IInstaller,)))
self.assertFalse(self.plugin_ep.ifaces((
interfaces.IInstaller, interfaces.IAuthenticator)))
def test__init__(self):
self.assertFalse(self.plugin_ep.initialized)
self.assertFalse(self.plugin_ep.prepared)
self.assertFalse(self.plugin_ep.misconfigured)
self.assertFalse(self.plugin_ep.available)
self.assertTrue(self.plugin_ep.problem is None)
self.assertTrue(self.plugin_ep.entry_point is EP_SA)
self.assertEqual("sa", self.plugin_ep.name)
self.assertTrue(self.plugin_ep.plugin_cls is standalone.Authenticator)
def test_init(self):
config = mock.MagicMock()
plugin = self.plugin_ep.init(config=config)
self.assertTrue(self.plugin_ep.initialized)
self.assertTrue(plugin.config is config)
# memoize!
self.assertTrue(self.plugin_ep.init() is plugin)
self.assertTrue(plugin.config is config)
# try to give different config
self.assertTrue(self.plugin_ep.init(123) is plugin)
self.assertTrue(plugin.config is config)
self.assertFalse(self.plugin_ep.prepared)
self.assertFalse(self.plugin_ep.misconfigured)
self.assertFalse(self.plugin_ep.available)
def test_verify(self):
iface1 = mock.MagicMock(__name__="iface1")
iface2 = mock.MagicMock(__name__="iface2")
iface3 = mock.MagicMock(__name__="iface3")
# pylint: disable=protected-access
self.plugin_ep._initialized = plugin = mock.MagicMock()
exceptions = zope.interface.exceptions
with mock.patch("certbot.plugins."
"disco.zope.interface") as mock_zope:
mock_zope.exceptions = exceptions
def verify_object(iface, obj): # pylint: disable=missing-docstring
assert obj is plugin
assert iface is iface1 or iface is iface2 or iface is iface3
if iface is iface3:
raise mock_zope.exceptions.BrokenImplementation(None, None)
mock_zope.verify.verifyObject.side_effect = verify_object
self.assertTrue(self.plugin_ep.verify((iface1,)))
self.assertTrue(self.plugin_ep.verify((iface1, iface2)))
self.assertFalse(self.plugin_ep.verify((iface3,)))
self.assertFalse(self.plugin_ep.verify((iface1, iface3)))
def test_prepare(self):
config = mock.MagicMock()
self.plugin_ep.init(config=config)
self.plugin_ep.prepare()
self.assertTrue(self.plugin_ep.prepared)
self.assertFalse(self.plugin_ep.misconfigured)
# output doesn't matter that much, just test if it runs
str(self.plugin_ep)
def test_prepare_misconfigured(self):
plugin = mock.MagicMock()
plugin.prepare.side_effect = errors.MisconfigurationError
# pylint: disable=protected-access
self.plugin_ep._initialized = plugin
self.assertTrue(isinstance(self.plugin_ep.prepare(),
errors.MisconfigurationError))
self.assertTrue(self.plugin_ep.prepared)
self.assertTrue(self.plugin_ep.misconfigured)
self.assertTrue(isinstance(self.plugin_ep.problem,
errors.MisconfigurationError))
self.assertTrue(self.plugin_ep.available)
def test_prepare_no_installation(self):
plugin = mock.MagicMock()
plugin.prepare.side_effect = errors.NoInstallationError
# pylint: disable=protected-access
self.plugin_ep._initialized = plugin
self.assertTrue(isinstance(self.plugin_ep.prepare(),
errors.NoInstallationError))
self.assertTrue(self.plugin_ep.prepared)
self.assertFalse(self.plugin_ep.misconfigured)
self.assertFalse(self.plugin_ep.available)
def test_prepare_generic_plugin_error(self):
plugin = mock.MagicMock()
plugin.prepare.side_effect = errors.PluginError
# pylint: disable=protected-access
self.plugin_ep._initialized = plugin
self.assertTrue(isinstance(self.plugin_ep.prepare(), errors.PluginError))
self.assertTrue(self.plugin_ep.prepared)
self.assertFalse(self.plugin_ep.misconfigured)
self.assertFalse(self.plugin_ep.available)
def test_repr(self):
self.assertEqual("PluginEntryPoint#sa", repr(self.plugin_ep))
class PluginsRegistryTest(unittest.TestCase):
"""Tests for certbot.plugins.disco.PluginsRegistry."""
def setUp(self):
from certbot.plugins.disco import PluginsRegistry
self.plugin_ep = mock.MagicMock(name="mock")
self.plugin_ep.__hash__.side_effect = TypeError
self.plugins = {"mock": self.plugin_ep}
self.reg = PluginsRegistry(self.plugins)
def test_find_all(self):
from certbot.plugins.disco import PluginsRegistry
with mock.patch("certbot.plugins.disco.pkg_resources") as mock_pkg:
mock_pkg.iter_entry_points.side_effect = [iter([EP_SA]),
iter([EP_WR])]
plugins = PluginsRegistry.find_all()
self.assertTrue(plugins["sa"].plugin_cls is standalone.Authenticator)
self.assertTrue(plugins["sa"].entry_point is EP_SA)
self.assertTrue(plugins["wr"].plugin_cls is webroot.Authenticator)
self.assertTrue(plugins["wr"].entry_point is EP_WR)
def test_getitem(self):
self.assertEqual(self.plugin_ep, self.reg["mock"])
def test_iter(self):
self.assertEqual(["mock"], list(self.reg))
def test_len(self):
self.assertEqual(1, len(self.reg))
self.plugins.clear()
self.assertEqual(0, len(self.reg))
def test_init(self):
self.plugin_ep.init.return_value = "baz"
self.assertEqual(["baz"], self.reg.init("bar"))
self.plugin_ep.init.assert_called_once_with("bar")
def test_filter(self):
self.plugins.update({
"foo": "bar",
"bar": "foo",
"baz": "boo",
})
self.assertEqual(
{"foo": "bar", "baz": "boo"},
self.reg.filter(lambda p_ep: str(p_ep).startswith("b")))
def test_ifaces(self):
self.plugin_ep.ifaces.return_value = True
# pylint: disable=protected-access
self.assertEqual(self.plugins, self.reg.ifaces()._plugins)
self.plugin_ep.ifaces.return_value = False
self.assertEqual({}, self.reg.ifaces()._plugins)
def test_verify(self):
self.plugin_ep.verify.return_value = True
# pylint: disable=protected-access
self.assertEqual(
self.plugins, self.reg.verify(mock.MagicMock())._plugins)
self.plugin_ep.verify.return_value = False
self.assertEqual({}, self.reg.verify(mock.MagicMock())._plugins)
def test_prepare(self):
self.plugin_ep.prepare.return_value = "baz"
self.assertEqual(["baz"], self.reg.prepare())
self.plugin_ep.prepare.assert_called_once_with()
def test_available(self):
self.plugin_ep.available = True
# pylint: disable=protected-access
self.assertEqual(self.plugins, self.reg.available()._plugins)
self.plugin_ep.available = False
self.assertEqual({}, self.reg.available()._plugins)
def test_find_init(self):
self.assertTrue(self.reg.find_init(mock.Mock()) is None)
        self.plugin_ep.initialized = True
self.assertTrue(
self.reg.find_init(self.plugin_ep.init()) is self.plugin_ep)
def test_repr(self):
self.plugin_ep.__repr__ = lambda _: "PluginEntryPoint#mock"
self.assertEqual("PluginsRegistry(PluginEntryPoint#mock)",
repr(self.reg))
def test_str(self):
self.plugin_ep.__str__ = lambda _: "Mock"
self.plugins["foo"] = "Mock"
self.assertEqual("Mock\n\nMock", str(self.reg))
self.plugins.clear()
self.assertEqual("No plugins", str(self.reg))
if __name__ == "__main__":
unittest.main() # pragma: no cover
| wteiken/letsencrypt | certbot/plugins/disco_test.py | Python | apache-2.0 | 10,343 |
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
import urllib2,urllib,sys,time
import cookielib,mechanize
import re
DEBUG =0
reload(sys)
sys.setdefaultencoding('utf8') #@UndefinedVariable
register_openers()
headers = {
'Host':'agent.anjuke.com',
'User-Agent' : 'Mozilla/5.0 (X11; Linux i686; rv:2.0.1) Gecko/20100101 Firefox/4.0.1',
#'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
#'Accept-Language':'zh-cn,zh;q=0.5',
#'Accept-Encoding':'gzip, deflate',
#'Accept-Charset':'GB2312,utf-8;q=0.7,*;q=0.7',
'Keep-Alive':'115',
'Connection':'keep-alive',
}
#datagen11, headers = multipart_encode({"fileUploadInput": open("/home/myapp/Screenshot-1.jpg","rb"),"backFunction": "$.c.Uploader.finish"})
class httpPost():
data = {}
def __init__(self,dataDic):
self.cookie = cookielib.CookieJar()
httpsHandler = urllib2.HTTPHandler()
httpsHandler.set_http_debuglevel(DEBUG)
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie),httpsHandler)
self.data = dataDic
def login1(self):
self.brow = mechanize.Browser()
httpHandler = mechanize.HTTPHandler()
httpsHandler = mechanize.HTTPSHandler()
httpHandler.set_http_debuglevel(DEBUG)
self.cookiejar = mechanize.LWPCookieJar()
#self.cookiejar = "Cookie lzstat_uv=34741959842666604402|1786789; Hm_lvt_976797cb85805d626fc5642aa5244ba0=1304534271541; ASPSESSIONIDQCDRAQBB=JHCHINLAHGMAIGBIFMNANLGF; lzstat_ss=2189193215_2_1304564199_1786789; Hm_lpvt_976797cb85805d626fc5642aa5244ba0=1304535401191"
self.opener = mechanize.OpenerFactory(mechanize.SeekableResponseOpener).build_opener(
httpHandler,httpsHandler,
mechanize.HTTPCookieProcessor(self.cookiejar),
mechanize.HTTPRefererProcessor,
mechanize.HTTPEquivProcessor,
mechanize.HTTPRefreshProcessor,
)
self.opener.addheaders = [("User-Agent","Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13"),
("From", "")]
#self.opener.addheaders = [(
# "Referer", self.data['postUrl']
# )]
login={}
login['method'] = self.data['method']
login['name'] = self.data['name']
login['pwd'] = self.data['pwd']
loginUrl = self.data['loginUrl']+'?'+urllib.urlencode(login)
print loginUrl
response = mechanize.urlopen("http://esf.soufun.com/")
response = mechanize.urlopen(loginUrl)
print response.read().decode('gb2312')
def login(self):
self.cookie = cookielib.CookieJar()
httpsHandler = urllib2.HTTPHandler()
httpsHandler.set_http_debuglevel(DEBUG)
self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cookie),httpsHandler)
login={}
login['act'] = self.data['act']
login['loginName'] = self.data['loginName']
login['history'] = ''
login['loginPasswd'] = self.data['loginPasswd']
loginUrl = self.data['loginUrl']
req = urllib2.Request(loginUrl,urllib.urlencode(login),headers)
r = self.opener.open(req)
res = None
for item in self.cookie:
#print item.name,item.value
if item.name == 'aQQ_ajklastuser':
res = item.value
return res
#aQQ_ajklastuser junyue_liuhua
#print self.opener.open('http://my.anjuke.com/v2/user/broker/checked/').read()
#open('login.txt','w').write(r.read().encode('utf-8'))
def post(self):
pass
#postData = {}
#postData['loginUrl'] = 'http://agent.anjuke.com/v2/login/'
#postData['act'] = 'login'
#postData['loginName'] = 'junyue_liuhua'
#postData['loginPasswd'] = 'lh_131415'
#http = httpPost(postData)
#http.login()
| ptphp/PyLib | src/webpy1/webpy1.1/post/post_anju.py | Python | apache-2.0 | 4,425 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2008,2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from aquilon.worker.broker import BrokerCommand # pylint: disable=W0611
from aquilon.worker.commands.show_hostlink import CommandShowHostlink
class CommandShowHostlinkHostlink(CommandShowHostlink):
required_parameters = ["hostlink"]
| stdweird/aquilon | lib/python2.6/aquilon/worker/commands/show_hostlink_hostlink.py | Python | apache-2.0 | 951 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sqlalchemy
def upgrade(migrate_engine):
meta = sqlalchemy.MetaData(bind=migrate_engine)
stack = sqlalchemy.Table('stack', meta, autoload=True)
name_index = sqlalchemy.Index('ix_stack_owner_id', stack.c.owner_id,
mysql_length=36)
name_index.create(migrate_engine)
| cwolferh/heat-scratch | heat/db/sqlalchemy/migrate_repo/versions/071_stack_owner_id_index.py | Python | apache-2.0 | 897 |
"""Support for Axis camera streaming."""
from homeassistant.components.camera import SUPPORT_STREAM
from homeassistant.components.mjpeg.camera import (
CONF_MJPEG_URL,
CONF_STILL_IMAGE_URL,
MjpegCamera,
filter_urllib3_logging,
)
from homeassistant.const import (
CONF_AUTHENTICATION,
CONF_DEVICE,
CONF_HOST,
CONF_MAC,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
HTTP_DIGEST_AUTHENTICATION,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .axis_base import AxisEntityBase
from .const import DOMAIN as AXIS_DOMAIN
AXIS_IMAGE = "http://{}:{}/axis-cgi/jpg/image.cgi"
AXIS_VIDEO = "http://{}:{}/axis-cgi/mjpg/video.cgi"
AXIS_STREAM = "rtsp://{}:{}@{}/axis-media/media.amp?videocodec=h264"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Axis camera video stream."""
filter_urllib3_logging()
serial_number = config_entry.data[CONF_MAC]
device = hass.data[AXIS_DOMAIN][serial_number]
config = {
CONF_NAME: config_entry.data[CONF_NAME],
CONF_USERNAME: config_entry.data[CONF_DEVICE][CONF_USERNAME],
CONF_PASSWORD: config_entry.data[CONF_DEVICE][CONF_PASSWORD],
CONF_MJPEG_URL: AXIS_VIDEO.format(
config_entry.data[CONF_DEVICE][CONF_HOST],
config_entry.data[CONF_DEVICE][CONF_PORT],
),
CONF_STILL_IMAGE_URL: AXIS_IMAGE.format(
config_entry.data[CONF_DEVICE][CONF_HOST],
config_entry.data[CONF_DEVICE][CONF_PORT],
),
CONF_AUTHENTICATION: HTTP_DIGEST_AUTHENTICATION,
}
async_add_entities([AxisCamera(config, device)])
class AxisCamera(AxisEntityBase, MjpegCamera):
"""Representation of a Axis camera."""
def __init__(self, config, device):
"""Initialize Axis Communications camera component."""
AxisEntityBase.__init__(self, device)
MjpegCamera.__init__(self, config)
async def async_added_to_hass(self):
"""Subscribe camera events."""
self.unsub_dispatcher.append(
async_dispatcher_connect(
self.hass, self.device.event_new_address, self._new_address
)
)
await super().async_added_to_hass()
@property
def supported_features(self):
"""Return supported features."""
return SUPPORT_STREAM
async def stream_source(self):
"""Return the stream source."""
return AXIS_STREAM.format(
self.device.config_entry.data[CONF_DEVICE][CONF_USERNAME],
self.device.config_entry.data[CONF_DEVICE][CONF_PASSWORD],
self.device.host,
)
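        # The URL above takes the form (hypothetical values):
        # rtsp://user:secret@192.168.0.90/axis-media/media.amp?videocodec=h264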
def _new_address(self):
"""Set new device address for video stream."""
port = self.device.config_entry.data[CONF_DEVICE][CONF_PORT]
self._mjpeg_url = AXIS_VIDEO.format(self.device.host, port)
self._still_image_url = AXIS_IMAGE.format(self.device.host, port)
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return f"{self.device.serial}-camera"
| leppa/home-assistant | homeassistant/components/axis/camera.py | Python | apache-2.0 | 3,117 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
# pylint: disable=missing-docstring
"""EfficientNet models for Keras.
Reference:
- [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](
https://arxiv.org/abs/1905.11946) (ICML 2019)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import math
from tensorflow.python.keras import backend
from tensorflow.python.keras.applications import imagenet_utils
from tensorflow.python.keras.engine import training
from tensorflow.python.keras.layers import VersionAwareLayers
from tensorflow.python.keras.utils import data_utils
from tensorflow.python.keras.utils import layer_utils
from tensorflow.python.lib.io import file_io
from tensorflow.python.util.tf_export import keras_export
BASE_WEIGHTS_PATH = 'https://storage.googleapis.com/keras-applications/'
WEIGHTS_HASHES = {
'b0': ('902e53a9f72be733fc0bcb005b3ebbac',
'50bc09e76180e00e4465e1a485ddc09d'),
'b1': ('1d254153d4ab51201f1646940f018540',
'74c4e6b3e1f6a1eea24c589628592432'),
'b2': ('b15cce36ff4dcbd00b6dd88e7857a6ad',
'111f8e2ac8aa800a7a99e3239f7bfb39'),
'b3': ('ffd1fdc53d0ce67064dc6a9c7960ede0',
'af6d107764bb5b1abb91932881670226'),
'b4': ('18c95ad55216b8f92d7e70b3a046e2fc',
'ebc24e6d6c33eaebbd558eafbeedf1ba'),
'b5': ('ace28f2a6363774853a83a0b21b9421a',
'38879255a25d3c92d5e44e04ae6cec6f'),
'b6': ('165f6e37dce68623721b423839de8be5',
'9ecce42647a20130c1f39a5d4cb75743'),
'b7': ('8c03f828fec3ef71311cd463b6759d99',
'cbcfe4450ddf6f3ad90b1b398090fe4a'),
}
DEFAULT_BLOCKS_ARGS = [{
'kernel_size': 3,
'repeats': 1,
'filters_in': 32,
'filters_out': 16,
'expand_ratio': 1,
'id_skip': True,
'strides': 1,
'se_ratio': 0.25
}, {
'kernel_size': 3,
'repeats': 2,
'filters_in': 16,
'filters_out': 24,
'expand_ratio': 6,
'id_skip': True,
'strides': 2,
'se_ratio': 0.25
}, {
'kernel_size': 5,
'repeats': 2,
'filters_in': 24,
'filters_out': 40,
'expand_ratio': 6,
'id_skip': True,
'strides': 2,
'se_ratio': 0.25
}, {
'kernel_size': 3,
'repeats': 3,
'filters_in': 40,
'filters_out': 80,
'expand_ratio': 6,
'id_skip': True,
'strides': 2,
'se_ratio': 0.25
}, {
'kernel_size': 5,
'repeats': 3,
'filters_in': 80,
'filters_out': 112,
'expand_ratio': 6,
'id_skip': True,
'strides': 1,
'se_ratio': 0.25
}, {
'kernel_size': 5,
'repeats': 4,
'filters_in': 112,
'filters_out': 192,
'expand_ratio': 6,
'id_skip': True,
'strides': 2,
'se_ratio': 0.25
}, {
'kernel_size': 3,
'repeats': 1,
'filters_in': 192,
'filters_out': 320,
'expand_ratio': 6,
'id_skip': True,
'strides': 1,
'se_ratio': 0.25
}]
CONV_KERNEL_INITIALIZER = {
'class_name': 'VarianceScaling',
'config': {
'scale': 2.0,
'mode': 'fan_out',
'distribution': 'truncated_normal'
}
}
DENSE_KERNEL_INITIALIZER = {
'class_name': 'VarianceScaling',
'config': {
'scale': 1. / 3.,
'mode': 'fan_out',
'distribution': 'uniform'
}
}
layers = VersionAwareLayers()
BASE_DOCSTRING = """Instantiates the {name} architecture.
Reference:
- [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](
https://arxiv.org/abs/1905.11946) (ICML 2019)
Optionally loads weights pre-trained on ImageNet.
Note that the data format convention used by the model is
the one specified in your Keras config at `~/.keras/keras.json`.
If you have never configured it, it defaults to `"channels_last"`.
Arguments:
include_top: Whether to include the fully-connected
layer at the top of the network. Defaults to True.
weights: One of `None` (random initialization),
'imagenet' (pre-training on ImageNet),
or the path to the weights file to be loaded. Defaults to 'imagenet'.
input_tensor: Optional Keras tensor
(i.e. output of `layers.Input()`)
to use as image input for the model.
input_shape: Optional shape tuple, only to be specified
if `include_top` is False.
It should have exactly 3 inputs channels.
pooling: Optional pooling mode for feature extraction
when `include_top` is `False`. Defaults to None.
- `None` means that the output of the model will be
the 4D tensor output of the
last convolutional layer.
- `avg` means that global average pooling
will be applied to the output of the
last convolutional layer, and thus
the output of the model will be a 2D tensor.
- `max` means that global max pooling will
be applied.
classes: Optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified. Defaults to 1000 (number of
ImageNet classes).
classifier_activation: A `str` or callable. The activation function to use
on the "top" layer. Ignored unless `include_top=True`. Set
`classifier_activation=None` to return the logits of the "top" layer.
Defaults to 'softmax'.
Returns:
A `keras.Model` instance.
"""
def EfficientNet(
width_coefficient,
depth_coefficient,
default_size,
dropout_rate=0.2,
drop_connect_rate=0.2,
depth_divisor=8,
activation='swish',
blocks_args='default',
model_name='efficientnet',
include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax'):
"""Instantiates the EfficientNet architecture using given scaling coefficients.
Reference:
- [EfficientNet: Rethinking Model Scaling for Convolutional Neural Networks](
https://arxiv.org/abs/1905.11946) (ICML 2019)
Optionally loads weights pre-trained on ImageNet.
Note that the data format convention used by the model is
the one specified in your Keras config at `~/.keras/keras.json`.
Arguments:
width_coefficient: float, scaling coefficient for network width.
depth_coefficient: float, scaling coefficient for network depth.
default_size: integer, default input image size.
dropout_rate: float, dropout rate before final classifier layer.
drop_connect_rate: float, dropout rate at skip connections.
depth_divisor: integer, a unit of network width.
activation: activation function.
blocks_args: list of dicts, parameters to construct block modules.
model_name: string, model name.
include_top: whether to include the fully-connected
layer at the top of the network.
weights: one of `None` (random initialization),
'imagenet' (pre-training on ImageNet),
or the path to the weights file to be loaded.
input_tensor: optional Keras tensor
(i.e. output of `layers.Input()`)
to use as image input for the model.
input_shape: optional shape tuple, only to be specified
if `include_top` is False.
It should have exactly 3 inputs channels.
pooling: optional pooling mode for feature extraction
when `include_top` is `False`.
- `None` means that the output of the model will be
the 4D tensor output of the
last convolutional layer.
- `avg` means that global average pooling
will be applied to the output of the
last convolutional layer, and thus
the output of the model will be a 2D tensor.
- `max` means that global max pooling will
be applied.
classes: optional number of classes to classify images
into, only to be specified if `include_top` is True, and
if no `weights` argument is specified.
classifier_activation: A `str` or callable. The activation function to use
on the "top" layer. Ignored unless `include_top=True`. Set
`classifier_activation=None` to return the logits of the "top" layer.
Returns:
A `keras.Model` instance.
Raises:
ValueError: in case of invalid argument for `weights`,
or invalid input shape.
ValueError: if `classifier_activation` is not `softmax` or `None` when
using a pretrained top layer.
"""
if blocks_args == 'default':
blocks_args = DEFAULT_BLOCKS_ARGS
if not (weights in {'imagenet', None} or file_io.file_exists_v2(weights)):
raise ValueError('The `weights` argument should be either '
'`None` (random initialization), `imagenet` '
'(pre-training on ImageNet), '
'or the path to the weights file to be loaded.')
if weights == 'imagenet' and include_top and classes != 1000:
raise ValueError('If using `weights` as `"imagenet"` with `include_top`'
' as true, `classes` should be 1000')
# Determine proper input shape
input_shape = imagenet_utils.obtain_input_shape(
input_shape,
default_size=default_size,
min_size=32,
data_format=backend.image_data_format(),
require_flatten=include_top,
weights=weights)
if input_tensor is None:
img_input = layers.Input(shape=input_shape)
else:
if not backend.is_keras_tensor(input_tensor):
img_input = layers.Input(tensor=input_tensor, shape=input_shape)
else:
img_input = input_tensor
bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1
def round_filters(filters, divisor=depth_divisor):
"""Round number of filters based on depth multiplier."""
filters *= width_coefficient
new_filters = max(divisor, int(filters + divisor / 2) // divisor * divisor)
# Make sure that round down does not go down by more than 10%.
if new_filters < 0.9 * filters:
new_filters += divisor
return int(new_filters)
def round_repeats(repeats):
"""Round number of repeats based on depth multiplier."""
return int(math.ceil(depth_coefficient * repeats))
# Build stem
x = img_input
x = layers.Rescaling(1. / 255.)(x)
x = layers.Normalization(axis=bn_axis)(x)
x = layers.ZeroPadding2D(
padding=imagenet_utils.correct_pad(x, 3),
name='stem_conv_pad')(x)
x = layers.Conv2D(
round_filters(32),
3,
strides=2,
padding='valid',
use_bias=False,
kernel_initializer=CONV_KERNEL_INITIALIZER,
name='stem_conv')(x)
x = layers.BatchNormalization(axis=bn_axis, name='stem_bn')(x)
x = layers.Activation(activation, name='stem_activation')(x)
# Build blocks
blocks_args = copy.deepcopy(blocks_args)
b = 0
blocks = float(sum(round_repeats(args['repeats']) for args in blocks_args))
for (i, args) in enumerate(blocks_args):
assert args['repeats'] > 0
# Update block input and output filters based on depth multiplier.
args['filters_in'] = round_filters(args['filters_in'])
args['filters_out'] = round_filters(args['filters_out'])
for j in range(round_repeats(args.pop('repeats'))):
# The first block needs to take care of stride and filter size increase.
if j > 0:
args['strides'] = 1
args['filters_in'] = args['filters_out']
x = block(
x,
activation,
drop_connect_rate * b / blocks,
name='block{}{}_'.format(i + 1, chr(j + 97)),
**args)
b += 1
# Build top
x = layers.Conv2D(
round_filters(1280),
1,
padding='same',
use_bias=False,
kernel_initializer=CONV_KERNEL_INITIALIZER,
name='top_conv')(x)
x = layers.BatchNormalization(axis=bn_axis, name='top_bn')(x)
x = layers.Activation(activation, name='top_activation')(x)
if include_top:
x = layers.GlobalAveragePooling2D(name='avg_pool')(x)
if dropout_rate > 0:
x = layers.Dropout(dropout_rate, name='top_dropout')(x)
imagenet_utils.validate_activation(classifier_activation, weights)
x = layers.Dense(
classes,
activation=classifier_activation,
kernel_initializer=DENSE_KERNEL_INITIALIZER,
name='predictions')(x)
else:
if pooling == 'avg':
x = layers.GlobalAveragePooling2D(name='avg_pool')(x)
elif pooling == 'max':
x = layers.GlobalMaxPooling2D(name='max_pool')(x)
# Ensure that the model takes into account
# any potential predecessors of `input_tensor`.
if input_tensor is not None:
inputs = layer_utils.get_source_inputs(input_tensor)
else:
inputs = img_input
# Create model.
model = training.Model(inputs, x, name=model_name)
# Load weights.
if weights == 'imagenet':
if include_top:
file_suffix = '.h5'
file_hash = WEIGHTS_HASHES[model_name[-2:]][0]
else:
file_suffix = '_notop.h5'
file_hash = WEIGHTS_HASHES[model_name[-2:]][1]
file_name = model_name + file_suffix
weights_path = data_utils.get_file(
file_name,
BASE_WEIGHTS_PATH + file_name,
cache_subdir='models',
file_hash=file_hash)
model.load_weights(weights_path)
elif weights is not None:
model.load_weights(weights)
return model
def block(inputs,
activation='swish',
drop_rate=0.,
name='',
filters_in=32,
filters_out=16,
kernel_size=3,
strides=1,
expand_ratio=1,
se_ratio=0.,
id_skip=True):
"""An inverted residual block.
Arguments:
inputs: input tensor.
activation: activation function.
drop_rate: float between 0 and 1, fraction of the input units to drop.
name: string, block label.
filters_in: integer, the number of input filters.
filters_out: integer, the number of output filters.
kernel_size: integer, the dimension of the convolution window.
strides: integer, the stride of the convolution.
expand_ratio: integer, scaling coefficient for the input filters.
se_ratio: float between 0 and 1, fraction to squeeze the input filters.
id_skip: boolean.
Returns:
output tensor for the block.
"""
bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1
# Expansion phase
filters = filters_in * expand_ratio
if expand_ratio != 1:
x = layers.Conv2D(
filters,
1,
padding='same',
use_bias=False,
kernel_initializer=CONV_KERNEL_INITIALIZER,
name=name + 'expand_conv')(
inputs)
x = layers.BatchNormalization(axis=bn_axis, name=name + 'expand_bn')(x)
x = layers.Activation(activation, name=name + 'expand_activation')(x)
else:
x = inputs
# Depthwise Convolution
if strides == 2:
x = layers.ZeroPadding2D(
padding=imagenet_utils.correct_pad(x, kernel_size),
name=name + 'dwconv_pad')(x)
conv_pad = 'valid'
else:
conv_pad = 'same'
x = layers.DepthwiseConv2D(
kernel_size,
strides=strides,
padding=conv_pad,
use_bias=False,
depthwise_initializer=CONV_KERNEL_INITIALIZER,
name=name + 'dwconv')(x)
x = layers.BatchNormalization(axis=bn_axis, name=name + 'bn')(x)
x = layers.Activation(activation, name=name + 'activation')(x)
# Squeeze and Excitation phase
if 0 < se_ratio <= 1:
filters_se = max(1, int(filters_in * se_ratio))
se = layers.GlobalAveragePooling2D(name=name + 'se_squeeze')(x)
se = layers.Reshape((1, 1, filters), name=name + 'se_reshape')(se)
se = layers.Conv2D(
filters_se,
1,
padding='same',
activation=activation,
kernel_initializer=CONV_KERNEL_INITIALIZER,
name=name + 'se_reduce')(
se)
se = layers.Conv2D(
filters,
1,
padding='same',
activation='sigmoid',
kernel_initializer=CONV_KERNEL_INITIALIZER,
name=name + 'se_expand')(se)
x = layers.multiply([x, se], name=name + 'se_excite')
# Output phase
x = layers.Conv2D(
filters_out,
1,
padding='same',
use_bias=False,
kernel_initializer=CONV_KERNEL_INITIALIZER,
name=name + 'project_conv')(x)
x = layers.BatchNormalization(axis=bn_axis, name=name + 'project_bn')(x)
if id_skip and strides == 1 and filters_in == filters_out:
if drop_rate > 0:
x = layers.Dropout(
drop_rate, noise_shape=(None, 1, 1, 1), name=name + 'drop')(x)
x = layers.add([x, inputs], name=name + 'add')
return x
@keras_export('keras.applications.efficientnet.EfficientNetB0',
'keras.applications.EfficientNetB0')
def EfficientNetB0(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.0,
1.0,
224,
0.2,
model_name='efficientnetb0',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB1',
'keras.applications.EfficientNetB1')
def EfficientNetB1(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.0,
1.1,
240,
0.2,
model_name='efficientnetb1',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB2',
'keras.applications.EfficientNetB2')
def EfficientNetB2(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.1,
1.2,
260,
0.3,
model_name='efficientnetb2',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB3',
'keras.applications.EfficientNetB3')
def EfficientNetB3(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.2,
1.4,
300,
0.3,
model_name='efficientnetb3',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB4',
'keras.applications.EfficientNetB4')
def EfficientNetB4(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.4,
1.8,
380,
0.4,
model_name='efficientnetb4',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB5',
'keras.applications.EfficientNetB5')
def EfficientNetB5(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.6,
2.2,
456,
0.4,
model_name='efficientnetb5',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB6',
'keras.applications.EfficientNetB6')
def EfficientNetB6(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
1.8,
2.6,
528,
0.5,
model_name='efficientnetb6',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
@keras_export('keras.applications.efficientnet.EfficientNetB7',
'keras.applications.EfficientNetB7')
def EfficientNetB7(include_top=True,
weights='imagenet',
input_tensor=None,
input_shape=None,
pooling=None,
classes=1000,
classifier_activation='softmax',
**kwargs):
return EfficientNet(
2.0,
3.1,
600,
0.5,
model_name='efficientnetb7',
include_top=include_top,
weights=weights,
input_tensor=input_tensor,
input_shape=input_shape,
pooling=pooling,
classes=classes,
classifier_activation=classifier_activation,
**kwargs)
EfficientNetB0.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB0')
EfficientNetB1.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB1')
EfficientNetB2.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB2')
EfficientNetB3.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB3')
EfficientNetB4.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB4')
EfficientNetB5.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB5')
EfficientNetB6.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB6')
EfficientNetB7.__doc__ = BASE_DOCSTRING.format(name='EfficientNetB7')
@keras_export('keras.applications.efficientnet.preprocess_input')
def preprocess_input(x, data_format=None): # pylint: disable=unused-argument
return x
@keras_export('keras.applications.efficientnet.decode_predictions')
def decode_predictions(preds, top=5):
return imagenet_utils.decode_predictions(preds, top=top)
decode_predictions.__doc__ = imagenet_utils.decode_predictions.__doc__
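# Editor's addition: an illustrative sketch, not part of the original module.
# It shows one plausible way to reuse the EfficientNetB0 constructor defined
# above for transfer learning; the 224x224x3 input shape, the frozen backbone
# and the 10-class head are assumptions made for this example only.
def _example_efficientnet_transfer_learning():
  # Pretrained backbone without the ImageNet classification head; global
  # average pooling yields a 2D feature tensor.
  base = EfficientNetB0(include_top=False, weights='imagenet',
                        input_shape=(224, 224, 3), pooling='avg')
  base.trainable = False  # freeze pretrained weights for feature extraction
  # Small task-specific head on top of the pooled features.
  outputs = layers.Dense(10, activation='softmax')(base.output)
  return training.Model(base.input, outputs, name='efficientnetb0_transfer')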
| karllessard/tensorflow | tensorflow/python/keras/applications/efficientnet.py | Python | apache-2.0 | 24,166 |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import numpy
def run(self, Input):
number_of_steps = 16
self.time = numpy.zeros(number_of_steps)
uniform = Input["uniform"]
self.out = numpy.zeros(number_of_steps)
for i in range(len(self.time)):
self.time[i] = 0.25*i
time = self.time[i]
self.out[i] = math.sin(time+uniform)
| idaholab/raven | tests/framework/fail_xml/calc.py | Python | apache-2.0 | 899 |
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from django.conf import settings
from django.template import Context # noqa
from django.template import Template # noqa
from django.utils.text import normalize_newlines # noqa
from horizon.test import helpers as test
from horizon.test.test_dashboards.cats.dashboard import Cats # noqa
from horizon.test.test_dashboards.cats.kittens.panel import Kittens # noqa
from horizon.test.test_dashboards.dogs.dashboard import Dogs # noqa
from horizon.test.test_dashboards.dogs.puppies.panel import Puppies # noqa
def single_line(text):
"""Quick utility to make comparing template output easier."""
return re.sub(' +',
' ',
normalize_newlines(text).replace('\n', '')).strip()
class TemplateTagTests(test.TestCase):
"""Test Custom Template Tag."""
def render_template_tag(self, tag_name, tag_require=''):
tag_call = "{%% %s %%}" % tag_name
return self.render_template(tag_call, tag_require)
def render_template(self, template_text, tag_require='', context={}):
"""Render a Custom Template to string."""
template = Template("{%% load %s %%} %s"
% (tag_require, template_text))
return template.render(Context(context))
def test_site_branding_tag(self):
"""Test if site_branding tag renders the correct setting."""
rendered_str = self.render_template_tag("site_branding", "branding")
self.assertEqual(settings.SITE_BRANDING, rendered_str.strip(),
"tag site_branding renders %s" % rendered_str.strip())
def test_size_format_filters(self):
size_str = ('5|diskgbformat', '10|diskgbformat',
'5555|mb_float_format', '80|mb_float_format',
'.5|mbformat', '0.005|mbformat', '0.0005|mbformat')
expected = u' 5GB 10GB 5.4GB 80MB 512KB 5KB 524Bytes '
text = ''
for size_filter in size_str:
text += '{{' + size_filter + '}} '
rendered_str = self.render_template(tag_require='sizeformat',
template_text=text)
self.assertEqual(expected, rendered_str)
def test_size_format_filters_with_string(self):
size_str = ('"test"|diskgbformat', '"limit"|mb_float_format',
'"no limit"|mbformat')
expected = u' test limit no limit '
text = ''
for size_filter in size_str:
text += '{{' + size_filter + '}} '
rendered_str = self.render_template(tag_require='sizeformat',
template_text=text)
self.assertEqual(expected, rendered_str)
def test_truncate_filter(self):
ctx_string = {'val1': 'he',
'val2': 'hellotrunc',
'val3': 'four'}
text = ('{{test.val1|truncate:1}}#{{test.val2|truncate:4}}#'
'{{test.val3|truncate:10}}')
expected = u' h#h...#four'
rendered_str = self.render_template(tag_require='truncate_filter',
template_text=text,
context={'test': ctx_string})
self.assertEqual(expected, rendered_str)
def test_quota_filter(self):
ctx_string = {'val1': 100,
'val2': 1000,
'val3': float('inf')}
text = ('{{test.val1|quota:"TB"}}#{{test.val2|quota}}#'
'{{test.val3|quota}}')
expected = u' 100 TB Available#1000 Available#No Limit'
rendered_str = self.render_template(tag_require='horizon',
template_text=text,
context={'test': ctx_string})
self.assertEqual(expected, rendered_str)
def test_horizon_main_nav(self):
text = "{% horizon_main_nav %}"
expected = """
<div class='clearfix'>
<ul class=\"nav nav-tabs\">
<li>
<a href=\"/cats/\" tabindex='1'>Cats</a>
</li>
<li>
<a href=\"/dogs/\" tabindex='1'>Dogs</a>
</li>
</ul></div>"""
rendered_str = self.render_template(tag_require='horizon',
template_text=text,
context={'request': self.request})
self.assertEqual(single_line(rendered_str), single_line(expected))
| ging/horizon | horizon/test/tests/templatetags.py | Python | apache-2.0 | 5,342 |
"""Platform for retrieving meteorological data from Environment Canada."""
import datetime
import re
from env_canada import ECData # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.weather import (
ATTR_FORECAST_CONDITION,
ATTR_FORECAST_PRECIPITATION_PROBABILITY,
ATTR_FORECAST_TEMP,
ATTR_FORECAST_TEMP_LOW,
ATTR_FORECAST_TIME,
PLATFORM_SCHEMA,
WeatherEntity,
)
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, CONF_NAME, TEMP_CELSIUS
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt
CONF_FORECAST = "forecast"
CONF_ATTRIBUTION = "Data provided by Environment Canada"
CONF_STATION = "station"
def validate_station(station):
"""Check that the station ID is well-formed."""
if station is None:
return
if not re.fullmatch(r"[A-Z]{2}/s0000\d{3}", station):
raise vol.error.Invalid('Station ID must be of the form "XX/s0000###"')
return station
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_STATION): validate_station,
vol.Inclusive(CONF_LATITUDE, "latlon"): cv.latitude,
vol.Inclusive(CONF_LONGITUDE, "latlon"): cv.longitude,
vol.Optional(CONF_FORECAST, default="daily"): vol.In(["daily", "hourly"]),
}
)
# Icon codes from http://dd.weatheroffice.ec.gc.ca/citypage_weather/
# docs/current_conditions_icon_code_descriptions_e.csv
ICON_CONDITION_MAP = {
"sunny": [0, 1],
"clear-night": [30, 31],
"partlycloudy": [2, 3, 4, 5, 22, 32, 33, 34, 35],
"cloudy": [10],
"rainy": [6, 9, 11, 12, 28, 36],
"lightning-rainy": [19, 39, 46, 47],
"pouring": [13],
"snowy-rainy": [7, 14, 15, 27, 37],
"snowy": [8, 16, 17, 18, 25, 26, 38, 40],
"windy": [43],
"fog": [20, 21, 23, 24, 44],
"hail": [26, 27],
}
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the Environment Canada weather."""
if config.get(CONF_STATION):
ec_data = ECData(station_id=config[CONF_STATION])
else:
lat = config.get(CONF_LATITUDE, hass.config.latitude)
lon = config.get(CONF_LONGITUDE, hass.config.longitude)
ec_data = ECData(coordinates=(lat, lon))
add_devices([ECWeather(ec_data, config)])
class ECWeather(WeatherEntity):
"""Representation of a weather condition."""
def __init__(self, ec_data, config):
"""Initialize Environment Canada weather."""
self.ec_data = ec_data
self.platform_name = config.get(CONF_NAME)
self.forecast_type = config[CONF_FORECAST]
@property
def attribution(self):
"""Return the attribution."""
return CONF_ATTRIBUTION
@property
def name(self):
"""Return the name of the weather entity."""
if self.platform_name:
return self.platform_name
return self.ec_data.metadata.get("location")
@property
def temperature(self):
"""Return the temperature."""
if self.ec_data.conditions.get("temperature", {}).get("value"):
return float(self.ec_data.conditions["temperature"]["value"])
if self.ec_data.hourly_forecasts[0].get("temperature"):
return float(self.ec_data.hourly_forecasts[0]["temperature"])
return None
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def humidity(self):
"""Return the humidity."""
if self.ec_data.conditions.get("humidity", {}).get("value"):
return float(self.ec_data.conditions["humidity"]["value"])
return None
@property
def wind_speed(self):
"""Return the wind speed."""
if self.ec_data.conditions.get("wind_speed", {}).get("value"):
return float(self.ec_data.conditions["wind_speed"]["value"])
return None
@property
def wind_bearing(self):
"""Return the wind bearing."""
if self.ec_data.conditions.get("wind_bearing", {}).get("value"):
return float(self.ec_data.conditions["wind_bearing"]["value"])
return None
@property
def pressure(self):
"""Return the pressure."""
if self.ec_data.conditions.get("pressure", {}).get("value"):
return 10 * float(self.ec_data.conditions["pressure"]["value"])
return None
@property
def visibility(self):
"""Return the visibility."""
if self.ec_data.conditions.get("visibility", {}).get("value"):
return float(self.ec_data.conditions["visibility"]["value"])
return None
@property
def condition(self):
"""Return the weather condition."""
icon_code = None
if self.ec_data.conditions.get("icon_code", {}).get("value"):
icon_code = self.ec_data.conditions["icon_code"]["value"]
elif self.ec_data.hourly_forecasts[0].get("icon_code"):
icon_code = self.ec_data.hourly_forecasts[0]["icon_code"]
if icon_code:
return icon_code_to_condition(int(icon_code))
return ""
@property
def forecast(self):
"""Return the forecast array."""
return get_forecast(self.ec_data, self.forecast_type)
def update(self):
"""Get the latest data from Environment Canada."""
self.ec_data.update()
def get_forecast(ec_data, forecast_type):
"""Build the forecast array."""
forecast_array = []
if forecast_type == "daily":
half_days = ec_data.daily_forecasts
if half_days[0]["temperature_class"] == "high":
forecast_array.append(
{
ATTR_FORECAST_TIME: dt.now().isoformat(),
ATTR_FORECAST_TEMP: int(half_days[0]["temperature"]),
ATTR_FORECAST_TEMP_LOW: int(half_days[1]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(half_days[0]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
half_days[0]["precip_probability"]
),
}
)
half_days = half_days[2:]
else:
half_days = half_days[1:]
for day, high, low in zip(range(1, 6), range(0, 9, 2), range(1, 10, 2)):
forecast_array.append(
{
ATTR_FORECAST_TIME: (
dt.now() + datetime.timedelta(days=day)
).isoformat(),
ATTR_FORECAST_TEMP: int(half_days[high]["temperature"]),
ATTR_FORECAST_TEMP_LOW: int(half_days[low]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(half_days[high]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
half_days[high]["precip_probability"]
),
}
)
elif forecast_type == "hourly":
hours = ec_data.hourly_forecasts
for hour in range(0, 24):
forecast_array.append(
{
ATTR_FORECAST_TIME: dt.as_local(
datetime.datetime.strptime(hours[hour]["period"], "%Y%m%d%H%M")
).isoformat(),
ATTR_FORECAST_TEMP: int(hours[hour]["temperature"]),
ATTR_FORECAST_CONDITION: icon_code_to_condition(
int(hours[hour]["icon_code"])
),
ATTR_FORECAST_PRECIPITATION_PROBABILITY: int(
hours[hour]["precip_probability"]
),
}
)
return forecast_array
def icon_code_to_condition(icon_code):
"""Return the condition corresponding to an icon code."""
for condition, codes in ICON_CONDITION_MAP.items():
if icon_code in codes:
return condition
return None
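# Editor's addition: a hedged usage sketch for the helpers above; the station
# ID and icon code are illustrative values, not taken from this integration.
def _example_environment_canada_helpers():
    # A well-formed station ID passes validation and is returned unchanged;
    # a malformed one raises vol.error.Invalid.
    station = validate_station("ON/s0000458")
    # Icon code 6 is listed under "rainy" in ICON_CONDITION_MAP.
    condition = icon_code_to_condition(6)
    return station, condition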
| sdague/home-assistant | homeassistant/components/environment_canada/weather.py | Python | apache-2.0 | 8,104 |
import json
import logging
import inspect
from .decorators import pipeline_functions, register_pipeline
from indra.statements import get_statement_by_name, Statement
logger = logging.getLogger(__name__)
class AssemblyPipeline():
"""An assembly pipeline that runs the specified steps on a given set of
statements.
Ways to initialize and run the pipeline (examples assume you have a list
of INDRA Statements stored in the `stmts` variable.)
>>> from indra.statements import *
>>> map2k1 = Agent('MAP2K1', db_refs={'HGNC': '6840'})
>>> mapk1 = Agent('MAPK1', db_refs={'HGNC': '6871'})
>>> braf = Agent('BRAF')
>>> stmts = [Phosphorylation(map2k1, mapk1, 'T', '185'),
... Phosphorylation(braf, map2k1)]
1) Provide a JSON file containing the steps, then use the classmethod
`from_json_file`, and run it with the `run` method on a list of statements.
This option allows storing pipeline versions in a separate file and
reproducing the same results. All functions referenced in the JSON file
have to be registered with the @register_pipeline decorator.
>>> import os
>>> path_this = os.path.dirname(os.path.abspath(__file__))
>>> filename = os.path.abspath(
... os.path.join(path_this, '..', 'tests', 'pipeline_test.json'))
>>> ap = AssemblyPipeline.from_json_file(filename)
>>> assembled_stmts = ap.run(stmts)
2) Initialize a pipeline with a list of steps and run it with the `run`
method on a list of statements. All functions referenced in steps have to
be registered with the @register_pipeline decorator.
>>> steps = [
... {"function": "filter_no_hypothesis"},
... {"function": "filter_grounded_only",
... "kwargs": {"score_threshold": 0.8}}
... ]
>>> ap = AssemblyPipeline(steps)
>>> assembled_stmts = ap.run(stmts)
3) Initialize an empty pipeline and append/insert the steps one by one.
Provide a function and its args and kwargs. For arguments that
require calling a different function, use the RunnableArgument class. All
functions referenced here have to be either imported and passed as function
objects or registered with the @register_pipeline decorator and passed as
function names (strings). The pipeline built this way can be optionally
saved into a JSON file. (Note that this example requires indra_world
to be installed.)
>>> from indra.tools.assemble_corpus import *
>>> from indra_world.ontology import load_world_ontology
>>> from indra_world.belief import get_eidos_scorer
>>> ap = AssemblyPipeline()
>>> ap.append(filter_no_hypothesis)
>>> ap.append(filter_grounded_only)
>>> ap.append(run_preassembly,
... belief_scorer=RunnableArgument(get_eidos_scorer),
... ontology=RunnableArgument(load_world_ontology))
>>> assembled_stmts = ap.run(stmts)
>>> ap.to_json_file('filename.json')
Parameters
----------
steps : list[dict]
A list of dictionaries representing steps in the pipeline. Each step
should have a 'function' key and, if appropriate, 'args' and 'kwargs'
keys. Arguments can be simple values (strings, integers, booleans,
lists, etc.) or can be functions themselves. In case an argument is a
function or a result of another function, it should also be
represented as a dictionary of a similar structure. If a function
itself is an argument (and not its result), the dictionary should
contain a key-value pair {'no_run': True}. If an argument is a type
of a statement, it should be represented as a dictionary {'stmt_type':
<name of a statement type>}.
"""
def __init__(self, steps=None):
# This import is here to avoid circular imports
# It is enough to import one function to get all registered functions
from indra.tools.assemble_corpus import filter_grounded_only
from indra.ontology.bio import bio_ontology
from indra.preassembler.grounding_mapper.gilda import ground_statements
from indra.preassembler.custom_preassembly import agent_grounding_matches
self.steps = steps if steps else []
@classmethod
def from_json_file(cls, filename):
"""Create an instance of AssemblyPipeline from a JSON file with
steps."""
with open(filename, 'r') as f:
steps = json.load(f)
ap = AssemblyPipeline(steps)
return ap
def to_json_file(self, filename):
"""Save AssemblyPipeline to a JSON file."""
with open(filename, 'w') as f:
json.dump(self.steps, f, indent=1)
def run(self, statements, **kwargs):
"""Run all steps of the pipeline.
Parameters
----------
statements : list[indra.statements.Statement]
A list of INDRA Statements to run the pipeline on.
**kwargs : kwargs
It is recommended to define all arguments for the steps functions
in the steps definition, but it is also possible to provide some
external objects (if it is not possible to provide them as a step
argument) as kwargs to the entire pipeline here. One should be
cautious to avoid kwargs name clashes between multiple functions
(this value will be provided to all functions that expect an
argument with the same name). To overwrite this value in other
functions, provide it explicitly in the corresponding steps kwargs.
Returns
-------
list[indra.statements.Statement]
The list of INDRA Statements resulting from running the pipeline
on the list of input Statements.
"""
logger.info('Running the pipeline')
for step in self.steps:
statements = self.run_function(step, statements, **kwargs)
return statements
def append(self, func, *args, **kwargs):
"""Append a step to the end of the pipeline.
Args and kwargs here can be of any type. All functions referenced here
have to be either imported and passed as function objects or
registered with @register_pipeline decorator and passed as function
names (strings). For arguments that require calling a different
function, use RunnableArgument class.
Parameters
----------
func : str or function
A function or the string name of a function to add to the pipeline.
args : args
Args that are passed to func when calling it.
kwargs : kwargs
Kwargs that are passed to func when calling it.
"""
if inspect.isfunction(func):
func_name = func.__name__
if func_name not in pipeline_functions:
register_pipeline(func)
elif isinstance(func, str):
func_name = func
else:
raise TypeError('Should be a function object or a string')
new_step = self.create_new_step(func_name, *args, **kwargs)
self.steps.append(new_step)
def insert(self, ix, func, *args, **kwargs):
"""Insert a step to any position in the pipeline.
Args and kwargs here can be of any type. All functions referenced here
have to be either imported and passed as function objects or
registered with @register_pipeline decorator and passed as function
names (strings). For arguments that require calling a different
function, use RunnableArgument class.
Parameters
----------
func : str or function
A function or the string name of a function to add to the pipeline.
args : args
Args that are passed to func when calling it.
kwargs : kwargs
Kwargs that are passed to func when calling it.
"""
if inspect.isfunction(func):
func_name = func.__name__
if func_name not in pipeline_functions:
register_pipeline(func)
elif isinstance(func, str):
func_name = func
else:
raise TypeError('Should be a function object or a string')
new_step = self.create_new_step(func_name, *args, **kwargs)
self.steps.insert(ix, new_step)
def create_new_step(self, func_name, *args, **kwargs):
"""Create a dictionary representing a new step in the pipeline.
Parameters
----------
func_name : str
The string name of a function to create as a step.
args : args
Args that are passed to the function when calling it.
kwargs : kwargs
Kwargs that are passed to the function when calling it.
Returns
-------
dict
A dict structure representing a step in the pipeline.
"""
assert self.get_function_from_name(func_name)
new_step = {'function': func_name}
if args:
new_step['args'] = [jsonify_arg_input(arg) for arg in args]
if kwargs:
new_step['kwargs'] = {
k: jsonify_arg_input(v) for (k, v) in kwargs.items()}
return new_step
@staticmethod
def get_function_parameters(func_dict):
"""Retrieve a function name and arguments from function dictionary.
Parameters
----------
func_dict : dict
A dict structure representing a function and its args and kwargs.
Returns
-------
tuple of str, list and dict
A tuple with the following elements: the name of the function,
the args of the function, and the kwargs of the function.
"""
func_name = func_dict['function']
args = func_dict.get('args', [])
kwargs = func_dict.get('kwargs', {})
return func_name, args, kwargs
@staticmethod
def get_function_from_name(name):
"""Return a function object by name if available or raise exception.
Parameters
----------
name : str
The name of the function.
Returns
-------
function
The function that was found based on its name. If not found,
a NotRegisteredFunctionError is raised.
"""
if name in pipeline_functions:
return pipeline_functions[name]
raise NotRegisteredFunctionError('%s is not registered' % name)
@staticmethod
def run_simple_function(func, *args, **kwargs):
"""Run a simple function and return the result.
        Simple here means a function whose arguments are all simple values
        (i.e. they do not require extra function calls to produce).
Parameters
----------
func : function
The function to call.
args : args
Args that are passed to the function when calling it.
kwargs : kwargs
Kwargs that are passed to the function when calling it.
Returns
-------
object
Any value that the given function returns.
"""
statements = kwargs.pop('statements', None)
if statements is not None:
return func(statements, *args, **kwargs)
return func(*args, **kwargs)
def run_function(self, func_dict, statements=None, **kwargs):
"""Run a given function and return the results.
For each of the arguments, if it requires an extra
function call, recursively call the functions until we get a simple
function.
Parameters
----------
func_dict : dict
A dict representing the function to call, its args and kwargs.
args : args
Args that are passed to the function when calling it.
kwargs : kwargs
Kwargs that are passed to the function when calling it.
Returns
-------
object
Any value that the given function returns.
"""
func_name, func_args, func_kwargs = self.get_function_parameters(
func_dict)
func = self.get_function_from_name(func_name)
logger.info('Calling %s' % func_name)
new_args = []
new_kwargs = {}
for arg in func_args:
arg_value = self.get_argument_value(arg)
new_args.append(arg_value)
for k, v in func_kwargs.items():
kwarg_value = self.get_argument_value(v)
new_kwargs[k] = kwarg_value
if statements is not None:
new_kwargs['statements'] = statements
if kwargs:
for k, v in kwargs.items():
if k not in new_kwargs and k in inspect.getargspec(func).args:
new_kwargs[k] = v
return self.run_simple_function(func, *new_args, **new_kwargs)
@staticmethod
def is_function(argument, keyword='function'):
"""Check if an argument should be converted to a specific object type,
e.g. a function or a statement type.
Parameters
----------
argument : dict or other object
The argument is a dict, its keyword entry is checked, and if it is
there, we return True, otherwise we return False.
keyword : Optional[str]
The keyword to check if it's there if the argument is a dict.
Default: function
"""
if not isinstance(argument, dict):
return False
if keyword not in argument:
return False
return True
def get_argument_value(self, arg_json):
"""Get a value of an argument from its json version."""
if self.is_function(arg_json, 'function'):
# Argument is a function
if arg_json.get('no_run', False):
value = self.get_function_from_name(arg_json['function'])
# Argument is a result of a function
else:
value = self.run_function(arg_json)
# Argument is a statement type
elif self.is_function(arg_json, 'stmt_type'):
value = get_statement_by_name(arg_json.get('stmt_type'))
# Argument is a simple value (str, int, boolean, etc.)
else:
value = arg_json
return value
def __len__(self):
return len(self.steps)
def __iter__(self):
return iter(self.steps)
class NotRegisteredFunctionError(Exception):
pass
class RunnableArgument():
"""Class representing arguments generated by calling a function.
RunnableArguments should be used as args or kwargs in AssemblyPipeline
`append` and `insert` methods.
Parameters
----------
func : str or function
A function or a name of a function to be called to generate argument
value.
"""
def __init__(self, func, *args, **kwargs):
if inspect.isfunction(func):
self.func_name = func.__name__
if self.func_name not in pipeline_functions:
register_pipeline(func)
elif isinstance(func, str):
self.func_name = func
else:
raise TypeError('Should be a function object or a string')
self.args = args
self.kwargs = kwargs
def to_json(self):
"""Jsonify to standard AssemblyPipeline step format."""
json_dict = {'function': self.func_name}
new_args = []
new_kwargs = {}
for arg in self.args:
new_args.append(jsonify_arg_input(arg))
for k, v in self.kwargs.items():
new_kwargs[k] = jsonify_arg_input(v)
if new_args:
json_dict['args'] = new_args
if new_kwargs:
json_dict['kwargs'] = new_kwargs
return json_dict
def jsonify_arg_input(arg):
"""Jsonify user input (in AssemblyPipeline `append` and `insert` methods)
into a standard step json."""
if isinstance(arg, RunnableArgument):
return arg.to_json()
# If a function object or name of a function is provided, we assume it
# does not have to be run (function itself is argument).
if inspect.isfunction(arg):
func_name = arg.__name__
if func_name not in pipeline_functions:
register_pipeline(arg)
return {'function': func_name, 'no_run': True}
if isinstance(arg, str) and arg in pipeline_functions:
return {'function': arg, 'no_run': True}
# For some functions Statement type has to be argument
if inspect.isclass(arg) and issubclass(arg, Statement):
return {'stmt_type': arg.__name__}
# Argument is a simple value and can be stored as provided
return arg
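# Editor's addition: a minimal sketch of building a pipeline programmatically,
# mirroring the class docstring above; the score threshold and the JSON file
# name mentioned in the comment are arbitrary examples.
def _example_build_pipeline():
    from indra.tools.assemble_corpus import (filter_no_hypothesis,
                                             filter_grounded_only)
    ap = AssemblyPipeline()
    ap.append(filter_no_hypothesis)
    # Keyword arguments are stored in the step dict next to the function name.
    ap.append(filter_grounded_only, score_threshold=0.8)
    # ap.to_json_file('pipeline.json') would persist these steps to disk.
    return ap.steps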
| johnbachman/indra | indra/pipeline/pipeline.py | Python | bsd-2-clause | 16,636 |
#!/usr/bin/env python
""" sha1Hash_test.py
Unit tests for sha1.py
"""
from crypto.hash.sha1Hash import SHA1
import unittest
import struct
assert struct.calcsize('!IIIII') == 20, '5 integers should be 20 bytes'
class SHA1_FIPS180_TestCases(unittest.TestCase):
""" SHA-1 tests from FIPS180-1 Appendix A, B and C """
def testFIPS180_1_Appendix_A(self):
""" APPENDIX A. A SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abc'
message_digest = 0xA9993E36L, 0x4706816AL, 0xBA3E2571L, 0x7850C26CL, 0x9CD0D89DL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix A test Failed'
def testFIPS180_1_Appendix_B(self):
""" APPENDIX B. A SECOND SAMPLE MESSAGE AND ITS MESSAGE DIGEST """
hashAlg = SHA1()
message = 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'
message_digest = 0x84983E44L, 0x1C3BD26EL, 0xBAAE4AA1L, 0xF95129E5L, 0xE54670F1L
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix B test Failed'
def testFIPS180_1_Appendix_C(self):
""" APPENDIX C. A THIRD SAMPLE MESSAGE AND ITS MESSAGE DIGEST
Let the message be the binary-coded form of the ASCII string which consists
of 1,000,000 repetitions of "a". """
hashAlg = SHA1()
message = 1000000*'a'
message_digest = 0x34AA973CL, 0xD4C4DAA4L, 0xF61EEB2BL, 0xDBAD2731L, 0x6534016FL
md_string = _toBString(message_digest)
assert( hashAlg(message) == md_string ), 'FIPS180 Appendix C test Failed'
def _toBlock(binaryString):
""" Convert binary string to blocks of 5 words of uint32() """
return [uint32(word) for word in struct.unpack('!IIIII', binaryString)]
def _toBString(block):
""" Convert block (5 words of 32 bits to binary string """
return ''.join([struct.pack('!I',word) for word in block])
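# Editor's addition: a hedged cross-check of the Appendix A vector against the
# standard library, independent of the cryptopy implementation under test.
def _stdlib_crosscheck_appendix_a():
    import hashlib
    expected = 'a9993e364706816aba3e25717850c26c9cd0d89d'
    return hashlib.sha1('abc').hexdigest() == expected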
if __name__ == '__main__':
# Run the tests from the command line
unittest.main()
| realms-team/basestation-fw | libs/smartmeshsdk-REL-1.3.0.1/external_libs/cryptopy/crypto/hash/sha1Hash_test.py | Python | bsd-3-clause | 2,119 |
from datetime import datetime
from django.contrib.contenttypes.models import ContentType
from actstream.managers import ActionManager, stream
class MyActionManager(ActionManager):
@stream
def testfoo(self, object, time=None):
if time is None:
time = datetime.now()
return object.actor_actions.filter(timestamp__lte = time)
@stream
def testbar(self, verb):
return self.filter(verb=verb)
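# Editor's addition: a hedged sketch of querying the custom streams above; it
# assumes ACTSTREAM_SETTINGS has been pointed at MyActionManager, and the verb
# value is an arbitrary example.
def _example_query_custom_stream():
    from actstream.models import Action
    return Action.objects.testbar('waved')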
| WW-Digital/django-activity-stream | example_project/testapp/streams.py | Python | bsd-3-clause | 444 |
from neo.io.basefromrawio import BaseFromRaw
from neo.rawio.plexonrawio import PlexonRawIO
class PlexonIO(PlexonRawIO, BaseFromRaw):
"""
Class for reading the old data format from Plexon
acquisition system (.plx)
Note that Plexon now use a new format PL2 which is NOT
supported by this IO.
Compatible with versions 100 to 106.
Other versions have not been tested.
"""
_prefered_signal_group_mode = 'group-by-same-units'
def __init__(self, filename):
PlexonRawIO.__init__(self, filename=filename)
BaseFromRaw.__init__(self, filename)
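# Editor's addition: a hedged sketch of typical neo usage of this reader; the
# file name is a placeholder and read_block() is the generic BaseFromRaw API.
def _example_read_plexon_file():
    io = PlexonIO(filename='recording.plx')
    block = io.read_block()
    return block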
| samuelgarcia/python-neo | neo/io/plexonio.py | Python | bsd-3-clause | 594 |
#!/usr/bin/env python
''' Copyright (c) 2013 Potential Ventures Ltd
Copyright (c) 2013 SolarFlare Communications Inc
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Potential Ventures Ltd,
SolarFlare Communications Inc nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL POTENTIAL VENTURES LTD BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. '''
"""
Collection of Ethernet Packet generators to use for testdata generation
Most generators take the keyword argument "payload" which can be
used to control the payload contents if required. Defaults to random data.
"""
import random
from scapy.all import Ether, IP, UDP
# Suppress SCAPY warning messages
import logging
logging.getLogger("scapy").setLevel(logging.ERROR)
from cocotb.decorators import public
from cocotb.generators.byte import get_bytes, random_data
_default_payload = random_data
# UDP packet generators
@public
def udp_all_sizes(max_size=1500, payload=_default_payload()):
"""UDP packets of every supported size"""
header = Ether() / IP() / UDP()
for size in range(0, max_size - len(header)):
yield header / get_bytes(size, payload)
@public
def udp_random_sizes(npackets=100, payload=_default_payload()):
"""UDP packets with random sizes"""
header = Ether() / IP() / UDP()
max_size = 1500 - len(header)
for pkt in range(npackets):
yield header / get_bytes(random.randint(0, max_size), payload)
# IPV4 generator
@public
def ipv4_small_packets(npackets=100, payload=_default_payload()):
"""Small (<100bytes payload) IPV4 packets"""
for pkt in range(npackets):
yield Ether() / IP() / get_bytes(random.randint(0, 100), payload)
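# Editor's addition: a hedged sketch of consuming the generators above outside
# a testbench; the packet count is arbitrary.
def _example_consume_generators():
    # Each yielded value is a scapy packet, so len() and other scapy
    # operations apply directly.
    return [len(pkt) for pkt in udp_random_sizes(npackets=5)]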
| stuarthodgson/cocotb | cocotb/generators/packet.py | Python | bsd-3-clause | 2,962 |
"""Add mod versioning
Revision ID: 1d46e8d4483
Revises: 2650a2191fe
Create Date: 2014-06-10 01:29:49.567535
"""
# revision identifiers, used by Alembic.
revision = '1d46e8d4483'
down_revision = '2650a2191fe'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('mod', 'ksp_version')
op.drop_column('mod', 'keywords')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('mod', sa.Column('keywords', sa.VARCHAR(length=256), autoincrement=False, nullable=True))
op.add_column('mod', sa.Column('ksp_version', sa.VARCHAR(length=16), autoincrement=False, nullable=True))
### end Alembic commands ###
| Kerbas-ad-astra/KerbalStuff | alembic/versions/1d46e8d4483_add_mod_versioning.py | Python | mit | 786 |
import pyspeckit
import os
from pyspeckit.spectrum.models import nh2d
import numpy as np
import astropy.units as u
if not os.path.exists('p-nh2d_spec.fits'):
import astropy.utils.data as aud
from astropy.io import fits
f = aud.download_file('https://github.com/pyspeckit/pyspeckit-example-files/raw/master/p-nh2d_spec.fits')
with fits.open(f) as ff:
ff.writeto('p-nh2d_spec.fits')
# Load the spectrum
spec = pyspeckit.Spectrum('p-nh2d_spec.fits')
# Determine rms from line free section and load into cube
rms = np.std(spec.data[10:340])
spec.error[:] = rms
# setup spectral axis
spec.xarr.refX = 110.153594*u.GHz
spec.xarr.velocity_convention = 'radio'
spec.xarr.convert_to_unit('km/s')
# define useful shortcuts for True and False
F=False
T=True
# Setup of matplotlib
import matplotlib.pyplot as plt
plt.ion()
# Add NH2D fitter
spec.Registry.add_fitter('nh2d_vtau', pyspeckit.models.nh2d.nh2d_vtau_fitter, 4)
# run spectral fit using some reasonable guesses
spec.specfit(fittype='nh2d_vtau', guesses=[5.52, 2.15, 0.166, 0.09067],
verbose_level=4, signal_cut=1.5, limitedmax=[F,T,T,T], limitedmin=[T,T,T,T],
minpars=[0, 0, -1, 0.05], maxpars=[30.,50.,1,0.5], fixed=[F,F,F,F])
# plot best fit
spec.plotter(errstyle='fill')
spec.specfit.plot_fit()
#save figure
plt.savefig('example_p-NH2D.png')
| jpinedaf/pyspeckit | examples/example_pNH2D.py | Python | mit | 1,329 |
# Some useful functions to extract data out of emails
# Copyright (C) 2002-2012 John Goerzen & contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import email
from email.Parser import Parser as MailParser
import time
def get_message_date(content, header='Date'):
"""
Parses mail and returns resulting timestamp.
:param header: the header to extract date from;
:returns: timestamp or `None` in the case of failure.
"""
message = MailParser().parsestr(content, True)
dateheader = message.get(header)
# parsedate_tz returns a 10-tuple that can be passed to mktime_tz
# Will be None if missing or not in a valid format. Note that
# indexes 6, 7, and 8 of the result tuple are not usable.
datetuple = email.utils.parsedate_tz(dateheader)
if datetuple is None:
return None
return email.utils.mktime_tz(datetuple)
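# Editor's addition: a hedged illustration of get_message_date on a
# hand-written message; the header values are made up.
def _example_get_message_date():
    content = ("Date: Tue, 01 Jan 2013 10:00:00 +0000\r\n"
               "Subject: example\r\n\r\nbody\r\n")
    # Returns a Unix timestamp, or None if the Date header cannot be parsed.
    return get_message_date(content)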
| styk-tv/offlineimap | offlineimap/emailutil.py | Python | gpl-2.0 | 1,573 |
class BookmarkData:
def __init__(self, _id, _title, _url, _parent, _type):
self.mId = _id
self.mTitle = _title
self.mUrl = _url
self.mParent = _parent
self.mType = _type
    def dump(self, _indent='  '):
        print "%s-> %d, %s, %s, %d, %d" % (_indent, self.mId, self.mTitle, self.mUrl, self.mParent, self.mType)
class CategoryData:
def __init__(self, _id, _name):
self.mId = _id
self.mName = _name
self.mBookmarks = {}
def setBookmarks(self, _bookmarks):
self.mBookmarks = _bookmarks
def appendBookmark(self, _bookmark):
self.mBookmarks[_bookmark.mId] = _bookmark
def dump(self):
print " -> %d, %s" % (self.mId, self.mName)
for key in self.mBookmarks.iterkeys():
self.mBookmarks[key].dump(' ')
import ConfigParser
class SimpleConfigParser:
def __init__(self):
self.mFileName = None
self.mConfig = None
self.mCategoryCurrentIdx = 0
self.mBookmarkCurrentIdx = 0
self.mDataValid = False
self.mPopulateValid = False
def _read(self):
if self.mDataValid:
return
print "populate!!"
self.mConfig.read(self.mFileName)
self.mCategoryCurrentIdx = self.getNumber('__SYS__', 'category_current_idx')
self.mBookmarkCurrentIdx = self.getNumber('__SYS__', 'bookmark_current_idx')
self.mDataValid = True
def _save(self):
with open(self.mFileName, 'wb') as bookmarkFile:
self.mConfig.write(bookmarkFile)
self.mDataValid = False
self.mPopulateValid = False
def _del(self, _section, _option=None):
#print _section, ' :', _option
if _option is None:
if not self.exist(_section):
return
self.mConfig.remove_section(_section)
return
if not self.exist(_section, _option):
return
self.mConfig.remove_option(_section, _option)
def _get(self, _section, _option, _default):
try:
data = self.mConfig.get(_section, _option)
except Exception, e:
#print e
return _default
else : return data
def _set(self, _section, _option, _value):
self.mConfig.set(_section, _option, _value)
def exist(self, _section, _option=None):
if _option is None:
return self.mConfig.has_section(_section)
return self.mConfig.has_option(_section, _option)
def setNumber(self, _section, _option, _value):
self._set(_section, _option, str(_value))
def setString(self, _section, _option, _value):
self._set(_section, _option, _value)
def getNumber(self, _section, _option, _default=0):
return int(self._get(_section, _option, _default))
def getString(self, _section, _option, _default=''):
return self._get(_section, _option, _default)
def delOption(self, _section, _option):
self._del(_section, _option)
def addSection(self, _section):
self.mConfig.add_section(_section)
def delSection(self, _section):
self._del(_section)
def init(self, _fileName):
self.mFileName = _fileName
self.mConfig = ConfigParser.RawConfigParser()
if self.mConfig is None:
return False
self._read()
return True
class BookmarkManager(SimpleConfigParser):
_instance = None
def __new__(cls, *args, **kwargs):
if not cls._instance:
            cls._instance = super(BookmarkManager, cls).__new__(cls, *args, **kwargs)
return cls._instance
def __init__(self, _dbFileName):
SimpleConfigParser.__init__(self)
self.mBookmarkRoot = None
self.mDebugEnable = True
import os
if not os.path.exists(_dbFileName):
f = file('/proc/stb/info/vumodel')
model = f.read().strip()
f.close()
manualmode = (model == "solo2" or model == "duo2")
out = open(_dbFileName, 'w')
line = "[__SYS__]\n"
line = line + "category_current_idx = 1\n"
if manualmode :
line = line + "bookmark_current_idx = 3\n"
else:
line = line + "bookmark_current_idx = 2\n"
line = line + "\n"
line = line + "[c-1]\n"
line = line + "id = 1\n"
line = line + "name = My favorite\n"
line = line + "\n"
line = line + "[b-1]\n"
line = line + "id = 1\n"
line = line + "title = Google Com\n"
line = line + "url = http://www.google.com/\n"
line = line + "parent = 1\n"
line = line + "type = 0\n"
line = line + "\n"
line = line + "[b-2]\n"
line = line + "id = 2\n"
line = line + "title = HBBig\n"
line = line + "url = http://www.hbbig.com/\n"
line = line + "parent = 1\n"
line = line + "type = 0\n"
line = line + "\n"
if manualmode :
line = line + "[b-3]\n"
line = line + "url = file:///usr/local/manual/main.html\n"
line = line + "id = 2\n"
line = line + "parent = 1\n"
line = line + "title = User Manual\n"
line = line + "type = 1\n"
out.write(line)
self.init(_dbFileName)
def message(self, format, params=None):
if not self.mDebugEnable:
return
if params is None:
print format
else: print format % (params)
def getBookmark(self, _title):
self.populate()
for key in self.mBookmarkRoot.iterkeys():
for key2 in self.mBookmarkRoot[key].mBookmarks.iterkeys():
if self.mBookmarkRoot[key].mBookmarks[key2].mTitle == _title:
return 'b-%d' % (self.mBookmarkRoot[key].mBookmarks[key2].mId)
return None
def addBookmark(self, _title, _url, _parent, _type):
if self.getBookmark(_title) is not None:
return False
i = self.mBookmarkCurrentIdx + 1
s = "b-%d" % (i,)
self.message("add bookmark : %s, %s, %d, %d", (_title, _url, _parent, _type,))
self.mConfig.add_section(s)
self.setNumber(s, 'id', i)
self.setString(s, 'title', _title)
self.setString(s, 'url', _url)
self.setNumber(s, 'parent', _parent)
self.setNumber(s, 'type', _type)
self.setNumber('__SYS__', 'bookmark_current_idx', i)
self._save()
return True
def deleteBookmark(self, _id):
self.populate()
self.message("delete bookmark : %d", (_id,))
self.delSection('b-%d' % (_id,))
self._save()
def updateBookmark(self, _bookmark):
self.populate()
s = "b-%d" % (_bookmark.mId)
self.message("update bookmark : %s, %s, %d, %d", (_bookmark.mTitle, _bookmark.mUrl, _bookmark.mParent, _bookmark.mType,))
self.setString(s, 'title', _bookmark.mTitle)
self.setString(s, 'url', _bookmark.mUrl)
self.setNumber(s, 'parent', _bookmark.mParent)
self.setNumber(s, 'type', _bookmark.mType)
self._save()
def getCategory(self, _name):
self.populate()
for key in self.mBookmarkRoot.iterkeys():
if self.mBookmarkRoot[key].mName == _name:
return 'c-%d' % (self.mBookmarkRoot[key].mId)
return None
def addCategory(self, _name):
if self.getCategory(_name) is not None:
return False
self.message("add category : %s", (_name,))
i = self.mCategoryCurrentIdx + 1
s = "c-%d" % (i)
self.mConfig.add_section(s)
self.setNumber(s, 'id', i)
        self.setString(s, 'name', _name)
self.setNumber('__SYS__', 'category_current_idx', i)
self._save()
return True
def deleteCategory(self, _id):
self.populate()
self.message("delete category : %d", (_id,))
try:
for key in self.mBookmarkRoot[_id].mBookmarks.iterkeys():
self.delSection('b-%d' % (key,))
except: pass
self.delSection('c-%d' % (_id,))
self._save()
def updateCategory(self, _category):
self.populate()
self.message("update category : %s", (_category.mName,))
s = "c-%d" % (_category.mId)
        self.setString(s, 'name', _category.mName)
self._save()
def populate(self):
cx, bx = 0, 0
categoryList = {}
self.message("populate : %d, %d", (self.mPopulateValid, self.mDataValid))
self._read()
if self.mPopulateValid:
return
while cx <= self.mCategoryCurrentIdx:
s = 'c-%d' % (cx,)
i = self.getNumber(s, 'id', -1)
if i != -1:
n = self.getString(s, 'name')
categoryList[i] = CategoryData(i, n)
cx += 1
sorted(categoryList)
while bx <= self.mBookmarkCurrentIdx:
s = 'b-%d' % (bx,)
i = self.getNumber(s, 'id', -1)
if i != -1:
t = self.getString(s, 'title')
u = self.getString(s, 'url')
p = self.getNumber(s, 'parent')
e = self.getNumber(s, 'type')
try:
categoryList[p].appendBookmark(BookmarkData(i, t, u, p, e))
except Exception, e: self._del(s)
bx += 1
for key in categoryList.iterkeys():
sorted(categoryList[key].mBookmarks)
self.mBookmarkRoot = categoryList
self.mPopulateValid = True
self.dump()
def getBookmarkRoot(self):
self.populate()
return self.mBookmarkRoot
def dump(self):
if not self.mDebugEnable:
return
self.populate()
print "-- snapshot --"
for key in self.mBookmarkRoot.iterkeys():
self.mBookmarkRoot[key].dump()
print "--------------"
@staticmethod
def getInstance():
return BookmarkManager('/etc/enigma2/hbbtv_bookmark.ini')
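# Editor's addition: a hedged sketch of the manager API above. The category
# and bookmark values are illustrative, the ini path is a throwaway file, and
# the code assumes it runs on a receiver where /proc/stb/info/vumodel exists.
def _example_bookmark_usage():
    manager = BookmarkManager('/tmp/hbbtv_bookmark_example.ini')
    manager.addCategory('News')
    # Category keys come back as 'c-<id>'; extract the numeric parent id.
    parent_id = int(manager.getCategory('News').split('-')[1])
    manager.addBookmark('Example', 'http://example.com/', parent_id, 0)
    return manager.getBookmarkRoot()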
| popazerty/dvbapp2-gui | lib/python/Plugins/Extensions/IniHbbTV/bookmark.py | Python | gpl-2.0 | 8,456 |
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2015 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Configuration for Zenodo Records."""
from __future__ import absolute_import, print_function
from flask_babelex import gettext
from speaklater import make_lazy_gettext
_ = make_lazy_gettext(lambda: gettext)
ZENODO_COMMUNITIES_AUTO_ENABLED = True
"""Automatically add and request to communities upon publishing."""
ZENODO_COMMUNITIES_AUTO_REQUEST = ['zenodo', ]
"""Communities which are to be auto-requested upon first publishing."""
ZENODO_COMMUNITIES_REQUEST_IF_GRANTS = ['ecfunded', ]
"""Communities which are to be auto-requested if record has grants."""
ZENODO_COMMUNITIES_ADD_IF_GRANTS = []
"""Communities which are to be auto-added if record has grants."""
ZENODO_BUCKET_QUOTA_SIZE = 50 * 1000 * 1000 * 1000 # 50 GB
"""Maximum quota per bucket."""
ZENODO_MAX_FILE_SIZE = ZENODO_BUCKET_QUOTA_SIZE
"""Maximum file size accepted."""
| tiborsimko/zenodo | zenodo/modules/deposit/config.py | Python | gpl-2.0 | 1,816 |
# -*- coding: utf-8 -*-
from .Base import Base
from .misc import parse_name, safename
class Crypter(Base):
__name__ = "Crypter"
__type__ = "crypter"
__version__ = "0.20"
__status__ = "stable"
__pattern__ = r'^unmatchable$'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("folder_per_package", "Default;Yes;No", "Create folder for each package", "Default")]
__description__ = """Base decrypter plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
def init_base(self):
#: Put all packages here. It's a list of tuples like: ( name, [list of links], folder )
self.packages = []
self.links = [] #: List of urls, pyLoad will generate packagenames
def setup_base(self):
self.packages = []
self.links = []
def process(self, pyfile):
self.decrypt(pyfile)
if self.links:
self._generate_packages()
elif not self.packages:
self.error(_("No link grabbed"), "decrypt")
self._create_packages()
def decrypt(self, pyfile):
"""
The "main" method of every crypter plugin, you **have to** overwrite it
"""
raise NotImplementedError
def _generate_packages(self):
"""
Generate new packages from self.links
"""
name = self.info['pattern'].get("N")
if name is None:
links = map(self.fixurl, self.links)
pdict = self.pyload.api.generatePackages(links)
packages = [(_name, _links, parse_name(_name))
for _name, _links in pdict.items()]
else:
packages = [(name, self.links, parse_name(name))]
self.packages.extend(packages)
def _create_packages(self):
"""
Create new packages from self.packages
"""
pack_folder = self.pyfile.package().folder
pack_password = self.pyfile.package().password
pack_queue = self.pyfile.package().queue
folder_per_package = self.config.get('folder_per_package', "Default")
if folder_per_package == "Default":
folder_per_package = self.pyload.config.get(
'general', 'folder_per_package')
else:
folder_per_package = folder_per_package == "Yes"
for name, links, folder in self.packages:
self.log_info(_("Create package: %s") % name,
_("%d links") % len(links))
links = map(self.fixurl, links)
self.log_debug("LINKS for package " + name, *links)
pid = self.pyload.api.addPackage(name, links, pack_queue)
if pack_password:
self.pyload.api.setPackageData(
pid, {'password': pack_password})
            #: Workaround to avoid breaking the addPackage API method
set_folder = lambda x: self.pyload.api.setPackageData(
pid, {'folder': safename(x or "")})
if not folder_per_package:
folder = pack_folder
elif not folder or folder == name:
folder = parse_name(name)
self.log_info(_("Save package `%(name)s` to folder: %(folder)s")
% {'name': name, 'folder': folder})
set_folder(folder)
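# --- Illustrative sketch (added commentary, not part of the original plugin) ---
# A minimal, hypothetical subclass showing the contract described above: a
# concrete plugin overrides decrypt() and fills either self.links (pyLoad then
# generates the package names) or self.packages with (name, [links], folder)
# tuples. The pattern and URLs below are invented purely for illustration.
class ExampleFolderCrypter(Crypter):
    __name__ = "ExampleFolderCrypter"
    __type__ = "crypter"
    __version__ = "0.01"
    __status__ = "testing"
    __pattern__ = r'https?://(?:www\.)?example\.com/folder/\w+'
    __description__ = """Illustrative decrypter sketch"""
    __license__ = "GPLv3"
    __authors__ = []
    def decrypt(self, pyfile):
        #: Hypothetical links; a real plugin would parse them from pyfile.url
        links = ["http://example.com/file/1", "http://example.com/file/2"]
        #: Option 1: hand plain links to pyLoad and let it build the package
        self.links.extend(links)
        #: Option 2: build the package explicitly as (name, links, folder)
        # self.packages.append(("Example pack", links, "Example pack"))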
| Arno-Nymous/pyload | module/plugins/internal/Crypter.py | Python | gpl-3.0 | 3,435 |
# (c) 2015, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import datetime
import os
import pwd
import time
from ansible import constants as C
from ansible.plugins.action import ActionBase
from ansible.utils.hashing import checksum_s
from ansible.utils.unicode import to_bytes
class ActionModule(ActionBase):
TRANSFERS_FILES = True
def get_checksum(self, tmp, dest, try_directory=False, source=None):
remote_checksum = self._remote_checksum(tmp, dest)
if remote_checksum in ('0', '2', '3', '4'):
# Note: 1 means the file is not present which is fine; template
# will create it. 3 means directory was specified instead of file
if try_directory and remote_checksum == '3' and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
remote_checksum = self.get_checksum(tmp, dest, try_directory=False)
if remote_checksum not in ('0', '2', '3', '4'):
return remote_checksum
result = dict(failed=True, msg="failed to checksum remote file."
" Checksum error code: %s" % remote_checksum)
return result
return remote_checksum
def run(self, tmp=None, task_vars=dict()):
''' handler for template operations '''
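        # Rough flow of this handler: render the template locally, compare its
        # checksum against the remote file, and if they differ transfer the
        # rendered content and delegate to the 'copy' module; if they match,
        # delegate to the 'file' module so ownership/permissions still apply.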
source = self._task.args.get('src', None)
dest = self._task.args.get('dest', None)
faf = self._task.first_available_file
if (source is None and faf is not None) or dest is None:
return dict(failed=True, msg="src and dest are required")
if tmp is None:
tmp = self._make_tmp_path()
if faf:
#FIXME: issue deprecation warning for first_available_file, use with_first_found or lookup('first_found',...) instead
found = False
for fn in faf:
fn_orig = fn
fnt = self._templar.template(fn)
                fnd = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', fnt)
if not os.path.exists(fnd):
of = task_vars.get('_original_file', None)
if of is not None:
                        fnd = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', of)
if os.path.exists(fnd):
source = fnd
found = True
break
if not found:
return dict(failed=True, msg="could not find src in first_available_file list")
else:
if self._task._role is not None:
source = self._loader.path_dwim_relative(self._task._role._role_path, 'templates', source)
else:
source = self._loader.path_dwim(source)
# Expand any user home dir specification
dest = self._remote_expand_user(dest, tmp)
directory_prepended = False
if dest.endswith(os.sep):
directory_prepended = True
base = os.path.basename(source)
dest = os.path.join(dest, base)
# template the source data locally & get ready to transfer
try:
with open(source, 'r') as f:
template_data = f.read()
try:
template_uid = pwd.getpwuid(os.stat(source).st_uid).pw_name
except:
template_uid = os.stat(source).st_uid
vars = task_vars.copy()
vars['template_host'] = os.uname()[1]
vars['template_path'] = source
vars['template_mtime'] = datetime.datetime.fromtimestamp(os.path.getmtime(source))
vars['template_uid'] = template_uid
vars['template_fullpath'] = os.path.abspath(source)
vars['template_run_date'] = datetime.datetime.now()
managed_default = C.DEFAULT_MANAGED_STR
managed_str = managed_default.format(
host = vars['template_host'],
uid = vars['template_uid'],
file = to_bytes(vars['template_path'])
)
vars['ansible_managed'] = time.strftime(
managed_str,
time.localtime(os.path.getmtime(source))
)
old_vars = self._templar._available_variables
self._templar.set_available_variables(vars)
resultant = self._templar.template(template_data, preserve_trailing_newlines=True)
self._templar.set_available_variables(old_vars)
except Exception as e:
return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
local_checksum = checksum_s(resultant)
remote_checksum = self.get_checksum(tmp, dest, not directory_prepended, source=source)
if isinstance(remote_checksum, dict):
# Error from remote_checksum is a dict. Valid return is a str
return remote_checksum
if local_checksum != remote_checksum:
# if showing diffs, we need to get the remote value
dest_contents = ''
# FIXME: still need to implement diff mechanism
#if self.runner.diff:
# # using persist_files to keep the temp directory around to avoid needing to grab another
# dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, task_vars=task_vars, persist_files=True)
# if 'content' in dest_result.result:
# dest_contents = dest_result.result['content']
# if dest_result.result['encoding'] == 'base64':
# dest_contents = base64.b64decode(dest_contents)
# else:
# raise Exception("unknown encoding, failed: %s" % dest_result.result)
xfered = self._transfer_data(self._connection._shell.join_path(tmp, 'source'), resultant)
# fix file permissions when the copy is done as a different user
if self._connection_info.become and self._connection_info.become_user != 'root':
self._remote_chmod('a+r', xfered, tmp)
# run the copy module
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=xfered,
dest=dest,
original_basename=os.path.basename(source),
follow=True,
),
)
result = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars)
if result.get('changed', False):
result['diff'] = dict(before=dest_contents, after=resultant)
return result
else:
# when running the file module based on the template data, we do
# not want the source filename (the name of the template) to be used,
# since this would mess up links, so we clear the src param and tell
# the module to follow links. When doing that, we have to set
# original_basename to the template just in case the dest is
# a directory.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=None,
original_basename=os.path.basename(source),
follow=True,
),
)
return self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars)
| datsfosure/ansible | lib/ansible/plugins/action/template.py | Python | gpl-3.0 | 8,281 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xgc(AutotoolsPackage):
"""xgc is an X11 graphics demo that shows various features of the X11
core protocol graphics primitives."""
homepage = "http://cgit.freedesktop.org/xorg/app/xgc"
url = "https://www.x.org/archive/individual/app/xgc-1.0.5.tar.gz"
version('1.0.5', '605557a9c138f6dc848c87a21bc7c7fc')
depends_on('libxaw')
depends_on('libxt')
depends_on('flex', type='build')
depends_on('bison', type='build')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
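# Usage note (added commentary, not from the original package file): because
# Xgc derives from AutotoolsPackage, the standard configure/build/install
# phases are inherited, so the recipe only declares its URL, version and
# dependencies; `spack install xgc` would then drive the whole build.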
| krafczyk/spack | var/spack/repos/builtin/packages/xgc/package.py | Python | lgpl-2.1 | 1,806 |
#!/usr/bin/env python
#
# This file is part of Scalable COncurrent Operations in Python (SCOOP).
#
# SCOOP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# SCOOP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SCOOP. If not, see <http://www.gnu.org/licenses/>.
#
import scoop
scoop.DEBUG = False
import unittest
import subprocess
import time
import copy
import os
import sys
import operator
import signal
import math
from tests_parser import TestUtils
from tests_stat import TestStat
from tests_stopwatch import TestStopWatch
from scoop import futures, _control, utils, shared
from scoop._types import FutureQueue
from scoop.broker.structs import BrokerInfo
subprocesses = []
def cleanSubprocesses(signum=None, frame=None):
    # Signal handlers receive (signum, frame); the defaults allow direct calls too.
    for a in subprocesses:
        a.kill()
try:
signal.signal(signal.SIGQUIT, cleanSubprocesses)
except AttributeError:
# SIGQUIT doesn't exist on Windows
signal.signal(signal.SIGTERM, cleanSubprocesses)
def func0(n):
task = futures.submit(func1, n)
result = task.result()
return result
def func1(n):
result = futures.map(func2, [i+1 for i in range(n)])
return sum(result)
def func2(n):
launches = []
for i in range(n):
launches.append(futures.submit(func3, i + 1))
result = futures.as_completed(launches)
return sum(r.result() for r in result)
def func3(n):
result = list(futures.map(func4, [i+1 for i in range(n)]))
return sum(result)
def func4(n):
result = n * n
return result
def funcLambda(n):
lambda_func = lambda x : x*x
result = list(futures.map(lambda_func, [i+1 for i in range(n)]))
return sum(result)
def funcWithKW(n, **kwargs):
return kwargs
def funcLambdaSubfuncNotGlobal(n):
"""Tests a lambda function containing a call to a function that is not in
the globals()."""
my_mul = operator.mul
lambda_func = lambda x : my_mul(x, x)
result = list(futures.map(lambda_func, [i+1 for i in range(n)]))
return sum(result)
def funcCos():
result = list(futures.map(math.cos, [i for i in range(10)]))
return sum(result)
def funcCallback():
f = futures.submit(func4, 100)
def callBack(future):
future.was_callabacked = True
f.add_done_callback(callBack)
if len(f.callback) == 0:
return False
futures.wait((f,))
try:
return f.was_callabacked
except:
return False
def funcCancel():
f = futures.submit(func4, 100)
f.cancel()
return f.cancelled()
def funcCompleted(n):
launches = []
for i in range(n):
launches.append(futures.submit(func4, i + 1))
result = futures.as_completed(launches)
return sum(r.result() for r in result)
def funcDone():
f = futures.submit(func4, 100)
futures.wait((f,))
done = f.done()
if done != True:
return done
res = f.result()
done = f.done()
return done
def funcWait(timeout):
fs = [futures.submit(func4, i) for i in range(1000)]
done, not_done = futures.wait(fs, timeout=timeout)
return done, not_done
def funcExcept(n):
f = futures.submit(funcRaise, n)
try:
f.result()
except:
return True
return False
def funcRaise(n):
raise Exception("Test exception")
def funcSub(n):
f = futures.submit(func4, n)
return f.result()
def funcMapScan(l):
resultat = futures.mapScan(func4,
operator.add,
l)
_control.execQueue.socket.pumpInfoSocket()
return resultat
def funcMapReduce(l):
resultat = futures.mapReduce(func4,
operator.add,
l)
_control.execQueue.socket.pumpInfoSocket()
return resultat
def funcDoubleMapReduce(l):
resultat = futures.mapReduce(func4,
operator.add,
l)
resultat2 = futures.mapReduce(func4,
operator.add,
l)
_control.execQueue.socket.pumpInfoSocket()
return resultat == resultat2
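# Note on the expected values: func4 squares each element and operator.add
# folds the partial results, so mapReduce/mapScan over [10, 20, 30] yields
# 100 + 400 + 900 = 1400, the value asserted in TestCoherent further down.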
def funcUseSharedConstant():
# Tries on a mutable and an immutable object
assert shared.getConst('myVar') == {
1: 'Example 1',
2: 'Example 2',
3: 'Example 3',
}
assert shared.getConst('secondVar') == "Hello World!"
return True
def funcUseSharedFunction():
assert shared.getConst('myRemoteFunc')(5) == 5 * 5
assert shared.getConst('myRemoteFunc')(25) == 25 * 25
return True
def funcSharedConstant():
shared.setConst(myVar={1: 'Example 1',
2: 'Example 2',
3: 'Example 3',
})
shared.setConst(secondVar="Hello World!")
result = True
for _ in range(100):
try:
result &= futures.submit(funcUseSharedConstant).result()
except AssertionError:
result = False
return result
def funcSharedFunction():
shared.setConst(myRemoteFunc=func4)
result = True
for _ in range(100):
try:
result &= futures.submit(funcUseSharedFunction).result()
except AssertionError:
result = False
return result
def funcMapAsCompleted(n):
result = list(futures.map_as_completed(func4, [i+1 for i in range(n)]))
return sum(result)
def funcIter(n):
result = list(futures.map(func4, (i+1 for i in range(n))))
return sum(result)
def funcKeywords(n, **kwargs):
task = futures.submit(funcWithKW, n, **kwargs)
futures.wait([task], return_when=futures.ALL_COMPLETED)
result = task.result()
return result
def main(n):
task = futures.submit(func0, n)
futures.wait([task], return_when=futures.ALL_COMPLETED)
result = task.result()
return result
def mainSimple(n):
task = futures.submit(func3, n)
futures.wait([task], return_when=futures.ALL_COMPLETED)
result = task.result()
return result
def submit_get_queues_size(n):
task = futures.submit(func4, n)
result = task.result()
return [
len(_control.execQueue.inprogress),
len(_control.execQueue.ready),
len(_control.execQueue.movable),
len(_control.futureDict) - 1, # - 1 because the current function is a future too
]
def map_get_queues_size(n):
result = list(map(func4, [n for n in range(n)]))
return [
len(_control.execQueue.inprogress),
len(_control.execQueue.ready),
len(_control.execQueue.movable),
len(_control.futureDict) - 1, # - 1 because the current function is a future too
]
def port_ready(port, socket):
"""Checks if a given port is already binded"""
try:
socket.connect(('127.0.0.1', port))
except IOError:
return False
else:
socket.shutdown(2)
socket.close()
return True
class TestScoopCommon(unittest.TestCase):
def __init__(self, *args, **kwargs):
# Parent initialization
super(TestScoopCommon, self).__init__(*args, **kwargs)
def multiworker_set(self):
global subprocesses
worker = subprocess.Popen([sys.executable, "-m", "scoop.bootstrap.__main__",
"--brokerHostname", "127.0.0.1", "--taskPort", "5555",
"--metaPort", "5556", "tests.py"])
subprocesses.append(worker)
return worker
def setUp(self):
global subprocesses
import socket, datetime, time
# Start the server
self.server = subprocess.Popen([sys.executable, "-m", "scoop.broker.__main__",
"--tPort", "5555", "--mPort", "5556"])
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
begin = datetime.datetime.now()
while not port_ready(5555, s):
if (datetime.datetime.now() - begin > datetime.timedelta(seconds=3)):
raise Exception('Could not start server!')
subprocesses.append(self.server)
# Setup worker environment
scoop.IS_RUNNING = True
scoop.IS_ORIGIN = True
scoop.WORKER_NAME = 'origin'.encode()
scoop.BROKER_NAME = 'broker'.encode()
scoop.BROKER = BrokerInfo("127.0.0.1",
5555,
5556,
"127.0.0.1")
scoop.worker = (scoop.WORKER_NAME, scoop.BROKER_NAME)
scoop.MAIN_MODULE = "tests.py"
scoop.VALID = True
scoop.DEBUG = False
scoop.SIZE = 2
_control.execQueue = FutureQueue()
def tearDown(self):
global subprocesses
import socket, datetime, time
_control.execQueue.shutdown()
del _control.execQueue
_control.futureDict.clear()
try:
self.w.terminate()
self.w.wait()
except:
pass
# Destroy the server
        if self.server.poll() is None:
try:
self.server.terminate()
self.server.wait()
except:
pass
# Stabilise zmq after a deleted socket
del subprocesses[:]
# Wait for the previous server to be correctly terminated
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
begin = datetime.datetime.now()
while port_ready(5555, s):
if (datetime.datetime.now() - begin > datetime.timedelta(seconds=3)):
raise Exception('Could not terminate server!')
s.close()
class TestMultiFunction(TestScoopCommon):
def __init__(self, *args, **kwargs):
# Parent initialization
super(TestMultiFunction, self).__init__(*args, **kwargs)
self.main_func = main
self.small_result = 77
self.large_result = 76153
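        # Expected values are easy to check by hand: func3(n) sums the first n
        # squares, func2 sums func3(1..n) and func1 sums func2(1..n), so
        # main(4) -> func1(4) = 1 + 6 + 20 + 50 = 77.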
def test_small_uniworker(self):
_control.FutureQueue.highwatermark = 10
_control.FutureQueue.lowwatermark = 5
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
def test_small_no_lowwatermark_uniworker(self):
_control.FutureQueue.highwatermark = 9999999999999
_control.FutureQueue.lowwatermark = 1
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
def test_small_foreign_uniworker(self):
_control.FutureQueue.highwatermark = 1
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
def test_small_local_uniworker(self):
_control.FutureQueue.highwatermark = 9999999999999
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
def test_large_uniworker(self):
_control.FutureQueue.highwatermark = 9999999999999
result = futures._startup(self.main_func, 20)
self.assertEqual(result, self.large_result)
def test_large_no_lowwatermark_uniworker(self):
_control.FutureQueue.lowwatermark = 1
_control.FutureQueue.highwatermark = 9999999999999
result = futures._startup(self.main_func, 20)
self.assertEqual(result, self.large_result)
def test_large_foreign_uniworker(self):
_control.FutureQueue.highwatermark = 1
result = futures._startup(self.main_func, 20)
self.assertEqual(result, self.large_result)
def test_large_local_uniworker(self):
_control.FutureQueue.highwatermark = 9999999999999
result = futures._startup(self.main_func, 20)
self.assertEqual(result, self.large_result)
def test_small_local_multiworker(self):
self.w = self.multiworker_set()
_control.FutureQueue.highwatermark = 9999999999999
Backupenv = os.environ.copy()
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
os.environ = Backupenv
def test_small_foreign_multiworker(self):
self.w = self.multiworker_set()
_control.FutureQueue.highwatermark = 1
Backupenv = os.environ.copy()
result = futures._startup(self.main_func, 4)
self.assertEqual(result, self.small_result)
os.environ = Backupenv
def test_execQueue_multiworker(self):
self.w = self.multiworker_set()
result = futures._startup(func0, 6)
self.assertEqual(len(_control.execQueue.inprogress), 0)
self.assertEqual(len(_control.execQueue.ready), 0)
self.assertEqual(len(_control.execQueue.movable), 0)
self.assertEqual(len(_control.futureDict), 0)
def test_execQueue_uniworker(self):
result = futures._startup(func0, 6)
self.assertEqual(len(_control.execQueue.inprogress), 0)
self.assertEqual(len(_control.execQueue.ready), 0)
self.assertEqual(len(_control.execQueue.movable), 0)
self.assertEqual(len(_control.futureDict), 0)
def test_execQueue_submit_uniworker(self):
result = futures._startup(submit_get_queues_size, 6)
self.assertEqual(
result,
[0 for _ in range(len(result))],
"Buffers are not empty after future completion"
)
def test_execQueue_map_uniworker(self):
result = futures._startup(map_get_queues_size, 6)
self.assertEqual(
result,
[0 for _ in range(len(result))],
"Buffers are not empty after future completion"
)
def test_execQueue_submit_multiworker(self):
self.w = self.multiworker_set()
result = futures._startup(submit_get_queues_size, 6)
self.assertEqual(
result,
[0 for _ in range(len(result))],
"Buffers are not empty after future completion"
)
def test_execQueue_map_multiworker(self):
self.w = self.multiworker_set()
result = futures._startup(map_get_queues_size, 6)
self.assertEqual(
result,
[0 for _ in range(len(result))],
"Buffers are not empty after future completion"
)
def test_partial(self):
"""This function removes some attributes (such as __name__)."""
from functools import partial
result = futures._startup(partial(self.main_func), 4)
self.assertEqual(result, self.small_result)
class TestSingleFunction(TestMultiFunction):
def __init__(self, *args, **kwargs):
# Parent initialization
super(TestSingleFunction, self).__init__(*args, **kwargs)
self.main_func = mainSimple
self.small_result = 30
self.large_result = 2870
class TestApi(TestScoopCommon):
    def __init__(self, *args, **kwargs):
        super(TestApi, self).__init__(*args, **kwargs)
def test_as_Completed_single(self):
result = futures._startup(funcCompleted, 30)
self.assertEqual(result, 9455)
def test_as_Completed_multi(self):
self.w = self.multiworker_set()
result = futures._startup(funcCompleted, 30)
self.assertEqual(result, 9455)
def test_map_single(self):
result = futures._startup(func3, 30)
self.assertEqual(result, 9455)
def test_map_multi(self):
self.w = self.multiworker_set()
result = futures._startup(func3, 30)
self.assertEqual(result, 9455)
def test_map_lambda(self):
self.w = self.multiworker_set()
result = futures._startup(funcLambda, 30)
self.assertEqual(result, 9455)
def test_submit_with_keyword(self):
result = futures._startup(funcKeywords, 2, kwarg=3.1415926)
self.assertEqual(result, { "kwarg": 3.1415926} )
# This test is complex to handle and has many implications
# Bundle a closure with the future?
# How to manage side-effects of variables in closure?
#def test_map_lambda_subfunc_not_global(self):
# self.w = self.multiworker_set()
# result = futures._startup(funcLambdaSubfuncNotGlobal, 30)
# self.assertEqual(result, 9455)
def test_map_imported_func(self):
self.w = self.multiworker_set()
result = futures._startup(funcCos)
self.assertGreater(result, 0.4)
self.assertLess(result, 0.5)
def test_submit_single(self):
result = futures._startup(funcSub, 10)
self.assertEqual(result, 100)
def test_submit_multi(self):
self.w = self.multiworker_set()
result = futures._startup(funcSub, 10)
self.assertEqual(result, 100)
def test_exception_single(self):
result = futures._startup(funcExcept, 19)
self.assertTrue(result)
def test_exception_multi(self):
self.w = self.multiworker_set()
result = futures._startup(funcExcept, 19)
self.assertTrue(result)
def test_done(self):
result = futures._startup(funcDone)
self.assertTrue(result)
def test_cancel(self):
self.assertTrue(futures._startup(funcCancel))
def test_callback(self):
self.assertTrue(futures._startup(funcCallback))
def test_wait_no_timeout(self):
done, not_done = futures._startup(funcWait, -1)
self.assertTrue(len(done) == 1000)
self.assertTrue(len(not_done) == 0)
def test_wait_with_timeout(self):
done, not_done = futures._startup(funcWait, 0.1)
self.assertTrue((len(done) + len(not_done)) == 1000)
def test_wait_nonblocking(self):
done, not_done = futures._startup(funcWait, 0)
self.assertTrue((len(done) + len(not_done)) == 1000)
def test_map_as_completed_single(self):
result = futures._startup(funcMapAsCompleted, 30)
self.assertEqual(result, 9455)
def test_map_as_completed_multi(self):
self.w = self.multiworker_set()
result = futures._startup(funcMapAsCompleted, 30)
self.assertEqual(result, 9455)
def test_from_generator_single(self):
result = futures._startup(funcIter, 30)
self.assertEqual(result, 9455)
def test_from_generator_multi(self):
self.w = self.multiworker_set()
result = futures._startup(funcIter, 30)
self.assertEqual(result, 9455)
class TestCoherent(TestScoopCommon):
    def __init__(self, *args, **kwargs):
        super(TestCoherent, self).__init__(*args, **kwargs)
def test_mapReduce(self):
result = futures._startup(funcMapReduce, [10, 20, 30])
self.assertEqual(result, 1400)
def test_doubleMapReduce(self):
result = futures._startup(funcDoubleMapReduce, [10, 20, 30])
self.assertTrue(result)
def test_mapScan(self):
result = futures._startup(funcMapScan, [10, 20, 30])
self.assertEqual(max(result), 1400)
class TestShared(TestScoopCommon):
    def __init__(self, *args, **kwargs):
        super(TestShared, self).__init__(*args, **kwargs)
def test_shareConstant(self):
        result = futures._startup(funcSharedConstant)
self.assertEqual(result, True)
def test_shareFunction(self):
        result = futures._startup(funcSharedFunction)
self.assertEqual(result, True)
if __name__ == '__main__' and os.environ.get('IS_ORIGIN', "1") == "1":
utSimple = unittest.TestLoader().loadTestsFromTestCase(TestSingleFunction)
utComplex = unittest.TestLoader().loadTestsFromTestCase(TestMultiFunction)
utApi = unittest.TestLoader().loadTestsFromTestCase(TestApi)
utUtils = unittest.TestLoader().loadTestsFromTestCase(TestUtils)
utCoherent = unittest.TestLoader().loadTestsFromTestCase(TestCoherent)
utShared = unittest.TestLoader().loadTestsFromTestCase(TestShared)
utStat = unittest.TestLoader().loadTestsFromTestCase(TestStat)
utStopWatch = unittest.TestLoader().loadTestsFromTestCase(TestStopWatch)
if len(sys.argv) > 1:
if sys.argv[1] == "simple":
unittest.TextTestRunner(verbosity=2).run(utSimple)
elif sys.argv[1] == "complex":
unittest.TextTestRunner(verbosity=2).run(utComplex)
elif sys.argv[1] == "api":
unittest.TextTestRunner(verbosity=2).run(utApi)
elif sys.argv[1] == "utils":
unittest.TextTestRunner(verbosity=2).run(utUtils)
elif sys.argv[1] == "coherent":
unittest.TextTestRunner(verbosity=2).run(utCoherent)
elif sys.argv[1] == "shared":
unittest.TextTestRunner(verbosity=2).run(utShared)
elif sys.argv[1] == "stat":
unittest.TextTestRunner(verbosity=2).run(utStat)
elif sys.argv[1] == "stopwatch":
unittest.TextTestRunner(verbosity=2).run(utStopWatch)
elif sys.argv[1] == "verbose":
sys.argv = sys.argv[0:1]
unittest.main(verbosity=2)
else:
unittest.main()
elif __name__ == '__main__':
futures._startup(mainSimple)
| IGITUGraz/scoop | test/tests.py | Python | lgpl-3.0 | 21,163 |
from typing import Any
from django.db import connection
from zerver.lib.management import ZulipBaseCommand
def create_indexes() -> None:
# Creating concurrent indexes is kind of a pain with current versions
# of Django/postgres, because you will get this error with seemingly
# reasonable code:
#
# CREATE INDEX CONCURRENTLY cannot be executed from a function or multi-command string
#
# For a lot more detail on this process, refer to the commit message
# that added this file to the repo.
with connection.cursor() as cursor:
# copied from 0082
print("Creating index zerver_usermessage_starred_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_starred_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 2) != 0;
''')
# copied from 0083
print("Creating index zerver_usermessage_mentioned_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_mentioned_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 8) != 0;
''')
# copied from 0095
print("Creating index zerver_usermessage_unread_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_unread_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 1) = 0;
''')
# copied from 0098
print("Creating index zerver_usermessage_has_alert_word_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_has_alert_word_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 512) != 0;
''')
# copied from 0099
print("Creating index zerver_usermessage_wildcard_mentioned_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_wildcard_mentioned_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 8) != 0 OR (flags & 16) != 0;
''')
# copied from 0177
print("Creating index zerver_usermessage_is_private_message_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_is_private_message_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 2048) != 0;
''')
# copied from 0180
print("Creating index zerver_usermessage_active_mobile_push_notification_id.")
cursor.execute('''
CREATE INDEX IF NOT EXISTS zerver_usermessage_active_mobile_push_notification_id
ON zerver_usermessage (user_profile_id, message_id)
WHERE (flags & 4096) != 0;
''')
print("Finished.")
class Command(ZulipBaseCommand):
help = """Create concurrent indexes for large tables."""
def handle(self, *args: Any, **options: str) -> None:
create_indexes()
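# Added usage note: as a management command this is run manually, e.g.
# `./manage.py create_large_indexes`; every statement above uses
# CREATE INDEX IF NOT EXISTS, so re-running the command is harmless if an
# earlier run was interrupted.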
| brainwane/zulip | zerver/management/commands/create_large_indexes.py | Python | apache-2.0 | 3,096 |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import itertools
import json
import time
from collections import defaultdict
from eventlet import Timeout
from swift.container.sync_store import ContainerSyncStore
from swift.container.backend import ContainerBroker, DATADIR
from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, incorrect_policy_index,
get_reconciler_container_name, get_row_to_q_entry_translator)
from swift.common import db_replicator
from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (Timestamp, hash_path,
storage_directory, majority_size)
class ContainerReplicator(db_replicator.Replicator):
server_type = 'container'
brokerclass = ContainerBroker
datadir = DATADIR
default_port = 6201
def report_up_to_date(self, full_info):
reported_key_map = {
'reported_put_timestamp': 'put_timestamp',
'reported_delete_timestamp': 'delete_timestamp',
'reported_bytes_used': 'bytes_used',
'reported_object_count': 'count',
}
for reported, value_key in reported_key_map.items():
if full_info[reported] != full_info[value_key]:
return False
return True
def _gather_sync_args(self, replication_info):
parent = super(ContainerReplicator, self)
sync_args = parent._gather_sync_args(replication_info)
if len(POLICIES) > 1:
sync_args += tuple(replication_info[k] for k in
('status_changed_at', 'count',
'storage_policy_index'))
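            # The extra values extend the base sync tuple positionally; the
            # receiving end (ContainerReplicatorRpc._parse_sync_args below)
            # reads them back as args[7], args[8] and args[9].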
return sync_args
def _handle_sync_response(self, node, response, info, broker, http,
different_region):
parent = super(ContainerReplicator, self)
if is_success(response.status):
remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at.internal)
sync_timestamps = ('created_at', 'put_timestamp',
'delete_timestamp')
if any(info[key] != remote_info[key] for key in sync_timestamps):
broker.merge_timestamps(*(remote_info[key] for key in
sync_timestamps))
rv = parent._handle_sync_response(
node, response, info, broker, http, different_region)
return rv
def find_local_handoff_for_part(self, part):
"""
Look through devices in the ring for the first handoff device that was
identified during job creation as available on this node.
:returns: a node entry from the ring
"""
nodes = self.ring.get_part_nodes(part)
more_nodes = self.ring.get_more_nodes(part)
for node in itertools.chain(nodes, more_nodes):
if node['id'] in self._local_device_ids:
return node
return None
def get_reconciler_broker(self, timestamp):
"""
Get a local instance of the reconciler container broker that is
appropriate to enqueue the given timestamp.
:param timestamp: the timestamp of the row to be enqueued
:returns: a local reconciler broker
"""
container = get_reconciler_container_name(timestamp)
if self.reconciler_containers and \
container in self.reconciler_containers:
return self.reconciler_containers[container][1]
account = MISPLACED_OBJECTS_ACCOUNT
part = self.ring.get_part(account, container)
node = self.find_local_handoff_for_part(part)
if not node:
raise DeviceUnavailable(
'No mounted devices found suitable to Handoff reconciler '
'container %s in partition %s' % (container, part))
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, node['device'], db_dir, hsh + '.db')
broker = ContainerBroker(db_path, account=account, container=container)
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp, 0)
except DatabaseAlreadyExists:
pass
if self.reconciler_containers is not None:
self.reconciler_containers[container] = part, broker, node['id']
return broker
def feed_reconciler(self, container, item_list):
"""
Add queue entries for rows in item_list to the local reconciler
container database.
:param container: the name of the reconciler container
:param item_list: the list of rows to enqueue
:returns: True if successfully enqueued
"""
try:
reconciler = self.get_reconciler_broker(container)
except DeviceUnavailable as e:
self.logger.warning('DeviceUnavailable: %s', e)
return False
self.logger.debug('Adding %d objects to the reconciler at %s',
len(item_list), reconciler.db_file)
try:
reconciler.merge_items(item_list)
except (Exception, Timeout):
self.logger.exception('UNHANDLED EXCEPTION: trying to merge '
'%d items to reconciler container %s',
len(item_list), reconciler.db_file)
return False
return True
def dump_to_reconciler(self, broker, point):
"""
Look for object rows for objects updates in the wrong storage policy
in broker with a ``ROWID`` greater than the rowid given as point.
:param broker: the container broker with misplaced objects
:param point: the last verified ``reconciler_sync_point``
:returns: the last successful enqueued rowid
"""
max_sync = broker.get_max_row()
misplaced = broker.get_misplaced_since(point, self.per_diff)
if not misplaced:
return max_sync
translator = get_row_to_q_entry_translator(broker)
errors = False
low_sync = point
while misplaced:
batches = defaultdict(list)
for item in misplaced:
container = get_reconciler_container_name(item['created_at'])
batches[container].append(translator(item))
for container, item_list in batches.items():
success = self.feed_reconciler(container, item_list)
if not success:
errors = True
point = misplaced[-1]['ROWID']
if not errors:
low_sync = point
misplaced = broker.get_misplaced_since(point, self.per_diff)
return low_sync
def _post_replicate_hook(self, broker, info, responses):
if info['account'] == MISPLACED_OBJECTS_ACCOUNT:
return
try:
self.sync_store.update_sync_store(broker)
except Exception:
self.logger.exception('Failed to update sync_store %s' %
broker.db_file)
point = broker.get_reconciler_sync()
if not broker.has_multiple_policies() and info['max_row'] != point:
broker.update_reconciler_sync(info['max_row'])
return
max_sync = self.dump_to_reconciler(broker, point)
success = responses.count(True) >= majority_size(len(responses))
if max_sync > point and success:
# to be safe, only slide up the sync point with a majority on
# replication
broker.update_reconciler_sync(max_sync)
def delete_db(self, broker):
"""
Ensure that reconciler databases are only cleaned up at the end of the
replication run.
"""
if (self.reconciler_cleanups is not None and
broker.account == MISPLACED_OBJECTS_ACCOUNT):
# this container shouldn't be here, make sure it's cleaned up
self.reconciler_cleanups[broker.container] = broker
return
try:
# DB is going to get deleted. Be preemptive about it
self.sync_store.remove_synced_container(broker)
except Exception:
self.logger.exception('Failed to remove sync_store entry %s' %
broker.db_file)
return super(ContainerReplicator, self).delete_db(broker)
def replicate_reconcilers(self):
"""
Ensure any items merged to reconciler containers during replication
are pushed out to correct nodes and any reconciler containers that do
not belong on this node are removed.
"""
self.logger.info('Replicating %d reconciler containers',
len(self.reconciler_containers))
for part, reconciler, node_id in self.reconciler_containers.values():
self.cpool.spawn_n(
self._replicate_object, part, reconciler.db_file, node_id)
self.cpool.waitall()
        # wipe out the cache to disable the bypass in delete_db
cleanups = self.reconciler_cleanups
self.reconciler_cleanups = self.reconciler_containers = None
self.logger.info('Cleaning up %d reconciler containers',
len(cleanups))
for reconciler in cleanups.values():
self.cpool.spawn_n(self.delete_db, reconciler)
self.cpool.waitall()
self.logger.info('Finished reconciler replication')
def run_once(self, *args, **kwargs):
self.reconciler_containers = {}
self.reconciler_cleanups = {}
self.sync_store = ContainerSyncStore(self.root,
self.logger,
self.mount_check)
rv = super(ContainerReplicator, self).run_once(*args, **kwargs)
if any([self.reconciler_containers, self.reconciler_cleanups]):
self.replicate_reconcilers()
return rv
class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
def _parse_sync_args(self, args):
parent = super(ContainerReplicatorRpc, self)
remote_info = parent._parse_sync_args(args)
if len(args) > 9:
remote_info['status_changed_at'] = args[7]
remote_info['count'] = args[8]
remote_info['storage_policy_index'] = args[9]
return remote_info
def _get_synced_replication_info(self, broker, remote_info):
"""
Sync the remote_info storage_policy_index if needed and return the
newly synced replication info.
:param broker: the database broker
:param remote_info: the remote replication info
:returns: local broker replication info
"""
info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info):
status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index(
remote_info['storage_policy_index'],
timestamp=status_changed_at)
info = broker.get_replication_info()
return info
| larsbutler/swift | swift/container/replicator.py | Python | apache-2.0 | 12,083 |
# Copyright 2016 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from c7n.manager import resources
from c7n.query import QueryResourceManager
@resources.register('rest-api')
class RestAPI(QueryResourceManager):
resource_type = "aws.apigateway.restapis"
| RyanWolfe/cloud-custodian | c7n/resources/apigw.py | Python | apache-2.0 | 781 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
from op_test import OpTest
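# Reference formula implemented by smooth_l1_loss_forward below, applied
# element-wise to the difference val = x - y:
#   f(val) = 0.5 * sigma^2 * val^2    if |val| < 1 / sigma^2
#   f(val) = |val| - 0.5 / sigma^2    otherwise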
def smooth_l1_loss_forward(val, sigma2):
abs_val = abs(val)
if abs_val < 1.0 / sigma2:
return 0.5 * val * val * sigma2
else:
return abs_val - 0.5 / sigma2
class TestSmoothL1LossOp1(OpTest):
def setUp(self):
self.op_type = "smooth_l1_loss"
dims = (5, 10)
self.inputs = {
'X': np.random.random(dims).astype("float32"),
'Y': np.random.random(dims).astype("float32")
}
sigma = 3.0
self.attrs = {'sigma': sigma}
sigma2 = sigma * sigma
diff = self.inputs['X'] - self.inputs['Y']
loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2).sum(1)
loss = loss.reshape((dims[0], 1))
self.outputs = {
'Diff': diff.astype('float32'),
'Out': loss.astype('float32')
}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.02)
def test_check_grad_ingore_x(self):
self.check_grad(
['Y'], 'Out', max_relative_error=0.03, no_grad_set=set("X"))
def test_check_grad_ingore_y(self):
self.check_grad(
['X'], 'Out', max_relative_error=0.03, no_grad_set=set('Y'))
class TestSmoothL1LossOp2(OpTest):
def setUp(self):
self.op_type = "smooth_l1_loss"
dims = (5, 10)
self.inputs = {
'X': np.random.random(dims).astype("float32"),
'Y': np.random.random(dims).astype("float32"),
'InsideWeight': np.random.random(dims).astype("float32"),
'OutsideWeight': np.random.random(dims).astype("float32")
}
sigma = 3.0
self.attrs = {'sigma': sigma}
sigma2 = sigma * sigma
diff = self.inputs['X'] - self.inputs['Y']
diff = diff * self.inputs['InsideWeight']
loss = np.vectorize(smooth_l1_loss_forward)(diff, sigma2)
loss = loss * self.inputs['OutsideWeight']
loss = loss.sum(1).reshape((dims[0], 1))
self.outputs = {
'Diff': diff.astype('float32'),
'Out': loss.astype('float32')
}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X', 'Y'], 'Out', max_relative_error=0.03)
def test_check_grad_ingore_x(self):
self.check_grad(
['Y'],
'Out',
max_relative_error=0.03,
no_grad_set=set(['X', 'InsideWeight', 'OutsideWeight']))
def test_check_grad_ingore_y(self):
self.check_grad(
['X'],
'Out',
max_relative_error=0.03,
no_grad_set=set(['Y', 'InsideWeight', 'OutsideWeight']))
if __name__ == '__main__':
unittest.main()
| Canpio/Paddle | python/paddle/fluid/tests/unittests/test_smooth_l1_loss_op.py | Python | apache-2.0 | 3,483 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Grappler LayoutOptimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import device_properties_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.core.protobuf import saver_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import test_util
from tensorflow.python.grappler import cluster as gcluster
from tensorflow.python.grappler import tf_optimizer
from tensorflow.python.layers import convolutional as conv_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import gen_nn_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import gradient_descent
from tensorflow.python.training import saver as saver_lib
def _weight(shape):
"""Generates a weight of a given shape."""
return random_ops.truncated_normal(shape, seed=0, stddev=0.1)
def _bias(shape):
"""Generates a bias of a given shape."""
return constant_op.constant(0.1, shape=shape)
def _conv2d(x, w):
"""Returns a 2d convolution layer with full stride."""
return nn.conv2d(x, w, strides=[1, 1, 1, 1], padding='SAME')
def _max_pool_2x2(x):
"""Downsamples a feature map by 2X."""
return nn.max_pool(
x, ksize=[1, 2, 2, 1], strides=[1, 2, 2, 1], padding='SAME')
# Taken from tensorflow/examples/tutorials/mnist/mnist_deep.py
def _two_layer_model(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
b_conv1 = _bias([32])
h_conv1 = nn.relu(_conv2d(x_image, w_conv1) + b_conv1)
h_pool1 = _max_pool_2x2(h_conv1)
w_conv2 = _weight([5, 5, 32, 64])
b_conv2 = _bias([64])
h_conv2 = nn.relu(_conv2d(h_pool1, w_conv2) + b_conv2)
h_pool2 = _max_pool_2x2(h_conv2)
return h_pool2
def _model_with_second_port():
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([2, 5, 5, 4], seed=0)
scale = constant_op.constant(0.1, shape=[4])
offset = constant_op.constant(0.3, shape=[4])
y, mean, _ = nn.fused_batch_norm(x, scale, offset)
mul = math_ops.add(y, mean)
output = array_ops.identity(mul)
return output
def _model_with_branch(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
w_conv2 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
c_conv2 = _conv2d(x_image, w_conv2)
add = math_ops.add(c_conv1, c_conv2)
return add
def _model_with_vec_and_4d(x):
x_image = array_ops.reshape(x, [-1, 28, 28, 1])
w_conv1 = _weight([5, 5, 1, 32])
c_conv1 = _conv2d(x_image, w_conv1)
vector = constant_op.constant(6.4, shape=[32])
add = math_ops.add(c_conv1, vector)
return add
def _loop():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = functional_ops.map_fn(_two_layer_model, elems, dtype=dtypes.float32)
return outputs
def _loop_with_branch():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = functional_ops.map_fn(
_model_with_branch, elems, dtype=dtypes.float32)
return outputs
def _loop_with_vec_and_4d():
random_seed.set_random_seed(0)
x1 = random_ops.truncated_normal([1, 784], seed=0)
x2 = random_ops.truncated_normal([1, 784], seed=0)
x3 = random_ops.truncated_normal([1, 784], seed=0)
x4 = random_ops.truncated_normal([1, 784], seed=0)
elems = (x1, x2, x3, x4)
outputs = functional_ops.map_fn(
_model_with_vec_and_4d, elems, dtype=dtypes.float32)
return outputs
def _get_config(layout_optimizer=True):
if layout_optimizer:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
else:
rewrite_options = rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.OFF,
# do not remove duplicated nodes
arithmetic_optimization=rewriter_config_pb2.RewriterConfig.OFF)
rewrite_options.min_graph_nodes = -1
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrite_options, build_cost_model=1)
config = config_pb2.ConfigProto(graph_options=graph_options)
config.graph_options.optimizer_options.opt_level = -1
return config
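# build_cost_model=1 asks the session to populate RunMetadata.cost_graph,
# which the tests below walk in order to count the Transpose/VecPermute nodes
# inserted by the layout optimizer.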
def _simple_metagraph(depthwise=False):
random_seed.set_random_seed(0)
x = variables.Variable(random_ops.truncated_normal([1, 200, 200, 3], seed=0))
conv = conv_layers.separable_conv2d if depthwise else conv_layers.conv2d
y = conv(x, 32, [3, 3])
z = conv(y, 32, [3, 3])
optimizer = gradient_descent.GradientDescentOptimizer(1e-4)
loss = math_ops.reduce_mean(z)
train_op = optimizer.minimize(loss)
graph = ops.get_default_graph()
graph.add_to_collection('train_op', train_op)
meta_graph = saver_lib.export_meta_graph(graph_def=graph.as_graph_def())
return meta_graph
def _get_cluster():
named_device = device_properties_pb2.NamedDevice()
named_device.name = '/GPU:0'
named_device.properties.type = 'GPU'
named_device.properties.num_cores = 24
named_device.properties.frequency = 1000
named_device.properties.environment['architecture'] = '4'
cluster = gcluster.Cluster(devices=[named_device])
return cluster
def _is_transpose(node):
return node.endswith('TransposeNHWCToNCHW-LayoutOptimizer') or node.endswith(
'TransposeNCHWToNHWC-LayoutOptimizer')
def _is_permute(node):
return node.endswith('VecPermuteNHWCToNCHW-LayoutOptimizer') or node.endswith(
'VecPermuteNCHWToNHWC-LayoutOptimizer')
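# Nodes inserted by Grappler's layout optimizer carry a '-LayoutOptimizer'
# suffix, which is what the two helpers above key on to identify the added
# Transpose and VecPermute ops.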
class LayoutOptimizerTest(test.TestCase):
"""Tests the Grappler layout optimizer."""
def _assert_trans_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-TransposeNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_trans_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-TransposeNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_map_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-DimMapNHWCToNCHW-LayoutOptimizer', nodes)
def _assert_vec_nchw_to_nhwc(self, name, nodes):
self.assertIn(name + '-VecPermuteNCHWToNHWC-LayoutOptimizer', nodes)
def _assert_vec_nhwc_to_nchw(self, name, nodes):
self.assertIn(name + '-VecPermuteNHWCToNCHW-LayoutOptimizer', nodes)
def _train(self, checkpoint_path, layout_optimizer=False, restore=False):
ops.reset_default_graph()
graph = ops.get_default_graph()
with session.Session(
config=_get_config(layout_optimizer), graph=graph) as sess:
batch = 2
height = 6
width = 7
input_channels = 3
shape = [batch, height, width, input_channels]
image = array_ops.placeholder(dtype='float32', shape=shape)
conv1 = conv_layers.conv2d(image, 32, [3, 3])
conv2 = conv_layers.conv2d(conv1, 32, [3, 3])
optimizer = gradient_descent.GradientDescentOptimizer(0.01)
loss = math_ops.reduce_mean(conv2)
train_op = optimizer.minimize(loss)
saver = saver_lib.Saver(write_version=saver_pb2.SaverDef.V2)
if restore:
saver.restore(sess, checkpoint_path)
else:
self.evaluate(variables.global_variables_initializer())
np.random.seed(0)
for _ in range(2):
image_val = np.random.rand(*shape).astype(np.float32)
sess.run([loss, train_op], feed_dict={image: image_val})
if restore:
all_vars = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)
all_vars_values = [var.eval(session=sess) for var in all_vars]
return all_vars_values
else:
saver.save(sess, checkpoint_path)
def testTwoConvLayers(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
output = _two_layer_model(x)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Relu_1-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSplitWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dim = array_ops.placeholder(dtype='int32')
split = array_ops.split(conv, 2, axis=dim)
scale = constant_op.constant(0.1, shape=[32])
offset = constant_op.constant(0.3, shape=[32])
bn0 = nn.fused_batch_norm(split[0], scale, offset)
bn1 = nn.fused_batch_norm(split[1], scale, offset)
add = bn0[0] + bn1[0]
output = array_ops.identity(add)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dim: 3})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata, feed_dict={dim: 3})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('add_2-0-0', nodes)
self._assert_map_nhwc_to_nchw('split-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSplitVWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dim = array_ops.placeholder(dtype='int32')
sizes = constant_op.constant([50, 10, 4], shape=[3])
split = gen_array_ops.split_v(
value=conv, size_splits=sizes, axis=dim, num_split=3)
output = math_ops.reduce_sum(split[0])
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dim: 3})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata, feed_dict={dim: 3})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('SplitV-0-0', nodes)
self._assert_map_nhwc_to_nchw('SplitV-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testPadWithConstPaddings(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
paddings_val = [[1, 2], [3, 4], [5, 6], [7, 8]]
paddings = constant_op.constant(
paddings_val, dtype='int32', name='PaddingsConst')
pad = array_ops.pad(conv, paddings)
output = array_ops.identity(pad)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Pad-0-0', nodes)
self.assertIn('Pad-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSum(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testCast(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
cast = math_ops.cast(conv, dtype='bool')
output = array_ops.identity(cast)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Cast-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSqueeze(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2])
squeeze = array_ops.squeeze(reduce_sum)
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSqueezeAlongHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2], keepdims=True)
squeeze = array_ops.squeeze(reduce_sum, axis=[1, 2])
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSqueezeAlongNHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[0, 1, 2], keepdims=True)
squeeze = array_ops.squeeze(reduce_sum, axis=[0, 1, 2])
output = array_ops.identity(squeeze)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongHWC(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2, 3])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongNHW(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[0, 1, 2])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongC(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[3])
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Three transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongCKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[3], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Sum-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongHKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[2], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReduceSumAlongWCKeepDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
reduce_sum = math_ops.reduce_sum(conv, axis=[2, 3], keepdims=True)
output = array_ops.identity(reduce_sum)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testConcatWithControlDependency(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
axis = constant_op.constant(3)
var = variables.Variable(3)
assign = state_ops.assign(var, 6)
with ops.control_dependencies([assign]):
concat = array_ops.concat([conv, conv], axis)
output = array_ops.identity(concat)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('concat-0-0', nodes)
self.assertIn('concat-2-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testFill(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
shape = array_ops.shape(conv)
scalar = array_ops.constant(5.7)
fill = array_ops.fill(shape, scalar)
output = array_ops.identity(fill)
x_val = [3.4] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
x: x_val
})
nodes = []
num_transposes = 0
num_vec_permute = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
if _is_permute(node.name):
num_vec_permute += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
# Two vector permute nodes were initially added in the Expand phase of
# LayoutOptimizer; they cancel each other out in the Collapse phase.
expected_vec_permute = 0
self.assertEqual(expected_vec_permute, num_vec_permute)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Fill-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testTile(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
multiple = array_ops.placeholder(dtype='int32')
tile = array_ops.tile(conv, multiple)
output = array_ops.identity(tile)
multiple_val = [2, 3, 4, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={multiple: multiple_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
multiple: multiple_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Tile-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Tile-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReverseWithConstDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dims = constant_op.constant([3, 1], name='DimsConst')
reverse = array_ops.reverse(conv, dims)
output = array_ops.identity(reverse)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('ReverseV2-0-0', nodes)
self.assertIn('ReverseV2-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testReverseWithNonConstDims(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
dims = array_ops.placeholder(dtype='int32')
reverse = array_ops.reverse(conv, dims)
output = array_ops.identity(reverse)
dims_val = [2, 3]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={dims: dims_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
dims: dims_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('ReverseV2-0-0', nodes)
self._assert_map_nhwc_to_nchw('ReverseV2-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSelectOp(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
mean = math_ops.reduce_mean(conv)
condition = math_ops.less(conv, mean)
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Select-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSelectOpConditionUnknownShape(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
condition = array_ops.placeholder(dtype='bool')
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
condition_val = np.zeros((1, 7, 7, 64))
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={condition: condition_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={condition: condition_val})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 3
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSelectOpScalarCondition(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
add = math_ops.add(conv, conv)
condition = constant_op.constant(True)
select = gen_math_ops.select(condition, conv, add)
output = array_ops.identity(select)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Select-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testPadWithNonConstPaddings(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
paddings = array_ops.placeholder(dtype='int32')
pad = array_ops.pad(conv, paddings)
output = array_ops.identity(pad)
paddings_val = [[1, 2], [3, 4], [5, 6], [7, 8]]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={paddings: paddings_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
paddings: paddings_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Pad-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Pad-1', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testMaxPoolV2(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
ksize = constant_op.constant([1, 2, 3, 1], shape=[4])
strides = array_ops.placeholder(dtype='int32', shape=[4])
max_pool = gen_nn_ops.max_pool_v2(conv, ksize, strides, 'VALID')
output = array_ops.identity(max_pool)
strides_val = [1, 3, 2, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={strides: strides_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
strides: strides_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('MaxPoolV2-0-0', nodes)
self._assert_vec_nhwc_to_nchw('MaxPoolV2-2', nodes)
self.assertIn('MaxPoolV2-1-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testMaxPoolGradV2(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
ksize = constant_op.constant([1, 2, 3, 1], shape=[4])
strides = array_ops.placeholder(dtype='int32', shape=[4])
max_pool_grad = gen_nn_ops.max_pool_grad_v2(conv, conv, conv, ksize,
strides, 'VALID')
output = array_ops.identity(max_pool_grad)
strides_val = [1, 3, 2, 1]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={strides: strides_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
strides: strides_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('MaxPoolGradV2-0-0', nodes)
self._assert_vec_nhwc_to_nchw('MaxPoolGradV2-4', nodes)
self.assertIn('MaxPoolGradV2-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testSliceWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
size = array_ops.placeholder(dtype='int32')
s = array_ops.slice(conv, [0, 0, 0, 0], size)
output = array_ops.identity(s)
size_val = [1, 2, 3, 4]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={size: size_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
size: size_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('Slice-0-0', nodes)
self._assert_vec_nhwc_to_nchw('Slice-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testStridedSliceWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
end = array_ops.placeholder(dtype='int32')
s = array_ops.strided_slice(conv, [0, 0, 0, 0], end, strides=[1, 2, 3, 1])
output = array_ops.identity(s)
end_val = [1, 2, 3, 4]
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={end: end_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
end: end_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('StridedSlice-0-0', nodes)
self._assert_vec_nhwc_to_nchw('StridedSlice-2', nodes)
self.assertIn('StridedSlice-1-LayoutOptimizer', nodes)
self.assertIn('StridedSlice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testStridedSliceWithMask1011(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
# This will generate a StridedSlice op with both begin mask and
# end mask set to 11 (binary 1011).
s = conv[:, :, 1:-1, :]
output = array_ops.identity(s)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('strided_slice-0-0', nodes)
self.assertIn('strided_slice-1-LayoutOptimizer', nodes)
self.assertIn('strided_slice-2-LayoutOptimizer', nodes)
self.assertIn('strided_slice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testStridedSliceWithMask0111(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
# This will generate a StridedSlice op with both begin mask and
# end mask set to 7 (binary 0111).
s = conv[:, :, :, 1:-1]
output = array_ops.identity(s)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('strided_slice-0-0', nodes)
self.assertIn('strided_slice-1-LayoutOptimizer', nodes)
self.assertIn('strided_slice-2-LayoutOptimizer', nodes)
self.assertIn('strided_slice-3-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testStridedSliceGradWithNonConstAxis(self):
if test.is_gpu_available(cuda_only=True):
random_seed.set_random_seed(0)
x = random_ops.truncated_normal([1, 784], seed=0)
conv = _two_layer_model(x)
end = array_ops.placeholder(dtype='int32')
shape = array_ops.shape(conv)
end_val = [1, 2, 3, 4]
s = array_ops.strided_slice(
conv, [0, 0, 0, 0], end_val, strides=[1, 2, 3, 1])
s_grad = array_ops.strided_slice_grad(shape, [0, 0, 0, 0], end,
[1, 2, 3, 1], s)
output = array_ops.identity(s_grad)
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={end: end_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
end: end_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('StridedSliceGrad-0-0', nodes)
self._assert_vec_nhwc_to_nchw('StridedSliceGrad-2', nodes)
self.assertIn('StridedSlice-1-LayoutOptimizer', nodes)
self.assertIn('StridedSlice-2-LayoutOptimizer', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testShapeN(self):
if test.is_gpu_available(cuda_only=True):
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
shapen = array_ops.shape_n([conv, conv])
output = math_ops.add(shapen[0], shapen[1])
x_val = [1.7] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={
x: x_val
})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 1
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self._assert_vec_nchw_to_nhwc('ShapeN-0-0', nodes)
self.assertAllEqual(output_val_ref, output_val)
def testShapeNFollowedByNotConvertibleNodeReshape(self):
if test.is_gpu_available(cuda_only=True):
x = array_ops.placeholder(dtype='float32')
conv = _two_layer_model(x)
conv_reshape = array_ops.reshape(conv, [1, 1, 1, -1])
shapen = array_ops.shape_n([conv, conv_reshape])
shape = array_ops.identity(shapen[1])
ones = array_ops.ones(shape)
output = math_ops.add_n([conv_reshape, ones])
x_val = [1.7] * 784
with session.Session(config=_get_config(False)) as sess:
output_val_ref = sess.run(output, feed_dict={x: x_val})
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(
output, run_metadata=metadata, feed_dict={x: x_val})
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testLoop(self):
if test.is_gpu_available(cuda_only=True):
output = _loop()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
# Four transposes were initially added in the Expand phase of
# LayoutOptimizer; two of them are cancelled out in the Collapse phase.
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/MaxPool_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testLoopWithBranch(self):
if test.is_gpu_available(cuda_only=True):
output = _loop_with_branch()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 3
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/Add_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testLoopWithVecAnd4D(self):
if test.is_gpu_available(cuda_only=True):
output = _loop_with_vec_and_4d()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('map/while/Conv2D-0', nodes)
self._assert_trans_nchw_to_nhwc('map/while/Add_1-0-2', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
def testBinaryOpSecondPort(self):
if test.is_gpu_available(cuda_only=True):
output = _model_with_second_port()
with session.Session(config=_get_config(False)) as sess:
output_val_ref = self.evaluate(output)
with session.Session(config=_get_config()) as sess:
metadata = config_pb2.RunMetadata()
output_val = sess.run(output, run_metadata=metadata)
nodes = []
num_transposes = 0
for node in metadata.cost_graph.node:
if _is_transpose(node.name):
num_transposes += 1
nodes.append(node.name)
expected_num_transposes = 2
self.assertEqual(expected_num_transposes, num_transposes)
self._assert_trans_nhwc_to_nchw('FusedBatchNorm-0', nodes)
self._assert_trans_nchw_to_nhwc('Add-0-0', nodes)
self.assertAllClose(output_val_ref, output_val, atol=1e-3)
@test_util.run_deprecated_v1
def testGradient(self):
meta_graph = _simple_metagraph()
config = config_pb2.ConfigProto()
config.graph_options.rewrite_options.CopyFrom(
rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
min_graph_nodes=-1))
optimized_graph = tf_optimizer.OptimizeGraph(
config, meta_graph, cluster=_get_cluster())
found = 0
for node in optimized_graph.node:
if node.op in ['Conv2D', 'Conv2DBackpropFilter', 'Conv2DBackpropInput']:
found += 1
self.assertEqual(node.attr['data_format'].s, b'NCHW')
self.assertEqual(found, 5)
@test_util.run_deprecated_v1
def testDepthwise(self):
meta_graph = _simple_metagraph(depthwise=True)
config = config_pb2.ConfigProto()
config.graph_options.rewrite_options.CopyFrom(
rewriter_config_pb2.RewriterConfig(
layout_optimizer=rewriter_config_pb2.RewriterConfig.ON,
min_graph_nodes=-1))
optimized_graph = tf_optimizer.OptimizeGraph(
config, meta_graph, cluster=_get_cluster())
found = 0
for node in optimized_graph.node:
if node.op in [
'DepthwiseConv2dNative', 'DepthwiseConv2dNativeBackpropFilter',
'DepthwiseConv2dNativeBackpropInput'
]:
found += 1
self.assertEqual(node.attr['data_format'].s, b'NCHW')
self.assertEqual(found, 6)
def testCheckpointCompatibility(self):
if not test.is_gpu_available(cuda_only=True):
self.skipTest('GPU required')
checkpoint_path = self.get_temp_dir()
self._train(checkpoint_path)
vars_expected = self._train(checkpoint_path, restore=True)
vars_layout_optimized = self._train(
checkpoint_path, restore=True, layout_optimizer=True)
for var_expected, var_layout_optimized in zip(vars_expected,
vars_layout_optimized):
self.assertAllClose(var_expected, var_layout_optimized, atol=1e-6)
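# Editorial note (not part of the original test file): the node names asserted
# throughout these tests ('Conv2D-0', 'Pad-1-LayoutOptimizer', 'Slice-2', ...)
# follow the naming scheme the layout optimizer appears to use for nodes it
# inserts -- data-format transposes keep the producing op's name plus port
# suffixes, while constant inputs it rewrites (paddings, sizes, strides) get a
# '-LayoutOptimizer' suffix. The _assert_trans_*/_assert_vec_*/_assert_map_*
# helpers defined earlier in this file encode that convention.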
if __name__ == '__main__':
test.main()
| hfp/tensorflow-xsmm | tensorflow/python/grappler/layout_optimizer_test.py | Python | apache-2.0 | 58,406 |
import os
from unittest import TestCase
from mock import patch
from os.path import exists
import shutil
from carbon.tests.util import TestSettings
from carbon.database import WhisperDatabase, CeresDatabase
class WhisperDatabaseTest(TestCase):
def setUp(self):
self._sep_patch = patch.object(os.path, 'sep', "/")
self._sep_patch.start()
def tearDown(self):
self._sep_patch.stop()
def test_getFilesystemPath(self):
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
database = WhisperDatabase(settings)
result = database.getFilesystemPath('stats.example.counts')
self.assertEqual(result, '/tmp/stats/example/counts.wsp') # nosec
def test_getTaggedFilesystemPath(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = False
database = WhisperDatabase(settings)
result = database.getFilesystemPath(metric)
self.assertEqual(
result, '/tmp/_tagged/872/252/stats_DOT_example_DOT_counts;tag1=value1.wsp') # nosec
result = database.exists(metric)
self.assertEqual(result, False)
def test_getTaggedFilesystemPathHashed(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = True
database = WhisperDatabase(settings)
result = database.getFilesystemPath(metric)
self.assertEqual(
result,
'/tmp/_tagged/872/252/' + # nosec
'872252dcead671982862f82a3b440f02aa8f525dd6d0f2921de0dc2b3e874ad0.wsp')
result = database.exists(metric)
self.assertEqual(result, False)
def test_migrateTaggedFilesystemPathHashed(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = False
database = WhisperDatabase(settings)
result = database.exists(metric)
self.assertEqual(result, False)
old_path = database.getFilesystemPath(metric)
self.assertEqual(
old_path, '/tmp/_tagged/872/252/stats_DOT_example_DOT_counts;tag1=value1.wsp') # nosec
self.assertEqual(exists(old_path), False)
result = database.create(metric, [(60, 60)], 0.5, 'average')
self.assertEqual(exists(old_path), True)
result = database.exists(metric)
self.assertEqual(result, True)
settings['TAG_HASH_FILENAMES'] = True
database = WhisperDatabase(settings)
hashed_path = database.getFilesystemPath(metric)
self.assertEqual(
hashed_path,
'/tmp/_tagged/872/252/' + # nosec
'872252dcead671982862f82a3b440f02aa8f525dd6d0f2921de0dc2b3e874ad0.wsp')
self.assertEqual(exists(hashed_path), False)
result = database.exists(metric)
self.assertEqual(result, True)
self.assertEqual(exists(old_path), False)
self.assertEqual(exists(hashed_path), True)
os.remove(hashed_path)
class CeresDatabaseTest(TestCase):
def setUp(self):
self._sep_patch = patch.object(os.path, 'sep', "/")
self._sep_patch.start()
def tearDown(self):
self._sep_patch.stop()
def test_getFilesystemPath(self):
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
database = CeresDatabase(settings)
result = database.getFilesystemPath('stats.example.counts')
self.assertEqual(result, '/tmp/stats/example/counts') # nosec
def test_getTaggedFilesystemPath(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = False
database = CeresDatabase(settings)
result = database.getFilesystemPath(metric)
self.assertEqual(
result, '/tmp/_tagged/872/252/stats_DOT_example_DOT_counts;tag1=value1') # nosec
result = database.exists(metric)
self.assertEqual(result, False)
def test_getTaggedFilesystemPathHashed(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = True
database = CeresDatabase(settings)
result = database.getFilesystemPath(metric)
self.assertEqual(
result,
'/tmp/_tagged/872/252/' + # nosec
'872252dcead671982862f82a3b440f02aa8f525dd6d0f2921de0dc2b3e874ad0')
result = database.exists(metric)
self.assertEqual(result, False)
def test_migrateTaggedFilesystemPathHashed(self):
metric = 'stats.example.counts;tag1=value1'
settings = TestSettings()
settings['LOCAL_DATA_DIR'] = '/tmp/'
settings['TAG_HASH_FILENAMES'] = False
database = CeresDatabase(settings)
result = database.exists(metric)
self.assertEqual(result, False)
old_path = database.getFilesystemPath(metric)
self.assertEqual(
old_path, '/tmp/_tagged/872/252/stats_DOT_example_DOT_counts;tag1=value1') # nosec
self.assertEqual(exists(old_path), False)
result = database.create(metric, [(60, 60)], 0.5, 'average')
self.assertEqual(exists(old_path), True)
result = database.exists(metric)
self.assertEqual(result, True)
settings['TAG_HASH_FILENAMES'] = True
database = CeresDatabase(settings)
hashed_path = database.getFilesystemPath(metric)
self.assertEqual(
hashed_path,
'/tmp/_tagged/872/252/' + # nosec
'872252dcead671982862f82a3b440f02aa8f525dd6d0f2921de0dc2b3e874ad0')
self.assertEqual(exists(hashed_path), False)
result = database.exists(metric)
self.assertEqual(result, True)
self.assertEqual(exists(old_path), False)
self.assertEqual(exists(hashed_path), True)
shutil.rmtree(hashed_path)
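# Editorial note (not part of the original tests): with TAG_HASH_FILENAMES
# enabled, both database backends replace the tagged metric name in the final
# path component with a fixed-length hex digest (presumably so the on-disk
# filename stays short however many tags a metric carries), while the
# '872/252' prefix directories stay the same as in the unhashed layout, as the
# assertions above show.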
| deniszh/carbon | lib/carbon/tests/test_database.py | Python | apache-2.0 | 6,229 |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo.serialization import jsonutils as json
from glance.common import client as base_client
from glance.common import exception
from glance import i18n
_ = i18n._
class CacheClient(base_client.BaseClient):
DEFAULT_PORT = 9292
DEFAULT_DOC_ROOT = '/v1'
def delete_cached_image(self, image_id):
"""
Delete a specified image from the cache
"""
self.do_request("DELETE", "/cached_images/%s" % image_id)
return True
def get_cached_images(self, **kwargs):
"""
Returns a list of images stored in the image cache.
"""
res = self.do_request("GET", "/cached_images")
data = json.loads(res.read())['cached_images']
return data
def get_queued_images(self, **kwargs):
"""
Returns a list of images queued for caching
"""
res = self.do_request("GET", "/queued_images")
data = json.loads(res.read())['queued_images']
return data
def delete_all_cached_images(self):
"""
Delete all cached images
"""
res = self.do_request("DELETE", "/cached_images")
data = json.loads(res.read())
num_deleted = data['num_deleted']
return num_deleted
def queue_image_for_caching(self, image_id):
"""
Queue an image for prefetching into cache
"""
self.do_request("PUT", "/queued_images/%s" % image_id)
return True
def delete_queued_image(self, image_id):
"""
Delete a specified image from the cache queue
"""
self.do_request("DELETE", "/queued_images/%s" % image_id)
return True
def delete_all_queued_images(self):
"""
Delete all queued images
"""
res = self.do_request("DELETE", "/queued_images")
data = json.loads(res.read())
num_deleted = data['num_deleted']
return num_deleted
def get_client(host, port=None, timeout=None, use_ssl=False, username=None,
password=None, tenant=None,
auth_url=None, auth_strategy=None,
auth_token=None, region=None,
is_silent_upload=False, insecure=False):
"""
Returns a new Glance client object based on common kwargs.
If an option isn't specified, it falls back to common environment
variable defaults.
"""
if auth_url or os.getenv('OS_AUTH_URL'):
force_strategy = 'keystone'
else:
force_strategy = None
creds = {
'username': username or
os.getenv('OS_AUTH_USER', os.getenv('OS_USERNAME')),
'password': password or
os.getenv('OS_AUTH_KEY', os.getenv('OS_PASSWORD')),
'tenant': tenant or
os.getenv('OS_AUTH_TENANT', os.getenv('OS_TENANT_NAME')),
'auth_url': auth_url or
os.getenv('OS_AUTH_URL'),
'strategy': force_strategy or
auth_strategy or
os.getenv('OS_AUTH_STRATEGY', 'noauth'),
'region': region or
os.getenv('OS_REGION_NAME'),
}
if creds['strategy'] == 'keystone' and not creds['auth_url']:
msg = _("--os_auth_url option or OS_AUTH_URL environment variable "
"required when keystone authentication strategy is enabled\n")
raise exception.ClientConfigurationError(msg)
return CacheClient(
host=host,
port=port,
timeout=timeout,
use_ssl=use_ssl,
auth_token=auth_token or
os.getenv('OS_TOKEN'),
creds=creds,
insecure=insecure)
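# Illustrative usage (editorial, not part of the original module). Assuming a
# Glance API endpoint on localhost with the noauth strategy, a cache-management
# session might look roughly like this:
#
#     client = get_client(host='127.0.0.1', port=9292)
#     for image in client.get_cached_images():
#         print(image)
#     client.queue_image_for_caching('<image-id>')
#     client.delete_all_queued_images()
#
# The host/port values above are assumptions for the example; in practice they
# come from the cache-manage command-line options or OS_* environment
# variables.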
| yanheven/glance | glance/image_cache/client.py | Python | apache-2.0 | 4,184 |
# flake8: noqa
###############################################################################
# Compat file to import the correct modules for each platform and python
# version.
#
# author: Thomas Moreau and Olivier grisel
#
import sys
if sys.version_info[:2] >= (3, 3):
import queue
else:
import Queue as queue
from pickle import PicklingError
if sys.version_info >= (3, 4):
from multiprocessing.process import BaseProcess
else:
from multiprocessing.process import Process as BaseProcess
# Platform specific compat
if sys.platform == "win32":
from .compat_win32 import *
else:
from .compat_posix import *
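# Illustrative usage (editorial, not part of the original module): downstream
# loky/joblib code can rely on a single import path regardless of Python
# version or platform, e.g.:
#
#     from .compat import queue, PicklingError
#     try:
#         item = queue.Queue(maxsize=1).get_nowait()
#     except queue.Empty:
#         item = None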
| vortex-ape/scikit-learn | sklearn/externals/joblib/externals/loky/backend/compat.py | Python | bsd-3-clause | 635 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
import webapp2
import webtest
from google.appengine.ext import ndb
from dashboard import group_report
from dashboard import test_owner
from dashboard import testing_common
from dashboard import utils
from dashboard.models import anomaly
from dashboard.models import bug_data
from dashboard.models import sheriff
from dashboard.models import stoppage_alert
class GroupReportTest(testing_common.TestCase):
def setUp(self):
super(GroupReportTest, self).setUp()
app = webapp2.WSGIApplication(
[('/group_report', group_report.GroupReportHandler)])
self.testapp = webtest.TestApp(app)
def _AddAnomalyEntities(
self, revision_ranges, test_key, sheriff_key, bug_id=None):
"""Adds a group of Anomaly entities to the datastore."""
urlsafe_keys = []
for start_rev, end_rev in revision_ranges:
anomaly_key = anomaly.Anomaly(
start_revision=start_rev, end_revision=end_rev,
test=test_key, bug_id=bug_id, sheriff=sheriff_key,
median_before_anomaly=100, median_after_anomaly=200).put()
urlsafe_keys.append(anomaly_key.urlsafe())
return urlsafe_keys
def _AddTests(self):
"""Adds sample Test entities and returns their keys."""
testing_common.AddTests(['ChromiumGPU'], ['linux-release'], {
'scrolling-benchmark': {
'first_paint': {},
'mean_frame_time': {},
}
})
keys = [
utils.TestKey(
'ChromiumGPU/linux-release/scrolling-benchmark/first_paint'),
utils.TestKey(
'ChromiumGPU/linux-release/scrolling-benchmark/mean_frame_time'),
]
# By default, all Test entities have an improvement_direction of UNKNOWN,
# meaning that neither direction is considered an improvement.
# Here we set the improvement direction so that some anomalies are
# considered improvements.
for test_key in keys:
test = test_key.get()
test.improvement_direction = anomaly.DOWN
test.put()
return keys
def _AddSheriff(self):
"""Adds a Sheriff entity and returns the key."""
return sheriff.Sheriff(
id='Chromium Perf Sheriff', email='[email protected]').put()
def testGet_WithAnomalyKeys_ShowsSelectedAndOverlapping(self):
sheriff_key = self._AddSheriff()
test_keys = self._AddTests()
selected_ranges = [(400, 900), (200, 700)]
overlapping_ranges = [(300, 500), (500, 600), (600, 800)]
non_overlapping_ranges = [(100, 200)]
selected_keys = self._AddAnomalyEntities(
selected_ranges, test_keys[0], sheriff_key)
self._AddAnomalyEntities(
overlapping_ranges, test_keys[0], sheriff_key)
self._AddAnomalyEntities(
non_overlapping_ranges, test_keys[0], sheriff_key)
response = self.testapp.get(
'/group_report?keys=%s' % ','.join(selected_keys))
alert_list = self.GetEmbeddedVariable(response, 'ALERT_LIST')
# Expect selected alerts + overlapping alerts,
# but not the non-overlapping alert.
self.assertEqual(5, len(alert_list))
def testGet_WithKeyOfNonExistentAlert_ShowsError(self):
key = ndb.Key('Anomaly', 123)
response = self.testapp.get('/group_report?keys=%s' % key.urlsafe())
self.assertIn('error', response.body)
self.assertIn('No Anomaly found for key', response.body)
def testGet_WithInvalidKeyParameter_ShowsError(self):
response = self.testapp.get('/group_report?keys=foobar')
self.assertIn('error', response.body)
self.assertIn('Invalid Anomaly key', response.body)
def testGet_WithRevParameter(self):
# If the rev parameter is given, then all alerts whose revision range
# includes the given revision should be included.
sheriff_key = self._AddSheriff()
test_keys = self._AddTests()
self._AddAnomalyEntities(
[(190, 210), (200, 300), (100, 200), (400, 500)],
test_keys[0], sheriff_key)
response = self.testapp.get('/group_report?rev=200')
alert_list = self.GetEmbeddedVariable(response, 'ALERT_LIST')
self.assertEqual(3, len(alert_list))
def testGet_WithInvalidRevParameter_ShowsError(self):
response = self.testapp.get('/group_report?rev=foo')
self.assertIn('error', response.body)
self.assertIn('Invalid rev', response.body)
def testGet_WithBugIdParameter(self):
sheriff_key = self._AddSheriff()
test_keys = self._AddTests()
bug_data.Bug(id=123).put()
self._AddAnomalyEntities(
[(200, 300), (100, 200), (400, 500)],
test_keys[0], sheriff_key, bug_id=123)
self._AddAnomalyEntities(
[(150, 250)], test_keys[0], sheriff_key)
response = self.testapp.get('/group_report?bug_id=123')
alert_list = self.GetEmbeddedVariable(response, 'ALERT_LIST')
self.assertEqual(3, len(alert_list))
def testGet_WithBugIdParameter_ListsStoppageAlerts(self):
test_keys = self._AddTests()
bug_data.Bug(id=123).put()
row = testing_common.AddRows(utils.TestPath(test_keys[0]), {100})[0]
alert = stoppage_alert.CreateStoppageAlert(test_keys[0].get(), row)
alert.bug_id = 123
alert.put()
response = self.testapp.get('/group_report?bug_id=123')
alert_list = self.GetEmbeddedVariable(response, 'ALERT_LIST')
self.assertEqual(1, len(alert_list))
def testGet_WithBugIdForBugThatHasOwner_ShowsOwnerInfo(self):
sheriff_key = self._AddSheriff()
test_keys = self._AddTests()
bug_data.Bug(id=123).put()
test_key = test_keys[0]
test_path_parts = utils.TestPath(test_key).split('/')
test_suite_path = '%s/%s' % (test_path_parts[0], test_path_parts[2])
test_owner.AddOwnerFromDict({test_suite_path: ['[email protected]']})
self._AddAnomalyEntities([(150, 250)], test_key, sheriff_key, bug_id=123)
response = self.testapp.get('/group_report?bug_id=123')
owner_info = self.GetEmbeddedVariable(response, 'OWNER_INFO')
self.assertEqual('[email protected]', owner_info[0]['email'])
def testGet_WithInvalidBugIdParameter_ShowsError(self):
response = self.testapp.get('/group_report?bug_id=foo')
self.assertNotIn('ALERT_LIST', response.body)
self.assertIn('Invalid bug ID', response.body)
if __name__ == '__main__':
unittest.main()
| modulexcite/catapult | dashboard/dashboard/group_report_test.py | Python | bsd-3-clause | 6,327 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..model import Level1Design
def test_Level1Design_inputs():
input_map = dict(bases=dict(mandatory=True,
),
contrasts=dict(),
ignore_exception=dict(nohash=True,
usedefault=True,
),
interscan_interval=dict(mandatory=True,
),
model_serial_correlations=dict(mandatory=True,
),
orthogonalization=dict(),
session_info=dict(mandatory=True,
),
)
inputs = Level1Design.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_Level1Design_outputs():
output_map = dict(ev_files=dict(),
fsf_files=dict(),
)
outputs = Level1Design.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| mick-d/nipype | nipype/interfaces/fsl/tests/test_auto_Level1Design.py | Python | bsd-3-clause | 1,023 |
"""
sentry.client.celery.tasks
~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from celery.decorators import task
from sentry.client.base import SentryClient
from sentry.client.celery import conf
@task(routing_key=conf.CELERY_ROUTING_KEY)
def send(data):
return SentryClient().send(**data)
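# Illustrative usage (editorial, not part of the original module): callers
# normally enqueue this task asynchronously via Celery rather than invoking it
# inline, e.g.:
#
#     send.delay({'message': 'Something broke', 'level': 40})
#
# The keys shown in the dict are hypothetical -- whatever SentryClient().send()
# accepts as keyword arguments is what belongs in ``data``.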
| dcramer/sentry-old | sentry/client/celery/tasks.py | Python | bsd-3-clause | 396 |