repo_name (string, length 5-92) | path (string, length 4-232) | copies (string, 19 classes) | size (string, length 4-7) | content (string, length 721-1.04M) | license (string, 15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
EmanueleCannizzaro/scons | test/Interactive/option-j.py | 1 | 5172 | #!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/Interactive/option-j.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify that "build" command of --interactive mode can take a -j
option to build things in parallel.
"""
import TestSCons
test = TestSCons.TestSCons(combine=1)
test.write('SConstruct', """\
import os
import sys
import time
from SCons.Script import *
def cat(target, source, env):
t = str(target[0])
os.mkdir(t + '.started')
fp = open(t, 'wb')
for s in source:
fp.write(open(str(s), 'rb').read())
fp.close()
os.mkdir(t + '.finished')
def must_be_finished(target, source, env, dir):
if not os.path.exists(dir):
msg = 'build failed, %s does not exist\\n' % dir
sys.stderr.write(msg)
return 1
return cat(target, source, env)
def f1_a_out_must_be_finished(target, source, env):
return must_be_finished(target, source, env, 'f1-a.out.finished')
def f3_a_out_must_be_finished(target, source, env):
return must_be_finished(target, source, env, 'f3-a.out.finished')
def must_wait_for_f2_b_out(target, source, env):
t = str(target[0])
os.mkdir(t + '.started')
f2_b_started = 'f2-b.out.started'
while not os.path.exists(f2_b_started):
time.sleep(1)
fp = open(t, 'wb')
for s in source:
fp.write(open(str(s), 'rb').read())
fp.close()
os.mkdir(t + '.finished')
def _f2_a_out_must_not_be_finished(target, source, env):
f2_a_started = 'f2-a.out.started'
f2_a_finished = 'f2-a.out.finished'
while not os.path.exists(f2_a_started):
time.sleep(1)
msg = 'f2_a_out_must_not_be_finished(["%s"], ["%s"])\\n' % (target[0], source[0])
sys.stdout.write(msg)
if os.path.exists(f2_a_finished):
msg = 'build failed, %s exists\\n' % f2_a_finished
sys.stderr.write(msg)
return 1
return cat(target, source, env)
f2_a_out_must_not_be_finished = Action(_f2_a_out_must_not_be_finished,
strfunction = None)
Cat = Action(cat)
f1_a = Command('f1-a.out', 'f1-a.in', cat)
f1_b = Command('f1-b.out', 'f1-b.in', f1_a_out_must_be_finished)
f2_a = Command('f2-a.out', 'f2-a.in', must_wait_for_f2_b_out)
f2_b = Command('f2-b.out', 'f2-b.in', f2_a_out_must_not_be_finished)
f3_a = Command('f3-a.out', 'f3-a.in', cat)
f3_b = Command('f3-b.out', 'f3-b.in', f3_a_out_must_be_finished)
Command('f1.out', f1_a + f1_b, cat)
Command('f2.out', f2_a + f2_b, cat)
Command('f3.out', f3_a + f3_b, cat)
Command('1', [], Touch('$TARGET'))
Command('2', [], Touch('$TARGET'))
Command('3', [], Touch('$TARGET'))
""")
test.write('f1-a.in', "f1-a.in\n")
test.write('f1-b.in', "f1-b.in\n")
test.write('f2-a.in', "f2-a.in\n")
test.write('f2-b.in', "f2-b.in\n")
test.write('f3-a.in', "f3-a.in\n")
test.write('f3-b.in', "f3-b.in\n")
scons = test.start(arguments = '-Q --interactive')
scons.send("build f1.out\n")
scons.send("build 1\n")
test.wait_for(test.workpath('1'), popen=scons)
test.must_match(test.workpath('f1.out'), "f1-a.in\nf1-b.in\n")
scons.send("build -j2 f2.out\n")
scons.send("build 2\n")
test.wait_for(test.workpath('2'), popen=scons)
test.must_match(test.workpath('f2.out'), "f2-a.in\nf2-b.in\n")
scons.send("build f3.out\n")
scons.send("build 3\n")
test.wait_for(test.workpath('3'))
test.must_match(test.workpath('f3.out'), "f3-a.in\nf3-b.in\n")
expect_stdout = """\
scons>>> cat(["f1-a.out"], ["f1-a.in"])
f1_a_out_must_be_finished(["f1-b.out"], ["f1-b.in"])
cat(["f1.out"], ["f1-a.out", "f1-b.out"])
scons>>> Touch("1")
scons>>> must_wait_for_f2_b_out(["f2-a.out"], ["f2-a.in"])
f2_a_out_must_not_be_finished(["f2-b.out"], ["f2-b.in"])
cat(["f2.out"], ["f2-a.out", "f2-b.out"])
scons>>> Touch("2")
scons>>> cat(["f3-a.out"], ["f3-a.in"])
f3_a_out_must_be_finished(["f3-b.out"], ["f3-b.in"])
cat(["f3.out"], ["f3-a.out", "f3-b.out"])
scons>>> Touch("3")
scons>>>
"""
test.finish(scons, stdout = expect_stdout)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | -1,765,563,623,980,143,600 | 29.423529 | 102 | 0.643852 | false |
plumdog/pysql_browser | pysql_browser/tab_widgets.py | 1 | 1485 | from PySide import QtCore, QtGui
from .query_widgets import QueryWidget
from .results_widgets import ResultsWidget
from . import app_config
class Tabs(QtGui.QTabWidget):
def __init__(self, parent=None):
super().__init__(parent)
self.tabBar().tabCloseRequested.connect(self.close_tab)
def close_tab(self, index):
self.removeTab(index)
class Tab(QtGui.QWidget):
def __init__(self, parent=None):
super().__init__(parent)
self.query_widget = QueryWidget(self)
self.results_widget = ResultsWidget(self)
query_and_results_splitter = QtGui.QSplitter(self)
query_and_results_splitter.setOrientation(QtCore.Qt.Vertical)
query_and_results_splitter.addWidget(self.query_widget)
query_and_results_splitter.addWidget(self.results_widget)
#query_and_results_splitter.setStretchFactor(0, app_config.v_split_1)
#query_and_results_splitter.setStretchFactor(1, app_config.v_split_2)
query_and_results_splitter.setChildrenCollapsible(False)
query_and_results_splitter.setHandleWidth(app_config.v_split_handle)
layout = QtGui.QHBoxLayout(self)
layout.addWidget(query_and_results_splitter)
self.setLayout(layout)
def is_empty(self):
return ((self.results_widget.results_widget_table.rowCount() == 0)
and (self.results_widget.results_widget_table.columnCount() == 0)
and (self.query_widget.sql() == ''))
| gpl-3.0 | 3,306,132,935,044,650,500 | 36.125 | 81 | 0.678788 | false |
xmdevops/xm_zoomeye_upgradeserver | upgradeserver_lua/models.py | 1 | 17639 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# author: limanman
# emails: [email protected]
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
AREA_CHOICE = [
    ('Asia region', (
('Asia', 'Asia'),
('Asia_Afghanistan', 'Asia_Afghanistan'),
('Asia_Aomen', 'Asia_Aomen'),
('Asia_Azerbaijan', 'Asia_Azerbaijan'),
('Asia_Bahrein', 'Asia_Bahrein'),
('Asia_Bangladesh', 'Asia_Bangladesh'),
('Asia_Bhutan', 'Asia_Bhutan'),
('Asia_Brunei', 'Asia_Brunei'),
('Asia_China', 'Asia_China'),
('Asia_China_AnHui', 'Asia_China_AnHui'),
('Asia_China_BeiJing', 'Asia_China_BeiJing'),
('Asia_China_ChongQing', 'Asia_China_ChongQing'),
('Asia_China_FuJian', 'Asia_China_FuJian'),
('Asia_China_GanSu', 'Asia_China_GanSu'),
('Asia_China_GuangDong', 'Asia_China_GuangDong'),
('Asia_China_GuangXi', 'Asia_China_GuangXi'),
('Asia_China_GuiZhou', 'Asia_China_GuiZhou'),
('Asia_China_HaiNan', 'Asia_China_HaiNan'),
('Asia_China_HeBei', 'Asia_China_HeBei'),
('Asia_China_HeNan', 'Asia_China_HeNan'),
('Asia_China_HeiLongJiang', 'Asia_China_HeiLongJiang'),
('Asia_China_HuBei', 'Asia_China_HuBei'),
('Asia_China_HuNan', 'Asia_China_HuNan'),
('Asia_China_JiLin', 'Asia_China_JiLin'),
('Asia_China_JiangSu', 'Asia_China_JiangSu'),
('Asia_China_JiangXi', 'Asia_China_JiangXi'),
('Asia_China_LiaoNing', 'Asia_China_LiaoNing'),
('Asia_China_NeiNengGu', 'Asia_China_NeiNengGu'),
('Asia_China_NingXia', 'Asia_China_NingXia'),
('Asia_China_QingHai', 'Asia_China_QingHai'),
('Asia_China_ShanDong', 'Asia_China_ShanDong'),
('Asia_China_ShanXi', 'Asia_China_ShanXi'),
('Asia_China_ShanXi2', 'Asia_China_ShanXi2'),
('Asia_China_ShangHai', 'Asia_China_ShangHai'),
('Asia_China_SiChuan', 'Asia_China_SiChuan'),
('Asia_China_TianJin', 'Asia_China_TianJin'),
('Asia_China_XiZang', 'Asia_China_XiZang'),
('Asia_China_XinJiang', 'Asia_China_XinJiang'),
('Asia_China_YunNan', 'Asia_China_YunNan'),
('Asia_China_ZheJiang', 'Asia_China_ZheJiang'),
('Asia_ChristmasIsland', 'Asia_ChristmasIsland'),
('Asia_Hongkong', 'Asia_Hongkong'),
('Asia_India', 'Asia_India'),
('Asia_Indonesia', 'Asia_Indonesia'),
('Asia_Iran', 'Asia_Iran'),
('Asia_Iraq', 'Asia_Iraq'),
('Asia_Israel', 'Asia_Israel'),
('Asia_Japan', 'Asia_Japan'),
('Asia_Jordan', 'Asia_Jordan'),
('Asia_Kampuchea', 'Asia_Kampuchea'),
('Asia_Kazakhstan', 'Asia_Kazakhstan'),
('Asia_Korea', 'Asia_Korea'),
('Asia_Kuwait', 'Asia_Kuwait'),
('Asia_Lanka', 'Asia_Lanka'),
('Asia_Laos', 'Asia_Laos'),
('Asia_Lebanon', 'Asia_Lebanon'),
('Asia_Malaysia', 'Asia_Malaysia'),
('Asia_Maldives', 'Asia_Maldives'),
('Asia_Mongolia', 'Asia_Mongolia'),
('Asia_Myanmar', 'Asia_Myanmar'),
('Asia_Nepal', 'Asia_Nepal'),
('Asia_NorthKorea', 'Asia_NorthKorea'),
('Asia_Oman', 'Asia_Oman'),
('Asia_Pakistan', 'Asia_Pakistan'),
('Asia_Palau', 'Asia_Palau'),
('Asia_Philippines', 'Asia_Philippines'),
('Asia_Qatar', 'Asia_Qatar'),
('Asia_Saudi', 'Asia_Saudi'),
('Asia_Singapore', 'Asia_Singapore'),
('Asia_Syria', 'Asia_Syria'),
('Asia_Taiwan', 'Asia_Taiwan'),
('Asia_Tajikistan', 'Asia_Tajikistan'),
('Asia_Thailand', 'Asia_Thailand'),
        ('Asia_TimorLeste', 'Asia_TimorLeste'),
('Asia_Turkmenistan', 'Asia_Turkmenistan'),
('Asia_UnitedArabEmirates', 'Asia_UnitedArabEmirates'),
('Asia_Uzbekistan', 'Asia_Uzbekistan'),
('Asia_Vietnam', 'Asia_Vietnam'),
('Asia_Yemen', 'Asia_Yemen'))
),
    ('Europe region', (
('Europe', 'Europe'),
('Europe_Ahvenanmaa', 'Europe_Ahvenanmaa'),
('Europe_Albania', 'Europe_Albania'),
('Europe_Andorra', 'Europe_Andorra'),
('Europe_Armenia', 'Europe_Armenia'),
('Europe_Austria', 'Europe_Austria'),
('Europe_Belarus', 'Europe_Belarus'),
('Europe_Belgium', 'Europe_Belgium'),
('Europe_BosniaAndHerzegovina', 'Europe_BosniaAndHerzegovina'),
        ('Europe_Britain', 'Europe_Britain'),
('Europe_Bulgaria', 'Europe_Bulgaria'),
('Europe_Croatia', 'Europe_Croatia'),
('Europe_Curaao', 'Europe_Curaao'),
('Europe_Cyprus', 'Europe_Cyprus'),
        ('Europe_CzechRepublic', 'Europe_CzechRepublic'),
('Europe_Denmark', 'Europe_Denmark'),
('Europe_Estonia', 'Europe_Estonia'),
('Europe_EuropeanUnion', 'Europe_EuropeanUnion'),
('Europe_FaroeIslands', 'Europe_FaroeIslands'),
('Europe_Finland', 'Europe_Finland'),
('Europe_France', 'Europe_France'),
('Europe_Germany', 'Europe_Germany'),
('Europe_Gibraltar', 'Europe_Gibraltar'),
('Europe_Greece', 'Europe_Greece'),
('Europe_Greenland', 'Europe_Greenland'),
('Europe_Guernsey', 'Europe_Guernsey'),
('Europe_Hungary', 'Europe_Hungary'),
('Europe_Iceland', 'Europe_Iceland'),
('Europe_Ireland', 'Europe_Ireland'),
('Europe_IsleOfMan', 'Europe_IsleOfMan'),
('Europe_Italy', 'Europe_Italy'),
('Europe_Jersey', 'Europe_Jersey'),
('Europe_Latvia', 'Europe_Latvia'),
('Europe_Liechtenstein', 'Europe_Liechtenstein'),
('Europe_Lithuania', 'Europe_Lithuania'),
('Europe_Luxembourg', 'Europe_Luxembourg'),
('Europe_Macedonia', 'Europe_Macedonia'),
('Europe_Malta', 'Europe_Malta'),
('Europe_Micronesia', 'Europe_Micronesia'),
('Europe_Moldova', 'Europe_Moldova'),
('Europe_Monaco', 'Europe_Monaco'),
('Europe_NetherlandAntilles', 'Europe_NetherlandAntilles'),
('Europe_Netherlands', 'Europe_Netherlands'),
('Europe_Norway', 'Europe_Norway'),
('Europe_Palestine', 'Europe_Palestine'),
('Europe_Poland', 'Europe_Poland'),
('Europe_Portugal', 'Europe_Portugal'),
('Europe_Romania', 'Europe_Romania'),
('Europe_Russia', 'Europe_Russia'),
('Europe_SanMarino', 'Europe_SanMarino'),
('Europe_Serbia', 'Europe_Serbia'),
('Europe_Slovakia', 'Europe_Slovakia'),
('Europe_Slovenia', 'Europe_Slovenia'),
('Europe_SolomonIslands', 'Europe_SolomonIslands'),
('Europe_Spain', 'Europe_Spain'),
('Europe_Svalbard', 'Europe_Svalbard'),
('Europe_Sweden', 'Europe_Sweden'),
('Europe_Switzerland', 'Europe_Switzerland'),
('Europe_Turkey', 'Europe_Turkey'),
('Europe_Tuvalu', 'Europe_Tuvalu'),
('Europe_Ukraine', 'Europe_Ukraine'),
('Europe_Vatican', 'Europe_Vatican'),
('Europe_Yugoslavia', 'Europe_Yugoslavia'))
),
    ('Americas region', (
('America', 'America'),
('America_America', 'America_America'),
('America_AmericanSamoa', 'America_AmericanSamoa'),
('America_Anguilla', 'America_Anguilla'),
('America_AntiguaBarbuda', 'America_AntiguaBarbuda'),
('America_Argentina', 'America_Argentina'),
('America_Aruba', 'America_Aruba'),
('America_AscensionIslands', 'America_AscensionIslands'),
('America_Bahamas', 'America_Bahamas'),
('America_Barbados', 'America_Barbados'),
('America_Bermuda', 'America_Bermuda'),
('America_Bolivia', 'America_Bolivia'),
('America_Brazil', 'America_Brazil'),
('America_Canada', 'America_Canada'),
('America_CaymanIslands', 'America_CaymanIslands'),
('America_Chile', 'America_Chile'),
('America_CocosIslands', 'America_CocosIslands'),
('America_Colombia', 'America_Colombia'),
('America_Congo', 'America_Congo'),
('America_CostaRica', 'America_CostaRica'),
('America_Cuba', 'America_Cuba'),
('America_Dominica', 'America_Dominica'),
('America_DominicanRepublic', 'America_DominicanRepublic'),
('America_Ecuador', 'America_Ecuador'),
('America_FrenchGuiana', 'America_FrenchGuiana'),
('America_Georgia', 'America_Georgia'),
('America_Grenada', 'America_Grenada'),
('America_Guadeloupe', 'America_Guadeloupe'),
('America_Guam', 'America_Guam'),
('America_Guatemala', 'America_Guatemala'),
('America_Guyana', 'America_Guyana'),
('America_Hayti', 'America_Hayti'),
('America_Honduras', 'America_Honduras'),
('America_Jamaica', 'America_Jamaica'),
('America_MalvieIslands', 'America_MalvieIslands'),
('America_MarianaIslands', 'America_MarianaIslands'),
('America_Martinique', 'America_Martinique'),
('America_Mexico', 'America_Mexico'),
('America_MontserratIsland', 'America_MontserratIsland'),
('America_Nicaragua', 'America_Nicaragua'),
('America_Panama', 'America_Panama'),
('America_Paraguay', 'America_Paraguay'),
('America_Peru', 'America_Peru'),
('America_PitcairnIsland', 'America_PitcairnIsland'),
('America_PuertoRico', 'America_PuertoRico'),
('America_SaintKittsAndNevis', 'America_SaintKittsAndNevis'),
('America_SaintLucia', 'America_SaintLucia'),
('America_SaintPierreAndMiquelon', 'America_SaintPierreAndMiquelon'),
('America_SaintVincent', 'America_SaintVincent'),
('America_Salvador', 'America_Salvador'),
('America_SouthGeorgiaAndTheSouthIsland', 'America_SouthGeorgiaAndTheSouthIsland'),
('America_Surinam', 'America_Surinam'),
('America_TrinidadAndTobago', 'America_TrinidadAndTobago'),
('America_TurksAndCaicosIslands', 'America_TurksAndCaicosIslands'),
('America_USMinorOutlyingIslands', 'America_USMinorOutlyingIslands'),
('America_Uruguay', 'America_Uruguay'),
('America_Venezuela', 'America_Venezuela'),
        ('America_VirginIslands', 'America_VirginIslands'),
('America_Zaire', 'America_Zaire'))
),
    ('Africa region', (
('Africa', 'Africa'),
('Africa_Algeria', 'Africa_Algeria'),
('Africa_Angola', 'Africa_Angola'),
('Africa_Benin', 'Africa_Benin'),
('Africa_Botswana', 'Africa_Botswana'),
('Africa_BouvetIsland', 'Africa_BouvetIsland'),
('Africa_BritishIndianOceanTerritory', 'Africa_BritishIndianOceanTerritory'),
('Africa_BurkinaFaso', 'Africa_BurkinaFaso'),
('Africa_Burundi', 'Africa_Burundi'),
('Africa_Cameroon', 'Africa_Cameroon'),
('Africa_CapeVerde', 'Africa_CapeVerde'),
('Africa_CaymanIslands', 'Africa_CaymanIslands'),
('Africa_CentralAfricanRepublic', 'Africa_CentralAfricanRepublic'),
('Africa_Comoros', 'Africa_Comoros'),
('Africa_Djibouti', 'Africa_Djibouti'),
('Africa_Egypt', 'Africa_Egypt'),
('Africa_EquatorialGuinea', 'Africa_EquatorialGuinea'),
('Africa_Eritrea', 'Africa_Eritrea'),
('Africa_Ethiopia', 'Africa_Ethiopia'),
('Africa_Gabon', 'Africa_Gabon'),
('Africa_Gambia', 'Africa_Gambia'),
('Africa_Ghana', 'Africa_Ghana'),
('Africa_Guinea', 'Africa_Guinea'),
('Africa_GuineaBissau', 'Africa_GuineaBissau'),
('Africa_Kenya', 'Africa_Kenya'),
('Africa_Kyrgyzstan', 'Africa_Kyrgyzstan'),
('Africa_Lesotho', 'Africa_Lesotho'),
('Africa_Liberia', 'Africa_Liberia'),
('Africa_Libya', 'Africa_Libya'),
('Africa_Madagascar', 'Africa_Madagascar'),
('Africa_Malawi', 'Africa_Malawi'),
('Africa_Mali', 'Africa_Mali'),
('Africa_Mauritania', 'Africa_Mauritania'),
('Africa_Mauritius', 'Africa_Mauritius'),
('Africa_Mayotte', 'Africa_Mayotte'),
('Africa_Morocco', 'Africa_Morocco'),
('Africa_Mozambique', 'Africa_Mozambique'),
('Africa_Namibia', 'Africa_Namibia'),
('Africa_Niger', 'Africa_Niger'),
('Africa_Nigeria', 'Africa_Nigeria'),
('Africa_Reunion', 'Africa_Reunion'),
('Africa_Rwanda', 'Africa_Rwanda'),
('Africa_SaintHelena', 'Africa_SaintHelena'),
('Africa_SaoTomePrincipe', 'Africa_SaoTomePrincipe'),
('Africa_Senegal', 'Africa_Senegal'),
('Africa_Seychelles', 'Africa_Seychelles'),
('Africa_SierraLeone', 'Africa_SierraLeone'),
('Africa_Somali', 'Africa_Somali'),
('Africa_SouthAfrica', 'Africa_SouthAfrica'),
('Africa_Sudan', 'Africa_Sudan'),
('Africa_Swaziland', 'Africa_Swaziland'),
('Africa_Tanzania', 'Africa_Tanzania'),
('Africa_Togo', 'Africa_Togo'),
('Africa_Tunisia', 'Africa_Tunisia'),
('Africa_Uganda', 'Africa_Uganda'),
('Africa_WesternSahara', 'Africa_WesternSahara'),
('Africa_Zambia', 'Africa_Zambia'),
('Africa_Zimbabwe', 'Africa_Zimbabwe'))
),
    ('Oceania region', (
('Oceania', 'Oceania'),
('Oceania_Australia', 'Oceania_Australia'),
('Oceania_CookIs', 'Oceania_CookIs'),
('Oceania_Fiji', 'Oceania_Fiji'),
('Oceania_FrenchPolynesia', 'Oceania_FrenchPolynesia'),
('Oceania_FrenchSouthernTerritories', 'Oceania_FrenchSouthernTerritories'),
('Oceania_HeardIslandsMcDonaldIslands', 'Oceania_HeardIslandsMcDonaldIslands'),
('Oceania_IndependentStateOfSamoa', 'Oceania_IndependentStateOfSamoa'),
('Oceania_Kiribati', 'Oceania_Kiribati'),
('Oceania_MarshallIslands', 'Oceania_MarshallIslands'),
('Oceania_Nauru', 'Oceania_Nauru'),
('Oceania_NewCaledonia', 'Oceania_NewCaledonia'),
('Oceania_NewZealand', 'Oceania_NewZealand'),
('Oceania_Niue', 'Oceania_Niue'),
('Oceania_NorfolkIsland', 'Oceania_NorfolkIsland'),
('Oceania_PapuaNewCuinea', 'Oceania_PapuaNewCuinea'),
('Oceania_Tokelau', 'Oceania_Tokelau'),
('Oceania_Tonga', 'Oceania_Tonga'),
('Oceania_Vanuatu', 'Oceania_Vanuatu'),
('Oceania_WallisEtFutuna', 'Oceania_WallisEtFutuna'))
)
]
class Control(object):
def is_expired(self):
time = timezone.now()
        if self.start_time is None and self.end_time is None:
            return u'No'
        elif self.end_time is None and self.start_time and time > self.start_time:
            return u'No'
        elif self.start_time is None and self.end_time and time < self.end_time:
            return u'No'
        elif self.start_time and time > self.start_time and self.end_time and time < self.end_time:
            return u'No'
        else:
            return u'Yes'
    is_expired.short_description = u'Expired?'
class AreaControl(models.Model, Control):
    area = models.CharField(u'Device region', max_length=100, choices=AREA_CHOICE, unique=False)
    devid = models.CharField(u'Firmware serial number', max_length=100, unique=False)
    start_time = models.DateTimeField(u'Start time', default=None, null=True, blank=True)
    end_time = models.DateTimeField(u'End time', default=None, null=True, blank=True)
    notes = models.TextField(u'Additional info', default='', blank=True)
def __str__(self):
return '<AreaControl {0}>'.format(self.area)
class UuidControl(models.Model, Control):
    uuid = models.CharField(u'Device serial number', max_length=100, unique=True)
    devid = models.CharField(u'Firmware serial number', max_length=100, unique=False)
    start_time = models.DateTimeField(u'Start time', default=None, null=True, blank=True)
    end_time = models.DateTimeField(u'End time', default=None, null=True, blank=True)
    notes = models.TextField(u'Additional info', default='', blank=True)
class DateControl(models.Model, Control):
    devid = models.CharField(u'Firmware serial number', max_length=100, unique=False)
    start_time = models.DateTimeField(u'Start time', default=None, null=True, blank=True)
    end_time = models.DateTimeField(u'End time', default=None, null=True, blank=True)
    start_date = models.DateField(u'Start date', default=None, null=True, blank=True)
    end_date = models.DateField(u'End date', default=None, null=True, blank=True)
    upg_once = models.BooleanField(u'Upgrade once', default=False, blank=True)
    notes = models.TextField(u'Additional info', default='', blank=True)
class UpgradeLog(models.Model):
    uuid = models.CharField(u'Device serial number', max_length=100, unique=False)
    devid = models.CharField(u'Firmware serial number', max_length=100, unique=False)
    area = models.CharField(u'Device region', max_length=100, unique=False)
    upgrade_time = models.DateTimeField(u'Upgrade time', default=timezone.now, blank=True)
class Firmware(models.Model):
    name = models.FileField(u'Uploaded file', unique=True)
    date = models.DateTimeField(u'Upload time', default=timezone.now, blank=True)
    is_important = models.BooleanField(u'Important version', default=False, blank=True)
    is_generated = models.BooleanField(u'Generated?', default=False, blank=True)
    cn_commit = models.TextField(u'Chinese changelog', blank=False)
    en_commit = models.TextField(u'English changelog', blank=False)
    notes = models.TextField(u'Additional info', blank=True)
def __str__(self):
return '<Firmware {0}>'.format(self.name)
| gpl-3.0 | -1,720,569,387,226,788,900 | 45.253333 | 99 | 0.607783 | false |
Honzin/ccs | tests/testResponse/testBtccusd/testTrades.py | 1 | 1341 | import unittest
import ccs
####################################################################################################################
# BTCCUSD #
####################################################################################################################
class Valid(unittest.TestCase):
def setUp(self):
self.stock = ccs.constants.BTCCUSD
self.base = ccs.constants.BTC
self.quote = ccs.constants.USD
symbol = ccs.btccusd.Symbol(self.base, self.quote)
self.limit = 2
self.json = '[{"Id":7618,"Timestamp":1485830873401,"Price":969.96,"Quantity":0.092,"Side":"Buy"},{"Id":7619,"Timestamp":1485834001646,"Price":965,"Quantity":0.003,"Side":"Sell"}]'
self.trades = ccs.btccusd.public.response.Trades(self.json, symbol)
def testLen(self):
self.assertAlmostEqual(len(self.trades), self.limit)
def testGetItem(self):
self.assertIsInstance(self.trades[0], ccs.btccusd.public.response.Trade)
def testMethod(self):
pass
def testStock(self):
pass
def testOsymbol(self):
pass
def testUsymbol(self):
pass
def testStr(self):
pass
if __name__ == '__main__':
unittest.main() | agpl-3.0 | -1,838,338,672,830,686,700 | 30.209302 | 187 | 0.467562 | false |
hadim/spindle_tracker | spindle_tracker/spatial/packer.py | 1 | 7486 | from PIL import Image
from copy import copy
from os.path import join
from glob import glob
import functools
__all__ = ["pack_images"]
class Rect(object):
"""Represent a rectangle in the BinPack tree."""
def __init__(self, x1, y1, x2, y2):
self.x1 = x1
self.y1 = y1 # bottom
self.x2 = x2
self.y2 = y2 # top
def get_width(self):
return abs(self.x2 - self.x1)
def set_width(self, w):
self.x2 = self.x1 + w
def get_height(self):
return abs(self.y2 - self.y1)
def set_height(self, h):
self.y2 = self.y1 + h
def get_left(self):
return self.x1
def set_left(self, l):
w = self.get_width()
self.x1 = l
self.x2 = l + w
def get_top(self):
return self.y2
def set_top(self, t):
h = self.get_height()
self.y2 = t
self.y1 = t - h
def get_right(self):
return self.x2
def get_bottom(self):
return self.y1
def set_bottom(self, y1):
h = self.get_height()
self.y1 = y1
self.y2 = self.y1 + h
def offset(self, x, y):
self.left = self.left + x
self.top = self.top + y
return self
def inset(self, d):
"""return a rect which is this rect inset by d in each direction"""
return Rect(self.x1 + d, self.y1 + d,
self.x2 - d, self.y2 - d)
def inside(self, r):
"""return true if this rectangle is inside r"""
return self.x1 >= r.x1 and self.x2 <= r.x2\
and self.y1 >= r.y1 and self.y2 <= r.y2
width = property(fget=get_width, fset=set_width)
height = property(fget=get_height, fset=set_height)
left = property(fget=get_left, fset=set_left)
top = property(fget=get_top, fset=set_top)
right = property(fget=get_right)
bottom = property(fget=get_bottom, fset=set_bottom)
def __str__(self):
return "[%f, %f, %f, %f]" % (self.x1, self.y1, self.x2, self.y2)
def __repr__(self):
return "Rect[%s]" % str(self)
class BinPackNode(object):
"""A Node in a tree of recursively smaller areas within which images can be placed."""
def __init__(self, area):
"""Create a binpack node
@param area a Rect describing the area the node covers in texture coorinates
"""
#the area that I take up in the image.
self.area = area
# if I've been subdivided then I always have a left/right child
self.leftchild = None
self.rightchild = None
#I'm a leaf node and an image would be placed here, I can't be suddivided.
self.filled = False
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, str(self.area))
def insert(self, newarea):
"""Insert the newarea in to my area.
@param newarea a Rect to insert in to me by subdividing myself up
@return the area filled or None if the newarea couldn't be accommodated within this
node tree
"""
#if I've been subdivided already then get my child trees to insert the image.
if self.leftchild and self.rightchild:
return self.leftchild.insert(newarea) or self.rightchild.insert(newarea)
#If my area has been used (filled) or the area requested is bigger then my
# area return None. I can't help you.
if self.filled or newarea.width > self.area.width or newarea.height > self.area.height:
return None
#if the image fits exactly in me then yep, the are has been filled
if self.area.width == newarea.width and self.area.height == newarea.height:
self.filled = True
return self.area
#I am going to subdivide myself, copy my area in to the two children
# and then massage them to be useful sizes for placing the newarea.
leftarea = copy(self.area)
rightarea = copy(self.area)
widthdifference = self.area.width - newarea.width
heightdifference = self.area.height - newarea.height
if widthdifference > heightdifference:
leftarea.width = newarea.width
rightarea.left = rightarea.left + newarea.width
rightarea.width = rightarea.width - newarea.width
else:
leftarea.height = newarea.height
rightarea.top = rightarea.top + newarea.height
rightarea.height = rightarea.height - newarea.height
#create my children and then insert it in to the left child which
#was carefully crafted about to fit in one dimension.
self.leftchild = BinPackNode(leftarea)
self.rightchild = BinPackNode(rightarea)
return self.leftchild.insert(newarea)
def _imagesize(i):
return i.size[0] * i.size[1]
def _cmp(a, b):
    """Python 2/3 compatible replacement for the removed builtin cmp()."""
    return (a > b) - (a < b)


# Table of heuristics to sort the list of images by before placing
# them in the BinPack tree. NOTE that the arguments are compared backwards
# (r2->r1 as opposed to r1->r2) because we want to go from big to small.
sort_heuristics = {
    "maxarea": lambda r1, r2: _cmp(_imagesize(r2[1]), _imagesize(r1[1])),
    "maxwidth": lambda r1, r2: _cmp(r2[1].size[0], r1[1].size[0]),
    "maxheight": lambda r1, r2: _cmp(r2[1].size[1], r1[1].size[1]),
}
def pack_images(imagelist, dstfilename, padding=2, sort="maxarea"):
"""pack the images in image list in to a pow2 PNg file
@param imagelist iterable of tuples (image name, image)
@param padding padding to be applied to all sides of the image
@param dstfilename the filename to save the packed image to.
@return a list of ( rect, name, image) tuples describing where the images were placed.
"""
#sort the images based on the heuristic passed in
images = sorted(imagelist, key=functools.cmp_to_key(sort_heuristics[sort]))
#the start dimension of the target image. this grows
# by doubling to accomodate the images. Should start
# on a power of two otherwise it wont end on a power
# of two. Could possibly start this on the first pow2
# above the largest image but this works.
targetdim_x = 64
targetdim_y = 64
placement = []
while True:
try:
placement = []
tree = BinPackNode(Rect(0, 0, targetdim_x, targetdim_y))
#insert each image into the BinPackNode area. If an image fails to insert
# we start again with a slightly bigger target size.
for name, img in images:
imsize = img.size
r = Rect(0, 0, imsize[0] + padding * 2, imsize[1] + padding * 2)
uv = tree.insert(r)
if uv is None:
#the tree couldn't accomodate the area, we'll need to start again.
raise ValueError('Pack size too small.')
uv = uv.inset(padding)
placement.append((uv, name, img))
#if we get here we've found a place for all the images so
# break from the while True loop
break
except ValueError:
if targetdim_x == targetdim_y:
targetdim_x = targetdim_x * 2
else:
targetdim_y = targetdim_x
# save the images to the target file packed
image = Image.new("RGBA", (targetdim_x, targetdim_y))
for uv, name, img in placement:
image.paste(img, (uv.x1, uv.y1))
# reduce image to its minimum size
image = image.crop(image.getbbox())
# image.show()
image.save(dstfilename)
return placement
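
# Minimal usage sketch of pack_images(): it assumes the current directory holds
# a few PNG files; the glob pattern and the output name "packed.png" are
# arbitrary examples, not fixed conventions of this module.
if __name__ == "__main__":  # pragma: no cover
    demo_files = glob(join(".", "*.png"))
    demo_images = [(f, Image.open(f)) for f in demo_files]
    if demo_images:
        placements = pack_images(demo_images, "packed.png", padding=2, sort="maxarea")
        for rect, name, _img in placements:
            print("%s placed at %s" % (name, rect))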
| bsd-3-clause | 7,871,469,997,638,940,000 | 34.311321 | 95 | 0.604595 | false |
moiseslorap/RIT | Computer Science 1/Practical Exam/drawString.py | 1 | 1130 | """
Moisés Lora Pérez
"""
from turtle import *
def init():
reset()
setup(600,600)
setworldcoordinates(-300,-300,300,300)
def drawC():
circle(30)
def drawP():
forward(30)
left(72)
forward(30)
left(72)
forward(30)
left(72)
forward(30)
left(72)
forward(30)
left(72)
def drawF():
forward(30)
def drawB():
back(30)
def drawL():
left(30)
def drawR():
right(30)
def drawU():
up()
def drawD():
down()
def drawH():
up()
goto(0,0)
def drawRec():
line = 0
if s == 'C':
drawC()
line +=1
elif s == 'P':
drawP()
line +=5
elif s == 'F':
drawF()
line +=1
elif s == 'B':
drawB()
line +=1
elif s == 'L':
drawL()
elif s == 'R':
drawR()
elif s == 'U':
drawU()
elif s == 'D':
drawD()
elif s == 'H':
drawH()
return line
letters = ['C','P','F','B','L','R','U','D','H']
s = str(input("String to Parse:"))
drawRec()
input() | mit | -3,508,877,732,127,080,400 | 12.125 | 47 | 0.420213 | false |
mrksbrg/moped | new-server/misc/testmoped.py | 1 | 1068 | import base64
import json
import re
def ack(client, app):
vin = "20UYA31581L000000"
while True:
y = client.service.listInstalledApps()
y = json.loads(y)
for t in y['result']:
if t['vin'] == vin:
if int(t['appId']) == app:
print t['installationState']
return
# x = client.service.get_ack_status(vin, app)
# print x
# if x == True:
# break
def readfile(name):
f = open(name)
s = f.read()
f.close()
return s
def upload(s, n, v):
x=base64.b64encode(readfile("/home/arndt/moped/moped/plugins/%s/target/%s-%s.jar" % (n,n,v)))
x=s.uploadApp(x, n, v)
return x
def uploadplus(s, n, v):
x = upload(s, n, v)
print x
x=s.compileApp(n, v)
m = re.search("Romizer processed ([0-9]+) class", x, re.MULTILINE)
if m:
if m.group(1) == "0":
print "Romizer processed 0 classes!"
print x
else:
print "Couldn't find how many classes were processed"
#print x
| gpl-2.0 | -4,124,176,131,638,919,000 | 23.837209 | 97 | 0.527154 | false |
alt-core/sheetbot | condition_expr.py | 1 | 5691 | # coding: utf-8
import re
import string
from unicodedata import normalize
from arpeggio import ParserPython, PTNodeVisitor, visit_parse_tree, Optional, ZeroOrMore, OneOrMore, EOF
from arpeggio import RegExMatch as _
from syntax_tree import SyntaxNode, SyntaxTreeEvaluator
DEBUG = False
def token_and(): return _(ur"[&&]")
def token_or(): return _(ur"[||]")
def token_lparen(): return _(ur"[((]")
def token_rparen(): return _(ur"[]))]")
def regex_match(): return _(ur'/(\\/|[^/])*/[iLN]*')
def string_match(): return _(ur'(\\.|[^&&\||\))])*')
def sub_expression(): return token_lparen, expression, token_rparen
def factor(): return [sub_expression, regex_match, string_match]
def term(): return OneOrMore(factor, sep=token_and)
def expression(): return ZeroOrMore(term, sep=token_or)
def top(): return expression, EOF
regex_match_regex = re.compile(ur'/((?:(?:\/)|[^/])*)/([iLN]*)?')
unescape_sub_regex = re.compile(ur'\\(.)')
OPTION_REGEXP_NORMALIZE = 1
OPTION_REGEXP_LOWER_CASE = 2
expression_parser = ParserPython(top, ws=u'\t\n\r ', debug=DEBUG)
class ExpressionConverter(PTNodeVisitor):
def node(self, node, children):
children_list = tuple(children)
is_terminal = len(children_list) == 0
value = node.value if is_terminal else children_list
if DEBUG:
if is_terminal:
print(u'Leaf<{}>({})'.format(node.rule_name, value))
else:
print(u'Node<{}>{}'.format(node.rule_name, value))
return SyntaxNode(node.rule_name, is_terminal, value)
def suppress(self, node, children):
if len(children) == 0:
return None
elif len(children) == 1:
return children[0]
else:
raise RuntimeError
def __getattr__(self, name):
# 未定義のルールはデフォルト処理
if name.startswith('visit_token_'):
# token_ とついているルールは省略する
return self.suppress
elif name.startswith('visit_'):
return self.node
else:
raise AttributeError
def visit_string_match(self, node, children):
value = node.value
value = unescape_sub_regex.sub(r'\1', value)
value = normalize('NFKC', value).lower().strip()
node.value = value
return self.node(node, children)
def visit_regex_match(self, node, children):
m = regex_match_regex.match(node.value)
option_str = m.group(2)
regex_string = m.group(1)
regex_option = 0
condition_option = []
if option_str and u'i' in option_str:
regex_option = re.IGNORECASE
if option_str and u'L' in option_str:
condition_option.append(OPTION_REGEXP_LOWER_CASE)
if option_str and u'N' in option_str:
condition_option.append(OPTION_REGEXP_NORMALIZE)
regex = re.compile(regex_string, regex_option)
node.value = (regex, condition_option)
return self.node(node, children)
class ConditionExpression(object):
def __init__(self):
self.expr = None
@classmethod
def from_str(cls, s):
self = cls()
expr = expression_parser.parse(s)
self.expr = visit_parse_tree(expr, ExpressionConverter())
return self
def eval(self, env, matches=[]):
return ExpressionEvaluator(env, matches).eval(self.expr)
def check(self, action):
action_normalized = normalize('NFKC', action).lower()
result, matched = ConditionExpressionEvaluator(action, action_normalized).eval(self.expr)
if result:
return matched
else:
return None
class ConditionExpressionEvaluator(SyntaxTreeEvaluator):
def __init__(self, action, action_normalized, **kwargs):
self.action = action
self.action_normalized = action_normalized
super(ConditionExpressionEvaluator, self).__init__(**kwargs)
def visit_top(self, node):
children = self.eval_children(node)
if len(children) > 0:
return children[0]
return (False, ('',))
def visit_expression(self, node):
matched = ('',)
flag = False
for child in node.children:
result, sub_matched = self.eval(child)
if result:
flag = True
if len(matched[0]) < len(sub_matched[0]):
matched = sub_matched
return (flag, matched)
def visit_term(self, node):
matched = ['']
for child in node.children:
result, sub_matched = self.eval(child)
if not result:
return (False, ('',))
matched[0] = matched[0] + sub_matched[0]
matched.extend(sub_matched[1:])
return (True, tuple(matched))
def visit_string_match(self, node):
value = node.value
if value in self.action_normalized:
#print('%s found in %s' % (value, self.action_normalized))
return (True, (node.value,))
else:
#print('%s not found in %s' % (value, self.action_normalized))
return (False, ('',))
def visit_regex_match(self, node):
target_string = self.action
regex = node.value[0]
options = node.value[1]
if OPTION_REGEXP_NORMALIZE in options:
target_string = normalize('NFKC', target_string)
if OPTION_REGEXP_LOWER_CASE in options:
target_string = target_string.lower()
m = regex.search(target_string)
if m:
return (True, (m.group(0),) + m.groups())
else:
return (False, ('',))
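
# Minimal usage sketch of the ConditionExpression API defined above. The input
# strings are arbitrary examples; it assumes the sibling syntax_tree module is
# importable and that check() returns the matched text on success, None otherwise.
if __name__ == '__main__':
    expr = ConditionExpression.from_str(u'hello|/wor.d/i')
    print(expr.check(u'Hello World'))   # expected: a non-empty tuple of matched text
    print(expr.check(u'goodbye'))       # expected: None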
| mit | -6,069,350,434,592,281,000 | 32.837349 | 104 | 0.588214 | false |
googlei18n/glyphsLib | Lib/glyphsLib/builder/sources.py | 1 | 8954 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, division, absolute_import, unicode_literals
import collections
import logging
import os
import fontTools.designspaceLib
from glyphsLib.util import build_ufo_path
from .masters import UFO_FILENAME_KEY
from .axes import get_axis_definitions, get_regular_master, font_uses_new_axes, interp
logger = logging.getLogger(__name__)
def to_designspace_sources(self):
regular_master = get_regular_master(self.font)
for master in self.font.masters:
_to_designspace_source(self, master, (master is regular_master))
_to_designspace_source_layer(self)
_warn_duplicate_master_locations(self)
def _warn_duplicate_master_locations(self):
designspace = self._designspace
master_locations = collections.defaultdict(list)
for source in designspace.sources:
master_locations[tuple(source.location.items())].append(source)
duplicates = {l: s for l, s in master_locations.items() if len(s) > 1}
if duplicates:
msg = [
"DesignSpace sources contain duplicate locations; "
"varLib expects each master to define a unique location."
]
if any(s.layerName for s in designspace.sources):
msg.append(
" Make sure that you used consistent 'brace layer' names"
" in all the glyph layers that share the same location."
)
for loc, sources in sorted(duplicates.items()):
msg.append(
"\n %r => %r"
% ([s.layerName if s.layerName else s.name for s in sources], dict(loc))
)
logger.warning("".join(msg))
def _to_designspace_source(self, master, is_regular):
source = self._sources[master.id]
ufo = source.font
if is_regular:
source.copyLib = True
source.copyInfo = True
source.copyGroups = True
source.copyFeatures = True
source.familyName = ufo.info.familyName
source.styleName = ufo.info.styleName
# TODO: recover original source name from userData
# UFO_SOURCE_NAME_KEY
source.name = "{} {}".format(source.familyName, source.styleName)
if UFO_FILENAME_KEY in master.userData:
source.filename = master.userData[UFO_FILENAME_KEY]
else:
# TODO: (jany) allow another naming convention?
source.filename = build_ufo_path("", source.familyName, source.styleName)
# Make sure UFO filenames are unique, lest we overwrite masters that
# happen to have the same weight name.
n = "_"
while any(
s is not source and s.filename == source.filename
for s in self._sources.values()
):
source.filename = os.path.basename(
build_ufo_path("", source.familyName, source.styleName + n)
)
n += "_"
logger.warning(
"The master with id {} has the same style name ({}) "
"as another one. All masters should have distinctive "
"(style) names. Use the 'Master name' custom parameter"
" on a master to give it a unique name. Proceeding "
"with an unchanged name, but appending '_' to the file"
" name on disk.".format(master.id, source.styleName)
)
location = {}
for axis_def in get_axis_definitions(self.font):
location[axis_def.name] = axis_def.get_design_loc(master)
source.location = location
def _to_designspace_source_layer(self):
# To construct a source layer, we need
# 1. The Designspace source filename and font object which holds the layer.
# 2. The (brace) layer name itself.
# 3. The location of the intermediate master in the design space.
# (For logging purposes, it's nice to know which glyphs contain the layer.)
#
    # Note that a brace layer can be associated with different master layers (e.g. the
    # 'a' can have a '{400}' brace layer associated with 'Thin', and the 'b' can have
    # one associated with 'Black').
    # Also note that if a brace layer name has fewer values than there are axes, the
    # missing values are supposed to be taken from the associated master.
# First, collect all brace layers in the font and which glyphs and which masters
# they belong to.
layer_name_to_master_ids = collections.defaultdict(set)
layer_name_to_glyph_names = collections.defaultdict(list)
for glyph in self.font.glyphs:
for layer in glyph.layers:
if (
"{" in layer.name
and "}" in layer.name
and ".background" not in layer.name
):
layer_name_to_master_ids[layer.name].add(layer.associatedMasterId)
layer_name_to_glyph_names[layer.name].append(glyph.name)
# Next, insert the brace layers in a defined location in the existing designspace.
designspace = self._designspace
layers_to_insert = collections.defaultdict(list)
for layer_name, master_ids in layer_name_to_master_ids.items():
# Construct coordinates first...
brace_coordinates = [
int(c)
for c in layer_name[
layer_name.index("{") + 1 : layer_name.index("}")
].split(",")
]
for master_id in master_ids:
# ... as they may need to be filled up with the values of the associated
# master.
master = self._sources[master_id]
master_coordinates = brace_coordinates
if len(master_coordinates) < len(designspace.axes):
master_locations = [master.location[a.name] for a in designspace.axes]
master_coordinates = (
brace_coordinates + master_locations[len(brace_coordinates) :]
)
elif len(master_coordinates) > len(designspace.axes):
logger.warning(
"Glyph(s) %s, brace layer '%s' defines more locations than "
"there are design axes.",
layer_name_to_glyph_names[layer_name],
layer_name,
)
# If we have more locations than axes, ignore the extra locations.
layer_coordinates_mapping = collections.OrderedDict(
(axis.name, location)
for axis, location in zip(designspace.axes, master_coordinates)
)
s = fontTools.designspaceLib.SourceDescriptor()
s.filename = master.filename
s.font = master.font
s.layerName = layer_name
s.name = "{} {}".format(master.name, layer_name)
s.location = layer_coordinates_mapping
# We collect all generated SourceDescriptors first, grouped by the masters
# they belong to, so we can insert them in a defined order in the next step.
layers_to_insert[master_id].append(s)
# Splice brace layers into the appropriate location after their master.
for master_id, brace_layers in layers_to_insert.items():
master = self._sources[master_id]
insert_index = designspace.sources.index(master) + 1
brace_layers.sort(key=lambda x: tuple(x.location.values()))
designspace.sources[insert_index:insert_index] = brace_layers
def to_glyphs_sources(self):
for master in self.font.masters:
_to_glyphs_source(self, master)
def _to_glyphs_source(self, master):
source = self._sources[master.id]
# Retrieve the master locations: weight, width, custom 0 - 1 - 2 - 3
for axis_def in get_axis_definitions(self.font):
try:
design_location = source.location[axis_def.name]
except KeyError:
# The location does not have this axis?
continue
axis_def.set_design_loc(master, design_location)
if font_uses_new_axes(self.font):
# The user location can be found by reading the mapping backwards
mapping = []
for axis in self.designspace.axes:
if axis.tag == axis_def.tag:
mapping = axis.map
break
reverse_mapping = [(dl, ul) for ul, dl in mapping]
user_location = interp(reverse_mapping, design_location)
axis_def.set_user_loc(master, user_location)
| apache-2.0 | -2,613,525,136,835,783,000 | 39.7 | 88 | 0.623409 | false |
asoplata/dynasim-benchmark-brette-2007 | Brian2/brian2_benchmark_COBAHH_clocksyn_hidens_compiled_0004.py | 1 | 3909 | """
# Notes:
- This simulation seeks to emulate the COBAHH benchmark simulations of (Brette
et al. 2007) using the Brian2 simulator for speed benchmark comparison to
DynaSim. However, this simulation includes CLOCK-DRIVEN synapses, for direct
comparison to DynaSim's clock-driven architecture. The synaptic connections
are "high-density", with a 90% probability of connection.
- The time taken to simulate will be indicated in the stdout log file
'~/batchdirs/brian2_benchmark_COBAHH_clocksyn_hidens_compiled_0004/pbsout/brian2_benchmark_COBAHH_clocksyn_hidens_compiled_0004.out'
- Note that this code has been slightly modified from the original (Brette et
al. 2007) benchmarking code, available here on ModelDB:
https://senselab.med.yale.edu/modeldb/showModel.cshtml?model=83319 in order
to work with version 2 of the Brian simulator (aka Brian2), and also modified
to change the model being benchmarked, etc.
# References:
- Brette R, Rudolph M, Carnevale T, Hines M, Beeman D, Bower JM, et al.
Simulation of networks of spiking neurons: A review of tools and strategies.
Journal of Computational Neuroscience 2007;23:349–98.
doi:10.1007/s10827-007-0038-6.
- Goodman D, Brette R. Brian: a simulator for spiking neural networks in Python.
Frontiers in Neuroinformatics 2008;2. doi:10.3389/neuro.11.005.2008.
"""
from brian2 import *
set_device('cpp_standalone')
prefs.codegen.cpp.extra_compile_args = ['-w', '-O3', '-ffast-math', '-march=native']
# Parameters
cells = 4
defaultclock.dt = 0.01*ms
area = 20000*umetre**2
Cm = (1*ufarad*cmetre**-2) * area
gl = (5e-5*siemens*cmetre**-2) * area
El = -60*mV
EK = -90*mV
ENa = 50*mV
g_na = (100*msiemens*cmetre**-2) * area
g_kd = (30*msiemens*cmetre**-2) * area
VT = -63*mV
# Synaptic strengths
gAMPA = (0.1*msiemens*cmetre**-2)* area
gGABAA = (0.06*msiemens*cmetre**-2)* area
# Synaptic time constants
tauAMPA = 2
tauGABAA = 5
# Synaptic reversal potentials
EAMPA = 1*mV
EGABAA = -80*mV
# The model
eqs = Equations('''
dv/dt = (gl*(El-v)-
gAMPA/cells*sAMPAtotal*(v-EAMPA)-
gGABAA/cells*sGABAAtotal*(v-EGABAA)-
g_na*(m*m*m)*h*(v-ENa)-
g_kd*(n*n*n*n)*(v-EK))/Cm : volt
dm/dt = alpha_m*(1-m)-beta_m*m : 1
dn/dt = alpha_n*(1-n)-beta_n*n : 1
dh/dt = alpha_h*(1-h)-beta_h*h : 1
alpha_m = 0.32*(mV**-1)*(13*mV-v+VT)/
(exp((13*mV-v+VT)/(4*mV))-1.)/ms : Hz
beta_m = 0.28*(mV**-1)*(v-VT-40*mV)/
(exp((v-VT-40*mV)/(5*mV))-1)/ms : Hz
alpha_h = 0.128*exp((17*mV-v+VT)/(18*mV))/ms : Hz
beta_h = 4./(1+exp((40*mV-v+VT)/(5*mV)))/ms : Hz
alpha_n = 0.032*(mV**-1)*(15*mV-v+VT)/
(exp((15*mV-v+VT)/(5*mV))-1.)/ms : Hz
beta_n = .5*exp((10*mV-v+VT)/(40*mV))/ms : Hz
sAMPAtotal : 1
sGABAAtotal : 1
''')
# Construct intrinsic cells
P = NeuronGroup(cells, model=eqs, method='euler')
proportion=int(0.8*cells)
Pe = P[:proportion]
Pi = P[proportion:]
# Contruct synaptic network
sAMPA=Synapses(Pe,P,
model='''ds/dt=1000.*5.*(1 + tanh(v_pre/(4.*mV)))*(1-s)/ms - (s)/(2*ms) : 1 (clock-driven)
sAMPAtotal_post = s : 1 (summed)
''')
sAMPA.connect(p=0.90)
sGABAA_RETC=Synapses(Pi,P,
model='''ds/dt=1000.*2.*(1 + tanh(v_pre/(4.*mV)))*(1-s)/ms - s/(5*ms) : 1 (clock-driven)
sGABAAtotal_post = s : 1 (summed)
''')
sGABAA_RETC.connect(p=0.90)
# Initialization
P.v = 'El + (randn() * 5 - 5)*mV'
# Record a few traces
trace = StateMonitor(P, 'v', record=[1, 10, 100])
totaldata = StateMonitor(P, 'v', record=True)
run(0.5 * second, report='text')
# # If you want to plot:
# plot(trace.t/ms, trace[1].v/mV)
# plot(trace.t/ms, trace[10].v/mV)
# plot(trace.t/ms, trace[100].v/mV)
# xlabel('t (ms)')
# ylabel('v (mV)')
# show()
# # If you want to save data:
# print("Saving TC cell voltages!")
# numpy.savetxt("foo_totaldata.csv", totaldata.v/mV, delimiter=",")
| gpl-3.0 | -6,110,867,096,197,492,000 | 30.764228 | 132 | 0.638341 | false |
Fokko/incubator-airflow | tests/operators/test_python_operator.py | 1 | 23039 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import copy
import logging
import os
import unittest
import unittest.mock
from collections import namedtuple
from datetime import date, timedelta
from airflow.exceptions import AirflowException
from airflow.models import DAG, DagRun, TaskInstance as TI
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import BranchPythonOperator, PythonOperator, ShortCircuitOperator
from airflow.utils import timezone
from airflow.utils.db import create_session
from airflow.utils.state import State
DEFAULT_DATE = timezone.datetime(2016, 1, 1)
END_DATE = timezone.datetime(2016, 1, 2)
INTERVAL = timedelta(hours=12)
FROZEN_NOW = timezone.datetime(2016, 1, 2, 12, 1, 1)
TI_CONTEXT_ENV_VARS = ['AIRFLOW_CTX_DAG_ID',
'AIRFLOW_CTX_TASK_ID',
'AIRFLOW_CTX_EXECUTION_DATE',
'AIRFLOW_CTX_DAG_RUN_ID']
class Call:
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def build_recording_function(calls_collection):
"""
    We cannot use a Mock instance as a PythonOperator callable function, because some tests
    would fail with a TypeError: Object of type Mock is not JSON serializable.
    Instead, this function records custom Call objects for further testing
    (replacing the Mock.assert_called_with assertion method).
"""
def recording_function(*args):
calls_collection.append(Call(*args))
return recording_function
@unittest.mock.patch('os.environ', {
'AIRFLOW_CTX_DAG_ID': None,
'AIRFLOW_CTX_TASK_ID': None,
'AIRFLOW_CTX_EXECUTION_DATE': None,
'AIRFLOW_CTX_DAG_RUN_ID': None
})
class TestPythonOperator(unittest.TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
def setUp(self):
super().setUp()
self.dag = DAG(
'test_dag',
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE},
schedule_interval=INTERVAL)
self.addCleanup(self.dag.clear)
self.clear_run()
self.addCleanup(self.clear_run)
def tearDown(self):
super().tearDown()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
def do_run(self):
self.run = True
def clear_run(self):
self.run = False
def is_run(self):
return self.run
def test_python_operator_run(self):
"""Tests that the python callable is invoked on task run."""
task = PythonOperator(
python_callable=self.do_run,
task_id='python_operator',
dag=self.dag)
self.assertFalse(self.is_run())
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.assertTrue(self.is_run())
def test_python_operator_python_callable_is_callable(self):
"""Tests that PythonOperator will only instantiate if
the python_callable argument is callable."""
not_callable = {}
with self.assertRaises(AirflowException):
PythonOperator(
python_callable=not_callable,
task_id='python_operator',
dag=self.dag)
not_callable = None
with self.assertRaises(AirflowException):
PythonOperator(
python_callable=not_callable,
task_id='python_operator',
dag=self.dag)
def _assert_calls_equal(self, first, second):
self.assertIsInstance(first, Call)
self.assertIsInstance(second, Call)
self.assertTupleEqual(first.args, second.args)
def test_python_callable_arguments_are_templatized(self):
"""Test PythonOperator op_args are templatized"""
recorded_calls = []
# Create a named tuple and ensure it is still preserved
# after the rendering is done
Named = namedtuple('Named', ['var1', 'var2'])
named_tuple = Named('{{ ds }}', 'unchanged')
task = PythonOperator(
task_id='python_operator',
# a Mock instance cannot be used as a callable function or test fails with a
# TypeError: Object of type Mock is not JSON serializable
python_callable=build_recording_function(recorded_calls),
op_args=[
4,
date(2019, 1, 1),
"dag {{dag.dag_id}} ran on {{ds}}.",
named_tuple
],
dag=self.dag)
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING
)
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
ds_templated = DEFAULT_DATE.date().isoformat()
self.assertEqual(1, len(recorded_calls))
self._assert_calls_equal(
recorded_calls[0],
Call(4,
date(2019, 1, 1),
"dag {} ran on {}.".format(self.dag.dag_id, ds_templated),
Named(ds_templated, 'unchanged'))
)
def test_python_callable_keyword_arguments_are_templatized(self):
"""Test PythonOperator op_kwargs are templatized"""
recorded_calls = []
task = PythonOperator(
task_id='python_operator',
# a Mock instance cannot be used as a callable function or test fails with a
# TypeError: Object of type Mock is not JSON serializable
python_callable=build_recording_function(recorded_calls),
op_kwargs={
'an_int': 4,
'a_date': date(2019, 1, 1),
'a_templated_string': "dag {{dag.dag_id}} ran on {{ds}}."
},
dag=self.dag)
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING
)
task.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
self.assertEqual(1, len(recorded_calls))
self._assert_calls_equal(
recorded_calls[0],
Call(an_int=4,
a_date=date(2019, 1, 1),
a_templated_string="dag {} ran on {}.".format(
self.dag.dag_id, DEFAULT_DATE.date().isoformat()))
)
def test_python_operator_shallow_copy_attr(self):
not_callable = lambda x: x
original_task = PythonOperator(
python_callable=not_callable,
task_id='python_operator',
op_kwargs={'certain_attrs': ''},
dag=self.dag
)
new_task = copy.deepcopy(original_task)
# shallow copy op_kwargs
self.assertEqual(id(original_task.op_kwargs['certain_attrs']),
id(new_task.op_kwargs['certain_attrs']))
# shallow copy python_callable
self.assertEqual(id(original_task.python_callable),
id(new_task.python_callable))
def _env_var_check_callback(self):
self.assertEqual('test_dag', os.environ['AIRFLOW_CTX_DAG_ID'])
self.assertEqual('hive_in_python_op', os.environ['AIRFLOW_CTX_TASK_ID'])
self.assertEqual(DEFAULT_DATE.isoformat(),
os.environ['AIRFLOW_CTX_EXECUTION_DATE'])
self.assertEqual('manual__' + DEFAULT_DATE.isoformat(),
os.environ['AIRFLOW_CTX_DAG_RUN_ID'])
def test_echo_env_variables(self):
"""
Test that env variables are exported correctly to the
python callback in the task.
"""
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
t = PythonOperator(task_id='hive_in_python_op',
dag=self.dag,
python_callable=self._env_var_check_callback
)
t.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
def test_conflicting_kwargs(self):
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
# dag is not allowed since it is a reserved keyword
def fn(dag):
# An ValueError should be triggered since we're using dag as a
# reserved keyword
raise RuntimeError("Should not be triggered, dag: {}".format(dag))
python_operator = PythonOperator(
task_id='python_operator',
op_args=[1],
python_callable=fn,
dag=self.dag
)
with self.assertRaises(ValueError) as context:
python_operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
        self.assertIn('dag', str(context.exception), "'dag' not found in the exception")
def test_context_with_conflicting_op_args(self):
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
def fn(custom, dag):
self.assertEqual(1, custom, "custom should be 1")
self.assertIsNotNone(dag, "dag should be set")
python_operator = PythonOperator(
task_id='python_operator',
op_kwargs={'custom': 1},
python_callable=fn,
dag=self.dag
)
python_operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
def test_context_with_kwargs(self):
self.dag.create_dagrun(
run_id='manual__' + DEFAULT_DATE.isoformat(),
execution_date=DEFAULT_DATE,
start_date=DEFAULT_DATE,
state=State.RUNNING,
external_trigger=False,
)
def fn(**context):
# check if context is being set
self.assertGreater(len(context), 0, "Context has not been injected")
python_operator = PythonOperator(
task_id='python_operator',
op_kwargs={'custom': 1},
python_callable=fn,
dag=self.dag
)
python_operator.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
class TestBranchOperator(unittest.TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
def setUp(self):
self.dag = DAG('branch_operator_test',
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE},
schedule_interval=INTERVAL)
self.branch_1 = DummyOperator(task_id='branch_1', dag=self.dag)
self.branch_2 = DummyOperator(task_id='branch_2', dag=self.dag)
def tearDown(self):
super().tearDown()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
    def test_without_dag_run(self):
        """This checks the defensive handling of non-existent tasks in a dag run"""
self.branch_op = BranchPythonOperator(task_id='make_choice',
dag=self.dag,
python_callable=lambda: 'branch_1')
self.branch_1.set_upstream(self.branch_op)
self.branch_2.set_upstream(self.branch_op)
self.dag.clear()
self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
with create_session() as session:
tis = session.query(TI).filter(
TI.dag_id == self.dag.dag_id,
TI.execution_date == DEFAULT_DATE
)
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1':
# should exist with state None
self.assertEqual(ti.state, State.NONE)
elif ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.SKIPPED)
else:
raise Exception
def test_branch_list_without_dag_run(self):
"""This checks if the BranchPythonOperator supports branching off to a list of tasks."""
self.branch_op = BranchPythonOperator(task_id='make_choice',
dag=self.dag,
python_callable=lambda: ['branch_1', 'branch_2'])
self.branch_1.set_upstream(self.branch_op)
self.branch_2.set_upstream(self.branch_op)
self.branch_3 = DummyOperator(task_id='branch_3', dag=self.dag)
self.branch_3.set_upstream(self.branch_op)
self.dag.clear()
self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
with create_session() as session:
tis = session.query(TI).filter(
TI.dag_id == self.dag.dag_id,
TI.execution_date == DEFAULT_DATE
)
expected = {
"make_choice": State.SUCCESS,
"branch_1": State.NONE,
"branch_2": State.NONE,
"branch_3": State.SKIPPED,
}
for ti in tis:
if ti.task_id in expected:
self.assertEqual(ti.state, expected[ti.task_id])
else:
raise Exception
def test_with_dag_run(self):
self.branch_op = BranchPythonOperator(task_id='make_choice',
dag=self.dag,
python_callable=lambda: 'branch_1')
self.branch_1.set_upstream(self.branch_op)
self.branch_2.set_upstream(self.branch_op)
self.dag.clear()
dr = self.dag.create_dagrun(
run_id="manual__",
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING
)
self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1':
self.assertEqual(ti.state, State.NONE)
elif ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.SKIPPED)
else:
raise Exception
def test_with_skip_in_branch_downstream_dependencies(self):
self.branch_op = BranchPythonOperator(task_id='make_choice',
dag=self.dag,
python_callable=lambda: 'branch_1')
self.branch_op >> self.branch_1 >> self.branch_2
self.branch_op >> self.branch_2
self.dag.clear()
dr = self.dag.create_dagrun(
run_id="manual__",
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING
)
self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1':
self.assertEqual(ti.state, State.NONE)
elif ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.NONE)
else:
raise Exception
def test_with_skip_in_branch_downstream_dependencies2(self):
self.branch_op = BranchPythonOperator(task_id='make_choice',
dag=self.dag,
python_callable=lambda: 'branch_2')
self.branch_op >> self.branch_1 >> self.branch_2
self.branch_op >> self.branch_2
self.dag.clear()
dr = self.dag.create_dagrun(
run_id="manual__",
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING
)
self.branch_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tis = dr.get_task_instances()
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1':
self.assertEqual(ti.state, State.SKIPPED)
elif ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.NONE)
else:
raise Exception
class TestShortCircuitOperator(unittest.TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
def tearDown(self):
super().tearDown()
with create_session() as session:
session.query(DagRun).delete()
session.query(TI).delete()
    def test_without_dag_run(self):
        """This checks the defensive handling of non-existent tasks in a dag run"""
value = False
dag = DAG('shortcircuit_operator_test_without_dag_run',
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE
},
schedule_interval=INTERVAL)
short_op = ShortCircuitOperator(task_id='make_choice',
dag=dag,
python_callable=lambda: value)
branch_1 = DummyOperator(task_id='branch_1', dag=dag)
branch_1.set_upstream(short_op)
branch_2 = DummyOperator(task_id='branch_2', dag=dag)
branch_2.set_upstream(branch_1)
upstream = DummyOperator(task_id='upstream', dag=dag)
upstream.set_downstream(short_op)
dag.clear()
short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
with create_session() as session:
tis = session.query(TI).filter(
TI.dag_id == dag.dag_id,
TI.execution_date == DEFAULT_DATE
)
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'upstream':
# should not exist
raise Exception
elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.SKIPPED)
else:
raise Exception
value = True
dag.clear()
short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'upstream':
# should not exist
raise Exception
elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.NONE)
else:
raise Exception
def test_with_dag_run(self):
value = False
dag = DAG('shortcircuit_operator_test_with_dag_run',
default_args={
'owner': 'airflow',
'start_date': DEFAULT_DATE
},
schedule_interval=INTERVAL)
short_op = ShortCircuitOperator(task_id='make_choice',
dag=dag,
python_callable=lambda: value)
branch_1 = DummyOperator(task_id='branch_1', dag=dag)
branch_1.set_upstream(short_op)
branch_2 = DummyOperator(task_id='branch_2', dag=dag)
branch_2.set_upstream(branch_1)
upstream = DummyOperator(task_id='upstream', dag=dag)
upstream.set_downstream(short_op)
dag.clear()
logging.error("Tasks %s", dag.tasks)
dr = dag.create_dagrun(
run_id="manual__",
start_date=timezone.utcnow(),
execution_date=DEFAULT_DATE,
state=State.RUNNING
)
upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tis = dr.get_task_instances()
self.assertEqual(len(tis), 4)
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'upstream':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.SKIPPED)
else:
raise Exception
value = True
dag.clear()
dr.verify_integrity()
upstream.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
short_op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
tis = dr.get_task_instances()
self.assertEqual(len(tis), 4)
for ti in tis:
if ti.task_id == 'make_choice':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'upstream':
self.assertEqual(ti.state, State.SUCCESS)
elif ti.task_id == 'branch_1' or ti.task_id == 'branch_2':
self.assertEqual(ti.state, State.NONE)
else:
raise Exception
| apache-2.0 | 5,132,822,653,135,645,000 | 35.803514 | 104 | 0.556795 | false |
KangHsi/youtube-8m | youtube-8m/train.py | 1 | 26303 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Binary for training Tensorflow models on the YouTube-8M dataset."""
import json
import os
import time
import eval_util
import export_model
import losses
import frame_level_models
import video_level_models
import readers
import tensorflow as tf
import tensorflow.contrib.slim as slim
from tensorflow import app
from tensorflow import flags
from tensorflow import gfile
from tensorflow import logging
from tensorflow.python.client import device_lib
import utils
FLAGS = flags.FLAGS
if __name__ == "__main__":
# Dataset flags.
flags.DEFINE_string("train_dir", "/tmp/yt8m_model/",
"The directory to save the model files in.")
flags.DEFINE_string(
"train_data_pattern", "",
"File glob for the training dataset. If the files refer to Frame Level "
"features (i.e. tensorflow.SequenceExample), then set --reader_type "
"format. The (Sequence)Examples are expected to have 'rgb' byte array "
"sequence feature as well as a 'labels' int64 context feature.")
flags.DEFINE_string("feature_names", "mean_rgb", "Name of the feature "
"to use for training.")
flags.DEFINE_string("feature_sizes", "1024", "Length of the feature vectors.")
# Model flags.
  flags.DEFINE_bool(
      "frame_features", False,
      "If set, then --train_data_pattern must be frame-level features. "
      "Otherwise, --train_data_pattern must be aggregated video-level "
      "features. The model must also be set appropriately (i.e. to read 3D "
      "batches VS 4D batches).")
flags.DEFINE_string(
"model", "LogisticModel",
"Which architecture to use for the model. Models are defined "
"in models.py.")
flags.DEFINE_bool(
"start_new_model", False,
"If set, this will not resume from a checkpoint and will instead create a"
" new model instance.")
# Training flags.
flags.DEFINE_integer("batch_size", 1024,
"How many examples to process per batch for training.")
flags.DEFINE_string("label_loss", "CrossEntropyLoss",
"Which loss function to use for training the model.")
flags.DEFINE_float(
"regularization_penalty", 1.0,
"How much weight to give to the regularization loss (the label loss has "
"a weight of 1).")
flags.DEFINE_float("base_learning_rate", 0.01,
"Which learning rate to start with.")
flags.DEFINE_float("learning_rate_decay", 0.95,
"Learning rate decay factor to be applied every "
"learning_rate_decay_examples.")
flags.DEFINE_float("learning_rate_decay_examples", 4000000,
"Multiply current learning rate by learning_rate_decay "
"every learning_rate_decay_examples.")
flags.DEFINE_integer("num_epochs", 5,
"How many passes to make over the dataset before "
"halting training.")
flags.DEFINE_integer("max_steps", None,
"The maximum number of iterations of the training loop.")
flags.DEFINE_integer("export_model_steps", 1000,
"The period, in number of steps, with which the model "
"is exported for batch prediction.")
# Other flags.
flags.DEFINE_string("gpus", "0",
"GPU ids to use.")
flags.DEFINE_integer("num_readers", 8,
"How many threads to use for reading input files.")
flags.DEFINE_integer("disp_batches", 100,
"Display losses and metrics each disp_batches step")
flags.DEFINE_string("optimizer", "AdamOptimizer",
"What optimizer class to use.")
flags.DEFINE_float("clip_gradient_norm", 1.0, "Norm to clip gradients to.")
flags.DEFINE_bool(
"log_device_placement", False,
"Whether to write the device on which every op will run into the "
"logs on startup.")
def validate_class_name(flag_value, category, modules, expected_superclass):
"""Checks that the given string matches a class of the expected type.
Args:
flag_value: A string naming the class to instantiate.
category: A string used further describe the class in error messages
(e.g. 'model', 'reader', 'loss').
modules: A list of modules to search for the given class.
expected_superclass: A class that the given class should inherit from.
Raises:
FlagsError: If the given class could not be found or if the first class
found with that name doesn't inherit from the expected superclass.
Returns:
True if a class was found that matches the given constraints.
"""
candidates = [getattr(module, flag_value, None) for module in modules]
for candidate in candidates:
if not candidate:
continue
if not issubclass(candidate, expected_superclass):
raise flags.FlagsError("%s '%s' doesn't inherit from %s." %
(category, flag_value,
expected_superclass.__name__))
return True
raise flags.FlagsError("Unable to find %s '%s'." % (category, flag_value))
def get_input_data_tensors(reader,
data_pattern,
batch_size=1000,
num_epochs=None,
num_readers=1):
"""Creates the section of the graph which reads the training data.
Args:
reader: A class which parses the training data.
data_pattern: A 'glob' style path to the data files.
batch_size: How many examples to process at a time.
num_epochs: How many passes to make over the training data. Set to 'None'
to run indefinitely.
num_readers: How many I/O threads to use.
Returns:
A tuple containing the features tensor, labels tensor, and optionally a
tensor containing the number of frames per video. The exact dimensions
depend on the reader being used.
Raises:
IOError: If no files matching the given pattern were found.
"""
logging.info("Using batch size of " + str(batch_size) + " for training.")
with tf.name_scope("train_input"):
files = gfile.Glob(data_pattern)
if not files:
raise IOError("Unable to find training files. data_pattern='" +
data_pattern + "'.")
logging.info("Number of training files: %s.", str(len(files)))
filename_queue = tf.train.string_input_producer(
files, num_epochs=num_epochs, shuffle=True)
training_data = [
reader.prepare_reader(filename_queue) for _ in range(num_readers)
]
return tf.train.shuffle_batch_join(
training_data,
batch_size=batch_size,
capacity=batch_size * 5,
min_after_dequeue=batch_size,
allow_smaller_final_batch=True,
enqueue_many=True)
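# Illustrative sketch (not part of the original pipeline): how this helper might be
# wired up for video-level features. The reader class and the file glob below are
# assumptions made for the example, mirroring how build_graph() consumes it.
#
#   example_reader = readers.YT8MAggregatedFeatureReader(
#       feature_names=["mean_rgb"], feature_sizes=[1024])
#   unused_id, features, labels, num_frames = get_input_data_tensors(
#       example_reader, "/data/train*.tfrecord", batch_size=1024,
#       num_epochs=5, num_readers=8)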
def find_class_by_name(name, modules):
"""Searches the provided modules for the named class and returns it."""
modules = [getattr(module, name, None) for module in modules]
return next(a for a in modules if a)
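# For example (sketch), find_class_by_name("LogisticModel", [video_level_models])
# is expected to return the first matching class object found in the listed modules.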
def build_graph(reader,
model,
train_data_pattern,
label_loss_fn=losses.CrossEntropyLoss(),
batch_size=1000,
base_learning_rate=0.01,
learning_rate_decay_examples=1000000,
learning_rate_decay=0.95,
optimizer_class=tf.train.AdamOptimizer,
clip_gradient_norm=1.0,
regularization_penalty=1,
num_readers=1,
num_epochs=None):
"""Creates the Tensorflow graph.
This will only be called once in the life of
a training model, because after the graph is created the model will be
restored from a meta graph file rather than being recreated.
Args:
reader: The data file reader. It should inherit from BaseReader.
model: The core model (e.g. logistic or neural net). It should inherit
from BaseModel.
train_data_pattern: glob path to the training data files.
label_loss_fn: What kind of loss to apply to the model. It should inherit
from BaseLoss.
batch_size: How many examples to process at a time.
base_learning_rate: What learning rate to initialize the optimizer with.
optimizer_class: Which optimization algorithm to use.
clip_gradient_norm: Magnitude of the gradient to clip to.
regularization_penalty: How much weight to give the regularization loss
compared to the label loss.
num_readers: How many threads to use for I/O operations.
num_epochs: How many passes to make over the data. 'None' means an
unlimited number of passes.
"""
global_step = tf.Variable(0, trainable=False, name="global_step")
local_device_protos = device_lib.list_local_devices()
gpus = [x.name for x in local_device_protos if x.device_type == 'GPU']
num_gpus = len(gpus)
if num_gpus > 0:
logging.info("Using the following GPUs to train: " + str(gpus))
num_towers = num_gpus
device_string = '/gpu:%d'
else:
logging.info("No GPUs found. Training on CPU.")
num_towers = 1
device_string = '/cpu:%d'
learning_rate = tf.train.exponential_decay(
base_learning_rate,
global_step * batch_size * num_towers,
learning_rate_decay_examples,
learning_rate_decay,
staircase=True)
tf.summary.scalar('learning_rate', learning_rate)
optimizer = optimizer_class(learning_rate)
unused_video_id, model_input_raw, labels_batch, num_frames = (
get_input_data_tensors(
reader,
train_data_pattern,
batch_size=batch_size * num_towers,
num_readers=num_readers,
num_epochs=num_epochs))
tf.summary.histogram("model/input_raw", model_input_raw)
feature_dim = len(model_input_raw.get_shape()) - 1
model_input = tf.nn.l2_normalize(model_input_raw, feature_dim)
tower_inputs = tf.split(model_input, num_towers)
tower_labels = tf.split(labels_batch, num_towers)
tower_num_frames = tf.split(num_frames, num_towers)
tower_gradients = []
tower_predictions = []
tower_label_losses = []
tower_reg_losses = []
for i in range(num_towers):
# For some reason these 'with' statements can't be combined onto the same
# line. They have to be nested.
with tf.device(device_string % i):
with (tf.variable_scope(("tower"), reuse=True if i > 0 else None)):
with (slim.arg_scope([slim.model_variable, slim.variable], device="/cpu:0" if num_gpus!=1 else "/gpu:0")):
result = model.create_model(
tower_inputs[i],
num_frames=tower_num_frames[i],
vocab_size=reader.num_classes,
labels=tower_labels[i])
for variable in slim.get_model_variables():
tf.summary.histogram(variable.op.name, variable)
predictions = result["predictions"]
tower_predictions.append(predictions)
if "loss" in result.keys():
label_loss = result["loss"]
else:
label_loss = label_loss_fn.calculate_loss(predictions, tower_labels[i])
if "regularization_loss" in result.keys():
reg_loss = result["regularization_loss"]
else:
reg_loss = tf.constant(0.0)
reg_losses = tf.losses.get_regularization_losses()
if reg_losses:
reg_loss += tf.add_n(reg_losses)
tower_reg_losses.append(reg_loss)
# Adds update_ops (e.g., moving average updates in batch normalization) as
# a dependency to the train_op.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
if "update_ops" in result.keys():
update_ops += result["update_ops"]
if update_ops:
with tf.control_dependencies(update_ops):
barrier = tf.no_op(name="gradient_barrier")
with tf.control_dependencies([barrier]):
label_loss = tf.identity(label_loss)
tower_label_losses.append(label_loss)
# Incorporate the L2 weight penalties etc.
final_loss = regularization_penalty * reg_loss + label_loss
gradients = optimizer.compute_gradients(final_loss,
colocate_gradients_with_ops=False)
tower_gradients.append(gradients)
label_loss = tf.reduce_mean(tf.stack(tower_label_losses))
tf.summary.scalar("label_loss", label_loss)
if regularization_penalty != 0:
reg_loss = tf.reduce_mean(tf.stack(tower_reg_losses))
tf.summary.scalar("reg_loss", reg_loss)
merged_gradients = utils.combine_gradients(tower_gradients)
if clip_gradient_norm > 0:
with tf.name_scope('clip_grads'):
merged_gradients = utils.clip_gradient_norms(merged_gradients, clip_gradient_norm)
train_op = optimizer.apply_gradients(merged_gradients, global_step=global_step)
tf.add_to_collection("global_step", global_step)
tf.add_to_collection("loss", label_loss)
tf.add_to_collection("predictions", tf.concat(tower_predictions, 0))
tf.add_to_collection("input_batch_raw", model_input_raw)
tf.add_to_collection("input_batch", model_input)
tf.add_to_collection("num_frames", num_frames)
tf.add_to_collection("labels", tf.cast(labels_batch, tf.float32))
tf.add_to_collection("train_op", train_op)
class Trainer(object):
"""A Trainer to train a Tensorflow graph."""
def __init__(self, cluster, task, train_dir, model, reader, model_exporter,
log_device_placement=True, max_steps=None,
               export_model_steps=1000, disp_batches=100):
    """Creates a Trainer.
Args:
cluster: A tf.train.ClusterSpec if the execution is distributed.
None otherwise.
task: A TaskSpec describing the job type and the task index.
"""
self.cluster = cluster
self.task = task
self.is_master = (task.type == "master" and task.index == 0)
self.train_dir = train_dir
self.config = tf.ConfigProto(
allow_soft_placement=True,log_device_placement=log_device_placement)
self.config.gpu_options.allow_growth = True
self.model = model
self.reader = reader
self.model_exporter = model_exporter
self.max_steps = max_steps
self.max_steps_reached = False
self.export_model_steps = export_model_steps
self.last_model_export_step = 0
self.disp_batches = disp_batches
# if self.is_master and self.task.index > 0:
# raise StandardError("%s: Only one replica of master expected",
# task_as_string(self.task))
def run(self, start_new_model=False):
"""Performs training on the currently defined Tensorflow graph.
Returns:
A tuple of the training Hit@1 and the training PERR.
"""
if self.is_master and start_new_model:
self.remove_training_directory(self.train_dir)
target, device_fn = self.start_server_if_distributed()
meta_filename = self.get_meta_filename(start_new_model, self.train_dir)
with tf.Graph().as_default() as graph:
if meta_filename:
saver = self.recover_model(meta_filename)
with tf.device(device_fn):
if not meta_filename:
saver = self.build_model(self.model, self.reader)
global_step = tf.get_collection("global_step")[0]
loss = tf.get_collection("loss")[0]
predictions = tf.get_collection("predictions")[0]
labels = tf.get_collection("labels")[0]
train_op = tf.get_collection("train_op")[0]
init_op = tf.global_variables_initializer()
sv = tf.train.Supervisor(
graph,
logdir=self.train_dir,
init_op=init_op,
is_chief=self.is_master,
global_step=global_step,
save_model_secs=0 * 60,
save_summaries_secs=120,
saver=saver)
logging.info("%s: Starting managed session.", task_as_string(self.task))
with sv.managed_session(target, config=self.config) as sess:
try:
logging.info("%s: Entering training loop.", task_as_string(self.task))
while (not sv.should_stop()) and (not self.max_steps_reached):
batch_start_time = time.time()
_, global_step_val, loss_val, predictions_val, labels_val = sess.run(
[train_op, global_step, loss, predictions, labels])
seconds_per_batch = time.time() - batch_start_time
examples_per_second = labels_val.shape[0] / seconds_per_batch
if self.max_steps and self.max_steps <= global_step_val:
self.max_steps_reached = True
if self.is_master and global_step_val % self.disp_batches == 0 and self.train_dir:
eval_start_time = time.time()
hit_at_one = eval_util.calculate_hit_at_one(predictions_val, labels_val)
perr = eval_util.calculate_precision_at_equal_recall_rate(predictions_val,
labels_val)
gap = eval_util.calculate_gap(predictions_val, labels_val)
eval_end_time = time.time()
eval_time = eval_end_time - eval_start_time
logging.info("training step " + str(global_step_val) + " | Loss: " + ("%.2f" % loss_val) +
" Examples/sec: " + ("%.2f" % examples_per_second) + " | Hit@1: " +
("%.4f" % hit_at_one) + " PERR: " + ("%.4f" % perr) +
" GAP: " + ("%.4f" % gap))
sv.summary_writer.add_summary(
utils.MakeSummary("model/Training_Hit@1", hit_at_one),
global_step_val)
sv.summary_writer.add_summary(
utils.MakeSummary("model/Training_Perr", perr), global_step_val)
sv.summary_writer.add_summary(
utils.MakeSummary("model/Training_GAP", gap), global_step_val)
sv.summary_writer.add_summary(
utils.MakeSummary("global_step/Examples/Second",
examples_per_second), global_step_val)
sv.summary_writer.flush()
# Exporting the model every x steps
time_to_export = ((self.last_model_export_step == 0) or
(global_step_val - self.last_model_export_step
>= self.export_model_steps))
if self.is_master and time_to_export:
self.export_model(global_step_val, sv.saver, sv.save_path, sess)
self.last_model_export_step = global_step_val
#else:
logging.info("training step " + str(global_step_val) + " | Loss: " +
("%.4f" % loss_val) + " Examples/sec: " + ("%.4f" % examples_per_second))
except tf.errors.OutOfRangeError:
logging.info("%s: Done training -- epoch limit reached.",
task_as_string(self.task))
logging.info("%s: Exited training loop.", task_as_string(self.task))
sv.Stop()
def export_model(self, global_step_val, saver, save_path, session):
# If the model has already been exported at this step, return.
if global_step_val == self.last_model_export_step:
return
last_checkpoint = saver.save(session, save_path, global_step_val)
model_dir = "{0}/export/step_{1}".format(self.train_dir, global_step_val)
logging.info("%s: Exporting the model at step %s to %s.",
task_as_string(self.task), global_step_val, model_dir)
self.model_exporter.export_model(
model_dir=model_dir,
global_step_val=global_step_val,
last_checkpoint=last_checkpoint)
def start_server_if_distributed(self):
"""Starts a server if the execution is distributed."""
if self.cluster:
logging.info("%s: Starting trainer within cluster %s.",
task_as_string(self.task), self.cluster.as_dict())
server = start_server(self.cluster, self.task)
target = server.target
device_fn = tf.train.replica_device_setter(
ps_device="/job:ps",
worker_device="/job:%s/task:%d" % (self.task.type, self.task.index),
cluster=self.cluster)
else:
target = ""
device_fn = ""
return (target, device_fn)
def remove_training_directory(self, train_dir):
"""Removes the training directory."""
try:
logging.info(
"%s: Removing existing train directory.",
task_as_string(self.task))
gfile.DeleteRecursively(train_dir)
except:
logging.error(
"%s: Failed to delete directory " + train_dir +
" when starting a new model. Please delete it manually and" +
" try again.", task_as_string(self.task))
def get_meta_filename(self, start_new_model, train_dir):
if start_new_model:
logging.info("%s: Flag 'start_new_model' is set. Building a new model.",
task_as_string(self.task))
return None
latest_checkpoint = tf.train.latest_checkpoint(train_dir)
if not latest_checkpoint:
logging.info("%s: No checkpoint file found. Building a new model.",
task_as_string(self.task))
return None
meta_filename = latest_checkpoint + ".meta"
if not gfile.Exists(meta_filename):
logging.info("%s: No meta graph file found. Building a new model.",
task_as_string(self.task))
return None
else:
return meta_filename
def recover_model(self, meta_filename):
logging.info("%s: Restoring from meta graph file %s",
task_as_string(self.task), meta_filename)
return tf.train.import_meta_graph(meta_filename)
def build_model(self, model, reader):
"""Find the model and build the graph."""
label_loss_fn = find_class_by_name(FLAGS.label_loss, [losses])()
optimizer_class = find_class_by_name(FLAGS.optimizer, [tf.train])
build_graph(reader=reader,
model=model,
optimizer_class=optimizer_class,
clip_gradient_norm=FLAGS.clip_gradient_norm,
train_data_pattern=FLAGS.train_data_pattern,
label_loss_fn=label_loss_fn,
base_learning_rate=FLAGS.base_learning_rate,
learning_rate_decay=FLAGS.learning_rate_decay,
learning_rate_decay_examples=FLAGS.learning_rate_decay_examples,
regularization_penalty=FLAGS.regularization_penalty,
num_readers=FLAGS.num_readers,
batch_size=FLAGS.batch_size,
num_epochs=FLAGS.num_epochs)
return tf.train.Saver(max_to_keep=20, keep_checkpoint_every_n_hours=1)
def get_reader():
# Convert feature_names and feature_sizes to lists of values.
feature_names, feature_sizes = utils.GetListOfFeatureNamesAndSizes(
FLAGS.feature_names, FLAGS.feature_sizes)
if FLAGS.frame_features:
reader = readers.YT8MFrameFeatureReader(
feature_names=feature_names, feature_sizes=feature_sizes)
else:
reader = readers.YT8MAggregatedFeatureReader(
feature_names=feature_names, feature_sizes=feature_sizes)
return reader
class ParameterServer(object):
"""A parameter server to serve variables in a distributed execution."""
def __init__(self, cluster, task):
"""Creates a ParameterServer.
Args:
cluster: A tf.train.ClusterSpec if the execution is distributed.
None otherwise.
task: A TaskSpec describing the job type and the task index.
"""
self.cluster = cluster
self.task = task
def run(self):
"""Starts the parameter server."""
logging.info("%s: Starting parameter server within cluster %s.",
task_as_string(self.task), self.cluster.as_dict())
server = start_server(self.cluster, self.task)
server.join()
def start_server(cluster, task):
"""Creates a Server.
Args:
cluster: A tf.train.ClusterSpec if the execution is distributed.
None otherwise.
task: A TaskSpec describing the job type and the task index.
"""
if not task.type:
raise ValueError("%s: The task type must be specified." %
task_as_string(task))
if task.index is None:
raise ValueError("%s: The task index must be specified." %
task_as_string(task))
# Create and start a server.
return tf.train.Server(
tf.train.ClusterSpec(cluster),
protocol="grpc",
job_name=task.type,
task_index=task.index)
def task_as_string(task):
return "/job:%s/task:%s" % (task.type, task.index)
def main(unused_argv):
# Load the environment.
env = json.loads(os.environ.get("TF_CONFIG", "{}"))
os.environ["CUDA_VISIBLE_DEVICES"] = FLAGS.gpus
# Load the cluster data from the environment.
cluster_data = env.get("cluster", None)
cluster = tf.train.ClusterSpec(cluster_data) if cluster_data else None
# Load the task data from the environment.
task_data = env.get("task", None) or {"type": "master", "index": 0}
task = type("TaskSpec", (object,), task_data)
# Logging the version.
logging.set_verbosity(tf.logging.INFO)
logging.info("%s: Tensorflow version: %s.",
task_as_string(task), tf.__version__)
# Dispatch to a master, a worker, or a parameter server.
if not cluster or task.type == "master" or task.type == "worker":
model = find_class_by_name(FLAGS.model,
[frame_level_models, video_level_models])()
reader = get_reader()
model_exporter = export_model.ModelExporter(
frame_features=FLAGS.frame_features,
model=model,
reader=reader)
Trainer(cluster, task, FLAGS.train_dir, model, reader, model_exporter,
FLAGS.log_device_placement, FLAGS.max_steps,
FLAGS.export_model_steps, FLAGS.disp_batches).run(start_new_model=FLAGS.start_new_model)
elif task.type == "ps":
ParameterServer(cluster, task).run()
else:
raise ValueError("%s: Invalid task_type: %s." %
(task_as_string(task), task.type))
if __name__ == "__main__":
app.run() | apache-2.0 | -5,588,164,957,105,381,000 | 44.508651 | 114 | 0.636505 | false |
heejongahn/hjlog | migrations/versions/3255e6bed08_.py | 1 | 1189 | """empty message
Revision ID: 3255e6bed08
Revises: 46ae0d2b68d
Create Date: 2015-12-31 22:35:01.740168
"""
# revision identifiers, used by Alembic.
revision = '3255e6bed08'
down_revision = '46ae0d2b68d'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('comment')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('comment',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('name', sa.VARCHAR(length=30), autoincrement=False, nullable=True),
sa.Column('ip', sa.VARCHAR(length=20), autoincrement=False, nullable=True),
sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('datetime', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('original_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['original_id'], ['post.id'], name='comment_original_id_fkey'),
sa.PrimaryKeyConstraint('id', name='comment_pkey')
)
### end Alembic commands ###
| mit | 1,213,826,446,446,747,100 | 32.971429 | 91 | 0.699748 | false |
kurniawano/pythymiodw | pythymiodw/sm/__init__.py | 1 | 4836 | from .. import *
from ..io import Input
from ..pyro import Thymio3D
from threading import Thread, Event
import sys
class ThymioSMSim():
def __init__(self, MySM, world=None, graphic='pygame', scale=1):
if graphic == 'pygame':
self.thymio = ThymioSMPG(MySM, world, scale)
elif graphic == 'turtle':
self.thymio = ThymioSMTurtle(MySM, world, scale)
elif graphic == '3D':
self.thymio = ThymioSM3DBase(MySM)
def start(self):
try:
while True:
if not self.thymio.behaviour.done(self.thymio.behaviour.state):
self.thymio.update()
else:
self.stop()
break
self.thymio.sleep(dt / 1000.0)
except KeyboardInterrupt:
self.stop()
except Exception as e:
print('Error:', e)
self.stop()
def stop(self):
self.thymio.quit()
sys.exit(1)
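# Usage sketch (MyBrain is a hypothetical state machine instance exposing the
# start()/step(input)/done(state) interface that ThymioSMSimBase expects):
#
#   sim = ThymioSMSim(MyBrain(), world=None, graphic='pygame', scale=1)
#   sim.start()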
class ThymioSMReal(Thread):
def __init__(self, MySM, world=None):
super().__init__()
self.thymio = ThymioSM1(MySM)
self.stopped = Event()
def run(self):
while not self.stopped.wait(dt / 1000.0):
try:
if not self.thymio.behaviour.done(self.thymio.behaviour.state):
self.thymio.update()
else:
self.stop()
except Exception as e:
print('Error:', e)
self.stop()
def stop(self):
self.stopped.set()
self.thymio.quit()
class ThymioSM3D():
def __init__(self, MySM):
self.thymio = ThymioSM3DBase(MySM)
def start(self):
try:
while True:
if not self.thymio.behaviour.done(self.thymio.behaviour.state):
self.thymio.update()
else:
self.stop()
break
self.thymio.sleep(dt / 1000.0)
except KeyboardInterrupt:
self.stop()
except Exception as e:
print('Error:', e)
self.stop()
def stop(self):
self.thymio.quit()
sys.exit(1)
class ThymioSM1(ThymioReal):
def __init__(self, MySM):
super().__init__(self)
self.behaviour = MySM
self.input = Input()
self.behaviour.start()
self.init_read()
def update(self):
self.input.prox_horizontal = self.prox_horizontal
self.input.prox_ground = self.prox_ground
self.input.temperature = self.temperature
self.input.accelerometer = self.accelerometer
self.input.button_center = self.button_center
self.input.button_left = self.button_left
self.input.button_right = self.button_right
self.input.button_forward = self.button_forward
self.input.button_backward = self.button_backward
output = self.behaviour.step(self.input)
self.move(output)
def move(self, output):
self.wheels(output.leftv, output.rightv)
class ThymioSMSimBase:
def __init__(self, *args, **kwargs):
MySM = args[0]
if len(args) > 1:
world = args[1]
scale = args[2]
super().__init__(world, scale)
else:
super().__init__()
self.behaviour = MySM
self.input = Input()
self.behaviour.start()
def update(self):
self.input.prox_horizontal = self.prox_horizontal
self.input.prox_ground = self.prox_ground
self.input.temperature = self.temperature
# self.input.accelerometer=self.accelerometer
# self.input.button_center=self.button_center
# self.input.button_left=self.button_left
# self.input.button_right=self.button_right
# self.input.button_forward=self.button_forward
# self.input.button_backward=self.button_backward
output = self.behaviour.step(self.input)
self.move(output)
def move(self, output):
self.wheels(output.leftv, output.rightv)
class ThymioSMTurtle(ThymioSMSimBase, ThymioSim):
pass
class ThymioSMPG(ThymioSMSimBase, ThymioSimPG):
pass
class ThymioSM3DBase(ThymioSMSimBase, Thymio3D):
def update(self):
self.input.prox_horizontal = self.prox_horizontal
self.input.prox_ground = self.prox_ground
# self.input.temperature = self.temperature
# self.input.accelerometer=self.accelerometer
# self.input.button_center=self.button_center
# self.input.button_left=self.button_left
# self.input.button_right=self.button_right
# self.input.button_forward=self.button_forward
# self.input.button_backward=self.button_backward
output = self.behaviour.step(self.input)
self.move(output)
| mit | -4,015,822,135,599,048,700 | 28.13253 | 79 | 0.577337 | false |
pravisankar/solum | doc/source/conf.py | 1 | 2732 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinxcontrib.autohttp.flask',
'sphinxcontrib.pecanwsme.rest',
'oslosphinx',
'wsmeext.sphinxext',
]
wsme_protocols = ['restjson', 'restxml']
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'solum'
copyright = u'2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
# html_theme = '_theme'
html_static_path = ['_static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# -- Options for manual page output -------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('man/solum-db-manage', 'solum-db-manage',
u'Script which helps manage specific database operations',
[u'Solum Developers'], 1),
]
# If true, show URL addresses after external links.
man_show_urls = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| apache-2.0 | -3,590,725,781,218,680,000 | 31.915663 | 79 | 0.683382 | false |
acumb/LatticeDNAOrigami | scripts/tikz/create_tikz_diagram.py | 1 | 3756 | #!/usr/bin/env python
"""Generates tikz scripts for configurations of the origami model.
Takes a template tikz script for generating configuration diagrams and outputs a
script for configurations at specified steps in the specified input file.
"""
import argparse
import sys
import string
from origamipy.origami_io import JSONInputFile, PlainTextTrajFile
TEMPLATE_FILENAME = 'tikz_template.tex'
def make_tikz_position_bond_orientation_list(chains):
scaffold_list = ''
staple_list = ''
for chain in chains:
for domain_index in range(len(chain['positions'])):
rix = chain['positions'][domain_index][0]
riy = chain['positions'][domain_index][1]
riz = chain['positions'][domain_index][2]
try:
rjx = chain['positions'][domain_index + 1][0]
rjy = chain['positions'][domain_index + 1][1]
rjz = chain['positions'][domain_index + 1][2]
aix = rjx - rix
aiy = rjy - riy
aiz = rjz - riz
except IndexError:
aix = 0
aiy = 0
aiz = 0
bix = chain['orientations'][domain_index][0] * 0.5
biy = chain['orientations'][domain_index][1] * 0.5
biz = chain['orientations'][domain_index][2] * 0.5
if chain['identity'] == 0:
scaffold_list = scaffold_list + '{} / {} / {} / {} / {} / {} / {} / {} / {}, '.format(
rix, riy, riz, aix, aiy, aiz, bix, biy, biz)
else:
staple_list = staple_list + '{} / {} / {} / {} / {} / {} / {} / {} / {}, '.format(
rix, riy, riz, aix, aiy, aiz, bix, biy, biz)
# Remove last comma and space
scaffold_list = scaffold_list[:-2]
staple_list = staple_list[:-2]
return scaffold_list, staple_list
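# Each entry built above is a '/'-separated tuple of nine values per domain:
# the position (rix, riy, riz), the bond vector to the next domain (aix, aiy, aiz)
# and the halved orientation vector (bix, biy, biz). For example, a scaffold domain
# at the origin bonded along +x and oriented along +z would appear roughly as
# "0 / 0 / 0 / 1 / 0 / 0 / 0.0 / 0.0 / 0.5" (illustrative values only).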
def insert_list_and_write(scaffold_list, staple_list, output_filename, coor1,
coor2, axis):
with open(TEMPLATE_FILENAME) as input:
template = string.Template(input.read())
template = template.substitute(scaffold_list=scaffold_list,
staple_list=staple_list, coor1=coor1, coor2=coor2, axis=axis)
with open(output_filename, 'w') as output:
output.write(template)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('system_filename',
help='System filename')
parser.add_argument('traj_filename',
help='Trajectory filename')
parser.add_argument('output_filename',
help='Tex filename')
parser.add_argument('step', type=int,
help='Step in configuration file to draw')
parser.add_argument('--coor1', dest='coor1', default=40,
help='First perspective coordinate (default 40)')
parser.add_argument('--coor2', dest='coor2', default=110,
help='First perspective coordinate (default 110)')
parser.add_argument('--noaxis', dest='noaxis', action='store_true',
default=False,
help='Switch off axis')
args = parser.parse_args()
system_filename = args.system_filename
traj_filename = args.traj_filename
output_filename = args.output_filename
step = args.step
coor1 = args.coor1
coor2 = args.coor2
noaxis = args.noaxis
if noaxis:
axis = '%'
else:
axis = ''
system_file = JSONInputFile(system_filename)
traj_file = PlainTextTrajFile(traj_filename, system_file)
chains = traj_file.chains(step)
scaffold_list, staple_list = make_tikz_position_bond_orientation_list(chains)
insert_list_and_write(scaffold_list, staple_list, output_filename, coor1,
coor2, axis)
if __name__ == '__main__':
main()
| mit | 3,299,467,935,302,009,300 | 33.777778 | 102 | 0.587593 | false |
googleapis/python-api-common-protos | google/api/log_pb2.py | 1 | 5430 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/log.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import label_pb2 as google_dot_api_dot_label__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/api/log.proto",
package="google.api",
syntax="proto3",
serialized_options=b"\n\016com.google.apiB\010LogProtoP\001ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\242\002\004GAPI",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x14google/api/log.proto\x12\ngoogle.api\x1a\x16google/api/label.proto"u\n\rLogDescriptor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12+\n\x06labels\x18\x02 \x03(\x0b\x32\x1b.google.api.LabelDescriptor\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x04 \x01(\tBj\n\x0e\x63om.google.apiB\x08LogProtoP\x01ZEgoogle.golang.org/genproto/googleapis/api/serviceconfig;serviceconfig\xa2\x02\x04GAPIb\x06proto3',
dependencies=[google_dot_api_dot_label__pb2.DESCRIPTOR],
)
_LOGDESCRIPTOR = _descriptor.Descriptor(
name="LogDescriptor",
full_name="google.api.LogDescriptor",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="name",
full_name="google.api.LogDescriptor.name",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="labels",
full_name="google.api.LogDescriptor.labels",
index=1,
number=2,
type=11,
cpp_type=10,
label=3,
has_default_value=False,
default_value=[],
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="description",
full_name="google.api.LogDescriptor.description",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="display_name",
full_name="google.api.LogDescriptor.display_name",
index=3,
number=4,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=60,
serialized_end=177,
)
_LOGDESCRIPTOR.fields_by_name[
"labels"
].message_type = google_dot_api_dot_label__pb2._LABELDESCRIPTOR
DESCRIPTOR.message_types_by_name["LogDescriptor"] = _LOGDESCRIPTOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LogDescriptor = _reflection.GeneratedProtocolMessageType(
"LogDescriptor",
(_message.Message,),
{
"DESCRIPTOR": _LOGDESCRIPTOR,
"__module__": "google.api.log_pb2"
# @@protoc_insertion_point(class_scope:google.api.LogDescriptor)
},
)
_sym_db.RegisterMessage(LogDescriptor)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| apache-2.0 | -4,377,264,029,072,648,700 | 32.9375 | 450 | 0.626888 | false |
bensoer/pychat | crypto/algorithms/randomcaesarcipher.py | 1 | 3378 | __author__ = 'bensoer'
from crypto.algorithms.algorithminterface import AlgorithmInterface
from tools.argparcer import ArgParcer
import random
class RandomCaesarCipher(AlgorithmInterface):
__alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
__scrambledAlphabet = ""
__seed = 5
__offset = 3
def __init__(self, arguments):
seed = ArgParcer.getValue(arguments, "-s")
offset = ArgParcer.getValue(arguments, "-o")
'set the seet value if it was passed in'
if(seed != ""):
self.__seed = int(seed)
'set the offset value if it was passed in'
if(offset != ""):
self.__offset = int(offset)
'now generate our scrambled alphabet'
self.__generateScrambledAlphabet()
def __generateScrambledAlphabet(self):
random.seed(self.__seed)
i = 0
while i < len(self.__alphabet):
index = random.randrange(0, len(self.__alphabet))
letterToBeAdded = self.__alphabet[index];
if self.__letterIsAlreadyScrambled(letterToBeAdded) == False:
self.__scrambledAlphabet += letterToBeAdded
i = i + 1
print("Scrambled Alphabet Generated: " + self.__scrambledAlphabet)
def __letterIsAlreadyScrambled(self, letter):
for scrambledLetter in self.__scrambledAlphabet:
if scrambledLetter == letter:
return True
return False
def __getIndexOfLetter(self, letter, alphabet):
for index, alphabetLetter in enumerate(alphabet):
if alphabetLetter == letter:
return index
def encryptString(self, unencryptedMessage):
encryptedMessage = ""
for letter in unencryptedMessage:
'check if this is the colon or space'
if letter == ":" or letter == " ":
encryptedMessage += letter
continue
'anything else we encrypt with the random letters'
index = self.__getIndexOfLetter(letter, self.__alphabet)
'apply the offset'
offsetIndex = index + self.__offset
'correct the index in case it overflows. Do wrap around'
correctedIndex = offsetIndex % (len(self.__alphabet))
encryptedLetter = self.__scrambledAlphabet[correctedIndex]
encryptedMessage += encryptedLetter
return encryptedMessage.encode()
def decryptString(self, encryptedMessage):
encryptedMessage = encryptedMessage.decode()
decryptedMessage = ""
for letter in encryptedMessage:
'check if this is the colon or space'
if letter == ":" or letter == " ":
decryptedMessage += letter
continue
index = self.__getIndexOfLetter(letter, self.__scrambledAlphabet)
'apply offset'
offsetIndex = index - self.__offset
'correct the index in case we go over'
correctedIndex = 0
if offsetIndex < 0:
offsetIndex = offsetIndex * -1
correctedIndex = (len(self.__alphabet)) - offsetIndex
else:
correctedIndex = offsetIndex
decryptedLetter = self.__alphabet[correctedIndex]
decryptedMessage += decryptedLetter
return decryptedMessage
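# Usage sketch (assumes ArgParcer reads flag/value pairs from the argument list;
# the seed, offset and plaintext are made-up example values):
#
#   cipher = RandomCaesarCipher(['-s', '7', '-o', '4'])
#   secret = cipher.encryptString('alice: hello')   # UTF-8 encoded bytes
#   plain = cipher.decryptString(secret)            # 'alice: hello' again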
| mit | -2,099,955,493,845,500,400 | 31.796117 | 77 | 0.597099 | false |
dolph/python-keystoneclient | tests/v2_0/test_auth.py | 1 | 8962 | import httplib2
import json
from keystoneclient.v2_0 import client
from keystoneclient import exceptions
from tests import utils
def to_http_response(resp_dict):
"""
Utility function to convert a python dictionary
    (e.g. {'status': status, 'body': body, 'headers': headers})
to an httplib2 response.
"""
resp = httplib2.Response(resp_dict)
for k, v in resp_dict['headers'].items():
resp[k] = v
return resp
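# For example, to_http_response({'status': 200, 'body': '{}', 'headers': {'location': '/v2.0'}})
# yields an httplib2.Response whose .status is 200 and which exposes resp['location']
# (illustrative values only; the body is carried separately by the test doubles below).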
class AuthenticateAgainstKeystoneTests(utils.TestCase):
def setUp(self):
super(AuthenticateAgainstKeystoneTests, self).setUp()
self.TEST_RESPONSE_DICT = {
"access": {
"token": {
"expires": "12345",
"id": self.TEST_TOKEN,
"tenant": {
"id": self.TEST_TENANT_ID
},
},
"user": {
"id": self.TEST_USER
},
"serviceCatalog": self.TEST_SERVICE_CATALOG,
},
}
self.TEST_REQUEST_BODY = {
"auth": {
"passwordCredentials": {
"username": self.TEST_USER,
"password": self.TEST_TOKEN,
},
"tenantId": self.TEST_TENANT_ID,
},
}
self.TEST_REQUEST_HEADERS = {
'Content-Type': 'application/json',
'User-Agent': 'python-keystoneclient',
}
def test_authenticate_failure(self):
_auth = 'auth'
_cred = 'passwordCredentials'
_pass = 'password'
self.TEST_REQUEST_BODY[_auth][_cred][_pass] = 'bad_key'
resp = httplib2.Response({
"status": 401,
"body": json.dumps({
"unauthorized": {
"message": "Unauthorized",
"code": "401",
},
}),
})
# Implicit retry on API calls, so it gets called twice
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
self.mox.ReplayAll()
# Workaround for issue with assertRaises on python2.6
# where with assertRaises(exceptions.Unauthorized): doesn't work
# right
def client_create_wrapper():
client.Client(username=self.TEST_USER,
password="bad_key",
tenant_id=self.TEST_TENANT_ID,
auth_url=self.TEST_URL)
self.assertRaises(exceptions.Unauthorized, client_create_wrapper)
def test_auth_redirect(self):
correct_response = json.dumps(self.TEST_RESPONSE_DICT)
dict_responses = [
{
"headers": {
'location': self.TEST_ADMIN_URL + "/tokens",
},
"status": 305,
"body": "Use proxy",
},
{
"headers": {},
"status": 200,
"body": correct_response,
},
]
responses = [(to_http_response(resp), resp['body'])
for resp in dict_responses]
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn(responses[0])
httplib2.Http.request(self.TEST_ADMIN_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn(responses[1])
self.mox.ReplayAll()
cs = client.Client(username=self.TEST_USER,
password=self.TEST_TOKEN,
tenant_id=self.TEST_TENANT_ID,
auth_url=self.TEST_URL)
self.assertEqual(cs.management_url,
self.TEST_RESPONSE_DICT["access"]["serviceCatalog"][3]
['endpoints'][0]["adminURL"])
self.assertEqual(cs.auth_token,
self.TEST_RESPONSE_DICT["access"]["token"]["id"])
def test_authenticate_success_password_scoped(self):
resp = httplib2.Response({
"status": 200,
"body": json.dumps(self.TEST_RESPONSE_DICT),
})
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
self.mox.ReplayAll()
cs = client.Client(username=self.TEST_USER,
password=self.TEST_TOKEN,
tenant_id=self.TEST_TENANT_ID,
auth_url=self.TEST_URL)
self.assertEqual(cs.management_url,
self.TEST_RESPONSE_DICT["access"]["serviceCatalog"][3]
['endpoints'][0]["adminURL"])
self.assertEqual(cs.auth_token,
self.TEST_RESPONSE_DICT["access"]["token"]["id"])
def test_authenticate_success_password_unscoped(self):
del self.TEST_RESPONSE_DICT['access']['serviceCatalog']
del self.TEST_REQUEST_BODY['auth']['tenantId']
resp = httplib2.Response({
"status": 200,
"body": json.dumps(self.TEST_RESPONSE_DICT),
})
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
self.mox.ReplayAll()
cs = client.Client(username=self.TEST_USER,
password=self.TEST_TOKEN,
auth_url=self.TEST_URL)
self.assertEqual(cs.auth_token,
self.TEST_RESPONSE_DICT["access"]["token"]["id"])
self.assertFalse('serviceCatalog' in cs.service_catalog.catalog)
def test_authenticate_success_token_scoped(self):
del self.TEST_REQUEST_BODY['auth']['passwordCredentials']
self.TEST_REQUEST_BODY['auth']['token'] = {'id': self.TEST_TOKEN}
self.TEST_REQUEST_HEADERS['X-Auth-Token'] = self.TEST_TOKEN
resp = httplib2.Response({
"status": 200,
"body": json.dumps(self.TEST_RESPONSE_DICT),
})
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
self.mox.ReplayAll()
cs = client.Client(token=self.TEST_TOKEN,
tenant_id=self.TEST_TENANT_ID,
auth_url=self.TEST_URL)
self.assertEqual(cs.management_url,
self.TEST_RESPONSE_DICT["access"]["serviceCatalog"][3]
['endpoints'][0]["adminURL"])
self.assertEqual(cs.auth_token,
self.TEST_RESPONSE_DICT["access"]["token"]["id"])
def test_authenticate_success_token_unscoped(self):
del self.TEST_REQUEST_BODY['auth']['passwordCredentials']
del self.TEST_REQUEST_BODY['auth']['tenantId']
del self.TEST_RESPONSE_DICT['access']['serviceCatalog']
self.TEST_REQUEST_BODY['auth']['token'] = {'id': self.TEST_TOKEN}
self.TEST_REQUEST_HEADERS['X-Auth-Token'] = self.TEST_TOKEN
resp = httplib2.Response({
"status": 200,
"body": json.dumps(self.TEST_RESPONSE_DICT),
})
httplib2.Http.request(self.TEST_URL + "/tokens",
'POST',
body=json.dumps(self.TEST_REQUEST_BODY),
headers=self.TEST_REQUEST_HEADERS) \
.AndReturn((resp, resp['body']))
self.mox.ReplayAll()
cs = client.Client(token=self.TEST_TOKEN,
auth_url=self.TEST_URL)
self.assertEqual(cs.auth_token,
self.TEST_RESPONSE_DICT["access"]["token"]["id"])
self.assertFalse('serviceCatalog' in cs.service_catalog.catalog)
| apache-2.0 | 1,598,639,294,003,492,900 | 38.654867 | 79 | 0.495314 | false |
nipe0324/flask-todo-api | app.py | 1 | 2962 | from flask import Flask, jsonify, abort, url_for, request, make_response
from flask.ext.httpauth import HTTPBasicAuth
app = Flask(__name__)
auth = HTTPBasicAuth()
tasks = [
{
'id': 1,
'title': u'Buy groceries',
'description': u'Milk, Cheese, Pizza, Fruit, Tylenol',
'done': False
},
{
'id': 2,
'title': u'Learn Python',
'description': u'Need to find a good Python tutorial on the web',
'done': False
}
]
def make_public_task(task):
new_task = {}
for field in task:
if field == 'id':
new_task['uri'] = url_for('get_task', task_id=task['id'], _external=True)
else:
new_task[field] = task[field]
return new_task
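# Sketch of the transformation (the URL is illustrative and depends on the host):
# {'id': 1, 'title': 'Buy groceries', ...} becomes
# {'uri': 'http://localhost:5000/todo/api/v1.0/tasks/1', 'title': 'Buy groceries', ...}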
@auth.get_password
def get_password(username):
if username == 'root':
return 'pass'
return None
@auth.error_handler
def unauthorized():
return make_response(jsonify({'error': 'Unauthorized access'}), 401)
@app.route('/')
def index():
return "Hello, world!"
@app.route('/todo/api/v1.0/tasks', methods=['GET'])
@auth.login_required
def get_tasks():
return jsonify({'tasks': [make_public_task(task) for task in tasks]})
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['GET'])
@auth.login_required
def get_task(task_id):
task = [task for task in tasks if task['id'] == task_id]
if len(task) == 0:
abort(404)
    return jsonify({'task': task[0]})
@app.route('/todo/api/v1.0/tasks', methods=['POST'])
@auth.login_required
def create_task():
if not request.json or not 'title' in request.json:
abort(400)
task = {
'id': tasks[-1]['id'] + 1,
'title': request.json['title'],
'description': request.json.get('description', ""),
'done': False
}
tasks.append(task)
return jsonify({'task': task}), 201
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['PUT'])
@auth.login_required
def update_task(task_id):
task = [task for task in tasks if task['id'] == task_id]
if len(task) == 0:
abort(404)
if not request.json:
abort(400)
if 'title' in request.json and type(request.json['title']) != unicode:
abort(400)
if 'description' in request.json and type(request.json['description']) is not unicode:
abort(400)
if 'done' in request.json and type(request.json['done']) is not bool:
abort(400)
    task[0]['title'] = request.json.get('title', task[0]['title'])
    task[0]['description'] = request.json.get('description', task[0]['description'])
    task[0]['done'] = request.json.get('done', task[0]['done'])
    return jsonify({'task': task[0]})
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['DELETE'])
@auth.login_required
def delete_task(task_id):
task = [task for task in tasks if task['id'] == task_id]
if len(task) == 0:
abort(404)
tasks.remove(task[0])
return jsonify({'result': True})
@app.errorhandler(404)
def not_found(error):
return make_response(jsonify({'error': 'Not found'}), 404)
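# Illustrative sketch (not part of the original app): one possible way to exercise the API
# with Flask's built-in test client. The root/pass credentials are the ones hard-coded in
# get_password() above; everything else here is a placeholder.
def _example_list_tasks():
    import base64
    creds = base64.b64encode(b'root:pass').decode('ascii')
    with app.test_client() as client:
        rv = client.get('/todo/api/v1.0/tasks',
                        headers={'Authorization': 'Basic ' + creds})
        return rv.status_code, rv.data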
if __name__ == '__main__':
app.run(debug=True)
| apache-2.0 | -1,418,981,903,589,351,700 | 27.757282 | 90 | 0.599257 | false |
Zearin/python-ietflib | pyietflib/rfc6350/vcard.py | 1 | 4743 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""`vCard <http://tools.ietf.org/html/rfc6350>`_ object that contains
the information in structured form."""
__version__ = '1.0'
__copyright__ = """Copyright 2011 Lance Finn Helsten ([email protected])"""
__license__ = """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
if sys.version_info < (3, 2):
raise Exception("rfc6350 requires Python 3.2 or higher.")
import logging
import re
import string
from .property import *
from .parameter import *
import pyietflib.iso8601
__all__ = ['parse_vcard', 'vCard']
__log__ = logging.getLogger('rfc6350')
def parse_vcard(stream):
"""Given a binary `stream` that is UTF-8 encoded and RFC 6350
folded parse and return a single vCard from that stream."""
state = 'start'
vcard = None
contentline_parser = None
for line, linenum in contentline_generator(stream):
if state == 'content':
if re.match('^END:VCARD\r\n$', line):
state = 'end'
contentline_parser = None
vcard.validate()
return vcard
else:
assert contentline_parser
prop = contentline_parser(line, linenum)
if prop.name not in vcard:
vcard[prop.name] = []
vcard[prop.name].append(prop)
elif state == 'start':
if not re.match('^BEGIN:VCARD\r\n$', line):
raise ValueError('Invalid vCard BEGIN content-line[{0}]: "{1:.30s}...".'.format(linenum, line))
state = 'version'
continue
elif state == 'version':
mo = re.match(r'^VERSION:(?P<version>.+)\r\n$', line)
if not mo:
raise ValueError('Invalid vCard VERSION content-line[{0}]: "{1:.30s}...".'.format(linenum, line))
state = 'content'
version = mo.group('version')
if version == '4.0':
vcard = vCard()
contentline_parser = property_from_contentline
else:
raise ValueError('Invalid or unknown vCard version {0} on line {1}: "{2:.30s}...".'.format(version, linenum, line))
raise ValueError('Invalid vCard stream END contentline not found before EOF.')
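# Illustrative sketch (not part of the original module): feeding parse_vcard() an
# in-memory, UTF-8 encoded and CRLF-terminated stream. The FN property below is only
# an assumption about what the accompanying property module accepts.
def _example_parse_vcard():
    import io
    data = (b"BEGIN:VCARD\r\n"
            b"VERSION:4.0\r\n"
            b"FN:Jane Doe\r\n"
            b"END:VCARD\r\n")
    return parse_vcard(io.BytesIO(data))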
def contentline_generator(stream):
"""Generate unfolded and decoded content lines from the stream."""
linenum = 0
try:
unfold = None
unfoldline = 0
for line in stream:
linenum = linenum + 1
if line[-2:] != b'\r\n':
raise ValueError('Invalid line ending on line {0}: "{1:.30s}...".'.format(linenum, line))
line = line[:-2]
            if not line:
continue
if line[0] in b' \t':
if not unfold:
raise ValueError('Invalid line folding on line {0}: "{1:.30s}...".'.format(linenum, line))
while line[0] in b' \t':
line = line[1:]
unfold.extend(line)
elif not unfold:
unfold = bytearray(line)
unfoldline = linenum
else:
unfold.extend(b'\r\n')
yield (unfold.decode("UTF-8"), unfoldline)
unfold = bytearray(line)
unfoldline = linenum
else:
if unfold:
unfold.extend(b'\r\n')
yield (unfold.decode("UTF-8"), unfoldline)
except UnicodeDecodeError as err:
print(line)
raise ValueError('Invalid UTF-8 encoded stream on line {0}: "{1:.30s}...".'.format(linenum, line))
class vCard(dict):
"""Defines a structured vCard in accordance with `RFC 6350: vCard
Format Specification <http://tools.ietf.org/html/rfc6350>`_
that defines version 4 vCard.
"""
def __init__(self, version='4.0'):
self.version = version
def __str__(self):
raise NotImplementedError()
def __repr__(self):
return 'parse_vcard(r"""{0}""")'.format(str(self))
def validate(self):
"""Check that the vCard is valid IAW RFC 6350."""
if len(self) == 0:
raise ValueError("Invalid vCard: a property is required.")
| apache-2.0 | -7,209,633,680,010,250,000 | 34.133333 | 131 | 0.568206 | false |
openstreetmap/Nominatim | nominatim/nominatim.py | 1 | 12478 | #! /usr/bin/env python3
#-----------------------------------------------------------------------------
# nominatim - [description]
#-----------------------------------------------------------------------------
#
# Indexing tool for the Nominatim database.
#
# Based on C version by Brian Quinion
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#-----------------------------------------------------------------------------
from argparse import ArgumentParser, RawDescriptionHelpFormatter, ArgumentTypeError
import logging
import sys
import re
import getpass
from datetime import datetime
import psycopg2
from psycopg2.extras import wait_select
import select
log = logging.getLogger()
def make_connection(options, asynchronous=False):
params = {'dbname' : options.dbname,
'user' : options.user,
'password' : options.password,
'host' : options.host,
'port' : options.port,
'async' : asynchronous}
return psycopg2.connect(**params)
class RankRunner(object):
""" Returns SQL commands for indexing one rank within the placex table.
"""
def __init__(self, rank):
self.rank = rank
def name(self):
return "rank {}".format(self.rank)
def sql_index_sectors(self):
return """SELECT geometry_sector, count(*) FROM placex
WHERE rank_search = {} and indexed_status > 0
GROUP BY geometry_sector
ORDER BY geometry_sector""".format(self.rank)
def sql_nosector_places(self):
return """SELECT place_id FROM placex
WHERE indexed_status > 0 and rank_search = {}
ORDER BY geometry_sector""".format(self.rank)
def sql_sector_places(self):
return """SELECT place_id FROM placex
WHERE indexed_status > 0 and rank_search = {}
and geometry_sector = %s""".format(self.rank)
def sql_index_place(self):
return "UPDATE placex SET indexed_status = 0 WHERE place_id = %s"
class InterpolationRunner(object):
""" Returns SQL commands for indexing the address interpolation table
location_property_osmline.
"""
def name(self):
return "interpolation lines (location_property_osmline)"
def sql_index_sectors(self):
return """SELECT geometry_sector, count(*) FROM location_property_osmline
WHERE indexed_status > 0
GROUP BY geometry_sector
ORDER BY geometry_sector"""
def sql_nosector_places(self):
return """SELECT place_id FROM location_property_osmline
WHERE indexed_status > 0
ORDER BY geometry_sector"""
def sql_sector_places(self):
return """SELECT place_id FROM location_property_osmline
WHERE indexed_status > 0 and geometry_sector = %s
ORDER BY geometry_sector"""
def sql_index_place(self):
return """UPDATE location_property_osmline
SET indexed_status = 0 WHERE place_id = %s"""
class DBConnection(object):
""" A single non-blocking database connection.
"""
def __init__(self, options):
self.current_query = None
self.current_params = None
self.conn = None
self.connect()
def connect(self):
if self.conn is not None:
self.cursor.close()
self.conn.close()
self.conn = make_connection(options, asynchronous=True)
self.wait()
self.cursor = self.conn.cursor()
def wait(self):
""" Block until any pending operation is done.
"""
while True:
try:
wait_select(self.conn)
self.current_query = None
return
except psycopg2.extensions.TransactionRollbackError as e:
if e.pgcode == '40P01':
log.info("Deadlock detected (params = {}), retry."
.format(self.current_params))
self.cursor.execute(self.current_query, self.current_params)
else:
raise
except psycopg2.errors.DeadlockDetected:
self.cursor.execute(self.current_query, self.current_params)
def perform(self, sql, args=None):
""" Send SQL query to the server. Returns immediately without
blocking.
"""
self.current_query = sql
self.current_params = args
self.cursor.execute(sql, args)
def fileno(self):
""" File descriptor to wait for. (Makes this class select()able.)
"""
return self.conn.fileno()
def is_done(self):
""" Check if the connection is available for a new query.
Also checks if the previous query has run into a deadlock.
If so, then the previous query is repeated.
"""
if self.current_query is None:
return True
try:
if self.conn.poll() == psycopg2.extensions.POLL_OK:
self.current_query = None
return True
except psycopg2.extensions.TransactionRollbackError as e:
if e.pgcode == '40P01':
log.info("Deadlock detected (params = {}), retry.".format(self.current_params))
self.cursor.execute(self.current_query, self.current_params)
else:
raise
except psycopg2.errors.DeadlockDetected:
self.cursor.execute(self.current_query, self.current_params)
return False
class Indexer(object):
""" Main indexing routine.
"""
def __init__(self, options):
self.minrank = max(0, options.minrank)
self.maxrank = min(30, options.maxrank)
self.conn = make_connection(options)
self.threads = [DBConnection(options) for i in range(options.threads)]
def run(self):
""" Run indexing over the entire database.
"""
log.warning("Starting indexing rank ({} to {}) using {} threads".format(
self.minrank, self.maxrank, len(self.threads)))
for rank in range(self.minrank, self.maxrank):
self.index(RankRunner(rank))
if self.maxrank == 30:
self.index(InterpolationRunner())
self.index(RankRunner(self.maxrank))
def index(self, obj):
""" Index a single rank or table. `obj` describes the SQL to use
for indexing.
"""
log.warning("Starting {}".format(obj.name()))
cur = self.conn.cursor(name='main')
cur.execute(obj.sql_index_sectors())
total_tuples = 0
for r in cur:
total_tuples += r[1]
log.debug("Total number of rows; {}".format(total_tuples))
cur.scroll(0, mode='absolute')
next_thread = self.find_free_thread()
done_tuples = 0
rank_start_time = datetime.now()
sector_sql = obj.sql_sector_places()
index_sql = obj.sql_index_place()
min_grouped_tuples = total_tuples - len(self.threads) * 1000
next_info = 100 if log.isEnabledFor(logging.INFO) else total_tuples + 1
for r in cur:
sector = r[0]
# Should we do the remaining ones together?
do_all = done_tuples > min_grouped_tuples
pcur = self.conn.cursor(name='places')
if do_all:
pcur.execute(obj.sql_nosector_places())
else:
pcur.execute(sector_sql, (sector, ))
for place in pcur:
place_id = place[0]
log.debug("Processing place {}".format(place_id))
thread = next(next_thread)
thread.perform(index_sql, (place_id,))
done_tuples += 1
if done_tuples >= next_info:
now = datetime.now()
done_time = (now - rank_start_time).total_seconds()
tuples_per_sec = done_tuples / done_time
log.info("Done {} in {} @ {:.3f} per second - {} ETA (seconds): {:.2f}"
.format(done_tuples, int(done_time),
tuples_per_sec, obj.name(),
(total_tuples - done_tuples)/tuples_per_sec))
next_info += int(tuples_per_sec)
pcur.close()
if do_all:
break
cur.close()
for t in self.threads:
t.wait()
rank_end_time = datetime.now()
diff_seconds = (rank_end_time-rank_start_time).total_seconds()
log.warning("Done {}/{} in {} @ {:.3f} per second - FINISHED {}\n".format(
done_tuples, total_tuples, int(diff_seconds),
done_tuples/diff_seconds, obj.name()))
def find_free_thread(self):
""" Generator that returns the next connection that is free for
sending a query.
"""
ready = self.threads
command_stat = 0
while True:
for thread in ready:
if thread.is_done():
command_stat += 1
yield thread
# refresh the connections occasionaly to avoid potential
# memory leaks in Postgresql.
if command_stat > 100000:
for t in self.threads:
while not t.is_done():
wait_select(t.conn)
t.connect()
command_stat = 0
ready = self.threads
else:
ready, _, _ = select.select(self.threads, [], [])
        assert False, "Unreachable code"
def nominatim_arg_parser():
""" Setup the command-line parser for the tool.
"""
def h(s):
        return re.sub(r"\s\s+", " ", s)
p = ArgumentParser(description="Indexing tool for Nominatim.",
formatter_class=RawDescriptionHelpFormatter)
p.add_argument('-d', '--database',
dest='dbname', action='store', default='nominatim',
help='Name of the PostgreSQL database to connect to.')
p.add_argument('-U', '--username',
dest='user', action='store',
help='PostgreSQL user name.')
p.add_argument('-W', '--password',
dest='password_prompt', action='store_true',
help='Force password prompt.')
p.add_argument('-H', '--host',
dest='host', action='store',
help='PostgreSQL server hostname or socket location.')
p.add_argument('-P', '--port',
dest='port', action='store',
help='PostgreSQL server port')
p.add_argument('-r', '--minrank',
dest='minrank', type=int, metavar='RANK', default=0,
help='Minimum/starting rank.')
p.add_argument('-R', '--maxrank',
dest='maxrank', type=int, metavar='RANK', default=30,
help='Maximum/finishing rank.')
p.add_argument('-t', '--threads',
dest='threads', type=int, metavar='NUM', default=1,
help='Number of threads to create for indexing.')
p.add_argument('-v', '--verbose',
dest='loglevel', action='count', default=0,
help='Increase verbosity')
return p
if __name__ == '__main__':
logging.basicConfig(stream=sys.stderr, format='%(levelname)s: %(message)s')
options = nominatim_arg_parser().parse_args(sys.argv[1:])
log.setLevel(max(3 - options.loglevel, 0) * 10)
options.password = None
if options.password_prompt:
password = getpass.getpass("Database password: ")
options.password = password
Indexer(options).run()
| gpl-2.0 | 3,152,322,180,283,444,000 | 33.565097 | 95 | 0.552733 | false |
Azure/azure-sdk-for-python | tools/azure-devtools/setup.py | 1 | 2163 | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import io
from setuptools import setup
VERSION = "1.2.1"
CLASSIFIERS = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"License :: OSI Approved :: MIT License",
]
DEPENDENCIES = ["ConfigArgParse>=0.12.0", "six>=1.10.0", "vcrpy~=3.0.0"]
with io.open("README.rst", "r", encoding="utf-8") as f:
README = f.read()
setup(
name="azure-devtools",
version=VERSION,
description="Microsoft Azure Development Tools for SDK",
long_description=README,
license="MIT",
author="Microsoft Corporation",
author_email="[email protected]",
url="https://github.com/Azure/azure-python-devtools",
zip_safe=False,
classifiers=CLASSIFIERS,
packages=[
"azure_devtools",
"azure_devtools.scenario_tests",
"azure_devtools.perfstress_tests",
"azure_devtools.ci_tools",
],
entry_points={
"console_scripts": [
"perfstress = azure_devtools.perfstress_tests:run_perfstress_cmd",
"systemperf = azure_devtools.perfstress_tests:run_system_perfstress_tests_cmd",
],
},
extras_require={
"ci_tools": [
"PyGithub>=1.40", # Can Merge PR after 1.36, "requests" and tests after 1.40
"GitPython",
"requests>=2.0",
],
"systemperf": ["aiohttp>=3.0", "requests>=2.0", "tornado==6.0.3" "pycurl==7.43.0.5" "httpx==0.11.1"],
},
package_dir={"": "src"},
install_requires=DEPENDENCIES,
)
| mit | 8,086,753,318,997,490,000 | 32.796875 | 109 | 0.564494 | false |
vgrem/SharePointOnline-REST-Python-Client | tests/test_sharepoint_list.py | 1 | 2363 | from random import randint
from tests.sharepoint_case import SPTestCase
from office365.sharepoint.list_creation_information import ListCreationInformation
from office365.sharepoint.list_template_type import ListTemplateType
class TestSPList(SPTestCase):
target_list_id = None
target_list_title = "Tasks" + str(randint(0, 10000))
def test1_create_list(self):
list_properties = ListCreationInformation()
list_properties.AllowContentTypes = True
list_properties.BaseTemplate = ListTemplateType.TasksWithTimelineAndHierarchy
list_properties.Title = self.target_list_title
list_to_create = self.client.web.lists.add(list_properties)
self.client.execute_query()
self.assertEqual(list_properties.Title, list_to_create.properties['Title'])
self.__class__.target_list_id = list_to_create.properties['Id']
def test2_read_list(self):
list_to_read = self.client.web.lists.get_by_title(self.target_list_title)
self.client.load(list_to_read)
self.client.execute_query()
self.assertEqual(self.target_list_title, list_to_read.properties['Title'])
def test3_read_list_by_id(self):
list_to_read = self.client.web.lists.get_by_id(self.__class__.target_list_id)
self.client.load(list_to_read)
self.client.execute_query()
self.assertEqual(self.target_list_id, list_to_read.properties['Id'])
def test4_update_list(self):
list_to_update = self.client.web.lists.get_by_title(self.target_list_title)
self.target_list_title += "_updated"
list_to_update.set_property('Title', self.target_list_title)
list_to_update.update()
self.client.execute_query()
result = self.client.web.lists.filter("Title eq '{0}'".format(self.target_list_title))
self.client.load(result)
self.client.execute_query()
self.assertEqual(len(result), 1)
def test5_delete_list(self):
list_title = self.target_list_title + "_updated"
list_to_delete = self.client.web.lists.get_by_title(list_title)
list_to_delete.delete_object()
self.client.execute_query()
result = self.client.web.lists.filter("Title eq '{0}'".format(list_title))
self.client.load(result)
self.client.execute_query()
self.assertEqual(len(result), 0)
| mit | -8,524,878,684,743,771,000 | 41.196429 | 94 | 0.681337 | false |
yuginboy/from_GULP_to_FEFF | feff/libs/GaMnAs_sub_classes.py | 1 | 33150 | '''
* Created by Zhenia Syryanyy (Yevgen Syryanyy)
* e-mail: [email protected]
* License: this code is under GPL license
* Last modified: 2017-10-18
'''
import sys
import os
from copy import deepcopy
from collections import OrderedDict as odict
from itertools import cycle
from io import StringIO
import inspect
import numpy as np
from scipy.optimize import differential_evolution
import matplotlib.gridspec as gridspec
from matplotlib import pylab
import matplotlib.pyplot as plt
import scipy as sp
from scipy.interpolate import interp1d
from scipy.interpolate import Rbf, InterpolatedUnivariateSpline, splrep, splev, splprep
import re
from shutil import copyfile
from libs.dir_and_file_operations import listOfFilesFN, listOfFiles, listOfFilesFN_with_selected_ext
from feff.libs.numpy_group_by_ep_second_draft import group_by
from scipy.signal import savgol_filter
from feff.libs.fit_current_curve import return_fit_param, func, f_PM, f_diff_PM_for_2_T, \
linearFunc, f_PM_with_T, f_SPM_with_T
from scipy.optimize import curve_fit, leastsq
from feff.libs.math_libs import approx_errors
g_J_Mn2_plus = 5.92
g_J_Mn3_plus = 4.82
g_e = 2.0023 # G-factor Lande
mu_Bohr = 927.4e-26 # J/T
Navagadro = 6.02214e23 #1/mol
k_b = 1.38065e-23 #J/K
rho_GaAs = 5.3176e3 #kg/m3
mass_Molar_kg_GaAs = 144.645e-3 #kg/mol
mass_Molar_kg_Diamond = 12.011e-3 # diamond
rho_Diamond = 3.515e3
testX1 = [0.024, 0.026, 0.028, 0.03, 0.032, 0.034, 0.036, 0.038, 0.03, 0.0325]
testY1 = [0.6, 0.527361, 0.564139, 0.602, 0.640714, 0.676684, 0.713159, 0.7505, 0.9, 0.662469]
testArray = np.array([testX1, testY1])
def fromRowToColumn (Array = testArray):
    # if the number of columns is bigger than the number of rows, transpose the matrix
n,m = Array.shape
if n < m:
return Array.T
else:
return Array
def sortMatrixByFirstColumn(Array = fromRowToColumn(testArray), colnum = 0):
# return sorted by selected column number the matrix
return Array[Array[:, colnum].argsort()]
out = sortMatrixByFirstColumn()
# print('sorted out:')
# print(out)
# print('--')
def deleteNonUniqueElements(key = out[:, 0], val = out[:, 1]):
# calc the val.mean value for non-uniq key values
# u, idx = np.unique(Array[:, key_colnum], return_index=True)
return fromRowToColumn(np.array(group_by(key).mean(val)))
# print('mean :')
# print(deleteNonUniqueElements())
# print('--')
def from_EMU_cm3_to_A_by_m(moment_emu = 2300e-8, V_cm3 = 3e-6):
# return value of Magnetization in SI (A/m)
return (moment_emu / V_cm3)*1000
def concentration_from_Ms(Ms = 7667, J=2.5):
# return concentration from Ms = n*gj*mu_Bohr*J = n*p_exp*mu_Bohr
return Ms/mu_Bohr/J/g_e
def number_density(rho = rho_GaAs, M = mass_Molar_kg_GaAs):
# return concentration from Molar mass
return Navagadro*rho/M
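# Illustrative sketch (not part of the original analysis code): chaining the three
# helpers above to go from a raw SQUID moment in emu and a film volume in cm^3 to a
# magnetization in A/m, a Mn concentration in 1/m^3 and its share of the GaAs number
# density. The default numbers are placeholders, not measured values.
def example_concentration_estimate(moment_emu=2300e-8, V_cm3=3e-6, J=2.5):
    Ms = from_EMU_cm3_to_A_by_m(moment_emu=moment_emu, V_cm3=V_cm3)
    n_Mn = concentration_from_Ms(Ms=Ms, J=J)
    n_GaAs = number_density(rho=rho_GaAs, M=mass_Molar_kg_GaAs)
    return Ms, n_Mn, 100.0 * n_Mn / n_GaAs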
class MagneticPropertiesOfPhases:
def __init__(self):
self.concentration_ParaMagnetic = None
self.concentration_ParaMagnetic_error = None
self.J_total_momentum = 2.5
self.Mn_type = 'Mn2+' # Mn2+ or Mn3+
self.spin_type_cfg = 'high' # low or high
self.g_factor = g_e
self.mu_eff = g_J_Mn2_plus
self.info = ''
class MagneticData:
'''
    base class for storing spectra
'''
COLOR_CYCOL = cycle('bgrcmk')
def __init__(self):
self.magnetic_field = []
self.magnetic_moment = []
self.magnetic_moment_raw = []
self.label = []
self.do_plot = True
self.line_style = '-'
self.line_color = 'cornflowerblue'
self.line_width = 2
self.line_alpha = 1.0
self.line_marker_style = 'o'
self.line_marker_size = 4
self.line_marker_face_color = 'blue'
self.line_marker_edge_color = 'darkblue'
self.temperature = []
self.magnetic_field_shift = []
self.magnetic_moment_shift = []
# for intersection of region for two models:
self.accepted_indices = []
self.history_log = odict()
def append_history_log(self, case=''):
num = len(self.history_log)
num += 1
self.history_log[num] = case
def plot(self, ax=plt.gca()):
if self.do_plot:
ax.plot(self.magnetic_field, self.magnetic_moment,
linestyle=self.line_style,
color=self.line_color,
linewidth=self.line_width,
alpha=self.line_alpha,
label=self.label,
marker=self.line_marker_style,
markersize=self.line_marker_size,
markerfacecolor=self.line_marker_face_color,
markeredgecolor=self.line_marker_edge_color
)
class StructBase:
'''
    Describes the structure for a data set
'''
def __init__(self):
self.raw = MagneticData()
self.prepared_raw = MagneticData()
self.for_fit = MagneticData()
self.line = MagneticData()
self.fit = MagneticData()
self.magnetic_field_inflection_point = 30
self.magnetic_field_step = 0.1 #[T]
self.magnetic_field_minimum = 0
self.magnetic_field_maximum = 0
# obj for saving params about unique phases in current material:
self.current_magnetic_phase_data = MagneticPropertiesOfPhases()
# main magnetic params for a structure which could be saved in unique phases:
self.J_total_momentum = 2.5
self.Mn_type = 'Mn2+' # Mn2+ or Mn3+
self.spin_type_cfg = 'high' # low or high
self.g_factor = g_e
self.mu_eff = g_J_Mn2_plus
self.mu_eff_min = g_J_Mn2_plus - 0.1
self.mu_eff_max = g_J_Mn2_plus + 0.1
self.volumeOfTheFilm_GaMnAs = 0 #[m^3]
self.fit.magnetic_moment = []
self.forFit_y = []
self.forFit_x = []
self.zeroIndex = []
        # magnetic field value which defines the outside region where we fit the functions
        self.magnetic_field_value_for_fit = 3 # [T]
        # number density for the PM fit, only for the current temperature:
self.concentration_ParaMagnetic = 0
self.concentration_ParaMagnetic_error = 10
# linear coefficient from the line_subtracted procedure:
self.linear_coefficient = 0
# corrections for curve:
self.y_shift = 0
self.x_shift = 0
# 'IUS' - Interpolation using univariate spline
# 'RBF' - Interpolation using Radial basis functions
# Interpolation using RBF - multiquadrics
# 'Spline'
# 'Cubic'
# 'Linear'
self.typeOfFiltering = 'IUS'
self.R_factor = 100
self.std = 100
self.label_summary = ''
self.title = ''
self.font_size = 18
self.y_label = '$M(A/m)$'
self.x_label = '$B(T)$'
self.dict_of_magnetic_phases = odict()
def addDataToDict(self, current_data):
num = len(self.dict_of_magnetic_phases)
if isinstance(current_data, MagneticPropertiesOfPhases):
self.dict_of_magnetic_phases[num] = odict({'data': current_data})
def flushDictOfSpectra(self):
self.dict_of_magnetic_phases = odict()
def define_Mn_type_variables(self):
'''
        # unpaired electrons examples
        d-count   high spin   low spin   examples
        d4        4           2          Cr2+, Mn3+
        d5        5           1          Fe3+, Mn2+
        d6        4           0          Fe2+, Co3+
        d7        3           1          Co2+
        Table: High and low spin octahedral transition metal complexes.
'''
# ===================================================
# Mn2 +
# 5.916, 3d5 4s0, 5 unpaired e-, observed: 5.7 - 6.0 in [muB]
# self.mu_spin_only = np.sqrt(5*(5+2))
# Mn3 +
# 5.916, 3d4 4s0, 4 unpaired e-, observed: 4.8 - 4.9 in [muB]
# self.mu_spin_only = np.sqrt(4*(4+2))
if self.Mn_type == 'Mn2+':
if self.spin_type_cfg == 'high':
self.J_total_momentum = 2.5 # high spin
elif self.spin_type_cfg == 'low':
self.J_total_momentum = 1.5 # low spin ?
self.mu_eff = g_J_Mn2_plus
self.mu_eff_min = 5.7
self.mu_eff_max = 6.0
elif self.Mn_type == 'Mn3+':
if self.spin_type_cfg == 'low':
self.J_total_momentum = 2.0 # ? low-spin, probably because mu_eff is 4.82 from the experiment
elif self.spin_type_cfg == 'high':
self.J_total_momentum = 0.0 # high-spin
self.mu_eff = g_J_Mn3_plus
self.mu_eff_min = 4.8
self.mu_eff_max = 4.9
self.g_factor = self.mu_eff / self.J_total_momentum
def set_Mn2_plus_high(self):
self.Mn_type = 'Mn2+'
self.spin_type_cfg = 'high'
self.define_Mn_type_variables()
def set_Mn2_plus_low(self):
self.Mn_type = 'Mn2+'
self.spin_type_cfg = 'low'
self.define_Mn_type_variables()
def set_Mn3_plus_low(self):
self.Mn_type = 'Mn3+'
self.spin_type_cfg = 'low'
self.define_Mn_type_variables()
def save_magnetic_params_to_current_phase_obj(self):
self.current_magnetic_phase_data.J_total_momentum = self.J_total_momentum
self.current_magnetic_phase_data.Mn_type = self.Mn_type
self.current_magnetic_phase_data.g_factor = self.g_factor
self.current_magnetic_phase_data.spin_type_cfg = self.spin_type_cfg
self.current_magnetic_phase_data.concentration_ParaMagnetic = self.concentration_ParaMagnetic
self.current_magnetic_phase_data.concentration_ParaMagnetic_error = self.concentration_ParaMagnetic_error
self.current_magnetic_phase_data.mu_eff = self.mu_eff
def interpolate_data(self):
x = np.array(self.raw.magnetic_field)
y = np.array(self.raw.magnetic_moment)
if self.magnetic_field_minimum == self.magnetic_field_maximum:
self.magnetic_field_minimum = np.fix(10 * self.raw.magnetic_field.min()) / 10
self.magnetic_field_maximum = np.fix(10 * self.raw.magnetic_field.max()) / 10
if self.magnetic_field_minimum < self.raw.magnetic_field.min():
self.magnetic_field_minimum = np.fix(10 * self.raw.magnetic_field.min()) / 10
if self.magnetic_field_maximum > self.raw.magnetic_field.max():
self.magnetic_field_maximum = np.fix(10 * self.raw.magnetic_field.max()) / 10
self.fit.magnetic_field = \
np.r_[self.magnetic_field_minimum: self.magnetic_field_maximum: self.magnetic_field_step]
if self.typeOfFiltering == 'Linear':
f = interp1d(self.raw.magnetic_field, self.raw.magnetic_moment)
self.fit.magnetic_moment = f(self.fit.magnetic_field)
if self.typeOfFiltering == 'Cubic':
f = interp1d(self.raw.magnetic_field, self.raw.magnetic_moment, kind='cubic')
self.fit.magnetic_moment = f(self.fit.magnetic_field)
if self.typeOfFiltering == 'Spline':
tck = splrep(x, y, s=0)
self.fit.magnetic_moment = splev(self.fit.magnetic_field, tck, der=0)
if self.typeOfFiltering == 'IUS':
f = InterpolatedUnivariateSpline(self.raw.magnetic_field, self.raw.magnetic_moment)
self.fit.magnetic_moment = f(self.fit.magnetic_field)
if self.typeOfFiltering == 'RBF':
f = Rbf(self.raw.magnetic_field, self.raw.magnetic_moment, function = 'linear')
self.fit.magnetic_moment = f(self.fit.magnetic_field)
if abs(self.magnetic_field_minimum) == abs(self.magnetic_field_maximum):
self.y_shift = self.fit.magnetic_moment[-1] - abs(self.fit.magnetic_moment[0])
self.fit.magnetic_moment = self.fit.magnetic_moment - self.y_shift
self.fit.magnetic_moment_shift = self.y_shift
yy_0 = np.r_[0:self.fit.magnetic_moment[-1]:self.fit.magnetic_moment[-1]/100]
f_0 = interp1d(self.fit.magnetic_moment, self.fit.magnetic_field)
xx_0 = f_0(yy_0)
self.x_shift = xx_0[0]
self.fit.magnetic_field = self.fit.magnetic_field - self.x_shift
self.fit.magnetic_field_shift = self.x_shift
# we need to adjust new self.fit.magnetic_field values to a good precision:
if self.magnetic_field_minimum < self.fit.magnetic_field.min():
self.magnetic_field_minimum = np.fix(10 * self.fit.magnetic_field.min()) / 10
if self.magnetic_field_maximum > self.fit.magnetic_field.max():
self.magnetic_field_maximum = np.fix(10 * self.fit.magnetic_field.max()) / 10
xx = np.r_[self.magnetic_field_minimum: self.magnetic_field_maximum: self.magnetic_field_step]
self.zeroIndex = np.nonzero((np.abs(xx) < self.magnetic_field_step*1e-2))
xx[self.zeroIndex] = 0
f = interp1d(self.fit.magnetic_field, self.fit.magnetic_moment)
self.fit.magnetic_moment = f(xx)
self.fit.magnetic_field = xx
self.fit.append_history_log(
case='do interpolation with type of filtering: {}'.format(self.typeOfFiltering))
# store interpolated data of raw data in spacial object:
self.prepared_raw = deepcopy(self.fit)
self.for_fit = deepcopy(self.fit)
self.line = deepcopy(self.fit)
self.raw.label = 'raw: T={0}K'.format(self.raw.temperature)
self.prepared_raw.label = 'prep raw: T={0}K {1}'.format(self.prepared_raw.temperature, self.typeOfFiltering)
self.line.label = 'subtracted line: T={0}K {1}'.format(self.prepared_raw.temperature, self.typeOfFiltering)
self.for_fit.label = 'selected points for fit: T={0}K {1}'.format(self.fit.temperature, self.typeOfFiltering)
self.fit.label = 'fit: T={0}K {1}'.format(self.fit.temperature, self.typeOfFiltering)
def filtering(self):
# do some filtering operations under data:
if self.magnetic_field_minimum == self.magnetic_field_maximum:
self.magnetic_field_minimum = self.raw.magnetic_field.min()
self.magnetic_field_maximum = self.raw.magnetic_field.max()
self.fit.magnetic_field = np.r_[self.magnetic_field_minimum: self.magnetic_field_maximum: self.magnetic_field_step]
window_size, poly_order = 101, 3
self.fit.magnetic_moment = savgol_filter(self.fit.magnetic_moment, window_size, poly_order)
self.fit.append_history_log(
case='apply savgol filter: window = {}, poly_order = {}'.format(window_size, poly_order))
def line_subtracting(self):
indx_plus = (self.prepared_raw.magnetic_field >= self.magnetic_field_value_for_fit)
indx_minus = (self.prepared_raw.magnetic_field <= -self.magnetic_field_value_for_fit)
# > 0:
indx = indx_plus
self.for_fit.magnetic_field = self.prepared_raw.magnetic_field[indx]
self.for_fit.magnetic_moment = self.prepared_raw.magnetic_moment[indx]
# fit the data:
par_plus, pcov = curve_fit(linearFunc,
self.for_fit.magnetic_field,
self.for_fit.magnetic_moment
)
# < 0:
indx = indx_minus
self.for_fit.magnetic_field = self.prepared_raw.magnetic_field[indx]
self.for_fit.magnetic_moment = self.prepared_raw.magnetic_moment[indx]
# fit the data:
par_minus, pcov = curve_fit(linearFunc,
self.for_fit.magnetic_field,
self.for_fit.magnetic_moment
)
self.linear_coefficient = 0.5*(par_plus[0] + par_minus[0])
self.line.magnetic_moment = (self.linear_coefficient * self.prepared_raw.magnetic_field)
# store to for_fit object:
indx = np.logical_or(indx_minus, indx_plus)
self.for_fit.magnetic_field = self.prepared_raw.magnetic_field[indx]
self.for_fit.magnetic_moment = self.prepared_raw.magnetic_moment[indx]
self.prepared_raw.magnetic_moment -= (self.linear_coefficient * self.prepared_raw.magnetic_field)
self.prepared_raw.append_history_log('line k={coef} * B was subtracted'.format(coef=self.linear_coefficient))
self.line.label = 'subtracted line $M-\\mathbf{{k}} \\ast B$: $\\mathbf{{k}}={:1.5}$ '.format(self.linear_coefficient)
# self.for_fit = deepcopy(self.prepared_raw)
self.line.do_plot = True
def fit_PM_single_phase(self):
# do a fit procedure:
# indx = np.argwhere(self.fit.magnetic_field >= 3)
indx = (np.abs(self.prepared_raw.magnetic_field) >= self.magnetic_field_value_for_fit)
# indx = ((self.prepared_raw.magnetic_field) >= self.magnetic_field_value_for_fit)
self.for_fit.magnetic_field = self.prepared_raw.magnetic_field[indx]
self.for_fit.magnetic_moment = self.prepared_raw.magnetic_moment[indx]
self.forFit_x = (self.g_factor * self.J_total_momentum * mu_Bohr * self.for_fit.magnetic_field) \
/ k_b / self.fit.temperature
self.forFit_y = self.for_fit.magnetic_moment
# try to fit concentration of Mn atoms n[1/m^3*1e27]
def fun_tmp(x, n):
return f_PM(x, n, J=self.J_total_momentum, g_factor=self.g_factor)
popt, pcov = curve_fit(fun_tmp,
xdata=self.forFit_x,
ydata=self.forFit_y,
)
self.concentration_ParaMagnetic = popt[0] #[1/m^3*1e27]
self.concentration_ParaMagnetic_error = np.sqrt(np.diag(pcov[0]))
# calc x values for all values of magnetic_field range:
xx = (self.g_factor * self.J_total_momentum * mu_Bohr * self.fit.magnetic_field) \
/ k_b / self.fit.temperature
self.fit.magnetic_moment = fun_tmp(xx, self.concentration_ParaMagnetic)
# fight with uncertainty in 0 vicinity:
self.fit.magnetic_moment[self.zeroIndex] = 0
self.calc_R_factor(raw=self.prepared_raw.magnetic_moment, fit=self.fit.magnetic_moment)
self.fit.label = \
'\nfit [$R=\\mathbf{{{R:1.3}}}\%$, $\sigma=\\mathbf{{{std:1.3}}}$] ' \
'\n$g_{{factor}}=\\mathbf{{{g_f:1.3}}}$, T={temper:2.1g}K\n'\
'$J({Mn_type}$, ${spin_type})=\\mathbf{{{J:1.3}}}$ $[\mu_{{Bohr}}]$'\
'\n$n_{{{Mn_type}}}=({conc:1.4g}\\pm{conc_error:1.4g})\\ast10^{{27}} [1/m^3]$' \
'\n or $\\mathbf{{{conc_GaAs:1.3g}}}\%$ of $n(GaAs)$'.format(
g_f=float(self.g_factor),
R=float(self.R_factor),
std=float(self.std),
temper=float(self.fit.temperature),
Mn_type=self.Mn_type,
spin_type=self.spin_type_cfg,
J=float(self.J_total_momentum),
conc=float(self.concentration_ParaMagnetic),
conc_error=float(np.round(self.concentration_ParaMagnetic_error,4)),
conc_GaAs=float(self.concentration_ParaMagnetic / 22.139136 * 100),
)
print('->> fit PM (single PM phase) have been done. '
'For T = {0} K obtained n = {1:1.3g} *1e27 [1/m^3] or {2:1.3g} % of the n(GaAs)' \
.format(self.raw.temperature,
self.concentration_ParaMagnetic,
self.concentration_ParaMagnetic / 22.139136 * 100))
print('->> R = {R_f:1.5g} %'.format(R_f=self.R_factor))
print('->> J[{Mn_type}, {spin_type}] = {J:1.3} [mu(Bohr)]'.format(
Mn_type=self.Mn_type,
spin_type=self.spin_type_cfg,
J=float(self.J_total_momentum)))
def multi_phase_PM_func(self, n_concentration, magnetic_field, temperature, zero_index=None):
# calc Brillouin function for multi-phase sample
num = len(self.dict_of_magnetic_phases)
len_of_x = len(magnetic_field)
# vector for magnetic calculation:
vec_x = np.zeros(len_of_x)
#concentration of Mn atoms n[1/m^3*1e27]
out = np.zeros(len_of_x)
for i in self.dict_of_magnetic_phases:
val = self.dict_of_magnetic_phases[i]
n = n_concentration[i]
J = val['data'].J_total_momentum
g = val['data'].g_factor
Mn_type = val['data'].Mn_type
spin_type = val['data'].spin_type_cfg
tmp = np.zeros(len_of_x)
# create unique x-vector for Brillouin function:
vec_x = (g * J * mu_Bohr * magnetic_field) \
/ k_b / temperature
# tmp = f_PM(x=vec_x, n=n, J=J, g_factor=g)
tmp = f_PM_with_T(B=magnetic_field, n=n, J=J, T=temperature, g_factor=g)
# fight with uncertainty in 0 vicinity:
if zero_index is not None:
tmp[zero_index] = 0
out += tmp
return out
def fit_PM_multi_phase(self):
# do a fit procedure for the multi-phases magnetic material:
# indx = np.argwhere(self.fit.magnetic_field >= 3)
indx = (np.abs(self.prepared_raw.magnetic_field) >= self.magnetic_field_value_for_fit)
# indx = ((self.prepared_raw.magnetic_field) >= self.magnetic_field_value_for_fit)
self.for_fit.magnetic_field = self.prepared_raw.magnetic_field[indx]
self.for_fit.magnetic_moment = self.prepared_raw.magnetic_moment[indx]
self.forFit_y = self.for_fit.magnetic_moment
len_of_vec = len(self.forFit_y)
self.forFit_x = self.for_fit.magnetic_field
num = len(self.dict_of_magnetic_phases)
# try to fit concentration of Mn atoms n[1/m^3*1e27]
# construct tmp function for a minimization procedure:
def fun_tmp(n_concentration):
out = np.zeros(len_of_vec)
out = self.multi_phase_PM_func(n_concentration,
magnetic_field=self.forFit_x,
temperature=self.fit.temperature)
return self.get_R_factor(raw=self.forFit_y, fit=out)
# create bounds:
bounds = []
for i in range(num):
bounds.append((0, 10))
res = differential_evolution(fun_tmp, bounds)
self.concentration_ParaMagnetic = res.x #[1/m^3*1e27]
s2 = self.get_std(raw=self.forFit_y,
fit=self.multi_phase_PM_func(self.concentration_ParaMagnetic,
magnetic_field=self.forFit_x,
temperature=self.fit.temperature
)
)
se = approx_errors(fun_tmp, self.concentration_ParaMagnetic, epsilon=0.01*np.min(self.concentration_ParaMagnetic))
std = np.sqrt(s2) * se
self.concentration_ParaMagnetic_error = std
self.fit.magnetic_moment = self.multi_phase_PM_func(
self.concentration_ParaMagnetic,
magnetic_field=self.fit.magnetic_field,
temperature=self.fit.temperature,
zero_index=self.zeroIndex
)
# fight with uncertainty in 0 vicinity:
# self.fit.magnetic_moment[self.zeroIndex] = 0
self.calc_R_factor(raw=self.prepared_raw.magnetic_moment, fit=self.fit.magnetic_moment)
# write label for plotting:
self.fit.label = \
'\nFit [$R=\\mathbf{{{R:1.3}}}\%$, $\sigma=\\mathbf{{{std:1.3}}}$], T={temper:2.1g}K\n\n'.format(
R=float(self.R_factor),
std=float(self.std),
temper=float(self.fit.temperature),
)
tmp_txt = ''
for i in self.dict_of_magnetic_phases:
val = self.dict_of_magnetic_phases[i]
n = self.concentration_ParaMagnetic[i]
n_std = self.concentration_ParaMagnetic_error[i]
J = val['data'].J_total_momentum
g = val['data'].g_factor
Mn_type = val['data'].Mn_type
spin_type = val['data'].spin_type_cfg
tmp_txt += 'phase $\\mathbf{{{num_of_phase:}}}$:\n'\
'$g_{{factor}}=\\mathbf{{{g_f:1.3}}}$, $J({Mn_type}$, ${spin_type})=\\mathbf{{{J:1.3}}}$ $[\mu_{{Bohr}}]$'\
.format(
num_of_phase = i,
g_f=float(g),
Mn_type=Mn_type,
J=float(J),
spin_type=spin_type,
)
tmp_txt += '\n$n_{{{Mn_type}}}=({conc:1.4g}\\pm{conc_error:1.4g})\\ast10^{{27}} [1/m^3]$' \
'\n or $\\mathbf{{{conc_GaAs:1.3g}}}\%$ of $n(GaAs)$\n'.format(
Mn_type=Mn_type,
spin_type=spin_type,
conc=float(n),
conc_error=float(np.round(n_std, 4)),
conc_GaAs=float(n / 22.139136 * 100),
)
self.fit.label += tmp_txt
print('==='*15)
print(' fit PM (multi PM phases) have been done. '
'For T = {0} K obtained:' \
.format(self.raw.temperature,)
)
print(' R = {R_f:1.5g} %'.format(R_f=self.R_factor))
for i in self.dict_of_magnetic_phases:
val = self.dict_of_magnetic_phases[i]
n = self.concentration_ParaMagnetic[i]
n_std = self.concentration_ParaMagnetic_error[i]
J = val['data'].J_total_momentum
g = val['data'].g_factor
Mn_type = val['data'].Mn_type
spin_type = val['data'].spin_type_cfg
print('------- phases #{}:'.format(i))
print(' n = ( {n:1.3g} +/- {err:1.4g} )*1e27 [1/m^3] or {n_2:1.3g} % of the n(GaAs)'.format(
n=n,
n_2=n / 22.139136 * 100,
err=n_std
)
)
print(' J[{Mn_type}, {spin_type}] = {J:1.3} [mu(Bohr)]'.format(
Mn_type=Mn_type,
spin_type=spin_type,
J=float(J)))
print('===' * 15)
def set_default_line_params(self):
self.raw.line_style = 'None'
self.raw.line_marker_size = 6
self.raw.line_alpha = 0.2
self.raw.line_marker_face_color = next(MagneticData.COLOR_CYCOL)
self.line.line_style = '-'
self.line.do_plot = False
self.line.line_width = 3
self.line.line_color = 'r'
self.line.line_alpha = 0.3
self.line.line_marker_style = 'None'
self.prepared_raw.line_style = 'None'
self.prepared_raw.line_marker_size = 6
self.prepared_raw.line_marker_style = 'v'
self.prepared_raw.line_alpha = 0.3
self.prepared_raw.line_marker_face_color = next(MagneticData.COLOR_CYCOL)
self.for_fit.line_style = 'None'
self.for_fit.line_marker_size = 12
self.for_fit.line_marker_style = 'D'
self.for_fit.line_alpha = 0.2
self.for_fit.line_marker_face_color = 'g'
self.for_fit.line_marker_edge_color = next(MagneticData.COLOR_CYCOL)
self.fit.line_style = 'None'
self.fit.line_marker_size = 9
self.fit.line_alpha = 0.3
self.fit.line_marker_face_color = next(MagneticData.COLOR_CYCOL)
def plot(self, ax=plt.gca()):
self.raw.plot(ax)
self.line.plot(ax)
self.prepared_raw.plot(ax)
self.for_fit.plot(ax)
self.fit.plot(ax)
ax.set_ylabel(self.y_label, fontsize=16, fontweight='bold')
ax.set_xlabel(self.x_label, fontsize=16, fontweight='bold')
ax.grid(True)
ax.set_title(self.title, fontsize=self.font_size)
# ax.legend(shadow=True, fancybox=True, loc='best')
ax.legend(shadow=False, fancybox=True, loc='best')
# ax.fill_between(x, y - error, y + error,
# alpha=0.2, edgecolor='#1B2ACC', facecolor='#089FFF',
# linewidth=4, linestyle='dashdot', antialiased=True, label='$\chi(k)$')
def calc_R_factor(self, raw=[], fit=[]):
# eval R-factor
denominator = np.sum(np.abs(raw))
if (len(raw) == len(fit)) and (denominator != 0):
self.R_factor = 100 * np.sum(np.abs(raw - fit))/denominator
self.std = np.sqrt(
np.sum(
(raw - fit)**2
) / ( len(raw) -1 )
)
else:
print('raw = {} and fit = {}'.format(len(raw), len(fit)))
def get_R_factor(self, raw=[], fit=[]):
self.calc_R_factor(raw, fit)
return self.R_factor
def get_std(self, raw=[], fit=[]):
self.calc_R_factor(raw, fit)
return self.std
class StructComplex(StructBase):
def __init__(self):
super().__init__()
self.model_A = StructBase()
self.model_B = StructBase()
def set_global_Mn2_plus_high(self):
self.model_A.set_Mn2_plus_high()
self.model_B.set_Mn2_plus_high()
def set_global_Mn2_plus_low(self):
self.model_A.set_Mn2_plus_low()
self.model_B.set_Mn2_plus_low()
def set_global_Mn3_plus_low(self):
self.model_A.set_Mn3_plus_low()
self.model_B.set_Mn3_plus_low()
def find_common_region_for_fit(self):
if len(self.model_A.prepared_raw.magnetic_field) != len(self.model_B.prepared_raw.magnetic_field):
print('len(T={T1}K)={L1} but len(T={T2}K)={L2}'.format(
T1=self.model_A.prepared_raw.temperature,
L1=len(self.model_A.prepared_raw.magnetic_field),
T2=self.model_B.prepared_raw.temperature,
L2=len(self.model_B.prepared_raw.magnetic_field)
))
# Find the intersection of two arrays to avoid conflict with numbers of elements.
self.model_A.prepared_raw.accepted_indices = np.nonzero(
np.isin(self.model_A.prepared_raw.magnetic_field,
self.model_B.prepared_raw.magnetic_field))
self.model_B.prepared_raw.accepted_indices = np.nonzero(
np.isin(self.model_B.prepared_raw.magnetic_field,
self.model_A.prepared_raw.magnetic_field))
def prepare_data_for_diff_calc(self):
self.raw.magnetic_field = self.model_A.prepared_raw.magnetic_field[self.model_A.prepared_raw.accepted_indices]
# for calculating diff_PM we need 2 different Temperature data for ex: m(T=2K) - m(T=5K)
# select only common points in two models:
self.raw.magnetic_moment = \
self.model_A.prepared_raw.magnetic_moment[self.model_A.prepared_raw.accepted_indices] - \
self.model_B.prepared_raw.magnetic_moment[self.model_B.prepared_raw.accepted_indices]
self.prepared_raw = deepcopy(self.raw)
# shift spectra to the center line:
if len(self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field > 0)]) \
!= \
len(self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field < 0)]):
# reduce a noise:
negVal = abs(np.min(self.prepared_raw.magnetic_field))
pozVal = np.max(self.prepared_raw.magnetic_field)
if pozVal >= negVal:
limitVal = negVal
else:
limitVal = pozVal
eps = 0.001 * abs(abs(self.prepared_raw.magnetic_field[0]) - abs(self.prepared_raw.magnetic_field[1]))
self.prepared_raw.magnetic_field = self.prepared_raw.magnetic_field[
np.logical_or((np.abs(self.prepared_raw.magnetic_field) <= limitVal + eps),
(np.abs(self.prepared_raw.magnetic_field) <= eps))
]
Mp = []
Mn = []
Mp = self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field > 0)]
Mn = self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field < 0)]
if len(self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field > 0)]) \
== \
len(self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field < 0)]):
# reduce a noise:
Mp = self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field > 0)]
Mn = self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field < 0)]
self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field > 0)] = \
0.5 * (Mp + np.abs(Mn[::-1]))
self.prepared_raw.magnetic_moment[np.where(self.prepared_raw.magnetic_field < 0)] = \
0.5 * (Mn - np.abs(Mp[::-1]))
# M_for_fit[(B > 0)] = 0.5*(Mp + np.abs(Mn))
# M_for_fit[(B < 0)] = 0.5*(Mn - np.abs(Mp))
self.for_fit = deepcopy(self.prepared_raw)
self.fit = deepcopy(self.prepared_raw)
if __name__ == '__main__':
tmp_obj = StructComplex()
print('-> you run ', __file__, ' file in a main mode') | gpl-3.0 | 3,044,061,559,392,634,400 | 41.886158 | 126 | 0.577074 | false |
Netflix-Skunkworks/napalm-base | napalm_base/base.py | 1 | 57375 | # Copyright 2015 Spotify AB. All rights reserved.
#
# The contents of this file are licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
# Python3 support
from __future__ import print_function
from __future__ import unicode_literals
# std libs
import sys
# local modules
import napalm_base.exceptions
import napalm_base.helpers
import napalm_base.constants as c
from napalm_base import validate
class NetworkDriver(object):
def __init__(self, hostname, username, password, timeout=60, optional_args=None):
"""
This is the base class you have to inherit from when writing your own Network Driver to
manage any device. You will, in addition, have to override all the methods specified on
this class. Make sure you follow the guidelines for every method and that you return the
correct data.
:param hostname: (str) IP or FQDN of the device you want to connect to.
:param username: (str) Username you want to use
:param password: (str) Password
:param timeout: (int) Time in seconds to wait for the device to respond.
:param optional_args: (dict) Pass additional arguments to underlying driver
:return:
"""
raise NotImplementedError
def __enter__(self):
try:
self.open()
except: # noqa
exc_info = sys.exc_info()
self.__raise_clean_exception(exc_info[0], exc_info[1], exc_info[2])
return self
def __exit__(self, exc_type, exc_value, exc_traceback):
self.close()
if exc_type is not None:
self.__raise_clean_exception(exc_type, exc_value, exc_traceback)
def __del__(self):
"""
This method is used to cleanup when the program is terminated suddenly.
We need to make sure the connection is closed properly and the configuration DB
is released (unlocked).
"""
try:
self.close()
except OSError as e:
# Ignore if socket was already closed
if 'is closed' in str(e):
pass
@staticmethod
def __raise_clean_exception(exc_type, exc_value, exc_traceback):
"""
        This method checks whether the exception exc_type is one of the builtin exceptions
        or one of the napalm exceptions. If it is not, it will print a message on the screen
        with instructions on how to file a bug report.
Finally it will raise the original exception.
:param exc_type: Exception class.
:param exc_value: Exception object.
:param exc_traceback: Traceback.
"""
if (exc_type.__name__ not in dir(napalm_base.exceptions) and
exc_type.__name__ not in __builtins__.keys()):
epilog = ("NAPALM didn't catch this exception. Please, fill a bugfix on "
"https://github.com/napalm-automation/napalm/issues\n"
"Don't forget to include this traceback.")
print(epilog)
# Traceback should already be attached to exception; no need to re-attach
raise exc_value
def open(self):
"""
Opens a connection to the device.
"""
raise NotImplementedError
def close(self):
"""
Closes the connection to the device.
"""
raise NotImplementedError
def is_alive(self):
"""
Returns a flag with the connection state.
Depends on the nature of API used by each driver.
        The state does not reflect only the connection status (when using SSH); it must also
        take into consideration other parameters, e.g. a NETCONF session might not be usable
        although the underlying SSH session is still open.
"""
raise NotImplementedError
def load_template(self, template_name, template_source=None,
template_path=None, **template_vars):
"""
Will load a templated configuration on the device.
:param cls: Instance of the driver class.
:param template_name: Identifies the template name.
:param template_source (optional): Custom config template rendered and loaded on device
:param template_path (optional): Absolute path to directory for the configuration templates
:param template_vars: Dictionary with arguments to be used when the template is rendered.
:raise DriverTemplateNotImplemented: No template defined for the device type.
:raise TemplateNotImplemented: The template specified in template_name does not exist in \
the default path or in the custom path if any specified using parameter `template_path`.
        :raise TemplateRenderException: The template could not be rendered. Either the template \
        source does not have the right format, or the arguments in `template_vars` are not \
        properly specified.
"""
return napalm_base.helpers.load_template(self,
template_name,
template_source=template_source,
template_path=template_path,
**template_vars)
def load_replace_candidate(self, filename=None, config=None):
"""
Populates the candidate configuration. You can populate it from a file or from a string.
If you send both a filename and a string containing the configuration, the file takes
precedence.
If you use this method the existing configuration will be replaced entirely by the
candidate configuration once you commit the changes. This method will not change the
configuration by itself.
:param filename: Path to the file containing the desired configuration. By default is None.
:param config: String containing the desired configuration.
:raise ReplaceConfigException: If there is an error on the configuration sent.
"""
raise NotImplementedError
def load_merge_candidate(self, filename=None, config=None):
"""
Populates the candidate configuration. You can populate it from a file or from a string.
If you send both a filename and a string containing the configuration, the file takes
precedence.
If you use this method the existing configuration will be merged with the candidate
configuration once you commit the changes. This method will not change the configuration
by itself.
:param filename: Path to the file containing the desired configuration. By default is None.
:param config: String containing the desired configuration.
:raise MergeConfigException: If there is an error on the configuration sent.
"""
raise NotImplementedError
def compare_config(self):
"""
:return: A string showing the difference between the running configuration and the \
candidate configuration. The running_config is loaded automatically just before doing the \
comparison so there is no need for you to do it.
"""
raise NotImplementedError
def commit_config(self):
"""
Commits the changes requested by the method load_replace_candidate or load_merge_candidate.
"""
raise NotImplementedError
def discard_config(self):
"""
Discards the configuration loaded into the candidate.
"""
raise NotImplementedError
def rollback(self):
"""
If changes were made, revert changes to the original state.
"""
raise NotImplementedError
def get_facts(self):
"""
Returns a dictionary containing the following information:
* uptime - Uptime of the device in seconds.
* vendor - Manufacturer of the device.
* model - Device model.
* hostname - Hostname of the device
* fqdn - Fqdn of the device
* os_version - String with the OS version running on the device.
* serial_number - Serial number of the device
* interface_list - List of the interfaces of the device
Example::
{
'uptime': 151005.57332897186,
'vendor': u'Arista',
'os_version': u'4.14.3-2329074.gaatlantarel',
'serial_number': u'SN0123A34AS',
'model': u'vEOS',
'hostname': u'eos-router',
'fqdn': u'eos-router',
'interface_list': [u'Ethernet2', u'Management1', u'Ethernet1', u'Ethernet3']
}
"""
raise NotImplementedError
def get_interfaces(self):
"""
        Returns a dictionary of dictionaries. The keys for the first dictionary will be the \
        interfaces in the device. The inner dictionary will contain the following data for \
        each interface:
* is_up (True/False)
* is_enabled (True/False)
* description (string)
* last_flapped (int in seconds)
* speed (int in Mbit)
* mac_address (string)
Example::
{
u'Management1':
{
'is_up': False,
'is_enabled': False,
'description': u'',
'last_flapped': -1,
'speed': 1000,
'mac_address': u'dead:beef:dead',
},
u'Ethernet1':
{
'is_up': True,
'is_enabled': True,
'description': u'foo',
'last_flapped': 1429978575.1554043,
'speed': 1000,
'mac_address': u'beef:dead:beef',
},
u'Ethernet2':
{
'is_up': True,
'is_enabled': True,
'description': u'bla',
'last_flapped': 1429978575.1555667,
'speed': 1000,
'mac_address': u'beef:beef:beef',
},
u'Ethernet3':
{
'is_up': False,
'is_enabled': True,
'description': u'bar',
'last_flapped': -1,
'speed': 1000,
'mac_address': u'dead:dead:dead',
}
}
"""
raise NotImplementedError
def get_lldp_neighbors(self):
"""
Returns a dictionary where the keys are local ports and the value is a list of \
dictionaries with the following information:
* hostname
* port
Example::
{
u'Ethernet2':
[
{
'hostname': u'junos-unittest',
'port': u'520',
}
],
u'Ethernet3':
[
{
'hostname': u'junos-unittest',
'port': u'522',
}
],
u'Ethernet1':
[
{
'hostname': u'junos-unittest',
'port': u'519',
},
{
'hostname': u'ios-xrv-unittest',
'port': u'Gi0/0/0/0',
}
],
u'Management1':
[
{
'hostname': u'junos-unittest',
'port': u'508',
}
]
}
"""
raise NotImplementedError
def get_bgp_neighbors(self):
"""
Returns a dictionary of dictionaries. The keys for the first dictionary will be the vrf
(global if no vrf). The inner dictionary will contain the following data for each vrf:
* router_id
* peers - another dictionary of dictionaries. Outer keys are the IPs of the neighbors. \
The inner keys are:
* local_as (int)
* remote_as (int)
* remote_id - peer router id
* is_up (True/False)
* is_enabled (True/False)
* description (string)
* uptime (int in seconds)
* address_family (dictionary) - A dictionary of address families available for the \
neighbor. So far it can be 'ipv4' or 'ipv6'
* received_prefixes (int)
* accepted_prefixes (int)
* sent_prefixes (int)
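        Example (illustrative only; the values below are placeholders following the keys
        described above)::
            {
                u'global': {
                    'router_id': u'10.0.1.1',
                    'peers': {
                        u'10.0.0.2': {
                            'local_as': 65000,
                            'remote_as': 65001,
                            'remote_id': u'10.0.0.2',
                            'is_up': True,
                            'is_enabled': True,
                            'description': u'internal peer',
                            'uptime': 4857,
                            'address_family': {
                                u'ipv4': {
                                    'received_prefixes': 300,
                                    'accepted_prefixes': 299,
                                    'sent_prefixes': 10
                                }
                            }
                        }
                    }
                }
            }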
"""
raise NotImplementedError
def get_environment(self):
"""
Returns a dictionary where:
* fans is a dictionary of dictionaries where the key is the location and the values:
* status (True/False) - True if it's ok, false if it's broken
* temperature is a dict of dictionaries where the key is the location and the values:
* temperature (float) - Temperature in celsius the sensor is reporting.
* is_alert (True/False) - True if the temperature is above the alert threshold
* is_critical (True/False) - True if the temp is above the critical threshold
* power is a dictionary of dictionaries where the key is the PSU id and the values:
* status (True/False) - True if it's ok, false if it's broken
* capacity (float) - Capacity in W that the power supply can support
* output (float) - Watts drawn by the system
* cpu is a dictionary of dictionaries where the key is the ID and the values
* %usage
* memory is a dictionary with:
* available_ram (int) - Total amount of RAM installed in the device
* used_ram (int) - RAM in use in the device
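        Example (illustrative only; the values below are placeholders following the keys
        described above)::
            {
                'fans': {
                    'left': {
                        'status': True
                    }
                },
                'temperature': {
                    'cpu': {
                        'temperature': 45.0,
                        'is_alert': False,
                        'is_critical': False
                    }
                },
                'power': {
                    '1': {
                        'status': True,
                        'capacity': 1200.0,
                        'output': 170.0
                    }
                },
                'cpu': {
                    '0': {
                        '%usage': 30.0
                    }
                },
                'memory': {
                    'available_ram': 16349228,
                    'used_ram': 4563320
                }
            }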
"""
raise NotImplementedError
def get_interfaces_counters(self):
"""
Returns a dictionary of dictionaries where the first key is an interface name and the
inner dictionary contains the following keys:
* tx_errors (int)
* rx_errors (int)
* tx_discards (int)
* rx_discards (int)
* tx_octets (int)
* rx_octets (int)
* tx_unicast_packets (int)
* rx_unicast_packets (int)
* tx_multicast_packets (int)
* rx_multicast_packets (int)
* tx_broadcast_packets (int)
* rx_broadcast_packets (int)
Example::
{
u'Ethernet2': {
'tx_multicast_packets': 699,
'tx_discards': 0,
'tx_octets': 88577,
'tx_errors': 0,
'rx_octets': 0,
'tx_unicast_packets': 0,
'rx_errors': 0,
'tx_broadcast_packets': 0,
'rx_multicast_packets': 0,
'rx_broadcast_packets': 0,
'rx_discards': 0,
'rx_unicast_packets': 0
},
u'Management1': {
'tx_multicast_packets': 0,
'tx_discards': 0,
'tx_octets': 159159,
'tx_errors': 0,
'rx_octets': 167644,
'tx_unicast_packets': 1241,
'rx_errors': 0,
'tx_broadcast_packets': 0,
'rx_multicast_packets': 0,
'rx_broadcast_packets': 80,
'rx_discards': 0,
'rx_unicast_packets': 0
},
u'Ethernet1': {
'tx_multicast_packets': 293,
'tx_discards': 0,
'tx_octets': 38639,
'tx_errors': 0,
'rx_octets': 0,
'tx_unicast_packets': 0,
'rx_errors': 0,
'tx_broadcast_packets': 0,
'rx_multicast_packets': 0,
'rx_broadcast_packets': 0,
'rx_discards': 0,
'rx_unicast_packets': 0
}
}
"""
raise NotImplementedError
def get_lldp_neighbors_detail(self, interface=''):
"""
Returns a detailed view of the LLDP neighbors as a dictionary
containing lists of dictionaries for each interface.
Inner dictionaries contain fields:
* parent_interface (string)
* remote_port (string)
* remote_port_description (string)
* remote_chassis_id (string)
* remote_system_name (string)
* remote_system_description (string)
* remote_system_capab (string)
            * remote_system_enable_capab (string)
Example::
{
'TenGigE0/0/0/8': [
{
'parent_interface': u'Bundle-Ether8',
'remote_chassis_id': u'8c60.4f69.e96c',
'remote_system_name': u'switch',
'remote_port': u'Eth2/2/1',
'remote_port_description': u'Ethernet2/2/1',
'remote_system_description': u'''Cisco Nexus Operating System (NX-OS)
Software 7.1(0)N1(1a)
TAC support: http://www.cisco.com/tac
Copyright (c) 2002-2015, Cisco Systems, Inc. All rights reserved.''',
'remote_system_capab': u'B, R',
'remote_system_enable_capab': u'B'
}
]
}
"""
raise NotImplementedError
def get_bgp_config(self, group='', neighbor=''):
"""
Returns a dictionary containing the BGP configuration.
        Can return either the whole config, or only the config of a specific group or neighbor.
:param group: Returns the configuration of a specific BGP group.
:param neighbor: Returns the configuration of a specific BGP neighbor.
Main dictionary keys represent the group name and the values represent a dictionary having
the keys below. Neighbors which aren't members of a group will be stored in a key named "_":
* type (string)
* description (string)
* apply_groups (string list)
* multihop_ttl (int)
* multipath (True/False)
* local_address (string)
* local_as (int)
* remote_as (int)
* import_policy (string)
* export_policy (string)
* remove_private_as (True/False)
* prefix_limit (dictionary)
* neighbors (dictionary)
Neighbors is a dictionary of dictionaries with the following keys:
* description (string)
* import_policy (string)
* export_policy (string)
* local_address (string)
* local_as (int)
* remote_as (int)
* authentication_key (string)
* prefix_limit (dictionary)
* route_reflector_client (True/False)
* nhs (True/False)
The inner dictionary prefix_limit has the same structure for both layers::
{
[FAMILY_NAME]: {
[FAMILY_TYPE]: {
'limit': [LIMIT],
... other options
}
}
}
Example::
{
'PEERS-GROUP-NAME':{
'type' : u'external',
'description' : u'Here we should have a nice description',
'apply_groups' : [u'BGP-PREFIX-LIMIT'],
'import_policy' : u'PUBLIC-PEER-IN',
'export_policy' : u'PUBLIC-PEER-OUT',
'remove_private_as' : True,
'multipath' : True,
'multihop_ttl' : 30,
'neighbors' : {
'192.168.0.1': {
'description' : 'Facebook [CDN]',
'prefix_limit' : {
'inet': {
'unicast': {
'limit': 100,
'teardown': {
'threshold' : 95,
'timeout' : 5
}
}
}
                            },
'remote_as' : 32934,
'route_reflector_client': False,
'nhs' : True
},
'172.17.17.1': {
'description' : 'Twitter [CDN]',
'prefix_limit' : {
'inet': {
'unicast': {
'limit': 500,
'no-validate': 'IMPORT-FLOW-ROUTES'
}
}
                            },
                            'remote_as'              : 13414,
'route_reflector_client': False,
'nhs' : False
}
}
}
}
"""
raise NotImplementedError
def cli(self, commands):
"""
Will execute a list of commands and return the output in a dictionary format.
Example::
{
u'show version and haiku': u'''Hostname: re0.edge01.arn01
Model: mx480
Junos: 13.3R6.5
Help me, Obi-Wan
I just saw Episode Two
You're my only hope
''',
u'show chassis fan' : u'''
Item Status RPM Measurement
Top Rear Fan OK 3840 Spinning at intermediate-speed
Bottom Rear Fan OK 3840 Spinning at intermediate-speed
Top Middle Fan OK 3900 Spinning at intermediate-speed
Bottom Middle Fan OK 3840 Spinning at intermediate-speed
Top Front Fan OK 3810 Spinning at intermediate-speed
Bottom Front Fan OK 3840 Spinning at intermediate-speed'''
}
"""
raise NotImplementedError
def get_bgp_neighbors_detail(self, neighbor_address=''):
"""
Returns a detailed view of the BGP neighbors as a dictionary of lists.
        :param neighbor_address: Returns the statistics for a specific BGP neighbor.
Returns a dictionary of dictionaries. The keys for the first dictionary will be the vrf
(global if no vrf).
The keys of the inner dictionary represent the AS number of the neighbors.
Leaf dictionaries contain the following fields:
* up (True/False)
* local_as (int)
* remote_as (int)
* router_id (string)
* local_address (string)
* routing_table (string)
* local_address_configured (True/False)
* local_port (int)
* remote_address (string)
* remote_port (int)
* multihop (True/False)
* multipath (True/False)
* remove_private_as (True/False)
* import_policy (string)
* export_policy (string)
* input_messages (int)
* output_messages (int)
* input_updates (int)
* output_updates (int)
* messages_queued_out (int)
* connection_state (string)
* previous_connection_state (string)
* last_event (string)
* suppress_4byte_as (True/False)
* local_as_prepend (True/False)
* holdtime (int)
* configured_holdtime (int)
* keepalive (int)
* configured_keepalive (int)
* active_prefix_count (int)
* received_prefix_count (int)
* accepted_prefix_count (int)
* suppressed_prefix_count (int)
* advertised_prefix_count (int)
* flap_count (int)
Example::
{
'global': {
8121: [
{
'up' : True,
'local_as' : 13335,
'remote_as' : 8121,
'local_address' : u'172.101.76.1',
'local_address_configured' : True,
'local_port' : 179,
'routing_table' : u'inet.0',
'remote_address' : u'192.247.78.0',
'remote_port' : 58380,
'multihop' : False,
'multipath' : True,
'remove_private_as' : True,
'import_policy' : u'4-NTT-TRANSIT-IN',
'export_policy' : u'4-NTT-TRANSIT-OUT',
'input_messages' : 123,
'output_messages' : 13,
'input_updates' : 123,
'output_updates' : 5,
'messages_queued_out' : 23,
'connection_state' : u'Established',
'previous_connection_state' : u'EstabSync',
'last_event' : u'RecvKeepAlive',
'suppress_4byte_as' : False,
'local_as_prepend' : False,
'holdtime' : 90,
'configured_holdtime' : 90,
'keepalive' : 30,
'configured_keepalive' : 30,
'active_prefix_count' : 132808,
'received_prefix_count' : 566739,
'accepted_prefix_count' : 566479,
'suppressed_prefix_count' : 0,
'advertised_prefix_count' : 0,
'flap_count' : 27
}
]
}
}
"""
raise NotImplementedError
def get_arp_table(self):
"""
Returns a list of dictionaries having the following set of keys:
* interface (string)
* mac (string)
* ip (string)
* age (float)
Example::
[
{
'interface' : 'MgmtEth0/RSP0/CPU0/0',
'mac' : '5c:5e:ab:da:3c:f0',
'ip' : '172.17.17.1',
'age' : 1454496274.84
},
{
'interface' : 'MgmtEth0/RSP0/CPU0/0',
'mac' : '66:0e:94:96:e0:ff',
'ip' : '172.17.17.2',
'age' : 1435641582.49
}
]
"""
raise NotImplementedError
def get_ntp_peers(self):
"""
Returns the NTP peers configuration as dictionary.
The keys of the dictionary represent the IP Addresses of the peers.
        Inner dictionaries do not yet have any available keys.
Example::
{
'192.168.0.1': {},
'17.72.148.53': {},
'37.187.56.220': {},
'162.158.20.18': {}
}
"""
raise NotImplementedError
def get_ntp_servers(self):
"""
Returns the NTP servers configuration as dictionary.
The keys of the dictionary represent the IP Addresses of the servers.
        Inner dictionaries do not yet have any available keys.
Example::
{
'192.168.0.1': {},
'17.72.148.53': {},
'37.187.56.220': {},
'162.158.20.18': {}
}
"""
raise NotImplementedError
def get_ntp_stats(self):
"""
Returns a list of NTP synchronization statistics.
* remote (string)
* referenceid (string)
* synchronized (True/False)
* stratum (int)
* type (string)
* when (string)
* hostpoll (int)
* reachability (int)
* delay (float)
* offset (float)
* jitter (float)
Example::
[
{
'remote' : u'188.114.101.4',
'referenceid' : u'188.114.100.1',
'synchronized' : True,
'stratum' : 4,
'type' : u'-',
'when' : u'107',
'hostpoll' : 256,
'reachability' : 377,
'delay' : 164.228,
'offset' : -13.866,
'jitter' : 2.695
}
]
"""
raise NotImplementedError
def get_interfaces_ip(self):
"""
Returns all configured IP addresses on all interfaces as a dictionary of dictionaries.
Keys of the main dictionary represent the name of the interface.
        Values of the main dictionary are dictionaries that may contain the keys 'ipv4'
        and 'ipv6' (one, both, or neither), which are themselves dictionaries with the IP
        addresses as keys.
Each IP Address dictionary has the following keys:
* prefix_length (int)
Example::
{
u'FastEthernet8': {
u'ipv4': {
u'10.66.43.169': {
'prefix_length': 22
}
}
},
u'Loopback555': {
u'ipv4': {
u'192.168.1.1': {
'prefix_length': 24
}
},
u'ipv6': {
u'1::1': {
'prefix_length': 64
},
u'2001:DB8:1::1': {
'prefix_length': 64
},
u'2::': {
'prefix_length': 64
},
u'FE80::3': {
'prefix_length': u'N/A'
}
}
},
u'Tunnel0': {
u'ipv4': {
u'10.63.100.9': {
'prefix_length': 24
}
}
}
}
"""
raise NotImplementedError
def get_mac_address_table(self):
"""
        Returns a list of dictionaries. Each dictionary represents an entry in the MAC Address
Table, having the following keys:
* mac (string)
* interface (string)
* vlan (int)
* active (boolean)
* static (boolean)
* moves (int)
* last_move (float)
        Note that not all vendors provide all of these details; for example, the last_move
        field is not available on JUNOS devices.
Example::
[
{
'mac' : '00:1c:58:29:4a:71',
'interface' : 'Ethernet47',
'vlan' : 100,
'static' : False,
'active' : True,
'moves' : 1,
'last_move' : 1454417742.58
},
{
'mac' : '8c:60:4f:58:e1:c1',
'interface' : 'xe-1/0/1',
'vlan' : 100,
'static' : False,
'active' : True,
'moves' : 2,
'last_move' : 1453191948.11
},
{
'mac' : 'f4:b5:2f:56:72:01',
'interface' : 'ae7.900',
'vlan' : 900,
'static' : False,
'active' : True,
'moves' : None,
'last_move' : None
}
]
"""
raise NotImplementedError
def get_route_to(self, destination='', protocol=''):
"""
Returns a dictionary of dictionaries containing details of all available routes to a
destination.
:param destination: The destination prefix to be used when filtering the routes.
:param protocol (optional): Retrieve the routes only for a specific protocol.
Each inner dictionary contains the following fields:
* protocol (string)
* current_active (True/False)
* last_active (True/False)
* age (int)
* next_hop (string)
* outgoing_interface (string)
* selected_next_hop (True/False)
* preference (int)
* inactive_reason (string)
* routing_table (string)
* protocol_attributes (dictionary)
protocol_attributes is a dictionary with protocol-specific information, as follows:
- BGP
* local_as (int)
* remote_as (int)
* peer_id (string)
* as_path (string)
* communities (list)
* local_preference (int)
* preference2 (int)
* metric (int)
* metric2 (int)
- ISIS:
* level (int)
Example::
{
"1.0.0.0/24": [
{
"protocol" : u"BGP",
"inactive_reason" : u"Local Preference",
"last_active" : False,
"age" : 105219,
"next_hop" : u"172.17.17.17",
"selected_next_hop" : True,
"preference" : 170,
"current_active" : False,
"outgoing_interface": u"ae9.0",
"routing_table" : "inet.0",
"protocol_attributes": {
"local_as" : 13335,
"as_path" : u"2914 8403 54113 I",
"communities" : [
u"2914:1234",
u"2914:5678",
u"8403:1717",
u"54113:9999"
],
"preference2" : -101,
"remote_as" : 2914,
"local_preference" : 100
}
}
]
}
"""
raise NotImplementedError
def get_snmp_information(self):
"""
Returns a dict of dicts containing SNMP configuration.
Each inner dictionary contains these fields
* chassis_id (string)
* community (dictionary)
* contact (string)
* location (string)
'community' is a dictionary with community string specific information, as follows:
* acl (string) # acl number or name
* mode (string) # read-write (rw), read-only (ro)
Example::
{
'chassis_id': u'Asset Tag 54670',
'community': {
u'private': {
'acl': u'12',
'mode': u'rw'
},
u'public': {
'acl': u'11',
'mode': u'ro'
},
u'public_named_acl': {
'acl': u'ALLOW-SNMP-ACL',
'mode': u'ro'
},
u'public_no_acl': {
'acl': u'N/A',
'mode': u'ro'
}
},
'contact' : u'Joe Smith',
'location': u'123 Anytown USA Rack 404'
}
"""
raise NotImplementedError
def get_probes_config(self):
"""
Returns a dictionary with the probes configured on the device.
        Probes can be either RPM on JunOS devices or SLA on IOS-XR. Other vendors do not
support probes.
The keys of the main dictionary represent the name of the probes.
        Each probe consists of multiple tests, each test name being a key in the probe dictionary.
A test has the following keys:
* probe_type (str)
* target (str)
* source (str)
* probe_count (int)
* test_interval (int)
Example::
{
'probe1':{
'test1': {
'probe_type' : 'icmp-ping',
'target' : '192.168.0.1',
'source' : '192.168.0.2',
'probe_count' : 13,
'test_interval': 3
},
'test2': {
'probe_type' : 'http-ping',
'target' : '172.17.17.1',
'source' : '192.17.17.2',
'probe_count' : 5,
'test_interval': 60
}
}
}
"""
raise NotImplementedError
def get_probes_results(self):
"""
Returns a dictionary with the results of the probes.
The keys of the main dictionary represent the name of the probes.
        Each probe consists of multiple tests, each test name being a key in the probe dictionary.
A test has the following keys:
* target (str)
* source (str)
* probe_type (str)
* probe_count (int)
* rtt (float)
* round_trip_jitter (float)
* current_test_loss (float)
* current_test_min_delay (float)
* current_test_max_delay (float)
* current_test_avg_delay (float)
* last_test_min_delay (float)
* last_test_max_delay (float)
* last_test_avg_delay (float)
* global_test_min_delay (float)
* global_test_max_delay (float)
* global_test_avg_delay (float)
Example::
{
'probe1': {
'test1': {
'last_test_min_delay' : 63.120,
'global_test_min_delay' : 62.912,
'current_test_avg_delay': 63.190,
'global_test_max_delay' : 177.349,
'current_test_max_delay': 63.302,
'global_test_avg_delay' : 63.802,
'last_test_avg_delay' : 63.438,
'last_test_max_delay' : 65.356,
'probe_type' : 'icmp-ping',
'rtt' : 63.138,
'current_test_loss' : 0,
'round_trip_jitter' : -59.0,
'target' : '192.168.0.1',
                    'source'                : '192.168.0.2',
'probe_count' : 15,
'current_test_min_delay': 63.138
},
'test2': {
'last_test_min_delay' : 176.384,
'global_test_min_delay' : 169.226,
'current_test_avg_delay': 177.098,
'global_test_max_delay' : 292.628,
'current_test_max_delay': 180.055,
'global_test_avg_delay' : 177.959,
'last_test_avg_delay' : 177.178,
'last_test_max_delay' : 184.671,
'probe_type' : 'icmp-ping',
'rtt' : 176.449,
'current_test_loss' : 0,
'round_trip_jitter' : -34.0,
'target' : '172.17.17.1',
                    'source'                : '172.17.17.2',
'probe_count' : 15,
'current_test_min_delay': 176.402
}
}
}
"""
raise NotImplementedError
def ping(self, destination, source=c.PING_SOURCE, ttl=c.PING_TTL, timeout=c.PING_TIMEOUT,
size=c.PING_SIZE, count=c.PING_COUNT, vrf=c.PING_VRF):
"""
Executes ping on the device and returns a dictionary with the result
:param destination: Host or IP Address of the destination
:param source (optional): Source address of echo request
:param ttl (optional): Maximum number of hops
:param timeout (optional): Maximum seconds to wait after sending final packet
:param size (optional): Size of request (bytes)
:param count (optional): Number of ping request to send
        Output dictionary has one of the following keys:
* success
* error
        In case of success, the inner dictionary will have the following keys:
* probes_sent (int)
* packet_loss (int)
* rtt_min (float)
* rtt_max (float)
* rtt_avg (float)
* rtt_stddev (float)
* results (list)
'results' is a list of dictionaries with the following keys:
* ip_address (str)
* rtt (float)
Example::
{
'success': {
'probes_sent': 5,
'packet_loss': 0,
'rtt_min': 72.158,
'rtt_max': 72.433,
'rtt_avg': 72.268,
'rtt_stddev': 0.094,
'results': [
{
'ip_address': u'1.1.1.1',
'rtt': 72.248
},
{
'ip_address': '2.2.2.2',
'rtt': 72.299
}
]
}
}
OR
{
'error': 'unknown host 8.8.8.8.8'
}
"""
raise NotImplementedError
def traceroute(self,
destination,
source=c.TRACEROUTE_SOURCE,
ttl=c.TRACEROUTE_TTL,
timeout=c.TRACEROUTE_TIMEOUT,
vrf=c.TRACEROUTE_VRF):
"""
Executes traceroute on the device and returns a dictionary with the result.
:param destination: Host or IP Address of the destination
:param source (optional): Use a specific IP Address to execute the traceroute
        :param ttl (optional): Maximum number of hops
:param timeout (optional): Number of seconds to wait for response
Output dictionary has one of the following keys:
* success
* error
In case of success, the keys of the dictionary represent the hop ID, while values are
dictionaries containing the probes results:
* rtt (float)
* ip_address (str)
* host_name (str)
Example::
{
'success': {
1: {
'probes': {
1: {
'rtt': 1.123,
'ip_address': u'206.223.116.21',
'host_name': u'eqixsj-google-gige.google.com'
},
2: {
'rtt': 1.9100000000000001,
'ip_address': u'206.223.116.21',
'host_name': u'eqixsj-google-gige.google.com'
},
3: {
'rtt': 3.347,
'ip_address': u'198.32.176.31',
'host_name': u'core2-1-1-0.pao.net.google.com'}
}
},
2: {
'probes': {
1: {
'rtt': 1.586,
'ip_address': u'209.85.241.171',
'host_name': u'209.85.241.171'
},
2: {
'rtt': 1.6300000000000001,
'ip_address': u'209.85.241.171',
'host_name': u'209.85.241.171'
},
3: {
'rtt': 1.6480000000000001,
'ip_address': u'209.85.241.171',
'host_name': u'209.85.241.171'}
}
},
3: {
'probes': {
1: {
'rtt': 2.529,
'ip_address': u'216.239.49.123',
'host_name': u'216.239.49.123'},
2: {
'rtt': 2.474,
'ip_address': u'209.85.255.255',
'host_name': u'209.85.255.255'
},
3: {
'rtt': 7.813,
'ip_address': u'216.239.58.193',
'host_name': u'216.239.58.193'}
}
},
4: {
'probes': {
1: {
'rtt': 1.361,
'ip_address': u'8.8.8.8',
'host_name': u'google-public-dns-a.google.com'
},
2: {
'rtt': 1.605,
'ip_address': u'8.8.8.8',
'host_name': u'google-public-dns-a.google.com'
},
3: {
'rtt': 0.989,
'ip_address': u'8.8.8.8',
'host_name': u'google-public-dns-a.google.com'}
}
}
}
}
OR
{
'error': 'unknown host 8.8.8.8.8'
}
"""
raise NotImplementedError
def get_users(self):
"""
Returns a dictionary with the configured users.
        The keys of the main dictionary represent the usernames. The values are dictionaries
        describing the details of each user, with the following keys:
* level (int)
* password (str)
* sshkeys (list)
The level is an integer between 0 and 15, where 0 is the lowest access and 15 represents
full access to the device.
Example::
{
'mircea': {
'level': 15,
'password': '$1$0P70xKPa$z46fewjo/10cBTckk6I/w/',
'sshkeys': [
'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4pFn+shPwTb2yELO4L7NtQrKOJXNeCl1je\
l9STXVaGnRAnuc2PXl35vnWmcUq6YbUEcgUTRzzXfmelJKuVJTJIlMXii7h2xkbQp0YZIEs4P\
8ipwnRBAxFfk/ZcDsN3mjep4/yjN56eorF5xs7zP9HbqbJ1dsqk1p3A/9LIL7l6YewLBCwJj6\
D+fWSJ0/YW+7oH17Fk2HH+tw0L5PcWLHkwA4t60iXn16qDbIk/ze6jv2hDGdCdz7oYQeCE55C\
CHOHMJWYfN3jcL4s0qv8/u6Ka1FVkV7iMmro7ChThoV/5snI4Ljf2wKqgHH7TfNaCfpU0WvHA\
nTs8zhOrGScSrtb mircea@master-roshi'
]
}
}
"""
raise NotImplementedError
def get_optics(self):
"""Fetches the power usage on the various transceivers installed
        on the switch (in dBm), and returns a view that conforms with the
openconfig model openconfig-platform-transceiver.yang
Returns a dictionary where the keys are as listed below:
* intf_name (unicode)
* physical_channels
* channels (list of dicts)
* index (int)
* state
* input_power
* instant (float)
* avg (float)
* min (float)
* max (float)
* output_power
* instant (float)
* avg (float)
* min (float)
* max (float)
* laser_bias_current
* instant (float)
* avg (float)
* min (float)
* max (float)
Example:
{
'et1': {
'physical_channels': {
'channel': [
{
'index': 0,
'state': {
'input_power': {
'instant': 0.0,
'avg': 0.0,
'min': 0.0,
'max': 0.0,
},
'output_power': {
'instant': 0.0,
'avg': 0.0,
'min': 0.0,
'max': 0.0,
},
'laser_bias_current': {
'instant': 0.0,
'avg': 0.0,
'min': 0.0,
'max': 0.0,
},
}
}
]
}
}
}
"""
raise NotImplementedError
def get_config(self, retrieve='all'):
"""
Return the configuration of a device.
Args:
retrieve(string): Which configuration type you want to populate, default is all of them.
The rest will be set to "".
Returns:
The object returned is a dictionary with the following keys:
- running(string) - Representation of the native running configuration
- candidate(string) - Representation of the native candidate configuration. If the
              device doesn't differentiate between running and startup configuration, this will be
              an empty string
- startup(string) - Representation of the native startup configuration. If the
              device doesn't differentiate between running and startup configuration, this will be
              an empty string
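        The example below is illustrative only; the configuration strings are hypothetical
        placeholders for the device's native configuration text.
        Example::
            {
                'running': u'! hypothetical running configuration ...',
                'candidate': u'',
                'startup': u'! hypothetical startup configuration ...'
            }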
"""
raise NotImplementedError
def get_network_instances(self, name=''):
"""
Return a dictionary of network instances (VRFs) configured, including default/global
Args:
name(string) - Name of the network instance to return, default is all.
Returns:
A dictionary of network instances in OC format:
* name (dict)
* name (unicode)
* type (unicode)
* state (dict)
* route_distinguisher (unicode)
* interfaces (dict)
* interface (dict)
* interface name: (dict)
Example:
{
u'MGMT': {
u'name': u'MGMT',
u'type': u'L3VRF',
u'state': {
u'route_distinguisher': u'123:456',
},
u'interfaces': {
u'interface': {
u'Management1': {}
}
}
                },
u'default': {
u'name': u'default',
u'type': u'DEFAULT_INSTANCE',
u'state': {
u'route_distinguisher': None,
},
                    u'interfaces': {
                        u'interface': {
                            u'Ethernet1': {},
                            u'Ethernet2': {},
                            u'Ethernet3': {},
u'Ethernet4': {}
}
}
}
}
"""
raise NotImplementedError
def get_firewall_policies(self):
"""
        Returns a dictionary of lists of dictionaries where the first key is a unique policy
name and the inner dictionary contains the following keys:
* position (int)
* packet_hits (int)
* byte_hits (int)
* id (text_type)
* enabled (bool)
* schedule (text_type)
* log (text_type)
* l3_src (text_type)
* l3_dst (text_type)
* service (text_type)
* src_zone (text_type)
* dst_zone (text_type)
* action (text_type)
Example::
{
'policy_name': [{
'position': 1,
'packet_hits': 200,
'byte_hits': 83883,
'id': '230',
'enabled': True,
'schedule': 'Always',
'log': 'all',
'l3_src': 'any',
'l3_dst': 'any',
'service': 'HTTP',
'src_zone': 'port2',
'dst_zone': 'port3',
'action': 'Permit'
}]
}
"""
raise NotImplementedError
def compliance_report(self, validation_file='validate.yml'):
"""
Return a compliance report.
Verify that the device complies with the given validation file and writes a compliance
report file. See https://napalm.readthedocs.io/en/latest/validate.html.
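        Example (illustrative usage; assumes ``device`` is an instantiated driver and the
        validation file exists at the given path)::
            report = device.compliance_report('validate.yml')
            # 'report' is a dictionary summarising whether the device complies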
"""
return validate.compliance_report(self, validation_file=validation_file)
| apache-2.0 | 5,470,869,723,787,131,000 | 36.871287 | 100 | 0.427503 | false |
stephane-martin/salt-debian-packaging | salt-2016.3.2/salt/master.py | 1 | 94538 | # -*- coding: utf-8 -*-
'''
This module contains all of the routines needed to set up a master server, this
involves preparing the three listeners and the workers needed by the master.
'''
# Import python libs
from __future__ import absolute_import, with_statement
import copy
import ctypes
import os
import re
import sys
import time
import errno
import signal
import stat
import logging
import multiprocessing
import tempfile
import traceback
# Import third party libs
from Crypto.PublicKey import RSA
# pylint: disable=import-error,no-name-in-module,redefined-builtin
import salt.ext.six as six
from salt.ext.six.moves import range
# pylint: enable=import-error,no-name-in-module,redefined-builtin
try:
import zmq
import zmq.eventloop.ioloop
# support pyzmq 13.0.x, TODO: remove once we force people to 14.0.x
if not hasattr(zmq.eventloop.ioloop, 'ZMQIOLoop'):
zmq.eventloop.ioloop.ZMQIOLoop = zmq.eventloop.ioloop.IOLoop
LOOP_CLASS = zmq.eventloop.ioloop.ZMQIOLoop
HAS_ZMQ = True
except ImportError:
import tornado.ioloop
LOOP_CLASS = tornado.ioloop.IOLoop
HAS_ZMQ = False
import tornado.gen # pylint: disable=F0401
# Import salt libs
import salt.crypt
import salt.utils
import salt.client
import salt.payload
import salt.pillar
import salt.state
import salt.runner
import salt.auth
import salt.wheel
import salt.minion
import salt.search
import salt.key
import salt.acl
import salt.engines
import salt.fileserver
import salt.daemons.masterapi
import salt.defaults.exitcodes
import salt.transport.server
import salt.log.setup
import salt.utils.args
import salt.utils.atomicfile
import salt.utils.event
import salt.utils.job
import salt.utils.reactor
import salt.utils.verify
import salt.utils.minions
import salt.utils.gzip_util
import salt.utils.process
import salt.utils.zeromq
import salt.utils.jid
from salt.defaults import DEFAULT_TARGET_DELIM
from salt.exceptions import FileserverConfigError
from salt.transport import iter_transport_opts
from salt.utils.debug import (
enable_sigusr1_handler, enable_sigusr2_handler, inspect_stack
)
from salt.utils.event import tagify
from salt.utils.master import ConnectedCache
from salt.utils.process import default_signals, SignalHandlingMultiprocessingProcess
try:
import resource
HAS_RESOURCE = True
except ImportError:
# resource is not available on windows
HAS_RESOURCE = False
# Import halite libs
try:
import halite # pylint: disable=import-error
HAS_HALITE = True
except ImportError:
HAS_HALITE = False
log = logging.getLogger(__name__)
class SMaster(object):
'''
Create a simple salt-master, this will generate the top-level master
'''
secrets = {} # mapping of key -> {'secret': multiprocessing type, 'reload': FUNCTION}
def __init__(self, opts):
'''
Create a salt master server instance
:param dict opts: The salt options dictionary
'''
self.opts = opts
self.master_key = salt.crypt.MasterKeys(self.opts)
self.key = self.__prep_key()
# We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'.
# Otherwise, 'SMaster.secrets' won't be copied over to the spawned process
# on Windows since spawning processes on Windows requires pickling.
# These methods are only used when pickling so will not be used on
# non-Windows platforms.
def __setstate__(self, state):
self.opts = state['opts']
self.master_key = state['master_key']
self.key = state['key']
SMaster.secrets = state['secrets']
def __getstate__(self):
return {'opts': self.opts,
'master_key': self.master_key,
'key': self.key,
'secrets': SMaster.secrets}
def __prep_key(self):
'''
A key needs to be placed in the filesystem with permissions 0400 so
clients are required to run as root.
'''
return salt.daemons.masterapi.access_keys(self.opts)
class Maintenance(SignalHandlingMultiprocessingProcess):
'''
    A generalized maintenance process which performs maintenance
routines.
'''
def __init__(self, opts, log_queue=None):
'''
Create a maintenance instance
:param dict opts: The salt options
'''
super(Maintenance, self).__init__(log_queue=log_queue)
self.opts = opts
# How often do we perform the maintenance tasks
self.loop_interval = int(self.opts['loop_interval'])
# Track key rotation intervals
self.rotate = int(time.time())
# __setstate__ and __getstate__ are only used on Windows.
# We do this so that __init__ will be invoked on Windows in the child
# process so that a register_after_fork() equivalent will work on Windows.
def __setstate__(self, state):
self._is_child = True
self.__init__(state['opts'], log_queue=state['log_queue'])
def __getstate__(self):
return {'opts': self.opts,
'log_queue': self.log_queue}
def _post_fork_init(self):
'''
Some things need to be init'd after the fork has completed
The easiest example is that one of these module types creates a thread
in the parent process, then once the fork happens you'll start getting
errors like "WARNING: Mixing fork() and threads detected; memory leaked."
'''
# Init fileserver manager
self.fileserver = salt.fileserver.Fileserver(self.opts)
# Load Runners
ropts = dict(self.opts)
ropts['quiet'] = True
runner_client = salt.runner.RunnerClient(ropts)
# Load Returners
self.returners = salt.loader.returners(self.opts, {})
# Init Scheduler
self.schedule = salt.utils.schedule.Schedule(self.opts,
runner_client.functions_dict(),
returners=self.returners)
self.ckminions = salt.utils.minions.CkMinions(self.opts)
# Make Event bus for firing
self.event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=False)
# Init any values needed by the git ext pillar
self.git_pillar = salt.daemons.masterapi.init_git_pillar(self.opts)
# Set up search object
self.search = salt.search.Search(self.opts)
self.presence_events = False
if self.opts.get('presence_events', False):
tcp_only = True
for transport, _ in iter_transport_opts(self.opts):
if transport != 'tcp':
tcp_only = False
if not tcp_only:
# For a TCP only transport, the presence events will be
# handled in the transport code.
self.presence_events = True
def run(self):
'''
This is the general passive maintenance process controller for the Salt
master.
This is where any data that needs to be cleanly maintained from the
master is maintained.
'''
salt.utils.appendproctitle('Maintenance')
# init things that need to be done after the process is forked
self._post_fork_init()
# Make Start Times
last = int(time.time())
# Clean out the fileserver backend cache
salt.daemons.masterapi.clean_fsbackend(self.opts)
# Clean out pub auth
salt.daemons.masterapi.clean_pub_auth(self.opts)
old_present = set()
while True:
now = int(time.time())
if (now - last) >= self.loop_interval:
salt.daemons.masterapi.clean_old_jobs(self.opts)
salt.daemons.masterapi.clean_expired_tokens(self.opts)
self.handle_search(now, last)
self.handle_git_pillar()
self.handle_schedule()
self.handle_presence(old_present)
self.handle_key_rotate(now)
salt.daemons.masterapi.fileserver_update(self.fileserver)
salt.utils.verify.check_max_open_files(self.opts)
last = now
time.sleep(self.loop_interval)
def handle_search(self, now, last):
'''
Update the search index
'''
if self.opts.get('search'):
if now - last >= self.opts['search_index_interval']:
self.search.index()
def handle_key_rotate(self, now):
'''
        Rotate the AES key when a rotation is required
'''
to_rotate = False
dfn = os.path.join(self.opts['cachedir'], '.dfn')
try:
stats = os.stat(dfn)
# Basic Windows permissions don't distinguish between
# user/group/all. Check for read-only state instead.
if salt.utils.is_windows() and not os.access(dfn, os.W_OK):
to_rotate = True
# Cannot delete read-only files on Windows.
os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
elif stats.st_mode == 0o100400:
to_rotate = True
else:
log.error('Found dropfile with incorrect permissions, ignoring...')
os.remove(dfn)
except os.error:
pass
if self.opts.get('publish_session'):
if now - self.rotate >= self.opts['publish_session']:
to_rotate = True
if to_rotate:
log.info('Rotating master AES key')
for secret_key, secret_map in six.iteritems(SMaster.secrets):
# should be unnecessary-- since no one else should be modifying
with secret_map['secret'].get_lock():
secret_map['secret'].value = secret_map['reload']()
self.event.fire_event({'rotate_{0}_key'.format(secret_key): True}, tag='key')
self.rotate = now
if self.opts.get('ping_on_rotate'):
# Ping all minions to get them to pick up the new key
log.debug('Pinging all connected minions '
'due to key rotation')
salt.utils.master.ping_all_connected_minions(self.opts)
def handle_git_pillar(self):
'''
Update git pillar
'''
try:
for pillar in self.git_pillar:
pillar.update()
except Exception as exc:
log.error(
'Exception \'{0}\' caught while updating git_pillar'
.format(exc),
exc_info_on_loglevel=logging.DEBUG
)
def handle_schedule(self):
'''
Evaluate the scheduler
'''
try:
self.schedule.eval()
# Check if scheduler requires lower loop interval than
# the loop_interval setting
if self.schedule.loop_interval < self.loop_interval:
self.loop_interval = self.schedule.loop_interval
except Exception as exc:
log.error(
'Exception {0} occurred in scheduled job'.format(exc)
)
def handle_presence(self, old_present):
'''
Fire presence events if enabled
'''
if self.presence_events:
present = self.ckminions.connected_ids()
new = present.difference(old_present)
lost = old_present.difference(present)
if new or lost:
# Fire new minions present event
data = {'new': list(new),
'lost': list(lost)}
self.event.fire_event(data, tagify('change', 'presence'))
data = {'present': list(present)}
# On the first run it may need more time for the EventPublisher
# to come up and be ready. Set the timeout to account for this.
self.event.fire_event(data, tagify('present', 'presence'), timeout=3)
old_present.clear()
old_present.update(present)
class Master(SMaster):
'''
The salt master server
'''
def __init__(self, opts):
'''
Create a salt master server instance
        :param dict opts: The salt options
'''
if HAS_ZMQ:
# Warn if ZMQ < 3.2
try:
zmq_version_info = zmq.zmq_version_info()
except AttributeError:
# PyZMQ <= 2.1.9 does not have zmq_version_info, fall back to
# using zmq.zmq_version() and build a version info tuple.
zmq_version_info = tuple(
[int(x) for x in zmq.zmq_version().split('.')]
)
if zmq_version_info < (3, 2):
log.warning(
'You have a version of ZMQ less than ZMQ 3.2! There are '
'known connection keep-alive issues with ZMQ < 3.2 which '
'may result in loss of contact with minions. Please '
'upgrade your ZMQ!'
)
SMaster.__init__(self, opts)
def __set_max_open_files(self):
if not HAS_RESOURCE:
return
        # Let's check to see what our max open files (ulimit -n) setting is
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
if mof_h == resource.RLIM_INFINITY:
# Unclear what to do with infinity... OSX reports RLIM_INFINITY as
# hard limit,but raising to anything above soft limit fails...
mof_h = mof_s
log.info(
'Current values for max open files soft/hard setting: '
'{0}/{1}'.format(
mof_s, mof_h
)
)
# Let's grab, from the configuration file, the value to raise max open
# files to
mof_c = self.opts['max_open_files']
if mof_c > mof_h:
# The configured value is higher than what's allowed
log.info(
'The value for the \'max_open_files\' setting, {0}, is higher '
'than what the user running salt is allowed to raise to, {1}. '
'Defaulting to {1}.'.format(mof_c, mof_h)
)
mof_c = mof_h
if mof_s < mof_c:
# There's room to raise the value. Raise it!
log.info('Raising max open files value to {0}'.format(mof_c))
resource.setrlimit(resource.RLIMIT_NOFILE, (mof_c, mof_h))
try:
mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE)
log.info(
'New values for max open files soft/hard values: '
'{0}/{1}'.format(mof_s, mof_h)
)
except ValueError:
# https://github.com/saltstack/salt/issues/1991#issuecomment-13025595
# A user under OSX reported that our 100000 default value is
# still too high.
log.critical(
'Failed to raise max open files setting to {0}. If this '
                'value is too low, the salt-master will most likely fail '
'to run properly.'.format(
mof_c
)
)
def _pre_flight(self):
'''
Run pre flight checks. If anything in this method fails then the master
should not start up.
'''
errors = []
critical_errors = []
try:
os.chdir('/')
except OSError as err:
errors.append(
                'Cannot change to root directory ({0})'.format(err)
)
fileserver = salt.fileserver.Fileserver(self.opts)
if not fileserver.servers:
errors.append(
'Failed to load fileserver backends, the configured backends '
'are: {0}'.format(', '.join(self.opts['fileserver_backend']))
)
else:
# Run init() for all backends which support the function, to
# double-check configuration
try:
fileserver.init()
except FileserverConfigError as exc:
critical_errors.append('{0}'.format(exc))
if not self.opts['fileserver_backend']:
errors.append('No fileserver backends are configured')
# Check to see if we need to create a pillar cache dir
if self.opts['pillar_cache'] and not os.path.isdir(os.path.join(self.opts['cachedir'], 'pillar_cache')):
try:
prev_umask = os.umask(0o077)
os.mkdir(os.path.join(self.opts['cachedir'], 'pillar_cache'))
os.umask(prev_umask)
except OSError:
pass
non_legacy_git_pillars = [
x for x in self.opts.get('ext_pillar', [])
if 'git' in x
and not isinstance(x['git'], six.string_types)
]
if non_legacy_git_pillars:
new_opts = copy.deepcopy(self.opts)
new_opts['ext_pillar'] = non_legacy_git_pillars
try:
# Init any values needed by the git ext pillar
salt.utils.gitfs.GitPillar(new_opts)
except FileserverConfigError as exc:
critical_errors.append(exc.strerror)
finally:
del new_opts
if errors or critical_errors:
for error in errors:
log.error(error)
for error in critical_errors:
log.critical(error)
log.critical('Master failed pre flight checks, exiting\n')
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
# run_reqserver cannot be defined within a class method in order for it
# to be picklable.
def run_reqserver(self, **kwargs):
secrets = kwargs.pop('secrets', None)
if secrets is not None:
SMaster.secrets = secrets
with default_signals(signal.SIGINT, signal.SIGTERM):
reqserv = ReqServer(
self.opts,
self.key,
self.master_key,
**kwargs)
reqserv.run()
def start(self):
'''
Turn on the master server components
'''
self._pre_flight()
log.info(
'salt-master is starting as user \'{0}\''.format(
salt.utils.get_user()
)
)
enable_sigusr1_handler()
enable_sigusr2_handler()
self.__set_max_open_files()
# Reset signals to default ones before adding processes to the process
# manager. We don't want the processes being started to inherit those
# signal handlers
with default_signals(signal.SIGINT, signal.SIGTERM):
# Setup the secrets here because the PubServerChannel may need
# them as well.
SMaster.secrets['aes'] = {'secret': multiprocessing.Array(ctypes.c_char,
salt.crypt.Crypticle.generate_key_string()),
'reload': salt.crypt.Crypticle.generate_key_string
}
log.info('Creating master process manager')
self.process_manager = salt.utils.process.ProcessManager()
pub_channels = []
log.info('Creating master publisher process')
for transport, opts in iter_transport_opts(self.opts):
chan = salt.transport.server.PubServerChannel.factory(opts)
chan.pre_fork(self.process_manager)
pub_channels.append(chan)
log.info('Creating master event publisher process')
self.process_manager.add_process(salt.utils.event.EventPublisher, args=(self.opts,))
salt.engines.start_engines(self.opts, self.process_manager)
# must be after channels
log.info('Creating master maintenance process')
self.process_manager.add_process(Maintenance, args=(self.opts,))
if 'reactor' in self.opts:
log.info('Creating master reactor process')
self.process_manager.add_process(salt.utils.reactor.Reactor, args=(self.opts,))
if self.opts.get('event_return'):
log.info('Creating master event return process')
self.process_manager.add_process(salt.utils.event.EventReturn, args=(self.opts,))
ext_procs = self.opts.get('ext_processes', [])
for proc in ext_procs:
log.info('Creating ext_processes process: {0}'.format(proc))
try:
mod = '.'.join(proc.split('.')[:-1])
cls = proc.split('.')[-1]
_tmp = __import__(mod, globals(), locals(), [cls], -1)
cls = _tmp.__getattribute__(cls)
self.process_manager.add_process(cls, args=(self.opts,))
except Exception:
log.error(('Error creating ext_processes '
'process: {0}').format(proc))
if HAS_HALITE and 'halite' in self.opts:
log.info('Creating master halite process')
self.process_manager.add_process(Halite, args=(self.opts['halite'],))
# TODO: remove, or at least push into the transport stuff (pre-fork probably makes sense there)
if self.opts['con_cache']:
log.info('Creating master concache process')
self.process_manager.add_process(ConnectedCache, args=(self.opts,))
# workaround for issue #16315, race condition
log.debug('Sleeping for two seconds to let concache rest')
time.sleep(2)
log.info('Creating master request server process')
kwargs = {}
if salt.utils.is_windows():
kwargs['log_queue'] = salt.log.setup.get_multiprocessing_logging_queue()
kwargs['secrets'] = SMaster.secrets
# No need to call this one under default_signals because that's invoked when
# actually starting the ReqServer
self.process_manager.add_process(self.run_reqserver, kwargs=kwargs, name='ReqServer')
# Install the SIGINT/SIGTERM handlers if not done so far
if signal.getsignal(signal.SIGINT) is signal.SIG_DFL:
# No custom signal handling was added, install our own
signal.signal(signal.SIGINT, self._handle_signals)
if signal.getsignal(signal.SIGTERM) is signal.SIG_DFL:
# No custom signal handling was added, install our own
            signal.signal(signal.SIGTERM, self._handle_signals)
self.process_manager.run()
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
# escalate the signals to the process manager
self.process_manager.stop_restarting()
self.process_manager.send_signal_to_processes(signum)
# kill any remaining processes
self.process_manager.kill_children()
class Halite(SignalHandlingMultiprocessingProcess):
'''
Manage the Halite server
'''
def __init__(self, hopts, log_queue=None):
'''
Create a halite instance
:param dict hopts: The halite options
'''
super(Halite, self).__init__(log_queue=log_queue)
self.hopts = hopts
# __setstate__ and __getstate__ are only used on Windows.
# We do this so that __init__ will be invoked on Windows in the child
# process so that a register_after_fork() equivalent will work on Windows.
def __setstate__(self, state):
self._is_child = True
self.__init__(state['hopts'], log_queue=state['log_queue'])
def __getstate__(self):
return {'hopts': self.hopts,
'log_queue': self.log_queue}
def run(self):
'''
Fire up halite!
'''
salt.utils.appendproctitle(self.__class__.__name__)
halite.start(self.hopts)
class ReqServer(SignalHandlingMultiprocessingProcess):
'''
    Starts up the master request server; minions send results to this
interface.
'''
def __init__(self, opts, key, mkey, log_queue=None):
'''
Create a request server
:param dict opts: The salt options dictionary
:key dict: The user starting the server and the AES key
:mkey dict: The user starting the server and the RSA key
:rtype: ReqServer
:returns: Request server
'''
super(ReqServer, self).__init__(log_queue=log_queue)
self.opts = opts
self.master_key = mkey
# Prepare the AES key
self.key = key
def _handle_signals(self, signum, sigframe): # pylint: disable=unused-argument
self.destroy(signum)
super(ReqServer, self)._handle_signals(signum, sigframe)
def __bind(self):
'''
Binds the reply server
'''
if self.log_queue is not None:
salt.log.setup.set_multiprocessing_logging_queue(self.log_queue)
salt.log.setup.setup_multiprocessing_logging(self.log_queue)
dfn = os.path.join(self.opts['cachedir'], '.dfn')
if os.path.isfile(dfn):
try:
if salt.utils.is_windows() and not os.access(dfn, os.W_OK):
# Cannot delete read-only files on Windows.
os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR)
os.remove(dfn)
except os.error:
pass
self.process_manager = salt.utils.process.ProcessManager(name='ReqServer_ProcessManager')
req_channels = []
tcp_only = True
for transport, opts in iter_transport_opts(self.opts):
chan = salt.transport.server.ReqServerChannel.factory(opts)
chan.pre_fork(self.process_manager)
req_channels.append(chan)
if transport != 'tcp':
tcp_only = False
kwargs = {}
if salt.utils.is_windows():
kwargs['log_queue'] = self.log_queue
# Use one worker thread if the only TCP transport is set up on Windows. See #27188.
if tcp_only:
log.warning("TCP transport is currently supporting the only 1 worker on Windows.")
self.opts['worker_threads'] = 1
for ind in range(int(self.opts['worker_threads'])):
name = 'MWorker-{0}'.format(ind)
self.process_manager.add_process(MWorker,
args=(self.opts,
self.master_key,
self.key,
req_channels,
name
),
kwargs=kwargs,
name=name
)
self.process_manager.run()
def run(self):
'''
Start up the ReqServer
'''
self.__bind()
def destroy(self, signum=signal.SIGTERM):
if hasattr(self, 'clients') and self.clients.closed is False:
if HAS_ZMQ:
self.clients.setsockopt(zmq.LINGER, 1)
self.clients.close()
if hasattr(self, 'workers') and self.workers.closed is False:
if HAS_ZMQ:
self.workers.setsockopt(zmq.LINGER, 1)
self.workers.close()
if hasattr(self, 'context') and self.context.closed is False:
self.context.term()
# Also stop the workers
if hasattr(self, 'process_manager'):
self.process_manager.stop_restarting()
self.process_manager.send_signal_to_processes(signum)
self.process_manager.kill_children()
def __del__(self):
self.destroy()
class MWorker(SignalHandlingMultiprocessingProcess):
'''
The worker multiprocess instance to manage the backend operations for the
salt master.
'''
def __init__(self,
opts,
mkey,
key,
req_channels,
name,
**kwargs):
'''
Create a salt master worker process
:param dict opts: The salt options
:param dict mkey: The user running the salt master and the AES key
:param dict key: The user running the salt master and the RSA key
:rtype: MWorker
:return: Master worker
'''
kwargs['name'] = name
SignalHandlingMultiprocessingProcess.__init__(self, **kwargs)
self.opts = opts
self.req_channels = req_channels
self.mkey = mkey
self.key = key
self.k_mtime = 0
# We need __setstate__ and __getstate__ to also pickle 'SMaster.secrets'.
# Otherwise, 'SMaster.secrets' won't be copied over to the spawned process
# on Windows since spawning processes on Windows requires pickling.
# These methods are only used when pickling so will not be used on
# non-Windows platforms.
def __setstate__(self, state):
self._is_child = True
SignalHandlingMultiprocessingProcess.__init__(self, log_queue=state['log_queue'])
self.opts = state['opts']
self.req_channels = state['req_channels']
self.mkey = state['mkey']
self.key = state['key']
self.k_mtime = state['k_mtime']
SMaster.secrets = state['secrets']
def __getstate__(self):
return {'opts': self.opts,
'req_channels': self.req_channels,
'mkey': self.mkey,
'key': self.key,
'k_mtime': self.k_mtime,
'log_queue': self.log_queue,
'secrets': SMaster.secrets}
def _handle_signals(self, signum, sigframe):
for channel in getattr(self, 'req_channels', ()):
channel.close()
super(MWorker, self)._handle_signals(signum, sigframe)
def __bind(self):
'''
Bind to the local port
'''
# using ZMQIOLoop since we *might* need zmq in there
if HAS_ZMQ:
zmq.eventloop.ioloop.install()
self.io_loop = LOOP_CLASS()
for req_channel in self.req_channels:
req_channel.post_fork(self._handle_payload, io_loop=self.io_loop) # TODO: cleaner? Maybe lazily?
self.io_loop.start()
@tornado.gen.coroutine
def _handle_payload(self, payload):
'''
The _handle_payload method is the key method used to figure out what
needs to be done with communication to the server
Example cleartext payload generated for 'salt myminion test.ping':
{'enc': 'clear',
'load': {'arg': [],
'cmd': 'publish',
'fun': 'test.ping',
'jid': '',
'key': 'alsdkjfa.,maljf-==adflkjadflkjalkjadfadflkajdflkj',
'kwargs': {'show_jid': False, 'show_timeout': False},
'ret': '',
'tgt': 'myminion',
'tgt_type': 'glob',
'user': 'root'}}
:param dict payload: The payload route to the appropriate handler
'''
key = payload['enc']
load = payload['load']
ret = {'aes': self._handle_aes,
'clear': self._handle_clear}[key](load)
raise tornado.gen.Return(ret)
def _handle_clear(self, load):
'''
Process a cleartext command
:param dict load: Cleartext payload
:return: The result of passing the load to a function in ClearFuncs corresponding to
the command specified in the load's 'cmd' key.
'''
log.trace('Clear payload received with command {cmd}'.format(**load))
if load['cmd'].startswith('__'):
return False
return getattr(self.clear_funcs, load['cmd'])(load), {'fun': 'send_clear'}
def _handle_aes(self, data):
'''
Process a command sent via an AES key
:param str load: Encrypted payload
:return: The result of passing the load to a function in AESFuncs corresponding to
the command specified in the load's 'cmd' key.
'''
if 'cmd' not in data:
log.error('Received malformed command {0}'.format(data))
return {}
log.trace('AES payload received with command {0}'.format(data['cmd']))
if data['cmd'].startswith('__'):
return False
return self.aes_funcs.run_func(data['cmd'], data)
def run(self):
'''
Start a Master Worker
'''
salt.utils.appendproctitle(self.name)
self.clear_funcs = ClearFuncs(
self.opts,
self.key,
)
self.aes_funcs = AESFuncs(self.opts)
salt.utils.reinit_crypto()
self.__bind()
# TODO: rename? No longer tied to "AES", just "encrypted" or "private" requests
class AESFuncs(object):
'''
Set up functions that are available when the load is encrypted with AES
'''
# The AES Functions:
#
def __init__(self, opts):
'''
Create a new AESFuncs
:param dict opts: The salt options
:rtype: AESFuncs
:returns: Instance for handling AES operations
'''
self.opts = opts
self.event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=False)
self.serial = salt.payload.Serial(opts)
self.ckminions = salt.utils.minions.CkMinions(opts)
# Make a client
self.local = salt.client.get_local_client(self.opts['conf_file'])
# Create the master minion to access the external job cache
self.mminion = salt.minion.MasterMinion(
self.opts,
states=False,
rend=False,
ignore_config_errors=True
)
self.__setup_fileserver()
self.masterapi = salt.daemons.masterapi.RemoteFuncs(opts)
def __setup_fileserver(self):
'''
Set the local file objects from the file server interface
'''
self.fs_ = salt.fileserver.Fileserver(self.opts)
self._serve_file = self.fs_.serve_file
self._file_hash = self.fs_.file_hash
self._file_list = self.fs_.file_list
self._file_list_emptydirs = self.fs_.file_list_emptydirs
self._dir_list = self.fs_.dir_list
self._symlink_list = self.fs_.symlink_list
self._file_envs = self.fs_.envs
def __verify_minion(self, id_, token):
'''
Take a minion id and a string signed with the minion private key
The string needs to verify as 'salt' with the minion public key
:param str id_: A minion ID
:param str token: A string signed with the minion private key
:rtype: bool
:return: Boolean indicating whether or not the token can be verified.
'''
if not salt.utils.verify.valid_id(self.opts, id_):
return False
pub_path = os.path.join(self.opts['pki_dir'], 'minions', id_)
with salt.utils.fopen(pub_path, 'r') as fp_:
minion_pub = fp_.read()
tmp_pub = salt.utils.mkstemp()
with salt.utils.fopen(tmp_pub, 'w+') as fp_:
fp_.write(minion_pub)
pub = None
try:
with salt.utils.fopen(tmp_pub) as fp_:
pub = RSA.importKey(fp_.read())
except (ValueError, IndexError, TypeError) as err:
log.error('Unable to load temporary public key "{0}": {1}'
.format(tmp_pub, err))
try:
os.remove(tmp_pub)
if salt.crypt.public_decrypt(pub, token) == 'salt':
return True
except ValueError as err:
log.error('Unable to decrypt token: {0}'.format(err))
        log.error('Salt minion claiming to be {0} has attempted to '
'communicate with the master and could not be verified'
.format(id_))
return False
def verify_minion(self, id_, token):
'''
Take a minion id and a string signed with the minion private key
The string needs to verify as 'salt' with the minion public key
:param str id_: A minion ID
:param str token: A string signed with the minion private key
:rtype: bool
:return: Boolean indicating whether or not the token can be verified.
'''
return self.__verify_minion(id_, token)
def __verify_minion_publish(self, clear_load):
'''
Verify that the passed information authorized a minion to execute
:param dict clear_load: A publication load from a minion
:rtype: bool
:return: A boolean indicating if the minion is allowed to publish the command in the load
'''
# Verify that the load is valid
if 'peer' not in self.opts:
return False
if not isinstance(self.opts['peer'], dict):
return False
if any(key not in clear_load for key in ('fun', 'arg', 'tgt', 'ret', 'tok', 'id')):
return False
# If the command will make a recursive publish don't run
if clear_load['fun'].startswith('publish.'):
return False
# Check the permissions for this minion
if not self.__verify_minion(clear_load['id'], clear_load['tok']):
# The minion is not who it says it is!
# We don't want to listen to it!
log.warn(
(
'Minion id {0} is not who it says it is and is attempting '
'to issue a peer command'
).format(clear_load['id'])
)
return False
clear_load.pop('tok')
perms = []
for match in self.opts['peer']:
if re.match(match, clear_load['id']):
# This is the list of funcs/modules!
if isinstance(self.opts['peer'][match], list):
perms.extend(self.opts['peer'][match])
if ',' in clear_load['fun']:
# 'arg': [['cat', '/proc/cpuinfo'], [], ['foo']]
clear_load['fun'] = clear_load['fun'].split(',')
arg_ = []
for arg in clear_load['arg']:
arg_.append(arg.split())
clear_load['arg'] = arg_
# finally, check the auth of the load
return self.ckminions.auth_check(
perms,
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'),
publish_validate=True)
def __verify_load(self, load, verify_keys):
'''
A utility function to perform common verification steps.
:param dict load: A payload received from a minion
:param list verify_keys: A list of strings that should be present in a
given load
:rtype: bool
:rtype: dict
:return: The original load (except for the token) if the load can be
verified. False if the load is invalid.
'''
if any(key not in load for key in verify_keys):
return False
if 'tok' not in load:
log.error(
'Received incomplete call from {0} for \'{1}\', missing \'{2}\''
.format(
load['id'],
inspect_stack()['co_name'],
'tok'
))
return False
if not self.__verify_minion(load['id'], load['tok']):
# The minion is not who it says it is!
# We don't want to listen to it!
log.warn(
'Minion id {0} is not who it says it is!'.format(
load['id']
)
)
return False
if 'tok' in load:
load.pop('tok')
return load
def _ext_nodes(self, load):
'''
Return the results from an external node classifier if one is
specified
:param dict load: A payload received from a minion
:return: The results from an external node classifier
'''
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return {}
return self.masterapi._ext_nodes(load, skip_verify=True)
def _master_opts(self, load):
'''
Return the master options to the minion
:param dict load: A payload received from a minion
:rtype: dict
:return: The master options
'''
mopts = {}
file_roots = {}
envs = self._file_envs()
for saltenv in envs:
if saltenv not in file_roots:
file_roots[saltenv] = []
mopts['file_roots'] = file_roots
mopts['top_file_merging_strategy'] = self.opts['top_file_merging_strategy']
mopts['env_order'] = self.opts['env_order']
mopts['default_top'] = self.opts['default_top']
if load.get('env_only'):
return mopts
mopts['renderer'] = self.opts['renderer']
mopts['failhard'] = self.opts['failhard']
mopts['state_top'] = self.opts['state_top']
mopts['state_top_saltenv'] = self.opts['state_top_saltenv']
mopts['nodegroups'] = self.opts['nodegroups']
mopts['state_auto_order'] = self.opts['state_auto_order']
mopts['state_events'] = self.opts['state_events']
mopts['state_aggregate'] = self.opts['state_aggregate']
mopts['jinja_lstrip_blocks'] = self.opts['jinja_lstrip_blocks']
mopts['jinja_trim_blocks'] = self.opts['jinja_trim_blocks']
return mopts
def _mine_get(self, load):
'''
Gathers the data from the specified minions' mine
:param dict load: A payload received from a minion
:rtype: dict
:return: Mine data from the specified minions
'''
load = self.__verify_load(load, ('id', 'tgt', 'fun', 'tok'))
if load is False:
return {}
else:
return self.masterapi._mine_get(load, skip_verify=True)
def _mine(self, load):
'''
Store the mine data
:param dict load: A payload received from a minion
:rtype: bool
:return: True if the data has been stored in the mine
'''
load = self.__verify_load(load, ('id', 'data', 'tok'))
if load is False:
return {}
return self.masterapi._mine(load, skip_verify=True)
def _mine_delete(self, load):
'''
Allow the minion to delete a specific function from its own mine
:param dict load: A payload received from a minion
:rtype: bool
:return: Boolean indicating whether or not the given function was deleted from the mine
'''
load = self.__verify_load(load, ('id', 'fun', 'tok'))
if load is False:
return {}
else:
return self.masterapi._mine_delete(load)
def _mine_flush(self, load):
'''
Allow the minion to delete all of its own mine contents
:param dict load: A payload received from a minion
'''
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return {}
else:
return self.masterapi._mine_flush(load, skip_verify=True)
def _file_recv(self, load):
'''
        Allows minions to send files to the master; files are sent to the
master file cache
'''
if any(key not in load for key in ('id', 'path', 'loc')):
return False
if not self.opts['file_recv'] or os.path.isabs(load['path']):
return False
if os.path.isabs(load['path']) or '../' in load['path']:
# Can overwrite master files!!
return False
if not salt.utils.verify.valid_id(self.opts, load['id']):
return False
file_recv_max_size = 1024*1024 * self.opts['file_recv_max_size']
if 'loc' in load and load['loc'] < 0:
log.error('Invalid file pointer: load[loc] < 0')
return False
if len(load['data']) + load.get('loc', 0) > file_recv_max_size:
log.error(
'Exceeding file_recv_max_size limit: {0}'.format(
file_recv_max_size
)
)
return False
if 'tok' not in load:
log.error(
'Received incomplete call from {0} for \'{1}\', missing '
'\'{2}\''.format(
load['id'],
inspect_stack()['co_name'],
'tok'
)
)
return False
if not self.__verify_minion(load['id'], load['tok']):
# The minion is not who it says it is!
# We don't want to listen to it!
log.warn(
'Minion id {0} is not who it says it is!'.format(
load['id']
)
)
return {}
load.pop('tok')
# Normalize Windows paths
normpath = load['path']
if ':' in normpath:
# make sure double backslashes are normalized
normpath = normpath.replace('\\', '/')
normpath = os.path.normpath(normpath)
cpath = os.path.join(
self.opts['cachedir'],
'minions',
load['id'],
'files',
normpath)
cdir = os.path.dirname(cpath)
if not os.path.isdir(cdir):
try:
os.makedirs(cdir)
except os.error:
pass
if os.path.isfile(cpath) and load['loc'] != 0:
mode = 'ab'
else:
mode = 'wb'
with salt.utils.fopen(cpath, mode) as fp_:
if load['loc']:
fp_.seek(load['loc'])
fp_.write(load['data'])
return True
def _pillar(self, load):
'''
Return the pillar data for the minion
:param dict load: Minion payload
:rtype: dict
:return: The pillar data for the minion
'''
if any(key not in load for key in ('id', 'grains')):
return False
if not salt.utils.verify.valid_id(self.opts, load['id']):
return False
load['grains']['id'] = load['id']
pillar_dirs = {}
# pillar = salt.pillar.Pillar(
pillar = salt.pillar.get_pillar(
self.opts,
load['grains'],
load['id'],
load.get('saltenv', load.get('env')),
ext=load.get('ext'),
pillar=load.get('pillar_override', {}),
pillarenv=load.get('pillarenv'))
data = pillar.compile_pillar(pillar_dirs=pillar_dirs)
self.fs_.update_opts()
if self.opts.get('minion_data_cache', False):
cdir = os.path.join(self.opts['cachedir'], 'minions', load['id'])
if not os.path.isdir(cdir):
os.makedirs(cdir)
datap = os.path.join(cdir, 'data.p')
tmpfh, tmpfname = tempfile.mkstemp(dir=cdir)
os.close(tmpfh)
with salt.utils.fopen(tmpfname, 'w+b') as fp_:
fp_.write(
self.serial.dumps(
{'grains': load['grains'],
'pillar': data})
)
# On Windows, os.rename will fail if the destination file exists.
salt.utils.atomicfile.atomic_rename(tmpfname, datap)
return data
def _minion_event(self, load):
'''
Receive an event from the minion and fire it on the master event
interface
:param dict load: The minion payload
'''
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return {}
# Route to master event bus
self.masterapi._minion_event(load)
# Process locally
self._handle_minion_event(load)
def _handle_minion_event(self, load):
'''
Act on specific events from minions
'''
id_ = load['id']
if load.get('tag', '') == '_salt_error':
log.error(
'Received minion error from [{minion}]: {data}'
.format(minion=id_, data=load['data']['message'])
)
for event in load.get('events', []):
event_data = event.get('data', {})
if 'minions' in event_data:
jid = event_data.get('jid')
if not jid:
continue
minions = event_data['minions']
try:
salt.utils.job.store_minions(
self.opts,
jid,
minions,
mminion=self.mminion,
syndic_id=id_)
except (KeyError, salt.exceptions.SaltCacheError) as exc:
log.error(
'Could not add minion(s) {0} for job {1}: {2}'
.format(minions, jid, exc)
)
def _return(self, load):
'''
Handle the return data sent from the minions.
Takes the return, verifies it and fires it on the master event bus.
Typically, this event is consumed by the Salt CLI waiting on the other
end of the event bus but could be heard by any listener on the bus.
:param dict load: The minion payload
'''
try:
salt.utils.job.store_job(
self.opts, load, event=self.event, mminion=self.mminion)
except salt.exceptions.SaltCacheError:
log.error('Could not store job information for load: {0}'.format(load))
def _syndic_return(self, load):
'''
Receive a syndic minion return and format it to look like returns from
individual minions.
:param dict load: The minion payload
'''
# Verify the load
if any(key not in load for key in ('return', 'jid', 'id')):
return None
# if we have a load, save it
if load.get('load'):
fstr = '{0}.save_load'.format(self.opts['master_job_cache'])
self.mminion.returners[fstr](load['jid'], load['load'])
# Register the syndic
syndic_cache_path = os.path.join(self.opts['cachedir'], 'syndics', load['id'])
if not os.path.exists(syndic_cache_path):
path_name = os.path.split(syndic_cache_path)[0]
if not os.path.exists(path_name):
os.makedirs(path_name)
with salt.utils.fopen(syndic_cache_path, 'w') as wfh:
wfh.write('')
# Format individual return loads
for key, item in six.iteritems(load['return']):
ret = {'jid': load['jid'],
'id': key,
'return': item}
if 'master_id' in load:
ret['master_id'] = load['master_id']
if 'fun' in load:
ret['fun'] = load['fun']
if 'arg' in load:
ret['fun_args'] = load['arg']
if 'out' in load:
ret['out'] = load['out']
self._return(ret)
def minion_runner(self, clear_load):
'''
Execute a runner from a minion, return the runner's function data
:param dict clear_load: The minion payload
:rtype: dict
:return: The runner function data
'''
load = self.__verify_load(clear_load, ('fun', 'arg', 'id', 'tok'))
if load is False:
return {}
else:
return self.masterapi.minion_runner(clear_load)
def pub_ret(self, load):
'''
Request the return data from a specific jid, only allowed
        if the requesting minion also initiated the execution.
:param dict load: The minion payload
:rtype: dict
:return: Return data corresponding to a given JID
'''
load = self.__verify_load(load, ('jid', 'id', 'tok'))
if load is False:
return {}
# Check that this minion can access this data
auth_cache = os.path.join(
self.opts['cachedir'],
'publish_auth')
if not os.path.isdir(auth_cache):
os.makedirs(auth_cache)
jid_fn = os.path.join(auth_cache, str(load['jid']))
with salt.utils.fopen(jid_fn, 'r') as fp_:
if not load['id'] == fp_.read():
return {}
# Grab the latest and return
return self.local.get_cache_returns(load['jid'])
def minion_pub(self, clear_load):
'''
        Publish a command initiated from a minion; this method enforces minion
        restrictions so that the minion publication will only work if it is
        enabled in the config.
        The configuration on the master allows minions to be matched to
        salt functions, so the minions can only publish allowed salt functions.
        The config will look like this:
.. code-block:: bash
peer:
.*:
- .*
This configuration will enable all minions to execute all commands:
.. code-block:: bash
peer:
foo.example.com:
- test.*
The above configuration will only allow the minion foo.example.com to
execute commands from the test module.
        :param dict clear_load: The minion payload
'''
if not self.__verify_minion_publish(clear_load):
return {}
else:
return self.masterapi.minion_pub(clear_load)
def minion_publish(self, clear_load):
'''
        Publish a command initiated from a minion; this method enforces minion
        restrictions so that the minion publication will only work if it is
        enabled in the config.
        The configuration on the master allows minions to be matched to
        salt functions, so the minions can only publish allowed salt functions.
        The config will look like this:
.. code-block:: bash
peer:
.*:
- .*
This configuration will enable all minions to execute all commands.
        .. code-block:: bash
            peer:
                foo.example.com:
                    - test.*
The above configuration will only allow the minion foo.example.com to
execute commands from the test module.
:param dict clear_load: The minion payload
'''
if not self.__verify_minion_publish(clear_load):
return {}
else:
return self.masterapi.minion_publish(clear_load)
def revoke_auth(self, load):
'''
Allow a minion to request revocation of its own key
:param dict load: The minion payload
        :rtype: bool
        :return: True if the key was revoked, False if not. If the load is
            invalid, it is returned as-is and no key operation is performed.
'''
load = self.__verify_load(load, ('id', 'tok'))
if load is False:
return load
else:
return self.masterapi.revoke_auth(load)
def run_func(self, func, load):
'''
Wrapper for running functions executed with AES encryption
:param function func: The function to run
:return: The result of the master function that was called
'''
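        # Each handler returns a (payload, routing) tuple; the routing dict is
        # interpreted by the transport layer: 'send' for a plain reply and
        # 'send_private' with a key/target (used for pillar data below) for a
        # reply intended for a single minion.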
# Don't honor private functions
if func.startswith('__'):
# TODO: return some error? Seems odd to return {}
return {}, {'fun': 'send'}
# Run the func
if hasattr(self, func):
try:
start = time.time()
ret = getattr(self, func)(load)
log.trace(
'Master function call {0} took {1} seconds'.format(
func, time.time() - start
)
)
except Exception:
ret = ''
log.error(
'Error in function {0}:\n'.format(func),
exc_info=True
)
else:
log.error(
'Received function {0} which is unavailable on the master, '
'returning False'.format(
func
)
)
return False, {'fun': 'send'}
# Don't encrypt the return value for the _return func
# (we don't care about the return value, so why encrypt it?)
if func == '_return':
return ret, {'fun': 'send'}
if func == '_pillar' and 'id' in load:
if load.get('ver') != '2' and self.opts['pillar_version'] == 1:
# Authorized to return old pillar proto
return ret, {'fun': 'send'}
return ret, {'fun': 'send_private', 'key': 'pillar', 'tgt': load['id']}
# Encrypt the return
return ret, {'fun': 'send'}
class ClearFuncs(object):
'''
    Set up functions that are safe to execute when commands are sent to the
    master without encryption and authentication
'''
# The ClearFuncs object encapsulates the functions that can be executed in
# the clear:
# publish (The publish from the LocalClient)
# _auth
def __init__(self, opts, key):
self.opts = opts
self.key = key
# Create the event manager
self.event = salt.utils.event.get_master_event(self.opts, self.opts['sock_dir'], listen=False)
# Make a client
self.local = salt.client.get_local_client(self.opts['conf_file'])
        # Make a minion checker object
self.ckminions = salt.utils.minions.CkMinions(opts)
# Make an Auth object
self.loadauth = salt.auth.LoadAuth(opts)
# Stand up the master Minion to access returner data
self.mminion = salt.minion.MasterMinion(
self.opts,
states=False,
rend=False,
ignore_config_errors=True
)
# Make a wheel object
self.wheel_ = salt.wheel.Wheel(opts)
# Make a masterapi object
self.masterapi = salt.daemons.masterapi.LocalFuncs(opts, key)
def process_token(self, tok, fun, auth_type):
'''
Process a token and determine if a command is authorized
'''
try:
token = self.loadauth.get_tok(tok)
except Exception as exc:
msg = 'Exception occurred when generating auth token: {0}'.format(
exc)
log.error(msg)
return dict(error=dict(name='TokenAuthenticationError',
message=msg))
if not token:
msg = 'Authentication failure of type "token" occurred.'
log.warning(msg)
return dict(error=dict(name='TokenAuthenticationError',
message=msg))
if token['eauth'] not in self.opts['external_auth']:
msg = 'Authentication failure of type "token" occurred.'
log.warning(msg)
return dict(error=dict(name='TokenAuthenticationError',
message=msg))
check_fun = getattr(self.ckminions,
'{auth}_check'.format(auth=auth_type))
if token['name'] in self.opts['external_auth'][token['eauth']]:
good = check_fun(self.opts['external_auth'][token['eauth']][token['name']], fun)
elif any(key.endswith('%') for key in self.opts['external_auth'][token['eauth']]):
            for group in self.opts['external_auth'][token['eauth']]:
                if group.endswith('%'):
                    good = check_fun(self.opts['external_auth'][token['eauth']][group], fun)
                    if good:
                        break
else:
good = check_fun(self.opts['external_auth'][token['eauth']]['*'], fun)
if not good:
msg = ('Authentication failure of type "token" occurred for '
'user {0}.').format(token['name'])
log.warning(msg)
return dict(error=dict(name='TokenAuthenticationError',
message=msg))
return None
def process_eauth(self, clear_load, auth_type):
'''
Process a clear load to determine eauth perms
Any return other than None is an eauth failure
'''
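        # The external_auth config maps an eauth backend to users, groups
        # (entries ending in '%') and an optional '*' catch-all. A hypothetical
        # example of the structure this method walks:
        #   external_auth:
        #     pam:
        #       fred:
        #         - test.*
        #       admins%:
        #         - .*
        #       '*':
        #         - test.ping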
if 'eauth' not in clear_load:
msg = ('Authentication failure of type "eauth" occurred for '
'user {0}.').format(clear_load.get('username', 'UNKNOWN'))
log.warning(msg)
return dict(error=dict(name='EauthAuthenticationError',
message=msg))
if clear_load['eauth'] not in self.opts['external_auth']:
# The eauth system is not enabled, fail
msg = ('Authentication failure of type "eauth" occurred for '
'user {0}.').format(clear_load.get('username', 'UNKNOWN'))
log.warning(msg)
return dict(error=dict(name='EauthAuthenticationError',
message=msg))
name = self.loadauth.load_name(clear_load)
if not ((name in self.opts['external_auth'][clear_load['eauth']]) |
('*' in self.opts['external_auth'][clear_load['eauth']])):
msg = ('Authentication failure of type "eauth" occurred for '
'user {0}.').format(clear_load.get('username', 'UNKNOWN'))
log.warning(msg)
return dict(error=dict(name='EauthAuthenticationError',
message=msg))
if not self.loadauth.time_auth(clear_load):
msg = ('Authentication failure of type "eauth" occurred for '
'user {0}.').format(clear_load.get('username', 'UNKNOWN'))
log.warning(msg)
return dict(error=dict(name='EauthAuthenticationError',
message=msg))
check_fun = getattr(self.ckminions,
'{auth}_check'.format(auth=auth_type))
if name in self.opts['external_auth'][clear_load['eauth']]:
good = check_fun(self.opts['external_auth'][clear_load['eauth']][name], clear_load['fun'])
elif any(key.endswith('%') for key in self.opts['external_auth'][clear_load['eauth']]):
for group in self.opts['external_auth'][clear_load['eauth']]:
if group.endswith('%'):
good = check_fun(self.opts['external_auth'][clear_load['eauth']][group], clear_load['fun'])
if good:
break
else:
good = check_fun(self.opts['external_auth'][clear_load['eauth']]['*'], clear_load['fun'])
if not good:
msg = ('Authentication failure of type "eauth" occurred for '
'user {0}.').format(clear_load.get('username', 'UNKNOWN'))
log.warning(msg)
return dict(error=dict(name='EauthAuthenticationError',
message=msg))
return None
def runner(self, clear_load):
'''
Send a master control function back to the runner system
'''
# All runner ops pass through eauth
if 'token' in clear_load:
auth_error = self.process_token(clear_load['token'],
clear_load['fun'],
'runner')
if auth_error:
return auth_error
else:
token = self.loadauth.get_tok(clear_load.pop('token'))
try:
fun = clear_load.pop('fun')
runner_client = salt.runner.RunnerClient(self.opts)
return runner_client.async(
fun,
clear_load.get('kwarg', {}),
token['name'])
except Exception as exc:
log.error('Exception occurred while '
'introspecting {0}: {1}'.format(fun, exc))
return dict(error=dict(name=exc.__class__.__name__,
args=exc.args,
message=str(exc)))
try:
eauth_error = self.process_eauth(clear_load, 'runner')
if eauth_error:
return eauth_error
# No error occurred, consume the password from the clear_load if
# passed
clear_load.pop('password', None)
try:
fun = clear_load.pop('fun')
runner_client = salt.runner.RunnerClient(self.opts)
return runner_client.async(fun,
clear_load.get('kwarg', {}),
clear_load.pop('username', 'UNKNOWN'))
except Exception as exc:
log.error('Exception occurred while '
'introspecting {0}: {1}'.format(fun, exc))
return dict(error=dict(name=exc.__class__.__name__,
args=exc.args,
message=str(exc)))
except Exception as exc:
log.error(
'Exception occurred in the runner system: {0}'.format(exc)
)
return dict(error=dict(name=exc.__class__.__name__,
args=exc.args,
message=str(exc)))
def wheel(self, clear_load):
'''
Send a master control function back to the wheel system
'''
# All wheel ops pass through eauth
if 'token' in clear_load:
auth_error = self.process_token(clear_load['token'],
clear_load['fun'],
'wheel')
if auth_error:
return auth_error
else:
token = self.loadauth.get_tok(clear_load.pop('token'))
jid = salt.utils.jid.gen_jid()
fun = clear_load.pop('fun')
tag = tagify(jid, prefix='wheel')
data = {'fun': "wheel.{0}".format(fun),
'jid': jid,
'tag': tag,
'user': token['name']}
try:
self.event.fire_event(data, tagify([jid, 'new'], 'wheel'))
ret = self.wheel_.call_func(fun, **clear_load)
data['return'] = ret
data['success'] = True
self.event.fire_event(data, tagify([jid, 'ret'], 'wheel'))
return {'tag': tag,
'data': data}
except Exception as exc:
log.error(exc)
log.error('Exception occurred while '
'introspecting {0}: {1}'.format(fun, exc))
data['return'] = 'Exception occurred in wheel {0}: {1}: {2}'.format(
fun,
exc.__class__.__name__,
exc,
)
data['success'] = False
self.event.fire_event(data, tagify([jid, 'ret'], 'wheel'))
return {'tag': tag,
'data': data}
try:
eauth_error = self.process_eauth(clear_load, 'wheel')
if eauth_error:
return eauth_error
# No error occurred, consume the password from the clear_load if
# passed
clear_load.pop('password', None)
jid = salt.utils.jid.gen_jid()
fun = clear_load.pop('fun')
tag = tagify(jid, prefix='wheel')
data = {'fun': "wheel.{0}".format(fun),
'jid': jid,
'tag': tag,
'user': clear_load.pop('username', 'UNKNOWN')}
try:
self.event.fire_event(data, tagify([jid, 'new'], 'wheel'))
ret = self.wheel_.call_func(fun, **clear_load)
data['return'] = ret
data['success'] = True
self.event.fire_event(data, tagify([jid, 'ret'], 'wheel'))
return {'tag': tag,
'data': data}
except Exception as exc:
log.error('Exception occurred while '
'introspecting {0}: {1}'.format(fun, exc))
data['return'] = 'Exception occurred in wheel {0}: {1}: {2}'.format(
fun,
exc.__class__.__name__,
exc,
)
self.event.fire_event(data, tagify([jid, 'ret'], 'wheel'))
return {'tag': tag,
'data': data}
except Exception as exc:
log.error(
'Exception occurred in the wheel system: {0}'.format(exc)
)
return dict(error=dict(name=exc.__class__.__name__,
args=exc.args,
message=str(exc)))
def mk_token(self, clear_load):
'''
Create and return an authentication token, the clear load needs to
contain the eauth key and the needed authentication creds.
'''
if 'eauth' not in clear_load:
log.warning('Authentication failure of type "eauth" occurred.')
return ''
if clear_load['eauth'] not in self.opts['external_auth']:
# The eauth system is not enabled, fail
log.warning('Authentication failure of type "eauth" occurred.')
return ''
try:
name = self.loadauth.load_name(clear_load)
groups = self.loadauth.get_groups(clear_load)
eauth_config = self.opts['external_auth'][clear_load['eauth']]
if '*' not in eauth_config and name not in eauth_config:
found = False
for group in groups:
if "{0}%".format(group) in eauth_config:
found = True
break
if not found:
log.warning('Authentication failure of type "eauth" occurred.')
return ''
clear_load['groups'] = groups
token = self.loadauth.mk_token(clear_load)
if not token:
log.warning('Authentication failure of type "eauth" occurred.')
return ''
else:
return token
except Exception as exc:
type_, value_, traceback_ = sys.exc_info()
log.error(
'Exception occurred while authenticating: {0}'.format(exc)
)
log.error(traceback.format_exception(type_, value_, traceback_))
return ''
def get_token(self, clear_load):
'''
Return the name associated with a token or False if the token is invalid
'''
if 'token' not in clear_load:
return False
return self.loadauth.get_tok(clear_load['token'])
def publish(self, clear_load):
'''
        This method sends out publications to the minions; it can only be used
        by the LocalClient.
'''
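        # Authentication below happens in one of three ways: a pre-created
        # eauth token passed in kwargs, raw eauth credentials, or a check of
        # the locally stored user/root key; the job id is only allocated and
        # the payload published after one of these checks passes.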
extra = clear_load.get('kwargs', {})
if self.opts['client_acl'] or self.opts['client_acl_blacklist']:
salt.utils.warn_until(
'Nitrogen',
'ACL rules should be configured with \'publisher_acl\' and '
'\'publisher_acl_blacklist\' not \'client_acl\' and \'client_acl_blacklist\'. '
'This functionality will be removed in Salt Nitrogen.'
)
publisher_acl = salt.acl.PublisherACL(
self.opts['publisher_acl_blacklist'] or self.opts['client_acl_blacklist'])
if publisher_acl.user_is_blacklisted(clear_load['user']) or \
publisher_acl.cmd_is_blacklisted(clear_load['fun']):
log.error(
'{user} does not have permissions to run {function}. Please '
'contact your local administrator if you believe this is in '
'error.\n'.format(
user=clear_load['user'],
function=clear_load['fun']
)
)
return ''
# Check for external auth calls
if extra.get('token', False):
# A token was passed, check it
try:
token = self.loadauth.get_tok(extra['token'])
except Exception as exc:
log.error(
'Exception occurred when generating auth token: {0}'.format(
exc
)
)
return ''
# Bail if the token is empty or if the eauth type specified is not allowed
if not token or token['eauth'] not in self.opts['external_auth']:
log.warning('Authentication failure of type "token" occurred.')
return ''
# Fetch eauth config and collect users and groups configured for access
eauth_config = self.opts['external_auth'][token['eauth']]
eauth_users = []
eauth_groups = []
for entry in eauth_config:
if entry.endswith('%'):
eauth_groups.append(entry.rstrip('%'))
else:
eauth_users.append(entry)
# If there are groups in the token, check if any of them are listed in the eauth config
group_auth_match = False
try:
if token.get('groups'):
for group in token['groups']:
if group in eauth_groups:
group_auth_match = True
break
except KeyError:
pass
if '*' not in eauth_users and token['name'] not in eauth_users \
and not group_auth_match:
log.warning('Authentication failure of type "token" occurred.')
return ''
# Compile list of authorized actions for the user
auth_list = []
# Add permissions for '*' or user-specific to the auth list
for user_key in ('*', token['name']):
auth_list.extend(eauth_config.get(user_key, []))
# Add any add'l permissions allowed by group membership
if group_auth_match:
auth_list = self.ckminions.fill_auth_list_from_groups(eauth_config, token['groups'], auth_list)
auth_list = self.ckminions.fill_auth_list_from_ou(auth_list, self.opts)
log.trace("Compiled auth_list: {0}".format(auth_list))
good = self.ckminions.auth_check(
auth_list,
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'))
if not good:
# Accept find_job so the CLI will function cleanly
if clear_load['fun'] != 'saltutil.find_job':
log.warning(
'Authentication failure of type "token" occurred.'
)
return ''
clear_load['user'] = token['name']
log.debug('Minion tokenized user = "{0}"'.format(clear_load['user']))
elif 'eauth' in extra:
if extra['eauth'] not in self.opts['external_auth']:
# The eauth system is not enabled, fail
log.warning(
'Authentication failure of type "eauth" occurred.'
)
return ''
try:
name = self.loadauth.load_name(extra) # The username we are attempting to auth with
groups = self.loadauth.get_groups(extra) # The groups this user belongs to
if groups is None:
groups = []
group_perm_keys = [item for item in self.opts['external_auth'][extra['eauth']] if item.endswith('%')] # The configured auth groups
# First we need to know if the user is allowed to proceed via any of their group memberships.
group_auth_match = False
for group_config in group_perm_keys:
group_config = group_config.rstrip('%')
for group in groups:
if group == group_config:
group_auth_match = True
# If a group_auth_match is set it means only that we have a
# user which matches at least one or more of the groups defined
# in the configuration file.
external_auth_in_db = False
for entry in self.opts['external_auth'][extra['eauth']]:
if entry.startswith('^'):
external_auth_in_db = True
break
# If neither a catchall, a named membership or a group
# membership is found, there is no need to continue. Simply
# deny the user access.
if not ((name in self.opts['external_auth'][extra['eauth']]) |
('*' in self.opts['external_auth'][extra['eauth']]) |
group_auth_match | external_auth_in_db):
# A group def is defined and the user is a member
#[group for groups in ['external_auth'][extra['eauth']]]):
# Auth successful, but no matching user found in config
log.warning(
'Authentication failure of type "eauth" occurred.'
)
return ''
# Perform the actual authentication. If we fail here, do not
# continue.
if not self.loadauth.time_auth(extra):
log.warning(
'Authentication failure of type "eauth" occurred.'
)
return ''
except Exception as exc:
type_, value_, traceback_ = sys.exc_info()
log.error(
'Exception occurred while authenticating: {0}'.format(exc)
)
log.error(traceback.format_exception(
type_, value_, traceback_))
return ''
# auth_list = self.opts['external_auth'][extra['eauth']][name] if name in self.opts['external_auth'][extra['eauth']] else self.opts['external_auth'][extra['eauth']]['*']
# We now have an authenticated session and it is time to determine
# what the user has access to.
auth_list = []
if '*' in self.opts['external_auth'][extra['eauth']]:
auth_list.extend(self.opts['external_auth'][extra['eauth']]['*'])
if name in self.opts['external_auth'][extra['eauth']]:
auth_list = self.opts['external_auth'][extra['eauth']][name]
if group_auth_match:
auth_list = self.ckminions.fill_auth_list_from_groups(
self.opts['external_auth'][extra['eauth']],
groups,
auth_list)
if extra['eauth'] == 'ldap':
auth_list = self.ckminions.fill_auth_list_from_ou(auth_list, self.opts)
good = self.ckminions.auth_check(
auth_list,
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob')
)
if not good:
# Accept find_job so the CLI will function cleanly
if clear_load['fun'] != 'saltutil.find_job':
log.warning(
'Authentication failure of type "eauth" occurred.'
)
return ''
clear_load['user'] = name
# Verify that the caller has root on master
elif 'user' in clear_load:
auth_user = salt.auth.AuthUser(clear_load['user'])
if auth_user.is_sudo():
                # If someone sudos, check to make sure there are no ACLs around their username
if clear_load.get('key', 'invalid') == self.key.get('root'):
clear_load.pop('key')
elif clear_load.pop('key') != self.key[self.opts.get('user', 'root')]:
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
publisher_acl = self.opts['publisher_acl'] or self.opts['client_acl']
if self.opts['sudo_acl'] and publisher_acl:
good = self.ckminions.auth_check(
publisher_acl.get(clear_load['user'].split('_', 1)[-1]),
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'))
if not good:
# Accept find_job so the CLI will function cleanly
if clear_load['fun'] != 'saltutil.find_job':
log.warning(
'Authentication failure of type "user" '
'occurred.'
)
return ''
elif clear_load['user'] == self.opts.get('user', 'root') or clear_load['user'] == 'root':
if clear_load.pop('key') != self.key[self.opts.get('user', 'root')]:
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
elif auth_user.is_running_user():
if clear_load.pop('key') != self.key.get(clear_load['user']):
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
elif clear_load.get('key', 'invalid') == self.key.get('root'):
clear_load.pop('key')
else:
if clear_load['user'] in self.key:
# User is authorised, check key and check perms
if clear_load.pop('key') != self.key[clear_load['user']]:
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
acl = self.opts['publisher_acl'] or self.opts['client_acl']
if clear_load['user'] not in acl:
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
good = self.ckminions.auth_check(
acl[clear_load['user']],
clear_load['fun'],
clear_load['arg'],
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'))
if not good:
# Accept find_job so the CLI will function cleanly
if clear_load['fun'] != 'saltutil.find_job':
log.warning(
'Authentication failure of type "user" '
'occurred.'
)
return ''
else:
log.warning(
'Authentication failure of type "user" occurred.'
)
return ''
else:
if clear_load.pop('key') != self.key[salt.utils.get_user()]:
log.warning(
'Authentication failure of type "other" occurred.'
)
return ''
# FIXME Needs additional refactoring
# Retrieve the minions list
delimiter = clear_load.get('kwargs', {}).get('delimiter', DEFAULT_TARGET_DELIM)
minions = self.ckminions.check_minions(
clear_load['tgt'],
clear_load.get('tgt_type', 'glob'),
delimiter
)
# If we order masters (via a syndic), don't short circuit if no minions
# are found
if not self.opts.get('order_masters'):
# Check for no minions
if not minions:
return {
'enc': 'clear',
'load': {
'jid': None,
'minions': minions
}
}
jid = self._prep_jid(clear_load, extra)
if jid is None:
return {}
payload = self._prep_pub(minions, jid, clear_load, extra)
# Send it!
self._send_pub(payload)
return {
'enc': 'clear',
'load': {
'jid': clear_load['jid'],
'minions': minions
}
}
def _prep_jid(self, clear_load, extra):
'''
Return a jid for this publication
'''
# the jid in clear_load can be None, '', or something else. this is an
# attempt to clean up the value before passing to plugins
passed_jid = clear_load['jid'] if clear_load.get('jid') else None
nocache = extra.get('nocache', False)
# Retrieve the jid
fstr = '{0}.prep_jid'.format(self.opts['master_job_cache'])
try:
# Retrieve the jid
jid = self.mminion.returners[fstr](nocache=nocache,
passed_jid=passed_jid)
except (KeyError, TypeError):
# The returner is not present
msg = (
'Failed to allocate a jid. The requested returner \'{0}\' '
'could not be loaded.'.format(fstr.split('.')[0])
)
log.error(msg)
return {'error': msg}
return jid
def _send_pub(self, load):
'''
Take a load and send it across the network to connected minions
'''
for transport, opts in iter_transport_opts(self.opts):
chan = salt.transport.server.PubServerChannel.factory(opts)
chan.publish(load)
def _prep_pub(self, minions, jid, clear_load, extra):
'''
Take a given load and perform the necessary steps
to prepare a publication.
TODO: This is really only bound by temporal cohesion
and thus should be refactored even further.
'''
clear_load['jid'] = jid
delimiter = clear_load.get('kwargs', {}).get('delimiter', DEFAULT_TARGET_DELIM)
# TODO Error reporting over the master event bus
self.event.fire_event({'minions': minions}, clear_load['jid'])
new_job_load = {
'jid': clear_load['jid'],
'tgt_type': clear_load['tgt_type'],
'tgt': clear_load['tgt'],
'user': clear_load['user'],
'fun': clear_load['fun'],
'arg': clear_load['arg'],
'minions': minions,
}
# Announce the job on the event bus
self.event.fire_event(new_job_load, tagify([clear_load['jid'], 'new'], 'job'))
if self.opts['ext_job_cache']:
fstr = '{0}.save_load'.format(self.opts['ext_job_cache'])
save_load_func = True
# Get the returner's save_load arg_spec.
try:
                arg_spec = salt.utils.args.get_function_argspec(self.mminion.returners[fstr])
# Check if 'minions' is included in returner's save_load arg_spec.
# This may be missing in custom returners, which we should warn about.
if 'minions' not in arg_spec.args:
log.critical(
'The specified returner used for the external job cache '
'\'{0}\' does not have a \'minions\' kwarg in the returner\'s '
'save_load function.'.format(
self.opts['ext_job_cache']
)
)
except AttributeError:
save_load_func = False
log.critical(
'The specified returner used for the external job cache '
'"{0}" does not have a save_load function!'.format(
self.opts['ext_job_cache']
)
)
if save_load_func:
try:
self.mminion.returners[fstr](clear_load['jid'], clear_load, minions=minions)
except Exception:
log.critical(
'The specified returner threw a stack trace:\n',
exc_info=True
)
# always write out to the master job caches
try:
fstr = '{0}.save_load'.format(self.opts['master_job_cache'])
self.mminion.returners[fstr](clear_load['jid'], clear_load, minions)
except KeyError:
log.critical(
'The specified returner used for the master job cache '
'"{0}" does not have a save_load function!'.format(
self.opts['master_job_cache']
)
)
except Exception:
log.critical(
'The specified returner threw a stack trace:\n',
exc_info=True
)
# Set up the payload
payload = {'enc': 'aes'}
# Altering the contents of the publish load is serious!! Changes here
# break compatibility with minion/master versions and even tiny
# additions can have serious implications on the performance of the
# publish commands.
#
# In short, check with Thomas Hatch before you even think about
# touching this stuff, we can probably do what you want to do another
# way that won't have a negative impact.
load = {
'fun': clear_load['fun'],
'arg': clear_load['arg'],
'tgt': clear_load['tgt'],
'jid': clear_load['jid'],
'ret': clear_load['ret'],
}
# if you specified a master id, lets put that in the load
if 'master_id' in self.opts:
load['master_id'] = self.opts['master_id']
# if someone passed us one, use that
if 'master_id' in extra:
load['master_id'] = extra['master_id']
# Only add the delimiter to the pub data if it is non-default
if delimiter != DEFAULT_TARGET_DELIM:
load['delimiter'] = delimiter
if 'id' in extra:
load['id'] = extra['id']
if 'tgt_type' in clear_load:
load['tgt_type'] = clear_load['tgt_type']
if 'to' in clear_load:
load['to'] = clear_load['to']
if 'kwargs' in clear_load:
if 'ret_config' in clear_load['kwargs']:
load['ret_config'] = clear_load['kwargs'].get('ret_config')
if 'metadata' in clear_load['kwargs']:
load['metadata'] = clear_load['kwargs'].get('metadata')
if 'module_executors' in clear_load['kwargs']:
load['module_executors'] = clear_load['kwargs'].get('module_executors')
if 'ret_kwargs' in clear_load['kwargs']:
load['ret_kwargs'] = clear_load['kwargs'].get('ret_kwargs')
if 'user' in clear_load:
log.info(
'User {user} Published command {fun} with jid {jid}'.format(
**clear_load
)
)
load['user'] = clear_load['user']
else:
log.info(
'Published command {fun} with jid {jid}'.format(
**clear_load
)
)
log.debug('Published command details {0}'.format(load))
return load
def ping(self, clear_load):
'''
Send the load back to the sender.
'''
return clear_load
class FloMWorker(MWorker):
'''
Change the run and bind to be ioflo friendly
'''
def __init__(self,
opts,
key,
):
MWorker.__init__(self, opts, key)
def setup(self):
'''
Prepare the needed objects and socket for iteration within ioflo
'''
salt.utils.appendproctitle(self.__class__.__name__)
self.clear_funcs = salt.master.ClearFuncs(
self.opts,
self.key,
)
self.aes_funcs = salt.master.AESFuncs(self.opts)
self.context = zmq.Context(1)
self.socket = self.context.socket(zmq.REP)
if self.opts.get('ipc_mode', '') == 'tcp':
self.w_uri = 'tcp://127.0.0.1:{0}'.format(
self.opts.get('tcp_master_workers', 4515)
)
else:
self.w_uri = 'ipc://{0}'.format(
os.path.join(self.opts['sock_dir'], 'workers.ipc')
)
log.info('ZMQ Worker binding to socket {0}'.format(self.w_uri))
self.poller = zmq.Poller()
self.poller.register(self.socket, zmq.POLLIN)
self.socket.connect(self.w_uri)
def handle_request(self):
'''
Handle a single request
'''
try:
polled = self.poller.poll(1)
if polled:
package = self.socket.recv()
self._update_aes()
payload = self.serial.loads(package)
ret = self.serial.dumps(self._handle_payload(payload))
self.socket.send(ret)
except KeyboardInterrupt:
raise
except Exception as exc:
# Properly handle EINTR from SIGUSR1
if isinstance(exc, zmq.ZMQError) and exc.errno == errno.EINTR:
return
| apache-2.0 | -931,202,925,453,625,500 | 37.745082 | 180 | 0.525334 | false |
DavideMassidda/Simon | main.py | 1 | 6607 | # -*- coding: utf-8 -*-
from __future__ import division
from psychopy import visual, core, event
from settings import * # Load the settings form
# --------------------------------------
# Experiment settings
# --------------------------------------
buttons = ['l','a'] # Response buttons (L: right, A: left)
subjCode = SubjCode.get() # Numeric subject code
seq = FirstSeq.get() # Code of the starting sequence
isTrain = IsTrain.get() # Indicates whether to record responses
ISI = Isi.get() # Inter-stimulus interval (sec)
# --------------------------------------
# Monitor settings
# --------------------------------------
screenSize = (MonitorResW.get(),MonitorResH.get()) # Resolution (w,h)
fullscreen = FS.get() # Fullscreen yes/no
screenColor = ScreenCol.get() # Window color
stimColor = StimCol.get() # Stimulus color
# --------------------------------------
# Stimulus organization
# --------------------------------------
# Experimental factors:
# Stim: stimulus (circle vs square)
# Side: presentation side (right vs left)
if isTrain == 0: # It is not a practice session
Stim = [
[0, 0, 1, 1],
[0, 0, 1, 1],
[1, 1, 0, 0],
[1, 1, 0, 0],
[0, 1, 0, 1],
[1, 0, 1, 0],
[0, 1, 0, 1],
[1, 0, 1, 0]]
Side = [
[0, 1, 0, 1],
[1, 0, 1, 0],
[0, 1, 0, 1],
[1, 0, 1, 0],
[0, 0, 1, 1],
[0, 0, 1, 1],
[1, 1, 0, 0],
[1, 1, 0, 0]]
else: # It is a practice session
    seq = 1 # Always start from the first sequence anyway
Stim = [
[0, 1, 0, 1],
[0, 1, 0, 1]]
Side = [
[1, 0, 0, 1],
[0, 1, 1, 0]]
numStim = len(Stim) # Number of stimuli to present
# --------------------------------------
# Window construction
# --------------------------------------
screen = visual.Window(size=screenSize,units="pix",fullscr=fullscreen,color=screenColor) # Window
screen.setMouseVisible(False) # Mouse
fixation = visual.TextStim(screen,text="+",color=stimColor,height=25,pos=(0,0)) # Fixation point
screen.flip() # Start the interface
# --------------------------------------
# Instructions
# --------------------------------------
textWrapper = [0,0] # Text container dimensions
textWrapper[0] = round(screenSize[0]*0.75)
textWrapper[1] = round(screenSize[1]*0.1)
InstrFile = open('instructions.txt','r')
instrText = InstrFile.read()
InstrFile.close()
instrStim = visual.TextStim(screen,text=instrText,color=stimColor,height=25,pos=(0,textWrapper[1]),wrapWidth=textWrapper[0])
instrStim.draw()
screen.flip()
event.waitKeys(keyList='space') # Wait until the space bar is pressed
# --------------------------------------
# Experiment start
# --------------------------------------
alertText = "Poni l'indice sinistro sul pulsante " + buttons[1].upper() + " e l'indice destro sul pulsante " + buttons[0].upper() + ".\n\n"
alertText = alertText + "La prova comincera\' tra pochi secondi."
alertStim = visual.TextStim(screen,text=alertText,color=stimColor,height=30,pos=(0,textWrapper[1]),wrapWidth=textWrapper[0])
alertStim.draw()
screen.flip()
core.wait(7)
# --------------------------------------
# Session start
# --------------------------------------
screen.flip() # Start with a clean screen
count = 0 # Counts how many sequences have been presented
seq = seq-1 # Because index counting starts from 0, not 1
timer = core.Clock() # Timer for reaction times
while count < numStim: # Start the stimulus presentation
count = count+1
for i in range(4):
        fixation.draw() # Draw the fixation point
        screen.flip() # Refresh the window
        fixation.draw() # (Repeated because of a Windows bug)
        screen.flip() # (Repeated because of a Windows bug)
        core.wait(ISI) # Pause execution for ISI
        event.clearEvents() # Clear all previously recorded key presses
        fixation.draw() # Draw the fixation point
        if Stim[seq][i] == 0: # Circle
            exactKey = buttons[0] # Correct response button
            if Side[seq][i] == 0:
                # Circle on the left
                visual.Circle(screen,radius=80,pos=(-300,0),fillColor=stimColor,lineColor=stimColor,edges=256).draw() # interpolate=True
            else:
                # Circle on the right
                visual.Circle(screen,radius=80,pos=(300,0),fillColor=stimColor,lineColor=stimColor,edges=256).draw() # interpolate=True
        else: # Square
            exactKey = buttons[1] # Correct response button
            if Side[seq][i] == 0:
                # Square on the left
                visual.Rect(screen,width=160,height=160,pos=(-300,0),fillColor=stimColor,lineColor=stimColor).draw()
            else:
                # Square on the right
                visual.Rect(screen,width=160,height=160,pos=(300,0),fillColor=stimColor,lineColor=stimColor).draw()
        screen.flip() # Refresh the window
        timer.reset() # Reset the timer
        t0 = timer.getTime() # Start the timer
        respKey = event.waitKeys(keyList=buttons) # Wait for the response
        t1 = timer.getTime() # Stop the timer
        if isTrain == 0: # If the response has to be recorded
            respTime = (t1-t0)*1000 # Compute the reaction time (ms)
            respTime = str(respTime) # Convert the RT variable to a string
            respTime = respTime.replace('.',',') # Replace decimal points with commas for csv2
            score = respKey[0] == exactKey # Compute the score
if score == True:
score = 1
else:
score = 0
            # Write the data to file
dataFile = open('data/expdata.csv','a')
dataFile.write("%s;%s;%s;%s;%s;%s\n"%(subjCode,Stim[seq][i]+1,Side[seq][i]+1,count,score,respTime))
dataFile.close()
        # Update the value of seq
seq = seq+1
if seq == numStim:
seq = 0
# --------------------------------------
# End of experiment
# --------------------------------------
fixation.draw()
screen.flip()
core.wait(1)
alertText = "Prova conclusa\n\nGrazie per la partecipazione"
alertStim = visual.TextStim(screen,text=alertText,color=stimColor,height=30,pos=(0,200),wrapWidth=800)
alertStim.draw()
screen.flip()
core.wait(3)
screen.close()
core.quit()
| gpl-3.0 | 9,154,638,935,427,574,000 | 38.789157 | 139 | 0.547918 | false |
pztrn/spicecm-prototype | src/lib/spice_server_info.py | 1 | 1045 | # -*- coding: utf8 -*-
# Construct server info messages
greeting_text = \
"""<h1>Welcome to SpiceCM!</h1>
SpiceCM allows you to manage your SPICE VDI connections.<br />
<br />
SpiceCM is licensed under the terms and conditions of the GNU General
Public License, version 3 or any later version.<br />
<br />
<a href="http://spice-space.org/">What is SPICE?</a>
<h2>Participating in development of SpiceCM</h2>
Bug reports, ideas and suggestions can be sent to the
<a href="https://dev.pztrn.name/index.php?project=5&do=index&switch=1">
SpiceCM bugtracker</a>
"""
# Server info template
server_info = \
"""<h2>{0}</h2>
<b>Address:</b>{1}<br />
<b>Port:</b> {2}<br />
<h2>Graphics</h2>
<b>Color depth:</b> {3}<br />
<b>Fullscreen:</b> {4}
"""
def greeting_message():
return greeting_text
def construct(server_dict):
if server_dict["fullscreen"] == "0":
fullscreen = "False"
else:
fullscreen = "True"
return server_info.format(server_dict["name"], server_dict["address"], server_dict["port"], server_dict["depth"], fullscreen) | gpl-3.0 | 2,086,356,318,419,998,200 | 25.820513 | 129 | 0.666029 | false |
rkk09c/SayInterview | API/resources/calc_cargo_resource.py | 1 | 3581 | from flask import request, jsonify
from flask_restful import Resource
class CalcCargo(Resource):
def __init__(self):
pass
def post(self):
in_data = request.get_json(force=True, silent=False)
vehicles_list = []
if 'total_weight' in in_data:
total_weight = in_data['total_weight']
remainder_weight = in_data['total_weight']
else:
resp = {'status_code': 500,
'message': 'Total weight not included'}
return jsonify(resp)
if 'sports_car_count' in in_data:
for count in range(in_data['sports_car_count']):
vehicles_list.append(SportsCar())
if 'family_car_count'in in_data:
for count in range(in_data['family_car_count']):
vehicles_list.append(FamilyCar())
if 'truck_count' in in_data:
for count in range(in_data['truck_count']):
vehicles_list.append(Truck())
if 'minivan_count' in in_data:
for count in range(in_data['minivan_count']):
vehicles_list.append(MiniVan())
if 'cargo_van_count' in in_data:
for count in range(in_data['cargo_van_count']):
vehicles_list.append(CargoVan())
tup = self.calc_cargo(remainder_weight, vehicles_list)
return jsonify({'status_code': 200,
'message': self.format_output(total_weight, tup[0], tup[1])
})
def calc_cargo(self, remainder_weight, vehicles_list):
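        # Greedy allocation: vehicles are filled to capacity in the order they
        # appear in vehicles_list until the remaining cargo runs out; whatever
        # cannot be placed is returned as the leftover weight.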
for vehicle in vehicles_list:
if remainder_weight >= vehicle.weight_limit:
remainder_weight -= vehicle.weight_limit
vehicle.weight_used = vehicle.weight_limit
else:
vehicle.weight_used = remainder_weight
remainder_weight = 0
return (remainder_weight, vehicles_list)
def format_output(self, total_weight, remainder_weight, vehicles_list):
format_lst = ['allocating {0} lbs of cargo'.format(total_weight)]
for vehicle in vehicles_list:
format_lst.append('a {0} with {1} lbs'.format(vehicle.vehicle_type, vehicle.weight_used))
format_lst.append('we have {0} lbs of cargo left over'.format(remainder_weight))
print(format_lst)
return format_lst
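# A rough sketch of the JSON this resource consumes and produces (the route
# name is defined elsewhere; the field values here are only illustrative):
#   request:  {"total_weight": 2500, "family_car_count": 2, "truck_count": 1}
#   response: {"status_code": 200,
#              "message": ["allocating 2500 lbs of cargo",
#                          "a Family Car with 300 lbs",
#                          "a Family Car with 300 lbs",
#                          "a Truck with 1500 lbs",
#                          "we have 400 lbs of cargo left over"]}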
class Vehicle(object):
"""
As this is the 'object oriented' portion of the interview assignment,
please note that this inheritance structure is to show an object oriented
paradigm of programming only, it is not particularly necessary or useful
in this case.
"""
def __init__(self):
self.vehicle_type = None
self.weight_limit = 0
self.weight_used = 0
class SportsCar(Vehicle):
def __init__(self):
super(SportsCar, self).__init__()
self.vehicle_type = 'Sports Car'
self.weight_limit = 100
class FamilyCar(Vehicle):
def __init__(self):
super(FamilyCar, self).__init__()
self.vehicle_type = 'Family Car'
self.weight_limit = 300
class Truck(Vehicle):
def __init__(self):
super(Truck, self).__init__()
self.vehicle_type = 'Truck'
self.weight_limit = 1500
class MiniVan(Vehicle):
def __init__(self):
super(MiniVan, self).__init__()
self.vehicle_type = 'Mini Van'
self.weight_limit = 200
class CargoVan(Vehicle):
def __init__(self):
super(CargoVan, self).__init__()
self.vehicle_type = 'Cargo Van'
self.weight_limit = 800
| apache-2.0 | 68,001,541,734,344,560 | 30.690265 | 101 | 0.584753 | false |
tempbottle/ghmm | HMMEd/ObjectHMM.py | 1 | 55163 | #!/usr/bin/env python
################################################################################
#
# This file is part of Gato (Graph Algorithm Toolbox)
#
# file: ObjectHMM.py
# author: Janne Grunau
#
# Copyright (C) 1998-2002, Alexander Schliep
#
# Contact: [email protected]
#
# Information: http://gato.sf.net
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the Free
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
#
# This file is version $Revision: 1.29 $
# from $Date: 2005/02/22 11:12:56 $
# last change by $Author: schliep $.
#
################################################################################
from Gato.ObjectGraph import *
from Gato.EditObjectAttributesDialog import *
from Gato.MapEditor import NamedCollectionEditor
from Gato import ProbEditorBasics, ProbEditorDialogs, ProbEditorContinuous
import Tkinter
import ghmmwrapper, ghmmhelper, ghmm, HMMEditor
import copy
try:
from collections import defaultdict
except:
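    # Pure-Python fallback for interpreters whose collections module does not
    # provide defaultdict (it was added in Python 2.5).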
class defaultdict(dict):
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not hasattr(default_factory, '__call__')):
raise TypeError('first argument must be callable')
dict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
return ('defaultdict(%s, %s)' % (self.default_factory,
dict.__repr__(self)))
class UnknownFileTypeException(Exception):
def __init__(self,message):
self.message = message
def __str__(self):
return repr(self.message)
class UnsupportedFileException(Exception):
def __init__(self,message):
self.message = message
def __str__(self):
return repr(self.message)
class DiscreteHMMAlphabet:
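    # Bidirectional mapping between symbol codes (0..size-1) and symbol names;
    # 'id' carries the description written to the ghmmwrapper C alphabet.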
def __init__(self, names = [], description = "alphabet_1"):
self.id = description
self.name = {}
self.name2code = {}
for i,name in enumerate(names):
self.name[i] = name
self.name2code[name] = i
def __str__(self):
string = str(self.id) + ": "
string += str(self.name)
return string
def size(self):
return len(self.name.keys())
def edit(self, master, name):
pass
def editDialog(self, master, hmm):
self.hmm = hmm
editor = NamedCollectionEditor(master, self)
self.hmm = None
def names(self):
return self.name.values()
def add(self, name):
key = len(self.name)
self.name[key] = name
self.name2code[name] = key
# update all states emissions
for state in self.hmm.vertices.values():
state.emission.grow()
# also update background distributions if needed
if self.hmm.modelType & ghmmwrapper.kBackgroundDistributions:
self.hmm.backgroundDistributions.grow()
def delete(self, name):
key = self.name2code[name]
for i in range(key, len(self.name)-1):
self.name[i] = self.name[i+1]
self.name2code[self.name[i]] = i
del self.name2code[name]
del self.name[len(self.name)-1]
# update all states emissions
for state in self.hmm.vertices.values():
state.emission.shrink(key)
# also update background distributions if needed
if self.hmm.modelType & ghmmwrapper.kBackgroundDistributions:
            self.hmm.backgroundDistributions.shrink(key)
# and tie groups
if self.hmm.modelType & ghmmwrapper.kTiedEmissions:
            self.hmm.tie_groups.shrink(key)
def GetKeys(self):
return self.name.keys()
def GetKey(self, symbol):
return self.name2code[symbol]
def GetSymbol(self, key):
return self.name[key]
def ReadCAlphabet(self, calphabet):
self.id = calphabet.description
if self.id is None:
self.id = str(calphabet.size)
for i in xrange(0, calphabet.size):
name = calphabet.getSymbol(i)
self.name[i] = name
self.name2code[name] = i
def WriteCAlphabet(self):
calphabet = ghmmwrapper.ghmm_alphabet(len(self.name), self.id )
#calphabet.description = self.id
for i in xrange(len(self.name)):
calphabet.setSymbol(i, self.name[i])
return calphabet
class DiscreteHMMBackground:
def __init__(self, eclass):
self.EmissionClass = eclass
self.nextKey = 1
self.val2pop = {0:"no background"}
self.name = {}
self.name2code = {}
self.values = {}
self.hmm = None
def size(self):
return len(self.name.keys())
def edit(self, master, name):
self.values[self.name2code[name]].edit(master, "backgound distribution \"%s\""%name)
def editDialog(self, master, hmm):
self.hmm = hmm
editor = NamedCollectionEditor(master, self)
self.hmm = None
def names(self):
return self.name.values()
def add(self, name, alphabet=None):
key = self.nextKey
self.nextKey += 1
if self.hmm is not None:
e = self.EmissionClass(self.hmm.alphabet)
elif alphabet is not None:
e = self.EmissionClass(alphabet)
else:
e = self.EmissionClass()
self.name[key] = name
self.name2code[name] = key
self.values[key] = e
self.val2pop[key] = name
def delete(self, name):
key = self.name2code[name]
del self.name[key]
del self.name2code[name]
del self.values[key]
del self.val2pop[key]
def grow(self):
for emission in self.values.values():
emission.grow()
    def shrink(self, index):
        for emission in self.values.values():
            emission.shrink(index)
def getOrders(self):
keys = self.name.keys()
keys.sort()
return [self.values[k].order for k in keys]
def getWeights(self):
keys = self.name.keys()
keys.sort()
return [self.values[k].weights for k in keys]
def getNames(self):
keys = self.name.keys()
keys.sort()
return [self.name[k] for k in keys]
def ReadCBackground(self, alphabet, bp):
number = bp.n
M = bp.m
for i in xrange(number):
key = self.nextKey
self.nextKey += 1
e = self.EmissionClass(alphabet)
e.order = bp.getOrder(i)
e.weights = ghmmwrapper.double_array2list(bp.getWeights(i), M**(e.order+1))
name = bp.getName(i)
self.name[key] = name
self.name2code[name] = key
self.values[key] = e
self.val2pop[key] = name
class TieGroups(DiscreteHMMBackground):
def __init__(self, eclass):
self.EmissionClass = eclass
self.nextKey = 1
self.val2pop = {0:"untied"}
self.name = {}
self.name2code = {}
self.values = {}
self.hmm = None
def edit(self, master, name):
self.values[self.name2code[name]].edit(master, "Emissions of tie group \"%s\""%name)
def editEmissions(self, master, id):
self.values[id].edit(master, "Emissions of tie group \"%s\""%self.name[id])
def ReadCBackground(self, alphabet, bp):
pass
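# Thin wrappers around the ProbEditorContinuous curve classes: each density
# exposes getParameters(), returning the four values (two distribution
# parameters, a truncation point and the ghmmwrapper density type) that
# writeParameters() copies into the C state arrays.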
class UniformDensity(ProbEditorContinuous.box_function):
def getParameters(self):
return (self.stop, self.start, 0, ghmmwrapper.uniform)
class NormalDensity(ProbEditorContinuous.gauss_function):
def getParameters(self):
return (self.mu, self.sigma, 0, ghmmwrapper.normal)
class NormalDensityTruncRight(ProbEditorContinuous.gauss_tail_function_right):
def getParameters(self):
return (self.mu, self.sigma, self.tail, ghmmwrapper.normal_right)
class NormalDensityTruncLeft(ProbEditorContinuous.gauss_tail_function_left):
def getParameters(self):
return (self.mu, self.sigma, self.tail, ghmmwrapper.normal_left)
class Emission(object):
def __init__(self):
pass
def __str__(self):
pass
def edit(self, master):
pass
def get(self):
pass
def set(self, value):
pass
def writeParameters(self, cstate):
pass
def ReadCState(self, cstate, M):
pass
class DiscreteEmission(Emission):
def __init__(self, alphabet):
Emission.__init__(self)
self.alphabet = alphabet
if self.alphabet.size() > 0:
self.weights = [1.0 / self.alphabet.size()] * self.alphabet.size()
else:
self.weights = []
self.order = 0
def __str__(self):
return str(self.weights)
def grow(self):
s = float(self.alphabet.size()-1)
self.weights = [(x*s) for x in self.weights] + [1.0]
self.weights = [x/sum(self.weights) for x in self.weights]
def shrink(self, index):
del self.weights[index]
s = sum(self.weights)
if s > 0.0:
self.weights = [x / s for x in self.weights]
elif self.alphabet.size() > 0:
self.weights = [1.0 / self.alphabet.size()] * self.alphabet.size()
else:
self.weights = []
def edit(self, master, description):
transition_probabilities = ProbEditorBasics.ProbDict({})
for key in self.alphabet.GetKeys():
weight = self.weights[key]
label = self.alphabet.GetSymbol(key)
transition_probabilities.update({label:weight})
if transition_probabilities.sum == 0:
key_list = transition_probabilities.keys()
for key in key_list:
transition_probabilities[key]=1.0/len(key_list)
e = ProbEditorBasics.emission_data(transition_probabilities)
d = ProbEditorDialogs.emission_dialog(master, e, description)
if d.success():
# write back normalized probabilities
for label in transition_probabilities.keys():
key = self.alphabet.GetKey(label)
self.weights[key] = transition_probabilities[label]/transition_probabilities.sum
def get(self):
return self.weights
def set(self, values):
diff = self.alphabet.size() - len(values)
#print diff, values, self.alphabet.size(), self.alphabet
if diff == 0:
self.weights = values
elif diff > 0:
s = sum(values)
if s == 1.0:
values += ([0.0]*diff)
elif s < 1.0 and s >= 0.0:
values += ([(1.0-s)/diff] * diff)
else:
raise ValueError("sum")
else:
raise ValueError("wrong number of arguments")
self.weights = values
def writeParameters(self, cstate):
cstate.b = ghmmwrapper.list2double_array(self.weights)
def ReadCState(self, cstate, M):
self.weights = ghmmwrapper.double_array2list(cstate.b, M)
class DiscreteHigherOrderEmission(DiscreteEmission):
def __init__(self, alphabet, order=0):
self.alphabet = alphabet
self.order = ValidatingInt(order)
M = self.alphabet.size()
if M > 0:
self.weights = [1.0 / M] * M**(order+1)
else:
self.weights = []
def grow(self):
print "BUG! DiscreteHigherOrderEmission.grow() not implemented and should be called"
def shrink(self, index):
print "BUG! DiscreteHigherOrderEmission.shrink() not implemented and should be called"
def edit(self, master, description):
if self.order > 0:
message = "editing the emissions of higher order states is not implemented"
tkMessageBox.showwarning("HMMEd", message)
print message
else:
DiscreteEmission.edit(self, master, description)
def ChangeOrder(self, neworder):
M = self.alphabet.size()
if neworder < self.order:
self.weights = self.weights[0:M**(neworder+1)]
elif neworder > self.order:
            self.weights += [1.0 / M] * (M**(neworder+1) - M**(self.order+1))
self.order = neworder
def ReadCState(self, cstate, M):
self.order = ValidatingInt(cstate.order)
self.weights = ghmmwrapper.double_array2list(cstate.b, M**(self.order+1))
class ContinuousEmission(Emission):
def __init__(self):
Emission.__init__(self)
self.weights = [1.0]
self.plotList = []
def get(self):
raise
def set(self, value):
raise
def edit(self, master, title):
if len(self.plotList) == 0:
self.plotList.append(NormalDensity(mu=0,sigma=1,color="LightGreen"))
tmp = [copy.copy(x) for x in self.plotList]
top = Tkinter.Toplevel(master)
d = HMMEditor.ContinuousEmissionEditor(top, tmp)
d.pack(expand=1,fill=Tkinter.BOTH)
top.withdraw()
top.title(title)
top.update_idletasks()
top.deiconify()
top.wait_window(top)
if d.success():
self.plotList = d.plot_list
self.weights = [ d.dict[str(i)] for i in xrange(1,len(self.plotList)+1)]
def writeParameters(self, cstate):
cstate.M = len(self.plotList)
muL=[]; uL=[]; aL=[]; density_tL=[]; cL=[]
for d in xrange(len(self.plotList)):
(mu, u, a, density_t) = self.plotList[d].getParameters()
muL.append(mu); uL.append(u); aL.append(a); density_tL.append(density_t);
# write parameters in the state
density_array = ghmmwrapper.density_array_alloc(len(density_tL))
for (i,density) in enumerate(density_tL):
ghmmwrapper.density_array_setitem(density_array, i, density)
cstate.density = density_array
cstate.mue = ghmmwrapper.list2double_array(muL)
cstate.u = ghmmwrapper.list2double_array(uL)
cstate.a = ghmmwrapper.list2double_array(aL)
cstate.c = ghmmwrapper.list2double_array(self.weights)
# editor doesn't supports fixed mixture components
cstate.mixture_fix = ghmmwrapper.list2int_array([0] * len(self.plotList))
def ReadCState(self, cstate, M):
M = cstate.M
for i in xrange(M):
if cstate.getDensity(i) == ghmmwrapper.normal:
self.plotList.append(NormalDensity())
elif cstate.getDensity(i) == ghmmwrapper.normal_left:
self.plotList.append(NormalDensityTruncLeft())
elif cstate.getDensity(i) == ghmmwrapper.normal_right:
self.plotList.append(NormalDensityTruncRight())
elif cstate.getDensity(i) == ghmmwrapper.uniform:
self.plotList.append(UniformDensity())
class Distribution(ContinuousEmission):
def __str__(self):
return " 1.0 * " + str(self.get())
class UniformDistribution(Distribution):
def __init__(self, start=0, stop=1):
ContinuousEmission.__init__(self)
self.plotList = [UniformDensity(start, stop)]
def set(self, values):
self.weights = [1.0]
density = self.plotList[0]
density.start = float(min(values))
density.stop = float(max(values))
def get(self):
return self.plotList[0]
def getParameters(self):
return self.plotList[0].getParameters()
class GaussianDistribution(Distribution):
def __init__(self, mu=0, sigma=1):
ContinuousEmission.__init__(self)
self.plotList = [NormalDensity(mu, sigma)]
def set(self, values):
self.weights = [1.0]
density = self.plotList[0]
density.mu = float(values[0])
density.sigma = float(values[1])
def get(self):
return self.plotList[0]
def getParameters(self):
return self.plotList[0].getParameters()
class LeftTruncGaussianDistribution(Distribution):
def __init__(self, mu=0, sigma=1, trunc=0.5):
ContinuousEmission.__init__(self)
self.plotList = [NormalDensityTruncLeft(mu, sigma, trunc)]
def set(self, values):
self.weights = [1.0]
density = self.plotList[0]
density.mu = float(values[0])
density.sigma = float(values[1])
density.tail = float(values[2])
def get(self):
return self.plotList[0]
def getParameters(self):
return self.plotList[0].getParameters()
class RightTruncGaussianDistribution(Distribution):
def __init__(self, mu=0, sigma=1, trunc=0.5):
ContinuousEmission.__init__(self)
self.plotList = [NormalDensityTruncRight(mu, sigma, trunc)]
def set(self, values):
self.weights = [1.0]
density = self.plotList[0]
density.mu = float(values[0])
density.sigma = float(values[1])
density.tail = float(values[2])
def get(self):
return self.plotList[0]
def getParameters(self):
return self.plotList[0].getParameters()
class ContinuousMixtureDistribution(Distribution):
def __init__(self, parameters=[]):
ContinuousEmission.__init__(self)
self.set(parameters)
def __str__(self):
string = ""
for i,w in enumerate(self.weights):
string += (" %f * " % w) + str(self.plotList[i]) + '\n'
return string[:-1]
def set(self, values):
self.weights = []
self.plotList = []
if isinstance(values, Distribution):
values = [(values, 1.0)]
for value in values:
self.plotList.append(value[0])
if len(value) > 1:
self.weights.append(value[1])
else:
self.weights.append(-1)
weights = [w for w in self.weights if w >= 0.0]
wsum = sum(weights)
if wsum > 1.0:
factor = 1.0 / wsum
for i,weight in enumerate(self.weights):
if weight >= 0.0:
self.weights[i] *= factor
wsum = 1.0
if len(weights) < len(self.weights):
mean = (1.0-wsum) / (len(self.weights)-len(weights))
for i,weight in enumerate(self.weights):
                if weight < 0.0:
self.weights[i] = mean
def get(self):
retlist = []
for (i, w) in enumerate(self.weights):
retlist.append((self.plotList[i], w))
return retlist
class GaussianMixtureDistribution(ContinuousMixtureDistribution):
pass
class DiscretePairEmission(Emission):
pass
class State(VertexObject):
def __init__(self, emission=Emission(), hmm=None):
VertexObject.__init__(self)
self.num = -1 # continuous id (0..Order()-1) used for writing transitions
self.editableAttr = {'labeling':"Name", 'initial':"Initial Probability", 'fixed':"fixed emissions"}
self.initial = Probability()
self.Emission = emission
self.itsHMM = hmm
self.fixed = ValidatingBool(False)
def __setattr__(self, name, value):
        if name == "emission":
self.Emission.set(value)
        elif name == "name":
try:
if self.itsHMM is not None:
vname = self.labeling
self.itsHMM.name2id[vname] = [x for x in self.itsHMM.name2id[vname] if x != self.id]
if len(self.itsHMM.name2id[vname]) == 0:
del self.itsHMM.name2id[vname]
except:
raise "State doesn't hold pointer to its HMM, can't update the name"
self.labeling = ValidatingString(str(value))
else:
self.__dict__[name] = value
#object.__setattr__(self, name, value)
def __getattr__(self, name):
        if name == "emission":
return self.Emission.get()
        elif name == "name":
return self.labeling
else:
return self.__dict__[name]
#return object.__getattr__(self, name)
def __str__(self):
string = ('State %d ("%s"). Initial probability: %f\n' % (self.id, self.labeling, self.initial))
string += str(self.Emission)
return string
def update(self):
pass
def normalize(self):
# normalize outgoing transmission probabilities
weights = [e.GetWeight() for e in self.outEdges]
weights = [w for w in weights if w >= 0.0]
osum = float(sum(weights))
if osum > 1.0 or len(weights) == len(self.outEdges):
for i,edge in enumerate(self.outEdges):
w = edge.GetWeight()
if w >= 0.0:
edge.SetWeight(w/osum)
osum=1.0
if len(weights) < len(self.outEdges):
mean = (1.0-osum) / (len(self.outEdges)-len(weights))
for i,edge in enumerate(self.outEdges):
if edge.GetWeight() < 0.0:
edge.SetWeight(mean)
def editProperties(self, parent, attributes = None):
self.update()
self.desc = ('Properties of State %d (%s)' % (self.id, self.labeling))
if self.itsHMM.modelType & ghmmwrapper.kHigherOrderEmissions:
self.order = self.Emission.order
self.editableAttr['order'] = "Order"
if attributes == None:
editBox = EditObjectAttributesDialog(parent, self, self.editableAttr)
else:
editableAttr = {}
for attr in attributes:
editableAttr[attr] = self.editableAttr[attr]
editBox = EditObjectAttributesDialog(parent, self, editableAttr)
if self.itsHMM.modelType & ghmmwrapper.kHigherOrderEmissions:
self.Emission.ChangeOrder(self.order)
del self.order
del self.editableAttr['order']
def editEmissions(self, master):
self.Emission.edit(master, ('Properties of State %d (%s)' % (self.id, self.labeling)))
def WriteCState(self, cstate):
cstate.pi = self.initial
cstate.in_states = len(self.inEdges)
cstate.out_states = len(self.outEdges)
cstate.desc = str(self.labeling)
if self.embedding is not None:
cstate.xPosition = int(self.embedding.x)
cstate.yPosition = int(self.embedding.y)
else:
cstate.xPosition = 0
cstate.yPosition = 0
cstate.fix = self.fixed
self.WriteTransitions(cstate)
self.Emission.writeParameters(cstate)
def WriteTransitions(self, cstate):
inID = [edge.tail.num for edge in self.inEdges]
inA = [edge.GetEdgeWeight(0) for edge in self.inEdges]
cstate.in_id = ghmmwrapper.list2int_array(inID)
cstate.in_a = ghmmwrapper.list2double_array(inA)
outID = [edge.head.num for edge in self.outEdges]
outA = [edge.GetEdgeWeight(0) for edge in self.outEdges]
cstate.out_id = ghmmwrapper.list2int_array(outID)
cstate.out_a = ghmmwrapper.list2double_array(outA)
def ReadCState(self, cmodel, cstate, i):
self.initial = ValidatingFloat(cstate.pi)
self.fixed = ValidatingBool(cstate.fix)
self.labeling = ValidatingString(cstate.desc)
if self.itsHMM is not None:
self.itsHMM.SetEmbedding(self.id, cstate.xPosition, cstate.yPosition)
self.Emission.ReadCState(cstate, cmodel.M)
class ContinuousState(State):
def WriteTransitions(self, cstate):
inID = [edge.tail.id for edge in self.inEdges]
inA = [[edge.GetEdgeWeight(0) for edge in self.inEdges]]
cstate.in_id = ghmmwrapper.list2int_array(inID)
(mat, lens) = ghmmhelper.list2double_matrix(inA)
cstate.in_a = mat
outID = [edge.head.id for edge in self.outEdges]
outA = [[edge.GetEdgeWeight(0) for edge in self.outEdges]]
cstate.out_id = ghmmwrapper.list2int_array(outID)
(mat, lens) = ghmmhelper.list2double_matrix(outA)
cstate.out_a = mat
class SilentState(State):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
self.init(silent)
def init(self, silent):
self.editableAttr['silent'] = "Silent"
self.silent = silent
def editEmissions(self, master):
if not self.silent:
State.editEmissions(self, master)
def ReadCState(self, cmodel, cstate, i):
State.ReadCState(self, cmodel, cstate, i)
self.silent = ValidatingBool(cmodel.getSilent(i))
class BackgroundState(State):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
self.init()
def init(self):
self.background = PopupableInt()
self.background.setPopup(self.itsHMM.backgroundDistributions.val2pop)
def update(self):
if len(self.itsHMM.backgroundDistributions.names()) > 0:
self.editableAttr['background'] = "Background distribution"
self.background.setPopup(self.itsHMM.backgroundDistributions.val2pop)
def ReadCState(self, cmodel, cstate, i):
State.ReadCState(self, cmodel, cstate, i)
self.update()
self.background = PopupableInt(cmodel.getBackgroundID(i)+1)
class LabeledState(State):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
self.init()
def init(self):
self.label = PopupableInt()
self.label.setPopup(self.itsHMM.label_alphabet.name)
def update(self):
if len(self.itsHMM.label_alphabet.names()) > 0:
self.editableAttr['label'] = "State label"
self.label.setPopup(self.itsHMM.label_alphabet.name)
def ReadCState(self, cmodel, cstate, i):
State.ReadCState(self, cmodel, cstate, i)
        self.update()
self.label = PopupableInt(cmodel.getStateLabel(i))
class TiedState(State):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
self.init()
def init(self):
self.tiedto = PopupableInt(0)
self.tiedto.setPopup(self.itsHMM.tie_groups.val2pop)
def update(self):
if len(self.itsHMM.tie_groups.names()) > 0:
self.editableAttr['tiedto'] = "Emissions tied to state"
self.tiedto.setPopup(self.itsHMM.tie_groups.val2pop)
def editEmissions(self, master):
if self.tiedto > 0:
self.itsHMM.tie_groups.editEmissions(master, self.tiedto)
else:
State.editEmissions(self, master)
def ReadCState(self, cmodel, cstate, i):
State.ReadCState(self, cmodel, cstate, i)
tied = cmodel.getTiedTo(i)
if tied == i:
self.itsHMM.tie_groups.add("tiedto_state%d"%tied, self.itsHMM.alphabet)
self.update()
self.tiedto = PopupableInt(self.itsHMM.tie_groups.name2code["tiedto_state%d"%tied])
class SilentBackgroundState(SilentState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class SilentLabeledState(SilentState, LabeledState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
LabeledState.init(self)
def update(self):
LabeledState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
LabeledState.ReadCState(self, cmodel, cstate, i)
class SilentTiedState(SilentState, TiedState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
TiedState.init(self)
def update(self):
TiedState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
class LabeledBackgroundState(LabeledState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
LabeledState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
LabeledState.update(self)
def ReadCState(self, cmodel, cstate, i):
LabeledState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class LabeledTiedState(LabeledState, TiedState):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
LabeledState.init(self)
TiedState.init(self)
def update(self):
LabeledState.update(self)
TiedState.update(self)
def ReadCState(self, cmodel, cstate, i):
LabeledState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
class TiedBackgroundState(TiedState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
TiedState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
TiedState.update(self)
    def ReadCState(self, cmodel, cstate, i):
TiedState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class SilentLabeledBackgroundState(SilentState, LabeledState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
LabeledState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
LabeledState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
LabeledState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class SilentLabeledTiedState(SilentState, LabeledState, TiedState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
LabeledState.init(self)
TiedState.init(self)
def update(self):
LabeledState.update(self)
TiedState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
LabeledState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
class SilentTiedBackgroundState(SilentState, TiedState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
TiedState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
TiedState.update(self)
    def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class LabeledTiedBackgroundState(LabeledState, TiedState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None):
State.__init__(self, emission, hmm)
LabeledState.init(self)
TiedState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
LabeledState.update(self)
TiedState.update(self)
def ReadCState(self, cmodel, cstate, i):
LabeledState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class SilentLabeledTiedBackgroundState(SilentState, LabeledState, TiedState, BackgroundState):
def __init__(self, emission=Emission(), hmm=None, silent=ValidatingBool(False)):
State.__init__(self, emission, hmm)
SilentState.init(self, silent)
LabeledState.init(self)
TiedState.init(self)
BackgroundState.init(self)
def update(self):
BackgroundState.update(self)
LabeledState.update(self)
TiedState.update(self)
def ReadCState(self, cmodel, cstate, i):
SilentState.ReadCState(self, cmodel, cstate, i)
LabeledState.ReadCState(self, cmodel, cstate, i)
TiedState.ReadCState(self, cmodel, cstate, i)
BackgroundState.ReadCState(self, cmodel, cstate, i)
class Transition(EdgeObject):
def __init__(self, tail, head):
EdgeObject.__init__(self, tail, head)
#self.weight = Probability()
self.editableAttr = {'weight':"Weight"}
def __str__(self):
return "Transition from %d to %d with probability %f" % (self.tail.id, self.head.id, self.GetWeight())
def __setattr__(self, name, value):
        if name == "p":
self.SetWeight(value)
else:
self.__dict__[name] = value
#object.__setattr__(self, name, value)
def __getattr__(self, name):
        if name == "p":
return self.GetWeight()
else:
return self.__dict__[name]
#return object.__getattr__(self, name)
def GetWeight(self):
return self.GetEdgeWeight(0)
def SetWeight(self, value):
self.SetEdgeWeight(0, value)
def edit(self, parent, attributes = None):
if attributes == None:
editBox = EditObjectAttributesDialog(parent, self, self.editableAttr)
else:
editableAttr = {}
for attr in attributes:
editableAttr[attr] = self.editableAttr[attr]
editBox = EditObjectAttributesDialog(parent, self, editableAttr)
def ReadCTransition(self, state, cos, i):
assert (cos == 1)
self.weight = state.getOutProb(i)
class SwitchedTransition(Transition):
def __init__(self, tail, head, noTransitionClasses=2):
Transition.__init__(self, tail, head)
self.weight = [Probability()] * noTransitionClasses
self.noClasses = noTransitionClasses
def GetEdgeWeight(self,i):
if i < self.noClasses:
return self.weight[i]
def SetEdgeWeight(self,i,value):
if i < self.noClasses:
self.weight[i] = value
def ReadCTransition(self, state, cos, i):
self.weight = ghmmwrapper.double_array2list(state.out_a[i], cos)
class ObjectHMM(ObjectGraph):
"""
"""
def __init__(self, stateClass, transitionClass, emissionClass=Emission, alphabet=None, etype=0):
ObjectGraph.__init__(self, stateClass, transitionClass)
self.simple = 0
self.euclidian = 0
self.directed = 1
self.modelType = 0
self.alphabet = alphabet
self.emissionClass = emissionClass
self.etype = etype
self.name2id = defaultdict(list)
self.vertices_ids = {0:'untied'}
# editable attributes per EditPropertiesDialog
# common properties:
self.desc = "New HMM properties"
self.editableAttr = {'name':"Name"}
self.alphatype = PopupableInt()
self.alphatype_val2pop = {0:"binary", 1:"dice", 2:"DNA",
3:"amino acids", 4:"custom"}
self.alphatype.setPopup(self.alphatype_val2pop)
self.labels = ValidatingBool()
self.background = ValidatingBool()
self.maxOrder = DefaultedInt(0)
self.maxOrder.setDefault(1, 0)
self.name = ValidatingString("")
self.silent = ValidatingBool()
self.switching = DefaultedInt(1)
self.switching.setDefault(1, 1)
self.tied = ValidatingBool()
# discrete emissions only properties
if etype == 0:
self.editableAttr['tied'] = "Tied emissions"
self.editableAttr['silent'] = "Silent states"
self.editableAttr['maxOrder'] = "Higher order emissions"
self.editableAttr['background'] = "Background distributions"
self.editableAttr['labels'] = "State labels"
self.editableAttr['alphatype'] = "Alphabet"
# continuous emissions only properties
elif etype == 1:
self.editableAttr['switching'] = "No. of transition Classes"
else:
tkMessageBox.showerror("HMMEd", "invalid model type")
def __str__(self):
string = "HMM with %d states" % len(self.vertices)
for v in self.vertices.values():
string += '\n' + str(v)
for e in self.edges.values():
string += '\n' + str(e)
return string
def AddVertex(self):
""" Add an isolated vertex. Returns the id of the new vertex """
if self.alphabet is not None:
e = self.emissionClass(self.alphabet)
else:
e = self.emissionClass()
v = self.vertexClass(e, self)
v.id = self.GetNextVertexID()
self.vertices[v.id] = v
self.vertices_ids[v.id] = str(v.id)
vname = str(v.id)
v.name = vname
self.name2id[vname].append(v.id)
return v.id
def DeleteVertex(self, v):
vname = self.vertices[v].name
self.name2id[vname] = [x for x in self.name2id[vname] if x != v]
if len(self.name2id[vname]) == 0:
del self.name2id[vname]
del self.vertices_ids[v]
ObjectGraph.DeleteVertex(self, v)
def AddEdge(self,tail,head):
ObjectGraph.AddEdge(self,tail,head)
edge = self.edges[tail,head]
edge.SetWeight(1.0)
def DeleteEdge(self,tail,head):
ObjectGraph.DeleteEdge(self,tail,head)
self.vertices[tail].normalize()
def SetLabeling(self,v, value):
self.vertices[v].labeling = ValidatingString(value)
def edit(self, parent, attributes = None):
if attributes == None:
editBox = EditObjectAttributesDialog(parent, self, self.editableAttr)
else:
editableAttr = {}
for attr in attributes:
editableAttr[attr] = self.editableAttr[attr]
editBox = EditObjectAttributesDialog(parent, self, editableAttr)
mt = self.computeModelType()
if mt > 0:
self.initHMM(mt)
else:
print "invalid model type:", mt
def computeModelType(self):
modelType = 0
if self.etype == 0:
modelType += ghmmwrapper.kDiscreteHMM
if self.maxOrder > 0:
modelType += ghmmwrapper.kHigherOrderEmissions
elif self.etype == 1:
modelType += ghmmwrapper.kContinuousHMM
elif self.etype == 2:
modelType += ghmmwrapper.kDiscreteHMM
modelType += ghmmwrapper.kPairHMM
else:
print "invalid type:", self.etype
if self.switching > 1:
modelType += ghmmwrapper.kTransitionClasses
if self.tied:
modelType += ghmmwrapper.kTiedEmissions
if self.silent:
modelType += ghmmwrapper.kSilentStates
if self.background:
modelType += ghmmwrapper.kBackgroundDistributions
if self.labels:
modelType += ghmmwrapper.kLabeledStates
return modelType
def initHMM(self, modelType):
# set the right emission type
if modelType & ghmmwrapper.kDiscreteHMM:
if modelType & ghmmwrapper.kPairHMM:
emissionClass = DiscretePairEmission
# alphabet missing
else:
if modelType & ghmmwrapper.kHigherOrderEmissions:
emissionClass = DiscreteHigherOrderEmission
else:
emissionClass = DiscreteEmission
alphabet = self.initAlphabet()
elif modelType & ghmmwrapper.kContinuousHMM:
if self.emissionClass == Emission:
emissionClass = ContinuousEmission
else:
emissionClass = self.emissionClass
alphabet = None
else:
print "not a valid model type"
# set the right transition type
if modelType & ghmmwrapper.kTransitionClasses:
edgeClass = SwitchedTransition
else:
edgeClass = Transition
# masking unnecessary model type flags out
mt = modelType
if modelType & ghmmwrapper.kDiscreteHMM:
mt -= ghmmwrapper.kDiscreteHMM
if modelType & ghmmwrapper.kContinuousHMM:
mt -= ghmmwrapper.kContinuousHMM
if modelType & ghmmwrapper.kPairHMM:
mt -= ghmmwrapper.kPairHMM
if modelType & (ghmmwrapper.kHigherOrderEmissions):
mt -= ghmmwrapper.kHigherOrderEmissions
# setting the right vertex type
if mt == (ghmmwrapper.kSilentStates):
vertexClass = SilentState
elif mt == (ghmmwrapper.kTiedEmissions):
vertexClass = TiedState
elif mt == (ghmmwrapper.kBackgroundDistributions):
vertexClass = BackgroundState
elif mt == (ghmmwrapper.kLabeledStates):
vertexClass = LabeledState
# 2
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kTiedEmissions):
vertexClass = SilentTiedState
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kLabeledStates):
vertexClass = SilentLabeledState
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kBackgroundDistributions):
vertexClass = SilentBackgroundState
elif mt == (ghmmwrapper.kTiedEmissions + ghmmwrapper.kBackgroundDistributions):
vertexClass = TiedBackgroundState
elif mt == (ghmmwrapper.kTiedEmissions + ghmmwrapper.kLabeledStates):
vertexClass = LabeledTiedState
elif mt == (ghmmwrapper.kLabeledStates + ghmmwrapper.kBackgroundDistributions):
vertexClass = LabeledBackgroundState
# 3
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kTiedEmissions + ghmmwrapper.kBackgroundDistributions):
vertexClass = SilentTiedBackgroundState
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kTiedEmissions + ghmmwrapper.kLabeledStates):
vertexClass = SilentLabeledTiedState
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kLabeledStates + ghmmwrapper.kBackgroundDistributions):
vertexClass = SilentLabeledBackgroundState
elif mt == (ghmmwrapper.kTiedEmissions + ghmmwrapper.kLabeledStates + ghmmwrapper.kBackgroundDistributions):
vertexClass = LabeledTiedBackgroundState
# 4
elif mt == (ghmmwrapper.kSilentStates + ghmmwrapper.kTiedEmissions + ghmmwrapper.kLabeledStates + ghmmwrapper.kBackgroundDistributions):
vertexClass = SilentLabeledTiedBackgroundState
else:
vertexClass = (modelType & ghmmwrapper.kContinuousHMM) and ContinuousState or State
# initialize state labels
if mt & ghmmwrapper.kLabeledStates:
self.label_alphabet = DiscreteHMMAlphabet(description = "state labels")
# initialize background distributions
if mt & ghmmwrapper.kBackgroundDistributions:
self.backgroundDistributions = DiscreteHMMBackground(emissionClass)
        # initialize tie groups
if mt & ghmmwrapper.kTiedEmissions:
self.tie_groups = TieGroups(emissionClass)
self.__init__(vertexClass, edgeClass, emissionClass, alphabet)
self.modelType = modelType
def initAlphabet(self):
if isinstance(self.alphabet, ghmm.Alphabet):
return DiscreteHMMAlphabet(self.alphabet.listOfCharacters)
if self.alphatype == 0:
return DiscreteHMMAlphabet(["0", "1"])
elif self.alphatype == 1:
return DiscreteHMMAlphabet(["1", "2", "3", "4", "5", "6"])
elif self.alphatype == 2:
return DiscreteHMMAlphabet(["A", "C", "G", "T"])
elif self.alphatype == 3:
return DiscreteHMMAlphabet(["ala", "arg", "asn", "asp", "asx",
"cys", "glu", "gln", "glx", "gly",
"his", "ile", "leu", "lys", "met",
"phe", "pro", "ser", "thr", "try",
"tyr", "val"])
elif self.alphatype == 4:
return DiscreteHMMAlphabet()
else:
print "invalid alphabet type"
return None
def openXML(self, filename="test.xml"):
# simple check of file
filedata = ghmmwrapper.ghmm_xmlfile_parse(filename)
if filedata == None:
raise UnknownFileTypeException(filename)
if filedata.noModels > 1:
raise UnsupportedFileException(filename + "more than one HMM per file currently not supported")
# initialize model and set auxiliary data accordingly to the model type
self.initHMM(filedata.modelType)
#cmodel = filedata.getModel(0)
if self.modelType & ghmmwrapper.kContinuousHMM:
self.buildFromCModel(filedata.get_cmodel(0))
elif self.modelType & ghmmwrapper.kDiscreteHMM:
if self.modelType & ghmmwrapper.kPairHMM:
self.buildFromCModel(filedata.get_dpmodel(0))
elif self.modelType & ghmmwrapper.kTransitionClasses:
self.buildFromCModel(filedata.get_dsmodel(0))
else:
self.buildFromCModel(filedata.get_dmodel(0))
def buildFromCModel(self, cmodel):
cos = 1
        # Add alphabet if appropriate first
if self.modelType & ghmmwrapper.kDiscreteHMM:
self.alphabet = DiscreteHMMAlphabet()
self.alphabet.ReadCAlphabet(cmodel.alphabet)
# Add all states
vdict = {}
for i in xrange(cmodel.N):
vdict[i] = self.AddVertex()
# Add all transitions
for i in xrange(cmodel.N):
state = cmodel.getState(i)
for j in xrange(state.out_states):
outID = state.getOutState(j)
tail = vdict[i]
head = vdict[outID]
self.AddEdge(tail, head)
# Add label alphabet
if self.modelType & ghmmwrapper.kLabeledStates:
self.label_alphabet = DiscreteHMMAlphabet()
self.label_alphabet.ReadCAlphabet(cmodel.label_alphabet)
        # Add background distributions if appropriate
if self.modelType & ghmmwrapper.kBackgroundDistributions:
self.backgroundDistributions = DiscreteHMMBackground(self.emissionClass)
self.backgroundDistributions.ReadCBackground(self.alphabet, cmodel.bp)
        # Add switching functions if appropriate
if self.modelType & ghmmwrapper.kTransitionClasses:
cos = cmodel.cos
print "TODO: transition classes???"
if self.modelType & ghmmwrapper.kContinuousHMM:
cos = cmodel.cos
# Set all states' values and set transition weights
for i in xrange(cmodel.N):
state = cmodel.getState(i)
self.vertices[vdict[i]].ReadCState(cmodel, state, i)
for j in xrange(state.out_states):
outID = state.getOutState(j)
tail = vdict[i]
head = vdict[outID]
self.edges[tail, head].ReadCTransition(state, cos, j)
def normalize(self):
        # normalize initial probabilities
initials = [v.initial for v in self.vertices.values() if v.initial >= 0.0]
isum = sum(initials)
if len(initials) == self.Order():
if isum == 0.0:
for vertex in self.vertices.values():
vertex.initial = 1.0 / self.Order()
else:
factor = 1.0 / isum
for vertex in self.vertices.values():
if vertex.initial >= 0.0:
vertex.initial *= factor
else:
if isum > 1.0:
factor = 1.0 / isum
for vertex in self.vertices.values():
if vertex.initial >= 0.0:
vertex.initial *= factor
elif isum < 1.0:
mean = (1.0-isum) / (self.Order()-len(initials))
for vertex in self.vertices.values():
if vertex.initial < 0.0:
vertex.initial = mean
        # normalize state's transition probabilities
for vertex in self.vertices.values():
vertex.normalize()
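    # Illustrative note (added commentary): normalize() treats negative
    # probabilities as "unset". For example, with four states whose initials
    # are 0.2, 0.4, -1, -1, isum is 0.6, so each of the two unset states
    # receives (1.0 - 0.6) / 2 = 0.2 and the initial distribution sums to
    # one before the model is written out.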
def finalize(self):
# ensure that all entities are properly normalized and initialized
self.normalize()
# build cmodel
if self.modelType & ghmmwrapper.kContinuousHMM:
cmodel = ghmmwrapper.ghmm_cmodel()
cmodel.s = ghmmwrapper.cstate_array_alloc(self.Order())
elif self.modelType & ghmmwrapper.kDiscreteHMM:
if self.modelType & ghmmwrapper.kPairHMM:
cmodel = None
elif self.modelType & ghmmwrapper.kTransitionClasses:
cmodel = None
else:
cmodel = ghmmwrapper.ghmm_dmodel()
cmodel.s = ghmmwrapper.dstate_array_alloc(self.Order())
cmodel.M = self.alphabet.size()
cmodel.alphabet = self.alphabet.WriteCAlphabet()
if self.modelType & ghmmwrapper.kTransitionClasses:
cmodel.cos = maxcos()
else:
cmodel.cos = 1
# sort state IDs
sortedIDs = self.vertices.keys()
sortedIDs.sort()
# fill state property arrays according to the model type with default values
cmodel.N = self.Order()
# fill silent array
if self.modelType & ghmmwrapper.kSilentStates:
cmodel.silent = ghmmwrapper.list2int_array([self.vertices[id].silent for id in sortedIDs])
# fill tied to array
if self.modelType & ghmmwrapper.kTiedEmissions:
tied_list = [ghmmwrapper.kUntied] * self.Order()
tieddict = {}
            # map python id to continuous C array indices
for i, id in enumerate(sortedIDs):
if self.vertices[id].tiedto > 0:
tiedto = self.vertices[id].tiedto-1
if tieddict.has_key(tiedto):
tieddict[tiedto].append(i)
else:
tieddict[tiedto] = [i]
            # tiedto has to be sorted, the first entry points to itself
for k in tieddict.keys():
temp = tieddict[k]
temp.sort()
first = temp[0]
for index in temp:
tied_list[index] = first
cmodel.tied_to = ghmmwrapper.list2int_array(tied_list)
        # fill background id array
if self.modelType & ghmmwrapper.kBackgroundDistributions:
N = self.backgroundDistributions.size()
M = self.alphabet.size()
orders = ghmmwrapper.list2int_array(self.backgroundDistributions.getOrders())
(weights,lengths) = ghmmhelper.list2double_matrix(self.backgroundDistributions.getWeights())
cmodel.bp = ghmmwrapper.ghmm_dbackground(N, M, orders, weights)
for i,name in enumerate(self.backgroundDistributions.getNames()):
cmodel.bp.setName(i, name)
cmodel.background_id = ghmmwrapper.list2int_array([(self.vertices[id].background-1) for id in sortedIDs])
# fill higher order array
if self.modelType & ghmmwrapper.kHigherOrderEmissions:
cmodel.order = ghmmwrapper.list2int_array([self.vertices[id].emission.order for id in sortedIDs])
        # fill label id array
if self.modelType & ghmmwrapper.kLabeledStates:
cmodel.label_alphabet = self.label_alphabet.WriteCAlphabet()
cmodel.label = ghmmwrapper.list2int_array([self.vertices[id].label for id in sortedIDs])
cmodel.model_type = self.modelType
# create each state
initial_sum = 0.0
for i, id in enumerate(sortedIDs):
self.vertices[id].num = i
initial_sum += self.vertices[id].initial
if initial_sum < 1E-14:
for id in sortedIDs:
self.vertices[id].initial = 1.0
initial_sum = float(self.Order())
for i, id in enumerate(sortedIDs):
cstate = cmodel.getState(i)
self.vertices[id].initial /= initial_sum
self.vertices[id].WriteCState(cstate)
return cmodel
def writeXML(self, filename="test.xml"):
cmodel = self.finalize()
# write to file
cmodel.write_xml(filename)
| gpl-3.0 | 2,216,229,121,287,193,600 | 34.293026 | 144 | 0.595109 | false |
FibercorpLabs/FibercorpDevops | vmware/nsx/utils/nsx/EdgeFirewall.py | 1 | 2383 | from nsx_rest import *
import json
import sys
sys.path.append("../utils/common/")
from jinja import render
from edge import *
from pprint import pprint
def getFirewallConfig(edge_name):
edgeId = getNsxEdgeIdByName(edge_name)
r = nsxGet("/api/4.0/edges/"+ edgeId + "/firewall/config")
return json.loads(r)
def getRuleIdByName(edge_name, rule_name):
edgeConfig = getFirewallConfig(edge_name)
firewallRules = edgeConfig['firewallRules']['firewallRules']
for firewallRule in firewallRules:
if firewallRule['name'] == rule_name:
return firewallRule['ruleId']
return None
def createRule(edge_name, jinja_vars):
edgeId = getNsxEdgeIdByName(edge_name)
dir = os.path.dirname(__file__)
nsx_rules_xml = os.path.join(dir, '../../templates/edge_firewall/nsx_edge_firewall_rules.j2')
data = render(nsx_rules_xml, jinja_vars)
return nsxPost("/api/4.0/edges/" + edgeId +"/firewall/config/rules", data), data
def updateGlobalConfig(edge_name, jinja_vars):
edgeId = getNsxEdgeIdByName(edge_name)
dir = os.path.dirname(__file__)
nsx_globalconfig_xml = os.path.join(dir, '../../templates/edge_firewall/nsx_edge_firewall_globalconfig.j2')
data = render(nsx_globalconfig_xml, jinja_vars)
return nsxPut("/api/4.0/edges/" + edgeId + "/firewall/config/global", data)
def updateDefaultPolicy(edge_name, jinja_vars):
edgeId = getNsxEdgeIdByName(edge_name)
dir = os.path.dirname(__file__)
nsx_defaultpolicy_xml = os.path.join(dir, '../../templates/edge_firewall/nsx_edge_firewall_defaultpolicy.j2')
data = render(nsx_defaultpolicy_xml, jinja_vars)
return nsxPut("/api/4.0/edges/" + edgeId + "/firewall/config/defaultpolicy", data)
def updateRule(edge_name, rule_name, jinja_vars):
edgeId = getNsxEdgeIdByName(edge_name)
ruleId = getRuleIdByName(edge_name, rule_name)
dir = os.path.dirname(__file__)
nsx_rule_xml = os.path.join(dir, '../../templates/edge_firewall/nsx_edge_firewall_rule.j2')
data = render(nsx_rule_xml, jinja_vars)
return nsxPut("/api/4.0/edges/"+ edgeId + "/firewall/config/rules/" + ruleId, data)
def deleteRule(edge_name, rule_name):
edgeId = getNsxEdgeIdByName(edge_name)
ruleId = getRuleIdByName(edge_name, rule_name)
return nsxDelete("/api/4.0/edges/"+ edgeId + "/firewall/config/rules/" + ruleId)
# pprint(getFirewallConfig("PGW01"))
# pprint(getRuleIdByName("PGW01", "firewall")) | gpl-3.0 | 314,744,389,271,548,700 | 28.8 | 111 | 0.712547 | false |
anandha2017/udacity | nd101 Deep Learning Nanodegree Foundation/DockerImages/18_Sirajs_Image_Generation/notebooks/02_how_to_win_slot_machines-master/preprocess.py | 1 | 1087 | import numpy as np
def normalise_windows(window_data):
normalised_data = []
for window in window_data:
normalised_window = [((float(p) / float(window[0])) - 1) for p in window]
normalised_data.append(normalised_window)
return normalised_data
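# Illustrative helper (not part of the original module): each window is
# rescaled relative to its first element via p_i / p_0 - 1, so every
# normalised window starts at 0.0 and later entries are fractional changes
# from the window's first value.
def _example_normalise():
    window = [10.0, 10.5, 9.8]            # hypothetical price window
    return normalise_windows([window])    # -> approx. [[0.0, 0.05, -0.02]]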
def load_data(filename, seq_len, normalise_window):
f = open(filename, 'r').read()
data = f.split('\n')
sequence_length = seq_len + 1
result = []
for index in range(len(data) - sequence_length):
result.append(data[index: index + sequence_length])
if normalise_window:
result = normalise_windows(result)
result = np.array(result, dtype='float32')
row = round(0.9 * result.shape[0])
train = result[:int(row), :]
np.random.shuffle(train)
x_train = train[:, :-1]
y_train = train[:, -1]
x_test = result[int(row):, :-1]
y_test = result[int(row):, -1]
x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 1))
x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 1))
return [x_train, y_train, x_test, y_test] | mit | 8,232,472,866,018,088,000 | 29.222222 | 81 | 0.611776 | false |
AutorestCI/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/express_route_circuits_arp_table_list_result.py | 1 | 1235 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExpressRouteCircuitsArpTableListResult(Model):
"""Response for ListArpTable associated with the Express Route Circuits API.
:param value: Gets list of the ARP table.
:type value:
list[~azure.mgmt.network.v2017_09_01.models.ExpressRouteCircuitArpTable]
:param next_link: The URL to get the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ExpressRouteCircuitArpTable]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(self, value=None, next_link=None):
super(ExpressRouteCircuitsArpTableListResult, self).__init__()
self.value = value
self.next_link = next_link
| mit | 8,622,345,961,035,120,000 | 36.424242 | 80 | 0.607287 | false |
Xyrotechnology/Project-Anthrax | SD/libraries/Scripts/Pixhawk/Firmware-master/Tools/px_romfs_pruner.py | 1 | 3366 | #!/usr/bin/env python
############################################################################
#
# Copyright (C) 2014 PX4 Development Team. All rights reserved.
# Author: Julian Oes <[email protected]>
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. Neither the name PX4 nor the names of its contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
############################################################################
"""
px_romfs_pruner.py:
Delete all comments and newlines before ROMFS is converted to an image
"""
from __future__ import print_function
import argparse
import re
import os
def main():
# Parse commandline arguments
parser = argparse.ArgumentParser(description="ROMFS pruner.")
parser.add_argument('--folder', action="store", help="ROMFS scratch folder.")
args = parser.parse_args()
print("Pruning ROMFS files.")
# go through
for (root, dirs, files) in os.walk(args.folder):
for file in files:
# only prune text files
if ".zip" in file or ".bin" in file or ".swp" in file or ".data" in file or ".DS_Store" in file:
continue
file_path = os.path.join(root, file)
# read file line by line
pruned_content = ""
with open(file_path, "rU") as f:
for line in f:
# handle mixer files differently than startup files
if file_path.endswith(".mix"):
if line.startswith(("Z:", "M:", "R: ", "O:", "S:")):
pruned_content += line
else:
if not line.isspace() and not line.strip().startswith("#"):
pruned_content += line
# overwrite old scratch file
with open(file_path, "wb") as f:
pruned_content = re.sub("\r\n", "\n", pruned_content)
f.write(pruned_content.encode("ascii", errors='strict'))
if __name__ == '__main__':
main()
| apache-2.0 | -8,903,790,954,902,878,000 | 39.071429 | 108 | 0.620618 | false |
danielballan/photomosaic | photomosaic/flickr.py | 1 | 6191 | import warnings
import os
import re
import urllib
import requests
import itertools
import json
from tqdm import tqdm
from .photomosaic import options
PUBLIC_URL = "https://www.flickr.com/photos/"
API_URL = 'https://api.flickr.com/services/rest/'
PATH = "http://farm{farm}.staticflickr.com/{server}/"
NAME = "{id}_{secret}_b.jpg"
def _flickr_request(**kwargs):
params = dict(api_key=options['flickr_api_key'],
format='json',
nojsoncallback=1,
**kwargs)
response = requests.get(API_URL, params=params)
return response.json()
def from_search(text, dest, cutoff=4000, license=None):
"""
Download photos matching a search query and the specified license(s).
Parameters
----------
text : string
Search query
dest : string
Output directory
    cutoff : integer or None, optional
        Max number of images to download. By default, 4000, which is
        Flickr's own cap on search results; pass None to download every
        match up to that cap.
license : list or None
List of license codes documented by Flickr at
https://www.flickr.com/services/api/flickr.photos.licenses.getInfo.html
If None, photomosaic defaults to ``[1, 2, 4, 5, 7, 8]``. See link for
details.
"""
dest = os.path.expanduser(dest)
if license is None:
license = [1, 2, 4, 5, 7, 8]
os.makedirs(dest, exist_ok=True)
total = itertools.count(0)
raw_licenses = _flickr_request(method='flickr.photos.licenses.getInfo')
licenses = {item.pop('id'): item
for item in raw_licenses['licenses']['license']}
for page in itertools.count(1):
response = _flickr_request(
method='flickr.photos.search',
license=','.join(map(str, license)),
extras='owner_name,license',
per_page=500, # the max allowed value, to conserve our queries
text=text,
content_type=1, # photos only
page=page
)
if response.get('stat') != 'ok':
# If we fail requesting page 1, that's an error. If we fail
# requesting page > 1, we're just out of photos.
if page == 1:
raise RuntimeError("response: {}".format(response))
break
photos = response['photos']['photo']
pbar = tqdm(photos, desc='downloading page {}'.format(page))
for photo in pbar:
if (cutoff is not None) and (next(total) > cutoff):
pbar.close()
return
# Download and save image.
url = (PATH + NAME).format(**photo)
filename = (NAME).format(**photo)
filepath = os.path.join(dest, filename)
_try_retrieve_warn_failure(url, filepath)
# Save metadata for attribution.
metapath = os.path.splitext(filepath)[0] + '.json'
with open(metapath, 'w') as metafile:
# Collect attribution info as specified by Creative Commons
# best practices:
# https://wiki.creativecommons.org/wiki/best_practices_for_attribution#Title.2C_Author.2C_Source.2C_License
license_id = photo['license']
attribution = {'title': photo['title'],
'owner': photo['owner'],
'owner_name': photo['ownername'],
'owner_url': PUBLIC_URL + photo['ownername'],
'license_url': licenses[license_id]['url'],
'license_name': licenses[license_id]['name'],
'license': license_id}
json.dump(attribution, metafile)
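# Example usage (sketch, assuming the Flickr API key has already been set in
# the package-level ``options`` imported above; search term, destination and
# cutoff are arbitrary):
#
#     from_search('sunsets', '~/image_pool/sunsets', cutoff=500)
#
# Every downloaded JPEG gets a sibling .json file holding the attribution
# fields collected above.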
def _get_photoset(photoset_id, nsid, dest):
dest = os.path.expanduser(dest)
os.makedirs(dest, exist_ok=True)
for page in itertools.count(1):
response = _flickr_request(
method='flickr.photosets.getPhotos',
photoset_id=photoset_id,
nsid=nsid,
content_type=1, # photos only
page=page
)
if response.get('stat') != 'ok':
# If we fail requesting page 1, that's an error. If we fail
# requesting page > 1, we're just out of photos.
if page == 1:
raise RuntimeError("response: {}".format(response))
break
photos = response['photoset']['photo']
for photo in tqdm(photos, desc='downloading page {}'.format(page)):
url = (PATH + NAME).format(**photo)
filename = (NAME).format(**photo)
filepath = os.path.join(dest, filename)
_try_retrieve_warn_failure(url, filepath)
# Save metadata for attribution.
metapath = os.path.splitext(filepath)[0] + '.json'
with open(metapath, 'w') as metafile:
json.dump(photo, metafile)
def _try_retrieve_warn_failure(url, filepath):
errors = []
for _ in range(3):
try:
urllib.request.urlretrieve(url, filepath)
except urllib.error.URLError as error:
errors.append(error)
continue # try again
else:
break
else:
# tried 3 times, failed every time
warnings.warn("Skipping {}: {}".format(url, errors))
def from_url(url, dest):
"""
Download an album ("photoset") from its url.
The is no programmatic license-checking here; that is up to the user.
Parameters
----------
url : string
e.g., https://www.flickr.com/photos/<username>/sets/<photoset_id>
dest : string
Output directory
"""
dest = os.path.expanduser(dest)
m = re.match(PUBLIC_URL + "(.*)/sets/([0-9]+)", url)
if m is None:
raise ValueError("""Expected URL like:
https://www.flickr.com/photos/<username>/sets/<photoset_id>""")
username, photoset_id = m.groups()
response = _flickr_request(method="flickr.urls.lookupUser",
url=PUBLIC_URL + username)
nsid = response['user']['username']['_content']
return _get_photoset(photoset_id, nsid, dest)
| bsd-3-clause | 7,755,686,242,330,780,000 | 36.75 | 123 | 0.560491 | false |
PI2-2015-2/server_side | configuration_files/parser.py | 1 | 1183 | import json
from collections import OrderedDict
import execInstructionsProcedural as instruction
class Parser():
def parseJson(self, jsonFile):
with open("instruction.json", "w") as text_file:
text_file.write(jsonFile)
# Loads the Json file in a OrderedDict
parsed_json = json.load(open('instruction.json'), object_pairs_hook=OrderedDict)
size = len(parsed_json)
i = 0
while i < size:
# Execute inside loop instructions
if(''.join([p for p in parsed_json.keys()[i] if not p.isdigit()]) == 'loop'):
for k in range(0, parsed_json.values()[i].get('loops')):
for j in range(i+1, i+1+parsed_json.values()[i].get('instructions')):
instruction.run(parsed_json.keys()[j], parsed_json.values()[j])
# Jump to the next instruction outside the loop
i += 1+parsed_json.values()[i].get('instructions')
else:
# execute instruction
instruction.run(parsed_json.keys()[i], parsed_json.values()[i])
i += 1
# Clean PWM
instruction.cleanPWM()
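# Example usage (sketch; the non-loop instruction names in the JSON are
# hypothetical and depend on what execInstructionsProcedural supports):
#
#     json_string = '{"forward": {...}, "loop1": {"loops": 2, "instructions": 1}, "turn": {...}}'
#     Parser().parseJson(json_string)
#
# Keys whose non-digit part is "loop" repeat the following "instructions"
# entries "loops" times; every other key is passed to instruction.run().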
| gpl-3.0 | -5,921,316,127,386,513,000 | 38.433333 | 89 | 0.568047 | false |
puruckertom/ubertool | ubertool/leslie_probit/leslie_probit_exe.py | 1 | 7835 | import numpy as np
import os.path
import pandas as pd
import sys
import math
# find parent directory and import base (travis)
parentddir = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir))
sys.path.append(parentddir)
from base.uber_model import UberModel, ModelSharedInputs
# print(sys.path)
# print(os.path)
class Leslie_probitInputs(ModelSharedInputs):
"""
Input class for Leslie_probit.
"""
def __init__(self):
"""Class representing the inputs for Leslie_probit"""
super(Leslie_probitInputs, self).__init__()
# self.a_n = pd.Series([], dtype="object")
# self.c_n = pd.Series([], dtype="object")
self.grass_type = pd.Series([], dtype="object")
self.percent_active_ingredient = pd.Series([], dtype="float")
self.foliar_half_life = pd.Series([], dtype="float")
self.sol = pd.Series([], dtype="float")
self.time_steps = pd.Series([], dtype="float")
self.number_applications = pd.Series([], dtype="float")
self.application_rates = pd.Series([], dtype="float")
self.application_days = pd.Series([], dtype="float")
self.b = pd.Series([], dtype="float")
self.test_species = pd.Series([], dtype="object")
self.ld50_test = pd.Series([], dtype="float")
# self.bw_tested = pd.Series([], dtype="float")
# self.ass_species = pd.Series([], dtype="object")
# self.bw_ass = pd.Series([], dtype="float")
self.mineau_scaling_factor = pd.Series([], dtype="float")
self.probit_gamma = pd.Series([], dtype="float")
self.init_pop_size = pd.Series([], dtype="float")
self.stages = pd.Series([], dtype="float")
self.l_m = pd.Series([], dtype="float")
self.plant_surface_conc = pd.Series([], dtype="float")
class Leslie_probitOutputs(object):
"""
Output class for Leslie_probit.
"""
def __init__(self):
"""Class representing the outputs for Leslie_probit"""
super(Leslie_probitOutputs, self).__init__()
self.out_pop_matrix = pd.Series(name="out_pop_matrix")
class Leslie_probit(UberModel, Leslie_probitInputs, Leslie_probitOutputs):
"""
Leslie_probit model for population growth.
"""
def __init__(self, pd_obj, pd_obj_exp):
"""Class representing the Leslie_probit model and containing all its methods"""
super(Leslie_probit, self).__init__()
self.pd_obj = pd_obj
self.pd_obj_exp = pd_obj_exp
self.pd_obj_out = None
def execute_model(self):
"""
Callable to execute the running of the model:
1) Populate input parameters
2) Create output DataFrame to hold the model outputs
3) Run the model's methods to generate outputs
4) Fill the output DataFrame with the generated model outputs
"""
self.populate_inputs(self.pd_obj)
self.pd_obj_out = self.populate_outputs()
self.run_methods()
self.fill_output_dataframe()
# Begin model methods
def run_methods(self):
""" Execute all algorithm methods for model logic """
try:
self.leslie_probit_growth()
except Exception as e:
print(str(e))
def leslie_probit_growth(self):
self.conc_out = self.conc()
self.out = self.dose_bird()
self.out_no = self.no_dose_bird()
return
def conc(self):
# Concentration over time
def C_0(r, a, p):
return r * a / 100 * p
def C_t(c, h):
return c * np.exp(-(np.log(2) / h) * 1)
if self.grass_type == "Short Grass":
self.plant_surface_conc = 240
elif self.grass_type == "Tall Grass":
self.plant_surface_conc = 110
if self.number_applications == 1:
C_temp = C_0(self.application_rates[0], self.percent_active_ingredient, self.plant_surface_conc)
else:
C_temp = [] # empty array to hold the concentrations over days
number_applications_temp = 0 # number of existing applications
dayt = 0
application_days_l = len(self.application_days)
for i in range(0, self.time_steps):
if i == 0: # first day of application
C_temp.append(
C_0(self.application_rates[0], self.percent_active_ingredient, self.plant_surface_conc))
number_applications_temp = number_applications_temp + 1
dayt = dayt + 1
elif dayt <= application_days_l - 1 and number_applications_temp <= self.number_applications: # next application day
if i == self.application_days[dayt]:
C_temp.append(C_t(C_temp[i - 1], self.foliar_half_life) + C_0(self.application_rates[dayt],
self.percent_active_ingredient,
self.plant_surface_conc))
number_applications_temp = number_applications_temp + 1
dayt = dayt + 1
else:
C_temp.append(C_t(C_temp[i - 1], self.foliar_half_life))
else:
C_temp.append(C_t(C_temp[i - 1], self.foliar_half_life))
return C_temp
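    # Added note (commentary only): between applications the residue decays
    # first order, C(t+1) = C(t) * exp(-ln(2)/foliar_half_life), e.g. a
    # 35-day half-life loses roughly 2% of the residue per day; on an
    # application day the fresh deposit
    # C_0 = rate * (percent_active_ingredient/100) * plant_surface_conc
    # is added on top of the decayed residue.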
def dose_bird(self):
####Initial Leslie Matrix and pesticide conc###########
S = self.l_m.shape[1]
        n_f = np.zeros(shape=(S, self.time_steps))
l_m_temp = np.zeros(shape=(S, S), dtype=float)
n_csum = np.sum(self.init_pop_size)
n_f[:, 0] = self.init_pop_size.squeeze()
fw_bird = (1.180 * (self.aw_bird ** 0.874)) / 1000.0
m = []
dose_out = []
        z_out = []
        # start the projection from the user-supplied initial population
        init_pop_size = self.init_pop_size
        for i in range(self.time_steps):
            # C_temp = C_temp*np.exp(-(np.log(2)/h_l)*1)
            C_temp = self.conc_out[i]
if C_temp >= self.sol:
dose_bird = (fw_bird * C_temp) / (self.aw_bird / 1000)
else:
dose_bird = (fw_bird * C_temp[0]) / (self.aw_bird / 1000)
at_bird = (self.ld50_a) * ((self.aw_bird / self.bw_bird) ** (self.mineau_scaling_factor - 1))
# print at_bird
z = self.b * (np.log10(dose_bird) - np.log10(at_bird))
m_temp = 1 - 0.5 * (1 + math.erf(z / 1.4142))
for j in range(0, S):
l_m_temp[0, j] = self.l_m[0, j] * np.exp(-self.probit_gamma * n_csum)
if j - 1 >= 0:
l_m_temp[j, j - 1] = self.l_m[j, j - 1] * m_temp
l_m_temp[S - 1, S - 1] = self.l_m[S - 1, S - 1] * m_temp
n = np.dot(l_m_temp, init_pop_size)
n_csum = np.sum(n)
init_pop_size = n
n_f[:, i] = n.squeeze()
m.append(m_temp)
dose_out.append(dose_bird)
z_out.append(z)
return fw_bird, dose_out, at_bird, m, n_f.tolist(), z_out
def no_dose_bird(self):
####Initial Leslie Matrix and pesticide conc###########
S = self.l_m.shape[1]
n_f = np.zeros(shape=(S, self.time_steps))
        n_f[:, 0] = self.init_pop_size.squeeze()
        init_pop_size = self.init_pop_size
        for i in range(self.time_steps):
            n = np.dot(self.l_m, init_pop_size)
init_pop_size = n
n_f[:, i] = n.squeeze()
return n_f.tolist()
def leslie_growth(self):
self.out_pop_matrix = np.zeros(shape=(self.stages, self.time_steps))
self.out_pop_matrix[:, 0] = self.init_pop_size
for i in range(1, self.time_steps):
n = np.dot(self.l_m, self.out_pop_matrix[:, i - 1])
self.out_pop_matrix[:, i] = n.squeeze()
return self.out_pop_matrix.tolist()
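    # Added note (commentary only): leslie_growth() iterates the standard
    # Leslie-matrix projection n(t+1) = l_m . n(t); row 0 of l_m holds the
    # stage fecundities, the sub-diagonal holds stage-to-stage survival, and
    # column i of out_pop_matrix is the stage-structured population at time
    # step i.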
| unlicense | 2,785,421,741,394,123,000 | 38.570707 | 133 | 0.534652 | false |
sqall01/alertR | shared_code/clients_all/tests/client/core.py | 1 | 7309 | import threading
import time
import logging
import json
from typing import List, Tuple
from lib.client.core import Connection, RecvTimeout
from lib.client.communication import Communication, MsgRequest
class BasicConnection(Connection):
def __init__(self):
self._msg_list = [] # type: List[str]
@property
def msg_list(self):
return list(self._msg_list)
def connect(self):
raise NotImplementedError("Abstract class.")
def send(self,
data: str):
self._msg_list.append(data)
def recv(self,
buffsize: int,
timeout: float = 20.0) -> str:
raise NotImplementedError("Abstract class.")
def close(self):
raise NotImplementedError("Abstract class.")
class SimulatedConnection(Connection):
def __init__(self,
send_msg_queue: List[str],
send_lock: threading.Lock,
recv_msg_queue: List[str],
recv_lock: threading.Lock,
tag: str):
self._send_msg_queue = send_msg_queue
self._send_lock = send_lock
self._recv_msg_queue = recv_msg_queue
self._recv_lock = recv_lock
self._tag = tag
def connect(self):
pass
def send(self,
data: str):
logging.debug("[%s]: Sending: %s" % (self._tag, data))
with self._send_lock:
self._send_msg_queue.append(data)
def recv(self,
buffsize: int,
timeout: float = 20.0) -> str:
logging.debug("[%s]: Start receiving." % self._tag)
start_time = time.time()
while True:
# Check if our received timed out.
if (time.time() - start_time) > timeout:
logging.debug("[%s]: Timeout while receiving." % self._tag)
raise RecvTimeout
with self._recv_lock:
if self._recv_msg_queue:
data = self._recv_msg_queue.pop(0)
logging.debug("[%s]: Received: %s" % (self._tag, data))
return data
time.sleep(0.2)
def close(self):
raise NotImplementedError("Abstract class.")
class SimulatedErrorConnection(SimulatedConnection):
def __init__(self,
send_msg_queue: List[str],
send_lock: threading.Lock,
recv_msg_queue: List[str],
recv_lock: threading.Lock,
tag: str,
sim_error_rts: bool = False,
sim_error_cts: bool = False,
sim_error_request: bool = False,
sim_error_response: bool = False):
super().__init__(send_msg_queue,
send_lock,
recv_msg_queue,
recv_lock,
tag)
self.sim_error_rts = sim_error_rts
self.sim_error_cts = sim_error_cts
self.sim_error_request = sim_error_request
self.sim_error_response = sim_error_response
def connect(self):
raise NotImplementedError("Abstract class.")
def send(self,
data: str):
try:
data_json = json.loads(data)
except:
raise ValueError("Unexpected data format")
raise_error = False
if self.sim_error_rts and data_json["payload"]["type"] == "rts":
self.sim_error_rts = False
raise_error = True
elif self.sim_error_cts and data_json["payload"]["type"] == "cts":
self.sim_error_cts = False
raise_error = True
elif self.sim_error_request and data_json["payload"]["type"] == "request":
self.sim_error_request = False
raise_error = True
elif self.sim_error_response and data_json["payload"]["type"] == "response":
self.sim_error_response = False
raise_error = True
if raise_error:
super().send("SIM_EXCEPTION")
raise OSError("Simulated connection error")
super().send(data)
def recv(self,
buffsize: int,
timeout: float = 20.0) -> str:
data = super().recv(buffsize, timeout=timeout)
if data == "SIM_EXCEPTION":
raise OSError("Simulated connection error")
return data
def close(self):
raise NotImplementedError("Abstract class.")
def create_basic_communication() -> Communication:
comm = Communication(BasicConnection())
comm.set_connected()
return comm
def create_simulated_communication() -> Tuple[Communication, Communication]:
lock_send_client = threading.Lock()
lock_send_server = threading.Lock()
msg_queue_send_client = []
msg_queue_send_server = []
conn_client = SimulatedConnection(msg_queue_send_client,
lock_send_client,
msg_queue_send_server,
lock_send_server,
"client")
conn_server = SimulatedConnection(msg_queue_send_server,
lock_send_server,
msg_queue_send_client,
lock_send_client,
"server")
comm_client = Communication(conn_client)
comm_server = Communication(conn_server, is_server=True)
comm_client._log_tag = "client"
comm_server._log_tag = "server"
comm_client.set_connected()
comm_server.set_connected()
return comm_client, comm_server
def create_simulated_error_communication() -> Tuple[Communication, Communication]:
lock_send_client = threading.Lock()
lock_send_server = threading.Lock()
msg_queue_send_client = []
msg_queue_send_server = []
conn_client = SimulatedErrorConnection(msg_queue_send_client,
lock_send_client,
msg_queue_send_server,
lock_send_server,
"client")
conn_server = SimulatedErrorConnection(msg_queue_send_server,
lock_send_server,
msg_queue_send_client,
lock_send_client,
"server")
comm_client = Communication(conn_client)
comm_server = Communication(conn_server, is_server=True)
comm_client._log_tag = "client"
comm_server._log_tag = "server"
comm_client.set_connected()
comm_server.set_connected()
return comm_client, comm_server
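# Illustrative note (commentary only): the two factory functions above wire a
# client Communication and a server Communication to one shared pair of
# message queues, so whatever one side sends is what the other side's
# recv_request() sees. The "error" variant additionally injects a single
# simulated socket failure per enabled message type (rts/cts/request/response).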
def msg_receiver(**kwargs):
count = kwargs["count"] # type: int
comm = kwargs["comm"] # type: Communication
msg_requests = kwargs["msg_requests"] # type: List[MsgRequest]
sync = kwargs["sync"] # type: threading.Event
# Wait until we are clear to receive messages.
sync.wait()
logging.debug("[%s]: Starting receiver loop." % comm._log_tag)
for _ in range(count):
while not comm.has_channel:
time.sleep(0.5)
msg_request = comm.recv_request()
msg_requests.append(msg_request)
| agpl-3.0 | 6,071,460,876,706,926,000 | 29.839662 | 84 | 0.535094 | false |
gribozavr/swift | utils/update_checkout/update_checkout/update_checkout.py | 1 | 23360 | # utils/update_checkout.py - Utility to update local checkouts --*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
from __future__ import print_function
import argparse
import json
import os
import platform
import re
import sys
import traceback
from functools import reduce
from multiprocessing import Lock, Pool, cpu_count, freeze_support
from swift_build_support.swift_build_support import shell
from swift_build_support.swift_build_support.SwiftBuildSupport import \
SWIFT_SOURCE_ROOT
SCRIPT_FILE = os.path.abspath(__file__)
SCRIPT_DIR = os.path.dirname(SCRIPT_FILE)
def run_parallel(fn, pool_args, n_processes=0):
"""Function used to run a given closure in parallel.
NOTE: This function was originally located in the shell module of
swift_build_support and should eventually be replaced with a better
parallel implementation.
"""
def init(l):
global lock
lock = l
if n_processes == 0:
n_processes = cpu_count() * 2
lk = Lock()
print("Running ``%s`` with up to %d processes." %
(fn.__name__, n_processes))
pool = Pool(processes=n_processes, initializer=init, initargs=(lk,))
results = pool.map_async(func=fn, iterable=pool_args).get(999999)
pool.close()
pool.join()
return results
def check_parallel_results(results, op):
"""Function used to check the results of run_parallel.
NOTE: This function was originally located in the shell module of
swift_build_support and should eventually be replaced with a better
parallel implementation.
"""
fail_count = 0
if results is None:
return 0
for r in results:
if r is not None:
if fail_count == 0:
print("======%s FAILURES======" % op)
print("%s failed (ret=%d): %s" % (r.repo_path, r.ret, r))
fail_count += 1
if r.stderr:
print(r.stderr)
return fail_count
def confirm_tag_in_repo(tag, repo_name):
tag_exists = shell.capture(['git', 'ls-remote', '--tags',
'origin', tag], echo=False)
if not tag_exists:
print("Tag '" + tag + "' does not exist for '" +
repo_name + "', just updating regularly")
tag = None
return tag
def find_rev_by_timestamp(timestamp, repo_name, refspec):
args = ["git", "log", "-1", "--format=%H", "--first-parent",
'--before=' + timestamp, refspec]
rev = shell.capture(args).strip()
if rev:
return rev
else:
raise RuntimeError('No rev in %s before timestamp %s' %
(repo_name, timestamp))
def get_branch_for_repo(config, repo_name, scheme_name, scheme_map,
cross_repos_pr):
cross_repo = False
repo_branch = scheme_name
if scheme_map:
scheme_branch = scheme_map[repo_name]
repo_branch = scheme_branch
remote_repo_id = config['repos'][repo_name]['remote']['id']
if remote_repo_id in cross_repos_pr:
cross_repo = True
pr_id = cross_repos_pr[remote_repo_id]
repo_branch = "ci_pr_{0}".format(pr_id)
shell.run(["git", "checkout", scheme_branch],
echo=True)
shell.capture(["git", "branch", "-D", repo_branch],
echo=True, allow_non_zero_exit=True)
shell.run(["git", "fetch", "origin",
"pull/{0}/merge:{1}"
.format(pr_id, repo_branch), "--tags"], echo=True)
return repo_branch, cross_repo
def update_single_repository(pool_args):
source_root, config, repo_name, scheme_name, scheme_map, tag, timestamp, \
reset_to_remote, should_clean, cross_repos_pr = pool_args
repo_path = os.path.join(source_root, repo_name)
if not os.path.isdir(repo_path) or os.path.islink(repo_path):
return
try:
print("Updating '" + repo_path + "'")
with shell.pushd(repo_path, dry_run=False, echo=False):
cross_repo = False
checkout_target = None
if tag:
checkout_target = confirm_tag_in_repo(tag, repo_name)
elif scheme_name:
checkout_target, cross_repo = get_branch_for_repo(
config, repo_name, scheme_name, scheme_map, cross_repos_pr)
if timestamp:
checkout_target = find_rev_by_timestamp(timestamp,
repo_name,
checkout_target)
# The clean option restores a repository to pristine condition.
if should_clean:
shell.run(['git', 'clean', '-fdx'], echo=True)
shell.run(['git', 'submodule', 'foreach', '--recursive', 'git',
'clean', '-fdx'], echo=True)
shell.run(['git', 'submodule', 'foreach', '--recursive', 'git',
'reset', '--hard', 'HEAD'], echo=True)
shell.run(['git', 'reset', '--hard', 'HEAD'], echo=True)
# It is possible to reset --hard and still be mid-rebase.
try:
shell.run(['git', 'rebase', '--abort'], echo=True)
except Exception:
pass
if checkout_target:
shell.run(['git', 'status', '--porcelain', '-uno'],
echo=False)
shell.run(['git', 'checkout', checkout_target], echo=True)
# It's important that we checkout, fetch, and rebase, in order.
# .git/FETCH_HEAD updates the not-for-merge attributes based on
# which branch was checked out during the fetch.
shell.run(["git", "fetch", "--recurse-submodules=yes", "--tags"],
echo=True)
# If we were asked to reset to the specified branch, do the hard
# reset and return.
if checkout_target and reset_to_remote and not cross_repo:
full_target = full_target_name('origin', checkout_target)
shell.run(['git', 'reset', '--hard', full_target], echo=True)
return
# Query whether we have a "detached HEAD", which will mean that
# we previously checked out a tag rather than a branch.
detached_head = False
try:
# This git command returns error code 1 if HEAD is detached.
# Otherwise there was some other error, and we need to handle
# it like other command errors.
shell.run(["git", "symbolic-ref", "-q", "HEAD"], echo=False)
except Exception as e:
if e.ret == 1:
detached_head = True
else:
raise # Pass this error up the chain.
# If we have a detached HEAD in this repository, we don't want
# to rebase. With a detached HEAD, the fetch will have marked
# all the branches in FETCH_HEAD as not-for-merge, and the
# "git rebase FETCH_HEAD" will try to rebase the tree from the
# default branch's current head, making a mess.
# Prior to Git 2.6, this is the way to do a "git pull
# --rebase" that respects rebase.autostash. See
# http://stackoverflow.com/a/30209750/125349
if not cross_repo and not detached_head:
shell.run(["git", "rebase", "FETCH_HEAD"], echo=True)
elif detached_head:
print(repo_path,
"\nDetached HEAD; probably checked out a tag. No need "
"to rebase.\n")
shell.run(["git", "submodule", "update", "--recursive"], echo=True)
except Exception:
(type, value, tb) = sys.exc_info()
print('Error on repo "%s": %s' % (repo_path, traceback.format_exc()))
return value
def get_timestamp_to_match(args):
if not args.match_timestamp:
return None
with shell.pushd(os.path.join(args.source_root, "swift"),
dry_run=False, echo=False):
return shell.capture(["git", "log", "-1", "--format=%cI"],
echo=False).strip()
def update_all_repositories(args, config, scheme_name, cross_repos_pr):
scheme_map = None
if scheme_name:
# This loop is only correct, since we know that each alias set has
# unique contents. This is checked by validate_config. Thus the first
# branch scheme data that has scheme_name as one of its aliases is
# the only possible correct answer.
for v in config['branch-schemes'].values():
if scheme_name in v['aliases']:
scheme_map = v['repos']
break
pool_args = []
timestamp = get_timestamp_to_match(args)
for repo_name in config['repos'].keys():
if repo_name in args.skip_repository_list:
print("Skipping update of '" + repo_name + "', requested by user")
continue
my_args = [args.source_root, config,
repo_name,
scheme_name,
scheme_map,
args.tag,
timestamp,
args.reset_to_remote,
args.clean,
cross_repos_pr]
pool_args.append(my_args)
return run_parallel(update_single_repository, pool_args, args.n_processes)
def obtain_additional_swift_sources(pool_args):
(args, repo_name, repo_info, repo_branch, remote, with_ssh, scheme_name,
skip_history, skip_repository_list) = pool_args
env = dict(os.environ)
env.update({'GIT_TERMINAL_PROMPT': 0})
with shell.pushd(args.source_root, dry_run=False, echo=False):
print("Cloning '" + repo_name + "'")
if skip_history:
shell.run(['git', 'clone',
'--recursive', '--depth', '1', '--branch',
repo_branch, remote, repo_name],
env=env,
echo=True)
else:
shell.run(['git', 'clone',
'--recursive', remote, repo_name],
env=env,
echo=True)
if scheme_name:
src_path = os.path.join(args.source_root, repo_name, ".git")
shell.run(['git', '--git-dir',
src_path, '--work-tree',
os.path.join(args.source_root, repo_name),
'checkout', repo_branch],
env=env,
echo=False)
with shell.pushd(os.path.join(args.source_root, repo_name),
dry_run=False, echo=False):
shell.run(["git", "submodule",
"update", "--recursive"],
env=env,
echo=False)
def obtain_all_additional_swift_sources(args, config, with_ssh, scheme_name,
skip_history, skip_repository_list):
pool_args = []
with shell.pushd(args.source_root, dry_run=False, echo=False):
for repo_name, repo_info in config['repos'].items():
if repo_name in skip_repository_list:
print("Skipping clone of '" + repo_name + "', requested by "
"user")
continue
if os.path.isdir(os.path.join(repo_name, ".git")):
print("Skipping clone of '" + repo_name + "', directory "
"already exists")
continue
# If we have a url override, use that url instead of
# interpolating.
remote_repo_info = repo_info['remote']
if 'url' in remote_repo_info:
remote = remote_repo_info['url']
else:
remote_repo_id = remote_repo_info['id']
if with_ssh is True or 'https-clone-pattern' not in config:
remote = config['ssh-clone-pattern'] % remote_repo_id
else:
remote = config['https-clone-pattern'] % remote_repo_id
repo_branch = None
repo_not_in_scheme = False
if scheme_name:
for v in config['branch-schemes'].values():
if scheme_name not in v['aliases']:
continue
# If repo is not specified in the scheme, skip cloning it.
if repo_name not in v['repos']:
repo_not_in_scheme = True
continue
repo_branch = v['repos'][repo_name]
break
else:
repo_branch = scheme_name
if repo_not_in_scheme:
continue
pool_args.append([args, repo_name, repo_info, repo_branch, remote,
with_ssh, scheme_name, skip_history,
skip_repository_list])
if not pool_args:
print("Not cloning any repositories.")
return
return run_parallel(
obtain_additional_swift_sources, pool_args, args.n_processes)
def dump_repo_hashes(args, config, branch_scheme_name='repro'):
"""
Dumps the current state of the repo into a new config file that contains a
master branch scheme with the relevant branches set to the appropriate
hashes.
"""
new_config = {}
config_copy_keys = ['ssh-clone-pattern', 'https-clone-pattern', 'repos']
for config_copy_key in config_copy_keys:
new_config[config_copy_key] = config[config_copy_key]
repos = {}
repos = repo_hashes(args, config)
branch_scheme = {'aliases': [branch_scheme_name], 'repos': repos}
new_config['branch-schemes'] = {branch_scheme_name: branch_scheme}
json.dump(new_config, sys.stdout, indent=4)
def repo_hashes(args, config):
repos = {}
for repo_name, repo_info in sorted(config['repos'].items(),
key=lambda x: x[0]):
repo_path = os.path.join(args.source_root, repo_name)
if os.path.exists(repo_path):
with shell.pushd(repo_path, dry_run=False, echo=False):
h = shell.capture(["git", "rev-parse", "HEAD"],
echo=False).strip()
else:
h = 'skip'
repos[repo_name] = str(h)
return repos
def print_repo_hashes(args, config):
repos = repo_hashes(args, config)
for repo_name, repo_hash in sorted(repos.items(),
key=lambda x: x[0]):
print("{:<35}: {:<35}".format(repo_name, repo_hash))
def validate_config(config):
# Make sure that our branch-names are unique.
scheme_names = config['branch-schemes'].keys()
if len(scheme_names) != len(set(scheme_names)):
raise RuntimeError('Configuration file has duplicate schemes?!')
# Ensure the branch-scheme name is also an alias
# This guarantees sensible behavior of update_repository_to_scheme when
# the branch-scheme is passed as the scheme name
for scheme_name in config['branch-schemes'].keys():
if scheme_name not in config['branch-schemes'][scheme_name]['aliases']:
raise RuntimeError('branch-scheme name: "{0}" must be an alias '
'too.'.format(scheme_name))
# Then make sure the alias names used by our branches are unique.
#
# We do this by constructing a list consisting of len(names),
# set(names). Then we reduce over that list summing the counts and taking
# the union of the sets. We have uniqueness if the length of the union
# equals the length of the sum of the counts.
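# A hypothetical illustration: alias sets ['a', 'b'] and ['b', 'c'] sum to a
# count of 4 but union to {'a', 'b', 'c'} of length 3, so the check below
# raises; fully disjoint alias sets make the two numbers equal.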
data = [(len(v['aliases']), set(v['aliases']))
for v in config['branch-schemes'].values()]
result = reduce(lambda acc, x: (acc[0] + x[0], acc[1] | x[1]), data,
(0, set([])))
if result[0] == len(result[1]):
return
raise RuntimeError('Configuration file has schemes with duplicate '
'aliases?!')
def full_target_name(repository, target):
tag = shell.capture(["git", "tag", "-l", target], echo=True).strip()
if tag == target:
return tag
branch = shell.capture(["git", "branch", "--list", target],
echo=True).strip().replace("* ", "")
if branch == target:
name = "%s/%s" % (repository, target)
return name
raise RuntimeError('Cannot determine if %s is a branch or a tag' % target)
def skip_list_for_platform(config):
# If there is a platforms key only include the repo if the
# platform is in the list
skip_list = []
platform_name = platform.system()
for repo_name, repo_info in config['repos'].items():
if 'platforms' in repo_info:
if platform_name not in repo_info['platforms']:
print("Skipping", repo_name, "on", platform_name)
skip_list.append(repo_name)
else:
print("Including", repo_name, "on", platform_name)
return skip_list
def main():
freeze_support()
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description="""
repositories.
By default, updates your checkouts of Swift, SourceKit, LLDB, and SwiftPM.""")
parser.add_argument(
"--clone",
help="Obtain Sources for Swift and Related Projects",
action="store_true")
parser.add_argument(
"--clone-with-ssh",
help="Obtain Sources for Swift and Related Projects via SSH",
action="store_true")
parser.add_argument(
"--skip-history",
help="Skip histories when obtaining sources",
action="store_true")
parser.add_argument(
"--skip-repository",
metavar="DIRECTORY",
default=[],
help="Skip the specified repository",
dest='skip_repository_list',
action="append")
parser.add_argument(
"--scheme",
help='Use branches from the specified branch-scheme. A "branch-scheme"'
' is a list of (repo, branch) pairs.',
metavar='BRANCH-SCHEME',
dest='scheme')
parser.add_argument(
'--reset-to-remote',
help='Reset each branch to the remote state.',
action='store_true')
parser.add_argument(
'--clean',
help='Clean unrelated files from each repository.',
action='store_true')
parser.add_argument(
"--config",
default=os.path.join(SCRIPT_DIR, os.pardir,
"update-checkout-config.json"),
help="Configuration file to use")
parser.add_argument(
"--github-comment",
help="""Check out related pull requests referenced in the given
free-form GitHub-style comment.""",
metavar='GITHUB-COMMENT',
dest='github_comment')
parser.add_argument(
'--dump-hashes',
action='store_true',
help='Dump the git hashes of all repositories being tracked')
parser.add_argument(
'--dump-hashes-config',
help='Dump the git hashes of all repositories packaged into '
'update-checkout-config.json',
metavar='BRANCH-SCHEME-NAME')
parser.add_argument(
"--tag",
help="""Check out each repository to the specified tag.""",
metavar='TAG-NAME')
parser.add_argument(
"--match-timestamp",
help='Check out adjacent repositories to match timestamp of '
' current swift checkout.',
action='store_true')
parser.add_argument(
"-j", "--jobs",
type=int,
help="Number of threads to run at once",
default=0,
dest="n_processes")
parser.add_argument(
"--source-root",
help="The root directory to checkout repositories",
default=SWIFT_SOURCE_ROOT,
dest='source_root')
args = parser.parse_args()
if not args.scheme:
if args.reset_to_remote:
print("update-checkout usage error: --reset-to-remote must "
"specify --scheme=foo")
sys.exit(1)
if args.match_timestamp:
# without a scheme, we won't be able match timestamps forward in
# time, which is an annoying footgun for bisection etc.
print("update-checkout usage error: --match-timestamp must "
"specify --scheme=foo")
sys.exit(1)
clone = args.clone
clone_with_ssh = args.clone_with_ssh
skip_history = args.skip_history
scheme = args.scheme
github_comment = args.github_comment
with open(args.config) as f:
config = json.load(f)
validate_config(config)
if args.dump_hashes:
dump_repo_hashes(args, config)
return (None, None)
if args.dump_hashes_config:
dump_repo_hashes(args, config, args.dump_hashes_config)
return (None, None)
cross_repos_pr = {}
if github_comment:
regex_pr = r'(apple/[-a-zA-Z0-9_]+/pull/\d+|apple/[-a-zA-Z0-9_]+#\d+)'
repos_with_pr = re.findall(regex_pr, github_comment)
print("Found related pull requests:", str(repos_with_pr))
repos_with_pr = [pr.replace('/pull/', '#') for pr in repos_with_pr]
cross_repos_pr = dict(pr.split('#') for pr in repos_with_pr)
clone_results = None
if clone or clone_with_ssh:
# If branch is None, default to using the default branch alias
# specified by our configuration file.
if scheme is None:
scheme = config['default-branch-scheme']
skip_repo_list = skip_list_for_platform(config)
skip_repo_list.extend(args.skip_repository_list)
clone_results = obtain_all_additional_swift_sources(args, config,
clone_with_ssh,
scheme,
skip_history,
skip_repo_list)
# Quick check whether somebody is calling update in an empty directory
directory_contents = os.listdir(args.source_root)
if not ('cmark' in directory_contents or
'llvm' in directory_contents or
'clang' in directory_contents):
print("You don't have all swift sources. "
"Call this script with --clone to get them.")
update_results = update_all_repositories(args, config, scheme,
cross_repos_pr)
fail_count = 0
fail_count += check_parallel_results(clone_results, "CLONE")
fail_count += check_parallel_results(update_results, "UPDATE")
if fail_count > 0:
print("update-checkout failed, fix errors and try again")
else:
print("update-checkout succeeded")
print_repo_hashes(args, config)
sys.exit(fail_count)
| apache-2.0 | 6,838,618,704,136,831,000 | 37.803987 | 79 | 0.557877 | false |
tcpcloud/openvstorage | ovs/extensions/hypervisor/hypervisors/vmware.py | 1 | 7066 | # Copyright 2014 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module for the VMware hypervisor client
"""
import os
from ovs.extensions.hypervisor.apis.vmware.sdk import Sdk
class VMware(object):
"""
Represents the hypervisor client for VMware
"""
def __init__(self, ip, username, password):
"""
Initializes the object with credentials and connection information
"""
self.sdk = Sdk(ip, username, password)
self.state_mapping = {'poweredOn' : 'RUNNING',
'poweredOff': 'HALTED',
'suspended' : 'PAUSED'}
def get_state(self, vmid):
"""
Get the current power state of a virtual machine
@param vmid: hypervisor id of the virtual machine
"""
return self.state_mapping[self.sdk.get_power_state(vmid)]
def create_vm_from_template(self, name, source_vm, disks, ip, mountpoint, wait=True):
"""
Create a new vmachine from an existing template
"""
task = self.sdk.create_vm_from_template(name, source_vm, disks, ip, mountpoint, wait)
if wait is True:
if self.sdk.validate_result(task):
task_info = self.sdk.get_task_info(task)
return task_info.info.result.value
return None
def clone_vm(self, vmid, name, disks, wait=False):
"""
Clone a vmachine
@param vmid: hypervisor id of the virtual machine
@param name: name of the virtual machine
@param disks: list of disk information
@param wait: wait for action to complete
"""
task = self.sdk.clone_vm(vmid, name, disks, wait)
if wait is True:
if self.sdk.validate_result(task):
task_info = self.sdk.get_task_info(task)
return task_info.info.result.value
return None
def delete_vm(self, vmid, storagedriver_mountpoint, storagedriver_storage_ip, devicename, disks_info=None, wait=False):
"""
Remove the vmachine from the hypervisor
@param vmid: hypervisor id of the virtual machine
@param wait: wait for action to complete
"""
if disks_info is None:
disks_info = []
_ = disks_info
self.sdk.delete_vm(vmid, storagedriver_mountpoint, storagedriver_storage_ip, devicename, wait)
def get_vm_object(self, vmid):
"""
Gets the VMware virtual machine object from VMware by its identifier
"""
return self.sdk.get_vm(vmid)
def get_vm_agnostic_object(self, vmid):
"""
Gets the VMware virtual machine object from VMware by its identifier
"""
return self.sdk.make_agnostic_config(self.sdk.get_vm(vmid))
def get_vm_object_by_devicename(self, devicename, ip, mountpoint):
"""
Gets the VMware virtual machine object from VMware by devicename
and datastore identifiers
"""
return self.sdk.make_agnostic_config(self.sdk.get_nfs_datastore_object(ip, mountpoint, devicename)[0])
def get_vms_by_nfs_mountinfo(self, ip, mountpoint):
"""
Gets a list of agnostic vm objects for a given ip and mountpoint
"""
for vm in self.sdk.get_vms(ip, mountpoint):
yield self.sdk.make_agnostic_config(vm)
def is_datastore_available(self, ip, mountpoint):
"""
@param ip : hypervisor ip to query for datastore presence
@param mountpoint: nfs mountpoint on hypervisor
@rtype: boolean
@return: True | False
"""
return self.sdk.is_datastore_available(ip, mountpoint)
def set_as_template(self, vmid, disks, wait=False):
"""
Configure a vm as template
This lets the machine exist on the hypervisor but configures
all disks as "Independent Non-persistent"
@param vmid: hypervisor id of the virtual machine
"""
return self.sdk.set_disk_mode(vmid, disks, 'independent_nonpersistent', wait)
def mount_nfs_datastore(self, name, remote_host, remote_path):
"""
Mounts a given NFS export as a datastore
"""
return self.sdk.mount_nfs_datastore(name, remote_host, remote_path)
def test_connection(self):
"""
Checks whether this node is a vCenter
"""
return self.sdk.test_connection()
def clean_backing_disk_filename(self, path):
"""
Cleans a backing disk filename to the corresponding disk filename
"""
_ = self
return path.replace('-flat.vmdk', '.vmdk').strip('/')
def get_backing_disk_path(self, machinename, devicename):
"""
Builds the path for the file backing a given device/disk
"""
_ = self
return '/{}/{}-flat.vmdk'.format(machinename.replace(' ', '_'), devicename)
def get_disk_path(self, machinename, devicename):
"""
Builds the path for the file backing a given device/disk
"""
_ = self
return '/{}/{}.vmdk'.format(machinename.replace(' ', '_'), devicename)
def clean_vmachine_filename(self, path):
"""
Cleans a VM filename
"""
_ = self
return path.strip('/')
def get_vmachine_path(self, machinename, storagerouter_machineid):
"""
Builds the path for the file representing a given vmachine
"""
_ = self, storagerouter_machineid # For compatibility purposes only
machinename = machinename.replace(' ', '_')
return '/{}/{}.vmx'.format(machinename, machinename)
def get_rename_scenario(self, old_name, new_name):
"""
Gets the rename scenario based on the old and new name
"""
_ = self
if old_name.endswith('.vmx') and new_name.endswith('.vmx'):
return 'RENAME'
elif old_name.endswith('.vmx~') and new_name.endswith('.vmx'):
return 'UPDATE'
return 'UNSUPPORTED'
def should_process(self, devicename, machine_ids=None):
"""
Checks whether a given device should be processed
"""
_ = self, devicename, machine_ids
return True
def file_exists(self, storagedriver, devicename):
"""
Check if devicename exists on the given vpool
"""
return self.sdk.file_exists(storagedriver.storage_ip,
storagedriver.mountpoint,
self.clean_vmachine_filename(devicename))
| apache-2.0 | 2,021,212,799,868,938,500 | 34.33 | 123 | 0.608973 | false |
qualitio/qualitio | qualitio/filter/tests/building_forms_from_params_test.py | 1 | 2192 | from nose.tools import *
from django.http import QueryDict
from qualitio.core.tests.utils import BaseTestCase
from qualitio.store.models import TestCase as StoreTestCase
from qualitio import filter as filterapp
# params construction:
# group_id - form_class_id - form_id - field_name
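# e.g. the key '1-0-1-to_date' used in the tests below addresses the 'to_date'
# field of form 1 of form class 0 inside group 1.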
class BuildingFormsFromParamsTest(BaseTestCase):
def setUp(self):
self.default_exclude = ('lft', 'rght', 'tree_id', 'level')
self.form_classes = filterapp.generate_form_classes(StoreTestCase, exclude=self.default_exclude)
def get_number_of_forms(self, filter):
number_of_forms = 0
for g in filter.groups.values():
number_of_forms += len(g.forms)
return number_of_forms
def assertFilterGroupConsistency(self, params, expected_number_of_groups=-1, expected_number_of_forms=-1):
filter = filterapp.Filter(params, form_classes=self.form_classes)
filter.build_from_params()
number_of_forms = self.get_number_of_forms(filter)
self.assertEquals(expected_number_of_groups, len(filter.groups))
self.assertEquals(expected_number_of_forms, number_of_forms)
def test_should_have_proper_number_of_forms(self):
params = QueryDict('&'.join([
'1-0-1-to_date=',
'1-0-1-from_date=',
'1-1-1-to_date=',
'1-1-1-from_date=',
'1-4-1-q=',
'2-4-1-q=3',
'3-4-1-q=',
]))
self.assertFilterGroupConsistency(params, expected_number_of_groups=3, expected_number_of_forms=5)
def test_problematic(self):
params = QueryDict('&'.join([
'1-0-1-from_date=',
'1-0-1-to_date=',
'1-1-1-from_date=',
'1-1-1-to_date=',
'1-4-1-q=1',
'2-4-1-q=3',
'3-4-1-q=',
'4-4-1-q=',
'5-1-1-from_date=',
'5-1-1-to_date=',
]))
self.assertFilterGroupConsistency(params, expected_number_of_groups=5, expected_number_of_forms=7)
| gpl-3.0 | 2,354,415,502,812,982,300 | 33.793651 | 110 | 0.552007 | false |
lino-framework/lino | lino/modlib/forms/__init__.py | 1 | 1165 | # -*- coding: UTF-8 -*-
# Copyright 2017 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
"""
This started as a copy of :mod:`lino.modlib.bootstrap`.
.. autosummary::
:toctree:
views
renderer
models
"""
from lino.api.ad import Plugin
class Plugin(Plugin):
ui_handle_attr_name = 'forms_handle'
needs_plugins = ['lino.modlib.jinja']
url_prefix = 'f'
def on_ui_init(self, ui):
from .renderer import Renderer
self.renderer = Renderer(self)
# ui.bs3_renderer = self.renderer
def get_patterns(self):
from django.conf.urls import url
from . import views
rx = '^'
urls = [
url(rx + r'$', views.Index.as_view()),
url(rx + r'auth', views.Authenticate.as_view()),
url(rx + r'(?P<app_label>\w+)/(?P<actor>\w+)$',
views.List.as_view()),
url(rx + r'(?P<app_label>\w+)/(?P<actor>\w+)/(?P<pk>.+)$',
views.Element.as_view()),
]
return urls
def get_index_view(self):
from . import views
return views.Index.as_view()
| bsd-2-clause | -6,574,725,036,883,842,000 | 21.843137 | 78 | 0.550215 | false |
noashin/3D_features_classification_CNN | generate_experiment_files/generate_experiment_files.py | 1 | 1824 | import click
from prepare_args_all_instances import prepare_args
from generate_labels import generate_labels
from generate_mean_image_binaryprotos import generate_mean_files
from read_csv_seperated_rgb_histograms import generate_lmdbs
@click.command()
@click.option('--source_dir', type=click.STRING,
default='/mnt/scratch/noa/pclproj/fpfh',
help='directory containing fpfh csv files')
@click.option('--lmdb_dir', type=click.STRING,
default='/mnt/scratch/noa/pclproj/results/',
help='path to save the json file')
@click.option('--mean_dir', type=click.STRING,
default='/home/noa/pcl_proj/experiments/mean_images',
help='directory to save the mean image binaryproto files')
@click.option('--labels_dict', type=click.STRING,
default='/mnt/scratch/noa/pclproj/results/labels_objects_dict.json',
help='dictionary mapping between objects and labels')
def main(source_dir, lmdb_dir, mean_dir, labels_dict):
print '============================================================================'
print 'beginning to generate arguments file'
args_file_path = prepare_args(source_dir, lmdb_dir)
print 'Done generating arguments file'
print '============================================================================'
print 'Beginning to generate labels'
generate_labels(args_file_path, labels_dict, lmdb_dir)
print 'Done generating labels'
print '============================================================================'
print 'Beginning to generate mean files'
generate_mean_files(args_file_path, mean_dir)
print 'Done generating mean files'
print '============================================================================'
print 'Beginning to generate lmdb files'
generate_lmdbs(args_file_path)
print 'Done generating lmdb files'
if __name__ == "__main__":
main() | mit | -6,617,948,964,496,934,000 | 41.44186 | 85 | 0.606908 | false |
skbkontur/carbonate | carbonate/fill.py | 1 | 3788 | # original work: https://github.com/graphite-project/whisper/issues/22
# whisper-fill: unlike whisper-merge, don't overwrite data that's
# already present in the target file, but instead, only add the missing
# data (e.g. where the gaps in the target file are). Because no values
# are overwritten, no data or precision gets lost. Also, unlike
# whisper-merge, try to take the highest-precision archive to provide
# the data, instead of the one with the largest retention.
# Using this script, reconciliation between two replica instances can be
# performed by whisper-fill-ing the data of the other replica with the
# data that exists locally, without introducing the quite remarkable
# gaps that whisper-merge leaves behind (filling a higher precision
# archive with data from a lower precision one)
# Work performed by author while working at Booking.com.
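# A rough usage sketch (paths are hypothetical): fill_archives(src, dst, startFrom),
# e.g. fill_archives('/data/replica_b/metric.wsp', '/data/replica_a/metric.wsp',
# time.time()), fills only the gaps in the second (local) file with data from the
# first, walking backwards from now through each archive's retention window.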
from whisper import info, fetch, update_many
try:
from whisper import operator
HAS_OPERATOR = True
except ImportError:
HAS_OPERATOR = False
import itertools
import time
def itemgetter(*items):
if HAS_OPERATOR:
return operator.itemgetter(*items)
else:
if len(items) == 1:
item = items[0]
def g(obj):
return obj[item]
else:
def g(obj):
return tuple(obj[item] for item in items)
return g
def fill(src, dst, tstart, tstop):
# fetch range start-stop from src, taking values from the highest
# precision archive, thus optionally requiring multiple fetch + merges
srcHeader = info(src)
if srcHeader is None:
return
srcArchives = srcHeader['archives']
srcArchives.sort(key=itemgetter('retention'))
# find oldest point in time, stored by both files
srcTime = int(time.time()) - srcHeader['maxRetention']
if tstart < srcTime and tstop < srcTime:
return
# we want to retain as much precision as we can, hence we do backwards
# walk in time
# skip forward at max 'step' points at a time
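# e.g. with hypothetical archives of 10s:1d and 60s:30d retention, points newer
# than one day are copied at 10-second resolution first; tstop is then moved back
# so the older remainder of the range is filled from the 60-second archive.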
for archive in srcArchives:
# skip over archives that don't have any data points
rtime = time.time() - archive['retention']
if tstop <= rtime:
continue
untilTime = tstop
fromTime = rtime if rtime > tstart else tstart
(timeInfo, values) = fetch(src, fromTime, untilTime)
(start, end, archive_step) = timeInfo
pointsToWrite = list(itertools.ifilter(
lambda points: points[1] is not None,
itertools.izip(xrange(start, end, archive_step), values)))
# order points by timestamp, newest first
pointsToWrite.sort(key=lambda p: p[0], reverse=True)
update_many(dst, pointsToWrite)
tstop = fromTime
# can stop when there's nothing to fetch any more
if tstart == tstop:
return
def fill_archives(src, dst, startFrom):
header = info(dst)
archives = header['archives']
archives = sorted(archives, key=lambda t: t['retention'])
for archive in archives:
fromTime = time.time() - archive['retention']
if fromTime >= startFrom:
continue
(timeInfo, values) = fetch(dst, fromTime, startFrom)
(start, end, step) = timeInfo
gapstart = None
for v in values:
if not v and not gapstart:
gapstart = start
elif v and gapstart:
# ignore single units lost
if (start - gapstart) > archive['secondsPerPoint']:
fill(src, dst, gapstart - step, start)
gapstart = None
elif gapstart and start == end - step:
fill(src, dst, gapstart - step, start)
start += step
startFrom = fromTime
| mit | 6,861,726,190,305,352,000 | 31.93913 | 74 | 0.636748 | false |
iwconfig/dlffmpeg | binary_builder.py | 1 | 7329 | #!/usr/bin/env python
from __future__ import print_function
from platform import system
import os, shutil, argparse
def input():
try:
return raw_input()
except NameError:
return __builtins__.input()
__version__ = '0.2'
def main(args):
path = os.getcwd()
while not args.file:
def dirlist(path = path):
ls = [x for x in os.listdir(path) if x.endswith(('.spec', '.py')) and not x in ('setup.py', os.path.basename(__file__))]
ls.sort(key=lambda f: os.path.splitext(f)[1], reverse=True)
default = 'build.spec'
if not default in ls:
default = [x for x in ls if x.endswith('.spec')][0]
ls.insert(0, ls.pop(ls.index(default)))
if not ls:
return False
else: return ls
def nlist(ls, num = {}):
for n, d in enumerate(ls, 1):
print('{0}) {1}'.format(n, d))
num[n] = d
if d == ls[-1]:
print('{0}) {1}'.format(n+1, '::CHOOSE ANOTHER DIRECTORY::'))
num[n+1] = None
return num
ls = dirlist(path)
if ls:
num = nlist(ls)
print('Select file (default: {}) [number/enter]:'.format(ls[0]), end=' ')
while True:
n = input()
if not n:
n = '1'
if n.isdigit():
n = int(n)
if n in num.keys():
break
print('\nEnter valid number [{0}-{1}]:'.format(list(num)[0], list(num)[-1]), end=' ')
if n == list(num)[-1]:
while True:
print('\nEnter path to look for files:', end=' ')
path = input()
if not os.path.isdir(path):
print('Not a valid path. Try again.')
if not dirlist(path):
print('No *.spec or *.py found. Enter another path.')
else:
os.chdir(path)
break
else:
args.file = os.path.abspath(num[n])
if not os.path.isfile(os.path.abspath(args.file)):
print('not a real file')
os._exit(1)
else:
dirname = os.path.dirname(args.file)
shutil.rmtree('{}/__pycache__'.format(dirname), ignore_errors=True)
shutil.rmtree('{}/build/'.format(dirname), ignore_errors=True)
shutil.rmtree('{}/dist/'.format(dirname), ignore_errors=True)
pyc = '{}/dlffmpeg.pyc'.format(dirname)
os.path.exists(pyc) and os.remove(pyc)
def choose(message = None):
print(message, end=' ')
yes = set(['yes','y', 'ye', ''])
no = set(['no','n'])
choice = input().lower()
if choice in yes:
return True
elif choice in no:
return False
else:
print("Please respond with 'yes' or 'no'")
def which(program):
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
if args.unix:
if which('pyinstaller'):
os.system('pyinstaller --onefile {}'.format(args.file))
else:
print('installing pyinstaller using pip')
from pip import main as pip
pip(['install', '-U', 'pyinstaller'])
os._exit(0)
if args.windows:
if not 'windows' in system:
try:
prefix = os.environ['WINEPREFIX']
print('running under wine')
except KeyError:
choice = choose("Can't find wine path. Is Wine installed? [y/n]")
if choice == True:
print("Please specify the wine path below. Press 'enter' for default (~/.wine):", end=' ')
prefix = input()
if prefix == '':
prefix = os.path.expanduser('~/.wine')
if prefix.startswith('~'):
prefix = os.path.expanduser(prefix)
choice = choose("Is {} correct? [y/n]".format(prefix))
prefix = os.environ['WINEPREFIX'] = prefix
with open(os.path.expanduser('~/.bashrc'), 'a') as bashrc:
bashrc.write("\nexport WINEPREFIX={}\n".format(prefix))
print('Wrote env. variable WINEPREFIX={} to ~/.bashrc'.format(prefix))
else:
print('Please install Wine and Python (under Wine).')
print('\n# Follow these instructions:')
print('# Install Wine with apt-get')
print('sudo dpkg --add-architecture i386 # If you want 32bit support')
print('sudo apt-get install wine')
print('\n# Downloading python version 3.4.4 (latest available with MSI installer) ...')
try:
from urllib.request import urlretrieve as dl
except ImportError:
from urllib import urlretrieve as dl
f = dl('https://www.python.org/ftp/python/3.4.4/python-3.4.4.msi', '/tmp/python-3.4.4.msi')[0]
print('# Done. Now install Python using the following:')
print('wine msiexec /i {} /L*v log.txt'.format(f))
print('wine C:/Python34/python.exe C:/Python34/Scripts/pip.exe install -U pip')
print('wine C:/Python34/python.exe C:/Python34/Scripts/pip.exe install -U pyinstaller')
print('\n# Alright, do this and come back ok. Bye.')
os._exit(0)
if prefix:
os.system('DISPLAY=:0.0 WINEDEBUG=fixme-all wine {prefix}/drive_c/Python34/Scripts/pyinstaller.exe --onefile {file}'.format(prefix=prefix, file=args.file))
if __name__ == '__main__':
system = system().lower()
if 'windows' in system:
print('*nix only at the moment, sorry.')
os._exit(0)
p = argparse.ArgumentParser(description="Compile script to binary executable")
p.add_argument('file', nargs='?', const=None, help=".spec or .py file")
p.add_argument('-u', '--unix', action='store_true', dest='unix', help="Compile single *nix binary executable")
p.add_argument('-w', '--windows', action='store_true', dest='windows', help="Compile single Windows binary executable (*.exe)")
p.add_argument('-v', '--version', action='version', version='%(prog)s {}'.format(__version__))
args = p.parse_args()
if not any([args.unix, args.windows]):
print('need at least one option')
os._exit(1)
try:
main(args)
except KeyboardInterrupt:
print('\n\nctrl-C: exit') | mit | -2,804,960,794,753,186,300 | 40.885714 | 171 | 0.489835 | false |
pwil3058/scm_test_tree | scm_test_tree_pkg/dummy_console.py | 1 | 1210 | ### Copyright (C) 2007-2015 Peter Williams <[email protected]>
###
### This program is free software; you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation; version 2 of the License only.
###
### This program is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with this program; if not, write to the Free Software
### Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# A dummy console to facilitate code sharing in other modules
from .bab.decorators import singleton
@singleton
class ConsoleLogWidget(object):
def start_cmd(self, cmd):
return
def append_stdin(self, msg):
return
def append_stdout(self, msg):
return
def append_stderr(self, msg):
return
def end_cmd(self, result=None):
return
def append_entry(self, msg):
return
LOG = ConsoleLogWidget()
| gpl-2.0 | 5,675,970,126,361,851,000 | 33.571429 | 80 | 0.707438 | false |
gnowxilef/plexpy | plexpy/webserve.py | 1 | 52488 | # This file is part of PlexPy.
#
# PlexPy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PlexPy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PlexPy. If not, see <http://www.gnu.org/licenses/>.
from plexpy import logger, notifiers, plextv, pmsconnect, common, log_reader, datafactory, graphs, users
from plexpy.helpers import checked, radio
from mako.lookup import TemplateLookup
from mako import exceptions
import plexpy
import threading
import cherrypy
import hashlib
import random
import json
import os
try:
# pylint:disable=E0611
# ignore this error because we are catching the ImportError
from collections import OrderedDict
# pylint:enable=E0611
except ImportError:
# Python 2.6.x fallback, from libs
from ordereddict import OrderedDict
def serve_template(templatename, **kwargs):
interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/interfaces/')
template_dir = os.path.join(str(interface_dir), plexpy.CONFIG.INTERFACE)
_hplookup = TemplateLookup(directories=[template_dir])
try:
template = _hplookup.get_template(templatename)
return template.render(**kwargs)
except:
return exceptions.html_error_template().render()
class WebInterface(object):
def __init__(self):
self.interface_dir = os.path.join(str(plexpy.PROG_DIR), 'data/')
@cherrypy.expose
def index(self):
if plexpy.CONFIG.FIRST_RUN_COMPLETE:
raise cherrypy.HTTPRedirect("home")
else:
raise cherrypy.HTTPRedirect("welcome")
@cherrypy.expose
def home(self):
config = {
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
"home_stats_type": plexpy.CONFIG.HOME_STATS_TYPE,
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
}
return serve_template(templatename="index.html", title="Home", config=config)
@cherrypy.expose
def welcome(self, **kwargs):
config = {
"launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
"refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
"pms_ip": plexpy.CONFIG.PMS_IP,
"pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
"pms_port": plexpy.CONFIG.PMS_PORT,
"pms_token": plexpy.CONFIG.PMS_TOKEN,
"pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
"pms_uuid": plexpy.CONFIG.PMS_UUID,
"tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
"movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
"music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
"tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
"movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
"music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
"video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
"music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
"logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
"check_github": checked(plexpy.CONFIG.CHECK_GITHUB)
}
# The setup wizard just refreshes the page on submit so we must redirect to home if config set.
# Also redirecting to home if a PMS token already exists - will remove this in future.
if plexpy.CONFIG.FIRST_RUN_COMPLETE or plexpy.CONFIG.PMS_TOKEN:
raise cherrypy.HTTPRedirect("home")
else:
return serve_template(templatename="welcome.html", title="Welcome", config=config)
@cherrypy.expose
def get_date_formats(self):
if plexpy.CONFIG.DATE_FORMAT:
date_format = plexpy.CONFIG.DATE_FORMAT
else:
date_format = 'YYYY-MM-DD'
if plexpy.CONFIG.TIME_FORMAT:
time_format = plexpy.CONFIG.TIME_FORMAT
else:
time_format = 'HH:mm'
formats = {'date_format': date_format,
'time_format': time_format}
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(formats)
@cherrypy.expose
def home_stats(self, time_range='30', stat_type='0', stat_count='5', **kwargs):
data_factory = datafactory.DataFactory()
stats_data = data_factory.get_home_stats(time_range=time_range, stat_type=stat_type, stat_count=stat_count)
return serve_template(templatename="home_stats.html", title="Stats", data=stats_data)
@cherrypy.expose
def library_stats(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
stats_data = pms_connect.get_library_stats()
return serve_template(templatename="library_stats.html", title="Library Stats", data=stats_data)
@cherrypy.expose
def history(self):
return serve_template(templatename="history.html", title="History")
@cherrypy.expose
def users(self):
return serve_template(templatename="users.html", title="Users")
@cherrypy.expose
def graphs(self):
return serve_template(templatename="graphs.html", title="Graphs")
@cherrypy.expose
def sync(self):
return serve_template(templatename="sync.html", title="Synced Items")
@cherrypy.expose
def user(self, user=None, user_id=None):
user_data = users.Users()
if user_id:
try:
user_details = user_data.get_user_details(user_id=user_id)
except:
logger.warn("Unable to retrieve friendly name for user_id %s " % user_id)
elif user:
try:
user_details = user_data.get_user_details(user=user)
except:
logger.warn("Unable to retrieve friendly name for user %s " % user)
else:
logger.debug(u"User page requested but no parameters received.")
raise cherrypy.HTTPRedirect("home")
return serve_template(templatename="user.html", title="User", data=user_details)
@cherrypy.expose
def edit_user_dialog(self, user=None, user_id=None, **kwargs):
user_data = users.Users()
if user_id:
result = user_data.get_user_friendly_name(user_id=user_id)
status_message = ''
elif user:
result = user_data.get_user_friendly_name(user=user)
status_message = ''
else:
result = None
status_message = 'An error occured.'
return serve_template(templatename="edit_user.html", title="Edit User", data=result, status_message=status_message)
@cherrypy.expose
def edit_user(self, user=None, user_id=None, friendly_name=None, **kwargs):
if 'do_notify' in kwargs:
do_notify = kwargs.get('do_notify')
else:
do_notify = 0
if 'keep_history' in kwargs:
keep_history = kwargs.get('keep_history')
else:
keep_history = 0
if 'thumb' in kwargs:
custom_avatar = kwargs['thumb']
else:
custom_avatar = ''
user_data = users.Users()
if user_id:
try:
user_data.set_user_friendly_name(user_id=user_id,
friendly_name=friendly_name,
do_notify=do_notify,
keep_history=keep_history)
user_data.set_user_profile_url(user_id=user_id,
profile_url=custom_avatar)
status_message = "Successfully updated user."
return status_message
except:
status_message = "Failed to update user."
return status_message
if user:
try:
user_data.set_user_friendly_name(user=user,
friendly_name=friendly_name,
do_notify=do_notify,
keep_history=keep_history)
user_data.set_user_profile_url(user=user,
profile_url=custom_avatar)
status_message = "Successfully updated user."
return status_message
except:
status_message = "Failed to update user."
return status_message
@cherrypy.expose
def get_stream_data(self, row_id=None, user=None, **kwargs):
data_factory = datafactory.DataFactory()
stream_data = data_factory.get_stream_details(row_id)
return serve_template(templatename="stream_data.html", title="Stream Data", data=stream_data, user=user)
@cherrypy.expose
def get_ip_address_details(self, ip_address=None, **kwargs):
import socket
try:
socket.inet_aton(ip_address)
except socket.error:
ip_address = None
return serve_template(templatename="ip_address_modal.html", title="IP Address Details", data=ip_address)
@cherrypy.expose
def get_user_list(self, **kwargs):
user_data = users.Users()
user_list = user_data.get_user_list(kwargs=kwargs)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(user_list)
@cherrypy.expose
def checkGithub(self):
from plexpy import versioncheck
versioncheck.checkGithub()
raise cherrypy.HTTPRedirect("home")
@cherrypy.expose
def logs(self):
return serve_template(templatename="logs.html", title="Log", lineList=plexpy.LOG_LIST)
@cherrypy.expose
def clearLogs(self):
plexpy.LOG_LIST = []
logger.info("Web logs cleared")
raise cherrypy.HTTPRedirect("logs")
@cherrypy.expose
def toggleVerbose(self):
plexpy.VERBOSE = not plexpy.VERBOSE
logger.initLogger(console=not plexpy.QUIET,
log_dir=plexpy.CONFIG.LOG_DIR, verbose=plexpy.VERBOSE)
logger.info("Verbose toggled, set to %s", plexpy.VERBOSE)
logger.debug("If you read this message, debug logging is available")
raise cherrypy.HTTPRedirect("logs")
@cherrypy.expose
def getLog(self, start=0, length=100, **kwargs):
start = int(start)
length = int(length)
search_value = ""
search_regex = ""
order_column = 0
order_dir = "desc"
if 'order[0][dir]' in kwargs:
order_dir = kwargs.get('order[0][dir]', "desc")
if 'order[0][column]' in kwargs:
order_column = kwargs.get('order[0][column]', "0")
if 'search[value]' in kwargs:
search_value = kwargs.get('search[value]', "")
if 'search[regex]' in kwargs:
search_regex = kwargs.get('search[regex]', "")
filtered = []
if search_value == "":
filtered = plexpy.LOG_LIST[::]
else:
filtered = [row for row in plexpy.LOG_LIST for column in row if search_value.lower() in column.lower()]
sortcolumn = 0
if order_column == '1':
sortcolumn = 2
elif order_column == '2':
sortcolumn = 1
filtered.sort(key=lambda x: x[sortcolumn], reverse=order_dir == "desc")
rows = filtered[start:(start + length)]
rows = [[row[0], row[2], row[1]] for row in rows]
return json.dumps({
'recordsFiltered': len(filtered),
'recordsTotal': len(plexpy.LOG_LIST),
'data': rows,
})
@cherrypy.expose
def get_plex_log(self, window=1000, **kwargs):
log_lines = []
try:
log_lines = {'data': log_reader.get_log_tail(window=window)}
except:
logger.warn("Unable to retrieve Plex Logs.")
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(log_lines)
@cherrypy.expose
def generateAPI(self):
apikey = hashlib.sha224(str(random.getrandbits(256))).hexdigest()[0:32]
logger.info("New API generated")
return apikey
@cherrypy.expose
def settings(self):
interface_dir = os.path.join(plexpy.PROG_DIR, 'data/interfaces/')
interface_list = [name for name in os.listdir(interface_dir) if
os.path.isdir(os.path.join(interface_dir, name))]
# Initialise blank passwords so we do not expose them in the html forms
# but users are still able to clear them
if plexpy.CONFIG.HTTP_PASSWORD != '':
http_password = ' '
else:
http_password = ''
config = {
"http_host": plexpy.CONFIG.HTTP_HOST,
"http_username": plexpy.CONFIG.HTTP_USERNAME,
"http_port": plexpy.CONFIG.HTTP_PORT,
"http_password": http_password,
"launch_browser": checked(plexpy.CONFIG.LAUNCH_BROWSER),
"enable_https": checked(plexpy.CONFIG.ENABLE_HTTPS),
"https_cert": plexpy.CONFIG.HTTPS_CERT,
"https_key": plexpy.CONFIG.HTTPS_KEY,
"api_enabled": checked(plexpy.CONFIG.API_ENABLED),
"api_key": plexpy.CONFIG.API_KEY,
"update_db_interval": plexpy.CONFIG.UPDATE_DB_INTERVAL,
"freeze_db": checked(plexpy.CONFIG.FREEZE_DB),
"log_dir": plexpy.CONFIG.LOG_DIR,
"cache_dir": plexpy.CONFIG.CACHE_DIR,
"check_github": checked(plexpy.CONFIG.CHECK_GITHUB),
"interface_list": interface_list,
"growl_enabled": checked(plexpy.CONFIG.GROWL_ENABLED),
"growl_host": plexpy.CONFIG.GROWL_HOST,
"growl_password": plexpy.CONFIG.GROWL_PASSWORD,
"prowl_enabled": checked(plexpy.CONFIG.PROWL_ENABLED),
"prowl_keys": plexpy.CONFIG.PROWL_KEYS,
"prowl_priority": plexpy.CONFIG.PROWL_PRIORITY,
"xbmc_enabled": checked(plexpy.CONFIG.XBMC_ENABLED),
"xbmc_host": plexpy.CONFIG.XBMC_HOST,
"xbmc_username": plexpy.CONFIG.XBMC_USERNAME,
"xbmc_password": plexpy.CONFIG.XBMC_PASSWORD,
"plex_enabled": checked(plexpy.CONFIG.PLEX_ENABLED),
"plex_client_host": plexpy.CONFIG.PLEX_CLIENT_HOST,
"plex_username": plexpy.CONFIG.PLEX_USERNAME,
"plex_password": plexpy.CONFIG.PLEX_PASSWORD,
"nma_enabled": checked(plexpy.CONFIG.NMA_ENABLED),
"nma_apikey": plexpy.CONFIG.NMA_APIKEY,
"nma_priority": int(plexpy.CONFIG.NMA_PRIORITY),
"pushalot_enabled": checked(plexpy.CONFIG.PUSHALOT_ENABLED),
"pushalot_apikey": plexpy.CONFIG.PUSHALOT_APIKEY,
"pushover_enabled": checked(plexpy.CONFIG.PUSHOVER_ENABLED),
"pushover_keys": plexpy.CONFIG.PUSHOVER_KEYS,
"pushover_apitoken": plexpy.CONFIG.PUSHOVER_APITOKEN,
"pushover_priority": plexpy.CONFIG.PUSHOVER_PRIORITY,
"pushbullet_enabled": checked(plexpy.CONFIG.PUSHBULLET_ENABLED),
"pushbullet_apikey": plexpy.CONFIG.PUSHBULLET_APIKEY,
"pushbullet_deviceid": plexpy.CONFIG.PUSHBULLET_DEVICEID,
"twitter_enabled": checked(plexpy.CONFIG.TWITTER_ENABLED),
"osx_notify_enabled": checked(plexpy.CONFIG.OSX_NOTIFY_ENABLED),
"osx_notify_app": plexpy.CONFIG.OSX_NOTIFY_APP,
"boxcar_enabled": checked(plexpy.CONFIG.BOXCAR_ENABLED),
"boxcar_token": plexpy.CONFIG.BOXCAR_TOKEN,
"cache_sizemb": plexpy.CONFIG.CACHE_SIZEMB,
"email_enabled": checked(plexpy.CONFIG.EMAIL_ENABLED),
"email_from": plexpy.CONFIG.EMAIL_FROM,
"email_to": plexpy.CONFIG.EMAIL_TO,
"email_smtp_server": plexpy.CONFIG.EMAIL_SMTP_SERVER,
"email_smtp_user": plexpy.CONFIG.EMAIL_SMTP_USER,
"email_smtp_password": plexpy.CONFIG.EMAIL_SMTP_PASSWORD,
"email_smtp_port": int(plexpy.CONFIG.EMAIL_SMTP_PORT),
"email_tls": checked(plexpy.CONFIG.EMAIL_TLS),
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER,
"pms_ip": plexpy.CONFIG.PMS_IP,
"pms_logs_folder": plexpy.CONFIG.PMS_LOGS_FOLDER,
"pms_port": plexpy.CONFIG.PMS_PORT,
"pms_token": plexpy.CONFIG.PMS_TOKEN,
"pms_ssl": checked(plexpy.CONFIG.PMS_SSL),
"pms_use_bif": checked(plexpy.CONFIG.PMS_USE_BIF),
"pms_uuid": plexpy.CONFIG.PMS_UUID,
"plexwatch_database": plexpy.CONFIG.PLEXWATCH_DATABASE,
"date_format": plexpy.CONFIG.DATE_FORMAT,
"time_format": plexpy.CONFIG.TIME_FORMAT,
"grouping_global_history": checked(plexpy.CONFIG.GROUPING_GLOBAL_HISTORY),
"grouping_user_history": checked(plexpy.CONFIG.GROUPING_USER_HISTORY),
"grouping_charts": checked(plexpy.CONFIG.GROUPING_CHARTS),
"tv_notify_enable": checked(plexpy.CONFIG.TV_NOTIFY_ENABLE),
"movie_notify_enable": checked(plexpy.CONFIG.MOVIE_NOTIFY_ENABLE),
"music_notify_enable": checked(plexpy.CONFIG.MUSIC_NOTIFY_ENABLE),
"tv_notify_on_start": checked(plexpy.CONFIG.TV_NOTIFY_ON_START),
"movie_notify_on_start": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_START),
"music_notify_on_start": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_START),
"tv_notify_on_stop": checked(plexpy.CONFIG.TV_NOTIFY_ON_STOP),
"movie_notify_on_stop": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_STOP),
"music_notify_on_stop": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_STOP),
"tv_notify_on_pause": checked(plexpy.CONFIG.TV_NOTIFY_ON_PAUSE),
"movie_notify_on_pause": checked(plexpy.CONFIG.MOVIE_NOTIFY_ON_PAUSE),
"music_notify_on_pause": checked(plexpy.CONFIG.MUSIC_NOTIFY_ON_PAUSE),
"monitoring_interval": plexpy.CONFIG.MONITORING_INTERVAL,
"refresh_users_interval": plexpy.CONFIG.REFRESH_USERS_INTERVAL,
"refresh_users_on_startup": checked(plexpy.CONFIG.REFRESH_USERS_ON_STARTUP),
"ip_logging_enable": checked(plexpy.CONFIG.IP_LOGGING_ENABLE),
"video_logging_enable": checked(plexpy.CONFIG.VIDEO_LOGGING_ENABLE),
"music_logging_enable": checked(plexpy.CONFIG.MUSIC_LOGGING_ENABLE),
"logging_ignore_interval": plexpy.CONFIG.LOGGING_IGNORE_INTERVAL,
"pms_is_remote": checked(plexpy.CONFIG.PMS_IS_REMOTE),
"notify_watched_percent": plexpy.CONFIG.NOTIFY_WATCHED_PERCENT,
"notify_on_start_subject_text": plexpy.CONFIG.NOTIFY_ON_START_SUBJECT_TEXT,
"notify_on_start_body_text": plexpy.CONFIG.NOTIFY_ON_START_BODY_TEXT,
"notify_on_stop_subject_text": plexpy.CONFIG.NOTIFY_ON_STOP_SUBJECT_TEXT,
"notify_on_stop_body_text": plexpy.CONFIG.NOTIFY_ON_STOP_BODY_TEXT,
"notify_on_pause_subject_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_SUBJECT_TEXT,
"notify_on_pause_body_text": plexpy.CONFIG.NOTIFY_ON_PAUSE_BODY_TEXT,
"notify_on_resume_subject_text": plexpy.CONFIG.NOTIFY_ON_RESUME_SUBJECT_TEXT,
"notify_on_resume_body_text": plexpy.CONFIG.NOTIFY_ON_RESUME_BODY_TEXT,
"notify_on_buffer_subject_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_SUBJECT_TEXT,
"notify_on_buffer_body_text": plexpy.CONFIG.NOTIFY_ON_BUFFER_BODY_TEXT,
"notify_on_watched_subject_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_SUBJECT_TEXT,
"notify_on_watched_body_text": plexpy.CONFIG.NOTIFY_ON_WATCHED_BODY_TEXT,
"home_stats_length": plexpy.CONFIG.HOME_STATS_LENGTH,
"home_stats_type": checked(plexpy.CONFIG.HOME_STATS_TYPE),
"home_stats_count": plexpy.CONFIG.HOME_STATS_COUNT,
"buffer_threshold": plexpy.CONFIG.BUFFER_THRESHOLD,
"buffer_wait": plexpy.CONFIG.BUFFER_WAIT
}
return serve_template(templatename="settings.html", title="Settings", config=config)
@cherrypy.expose
def configUpdate(self, **kwargs):
# Handle the variable config options. Note - keys with False values aren't getting passed
checked_configs = [
"launch_browser", "enable_https", "api_enabled", "freeze_db", "growl_enabled",
"prowl_enabled", "xbmc_enabled", "check_github",
"plex_enabled", "nma_enabled", "pushalot_enabled",
"pushover_enabled", "pushbullet_enabled",
"twitter_enabled", "osx_notify_enabled",
"boxcar_enabled", "email_enabled", "email_tls",
"grouping_global_history", "grouping_user_history", "grouping_charts", "pms_use_bif", "pms_ssl",
"tv_notify_enable", "movie_notify_enable", "music_notify_enable",
"tv_notify_on_start", "movie_notify_on_start", "music_notify_on_start",
"tv_notify_on_stop", "movie_notify_on_stop", "music_notify_on_stop",
"tv_notify_on_pause", "movie_notify_on_pause", "music_notify_on_pause", "refresh_users_on_startup",
"ip_logging_enable", "video_logging_enable", "music_logging_enable", "pms_is_remote", "home_stats_type"
]
for checked_config in checked_configs:
if checked_config not in kwargs:
# checked items should be zero or one. if they were not sent then the item was not checked
kwargs[checked_config] = 0
# If http password exists in config, do not overwrite when blank value received
if 'http_password' in kwargs:
if kwargs['http_password'] == ' ' and plexpy.CONFIG.HTTP_PASSWORD != '':
kwargs['http_password'] = plexpy.CONFIG.HTTP_PASSWORD
for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
# the use prefix is fairly nice in the html, but does not match the actual config
kwargs[plain_config] = kwargs[use_config]
del kwargs[use_config]
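# e.g. a checkbox posted as 'use_growl_enabled' (hypothetical form field name)
# is stored under the real config key 'growl_enabled'.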
# Check if we should refresh our data
refresh_users = False
reschedule = False
if 'monitoring_interval' in kwargs and 'refresh_users_interval' in kwargs:
if (kwargs['monitoring_interval'] != str(plexpy.CONFIG.MONITORING_INTERVAL)) or \
(kwargs['refresh_users_interval'] != str(plexpy.CONFIG.REFRESH_USERS_INTERVAL)):
reschedule = True
if 'pms_ip' in kwargs:
if kwargs['pms_ip'] != plexpy.CONFIG.PMS_IP:
refresh_users = True
plexpy.CONFIG.process_kwargs(kwargs)
# Write the config
plexpy.CONFIG.write()
# Get new server URLs for SSL communications.
plextv.get_real_pms_url()
# Reconfigure scheduler if intervals changed
if reschedule:
plexpy.initialize_scheduler()
# Refresh users table if our server IP changes.
if refresh_users:
threading.Thread(target=plextv.refresh_users).start()
raise cherrypy.HTTPRedirect("settings")
@cherrypy.expose
def set_notification_config(self, **kwargs):
# Handle the variable config options. Note - keys with False values aren't getting passed
checked_configs = [
"email_tls"
]
for checked_config in checked_configs:
if checked_config not in kwargs:
# checked items should be zero or one. if they were not sent then the item was not checked
kwargs[checked_config] = 0
for plain_config, use_config in [(x[4:], x) for x in kwargs if x.startswith('use_')]:
# the use prefix is fairly nice in the html, but does not match the actual config
kwargs[plain_config] = kwargs[use_config]
del kwargs[use_config]
plexpy.CONFIG.process_kwargs(kwargs)
# Write the config
plexpy.CONFIG.write()
cherrypy.response.status = 200
@cherrypy.expose
def do_state_change(self, signal, title, timer):
message = title
quote = self.random_arnold_quotes()
plexpy.SIGNAL = signal
return serve_template(templatename="shutdown.html", title=title,
message=message, timer=timer, quote=quote)
@cherrypy.expose
def get_history(self, user=None, user_id=None, **kwargs):
custom_where=[]
if user_id:
custom_where = [['user_id', user_id]]
elif user:
custom_where = [['user', user]]
if 'rating_key' in kwargs:
rating_key = kwargs.get('rating_key', "")
custom_where = [['rating_key', rating_key]]
if 'parent_rating_key' in kwargs:
rating_key = kwargs.get('parent_rating_key', "")
custom_where = [['parent_rating_key', rating_key]]
if 'grandparent_rating_key' in kwargs:
rating_key = kwargs.get('grandparent_rating_key', "")
custom_where = [['grandparent_rating_key', rating_key]]
if 'start_date' in kwargs:
start_date = kwargs.get('start_date', "")
custom_where = [['strftime("%Y-%m-%d", datetime(date, "unixepoch", "localtime"))', start_date]]
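        # Note: each custom_where entry is a [column, value] pair, and every branch
        # above reassigns the list, so only the last matching filter is applied.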
data_factory = datafactory.DataFactory()
history = data_factory.get_history(kwargs=kwargs, custom_where=custom_where)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(history)
@cherrypy.expose
def history_table_modal(self, start_date=None, **kwargs):
return serve_template(templatename="history_table_modal.html", title="History Data", data=start_date)
@cherrypy.expose
def shutdown(self):
return self.do_state_change('shutdown', 'Shutting Down', 15)
@cherrypy.expose
def restart(self):
return self.do_state_change('restart', 'Restarting', 30)
@cherrypy.expose
def update(self):
return self.do_state_change('update', 'Updating', 120)
@cherrypy.expose
def api(self, *args, **kwargs):
from plexpy.api import Api
a = Api()
a.checkParams(*args, **kwargs)
return a.fetchData()
@cherrypy.expose
def twitterStep1(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
tweet = notifiers.TwitterNotifier()
return tweet._get_authorization()
@cherrypy.expose
def twitterStep2(self, key):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
tweet = notifiers.TwitterNotifier()
result = tweet._get_credentials(key)
logger.info(u"result: " + str(result))
if result:
return "Key verification successful"
else:
return "Unable to verify key"
@cherrypy.expose
def testTwitter(self):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
tweet = notifiers.TwitterNotifier()
result = tweet.test_notify()
if result:
return "Tweet successful, check your twitter to make sure it worked"
else:
return "Error sending tweet"
@cherrypy.expose
def osxnotifyregister(self, app):
cherrypy.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
from osxnotify import registerapp as osxnotify
result, msg = osxnotify.registerapp(app)
if result:
osx_notify = notifiers.OSX_NOTIFY()
osx_notify.notify('Registered', result, 'Success :-)')
logger.info('Registered %s, to re-register a different app, delete this app first' % result)
else:
logger.warn(msg)
return msg
@cherrypy.expose
def get_pms_token(self):
token = plextv.PlexTV()
result = token.get_token()
if result:
return result
else:
logger.warn('Unable to retrieve Plex.tv token.')
return False
@cherrypy.expose
def get_pms_sessions_json(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_sessions('json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
return False
@cherrypy.expose
def get_current_activity(self, **kwargs):
try:
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_current_activity()
except:
return serve_template(templatename="current_activity.html", data=None)
if result:
return serve_template(templatename="current_activity.html", data=result)
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="current_activity.html", data=None)
@cherrypy.expose
def get_current_activity_header(self, **kwargs):
try:
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_current_activity()
except IOError, e:
return serve_template(templatename="current_activity_header.html", data=None)
if result:
return serve_template(templatename="current_activity_header.html", data=result['stream_count'])
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="current_activity_header.html", data=None)
@cherrypy.expose
def get_recently_added(self, count='0', **kwargs):
try:
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_recently_added_details(count)
except IOError, e:
return serve_template(templatename="recently_added.html", data=None)
if result:
return serve_template(templatename="recently_added.html", data=result['recently_added'])
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="recently_added.html", data=None)
@cherrypy.expose
def pms_image_proxy(self, img='', width='0', height='0', fallback=None, **kwargs):
try:
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_image(img, width, height)
cherrypy.response.headers['Content-type'] = result[1]
return result[0]
except:
            logger.warn('Image proxy queried but errors occurred.')
if fallback == 'poster':
logger.info('Trying fallback image...')
try:
fallback_image = open(self.interface_dir + common.DEFAULT_POSTER_THUMB, 'rb')
cherrypy.response.headers['Content-type'] = 'image/png'
return fallback_image
except IOError, e:
logger.error('Unable to read fallback image. %s' % e)
elif fallback == 'cover':
logger.info('Trying fallback image...')
try:
fallback_image = open(self.interface_dir + common.DEFAULT_COVER_THUMB, 'rb')
cherrypy.response.headers['Content-type'] = 'image/png'
return fallback_image
except IOError, e:
logger.error('Unable to read fallback image. %s' % e)
return None
@cherrypy.expose
def info(self, item_id=None, source=None, **kwargs):
metadata = None
config = {
"pms_identifier": plexpy.CONFIG.PMS_IDENTIFIER
}
if source == 'history':
data_factory = datafactory.DataFactory()
metadata = data_factory.get_metadata_details(row_id=item_id)
else:
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_metadata_details(rating_key=item_id)
if result:
metadata = result['metadata']
if metadata:
return serve_template(templatename="info.html", data=metadata, title="Info", config=config)
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="info.html", data=None, title="Info")
@cherrypy.expose
def get_user_recently_watched(self, user=None, user_id=None, limit='10', **kwargs):
data_factory = datafactory.DataFactory()
result = data_factory.get_recently_watched(user_id=user_id, user=user, limit=limit)
if result:
return serve_template(templatename="user_recently_watched.html", data=result,
title="Recently Watched")
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="user_recently_watched.html", data=None,
title="Recently Watched")
@cherrypy.expose
def get_user_watch_time_stats(self, user=None, user_id=None, **kwargs):
user_data = users.Users()
result = user_data.get_user_watch_time_stats(user_id=user_id, user=user)
if result:
return serve_template(templatename="user_watch_time_stats.html", data=result, title="Watch Stats")
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="user_watch_time_stats.html", data=None, title="Watch Stats")
@cherrypy.expose
def get_user_platform_stats(self, user=None, user_id=None, **kwargs):
user_data = users.Users()
result = user_data.get_user_platform_stats(user_id=user_id, user=user)
if result:
return serve_template(templatename="user_platform_stats.html", data=result,
title="Platform Stats")
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="user_platform_stats.html", data=None, title="Platform Stats")
@cherrypy.expose
def get_item_children(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_item_children(rating_key)
if result:
return serve_template(templatename="info_children_list.html", data=result, title="Children List")
else:
logger.warn('Unable to retrieve data.')
return serve_template(templatename="info_children_list.html", data=None, title="Children List")
@cherrypy.expose
def get_metadata_json(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_metadata(rating_key, 'json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_metadata_xml(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_metadata(rating_key)
if result:
cherrypy.response.headers['Content-type'] = 'application/xml'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_recently_added_json(self, count='0', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_recently_added(count, 'json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_episode_list_json(self, rating_key='', **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_episode_list(rating_key, 'json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_user_ips(self, user_id=None, user=None, **kwargs):
custom_where=[]
if user_id:
custom_where = [['user_id', user_id]]
elif user:
custom_where = [['user', user]]
user_data = users.Users()
history = user_data.get_user_unique_ips(kwargs=kwargs,
custom_where=custom_where)
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(history)
@cherrypy.expose
def get_plays_by_date(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_per_day(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_dayofweek(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_per_dayofweek(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_hourofday(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_per_hourofday(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_per_month(self, y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_per_month(y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_by_top_10_platforms(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_by_top_10_users(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_stream_type(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_per_stream_type(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_source_resolution(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_by_source_resolution(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plays_by_stream_resolution(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_total_plays_by_stream_resolution(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_stream_type_by_top_10_users(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_stream_type_by_top_10_users(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_stream_type_by_top_10_platforms(self, time_range='30', y_axis='plays', **kwargs):
graph = graphs.Graphs()
result = graph.get_stream_type_by_top_10_platforms(time_range=time_range, y_axis=y_axis)
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_friends_list(self, **kwargs):
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_friends('json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_user_details(self, **kwargs):
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_user_details('json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_server_list(self, **kwargs):
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_server_list('json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_sync_lists(self, machine_id='', **kwargs):
plex_tv = plextv.PlexTV()
result = plex_tv.get_plextv_sync_lists(machine_id=machine_id, output_format='json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_servers(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_server_list(output_format='json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_servers_info(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_servers_info()
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_server_prefs(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_server_prefs(output_format='json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_activity(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_current_activity()
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_full_users_list(self, **kwargs):
plex_tv = plextv.PlexTV()
result = plex_tv.get_full_users_list()
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(result)
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def refresh_users_list(self, **kwargs):
threading.Thread(target=plextv.refresh_users).start()
logger.info('Manual user list refresh requested.')
@cherrypy.expose
def get_sync(self, machine_id=None, user_id=None, **kwargs):
pms_connect = pmsconnect.PmsConnect()
server_id = pms_connect.get_server_identity()
plex_tv = plextv.PlexTV()
if not machine_id:
result = plex_tv.get_synced_items(machine_id=server_id['machine_identifier'], user_id=user_id)
else:
result = plex_tv.get_synced_items(machine_id=machine_id, user_id=user_id)
if result:
output = {"data": result}
else:
logger.warn('Unable to retrieve sync data for user.')
output = {"data": []}
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps(output)
@cherrypy.expose
def get_sync_item(self, sync_id, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_sync_item(sync_id, output_format='json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_sync_transcode_queue(self, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_sync_transcode_queue(output_format='json')
if result:
cherrypy.response.headers['Content-type'] = 'application/json'
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_server_pref(self, pref=None, **kwargs):
pms_connect = pmsconnect.PmsConnect()
result = pms_connect.get_server_pref(pref=pref)
if result:
return result
else:
logger.warn('Unable to retrieve data.')
@cherrypy.expose
def get_plexwatch_export_data(self, database_path=None, table_name=None, import_ignore_interval=0, **kwargs):
from plexpy import plexwatch_import
db_check_msg = plexwatch_import.validate_database(database=database_path,
table_name=table_name)
if db_check_msg == 'success':
threading.Thread(target=plexwatch_import.import_from_plexwatch,
kwargs={'database': database_path,
'table_name': table_name,
'import_ignore_interval': import_ignore_interval}).start()
return 'Import has started. Check the PlexPy logs to monitor any problems.'
else:
return db_check_msg
@cherrypy.expose
def plexwatch_import(self, **kwargs):
return serve_template(templatename="plexwatch_import.html", title="Import PlexWatch Database")
@cherrypy.expose
def get_server_id(self, hostname=None, port=None, **kwargs):
from plexpy import http_handler
if hostname and port:
request_handler = http_handler.HTTPHandler(host=hostname,
port=port,
token=None)
uri = '/identity'
request = request_handler.make_request(uri=uri,
proto='http',
request_type='GET',
output_format='',
no_token=True)
if request:
cherrypy.response.headers['Content-type'] = 'application/xml'
return request
else:
logger.warn('Unable to retrieve data.')
return None
else:
return None
@cherrypy.expose
def random_arnold_quotes(self, **kwargs):
from random import randint
quote_list = ['To crush your enemies, see them driven before you, and to hear the lamentation of their women!',
'Your clothes, give them to me, now!',
'Do it!',
'If it bleeds, we can kill it',
'See you at the party Richter!',
'Let off some steam, Bennett',
'I\'ll be back',
'Get to the chopper!',
'Hasta La Vista, Baby!',
'It\'s not a tumor!',
'Dillon, you son of a bitch!',
'Benny!! Screw you!!',
'Stop whining! You kids are soft. You lack discipline.',
'Nice night for a walk.',
'Stick around!',
'I need your clothes, your boots and your motorcycle.',
'No, it\'s not a tumor. It\'s not a tumor!',
'I LIED!',
'See you at the party, Richter!',
'Are you Sarah Conner?',
'I\'m a cop you idiot!',
'Come with me if you want to live.',
'Who is your daddy and what does he do?'
]
random_number = randint(0, len(quote_list) - 1)
return quote_list[int(random_number)]
@cherrypy.expose
def get_notification_agent_config(self, config_id, **kwargs):
config = notifiers.get_notification_agent_config(config_id=config_id)
checkboxes = {'email_tls': checked(plexpy.CONFIG.EMAIL_TLS)}
return serve_template(templatename="notification_config.html", title="Notification Configuration",
data=config, checkboxes=checkboxes)
@cherrypy.expose
def get_notification_agent_triggers(self, config_id, **kwargs):
if config_id.isdigit():
agents = notifiers.available_notification_agents()
for agent in agents:
if int(config_id) == agent['id']:
this_agent = agent
break
else:
this_agent = None
else:
return None
return serve_template(templatename="notification_triggers_modal.html", title="Notification Triggers",
data=this_agent)
@cherrypy.expose
def delete_history_rows(self, row_id, **kwargs):
data_factory = datafactory.DataFactory()
if row_id:
delete_row = data_factory.delete_session_history_rows(row_id=row_id)
if delete_row:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'message': delete_row})
else:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'message': 'no data received'})
@cherrypy.expose
def delete_all_user_history(self, user_id, **kwargs):
data_factory = datafactory.DataFactory()
if user_id:
delete_row = data_factory.delete_all_user_history(user_id=user_id)
if delete_row:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'message': delete_row})
else:
cherrypy.response.headers['Content-type'] = 'application/json'
return json.dumps({'message': 'no data received'})
| gpl-3.0 | -6,099,891,910,855,282,000 | 38.612075 | 123 | 0.596216 | false |
cylussec/gameoflife | life.py | 1 | 1914 | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 24 19:49:39 2014
@author: brian
"""
import socket, time, re
class LifeBoard:
"""
Play the game of life
"""
def __init__(self, board):
"""
board - 2d array of the board where '*' is an alive cell and ' ' is a dead
cell
"""
self.board = board
def iterate(self,count=1):
for i in range(count):
newboard = ["" for x in range(len(self.board))]
            for row in range(len(self.board)):
                for col in range(len(self.board[row])):
neighbors = self._getval(row-1,col-1) + self._getval(row-1,col) + self._getval(row-1,col+1)+ \
self._getval(row,col-1) + self._getval(row,col+1)+ \
self._getval(row+1,col-1) + self._getval(row+1,col) + self._getval(row+1,col+1)
if neighbors < 2 or neighbors > 3:
newboard[row] += ' '
elif neighbors == 3 or (neighbors == 2 and self._getval(row, col) == 1):
newboard[row] += '*'
else:
newboard[row] += ' '
self.board = newboard
def _getval(self, row,col):
if row < 0 or col < 0:
return 0
try:
return 1 if self.board[row][col] == '*' else 0
except IndexError:
return 0
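# A minimal usage sketch of LifeBoard (hypothetical 3x3 "blinker" board):
#
#     lb = LifeBoard([' * ', ' * ', ' * '])
#     lb.iterate()
#     lb.board  # -> ['   ', '***', '   ']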
host = '127.0.0.1'
port = int(raw_input("enter port"))
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
data = s.recv(4096)
data = data[102:]
try:
while(True):
regex = re.compile("(\d*) Generations")
r = regex.search(data)
board = [i[1:-1] for i in data.split('\n')[3:-2]]
lb = LifeBoard(board)
lb.iterate(count=int(r.groups(0)[0]))
res_board = lb.board
res_board.insert(0, '#'*(len(board[0])+1))
res_board.append('#'*(len(board[0])+1)+'\n')
print('#\n#'.join(res_board))
s.send('#\n#'.join(res_board))
data = s.recv(4096)
except:
print(data) | gpl-2.0 | -8,025,376,240,631,603,000 | 25.597222 | 104 | 0.535528 | false |
renatoGarcia/tangram | tangram/widget/buttons.py | 1 | 6856 | # Copyright 2017-2018 The Tangram Developers. See the AUTHORS file at the
# top-level directory of this distribution and at
# https://github.com/renatoGarcia/tangram/blob/master/AUTHORS.
#
# This file is part of Tangram.
#
# Tangram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tangram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Tangram in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Callable, Optional, Union
from gi.repository import Gtk
from .widgets import Widget, Position, Label, Icon
from .._gui_thread import on_gtk_thread
from ..signal import Signal
class Button(Widget):
"""
A button.
Signals:
:clicked(): Emitted when the button is clicked (pressed and released).
"""
@on_gtk_thread
def __init__(self,
label: Optional[Union[str, Label]] = None,
*,
icon: Optional[Icon] = None,
icon_position: Position = Position.LEFT,
_gtk_obj: Optional[Gtk.Widget] = None,
**kwargs) -> None:
self.clicked: Signal[Callable[[], Any]] = Signal()
if _gtk_obj is None:
_gtk_obj = Gtk.Button()
super().__init__(_gtk_obj, **kwargs)
self._gtk_obj.set_focus_on_click(False)
if isinstance(label, str):
label = Label(label, use_underline=True)
self.__label: Optional[Label] = label
self.__icon: Optional[Icon] = icon
self.__icon_position: Position = icon_position
self._grid: Gtk.Grid = Gtk.Grid()
self._grid.set_row_spacing(2)
self._grid.set_column_spacing(2)
self._grid.set_halign(Gtk.Align.CENTER)
self._grid.set_valign(Gtk.Align.CENTER)
if icon_position == Position.LEFT:
self._icon_pos = (0, 0)
self._label_pos = (1, 0)
elif icon_position == Position.RIGHT:
self._icon_pos = (1, 0)
self._label_pos = (0, 0)
elif icon_position == Position.TOP:
self._icon_pos = (0, 0)
self._label_pos = (0, 1)
else: # icon_position == Position.BOTTOM
self._icon_pos = (0, 1)
self._label_pos = (0, 0)
if icon is not None:
self._grid.attach(icon._gtk_obj, *self._icon_pos, 1, 1)
self._add_child_tan(icon)
if label is not None:
self._grid.attach(label._gtk_obj, *self._label_pos, 1, 1)
self._add_child_tan(label)
self._gtk_obj.add(self._grid)
self._gtk_obj.connect('clicked', self.__on_clicked)
def __on_clicked(self, widget: Gtk.Button) -> None:
self.clicked.emit()
def label():
doc = """Button text label.
"""
@on_gtk_thread
def fget(self) -> Optional[Label]:
return self.__label
@on_gtk_thread
def fset(self, value: Optional[Label]) -> None:
if not isinstance(value, (Label, type(None))):
raise ValueError("Button.label attribute must be a Label instance or None.")
del self.__label
if value is not None:
self._grid.attach(value._gtk_obj, *self._label_pos, 1, 1)
self._add_child_tan(value)
self.__label = value
@on_gtk_thread
def fdel(self) -> None:
if self.__label is not None:
self._grid.remove(self.__label._gtk_obj)
self._remove_child_tan(self.__label)
self.__label = None
return locals()
label = property(**label())
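    # Note: ``label()`` above and ``icon()`` below are property factories -- each
    # returns locals() so that the fget/fset/fdel functions and the doc string are
    # unpacked directly into ``property(**...)``; they are not instance methods.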
def icon():
doc = """The button icon."""
@on_gtk_thread
def fget(self) -> Optional[Icon]:
# Running on gtk thread to avoid a race condition with fset
return self.__icon
@on_gtk_thread
def fset(self, value: Optional[Icon]) -> None:
if not isinstance(value, (Icon, type(None))):
raise ValueError("Button.icon attribute must be a Icon instance or None.")
del self.__icon
            if value is not None:
                self._grid.attach(value._gtk_obj, *self._icon_pos, 1, 1)
                self._add_child_tan(value)
                self._gtk_obj.set_image(value._gtk_obj)
            self.__icon = value
@on_gtk_thread
def fdel(self) -> None:
if self.__icon is not None:
self._grid.remove(self.__icon._gtk_obj)
self._remove_child_tan(self.__icon)
self.__icon = None
return locals()
icon = property(**icon())
class ToggleButton(Button):
@on_gtk_thread
def __init__(self,
*,
is_active: bool = False,
is_inconsistent: bool = False,
_gtk_obj: Optional[Gtk.ToggleButton] = None,
**kwargs):
self.toggled: Signal[Callable[[bool], Any]] = Signal()
if _gtk_obj is None:
_gtk_obj = Gtk.ToggleButton()
super().__init__(_gtk_obj=_gtk_obj, **kwargs)
self._gtk_obj.set_active(is_active)
self._gtk_obj.set_inconsistent(is_inconsistent)
self._grid.set_halign(Gtk.Align.START)
self._gtk_obj.connect('toggled', self.__on_toggled)
def is_active():
doc = """Status of the button, if True the button is checked."""
@on_gtk_thread
def fget(self) -> bool:
return self._gtk_obj.get_active()
@on_gtk_thread
def fset(self, value: bool) -> None:
self._gtk_obj.set_active(value)
return locals()
is_active = property(**is_active())
def is_inconsistent():
        doc = """Inconsistent ("indeterminate") state of the button, if True the button is shown as neither checked nor unchecked."""
@on_gtk_thread
def fget(self) -> bool:
return self._gtk_obj.get_inconsistent()
@on_gtk_thread
def fset(self, value: bool) -> None:
self._gtk_obj.set_inconsistent(value)
return locals()
is_inconsistent = property(**is_inconsistent())
def __on_toggled(self, widget: Gtk.ToggleButton) -> None:
state = widget.get_active()
self.toggled.emit(state)
class CheckButton(ToggleButton):
@on_gtk_thread
def __init__(self, **kwargs):
super().__init__(_gtk_obj=Gtk.CheckButton(), **kwargs)
| lgpl-3.0 | 320,085,339,060,968,200 | 32.120773 | 92 | 0.566803 | false |
bloc11/authsys-brain | tests/test_debit_orders.py | 1 | 3108 |
from datetime import datetime, timedelta
from sqlalchemy import select
import py
from debit_orders import DebitOrderError, add_two_days, list_pending_transactions, _tstamp
from authsys_common.model import meta, pending_transactions, members
from authsys_common.scripts import create_db
def populate_test_data():
eng = create_db('sqlite:///:memory:')
meta.reflect(bind=eng)
con = eng.connect()
con.execute(members.insert().values({
'id': 0, 'name': 'Foo Bar', 'id_number': '1234', 'email': 'email',
'account_holder_name': 'Foo Bar',
'spam_consent': False, 'phone': 'phone', 'emergency_phone': 'phone',
'show_up_reason': 'unknown'}))
con.execute(members.insert().values({
'id': 1, 'name': 'Bar Baz', 'account_holder_name': 'Bar Baz Dad', 'id_number': '4321', 'email': 'email',
'spam_consent': False, 'phone': 'phone', 'emergency_phone': 'phone',
'show_up_reason': 'unknown'}))
return con
def test_two_days():
py.test.raises(DebitOrderError, add_two_days, datetime(2023, 1, 1))
assert add_two_days(datetime(2021, 5, 12)) == datetime(2021, 5, 14)
assert add_two_days(datetime(2021, 5, 13)) == datetime(2021, 5, 17)
assert add_two_days(datetime(2021, 5, 13, 14, 00)) == datetime(2021, 5, 18)
assert add_two_days(datetime(2021, 9, 22)) == datetime(2021, 9, 27)
assert add_two_days(datetime(2021, 12, 23)) == datetime(2021, 12, 28)
assert add_two_days(datetime(2021, 12, 23, 14)) == datetime(2021, 12, 29)
assert add_two_days(datetime(2021, 12, 24)) == datetime(2021, 12, 29)
assert add_two_days(datetime(2021, 12, 24, 14)) == datetime(2021, 12, 30)
assert add_two_days(datetime(2021, 12, 25)) == datetime(2021, 12, 29)
assert add_two_days(datetime(2021, 12, 25, 14)) == datetime(2021, 12, 30)
assert add_two_days(datetime(2021, 12, 26)) == datetime(2021, 12, 29)
assert add_two_days(datetime(2021, 12, 26, 14)) == datetime(2021, 12, 30)
def test_list_pending_transactions():
con = populate_test_data()
t0 = datetime(2021, 5, 5) # Wed
assert list_pending_transactions(con, now=t0) == []
def _add_pending(items):
for member_id, t in items:
con.execute(pending_transactions.insert().values(
{'member_id': member_id, 'timestamp': _tstamp(t)}))
_add_pending([(0, t0), (1, t0 + timedelta(days=1))])
assert [x['account_holder_name'] for x in list_pending_transactions(con, now=t0)] == ['Foo Bar', 'Bar Baz Dad']
con.execute(pending_transactions.delete())
t0 = datetime(2021, 5, 3) # Mon
t1 = datetime(2021, 5, 5)
_add_pending([(0, t1), (1, t1 + timedelta(days=1))])
assert [x['account_holder_name'] for x in list_pending_transactions(con, now=t0)] == ['Foo Bar']
con.execute(pending_transactions.delete())
t0 = datetime(2021, 5, 7) # Fri
t1 = datetime(2021, 5, 10)
_add_pending([(0, t1), (1, t1 + timedelta(days=1))])
assert [x['account_holder_name'] for x in list_pending_transactions(con, now=t0)] == ['Foo Bar', 'Bar Baz Dad']
con.execute(pending_transactions.delete())
| mit | 2,692,458,703,641,903,600 | 45.38806 | 115 | 0.634492 | false |
mixkorshun/django-safe-filefield | safe_filefield/clamav.py | 1 | 1084 | from urllib.parse import urlparse
import clamd
from django.conf import settings
from django.utils.functional import SimpleLazyObject
from safe_filefield import default_settings
def get_scanner(socket, timeout=None):
if socket.startswith('unix://'):
return clamd.ClamdUnixSocket(socket[7:], timeout)
elif socket.startswith('tcp://'):
uri = urlparse(socket)
return clamd.ClamdNetworkSocket(
uri.hostname, uri.port or 3310, timeout
)
else:
raise NotImplementedError(
'Missed or unsupported ClamAV connection string schema. '
'Only tcp:// or unix:// is allowed.'
)
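# Illustrative connection strings (actual socket paths and ports depend on the
# deployment):
#   unix:///var/run/clamav/clamd.ctl
#   tcp://127.0.0.1:3310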
def _get_default_scanner():
return get_scanner(
getattr(settings, 'CLAMAV_SOCKET', default_settings.CLAMAV_SOCKET),
getattr(settings, 'CLAMAV_TIMEOUT', default_settings.CLAMAV_TIMEOUT),
)
scanner = SimpleLazyObject(_get_default_scanner)
def scan_file(f):
_pos = f.tell()
f.seek(0)
status, virus_name = scanner.instream(f)['stream']
f.seek(_pos)
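    # clamd reports a verdict tuple, e.g. ('OK', None) for a clean stream or
    # ('FOUND', '<signature name>') for a detection (exact strings depend on the
    # clamd backend in use).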
return status, virus_name
| mit | 6,824,187,474,666,307,000 | 24.809524 | 77 | 0.664207 | false |
tijko/Project-Euler | py_solutions_11-20/Euler_13.py | 1 | 6171 | # work out the first ten digits of the sum of the 100, 50 digit numbers?
from __future__ import print_function
import timeit
start = timeit.default_timer()
n = """37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690"""
def euler_13(data):
total = sum(map(int, data.split()))
return str(total)[:10]
print("Answer: {}".format(euler_13(n)))
stop = timeit.default_timer()
print("Time: {0:9.5f}".format(stop - start))
| mit | 8,113,004,038,961,572,000 | 50.857143 | 73 | 0.850267 | false |
melver/bibmanage | lib/python/bibman/bibfetch/frontend.py | 1 | 2186 | # Copyright (c) 2012-2016, Marco Elver <me AT marcoelver.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Bibfetch frontent, calling all backends until one succeeds.
"""
import subprocess
import os
import re
import logging
import pprint
class Frontend:
def __init__(self, args):
self.backends = []
for backend in args.fetch_prio_list:
self.backends.append(getattr(__import__("bibman.bibfetch.{}".format(backend),
fromlist=["remote_fetch"]), "remote_fetch"))
def extract_fileinfo(self, kwargs):
if "filename" in kwargs:
if kwargs["filename"].split(".")[-1].lower() == "pdf":
pdftext = subprocess.Popen(["pdftotext", "-q",
os.path.expanduser(kwargs["filename"]), "-"],
stdout=subprocess.PIPE).communicate()[0]
pdftext = re.sub(b'\W', b' ', pdftext).decode()
words = pdftext.strip().split()[:20]
kwargs["textsearch"] = " ".join(words)
return kwargs
def __call__(self, **kwargs):
"""
@return Dictionary with keys as in format modules (see format.bibtex
for an example). Missing entries will be added automatically.
"""
kwargs = self.extract_fileinfo(kwargs)
if logging.getLogger().isEnabledFor(logging.DEBUG):
pp = pprint.PrettyPrinter(indent=4)
logging.debug("(bibfetch/frontend:Frontend) __call__::kwargs =\n{}".format(
pp.pformat(kwargs)))
for backend in self.backends:
result = backend(**kwargs)
if result is not None:
return result
| apache-2.0 | -8,679,017,488,649,756,000 | 34.258065 | 89 | 0.616651 | false |
HRODEV/Frequency | Frequency/Menu/InGameMenu/SaveGameButton.py | 1 | 1252 | import pickle
import pygame
from pygame.surface import Surface
from Menu.StartMenu.StartMenuItems.StartMenuItem import StartMenuItem
from Vector2 import Vector2
class SaveGameButton(StartMenuItem):
def __init__(self, offset: Vector2, image: Surface = None, hover: Surface = None, rect=None, newState=None):
image = image if image is not None else pygame.image.load('images/buttons/saveButton.png').convert_alpha()
hover = hover if hover is not None else pygame.image.load('images/buttons/saveButtonHover.png').convert_alpha()
super().__init__(offset, image, hover, rect)
self._newState = newState
def Update(self, game):
if self.IsClickedByMouse(game):
from Board.Board import Board
self._newState = Board(game)
from datetime import datetime
time = str(datetime.now().strftime('%Y-%m-%d-%H-%M'))
with open("./savegames/%s.frgame" % time, "wb") as f:
pickle.dump(game.Logic, f)
nself = super().Update(game)
return SaveGameButton(nself.Offset, nself.Image, nself.Hover, nself.Rect, self._newState)
def Draw(self, game):
super().Draw(game)
def GetNewState(self):
return self._newState
| mit | -4,127,266,724,686,686,000 | 39.387097 | 119 | 0.658147 | false |
Juvawa/bib2web | bib2web/bibtexparser/tests/test_bwriter.py | 1 | 2564 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Francois Boulogne
# License:
from __future__ import unicode_literals
import unittest
import sys
from bibtexparser.bparser import BibTexParser
from bibtexparser.bwriter import BibTexWriter, to_bibtex
from bibtexparser.customization import author
class TestBibtexWriterList(unittest.TestCase):
###########
# ARTICLE
###########
def test_article(self):
with open('bibtexparser/tests/data/article.bib', 'r') as bibfile:
bib = BibTexParser(bibfile.read())
with open('bibtexparser/tests/data/article_output.bib', 'r') as bibfile:
expected = bibfile.read()
result = to_bibtex(bib)
if not sys.version_info >= (3, 0):
if isinstance(result, unicode):
result = result.encode('utf-8')
self.maxDiff = None
self.assertEqual(expected, result)
###########
# BOOK
###########
def test_book(self):
with open('bibtexparser/tests/data/book.bib', 'r') as bibfile:
bib = BibTexParser(bibfile.read())
with open('bibtexparser/tests/data/book_output.bib', 'r') as bibfile:
expected = bibfile.read()
result = to_bibtex(bib)
self.maxDiff = None
self.assertEqual(expected, result)
###########
# COMMA FIRST
###########
def test_comma_first(self):
with open('bibtexparser/tests/data/book.bib', 'r') as bibfile:
bib = BibTexParser(bibfile.read())
with open('bibtexparser/tests/data/book_comma_first.bib', 'r') as bibfile:
expected = bibfile.read()
writer = BibTexWriter()
writer.indent = ' '
writer.comma_first = True
result = writer.write(bib)
self.maxDiff = None
self.assertEqual(expected, result)
###########
# MULTIPLE
###########
def test_multiple(self):
with open('bibtexparser/tests/data/multiple_entries.bib', 'r') as bibfile:
bib = BibTexParser(bibfile.read())
with open('bibtexparser/tests/data/multiple_entries_output.bib', 'r') as bibfile:
expected = bibfile.read()
result = to_bibtex(bib)
self.maxDiff = None
self.assertEqual(expected, result)
###########
# Exception
###########
def test_exception_typeerror(self):
with open('bibtexparser/tests/data/article.bib', 'r') as bibfile:
bib = BibTexParser(bibfile.read(), customization=author)
self.assertRaises(TypeError, to_bibtex, bib)
| mit | -8,285,331,172,103,560,000 | 29.891566 | 89 | 0.592434 | false |
pombredanne/readthedocs.org | readthedocs/comments/models.py | 1 | 8353 | """Models for the comments app."""
from __future__ import absolute_import
from builtins import str
from builtins import object
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
from readthedocs.restapi.serializers import VersionSerializer
class DocumentNodeManager(models.Manager):
def create(self, *args, **kwargs):
try:
node_hash = kwargs.pop('hash')
commit = kwargs.pop('commit')
except KeyError:
raise TypeError("You must provide a hash and commit for the initial NodeSnapshot.")
node = super(DocumentNodeManager, self).create(*args, **kwargs)
NodeSnapshot.objects.create(commit=commit, hash=node_hash, node=node)
return node
def from_hash(self, version_slug, page, node_hash, project_slug=None):
"""Return a node matching a given hash."""
snapshots = NodeSnapshot.objects.filter(hash=node_hash,
node__version__slug=version_slug,
node__page=page)
if project_slug:
snapshots = snapshots.filter(node__project__slug=project_slug)
if not snapshots.exists():
raise DocumentNode.DoesNotExist(
"No node exists on %s with a current hash of %s" % (
page, node_hash))
if snapshots.count() == 1:
# If we have found only one snapshot, we know we have the correct node.
node = snapshots[0].node
else:
# IF we have found more than one snapshot...
number_of_nodes = len(set(snapshots.values_list('node')))
if number_of_nodes == 1:
# ...and they're all from the same node, then we know we have the proper node.
node = snapshots[0].node
else:
# On the other hand, if they're from different nodes, then we must
# have different nodes with the same hash (and thus the same content).
raise NotImplementedError(
'''
There is more than one node with this content on this page.
In the future, ReadTheDocs will implement an indexing feature
to allow unique identification of nodes on the same page with the same content.
''')
return node
@python_2_unicode_compatible
class DocumentNode(models.Model):
"""Document node."""
objects = DocumentNodeManager()
project = models.ForeignKey('projects.Project', verbose_name=_('Project'),
related_name='nodes', null=True)
version = models.ForeignKey('builds.Version', verbose_name=_('Version'),
related_name='nodes', null=True)
page = models.CharField(_('Path'), max_length=255)
raw_source = models.TextField(_('Raw Source'))
def __str__(self):
return "node %s on %s for %s" % (self.id, self.page, self.project)
def latest_hash(self):
return self.snapshots.latest().hash
def latest_commit(self):
return self.snapshots.latest().commit
def visible_comments(self):
if not self.project.comment_moderation:
return self.comments.all()
# non-optimal SQL warning.
decisions = ModerationAction.objects.filter(
comment__node=self,
decision=1,
date__gt=self.snapshots.latest().date
)
valid_comments = self.comments.filter(moderation_actions__in=decisions).distinct()
return valid_comments
def update_hash(self, new_hash, commit):
latest_snapshot = self.snapshots.latest()
if latest_snapshot.hash == new_hash and latest_snapshot.commit == commit:
return latest_snapshot
return self.snapshots.create(hash=new_hash, commit=commit)
class DocumentNodeSerializer(serializers.ModelSerializer):
version = VersionSerializer()
current_hash = serializers.CharField(source='latest_hash')
last_commit = serializers.CharField(source='latest_commit')
snapshots_count = serializers.CharField(source='snapshots.count')
class Meta(object):
model = DocumentNode
        exclude = ()
@python_2_unicode_compatible
class NodeSnapshot(models.Model):
date = models.DateTimeField('Publication date', auto_now_add=True)
hash = models.CharField(_('Hash'), max_length=255)
node = models.ForeignKey(DocumentNode, related_name="snapshots")
commit = models.CharField(max_length=255)
class Meta(object):
get_latest_by = 'date'
# Snapshots are *almost* unique_together just for node and hash,
# but for the possibility that a node's hash might change and then change back
# in a later commit.
unique_together = ("hash", "node", "commit")
def __str__(self):
return self.hash
# class DocumentCommentManager(models.Manager):
#
# def visible(self, inquiring_user=None, node=None):
# if node:
#
# decisions = ModerationAction.objects.filter(
# comment__node=node,
# decision=1,
# date__gt=self.snapshots.latest().date
# )
# valid_comments = node.comments.filter(moderation_actions__in=decisions).distinct()
#
# if not self.project.comment_moderation:
# return self.comments.all()
# else:
# non-optimal SQL warning.
#
# return valid_comments
@python_2_unicode_compatible
class DocumentComment(models.Model):
"""Comment on a ``DocumentNode`` by a user."""
date = models.DateTimeField(_('Date'), auto_now_add=True)
rating = models.IntegerField(_('Rating'), default=0)
text = models.TextField(_('Text'))
user = models.ForeignKey(User)
node = models.ForeignKey(DocumentNode, related_name='comments')
def __str__(self):
return "%s - %s" % (self.text, self.node)
def get_absolute_url(self):
return "/%s" % self.node.latest_hash()
def moderate(self, user, decision):
return self.moderation_actions.create(user=user, decision=decision)
def has_been_approved_since_most_recent_node_change(self):
try:
latest_moderation_action = self.moderation_actions.latest()
except ModerationAction.DoesNotExist:
# If we have no moderation actions, obviously we're not approved.
return False
most_recent_node_change = self.node.snapshots.latest().date
if latest_moderation_action.date > most_recent_node_change:
# If we do have an approval action which is newer than the most recent change,
# we'll return True or False commensurate with its "approved" attribute.
return latest_moderation_action.approved()
return False
def is_orphaned(self):
raise NotImplementedError('TODO')
class DocumentCommentSerializer(serializers.ModelSerializer):
node = DocumentNodeSerializer()
class Meta(object):
model = DocumentComment
fields = ('date', 'user', 'text', 'node')
def perform_create(self):
pass
@python_2_unicode_compatible
class ModerationActionManager(models.Model):
def __str__(self):
return str(self.id)
def current_approvals(self):
# pylint: disable=unused-variable
most_recent_change = self.comment.node.snapshots.latest().date # noqa
@python_2_unicode_compatible
class ModerationAction(models.Model):
user = models.ForeignKey(User)
comment = models.ForeignKey(DocumentComment, related_name="moderation_actions")
decision = models.IntegerField(choices=(
(0, 'No Decision'),
(1, 'Publish'),
(2, 'Hide'),
))
date = models.DateTimeField(_('Date'), auto_now_add=True)
def __str__(self):
return "%s - %s" % (self.user_id, self.get_decision_display())
class Meta(object):
get_latest_by = 'date'
def approved(self):
return self.decision == 1
class ModerationActionSerializer(serializers.ModelSerializer):
class Meta(object):
model = ModerationAction
exclude = ()
| mit | -15,031,181,141,574,940 | 33.233607 | 100 | 0.626841 | false |
jdilallo/jdilallo-test | examples/adwords/v201309/campaign_management/get_all_disapproved_ads.py | 1 | 2122 | #!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all disapproved ads for a given campaign.
To add an ad, run add_ads.py.
Tags: AdGroupAdService.get
"""
__author__ = ('[email protected] (Kevin Winter)'
'Joseph DiLallo')
from googleads import adwords
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
def main(client, campaign_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201309')
# Construct selector and get all ads for a given ad group.
selector = {
'fields': ['Id', 'AdGroupAdDisapprovalReasons'],
'predicates': [
{
'field': 'CampaignId',
'operator': 'EQUALS',
'values': [campaign_id]
},
{
'field': 'AdGroupCreativeApprovalStatus',
'operator': 'IN',
'values': ['DISAPPROVED']
}
]
}
ads = ad_group_ad_service.get(selector)
# Display results.
if 'entries' in ads:
for ad in ads['entries']:
print ('Ad with id \'%s\' was disapproved for the following reasons: '
% (ad['ad']['id']))
if ad['ad'].get('disapprovalReasons'):
for reason in ad['ad']['disapprovalReasons']:
print '\t%s' % reason
else:
print '\tReason not provided.'
else:
print 'No disapproved ads were found.'
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID)
| apache-2.0 | 330,944,383,159,758,600 | 28.068493 | 80 | 0.637135 | false |
Locbit/OctoPrint-Locbit | setup.py | 1 | 3967 | # coding=utf-8
########################################################################################################################
### Do not forget to adjust the following variables to your own plugin.
# The plugin's identifier, has to be unique
plugin_identifier = "Locbit"
# The plugin's python package, should be "octoprint_<plugin identifier>", has to be unique
plugin_package = "octoprint_Locbit"
# The plugin's human readable name. Can be overwritten within OctoPrint's internal data via __plugin_name__ in the
# plugin module
plugin_name = "OctoPrint-Locbit"
# The plugin's version. Can be overwritten within OctoPrint's internal data via __plugin_version__ in the plugin module
plugin_version = "0.1.0"
# The plugin's description. Can be overwritten within OctoPrint's internal data via __plugin_description__ in the plugin
# module
plugin_description = """Connect 3D printers to Locbit servers."""
# The plugin's author. Can be overwritten within OctoPrint's internal data via __plugin_author__ in the plugin module
plugin_author = "Brandon Herbert"
# The plugin's author's mail address.
plugin_author_email = "[email protected]"
# The plugin's homepage URL. Can be overwritten within OctoPrint's internal data via __plugin_url__ in the plugin module
plugin_url = "www.locbit.com"
# The plugin's license. Can be overwritten within OctoPrint's internal data via __plugin_license__ in the plugin module
plugin_license = "AGPLv3"
# Any additional requirements besides OctoPrint should be listed here
plugin_requires = []
### --------------------------------------------------------------------------------------------------------------------
### More advanced options that you usually shouldn't have to touch follow after this point
### --------------------------------------------------------------------------------------------------------------------
# Additional package data to install for this plugin. The subfolders "templates", "static" and "translations" will
# already be installed automatically if they exist.
plugin_additional_data = []
# Any additional python packages you need to install with your plugin that are not contained in <plugin_package>.*
plugin_additional_packages = []
# Any python packages within <plugin_package>.* you do NOT want to install with your plugin
plugin_ignored_packages = []
# Additional parameters for the call to setuptools.setup. If your plugin wants to register additional entry points,
# define dependency links or other things like that, this is the place to go. Will be merged recursively with the
# default setup parameters as provided by octoprint_setuptools.create_plugin_setup_parameters using
# octoprint.util.dict_merge.
#
# Example:
# plugin_requires = ["someDependency==dev"]
# additional_setup_parameters = {"dependency_links": ["https://github.com/someUser/someRepo/archive/master.zip#egg=someDependency-dev"]}
additional_setup_parameters = {}
########################################################################################################################
from setuptools import setup
try:
import octoprint_setuptools
except:
print("Could not import OctoPrint's setuptools, are you sure you are running that under "
"the same python installation that OctoPrint is installed under?")
import sys
sys.exit(-1)
setup_parameters = octoprint_setuptools.create_plugin_setup_parameters(
identifier=plugin_identifier,
package=plugin_package,
name=plugin_name,
version=plugin_version,
description=plugin_description,
author=plugin_author,
mail=plugin_author_email,
url=plugin_url,
license=plugin_license,
requires=plugin_requires,
additional_packages=plugin_additional_packages,
ignored_packages=plugin_ignored_packages,
additional_data=plugin_additional_data
)
if len(additional_setup_parameters):
from octoprint.util import dict_merge
setup_parameters = dict_merge(setup_parameters, additional_setup_parameters)
setup(**setup_parameters)
| mit | -7,069,196,337,411,426,000 | 41.202128 | 140 | 0.679859 | false |
Python3WebSpider/ProxyPool | proxypool/scheduler.py | 1 | 3309 | import time
import multiprocessing
from proxypool.processors.server import app
from proxypool.processors.getter import Getter
from proxypool.processors.tester import Tester
from proxypool.setting import CYCLE_GETTER, CYCLE_TESTER, API_HOST, API_THREADED, API_PORT, ENABLE_SERVER, \
ENABLE_GETTER, ENABLE_TESTER, IS_WINDOWS
from loguru import logger
if IS_WINDOWS:
multiprocessing.freeze_support()
tester_process, getter_process, server_process = None, None, None
class Scheduler():
"""
scheduler
"""
def run_tester(self, cycle=CYCLE_TESTER):
"""
run tester
"""
if not ENABLE_TESTER:
logger.info('tester not enabled, exit')
return
tester = Tester()
loop = 0
while True:
logger.debug(f'tester loop {loop} start...')
tester.run()
loop += 1
time.sleep(cycle)
def run_getter(self, cycle=CYCLE_GETTER):
"""
run getter
"""
if not ENABLE_GETTER:
logger.info('getter not enabled, exit')
return
getter = Getter()
loop = 0
while True:
logger.debug(f'getter loop {loop} start...')
getter.run()
loop += 1
time.sleep(cycle)
def run_server(self):
"""
run server for api
"""
if not ENABLE_SERVER:
logger.info('server not enabled, exit')
return
app.run(host=API_HOST, port=API_PORT, threaded=API_THREADED)
def run(self):
global tester_process, getter_process, server_process
try:
logger.info('starting proxypool...')
if ENABLE_TESTER:
tester_process = multiprocessing.Process(target=self.run_tester)
logger.info(f'starting tester, pid {tester_process.pid}...')
tester_process.start()
if ENABLE_GETTER:
getter_process = multiprocessing.Process(target=self.run_getter)
logger.info(f'starting getter, pid{getter_process.pid}...')
getter_process.start()
if ENABLE_SERVER:
server_process = multiprocessing.Process(target=self.run_server)
logger.info(f'starting server, pid{server_process.pid}...')
server_process.start()
tester_process.join()
getter_process.join()
server_process.join()
except KeyboardInterrupt:
logger.info('received keyboard interrupt signal')
tester_process.terminate()
getter_process.terminate()
server_process.terminate()
finally:
# must call join method before calling is_alive
tester_process.join()
getter_process.join()
server_process.join()
logger.info(f'tester is {"alive" if tester_process.is_alive() else "dead"}')
logger.info(f'getter is {"alive" if getter_process.is_alive() else "dead"}')
logger.info(f'server is {"alive" if server_process.is_alive() else "dead"}')
logger.info('proxy terminated')
if __name__ == '__main__':
scheduler = Scheduler()
scheduler.run()
| mit | 4,403,152,699,556,547,000 | 31.762376 | 108 | 0.56573 | false |
ios-xr/iosxr-ansible | local/library/iosxr_nc11_send.py | 1 | 4463 | #!/usr/bin/python
#------------------------------------------------------------------------------
#
# Copyright (C) 2016 Cisco Systems, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#------------------------------------------------------------------------------
from ansible.module_utils.basic import *
import paramiko
DOCUMENTATION = """
---
module: iosxr_nc11_send
author: Adisorn Ermongkonchai
short_description: Send NETCONF-YANG 1.1 XML file to IOS-XR device
description:
- Send NETCONF-YANG 1.1 XML file to IOS-XR device
options:
host:
description:
- IP address or hostname (resolvable by Ansible control host) of
the target IOS-XR node.
required: true
username:
description:
- username used to login to IOS-XR
required: false
default: none
password:
description:
- password used to login to IOS-XR
required: false
default: none
xmlfile:
description:
- XML file
required: true
example: nc_show_install_active.xml
<rpc message-id="101" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<get>
<filter type="subtree">
<interface-configurations xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-ifmgr-cfg"/>
</filter>
</get>
</rpc>
"""
EXAMPLES = """
- iosxr_nc11_send:
host: '{{ ansible_ssh_host }}'
username: cisco
password: cisco
xmlfile: xml/nc_show_install_active.xml
"""
RETURN = """
stdout:
description: raw response
returned: always
stdout_lines:
description: list of response lines
returned: always
"""
HELLO = """
<hello xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<capabilities>
<capability>urn:ietf:params:netconf:base:1.1</capability>
</capabilities>
</hello>
]]>]]>"""
COMMIT = """
#91
<rpc message-id="101" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<commit/>
</rpc>
##
"""
CLOSE = """
#98
<rpc message-id="102" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0">
<close-session/>
</rpc>
##
"""
def main():
module = AnsibleModule(
argument_spec = dict(
host = dict(required=True),
username = dict(required=False, default=None),
password = dict(required=False, default=None),
xmlfile = dict(required=True),
port = dict(required=False, type='int', default=830)
),
supports_check_mode = False
)
args = module.params
xml_file = module.params['xmlfile']
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(module.params['host'],
port=module.params['port'],
username=module.params['username'],
password=module.params['password'],
timeout=10)
transport = ssh.get_transport()
channel = transport.open_channel('session')
channel.invoke_subsystem('netconf')
# read hello msg
response = channel.recv(1024)
while ']]>]]>' not in response:
response += channel.recv(1024)
result = dict(changed=False)
xml_text = open(xml_file).read()
if 'edit-config' in xml_text or 'delete-config' in xml_text:
result['changed'] = True
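    # NETCONF 1.1 chunked framing (RFC 6242): each chunk is prefixed with
    # '\n#<length>\n' and the whole message is terminated by '\n##\n'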
xml_msg = '\n#' + str(len(xml_text)-1) + '\n' + xml_text + '##\n'
# send hello followed by contents of xml file
channel.send(HELLO)
channel.send(xml_msg)
# collect all responses 1024 bytes at a time
response = channel.recv(1024)
while '##' not in response:
response += channel.recv(1024)
# commit changes
if result['changed']:
channel.send(COMMIT)
channel.send(CLOSE)
result['stdout'] = response
if 'rpc-error' in response:
return module.fail_json(msg=response)
else:
return module.exit_json(**result)
if __name__ == "__main__":
main()
| gpl-3.0 | 6,026,454,946,953,059,000 | 26.89375 | 95 | 0.611248 | false |
Fyzel/weather-data-flaskapi | database/__init__.py | 1 | 5398 | """
@author: [email protected]
@copyright: 2017 Englesh.org. All rights reserved.
@license: https://github.com/Fyzel/weather-data-flaskapi/blob/master/LICENSE
@contact: [email protected]
@deffield updated: 2017-06-14
"""
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_humidity_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create humidity table indices
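        # An OperationalError below typically means the index already exists,
        # so it is ignored to keep these setup calls safe to re-run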
try:
sql = text(
'CREATE UNIQUE INDEX humidity_city_subdivision_country_index ON humidity (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_city_index ON humidity (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_subdivision_index ON humidity (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_country_index ON humidity (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_latitude_longitude_index ON humidity (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX humidity_timestamp_index ON humidity (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_pressure_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create pressure table indices
try:
sql = text(
'CREATE UNIQUE INDEX pressure_city_subdivision_country_index ON pressure (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_city_index ON pressure (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_subdivision_index ON pressure (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_country_index ON pressure (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_latitude_longitude_index ON pressure (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX pressure_timestamp_index ON pressure (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_temperature_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create temperature table indices
try:
sql = text(
'CREATE UNIQUE INDEX temperature_city_subdivision_country_index ON temperature (city, subdivision, country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_city_index ON temperature (city);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_subdivision_index ON temperature (subdivision);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_country_index ON temperature (country);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_latitude_longitude_index ON temperature (latitude, longitude);')
db.engine.execute(sql)
except OperationalError as oe:
pass
try:
sql = text('CREATE INDEX temperature_timestamp_index ON temperature (timestamp);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_user_indexes(app):
with app.app_context():
from sqlalchemy import text
from sqlalchemy.exc import OperationalError
# Create user table indices
try:
sql = text('CREATE INDEX user_username_index ON user (username);')
db.engine.execute(sql)
except OperationalError as oe:
pass
def create_indexes(app):
create_humidity_indexes(app)
create_pressure_indexes(app)
create_temperature_indexes(app)
create_user_indexes(app)
def create_database(app=None):
db.create_all(app=app)
create_indexes(app)
def reset_database():
from database.models import ProtectedHumidity, ProtectedPressure, ProtectedTemperature, User
db.drop_all()
db.create_all()
| apache-2.0 | 7,651,208,777,404,038,000 | 29.497175 | 126 | 0.608744 | false |
dodaro/preasp | tests/asp/AllAnswerSets/nontight/tautologyinbody.test.py | 1 | 3116 | input = """
1 13 3 2 14 19 16
1 35 3 2 70 6 68
1 5 3 2 47 62 91
1 91 3 1 33 15 57
1 26 3 2 27 28 2
1 22 3 2 68 2 17
1 18 3 1 60 16 17
1 44 3 2 8 6 33
1 39 3 2 46 47 43
1 8 3 1 90 31 54
1 6 3 2 48 81 90
1 9 3 2 3 82 3
1 46 3 2 70 37 61
1 80 3 1 81 79 28
1 14 3 1 13 21 40
1 14 2 1 47 90
1 22 3 1 37 91 68
1 14 3 1 46 30 89
1 5 3 1 51 21 69
1 64 3 2 67 66 3
1 2 3 0 43 85 65
1 22 3 1 30 2 67
1 81 3 1 26 17 32
1 19 3 1 4 54 37
1 67 3 2 29 91 38
1 16 3 1 60 54 38
1 72 3 2 26 12 42
1 86 3 2 61 18 53
1 16 3 1 23 5 92
1 25 3 1 72 28 59
1 28 3 2 46 35 40
1 17 3 2 17 37 41
1 7 3 1 38 13 6
1 63 3 1 19 69 26
1 87 2 0 46 7
1 16 3 2 36 54 51
1 45 3 1 63 36 21
1 85 3 0 47 25 2
1 59 2 0 86 82
1 92 3 1 82 80 81
1 40 3 1 9 60 66
1 53 3 1 60 45 38
1 92 3 1 43 51 51
1 44 3 2 40 44 59
1 20 2 0 11 23
1 85 3 2 29 43 16
1 79 3 0 28 44 79
1 69 3 0 82 65 44
1 7 3 1 37 53 39
1 58 2 1 52 79
1 87 3 2 38 87 33
1 88 3 2 17 62 58
1 68 3 2 17 29 7
1 55 3 1 32 47 8
1 87 3 2 56 45 24
1 57 3 1 12 92 16
1 67 3 0 22 42 48
1 55 3 2 6 79 76
1 90 3 1 16 7 26
1 15 3 1 7 2 6
1 72 3 0 73 3 86
1 43 3 1 16 44 90
1 60 3 1 20 88 28
1 51 3 2 37 25 81
1 64 2 1 11 89
1 57 3 2 15 78 75
1 54 3 1 3 49 69
1 91 3 0 21 25 50
1 56 3 1 49 6 72
1 46 3 2 43 60 83
1 47 3 2 83 58 66
1 77 3 1 55 11 52
1 76 3 1 78 20 67
1 25 3 2 50 26 81
1 2 3 3 3 4 5
1 16 3 3 20 21 22
1 29 3 2 54 82 50
1 53 3 1 31 89 73
1 48 3 3 49 16 23
1 10 3 2 71 86 29
1 63 3 2 35 13 79
1 87 3 2 91 48 93
1 19 3 1 3 65 86
1 27 3 1 44 7 51
1 21 3 3 69 9 63
1 6 3 3 7 6 8
1 48 2 1 3 65
1 89 3 1 76 22 22
1 60 3 0 32 17 24
1 51 3 1 33 49 2
1 55 2 1 56 56
1 70 3 0 65 30 87
1 88 3 3 56 81 5
1 54 3 2 37 37 37
1 68 3 1 79 86 57
1 11 3 2 36 11 79
1 64 3 1 87 39 85
1 89 3 2 15 32 93
1 5 3 1 14 24 11
1 72 3 3 14 27 36
1 74 3 2 37 21 57
1 34 3 3 35 36 37
1 42 3 1 44 59 42
1 85 3 1 52 42 67
1 79 3 2 64 74 64
1 84 3 2 40 78 69
1 6 3 1 65 63 33
1 37 3 1 57 52 9
1 34 3 1 12 54 80
1 66 3 1 77 25 17
1 32 3 3 11 33 34
1 58 3 1 20 42 80
1 75 3 3 57 37 87
1 23 3 2 26 31 38
1 53 3 1 10 21 7
1 41 3 1 64 39 57
1 17 2 0 32 18
1 13 3 1 30 31 47
1 23 3 3 6 24 25
1 7 3 1 58 78 38
1 13 3 2 12 14 9
1 79 3 2 33 90 92
1 6 3 1 68 45 57
1 63 3 0 40 58 74
1 26 3 3 31 32 16
1 14 3 1 18 7 33
1 50 2 1 26 48
1 42 3 3 27 54 37
1 38 3 2 58 37 20
1 45 3 1 80 21 11
1 12 3 1 91 67 5
1 36 3 2 30 32 77
1 90 2 1 92 66
1 49 3 2 58 46 55
1 30 3 1 34 84 14
1 89 3 1 7 67 4
1 45 3 1 49 25 64
1 90 3 2 55 10 59
1 18 3 3 21 59 22
1 21 3 2 10 22 54
0
29 p67
84 p87
64 p18
90 p55
76 p23
15 p36
47 p31
43 p32
2 p48
67 p56
14 p20
66 p84
9 p97
91 p51
68 p83
17 p4
31 p16
11 p85
24 p13
78 p9
38 p86
71 p17
36 p3
62 p15
63 p6
39 p53
41 p79
57 p2
79 p61
6 p78
70 p65
46 p58
5 p21
56 p10
10 p45
59 p82
88 p96
74 p30
92 p71
75 p24
54 p28
20 p66
12 p81
61 p98
18 p64
82 p88
44 p27
83 p29
26 p38
30 p70
4 p44
89 p33
87 p91
25 p12
42 p62
37 p52
85 p35
34 p37
27 p80
22 p8
16 p60
93 p40
77 p41
81 p50
58 p63
32 p47
55 p1
21 p73
40 p92
8 p90
35 p72
13 p74
3 p0
52 p99
72 p39
60 p26
49 p94
19 p77
50 p43
51 p69
53 p57
28 p7
80 p25
33 p5
73 p19
7 p93
23 p95
69 p11
48 p46
86 p89
65 p22
45 p42
0
B+
0
B-
1
0
1
"""
output = """
INCOHERENT
"""
| apache-2.0 | -9,124,946,536,032,706,000 | 11.718367 | 17 | 0.619384 | false |
ladinu/CS350-project | TestEfficency.py | 1 | 2211 | import time
from geometry.utils import getNRandomPoints, getCircle
import BruteForceHull, QuickHull
from math import log
global fcount
fcount = 2
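# outStr() builds one CSV row: "<number of points>,<elapsed seconds>"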
def outStr(a, b):
return "%i,%f" % (a, b)
def getBruteForceExecTime(points):
t1 = time.time()
BruteForceHull.computeHull(points)
t2 = time.time()
return t2-t1
def getQuickHullExecTime(points, loopCount=1):
t1 = time.time()
qh = QuickHull.QuickHull(points)
qh.computeHull()
t2 = time.time()
return t2-t1
def getBruteForceData():
global fcount
print "> Generating execution reports for BruteForce hull..."
f = open("reports/bruteforce%s.csv" % fcount, 'w', 1)
dataPoints = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
for d in dataPoints:
etime = getBruteForceExecTime(getNRandomPoints(d))
f.write("%s\n" % outStr(d, etime))
print outStr(d, etime)
f.close()
def getQuickHullWorstCaseData():
global fcount
print "> Generating execution reports for QuickHull worst case..."
dataPoints = [10000, 20000, 40000, 50000, 60000, 70000, 80000, 90000, 100000, 200000,
400000, 500000, 600000, 700000, 800000, 900000, 1000000, 2000000, 3000000,
4000000, 5000000, 6000000, 7000000, 8000000, 9000000, 10000000]
f = open("reports/quickhull_worst_case%s.csv" % fcount, 'w', 1)
for d in dataPoints:
etime = getQuickHullExecTime(getCircle(10000, d))
f.write("%s\n" % outStr(d, etime))
print outStr(d, etime)
f.close()
def getQuickHullData():
global fcount
print "> Generating execution reports for QuickHull..."
f = open('reports/quickhull%s.csv' % fcount, 'w', 1)
for j in [100000000, 10, 100, 1000, 10000, 100000, 200000, 400000, 600000, 800000, 1000000, 9000000, 10000000]:
      row = outStr(j, getQuickHullExecTime(getNRandomPoints(j)))
      f.write("%s\n" % row)
      print row
f.close()
if __name__ == "__main__":
fcount = "_0"
getQuickHullData()
getBruteForceData()
getQuickHullWorstCaseData()
fcount = "_1"
getBruteForceData()
getQuickHullWorstCaseData()
getQuickHullData()
fcount = "_2"
getBruteForceData()
getQuickHullWorstCaseData()
getQuickHullData()
| mit | 5,516,835,767,643,267,000 | 30.585714 | 114 | 0.679331 | false |
rdo-management/tuskar | tuskar/api/renderers.py | 1 | 2382 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pecan
import wsme
from wsme import api
class JSONRenderer(object):
"""Custom JSON renderer.
Renders to JSON and handles responses for various HTTP status codes.
"""
def __init__(self, path, extra_vars):
"""Create an empty __init__ to accept the arguments provided to a
Renderer but ignore them as they are not needed.
"""
def _render_fault(self, message, details, code=500):
"""Given the namespace dictionary render a JSON error response for the
fault in the format defined by the OpenStack identity service
documentation.
"""
body = {
'identityFault': {
"message": message,
"details": details,
"code": code
}
}
return wsme.rest.json.encode_error(None, body)
def render(self, template_path, namespace):
"""Given a namespace dict render the response as JSON and return.
If the dict contains a faultcode or wsme.api.Response its a fault from
user code and is rendered via _render_fault.
template_path is a required parameter for renderers but unused in
this context.
"""
if 'faultcode' in namespace:
return self._render_fault(
namespace['faultstring'],
namespace['debuginfo'])
result = namespace['result']
if isinstance(namespace['result'], api.Response):
pecan.response.status_code = result.status_code
return self._render_fault(
result.obj.faultstring, result.obj.debuginfo,
code=result.status_code)
return wsme.rest.json.encode_result(
result,
namespace['datatype']
)
| apache-2.0 | 3,922,971,986,940,133,400 | 32.549296 | 78 | 0.625525 | false |
xibalbanus/PIA2 | osiris_phylogenetics/phylogenies/phytab_raxml_pars.py | 1 | 5675 | #!/usr/bin/env python
## This tool runs RAxML's parsimony inference on a phytab input.
## If N = # of nodes requested in job runner, then N RAxML jobs will run simultaneously. Make sure that the
## number of processors ('ppn') in the job runner matches the 'numthreads' commandline argument -T.
##
## Usage: ./phytab_raxml_using_ptree.parallel.py -i <phytabinput> -e <model> -f <modelfile> -T 4
## example: ./phytab_raxml_using_ptree.parallel.py -i myphytab.txt -e PROTGAMMAWAG -f None -T 4
## or: ./phytab_raxml_using_ptree.parallel.py -i myphtab.txt -e None -f modelsforeachpartition.txt -T 4
##
## outputs a tab-delimited file with gene-partition and newick parsimony tree on each line.
import optparse
import os
import subprocess
import multiprocessing
RESULTS_DIR = 'results'
RESULTS_FILE = 'parsimony_results.txt'
RAXML_PREFIX = 'RAxML_parsimonyTree.'
def unescape(string):
mapped_chars = {
'>': '__gt__',
'<': '__lt__',
"'": '__sq__',
'"': '__dq__',
'[': '__ob__',
']': '__cb__',
'{': '__oc__',
'}': '__cc__',
'@': '__at__',
'\n': '__cn__',
'\r': '__cr__',
'\t': '__tc__',
'#': '__pd__'
}
for key, value in mapped_chars.iteritems():
string = string.replace(value, key)
return string
class Species:
def __init__(self, string):
lis = string.split('\t')
# print lis
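        # expected phytab columns: species, gene, sequence name, sequence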
self.species = lis[0]
self.gene = lis[1]
self.name = lis[2]
self.sequence = lis[3]
def toString(self):
return self.species + '\t' + self.sequence
class Gene:
def __init__(self, name):
self.name = name
self.count = 0
self.length = 0
self.species = []
def output(self):
file_name = self.name + ".phy"
location = RESULTS_DIR + os.sep + file_name
with open(location, 'w') as f:
f.write(str(self.count) + '\t' + str(self.length) + '\n')
for s in self.species:
f.write(s.toString())
return file_name
def add(self, species):
if species.name == "":
return
self.species.append(species)
self.count += 1
if self.length == 0:
self.length = len(species.sequence) - 1
def output_species(species):
file_name = species.gene + ".phy"
location = RESULTS_DIR + os.sep + file_name
with open(location, 'a') as f:
f.write(species.toString())
return file_name
def process_phytab(input):
files = set()
genes = dict()
with open(input) as f:
for line in f:
if len(line) < 4:
continue
species = Species(line)
if species.gene in genes:
genes[species.gene].add(species)
else:
gene = Gene(species.gene)
gene.add(species)
genes[gene.name] = gene
for k, gene in genes.iteritems():
files.add(gene.output())
return files
def runRaxml(list_of_files, evo, evoDict,NUMTHREADS):
for gene_file in list_of_files:
if gene_file.split(".")[0] in evoDict:
newEvo = evoDict[gene_file.split(".")[0]]
else:
newEvo = evo
# cpu_count = str(multiprocessing.cpu_count())
file_name = RESULTS_DIR + os.sep + gene_file
# to run parsimony trees:
        popen = subprocess.Popen(['raxmlHPC-PTHREADS', '-T', NUMTHREADS, '-f', 'd', '-s', file_name, '-y', '-m', newEvo, '-n', gene_file[:-4] + '.tre', '-p', '34'])
# to run likelihood trees:
# popen = subprocess.Popen(['raxmlHPC-PTHREADS', "-T", NUMTHREADS, "-s", file_name, '-m', newEvo, '-n', gene_file[:-4], '-p', '34'])
popen.wait()
def toData(text, name):
text = name + "\t" + text.replace("\n", "\\n")
return text
def readEfile(efile):
evoDict = {}
with open(efile, "r") as f:
for line in f:
pair = line.split("\t")
evoDict[pair[0].strip()] = pair[1].strip()
return evoDict
def main():
usage = """%prog [options]
options (listed below) default to 'None' if omitted
"""
parser = optparse.OptionParser(usage=usage)
parser.add_option(
'-i', '--in',
dest='input',
action='store',
type='string',
metavar="FILE",
help='Name of input data.')
parser.add_option(
'-e', '--evo',
dest='evo',
action='store',
type='string',
metavar="EVO",
help='Evolution model.')
parser.add_option(
'-f', '--evo-file',
dest='efile',
action='store',
type='string',
metavar="EVO_FILE",
help='Evolution model file. Format is gene_name [tab] evolution_model.')
parser.add_option('-T', '--numthread',dest='numthreads', action='store',type='int', metavar="NUMT", help='Provide number of threads for RAxML')
options, args = parser.parse_args()
os.mkdir(RESULTS_DIR)
list_of_species_files = process_phytab(unescape(options.input))
try:
evoDict = readEfile(unescape(options.efile))
except IOError:
print "Could not find evolution model file, using:", unescape(options.evo)
evoDict = {}
runRaxml(list_of_species_files, unescape(options.evo), evoDict,str(options.numthreads))
result = [file for file in os.listdir('./') if file.startswith(RAXML_PREFIX)]
with open(RESULTS_DIR + os.sep + RESULTS_FILE, "w") as f:
for file in result:
with open(file, "r") as r:
f.write(file[len(RAXML_PREFIX):-4] + '\t' + r.read())
if __name__ == '__main__':
main()
| mit | -6,460,458,334,718,591,000 | 28.868421 | 160 | 0.551366 | false |
lehinevych/cfme_tests | cfme/tests/test_rest.py | 1 | 4051 | # -*- coding: utf-8 -*-
"""This module contains REST API specific tests."""
import fauxfactory
import pytest
from cfme import Credential
from cfme.configure.access_control import User, Group
from cfme.login import login
from cfme.rest import vm as _vm
from utils.providers import setup_a_provider as _setup_a_provider
from utils.version import current_version
from utils import testgen, conf, version
pytest_generate_tests = testgen.generate(
testgen.provider_by_type,
['virtualcenter', 'rhevm'],
scope="module"
)
@pytest.fixture(scope="module")
def a_provider():
return _setup_a_provider("infra")
@pytest.mark.usefixtures("logged_in")
@pytest.fixture(scope='function')
def user():
user = User(credential=Credential(principal=fauxfactory.gen_alphanumeric(),
secret=fauxfactory.gen_alphanumeric()), name=fauxfactory.gen_alphanumeric(),
group=Group(description='EvmGroup-super_administrator'))
user.create()
return user
# This test should be deleted when we get new build > 5.5.2.4
@pytest.mark.tier(2)
@pytest.mark.uncollectif(lambda: version.current_version() < '5.5')
def test_edit_user_password(rest_api, user):
if "edit" not in rest_api.collections.users.action.all:
pytest.skip("Edit action for users is not implemented in this version")
try:
for cur_user in rest_api.collections.users:
if cur_user.userid != conf.credentials['default']['username']:
rest_user = cur_user
break
except:
pytest.skip("There is no user to change password")
new_password = fauxfactory.gen_alphanumeric()
rest_user.action.edit(password=new_password)
cred = Credential(principal=rest_user.userid, secret=new_password)
new_user = User(credential=cred)
login(new_user)
@pytest.fixture(scope="function")
def vm(request, a_provider, rest_api):
return _vm(request, a_provider, rest_api)
@pytest.mark.tier(2)
@pytest.mark.parametrize(
"from_detail", [True, False],
ids=["from_detail", "from_collection"])
def test_vm_scan(rest_api, vm, from_detail):
rest_vm = rest_api.collections.vms.get(name=vm)
if from_detail:
response = rest_vm.action.scan()
else:
response, = rest_api.collections.vms.action.scan(rest_vm)
@pytest.wait_for(timeout="5m", delay=5, message="REST running scanning vm finishes")
def _finished():
response.task.reload()
if response.task.status.lower() in {"error"}:
pytest.fail("Error when running scan vm method: `{}`".format(response.task.message))
return response.task.state.lower() == 'finished'
COLLECTIONS_IGNORED_53 = {
"availability_zones", "conditions", "events", "flavors", "policy_actions", "security_groups",
"tags", "tasks",
}
COLLECTIONS_IGNORED_54 = {
"features", "pictures", "provision_dialogs", "rates", "results", "service_dialogs",
}
@pytest.mark.tier(3)
@pytest.mark.parametrize(
"collection_name",
["availability_zones", "chargebacks", "clusters", "conditions", "data_stores", "events",
"features", "flavors", "groups", "hosts", "pictures", "policies", "policy_actions",
"policy_profiles", "provision_dialogs", "rates", "request_tasks", "requests", "resource_pools",
"results", "roles", "security_groups", "servers", "service_dialogs", "service_requests",
"tags", "tasks", "templates", "users", "vms", "zones"])
@pytest.mark.uncollectif(
lambda collection_name: (
collection_name in COLLECTIONS_IGNORED_53 and current_version() < "5.4") or (
collection_name in COLLECTIONS_IGNORED_54 and current_version() < "5.5"))
def test_query_simple_collections(rest_api, collection_name):
"""This test tries to load each of the listed collections. 'Simple' collection means that they
have no usable actions that we could try to run
Steps:
* GET /api/<collection_name>
Metadata:
test_flag: rest
"""
collection = getattr(rest_api.collections, collection_name)
collection.reload()
list(collection)
| gpl-2.0 | 6,124,307,325,183,130,000 | 34.535088 | 99 | 0.680573 | false |
scality/manila | manila/scheduler/filters/capabilities.py | 1 | 3548 | # Copyright (c) 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from manila.scheduler.filters import base_host
from manila.scheduler.filters import extra_specs_ops
LOG = logging.getLogger(__name__)
class CapabilitiesFilter(base_host.BaseHostFilter):
"""HostFilter to work with resource (instance & volume) type records."""
def _satisfies_extra_specs(self, capabilities, resource_type):
"""Compare capabilities against extra specs.
Check that the capabilities provided by the services satisfy
the extra specs associated with the resource type.
"""
extra_specs = resource_type.get('extra_specs', [])
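        # Illustrative (hypothetical) scoped extra spec entry:
        #   {'capabilities:thin_provisioning': '<is> True'}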
if not extra_specs:
return True
for key, req in extra_specs.items():
# Either not scoped format, or in capabilities scope
scope = key.split(':')
# Ignore scoped (such as vendor-specific) capabilities
if len(scope) > 1 and scope[0] != "capabilities":
continue
# Strip off prefix if spec started with 'capabilities:'
elif scope[0] == "capabilities":
del scope[0]
cap = capabilities
for index in range(len(scope)):
try:
cap = cap.get(scope[index])
except AttributeError:
cap = None
if cap is None:
LOG.debug("Host doesn't provide capability '%(cap)s' "
"listed in the extra specs",
{'cap': scope[index]})
return False
# Make all capability values a list so we can handle lists
cap_list = [cap] if not isinstance(cap, list) else cap
# Loop through capability values looking for any match
for cap_value in cap_list:
if extra_specs_ops.match(cap_value, req):
break
else:
# Nothing matched, so bail out
LOG.debug('Share type extra spec requirement '
'"%(key)s=%(req)s" does not match reported '
'capability "%(cap)s"',
{'key': key, 'req': req, 'cap': cap})
return False
return True
def host_passes(self, host_state, filter_properties):
"""Return a list of hosts that can create resource_type."""
# Note(zhiteng) Currently only Cinder and Nova are using
# this filter, so the resource type is either instance or
# volume.
resource_type = filter_properties.get('resource_type')
if not self._satisfies_extra_specs(host_state.capabilities,
resource_type):
LOG.debug("%(host_state)s fails resource_type extra_specs "
"requirements", {'host_state': host_state})
return False
return True
| apache-2.0 | 9,135,078,643,539,611,000 | 39.318182 | 78 | 0.580045 | false |
antiface/audiolazy | examples/pi.py | 1 | 2141 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of AudioLazy, the signal processing Python package.
# Copyright (C) 2012-2014 Danilo de Jesus da Silva Bellini
#
# AudioLazy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Created on Sun May 05 2013
# danilo [dot] bellini [at] gmail [dot] com
"""
Calculate "pi" using the Madhava-Gregory-Leibniz series and Machin formula
"""
from __future__ import division, print_function
from audiolazy import Stream, thub, count, z, pi # For comparison
def mgl_seq(x):
"""
Sequence whose sum is the Madhava-Gregory-Leibniz series.
[x, -x^3/3, x^5/5, -x^7/7, x^9/9, -x^11/11, ...]
Returns
-------
An endless sequence that has the property
``atan(x) = sum(mgl_seq(x))``.
Usually you would use the ``atan()`` function, not this one.
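  For example, summing ``mgl_seq(1)`` converges to ``atan(1) = pi/4``.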
"""
odd_numbers = thub(count(start=1, step=2), 2)
return Stream(1, -1) * x ** odd_numbers / odd_numbers
def atan_mgl(x, n=10):
"""
Finds the arctan using the Madhava-Gregory-Leibniz series.
"""
acc = 1 / (1 - z ** -1) # Accumulator filter
return acc(mgl_seq(x)).skip(n-1).take()
if __name__ == "__main__":
print("Reference (for comparison):", repr(pi))
print()
print("Machin formula (fast)")
pi_machin = 4 * (4 * atan_mgl(1/5) - atan_mgl(1/239))
print("Found:", repr(pi_machin))
print("Error:", repr(abs(pi - pi_machin)))
print()
print("Madhava-Gregory-Leibniz series for 45 degrees (slow)")
pi_mgl_series = 4 * atan_mgl(1, n=1e6) # Sums 1,000,000 items...slow...
print("Found:", repr(pi_mgl_series))
print("Error:", repr(abs(pi - pi_mgl_series)))
print()
| gpl-3.0 | -3,385,673,187,416,630,000 | 31.439394 | 74 | 0.670715 | false |
rensimlab/rensimlab.github.io | tools/cloud_retrieve.py | 1 | 4546 | import argparse
import numpy as np
import os
import shutil
import signal
import subprocess
import sys
import tempfile
import yaml
import yt
rsl_page_root = os.environ.get(
'RSL_PAGE_ROOT', os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def calculate_datasets(es, field, val_list, val_range, file_list):
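    # Select dataset filenames whose field value (e.g. redshift) is closest to
    # each requested value, falls within the requested range, or whose
    # directory name appears in the explicit file list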
all_vals = es.data[field]
esfns = es.data["filename"].astype(str)
fns = []
if val_list is not None:
for value in val_list:
i = np.argmin(np.abs(all_vals - value))
fn = esfns[i]
if fn not in fns:
fns.append(fn)
if val_range is not None:
start, stop = val_range
istart = np.argmin(np.abs(all_vals - start))
istop = np.argmin(np.abs(all_vals - stop ))
inc = 1
if istart > istop:
inc = -1
rfns = esfns[istart:istop+inc:inc]
fns.extend([rfn for rfn in rfns if rfn not in fns])
if file_list is not None:
dirs = dict([(os.path.dirname(fn), fn) for fn in esfns])
fns.extend([dirs[f] for f in file_list if dirs[f] not in fns])
return fns
def download_dataset(cloud_object, cloud_path, data_path, timeout=None, tempdir=".", dryrun=False):
yt.mylog.info("Downloading %s %s to %s." % (cloud_object, cloud_path, data_path))
if dryrun: return
curdir = os.getcwd()
os.chdir(tempdir)
command = "swift download %s -p %s" % (cloud_object, cloud_path)
try:
proc = subprocess.run(command, shell=True, timeout=timeout)
        success = proc.returncode == 0
        if success:
            shutil.move(cloud_path, data_path)
except subprocess.TimeoutExpired:
yt.mylog.error("Download of %s timedout after %d seconds." %
(cloud_path, timeout))
success = False
except KeyboardInterrupt:
yt.mylog.error("Eject!")
success = False
if success:
os.chdir(curdir)
else:
shutil.rmtree(cloud_path)
os.chdir(curdir)
sys.exit(0)
def gather_datasets(args, config):
if args.redshift_list is None and \
args.redshift_range is None and \
args.dataset_list is None:
raise RuntimeError(
"Must specify a dataset-list, redshift-list, or redshift-range.")
if args.simulation not in config["simulations"]:
raise RuntimeError(
"%s not in available simulations: %s." %
(args.simulation, ", ".join(config["simulations"])))
esfn = os.path.join(config["data_dir"], args.simulation,
"rs_%s.h5" % args.simulation.lower())
if not os.path.exists(esfn):
raise RuntimeError("Simulation file not found: %s." % esfn)
es = yt.load(esfn)
fns = calculate_datasets(es, "redshift", args.redshift_list,
args.redshift_range, args.dataset_list)
for fn in fns:
dsfn = os.path.join(config["data_dir"], args.simulation, fn)
if os.path.exists(dsfn):
yt.mylog.info("%s already available, skipping." % fn)
else:
cloud_dir = os.path.join(
config["simulations"][args.simulation]["cloud_directory"],
os.path.dirname(fn))
download_dataset(
config["simulations"][args.simulation]["cloud_object"],
cloud_dir, os.path.join(config["data_dir"], args.simulation),
tempdir=config["temp_dir"], dryrun=args.dryrun)
if __name__ == "__main__":
cfgfn = os.path.join(rsl_page_root, "_data", "cloud.yaml")
with open(cfgfn) as f:
config = yaml.load(f)
parser = argparse.ArgumentParser(
description="Retrieve Renaissance simulation data from SDSC Cloud.")
parser.add_argument("simulation", type=str,
help="The target simulation.")
parser.add_argument("--dataset-list", type=str, nargs="+", metavar="files",
help="List of datasets to retrieve. Example: RD0013 RD0020")
parser.add_argument("--redshift-list", type=float, nargs="+", metavar="z",
help="List of redshifts to retrieve. Example: 15 16 17")
parser.add_argument("--redshift-range", type=float, nargs=2, metavar="z",
help="Redshift range to retrieve. Example: 15 20 for redshift 15-20")
parser.add_argument("--dryrun", default=False, action="store_true",
help="Simulated run. Do not actually download data.")
args = parser.parse_args()
gather_datasets(args, config)
| mit | 1,798,824,713,954,896,600 | 36.262295 | 99 | 0.594809 | false |
achernya/byobu | usr/lib/byobu/include/select-session.py | 1 | 6798 | #!/usr/bin/python3
#
# select-session.py
# Copyright (C) 2010 Canonical Ltd.
# Copyright (C) 2012-2014 Dustin Kirkland <[email protected]>
#
# Authors: Dustin Kirkland <[email protected]>
# Ryan C. Thompson <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import re
import sys
import subprocess
try:
# For Python3, try and import input from builtins
from builtins import input
except Exception:
# But fall back to using the default input
True
PKG = "byobu"
SHELL = os.getenv("SHELL", "/bin/bash")
HOME = os.getenv("HOME")
BYOBU_CONFIG_DIR = os.getenv("BYOBU_CONFIG_DIR", HOME + "/.byobu")
BYOBU_BACKEND = os.getenv("BYOBU_BACKEND", "tmux")
choice = -1
sessions = []
text = []
reuse_sessions = os.path.exists("%s/.reuse-session" % (BYOBU_CONFIG_DIR))
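# Environment variables refreshed inside an existing session on reattach
# (see update_environment below)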
BYOBU_UPDATE_ENVVARS = ["DISPLAY", "DBUS_SESSION_BUS_ADDRESS", "SESSION_MANAGER", "GPG_AGENT_INFO", "XDG_SESSION_COOKIE", "XDG_SESSION_PATH", "GNOME_KEYRING_CONTROL", "GNOME_KEYRING_PID", "GPG_AGENT_INFO", "SSH_ASKPASS", "SSH_AUTH_SOCK", "SSH_AGENT_PID", "WINDOWID", "UPSTART_JOB", "UPSTART_EVENTS", "UPSTART_SESSION", "UPSTART_INSTANCE"]
def get_sessions():
sessions = []
i = 0
output = False
if BYOBU_BACKEND == "screen":
try:
output = subprocess.Popen(["screen", "-ls"], stdout=subprocess.PIPE).communicate()[0]
except subprocess.CalledProcessError as cpe:
# screen -ls seems to always return 1
if cpe.returncode != 1:
raise
else:
output = cpe.output
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if output:
for s in output.splitlines():
s = re.sub(r'\s+', ' ', s)
# Ignore hidden sessions (named sessions that start with a "." or a "_")
if s and s != " " and (s.find(" ") == 0 and len(s) > 1 and s.count("..") == 0 and s.count("._") == 0):
text.append("screen: %s" % s.strip())
items = s.split(" ")
sessions.append("screen____%s" % items[1])
i += 1
if BYOBU_BACKEND == "tmux":
output = subprocess.Popen(["tmux", "list-sessions"], stdout=subprocess.PIPE).communicate()[0]
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if output:
for s in output.splitlines():
# Ignore hidden sessions (named sessions that start with a "_")
if s and not s.startswith("_"):
text.append("tmux: %s" % s.strip())
sessions.append("tmux____%s" % s.split(":")[0])
i += 1
return sessions
def cull_zombies(session_name):
# When using tmux session groups, closing a client will leave
# unattached "zombie" sessions that will never be reattached.
# Search for and kill any unattached hidden sessions in the same group
if BYOBU_BACKEND == "tmux":
output = subprocess.Popen(["tmux", "list-sessions"], stdout=subprocess.PIPE).communicate()[0]
if sys.stdout.encoding is None:
output = output.decode("UTF-8")
else:
output = output.decode(sys.stdout.encoding)
if not output:
return
# Find the master session to extract the group name. We use
# the group number to be extra sure the right session is getting
# killed. We don't want to accidentally kill the wrong one
pattern = "^%s:.+\\((group [^\\)]+)\\).*$" % session_name
master = re.search(pattern, output, re.MULTILINE)
if not master:
return
# Kill all the matching hidden & unattached sessions
pattern = "^_%s-\\d+:.+\\(%s\\)$" % (session_name, master.group(1))
for s in re.findall(pattern, output, re.MULTILINE):
subprocess.Popen(["tmux", "kill-session", "-t", s.split(":")[0]])
def update_environment(session):
backend, session_name = session.split("____", 2)
for var in BYOBU_UPDATE_ENVVARS:
value = os.getenv(var)
if value:
if backend == "tmux":
cmd = ["tmux", "setenv", "-t", session_name, var, value]
else:
cmd = ["screen", "-S", session_name, "-X", "setenv", var, value]
subprocess.call(cmd, stdout=open(os.devnull, "w"))
def attach_session(session):
update_environment(session)
backend, session_name = session.split("____", 2)
cull_zombies(session_name)
# must use the binary, not the wrapper!
if backend == "tmux":
if reuse_sessions:
os.execvp("tmux", ["tmux", "attach", "-t", session_name])
else:
os.execvp("tmux", ["tmux", "-2", "new-session", "-t", session_name, "-s", "_%s-%i" % (session_name, os.getpid())])
else:
os.execvp("screen", ["screen", "-AOxRR", session_name])
sessions = get_sessions()
show_shell = os.path.exists("%s/.always-select" % (BYOBU_CONFIG_DIR))
if len(sessions) > 1 or show_shell:
sessions.append("NEW")
text.append("Create a new Byobu session (%s)" % BYOBU_BACKEND)
sessions.append("SHELL")
text.append("Run a shell without Byobu (%s)" % SHELL)
if len(sessions) > 1:
sys.stdout.write("\nByobu sessions...\n\n")
tries = 0
while tries < 3:
i = 1
for s in text:
sys.stdout.write(" %d. %s\n" % (i, s))
i += 1
try:
try:
user_input = input("\nChoose 1-%d [1]: " % (i - 1))
except Exception:
user_input = ""
if not user_input or user_input == "":
choice = 1
break
try:
choice = int(user_input)
except Exception:
choice = int(eval(user_input))
if choice >= 1 and choice < i:
break
else:
tries += 1
choice = -1
sys.stderr.write("\nERROR: Invalid input\n")
except KeyboardInterrupt:
sys.stdout.write("\n")
sys.exit(0)
except Exception:
if choice == "" or choice == -1:
choice = 1
break
tries += 1
choice = -1
sys.stderr.write("\nERROR: Invalid input\n")
elif len(sessions) == 1:
# Auto-select the only session
choice = 1
if choice >= 1:
if sessions[choice - 1] == "NEW":
# Create a new session
if BYOBU_BACKEND == "tmux":
os.execvp("byobu", ["byobu", "new-session", SHELL])
else:
os.execvp("byobu", ["byobu", SHELL])
elif sessions[choice - 1] == "SHELL":
os.execvp(SHELL, [SHELL])
else:
# Attach to the chosen session; must use the binary, not the wrapper!
attach_session(sessions[choice - 1])
# No valid selection, default to the youngest session, create if necessary
if BYOBU_BACKEND == "tmux":
os.execvp("tmux", ["tmux"])
else:
os.execvp("screen", ["screen", "-AOxRR"])
| gpl-3.0 | -5,511,983,227,267,064,000 | 31.371429 | 338 | 0.649603 | false |
p-l-/miasm | miasm2/expression/simplifications_common.py | 1 | 19247 | # ----------------------------- #
# Common simplifications passes #
# ----------------------------- #
from miasm2.expression.expression import *
from miasm2.expression.expression_helper import *
def simp_cst_propagation(e_s, e):
    """This pass includes:
- Constant folding
- Common logical identities
- Common binary identities
"""
    # merge associative ops
if not isinstance(e, ExprOp):
return e
args = list(e.args)
op = e.op
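    # Illustrative rewrites performed below (same notation as the comments):
    #   int1 OP int2  => folded integer     (constant folding)
    #   A ^ A         => 0                  (logical identity)
    #   A + 0, A * 1  => A                  (binary identity)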
    # simplify integer manipulation
    # int OP int => int
    # TODO: <<< >>> << >> are architecture dependent
if op in op_propag_cst:
while (len(args) >= 2 and
isinstance(args[-1], ExprInt) and
isinstance(args[-2], ExprInt)):
i2 = args.pop()
i1 = args.pop()
if op == '+':
o = i1.arg + i2.arg
elif op == '*':
o = i1.arg * i2.arg
elif op == '^':
o = i1.arg ^ i2.arg
elif op == '&':
o = i1.arg & i2.arg
elif op == '|':
o = i1.arg | i2.arg
elif op == '>>':
o = i1.arg >> i2.arg
elif op == '<<':
o = i1.arg << i2.arg
elif op == 'a>>':
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 >> x2)
elif op == '>>>':
rounds = i2.arg
o = i1.arg >> i2.arg | i1.arg << (i1.size - i2.arg)
elif op == '<<<':
o = i1.arg << i2.arg | i1.arg >> (i1.size - i2.arg)
elif op == '/':
o = i1.arg / i2.arg
elif op == '%':
o = i1.arg % i2.arg
elif op == 'idiv':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
elif op == 'imod':
assert(i2.arg.arg)
x1 = mod_size2int[i1.arg.size](i1.arg)
x2 = mod_size2int[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'umod':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 % x2)
elif op == 'udiv':
assert(i2.arg.arg)
x1 = mod_size2uint[i1.arg.size](i1.arg)
x2 = mod_size2uint[i2.arg.size](i2.arg)
o = mod_size2uint[i1.arg.size](x1 / x2)
o = ExprInt_fromsize(i1.size, o)
args.append(o)
# bsf(int) => int
if op == "bsf" and isinstance(args[0], ExprInt) and args[0].arg != 0:
i = 0
while args[0].arg & (1 << i) == 0:
i += 1
return ExprInt_from(args[0], i)
# bsr(int) => int
if op == "bsr" and isinstance(args[0], ExprInt) and args[0].arg != 0:
i = args[0].size - 1
while args[0].arg & (1 << i) == 0:
i -= 1
return ExprInt_from(args[0], i)
# -(-(A)) => A
if op == '-' and len(args) == 1 and isinstance(args[0], ExprOp) and \
args[0].op == '-' and len(args[0].args) == 1:
return args[0].args[0]
# -(int) => -int
if op == '-' and len(args) == 1 and isinstance(args[0], ExprInt):
return ExprInt(-args[0].arg)
# A op 0 =>A
if op in ['+', '|', "^", "<<", ">>", "<<<", ">>>"] and len(args) > 1:
if isinstance(args[-1], ExprInt) and args[-1].arg == 0:
args.pop()
# A - 0 =>A
if op == '-' and len(args) > 1 and args[-1].arg == 0:
        assert(len(args) == 2)  # Op '-' with more than 2 args: SanityCheckError
return args[0]
# A * 1 =>A
if op == "*" and len(args) > 1:
if isinstance(args[-1], ExprInt) and args[-1].arg == 1:
args.pop()
    # for canonical form
# A * -1 => - A
if op == "*" and len(args) > 1:
if (isinstance(args[-1], ExprInt) and
args[-1].arg == (1 << args[-1].size) - 1):
args.pop()
args[-1] = - args[-1]
# op A => A
if op in ['+', '*', '^', '&', '|', '>>', '<<',
'a>>', '<<<', '>>>', 'idiv', 'imod', 'umod', 'udiv'] and len(args) == 1:
return args[0]
# A-B => A + (-B)
if op == '-' and len(args) > 1:
if len(args) > 2:
raise ValueError(
'sanity check fail on expr -: should have one or 2 args ' +
'%r %s' % (e, e))
return ExprOp('+', args[0], -args[1])
# A op 0 => 0
if op in ['&', "*"] and isinstance(args[1], ExprInt) and args[1].arg == 0:
return ExprInt_from(e, 0)
# - (A + B +...) => -A + -B + -C
if (op == '-' and
len(args) == 1 and
isinstance(args[0], ExprOp) and
args[0].op == '+'):
args = [-a for a in args[0].args]
e = ExprOp('+', *args)
return e
# -(a?int1:int2) => (a?-int1:-int2)
if (op == '-' and
len(args) == 1 and
isinstance(args[0], ExprCond) and
isinstance(args[0].src1, ExprInt) and
isinstance(args[0].src2, ExprInt)):
i1 = args[0].src1
i2 = args[0].src2
i1 = ExprInt_from(i1, -i1.arg)
i2 = ExprInt_from(i2, -i2.arg)
return ExprCond(args[0].cond, i1, i2)
i = 0
while i < len(args) - 1:
j = i + 1
while j < len(args):
# A ^ A => 0
if op == '^' and args[i] == args[j]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# A + (- A) => 0
if op == '+' and isinstance(args[j], ExprOp) and args[j].op == "-":
if len(args[j].args) == 1 and args[i] == args[j].args[0]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# (- A) + A => 0
if op == '+' and isinstance(args[i], ExprOp) and args[i].op == "-":
if len(args[i].args) == 1 and args[j] == args[i].args[0]:
args[i] = ExprInt_from(args[i], 0)
del(args[j])
continue
# A | A => A
if op == '|' and args[i] == args[j]:
del(args[j])
continue
# A & A => A
if op == '&' and args[i] == args[j]:
del(args[j])
continue
j += 1
i += 1
if op in ['|', '&', '%', '/'] and len(args) == 1:
return args[0]
# A <<< A.size => A
if (op in ['<<<', '>>>'] and
isinstance(args[1], ExprInt) and
args[1].arg == args[0].size):
return args[0]
    # A <<< X <<< Y => A <<< (X+Y) (or <<< >>>)
if (op in ['<<<', '>>>'] and
isinstance(args[0], ExprOp) and
args[0].op in ['<<<', '>>>']):
op1 = op
op2 = args[0].op
if op1 == op2:
op = op1
args1 = args[0].args[1] + args[1]
else:
op = op2
args1 = args[0].args[1] - args[1]
args0 = args[0].args[0]
args = [args0, args1]
# A >> X >> Y => A >> (X+Y)
if (op in ['<<', '>>'] and
isinstance(args[0], ExprOp) and
args[0].op == op):
args = [args[0].args[0], args[0].args[1] + args[1]]
# ((A & A.mask)
if op == "&" and args[-1] == e.mask:
return ExprOp('&', *args[:-1])
# ((A | A.mask)
if op == "|" and args[-1] == e.mask:
return args[-1]
# ! (!X + int) => X - int
# TODO
# ((A & mask) >> shift) whith mask < 2**shift => 0
if (op == ">>" and
isinstance(args[1], ExprInt) and
isinstance(args[0], ExprOp) and args[0].op == "&"):
if (isinstance(args[0].args[1], ExprInt) and
2 ** args[1].arg > args[0].args[1].arg):
return ExprInt_from(args[0], 0)
# parity(int) => int
if op == 'parity' and isinstance(args[0], ExprInt):
return ExprInt1(parity(args[0].arg))
# (-a) * b * (-c) * (-d) => (-a) * b * c * d
if op == "*" and len(args) > 1:
new_args = []
counter = 0
for a in args:
if isinstance(a, ExprOp) and a.op == '-' and len(a.args) == 1:
new_args.append(a.args[0])
counter += 1
else:
new_args.append(a)
if counter % 2:
return -ExprOp(op, *new_args)
args = new_args
# A << int with A ExprCompose => move index
if op == "<<" and isinstance(args[0], ExprCompose) and isinstance(args[1], ExprInt):
final_size = args[0].size
shift = int(args[1].arg)
new_args = []
# shift indexes
for expr, start, stop in args[0].args:
new_args.append((expr, start+shift, stop+shift))
# filter out expression
filter_args = []
min_index = final_size
for expr, start, stop in new_args:
if start >= final_size:
continue
if stop > final_size:
expr = expr[:expr.size - (stop - final_size)]
stop = final_size
filter_args.append((expr, start, stop))
min_index = min(start, min_index)
# create entry 0
expr = ExprInt_fromsize(min_index, 0)
filter_args = [(expr, 0, min_index)] + filter_args
return ExprCompose(filter_args)
# A >> int with A ExprCompose => move index
if op == ">>" and isinstance(args[0], ExprCompose) and isinstance(args[1], ExprInt):
final_size = args[0].size
shift = int(args[1].arg)
new_args = []
# shift indexes
for expr, start, stop in args[0].args:
new_args.append((expr, start-shift, stop-shift))
# filter out expression
filter_args = []
max_index = 0
for expr, start, stop in new_args:
if stop <= 0:
continue
if start < 0:
expr = expr[-start:]
start = 0
filter_args.append((expr, start, stop))
max_index = max(stop, max_index)
# create entry 0
expr = ExprInt_fromsize(final_size - max_index, 0)
filter_args += [(expr, max_index, final_size)]
return ExprCompose(filter_args)
# Compose(a) OP Compose(b) with a/b same bounds => Compose(a OP b)
if op in ['|', '&', '^'] and all([isinstance(arg, ExprCompose) for arg in args]):
bounds = set()
for arg in args:
bound = tuple([(start, stop) for (expr, start, stop) in arg.args])
bounds.add(bound)
if len(bounds) == 1:
bound = list(bounds)[0]
new_args = [[expr] for (expr, start, stop) in args[0].args]
for sub_arg in args[1:]:
for i, (expr, start, stop) in enumerate(sub_arg.args):
new_args[i].append(expr)
for i, arg in enumerate(new_args):
new_args[i] = ExprOp(op, *arg), bound[i][0], bound[i][1]
return ExprCompose(new_args)
return ExprOp(op, *args)
def simp_cond_op_int(e_s, e):
"Extract conditions from operations"
if not isinstance(e, ExprOp):
return e
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
if not isinstance(e.args[-1], ExprInt):
return e
a_int = e.args[-1]
conds = []
for a in e.args[:-1]:
if not isinstance(a, ExprCond):
return e
conds.append(a)
if not conds:
return e
c = conds.pop()
c = ExprCond(c.cond,
ExprOp(e.op, c.src1, a_int),
ExprOp(e.op, c.src2, a_int))
conds.append(c)
new_e = ExprOp(e.op, *conds)
return new_e
def simp_cond_factor(e_s, e):
"Merge similar conditions"
if not isinstance(e, ExprOp):
return e
if not e.op in ["+", "|", "^", "&", "*", '<<', '>>', 'a>>']:
return e
if len(e.args) < 2:
return e
conds = {}
not_conds = []
multi_cond = False
for a in e.args:
if not isinstance(a, ExprCond):
not_conds.append(a)
continue
c = a.cond
if not c in conds:
conds[c] = []
else:
multi_cond = True
conds[c].append(a)
if not multi_cond:
return e
c_out = not_conds[:]
for c, vals in conds.items():
new_src1 = [x.src1 for x in vals]
new_src2 = [x.src2 for x in vals]
src1 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src1))
src2 = e_s.expr_simp_wrapper(ExprOp(e.op, *new_src2))
c_out.append(ExprCond(c, src1, src2))
if len(c_out) == 1:
new_e = c_out[0]
else:
new_e = ExprOp(e.op, *c_out)
return new_e
def simp_slice(e_s, e):
"Slice optimization"
# slice(A, 0, a.size) => A
if e.start == 0 and e.stop == e.arg.size:
return e.arg
# Slice(int) => int
elif isinstance(e.arg, ExprInt):
total_bit = e.stop - e.start
mask = (1 << (e.stop - e.start)) - 1
return ExprInt_fromsize(total_bit, (e.arg.arg >> e.start) & mask)
# Slice(Slice(A, x), y) => Slice(A, z)
elif isinstance(e.arg, ExprSlice):
if e.stop - e.start > e.arg.stop - e.arg.start:
raise ValueError('slice in slice: getting more val', str(e))
new_e = ExprSlice(e.arg.arg, e.start + e.arg.start,
e.start + e.arg.start + (e.stop - e.start))
return new_e
# Slice(Compose(A), x) => Slice(A, y)
elif isinstance(e.arg, ExprCompose):
for a in e.arg.args:
if a[1] <= e.start and a[2] >= e.stop:
new_e = a[0][e.start - a[1]:e.stop - a[1]]
return new_e
# ExprMem(x, size)[:A] => ExprMem(x, a)
# XXXX todo hum, is it safe?
elif (isinstance(e.arg, ExprMem) and
e.start == 0 and
e.arg.size > e.stop and e.stop % 8 == 0):
e = ExprMem(e.arg.arg, size=e.stop)
return e
# distributivity of slice and &
# (a & int)[x:y] => 0 if int[x:y] == 0
elif (isinstance(e.arg, ExprOp) and
e.arg.op == "&" and
isinstance(e.arg.args[-1], ExprInt)):
tmp = e_s.expr_simp_wrapper(e.arg.args[-1][e.start:e.stop])
if isinstance(tmp, ExprInt) and tmp.arg == 0:
return tmp
# distributivity of slice and exprcond
# (a?int1:int2)[x:y] => (a?int1[x:y]:int2[x:y])
elif (isinstance(e.arg, ExprCond) and
isinstance(e.arg.src1, ExprInt) and
isinstance(e.arg.src2, ExprInt)):
src1 = e.arg.src1[e.start:e.stop]
src2 = e.arg.src2[e.start:e.stop]
e = ExprCond(e.arg.cond, src1, src2)
# (a * int)[0:y] => (a[0:y] * int[0:y])
elif (e.start == 0 and isinstance(e.arg, ExprOp) and
e.arg.op == "*" and isinstance(e.arg.args[-1], ExprInt)):
args = [e_s.expr_simp_wrapper(a[e.start:e.stop]) for a in e.arg.args]
e = ExprOp(e.arg.op, *args)
return e
def simp_compose(e_s, e):
"Commons simplification on ExprCompose"
args = merge_sliceto_slice(e.args)
out = []
# compose of compose
for a in args:
if isinstance(a[0], ExprCompose):
for x, start, stop in a[0].args:
out.append((x, start + a[1], stop + a[1]))
else:
out.append(a)
args = out
# Compose(a) with a.size = compose.size => a
if len(args) == 1 and args[0][1] == 0 and args[0][2] == e.size:
return args[0][0]
# {(X[X.size-z, 0, z), (0, z, X.size)} => (X >> x)
if (len(args) == 2 and
isinstance(args[1][0], ExprInt) and
args[1][0].arg == 0):
a1 = args[0]
a2 = args[1]
if (isinstance(a1[0], ExprSlice) and
a1[1] == 0 and a1[0].stop == a1[0].arg.size):
if a2[1] == a1[0].size and a2[2] == a1[0].arg.size:
new_e = a1[0].arg >> ExprInt_fromsize(
a1[0].arg.size, a1[0].start)
return new_e
    # Compose with ExprCond with integers for src1/src2 and integers =>
    # propagate integers
# {XXX?(0x0,0x1)?(0x0,0x1),0,8, 0x0,8,32} => XXX?(int1, int2)
ok = True
expr_cond = None
expr_ints = []
for i, a in enumerate(args):
if not is_int_or_cond_src_int(a[0]):
ok = False
break
expr_ints.append(a)
if isinstance(a[0], ExprCond):
if expr_cond is not None:
ok = False
expr_cond = i
cond = a[0]
if ok and expr_cond is not None:
src1 = []
src2 = []
for i, a in enumerate(expr_ints):
if i == expr_cond:
src1.append((a[0].src1, a[1], a[2]))
src2.append((a[0].src2, a[1], a[2]))
else:
src1.append(a)
src2.append(a)
src1 = e_s.apply_simp(ExprCompose(src1))
src2 = e_s.apply_simp(ExprCompose(src2))
if isinstance(src1, ExprInt) and isinstance(src2, ExprInt):
return ExprCond(cond.cond, src1, src2)
return ExprCompose(args)
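# Sketch of the compose rules above: a composition whose single element covers
# the full width, {A, 0, A.size}, collapses to A, and a composition of
# X[8:X.size] at bit 0 padded with a zero constant on top is rebuilt as X >> 8.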
def simp_cond(e_s, e):
"Common simplifications on ExprCond"
if not isinstance(e, ExprCond):
return e
    # eval exprcond src1/src2 with satisfiable/unsatisfiable condition
# propagation
if (not isinstance(e.cond, ExprInt)) and e.cond.size == 1:
src1 = e.src1.replace_expr({e.cond: ExprInt1(1)})
src2 = e.src2.replace_expr({e.cond: ExprInt1(0)})
if src1 != e.src1 or src2 != e.src2:
return ExprCond(e.cond, src1, src2)
# -A ? B:C => A ? B:C
if (isinstance(e.cond, ExprOp) and
e.cond.op == '-' and
len(e.cond.args) == 1):
e = ExprCond(e.cond.args[0], e.src1, e.src2)
# a?x:x
elif e.src1 == e.src2:
e = e.src1
# int ? A:B => A or B
elif isinstance(e.cond, ExprInt):
if e.cond.arg == 0:
e = e.src2
else:
e = e.src1
# a?(a?b:c):x => a?b:x
elif isinstance(e.src1, ExprCond) and e.cond == e.src1.cond:
e = ExprCond(e.cond, e.src1.src1, e.src2)
# a?x:(a?b:c) => a?x:c
elif isinstance(e.src2, ExprCond) and e.cond == e.src2.cond:
e = ExprCond(e.cond, e.src1, e.src2.src2)
# a|int ? b:c => b with int != 0
elif (isinstance(e.cond, ExprOp) and
e.cond.op == '|' and
isinstance(e.cond.args[1], ExprInt) and
e.cond.args[1].arg != 0):
return e.src1
# (C?int1:int2)?(A:B) =>
elif (isinstance(e.cond, ExprCond) and
isinstance(e.cond.src1, ExprInt) and
isinstance(e.cond.src2, ExprInt)):
int1 = e.cond.src1.arg.arg
int2 = e.cond.src2.arg.arg
if int1 and int2:
e = e.src1
elif int1 == 0 and int2 == 0:
e = e.src2
elif int1 == 0 and int2:
e = ExprCond(e.cond.cond, e.src2, e.src1)
elif int1 and int2 == 0:
e = ExprCond(e.cond.cond, e.src1, e.src2)
return e
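# Illustrative examples of the condition rules above (sketch):
#   (1 ? a : b)            -> a            # constant, non-zero condition
#   (c ? x : x)            -> x            # identical sources
#   (c ? (c ? b : d) : x)  -> c ? b : x    # nested condition on the same test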
| gpl-2.0 | -3,524,819,148,954,221,000 | 32.473043 | 88 | 0.470203 | false |
kpeckett/willie-trello | willie-trello.py | 1 | 1429 | # -*- coding: utf-8 -*-
"""
willie-trello.py - Enhanced Trello links
Licensed under the GNU GPLv3
Copyright (C) 2015 Kieran Peckett
"""
import willie.module
import requests
import time
import re
def setup(bot):
regex = re.compile(r".*\bhttps?://trello\.com/c/(\w+).*")
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = {regex: showTrelloInfo}
else:
exclude = bot.memory['url_callbacks']
exclude[regex] = showTrelloInfo
bot.memory['url_callbacks'] = exclude
@willie.module.rule(r".*https?://trello\.com/c/(\w+).*")
def showTrelloInfo(bot,trigger,found_match=None):
"""Shows info about a card on Trello"""
match = found_match or trigger
card_id = match.group(1)
url = "https://api.trello.com/1/card/" + card_id + "?fields=name,closed,desc,due,shortUrl"
response = requests.get(url)
if response.text == "unauthorized card permission requested":
bot.say("Private Trello Card")
else:
data = response.json()
output = data["name"] # Add name of card
# Add first 50 chars or less of description
if len(data["desc"]) > 50:
output += " | " + data["desc"][0:75] + u"…" # Add ellipsis at end
elif data["desc"] == "":
output += " | No Description"
else:
output += " | " + data["desc"]
if data["due"] == None:
output += " | No Due Date"
else:
due_date = data["due"][0:10]
output += " | Due: " + due_date
output += " | " + data["shortUrl"]
bot.say(output)
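# Example of a message built above (sketch; card data and short URL are made up):
#   "Weekly review | Prepare the agenda… | Due: 2015-06-01 | http://git.io/vXYZ1"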
| gpl-3.0 | -7,577,456,725,166,876,000 | 30.021739 | 91 | 0.644008 | false |
kubernetes-client/python | kubernetes/client/models/v1_aws_elastic_block_store_volume_source.py | 1 | 8002 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.18
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1AWSElasticBlockStoreVolumeSource(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'fs_type': 'str',
'partition': 'int',
'read_only': 'bool',
'volume_id': 'str'
}
attribute_map = {
'fs_type': 'fsType',
'partition': 'partition',
'read_only': 'readOnly',
'volume_id': 'volumeID'
}
def __init__(self, fs_type=None, partition=None, read_only=None, volume_id=None, local_vars_configuration=None): # noqa: E501
"""V1AWSElasticBlockStoreVolumeSource - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._fs_type = None
self._partition = None
self._read_only = None
self._volume_id = None
self.discriminator = None
if fs_type is not None:
self.fs_type = fs_type
if partition is not None:
self.partition = partition
if read_only is not None:
self.read_only = read_only
self.volume_id = volume_id
@property
def fs_type(self):
"""Gets the fs_type of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:return: The fs_type of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:rtype: str
"""
return self._fs_type
@fs_type.setter
def fs_type(self, fs_type):
"""Sets the fs_type of this V1AWSElasticBlockStoreVolumeSource.
Filesystem type of the volume that you want to mount. Tip: Ensure that the filesystem type is supported by the host operating system. Examples: \"ext4\", \"xfs\", \"ntfs\". Implicitly inferred to be \"ext4\" if unspecified. More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:param fs_type: The fs_type of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:type: str
"""
self._fs_type = fs_type
@property
def partition(self):
"""Gets the partition of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). # noqa: E501
:return: The partition of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:rtype: int
"""
return self._partition
@partition.setter
def partition(self, partition):
"""Sets the partition of this V1AWSElasticBlockStoreVolumeSource.
The partition in the volume that you want to mount. If omitted, the default is to mount by volume name. Examples: For volume /dev/sda1, you specify the partition as \"1\". Similarly, the volume partition for /dev/sda is \"0\" (or you can leave the property empty). # noqa: E501
:param partition: The partition of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:type: int
"""
self._partition = partition
@property
def read_only(self):
"""Gets the read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:return: The read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:rtype: bool
"""
return self._read_only
@read_only.setter
def read_only(self, read_only):
"""Sets the read_only of this V1AWSElasticBlockStoreVolumeSource.
Specify \"true\" to force and set the ReadOnly property in VolumeMounts to \"true\". If omitted, the default is \"false\". More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:param read_only: The read_only of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:type: bool
"""
self._read_only = read_only
@property
def volume_id(self):
"""Gets the volume_id of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
Unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:return: The volume_id of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:rtype: str
"""
return self._volume_id
@volume_id.setter
def volume_id(self, volume_id):
"""Sets the volume_id of this V1AWSElasticBlockStoreVolumeSource.
Unique ID of the persistent disk resource in AWS (Amazon EBS volume). More info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore # noqa: E501
:param volume_id: The volume_id of this V1AWSElasticBlockStoreVolumeSource. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and volume_id is None: # noqa: E501
raise ValueError("Invalid value for `volume_id`, must not be `None`") # noqa: E501
self._volume_id = volume_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1AWSElasticBlockStoreVolumeSource):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1AWSElasticBlockStoreVolumeSource):
return True
return self.to_dict() != other.to_dict()
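# Minimal usage sketch (the volume ID below is a placeholder, not a real volume):
#   ebs = V1AWSElasticBlockStoreVolumeSource(volume_id="vol-0123456789abcdef0",
#                                            fs_type="ext4", read_only=False)
#   ebs.to_dict()  # -> {'fs_type': 'ext4', 'partition': None, 'read_only': False, ...}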
| apache-2.0 | 5,675,701,868,138,536,000 | 37.657005 | 329 | 0.631342 | false |
jangxyz/ecogwiki-client | setup.py | 1 | 1523 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Ecogwiki OAuth client
`ecog` is a python client that talks with [ecogwiki](http://www.ecogwiki.com/). It is configurable to talk with any other ecogwiki hosts.
See https://github.com/jangxyz/ecogwiki-client for details.
"""
from setuptools import setup
def read_version():
import sys
import importlib
sys.path.insert(0, 'ecog')
try:
v = importlib.import_module('version')
return v.__version__
finally:
sys.path.pop(0)
setup(name='ecog',
version=read_version(),
author = 'Jang-hwan Kim',
author_email = '[email protected]',
description = 'Ecogwiki OAuth client',
long_description = __doc__,
url = 'https://github.com/jangxyz/ecogwiki-client',
packages = ['ecog'],
scripts = ['scripts/ecog'],
install_requires = ['oauth2', 'feedparser', 'python-dateutil'],
license = 'MIT License',
platforms = ['POSIX'],
keywords = ['oauth', 'markdown'],
classifiers = [line.strip() for line in '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: Developers
Intended Audience :: End Users/Desktop
License :: OSI Approved :: MIT License
Natural Language :: English
Operating System :: POSIX
Programming Language :: Python :: 2.7
Topic :: Communications
Topic :: Terminals
Topic :: Text Processing
Topic :: Utilities
'''.strip().splitlines()]
)
| mit | 8,209,925,821,725,269,000 | 26.196429 | 141 | 0.61392 | false |
nuagenetworks/vspk-python | vspk/v5_0/nupolicydecision.py | 1 | 11345 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from .fetchers import NUQOSsFetcher
from bambou import NURESTObject
class NUPolicyDecision(NURESTObject):
""" Represents a PolicyDecision in the VSD
Notes:
This object is a read only object that provides the policy decisions for a particular VM interface.
"""
__rest_name__ = "policydecision"
__resource_name__ = "policydecisions"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a PolicyDecision instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> policydecision = NUPolicyDecision(id=u'xxxx-xxx-xxx-xxx', name=u'PolicyDecision')
>>> policydecision = NUPolicyDecision(data=my_dict)
"""
super(NUPolicyDecision, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._egress_acls = None
self._egress_qos = None
self._fip_acls = None
self._ingress_acls = None
self._ingress_adv_fwd = None
self._entity_scope = None
self._qos = None
self._stats = None
self._external_id = None
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="egress_acls", remote_name="egressACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="egress_qos", remote_name="egressQos", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="fip_acls", remote_name="fipACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="ingress_acls", remote_name="ingressACLs", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="ingress_adv_fwd", remote_name="ingressAdvFwd", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="qos", remote_name="qos", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="stats", remote_name="stats", attribute_type=dict, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.qoss = NUQOSsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def egress_acls(self):
""" Get egress_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `egressACLs` in VSD API.
"""
return self._egress_acls
@egress_acls.setter
def egress_acls(self, value):
""" Set egress_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `egressACLs` in VSD API.
"""
self._egress_acls = value
@property
def egress_qos(self):
""" Get egress_qos value.
Notes:
Egress QoS primitive that was selected
This attribute is named `egressQos` in VSD API.
"""
return self._egress_qos
@egress_qos.setter
def egress_qos(self, value):
""" Set egress_qos value.
Notes:
Egress QoS primitive that was selected
This attribute is named `egressQos` in VSD API.
"""
self._egress_qos = value
@property
def fip_acls(self):
""" Get fip_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `fipACLs` in VSD API.
"""
return self._fip_acls
@fip_acls.setter
def fip_acls(self, value):
""" Set fip_acls value.
Notes:
List of actual Egress ACLs that will be applied on the interface of this VM
This attribute is named `fipACLs` in VSD API.
"""
self._fip_acls = value
@property
def ingress_acls(self):
""" Get ingress_acls value.
Notes:
List of actual Ingress ACLs that will be applied on the interface of this VM
This attribute is named `ingressACLs` in VSD API.
"""
return self._ingress_acls
@ingress_acls.setter
def ingress_acls(self, value):
""" Set ingress_acls value.
Notes:
List of actual Ingress ACLs that will be applied on the interface of this VM
This attribute is named `ingressACLs` in VSD API.
"""
self._ingress_acls = value
@property
def ingress_adv_fwd(self):
""" Get ingress_adv_fwd value.
Notes:
List of actual Ingress Redirect ACLs that will be applied on the interface of this VM
This attribute is named `ingressAdvFwd` in VSD API.
"""
return self._ingress_adv_fwd
@ingress_adv_fwd.setter
def ingress_adv_fwd(self, value):
""" Set ingress_adv_fwd value.
Notes:
List of actual Ingress Redirect ACLs that will be applied on the interface of this VM
This attribute is named `ingressAdvFwd` in VSD API.
"""
self._ingress_adv_fwd = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def qos(self):
""" Get qos value.
Notes:
QoS primitive that was selected based on inheritance policies
"""
return self._qos
@qos.setter
def qos(self, value):
""" Set qos value.
Notes:
QoS primitive that was selected based on inheritance policies
"""
self._qos = value
@property
def stats(self):
""" Get stats value.
Notes:
Stats primitive that was selected based on inheritance policies
"""
return self._stats
@stats.setter
def stats(self, value):
""" Set stats value.
Notes:
Stats primitive that was selected based on inheritance policies
"""
self._stats = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
| bsd-3-clause | -3,218,528,264,936,086,000 | 28.857895 | 175 | 0.58052 | false |
AlCap23/Thesis | Python/Experiments/MIMO/titostudy_extern_PTN_H05_TICONST.py | 1 | 11238 | """
Python program to study the robustness of TITO systems.
Identifies the system, computes the controller and analyses the controller using the state space - transfer function relation.
Computes the singular values.
Use this script from the terminal / console with
python FILENAME.py --file_storage=FOLDERNAME
to store essential information with sacred
"""
# Import the packages
# Import numpy
import numpy as np
# Import pandas
import pandas as pd
# Import linear regression model
from scipy import stats
# Import the Algorithms
import sys
#sys.path.append('../../')
import Algorithms as alg
# Import the MOBA Simulator
import MoBASimulator as mb
# Plotting
import pylab as p
# Define an experiment
from sacred import Experiment
###########################################################
########################## MAIN PROGRAM ###################
###########################################################
# Define a Sacred Experiment
ex = Experiment()
###########################################################
########################## CONFIG #########################
###########################################################
@ex.config
def experimental_setup():
# Filename to store in
filename = 'H05_TICONST.csv'
# Overall sample_size
sample_size = 9000
# Max degree
max_deg = 9
# Gain Limits
gain_limits = [-20., 20.0]
# Lag Limits
lag_limits = [50,100]
# Delay Limits, if small approx. no delay
delay_limits = [10,30]
# Step size for simulate
dt = 0.01
# Final time for simulation
t_sim = 1500
# Maximum Interaction
H = 0.5*np.eye(2,2)
    # Frequency grid parameters (log10 exponents and number of points)
wmin = -5
wmax = 3
dw = 10000
# Special frequencies
w_special = np.array([0.01, 0.02, 0.1, 1./150., 1./200.])
# Store the results
# System Order, Maximum Sensitivity, corresponding Frequency, MS_w0.1, MS_w0.5, MS_w1, Grad_MS(w0.1...1)
columns = ['Degree','MS_RGA','w_MS_RGA','Grad_RGA','MS_A', 'w_MS_A','Grad_A', 'MS_D','w_MS_D', 'Grad_D']
# Add the special frequencies
for freq in range(0, w_special.shape[0]):
columns.append('w_'+str(w_special[freq])+'_RGA')
columns.append('w_'+str(w_special[freq])+'_A')
columns.append('w_'+str(w_special[freq])+'_D')
# Make empty data frame with zeros
R = pd.DataFrame(data = np.zeros((sample_size, len(columns))), columns = columns)
###########################################################
################## CREATE VARIABLES #######################
###########################################################
# Create the gain
k = np.random.uniform(gain_limits[0], gain_limits[1],(sample_size,2,2))
num = np.zeros_like(k)
# Delay
l = np.random.uniform(delay_limits[0], delay_limits[1], (sample_size,2,2))
# Create random time constants
t = np.random.uniform(lag_limits[0],lag_limits[1],(sample_size,2,2))
den = np.zeros((sample_size, 2, 2, max_deg+1))
# Loop over the samples and estimate even distribution over degree
for samples in range(0, sample_size):
# Compute current order, from 1 to ...
degree = int(1.0*samples/sample_size * max_deg) + 1
# Loop over outputs
for outputs in range(0,2):
# Loop over inputs
for inputs in range(0,2):
# Compute the distances between the random time constants
# Sort until current degree
dist = float(t[samples, outputs, inputs]/degree) * np.ones(degree)
# Insert a zero for the first distance
#dist = np.insert(dist, [0], 0.0)
# Calculate the distance
#dist = np.ediff1d(dist)
# Calculate a stable polynomial, which highest coefficient is normed!!!
den[samples, outputs, inputs, :(degree+1)] = np.polynomial.polynomial.polyfromroots(-1./dist)
# Hence, normalize the gain with the highest coefficient
num[samples, outputs, inputs] = k[samples, outputs, inputs] * den[samples, outputs, inputs, 0]
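                # Sketch of the construction above: polyfromroots returns the
                # coefficients in ascending powers, e.g. roots [-1., -2.] give
                # [2., 3., 1.] = (s+1)(s+2); setting num = k*den[0] keeps the
                # static gain num/den(0) equal to k.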
###########################################################
################## EXPERIMENT #############################
###########################################################
@ex.automain
def experiment(num, den, l, R, filename, sample_size, max_deg, dt, t_sim, H, wmin, wmax, dw, w_special):
# Loop over the samples, compute order like earlier
###########################################################
####################### INITIAL MODEL #####################
###########################################################
# Open Simulator
sim = mb.Simulator()
# Show the log window
sim.showLogWindow()
###########################################################
####################### SAMPLE LOOP #######################
###########################################################
# Set initial degree to zero
degree = 0
for samples in range(0, sample_size):
# Calculate the current degree
c_deg = int(1.0 * samples/sample_size * max_deg) +1
# Check if degree has changed
if degree < c_deg:
# Change degree
degree = c_deg
# Clear Simulator
#sim.clear()
# Load new model
sim.loadModel("/2_2_n"+str(degree)+"/Masterthesis_Models_mimo_0processmodel.fmu")
sim.setOperationMode('FMU for ModelExchange')
        # Preallocate identification parameters
K = np.zeros((2,2))
T = np.zeros((2,2))
L = np.zeros((2,2))
# Reload the model
sim.reloadModel()
# Set Simulation Parameter
daeSolverParams = sim.getDAESolverParameters()
daeSolverParams['absTol'] = 1e-7
daeSolverParams['relTol'] = 1e-8
sim.setDAESolverParameters(daeSolverParams)
###########################################################
####################### MODEL SETUP #######################
###########################################################
# Create a parameter list
params = {}
# Loop over the systems outputs
for outputs in range(0,2):
# Loop over the systems inputs
for inputs in range(0,2):
# Set system gain
params.update({"fmu.num["+str(outputs+1)+","+str(inputs+1)+",1]": num[samples][outputs][inputs]})
# Set system delay
params.update({"fmu.delay["+str(outputs+1)+","+str(inputs+1)+"]": l.item(samples,outputs,inputs)})
# Loop over denominator coefficients
for order in range(0, degree+1):
params.update({"fmu.den["+str(outputs+1)+","+str(inputs+1)+","+str(degree-order+1)+"]": den[samples][outputs][inputs][(order)]})
# Set the parameter
sim.set(params)
# Show the Parameter
#sim.showParameterDialog()
# Store the state space rep for later use
ss = sim.analyser_getStateSpaceForm()
###########################################################
####################### IDENTIFICATION ####################
###########################################################
# Setup first experiment Input 1 -> Output 1 and Output 2
sim.set({"fmu.u[1]": 1,"fmu.u[2]": 0})
# Simulation of the experiment
res = sim.simulate(dt, t_sim)
# Get the needed signals
y = res["fmu.y[1]"]
y2 = res["fmu.y[2]"]
u = res["fmu.u[1]"]
time = res["time"]
# Plot the system
#p.plot(time,y)
#p.plot(time,y2)
#p.show()
# Get TF from Input 1 to Output 1
K[0][0],T[0][0],L[0][0]=alg.Integral_Identification(y,u,time)
# Get TF from Input 1 to Output 2
K[1][0],T[1][0],L[1][0]=alg.Integral_Identification(y2,u,time)
# Setup second experiment Input 2 -> Output 1 and Output 2
# Reset the model state
sim.resetModelState()
# Input Parameter
sim.set({"fmu.u[1]": 0,"fmu.u[2]": 1})
# Simulation of the experiment
res = sim.simulate(dt, t_sim)
# Get the needed signals
y = res["fmu.y[1]"]
y2 = res["fmu.y[2]"]
u = res["fmu.u[2]"]
time = res["time"]
# Get TF from Input 2 to Output 1
K[0][1],T[0][1],L[0][1] = alg.Integral_Identification(y,u,time)
# Get TF from Input 2 to Output 2
K[1][1],T[1][1],L[1][1] = alg.Integral_Identification(y2,u,time)
# Print the System Parameter
# print(K,T,L)
###########################################################
####################### CONTROLLER DESIGN #################
###########################################################
# Loop over the three methods
for methods in range(0,3):
if methods == 0:
KY,B,D = alg.Control_Decentral(K,T,L, b = 0.)
elif methods == 1:
KY,B,D = alg.Control_Astrom(K,T,L,H, b = .0)
else:
KY,B,D = alg.Control_Decoupled(K,T,L,H ,b = .0)
###########################################################
####################### EVALUATION ########################
###########################################################
# Create a frequency range
omega = np.logspace(wmin, wmax, dw)
# Store the singular values
sv = np.zeros((2,omega.shape[0]))
# Loop over the frequency
for freq in range(0, omega.shape[0]):
# Evaluate the sensitivity at given frequency
S = alg.compute_sensitivity(ss, KY,B,D, omega[freq])
u, sv[:, freq], w = np.linalg.svd(np.abs(S))
# Clear variables
del u,w
# Find the maximum of the sensitivity
ms = np.max(sv)
# Get the corresponding frequency
omega_ms = omega[np.argmax(sv)]
# Print the sensitivity
#p.loglog(omega, sv[0,:])
#p.loglog(omega, sv[1,:])
#p.show()
# Compute the gradient of the maximal singular values
            # Compute the maximum singular value along all frequencies
sv_max = np.max(sv, axis=0)
# Compute the slope via linear regression
slope, intercept, r_value, p_value, std_err = stats.linregress(omega[np.where(omega<=1.0)], sv_max[np.where(omega<=1.0)])
# Clear variables
del intercept, r_value, p_value, std_err
# Evaluate at the special frequencies
ms_s = []
for freq in w_special:
# Evaluate the sensitivity at given frequency
S = alg.compute_sensitivity(ss, KY,B,D, freq)
u, v, w = np.linalg.svd(np.abs(S))
ms_s.append(np.max(v))
# Clear variables
del u,v,w
###########################################################
####################### STORE DATA ########################
###########################################################
# Store Degree
R.set_value(samples, 'Degree', degree)
if methods == 0:
# Store the maximum sensitivity
R.set_value(samples, 'MS_RGA', ms)
# Store the correspondig frequency
R.set_value(samples, 'w_MS_RGA', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
                    R.set_value(samples, 'w_'+str(w_special[freq])+'_RGA', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_RGA', slope)
elif methods == 1:
# Store the maximum sensitivity
R.set_value(samples, 'MS_A', ms)
# Store the correspondig frequency
R.set_value(samples, 'w_MS_A', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
                    R.set_value(samples, 'w_'+str(w_special[freq])+'_A', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_A', slope)
else:
# Store the maximum sensitivity
R.set_value(samples, 'MS_D', ms)
# Store the correspondig frequency
R.set_value(samples, 'w_MS_D', omega_ms)
# Store the maximum singular value at the special frequencies
for freq in range(0, w_special.shape[0]):
                    R.set_value(samples, 'w_'+str(w_special[freq])+'_D', ms_s[freq])
# Store the gradient
R.set_value(samples, 'Grad_D', slope)
# Store after every sample
R.to_csv(filename, sep=";")
| gpl-3.0 | 4,852,432,078,727,296,000 | 33.158055 | 133 | 0.555348 | false |
AlexPayment/github-sms-notifier | github_sms_notifier/github_sms_notifier.py | 1 | 7472 | import json
import re
from flask import Flask, flash, make_response
from flask.globals import request
from flask.templating import render_template
import requests
from twilio.rest import TwilioRestClient
PHONE_NUMBER_PATTERN = re.compile("^\\+?\\d{10,14}$")
PULL_REQUEST_OPENED = 'prOpened'
PULL_REQUEST_CLOSED = 'prClosed'
PULL_REQUEST_SYNCHRONIZE = 'prSynchronize'
PULL_REQUEST_REOPENED = 'prReopened'
REPOSITORIES = 'repositories'
REPOSITORY_PATTERN = re.compile("[A-Za-z0-9_\\.-]+/[A-Za-z0-9_\\.-]+")
SETTINGS_JSON_FILE_NAME = 'settings.json'
SETTINGS_TEMPLATE = 'settings.html'
TO_NUMBERS = 'toNumbers'
TWILIO_ACCOUNT_SID = 'twilioAccountSid'
TWILIO_AUTH_TOKEN = 'twilioAuthToken'
TWILIO_FROM_NUMBER = 'twilioFromNumber'
app = Flask(__name__)
short_urls = {}
@app.route('/')
def root():
return 'Thank you for using github-sms-notifier!'
@app.route('/admin', methods=['GET'])
def config():
settings = __read_settings()
return render_template(SETTINGS_TEMPLATE, settings=settings)
@app.route('/admin', methods=['POST'])
def save_config():
app.logger.debug(request.form)
pull_request_closed_enabled = False
if PULL_REQUEST_CLOSED in request.form:
pull_request_closed_enabled = True
pull_request_opened_enabled = False
if PULL_REQUEST_OPENED in request.form:
pull_request_opened_enabled = True
pull_request_reopened_enabled = False
if PULL_REQUEST_REOPENED in request.form:
pull_request_reopened_enabled = True
pull_request_synchronize_enabled = False
if PULL_REQUEST_SYNCHRONIZE in request.form:
pull_request_synchronize_enabled = True
settings = {TWILIO_ACCOUNT_SID: request.form[TWILIO_ACCOUNT_SID].strip(),
TWILIO_AUTH_TOKEN: request.form[TWILIO_AUTH_TOKEN].strip(),
TWILIO_FROM_NUMBER: request.form[TWILIO_FROM_NUMBER].strip(),
TO_NUMBERS: request.form[TO_NUMBERS].strip().split(), PULL_REQUEST_CLOSED: pull_request_closed_enabled,
PULL_REQUEST_OPENED: pull_request_opened_enabled, PULL_REQUEST_REOPENED: pull_request_reopened_enabled,
PULL_REQUEST_SYNCHRONIZE: pull_request_synchronize_enabled,
REPOSITORIES: request.form[REPOSITORIES].strip().split()}
errors = __validate_settings(settings)
if errors:
for error in errors:
flash(error, category='error')
else:
with open(SETTINGS_JSON_FILE_NAME, 'w+') as settings_file:
json.dump(settings, settings_file)
flash("Settings saved!")
return render_template(SETTINGS_TEMPLATE, settings=settings)
@app.route('/pullRequests', methods=['POST'])
def pull_requests():
settings = __read_settings()
if settings:
content = json.loads(request.data)
if 'pull_request' in content:
client = TwilioRestClient(settings[TWILIO_ACCOUNT_SID], settings[TWILIO_AUTH_TOKEN])
message = __build_sms_body(content)
app.logger.debug(request.data)
if message and not app.testing:
numbers = settings[TO_NUMBERS]
for number in numbers:
client.sms.messages.create(body=message, from_=settings[TWILIO_FROM_NUMBER], to=number)
else:
app.logger.warn("Not a pull request: {}".format(request.data))
else:
app.logger.warn("Cannot load settings.")
return make_response("", 204)
def __build_sms_body(request_body):
settings = __read_settings()
message_prefix = 'Pull request #' + str(request_body['number'])
message_suffix = request_body['repository']['full_name'] + ' ' + __get_short_url(
request_body['pull_request']['html_url'])
if request_body['action'] == 'opened':
if settings[PULL_REQUEST_OPENED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was opened in ' + message_suffix
elif request_body['action'] == 'closed':
if settings[PULL_REQUEST_CLOSED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was closed in ' + message_suffix
elif request_body['action'] == 'synchronize':
if settings[PULL_REQUEST_SYNCHRONIZE] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was synchronized in ' + message_suffix
elif request_body['action'] == 'reopened':
if settings[PULL_REQUEST_REOPENED] and __is_supported_repository(settings.get(REPOSITORIES),
request_body['repository']['full_name']):
return message_prefix + ' was reopened in ' + message_suffix
else:
return 'Unsupported action \'' + request_body['action'] + '\' occurred on pull request #' + str(
request_body['number']) + ' in ' + message_suffix
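# Example of a generated message (sketch; repository, number and URL are made up):
#   "Pull request #42 was opened in octocat/Hello-World http://git.io/vABC1"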
def __get_short_url(url):
if short_urls.get(url):
return short_urls[url]
payload = {'url': url}
r = requests.post('http://git.io', data=payload)
short_urls[url] = r.headers.get('Location')
return short_urls[url]
def __is_supported_repository(repositories_settings, notification_repository):
if not repositories_settings:
return True
for repository in repositories_settings:
if notification_repository == repository:
return True
return False
def __is_valid_phone_number(phone_number):
if PHONE_NUMBER_PATTERN.match(phone_number):
return True
else:
return False
def __is_valid_repository_name(repository_name):
if REPOSITORY_PATTERN.match(repository_name):
return True
else:
return False
def __read_settings():
settings = {}
with open(SETTINGS_JSON_FILE_NAME, 'r+') as settings_file:
try:
settings = json.load(settings_file)
except ValueError:
app.logger.warning("Cannot load configuration.")
return settings
def __validate_settings(settings):
errors = []
if not settings.get(TWILIO_ACCOUNT_SID):
errors.append('Twilio Account Sid is required')
if not settings.get(TWILIO_AUTH_TOKEN):
errors.append('Twilio Auth Token is required')
if not settings.get(TWILIO_FROM_NUMBER):
errors.append('Twilio From Number is required')
else:
if not __is_valid_phone_number(settings.get(TWILIO_FROM_NUMBER)):
errors.append("Invalid Twilio From Number: " + settings.get(TWILIO_FROM_NUMBER))
if not settings.get(TO_NUMBERS):
errors.append('Numbers to send SMS to is required')
else:
for to_number in settings.get(TO_NUMBERS):
if not __is_valid_phone_number(to_number):
errors.append("Invalid phone number: " + to_number)
if settings.get(REPOSITORIES):
for repository in settings.get(REPOSITORIES):
if not __is_valid_repository_name(repository):
errors.append("Invalid repository name format: " + repository)
return errors
if __name__ == '__main__':
app.secret_key = 'Uqtbl6HxgNWcJsuycuXtHQyR8ExiaNHm'
app.debug = True
app.run()
| mit | 1,487,203,907,290,717,700 | 36.928934 | 119 | 0.633565 | false |
unixnut/cpylmnl | cpylmnl/linux/genetlinkh.py | 1 | 1734 | # -*- coding: utf-8 -*-
import ctypes
from cpylmnl.nlstruct import NLStructure
import cpylmnl.linux.netlinkh as netlink
GENL_NAMSIZ = 16 # length of family name
GENL_MIN_ID = netlink.NLMSG_MIN_TYPE
GENL_MAX_ID = 1023
class Genlmsghdr(NLStructure):
"""struct genlmsghdr
"""
_fields_ = [("cmd", ctypes.c_uint8), # __u8 cmd
("version", ctypes.c_uint8), # __u8 version
("reserved", ctypes.c_uint16)] # __u16 reserved
GENL_HDR_LEN = netlink.NLMSG_ALIGN(ctypes.sizeof(Genlmsghdr))
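# Usage sketch (illustrative values): the generic netlink header is 4 bytes,
#   hdr = Genlmsghdr(cmd=CTRL_CMD_GETFAMILY, version=1, reserved=0)
#   assert ctypes.sizeof(hdr) == 4 and GENL_HDR_LEN == 4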
GENL_ADMIN_PERM = 0x01
GENL_CMD_CAP_DO = 0x02
GENL_CMD_CAP_DUMP = 0x04
GENL_CMD_CAP_HASPOL = 0x08
# List of reserved static generic netlink identifiers:
GENL_ID_GENERATE = 0
GENL_ID_CTRL = netlink.NLMSG_MIN_TYPE
GENL_ID_VFS_DQUOT = netlink.NLMSG_MIN_TYPE + 1
GENL_ID_PMCRAID = netlink.NLMSG_MIN_TYPE + 2
# Controller
# enum
CTRL_CMD_UNSPEC = 0
CTRL_CMD_NEWFAMILY = 1
CTRL_CMD_DELFAMILY = 2
CTRL_CMD_GETFAMILY = 3
CTRL_CMD_NEWOPS = 4
CTRL_CMD_DELOPS = 5
CTRL_CMD_GETOPS = 6
CTRL_CMD_NEWMCAST_GRP = 7
CTRL_CMD_DELMCAST_GRP = 8
CTRL_CMD_GETMCAST_GRP = 9
__CTRL_CMD_MAX = 10
CTRL_CMD_MAX = (__CTRL_CMD_MAX - 1)
# enum
CTRL_ATTR_UNSPEC = 0
CTRL_ATTR_FAMILY_ID = 1
CTRL_ATTR_FAMILY_NAME = 2
CTRL_ATTR_VERSION = 3
CTRL_ATTR_HDRSIZE = 4
CTRL_ATTR_MAXATTR = 5
CTRL_ATTR_OPS = 6
CTRL_ATTR_MCAST_GROUPS = 7
__CTRL_ATTR_MAX = 8
CTRL_ATTR_MAX = (__CTRL_ATTR_MAX - 1)
# enum
CTRL_ATTR_OP_UNSPEC = 0
CTRL_ATTR_OP_ID = 1
CTRL_ATTR_OP_FLAGS = 2
__CTRL_ATTR_OP_MAX = 3
CTRL_ATTR_OP_MAX = (__CTRL_ATTR_OP_MAX - 1)
# enum
CTRL_ATTR_MCAST_GRP_UNSPEC = 0
CTRL_ATTR_MCAST_GRP_NAME = 1
CTRL_ATTR_MCAST_GRP_ID = 2
__CTRL_ATTR_MCAST_GRP_MAX = 3
CTRL_ATTR_MCAST_GRP_MAX = (__CTRL_ATTR_MCAST_GRP_MAX - 1)
| lgpl-2.1 | -5,343,383,620,565,762,000 | 23.083333 | 64 | 0.672434 | false |
sagiss/txrm2nexus | txm2nexuslib/xrmnex.py | 1 | 44684 | #!/usr/bin/python
"""
(C) Copyright 2016-2017 Carlos Falcon, Zbigniew Reszela, Marc Rosanes
The program is distributed under the terms of the
GNU General Public License (or the Lesser GPL).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from OleFileIO_PL import *
import numpy as np
import h5py
import sys
import struct
import datetime
import re
import pkg_resources
#import pprint
from tinydb import Query
from operator import itemgetter
from txm2nexuslib.parser import get_db, get_file_paths
SAMPLEENC = 2
DETECTORENC_Z = 23
ENERGY = 27
CURRENT = 28
ENERGYENC = 30
class FilesOrganization(object):
def __init__(self):
pass
def get_samples(self, txm_txt_script, use_existing_db=False,
use_subfolders=True, organize_by_repetitions=False):
"""Organize the files by samples"""
#prettyprinter = pprint.PrettyPrinter(indent=4)
if use_subfolders:
print("Using Subfolders for finding the files")
else:
print("Searching files through the whole root path")
root_path = os.path.dirname(os.path.abspath(txm_txt_script))
db = get_db(txm_txt_script, use_existing_db=use_existing_db)
all_file_records = db.all()
#prettyprinter.pprint(all_file_records)
dates_samples_energies = []
for record in all_file_records:
dates_samples_energies.append((record["date"],
record["sample"],
record["energy"]))
dates_samples_energies = list(set(dates_samples_energies))
samples = {}
files_query = Query()
for date_sample_energie in dates_samples_energies:
files_raw_data = {}
files_for_sample_subdict = {}
date = date_sample_energie[0]
sample = date_sample_energie[1]
energy = date_sample_energie[2]
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.FF == False))
records_by_sample_and_energy = db.search(query_impl)
if not organize_by_repetitions:
zps_by_sample_and_e = [record["zpz"] for record in
records_by_sample_and_energy]
zpz_positions_by_sample_e = sorted(set(zps_by_sample_and_e))
for zpz in zpz_positions_by_sample_e:
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.zpz == zpz) &
(files_query.FF == False))
fn_by_zpz_query = db.search(query_impl)
sorted_fn_by_zpz_query = sorted(fn_by_zpz_query,
key=itemgetter('angle'))
files = get_file_paths(sorted_fn_by_zpz_query, root_path,
use_subfolders=use_subfolders)
files_raw_data[zpz] = files
else:
repetitions_by_sample_and_e = [record["repetition"] for record
in records_by_sample_and_energy]
repetitions_by_sample_and_e = sorted(set(
repetitions_by_sample_and_e))
for repetition in repetitions_by_sample_and_e:
query_impl = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.repetition == repetition) &
(files_query.FF == False))
fn_by_repetition_query = db.search(query_impl)
sorted_fn_by_repetition_query = sorted(
fn_by_repetition_query, key=itemgetter('angle'))
files = get_file_paths(sorted_fn_by_repetition_query,
root_path,
use_subfolders=use_subfolders)
files_raw_data[repetition] = files
# Get FF image records
fn_ff_query_by_energy = ((files_query.date == date) &
(files_query.sample == sample) &
(files_query.energy == energy) &
(files_query.FF == True))
query_output = db.search(fn_ff_query_by_energy)
files_FF = get_file_paths(query_output, root_path,
use_subfolders=use_subfolders)
files_for_sample_subdict['tomos'] = files_raw_data
files_for_sample_subdict['ff'] = files_FF
samples[date_sample_energie] = files_for_sample_subdict
#prettyprinter.pprint(samples)
return samples
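# Sketch of the dictionary returned by get_samples (based on the code above):
#   {(date, sample, energy): {'tomos': {zpz_or_repetition: [files sorted by angle]},
#                             'ff': [flat-field files]}}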
class validate_getter(object):
def __init__(self, required_fields):
self.required_fields = required_fields
def __call__(self, method):
def wrapped_method(xradia_file):
if not xradia_file.is_opened():
raise RuntimeError("XradiaFile is not opened")
for field in self.required_fields:
if not xradia_file.exists(field):
raise RuntimeError(
"%s does not exist in XradiaFile" % field)
return method(xradia_file)
return wrapped_method
class XradiaFile(object):
def __init__(self, file_name):
self.file_name = file_name
self.file = None
self._axes_names = None
self._no_of_images = None
self._no_of_axes = None
self._energyenc_name = None
self._image_width = None
self._image_height = None
self._data_type = None
self._det_zero = None
self._pixel_size = None
self._dates = None
self._axes_positions = None
def __enter__(self):
self.open()
return self
def __exit__(self, type, value, traceback):
self.close()
def is_opened(self):
return self.file is not None
def open(self):
self.file = OleFileIO(self.file_name)
def close(self):
self.file.close()
def exists(self, field):
return self.file.exists(field)
@validate_getter(["SampleInfo/SampleID"])
def get_sample_id(self):
stream = self.file.openstream('SampleInfo/SampleID')
data = stream.read()
struct_fmt = '<' + '50s'
sample_id = struct.unpack(struct_fmt, data)
if sample_id != 'Unknown':
sample_id = sample_id[0]
return sample_id
@validate_getter(["ImageInfo/PixelSize"])
def get_pixel_size(self):
if self._pixel_size is None:
stream = self.file.openstream('ImageInfo/PixelSize')
data = stream.read()
struct_fmt = '<1f'
pixel_size = struct.unpack(struct_fmt, data)
self._pixel_size = pixel_size[0]
return self._pixel_size
pixel_size = property(get_pixel_size)
@validate_getter(["ImageInfo/XrayMagnification"])
def get_xray_magnification(self):
stream = self.file.openstream('ImageInfo/XrayMagnification')
data = stream.read(4)
struct_fmt = '<1f'
xray_magnification = struct.unpack(struct_fmt, data)
xray_magnification = xray_magnification[0]
if (xray_magnification != 0.0):
pass
elif (xray_magnification == 0.0 and self.pixel_size != 0.0):
# magnification in micrometers
xray_magnification = 13.0 / self.pixel_size
else:
print("Magnification could not be deduced.")
xray_magnification = 0.0
return xray_magnification
@validate_getter(["PositionInfo/MotorPositions"])
def get_axes_positions(self):
if self._axes_positions is None:
stream = self.file.openstream('PositionInfo/MotorPositions')
data = stream.read(112)
struct_fmt = '<28f'
self._axes_positions = struct.unpack(struct_fmt, data)
return self._axes_positions
axes_positions = property(get_axes_positions)
def get_sample_distance(self):
return self.axes_positions[SAMPLEENC]
sample_distance = property(get_sample_distance)
def get_detector_distance(self):
return self.axes_positions[DETECTORENC_Z] * 1000 # from mm to um
detector_distance = property(get_detector_distance)
def get_distance(self):
if (self.sampleenc_name == "Sample Z" and
self.detectorenc_name == "Detector Z"):
distance = (self.det_zero + self.detector_distance +
self.sample_distance)
return distance
@validate_getter(["ImageData1/Image1"])
def get_image(self):
stream = self.file.openstream('ImageData1/Image1')
data = stream.read()
if self.data_type == 'uint16':
struct_fmt = "<{0:10}H".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
elif self.data_type == 'float':
struct_fmt = "<{0:10}f".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
return
image = np.flipud(np.reshape(imgdata, (self.image_height,
self.image_width), order='A'))
image = np.reshape(image, (1, self.image_height, self.image_width),
order='A')
return image
@validate_getter(["ImageData1/Image1"])
def get_image_2D(self):
stream = self.file.openstream('ImageData1/Image1')
data = stream.read()
if self.data_type == 'uint16':
struct_fmt = "<{0:10}H".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
elif self.data_type == 'float':
struct_fmt = "<{0:10}f".format(
self.image_height * self.image_width)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
return
image = np.flipud(np.reshape(imgdata, (self.image_height,
self.image_width), order='A'))
return image
@validate_getter(["PositionInfo/AxisNames"])
def get_axes_names(self):
if self._axes_names is None:
stream = self.file.openstream('PositionInfo/AxisNames')
data = stream.read()
lendatabytes = len(data)
formatstring = '<' + str(lendatabytes) + 'c'
struct_fmt = formatstring
axis_names_raw = struct.unpack(struct_fmt, data)
axis_names_raw = ''.join(axis_names_raw)
axis_names_raw = axis_names_raw.replace("\x00", " ")
self._axes_names = re.split('\s+\s+', axis_names_raw)
self._no_of_axes = len(self._axes_names) - 1
return self._axes_names
axes_names = property(get_axes_names)
def get_energyenc_name(self):
return self.axes_names[ENERGYENC]
energyenc_name = property(get_energyenc_name)
def get_energy_name(self):
return self.axes_names[ENERGY]
energy_name = property(get_energy_name)
def get_detectorenc_name(self):
return self.axes_names[DETECTORENC_Z]
detectorenc_name = property(get_detectorenc_name)
def get_sampleenc_name(self):
return self.axes_names[SAMPLEENC]
sampleenc_name = property(get_sampleenc_name)
def get_current_name(self):
return self.axes_names[CURRENT]
current_name = property(get_current_name)
def get_no_of_axes(self):
if self._no_of_axes is None:
self.get_axes_names()
return self._no_of_axes
no_of_axes = property(get_no_of_axes)
@validate_getter(["ImageInfo/NoOfImages"])
def get_no_of_images(self):
if self._no_of_images is None:
stream = self.file.openstream('ImageInfo/NoOfImages')
data = stream.read()
nimages = struct.unpack('<I', data)
self._no_of_images = np.int(nimages[0])
return self._no_of_images
no_of_images = property(get_no_of_images)
@validate_getter(["ImageInfo/ImageWidth"])
def get_image_width(self):
if self._image_width is None:
stream = self.file.openstream('ImageInfo/ImageWidth')
data = stream.read()
yimage = struct.unpack('<I', data)
self._image_width = np.int(yimage[0])
return self._image_width
image_width = property(get_image_width)
@validate_getter(["ImageInfo/ImageHeight"])
def get_image_height(self):
if self._image_height is None:
stream = self.file.openstream('ImageInfo/ImageHeight')
data = stream.read()
yimage = struct.unpack('<I', data)
self._image_height = np.int(yimage[0])
return self._image_height
image_height = property(get_image_height)
@validate_getter(["PositionInfo/MotorPositions"])
def get_machine_currents(self):
stream = self.file.openstream('PositionInfo/MotorPositions')
num_axes = len(self.axes_names) - 1
number_of_floats = num_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
currents = self.no_of_images * [0]
for i in range(self.no_of_images):
currents[i] = axis[self.no_of_axes * i + CURRENT] # In mA
return currents
@validate_getter([])
def get_energies(self):
if (self.energyenc_name.lower() == "energyenc"):
if self.file.exists('PositionInfo/MotorPositions'):
stream = self.file.openstream('PositionInfo/MotorPositions')
number_of_floats = self.no_of_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
energies = self.no_of_images * [0]
for i in range(self.no_of_images):
energies[i] = axis[self.no_of_axes * i + ENERGYENC] # In eV
# Energy for each image calculated from Energy motor ####
elif (self.energy_name == "Energy"):
if self.file.exists('PositionInfo/MotorPositions'):
stream = self.file.openstream('PositionInfo/MotorPositions')
number_of_floats = self.no_of_axes * self.no_of_images
struct_fmt = '<' + str(number_of_floats) + 'f'
number_of_bytes = number_of_floats * 4 # 4 bytes every float
data = stream.read(number_of_bytes)
axis = struct.unpack(struct_fmt, data)
energies = self.no_of_images * [0]
for i in range(self.no_of_images):
energies[i] = axis[self.no_of_axes * i + ENERGY] # In eV
# Energy for each image calculated from ImageInfo ####
elif self.file.exists('ImageInfo/Energy'):
stream = self.file.openstream('ImageInfo/Energy')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
try: # we found some txrm images (flatfields) with different encoding of data
energies = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack energies with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
energies = struct.unpack(struct_fmt, data)
else:
raise RuntimeError("There is no information about the energies at"
"which have been taken the different images.")
return energies
@validate_getter(["ImageInfo/ExpTimes"])
def get_exp_times(self):
stream = self.file.openstream('ImageInfo/ExpTimes')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
try: # we found some txrm images (flatfields) with different encoding of data
exp_times = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack exposure times with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
exp_times = struct.unpack(struct_fmt, data)
return exp_times
@validate_getter(['ImageInfo/Angles'])
def get_angles(self):
stream = self.file.openstream('ImageInfo/Angles')
data = stream.read()
struct_fmt = '<{0:10}f'.format(self.no_of_images)
angles = struct.unpack(struct_fmt, data)
return angles
@validate_getter(['ImageInfo/XPosition'])
def get_x_positions(self):
stream = self.file.openstream('ImageInfo/XPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack XPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(['ImageInfo/YPosition'])
def get_y_positions(self):
stream = self.file.openstream('ImageInfo/YPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack YPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(['ImageInfo/ZPosition'])
def get_z_positions(self):
stream = self.file.openstream('ImageInfo/ZPosition')
data = stream.read()
struct_fmt = "<{0:10}f".format(self.no_of_images)
# Found some txrm images with different encoding of data #
try:
positions = struct.unpack(struct_fmt, data)
except struct.error:
print >> sys.stderr, 'Unexpected data length (%i bytes). Trying to unpack ZPositions with: "f"+"36xf"*(nSampleFrames-1)' % len(
data)
struct_fmt = '<' + "f" + "36xf" * (self.no_of_images - 1)
positions = struct.unpack(struct_fmt, data)
return positions
@validate_getter(["ImageInfo/DataType"])
def get_data_type(self):
if self._data_type is None:
stream = self.file.openstream('ImageInfo/DataType')
data = stream.read()
struct_fmt = '<1I'
datatype = struct.unpack(struct_fmt, data)
datatype = int(datatype[0])
if datatype == 5:
self._data_type = 'uint16'
else:
self._data_type = 'float'
return self._data_type
data_type = property(get_data_type)
@validate_getter(["ImageInfo/Date"])
def get_single_date(self):
stream = self.file.openstream('ImageInfo/Date')
data = stream.read()
date = struct.unpack('<' + '17s23x', data)[0]
[day, hour] = date.split(" ")
[month, day, year] = day.split("/")
[hour, minute, second] = hour.split(":")
year = '20' + year
year = int(year)
month = int(month)
day = int(day)
hour = int(hour)
minute = int(minute)
second = int(second)
raw_time = datetime.datetime(year, month, day,
hour, minute, second)
time_iso = raw_time.isoformat()
return time_iso
@validate_getter(["ImageInfo/Date"])
def get_dates(self):
if self._dates is None:
stream = self.file.openstream('ImageInfo/Date')
data = stream.read()
self._dates = struct.unpack('<' + '17s23x' * self.no_of_images,
data)
return self._dates
dates = property(get_dates)
def get_start_date(self):
startdate = self.dates[0]
[day, hour] = startdate.split(" ")
[month, day, year] = day.split("/")
[hour, minute, second] = hour.split(":")
year = '20' + year
year = int(year)
month = int(month)
day = int(day)
hour = int(hour)
minute = int(minute)
second = int(second)
starttime = datetime.datetime(year, month, day,
hour, minute, second)
starttimeiso = starttime.isoformat()
return starttimeiso
def get_end_date(self):
enddate = self.dates[self.no_of_images - 1]
[endday, endhour] = enddate.split(" ")
[endmonth, endday, endyear] = endday.split("/")
[endhour, endminute, endsecond] = endhour.split(":")
endyear = '20' + endyear
endyear = int(endyear)
endmonth = int(endmonth)
endday = int(endday)
endhour = int(endhour)
endminute = int(endminute)
endsecond = int(endsecond)
endtime = datetime.datetime(endyear, endmonth, endday,
endhour, endminute, endsecond)
endtimeiso = endtime.isoformat()
return endtimeiso
def get_det_zero(self):
where_detzero = ("ConfigureBackup/ConfigCamera/" +
"Camera 1/ConfigZonePlates/DetZero")
if self._det_zero is None and self.file.exists(where_detzero):
stream = self.file.openstream("ConfigureBackup/ConfigCamera/" +
"Camera 1/ConfigZonePlates/DetZero")
data = stream.read()
if len(data) != 0:
struct_fmt = '<1f'
sample_to_detector_zero_enc = struct.unpack(struct_fmt, data)
self._det_zero = sample_to_detector_zero_enc[0]
else:
self._det_zero = 0
else:
self._det_zero = 0
return self._det_zero
det_zero = property(get_det_zero)
class xrmNXtomo(object):
definition = 'NXtomo'
# CCD detector pixelsize in micrometers
CCDdetector_pixelsize = 13
CCDdetector_pixelsize_unit = 'um'
def __init__(self, reader, ffreader, file_order, program_name,
hdf5_output_path=None, title='X-ray tomography',
zero_deg_in=None, zero_deg_final=None, sourcename='ALBA',
sourcetype='Synchrotron X-ray Source',
sourceprobe='x-ray', instrument='BL09 @ ALBA',
sample='Unknown'):
self.reader = reader
self.ff_reader = ffreader
if hdf5_output_path is None:
path = reader.get_sample_path()
else:
path = hdf5_output_path
sample_name = reader.get_sample_name()
splitted_file = sample_name.split('_')
sample_dir_name = '{0}_{1}'.format(splitted_file[0], splitted_file[1])
path = os.path.join(path, sample_dir_name)
if not os.path.exists(path):
os.makedirs(path)
self.hdf5_file_name = os.path.join(path, "%s.hdf5" % sample_name)
self.txrmhdf = h5py.File(self.hdf5_file_name, 'w')
self.filename_zerodeg_in = zero_deg_in
self.filename_zerodeg_final = zero_deg_final
self.nxentry = None
self.nxsample = None
self.nxmonitor = None
self.nxinstrument = None
self.nxdata = None
self.nxdetectorsample = None
self.nxsource = None
self.count_num_sequence = 0
self.num_sample_sequence = []
self.num_bright_sequence = []
self.num_dark_sequence = []
self.program_name = program_name
version = pkg_resources.get_distribution("txrm2nexus").version
self.program_version = version
self.title = title
self.sourcename = sourcename
self.sourcetype = sourcetype
self.sourceprobe = sourceprobe
self.instrument = instrument
self.sample = sample
self.file_order = list(file_order)
self.datatype_zerodeg = 'uint16'
self.numrows_zerodeg = 0
self.numcols_zerodeg = 0
self.filename_zerodeg_in = zero_deg_in
self.filename_zerodeg_final = zero_deg_final
self.numrows = 0
self.numcols = 0
self.nSampleFrames = 0
self.datatype = None
self.numrows_bright = 0
self.numcols_bright = 0
self.nFramesBright = 0
self.datatype_bright = 'uint16'
def convert_metadata(self):
self.nxentry = self.txrmhdf.create_group(self.definition)
self.nxentry.attrs['NX_class'] = "NXentry"
self.nxentry.create_dataset("title", data=self.title)
self.nxentry.create_dataset("definition", data=self.definition)
self.nxinstrument = self.nxentry.create_group("instrument")
self.nxsample = self.nxentry.create_group("sample")
self.nxmonitor = self.nxentry.create_group("control")
self.nxdata = self.nxentry.create_group("data")
self.nxmonitor.attrs['NX_class'] = "NXmonitor"
self.nxsample.attrs['NX_class'] = "NXsample"
self.nxdata.attrs['NX_class'] = "NXdata"
self.nxinstrument.attrs['NX_class'] = "NXinstrument"
self.nxinstrument['name'] = self.instrument
pixel_size = "%d %s" % (self.CCDdetector_pixelsize,
self.CCDdetector_pixelsize_unit)
self.nxinstrument['name'].attrs['CCD pixel size'] = pixel_size
self.nxsource= self.nxinstrument.create_group("source")
self.nxdetectorsample = self.nxinstrument.create_group("sample")
self.nxsource.attrs['NX_class'] = "NXsource"
self.nxdetectorsample.attrs['NX_class'] = "NXdetector"
self.nxinstrument['source']['name'] = self.sourcename
self.nxinstrument['source']['type'] = self.sourcetype
self.nxinstrument['source']['probe'] = self.sourceprobe
self.nxentry['program_name'] = self.program_name
self.nxentry['program_name'].attrs['version'] = self.program_version
self.nxentry['program_name'].attrs['configuration'] = \
(self.program_name + ' ' + ' '.join(sys.argv[1:]))
# Sample-ID
sample_name = self.reader.get_sample_name()
self.nxsample['name'] = sample_name
distance = self.reader.get_distance()
self.nxdetectorsample.create_dataset("distance", data=distance)
self.nxdetectorsample["distance"].attrs["units"] = "um"
# Pixel-size
pixel_size = self.reader.get_pixel_size()
self.nxdetectorsample.create_dataset("x_pixel_size",
data=pixel_size)
self.nxdetectorsample.create_dataset("y_pixel_size",
data=pixel_size)
self.nxdetectorsample["x_pixel_size"].attrs["units"] = "um"
self.nxdetectorsample["y_pixel_size"].attrs["units"] = "um"
# X-Ray Magnification
magnification = self.reader.get_xray_magnification()
self.nxdetectorsample['magnification'] = magnification
# Accelerator current for each image (machine current)
currents = self.reader.get_machine_currents()
self.nxdetectorsample['current'] = currents
self.nxdetectorsample['current'].attrs["units"] = "mA"
# Energy for each image:
energies = self.reader.get_energies()
self.nxsource["energy"] = energies
self.nxsource["energy"].attrs["units"] = "eV"
# Exposure Times
exptimes = self.reader.get_exp_times()
self.nxdetectorsample["ExpTimes"] = exptimes
self.nxdetectorsample["ExpTimes"].attrs["units"] = "s"
# Start and End Times
starttimeiso = self.reader.get_start_time()
self.nxentry['start_time'] = str(starttimeiso)
endtimeiso = self.reader.get_end_time()
self.nxentry['end_time'] = str(endtimeiso)
# Sample rotation angles
angles = self.reader.get_angles()
self.nxsample['rotation_angle'] = angles
self.nxsample["rotation_angle"].attrs["units"] = "degrees"
# h5py NeXus link
source_addr = '/NXtomo/sample/rotation_angle'
target_addr = 'rotation_angle'
self.nxsample['rotation_angle'].attrs['target'] = source_addr
self.nxdata._id.link(source_addr, target_addr, h5py.h5g.LINK_HARD)
        # X sample translation: nxsample['x_translation']
xpositions = self.reader.get_x_positions()
self.nxsample['x_translation'] = xpositions
self.nxsample['x_translation'].attrs['units'] = 'um'
        # Y sample translation: nxsample['y_translation']
ypositions = self.reader.get_y_positions()
self.nxsample['y_translation'] = ypositions
self.nxsample['y_translation'].attrs['units'] = 'um'
# Z sample translation: nxsample['z_translation']
zpositions = self.reader.get_z_positions()
self.nxsample['z_translation'] = zpositions
self.nxsample['z_translation'].attrs['units'] = 'um'
def _convert_samples(self):
self.numrows, self.numcols = self.reader.get_image_size()
data_type = self.reader.get_data_type()
self.nSampleFrames = self.reader.get_images_number()
if data_type == 'float':
self.datatype = 'float32'
else:
self.datatype = data_type
self.nxdetectorsample.create_dataset(
"data",
shape=(self.nSampleFrames,
self.numrows,
self.numcols),
chunks=(1,
self.numrows,
self.numcols),
dtype=self.datatype)
self.nxdetectorsample['data'].attrs[
'Data Type'] = self.datatype
self.nxdetectorsample[
'data'].attrs['Number of Frames'] = self.nSampleFrames
self.nxdetectorsample['data'].attrs[
'Image Height'] = self.numrows
self.nxdetectorsample['data'].attrs[
'Image Width'] = self.numcols
for numimage in range(self.nSampleFrames):
self.count_num_sequence = self.count_num_sequence + 1
tomoimagesingle = self.reader.get_image(numimage)
self.num_sample_sequence.append(
self.count_num_sequence)
self.nxdetectorsample['data'][numimage] = tomoimagesingle
if numimage % 20 == 0:
print('Image %i converted' % numimage)
if numimage + 1 == self.nSampleFrames:
print ('%i images converted\n' % self.nSampleFrames)
# h5py NeXus link
source_addr = '/NXtomo/instrument/sample/data'
target_addr = 'data'
self.nxdetectorsample['data'].attrs[
'target'] = source_addr
self.nxdata._id.link(source_addr, target_addr,
h5py.h5g.LINK_HARD)
def _convert_bright(self):
self.datatype_bright = self.ff_reader.get_data_type()
self.numrows_bright, self.numcols_bright = \
self.ff_reader.get_image_size()
self.nFramesBright = self.ff_reader.get_images_number()
self.nxbright = self.nxinstrument.create_group("bright_field")
self.nxbright.attrs['NX_class'] = "Unknown"
self.nxbright.create_dataset(
"data",
shape=(self.nFramesBright,
self.numrows_bright,
self.numcols_bright),
chunks=(1,
self.numrows_bright,
self.numcols_bright),
dtype=self.datatype_bright)
self.nxbright['data'].attrs['Data Type'] = \
self.datatype_bright
self.nxbright['data'].attrs['Image Height'] = \
self.numrows_bright
self.nxbright['data'].attrs['Image Width'] = \
self.numcols_bright
for numimage in range(self.nFramesBright):
if numimage + 1 == self.nFramesBright:
print ('%i Bright-Field images '
'converted\n' % self.nFramesBright)
self.count_num_sequence = self.count_num_sequence + 1
tomoimagesingle = self.ff_reader.get_image(numimage)
self.num_bright_sequence.append(self.count_num_sequence)
self.nxbright['data'][numimage] = tomoimagesingle
# Accelerator current for each image of FF (machine current)
ff_currents = self.ff_reader.get_machine_currents()
self.nxbright.create_dataset("current", data=ff_currents)
self.nxbright["current"].attrs["units"] = "mA"
# Exposure Times
exp_times = self.ff_reader.get_exp_times()
self.nxbright.create_dataset("ExpTimes", data=exp_times)
self.nxbright["ExpTimes"].attrs["units"] = "s"
def _convert_zero_deg_images(self, ole_zerodeg):
verbose = False
# DataType: 10 float; 5 uint16 (unsigned 16-bit (2-byte) integers)
if ole_zerodeg.exists('ImageInfo/DataType'):
stream = ole_zerodeg.openstream('ImageInfo/DataType')
data = stream.read()
struct_fmt = '<1I'
datatype_zerodeg = struct.unpack(struct_fmt, data)
datatype_zerodeg = int(datatype_zerodeg[0])
if datatype_zerodeg == 5:
self.datatype_zerodeg = 'uint16'
else:
self.datatype_zerodeg = 'float'
if verbose:
print "ImageInfo/DataType: %s " % self.datatype_zerodeg
else:
print("There is no information about DataType")
# Zero degrees data size
if (ole_zerodeg.exists('ImageInfo/NoOfImages') and
ole_zerodeg.exists('ImageInfo/ImageWidth') and
ole_zerodeg.exists('ImageInfo/ImageHeight')):
stream = ole_zerodeg.openstream('ImageInfo/ImageHeight')
data = stream.read()
yimage = struct.unpack('<I', data)
self.numrows_zerodeg = np.int(yimage[0])
if verbose:
print "ImageInfo/ImageHeight = %i" % yimage[0]
stream = ole_zerodeg.openstream('ImageInfo/ImageWidth')
data = stream.read()
ximage = struct.unpack('<I', data)
self.numcols_zerodeg = np.int(ximage[0])
if verbose:
print "ImageInfo/ImageWidth = %i" % ximage[0]
else:
print('There is no information about the 0 degrees image size '
'(ImageHeight, or about ImageWidth)')
if ole_zerodeg.exists('ImageData1/Image1'):
img_string = "ImageData1/Image1"
stream = ole_zerodeg.openstream(img_string)
data = stream.read()
if self.datatype == 'uint16':
struct_fmt = "<{0:10}H".format(self.numrows_zerodeg *
self.numcols_zerodeg)
imgdata = struct.unpack(struct_fmt, data)
elif self.datatype == 'float':
struct_fmt = "<{0:10}f".format(self.numrows_zerodeg *
self.numcols_zerodeg)
imgdata = struct.unpack(struct_fmt, data)
else:
print "Wrong data type"
imgdata_zerodeg = np.flipud(np.reshape(imgdata,
(self.numrows,
self.numcols),
order='A'))
else:
imgdata_zerodeg = 0
return imgdata_zerodeg
def convert_tomography(self):
# TODO: 0 degree images not implemented in xrm2nexs
if self.filename_zerodeg_in is not None:
ole_zerodeg_in = OleFileIO(self.filename_zerodeg_in)
image_zerodeg_in = self._convert_zero_deg_images(ole_zerodeg_in)
self.nxdetectorsample.create_dataset(
'0_degrees_initial_image',
data=image_zerodeg_in,
dtype=self.datatype_zerodeg)
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Data Type'] = self.datatype_zerodeg
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Image Height'] = self.numrows_zerodeg
self.nxdetectorsample['0_degrees_initial_image'].attrs[
'Image Width'] = self.numcols_zerodeg
print('Zero degrees initial image converted')
if self.filename_zerodeg_final is not None:
ole_zerodeg_final = OleFileIO(self.filename_zerodeg_final)
image_zerodeg_final = self._convert_zero_deg_images(
ole_zerodeg_final)
self.nxdetectorsample.create_dataset(
'0_degrees_final_image',
data=image_zerodeg_final,
dtype=self.datatype_zerodeg)
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Data Type'] = self.datatype_zerodeg
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Image Height'] = self.numrows_zerodeg
self.nxdetectorsample['0_degrees_final_image'].attrs[
'Image Width'] = self.numcols_zerodeg
print('Zero degrees final image converted')
print("\nConverting tomography image data from xrm(s) to NeXus HDF5.")
brightexists = False
darkexists = False
for file in self.file_order:
# Tomography Data Images
if file == 's':
self._convert_samples()
# Bright-Field
elif file == 'b':
brightexists = True
self._convert_bright()
# Post-Dark-Field
elif file == 'd':
darkexists = True
# TODO
pass
self.nxinstrument['sample']['sequence_number'] = \
self.num_sample_sequence
if brightexists:
self.nxinstrument['bright_field']['sequence_number'] = \
self.num_bright_sequence
if darkexists:
self.nxinstrument['dark_field']['sequence_number'] = \
self.num_dark_sequence
# NXMonitor data: Not used in TXM microscope.
# In the ALBA-BL09 case all the values will be set to 1.
monitor_size = self.nSampleFrames + self.nFramesBright
monitor_counts = np.ones(monitor_size, dtype=np.uint16)
self.nxmonitor['data'] = monitor_counts
# Flush and close the nexus file
self.txrmhdf.flush()
self.txrmhdf.close()
class xrmReader(object):
def __init__(self, file_names):
self.file_names = file_names
def get_images_number(self):
return len(self.file_names)
def get_pixel_size(self):
file_name = self.file_names[0]
with XradiaFile(file_name) as xrm_file:
return xrm_file.pixel_size
def get_exp_times(self):
exp_times = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
exp_times.extend(xrm_file.get_exp_times())
return exp_times
def get_machine_currents(self):
currents = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
currents.extend(xrm_file.get_machine_currents())
return currents
def get_energies(self):
energies = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
energies.extend(xrm_file.get_energies())
return energies
def get_start_time(self):
filename = self.file_names[0]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_start_date()
def get_end_time(self):
filename = self.file_names[-1]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_end_date()
def get_angles(self):
angles = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
angles.extend(xrm_file.get_angles())
return angles
def get_x_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_x_positions())
return positions
def get_y_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_y_positions())
return positions
def get_z_positions(self):
positions = []
for file_name in self.file_names:
with XradiaFile(file_name) as xrm_file:
positions.extend(xrm_file.get_z_positions())
return positions
def get_image(self, id):
"""
:param id: number of the images sequence
:return: image data
"""
filename = self.file_names[id]
with XradiaFile(filename) as xrm_file:
return xrm_file.get_image()
def get_distance(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_distance()
def get_sample_id(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_sample_id()
def get_xray_magnification(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.get_xray_magnification()
def get_data_type(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.data_type
def get_image_size(self):
filename = self.file_names[0]
# TODO: get the data from the first file
with XradiaFile(filename) as xrm_file:
return xrm_file.image_height, xrm_file.image_width
def get_sample_name(self):
filename = self.file_names[0]
file = filename.rsplit('/', 1)[1]
splitted_file = file.split('_')
tomo_name = splitted_file[1]
energy = splitted_file[2]
pos_ext = splitted_file[-1].find('.xrm')
conf = splitted_file[-1][:pos_ext]
return '{0}_{1}_{2}_{3}'.format(splitted_file[0],
tomo_name,
energy,
conf)
def get_sample_path(self):
filename = self.file_names[0]
path = filename.rsplit('/', 1)[0]
return path
| gpl-3.0 | -3,555,758,332,532,904,400 | 37.720971 | 143 | 0.567362 | false |
Teknologforeningen/tf-info | apps/reittiopas/views.py | 1 | 2286 | from django.shortcuts import render_to_response
from django.template import RequestContext
from django.http import HttpResponse
from django.conf import settings
from operator import itemgetter
from datetime import datetime, timedelta
import json
import urllib2
import re
# Get API user and token from settings
user = settings.REITTIOPAS_USER
token = settings.REITTIOPAS_TOKEN
stops = settings.REITTIOPAS_STOPS
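# Illustrative sketch (assumption, not part of the original project): the three
# settings read above are expected to look roughly like this in Django's
# settings.py; the credentials and stop codes shown are placeholders only.
#
#   REITTIOPAS_USER = 'myuser'
#   REITTIOPAS_TOKEN = 'mytoken'
#   REITTIOPAS_STOPS = ['1020453', '1020454']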
def index(request):
all_departures = []
for stop in stops:
try:
response = urllib2.urlopen("http://api.reittiopas.fi/hsl/prod/?user=%s&pass=%s&request=stop&code=%s"%(user,token,stop))
except:
return HttpResponse("Unable to access reittiopas API.", status=500)
try:
stop_departures = json.load(response)[0]
except ValueError as e:
return HttpResponse("Error parsing json from reittiopas", status=500)
# Parse line destinations from codes
lines_dict = {}
for item in stop_departures['lines']:
parts = item.split(':')
lines_dict[parts[0]] = parts[1]
# Parse departures
departures = []
for departure in stop_departures['departures']:
# Convert code to actual line number
departure['line'] = re.sub(r'^\d0*(\d?\w*) .*', r'\1',departure['code'])
departure['stop'] = stop_departures['name_fi']
# Add destination name to departure item
departure['dest'] = lines_dict[departure['code']]
# Create datetime object to sort departures by
if departure['time'] >= 2400:
departure['time'] = departure['time']-2400
dt = datetime.strptime('%d%d'%(departure['date'], departure['time']), "%Y%m%d%H%M")
departure['datetime'] = dt + timedelta(days=1)
else:
departure['datetime'] = datetime.strptime('%d%d'%(departure['date'], departure['time']), "%Y%m%d%H%M")
departures.append(departure)
all_departures = all_departures + departures
sorted_departures = sorted(all_departures, key=itemgetter('datetime'))[:10]
return render_to_response('reittiopas/index.html', {"departures": sorted_departures}, context_instance=RequestContext(request)) | bsd-3-clause | -5,200,406,112,815,346,000 | 36.491803 | 131 | 0.631234 | false |
quantumlib/Cirq | cirq-core/cirq/ops/diagonal_gate.py | 1 | 8669 | # Copyright 2021 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates the gate instance for a diagonal gate on any number of qubits.
The gate is used to create a (2^n)x(2^n) matrix with the diagonal elements
passed as a list.
"""
from typing import AbstractSet, Any, Iterator, List, Optional, Sequence, Tuple, TYPE_CHECKING, Union
import numpy as np
import sympy
from cirq import protocols, value
from cirq._compat import proper_repr
from cirq.ops import common_gates, raw_types, global_phase_op
if TYPE_CHECKING:
import cirq
def _fast_walsh_hadamard_transform(a: Tuple[Any, ...]) -> np.ndarray:
"""Fast Walsh–Hadamard Transform of an array."""
h = 1
a_ = np.array(a)
while h < len(a_):
for i in range(0, len(a_), h * 2):
for j in range(i, i + h):
x = a_[j]
y = a_[j + h]
a_[j] = x + y
a_[j + h] = x - y
h *= 2
return a_
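# For reference (added example, not part of the original module): a quick sanity
# check of the transform above -- a unit impulse spreads uniformly over all
# components.
#
#   >>> _fast_walsh_hadamard_transform((1, 0, 0, 0))
#   array([1, 1, 1, 1])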
def _gen_gray_code(n: int) -> Iterator[Tuple[int, int]]:
"""Generate the Gray Code from 0 to 2^n-1.
Each iteration yields a two-tuple, `(gray_code, bit_flip)`. `gray_code` is the decimal
representation of the gray code and `bit_flip` is the position of bits flipped for next
gray code.
"""
gray_code = 0
for i in range(1, 2 ** n):
next_gray = i ^ (i >> 1)
bit_flip = int(np.log2(gray_code ^ next_gray))
yield gray_code, bit_flip
gray_code = next_gray
yield gray_code, int(np.log2(gray_code))
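# For reference (added example, not part of the original module): the generator
# above yields (gray_code, bit_flip) pairs; for n=2 the full sequence is
#
#   >>> list(_gen_gray_code(2))
#   [(0, 0), (1, 1), (3, 0), (2, 1)]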
@value.value_equality()
class DiagonalGate(raw_types.Gate):
"""A gate given by a diagonal (2^n)\\times(2^n) matrix."""
def __init__(self, diag_angles_radians: Sequence[value.TParamVal]) -> None:
r"""A n-qubit gate with only diagonal elements.
This gate's off-diagonal elements are zero and it's on diagonal
elements are all phases.
Args:
diag_angles_radians: The list of angles on the diagonal in radians.
If these values are $(x_0, x_1, \ldots , x_N)$ then the unitary
has diagonal values $(e^{i x_0}, e^{i x_1}, \ldots, e^{i x_N})$.
"""
self._diag_angles_radians: Tuple[value.TParamVal, ...] = tuple(diag_angles_radians)
def _num_qubits_(self):
return int(np.log2(len(self._diag_angles_radians)))
def _is_parameterized_(self) -> bool:
return any(protocols.is_parameterized(angle) for angle in self._diag_angles_radians)
def _parameter_names_(self) -> AbstractSet[str]:
return {
name for angle in self._diag_angles_radians for name in protocols.parameter_names(angle)
}
def _resolve_parameters_(
self, resolver: 'cirq.ParamResolver', recursive: bool
) -> 'DiagonalGate':
return DiagonalGate(
protocols.resolve_parameters(self._diag_angles_radians, resolver, recursive)
)
def _has_unitary_(self) -> bool:
return not self._is_parameterized_()
def _unitary_(self) -> Optional[np.ndarray]:
if self._is_parameterized_():
return None
return np.diag([np.exp(1j * angle) for angle in self._diag_angles_radians])
def _apply_unitary_(self, args: 'protocols.ApplyUnitaryArgs') -> np.ndarray:
if self._is_parameterized_():
return NotImplemented
for index, angle in enumerate(self._diag_angles_radians):
subspace_index = args.subspace_index(big_endian_bits_int=index)
args.target_tensor[subspace_index] *= np.exp(1j * angle)
return args.target_tensor
def _circuit_diagram_info_(
self, args: 'cirq.CircuitDiagramInfoArgs'
) -> 'cirq.CircuitDiagramInfo':
rounded_angles = np.array(self._diag_angles_radians)
if args.precision is not None:
rounded_angles = rounded_angles.round(args.precision)
if len(rounded_angles) <= 4:
rounded_angles_str = ', '.join(proper_repr(angle) for angle in rounded_angles)
diag_str = f'diag({rounded_angles_str})'
else:
diag_str = ', '.join(proper_repr(angle) for angle in rounded_angles[:2])
diag_str += ', ..., '
diag_str += ', '.join(proper_repr(angle) for angle in rounded_angles[-2:])
diag_str = f'diag({diag_str})'
return protocols.CircuitDiagramInfo(
[diag_str] + ['#' + str(i) for i in range(2, self._num_qubits_() + 1)]
)
def __pow__(self, exponent: Any) -> 'DiagonalGate':
if not isinstance(exponent, (int, float, sympy.Basic)):
return NotImplemented
angles = []
for angle in self._diag_angles_radians:
mul_angle = protocols.mul(angle, exponent, NotImplemented)
angles.append(mul_angle)
return DiagonalGate(angles)
def _value_equality_values_(self) -> Any:
return tuple(self._diag_angles_radians)
def _decompose_for_basis(
self, index: int, bit_flip: int, theta: float, qubits: Sequence['cirq.Qid']
) -> Iterator[Union['cirq.ZPowGate', 'cirq.CXPowGate']]:
if index == 0:
return []
largest_digit = self._num_qubits_() - (len(bin(index)) - 2)
yield common_gates.rz(2 * theta)(qubits[largest_digit])
_flip_bit = self._num_qubits_() - bit_flip - 1
if _flip_bit < largest_digit:
yield common_gates.CNOT(qubits[largest_digit], qubits[_flip_bit])
elif _flip_bit > largest_digit:
yield common_gates.CNOT(qubits[_flip_bit], qubits[largest_digit])
def _decompose_(self, qubits: Sequence['cirq.Qid']) -> 'cirq.OP_TREE':
"""Decompose the n-qubit diagonal gates into CNOT and Rz gates.
A 3 qubits decomposition looks like
0: ───────────────────────────────────X───Rz(6)───X───Rz(7)───X───Rz(5)───X───Rz(4)───
│ │ │ │
1: ───────────X───Rz(3)───X───Rz(2)───@───────────┼───────────@───────────┼───────────
│ │ │ │
2: ───Rz(1)───@───────────@───────────────────────@───────────────────────@───────────
        where the angles in the Rz gates correspond to the fast Walsh-Hadamard transform
        of diagonal_angles in Gray code order.
        For n qubits the decomposition looks similar, but with 2^n-1 Rz gates and 2^n-2 CNOT gates.
The algorithm is implemented according to the paper:
Welch, Jonathan, et al. "Efficient quantum circuits for diagonal unitaries without
ancillas." New Journal of Physics 16.3 (2014): 033040.
https://iopscience.iop.org/article/10.1088/1367-2630/16/3/033040/meta
"""
if protocols.is_parameterized(self):
return NotImplemented
n = self._num_qubits_()
hat_angles = _fast_walsh_hadamard_transform(self._diag_angles_radians) / (2 ** n)
# There is one global phase shift between unitary matrix of the diagonal gate and the
# decomposed gates. On its own it is not physically observable. However, if using this
# diagonal gate for sub-system like controlled gate, it is no longer equivalent. Hence,
# we add global phase.
decomposed_circ: List[Any] = [
global_phase_op.GlobalPhaseOperation(np.exp(1j * hat_angles[0]))
]
for i, bit_flip in _gen_gray_code(n):
decomposed_circ.extend(self._decompose_for_basis(i, bit_flip, -hat_angles[i], qubits))
return decomposed_circ
def __repr__(self) -> str:
return 'cirq.DiagonalGate([{}])'.format(
','.join(proper_repr(angle) for angle in self._diag_angles_radians)
)
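# Usage sketch (an assumption for illustration, not part of the original file):
# a two-qubit diagonal gate built from four phase angles and decomposed into
# Rz/CNOT operations via the algorithm documented in _decompose_ above.
#
#   import cirq
#   gate = DiagonalGate([0.0, np.pi / 4, np.pi / 2, np.pi])
#   circuit = cirq.Circuit(cirq.decompose(gate.on(*cirq.LineQubit.range(2))))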
| apache-2.0 | 8,599,835,669,843,185,000 | 40.442211 | 100 | 0.5927 | false |
chromium/chromium | third_party/android_deps/libs/com_google_android_gms_play_services_basement/3pp/fetch.py | 6 | 1389 | #!/usr/bin/env python
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This is generated, do not edit. Update BuildConfigGenerator.groovy and
# 3ppFetch.template instead.
from __future__ import print_function
import argparse
import json
import os
_FILE_URL = 'https://maven.google.com/com/google/android/gms/play-services-basement/17.5.0/play-services-basement-17.5.0.aar'
_FILE_NAME = 'play-services-basement-17.5.0.aar'
_FILE_VERSION = '17.5.0'
def do_latest():
print(_FILE_VERSION)
def get_download_url(version):
if _FILE_URL.endswith('.jar'):
ext = '.jar'
elif _FILE_URL.endswith('.aar'):
ext = '.aar'
else:
raise Exception('Unsupported extension for %s' % _FILE_URL)
partial_manifest = {
'url': [_FILE_URL],
'name': [_FILE_NAME],
'ext': ext,
}
print(json.dumps(partial_manifest))
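# For reference (added note, not part of the original script): for this package
# the partial manifest printed above would look roughly like
#   {"url": ["https://maven.google.com/.../play-services-basement-17.5.0.aar"],
#    "name": ["play-services-basement-17.5.0.aar"], "ext": ".aar"}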
def main():
ap = argparse.ArgumentParser()
sub = ap.add_subparsers()
latest = sub.add_parser("latest")
latest.set_defaults(func=lambda _opts: do_latest())
download = sub.add_parser("get_url")
download.set_defaults(
func=lambda _opts: get_download_url(os.environ['_3PP_VERSION']))
opts = ap.parse_args()
opts.func(opts)
if __name__ == '__main__':
main()
| bsd-3-clause | -1,260,905,872,956,646,100 | 23.803571 | 125 | 0.647948 | false |
splunk/splunk-webframework | server/splunkdj/testlib.py | 1 | 1436 | #
#
# Licensed under the Apache License, Version 2.0 (the "License"): you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Helper functions for writing unit tests for apps built using the
Splunk Django Bindings."""
from os import path
import sys
__all__ = ["loadrc"]
# Print the given message to stderr, and optionally exit
def error(message, exitcode = None):
print >> sys.stderr, "Error: %s" % message
if not exitcode is None: sys.exit(exitcode)
def loadrc(filepath):
"""Load a `.splunkrc` style options file and return a `dict` of option
values."""
filepath = path.expanduser(filepath) # Just in case
argv = []
try:
file = open(filepath)
except:
error("Unable to open '%s'" % filepath, 2)
result = {}
for line in file:
if line.startswith("#"): continue # Skip comment
line = line.strip()
if len(line) == 0: continue # Skip blank line
k, v = line.split('=', 1)
result[k] = v
return result
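# Example of the .splunkrc format that loadrc() parses (an assumption for
# illustration; the values below are placeholders): '#' lines and blank lines
# are skipped, everything else is split on the first '='.
#
#   # Splunk host and credentials
#   host=localhost
#   port=8089
#   username=admin
#   password=changeme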
| apache-2.0 | -6,819,429,677,982,128,000 | 29.553191 | 75 | 0.667131 | false |
lynxis/libavg | src/python/app/app.py | 1 | 12797 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# libavg - Media Playback Engine.
# Copyright (C) 2003-2013 Ulrich von Zadow
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Current versions can be found at www.libavg.de
#
# Original author of this file is OXullo Interecans <x at brainrapers dot org>
import os
import math
import time
import libavg
from libavg import avg, Point2D, mtemu
import settings
from settings import Option
import keyboardmanager
import debugpanel
import flashmessage
class MainDiv(libavg.avg.DivNode):
VERSION = 'undef'
def __init__(self, **kargs):
assert not 'parent' in kargs
super(MainDiv, self).__init__(**kargs)
self.registerInstance(self, None)
def onArgvParserCreated(self, parser):
pass
def onArgvParsed(self, options, args, parser):
pass
def onStartup(self):
pass
def onInit(self):
pass
def onExit(self):
pass
def onFrame(self):
pass
class App(object):
def __init__(self):
self._setupInstance()
self._mainDiv = None
self._appParent = None
self._debugPanel = None
self._overlayPanel = None
self._resolution = None
self._windowSize = None
self._mtEmu = None
self.__lastFrameTimestamp = 0
self._setupSettings()
def run(self, mainDiv, **kargs):
assert isinstance(mainDiv, MainDiv)
self._mainDiv = mainDiv
self.mainDiv.settings = self._settings
self._applySettingsExtenders(kargs)
self._setupLogging()
mainDiv.onStartup()
self._setupResolution()
self._setupRootNode()
self._setupMouse()
pos, size, angle = self._getAppParentGeometry()
self._setupAppParent(pos, size, angle)
self._setupMainDiv()
self._setupTopPanel()
self._setupDebugPanel()
self._setupKeyboardManager()
self._setupDebuggingWidgets()
self._applyResolution()
self._setupOnInit()
self.onBeforeLaunch()
self.__lastFrameTimestamp = time.time()
try:
self._runLoop()
except Exception, e:
self._teardownKeyboardManager()
raise
mainDiv.onExit()
self._teardownKeyboardManager()
return 0
@property
def mainDiv(self):
return self._mainDiv
@property
def debugPanel(self):
return self._debugPanel
@property
def overlayPanel(self):
return self._overlayPanel
@property
def settings(self):
return self._settings
def onBeforeLaunch(self):
pass
def takeScreenshot(self, targetFolder='.'):
screenBmp = libavg.player.screenshot()
filenameTemplate = os.path.join(targetFolder, '%s-%03d.png')
i = 1
while i < 1000:
filename = filenameTemplate % (self.__class__.__name__, i)
if os.path.exists(filename):
i += 1
else:
break
if i == 1000:
flashmessage.FlashMessage('Maximum number of screenshots reached',
parent=self._appParent, isError=True)
else:
screenBmp.save(filename)
flashmessage.FlashMessage('Screenshot saved as %s' % filename,
parent=self._appParent)
def dumpTextObjectCount(self):
objects = libavg.player.getTestHelper().getObjectCount()
savedSeverity = libavg.logger.getCategories()[libavg.logger.Category.APP]
libavg.logger.configureCategory(libavg.logger.Category.APP,
libavg.logger.Severity.INFO)
libavg.logger.info('Dumping objects count')
for key, value in objects.iteritems():
libavg.logger.info(' %-25s: %s' % (key, value))
libavg.logger.configureCategory(libavg.logger.Category.APP, savedSeverity)
def _setupInstance(self):
import libavg.app
if libavg.app.instance is not None:
raise RuntimeError('%s has been already instantiated' %
self.__class__.__name__)
libavg.app.instance = self
def _setupSettings(self):
self._settings = settings.Settings()
self._settings.addOption(Option('app_resolution', '640x480'))
self._settings.addOption(Option('app_window_size', ''))
self._settings.addOption(Option('app_fullscreen', 'false'))
self._settings.addOption(Option('app_show_cursor', 'true'))
self._settings.addOption(Option('app_rotation', 'normal'))
self._settings.addOption(Option('app_panel_fontsize', '10'))
self._settings.addOption(Option('app_mouse_enabled', 'true'))
self._settings.addOption(Option('multitouch_enabled', 'false'))
self._settings.addOption(Option('multitouch_driver', ''))
self._settings.addOption(Option('multitouch_tuio_port', ''))
self._settings.addOption(Option('multitouch_mtdev_device', ''))
self._settings.addOption(Option('log_avg_categories', ''))
def _applySettingsExtenders(self, kargs):
self.settings.applyExtender(settings.KargsExtender(kargs))
argvExtender = settings.ArgvExtender(self.mainDiv.VERSION)
self.mainDiv.onArgvParserCreated(argvExtender.parser)
self.settings.applyExtender(argvExtender)
self.mainDiv.onArgvParsed(argvExtender.parsedArgs[0], argvExtender.parsedArgs[1],
argvExtender.parser)
def _setupLogging(self):
catMap = self.settings.get('log_avg_categories').strip()
if catMap:
for catPair in catMap.split(' '):
cat, strLevel = catPair.split(':')
level = getattr(avg.logger.Severity, strLevel)
libavg.avg.logger.configureCategory(cat, level)
def _setupRootNode(self):
libavg.player.loadString('''<?xml version="1.0"?>
<!DOCTYPE avg SYSTEM "../../libavg/doc/avg.dtd">
<avg width="%s" height="%s">
</avg>''' % tuple(self._resolution))
def _setupMouse(self):
libavg.player.enableMouse(self.settings.getBoolean('app_mouse_enabled'))
def _setupMultitouch(self):
if self.settings.getBoolean('multitouch_enabled'):
driver = self.settings.get('multitouch_driver').upper()
if driver:
os.putenv('AVG_MULTITOUCH_DRIVER', driver)
tuio_port = self.settings.get('multitouch_tuio_port').upper()
if tuio_port:
os.putenv('AVG_TUIO_PORT', tuio_port)
mtdev_device = self.settings.get('multitouch_mtdev_device').upper()
if mtdev_device:
os.putenv('AVG_LINUX_MULTITOUCH_DEVICE', mtdev_device)
libavg.player.enableMultitouch()
def _getAppParentGeometry(self):
rotation = self.settings.get('app_rotation').lower()
size = self._resolution
pos = (0, 0)
angle = 0
if rotation == 'left':
angle = -math.pi / 2
size = (self._resolution.y, self._resolution.x)
pos = ((self._resolution.x - self._resolution.y) / 2,
(self._resolution.y - self._resolution.x) / 2)
elif rotation == 'right':
angle = math.pi / 2
size = (self._resolution.y, self._resolution.x)
pos = ((self._resolution.x - self._resolution.y) / 2,
(self._resolution.y - self._resolution.x) / 2)
elif rotation == 'inverted':
angle = math.pi
elif rotation != 'normal':
raise TypeError('Invalid rotation %s' % rotation)
return (pos, size, angle)
def _setupAppParent(self, pos, size, angle):
self._appParent = libavg.avg.DivNode(parent=libavg.player.getRootNode(),
pos=pos, size=size, angle=angle)
def _setupMainDiv(self):
self._appParent.appendChild(self.mainDiv)
self.mainDiv.size = self._appParent.size
def _setupTopPanel(self):
self._overlayPanel = libavg.avg.DivNode(parent=self._appParent, id='overlayPanel')
def _setupDebugPanel(self):
self._debugPanel = debugpanel.DebugPanel(parent=self._appParent,
size=self._appParent.size, id='debugPanel',
fontsize=self.settings.getFloat('app_panel_fontsize'))
def _setupDebuggingWidgets(self):
pass
def _setupResolution(self):
rotation = self.settings.get('app_rotation').lower()
resolutionStr = self.settings.get('app_resolution').lower()
if resolutionStr != '':
resolution = self.settings.getPoint2D('app_resolution')
else:
resolution = libavg.player.getScreenResolution()
windowSizeStr = self.settings.get('app_window_size')
if windowSizeStr != '':
windowSize = self.settings.getPoint2D('app_window_size')
else:
windowSize = resolution
if rotation in ('left', 'right'):
resolution = Point2D(resolution.y, resolution.x)
windowSize = Point2D(windowSize.y, windowSize.x)
self._resolution = resolution
self._windowSize = windowSize
def _applyResolution(self):
fullscreen = self.settings.getBoolean('app_fullscreen')
if fullscreen:
resolution = self._resolution
else:
resolution = self._windowSize
libavg.player.setResolution(
fullscreen,
int(resolution.x), int(resolution.y),
0 # color depth
)
libavg.player.showCursor(self.settings.getBoolean('app_show_cursor'))
def _setupKeyboardManager(self):
keyboardmanager.init()
keyboardmanager.bindKeyDown(
keystring='d',
handler=self._debugPanel.toggleVisibility,
help='Show/hide the debug panel',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='h',
handler=lambda: libavg.player.showCursor(
not libavg.player.isCursorShown()),
help='Show/hide cursor',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='p',
handler=self.takeScreenshot,
help='Take screenshot',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='b',
handler=self.dumpTextObjectCount,
help='Dump objects count to the console',
modifiers=libavg.avg.KEYMOD_CTRL)
keyboardmanager.bindKeyDown(
keystring='e',
handler=self._toggleMtEmulation,
help='Toggle multitouch emulation',
modifiers=libavg.avg.KEYMOD_CTRL)
self.debugPanel.setupKeys()
def _toggleMtEmulation(self):
if self._mtEmu is None:
self._mtEmu = mtemu.MTemu()
keyboardmanager.bindKeyDown('shift', self._mtEmu.enableDualTouch,
'Enable pinch gesture emulation')
keyboardmanager.bindKeyUp('shift', self._mtEmu.disableDualTouch,
'Disable pinch gesture emulation')
keyboardmanager.bindKeyDown('t', self._mtEmu.toggleSource,
'Toggle source between TOUCH and TRACK', libavg.avg.KEYMOD_CTRL)
else:
self._mtEmu.deinit()
keyboardmanager.unbindKeyDown('t', libavg.avg.KEYMOD_CTRL)
keyboardmanager.unbindKeyDown('shift')
keyboardmanager.unbindKeyUp('shift')
del self._mtEmu
self._mtEmu = None
def _teardownKeyboardManager(self):
keyboardmanager.unbindAll()
def _setupOnInit(self):
libavg.player.setTimeout(0, self._onInitInternal)
def _runLoop(self):
libavg.player.play()
def _onInitInternal(self):
self._setupMultitouch()
self.mainDiv.onInit()
libavg.player.subscribe(libavg.player.ON_FRAME, self.mainDiv.onFrame)
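# Minimal usage sketch (an assumption for illustration, not part of this file):
# applications subclass MainDiv and hand an instance to App.run(), which drives
# the onStartup/onInit/onFrame/onExit hooks defined above.
#
#   class MyMainDiv(MainDiv):
#       def onInit(self):
#           libavg.avg.WordsNode(text='hello libavg', parent=self)
#
#   # App().run(MyMainDiv(), app_resolution='800x600')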
| lgpl-2.1 | -4,420,832,061,049,459,700 | 32.238961 | 90 | 0.608815 | false |
lochiiconnectivity/exabgp | lib/exabgp/protocol/ip/fragment.py | 1 | 1401 | # encoding: utf-8
"""
fragment.py
Created by Thomas Mangin on 2010-02-04.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
# =================================================================== Fragment
# Uses bitmask operand format defined above.
# 0 1 2 3 4 5 6 7
# +---+---+---+---+---+---+---+---+
# | Reserved |LF |FF |IsF|DF |
# +---+---+---+---+---+---+---+---+
#
# Bitmask values:
# + Bit 7 - Don't fragment (DF)
# + Bit 6 - Is a fragment (IsF)
# + Bit 5 - First fragment (FF)
# + Bit 4 - Last fragment (LF)
class Fragment (int):
# reserved = 0xF0
LAST = 0x08
FIRST = 0x04
IS = 0x02
DONT = 0x01
def __str__ (self):
if self == 0x00: return 'not-a-fragment'
if self == self.DONT: return 'dont-fragment'
if self == self.IS: return 'is-fragment'
if self == self.FIRST: return 'first-fragment'
if self == self.LAST: return 'last-fragment'
return 'unknown fragment value %d' % int(self)
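# For reference (added note, not part of the original file): str(Fragment(0x01))
# returns 'dont-fragment' and NamedFragment('is-fragment') == Fragment(0x02).
# Combined bitmask values (e.g. 0x03) fall through to the 'unknown fragment
# value' case of __str__ above.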
def NamedFragment (name):
fragment = name.lower()
if fragment == 'not-a-fragment': return Fragment(0x00)
if fragment == 'dont-fragment': return Fragment(Fragment.DONT)
if fragment == 'is-fragment': return Fragment(Fragment.IS)
if fragment == 'first-fragment': return Fragment(Fragment.FIRST)
if fragment == 'last-fragment': return Fragment(Fragment.LAST)
raise ValueError('unknown fragment name %s' % fragment)
| bsd-3-clause | 4,092,678,265,313,435,600 | 30.133333 | 78 | 0.58601 | false |
tulip-control/tulip-control | contrib/aut2simulink.py | 1 | 14363 | # Copyright (c) 2012 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
"""
This program takes the aut and smv files from a generated TuLiP controller
and automatically writes the MATLAB-compatible script for that controller.
Run this program by typing "python programfile.py nameofautfile
nameofmatlabfile"; the aut and smv files must have the same name.
Do not include file extensions.
Written by Robert Rogersten during SURF June 2012,
Co-mentors Mumu Xu, Necmiye Ozay and Ufuk Topcu.
"""
from __future__ import print_function
import re, copy, os, sys
try:
import queue as _queue
except ImportError:
import Queue as _queue
class AutomatonState(object):
"""AutomatonState class for representing a state in a finite state
automaton. An AutomatonState object contains the following
fields:
- `stateid`: an integer specifying the state id of this AutomatonState object.
- `state`: a dictionary whose keys are the names of the variables
and whose values are the values of the variables.
- `transition`: a list of id's of the AutomatonState objects to
which this AutomatonState object can transition.
"""
def __init__(self, stateid=-1, state={},transition=[]):
self.stateid = stateid
self.state = copy.copy(state)
self.transition = transition[:]
def question(string):
"""This function asks a yes/no question and returns the answer.
@param string: The question to use as the prompt.
    @return: True for "yes" and False for "no". The default is "yes"
    (the default is used if the user only presses the RETURN button).
"""
default="yes"
valid = {"yes":True, "y":True, "ye":True,
"no":False, "n":False}
prompt = " [Y/n] "
while True:
print(string)
choice = raw_input().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
print("Please respond with 'yes' or 'no'\n")
def load_file(aut_file):
    """Parse aut_file and put (stateid, state, transition) tuples into the queues.
@param aut_file: the name of the text file containing the
automaton, or an (open) file-like object.
"""
if isinstance(aut_file, str):
f = open(aut_file, 'r')
else:
f = aut_file
stateid = -1
for line in f:
# parse states
if (line.find('State ') >= 0):
stateid = re.search(r'State (\d+)', line)
stateid = int(stateid.group(1))
state = dict(re.findall(r'(\w+):(\w+)', line))
state1 = dict(re.findall(r'(\w+):(-\w+)', line))
state.update(state1)
if re.search('successors', line):
transition = list(re.findall(r'\d+', line))
automaton=(stateid,state,transition)
queue.put(automaton)
queue1.put(automaton)
queue2.put(automaton)
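# For reference (an assumption about the JTLV output format, inferred from the
# regular expressions above): each parsed state entry is expected to look
# roughly like
#   State 12 with rank 0 -> <x:0, y:1, z:-1>
#   With successors : 13, 14
# i.e. a state id, variable:value pairs, and a list of successor state ids.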
def read_variables(smv_file):
    """Put the environment and system variables from smv_file into two
    different Queues called system and enviroment.
@param smv_file: the name of the text file containing the
automaton, or an (open) file-like object.
"""
if isinstance(smv_file, str):
f = open(smv_file, 'r')
else:
f = smv_file
for line in f:
if re.search('MODULE env',line):
for line in f:
if re.search(' : ', line):
env = str(re.findall(r'(\w+) :', line))
env = env[2:len(env)-2]
enviroment.put(env)
if re.search('MODULE sys',line):
break
if re.search('MODULE sys',line):
for line in f:
if re.search(' : ', line):
sys = str(re.findall(r'(\w+) :', line))
sys = sys[2:len(sys)-2]
system.put(sys)
def write_startline(enviroment,system,f):
"""Write the first lines before the switch cases in the matlab file.
Input:
- enviroment queue
- system queue
- fileobject f
"""
f.write('function [')
for i in range(system.qsize()):
count = system.qsize()
temp = system.get()
if count == i+1:
f.write(temp)
else:
f.write(temp+',')
system.put(temp)
f.write('] = '+sys.argv[2]+'(')
for i in range(enviroment.qsize()):
count = enviroment.qsize()
temp = enviroment.get()
if count == i+1:
f.write(temp)
else:
f.write(temp+',')
enviroment.put(temp)
f.write(")\nglobal state;\ncoder.extrinsic('disp');\nswitch state\n")
def write_case(enviroment,system,f,verbosem):
"""Write the switch cases in the matlab file.
Input:
- enviroment queue
- system queue
- fileobject f
- verbosem
"""
#for each case
for i in range(queue.qsize()):
f.write('\tcase '+str(i)+'\n')
#for each condition within each case
temp=queue.get()
ef=0
for k in range(queue1.qsize()):
temp2=queue1.get()
if str(k) in temp[2]:
if ef == 0:
f.write('\t\tif ')
ef=1
else:
f.write('\t\telseif ')
for l in range(enviroment.qsize()):
count=enviroment.qsize()
temp1=enviroment.get()
if count == l+1:
f.write(temp1+' == '+temp2[1][temp1])
else:
f.write(temp1+' == '+temp2[1][temp1]+' && ')
enviroment.put(temp1)
f.write('\n')
if verbosem==1:
f.write('\t\t\tstate = '+str(temp2[0])+';\n')
elif verbosem==0:
f.write('\t\t\tstate = '+str(temp2[0])+'\n')
else:
raise Exception
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
queue1.put(temp2)
#else statement for each case
if not temp[2]:
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
else:
f.write('\t\telse\n')
            f.write("\t\t\tdisp('Cannot find a valid successor, environment assumption is likely to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
f.write('\t\tend\n')
queue.put(temp)
#the last case is an otherwise statement
f.write('\totherwise\n')
    f.write("\t\tdisp('Cannot find a valid successor, environment assumption is likely to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = 0;\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = 0\n')
else:
raise Exception
system.put(temp1)
f.write('end')
def write_case_no(enviroment,system,f,verbosem):
"""Write the switch cases in the matlab file and exclude no
successors.
Input:
- enviroment queue
- system queue
- fileobject f
- verbosem
"""
#for each case
li=list()
for i in range(queue.qsize()):
q=queue.get()
li.append(q[0])
queue.put(q)
for i in range(queue.qsize()):
#for each condition within each case
temp=queue.get()
f.write('\tcase '+str(temp[0])+'\n')
ef=0
for k in range(queue2.qsize()):
temp2=queue2.get()
if str(k) in temp[2] and k in li:
if ef == 0:
f.write('\t\tif ')
ef=1
else:
f.write('\t\telseif ')
for l in range(enviroment.qsize()):
count=enviroment.qsize()
temp1=enviroment.get()
if count == l+1:
f.write(temp1+' == '+temp2[1][temp1])
else:
f.write(temp1+' == '+temp2[1][temp1]+' && ')
enviroment.put(temp1)
f.write('\n')
if verbosem==1:
f.write('\t\t\tstate = '+str(k)+';\n')
elif verbosem==0:
f.write('\t\t\tstate = '+str(k)+'\n')
else:
raise Exception
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp2[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
queue2.put(temp2)
#else statement for each case
if not temp[2]:
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
else:
f.write('\t\telse\n')
            f.write("\t\t\tdisp('Cannot find a valid successor, environment assumption is likely to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+';\n')
elif verbosem==0:
f.write('\t\t\t'+temp1+' = '+temp[1][temp1]+'\n')
else:
raise Exception
system.put(temp1)
f.write('\t\tend\n')
queue.put(temp)
#the last case is an otherwise statement
f.write('\totherwise\n')
    f.write("\t\tdisp('Cannot find a valid successor, environment assumption is likely to be violated')\n")
for l in range(system.qsize()):
temp1=system.get()
if verbosem==1:
f.write('\t\t'+temp1+' = 0;\n')
elif verbosem==0:
f.write('\t\t'+temp1+' = 0\n')
else:
raise Exception
system.put(temp1)
f.write('end')
queue=_queue.Queue()
queue1=_queue.Queue()
queue2=_queue.Queue()
enviroment=_queue.Queue()
system=_queue.Queue()
try:
load_file(sys.argv[1]+'.aut')
read_variables(sys.argv[1]+'.smv')
q=question('Shall there be a semicolon printed after each variable assignment? [Y/n]')
q2=question('Shall the script exclude no successors? [Y/n]')
if q:
verbosem=1
else:
verbosem=0
if not os.path.isfile(sys.argv[2]+'.m'):
f=open(sys.argv[2]+'.m','w')
write_startline(enviroment,system,f)
if q2:
for i in range(queue.qsize()):
temp=queue.get()
temp1=queue1.get()
if not temp[2] == []:
queue.put(temp)
queue1.put(temp1)
write_case_no(enviroment,system,f,verbosem)
else:
write_case(enviroment,system,f,verbosem)
f.close()
if queue.get()[0]==-1:
raise IOError
print('MATLAB script written to '+sys.argv[2]+'.m'+' with success\n')
else:
print('Enter a matlab filename that does not exist.')
except IOError:
print(
'Enter correct filename for a TuLiP generated controller, '
'aut and\nsmv file must have the same name')
except IndexError:
print(
'Usage: aut2simulink.py JTLV-AUT-FILE MATLAB-FILE\n\n'
' aut and smv file must have the same name.\n'
' Do not include file extensions.')
| bsd-3-clause | -22,288,746,802,600,890 | 34.289926 | 115 | 0.53227 | false |
Data2Semantics/prov-o-matic | src/provomatic/extension.py | 1 | 2174 | from watcher import NotebookWatcher, CodeVisitor
from wrapper import prov, replace
from builder import get_dataset, save_prov, clear_dataset, add_prov, revive, list_entities, list_activities
from viewer import Viewer
from ducktape import Ducktape
import logging
import os
log = logging.getLogger('provomatic.extension')
log.setLevel(logging.WARNING)
def load_ipython_extension(ip):
log.debug("Loading PROV-O-Matic extension")
# Push the prov and replace wrapper functions
ip.push('prov')
ip.push('replace')
# Push the save_prov function (for saving the generated provenance trace to a file)
ip.push('save_prov')
# Push the add_prov function (for adding provenance from external files)
ip.push('add_prov')
# Push the revive function (for binding a value from an inported provenance graph to a new variable)
ip.push('revive')
ip.push('list_entities')
ip.push('list_activities')
## Initialize the PROV-O-Viz adapter
viewer = Viewer()
view_prov = viewer.view_prov
set_provoviz_url = viewer.set_provoviz_url
view_prov_service = viewer.view_prov_service
# Push the PROV-O-Viz functions to the IPython Notebook
ip.push('view_prov')
ip.push('set_provoviz_url')
ip.push('view_prov_service')
## Initialize the Ducktape loader
ducktape = Ducktape(ip)
load_ducktape = ducktape.load
ip.push('load_ducktape')
# Clear the provenance graph
clear_dataset()
try :
add_prov('http://www.w3.org/ns/prov#',url='http://localhost:8000/datafiles/prov-o.ttl')
except :
curwd = os.getcwd()
provopath = os.path.join(curwd,'datafiles/prov-o.ttl')
log.warning('Could not load PROV schema from URL, attempting to load from {}'.format(provopath))
add_prov('http://www.w3.org/ns/prov#',url='file://{}'.format(provopath))
## Initialize the notebookwatcher and code visitor.
nw = NotebookWatcher(ip)
cv = CodeVisitor(nw)
ip.events.register('pre_execute', nw.pre_execute)
ip.events.register('post_execute', nw.post_execute)
ip.ast_transformers.append(cv)
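# Usage note (assumption, not part of the original file): as an IPython
# extension this module is typically activated from a notebook with
#   %load_ext provomatic.extension
# after which prov/replace/save_prov/view_prov etc. are available as globals.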
| mit | 5,969,903,609,708,959,000 | 27.233766 | 107 | 0.674793 | false |
Linktime/Aike | app/userApp/forms.py | 1 | 2544 | #-*- coding:utf-8 -*-
from django import forms
from django.contrib.auth.models import User
from app.userApp.models import AikeUser, MessageBoard
class UserRegisterForm(forms.ModelForm):
    username = forms.EmailField(label=u"*Username (email)")
    password = forms.CharField(widget=forms.PasswordInput,label=u"*Password")
    alias = forms.CharField(label=u"*Alias",help_text="Name shown to other users on the site")
    sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'Male'),(1,u'Female')),label="Gender")
    # name = forms.CharField(required=False,label="Real name")
    # age = forms.IntegerField(required=False,label="Age")
    # city = forms.CharField(required=False,label="City",help_text="We will prioritize showing you events in your area")
    # university = forms.CharField(required=False,label="University",help_text="If you are a university student, we will prioritize recommending events at your university")
    # lbs = forms.CharField(required=False,label="Location of last mobile login")
    # Auth = forms.BooleanField(required=False,label="Verified")
class Meta:
model = AikeUser
fields = ("username","password","alias","name","sex","age","city","university")
class UserChangeForm(forms.ModelForm):
    sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'Male'),(1,u'Female')),label="Gender")
    # alias = forms.CharField(required=False,label="Alias",help_text="Name shown to other users on the site")
    # name = forms.CharField(required=False,label="Real name")
    # email = forms.EmailField(required=False,label="Email")
    # sex = forms.ChoiceField(widget=forms.RadioSelect(),choices=((0,u'Male'),(1,u'Female')),label="Gender")
    # age = forms.IntegerField(required=False,label="Age")
    # city = forms.CharField(required=False,label="City",help_text="We will prioritize showing you events in your area")
    # university = forms.CharField(required=False,label="University",help_text="If you are a university student, we will prioritize recommending events at your university")
    # lbs = forms.CharField(required=False,label="Location of last mobile login")
    # Auth = forms.BooleanField(required=False,label="Verified")
class Meta:
model = AikeUser
exclude = ("user")
class UserMessageBoardForm(forms.ModelForm):
# text = forms.CharField(widget=forms.Textarea(attrs={'class':'span5','rows':'5','style':'resize:none'}))
class Meta:
model = MessageBoard
| apache-2.0 | -8,952,682,153,699,489,000 | 50.634146 | 109 | 0.682576 | false |
mfherbst/bohrium | test/python/tests/test_reorganization.py | 4 | 4226 | import util
import functools
import operator
class test_gather:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
cmd += "ind = M.arange(%d, dtype=np.int64).reshape(%s); " % (nelem, shape)
yield cmd
yield cmd + "ind = ind[::2]; "
if shape[0] > 2:
yield cmd + "ind = ind[1:]; "
if len(shape) > 1 and shape[1] > 5:
yield cmd + "ind = ind[3:]; "
def test_take(self, cmd):
return cmd + "res = M.take(a, ind)"
def test_take_ary_mth(self, cmd):
return cmd + "res = a.take(ind)"
def test_indexing(self, cmd):
return cmd + "res = a.flatten()[ind.flatten()]"
class test_scatter:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); res = %s; " % ary
cmd += "ind = M.arange(%d, dtype=np.int64).reshape(%s); " % (nelem, shape)
VAL = "val = R.random(ind.shape, np.float64, bohrium=BH); "
yield cmd + VAL
yield cmd + "ind = ind[::2]; " + VAL
if shape[0] > 2:
yield cmd + "ind = ind[1:];" + VAL
if len(shape) > 1 and shape[1] > 5:
yield cmd + "ind = ind[3:];" + VAL
def test_put(self, cmd):
return cmd + "M.put(res, ind, val)"
def test_put_scalar(self, cmd):
return cmd + "M.put(res, ind, 42)"
def test_put_fixed_length_val(self, cmd):
return cmd + "M.put(res, ind, M.arange(10))"
def test_put_ary_mth(self, cmd):
return cmd + "res.put(ind, val)"
def test_indexing(self, cmd):
return cmd + "res = res.flatten(); res[ind] = val"
def test_cond(self, cmd):
        cmd += "mask = R.random(ind.size, np.bool, bohrium=BH).reshape(ind.shape); "
np_cmd = cmd + "np.put(res, ind[mask], val[mask])"
bh_cmd = cmd + "M.cond_scatter(res, ind, val, mask)"
return (np_cmd, bh_cmd)
class test_nonzero:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
yield cmd
def test_flatnonzero(self, cmd):
return cmd + "res = M.flatnonzero(a)"
def test_nonzero(self, cmd):
return cmd + "res = M.concatenate(M.nonzero(a))"
class test_fancy_indexing_get:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); a = %s; " % ary
ind = "ind = ("
for dim in shape:
ind += "R.random(10, np.uint64, bohrium=BH) %% %d, " % dim
ind += "); "
yield cmd + ind
def test_take_using_index_tuple(self, cmd):
return cmd + "res = bh.take_using_index_tuple(a, ind)"
def test_indexing(self, cmd):
return cmd + "res = a[ind]"
class test_fancy_indexing_set:
def init(self):
for ary, shape in util.gen_random_arrays("R", 3, max_dim=50, dtype="np.float64"):
nelem = functools.reduce(operator.mul, shape)
if nelem == 0:
continue
cmd = "R = bh.random.RandomState(42); res = %s; " % ary
ind = "ind = ("
for dim in shape:
ind += "R.random(10, np.uint64, bohrium=BH) %% %d, " % dim
ind += "); "
yield cmd + ind
def test_put_using_index_tuple(self, cmd):
return cmd + "bh.put_using_index_tuple(res, ind, 42)"
def test_indexing(self, cmd):
return cmd + "res[ind] = 42" | lgpl-3.0 | 6,225,781,323,098,376,000 | 33.647541 | 90 | 0.514671 | false |
mupif/mupif | mupif/examples/Example09-operatorEmail/Example09.py | 1 | 6155 | #!/usr/bin/env python3
import sys
sys.path.extend(['..', '../../..'])
from mupif import *
import jsonpickle
import time # for sleep
import logging
log = logging.getLogger()
import mupif.Physics.PhysicalQuantities as PQ
#
# Expected response from operator: E-mail with "CSJ01" (workflow + jobID)
# in the subject line, message body: json encoded dictionary with 'Operator-results' key, e.g.
# {"Operator-results": 3.14}
#
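# Added illustrative lines (hedged sketch, not part of the original example):
# they only demonstrate the round trip of the json-encoded reply body that the
# operator is expected to send back; the value 3.14 is an arbitrary placeholder.
_example_reply_body = jsonpickle.encode({'Operator-results': 3.14})
assert jsonpickle.decode(_example_reply_body)['Operator-results'] == 3.14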
class EmailAPI(Model.Model):
"""
Simple application API that involves operator interaction
"""
def __init__(self, file):
super(EmailAPI, self).__init__(file)
# note: "From" should correspond to destination e-mail
# where the response is received (Operator can reply to the message)
self.operator = operatorUtil.OperatorEMailInteraction(From='[email protected]',
To='[email protected]',
smtpHost='smtp.something.com',
imapHost='imap.gmail.com',
imapUser='appAPI')
self.inputs = {}
self.outputs = {}
self.key = 'Operator-results'
def initialize(self, file='', workdir='', metaData={}, validateMetaData=True, **kwargs):
MD = {
'Name': 'Email operator application',
'ID': 'N/A',
'Description': 'Sending email with input and receiving email with results',
'Physics': {
'Type': 'Other',
'Entity': 'Other'
},
'Solver': {
'Software': 'Unknown',
'Language': 'Unknown',
'License': 'Unknown',
'Creator': 'Unknown',
'Version_date': '02/2019',
'Type': 'Summator',
'Documentation': 'Nowhere',
'Estim_time_step_s': 1,
'Estim_comp_time_s': 0.01,
'Estim_execution_cost_EUR': 0.01,
'Estim_personnel_cost_EUR': 0.01,
'Required_expertise': 'None',
'Accuracy': 'Unknown',
'Sensitivity': 'Unknown',
'Complexity': 'Unknown',
'Robustness': 'Unknown'
},
'Inputs': [
{'Type': 'mupif.Property', 'Type_ID': 'mupif.PropertyID.PID_CumulativeConcentration', 'Name': 'Concentration', 'Description': 'Concentration', 'Units': 'kg/m**3', 'Origin': 'Simulated', 'Required': True}],
'Outputs': [
{'Type': 'mupif.Property', 'Type_ID': 'mupif.PropertyID.PID_Demo_Value', 'Name': 'Demo value',
'Description': 'Demo value', 'Units': 'dimensionless', 'Origin': 'Simulated'}]
}
self.updateMetadata(MD)
super(EmailAPI, self).initialize(file, workdir, metaData, validateMetaData, **kwargs)
def setProperty(self, property, objectID=0):
# remember the mapped value
self.inputs[str(property.propID)] = property
self.inputs[self.key] = 0.0
def getProperty(self, propID, time, objectID=0):
md = {
'Execution': {
'ID': self.getMetadata('Execution.ID'),
'Use_case_ID': self.getMetadata('Execution.Use_case_ID'),
'Task_ID': self.getMetadata('Execution.Task_ID')
}
}
if self.outputs:
# unpack & process outputs (expected json encoded data)
if propID == PropertyID.PID_Demo_Value:
if self.key in self.outputs:
value = float(self.outputs[self.key])
log.info('Found key %s with value %f' % (self.key, value))
return Property.ConstantProperty(value, propID, ValueType.Scalar, PQ.getDimensionlessUnit(), time, 0, metaData=md)
else:
log.error('Not found key %s in email' % self.key)
return None
def solveStep(self, tstep, stageID=0, runInBackground=False):
# send email to operator, pack json encoded inputs in the message
# note workflow and job IDs will be available in upcoming MuPIF version
self.operator.contactOperator("CS", "J01", jsonpickle.encode(self.inputs))
responseReceived = False
# check for response and repeat until received
while not responseReceived:
# check response and receive the data
responseReceived, operatorOutput = self.operator.checkOperatorResponse("CS", "J01")
# print(responseReceived, operatorOutput.splitlines()[0])
if responseReceived:
try:
self.outputs = jsonpickle.decode(operatorOutput.splitlines()[0]) # pick up only dictionary to new line
except Exception as e:
log.error(e)
log.info("Received response from operator %s" % self.outputs)
else:
time.sleep(60) # wait
def getCriticalTimeStep(self):
return PQ.PhysicalQuantity(1.0, 's')
#################################################
# demo code
#################################################
# create instance of application API
app = EmailAPI(None)
try:
executionMetadata = {
'Execution': {
'ID': '1',
'Use_case_ID': '1_1',
'Task_ID': '1'
}
}
app.initialize(metaData=executionMetadata)
# CumulativeConcentration property on input
p = Property.ConstantProperty(0.1, PropertyID.PID_CumulativeConcentration, ValueType.Scalar, 'kg/m**3')
# set concentration as input
app.setProperty(p)
# solve (involves operator interaction)
tstep = TimeStep.TimeStep(0.0, 0.1, 1.0, 's', 1)
    app.solveStep(tstep)
# get result of the simulation
r = app.getProperty(PropertyID.PID_Demo_Value, tstep.getTime())
log.info("Application API return value is %f", r.getValue())
except Exception as e:
    log.error(e)
finally:
    # terminate app
    app.terminate()
| lgpl-3.0 | -1,701,757,126,261,883,600 | 40.308725 | 221 | 0.543461 | false |
aequitas/home-assistant | tests/components/scene/test_init.py | 1 | 3995 | """The tests for the Scene component."""
import io
import unittest
from homeassistant.setup import setup_component
from homeassistant.components import light, scene
from homeassistant.util.yaml import loader as yaml_loader
from tests.common import get_test_home_assistant
from tests.components.light import common as common_light
from tests.components.scene import common
class TestScene(unittest.TestCase):
"""Test the scene component."""
def setUp(self): # pylint: disable=invalid-name
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
test_light = getattr(self.hass.components, 'test.light')
test_light.init()
assert setup_component(self.hass, light.DOMAIN, {
light.DOMAIN: {'platform': 'test'}
})
self.light_1, self.light_2 = test_light.DEVICES[0:2]
common_light.turn_off(
self.hass, [self.light_1.entity_id, self.light_2.entity_id])
self.hass.block_till_done()
assert not self.light_1.is_on
assert not self.light_2.is_on
def tearDown(self): # pylint: disable=invalid-name
"""Stop everything that was started."""
self.hass.stop()
def test_config_yaml_alias_anchor(self):
"""Test the usage of YAML aliases and anchors.
The following test scene configuration is equivalent to:
scene:
- name: test
entities:
light_1: &light_1_state
state: 'on'
brightness: 100
light_2: *light_1_state
When encountering a YAML alias/anchor, the PyYAML parser will use a
reference to the original dictionary, instead of creating a copy, so
care needs to be taken to not modify the original.
"""
entity_state = {
'state': 'on',
'brightness': 100,
}
assert setup_component(self.hass, scene.DOMAIN, {
'scene': [{
'name': 'test',
'entities': {
self.light_1.entity_id: entity_state,
self.light_2.entity_id: entity_state,
}
}]
})
common.activate(self.hass, 'scene.test')
self.hass.block_till_done()
assert self.light_1.is_on
assert self.light_2.is_on
assert 100 == self.light_1.last_call('turn_on')[1].get('brightness')
assert 100 == self.light_2.last_call('turn_on')[1].get('brightness')
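    # Added illustrative test (hedged sketch, not from the original component
    # tests): it demonstrates that PyYAML returns the very same dict object for
    # an anchor and its alias, the behavior the docstring above warns about.
    def test_config_yaml_alias_identity(self):
        with io.StringIO("a: &anchor\n  state: 'on'\nb: *anchor\n") as file:
            doc = yaml_loader.yaml.load(file)
        assert doc['a'] is doc['b']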
def test_config_yaml_bool(self):
"""Test parsing of booleans in yaml config."""
config = (
'scene:\n'
' - name: test\n'
' entities:\n'
' {0}: on\n'
' {1}:\n'
' state: on\n'
' brightness: 100\n').format(
self.light_1.entity_id, self.light_2.entity_id)
with io.StringIO(config) as file:
doc = yaml_loader.yaml.load(file)
assert setup_component(self.hass, scene.DOMAIN, doc)
common.activate(self.hass, 'scene.test')
self.hass.block_till_done()
assert self.light_1.is_on
assert self.light_2.is_on
assert 100 == self.light_2.last_call('turn_on')[1].get('brightness')
def test_activate_scene(self):
"""Test active scene."""
assert setup_component(self.hass, scene.DOMAIN, {
'scene': [{
'name': 'test',
'entities': {
self.light_1.entity_id: 'on',
self.light_2.entity_id: {
'state': 'on',
'brightness': 100,
}
}
}]
})
common.activate(self.hass, 'scene.test')
self.hass.block_till_done()
assert self.light_1.is_on
assert self.light_2.is_on
assert 100 == self.light_2.last_call('turn_on')[1].get('brightness')
| apache-2.0 | 6,011,448,469,299,943,000 | 31.479675 | 76 | 0.547685 | false |
mfraezz/osf.io | tests/test_registrations/test_embargoes.py | 1 | 52151 | """Tests related to embargoes of registrations"""
import datetime
from rest_framework import status as http_status
import json
import pytz
from django.core.exceptions import ValidationError
from django.utils import timezone
import mock
import pytest
from nose.tools import * # noqa
from tests.base import fake, OsfTestCase
from osf_tests.factories import (
AuthUserFactory, EmbargoFactory, NodeFactory, ProjectFactory,
RegistrationFactory, UserFactory, UnconfirmedUserFactory, DraftRegistrationFactory,
EmbargoTerminationApprovalFactory
)
from tests import utils
from framework.exceptions import PermissionsError, HTTPError
from framework.auth import Auth
from osf.exceptions import (
InvalidSanctionRejectionToken, InvalidSanctionApprovalToken, NodeStateError,
)
from osf.utils import tokens
from osf.models import AbstractNode
from osf.models.sanctions import PreregCallbackMixin, Embargo
from osf.utils import permissions
from osf.models import Registration, Contributor, OSFUser, SpamStatus
DUMMY_TOKEN = tokens.encode({
'dummy': 'token'
})
@pytest.mark.enable_bookmark_creation
class RegistrationEmbargoModelsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoModelsTestCase, self).setUp()
self.user = UserFactory()
self.project = ProjectFactory(creator=self.user)
self.registration = RegistrationFactory(project=self.project)
self.embargo = EmbargoFactory(user=self.user)
self.valid_embargo_end_date = timezone.now() + datetime.timedelta(days=3)
# Node#_initiate_embargo tests
def test__initiate_embargo_saves_embargo(self):
initial_count = Embargo.objects.all().count()
self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True
)
assert_equal(Embargo.objects.all().count(), initial_count + 1)
def test_state_can_be_set_to_complete(self):
embargo = EmbargoFactory()
embargo.state = Embargo.COMPLETED
embargo.save() # should pass validation
assert_equal(embargo.state, Embargo.COMPLETED)
def test__initiate_embargo_does_not_create_tokens_for_unregistered_admin(self):
unconfirmed_user = UnconfirmedUserFactory()
contrib = Contributor.objects.create(user=unconfirmed_user, node=self.registration)
self.registration.add_permission(unconfirmed_user, permissions.ADMIN, save=True)
assert_equal(Contributor.objects.get(node=self.registration, user=unconfirmed_user).permission, permissions.ADMIN)
embargo = self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True
)
assert_true(self.user._id in embargo.approval_state)
assert_false(unconfirmed_user._id in embargo.approval_state)
def test__initiate_embargo_adds_admins_on_child_nodes(self):
project_admin = UserFactory()
project_non_admin = UserFactory()
child_admin = UserFactory()
child_non_admin = UserFactory()
grandchild_admin = UserFactory()
project = ProjectFactory(creator=project_admin)
project.add_contributor(project_non_admin, auth=Auth(project.creator), save=True)
child = NodeFactory(creator=child_admin, parent=project)
child.add_contributor(child_non_admin, auth=Auth(child.creator), save=True)
grandchild = NodeFactory(creator=grandchild_admin, parent=child) # noqa
registration = RegistrationFactory(project=project)
embargo = registration._initiate_embargo(
project.creator,
self.valid_embargo_end_date,
for_existing_registration=True
)
assert_in(project_admin._id, embargo.approval_state)
assert_in(child_admin._id, embargo.approval_state)
assert_in(grandchild_admin._id, embargo.approval_state)
assert_not_in(project_non_admin._id, embargo.approval_state)
assert_not_in(child_non_admin._id, embargo.approval_state)
def test__initiate_embargo_with_save_does_save_embargo(self):
initial_count = Embargo.objects.all().count()
self.registration._initiate_embargo(
self.user,
self.valid_embargo_end_date,
for_existing_registration=True,
)
assert_equal(Embargo.objects.all().count(), initial_count + 1)
# Node#embargo_registration tests
def test_embargo_from_non_admin_raises_PermissionsError(self):
self.registration.remove_permission(self.user, permissions.ADMIN)
self.registration.save()
self.registration.reload()
with assert_raises(PermissionsError):
self.registration.embargo_registration(self.user, self.valid_embargo_end_date)
def test_embargo_end_date_in_past_raises_ValueError(self):
with assert_raises(ValidationError):
self.registration.embargo_registration(
self.user,
datetime.datetime(1999, 1, 1, tzinfo=pytz.utc)
)
def test_embargo_end_date_today_raises_ValueError(self):
with assert_raises(ValidationError):
self.registration.embargo_registration(
self.user,
timezone.now()
)
def test_embargo_end_date_in_far_future_raises_ValidationError(self):
with assert_raises(ValidationError):
self.registration.embargo_registration(
self.user,
datetime.datetime(2099, 1, 1, tzinfo=pytz.utc)
)
def test_embargo_with_valid_end_date_starts_pending_embargo(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
def test_embargo_public_project_makes_private_pending_embargo(self):
self.registration.is_public = True
assert_true(self.registration.is_public)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
assert_false(self.registration.is_public)
# Embargo#approve_embargo tests
def test_invalid_approval_token_raises_InvalidSanctionApprovalToken(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
invalid_approval_token = 'not a real token'
with assert_raises(InvalidSanctionApprovalToken):
self.registration.embargo.approve_embargo(self.user, invalid_approval_token)
assert_true(self.registration.is_pending_embargo)
def test_non_admin_approval_token_raises_PermissionsError(self):
non_admin = UserFactory()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
with assert_raises(PermissionsError):
self.registration.embargo.approve_embargo(non_admin, approval_token)
assert_true(self.registration.is_pending_embargo)
def test_one_approval_with_one_admin_embargoes(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.is_pending_embargo)
def test_approval_adds_to_parent_projects_log(self):
initial_project_logs = self.registration.registered_from.logs.count()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
# Logs: Created, registered, embargo initiated, embargo approved
assert_equal(self.registration.registered_from.logs.count(), initial_project_logs + 2)
def test_one_approval_with_two_admins_stays_pending(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
# First admin approves
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.is_pending_embargo)
num_of_approvals = sum([val['has_approved'] for val in self.registration.embargo.approval_state.values()])
assert_equal(num_of_approvals, 1)
# Second admin approves
approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
self.registration.embargo.approve_embargo(admin2, approval_token)
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.is_pending_embargo)
num_of_approvals = sum([val['has_approved'] for val in self.registration.embargo.approval_state.values()])
assert_equal(num_of_approvals, 2)
# Embargo#disapprove_embargo tests
def test_invalid_rejection_token_raises_InvalidSanctionRejectionToken(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
with assert_raises(InvalidSanctionRejectionToken):
self.registration.embargo.disapprove_embargo(self.user, fake.sentence())
assert_true(self.registration.is_pending_embargo)
def test_non_admin_rejection_token_raises_PermissionsError(self):
non_admin = UserFactory()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
with assert_raises(PermissionsError):
self.registration.embargo.disapprove_embargo(non_admin, rejection_token)
assert_true(self.registration.is_pending_embargo)
def test_one_disapproval_cancels_embargo(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
self.registration.embargo.disapprove_embargo(self.user, rejection_token)
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
assert_false(self.registration.is_pending_embargo)
def test_disapproval_adds_to_parent_projects_log(self):
initial_project_logs = self.registration.registered_from.logs.count()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
registered_from = self.registration.registered_from
self.registration.embargo.disapprove_embargo(self.user, rejection_token)
# Logs: Created, registered, embargo initiated, embargo cancelled
assert_equal(registered_from.logs.count(), initial_project_logs + 2)
def test_cancelling_embargo_deletes_parent_registration(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
self.registration.embargo.disapprove_embargo(self.user, rejection_token)
self.registration.reload()
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
assert_true(self.registration.is_deleted)
def test_cancelling_embargo_deletes_component_registrations(self):
component = NodeFactory(
creator=self.user,
parent=self.project,
title='Component'
)
NodeFactory( # subcomponent
creator=self.user,
parent=component,
title='Subcomponent'
)
project_registration = RegistrationFactory(project=self.project)
component_registration = project_registration._nodes.first()
subcomponent_registration = component_registration._nodes.first()
project_registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
project_registration.save()
rejection_token = project_registration.embargo.approval_state[self.user._id]['rejection_token']
project_registration.embargo.disapprove_embargo(self.user, rejection_token)
assert_equal(project_registration.embargo.state, Embargo.REJECTED)
project_registration.reload()
assert_true(project_registration.is_deleted)
component_registration.reload()
assert_true(component_registration.is_deleted)
subcomponent_registration.reload()
assert_true(subcomponent_registration.is_deleted)
def test_cancelling_embargo_for_existing_registration_does_not_delete_registration(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
self.registration.embargo.disapprove_embargo(self.user, rejection_token)
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
assert_false(self.registration.is_deleted)
def test_rejecting_embargo_for_existing_registration_does_not_deleted_component_registrations(self):
component = NodeFactory(
creator=self.user,
parent=self.project,
title='Component'
)
NodeFactory( # subcomponent
creator=self.user,
parent=component,
title='Subcomponent'
)
project_registration = RegistrationFactory(project=self.project)
component_registration = project_registration._nodes.first()
subcomponent_registration = component_registration._nodes.first()
project_registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
for_existing_registration=True
)
rejection_token = project_registration.embargo.approval_state[self.user._id]['rejection_token']
project_registration.embargo.disapprove_embargo(self.user, rejection_token)
project_registration.save()
assert_equal(project_registration.embargo.state, Embargo.REJECTED)
assert_false(project_registration.is_deleted)
assert_false(component_registration.is_deleted)
assert_false(subcomponent_registration.is_deleted)
# Embargo property tests
def test_new_registration_is_pending_registration(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo_for_existing_registration)
def test_existing_registration_is_not_pending_registration(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
assert_false(self.registration.is_pending_embargo_for_existing_registration)
def test_on_complete_notify_initiator(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
notify_initiator_on_complete=True
)
self.registration.save()
with mock.patch.object(PreregCallbackMixin, '_notify_initiator') as mock_notify:
self.registration.embargo._on_complete(self.user)
assert_equal(mock_notify.call_count, 1)
def test_on_complete_raises_error_if_registration_is_spam(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
notify_initiator_on_complete=True
)
self.registration.spam_status = SpamStatus.FLAGGED
self.registration.save()
with mock.patch.object(PreregCallbackMixin, '_notify_initiator') as mock_notify:
with assert_raises(NodeStateError):
self.registration.embargo._on_complete(self.user)
assert_equal(mock_notify.call_count, 0)
# Regression for OSF-8840
def test_public_embargo_cannot_be_deleted_with_initial_token(self):
embargo_termination_approval = EmbargoTerminationApprovalFactory()
registration = Registration.objects.get(embargo_termination_approval=embargo_termination_approval)
user = registration.contributors.first()
registration.terminate_embargo(Auth(user))
rejection_token = registration.embargo.approval_state[user._id]['rejection_token']
with assert_raises(HTTPError) as e:
registration.embargo.disapprove_embargo(user, rejection_token)
registration.refresh_from_db()
assert registration.is_deleted is False
@pytest.mark.enable_bookmark_creation
class RegistrationWithChildNodesEmbargoModelTestCase(OsfTestCase):
def setUp(self):
super(RegistrationWithChildNodesEmbargoModelTestCase, self).setUp()
self.user = AuthUserFactory()
self.auth = self.user.auth
self.valid_embargo_end_date = timezone.now() + datetime.timedelta(days=3)
self.project = ProjectFactory(title='Root', is_public=False, creator=self.user)
self.component = NodeFactory(
creator=self.user,
parent=self.project,
title='Component'
)
self.subproject = ProjectFactory(
creator=self.user,
parent=self.project,
title='Subproject'
)
self.subproject_component = NodeFactory(
creator=self.user,
parent=self.subproject,
title='Subcomponent'
)
self.registration = RegistrationFactory(project=self.project)
# Reload the registration; else tests won't catch failures to save
self.registration.reload()
def test_approval_embargoes_descendant_nodes(self):
# Initiate embargo for parent registration
self.registration.embargo_registration(
self.user,
self.valid_embargo_end_date
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
# Ensure descendant nodes are pending embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.is_pending_embargo)
# Approve parent registration's embargo
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.registration.embargo.approve_embargo(self.user, approval_token)
assert_true(self.registration.embargo.embargo_end_date)
# Ensure descendant nodes are in embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.embargo_end_date)
def test_disapproval_cancels_embargo_on_descendant_nodes(self):
# Initiate embargo on parent registration
self.registration.embargo_registration(
self.user,
self.valid_embargo_end_date
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
# Ensure descendant nodes are pending embargo
descendants = self.registration.get_descendants_recursive()
for node in descendants:
assert_true(node.is_pending_embargo)
# Disapprove parent registration's embargo
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
self.registration.embargo.disapprove_embargo(self.user, rejection_token)
assert_false(self.registration.is_pending_embargo)
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
# Ensure descendant nodes' embargoes are cancelled
descendants = self.registration.get_descendants_recursive()
for node in descendants:
node.reload()
assert_false(node.is_pending_embargo)
assert_false(node.embargo_end_date)
@pytest.mark.enable_bookmark_creation
class LegacyRegistrationEmbargoApprovalDisapprovalViewsTestCase(OsfTestCase):
"""
    TODO: Remove this set of tests when the process_token_or_pass decorator is
    taken off the view_project view.
"""
def setUp(self):
super(LegacyRegistrationEmbargoApprovalDisapprovalViewsTestCase, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.registration = RegistrationFactory(creator=self.user, project=self.project)
def test_GET_approve_registration_without_embargo_raises_HTTPBad_Request(self):
assert_false(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('view_project', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('view_project', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('view_project', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('view_project', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
@mock.patch('flask.redirect')
def test_GET_approve_with_valid_token_redirects(self, mock_redirect):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.app.get(
self.registration.web_url_for('view_project', token=approval_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.is_pending_embargo)
assert_true(mock_redirect.called_with(self.registration.web_url_for('view_project')))
def test_GET_disapprove_registration_without_embargo_HTTPBad_Request(self):
assert_false(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('view_project', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('view_project', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
self.registration.embargo.reload()
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_rejection_token = self.registration.embargo.approval_state[admin2._id]['rejection_token']
res = self.app.get(
self.registration.web_url_for('view_project', token=wrong_rejection_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_valid(self):
project = ProjectFactory(creator=self.user)
registration = RegistrationFactory(project=project)
registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
registration.save()
assert_true(registration.is_pending_embargo)
rejection_token = registration.embargo.approval_state[self.user._id]['rejection_token']
res = self.app.get(
registration.registered_from.web_url_for('view_project', token=rejection_token),
auth=self.user.auth,
)
registration.embargo.reload()
assert_equal(registration.embargo.state, Embargo.REJECTED)
assert_false(registration.is_pending_embargo)
assert_equal(res.status_code, 200)
assert_equal(project.web_url_for('view_project'), res.request.path)
def test_GET_disapprove_for_existing_registration_returns_200(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
res = self.app.get(
self.registration.web_url_for('view_project', token=rejection_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
assert_false(self.registration.is_pending_embargo)
assert_equal(res.status_code, 200)
assert_equal(res.request.path, self.registration.web_url_for('view_project'))
def test_GET_from_unauthorized_user_with_registration_token(self):
unauthorized_user = AuthUserFactory()
self.registration.require_approval(self.user)
self.registration.save()
app_token = self.registration.registration_approval.approval_state[self.user._id]['approval_token']
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
# Test unauth user cannot approve
res = self.app.get(
# approval token goes through registration
self.registration.web_url_for('view_project', token=app_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test unauth user cannot reject
res = self.app.get(
# rejection token goes through registration parent
self.project.web_url_for('view_project', token=rej_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Delete Node and try again
self.project.is_deleted = True
self.project.save()
# Test unauth user cannot approve deleted node
res = self.app.get(
self.registration.web_url_for('view_project', token=app_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test unauth user cannot reject
res = self.app.get(
self.project.web_url_for('view_project', token=rej_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test auth user can approve registration with deleted parent
res = self.app.get(
self.registration.web_url_for('view_project', token=app_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 200)
def test_GET_from_authorized_user_with_registration_app_token(self):
self.registration.require_approval(self.user)
self.registration.save()
app_token = self.registration.registration_approval.approval_state[self.user._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('view_project', token=app_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 200)
def test_GET_from_authorized_user_with_registration_rej_token(self):
self.registration.require_approval(self.user)
self.registration.save()
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
res = self.app.get(
self.project.web_url_for('view_project', token=rej_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 200)
def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self):
self.registration.require_approval(self.user)
self.registration.save()
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
self.project.is_deleted = True
self.project.save()
res = self.app.get(
self.project.web_url_for('view_project', token=rej_token),
auth=self.user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 410)
res = self.app.get(
self.registration.web_url_for('view_project'),
auth=self.user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 410)
@pytest.mark.enable_bookmark_creation
class RegistrationEmbargoApprovalDisapprovalViewsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoApprovalDisapprovalViewsTestCase, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.registration = RegistrationFactory(creator=self.user, project=self.project)
def test_GET_approve_registration_without_embargo_raises_HTTPBad_Request(self):
assert_false(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('token_action', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('token_action', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('token_action', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_approve_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_approval_token = self.registration.embargo.approval_state[admin2._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('token_action', token=wrong_approval_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
@mock.patch('flask.redirect')
def test_GET_approve_with_valid_token_redirects(self, mock_redirect):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
self.app.get(
self.registration.web_url_for('token_action', token=approval_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_true(self.registration.embargo_end_date)
assert_false(self.registration.is_pending_embargo)
assert_true(mock_redirect.called_with(self.registration.web_url_for('view_project')))
def test_GET_disapprove_registration_without_embargo_HTTPBad_Request(self):
assert_false(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('token_action', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_invalid_token_returns_HTTPBad_Request(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
res = self.app.get(
self.registration.web_url_for('token_action', token=DUMMY_TOKEN),
auth=self.user.auth,
expect_errors=True
)
self.registration.embargo.reload()
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_wrong_admins_token_returns_HTTPBad_Request(self):
admin2 = UserFactory()
Contributor.objects.create(user=admin2, node=self.registration)
self.registration.add_permission(admin2, permissions.ADMIN, save=True)
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
wrong_rejection_token = self.registration.embargo.approval_state[admin2._id]['rejection_token']
res = self.app.get(
self.registration.web_url_for('token_action', token=wrong_rejection_token),
auth=self.user.auth,
expect_errors=True
)
assert_true(self.registration.is_pending_embargo)
assert_equal(res.status_code, 400)
def test_GET_disapprove_with_valid(self):
project = ProjectFactory(creator=self.user)
registration = RegistrationFactory(project=project)
registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
registration.save()
assert_true(registration.is_pending_embargo)
rejection_token = registration.embargo.approval_state[self.user._id]['rejection_token']
res = self.app.get(
registration.registered_from.web_url_for('token_action', token=rejection_token),
auth=self.user.auth,
)
registration.embargo.reload()
assert_equal(registration.embargo.state, Embargo.REJECTED)
assert_false(registration.is_pending_embargo)
assert_equal(res.status_code, 302)
assert_equal(project.web_url_for('token_action'), res.request.path)
def test_GET_disapprove_for_existing_registration_returns_200(self):
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10),
for_existing_registration=True
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
res = self.app.get(
self.registration.web_url_for('token_action', token=rejection_token),
auth=self.user.auth,
)
self.registration.embargo.reload()
assert_equal(self.registration.embargo.state, Embargo.REJECTED)
assert_false(self.registration.is_pending_embargo)
assert_equal(res.status_code, 302)
assert_equal(res.request.path, self.registration.web_url_for('token_action'))
def test_GET_from_unauthorized_user_with_registration_token(self):
unauthorized_user = AuthUserFactory()
self.registration.require_approval(self.user)
self.registration.save()
app_token = self.registration.registration_approval.approval_state[self.user._id]['approval_token']
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
# Test unauth user cannot approve
res = self.app.get(
# approval token goes through registration
self.registration.web_url_for('token_action', token=app_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test unauth user cannot reject
res = self.app.get(
# rejection token goes through registration parent
self.project.web_url_for('token_action', token=rej_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Delete Node and try again
self.project.is_deleted = True
self.project.save()
# Test unauth user cannot approve deleted node
res = self.app.get(
self.registration.web_url_for('token_action', token=app_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test unauth user cannot reject
res = self.app.get(
self.project.web_url_for('token_action', token=rej_token),
auth=unauthorized_user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 401)
# Test auth user can approve registration with deleted parent
res = self.app.get(
self.registration.web_url_for('token_action', token=app_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 302)
def test_GET_from_authorized_user_with_registration_app_token(self):
self.registration.require_approval(self.user)
self.registration.save()
app_token = self.registration.registration_approval.approval_state[self.user._id]['approval_token']
res = self.app.get(
self.registration.web_url_for('token_action', token=app_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 302)
def test_GET_from_authorized_user_with_registration_rej_token(self):
self.registration.require_approval(self.user)
self.registration.save()
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
res = self.app.get(
self.project.web_url_for('token_action', token=rej_token),
auth=self.user.auth,
)
assert_equal(res.status_code, 302)
def test_GET_from_authorized_user_with_registration_rej_token_deleted_node(self):
self.registration.require_approval(self.user)
self.registration.save()
rej_token = self.registration.registration_approval.approval_state[self.user._id]['rejection_token']
self.project.is_deleted = True
self.project.save()
res = self.app.get(
self.project.web_url_for('token_action', token=rej_token),
auth=self.user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 410)
res = self.app.get(
self.registration.web_url_for('token_action'),
auth=self.user.auth,
expect_errors=True,
)
assert_equal(res.status_code, 410)
@pytest.mark.enable_bookmark_creation
class RegistrationEmbargoViewsTestCase(OsfTestCase):
def setUp(self):
super(RegistrationEmbargoViewsTestCase, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.draft = DraftRegistrationFactory(branched_from=self.project)
self.registration = RegistrationFactory(project=self.project, creator=self.user)
current_month = timezone.now().strftime('%B')
current_year = timezone.now().strftime('%Y')
self.valid_make_public_payload = json.dumps({
'data': {
'attributes': {
u'registration_choice': 'immediate',
},
'type': 'registrations',
}
})
valid_date = timezone.now() + datetime.timedelta(days=180)
self.valid_embargo_payload = json.dumps({
'data': {
'attributes': {
                    u'lift_embargo': str(valid_date.strftime('%a, %d, %B %Y %H:%M:%S')) + u' GMT',
u'registration_choice': 'embargo',
},
'type': 'registrations',
},
})
self.invalid_embargo_date_payload = json.dumps({
'data': {
'attributes': {
u'lift_embargo': u'Thu, 01 {month} {year} 05:00:00 GMT'.format(
month=current_month,
year=str(int(current_year) - 1)
),
u'registration_choice': 'embargo',
},
'type': 'registrations',
}
})
@mock.patch('osf.models.sanctions.EmailApprovableSanction.ask')
def test_embargoed_registration_set_privacy_requests_embargo_termination(self, mock_ask):
# Initiate and approve embargo
for i in range(3):
c = AuthUserFactory()
self.registration.add_contributor(c, permissions.ADMIN, auth=Auth(self.user))
self.registration.save()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
for user_id, embargo_tokens in self.registration.embargo.approval_state.items():
approval_token = embargo_tokens['approval_token']
self.registration.embargo.approve_embargo(OSFUser.load(user_id), approval_token)
self.registration.save()
self.registration.set_privacy('public', Auth(self.registration.creator))
for reg in self.registration.node_and_primary_descendants():
reg.reload()
assert_false(reg.is_public)
assert_true(reg.embargo_termination_approval)
assert_true(reg.embargo_termination_approval.is_pending_approval)
def test_cannot_request_termination_on_component_of_embargo(self):
node = ProjectFactory()
ProjectFactory(parent=node, creator=node.creator) # child project
with utils.mock_archive(node, embargo=True, autocomplete=True, autoapprove=True) as reg:
with assert_raises(NodeStateError):
reg._nodes.first().request_embargo_termination(Auth(node.creator))
@mock.patch('website.mails.send_mail')
def test_embargoed_registration_set_privacy_sends_mail(self, mock_send_mail):
"""
Integration test for https://github.com/CenterForOpenScience/osf.io/pull/5294#issuecomment-212613668
"""
# Initiate and approve embargo
for i in range(3):
c = AuthUserFactory()
self.registration.add_contributor(c, permissions.ADMIN, auth=Auth(self.user))
self.registration.save()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
for user_id, embargo_tokens in self.registration.embargo.approval_state.items():
approval_token = embargo_tokens['approval_token']
self.registration.embargo.approve_embargo(OSFUser.load(user_id), approval_token)
self.registration.save()
self.registration.set_privacy('public', Auth(self.registration.creator))
admin_contributors = []
for contributor in self.registration.contributors:
if Contributor.objects.get(user_id=contributor.id, node_id=self.registration.id).permission == permissions.ADMIN:
admin_contributors.append(contributor)
for admin in admin_contributors:
assert_true(any([each[0][0] == admin.username for each in mock_send_mail.call_args_list]))
@mock.patch('osf.models.sanctions.EmailApprovableSanction.ask')
def test_make_child_embargoed_registration_public_asks_all_admins_in_tree(self, mock_ask):
# Initiate and approve embargo
node = NodeFactory(creator=self.user)
c1 = AuthUserFactory()
child = NodeFactory(parent=node, creator=c1)
c2 = AuthUserFactory()
NodeFactory(parent=child, creator=c2)
registration = RegistrationFactory(project=node)
registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
for user_id, embargo_tokens in registration.embargo.approval_state.items():
approval_token = embargo_tokens['approval_token']
registration.embargo.approve_embargo(OSFUser.load(user_id), approval_token)
self.registration.save()
registration.set_privacy('public', Auth(self.registration.creator))
asked_admins = [(admin._id, n._id) for admin, n in mock_ask.call_args[0][0]]
for admin, node in registration.get_admin_contributors_recursive():
assert_in((admin._id, node._id), asked_admins)
def test_non_contributor_GET_approval_returns_HTTPError(self):
non_contributor = AuthUserFactory()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
approval_token = self.registration.embargo.approval_state[self.user._id]['approval_token']
approval_url = self.registration.web_url_for('token_action', token=approval_token)
res = self.app.get(approval_url, auth=non_contributor.auth, expect_errors=True)
self.registration.reload()
assert_equal(http_status.HTTP_401_UNAUTHORIZED, res.status_code)
assert_true(self.registration.is_pending_embargo)
assert_equal(self.registration.embargo.state, Embargo.UNAPPROVED)
def test_non_contributor_GET_disapproval_returns_HTTPError(self):
non_contributor = AuthUserFactory()
self.registration.embargo_registration(
self.user,
timezone.now() + datetime.timedelta(days=10)
)
self.registration.save()
assert_true(self.registration.is_pending_embargo)
rejection_token = self.registration.embargo.approval_state[self.user._id]['rejection_token']
approval_url = self.registration.web_url_for('token_action', token=rejection_token)
res = self.app.get(approval_url, auth=non_contributor.auth, expect_errors=True)
assert_equal(http_status.HTTP_401_UNAUTHORIZED, res.status_code)
assert_true(self.registration.is_pending_embargo)
assert_equal(self.registration.embargo.state, Embargo.UNAPPROVED)
| apache-2.0 | -4,773,855,884,013,464,000 | 41.296026 | 125 | 0.6569 | false |
missionpinball/mpf_mc | mpfmc/core/mode_controller.py | 1 | 8889 | """ ModeController for MPF-MC"""
import logging
import os
from collections import namedtuple
from mpf.core.config_processor import ConfigProcessor
from mpf.core.utility_functions import Util
from mpfmc.core.mode import Mode
RemoteMethod = namedtuple('RemoteMethod',
'method config_section kwargs priority',
                          )
"""RemoteMethod is used by other modules that want to register a method to
be called on mode load or mode start.
"""
# todo create a single ModeController base class for MPF and MPF-MC
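# Illustrative note added for clarity (not part of the original module): a loader
# entry registered for a hypothetical 'slides' config section would be stored as
#   RemoteMethod(method=process_slides, config_section='slides', kwargs={}, priority=0)
# and _load_modes() would later call
#   process_slides(config=..., mode=..., mode_path=..., root_config_dict=...)
# once for every mode whose config contains a 'slides' section.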
class ModeController:
"""Parent class for the Mode Controller. There is one instance of this in
MPF and it's responsible for loading, unloading, and managing all game
modes.
"""
def __init__(self, mc):
self.mc = mc
self.log = logging.getLogger('ModeController')
self.debug = False
self.active_modes = list()
self.mode_stop_count = 0
# The following two lists hold namedtuples of any remote components
# that need to be notified when a mode object is created and/or
# started.
self.loader_methods = list()
self.start_methods = list()
self.stop_methods = list()
self._machine_mode_folders = dict()
self._mpf_mode_folders = dict()
if 'modes' in self.mc.machine_config:
self.mc.events.add_handler('init_phase_2',
self._load_modes)
def _load_modes(self, **kwargs):
del kwargs
# Loads the modes from the modes: section of the machine configuration
# file.
self._build_mode_folder_dicts()
for mode in set(self.mc.machine_config['modes']):
self.mc.modes[mode] = self._load_mode(mode)
# initialise modes after loading all of them to prevent races
for item in self.loader_methods:
for mode in self.mc.modes.values():
if ((item.config_section in mode.config and
mode.config[item.config_section]) or not
item.config_section):
item.method(config=mode.config.get(item.config_section),
mode=mode,
mode_path=mode.path,
root_config_dict=mode.config,
**item.kwargs)
def _load_mode(self, mode_string):
"""Loads a mode, reads in its config, and creates the Mode object.
Args:
            mode_string: String name of the mode you're loading. This is the name of
the mode's folder in your game's machine_files/modes folder.
"""
if self.debug:
self.log.debug('Processing mode: %s', mode_string)
asset_paths = []
mode_path = None
if mode_string in self._mpf_mode_folders:
mode_path = os.path.join(
self.mc.mpf_path,
"modes",
self._mpf_mode_folders[mode_string])
asset_paths.append(mode_path)
if mode_string in self._machine_mode_folders:
mode_path = os.path.join(
self.mc.machine_path,
self.mc.machine_config['mpf-mc']['paths']['modes'],
self._machine_mode_folders[mode_string])
asset_paths.append(mode_path)
if not mode_path:
raise ValueError("No folder found for mode '{}'. Is your mode "
"folder in your machine's 'modes' folder?"
.format(mode_string))
config = self.mc.mc_config.get_mode_config(mode_string)
# validate config
self.mc.config_validator.validate_config("mode", config['mode'])
return Mode(self.mc, config, mode_string, mode_path, asset_paths)
def _build_mode_folder_dicts(self):
self._mpf_mode_folders = self._get_mode_folder(self.mc.mpf_path)
self.log.debug("Found MPF Mode folders: %s", self._mpf_mode_folders)
self._machine_mode_folders = (
self._get_mode_folder(self.mc.machine_path))
self.log.debug("Found Machine-specific Mode folders: %s",
self._machine_mode_folders)
def _get_mode_folder(self, base_folder):
try:
mode_folders = os.listdir(os.path.join(
base_folder, self.mc.machine_config['mpf-mc']['paths']['modes']))
except FileNotFoundError:
return dict()
final_mode_folders = dict()
for folder in mode_folders:
this_mode_folder = os.path.join(
base_folder,
self.mc.machine_config['mpf-mc']['paths']['modes'],
folder)
if os.path.isdir(this_mode_folder) and not folder.startswith('_'):
final_mode_folders[folder] = folder
return final_mode_folders
def register_load_method(self, load_method, config_section_name=None,
priority=0, **kwargs):
"""Used by system components, plugins, etc. to register themselves with
the Mode Controller for anything they need a mode to do when it's
registered.
Args:
load_method: The method that will be called when this mode code
loads.
config_section_name: An optional string for the section of the
configuration file that will be passed to the load_method when
it's called.
priority: Int of the relative priority which allows remote methods
to be called in a specific order. Default is 0. Higher values
will be called first.
**kwargs: Any additional keyword arguments specified will be passed
to the load_method.
        Note that these methods will be called once, when the mode code is
        first initialized during the MPF boot process.
"""
if not callable(load_method):
raise ValueError("Cannot add load method '{}' as it is not"
"callable".format(load_method))
self.loader_methods.append(RemoteMethod(method=load_method,
config_section=config_section_name,
kwargs=kwargs,
priority=priority))
self.loader_methods.sort(key=lambda x: x.priority, reverse=True)
def register_start_method(self, start_method, config_section_name=None,
priority=0, **kwargs):
"""Used by system components, plugins, etc. to register themselves with
        the Mode Controller for anything they need a mode to do when it starts.
Args:
start_method: The method that will be called when this mode code
loads.
config_section_name: An optional string for the section of the
configuration file that will be passed to the start_method when
it's called.
priority: Int of the relative priority which allows remote methods
to be called in a specific order. Default is 0. Higher values
will be called first.
**kwargs: Any additional keyword arguments specified will be passed
to the start_method.
Note that these methods will be called every single time this mode is
started.
"""
if not callable(start_method):
raise ValueError("Cannot add start method '{}' as it is not"
"callable".format(start_method))
self.start_methods.append(RemoteMethod(method=start_method,
config_section=config_section_name,
priority=priority,
kwargs=kwargs))
self.start_methods.sort(key=lambda x: x.priority, reverse=True)
def register_stop_method(self, callback, priority=0):
# these are universal, in that they're called every time a mode stops
# priority is the priority they're called. Has nothing to do with mode
# priority
if not callable(callback):
raise ValueError("Cannot add stop method '{}' as it is not"
"callable".format(callback))
self.stop_methods.append((callback, priority))
self.stop_methods.sort(key=lambda x: x[1], reverse=True)
def active_change(self, mode, active):
# called when a mode goes active or inactive
if active:
self.active_modes.append(mode)
else:
self.active_modes.remove(mode)
# sort the active mode list by priority
self.active_modes.sort(key=lambda x: x.priority, reverse=True)
| mit | -752,988,655,631,671,800 | 37.150215 | 83 | 0.57273 | false |
armijnhemel/cleanup-for-discogs | monthly/compare-not-accepted.py | 1 | 2940 | #! /usr/bin/python3
## Hackish example script to compare some results output by process-discogs-chunks.py
##
## Licensed under the terms of the General Public License version 3
##
## SPDX-License-Identifier: GPL-3.0
##
## Copyright 2017-2019 - Armijn Hemel
import os
import sys
import argparse
def main():
parser = argparse.ArgumentParser()
# the following options are provided on the commandline
parser.add_argument("-f", "--first", action="store", dest="first",
help="path to first file", metavar="FILE")
parser.add_argument("-s", "--second", action="store", dest="second",
help="path to second file", metavar="FILE")
parser.add_argument("-a", "--all", action="store", dest="all",
help="path to all hashes (example: sha256-201909", metavar="FILE")
parser.add_argument("-p", "--printchanged", action="store_true", dest="printchanged",
help="print changed entries instead of statistics")
args = parser.parse_args()
# then some sanity checks for the data files
if args.first is None:
parser.error("Path to first file missing")
if not os.path.exists(args.first):
parser.error("First file %s does not exist" % args.first)
if args.second is None:
parser.error("Path to second file missing")
if not os.path.exists(args.second):
parser.error("Second file %s does not exist" % args.second)
if args.all is None:
parser.error("Path to file with all hashes missing")
if not os.path.exists(args.all):
parser.error("All hashes file %s does not exist" % args.all)
all_releases = set()
try:
shafile1 = open(args.all, 'r')
except:
print("Could not open %s, exiting" % args.all, file=sys.stderr)
sys.exit(1)
for i in shafile1:
(release_id, sha) = i.split('\t')
release = release_id.split('.')[0]
all_releases.add(release)
release_to_status1 = {}
notacceptedfile1 = open(args.first, 'r')
for i in notacceptedfile1:
(release_id, status) = i.split('\t')
release = release_id.split('.')[0]
        release_to_status1[release] = status.strip()
notacceptedfile1.close()
release_to_status2 = {}
notacceptedfile2 = open(args.second, 'r')
for i in notacceptedfile2:
(release_id, status) = i.split('\t')
release = release_id.split('.')[0]
        release_to_status2[release] = status.strip()
notacceptedfile2.close()
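    # With both dumps loaded, report which releases are flagged in one dump but
    # not the other; entries unique to the first dump are also checked against
    # the full hash list to see whether the release still exists at all.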
notkeys1 = set(release_to_status1.keys())
notkeys2 = set(release_to_status2.keys())
print("%d releases in not1 that are not in not2" % len(notkeys1.difference(notkeys2)))
print("%d releases in not2 that are not in not1" % len(notkeys2.difference(notkeys1)))
for i in sorted(notkeys1.difference(notkeys2)):
print(i, i in all_releases)
if __name__ == "__main__":
main()
| gpl-3.0 | 4,643,265,375,250,114,000 | 30.612903 | 90 | 0.620748 | false |
CrafterLuc2/Python | Scenery/main.py | 1 | 2193 | # Importation of Pygame
import pygame
from car import Car
import os
folder = os.path.dirname(os.path.realpath(__file__))
pygame.init()
pygame.mixer.init()
# Colouration
BLACK = ( 0, 0, 0)
WHITE = ( 255, 255, 255)
GREEN = ( 0, 255, 0)
RED = ( 255, 0, 0)
BLUE = (0, 0, 255)
SKYBLUE = (135,206,235)
GRASSGREEN = (74, 197, 5)
YELLOW = (255, 255, 0)
BROWN = (139,69,19)
LEAFEGREEN = (0, 100, 0)
# Basic Window
size = (1200, 800)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Pandorium")
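# Sprite group holding the single player-controlled car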
all_sprites_list = pygame.sprite.Group()
playerCar = Car(RED, 20, 30)
playerCar.rect.x = 200
playerCar.rect.y = 300
all_sprites_list.add(playerCar)
carryOn = True
clock = pygame.time.Clock()
pygame.mixer.music.load(os.path.join(folder, "music.ogg"))
pygame.mixer.music.play(-1)
while carryOn:
for event in pygame.event.get():
if event.type == pygame.QUIT:
carryOn = False
elif event.type==pygame.KEYDOWN:
if event.key==pygame.K_x:
                carryOn = False
keys = pygame.key.get_pressed()
if keys[pygame.K_LEFT]:
playerCar.moveLeft(5)
if keys[pygame.K_RIGHT]:
playerCar.moveRight(5)
#Drawing code
screen.fill(WHITE)
pygame.draw.rect(screen, RED, [55, 200, 100, 70],0)
pygame.draw.line(screen, GREEN, [0, 0], [100, 100], 5)
pygame.draw.ellipse(screen, BLUE, [20,20,250,100], 2)
pygame.draw.rect(screen, SKYBLUE, [0, 0, 1200, 600],0)
pygame.draw.rect(screen, GRASSGREEN, [0, 550, 1200, 300],0)
pygame.draw.ellipse(screen, YELLOW, [100,75, 100,100], 0)
pygame.draw.line(screen, YELLOW, [40,40] , [80,80], 5)
pygame.draw.line(screen, YELLOW, [150,10] , [150,60], 5)
pygame.draw.line(screen, YELLOW, [280,40] , [220,80], 5)
pygame.draw.rect(screen, BROWN, [800,550, 60,-200], 0)
pygame.draw.ellipse(screen, LEAFEGREEN, [700,130, 260,300], 0)
pygame.draw.rect(screen, BLACK, [0,575, 1200,150], 0 )
pygame.draw.line(screen, WHITE, [0,650],[1200,650],10)
all_sprites_list.draw(screen)
pygame.display.flip()
clock.tick(60)
pygame.quit()
| apache-2.0 | 8,310,543,791,064,626,000 | 24.421687 | 66 | 0.611035 | false |
runt18/mojo | mojo/dart/tools/presubmit/check_mojom_dart.py | 1 | 13585 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Checks that released mojom.dart files in the source tree are up to date"""
import argparse
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SRC_DIR = os.path.dirname(
os.path.dirname(
os.path.dirname(
os.path.dirname(SCRIPT_DIR))))
# Insert path to mojom parsing library.
sys.path.insert(0, os.path.join(SRC_DIR,
'mojo',
'public',
'tools',
'bindings',
'pylib'))
from mojom.error import Error
from mojom.parse.parser import Parse
from mojom.parse.translate import Translate
PACKAGES_DIR = os.path.join(SRC_DIR, 'mojo', 'dart', 'packages')
# Script that calculates mojom output paths.
DART_OUTPUTS_SCRIPT = os.path.join(SRC_DIR,
'mojo',
'public',
'tools',
'bindings',
'mojom_list_dart_outputs.py')
# Runs command line in args from cwd. Returns the output as a string.
def run(cwd, args):
return subprocess.check_output(args, cwd=cwd)
# Given a parsed mojom, return the path of the .mojom.dart relative to its
# package directory.
def _mojom_output_path(mojom):
name = mojom['name']
namespace = mojom['namespace']
elements = ['lib']
elements.extend(namespace.split('.'))
elements.append("%s.dart" % name)
return os.path.join(*elements)
# Given a parsed mojom, return the package or None.
def _mojom_package(mojom):
attributes = mojom.get('attributes', {})
return attributes.get('DartPackage')
# Load and parse a .mojom file. Returns the parsed mojom.
def _load_mojom(path_to_mojom):
filename = os.path.abspath(path_to_mojom)
name = os.path.basename(filename)
# Read in mojom file.
with open(filename) as f:
source = f.read()
# Parse
tree = Parse(source, name)
mojom = Translate(tree, name)
return mojom
def _print_regenerate_message(package):
print("""
*** Dart Generated Bindings Check Failed for package: %s
To regenerate all bindings, from the src directory, run:
./mojo/dart/tools/bindings/generate.py
""" % (package))
# Returns a map from package name to source directory.
def _build_package_map():
packages = {}
for package in os.listdir(PACKAGES_DIR):
package_path = os.path.join(PACKAGES_DIR, package)
# Skip everything but directories.
if not os.path.isdir(package_path):
continue
packages[package] = package_path
return packages
# Returns a list of paths to .mojom files vended by package_name.
def _find_mojoms_for_package(package_name):
# Run git grep for all .mojom files with DartPackage="package_name"
try:
output = run(SRC_DIR, ['git',
'grep',
'--name-only',
'DartPackage="' + package_name + '"',
'--',
'*.mojom'])
except subprocess.CalledProcessError as e:
# git grep exits with code 1 if nothing was found.
if e.returncode == 1:
return []
# Process output
mojoms = []
for line in output.splitlines():
line = line.strip()
# Skip empty lines.
if not line:
continue
mojoms.append(line)
return mojoms
# Return the list of expected mojom.dart files for a package.
def _expected_mojom_darts_for_package(mojoms):
output = run(SRC_DIR, ['python',
DART_OUTPUTS_SCRIPT,
'--mojoms'] + mojoms)
mojom_darts = []
for line in output.splitlines():
line = line.strip()
# Skip empty lines.
if not line:
continue
mojom_darts.append(line)
return mojom_darts
# Returns a map indexed by output mojom.dart name with the value of
# the modification time of the .mojom file in the source tree.
def _build_expected_map(mojoms, mojom_darts):
assert(len(mojom_darts) == len(mojoms))
expected = {}
for i in range(0, len(mojoms)):
mojom_path = os.path.join(SRC_DIR, mojoms[i])
expected[mojom_darts[i]] = os.path.getmtime(mojom_path)
return expected
# Returns a map indexed by output mojom.dart name with the value of
# the modification time of the .mojom.dart file in the source tree.
def _build_current_map(package):
current = {}
package_path = os.path.join(PACKAGES_DIR, package)
for directory, _, files in os.walk(package_path):
for filename in files:
if filename.endswith('.mojom.dart'):
path = os.path.abspath(os.path.join(directory, filename))
relpath = os.path.relpath(path, start=PACKAGES_DIR)
current[relpath] = os.path.getmtime(path)
return current
# Checks if a mojom.dart file we expected in the source tree isn't there.
def _check_new(package, expected, current):
check_failure = False
for mojom_dart in expected:
if not current.get(mojom_dart):
print("FAIL: Package %s missing %s" % (package, mojom_dart))
check_failure = True
return check_failure
# Checks if a mojom.dart file exists without an associated .mojom file.
def _check_delete(package, expected, current):
check_failure = False
for mojom_dart in current:
if not expected.get(mojom_dart):
print("FAIL: Package %s no longer has %s." % (package, mojom_dart))
print("Delete %s", os.path.join(PACKAGES_DIR, mojom_dart))
check_failure = True
return check_failure
# Checks if a .mojom.dart file is older than the associated .mojom file.
def _check_stale(package, expected, current):
check_failure = False
for mojom_dart in expected:
# Missing mojom.dart file in source tree case handled by _check_new.
source_mtime = expected[mojom_dart]
if not current.get(mojom_dart):
continue
generated_mtime = current[mojom_dart]
if generated_mtime < source_mtime:
print("FAIL: Package %s has old %s" % (package, mojom_dart))
check_failure = True
return check_failure
# Checks that all .mojom.dart files are newer than time.
def _check_bindings_newer_than(package, current, time):
for mojom_dart in current:
if time > current[mojom_dart]:
# Bindings are older than specified time.
print("FAIL: Package %s has generated bindings older than the bindings"
" scripts / templates." % package)
return True
return False
# Returns True if any checks fail.
def _check(package, expected, current, bindings_gen_mtime):
check_failure = False
if bindings_gen_mtime > 0:
if _check_bindings_newer_than(package, current, bindings_gen_mtime):
check_failure = True
if _check_new(package, expected, current):
check_failure = True
if _check_stale(package, expected, current):
check_failure = True
if _check_delete(package, expected, current):
check_failure = True
return check_failure
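# Checks every package under //mojo/dart/packages: recomputes the expected
# .mojom.dart outputs from the .mojom sources and compares them against the
# generated files currently in the tree.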
def global_check(packages, bindings_gen_mtime=0):
check_failure = False
for package in packages:
mojoms = _find_mojoms_for_package(package)
if not mojoms:
continue
mojom_darts = _expected_mojom_darts_for_package(mojoms)
# We only feed in mojom files with DartPackage annotations, therefore, we
# should have a 1:1 mapping from mojoms[i] to mojom_darts[i].
assert(len(mojom_darts) == len(mojoms))
expected = _build_expected_map(mojoms, mojom_darts)
current = _build_current_map(package)
if _check(package, expected, current, bindings_gen_mtime):
_print_regenerate_message(package)
check_failure = True
return check_failure
def is_mojom_dart(path):
return path.endswith('.mojom.dart')
def is_mojom(path):
return path.endswith('.mojom')
def filter_paths(paths, path_filter):
result = []
for path in paths:
path = os.path.abspath(os.path.join(SRC_DIR, path))
if path_filter(path):
result.append(path)
return result
def safe_mtime(path):
try:
return os.path.getmtime(path)
except Exception:
pass
return 0
def is_bindings_machinery_path(filename):
# NOTE: It's possible other paths inside of
# mojo/public/tools/bindings/generators might also affect the Dart bindings.
# The code below is somewhat conservative and may miss a change.
# Dart templates changed.
if filename.startswith(
'mojo/public/tools/bindings/generators/dart_templates/'):
return True
# Dart generation script changed.
if (filename ==
'mojo/public/tools/bindings/generators/mojom_dart_generator.py'):
return True
return False
# Detects if any part of the Dart bindings generation machinery has changed.
def check_for_bindings_machinery_changes(affected_files):
for filename in affected_files:
if is_bindings_machinery_path(filename):
return True
return False
# Returns the latest modification time for any bindings generation
# machinery files.
def bindings_machinery_latest_mtime(affected_files):
latest_mtime = 0
for filename in affected_files:
if is_bindings_machinery_path(filename):
path = os.path.join(SRC_DIR, filename)
mtime = safe_mtime(path)
if mtime > latest_mtime:
latest_mtime = mtime
return latest_mtime
def presubmit_check(packages, affected_files):
mojoms = filter_paths(affected_files, is_mojom)
mojom_darts = filter_paths(affected_files, is_mojom_dart)
if check_for_bindings_machinery_changes(affected_files):
# Bindings machinery changed, perform global check instead.
latest_mtime = bindings_machinery_latest_mtime(affected_files)
return global_check(packages, latest_mtime)
updated_mojom_dart_files = []
packages_with_failures = []
check_failure = False
# Check for updated .mojom without updated .mojom.dart
for mojom_file in mojoms:
try:
mojom = _load_mojom(mojom_file)
except Exception:
# Could not load .mojom file
print("Could not load mojom file: %s" % mojom_file)
return True
package = _mojom_package(mojom)
# If a mojom doesn't have a package, ignore it.
if not package:
continue
package_dir = packages.get(package)
# If the package isn't a known package, ignore it.
if not package_dir:
continue
# Expected output path relative to src.
mojom_dart_path = os.path.relpath(
os.path.join(package_dir, _mojom_output_path(mojom)), start=SRC_DIR)
mojom_mtime = safe_mtime(mojom_file)
mojom_dart_mtime = safe_mtime(os.path.join(SRC_DIR, mojom_dart_path))
if mojom_mtime > mojom_dart_mtime:
check_failure = True
print("Package %s has old %s" % (package, mojom_dart_path))
if not (package in packages_with_failures):
packages_with_failures.append(package)
continue
# Remember that this .mojom.dart file was updated after the .mojom file.
# This list is used to verify that all updated .mojom.dart files were
# updated because their source .mojom file changed.
updated_mojom_dart_files.append(mojom_dart_path)
# Check for updated .mojom.dart file without updated .mojom file.
for mojom_dart_file in mojom_darts:
# mojom_dart_file is not inside //mojo/dart/packages.
if not mojom_dart_file.startswith(PACKAGES_DIR):
continue
# Path relative to //mojo/dart/packages/
path_relative_to_packages = os.path.relpath(mojom_dart_file,
start=PACKAGES_DIR)
# Package name is first element of split path.
package = path_relative_to_packages.split(os.sep)[0]
# Path relative to src.
mojom_dart_path = os.path.relpath(mojom_dart_file, start=SRC_DIR)
# If mojom_dart_path is not in updated_mojom_dart_files, a .mojom.dart
# file was updated without updating the related .mojom file.
if not (mojom_dart_path in updated_mojom_dart_files):
check_failure = True
print("Package %s has new %s without updating source .mojom file." %
(package, mojom_dart_path))
if not (package in packages_with_failures):
packages_with_failures.append(package)
for package in packages_with_failures:
_print_regenerate_message(package)
return check_failure
def main():
  parser = argparse.ArgumentParser(
      description='Check that generated .mojom.dart files are up to date')
parser.add_argument('--affected-files',
action='store',
metavar='affected_files',
help='List of files that should be checked.',
nargs='+')
args = parser.parse_args()
packages = _build_package_map()
# This script runs in two modes, the first mode is invoked by PRESUBMIT.py
# and passes the list of affected files. This checks for the following cases:
# 1) An updated .mojom file without an updated .mojom.dart file.
# 2) An updated .mojom.dart file without an updated .mojom file.
# NOTE: Case 1) also handles the case of a new .mojom file being added.
#
# The second mode does a global check of all packages under
# //mojo/dart/packages. This checks for the following cases:
# 1) An updated .mojom file without an updated .mojom.dart file.
# 2) A .mojom.dart file without an associated .mojom file (deletion case).
if args.affected_files:
check_failure = presubmit_check(packages, args.affected_files)
else:
check_failure = global_check(packages)
if check_failure:
return 2
return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | 7,206,733,076,434,669,000 | 32.053528 | 79 | 0.664188 | false |
daphnei/nn_chatbot | remove_proper_names/proper_names.py | 1 | 1587 | import urllib2
import csv
from pygtrie import Trie
from pygtrie import PrefixSet
import pickle
import os
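# Downloads the US Census first-name lists, lowercases them, drops a few
# exception words, and caches the resulting prefix set to proper_names.pickle.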
def _create_trie():
tsvs = ["https://www2.census.gov/topics/genealogy/1990surnames/dist.female.first",
"https://www2.census.gov/topics/genealogy/1990surnames/dist.male.first"]
# "https://www2.census.gov/topics/genealogy/1990surnames/dist.all.last"]
    # A hard-coded list of exceptions (names that are more often seen as a
    # common noun at the front of sentences).
exceptions = ["winter", "grant", "van", "son", "young", "royal", "long", "june", "august", "joy", "young", "aura", "ray", "ok", "harmony", "ha", "sun", "in", "many", "see", "so", "my", "may", "an", "les", "will", "love", "man", "major", "faith"]
names = []
for tsv_url in tsvs:
tsv_file = urllib2.urlopen(tsv_url)
tabbed = zip(*[line for line in csv.reader(tsv_file, delimiter=' ')])
names = names + list(tabbed[0])
names_lower = set()
for name in names:
name = name.lower()
if name not in exceptions:
names_lower.add(name)
trie = PrefixSet(names_lower)
with open('proper_names.pickle', 'w') as outfile:
pickle.dump(trie, outfile)
return trie
def get_or_create_proper_names():
if os.path.exists('proper_names.pickle'):
with open('proper_names.pickle', 'r') as file:
return pickle.load(file)
else:
return _create_trie()
if __name__ == "__main__":
p = _create_trie()
print(p.__contains__("daphne"))
print(p.__contains__("xavier"))
print(p.__contains__("sally"))
print(p.__contains__("billy"))
print(p.__contains__("wxyz"))
print(p.__contains__("adamrobinson"))
| mit | 2,190,983,337,128,533,500 | 29.519231 | 246 | 0.655955 | false |
Erotemic/ibeis | ibeis/web/futures_utils/process_actor.py | 1 | 11765 | """ Implements ProcessActor """
from concurrent.futures import _base
from concurrent.futures import process
from multiprocessing.connection import wait
from ibeis.web.futures_utils import _base_actor
import os
import queue
import weakref
import threading
import multiprocessing
# Most of this code is duplicated from the concurrent.futures.thread and
# concurrent.futures.process modules, written by Brian Quinlan. The main
# difference is that we expose an `Actor` class which can be inherited from and
# provides the `executor` classmethod. This creates an asynchronously
# maintained instance of this class in a separate thread/process
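#
# A minimal usage sketch (the Calculator class below is illustrative and not
# part of this module):
#
#   class Calculator(ProcessActor):
#       def handle(self, message):
#           return message * 2
#
#   executor = Calculator.executor()
#   future = executor.post(21)
#   assert future.result() == 42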
__author__ = 'Jon Crall ([email protected])'
def _process_actor_eventloop(_call_queue, _result_queue, _ActorClass, *args,
**kwargs):
"""
actor event loop run in a separate process.
Creates the instance of the actor (passing in the required *args, and
**kwargs). Then the eventloop starts and feeds the actor messages from the
_call_queue. Results are placed in the _result_queue, which are then placed
in Future objects.
"""
actor = _ActorClass(*args, **kwargs)
while True:
call_item = _call_queue.get(block=True)
if call_item is None:
# Wake up queue management thread
_result_queue.put(os.getpid())
return
try:
r = actor.handle(call_item.message)
except BaseException as e:
exc = process._ExceptionWithTraceback(e, e.__traceback__)
_result_queue.put(process._ResultItem(
call_item.work_id, exception=exc))
else:
_result_queue.put(process._ResultItem(
call_item.work_id, result=r))
class _WorkItem(object):
def __init__(self, future, message):
self.future = future
self.message = message
class _CallItem(object):
def __init__(self, work_id, message):
self.work_id = work_id
self.message = message
def _add_call_item_to_queue(pending_work_items,
work_ids,
call_queue):
"""Fills call_queue with _WorkItems from pending_work_items.
This function never blocks.
Args:
pending_work_items: A dict mapping work ids to _WorkItems e.g.
{5: <_WorkItem...>, 6: <_WorkItem...>, ...}
work_ids: A queue.Queue of work ids e.g. Queue([5, 6, ...]). Work ids
are consumed and the corresponding _WorkItems from
pending_work_items are transformed into _CallItems and put in
call_queue.
call_queue: A multiprocessing.Queue that will be filled with _CallItems
derived from _WorkItems.
"""
while True:
if call_queue.full():
return
try:
work_id = work_ids.get(block=False)
except queue.Empty:
return
else:
work_item = pending_work_items[work_id]
if work_item.future.set_running_or_notify_cancel():
call_queue.put(_CallItem(work_id,
work_item.message),
block=True)
else:
del pending_work_items[work_id]
continue
def _queue_management_worker(executor_reference,
_manager,
pending_work_items,
work_ids_queue,
_call_queue,
_result_queue):
"""Manages the communication between this process and the worker processes."""
executor = None
def shutting_down():
return process._shutdown or executor is None or executor._shutdown_thread
def shutdown_worker():
# This is an upper bound
if _manager.is_alive():
_call_queue.put_nowait(None)
# Release the queue's resources as soon as possible.
_call_queue.close()
# If .join() is not called on the created processes then
# some multiprocessing.Queue methods may deadlock on Mac OS X.
_manager.join()
reader = _result_queue._reader
while True:
_add_call_item_to_queue(pending_work_items,
work_ids_queue,
_call_queue)
sentinel = _manager.sentinel
assert sentinel
ready = wait([reader, sentinel])
if reader in ready:
result_item = reader.recv()
else:
# Mark the process pool broken so that submits fail right now.
executor = executor_reference()
if executor is not None:
executor._broken = True
executor._shutdown_thread = True
executor = None
# All futures in flight must be marked failed
for work_id, work_item in pending_work_items.items():
work_item.future.set_exception(
process.BrokenProcessPool(
"A process in the process pool was "
"terminated abruptly while the future was "
"running or pending."
))
# Delete references to object. See issue16284
del work_item
pending_work_items.clear()
# Terminate remaining workers forcibly: the queues or their
# locks may be in a dirty state and block forever.
_manager.terminate()
shutdown_worker()
return
if isinstance(result_item, int):
# Clean shutdown of a worker using its PID
# (avoids marking the executor broken)
assert shutting_down()
_manager.join()
if _manager is None:
shutdown_worker()
return
elif result_item is not None:
work_item = pending_work_items.pop(result_item.work_id, None)
# work_item can be None if another process terminated (see above)
if work_item is not None:
if result_item.exception:
work_item.future.set_exception(result_item.exception)
else:
work_item.future.set_result(result_item.result)
# Delete references to object. See issue16284
del work_item
# Check whether we should start shutting down.
executor = executor_reference()
# No more work items can be added if:
# - The interpreter is shutting down OR
# - The executor that owns this worker has been collected OR
# - The executor that owns this worker has been shutdown.
if shutting_down():
try:
# Since no new work items can be added, it is safe to shutdown
# this thread if there are no pending work items.
if not pending_work_items:
shutdown_worker()
return
except queue.Full:
# This is not a problem: we will eventually be woken up (in
# _result_queue.get()) and be able to send a sentinel again.
pass
executor = None
class ProcessActorExecutor(_base_actor.ActorExecutor):
def __init__(self, _ActorClass, *args, **kwargs):
process._check_system_limits()
self._ActorClass = _ActorClass
# todo: If we want to cancel futures we need to give the task_queue a
# maximum size
self._call_queue = multiprocessing.JoinableQueue()
self._call_queue._ignore_epipe = True
self._result_queue = multiprocessing.Queue()
self._work_ids = queue.Queue()
self._queue_management_thread = None
# We only maintain one process for our actor
self._manager = None
# Shutdown is a two-step process.
self._shutdown_thread = False
self._shutdown_lock = threading.Lock()
self._broken = False
self._queue_count = 0
self._pending_work_items = {}
self._did_initialize = False
if args or kwargs:
# If given actor initialization args we must start the Actor
# immediately. Otherwise just wait until we get a message
print('Init with args')
print('args = %r' % (args,))
self._initialize_actor(*args, **kwargs)
def post(self, message):
with self._shutdown_lock:
if self._broken:
raise process.BrokenProcessPool(
'A child process terminated '
'abruptly, the process pool is not usable anymore')
if self._shutdown_thread:
raise RuntimeError('cannot schedule new futures after shutdown')
f = _base.Future()
w = _WorkItem(f, message)
self._pending_work_items[self._queue_count] = w
self._work_ids.put(self._queue_count)
self._queue_count += 1
# Wake up queue management thread
self._result_queue.put(None)
self._start_queue_management_thread()
return f
post.__doc__ = _base_actor.ActorExecutor.post.__doc__
def _start_queue_management_thread(self):
# When the executor gets lost, the weakref callback will wake up
# the queue management thread.
def weakref_cb(_, q=self._result_queue):
q.put(None)
if self._queue_management_thread is None:
# Start the processes so that their sentinel are known.
self._initialize_actor()
self._queue_management_thread = threading.Thread(
target=_queue_management_worker,
args=(weakref.ref(self, weakref_cb),
self._manager,
self._pending_work_items,
self._work_ids,
self._call_queue,
self._result_queue))
self._queue_management_thread.daemon = True
self._queue_management_thread.start()
# use structures already in futures as much as possible
process._threads_queues[self._queue_management_thread] = self._result_queue
def _initialize_actor(self, *args, **kwargs):
if self._manager is None:
assert self._did_initialize is False, 'only initialize actor once'
self._did_initialize = True
# We only maintain one thread process for an actor
self._manager = multiprocessing.Process(
target=_process_actor_eventloop,
args=(self._call_queue,
self._result_queue, self._ActorClass) + args,
kwargs=kwargs)
self._manager.start()
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown_thread = True
if self._queue_management_thread:
# Wake up queue management thread
self._result_queue.put(None)
if wait:
self._queue_management_thread.join()
# To reduce the risk of opening too many files, remove references to
# objects that use file descriptors.
self._queue_management_thread = None
self._call_queue = None
self._result_queue = None
self._manager = None
shutdown.__doc__ = _base.Executor.shutdown.__doc__
class ProcessActor(_base_actor.Actor):
@classmethod
def executor(cls, *args, **kwargs):
return ProcessActorExecutor(cls, *args, **kwargs)
# executor.__doc__ = _base_actor.Actor.executor.__doc___
# ProcessActor.__doc__ = _base_actor.Actor.__doc___
| apache-2.0 | -5,419,728,240,105,604,000 | 37.198052 | 87 | 0.569571 | false |
andrewyoung1991/supriya | supriya/tools/requesttools/BufferGetRequest.py | 1 | 2109 | # -*- encoding: utf-8 -*-
from supriya.tools import osctools
from supriya.tools.requesttools.Request import Request
class BufferGetRequest(Request):
r'''A /b_get request.
::
>>> from supriya.tools import requesttools
>>> request = requesttools.BufferGetRequest(
... buffer_id=23,
... indices=(0, 4, 8, 16),
... )
>>> request
BufferGetRequest(
buffer_id=23,
indices=(0, 4, 8, 16)
)
::
>>> message = request.to_osc_message()
>>> message
OscMessage(42, 23, 0, 4, 8, 16)
::
>>> message.address == requesttools.RequestId.BUFFER_GET
True
'''
### CLASS VARIABLES ###
__slots__ = (
'_buffer_id',
'_indices',
)
### INITIALIZER ###
def __init__(
self,
buffer_id=None,
indices=None,
):
Request.__init__(self)
self._buffer_id = int(buffer_id)
self._indices = tuple(int(index) for index in indices)
### PUBLIC METHODS ###
def to_osc_message(self):
request_id = int(self.request_id)
buffer_id = int(self.buffer_id)
contents = [
request_id,
buffer_id,
]
if self.indices:
for index in self.indices:
contents.append(index)
message = osctools.OscMessage(*contents)
return message
### PUBLIC PROPERTIES ###
@property
def buffer_id(self):
return self._buffer_id
@property
def indices(self):
return self._indices
@property
def response_specification(self):
from supriya.tools import responsetools
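        # The server answers a /b_get request with a /b_set reply carrying the
        # requested sample values; failures come back as /fail.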
return {
responsetools.BufferSetResponse: {
'buffer_id': self.buffer_id,
},
responsetools.FailResponse: {
'failed_command': '/b_get',
}
}
@property
def request_id(self):
from supriya.tools import requesttools
return requesttools.RequestId.BUFFER_GET | mit | 8,440,430,544,276,011,000 | 21.688172 | 64 | 0.515884 | false |