Dataset schema (one row per source file):

| column | dtype | range / values |
|---|---|---|
| repo_name | string | length 5–92 |
| path | string | length 4–232 |
| copies | string | 19 distinct values |
| size | string | length 4–7 |
| content | string | length 721–1.04M |
| license | string | 15 distinct values |
| hash | int64 | -9,223,277,421,539,062,000 to 9,223,102,107B |
| line_mean | float64 | 6.51–99.9 |
| line_max | int64 | 15–997 |
| alpha_frac | float64 | 0.25–0.97 |
| autogenerated | bool | 1 class |
mch/python-ant | src/ant/core/message.py | copies: 1 | size: 18462

# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2011, Martín Raúl Villalba
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
##############################################################################
# pylint: disable=missing-docstring
from __future__ import division, absolute_import, print_function, unicode_literals
from struct import pack, unpack
from six import with_metaclass
from ant.core import constants
from ant.core.constants import MESSAGE_TX_SYNC, RESPONSE_NO_ERROR
from ant.core.exceptions import MessageError
class MessageType(type):
def __init__(cls, name, bases, dict_):
super(MessageType, cls).__init__(name, bases, dict_)
type_ = cls.type
if type_ is not None:
cls.TYPES[type_] = cls
def __call__(cls, *args, **kwargs):
if cls.type is not None:
return super(MessageType, cls).__call__(*args, **kwargs)
type_ = kwargs.get('type')
if type_ is None:
            raise RuntimeError("Message cannot be untyped")
del kwargs['type']
msgType = cls.TYPES.get(type_)
if msgType is not None:
return msgType(*args, **kwargs)
if 0x00 <= type_ <= 0xFF:
msg = super(MessageType, cls).__call__(*args, **kwargs)
msg.type = type_
return msg
else:
raise MessageError('Could not set type (type out of range).',
internal=Message.CORRUPTED)
MSG_HEADER_SIZE = 3
MSG_FOOTER_SIZE = 1
class Message(with_metaclass(MessageType)):
TYPES = {}
type = None
INCOMPLETE = 'incomplete'
CORRUPTED = 'corrupted'
MALFORMED = 'malformed'
def __init__(self, payload=None):
self._payload = None
self.payload = payload if payload is not None else bytearray()
@property
def payload(self):
return self._payload
@payload.setter
def payload(self, payload):
if len(payload) > 9:
raise MessageError('Could not set payload (payload too long).',
internal=Message.MALFORMED)
self._payload = payload
@property
def checksum(self):
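        # ANT frame checksum: XOR of the sync byte, the payload length,
        # the message type, and every payload byte.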
checksum = MESSAGE_TX_SYNC ^ len(self._payload) ^ self.type
for byte in self._payload:
checksum ^= byte
return checksum
def encode(self):
raw, payload = bytearray(len(self)), self._payload
        raw[0:MSG_HEADER_SIZE] = (MESSAGE_TX_SYNC, len(payload), self.type)
raw[MSG_HEADER_SIZE:-MSG_FOOTER_SIZE] = payload
raw[-1] = self.checksum
return raw
@classmethod
def decode(cls, raw):
raw = bytearray(raw)
if len(raw) < 5:
raise MessageError('Could not decode. Message length should be >=5 bytes but was %d.' % len(raw),
internal=Message.INCOMPLETE)
sync, length, type_ = raw[:MSG_HEADER_SIZE]
if sync != MESSAGE_TX_SYNC:
raise MessageError('Could not decode. Expected TX sync but got 0x%.2x.' % sync,
internal=Message.CORRUPTED)
if len(raw) < (length + MSG_HEADER_SIZE + MSG_FOOTER_SIZE):
raise MessageError('Could not decode. Message length should be %d but was %d.' %
(length + MSG_HEADER_SIZE + MSG_FOOTER_SIZE, len(raw)),
internal=Message.INCOMPLETE)
msg = Message(type=type_) # pylint: disable=unexpected-keyword-arg
msg.payload = raw[MSG_HEADER_SIZE:length + MSG_HEADER_SIZE]
if msg.checksum != raw[length + MSG_HEADER_SIZE]:
raise MessageError('Could not decode. Checksum should be 0x%.2x but was 0x%.2x.' %
(raw[length + MSG_HEADER_SIZE], msg.checksum),
internal=Message.CORRUPTED)
return msg
def __len__(self):
return len(self._payload) + MSG_HEADER_SIZE + MSG_FOOTER_SIZE
def __str__(self, data=None):
rawstr = '<' + self.__class__.__name__
if data is not None:
rawstr += ': ' + data
return rawstr + '>'
class ChannelMessage(Message):
def __init__(self, payload=b'', number=0x00):
super(ChannelMessage, self).__init__(bytearray(1) + payload)
self.channelNumber = number
@property
def channelNumber(self):
return self._payload[0]
@channelNumber.setter
def channelNumber(self, number):
if (number > 0xFF) or (number < 0x00):
raise MessageError('Could not set channel number. Should be 0 to 255 but was %s.' % number)
self._payload[0] = number
def __str__(self, data=None):
rawstr = "C(%d)" % self.channelNumber
if data is not None:
rawstr += ': ' + data
return super(ChannelMessage, self).__str__(data=rawstr)
# Config messages
class ChannelUnassignMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_UNASSIGN
def __init__(self, number=0x00):
super(ChannelUnassignMessage, self).__init__(number=number)
class ChannelAssignMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_ASSIGN
def __init__(self, number=0x00, channelType=0x00, network=0x00):
super(ChannelAssignMessage, self).__init__(payload=bytearray(2), number=number)
self.channelType = channelType
self.networkNumber = network
@property
def channelType(self):
return self._payload[1]
@channelType.setter
def channelType(self, type_):
self._payload[1] = type_
@property
def networkNumber(self):
return self._payload[2]
@networkNumber.setter
def networkNumber(self, number):
self._payload[2] = number
class ChannelIDMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_ID
def __init__(self, number=0x00, device_number=0x0000, device_type=0x00,
trans_type=0x00):
super(ChannelIDMessage, self).__init__(payload=bytearray(4), number=number)
self.deviceNumber = device_number
self.deviceType = device_type
self.transmissionType = trans_type
@property
def deviceNumber(self):
return unpack(b'<H', bytes(self._payload[1:3]))[0]
@deviceNumber.setter
def deviceNumber(self, device_number):
self._payload[1:3] = pack(b'<H', device_number)
@property
def deviceType(self):
return self._payload[3]
@deviceType.setter
def deviceType(self, device_type):
self._payload[3] = device_type
@property
def transmissionType(self):
return self._payload[4]
@transmissionType.setter
def transmissionType(self, trans_type):
self._payload[4] = trans_type
class ChannelPeriodMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_PERIOD
def __init__(self, number=0x00, period=8192):
super(ChannelPeriodMessage, self).__init__(payload=bytearray(2), number=number)
self.channelPeriod = period
@property
def channelPeriod(self):
return unpack('<H', bytes(self._payload[1:3]))[0]
@channelPeriod.setter
def channelPeriod(self, period):
self._payload[1:3] = pack('<H', period)
class ChannelSearchTimeoutMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_SEARCH_TIMEOUT
def __init__(self, number=0x00, timeout=0xFF):
super(ChannelSearchTimeoutMessage, self).__init__(payload=bytearray(1),
number=number)
self.timeout = timeout
@property
def timeout(self):
return self._payload[1]
@timeout.setter
def timeout(self, timeout):
self._payload[1] = timeout
class ChannelFrequencyMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_FREQUENCY
def __init__(self, number=0x00, frequency=66):
super(ChannelFrequencyMessage, self).__init__(payload=bytearray(1), number=number)
self.frequency = frequency
@property
def frequency(self):
return self._payload[1]
@frequency.setter
def frequency(self, frequency):
self._payload[1] = frequency
class ChannelTXPowerMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_TX_POWER
def __init__(self, number=0x00, power=0x00):
super(ChannelTXPowerMessage, self).__init__(payload=bytearray(1), number=number)
self.power = power
@property
def power(self):
return self._payload[1]
@power.setter
def power(self, power):
self._payload[1] = power
class NetworkKeyMessage(Message):
type = constants.MESSAGE_NETWORK_KEY
def __init__(self, number=0x00, key=b'\x00' * 8):
super(NetworkKeyMessage, self).__init__(payload=bytearray(9))
self.number = number
self.key = key
@property
def number(self):
return self._payload[0]
@number.setter
def number(self, number):
self._payload[0] = number
@property
def key(self):
return self._payload[1:]
@key.setter
def key(self, key):
self._payload[1:] = key
class TXPowerMessage(Message):
type = constants.MESSAGE_TX_POWER
def __init__(self, power=0x00):
super(TXPowerMessage, self).__init__(payload=bytearray(2))
self.power = power
@property
def power(self):
return self._payload[1]
@power.setter
def power(self, power):
self._payload[1] = power
# Control messages
class SystemResetMessage(Message):
type = constants.MESSAGE_SYSTEM_RESET
def __init__(self):
super(SystemResetMessage, self).__init__(payload=bytearray(1))
class ChannelOpenMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_OPEN
def __init__(self, number=0x00):
super(ChannelOpenMessage, self).__init__(number=number)
class ChannelCloseMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_CLOSE
def __init__(self, number=0x00):
super(ChannelCloseMessage, self).__init__(number=number)
class ChannelRequestMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_REQUEST
def __init__(self, number=0x00, messageID=constants.MESSAGE_CHANNEL_STATUS):
super(ChannelRequestMessage, self).__init__(payload=bytearray(1), number=number)
self.messageID = messageID
@property
def messageID(self):
return self._payload[1]
@messageID.setter
def messageID(self, messageID):
if (messageID > 0xFF) or (messageID < 0x00):
raise MessageError('Could not set message ID. Should be 0 to 255 but was %s.' % messageID)
self._payload[1] = messageID
# Data messages
class ChannelBroadcastDataMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_BROADCAST_DATA
def __init__(self, number=0x00, data=b'\x00' * 7):
super(ChannelBroadcastDataMessage, self).__init__(payload=data, number=number)
@property
def data(self):
return self._payload[1:9]
class ChannelAcknowledgedDataMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_ACKNOWLEDGED_DATA
def __init__(self, number=0x00, data=b'\x00' * 7):
super(ChannelAcknowledgedDataMessage, self).__init__(payload=data, number=number)
@property
def data(self):
return self._payload[1:9]
class ChannelBurstDataMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_BURST_DATA
def __init__(self, number=0x00, data=b'\x00' * 7):
super(ChannelBurstDataMessage, self).__init__(payload=data, number=number)
@property
def data(self):
return self._payload[1:9]
# Channel event messages
class ChannelEventResponseMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_EVENT
def __init__(self, number=0x00, message_id=0x00, message_code=0x00):
super(ChannelEventResponseMessage, self).__init__(payload=bytearray(2),
number=number)
self.messageID = message_id
self.messageCode = message_code
@property
def messageID(self):
return self._payload[1]
@messageID.setter
def messageID(self, message_id):
if (message_id > 0xFF) or (message_id < 0x00):
raise MessageError('Could not set message ID. Should be 0 to 255 but was %s.' % message_id)
self._payload[1] = message_id
@property
def messageCode(self):
return self._payload[2]
@messageCode.setter
def messageCode(self, message_code):
if (message_code > 0xFF) or (message_code < 0x00):
raise MessageError('Could not set message code. Should be 0 to 255 but was %s.' % message_code)
self._payload[2] = message_code
def __str__(self): # pylint: disable=W0221
msgCode = self.messageCode
if self.messageID != 1:
return "<ChannelResponse: '%s' on C(%d): %s>" % (
self.TYPES[self.messageID].__name__, self.channelNumber,
'OK' if msgCode == RESPONSE_NO_ERROR else '0x%.2x' % msgCode)
return "<ChannelEvent: C(%d): 0x%.2x>" % (self.channelNumber, msgCode)
# Requested response messages
class ChannelStatusMessage(ChannelMessage):
type = constants.MESSAGE_CHANNEL_STATUS
def __init__(self, number=0x00, status=0x00):
super(ChannelStatusMessage, self).__init__(payload=bytearray(1), number=number)
self.status = status
@property
def status(self):
return self._payload[1]
@status.setter
def status(self, status):
if (status > 0xFF) or (status < 0x00):
raise MessageError('Could not set channel status. Should be 0 to 255 but was %s.' % status)
self._payload[1] = status
class VersionMessage(Message):
type = constants.MESSAGE_VERSION
def __init__(self, version=b'\x00' * 9):
super(VersionMessage, self).__init__(payload=bytearray(9))
self.version = version
@property
def version(self):
return self._payload
@version.setter
def version(self, version):
if len(version) != 9:
raise MessageError('Could not set ANT version (expected 9 bytes).')
self.payload = bytearray(version)
class StartupMessage(Message):
type = constants.MESSAGE_STARTUP
def __init__(self, startupMessage=0x00):
super(StartupMessage, self).__init__(payload=bytearray(1))
self.startupMessage = startupMessage
@property
def startupMessage(self):
return self._payload[0]
@startupMessage.setter
def startupMessage(self, startupMessage):
if (startupMessage > 0xFF) or (startupMessage < 0x00):
raise MessageError('Could not set start-up message. Should be 0 to 255 but was %s.' % startupMessage)
self._payload[0] = startupMessage
class CapabilitiesMessage(Message):
type = constants.MESSAGE_CAPABILITIES
def __init__(self, max_channels=0x00, max_nets=0x00, std_opts=0x00,
                 adv_opts=0x00, adv_opts2=None):
super(CapabilitiesMessage, self).__init__(payload=bytearray(4))
self.maxChannels = max_channels
self.maxNetworks = max_nets
self.stdOptions = std_opts
self.advOptions = adv_opts
if adv_opts2 is not None:
self.advOptions2 = adv_opts2
@property
def maxChannels(self):
return self._payload[0]
@maxChannels.setter
def maxChannels(self, num):
if (num > 0xFF) or (num < 0x00):
raise MessageError('Could not set max channels. Should be 0 to 255 but was %s.' % num)
self._payload[0] = num
@property
def maxNetworks(self):
return self._payload[1]
@maxNetworks.setter
def maxNetworks(self, num):
if (num > 0xFF) or (num < 0x00):
raise MessageError('Could not set max networks. Should be 0 to 255 but was %s.' % num)
self._payload[1] = num
@property
def stdOptions(self):
return self._payload[2]
@stdOptions.setter
def stdOptions(self, num):
if (num > 0xFF) or (num < 0x00):
raise MessageError('Could not set std options. Should be 0 to 255 but was %s.' % num)
self._payload[2] = num
@property
def advOptions(self):
return self._payload[3]
@advOptions.setter
def advOptions(self, num):
if (num > 0xFF) or (num < 0x00):
raise MessageError('Could not set adv options. Should be 0 to 255 but was %s.' % num)
self._payload[3] = num
@property
def advOptions2(self):
return self._payload[4] if len(self._payload) == 5 else 0x00
@advOptions2.setter
def advOptions2(self, num):
if (num > 0xFF) or (num < 0x00):
raise MessageError('Could not set adv options 2. Should be 0 to 255 but was %s.' % num)
if len(self._payload) == 4:
self._payload.append(0)
self._payload[4] = num
class SerialNumberMessage(Message):
type = constants.MESSAGE_SERIAL_NUMBER
def __init__(self, serial=b'\x00' * 4):
super(SerialNumberMessage, self).__init__()
self.serialNumber = serial
@property
def serialNumber(self):
return self._payload
@serialNumber.setter
def serialNumber(self, serial):
if len(serial) != 4:
raise MessageError('Could not set serial number (expected 4 bytes).')
self.payload = bytearray(serial)
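
A minimal round-trip sketch for the message classes above, assuming the
package is importable as `ant.core.message` (the channel and network values
are made up for illustration):

```python
# Illustrative only -- the constructor arguments are assumed values.
from ant.core.message import ChannelAssignMessage, Message

msg = ChannelAssignMessage(number=0x01, channelType=0x00, network=0x00)
raw = msg.encode()             # bytearray: sync, length, type, payload, checksum
decoded = Message.decode(raw)  # MessageType dispatches to ChannelAssignMessage
assert decoded.type == msg.type
assert decoded.payload == msg.payload
```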
license: mit

plasticantifork/PS2Devs | retweet.py | copies: 1 | size: 1421

#!/usr/bin/python
import tweepy
import ConfigParser
import sys, os
config = ConfigParser.SafeConfigParser()
config.read(os.path.join(sys.path[0], 'config'))
auth = tweepy.OAuthHandler(config.get('auth','consumer_key'), config.get('auth','consumer_secret'))
auth.set_access_token(config.get('auth','access_token'), config.get('auth','access_token_secret'))
api = tweepy.API(auth)
twitterQuery = config.get('search','query')
try:
with open(os.path.join(sys.path[0], 'lastTweetId'), 'r') as f:
sinceId = f.read()
except IOError:
sinceId = ''
timelineIterator = tweepy.Cursor(api.search, q=twitterQuery, since_id=sinceId).items()
timeline = []
for status in timelineIterator:
timeline.append(status)
try:
lastTweetId = timeline[0].id
except IndexError:
lastTweetId = sinceId
rtCounter = 0
errCounter = 0
timeline.reverse()
for status in timeline:
try:
print '(%(date)s) %(name)s: %(message)s' % \
{ 'date' : status.created_at,
'name' : status.author.screen_name.encode('utf-8'),
'message' : status.text.encode('utf-8') }
api.retweet(status.id)
rtCounter += 1
except tweepy.error.TweepError as e:
errCounter += 1
print e
continue
if errCounter != 0:
print '%d errors occurred' % errCounter
with open(os.path.join(sys.path[0], 'lastTweetId'), 'w') as file:
file.write(str(lastTweetId))
license: mit

dakrauth/picker | picker/migrations/0003_auto_20180801_0800.py | copies: 1 | size: 5687

# Generated by Django 2.0.7 on 2018-08-01 12:00
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import picker.models
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('picker', '0002_auto_20160720_0917'),
]
operations = [
migrations.CreateModel(
name='PickerFavorite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
],
),
migrations.CreateModel(
name='PickerGrouping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=75, unique=True)),
('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive')], default='ACTV', max_length=4)),
],
),
migrations.CreateModel(
name='PickerMembership',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status', models.CharField(choices=[('ACTV', 'Active'), ('IDLE', 'Inactive'), ('SUSP', 'Suspended'), ('MNGT', 'Manager')], default='ACTV', max_length=4)),
('autopick', models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4)),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='members', to='picker.PickerGrouping')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='picker_memberships', to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='league',
name='current_season',
field=models.IntegerField(blank=True, null=True),
),
migrations.AddField(
model_name='league',
name='slug',
field=models.SlugField(default=picker.models.temp_slug),
),
migrations.AddField(
model_name='pickset',
name='is_winner',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='game',
name='category',
field=models.CharField(choices=[('REG', 'Regular Season'), ('POST', 'Post Season'), ('PRE', 'Pre Season'), ('FRND', 'Friendly')], default='REG', max_length=4),
),
migrations.AlterField(
model_name='game',
name='status',
field=models.CharField(choices=[('U', 'Unplayed'), ('T', 'Tie'), ('H', 'Home Win'), ('A', 'Away Win'), ('X', 'Cancelled')], default='U', max_length=1),
),
migrations.AlterField(
model_name='game',
name='tv',
field=models.CharField(blank=True, max_length=8, verbose_name='TV'),
),
migrations.AlterField(
model_name='gameset',
name='byes',
field=models.ManyToManyField(blank=True, related_name='bye_set', to='picker.Team', verbose_name='Bye Teams'),
),
migrations.AlterField(
model_name='league',
name='logo',
field=models.ImageField(blank=True, null=True, upload_to='picker/logos'),
),
migrations.AlterField(
model_name='pickset',
name='strategy',
field=models.CharField(choices=[('USER', 'User'), ('RAND', 'Random'), ('HOME', 'Home Team'), ('BEST', 'Best Record')], default='USER', max_length=4),
),
migrations.AlterField(
model_name='playoffpicks',
name='user',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='preference',
name='autopick',
field=models.CharField(choices=[('NONE', 'None'), ('RAND', 'Random')], default='RAND', max_length=4),
),
migrations.AlterField(
model_name='team',
name='logo',
field=models.ImageField(blank=True, null=True, upload_to='picker/logos'),
),
migrations.AlterUniqueTogether(
name='preference',
unique_together=set(),
),
migrations.AddField(
model_name='pickergrouping',
name='leagues',
field=models.ManyToManyField(blank=True, to='picker.League'),
),
migrations.AddField(
model_name='pickerfavorite',
name='league',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='picker.League'),
),
migrations.AddField(
model_name='pickerfavorite',
name='team',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='picker.Team'),
),
migrations.AddField(
model_name='pickerfavorite',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
migrations.RemoveField(
model_name='preference',
name='favorite_team',
),
migrations.RemoveField(
model_name='preference',
name='league',
),
migrations.RemoveField(
model_name='preference',
name='status',
),
]
license: mit

vlegoff/tsunami | src/primaires/joueur/commandes/montrer/niveaux.py | copies: 1 | size: 2438

# -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Module contenant la commande 'montrer niveaux'."""
from primaires.interpreteur.masque.parametre import Parametre
from primaires.perso.montrer.niveaux import MontrerNiveaux
class PrmNiveaux(Parametre):
"""Commande 'montrer niveaux'."""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "niveaux", "levels")
self.tronquer = True
self.schema = "<nom_joueur>"
self.aide_courte = "affiche le niveaux d'un joueur"
self.aide_longue = \
"Cette commande montre les niveaux d'un joueur. Vous devez " \
"simplement préciser le nom du joueur en paramètre."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
joueur = dic_masques["nom_joueur"].joueur
personnage << MontrerNiveaux.montrer(joueur)
license: bsd-3-clause

cryvate/project-euler | project_euler/library/number_theory/pells_equation.py | copies: 1 | size: 1029

from .continued_fractions import convergents_sqrt
from typing import Generator, Tuple
def solve_pells_equation(n: int) -> Generator[Tuple[int, int], None, None]:
convergents = convergents_sqrt(n)
for convergent in convergents:
h = convergent.numerator
k = convergent.denominator
if h ** 2 - n * (k ** 2) == 1:
break
x, y = h, k
while True:
yield x, y
x, y = h * x + n * k * y, h * y + k * x
def solve_negative_pells_equation(n: int) -> \
Generator[Tuple[int, int], None, None]:
convergents = convergents_sqrt(n)
for convergent in convergents:
h = convergent.numerator
k = convergent.denominator
if h ** 2 - n * (k ** 2) == -1:
break
if h ** 2 - n * (k ** 2) == 1:
raise ValueError(f"Equation x^2 - {n}y^2 = -1 has no solution")
x, y = h, k
while True:
yield x, y
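        # Compose with the fundamental solution twice: one application of
        # (x, y) -> (h*x + n*k*y, h*y + k*x) turns a solution of -1 into a
        # solution of +1; the second application returns to -1.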
x, y = h * x + n * k * y, h * y + k * x
x, y = h * x + n * k * y, h * y + k * x
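
A quick sanity check for the positive-equation generator above; the first
solutions of x^2 - 2y^2 = 1 are (3, 2), (17, 12), (99, 70):

```python
from itertools import islice

for x, y in islice(solve_pells_equation(2), 3):
    assert x * x - 2 * y * y == 1
    print(x, y)  # (3, 2), then (17, 12), then (99, 70)
```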
license: mit

paulgradie/SeqPyPlot | main_app/seqpyplot/parsers/htseq_parser.py | copies: 1 | size: 2244

"""
Read a directory of expression counts in ht-seq format. Each sample
should be an individual file in the directory. File names and
sample order are specified in the config file (order is determined
by order IN the config.)
This class is intended to return the raw dataframe of samples with
missing sample columns as NaN.
"""
import pandas as pd
from pathos.multiprocessing import ProcessPool
import pathlib
try:
from functools import reduce # for py3 compatibility
except ImportError:
pass
class HtSeqParser(object):
def __init__(self, nodes=2):
self.nodes = nodes
def parse_data(self, data_paths, sample_names):
"""
Read the input files from the config file and load in to a
pandas dataframe.
params
data_paths: list of file paths specified in the config. Returned
from config parse sample_names: list of sample names specified in
the config returned from config parse
"""
output = self.load_data(data_paths, sample_names)
data, ercc_df = (self.merge_dfs(output)
.pipe(self.df_cleanup)
.pipe(self.split_on_ercc))
return data, ercc_df
def load_data(self, data_paths, sample_names):
" Multiprocess load of files in to a list of dfs "
pool = ProcessPool(nodes=self.nodes)
dfs = pool.map(self.load_func, zip(data_paths, sample_names))
return dfs
@staticmethod
def load_func(data_tuple):
path, sample_name = data_tuple
return pd.read_csv(path, sep='\t', names=['gene', sample_name])
def merge_dfs(self, dfs):
return reduce(lambda x, y: pd.merge(x, y, on='gene', how='outer'), dfs)
def df_cleanup(self, df_old):
" Clean away unwanted columns, reset index, and fillna "
df = df_old.copy()
df = df[df['gene'].str.startswith('__') == False]
df.set_index('gene', inplace=True)
df.fillna(value='Nan', inplace=True)
return df
def split_on_ercc(self, df):
" Extract the ERCC data "
ercc_cols = df.index.str.startswith('ERCC-')
ercc_df = df[ercc_cols]
data = df[~ercc_cols]
return data, ercc_df
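
A hypothetical invocation of the parser above; the paths and sample names
are assumptions (each path should point to a two-column ht-seq count file
named in the config):

```python
# Illustrative only -- file paths and sample names are made up.
parser = HtSeqParser(nodes=2)
data, ercc_df = parser.parse_data(
    data_paths=['counts/ctrl_rep1.txt', 'counts/treat_rep1.txt'],
    sample_names=['ctrl_1', 'treat_1'])
print(data.head())     # gene x sample count matrix
print(ercc_df.head())  # spike-in rows (index starts with 'ERCC-')
```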
license: gpl-3.0

dmlc/xgboost | tests/python/test_with_pandas.py | copies: 1 | size: 10402

# -*- coding: utf-8 -*-
import numpy as np
import xgboost as xgb
import testing as tm
import pytest
try:
import pandas as pd
except ImportError:
pass
pytestmark = pytest.mark.skipif(**tm.no_pandas())
dpath = 'demo/data/'
rng = np.random.RandomState(1994)
class TestPandas:
def test_pandas(self):
df = pd.DataFrame([[1, 2., True], [2, 3., False]],
columns=['a', 'b', 'c'])
dm = xgb.DMatrix(df, label=pd.Series([1, 2]))
assert dm.feature_names == ['a', 'b', 'c']
assert dm.feature_types == ['int', 'float', 'i']
assert dm.num_row() == 2
assert dm.num_col() == 3
np.testing.assert_array_equal(dm.get_label(), np.array([1, 2]))
# overwrite feature_names and feature_types
dm = xgb.DMatrix(df, label=pd.Series([1, 2]),
feature_names=['x', 'y', 'z'],
feature_types=['q', 'q', 'q'])
assert dm.feature_names == ['x', 'y', 'z']
assert dm.feature_types == ['q', 'q', 'q']
assert dm.num_row() == 2
assert dm.num_col() == 3
# incorrect dtypes
df = pd.DataFrame([[1, 2., 'x'], [2, 3., 'y']],
columns=['a', 'b', 'c'])
with pytest.raises(ValueError):
xgb.DMatrix(df)
# numeric columns
df = pd.DataFrame([[1, 2., True], [2, 3., False]])
dm = xgb.DMatrix(df, label=pd.Series([1, 2]))
assert dm.feature_names == ['0', '1', '2']
assert dm.feature_types == ['int', 'float', 'i']
assert dm.num_row() == 2
assert dm.num_col() == 3
np.testing.assert_array_equal(dm.get_label(), np.array([1, 2]))
df = pd.DataFrame([[1, 2., 1], [2, 3., 1]], columns=[4, 5, 6])
dm = xgb.DMatrix(df, label=pd.Series([1, 2]))
assert dm.feature_names == ['4', '5', '6']
assert dm.feature_types == ['int', 'float', 'int']
assert dm.num_row() == 2
assert dm.num_col() == 3
df = pd.DataFrame({'A': ['X', 'Y', 'Z'], 'B': [1, 2, 3]})
dummies = pd.get_dummies(df)
# B A_X A_Y A_Z
# 0 1 1 0 0
# 1 2 0 1 0
# 2 3 0 0 1
result, _, _ = xgb.data._transform_pandas_df(dummies,
enable_categorical=False)
exp = np.array([[1., 1., 0., 0.],
[2., 0., 1., 0.],
[3., 0., 0., 1.]])
np.testing.assert_array_equal(result, exp)
dm = xgb.DMatrix(dummies)
assert dm.feature_names == ['B', 'A_X', 'A_Y', 'A_Z']
assert dm.feature_types == ['int', 'int', 'int', 'int']
assert dm.num_row() == 3
assert dm.num_col() == 4
df = pd.DataFrame({'A=1': [1, 2, 3], 'A=2': [4, 5, 6]})
dm = xgb.DMatrix(df)
assert dm.feature_names == ['A=1', 'A=2']
assert dm.feature_types == ['int', 'int']
assert dm.num_row() == 3
assert dm.num_col() == 2
df_int = pd.DataFrame([[1, 1.1], [2, 2.2]], columns=[9, 10])
dm_int = xgb.DMatrix(df_int)
df_range = pd.DataFrame([[1, 1.1], [2, 2.2]], columns=range(9, 11, 1))
dm_range = xgb.DMatrix(df_range)
assert dm_int.feature_names == ['9', '10'] # assert not "9 "
assert dm_int.feature_names == dm_range.feature_names
# test MultiIndex as columns
df = pd.DataFrame(
[
(1, 2, 3, 4, 5, 6),
(6, 5, 4, 3, 2, 1)
],
columns=pd.MultiIndex.from_tuples((
('a', 1), ('a', 2), ('a', 3),
('b', 1), ('b', 2), ('b', 3),
))
)
dm = xgb.DMatrix(df)
assert dm.feature_names == ['a 1', 'a 2', 'a 3', 'b 1', 'b 2', 'b 3']
assert dm.feature_types == ['int', 'int', 'int', 'int', 'int', 'int']
assert dm.num_row() == 2
assert dm.num_col() == 6
def test_slice(self):
rng = np.random.RandomState(1994)
rows = 100
X = rng.randint(3, 7, size=rows)
X = pd.DataFrame({'f0': X})
y = rng.randn(rows)
ridxs = [1, 2, 3, 4, 5, 6]
m = xgb.DMatrix(X, y)
sliced = m.slice(ridxs)
assert m.feature_types == sliced.feature_types
def test_pandas_categorical(self):
rng = np.random.RandomState(1994)
rows = 100
X = rng.randint(3, 7, size=rows)
X = pd.Series(X, dtype="category")
X = pd.DataFrame({'f0': X})
y = rng.randn(rows)
m = xgb.DMatrix(X, y, enable_categorical=True)
assert m.feature_types[0] == 'categorical'
def test_pandas_sparse(self):
import pandas as pd
rows = 100
X = pd.DataFrame(
{"A": pd.arrays.SparseArray(np.random.randint(0, 10, size=rows)),
"B": pd.arrays.SparseArray(np.random.randn(rows)),
"C": pd.arrays.SparseArray(np.random.permutation(
[True, False] * (rows // 2)))}
)
y = pd.Series(pd.arrays.SparseArray(np.random.randn(rows)))
dtrain = xgb.DMatrix(X, y)
booster = xgb.train({}, dtrain, num_boost_round=4)
predt_sparse = booster.predict(xgb.DMatrix(X))
predt_dense = booster.predict(xgb.DMatrix(X.sparse.to_dense()))
np.testing.assert_allclose(predt_sparse, predt_dense)
def test_pandas_label(self):
# label must be a single column
df = pd.DataFrame({'A': ['X', 'Y', 'Z'], 'B': [1, 2, 3]})
with pytest.raises(ValueError):
xgb.data._transform_pandas_df(df, False, None, None, 'label', 'float')
# label must be supported dtype
df = pd.DataFrame({'A': np.array(['a', 'b', 'c'], dtype=object)})
with pytest.raises(ValueError):
xgb.data._transform_pandas_df(df, False, None, None, 'label', 'float')
df = pd.DataFrame({'A': np.array([1, 2, 3], dtype=int)})
result, _, _ = xgb.data._transform_pandas_df(df, False, None, None,
'label', 'float')
np.testing.assert_array_equal(result, np.array([[1.], [2.], [3.]],
dtype=float))
dm = xgb.DMatrix(np.random.randn(3, 2), label=df)
assert dm.num_row() == 3
assert dm.num_col() == 2
def test_pandas_weight(self):
kRows = 32
kCols = 8
X = np.random.randn(kRows, kCols)
y = np.random.randn(kRows)
w = np.random.uniform(size=kRows).astype(np.float32)
w_pd = pd.DataFrame(w)
data = xgb.DMatrix(X, y, w_pd)
assert data.num_row() == kRows
assert data.num_col() == kCols
np.testing.assert_array_equal(data.get_weight(), w)
def test_cv_as_pandas(self):
dm = xgb.DMatrix(dpath + 'agaricus.txt.train')
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic', 'eval_metric': 'error'}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10)
assert isinstance(cv, pd.DataFrame)
exp = pd.Index([u'test-error-mean', u'test-error-std',
u'train-error-mean', u'train-error-std'])
assert len(cv.columns.intersection(exp)) == 4
# show progress log (result is the same as above)
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
verbose_eval=True)
assert isinstance(cv, pd.DataFrame)
exp = pd.Index([u'test-error-mean', u'test-error-std',
u'train-error-mean', u'train-error-std'])
assert len(cv.columns.intersection(exp)) == 4
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
verbose_eval=True, show_stdv=False)
assert isinstance(cv, pd.DataFrame)
exp = pd.Index([u'test-error-mean', u'test-error-std',
u'train-error-mean', u'train-error-std'])
assert len(cv.columns.intersection(exp)) == 4
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic', 'eval_metric': 'auc'}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True)
assert 'eval_metric' in params
assert 'auc' in cv.columns[0]
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic', 'eval_metric': ['auc']}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10, as_pandas=True)
assert 'eval_metric' in params
assert 'auc' in cv.columns[0]
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic', 'eval_metric': ['auc']}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, early_stopping_rounds=1)
assert 'eval_metric' in params
assert 'auc' in cv.columns[0]
assert cv.shape[0] < 10
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic'}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, metrics='auc')
assert 'auc' in cv.columns[0]
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic'}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, metrics=['auc'])
assert 'auc' in cv.columns[0]
params = {'max_depth': 2, 'eta': 1, 'verbosity': 0,
'objective': 'binary:logistic', 'eval_metric': ['auc']}
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, metrics='error')
assert 'eval_metric' in params
assert 'auc' not in cv.columns[0]
assert 'error' in cv.columns[0]
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, metrics=['error'])
assert 'eval_metric' in params
assert 'auc' not in cv.columns[0]
assert 'error' in cv.columns[0]
params = list(params.items())
cv = xgb.cv(params, dm, num_boost_round=10, nfold=10,
as_pandas=True, metrics=['error'])
assert isinstance(params, list)
assert 'auc' not in cv.columns[0]
assert 'error' in cv.columns[0]
license: apache-2.0

welshjf/bitnomon | bitnomon/formatting.py | copies: 1 | size: 1555

# Copyright 2015 Jacob Welsh
#
# This file is part of Bitnomon; see the README for license information.
"""Text/number formatting"""
class ByteCountFormatter(object):
#pylint: disable=too-few-public-methods
"""Human-readable display of byte counts in various formats.
    By default, the formatter uses SI and bytes, so 1000 => "1.00 kB". All
combinations of (byte, bit) x (SI, binary) are supported, though you
probably shouldn't use bits with the binary prefixes.
Attributes:
unit_bits True for bits or False for bytes
prefix_si True for SI or False for binary prefixes
"""
SI_prefixes = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
binary_prefixes = ('Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi', 'Yi')
def __init__(self):
self.unit_bits = False
self.prefix_si = True
def __call__(self, count):
"""Formats a byte count using the configured settings."""
if self.unit_bits:
count *= 8
unit = 'b'
else:
unit = 'B'
if self.prefix_si:
factor = 1000.
prefixes = self.SI_prefixes
else:
factor = 1024.
prefixes = self.binary_prefixes
if abs(count) < factor:
return u'%d %c' % (count, unit)
size = float(count)
prefix_index = 0
while abs(size) >= factor and prefix_index < len(prefixes):
size /= factor
prefix_index += 1
return u'%.2f %s%c' % (size, prefixes[prefix_index-1], unit)
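
Example outputs, computed from the logic above:

```python
fmt = ByteCountFormatter()
print(fmt(512))        # '512 B'
print(fmt(1000))       # '1.00 kB'

fmt.prefix_si = False  # switch to binary prefixes
print(fmt(2048))       # '2.00 KiB'

fmt.prefix_si = True
fmt.unit_bits = True   # report bits instead of bytes
print(fmt(1000))       # '8.00 kb'
```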
license: apache-2.0

warp1337/opencv_facerecognizer | src/ocvfacerec/facerec/classifier.py | copies: 1 | size: 9086

# Copyright (c) 2015.
# Philipp Wagner <bytefish[at]gmx[dot]de> and
# Florian Lier <flier[at]techfak.uni-bielefeld.de> and
# Norman Koester <nkoester[at]techfak.uni-bielefeld.de>
#
#
# Released to public domain under terms of the BSD Simplified license.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the organization nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# See <http://www.opensource.org/licenses/bsd-license>
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from ocvfacerec.facerec.distance import EuclideanDistance
from ocvfacerec.facerec.util import as_row_matrix
import logging
import numpy as np
import operator as op
class AbstractClassifier(object):
def compute(self, X, y):
raise NotImplementedError("Every AbstractClassifier must implement the compute method.")
def predict(self, X):
raise NotImplementedError("Every AbstractClassifier must implement the predict method.")
def update(self, X, y):
raise NotImplementedError("This Classifier is cannot be updated.")
class NearestNeighbor(AbstractClassifier):
"""
Implements a k-Nearest Neighbor Model with a generic distance metric.
"""
def __init__(self, dist_metric=EuclideanDistance(), k=1):
AbstractClassifier.__init__(self)
self.k = k
self.dist_metric = dist_metric
self.X = []
self.y = np.array([], dtype=np.int32)
def update(self, X, y):
"""
Updates the classifier.
"""
self.X.append(X)
self.y = np.append(self.y, y)
def compute(self, X, y):
self.X = X
self.y = np.asarray(y)
def predict(self, q):
"""
Predicts the k-nearest neighbor for a given query in q.
Args:
q: The given query sample, which is an array.
Returns:
A list with the classifier output. In this framework it is
assumed, that the predicted class is always returned as first
element. Moreover, this class returns the distances for the
first k-Nearest Neighbors.
Example:
[ 0,
{ 'labels' : [ 0, 0, 1 ],
'distances' : [ 10.132, 10.341, 13.314 ]
}
]
So if you want to perform a thresholding operation, you could
pick the distances in the second array of the generic classifier
output.
"""
distances = []
for xi in self.X:
xi = xi.reshape(-1, 1)
d = self.dist_metric(xi, q)
distances.append(d)
if len(distances) > len(self.y):
raise Exception("More distances than classes. Is your distance metric correct?")
distances = np.asarray(distances)
# Get the indices in an ascending sort order:
idx = np.argsort(distances)
# Sort the labels and distances accordingly:
sorted_y = self.y[idx]
sorted_distances = distances[idx]
# Take only the k first items:
sorted_y = sorted_y[0:self.k]
sorted_distances = sorted_distances[0:self.k]
# Make a histogram of them:
hist = dict((key, val) for key, val in enumerate(np.bincount(sorted_y)) if val)
# And get the bin with the maximum frequency:
predicted_label = max(hist.iteritems(), key=op.itemgetter(1))[0]
# A classifier should output a list with the label as first item and
# generic data behind. The k-nearest neighbor classifier outputs the
# distance of the k first items. So imagine you have a 1-NN and you
# want to perform a threshold against it, you should take the first
# item
return [predicted_label, {'labels': sorted_y, 'distances': sorted_distances}]
def __repr__(self):
return "NearestNeighbor (k=%s, dist_metric=%s)" % (self.k, repr(self.dist_metric))
# libsvm
try:
from svmutil import *
except ImportError:
logger = logging.getLogger("facerec.classifier.SVM")
logger.debug("Import Error: libsvm bindings not available.")
except:
logger = logging.getLogger("facerec.classifier.SVM")
logger.debug("Import Error: libsvm bindings not available.")
import sys
from StringIO import StringIO
bkp_stdout = sys.stdout
class SVM(AbstractClassifier):
"""
This class is just a simple wrapper to use libsvm in the
CrossValidation module. If you don't use this framework
use the validation methods coming with LibSVM, they are
much easier to access (simply pass the correct class
labels in svm_predict and you are done...).
The grid search method in this class is somewhat similar
to libsvm grid.py, as it performs a parameter search over
a logarithmic scale. Again if you don't use this framework,
use the libsvm tools as they are much easier to access.
Please keep in mind to normalize your input data, as expected
for the model. There's no way to assume a generic normalization
step.
"""
def __init__(self, param=None):
AbstractClassifier.__init__(self)
self.logger = logging.getLogger("facerec.classifier.SVM")
self.param = param
self.svm = svm_model()
self.param = param
if self.param is None:
self.param = svm_parameter("-q")
def compute(self, X, y):
self.logger.debug("SVM TRAINING (C=%.2f,gamma=%.2f,p=%.2f,nu=%.2f,coef=%.2f,degree=%.2f)" % (
self.param.C, self.param.gamma, self.param.p, self.param.nu, self.param.coef0, self.param.degree))
# turn data into a row vector (needed for libsvm)
X = as_row_matrix(X)
y = np.asarray(y)
problem = svm_problem(y, X.tolist())
self.svm = svm_train(problem, self.param)
self.y = y
def predict(self, X):
"""
Args:
X: The query image, which is an array.
Returns:
A list with the classifier output. In this framework it is
assumed, that the predicted class is always returned as first
element. Moreover, this class returns the libsvm output for
p_labels, p_acc and p_vals. The libsvm help states:
p_labels: a list of predicted labels
p_acc: a tuple including accuracy (for classification), mean-squared
error, and squared correlation coefficient (for regression).
p_vals: a list of decision values or probability estimates (if '-b 1'
is specified). If k is the number of classes, for decision values,
each element includes results of predicting k(k-1)/2 binary-class
SVMs. For probabilities, each element contains k values indicating
the probability that the testing instance is in each class.
Note that the order of classes here is the same as 'model.label'
field in the model structure.
"""
X = np.asarray(X).reshape(1, -1)
sys.stdout = StringIO()
p_lbl, p_acc, p_val = svm_predict([0], X.tolist(), self.svm)
sys.stdout = bkp_stdout
predicted_label = int(p_lbl[0])
return [predicted_label, {'p_lbl': p_lbl, 'p_acc': p_acc, 'p_val': p_val}]
def __repr__(self):
return "Support Vector Machine (kernel_type=%s, C=%.2f,gamma=%.2f,p=%.2f,nu=%.2f,coef=%.2f,degree=%.2f)" % (
KERNEL_TYPE[self.param.kernel_type], self.param.C, self.param.gamma, self.param.p, self.param.nu,
self.param.coef0, self.param.degree)
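
A tiny 1-NN sketch using the classifier above, with made-up sample vectors
(the distance metric comes from the module's own imports):

```python
import numpy as np

nn = NearestNeighbor(k=1)
nn.compute(X=[np.array([0.0, 0.0]), np.array([1.0, 1.0])], y=[0, 1])
result = nn.predict(np.array([[0.9], [1.1]]))  # query as a column vector
print(result[0])                # predicted label: 1
print(result[1]['distances'])   # distance(s) to the nearest neighbor(s)
```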
license: bsd-3-clause

debbiedub/bcdef | features/steps/application.py | copies: 1 | size: 1080

import logging
from multiprocessing import Process
from bc import BCMain
from fcp.CommunicationQueues import comm
def run_create_first_block(queues, *args):
global comm
comm.set(queues=queues)
try:
logging.getLogger().setLevel(logging.DEBUG)
# logging.getLogger().addHandler(comm.get_handler())
logging.info("Started logging")
bc = BCMain(*args)
bc.participants.round_timeout = 1
bc.create_first_block()
finally:
comm.empty_queues()
@when(u'the application is started to create the first block')
def step_impl(context):
global comm
context.bc_process = Process(target=run_create_first_block,
args=(comm, "Me",))
context.bc_process.start()
context.node_simulator.expect("hello")
context.node_simulator.respond(("olleh",))
context.node_simulator.expect_wot("Ping")
context.node_simulator.respond_wot({"Message":"Pong"})
context.node_simulator.expect_wot("GetOwnIdentities")
context.node_simulator.respond_wot({"Replies.Amount": "0"})
license: gpl-3.0

google/capirca | capirca/lib/windows.py | copies: 1 | size: 12745

# Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Generic Windows security policy generator; requires subclassing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import datetime
import string
from absl import logging
from capirca.lib import aclgenerator
from capirca.lib import nacaddr
CMD_PREFIX = 'netsh ipsec static add '
class Term(aclgenerator.Term):
"""Generate generic windows policy terms."""
_PLATFORM = 'windows'
_COMMENT_FORMAT = string.Template(': $comment')
# filter rules
_ACTION_TABLE = {}
def __init__(self, term, filter_name, filter_action, af='inet'):
"""Setup a new term.
Args:
term: A policy.Term object to represent in windows_ipsec.
filter_name: The name of the filter chan to attach the term to.
filter_action: The default action of the filter.
af: Which address family ('inet' or 'inet6') to apply the term to.
Raises:
UnsupportedFilterError: Filter is not supported.
"""
super(Term, self).__init__(term)
self.term = term # term object
self.filter = filter_name # actual name of filter
self.default_action = filter_action
self.options = []
self.af = af
if af == 'inet6':
self._all_ips = nacaddr.IPv6('::/0')
else:
self._all_ips = nacaddr.IPv4('0.0.0.0/0')
self.term_name = '%s_%s' % (self.filter[:1], self.term.name)
def __str__(self):
# Verify platform specific terms. Skip whole term if platform does not
# match.
if self.term.platform:
if self._PLATFORM not in self.term.platform:
return ''
if self.term.platform_exclude:
if self._PLATFORM in self.term.platform_exclude:
return ''
ret_str = []
# Don't render icmpv6 protocol terms under inet, or icmp under inet6
if ((self.af == 'inet6' and 'icmp' in self.term.protocol) or
(self.af == 'inet' and 'icmpv6' in self.term.protocol)):
logging.debug(self.NO_AF_LOG_PROTO.substitute(term=self.term.name,
proto=self.term.protocol,
af=self.af))
return ''
# append comments to output
ret_str.append(self._COMMENT_FORMAT.substitute(filter=self.filter,
term=self.term_name,
comment=self.term.comment))
# if terms does not specify action, use filter default action
if not self.term.action:
self.term.action[0].value = self.default_action
if self.term.action[0] == 'next':
return ''
if len(self.term.action) > 1:
raise aclgenerator.UnsupportedFilterError('\n%s %s %s %s' % (
'Multiple actions unsupported by', self._PLATFORM,
'\nError in term:', self.term.name))
# protocol
if self.term.protocol:
protocols = self.term.protocol
else:
protocols = ['any']
# addresses
src_addr = self.term.source_address
if not src_addr:
src_addr = [self._all_ips]
dst_addr = self.term.destination_address
if not dst_addr:
dst_addr = [self._all_ips]
if (self.term.source_address_exclude or
self.term.destination_address_exclude):
raise aclgenerator.UnsupportedFilterError('\n%s %s %s %s' % (
'address exclusions unsupported by', self._PLATFORM,
'\nError in term:', self.term.name))
# ports = Map the ports in a straight list since multiports aren't supported
(src_ports, dst_ports) = self._HandlePorts(self.term.source_port,
self.term.destination_port)
# The windows ipsec driver requires either 'tcp' or 'udp' to be specified
# if a srcport or dstport is specified. Fail if src or dst ports are
# specified and of the protocols are not exactly one or both of 'tcp'
# or 'udp'.
if ((not set(protocols).issubset(set(['tcp', 'udp']))) and
(len(src_ports) > 1 or len(dst_ports) > 1)):
raise aclgenerator.UnsupportedFilterError('%s %s %s' % (
'\n', self.term.name,
'src or dst ports may only be specified with "tcp" and/or "udp".'))
# icmp-types
(icmp_types, protocols) = self._HandleIcmpTypes(self.term.icmp_type,
protocols)
ret_str = []
self._HandlePreRule(ret_str)
self._CartesianProduct(src_addr, dst_addr, protocols, icmp_types, src_ports,
dst_ports, ret_str)
    self._HandlePostRule(ret_str)
return '\n'.join(str(v) for v in ret_str if v)
def _HandleIcmpTypes(self, icmp_types, protocols):
"""Perform implementation-specific icmp_type and protocol transforms.
Note that icmp_types or protocols are passed as parameters in case they
are to be munged prior to this function call, and may not be identical
to self.term.* parameters.
Args:
icmp_types: a list of icmp types, e.g., self.term.icmp_types
protocols: a list of protocols, e.g., self.term.protocols
Returns:
A pair of lists of (icmp_types, protocols)
"""
return None, None
def _HandlePorts(self, src_ports, dst_ports):
"""Perform implementation-specific port transforms.
Note that icmp_types or protocols are passed as parameters in case they
are to be munged prior to this function call, and may not be identical
to self.term.* parameters.
Args:
src_ports: list of source port range tuples, e.g., self.term.source_port
dst_ports: list of destination port range tuples
Returns:
A pair of lists of (icmp_types, protocols)
"""
return None, None
def _HandlePreRule(self, ret_str):
"""Perform any pre-cartesian product transforms on the ret_str array.
Args:
ret_str: an array of strings that will eventually be joined to form
the string output for the term.
"""
pass
def _CartesianProduct(self, src_addr, dst_addr, protocol, icmp_types,
src_ports, dst_ports, ret_str):
"""Perform any the appropriate cartesian product of the input parameters.
Args:
src_addr: a type(IP) list of the source addresses
dst_addr: a type(IP) list of the destination addresses
protocol: a string list of the protocols
icmp_types: a numeric list of the icmp_types
src_ports: a (start, end) list of the source ports
dst_ports: a (start,end) list of the destination ports
ret_str: an array of strings that will eventually be joined to form
the string output for the term.
"""
pass
def _HandlePostRule(self, ret_str):
"""Perform any port-cartesian product transforms on the ret_str array.
Args:
ret_str: an array of strings that will eventually be joined to form
the string output for the term.
"""
pass
class WindowsGenerator(aclgenerator.ACLGenerator):
"""Generates filters and terms from provided policy object."""
_PLATFORM = 'windows'
_DEFAULT_PROTOCOL = 'all'
SUFFIX = '.bat'
_RENDER_PREFIX = None
_DEFAULT_ACTION = 'block'
_TERM = Term
_GOOD_AFS = ['inet', 'inet6']
def _BuildTokens(self):
"""Build supported tokens for platform.
Returns:
tuple containing both supported tokens and sub tokens
"""
supported_tokens, supported_sub_tokens = super(
WindowsGenerator, self)._BuildTokens()
supported_tokens |= {'option'}
supported_tokens -= {'verbatim'}
supported_sub_tokens.update({'action': {'accept', 'deny'}})
del supported_sub_tokens['option']
return supported_tokens, supported_sub_tokens
def _TranslatePolicy(self, pol, exp_info):
"""Translate a policy from objects into strings."""
self.windows_policies = []
current_date = datetime.datetime.utcnow().date()
exp_info_date = current_date + datetime.timedelta(weeks=exp_info)
default_action = None
good_default_actions = ['permit', 'block']
good_options = []
for header, terms in pol.filters:
filter_type = None
if self._PLATFORM not in header.platforms:
continue
filter_options = header.FilterOptions(self._PLATFORM)[1:]
filter_name = header.FilterName(self._PLATFORM)
# ensure all options after the filter name are expected
for opt in filter_options:
if opt not in good_default_actions + self._GOOD_AFS + good_options:
raise aclgenerator.UnsupportedTargetOptionError('%s %s %s %s' % (
'\nUnsupported option found in', self._PLATFORM,
'target definition:', opt))
# Check for matching af
for address_family in self._GOOD_AFS:
if address_family in filter_options:
# should not specify more than one AF in options
if filter_type is not None:
raise aclgenerator.UnsupportedFilterError('%s %s %s %s' % (
'\nMay only specify one of', self._GOOD_AFS,
'in filter options:', filter_options))
filter_type = address_family
if filter_type is None:
filter_type = 'inet'
# does this policy override the default filter actions?
for next_target in header.target:
if next_target.platform == self._PLATFORM:
if len(next_target.options) > 1:
for arg in next_target.options:
if arg in good_default_actions:
default_action = arg
if default_action and default_action not in good_default_actions:
raise aclgenerator.UnsupportedTargetOptionError('%s %s %s %s %s' % (
'\nOnly', ', '.join(good_default_actions),
'default filter action allowed;', default_action, 'used.'))
# add the terms
new_terms = []
term_names = set()
for term in terms:
if term.name in term_names:
raise aclgenerator.DuplicateTermError(
'You have a duplicate term: %s' % term.name)
term_names.add(term.name)
if term.expiration:
if term.expiration <= exp_info_date:
logging.info('INFO: Term %s in policy %s expires '
'in less than two weeks.', term.name, filter_name)
if term.expiration <= current_date:
logging.warning('WARNING: Term %s in policy %s is expired and '
'will not be rendered.', term.name, filter_name)
continue
if 'established' in term.option or 'tcp-established' in term.option:
continue
new_terms.append(self._TERM(term, filter_name, default_action,
filter_type))
self.windows_policies.append((header, filter_name, filter_type,
default_action, new_terms))
def __str__(self):
target = []
pretty_platform = '%s%s' % (self._PLATFORM[0].upper(), self._PLATFORM[1:])
if self._RENDER_PREFIX:
target.append(self._RENDER_PREFIX)
for header, _, filter_type, default_action, terms in self.windows_policies:
# Add comments for this filter
target.append(': %s %s Policy' % (pretty_platform,
header.FilterName(self._PLATFORM)))
self._HandlePolicyHeader(header, target)
# reformat long text comments, if needed
comments = aclgenerator.WrapWords(header.comment, 70)
if comments and comments[0]:
for line in comments:
target.append(': %s' % line)
target.append(':')
# add the p4 tags
target.extend(aclgenerator.AddRepositoryTags(': '))
target.append(': ' + filter_type)
if default_action:
raise aclgenerator.UnsupportedTargetOptionError(
'Windows generator does not support default actions')
# add the terms
for term in terms:
term_str = str(term)
if term_str:
target.append(term_str)
self._HandleTermFooter(header, term, target)
target.append('')
return '\n'.join(target)
def _HandlePolicyHeader(self, header, target):
pass
def _HandleTermFooter(self, header, term, target):
pass
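
The module docstring notes this generator requires subclassing; a minimal,
hypothetical subclass only has to fill in the term hooks, roughly:

```python
# Sketch only -- not capirca's real windows_ipsec/windows_advfirewall code.
class MyTerm(Term):
  _ACTION_TABLE = {'accept': 'permit', 'deny': 'block'}

  def _CartesianProduct(self, src_addr, dst_addr, protocol, icmp_types,
                        src_ports, dst_ports, ret_str):
    for saddr in src_addr:
      for daddr in dst_addr:
        for proto in protocol:
          ret_str.append(CMD_PREFIX + 'filter ... %s %s %s' %
                         (saddr, daddr, proto))


class MyGenerator(WindowsGenerator):
  _TERM = MyTerm
```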
license: apache-2.0

AlfredNeverKog/BrainCarya | src/my/kadenze/lesson3/mnist_autoencoder.py | copies: 1 | size: 2610

from mnist import MNIST
import numpy as np
import tensorflow as tf
from src.my.lib.utils import montage
import matplotlib.pyplot as plt
from PIL import Image
src = '../../../../data/mnist/'
output='./content/1/%s.jpg'
mndata = MNIST(src)
data = np.array(mndata.load_testing())
X = data[0]
Y = data[1]
items = 100
imgs = np.array([i for i in np.array(X[:items])]).reshape(items,28,28)
n_features = 784
n_input = n_features
Y = imgs.reshape(items,n_features).astype(float)
current_input = imgs.reshape(items,n_features).astype(float)
Ws = []
Bs = []
dimensions = [512,256,128,64]
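# Illustrative note (added): with n_features = 784, the encoder maps
# 784 -> 512 -> 256 -> 128 -> 64; the decoder below reuses the same weight
# matrices transposed (tied weights) to map back up to 784.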
for layer_i, n_outputs in enumerate(dimensions):
    with tf.variable_scope("encoder/variable/%s" % layer_i):
        W = tf.get_variable(name="weight%s" % layer_i, dtype=tf.float64,
                            initializer=tf.contrib.layers.xavier_initializer(),
                            shape=[n_input, n_outputs])
        #B = tf.get_variable(name='bias%s' % layer_i, dtype=tf.float64,
        #                    initializer=tf.random_normal_initializer(mean=0.0, stddev=1.1),
        #                    shape=[n_outputs])
        #h = tf.nn.bias_add(value=tf.matmul(current_input, W),
        #                   bias=B)
        h = tf.matmul(current_input, W)
    current_input = h
    current_input = tf.nn.relu(current_input)
    n_input = n_outputs
    Ws.append(W)
    #Bs.append()
Ws = Ws[::-1]#reverse
Bs = Bs[::-1]#reverse
#dimensions = dimensions[::1][1:].append(n_features)
dimensions = dimensions[::-1][1:] +[n_features]
#Build DECODER
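# Illustrative note (added): reversing [512, 256, 128, 64], dropping the first
# entry and appending n_features yields [128, 256, 512, 784] -- the mirror of
# the encoder's layer sizes.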
for layer_i, n_outputs in enumerate(dimensions):
    with tf.variable_scope("decoder/variable/%s" % layer_i):
        ## tied weights: e.g. a 128x64 encoder W is reused transposed as 64x128
        h = tf.matmul(current_input, tf.transpose(Ws[layer_i]))
        if layer_i + 1 < len(Bs):
            h = tf.nn.bias_add(h, bias=Bs[layer_i + 1])
    current_input = h
    current_input = tf.nn.relu(current_input)
    n_input = n_outputs
loss_func = tf.reduce_mean(tf.squared_difference(current_input, Y), 1)
optimizer = tf.train.AdamOptimizer(learning_rate=0.00001)
train = optimizer.minimize(loss_func)
counter = 0
with tf.Session() as sess:
sess.run(tf.initialize_all_variables())
for i in range(50000):
sess.run(train)
if i % 15 == 0:
Image.fromarray(montage(sess.run(current_input).reshape(items,28,28)).astype(np.uint8)) \
.save(output % ("0"*(5 - len(str(counter))) + str(counter)))
print(sess.run(tf.reduce_mean(loss_func)))
counter += 1
| mit | 2,853,203,673,216,453,000 | 28 | 101 | 0.589272 | false |
icydoge/AdventOfCodeSolutions2 | day3.py | 1 | 1161 | ###################################
# Many lines #
# Such O(n) #
# Very Doge #
###################################
# By icydoge <[email protected]> #
###################################
with open("inputs/day3-1.txt") as f:
content = f.readlines()
# Part 1
lines = list(map(str.strip, content))
triangles = []
for line in lines:
split = line.split(' ')
triangles.append([int(i) for i in split if i != ''])
valid = 0
for triangle in triangles:
if (triangle[0] + triangle[1]) > triangle[2] and (triangle[1] + triangle[2]) > triangle[0] and (triangle[0] + triangle[2]) > triangle[1]:
valid += 1
print("Final answer for Part 1: %d" % (valid))
# Part 2
triangles2 = []
for i in range(0, len(triangles) - 2, 3):
for j in range(0, 3):
triangles2.append([triangles[i][j], triangles[i + 1][j], triangles[i + 2][j]])
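# Illustrative note (added): rows are consumed three at a time, column-wise;
# e.g. rows [[a1,a2,a3],[b1,b2,b3],[c1,c2,c3]] yield the triangles
# (a1,b1,c1), (a2,b2,c2) and (a3,b3,c3).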
valid = 0
for triangle in triangles2:
if (triangle[0] + triangle[1]) > triangle[2] and (triangle[1] + triangle[2]) > triangle[0] and (triangle[0] + triangle[2]) > triangle[1]:
valid += 1
print("Final answer for Part 2: %d" % (valid))
| mit | -4,814,290,426,159,412,000 | 30.378378 | 141 | 0.51938 | false |
shapiromatron/bmds-server | bmds_server/analysis/transforms.py | 1 | 3196 | from enum import Enum
from typing import Dict, List, Union
import bmds
from bmds.bmds3.sessions import get_model
from bmds.bmds3.types.continuous import ContinuousModelSettings
from bmds.bmds3.types.dichotomous import DichotomousModelSettings
from bmds.bmds3.types.priors import PriorClass, get_continuous_prior, get_dichotomous_prior
from bmds.constants import Dtype
from .validators.datasets import AdverseDirection
class PriorEnum(str, Enum):
frequentist_restricted = "frequentist_restricted"
frequentist_unrestricted = "frequentist_unrestricted"
bayesian = "bayesian"
# TODO - remove these maps; use constants from bmds
bmd3_prior_map = {
PriorEnum.frequentist_restricted: PriorClass.frequentist_restricted,
PriorEnum.frequentist_unrestricted: PriorClass.frequentist_unrestricted,
PriorEnum.bayesian: PriorClass.bayesian,
}
is_increasing_map = {
AdverseDirection.AUTOMATIC: None,
AdverseDirection.UP: True,
AdverseDirection.DOWN: False,
}
def build_model_settings(
bmds_version: str,
dataset_type: str,
model_name: str,
prior_class: str,
options: Dict,
dataset_options: Dict,
) -> Union[DichotomousModelSettings, ContinuousModelSettings]:
model = get_model(bmds_version, dataset_type, model_name)
prior_class = bmd3_prior_map[prior_class]
if dataset_type in bmds.constants.DICHOTOMOUS_DTYPES:
return DichotomousModelSettings(
bmr=options["bmr_value"],
alpha=1.0 - options["confidence_level"],
bmr_type=options["bmr_type"],
degree=dataset_options["degree"],
priors=get_dichotomous_prior(model.bmd_model_class, prior_class),
)
elif dataset_type in bmds.constants.CONTINUOUS_DTYPES:
return ContinuousModelSettings(
bmr=options["bmr_value"],
alpha=1.0 - options["confidence_level"],
tailProb=options["tail_probability"],
bmr_type=options["bmr_type"],
disttype=options["dist_type"],
degree=dataset_options["degree"],
is_increasing=is_increasing_map[dataset_options["adverse_direction"]],
priors=get_continuous_prior(model.bmd_model_class, prior_class),
)
else:
raise ValueError(f"Unknown dataset_type: {dataset_type}")
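# Illustrative sketch (added): a hypothetical call, with argument values that
# are assumptions chosen only to show the shape of the API:
#
#   settings = build_model_settings(
#       bmds_version='BMDS330',
#       dataset_type=<one of bmds.constants.DICHOTOMOUS_DTYPES>,
#       model_name='Logistic',
#       prior_class='frequentist_restricted',
#       options={'bmr_value': 0.1, 'confidence_level': 0.95, 'bmr_type': 1},
#       dataset_options={'degree': 2})
#
# which would return a DichotomousModelSettings with alpha = 1 - 0.95 = 0.05.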
def build_dataset(dataset_type: str, dataset: Dict[str, List[float]]) -> bmds.datasets.DatasetType:
if dataset_type == Dtype.CONTINUOUS:
schema = bmds.datasets.ContinuousDatasetSchema
elif dataset_type == Dtype.CONTINUOUS_INDIVIDUAL:
schema = bmds.datasets.ContinuousIndividualDatasetSchema
elif dataset_type == Dtype.DICHOTOMOUS:
schema = bmds.datasets.DichotomousDatasetSchema
else:
raise ValueError(f"Unknown dataset type: {dataset_type}")
return schema.parse_obj(dataset).deserialize()
def remap_exponential(models: List[str]) -> List[str]:
# recursively expand user-specified "exponential" model into M3 and M5
if bmds.constants.M_Exponential in models:
pos = models.index(bmds.constants.M_Exponential)
models[pos : pos + 1] = (bmds.constants.M_ExponentialM3, bmds.constants.M_ExponentialM5)
return models
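# Illustrative note (added): e.g. [M_Exponential, M_Power] becomes
# [M_ExponentialM3, M_ExponentialM5, M_Power] (M_Power here is only a
# hypothetical second entry); lists without M_Exponential pass through
# unchanged.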
| mit | 4,329,694,989,769,762,300 | 37.506024 | 99 | 0.707447 | false |
EaterOA/fortunebot | tests/test_bot.py | 1 | 2234 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
import six
import mock
import irc
from fortunebot import bot
MODULE = 'fortunebot.botrunner'
EXAMPLE_CHANNEL = "#test"
EXAMPLE_NICK = "fortunebot"
EXAMPLE_MSG = "abcdefg"
EXAMPLE_MSG2 = "星空"
EXAMPLE_SCRIPT_RETURN = EXAMPLE_MSG2
class TestFortunebot(object):
def setup(self):
self.bot = bot.Fortunebot()
self.bot.connection = mock.create_autospec(irc.client.ServerConnection)
mock_script = mock.create_autospec(MockScript)
mock_script.on_pubmsg.return_value = EXAMPLE_SCRIPT_RETURN
self.bot.scripts = {"mock_script": mock_script}
def test_send_msg(self):
self.bot.send_msg(EXAMPLE_CHANNEL, EXAMPLE_MSG)
self.bot.connection.privmsg.assert_called_with(
EXAMPLE_CHANNEL,
to_unicode(EXAMPLE_MSG))
self.bot.send_msg(EXAMPLE_CHANNEL, EXAMPLE_MSG2)
self.bot.connection.privmsg.assert_called_with(
EXAMPLE_CHANNEL,
to_unicode(EXAMPLE_MSG2))
def test_send_msg_multiple(self):
messages = [EXAMPLE_MSG + str(i) for i in six.moves.xrange(10)]
self.bot.send_msg(EXAMPLE_CHANNEL, messages)
assert self.bot.connection.privmsg.call_count == len(messages)
def test_send_msg_illegal(self):
msg = "\r\n"
self.bot.send_msg(EXAMPLE_CHANNEL, msg)
self.bot.connection.privmsg.assert_called_with(
EXAMPLE_CHANNEL,
"")
msg = "\t\x7F"
self.bot.send_msg(EXAMPLE_CHANNEL, msg)
self.bot.connection.privmsg.assert_called_with(
EXAMPLE_CHANNEL,
"")
def test_on_pubmsg(self):
e = irc.client.Event("", EXAMPLE_NICK, EXAMPLE_CHANNEL, [EXAMPLE_MSG])
self.bot.on_pubmsg(self.bot.connection, e)
self.bot.scripts["mock_script"].on_pubmsg.assert_called_with(
EXAMPLE_NICK,
EXAMPLE_CHANNEL,
EXAMPLE_MSG)
self.bot.connection.privmsg.assert_called_with(
EXAMPLE_CHANNEL,
to_unicode(EXAMPLE_SCRIPT_RETURN))
class MockScript(object):
def on_pubmsg(self, source, channel, text):
pass
def to_unicode(s):
return s if six.PY3 else s.decode('utf-8')
| gpl-3.0 | -5,668,313,438,907,019,000 | 29.547945 | 79 | 0.631839 | false |
UbiCastTeam/candies | candies2/dropdown.py | 1 | 24647 | #!/usr/bin/env python
# -*- coding: utf-8 -*
import clutter
import common
from container import BaseContainer
from roundrect import RoundRectangle
from text import TextContainer
from box import VBox
from autoscroll import AutoScrollPanel
class OptionLine(BaseContainer):
__gtype_name__ = 'OptionLine'
"""
    An option line for a select input. Can be used alone to show a text with an icon.
"""
INDENT_WIDTH = 24
def __init__(self, name, text, icon_height=32, icon_path=None, padding=8, spacing=8, enable_background=True, font='14', font_color='Black', color='LightGray', border_color='Gray', texture=None, rounded=True, crypted=False, indent_level=0):
BaseContainer.__init__(self)
self._padding = common.Padding(padding)
self._spacing = common.Spacing(spacing)
self.name = name
self._locked = False
self.font = font
self.font_color = font_color
self.default_color = color
self.default_border_color = border_color
self.rounded = rounded
self.indent_level = indent_level
# background
if rounded:
self.background = RoundRectangle(texture=texture)
self.background.set_color(self.default_color)
self.background.set_border_color(self.default_border_color)
self.background.set_border_width(3)
self.background.set_radius(10)
else:
self.background = clutter.Rectangle()
self.background.set_color(self.default_color)
if enable_background:
self.enable_background = True
else:
self.enable_background = False
self.background.hide()
self._add(self.background)
# icon
self.icon_height = icon_height
self.icon_path = icon_path
self._icon_allocate = True
self.icon = clutter.Texture()
if icon_path:
self.icon.set_from_file(icon_path)
else:
self.icon.hide()
self._add(self.icon)
# spacer (for indentation)
self.spacer = clutter.Rectangle()
self.spacer.set_width(indent_level * self.INDENT_WIDTH)
self.spacer.hide()
self._add(self.spacer)
# label
self.label = TextContainer(unicode(text), padding=0, rounded=False, crypted=crypted)
self.label.set_font_color(self.font_color)
self.label.set_font_name(self.font)
self.label.set_inner_color('#00000000')
self.label.set_border_color('#00000000')
self.label.set_line_wrap(False) # to center text vertically
self._add(self.label)
def get_text(self):
return self.label.get_text()
def set_lock(self, lock):
self.set_reactive(not lock)
self.set_opacity(128 if lock else 255)
self._locked = lock
def get_lock(self):
return self._locked
def set_texture(self, texture):
if self.rounded:
self.background.set_texture(texture)
def set_line_wrap(self, boolean):
self.label.set_line_wrap(boolean)
def set_line_alignment(self, alignment):
self.label.set_line_alignment(alignment)
def set_justify(self, boolean):
self.label.set_justify(boolean)
def set_text(self, text):
self.label.set_text(str(text))
def set_name(self, text):
self.name = text
def set_hname(self, text):
self.label.set_text(str(text))
def has_icon(self):
return self.icon_path is not None
def set_icon(self, new_icon_path=None):
self.icon_path = new_icon_path
if new_icon_path:
self.icon.set_from_file(new_icon_path)
self.icon.show()
else:
self.icon.hide()
def set_font_color(self, color):
self.label.set_font_color(color)
def set_font_name(self, font_name):
self.label.set_font_name(font_name)
def set_inner_color(self, color):
self.background.set_color(color)
def set_border_color(self, color):
self.background.set_border_color(color)
def set_radius(self, radius):
if self.rounded:
self.background.set_radius(radius)
def set_border_width(self, width):
self.background.set_border_width(width)
def set_icon_opacity(self, opacity):
self.icon.set_opacity(opacity)
def set_icon_allocate(self, boolean):
if boolean and not self._icon_allocate:
self._icon_allocate = True
if self.has_icon():
self.icon.show()
self.queue_relayout()
elif not boolean and self._icon_allocate:
self._icon_allocate = False
self.icon.hide()
self.queue_relayout()
def show_background(self):
        if not self.enable_background:
self.enable_background = True
self.background.show()
def hide_background(self):
        if self.enable_background:
self.enable_background = False
self.background.hide()
def do_get_preferred_width(self, for_height):
if for_height != -1:
for_height -= 2 * self._padding.y
preferred_width = self.icon_height + 2 * self._padding.x + self._spacing.x
preferred_width += self.spacer.get_preferred_width(for_height)[1]
preferred_width += self.label.get_preferred_width(for_height)[1]
return preferred_width, preferred_width
def do_get_preferred_height(self, for_width):
preferred_height = 0
if for_width != -1:
w = for_width - self.icon_height - 2 * self._padding.x - self._spacing.x
preferred_height = self.label.get_preferred_height(w)[1]
preferred_height = max(preferred_height, self.icon_height) + 2 * self._padding.y
return preferred_height, preferred_height
def do_allocate(self, box, flags):
main_width = box.x2 - box.x1
main_height = box.y2 - box.y1
# background
background_box = clutter.ActorBox()
background_box.x1 = 0
background_box.y1 = 0
background_box.x2 = main_width
background_box.y2 = main_height
self.background.allocate(background_box, flags)
if self._icon_allocate:
# icon
icon_height = min(self.icon_height, main_height)
icon_y_padding = int(float(main_height - icon_height) / 2.)
icon_box = clutter.ActorBox()
icon_box.x1 = self._padding.x
icon_box.y1 = icon_y_padding
icon_box.x2 = self._padding.x + icon_height
icon_box.y2 = icon_box.y1 + icon_height
self.icon.allocate(icon_box, flags)
# spacer
spacer_width = self.indent_level * self.INDENT_WIDTH
spacer_box = clutter.ActorBox()
spacer_box.x1 = icon_box.x2 + self._spacing.x
spacer_box.y1 = self._padding.y
spacer_box.x2 = spacer_box.x1 + spacer_width
spacer_box.y2 = main_height - self._padding.y
self.spacer.allocate(spacer_box, flags)
else:
# icon
icon_box = clutter.ActorBox(0, 0, 0, 0)
self.icon.allocate(icon_box, flags)
# spacer
spacer_width = self.indent_level * self.INDENT_WIDTH
spacer_box = clutter.ActorBox()
spacer_box.x1 = self._spacing.x
spacer_box.y1 = self._padding.y
spacer_box.x2 = spacer_box.x1 + spacer_width
spacer_box.y2 = main_height - self._padding.y
self.spacer.allocate(spacer_box, flags)
# label
label_box = clutter.ActorBox()
label_box.x1 = spacer_box.x2
label_box.y1 = self._padding.y
label_box.x2 = main_width - self._padding.x
label_box.y2 = main_height - self._padding.y
self.label.allocate(label_box, flags)
clutter.Actor.do_allocate(self, box, flags)
def do_pick(self, color):
clutter.Actor.do_pick(self, color)
class Select(clutter.Actor, clutter.Container):
__gtype_name__ = 'Select'
"""
A select input.
"""
def __init__(self, padding=8, spacing=8, on_change_callback=None, icon_height=48, open_icon_path=None, font='14', font_color='Black', selected_font_color='Blue', color='LightGray', border_color='Gray', option_color='LightBlue', texture=None, user_data=None, direction="down", y_offsets=None, alignment="center"):
clutter.Actor.__init__(self)
self._padding = common.Padding(padding)
self._spacing = common.Spacing(spacing)
self.stage_padding = 10
self.on_change_callback = on_change_callback
self.user_data = user_data
self.direction = direction
if y_offsets:
if isinstance(y_offsets, (list, tuple)):
if len(y_offsets) > 1:
self.y_offsets = tuple(y_offsets[:2])
else:
self.y_offsets = (y_offsets[0], y_offsets[0])
else:
self.y_offsets = (y_offsets, y_offsets)
else:
self.y_offsets = (0, 0)
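        # Illustrative note (added): y_offsets=5 becomes (5, 5), [3] becomes
        # (3, 3), [3, 7] becomes (3, 7), and a falsy value falls back to (0, 0).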
self.alignment = alignment
self.icon_height = icon_height
self._stage_width, self._stage_height = 0, 0
self._opened = False
self._selected = None
self._locked = False
self.open_icon = open_icon_path
self._background_box = None
self._has_icons = False
self.font = font
self.font_color = font_color
self.selected_font_color = selected_font_color
self.default_color = color
self.default_border_color = border_color
self.option_color = option_color
self.texture = texture
        # the hidder rectangle catches click events on the whole stage while the select input is open
self._hidder = clutter.Rectangle()
self._hidder.set_color('#00000000')
self._hidder.connect('button-release-event', self._on_hidder_click)
self._hidder.set_reactive(True)
self._hidder.set_parent(self)
# background
self._background = RoundRectangle()
self._background.set_color(self.default_color)
self._background.set_border_color(self.default_border_color)
self._background.set_border_width(3)
self._background.set_radius(10)
self._background.set_parent(self)
# list of options displayed when the select input is opened
self._list = VBox(padding=0, spacing=0)
# auto scroll panel
self._auto_scroll = AutoScrollPanel(self._list)
self._auto_scroll.hide()
self._auto_scroll.set_parent(self)
# selected option is displayed when the select input is closed
self._selected_option = OptionLine('empty', '', padding=(self._padding.x, self._padding.y), spacing=self._spacing.x, icon_path=self.open_icon, icon_height=self.icon_height, enable_background=True, font=self.font, font_color=self.font_color, color=self.option_color, border_color='#00000000', texture=self.texture)
self._selected_option.set_reactive(True)
self._selected_option.connect('button-release-event', self._on_selected_click)
self._selected_option.set_parent(self)
self._set_lock(True)
def get_lock(self):
return self._locked
def _set_lock(self, status):
if status:
self._selected_option.set_reactive(False)
self._selected_option.icon.hide()
else:
self._selected_option.set_reactive(True)
self._selected_option.icon.show()
self.set_opacity(127 if status else 255)
def set_lock(self, status):
self._set_lock(status)
self._locked = status
def get_stage(self):
obj = self
if obj.get_parent():
has_parent = True
obj = obj.get_parent()
while has_parent:
if obj.get_parent():
has_parent = True
obj = obj.get_parent()
else:
has_parent = False
if isinstance(obj, clutter.Stage):
return obj
else:
return None
def get_selected(self):
return self._selected
def add_option(self, name, hname, icon_path=None, index=None, indent_level=0):
new_option = OptionLine(name, hname, padding=(self._padding.x, self._padding.y), spacing=self._spacing.x, icon_path=icon_path, icon_height=self.icon_height, enable_background=False, font=self.font, font_color=self.font_color, color=self.option_color, border_color='#00000000', texture=self.texture, indent_level=indent_level)
new_option.set_line_alignment(self.alignment)
if icon_path is not None and not self._has_icons:
self._has_icons = True
for element in self._list.get_elements():
element['object'].set_icon_allocate(True)
new_option.set_icon_allocate(self._has_icons)
new_option.set_reactive(True)
new_option.connect('button-release-event', self._on_click)
self._list.add_element(new_option, 'option_%s' % name, expand=True, index=index)
self.check_scrollbar()
if self._selected is None:
self._selected = new_option
self._selected.set_font_color(self.selected_font_color)
self._selected.show_background()
self._selected_option.set_name(name)
self._selected_option.set_text(str(hname))
if not self._locked:
self._set_lock(False)
def remove_option(self, name):
if len(self._list.get_elements()) == 1:
self.remove_all_options()
else:
option = self._list.remove_element('option_%s' % name)
if self._selected == option:
try:
self.select_option(self.get_option(0)[0])
except TypeError:
self._selected = None
self._selected_option.set_name('empty')
self._selected_option.set_text('')
self._has_icons = False
for element in self._list.get_elements():
                if element['object'].has_icon():
self._has_icons = True
break
for element in self._list.get_elements():
element['object'].set_icon_allocate(self._has_icons)
self.check_scrollbar()
def remove_all_options(self):
self._list.remove_all_elements()
self._has_icons = False
self.check_scrollbar()
self._selected = None
self._selected_option.set_name('empty')
self._selected_option.set_text('')
self._set_lock(True)
def has_option(self, name):
for element in self._list.get_elements():
if element['name'] == "option_%s" % name:
return True
return False
def get_options(self):
return [(e["object"].name, e["object"].get_text()) for e in self._list.get_elements()]
def get_option(self, index):
try:
option = self._list.get_elements()[index]["object"]
return (option.name, option.get_text())
except IndexError:
return None
def get_option_obj(self, index):
try:
option = self._list.get_elements()[index]["object"]
return option
except IndexError:
return None
def set_option_text(self, index, text):
option = self.get_option_obj(index)
if option:
option.set_text(str(text))
if option == self._selected:
self._selected_option.set_text(str(text))
def __len__(self):
return len(self.get_options())
def __nonzero__(self):
return True
def is_empty(self):
return len(self) == 0
def check_scrollbar(self):
self._auto_scroll.check_scrollbar()
def _on_click(self, source, event):
if self._opened:
if source == self._selected:
self.close_options()
else:
self._select_option(source, silent=False)
self.close_options()
def _on_selected_click(self, source, event):
self.open_options()
def _on_hidder_click(self, source, event):
self.close_options()
def open_options(self):
if not self._opened:
self._opened = True
stage = self.get_stage()
if stage:
self._stage_width, self._stage_height = stage.get_size()
else:
self._stage_width, self._stage_height = 0, 0
self._selected_option.hide()
self._auto_scroll.show()
self.queue_relayout()
def close_options(self):
if self._opened:
self._opened = False
self._auto_scroll.hide()
self._selected_option.show()
self._auto_scroll.go_to_top()
self.queue_relayout()
def select_option(self, name, silent=True, force=False):
element = self._list.get_by_name('option_%s' % name)
if element is not None:
option = element['object']
self._select_option(option, silent=silent, force=force)
self.queue_relayout()
def _select_option(self, option, silent=True, force=False):
if option != self._selected or force:
if self._selected is not None:
self._selected.hide_background()
self._selected.set_font_color(self.font_color)
self._selected = option
self._selected.set_font_color(self.selected_font_color)
self._selected.show_background()
self._selected_option.set_name(option.name)
self._selected_option.set_text(option.get_text())
if self.on_change_callback is not None and (not silent or force):
if self.user_data is not None:
self.on_change_callback(self._selected, self.user_data)
else:
self.on_change_callback(self._selected)
def set_bar_image_path(self, path):
self._auto_scroll.set_bar_image_path(path)
def set_scroller_image_path(self, path):
self._auto_scroll.set_scroller_image_path(path)
def do_get_preferred_width(self, for_height):
preferred = max(self._selected_option.get_preferred_width(for_height)[1], self._list.get_preferred_width(for_height)[1])
return preferred, preferred
def do_get_preferred_height(self, for_width):
preferred = self._selected_option.get_preferred_height(for_width)[1]
return preferred, preferred
def do_allocate(self, box, flags):
main_width = box.x2 - box.x1
main_height = box.y2 - box.y1
if self._opened:
option_box = clutter.ActorBox(0, 0, main_width, main_height)
self._selected_option.allocate(option_box, flags)
box_x, box_y = self.get_transformed_position()
box_x = int(box_x)
box_y = int(box_y)
if self._stage_height > 0 and self._stage_width > 0:
hidder_box = clutter.ActorBox(-box_x, -box_y, self._stage_width - box_x, self._stage_height - box_y)
else:
hidder_box = clutter.ActorBox(self._padding.x, self._padding.y, self._padding.x, self._padding.y)
self._hidder.allocate(hidder_box, flags)
option_height = self.icon_height + 2 * self._padding.y
total_height = option_height * len(self._list.get_elements())
base_y = 0
if self._stage_height > 0:
if total_height > self._stage_height - 2 * self.stage_padding - self.y_offsets[0] - self.y_offsets[1]:
total_height = self._stage_height - 2 * self.stage_padding - self.y_offsets[0] - self.y_offsets[1]
base_y = -box_y + self.stage_padding + self.y_offsets[0]
if self.direction == "up":
base_y += total_height - main_height
# TODO enable scrollbar
elif self.direction == "up":
if total_height > box_y + main_height - self.y_offsets[0]:
base_y = -box_y + total_height - main_height + self.y_offsets[0] + self.stage_padding
elif box_y + total_height > self._stage_height - self.stage_padding - self.y_offsets[1]:
base_y = -box_y + self._stage_height - self.stage_padding - self.y_offsets[1] - total_height
x1 = 0
x2 = main_width
if self.direction == "up":
y1 = base_y - total_height + main_height
y2 = base_y + main_height
else: # down, default
y1 = base_y
y2 = base_y + total_height
self._background_box = clutter.ActorBox(x1, y1, x2, y2)
self._background.allocate(self._background_box, flags)
list_box = clutter.ActorBox(x1, y1, x2, y2)
self._auto_scroll.allocate(list_box, flags)
else:
hidder_box = clutter.ActorBox(self._padding.x, self._padding.y, self._padding.x, self._padding.y)
self._hidder.allocate(hidder_box, flags)
self._background_box = clutter.ActorBox(0, 0, main_width, main_height)
self._background.allocate(self._background_box, flags)
option_box = clutter.ActorBox(0, 0, main_width, main_height)
self._selected_option.allocate(option_box, flags)
list_box = clutter.ActorBox(0, 0, main_width, main_height)
self._auto_scroll.allocate(list_box, flags)
clutter.Actor.do_allocate(self, box, flags)
def do_foreach(self, func, data=None):
func(self._hidder, data)
func(self._background, data)
func(self._selected_option, data)
func(self._auto_scroll, data)
def do_paint(self):
self._hidder.paint()
self._background.paint()
self._selected_option.paint()
# Clip auto scroll panel
if self._background_box is not None:
# Draw a rectangle to cut scroller
clutter.cogl.path_round_rectangle(
self._background_box.x1 + 3,
self._background_box.y1 + 3,
self._background_box.x2 - 3,
self._background_box.y2 - 3,
7,
1
)
clutter.cogl.path_close()
# Start the clip
clutter.cogl.clip_push_from_path()
self._auto_scroll.paint()
# Finish the clip
clutter.cogl.clip_pop()
else:
self._auto_scroll.paint()
def do_pick(self, color):
self.do_paint()
def do_destroy(self):
self.unparent()
if hasattr(self, '_hidder'):
if self._hidder:
self._hidder.unparent()
self._hidder.destroy()
if hasattr(self, '_background'):
if self._background:
self._background.unparent()
self._background.destroy()
if hasattr(self, '_selected_option'):
if self._selected_option:
self._selected_option.unparent()
self._selected_option.destroy()
if hasattr(self, '_auto_scroll'):
if self._auto_scroll:
self._auto_scroll.unparent()
self._auto_scroll.destroy()
if __name__ == '__main__':
stage_width = 640
stage_height = 480
stage = clutter.Stage()
stage.set_size(stage_width, stage_height)
stage.connect('destroy', clutter.main_quit)
test_line = OptionLine('test', 'displayed fezfzefezfzef', icon_height=32, padding=8)
test_line.label.set_font_name('22')
test_line.set_position(0, 0)
stage.add(test_line)
# test_select = Select(open_icon_path='/data/www/sdiemer/top.png')
test_select = Select()
test_select.set_position(0, 80)
icon_path = None
# icon_path = 'test.jpg'
test_select.add_option('test1', 'displayed', icon_path=icon_path)
test_select.add_option('test2', 'displayed regregreg', icon_path=icon_path)
test_select.add_option('test3', 'displayed fezfzefezfzef', icon_path=icon_path)
# test_select.set_size(400, 64)
stage.add(test_select)
"""def on_click(btn, event):
print 'click -----------'
test_select.open_options()
print 'selected : ', test_select.selected
test_select.selected.set_reactive(True)
test_select.selected.connect('button-press-event', on_click)"""
stage.show()
clutter.main()
| lgpl-3.0 | 2,092,594,610,929,779,700 | 36.686544 | 333 | 0.581856 | false |
V155/qutebrowser | qutebrowser/components/zoomcommands.py | 1 | 3262 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2018 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Zooming-related commands."""
from qutebrowser.api import cmdutils, apitypes, message, config
@cmdutils.register()
@cmdutils.argument('tab', value=cmdutils.Value.cur_tab)
@cmdutils.argument('count', value=cmdutils.Value.count)
def zoom_in(tab: apitypes.Tab, count: int = 1, quiet: bool = False) -> None:
"""Increase the zoom level for the current tab.
Args:
count: How many steps to zoom in.
quiet: Don't show a zoom level message.
"""
try:
perc = tab.zoom.apply_offset(count)
except ValueError as e:
raise cmdutils.CommandError(e)
if not quiet:
message.info("Zoom level: {}%".format(int(perc)), replace=True)
@cmdutils.register()
@cmdutils.argument('tab', value=cmdutils.Value.cur_tab)
@cmdutils.argument('count', value=cmdutils.Value.count)
def zoom_out(tab: apitypes.Tab, count: int = 1, quiet: bool = False) -> None:
"""Decrease the zoom level for the current tab.
Args:
count: How many steps to zoom out.
quiet: Don't show a zoom level message.
"""
try:
perc = tab.zoom.apply_offset(-count)
except ValueError as e:
raise cmdutils.CommandError(e)
if not quiet:
message.info("Zoom level: {}%".format(int(perc)), replace=True)
@cmdutils.register()
@cmdutils.argument('tab', value=cmdutils.Value.cur_tab)
@cmdutils.argument('count', value=cmdutils.Value.count)
def zoom(tab: apitypes.Tab,
level: str = None,
count: int = None,
quiet: bool = False) -> None:
"""Set the zoom level for the current tab.
The zoom can be given as argument or as [count]. If neither is
given, the zoom is set to the default zoom. If both are given,
use [count].
Args:
level: The zoom percentage to set.
count: The zoom percentage to set.
quiet: Don't show a zoom level message.
"""
if count is not None:
int_level = count
elif level is not None:
try:
int_level = int(level.rstrip('%'))
except ValueError:
raise cmdutils.CommandError("zoom: Invalid int value {}"
.format(level))
else:
int_level = int(config.val.zoom.default)
try:
tab.zoom.set_factor(int_level / 100)
except ValueError:
raise cmdutils.CommandError("Can't zoom {}%!".format(int_level))
if not quiet:
message.info("Zoom level: {}%".format(int_level), replace=True)
| gpl-3.0 | -1,385,101,638,705,507,600 | 33.336842 | 77 | 0.658492 | false |
bocajspear1/vulnfeed | vulnfeed/sender.py | 1 | 6613 | # This is the part of the code that sends the emails
import os
import threading
from datetime import datetime, timedelta, date
import calendar
import re
import time
from util.email_sender import send_email
from database.user import get_users, User
from database.feed import get_feed_reports
from database.rules import fill_rules
from scorer.parser import VulnFeedRuleParser
from config import Config
CONFIG = Config()
# The sender master splits users into groups of 50, each processed on its own thread
class SenderMaster():
def __init__(self):
self.threads = []
def start_senders(self):
offset = 0
length = 50
user_chunk = get_users(offset, length)
while len(user_chunk) > 0:
worker_thread = SenderWorker(user_chunk)
worker_thread.start()
self.threads.append(worker_thread)
offset += length
user_chunk = get_users(offset, length)
for thread in self.threads:
thread.join()
# Works on a chunk of users
class SenderWorker(threading.Thread):
def __init__(self, user_chunk):
threading.Thread.__init__(self)
self.user_chunk = user_chunk
def check_report(self, report_map, report, rules):
for rule_item in rules:
parser = VulnFeedRuleParser()
parser.parse_rule(rule_item['rule'])
title_score, _ = parser.process_text(report['title'], report['title_freq'])
print(title_score)
contents_score, words = parser.process_text(report['contents'], report['contents_freq'])
print(words)
print("Score: ", contents_score)
small_report = {
"title": report['raw_title'],
"contents": report['raw_contents'],
"link": report['link'],
"id": report['report_id']
}
if not report['id'] in report_map:
report_map[report['id']] = {
"report": small_report,
"score": 0
}
base_score = contents_score + (title_score * 2)
if rule_item['weight'] == 'high':
base_score *= 2
elif rule_item['weight'] == 'medium':
base_score += (base_score * 0.5)
report_map[report['id']]['score'] += base_score
if contents_score > 0:
for word in words:
# Check if contains HTML
if "<" in report_map[report['id']]['report']['contents']:
boldify = re.compile('([>][^<]+)(' + word + ')', re.IGNORECASE)
report_map[report['id']]['report']['contents'] = boldify.sub(r"\1<strong>\2</strong>",
report_map[report['id']]['report']['contents'])
else:
boldify = re.compile('(' + word + ')', re.IGNORECASE)
report_map[report['id']]['report']['contents'] = boldify.sub(r"<strong>\1</strong>",
report_map[report['id']]['report']['contents'])
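    # Illustrative note (added): with contents_score = 2 and title_score = 1
    # the base score is 2 + 1*2 = 4; a 'high' weight doubles it to 8, a
    # 'medium' weight adds half for 6, and any other weight keeps the base.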
def process_user(self, user_email):
# Get object
u = User(user_email)
if u.is_confirmed() == False:
print("Ignoring " + user_email)
return
days_to_run = u.get_days()
# Last run is day of year
last_day = u.last_run
# Get the current day
current_time = datetime.combine(date.today(), datetime.min.time())
current_day = int(current_time.strftime("%w")) + 1
current_day_of_year = int(current_time.strftime("%j"))
# Check if today is a day set by the user
if current_day not in days_to_run:
return
# Check if same day
if current_day_of_year == last_day:
return
day_diff = 2
if last_day > 0:
# If the last day is greater than the current day
# we have had a new year!
if last_day > current_day_of_year:
leap_day = 0
if calendar.isleap(current_time.year - 1):
leap_day = 1
day_diff = (current_day_of_year + 365 + leap_day) - last_day
else:
day_diff = current_day_of_year - last_day
# Get reports between the time requested plus some buffer time
query_time = current_time - timedelta(hours=(day_diff*24)+2)
reports = get_feed_reports(query_time)
# Get rule data
rules = u.get_rules()
filled_rules = fill_rules(rules)
# Score the reports
report_map = {}
for report in reports:
self.check_report(report_map, report, filled_rules)
# Sort the reports
sorted_reports = sorted(report_map, key=lambda item: report_map[item]['score'], reverse=True)
# Seperate reports into scored and unscored
scored_reports = []
unscored_reports = []
# Clear the last report info
u.last_scored_list = []
u.last_unscored_list = []
for item in sorted_reports:
if report_map[item]['score'] > 0:
scored_reports.append(report_map[item]['report'])
u.last_scored_list.append(report_map[item])
else:
unscored_reports.append(report_map[item]['report'])
u.last_unscored_list.append(report_map[item])
# for item in sorted_reports:
# print(report_map[item]['score'])
# print(report_map[item]['report']['contents'])
report_count = len(sorted_reports)
# Prepare to render the email template
render_map = {
"vulncount": report_count,
"scored_reports": scored_reports,
"unscored_reports": unscored_reports
}
print(scored_reports)
print("Sending for " + user_email)
response = send_email("reports_email.html", "VulnFeed Report for " + time.strftime("%m/%d/%Y"), render_map, user_email)
# Update the users last sent day
u.last_run = current_day_of_year
u.last_status = "Status: " + str(response.status_code) + ", " + response.status_text.decode("utf-8")
u.last_status = "Okay"
u.update()
# Process each user
def run(self):
for user_email in self.user_chunk:
self.process_user(user_email)
sm = SenderMaster()
sm.start_senders() | gpl-3.0 | 5,944,331,318,391,569,000 | 32.573604 | 127 | 0.536217 | false |
philippbosch/django-tellafriend | docs/source/conf.py | 1 | 7092 | # -*- coding: utf-8 -*-
#
# django-tellafriend documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 6 20:14:06 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.ifconfig']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-tellafriend'
copyright = u'2010, Philipp Bosch'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-tellafrienddoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-tellafriend.tex', u'django-tellafriend Documentation',
u'Philipp Bosch', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-tellafriend', u'django-tellafriend Documentation',
[u'Philipp Bosch'], 1)
]
| mit | 5,453,983,071,573,978,000 | 31.833333 | 80 | 0.710378 | false |
AndyDiamondstein/vitess | py/vttest/run_local_database.py | 1 | 5488 | #!/usr/bin/env python
"""Command-line tool for starting a local Vitess database for testing.
USAGE:
$ run_local_database --port 12345 \
--topology test_keyspace/-80:test_keyspace_0,test_keyspace/80-:test_keyspace_1 \
--schema_dir /path/to/schema/dir
It will run the tool, logging to stderr. On stdout, a small json structure
can be waited on and then parsed by the caller to figure out how to reach
the vtgate process.
Once done with the test, send an empty line to this process for it to clean-up,
and then just wait for it to exit.
"""
import json
import logging
import optparse
import os
import re
import sys
from vttest import environment
from vttest import local_database
from vttest import mysql_flavor
from vttest import vt_processes
from vttest import init_data_options
shard_exp = re.compile(r'(.+)/(.+):(.+)')
def main(cmdline_options):
shards = []
for shard in cmdline_options.topology.split(','):
m = shard_exp.match(shard)
if m:
shards.append(
vt_processes.ShardInfo(m.group(1), m.group(2), m.group(3)))
else:
sys.stderr.write('invalid --shard flag format: %s\n' % shard)
sys.exit(1)
environment.base_port = cmdline_options.port
init_data_opts = None
if cmdline_options.initialize_with_random_data:
init_data_opts = init_data_options.InitDataOptions()
init_data_opts.rng_seed = cmdline_options.rng_seed
init_data_opts.min_table_shard_size = cmdline_options.min_table_shard_size
init_data_opts.max_table_shard_size = cmdline_options.max_table_shard_size
init_data_opts.null_probability = cmdline_options.null_probability
with local_database.LocalDatabase(
shards,
cmdline_options.schema_dir,
cmdline_options.vschema,
cmdline_options.mysql_only,
init_data_opts,
web_dir=cmdline_options.web_dir) as local_db:
print json.dumps(local_db.config())
sys.stdout.flush()
try:
raw_input()
except EOFError:
sys.stderr.write(
'WARNING: %s: No empty line was received on stdin.'
' Instead, stdin was closed and the cluster will be shut down now.'
' Make sure to send the empty line instead to proactively shutdown'
' the local cluster. For example, did you forget the shutdown in'
' your test\'s tearDown()?\n' % os.path.basename(__file__))
if __name__ == '__main__':
parser = optparse.OptionParser()
parser.add_option(
'-p', '--port', type='int',
help='Port to use for vtcombo. If this is 0, a random port '
'will be chosen.')
parser.add_option(
'-t', '--topology',
help='Define which shards exist in the test topology in the'
' form <keyspace>/<shardrange>:<dbname>,... The dbname'
' must be unique among all shards, since they share'
' a MySQL instance in the test environment.')
parser.add_option(
'-s', '--schema_dir',
help='Directory for initial schema files. Within this dir,'
' there should be a subdir for each keyspace. Within'
' each keyspace dir, each file is executed as SQL'
' after the database is created on each shard.'
' If the directory contains a vschema.json file, it'
' will be used as the vschema for the V3 API.')
parser.add_option(
'-e', '--vschema',
help='If this file is specified, it will be used'
' as the vschema for the V3 API.')
parser.add_option(
'-m', '--mysql_only', action='store_true',
help='If this flag is set only mysql is initialized.'
' The rest of the vitess components are not started.'
' Also, the output specifies the mysql unix socket'
' instead of the vtgate port.')
parser.add_option(
'-r', '--initialize_with_random_data', action='store_true',
help='If this flag is each table-shard will be initialized'
' with random data. See also the "rng_seed" and "min_shard_size"'
' and "max_shard_size" flags.')
parser.add_option(
'-d', '--rng_seed', type='int', default=123,
help='The random number generator seed to use when initializing'
' with random data (see also --initialize_with_random_data).'
' Multiple runs with the same seed will result with the same'
' initial data.')
parser.add_option(
'-x', '--min_table_shard_size', type='int', default=1000,
help='The minimum number of initial rows in a table shard. Ignored if'
'--initialize_with_random_data is false. The actual number is chosen'
' randomly.')
parser.add_option(
'-y', '--max_table_shard_size', type='int', default=10000,
help='The maximum number of initial rows in a table shard. Ignored if'
'--initialize_with_random_data is false. The actual number is chosen'
' randomly')
parser.add_option(
'-n', '--null_probability', type='float', default=0.1,
help='The probability to initialize a field with "NULL" '
' if --initialize_with_random_data is true. Only applies to fields'
' that can contain NULL values.')
parser.add_option(
'-w', '--web_dir',
help='location of the vtctld web server files.')
parser.add_option(
'-v', '--verbose', action='store_true',
help='Display extra error messages.')
(options, args) = parser.parse_args()
if options.verbose:
logging.getLogger().setLevel(logging.DEBUG)
# This will set the flavor based on the MYSQL_FLAVOR env var,
# or default to MariaDB.
mysql_flavor.set_mysql_flavor(None)
main(options)
| bsd-3-clause | 1,329,981,277,944,915,700 | 36.081081 | 84 | 0.662719 | false |
flyingbanana1024102/transmission-line-simulator | src/views/contextmenu.py | 1 | 3701 | #
# Transmission Line Simulator
#
# Author(s): Jiacong Xu
# Created: Jul-10-2017
#
from materialwidget import MaterialWidget
from materialbutton import MaterialButton
from kivy.properties import *
from kivy.lang.builder import Builder
from util.constants import *
from kivy.animation import Animation
from kivy.clock import Clock
class ContextMenu(MaterialWidget):
"""
A contextual menu that displays text and icons.
"""
_container = ObjectProperty(None)
def __init__(self, titles, actions, icons = None, **kwargs):
"""
Initializes this menu. Does not yet display it.
titles: list of strings for each item in the menu.
        actions: list of callbacks that take no arguments.
icons: list of unicode strings for icons of each item. Default
None. Eg. [unichr(0xf26b)]
"""
super(ContextMenu, self).__init__(**kwargs)
# Generate buttons according to title and icon
for i in range(len(titles)):
btn = MaterialButton()
btn.changeStyle('flat')
btn.title = titles[i]
if icons != None:
btn.icon = icons[i]
else:
btn.icon = ''
btn.onClick.append(actions[i])
btn.onClick.append(lambda: self.dismiss(True))
btn.size_hint_y = None
btn.height = 60
btn.titleLabel.color = TEXT_BLACK
self._container.add_widget(btn)
self._anim = None
Clock.schedule_once(self._completeLayout, 0)
def _completeLayout(self, dt):
w = 0
for child in self._container.children:
w = max(w, child.width)
for child in self._container.children:
child.width = w
def show(self, layer, pos, animated):
# Determine orientation
self.orientation = 'upright'
h = len(self._container.children) * 60
if pos[1] + h > layer.height:
self.orientation = 'downright'
layer.add_widget(self)
self.pos = pos
self._cachedPos = pos
if animated:
self.size = 0, 0
self.opacity = 0.0
self._animate(True)
else:
self.size = self._container.minimum_size
self.opacity = 1.0
if self.orientation == 'downright':
self.y = pos[1] - h
def dismiss(self, animated):
if not animated:
self.parent.remove_widget(self)
self.size = 0, 0
self.opacity = 0.0
else:
self._animate(False)
self._anim.on_complete = self._animComplete
def _animComplete(self, x):
if self.parent != None:
self.parent.remove_widget(self)
def _animate(self, isEntering):
if self._anim != None:
self._anim.cancel(self)
if isEntering:
if self.orientation == 'downright':
h = self._cachedPos[1] - self._container.minimum_height
self._anim = Animation(size = self._container.minimum_size, y = h, opacity = 1.0, d = 0.2, t = 'in_out_quad')
else:
self._anim = Animation(size = self._container.minimum_size, opacity = 1.0, d = 0.2, t = 'in_out_quad')
self._anim.start(self)
else:
self._anim = Animation(size = [0, 0], pos = self._cachedPos, d = 0.2, opacity = 0.0, t = 'in_out_quad')
self._anim.start(self)
def on_touch_down(self, touch):
if not self.collide_point(touch.pos[0], touch.pos[1]):
self.dismiss(True)
return super(ContextMenu, self).on_touch_down(touch)
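# --- Illustrative usage sketch (added, not part of the original file) -------
# A hypothetical caller; `rootLayer`, the callbacks and the icon code points
# are assumptions for illustration only:
#
#   menu = ContextMenu(['Copy', 'Delete'], [onCopy, onDelete],
#                      icons=[unichr(0xf0c5), unichr(0xf1f8)])
#   menu.show(rootLayer, touch.pos, animated=True)
#   # a tap outside the menu, or picking an item, dismisses it:
#   menu.dismiss(True)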
| mit | -6,667,060,846,733,342,000 | 27.689922 | 125 | 0.560929 | false |
mitsuse/salada | tests/test_segmenter.py | 1 | 1418 | #!/usr/bin/env python
# coding: utf-8
from salada import language
from salada import segmenter
class TestDefault:
def test_segment_text_by_sequence_of_spaces(self):
text = ' foo \n \n\n bar \t\n baz '
expectation = [
language.Segment('', True, False),
language.Segment('foo', False, False),
language.Segment('bar', False, False),
language.Segment('baz', False, False),
language.Segment('', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
def test_regard_first_as_headless(self):
text = 'foo \n \n\n bar \t\n baz '
expectation = [
language.Segment('foo', True, False),
language.Segment('bar', False, False),
language.Segment('baz', False, False),
language.Segment('', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
def test_regard_last_as_tailless(self):
text = ' foo \n \n\n bar \t\n baz'
expectation = [
language.Segment('', True, False),
language.Segment('foo', False, False),
language.Segment('bar', False, False),
language.Segment('baz', False, True),
]
result = segmenter.Default().segment(text)
assert result == expectation
| mit | -2,338,574,573,679,739,000 | 33.585366 | 54 | 0.559944 | false |
Onager/plaso | plaso/containers/tasks.py | 1 | 6551 | # -*- coding: utf-8 -*-
"""Task related attribute container definitions."""
import time
import uuid
from plaso.containers import interface
from plaso.containers import manager
from plaso.lib import definitions
class Task(interface.AttributeContainer):
"""Task attribute container.
  A task describes a piece of work for a multiprocessing worker process,
  for example a task to process a path specification or to analyze an event.
Attributes:
aborted (bool): True if the session was aborted.
completion_time (int): time that the task was completed. Contains the
number of micro seconds since January 1, 1970, 00:00:00 UTC.
file_entry_type (str): dfVFS type of the file entry the path specification
is referencing.
has_retry (bool): True if the task was previously abandoned and a retry
task was created, False otherwise.
identifier (str): unique identifier of the task.
    last_processing_time (int): the last time the task was marked as being
      processed as number of micro seconds since January 1, 1970, 00:00:00 UTC.
merge_priority (int): priority used for the task storage file merge, where
a lower value indicates a higher priority to merge.
path_spec (dfvfs.PathSpec): path specification.
session_identifier (str): the identifier of the session the task is part of.
start_time (int): time that the task was started. Contains the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
storage_file_size (int): size of the storage file in bytes.
storage_format (str): the format the task results are to be stored in.
"""
CONTAINER_TYPE = 'task'
def __init__(self, session_identifier=None):
"""Initializes a task attribute container.
Args:
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(Task, self).__init__()
self.aborted = False
self.completion_time = None
self.file_entry_type = None
self.has_retry = False
self.identifier = '{0:s}'.format(uuid.uuid4().hex)
self.last_processing_time = None
self.merge_priority = None
self.path_spec = None
self.session_identifier = session_identifier
self.start_time = int(time.time() * definitions.MICROSECONDS_PER_SECOND)
self.storage_file_size = None
self.storage_format = None
# This method is necessary for heap sort.
def __lt__(self, other):
"""Compares if the task attribute container is less than the other.
Args:
other (Task): task attribute container to compare to.
Returns:
bool: True if the task attribute container is less than the other.
"""
return self.identifier < other.identifier
def CreateRetryTask(self):
"""Creates a new task to retry a previously abandoned task.
The retry task will have a new identifier but most of the attributes
will be a copy of the previously abandoned task.
Returns:
Task: a task to retry a previously abandoned task.
"""
retry_task = Task(session_identifier=self.session_identifier)
retry_task.file_entry_type = self.file_entry_type
retry_task.merge_priority = self.merge_priority
retry_task.path_spec = self.path_spec
retry_task.storage_file_size = self.storage_file_size
retry_task.storage_format = self.storage_format
self.has_retry = True
return retry_task
def CreateTaskCompletion(self):
"""Creates a task completion.
Returns:
TaskCompletion: task completion attribute container.
"""
self.completion_time = int(
time.time() * definitions.MICROSECONDS_PER_SECOND)
task_completion = TaskCompletion()
task_completion.aborted = self.aborted
task_completion.identifier = self.identifier
task_completion.session_identifier = self.session_identifier
task_completion.timestamp = self.completion_time
return task_completion
def CreateTaskStart(self):
"""Creates a task start.
Returns:
TaskStart: task start attribute container.
"""
task_start = TaskStart()
task_start.identifier = self.identifier
task_start.session_identifier = self.session_identifier
task_start.timestamp = self.start_time
return task_start
def UpdateProcessingTime(self):
"""Updates the processing time to now."""
self.last_processing_time = int(
time.time() * definitions.MICROSECONDS_PER_SECOND)
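# Illustrative lifecycle sketch (added, not part of the original module); the
# session identifier value is an assumption, real ones come from the engine:
#
#   task = Task(session_identifier='20160101T120000-abcdef')
#   task_start = task.CreateTaskStart()            # marks the start time
#   task.UpdateProcessingTime()                    # heartbeat while working
#   task_completion = task.CreateTaskCompletion()  # sets completion_time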
class TaskCompletion(interface.AttributeContainer):
"""Task completion attribute container.
Attributes:
aborted (bool): True if the session was aborted.
identifier (str): unique identifier of the task.
session_identifier (str): the identifier of the session the task
is part of.
timestamp (int): time that the task was completed. Contains the number
of micro seconds since January 1, 1970, 00:00:00 UTC.
"""
CONTAINER_TYPE = 'task_completion'
def __init__(self, identifier=None, session_identifier=None):
"""Initializes a task completion attribute container.
Args:
identifier (Optional[str]): unique identifier of the task.
The identifier should match that of the corresponding
task start information.
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(TaskCompletion, self).__init__()
self.aborted = False
self.identifier = identifier
self.session_identifier = session_identifier
self.timestamp = None
class TaskStart(interface.AttributeContainer):
"""Task start attribute container.
Attributes:
identifier (str): unique identifier of the task.
session_identifier (str): the identifier of the session the task
is part of.
    timestamp (int): time that the task was started. Contains the number
        of microseconds since January 1, 1970, 00:00:00 UTC.
"""
CONTAINER_TYPE = 'task_start'
def __init__(self, identifier=None, session_identifier=None):
"""Initializes a task start attribute container.
Args:
identifier (Optional[str]): unique identifier of the task.
The identifier should match that of the corresponding
task completion information.
session_identifier (Optional[str]): identifier of the session the task
is part of.
"""
super(TaskStart, self).__init__()
self.identifier = identifier
self.session_identifier = session_identifier
self.timestamp = None
manager.AttributeContainersManager.RegisterAttributeContainers([
Task, TaskCompletion, TaskStart])
| apache-2.0 | -8,643,982,626,627,538,000 | 34.22043 | 80 | 0.70203 | false |
holvi/python-stdnum | stdnum/es/iban.py | 1 | 2510 | # iban.py - functions for handling Spanish IBANs
# coding: utf-8
#
# Copyright (C) 2016 Arthur de Jong
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""Spanish IBAN (International Bank Account Number).
The IBAN is used to identify bank accounts across national borders. The
Spanish IBAN is built up of the IBAN prefix (ES) and check digits, followed
by the 20 digit CCC (Código Cuenta Corriente).
>>> validate('ES77 1234-1234-16 1234567890')
'ES7712341234161234567890'
>>> to_ccc('ES77 1234-1234-16 1234567890')
'12341234161234567890'
>>> format('ES771234-1234-16 1234567890')
'ES77 1234 1234 1612 3456 7890'
>>> validate('GR1601101050000010547023795') # different country
Traceback (most recent call last):
...
InvalidComponent: ...
>>> validate('ES12 1234-1234-16 1234567890') # invalid IBAN check digit
Traceback (most recent call last):
...
InvalidChecksum: ...
>>> validate('ES15 1234-1234-17 1234567890') # invalid CCC check digit
Traceback (most recent call last):
...
InvalidChecksum: ...
"""
from stdnum import iban
from stdnum.es import ccc
from stdnum.exceptions import *
__all__ = ['compact', 'format', 'to_ccc', 'validate', 'is_valid']
compact = iban.compact
format = iban.format
def to_ccc(number):
"""Return the CCC (Código Cuenta Corriente) part of the number."""
number = compact(number)
if not number.startswith('ES'):
raise InvalidComponent()
return number[4:]
def validate(number):
"""Checks to see if the number provided is a valid Spanish IBAN."""
number = iban.validate(number, check_country=False)
ccc.validate(to_ccc(number))
return number
def is_valid(number):
"""Checks to see if the number provided is a valid Spanish IBAN."""
try:
return bool(validate(number))
except ValidationError:
return False
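# Usage sketch (the numbers are the module doctest values, not real accounts):
#
#   >>> is_valid('ES77 1234-1234-16 1234567890')
#   True
#   >>> to_ccc('ES77 1234-1234-16 1234567890')
#   '12341234161234567890'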
| lgpl-2.1 | 2,566,745,078,822,567,400 | 30.746835 | 75 | 0.722887 | false |
Azure/azure-sdk-for-python | sdk/authorization/azure-mgmt-authorization/azure/mgmt/authorization/v2015_06_01/models/_models.py | 1 | 2367 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import msrest.serialization
class ClassicAdministrator(msrest.serialization.Model):
"""Classic Administrators.
:param id: The ID of the administrator.
:type id: str
:param name: The name of the administrator.
:type name: str
:param type: The type of the administrator.
:type type: str
:param email_address: The email address of the administrator.
:type email_address: str
:param role: The role of the administrator.
:type role: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'email_address': {'key': 'properties.emailAddress', 'type': 'str'},
'role': {'key': 'properties.role', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ClassicAdministrator, self).__init__(**kwargs)
self.id = kwargs.get('id', None)
self.name = kwargs.get('name', None)
self.type = kwargs.get('type', None)
self.email_address = kwargs.get('email_address', None)
self.role = kwargs.get('role', None)
class ClassicAdministratorListResult(msrest.serialization.Model):
"""ClassicAdministrator list result information.
:param value: An array of administrators.
:type value: list[~azure.mgmt.authorization.v2015_06_01.models.ClassicAdministrator]
:param next_link: The URL to use for getting the next set of results.
:type next_link: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': '[ClassicAdministrator]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ClassicAdministratorListResult, self).__init__(**kwargs)
self.value = kwargs.get('value', None)
self.next_link = kwargs.get('next_link', None)
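# Construction sketch (all keyword values below are illustrative placeholders):
#
#   admin = ClassicAdministrator(
#       id='/subscriptions/xxxx/providers/Microsoft.Authorization/'
#          'classicAdministrators/admin1',
#       name='admin1',
#       type='Microsoft.Authorization/classicAdministrators',
#       email_address='[email protected]',
#       role='ServiceAdministrator')
#   page = ClassicAdministratorListResult(value=[admin], next_link=None)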
| mit | 6,230,837,319,939,194,000 | 34.328358 | 94 | 0.584284 | false |
birkelbach/python-canfix | tests/dataconversion.py | 1 | 4708 | # Copyright (c) 2018 Phil Birkelbach
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import unittest
from canfix.utils import setValue, getValue
class TestDataConversion(unittest.TestCase):
def setUp(self):
pass
int_tests = [(2200, bytearray([0x98, 0x08]), 1),
(-2200, bytearray([0x68, 0xF7]), 1),
(0, bytearray([0x00, 0x00]), 1),
(-1, bytearray([0xFF, 0xFF]), 1),
(32767, bytearray([0xFF, 0x7F]), 1),
(-32768, bytearray([0x00, 0x80]), 1)]
def test_setvalue_basicint(self):
for each in self.int_tests:
x = setValue("INT",each[0], each[2])
self.assertEqual(x, each[1])
def test_getvalue_basicint(self):
for each in self.int_tests:
x = getValue("INT",each[1], each[2])
self.assertEqual(x, each[0])
uint_tests = [(2200, bytearray([0x98, 0x08]), 1),
(0, bytearray([0x00, 0x00]), 1),
(2000, bytearray([0xD0, 0x07]), 1),
(32767, bytearray([0xFF, 0x7F]), 1),
(65535 , bytearray([0xFF, 0xFF]), 1)]
def test_setvalue_basicuint(self):
for each in self.uint_tests:
x = setValue("UINT",each[0], each[2])
self.assertEqual(x, each[1])
def test_getvalue_basicuint(self):
for each in self.uint_tests:
x = getValue("UINT",each[1], each[2])
self.assertEqual(x, each[0])
float_tests = [(3.1415920257568359375, bytearray([0xd8,0x0f,0x49,0x40])),
(0.0, bytearray([0x00,0x00,0x00,0x00])),
(89.99999237060546875, bytearray([0xFF,0xFF,0xB3,0x42]))]
def test_setvalue_basicfloat(self):
for each in self.float_tests:
x = setValue("FLOAT",each[0])
self.assertEqual(x, each[1])
def test_getvalue_basicfloat(self):
for each in self.float_tests:
x = getValue("FLOAT",each[1])
self.assertEqual(x, each[0])
def test_setvalue_basic_byte(self):
y = [True, False, True]
y.extend([False]*5)
x = setValue("BYTE", y)
self.assertEqual(x, bytearray([0x05]))
def test_getvalue_basic_byte(self):
y = [True, False, True]
y.extend([False]*5)
x = getValue("BYTE", bytearray([0x05]))
self.assertEqual(x, y)
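    # Bit-order note: index 0 of the boolean list maps to the least
    # significant bit, so [True, False, True] + [False]*5 encodes as
    # 0b00000101 == 0x05, as the BYTE and WORD tests here assume.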
def test_setvalue_basic_word(self):
y = [True, False, True]
y.extend([False]*5)
y.extend(y)
x = setValue("WORD", y)
self.assertEqual(x, bytearray([0x05, 0x05]))
def test_getvalue_basic_word(self):
y = [True, False, True]
y.extend([False]*5)
y.extend(y)
x = getValue("WORD", bytearray([0x05, 0x05]))
self.assertEqual(x, y)
def test_setvalue_compound(self):
x = setValue("UINT,USHORT[2]", [21000, 121, 77]) # Date
self.assertEqual(x, bytearray([0x08, 0x52, 0x79, 0x4D]))
x = setValue("USHORT[3],UINT", [121, 77, 255, 21000]) # Time
self.assertEqual(x, bytearray([ 0x79, 0x4D, 0xFF, 0x08, 0x52]))
x = setValue("INT[2],BYTE", [5, -5, [True, False, True, False, False, False, False, False]]) # Encoder
self.assertEqual(x, bytearray([ 0x05, 0x00, 0xFB, 0xFF, 0x05]))
def test_getvalue_compound(self):
x = getValue("UINT,USHORT[2]", bytearray([0x08, 0x52, 0x79, 0x4D])) # Date
self.assertEqual(x, [21000, 121, 77])
x = getValue("USHORT[3],UINT", bytearray([ 0x79, 0x4D, 0xFF, 0x08, 0x52])) # Time
self.assertEqual(x, [121, 77, 255, 21000])
x = getValue("INT[2],BYTE", bytearray([ 0x05, 0x00, 0xFB, 0xFF, 0x05])) # Encoder
self.assertEqual(x, [5, -5, [True, False, True, False, False, False, False, False]])
# TODO: Add tests for...
# multipliers
# shorts
# u shorts
# chars
# arrays
# proper assertions when bad values are packed or unpacked
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 3,385,075,371,110,929,000 | 36.967742 | 110 | 0.579439 | false |
RTHMaK/RPGOne | deep_qa-master/deep_qa/data/instances/instance.py | 1 | 15041 | """
This module contains the base ``Instance`` classes that concrete classes
inherit from. Specifically, there are three classes:
1. ``Instance``, that just exists as a base type with no functionality
2. ``TextInstance``, which adds a ``words()`` method and a method to convert
strings to indices using a DataIndexer.
3. ``IndexedInstance``, which is a ``TextInstance`` that has had all of its
strings converted into indices.
This class has methods to deal with padding (so that sequences all have the
same length) and converting an ``Instance`` into a set of Numpy arrays
suitable for use with Keras.
As this codebase is dealing mostly with textual question answering, pretty much
all of the concrete ``Instance`` types will have both a ``TextInstance`` and a
corresponding ``IndexedInstance``, which you can see in the individual files
for each ``Instance`` type.
"""
import itertools
from typing import Any, Callable, Dict, List
from ..tokenizers import tokenizers
from ..data_indexer import DataIndexer
class Instance:
"""
A data instance, used either for training a neural network or for
testing one.
Parameters
----------
label : boolean or index
For simple ``Instances`` (like ``TextInstance``), this is
either ``True``, ``False``, or ``None``, indicating whether the
instance is a positive, negative or unknown (i.e., test) example,
respectively. For ``MultipleChoiceInstances`` or other more
complicated things, is a class index.
index : int, optional
Used for matching instances with other data, such as background
sentences.
"""
def __init__(self, label, index: int=None):
self.label = label
self.index = index
@staticmethod
def _check_label(label: bool, default_label: bool):
if default_label is not None and label is not None and label != default_label:
raise RuntimeError("Default label given with file, and label in file doesn't match!")
class TextInstance(Instance):
"""
An ``Instance`` that has some attached text, typically either a sentence
or a logical form. This is called a ``TextInstance`` because the
individual tokens here are encoded as strings, and we can
get a list of strings out when we ask what words show up in the instance.
We use these kinds of instances to fit a ``DataIndexer`` (i.e., deciding
which words should be mapped to an unknown token); to use them in training
or testing, we need to first convert them into ``IndexedInstances``.
In order to actually convert text into some kind of indexed sequence,
    we rely on a ``TextEncoder``. There are several ``TextEncoder`` subclasses
    that let you use word token sequences, character sequences, and other
options. By default we use word tokens. You can override this by setting
the ``encoder`` class variable.
"""
tokenizer = tokenizers['words']({})
def __init__(self, label, index: int=None):
super(TextInstance, self).__init__(label, index)
def _words_from_text(self, text: str) -> Dict[str, List[str]]:
return self.tokenizer.get_words_for_indexer(text)
def _index_text(self, text: str, data_indexer: DataIndexer) -> List[int]:
return self.tokenizer.index_text(text, data_indexer)
def words(self) -> Dict[str, List[str]]:
"""
Returns a list of all of the words in this instance, contained in a
namespace dictionary.
This is mainly used for computing word counts when fitting a word
vocabulary on a dataset. The namespace dictionary allows you to have
several embedding matrices with different vocab sizes, e.g., for words
and for characters (in fact, words and characters are the only use
cases I can think of for now, but this allows you to do other more
crazy things if you want). You can call the namespaces whatever you
want, but if you want the ``DataIndexer`` to work correctly without
namespace arguments, you should use the key 'words' to represent word
tokens.
Returns
-------
namespace : Dictionary of {str: List[str]}
The ``str`` key refers to vocabularies, and the ``List[str]``
should contain the tokens in that vocabulary. For example, you
should use the key ``words`` to represent word tokens, and the
correspoding value in the dictionary would be a list of all the
words in the instance.
"""
raise NotImplementedError
def to_indexed_instance(self, data_indexer: DataIndexer) -> 'IndexedInstance':
"""
Converts the words in this ``Instance`` into indices using
the ``DataIndexer``.
Parameters
----------
data_indexer : DataIndexer
``DataIndexer`` to use in converting the ``Instance`` to
an ``IndexedInstance``.
Returns
-------
indexed_instance : IndexedInstance
A ``TextInstance`` that has had all of its strings converted into
indices.
"""
raise NotImplementedError
@classmethod
def read_from_line(cls, line: str, default_label: bool=None):
"""
Reads an instance of this type from a line.
Parameters
----------
line : str
A line from a data file.
default_label: bool
If a label is not provided, the default to use. Mainly used in
``TrueFalseInstance``.
Returns
-------
indexed_instance : IndexedInstance
A ``TextInstance`` that has had all of its strings converted into
indices.
Notes
-----
We throw a ``RuntimeError`` here instead of a ``NotImplementedError``,
because it's not expected that all subclasses will implement this.
"""
# pylint: disable=unused-argument
raise RuntimeError("%s instances can't be read from a line!" % str(cls))
class IndexedInstance(Instance):
"""
An indexed data instance has all word tokens replaced with word indices,
along with some kind of label, suitable for input to a Keras model. An
``IndexedInstance`` is created from an ``Instance`` using a
``DataIndexer``, and the indices here have no recoverable meaning without
the ``DataIndexer``.
For example, we might have the following ``Instance``:
- ``TrueFalseInstance('Jamie is nice, Holly is mean', True, 25)``
After being converted into an ``IndexedInstance``, we might have
the following:
- ``IndexedTrueFalseInstance([1, 6, 7, 1, 6, 8], True, 25)``
This would mean that ``"Jamie"`` and ``"Holly"`` were OOV to the
``DataIndexer``, and the other words were given indices.
"""
@classmethod
def empty_instance(cls):
"""
Returns an empty, unpadded instance of this class. Necessary for option
padding in multiple choice instances.
"""
raise NotImplementedError
def get_lengths(self) -> List[int]:
"""
Returns the length of this instance in all dimensions that
require padding.
Different kinds of instances have different fields that are padded,
such as sentence length, number of background sentences, number of
options, etc.
Returns
-------
lengths: List of int
A list of integers, where the value at each index is the
maximum length in each dimension.
"""
raise NotImplementedError
def pad(self, max_lengths: Dict[str, int]):
"""
Add zero-padding to make each data example of equal length for use
in the neural network.
This modifies the current object.
Parameters
----------
max_lengths: Dictionary of {str:int}
In this dictionary, each ``str`` refers to a type of token
(e.g. ``max_words_question``), and the corresponding ``int`` is
the value. This dictionary must have the same dimension as was
returned by ``get_lengths()``. We will use these lengths to pad the
            instance in all of the necessary dimensions to the given lengths.
"""
raise NotImplementedError
def as_training_data(self):
"""
Convert this ``IndexedInstance`` to NumPy arrays suitable for use as
training data to Keras models.
Returns
-------
train_data : (inputs, label)
The ``IndexedInstance`` as NumPy arrays to be uesd in Keras.
Note that ``inputs`` might itself be a complex tuple, depending
on the ``Instance`` type.
"""
raise NotImplementedError
@staticmethod
def _get_word_sequence_lengths(word_indices: List) -> Dict[str, int]:
"""
Because ``TextEncoders`` can return complex data structures, we might
actually have several things to pad for a single word sequence. We
check for that and handle it in a single spot here. We return a
dictionary containing 'num_sentence_words', which is the number of
words in word_indices. If the word representations also contain
characters, the dictionary additionally contains a
'num_word_characters' key, with a value corresponding to the longest
word in the sequence.
"""
lengths = {'num_sentence_words': len(word_indices)}
if len(word_indices) > 0 and not isinstance(word_indices[0], int):
if isinstance(word_indices[0], list):
lengths['num_word_characters'] = max([len(word) for word in word_indices])
# There might someday be other cases we're missing here, but we'll punt for now.
return lengths
@staticmethod
def pad_word_sequence(word_sequence: List[int],
lengths: Dict[str, int],
truncate_from_right: bool=True) -> List:
"""
Take a list of indices and pads them.
Parameters
----------
word_sequence : List of int
A list of word indices.
lengths : Dictionary of {str:int}
In this dictionary, each ``str`` refers to a type of token
(e.g. ``max_words_question``), and the corresponding ``int`` is
the value. This dictionary must have the same dimension as was
returned by ``get_lengths()``. We will use these lengths to pad the
            instance in all of the necessary dimensions to the given lengths.
truncate_from_right : bool, default=True
If truncating the indices is necessary, this parameter dictates
whether we do so on the left or right.
Returns
-------
padded_word_sequence : List of int
A padded list of word indices.
Notes
-----
The reason we truncate from the right by default is for
cases that are questions, with long set ups. We at least want to get
the question encoded, which is always at the end, even if we've lost
much of the question set up. If you want to truncate from the other
direction, you can.
"""
default_value = lambda: 0
if 'num_word_characters' in lengths:
default_value = lambda: []
padded_word_sequence = IndexedInstance.pad_sequence_to_length(
word_sequence, lengths['num_sentence_words'], default_value, truncate_from_right)
if 'num_word_characters' in lengths:
desired_length = lengths['num_word_characters']
longest_word = max(padded_word_sequence, key=len)
if desired_length > len(longest_word):
# since we want to pad to greater than the longest word, we add a
# "dummy word" to get the speed of itertools.zip_longest
padded_word_sequence.append([0]*desired_length)
# pad the list of lists to the longest sublist, appending 0's
words_padded_to_longest = list(zip(*itertools.zip_longest(*padded_word_sequence,
fillvalue=0)))
if desired_length > len(longest_word):
# now we remove the "dummy word" if we appended one.
words_padded_to_longest.pop()
# now we need to truncate all of them to our desired length.
            # characters are always truncated from the end of each word
            # (word[:desired_length]), regardless of truncate_from_right.
padded_word_sequence = [list(word[:desired_length])
for word in words_padded_to_longest]
return padded_word_sequence
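    # Illustrative example (assumes character-level word representations):
    # pad_word_sequence([[1, 2], [3]],
    #                   {'num_sentence_words': 3, 'num_word_characters': 2})
    # pads the sentence on the left and each word on the right, returning
    # [[0, 0], [1, 2], [3, 0]].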
@staticmethod
def pad_sequence_to_length(sequence: List,
desired_length: int,
default_value: Callable[[], Any]=lambda: 0,
truncate_from_right: bool=True) -> List:
"""
Take a list of indices and pads them to the desired length.
Parameters
----------
        sequence : List
            A list of items (e.g., word indices) to pad or truncate.
desired_length : int
Maximum length of each sequence. Longer sequences
are truncated to this length, and shorter ones are padded to it.
default_value: Callable, default=lambda: 0
Callable that outputs a default value (of any type) to use as
padding values.
truncate_from_right : bool, default=True
If truncating the indices is necessary, this parameter dictates
whether we do so on the left or right.
Returns
-------
padded_word_sequence : List of int
A padded or truncated list of word indices.
Notes
-----
The reason we truncate from the right by default is for
cases that are questions, with long set ups. We at least want to get
the question encoded, which is always at the end, even if we've lost
much of the question set up. If you want to truncate from the other
direction, you can.
"""
if truncate_from_right:
truncated = sequence[-desired_length:]
else:
truncated = sequence[:desired_length]
if len(truncated) < desired_length:
# If the length of the truncated sequence is less than the desired
# length, we need to pad.
padding_sequence = [default_value()] * (desired_length - len(truncated))
if truncate_from_right:
# When we truncate from the right, we add zeroes to the front.
padding_sequence.extend(truncated)
return padding_sequence
else:
# When we do not truncate from the right, we add zeroes to the end.
truncated.extend(padding_sequence)
return truncated
return truncated
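    # Illustrative example: pad_sequence_to_length([1, 2, 3], 5) returns
    # [0, 0, 1, 2, 3] (padded on the left, since truncate_from_right defaults
    # to True), while pad_sequence_to_length([1, 2, 3], 2) returns [2, 3].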
| apache-2.0 | 979,567,012,326,318,200 | 39.761518 | 97 | 0.619773 | false |
willo12/spacegrids | spacegrids/_iosg.py | 1 | 5429 | #encoding:utf-8
""" io related
"""
import numpy as np
from _config import *
import warnings
warnings.formatwarning = warning_on_one_line
# use_scientificio is set in config
#use_scientificio = False
# fallback is always scipy.io: least dependencies
# cdf_lib set in _config.py and determines which library to use
if cdf_lib =='netcdf4':
try:
from netCDF4 import Dataset
cdf_lib_used = 'netcdf4'
# print 'Using netCDF4'
except:
warnings.warn('no Netcdf4. Reverting to scipy.')
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
elif cdf_lib == 'scientificio':
try:
import Scientific.IO.NetCDF
print 'Using Scientific.IO.NetCDF'
cdf_lib_used = 'scientificio'
except:
warnings.warn('no Scientific io. Reverting to scipy.')
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
else:
from scipy.io import netcdf
cdf_lib_used = 'scipyio'
print 'Using scipyio'
import os
from fieldcls import *
def netcdf_file(filepath,mode = 'r', *args, **kwargs):
"""
Wrapper for opening Netcdf functions from NETCDF4, ScientificIO or Scipy
Depends on cdf_lib_used variable.
For 'netcdf4':
file = Dataset(filepath,mode, format='NETCDF4')
For 'scientificio':
file = Scientific.IO.NetCDF.NetCDFFile(filename = filepath, mode = mode)
Otherwise:
file = netcdf.netcdf_file(filename = filepath, mode = mode, *args, **kwargs)
Args:
filepath: (str) full path to file
mode: (str) mode to use as mode argument to file opening function
Returns:
file handle if successful.
Raises:
IOError if there are problems opening the file.
"""
if cdf_lib_used =='netcdf4':
try:
file = Dataset(filepath,mode, format='NETCDF4', *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using NetCDF4'%filepath)
else:
return file
if cdf_lib_used == 'scientificio':
try:
file = Scientific.IO.NetCDF.NetCDFFile(filename = filepath, mode = mode, *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using Scientific.IO'%filepath)
else:
return file
else:
# Scipy way:
try:
file = netcdf.netcdf_file(filename = filepath, mode = mode, *args, **kwargs)
except IOError:
raise IOError('Cannot open %s using Scipy'%filepath)
else:
return file
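# Usage sketch (the path is hypothetical):
#
#   f = netcdf_file('/path/to/data.nc', 'r')
#   # ... read variables via whichever backend was selected ...
#   f.close()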
def msk_read(filepath='masks/msk', crop = 1):
"""
Reads a text file containing a mask pointed to by filepath and returns a corresponding array.
Due to the way these masks are stored for the UVic model, cropping is needed, as indicated
by the crop flag in the arguments. This is the lowest level mask read function in sg.
Args:
filepath: (str) path to the file
crop: (int) amount of points to crop at the margins.
  Returns:
ndarray containing mask.
"""
str_data = []
with open(filepath,'r') as fobj:
str_data = fobj.readlines()
data = []
for eachline in str_data:
data_line = []
for elem in eachline:
try:
data_line.append(int(elem))
except:
pass
data.append(data_line)
if crop:
return np.flipud(np.array(data))[1:-1,1:-1]
else:
return np.flipud(np.array(data))
def read_masks(dir_path, msk_shape=None,grids = False, msk_val =2):
"""
Reads mask and returns a list of Field objects containing masks.
Calls msk_read, see msk_read.
Args:
dir_path: (str) path to directory
msk_shape: (None or tuple of int) describing supposed shape of mask
grids: (Gr) grid to use for masks
msk_val: (int) value that will not be nan in mask
Returns:
Dictionary of masks and their names
"""
if not(grids):
print 'read_masks: Provide grid --> no masks loaded.'
return
if isinstance(grids,Gr):
grids = [grids]
try:
L = os.listdir(dir_path)
except:
print "No mask dir."
L=[]
masks = {}
for it in L:
try:
fpath = os.path.join(dir_path,it)
msk = msk_read(fpath)
if msk_shape is not None:
# only test shape if msk_shape not default value of None
if (msk.shape != tuple(msk_shape)):
print "Warning: mask shape does not agree: " + it,
print msk.shape,
print msk_shape
msk = np.array(msk,dtype = np.float32)
if msk_val:
msk[msk != msk_val] = np.nan
msk[msk == msk_val] = 1
      mskob = None
      for g in grids:
        try:
          mskob = Field(name = it, value = msk, grid = g)
          break
        except:
          # this grid does not fit the mask; try the next one
          pass
      if mskob is None:
        # without this guard, a stale mskob from a previous iteration could
        # be silently reused here
        raise Exception('no grid fits mask %s' % it)
      masks[it] = mskob
except:
print "No mask."
return masks
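# Usage sketch (assumes 'grid' is an existing Gr instance and dir_path holds
# UVic-style mask text files; the mask name is hypothetical):
#
#   masks = read_masks('masks/', msk_shape=(100, 100), grids=grid, msk_val=2)
#   basin = masks.get('msk_basin')  # -> Field, or None if absent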
def locate(top = '/home/',fname = projnickfile):
"""
Locates all files with filename fname. Helper function to info function.
Args:
top: (str) the start dir
fname: (str)the filename to look for.
Returns:
List of all paths to dirs containing fname.
"""
paths = []
if fname is not None:
for root, dirs, files in os.walk(top=top):
if fname in files:
paths.append(root)
else:
paths = [ os.path.join(top,subdir) for subdir in os.listdir(top) ]
return paths
| bsd-3-clause | -3,902,353,986,811,476,500 | 21.810924 | 95 | 0.609136 | false |
walterdejong/synctool | src/synctool/main/config.py | 1 | 13961 | #
# synctool.main.config.py WJ109
#
# synctool Copyright 2015 Walter de Jong <[email protected]>
#
# synctool COMES WITH NO WARRANTY. synctool IS FREE SOFTWARE.
# synctool is distributed under terms described in the GNU General Public
# License.
#
'''show elements of the synctool.conf file
This program is nice for shell scripting around synctool
'''
import sys
import getopt
import socket
try:
from typing import List
except ImportError:
pass
from synctool import config, param
from synctool.lib import stderr, error
from synctool.main.wrapper import catch_signals
import synctool.nodeset
# hardcoded name because otherwise we get "config.py"
PROGNAME = 'config'
ACTION = 0
ACTION_OPTION = None # type: str
ARG_NODENAMES = None # type: str
ARG_GROUPS = None # type: str
ARG_CMDS = None # type: List[str]
ARG_EXPAND = None # type: str
# these are enums for the "list" command-line options
ACTION_LIST_NODES = 1
ACTION_LIST_GROUPS = 2
ACTION_NODES = 3
ACTION_GROUPS = 4
ACTION_CMDS = 5
ACTION_PKGMGR = 6
ACTION_NUMPROC = 7
ACTION_LIST_DIRS = 8
ACTION_PREFIX = 9
ACTION_MASTER = 10
ACTION_SLAVE = 11
ACTION_NODENAME = 12
ACTION_FQDN = 13
ACTION_EXPAND = 14
ACTION_VERSION = 15
# optional: do not list hosts/groups that are ignored
OPT_FILTER_IGNORED = False
# optional: list ipaddresses of the selected nodes
OPT_IPADDRESS = False
# optional: list rsync yes/no qualifier
OPT_RSYNC = False
def list_all_nodes():
# type: () -> None
'''display a list of all nodes'''
nodes = config.get_all_nodes()
nodes.sort()
for node in nodes:
ignored = set(config.get_groups(node))
ignored &= param.IGNORE_GROUPS
if OPT_FILTER_IGNORED and ignored:
continue
if OPT_IPADDRESS:
node += ' ' + config.get_node_ipaddress(node)
if OPT_RSYNC:
if node in param.NO_RSYNC:
node += ' no'
else:
node += ' yes'
if ignored:
node += ' (ignored)'
print node
def list_all_groups():
# type: () -> None
'''display a list of all groups'''
groups = param.GROUP_DEFS.keys()
groups.sort()
for group in groups:
if OPT_FILTER_IGNORED and group in param.IGNORE_GROUPS:
continue
if group in param.IGNORE_GROUPS:
group += ' (ignored)'
print group
def list_nodes(nodelist):
# type: (str) -> None
'''display node definition'''
nodeset = synctool.nodeset.NodeSet()
try:
nodeset.add_node(nodelist)
except synctool.range.RangeSyntaxError as err:
error(str(err))
sys.exit(1)
if nodeset.addresses() is None:
# error message already printed
sys.exit(1)
groups = [] # type: List[str]
for node in nodeset.nodelist:
if OPT_IPADDRESS or OPT_RSYNC:
out = ''
if OPT_IPADDRESS:
out += ' ' + config.get_node_ipaddress(node)
if OPT_RSYNC:
if node in param.NO_RSYNC:
out += ' no'
else:
out += ' yes'
print out[1:]
else:
for group in config.get_groups(node):
# extend groups, but do not have duplicates
if group not in groups:
groups.append(group)
# group order is important, so don't sort
# however, when you list multiple nodes at once, the groups will have
# been added to the end
# So the order is important, but may be incorrect when listing
# multiple nodes at once
# groups.sort()
for group in groups:
if OPT_FILTER_IGNORED and group in param.IGNORE_GROUPS:
continue
if group in param.IGNORE_GROUPS:
group += ' (ignored)'
print group
def list_nodegroups(grouplist):
# type: (str) -> None
'''display list of nodes that are member of group'''
nodeset = synctool.nodeset.NodeSet()
try:
nodeset.add_group(grouplist)
except synctool.range.RangeSyntaxError as err:
error(str(err))
sys.exit(1)
if nodeset.addresses() is None:
# error message already printed
sys.exit(1)
arr = list(nodeset.nodelist)
arr.sort()
for node in arr:
ignored = set(config.get_groups(node))
ignored &= param.IGNORE_GROUPS
if OPT_FILTER_IGNORED and ignored:
continue
if OPT_IPADDRESS:
node += ' ' + config.get_node_ipaddress(node)
if OPT_RSYNC:
if node in param.NO_RSYNC:
node += ' no'
else:
node += ' yes'
if ignored:
node += ' (ignored)'
print node
def list_commands(cmds):
# type: (List[str]) -> None
'''display command setting'''
for cmd in cmds:
if cmd == 'diff':
ok, _ = config.check_cmd_config('diff_cmd', param.DIFF_CMD)
if ok:
print param.DIFF_CMD
        elif cmd == 'ping':
ok, _ = config.check_cmd_config('ping_cmd', param.PING_CMD)
if ok:
print param.PING_CMD
elif cmd == 'ssh':
ok, _ = config.check_cmd_config('ssh_cmd', param.SSH_CMD)
if ok:
print param.SSH_CMD
elif cmd == 'rsync':
ok, _ = config.check_cmd_config('rsync_cmd', param.RSYNC_CMD)
if ok:
print param.RSYNC_CMD
elif cmd == 'synctool':
ok, _ = config.check_cmd_config('synctool_cmd', param.SYNCTOOL_CMD)
if ok:
print param.SYNCTOOL_CMD
elif cmd == 'pkg':
ok, _ = config.check_cmd_config('pkg_cmd', param.PKG_CMD)
if ok:
print param.PKG_CMD
else:
error("no such command '%s' available in synctool" % cmd)
def list_dirs():
# type: () -> None
'''display directory settings'''
print 'rootdir', param.ROOTDIR
print 'overlaydir', param.OVERLAY_DIR
print 'deletedir', param.DELETE_DIR
print 'scriptdir', param.SCRIPT_DIR
print 'tempdir', param.TEMP_DIR
def expand(nodelist):
# type: (str) -> None
'''display expanded argument'''
nodeset = synctool.nodeset.NodeSet()
try:
nodeset.add_node(nodelist)
except synctool.range.RangeSyntaxError as err:
error(str(err))
sys.exit(1)
# don't care if the nodes do not exist
arr = list(nodeset.nodelist)
arr.sort()
for elem in arr:
print elem,
print
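# Example (node names are hypothetical; range syntax as implemented in
# synctool.range):
#
#   synctool-config --expand 'node[1-3]'
#   node1 node2 node3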
def set_action(a, opt):
# type: (int, str) -> None
'''set the action to perform'''
# this is a helper function for the command-line parser
global ACTION, ACTION_OPTION
if ACTION > 0:
error('options %s and %s can not be combined' % (ACTION_OPTION, opt))
sys.exit(1)
ACTION = a
ACTION_OPTION = opt
def usage():
# type: () -> None
'''print usage information'''
print 'usage: %s [options]' % PROGNAME
print 'options:'
print ' -h, --help Display this information'
print ' -c, --conf=FILE Use this config file'
print ' (default: %s)' % param.DEFAULT_CONF
print ''' -l, --list-nodes List all configured nodes
-L, --list-groups List all configured groups
-n, --node=LIST List all groups this node is in
-g, --group=LIST List all nodes in this group
-i, --ipaddress List selected nodes' IP address
-r, --rsync List selected nodes' rsync qualifier
-f, --filter-ignored Do not list ignored nodes and groups
-C, --command=COMMAND Display setting for command
-P, --package-manager Display configured package manager
-N, --numproc Display numproc setting
-d, --list-dirs Display directory settings
--prefix Display installation prefix
--master Display configured master fqdn
--slave Display configured slave nodes
--nodename Display my nodename
--fqdn Display my FQDN (fully qualified domain name)
-x, --expand=LIST Expand given node list
-v, --version Display synctool version
COMMAND is a list of these: diff,ping,ssh,rsync,synctool,pkg
'''
def get_options():
# type: () -> None
'''parse command-line options'''
global ARG_NODENAMES, ARG_GROUPS, ARG_CMDS, ARG_EXPAND
global OPT_FILTER_IGNORED, OPT_IPADDRESS, OPT_RSYNC
if len(sys.argv) <= 1:
usage()
sys.exit(1)
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hc:lLn:g:irfC:PNdx:v',
                                   ['help', 'conf=', 'list-nodes',
                                    'list-groups', 'node=', 'group=',
                                    'ipaddress', 'rsync', 'filter-ignored',
                                    'command=', 'package-manager', 'numproc',
                                    'list-dirs', 'prefix', 'master', 'slave',
                                    'nodename', 'fqdn', 'expand=', 'version'])
except getopt.GetoptError as reason:
print
print '%s: %s' % (PROGNAME, reason)
print
usage()
sys.exit(1)
if args:
error('excessive arguments on command-line')
sys.exit(1)
errors = 0
for opt, arg in opts:
if opt in ('-h', '--help', '-?'):
usage()
sys.exit(1)
if opt in ('-c', '--conf'):
param.CONF_FILE = arg
continue
if opt in ('-l', '--list-nodes'):
set_action(ACTION_LIST_NODES, '--list-nodes')
continue
if opt in ('-L', '--list-groups'):
set_action(ACTION_LIST_GROUPS, '--list-groups')
continue
if opt in ('-n', '--node'):
set_action(ACTION_NODES, '--node')
ARG_NODENAMES = arg
continue
if opt in ('-g', '--group'):
set_action(ACTION_GROUPS, '--group')
ARG_GROUPS = arg
continue
        if opt in ('-i', '--ipaddress'):
OPT_IPADDRESS = True
continue
if opt in ('-r', '--rsync'):
OPT_RSYNC = True
continue
if opt in ('-f', '--filter-ignored'):
OPT_FILTER_IGNORED = True
continue
if opt in ('-C', '--command'):
set_action(ACTION_CMDS, '--command')
ARG_CMDS = arg.split(',')
continue
if opt in ('-P', '--package-manager'):
set_action(ACTION_PKGMGR, '--package-manager')
continue
if opt in ('-N', '--numproc'):
set_action(ACTION_NUMPROC, '--numproc')
continue
if opt in ('-d', '--list-dirs'):
set_action(ACTION_LIST_DIRS, '--list-dirs')
continue
if opt == '--prefix':
set_action(ACTION_PREFIX, '--prefix')
continue
if opt == '--master':
set_action(ACTION_MASTER, '--master')
continue
if opt == '--slave':
set_action(ACTION_SLAVE, '--slave')
continue
if opt == '--nodename':
set_action(ACTION_NODENAME, '--nodename')
continue
if opt == '--fqdn':
set_action(ACTION_FQDN, '--fqdn')
continue
if opt in ('-x', '--expand'):
set_action(ACTION_EXPAND, '--expand')
ARG_EXPAND = arg
continue
if opt in ('-v', '--version'):
set_action(ACTION_VERSION, '--version')
continue
error("unknown command line option '%s'" % opt)
errors += 1
if errors:
usage()
sys.exit(1)
if not ACTION:
usage()
sys.exit(1)
@catch_signals
def main():
# type: () -> None
'''do your thing'''
param.init()
get_options()
if ACTION == ACTION_VERSION:
print param.VERSION
sys.exit(0)
if ACTION == ACTION_FQDN:
print socket.getfqdn()
sys.exit(0)
config.read_config()
# synctool.nodeset.make_default_nodeset()
if ACTION == ACTION_LIST_NODES:
list_all_nodes()
elif ACTION == ACTION_LIST_GROUPS:
list_all_groups()
elif ACTION == ACTION_NODES:
if not ARG_NODENAMES:
error("option '--node' requires an argument; the node name")
sys.exit(1)
list_nodes(ARG_NODENAMES)
elif ACTION == ACTION_GROUPS:
if not ARG_GROUPS:
error("option '--node-group' requires an argument; "
"the node group name")
sys.exit(1)
list_nodegroups(ARG_GROUPS)
elif ACTION == ACTION_CMDS:
list_commands(ARG_CMDS)
elif ACTION == ACTION_PKGMGR:
print param.PACKAGE_MANAGER
elif ACTION == ACTION_NUMPROC:
print param.NUM_PROC
elif ACTION == ACTION_LIST_DIRS:
list_dirs()
elif ACTION == ACTION_PREFIX:
print param.ROOTDIR
elif ACTION == ACTION_NODENAME:
config.init_mynodename()
if not param.NODENAME:
error('unable to determine my nodename (%s)' %
param.HOSTNAME)
stderr('please check %s' % param.CONF_FILE)
sys.exit(1)
print param.NODENAME
elif ACTION == ACTION_MASTER:
print param.MASTER
elif ACTION == ACTION_SLAVE:
if not param.SLAVES:
print '(none)'
else:
for node in param.SLAVES:
print node,
print
elif ACTION == ACTION_EXPAND:
if not ARG_EXPAND:
print 'none'
else:
expand(ARG_EXPAND)
else:
raise RuntimeError('bug: unknown ACTION code %d' % ACTION)
# EOB
| gpl-2.0 | 3,027,805,136,995,633,000 | 24.90167 | 79 | 0.539718 | false |
oso/qgis-etri | ui/inference_results.py | 1 | 5734 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/inference_results.ui'
#
# Created: Tue Nov 19 19:57:44 2013
# by: PyQt4 UI code generator 4.10.2
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_inference_results(object):
def setupUi(self, inference_results):
inference_results.setObjectName(_fromUtf8("inference_results"))
inference_results.resize(800, 600)
self.verticalLayout = QtGui.QVBoxLayout(inference_results)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.groupBox = QtGui.QGroupBox(inference_results)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(30)
sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
self.groupBox.setSizePolicy(sizePolicy)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.graph_model = _MyGraphicsview(self.groupBox)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(5)
sizePolicy.setHeightForWidth(self.graph_model.sizePolicy().hasHeightForWidth())
self.graph_model.setSizePolicy(sizePolicy)
self.graph_model.setStyleSheet(_fromUtf8("background-color: transparent;"))
self.graph_model.setFrameShape(QtGui.QFrame.NoFrame)
self.graph_model.setFrameShadow(QtGui.QFrame.Sunken)
self.graph_model.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff)
self.graph_model.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.graph_model.setAlignment(QtCore.Qt.AlignCenter)
self.graph_model.setRenderHints(QtGui.QPainter.Antialiasing|QtGui.QPainter.TextAntialiasing)
self.graph_model.setObjectName(_fromUtf8("graph_model"))
self.verticalLayout_2.addWidget(self.graph_model)
self.label_lambda = QtGui.QLabel(self.groupBox)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_lambda.setFont(font)
self.label_lambda.setText(_fromUtf8(""))
self.label_lambda.setObjectName(_fromUtf8("label_lambda"))
self.verticalLayout_2.addWidget(self.label_lambda)
self.verticalLayout.addWidget(self.groupBox)
self.tabWidget = QtGui.QTabWidget(inference_results)
self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
self.tabWidget.setDocumentMode(False)
self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
self.tab = QtGui.QWidget()
self.tab.setObjectName(_fromUtf8("tab"))
self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab)
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.table_comp = qt_performance_table(self.tab)
self.table_comp.setObjectName(_fromUtf8("table_comp"))
self.table_comp.setColumnCount(0)
self.table_comp.setRowCount(0)
self.verticalLayout_5.addWidget(self.table_comp)
self.tabWidget.addTab(self.tab, _fromUtf8(""))
self.tab_2 = QtGui.QWidget()
self.tab_2.setObjectName(_fromUtf8("tab_2"))
self.verticalLayout_4 = QtGui.QVBoxLayout(self.tab_2)
self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
self.table_incomp = qt_performance_table(self.tab_2)
self.table_incomp.setObjectName(_fromUtf8("table_incomp"))
self.table_incomp.setColumnCount(0)
self.table_incomp.setRowCount(0)
self.verticalLayout_4.addWidget(self.table_incomp)
self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
self.verticalLayout.addWidget(self.tabWidget)
self.buttonBox = QtGui.QDialogButtonBox(inference_results)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Close|QtGui.QDialogButtonBox.Save)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
self.verticalLayout.addWidget(self.buttonBox)
self.retranslateUi(inference_results)
self.tabWidget.setCurrentIndex(0)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), inference_results.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), inference_results.reject)
QtCore.QMetaObject.connectSlotsByName(inference_results)
def retranslateUi(self, inference_results):
inference_results.setWindowTitle(_translate("inference_results", "ELECTRE-TRI Inference results", None))
self.groupBox.setTitle(_translate("inference_results", "Model learned", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), _translate("inference_results", "Compatible alternatives", None))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), _translate("inference_results", "Incompatible alternatives", None))
from table import qt_performance_table
from graphic import _MyGraphicsview
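# Usage sketch (standard pyuic pattern; 'ResultsDialog' is hypothetical):
#
#   class ResultsDialog(QtGui.QDialog, Ui_inference_results):
#       def __init__(self, parent=None):
#           QtGui.QDialog.__init__(self, parent)
#           self.setupUi(self)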
| gpl-3.0 | -2,015,396,951,820,727,000 | 51.605505 | 137 | 0.720788 | false |
vicgc/bitcurator | python/bc_genrep_gui.py | 1 | 13484 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# coding=UTF-8
#
# Created: Sun May 26 15:35:39 2013
# by: PyQt4 UI code generator 4.9.1, modified manually
#
import os
from PyQt4 import QtCore, QtGui
import sys
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.uic import *
from generate_report import *
from bc_utils import *
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
fiwalkXmlFileName = "null"
beAnnotatedDirName = "null"
outputDirName = "null"
configFileName = "null"
# DEBUG: The below lines are for bypassing gui - for test purposes only.
# Comment out the above 4 lines for testing
'''
### Uncomment for debugging only
fiwalkXmlFileName = "/home/sunitha/Research/TestData/BEO_master/charlie_fi_F.xml"
beAnnotatedDirName = "/home/sunitha/Research/TestData/BEO_master/annotated_charlie_output"
outputDirName = "/home/sunitha/Research/TestData/BEO_master/charlie_xml_outdir"
configFileName = "/home/sunitha/BC/bitcurator-master/python/t"
'''
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Generate Report"))
Form.resize(436, 511)
self.label_configFile = QtGui.QLabel(Form)
self.label_configFile.setGeometry(QtCore.QRect(10, 200, 211, 17))
self.label_configFile.setObjectName(_fromUtf8("label_configFile"))
self.lineEdit_configFile = QtGui.QLineEdit(Form)
self.lineEdit_configFile.setGeometry(QtCore.QRect(10, 220, 271, 31))
self.lineEdit_configFile.setObjectName(_fromUtf8("lineEdit_configFile"))
self.label_outdir = QtGui.QLabel(Form)
self.label_outdir.setGeometry(QtCore.QRect(10, 140, 201, 17))
self.label_outdir.setObjectName(_fromUtf8("label_outdir"))
self.lineEdit_outdir = QtGui.QLineEdit(Form)
self.lineEdit_outdir.setGeometry(QtCore.QRect(10, 160, 271, 27))
self.lineEdit_outdir.setObjectName(_fromUtf8("lineEdit_outdir"))
self.label_annDir = QtGui.QLabel(Form)
self.label_annDir.setGeometry(QtCore.QRect(10, 70, 291, 27))
self.label_annDir.setObjectName(_fromUtf8("label_annDir"))
self.lineEdit_annDir = QtGui.QLineEdit(Form)
self.lineEdit_annDir.setGeometry(QtCore.QRect(10, 100, 273, 27))
self.lineEdit_annDir.setText(_fromUtf8(""))
self.lineEdit_annDir.setObjectName(_fromUtf8("lineEdit_annDir"))
self.label_xmlfile = QtGui.QLabel(Form)
self.label_xmlfile.setGeometry(QtCore.QRect(10, 10, 131, 27))
self.label_xmlfile.setObjectName(_fromUtf8("label_xmlfile"))
self.lineEdit_xmlFile = QtGui.QLineEdit(Form)
self.lineEdit_xmlFile.setGeometry(QtCore.QRect(10, 40, 273, 27))
self.lineEdit_xmlFile.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
self.lineEdit_xmlFile.setText(_fromUtf8(""))
self.lineEdit_xmlFile.setObjectName(_fromUtf8("lineEdit_xmlFile"))
self.buttonBox = QtGui.QDialogButtonBox(Form)
self.buttonBox.setGeometry(QtCore.QRect(210, 470, 221, 32))
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok|QtGui.QDialogButtonBox.Close)
self.buttonBox.setObjectName(_fromUtf8("buttonBox"))
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("accepted()")), self.buttonClickedOk)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL(_fromUtf8("rejected()")), self.buttonClickedCancel)
self.toolButton = QtGui.QToolButton(Form)
self.toolButton.setGeometry(QtCore.QRect(290, 40, 23, 25))
self.toolButton.setObjectName(_fromUtf8("toolButton"))
self.toolButton_2 = QtGui.QToolButton(Form)
self.toolButton_2.setGeometry(QtCore.QRect(290, 100, 23, 25))
self.toolButton_2.setObjectName(_fromUtf8("toolButton_2"))
self.toolButton_3 = QtGui.QToolButton(Form)
self.toolButton_3.setGeometry(QtCore.QRect(290, 160, 23, 25))
self.toolButton_3.setObjectName(_fromUtf8("toolButton_3"))
self.textEdit = QtGui.QTextEdit(Form)
self.textEdit.setGeometry(QtCore.QRect(10, 300, 421, 141))
self.textEdit.setObjectName(_fromUtf8("textEdit"))
self.label_5 = QtGui.QLabel(Form)
self.label_5.setGeometry(QtCore.QRect(10, 270, 171, 17))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.toolButton_4 = QtGui.QToolButton(Form)
self.toolButton_4.setGeometry(QtCore.QRect(290, 220, 23, 25))
self.toolButton_4.setObjectName(_fromUtf8("toolButton_4"))
self.retranslateUi(Form)
QtCore.QObject.connect(self.toolButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.getFiwalkXmlFileName)
QtCore.QObject.connect(self.toolButton_2, QtCore.SIGNAL(_fromUtf8("clicked()")), self.getBeAnnotatedDir)
QtCore.QObject.connect(self.toolButton_3, QtCore.SIGNAL(_fromUtf8("clicked()")), self.getOutputDir)
QtCore.QObject.connect(self.toolButton_4, QtCore.SIGNAL(_fromUtf8("clicked()")), self.getConfigFile)
QtCore.QMetaObject.connectSlotsByName(Form)
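    # NOTE: readOutput below references self.process and self.textBrowser2,
    # neither of which is created anywhere in this class; it appears to be
    # leftover code from an earlier QProcess-based design and is never
    # connected or called in this file.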
def readOutput(self):
self.textBrowser2.append(QString(self.process.readStdout()))
if self.process.isRunning()==False:
self.textBrowser2.append("\n Completed Successfully")
# bc_check_parameters: Check if the selected files exist. Also
# if the text entered in the boxes doesn't match what was selected
# by navigating through directory structure, use the text in the
# box as the final selection.
def bc_check_parameters(self):
# If XML file not selected through menu, see if it is typed in the box:
if ui.lineEdit_xmlFile.text() != self.fiwalkXmlFileName:
self.fiwalkXmlFileName = ui.lineEdit_xmlFile.text()
# print("D:Fiwalk XML FIle Selected from the box: ", self.fiwalkXmlFileName)
if not os.path.exists(self.fiwalkXmlFileName):
print("XML File %s does not exist. Aborting" %self.fiwalkXmlFileName)
return (-1)
# If Annotated file is not selected through menu, see if it is
# typed in the text box:
if ui.lineEdit_annDir.text() != self.beAnnotatedDirName:
self.beAnnotatedDirName = ui.lineEdit_annDir.text()
# print("D: Annotated Directory Selected from the box: ", self.beAnnotatedDirName)
if not os.path.exists(self.beAnnotatedDirName):
print("BE Annotated Directory %s does not exist. Aborting" %self.beAnnotatedDirName)
return (-1)
# If Outdir is not selected through menu, see if it is typed
# in the text box:
if ui.lineEdit_outdir.text() != self.outputDirName:
self.outputDirName = ui.lineEdit_outdir.text()
# print("D: Output Directory selected from the box: ", self.outputDirName)
# The directory is not supposed to exist. Return -1 if it does.
if (os.path.exists(self.outputDirName)):
print(">> Error: Output Directory %s exists. " %self.outputDirName)
return (-1)
if ui.lineEdit_configFile.text() != self.configFileName:
self.configFileName = ui.lineEdit_configFile.text()
# print("D: Config File selected from the box: ", self.configFileName)
# If config file is not provided by the user, user the default one
if not os.path.exists(self.configFileName):
print(">> Using the default config file: /etc/bitcurator/bc_report_config.txt")
self.configFileName = "/etc/bitcurator/bc_report_config.txt"
return (0)
# buttonClickCancel: This called by any click that represents the
# "Reject" role - Cancel and Close here. It just terminates the Gui.
def buttonClickedCancel(self):
QtCore.QCoreApplication.instance().quit()
# buttonClickedOk: Routine invoked when the OK button is clicked.
# Using StringIO (equivalent to cStringIO in Python-2.x), the stdio is
# redirected into an in-memory buffer, which is displayed in the
# text window at the end.
def buttonClickedOk(self):
use_config_file = True
# The standard output from this point is placed by an in-memory
# buffer.
self.oldstdout = sys.stdout
sys.stdout = StringIO()
# Check if the indicated files exist. If not, return after
# printing the error. Also terminate the redirecting of the
# stdout to the in-memory buffer.
if self.bc_check_parameters() == -1:
print(">> Report Generation is Aborted ")
self.textEdit.setText( sys.stdout.getvalue() )
sys.stdout = self.oldstdout
return
# All fine. Generate the reports now.
bc_get_reports(PdfReport, FiwalkReport, self.fiwalkXmlFileName, \
self.beAnnotatedDirName, \
self.outputDirName, \
self.configFileName)
# Terminate the redirecting of the stdout to the in-memory buffer.
self.textEdit.setText( sys.stdout.getvalue() )
sys.stdout = self.oldstdout
# We will not quit from the Gui window until the user clicks
# on Close.
# QtCore.QCoreApplication.instance().quit()
    # getFiwalkXmlFileName: Routine to let the user choose the XML file -
    # by navigating through the directories
def getFiwalkXmlFileName(self):
# Navigation
xml_file = QtGui.QFileDialog.getOpenFileName()
# print("D: Fiwalk XML File Selected: ", xml_file)
self.lineEdit_xmlFile.setText(xml_file)
self.fiwalkXmlFileName = xml_file
return xml_file
# getBeAnnotatedDir: Routine to let the user choose the Directory name
# containing the annotated files by navigating
def getBeAnnotatedDir(self):
ann_dir = QtGui.QFileDialog.getExistingDirectory()
# print("D: Annotated Directory Selected by navigating: ", ann_dir)
self.lineEdit_annDir.setText(ann_dir)
self.beAnnotatedDirName = ann_dir
return ann_dir
# getOutputDir: Routine to let the user choose the Directory name
# to output the reports by navigating
def getOutputDir(self):
# Since This directory should not exist, use getSaveFileName
# to let the user create a new directory.
outdir = QtGui.QFileDialog.getSaveFileName()
# print("D: Output Directory Selected by navigating: ", outdir)
self.lineEdit_outdir.setText(outdir)
self.outputDirName = outdir
return outdir
# getConfigFile: Select the config file from the directory structure.
def getConfigFile(self):
config_file = QtGui.QFileDialog.getOpenFileName()
print("D: Config File Selected by navigating: ", config_file)
self.lineEdit_configFile.setText(config_file)
self.configFileName = config_file
return config_file
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Generate Report", "Bitcurator Generate Report", None, QtGui.QApplication.UnicodeUTF8))
self.label_configFile.setText(QtGui.QApplication.translate("Form", "Config File (optional):", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_configFile.setPlaceholderText(QtGui.QApplication.translate("Form", "/Path/To/File", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_configFile.setPlaceholderText(QtGui.QApplication.translate("Form", "/Path/To/File", None, QtGui.QApplication.UnicodeUTF8))
self.label_outdir.setText(QtGui.QApplication.translate("Form", "Output directory for reports:", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_outdir.setPlaceholderText(QtGui.QApplication.translate("Form", "/Path/To/New Directory", None, QtGui.QApplication.UnicodeUTF8))
self.label_annDir.setText(QtGui.QApplication.translate("Form", "Annotated Bulk Extractor output directory:", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_annDir.setPlaceholderText(QtGui.QApplication.translate("Form", "/Path/To/Directory", None, QtGui.QApplication.UnicodeUTF8))
self.label_xmlfile.setText(QtGui.QApplication.translate("Form", "Fiwalk XML file:", None, QtGui.QApplication.UnicodeUTF8))
self.lineEdit_xmlFile.setPlaceholderText(QtGui.QApplication.translate("Form", "/Path/to/File", None, QtGui.QApplication.UnicodeUTF8))
self.toolButton.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
self.toolButton_2.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
self.toolButton_3.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
self.label_5.setText(QtGui.QApplication.translate("Form", "Command line output:", None, QtGui.QApplication.UnicodeUTF8))
self.toolButton_4.setText(QtGui.QApplication.translate("Form", "...", None, QtGui.QApplication.UnicodeUTF8))
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
Form = QtGui.QWidget()
ui = Ui_Form()
ui.setupUi(Form)
Form.show()
sys.exit(app.exec_())
| gpl-3.0 | 5,154,987,188,804,765,000 | 46.146853 | 155 | 0.680214 | false |
molmod/yaff | yaff/external/test/test_lammpsio.py | 1 | 2291 | # -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <[email protected]>,
# Louis Vanduyfhuys <[email protected]>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from __future__ import division
from __future__ import print_function
import tempfile
import shutil
import os
import numpy as np
import pkg_resources
from molmod.test.common import tmpdir
from yaff.external.lammpsio import *
from yaff import System
from yaff.test.common import get_system_water32
def test_lammps_system_data_water32():
system = get_system_water32()
with tmpdir(__name__, 'test_lammps_system_water32') as dirname:
fn = os.path.join(dirname,'lammps.system')
write_lammps_system_data(system,fn=fn)
with open(fn,'r') as f: lines = f.readlines()
natom = int(lines[2].split()[0])
assert natom==system.natom
assert (system.natom+system.bonds.shape[0]+23)==len(lines)
def test_lammps_ffconversion_mil53():
fn_system = pkg_resources.resource_filename(__name__, '../../data/test/system_mil53.chk')
fn_pars = pkg_resources.resource_filename(__name__, '../../data/test/parameters_mil53.txt')
system = System.from_file(fn_system)
with tmpdir(__name__, 'test_lammps_ffconversion_mil53') as dirname:
ff2lammps(system, fn_pars, dirname)
# No test for correctness, just check that output files are present
assert os.path.isfile(os.path.join(dirname,'lammps.in'))
assert os.path.isfile(os.path.join(dirname,'lammps.data'))
| gpl-3.0 | -4,221,619,580,670,450,000 | 37.183333 | 95 | 0.714535 | false |
bnmalcabis/testpy | python2/koans/about_tuples.py | 1 | 2415 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
class AboutTuples(Koan):
def test_creating_a_tuple(self):
count_of_three = (1, 2, 5)
self.assertEqual(5, count_of_three[2])
def test_tuples_are_immutable_so_item_assignment_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three[2] = "three"
except TypeError as ex:
self.assertMatch('item assig', ex[0])
def test_tuples_are_immutable_so_appending_is_not_possible(self):
count_of_three = (1, 2, 5)
try:
count_of_three.append("boom")
except Exception as ex:
self.assertEqual(AttributeError, type(ex))
# Note, assertMatch() uses regular expression pattern matching,
# so you don't have to copy the whole message.
self.assertMatch('attribute', ex[0])
# Tuples are less flexible than lists, but faster.
def test_tuples_can_only_be_changed_through_replacement(self):
count_of_three = (1, 2, 5)
list_count = list(count_of_three)
list_count.append("boom")
count_of_three = tuple(list_count)
self.assertEqual((1, 2, 5, 'boom'), count_of_three)
def test_tuples_of_one_look_peculiar(self):
self.assertEqual(int, (1).__class__)
self.assertEqual(tuple, (1,).__class__)
self.assertEqual(('Hello comma!',), ("Hello comma!", ))
def test_tuple_constructor_can_be_surprising(self):
self.assertEqual(('S', 'u', 'r', 'p', 'r', 'i', 's', 'e', '!'), tuple("Surprise!"))
def test_creating_empty_tuples(self):
self.assertEqual(tuple(), ())
self.assertEqual((), tuple()) # Sometimes less confusing
def test_tuples_can_be_embedded(self):
lat = (37, 14, 6, 'N')
lon = (115, 48, 40, 'W')
place = ('Area 51', lat, lon)
self.assertEqual(('Area 51', (37, 14, 6, 'N'), (115, 48, 40, 'W')), place)
def test_tuples_are_good_for_representing_records(self):
locations = [
("Illuminati HQ", (38, 52, 15.56, 'N'), (77, 3, 21.46, 'W')),
("Stargate B", (41, 10, 43.92, 'N'), (1, 49, 34.29, 'W')),
]
locations.append(
("Cthulhu", (26, 40, 1, 'N'), (70, 45, 7, 'W'))
)
self.assertEqual("Cthulhu", locations[2][0])
self.assertEqual(15.56, locations[0][1][2]) | mit | -992,314,578,808,458,100 | 33.514286 | 91 | 0.55735 | false |
mathDR/BP-AR-HMM | OLDPY/compute_likelihood_unnorm.py | 1 | 1820 | import numpy as np
def compute_likelihood_unnorm(data_struct,theta,obsModelType,Kz_inds,Kz,Ks):
#function log_likelihood =
# compute_likelihood_unnorm(data_struct,theta,obsModelType,Kz_inds,Kz,Ks)
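    # Note on the computation below: for each state pair (kz, ks) this returns
    # the unnormalized Gaussian log-likelihood of each observation x,
    #     log N(x; mu, Sigma) ~ -0.5*||L(x - mu)||^2 + sum(log(diag(L)))
    # with L = cholesky(Sigma^{-1}); the -0.5*d*log(2*pi) constant is dropped,
    # hence "unnorm".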
if obsModelType == 'Gaussian':
invSigma = theta.invSigma
mu = theta.mu
dimu, T = (data_struct.obs).shape
log_likelihood = -np.inf*np.ones((Kz,Ks,T))
kz = Kz_inds
for ks in range(Ks):
            cholinvSigma = np.linalg.cholesky(invSigma[:,:,kz,ks])
            dcholinvSigma = np.diag(cholinvSigma)
            u = np.dot(cholinvSigma, data_struct.obs - mu[:,kz*np.ones(T, dtype=int),ks])
            log_likelihood[kz,ks,:] = -0.5*np.sum(u**2,axis=0) + np.sum(np.log(dcholinvSigma))
elif obsModelType =='AR' or obsModelType == 'SLDS':
invSigma = theta.invSigma
A = theta.A
X = data_struct.X
dimu, T = (data_struct.obs).shape
log_likelihood = -np.inf*np.ones((Kz,Ks,T))
        if getattr(theta, 'mu', None) is not None:
mu = theta.mu
kz = Kz_inds
for ks in range(Ks):
                cholinvSigma = np.linalg.cholesky(invSigma[:,:,kz,ks])
                dcholinvSigma = np.diag(cholinvSigma)
                u = np.dot(cholinvSigma, data_struct.obs - np.dot(A[:,:,kz,ks],X) - mu[:,kz*np.ones(T, dtype=int),ks])
                log_likelihood[kz,ks,:] = -0.5*np.sum(u**2,axis=0) + np.sum(np.log(dcholinvSigma))
else:
kz = Kz_inds
for ks in range(Ks):
                cholinvSigma = np.linalg.cholesky(invSigma[:,:,kz,ks])
                dcholinvSigma = np.diag(cholinvSigma)
                u = np.dot(cholinvSigma, data_struct.obs - np.dot(A[:,:,kz,ks],X))
                log_likelihood[kz,ks,:] = -0.5*np.sum(u**2,axis=0) + np.sum(np.log(dcholinvSigma))
elif obsModelType == 'Multinomial':
log_likelihood = np.log(theta.p[:,:,data_struct.obs])
else:
raise ValueError('Error in compute_likelihood_unnorm: obsModelType not defined')
return log_likelihood
| mit | -170,037,953,198,581,020 | 34.686275 | 102 | 0.625824 | false |
skuda/client-python | kubernetes/client/models/v1_downward_api_volume_file.py | 1 | 6558 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.6.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1DownwardAPIVolumeFile(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, field_ref=None, mode=None, path=None, resource_field_ref=None):
"""
V1DownwardAPIVolumeFile - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'field_ref': 'V1ObjectFieldSelector',
'mode': 'int',
'path': 'str',
'resource_field_ref': 'V1ResourceFieldSelector'
}
self.attribute_map = {
'field_ref': 'fieldRef',
'mode': 'mode',
'path': 'path',
'resource_field_ref': 'resourceFieldRef'
}
self._field_ref = field_ref
self._mode = mode
self._path = path
self._resource_field_ref = resource_field_ref
@property
def field_ref(self):
"""
Gets the field_ref of this V1DownwardAPIVolumeFile.
Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.
:return: The field_ref of this V1DownwardAPIVolumeFile.
:rtype: V1ObjectFieldSelector
"""
return self._field_ref
@field_ref.setter
def field_ref(self, field_ref):
"""
Sets the field_ref of this V1DownwardAPIVolumeFile.
Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.
:param field_ref: The field_ref of this V1DownwardAPIVolumeFile.
:type: V1ObjectFieldSelector
"""
self._field_ref = field_ref
@property
def mode(self):
"""
Gets the mode of this V1DownwardAPIVolumeFile.
Optional: mode bits to use on this file, must be a value between 0 and 0777. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
:return: The mode of this V1DownwardAPIVolumeFile.
:rtype: int
"""
return self._mode
@mode.setter
def mode(self, mode):
"""
Sets the mode of this V1DownwardAPIVolumeFile.
Optional: mode bits to use on this file, must be a value between 0 and 0777. If not specified, the volume defaultMode will be used. This might be in conflict with other options that affect the file mode, like fsGroup, and the result can be other mode bits set.
:param mode: The mode of this V1DownwardAPIVolumeFile.
:type: int
"""
self._mode = mode
@property
def path(self):
"""
Gets the path of this V1DownwardAPIVolumeFile.
Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'
:return: The path of this V1DownwardAPIVolumeFile.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this V1DownwardAPIVolumeFile.
Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'
:param path: The path of this V1DownwardAPIVolumeFile.
:type: str
"""
if path is None:
raise ValueError("Invalid value for `path`, must not be `None`")
self._path = path
@property
def resource_field_ref(self):
"""
Gets the resource_field_ref of this V1DownwardAPIVolumeFile.
Selects a resource of the container: only resources limits and requests (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
:return: The resource_field_ref of this V1DownwardAPIVolumeFile.
:rtype: V1ResourceFieldSelector
"""
return self._resource_field_ref
@resource_field_ref.setter
def resource_field_ref(self, resource_field_ref):
"""
Sets the resource_field_ref of this V1DownwardAPIVolumeFile.
Selects a resource of the container: only resources limits and requests (limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.
:param resource_field_ref: The resource_field_ref of this V1DownwardAPIVolumeFile.
:type: V1ResourceFieldSelector
"""
self._resource_field_ref = resource_field_ref
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
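# Minimal usage sketch (illustrative only; assumes the generated
# V1ObjectFieldSelector model from the same client package):
#
#     from kubernetes.client.models.v1_object_field_selector import V1ObjectFieldSelector
#     f = V1DownwardAPIVolumeFile(
#         path='labels',
#         field_ref=V1ObjectFieldSelector(field_path='metadata.labels'))
#     print(f.to_dict())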
| apache-2.0 | 5,237,129,877,225,858,000 | 32.804124 | 268 | 0.598353 | false |
f0rki/cb-multios | original-challenges/Multicast_Chat_Server/poller/for-release/machine.py | 1 | 11856 | #!/usr/bin/env python
#
# Copyright (C) 2015 Narf Industries <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from generator.actions import Actions, Variable
import string
import random
import itertools
import math
from struct import *
from collections import OrderedDict
class TemplateGenerator(Actions):
def _getAdminPassword(self):
password = ''
for c in range(900):
randomVal = ord(self.magic_page[c])
if randomVal % 2 == 0:
if randomVal % 4 == 0:
password += chr(ord('A') + (randomVal % 26))
else:
password += chr(ord('a') + (randomVal % 26))
elif randomVal % 7 == 0:
password += ' '
else:
password += chr(ord('0') + (randomVal % 10))
return password
def _getMessageFromChannel(self, user, channel):
deliveryType = user['subscriptions'][channel]['deliveryType']
if deliveryType is "latest":
if user['subscriptions'][channel]['index'] < len(self.channels[channel]['queue']):
message = self.channels[channel]['queue'][-1]
user['subscriptions'][channel]['index'] = len(self.channels[channel]['queue'])
self.read(delim="\n", expect=message['body'])
elif deliveryType is "guaranteed":
while user['subscriptions'][channel]['index'] < len(self.channels[channel]['queue']):
index = user['subscriptions'][channel]['index']
message = self.channels[channel]['queue'][index]
user['subscriptions'][channel]['index'] += 1
self.read(delim="\n", expect=message['body'])
elif deliveryType is "next":
if user['subscriptions'][channel]['index'] < len(self.channels[channel]['queue']):
index = user['subscriptions'][channel]['index']
message = self.channels[channel]['queue'][index]
user['subscriptions'][channel]['index'] += 1
self.read(delim="\n", expect=message['body'])
elif deliveryType is "low" or deliveryType is "medium" or deliveryType is "high":
while user['subscriptions'][channel]['index'] < len(self.channels[channel]['queue']):
index = user['subscriptions'][channel]['index']
message = self.channels[channel]['queue'][index]
user['subscriptions'][channel]['index'] += 1
if(message['priority'] is deliveryType):
self.read(delim="\n", expect=message['body'])
def getMessagesFromAnyChannel(self):
users = []
while len(users) == 0:
channels = []
while len(channels) < 2:
channels = [c for c in self.channels.keys() if len(self.channels[c]['queue']) > 4 and len(self.channels[c]['subscriptions']) > 2]
self.sendMessage()
channel = random.choice(channels)
senders = [m['sender'] for m in self.channels[channel]['queue']]
users = [u for u in self.users if u['name'] not in senders and
u['name'] in self.channels[channel]['subscriptions'].keys() and
u['subscriptions'][channel]['index'] < len(self.channels[channel]['queue'])]
user = random.choice(users)
command = "/out/" + channel + "\n"
self.write(command)
self.write(user['token'])
self.write(":end\n")
self._getMessageFromChannel(user, channel)
def getMessagesFromAllChannels(self):
users = []
while len(users) < 1:
for u in self.users:
if len(u['subscriptions']) > 1:
for channelName in u['subscriptions'].keys():
if len(self.channels[channelName]['queue']) > u['subscriptions'][channelName]['index'] + 2:
users.append(u)
else:
deliveryType = random.choice(self.deliveryType)
channel = random.choice(self.channels.keys())
self._subscribe(u, channel, deliveryType)
self.sendMessage()
user = random.choice(users)
command = "/out/ALL\n"
self.write(command)
self.write(user['token'])
self.write(":end\n")
for channelName in reversed(user['subscriptions'].keys()):
self._getMessageFromChannel(user,channelName)
def sendMessage(self):
users = [u for u in self.users if len(u['subscriptions']) > 2]
while len(users) < 3:
deliveryType = random.choice(self.deliveryType)
channel = random.choice(self.channels.keys())
userList = [u for u in self.users if u not in users]
user = random.choice(userList)
self._subscribe(user, channel, deliveryType)
users = [u for u in self.users if len(u['subscriptions']) > 2]
user = random.choice(users)
subscription = random.choice(user['subscriptions'].keys())
command = "/in/" + subscription + "\n"
message = ''.join(random.choice(string.ascii_letters) for _ in range(20))
priority = random.choice(self.priorities)
self.channels[subscription]['queue'].append({'sender':user['name'], 'body':message, 'priority':priority})
user['subscriptions'][subscription]['index'] = len(self.channels[subscription]['queue'])
args = ":" + message + ":" + priority + "\n"
self.write(command)
self.write(user['token'])
self.write(args)
self.channels['FLAG']['queue'].append({'sender':'admin', 'body':self.magic_page[0:20].encode("hex"), 'priority':"high"})
def _auth(self, channel):
command = "/auth/" + channel + "\n"
user = random.choice(self.users)
if channel is "FLAG":
password = self._getAdminPassword()
if len(user['subscriptions']) == 0:
user['password'] = password
else:
password = user['password']
args = ":" + user['name'] + ":" + password + "\n"
self.write(command)
self.write(user['token'])
self.write(args)
if channel not in user['subscriptions'].keys():
user['subscriptions'][channel] = {'deliveryType':"latest"}
user['subscriptions'][channel]['index'] = len(self.channels[channel]['queue'])
self.channels[channel]['subscriptions'][user['name']] = {'index': user['subscriptions'][channel]['index']}
command = "/token/" + channel + "\n"
subscriptions_string = ''.join(key for key in user['subscriptions'].keys())
args_regex = "0" + ":" + user['name'] + ":" + "([0-9a-f]{" + str(len(subscriptions_string)*2) + "})" + ":"
args_regex += ','.join(key for key in reversed(user['subscriptions'].keys()))
args_regex += "\n"
signature = Variable('signature')
signature.set_re(args_regex, group=1)
self.read(delim="\n", expect=command)
self.read(delim="\n", assign=signature)
args1 = "0" + ":" + user['name'] + ":"
args3 = ":" + ','.join(key for key in reversed(user['subscriptions'].keys())) + "\n"
self.write(command)
self.write(args1)
self.write(signature)
self.write(args3)
command = "/subscribe/" + channel + "\n"
deliveryType = user['subscriptions'][channel]['deliveryType'] #random.choice(self.deliveryType)
token_name = user['name'] + "token"
token_re = "([0-9a-zA-Z]{9}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{3}):" + deliveryType + "\n"
user['token'] = Variable(token_name)
user['token'].set_re(token_re, group=1)
self.read(delim="\n", expect=command)
self.read(delim="\n", assign=user['token'])
def _subscribe(self, user, channel, deliveryType):
command = "/subscribe/" + channel + "\n"
#user = random.choice(self.users)
args = ":" + deliveryType + "\n"
self.write(command)
self.write(user['token'])
self.write(args)
if channel in user['subscriptions'].keys():
user['subscriptions'][channel]['deliveryType'] = deliveryType
else:
command = "/auth/" + channel + "\n"
self.read(delim="\n", expect=command)
self._auth(channel)
self._subscribe(user, channel, deliveryType)
def subscribeGuaranteed(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "guaranteed")
def subscribeLatest(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "latest")
def subscribeIncremental(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "next")
def subscribeLow(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "low")
def subscribeMedium(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "medium")
def subscribeHigh(self):
channel = random.choice(self.channels.keys())
user = random.choice(self.users)
self._subscribe(user, channel, "high")
def subscribeToFlagChannel(self):
channel = 'FLAG'
user = random.choice(self.users)
self._subscribe(user, channel, "high")
def start(self):
#self.delay(100)
self.users = []
self.channels = dict()
#self.deliveryType = ["guaranteed", "latest", "high", "medium", "low"]
self.deliveryType = ["guaranteed", "latest", "next"]
self.priorities = ["high", "medium", "low"]
numUsers = random.randint(10,20)
for _ in range(numUsers):
name_length = random.randint(8,16)
name = ''.join(random.choice(string.ascii_letters) for _ in range(name_length))
pass_length = random.randint(8,16)
password = ''.join(random.choice(string.ascii_letters) for _ in range(pass_length))
self.users.append({'token':"0", 'name': name, 'password': password, 'subscriptions':OrderedDict()})
numChannels = random.randint(numUsers/3, numUsers/2)
for _ in range(numChannels):
name_length = random.randint(8,16)
name = ''.join(random.choice(string.ascii_letters) for _ in range(name_length))
self.channels[name] = {'subscriptions':dict(), 'queue':[]}
self.channels['FLAG'] = {'subscriptions':dict(), 'queue':[]}
def POV1(self):
command = "/auth/FLAG\n"
args1 = "0:povuser:password\n"
self.write(command)
self.write(args1)
password_regex = '(.*)\n'
flagPassword = Variable('flagPassword')
flagPassword.set_re(password_regex, group=1)
self.read(delim="\n", assign=flagPassword)
args2 = "0:povuser:"
self.write(command)
self.write(args2)
self.write(flagPassword)
self.write("\n")
command = "/token/FLAG\n"
subscriptions_string = "FLAG"
args_regex = "0:povuser:" + "([0-9a-f]{" + str(len(subscriptions_string)*2) + "})" + ":"
args_regex += "FLAG"
args_regex += "\n"
signature = Variable('signature')
signature.set_re(args_regex, group=1)
self.read(delim="\n", expect=command)
self.read(delim="\n", assign=signature)
args1 = "0:povuser:"
args3 = ":FLAG\n"
self.write(command)
self.write(args1)
self.write(signature)
self.write(args3)
command = "/subscribe/FLAG\n"
token_re = "([0-9a-zA-Z]{9}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{8}-[0-9a-zA-Z]{3}):latest\n"
token = Variable("token")
token.set_re(token_re, group=1)
self.read(delim="\n", expect=command)
self.read(delim="\n", assign=token)
command = "/in/FLAG\n"
args = ":AAAAA:high\n"
self.write(command)
self.write(token)
self.write(args)
command = "/out/FLAG\n"
self.write(command)
self.write(token)
self.write(":end\n")
flag_re = "(.*)\n"
flag = Variable("flag")
flag.set_re(flag_re, group=1)
self.read(delim="\n", assign=flag)
def quit(self):
return -1
| mit | 1,377,747,080,165,305,300 | 37.745098 | 133 | 0.666667 | false |
chrislyon/django_ds1 | django_ds1/ds/models.py | 1 | 2352 | from django.db import models
from ckeditor.fields import RichTextField
# Create your models here.
## ---------------------------------------------------
## Abstract base class holding common timestamp fields
## ---------------------------------------------------
DEF_TFAC='DEFAUT'
class HoroDatage(models.Model):
h_datcre = models.DateTimeField(auto_now_add=True, verbose_name='Date de creation')
h_datmod = models.DateTimeField(auto_now=True, verbose_name='Date de Modification')
statut = models.BooleanField(verbose_name='Actif', default=True)
class Meta:
abstract = True
##
## Service request (Demande de Service)
##
class DService(HoroDatage):
    ## Channel / type of request
TYPE_DS = (
( 'ASS', 'Assistance' ),
( 'DEP', 'Depannage'),
( 'AUD', 'Audit' ),
( 'DEV', 'Developpement' ),
( 'DIV', 'Autres' ),
)
DS_Type = models.CharField(max_length=5, choices=TYPE_DS, default='ASS', verbose_name='Type')
    ## TODO: clarify how the requester should be expressed
DS_TiersDemandeur = models.CharField(max_length=20, blank=True, verbose_name='Demandeur')
    ## TODO: clarify how the billed party should be expressed
DS_TiersFacture = models.CharField(max_length=20, default=DEF_TFAC, blank=True, verbose_name='Tiers Facture')
DS_Sujet = models.CharField(blank=True, max_length=50, verbose_name='Sujet')
DS_Desc = RichTextField( blank=True, verbose_name='Description')
STATUT_DS = (
( 'NEW', 'Nouvelle' ),
( 'CLOSED', 'Termine' ),
( 'ENC', 'En cours' ),
( 'ATT', 'Attente' ),
)
DS_Statut = models.CharField(max_length=6, choices=STATUT_DS, default='NEW', verbose_name='Statut')
PRIORITE_DS = (
('N', 'NORMAL'),
('U', 'URGENT'),
('B', 'BLOQUANT'),
)
DS_Priorite = models.CharField(max_length=3, choices=PRIORITE_DS, default='N', verbose_name='Priorite')
DS_Assigne = models.CharField(max_length=30, blank=True, verbose_name='Assigne')
    DS_Horo_Debut = models.DateTimeField(blank=True, null=True, verbose_name='Debut')
    DS_Horo_Fin = models.DateTimeField(blank=True, null=True, verbose_name='Fin')
DS_Echeance = models.CharField(max_length=30, blank=True, verbose_name='Avant le')
DS_TempsEstime = models.CharField(max_length=30, blank=True, verbose_name='Temps Estime')
DS_TempsRealise = models.CharField(max_length=30, blank=True, verbose_name='Temps Realise')
DS_PC_Realise = models.CharField(max_length=30, blank=True, verbose_name='% Realisation')
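# Minimal usage sketch (illustrative only; assumes migrations are in place):
#   ds = DService.objects.create(DS_Type='ASS', DS_Sujet='Printer down')
#   ds.DS_Statut = 'ENC'
#   ds.save()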
| gpl-2.0 | -8,140,727,402,061,289,000 | 35.184615 | 110 | 0.672619 | false |
cscott/wikiserver | whoosh/support/dawg.py | 1 | 19568 | # Copyright 2009 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""
This module contains classes and functions for working with Directed Acyclic
Word Graphs (DAWGs). This structure is used to efficiently store a list of
words.
This code should be considered an implementation detail and may change in
future releases.
TODO: try to find a way to traverse the term index efficiently to do within()
instead of storing a DAWG separately.
"""
from array import array
from whoosh.compat import b, xrange, iteritems, iterkeys, unichr
from whoosh.system import _INT_SIZE
from whoosh.util import utf8encode, utf8decode
class BaseNode(object):
"""This is the base class for objects representing nodes in a directed
acyclic word graph (DAWG).
* ``final`` is a property which is True if this node represents the end of
a word.
* ``__contains__(label)`` returns True if the node has an edge with the
given label.
* ``__iter__()`` returns an iterator of the labels for the node's outgoing
edges. ``keys()`` is available as a convenient shortcut to get a list.
* ``__len__()`` returns the number of outgoing edges.
* ``edge(label)`` returns the Node connected to the edge with the given
label.
* ``all_edges()`` returns a dictionary of the node's outgoing edges, where
the keys are the edge labels and the values are the connected nodes.
"""
def __contains__(self, key):
raise NotImplementedError
def __iter__(self):
raise NotImplementedError
def __len__(self):
raise NotImplementedError
def keys(self):
"""Returns a list of the outgoing edge labels.
"""
return list(self)
def edge(self, key, expand=True):
"""Returns the node connected to the outgoing edge with the given
label.
"""
raise NotImplementedError
def all_edges(self):
"""Returns a dictionary mapping outgoing edge labels to nodes.
"""
e = self.edge
return dict((key, e(key)) for key in self)
def edge_count(self):
"""Returns the recursive count of edges in this node and the tree under
it.
"""
return len(self) + sum(self.edge(key).edge_count() for key in self)
class NullNode(BaseNode):
"""An empty node. This is sometimes useful for representing an empty graph.
"""
final = False
    def __contains__(self, key):
return False
def __iter__(self):
return iter([])
def __len__(self):
return 0
def edge(self, key, expand=True):
raise KeyError(key)
def all_edges(self):
return {}
def edge_count(self):
return 0
class BuildNode(object):
"""Node type used by DawgBuilder when constructing a graph from scratch.
"""
def __init__(self):
self.final = False
self._edges = {}
self._hash = None
def __repr__(self):
return "<%s:%s %s>" % (self.__class__.__name__,
",".join(sorted(self._edges.keys())),
self.final)
def __hash__(self):
if self._hash is not None:
return self._hash
h = int(self.final)
for key, node in iteritems(self._edges):
h ^= hash(key) ^ hash(node)
self._hash = h
return h
def __eq__(self, other):
if self is other:
return True
if self.final != other.final:
return False
mine, theirs = self.all_edges(), other.all_edges()
if len(mine) != len(theirs):
return False
for key in iterkeys(mine):
if key not in theirs or not mine[key] == theirs[key]:
return False
return True
def __ne__(self, other):
return not(self.__eq__(other))
def __contains__(self, key):
return key in self._edges
def __iter__(self):
return iter(self._edges)
def __len__(self):
return len(self._edges)
def put(self, key, node):
self._hash = None # Invalidate the cached hash value
self._edges[key] = node
def edge(self, key, expand=True):
return self._edges[key]
def all_edges(self):
return self._edges
class DawgBuilder(object):
"""Class for building a graph from scratch.
>>> db = DawgBuilder()
>>> db.insert(u"alfa")
>>> db.insert(u"bravo")
>>> db.write(dbfile)
This class does not have the cleanest API, because it was cobbled together
to support the spelling correction system.
"""
def __init__(self, reduced=True, field_root=False):
"""
:param dbfile: an optional StructFile. If you pass this argument to the
initializer, you don't have to pass a file to the ``write()``
method after you construct the graph.
:param reduced: when the graph is finished, branches of single-edged
nodes will be collapsed into single nodes to form a Patricia tree.
:param field_root: treats the root node edges as field names,
preventing them from being reduced and allowing them to be inserted
out-of-order.
"""
self._reduced = reduced
self._field_root = field_root
self.lastword = None
# List of nodes that have not been checked for duplication.
self.unchecked = []
# List of unique nodes that have been checked for duplication.
self.minimized = {}
self.root = BuildNode()
def insert(self, word):
"""Add the given "word" (a string or list of strings) to the graph.
Words must be inserted in sorted order.
"""
lw = self.lastword
prefixlen = 0
if lw:
if self._field_root and lw[0] != word[0]:
# If field_root == True, caller can add entire fields out-of-
# order (but not individual terms)
pass
elif word < lw:
raise Exception("Out of order %r..%r." % (self.lastword, word))
else:
# find common prefix between word and previous word
for i in xrange(min(len(word), len(lw))):
if word[i] != lw[i]: break
prefixlen += 1
# Check the unchecked for redundant nodes, proceeding from last
# one down to the common prefix size. Then truncate the list at
# that point.
self._minimize(prefixlen)
# Add the suffix, starting from the correct node mid-way through the
# graph
if not self.unchecked:
node = self.root
else:
node = self.unchecked[-1][2]
for letter in word[prefixlen:]:
nextnode = BuildNode()
node.put(letter, nextnode)
self.unchecked.append((node, letter, nextnode))
node = nextnode
node.final = True
self.lastword = word
def _minimize(self, downto):
# Proceed from the leaf up to a certain point
for i in xrange(len(self.unchecked) - 1, downto - 1, -1):
            (parent, letter, child) = self.unchecked[i]
if child in self.minimized:
# Replace the child with the previously encountered one
parent.put(letter, self.minimized[child])
else:
# Add the state to the minimized nodes.
                self.minimized[child] = child
self.unchecked.pop()
def finish(self):
"""Minimize the graph by merging duplicates, and reduce branches of
single-edged nodes. You can call this explicitly if you are building
a graph to use in memory. Otherwise it is automatically called by
the write() method.
"""
self._minimize(0)
if self._reduced:
self.reduce(self.root, self._field_root)
def write(self, dbfile):
self.finish()
DawgWriter(dbfile).write(self.root)
@staticmethod
def reduce(root, field_root=False):
if not field_root:
reduce(root)
else:
for key in root:
v = root.edge(key)
reduce(v)
class DawgWriter(object):
def __init__(self, dbfile):
self.dbfile = dbfile
self.offsets = {}
def write(self, root):
"""Write the graph to the given StructFile. If you passed a file to
the initializer, you don't have to pass it here.
"""
dbfile = self.dbfile
dbfile.write(b("GR01")) # Magic number
dbfile.write_int(0) # File flags
dbfile.write_uint(0) # Pointer to root node
offset = self._write_node(dbfile, root)
# Seek back and write the pointer to the root node
dbfile.flush()
dbfile.seek(_INT_SIZE * 2)
dbfile.write_uint(offset)
dbfile.close()
def _write_node(self, dbfile, node):
keys = node._edges.keys()
ptrs = array("I")
for key in keys:
sn = node._edges[key]
if id(sn) in self.offsets:
ptrs.append(self.offsets[id(sn)])
else:
ptr = self._write_node(dbfile, sn)
self.offsets[id(sn)] = ptr
ptrs.append(ptr)
start = dbfile.tell()
# The low bit indicates whether this node represents the end of a word
flags = int(node.final)
# The second lowest bit = whether this node has children
flags |= bool(keys) << 1
# The third lowest bit = whether all keys are single chars
singles = all(len(k) == 1 for k in keys)
flags |= singles << 2
# The fourth lowest bit = whether all keys are one byte
if singles:
sbytes = all(ord(key) <= 255 for key in keys)
flags |= sbytes << 3
dbfile.write_byte(flags)
if keys:
dbfile.write_varint(len(keys))
dbfile.write_array(ptrs)
if singles:
for key in keys:
o = ord(key)
if sbytes:
dbfile.write_byte(o)
else:
dbfile.write_ushort(o)
else:
for key in keys:
dbfile.write_string(utf8encode(key)[0])
return start
class DiskNode(BaseNode):
def __init__(self, dbfile, offset, expand=True):
self.id = offset
self.dbfile = dbfile
dbfile.seek(offset)
flags = dbfile.read_byte()
self.final = bool(flags & 1)
self._edges = {}
if flags & 2:
singles = flags & 4
bytes = flags & 8
nkeys = dbfile.read_varint()
ptrs = dbfile.read_array("I", nkeys)
for i in xrange(nkeys):
ptr = ptrs[i]
if singles:
if bytes:
charnum = dbfile.read_byte()
else:
charnum = dbfile.read_ushort()
self._edges[unichr(charnum)] = ptr
else:
key = utf8decode(dbfile.read_string())[0]
if len(key) > 1 and expand:
self._edges[key[0]] = PatNode(dbfile, key[1:], ptr)
else:
self._edges[key] = ptr
def __repr__(self):
return "<%s %s:%s %s>" % (self.__class__.__name__, self.id,
",".join(sorted(self._edges.keys())),
self.final)
def __contains__(self, key):
return key in self._edges
def __iter__(self):
return iter(self._edges)
def __len__(self):
return len(self._edges)
def edge(self, key, expand=True):
v = self._edges[key]
if not isinstance(v, BaseNode):
# Convert pointer to disk node
v = DiskNode(self.dbfile, v, expand=expand)
#if self.caching:
self._edges[key] = v
return v
@classmethod
def load(cls, dbfile, expand=True):
dbfile.seek(0)
magic = dbfile.read(4)
if magic != b("GR01"):
raise Exception("%r does not seem to be a graph file" % dbfile)
_ = dbfile.read_int() # File flags (currently unused)
return DiskNode(dbfile, dbfile.read_uint(), expand=expand)
class PatNode(BaseNode):
final = False
def __init__(self, dbfile, label, nextptr, i=0):
self.dbfile = dbfile
self.label = label
self.nextptr = nextptr
self.i = i
def __repr__(self):
return "<%r(%d) %s>" % (self.label, self.i, self.final)
def __contains__(self, key):
if self.i < len(self.label) and key == self.label[self.i]:
return True
else:
return False
def __iter__(self):
if self.i < len(self.label):
return iter(self.label[self.i])
else:
            return iter([])
def __len__(self):
if self.i < len(self.label):
return 1
else:
return 0
def edge(self, key, expand=True):
label = self.label
i = self.i
if i < len(label) and key == label[i]:
i += 1
if i < len(self.label):
return PatNode(self.dbfile, label, self.nextptr, i)
else:
return DiskNode(self.dbfile, self.nextptr)
else:
raise KeyError(key)
def edge_count(self):
return DiskNode(self.dbfile, self.nextptr).edge_count()
class ComboNode(BaseNode):
"""Base class for DAWG nodes that blend the nodes of two different graphs.
Concrete subclasses need to implement the ``edge()`` method and possibly
the ``final`` property.
"""
def __init__(self, a, b):
self.a = a
self.b = b
def __repr__(self):
return "<%s %r %r>" % (self.__class__.__name__, self.a, self.b)
def __contains__(self, key):
return key in self.a or key in self.b
def __iter__(self):
return iter(set(self.a) | set(self.b))
def __len__(self):
return len(set(self.a) | set(self.b))
@property
def final(self):
return self.a.final or self.b.final
class UnionNode(ComboNode):
"""Makes two graphs appear to be the union of the two graphs.
"""
def edge(self, key, expand=True):
a = self.a
b = self.b
if key in a and key in b:
return UnionNode(a.edge(key), b.edge(key))
elif key in a:
return a.edge(key)
else:
return b.edge(key)
class IntersectionNode(ComboNode):
"""Makes two graphs appear to be the intersection of the two graphs.
"""
def edge(self, key, expand=True):
a = self.a
b = self.b
if key in a and key in b:
return IntersectionNode(a.edge(key), b.edge(key))
# Functions
def reduce(node):
edges = node._edges
if edges:
for key, sn in edges.items():
reduce(sn)
if len(sn) == 1 and not sn.final:
skey, ssn = list(sn._edges.items())[0]
del edges[key]
edges[key + skey] = ssn
def edge_count(node):
c = len(node)
return c + sum(edge_count(node.edge(key)) for key in node)
def flatten(node, sofar=""):
if node.final:
yield sofar
for key in sorted(node):
for word in flatten(node.edge(key, expand=False), sofar + key):
yield word
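# Minimal in-memory sketch tying DawgBuilder and flatten together
# (illustrative only):
#
#     db = DawgBuilder()
#     db.insert(u"alfa")
#     db.insert(u"bravo")
#     db.finish()
#     assert list(flatten(db.root)) == [u"alfa", u"bravo"]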
def dump_dawg(node, tab=0):
print("%s%s %s" % (" " * tab, hex(id(node)), node.final))
for key in sorted(node):
print("%s%r:" % (" " * tab, key))
dump_dawg(node.edge(key), tab + 1)
def within(node, text, k=1, prefix=0, seen=None):
if seen is None:
seen = set()
sofar = ""
if prefix:
node = skip_prefix(node, text, prefix)
if node is None:
return
sofar, text = text[:prefix], text[prefix:]
for sug in _within(node, text, k, sofar=sofar):
if sug in seen:
continue
yield sug
seen.add(sug)
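# Note: within() yields every stored word whose Damerau-Levenshtein distance
# from `text` is at most k; _within() below enumerates the edit operations
# (match, insertion, transposition, deletion, replacement).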
def _within(node, word, k=1, i=0, sofar=""):
assert k >= 0
if i == len(word) and node.final:
yield sofar
# Match
if i < len(word) and word[i] in node:
for w in _within(node.edge(word[i]), word, k, i + 1, sofar + word[i]):
yield w
if k > 0:
dk = k - 1
ii = i + 1
# Insertions
for key in node:
for w in _within(node.edge(key), word, dk, i, sofar + key):
yield w
if i < len(word):
char = word[i]
# Transposition
if i < len(word) - 1 and char != word[ii] and word[ii] in node:
second = node.edge(word[i + 1])
if char in second:
for w in _within(second.edge(char), word, dk, i + 2,
sofar + word[ii] + char):
yield w
# Deletion
for w in _within(node, word, dk, ii, sofar):
yield w
# Replacements
for key in node:
if key != char:
for w in _within(node.edge(key), word, dk, ii,
sofar + key):
yield w
def skip_prefix(node, text, prefix):
for key in text[:prefix]:
if key in node:
node = node.edge(key)
else:
return None
return node
def find_nearest(node, prefix):
sofar = []
for i in xrange(len(prefix)):
char = prefix[i]
if char in node:
            sofar.append(char)
node = node.edge(char)
else:
break
    sofar.extend(run_out(node))
return "".join(sofar)
def run_out(node):
    # Follow the lexicographically smallest edge from each node until a final
    # node is reached, collecting the edge labels along the way.
    sofar = []
    while not node.final:
        first = min(node.keys())
        sofar.append(first)
        node = node.edge(first)
    return sofar
| gpl-2.0 | -5,617,002,949,850,297,000 | 28.874809 | 79 | 0.553659 | false |
skyfromwell/paperwallet | encryption.py | 1 | 1425 | #remove all others only keep Bip38 here. Need to learn more about this.
from bitcoin.bip38 import Bip38
from bitcoin.key import CKey
from bitcoin.base58 import CBase58Data
__b58chars = "123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz"
__b58base = len(__b58chars)
def encode_pw(key, pw):
    # Decode the base58 WIF string, stripping the version byte ([0]) and the
    # 4-byte checksum ([-4:]), then load the raw secret into a fresh CKey.
    decode_string = __decode_b58(key)[1:-4]
    k = CKey()
    k.generate(decode_string)
    k.set_compressed(False)
    bt = Bip38(k, pw)
    return str(CBase58Data(bt.get_encrypted(), 0x01))
def __encode_b58(v):
value = 0L
for (i, c) in enumerate(v[::-1]):
value += (256**i) * ord(c)
result = ""
while value >= __b58base:
div, mod = divmod(value, __b58base)
result = __b58chars[mod] + result
value = div
result = __b58chars[value] + result
pad = 0
for c in v:
if c=='\0':
pad += 1
else:
break
return (__b58chars[0]*pad) + result
def __decode_b58(v):
value = 0L
for (i, c) in enumerate(v[::-1]):
value += __b58chars.find(c) * (__b58base**i)
result = ""
while value >= 256:
div, mod = divmod(value, 256)
result = chr(mod) + result
value = div
result = chr(value) + result
pad = 0
for c in v:
if c==__b58chars[0]:
pad += 1
else:
break
result = chr(0)*pad + result
return result
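# Quick round-trip sanity check for the helpers above (illustrative only):
#   assert __decode_b58(__encode_b58('\x00\x01\x02')) == '\x00\x01\x02'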
| gpl-3.0 | 7,973,097,293,177,781,000 | 23.568966 | 73 | 0.554386 | false |
UltronAI/Deep-Learning | Pattern-Recognition/hw2-Feature-Selection/skfeature/example/test_JMI.py | 1 | 1528 | import scipy.io
from sklearn.metrics import accuracy_score
from sklearn import cross_validation
from sklearn import svm
from skfeature.function.information_theoretical_based import JMI
def main():
# load data
mat = scipy.io.loadmat('../data/colon.mat')
X = mat['X'] # data
X = X.astype(float)
y = mat['Y'] # label
y = y[:, 0]
n_samples, n_features = X.shape # number of samples and number of features
# split data into 10 folds
ss = cross_validation.KFold(n_samples, n_folds=10, shuffle=True)
# perform evaluation on classification task
num_fea = 10 # number of selected features
clf = svm.LinearSVC() # linear SVM
correct = 0
for train, test in ss:
# obtain the index of each feature on the training set
idx,_,_ = JMI.jmi(X[train], y[train], n_selected_features=num_fea)
# obtain the dataset on the selected features
features = X[:, idx[0:num_fea]]
# train a classification model with the selected features on the training dataset
clf.fit(features[train], y[train])
# predict the class labels of test data
y_predict = clf.predict(features[test])
# obtain the classification accuracy on the test data
acc = accuracy_score(y[test], y_predict)
correct = correct + acc
# output the average classification accuracy over all 10 folds
print 'Accuracy:', float(correct)/10
if __name__ == '__main__':
main()
| mit | 376,455,140,267,402,400 | 31.217391 | 89 | 0.628927 | false |
naterh/chipsec | source/tool/chipsec/hal/cpuid.py | 1 | 1845 | #!/usr/local/bin/python
#CHIPSEC: Platform Security Assessment Framework
#Copyright (c) 2010-2015, Intel Corporation
#
#This program is free software; you can redistribute it and/or
#modify it under the terms of the GNU General Public License
#as published by the Free Software Foundation; Version 2.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License
#along with this program; if not, write to the Free Software
#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#Contact information:
#[email protected]
#
# -------------------------------------------------------------------------------
#
# CHIPSEC: Platform Hardware Security Assessment Framework
# (c) 2010-2012 Intel Corporation
#
# -------------------------------------------------------------------------------
## \addtogroup hal
# chipsec/hal/cpuid.py
# ======================
# CPUID information
# ~~~
# #usage:
# cpuid(0)
# ~~~
#
__version__ = '1.0'
import struct
import sys
import os.path
from chipsec.logger import logger
class CpuIDRuntimeError(RuntimeError):
pass
class CpuID:
def __init__( self, cs ):
self.helper = cs.helper
self.cs = cs
def cpuid(self, eax, ecx ):
if logger().VERBOSE: logger().log( "[cpuid] in: EAX=0x%08X, ECX=0x%08X" % (eax, ecx) )
(eax, ebx, ecx, edx) = self.helper.cpuid( eax, ecx )
if logger().VERBOSE: logger().log( "[cpuid] out: EAX=0x%08X, EBX=0x%08X, ECX=0x%08X, EDX=0x%08X" % (eax, ebx, ecx, edx) )
return (eax, ebx, ecx, edx)
| gpl-2.0 | 5,467,598,300,744,505,000 | 28.783333 | 129 | 0.601626 | false |
carrdelling/project_euler | problem17.py | 1 | 1728 | #!/usr/bin/env python
################################################################################
#
# Project Euler - Problem 17
#
# If the numbers 1 to 5 are written out in words: one, two, three, four, five,
# then there are 3 + 3 + 5 + 4 + 4 = 19 letters used in total.
#
# If all the numbers from 1 to 1000 (one thousand) inclusive were written out in
# words, how many letters would be used?
#
# NOTE: Do not count spaces or hyphens. For example, 342 (three hundred and
# forty-two) contains 23 letters and 115 (one hundred and fifteen) contains 20
# letters. The use of "and" when writing out numbers is in compliance with
# British usage.
#
# Joaquin Derrac - [email protected]
#
################################################################################
units = {1: 3, 2: 3, 3: 5, 4: 4, 5: 4, 6: 3, 7: 5, 8: 5, 9: 4, 0: 0}
tens = {2: 6, 3: 6, 4: 5, 5: 5, 6: 5, 7: 7, 8: 6, 9: 6}
hundreds = {0: 0, 1: 13, 2: 13, 3: 15, 4: 14, 5: 14, 6: 13, 7: 15, 8: 15, 9: 14}
ten_to_nineteen = {10: 3, 11: 6, 12: 6, 13: 8, 14: 8, 15: 7, 16: 7, 17: 9,
18: 8, 19: 8}
def number_str_length(number):
h = number / 100
du = number % 100
d = du / 10
u = du % 10
if du < 1:
# no need for the 'and'
num_length = hundreds[h] - 3
elif 0 < du <= 9:
num_length = hundreds[h] + units[u]
elif 9 < du <= 19:
num_length = hundreds[h] + ten_to_nineteen[du]
else:
num_length = hundreds[h] + tens[d] + units[u]
return num_length
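# Worked checks from the problem statement:
#   number_str_length(342) == 23   # "three hundred and forty-two"
#   number_str_length(115) == 20   # "one hundred and fifteen"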
if __name__ == "__main__":
solution = 0
for i in range(1, 1000):
        length = number_str_length(i)
solution += length
# the last one - 1000
solution += 11
print(solution)
| gpl-2.0 | 492,748,719,102,900,900 | 27.327869 | 80 | 0.513889 | false |
aceway/cppite | src/py/cppite.py | 1 | 13042 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
########################################################
# ITE command start with: #//
# ITE command keywords:quit,exit,byebye,bye, begin, end,
# verbose, concise, dump_project, dump_make_file, dump_cpp,
# dump_fragment,load_fragment, compile, run, edit
#
########################################################
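#
# Example session (illustrative only; exact behaviour depends on settings.py):
#   > std::cout << "hello" << std::endl;   <- cached as a C++ fragment
#   > #//show                              <- print the cached fragment
#   > #//run                               <- compile and execute it
#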
import os
import commands
import settings as st
import utils as ut
from cpp_fragment_tmpl import hpp_tmpl, cpp_tmpl
from CMakeLists_tmpl import cmakelists_tmpl
class CppIte:
def __init__(self):
self.cpp_fragment = []
self.ite_cmd = []
self.include_files = []
self.include_dirs = []
self.static_files = []
self.is_verbose=False
# command full name and its shortkeys
self.ite_cmd_keymap={
'RUN': ("R", "RU"),
'COMPILE': ("C", "CO", "COM", "COMP"),
'VERBOSE': ("V", "VE", "VERB"),
'SIMPLE': ("S", "SI", "SIM"),
'CLEAR': ("CL", "CLE", ),
'SHOW': ("SH", "SHO", ),
'HELP': ("H", "HEL", ),
'RELOAD_SETTING': ('RS', 'REST'),
'CMD_CLEAR': ("CCL", "CCLE", ),
'CMD_HISTORY': ("CH", "CHIS", ),
'ADD_INCLUDE_FILE': ("AIF", ),
'RM_INCLUDE_FILE': ("RIF", "REMOVE_INCLUDE_FILE"),
'ADD_INCLUDE_DIR': ("AID", ),
'RM_INCLUDE_DIR': ("RID", "REMOVE_INCLUDE_DIR"),
'LIST_INCLUDE_FILE':("LIF", ),
'LIST_INCLUDE_DIR': ("LID", ),
'ADD_STATIC_FILE': ('ASF', ),
'LIST_STATIC_FILE': ('LSF', ),
'RM_STATIC_FILE': ('RSF', "REMOVE_STATIC_FILE"),
'LOAD_FRAG_FILE': ('LFF', 'LDFF'),
}
def is_ite_cmd(self, ri):
""" Test wether the raw input is a ITE(interactive test environment) command
or its c++ code fragment.
"""
if ri.strip().startswith( "#//" ):
self.ite_cmd.append( ri.strip().strip("#//") )
return True
else:
self.cpp_fragment.append( ri )
return False
def do_ite_cmd(self):
""" Do the ITE command """
cmd = self.ite_cmd[-1].strip().split(" ")
ite_cmd=cmd[0].upper()
args=cmd[1:]
if ite_cmd in self.ite_cmd_keymap:
ite_cmd=cmd[0].upper()
args=cmd[1:]
else:
for k, v in self.ite_cmd_keymap.items():
if ite_cmd in v:
ite_cmd=k.upper()
args=cmd[1:]
break
if self.is_verbose:
print "Do c++ ITE command:{c} {a}".format( c = ite_cmd, a=args )
self._do_cmd( ite_cmd.lower(), args )
def _do_cmd( self, cmd, *args, **keywords ):
"""
Private command proxy, execute by command name rule."
"""
if hasattr( self, "cmd_" + cmd.strip().lower() ) \
and callable( getattr(self, "cmd_" + cmd.strip().lower() ) ):
func = getattr(self, "cmd_" + cmd.strip().lower() )
try:
ret = apply( func, *args, **keywords )
except Exception, e:
print "{e}".format( e = e )
ret = None
return ret
else:
print "{c}Not surpported command:{cmd}{e}".format( c=st.color.FG_RED, cmd=cmd, e=st.color.END )
return None
def cmd_help(self, name=None):
"""Print the cppite command help info."""
if name is None:
print "{c}cppite command start with '#//' in the console line, here is all the supported commands:{e}"\
.format(c=st.color.FG_GREEN, e=st.color.END)
cmds = [ c for c in dir(self) if c.startswith("cmd_") ]
for c in cmds:
sc = ",".join( self.ite_cmd_keymap[ c[4:].upper() ] )
print "{c}: {s}. Short command:{sc}\n".format( c=c[4:], s=getattr(self, c).__doc__, sc=sc)
else:
name = name.lower()
cmd_name = "cmd_{n}".format( n= name )
if hasattr(self, cmd_name):
sc = ",".join( self.ite_cmd_keymap[ name.upper() ] )
print "{n}: {s}. Short command:{sc}".format( n=name, s= getattr(self, cmd_name).__doc__, sc=sc)
else:
print "{c}Not surpported command:{n}{e}".format( n=name, c=st.color.FG_RED, e=st.color.END )
def cmd_reload_setting(self):
"""Reload the settings.py"""
reload( st )
def cmd_cmd_history(self):
"""Show cppite commands history that you inputted before."""
for cmd in self.ite_cmd[:-1]:
print "{c}".format( c = cmd.strip() )
def cmd_cmd_clear(self):
"""Clear cppite cached commands"""
self.ite_cmd = []
def cmd_verbose(self):
"""Run in verbose mode, print process detail info."""
self.is_verbose = True
def cmd_simple(self):
"""Run in simple mode, only print the result but no process info."""
self.is_verbose = False
def cmd_show(self):
"""Show the inputted c++ code that cached in cppite temp memory"""
if self.is_verbose:
print "{c}Show the cached c++ code:{e}".format( c=st.color.FG_GREEN, e=st.color.END )
for c in self.cpp_fragment:
print c
def cmd_clear(self):
"""Clear the inputted c++ code that cached in cppite temp memory"""
if self.is_verbose:
print "{c}Clear the cached c++ code:\n{cd}\n{e}". \
format( c=st.color.FG_YELLOW, cd="\n".join(self.cpp_fragment), e=st.color.END )
self.cpp_fragment = []
def cmd_compile(self):
"""Compile the c++ code in cppite caching memory."""
if self.is_verbose:
print "Compile c++ code: {cpp}".format( cpp="\n".join(self.cpp_fragment) )
self.gen_cpp_code_file()
self.gen_cmakelist_file()
return self.exec_bash_cmd( st.compile_tool )
def cmd_run(self):
"""Compile the inputted c++ code and run it"""
if self.is_verbose:
print "Run c++ code fragment: {cpp}".format( cpp="\n".join(self.cpp_fragment) )
if os.path.isfile( st.out_bin_exe ):
status, output = self.exec_bash_cmd( st.out_bin_exe )
if status == 0: print output
else:
print "{c}Cannot find and gen {bf}!{e}".format( c=st.color.FG_RED, bf=st.out_bin_exe, e=st.color.END )
def cmd_list_include_file(self):
"""List c++ include header files"""
print "Now c++ include header file:"
for hf in st.default_include_headers:
print "\t", hf
for hf in self.include_files:
print "\t", hf
def cmd_list_include_dir(self):
"""List c++ include header dirs"""
print "Now c++ include header dir:"
for hd in st.default_include_dirs:
print "\t", hd
for hd in self.include_dirs:
print "\t", hd
def cmd_list_static_file(self):
"""List cmake link static file"""
print "Now cmake link static files:"
for sf in st.default_static_files:
print "\t", sf
for sf in self.static_files:
print "\t", sf
def cmd_add_include_file(self, *file_list):
"""Add c++ include header files"""
if len(file_list) == 0:
print "Need header file name!"
for f in file_list:
if f.strip() in self.include_files:
pass
else:
self.include_files.append( f.strip() )
def cmd_add_include_dir(self, *dir_list):
"""Add c++ include header dirs"""
if len(dir_list) == 0:
print "Need dir name!"
for d in dir_list:
if d.strip() in self.include_dirs:
pass
else:
self.include_dirs.append( d.strip() )
def cmd_add_static_file(self, *file_list):
"""Add static file"""
for f in file_list:
if f.strip() in self.static_files:
pass
else:
self.static_files.append( f.strip() )
def cmd_rm_include_file(self, *file_list):
"""Remove c++ include header files"""
for f in file_list:
if f.strip() in self.include_files:
self.include_files.remove( f.strip() )
else:
pass
def cmd_rm_include_dir(self, *dir_list):
"""Remove c++ include header dirs"""
for d in dir_list:
if d.strip() in self.include_dirs:
self.include_dirs.remove( d.strip() )
else:
pass
def cmd_rm_static_file(self, *file_list):
"""Remove static file from cache"""
for f in file_list:
if f.strip() in self.static_files:
self.static_files.remove( f.strip() )
else:
pass
def cmd_load_frag_file(self, *the_file):
"""Load frag code from a file"""
if len(the_file) == 1:
if os.path.isfile( the_file[0] ):
with open(the_file[0], 'r') as rf:
for line in rf:
self.cpp_fragment.append( line );
else:
print "{c}It's not valid file:{f}.{e}".format( c = st.color.FG_RED, e = st.color.END, f=the_file[0] )
pass
else:
print "{c}Only one file once, but now({ln}):{tf}{e}".format( c = st.color.FG_RED, e = st.color.END, ln=len(the_file), tf=the_file )
def gen_cpp_code_file(self):
"""Use the input c++ code fragment(cached in the list) to generate c++ hpp/cpp file."""
if self.is_verbose:
print "Generating c++ code... {f}".format( f = st.cpp_code_dir )
includes=""
for f in st.default_include_headers:
if f.find('.') < 0 or f.endswith('.h') or f.endswith('.hpp'):
the_include = "#include <{f}>\n".format( f=f )
if includes.find( the_include ) < 0:
includes += the_include
for f in self.include_files:
if f.find('.') < 0 or f.endswith('.h') or f.endswith('.hpp'):
the_include = "#include <{f}>\n".format( f=f )
if includes.find( the_include ) < 0:
includes += the_include
hpp_code= hpp_tmpl.format( includes=includes )
cpp_code = cpp_tmpl.format( head_file=st.hpp_filename, tmp_cpp= "\n".join(self.cpp_fragment) )
with open( st.cpp_code_dir + st.hpp_filename, 'w') as hf:
hf.write( hpp_code )
with open( st.cpp_code_dir + st.cpp_filename, 'w') as cf:
cf.write( cpp_code )
def gen_cmakelist_file(self):
"""Use the input and default config data to generate cmake's CMakeLists.txt"""
include_dirs = ""
for ind in st.default_include_dirs:
include_dirs += "{d}\n".format( d = ind )
for ind in self.include_dirs:
include_dirs += "{d}\n".format( d = ind )
static_files = ""
for sf in st.default_static_files:
static_files += "{s}\n".format( s = sf )
for sf in self.static_files:
static_files += "{s}\n".format( s = sf )
cmake_tmpl=cmakelists_tmpl.format( add_include_dirs=include_dirs, add_static_libs=static_files )
with open( st.cmakelists_dir + st.cmakelists_filename, 'w') as cmf:
cmf.write( cmake_tmpl )
def exec_bash_cmd(self, cmd):
"""
Call the bash command or scripts, and get the return info.
"""
the_data = {}
cmd = "{sh} ".format(sh=cmd)
(status, output) = commands.getstatusoutput(cmd)
if status == 0:
the_data['code'] = 0
the_data['data'] = output
the_data['desc'] = "OK"
else:
info = output.split(" ")
new_info = []
            # mask the password before echoing the failed command
for d in info:
if len(d) > 2 and d.lower().startswith("-p"):
d = "-p******"
elif len(d) > 2 and d.lower().startswith('"-p'):
d = "-p******"
elif len(d) > 2 and d.lower().startswith("'-p"):
d = "-p******"
else:
d = d
new_info.append(d)
output = " ".join(new_info)
the_data['code'] = -1
the_data['data'] = "<br>{op}".format(op=output)
the_data['desc'] = "{op}".format(op=output)
if status != 0:
print "{c}{out}{e}".format( c=st.color.FG_RED, out=output, e=st.color.END )
elif self.is_verbose:
print "{c}{out}{e}".format( c=st.color.FG_GREEN, out=output, e=st.color.END )
return status, output
| mit | 8,411,447,687,974,248,000 | 35.205556 | 143 | 0.491023 | false |
bdh1011/wau | venv/lib/python2.7/site-packages/notebook/services/kernelspecs/handlers.py | 1 | 2798 | """Tornado handlers for kernel specifications.
Preliminary documentation at https://github.com/ipython/ipython/wiki/IPEP-25%3A-Registry-of-installed-kernels#rest-api
"""
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
import glob
import json
import os
pjoin = os.path.join
from tornado import web
from ...base.handlers import APIHandler, json_errors
from ...utils import url_path_join
def kernelspec_model(handler, name):
"""Load a KernelSpec by name and return the REST API model"""
ksm = handler.kernel_spec_manager
spec = ksm.get_kernel_spec(name)
d = {'name': name}
d['spec'] = spec.to_dict()
d['resources'] = resources = {}
resource_dir = spec.resource_dir
for resource in ['kernel.js', 'kernel.css']:
if os.path.exists(pjoin(resource_dir, resource)):
resources[resource] = url_path_join(
handler.base_url,
'kernelspecs',
name,
resource
)
for logo_file in glob.glob(pjoin(resource_dir, 'logo-*')):
fname = os.path.basename(logo_file)
no_ext, _ = os.path.splitext(fname)
resources[no_ext] = url_path_join(
handler.base_url,
'kernelspecs',
name,
fname
)
return d
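# Illustrative shape of the returned model (names and paths are hypothetical):
#
#     {'name': 'python3',
#      'spec': {'argv': [...], 'display_name': 'Python 3', 'language': 'python'},
#      'resources': {'kernel.js': '/kernelspecs/python3/kernel.js',
#                    'logo-64x64': '/kernelspecs/python3/logo-64x64.png'}}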
class MainKernelSpecHandler(APIHandler):
SUPPORTED_METHODS = ('GET', 'OPTIONS')
@web.authenticated
@json_errors
def get(self):
ksm = self.kernel_spec_manager
km = self.kernel_manager
model = {}
model['default'] = km.default_kernel_name
model['kernelspecs'] = specs = {}
for kernel_name in ksm.find_kernel_specs():
try:
d = kernelspec_model(self, kernel_name)
except Exception:
self.log.error("Failed to load kernel spec: '%s'", kernel_name, exc_info=True)
continue
specs[kernel_name] = d
self.set_header("Content-Type", 'application/json')
self.finish(json.dumps(model))
@web.authenticated
@json_errors
def options(self):
self.finish()
class KernelSpecHandler(APIHandler):
SUPPORTED_METHODS = ('GET',)
@web.authenticated
@json_errors
def get(self, kernel_name):
try:
model = kernelspec_model(self, kernel_name)
except KeyError:
raise web.HTTPError(404, u'Kernel spec %s not found' % kernel_name)
self.set_header("Content-Type", 'application/json')
self.finish(json.dumps(model))
# URL to handler mappings
kernel_name_regex = r"(?P<kernel_name>\w+)"
default_handlers = [
(r"/api/kernelspecs", MainKernelSpecHandler),
(r"/api/kernelspecs/%s" % kernel_name_regex, KernelSpecHandler),
]
| mit | 4,596,191,801,277,789,700 | 28.765957 | 118 | 0.611866 | false |
annarev/tensorflow | tensorflow/python/keras/layers/advanced_activations_test.py | 2 | 5240 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for advanced activation layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.platform import test
@keras_parameterized.run_all_keras_modes
class AdvancedActivationsTest(keras_parameterized.TestCase):
def test_leaky_relu(self):
for alpha in [0., .5, -1.]:
testing_utils.layer_test(keras.layers.LeakyReLU,
kwargs={'alpha': alpha},
input_shape=(2, 3, 4),
supports_masking=True)
def test_prelu(self):
testing_utils.layer_test(keras.layers.PReLU, kwargs={},
input_shape=(2, 3, 4),
supports_masking=True)
def test_prelu_share(self):
testing_utils.layer_test(keras.layers.PReLU,
kwargs={'shared_axes': 1},
input_shape=(2, 3, 4),
supports_masking=True)
def test_elu(self):
for alpha in [0., .5, -1.]:
testing_utils.layer_test(keras.layers.ELU,
kwargs={'alpha': alpha},
input_shape=(2, 3, 4),
supports_masking=True)
def test_thresholded_relu(self):
testing_utils.layer_test(keras.layers.ThresholdedReLU,
kwargs={'theta': 0.5},
input_shape=(2, 3, 4),
supports_masking=True)
def test_softmax(self):
testing_utils.layer_test(keras.layers.Softmax,
kwargs={'axis': 1},
input_shape=(2, 3, 4),
supports_masking=True)
def test_relu(self):
testing_utils.layer_test(keras.layers.ReLU,
kwargs={'max_value': 10},
input_shape=(2, 3, 4),
supports_masking=True)
x = keras.backend.ones((3, 4))
if not context.executing_eagerly():
# Test that we use `leaky_relu` when appropriate in graph mode.
self.assertTrue(
'LeakyRelu' in keras.layers.ReLU(negative_slope=0.2)(x).name)
# Test that we use `relu` when appropriate in graph mode.
self.assertTrue('Relu' in keras.layers.ReLU()(x).name)
# Test that we use `relu6` when appropriate in graph mode.
self.assertTrue('Relu6' in keras.layers.ReLU(max_value=6)(x).name)
def test_relu_with_invalid_arg(self):
with self.assertRaisesRegex(
ValueError, 'max_value of Relu layer cannot be negative value: -10'):
testing_utils.layer_test(keras.layers.ReLU,
kwargs={'max_value': -10},
input_shape=(2, 3, 4),
supports_masking=True)
with self.assertRaisesRegex(
ValueError,
'negative_slope of Relu layer cannot be negative value: -2'):
with self.cached_session():
testing_utils.layer_test(
keras.layers.ReLU,
kwargs={'negative_slope': -2},
input_shape=(2, 3, 4))
@keras_parameterized.run_with_all_model_types
def test_layer_as_activation(self):
layer = keras.layers.Dense(1, activation=keras.layers.ReLU())
model = testing_utils.get_model_from_layers([layer], input_shape=(10,))
model.compile(
'sgd',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
model.fit(np.ones((10, 10)), np.ones((10, 1)), batch_size=2)
def test_leaky_relu_with_invalid_alpha(self):
# Test case for GitHub issue 46993.
with self.assertRaisesRegex(ValueError,
'alpha of leaky Relu layer cannot be None'):
testing_utils.layer_test(
keras.layers.LeakyReLU,
kwargs={'alpha': None},
input_shape=(2, 3, 4),
supports_masking=True)
def test_leaky_elu_with_invalid_alpha(self):
# Test case for GitHub issue 46993.
with self.assertRaisesRegex(ValueError,
'alpha of ELU layer cannot be None'):
testing_utils.layer_test(
keras.layers.ELU,
kwargs={'alpha': None},
input_shape=(2, 3, 4),
supports_masking=True)
if __name__ == '__main__':
test.main()
| apache-2.0 | -6,508,748,624,836,932,000 | 38.398496 | 80 | 0.577099 | false |
pythondigest/pythondigest | digest/forms.py | 1 | 3789 | # -*- encoding: utf-8 -*-
from ckeditor.widgets import CKEditorWidget, json_encode
from django import forms
from django.contrib import admin
from django.contrib.admin import widgets
from django.contrib.admin.options import get_ul_class
from django.forms import ChoiceField, ModelForm
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
try:
# Django >=1.7
from django.forms.utils import flatatt
except ImportError:
# Django <1.7
from django.forms.util import flatatt
from digest.models import Item
ITEM_STATUS_CHOICES = (('queue', 'В очередь'),
('moderated', 'Отмодерировано'),)
class GlavRedWidget(CKEditorWidget):
def render(self, name, value, attrs=None):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
self._set_config()
external_plugin_resources = [
[force_text(a), force_text(b), force_text(c)]
for a, b, c in self.external_plugin_resources]
return mark_safe(
render_to_string('custom_widget/ckeditor_widget.html', {
'final_attrs': flatatt(final_attrs),
'value': conditional_escape(force_text(value)),
'id': final_attrs['id'],
'config': json_encode(self.config),
'external_plugin_resources': json_encode(
external_plugin_resources)
}))
class ItemStatusForm(ModelForm):
status = ChoiceField(label='Статус',
widget=widgets.AdminRadioSelect(
attrs={'class': get_ul_class(admin.HORIZONTAL)}),
choices=ITEM_STATUS_CHOICES)
class Meta:
model = Item
fields = '__all__'
widgets = {
'description': GlavRedWidget,
}
EMPTY_VALUES = (None, '')
class HoneypotWidget(forms.TextInput):
is_hidden = True
def __init__(self, attrs=None, html_comment=False, *args, **kwargs):
self.html_comment = html_comment
super(HoneypotWidget, self).__init__(attrs, *args, **kwargs)
        if 'class' not in self.attrs:
            # no CSS class was supplied to hide the field, so hide it inline
            self.attrs['style'] = 'display:none'
def render(self, *args, **kwargs):
html = super(HoneypotWidget, self).render(*args, **kwargs)
if self.html_comment:
html = '<!-- %s -->' % html
return html
class HoneypotField(forms.Field):
widget = HoneypotWidget
def clean(self, value):
if self.initial in EMPTY_VALUES and value in EMPTY_VALUES or value == self.initial:
return value
raise forms.ValidationError('Anti-spam field changed in value.')
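# Usage sketch: any form can add an invisible trap field, e.g.
#   class ContactForm(forms.Form):  # hypothetical form
#       website = HoneypotField()
# A bot that fills the hidden "website" input trips the ValidationError
# above; AddNewsForm below does the same with its decoy "name" field.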
class AddNewsForm(forms.ModelForm):
name = HoneypotField()
class Meta:
model = Item
fields = ('link', 'section', 'title', 'language', 'description',)
def __init__(self, *args, **kwargs):
kwargs['initial'] = {
'section': 6
        }  # section 6 will be preselected on the form
super(AddNewsForm, self).__init__(*args, **kwargs)
self.fields['title'].widget.attrs = {
'class': 'form-control small',
}
self.fields['title'].required = False
self.fields['link'].widget.attrs = {
'class': 'form-control small',
}
self.fields['language'].widget.attrs = {
'class': 'form-control',
}
self.fields['description'].widget.attrs = {
'class': 'form-control',
}
self.fields['section'].widget.attrs = {
'class': 'form-control',
}
| mit | -280,419,232,003,009,540 | 29.892562 | 91 | 0.590155 | false |
aio-libs/aiomysql | tests/test_cursor.py | 1 | 9444 | import asyncio
import pytest
from aiomysql import ProgrammingError, Cursor, InterfaceError
async def _prepare(conn):
cur = await conn.cursor()
await cur.execute("DROP TABLE IF EXISTS tbl;")
await cur.execute("""CREATE TABLE tbl (
id MEDIUMINT NOT NULL AUTO_INCREMENT,
name VARCHAR(255) NOT NULL,
PRIMARY KEY (id));""")
for i in [(1, 'a'), (2, 'b'), (3, 'c')]:
await cur.execute("INSERT INTO tbl VALUES(%s, %s)", i)
await cur.execute("DROP TABLE IF EXISTS tbl2")
await cur.execute("""CREATE TABLE tbl2
(id int, name varchar(255))""")
await conn.commit()
async def _prepare_procedure(conn):
cur = await conn.cursor()
await cur.execute("DROP PROCEDURE IF EXISTS myinc;")
await cur.execute("""CREATE PROCEDURE myinc(p1 INT)
BEGIN
SELECT p1 + 1;
END
""")
await conn.commit()
@pytest.mark.run_loop
async def test_description(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
assert cur.description is None
await cur.execute('SELECT * from tbl;')
assert len(cur.description) == 2, \
'cursor.description describes too many columns'
assert len(cur.description[0]) == 7, \
'cursor.description[x] tuples must have 7 elements'
assert cur.description[0][0].lower() == 'id', \
'cursor.description[x][0] must return column name'
assert cur.description[1][0].lower() == 'name', \
'cursor.description[x][0] must return column name'
# Make sure self.description gets reset, cursor should be
# set to None in case of none resulting queries like DDL
await cur.execute('DROP TABLE IF EXISTS foobar;')
assert cur.description is None
@pytest.mark.run_loop
async def test_cursor_properties(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
assert cur.connection is conn
cur.setinputsizes()
cur.setoutputsizes()
assert cur.echo == conn.echo
@pytest.mark.run_loop
async def test_scroll_relative(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
await cur.execute('SELECT * FROM tbl;')
await cur.scroll(1)
ret = await cur.fetchone()
assert (2, 'b') == ret
@pytest.mark.run_loop
async def test_scroll_absolute(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
await cur.execute('SELECT * FROM tbl;')
await cur.scroll(2, mode='absolute')
ret = await cur.fetchone()
assert (3, 'c') == ret
@pytest.mark.run_loop
async def test_scroll_errors(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
with pytest.raises(ProgrammingError):
await cur.scroll(2, mode='absolute')
cur = await conn.cursor()
await cur.execute('SELECT * FROM tbl;')
with pytest.raises(ProgrammingError):
await cur.scroll(2, mode='not_valid_mode')
@pytest.mark.run_loop
async def test_scroll_index_error(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
await cur.execute('SELECT * FROM tbl;')
with pytest.raises(IndexError):
await cur.scroll(1000)
@pytest.mark.run_loop
async def test_close(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
await cur.close()
assert cur.closed is True
with pytest.raises(ProgrammingError):
await cur.execute('SELECT 1')
# try to close for second time
await cur.close()
@pytest.mark.run_loop
async def test_arraysize(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
assert 1 == cur.arraysize
cur.arraysize = 10
assert 10 == cur.arraysize
@pytest.mark.run_loop
async def test_rows(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
await cur.execute('SELECT * from tbl')
assert 3 == cur.rowcount
assert 0 == cur.rownumber
await cur.fetchone()
assert 1 == cur.rownumber
assert cur.lastrowid is None
await cur.execute('INSERT INTO tbl VALUES (%s, %s)', (4, 'd'))
assert 0 != cur.lastrowid
await conn.commit()
@pytest.mark.run_loop
async def test_callproc(connection_creator):
conn = await connection_creator()
await _prepare_procedure(conn)
cur = await conn.cursor()
await cur.callproc('myinc', [1])
ret = await cur.fetchone()
assert (2,) == ret
await cur.close()
with pytest.raises(ProgrammingError):
await cur.callproc('myinc', [1])
conn.close()
@pytest.mark.run_loop
async def test_fetchone_no_result(connection_creator):
# test a fetchone() with no rows
conn = await connection_creator()
c = await conn.cursor()
await c.execute("create table test_nr (b varchar(32))")
try:
data = "pymysql"
await c.execute("insert into test_nr (b) values (%s)", (data,))
r = await c.fetchone()
assert r is None
finally:
await c.execute("drop table test_nr")
@pytest.mark.run_loop
async def test_fetchmany_no_result(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
await cur.execute('DROP TABLE IF EXISTS foobar;')
r = await cur.fetchmany()
assert [] == r
@pytest.mark.run_loop
async def test_fetchall_no_result(connection_creator):
    # test a fetchall() with no rows
conn = await connection_creator()
cur = await conn.cursor()
await cur.execute('DROP TABLE IF EXISTS foobar;')
r = await cur.fetchall()
assert [] == r
@pytest.mark.run_loop
async def test_fetchall_with_scroll(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
await cur.execute('SELECT * FROM tbl;')
await cur.scroll(1)
ret = await cur.fetchall()
assert ((2, 'b'), (3, 'c')) == ret
@pytest.mark.run_loop
async def test_aggregates(connection_creator):
""" test aggregate functions """
conn = await connection_creator()
c = await conn.cursor()
try:
await c.execute('create table test_aggregates (i integer)')
for i in range(0, 10):
await c.execute(
'insert into test_aggregates (i) values (%s)', (i,))
await c.execute('select sum(i) from test_aggregates')
r, = await c.fetchone()
assert sum(range(0, 10)) == r
finally:
await c.execute('drop table test_aggregates')
@pytest.mark.run_loop
async def test_single_tuple(connection_creator):
""" test a single tuple """
conn = await connection_creator()
c = await conn.cursor()
try:
await c.execute(
"create table mystuff (id integer primary key)")
await c.execute("insert into mystuff (id) values (1)")
await c.execute("insert into mystuff (id) values (2)")
await c.execute("select id from mystuff where id in %s", ((1,),))
r = await c.fetchall()
assert [(1,)] == list(r)
finally:
await c.execute("drop table mystuff")
@pytest.mark.run_loop
async def test_executemany(connection_creator):
conn = await connection_creator()
await _prepare(conn)
cur = await conn.cursor()
assert cur.description is None
args = [1, 2, 3]
row_count = await cur.executemany(
'SELECT * FROM tbl WHERE id = %s;', args)
assert row_count == 3
r = await cur.fetchall()
    # TODO: check whether this is the right behaviour
assert ((3, 'c'),) == r
    # calling executemany without args
row_count = await cur.executemany('SELECT 1;', ())
assert row_count is None
@pytest.mark.run_loop
async def test_custom_cursor(connection_creator):
class MyCursor(Cursor):
pass
conn = await connection_creator()
cur = await conn.cursor(MyCursor)
assert isinstance(cur, MyCursor)
await cur.execute("SELECT 42;")
(r, ) = await cur.fetchone()
assert r == 42
@pytest.mark.run_loop
async def test_custom_cursor_not_cursor_subclass(connection_creator):
class MyCursor2:
pass
conn = await connection_creator()
with pytest.raises(TypeError):
await conn.cursor(MyCursor2)
@pytest.mark.run_loop
async def test_mogrify(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
pairs = [(1, 'a'), (2, 'b'), (3, 'c')]
sql = "INSERT INTO tbl VALUES(%s, %s)"
results = [cur.mogrify(sql, p) for p in pairs]
expected = ["INSERT INTO tbl VALUES(1, 'a')",
"INSERT INTO tbl VALUES(2, 'b')",
"INSERT INTO tbl VALUES(3, 'c')"]
assert results == expected
@pytest.mark.run_loop
async def test_execute_cancel(connection_creator):
conn = await connection_creator()
cur = await conn.cursor()
# Cancel a cursor in the middle of execution, before it could
# read even the first packet (SLEEP assures the timings)
task = asyncio.ensure_future(cur.execute(
"SELECT 1 as id, SLEEP(0.1) as xxx"))
await asyncio.sleep(0.05)
task.cancel()
try:
await task
except asyncio.CancelledError:
pass
with pytest.raises(InterfaceError):
await conn.cursor()
| mit | -166,258,917,336,268,960 | 28.605016 | 73 | 0.636806 | false |
MDAnalysis/mdanalysis | package/MDAnalysis/analysis/__init__.py | 1 | 1421 | # -*- Mode: python; tab-width: 4; indent-tabs-mode:nil; coding:utf-8 -*-
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
#
# MDAnalysis --- https://www.mdanalysis.org
# Copyright (c) 2006-2017 The MDAnalysis Development Team and contributors
# (see the file AUTHORS for the full list of names)
#
# Released under the GNU Public Licence, v2 or any higher version
#
# Please cite your use of MDAnalysis in published work:
#
# R. J. Gowers, M. Linke, J. Barnoud, T. J. E. Reddy, M. N. Melo, S. L. Seyler,
# D. L. Dotson, J. Domanski, S. Buchoux, I. M. Kenney, and O. Beckstein.
# MDAnalysis: A Python package for the rapid analysis of molecular dynamics
# simulations. In S. Benthall and S. Rostrup editors, Proceedings of the 15th
# Python in Science Conference, pages 102-109, Austin, TX, 2016. SciPy.
# doi: 10.25080/majora-629e541a-00e
#
# N. Michaud-Agrawal, E. J. Denning, T. B. Woolf, and O. Beckstein.
# MDAnalysis: A Toolkit for the Analysis of Molecular Dynamics Simulations.
# J. Comput. Chem. 32 (2011), 2319--2327, doi:10.1002/jcc.21787
#
__all__ = [
'align',
'base',
'contacts',
'density',
'distances',
'diffusionmap',
'dihedrals',
'gnm',
'hbonds',
'helix_analysis',
'hole2',
'hydrogenbonds',
'leaflet',
'lineardensity',
'msd',
'nuclinfo',
'polymer',
'pca',
'psa',
'rdf',
'rms',
'waterdynamics',
]
| gpl-2.0 | 6,178,818,606,323,664,000 | 28.604167 | 79 | 0.650246 | false |
MrMinimal64/timezonefinder | build_n_install.py | 1 | 1317 | import os
import sys
PACKAGE = 'timezonefinder'
VERSION_FILE = 'VERSION'
VIRT_ENVS = ['APIenv']
VIRT_ENV_COMMAND = '. ~/miniconda3/etc/profile.d/conda.sh; conda activate {virt_env}; '
PY_VERSION_IDS = ['36', '37', '38'] # the supported python versions to create wheels for
PYTHON_TAG = '.'.join([f'py{v}' for v in PY_VERSION_IDS])
if __name__ == "__main__":
print('building now:')
# routine("python3 setup.py sdist bdist_wheel upload", 'Uploading the package now.') # deprecated
# new twine publishing routine:
# https://packaging.python.org/tutorials/packaging-projects/
# delete the build folder before to get a fresh build
# TODO do not remove dist in the future
os.system('rm -r -f build')
os.system('rm -r -f dist')
build_cmd = f"python setup.py sdist bdist_wheel --python-tag {PYTHON_TAG}"
os.system(build_cmd)
# in all specified virtual environments
for virt_env in VIRT_ENVS:
virt_env_cmd = VIRT_ENV_COMMAND.format(virt_env=virt_env)
install_cmd = f'{virt_env_cmd} python setup.py install'
os.system(install_cmd)
# routine(build_cmd, 'building the package now.',
# 'build done. check the included files! installing package in virtual environment next.')
# routine(install_cmd)
os.system('rm -r -f build')
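    # Result sketch: with PYTHON_TAG built as above, bdist_wheel drops a
    # file into dist/ named roughly
    #   timezonefinder-<version>-py36.py37.py38-none-any.whl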
| mit | 1,168,199,045,781,017,300 | 36.628571 | 102 | 0.664389 | false |
ligovirgo/seismon | RfPrediction/BLRMS_Prediction/condor_seismic_peaks.py | 1 | 1969 |
import os, sys
import glob
import optparse
import tables
import pandas as pd
import numpy as np
import h5py
def parse_commandline():
"""
Parse the options given on the command-line.
"""
parser = optparse.OptionParser()
parser.add_option('-i','--ifos', type=str, default='LHO,LLO', help='GW Observatories: LLO,LHO...')
opts, args = parser.parse_args()
return opts
# Parse command line
opts = parse_commandline()
condorDir = './'
logDir = os.path.join(condorDir,'logs')
if not os.path.isdir(logDir):
os.makedirs(logDir)
condordag = os.path.join(condorDir,'condor.dag')
fid = open(condordag,'w')
condorsh = os.path.join(condorDir,'condor.sh')
fid1 = open(condorsh,'w')
job_number = 0
ifos = opts.ifos.split(",")
for ifo in ifos:
x = np.genfromtxt('./masterlists/{}.dat'.format(ifo))
for ii,row in enumerate(x):
fid1.write('python fetch_seismic_peaks.py -i %s -ID %d -blrmsBand 30M_100M -saveResult 1 -saveImage 0\n'%(ifo,ii))
fid.write('JOB %d condor.sub\n'%(job_number))
fid.write('RETRY %d 3\n'%(job_number))
fid.write('VARS %d jobNumber="%d" ifo="%s" id="%d"\n'%(job_number,job_number, ifo, ii))
fid.write('\n\n')
job_number = job_number + 1
fid1.close()
fid.close()
fid = open(os.path.join(condorDir,'condor.sub'),'w')
fid.write('executable = ./fetch_seismic_peaks.py\n')
fid.write('output = logs/out.$(jobNumber)\n');
fid.write('error = logs/err.$(jobNumber)\n');
fid.write('arguments = -IFO $(ifo) -ID $(id) -blrmsBand 30M_100M -saveResult 1 -saveImage 0\n')
fid.write('requirements = OpSys == "LINUX"\n');
fid.write('request_memory = 8192\n');
fid.write('request_cpus = 1\n');
fid.write('accounting_group = ligo.dev.o2.burst.allsky.stamp\n');
fid.write('notification = never\n');
fid.write('getenv = true\n');
fid.write('log = /usr1/mcoughlin/seismon.log\n')
fid.write('+MaxHours = 24\n');
fid.write('universe = vanilla\n');
fid.write('queue 1\n');
fid.close()
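# Output sketch: each job contributes a stanza to condor.dag of the form
#   JOB 0 condor.sub
#   RETRY 0 3
#   VARS 0 jobNumber="0" ifo="LHO" id="0"
# and condor.sub feeds those VARS into fetch_seismic_peaks.py through the
# $(ifo) and $(id) macros in its arguments line.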
| gpl-3.0 | 4,905,851,540,327,796,000 | 27.536232 | 122 | 0.655663 | false |
lepinsk/pydub | setup.py | 1 | 1425 | __doc__ = """
Manipulate audio with a simple and easy high-level interface.
See the README file for details, usage info, and a list of gotchas.
"""
from setuptools import setup
setup(
name='pydub',
version='0.9.0',
author='James Robert',
author_email='[email protected]',
    description='Manipulate audio with a simple and easy high-level interface',
license='MIT',
keywords='audio sound high-level',
url='http://pydub.com',
packages=['pydub'],
long_description=__doc__,
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Multimedia :: Sound/Audio :: Analysis",
"Topic :: Multimedia :: Sound/Audio :: Conversion",
"Topic :: Multimedia :: Sound/Audio :: Editors",
"Topic :: Multimedia :: Sound/Audio :: Mixers",
"Topic :: Software Development :: Libraries",
'Topic :: Utilities',
]
)
| mit | 3,066,871,106,902,735,400 | 34.625 | 80 | 0.602807 | false |
keenondrums/sovrin-node | sovrin_client/agent/endpoint.py | 1 | 2195 | from typing import Callable
from plenum import config
from plenum.common.message_processor import MessageProcessor
from stp_core.common.log import getlogger
from stp_core.network.auth_mode import AuthMode
from stp_raet.util import getHaFromLocalEstate
from plenum.common.util import randomString
from stp_core.crypto.util import randomSeed
from stp_raet.rstack import SimpleRStack
from stp_core.types import HA
from stp_zmq.simple_zstack import SimpleZStack
logger = getlogger()
class EndpointCore(MessageProcessor):
def tracedMsgHandler(self, msg):
logger.debug("Got {}".format(msg))
self.msgHandler(msg)
class REndpoint(SimpleRStack, EndpointCore):
def __init__(self, port: int, msgHandler: Callable,
name: str=None, basedirpath: str=None):
if name and basedirpath:
ha = getHaFromLocalEstate(name, basedirpath)
if ha and ha[1] != port:
port = ha[1]
stackParams = {
"name": name or randomString(8),
"ha": HA("0.0.0.0", port),
"main": True,
"auth_mode": AuthMode.ALLOW_ANY.value,
"mutable": "mutable",
"messageTimeout": config.RAETMessageTimeout
}
if basedirpath:
stackParams["basedirpath"] = basedirpath
SimpleRStack.__init__(self, stackParams, self.tracedMsgHandler)
self.msgHandler = msgHandler
class ZEndpoint(SimpleZStack, EndpointCore):
def __init__(self, port: int, msgHandler: Callable,
name: str=None, basedirpath: str=None, seed=None,
onlyListener=False, msgRejectHandler=None):
stackParams = {
"name": name or randomString(8),
"ha": HA("0.0.0.0", port),
"auth_mode": AuthMode.ALLOW_ANY.value
}
if basedirpath:
stackParams["basedirpath"] = basedirpath
seed = seed or randomSeed()
SimpleZStack.__init__(
self,
stackParams,
self.tracedMsgHandler,
seed=seed,
onlyListener=onlyListener,
msgRejectHandler=msgRejectHandler)
self.msgHandler = msgHandler
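# Usage sketch (hypothetical handler and port): both stacks share the same
# calling shape, e.g.
#   endpoint = ZEndpoint(5555, msgHandler=my_handler, name="agent-endpoint")
# where my_handler(msg) receives each inbound message via tracedMsgHandler.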
| apache-2.0 | -4,259,224,821,556,638,000 | 30.357143 | 75 | 0.62369 | false |
ltucker/radarpost | radarpost/commands/useradmin.py | 1 | 4133 | from couchdb import Server, ResourceNotFound
from radarpost.cli import COMMANDLINE_PLUGIN, BasicCommand, get_basic_option_parser
from radarpost import plugins
from radarpost.user import User, ROLE_ADMIN
from getpass import getpass
class CreateUserCommand(BasicCommand):
command_name = 'create_user'
description = 'create a user'
@classmethod
def setup_options(cls, parser):
        parser.set_usage(r"%prog " + "%s <username> [options]" % cls.command_name)
parser.add_option('--admin', action="store_true", dest="is_admin",
default=False, help="create an administrative user")
parser.add_option('--locked', action="store_true", dest="is_locked",
default=False,
help="create with locked password, do not prompt for password.")
def __call__(self, username, is_locked=False, is_admin=False):
"""
Create a user with the given username.
is_locked - if True, create with a locked password
is_admin - if True, grant administrative rights to the user
"""
couchdb = Server(self.config['couchdb.address'])
try:
udb = couchdb[self.config['couchdb.users_database']]
except:
print "Failed to connect to couchdb at %s/%s" % (self.config['couchdb.address'],
self.config['couchdb.users_database'])
return 1
new_user = User(username=username)
if new_user.id in udb:
print 'User "%s" already exists' % username
return 1
if not is_locked:
done = False
while(not done):
password = getpass(prompt="Password for %s: " % username)
password2 = getpass(prompt="Repeat password: ")
if password == password2:
done = True
else:
print "Passwords did not match, try again.\n"
new_user.set_password(password)
if is_admin:
new_user.roles = [ROLE_ADMIN]
new_user.store(udb)
print 'Created user "%s"' % username
plugins.register(CreateUserCommand, COMMANDLINE_PLUGIN)
class ResetPasswordCommand(BasicCommand):
command_name = 'reset_password'
description = "reset a user's password"
@classmethod
def setup_options(cls, parser):
        parser.set_usage(r"%prog " + "%s <username> [options]" % cls.command_name)
parser.add_option('--locked', action="store_true", dest="is_locked",
default=False,
help="lock the user's password, do not prompt for password.")
def __call__(self, username, is_locked=False):
"""
Reset the password of the user with the given username.
is_locked - if True, lock the user's password
"""
couchdb = Server(self.config['couchdb.address'])
try:
udb = couchdb[self.config['couchdb.users_database']]
except:
print "Failed to connect to couchdb at %s/%s" % (self.config['couchdb.address'],
self.config['couchdb.users_database'])
return 1
try:
user = User.get_by_username(udb, username)
except ResourceNotFound:
print 'User "%s" does not exist' % username
return 1
if not is_locked:
done = False
while(not done):
password = getpass(prompt="New password for %s: " % username)
password2 = getpass(prompt="Repeat password: ")
if password == password2:
done = True
else:
print "Passwords did not match, try again.\n"
user.set_password(password)
else:
user.lock_password()
user.store(udb)
print 'Password changed for user "%s"' % username
plugins.register(ResetPasswordCommand, COMMANDLINE_PLUGIN) | gpl-2.0 | -4,687,873,527,966,541,000 | 36.926606 | 99 | 0.553109 | false |
pcm17/tensorflow | tensorflow/contrib/distributions/python/ops/inverse_gamma.py | 1 | 10539 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The InverseGamma distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import distribution
from tensorflow.contrib.distributions.python.ops import distribution_util
from tensorflow.contrib.framework.python.framework import tensor_util as contrib_tensor_util
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
__all__ = [
"InverseGamma",
"InverseGammaWithSoftplusConcentrationRate",
]
class InverseGamma(distribution.Distribution):
"""InverseGamma distribution.
The `InverseGamma` distribution is defined over positive real numbers using
parameters `concentration` (aka "alpha") and `rate` (aka "beta").
#### Mathematical Details
The probability density function (pdf) is,
```none
pdf(x; alpha, beta, x > 0) = x**(-alpha - 1) exp(-beta / x) / Z
Z = Gamma(alpha) beta**-alpha
```
where:
* `concentration = alpha`,
* `rate = beta`,
* `Z` is the normalizing constant, and,
* `Gamma` is the [gamma function](
https://en.wikipedia.org/wiki/Gamma_function).
  The cumulative distribution function (cdf) is,
```none
cdf(x; alpha, beta, x > 0) = GammaInc(alpha, beta / x) / Gamma(alpha)
```
where `GammaInc` is the [upper incomplete Gamma function](
https://en.wikipedia.org/wiki/Incomplete_gamma_function).
  The parameters can be intuited via their relationship to mean and stddev
  (both of which are defined only when `concentration > 2`),
  ```none
  concentration = alpha = (mean / stddev)**2 + 2
  rate = beta = mean * (concentration - 1)
```
Distribution parameters are automatically broadcast in all functions; see
examples for details.
WARNING: This distribution may draw 0-valued samples for small concentration
values. See note in `tf.random_gamma` docstring.
#### Examples
```python
dist = InverseGamma(concentration=3.0, rate=2.0)
dist2 = InverseGamma(concentration=[3.0, 4.0], rate=[2.0, 3.0])
```
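  Moment matching per the relationship above (a sketch; it requires
  `concentration > 2`): a target mean of 2. and stddev of 1. gives
  ```none
  concentration = (2. / 1.)**2 + 2  # = 6.
  rate = 2. * (6. - 1.)             # = 10.
  ```
  so `InverseGamma(concentration=6., rate=10.)` has mean 2. and stddev 1.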
"""
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGamma"):
"""Construct InverseGamma with `concentration` and `rate` parameters.
The parameters `concentration` and `rate` must be shaped in a way that
supports broadcasting (e.g. `concentration + rate` is a valid operation).
Args:
concentration: Floating point tensor, the concentration params of the
distribution(s). Must contain only positive values.
rate: Floating point tensor, the inverse scale params of the
distribution(s). Must contain only positive values.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
Raises:
TypeError: if `concentration` and `rate` are different dtypes.
"""
parameters = locals()
with ops.name_scope(name, values=[concentration, rate]) as ns:
with ops.control_dependencies([
check_ops.assert_positive(concentration),
check_ops.assert_positive(rate),
] if validate_args else []):
self._concentration = array_ops.identity(
concentration, name="concentration")
self._rate = array_ops.identity(rate, name="rate")
contrib_tensor_util.assert_same_float_dtype(
[self._concentration, self._rate])
super(InverseGamma, self).__init__(
dtype=self._concentration.dtype,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
reparameterization_type=distribution.NOT_REPARAMETERIZED,
parameters=parameters,
graph_parents=[self._concentration,
self._rate],
name=ns)
@staticmethod
def _param_shapes(sample_shape):
return dict(
zip(("concentration", "rate"), ([ops.convert_to_tensor(
sample_shape, dtype=dtypes.int32)] * 2)))
@property
def concentration(self):
"""Concentration parameter."""
return self._concentration
@property
def rate(self):
"""Rate parameter."""
return self._rate
def _batch_shape_tensor(self):
return array_ops.broadcast_dynamic_shape(
array_ops.shape(self.concentration),
array_ops.shape(self.rate))
def _batch_shape(self):
return array_ops.broadcast_static_shape(
self.concentration.get_shape(),
self.rate.get_shape())
def _event_shape_tensor(self):
return constant_op.constant([], dtype=dtypes.int32)
def _event_shape(self):
return tensor_shape.scalar()
@distribution_util.AppendDocstring(
"""Note: See `tf.random_gamma` docstring for sampling details and
caveats.""")
def _sample_n(self, n, seed=None):
return 1. / random_ops.random_gamma(
shape=[n],
alpha=self.concentration,
beta=self.rate,
dtype=self.dtype,
seed=seed)
def _log_prob(self, x):
return self._log_unnormalized_prob(x) - self._log_normalization()
def _prob(self, x):
return math_ops.exp(self._log_prob(x))
def _log_cdf(self, x):
return math_ops.log(self._cdf(x))
def _cdf(self, x):
x = self._maybe_assert_valid_sample(x)
# Note that igammac returns the upper regularized incomplete gamma
# function Q(a, x), which is what we want for the CDF.
return math_ops.igammac(self.concentration, self.rate / x)
def _log_unnormalized_prob(self, x):
x = self._maybe_assert_valid_sample(x)
return -(1. + self.concentration) * math_ops.log(x) - self.rate / x
def _log_normalization(self):
return (math_ops.lgamma(self.concentration)
- self.concentration * math_ops.log(self.rate))
def _entropy(self):
return (self.concentration
+ math_ops.log(self.rate)
+ math_ops.lgamma(self.concentration)
- ((1. + self.concentration) *
math_ops.digamma(self.concentration)))
@distribution_util.AppendDocstring(
"""The mean of an inverse gamma distribution is
`rate / (concentration - 1)`, when `concentration > 1`, and `NaN`
otherwise. If `self.allow_nan_stats` is `False`, an exception will be
raised rather than returning `NaN`""")
def _mean(self):
mean = self.rate / (self.concentration - 1.)
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 1., mean, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
array_ops.ones([], self.dtype), self.concentration,
message="mean undefined when any concentration <= 1"),
], mean)
@distribution_util.AppendDocstring(
"""Variance for inverse gamma is defined only for `concentration > 2`. If
`self.allow_nan_stats` is `False`, an exception will be raised rather
than returning `NaN`.""")
def _variance(self):
var = (math_ops.square(self.rate)
/ math_ops.square(self.concentration - 1.)
/ (self.concentration - 2.))
if self.allow_nan_stats:
nan = array_ops.fill(
self.batch_shape_tensor(),
np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
name="nan")
return array_ops.where(self.concentration > 2., var, nan)
else:
return control_flow_ops.with_dependencies([
check_ops.assert_less(
constant_op.constant(2., dtype=self.dtype),
self.concentration,
message="variance undefined when any concentration <= 2"),
], var)
@distribution_util.AppendDocstring(
"""The mode of an inverse gamma distribution is `rate / (concentration +
1)`.""")
def _mode(self):
return self.rate / (1. + self.concentration)
def _maybe_assert_valid_sample(self, x):
contrib_tensor_util.assert_same_float_dtype(
tensors=[x], dtype=self.dtype)
if not self.validate_args:
return x
return control_flow_ops.with_dependencies([
check_ops.assert_positive(x),
], x)
class InverseGammaWithSoftplusConcentrationRate(InverseGamma):
"""`InverseGamma` with softplus of `concentration` and `rate`."""
def __init__(self,
concentration,
rate,
validate_args=False,
allow_nan_stats=True,
name="InverseGammaWithSoftplusConcentrationRate"):
parameters = locals()
with ops.name_scope(name, values=[concentration, rate]) as ns:
super(InverseGammaWithSoftplusConcentrationRate, self).__init__(
concentration=nn.softplus(concentration,
name="softplus_concentration"),
rate=nn.softplus(rate, name="softplus_rate"),
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
name=ns)
self._parameters = parameters
| apache-2.0 | 5,066,371,313,782,107,000 | 34.365772 | 92 | 0.656988 | false |
WorldViews/Spirals | YEI/foo_api.py | 1 | 146347 | #!/usr/bin/env python2.7
from __future__ import print_function
""" This module is an API module for ThreeSpace devices.
The ThreeSpace API module is a collection of classes, functions, structures,
and static variables used exclusively for ThreeSpace devices. This module can
be used with a system running Python 2.5 and newer (including Python 3.x).
"""
__version__ = "2.0.2.3"
__authors__ = [
'"Chris George" <[email protected]>',
'"Dan Morrison" <[email protected]>',
]
import threading
import sys
import serial
import struct
import collections
import traceback
import time
import os
# choose an implementation, depending on os
if os.name == 'nt': # sys.platform == 'win32':
from win32_threespace_utils import *
else:
from threespace_utils import *
print("WARNING: No additional utils are loaded!!!!!!")
### Globals ###
global_file_path = os.getcwd()
global_error = None
global_counter = 0
global_donglist = {}
global_sensorlist = {}
global_broadcaster = None
TSS_TIMESTAMP_SENSOR = 0
TSS_TIMESTAMP_SYSTEM = 1
TSS_TIMESTAMP_NONE = 2
TSS_JOYSTICK = 0
TSS_MOUSE = 2
TSS_BUTTON_LEFT = 0
TSS_BUTTON_RIGHT = 1
### Private ###
_baudrate = 115200
_allowed_baudrates = [1200, 2400, 4800, 9600, 19200, 28800, 38400, 57600, 115200, 230400, 460800, 921600]
_wireless_retries = 5
### Functions ###
if sys.version_info >= (3, 0):
def makeWriteArray(startbyte, index_byte=None, command_byte=None, data=None):
rtn_array = bytearray((startbyte,))
if index_byte is not None:
rtn_array.append(index_byte)
if command_byte is not None:
rtn_array.append(command_byte)
if data is not None:
rtn_array += data
rtn_array.append((sum(rtn_array) - startbyte) % 256) # checksum
_hexDump(rtn_array)
return rtn_array
else:
def makeWriteArray(startbyte, index_byte=None, command_byte=None, data=None):
rtn_array = chr(startbyte)
if index_byte is not None:
rtn_array += chr(index_byte)
if command_byte is not None:
rtn_array += chr(command_byte)
if data is not None:
rtn_array += data
rtn_array += chr((sum(bytearray(rtn_array)) - startbyte) % 256) # checksum
_hexDump(rtn_array)
return rtn_array
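# Sketch: a 0xF7 "get serial number" request (command byte 0xed, no payload)
# illustrates the checksum rule above; the start byte is excluded from the
# sum, so the trailing checksum equals the command byte here.
def _example_write_array():
    packet = makeWriteArray(0xf7, None, 0xed)
    assert bytearray(packet) == bytearray((0xf7, 0xed, 0xed))
    return packet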
def _hexDump(serial_string, header='i'):
if "-d_hex" in sys.argv:
ba = bytearray(serial_string)
print('{0}('.format(header), end='')
for i in range(len(ba)):
if i == len(ba)-1:
print('0x{0:02x}'.format(ba[i]), end='')
else:
print('0x{0:02x},'.format(ba[i]), end='')
print(')')
def _print(string):
if "-d" in sys.argv:
print(string)
def _echoCallback(sensor, state):
_print('{0}:{1}'.format(sensor, state))
def _generateProtocolHeader(success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
byte = 0
struct_str = '>'
idx_list = []
if success_failure:
byte += 0x1
struct_str += '?'
idx_list.append(0)
if timestamp:
byte += 0x2
struct_str += 'I'
idx_list.append(1)
if command_echo:
byte += 0x4
struct_str += 'B'
idx_list.append(2)
if checksum:
byte += 0x8
struct_str += 'B'
idx_list.append(3)
if logical_id:
byte += 0x10
struct_str += 'B'
idx_list.append(4)
if serial_number:
byte += 0x20
struct_str += 'I'
idx_list.append(5)
if data_length:
byte += 0x40
struct_str += 'B'
idx_list.append(6)
return (byte, struct.Struct(struct_str), idx_list)
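# Sketch: the default wired header requested by _TSBase (success/failure,
# timestamp, command echo, data length) is the bitfield 0x1+0x2+0x4+0x40.
def _example_protocol_header():
    byte, header_struct, idx_list = _generateProtocolHeader(
        success_failure=True, timestamp=True, command_echo=True,
        data_length=True)
    assert byte == 0x47 and idx_list == [0, 1, 2, 6]
    return header_struct.size  # '>?IBB' packs into 7 bytes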
def _generateSensorClass(sensor_inst, serial_port, allowed_device_types):
sensor_inst.compatibility = checkSoftwareVersionFromPort(serial_port)
sensor_inst.port_name = serial_port.name
sensor_inst.serial_port_settings = serial_port.getSettingsDict()
sensor_inst.serial_port = serial_port
hardware_version = convertString(sensor_inst.f7WriteRead('getHardwareVersionString'))
dev_type = hardware_version[4:-8].strip()
if dev_type not in allowed_device_types:
raise Exception("This is a %s device, not one of these devices %s!" % (dev_type, allowed_device_types))
sensor_inst.device_type = dev_type
serial_number = sensor_inst.f7WriteRead('getSerialNumber')
sensor_inst.serial_number = serial_number
if dev_type == "DNG":
if serial_number in global_donglist:
rtn_inst = global_donglist[serial_number]
rtn_inst.close()
rtn_inst.compatibility = sensor_inst.compatibility
rtn_inst.port_name = serial_port.name
rtn_inst.serial_port_settings = serial_port.getSettingsDict()
rtn_inst.serial_port = serial_port
return rtn_inst
global_donglist[serial_number] = sensor_inst
else:
if serial_number in global_sensorlist:
rtn_inst = global_sensorlist[serial_number]
rtn_inst.close()
rtn_inst.compatibility = sensor_inst.compatibility
rtn_inst.port_name = serial_port.name
rtn_inst.serial_port_settings = serial_port.getSettingsDict()
rtn_inst.serial_port = serial_port
if "BT" in dev_type:
rtn_inst.serial_port.timeout = 1.5
rtn_inst.serial_port.writeTimeout = 1.5
if "WL" in dev_type:
rtn_inst.switchToWiredMode()
return rtn_inst
if "BT" in dev_type:
sensor_inst.serial_port.timeout = 1.5
sensor_inst.serial_port.writeTimeout = 1.5
elif "WL" in dev_type:
sensor_inst.switchToWiredMode()
global_sensorlist[serial_number] = sensor_inst
return sensor_inst
def parseAxisDirections(axis_byte):
axis_order_num = axis_byte & 7
if axis_order_num == 0:
axis_order = "XYZ"
elif axis_order_num == 1:
axis_order = "XZY"
elif axis_order_num == 2:
axis_order = "YXZ"
elif axis_order_num == 3:
axis_order = "YZX"
elif axis_order_num == 4:
axis_order = "ZXY"
elif axis_order_num == 5:
axis_order = "ZYX"
else:
raise ValueError
neg_x = neg_y = neg_z = False
if (axis_byte & 32) > 0:
neg_x = True
if (axis_byte & 16) > 0:
neg_y = True
if (axis_byte & 8) > 0:
neg_z = True
return axis_order, neg_x, neg_y, neg_z
def generateAxisDirections(axis_order, neg_x=False, neg_y=False, neg_z=False):
axis_order = axis_order.upper()
if axis_order == "XYZ":
axis_byte = 0
elif axis_order == "XZY":
axis_byte = 1
elif axis_order == "YXZ":
axis_byte = 2
elif axis_order == "YZX":
axis_byte = 3
elif axis_order == "ZXY":
axis_byte = 4
elif axis_order == "ZYX":
axis_byte = 5
else:
raise ValueError
if neg_x:
axis_byte = axis_byte | 32
if neg_y:
axis_byte = axis_byte | 16
if neg_z:
axis_byte = axis_byte | 8
return axis_byte
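# Round-trip sketch: "XZY" with a negated Y axis encodes order bits 1 plus
# the 16 flag, i.e. 0x11, and decodes back to the same arguments.
def _example_axis_round_trip():
    axis_byte = generateAxisDirections("XZY", neg_y=True)
    assert axis_byte == 0x11
    assert parseAxisDirections(axis_byte) == ("XZY", False, True, False)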
def getSystemWirelessRetries():
return _wireless_retries
def setSystemWirelessRetries(retries):
global _wireless_retries
_wireless_retries = retries
def getDefaultCreateDeviceBaudRate():
return _baudrate
def setDefaultCreateDeviceBaudRate(new_baudrate):
global _baudrate
if new_baudrate in _allowed_baudrates:
_baudrate = new_baudrate
def padProtocolHeader69(header_data, sys_timestamp):
fail_byte, cmd_echo, data_size = header_data
return (fail_byte, sys_timestamp, cmd_echo, None, None, None, data_size)
def padProtocolHeader71(header_data):
fail_byte, timestamp, cmd_echo, data_size = header_data
return (fail_byte, timestamp, cmd_echo, None, None, None, data_size)
def padProtocolHeader85(header_data, sys_timestamp):
fail_byte, cmd_echo, rtn_log_id, data_size = header_data
return (fail_byte, sys_timestamp, cmd_echo, None, rtn_log_id, None, data_size)
def padProtocolHeader87(header_data):
fail_byte, timestamp, cmd_echo, rtn_log_id, data_size = header_data
return (fail_byte, timestamp, cmd_echo, None, rtn_log_id, None, data_size)
### Classes ###
class Broadcaster(object):
def __init__(self):
self.retries = 10
def setRetries(self, retries=10):
self.retries = retries
def sequentialWriteRead(self, command, input_list=None, filter=None):
if filter is None:
filter = list(global_sensorlist.values())
val_list = {}
for i in range(self.retries):
for sensor in reversed(filter):
packet = sensor.writeRead(command, input_list)
if packet[0]: # fail_byte
continue
val_list[sensor.serial_number] = packet
filter.remove(sensor)
if not filter:
break
# _print("##Attempt: {0} complete".format(i))
else:
# _print("sensor failed to succeed")
for sensor in filter:
val_list[sensor.serial_number] = (True, None, None)
return val_list
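    # Usage sketch: with sensors registered in global_sensorlist, a broadcast
    # read such as
    #   vals = broadcaster.sequentialWriteRead('getSerialNumber')
    # returns {serial_number: (fail_byte, timestamp, data)} for every sensor,
    # retrying any that fail up to self.retries times.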
def writeRead(self, command, input_list=None, filter=None):
q = TSCommandQueue()
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
q.queueWriteRead(sensor, sensor.serial_number, self.retries, command, input_list)
return q.proccessQueue()
def _broadcastMethod(self, filter, method, default=None, *args):
# _print(filter)
if filter is None:
filter = list(global_sensorlist.values())
val_list = {}
for i in range(self.retries):
for sensor in reversed(filter):
packet = getattr(sensor, method)(*args)
if packet is default: # fail_byte
continue
val_list[sensor.serial_number] = packet
filter.remove(sensor)
if not filter:
break
# _print("##Attempt: {0} complete".format(i))
else:
# _print("sensor failed to succeed")
for sensor in filter:
val_list[sensor.serial_number] = default
return val_list
def broadcastMethod(self, method, default=None, args=[], filter=None, callback_func=None):
q = TSCommandQueue()
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
q.queueMethod( getattr(sensor, method),
sensor,
self.retries,
default,
args,
callback_func)
return q.proccessQueue()
def setStreamingSlots(self, slot0='null',
slot1='null',
slot2='null',
slot3='null',
slot4='null',
slot5='null',
slot6='null',
slot7='null',
filter=None,
callback_func=None):
args = (slot0, slot1, slot2, slot3, slot4, slot5, slot6, slot7)
return self.broadcastMethod('setStreamingSlots', False, args, filter, callback_func)
def getStreamingSlots(self, filter=None, callback_func=None):
return self.broadcastMethod('getStreamingSlots', None, [], filter, callback_func)
def startStreaming(self, record_data=False, filter=None, callback_func=None):
return self.broadcastMethod('startStreaming', False, [record_data], filter, callback_func)
def stopStreaming(self, filter=None, callback_func=None):
return self.broadcastMethod('stopStreaming', False, [], filter, callback_func)
def setStreamingTiming(self, interval, duration, delay, delay_offset, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
else:
filter = list(filter)
val_list = {}
for sensor in reversed(filter):
success = False
for i in range(self.retries):
if sensor.setStreamingTiming(interval, duration, delay):
if callback_func is not None:
callback_func(sensor, True)
success = True
break
# _print("##Attempt: {0} complete".format(i))
if callback_func is not None:
callback_func(sensor, False)
else:
# _print("sensor failed to succeed")
pass
val_list[sensor] = success
filter.remove(sensor)
delay += delay_offset
return val_list
def startRecordingData(self, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
sensor.record_data = True
if callback_func is not None:
callback_func(sensor, True)
def stopRecordingData(self, filter=None, callback_func=None):
if filter is None:
filter = list(global_sensorlist.values())
for sensor in filter:
sensor.record_data = False
if callback_func is not None:
callback_func(sensor, True)
def debugPrint(self, broadcast_dict):
for sensor, data in broadcast_dict.items():
_print('Sensor {0:08X}: {1}'.format(sensor, data))
class TSCommandQueue(object):
def __init__(self):
self.queue = []
self.return_dict = {}
def queueWriteRead(self, sensor, rtn_key, retries, command, input_list=None):
self.queue.append(("queueWriteRead", sensor, (self.return_dict, rtn_key, retries, command, input_list)))
def queueMethod(self, method_obj, rtn_key, retries, default=None, input_list=None, callback_func=None):
self.queue.append(("queueMethod", (method_obj, rtn_key, retries, default, input_list, callback_func)))
def _queueMethod(self, method_obj, rtn_key, retries, default=None, input_list=None, callback_func=None):
try:
for i in range(retries):
packet = method_obj(*input_list)
if packet is default: # fail_byte
if callback_func is not None:
callback_func(rtn_key, False)
continue
if callback_func is not None:
callback_func(rtn_key, True)
self.return_dict[rtn_key] = packet
break
else:
self.return_dict[rtn_key] = default
except(KeyboardInterrupt):
print('\n! Received keyboard interrupt, quitting threads.\n')
raise KeyboardInterrupt # fix bug where a thread eats the interupt
def createThreads(self):
thread_queue = []
for item in self.queue:
if item[0] == "queueWriteRead":
thread_queue.append(item[1].queueWriteRead(*item[2]))
elif item[0] == "queueMethod":
qThread = threading.Thread(target=self._queueMethod, args=item[1])
thread_queue.append(qThread)
return thread_queue
def proccessQueue(self, clear_queue=False):
thread_queue = self.createThreads()
[qThread.start() for qThread in thread_queue]
[qThread.join() for qThread in thread_queue]
if clear_queue:
self.queue = []
return self.return_dict
# Base class should not be used directly
class _TSBase(object):
command_dict = {
'checkLongCommands': (0x19, 1, '>B', 0, None, 1),
'startStreaming': (0x55, 0, None, 0, None, 1),
'stopStreaming': (0x56, 0, None, 0, None, 1),
'updateCurrentTimestamp': (0x5f, 0, None, 4, '>I', 1),
'setLEDMode': (0xc4, 0, None, 1, '>B', 1),
'getLEDMode': (0xc8, 1, '>B', 0, None, 1),
'_setWiredResponseHeaderBitfield': (0xdd, 0, None, 4, '>I', 1),
'_getWiredResponseHeaderBitfield': (0xde, 4, '>I', 0, None, 1),
'getFirmwareVersionString': (0xdf, 12, '>12s', 0, None, 1),
'commitSettings': (0xe1, 0, None, 0, None, 1),
'softwareReset': (0xe2, 0, None, 0, None, 1),
'getHardwareVersionString': (0xe6, 32, '>32s', 0, None, 1),
'getSerialNumber': (0xed, 4, '>I', 0, None, 1),
'setLEDColor': (0xee, 0, None, 12, '>fff', 1),
'getLEDColor': (0xef, 12, '>fff', 0, None, 1),
'setJoystickAndMousePresentRemoved': (0xfd, 0, None, 2, '>BB', 1),
'getJoystickAndMousePresentRemoved': (0xfe, 2, '>B', 0, None, 1),
'null': (0xff, 0, None, 0, None, 1)
}
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try: # if this attribute exists, the instance was set up before
check = self.stream_parse
reinit = True
# _print("sensor reinit!!!")
except:
self._setupBaseVariables()
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def _setupBaseVariables(self):
self.serial_number_hex = '{0:08X}'.format(self.serial_number)
self.stream_timing = None
self.stream_parse = None
self.stream_slot_cmds = ['null'] * 8
self.stream_last_data = None
self.stream_data = []
self.record_data = False
self.data_loop = False
def _setupProtocolHeader(self, success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
protocol_header = _generateProtocolHeader( success_failure,
timestamp,
command_echo,
checksum,
logical_id,
serial_number,
data_length)
protocol_byte, self.header_parse, self.header_idx_lst = protocol_header
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
if d_header != protocol_byte:
self.f7WriteRead('_setWiredResponseHeaderBitfield', protocol_byte)
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
if d_header != protocol_byte:
print("!!!!!fail d_header={0}, protocol_header_byte={1}".format(d_header, protocol_byte))
raise Exception
def _setupThreadedReadLoop(self):
self.read_lock = threading.Condition(threading.Lock())
self.read_queue = collections.deque()
self.read_dict = {}
self.data_loop = True
self.read_thread = threading.Thread(target=self._dataReadLoop)
self.read_thread.daemon = True
self.read_thread.start()
def __repr__(self):
return "<YEI3Space {0}:{1}>".format(self.device_type, self.serial_number_hex)
def __str__(self):
return self.__repr__()
def close(self):
self.data_loop = False
if self.serial_port:
self.serial_port.close()
self.serial_port = None
self.read_thread.join()
def reconnect(self):
self.close()
if not tryPort(self.port_name):
_print("tryport fail")
try:
serial_port = serial.Serial(self.port_name, baudrate=self.baudrate, timeout=0.5, writeTimeout=0.5)
serial_port.applySettingsDict(self.serial_port_settings)
self.serial_port = serial_port
except:
traceback.print_exc()
return False
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
return True
# Wired Old Protocol WriteRead
def f7WriteRead(self, command, input_list=None):
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf7, None, cmd_byte, packed_data)
self.serial_port.write(write_array)
if out_struct:
output_data = self.serial_port.read(out_len)
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) != 1:
return rtn_list
return rtn_list[0]
# requires the dataloop, do not call
# Wired New Protocol WriteRead
def f9WriteRead(self, command, input_list=None):
global global_counter
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
if self.compatibility < compatibility:
raise Exception("Firmware for device on ( %s ) is out of date for this function. Recommend updating to latest firmware." % self.serial_port.name)
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf9, None, cmd_byte, packed_data)
self.read_lock.acquire()
uid = global_counter
global_counter += 1
try:
self.serial_port.write(write_array) # release in reader thread
except serial.SerialTimeoutException:
self.read_lock.release()
self.serial_port.close()
# _print("SerialTimeoutException!!!!")
# !!!!!Reconnect
return (True, None, None)
except ValueError:
try:
# _print("trying to open it back up!!!!")
self.serial_port.open()
# _print("aaand open!!!!")
except serial.SerialException:
self.read_lock.release()
# _print("SerialTimeoutException!!!!")
# !!!!!Reconnect
return (True, None, None)
queue_packet = (uid, cmd_byte)
timeout_time = 0.5 + (len(self.read_queue) * 0.150) # timeout increases as queue gets larger
self.read_queue.append(queue_packet)
start_time = time.clock() + timeout_time
read_data = None
while(timeout_time > 0):
self.read_lock.wait(timeout_time)
read_data = self.read_dict.get(uid, None)
if read_data is not None:
break
            timeout_time = start_time - time.clock()
# _print("Still waiting {0} {1} {2}".format(uid, command, timeout_time))
else:
# _print("Operation timed out!!!!")
try:
self.read_queue.remove(queue_packet)
except:
traceback.print_exc()
self.read_lock.release()
return (True, None, None)
self.read_lock.release()
del self.read_dict[uid]
header_list, output_data = read_data
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
if cmd_echo != cmd_byte:
# _print("!!!!!!!!cmd_echo!=cmd_byte!!!!!")
# _print('cmd_echo= 0x{0:02x} cmd_byte= 0x{1:02x}'.format(cmd_echo, cmd_byte))
return (True, timestamp, None)
rtn_list = None
if not fail_byte:
if out_struct:
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
else:
# _print("fail_byte!!!!triggered")
pass
return (fail_byte, timestamp, rtn_list)
writeRead = f9WriteRead
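    # Usage sketch: writeRead hands back a (fail_byte, timestamp, data)
    # triple, so a caller typically unpacks it as
    #   fail, t_stamp, quat = sensor.writeRead('getTaredOrientationAsQuaternion')
    # where quat is a 4-float tuple on success and None when fail_byte is set.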
    def isConnected(self, try_reconnect=False):
        try:
            serial_number = self.getSerialNumber()  # avoid shadowing the serial module
            if serial_number is not None:
                return True
        except Exception:
            pass
        return False
## generated functions USB and WL_ and DNG and EM_ and DL_ and BT_
## 85(0x55)
def stopStreaming(self):
fail_byte, t_stamp, data = self.writeRead('stopStreaming')
return not fail_byte
## 86(0x56)
def startStreaming(self):
fail_byte, t_stamp, data = self.writeRead('startStreaming')
return not fail_byte
## 95(0x5f)
def updateCurrentTimestamp(self, time, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('updateCurrentTimestamp', time)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 196(0xc4)
def setLEDMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setLEDMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 200(0xc8)
def getLEDMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getLEDMode')
if timestamp:
return (data, t_stamp)
return data
## 223(0xdf)
def getFirmwareVersionString(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getFirmwareVersionString')
data = convertString(data)
if timestamp:
return (data, t_stamp)
return data
## 225(0xe1)
def commitSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 230(0xe6)
def getHardwareVersionString(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getHardwareVersionString')
data = convertString(data)
if timestamp:
return (data, t_stamp)
return data
## 237(0xed)
def getSerialNumber(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSerialNumber')
if timestamp:
return (data, t_stamp)
return data
## 238(0xee)
def setLEDColor(self, rgb, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setLEDColor', rgb)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 239(0xef)
def getLEDColor(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getLEDColor')
if timestamp:
return (data, t_stamp)
return data
## 253(0xfd)
def setJoystickAndMousePresentRemoved(self, joystick, mouse, timestamp=False):
arg_list = (joystick, mouse)
fail_byte, t_stamp, data = self.writeRead('setJoystickAndMousePresentRemoved', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 254(0xfe)
def getJoystickAndMousePresentRemoved(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickAndMousePresentRemoved')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB and WL_ and DNG and EM_ and DL_ and BT_
class _TSSensor(_TSBase):
command_dict = _TSBase.command_dict.copy()
command_dict.update({
'getTaredOrientationAsQuaternion': (0x0, 16, '>4f', 0, None, 1),
'getTaredOrientationAsEulerAngles': (0x1, 12, '>fff', 0, None, 1),
'getTaredOrientationAsRotationMatrix': (0x2, 36, '>9f', 0, None, 1),
'getTaredOrientationAsAxisAngle': (0x3, 16, '>4f', 0, None, 1),
'getTaredOrientationAsTwoVector': (0x4, 24, '>6f', 0, None, 1),
'getDifferenceQuaternion': (0x5, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsQuaternion': (0x6, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsEulerAngles': (0x7, 12, '>fff', 0, None, 1),
'getUntaredOrientationAsRotationMatrix': (0x8, 36, '>9f', 0, None, 1),
'getUntaredOrientationAsAxisAngle': (0x9, 16, '>4f', 0, None, 1),
'getUntaredOrientationAsTwoVector': (0xa, 24, '>6f', 0, None, 1),
'getTaredTwoVectorInSensorFrame': (0xb, 24, '>6f', 0, None, 1),
'getUntaredTwoVectorInSensorFrame': (0xc, 24, '>6f', 0, None, 1),
'setEulerAngleDecompositionOrder': (0x10, 0, None, 1, '>B', 1),
'setMagnetoresistiveThreshold': (0x11, 0, None, 16, '>fIff', 3),
'setAccelerometerResistanceThreshold': (0x12, 0, None, 8, '>fI', 3),
'offsetWithCurrentOrientation': (0x13, 0, None, 0, None, 3),
'resetBaseOffset': (0x14, 0, None, 0, None, 3),
'offsetWithQuaternion': (0x15, 0, None, 16, '>4f', 3),
'setBaseOffsetWithCurrentOrientation': (0x16, 0, None, 0, None, 3),
'getAllNormalizedComponentSensorData': (0x20, 36, '>9f', 0, None, 1),
'getNormalizedGyroRate': (0x21, 12, '>fff', 0, None, 1),
'getNormalizedAccelerometerVector': (0x22, 12, '>fff', 0, None, 1),
'getNormalizedCompassVector': (0x23, 12, '>fff', 0, None, 1),
'getAllCorrectedComponentSensorData': (0x25, 36, '>9f', 0, None, 1),
'getCorrectedGyroRate': (0x26, 12, '>fff', 0, None, 1),
'getCorrectedAccelerometerVector': (0x27, 12, '>fff', 0, None, 1),
'getCorrectedCompassVector': (0x28, 12, '>fff', 0, None, 1),
'getCorrectedLinearAccelerationInGlobalSpace': (0x29, 12, '>fff', 0, None, 1),
'getTemperatureC': (0x2b, 4, '>f', 0, None, 1),
'getTemperatureF': (0x2c, 4, '>f', 0, None, 1),
'getConfidenceFactor': (0x2d, 4, '>f', 0, None, 1),
'getAllRawComponentSensorData': (0x40, 36, '>9f', 0, None, 1),
'getRawGyroscopeRate': (0x41, 12, '>fff', 0, None, 1),
'getRawAccelerometerData': (0x42, 12, '>fff', 0, None, 1),
'getRawCompassData': (0x43, 12, '>fff', 0, None, 1),
'_setStreamingSlots': (0x50, 0, None, 8, '>8B', 1),
'_getStreamingSlots': (0x51, 8, '>8B', 0, None, 1),
'_setStreamingTiming': (0x52, 0, None, 12, '>III', 1),
'_getStreamingTiming': (0x53, 12, '>III', 0, None, 1),
'_getStreamingBatch': (0x54, 0, None, 0, None, 1),
'tareWithCurrentOrientation': (0x60, 0, None, 0, None, 1),
'tareWithQuaternion': (0x61, 0, None, 16, '>4f', 1),
'tareWithRotationMatrix': (0x62, 0, None, 36, '>9f', 1),
'setStaticAccelerometerTrustValue': (0x63, 0, None, 4, '>f', 2),
'setConfidenceAccelerometerTrustValues': (0x64, 0, None, 8, '>ff', 2),
'setStaticCompassTrustValue': (0x65, 0, None, 4, '>f', 2),
'setConfidenceCompassTrustValues': (0x66, 0, None, 8, '>ff', 2),
'setDesiredUpdateRate': (0x67, 0, None, 4, '>I', 1),
'setReferenceVectorMode': (0x69, 0, None, 1, '>B', 1),
'setOversampleRate': (0x6a, 0, None, 1, '>B', 1),
'setGyroscopeEnabled': (0x6b, 0, None, 1, '>B', 1),
'setAccelerometerEnabled': (0x6c, 0, None, 1, '>B', 1),
'setCompassEnabled': (0x6d, 0, None, 1, '>B', 1),
'setAxisDirections': (0x74, 0, None, 1, '>B', 1),
'setRunningAveragePercent': (0x75, 0, None, 4, '>f', 1),
'setCompassReferenceVector': (0x76, 0, None, 12, '>fff', 1),
'setAccelerometerReferenceVector': (0x77, 0, None, 12, '>fff', 1),
'resetKalmanFilter': (0x78, 0, None, 0, None, 1),
'setAccelerometerRange': (0x79, 0, None, 1, '>B', 1),
'setFilterMode': (0x7b, 0, None, 1, '>B', 1),
'setRunningAverageMode': (0x7c, 0, None, 1, '>B', 1),
'setGyroscopeRange': (0x7d, 0, None, 1, '>B', 1),
'setCompassRange': (0x7e, 0, None, 1, '>B', 1),
'getTareAsQuaternion': (0x80, 16, '>4f', 0, None, 1),
'getTareAsRotationMatrix': (0x81, 36, '>9f', 0, None, 1),
'getAccelerometerTrustValues': (0x82, 8, '>ff', 0, None, 2),
'getCompassTrustValues': (0x83, 8, '>ff', 0, None, 2),
'getCurrentUpdateRate': (0x84, 4, '>I', 0, None, 1),
'getCompassReferenceVector': (0x85, 12, '>fff', 0, None, 1),
'getAccelerometerReferenceVector': (0x86, 12, '>fff', 0, None, 1),
'getGyroscopeEnabledState': (0x8c, 1, '>B', 0, None, 1),
'getAccelerometerEnabledState': (0x8d, 1, '>B', 0, None, 1),
'getCompassEnabledState': (0x8e, 1, '>B', 0, None, 1),
'getAxisDirections': (0x8f, 1, '>B', 0, None, 1),
'getOversampleRate': (0x90, 1, '>B', 0, None, 1),
'getRunningAveragePercent': (0x91, 4, '>f', 0, None, 1),
'getDesiredUpdateRate': (0x92, 4, '>I', 0, None, 1),
'getAccelerometerRange': (0x94, 1, '>B', 0, None, 1),
'getFilterMode': (0x98, 1, '>B', 0, None, 1),
'getRunningAverageMode': (0x99, 1, '>B', 0, None, 1),
'getGyroscopeRange': (0x9a, 1, '>B', 0, None, 1),
'getCompassRange': (0x9b, 1, '>B', 0, None, 1),
'getEulerAngleDecompositionOrder': (0x9c, 1, '>B', 0, None, 1),
'getMagnetoresistiveThreshold': (0x9d, 16, '>fIff', 0, None, 3),
'getAccelerometerResistanceThreshold': (0x9e, 8, '>fI', 0, None, 3),
'getOffsetOrientationAsQuaternion': (0x9f, 16, '>4f', 0, None, 3),
'setCompassCalibrationCoefficients': (0xa0, 0, None, 48, '>12f', 1),
'setAccelerometerCalibrationCoefficients': (0xa1, 0, None, 48, '>12f', 1),
'getCompassCalibrationCoefficients': (0xa2, 48, '>12f', 0, None, 1),
'getAccelerometerCalibrationCoefficients': (0xa3, 48, '>12f', 0, None, 1),
'getGyroscopeCalibrationCoefficients': (0xa4, 48, '>12f', 0, None, 1),
'beginGyroscopeAutoCalibration': (0xa5, 0, None, 0, None, 1),
'setGyroscopeCalibrationCoefficients': (0xa6, 0, None, 48, '>12f', 1),
'setCalibrationMode': (0xa9, 0, None, 1, '>B', 1),
'getCalibrationMode': (0xaa, 1, '>B', 0, None, 1),
'setOrthoCalibrationDataPointFromCurrentOrientation': (0xab, 0, None, 0, None, 1),
'setOrthoCalibrationDataPointFromVector': (0xac, 0, None, 14, '>BBfff', 1),
'getOrthoCalibrationDataPoint': (0xad, 12, '>fff', 2, '>BB', 1),
'performOrthoCalibration': (0xae, 0, None, 0, None, 1),
'clearOrthoCalibrationData': (0xaf, 0, None, 0, None, 1),
'setSleepMode': (0xe3, 0, None, 1, '>B', 1),
'getSleepMode': (0xe4, 1, '>B', 0, None, 1),
'setJoystickEnabled': (0xf0, 0, None, 1, '>B', 1),
'setMouseEnabled': (0xf1, 0, None, 1, '>B', 1),
'getJoystickEnabled': (0xf2, 1, '>B', 0, None, 1),
'getMouseEnabled': (0xf3, 1, '>B', 0, None, 1),
'setControlMode': (0xf4, 0, None, 3, '>BBB', 1),
'setControlData': (0xf5, 0, None, 7, '>BBBf', 1),
'getControlMode': (0xf6, 1, '>B', 2, '>BB', 1),
'getControlData': (0xf7, 4, '>f', 3, '>BBB', 1),
'setMouseAbsoluteRelativeMode': (0xfb, 0, None, 1, '>B', 1),
'getMouseAbsoluteRelativeMode': (0xfc, 1, '>B', 0, None, 1)
})
    reverse_command_dict = {v[0]: k for k, v in command_dict.items()}
_device_types = ["!BASE"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port:
            if isinstance(com_port, str):
                port_name = com_port
            elif isinstance(com_port, ComInfo):
                port_name = com_port.com_port
            else:
                _print("An erroneous parameter was passed in")
                return None
            if baudrate not in _allowed_baudrates:
                baudrate = _baudrate
                _print("Error: baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
return _generateSensorClass(new_inst, serial_port, _TSSensor._device_types)
        _print('Error: serial port could not be opened')
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try:  # if stream_parse already exists, this instance was initialized before
            check = self.stream_parse
            reinit = True
            # _print("sensor reinit!!!")
        except AttributeError:
self._setupBaseVariables()
self.callback_func = None
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.latest_lock = threading.Condition(threading.Lock())
self.new_data = False
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def _queueWriteRead(self, rtn_dict, rtn_key, retries, command, input_list=None):
try:
for i in range(retries):
packet = self.writeRead(command, input_list)
if packet[0]:
# _print("##Attempt: {0} complete".format(i))
time.sleep(0.1)
continue
rtn_dict[rtn_key] = packet
break
else:
# _print("sensor failed to succeed")
rtn_dict[rtn_key] = (True, None, None)
        except KeyboardInterrupt:
            print('\n! Received keyboard interrupt, quitting threads.\n')
            raise  # re-raise so a worker thread does not swallow the interrupt
def queueWriteRead(self, rtn_dict, rtn_key, retries, command, input_list=None):
return threading.Thread(target=self._queueWriteRead, args=(rtn_dict, rtn_key, retries, command, input_list))
def _generateStreamParse(self):
stream_string = '>'
if self.stream_slot_cmds is None:
self.getStreamingSlots()
for slot_cmd in self.stream_slot_cmds:
            if slot_cmd != 'null':
out_struct = self.command_dict[slot_cmd][2]
stream_string += out_struct[1:] # stripping the >
self.stream_parse = struct.Struct(stream_string)
# Set streaming batch command
self.command_dict['_getStreamingBatch'] = (0x54, self.stream_parse.size, stream_string, 0, None, 1)
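    # For example (a sketch of what this builds): with the slots
    # ('getTaredOrientationAsQuaternion', 'getNormalizedGyroRate') and the rest
    # 'null', the out_structs '>4f' and '>fff' are concatenated into the
    # stream_string '>4ffff', so each streaming batch unpacks to seven floats
    # (the four quaternion components followed by the three gyro rates).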
def _parseStreamData(self, protocol_data, output_data):
rtn_list = self.stream_parse.unpack(output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
self.latest_lock.acquire()
self.new_data = True
self.latest_lock.notify()
self.latest_lock.release()
data = (protocol_data, rtn_list)
self.stream_last_data = data
if self.record_data:
self.stream_data.append(data)
if self.callback_func:
self.callback_func(data)
def _dataReadLoop(self):
while self.data_loop:
try:
self._readDataWiredProHeader()
            except KeyboardInterrupt:
                print('\n! Received keyboard interrupt, quitting threads.\n')
                raise  # re-raise so a worker thread does not swallow the interrupt
            except Exception:
                # traceback.print_exc()
                # _print("bad _parseStreamData parse")
                # _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
                self._read_data = None
                try:
                    self.read_lock.release()
                except RuntimeError:  # the lock was not held
                    pass
def _readDataWiredProHeader(self):
_serial_port = self.serial_port
# in_wait = _serial_port.inWaiting()
# if in_wait:
# _print('!666! inWaiting = {0}'.format(in_wait))
header_bytes = _serial_port.read(self.header_parse.size)
if header_bytes:
if self.timestamp_mode == TSS_TIMESTAMP_SENSOR:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader71(header_data)
elif self.timestamp_mode == TSS_TIMESTAMP_SYSTEM:
                sys_timestamp = time.clock()  # time the packet was parsed; it may have sat in the system buffer for a few ms
sys_timestamp *= 1000000
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader69(header_data, sys_timestamp)
else:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader69(header_data, None)
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
output_data = _serial_port.read(data_size)
            if cmd_echo == 0xff:
if data_size:
self._parseStreamData(timestamp, output_data)
return
self.read_lock.acquire()
            if len(self.read_queue):  # non-empty check works around a bug: a packet can arrive with nothing queued
uid, cmd_byte = self.read_queue.popleft()
if cmd_byte == cmd_echo:
self.read_dict[uid] = (header_list, output_data)
self.read_lock.notify() # dies in 3 seconds if there is a writeRead in wait
else:
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_queue.appendleft((uid, cmd_byte))
self.read_lock.release()
return
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_lock.release()
    def getLatestStreamData(self, timeout):
        self.latest_lock.acquire()
        self.new_data = False
        self.latest_lock.wait(timeout)
        has_new_data = self.new_data  # read the flag while still holding the lock
        self.latest_lock.release()
        if has_new_data:
            return self.stream_last_data
def setNewDataCallBack(self, callback):
self.callback_func = callback
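    # Callback sketch (hypothetical function name): the callback receives the
    # same (protocol_data, unpacked_values) tuple that is stored in
    # stream_last_data, and it runs on the reader thread, so it should return
    # quickly.
    #
    #   def on_stream_data(data):
    #       protocol_data, values = data
    #       print(values)
    #
    #   sensor.setNewDataCallBack(on_stream_data)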
def startRecordingData(self):
self.record_data = True
def stopRecordingData(self):
self.record_data = False
def clearRecordingData(self):
        self.stream_data = []
# Convenience functions to replace commands 244(0xf4) and 245(0xf5)
def setGlobalAxis(self, hid_type, config_axis, local_axis, global_axis, deadzone, scale, power):
""" Sets an axis of the desired emulated input device as a 'Global Axis'
style axis. Axis operating under this style use a reference vector
and a consitent local vector to determine the state of the device's
axis. As the local vector rotates, it is projected onto the global
vector. Once the distance of that projection on the global vector
exceeds the inputted "deadzone", the device will begin tranmitting
non-zero values for the device's desired axis.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
@param local_axis: A list of 3 Floats whose value is a normalized
Vector3. This vector represents the sensor's local vector to
track.
@param global_axis: A list of 3 Floats whose value is a normalized
Vector3. This vector represents the global vector to project the
                local vector onto (should be orthogonal to the local vector).
@param deadzone: A float that defines the minimum distance necessary
for the device's axis to read a non-zero value.
@param scale: A float that defines the linear scale for the values
being returned for the axis.
            @param power: A float whose value is an exponential power used to
further modify data being returned from the sensor.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
        try:
            cntl_idx = axis_idx.index(config_axis)
        except ValueError:
            _print("Invalid command for config_axis: {0:s}".format(config_axis))
            return False
# Set mode
if not self.setControlMode(cntl_class, cntl_idx, 0):
return False
# Create data array
data_array = local_axis + global_axis + [deadzone, scale, power]
# Set data
for i in range(len(data_array)):
if not self.setControlData(cntl_class, cntl_idx, i, data_array[i]):
return False
return True
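    # Example sketch (the numeric values are illustrative, not calibrated):
    # map rotation of the sensor's local X axis toward global -Z onto the
    # joystick's X axis.
    #
    #   sensor.setGlobalAxis(TSS_JOYSTICK, "X", [1, 0, 0], [0, 0, -1],
    #                        deadzone=0.1, scale=1.0, power=1.0)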
def setScreenPointAxis(self, hid_type, config_axis, dist_from_screen, dist_on_axis, collision_component, sensor_dir, button_halt):
""" Sets an axis of the desired emulated input device as a 'Screen Point
Axis' style axis. An axis operating under this style projects a
vector along the sensor's direction vector into a mathmatical plane.
The collision point on the plane is then used to determine what the
device's axis's current value is. The direction vector is rotated
based on the orientation of the sensor.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as dist_on_axis.
@param dist_on_axis: A float whose value is the real world length of
the axis along the user's screen (width of screen for x-axis,
height of screen for y-axis). Must be the same units as
dist_from_screen.
@param collision_component: A string whose value may be 'X', 'Y', or
'Z'. This string defines what component of the look vector's
collision point on the virtual plane to use for manipulating the
device's axis.
@param sensor_dir: A string whose value may be 'X', 'Y', or 'Z'.
This string defines which of the sensor's local axis to use for
creating the vector to collide with the virtual plane.
@param button_halt: A float whose value is a pause time in
milliseconds. When a button is pressed on the emulated device,
transmission of changes to the axis is paused for the inputted
amount of time to prevent undesired motion detection when
pressing buttons.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
        try:
            cntl_idx = axis_idx.index(config_axis)
        except ValueError:
            _print("Invalid command for config_axis: {0:s}".format(config_axis))
            return False
# Set mode
if not self.setControlMode(cntl_class, cntl_idx, 1):
return False
# Create data array
axis_idx = ["X", "Y", "Z"]
data_array = []
data_array.append(dist_from_screen)
data_array.append(dist_on_axis)
collision_component = collision_component.upper()
        try:
            data_array.append(axis_idx.index(collision_component))
        except ValueError:
            _print("Invalid command for collision_component: {0:s}".format(collision_component))
            return False
sensor_dir = sensor_dir.upper()
        try:
            data_array.append(axis_idx.index(sensor_dir))
        except ValueError:
            _print("Invalid command for sensor_dir: {0:s}".format(sensor_dir))
            return False
data_array.append(0)
data_array.append(0)
data_array.append(0)
data_array.append(button_halt)
data_array.append(0)
data_array.append(0)
# Set data
for i in range(len(data_array)):
if not self.setControlData(cntl_class, cntl_idx, i, data_array[i]):
return False
return True
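    # Example sketch (the measurements are illustrative): configure the mouse X
    # axis for a screen 50 cm wide viewed from 60 cm away, colliding the
    # sensor's local Z vector with the screen plane and reading the collision
    # point's X component, with a 50 ms pause after button presses.
    #
    #   sensor.setScreenPointAxis(TSS_MOUSE, "X", 60.0, 50.0, "X", "Z", 50)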
def disableAxis(self, hid_type, config_axis):
""" Disables an axis on the passed in device.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param config_axis: A string whose value may be either 'X' or 'Y'
for a mouse or 'X', 'Y', or 'Z' for a joystick. This string
defines what axis of the device is to be configured.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = hid_type
# Set index
axis_idx = ["X", "Y", "Z"]
if cntl_class == TSS_MOUSE:
axis_idx.pop(-1)
config_axis = config_axis.upper()
cntl_idx = -1
        try:
            cntl_idx = axis_idx.index(config_axis)
        except ValueError:
            _print("Invalid command for config_axis: {0:s}".format(config_axis))
            return False
# Set mode
return self.setControlMode(cntl_class, cntl_idx, 255)
def setPhysicalButton(self, hid_type, button_idx, button_bind):
""" Binds a sensor's physical button to an emulated device's button.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
            @param button_bind: An integer whose value defines which physical
                button to bind to the emulated device's button (as defined by
                button_idx), either TSS_BUTTON_LEFT or TSS_BUTTON_RIGHT.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 0):
return False
# Create data
if button_bind != TSS_BUTTON_LEFT and button_bind != TSS_BUTTON_RIGHT:
_print("Invalid command for button_bind: {0:d}".format(button_bind))
return False
data = button_bind
# Set data
return self.setControlData(cntl_class, button_idx, 0, data)
def setOrientationButton(self, hid_type, button_idx, local_axis, global_axis, max_dist):
""" Sets up a device's button such that it is 'pressed' when a reference
vector aligns itself with a local vector.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
@param local_axis: A list of 3 floats whose value represents a
normalized Vector3. This vector represents the sensor's local
vector to track.
@param global_axis: A list of 3 floats whose value is a normalized
Vector3. This vector represents the global vector to move the
                local vector towards for "pressing" (should not be collinear to
the local vector).
@param max_dist: A float whose value defines how close the local
vector's orientation must be to the global vector for the button
to be 'pressed'.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 1):
return False
# Create data array
data_array = local_axis + global_axis + [max_dist]
# Set data
for i in range(7):
if not self.setControlData(cntl_class, button_idx, i, data_array[i]):
return False
return True
def setShakeButton(self, hid_type, button_idx, threshold):
""" Sets up an emulated device's button such that it is 'pressed' when
the sensor is shaken.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
@param threshold: A float whose value defines how many Gs of force
must be experienced by the sensor before the button is
'pressed'.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
if not self.setControlMode(cntl_class, button_idx, 2):
return False
# Create data array
data_array = [0, 0, 0, threshold]
# Set data
for i in range(4):
if not self.setControlData(cntl_class, button_idx, i, data_array[i]):
return False
return True
def disableButton(self, hid_type, button_idx):
""" Disables a button on the passed in emulated device.
@param hid_type: An integer whose value defines whether the device
in question is a TSS_JOYSTICK or TSS_MOUSE.
@param button_idx: An integer whose value defines which button on
the emulated device to configure. Default range is 0 through 7.
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
# Set class
if hid_type != TSS_JOYSTICK and hid_type != TSS_MOUSE:
_print("Invalid command for hid_type: {0:d}".format(hid_type))
return False
cntl_class = 1 + hid_type
# Set mode
return self.setControlMode(cntl_class, button_idx, 255)
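    # Example sketch combining the button helpers (threshold values are
    # illustrative): bind the physical left button to button 0, make button 1 a
    # shake button at 1.5 G, and disable button 2.
    #
    #   sensor.setPhysicalButton(TSS_JOYSTICK, 0, TSS_BUTTON_LEFT)
    #   sensor.setShakeButton(TSS_JOYSTICK, 1, 1.5)
    #   sensor.disableButton(TSS_JOYSTICK, 2)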
    # Convenience functions for setting up simple mouse/joystick implementations
def setupSimpleMouse(self, diagonal_size, dist_from_screen, aspect_ratio, is_relative=True):
""" Creates a simple emulated mouse device using the features of the
sensor. Left button and right button emulate the mouse's left and
right buttons respectivly and using the sensor as a pointing device
with the front of the device facing towards the screen will move the
mouse cursor.
@param diagonal_size: A float whose value is the real world diagonal
size of the user's screen.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as diagonal_size.
@param aspect_ratio: A float whose value is the real world aspect
ratio of the user's screen.
@param is_relative: A boolean whose value expresses whether the
mouse is to operate in relative mode (True) or absolute mode
(False).
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
        cur_mouse_rel = self.getMouseAbsoluteRelativeMode()
        if cur_mouse_rel != is_relative:
            if self.setMouseAbsoluteRelativeMode(is_relative):
                # the mode change only takes effect after a software reset
                fail_byte, t_stamp, data = self.writeRead('softwareReset')
                if not fail_byte:
                    while self.getSerialNumber():  # wait for the device to stop responding
                        pass
                    self.close()
                    time.sleep(5)
                    while not self.reconnect():  # retry until the device is back
                        pass
        unit_hyp = (aspect_ratio ** 2 + 1) ** 0.5
        screen_multiplier = diagonal_size / unit_hyp
        screen_width = screen_multiplier * aspect_ratio
        screen_height = screen_multiplier
        _print("Height: {0:.2f}".format(screen_height))
        _print("Width: {0:.2f}".format(screen_width))
self.setScreenPointAxis(TSS_MOUSE, "X", dist_from_screen, screen_width, "X", "Z", 50)
self.setScreenPointAxis(TSS_MOUSE, "Y", dist_from_screen, screen_height, "Y", "Z", 50)
self.setPhysicalButton(TSS_MOUSE, 0, TSS_BUTTON_LEFT)
self.setPhysicalButton(TSS_MOUSE, 1, TSS_BUTTON_RIGHT)
self.disableButton(TSS_MOUSE, 2)
self.disableButton(TSS_MOUSE, 3)
self.disableButton(TSS_MOUSE, 4)
self.disableButton(TSS_MOUSE, 5)
self.disableButton(TSS_MOUSE, 6)
self.disableButton(TSS_MOUSE, 7)
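    # Usage sketch (a 23-inch 16:9 monitor viewed from 24 inches; the units
    # only need to be consistent with each other):
    #
    #   sensor.setupSimpleMouse(diagonal_size=23.0, dist_from_screen=24.0,
    #                           aspect_ratio=16.0 / 9.0, is_relative=False)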
def setupSimpleJoystick(self, deadzone, scale, power, shake_threshold, max_dist):
""" Creates a simple emulated joystick device using the features of the
sensor. The left and right physical buttons on the sensor act as
buttons 0 and 1 for the joystick. Button 2 is a shake button.
Buttons 3 and 4 are pressed when the sensor is rotated +-90 degrees
on the Z-axis. Rotations on the sensor's Y and X axis correspond to
movements on the joystick's X and Y axis.
@param deadzone: A float that defines the minimum distance necessary
for the device's axis to read a non-zero value.
@param scale: A float that defines the linear scale for the values
being returned for the axis.
            @param power: A float whose value is an exponential power used to
further modify data being returned from the sensor.
@param shake_threshold: A float whose value defines how many Gs of
force must be experienced by the sensor before the button 2 is
'pressed'.
            @param max_dist: A float whose value defines how close the local
                vector's orientation must be to the global vector before buttons
                3 and 4 are "pressed".
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
self.setGlobalAxis(TSS_JOYSTICK, "X", [1, 0, 0], [0, 0, -1], deadzone, scale, power)
self.setGlobalAxis(TSS_JOYSTICK, "Y", [0, 1, 0], [0, 0, -1], deadzone, scale, power)
self.setPhysicalButton(TSS_JOYSTICK, 0, TSS_BUTTON_LEFT)
self.setPhysicalButton(TSS_JOYSTICK, 1, TSS_BUTTON_RIGHT)
self.setShakeButton(TSS_JOYSTICK, 2, shake_threshold)
self.setOrientationButton(TSS_JOYSTICK, 3, [0, 1, 0], [-1, 0, 0], max_dist)
self.setOrientationButton(TSS_JOYSTICK, 4, [0, 1, 0], [1, 0, 0], max_dist)
self.disableButton(TSS_JOYSTICK, 5)
self.disableButton(TSS_JOYSTICK, 6)
self.disableButton(TSS_JOYSTICK, 7)
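    # Usage sketch (illustrative tuning values, not calibrated defaults):
    #
    #   sensor.setupSimpleJoystick(deadzone=0.05, scale=1.0, power=1.5,
    #                              shake_threshold=1.2, max_dist=0.2)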
# LightGun Functions
def setupSimpleLightgun(self, diagonal_size, dist_from_screen, aspect_ratio, is_relative=True):
""" Creates a simple emulated mouse based lightgun device using the
features of the sensor. Left button of the sensor emulates the
mouse's left button. Shaking the sensor emulates the mouse's right
button. This configuration uses the sensor as a pointing device with
the front of the device facing forward the screen will move the
mouse cursor.
@param diagonal_size: A float whose value is the real world diagonal
size of the user's screen.
@param dist_from_screen: A float whose value is the real world
distance the sensor is from the user's screen. Must be the same
units as diagonal_size.
@param aspect_ratio: A float whose value is the real world aspect
ratio of the user's screen.
@param is_relative: A boolean whose value expresses whether the
mouse is to operate in relative mode (True) or absolute mode
(False).
            @return: True if the command was successfully written to the device.
False if the command was not written.
"""
        cur_mouse_rel = self.getMouseAbsoluteRelativeMode()
        if cur_mouse_rel != is_relative:
            if self.setMouseAbsoluteRelativeMode(is_relative):
                # the mode change only takes effect after a software reset
                fail_byte, t_stamp, data = self.writeRead('softwareReset')
                if not fail_byte:
                    while self.getSerialNumber():  # wait for the device to stop responding
                        pass
                    self.close()
                    time.sleep(5)
                    while not self.reconnect():  # retry until the device is back
                        pass
        unit_hyp = (aspect_ratio ** 2 + 1) ** 0.5
        screen_multiplier = diagonal_size / unit_hyp
        screen_width = screen_multiplier * aspect_ratio
        screen_height = screen_multiplier
        _print("Height: {0:.2f}".format(screen_height))
        _print("Width: {0:.2f}".format(screen_width))
self.setScreenPointAxis(TSS_MOUSE, "X", dist_from_screen, screen_width, "X", "Z", 50)
self.setScreenPointAxis(TSS_MOUSE, "Y", dist_from_screen, screen_height, "Y", "Z", 50)
self.setPhysicalButton(TSS_MOUSE, 0, TSS_BUTTON_LEFT)
self.setShakeButton(TSS_MOUSE, 1, 1.0)
self.disableButton(TSS_MOUSE, 2)
self.disableButton(TSS_MOUSE, 3)
self.disableButton(TSS_MOUSE, 4)
self.disableButton(TSS_MOUSE, 5)
self.disableButton(TSS_MOUSE, 6)
self.disableButton(TSS_MOUSE, 7)
## 80(0x50)
def setStreamingSlots(self, slot0='null',
slot1='null',
slot2='null',
slot3='null',
slot4='null',
slot5='null',
slot6='null',
slot7='null'):
slots = [slot0, slot1, slot2, slot3, slot4, slot5, slot6, slot7]
slot_bytes = []
for slot in slots:
cmd_byte = self.command_dict[slot][0]
slot_bytes.append(cmd_byte)
fail_byte, timestamp, filler = self.writeRead('_setStreamingSlots', slot_bytes)
self.stream_slot_cmds = slots
self._generateStreamParse()
return not fail_byte
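    # Example sketch: stream the tared quaternion and the corrected gyro rate;
    # unset slots default to 'null'. Slot names must be keys of command_dict.
    #
    #   sensor.setStreamingSlots('getTaredOrientationAsQuaternion',
    #                            'getCorrectedGyroRate')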
## 81(0x51)
def getStreamingSlots(self):
if self.stream_slot_cmds is None:
self.stream_slot_cmds = ['null'] * 8
fail_byte, timestamp, slot_bytes = self.writeRead('_getStreamingSlots')
need_update = False
if slot_bytes:
for slot_idx in range(len(self.stream_slot_cmds)):
cmd_byte = slot_bytes[slot_idx]
cmd_string = self.reverse_command_dict[cmd_byte]
if self.stream_slot_cmds[slot_idx] != cmd_string:
self.stream_slot_cmds[slot_idx] = cmd_string
need_update = True
if need_update:
self._generateStreamParse()
return self.stream_slot_cmds
## 82(0x52)
def setStreamingTiming(self, interval, duration, delay, timestamp=False):
arg_list = (interval, duration, delay)
fail_byte, t_stamp, data = self.writeRead('_setStreamingTiming', arg_list)
if not fail_byte:
self.stream_timing = arg_list
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
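    # Example sketch: the interval/duration/delay values are assumed here to be
    # in microseconds, with a duration of 0xFFFFFFFF meaning "stream until
    # stopped" (per the 3-Space streaming protocol as we understand it):
    #
    #   sensor.setStreamingTiming(interval=10000, duration=0xFFFFFFFF, delay=0)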
## 83(0x53)
def getStreamingTiming(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_getStreamingTiming')
if data:
self.stream_timing = data
if timestamp:
return (data, t_stamp)
return data
## 84(0x54)
def getStreamingBatch(self, timestamp=False):
if self.stream_parse is None:
self._generateStreamParse()
fail_byte, t_stamp, data = self.writeRead('_getStreamingBatch')
if timestamp:
return (data, t_stamp)
return data
## 85(0x55)
def stopStreaming(self):
self.record_data = False
fail_byte, timestamp, slot_bytes = self.writeRead('stopStreaming')
return not fail_byte
## 86(0x56)
def startStreaming(self, start_record=False):
self.record_data = start_record
if self.stream_parse is None:
self._generateStreamParse()
fail_byte, timestamp, slot_bytes = self.writeRead('startStreaming')
return not fail_byte
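    # Streaming loop sketch (assumes slots and timing were configured as in the
    # examples above):
    #
    #   sensor.startStreaming()
    #   for _ in range(100):
    #       data = sensor.getLatestStreamData(timeout=1.0)
    #       if data is not None:
    #           protocol_data, values = data
    #           print(values)
    #   sensor.stopStreaming()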
## generated functions USB and WL_ and EM_ and DL_ and BT_
## 0(0x00)
def getTaredOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
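    # The generated getters below all share this shape (sketch): with
    # timestamp=False they return only the unpacked data (None if the sensor
    # reported a failure); with timestamp=True they return a (data, timestamp)
    # pair.
    #
    #   quat = sensor.getTaredOrientationAsQuaternion()             # 4 floats
    #   quat, t_stamp = sensor.getTaredOrientationAsQuaternion(timestamp=True)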
## 1(0x01)
def getTaredOrientationAsEulerAngles(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsEulerAngles')
if timestamp:
return (data, t_stamp)
return data
## 2(0x02)
def getTaredOrientationAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 3(0x03)
def getTaredOrientationAsAxisAngle(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsAxisAngle')
if timestamp:
return (data, t_stamp)
return data
## 4(0x04)
def getTaredOrientationAsTwoVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredOrientationAsTwoVector')
if timestamp:
return (data, t_stamp)
return data
## 5(0x05)
def getDifferenceQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getDifferenceQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 6(0x06)
def getUntaredOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 7(0x07)
def getUntaredOrientationAsEulerAngles(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsEulerAngles')
if timestamp:
return (data, t_stamp)
return data
## 8(0x08)
def getUntaredOrientationAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 9(0x09)
def getUntaredOrientationAsAxisAngle(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsAxisAngle')
if timestamp:
return (data, t_stamp)
return data
## 10(0x0a)
def getUntaredOrientationAsTwoVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredOrientationAsTwoVector')
if timestamp:
return (data, t_stamp)
return data
## 11(0x0b)
def getTaredTwoVectorInSensorFrame(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTaredTwoVectorInSensorFrame')
if timestamp:
return (data, t_stamp)
return data
## 12(0x0c)
def getUntaredTwoVectorInSensorFrame(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUntaredTwoVectorInSensorFrame')
if timestamp:
return (data, t_stamp)
return data
## 16(0x10)
def setEulerAngleDecompositionOrder(self, angle_order, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setEulerAngleDecompositionOrder', angle_order)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 17(0x11)
def setMagnetoresistiveThreshold(self, threshold, trust_frames, lockout_decay, perturbation_detection_value, timestamp=False):
arg_list = (threshold, trust_frames, lockout_decay, perturbation_detection_value)
fail_byte, t_stamp, data = self.writeRead('setMagnetoresistiveThreshold', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 18(0x12)
def setAccelerometerResistanceThreshold(self, threshold, lockout_decay, timestamp=False):
arg_list = (threshold, lockout_decay)
fail_byte, t_stamp, data = self.writeRead('setAccelerometerResistanceThreshold', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 19(0x13)
def offsetWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('offsetWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 20(0x14)
def resetBaseOffset(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('resetBaseOffset')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 21(0x15)
def offsetWithQuaternion(self, quaternion, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('offsetWithQuaternion', quaternion)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 22(0x16)
def setBaseOffsetWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setBaseOffsetWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 32(0x20)
def getAllNormalizedComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllNormalizedComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 33(0x21)
def getNormalizedGyroRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedGyroRate')
if timestamp:
return (data, t_stamp)
return data
## 34(0x22)
def getNormalizedAccelerometerVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedAccelerometerVector')
if timestamp:
return (data, t_stamp)
return data
## 35(0x23)
def getNormalizedCompassVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getNormalizedCompassVector')
if timestamp:
return (data, t_stamp)
return data
## 37(0x25)
def getAllCorrectedComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllCorrectedComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 38(0x26)
def getCorrectedGyroRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedGyroRate')
if timestamp:
return (data, t_stamp)
return data
## 39(0x27)
def getCorrectedAccelerometerVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedAccelerometerVector')
if timestamp:
return (data, t_stamp)
return data
## 40(0x28)
def getCorrectedCompassVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedCompassVector')
if timestamp:
return (data, t_stamp)
return data
## 41(0x29)
def getCorrectedLinearAccelerationInGlobalSpace(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCorrectedLinearAccelerationInGlobalSpace')
if timestamp:
return (data, t_stamp)
return data
## 43(0x2b)
def getTemperatureC(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTemperatureC')
if timestamp:
return (data, t_stamp)
return data
## 44(0x2c)
def getTemperatureF(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTemperatureF')
if timestamp:
return (data, t_stamp)
return data
## 45(0x2d)
def getConfidenceFactor(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getConfidenceFactor')
if timestamp:
return (data, t_stamp)
return data
## 64(0x40)
def getAllRawComponentSensorData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAllRawComponentSensorData')
if timestamp:
return (data, t_stamp)
return data
## 65(0x41)
def getRawGyroscopeRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawGyroscopeRate')
if timestamp:
return (data, t_stamp)
return data
## 66(0x42)
def getRawAccelerometerData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawAccelerometerData')
if timestamp:
return (data, t_stamp)
return data
## 67(0x43)
def getRawCompassData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRawCompassData')
if timestamp:
return (data, t_stamp)
return data
## 96(0x60)
def tareWithCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 97(0x61)
def tareWithQuaternion(self, quaternion, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithQuaternion', quaternion)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 98(0x62)
def tareWithRotationMatrix(self, rotation_matrix, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('tareWithRotationMatrix', rotation_matrix)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 99(0x63)
def setStaticAccelerometerTrustValue(self, trust_value, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setStaticAccelerometerTrustValue', trust_value)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 100(0x64)
def setConfidenceAccelerometerTrustValues(self, min_trust_value, max_trust_value, timestamp=False):
arg_list = (min_trust_value, max_trust_value)
fail_byte, t_stamp, data = self.writeRead('setConfidenceAccelerometerTrustValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 101(0x65)
def setStaticCompassTrustValue(self, trust_value, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setStaticCompassTrustValue', trust_value)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 102(0x66)
def setConfidenceCompassTrustValues(self, min_trust_value, max_trust_value, timestamp=False):
arg_list = (min_trust_value, max_trust_value)
fail_byte, t_stamp, data = self.writeRead('setConfidenceCompassTrustValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 103(0x67)
def setDesiredUpdateRate(self, update_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setDesiredUpdateRate', update_rate)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 105(0x69)
def setReferenceVectorMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setReferenceVectorMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 106(0x6a)
def setOversampleRate(self, samples_per_iteration, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setOversampleRate', samples_per_iteration)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 107(0x6b)
def setGyroscopeEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setGyroscopeEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 108(0x6c)
def setAccelerometerEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 109(0x6d)
def setCompassEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 116(0x74)
def setAxisDirections(self, axis_direction_byte, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAxisDirections', axis_direction_byte)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 117(0x75)
def setRunningAveragePercent(self, running_average_percent, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setRunningAveragePercent', running_average_percent)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 118(0x76)
def setCompassReferenceVector(self, reference_vector, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassReferenceVector', reference_vector)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 119(0x77)
def setAccelerometerReferenceVector(self, reference_vector, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerReferenceVector', reference_vector)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 120(0x78)
def resetKalmanFilter(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('resetKalmanFilter')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 121(0x79)
def setAccelerometerRange(self, accelerometer_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setAccelerometerRange', accelerometer_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 123(0x7b)
def setFilterMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setFilterMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 124(0x7c)
def setRunningAverageMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setRunningAverageMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 125(0x7d)
def setGyroscopeRange(self, gyroscope_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setGyroscopeRange', gyroscope_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 126(0x7e)
def setCompassRange(self, compass_range_setting, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCompassRange', compass_range_setting)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 128(0x80)
def getTareAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTareAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 129(0x81)
def getTareAsRotationMatrix(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getTareAsRotationMatrix')
if timestamp:
return (data, t_stamp)
return data
## 130(0x82)
def getAccelerometerTrustValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerTrustValues')
if timestamp:
return (data, t_stamp)
return data
## 131(0x83)
def getCompassTrustValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassTrustValues')
if timestamp:
return (data, t_stamp)
return data
## 132(0x84)
def getCurrentUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCurrentUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 133(0x85)
def getCompassReferenceVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassReferenceVector')
if timestamp:
return (data, t_stamp)
return data
## 134(0x86)
def getAccelerometerReferenceVector(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerReferenceVector')
if timestamp:
return (data, t_stamp)
return data
## 140(0x8c)
def getGyroscopeEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 141(0x8d)
def getAccelerometerEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 142(0x8e)
def getCompassEnabledState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassEnabledState')
if timestamp:
return (data, t_stamp)
return data
## 143(0x8f)
def getAxisDirections(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAxisDirections')
if timestamp:
return (data, t_stamp)
return data
## 144(0x90)
def getOversampleRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getOversampleRate')
if timestamp:
return (data, t_stamp)
return data
## 145(0x91)
def getRunningAveragePercent(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRunningAveragePercent')
if timestamp:
return (data, t_stamp)
return data
## 146(0x92)
def getDesiredUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getDesiredUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 148(0x94)
def getAccelerometerRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerRange')
if timestamp:
return (data, t_stamp)
return data
## 152(0x98)
def getFilterMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getFilterMode')
if timestamp:
return (data, t_stamp)
return data
## 153(0x99)
def getRunningAverageMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getRunningAverageMode')
if timestamp:
return (data, t_stamp)
return data
## 154(0x9a)
def getGyroscopeRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeRange')
if timestamp:
return (data, t_stamp)
return data
## 155(0x9b)
def getCompassRange(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassRange')
if timestamp:
return (data, t_stamp)
return data
## 156(0x9c)
def getEulerAngleDecompositionOrder(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getEulerAngleDecompositionOrder')
if timestamp:
return (data, t_stamp)
return data
## 157(0x9d)
def getMagnetoresistiveThreshold(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMagnetoresistiveThreshold')
if timestamp:
return (data, t_stamp)
return data
## 158(0x9e)
def getAccelerometerResistanceThreshold(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerResistanceThreshold')
if timestamp:
return (data, t_stamp)
return data
## 159(0x9f)
def getOffsetOrientationAsQuaternion(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getOffsetOrientationAsQuaternion')
if timestamp:
return (data, t_stamp)
return data
## 160(0xa0)
def setCompassCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setCompassCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 161(0xa1)
def setAccelerometerCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setAccelerometerCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 162(0xa2)
def getCompassCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCompassCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 163(0xa3)
def getAccelerometerCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getAccelerometerCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 164(0xa4)
def getGyroscopeCalibrationCoefficients(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getGyroscopeCalibrationCoefficients')
if timestamp:
return (data, t_stamp)
return data
## 165(0xa5)
def beginGyroscopeAutoCalibration(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('beginGyroscopeAutoCalibration')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 166(0xa6)
def setGyroscopeCalibrationCoefficients(self, matrix, bias, timestamp=False):
arg_list = []
arg_list.extend(matrix)
arg_list.extend(bias)
fail_byte, t_stamp, data = self.writeRead('setGyroscopeCalibrationCoefficients', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 169(0xa9)
def setCalibrationMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setCalibrationMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 170(0xaa)
def getCalibrationMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getCalibrationMode')
if timestamp:
return (data, t_stamp)
return data
## 171(0xab)
def setOrthoCalibrationDataPointFromCurrentOrientation(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setOrthoCalibrationDataPointFromCurrentOrientation')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 172(0xac)
def setOrthoCalibrationDataPointFromVector(self, type, index, vector, timestamp=False):
arg_list = (type, index, vector)
fail_byte, t_stamp, data = self.writeRead('setOrthoCalibrationDataPointFromVector', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 173(0xad)
def getOrthoCalibrationDataPoint(self, type, index, timestamp=False):
arg_list = (type, index)
fail_byte, t_stamp, data = self.writeRead('getOrthoCalibrationDataPoint', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 174(0xae)
def performOrthoCalibration(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('performOrthoCalibration')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 175(0xaf)
def clearOrthoCalibrationData(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('clearOrthoCalibrationData')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 227(0xe3)
def setSleepMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setSleepMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 228(0xe4)
def getSleepMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSleepMode')
if timestamp:
return (data, t_stamp)
return data
## 240(0xf0)
def setJoystickEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setJoystickEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 241(0xf1)
def setMouseEnabled(self, enabled, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseEnabled', enabled)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 242(0xf2)
def getJoystickEnabled(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickEnabled')
if timestamp:
return (data, t_stamp)
return data
## 243(0xf3)
def getMouseEnabled(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseEnabled')
if timestamp:
return (data, t_stamp)
return data
## 244(0xf4)
def setControlMode(self, control_class, control_index, handler_index, timestamp=False):
arg_list = (control_class, control_index, handler_index)
fail_byte, t_stamp, data = self.writeRead('setControlMode', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 245(0xf5)
def setControlData(self, control_class, control_index, data_point_index, data_point, timestamp=False):
arg_list = (control_class, control_index, data_point_index, data_point)
fail_byte, t_stamp, data = self.writeRead('setControlData', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 246(0xf6)
def getControlMode(self, control_class, control_index, timestamp=False):
arg_list = (control_class, control_index)
fail_byte, t_stamp, data = self.writeRead('getControlMode', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 247(0xf7)
def getControlData(self, control_class, control_index, handler_index, timestamp=False):
arg_list = (control_class, control_index, handler_index)
fail_byte, t_stamp, data = self.writeRead('getControlData', arg_list)
if timestamp:
return (data, t_stamp)
return data
## 251(0xfb)
def setMouseAbsoluteRelativeMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseAbsoluteRelativeMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 252(0xfc)
def getMouseAbsoluteRelativeMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseAbsoluteRelativeMode')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB and WL_ and EM_ and DL_ and BT_
class TSUSBSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["USB", "USB-HH", "MUSB", "MUSB-HH", "USBWT", "USBWT-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSUSBSensor._device_types)
            _print('Error: serial port could not be created')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions USB
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions USB
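# Usage sketch (hedged; not from the original library). The port name is an
# assumption -- substitute whatever port the sensor enumerates on, e.g. 'COM8'
# on Windows or '/dev/ttyACM0' on Linux:
#
#   sensor = TSUSBSensor(com_port='/dev/ttyACM0')
#   if sensor is not None:
#       print(sensor.getUARTBaudRate())                       # e.g. 115200
#       state, t_stamp = sensor.getButtonState(timestamp=True)
#       sensor.close()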
class TSWLSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'_getWirelessPanID': (0xc0, 2, '>H', 0, None, 1),
'_setWirelessPanID': (0xc1, 0, None, 2, '>H', 1),
'_getWirelessChannel': (0xc2, 1, '>B', 0, None, 1),
'_setWirelessChannel': (0xc3, 0, None, 1, '>B', 1),
'commitWirelessSettings': (0xc5, 0, None, 0, None, 1),
'getWirelessAddress': (0xc6, 2, '>H', 0, None, 1),
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["WL", "WL-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR, logical_id=None, dongle=None):
if com_port is None and logical_id is None and dongle is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
new_inst.dongle = None
new_inst.logical_id = None
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSWLSensor._device_types)
            _print('Error: serial port could not be created')
if logical_id is not None and dongle:
for tries in range(_wireless_retries + 1):
fail_byte, timestamp, serial_number = dongle.faWriteRead(logical_id, 'getSerialNumber')
if not fail_byte:
if serial_number in global_sensorlist:
rtn_inst = global_sensorlist[serial_number]
                        if rtn_inst.dongle:
                            _print("sensor was already paired before")
rtn_inst.dongle = dongle
rtn_inst.logical_id = logical_id
dongle.wireless_table[logical_id] = serial_number
rtn_inst.switchToWirelessMode()
return rtn_inst
else:
new_inst = super(_TSSensor, cls).__new__(cls)
for tries in range(_wireless_retries + 1):
fail_byte, timestamp, hardware_version = dongle.faWriteRead(logical_id, 'getHardwareVersionString')
if not fail_byte:
new_inst.device_type = convertString(hardware_version)[4:-8].strip()
break
else:
new_inst.device_type = "WL"
new_inst.dongle = dongle
new_inst.logical_id = logical_id
new_inst.port_name = ""
new_inst.serial_port_settings = {}
new_inst.serial_port = None
new_inst.switchToWirelessMode()
new_inst.serial_number = serial_number
global_sensorlist[serial_number] = new_inst
return new_inst
_print("raise wireless fail error here")
return None
        _print('this should never happen')
return None
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR, logical_id=None, dongle=None):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try:  # if this attribute exists, the instance was initialized before
            check = self.stream_parse
            reinit = True
            # _print("sensor reinit!!!")
        except AttributeError:
self._setupBaseVariables()
self.callback_func = None
if self.serial_port and not self.data_loop:
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.latest_lock = threading.Condition(threading.Lock())
self.new_data = False
if reinit:
if self.stream_timing is not None:
self.setStreamingTiming(*self.stream_timing)
if self.stream_slot_cmds is not None:
self.setStreamingSlots(*self.stream_slot_cmds)
def close(self):
if self.serial_port is not None:
super(TSWLSensor, self).close()
    def _wirelessWriteRead(self, command, input_list=None):
result = (True, None, None)
for i in range(_wireless_retries + 1):
result = self.dongle.faWriteRead(self.logical_id, command, input_list)
if not result[0]:
break
return result
def switchToWirelessMode(self):
if self.dongle and self.logical_id is not None:
            self.writeRead = self._wirelessWriteRead
self.wireless_com = True
return True
return False
def switchToWiredMode(self):
if self.serial_port:
self.writeRead = self.f9WriteRead
self.wireless_com = False
return True
return False
## 192(0xc0)
def getWirelessPanID(self, timestamp=False):
t_stamp = None
data = None
fail_byte, t_stamp, data = self.writeRead('_getWirelessPanID')
if timestamp:
return (data, t_stamp)
return data
## 193(0xc1)
def setWirelessPanID(self, PanID, timestamp=False):
t_stamp = None
fail_byte = True
if not self.wireless_com:
fail_byte, t_stamp, data = self.writeRead('_setWirelessPanID', PanID)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 194(0xc2)
def getWirelessChannel(self, timestamp=False):
t_stamp = None
data = None
fail_byte, t_stamp, data = self.writeRead('_getWirelessChannel')
if timestamp:
return (data, t_stamp)
return data
## 195(0xc3)
def setWirelessChannel(self, channel, timestamp=False):
t_stamp = None
fail_byte = True
if not self.wireless_com:
fail_byte, t_stamp, data = self.writeRead('_setWirelessChannel', channel)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions WL_
## 197(0xc5)
def commitWirelessSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitWirelessSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 198(0xc6)
def getWirelessAddress(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessAddress')
if timestamp:
return (data, t_stamp)
return data
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions WL_
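# Usage sketch (hedged; not from the original library). A TSWLSensor is
# reachable either over its own serial port (wired) or through a paired
# TSDongle (wireless); logical id 0 and the port name are assumptions:
#
#   dongle = TSDongle(com_port='/dev/ttyACM0')
#   sensor = TSWLSensor(dongle=dongle, logical_id=0)
#   if sensor is not None:
#       print(sensor.getBatteryPercentRemaining())
#       print(sensor.getWirelessChannel())
#       # setWirelessChannel/setWirelessPanID refuse to run over the wireless
#       # link, so a sensor that is also plugged in over serial must switch
#       # to the wired link first:
#       if sensor.switchToWiredMode():
#           sensor.setWirelessChannel(16)
#           sensor.commitWirelessSettings()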
class TSDongle(_TSBase):
command_dict = _TSBase.command_dict.copy()
command_dict.update({
'setWirelessStreamingAutoFlushMode': (0xb0, 0, None, 1, '>B', 1),
'getWirelessStreamingAutoFlushMode': (0xb1, 1, '>B', 0, None, 1),
'_setWirelessStreamingManualFlushBitfield': (0xb2, 0, None, 2, '>H', 1),
'_getWirelessStreamingManualFlushBitfield': (0xb3, 2, '>H', 0, None, 1),
'_getManualFlushSingle': (0xb4, 0, None, 1, '>B', 1),
'_getManualFlushBulk': (0xb5, 0, None, 0, None, 1),
'broadcastSynchronizationPulse': (0xb6, 0, None, 0, None, 1),
'getReceptionBitfield': (0xb7, 2, '>H', 0, None, 1),
'getWirelessPanID': (0xc0, 2, '>H', 0, None, 1),
'setWirelessPanID': (0xc1, 0, None, 2, '>H', 1),
'getWirelessChannel': (0xc2, 1, '>B', 0, None, 1),
'setWirelessChannel': (0xc3, 0, None, 1, '>B', 1),
'commitWirelessSettings': (0xc5, 0, None, 0, None, 1),
'getWirelessAddress': (0xc6, 2, '>H', 0, None, 1),
'getSerialNumberAtLogicalID': (0xd0, 4, '>I', 1, '>B', 1),
'_setSerialNumberAtLogicalID': (0xd1, 0, None, 5, '>BI', 1),
'getWirelessChannelNoiseLevels': (0xd2, 16, '>16B', 0, None, 1),
'setWirelessRetries': (0xd3, 0, None, 1, '>B', 1),
'getWirelessRetries': (0xd4, 1, '>B', 0, None, 1),
'getWirelessSlotsOpen': (0xd5, 1, '>B', 0, None, 1),
'getSignalStrength': (0xd6, 1, '>B', 0, None, 1),
'setWirelessHIDUpdateRate': (0xd7, 0, None, 1, '>B', 1),
'getWirelessHIDUpdateRate': (0xd8, 1, '>B', 0, None, 1),
'setWirelessHIDAsynchronousMode': (0xd9, 0, None, 1, '>B', 1),
'getWirelessHIDAsynchronousMode': (0xda, 1, '>B', 0, None, 1),
'_setWirelessResponseHeaderBitfield': (0xdb, 0, None, 4, '>I', 1),
'_getWirelessResponseHeaderBitfield': (0xdc, 4, '>I', 0, None, 1),
'setJoystickLogicalID': (0xf0, 0, None, 1, '>B', 1),
'setMouseLogicalID': (0xf1, 0, None, 1, '>B', 1),
'getJoystickLogicalID': (0xf2, 1, '>B', 0, None, 1),
'getMouseLogicalID': (0xf3, 1, '>B', 0, None, 1)
})
wl_command_dict = TSWLSensor.command_dict.copy()
_device_types = ["DNG"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(TSDongle, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.05)
serial_port.flushInput()
checkSoftwareVersionFromPort(serial_port)
serial_port.write(bytearray((0xf7, 0xb7, 0xb7)))
reception_bitfield = struct.unpack('>H', serial_port.read(2))[0]
idx = 1
for i in range(15):
if idx & reception_bitfield:
count = 0
serial_port.write(bytearray((0xf7, 0xd0, i, 0xd0 + i)))
wl_id = struct.unpack('>I', serial_port.read(4))[0]
while count < 15:
count += 1
serial_port.write(bytearray((0xf8, i, 0x56, 0x56 + i)))
did_fail = struct.unpack('>B', serial_port.read(1))[0]
if did_fail:
serial_port.read(1)
else:
_print("Stopped {0:08X} on try {1:d}".format(wl_id, count))
serial_port.read(2)
break
idx <<= 1
return _generateSensorClass(new_inst, serial_port, TSDongle._device_types)
            _print('Error: serial port could not be created')
def __init__(self, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
self.protocol_args = { 'success_failure': True,
'timestamp': True,
'command_echo': True,
'logical_id': True,
'data_length': True}
if timestamp_mode != TSS_TIMESTAMP_SENSOR:
self.protocol_args['timestamp'] = False
self.timestamp_mode = timestamp_mode
self.baudrate = baudrate
reinit = False
        try:  # if this attribute exists, the instance was initialized before
            check = self.wireless_table
            reinit = True
            # _print("sensor reinit!!!")
        except AttributeError:
self._setupBaseVariables()
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.setWirelessStreamingAutoFlushMode(1)
self.startStreaming()
def reconnect(self):
self.close()
if not tryPort(self.port_name):
_print("tryport fail")
try:
serial_port = serial.Serial(self.port_name, baudrate=self.baudrate, timeout=0.5, writeTimeout=0.5)
serial_port.applySettingsDict(self.serial_port_settings)
self.serial_port = serial_port
self.setWirelessStreamingAutoFlushMode(0)
time.sleep(0.05)
self.serial_port.flushInput()
for i in range(15):
serial_port.write(bytearray((0xf7, 0xd0, i, 0xd0 + i)))
for i in range(10):
try:
wl_id = struct.unpack('>I', serial_port.read(4))[0]
except:
continue
break
if wl_id != 0:
count = 0
while count < 25:
count += 1
serial_port.write(bytearray((0xf8, i, 0x56, 0x56 + i)))
did_fail = struct.unpack('>B', serial_port.read(1))[0]
if did_fail:
serial_port.read(1)
else:
_print("Stopped {0:08X} on try {1:d}".format(wl_id, count))
serial_port.read(2)
break
except:
traceback.print_exc()
return False
self._setupProtocolHeader(**self.protocol_args)
self._setupThreadedReadLoop()
self.setWirelessStreamingAutoFlushMode(1)
return True
def _setupBaseVariables(self):
self.serial_number_hex = '{0:08X}'.format(self.serial_number)
self.wireless_table = [0] * 15
for i in range(15):
tmp_id = self.f7WriteRead('getSerialNumberAtLogicalID', i)
if tmp_id not in self.wireless_table or tmp_id == 0:
self.wireless_table[i] = tmp_id
else:
self.f7WriteRead('_setSerialNumberAtLogicalID', (i, 0))
def _setupProtocolHeader(self, success_failure=False,
timestamp=False,
command_echo=False,
checksum=False,
logical_id=False,
serial_number=False,
data_length=False):
        protocol_header = _generateProtocolHeader(success_failure,
                                                  timestamp,
                                                  command_echo,
                                                  checksum,
                                                  logical_id,
                                                  serial_number,
                                                  data_length)
protocol_byte, self.header_parse, self.header_idx_lst = protocol_header
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
dwl_header = self.f7WriteRead('_getWirelessResponseHeaderBitfield')
if d_header != protocol_byte or dwl_header != protocol_byte:
self.f7WriteRead('_setWiredResponseHeaderBitfield', protocol_byte)
self.f7WriteRead('_setWirelessResponseHeaderBitfield', protocol_byte)
d_header = self.f7WriteRead('_getWiredResponseHeaderBitfield')
dwl_header = self.f7WriteRead('_getWirelessResponseHeaderBitfield')
if d_header != protocol_byte or dwl_header != protocol_byte:
print("!!!!!fail d_header={0}, dwl_header={1}, protocol_header_byte={2}".format(d_header, dwl_header, protocol_byte))
raise Exception
# Wireless Old Protocol WriteRead
def f8WriteRead(self, logical_id, command, input_list=None):
command_args = self.command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
packed_data = struct.pack(in_struct, *input_list)
else:
packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xf8, logical_id, cmd_byte, packed_data)
self.serial_port.write(write_array)
rtn_list = []
output_data = self.serial_port.read(2)
if len(output_data) == 2:
fail_byte = struct.unpack('>B', output_data[0])[0]
logical_id_byte = struct.unpack('>B', output_data[1])[0]
rtn_list.append(fail_byte)
if not fail_byte:
self.serial_port.read(1)
else:
return True
if out_struct:
output_data = self.serial_port.read(out_len)
rtn_list.append(struct.unpack(out_struct, output_data))
if len(rtn_list) != 1:
return rtn_list
return rtn_list[0]
return True
## Wireless New Protocol WriteRead
def faWriteRead(self, logical_id, command, input_list=None):
global global_counter
command_args = self.wl_command_dict[command]
cmd_byte, out_len, out_struct, in_len, in_struct, compatibility = command_args
if self.compatibility < compatibility:
raise Exception("Firmware for device on ( %s ) is out of date for this function. Recommend updating to latest firmware." % self.serial_port.name)
packed_data = None
if in_struct:
if type(input_list) in (list, tuple):
                packed_data = struct.pack(in_struct, *input_list)
            else:
                packed_data = struct.pack(in_struct, input_list)
write_array = makeWriteArray(0xfa, logical_id, cmd_byte, packed_data)
while len(self.read_queue) > 15:
_print("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!too many commands!!!!!")
time.sleep(0.01)
self.read_lock.acquire()
uid = global_counter
global_counter += 1
try:
self.serial_port.write(write_array) # release in reader thread
except serial.SerialTimeoutException:
self.read_lock.release()
self.serial_port.close()
# _print("SerialTimeoutException!!!!")
return (True, None, None)
except ValueError:
try:
# _print("trying to open it back up!!!!")
self.serial_port.open()
# _print("aaand open!!!!")
except serial.SerialException:
self.read_lock.release()
# _print("SerialTimeoutException!!!!")
return (True, None, None)
queue_packet = (uid, cmd_byte)
timeout_time = 0.5 + (len(self.read_queue) * 0.150) # timeout increases as queue gets larger
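        # e.g. with four requests already queued: 0.5 + 4 * 0.150 = 1.1 seconds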
self.read_queue.append(queue_packet)
start_time = time.clock() + timeout_time
read_data = None
        while timeout_time > 0:
            self.read_lock.wait(timeout_time)
            read_data = self.read_dict.get(uid, None)
            if read_data is not None:
                break
            timeout_time = start_time - time.clock()
# _print("Still waiting {0} {1} {2} {3}".format(uid, command,logical_id, timeout_time))
else:
# _print("Operation timed out!!!!")
try:
self.read_queue.remove(queue_packet)
except:
traceback.print_exc()
self.read_lock.release()
return (True, None, None)
self.read_lock.release()
del self.read_dict[uid]
header_list, output_data = read_data
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
# _print("RESponse {0} {1} {2} {3}".format(uid, command,logical_id, timeout_time))
if logical_id != rtn_log_id:
# _print("!!!!!!!!logical_id != rtn_log_id!!!!!")
# _print(header_list)
# _hexDump(output_data, 'o')
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
return (True, timestamp, None)
if cmd_echo != cmd_byte:
# _print("!!!!!!!!cmd_echo!=cmd_byte!!!!!")
# _print('cmd_echo= 0x{0:02x} cmd_byte= 0x{1:02x}'.format(cmd_echo, cmd_byte))
# _print(header_list)
# _hexDump(output_data, 'o')
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
# _print('!!!!!!end')
return (True, timestamp, None)
rtn_list = None
if not fail_byte:
if out_struct:
rtn_list = struct.unpack(out_struct, output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
elif cmd_echo == 0x54:
rtn_list = self[logical_id].stream_parse.unpack(output_data)
if len(rtn_list) == 1:
rtn_list = rtn_list[0]
else:
# _print("fail_byte!!!!triggered")
pass
self._read_data = None
return (fail_byte, timestamp, rtn_list)
def __getitem__(self, idx):
hw_id = self.wireless_table[idx]
if hw_id == 0:
return None
# Check if sensor exists.
if hw_id in global_sensorlist:
rtn_inst = global_sensorlist[hw_id]
if rtn_inst.dongle is self:
return rtn_inst
elif rtn_inst.dongle is None:
_print("updating sensor {0:08X} to be wireless".format(hw_id))
return TSWLSensor(timestamp_mode=self.timestamp_mode, dongle=self, logical_id=idx)
return None
# Else, make a new TSWLSensor
else:
_print("making new sensor {0:08X}".format(hw_id))
return TSWLSensor(timestamp_mode=self.timestamp_mode, dongle=self, logical_id=idx)
def getSensorFromDongle(self, idx):
return self.__getitem__(idx)
def setSensorToDongle(self, idx, hw_id):
other_hw_id = self.wireless_table[idx]
if other_hw_id != 0:
if other_hw_id in global_sensorlist:
other_sens = global_sensorlist[other_hw_id]
other_sens.dongle = None
other_sens.logical_id = None
if hw_id not in self.wireless_table:
if hw_id in global_sensorlist:
sensor = global_sensorlist[hw_id]
sensor.dongle = None
sensor.logical_id = None
self.setSerialNumberAtLogicalID(idx, hw_id)
else:
if other_hw_id != hw_id:
other_idx = self.wireless_table.index(hw_id)
self.setSerialNumberAtLogicalID(other_idx, 0)
self.setSerialNumberAtLogicalID(idx, hw_id)
return self.__getitem__(idx)
elif hw_id != 0:
self.setSerialNumberAtLogicalID(idx, hw_id)
return self.__getitem__(idx)
def _dataReadLoop(self):
while self.data_loop:
try:
self._readDataWirelessProHeader()
            except KeyboardInterrupt:
                print('\n! Received keyboard interrupt, quitting threads.\n')
                raise  # re-raise so a worker thread does not swallow the interrupt
except:
# traceback.print_exc()
# _print("bad _parseStreamData parse")
# _print('!!!!!inWaiting = {0}'.format(self.serial_port.inWaiting()))
try:
self.read_lock.release()
except:
pass
def _readDataWirelessProHeader(self):
_serial_port = self.serial_port
# in_wait = _serial_port.inWaiting()
# if in_wait:
# _print('!1025! inWaiting = {0}'.format(in_wait))
header_bytes = _serial_port.read(self.header_parse.size)
if header_bytes:
# _hexDump(header_bytes, 'o')
if self.timestamp_mode == TSS_TIMESTAMP_SENSOR:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader87(header_data)
elif self.timestamp_mode == TSS_TIMESTAMP_SYSTEM:
                sys_timestamp = time.clock()  # time the packet was parsed; it may have sat in the system buffer for a few ms
sys_timestamp *= 1000000
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader85(header_data, sys_timestamp)
else:
header_data = self.header_parse.unpack(header_bytes)
header_list = padProtocolHeader85(header_data, None)
fail_byte, timestamp, cmd_echo, ck_sum, rtn_log_id, sn, data_size = header_list
# _print("!!!!fail_byte={0}, cmd_echo={1}, rtn_log_id={2}, data_size={3}".format(fail_byte, cmd_echo, rtn_log_id, data_size))
output_data = _serial_port.read(data_size)
            if cmd_echo == 0xff:
if data_size:
self[rtn_log_id]._parseStreamData(timestamp, output_data)
return
self.read_lock.acquire()
# _print('retrning data!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!')
            if len(self.read_queue):  # guard: a response can arrive while the request queue is empty
uid, cmd_byte = self.read_queue.popleft()
if cmd_byte == cmd_echo:
self.read_dict[uid] = (header_list, output_data)
                    self.read_lock.notifyAll()  # wake any faWriteRead() blocked in wait(); its timeout expires otherwise
else:
# _print('Unrequested packet found!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
self.read_queue.appendleft((uid, cmd_byte))
self.read_lock.release()
return
# _print('Unrequested packet found (read_queue is empty)!!!')
# _hexDump(header_bytes, 'o')
# _hexDump(output_data, 'o')
# _print("no status bytes")
self.read_lock.release()
## 209(0xd1)
def setSerialNumberAtLogicalID(self, logical_id, serial_number, timestamp=False):
arg_list = (logical_id, serial_number)
fail_byte, t_stamp, data = self.writeRead('_setSerialNumberAtLogicalID', arg_list)
if not fail_byte:
self.wireless_table[logical_id] = serial_number
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions DNG
## 176(0xb0)
def setWirelessStreamingAutoFlushMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessStreamingAutoFlushMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 177(0xb1)
def getWirelessStreamingAutoFlushMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessStreamingAutoFlushMode')
if timestamp:
return (data, t_stamp)
return data
## 182(0xb6)
def broadcastSynchronizationPulse(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('broadcastSynchronizationPulse')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 183(0xb7)
def getReceptionBitfield(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getReceptionBitfield')
if timestamp:
return (data, t_stamp)
return data
## 192(0xc0)
def getWirelessPanID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessPanID')
if timestamp:
return (data, t_stamp)
return data
## 193(0xc1)
def setWirelessPanID(self, PanID, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessPanID', PanID)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 194(0xc2)
def getWirelessChannel(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessChannel')
if timestamp:
return (data, t_stamp)
return data
## 195(0xc3)
def setWirelessChannel(self, channel, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessChannel', channel)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 197(0xc5)
def commitWirelessSettings(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('commitWirelessSettings')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 198(0xc6)
def getWirelessAddress(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessAddress')
if timestamp:
return (data, t_stamp)
return data
## 208(0xd0)
def getSerialNumberAtLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSerialNumberAtLogicalID', logical_id)
if timestamp:
return (data, t_stamp)
return data
## 210(0xd2)
def getWirelessChannelNoiseLevels(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessChannelNoiseLevels')
if timestamp:
return (data, t_stamp)
return data
## 211(0xd3)
def setWirelessRetries(self, retries, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessRetries', retries)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 212(0xd4)
def getWirelessRetries(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessRetries')
if timestamp:
return (data, t_stamp)
return data
## 213(0xd5)
def getWirelessSlotsOpen(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessSlotsOpen')
if timestamp:
return (data, t_stamp)
return data
## 214(0xd6)
def getSignalStrength(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getSignalStrength')
if timestamp:
return (data, t_stamp)
return data
## 215(0xd7)
def setWirelessHIDUpdateRate(self, update_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessHIDUpdateRate', update_rate)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 216(0xd8)
def getWirelessHIDUpdateRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessHIDUpdateRate')
if timestamp:
return (data, t_stamp)
return data
## 217(0xd9)
def setWirelessHIDAsynchronousMode(self, mode, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setWirelessHIDAsynchronousMode', mode)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 218(0xda)
def getWirelessHIDAsynchronousMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getWirelessHIDAsynchronousMode')
if timestamp:
return (data, t_stamp)
return data
## 240(0xf0)
def setJoystickLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setJoystickLogicalID', logical_id)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 241(0xf1)
def setMouseLogicalID(self, logical_id, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('setMouseLogicalID', logical_id)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 242(0xf2)
def getJoystickLogicalID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getJoystickLogicalID')
if timestamp:
return (data, t_stamp)
return data
## 243(0xf3)
def getMouseLogicalID(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getMouseLogicalID')
if timestamp:
return (data, t_stamp)
return data
## END generated functions DNG
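# Usage sketch (hedged; not from the original library) for managing the
# dongle's logical-id table. The serial number 0x0100012A is a made-up value:
#
#   dongle = TSDongle(com_port='/dev/ttyACM0')
#   if dongle is not None:
#       dongle.setSensorToDongle(0, 0x0100012A)  # pair a sensor at slot 0
#       sensor = dongle[0]                       # same as getSensorFromDongle(0)
#       if sensor is not None:
#           print(sensor.getBatteryVoltage())
#       dongle.setSerialNumberAtLogicalID(0, 0)  # free slot 0 again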
class TSEMSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'setPinMode': (0x1d, 0, None, 2, '>BB', 1),
'getPinMode': (0x1e, 2, '>BB', 0, None, 1),
'getInterruptStatus': (0x1f, 1, '>B', 0, None, 1),
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["EM", "EM-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSEMSensor._device_types)
            _print('Error: serial port could not be created')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions EM_
## 29(0x1d)
def setPinMode(self, mode, pin, timestamp=False):
arg_list = (mode, pin)
fail_byte, t_stamp, data = self.writeRead('setPinMode', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 30(0x1e)
def getPinMode(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getPinMode')
if timestamp:
return (data, t_stamp)
return data
## 31(0x1f)
def getInterruptStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getInterruptStatus')
if timestamp:
return (data, t_stamp)
return data
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## END generated functions EM_
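# Usage sketch (hedged; not from the original library). The mode/pin values
# are assumptions -- the meaning of each mode byte is defined by the sensor's
# firmware documentation:
#
#   sensor = TSEMSensor(com_port='/dev/ttyACM0')
#   if sensor is not None:
#       sensor.setPinMode(1, 0)            # assumed: mode 1 on pin 0
#       print(sensor.getPinMode())         # -> (pin0_mode, pin1_mode)
#       print(sensor.getInterruptStatus())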
class TSDLSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'turnOnMassStorage': (0x39, 0, None, 0, None, 1),
'turnOffMassStorage': (0x3a, 0, None, 0, None, 1),
'formatAndInitializeSDCard': (0x3b, 0, None, 0, None, 1),
'beginDataLoggingSession': (0x3c, 0, None, 0, None, 1),
'endDataLoggingSession': (0x3d, 0, None, 0, None, 1),
'setClockValues': (0x3e, 0, None, 6, '>6B', 1),
'getClockValues': (0x3f, 6, '>6B', 0, None, 1),
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["DL", "DL-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=0.5, writeTimeout=0.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.01)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSDLSensor._device_types)
            _print('Error: serial port could not be created')
## generated functions DL_
## 57(0x39)
def turnOnMassStorage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('turnOnMassStorage')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 58(0x3a)
def turnOffMassStorage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('turnOffMassStorage')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 59(0x3b)
def formatAndInitializeSDCard(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('formatAndInitializeSDCard')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 60(0x3c)
def beginDataLoggingSession(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('beginDataLoggingSession')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 61(0x3d)
def endDataLoggingSession(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('endDataLoggingSession')
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 62(0x3e)
def setClockValues(self, month, day, year, hour, minute, second, timestamp=False):
arg_list = (month, day, year, hour, minute, second)
fail_byte, t_stamp, data = self.writeRead('setClockValues', arg_list)
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## 63(0x3f)
def getClockValues(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getClockValues')
if timestamp:
return (data, t_stamp)
return data
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions DL_
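# Usage sketch (hedged; not from the original library) for a minimal logging
# session. setClockValues takes (month, day, year, hour, minute, second) as
# single bytes, so the year is presumably relative (e.g. 15 for 2015):
#
#   sensor = TSDLSensor(com_port='/dev/ttyACM0')
#   if sensor is not None:
#       sensor.setClockValues(1, 31, 15, 12, 0, 0)
#       if sensor.beginDataLoggingSession():
#           time.sleep(10)                 # capture for ten seconds
#           sensor.endDataLoggingSession()
#       sensor.turnOnMassStorage()         # expose the SD card over USB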
class TSBTSensor(_TSSensor):
command_dict = _TSSensor.command_dict.copy()
command_dict.update({
'getBatteryVoltage': (0xc9, 4, '>f', 0, None, 1),
'getBatteryPercentRemaining': (0xca, 1, '>B', 0, None, 1),
'getBatteryStatus': (0xcb, 1, '>B', 0, None, 1),
'_setUARTBaudRate': (0xe7, 0, None, 4, '>I', 1),
'getUARTBaudRate': (0xe8, 4, '>I', 0, None, 1),
'getButtonState': (0xfa, 1, '>B', 0, None, 1)
})
reverse_command_dict = dict(map(lambda x: [x[1][0], x[0]], command_dict.items()))
_device_types = ["BT", "BT-HH"]
def __new__(cls, com_port=None, baudrate=_baudrate, timestamp_mode=TSS_TIMESTAMP_SENSOR):
if com_port is None:
return None
if com_port:
if type(com_port) is str:
port_name = com_port
elif type(com_port) is ComInfo:
port_name = com_port.com_port
else:
_print("An erronous parameter was passed in")
return None
if baudrate not in _allowed_baudrates:
baudrate = _baudrate
_print("Error baudrate value not allowed. Using default.")
serial_port = serial.Serial(port_name, baudrate=baudrate, timeout=2.5, writeTimeout=2.5)
if serial_port is not None:
new_inst = super(_TSSensor, cls).__new__(cls)
serial_port.write(bytearray((0xf7, 0x56, 0x56)))
time.sleep(0.25)
serial_port.flushInput()
return _generateSensorClass(new_inst, serial_port, TSBTSensor._device_types)
            _print('Error: serial port could not be created')
## 231(0xe7)
def setUARTBaudRate(self, baud_rate, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('_setUARTBaudRate', baud_rate)
if not fail_byte:
self.baudrate = baud_rate
if timestamp:
return (not fail_byte, t_stamp)
return not fail_byte
## generated functions BT_
## 201(0xc9)
def getBatteryVoltage(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryVoltage')
if timestamp:
return (data, t_stamp)
return data
## 202(0xca)
def getBatteryPercentRemaining(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryPercentRemaining')
if timestamp:
return (data, t_stamp)
return data
## 203(0xcb)
def getBatteryStatus(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getBatteryStatus')
if timestamp:
return (data, t_stamp)
return data
## 232(0xe8)
def getUARTBaudRate(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getUARTBaudRate')
if timestamp:
return (data, t_stamp)
return data
## 250(0xfa)
def getButtonState(self, timestamp=False):
fail_byte, t_stamp, data = self.writeRead('getButtonState')
if timestamp:
return (data, t_stamp)
return data
## END generated functions BT_
global_broadcaster = Broadcaster()
| mit | 2,051,369,659,077,840,100 | 39.913335 | 157 | 0.573452 | false |
eckhart/himlar | profile/files/openstack/horizon/overrides.py | 1 | 1079 | # Disable Floating IPs
from openstack_dashboard.dashboards.project.access_and_security import tabs
from openstack_dashboard.dashboards.project.instances import tables
import horizon
NO = lambda *x: False
tabs.FloatingIPsTab.allowed = NO
tabs.APIAccessTab.allowed = NO
tables.AssociateIP.allowed = NO
tables.SimpleAssociateIP.allowed = NO
tables.SimpleDisassociateIP.allowed = NO
project_dashboard = horizon.get_dashboard("project")
# Completely remove panel Network->Routers
routers_panel = project_dashboard.get_panel("routers")
project_dashboard.unregister(routers_panel.__class__)
# Completely remove panel Network->Networks
networks_panel = project_dashboard.get_panel("networks")
project_dashboard.unregister(networks_panel.__class__)
# Completely remove panel Network->Network Topology
topology_panel = project_dashboard.get_panel("network_topology")
project_dashboard.unregister(topology_panel.__class__)
# Remove "Volume Consistency Groups" tab
from openstack_dashboard.dashboards.project.volumes import tabs
tabs.CGroupsTab.allowed = NO
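# The same pattern (a hypothetical sketch, not part of this override set) can
# hide any other panel or tab: fetch the panel from its dashboard and
# unregister it, or stub the tab's allowed() hook. The "images" panel name
# below is an assumption.
#
# images_panel = project_dashboard.get_panel("images")
# project_dashboard.unregister(images_panel.__class__)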
| apache-2.0 | -3,342,866,697,223,486,000 | 34.966667 | 80 | 0.808156 | false |
lioncui/pybix | client/plugin/RedisPlugin.py | 1 | 3753 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from lib import pybixlib
import traceback
from p_class import plugins
import redis
class RedisPlugin(plugins.plugin):
def __init__(self, uuid, taskConf, agentType):
plugins.plugin.__init__(
self, uuid, taskConf, agentType)
def data_format_MB(self, data):
data = int(data)
        data = data / 1048576.0  # float division: int/int would truncate on Python 2
data = "%.2f" % data
data = float(data)
return data
def data_format_Ratio(self, hit, mis):
hit = int(hit)
mis = int(mis)
if (hit+mis) == 0:
return 0
        data = (hit * 100.0) / (hit + mis)  # float division for a fractional percentage
data = "%.2f" % data
data = float(data)
return data
def data_format_connected_per_min(self, connected, min):
data = float(connected)/min
data = "%.2f" % data
return data
def data_format_command_per_min(self, command, min):
data = float(command)/min
data = "%.2f" % data
return data
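    # Worked example (hedged, not from the original plugin): with 750 keyspace
    # hits and 250 misses, data_format_Ratio(750, 250) computes
    # (750 * 100.0) / (750 + 250) = 75.0, i.e. a 75.00% cache hit ratio.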
def getData(self):
status_content = {}
try:
host = self.taskConf.get("host")
port = self.taskConf.get("port")
password = self.taskConf.get("password")
self.server = redis.StrictRedis(host=host, port=port,
password=password,
socket_connect_timeout=30)
self.info = self.server.info()
status_content['redis_version'] = self.info['redis_version']
status_content['used_memory'] = self.info['used_memory']
status_content['connected_clients'] = self.info[
'connected_clients']
status_content['connected_slaves'] = self.info['connected_slaves']
status_content['uptime_in_minutes'] = self.info[
'uptime_in_seconds'] / 60
#status_content['connected_per_min'] = self.data_format_connected_per_min(status_content['connected_clients'], status_content['uptime_in_minutes'])
status_content['rejected_connections'] = self.info[
'rejected_connections']
status_content['pubsub_patterns'] = self.info['pubsub_patterns']
status_content['pubsub_channels'] = self.info['pubsub_channels']
status_content['keyspace_hits'] = self.info['keyspace_hits']
status_content['keyspace_misses'] = self.info['keyspace_misses']
#status_content['keyspace_hits'] = self.data_format_Ratio(self.info['keyspace_hits'], self.info['keyspace_misses'])
status_content['commands_total'] = self.info[
'total_commands_processed']
#status_content['command_per_min'] = self.data_format_command_per_min(self.info['total_commands_processed'], status_content['uptime_in_minutes'])
status_content['usedMemoryRss'] = self.info['used_memory_rss']
status_content['memFragmentationRatio'] = self.info[
'mem_fragmentation_ratio']
status_content['blockedClients'] = self.info['blocked_clients']
totalKey = 0
for key in self.info:
if key.startswith('db'):
totalKey = totalKey + self.info[key]['keys']
status_content['totalKeys'] = totalKey
except Exception:
pybixlib.error(self.logHead + traceback.format_exc())
self.errorInfoDone(traceback.format_exc())
status_content = {}
finally:
self.setData({'agentType': self.agentType, 'uuid': self.uuid,
'code': self.code, 'time': self.getCurTime(),
'data': status_content, 'error_info': self.error_info})
self.intStatus()
| gpl-3.0 | -5,436,894,713,689,930,000 | 41.647727 | 159 | 0.565681 | false |
kernsuite-debian/lofar | SAS/ResourceAssignment/ResourceAssignmentEditor/lib/webservice.py | 1 | 39598 | #!/usr/bin/env python3
# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
# $Id$
'''The ResourceAssignmentEditor webservice serves an interactive HTML5 website
for viewing and editing LOFAR resources.'''
import sys
import os
import time
from optparse import OptionParser
from threading import Condition, Lock, current_thread, Thread
import _strptime  # pre-import to avoid a threading race on first use of datetime.strptime
from datetime import datetime, timedelta
from json import loads as json_loads
import logging
import subprocess
from dateutil import parser, tz
from flask import Flask
from flask import render_template
from flask import request
from flask import abort
from flask import url_for
from lofar.common.flask_utils import gzipped
from lofar.messaging.rpc import RPCException
from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
from lofar.sas.resourceassignment.resourceassignmenteditor.fakedata import *
from lofar.sas.resourceassignment.resourceassignmenteditor.changeshandler import ChangesHandler, CHANGE_DELETE_TYPE
from lofar.sas.resourceassignment.resourceassignmentservice.rpc import RADBRPC
from lofar.mom.momqueryservice.momqueryrpc import MoMQueryRPC
from lofar.sas.resourceassignment.resourceassignmenteditor.mom import updateTaskMomDetails
from lofar.sas.resourceassignment.resourceassignmenteditor.storage import updateTaskStorageDetails
from lofar.sas.datamanagement.cleanup.rpc import CleanupRPC
from lofar.sas.datamanagement.storagequery.rpc import StorageQueryRPC
from lofar.sas.otdb.otdbrpc import OTDBRPC
from lofar.common import isProductionEnvironment, isTestEnvironment
from lofar.common.util import humanreadablesize
from lofar.common.subprocess_utils import communicate_returning_strings
from lofar.common import dbcredentials
from lofar.sas.resourceassignment.database.radb import RADatabase
logger = logging.getLogger(__name__)
def asDatetime(isoString):
if isoString[-1] == 'Z':
isoString = isoString[:-1]
if isoString[-4] == '.':
isoString += '000'
return datetime.strptime(isoString, '%Y-%m-%dT%H:%M:%S.%f')
def asIsoFormat(timestamp):
return datetime.strftime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')
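# Round-trip example (hedged, for illustration only):
#   asDatetime('2016-05-04T12:00:00.123Z')  -> datetime(2016, 5, 4, 12, 0, 0, 123000)
#   asIsoFormat(datetime(2016, 5, 4, 12, 0, 0, 123000)) -> '2016-05-04T12:00:00.123000Z'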
__root_path = os.path.dirname(os.path.realpath(__file__))
'''The flask webservice app'''
app = Flask('Scheduler',
instance_path=__root_path,
template_folder=os.path.join(__root_path, 'templates'),
static_folder=os.path.join(__root_path, 'static'),
instance_relative_config=True)
# Load the default configuration
app.config.from_object('lofar.sas.resourceassignment.resourceassignmenteditor.config.default')
try:
import ujson
def convertDictDatetimeValuesToString(obj):
        '''recursively convert all datetime values in the dict (or list) to ISO-format strings'''
        if isinstance(obj, list):
            return [convertDictDatetimeValuesToString(x) if (isinstance(x, dict) or isinstance(x, list)) else asIsoFormat(x) if isinstance(x, datetime) else x for x in obj]
return dict( (k, convertDictDatetimeValuesToString(v) if (isinstance(v, dict) or isinstance(v, list)) else asIsoFormat(v) if isinstance(v, datetime) else v) for k,v in list(obj.items()))
def jsonify(obj):
'''faster implementation of flask.json.jsonify using ultrajson and the above datetime->string convertor'''
json_str = ujson.dumps(dict(convertDictDatetimeValuesToString(obj)))
return app.response_class(json_str, mimetype='application/json')
except:
from flask.json import jsonify
from flask.json import JSONEncoder
class CustomJSONEncoder(JSONEncoder):
def default(self, obj):
try:
if isinstance(obj, datetime):
return asIsoFormat(obj)
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, obj)
app.json_encoder = CustomJSONEncoder
rarpc = None
otdbrpc = None
curpc = None
sqrpc = None
momqueryrpc = None
changeshandler = None
_radb_pool = {}
_radb_pool_lock = Lock()
_radb_dbcreds = None
def radb():
global _radb_pool, _radb_pool_lock
if _radb_dbcreds:
with _radb_pool_lock:
thread = current_thread()
tid = thread.ident
now = datetime.utcnow()
if tid not in _radb_pool:
logger.info('creating radb connection for thread %s', tid)
_radb_pool[tid] = { 'connection': RADatabase(dbcreds=_radb_dbcreds),
'last_used': now }
thread_conn_obj = _radb_pool[tid]
thread_conn_obj['last_used'] = now
threshold = timedelta(minutes=5)
obsolete_connections_tids = [tid for tid,tco in list(_radb_pool.items()) if now - tco['last_used'] > threshold]
for tid in obsolete_connections_tids:
logger.info('deleting radb connection for thread %s', tid)
del _radb_pool[tid]
return thread_conn_obj['connection']
return rarpc
@app.route('/')
@app.route('/index.htm')
@app.route('/index.html')
@gzipped
def index():
'''Serves the ResourceAssignmentEditor's index page'''
return render_template('index.html', title='Scheduler')
@app.route('/projects')
@app.route('/projects.htm')
@app.route('/projects.html')
@gzipped
def projects():
return render_template('projects.html', title='Projects')
@app.route('/rest/config')
@gzipped
def config():
config = {'mom_base_url':'',
'lta_base_url':'',
'inspection_plots_base_url':'https://proxy.lofar.eu/inspect/HTML/',
'sky_view_base_url':'http://dop344.astron.nl:5000/uvis/id'}
if isProductionEnvironment():
config['mom_base_url'] = 'https://lofar.astron.nl/mom3'
config['lta_base_url'] = 'http://lofar.target.rug.nl/'
elif isTestEnvironment():
config['mom_base_url'] = 'http://lofartest.control.lofar:8080/mom3'
config['lta_base_url'] = 'http://lofar-test.target.rug.nl/'
return jsonify({'config': config})
@app.route('/rest/resources')
@gzipped
def resources():
result = radb().getResources(include_availability=True)
return jsonify({'resources': result})
@app.route('/rest/resources/<int:resource_id>')
@gzipped
def resource(resource_id):
result = radb().getResources(resource_ids=[resource_id], include_availability=True)
if result:
return jsonify(result[0])
return jsonify({})
@app.route('/rest/resources/<int:resource_id>/resourceclaims')
@gzipped
def resourceclaimsForResource(resource_id):
return resourceclaimsForResourceFromUntil(resource_id, None, None)
@app.route('/rest/resources/<int:resource_id>/resourceclaims/<string:fromTimestamp>')
@gzipped
def resourceclaimsForResourceFrom(resource_id, fromTimestamp=None):
return resourceclaimsForResourceFromUntil(resource_id, fromTimestamp, None)
@app.route('/rest/resources/<int:resource_id>/resourceclaims/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceclaimsForResourceFromUntil(resource_id, fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
claims = radb().getResourceClaims(lower_bound=fromTimestamp,
upper_bound=untilTimestamp,
resource_ids=[resource_id],
extended=False,
include_properties=True)
return jsonify({'resourceclaims': claims})
@app.route('/rest/resourcegroups')
@gzipped
def resourcegroups():
result = radb().getResourceGroups()
return jsonify({'resourcegroups': result})
@app.route('/rest/resourcegroupmemberships')
@gzipped
def resourcegroupsmemberships():
result = radb().getResourceGroupMemberships()
return jsonify({'resourcegroupmemberships': result})
@app.route('/rest/resourceclaims')
def resourceclaims():
return resourceclaimsFromUntil(None, None)
@app.route('/rest/resourceclaims/<string:fromTimestamp>')
def resourceclaimsFrom(fromTimestamp=None):
return resourceclaimsFromUntil(fromTimestamp, None)
@app.route('/rest/resourceclaims/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceclaimsFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
claims = radb().getResourceClaims(lower_bound=fromTimestamp, upper_bound=untilTimestamp, include_properties=True)
return jsonify({'resourceclaims': claims})
@app.route('/rest/resourceusages')
@gzipped
def resourceUsages():
return resourceUsagesFromUntil(None, None)
@app.route('/rest/resourceusages/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def resourceUsagesFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
result = radb().getResourceUsages(lower_bound=fromTimestamp, upper_bound=untilTimestamp)
return jsonify({'resourceusages': result})
@app.route('/rest/resources/<int:resource_id>/usages', methods=['GET'])
@app.route('/rest/resourceusages/<int:resource_id>', methods=['GET'])
@gzipped
def resourceUsagesForResource(resource_id):
return resourceUsagesForResourceFromUntil(resource_id, None, None)
@app.route('/rest/resources/<int:resource_id>/usages/<string:fromTimestamp>/<string:untilTimestamp>', methods=['GET'])
@app.route('/rest/resourceusages/<int:resource_id>/<string:fromTimestamp>/<string:untilTimestamp>', methods=['GET'])
@gzipped
def resourceUsagesForResourceFromUntil(resource_id, fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
result = radb().getResourceUsages(resource_ids=[resource_id], lower_bound=fromTimestamp, upper_bound=untilTimestamp)
return jsonify({'resourceusages': result})
@app.route('/rest/tasks/<int:task_id>/resourceusages', methods=['GET'])
@gzipped
def resourceUsagesForTask(task_id):
result = radb().getResourceUsages(task_ids=[task_id])
return jsonify({'resourceusages': result})
@app.route('/rest/tasks/<int:task_id>/resourceclaims', methods=['GET'])
@gzipped
def resourceClaimsForTask(task_id):
result = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
return jsonify({'resourceclaims': result})
@app.route('/rest/tasks')
def getTasks():
return getTasksFromUntil(None, None)
@app.route('/rest/tasks/<string:fromTimestamp>')
def getTasksFrom(fromTimestamp):
return getTasksFromUntil(fromTimestamp, None)
@app.route('/rest/tasks/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def getTasksFromUntil(fromTimestamp=None, untilTimestamp=None):
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
tasks = radb().getTasks(fromTimestamp, untilTimestamp)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
def updateTaskDetails(tasks):
#update the mom details and the storage details in parallel
t1 = Thread(target=updateTaskMomDetails, args=(tasks, momqueryrpc))
t2 = Thread(target=updateTaskStorageDetails, args=(tasks, sqrpc, curpc))
t1.daemon = True
t2.daemon = True
t1.start()
t2.start()
#wait for mom details thread to finish
t1.join()
#task details (such as name/description) from MoM are done
#get extra details on reserved resources for reservations (while the storage details still run in t2)
reservationTasks = [t for t in tasks if t['type'] == 'reservation']
if reservationTasks:
reservationClaims = radb().getResourceClaims(task_ids=[t['id'] for t in reservationTasks], extended=True, include_properties=False)
task2claims = {}
for claim in reservationClaims:
if claim['task_id'] not in task2claims:
task2claims[claim['task_id']] = []
task2claims[claim['task_id']].append(claim)
for task in reservationTasks:
claims = task2claims.get(task['id'], [])
task['name'] = ', '.join(c['resource_name'] for c in claims)
task['description'] = 'Reservation on ' + task['name']
#wait for storage details thread to finish
t2.join()
@app.route('/rest/tasks/<int:task_id>', methods=['GET'])
@gzipped
def getTask(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/otdb/<int:otdb_id>', methods=['GET'])
@gzipped
def getTaskByOTDBId(otdb_id):
try:
task = radb().getTask(otdb_id=otdb_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/mom/<int:mom_id>', methods=['GET'])
@gzipped
def getTaskByMoMId(mom_id):
try:
task = radb().getTask(mom_id=mom_id)
if not task:
abort(404)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
return jsonify({'task': task})
except Exception as e:
abort(404)
return jsonify({'task': None})
@app.route('/rest/tasks/mom/group/<int:mom_group_id>', methods=['GET'])
@gzipped
def getTasksByMoMGroupId(mom_group_id):
try:
mom_ids = momqueryrpc.getTaskIdsInGroup(mom_group_id)[str(mom_group_id)]
tasks = radb().getTasks(mom_ids=mom_ids)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
abort(404)
@app.route('/rest/tasks/mom/parentgroup/<int:mom_parent_group_id>', methods=['GET'])
@gzipped
def getTasksByMoMParentGroupId(mom_parent_group_id):
try:
mom_ids = momqueryrpc.getTaskIdsInParentGroup(mom_parent_group_id)[str(mom_parent_group_id)]
tasks = radb().getTasks(mom_ids=mom_ids)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
abort(404)
@app.route('/rest/tasks/<int:task_id>', methods=['PUT'])
def putTask(task_id):
if 'Content-Type' in request.headers and \
request.headers['Content-Type'].startswith('application/json'):
try:
updatedTask = json_loads(request.data.decode('utf-8'))
if task_id != int(updatedTask['id']):
abort(404, 'task_id in url is not equal to id in request.data')
#check if task is known
task = radb().getTask(task_id)
if not task:
abort(404, "unknown task %s" % str(updatedTask))
# first handle start- endtimes...
if 'starttime' in updatedTask or 'endtime' in updatedTask:
logger.info('starttime or endtime in updatedTask: %s', updatedTask)
if isProductionEnvironment():
                    abort(403, 'Editing of start/end times of tasks by users is not yet approved')
#update dict for otdb spec
spec_update = {}
for timeprop in ['starttime', 'endtime']:
if timeprop in updatedTask:
try:
updatedTask[timeprop] = asDatetime(updatedTask[timeprop])
except ValueError:
abort(400, 'timestamp not in iso format: ' + updatedTask[timeprop])
otdb_key = 'LOFAR.ObsSW.Observation.' + ('startTime' if timeprop == 'starttime' else 'stopTime')
spec_update[otdb_key] = updatedTask[timeprop].strftime('%Y-%m-%d %H:%M:%S')
#update timestamps in both otdb and radb
otdbrpc.taskSetSpecification(task['otdb_id'], spec_update)
# update the task's (and its claims) start/endtime
# do not update the tasks status directly via the radb. See few lines below. task status is routed via otdb (and then ends up in radb automatically)
# it might be that editing the start/end time results in a (rabd)task status update (for example to 'conflict' due to conflicting claims)
# that's ok, since we'll update the status to the requested status later via otdb (see few lines below)
radb().updateTaskAndResourceClaims(task_id,
starttime=updatedTask.get('starttime'),
endtime=updatedTask.get('endtime'))
# ...then, handle status update which might trigger resource assignment,
# for which the above updated times are needed
if 'status' in updatedTask:
if isProductionEnvironment() and task['type'] == 'observation' and updatedTask['status'] == 'prescheduled':
abort(403, 'Scheduling of observations via the webscheduler by users is not (yet) allowed')
try:
#update status in otdb only
#the status change will propagate automatically into radb via other services (by design)
otdbrpc.taskSetStatus(task['otdb_id'], updatedTask['status'])
#we expect the status in otdb/radb to eventually become what we asked for...
expected_status = updatedTask['status']
#block until radb and otdb task status are equal to the expected_status (with timeout)
start_wait = datetime.utcnow()
while True:
task = radb().getTask(otdb_id=task['otdb_id'])
otdb_status = otdbrpc.taskGetStatus(task['otdb_id'])
logger.info('waiting for otdb/radb task status to be in [%s].... otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
if (task['status'] == expected_status and otdb_status == expected_status):
logger.info('otdb/radb task status now has the expected status %s otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
break
if datetime.utcnow() - start_wait > timedelta(seconds=10):
logger.warning('timeout while waiting for otdb/radb task status to get the expected status %s otdb:%s radb:%s',
expected_status, otdb_status, task['status'])
break
time.sleep(0.1)
except RPCException as e:
if 'does not exist' in str(e):
# task does not exist (anymore) in otdb
#so remove it from radb as well (with cascading deletes on specification)
logger.warning('task with otdb_id %s does not exist anymore in OTDB. removing task radb_id %s from radb', task['otdb_id'], task['id'])
radb().deleteSpecification(task['specification_id'])
if 'data_pinned' in updatedTask:
task = radb().getTask(task_id)
if not task:
abort(404, "unknown task %s" % str(updatedTask))
curpc.setTaskDataPinned(task['otdb_id'], updatedTask['data_pinned'])
return "", 204
except Exception as e:
logger.error(e)
abort(404, str(e))
abort(406)
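# Illustrative sketch (not part of the original service) of driving the PUT endpoint
# above from a client; the host and port are assumptions (7412 is the default
# webserver port set in main() below) and the requests library is only used here:
#   import requests
#   requests.put('http://localhost:7412/rest/tasks/42',
#                json={'id': 42,
#                      'starttime': '2018-06-01T12:00:00',
#                      'endtime': '2018-06-01T13:00:00',
#                      'status': 'prescheduled'})
# 'id' must match the task_id in the URL, timestamps must be ISO formatted, and a
# 'status' change is routed through OTDB rather than written into RADB directly.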
@app.route('/rest/tasks/<int:task_id>/cleanup', methods=['DELETE'])
def cleanupTaskData(task_id):
try:
delete_params = {}
if 'Content-Type' in request.headers and (request.headers['Content-Type'].startswith('application/json') or request.headers['Content-Type'].startswith('text/plain')):
delete_params = json_loads(request.data.decode('utf-8'))
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task (id=%s)' % task_id)
logger.info("cleanup task data id=%s otdb_id=%s delete_params=%s", task_id, task['otdb_id'], delete_params)
result = curpc.removeTaskData(task['otdb_id'],
delete_is=delete_params.get('delete_is', True),
delete_cs=delete_params.get('delete_cs', True),
delete_uv=delete_params.get('delete_uv', True),
delete_im=delete_params.get('delete_im', True),
delete_img=delete_params.get('delete_img', True),
delete_pulp=delete_params.get('delete_pulp', True),
delete_scratch=delete_params.get('delete_scratch', True),
force=delete_params.get('force_delete', False))
logger.info(result)
return jsonify(result)
except Exception as e:
abort(500)
@app.route('/rest/tasks/<int:task_id>/datapath', methods=['GET'])
@gzipped
def getTaskDataPath(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task (id=%s)' % task_id)
result = sqrpc.getPathForOTDBId(task['otdb_id'])
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify({'datapath': result['path']})
abort(404, result['message'] if result and 'message' in result else '')
@app.route('/rest/tasks/otdb/<int:otdb_id>/diskusage', methods=['GET'])
@gzipped
def getTaskDiskUsageByOTDBId(otdb_id):
try:
result = sqrpc.getDiskUsageForTaskAndSubDirectories(otdb_id=otdb_id, force_update=request.args.get('force')=='true')
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify(result)
abort(404, result['message'] if result and 'message' in result else '')
@app.route('/rest/tasks/<int:task_id>/diskusage', methods=['GET'])
@gzipped
def getTaskDiskUsage(task_id):
try:
result = sqrpc.getDiskUsageForTaskAndSubDirectories(radb_id=task_id, force_update=request.args.get('force')=='true')
except Exception as e:
abort(500, str(e))
if result['found']:
return jsonify(result)
abort(404, result['message'] if result and 'message' in result else '')
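# The 'force' query parameter on both diskusage routes above bypasses the cached
# result in the storage query service, e.g. (task id illustrative):
#   GET /rest/tasks/123/diskusage?force=true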
@app.route('/rest/tasks/<int:task_id>/parset', methods=['GET'])
@gzipped
def getParset(task_id):
try:
task = radb().getTask(task_id)
if not task:
abort(404)
return getParsetByOTDBId(task['otdb_id'])
except Exception as e:
abort(404)
abort(404)
@app.route('/rest/tasks/otdb/<int:otdb_id>/parset', methods=['GET'])
@gzipped
def getParsetByOTDBId(otdb_id):
try:
logger.info('getParsetByOTDBId(%s)', otdb_id)
parset = otdbrpc.taskGetSpecification(otdb_id=otdb_id)['specification']
return '\n'.join(['%s=%s' % (k,parset[k]) for k in sorted(parset.keys())]), 200, {'Content-Type': 'text/plain; charset=utf-8'}
except Exception as e:
abort(404)
abort(404)
@app.route('/rest/tasks/<int:task_id>/resourceclaims')
@gzipped
def taskResourceClaims(task_id):
return jsonify({'taskResourceClaims': radb().getResourceClaims(task_ids=[task_id], include_properties=True)})
@app.route('/rest/tasktypes')
@gzipped
def tasktypes():
result = radb().getTaskTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'tasktypes': result})
@app.route('/rest/taskstatustypes')
@gzipped
def getTaskStatusTypes():
result = radb().getTaskStatuses()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'taskstatustypes': result})
@app.route('/rest/resourcetypes')
@gzipped
def resourcetypes():
result = radb().getResourceTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'resourcetypes': result})
@app.route('/rest/resourceclaimpropertytypes')
@gzipped
def resourceclaimpropertytypes():
result = radb().getResourceClaimPropertyTypes()
result = sorted(result, key=lambda q: q['id'])
return jsonify({'resourceclaimpropertytypes': result})
@app.route('/rest/projects')
@gzipped
def getProjects():
projects = []
try:
projects = momqueryrpc.getProjects()
projects = [x for x in projects if x['status_id'] in [1, 7]]
for project in projects:
project['mom_id'] = project.pop('mom2id')
except Exception as e:
logger.error(e)
projects.append({'name':'<unknown>', 'mom_id':-99, 'description': 'Container project for tasks for which we could not find a MoM project'})
projects.append({'name':'OTDB Only', 'mom_id':-98, 'description': 'Container project for tasks which exist only in OTDB'})
projects.append({'name':'Reservations', 'mom_id':-97, 'description': 'Container project for reservation tasks'})
return jsonify({'momprojects': projects})
@app.route('/rest/projects/<int:project_mom2id>')
@gzipped
def getProject(project_mom2id):
try:
projects = momqueryrpc.getProjects()
project = next(x for x in projects if x['mom2id'] == project_mom2id)
return jsonify({'momproject': project})
except StopIteration as e:
logger.error(e)
abort(404, "No project with mom2id %s" % project_mom2id)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/tasks')
@gzipped
def getProjectTasks(project_mom2id):
return getProjectTasksFromUntil(project_mom2id, None, None)
@app.route('/rest/projects/<int:project_mom2id>/tasks/<string:fromTimestamp>/<string:untilTimestamp>')
@gzipped
def getProjectTasksFromUntil(project_mom2id, fromTimestamp=None, untilTimestamp=None):
try:
if fromTimestamp and isinstance(fromTimestamp, str):
fromTimestamp = asDatetime(fromTimestamp)
if untilTimestamp and isinstance(untilTimestamp, str):
untilTimestamp = asDatetime(untilTimestamp)
task_mom2ids = momqueryrpc.getProjectTaskIds(project_mom2id)['task_mom2ids']
tasks = radb().getTasks(mom_ids=task_mom2ids, lower_bound=fromTimestamp, upper_bound=untilTimestamp)
updateTaskDetails(tasks)
return jsonify({'tasks': tasks})
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/taskstimewindow')
@gzipped
def getProjectTasksTimeWindow(project_mom2id):
try:
task_mom2ids = momqueryrpc.getProjectTaskIds(project_mom2id)['task_mom2ids']
timewindow = radb().getTasksTimeWindow(mom_ids=task_mom2ids)
return jsonify(timewindow)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<int:project_mom2id>/diskusage')
@gzipped
def getProjectDiskUsageById(project_mom2id):
try:
project = momqueryrpc.getProject(project_mom2id=project_mom2id)
return getProjectDiskUsageByName(project['name'])
except StopIteration as e:
logger.error(e)
abort(404, "No project with mom2id %s" % project_mom2id)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/<string:project_name>/diskusage')
@gzipped
def getProjectDiskUsageByName(project_name):
try:
result = sqrpc.getDiskUsageForProjectDirAndSubDirectories(project_name=project_name, force_update=request.args.get('force')=='true')
return jsonify(result)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/projects/diskusage')
@gzipped
def getProjectsDiskUsage():
try:
result = sqrpc.getDiskUsageForProjectsDirAndSubDirectories(force_update=request.args.get('force')=='true')
return jsonify(result)
except Exception as e:
logger.error(e)
abort(404, str(e))
@app.route('/rest/momobjectdetails/<int:mom2id>')
@gzipped
def getMoMObjectDetails(mom2id):
details = momqueryrpc.getObjectDetails(mom2id)
details = list(details.values())[0] if details else None
if details:
details['project_mom_id'] = details.pop('project_mom2id')
details['object_mom_id'] = details.pop('object_mom2id')
return jsonify({'momobjectdetails': details})
@app.route('/rest/updates/<int:sinceChangeNumber>')
@gzipped
def getUpdateEventsSince(sinceChangeNumber):
changesSince = changeshandler.getChangesSince(sinceChangeNumber)
return jsonify({'changes': changesSince})
@app.route('/rest/mostRecentChangeNumber')
@gzipped
def getMostRecentChangeNumber():
mrcn = changeshandler.getMostRecentChangeNumber()
return jsonify({'mostRecentChangeNumber': mrcn})
@app.route('/rest/updates')
def getUpdateEvents():
return getUpdateEventsSince(-1)
@app.route('/rest/logEvents')
@gzipped
def getMostRecentLogEvents():
return getLogEventsSince(datetime.utcnow() - timedelta(hours=6))
@app.route('/rest/logEvents/<string:fromTimestamp>')
@gzipped
def getLogEventsSince(fromTimestamp=None):
if not fromTimestamp:
fromTimestamp = datetime.utcnow() - timedelta(hours=6)
eventsSince = changeshandler.getEventsSince(fromTimestamp)
return jsonify({'logEvents': eventsSince})
@app.route('/rest/lofarTime')
@gzipped
def getLofarTime():
return jsonify({'lofarTime': asIsoFormat(datetime.utcnow())})
#ugly method to generate html tables for all tasks
@app.route('/tasks.html')
@gzipped
def getTasksHtml():
tasks = radb().getTasks()
if not tasks:
abort(404)
updateTaskDetails(tasks)
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="width:100%">\n'
props = sorted(tasks[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % prop for prop in props)
for task in tasks:
html += '<tr>'
for prop in props:
if prop in task:
if prop == 'id':
html += '<td><a href="/rest/tasks/%s.html">%s</a></td> ' % (task[prop], task[prop])
else:
html += '<td>%s</td> ' % task[prop]
html += '</tr>\n'
html += '</table></body></html>\n'
return html
#ugly method to generate html tables for the task and its claims
@app.route('/tasks/<int:task_id>.html', methods=['GET'])
@gzipped
def getTaskHtml(task_id):
task = radb().getTask(task_id)
if not task:
abort(404, 'No such task %s' % task_id)
task['name'] = 'Task %d' % task['id']
updateTaskDetails([task])
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="">\n'
html += '<h1>Task %s</h1>' % task_id
html += '<p><a href="/tasks/%s/log.html">%s log</a></p> ' % (task['id'], task['type'])
html += '<p><a href="/rest/tasks/%s/parset">view %s parset</a></p> ' % (task['id'], task['type'])
props = sorted(task.keys())
html += '<tr><th>key</th><th>value</th></tr>\n'
for prop in props:
html += '<tr><td>%s</td>' % prop
if prop == 'id':
html += '<td><a href="/tasks/%s.html">%s</a></td> ' % (task[prop], task[prop])
elif prop == 'predecessor_ids' or prop == 'successor_ids':
ids = task[prop]
if ids:
html += '<td>%s</td> ' % ', '.join('<a href="/tasks/%s.html">%s</a>' % (id, id) for id in ids)
else:
html += '<td></td> '
else:
html += '<td>%s</td> ' % task[prop]
html += '</tr>'
html += '</table>\n<br>'
claims = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
if claims:
html += '<h1>Claims</h1>'
for claim in claims:
html += '<table>'
for claim_key,claim_value in list(claim.items()):
if claim_key == 'properties':
html += '<tr><td>properties</td><td><table>'
if claim_value:
propnames = sorted(claim_value[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % propname for propname in propnames)
for prop in claim_value:
html += '<tr>%s</tr>\n' % ''.join('<td>%s</td>' % prop[propname] for propname in propnames)
html += '</table></td></tr>'
elif claim_key == 'saps':
html += '<tr><td>saps</td><td><table>'
saps = claim_value
if saps:
sap_keys = ['sap_nr', 'properties']
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % sap_key for sap_key in sap_keys)
for sap in saps:
html += '<tr>'
for sap_key in sap_keys:
if sap_key == 'properties':
html += '<td><table>'
sap_props = sap[sap_key]
if sap_props:
propnames = sorted(sap_props[0].keys())
html += '<tr>%s</tr>\n' % ''.join('<th>%s</th>' % propname for propname in propnames)
for prop in sap_props:
html += '<tr>%s</tr>\n' % ''.join('<td>%s</td>' % prop[propname] for propname in propnames)
html += '</table></td>'
else:
html += '<td>%s</td>' % (sap[sap_key])
html += '</tr>'
html += '</table></td></tr>'
else:
html += '<tr><td>%s</td><td>%s</td></tr>' % (claim_key,claim_value)
html += '</table>'
html += '<br>'
html += '</body></html>\n'
return html
@app.route('/rest/tasks/<int:task_id>/resourceclaims.html', methods=['GET'])
@gzipped
def resourceClaimsForTaskHtml(task_id):
claims = radb().getResourceClaims(task_ids=[task_id], extended=True, include_properties=True)
if not claims:
abort(404, 'No resource claims for task %s' % task_id)
html = '<!DOCTYPE html><html><head><title>Tasks</title><style>table, th, td {border: 1px solid black; border-collapse: collapse; padding: 4px;}</style></head><body><table style="">\n'
for claim in claims:
html += '<tr><td>%s</td>' % claim
html += '</table></body></html>\n'
return html
@app.route('/tasks/<int:task_id>/log.html', methods=['GET'])
@gzipped
def getTaskLogHtml(task_id):
task = radb().getTask(task_id)
cmd = []
if task['type'] == 'pipeline':
cmd = ['ssh', '[email protected]', 'cat /data/log/pipeline-%s-*.log' % task['otdb_id']]
else:
cmd = ['ssh', 'mcu001.control.lofar', 'cat /opt/lofar/var/log/mcu001\\:ObservationControl\\[0\\]\\{%s\\}.log*' % task['otdb_id']]
logger.info(' '.join(cmd))
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = communicate_returning_strings(proc)
if proc.returncode == 0:
return out, 200, {'Content-Type': 'text/plain; charset=utf-8'}
else:
return err, 500, {'Content-Type': 'text/plain; charset=utf-8'}
def main():
# make sure we run in UTC timezone
import os
os.environ['TZ'] = 'UTC'
# Check the invocation arguments
parser = OptionParser('%prog [options]',
description='run the resource assignment editor web service')
parser.add_option('--webserver_port', dest='webserver_port', type='int', default=7412, help='port number on which to host the webservice, default: %default')
parser.add_option('-q', '--broker', dest='broker', type='string', default=DEFAULT_BROKER, help='Address of the qpid broker, default: %default')
parser.add_option('--exchange', dest='exchange', type='string', default=DEFAULT_BUSNAME, help='Name of the bus exchange on the qpid broker, default: %default')
parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
parser.add_option_group(dbcredentials.options_group(parser))
parser.set_defaults(dbcredentials="RADB")
(options, args) = parser.parse_args()
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG if options.verbose else logging.INFO)
global _radb_dbcreds
_radb_dbcreds = dbcredentials.parse_options(options)
if _radb_dbcreds.database:
logger.info("Using dbcreds for direct RADB access: %s" % _radb_dbcreds.stringWithHiddenPassword())
else:
_radb_dbcreds = None
global rarpc
rarpc = RADBRPC.create(exchange=options.exchange, broker=options.broker)
global otdbrpc
otdbrpc = OTDBRPC.create(exchange=options.exchange, broker=options.broker)
global curpc
curpc = CleanupRPC.create(exchange=options.exchange, broker=options.broker)
global sqrpc
sqrpc = StorageQueryRPC.create(exchange=options.exchange, timeout=10, broker=options.broker)
global momqueryrpc
momqueryrpc = MoMQueryRPC.create(exchange=options.exchange, timeout=10, broker=options.broker)
global changeshandler
changeshandler = ChangesHandler(exchange=options.exchange,
broker=options.broker, momqueryrpc=momqueryrpc, radbrpc=rarpc, sqrpc=sqrpc)
with changeshandler, rarpc, otdbrpc, curpc, sqrpc, momqueryrpc:
'''Start the webserver'''
app.run(debug=options.verbose, threaded=True, host='0.0.0.0', port=options.webserver_port)
if __name__ == '__main__':
main()
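# Typical invocation (sketch; the module filename and the broker address are
# deployment-specific assumptions):
#   python webservice.py --webserver_port 7412 --broker mybroker.example.com -V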
| gpl-3.0 | -5,672,104,624,823,130,000 | 38.244797 | 197 | 0.632027 | false |
frreiss/tensorflow-fred | tensorflow/python/data/experimental/kernel_tests/optimize_dataset_test.py | 1 | 18438 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the private `_OptimizeDataset` transformation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import os
import warnings
from absl.testing import parameterized
import numpy as np
from tensorflow.python.data.experimental.ops import batching
from tensorflow.python.data.experimental.ops import grouping
from tensorflow.python.data.experimental.ops import optimization_options
from tensorflow.python.data.experimental.ops import scan_ops
from tensorflow.python.data.experimental.ops import testing
from tensorflow.python.data.experimental.ops import threadpool
from tensorflow.python.data.kernel_tests import test_base
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.framework import combinations
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.platform import test
def _captured_refvar_test_combinations():
def make_map_dataset(var):
return dataset_ops.Dataset.from_tensors(0).map(lambda x: x + var)
def make_flat_map_dataset(var):
return dataset_ops.Dataset.from_tensors(
0).flat_map(lambda _: dataset_ops.Dataset.from_tensors(var))
def make_filter_dataset(var):
return dataset_ops.Dataset.from_tensors(0).filter(lambda x: x < var)
def make_map_and_batch_dataset(var):
def map_fn(x):
return x + var
return dataset_ops.Dataset.from_tensors(0).apply(
batching.map_and_batch(map_fn, 1))
def make_group_by_reducer_dataset(var):
reducer = grouping.Reducer(
init_func=lambda _: 0,
reduce_func=lambda x, y: x,
finalize_func=lambda _: var)
return dataset_ops.Dataset.range(5).apply(
grouping.group_by_reducer(lambda x: x % 2, reducer))
def make_group_by_window_dataset(var):
def reduce_fn(key, bucket):
del key, bucket
return dataset_ops.Dataset.from_tensors(var)
return dataset_ops.Dataset.from_tensors(0).repeat(10).apply(
grouping.group_by_window(lambda _: 0, reduce_fn, 10))
def make_scan_dataset(var):
return dataset_ops.Dataset.from_tensors(0).apply(
scan_ops.scan(
0, lambda old_state, elem: (old_state + 1, elem + old_state + var)))
cases = [
# Core datasets
("Map", make_map_dataset),
("FlatMap", make_flat_map_dataset),
("Filter", make_filter_dataset),
# Experimental datasets
("MapAndBatch", make_map_and_batch_dataset),
("GroupByReducer", make_group_by_reducer_dataset),
("GroupByWindow", make_group_by_window_dataset),
("Scan", make_scan_dataset)
]
def reduce_fn(x, y):
name, dataset_fn = y
return x + combinations.combine(
dataset_fn=combinations.NamedObject(name, dataset_fn))
return functools.reduce(reduce_fn, cases, [])
def _disable_intra_op_parallelism_test_combinations():
def make_tensor_dataset():
return dataset_ops.Dataset.from_tensors(42)
def make_map_dataset():
return dataset_ops.Dataset.from_tensors(42).map(lambda x: x + 1)
cases = [
("FromTensors", make_tensor_dataset, [42]),
("Map", make_map_dataset, [43]),
]
def reduce_fn(x, y):
name, dataset_fn, expected_output = y
return x + combinations.combine(
dataset_fn=combinations.NamedObject(name, dataset_fn),
expected_output=[expected_output])
return functools.reduce(reduce_fn, cases, [])
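# Rough sketch (not part of the tests) of what the fold above produces: a flat list
# of test-parameter dicts, one per named case, roughly
#   [{'dataset_fn': combinations.NamedObject('FromTensors', make_tensor_dataset),
#     'expected_output': [42]},
#    {'dataset_fn': combinations.NamedObject('Map', make_map_dataset),
#     'expected_output': [43]}]
# combinations.times() later crosses these dicts with the default test combinations.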
class OptimizeDatasetTest(test_base.DatasetTestBase, parameterized.TestCase):
@combinations.generate(test_base.default_test_combinations())
def testOptimizationStatefulFunction(self):
dataset = dataset_ops.Dataset.range(
10).map(lambda _: random_ops.random_uniform([])).batch(10)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
get_next = self.getNext(dataset)
self.evaluate(get_next())
# TODO(b/123902160)
@combinations.generate(test_base.graph_only_combinations())
def testOptimizationLargeInputFromTensor(self):
input_t = array_ops.placeholder(dtypes.int32, (None, None, None))
dataset = dataset_ops.Dataset.from_tensors(input_t)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
iterator = dataset_ops.make_initializable_iterator(dataset)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op, {input_t: np.ones([512, 1024, 1025], np.int32)})
self.evaluate(get_next)
# TODO(b/123902160)
@combinations.generate(test_base.graph_only_combinations())
def testOptimizationLargeInputFromTensorSlices(self):
input_t = array_ops.placeholder(dtypes.int32, (None, None, None, None))
dataset = dataset_ops.Dataset.from_tensor_slices(input_t)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
iterator = dataset_ops.make_initializable_iterator(dataset)
init_op = iterator.initializer
get_next = iterator.get_next()
with self.cached_session() as sess:
sess.run(init_op, {input_t: np.ones([1, 512, 1024, 1025], np.int32)})
self.evaluate(get_next)
@combinations.generate(test_base.default_test_combinations())
def testOptimizationNestedDataset(self):
def flat_map_fn(_):
dataset = dataset_ops.Dataset.from_tensors(0)
dataset = dataset.apply(testing.assert_next(["MemoryCacheImpl"]))
dataset = dataset.skip(0) # Should be removed by noop elimination
dataset = dataset.cache()
return dataset
dataset = dataset_ops.Dataset.range(1)
dataset = dataset.flat_map(flat_map_fn)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
options.experimental_optimization.noop_elimination = True
dataset = dataset.with_options(options)
self.assertDatasetProduces(dataset, expected_output=[0])
@combinations.generate(test_base.default_test_combinations())
def testOptimizationNestedDatasetWithModifiedRetval(self):
def flat_map_fn(_):
dataset = dataset_ops.Dataset.from_tensors(0)
dataset = dataset.apply(testing.assert_next(["MapAndBatch"]))
# Should be fused by map and batch fusion
dataset = dataset.map(lambda x: x)
dataset = dataset.batch(1)
return dataset
dataset = dataset_ops.Dataset.range(1)
dataset = dataset.flat_map(flat_map_fn)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
options.experimental_optimization.map_and_batch_fusion = True
dataset = dataset.with_options(options)
self.assertDatasetProduces(dataset, expected_output=[[0]])
@combinations.generate(
combinations.times(test_base.default_test_combinations(),
_disable_intra_op_parallelism_test_combinations()))
def testOptimizationDisableIntraOpParallelism(self, dataset_fn,
expected_output):
os.environ["TF_DATA_EXPERIMENT_OPT_IN"] = "disable_intra_op_parallelism"
os.environ["TF_JOB_NAME"] = "test_job"
dataset = dataset_fn()
dataset = dataset.apply(testing.assert_next(["MaxIntraOpParallelism"]))
self.assertDatasetProduces(dataset, expected_output=expected_output)
del os.environ["TF_DATA_EXPERIMENT_OPT_IN"]
del os.environ["TF_JOB_NAME"]
@combinations.generate(test_base.default_test_combinations())
def testOptimizationThreadPoolDataset(self):
dataset = dataset_ops.Dataset.range(10).batch(10)
dataset = threadpool.override_threadpool(
dataset,
threadpool.PrivateThreadPool(
2, display_name="private_thread_pool_%d" % 2))
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
dataset = dataset.with_options(options)
self.assertDatasetProduces(
dataset,
expected_output=[list(range(10))],
requires_initialization=True)
# Reference variables are not supported in eager mode.
@combinations.generate(
combinations.times(test_base.graph_only_combinations(),
_captured_refvar_test_combinations()))
def testOptimizationWithCapturedRefVar(self, dataset_fn):
"""Tests that default optimizations are disabled with ref variables."""
variable = variable_scope.get_variable(
"v", initializer=0, use_resource=False)
assign_op = variable.assign_add(1)
# Check that warning is logged.
warnings.simplefilter("always")
with warnings.catch_warnings(record=True) as w:
unoptimized_dataset = dataset_fn(variable)
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
options.experimental_optimization.noop_elimination = True
options.experimental_optimization.map_and_batch_fusion = True
optimized_dataset = unoptimized_dataset.with_options(options)
optimized_it = dataset_ops.make_initializable_iterator(optimized_dataset)
self.assertGreaterEqual(len(w), 1)
graph_rewrites = options._graph_rewrites()
expected = (
"tf.data graph rewrites are not compatible with "
"tf.Variable. The following rewrites will be disabled: %s."
" To enable rewrites, use resource variables instead by "
"calling `tf.enable_resource_variables()` at the start of the "
"program." %
(", ".join(graph_rewrites.enabled + graph_rewrites.default)))
self.assertTrue(any(expected in str(warning) for warning in w))
# Check that outputs are the same in the optimized and unoptimized cases,
# when the variable value is changing.
unoptimized_it = dataset_ops.make_initializable_iterator(
unoptimized_dataset)
with ops.control_dependencies([assign_op]):
unoptimized_output = unoptimized_it.get_next()
optimized_output = optimized_it.get_next()
self.evaluate(variable.initializer)
self.evaluate((unoptimized_it.initializer, optimized_it.initializer))
while True:
try:
unoptimized, optimized = self.evaluate((unoptimized_output,
optimized_output))
self.assertEqual(unoptimized, optimized)
except errors.OutOfRangeError:
break
@combinations.generate(test_base.default_test_combinations())
def testOptimizationDefault(self):
"""Tests the optimization settings by default."""
options = dataset_ops.Options()
expected_optimizations_enabled = []
expected_optimizations_disabled = []
expected_optimizations_default = [
"map_and_batch_fusion",
"noop_elimination",
"shuffle_and_repeat_fusion",
]
graph_rewrites = options._graph_rewrites()
self.assertEqual(set(graph_rewrites.enabled),
set(expected_optimizations_enabled))
self.assertEqual(set(graph_rewrites.disabled),
set(expected_optimizations_disabled))
self.assertEqual(set(graph_rewrites.default),
set(expected_optimizations_default))
options.experimental_optimization.apply_default_optimizations = True
graph_rewrites = options._graph_rewrites()
self.assertEqual(set(graph_rewrites.enabled),
set(expected_optimizations_enabled))
self.assertEqual(set(graph_rewrites.disabled),
set(expected_optimizations_disabled))
self.assertEqual(set(graph_rewrites.default),
set(expected_optimizations_default))
options.experimental_optimization.apply_default_optimizations = False
expected_optimizations_default = []
graph_rewrites = options._graph_rewrites()
self.assertEqual(set(graph_rewrites.enabled),
set(expected_optimizations_enabled))
self.assertEqual(set(graph_rewrites.disabled),
set(expected_optimizations_disabled))
self.assertEqual(set(graph_rewrites.default),
set(expected_optimizations_default))
@combinations.generate(test_base.default_test_combinations())
def testOptimizationEnabled(self):
"""Tests the optimization settings by enabling all."""
options = dataset_ops.Options()
options.experimental_optimization.filter_fusion = True
options.experimental_optimization.filter_with_random_uniform_fusion = True
options.experimental_optimization.hoist_random_uniform = True
options.experimental_optimization.map_and_batch_fusion = True
options.experimental_optimization.map_and_filter_fusion = True
options.experimental_optimization.map_parallelization = True
options.experimental_optimization.map_fusion = True
options.experimental_optimization.noop_elimination = True
options.experimental_optimization.parallel_batch = True
options.experimental_optimization.shuffle_and_repeat_fusion = True
options.experimental_optimization.map_vectorization.enabled = True
options.experimental_optimization.autotune_buffers = True
options.experimental_deterministic = False
options.experimental_stats.latency_all_edges = True
options.experimental_slack = True
expected_optimizations_enabled = [
"filter_fusion",
"filter_with_random_uniform_fusion",
"hoist_random_uniform",
"map_and_batch_fusion",
"map_and_filter_fusion",
"map_parallelization",
"map_fusion",
"noop_elimination",
"parallel_batch",
"shuffle_and_repeat_fusion",
"map_vectorization",
"inject_prefetch",
"make_sloppy",
"latency_all_edges",
"slack",
]
expected_optimizations_disabled = []
expected_optimizations_default = []
graph_rewrites = options._graph_rewrites()
self.assertEqual(set(graph_rewrites.enabled),
set(expected_optimizations_enabled))
self.assertEqual(set(graph_rewrites.disabled),
set(expected_optimizations_disabled))
self.assertEqual(set(graph_rewrites.default),
set(expected_optimizations_default))
@combinations.generate(test_base.default_test_combinations())
def testOptimizationDisabled(self):
"""Tests the optimization settings by disabling all."""
options = dataset_ops.Options()
options.experimental_optimization.filter_fusion = False
options.experimental_optimization.filter_with_random_uniform_fusion = False
options.experimental_optimization.hoist_random_uniform = False
options.experimental_optimization.map_and_batch_fusion = False
options.experimental_optimization.map_and_filter_fusion = False
options.experimental_optimization.map_parallelization = False
options.experimental_optimization.map_fusion = False
options.experimental_optimization.noop_elimination = False
options.experimental_optimization.parallel_batch = False
options.experimental_optimization.shuffle_and_repeat_fusion = False
options.experimental_optimization.map_vectorization.enabled = False
options.experimental_optimization.autotune = False
options.experimental_deterministic = True
options.experimental_stats.latency_all_edges = False
options.experimental_slack = False
expected_optimizations_enabled = []
expected_optimizations_disabled = [
"filter_fusion",
"filter_with_random_uniform_fusion",
"hoist_random_uniform",
"map_and_batch_fusion",
"map_and_filter_fusion",
"map_parallelization",
"map_fusion",
"noop_elimination",
"parallel_batch",
"shuffle_and_repeat_fusion",
"map_vectorization",
"inject_prefetch",
"make_sloppy",
"latency_all_edges",
"slack",
]
expected_optimizations_default = []
graph_rewrites = options._graph_rewrites()
self.assertEqual(set(graph_rewrites.enabled),
set(expected_optimizations_enabled))
self.assertEqual(set(graph_rewrites.disabled),
set(expected_optimizations_disabled))
self.assertEqual(set(graph_rewrites.default),
set(expected_optimizations_default))
@combinations.generate(test_base.default_test_combinations())
def testAutotuningDefaults(self):
options = dataset_ops.Options()
# Check defaults
autotune, algorithm, cpu_budget, ram_budget = options._autotune_settings()
self.assertTrue(autotune)
self.assertEqual(algorithm,
optimization_options._AutotuneAlgorithm.HILL_CLIMB)
self.assertEqual(cpu_budget, 0)
self.assertEqual(ram_budget, 0)
@combinations.generate(test_base.default_test_combinations())
def testAutotuningSettings(self):
options = dataset_ops.Options()
options.experimental_optimization.autotune_cpu_budget = 1000
options.experimental_optimization.autotune_ram_budget = 999999999
options.experimental_optimization.autotune_buffers = True
self.assertIn("inject_prefetch", options._graph_rewrites().enabled)
autotune, algorithm, cpu_budget, ram_budget = options._autotune_settings()
self.assertTrue(autotune)
self.assertEqual(algorithm,
optimization_options._AutotuneAlgorithm.GRADIENT_DESCENT)
self.assertEqual(cpu_budget, 1000)
self.assertEqual(ram_budget, 999999999)
if __name__ == "__main__":
test.main()
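# When run outside of bazel, this test file can also be executed directly (sketch):
#   python tensorflow/python/data/experimental/kernel_tests/optimize_dataset_test.py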
| apache-2.0 | 2,611,285,402,051,419,000 | 39.612335 | 80 | 0.70192 | false |
pyfa-org/eos | tests/integration/stats/slot/test_launcher.py | 1 | 5726 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import EffectMode
from eos import ModuleHigh
from eos import Ship
from eos.const.eos import ModAffecteeFilter
from eos.const.eos import ModDomain
from eos.const.eos import ModOperator
from eos.const.eve import AttrId
from eos.const.eve import EffectCategoryId
from eos.const.eve import EffectId
from tests.integration.stats.testcase import StatsTestCase
class TestLauncherSlot(StatsTestCase):
def setUp(self):
StatsTestCase.setUp(self)
self.mkattr(attr_id=AttrId.launcher_slots_left)
self.effect = self.mkeffect(
effect_id=EffectId.launcher_fitted,
category_id=EffectCategoryId.passive)
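# setUp above registers the launcher_slots_left attribute and the passive
# 'launcher fitted' effect once, so every test in this class builds its ship and
# module types against the same fixture.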
def test_output(self):
# Check that modified attribute of ship is used
src_attr = self.mkattr()
modifier = self.mkmod(
affectee_filter=ModAffecteeFilter.item,
affectee_domain=ModDomain.self,
affectee_attr_id=AttrId.launcher_slots_left,
operator=ModOperator.post_mul,
affector_attr_id=src_attr.id)
mod_effect = self.mkeffect(
category_id=EffectCategoryId.passive,
modifiers=[modifier])
self.fit.ship = Ship(self.mktype(
attrs={AttrId.launcher_slots_left: 3, src_attr.id: 2},
effects=[mod_effect]).id)
# Verification
self.assertEqual(self.fit.stats.launcher_slots.total, 6)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
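# Worked example of the assertion above: the ship type starts with
# launcher_slots_left = 3 and the post-multiplication modifier applies
# src_attr = 2, so the modified slot total is 3 * 2 = 6.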
def test_output_ship_absent(self):
# Verification
self.assertEqual(self.fit.stats.launcher_slots.total, 0)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_output_ship_attr_absent(self):
self.fit.ship = Ship(self.mktype().id)
# Verification
self.assertEqual(self.fit.stats.launcher_slots.total, 0)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_output_ship_not_loaded(self):
self.fit.ship = Ship(self.allocate_type_id())
# Verification
self.assertEqual(self.fit.stats.launcher_slots.total, 0)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_multiple(self):
self.fit.modules.high.append(
ModuleHigh(self.mktype(effects=[self.effect]).id))
self.fit.modules.high.append(
ModuleHigh(self.mktype(effects=[self.effect]).id))
# Verification
self.assertEqual(self.fit.stats.launcher_slots.used, 2)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_multiple_with_none(self):
self.fit.modules.high.place(
1, ModuleHigh(self.mktype(effects=[self.effect]).id))
self.fit.modules.high.place(
3, ModuleHigh(self.mktype(effects=[self.effect]).id))
# Verification
# Positions do not matter
self.assertEqual(self.fit.stats.launcher_slots.used, 2)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_item_effect_absent(self):
item1 = ModuleHigh(self.mktype(effects=[self.effect]).id)
item2 = ModuleHigh(self.mktype().id)
self.fit.modules.high.append(item1)
self.fit.modules.high.append(item2)
# Verification
self.assertEqual(self.fit.stats.launcher_slots.used, 1)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_item_effect_disabled(self):
item1 = ModuleHigh(self.mktype(effects=[self.effect]).id)
item2 = ModuleHigh(self.mktype(effects=[self.effect]).id)
item2.set_effect_mode(self.effect.id, EffectMode.force_stop)
self.fit.modules.high.append(item1)
self.fit.modules.high.append(item2)
# Verification
self.assertEqual(self.fit.stats.launcher_slots.used, 1)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_item_absent(self):
# Verification
self.assertEqual(self.fit.stats.launcher_slots.used, 0)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
def test_use_item_not_loaded(self):
self.fit.modules.high.append(ModuleHigh(self.allocate_type_id()))
# Verification
self.assertEqual(self.fit.stats.launcher_slots.used, 0)
# Cleanup
self.assert_solsys_buffers_empty(self.fit.solar_system)
self.assert_log_entries(0)
| lgpl-3.0 | -3,366,892,227,183,581,000 | 38.219178 | 80 | 0.649668 | false |
moschlar/SAUCE | migration/versions/530b45f11128_public_submission.py | 1 | 1291 | """public_submission
Revision ID: 530b45f11128
Revises: 26d123af03a7
Create Date: 2013-10-02 18:31:40.722832
"""
#
# # SAUCE - System for AUtomated Code Evaluation
# # Copyright (C) 2013 Moritz Schlarb
# #
# # This program is free software: you can redistribute it and/or modify
# # it under the terms of the GNU Affero General Public License as published by
# # the Free Software Foundation, either version 3 of the License, or
# # any later version.
# #
# # This program is distributed in the hope that it will be useful,
# # but WITHOUT ANY WARRANTY; without even the implied warranty of
# # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# # GNU Affero General Public License for more details.
# #
# # You should have received a copy of the GNU Affero General Public License
# # along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# revision identifiers, used by Alembic.
revision = '530b45f11128'
down_revision = '26d123af03a7'
from alembic import op
#from alembic.operations import Operations as op
import sqlalchemy as sa
def upgrade():
op.add_column('submissions',
sa.Column('public', sa.Boolean(), nullable=False,
default=False, server_default='False'))
def downgrade():
op.drop_column('submissions', 'public')
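# Applying or reverting this revision with the alembic CLI (sketch):
#   alembic upgrade 530b45f11128
#   alembic downgrade 26d123af03a7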
| agpl-3.0 | 4,536,200,629,361,021,000 | 29.738095 | 79 | 0.726569 | false |
diogoosorio/blog | src/blog_app/blog_app.py | 1 | 2792 | import uuid
import re
from flask import Flask, redirect, render_template, g, abort, request, make_response
from flask_ink.ink import Ink
from flask_caching import Cache
from .settings import SETTINGS, CACHE_SETTINGS
from .repository import LocalRepository
from .parsers import BlogParser
from .pagination import BlogPagination
def build_app():
_app = Flask(__name__)
_app.config.update(SETTINGS)
_cache = Cache(_app, config=CACHE_SETTINGS)
Ink(_app)
return [_app, _cache]
app, cache = build_app() # pylint: disable=invalid-name
@app.before_request
def before_request():
content_dir = app.config['REPO_DIRECTORY']
parser = BlogParser()
g.repository = LocalRepository(content_dir, parser, cache, app.config['PAGESIZE'])
# pagination
page = request.args.get('page')
page = int(page) if page is not None and page.isdigit() else 1
g.page = page
@app.route('/')
def index():
return redirect('/blog', 301)
@app.route('/blog/')
@cache.cached(timeout=1200)
def blog():
template_variables = g.repository.getfiles('entries', g.page)
template_variables['pagination'] = BlogPagination(
page=g.page,
total=template_variables['total'],
per_page=app.config['PAGESIZE']
)
if not template_variables['entries']:
abort(404)
return render_template('blog.html', **template_variables)
@app.route('/blog/rss/')
@cache.cached(timeout=1200)
def rss():
template_variables = g.repository.getfiles('entries', g.page)
g.repository.pagesize = 1
last_entry = g.repository.getfiles('entries', 1)
last_entry = last_entry['entries'][0] if last_entry['entries'] else None
template_variables['uuid'] = uuid
template_variables['last_entry'] = last_entry
response = make_response(render_template('atom.xml', **template_variables))
response.headers['Content-Type'] = 'application/atom+xml'
return response
@app.errorhandler(404)
def page_not_found(_e):
path = request.path
legacy_match = re.match(r'^/blog/entry/([\w-]+)/?$', path, re.I)
if legacy_match:
slug = legacy_match.group(1)
entry = g.repository.getfile('entries', slug)
if entry:
return redirect("/blog/{0}".format(slug), 301)
return render_template('404.html', path=path), 404
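# Example of the legacy-URL handling above: a request for /blog/entry/my-old-post/
# first hits the 404 handler, matches the regex, and, if 'my-old-post' still exists
# in the repository, is permanently (301) redirected to /blog/my-old-post.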
@app.route(u'/blog/<post_name>')
@cache.memoize(timeout=3600)
def blog_detail(post_name):
entry = g.repository.getfile('entries', post_name)
if not entry:
abort(404)
template_variables = {
'entry': entry,
'title': entry['meta'].get('title'),
'description': entry['meta'].get('description')
}
return render_template('detail.html', **template_variables)
if __name__ == '__main__':
app.run(host=app.config['HOST'])
| mit | -903,629,709,035,656,300 | 24.381818 | 86 | 0.657951 | false |
anurag03/integration_tests | cfme/configure/access_control/__init__.py | 1 | 58461 | import attr
import six
from navmazing import NavigateToSibling, NavigateToAttribute
from widgetastic.widget import Checkbox, View, Text, ConditionalSwitchableView
from widgetastic_patternfly import (
BootstrapSelect, Button, Input, Tab, CheckableBootstrapTreeview as CbTree,
BootstrapSwitch, CandidateNotFound, Dropdown)
from widgetastic_manageiq import (
UpDownSelect, PaginationPane, SummaryFormItem, Table, SummaryForm)
from widgetastic_manageiq.expression_editor import GroupTagExpressionEditor
from cfme.base.credential import Credential
from cfme.base.ui import ConfigurationView
from cfme.common import Taggable
from cfme.exceptions import CFMEException, RBACOperationBlocked
from cfme.modeling.base import BaseCollection, BaseEntity
from cfme.utils.appliance.implementations.ui import navigator, CFMENavigateStep, navigate_to
from cfme.utils.blockers import BZ
from cfme.utils.log import logger
from cfme.utils.pretty import Pretty
from cfme.utils.update import Updateable
from cfme.utils.wait import wait_for
EVM_DEFAULT_GROUPS = [
'evmgroup-super_administrator',
'evmgroup-administrator',
'evmgroup-approver',
'evmgroup-auditor',
'evmgroup-desktop',
'evmgroup-operator',
'evmgroup-security',
'evmgroup-support',
'evmgroup-user',
'evmgroup-vm_user'
]
class AccessControlToolbar(View):
""" Toolbar on the Access Control page """
configuration = Dropdown('Configuration')
policy = Dropdown('Policy')
####################################################################################################
# RBAC USER METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class UserForm(ConfigurationView):
""" User Form View."""
name_txt = Input(name='name')
userid_txt = Input(name='userid')
password_txt = Input(id='password')
password_verify_txt = Input(id='verify')
email_txt = Input(name='email')
user_group_select = BootstrapSelect(id='chosen_group')
cancel_button = Button('Cancel')
class UsersEntities(View):
table = Table("//div[@id='records_div' or @id='main_div']//table")
class AllUserView(ConfigurationView):
""" All Users View."""
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(UsersEntities)
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control EVM Users'
)
class AddUserView(UserForm):
""" Add User View."""
add_button = Button('Add')
@property
def is_displayed(self):
return self.accordions.accesscontrol.is_opened and self.title.text == "Adding a new User"
class DetailsUserEntities(View):
smart_management = SummaryForm('Smart Management')
class DetailsUserView(ConfigurationView):
""" User Details view."""
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(DetailsUserEntities)
@property
def is_displayed(self):
return (
self.title.text == 'EVM User "{}"'.format(self.context['object'].name) and
self.accordions.accesscontrol.is_opened
)
class EditUserView(UserForm):
""" User Edit View."""
save_button = Button('Save')
reset_button = Button('Reset')
change_stored_password = Text('#change_stored_password')
cancel_password_change = Text('#cancel_password_change')
@property
def is_displayed(self):
return (
self.title.text == 'Editing User "{}"'.format(self.context['object'].name) and
self.accordions.accesscontrol.is_opened
)
@attr.s
class User(Updateable, Pretty, BaseEntity, Taggable):
""" Class represents an user in CFME UI
Args:
name: Name of the user
credential: User's credentials
email: User's email
groups: Add User to multiple groups in Versions >= 5.9.
cost_center: User's cost center
value_assign: user's value to assign
appliance: appliance under test
"""
pretty_attrs = ['name', 'group']
name = attr.ib(default=None)
credential = attr.ib(default=None)
email = attr.ib(default=None)
groups = attr.ib(default=None)
cost_center = attr.ib(default=None)
value_assign = attr.ib(default=None)
_restore_user = attr.ib(default=None, init=False)
def __enter__(self):
if self._restore_user != self.appliance.user:
logger.info('Switching to new user: %s', self.credential.principal)
self._restore_user = self.appliance.user
self.appliance.server.logout()
self.appliance.user = self
def __exit__(self, *args, **kwargs):
if self._restore_user != self.appliance.user:
logger.info('Restoring to old user: %s', self._restore_user.credential.principal)
self.appliance.server.logout()
self.appliance.user = self._restore_user
self._restore_user = None
def update(self, updates):
""" Update user method
Args:
updates: user data that should be changed
Note: If the updates are the same as the original user data, the update will be
cancelled, as the 'Save' button will not be active
"""
view = navigate_to(self, 'Edit')
self.change_stored_password()
new_updates = {}
if 'credential' in updates:
new_updates.update({
'userid_txt': updates.get('credential').principal,
'password_txt': updates.get('credential').secret,
'password_verify_txt': updates.get('credential').verify_secret
})
new_updates.update({
'name_txt': updates.get('name'),
'email_txt': updates.get('email'),
'user_group_select': getattr(
updates.get('group'),
'description', None)
})
changed = view.fill({
'name_txt': new_updates.get('name_txt'),
'userid_txt': new_updates.get('userid_txt'),
'password_txt': new_updates.get('password_txt'),
'password_verify_txt': new_updates.get('password_verify_txt'),
'email_txt': new_updates.get('email_txt'),
'user_group_select': new_updates.get('user_group_select')
})
if changed:
view.save_button.click()
flash_message = 'User "{}" was saved'.format(updates.get('name', self.name))
else:
view.cancel_button.click()
flash_message = 'Edit of User was cancelled by the user'
view = self.create_view(DetailsUserView, override=updates)
view.flash.assert_message(flash_message)
assert view.is_displayed
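# Usage sketch (field values illustrative): a partial update only fills the form
# fields present in the dict, e.g.
#   user.update({'name': 'jdoe-renamed', 'email': 'jdoe@example.com'})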
def copy(self):
""" Creates copy of existing user
return: User object of copied user
"""
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Copy this User to a new User')
view = self.create_view(AddUserView)
new_user = self.parent.instantiate(
name="{}copy".format(self.name),
credential=Credential(principal='redhat', secret='redhat')
)
view.fill({
'name_txt': new_user.name,
'userid_txt': new_user.credential.principal,
'password_txt': new_user.credential.secret,
'password_verify_txt': new_user.credential.verify_secret
})
view.add_button.click()
view = self.create_view(AllUserView)
view.flash.assert_success_message('User "{}" was saved'.format(new_user.name))
assert view.is_displayed
return new_user
def delete(self, cancel=True):
"""Delete existing user
Args:
cancel: Default value 'True', user will be deleted
'False' - deletion of user will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected user
"""
flash_success_msg = 'EVM User "{}": Delete successful'.format(self.name)
flash_blocked_msg = "Default EVM User \"{}\" cannot be deleted".format(self.name)
delete_user_txt = 'Delete this User'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_user_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_user_txt))
view.toolbar.configuration.item_select(delete_user_txt, handle_alert=cancel)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllUserView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsUserView)
assert view.is_displayed
# TODO update elements, after 1469035 fix
def change_stored_password(self, changes=None, cancel=False):
""" Changes user password
Args:
changes: dict with fields to be changed;
if None, the password fields will only be enabled
cancel: True, if you want to disable password change
"""
view = navigate_to(self, 'Edit')
self.browser.execute_script(
self.browser.get_attribute(
'onClick', self.browser.element(view.change_stored_password)))
if changes:
view.fill(changes)
if cancel:
self.browser.execute_script(
self.browser.get_attribute(
'onClick', self.browser.element(view.cancel_password_change)))
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@property
def description(self):
return self.credential.principal
@property
def my_settings(self):
from cfme.configure.settings import MySettings
my_settings = MySettings(appliance=self.appliance)
return my_settings
@attr.s
class UserCollection(BaseCollection):
ENTITY = User
def simple_user(self, userid, password, fullname=None):
"""If a fullname is not supplied, userid is used for credential principal and user name"""
creds = Credential(principal=userid, secret=password)
return self.instantiate(name=fullname or userid, credential=creds)
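# Usage sketch (the collection accessor name is an assumption of this example):
#   user = appliance.collections.users.simple_user('jdoe', 'smartvm')
# yields a User whose name and credential principal are both 'jdoe'.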
def create(self, name=None, credential=None, email=None, groups=None, cost_center=None,
value_assign=None, cancel=False):
""" User creation method
Args:
name: Name of the user
credential: User's credentials, credential.principal is used as username
email: User's email
groups: Add User to multiple groups in Versions >= 5.9.
cost_center: User's cost center
value_assign: user's value to assign
cancel: True - if you want to cancel user creation,
by default the user will be created
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR update is not allowed
for currently selected role
"""
if self.appliance.version < "5.8":
user_blocked_msg = "Userid has already been taken"
else:
user_blocked_msg = ("Userid is not unique within region {}".format(
self.appliance.server.zone.region.number))
if type(groups) is not list:
groups = [groups]
if self.appliance.version < "5.9" and len(groups) > 1:
raise CFMEException(
"Assigning a user to multiple groups is only supported in CFME versions > 5.8")
user = self.instantiate(
name=name, credential=credential, email=email, groups=groups, cost_center=cost_center,
value_assign=value_assign
)
# view.fill supports iteration over a list when selecting pulldown list items, but
# will throw an exception when the item doesn't appear in the list, so filter out
# null items since they "shouldn't" exist
user_group_names = [getattr(ug, 'description', None) for ug in user.groups if ug]
fill_values = {
'name_txt': user.name,
'userid_txt': user.credential.principal,
'email_txt': user.email,
'user_group_select': user_group_names
}
# only fill password if auth_mode is set to Database
if self.appliance.server.authentication.auth_mode.lower() == 'database':
fill_values.update({
'password_txt': user.credential.secret,
'password_verify_txt': user.credential.verify_secret}
)
view = navigate_to(self, 'Add')
view.fill(fill_values)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new User was cancelled by the user'
else:
view.add_button.click()
flash_message = 'User "{}" was saved'.format(user.name)
try:
view.flash.assert_message(user_blocked_msg)
raise RBACOperationBlocked(user_blocked_msg)
except AssertionError:
pass
view = self.create_view(AllUserView)
view.flash.assert_success_message(flash_message)
assert view.is_displayed
# To ensure tree update
view.browser.refresh()
return user
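# Usage sketch (credential values illustrative):
#   users = appliance.collections.users
#   user = users.create(
#       name='jdoe',
#       credential=Credential(principal='jdoe', secret='smartvm'),
#       email='jdoe@example.com',
#       groups=[group])
# With an auth_mode other than 'database' the password fields are skipped, as above.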
@navigator.register(UserCollection, 'All')
class UserAll(CFMENavigateStep):
VIEW = AllUserView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users')
@navigator.register(UserCollection, 'Add')
class UserAdd(CFMENavigateStep):
VIEW = AddUserView
def prerequisite(self):
navigate_to(self.obj.appliance.server, 'Configuration')
return navigate_to(self.obj, 'All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new User")
@navigator.register(User, 'Details')
class UserDetails(CFMENavigateStep):
VIEW = DetailsUserView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
try:
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users', self.obj.name)
except CandidateNotFound:
self.obj.appliance.browser.widgetastic.refresh()
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Users', self.obj.name)
@navigator.register(User, 'Edit')
class UserEdit(CFMENavigateStep):
VIEW = EditUserView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this User')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# RBAC USER METHODS
####################################################################################################
####################################################################################################
# RBAC GROUP METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class MyCompanyTagsTree(View):
tree_locator = 'tags_treebox'
tree = CbTree(tree_locator)
class MyCompanyTagsExpressionView(View):
tag_expression = GroupTagExpressionEditor()
class MyCompanyTagsWithExpression(Tab):
""" Represents 'My company tags' tab in Group Form """
TAB_NAME = "My Company Tags"
tag_mode = BootstrapSelect(id='use_filter_expression')
tag_settings = ConditionalSwitchableView(reference='tag_mode')
tag_settings.register('Specific Tags', default=True, widget=MyCompanyTagsTree)
tag_settings.register('Tags Based On Expression', widget=MyCompanyTagsExpressionView)
class Hosts_And_Clusters(Tab): # noqa
""" Represents 'Hosts and Clusters' tab in Group Form """
TAB_NAME = "Hosts & Clusters"
tree = CbTree('hac_treebox')
class Vms_And_Templates(Tab): # noqa
""" Represents 'VM's and Templates' tab in Group Form """
TAB_NAME = "VMs & Templates"
tree = CbTree('vat_treebox')
class GroupForm(ConfigurationView):
""" Group Form in CFME UI."""
ldap_groups_for_user = BootstrapSelect(id='ldap_groups_user')
description_txt = Input(name='description')
lookup_ldap_groups_chk = Checkbox(name='lookup')
role_select = BootstrapSelect(id='group_role')
group_tenant = BootstrapSelect(id='group_tenant')
user_to_look_up = Input(name='user')
username = Input(name='user_id')
password = Input(name='password')
tag = SummaryFormItem('Smart Management', 'My Company Tags')
cancel_button = Button('Cancel')
retrieve_button = Button('Retrieve')
my_company_tags = View.nested(MyCompanyTagsWithExpression)
hosts_and_clusters = View.nested(Hosts_And_Clusters)
vms_and_templates = View.nested(Vms_And_Templates)
class AddGroupView(GroupForm):
""" Add Group View in CFME UI """
add_button = Button("Add")
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == "Adding a new Group"
)
class DetailsGroupEntities(View):
smart_management = SummaryForm('Smart Management')
my_company_tags = View.nested(MyCompanyTagsWithExpression)
hosts_and_clusters = View.nested(Hosts_And_Clusters)
vms_and_templates = View.nested(Vms_And_Templates)
class DetailsGroupView(ConfigurationView):
""" Details Group View in CFME UI """
toolbar = View.nested(AccessControlToolbar)
entities = View.nested(DetailsGroupEntities)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'EVM Group "{}"'.format(self.context['object'].description)
)
class EditGroupView(GroupForm):
""" Edit Group View in CFME UI """
save_button = Button("Save")
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Editing Group "{}"'.format(self.context['object'].description)
)
class AllGroupView(ConfigurationView):
""" All Groups View in CFME UI """
toolbar = View.nested(AccessControlToolbar)
table = Table("//div[@id='main_div']//table")
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control EVM Groups'
)
class EditGroupSequenceView(ConfigurationView):
""" Edit Groups Sequence View in CFME UI """
group_order_selector = UpDownSelect(
'#seq_fields',
'//button[@title="Move selected fields up"]/i',
'//button[@title="Move selected fields down"]/i')
save_button = Button('Save')
reset_button = Button('Reset')
cancel_button = Button('Cancel')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == "Editing Sequence of User Groups"
)
@attr.s
class Group(BaseEntity, Taggable):
"""Represents a group in CFME UI
Properties:
description: group description
role: group role
tenant: group tenant
user_to_lookup: ldap user to lookup
ldap_credentials: ldap user credentials
tag: tag for group restriction
host_cluster: host/cluster for group restriction
vm_template: vm/template for group restriction
appliance: appliance under test
"""
pretty_attrs = ['description', 'role']
description = attr.ib(default=None)
role = attr.ib(default=None)
tenant = attr.ib(default="My Company")
ldap_credentials = attr.ib(default=None)
user_to_lookup = attr.ib(default=None)
tag = attr.ib(default=None)
host_cluster = attr.ib(default=None)
vm_template = attr.ib(default=None)
def _retrieve_ldap_user_groups(self):
""" Retrive ldap user groups
return: AddGroupView
"""
view = navigate_to(self.parent, 'Add')
view.fill({'lookup_ldap_groups_chk': True,
'user_to_look_up': self.user_to_lookup,
'username': self.ldap_credentials.principal,
'password': self.ldap_credentials.secret})
view.retrieve_button.click()
return view
def _retrieve_ext_auth_user_groups(self):
""" Retrive external authorization user groups
return: AddGroupView
"""
view = navigate_to(self.parent, 'Add')
view.fill({'lookup_ldap_groups_chk': True,
'user_to_look_up': self.user_to_lookup})
view.retrieve_button.click()
return view
def _fill_ldap_group_lookup(self, view):
""" Fills ldap info for group lookup
Args: view: view for group creation(AddGroupView)
"""
view.fill({'ldap_groups_for_user': self.description,
'description_txt': self.description,
'role_select': self.role,
'group_tenant': self.tenant})
view.add_button.click()
view = self.create_view(AllGroupView)
view.flash.assert_success_message('Group "{}" was saved'.format(self.description))
assert view.is_displayed
def add_group_from_ldap_lookup(self):
"""Adds a group from ldap lookup"""
view = self._retrieve_ldap_user_groups()
self._fill_ldap_group_lookup(view)
def add_group_from_ext_auth_lookup(self):
"""Adds a group from external authorization lookup"""
view = self._retrieve_ext_auth_user_groups()
self._fill_ldap_group_lookup(view)
def update(self, updates):
""" Update group method
Args:
updates: group data that should be changed
        Note: if ``updates`` matches the original group data, the update is
            canceled, since the 'Save' button will not be active
"""
edit_group_txt = 'Edit this Group'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(edit_group_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
edit_group_txt))
view = navigate_to(self, 'Edit')
changed = view.fill({
'description_txt': updates.get('description'),
'role_select': updates.get('role'),
'group_tenant': updates.get('tenant')
})
changed_tag = self._set_group_restriction(view.my_company_tags, updates.get('tag'))
changed_host_cluster = self._set_group_restriction(
view.hosts_and_clusters, updates.get('host_cluster'))
changed_vm_template = self._set_group_restriction(
view.vms_and_templates, updates.get('vm_template'))
if changed or changed_tag or changed_host_cluster or changed_vm_template:
view.save_button.click()
flash_message = 'Group "{}" was saved'.format(
updates.get('description', self.description))
else:
view.cancel_button.click()
flash_message = 'Edit of Group was cancelled by the user'
view = self.create_view(DetailsGroupView, override=updates)
view.flash.assert_message(flash_message)
assert view.is_displayed
def delete(self, cancel=True):
"""
Delete existing group
Args:
cancel: Default value 'True', group will be deleted
'False' - deletion of group will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected group
"""
flash_success_msg = 'EVM Group "{}": Delete successful'.format(self.description)
flash_blocked_msg_list = [
('EVM Group "{}": '
'Error during delete: A read only group cannot be deleted.'.format(self.description)),
('EVM Group "{}": Error during delete: '
'The group has users assigned that do not '
'belong to any other group'.format(self.description))]
delete_group_txt = 'Delete this Group'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_group_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_group_txt))
view.toolbar.configuration.item_select(delete_group_txt, handle_alert=cancel)
for flash_blocked_msg in flash_blocked_msg_list:
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_no_error()
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllGroupView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsGroupView)
assert view.is_displayed, (
"Access Control Group {} Detail View is not displayed".format(self.description))
def set_group_order(self, updated_order):
""" Sets group order for group lookup
Args:
updated_order: group order list
"""
if self.appliance.version < "5.9.2":
name_column = "Name"
else:
name_column = "Description"
find_row_kwargs = {name_column: self.description}
view = navigate_to(self.parent, 'All')
row = view.paginator.find_row_on_pages(view.table, **find_row_kwargs)
original_sequence = row.sequence.text
original_order = self.group_order[:len(updated_order)]
view = self.create_view(EditGroupSequenceView)
assert view.is_displayed
# We pick only the same amount of items for comparing
if updated_order == original_order:
return # Ignore that, would cause error on Save click
view.group_order_selector.fill(updated_order)
view.save_button.click()
view = self.create_view(AllGroupView)
assert view.is_displayed
row = view.paginator.find_row_on_pages(view.table, **find_row_kwargs)
changed_sequence = row.sequence.text
assert original_sequence != changed_sequence, "{} Group Edit Sequence Failed".format(
self.description)
def _set_group_restriction(self, tab_view, item, update=True):
""" Sets tag/host/template restriction for the group
Args:
tab_view: tab view
            item: a (path, action_type) tuple, where path is the list of node names
                leading to the check box that should be selected/deselected,
                ex. _set_group_restriction(tab_view, ([parent, child], True))
                or a tags expression (string) to be set in My Company Tags in the
                expression editor,
                ex. _set_group_restriction(tab_view, 'fill_tag(My Company Tags : Auto Approve - Max CPU, 1)'),
                _set_group_restriction(tab_view, 'delete_whole_expression')
update: If True - checkbox state will be updated
Returns: True - if update is successful
"""
updated_result = False
if item is not None:
if update:
if isinstance(item, six.string_types):
updated_result = tab_view.fill({
'tag_mode': 'Tags Based On Expression',
'tag_settings': {'tag_expression': item}})
else:
path, action_type = item
if isinstance(path, list):
                        # Fall back gracefully: some tabs nest the tree under .form
                        # and/or .tag_settings, others expose it directly.
                        tab_form = getattr(tab_view, 'form', tab_view)
                        tree_view = getattr(tab_form, 'tag_settings', tab_form)
node = (tree_view.tree.CheckNode(path) if action_type else
tree_view.tree.UncheckNode(path))
updated_result = tree_view.tree.fill(node)
return updated_result
@property
def group_order(self):
view = navigate_to(self, 'EditGroupSequence')
return view.group_order_selector.items
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@attr.s
class GroupCollection(BaseCollection):
""" Collection object for the :py:class: `cfme.configure.access_control.Group`. """
ENTITY = Group
def create(self, description=None, role=None, tenant="My Company", ldap_credentials=None,
user_to_lookup=None, tag=None, host_cluster=None, vm_template=None, cancel=False):
""" Create group method
Args:
description: group description
role: group role
tenant: group tenant
user_to_lookup: ldap user to lookup
ldap_credentials: ldap user credentials
tag: tag for group restriction
host_cluster: host/cluster for group restriction
vm_template: vm/template for group restriction
cancel: True - if you want to cancel group creation,
by default group will be created
Throws:
            RBACOperationBlocked: If the operation is blocked because the current
                user lacks the appropriate permissions OR creation is not allowed
                (e.g. the group description is not unique within the region)
"""
if self.appliance.version < "5.8":
flash_blocked_msg = ("Description has already been taken")
else:
flash_blocked_msg = "Description is not unique within region {}".format(
self.appliance.server.zone.region.number)
view = navigate_to(self, 'Add')
group = self.instantiate(
description=description, role=role, tenant=tenant, ldap_credentials=ldap_credentials,
user_to_lookup=user_to_lookup, tag=tag, host_cluster=host_cluster,
vm_template=vm_template)
view.fill({
'description_txt': group.description,
'role_select': group.role,
'group_tenant': group.tenant
})
group._set_group_restriction(view.my_company_tags, group.tag)
group._set_group_restriction(view.hosts_and_clusters, group.host_cluster)
group._set_group_restriction(view.vms_and_templates, group.vm_template)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new Group was cancelled by the user'
else:
view.add_button.click()
flash_message = 'Group "{}" was saved'.format(group.description)
view = self.create_view(AllGroupView)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_success_message(flash_message)
assert view.is_displayed
# To ensure that the group list is updated
view.browser.refresh()
return group
@navigator.register(GroupCollection, 'All')
class GroupAll(CFMENavigateStep):
VIEW = AllGroupView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Groups')
def resetter(self, *args, **kwargs):
self.obj.appliance.browser.widgetastic.browser.refresh()
@navigator.register(GroupCollection, 'Add')
class GroupAdd(CFMENavigateStep):
VIEW = AddGroupView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new Group")
@navigator.register(Group, 'EditGroupSequence')
class EditGroupSequence(CFMENavigateStep):
VIEW = EditGroupSequenceView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select(
'Edit Sequence of User Groups for LDAP Look Up')
@navigator.register(Group, 'Details')
class GroupDetails(CFMENavigateStep):
VIEW = DetailsGroupView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Groups', self.obj.description)
@navigator.register(Group, 'Edit')
class GroupEdit(CFMENavigateStep):
VIEW = EditGroupView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this Group')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END RBAC GROUP METHODS
####################################################################################################
####################################################################################################
# RBAC ROLE METHODS
####################################################################################################
class RoleForm(ConfigurationView):
""" Role Form for CFME UI """
name_txt = Input(name='name')
vm_restriction_select = BootstrapSelect(id='vm_restriction')
features_tree = CbTree("features_treebox")
cancel_button = Button('Cancel')
class AddRoleView(RoleForm):
""" Add Role View """
add_button = Button('Add')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Adding a new Role'
)
class EditRoleView(RoleForm):
""" Edit Role View """
save_button = Button('Save')
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Editing Role "{}"'.format(self.context['object'].name)
)
class DetailsRoleView(RoleForm):
""" Details Role View """
toolbar = View.nested(AccessControlToolbar)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Role "{}"'.format(self.context['object'].name)
)
class AllRolesView(ConfigurationView):
""" All Roles View """
toolbar = View.nested(AccessControlToolbar)
table = Table("//div[@id='main_div']//table")
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control Roles'
)
@attr.s
class Role(Updateable, Pretty, BaseEntity):
""" Represents a role in CFME UI
Args:
name: role name
vm_restriction: restriction used for role
product_features: product feature to select
        appliance: appliance under test
"""
pretty_attrs = ['name', 'product_features']
name = attr.ib(default=None)
vm_restriction = attr.ib(default=None)
product_features = attr.ib(default=None)
def __attrs_post_init__(self):
if not self.product_features:
self.product_features = []
def update(self, updates):
""" Update role method
Args:
updates: role data that should be changed
        Note: if ``updates`` matches the original role data, the update is
            canceled, since the 'Save' button will not be active
"""
flash_blocked_msg = "Read Only Role \"{}\" can not be edited".format(self.name)
edit_role_txt = 'Edit this Role'
view = navigate_to(self, 'Details')
# TODO: Remove following code when toolbar disappear issue (BZ1630012) get patched
if not view.toolbar.configuration.is_displayed:
view.browser.refresh()
if not view.toolbar.configuration.item_enabled(edit_role_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
edit_role_txt))
view = navigate_to(self, 'Edit')
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
changed = view.fill({
'name_txt': updates.get('name'),
'vm_restriction_select': updates.get('vm_restriction')
})
feature_changed = self.set_role_product_features(view, updates.get('product_features'))
if changed or feature_changed:
view.save_button.click()
flash_message = 'Role "{}" was saved'.format(updates.get('name', self.name))
else:
view.cancel_button.click()
flash_message = 'Edit of Role was cancelled by the user'
view = self.create_view(DetailsRoleView, override=updates)
view.flash.assert_message(flash_message)
# Typically this would be a safe check but BZ 1561698 will sometimes cause the accordion
# to fail to update the role name w/o a manual refresh causing is_displayed to fail
# Instead of inserting a blind refresh, just disable this until the bug is resolved since
# it's a good check for accordion UI failures
# See BZ https://bugzilla.redhat.com/show_bug.cgi?id=1561698
if not BZ(1561698, forced_streams=['5.9']).blocks:
assert view.is_displayed
def delete(self, cancel=True):
""" Delete existing role
Args:
cancel: Default value 'True', role will be deleted
'False' - deletion of role will be canceled
Throws:
RBACOperationBlocked: If operation is blocked due to current user
not having appropriate permissions OR delete is not allowed
for currently selected role
"""
flash_blocked_msg = ("Role \"{}\": Error during delete: Cannot delete record "
"because of dependent entitlements".format(self.name))
flash_success_msg = 'Role "{}": Delete successful'.format(self.name)
delete_role_txt = 'Delete this Role'
view = navigate_to(self, 'Details')
if not view.toolbar.configuration.item_enabled(delete_role_txt):
raise RBACOperationBlocked("Configuration action '{}' is not enabled".format(
delete_role_txt))
view.toolbar.configuration.item_select(delete_role_txt, handle_alert=cancel)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_message(flash_success_msg)
if cancel:
view = self.create_view(AllRolesView)
view.flash.assert_success_message(flash_success_msg)
else:
view = self.create_view(DetailsRoleView)
assert view.is_displayed
def copy(self, name=None):
""" Creates copy of existing role
Returns: Role object of copied role
"""
if name is None:
name = "{}_copy".format(self.name)
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select('Copy this Role to a new Role')
view = self.create_view(AddRoleView)
new_role = self.parent.instantiate(name=name)
view.fill({'name_txt': new_role.name})
view.add_button.click()
view = self.create_view(AllRolesView)
view.flash.assert_success_message('Role "{}" was saved'.format(new_role.name))
assert view.is_displayed
return new_role
def set_role_product_features(self, view, product_features):
""" Sets product features for role restriction
Args:
view: AddRoleView or EditRoleView
product_features: list of product features with options to select
"""
if product_features is not None and isinstance(product_features, (list, tuple, set)):
changes = [
view.fill({
'features_tree': CbTree.CheckNode(path) if option else CbTree.UncheckNode(path)
})
for path, option in product_features
]
return True in changes
else:
return False
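    # Example shape of product_features (tree paths assumed for illustration):
    #   [(['Everything', 'Compute'], True), (['Everything', 'Settings'], False)]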
@attr.s
class RoleCollection(BaseCollection):
ENTITY = Role
def create(self, name=None, vm_restriction=None, product_features=None, cancel=False):
""" Create role method
Args:
cancel: True - if you want to cancel role creation,
by default, role will be created
Raises:
            RBACOperationBlocked: If the operation is blocked because the current
                user lacks the appropriate permissions OR creation is not allowed
                (e.g. the role name has already been taken)
"""
flash_blocked_msg = "Name has already been taken"
role = self.instantiate(
name=name, vm_restriction=vm_restriction, product_features=product_features
)
view = navigate_to(self, 'Add')
view.fill({'name_txt': role.name,
'vm_restriction_select': role.vm_restriction})
role.set_role_product_features(view, role.product_features)
if cancel:
view.cancel_button.click()
flash_message = 'Add of new Role was cancelled by the user'
else:
view.add_button.click()
flash_message = 'Role "{}" was saved'.format(role.name)
view = self.create_view(AllRolesView)
try:
view.flash.assert_message(flash_blocked_msg)
raise RBACOperationBlocked(flash_blocked_msg)
except AssertionError:
pass
view.flash.assert_success_message(flash_message)
assert view.is_displayed
return role
@navigator.register(RoleCollection, 'All')
class RoleAll(CFMENavigateStep):
VIEW = AllRolesView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Roles')
@navigator.register(RoleCollection, 'Add')
class RoleAdd(CFMENavigateStep):
VIEW = AddRoleView
prerequisite = NavigateToSibling('All')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select("Add a new Role")
@navigator.register(Role, 'Details')
class RoleDetails(CFMENavigateStep):
VIEW = DetailsRoleView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
        self.prerequisite_view.browser.refresh()  # workaround for 5.9 issue of role not shown
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Roles', self.obj.name)
@navigator.register(Role, 'Edit')
class RoleEdit(CFMENavigateStep):
VIEW = EditRoleView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this Role')
####################################################################################################
# RBAC TENANT METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class TenantForm(ConfigurationView):
""" Tenant Form """
name = Input(name='name')
description = Input(name='description')
add_button = Button('Add')
cancel_button = Button('Cancel')
class TenantQuotaForm(View):
cpu_cb = BootstrapSwitch(id='cpu_allocated')
memory_cb = BootstrapSwitch(id='mem_allocated')
storage_cb = BootstrapSwitch(id='storage_allocated')
vm_cb = BootstrapSwitch(id='vms_allocated')
template_cb = BootstrapSwitch(id='templates_allocated')
cpu_txt = Input(id='id_cpu_allocated')
memory_txt = Input(id='id_mem_allocated')
storage_txt = Input(id='id_storage_allocated')
vm_txt = Input(id='id_vms_allocated')
template_txt = Input(id='id_templates_allocated')
class TenantQuotaView(ConfigurationView):
""" Tenant Quota View """
form = View.nested(TenantQuotaForm)
save_button = Button('Save')
reset_button = Button('Reset')
cancel_button = Button('Cancel')
@property
def is_displayed(self):
return (
self.form.template_cb.is_displayed and
self.title.text == 'Manage quotas for {} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name))
class AllTenantView(ConfigurationView):
""" All Tenants View """
toolbar = View.nested(AccessControlToolbar)
table = Table('//*[@id="miq-gtl-view"]/miq-data-table/div/table')
paginator = PaginationPane()
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == 'Access Control Tenants'
)
class AddTenantView(ConfigurationView):
""" Add Tenant View """
form = View.nested(TenantForm)
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.form.description.is_displayed and
self.title.text in ('Adding a new Project', 'Adding a new Tenant')
)
class DetailsTenantEntities(View):
smart_management = SummaryForm('Smart Management')
class DetailsTenantView(ConfigurationView):
""" Details Tenant View """
entities = View.nested(DetailsTenantEntities)
# Todo move to entities
toolbar = View.nested(AccessControlToolbar)
name = Text('Name')
description = Text('Description')
parent = Text('Parent')
table = Table('//*[self::fieldset or @id="fieldset"]/table')
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == '{} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name)
)
class ParentDetailsTenantView(DetailsTenantView):
""" Parent Tenant Details View """
@property
def is_displayed(self):
return (
self.accordions.accesscontrol.is_opened and
self.title.text == '{} "{}"'.format(self.context['object'].parent_tenant.obj_type,
self.context['object'].parent_tenant.name)
)
class EditTenantView(View):
""" Edit Tenant View """
form = View.nested(TenantForm)
save_button = Button('Save')
reset_button = Button('Reset')
@property
def is_displayed(self):
return (
self.form.accordions.accesscontrol.is_opened and
self.form.description.is_displayed and
self.form.title.text == 'Editing {} "{}"'.format(self.context['object'].obj_type,
self.context['object'].name)
)
@attr.s
class Tenant(Updateable, BaseEntity, Taggable):
""" Class representing CFME tenants in the UI.
* Kudos to mfalesni *
The behaviour is shared with Project, which is the same except it cannot create more nested
tenants/projects.
Args:
name: Name of the tenant
description: Description of the tenant
parent_tenant: Parent tenant, can be None, can be passed as string or object
"""
obj_type = 'Tenant'
name = attr.ib()
description = attr.ib(default="")
parent_tenant = attr.ib(default=None)
_default = attr.ib(default=False)
def update(self, updates):
""" Update tenant/project method
Args:
updates: tenant/project data that should be changed
        Note: if ``updates`` matches the original tenant/project data, the update
            is canceled, since the 'Save' button will not be active
"""
view = navigate_to(self, 'Edit')
changed = view.form.fill(updates)
if changed:
view.save_button.click()
if self.appliance.version < '5.9':
flash_message = 'Project "{}" was saved'.format(updates.get('name', self.name))
else:
flash_message = '{} "{}" has been successfully saved.'.format(
self.obj_type, updates.get('name', self.name))
else:
view.cancel_button.click()
if self.appliance.version < '5.9':
flash_message = 'Edit of Project "{}" was cancelled by the user'.format(
updates.get('name', self.name))
else:
flash_message = 'Edit of {} "{}" was canceled by the user.'.format(
self.obj_type, updates.get('name', self.name))
view = self.create_view(DetailsTenantView, override=updates)
view.flash.assert_message(flash_message)
def delete(self, cancel=True):
""" Delete existing role
Args:
cancel: Default value 'True', role will be deleted
'False' - deletion of role will be canceled
"""
view = navigate_to(self, 'Details')
view.toolbar.configuration.item_select(
'Delete this item', handle_alert=cancel)
if cancel:
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(
'Tenant "{}": Delete successful'.format(self.description))
else:
            view = self.create_view(DetailsTenantView)
assert view.is_displayed
def set_quota(self, **kwargs):
""" Sets tenant quotas """
view = navigate_to(self, 'ManageQuotas')
changed = view.form.fill({'cpu_cb': kwargs.get('cpu_cb'),
'cpu_txt': kwargs.get('cpu'),
'memory_cb': kwargs.get('memory_cb'),
'memory_txt': kwargs.get('memory'),
'storage_cb': kwargs.get('storage_cb'),
'storage_txt': kwargs.get('storage'),
'vm_cb': kwargs.get('vm_cb'),
'vm_txt': kwargs.get('vm'),
'template_cb': kwargs.get('template_cb'),
'template_txt': kwargs.get('template')})
if changed:
view.save_button.click()
expected_msg = 'Quotas for {} "{}" were saved'.format(self.obj_type, self.name)
else:
view.cancel_button.click()
expected_msg = 'Manage quotas for {} "{}" was cancelled by the user'\
.format(self.obj_type, self.name)
view = self.create_view(DetailsTenantView)
view.flash.assert_success_message(expected_msg)
assert view.is_displayed
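    # Hedged usage sketch (kwarg names match the fill mapping above; values assumed):
    #   tenant.set_quota(cpu_cb=True, cpu='8', memory_cb=True, memory='16.0')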
@property
def quota(self):
view = navigate_to(self, 'Details')
quotas = {
'cpu': 'Allocated Virtual CPUs',
'memory': 'Allocated Memory in GB',
'storage': 'Allocated Storage in GB',
'num_vms': 'Allocated Number of Virtual Machines',
'templates': 'Allocated Number of Templates'
}
for field in quotas:
item = view.table.row(name=quotas[field])
quotas[field] = {
'total': item.total_quota.text,
'in_use': item.in_use.text,
'allocated': item.allocated.text,
'available': item.available.text
}
return quotas
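    # Each entry of the returned dict has the shape
    #   quotas['cpu'] == {'total': ..., 'in_use': ..., 'allocated': ..., 'available': ...}
    # populated from the matching row of the Details table.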
def __eq__(self, other):
if not isinstance(other, type(self)):
return False
else:
return self.tree_path == other.tree_path
@property
def exists(self):
try:
navigate_to(self, 'Details')
return True
except CandidateNotFound:
return False
@property
def tree_path(self):
if self._default:
return [self.name]
else:
return self.parent_tenant.tree_path + [self.name]
@property
def parent_path(self):
return self.parent_tenant.tree_path
@attr.s
class TenantCollection(BaseCollection):
"""Collection class for Tenant"""
ENTITY = Tenant
def get_root_tenant(self):
return self.instantiate(str(self.appliance.rest_api.collections.tenants[0].name),
default=True)
def create(self, name, description, parent):
if self.appliance.version > '5.9':
tenant_success_flash_msg = 'Tenant "{}" has been successfully added.'
else:
tenant_success_flash_msg = 'Tenant "{}" was saved'
tenant = self.instantiate(name, description, parent)
view = navigate_to(tenant.parent_tenant, 'Details')
view.toolbar.configuration.item_select('Add child Tenant to this Tenant')
view = self.create_view(AddTenantView)
wait_for(lambda: view.is_displayed, timeout=5)
changed = view.form.fill({'name': name,
'description': description})
if changed:
view.form.add_button.click()
else:
view.form.cancel_button.click()
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(tenant_success_flash_msg.format(name))
return tenant
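    # Hypothetical usage (identifiers assumed):
    #   root = appliance.collections.tenants.get_root_tenant()
    #   child = appliance.collections.tenants.create('eng', 'Engineering tenant', root)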
def delete(self, *tenants):
view = navigate_to(self, 'All')
for tenant in tenants:
try:
row = view.table.row(name=tenant.name)
row[0].check()
except Exception:
logger.exception('Failed to check element "%s"', tenant.name)
else:
view.toolbar.configuration.item_select('Delete selected items', handle_alert=True)
@navigator.register(TenantCollection, 'All')
class TenantAll(CFMENavigateStep):
VIEW = AllTenantView
prerequisite = NavigateToAttribute('appliance.server', 'Configuration')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Tenants')
@navigator.register(Tenant, 'Details')
class TenantDetails(CFMENavigateStep):
VIEW = DetailsTenantView
prerequisite = NavigateToAttribute('parent', 'All')
def step(self):
self.prerequisite_view.accordions.accesscontrol.tree.click_path(
self.obj.appliance.server_region_string(), 'Tenants', *self.obj.tree_path)
@navigator.register(Tenant, 'Edit')
class TenantEdit(CFMENavigateStep):
VIEW = EditTenantView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Edit this item')
@navigator.register(Tenant, 'ManageQuotas')
class TenantManageQuotas(CFMENavigateStep):
VIEW = TenantQuotaView
prerequisite = NavigateToSibling('Details')
def step(self):
self.prerequisite_view.toolbar.configuration.item_select('Manage Quotas')
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END TENANT METHODS
####################################################################################################
####################################################################################################
# RBAC PROJECT METHODS
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
class Project(Tenant):
""" Class representing CFME projects in the UI.
Project cannot create more child tenants/projects.
Args:
name: Name of the project
description: Description of the project
parent_tenant: Parent project, can be None, can be passed as string or object
"""
obj_type = 'Project'
class ProjectCollection(TenantCollection):
"""Collection class for Projects under Tenants"""
ENTITY = Project
def get_root_tenant(self):
# returning Tenant directly because 'My Company' needs to be treated like Tenant object,
# to be able to make child tenant/project under it
return self.appliance.collections.tenants.instantiate(
name=str(self.appliance.rest_api.collections.tenants[0].name), default=True)
def create(self, name, description, parent):
if self.appliance.version > '5.9':
project_success_flash_msg = 'Project "{}" has been successfully added.'
else:
project_success_flash_msg = 'Project "{}" was saved'
project = self.instantiate(name, description, parent)
view = navigate_to(project.parent_tenant, 'Details')
view.toolbar.configuration.item_select('Add Project to this Tenant')
view = self.create_view(AddTenantView)
wait_for(lambda: view.is_displayed, timeout=5)
changed = view.form.fill({'name': name,
'description': description})
if changed:
view.form.add_button.click()
else:
view.form.cancel_button.click()
view = self.create_view(ParentDetailsTenantView)
view.flash.assert_success_message(project_success_flash_msg.format(name))
return project
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# END PROJECT METHODS
####################################################################################################
| gpl-2.0 | 2,881,992,309,818,430,000 | 34.821691 | 100 | 0.600349 | false |
uksf/modpack | tools/build.py | 1 | 3612 | #!/usr/bin/env python3
import os
import sys
import subprocess
######## GLOBALS #########
MAINPREFIX = "u"
PREFIX = "uksf_"
##########################
def tryHemttBuild(projectpath):
hemttExe = os.path.join(projectpath, "hemtt.exe")
if os.path.isfile(hemttExe):
os.chdir(projectpath)
ret = subprocess.call([hemttExe, "pack"], stderr=subprocess.STDOUT)
return True
else:
print("hemtt not installed");
return False
def mod_time(path):
if not os.path.isdir(path):
return os.path.getmtime(path)
maxi = os.path.getmtime(path)
for p in os.listdir(path):
maxi = max(mod_time(os.path.join(path, p)), maxi)
return maxi
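# mod_time returns the newest modification time found anywhere under `path`
# (recursing into subdirectories), so a directory compares by its most
# recently touched file.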
def check_for_changes(addonspath, module):
if not os.path.exists(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module))):
return True
return mod_time(os.path.join(addonspath, module)) > mod_time(os.path.join(addonspath, "{}{}.pbo".format(PREFIX,module)))
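# Rebuild rule: a module is repacked when its uksf_<module>.pbo is missing or any
# file under addons/<module> has a newer mtime than the existing PBO.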
def check_for_obsolete_pbos(addonspath, file):
module = file[len(PREFIX):-4]
if not os.path.exists(os.path.join(addonspath, module)):
return True
return False
def main(argv):
print("""
#####################
# UKSF Debug Build #
#####################
""")
compile_extensions = False
if "compile" in argv:
argv.remove("compile")
compile_extensions = True
scriptpath = os.path.realpath(__file__)
projectpath = os.path.dirname(os.path.dirname(scriptpath))
addonspath = os.path.join(projectpath, "addons")
extensionspath = os.path.join(projectpath, "extensions")
if (not tryHemttBuild(projectpath)):
os.chdir(addonspath)
made = 0
failed = 0
skipped = 0
removed = 0
for file in os.listdir(addonspath):
if os.path.isfile(file):
if check_for_obsolete_pbos(addonspath, file):
removed += 1
print(" Removing obsolete file => " + file)
os.remove(file)
print("")
for p in os.listdir(addonspath):
path = os.path.join(addonspath, p)
if not os.path.isdir(path):
continue
if p[0] == ".":
continue
if not check_for_changes(addonspath, p):
skipped += 1
print(" Skipping {}.".format(p))
continue
print("# Making {} ...".format(p))
try:
subprocess.check_output([
"makepbo",
"-NUP",
"-@={}\\{}\\addons\\{}".format(MAINPREFIX,PREFIX.rstrip("_"),p),
p,
"{}{}.pbo".format(PREFIX,p)
], stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
failed += 1
print(" Failed to make {}.".format(p))
else:
made += 1
print(" Successfully made {}.".format(p))
print("\n# Done.")
print(" Made {}, skipped {}, removed {}, failed to make {}.".format(made, skipped, removed, failed))
if (compile_extensions):
try:
print("\nCompiling extensions in {}".format(extensionspath))
os.chdir(extensionspath)
# Prepare 64bit build dirs
ret = subprocess.call(["msbuild", "uksf.sln", "/m", "/p:Configuration=Release", "/p:Platform=x64"])
if ret == 1:
return 1
        except Exception:
print("Failed to compile extension")
raise
if __name__ == "__main__":
sys.exit(main(sys.argv))
| gpl-3.0 | 6,139,524,776,622,342,000 | 29.1 | 124 | 0.52381 | false |
jacobgilroy/FinalYearProject | MainView.py | 1 | 2934 | from PyQt5.QtWidgets import QWidget, QSplitter, QVBoxLayout, QFrame, QFileDialog, QScrollArea, QMenuBar, QAction, QToolBar
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QIcon
from JamSpace.Views.LaneSpaceView import LaneSpaceView
from JamSpace.Views.ControlBar import ControlBar
class MainView(QWidget):
def __init__(self):
super().__init__()
# declare member variables:
self.laneSpace = LaneSpaceView(parent=self)
self.controlBar = ControlBar(parent=self)
self.menuBar = QMenuBar(self)
self.toolBar = QToolBar(self)
self.toolBar.show()
self.laneScrollArea = QScrollArea()
self.laneScrollArea.setWidgetResizable(True)
self.WIDTH = 900
self.HEIGHT = 700
# Initialise the UI:
self.initUI()
def initUI(self):
self.setGeometry(20, 30, self.WIDTH, self.HEIGHT)
self.setWindowTitle('JamSpace')
# configure the menu bar:
# create menus:
fileMenu = self.menuBar.addMenu('&File')
editMenu = self.menuBar.addMenu('&Edit')
# create actions:
self.exitAction = QAction('Exit', self)
self.exitAction.setStatusTip('Close the application')
self.addLaneAction = QAction(QIcon('addLaneIcon.png'), 'Add Lane', self)
self.playAction = QAction(QIcon('playIcon.png'), 'Play', self)
self.stopAction = QAction(QIcon('stopIcon.ico'), 'Stop', self)
self.addLaneAction.setStatusTip('Add a new lane')
self.playAction.setStatusTip('Start playback')
self.stopAction.setStatusTip('Stop playback')
# add the actions to the menus/toolbar:
fileMenu.addAction(self.exitAction)
self.toolBar.addAction(self.playAction)
self.toolBar.addAction(self.stopAction)
self.toolBar.addAction(self.addLaneAction)
self.laneScrollArea.setWidget(self.laneSpace)
# Instantiate UI components:
laneEditSpace = QFrame(self)
laneEditSpace.setFrameShape(QFrame.StyledPanel)
clipEditSpace = QFrame(self)
clipEditSpace.setFrameShape(QFrame.StyledPanel)
# Apply layout:
vSplitter = QSplitter(Qt.Vertical)
hSplitter = QSplitter(Qt.Horizontal)
hSplitter.addWidget(laneEditSpace)
hSplitter.addWidget(clipEditSpace)
vSplitter.addWidget(self.controlBar)
vSplitter.addWidget(self.laneScrollArea)
vSplitter.addWidget(hSplitter)
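        # Stacking order in the vertical splitter: control bar on top, scrollable
        # lane space in the middle, and the lane/clip edit panes at the bottom.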
vbox = QVBoxLayout(self)
vbox.addWidget(vSplitter)
#vbox.setAlignment(Qt.AlignTop)
self.setLayout(vbox)
self.show()
def showDirectoryDialog(self):
dirSelectionDialog = QFileDialog(self)
projectDir = QFileDialog.getExistingDirectory(dirSelectionDialog, 'Select Project Folder')
        return projectDir
| gpl-3.0 | 15,029,270,134,049,856 | 29.569892 | 122 | 0.647921 | false |
BigEgg/LeetCode | Python/LeetCode.Test/_051_100/Test_068_TextJustification.py | 1 | 1357 | import unittest
import sys
sys.path.append('LeetCode/_051_100')
sys.path.append('LeetCode.Test')
from _068_TextJustification import Solution
import AssertHelper
class Test_068_TextJustification(unittest.TestCase):
def test_fullJustify_1(self):
solution = Solution()
result = solution.fullJustify(["This", "is", "an", "example", "of", "text", "justification."], 16)
AssertHelper.assertArray([
"This is an",
"example of text",
"justification. "
], result)
def test_fullJustify_2(self):
solution = Solution()
result = solution.fullJustify(["What","must","be","acknowledgment","shall","be"], 16)
AssertHelper.assertArray([
"What must be",
"acknowledgment ",
"shall be "
], result)
def test_fullJustify_3(self):
solution = Solution()
result = solution.fullJustify(["Science","is","what","we","understand","well","enough","to","explain","to","a","computer.","Art","is","everything","else","we","do"], 20)
AssertHelper.assertArray([
"Science is what we",
"understand well",
"enough to explain to",
"a computer. Art is",
"everything else we",
"do "
], result)
| mit | 364,406,704,745,799,550 | 33.794872 | 177 | 0.550479 | false |
heromod/migrid | mig/cgi-bin/find.py | 1 | 1096 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# find - [insert a few words of module description on this line]
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
import cgi
import cgitb
cgitb.enable()
from shared.functionality.find import main
from shared.cgiscriptstub import run_cgi_script
run_cgi_script(main)
| gpl-2.0 | -478,678,620,066,712,900 | 30.314286 | 81 | 0.738139 | false |
evanbiederstedt/CMBintheLikeHoodz | source_code/CAMB_vary_OmegaB_lmax1100_Feb2016.py | 1 | 137613 |
# coding: utf-8
# In[1]:
#
#
# hundred_samples = np.linspace(0.05, 0.5, num=100)
#
# Planck found \Omega_CDM
# GAVO simulated map set at \Omega_CDM = 0.122
# CAMB default below at omch2=0.122
#
# In[2]:
#
# First output 200 CAMB scalar outputs
#
# 0.005 to 0.05
#
# In[3]:
from matplotlib import pyplot as plt
import numpy as np
import camb
from camb import model, initialpower
# In[4]:
"""
#Set up a new set of parameters for CAMB
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.022, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(2000, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
for name in powers:
print(name)
# In[5]:
# plot the total lensed CMB power spectra versus unlensed, and fractional difference
totCL=powers['total']
unlensedCL=powers['unlensed_scalar']
print(totCL.shape)
# Python CL arrays are all zero based (starting at L=0), Note L=0,1 entries will be zero by default.
# The differenent CL are always in the order TT, EE, BB, TE (with BB=0 for unlensed scalar results).
ls = np.arange(totCL.shape[0])
print(ls)
#print(totCL[:30]) # print first 30 totCL
fig, ax = plt.subplots(2,2, figsize = (12,12))
ax[0,0].plot(ls,totCL[:,0], color='k')
ax[0,0].plot(ls,unlensedCL[:,0], color='r')
ax[0,0].set_title('TT')
ax[0,1].plot(ls[2:], 1-unlensedCL[2:,0]/totCL[2:,0]);
ax[0,1].set_title(r'$\Delta TT$')
ax[1,0].plot(ls,totCL[:,1], color='k')
ax[1,0].plot(ls,unlensedCL[:,1], color='r')
ax[1,0].set_title(r'$EE$')
ax[1,1].plot(ls,totCL[:,3], color='k')
ax[1,1].plot(ls,unlensedCL[:,3], color='r')
ax[1,1].set_title(r'$TE$');
for ax in ax.reshape(-1): ax.set_xlim([2,2500])
"""
# In[6]:
twohundred_samples = np.linspace(0.005, 0.05, num=200)
#print(twohundred_samples)
#Set up a new set of parameters for CAMB
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.022, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
pars.set_for_lmax(2500, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
for name in powers:
print(name)
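# powers is a dict of spectrum arrays keyed by name; the keys printed above
# typically include 'total' and 'unlensed_scalar' (as used below).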
"""
array([ 0.005 , 0.00522613, 0.00545226, 0.00567839, 0.00590452,
0.00613065, 0.00635678, 0.00658291, 0.00680905, 0.00703518,
0.00726131, 0.00748744, 0.00771357, 0.0079397 , 0.00816583,
0.00839196, 0.00861809, 0.00884422, 0.00907035, 0.00929648,
0.00952261, 0.00974874, 0.00997487, 0.01020101, 0.01042714,
0.01065327, 0.0108794 , 0.01110553, 0.01133166, 0.01155779,
0.01178392, 0.01201005, 0.01223618, 0.01246231, 0.01268844,
0.01291457, 0.0131407 , 0.01336683, 0.01359296, 0.0138191 ,
0.01404523, 0.01427136, 0.01449749, 0.01472362, 0.01494975,
0.01517588, 0.01540201, 0.01562814, 0.01585427, 0.0160804 ,
0.01630653, 0.01653266, 0.01675879, 0.01698492, 0.01721106,
0.01743719, 0.01766332, 0.01788945, 0.01811558, 0.01834171,
0.01856784, 0.01879397, 0.0190201 , 0.01924623, 0.01947236,
0.01969849, 0.01992462, 0.02015075, 0.02037688, 0.02060302,
0.02082915, 0.02105528, 0.02128141, 0.02150754, 0.02173367,
0.0219598 , 0.02218593, 0.02241206, 0.02263819, 0.02286432,
0.02309045, 0.02331658, 0.02354271, 0.02376884, 0.02399497,
0.02422111, 0.02444724, 0.02467337, 0.0248995 , 0.02512563,
0.02535176, 0.02557789, 0.02580402, 0.02603015, 0.02625628,
0.02648241, 0.02670854, 0.02693467, 0.0271608 , 0.02738693,
0.02761307, 0.0278392 , 0.02806533, 0.02829146, 0.02851759,
0.02874372, 0.02896985, 0.02919598, 0.02942211, 0.02964824,
0.02987437, 0.0301005 , 0.03032663, 0.03055276, 0.03077889,
0.03100503, 0.03123116, 0.03145729, 0.03168342, 0.03190955,
0.03213568, 0.03236181, 0.03258794, 0.03281407, 0.0330402 ,
0.03326633, 0.03349246, 0.03371859, 0.03394472, 0.03417085,
0.03439698, 0.03462312, 0.03484925, 0.03507538, 0.03530151,
0.03552764, 0.03575377, 0.0359799 , 0.03620603, 0.03643216,
0.03665829, 0.03688442, 0.03711055, 0.03733668, 0.03756281,
0.03778894, 0.03801508, 0.03824121, 0.03846734, 0.03869347,
0.0389196 , 0.03914573, 0.03937186, 0.03959799, 0.03982412,
0.04005025, 0.04027638, 0.04050251, 0.04072864, 0.04095477,
0.0411809 , 0.04140704, 0.04163317, 0.0418593 , 0.04208543,
0.04231156, 0.04253769, 0.04276382, 0.04298995, 0.04321608,
0.04344221, 0.04366834, 0.04389447, 0.0441206 , 0.04434673,
0.04457286, 0.04479899, 0.04502513, 0.04525126, 0.04547739,
0.04570352, 0.04592965, 0.04615578, 0.04638191, 0.04660804,
0.04683417, 0.0470603 , 0.04728643, 0.04751256, 0.04773869,
0.04796482, 0.04819095, 0.04841709, 0.04864322, 0.04886935,
0.04909548, 0.04932161, 0.04954774, 0.04977387, 0.05 ])
"""
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls0 = unlencl[:,0][2:1101]
print(len(cls0))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls1 = unlencl[:,0][2:1101]
print(len(cls1))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls2 = unlencl[:,0][2:1101]
print(len(cls2))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls3 = unlencl[:,0][2:1101]
print(len(cls3))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls4 = unlencl[:,0][2:1101]
print(len(cls4))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls5 = unlencl[:,0][2:1101]
print(len(cls5))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls6 = unlencl[:,0][2:1101]
print(len(cls6))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls7 = unlencl[:,0][2:1101]
print(len(cls7))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls8 = unlencl[:,0][2:1101]
print(len(cls8))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls9 = unlencl[:,0][2:1101]
print(len(cls9))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls10 = unlencl[:,0][2:1101]
print(len(cls10))
# Runs 11 through 112 repeat the computation above verbatim, differing only
# in the name of the output variable, so they are performed in a single
# loop. Every iteration uses the same cosmological parameters, which means
# each spectrum comes out identical to cls10.
for i in range(11, 113):
    pars = camb.CAMBparams()
    # This function sets up CosmoMC-like settings, with one massive neutrino
    # and helium set using BBN consistency
    pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
    pars.InitPower.set_params(ns=0.965, r=0)
    #pars.set_for_lmax(514, lens_potential_accuracy=0)
    # calculate results for these parameters
    results = camb.get_results(pars)
    # get dictionary of CAMB power spectra
    powers = results.get_cmb_power_spectra(pars)
    unlencl = powers['unlensed_scalar']
    ls = np.arange(unlencl.shape[0])
    print(ls)
    print(len(ls))
    # slice of the spectrum: TT column, multipoles ell = 2 .. 1100
    spectrum = unlencl[:, 0][2:1101]
    print(len(spectrum))
    # bind the original per-run name (cls11, cls12, ..., cls112) so any
    # later code that refers to these variables individually keeps working
    globals()['cls%d' % i] = spectrum
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls113 = unlencl[:,0][2:1101]
print(len(cls113))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls114 = unlencl[:,0][2:1101]
print(len(cls114))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls115 = unlencl[:,0][2:1101]
print(len(cls115))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls116 = unlencl[:,0][2:1101]
print(len(cls116))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls117 = unlencl[:,0][2:1101]
print(len(cls117))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls118 = unlencl[:,0][2:1101]
print(len(cls118))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls119 = unlencl[:,0][2:1101]
print(len(cls119))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls120 = unlencl[:,0][2:1101]
print(len(cls120))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls121 = unlencl[:,0][2:1101]
print(len(cls121))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls122 = unlencl[:,0][2:1101]
print(len(cls122))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls123 = unlencl[:,0][2:1101]
print(len(cls123))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls124 = unlencl[:,0][2:1101]
print(len(cls124))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls125 = unlencl[:,0][2:1101]
print(len(cls125))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls126 = unlencl[:,0][2:1101]
print(len(cls126))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls127 = unlencl[:,0][2:1101]
print(len(cls127))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls128 = unlencl[:,0][2:1101]
print(len(cls128))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls129 = unlencl[:,0][2:1101]
print(len(cls129))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls130 = unlencl[:,0][2:1101]
print(len(cls130))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls131 = unlencl[:,0][2:1101]
print(len(cls131))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls132 = unlencl[:,0][2:1101]
print(len(cls132))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls133 = unlencl[:,0][2:1101]
print(len(cls133))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls134 = unlencl[:,0][2:1101]
print(len(cls134))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls135 = unlencl[:,0][2:1101]
print(len(cls135))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls136 = unlencl[:,0][2:1101]
print(len(cls136))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls137 = unlencl[:,0][2:1101]
print(len(cls137))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls138 = unlencl[:,0][2:1101]
print(len(cls138))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls139 = unlencl[:,0][2:1101]
print(len(cls139))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls140 = unlencl[:,0][2:1101]
print(len(cls140))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls141 = unlencl[:,0][2:1101]
print(len(cls141))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls142 = unlencl[:,0][2:1101]
print(len(cls142))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls143 = unlencl[:,0][2:1101]
print(len(cls143))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls144 = unlencl[:,0][2:1101]
print(len(cls144))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls145 = unlencl[:,0][2:1101]
print(len(cls145))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls146 = unlencl[:,0][2:1101]
print(len(cls146))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls147 = unlencl[:,0][2:1101]
print(len(cls147))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls148 = unlencl[:,0][2:1101]
print(len(cls148))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls149 = unlencl[:,0][2:1101]
print(len(cls149))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls150 = unlencl[:,0][2:1101]
print(len(cls150))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls151 = unlencl[:,0][2:1101]
print(len(cls151))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls152 = unlencl[:,0][2:1101]
print(len(cls152))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls153 = unlencl[:,0][2:1101]
print(len(cls153))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls154 = unlencl[:,0][2:1101]
print(len(cls154))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls155 = unlencl[:,0][2:1101]
print(len(cls155))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls156 = unlencl[:,0][2:1101]
print(len(cls156))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls157 = unlencl[:,0][2:1101]
print(len(cls157))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls158 = unlencl[:,0][2:1101]
print(len(cls158))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls159 = unlencl[:,0][2:1101]
print(len(cls159))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls160 = unlencl[:,0][2:1101]
print(len(cls160))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls161 = unlencl[:,0][2:1101]
print(len(cls161))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls162 = unlencl[:,0][2:1101]
print(len(cls162))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls163 = unlencl[:,0][2:1101]
print(len(cls163))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls164 = unlencl[:,0][2:1101]
print(len(cls164))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls165 = unlencl[:,0][2:1101]
print(len(cls165))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls166 = unlencl[:,0][2:1101]
print(len(cls166))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls167 = unlencl[:,0][2:1101]
print(len(cls167))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls168 = unlencl[:,0][2:1101]
print(len(cls168))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls169 = unlencl[:,0][2:1101]
print(len(cls169))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls170 = unlencl[:,0][2:1101]
print(len(cls170))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls171 = unlencl[:,0][2:1101]
print(len(cls171))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls172 = unlencl[:,0][2:1101]
print(len(cls172))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls173 = unlencl[:,0][2:1101]
print(len(cls173))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls174 = unlencl[:,0][2:1101]
print(len(cls174))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls175 = unlencl[:,0][2:1101]
print(len(cls175))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls176 = unlencl[:,0][2:1101]
print(len(cls176))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls177 = unlencl[:,0][2:1101]
print(len(cls177))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls178 = unlencl[:,0][2:1101]
print(len(cls178))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls179 = unlencl[:,0][2:1101]
print(len(cls179))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls180 = unlencl[:,0][2:1101]
print(len(cls180))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls181 = unlencl[:,0][2:1101]
print(len(cls181))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls182 = unlencl[:,0][2:1101]
print(len(cls182))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls183 = unlencl[:,0][2:1101]
print(len(cls183))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls184 = unlencl[:,0][2:1101]
print(len(cls184))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls185 = unlencl[:,0][2:1101]
print(len(cls185))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls186 = unlencl[:,0][2:1101]
print(len(cls186))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls187 = unlencl[:,0][2:1101]
print(len(cls187))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls188 = unlencl[:,0][2:1101]
print(len(cls188))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls189 = unlencl[:,0][2:1101]
print(len(cls189))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls190 = unlencl[:,0][2:1101]
print(len(cls190))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls191 = unlencl[:,0][2:1101]
print(len(cls191))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls192 = unlencl[:,0][2:1101]
print(len(cls192))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls193 = unlencl[:,0][2:1101]
print(len(cls193))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls194 = unlencl[:,0][2:1101]
print(len(cls194))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls195 = unlencl[:,0][2:1101]
print(len(cls195))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls196 = unlencl[:,0][2:1101]
print(len(cls196))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# plot of spectrum
cls197 = unlencl[:,0][2:1101]
print(len(cls197))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# spectrum slice for plotting (ell = 2..1100)
cls198 = unlencl[:,0][2:1101]
print(len(cls198))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# spectrum slice for plotting (ell = 2..1100)
cls199 = unlencl[:,0][2:1101]
print(len(cls199))
pars = camb.CAMBparams()
#This function sets up CosmoMC-like settings, with one massive neutrino and helium set using BBN consistency
pars.set_cosmology(H0=67.5, ombh2=0.005, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
pars.InitPower.set_params(ns=0.965, r=0)
#pars.set_for_lmax(514, lens_potential_accuracy=0)
#calculate results for these parameters
results = camb.get_results(pars)
#get dictionary of CAMB power spectra
powers = results.get_cmb_power_spectra(pars)
unlencl = powers['unlensed_scalar']
ls = np.arange(unlencl.shape[0])
print(ls)
print(len(ls))
#
# spectrum slice for plotting (ell = 2..1100)
cls200 = unlencl[:,0][2:1101]
print(len(cls200))
"""
0.005
0.00522613065327
0.00545226130653
0.0056783919598
0.00590452261307
0.00613065326633
0.0063567839196
0.00658291457286
0.00680904522613
0.0070351758794
0.00726130653266
0.00748743718593
0.0077135678392
0.00793969849246
0.00816582914573
0.00839195979899
0.00861809045226
0.00884422110553
0.00907035175879
0.00929648241206
0.00952261306533
0.00974874371859
0.00997487437186
0.0102010050251
0.0104271356784
0.0106532663317
0.0108793969849
0.0111055276382
0.0113316582915
0.0115577889447
0.011783919598
0.0120100502513
0.0122361809045
0.0124623115578
0.0126884422111
0.0129145728643
0.0131407035176
0.0133668341709
0.0135929648241
0.0138190954774
0.0140452261307
0.0142713567839
0.0144974874372
0.0147236180905
0.0149497487437
0.015175879397
0.0154020100503
0.0156281407035
0.0158542713568
0.0160804020101
0.0163065326633
0.0165326633166
0.0167587939698
0.0169849246231
0.0172110552764
0.0174371859296
0.0176633165829
0.0178894472362
0.0181155778894
0.0183417085427
0.018567839196
0.0187939698492
0.0190201005025
0.0192462311558
0.019472361809
0.0196984924623
0.0199246231156
0.0201507537688
0.0203768844221
0.0206030150754
0.0208291457286
0.0210552763819
0.0212814070352
0.0215075376884
0.0217336683417
0.021959798995
0.0221859296482
0.0224120603015
0.0226381909548
0.022864321608
0.0230904522613
0.0233165829146
0.0235427135678
0.0237688442211
0.0239949748744
0.0242211055276
0.0244472361809
0.0246733668342
0.0248994974874
0.0251256281407
0.025351758794
0.0255778894472
0.0258040201005
0.0260301507538
0.026256281407
0.0264824120603
0.0267085427136
0.0269346733668
0.0271608040201
0.0273869346734
0.0276130653266
0.0278391959799
0.0280653266332
0.0282914572864
0.0285175879397
0.028743718593
0.0289698492462
0.0291959798995
0.0294221105528
0.029648241206
0.0298743718593
0.0301005025126
0.0303266331658
0.0305527638191
0.0307788944724
0.0310050251256
0.0312311557789
0.0314572864322
0.0316834170854
0.0319095477387
0.032135678392
0.0323618090452
0.0325879396985
0.0328140703518
0.033040201005
0.0332663316583
0.0334924623116
0.0337185929648
0.0339447236181
0.0341708542714
0.0343969849246
0.0346231155779
0.0348492462312
0.0350753768844
0.0353015075377
0.035527638191
0.0357537688442
0.0359798994975
0.0362060301508
0.036432160804
0.0366582914573
0.0368844221106
0.0371105527638
0.0373366834171
0.0375628140704
0.0377889447236
0.0380150753769
0.0382412060302
0.0384673366834
0.0386934673367
0.0389195979899
0.0391457286432
0.0393718592965
0.0395979899497
0.039824120603
0.0400502512563
0.0402763819095
0.0405025125628
0.0407286432161
0.0409547738693
0.0411809045226
0.0414070351759
0.0416331658291
0.0418592964824
0.0420854271357
0.0423115577889
0.0425376884422
0.0427638190955
0.0429899497487
0.043216080402
0.0434422110553
0.0436683417085
0.0438944723618
0.0441206030151
0.0443467336683
0.0445728643216
0.0447989949749
0.0450251256281
0.0452512562814
0.0454773869347
0.0457035175879
0.0459296482412
0.0461557788945
0.0463819095477
0.046608040201
0.0468341708543
0.0470603015075
0.0472864321608
0.0475125628141
0.0477386934673
0.0479648241206
0.0481909547739
0.0484170854271
0.0486432160804
0.0488693467337
0.0490954773869
0.0493216080402
0.0495477386935
0.0497738693467
0.05
"""
# In[50]:
cl_array = np.array([cls0, cls1, cls2, cls3, cls4, cls5, cls6, cls7, cls8, cls9, cls10,
cls11, cls12, cls13, cls14, cls15, cls16, cls17, cls18, cls19, cls20,
cls21, cls22, cls23, cls24, cls25, cls26, cls27, cls28, cls29, cls30,
cls31, cls32, cls33, cls34, cls35, cls36, cls37, cls38, cls39, cls40,
cls41, cls42, cls43, cls44, cls45, cls46, cls47, cls48, cls49, cls50,
cls51, cls52, cls53, cls54, cls55, cls56, cls57, cls58, cls59, cls60,
cls61, cls62, cls63, cls64, cls65, cls66, cls67, cls68, cls69, cls70,
cls71, cls72, cls73, cls74, cls75, cls76, cls77, cls78, cls79, cls80,
cls81, cls82, cls83, cls84, cls85, cls86, cls87, cls88, cls89, cls90,
cls91, cls92, cls93, cls94, cls95, cls96, cls97, cls98, cls99, cls100,
cls101, cls102, cls103, cls104, cls105, cls106, cls107, cls108, cls109, cls110,
cls111, cls112, cls113, cls114, cls115, cls116, cls117, cls118, cls119, cls120,
cls121, cls122, cls123, cls124, cls125, cls126, cls127, cls128, cls129, cls130,
cls131, cls132, cls133, cls134, cls135, cls136, cls137, cls138, cls139, cls140,
cls141, cls142, cls143, cls144, cls145, cls146, cls147, cls148, cls149, cls150,
cls151, cls152, cls153, cls154, cls155, cls156, cls157, cls158, cls159, cls160,
cls161, cls162, cls163, cls164, cls165, cls166, cls167, cls168, cls169, cls170,
cls171, cls172, cls173, cls174, cls175, cls176, cls177, cls178, cls179, cls180,
cls181, cls182, cls183, cls184, cls185, cls186, cls187, cls188, cls189, cls190,
cls191, cls192, cls193, cls194, cls195, cls196, cls197, cls198, cls199, cls200])
# In[51]:
print(cl_array.shape)
# In[52]:
f = "CAMB_cl_varyBaryon_lmax1100varyFeb2016.npy"
np.save(f, cl_array)
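# The 201 near-identical blocks above could be collapsed into one loop.
# A minimal sketch, assuming the intent is to sweep ombh2 over the values
# listed in the docstring (note the blocks as written all pass ombh2=0.005):
#
# cl_list = []
# for ombh2 in np.linspace(0.005, 0.05, 200):
#     pars = camb.CAMBparams()
#     pars.set_cosmology(H0=67.5, ombh2=ombh2, omch2=0.122, mnu=0.06, omk=0, tau=0.06)
#     pars.InitPower.set_params(ns=0.965, r=0)
#     results = camb.get_results(pars)
#     powers = results.get_cmb_power_spectra(pars)
#     cl_list.append(powers['unlensed_scalar'][:, 0][2:1101])
# cl_array = np.array(cl_list)
# np.save("CAMB_cl_varyBaryon_lmax1100varyFeb2016.npy", cl_array)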
| mit | 5,311,655,598,611,238,000 | 26.201621 | 108 | 0.741165 | false |
anish/buildbot | master/buildbot/reporters/gerrit_verify_status.py | 1 | 8571 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.python import failure
from buildbot.process.properties import Interpolate
from buildbot.process.properties import Properties
from buildbot.process.results import CANCELLED
from buildbot.process.results import EXCEPTION
from buildbot.process.results import FAILURE
from buildbot.process.results import RETRY
from buildbot.process.results import SKIPPED
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.reporters import http
from buildbot.util import httpclientservice
from buildbot.util.logger import Logger
log = Logger()
class GerritVerifyStatusPush(http.HttpStatusPushBase):
name = "GerritVerifyStatusPush"
neededDetails = dict(wantProperties=True)
# overridable constants
RESULTS_TABLE = {
SUCCESS: 1,
WARNINGS: 1,
FAILURE: -1,
SKIPPED: 0,
EXCEPTION: 0,
RETRY: 0,
CANCELLED: 0
}
DEFAULT_RESULT = -1
@defer.inlineCallbacks
def reconfigService(self,
baseURL,
auth,
startDescription=None,
endDescription=None,
verification_name=None,
abstain=False,
category=None,
reporter=None,
verbose=False,
**kwargs):
auth = yield self.renderSecrets(auth)
yield super().reconfigService(**kwargs)
if baseURL.endswith('/'):
baseURL = baseURL[:-1]
self._http = yield httpclientservice.HTTPClientService.getService(
self.master, baseURL, auth=auth,
debug=self.debug, verify=self.verify)
self._verification_name = verification_name or Interpolate(
'%(prop:buildername)s')
self._reporter = reporter or "buildbot"
self._abstain = abstain
self._category = category
self._startDescription = startDescription or 'Build started.'
self._endDescription = endDescription or 'Build done.'
self._verbose = verbose
def createStatus(self,
change_id,
revision_id,
name,
value,
abstain=None,
rerun=None,
comment=None,
url=None,
reporter=None,
category=None,
duration=None):
"""
Abstract the POST REST api documented here:
https://gerrit.googlesource.com/plugins/verify-status/+/master/src/main/resources/Documentation/rest-api-changes.md
:param change_id: The change_id for the change tested (can be in the long form e.g:
myProject~master~I8473b95934b5732ac55d26311a706c9c2bde9940 or in the short integer form).
:param revision_id: the revision_id tested can be the patchset number or
the commit id (short or long).
:param name: The name of the job.
:param value: The pass/fail result for this job: -1: fail 0: unstable, 1: succeed
:param abstain: Whether the value counts as a vote (defaults to false)
:param rerun: Whether this result is from a re-test on the same patchset
:param comment: A short comment about this job
:param url: The url link to more info about this job
:reporter: The user that verified this job
:category: A category for this job
"duration": The time it took to run this job
:return: A deferred with the result from Gerrit.
"""
payload = {'name': name, 'value': value}
if abstain is not None:
payload['abstain'] = abstain
if rerun is not None:
payload['rerun'] = rerun
if comment is not None:
payload['comment'] = comment
if url is not None:
payload['url'] = url
if reporter is not None:
payload['reporter'] = reporter
if category is not None:
payload['category'] = category
if duration is not None:
payload['duration'] = duration
if self._verbose:
log.debug(
'Sending Gerrit status for {change_id}/{revision_id}: data={data}',
change_id=change_id,
revision_id=revision_id,
data=payload)
return self._http.post(
'/'.join([
'/a/changes', str(change_id), 'revisions', str(revision_id),
'verify-status~verifications'
]),
json=payload)
def formatDuration(self, duration):
"""Format the duration.
This method could be overridden if really needed, as the duration format in gerrit
is an arbitrary string.
:param duration: duration in timedelta
"""
days = duration.days
hours, remainder = divmod(duration.seconds, 3600)
minutes, seconds = divmod(remainder, 60)
if days:
return '{} day{} {}h {}m {}s'.format(days, "s" if days > 1 else "",
hours, minutes, seconds)
elif hours:
return '{}h {}m {}s'.format(hours, minutes, seconds)
return '{}m {}s'.format(minutes, seconds)
@staticmethod
def getGerritChanges(props):
""" Get the gerrit changes
This method could be overridden if really needed to accommodate for other
custom steps method for fetching gerrit changes.
:param props: an IProperty
:return: (optionally via deferred) a list of dictionary with at list
change_id, and revision_id,
which format is the one accepted by the gerrit REST API as of
/changes/:change_id/revision/:revision_id paths (see gerrit doc)
"""
if 'gerrit_changes' in props:
return props.getProperty('gerrit_changes')
if 'event.change.number' in props:
return [{
'change_id': props.getProperty('event.change.number'),
'revision_id': props.getProperty('event.patchSet.number')
}]
return []
@defer.inlineCallbacks
def send(self, build):
props = Properties.fromDict(build['properties'])
if build['complete']:
value = self.RESULTS_TABLE.get(build['results'],
self.DEFAULT_RESULT)
comment = yield props.render(self._endDescription)
duration = self.formatDuration(build['complete_at'] - build[
'started_at'])
else:
value = 0
comment = yield props.render(self._startDescription)
duration = 'pending'
name = yield props.render(self._verification_name)
reporter = yield props.render(self._reporter)
category = yield props.render(self._category)
abstain = yield props.render(self._abstain)
# TODO: find reliable way to find out whether its a rebuild
rerun = None
changes = yield self.getGerritChanges(props)
for change in changes:
try:
yield self.createStatus(
change['change_id'],
change['revision_id'],
name,
value,
abstain=abstain,
rerun=rerun,
comment=comment,
url=build['url'],
reporter=reporter,
category=category,
duration=duration)
except Exception:
log.failure(
'Failed to send status!', failure=failure.Failure())
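# A minimal usage sketch for a buildbot master.cfg (illustrative only; the
# base URL and credentials below are assumptions, not part of this module):
#
# from buildbot.reporters.gerrit_verify_status import GerritVerifyStatusPush
# c['services'].append(GerritVerifyStatusPush(
#     baseURL="https://gerrit.example.com",
#     auth=("buildbot", "secret")))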
| gpl-2.0 | 5,102,035,959,566,791,000 | 36.265217 | 123 | 0.582896 | false |
hanxi/cocos2d-x-v3.1 | frameworks/cocos2d-x/tools/tolua/genbindings.py | 1 | 5253 | #!/usr/bin/python
# This script is used to generate luabinding glue codes.
# Android ndk version must be ndk-r9b.
import sys
import os, os.path
import shutil
import ConfigParser
import subprocess
import re
from contextlib import contextmanager
def _check_ndk_root_env():
''' Checking the environment NDK_ROOT, which will be used for building
'''
try:
NDK_ROOT = os.environ['NDK_ROOT']
except Exception:
print "NDK_ROOT not defined. Please define NDK_ROOT in your environment."
sys.exit(1)
return NDK_ROOT
def _check_python_bin_env():
''' Checking the environment PYTHON_BIN, which will be used for building
'''
try:
PYTHON_BIN = os.environ['PYTHON_BIN']
except Exception:
print "PYTHON_BIN not defined, use current python."
PYTHON_BIN = sys.executable
return PYTHON_BIN
class CmdError(Exception):
pass
@contextmanager
def _pushd(newDir):
previousDir = os.getcwd()
os.chdir(newDir)
yield
os.chdir(previousDir)
def _run_cmd(command):
ret = subprocess.call(command, shell=True)
if ret != 0:
message = "Error running command"
raise CmdError(message)
def main():
cur_platform = '??'
llvm_path = '??'
ndk_root = _check_ndk_root_env()
# del the " in the path
ndk_root = re.sub(r"\"", "", ndk_root)
python_bin = _check_python_bin_env()
platform = sys.platform
if platform == 'win32':
cur_platform = 'windows'
elif platform == 'darwin':
cur_platform = platform
elif 'linux' in platform:
cur_platform = 'linux'
else:
print 'Your platform is not supported!'
sys.exit(1)
if platform == 'win32':
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s' % cur_platform))
else:
x86_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86')))
x64_llvm_path = os.path.abspath(os.path.join(ndk_root, 'toolchains/llvm-3.3/prebuilt', '%s-%s' % (cur_platform, 'x86_64')))
if os.path.isdir(x86_llvm_path):
llvm_path = x86_llvm_path
elif os.path.isdir(x64_llvm_path):
llvm_path = x64_llvm_path
else:
print 'llvm toolchain not found!'
print 'path: %s or path: %s are not valid! ' % (x86_llvm_path, x64_llvm_path)
sys.exit(1)
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
cocos_root = os.path.abspath(os.path.join(project_root, ''))
cxx_generator_root = os.path.abspath(os.path.join(project_root, 'tools/bindings-generator'))
# save config to file
config = ConfigParser.ConfigParser()
config.set('DEFAULT', 'androidndkdir', ndk_root)
config.set('DEFAULT', 'clangllvmdir', llvm_path)
config.set('DEFAULT', 'cocosdir', cocos_root)
config.set('DEFAULT', 'cxxgeneratordir', cxx_generator_root)
config.set('DEFAULT', 'extra_flags', '')
# To fix a parse error on windows, we must define __WCHAR_MAX__ and undefine __MINGW32__.
if platform == 'win32':
config.set('DEFAULT', 'extra_flags', '-D__WCHAR_MAX__=0x7fffffff -U__MINGW32__')
conf_ini_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'userconf.ini'))
print 'generating userconf.ini...'
with open(conf_ini_file, 'w') as configfile:
config.write(configfile)
# set proper environment variables
if 'linux' in platform or platform == 'darwin':
os.putenv('LD_LIBRARY_PATH', '%s/libclang' % cxx_generator_root)
if platform == 'win32':
path_env = os.environ['PATH']
os.putenv('PATH', r'%s;%s\libclang;%s\tools\win32;' % (path_env, cxx_generator_root, cxx_generator_root))
try:
tolua_root = '%s/tools/tolua' % project_root
output_dir = '%s/cocos/scripting/lua-bindings/auto' % project_root
cmd_args = {'cocos2dx.ini' : ('cocos2d-x', 'lua_cocos2dx_auto'), \
'cocos2dx_extension.ini' : ('cocos2dx_extension', 'lua_cocos2dx_extension_auto'), \
'cocos2dx_physics.ini' : ('cocos2dx_physics', 'lua_cocos2dx_physics_auto'), \
}
target = 'lua'
generator_py = '%s/generator.py' % cxx_generator_root
for key in cmd_args.keys():
args = cmd_args[key]
cfg = '%s/%s' % (tolua_root, key)
print 'Generating bindings for %s...' % (key[:-4])
command = '%s %s %s -s %s -t %s -o %s -n %s' % (python_bin, generator_py, cfg, args[0], target, output_dir, args[1])
_run_cmd(command)
if platform == 'win32':
with _pushd(output_dir):
_run_cmd('dos2unix *')
print '---------------------------------'
print 'Generating lua bindings succeeds.'
print '---------------------------------'
except Exception as e:
if e.__class__.__name__ == 'CmdError':
print '---------------------------------'
print 'Generating lua bindings fails.'
print '---------------------------------'
sys.exit(1)
else:
raise
# -------------- main --------------
if __name__ == '__main__':
main()
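# Typical invocation (illustrative; the paths are assumptions). The script
# expects NDK_ROOT (pointing at ndk-r9b) and optionally PYTHON_BIN in the
# environment:
#
#   NDK_ROOT=/opt/android-ndk-r9b PYTHON_BIN=python2 python genbindings.py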
| mit | -2,650,347,143,557,068,000 | 31.425926 | 128 | 0.579098 | false |
lixiangning888/whole_project | modules/signatures_orginal_20151110/dyre_apis.py | 1 | 6073 | # Copyright (C) 2015 Optiv, Inc. ([email protected]), KillerInstinct
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
try:
import re2 as re
except ImportError:
import re
from lib.cuckoo.common.abstracts import Signature
class Dyre_APIs(Signature):
name = "dyre_behavior"
description = "Exhibits behavior characteristic of Dyre malware"
weight = 3
severity = 3
categories = ["banker", "trojan"]
families = ["dyre"]
authors = ["Optiv", "KillerInstinct"]
minimum = "1.3"
evented = True
# Try to parse a process memory dump and extract C2 nodes via regex.
extract_c2s = True
def __init__(self, *args, **kwargs):
Signature.__init__(self, *args, **kwargs)
self.cryptoapis = False
self.networkapis = set()
self.syncapis = False
self.compname = self.get_environ_entry(self.get_initial_process(),
"ComputerName")
filter_apinames = set(["CryptHashData", "HttpOpenRequestA",
"NtCreateNamedPipeFile"])
def on_call(self, call, process):
# Legacy check: modern Dyre doesn't have hardcoded hashes in
# CryptHashData anymore
iocs = [
"J7dnlDvybciDvu8d46D\\x00",
"qwererthwebfsdvjaf+\\x00",
]
pipe = [
"\\??\\pipe\\3obdw5e5w4",
"\\??\\pipe\\g2fabg5713",
]
if call["api"] == "CryptHashData":
buf = self.get_argument(call, "Buffer")
if buf in iocs:
self.cryptoapis = True
tmp = re.sub(r"\\x[0-9A-Fa-f]{2}", "", buf)
if self.compname in tmp:
if re.match("^" + self.compname + "[0-9 ]+$", tmp):
self.cryptoapis = True
elif call["api"] == "HttpOpenRequestA":
buf = self.get_argument(call, "Path")
if len(buf) > 10:
self.networkapis.add(buf)
elif call["api"] == "NtCreateNamedPipeFile":
buf = self.get_argument(call, "PipeName")
for npipe in pipe:
if buf == npipe:
self.syncapis = True
break
return None
def on_complete(self):
ret = False
networkret = False
campaign = set()
mutexs = [
"^(Global|Local)\\\\pen3j3832h$",
"^(Global|Local)\\\\u1nyj3rt20",
]
for mutex in mutexs:
if self.check_mutex(pattern=mutex, regex=True):
self.syncapis = True
break
# C2 Beacon check
if self.networkapis:
# Gather computer name
for httpreq in self.networkapis:
# Generate patterns (should only ever be one per indicator)
indicators = [
"/(\d{4}[a-z]{2}\d{2})/" + self.compname + "_",
"/([^/]+)/" + self.compname + "/\d+/\d+/\d+/$",
"/([^/]+)/" + self.compname + "_W\d{6}\.[0-9A-F]{32}",
]
for indicator in indicators:
buf = re.match(indicator, httpreq)
if buf:
networkret = True
campaign.add(buf.group(1))
# Check if there are any winners
if self.cryptoapis or self.syncapis or networkret:
ret = True
if (self.cryptoapis or self.syncapis) and networkret:
self.confidence = 100
self.description = "Exhibits behaviorial and network characteristics of Upatre+Dyre/Mini-Dyre malware"
for camp in campaign:
self.data.append({"Campaign": camp})
elif networkret:
self.description = "Exhibits network behavior characteristic of Upatre+Dyre/Mini-Dyre malware"
for camp in campaign:
self.data.append({"Campaign": camp})
if self.extract_c2s:
dump_pid = 0
for proc in self.results["behavior"]["processtree"]:
for child in proc["children"]:
# Look for lowest PID svchost.exe
if not dump_pid or child["pid"] < dump_pid:
if child["name"] == "svchost.exe":
dump_pid = child["pid"]
if dump_pid:
dump_path = ""
if len(self.results["procmemory"]):
for memdump in self.results["procmemory"]:
if dump_pid == memdump["pid"]:
dump_path = memdump["file"]
if dump_path:
whitelist = [
"1.2.3.4",
"0.0.0.0",
]
with open(dump_path, "rb") as dump_file:
dump_data = dump_file.read()
ippat = "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}"
ips = re.findall(ippat, dump_data)
for ip in set(ips):
addit = True
for item in whitelist:
if ip.startswith(item):
addit = False
if addit:
self.data.append({"C2": ip})
return ret
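# A small self-contained sketch of the C2 extraction logic above, using a
# hypothetical dump_data buffer (the pattern and whitelist mirror on_complete):
#
# import re
# dump_data = "...connect 10.0.0.5:443 ... 1.2.3.4:80 ..."
# ippat = "\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{2,5}"
# ips = set(re.findall(ippat, dump_data))
# c2s = [ip for ip in ips
#        if not any(ip.startswith(w) for w in ("1.2.3.4", "0.0.0.0"))]
# # c2s == ["10.0.0.5:443"]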
| lgpl-3.0 | -8,487,877,113,169,990,000 | 38.435065 | 118 | 0.494978 | false |
Nic30/hwtLib | hwtLib/tests/all.py | 1 | 22364 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
from unittest import TestLoader, TextTestRunner, TestSuite
from hwt.simulator.simTestCase import SimTestCase
from hwtLib.abstract.busEndpoint_test import BusEndpointTC
from hwtLib.abstract.frame_utils.alignment_utils_test import FrameAlignmentUtilsTC
from hwtLib.abstract.frame_utils.join.test import FrameJoinUtilsTC
from hwtLib.abstract.template_configured_test import TemplateConfigured_TC
from hwtLib.amba.axiLite_comp.buff_test import AxiRegTC
from hwtLib.amba.axiLite_comp.endpoint_arr_test import AxiLiteEndpointArrTCs
from hwtLib.amba.axiLite_comp.endpoint_fromInterfaces_test import \
AxiLiteEndpoint_fromInterfaceTC, AxiLiteEndpoint_fromInterface_arr_TC
from hwtLib.amba.axiLite_comp.endpoint_struct_test import \
AxiLiteEndpoint_arrayStruct_TC, AxiLiteEndpoint_struct_TC
from hwtLib.amba.axiLite_comp.endpoint_test import AxiLiteEndpointTCs
from hwtLib.amba.axiLite_comp.to_axi_test import AxiLite_to_Axi_TC
from hwtLib.amba.axi_comp.cache.cacheWriteAllocWawOnlyWritePropagating_test import AxiCacheWriteAllocWawOnlyWritePropagatingTCs
from hwtLib.amba.axi_comp.cache.pseudo_lru_test import PseudoLru_TC
from hwtLib.amba.axi_comp.interconnect.matrixAddrCrossbar_test import\
AxiInterconnectMatrixAddrCrossbar_TCs
from hwtLib.amba.axi_comp.interconnect.matrixCrossbar_test import \
AxiInterconnectMatrixCrossbar_TCs
from hwtLib.amba.axi_comp.interconnect.matrixR_test import AxiInterconnectMatrixR_TCs
from hwtLib.amba.axi_comp.interconnect.matrixW_test import AxiInterconnectMatrixW_TCs
from hwtLib.amba.axi_comp.lsu.read_aggregator_test import AxiReadAggregator_TCs
from hwtLib.amba.axi_comp.lsu.store_queue_write_propagating_test import AxiStoreQueueWritePropagating_TCs
from hwtLib.amba.axi_comp.lsu.write_aggregator_test import AxiWriteAggregator_TCs
from hwtLib.amba.axi_comp.oooOp.examples.counterArray_test import OooOpExampleCounterArray_TCs
from hwtLib.amba.axi_comp.oooOp.examples.counterHashTable_test import OooOpExampleCounterHashTable_TC
from hwtLib.amba.axi_comp.resize_test import AxiResizeTC
from hwtLib.amba.axi_comp.sim.ag_test import Axi_ag_TC
from hwtLib.amba.axi_comp.slave_timeout_test import AxiSlaveTimeoutTC
from hwtLib.amba.axi_comp.static_remap_test import AxiStaticRemapTCs
from hwtLib.amba.axi_comp.stream_to_mem_test import Axi4_streamToMemTC
from hwtLib.amba.axi_comp.tester_test import AxiTesterTC
from hwtLib.amba.axi_comp.to_axiLite_test import Axi_to_AxiLite_TC
from hwtLib.amba.axi_test import AxiTC
from hwtLib.amba.axis_comp.en_test import AxiS_en_TC
from hwtLib.amba.axis_comp.fifoDrop_test import AxiSFifoDropTC
from hwtLib.amba.axis_comp.fifoMeasuring_test import AxiS_fifoMeasuringTC
from hwtLib.amba.axis_comp.frameGen_test import AxisFrameGenTC
from hwtLib.amba.axis_comp.frame_deparser.test import AxiS_frameDeparser_TC
from hwtLib.amba.axis_comp.frame_join.test import AxiS_FrameJoin_TCs
from hwtLib.amba.axis_comp.frame_parser.footer_split_test import AxiS_footerSplitTC
from hwtLib.amba.axis_comp.frame_parser.test import AxiS_frameParserTC
from hwtLib.amba.axis_comp.resizer_test import AxiS_resizer_TCs
from hwtLib.amba.axis_comp.storedBurst_test import AxiSStoredBurstTC
from hwtLib.amba.axis_comp.strformat_test import AxiS_strFormat_TC
from hwtLib.amba.datapump.interconnect.rStrictOrder_test import \
RStrictOrderInterconnectTC
from hwtLib.amba.datapump.interconnect.wStrictOrderComplex_test import \
WStrictOrderInterconnectComplexTC
from hwtLib.amba.datapump.interconnect.wStrictOrder_test import \
WStrictOrderInterconnectTC, WStrictOrderInterconnect2TC
from hwtLib.amba.datapump.r_aligned_test import Axi_rDatapump_alignedTCs
from hwtLib.amba.datapump.r_unaligned_test import Axi_rDatapump_unalignedTCs
from hwtLib.amba.datapump.w_test import Axi_wDatapumpTCs
from hwtLib.avalon.axiToMm_test import AxiToAvalonMm_TCs
from hwtLib.avalon.endpoint_test import AvalonMmEndpointTCs
from hwtLib.avalon.mm_buff_test import AvalonMmBuff_TC
from hwtLib.avalon.sim.mmAgent_test import AvalonMmAgentTC
from hwtLib.avalon.sim.stAgent_test import AvalonStAgentTC
from hwtLib.cesnet.mi32.axi4Lite_bridges_test import Mi32Axi4LiteBrigesTC
from hwtLib.cesnet.mi32.endpoint_test import Mi32EndpointTCs
from hwtLib.cesnet.mi32.interconnectMatrix_test import Mi32InterconnectMatrixTC
from hwtLib.cesnet.mi32.mi32agent_test import Mi32AgentTC
from hwtLib.cesnet.mi32.sliding_window_test import Mi32SlidingWindowTC
from hwtLib.cesnet.mi32.to_axi4Lite_test import Mi32_to_Axi4LiteTC
from hwtLib.clocking.cdc_test import CdcTC
from hwtLib.common_nonstd_interfaces.addr_data_hs_to_Axi_test import AddrDataHs_to_Axi_TCs
from hwtLib.examples.arithmetic.cntr_test import CntrTC, CntrResourceAnalysisTC
from hwtLib.examples.arithmetic.multiplierBooth_test import MultiplierBoothTC
from hwtLib.examples.arithmetic.privateSignals_test import PrivateSignalsOfStructTypeTC
from hwtLib.examples.arithmetic.selfRefCntr_test import SelfRefCntrTC
from hwtLib.examples.arithmetic.twoCntrs_test import TwoCntrsTC
from hwtLib.examples.arithmetic.vhdl_vector_auto_casts import VhdlVectorAutoCastExampleTC
from hwtLib.examples.arithmetic.widthCasting import WidthCastingExampleTC
from hwtLib.examples.axi.debugbusmonitor_test import DebugBusMonitorExampleAxiTC
from hwtLib.examples.axi.simpleAxiRegs_test import SimpleAxiRegsTC
from hwtLib.examples.builders.ethAddrUpdater_test import EthAddrUpdaterTCs
from hwtLib.examples.builders.handshakedBuilderSimple import \
HandshakedBuilderSimpleTC
from hwtLib.examples.builders.hsBuilderSplit_test import HsBuilderSplit_TC
from hwtLib.examples.builders.hwException_test import HwExceptionCatch_TC
from hwtLib.examples.builders.pingResponder_test import PingResponderTC
from hwtLib.examples.emptyUnitWithSpi import EmptyUnitWithSpiTC
from hwtLib.examples.errors.combLoops import CombLoopAnalysisTC
from hwtLib.examples.errors.errors_test import ErrorsTC
from hwtLib.examples.hdlComments_test import HdlCommentsTC
from hwtLib.examples.hdlObjLists.listOfInterfaces0 import ListOfInterfacesSample0TC
from hwtLib.examples.hdlObjLists.listOfInterfaces1 import ListOfInterfacesSample1TC
from hwtLib.examples.hdlObjLists.listOfInterfaces2 import ListOfInterfacesSample2TC
from hwtLib.examples.hdlObjLists.listOfInterfaces3 import ListOfInterfacesSample3TC
from hwtLib.examples.hdlObjLists.listOfInterfaces4 import ListOfInterfacesSample4TC
from hwtLib.examples.hierarchy.hierarchySerialization_test import \
HierarchySerializationTC
from hwtLib.examples.hierarchy.simpleSubunit2 import SimpleSubunit2TC
from hwtLib.examples.hierarchy.simpleSubunit3 import SimpleSubunit3TC
from hwtLib.examples.hierarchy.simpleSubunit_test import SimpleSubunitTC
from hwtLib.examples.hierarchy.unitToUnitConnection import \
UnitToUnitConnectionTC
from hwtLib.examples.hierarchy.unitWrapper_test import UnitWrapperTC
from hwtLib.examples.mem.avalonmm_ram_test import AvalonMmBram_TC
from hwtLib.examples.mem.axi_ram_test import Axi4BRam_TC
from hwtLib.examples.mem.bram_wire import BramWireTC
from hwtLib.examples.mem.ram_test import RamResourcesTC, \
SimpleAsyncRamTC, SimpleSyncRamTC
from hwtLib.examples.mem.reg_test import DRegTC, RegSerializationTC, \
DoubleRRegTC, DReg_asyncRstTC
from hwtLib.examples.mem.rom_test import SimpleRomTC, SimpleSyncRomTC, \
RomResourcesTC
from hwtLib.examples.operators.cast_test import CastTc
from hwtLib.examples.operators.concat_test import ConcatTC
from hwtLib.examples.operators.indexing_test import IndexingTC
from hwtLib.examples.parametrization_test import ParametrizationTC
from hwtLib.examples.rtlLvl.rtlLvl_test import RtlLvlTC
from hwtLib.examples.showcase0_test import Showcase0TC
from hwtLib.examples.simple2withNonDirectIntConnection import \
Simple2withNonDirectIntConnectionTC
from hwtLib.examples.simpleAxiStream_test import SimpleUnitAxiStream_TC
from hwtLib.examples.simpleWithNonDirectIntConncetion import \
SimpleWithNonDirectIntConncetionTC
from hwtLib.examples.simpleWithParam import SimpleUnitWithParamTC
from hwtLib.examples.simple_test import SimpleTC
from hwtLib.examples.specialIntfTypes.intfWithArray import InterfaceWithArrayTypesTC
from hwtLib.examples.statements.codeBlockStm_test import CodeBlokStmTC
from hwtLib.examples.statements.constCondition import ConstConditionTC
from hwtLib.examples.statements.constDriver_test import ConstDriverTC
from hwtLib.examples.statements.forLoopCntrl_test import StaticForLoopCntrlTC
from hwtLib.examples.statements.fsm_test import FsmExampleTC, \
HadrcodedFsmExampleTC, FsmSerializationTC
from hwtLib.examples.statements.ifStm_test import IfStmTC
from hwtLib.examples.statements.switchStm_test import SwitchStmTC
from hwtLib.examples.statements.vldMaskConflictsResolving_test import \
VldMaskConflictsResolvingTC
from hwtLib.examples.timers import TimerTC
from hwtLib.handshaked.cdc_test import HandshakedCdc_slow_to_fast_TC, \
HandshakedCdc_fast_to_slow_TC
from hwtLib.handshaked.fifoAsync_test import HsFifoAsyncTC
from hwtLib.handshaked.fifo_test import HsFifoTC
from hwtLib.handshaked.handshakedToAxiStream_test import HandshakedToAxiStreamTCs
from hwtLib.handshaked.joinFair_test import HsJoinFair_2inputs_TC, \
HsJoinFair_3inputs_TC
from hwtLib.handshaked.joinPrioritized_test import HsJoinPrioritizedTC, \
HsJoinPrioritized_randomized_TC
from hwtLib.handshaked.ramAsHs_test import RamAsHs_TCs
from hwtLib.handshaked.reg_test import HandshakedRegTCs
from hwtLib.handshaked.resizer_test import HsResizerTC
from hwtLib.handshaked.splitCopy_test import HsSplitCopyTC, \
HsSplitCopy_randomized_TC
from hwtLib.img.charToBitmap_test import CharToBitmapTC
from hwtLib.logic.bcdToBin_test import BcdToBinTC
from hwtLib.logic.binToBcd_test import BinToBcdTC
from hwtLib.logic.binToOneHot import BinToOneHotTC
from hwtLib.logic.bitonicSorter import BitonicSorterTC
from hwtLib.logic.cntrGray import GrayCntrTC
from hwtLib.logic.countLeading_test import CountLeadingTC
from hwtLib.logic.crcComb_test import CrcCombTC
from hwtLib.logic.crcUtils_test import CrcUtilsTC
from hwtLib.logic.crc_test import CrcTC
from hwtLib.logic.lfsr import LfsrTC
from hwtLib.logic.oneHotToBin_test import OneHotToBinTC
from hwtLib.mem.atomic.flipCntr_test import FlipCntrTC
from hwtLib.mem.atomic.flipRam_test import FlipRamTC
from hwtLib.mem.atomic.flipReg_test import FlipRegTC
from hwtLib.mem.bramEndpoint_test import BramPortEndpointTCs
from hwtLib.mem.cam_test import CamTC
from hwtLib.mem.cuckooHashTableWithRam_test import CuckooHashTableWithRamTCs
from hwtLib.mem.fifoArray_test import FifoArrayTC
from hwtLib.mem.fifoAsync_test import FifoAsyncTC
from hwtLib.mem.fifo_test import FifoWriterAgentTC, FifoReaderAgentTC, FifoTC
from hwtLib.mem.hashTableCoreWithRam_test import HashTableCoreWithRamTC
from hwtLib.mem.lutRam_test import LutRamTC
from hwtLib.mem.ramTransactional_test import RamTransactionalTCs
from hwtLib.mem.ramXor_test import RamXorSingleClockTC
from hwtLib.mem.ram_test import RamTC
from hwtLib.peripheral.displays.hd44780.driver_test import Hd44780Driver8bTC
from hwtLib.peripheral.displays.segment7_test import Segment7TC
from hwtLib.peripheral.ethernet.mac_rx_test import EthernetMac_rx_TCs
from hwtLib.peripheral.ethernet.mac_tx_test import EthernetMac_tx_TCs
from hwtLib.peripheral.ethernet.rmii_adapter_test import RmiiAdapterTC
from hwtLib.peripheral.i2c.masterBitCntrl_test import I2CMasterBitCntrlTC
from hwtLib.peripheral.mdio.master_test import MdioMasterTC
from hwtLib.peripheral.spi.master_test import SpiMasterTC
from hwtLib.peripheral.uart.rx_test import UartRxTC, UartRxBasicTC
from hwtLib.peripheral.uart.tx_rx_test import UartTxRxTC
from hwtLib.peripheral.uart.tx_test import UartTxTC
from hwtLib.peripheral.usb.sim.usb_agent_test import UsbAgentTC
from hwtLib.peripheral.usb.sim.usbip.test import UsbipTCs
from hwtLib.peripheral.usb.usb2.device_cdc_vcp_test import Usb2CdcVcpTC
from hwtLib.peripheral.usb.usb2.sie_rx_test import Usb2SieDeviceRxTC
from hwtLib.peripheral.usb.usb2.sie_tx_test import Usb2SieDeviceTxTC
from hwtLib.peripheral.usb.usb2.ulpi_agent_test import UlpiAgent_TCs
from hwtLib.peripheral.usb.usb2.utmi_agent_test import UtmiAgentTCs
from hwtLib.peripheral.usb.usb2.utmi_to_ulpi_test import Utmi_to_UlpiTC
from hwtLib.structManipulators.arrayBuff_writer_test import ArrayBuff_writer_TC
from hwtLib.structManipulators.arrayItemGetter_test import ArrayItemGetterTC, \
ArrayItemGetter2in1WordTC
from hwtLib.structManipulators.cLinkedListReader_test import \
CLinkedListReaderTC
from hwtLib.structManipulators.cLinkedListWriter_test import \
CLinkedListWriterTC
from hwtLib.structManipulators.mmu2pageLvl_test import MMU_2pageLvl_TC
from hwtLib.structManipulators.structReader_test import StructReaderTC
from hwtLib.structManipulators.structWriter_test import StructWriter_TC
from hwtLib.tests.constraints.xdc_clock_related_test import ConstraintsXdcClockRelatedTC
from hwtLib.tests.frameTmpl_test import FrameTmplTC
from hwtLib.tests.pyUtils.arrayQuery_test import ArrayQueryTC
from hwtLib.tests.pyUtils.fileUtils_test import FileUtilsTC
from hwtLib.tests.rdSynced_agent_test import RdSynced_agent_TC
from hwtLib.tests.repr_of_hdlObjs_test import ReprOfHdlObjsTC
from hwtLib.tests.resourceAnalyzer_test import ResourceAnalyzer_TC
from hwtLib.tests.serialization.hdlReaname_test import SerializerHdlRename_TC
from hwtLib.tests.serialization.ipCorePackager_test import IpCorePackagerTC
from hwtLib.tests.serialization.modes_test import SerializerModes_TC
from hwtLib.tests.serialization.tmpVar_test import Serializer_tmpVar_TC
from hwtLib.tests.serialization.vhdl_test import Vhdl2008Serializer_TC
from hwtLib.tests.simulator.basicRtlSimulatorVcdTmpDirs_test import BasicRtlSimulatorVcdTmpDirs_TCs
from hwtLib.tests.simulator.json_log_test import HsFifoJsonLogTC
from hwtLib.tests.simulator.utils_test import SimulatorUtilsTC
from hwtLib.tests.structIntf_operator_test import StructIntf_operatorTC
from hwtLib.tests.synthesizer.astNodeIoReplacing_test import AstNodeIoReplacingTC
from hwtLib.tests.synthesizer.interfaceLevel.interfaceSynthesizerTC import \
InterfaceSynthesizerTC
from hwtLib.tests.synthesizer.interfaceLevel.subunitsSynthesisTC import \
SubunitsSynthesisTC
from hwtLib.tests.synthesizer.rtlLevel.basic_signal_methods_test import BasicSignalMethodsTC
from hwtLib.tests.synthesizer.rtlLevel.statements_consystency_test import StatementsConsystencyTC
from hwtLib.tests.synthesizer.statementTreesInternal_test import StatementTreesInternalTC
from hwtLib.tests.synthesizer.statementTrees_test import StatementTreesTC
from hwtLib.tests.synthesizer.statements_test import StatementsTC
from hwtLib.tests.transTmpl_test import TransTmpl_TC
from hwtLib.tests.types.bitsSlicing_test import BitsSlicingTC
from hwtLib.tests.types.hstructVal_test import HStructValTC
from hwtLib.tests.types.hvalue_test import HValueTC
from hwtLib.tests.types.operators_test import OperatorTC
from hwtLib.tests.types.union_test import UnionTC
from hwtLib.tests.unionIntf_test import UnionIntfTC
from hwtLib.xilinx.ipif.axi4Lite_to_ipif_test import Axi4Lite_to_IpifTC
from hwtLib.xilinx.ipif.buff_test import IpifBuffTC
from hwtLib.xilinx.ipif.endpoint_test import IpifEndpointTC, \
IpifEndpointDenseTC, IpifEndpointDenseStartTC, IpifEndpointArray
from hwtLib.xilinx.ipif.interconnectMatrix_test import IpifInterconnectMatrixTC
from hwtLib.xilinx.locallink.axis_conv_test import AxiS_localLinkConvTC
from hwtLib.xilinx.primitive.examples.dsp48e1Add_test import Dsp48e1Add_TCs
from hwtLib.xilinx.slr_crossing_test import HsSlrCrossingTC
# from hwt.simulator.simTestCase import SimTestCase
def testSuiteFromTCs(*tcs):
loader = TestLoader()
for tc in tcs:
if not issubclass(tc, SimTestCase):
tc._multiprocess_can_split_ = True
loadedTcs = [
loader.loadTestsFromTestCase(tc) for tc in tcs
# if not issubclass(tc, SimTestCase) # [debug] skip simulations
]
suite = TestSuite(loadedTcs)
return suite
suite = testSuiteFromTCs(
# basic tests
FileUtilsTC,
ArrayQueryTC,
RtlLvlTC,
ReprOfHdlObjsTC,
HdlCommentsTC,
InterfaceSynthesizerTC,
SubunitsSynthesisTC,
EmptyUnitWithSpiTC,
Simple2withNonDirectIntConnectionTC,
SimpleWithNonDirectIntConncetionTC,
SimpleSubunit3TC,
UnitToUnitConnectionTC,
OperatorTC,
StructIntf_operatorTC,
CastTc,
BitsSlicingTC,
HStructValTC,
ParametrizationTC,
BasicSignalMethodsTC,
StatementsConsystencyTC,
HValueTC,
StatementTreesInternalTC,
StatementTreesTC,
StatementsTC,
AstNodeIoReplacingTC,
ErrorsTC,
StaticForLoopCntrlTC,
SimpleUnitWithParamTC,
SimpleSubunit2TC,
HierarchySerializationTC,
ListOfInterfacesSample0TC,
ListOfInterfacesSample1TC,
ListOfInterfacesSample2TC,
ListOfInterfacesSample3TC,
ListOfInterfacesSample4TC,
PrivateSignalsOfStructTypeTC,
FrameTmplTC,
Showcase0TC,
SimulatorUtilsTC,
HsFifoJsonLogTC,
RdSynced_agent_TC,
Segment7TC,
SerializerModes_TC,
Serializer_tmpVar_TC,
SerializerHdlRename_TC,
VhdlVectorAutoCastExampleTC,
TransTmpl_TC,
UnionTC,
UnionIntfTC,
ResourceAnalyzer_TC,
CombLoopAnalysisTC,
Vhdl2008Serializer_TC,
CodeBlokStmTC,
IfStmTC,
SwitchStmTC,
SimpleRomTC,
SimpleSyncRomTC,
RomResourcesTC,
DRegTC,
DoubleRRegTC,
DReg_asyncRstTC,
RegSerializationTC,
CntrTC,
CntrResourceAnalysisTC,
ConstConditionTC,
TemplateConfigured_TC,
FrameAlignmentUtilsTC,
FrameJoinUtilsTC,
HwExceptionCatch_TC,
PseudoLru_TC,
# tests of simple units
TimerTC,
ConcatTC,
VldMaskConflictsResolvingTC,
ConstDriverTC,
WidthCastingExampleTC,
SimpleTC,
SimpleSubunitTC,
RamTC,
RamXorSingleClockTC,
*RamTransactionalTCs,
BramWireTC,
LutRamTC,
FsmSerializationTC,
FsmExampleTC,
HadrcodedFsmExampleTC,
OneHotToBinTC,
BinToBcdTC,
BcdToBinTC,
AxiS_strFormat_TC,
BinToOneHotTC,
GrayCntrTC,
TwoCntrsTC,
SelfRefCntrTC,
CountLeadingTC,
MultiplierBoothTC,
IndexingTC,
CdcTC,
RamResourcesTC,
SimpleAsyncRamTC,
SimpleSyncRamTC,
SimpleUnitAxiStream_TC,
FifoWriterAgentTC,
FifoReaderAgentTC,
FifoTC,
FifoAsyncTC,
FifoArrayTC,
HsJoinPrioritizedTC,
HsJoinPrioritized_randomized_TC,
HsJoinFair_2inputs_TC,
HsJoinFair_3inputs_TC,
HandshakedCdc_slow_to_fast_TC,
HandshakedCdc_fast_to_slow_TC,
*HandshakedToAxiStreamTCs,
*RamAsHs_TCs,
LfsrTC,
BitonicSorterTC,
InterfaceWithArrayTypesTC,
FlipRegTC,
FlipCntrTC,
FlipRamTC,
HsSplitCopyTC,
HsSplitCopy_randomized_TC,
HsFifoTC,
HsFifoAsyncTC,
*HandshakedRegTCs,
HsResizerTC,
HsBuilderSplit_TC,
CamTC,
UartTxTC,
UartRxBasicTC,
UartRxTC,
UartTxRxTC,
SpiMasterTC,
I2CMasterBitCntrlTC,
*EthernetMac_rx_TCs,
*EthernetMac_tx_TCs,
MdioMasterTC,
Hd44780Driver8bTC,
CrcUtilsTC,
CrcCombTC,
CrcTC,
UsbAgentTC,
*UlpiAgent_TCs,
*UtmiAgentTCs,
Utmi_to_UlpiTC,
Usb2SieDeviceRxTC,
Usb2SieDeviceTxTC,
Usb2CdcVcpTC,
*UsbipTCs,
BusEndpointTC,
*BramPortEndpointTCs,
# avalon tests
AvalonMmAgentTC,
*AvalonMmEndpointTCs,
AvalonMmBram_TC,
*AxiToAvalonMm_TCs,
AvalonStAgentTC,
AvalonMmBuff_TC,
# axi tests
SimpleAxiRegsTC,
AxiTC,
*AxiLiteEndpointTCs,
*AxiLiteEndpointArrTCs,
AxiLiteEndpoint_struct_TC,
AxiLiteEndpoint_arrayStruct_TC,
AxiLiteEndpoint_fromInterfaceTC,
AxiLiteEndpoint_fromInterface_arr_TC,
AxiLite_to_Axi_TC,
Axi_to_AxiLite_TC,
AxiRegTC,
AxiTesterTC,
*AxiStaticRemapTCs,
AxiResizeTC,
AxisFrameGenTC,
*AddrDataHs_to_Axi_TCs,
Axi4BRam_TC,
*Axi_rDatapump_alignedTCs,
*Axi_rDatapump_unalignedTCs,
*Axi_wDatapumpTCs,
AxiSlaveTimeoutTC,
AxiSStoredBurstTC,
AxiS_en_TC,
AxiS_fifoMeasuringTC,
AxiSFifoDropTC,
*AxiS_resizer_TCs,
AxiS_frameDeparser_TC,
AxiS_localLinkConvTC,
AxiS_footerSplitTC,
AxiS_frameParserTC,
*AxiS_FrameJoin_TCs,
HandshakedBuilderSimpleTC,
*EthAddrUpdaterTCs,
RStrictOrderInterconnectTC,
WStrictOrderInterconnectTC,
WStrictOrderInterconnect2TC,
WStrictOrderInterconnectComplexTC,
*AxiInterconnectMatrixAddrCrossbar_TCs,
*AxiInterconnectMatrixCrossbar_TCs,
*AxiInterconnectMatrixR_TCs,
*AxiInterconnectMatrixW_TCs,
*AxiWriteAggregator_TCs,
*AxiReadAggregator_TCs,
*AxiStoreQueueWritePropagating_TCs,
*AxiCacheWriteAllocWawOnlyWritePropagatingTCs,
Axi_ag_TC,
Axi4_streamToMemTC,
ArrayItemGetterTC,
ArrayItemGetter2in1WordTC,
ArrayBuff_writer_TC,
CLinkedListReaderTC,
CLinkedListWriterTC,
MMU_2pageLvl_TC,
StructWriter_TC,
StructReaderTC,
*OooOpExampleCounterArray_TCs,
OooOpExampleCounterHashTable_TC,
# ipif tests
IpifEndpointTC,
IpifEndpointDenseTC,
IpifEndpointDenseStartTC,
IpifEndpointArray,
IpifBuffTC,
Axi4Lite_to_IpifTC,
IpifInterconnectMatrixTC,
Mi32AgentTC,
Mi32InterconnectMatrixTC,
Mi32_to_Axi4LiteTC,
Mi32Axi4LiteBrigesTC,
Mi32SlidingWindowTC,
*Mi32EndpointTCs,
# complex units tests
UnitWrapperTC,
IpCorePackagerTC,
CharToBitmapTC,
HashTableCoreWithRamTC,
*CuckooHashTableWithRamTCs,
PingResponderTC,
DebugBusMonitorExampleAxiTC,
RmiiAdapterTC,
ConstraintsXdcClockRelatedTC,
HsSlrCrossingTC,
*Dsp48e1Add_TCs,
*BasicRtlSimulatorVcdTmpDirs_TCs,
)
def main():
# runner = TextTestRunner(verbosity=2, failfast=True)
runner = TextTestRunner(verbosity=2)
try:
from concurrencytest import ConcurrentTestSuite, fork_for_tests
useParallelTest = True
except ImportError:
# concurrencytest is not installed, use regular test runner
useParallelTest = False
# useParallelTest = False
if useParallelTest:
concurrent_suite = ConcurrentTestSuite(suite, fork_for_tests())
res = runner.run(concurrent_suite)
else:
res = runner.run(suite)
if not res.wasSuccessful():
sys.exit(1)
if __name__ == '__main__':
main()
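# To run only a subset of the suite (illustrative; reuses the unittest
# machinery already imported above):
#
# runner = TextTestRunner(verbosity=2)
# runner.run(testSuiteFromTCs(FifoTC, CamTC))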
| mit | 1,743,098,005,491,822,600 | 39.514493 | 127 | 0.814926 | false |
zeraien/comcon | ampcon/ampcon.py | 1 | 1866 | import yaml
import os
from flask import Flask, render_template, jsonify, request
from amplifier import Amplifier, SOURCES
app = Flask(__name__)
with open(os.path.join(os.path.dirname(__file__),"config.yaml")) as f:
config = yaml.safe_load(f)  # safe_load avoids constructing arbitrary YAML objects
amplifier_obj = Amplifier(serial_port=config["serial_port"], logger=app.logger)
@app.context_processor
def inject_user():
if not amplifier_obj.configured:
amplifier_obj.configure()
return {
'sources': SOURCES
}
@app.route('/')
def hello_world():
amplifier_obj.configured = False
return render_template('index.html')
@app.route('/:volume')
def volume_change():
step = int(request.args.get('step'))
amplifier_obj.volume_change(step)
return jsonify(amplifier_obj.json_ready())
@app.route('/:volume_percent/<int:percent>')
def volume_percent(percent):
amplifier_obj.set_volume_percent(percent)
return jsonify(amplifier_obj.json_ready())
@app.route('/:volume_calibrate')
def volume_calibrate():
amplifier_obj.calibrate_volume()
return jsonify(amplifier_obj.json_ready())
@app.route("/:status")
def status():
return jsonify(amplifier_obj.json_ready())
@app.route("/:set_source")
def source():
new_source = request.args.get('source')
amplifier_obj.set_source(new_source)
return jsonify(amplifier_obj.json_ready())
@app.route('/:mute')
def mute():
amplifier_obj.mute_toggle()
return jsonify(amplifier_obj.json_ready())
@app.route('/:power')
def power():
amplifier_obj.power_toggle()
return jsonify(amplifier_obj.json_ready())
@app.route('/:spk/<speaker>')
def toggle_speaker(speaker):
amplifier_obj.speaker_toggle(speaker)
return jsonify(amplifier_obj.json_ready())
@app.errorhandler(500)
def page_not_found(e):
return "Error: %s" % e
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000, debug=True)
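# Example requests against the running server (illustrative; the source key
# passed to /:set_source must be one of the keys defined in SOURCES):
#
#   curl 'http://localhost:5000/:status'
#   curl 'http://localhost:5000/:volume?step=-2'
#   curl 'http://localhost:5000/:volume_percent/40'
#   curl 'http://localhost:5000/:mute'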
| gpl-2.0 | -847,075,425,167,556,000 | 24.916667 | 79 | 0.685959 | false |
linostar/timeline-clone | test/specs/utils.py | 1 | 17794 | # Copyright (C) 2009, 2010, 2011 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
import os.path
import random
import shutil
import sys
import tempfile
import traceback
import unittest
import wx.lib.inspection
from timelinelib.calendar.gregorian import Gregorian
from timelinelib.calendar.monthnames import ABBREVIATED_ENGLISH_MONTH_NAMES
from timelinelib.config.arguments import ApplicationArguments
from timelinelib.config.dotfile import read_config
from timelinelib.data import Category
from timelinelib.data import Container
from timelinelib.data import Event
from timelinelib.data import Subevent
from timelinelib.data import TimePeriod
from timelinelib.db import db_open
from timelinelib.time.gregoriantime import GregorianTimeType
from timelinelib.time.timeline import delta_from_days
from timelinelib.time.timeline import TimeDelta
from timelinelib.wxgui.setup import start_wx_application
ANY_TIME = "1 Jan 2010"
def gregorian_period(start, end):
return TimePeriod(GregorianTimeType(), human_time_to_gregorian(start), human_time_to_gregorian(end))
def human_time_to_gregorian(human_time):
(year, month, day, hour, minute) = human_time_to_ymdhm(human_time)
return Gregorian(year, month, day, hour, minute, 0).to_time()
def a_time_period():
year = random.randint(1, 4000)
month = random.randint(1, 12)
day = random.randint(1, 28)
end_year = year + random.randint(1, 5)
end_month = random.randint(1, 12)
end_day = random.randint(1, 28)
return TimePeriod(GregorianTimeType(),
Gregorian(year, month, day, 0, 0, 0).to_time(),
Gregorian(end_year, end_month, end_day, 0, 0, 0).to_time())
def human_time_to_ymdhm(human_time):
parts = human_time.split(" ")
day_part, month_part, year_part = parts[0], parts[1], parts[2]
day = int(day_part)
month = ABBREVIATED_ENGLISH_MONTH_NAMES.index(month_part) + 1
year = int(year_part)
if len(parts) == 4:
hour = int(parts[3][:2])
minute = int(parts[3][3:5])
else:
hour = 0
minute = 0
return (year, month, day, hour, minute)
def an_event():
return an_event_with(time=ANY_TIME)
def an_event_with(start=None, end=None, time=ANY_TIME, text="foo", fuzzy=False,
locked=False, ends_today=False, category=None):
if start and end:
start = human_time_to_gregorian(start)
end = human_time_to_gregorian(end)
else:
start = human_time_to_gregorian(time)
end = human_time_to_gregorian(time)
return Event(
GregorianTimeType(), start, end, text, category=category,
fuzzy=fuzzy, locked=locked, ends_today=ends_today)
def a_subevent():
return a_subevent_with()
def a_subevent_with(start=None, end=None, time=ANY_TIME, text="sub", category=None, container=None, cid=-1):
if start and end:
start = human_time_to_gregorian(start)
end = human_time_to_gregorian(end)
else:
start = human_time_to_gregorian(time)
end = human_time_to_gregorian(time)
return Subevent(GregorianTimeType(), start, end, text, category=category, container=container, cid=cid)
def a_container(name, category, sub_events):
cid = 99
start = human_time_to_gregorian(ANY_TIME)
end = human_time_to_gregorian(ANY_TIME)
container = Container(GregorianTimeType(), start, end, name,
category=category, cid=cid)
all_events = []
all_events.append(container)
for (name, category) in sub_events:
all_events.append(Subevent(GregorianTimeType(), start, end, name,
category=category, container=container))
return all_events
def a_container_with(text="container", category=None, cid=-1):
start = human_time_to_gregorian(ANY_TIME)
end = human_time_to_gregorian(ANY_TIME)
container = Container(GregorianTimeType(), start, end, text, category=category, cid=cid)
return container
def a_category():
return a_category_with(name="category")
def a_category_with(name, color=(255, 0, 0), font_color=(0, 255, 255),
parent=None):
return Category(name=name, color=color, font_color=font_color,
parent=parent)
def get_random_modifier(modifiers):
return random.choice(modifiers)
def inc(number):
if number is None:
return 8
else:
return number + 1
def new_cat(event):
if event.get_category() is None:
return a_category_with(name="new category")
else:
return a_category_with(name="was: %s" % event.get_category().get_name())
def new_parent(category):
if category.get_parent() is None:
return a_category_with(name="new category")
else:
return a_category_with(name="was: %s" % category.get_parent().get_name())
def new_time_type(event):
if event.get_time_type() is None:
return GregorianTimeType()
else:
return None
def new_progress(event):
if event.get_progress() is None:
return 8
else:
return (event.get_progress() + 1) % 100
def modifier_change_ends_today(event):
if event.get_locked():
event.set_locked(False)
event.set_ends_today(not event.get_ends_today())
event.set_locked(True)
else:
event.set_ends_today(not event.get_ends_today())
return event
EVENT_MODIFIERS = [
("change time type", lambda event:
event.set_time_type(new_time_type(event))),
("change fuzzy", lambda event:
event.set_fuzzy(not event.get_fuzzy())),
("change locked", lambda event:
event.set_locked(not event.get_locked())),
("change ends today", modifier_change_ends_today),
("change id", lambda event:
event.set_id(inc(event.get_id()))),
("change time period", lambda event:
event.set_time_period(event.get_time_period().move_delta(delta_from_days(1)))),
("change text", lambda event:
event.set_text("was: %s" % event.get_text())),
("change category", lambda event:
event.set_category(new_cat(event))),
("change icon", lambda event:
event.set_icon("was: %s" % event.get_icon())),
("change description", lambda event:
event.set_description("was: %s" % event.get_description())),
("change hyperlink", lambda event:
event.set_hyperlink("was: %s" % event.get_hyperlink())),
("change progress", lambda event:
event.set_progress(new_progress(event))),
("change alert", lambda event:
event.set_alert("was: %s" % event.get_alert())),
]
SUBEVENT_MODIFIERS = [
("change container id", lambda event:
event.set_container_id(event.get_container_id()+1)),
] + EVENT_MODIFIERS
CONTAINER_MODIFIERS = [
("change container id", lambda event:
event.set_cid(event.cid()+1)),
] + EVENT_MODIFIERS
CATEGORY_MODIFIERS = [
("change name", lambda category:
category.set_name("was: %s" % category.get_name())),
("change id", lambda category:
category.set_id(inc(category.get_id()))),
("change color", lambda category:
category.set_color(category.get_color()+(1, 0, 3))),
("change font color", lambda category:
category.set_font_color(category.get_font_color()+(1, 0, 3))),
("change parent", lambda category:
category.set_parent(new_parent(category))),
]
TIME_PERIOD_MODIFIERS = [
("zoom", lambda time_period:
time_period.zoom(-1)),
("extend left", lambda time_period:
time_period.update(time_period.start_time-time_period.time_type.get_min_zoom_delta()[0],
time_period.end_time)),
("extend right", lambda time_period:
time_period.update(time_period.start_time,
time_period.end_time+time_period.time_type.get_min_zoom_delta()[0])),
]
TIME_MODIFIERS = [
("add", lambda time: time + TimeDelta(1)),
]
class TestCase(unittest.TestCase):
def assertListIsCloneOf(self, cloned_list, original_list):
self.assertEqual(cloned_list, original_list)
self.assertTrue(cloned_list is not original_list)
for i in range(len(cloned_list)):
self.assertIsCloneOf(cloned_list[i], original_list[i])
def assertIsCloneOf(self, clone, original):
self.assertEqual(clone, original)
self.assertTrue(clone is not original, "%r" % clone)
def assertInstanceNotIn(self, object_, list_):
for element in list_:
if element is object_:
self.fail("%r was in list" % object_)
def assertEqNeImplementationIsCorrect(self, create_fn, modifiers):
(modification_description, modifier_fn) = get_random_modifier(modifiers)
one = modifier_fn(create_fn())
other = modifier_fn(create_fn())
fail_message_one_other = "%r vs %r (%s)" % (one, other,
modification_description)
self.assertTrue(type(one) == type(other), fail_message_one_other)
self.assertFalse(one == None, fail_message_one_other)
self.assertTrue(one != None, fail_message_one_other)
self.assertTrue(one is not other, fail_message_one_other)
self.assertFalse(one is other, fail_message_one_other)
self.assertTrue(one == other, fail_message_one_other)
self.assertFalse(one != other, fail_message_one_other)
self.assertTrue(one == one, fail_message_one_other)
self.assertFalse(one != one, fail_message_one_other)
(modification_description, modifier_fn) = get_random_modifier(modifiers)
modified = modifier_fn(other)
fail_message_modified_one = "%r vs %r (%s)" % (modified, one,
modification_description)
self.assertTrue(type(modified) == type(one), fail_message_modified_one)
self.assertTrue(modified is not one, fail_message_modified_one)
self.assertFalse(modified is one, fail_message_modified_one)
self.assertTrue(modified != one, fail_message_modified_one)
self.assertFalse(modified == one, fail_message_modified_one)
class TmpDirTestCase(TestCase):
def setUp(self):
self.tmp_dir = tempfile.mkdtemp(prefix="timeline-test")
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def get_tmp_path(self, name):
return os.path.join(self.tmp_dir, name)
class WxComponentTest(TestCase):
def setUp(self):
self._app = wx.App(False)
self._main_frame = wx.Frame(None)
self._main_frame.Bind(wx.EVT_CLOSE, self._main_frame_on_close)
self._main_panel = wx.Panel(self._main_frame)
self._components = []
self._component_by_name = {}
self._is_close_called = False
def tearDown(self):
self._close()
def add_component(self, name, cls, *args):
self._component_by_name[name] = cls(self._main_panel, *args)
self._components.append(self._component_by_name[name])
def add_button(self, text, callback, component_name=None):
button = wx.Button(self._main_panel, label=text)
self._components.append(button)
def event_listener(event):
if component_name:
callback(self.get_component(component_name))
else:
callback()
button.Bind(wx.EVT_BUTTON, event_listener)
def add_separator(self):
label = "----- separator -----"
self._components.append(wx.StaticText(self._main_panel, label=label))
def get_component(self, name):
return self._component_by_name[name]
def show_test_window(self):
sizer = wx.BoxSizer(wx.VERTICAL)
for component in self._components:
sizer.Add(component, flag=wx.ALL|wx.GROW, border=3)
self._main_panel.SetSizer(sizer)
self._main_frame.Show()
if not self.HALT_FOR_MANUAL_INSPECTION:
wx.CallAfter(self._close)
self._app.MainLoop()
def _main_frame_on_close(self, event):
self._is_close_called = True
self._main_frame.Destroy()
def _close(self):
if not self._is_close_called:
self._main_frame.Close()
self._is_close_called = True
class WxEndToEndTestCase(TmpDirTestCase):
def setUp(self):
TmpDirTestCase.setUp(self)
self.timeline_path = self.get_tmp_path("test.timeline")
self.config_file_path = self.get_tmp_path("thetimelineproj.cfg")
self.config = read_config(self.config_file_path)
self.standard_excepthook = sys.excepthook
self.error_in_gui_thread = None
def tearDown(self):
TmpDirTestCase.tearDown(self)
sys.excepthook = self.standard_excepthook
def start_timeline_and(self, steps_to_perform_in_gui):
self.config.write()
self.steps_to_perform_in_gui = steps_to_perform_in_gui
application_arguments = ApplicationArguments()
application_arguments.parse_from(
["--config-file", self.config_file_path, self.timeline_path])
start_wx_application(application_arguments, self._before_main_loop_hook)
if self.error_in_gui_thread:
exc_type, exc_value, exc_traceback = self.error_in_gui_thread
a = traceback.format_exception(exc_type, exc_value, exc_traceback)
self.fail("Exception in GUI thread: %s" % "".join(a))
def read_written_timeline(self):
return db_open(self.timeline_path)
def _before_main_loop_hook(self):
sys.excepthook = self.standard_excepthook
self._setup_steps_to_perform_in_gui(self.steps_to_perform_in_gui)
def _setup_steps_to_perform_in_gui(self, steps, in_sub_step_mode=False):
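        # Shape of the steps argument (a hypothetical example, not from the
        # original code): a flat list of callables, where a nested list placed
        # directly after a step is scheduled as sub-steps of that step,
        # presumably so it can run while the preceding step blocks in a modal
        # dialog:
        #   [self.click_menu_item("File -> Open"),
        #    [self.enter_text("dialog -> name", "x"), self.click_button("OK")],
        #    final_assertion_step]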
def perform_current_step_and_queue_next():
if len(steps) >= 2 and isinstance(steps[1], list):
self._setup_steps_to_perform_in_gui(steps[1], True)
next_step_index = 2
else:
next_step_index = 1
try:
steps[0]()
except Exception:
wx.GetApp().GetTopWindow().Close()
self.error_in_gui_thread = sys.exc_info()
else:
if steps[0] != self.show_widget_inspector:
self._setup_steps_to_perform_in_gui(steps[next_step_index:], in_sub_step_mode)
if len(steps) > 0:
wx.CallAfter(perform_current_step_and_queue_next)
elif not in_sub_step_mode:
wx.CallAfter(wx.GetApp().GetTopWindow().Close)
def show_widget_inspector(self):
wx.lib.inspection.InspectionTool().Show()
def click_menu_item(self, item_path):
def click():
item_names = [_(x) for x in item_path.split(" -> ")]
menu_bar = wx.GetApp().GetTopWindow().GetMenuBar()
menu = menu_bar.GetMenu(menu_bar.FindMenu(item_names[0]))
for sub in item_names[1:]:
menu = menu_bar.FindItemById(menu.FindItem(sub))
wx.GetApp().GetTopWindow().ProcessEvent(
wx.CommandEvent(wx.EVT_MENU.typeId, menu.GetId()))
return click
def click_button(self, component_path):
def click():
component = self.find_component(component_path)
component.ProcessEvent(wx.CommandEvent(wx.EVT_BUTTON.typeId, component.GetId()))
return click
def enter_text(self, component_path, text):
def enter():
self.find_component(component_path).SetValue(text)
return enter
def find_component(self, component_path):
components_to_search_in = wx.GetTopLevelWindows()
for component_name in component_path.split(" -> "):
component = self._find_component_with_name_in(
components_to_search_in, component_name)
            if component is None:
self.fail("Could not find component with path '%s'." % component_path)
else:
components_to_search_in = component.GetChildren()
return component
def _find_component_with_name_in(self, components, seeked_name):
for component in components:
if self._matches_seeked_name(component, seeked_name):
return component
for component in components:
sub = self._find_component_with_name_in(component.GetChildren(), seeked_name)
if sub:
return sub
return None
def _matches_seeked_name(self, component, seeked_name):
if component.GetName() == seeked_name:
return True
elif component.GetId() == self._wx_id_from_name(seeked_name):
return True
elif hasattr(component, "GetLabelText") and component.GetLabelText() == _(seeked_name):
return True
elif component.GetLabel() == _(seeked_name):
return True
return False
def _wx_id_from_name(self, name):
if name.startswith("wxID_"):
return getattr(wx, name[2:])
return None
class ObjectWithTruthValue(object):
def __init__(self, truth_value):
self.truth_value = truth_value
def __nonzero__(self):
return self.truth_value
| gpl-3.0 | 6,591,951,108,814,487,000 | 34.730924 | 108 | 0.63291 | false |
ESSolutions/ESSArch_Core | ESSArch_Core/WorkflowEngine/__init__.py | 1 | 1392 | """
ESSArch is an open source archiving and digital preservation system
ESSArch
Copyright (C) 2005-2019 ES Solutions AB
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Contact information:
Web - http://www.essolutions.se
Email - [email protected]
"""
import logging
import celery
default_app_config = 'ESSArch_Core.WorkflowEngine.apps.WorkflowEngineConfig'
logger = logging.getLogger('essarch.workflowengine')
def get_workers(rabbitmq):
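    # Note on the inspect() call below (based on the public Celery API):
    # stats() returns a mapping like {'celery@hostname': {...worker stats...}}
    # or None when no workers reply before the inspect timeout.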
if rabbitmq.get('error'):
logger.error("RabbitMQ seems down. Wont get stats of celery workers.")
return None
try:
return celery.current_app.control.inspect().stats()
except Exception:
logger.exception("Error when checking stats of celery workers.")
return None
| gpl-3.0 | 6,048,305,297,586,128,000 | 32.142857 | 78 | 0.72342 | false |
lafranceinsoumise/api-django | agir/people/management/commands/mailtrain_update.py | 1 | 1382 | from datetime import datetime
import string
from uuid import UUID
from django.core.management import BaseCommand
from django.utils import timezone
from agir.lib.mailtrain import update_person
from agir.people.models import Person
PADDING = "0000000-0000-0000-0000-000000000000"
class Command(BaseCommand):
help = "Synchronize all the database with mailtrain"
def handle(self, *args, **kwargs):
start = datetime.now()
i = 0
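        # Shard people by the first hex digit of their UUID: each day covers
        # one of eight buckets ('0'-'2', '2'-'4', ..., 'e' upwards), so the
        # whole table is re-synced roughly every 8 days. When the upper bound
        # wraps back to '0', the upper filter below is simply skipped.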
min_letter = string.hexdigits[timezone.now().day % 8 * 2]
max_letter = string.hexdigits[(timezone.now().day + 1) % 8 * 2]
qs = Person.objects.filter(id__gte=UUID(min_letter + PADDING))
if max_letter > min_letter:
qs = qs.filter(id__lt=UUID(max_letter + PADDING))
try:
for person in qs.iterator():
update_person(person)
if kwargs["verbosity"] > 1:
print("Updated %s " % person.email)
i += 1
except Exception as e:
duration = datetime.now() - start
print(
f"Updated {i} people over {qs.count()} in {str(duration.seconds)} seconds."
)
raise e
duration = datetime.now() - start
print(
f"Updated people from {min_letter} to {max_letter} ({str(i)}) in {str(duration.seconds)} seconds."
)
| agpl-3.0 | 1,208,920,517,310,383,900 | 28.404255 | 110 | 0.586831 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20140625B.py | 1 | 4512 | """
[6/25/2014] Challenge #168 [Intermediate] Block Count, Length & Area
https://www.reddit.com/r/dailyprogrammer/comments/291x9h/6252014_challenge_168_intermediate_block_count/
#Description:
In construction there comes a need to compute the length and area of a jobsite. The areas and lengths computed are used
by estimators
to price out the cost to build that jobsite. If for example a jobsite was a building with a parking lot and had
concrete walkways and some nice
pavers and landscaping it would be good to know the areas of all these and some lengths (for concrete curbs, landscape
headerboard, etc)
So for today's challenge we are going to automate the tedious process of calculating the length and area of aerial
plans or photos.
#ASCII Photo:
To keep this within our scope we have converted the plans into an ASCII picture. We have scaled the plans so 1
character is a square
with dimensions of 10 ft x 10 ft.
The photo is case sensitive. so a "O" and "o" are 2 different blocks of areas to compute.
#Block Counts, Lengths and Areas:
Some shorthand to follow:
* SF = square feet
* LF = linear feet
If you have the following picture.
####
OOOO
####
mmmm
* # has a block count of 2: we have 2 areas made up of # that are not joined.
* O and m have a block count of 1: they only have 1 area each made up of their ASCII character.
* O has 4 blocks. Each block is 100 SF and so you have 400 SF of O.
* O's single block has a circumference length of 100 LF.
* m also has 4 blocks, so there is 400 SF of m and a circumference length of 100 LF.
* # has 2 block counts each of 4. So # has a total area of 800 SF and a total circumference length of 200 LF.
Pay close attention to how "#" was handled. It was seen as being 2 areas made up of # but the final length and area
totals add them together even though they are not joined. The two areas are recognized by the block count of 2 (2
non-joined areas made up of "#" characters) while the others only have a block count of 1.
#Input:
Your input is a 2-D ASCII picture. The ASCII characters used are any non-whitespace characters.
##Example:
####
@@oo
o*@!
****
#Output:
You give a Length and Area report of all the blocks.
##Example: (using the example input)
Block Count, Length & Area Report
=================================
#: Total SF (400), Total Circumference LF (100) - Found 1 block
@: Total SF (300), Total Circumference LF (100) - Found 2 blocks
o: Total SF (300), Total Circumference LF (100) - Found 2 blocks
*: Total SF (500), Total Circumference LF (120) - Found 1 block
!: Total SF (100), Total Circumference LF (40) - Found 1 block
#Easy Mode (optional):
Remove the need to compute the block count. Just focus on area and circumference length.
#Challenge Input:
So we have a "B" building. It has a "D" driveway. "O" and "o" landscaping. "c" concrete walks. "p" pavers. "V" & "v"
valley gutters. @ and T tree planting.
Finally we have # as Asphalt Paving.
ooooooooooooooooooooooDDDDDooooooooooooooooooooooooooooo
ooooooooooooooooooooooDDDDDooooooooooooooooooooooooooooo
ooo##################o#####o#########################ooo
o@o##################o#####o#########################ooo
ooo##################o#####o#########################oTo
o@o##################################################ooo
ooo##################################################oTo
o@o############ccccccccccccccccccccccc###############ooo
pppppppppppppppcOOOOOOOOOOOOOOOOOOOOOc###############oTo
o@o############cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOc###############oTo
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOcpppppppppppppppppp
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc###############ooo
ooo####V#######cOBBBBBBBBBBBBBBBBBBBOc######v########oTo
o@o####V#######cOBBBBBBBBBBBBBBBBBBBOc######v########ooo
ooo####V#######cOOOOOOOOOOOOOOOOOOOOOc######v########oTo
o@o####V#######ccccccccccccccccccccccc######v########ooo
ooo####V#######ppppppppppppppppppppppp######v########oTo
o@o############ppppppppppppppppppppppp###############ooo
oooooooooooooooooooooooooooooooooooooooooooooooooooooooo
oooooooooooooooooooooooooooooooooooooooooooooooooooooooo
#FAQ:
Diagonals do not connect. The small example shows this. The @ areas are 2 blocks and not 1 because of the diagonal.
"""
def main():
pass
if __name__ == "__main__":
main()
| mit | -3,471,949,555,615,891,000 | 46.494737 | 119 | 0.649379 | false |
Upward-Spiral-Science/team1 | code/test_assumptions.py | 1 | 1525 | import numpy as np
import matplotlib.pyplot as plt
import urllib2
#%matplotlib inline
sample_size = 1000
np.random.seed(1)
url = ('https://raw.githubusercontent.com/Upward-Spiral-Science'
'/data/master/syn-density/output.csv')
data = urllib2.urlopen(url)
csv = np.genfromtxt(data, delimiter=",")[1:]
csv_rand = None
for i in range(1, sample_size + 1):
#Randomly sample from dataset
a = np.random.permutation(np.arange(csv.shape[0]))[:100]
csv_rand_sample = csv[a]
# Normalize
mean_unmask = np.mean(csv_rand_sample[:,3])
std_unmask = np.std(csv_rand_sample[:,3])
csv_rand_sample[:,3] = (csv_rand_sample[:,3]-mean_unmask)/std_unmask
#Stack matrix
if i == 1:
csv_rand = csv_rand_sample
else:
csv_rand = np.dstack((csv_rand,csv_rand_sample))
#Average across random samples
csv_rand = np.mean(csv_rand,axis=2)
#Independence Assumption
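# If the rows of the sample were truly independent, the off-diagonal terms of
# the covariance matrix would be near zero, so comparing the log-determinants
# of its diagonal and off-diagonal parts below gives a rough sanity check of
# the independence assumption.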
covar = np.cov(csv_rand_sample)
plt.figure(figsize=(7,7))
plt.imshow(covar)
plt.title('Covariance of Synapse Density dataset')
plt.colorbar()
plt.show()
diag = covar.diagonal()*np.eye(covar.shape[0])
hollow = covar-diag
d_det = np.linalg.slogdet(diag)[1]
h_det = np.linalg.slogdet(hollow)[1]
print d_det
print h_det
plt.figure(figsize=(11,8))
plt.subplot(121)
plt.imshow(diag)
plt.clim([0, np.max(covar)])
plt.title('Determinant of on-diagonal: ' + str(d_det))
plt.subplot(122)
plt.imshow(hollow)
plt.clim([0, np.max(covar)])
plt.title('Determinant of off-diagonal: ' + str(h_det))
plt.show()
print "Ratio of on and off-diagonal determinants: " + str(d_det/h_det)
| apache-2.0 | 4,933,376,767,457,469,000 | 23.596774 | 70 | 0.70623 | false |
SUSE/kiwi | kiwi/storage/raid_device.py | 1 | 4198 | # Copyright (c) 2015 SUSE Linux GmbH. All rights reserved.
#
# This file is part of kiwi.
#
# kiwi is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# kiwi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with kiwi. If not, see <http://www.gnu.org/licenses/>
#
import os
import logging
# project
from kiwi.command import Command
from kiwi.storage.device_provider import DeviceProvider
from kiwi.storage.mapped_device import MappedDevice
from kiwi.exceptions import (
KiwiRaidSetupError
)
log = logging.getLogger('kiwi')
class RaidDevice(DeviceProvider):
"""
**Implement raid setup on a storage device**
:param object storage_provider: Instance of class based on DeviceProvider
"""
def __init__(self, storage_provider):
        # bind the underlying block device providing class instance
        # to this object (e.g. loop) if present. This is done to guarantee
# the correct destructor order when the device should be released.
self.storage_provider = storage_provider
self.raid_level_map = {
'mirroring': '1',
'striping': '0'
}
self.raid_device = None
def get_device(self):
"""
Instance of MappedDevice providing the raid device
:return: mapped raid device
:rtype: MappedDevice
"""
if self.raid_device:
return MappedDevice(
device=self.raid_device, device_provider=self
)
def create_degraded_raid(self, raid_level):
"""
Create a raid array in degraded mode with one device missing.
This only works in the raid levels 0(striping) and 1(mirroring)
:param string raid_level: raid level name
"""
if raid_level not in self.raid_level_map:
raise KiwiRaidSetupError(
'Only raid levels 0(striping) and 1(mirroring) are supported'
)
raid_device = None
for raid_id in range(9):
raid_device = '/dev/md' + format(raid_id)
if os.path.exists(raid_device):
raid_device = None
else:
break
if not raid_device:
raise KiwiRaidSetupError(
'Could not find free raid device in range md0-8'
)
log.info(
'Creating raid array in %s mode as %s',
raid_level, raid_device
)
Command.run(
[
'mdadm', '--create', '--run', raid_device,
'--level', self.raid_level_map[raid_level],
'--raid-disks', '2',
self.storage_provider.get_device(), 'missing'
]
)
self.raid_device = raid_device
def create_raid_config(self, filename):
"""
Create mdadm config file from mdadm request
:param string filename: config file name
"""
mdadm_call = Command.run(
['mdadm', '-Db', self.raid_device]
)
with open(filename, 'w') as mdadmconf:
mdadmconf.write(mdadm_call.output)
def is_loop(self):
"""
Check if storage provider is loop based
Return loop status from base storage provider
:return: True or False
:rtype: bool
"""
return self.storage_provider.is_loop()
def __del__(self):
if self.raid_device:
log.info('Cleaning up %s instance', type(self).__name__)
try:
Command.run(
['mdadm', '--stop', self.raid_device]
)
except Exception:
log.warning(
'Shutdown of raid device failed, %s still busy',
self.raid_device
)
| gpl-3.0 | -406,493,287,530,871,200 | 30.096296 | 77 | 0.585279 | false |
samba-team/samba | python/samba/provision/__init__.py | 1 | 99121 | # Unix SMB/CIFS implementation.
# backend code for provisioning a Samba AD server
# Copyright (C) Jelmer Vernooij <[email protected]> 2007-2012
# Copyright (C) Andrew Bartlett <[email protected]> 2008-2009
# Copyright (C) Oliver Liebel <[email protected]> 2008-2009
#
# Based on the original in EJS:
# Copyright (C) Andrew Tridgell <[email protected]> 2005
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Functions for setting up a Samba configuration."""
__docformat__ = "restructuredText"
from base64 import b64encode
import errno
import os
import stat
import re
import pwd
import grp
import logging
import time
import uuid
import socket
import tempfile
import samba.dsdb
import ldb
from samba.auth import system_session, admin_session
from samba.auth_util import system_session_unix
import samba
from samba import auth
from samba.samba3 import smbd, passdb
from samba.samba3 import param as s3param
from samba.dsdb import DS_DOMAIN_FUNCTION_2000
from samba import (
Ldb,
MAX_NETBIOS_NAME_LEN,
check_all_substituted,
is_valid_netbios_char,
setup_file,
substitute_var,
valid_netbios_name,
version,
is_heimdal_built,
)
from samba.dcerpc import security, misc
from samba.dcerpc.misc import (
SEC_CHAN_BDC,
SEC_CHAN_WKSTA,
)
from samba.dsdb import (
DS_DOMAIN_FUNCTION_2003,
DS_DOMAIN_FUNCTION_2008_R2,
ENC_ALL_TYPES,
)
from samba.idmap import IDmapDB
from samba.ms_display_specifiers import read_ms_ldif
from samba.ntacls import setntacl, getntacl, dsacl2fsacl
from samba.ndr import ndr_pack, ndr_unpack
from samba.provision.backend import (
LDBBackend,
)
from samba.descriptor import (
get_empty_descriptor,
get_config_descriptor,
get_config_partitions_descriptor,
get_config_sites_descriptor,
get_config_ntds_quotas_descriptor,
get_config_delete_protected1_descriptor,
get_config_delete_protected1wd_descriptor,
get_config_delete_protected2_descriptor,
get_domain_descriptor,
get_domain_infrastructure_descriptor,
get_domain_builtin_descriptor,
get_domain_computers_descriptor,
get_domain_users_descriptor,
get_domain_controllers_descriptor,
get_domain_delete_protected1_descriptor,
get_domain_delete_protected2_descriptor,
get_dns_partition_descriptor,
get_dns_forest_microsoft_dns_descriptor,
get_dns_domain_microsoft_dns_descriptor,
get_managed_service_accounts_descriptor,
)
from samba.provision.common import (
setup_path,
setup_add_ldif,
setup_modify_ldif,
FILL_FULL,
FILL_SUBDOMAIN,
FILL_NT4SYNC,
FILL_DRS
)
from samba.provision.sambadns import (
get_dnsadmins_sid,
setup_ad_dns,
create_dns_dir_keytab_link,
create_dns_update_list
)
import samba.param
import samba.registry
from samba.schema import Schema
from samba.samdb import SamDB
from samba.dbchecker import dbcheck
from samba.provision.kerberos import create_kdc_conf
from samba.samdb import get_default_backend_store
DEFAULT_POLICY_GUID = "31B2F340-016D-11D2-945F-00C04FB984F9"
DEFAULT_DC_POLICY_GUID = "6AC1786C-016F-11D2-945F-00C04FB984F9"
DEFAULTSITE = "Default-First-Site-Name"
LAST_PROVISION_USN_ATTRIBUTE = "lastProvisionUSN"
DEFAULT_MIN_PWD_LENGTH = 7
class ProvisionPaths(object):
def __init__(self):
self.shareconf = None
self.hklm = None
self.hkcu = None
self.hkcr = None
self.hku = None
self.hkpd = None
self.hkpt = None
self.samdb = None
self.idmapdb = None
self.secrets = None
self.keytab = None
self.dns_keytab = None
self.dns = None
self.winsdb = None
self.private_dir = None
self.binddns_dir = None
self.state_dir = None
class ProvisionNames(object):
def __init__(self):
self.ncs = None
self.rootdn = None
self.domaindn = None
self.configdn = None
self.schemadn = None
self.dnsforestdn = None
self.dnsdomaindn = None
self.ldapmanagerdn = None
self.dnsdomain = None
self.realm = None
self.netbiosname = None
self.domain = None
self.hostname = None
self.sitename = None
self.smbconf = None
self.domainsid = None
self.forestsid = None
self.domainguid = None
self.name_map = {}
def find_provision_key_parameters(samdb, secretsdb, idmapdb, paths, smbconf,
lp):
"""Get key provision parameters (realm, domain, ...) from a given provision
:param samdb: An LDB object connected to the sam.ldb file
:param secretsdb: An LDB object connected to the secrets.ldb file
:param idmapdb: An LDB object connected to the idmap.ldb file
:param paths: A list of path to provision object
:param smbconf: Path to the smb.conf file
:param lp: A LoadParm object
:return: A list of key provision parameters
"""
names = ProvisionNames()
names.adminpass = None
# NT domain, kerberos realm, root dn, domain dn, domain dns name
names.domain = lp.get("workgroup").upper()
names.realm = lp.get("realm")
names.dnsdomain = names.realm.lower()
basedn = samba.dn_from_dns_name(names.dnsdomain)
names.realm = names.realm.upper()
# netbiosname
# Get the netbiosname first (could be obtained from smb.conf in theory)
res = secretsdb.search(expression="(flatname=%s)" %
names.domain, base="CN=Primary Domains",
scope=ldb.SCOPE_SUBTREE, attrs=["sAMAccountName"])
names.netbiosname = str(res[0]["sAMAccountName"]).replace("$", "")
names.smbconf = smbconf
# That's a bit simplistic but it's ok as long as we have only 3
# partitions
current = samdb.search(expression="(objectClass=*)",
base="", scope=ldb.SCOPE_BASE,
attrs=["defaultNamingContext", "schemaNamingContext",
"configurationNamingContext", "rootDomainNamingContext",
"namingContexts"])
names.configdn = str(current[0]["configurationNamingContext"][0])
names.schemadn = str(current[0]["schemaNamingContext"][0])
if not (ldb.Dn(samdb, basedn) == (ldb.Dn(samdb,
current[0]["defaultNamingContext"][0].decode('utf8')))):
raise ProvisioningError(("basedn in %s (%s) and from %s (%s)"
"is not the same ..." % (paths.samdb,
str(current[0]["defaultNamingContext"][0].decode('utf8')),
paths.smbconf, basedn)))
names.domaindn = str(current[0]["defaultNamingContext"][0])
names.rootdn = str(current[0]["rootDomainNamingContext"][0])
names.ncs = current[0]["namingContexts"]
names.dnsforestdn = None
names.dnsdomaindn = None
for i in range(0, len(names.ncs)):
nc = str(names.ncs[i])
dnsforestdn = "DC=ForestDnsZones,%s" % (str(names.rootdn))
if nc == dnsforestdn:
names.dnsforestdn = dnsforestdn
continue
dnsdomaindn = "DC=DomainDnsZones,%s" % (str(names.domaindn))
if nc == dnsdomaindn:
names.dnsdomaindn = dnsdomaindn
continue
# default site name
res3 = samdb.search(expression="(objectClass=site)",
base="CN=Sites," + str(names.configdn), scope=ldb.SCOPE_ONELEVEL, attrs=["cn"])
names.sitename = str(res3[0]["cn"])
# dns hostname and server dn
res4 = samdb.search(expression="(CN=%s)" % names.netbiosname,
base="OU=Domain Controllers,%s" % basedn,
scope=ldb.SCOPE_ONELEVEL, attrs=["dNSHostName"])
if len(res4) == 0:
raise ProvisioningError("Unable to find DC called CN=%s under OU=Domain Controllers,%s" % (names.netbiosname, basedn))
names.hostname = str(res4[0]["dNSHostName"]).replace("." + names.dnsdomain, "")
server_res = samdb.search(expression="serverReference=%s" % res4[0].dn,
attrs=[], base=names.configdn)
names.serverdn = str(server_res[0].dn)
# invocation id/objectguid
res5 = samdb.search(expression="(objectClass=*)",
base="CN=NTDS Settings,%s" % str(names.serverdn),
scope=ldb.SCOPE_BASE,
attrs=["invocationID", "objectGUID"])
names.invocation = str(ndr_unpack(misc.GUID, res5[0]["invocationId"][0]))
names.ntdsguid = str(ndr_unpack(misc.GUID, res5[0]["objectGUID"][0]))
# domain guid/sid
res6 = samdb.search(expression="(objectClass=*)", base=basedn,
scope=ldb.SCOPE_BASE, attrs=["objectGUID",
"objectSid", "msDS-Behavior-Version"])
names.domainguid = str(ndr_unpack(misc.GUID, res6[0]["objectGUID"][0]))
names.domainsid = ndr_unpack(security.dom_sid, res6[0]["objectSid"][0])
names.forestsid = ndr_unpack(security.dom_sid, res6[0]["objectSid"][0])
if res6[0].get("msDS-Behavior-Version") is None or \
int(res6[0]["msDS-Behavior-Version"][0]) < DS_DOMAIN_FUNCTION_2000:
names.domainlevel = DS_DOMAIN_FUNCTION_2000
else:
names.domainlevel = int(res6[0]["msDS-Behavior-Version"][0])
# policy guid
res7 = samdb.search(expression="(name={%s})" % DEFAULT_POLICY_GUID,
base="CN=Policies,CN=System," + basedn,
scope=ldb.SCOPE_ONELEVEL, attrs=["cn", "displayName"])
names.policyid = str(res7[0]["cn"]).replace("{", "").replace("}", "")
# dc policy guid
res8 = samdb.search(expression="(name={%s})" % DEFAULT_DC_POLICY_GUID,
base="CN=Policies,CN=System," + basedn,
scope=ldb.SCOPE_ONELEVEL,
attrs=["cn", "displayName"])
if len(res8) == 1:
names.policyid_dc = str(res8[0]["cn"]).replace("{", "").replace("}", "")
else:
names.policyid_dc = None
res9 = idmapdb.search(expression="(cn=%s-%s)" %
(str(names.domainsid), security.DOMAIN_RID_ADMINISTRATOR),
attrs=["xidNumber", "type"])
if len(res9) != 1:
raise ProvisioningError("Unable to find uid/gid for Domain Admins rid (%s-%s" % (str(names.domainsid), security.DOMAIN_RID_ADMINISTRATOR))
if str(res9[0]["type"][0]) == "ID_TYPE_BOTH":
names.root_gid = int(res9[0]["xidNumber"][0])
else:
names.root_gid = pwd.getpwuid(int(res9[0]["xidNumber"][0])).pw_gid
res10 = samdb.search(expression="(samaccountname=dns)",
scope=ldb.SCOPE_SUBTREE, attrs=["dn"],
controls=["search_options:1:2"])
if (len(res10) > 0):
has_legacy_dns_account = True
else:
has_legacy_dns_account = False
res11 = samdb.search(expression="(samaccountname=dns-%s)" % names.netbiosname,
scope=ldb.SCOPE_SUBTREE, attrs=["dn"],
controls=["search_options:1:2"])
if (len(res11) > 0):
has_dns_account = True
else:
has_dns_account = False
if names.dnsdomaindn is not None:
if has_dns_account:
names.dns_backend = 'BIND9_DLZ'
else:
names.dns_backend = 'SAMBA_INTERNAL'
elif has_dns_account or has_legacy_dns_account:
names.dns_backend = 'BIND9_FLATFILE'
else:
names.dns_backend = 'NONE'
dns_admins_sid = get_dnsadmins_sid(samdb, names.domaindn)
names.name_map['DnsAdmins'] = str(dns_admins_sid)
return names
def update_provision_usn(samdb, low, high, id, replace=False):
"""Update the field provisionUSN in sam.ldb
    This field is used to track the ranges of USNs modified by provision and
    upgradeprovision.
    This value is used afterwards by the next provision to figure out whether
    the fields have been modified since the last provision.
    :param samdb: An LDB object connected to sam.ldb
    :param low: The lowest USN modified by this upgrade
    :param high: The highest USN modified by this upgrade
    :param id: The invocation id of the samba DC
    :param replace: A boolean indicating whether the range should replace any
        existing one or be appended (the default)
"""
tab = []
if not replace:
entry = samdb.search(base="@PROVISION",
scope=ldb.SCOPE_BASE,
attrs=[LAST_PROVISION_USN_ATTRIBUTE, "dn"])
for e in entry[0][LAST_PROVISION_USN_ATTRIBUTE]:
if not re.search(';', str(e)):
e = "%s;%s" % (str(e), id)
tab.append(str(e))
tab.append("%s-%s;%s" % (low, high, id))
delta = ldb.Message()
delta.dn = ldb.Dn(samdb, "@PROVISION")
delta[LAST_PROVISION_USN_ATTRIBUTE] = \
ldb.MessageElement(tab,
ldb.FLAG_MOD_REPLACE,
LAST_PROVISION_USN_ATTRIBUTE)
entry = samdb.search(expression='provisionnerID=*',
base="@PROVISION", scope=ldb.SCOPE_BASE,
attrs=["provisionnerID"])
if len(entry) == 0 or len(entry[0]) == 0:
delta["provisionnerID"] = ldb.MessageElement(id, ldb.FLAG_MOD_ADD, "provisionnerID")
samdb.modify(delta)
def set_provision_usn(samdb, low, high, id):
"""Set the field provisionUSN in sam.ldb
    This field is used to track the ranges of USNs modified by provision and
    upgradeprovision.
    This value is used afterwards by the next provision to figure out whether
    the fields have been modified since the last provision.
    :param samdb: An LDB object connected to sam.ldb
:param low: The lowest USN modified by this upgrade
:param high: The highest USN modified by this upgrade
:param id: The invocationId of the provision"""
tab = []
tab.append("%s-%s;%s" % (low, high, id))
delta = ldb.Message()
delta.dn = ldb.Dn(samdb, "@PROVISION")
delta[LAST_PROVISION_USN_ATTRIBUTE] = \
ldb.MessageElement(tab,
ldb.FLAG_MOD_ADD,
LAST_PROVISION_USN_ATTRIBUTE)
samdb.add(delta)
def get_max_usn(samdb, basedn):
""" This function return the biggest USN present in the provision
:param samdb: A LDB object pointing to the sam.ldb
:param basedn: A string containing the base DN of the provision
(ie. DC=foo, DC=bar)
:return: The biggest USN in the provision"""
res = samdb.search(expression="objectClass=*", base=basedn,
scope=ldb.SCOPE_SUBTREE, attrs=["uSNChanged"],
controls=["search_options:1:2",
"server_sort:1:1:uSNChanged",
"paged_results:1:1"])
return res[0]["uSNChanged"]
def get_last_provision_usn(sam):
"""Get USNs ranges modified by a provision or an upgradeprovision
:param sam: An LDB object pointing to the sam.ldb
:return: a dictionary which keys are invocation id and values are an array
of integer representing the different ranges
"""
try:
entry = sam.search(expression="%s=*" % LAST_PROVISION_USN_ATTRIBUTE,
base="@PROVISION", scope=ldb.SCOPE_BASE,
attrs=[LAST_PROVISION_USN_ATTRIBUTE, "provisionnerID"])
except ldb.LdbError as e1:
(ecode, emsg) = e1.args
if ecode == ldb.ERR_NO_SUCH_OBJECT:
return None
raise
if len(entry) > 0:
myids = []
range = {}
p = re.compile(r'-')
if entry[0].get("provisionnerID"):
for e in entry[0]["provisionnerID"]:
myids.append(str(e))
for r in entry[0][LAST_PROVISION_USN_ATTRIBUTE]:
tab1 = str(r).split(';')
if len(tab1) == 2:
id = tab1[1]
else:
id = "default"
if (len(myids) > 0 and id not in myids):
continue
tab2 = p.split(tab1[0])
if range.get(id) is None:
range[id] = []
range[id].append(tab2[0])
range[id].append(tab2[1])
return range
else:
return None
class ProvisionResult(object):
"""Result of a provision.
:ivar server_role: The server role
:ivar paths: ProvisionPaths instance
:ivar domaindn: The domain dn, as string
"""
def __init__(self):
self.server_role = None
self.paths = None
self.domaindn = None
self.lp = None
self.samdb = None
self.idmap = None
self.names = None
self.domainsid = None
self.adminpass_generated = None
self.adminpass = None
self.backend_result = None
def report_logger(self, logger):
"""Report this provision result to a logger."""
logger.info(
"Once the above files are installed, your Samba AD server will "
"be ready to use")
if self.adminpass_generated:
logger.info("Admin password: %s", self.adminpass)
logger.info("Server Role: %s", self.server_role)
logger.info("Hostname: %s", self.names.hostname)
logger.info("NetBIOS Domain: %s", self.names.domain)
logger.info("DNS Domain: %s", self.names.dnsdomain)
logger.info("DOMAIN SID: %s", self.domainsid)
if self.backend_result:
self.backend_result.report_logger(logger)
def findnss(nssfn, names):
"""Find a user or group from a list of possibilities.
:param nssfn: NSS Function to try (should raise KeyError if not found)
:param names: Names to check.
    :return: Value returned by nssfn for the first name that resolves.
"""
for name in names:
try:
return nssfn(name)
except KeyError:
pass
raise KeyError("Unable to find user/group in %r" % names)
def findnss_uid(names):
return findnss(pwd.getpwnam, names)[2]
def findnss_gid(names):
return findnss(grp.getgrnam, names)[2]
def get_root_uid(root, logger):
try:
root_uid = findnss_uid(root)
except KeyError as e:
logger.info(e)
logger.info("Assuming root user has UID zero")
root_uid = 0
return root_uid
def provision_paths_from_lp(lp, dnsdomain):
"""Set the default paths for provisioning.
:param lp: Loadparm context.
:param dnsdomain: DNS Domain name
"""
paths = ProvisionPaths()
paths.private_dir = lp.get("private dir")
paths.binddns_dir = lp.get("binddns dir")
paths.state_dir = lp.get("state directory")
# This is stored without path prefix for the "privateKeytab" attribute in
# "secrets_dns.ldif".
paths.dns_keytab = "dns.keytab"
paths.keytab = "secrets.keytab"
paths.shareconf = os.path.join(paths.private_dir, "share.ldb")
paths.samdb = os.path.join(paths.private_dir, "sam.ldb")
paths.idmapdb = os.path.join(paths.private_dir, "idmap.ldb")
paths.secrets = os.path.join(paths.private_dir, "secrets.ldb")
paths.privilege = os.path.join(paths.private_dir, "privilege.ldb")
paths.dns_update_list = os.path.join(paths.private_dir, "dns_update_list")
paths.spn_update_list = os.path.join(paths.private_dir, "spn_update_list")
paths.krb5conf = os.path.join(paths.private_dir, "krb5.conf")
paths.kdcconf = os.path.join(paths.private_dir, "kdc.conf")
paths.winsdb = os.path.join(paths.private_dir, "wins.ldb")
paths.s4_ldapi_path = os.path.join(paths.private_dir, "ldapi")
paths.encrypted_secrets_key_path = os.path.join(
paths.private_dir,
"encrypted_secrets.key")
paths.dns = os.path.join(paths.binddns_dir, "dns", dnsdomain + ".zone")
paths.namedconf = os.path.join(paths.binddns_dir, "named.conf")
paths.namedconf_update = os.path.join(paths.binddns_dir, "named.conf.update")
paths.namedtxt = os.path.join(paths.binddns_dir, "named.txt")
paths.hklm = "hklm.ldb"
paths.hkcr = "hkcr.ldb"
paths.hkcu = "hkcu.ldb"
paths.hku = "hku.ldb"
paths.hkpd = "hkpd.ldb"
paths.hkpt = "hkpt.ldb"
paths.sysvol = lp.get("path", "sysvol")
paths.netlogon = lp.get("path", "netlogon")
paths.smbconf = lp.configfile
return paths
def determine_netbios_name(hostname):
"""Determine a netbios name from a hostname."""
# remove forbidden chars and force the length to be <16
netbiosname = "".join([x for x in hostname if is_valid_netbios_char(x)])
return netbiosname[:MAX_NETBIOS_NAME_LEN].upper()
def guess_names(lp=None, hostname=None, domain=None, dnsdomain=None,
serverrole=None, rootdn=None, domaindn=None, configdn=None,
schemadn=None, serverdn=None, sitename=None,
domain_names_forced=False):
"""Guess configuration settings to use."""
if hostname is None:
hostname = socket.gethostname().split(".")[0]
netbiosname = lp.get("netbios name")
if netbiosname is None:
netbiosname = determine_netbios_name(hostname)
netbiosname = netbiosname.upper()
if not valid_netbios_name(netbiosname):
raise InvalidNetbiosName(netbiosname)
if dnsdomain is None:
dnsdomain = lp.get("realm")
if dnsdomain is None or dnsdomain == "":
raise ProvisioningError(
"guess_names: 'realm' not specified in supplied %s!" %
lp.configfile)
dnsdomain = dnsdomain.lower()
if serverrole is None:
serverrole = lp.get("server role")
if serverrole is None:
raise ProvisioningError("guess_names: 'server role' not specified in supplied %s!" % lp.configfile)
serverrole = serverrole.lower()
realm = dnsdomain.upper()
if lp.get("realm") == "":
raise ProvisioningError("guess_names: 'realm =' was not specified in supplied %s. Please remove the smb.conf file and let provision generate it" % lp.configfile)
if lp.get("realm").upper() != realm:
raise ProvisioningError("guess_names: 'realm=%s' in %s must match chosen realm '%s'! Please remove the smb.conf file and let provision generate it" % (lp.get("realm").upper(), lp.configfile, realm))
if lp.get("server role").lower() != serverrole:
raise ProvisioningError("guess_names: 'server role=%s' in %s must match chosen server role '%s'! Please remove the smb.conf file and let provision generate it" % (lp.get("server role"), lp.configfile, serverrole))
if serverrole == "active directory domain controller":
if domain is None:
# This will, for better or worse, default to 'WORKGROUP'
domain = lp.get("workgroup")
domain = domain.upper()
if lp.get("workgroup").upper() != domain:
raise ProvisioningError("guess_names: Workgroup '%s' in smb.conf must match chosen domain '%s'! Please remove the %s file and let provision generate it" % (lp.get("workgroup").upper(), domain, lp.configfile))
if domaindn is None:
domaindn = samba.dn_from_dns_name(dnsdomain)
if domain == netbiosname:
raise ProvisioningError("guess_names: Domain '%s' must not be equal to short host name '%s'!" % (domain, netbiosname))
else:
domain = netbiosname
if domaindn is None:
domaindn = "DC=" + netbiosname
if not valid_netbios_name(domain):
raise InvalidNetbiosName(domain)
if hostname.upper() == realm:
raise ProvisioningError("guess_names: Realm '%s' must not be equal to hostname '%s'!" % (realm, hostname))
if netbiosname.upper() == realm:
raise ProvisioningError("guess_names: Realm '%s' must not be equal to NetBIOS hostname '%s'!" % (realm, netbiosname))
if domain == realm and not domain_names_forced:
raise ProvisioningError("guess_names: Realm '%s' must not be equal to short domain name '%s'!" % (realm, domain))
if serverrole != "active directory domain controller":
#
# This is the code path for a domain member
        # where we provision the database as if we were
# on a domain controller, so we should not use
# the same dnsdomain as the domain controllers
# of our primary domain.
#
# This will be important if we start doing
# SID/name filtering and reject the local
# sid and names if they come from a domain
# controller.
#
realm = netbiosname
dnsdomain = netbiosname.lower()
if rootdn is None:
rootdn = domaindn
if configdn is None:
configdn = "CN=Configuration," + rootdn
if schemadn is None:
schemadn = "CN=Schema," + configdn
if sitename is None:
sitename = DEFAULTSITE
names = ProvisionNames()
names.rootdn = rootdn
names.domaindn = domaindn
names.configdn = configdn
names.schemadn = schemadn
names.ldapmanagerdn = "CN=Manager," + rootdn
names.dnsdomain = dnsdomain
names.domain = domain
names.realm = realm
names.netbiosname = netbiosname
names.hostname = hostname
names.sitename = sitename
names.serverdn = "CN=%s,CN=Servers,CN=%s,CN=Sites,%s" % (
netbiosname, sitename, configdn)
return names
def make_smbconf(smbconf, hostname, domain, realm, targetdir,
serverrole=None, eadb=False, use_ntvfs=False, lp=None,
global_param=None):
"""Create a new smb.conf file based on a couple of basic settings.
"""
assert smbconf is not None
if hostname is None:
hostname = socket.gethostname().split(".")[0]
netbiosname = determine_netbios_name(hostname)
if serverrole is None:
serverrole = "standalone server"
assert domain is not None
domain = domain.upper()
assert realm is not None
realm = realm.upper()
global_settings = {
"netbios name": netbiosname,
"workgroup": domain,
"realm": realm,
"server role": serverrole,
}
if lp is None:
lp = samba.param.LoadParm()
    # Load the existing smb.conf, if there is one
if os.path.exists(smbconf):
lp.load(smbconf)
if global_param is not None:
for ent in global_param:
if global_param[ent] is not None:
global_settings[ent] = " ".join(global_param[ent])
if targetdir is not None:
global_settings["private dir"] = os.path.abspath(os.path.join(targetdir, "private"))
global_settings["lock dir"] = os.path.abspath(targetdir)
global_settings["state directory"] = os.path.abspath(os.path.join(targetdir, "state"))
global_settings["cache directory"] = os.path.abspath(os.path.join(targetdir, "cache"))
global_settings["binddns dir"] = os.path.abspath(os.path.join(targetdir, "bind-dns"))
lp.set("lock dir", os.path.abspath(targetdir))
lp.set("state directory", global_settings["state directory"])
lp.set("cache directory", global_settings["cache directory"])
lp.set("binddns dir", global_settings["binddns dir"])
if eadb:
if use_ntvfs:
if targetdir is not None:
privdir = os.path.join(targetdir, "private")
lp.set("posix:eadb",
os.path.abspath(os.path.join(privdir, "eadb.tdb")))
elif not lp.get("posix:eadb"):
privdir = lp.get("private dir")
lp.set("posix:eadb",
os.path.abspath(os.path.join(privdir, "eadb.tdb")))
else:
if targetdir is not None:
statedir = os.path.join(targetdir, "state")
lp.set("xattr_tdb:file",
os.path.abspath(os.path.join(statedir, "xattr.tdb")))
elif not lp.get("xattr_tdb:file"):
statedir = lp.get("state directory")
lp.set("xattr_tdb:file",
os.path.abspath(os.path.join(statedir, "xattr.tdb")))
shares = {}
if serverrole == "active directory domain controller":
shares["sysvol"] = os.path.join(lp.get("state directory"), "sysvol")
shares["netlogon"] = os.path.join(shares["sysvol"], realm.lower(),
"scripts")
else:
global_settings["passdb backend"] = "samba_dsdb"
f = open(smbconf, 'w')
try:
f.write("[globals]\n")
for key, val in global_settings.items():
f.write("\t%s = %s\n" % (key, val))
f.write("\n")
for name, path in shares.items():
f.write("[%s]\n" % name)
f.write("\tpath = %s\n" % path)
f.write("\tread only = no\n")
f.write("\n")
finally:
f.close()
# reload the smb.conf
lp.load(smbconf)
# and dump it without any values that are the default
# this ensures that any smb.conf parameters that were set
# on the provision/join command line are set in the resulting smb.conf
lp.dump(False, smbconf)
def setup_name_mappings(idmap, sid, root_uid, nobody_uid,
users_gid, root_gid):
"""setup reasonable name mappings for sam names to unix names.
:param samdb: SamDB object.
:param idmap: IDmap db object.
:param sid: The domain sid.
:param domaindn: The domain DN.
:param root_uid: uid of the UNIX root user.
:param nobody_uid: uid of the UNIX nobody user.
:param users_gid: gid of the UNIX users group.
:param root_gid: gid of the UNIX root group.
"""
idmap.setup_name_mapping("S-1-5-7", idmap.TYPE_UID, nobody_uid)
idmap.setup_name_mapping(sid + "-500", idmap.TYPE_UID, root_uid)
idmap.setup_name_mapping(sid + "-513", idmap.TYPE_GID, users_gid)
def setup_samdb_partitions(samdb_path, logger, lp, session_info,
provision_backend, names, serverrole,
erase=False, plaintext_secrets=False,
                           backend_store=None, backend_store_size=None):
"""Setup the partitions for the SAM database.
Alternatively, provision() may call this, and then populate the database.
:note: This will wipe the Sam Database!
:note: This function always removes the local SAM LDB file. The erase
parameter controls whether to erase the existing data, which
may not be stored locally but in LDAP.
"""
assert session_info is not None
# We use options=["modules:"] to stop the modules loading - we
# just want to wipe and re-initialise the database, not start it up
try:
os.unlink(samdb_path)
except OSError:
pass
samdb = Ldb(url=samdb_path, session_info=session_info,
lp=lp, options=["modules:"])
ldap_backend_line = "# No LDAP backend"
if provision_backend.type != "ldb":
ldap_backend_line = "ldapBackend: %s" % provision_backend.ldap_uri
required_features = None
if not plaintext_secrets:
required_features = "requiredFeatures: encryptedSecrets"
if backend_store is None:
backend_store = get_default_backend_store()
backend_store_line = "backendStore: %s" % backend_store
if backend_store == "mdb":
if required_features is not None:
required_features += "\n"
else:
required_features = ""
required_features += "requiredFeatures: lmdbLevelOne"
if required_features is None:
required_features = "# No required features"
samdb.transaction_start()
try:
logger.info("Setting up sam.ldb partitions and settings")
setup_add_ldif(samdb, setup_path("provision_partitions.ldif"), {
"LDAP_BACKEND_LINE": ldap_backend_line,
"BACKEND_STORE": backend_store_line
})
setup_add_ldif(samdb, setup_path("provision_init.ldif"), {
"BACKEND_TYPE": provision_backend.type,
"SERVER_ROLE": serverrole,
"REQUIRED_FEATURES": required_features
})
logger.info("Setting up sam.ldb rootDSE")
setup_samdb_rootdse(samdb, names)
except:
samdb.transaction_cancel()
raise
else:
samdb.transaction_commit()
def secretsdb_self_join(secretsdb, domain,
netbiosname, machinepass, domainsid=None,
realm=None, dnsdomain=None,
keytab_path=None,
key_version_number=1,
secure_channel_type=SEC_CHAN_WKSTA):
"""Add domain join-specific bits to a secrets database.
:param secretsdb: Ldb Handle to the secrets database
:param machinepass: Machine password
"""
attrs = ["whenChanged",
"secret",
"priorSecret",
"priorChanged",
"krb5Keytab",
"privateKeytab"]
if realm is not None:
if dnsdomain is None:
dnsdomain = realm.lower()
dnsname = '%s.%s' % (netbiosname.lower(), dnsdomain.lower())
else:
dnsname = None
shortname = netbiosname.lower()
# We don't need to set msg["flatname"] here, because rdn_name will handle
# it, and it causes problems for modifies anyway
msg = ldb.Message(ldb.Dn(secretsdb, "flatname=%s,cn=Primary Domains" % domain))
msg["secureChannelType"] = [str(secure_channel_type)]
msg["objectClass"] = ["top", "primaryDomain"]
if dnsname is not None:
msg["objectClass"] = ["top", "primaryDomain", "kerberosSecret"]
msg["realm"] = [realm]
msg["saltPrincipal"] = ["host/%s@%s" % (dnsname, realm.upper())]
msg["msDS-KeyVersionNumber"] = [str(key_version_number)]
msg["privateKeytab"] = ["secrets.keytab"]
msg["secret"] = [machinepass.encode('utf-8')]
msg["samAccountName"] = ["%s$" % netbiosname]
msg["secureChannelType"] = [str(secure_channel_type)]
if domainsid is not None:
msg["objectSid"] = [ndr_pack(domainsid)]
# This complex expression tries to ensure that we don't have more
# than one record for this SID, realm or netbios domain at a time,
# but we don't delete the old record that we are about to modify,
# because that would delete the keytab and previous password.
res = secretsdb.search(base="cn=Primary Domains", attrs=attrs,
expression=("(&(|(flatname=%s)(realm=%s)(objectSid=%s))(objectclass=primaryDomain)(!(distinguishedName=%s)))" % (domain, realm, str(domainsid), str(msg.dn))),
scope=ldb.SCOPE_ONELEVEL)
for del_msg in res:
secretsdb.delete(del_msg.dn)
res = secretsdb.search(base=msg.dn, attrs=attrs, scope=ldb.SCOPE_BASE)
if len(res) == 1:
msg["priorSecret"] = [res[0]["secret"][0]]
try:
msg["priorWhenChanged"] = [res[0]["whenChanged"][0]]
except KeyError:
pass
try:
msg["privateKeytab"] = [res[0]["privateKeytab"][0]]
except KeyError:
pass
try:
msg["krb5Keytab"] = [res[0]["krb5Keytab"][0]]
except KeyError:
pass
for el in msg:
if el != 'dn':
msg[el].set_flags(ldb.FLAG_MOD_REPLACE)
secretsdb.modify(msg)
secretsdb.rename(res[0].dn, msg.dn)
else:
spn = ['HOST/%s' % shortname]
if secure_channel_type == SEC_CHAN_BDC and dnsname is not None:
# we are a domain controller then we add servicePrincipalName
# entries for the keytab code to update.
spn.extend(['HOST/%s' % dnsname])
msg["servicePrincipalName"] = spn
secretsdb.add(msg)
def setup_secretsdb(paths, session_info, lp):
"""Setup the secrets database.
    :note: This function does not handle exceptions and transactions on
        purpose, it's up to the caller to do this job.
    :param paths: ProvisionPaths object holding the secrets database path.
    :param session_info: Session info.
:param lp: Loadparm context
:return: LDB handle for the created secrets database
"""
if os.path.exists(paths.secrets):
os.unlink(paths.secrets)
keytab_path = os.path.join(paths.private_dir, paths.keytab)
if os.path.exists(keytab_path):
os.unlink(keytab_path)
bind_dns_keytab_path = os.path.join(paths.binddns_dir, paths.dns_keytab)
if os.path.exists(bind_dns_keytab_path):
os.unlink(bind_dns_keytab_path)
dns_keytab_path = os.path.join(paths.private_dir, paths.dns_keytab)
if os.path.exists(dns_keytab_path):
os.unlink(dns_keytab_path)
path = paths.secrets
secrets_ldb = Ldb(path, session_info=session_info, lp=lp)
secrets_ldb.erase()
secrets_ldb.load_ldif_file_add(setup_path("secrets_init.ldif"))
secrets_ldb = Ldb(path, session_info=session_info, lp=lp)
secrets_ldb.transaction_start()
try:
secrets_ldb.load_ldif_file_add(setup_path("secrets.ldif"))
except:
secrets_ldb.transaction_cancel()
raise
return secrets_ldb
def setup_privileges(path, session_info, lp):
"""Setup the privileges database.
:param path: Path to the privileges database.
:param session_info: Session info.
    :param lp: Loadparm context
"""
if os.path.exists(path):
os.unlink(path)
privilege_ldb = Ldb(path, session_info=session_info, lp=lp)
privilege_ldb.erase()
privilege_ldb.load_ldif_file_add(setup_path("provision_privilege.ldif"))
def setup_encrypted_secrets_key(path):
"""Setup the encrypted secrets key file.
Any existing key file will be deleted and a new random key generated.
:param path: Path to the secrets key file.
"""
if os.path.exists(path):
os.unlink(path)
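    # Clear the umask while creating the file so the 0600 mode is applied
    # exactly as given, then restore it; O_EXCL ensures a stale key file is
    # never silently reused.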
flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
mode = stat.S_IRUSR | stat.S_IWUSR
umask_original = os.umask(0)
try:
fd = os.open(path, flags, mode)
finally:
os.umask(umask_original)
with os.fdopen(fd, 'wb') as f:
key = samba.generate_random_bytes(16)
f.write(key)
def setup_registry(path, session_info, lp):
"""Setup the registry.
:param path: Path to the registry database
:param session_info: Session information
:param lp: Loadparm context
"""
reg = samba.registry.Registry()
hive = samba.registry.open_ldb(path, session_info=session_info, lp_ctx=lp)
reg.mount_hive(hive, samba.registry.HKEY_LOCAL_MACHINE)
provision_reg = setup_path("provision.reg")
assert os.path.exists(provision_reg)
reg.diff_apply(provision_reg)
def setup_idmapdb(path, session_info, lp):
"""Setup the idmap database.
:param path: path to the idmap database
:param session_info: Session information
:param lp: Loadparm context
"""
if os.path.exists(path):
os.unlink(path)
idmap_ldb = IDmapDB(path, session_info=session_info, lp=lp)
idmap_ldb.erase()
idmap_ldb.load_ldif_file_add(setup_path("idmap_init.ldif"))
return idmap_ldb
def setup_samdb_rootdse(samdb, names):
"""Setup the SamDB rootdse.
:param samdb: Sam Database handle
"""
setup_add_ldif(samdb, setup_path("provision_rootdse_add.ldif"), {
"SCHEMADN": names.schemadn,
"DOMAINDN": names.domaindn,
"ROOTDN": names.rootdn,
"CONFIGDN": names.configdn,
"SERVERDN": names.serverdn,
})
def setup_self_join(samdb, admin_session_info, names, fill, machinepass,
dns_backend, dnspass, domainsid, next_rid, invocationid,
policyguid, policyguid_dc,
domainControllerFunctionality, ntdsguid=None, dc_rid=None):
"""Join a host to its own domain."""
assert isinstance(invocationid, str)
if ntdsguid is not None:
ntdsguid_line = "objectGUID: %s\n" % ntdsguid
else:
ntdsguid_line = ""
if dc_rid is None:
dc_rid = next_rid
setup_add_ldif(samdb, setup_path("provision_self_join.ldif"), {
"CONFIGDN": names.configdn,
"SCHEMADN": names.schemadn,
"DOMAINDN": names.domaindn,
"SERVERDN": names.serverdn,
"INVOCATIONID": invocationid,
"NETBIOSNAME": names.netbiosname,
"DNSNAME": "%s.%s" % (names.hostname, names.dnsdomain),
"MACHINEPASS_B64": b64encode(machinepass.encode('utf-16-le')).decode('utf8'),
"DOMAINSID": str(domainsid),
"DCRID": str(dc_rid),
"SAMBA_VERSION_STRING": version,
"NTDSGUID": ntdsguid_line,
"DOMAIN_CONTROLLER_FUNCTIONALITY": str(
domainControllerFunctionality),
"RIDALLOCATIONSTART": str(next_rid + 100),
"RIDALLOCATIONEND": str(next_rid + 100 + 499)})
setup_add_ldif(samdb, setup_path("provision_group_policy.ldif"), {
"POLICYGUID": policyguid,
"POLICYGUID_DC": policyguid_dc,
"DNSDOMAIN": names.dnsdomain,
"DOMAINDN": names.domaindn})
# If we are setting up a subdomain, then this has been replicated in, so we
# don't need to add it
if fill == FILL_FULL:
setup_add_ldif(samdb, setup_path("provision_self_join_config.ldif"), {
"CONFIGDN": names.configdn,
"SCHEMADN": names.schemadn,
"DOMAINDN": names.domaindn,
"SERVERDN": names.serverdn,
"INVOCATIONID": invocationid,
"NETBIOSNAME": names.netbiosname,
"DNSNAME": "%s.%s" % (names.hostname, names.dnsdomain),
"MACHINEPASS_B64": b64encode(machinepass.encode('utf-16-le')).decode('utf8'),
"DOMAINSID": str(domainsid),
"DCRID": str(dc_rid),
"SAMBA_VERSION_STRING": version,
"NTDSGUID": ntdsguid_line,
"DOMAIN_CONTROLLER_FUNCTIONALITY": str(
domainControllerFunctionality)})
# Setup fSMORoleOwner entries to point at the newly created DC entry
setup_modify_ldif(samdb,
setup_path("provision_self_join_modify_schema.ldif"), {
"SCHEMADN": names.schemadn,
"SERVERDN": names.serverdn,
},
controls=["provision:0", "relax:0"])
setup_modify_ldif(samdb,
setup_path("provision_self_join_modify_config.ldif"), {
"CONFIGDN": names.configdn,
"DEFAULTSITE": names.sitename,
"NETBIOSNAME": names.netbiosname,
"SERVERDN": names.serverdn,
})
system_session_info = system_session()
samdb.set_session_info(system_session_info)
    # Setup fSMORoleOwner entries to point at the newly created DC entry.
    # To modify a serverReference under cn=config when we are a subdomain,
    # we must be system due to ACLs.
setup_modify_ldif(samdb, setup_path("provision_self_join_modify.ldif"), {
"DOMAINDN": names.domaindn,
"SERVERDN": names.serverdn,
"NETBIOSNAME": names.netbiosname,
})
samdb.set_session_info(admin_session_info)
if dns_backend != "SAMBA_INTERNAL":
# This is Samba4 specific and should be replaced by the correct
# DNS AD-style setup
setup_add_ldif(samdb, setup_path("provision_dns_add_samba.ldif"), {
"DNSDOMAIN": names.dnsdomain,
"DOMAINDN": names.domaindn,
"DNSPASS_B64": b64encode(dnspass.encode('utf-16-le')).decode('utf8'),
"HOSTNAME": names.hostname,
"DNSNAME": '%s.%s' % (
names.netbiosname.lower(), names.dnsdomain.lower())
})
def getpolicypath(sysvolpath, dnsdomain, guid):
"""Return the physical path of policy given its guid.
:param sysvolpath: Path to the sysvol folder
:param dnsdomain: DNS name of the AD domain
:param guid: The GUID of the policy
:return: A string with the complete path to the policy folder
"""
if guid[0] != "{":
guid = "{%s}" % guid
policy_path = os.path.join(sysvolpath, dnsdomain, "Policies", guid)
return policy_path
def create_gpo_struct(policy_path):
if not os.path.exists(policy_path):
os.makedirs(policy_path, 0o775)
f = open(os.path.join(policy_path, "GPT.INI"), 'w')
try:
f.write("[General]\r\nVersion=0")
finally:
f.close()
p = os.path.join(policy_path, "MACHINE")
if not os.path.exists(p):
os.makedirs(p, 0o775)
p = os.path.join(policy_path, "USER")
if not os.path.exists(p):
os.makedirs(p, 0o775)
def create_default_gpo(sysvolpath, dnsdomain, policyguid, policyguid_dc):
"""Create the default GPO for a domain
:param sysvolpath: Physical path for the sysvol folder
:param dnsdomain: DNS domain name of the AD domain
:param policyguid: GUID of the default domain policy
    :param policyguid_dc: GUID of the default domain controller policy
"""
policy_path = getpolicypath(sysvolpath, dnsdomain, policyguid)
create_gpo_struct(policy_path)
policy_path = getpolicypath(sysvolpath, dnsdomain, policyguid_dc)
create_gpo_struct(policy_path)
# Default the database size to 8 GiB
DEFAULT_BACKEND_SIZE = 8 * 1024 * 1024 * 1024
def setup_samdb(path, session_info, provision_backend, lp, names,
logger, fill, serverrole, schema, am_rodc=False,
plaintext_secrets=False, backend_store=None,
backend_store_size=None, batch_mode=False):
"""Setup a complete SAM Database.
:note: This will wipe the main SAM database file!
"""
# Also wipes the database
setup_samdb_partitions(path, logger=logger, lp=lp,
provision_backend=provision_backend, session_info=session_info,
names=names, serverrole=serverrole, plaintext_secrets=plaintext_secrets,
backend_store=backend_store,
backend_store_size=backend_store_size)
store_size = DEFAULT_BACKEND_SIZE
if backend_store_size:
store_size = backend_store_size
options = []
if backend_store == "mdb":
options.append("lmdb_env_size:" + str(store_size))
if batch_mode:
options.append("batch_mode:1")
if batch_mode:
# Estimate the number of index records in the transaction_index_cache
# Numbers chosen give the prime 202481 for the default backend size,
# which works well for a 100,000 user database
cache_size = int(store_size / 42423) + 1
options.append("transaction_index_cache_size:" + str(cache_size))
    # Load the database, but don't load the global schema and don't connect
# quite yet
samdb = SamDB(session_info=session_info, url=None, auto_connect=False,
lp=lp,
global_schema=False, am_rodc=am_rodc, options=options)
logger.info("Pre-loading the Samba 4 and AD schema")
# Load the schema from the one we computed earlier
samdb.set_schema(schema, write_indices_and_attributes=False)
# Set the NTDS settings DN manually - in order to have it already around
# before the provisioned tree exists and we connect
samdb.set_ntds_settings_dn("CN=NTDS Settings,%s" % names.serverdn)
# And now we can connect to the DB - the schema won't be loaded from the
# DB
try:
samdb.connect(path, options=options)
except ldb.LdbError as e2:
(num, string_error) = e2.args
if (num == ldb.ERR_INSUFFICIENT_ACCESS_RIGHTS):
raise ProvisioningError("Permission denied connecting to %s, are you running as root?" % path)
else:
raise
# But we have to give it one more kick to have it use the schema
# during provision - it needs, now that it is connected, to write
# the schema @ATTRIBUTES and @INDEXLIST records to the database.
samdb.set_schema(schema, write_indices_and_attributes=True)
return samdb
def fill_samdb(samdb, lp, names, logger, policyguid,
policyguid_dc, fill, adminpass, krbtgtpass, machinepass, dns_backend,
dnspass, invocationid, ntdsguid, serverrole, am_rodc=False,
dom_for_fun_level=None, schema=None, next_rid=None, dc_rid=None,
backend_store=None,
backend_store_size=None):
if next_rid is None:
next_rid = 1000
    # Provision does not make much sense with values larger than 1000000000
# as the upper range of the rIDAvailablePool is 1073741823 and
# we don't want to create a domain that cannot allocate rids.
if next_rid < 1000 or next_rid > 1000000000:
error = "You want to run SAMBA 4 with a next_rid of %u, " % (next_rid)
error += "the valid range is %u-%u. The default is %u." % (
1000, 1000000000, 1000)
raise ProvisioningError(error)
# ATTENTION: Do NOT change these default values without discussion with the
# team and/or release manager. They have a big impact on the whole program!
domainControllerFunctionality = DS_DOMAIN_FUNCTION_2008_R2
if dom_for_fun_level is None:
dom_for_fun_level = DS_DOMAIN_FUNCTION_2008_R2
if dom_for_fun_level > domainControllerFunctionality:
raise ProvisioningError("You want to run SAMBA 4 on a domain and forest function level which itself is higher than its actual DC function level (2008_R2). This won't work!")
domainFunctionality = dom_for_fun_level
forestFunctionality = dom_for_fun_level
# Set the NTDS settings DN manually - in order to have it already around
# before the provisioned tree exists and we connect
samdb.set_ntds_settings_dn("CN=NTDS Settings,%s" % names.serverdn)
# Set the domain functionality levels onto the database.
    # Various modules (the password_hash module in particular) need
    # to know what level of AD we are emulating.
    # These will be fixed into the database via the database
    # modifications below, but we need them set from the start.
samdb.set_opaque_integer("domainFunctionality", domainFunctionality)
samdb.set_opaque_integer("forestFunctionality", forestFunctionality)
samdb.set_opaque_integer("domainControllerFunctionality",
domainControllerFunctionality)
samdb.set_domain_sid(str(names.domainsid))
samdb.set_invocation_id(invocationid)
logger.info("Adding DomainDN: %s" % names.domaindn)
# impersonate domain admin
admin_session_info = admin_session(lp, str(names.domainsid))
samdb.set_session_info(admin_session_info)
if names.domainguid is not None:
domainguid_line = "objectGUID: %s\n-" % names.domainguid
else:
domainguid_line = ""
descr = b64encode(get_domain_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision_basedn.ldif"), {
"DOMAINDN": names.domaindn,
"DOMAINSID": str(names.domainsid),
"DESCRIPTOR": descr,
"DOMAINGUID": domainguid_line
})
setup_modify_ldif(samdb, setup_path("provision_basedn_modify.ldif"), {
"DOMAINDN": names.domaindn,
"CREATTIME": str(samba.unix2nttime(int(time.time()))),
"NEXTRID": str(next_rid),
"DEFAULTSITE": names.sitename,
"CONFIGDN": names.configdn,
"POLICYGUID": policyguid,
"DOMAIN_FUNCTIONALITY": str(domainFunctionality),
"SAMBA_VERSION_STRING": version,
"MIN_PWD_LENGTH": str(DEFAULT_MIN_PWD_LENGTH)
})
# If we are setting up a subdomain, then this has been replicated in, so we don't need to add it
if fill == FILL_FULL:
logger.info("Adding configuration container")
descr = b64encode(get_config_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision_configuration_basedn.ldif"), {
"CONFIGDN": names.configdn,
"DESCRIPTOR": descr,
})
# The LDIF here was created when the Schema object was constructed
ignore_checks_oid = "local_oid:%s:0" % samba.dsdb.DSDB_CONTROL_SKIP_DUPLICATES_CHECK_OID
schema_controls = [
"provision:0",
"relax:0",
ignore_checks_oid
]
logger.info("Setting up sam.ldb schema")
samdb.add_ldif(schema.schema_dn_add, controls=schema_controls)
samdb.modify_ldif(schema.schema_dn_modify, controls=schema_controls)
samdb.write_prefixes_from_schema()
samdb.add_ldif(schema.schema_data, controls=schema_controls)
setup_add_ldif(samdb, setup_path("aggregate_schema.ldif"),
{"SCHEMADN": names.schemadn},
controls=schema_controls)
# Now register this container in the root of the forest
msg = ldb.Message(ldb.Dn(samdb, names.domaindn))
msg["subRefs"] = ldb.MessageElement(names.configdn, ldb.FLAG_MOD_ADD,
"subRefs")
samdb.invocation_id = invocationid
# If we are setting up a subdomain, then this has been replicated in, so we don't need to add it
if fill == FILL_FULL:
logger.info("Setting up sam.ldb configuration data")
partitions_descr = b64encode(get_config_partitions_descriptor(names.domainsid)).decode('utf8')
sites_descr = b64encode(get_config_sites_descriptor(names.domainsid)).decode('utf8')
ntdsquotas_descr = b64encode(get_config_ntds_quotas_descriptor(names.domainsid)).decode('utf8')
protected1_descr = b64encode(get_config_delete_protected1_descriptor(names.domainsid)).decode('utf8')
protected1wd_descr = b64encode(get_config_delete_protected1wd_descriptor(names.domainsid)).decode('utf8')
protected2_descr = b64encode(get_config_delete_protected2_descriptor(names.domainsid)).decode('utf8')
if "2008" in schema.base_schema:
# exclude 2012-specific changes if we're using a 2008 schema
incl_2012 = "#"
else:
incl_2012 = ""
setup_add_ldif(samdb, setup_path("provision_configuration.ldif"), {
"CONFIGDN": names.configdn,
"NETBIOSNAME": names.netbiosname,
"DEFAULTSITE": names.sitename,
"DNSDOMAIN": names.dnsdomain,
"DOMAIN": names.domain,
"SCHEMADN": names.schemadn,
"DOMAINDN": names.domaindn,
"SERVERDN": names.serverdn,
"FOREST_FUNCTIONALITY": str(forestFunctionality),
"DOMAIN_FUNCTIONALITY": str(domainFunctionality),
"NTDSQUOTAS_DESCRIPTOR": ntdsquotas_descr,
"LOSTANDFOUND_DESCRIPTOR": protected1wd_descr,
"SERVICES_DESCRIPTOR": protected1_descr,
"PHYSICALLOCATIONS_DESCRIPTOR": protected1wd_descr,
"FORESTUPDATES_DESCRIPTOR": protected1wd_descr,
"EXTENDEDRIGHTS_DESCRIPTOR": protected2_descr,
"PARTITIONS_DESCRIPTOR": partitions_descr,
"SITES_DESCRIPTOR": sites_descr,
})
setup_add_ldif(samdb, setup_path("extended-rights.ldif"), {
"CONFIGDN": names.configdn,
"INC2012": incl_2012,
})
logger.info("Setting up display specifiers")
display_specifiers_ldif = read_ms_ldif(
setup_path('display-specifiers/DisplaySpecifiers-Win2k8R2.txt'))
display_specifiers_ldif = substitute_var(display_specifiers_ldif,
{"CONFIGDN": names.configdn})
check_all_substituted(display_specifiers_ldif)
samdb.add_ldif(display_specifiers_ldif)
logger.info("Modifying display specifiers and extended rights")
setup_modify_ldif(samdb,
setup_path("provision_configuration_modify.ldif"), {
"CONFIGDN": names.configdn,
"DISPLAYSPECIFIERS_DESCRIPTOR": protected2_descr
})
logger.info("Adding users container")
users_desc = b64encode(get_domain_users_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision_users_add.ldif"), {
"DOMAINDN": names.domaindn,
"USERS_DESCRIPTOR": users_desc
})
logger.info("Modifying users container")
setup_modify_ldif(samdb, setup_path("provision_users_modify.ldif"), {
"DOMAINDN": names.domaindn})
logger.info("Adding computers container")
computers_desc = b64encode(get_domain_computers_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision_computers_add.ldif"), {
"DOMAINDN": names.domaindn,
"COMPUTERS_DESCRIPTOR": computers_desc
})
logger.info("Modifying computers container")
setup_modify_ldif(samdb,
setup_path("provision_computers_modify.ldif"), {
"DOMAINDN": names.domaindn})
logger.info("Setting up sam.ldb data")
infrastructure_desc = b64encode(get_domain_infrastructure_descriptor(names.domainsid)).decode('utf8')
lostandfound_desc = b64encode(get_domain_delete_protected2_descriptor(names.domainsid)).decode('utf8')
system_desc = b64encode(get_domain_delete_protected1_descriptor(names.domainsid)).decode('utf8')
builtin_desc = b64encode(get_domain_builtin_descriptor(names.domainsid)).decode('utf8')
controllers_desc = b64encode(get_domain_controllers_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision.ldif"), {
"CREATTIME": str(samba.unix2nttime(int(time.time()))),
"DOMAINDN": names.domaindn,
"NETBIOSNAME": names.netbiosname,
"DEFAULTSITE": names.sitename,
"CONFIGDN": names.configdn,
"SERVERDN": names.serverdn,
"RIDAVAILABLESTART": str(next_rid + 600),
"POLICYGUID_DC": policyguid_dc,
"INFRASTRUCTURE_DESCRIPTOR": infrastructure_desc,
"LOSTANDFOUND_DESCRIPTOR": lostandfound_desc,
"SYSTEM_DESCRIPTOR": system_desc,
"BUILTIN_DESCRIPTOR": builtin_desc,
"DOMAIN_CONTROLLERS_DESCRIPTOR": controllers_desc,
})
    # The managed service accounts descriptor is needed below for both the
    # FILL_FULL and the FILL_SUBDOMAIN case, so compute it unconditionally.
    managedservice_descr = b64encode(get_managed_service_accounts_descriptor(names.domainsid)).decode('utf8')
    # If we are setting up a subdomain, then this has been replicated in, so we don't need to add it
    if fill == FILL_FULL:
setup_modify_ldif(samdb,
setup_path("provision_configuration_references.ldif"), {
"CONFIGDN": names.configdn,
"SCHEMADN": names.schemadn})
logger.info("Setting up well known security principals")
protected1wd_descr = b64encode(get_config_delete_protected1wd_descriptor(names.domainsid)).decode('utf8')
setup_add_ldif(samdb, setup_path("provision_well_known_sec_princ.ldif"), {
"CONFIGDN": names.configdn,
"WELLKNOWNPRINCIPALS_DESCRIPTOR": protected1wd_descr,
}, controls=["relax:0", "provision:0"])
if fill == FILL_FULL or fill == FILL_SUBDOMAIN:
setup_modify_ldif(samdb,
setup_path("provision_basedn_references.ldif"), {
"DOMAINDN": names.domaindn,
"MANAGEDSERVICE_DESCRIPTOR": managedservice_descr
})
logger.info("Setting up sam.ldb users and groups")
setup_add_ldif(samdb, setup_path("provision_users.ldif"), {
"DOMAINDN": names.domaindn,
"DOMAINSID": str(names.domainsid),
"ADMINPASS_B64": b64encode(adminpass.encode('utf-16-le')).decode('utf8'),
"KRBTGTPASS_B64": b64encode(krbtgtpass.encode('utf-16-le')).decode('utf8')
}, controls=["relax:0", "provision:0"])
logger.info("Setting up self join")
setup_self_join(samdb, admin_session_info, names=names, fill=fill,
invocationid=invocationid,
dns_backend=dns_backend,
dnspass=dnspass,
machinepass=machinepass,
domainsid=names.domainsid,
next_rid=next_rid,
dc_rid=dc_rid,
policyguid=policyguid,
policyguid_dc=policyguid_dc,
domainControllerFunctionality=domainControllerFunctionality,
ntdsguid=ntdsguid)
ntds_dn = "CN=NTDS Settings,%s" % names.serverdn
names.ntdsguid = samdb.searchone(basedn=ntds_dn,
attribute="objectGUID", expression="", scope=ldb.SCOPE_BASE).decode('utf8')
assert isinstance(names.ntdsguid, str)
return samdb
SYSVOL_ACL = "O:LAG:BAD:P(A;OICI;0x001f01ff;;;BA)(A;OICI;0x001200a9;;;SO)(A;OICI;0x001f01ff;;;SY)(A;OICI;0x001200a9;;;AU)"
POLICIES_ACL = "O:LAG:BAD:P(A;OICI;0x001f01ff;;;BA)(A;OICI;0x001200a9;;;SO)(A;OICI;0x001f01ff;;;SY)(A;OICI;0x001200a9;;;AU)(A;OICI;0x001301bf;;;PA)"
SYSVOL_SERVICE = "sysvol"
def set_dir_acl(path, acl, lp, domsid, use_ntvfs, passdb, service=SYSVOL_SERVICE):
session_info = system_session_unix()
setntacl(lp, path, acl, domsid, session_info, use_ntvfs=use_ntvfs, skip_invalid_chown=True, passdb=passdb, service=service)
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
setntacl(lp, os.path.join(root, name), acl, domsid, session_info,
use_ntvfs=use_ntvfs, skip_invalid_chown=True, passdb=passdb, service=service)
for name in dirs:
setntacl(lp, os.path.join(root, name), acl, domsid, session_info,
use_ntvfs=use_ntvfs, skip_invalid_chown=True, passdb=passdb, service=service)
def set_gpos_acl(sysvol, dnsdomain, domainsid, domaindn, samdb, lp, use_ntvfs, passdb):
"""Set ACL on the sysvol/<dnsname>/Policies folder and the policy
folders beneath.
:param sysvol: Physical path for the sysvol folder
:param dnsdomain: The DNS name of the domain
:param domainsid: The SID of the domain
:param domaindn: The DN of the domain (ie. DC=...)
:param samdb: An LDB object on the SAM db
:param lp: an LP object
"""
# Set ACL for GPO root folder
root_policy_path = os.path.join(sysvol, dnsdomain, "Policies")
session_info = system_session_unix()
setntacl(lp, root_policy_path, POLICIES_ACL, str(domainsid), session_info,
use_ntvfs=use_ntvfs, skip_invalid_chown=True, passdb=passdb, service=SYSVOL_SERVICE)
res = samdb.search(base="CN=Policies,CN=System,%s" %(domaindn),
attrs=["cn", "nTSecurityDescriptor"],
expression="", scope=ldb.SCOPE_ONELEVEL)
for policy in res:
acl = ndr_unpack(security.descriptor,
policy["nTSecurityDescriptor"][0]).as_sddl()
policy_path = getpolicypath(sysvol, dnsdomain, str(policy["cn"]))
set_dir_acl(policy_path, dsacl2fsacl(acl, domainsid), lp,
str(domainsid), use_ntvfs,
passdb=passdb)
def setsysvolacl(samdb, netlogon, sysvol, uid, gid, domainsid, dnsdomain,
domaindn, lp, use_ntvfs):
"""Set the ACL for the sysvol share and the subfolders
:param samdb: An LDB object on the SAM db
:param netlogon: Physical path for the netlogon folder
:param sysvol: Physical path for the sysvol folder
:param uid: The UID of the "Administrator" user
    :param gid: The GID of the "Domain administrators" group
:param domainsid: The SID of the domain
:param dnsdomain: The DNS name of the domain
:param domaindn: The DN of the domain (ie. DC=...)
"""
s4_passdb = None
if not use_ntvfs:
s3conf = s3param.get_context()
s3conf.load(lp.configfile)
file = tempfile.NamedTemporaryFile(dir=os.path.abspath(sysvol))
try:
try:
smbd.set_simple_acl(file.name, 0o755, system_session_unix(), gid)
except OSError:
if not smbd.have_posix_acls():
# This clue is only strictly correct for RPM and
                # Debian-like Linux systems, but hopefully users on other
                # platforms will get enough of a clue from it.
raise ProvisioningError("Samba was compiled without the posix ACL support that s3fs requires. "
"Try installing libacl1-dev or libacl-devel, then re-run configure and make.")
raise ProvisioningError("Your filesystem or build does not support posix ACLs, which s3fs requires. "
"Try the mounting the filesystem with the 'acl' option.")
try:
smbd.chown(file.name, uid, gid, system_session_unix())
except OSError:
raise ProvisioningError("Unable to chown a file on your filesystem. "
"You may not be running provision as root.")
finally:
file.close()
# This will ensure that the smbd code we are running when setting ACLs
# is initialised with the smb.conf
s3conf = s3param.get_context()
s3conf.load(lp.configfile)
# ensure we are using the right samba_dsdb passdb backend, no matter what
s3conf.set("passdb backend", "samba_dsdb:%s" % samdb.url)
passdb.reload_static_pdb()
# ensure that we init the samba_dsdb backend, so the domain sid is
# marked in secrets.tdb
s4_passdb = passdb.PDB(s3conf.get("passdb backend"))
        # now ensure everything matches correctly, to avoid weird issues
if passdb.get_global_sam_sid() != domainsid:
raise ProvisioningError('SID as seen by smbd [%s] does not match SID as seen by the provision script [%s]!' % (passdb.get_global_sam_sid(), domainsid))
domain_info = s4_passdb.domain_info()
if domain_info["dom_sid"] != domainsid:
raise ProvisioningError('SID as seen by pdb_samba_dsdb [%s] does not match SID as seen by the provision script [%s]!' % (domain_info["dom_sid"], domainsid))
if domain_info["dns_domain"].upper() != dnsdomain.upper():
raise ProvisioningError('Realm as seen by pdb_samba_dsdb [%s] does not match Realm as seen by the provision script [%s]!' % (domain_info["dns_domain"].upper(), dnsdomain.upper()))
try:
if use_ntvfs:
os.chown(sysvol, -1, gid)
except OSError:
canchown = False
else:
canchown = True
    # Use the admin SID DN as the user DN; since admin should own most of the
    # files, the operation will be much faster
userdn = '<SID={}-{}>'.format(domainsid, security.DOMAIN_RID_ADMINISTRATOR)
flags = (auth.AUTH_SESSION_INFO_DEFAULT_GROUPS |
auth.AUTH_SESSION_INFO_AUTHENTICATED |
auth.AUTH_SESSION_INFO_SIMPLE_PRIVILEGES)
session_info = auth.user_session(samdb, lp_ctx=lp, dn=userdn,
session_info_flags=flags)
auth.session_info_set_unix(session_info,
lp_ctx=lp,
user_name="Administrator",
uid=uid,
gid=gid)
def _setntacl(path):
"""A helper to reuse args"""
return setntacl(
lp, path, SYSVOL_ACL, str(domainsid), session_info,
use_ntvfs=use_ntvfs, skip_invalid_chown=True, passdb=s4_passdb,
service=SYSVOL_SERVICE)
# Set the SYSVOL_ACL on the sysvol folder and subfolder (first level)
_setntacl(sysvol)
for root, dirs, files in os.walk(sysvol, topdown=False):
for name in files:
if use_ntvfs and canchown:
os.chown(os.path.join(root, name), -1, gid)
_setntacl(os.path.join(root, name))
for name in dirs:
if use_ntvfs and canchown:
os.chown(os.path.join(root, name), -1, gid)
_setntacl(os.path.join(root, name))
# Set acls on Policy folder and policies folders
set_gpos_acl(sysvol, dnsdomain, domainsid, domaindn, samdb, lp, use_ntvfs, passdb=s4_passdb)
def acl_type(direct_db_access):
if direct_db_access:
return "DB"
else:
return "VFS"
def check_dir_acl(path, acl, lp, domainsid, direct_db_access):
session_info = system_session_unix()
fsacl = getntacl(lp, path, session_info, direct_db_access=direct_db_access, service=SYSVOL_SERVICE)
fsacl_sddl = fsacl.as_sddl(domainsid)
if fsacl_sddl != acl:
raise ProvisioningError('%s ACL on GPO directory %s %s does not match expected value %s from GPO object' % (acl_type(direct_db_access), path, fsacl_sddl, acl))
for root, dirs, files in os.walk(path, topdown=False):
for name in files:
fsacl = getntacl(lp, os.path.join(root, name), session_info,
direct_db_access=direct_db_access, service=SYSVOL_SERVICE)
if fsacl is None:
raise ProvisioningError('%s ACL on GPO file %s not found!' %
(acl_type(direct_db_access),
os.path.join(root, name)))
fsacl_sddl = fsacl.as_sddl(domainsid)
if fsacl_sddl != acl:
raise ProvisioningError('%s ACL on GPO file %s %s does not match expected value %s from GPO object' % (acl_type(direct_db_access), os.path.join(root, name), fsacl_sddl, acl))
for name in dirs:
fsacl = getntacl(lp, os.path.join(root, name), session_info,
direct_db_access=direct_db_access, service=SYSVOL_SERVICE)
if fsacl is None:
raise ProvisioningError('%s ACL on GPO directory %s not found!'
% (acl_type(direct_db_access),
os.path.join(root, name)))
fsacl_sddl = fsacl.as_sddl(domainsid)
if fsacl_sddl != acl:
raise ProvisioningError('%s ACL on GPO directory %s %s does not match expected value %s from GPO object' % (acl_type(direct_db_access), os.path.join(root, name), fsacl_sddl, acl))
def check_gpos_acl(sysvol, dnsdomain, domainsid, domaindn, samdb, lp,
direct_db_access):
"""Set ACL on the sysvol/<dnsname>/Policies folder and the policy
folders beneath.
:param sysvol: Physical path for the sysvol folder
:param dnsdomain: The DNS name of the domain
:param domainsid: The SID of the domain
:param domaindn: The DN of the domain (ie. DC=...)
:param samdb: An LDB object on the SAM db
:param lp: an LP object
"""
    # Check ACL for GPO root folder
root_policy_path = os.path.join(sysvol, dnsdomain, "Policies")
session_info = system_session_unix()
fsacl = getntacl(lp, root_policy_path, session_info,
direct_db_access=direct_db_access, service=SYSVOL_SERVICE)
if fsacl is None:
        raise ProvisioningError('%s ACL on policy root %s not found!' % (acl_type(direct_db_access), root_policy_path))
fsacl_sddl = fsacl.as_sddl(domainsid)
if fsacl_sddl != POLICIES_ACL:
        raise ProvisioningError('%s ACL on policy root %s %s does not match expected value %s from provision' % (acl_type(direct_db_access), root_policy_path, fsacl_sddl, POLICIES_ACL))
res = samdb.search(base="CN=Policies,CN=System,%s" %(domaindn),
attrs=["cn", "nTSecurityDescriptor"],
expression="", scope=ldb.SCOPE_ONELEVEL)
for policy in res:
acl = ndr_unpack(security.descriptor,
policy["nTSecurityDescriptor"][0]).as_sddl()
policy_path = getpolicypath(sysvol, dnsdomain, str(policy["cn"]))
check_dir_acl(policy_path, dsacl2fsacl(acl, domainsid), lp,
domainsid, direct_db_access)
def checksysvolacl(samdb, netlogon, sysvol, domainsid, dnsdomain, domaindn,
lp):
"""Set the ACL for the sysvol share and the subfolders
:param samdb: An LDB object on the SAM db
:param netlogon: Physical path for the netlogon folder
:param sysvol: Physical path for the sysvol folder
:param uid: The UID of the "Administrator" user
:param gid: The GID of the "Domain adminstrators" group
:param domainsid: The SID of the domain
:param dnsdomain: The DNS name of the domain
:param domaindn: The DN of the domain (ie. DC=...)
"""
# This will ensure that the smbd code we are running when setting ACLs is initialised with the smb.conf
s3conf = s3param.get_context()
s3conf.load(lp.configfile)
# ensure we are using the right samba_dsdb passdb backend, no matter what
s3conf.set("passdb backend", "samba_dsdb:%s" % samdb.url)
# ensure that we init the samba_dsdb backend, so the domain sid is marked in secrets.tdb
s4_passdb = passdb.PDB(s3conf.get("passdb backend"))
    # now ensure everything matches correctly, to avoid weird issues
if passdb.get_global_sam_sid() != domainsid:
raise ProvisioningError('SID as seen by smbd [%s] does not match SID as seen by the provision script [%s]!' % (passdb.get_global_sam_sid(), domainsid))
domain_info = s4_passdb.domain_info()
if domain_info["dom_sid"] != domainsid:
raise ProvisioningError('SID as seen by pdb_samba_dsdb [%s] does not match SID as seen by the provision script [%s]!' % (domain_info["dom_sid"], domainsid))
if domain_info["dns_domain"].upper() != dnsdomain.upper():
raise ProvisioningError('Realm as seen by pdb_samba_dsdb [%s] does not match Realm as seen by the provision script [%s]!' % (domain_info["dns_domain"].upper(), dnsdomain.upper()))
# Ensure we can read this directly, and via the smbd VFS
session_info = system_session_unix()
for direct_db_access in [True, False]:
# Check the SYSVOL_ACL on the sysvol folder and subfolder (first level)
for dir_path in [os.path.join(sysvol, dnsdomain), netlogon]:
fsacl = getntacl(lp, dir_path, session_info, direct_db_access=direct_db_access, service=SYSVOL_SERVICE)
if fsacl is None:
raise ProvisioningError('%s ACL on sysvol directory %s not found!' % (acl_type(direct_db_access), dir_path))
fsacl_sddl = fsacl.as_sddl(domainsid)
if fsacl_sddl != SYSVOL_ACL:
raise ProvisioningError('%s ACL on sysvol directory %s %s does not match expected value %s from provision' % (acl_type(direct_db_access), dir_path, fsacl_sddl, SYSVOL_ACL))
# Check acls on Policy folder and policies folders
check_gpos_acl(sysvol, dnsdomain, domainsid, domaindn, samdb, lp,
direct_db_access)
def interface_ips_v4(lp, all_interfaces=False):
"""return only IPv4 IPs"""
ips = samba.interface_ips(lp, all_interfaces)
ret = []
for i in ips:
if i.find(':') == -1:
ret.append(i)
return ret
def interface_ips_v6(lp):
"""return only IPv6 IPs"""
ips = samba.interface_ips(lp, False)
ret = []
for i in ips:
if i.find(':') != -1:
ret.append(i)
return ret
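# Example (hypothetical addresses; the actual result depends on the host's
# configured interfaces):
#   interface_ips_v4(lp)                       # e.g. ['192.168.1.10', '10.0.0.5']
#   interface_ips_v4(lp, all_interfaces=True)  # includes loopback etc.
#   interface_ips_v6(lp)                       # e.g. ['fe80::1']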
def provision_fill(samdb, secrets_ldb, logger, names, paths,
schema=None,
targetdir=None, samdb_fill=FILL_FULL,
hostip=None, hostip6=None,
next_rid=1000, dc_rid=None, adminpass=None, krbtgtpass=None,
domainguid=None, policyguid=None, policyguid_dc=None,
invocationid=None, machinepass=None, ntdsguid=None,
dns_backend=None, dnspass=None,
serverrole=None, dom_for_fun_level=None,
am_rodc=False, lp=None, use_ntvfs=False,
skip_sysvolacl=False, backend_store=None,
backend_store_size=None):
# create/adapt the group policy GUIDs
# Default GUID for default policy are described at
# "How Core Group Policy Works"
# http://technet.microsoft.com/en-us/library/cc784268%28WS.10%29.aspx
if policyguid is None:
policyguid = DEFAULT_POLICY_GUID
policyguid = policyguid.upper()
if policyguid_dc is None:
policyguid_dc = DEFAULT_DC_POLICY_GUID
policyguid_dc = policyguid_dc.upper()
if invocationid is None:
invocationid = str(uuid.uuid4())
if krbtgtpass is None:
krbtgtpass = samba.generate_random_machine_password(128, 255)
if machinepass is None:
machinepass = samba.generate_random_machine_password(120, 120)
if dnspass is None:
dnspass = samba.generate_random_password(128, 255)
samdb.transaction_start()
try:
samdb = fill_samdb(samdb, lp, names, logger=logger,
schema=schema,
policyguid=policyguid, policyguid_dc=policyguid_dc,
fill=samdb_fill, adminpass=adminpass, krbtgtpass=krbtgtpass,
invocationid=invocationid, machinepass=machinepass,
dns_backend=dns_backend, dnspass=dnspass,
ntdsguid=ntdsguid, serverrole=serverrole,
dom_for_fun_level=dom_for_fun_level, am_rodc=am_rodc,
next_rid=next_rid, dc_rid=dc_rid,
backend_store=backend_store,
backend_store_size=backend_store_size)
# Set up group policies (domain policy and domain controller
# policy)
if serverrole == "active directory domain controller":
create_default_gpo(paths.sysvol, names.dnsdomain, policyguid,
policyguid_dc)
except:
samdb.transaction_cancel()
raise
else:
samdb.transaction_commit()
if serverrole == "active directory domain controller":
# Continue setting up sysvol for GPO. This appears to require being
# outside a transaction.
if not skip_sysvolacl:
setsysvolacl(samdb, paths.netlogon, paths.sysvol, paths.root_uid,
paths.root_gid, names.domainsid, names.dnsdomain,
names.domaindn, lp, use_ntvfs)
else:
logger.info("Setting acl on sysvol skipped")
secretsdb_self_join(secrets_ldb, domain=names.domain,
realm=names.realm, dnsdomain=names.dnsdomain,
netbiosname=names.netbiosname, domainsid=names.domainsid,
machinepass=machinepass, secure_channel_type=SEC_CHAN_BDC)
# Now set up the right msDS-SupportedEncryptionTypes into the DB
# In future, this might be determined from some configuration
kerberos_enctypes = str(ENC_ALL_TYPES)
try:
msg = ldb.Message(ldb.Dn(samdb,
samdb.searchone("distinguishedName",
expression="samAccountName=%s$" % names.netbiosname,
scope=ldb.SCOPE_SUBTREE).decode('utf8')))
msg["msDS-SupportedEncryptionTypes"] = ldb.MessageElement(
elements=kerberos_enctypes, flags=ldb.FLAG_MOD_REPLACE,
name="msDS-SupportedEncryptionTypes")
samdb.modify(msg)
except ldb.LdbError as e:
(enum, estr) = e.args
if enum != ldb.ERR_NO_SUCH_ATTRIBUTE:
# It might be that this attribute does not exist in this schema
raise
setup_ad_dns(samdb, secrets_ldb, names, paths, lp, logger,
hostip=hostip, hostip6=hostip6, dns_backend=dns_backend,
dnspass=dnspass, os_level=dom_for_fun_level,
targetdir=targetdir, fill_level=samdb_fill,
backend_store=backend_store)
domainguid = samdb.searchone(basedn=samdb.get_default_basedn(),
attribute="objectGUID").decode('utf8')
assert isinstance(domainguid, str)
lastProvisionUSNs = get_last_provision_usn(samdb)
maxUSN = get_max_usn(samdb, str(names.rootdn))
if lastProvisionUSNs is not None:
update_provision_usn(samdb, 0, maxUSN, invocationid, 1)
else:
set_provision_usn(samdb, 0, maxUSN, invocationid)
logger.info("Setting up sam.ldb rootDSE marking as synchronized")
setup_modify_ldif(samdb, setup_path("provision_rootdse_modify.ldif"),
{'NTDSGUID': names.ntdsguid})
# fix any dangling GUIDs from the provision
logger.info("Fixing provision GUIDs")
chk = dbcheck(samdb, samdb_schema=samdb, verbose=False, fix=True, yes=True,
quiet=True)
samdb.transaction_start()
try:
# a small number of GUIDs are missing because of ordering issues in the
# provision code
for schema_obj in ['CN=Domain', 'CN=Organizational-Person', 'CN=Contact', 'CN=inetOrgPerson']:
chk.check_database(DN="%s,%s" % (schema_obj, names.schemadn),
scope=ldb.SCOPE_BASE,
attrs=['defaultObjectCategory'])
chk.check_database(DN="CN=IP Security,CN=System,%s" % names.domaindn,
scope=ldb.SCOPE_ONELEVEL,
attrs=['ipsecOwnersReference',
'ipsecFilterReference',
'ipsecISAKMPReference',
'ipsecNegotiationPolicyReference',
'ipsecNFAReference'])
if chk.check_database(DN=names.schemadn, scope=ldb.SCOPE_SUBTREE,
attrs=['attributeId', 'governsId']) != 0:
raise ProvisioningError("Duplicate attributeId or governsId in schema. Must be fixed manually!!")
except:
samdb.transaction_cancel()
raise
else:
samdb.transaction_commit()
_ROLES_MAP = {
"ROLE_STANDALONE": "standalone server",
"ROLE_DOMAIN_MEMBER": "member server",
"ROLE_DOMAIN_BDC": "active directory domain controller",
"ROLE_DOMAIN_PDC": "active directory domain controller",
"dc": "active directory domain controller",
"member": "member server",
"domain controller": "active directory domain controller",
"active directory domain controller": "active directory domain controller",
"member server": "member server",
"standalone": "standalone server",
"standalone server": "standalone server",
}
def sanitize_server_role(role):
"""Sanitize a server role name.
:param role: Server role
:raise ValueError: If the role can not be interpreted
:return: Sanitized server role (one of "member server",
"active directory domain controller", "standalone server")
"""
try:
return _ROLES_MAP[role]
except KeyError:
raise ValueError(role)
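# Example:
#   sanitize_server_role("dc")          # -> "active directory domain controller"
#   sanitize_server_role("standalone")  # -> "standalone server"
#   sanitize_server_role("not-a-role")  # raises ValueError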
def provision_fake_ypserver(logger, samdb, domaindn, netbiosname, nisdomain,
maxuid, maxgid):
"""Create AD entries for the fake ypserver.
This is needed for being able to manipulate posix attrs via ADUC.
"""
samdb.transaction_start()
try:
logger.info("Setting up fake yp server settings")
setup_add_ldif(samdb, setup_path("ypServ30.ldif"), {
"DOMAINDN": domaindn,
"NETBIOSNAME": netbiosname,
"NISDOMAIN": nisdomain,
})
except:
samdb.transaction_cancel()
raise
else:
samdb.transaction_commit()
def directory_create_or_exists(path, mode=0o755):
if not os.path.exists(path):
try:
os.mkdir(path, mode)
except OSError as e:
if e.errno in [errno.EEXIST]:
pass
else:
raise ProvisioningError("Failed to create directory %s: %s" % (path, e.strerror))
def determine_host_ip(logger, lp, hostip=None):
if hostip is None:
logger.info("Looking up IPv4 addresses")
hostips = interface_ips_v4(lp)
if len(hostips) > 0:
hostip = hostips[0]
if len(hostips) > 1:
logger.warning("More than one IPv4 address found. Using %s",
hostip)
if hostip == "127.0.0.1":
hostip = None
if hostip is None:
logger.warning("No IPv4 address will be assigned")
return hostip
def determine_host_ip6(logger, lp, hostip6=None):
if hostip6 is None:
logger.info("Looking up IPv6 addresses")
hostips = interface_ips_v6(lp)
if hostips:
hostip6 = hostips[0]
if len(hostips) > 1:
logger.warning("More than one IPv6 address found. Using %s", hostip6)
if hostip6 is None:
logger.warning("No IPv6 address will be assigned")
return hostip6
def provision(logger, session_info, smbconf=None,
targetdir=None, samdb_fill=FILL_FULL, realm=None, rootdn=None,
domaindn=None, schemadn=None, configdn=None, serverdn=None,
domain=None, hostname=None, hostip=None, hostip6=None, domainsid=None,
next_rid=1000, dc_rid=None, adminpass=None, ldapadminpass=None,
krbtgtpass=None, domainguid=None, policyguid=None, policyguid_dc=None,
dns_backend=None, dns_forwarder=None, dnspass=None,
invocationid=None, machinepass=None, ntdsguid=None,
root=None, nobody=None, users=None, backup=None,
sitename=None, serverrole=None, dom_for_fun_level=None,
useeadb=False, am_rodc=False, lp=None, use_ntvfs=False,
use_rfc2307=False, maxuid=None, maxgid=None, skip_sysvolacl=True,
base_schema="2012_R2",
plaintext_secrets=False, backend_store=None,
backend_store_size=None, batch_mode=False):
"""Provision samba4
:note: caution, this wipes all existing data!
"""
try:
serverrole = sanitize_server_role(serverrole)
except ValueError:
raise ProvisioningError('server role (%s) should be one of "active directory domain controller", "member server", "standalone server"' % serverrole)
if ldapadminpass is None:
        # Make a new, random password between Samba and its LDAP server
ldapadminpass = samba.generate_random_password(128, 255)
if backend_store is None:
backend_store = get_default_backend_store()
if domainsid is None:
domainsid = security.random_sid()
root_uid = get_root_uid([root or "root"], logger)
nobody_uid = findnss_uid([nobody or "nobody"])
users_gid = findnss_gid([users or "users", 'users', 'other', 'staff'])
root_gid = pwd.getpwuid(root_uid).pw_gid
try:
bind_gid = findnss_gid(["bind", "named"])
except KeyError:
bind_gid = None
if targetdir is not None:
smbconf = os.path.join(targetdir, "etc", "smb.conf")
elif smbconf is None:
smbconf = samba.param.default_path()
if not os.path.exists(os.path.dirname(smbconf)):
os.makedirs(os.path.dirname(smbconf))
server_services = []
global_param = {}
if use_rfc2307:
global_param["idmap_ldb:use rfc2307"] = ["yes"]
if dns_backend != "SAMBA_INTERNAL":
server_services.append("-dns")
else:
if dns_forwarder is not None:
global_param["dns forwarder"] = [dns_forwarder]
if use_ntvfs:
server_services.append("+smb")
server_services.append("-s3fs")
global_param["dcerpc endpoint servers"] = ["+winreg", "+srvsvc"]
if len(server_services) > 0:
global_param["server services"] = server_services
# only install a new smb.conf if there isn't one there already
if os.path.exists(smbconf):
# if Samba Team members can't figure out the weird errors
# loading an empty smb.conf gives, then we need to be smarter.
# Pretend it just didn't exist --abartlet
f = open(smbconf, 'r')
try:
data = f.read().lstrip()
finally:
f.close()
if data is None or data == "":
make_smbconf(smbconf, hostname, domain, realm,
targetdir, serverrole=serverrole,
eadb=useeadb, use_ntvfs=use_ntvfs,
lp=lp, global_param=global_param)
else:
make_smbconf(smbconf, hostname, domain, realm, targetdir,
serverrole=serverrole,
eadb=useeadb, use_ntvfs=use_ntvfs, lp=lp, global_param=global_param)
if lp is None:
lp = samba.param.LoadParm()
lp.load(smbconf)
names = guess_names(lp=lp, hostname=hostname, domain=domain,
dnsdomain=realm, serverrole=serverrole, domaindn=domaindn,
configdn=configdn, schemadn=schemadn, serverdn=serverdn,
sitename=sitename, rootdn=rootdn, domain_names_forced=(samdb_fill == FILL_DRS))
paths = provision_paths_from_lp(lp, names.dnsdomain)
paths.bind_gid = bind_gid
paths.root_uid = root_uid
paths.root_gid = root_gid
hostip = determine_host_ip(logger, lp, hostip)
hostip6 = determine_host_ip6(logger, lp, hostip6)
names.hostip = hostip
names.hostip6 = hostip6
names.domainguid = domainguid
names.domainsid = domainsid
names.forestsid = domainsid
if serverrole is None:
serverrole = lp.get("server role")
directory_create_or_exists(paths.private_dir, 0o700)
directory_create_or_exists(paths.binddns_dir, 0o770)
directory_create_or_exists(os.path.join(paths.private_dir, "tls"))
directory_create_or_exists(paths.state_dir)
if not plaintext_secrets:
setup_encrypted_secrets_key(paths.encrypted_secrets_key_path)
if paths.sysvol and not os.path.exists(paths.sysvol):
os.makedirs(paths.sysvol, 0o775)
schema = Schema(domainsid, invocationid=invocationid,
schemadn=names.schemadn, base_schema=base_schema)
provision_backend = LDBBackend(paths=paths,
lp=lp,
names=names, logger=logger)
provision_backend.init()
provision_backend.start()
# only install a new shares config db if there is none
if not os.path.exists(paths.shareconf):
logger.info("Setting up share.ldb")
share_ldb = Ldb(paths.shareconf, session_info=session_info, lp=lp)
share_ldb.load_ldif_file_add(setup_path("share.ldif"))
logger.info("Setting up secrets.ldb")
secrets_ldb = setup_secretsdb(paths,
session_info=session_info, lp=lp)
try:
logger.info("Setting up the registry")
setup_registry(paths.hklm, session_info, lp=lp)
logger.info("Setting up the privileges database")
setup_privileges(paths.privilege, session_info, lp=lp)
logger.info("Setting up idmap db")
idmap = setup_idmapdb(paths.idmapdb, session_info=session_info, lp=lp)
setup_name_mappings(idmap, sid=str(domainsid),
root_uid=root_uid, nobody_uid=nobody_uid,
users_gid=users_gid, root_gid=root_gid)
logger.info("Setting up SAM db")
samdb = setup_samdb(paths.samdb, session_info,
provision_backend, lp, names, logger=logger,
serverrole=serverrole,
schema=schema, fill=samdb_fill, am_rodc=am_rodc,
plaintext_secrets=plaintext_secrets,
backend_store=backend_store,
backend_store_size=backend_store_size,
batch_mode=batch_mode)
if serverrole == "active directory domain controller":
if paths.netlogon is None:
raise MissingShareError("netlogon", paths.smbconf)
if paths.sysvol is None:
raise MissingShareError("sysvol", paths.smbconf)
if not os.path.isdir(paths.netlogon):
os.makedirs(paths.netlogon, 0o755)
if adminpass is None:
adminpass = samba.generate_random_password(12, 32)
adminpass_generated = True
else:
if isinstance(adminpass, bytes):
adminpass = adminpass.decode('utf-8')
adminpass_generated = False
if samdb_fill == FILL_FULL:
provision_fill(samdb, secrets_ldb, logger, names, paths,
schema=schema, targetdir=targetdir, samdb_fill=samdb_fill,
hostip=hostip, hostip6=hostip6,
next_rid=next_rid, dc_rid=dc_rid, adminpass=adminpass,
krbtgtpass=krbtgtpass,
policyguid=policyguid, policyguid_dc=policyguid_dc,
invocationid=invocationid, machinepass=machinepass,
ntdsguid=ntdsguid, dns_backend=dns_backend,
dnspass=dnspass, serverrole=serverrole,
dom_for_fun_level=dom_for_fun_level, am_rodc=am_rodc,
lp=lp, use_ntvfs=use_ntvfs,
skip_sysvolacl=skip_sysvolacl,
backend_store=backend_store,
backend_store_size=backend_store_size)
if not is_heimdal_built():
create_kdc_conf(paths.kdcconf, realm, domain, os.path.dirname(lp.get("log file")))
logger.info("The Kerberos KDC configuration for Samba AD is "
"located at %s", paths.kdcconf)
create_krb5_conf(paths.krb5conf,
dnsdomain=names.dnsdomain, hostname=names.hostname,
realm=names.realm)
logger.info("A Kerberos configuration suitable for Samba AD has been "
"generated at %s", paths.krb5conf)
logger.info("Merge the contents of this file with your system "
"krb5.conf or replace it with this one. Do not create a "
"symlink!")
if serverrole == "active directory domain controller":
create_dns_update_list(lp, logger, paths)
backend_result = provision_backend.post_setup()
provision_backend.shutdown()
except:
secrets_ldb.transaction_cancel()
raise
# Now commit the secrets.ldb to disk
secrets_ldb.transaction_commit()
# the commit creates the dns.keytab in the private directory
create_dns_dir_keytab_link(logger, paths)
result = ProvisionResult()
result.server_role = serverrole
result.domaindn = domaindn
result.paths = paths
result.names = names
result.lp = lp
result.samdb = samdb
result.idmap = idmap
result.domainsid = str(domainsid)
if samdb_fill == FILL_FULL:
result.adminpass_generated = adminpass_generated
result.adminpass = adminpass
else:
result.adminpass_generated = False
result.adminpass = None
result.backend_result = backend_result
if use_rfc2307:
provision_fake_ypserver(logger=logger, samdb=samdb,
domaindn=names.domaindn, netbiosname=names.netbiosname,
nisdomain=names.domain.lower(), maxuid=maxuid, maxgid=maxgid)
return result
def provision_become_dc(smbconf=None, targetdir=None, realm=None,
rootdn=None, domaindn=None, schemadn=None,
configdn=None, serverdn=None, domain=None,
hostname=None, domainsid=None,
machinepass=None, dnspass=None,
dns_backend=None, sitename=None, debuglevel=1,
use_ntvfs=False):
logger = logging.getLogger("provision")
samba.set_debug_level(debuglevel)
res = provision(logger, system_session(),
smbconf=smbconf, targetdir=targetdir, samdb_fill=FILL_DRS,
realm=realm, rootdn=rootdn, domaindn=domaindn, schemadn=schemadn,
configdn=configdn, serverdn=serverdn, domain=domain,
hostname=hostname, hostip=None, domainsid=domainsid,
machinepass=machinepass,
serverrole="active directory domain controller",
sitename=sitename, dns_backend=dns_backend, dnspass=dnspass,
use_ntvfs=use_ntvfs)
res.lp.set("debuglevel", str(debuglevel))
return res
def create_krb5_conf(path, dnsdomain, hostname, realm):
"""Write out a file containing a valid krb5.conf file
:param path: Path of the new krb5.conf file.
:param dnsdomain: DNS Domain name
:param hostname: Local hostname
:param realm: Realm name
"""
setup_file(setup_path("krb5.conf"), path, {
"DNSDOMAIN": dnsdomain,
"HOSTNAME": hostname,
"REALM": realm,
})
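# Example (minimal sketch with hypothetical values):
#   create_krb5_conf('/usr/local/samba/private/krb5.conf',
#                    dnsdomain='samdom.example.com', hostname='dc1',
#                    realm='SAMDOM.EXAMPLE.COM')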
class ProvisioningError(Exception):
"""A generic provision error."""
def __init__(self, value):
self.value = value
def __str__(self):
return "ProvisioningError: " + self.value
class InvalidNetbiosName(Exception):
"""A specified name was not a valid NetBIOS name."""
def __init__(self, name):
super(InvalidNetbiosName, self).__init__(
"The name '%r' is not a valid NetBIOS name" % name)
class MissingShareError(ProvisioningError):
def __init__(self, name, smbconf):
super(MissingShareError, self).__init__(
"Existing smb.conf does not have a [%s] share, but you are "
"configuring a DC. Please remove %s or add the share manually." %
(name, smbconf))
| gpl-3.0 | 8,725,325,668,539,262,000 | 39.342287 | 222 | 0.610819 | false |
KWierso/treeherder | tests/model/test_suite_public_name.py | 1 | 4454 | import pytest
from django.db.utils import IntegrityError
SAME_SUITE_PUBLIC_NAME = 'same suite name'
SAME_TEST_PUBLIC_NAME = 'same test name'
SAME_SUITE = 'same suite'
SAME_TEST = 'same test'
@pytest.mark.parametrize("suite_public_name, suite_public_name_2,"
"test_public_name, test_public_name_2,"
"suite, suite_2, test, test_2", [
(SAME_SUITE_PUBLIC_NAME, SAME_SUITE_PUBLIC_NAME,
SAME_TEST_PUBLIC_NAME, SAME_TEST_PUBLIC_NAME,
SAME_SUITE, SAME_SUITE, 'test', 'test_2'),
(SAME_SUITE_PUBLIC_NAME, SAME_SUITE_PUBLIC_NAME,
SAME_TEST_PUBLIC_NAME, SAME_TEST_PUBLIC_NAME,
'suite', 'suite_2', SAME_TEST, SAME_TEST),
(SAME_SUITE_PUBLIC_NAME, SAME_SUITE_PUBLIC_NAME,
SAME_TEST_PUBLIC_NAME, SAME_TEST_PUBLIC_NAME,
'suite', 'suite_2', 'test', 'test_2'),
])
def test_trigger_public_suite_name_constraint(test_perf_signature, test_perf_signature_2,
suite_public_name, suite_public_name_2,
test_public_name, test_public_name_2,
suite, suite_2, test, test_2):
test_perf_signature.suite_public_name = suite_public_name
test_perf_signature.test_public_name = test_public_name
test_perf_signature.suite = suite
test_perf_signature.test = test
test_perf_signature.save()
test_perf_signature_2.suite_public_name = suite_public_name_2
test_perf_signature_2.test_public_name = test_public_name_2
test_perf_signature_2.suite = suite_2
test_perf_signature_2.test = test_2
with pytest.raises(IntegrityError):
test_perf_signature_2.save()
@pytest.mark.parametrize("suite_public_name, suite_public_name_2,"
"test_public_name, test_public_name_2,"
"suite, suite_2, test, test_2", [
(None, None, None, None, 'suite', 'suite_2', 'test', 'test_2'),
('suite_public_name', 'suite_public_name_2', None, None,
'suite', 'suite_2', 'test', 'test_2'),
(None, None, 'test', 'test_2', 'suite', 'suite_2', 'test', 'test_2'),
('suite_public_name', None, 'test', None, 'suite', 'suite_2', 'test', 'test_2'),
('suite_public_name', 'suite_public_name_2',
SAME_TEST_PUBLIC_NAME, SAME_TEST_PUBLIC_NAME,
'suite', 'suite_2', 'test', 'test_2'),
(SAME_SUITE_PUBLIC_NAME, SAME_SUITE_PUBLIC_NAME,
'test_public_name', 'test_public_name_2',
'suite', 'suite_2', 'test', 'test_2'),
('suite_public_name', 'suite_public_name_2',
SAME_TEST_PUBLIC_NAME, SAME_TEST_PUBLIC_NAME,
SAME_SUITE, SAME_SUITE, SAME_TEST, SAME_TEST),
('suite_public_name', 'suite_public_name_2',
'test_public_name', 'test_public_name_2',
'suite', 'suite_2', 'test', 'test_2'),
])
def test_do_not_trigger_public_suite_name_constraint(test_perf_signature, test_perf_signature_2,
suite_public_name, suite_public_name_2,
test_public_name, test_public_name_2,
suite, suite_2, test, test_2):
test_perf_signature.suite_public_name = suite_public_name
test_perf_signature.test_public_name = test_public_name
test_perf_signature.suite = suite
test_perf_signature.test = test
test_perf_signature.save()
test_perf_signature_2.suite_public_name = suite_public_name_2
test_perf_signature_2.test_public_name = test_public_name_2
test_perf_signature_2.suite = suite_2
test_perf_signature_2.test = test_2
test_perf_signature_2.save()
| mpl-2.0 | -9,129,059,106,820,335,000 | 50.790698 | 113 | 0.493489 | false |
me-systeme/gsv8pypi | GSV6_FrameRouter.py | 1 | 5296 | # -*- coding: utf-8 -*-
__author__ = 'Dennis Rump'
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) 2015 Dennis Rump
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# Hiermit wird unentgeltlich, jeder Person, die eine Kopie der Software
# und der zugehörigen Dokumentationen (die "Software") erhält, die
# Erlaubnis erteilt, uneingeschränkt zu benutzen, inklusive und ohne
# Ausnahme, dem Recht, sie zu verwenden, kopieren, ändern, fusionieren,
# verlegen, verbreiten, unter-lizenzieren und/oder zu verkaufen, und
# Personen, die diese Software erhalten, diese Rechte zu geben, unter
# den folgenden Bedingungen:
#
# Der obige Urheberrechtsvermerk und dieser Erlaubnisvermerk sind in
# alle Kopien oder Teilkopien der Software beizulegen.
#
# DIE SOFTWARE WIRD OHNE JEDE AUSDRÜCKLICHE ODER IMPLIZIERTE GARANTIE
# BEREITGESTELLT, EINSCHLIESSLICH DER GARANTIE ZUR BENUTZUNG FÜR DEN
# VORGESEHENEN ODER EINEM BESTIMMTEN ZWECK SOWIE JEGLICHER
# RECHTSVERLETZUNG, JEDOCH NICHT DARAUF BESCHRÄNKT. IN KEINEM FALL SIND
# DIE AUTOREN ODER COPYRIGHTINHABER FÜR JEGLICHEN SCHADEN ODER SONSTIGE
# ANSPRUCH HAFTBAR ZU MACHEN, OB INFOLGE DER ERFÜLLUNG VON EINEM
# VERTRAG, EINEM DELIKT ODER ANDERS IM ZUSAMMENHANG MIT DER BENUTZUNG
# ODER SONSTIGE VERWENDUNG DER SOFTWARE ENTSTANDEN.
#
###############################################################################
import logging
import threading
from Queue import Queue, Empty
from GSV6_MessFrameHandler import MessFrameHandler
class FrameRouter(threading.Thread):
lock = threading.Lock()
#def __init__(self, frameQueue, antwortQueue, messertRotatingQueue, gsv6Lib):
def __init__(self, frameQueue, antwortQueue, _lastMesswert, gsv6Lib):
threading.Thread.__init__(self)
self.frameQueue = frameQueue
self.antwortQueue = antwortQueue
# self.messertRotatingQueue = messertRotatingQueue
self.lastMesswert = _lastMesswert
self.gsv6 = gsv6Lib
self.running = False
# self.messFrameEventHandler = MessFrameHandler(self.messertRotatingQueue, self.gsv6)
self.messFrameEventHandler = MessFrameHandler(self.lastMesswert, self.gsv6)
# self.antwortFrameEventHandler = AntwortFrameHandler(self.gsv6, self.antwortQueue, self.messFrameEventHandler)
# fallback, this flag kills this thread if main thread killed
self.daemon = True
def run(self):
        # worker thread loop: route AntwortFrames (responses) and MessFrames (measurements)
FrameRouter.lock.acquire()
self.running = True
FrameRouter.lock.release()
logging.getLogger('gsv8.FrameRouter').info('started')
# enter rooter loop
while self.running:
try:
                # newFrame = self.frameQueue.popleft()
                # use a timeout so that stop() is honoured even if no frames arrive
                newFrame = self.frameQueue.get(timeout=0.5)
except IndexError:
pass
            except Empty:
pass
else:
logging.getLogger('gsv8.FrameRouter').debug('new Frame: ' + newFrame.toString())
if newFrame.getFrameType() == 0:
                    # measurement frame (MesswertFrame)
                    logging.getLogger('gsv8.FrameRouter').debug('measurement received')
self.messFrameEventHandler.computeFrame(newFrame)
elif newFrame.getFrameType() == 1:
                    logging.getLogger('gsv8').debug("response received")
                    # response frame (AntwortFrame)
# self.antwortFrameEventHandler.computeFrame(newFrame)
self.antwortQueue.put(newFrame)
else:
# error
logging.getLogger('gsv8.FrameRouter').debug(
                        'nothing to do with a FrameType != Messwert/Antwort')
logging.getLogger('gsv8.FrameRouter').debug('exit')
def stop(self):
FrameRouter.lock.acquire()
self.running = False
FrameRouter.lock.release()
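    # Example (minimal sketch; assumes the queues, the shared measurement
    # holder and a gsv6 protocol instance already exist):
    #   router = FrameRouter(frame_queue, antwort_queue, last_messwert, gsv6)
    #   router.start()   # route frames in the background
    #   ...
    #   router.stop()    # ask the router loop to exit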
def startCSVRecording(self, csvFilepath, prefix):
self.messFrameEventHandler.startRecording(csvFilepath, prefix)
def stopCSVRecording(self):
self.messFrameEventHandler.stopRecording()
def isRecording(self):
return self.messFrameEventHandler.doRecording | mit | -4,542,068,685,750,619,600 | 43.066667 | 119 | 0.676187 | false |
wq/wq.io | itertable/loaders.py | 1 | 4908 | from __future__ import print_function
import requests
try:
# Python 2 (uses str)
from StringIO import StringIO
except ImportError:
# Python 3 (Python 2 equivalent uses unicode)
from io import StringIO
from io import BytesIO
from .version import VERSION
from .exceptions import LoadFailed
from zipfile import ZipFile
class BaseLoader(object):
no_pickle_loader = ['file']
empty_file = None
def load(self):
raise NotImplementedError
class FileLoader(BaseLoader):
filename = None
@property
def read_mode(self):
return 'rb' if self.binary else 'r'
@property
def write_mode(self):
return 'wb+' if self.binary else 'w+'
def load(self):
try:
self.file = open(self.filename, self.read_mode)
self.empty_file = False
except IOError:
if self.binary:
self.file = BytesIO()
else:
self.file = StringIO()
self.empty_file = True
def save(self):
file = open(self.filename, self.write_mode)
self.dump(file)
file.close()
class Zipper(object):
inner_filename = None
inner_binary = False
def unzip_file(self):
zipfile = ZipFile(self.file)
inner_file = zipfile.read(
self.get_inner_filename(zipfile)
)
if self.inner_binary:
self.file = BytesIO(inner_file)
else:
self.file = StringIO(inner_file.decode('utf-8'))
zipfile.fp.close()
zipfile.close()
def get_inner_filename(self, zipfile):
if self.inner_filename:
return self.inner_filename
names = zipfile.namelist()
if len(names) == 1:
return names[0]
zipfile.fp.close()
zipfile.close()
raise LoadFailed("Multiple Inner Files!")
class ZipFileLoader(Zipper, FileLoader):
binary = True
def load(self):
super(ZipFileLoader, self).load()
self.unzip_file()
class StringLoader(BaseLoader):
string = ""
@property
def _io_class(self):
return BytesIO if self.binary else StringIO
def load(self):
if self.binary and not self.string:
self.string = b''
self.file = self._io_class(self.string)
def save(self):
file = self._io_class()
self.dump(file)
self.string = file.getvalue()
file.close()
class NetLoader(StringLoader):
"NetLoader: opens HTTP/REST resources for use in IterTable"
username = None
password = None
debug = False
url = None
client = requests
@property
def user_agent(self):
return "IterTable/%s (%s)" % (
VERSION,
requests.utils.default_user_agent()
)
@property
def headers(self):
return {
'User-Agent': self.user_agent,
}
def load(self, **kwargs):
result = self.GET()
self.file = self._io_class(result)
def req(self, url=None, method=None, params=None, body=None, headers={}):
if url is None:
url = self.url
if url is None:
raise LoadFailed("No URL provided")
if params is None:
params = getattr(self, 'params', None)
if isinstance(params, str):
url += '?' + params
params = None
if self.debug:
if params:
from requests.compat import urlencode
debug_url = url + '?' + urlencode(params, doseq=True)
else:
debug_url = url
self.debug_string = "%s: %s" % (method, debug_url)
print(self.debug_string)
if self.username is not None and self.password is not None:
auth = (self.username, self.password)
else:
auth = None
all_headers = self.headers.copy()
all_headers.update(headers)
resp = self.client.request(
method, url,
params=params,
headers=all_headers,
auth=auth,
data=body,
)
resp.connection.close()
if resp.status_code < 200 or resp.status_code > 299:
raise LoadFailed(
resp.text,
path=url,
code=resp.status_code,
)
if self.binary:
return resp.content
else:
return resp.text
def GET(self, **kwargs):
return self.req(method='GET', **kwargs)
def POST(self, **kwargs):
return self.req(method='POST', **kwargs)
def PUT(self, **kwargs):
return self.req(method='PUT', **kwargs)
def DELETE(self, **kwargs):
return self.req(method='DELETE', **kwargs)
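# Example (hypothetical subclass; IterTable normally mixes loaders with
# parser/mapper classes instead of using them directly):
#   class ExampleLoader(NetLoader):
#       binary = False
#       url = 'https://example.com/data.json'
#
#   loader = ExampleLoader()
#   loader.load()               # performs the GET, wraps the body in StringIO
#   text = loader.file.read()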
class ZipNetLoader(Zipper, NetLoader):
binary = True
def load(self):
super(ZipNetLoader, self).load()
self.unzip_file()
| mit | -7,850,386,608,032,571,000 | 23.41791 | 77 | 0.556031 | false |
AttakornP/request_manager | request_manager/request_manager/settings/local.py | 1 | 1799 | """Development settings and globals."""
from os.path import join, normpath
from base import *
########## DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
########## END DEBUG CONFIGURATION
########## EMAIL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
########## END EMAIL CONFIGURATION
########## DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'rq_mng_db',
'USER': 'www-data',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
########## END DATABASE CONFIGURATION
########## CACHE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
########## END CACHE CONFIGURATION
########## TOOLBAR CONFIGURATION
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INSTALLED_APPS += (
#'debug_toolbar',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
INTERNAL_IPS = ('127.0.0.1',)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
MIDDLEWARE_CLASSES += (
#'debug_toolbar.middleware.DebugToolbarMiddleware',
)
# See: https://github.com/django-debug-toolbar/django-debug-toolbar#installation
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'SHOW_TEMPLATE_CONTEXT': True,
}
########## END TOOLBAR CONFIGURATION
| mit | -9,207,611,194,624,718,000 | 25.455882 | 80 | 0.668149 | false |
elegion/djangodash2012 | fortuitus/settings_gondor.py | 1 | 1571 | import os
import urlparse
from .settings import * # NOQA
DEBUG = False
TEMPLATE_DEBUG = DEBUG
if 'GONDOR_DATABASE_URL' in os.environ:
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ['GONDOR_DATABASE_URL'])
DATABASES = {
'default': {
'ENGINE': {
'postgres': 'django.db.backends.postgresql_psycopg2'
}[url.scheme],
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port
}
}
SITE_ID = 1
if 'GONDOR_DATA_DIR' in os.environ:
MEDIA_ROOT = os.path.join(os.environ['GONDOR_DATA_DIR'],
'site_media', 'media')
STATIC_ROOT = os.path.join(os.environ['GONDOR_DATA_DIR'],
'site_media', 'static')
MEDIA_URL = '/site_media/media/'
STATIC_URL = '/site_media/static/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
FILE_UPLOAD_PERMISSIONS = 0640
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
}
},
'loggers': {
'': {
'handlers': ['console'],
'level': 'INFO',
},
'django.request': {
'propagate': True,
},
}
}
COMPRESS_ENABLED = True
| mit | 6,060,573,243,486,931,000 | 22.447761 | 68 | 0.504774 | false |
ulikoehler/UliEngineering | UliEngineering/SignalProcessing/Resampling.py | 1 | 9018 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Utilities for selecting and finding specific attributes in datasets
"""
import math
import functools
import numpy as np
import bisect
import concurrent.futures
import scipy.interpolate
from UliEngineering.Utils.Concurrency import QueuedThreadExecutor
from .Utils import LinRange
__all__ = ["resample_discard", "resampled_timespace",
"parallel_resample", "signal_samplerate",
"serial_resample"]
def signal_samplerate(t, ignore_percentile=10, mean_method=np.mean):
"""
Compute the samplerate of a signal
using a quantile-based method to exclude
    outliers (in the time delta domain) and
    computes the samplerate as 1 / mean(sample period).
Using a low ignore_percentile value is only
desirable if the dataset is small and therefore
does not average properly due to lack of samples.
In most cases, using a high ignore percentile
like 10 is recommended.
Returns a float (samplerate) [1/s].
If t is a LinRange() object, returns t.samplerate()
Parameters
----------
t : numpy array of datetime64 type (or LinRange)
Timestamps associated with the signal
ignore_percentile : number
This percentile of outliers is ignored
for the mean calculation at both the top
and the bottom end.
"5" means considering the 5th...95th percentile
for averaging.
mean_method : unary function
Used to compute the mean after excluding outliers.
Except for special usecases, arithmetic mean (np.mean)
is recommended.
"""
# Special rule for LinRange objects that have a defined samplerate
if isinstance(t, LinRange):
return t.samplerate()
tdelta = np.diff(t)
    lower = np.percentile(tdelta, ignore_percentile)
    upper = np.percentile(tdelta, 100 - ignore_percentile)
    filtered = tdelta[np.logical_and(tdelta >= lower, tdelta <= upper)]
# Filtered is too small if the sample periods are too uniform in the array
if len(filtered) < 0.1 * len(tdelta):
filtered = tdelta
mean_sample_period = mean_method(filtered)
mean_sample_period = mean_sample_period.astype("timedelta64[ns]").astype(np.int64)
return 1e9 / mean_sample_period # 1e9 : nanoseconds
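# Example (synthetic data: one sample every millisecond, i.e. 1 kHz):
#   t = np.arange(0, 10**9, 10**6).astype('datetime64[ns]')
#   signal_samplerate(t)  # -> approximately 1000.0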
def resample_discard(arr, divisor, ofs=0):
"""
Resample with an integral divisor, discarding all other samples.
Returns a view of the data.
Very fast as this doesn't need to read the data.
"""
return arr[ofs::divisor]
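# Example:
#   resample_discard(np.arange(10), 4)         # -> array([0, 4, 8])
#   resample_discard(np.arange(10), 4, ofs=1)  # -> array([1, 5, 9])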
def resampled_timespace(t, new_samplerate, assume_sorted=True, time_factor=1e6):
"""
Compute the new timespace after resampling a input timestamp array
(not neccessarily lazy)
Parameters
----------
t : numpy array-like
The source timestamps.
If these are numbers, you must supply time_factor to
specify the resolution of the number.
        If they are of dtype datetime64, time_factor is ignored.
new_samplerate : float
The new datarate in Hz
assume_sorted : bool
If this is True, the code assumes the source
timestamp array is monotonically increasing, i.e.
the lowest timestamp comes first and the highest last.
If this is False, the code determines
the min/max value by reading the entire array.
time_factor : float
        Ignored if t is of dtype datetime64.
        Defines what the timestamps in the source (and result)
        array mean. This is required to interpret new_samplerate.
If time_factor=1e6, it means that a difference of 1.0
in two timestamps means a difference of 1/1e6 seconds.
Returns
-------
    A LinRange() (acts like a numpy array but doesn't consume any memory)
    that represents the new timespace.
"""
if len(t) == 0:
raise ValueError("Empty time array given - can not perform any resampling")
if len(t) == 1:
raise ValueError("Time array has only one value - can not perform any resampling")
# Handle numpy datetime64 input
if "datetime64" in t.dtype.name:
t = t.astype('datetime64[ns]').astype(np.int64)
time_factor = 1e9
# Compute time endpoints
dst_tdelta = time_factor / new_samplerate
startt, endt = (t[0], t[-1]) if assume_sorted else (np.min(t), np.max(t))
src_tdelta = endt - startt
if src_tdelta < dst_tdelta:
raise ValueError("The time delta is smaller than a single sample - can not perform resampling")
# Use a lazy linrange to represent time interval
return LinRange.range(startt, endt, dst_tdelta)
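# Illustrative sketch (not part of the original module): a 10 s timebase in
# microsecond ticks (time_factor=1e6), sampled at 1 kHz, resampled to a lazy
# 10 Hz timespace (one point every 1e5 ticks).
def _example_resampled_timespace():
    t = np.arange(0, 10_000_000, 1_000)  # 10 s at 1 kHz, in microsecond ticks
    return resampled_timespace(t, new_samplerate=10.0, time_factor=1e6)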
def __parallel_resample_worker(torig, tnew, y, out, i, chunksize, ovp_size, prefilter, fitkind):
# Find the time range in the target time
t_target = tnew[i:i + chunksize]
# Find the time range in the source time
srcstart = bisect.bisect_left(torig, t_target[0])
    srcend = bisect.bisect_right(torig, t_target[-1])
    # Compute start and end index with overprovisioning
    # These may run past the bounds of the source array, but slicing clamps them
srcstart_ovp = max(0, srcstart - ovp_size) # Must not get negative indices
    srcend_ovp = srcend + ovp_size  # extend past the end of the chunk
# Compute source slices
tsrc_chunk = torig[srcstart_ovp:srcend_ovp]
ysrc_chunk = y[srcstart_ovp:srcend_ovp]
# Perform prefilter
if prefilter is not None:
tsrc_chunk, ysrc_chunk = prefilter(tsrc_chunk, ysrc_chunk)
# Compute interpolating spline (might also be piecewise linear)...
    fit = scipy.interpolate.interp1d(tsrc_chunk, ysrc_chunk, kind=fitkind)
# ... and evaluate
out[i:i + chunksize] = fit(t_target)
def serial_resample(t, y, new_samplerate, out=None, prefilter=None,
time_factor=1e6,
fitkind='linear', chunksize=10000,
overprovisioning_factor=0.01):
"""
A resampler that uses scipy.interpolate.interp1d but splits the
input into chunks that can be processed.
The chunksize is applied to the output timebase.
    The input t array is assumed to be sorted, facilitating binary search.
If the output array is not given, it is automatically allocated with the correct size.
    The chunks are processed sequentially; see parallel_resample() for a thread-pool variant.
In order to account for vector end effects, an overprovisioning factor
can be provided so that a fraction of the chunksize is added at both ends of
the source chunk.
    An overprovisioning factor of 0.01 means that 1% of the chunksize is added on the left
and 1% is added on the right. This does not affect leftmost and rightmost
border of the input array.
Returns the output array.
Applies an optional prefilter to the input data while resampling. If the timebase of
the input data is off significantly, this might produce unexpected results.
    The prefilter must be a reentrant functor that takes (t, y) data and returns
    a (t, y) tuple. The returned tuple can be of arbitrary size (assuming t and y
have the same length) but its t range must include the t range that is being interpolated.
Note that the prefilter is performed after overprovisioning, so setting a higher
overprovisioning factor (see below) might help dealing with prefilters that
return too small arrays, however at the start and the end of the input array,
no overprovisioning values can be added.
"""
    # Lazily compute the new timespace
    new_t = resampled_timespace(t, new_samplerate, time_factor=time_factor)
if out is None:
out = np.zeros(len(new_t))
ovp_size = int(math.floor(overprovisioning_factor * chunksize))
    # Process the output in chunks; i is the element offset of each chunk
    for i in range(0, len(new_t), chunksize):
        __parallel_resample_worker(i=i, torig=t, tnew=new_t,
                                   y=y, out=out, chunksize=chunksize,
                                   ovp_size=ovp_size, prefilter=prefilter,
                                   fitkind=fitkind)
return out
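# Usage sketch (assumes linear interpolation is adequate for the signal):
# downsample a 1 kHz, microsecond-stamped sine wave to 100 Hz in one process.
def _example_serial_resample():
    t = np.arange(0, 1_000_000, 1_000)      # 1 s at 1 kHz, microsecond ticks
    y = np.sin(2 * np.pi * 5.0 * t / 1e6)   # 5 Hz sine
    return serial_resample(t, y, new_samplerate=100.0, time_factor=1e6)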
def parallel_resample(t, y, new_samplerate, out=None, prefilter=None,
executor=None, time_factor=1e6,
fitkind='linear', chunksize=10000,
overprovisioning_factor=0.01):
"""
Parallel variant of serial_resample
"""
    # Lazily compute the new timespace
    new_t = resampled_timespace(t, new_samplerate, time_factor=time_factor)
if out is None:
out = np.zeros(len(new_t))
if executor is None:
executor = QueuedThreadExecutor()
ovp_size = int(math.floor(overprovisioning_factor * chunksize))
    # Bind constant arguments; i is the element offset of each chunk
    f = functools.partial(__parallel_resample_worker, torig=t, tnew=new_t,
                          y=y, out=out, chunksize=chunksize,
                          ovp_size=ovp_size, prefilter=prefilter,
                          fitkind=fitkind)
    futures = [executor.submit(f, i=i) for i in range(0, len(new_t), chunksize)]
# Wait for futures to finish
concurrent.futures.wait(futures)
return out
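# Parallel variant sketch: same call shape as serial_resample(); pass an
# explicit executor to control worker threads, otherwise a
# QueuedThreadExecutor is created internally.
def _example_parallel_resample():
    t = np.arange(0, 1_000_000, 1_000)       # 1 s at 1 kHz, microsecond ticks
    y = np.cos(2 * np.pi * 2.0 * t / 1e6)    # 2 Hz cosine
    return parallel_resample(t, y, new_samplerate=250.0, time_factor=1e6)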
| apache-2.0 | 8,066,749,981,981,142,000 | 39.258929 | 103 | 0.675094 | false |
Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2016_08_01/models/_models_py3.py | 1 | 295755 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Dict, List, Optional, Union
import msrest.serialization
from ._web_site_management_client_enums import *
class ApiDefinitionInfo(msrest.serialization.Model):
"""Information about the formal API definition for the app.
:param url: The URL of the API definition.
:type url: str
"""
_attribute_map = {
'url': {'key': 'url', 'type': 'str'},
}
def __init__(
self,
*,
url: Optional[str] = None,
**kwargs
):
super(ApiDefinitionInfo, self).__init__(**kwargs)
self.url = url
class ApplicationLogsConfig(msrest.serialization.Model):
"""Application logs configuration.
:param file_system: Application logs to file system configuration.
:type file_system: ~azure.mgmt.web.v2016_08_01.models.FileSystemApplicationLogsConfig
:param azure_table_storage: Application logs to azure table storage configuration.
:type azure_table_storage:
~azure.mgmt.web.v2016_08_01.models.AzureTableStorageApplicationLogsConfig
:param azure_blob_storage: Application logs to blob storage configuration.
:type azure_blob_storage:
~azure.mgmt.web.v2016_08_01.models.AzureBlobStorageApplicationLogsConfig
"""
_attribute_map = {
'file_system': {'key': 'fileSystem', 'type': 'FileSystemApplicationLogsConfig'},
'azure_table_storage': {'key': 'azureTableStorage', 'type': 'AzureTableStorageApplicationLogsConfig'},
'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageApplicationLogsConfig'},
}
def __init__(
self,
*,
file_system: Optional["FileSystemApplicationLogsConfig"] = None,
azure_table_storage: Optional["AzureTableStorageApplicationLogsConfig"] = None,
azure_blob_storage: Optional["AzureBlobStorageApplicationLogsConfig"] = None,
**kwargs
):
super(ApplicationLogsConfig, self).__init__(**kwargs)
self.file_system = file_system
self.azure_table_storage = azure_table_storage
self.azure_blob_storage = azure_blob_storage
class AutoHealActions(msrest.serialization.Model):
"""Actions which to take by the auto-heal module when a rule is triggered.
:param action_type: Predefined action to be taken. Possible values include: "Recycle",
"LogEvent", "CustomAction".
:type action_type: str or ~azure.mgmt.web.v2016_08_01.models.AutoHealActionType
:param custom_action: Custom action to be taken.
:type custom_action: ~azure.mgmt.web.v2016_08_01.models.AutoHealCustomAction
:param min_process_execution_time: Minimum time the process must execute
before taking the action.
:type min_process_execution_time: str
"""
_attribute_map = {
'action_type': {'key': 'actionType', 'type': 'str'},
'custom_action': {'key': 'customAction', 'type': 'AutoHealCustomAction'},
'min_process_execution_time': {'key': 'minProcessExecutionTime', 'type': 'str'},
}
def __init__(
self,
*,
action_type: Optional[Union[str, "AutoHealActionType"]] = None,
custom_action: Optional["AutoHealCustomAction"] = None,
min_process_execution_time: Optional[str] = None,
**kwargs
):
super(AutoHealActions, self).__init__(**kwargs)
self.action_type = action_type
self.custom_action = custom_action
self.min_process_execution_time = min_process_execution_time
class AutoHealCustomAction(msrest.serialization.Model):
"""Custom action to be executed
when an auto heal rule is triggered.
:param exe: Executable to be run.
:type exe: str
:param parameters: Parameters for the executable.
:type parameters: str
"""
_attribute_map = {
'exe': {'key': 'exe', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': 'str'},
}
def __init__(
self,
*,
exe: Optional[str] = None,
parameters: Optional[str] = None,
**kwargs
):
super(AutoHealCustomAction, self).__init__(**kwargs)
self.exe = exe
self.parameters = parameters
class AutoHealRules(msrest.serialization.Model):
"""Rules that can be defined for auto-heal.
:param triggers: Conditions that describe when to execute the auto-heal actions.
:type triggers: ~azure.mgmt.web.v2016_08_01.models.AutoHealTriggers
:param actions: Actions to be executed when a rule is triggered.
:type actions: ~azure.mgmt.web.v2016_08_01.models.AutoHealActions
"""
_attribute_map = {
'triggers': {'key': 'triggers', 'type': 'AutoHealTriggers'},
'actions': {'key': 'actions', 'type': 'AutoHealActions'},
}
def __init__(
self,
*,
triggers: Optional["AutoHealTriggers"] = None,
actions: Optional["AutoHealActions"] = None,
**kwargs
):
super(AutoHealRules, self).__init__(**kwargs)
self.triggers = triggers
self.actions = actions
class AutoHealTriggers(msrest.serialization.Model):
"""Triggers for auto-heal.
:param requests: A rule based on total requests.
:type requests: ~azure.mgmt.web.v2016_08_01.models.RequestsBasedTrigger
:param private_bytes_in_kb: A rule based on private bytes.
:type private_bytes_in_kb: int
:param status_codes: A rule based on status codes.
:type status_codes: list[~azure.mgmt.web.v2016_08_01.models.StatusCodesBasedTrigger]
:param slow_requests: A rule based on request execution time.
:type slow_requests: ~azure.mgmt.web.v2016_08_01.models.SlowRequestsBasedTrigger
"""
_attribute_map = {
'requests': {'key': 'requests', 'type': 'RequestsBasedTrigger'},
'private_bytes_in_kb': {'key': 'privateBytesInKB', 'type': 'int'},
'status_codes': {'key': 'statusCodes', 'type': '[StatusCodesBasedTrigger]'},
'slow_requests': {'key': 'slowRequests', 'type': 'SlowRequestsBasedTrigger'},
}
def __init__(
self,
*,
requests: Optional["RequestsBasedTrigger"] = None,
private_bytes_in_kb: Optional[int] = None,
status_codes: Optional[List["StatusCodesBasedTrigger"]] = None,
slow_requests: Optional["SlowRequestsBasedTrigger"] = None,
**kwargs
):
super(AutoHealTriggers, self).__init__(**kwargs)
self.requests = requests
self.private_bytes_in_kb = private_bytes_in_kb
self.status_codes = status_codes
self.slow_requests = slow_requests
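# Illustrative construction sketch (not part of the generated code): wiring a
# private-bytes trigger to a recycle action. The threshold value and the
# "hh:mm:ss" format for min_process_execution_time are assumptions.
#
#   rules = AutoHealRules(
#       triggers=AutoHealTriggers(private_bytes_in_kb=1024000),
#       actions=AutoHealActions(action_type="Recycle",
#                               min_process_execution_time="00:01:00"),
#   )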
class AzureBlobStorageApplicationLogsConfig(msrest.serialization.Model):
"""Application logs azure blob storage configuration.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
    :param sas_url: SAS URL to an Azure blob container with read/write/list/delete permissions.
:type sas_url: str
:param retention_in_days: Retention in days.
Remove blobs older than X days.
0 or lower means no retention.
:type retention_in_days: int
"""
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
'sas_url': {'key': 'sasUrl', 'type': 'str'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
}
def __init__(
self,
*,
level: Optional[Union[str, "LogLevel"]] = None,
sas_url: Optional[str] = None,
retention_in_days: Optional[int] = None,
**kwargs
):
super(AzureBlobStorageApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
self.sas_url = sas_url
self.retention_in_days = retention_in_days
class AzureBlobStorageHttpLogsConfig(msrest.serialization.Model):
"""Http logs to azure blob storage configuration.
    :param sas_url: SAS URL to an Azure blob container with read/write/list/delete permissions.
:type sas_url: str
:param retention_in_days: Retention in days.
Remove blobs older than X days.
0 or lower means no retention.
:type retention_in_days: int
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_attribute_map = {
'sas_url': {'key': 'sasUrl', 'type': 'str'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
sas_url: Optional[str] = None,
retention_in_days: Optional[int] = None,
enabled: Optional[bool] = None,
**kwargs
):
super(AzureBlobStorageHttpLogsConfig, self).__init__(**kwargs)
self.sas_url = sas_url
self.retention_in_days = retention_in_days
self.enabled = enabled
class AzureTableStorageApplicationLogsConfig(msrest.serialization.Model):
"""Application logs to Azure table storage configuration.
All required parameters must be populated in order to send to Azure.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
:param sas_url: Required. SAS URL to an Azure table with add/query/delete permissions.
:type sas_url: str
"""
_validation = {
'sas_url': {'required': True},
}
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
'sas_url': {'key': 'sasUrl', 'type': 'str'},
}
def __init__(
self,
*,
sas_url: str,
level: Optional[Union[str, "LogLevel"]] = None,
**kwargs
):
super(AzureTableStorageApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
self.sas_url = sas_url
class ProxyOnlyResource(msrest.serialization.Model):
"""Azure proxy only resource. This resource is not tracked by Azure Resource Manager.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(ProxyOnlyResource, self).__init__(**kwargs)
self.id = None
self.name = None
self.kind = kind
self.type = None
class BackupItem(ProxyOnlyResource):
"""Backup description.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar backup_id: Id of the backup.
:vartype backup_id: int
:ivar storage_account_url: SAS URL for the storage account container which contains this
backup.
:vartype storage_account_url: str
:ivar blob_name: Name of the blob which contains data for this backup.
:vartype blob_name: str
:ivar name_properties_name: Name of this backup.
:vartype name_properties_name: str
:ivar status: Backup status. Possible values include: "InProgress", "Failed", "Succeeded",
"TimedOut", "Created", "Skipped", "PartiallySucceeded", "DeleteInProgress", "DeleteFailed",
"Deleted".
:vartype status: str or ~azure.mgmt.web.v2016_08_01.models.BackupItemStatus
:ivar size_in_bytes: Size of the backup in bytes.
:vartype size_in_bytes: long
:ivar created: Timestamp of the backup creation.
:vartype created: ~datetime.datetime
:ivar log: Details regarding this backup. Might contain an error message.
:vartype log: str
:ivar databases: List of databases included in the backup.
:vartype databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
:ivar scheduled: True if this backup has been created due to a schedule being triggered.
:vartype scheduled: bool
:ivar last_restore_time_stamp: Timestamp of a last restore operation which used this backup.
:vartype last_restore_time_stamp: ~datetime.datetime
:ivar finished_time_stamp: Timestamp when this backup finished.
:vartype finished_time_stamp: ~datetime.datetime
:ivar correlation_id: Unique correlation identifier. Please use this along with the timestamp
while communicating with Azure support.
:vartype correlation_id: str
:ivar website_size_in_bytes: Size of the original web app which has been backed up.
:vartype website_size_in_bytes: long
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'backup_id': {'readonly': True},
'storage_account_url': {'readonly': True},
'blob_name': {'readonly': True},
'name_properties_name': {'readonly': True},
'status': {'readonly': True},
'size_in_bytes': {'readonly': True},
'created': {'readonly': True},
'log': {'readonly': True},
'databases': {'readonly': True},
'scheduled': {'readonly': True},
'last_restore_time_stamp': {'readonly': True},
'finished_time_stamp': {'readonly': True},
'correlation_id': {'readonly': True},
'website_size_in_bytes': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backup_id': {'key': 'properties.id', 'type': 'int'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'blob_name': {'key': 'properties.blobName', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'size_in_bytes': {'key': 'properties.sizeInBytes', 'type': 'long'},
'created': {'key': 'properties.created', 'type': 'iso-8601'},
'log': {'key': 'properties.log', 'type': 'str'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'scheduled': {'key': 'properties.scheduled', 'type': 'bool'},
'last_restore_time_stamp': {'key': 'properties.lastRestoreTimeStamp', 'type': 'iso-8601'},
'finished_time_stamp': {'key': 'properties.finishedTimeStamp', 'type': 'iso-8601'},
'correlation_id': {'key': 'properties.correlationId', 'type': 'str'},
'website_size_in_bytes': {'key': 'properties.websiteSizeInBytes', 'type': 'long'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(BackupItem, self).__init__(kind=kind, **kwargs)
self.backup_id = None
self.storage_account_url = None
self.blob_name = None
self.name_properties_name = None
self.status = None
self.size_in_bytes = None
self.created = None
self.log = None
self.databases = None
self.scheduled = None
self.last_restore_time_stamp = None
self.finished_time_stamp = None
self.correlation_id = None
self.website_size_in_bytes = None
class BackupItemCollection(msrest.serialization.Model):
"""Collection of backup items.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.BackupItem]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[BackupItem]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["BackupItem"],
**kwargs
):
super(BackupItemCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class BackupRequest(ProxyOnlyResource):
"""Description of a backup which will be performed.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param backup_request_name: Name of the backup.
:type backup_request_name: str
:param enabled: True if the backup schedule is enabled (must be included in that case), false
if the backup schedule should be disabled.
:type enabled: bool
:param storage_account_url: SAS URL to the container.
:type storage_account_url: str
:param backup_schedule: Schedule for the backup if it is executed periodically.
:type backup_schedule: ~azure.mgmt.web.v2016_08_01.models.BackupSchedule
:param databases: Databases included in the backup.
:type databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
:param type_properties_type: Type of the backup. Possible values include: "Default", "Clone",
"Relocation", "Snapshot".
:type type_properties_type: str or
~azure.mgmt.web.v2016_08_01.models.BackupRestoreOperationType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'backup_request_name': {'key': 'properties.name', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'backup_schedule': {'key': 'properties.backupSchedule', 'type': 'BackupSchedule'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
backup_request_name: Optional[str] = None,
enabled: Optional[bool] = None,
storage_account_url: Optional[str] = None,
backup_schedule: Optional["BackupSchedule"] = None,
databases: Optional[List["DatabaseBackupSetting"]] = None,
type_properties_type: Optional[Union[str, "BackupRestoreOperationType"]] = None,
**kwargs
):
super(BackupRequest, self).__init__(kind=kind, **kwargs)
self.backup_request_name = backup_request_name
self.enabled = enabled
self.storage_account_url = storage_account_url
self.backup_schedule = backup_schedule
self.databases = databases
self.type_properties_type = type_properties_type
class BackupSchedule(msrest.serialization.Model):
"""Description of a backup schedule. Describes how often should be the backup performed and what should be the retention policy.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param frequency_interval: Required. How often the backup should be executed (e.g. for weekly
backup, this should be set to 7 and FrequencyUnit should be set to Day).
:type frequency_interval: int
:param frequency_unit: Required. The unit of time for how often the backup should be executed
(e.g. for weekly backup, this should be set to Day and FrequencyInterval should be set to 7).
Possible values include: "Day", "Hour". Default value: "Day".
:type frequency_unit: str or ~azure.mgmt.web.v2016_08_01.models.FrequencyUnit
:param keep_at_least_one_backup: Required. True if the retention policy should always keep at
least one backup in the storage account, regardless how old it is; false otherwise.
:type keep_at_least_one_backup: bool
:param retention_period_in_days: Required. After how many days backups should be deleted.
:type retention_period_in_days: int
:param start_time: When the schedule should start working.
:type start_time: ~datetime.datetime
:ivar last_execution_time: Last time when this schedule was triggered.
:vartype last_execution_time: ~datetime.datetime
"""
_validation = {
'frequency_interval': {'required': True},
'frequency_unit': {'required': True},
'keep_at_least_one_backup': {'required': True},
'retention_period_in_days': {'required': True},
'last_execution_time': {'readonly': True},
}
_attribute_map = {
'frequency_interval': {'key': 'frequencyInterval', 'type': 'int'},
'frequency_unit': {'key': 'frequencyUnit', 'type': 'str'},
'keep_at_least_one_backup': {'key': 'keepAtLeastOneBackup', 'type': 'bool'},
'retention_period_in_days': {'key': 'retentionPeriodInDays', 'type': 'int'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'last_execution_time': {'key': 'lastExecutionTime', 'type': 'iso-8601'},
}
def __init__(
self,
*,
frequency_interval: int = 7,
frequency_unit: Union[str, "FrequencyUnit"] = "Day",
keep_at_least_one_backup: bool = True,
retention_period_in_days: int = 30,
start_time: Optional[datetime.datetime] = None,
**kwargs
):
super(BackupSchedule, self).__init__(**kwargs)
self.frequency_interval = frequency_interval
self.frequency_unit = frequency_unit
self.keep_at_least_one_backup = keep_at_least_one_backup
self.retention_period_in_days = retention_period_in_days
self.start_time = start_time
self.last_execution_time = None
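# Illustrative sketch (not part of the generated code): a weekly backup
# request using the schedule model above; the storage URL is a placeholder
# for a real container SAS URL.
#
#   request = BackupRequest(
#       backup_request_name="weekly-backup",
#       enabled=True,
#       storage_account_url="<container SAS URL>",
#       backup_schedule=BackupSchedule(
#           frequency_interval=7,
#           frequency_unit="Day",
#           keep_at_least_one_backup=True,
#           retention_period_in_days=30,
#       ),
#   )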
class CloningInfo(msrest.serialization.Model):
"""Information needed for cloning operation.
All required parameters must be populated in order to send to Azure.
:param correlation_id: Correlation ID of cloning operation. This ID ties multiple cloning
operations
together to use the same snapshot.
:type correlation_id: str
:param overwrite: :code:`<code>true</code>` to overwrite destination app; otherwise,
:code:`<code>false</code>`.
:type overwrite: bool
:param clone_custom_host_names: :code:`<code>true</code>` to clone custom hostnames from source
app; otherwise, :code:`<code>false</code>`.
:type clone_custom_host_names: bool
:param clone_source_control: :code:`<code>true</code>` to clone source control from source app;
otherwise, :code:`<code>false</code>`.
:type clone_source_control: bool
:param source_web_app_id: Required. ARM resource ID of the source app. App resource ID is of
the form
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
for production slots and
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
for other slots.
:type source_web_app_id: str
:param hosting_environment: App Service Environment.
:type hosting_environment: str
:param app_settings_overrides: Application setting overrides for cloned app. If specified,
these settings override the settings cloned
from source app. Otherwise, application settings from source app are retained.
:type app_settings_overrides: dict[str, str]
:param configure_load_balancing: :code:`<code>true</code>` to configure load balancing for
source and destination app.
:type configure_load_balancing: bool
:param traffic_manager_profile_id: ARM resource ID of the Traffic Manager profile to use, if it
exists. Traffic Manager resource ID is of the form
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/trafficManagerProfiles/{profileName}.
:type traffic_manager_profile_id: str
:param traffic_manager_profile_name: Name of Traffic Manager profile to create. This is only
needed if Traffic Manager profile does not already exist.
:type traffic_manager_profile_name: str
:param ignore_quotas: :code:`<code>true</code>` if quotas should be ignored; otherwise,
:code:`<code>false</code>`.
:type ignore_quotas: bool
"""
_validation = {
'source_web_app_id': {'required': True},
}
_attribute_map = {
'correlation_id': {'key': 'correlationId', 'type': 'str'},
'overwrite': {'key': 'overwrite', 'type': 'bool'},
'clone_custom_host_names': {'key': 'cloneCustomHostNames', 'type': 'bool'},
'clone_source_control': {'key': 'cloneSourceControl', 'type': 'bool'},
'source_web_app_id': {'key': 'sourceWebAppId', 'type': 'str'},
'hosting_environment': {'key': 'hostingEnvironment', 'type': 'str'},
'app_settings_overrides': {'key': 'appSettingsOverrides', 'type': '{str}'},
'configure_load_balancing': {'key': 'configureLoadBalancing', 'type': 'bool'},
'traffic_manager_profile_id': {'key': 'trafficManagerProfileId', 'type': 'str'},
'traffic_manager_profile_name': {'key': 'trafficManagerProfileName', 'type': 'str'},
'ignore_quotas': {'key': 'ignoreQuotas', 'type': 'bool'},
}
def __init__(
self,
*,
source_web_app_id: str,
correlation_id: Optional[str] = None,
overwrite: Optional[bool] = None,
clone_custom_host_names: Optional[bool] = None,
clone_source_control: Optional[bool] = None,
hosting_environment: Optional[str] = None,
app_settings_overrides: Optional[Dict[str, str]] = None,
configure_load_balancing: Optional[bool] = None,
traffic_manager_profile_id: Optional[str] = None,
traffic_manager_profile_name: Optional[str] = None,
ignore_quotas: Optional[bool] = None,
**kwargs
):
super(CloningInfo, self).__init__(**kwargs)
self.correlation_id = correlation_id
self.overwrite = overwrite
self.clone_custom_host_names = clone_custom_host_names
self.clone_source_control = clone_source_control
self.source_web_app_id = source_web_app_id
self.hosting_environment = hosting_environment
self.app_settings_overrides = app_settings_overrides
self.configure_load_balancing = configure_load_balancing
self.traffic_manager_profile_id = traffic_manager_profile_id
self.traffic_manager_profile_name = traffic_manager_profile_name
self.ignore_quotas = ignore_quotas
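# Sketch (not part of the generated code): the only required field is the ARM
# resource ID of the source app, in the format given in the docstring; the
# IDs below are placeholders.
#
#   clone = CloningInfo(
#       source_web_app_id="/subscriptions/{subId}/resourceGroups/{rg}"
#                         "/providers/Microsoft.Web/sites/{siteName}",
#       overwrite=False,
#       clone_custom_host_names=True,
#   )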
class ConnectionStringDictionary(ProxyOnlyResource):
"""String dictionary resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param properties: Connection strings.
:type properties: dict[str, ~azure.mgmt.web.v2016_08_01.models.ConnStringValueTypePair]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{ConnStringValueTypePair}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
properties: Optional[Dict[str, "ConnStringValueTypePair"]] = None,
**kwargs
):
super(ConnectionStringDictionary, self).__init__(kind=kind, **kwargs)
self.properties = properties
class ConnStringInfo(msrest.serialization.Model):
"""Database connection string information.
:param name: Name of connection string.
:type name: str
:param connection_string: Connection string value.
:type connection_string: str
:param type: Type of database. Possible values include: "MySql", "SQLServer", "SQLAzure",
"Custom", "NotificationHub", "ServiceBus", "EventHub", "ApiHub", "DocDb", "RedisCache",
"PostgreSQL".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ConnectionStringType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
connection_string: Optional[str] = None,
type: Optional[Union[str, "ConnectionStringType"]] = None,
**kwargs
):
super(ConnStringInfo, self).__init__(**kwargs)
self.name = name
self.connection_string = connection_string
self.type = type
class ConnStringValueTypePair(msrest.serialization.Model):
"""Database connection string value to type pair.
All required parameters must be populated in order to send to Azure.
:param value: Required. Value of pair.
:type value: str
:param type: Required. Type of database. Possible values include: "MySql", "SQLServer",
"SQLAzure", "Custom", "NotificationHub", "ServiceBus", "EventHub", "ApiHub", "DocDb",
"RedisCache", "PostgreSQL".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ConnectionStringType
"""
_validation = {
'value': {'required': True},
'type': {'required': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
value: str,
type: Union[str, "ConnectionStringType"],
**kwargs
):
super(ConnStringValueTypePair, self).__init__(**kwargs)
self.value = value
self.type = type
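# Sketch (not part of the generated code): connection strings are sent as a
# ConnectionStringDictionary whose values are ConnStringValueTypePair
# entries; the connection string value below is a placeholder.
#
#   conn_strings = ConnectionStringDictionary(
#       properties={
#           "MyDb": ConnStringValueTypePair(
#               value="Server=...;Database=...;",
#               type="SQLAzure",
#           ),
#       }
#   )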
class ContinuousWebJob(ProxyOnlyResource):
"""Continuous Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param status: Job status. Possible values include: "Initializing", "Starting", "Running",
"PendingRestart", "Stopped".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.ContinuousWebJobStatus
:param detailed_status: Detailed status.
:type detailed_status: str
:param log_url: Log URL.
:type log_url: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'detailed_status': {'key': 'properties.detailedStatus', 'type': 'str'},
'log_url': {'key': 'properties.logUrl', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
status: Optional[Union[str, "ContinuousWebJobStatus"]] = None,
detailed_status: Optional[str] = None,
log_url: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(ContinuousWebJob, self).__init__(kind=kind, **kwargs)
self.status = status
self.detailed_status = detailed_status
self.log_url = log_url
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class ContinuousWebJobCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ContinuousWebJob]
:param next_link: Link to next page of resources.
:type next_link: str
"""
_validation = {
'value': {'required': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ContinuousWebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ContinuousWebJob"],
next_link: Optional[str] = None,
**kwargs
):
super(ContinuousWebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = next_link
class CorsSettings(msrest.serialization.Model):
"""Cross-Origin Resource Sharing (CORS) settings for the app.
:param allowed_origins: Gets or sets the list of origins that should be allowed to make
cross-origin
calls (for example: http://example.com:12345). Use "*" to allow all.
:type allowed_origins: list[str]
"""
_attribute_map = {
'allowed_origins': {'key': 'allowedOrigins', 'type': '[str]'},
}
def __init__(
self,
*,
allowed_origins: Optional[List[str]] = None,
**kwargs
):
super(CorsSettings, self).__init__(**kwargs)
self.allowed_origins = allowed_origins
class CsmPublishingProfileOptions(msrest.serialization.Model):
"""Publishing options for requested profile.
:param format: Name of the format. Valid values are:
FileZilla3
WebDeploy -- default
Ftp. Possible values include: "FileZilla3", "WebDeploy", "Ftp".
:type format: str or ~azure.mgmt.web.v2016_08_01.models.PublishingProfileFormat
"""
_attribute_map = {
'format': {'key': 'format', 'type': 'str'},
}
def __init__(
self,
*,
format: Optional[Union[str, "PublishingProfileFormat"]] = None,
**kwargs
):
super(CsmPublishingProfileOptions, self).__init__(**kwargs)
self.format = format
class CsmSlotEntity(msrest.serialization.Model):
"""Deployment slot parameters.
All required parameters must be populated in order to send to Azure.
:param target_slot: Required. Destination deployment slot during swap operation.
:type target_slot: str
:param preserve_vnet: Required. :code:`<code>true</code>` to preserve Virtual Network to the
slot during swap; otherwise, :code:`<code>false</code>`.
:type preserve_vnet: bool
"""
_validation = {
'target_slot': {'required': True},
'preserve_vnet': {'required': True},
}
_attribute_map = {
'target_slot': {'key': 'targetSlot', 'type': 'str'},
'preserve_vnet': {'key': 'preserveVnet', 'type': 'bool'},
}
def __init__(
self,
*,
target_slot: str,
preserve_vnet: bool,
**kwargs
):
super(CsmSlotEntity, self).__init__(**kwargs)
self.target_slot = target_slot
self.preserve_vnet = preserve_vnet
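# Sketch (not part of the generated code): swap parameters targeting the
# production slot while preserving the slot's Virtual Network settings.
#
#   swap = CsmSlotEntity(target_slot="production", preserve_vnet=True)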
class CsmUsageQuota(msrest.serialization.Model):
"""Usage of the quota resource.
:param unit: Units of measurement for the quota resource.
:type unit: str
:param next_reset_time: Next reset time for the resource counter.
:type next_reset_time: ~datetime.datetime
:param current_value: The current value of the resource counter.
:type current_value: long
:param limit: The resource limit.
:type limit: long
:param name: Quota name.
:type name: ~azure.mgmt.web.v2016_08_01.models.LocalizableString
"""
_attribute_map = {
'unit': {'key': 'unit', 'type': 'str'},
'next_reset_time': {'key': 'nextResetTime', 'type': 'iso-8601'},
'current_value': {'key': 'currentValue', 'type': 'long'},
'limit': {'key': 'limit', 'type': 'long'},
'name': {'key': 'name', 'type': 'LocalizableString'},
}
def __init__(
self,
*,
unit: Optional[str] = None,
next_reset_time: Optional[datetime.datetime] = None,
current_value: Optional[int] = None,
limit: Optional[int] = None,
name: Optional["LocalizableString"] = None,
**kwargs
):
super(CsmUsageQuota, self).__init__(**kwargs)
self.unit = unit
self.next_reset_time = next_reset_time
self.current_value = current_value
self.limit = limit
self.name = name
class CsmUsageQuotaCollection(msrest.serialization.Model):
"""Collection of CSM usage quotas.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.CsmUsageQuota]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[CsmUsageQuota]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["CsmUsageQuota"],
**kwargs
):
super(CsmUsageQuotaCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class CustomHostnameAnalysisResult(ProxyOnlyResource):
"""Custom domain analysis.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar is_hostname_already_verified: :code:`<code>true</code>` if hostname is already verified;
otherwise, :code:`<code>false</code>`.
:vartype is_hostname_already_verified: bool
:ivar custom_domain_verification_test: DNS verification test result. Possible values include:
"Passed", "Failed", "Skipped".
:vartype custom_domain_verification_test: str or
~azure.mgmt.web.v2016_08_01.models.DnsVerificationTestResult
:ivar custom_domain_verification_failure_info: Raw failure information if DNS verification
fails.
:vartype custom_domain_verification_failure_info:
~azure.mgmt.web.v2016_08_01.models.ErrorEntity
:ivar has_conflict_on_scale_unit: :code:`<code>true</code>` if there is a conflict on a scale
unit; otherwise, :code:`<code>false</code>`.
:vartype has_conflict_on_scale_unit: bool
:ivar has_conflict_across_subscription: :code:`<code>true</code>` if there is a conflict across
subscriptions; otherwise, :code:`<code>false</code>`.
:vartype has_conflict_across_subscription: bool
:ivar conflicting_app_resource_id: Name of the conflicting app on scale unit if it's within the
same subscription.
:vartype conflicting_app_resource_id: str
:param c_name_records: CName records controller can see for this hostname.
:type c_name_records: list[str]
:param txt_records: TXT records controller can see for this hostname.
:type txt_records: list[str]
:param a_records: A records controller can see for this hostname.
:type a_records: list[str]
:param alternate_c_name_records: Alternate CName records controller can see for this hostname.
:type alternate_c_name_records: list[str]
:param alternate_txt_records: Alternate TXT records controller can see for this hostname.
:type alternate_txt_records: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'is_hostname_already_verified': {'readonly': True},
'custom_domain_verification_test': {'readonly': True},
'custom_domain_verification_failure_info': {'readonly': True},
'has_conflict_on_scale_unit': {'readonly': True},
'has_conflict_across_subscription': {'readonly': True},
'conflicting_app_resource_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'is_hostname_already_verified': {'key': 'properties.isHostnameAlreadyVerified', 'type': 'bool'},
'custom_domain_verification_test': {'key': 'properties.customDomainVerificationTest', 'type': 'str'},
'custom_domain_verification_failure_info': {'key': 'properties.customDomainVerificationFailureInfo', 'type': 'ErrorEntity'},
'has_conflict_on_scale_unit': {'key': 'properties.hasConflictOnScaleUnit', 'type': 'bool'},
'has_conflict_across_subscription': {'key': 'properties.hasConflictAcrossSubscription', 'type': 'bool'},
'conflicting_app_resource_id': {'key': 'properties.conflictingAppResourceId', 'type': 'str'},
'c_name_records': {'key': 'properties.cNameRecords', 'type': '[str]'},
'txt_records': {'key': 'properties.txtRecords', 'type': '[str]'},
'a_records': {'key': 'properties.aRecords', 'type': '[str]'},
'alternate_c_name_records': {'key': 'properties.alternateCNameRecords', 'type': '[str]'},
'alternate_txt_records': {'key': 'properties.alternateTxtRecords', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
c_name_records: Optional[List[str]] = None,
txt_records: Optional[List[str]] = None,
a_records: Optional[List[str]] = None,
alternate_c_name_records: Optional[List[str]] = None,
alternate_txt_records: Optional[List[str]] = None,
**kwargs
):
super(CustomHostnameAnalysisResult, self).__init__(kind=kind, **kwargs)
self.is_hostname_already_verified = None
self.custom_domain_verification_test = None
self.custom_domain_verification_failure_info = None
self.has_conflict_on_scale_unit = None
self.has_conflict_across_subscription = None
self.conflicting_app_resource_id = None
self.c_name_records = c_name_records
self.txt_records = txt_records
self.a_records = a_records
self.alternate_c_name_records = alternate_c_name_records
self.alternate_txt_records = alternate_txt_records
class DatabaseBackupSetting(msrest.serialization.Model):
"""Database backup settings.
All required parameters must be populated in order to send to Azure.
:param database_type: Required. Database type (e.g. SqlAzure / MySql). Possible values include:
"SqlAzure", "MySql", "LocalMySql", "PostgreSql".
:type database_type: str or ~azure.mgmt.web.v2016_08_01.models.DatabaseType
:param name:
:type name: str
:param connection_string_name: Contains a connection string name that is linked to the
SiteConfig.ConnectionStrings.
This is used during restore with overwrite connection strings options.
:type connection_string_name: str
:param connection_string: Contains a connection string to a database which is being backed up
or restored. If the restore should happen to a new database, the database name inside is the
new one.
:type connection_string: str
"""
_validation = {
'database_type': {'required': True},
}
_attribute_map = {
'database_type': {'key': 'databaseType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'connection_string_name': {'key': 'connectionStringName', 'type': 'str'},
'connection_string': {'key': 'connectionString', 'type': 'str'},
}
def __init__(
self,
*,
database_type: Union[str, "DatabaseType"],
name: Optional[str] = None,
connection_string_name: Optional[str] = None,
connection_string: Optional[str] = None,
**kwargs
):
super(DatabaseBackupSetting, self).__init__(**kwargs)
self.database_type = database_type
self.name = name
self.connection_string_name = connection_string_name
self.connection_string = connection_string
class Deployment(ProxyOnlyResource):
"""User credentials used for publishing activity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Identifier for deployment.
:type id_properties_id: str
:param status: Deployment status.
:type status: int
:param message: Details about deployment status.
:type message: str
:param author: Who authored the deployment.
:type author: str
:param deployer: Who performed the deployment.
:type deployer: str
:param author_email: Author email.
:type author_email: str
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param end_time: End time.
:type end_time: ~datetime.datetime
:param active: True if deployment is currently active, false if completed and null if not
started.
:type active: bool
:param details: Details on deployment.
:type details: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'int'},
'message': {'key': 'properties.message', 'type': 'str'},
'author': {'key': 'properties.author', 'type': 'str'},
'deployer': {'key': 'properties.deployer', 'type': 'str'},
'author_email': {'key': 'properties.authorEmail', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'active': {'key': 'properties.active', 'type': 'bool'},
'details': {'key': 'properties.details', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
status: Optional[int] = None,
message: Optional[str] = None,
author: Optional[str] = None,
deployer: Optional[str] = None,
author_email: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
active: Optional[bool] = None,
details: Optional[str] = None,
**kwargs
):
super(Deployment, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.status = status
self.message = message
self.author = author
self.deployer = deployer
self.author_email = author_email
self.start_time = start_time
self.end_time = end_time
self.active = active
self.details = details
class DeploymentCollection(msrest.serialization.Model):
"""Collection of app deployments.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Deployment]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Deployment]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Deployment"],
**kwargs
):
super(DeploymentCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class EnabledConfig(msrest.serialization.Model):
"""Enabled configuration.
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_attribute_map = {
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
enabled: Optional[bool] = None,
**kwargs
):
super(EnabledConfig, self).__init__(**kwargs)
self.enabled = enabled
class ErrorEntity(msrest.serialization.Model):
"""Body of the error response returned from the API.
:param extended_code: Type of error.
:type extended_code: str
:param message_template: Message template.
:type message_template: str
:param parameters: Parameters for the template.
:type parameters: list[str]
:param inner_errors: Inner errors.
:type inner_errors: list[~azure.mgmt.web.v2016_08_01.models.ErrorEntity]
:param code: Basic error code.
:type code: str
:param message: Any details of the error.
:type message: str
"""
_attribute_map = {
'extended_code': {'key': 'extendedCode', 'type': 'str'},
'message_template': {'key': 'messageTemplate', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[str]'},
'inner_errors': {'key': 'innerErrors', 'type': '[ErrorEntity]'},
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
*,
extended_code: Optional[str] = None,
message_template: Optional[str] = None,
parameters: Optional[List[str]] = None,
inner_errors: Optional[List["ErrorEntity"]] = None,
code: Optional[str] = None,
message: Optional[str] = None,
**kwargs
):
super(ErrorEntity, self).__init__(**kwargs)
self.extended_code = extended_code
self.message_template = message_template
self.parameters = parameters
self.inner_errors = inner_errors
self.code = code
self.message = message
class Experiments(msrest.serialization.Model):
"""Routing rules in production experiments.
:param ramp_up_rules: List of ramp-up rules.
:type ramp_up_rules: list[~azure.mgmt.web.v2016_08_01.models.RampUpRule]
"""
_attribute_map = {
'ramp_up_rules': {'key': 'rampUpRules', 'type': '[RampUpRule]'},
}
def __init__(
self,
*,
ramp_up_rules: Optional[List["RampUpRule"]] = None,
**kwargs
):
super(Experiments, self).__init__(**kwargs)
self.ramp_up_rules = ramp_up_rules
class FileSystemApplicationLogsConfig(msrest.serialization.Model):
"""Application logs to file system configuration.
:param level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning",
"Error".
:type level: str or ~azure.mgmt.web.v2016_08_01.models.LogLevel
"""
_attribute_map = {
'level': {'key': 'level', 'type': 'str'},
}
def __init__(
self,
*,
level: Optional[Union[str, "LogLevel"]] = None,
**kwargs
):
super(FileSystemApplicationLogsConfig, self).__init__(**kwargs)
self.level = level
class FileSystemHttpLogsConfig(msrest.serialization.Model):
"""Http logs to file system configuration.
:param retention_in_mb: Maximum size in megabytes that http log files can use.
    When reached, old log files will be removed to make space for new ones.
Value can range between 25 and 100.
:type retention_in_mb: int
:param retention_in_days: Retention in days.
Remove files older than X days.
0 or lower means no retention.
:type retention_in_days: int
:param enabled: True if configuration is enabled, false if it is disabled and null if
configuration is not set.
:type enabled: bool
"""
_validation = {
'retention_in_mb': {'maximum': 100, 'minimum': 25},
}
_attribute_map = {
'retention_in_mb': {'key': 'retentionInMb', 'type': 'int'},
'retention_in_days': {'key': 'retentionInDays', 'type': 'int'},
'enabled': {'key': 'enabled', 'type': 'bool'},
}
def __init__(
self,
*,
retention_in_mb: Optional[int] = None,
retention_in_days: Optional[int] = None,
enabled: Optional[bool] = None,
**kwargs
):
super(FileSystemHttpLogsConfig, self).__init__(**kwargs)
self.retention_in_mb = retention_in_mb
self.retention_in_days = retention_in_days
self.enabled = enabled
class FunctionEnvelope(ProxyOnlyResource):
"""Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Function name.
:vartype name_properties_name: str
:ivar function_app_id: Function App ID.
:vartype function_app_id: str
:param script_root_path_href: Script root path URI.
:type script_root_path_href: str
:param script_href: Script URI.
:type script_href: str
:param config_href: Config URI.
:type config_href: str
:param secrets_file_href: Secrets file URI.
:type secrets_file_href: str
:param href: Function URI.
:type href: str
:param config: Config information.
:type config: any
:param files: File list.
:type files: dict[str, str]
:param test_data: Test data used when testing via the Azure Portal.
:type test_data: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
'function_app_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'function_app_id': {'key': 'properties.functionAppId', 'type': 'str'},
'script_root_path_href': {'key': 'properties.scriptRootPathHref', 'type': 'str'},
'script_href': {'key': 'properties.scriptHref', 'type': 'str'},
'config_href': {'key': 'properties.configHref', 'type': 'str'},
'secrets_file_href': {'key': 'properties.secretsFileHref', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'config': {'key': 'properties.config', 'type': 'object'},
'files': {'key': 'properties.files', 'type': '{str}'},
'test_data': {'key': 'properties.testData', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
script_root_path_href: Optional[str] = None,
script_href: Optional[str] = None,
config_href: Optional[str] = None,
secrets_file_href: Optional[str] = None,
href: Optional[str] = None,
config: Optional[Any] = None,
files: Optional[Dict[str, str]] = None,
test_data: Optional[str] = None,
**kwargs
):
super(FunctionEnvelope, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.function_app_id = None
self.script_root_path_href = script_root_path_href
self.script_href = script_href
self.config_href = config_href
self.secrets_file_href = secrets_file_href
self.href = href
self.config = config
self.files = files
self.test_data = test_data
class FunctionEnvelopeCollection(msrest.serialization.Model):
"""Collection of Kudu function information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.FunctionEnvelope]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[FunctionEnvelope]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["FunctionEnvelope"],
**kwargs
):
super(FunctionEnvelopeCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
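
# Round-trip sketch (assumes msrest's standard Model.serialize() and
# Model.deserialize() helpers; the wire keys come from _attribute_map, so
# href lands under 'properties'). The URI is an illustrative placeholder.
#
#   page = FunctionEnvelopeCollection(
#       value=[FunctionEnvelope(href='https://example.net/api/functions/f1')],
#   )
#   wire = page.serialize()                # {'value': [{'properties': ...}]}
#   back = FunctionEnvelopeCollection.deserialize(wire)
#   assert back.next_link is None          # readonly, server-populated
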
class FunctionSecrets(ProxyOnlyResource):
"""Function secrets.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param key: Secret key.
:type key: str
:param trigger_url: Trigger URL.
:type trigger_url: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'key': {'key': 'properties.key', 'type': 'str'},
'trigger_url': {'key': 'properties.triggerUrl', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
key: Optional[str] = None,
trigger_url: Optional[str] = None,
**kwargs
):
super(FunctionSecrets, self).__init__(kind=kind, **kwargs)
self.key = key
self.trigger_url = trigger_url
class HandlerMapping(msrest.serialization.Model):
"""The IIS handler mappings used to define which handler processes HTTP requests with certain extension.
For example, it is used to configure php-cgi.exe process to handle all HTTP requests with *.php extension.
:param extension: Requests with this extension will be handled using the specified FastCGI
application.
:type extension: str
:param script_processor: The absolute path to the FastCGI application.
:type script_processor: str
:param arguments: Command-line arguments to be passed to the script processor.
:type arguments: str
"""
_attribute_map = {
'extension': {'key': 'extension', 'type': 'str'},
'script_processor': {'key': 'scriptProcessor', 'type': 'str'},
'arguments': {'key': 'arguments', 'type': 'str'},
}
def __init__(
self,
*,
extension: Optional[str] = None,
script_processor: Optional[str] = None,
arguments: Optional[str] = None,
**kwargs
):
super(HandlerMapping, self).__init__(**kwargs)
self.extension = extension
self.script_processor = script_processor
self.arguments = arguments
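
# Sketch mirroring the docstring example above: route *.php requests to a
# FastCGI processor. The processor path and arguments are illustrative
# placeholders, not required values.
#
#   php_handler = HandlerMapping(
#       extension='*.php',
#       script_processor='D:\\PHP\\php-cgi.exe',
#       arguments='-c D:\\PHP\\php.ini',
#   )
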
class HostingEnvironmentProfile(msrest.serialization.Model):
"""Specification for an App Service Environment to use for this resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID of the App Service Environment.
:type id: str
:ivar name: Name of the App Service Environment.
:vartype name: str
:ivar type: Resource type of the App Service Environment.
:vartype type: str
"""
_validation = {
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
**kwargs
):
super(HostingEnvironmentProfile, self).__init__(**kwargs)
self.id = id
self.name = None
self.type = None
class HostNameBinding(ProxyOnlyResource):
"""A hostname binding object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param site_name: App Service app name.
:type site_name: str
:param domain_id: Fully qualified ARM domain resource URI.
:type domain_id: str
:param azure_resource_name: Azure resource name.
:type azure_resource_name: str
:param azure_resource_type: Azure resource type. Possible values include: "Website",
"TrafficManager".
:type azure_resource_type: str or ~azure.mgmt.web.v2016_08_01.models.AzureResourceType
:param custom_host_name_dns_record_type: Custom DNS record type. Possible values include:
"CName", "A".
:type custom_host_name_dns_record_type: str or
~azure.mgmt.web.v2016_08_01.models.CustomHostNameDnsRecordType
:param host_name_type: Hostname type. Possible values include: "Verified", "Managed".
:type host_name_type: str or ~azure.mgmt.web.v2016_08_01.models.HostNameType
:param ssl_state: SSL type. Possible values include: "Disabled", "SniEnabled",
"IpBasedEnabled".
:type ssl_state: str or ~azure.mgmt.web.v2016_08_01.models.SslState
:param thumbprint: SSL certificate thumbprint.
:type thumbprint: str
:ivar virtual_ip: Virtual IP address assigned to the hostname if IP based SSL is enabled.
:vartype virtual_ip: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_ip': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'site_name': {'key': 'properties.siteName', 'type': 'str'},
'domain_id': {'key': 'properties.domainId', 'type': 'str'},
'azure_resource_name': {'key': 'properties.azureResourceName', 'type': 'str'},
'azure_resource_type': {'key': 'properties.azureResourceType', 'type': 'str'},
'custom_host_name_dns_record_type': {'key': 'properties.customHostNameDnsRecordType', 'type': 'str'},
'host_name_type': {'key': 'properties.hostNameType', 'type': 'str'},
'ssl_state': {'key': 'properties.sslState', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
'virtual_ip': {'key': 'properties.virtualIP', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
site_name: Optional[str] = None,
domain_id: Optional[str] = None,
azure_resource_name: Optional[str] = None,
azure_resource_type: Optional[Union[str, "AzureResourceType"]] = None,
custom_host_name_dns_record_type: Optional[Union[str, "CustomHostNameDnsRecordType"]] = None,
host_name_type: Optional[Union[str, "HostNameType"]] = None,
ssl_state: Optional[Union[str, "SslState"]] = None,
thumbprint: Optional[str] = None,
**kwargs
):
super(HostNameBinding, self).__init__(kind=kind, **kwargs)
self.site_name = site_name
self.domain_id = domain_id
self.azure_resource_name = azure_resource_name
self.azure_resource_type = azure_resource_type
self.custom_host_name_dns_record_type = custom_host_name_dns_record_type
self.host_name_type = host_name_type
self.ssl_state = ssl_state
self.thumbprint = thumbprint
self.virtual_ip = None
class HostNameBindingCollection(msrest.serialization.Model):
"""Collection of hostname bindings.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.HostNameBinding]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[HostNameBinding]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["HostNameBinding"],
**kwargs
):
super(HostNameBindingCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class HostNameSslState(msrest.serialization.Model):
"""SSL-enabled hostname.
:param name: Hostname.
:type name: str
:param ssl_state: SSL type. Possible values include: "Disabled", "SniEnabled",
"IpBasedEnabled".
:type ssl_state: str or ~azure.mgmt.web.v2016_08_01.models.SslState
:param virtual_ip: Virtual IP address assigned to the hostname if IP based SSL is enabled.
:type virtual_ip: str
:param thumbprint: SSL certificate thumbprint.
:type thumbprint: str
:param to_update: Set to :code:`<code>true</code>` to update the existing hostname.
:type to_update: bool
:param host_type: Indicates whether the hostname is a standard or repository hostname. Possible
values include: "Standard", "Repository".
:type host_type: str or ~azure.mgmt.web.v2016_08_01.models.HostType
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'ssl_state': {'key': 'sslState', 'type': 'str'},
'virtual_ip': {'key': 'virtualIP', 'type': 'str'},
'thumbprint': {'key': 'thumbprint', 'type': 'str'},
'to_update': {'key': 'toUpdate', 'type': 'bool'},
'host_type': {'key': 'hostType', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
ssl_state: Optional[Union[str, "SslState"]] = None,
virtual_ip: Optional[str] = None,
thumbprint: Optional[str] = None,
to_update: Optional[bool] = None,
host_type: Optional[Union[str, "HostType"]] = None,
**kwargs
):
super(HostNameSslState, self).__init__(**kwargs)
self.name = name
self.ssl_state = ssl_state
self.virtual_ip = virtual_ip
self.thumbprint = thumbprint
self.to_update = to_update
self.host_type = host_type
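
# Sketch: declare an SNI-based SSL binding for a custom hostname. The hostname
# and thumbprint are illustrative placeholders; ssl_state accepts either the
# SslState enum member or its string value.
#
#   ssl_binding = HostNameSslState(
#       name='www.contoso.com',
#       ssl_state='SniEnabled',
#       thumbprint='0123456789ABCDEF0123456789ABCDEF01234567',
#       to_update=True,
#   )
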
class HttpLogsConfig(msrest.serialization.Model):
"""Http logs configuration.
:param file_system: Http logs to file system configuration.
:type file_system: ~azure.mgmt.web.v2016_08_01.models.FileSystemHttpLogsConfig
:param azure_blob_storage: Http logs to azure blob storage configuration.
:type azure_blob_storage: ~azure.mgmt.web.v2016_08_01.models.AzureBlobStorageHttpLogsConfig
"""
_attribute_map = {
'file_system': {'key': 'fileSystem', 'type': 'FileSystemHttpLogsConfig'},
'azure_blob_storage': {'key': 'azureBlobStorage', 'type': 'AzureBlobStorageHttpLogsConfig'},
}
def __init__(
self,
*,
file_system: Optional["FileSystemHttpLogsConfig"] = None,
azure_blob_storage: Optional["AzureBlobStorageHttpLogsConfig"] = None,
**kwargs
):
super(HttpLogsConfig, self).__init__(**kwargs)
self.file_system = file_system
self.azure_blob_storage = azure_blob_storage
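
# Sketch: HttpLogsConfig is a plain container; populating exactly one of the
# two destinations is the usual pattern (an assumption based on the model
# shape, not a constraint declared in _validation).
#
#   logs = HttpLogsConfig(
#       file_system=FileSystemHttpLogsConfig(retention_in_mb=35, enabled=True),
#   )
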
class HybridConnection(ProxyOnlyResource):
"""Hybrid Connection contract. This is used to configure a Hybrid Connection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param service_bus_namespace: The name of the Service Bus namespace.
:type service_bus_namespace: str
:param relay_name: The name of the Service Bus relay.
:type relay_name: str
:param relay_arm_uri: The ARM URI to the Service Bus relay.
:type relay_arm_uri: str
:param hostname: The hostname of the endpoint.
:type hostname: str
:param port: The port of the endpoint.
:type port: int
:param send_key_name: The name of the Service Bus key which has Send permissions. This is used
to authenticate to Service Bus.
:type send_key_name: str
:param send_key_value: The value of the Service Bus key. This is used to authenticate to
Service Bus. In ARM this key will not normally be returned;
use the POST /listKeys API instead.
:type send_key_value: str
:param service_bus_suffix: The suffix for the service bus endpoint. By default this is
.servicebus.windows.net.
:type service_bus_suffix: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'service_bus_namespace': {'key': 'properties.serviceBusNamespace', 'type': 'str'},
'relay_name': {'key': 'properties.relayName', 'type': 'str'},
'relay_arm_uri': {'key': 'properties.relayArmUri', 'type': 'str'},
'hostname': {'key': 'properties.hostname', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
'service_bus_suffix': {'key': 'properties.serviceBusSuffix', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
service_bus_namespace: Optional[str] = None,
relay_name: Optional[str] = None,
relay_arm_uri: Optional[str] = None,
hostname: Optional[str] = None,
port: Optional[int] = None,
send_key_name: Optional[str] = None,
send_key_value: Optional[str] = None,
service_bus_suffix: Optional[str] = None,
**kwargs
):
super(HybridConnection, self).__init__(kind=kind, **kwargs)
self.service_bus_namespace = service_bus_namespace
self.relay_name = relay_name
self.relay_arm_uri = relay_arm_uri
self.hostname = hostname
self.port = port
self.send_key_name = send_key_name
self.send_key_value = send_key_value
self.service_bus_suffix = service_bus_suffix
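
# Sketch: minimal Hybrid Connection payload. As the send_key_value docstring
# notes, ARM does not normally return the key on reads; fetch it via the POST
# /listKeys API. All values below are illustrative placeholders.
#
#   hc = HybridConnection(
#       service_bus_namespace='my-relay-ns',
#       relay_name='my-relay',
#       hostname='internal-db.contoso.local',
#       port=1433,
#       send_key_name='defaultSender',
#   )
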
class HybridConnectionKey(ProxyOnlyResource):
"""Hybrid Connection key contract. This has the send key name and value for a Hybrid Connection.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar send_key_name: The name of the send key.
:vartype send_key_name: str
:ivar send_key_value: The value of the send key.
:vartype send_key_value: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'send_key_name': {'readonly': True},
'send_key_value': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'send_key_name': {'key': 'properties.sendKeyName', 'type': 'str'},
'send_key_value': {'key': 'properties.sendKeyValue', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(HybridConnectionKey, self).__init__(kind=kind, **kwargs)
self.send_key_name = None
self.send_key_value = None
class Identifier(ProxyOnlyResource):
"""A domain specific resource identifier.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: String representation of the identity.
:type id_properties_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
**kwargs
):
super(Identifier, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
class IdentifierCollection(msrest.serialization.Model):
"""Collection of identifiers.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Identifier]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Identifier]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Identifier"],
**kwargs
):
super(IdentifierCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class IpSecurityRestriction(msrest.serialization.Model):
"""IP security restriction on an app.
All required parameters must be populated in order to send to Azure.
:param ip_address: Required. IP address the security restriction is valid for.
:type ip_address: str
:param subnet_mask: Subnet mask for the range of IP addresses the restriction is valid for.
:type subnet_mask: str
"""
_validation = {
'ip_address': {'required': True},
}
_attribute_map = {
'ip_address': {'key': 'ipAddress', 'type': 'str'},
'subnet_mask': {'key': 'subnetMask', 'type': 'str'},
}
def __init__(
self,
*,
ip_address: str,
subnet_mask: Optional[str] = None,
**kwargs
):
super(IpSecurityRestriction, self).__init__(**kwargs)
self.ip_address = ip_address
self.subnet_mask = subnet_mask
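
# Sketch: ip_address is the only required field (see _validation above); an
# optional subnet mask widens the restriction to a range. The addresses are
# illustrative placeholders from the 203.0.113.0/24 documentation range.
#
#   allow_office = IpSecurityRestriction(
#       ip_address='203.0.113.0',
#       subnet_mask='255.255.255.0',
#   )
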
class LocalizableString(msrest.serialization.Model):
"""Localizable string object containing the name and a localized value.
:param value: Non-localized name.
:type value: str
:param localized_value: Localized name.
:type localized_value: str
"""
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
*,
value: Optional[str] = None,
localized_value: Optional[str] = None,
**kwargs
):
super(LocalizableString, self).__init__(**kwargs)
self.value = value
self.localized_value = localized_value
class ManagedServiceIdentity(msrest.serialization.Model):
"""Managed service identity.
Variables are only populated by the server, and will be ignored when sending a request.
:param type: Type of managed service identity. Possible values include: "SystemAssigned".
:type type: str or ~azure.mgmt.web.v2016_08_01.models.ManagedServiceIdentityType
:ivar tenant_id: Tenant of managed service identity.
:vartype tenant_id: str
:ivar principal_id: Principal Id of managed service identity.
:vartype principal_id: str
"""
_validation = {
'tenant_id': {'readonly': True},
'principal_id': {'readonly': True},
}
_attribute_map = {
'type': {'key': 'type', 'type': 'str'},
'tenant_id': {'key': 'tenantId', 'type': 'str'},
'principal_id': {'key': 'principalId', 'type': 'str'},
}
def __init__(
self,
*,
type: Optional[Union[str, "ManagedServiceIdentityType"]] = None,
**kwargs
):
super(ManagedServiceIdentity, self).__init__(**kwargs)
self.type = type
self.tenant_id = None
self.principal_id = None
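
# Sketch: only the identity type is caller-settable; tenant_id and
# principal_id are readonly and stay None until the server populates them
# on a response.
#
#   identity = ManagedServiceIdentity(type='SystemAssigned')
#   assert identity.tenant_id is None and identity.principal_id is None
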
class MigrateMySqlRequest(ProxyOnlyResource):
"""MySQL migration request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param connection_string: Connection string to the remote MySQL database.
:type connection_string: str
:param migration_type: The type of migration operation to be done. Possible values include:
"LocalToRemote", "RemoteToLocal".
:type migration_type: str or ~azure.mgmt.web.v2016_08_01.models.MySqlMigrationType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'connection_string': {'key': 'properties.connectionString', 'type': 'str'},
'migration_type': {'key': 'properties.migrationType', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
connection_string: Optional[str] = None,
migration_type: Optional[Union[str, "MySqlMigrationType"]] = None,
**kwargs
):
super(MigrateMySqlRequest, self).__init__(kind=kind, **kwargs)
self.connection_string = connection_string
self.migration_type = migration_type
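
# Sketch: request an in-app MySQL migration. The connection string is an
# illustrative placeholder; migration_type accepts the MySqlMigrationType
# enum member or its string value.
#
#   req = MigrateMySqlRequest(
#       connection_string='Database=wpdb;Data Source=127.0.0.1;User Id=u;Password=p',
#       migration_type='LocalToRemote',
#   )
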
class MigrateMySqlStatus(ProxyOnlyResource):
"""MySQL migration status.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar migration_operation_status: Status of the migration task. Possible values include:
"InProgress", "Failed", "Succeeded", "TimedOut", "Created".
:vartype migration_operation_status: str or ~azure.mgmt.web.v2016_08_01.models.OperationStatus
:ivar operation_id: Operation ID for the migration task.
:vartype operation_id: str
:ivar local_my_sql_enabled: True if the web app has in-app MySQL enabled.
:vartype local_my_sql_enabled: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'migration_operation_status': {'readonly': True},
'operation_id': {'readonly': True},
'local_my_sql_enabled': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'migration_operation_status': {'key': 'properties.migrationOperationStatus', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
'local_my_sql_enabled': {'key': 'properties.localMySqlEnabled', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MigrateMySqlStatus, self).__init__(kind=kind, **kwargs)
self.migration_operation_status = None
self.operation_id = None
self.local_my_sql_enabled = None
class MSDeploy(ProxyOnlyResource):
"""MSDeploy ARM PUT information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param package_uri: Package URI.
:type package_uri: str
:param connection_string: SQL Connection String.
:type connection_string: str
:param db_type: Database Type.
:type db_type: str
:param set_parameters_xml_file_uri: URI of MSDeploy Parameters file. Must not be set if
SetParameters is used.
:type set_parameters_xml_file_uri: str
:param set_parameters: MSDeploy Parameters. Must not be set if SetParametersXmlFileUri is used.
:type set_parameters: dict[str, str]
:param skip_app_data: Controls whether the MSDeploy operation skips the App_Data directory.
If set to :code:`<code>true</code>`, the existing App_Data directory on the destination
will not be deleted, and any App_Data directory in the source will be ignored.
Setting is :code:`<code>false</code>` by default.
:type skip_app_data: bool
:param app_offline: Sets the AppOffline rule while the MSDeploy operation executes.
Setting is :code:`<code>false</code>` by default.
:type app_offline: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'package_uri': {'key': 'properties.packageUri', 'type': 'str'},
'connection_string': {'key': 'properties.connectionString', 'type': 'str'},
'db_type': {'key': 'properties.dbType', 'type': 'str'},
'set_parameters_xml_file_uri': {'key': 'properties.setParametersXmlFileUri', 'type': 'str'},
'set_parameters': {'key': 'properties.setParameters', 'type': '{str}'},
'skip_app_data': {'key': 'properties.skipAppData', 'type': 'bool'},
'app_offline': {'key': 'properties.appOffline', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
package_uri: Optional[str] = None,
connection_string: Optional[str] = None,
db_type: Optional[str] = None,
set_parameters_xml_file_uri: Optional[str] = None,
set_parameters: Optional[Dict[str, str]] = None,
skip_app_data: Optional[bool] = None,
app_offline: Optional[bool] = None,
**kwargs
):
super(MSDeploy, self).__init__(kind=kind, **kwargs)
self.package_uri = package_uri
self.connection_string = connection_string
self.db_type = db_type
self.set_parameters_xml_file_uri = set_parameters_xml_file_uri
self.set_parameters = set_parameters
self.skip_app_data = skip_app_data
self.app_offline = app_offline
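
# Sketch: note the mutual exclusion called out in the docstring --
# set_parameters and set_parameters_xml_file_uri must not both be set.
# The package URI and parameter name below are illustrative placeholders.
#
#   deploy = MSDeploy(
#       package_uri='https://example.blob.core.windows.net/pkgs/site.zip',
#       set_parameters={'IIS Web Application Name': 'mysite'},
#       app_offline=True,
#   )
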
class MSDeployLog(ProxyOnlyResource):
"""MSDeploy log.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar entries: List of log entry messages.
:vartype entries: list[~azure.mgmt.web.v2016_08_01.models.MSDeployLogEntry]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'entries': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'entries': {'key': 'properties.entries', 'type': '[MSDeployLogEntry]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MSDeployLog, self).__init__(kind=kind, **kwargs)
self.entries = None
class MSDeployLogEntry(msrest.serialization.Model):
"""MSDeploy log entry.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar time: Timestamp of log entry.
:vartype time: ~datetime.datetime
:ivar type: Log entry type. Possible values include: "Message", "Warning", "Error".
:vartype type: str or ~azure.mgmt.web.v2016_08_01.models.MSDeployLogEntryType
:ivar message: Log entry message.
:vartype message: str
"""
_validation = {
'time': {'readonly': True},
'type': {'readonly': True},
'message': {'readonly': True},
}
_attribute_map = {
'time': {'key': 'time', 'type': 'iso-8601'},
'type': {'key': 'type', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(MSDeployLogEntry, self).__init__(**kwargs)
self.time = None
self.type = None
self.message = None
class MSDeployStatus(ProxyOnlyResource):
"""MSDeploy ARM response.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar deployer: Username of deployer.
:vartype deployer: str
:ivar provisioning_state: Provisioning state. Possible values include: "accepted", "running",
"succeeded", "failed", "canceled".
:vartype provisioning_state: str or
~azure.mgmt.web.v2016_08_01.models.MSDeployProvisioningState
:ivar start_time: Start time of deploy operation.
:vartype start_time: ~datetime.datetime
:ivar end_time: End time of deploy operation.
:vartype end_time: ~datetime.datetime
:ivar complete: Whether the deployment operation has completed.
:vartype complete: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'deployer': {'readonly': True},
'provisioning_state': {'readonly': True},
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'complete': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'deployer': {'key': 'properties.deployer', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'complete': {'key': 'properties.complete', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(MSDeployStatus, self).__init__(kind=kind, **kwargs)
self.deployer = None
self.provisioning_state = None
self.start_time = None
self.end_time = None
self.complete = None
class NameValuePair(msrest.serialization.Model):
"""Name value pair.
:param name: Pair name.
:type name: str
:param value: Pair value.
:type value: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(NameValuePair, self).__init__(**kwargs)
self.name = name
self.value = value
class NetworkFeatures(ProxyOnlyResource):
"""Full view of network features for an app (presently VNET integration and Hybrid Connections).
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar virtual_network_name: The Virtual Network name.
:vartype virtual_network_name: str
:ivar virtual_network_connection: The Virtual Network summary view.
:vartype virtual_network_connection: ~azure.mgmt.web.v2016_08_01.models.VnetInfo
:ivar hybrid_connections: The Hybrid Connections summary view.
:vartype hybrid_connections:
list[~azure.mgmt.web.v2016_08_01.models.RelayServiceConnectionEntity]
:ivar hybrid_connections_v2: The Hybrid Connection V2 (Service Bus) view.
:vartype hybrid_connections_v2: list[~azure.mgmt.web.v2016_08_01.models.HybridConnection]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'virtual_network_name': {'readonly': True},
'virtual_network_connection': {'readonly': True},
'hybrid_connections': {'readonly': True},
'hybrid_connections_v2': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'virtual_network_name': {'key': 'properties.virtualNetworkName', 'type': 'str'},
'virtual_network_connection': {'key': 'properties.virtualNetworkConnection', 'type': 'VnetInfo'},
'hybrid_connections': {'key': 'properties.hybridConnections', 'type': '[RelayServiceConnectionEntity]'},
'hybrid_connections_v2': {'key': 'properties.hybridConnectionsV2', 'type': '[HybridConnection]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(NetworkFeatures, self).__init__(kind=kind, **kwargs)
self.virtual_network_name = None
self.virtual_network_connection = None
self.hybrid_connections = None
self.hybrid_connections_v2 = None
class Operation(msrest.serialization.Model):
"""An operation on a resource.
:param id: Operation ID.
:type id: str
:param name: Operation name.
:type name: str
:param status: The current status of the operation. Possible values include: "InProgress",
"Failed", "Succeeded", "TimedOut", "Created".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.OperationStatus
:param errors: Any errors associated with the operation.
:type errors: list[~azure.mgmt.web.v2016_08_01.models.ErrorEntity]
:param created_time: Time when the operation started.
:type created_time: ~datetime.datetime
:param modified_time: Time when the operation was last updated.
:type modified_time: ~datetime.datetime
:param expiration_time: Time when the operation will expire.
:type expiration_time: ~datetime.datetime
:param geo_master_operation_id: Applicable only for stamp operation ids.
:type geo_master_operation_id: str
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'status': {'key': 'status', 'type': 'str'},
'errors': {'key': 'errors', 'type': '[ErrorEntity]'},
'created_time': {'key': 'createdTime', 'type': 'iso-8601'},
'modified_time': {'key': 'modifiedTime', 'type': 'iso-8601'},
'expiration_time': {'key': 'expirationTime', 'type': 'iso-8601'},
'geo_master_operation_id': {'key': 'geoMasterOperationId', 'type': 'str'},
}
def __init__(
self,
*,
id: Optional[str] = None,
name: Optional[str] = None,
status: Optional[Union[str, "OperationStatus"]] = None,
errors: Optional[List["ErrorEntity"]] = None,
created_time: Optional[datetime.datetime] = None,
modified_time: Optional[datetime.datetime] = None,
expiration_time: Optional[datetime.datetime] = None,
geo_master_operation_id: Optional[str] = None,
**kwargs
):
super(Operation, self).__init__(**kwargs)
self.id = id
self.name = name
self.status = status
self.errors = errors
self.created_time = created_time
self.modified_time = modified_time
self.expiration_time = expiration_time
self.geo_master_operation_id = geo_master_operation_id
class PerfMonCounterCollection(msrest.serialization.Model):
"""Collection of performance monitor counters.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.PerfMonResponse]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PerfMonResponse]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["PerfMonResponse"],
**kwargs
):
super(PerfMonCounterCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class PerfMonResponse(msrest.serialization.Model):
"""Performance monitor API response.
:param code: The response code.
:type code: str
:param message: The message.
:type message: str
:param data: The performance monitor counters.
:type data: ~azure.mgmt.web.v2016_08_01.models.PerfMonSet
"""
_attribute_map = {
'code': {'key': 'code', 'type': 'str'},
'message': {'key': 'message', 'type': 'str'},
'data': {'key': 'data', 'type': 'PerfMonSet'},
}
def __init__(
self,
*,
code: Optional[str] = None,
message: Optional[str] = None,
data: Optional["PerfMonSet"] = None,
**kwargs
):
super(PerfMonResponse, self).__init__(**kwargs)
self.code = code
self.message = message
self.data = data
class PerfMonSample(msrest.serialization.Model):
"""Performance monitor sample in a set.
:param time: Point in time at which the counter was measured.
:type time: ~datetime.datetime
:param instance_name: Name of the server on which the measurement was made.
:type instance_name: str
:param value: Value of the counter at that point in time.
:type value: float
:param core_count: Core count of the worker. Not a data member.
:type core_count: int
"""
_attribute_map = {
'time': {'key': 'time', 'type': 'iso-8601'},
'instance_name': {'key': 'instanceName', 'type': 'str'},
'value': {'key': 'value', 'type': 'float'},
'core_count': {'key': 'coreCount', 'type': 'int'},
}
def __init__(
self,
*,
time: Optional[datetime.datetime] = None,
instance_name: Optional[str] = None,
value: Optional[float] = None,
core_count: Optional[int] = None,
**kwargs
):
super(PerfMonSample, self).__init__(**kwargs)
self.time = time
self.instance_name = instance_name
self.value = value
self.core_count = core_count
class PerfMonSet(msrest.serialization.Model):
"""Metric information.
:param name: Unique key name of the counter.
:type name: str
:param start_time: Start time of the period.
:type start_time: ~datetime.datetime
:param end_time: End time of the period.
:type end_time: ~datetime.datetime
:param time_grain: Presented time grain.
:type time_grain: str
:param values: Collection of samples from the workers that are active during this time.
:type values: list[~azure.mgmt.web.v2016_08_01.models.PerfMonSample]
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'values': {'key': 'values', 'type': '[PerfMonSample]'},
}
def __init__(
self,
*,
name: Optional[str] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
time_grain: Optional[str] = None,
values: Optional[List["PerfMonSample"]] = None,
**kwargs
):
super(PerfMonSet, self).__init__(**kwargs)
self.name = name
self.start_time = start_time
self.end_time = end_time
self.time_grain = time_grain
self.values = values
class Resource(msrest.serialization.Model):
"""Azure resource. This resource is tracked in Azure Resource Manager.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
**kwargs
):
super(Resource, self).__init__(**kwargs)
self.id = None
self.name = None
self.kind = kind
self.location = location
self.type = None
self.tags = tags
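
# Sketch: location is the only required constructor argument for tracked
# resources; id, name, and type are readonly and remain None client-side
# until the server fills them in on a response.
#
#   res = Resource(location='West US', tags={'env': 'dev'})
#   assert res.id is None and res.type is None
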
class PremierAddOn(Resource):
"""Premier add-on.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param sku: Premier add-on SKU.
:type sku: str
:param product: Premier add-on product.
:type product: str
:param vendor: Premier add-on vendor.
:type vendor: str
:param premier_add_on_name: Premier add-on name.
:type premier_add_on_name: str
:param location_properties_location: Premier add-on location.
:type location_properties_location: str
:param tags_properties_tags: Premier add-on tags.
:type tags_properties_tags: dict[str, str]
:param marketplace_publisher: Premier add-on Marketplace publisher.
:type marketplace_publisher: str
:param marketplace_offer: Premier add-on Marketplace offer.
:type marketplace_offer: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'sku': {'key': 'properties.sku', 'type': 'str'},
'product': {'key': 'properties.product', 'type': 'str'},
'vendor': {'key': 'properties.vendor', 'type': 'str'},
'premier_add_on_name': {'key': 'properties.name', 'type': 'str'},
'location_properties_location': {'key': 'properties.location', 'type': 'str'},
'tags_properties_tags': {'key': 'properties.tags', 'type': '{str}'},
'marketplace_publisher': {'key': 'properties.marketplacePublisher', 'type': 'str'},
'marketplace_offer': {'key': 'properties.marketplaceOffer', 'type': 'str'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
sku: Optional[str] = None,
product: Optional[str] = None,
vendor: Optional[str] = None,
premier_add_on_name: Optional[str] = None,
location_properties_location: Optional[str] = None,
tags_properties_tags: Optional[Dict[str, str]] = None,
marketplace_publisher: Optional[str] = None,
marketplace_offer: Optional[str] = None,
**kwargs
):
super(PremierAddOn, self).__init__(kind=kind, location=location, tags=tags, **kwargs)
self.sku = sku
self.product = product
self.vendor = vendor
self.premier_add_on_name = premier_add_on_name
self.location_properties_location = location_properties_location
self.tags_properties_tags = tags_properties_tags
self.marketplace_publisher = marketplace_publisher
self.marketplace_offer = marketplace_offer
class ProcessInfo(ProxyOnlyResource):
"""Process Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: ARM Identifier for deployment.
:type id_properties_id: int
:param name_properties_name: Deployment name.
:type name_properties_name: str
:param href: HRef URI.
:type href: str
:param mini_dump: Minidump URI.
:type mini_dump: str
:param is_profile_running: Is a profile currently running?
:type is_profile_running: bool
:param is_iis_profile_running: Is the IIS profile currently running?
:type is_iis_profile_running: bool
:param iis_profile_timeout_in_seconds: IIS Profile timeout (seconds).
:type iis_profile_timeout_in_seconds: float
:param parent: Parent process.
:type parent: str
:param children: Child process list.
:type children: list[str]
:param threads: Thread list.
:type threads: list[~azure.mgmt.web.v2016_08_01.models.ProcessThreadInfo]
:param open_file_handles: List of open files.
:type open_file_handles: list[str]
:param modules: List of modules.
:type modules: list[~azure.mgmt.web.v2016_08_01.models.ProcessModuleInfo]
:param file_name: File name of this process.
:type file_name: str
:param command_line: Command line.
:type command_line: str
:param user_name: User name.
:type user_name: str
:param handle_count: Handle count.
:type handle_count: int
:param module_count: Module count.
:type module_count: int
:param thread_count: Thread count.
:type thread_count: int
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param total_processor_time: Total CPU time.
:type total_processor_time: str
:param user_processor_time: User CPU time.
:type user_processor_time: str
:param privileged_processor_time: Privileged CPU time.
:type privileged_processor_time: str
:param working_set64: Working set.
:type working_set64: long
:param peak_working_set64: Peak working set.
:type peak_working_set64: long
:param private_memory_size64: Private memory size.
:type private_memory_size64: long
:param virtual_memory_size64: Virtual memory size.
:type virtual_memory_size64: long
:param peak_virtual_memory_size64: Peak virtual memory usage.
:type peak_virtual_memory_size64: long
:param paged_system_memory_size64: Paged system memory.
:type paged_system_memory_size64: long
:param nonpaged_system_memory_size64: Non-paged system memory.
:type nonpaged_system_memory_size64: long
:param paged_memory_size64: Paged memory.
:type paged_memory_size64: long
:param peak_paged_memory_size64: Peak paged memory.
:type peak_paged_memory_size64: long
:param time_stamp: Time stamp.
:type time_stamp: ~datetime.datetime
:param environment_variables: List of environment variables.
:type environment_variables: dict[str, str]
:param is_scm_site: Is this the SCM site?
:type is_scm_site: bool
:param is_web_job: Is this a Web Job?
:type is_web_job: bool
:param description: Description of process.
:type description: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'mini_dump': {'key': 'properties.miniDump', 'type': 'str'},
'is_profile_running': {'key': 'properties.isProfileRunning', 'type': 'bool'},
'is_iis_profile_running': {'key': 'properties.isIisProfileRunning', 'type': 'bool'},
'iis_profile_timeout_in_seconds': {'key': 'properties.iisProfileTimeoutInSeconds', 'type': 'float'},
'parent': {'key': 'properties.parent', 'type': 'str'},
'children': {'key': 'properties.children', 'type': '[str]'},
'threads': {'key': 'properties.threads', 'type': '[ProcessThreadInfo]'},
'open_file_handles': {'key': 'properties.openFileHandles', 'type': '[str]'},
'modules': {'key': 'properties.modules', 'type': '[ProcessModuleInfo]'},
'file_name': {'key': 'properties.fileName', 'type': 'str'},
'command_line': {'key': 'properties.commandLine', 'type': 'str'},
'user_name': {'key': 'properties.userName', 'type': 'str'},
'handle_count': {'key': 'properties.handleCount', 'type': 'int'},
'module_count': {'key': 'properties.moduleCount', 'type': 'int'},
'thread_count': {'key': 'properties.threadCount', 'type': 'int'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'total_processor_time': {'key': 'properties.totalProcessorTime', 'type': 'str'},
'user_processor_time': {'key': 'properties.userProcessorTime', 'type': 'str'},
'privileged_processor_time': {'key': 'properties.privilegedProcessorTime', 'type': 'str'},
'working_set64': {'key': 'properties.workingSet64', 'type': 'long'},
'peak_working_set64': {'key': 'properties.peakWorkingSet64', 'type': 'long'},
'private_memory_size64': {'key': 'properties.privateMemorySize64', 'type': 'long'},
'virtual_memory_size64': {'key': 'properties.virtualMemorySize64', 'type': 'long'},
'peak_virtual_memory_size64': {'key': 'properties.peakVirtualMemorySize64', 'type': 'long'},
'paged_system_memory_size64': {'key': 'properties.pagedSystemMemorySize64', 'type': 'long'},
'nonpaged_system_memory_size64': {'key': 'properties.nonpagedSystemMemorySize64', 'type': 'long'},
'paged_memory_size64': {'key': 'properties.pagedMemorySize64', 'type': 'long'},
'peak_paged_memory_size64': {'key': 'properties.peakPagedMemorySize64', 'type': 'long'},
'time_stamp': {'key': 'properties.timeStamp', 'type': 'iso-8601'},
'environment_variables': {'key': 'properties.environmentVariables', 'type': '{str}'},
'is_scm_site': {'key': 'properties.isScmSite', 'type': 'bool'},
'is_web_job': {'key': 'properties.isWebJob', 'type': 'bool'},
'description': {'key': 'properties.description', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[int] = None,
name_properties_name: Optional[str] = None,
href: Optional[str] = None,
mini_dump: Optional[str] = None,
is_profile_running: Optional[bool] = None,
is_iis_profile_running: Optional[bool] = None,
iis_profile_timeout_in_seconds: Optional[float] = None,
parent: Optional[str] = None,
children: Optional[List[str]] = None,
threads: Optional[List["ProcessThreadInfo"]] = None,
open_file_handles: Optional[List[str]] = None,
modules: Optional[List["ProcessModuleInfo"]] = None,
file_name: Optional[str] = None,
command_line: Optional[str] = None,
user_name: Optional[str] = None,
handle_count: Optional[int] = None,
module_count: Optional[int] = None,
thread_count: Optional[int] = None,
start_time: Optional[datetime.datetime] = None,
total_processor_time: Optional[str] = None,
user_processor_time: Optional[str] = None,
privileged_processor_time: Optional[str] = None,
working_set64: Optional[int] = None,
peak_working_set64: Optional[int] = None,
private_memory_size64: Optional[int] = None,
virtual_memory_size64: Optional[int] = None,
peak_virtual_memory_size64: Optional[int] = None,
paged_system_memory_size64: Optional[int] = None,
nonpaged_system_memory_size64: Optional[int] = None,
paged_memory_size64: Optional[int] = None,
peak_paged_memory_size64: Optional[int] = None,
time_stamp: Optional[datetime.datetime] = None,
environment_variables: Optional[Dict[str, str]] = None,
is_scm_site: Optional[bool] = None,
is_web_job: Optional[bool] = None,
description: Optional[str] = None,
**kwargs
):
super(ProcessInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.name_properties_name = name_properties_name
self.href = href
self.mini_dump = mini_dump
self.is_profile_running = is_profile_running
self.is_iis_profile_running = is_iis_profile_running
self.iis_profile_timeout_in_seconds = iis_profile_timeout_in_seconds
self.parent = parent
self.children = children
self.threads = threads
self.open_file_handles = open_file_handles
self.modules = modules
self.file_name = file_name
self.command_line = command_line
self.user_name = user_name
self.handle_count = handle_count
self.module_count = module_count
self.thread_count = thread_count
self.start_time = start_time
self.total_processor_time = total_processor_time
self.user_processor_time = user_processor_time
self.privileged_processor_time = privileged_processor_time
self.working_set64 = working_set64
self.peak_working_set64 = peak_working_set64
self.private_memory_size64 = private_memory_size64
self.virtual_memory_size64 = virtual_memory_size64
self.peak_virtual_memory_size64 = peak_virtual_memory_size64
self.paged_system_memory_size64 = paged_system_memory_size64
self.nonpaged_system_memory_size64 = nonpaged_system_memory_size64
self.paged_memory_size64 = paged_memory_size64
self.peak_paged_memory_size64 = peak_paged_memory_size64
self.time_stamp = time_stamp
self.environment_variables = environment_variables
self.is_scm_site = is_scm_site
self.is_web_job = is_web_job
self.description = description
class ProcessInfoCollection(msrest.serialization.Model):
"""Collection of Kudu process information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessInfo"],
**kwargs
):
super(ProcessInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ProcessModuleInfo(ProxyOnlyResource):
"""Process Module Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param base_address: Base address. Used as the module identifier in the ARM resource URI.
:type base_address: str
:param file_name: File name.
:type file_name: str
:param href: HRef URI.
:type href: str
:param file_path: File path.
:type file_path: str
:param module_memory_size: Module memory size.
:type module_memory_size: int
:param file_version: File version.
:type file_version: str
:param file_description: File description.
:type file_description: str
:param product: Product name.
:type product: str
:param product_version: Product version.
:type product_version: str
:param is_debug: Is this a debug build of the module?
:type is_debug: bool
:param language: Module language (locale).
:type language: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'base_address': {'key': 'properties.baseAddress', 'type': 'str'},
'file_name': {'key': 'properties.fileName', 'type': 'str'},
'href': {'key': 'properties.href', 'type': 'str'},
'file_path': {'key': 'properties.filePath', 'type': 'str'},
'module_memory_size': {'key': 'properties.moduleMemorySize', 'type': 'int'},
'file_version': {'key': 'properties.fileVersion', 'type': 'str'},
'file_description': {'key': 'properties.fileDescription', 'type': 'str'},
'product': {'key': 'properties.product', 'type': 'str'},
'product_version': {'key': 'properties.productVersion', 'type': 'str'},
'is_debug': {'key': 'properties.isDebug', 'type': 'bool'},
'language': {'key': 'properties.language', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
base_address: Optional[str] = None,
file_name: Optional[str] = None,
href: Optional[str] = None,
file_path: Optional[str] = None,
module_memory_size: Optional[int] = None,
file_version: Optional[str] = None,
file_description: Optional[str] = None,
product: Optional[str] = None,
product_version: Optional[str] = None,
is_debug: Optional[bool] = None,
language: Optional[str] = None,
**kwargs
):
super(ProcessModuleInfo, self).__init__(kind=kind, **kwargs)
self.base_address = base_address
self.file_name = file_name
self.href = href
self.file_path = file_path
self.module_memory_size = module_memory_size
self.file_version = file_version
self.file_description = file_description
self.product = product
self.product_version = product_version
self.is_debug = is_debug
self.language = language
class ProcessModuleInfoCollection(msrest.serialization.Model):
"""Collection of Kudu thread information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessModuleInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessModuleInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessModuleInfo"],
**kwargs
):
super(ProcessModuleInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ProcessThreadInfo(ProxyOnlyResource):
"""Process Thread Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: ARM Identifier for deployment.
:type id_properties_id: int
:param href: HRef URI.
:type href: str
:param process: Process URI.
:type process: str
:param start_address: Start address.
:type start_address: str
:param current_priority: Current thread priority.
:type current_priority: int
:param priority_level: Thread priority level.
:type priority_level: str
:param base_priority: Base priority.
:type base_priority: int
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param total_processor_time: Total processor time.
:type total_processor_time: str
:param user_processor_time: User processor time.
:type user_processor_time: str
:param priviledged_processor_time: Privileged processor time.
:type priviledged_processor_time: str
:param state: Thread state.
:type state: str
:param wait_reason: Wait reason.
:type wait_reason: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
'href': {'key': 'properties.href', 'type': 'str'},
'process': {'key': 'properties.process', 'type': 'str'},
'start_address': {'key': 'properties.startAddress', 'type': 'str'},
'current_priority': {'key': 'properties.currentPriority', 'type': 'int'},
'priority_level': {'key': 'properties.priorityLevel', 'type': 'str'},
'base_priority': {'key': 'properties.basePriority', 'type': 'int'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'total_processor_time': {'key': 'properties.totalProcessorTime', 'type': 'str'},
'user_processor_time': {'key': 'properties.userProcessorTime', 'type': 'str'},
'priviledged_processor_time': {'key': 'properties.priviledgedProcessorTime', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'wait_reason': {'key': 'properties.waitReason', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[int] = None,
href: Optional[str] = None,
process: Optional[str] = None,
start_address: Optional[str] = None,
current_priority: Optional[int] = None,
priority_level: Optional[str] = None,
base_priority: Optional[int] = None,
start_time: Optional[datetime.datetime] = None,
total_processor_time: Optional[str] = None,
user_processor_time: Optional[str] = None,
priviledged_processor_time: Optional[str] = None,
state: Optional[str] = None,
wait_reason: Optional[str] = None,
**kwargs
):
super(ProcessThreadInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.href = href
self.process = process
self.start_address = start_address
self.current_priority = current_priority
self.priority_level = priority_level
self.base_priority = base_priority
self.start_time = start_time
self.total_processor_time = total_processor_time
self.user_processor_time = user_processor_time
self.priviledged_processor_time = priviledged_processor_time
self.state = state
self.wait_reason = wait_reason
class ProcessThreadInfoCollection(msrest.serialization.Model):
"""Collection of Kudu thread information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ProcessThreadInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ProcessThreadInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ProcessThreadInfo"],
**kwargs
):
super(ProcessThreadInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
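
# Illustrative sketch (not part of the generated model surface): collection models
# such as ProcessThreadInfoCollection are paging envelopes. Only ``value`` is
# client-settable; ``next_link`` is read-only and is filled in by the service when a
# response page is deserialized.
def _example_thread_info_page(threads: List["ProcessThreadInfo"]) -> "ProcessThreadInfoCollection":
    page = ProcessThreadInfoCollection(value=threads)
    # page.next_link stays None here; it is only populated on deserialized responses.
    return page
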
class PublicCertificate(ProxyOnlyResource):
"""Public certificate object.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param blob: Public Certificate byte array.
:type blob: bytearray
:param public_certificate_location: Public Certificate Location. Possible values include:
"CurrentUserMy", "LocalMachineMy", "Unknown".
:type public_certificate_location: str or
~azure.mgmt.web.v2016_08_01.models.PublicCertificateLocation
:ivar thumbprint: Certificate Thumbprint.
:vartype thumbprint: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'thumbprint': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'blob': {'key': 'properties.blob', 'type': 'bytearray'},
'public_certificate_location': {'key': 'properties.publicCertificateLocation', 'type': 'str'},
'thumbprint': {'key': 'properties.thumbprint', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
blob: Optional[bytearray] = None,
public_certificate_location: Optional[Union[str, "PublicCertificateLocation"]] = None,
**kwargs
):
super(PublicCertificate, self).__init__(kind=kind, **kwargs)
self.blob = blob
self.public_certificate_location = public_certificate_location
self.thumbprint = None
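
# Illustrative sketch (values are placeholders): when uploading a public certificate,
# only ``blob`` and ``public_certificate_location`` are set by the caller; the
# ``thumbprint`` is computed by the service and returned read-only.
def _example_public_certificate(cer_bytes: bytes) -> "PublicCertificate":
    return PublicCertificate(
        blob=bytearray(cer_bytes),  # raw contents of a .cer file
        public_certificate_location="CurrentUserMy",  # one of the documented values
    )
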
class PublicCertificateCollection(msrest.serialization.Model):
"""Collection of public certificates.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.PublicCertificate]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[PublicCertificate]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["PublicCertificate"],
**kwargs
):
super(PublicCertificateCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class PushSettings(ProxyOnlyResource):
"""Push settings for the App.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param is_push_enabled: Gets or sets a flag indicating whether the Push endpoint is enabled.
:type is_push_enabled: bool
:param tag_whitelist_json: Gets or sets a JSON string containing a list of tags that are
whitelisted for use by the push registration endpoint.
:type tag_whitelist_json: str
:param tags_requiring_auth: Gets or sets a JSON string containing a list of tags that require
user authentication to be used in the push registration endpoint.
Tags can consist of alphanumeric characters and the following:
'_', '@', '#', '.', ':', '-'.
Validation should be performed at the PushRequestHandler.
:type tags_requiring_auth: str
:param dynamic_tags_json: Gets or sets a JSON string containing a list of dynamic tags that
will be evaluated from user claims in the push registration endpoint.
:type dynamic_tags_json: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'is_push_enabled': {'key': 'properties.isPushEnabled', 'type': 'bool'},
'tag_whitelist_json': {'key': 'properties.tagWhitelistJson', 'type': 'str'},
'tags_requiring_auth': {'key': 'properties.tagsRequiringAuth', 'type': 'str'},
'dynamic_tags_json': {'key': 'properties.dynamicTagsJson', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
is_push_enabled: Optional[bool] = None,
tag_whitelist_json: Optional[str] = None,
tags_requiring_auth: Optional[str] = None,
dynamic_tags_json: Optional[str] = None,
**kwargs
):
super(PushSettings, self).__init__(kind=kind, **kwargs)
self.is_push_enabled = is_push_enabled
self.tag_whitelist_json = tag_whitelist_json
self.tags_requiring_auth = tags_requiring_auth
self.dynamic_tags_json = dynamic_tags_json
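
# Illustrative sketch (tag names are hypothetical): the tag-related PushSettings
# fields carry JSON strings, so Python lists are serialized with ``json.dumps``
# before assignment. Calling ``serialize()`` (inherited from msrest's Model) would
# produce the nested wire shape, e.g. {"properties": {"isPushEnabled": true, ...}}.
def _example_push_settings() -> "PushSettings":
    import json

    return PushSettings(
        is_push_enabled=True,
        tag_whitelist_json=json.dumps(["news", "sports"]),
        tags_requiring_auth=json.dumps(["user:_all"]),  # tags may contain ':' and '_'
    )
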
class RampUpRule(msrest.serialization.Model):
"""Routing rules for ramp up testing. This rule allows to redirect static traffic % to a slot or to gradually change routing % based on performance.
    :param action_host_name: Hostname of a slot to which the traffic will be redirected if the
     rule decides to, e.g. myapp-stage.azurewebsites.net.
:type action_host_name: str
:param reroute_percentage: Percentage of the traffic which will be redirected to
:code:`<code>ActionHostName</code>`.
:type reroute_percentage: float
    :param change_step: In an auto ramp-up scenario, this is the step to add/remove from
     :code:`<code>ReroutePercentage</code>` until it reaches
     :code:`<code>MinReroutePercentage</code>` or :code:`<code>MaxReroutePercentage</code>`. Site
     metrics are checked every N minutes, as specified in
     :code:`<code>ChangeIntervalInMinutes</code>`. A custom decision algorithm can be provided in
     the TiPCallback site extension, whose URL can be specified in
     :code:`<code>ChangeDecisionCallbackUrl</code>`.
:type change_step: float
:param change_interval_in_minutes: Specifies interval in minutes to reevaluate
ReroutePercentage.
:type change_interval_in_minutes: int
:param min_reroute_percentage: Specifies lower boundary above which ReroutePercentage will
stay.
:type min_reroute_percentage: float
:param max_reroute_percentage: Specifies upper boundary below which ReroutePercentage will
stay.
:type max_reroute_percentage: float
    :param change_decision_callback_url: A custom decision algorithm can be provided in the
     TiPCallback site extension, whose URL can be specified. See the TiPCallback site extension
     for the scaffold and contracts.
https://www.siteextensions.net/packages/TiPCallback/.
:type change_decision_callback_url: str
    :param name: Name of the routing rule. The recommended name is that of the slot which will
     receive the traffic in the experiment.
:type name: str
"""
_attribute_map = {
'action_host_name': {'key': 'actionHostName', 'type': 'str'},
'reroute_percentage': {'key': 'reroutePercentage', 'type': 'float'},
'change_step': {'key': 'changeStep', 'type': 'float'},
'change_interval_in_minutes': {'key': 'changeIntervalInMinutes', 'type': 'int'},
'min_reroute_percentage': {'key': 'minReroutePercentage', 'type': 'float'},
'max_reroute_percentage': {'key': 'maxReroutePercentage', 'type': 'float'},
'change_decision_callback_url': {'key': 'changeDecisionCallbackUrl', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
}
def __init__(
self,
*,
action_host_name: Optional[str] = None,
reroute_percentage: Optional[float] = None,
change_step: Optional[float] = None,
change_interval_in_minutes: Optional[int] = None,
min_reroute_percentage: Optional[float] = None,
max_reroute_percentage: Optional[float] = None,
change_decision_callback_url: Optional[str] = None,
name: Optional[str] = None,
**kwargs
):
super(RampUpRule, self).__init__(**kwargs)
self.action_host_name = action_host_name
self.reroute_percentage = reroute_percentage
self.change_step = change_step
self.change_interval_in_minutes = change_interval_in_minutes
self.min_reroute_percentage = min_reroute_percentage
self.max_reroute_percentage = max_reroute_percentage
self.change_decision_callback_url = change_decision_callback_url
self.name = name
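
# Illustrative sketch (hostname and percentages are hypothetical): a ramp-up rule
# that starts by sending 10% of traffic to a staging slot and lets the service
# adjust in 5% steps every 60 minutes, staying between 5% and 50%.
def _example_ramp_up_rule() -> "RampUpRule":
    return RampUpRule(
        action_host_name="myapp-stage.azurewebsites.net",
        reroute_percentage=10.0,
        change_step=5.0,
        change_interval_in_minutes=60,
        min_reroute_percentage=5.0,
        max_reroute_percentage=50.0,
        name="myapp-stage",  # recommended: name the rule after the target slot
    )
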
class RelayServiceConnectionEntity(ProxyOnlyResource):
"""Hybrid Connection for an App Service app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param entity_name:
:type entity_name: str
:param entity_connection_string:
:type entity_connection_string: str
:param resource_type:
:type resource_type: str
:param resource_connection_string:
:type resource_connection_string: str
:param hostname:
:type hostname: str
:param port:
:type port: int
:param biztalk_uri:
:type biztalk_uri: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'entity_name': {'key': 'properties.entityName', 'type': 'str'},
'entity_connection_string': {'key': 'properties.entityConnectionString', 'type': 'str'},
'resource_type': {'key': 'properties.resourceType', 'type': 'str'},
'resource_connection_string': {'key': 'properties.resourceConnectionString', 'type': 'str'},
'hostname': {'key': 'properties.hostname', 'type': 'str'},
'port': {'key': 'properties.port', 'type': 'int'},
'biztalk_uri': {'key': 'properties.biztalkUri', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
entity_name: Optional[str] = None,
entity_connection_string: Optional[str] = None,
resource_type: Optional[str] = None,
resource_connection_string: Optional[str] = None,
hostname: Optional[str] = None,
port: Optional[int] = None,
biztalk_uri: Optional[str] = None,
**kwargs
):
super(RelayServiceConnectionEntity, self).__init__(kind=kind, **kwargs)
self.entity_name = entity_name
self.entity_connection_string = entity_connection_string
self.resource_type = resource_type
self.resource_connection_string = resource_connection_string
self.hostname = hostname
self.port = port
self.biztalk_uri = biztalk_uri
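
# Illustrative sketch (endpoint values are placeholders, and the resource type string
# is an assumption): a hybrid connection entity pointing an app at an on-premises
# host and port reachable through a Service Bus relay.
def _example_relay_connection() -> "RelayServiceConnectionEntity":
    return RelayServiceConnectionEntity(
        entity_name="my-hybrid-connection",
        resource_type="Microsoft.Relay/namespaces/hybridConnections",  # assumed value
        hostname="sql01.contoso.local",
        port=1433,
    )
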
class RequestsBasedTrigger(msrest.serialization.Model):
"""Trigger based on total requests.
:param count: Request Count.
:type count: int
:param time_interval: Time interval.
:type time_interval: str
"""
_attribute_map = {
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(RequestsBasedTrigger, self).__init__(**kwargs)
self.count = count
self.time_interval = time_interval
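
# Illustrative sketch: an auto-heal trigger that fires after 1000 requests within a
# five-minute window. The "hh:mm:ss" interval format is an assumption about how the
# service expects the ``time_interval`` string.
def _example_requests_trigger() -> "RequestsBasedTrigger":
    return RequestsBasedTrigger(count=1000, time_interval="00:05:00")
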
class ResourceMetric(msrest.serialization.Model):
"""Object representing a metric for any resource .
Variables are only populated by the server, and will be ignored when sending a request.
:ivar name: Name of metric.
:vartype name: ~azure.mgmt.web.v2016_08_01.models.ResourceMetricName
:ivar unit: Metric unit.
:vartype unit: str
    :ivar time_grain: Metric granularity. E.g. PT1H, PT5M, P1D.
:vartype time_grain: str
:ivar start_time: Metric start time.
:vartype start_time: ~datetime.datetime
:ivar end_time: Metric end time.
:vartype end_time: ~datetime.datetime
:ivar resource_id: Metric resource Id.
:vartype resource_id: str
:ivar id: Resource Id.
:vartype id: str
:ivar metric_values: Metric values.
:vartype metric_values: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricValue]
:ivar properties: Resource metric properties collection.
:vartype properties: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricProperty]
"""
_validation = {
'name': {'readonly': True},
'unit': {'readonly': True},
'time_grain': {'readonly': True},
'start_time': {'readonly': True},
'end_time': {'readonly': True},
'resource_id': {'readonly': True},
'id': {'readonly': True},
'metric_values': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'ResourceMetricName'},
'unit': {'key': 'unit', 'type': 'str'},
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
'resource_id': {'key': 'resourceId', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'metric_values': {'key': 'metricValues', 'type': '[ResourceMetricValue]'},
'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetric, self).__init__(**kwargs)
self.name = None
self.unit = None
self.time_grain = None
self.start_time = None
self.end_time = None
self.resource_id = None
self.id = None
self.metric_values = None
self.properties = None
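
# Illustrative sketch: every field of ResourceMetric is read-only, so instances are
# produced by deserializing a service response rather than constructed by hand. The
# ``data`` dict stands in for a JSON payload already parsed by the client pipeline.
def _example_deserialize_resource_metric(data: dict) -> "ResourceMetric":
    from msrest import Deserializer

    client_models = {
        "ResourceMetric": ResourceMetric,
        "ResourceMetricName": ResourceMetricName,
        "ResourceMetricValue": ResourceMetricValue,
        "ResourceMetricProperty": ResourceMetricProperty,
    }
    return Deserializer(client_models)("ResourceMetric", data)
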
class ResourceMetricAvailability(msrest.serialization.Model):
"""Metrics availability and retention.
Variables are only populated by the server, and will be ignored when sending a request.
    :ivar time_grain: Time grain.
:vartype time_grain: str
:ivar retention: Retention period for the current time grain.
:vartype retention: str
"""
_validation = {
'time_grain': {'readonly': True},
'retention': {'readonly': True},
}
_attribute_map = {
'time_grain': {'key': 'timeGrain', 'type': 'str'},
'retention': {'key': 'retention', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricAvailability, self).__init__(**kwargs)
self.time_grain = None
self.retention = None
class ResourceMetricCollection(msrest.serialization.Model):
"""Collection of metric responses.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetric]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceMetric]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ResourceMetric"],
**kwargs
):
super(ResourceMetricCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ResourceMetricDefinition(ProxyOnlyResource):
"""Metadata for the metrics.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Name of the metric.
:vartype name_properties_name: ~azure.mgmt.web.v2016_08_01.models.ResourceMetricName
:ivar unit: Unit of the metric.
:vartype unit: str
:ivar primary_aggregation_type: Primary aggregation type.
:vartype primary_aggregation_type: str
:ivar metric_availabilities: List of time grains supported for the metric together with
retention period.
:vartype metric_availabilities:
list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricAvailability]
:ivar resource_uri: Resource URI.
:vartype resource_uri: str
:ivar id_properties_id: Resource ID.
:vartype id_properties_id: str
:ivar properties: Resource metric definition properties.
:vartype properties: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
'unit': {'readonly': True},
'primary_aggregation_type': {'readonly': True},
'metric_availabilities': {'readonly': True},
'resource_uri': {'readonly': True},
'id_properties_id': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'ResourceMetricName'},
'unit': {'key': 'properties.unit', 'type': 'str'},
'primary_aggregation_type': {'key': 'properties.primaryAggregationType', 'type': 'str'},
'metric_availabilities': {'key': 'properties.metricAvailabilities', 'type': '[ResourceMetricAvailability]'},
'resource_uri': {'key': 'properties.resourceUri', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'properties': {'key': 'properties.properties', 'type': '{str}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(ResourceMetricDefinition, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.unit = None
self.primary_aggregation_type = None
self.metric_availabilities = None
self.resource_uri = None
self.id_properties_id = None
self.properties = None
class ResourceMetricDefinitionCollection(msrest.serialization.Model):
"""Collection of metric definitions.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricDefinition]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[ResourceMetricDefinition]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["ResourceMetricDefinition"],
**kwargs
):
super(ResourceMetricDefinitionCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class ResourceMetricName(msrest.serialization.Model):
"""Name of a metric for any resource .
Variables are only populated by the server, and will be ignored when sending a request.
:ivar value: metric name value.
:vartype value: str
:ivar localized_value: Localized metric name value.
:vartype localized_value: str
"""
_validation = {
'value': {'readonly': True},
'localized_value': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': 'str'},
'localized_value': {'key': 'localizedValue', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricName, self).__init__(**kwargs)
self.value = None
self.localized_value = None
class ResourceMetricProperty(msrest.serialization.Model):
"""Resource metric property.
:param key: Key for resource metric property.
:type key: str
:param value: Value of pair.
:type value: str
"""
_attribute_map = {
'key': {'key': 'key', 'type': 'str'},
'value': {'key': 'value', 'type': 'str'},
}
def __init__(
self,
*,
key: Optional[str] = None,
value: Optional[str] = None,
**kwargs
):
super(ResourceMetricProperty, self).__init__(**kwargs)
self.key = key
self.value = value
class ResourceMetricValue(msrest.serialization.Model):
"""Value of resource metric.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar timestamp: Value timestamp.
:vartype timestamp: str
:ivar average: Value average.
:vartype average: float
:ivar minimum: Value minimum.
:vartype minimum: float
:ivar maximum: Value maximum.
:vartype maximum: float
:ivar total: Value total.
:vartype total: float
:ivar count: Value count.
:vartype count: float
:ivar properties: Resource metric properties collection.
:vartype properties: list[~azure.mgmt.web.v2016_08_01.models.ResourceMetricProperty]
"""
_validation = {
'timestamp': {'readonly': True},
'average': {'readonly': True},
'minimum': {'readonly': True},
'maximum': {'readonly': True},
'total': {'readonly': True},
'count': {'readonly': True},
'properties': {'readonly': True},
}
_attribute_map = {
'timestamp': {'key': 'timestamp', 'type': 'str'},
'average': {'key': 'average', 'type': 'float'},
'minimum': {'key': 'minimum', 'type': 'float'},
'maximum': {'key': 'maximum', 'type': 'float'},
'total': {'key': 'total', 'type': 'float'},
'count': {'key': 'count', 'type': 'float'},
'properties': {'key': 'properties', 'type': '[ResourceMetricProperty]'},
}
def __init__(
self,
**kwargs
):
super(ResourceMetricValue, self).__init__(**kwargs)
self.timestamp = None
self.average = None
self.minimum = None
self.maximum = None
self.total = None
self.count = None
self.properties = None
class RestoreRequest(ProxyOnlyResource):
"""Description of a restore request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param storage_account_url: SAS URL to the container.
:type storage_account_url: str
:param blob_name: Name of a blob which contains the backup.
:type blob_name: str
    :param overwrite: :code:`<code>true</code>` if the restore operation can overwrite the target
     app; otherwise, :code:`<code>false</code>`. :code:`<code>true</code>` is needed if trying to
     restore over an existing app.
:type overwrite: bool
:param site_name: Name of an app.
:type site_name: str
:param databases: Collection of databases which should be restored. This list has to match the
list of databases included in the backup.
:type databases: list[~azure.mgmt.web.v2016_08_01.models.DatabaseBackupSetting]
    :param ignore_conflicting_host_names: Changes the logic used when restoring an app with custom
     domains. :code:`<code>true</code>` to remove custom domains automatically. If
     :code:`<code>false</code>`, custom domains are added to the app's object when it is being
     restored, but that might fail due to conflicts during the operation.
:type ignore_conflicting_host_names: bool
:param ignore_databases: Ignore the databases and only restore the site content.
:type ignore_databases: bool
:param app_service_plan: Specify app service plan that will own restored site.
:type app_service_plan: str
:param operation_type: Operation type. Possible values include: "Default", "Clone",
"Relocation", "Snapshot".
:type operation_type: str or ~azure.mgmt.web.v2016_08_01.models.BackupRestoreOperationType
:param adjust_connection_strings: :code:`<code>true</code>` if SiteConfig.ConnectionStrings
should be set in new app; otherwise, :code:`<code>false</code>`.
:type adjust_connection_strings: bool
:param hosting_environment: App Service Environment name, if needed (only when restoring an app
to an App Service Environment).
:type hosting_environment: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'storage_account_url': {'key': 'properties.storageAccountUrl', 'type': 'str'},
'blob_name': {'key': 'properties.blobName', 'type': 'str'},
'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
'site_name': {'key': 'properties.siteName', 'type': 'str'},
'databases': {'key': 'properties.databases', 'type': '[DatabaseBackupSetting]'},
'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
'ignore_databases': {'key': 'properties.ignoreDatabases', 'type': 'bool'},
'app_service_plan': {'key': 'properties.appServicePlan', 'type': 'str'},
'operation_type': {'key': 'properties.operationType', 'type': 'str'},
'adjust_connection_strings': {'key': 'properties.adjustConnectionStrings', 'type': 'bool'},
'hosting_environment': {'key': 'properties.hostingEnvironment', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
storage_account_url: Optional[str] = None,
blob_name: Optional[str] = None,
overwrite: Optional[bool] = None,
site_name: Optional[str] = None,
databases: Optional[List["DatabaseBackupSetting"]] = None,
ignore_conflicting_host_names: Optional[bool] = False,
ignore_databases: Optional[bool] = False,
app_service_plan: Optional[str] = None,
operation_type: Optional[Union[str, "BackupRestoreOperationType"]] = None,
adjust_connection_strings: Optional[bool] = None,
hosting_environment: Optional[str] = None,
**kwargs
):
super(RestoreRequest, self).__init__(kind=kind, **kwargs)
self.storage_account_url = storage_account_url
self.blob_name = blob_name
self.overwrite = overwrite
self.site_name = site_name
self.databases = databases
self.ignore_conflicting_host_names = ignore_conflicting_host_names
self.ignore_databases = ignore_databases
self.app_service_plan = app_service_plan
self.operation_type = operation_type
self.adjust_connection_strings = adjust_connection_strings
self.hosting_environment = hosting_environment
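
# Illustrative sketch (SAS URL and blob name are placeholders): a minimal in-place
# restore from a backup blob, overwriting the existing app and skipping database
# restore.
def _example_restore_request(sas_url: str) -> "RestoreRequest":
    return RestoreRequest(
        storage_account_url=sas_url,  # SAS URL to the container holding the backup
        blob_name="myapp-backup.zip",
        overwrite=True,  # required when restoring over an existing app
        ignore_databases=True,  # restore site content only
        operation_type="Default",
    )
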
class RestoreResponse(ProxyOnlyResource):
"""Response for an app restore request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
    :ivar operation_id: When the server starts the restore process, it will return an operation ID
     identifying that particular restore operation.
:vartype operation_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(RestoreResponse, self).__init__(kind=kind, **kwargs)
self.operation_id = None
class Site(Resource):
"""A web app, a mobile app backend, or an API app.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:param location: Required. Resource Location.
:type location: str
:ivar type: Resource type.
:vartype type: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param identity: Managed service identity.
:type identity: ~azure.mgmt.web.v2016_08_01.models.ManagedServiceIdentity
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota usage. Read-only.
Possible values include: "Normal", "Exceeded".
:vartype usage_state: str or ~azure.mgmt.web.v2016_08_01.models.UsageState
:param enabled: :code:`<code>true</code>` if the app is enabled; otherwise,
:code:`<code>false</code>`. Setting this value to false disables the app (takes the app
offline).
:type enabled: bool
    :ivar enabled_host_names: Enabled hostnames for the app. Hostnames need to be assigned (see
     HostNames) AND enabled. Otherwise, the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for the app. Possible
values include: "Normal", "Limited", "DisasterRecoveryMode".
:vartype availability_state: str or ~azure.mgmt.web.v2016_08_01.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the SSL bindings for app's
hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.v2016_08_01.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan, formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: :code:`<code>true</code>` if reserved; otherwise, :code:`<code>false</code>`.
:type reserved: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC. Read-only.
:vartype last_modified_time_utc: ~datetime.datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.v2016_08_01.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames associated with the app.
Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: :code:`<code>true</code>` to stop SCM (KUDU) site when the app is
stopped; otherwise, :code:`<code>false</code>`. The default is :code:`<code>false</code>`.
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the app.
:type hosting_environment_profile: ~azure.mgmt.web.v2016_08_01.models.HostingEnvironmentProfile
:param client_affinity_enabled: :code:`<code>true</code>` to enable client affinity;
:code:`<code>false</code>` to stop sending session affinity cookies, which route client
requests in the same session to the same instance. Default is :code:`<code>true</code>`.
:type client_affinity_enabled: bool
:param client_cert_enabled: :code:`<code>true</code>` to enable client certificate
authentication (TLS mutual authentication); otherwise, :code:`<code>false</code>`. Default is
:code:`<code>false</code>`.
:type client_cert_enabled: bool
:param host_names_disabled: :code:`<code>true</code>` to disable the public hostnames of the
app; otherwise, :code:`<code>false</code>`.
     If :code:`<code>true</code>`, the app is only accessible via the API Management process.
:type host_names_disabled: bool
    :ivar outbound_ip_addresses: List of IP addresses that the app uses for outbound connections
     (e.g. database access). Includes VIPs from tenants that the site can be hosted with under the
     current settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app uses for outbound
connections (e.g. database access). Includes VIPs from all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota (applicable on dynamic
apps only).
:type daily_memory_time_quota: int
    :ivar suspended_till: Time until which the app is suspended, in case the memory-time quota is
     exceeded.
:vartype suspended_till: ~datetime.datetime
:ivar max_number_of_workers: Maximum number of workers.
This only applies to Functions container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned from a source app.
:type cloning_info: ~azure.mgmt.web.v2016_08_01.models.CloningInfo
:param snapshot_info: If specified during app creation, the app is created from a previous
snapshot.
:type snapshot_info: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryRequest
:ivar resource_group: Name of the resource group the app belongs to. Read-only.
:vartype resource_group: str
:ivar is_default_container: :code:`<code>true</code>` if the app is a default container;
otherwise, :code:`<code>false</code>`.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.v2016_08_01.models.SlotSwapStatus
    :param https_only: HttpsOnly: configures a web site to accept only HTTPS requests, and issues
     redirects for HTTP requests.
:type https_only: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'location': {'required': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'identity': {'key': 'identity', 'type': 'ManagedServiceIdentity'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'str'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'snapshot_info': {'key': 'properties.snapshotInfo', 'type': 'SnapshotRecoveryRequest'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
}
def __init__(
self,
*,
location: str,
kind: Optional[str] = None,
tags: Optional[Dict[str, str]] = None,
identity: Optional["ManagedServiceIdentity"] = None,
enabled: Optional[bool] = None,
host_name_ssl_states: Optional[List["HostNameSslState"]] = None,
server_farm_id: Optional[str] = None,
reserved: Optional[bool] = False,
site_config: Optional["SiteConfig"] = None,
scm_site_also_stopped: Optional[bool] = False,
hosting_environment_profile: Optional["HostingEnvironmentProfile"] = None,
client_affinity_enabled: Optional[bool] = None,
client_cert_enabled: Optional[bool] = None,
host_names_disabled: Optional[bool] = None,
container_size: Optional[int] = None,
daily_memory_time_quota: Optional[int] = None,
cloning_info: Optional["CloningInfo"] = None,
snapshot_info: Optional["SnapshotRecoveryRequest"] = None,
https_only: Optional[bool] = None,
**kwargs
):
super(Site, self).__init__(kind=kind, location=location, tags=tags, **kwargs)
self.identity = identity
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = enabled
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = host_name_ssl_states
self.server_farm_id = server_farm_id
self.reserved = reserved
self.last_modified_time_utc = None
self.site_config = site_config
self.traffic_manager_host_names = None
self.scm_site_also_stopped = scm_site_also_stopped
self.target_swap_slot = None
self.hosting_environment_profile = hosting_environment_profile
self.client_affinity_enabled = client_affinity_enabled
self.client_cert_enabled = client_cert_enabled
self.host_names_disabled = host_names_disabled
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = container_size
self.daily_memory_time_quota = daily_memory_time_quota
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = cloning_info
self.snapshot_info = snapshot_info
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = https_only
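
# Illustrative sketch (resource IDs are placeholders): the minimal client-settable
# surface of a Site is its required location plus the App Service plan it runs on;
# most other fields are populated by the service on read.
def _example_site(subscription_id: str, group: str, plan: str) -> "Site":
    return Site(
        location="West US",
        server_farm_id=(
            "/subscriptions/{}/resourceGroups/{}/providers/"
            "Microsoft.Web/serverfarms/{}".format(subscription_id, group, plan)
        ),
        https_only=True,  # redirect plain-HTTP requests to HTTPS
    )
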
class SiteAuthSettings(ProxyOnlyResource):
"""Configuration settings for the Azure App Service Authentication / Authorization feature.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param enabled: :code:`<code>true</code>` if the Authentication / Authorization feature is
enabled for the current app; otherwise, :code:`<code>false</code>`.
:type enabled: bool
:param runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use
for the current app.
The setting in this value can control the behavior of certain features in the Authentication /
Authorization module.
:type runtime_version: str
:param unauthenticated_client_action: The action to take when an unauthenticated client
attempts to access the app. Possible values include: "RedirectToLoginPage", "AllowAnonymous".
:type unauthenticated_client_action: str or
~azure.mgmt.web.v2016_08_01.models.UnauthenticatedClientAction
:param token_store_enabled: :code:`<code>true</code>` to durably store platform-specific
security tokens that are obtained during login flows; otherwise, :code:`<code>false</code>`.
The default is :code:`<code>false</code>`.
:type token_store_enabled: bool
:param allowed_external_redirect_urls: External URLs that can be redirected to as part of
logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:type allowed_external_redirect_urls: list[str]
:param default_provider: The default authentication provider to use when multiple providers are
configured.
This setting is only needed if multiple providers are configured and the unauthenticated
client
action is set to "RedirectToLoginPage". Possible values include: "AzureActiveDirectory",
"Facebook", "Google", "MicrosoftAccount", "Twitter".
:type default_provider: str or ~azure.mgmt.web.v2016_08_01.models.BuiltInAuthenticationProvider
:param token_refresh_extension_hours: The number of hours after session token expiration that a
session token can be used to
call the token refresh API. The default is 72 hours.
:type token_refresh_extension_hours: float
:param client_id: The Client ID of this relying party application, known as the client_id.
     This setting is required for enabling OpenID Connect authentication with Azure Active
Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html.
:type client_id: str
:param client_secret: The Client Secret of this relying party application (in Azure Active
Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth
flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html.
:type client_secret: str
:param issuer: The OpenID Connect Issuer URI that represents the entity which issues access
tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g.
https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery:
http://openid.net/specs/openid-connect-discovery-1_0.html.
:type issuer: str
:param allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the :code:`<code>ClientID</code>` value is always considered
an
allowed audience, regardless of this setting.
:type allowed_audiences: list[str]
:param additional_login_params: Login parameters to send to the OpenID Connect authorization
endpoint when
a user logs in. Each parameter must be in the form "key=value".
:type additional_login_params: list[str]
:param google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_client_id: str
:param google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_client_secret: str
:param google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google
Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as
default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/.
:type google_o_auth_scopes: list[str]
:param facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_app_id: str
:param facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_app_secret: str
:param facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook
Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login.
:type facebook_o_auth_scopes: list[str]
:param twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for
sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in.
:type twitter_consumer_key: str
:param twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used
for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in.
:type twitter_consumer_secret: str
:param microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used
for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm.
:type microsoft_account_client_id: str
:param microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the
app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm.
:type microsoft_account_client_secret: str
:param microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of
Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation:
https://msdn.microsoft.com/en-us/library/dn631845.aspx.
:type microsoft_account_o_auth_scopes: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'runtime_version': {'key': 'properties.runtimeVersion', 'type': 'str'},
'unauthenticated_client_action': {'key': 'properties.unauthenticatedClientAction', 'type': 'str'},
'token_store_enabled': {'key': 'properties.tokenStoreEnabled', 'type': 'bool'},
'allowed_external_redirect_urls': {'key': 'properties.allowedExternalRedirectUrls', 'type': '[str]'},
'default_provider': {'key': 'properties.defaultProvider', 'type': 'str'},
'token_refresh_extension_hours': {'key': 'properties.tokenRefreshExtensionHours', 'type': 'float'},
'client_id': {'key': 'properties.clientId', 'type': 'str'},
'client_secret': {'key': 'properties.clientSecret', 'type': 'str'},
'issuer': {'key': 'properties.issuer', 'type': 'str'},
'allowed_audiences': {'key': 'properties.allowedAudiences', 'type': '[str]'},
'additional_login_params': {'key': 'properties.additionalLoginParams', 'type': '[str]'},
'google_client_id': {'key': 'properties.googleClientId', 'type': 'str'},
'google_client_secret': {'key': 'properties.googleClientSecret', 'type': 'str'},
'google_o_auth_scopes': {'key': 'properties.googleOAuthScopes', 'type': '[str]'},
'facebook_app_id': {'key': 'properties.facebookAppId', 'type': 'str'},
'facebook_app_secret': {'key': 'properties.facebookAppSecret', 'type': 'str'},
'facebook_o_auth_scopes': {'key': 'properties.facebookOAuthScopes', 'type': '[str]'},
'twitter_consumer_key': {'key': 'properties.twitterConsumerKey', 'type': 'str'},
'twitter_consumer_secret': {'key': 'properties.twitterConsumerSecret', 'type': 'str'},
'microsoft_account_client_id': {'key': 'properties.microsoftAccountClientId', 'type': 'str'},
'microsoft_account_client_secret': {'key': 'properties.microsoftAccountClientSecret', 'type': 'str'},
'microsoft_account_o_auth_scopes': {'key': 'properties.microsoftAccountOAuthScopes', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
enabled: Optional[bool] = None,
runtime_version: Optional[str] = None,
unauthenticated_client_action: Optional[Union[str, "UnauthenticatedClientAction"]] = None,
token_store_enabled: Optional[bool] = None,
allowed_external_redirect_urls: Optional[List[str]] = None,
default_provider: Optional[Union[str, "BuiltInAuthenticationProvider"]] = None,
token_refresh_extension_hours: Optional[float] = None,
client_id: Optional[str] = None,
client_secret: Optional[str] = None,
issuer: Optional[str] = None,
allowed_audiences: Optional[List[str]] = None,
additional_login_params: Optional[List[str]] = None,
google_client_id: Optional[str] = None,
google_client_secret: Optional[str] = None,
google_o_auth_scopes: Optional[List[str]] = None,
facebook_app_id: Optional[str] = None,
facebook_app_secret: Optional[str] = None,
facebook_o_auth_scopes: Optional[List[str]] = None,
twitter_consumer_key: Optional[str] = None,
twitter_consumer_secret: Optional[str] = None,
microsoft_account_client_id: Optional[str] = None,
microsoft_account_client_secret: Optional[str] = None,
microsoft_account_o_auth_scopes: Optional[List[str]] = None,
**kwargs
):
super(SiteAuthSettings, self).__init__(kind=kind, **kwargs)
self.enabled = enabled
self.runtime_version = runtime_version
self.unauthenticated_client_action = unauthenticated_client_action
self.token_store_enabled = token_store_enabled
self.allowed_external_redirect_urls = allowed_external_redirect_urls
self.default_provider = default_provider
self.token_refresh_extension_hours = token_refresh_extension_hours
self.client_id = client_id
self.client_secret = client_secret
self.issuer = issuer
self.allowed_audiences = allowed_audiences
self.additional_login_params = additional_login_params
self.google_client_id = google_client_id
self.google_client_secret = google_client_secret
self.google_o_auth_scopes = google_o_auth_scopes
self.facebook_app_id = facebook_app_id
self.facebook_app_secret = facebook_app_secret
self.facebook_o_auth_scopes = facebook_o_auth_scopes
self.twitter_consumer_key = twitter_consumer_key
self.twitter_consumer_secret = twitter_consumer_secret
self.microsoft_account_client_id = microsoft_account_client_id
self.microsoft_account_client_secret = microsoft_account_client_secret
self.microsoft_account_o_auth_scopes = microsoft_account_o_auth_scopes
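
# Illustrative sketch (IDs are placeholders): App Service Authentication backed by
# Azure Active Directory, redirecting anonymous callers to the login page. The
# issuer format follows the docstring above (https://sts.windows.net/{tenant-guid}/).
def _example_aad_auth_settings(client_id: str, tenant_id: str) -> "SiteAuthSettings":
    return SiteAuthSettings(
        enabled=True,
        unauthenticated_client_action="RedirectToLoginPage",
        default_provider="AzureActiveDirectory",
        client_id=client_id,
        issuer="https://sts.windows.net/{}/".format(tenant_id),
        token_store_enabled=True,  # durably store tokens obtained during login
    )
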
class SiteCloneability(msrest.serialization.Model):
"""Represents whether or not an app is cloneable.
:param result: Name of app. Possible values include: "Cloneable", "PartiallyCloneable",
"NotCloneable".
:type result: str or ~azure.mgmt.web.v2016_08_01.models.CloneAbilityResult
    :param blocking_features: List of features enabled on the app that prevent cloning.
    :type blocking_features: list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
    :param unsupported_features: List of features enabled on the app that are non-blocking but
     cannot be cloned. The app can still be cloned, but the features in this list will not be set
     up on the cloned app.
:type unsupported_features: list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
:param blocking_characteristics: List of blocking application characteristics.
:type blocking_characteristics:
list[~azure.mgmt.web.v2016_08_01.models.SiteCloneabilityCriterion]
"""
_attribute_map = {
'result': {'key': 'result', 'type': 'str'},
'blocking_features': {'key': 'blockingFeatures', 'type': '[SiteCloneabilityCriterion]'},
'unsupported_features': {'key': 'unsupportedFeatures', 'type': '[SiteCloneabilityCriterion]'},
'blocking_characteristics': {'key': 'blockingCharacteristics', 'type': '[SiteCloneabilityCriterion]'},
}
def __init__(
self,
*,
result: Optional[Union[str, "CloneAbilityResult"]] = None,
blocking_features: Optional[List["SiteCloneabilityCriterion"]] = None,
unsupported_features: Optional[List["SiteCloneabilityCriterion"]] = None,
blocking_characteristics: Optional[List["SiteCloneabilityCriterion"]] = None,
**kwargs
):
super(SiteCloneability, self).__init__(**kwargs)
self.result = result
self.blocking_features = blocking_features
self.unsupported_features = unsupported_features
self.blocking_characteristics = blocking_characteristics
class SiteCloneabilityCriterion(msrest.serialization.Model):
"""An app cloneability criterion.
:param name: Name of criterion.
:type name: str
:param description: Description of criterion.
:type description: str
"""
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'description': {'key': 'description', 'type': 'str'},
}
def __init__(
self,
*,
name: Optional[str] = None,
description: Optional[str] = None,
**kwargs
):
super(SiteCloneabilityCriterion, self).__init__(**kwargs)
self.name = name
self.description = description
class SiteConfig(msrest.serialization.Model):
"""Configuration of an App Service app.
Variables are only populated by the server, and will be ignored when sending a request.
:param number_of_workers: Number of workers.
:type number_of_workers: int
:param default_documents: Default documents.
:type default_documents: list[str]
:param net_framework_version: .NET Framework version.
:type net_framework_version: str
:param php_version: Version of PHP.
:type php_version: str
:param python_version: Version of Python.
:type python_version: str
:param node_version: Version of Node.js.
:type node_version: str
:param linux_fx_version: Linux App Framework and version.
:type linux_fx_version: str
:param request_tracing_enabled: :code:`<code>true</code>` if request tracing is enabled;
otherwise, :code:`<code>false</code>`.
:type request_tracing_enabled: bool
:param request_tracing_expiration_time: Request tracing expiration time.
:type request_tracing_expiration_time: ~datetime.datetime
:param remote_debugging_enabled: :code:`<code>true</code>` if remote debugging is enabled;
otherwise, :code:`<code>false</code>`.
:type remote_debugging_enabled: bool
:param remote_debugging_version: Remote debugging version.
:type remote_debugging_version: str
:param http_logging_enabled: :code:`<code>true</code>` if HTTP logging is enabled; otherwise,
:code:`<code>false</code>`.
:type http_logging_enabled: bool
:param logs_directory_size_limit: HTTP logs directory size limit.
:type logs_directory_size_limit: int
:param detailed_error_logging_enabled: :code:`<code>true</code>` if detailed error logging is
enabled; otherwise, :code:`<code>false</code>`.
:type detailed_error_logging_enabled: bool
:param publishing_username: Publishing user name.
:type publishing_username: str
:param app_settings: Application settings.
:type app_settings: list[~azure.mgmt.web.v2016_08_01.models.NameValuePair]
:param connection_strings: Connection strings.
:type connection_strings: list[~azure.mgmt.web.v2016_08_01.models.ConnStringInfo]
:ivar machine_key: Site MachineKey.
:vartype machine_key: ~azure.mgmt.web.v2016_08_01.models.SiteMachineKey
:param handler_mappings: Handler mappings.
:type handler_mappings: list[~azure.mgmt.web.v2016_08_01.models.HandlerMapping]
:param document_root: Document root.
:type document_root: str
:param scm_type: SCM type. Possible values include: "None", "Dropbox", "Tfs", "LocalGit",
"GitHub", "CodePlexGit", "CodePlexHg", "BitbucketGit", "BitbucketHg", "ExternalGit",
"ExternalHg", "OneDrive", "VSO".
:type scm_type: str or ~azure.mgmt.web.v2016_08_01.models.ScmType
:param use32_bit_worker_process: :code:`<code>true</code>` to use 32-bit worker process;
otherwise, :code:`<code>false</code>`.
:type use32_bit_worker_process: bool
:param web_sockets_enabled: :code:`<code>true</code>` if WebSocket is enabled; otherwise,
:code:`<code>false</code>`.
:type web_sockets_enabled: bool
:param always_on: :code:`<code>true</code>` if Always On is enabled; otherwise,
:code:`<code>false</code>`.
:type always_on: bool
:param java_version: Java version.
:type java_version: str
:param java_container: Java container.
:type java_container: str
:param java_container_version: Java container version.
:type java_container_version: str
:param app_command_line: App command line to launch.
:type app_command_line: str
:param managed_pipeline_mode: Managed pipeline mode. Possible values include: "Integrated",
"Classic".
:type managed_pipeline_mode: str or ~azure.mgmt.web.v2016_08_01.models.ManagedPipelineMode
:param virtual_applications: Virtual applications.
:type virtual_applications: list[~azure.mgmt.web.v2016_08_01.models.VirtualApplication]
:param load_balancing: Site load balancing. Possible values include: "WeightedRoundRobin",
"LeastRequests", "LeastResponseTime", "WeightedTotalTraffic", "RequestHash".
:type load_balancing: str or ~azure.mgmt.web.v2016_08_01.models.SiteLoadBalancing
    :param experiments: This is a workaround for polymorphic types.
:type experiments: ~azure.mgmt.web.v2016_08_01.models.Experiments
:param limits: Site limits.
:type limits: ~azure.mgmt.web.v2016_08_01.models.SiteLimits
:param auto_heal_enabled: :code:`<code>true</code>` if Auto Heal is enabled; otherwise,
:code:`<code>false</code>`.
:type auto_heal_enabled: bool
:param auto_heal_rules: Auto Heal rules.
:type auto_heal_rules: ~azure.mgmt.web.v2016_08_01.models.AutoHealRules
:param tracing_options: Tracing options.
:type tracing_options: str
:param vnet_name: Virtual Network name.
:type vnet_name: str
:param cors: Cross-Origin Resource Sharing (CORS) settings.
:type cors: ~azure.mgmt.web.v2016_08_01.models.CorsSettings
:param push: Push endpoint settings.
:type push: ~azure.mgmt.web.v2016_08_01.models.PushSettings
:param api_definition: Information about the formal API definition for the app.
:type api_definition: ~azure.mgmt.web.v2016_08_01.models.ApiDefinitionInfo
:param auto_swap_slot_name: Auto-swap slot name.
:type auto_swap_slot_name: str
:param local_my_sql_enabled: :code:`<code>true</code>` to enable local MySQL; otherwise,
:code:`<code>false</code>`.
:type local_my_sql_enabled: bool
:param ip_security_restrictions: IP security restrictions.
:type ip_security_restrictions: list[~azure.mgmt.web.v2016_08_01.models.IpSecurityRestriction]
:param http20_enabled: Http20Enabled: configures a web site to allow clients to connect over
HTTP 2.0.
:type http20_enabled: bool
:param min_tls_version: MinTlsVersion: configures the minimum version of TLS required for SSL
requests. Possible values include: "1.0", "1.1", "1.2".
:type min_tls_version: str or ~azure.mgmt.web.v2016_08_01.models.SupportedTlsVersions
"""
_validation = {
'machine_key': {'readonly': True},
}
_attribute_map = {
'number_of_workers': {'key': 'numberOfWorkers', 'type': 'int'},
'default_documents': {'key': 'defaultDocuments', 'type': '[str]'},
'net_framework_version': {'key': 'netFrameworkVersion', 'type': 'str'},
'php_version': {'key': 'phpVersion', 'type': 'str'},
'python_version': {'key': 'pythonVersion', 'type': 'str'},
'node_version': {'key': 'nodeVersion', 'type': 'str'},
'linux_fx_version': {'key': 'linuxFxVersion', 'type': 'str'},
'request_tracing_enabled': {'key': 'requestTracingEnabled', 'type': 'bool'},
'request_tracing_expiration_time': {'key': 'requestTracingExpirationTime', 'type': 'iso-8601'},
'remote_debugging_enabled': {'key': 'remoteDebuggingEnabled', 'type': 'bool'},
'remote_debugging_version': {'key': 'remoteDebuggingVersion', 'type': 'str'},
'http_logging_enabled': {'key': 'httpLoggingEnabled', 'type': 'bool'},
'logs_directory_size_limit': {'key': 'logsDirectorySizeLimit', 'type': 'int'},
'detailed_error_logging_enabled': {'key': 'detailedErrorLoggingEnabled', 'type': 'bool'},
'publishing_username': {'key': 'publishingUsername', 'type': 'str'},
'app_settings': {'key': 'appSettings', 'type': '[NameValuePair]'},
'connection_strings': {'key': 'connectionStrings', 'type': '[ConnStringInfo]'},
'machine_key': {'key': 'machineKey', 'type': 'SiteMachineKey'},
'handler_mappings': {'key': 'handlerMappings', 'type': '[HandlerMapping]'},
'document_root': {'key': 'documentRoot', 'type': 'str'},
'scm_type': {'key': 'scmType', 'type': 'str'},
'use32_bit_worker_process': {'key': 'use32BitWorkerProcess', 'type': 'bool'},
'web_sockets_enabled': {'key': 'webSocketsEnabled', 'type': 'bool'},
'always_on': {'key': 'alwaysOn', 'type': 'bool'},
'java_version': {'key': 'javaVersion', 'type': 'str'},
'java_container': {'key': 'javaContainer', 'type': 'str'},
'java_container_version': {'key': 'javaContainerVersion', 'type': 'str'},
'app_command_line': {'key': 'appCommandLine', 'type': 'str'},
'managed_pipeline_mode': {'key': 'managedPipelineMode', 'type': 'str'},
'virtual_applications': {'key': 'virtualApplications', 'type': '[VirtualApplication]'},
'load_balancing': {'key': 'loadBalancing', 'type': 'str'},
'experiments': {'key': 'experiments', 'type': 'Experiments'},
'limits': {'key': 'limits', 'type': 'SiteLimits'},
'auto_heal_enabled': {'key': 'autoHealEnabled', 'type': 'bool'},
'auto_heal_rules': {'key': 'autoHealRules', 'type': 'AutoHealRules'},
'tracing_options': {'key': 'tracingOptions', 'type': 'str'},
'vnet_name': {'key': 'vnetName', 'type': 'str'},
'cors': {'key': 'cors', 'type': 'CorsSettings'},
'push': {'key': 'push', 'type': 'PushSettings'},
'api_definition': {'key': 'apiDefinition', 'type': 'ApiDefinitionInfo'},
'auto_swap_slot_name': {'key': 'autoSwapSlotName', 'type': 'str'},
'local_my_sql_enabled': {'key': 'localMySqlEnabled', 'type': 'bool'},
'ip_security_restrictions': {'key': 'ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'http20_enabled': {'key': 'http20Enabled', 'type': 'bool'},
'min_tls_version': {'key': 'minTlsVersion', 'type': 'str'},
}
def __init__(
self,
*,
number_of_workers: Optional[int] = None,
default_documents: Optional[List[str]] = None,
net_framework_version: Optional[str] = "v4.6",
php_version: Optional[str] = None,
python_version: Optional[str] = None,
node_version: Optional[str] = None,
linux_fx_version: Optional[str] = None,
request_tracing_enabled: Optional[bool] = None,
request_tracing_expiration_time: Optional[datetime.datetime] = None,
remote_debugging_enabled: Optional[bool] = None,
remote_debugging_version: Optional[str] = None,
http_logging_enabled: Optional[bool] = None,
logs_directory_size_limit: Optional[int] = None,
detailed_error_logging_enabled: Optional[bool] = None,
publishing_username: Optional[str] = None,
app_settings: Optional[List["NameValuePair"]] = None,
connection_strings: Optional[List["ConnStringInfo"]] = None,
handler_mappings: Optional[List["HandlerMapping"]] = None,
document_root: Optional[str] = None,
scm_type: Optional[Union[str, "ScmType"]] = None,
use32_bit_worker_process: Optional[bool] = None,
web_sockets_enabled: Optional[bool] = None,
always_on: Optional[bool] = None,
java_version: Optional[str] = None,
java_container: Optional[str] = None,
java_container_version: Optional[str] = None,
app_command_line: Optional[str] = None,
managed_pipeline_mode: Optional[Union[str, "ManagedPipelineMode"]] = None,
virtual_applications: Optional[List["VirtualApplication"]] = None,
load_balancing: Optional[Union[str, "SiteLoadBalancing"]] = None,
experiments: Optional["Experiments"] = None,
limits: Optional["SiteLimits"] = None,
auto_heal_enabled: Optional[bool] = None,
auto_heal_rules: Optional["AutoHealRules"] = None,
tracing_options: Optional[str] = None,
vnet_name: Optional[str] = None,
cors: Optional["CorsSettings"] = None,
push: Optional["PushSettings"] = None,
api_definition: Optional["ApiDefinitionInfo"] = None,
auto_swap_slot_name: Optional[str] = None,
local_my_sql_enabled: Optional[bool] = False,
ip_security_restrictions: Optional[List["IpSecurityRestriction"]] = None,
http20_enabled: Optional[bool] = True,
min_tls_version: Optional[Union[str, "SupportedTlsVersions"]] = None,
**kwargs
):
super(SiteConfig, self).__init__(**kwargs)
self.number_of_workers = number_of_workers
self.default_documents = default_documents
self.net_framework_version = net_framework_version
self.php_version = php_version
self.python_version = python_version
self.node_version = node_version
self.linux_fx_version = linux_fx_version
self.request_tracing_enabled = request_tracing_enabled
self.request_tracing_expiration_time = request_tracing_expiration_time
self.remote_debugging_enabled = remote_debugging_enabled
self.remote_debugging_version = remote_debugging_version
self.http_logging_enabled = http_logging_enabled
self.logs_directory_size_limit = logs_directory_size_limit
self.detailed_error_logging_enabled = detailed_error_logging_enabled
self.publishing_username = publishing_username
self.app_settings = app_settings
self.connection_strings = connection_strings
self.machine_key = None
self.handler_mappings = handler_mappings
self.document_root = document_root
self.scm_type = scm_type
self.use32_bit_worker_process = use32_bit_worker_process
self.web_sockets_enabled = web_sockets_enabled
self.always_on = always_on
self.java_version = java_version
self.java_container = java_container
self.java_container_version = java_container_version
self.app_command_line = app_command_line
self.managed_pipeline_mode = managed_pipeline_mode
self.virtual_applications = virtual_applications
self.load_balancing = load_balancing
self.experiments = experiments
self.limits = limits
self.auto_heal_enabled = auto_heal_enabled
self.auto_heal_rules = auto_heal_rules
self.tracing_options = tracing_options
self.vnet_name = vnet_name
self.cors = cors
self.push = push
self.api_definition = api_definition
self.auto_swap_slot_name = auto_swap_slot_name
self.local_my_sql_enabled = local_my_sql_enabled
self.ip_security_restrictions = ip_security_restrictions
self.http20_enabled = http20_enabled
self.min_tls_version = min_tls_version
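# --- Illustrative usage sketch (not part of the generated SDK) ---
# A minimal SiteConfig for a 64-bit, Always On app. All values below are
# hypothetical; any keyword left out stays None and is simply omitted from
# the serialized request payload.
def _example_site_config() -> "SiteConfig":
    return SiteConfig(
        number_of_workers=2,
        use32_bit_worker_process=False,  # run a 64-bit worker process
        always_on=True,
        web_sockets_enabled=True,
        http20_enabled=True,
        min_tls_version="1.2",  # one of the documented SupportedTlsVersions
    )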
class SiteConfigResource(ProxyOnlyResource):
"""Web app configuration ARM resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param number_of_workers: Number of workers.
:type number_of_workers: int
:param default_documents: Default documents.
:type default_documents: list[str]
:param net_framework_version: .NET Framework version.
:type net_framework_version: str
:param php_version: Version of PHP.
:type php_version: str
:param python_version: Version of Python.
:type python_version: str
:param node_version: Version of Node.js.
:type node_version: str
:param linux_fx_version: Linux App Framework and version.
:type linux_fx_version: str
:param request_tracing_enabled: :code:`<code>true</code>` if request tracing is enabled;
otherwise, :code:`<code>false</code>`.
:type request_tracing_enabled: bool
:param request_tracing_expiration_time: Request tracing expiration time.
:type request_tracing_expiration_time: ~datetime.datetime
:param remote_debugging_enabled: :code:`<code>true</code>` if remote debugging is enabled;
otherwise, :code:`<code>false</code>`.
:type remote_debugging_enabled: bool
:param remote_debugging_version: Remote debugging version.
:type remote_debugging_version: str
:param http_logging_enabled: :code:`<code>true</code>` if HTTP logging is enabled; otherwise,
:code:`<code>false</code>`.
:type http_logging_enabled: bool
:param logs_directory_size_limit: HTTP logs directory size limit.
:type logs_directory_size_limit: int
:param detailed_error_logging_enabled: :code:`<code>true</code>` if detailed error logging is
enabled; otherwise, :code:`<code>false</code>`.
:type detailed_error_logging_enabled: bool
:param publishing_username: Publishing user name.
:type publishing_username: str
:param app_settings: Application settings.
:type app_settings: list[~azure.mgmt.web.v2016_08_01.models.NameValuePair]
:param connection_strings: Connection strings.
:type connection_strings: list[~azure.mgmt.web.v2016_08_01.models.ConnStringInfo]
:ivar machine_key: Site MachineKey.
:vartype machine_key: ~azure.mgmt.web.v2016_08_01.models.SiteMachineKey
:param handler_mappings: Handler mappings.
:type handler_mappings: list[~azure.mgmt.web.v2016_08_01.models.HandlerMapping]
:param document_root: Document root.
:type document_root: str
:param scm_type: SCM type. Possible values include: "None", "Dropbox", "Tfs", "LocalGit",
"GitHub", "CodePlexGit", "CodePlexHg", "BitbucketGit", "BitbucketHg", "ExternalGit",
"ExternalHg", "OneDrive", "VSO".
:type scm_type: str or ~azure.mgmt.web.v2016_08_01.models.ScmType
:param use32_bit_worker_process: :code:`<code>true</code>` to use a 32-bit worker process;
otherwise, :code:`<code>false</code>`.
:type use32_bit_worker_process: bool
:param web_sockets_enabled: :code:`<code>true</code>` if WebSocket is enabled; otherwise,
:code:`<code>false</code>`.
:type web_sockets_enabled: bool
:param always_on: :code:`<code>true</code>` if Always On is enabled; otherwise,
:code:`<code>false</code>`.
:type always_on: bool
:param java_version: Java version.
:type java_version: str
:param java_container: Java container.
:type java_container: str
:param java_container_version: Java container version.
:type java_container_version: str
:param app_command_line: App command line to launch.
:type app_command_line: str
:param managed_pipeline_mode: Managed pipeline mode. Possible values include: "Integrated",
"Classic".
:type managed_pipeline_mode: str or ~azure.mgmt.web.v2016_08_01.models.ManagedPipelineMode
:param virtual_applications: Virtual applications.
:type virtual_applications: list[~azure.mgmt.web.v2016_08_01.models.VirtualApplication]
:param load_balancing: Site load balancing. Possible values include: "WeightedRoundRobin",
"LeastRequests", "LeastResponseTime", "WeightedTotalTraffic", "RequestHash".
:type load_balancing: str or ~azure.mgmt.web.v2016_08_01.models.SiteLoadBalancing
:param experiments: This is a workaround for polymorphic types.
:type experiments: ~azure.mgmt.web.v2016_08_01.models.Experiments
:param limits: Site limits.
:type limits: ~azure.mgmt.web.v2016_08_01.models.SiteLimits
:param auto_heal_enabled: :code:`<code>true</code>` if Auto Heal is enabled; otherwise,
:code:`<code>false</code>`.
:type auto_heal_enabled: bool
:param auto_heal_rules: Auto Heal rules.
:type auto_heal_rules: ~azure.mgmt.web.v2016_08_01.models.AutoHealRules
:param tracing_options: Tracing options.
:type tracing_options: str
:param vnet_name: Virtual Network name.
:type vnet_name: str
:param cors: Cross-Origin Resource Sharing (CORS) settings.
:type cors: ~azure.mgmt.web.v2016_08_01.models.CorsSettings
:param push: Push endpoint settings.
:type push: ~azure.mgmt.web.v2016_08_01.models.PushSettings
:param api_definition: Information about the formal API definition for the app.
:type api_definition: ~azure.mgmt.web.v2016_08_01.models.ApiDefinitionInfo
:param auto_swap_slot_name: Auto-swap slot name.
:type auto_swap_slot_name: str
:param local_my_sql_enabled: :code:`<code>true</code>` to enable local MySQL; otherwise,
:code:`<code>false</code>`.
:type local_my_sql_enabled: bool
:param ip_security_restrictions: IP security restrictions.
:type ip_security_restrictions: list[~azure.mgmt.web.v2016_08_01.models.IpSecurityRestriction]
:param http20_enabled: Http20Enabled: configures a web site to allow clients to connect over
HTTP 2.0.
:type http20_enabled: bool
:param min_tls_version: MinTlsVersion: configures the minimum version of TLS required for SSL
requests. Possible values include: "1.0", "1.1", "1.2".
:type min_tls_version: str or ~azure.mgmt.web.v2016_08_01.models.SupportedTlsVersions
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'machine_key': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'number_of_workers': {'key': 'properties.numberOfWorkers', 'type': 'int'},
'default_documents': {'key': 'properties.defaultDocuments', 'type': '[str]'},
'net_framework_version': {'key': 'properties.netFrameworkVersion', 'type': 'str'},
'php_version': {'key': 'properties.phpVersion', 'type': 'str'},
'python_version': {'key': 'properties.pythonVersion', 'type': 'str'},
'node_version': {'key': 'properties.nodeVersion', 'type': 'str'},
'linux_fx_version': {'key': 'properties.linuxFxVersion', 'type': 'str'},
'request_tracing_enabled': {'key': 'properties.requestTracingEnabled', 'type': 'bool'},
'request_tracing_expiration_time': {'key': 'properties.requestTracingExpirationTime', 'type': 'iso-8601'},
'remote_debugging_enabled': {'key': 'properties.remoteDebuggingEnabled', 'type': 'bool'},
'remote_debugging_version': {'key': 'properties.remoteDebuggingVersion', 'type': 'str'},
'http_logging_enabled': {'key': 'properties.httpLoggingEnabled', 'type': 'bool'},
'logs_directory_size_limit': {'key': 'properties.logsDirectorySizeLimit', 'type': 'int'},
'detailed_error_logging_enabled': {'key': 'properties.detailedErrorLoggingEnabled', 'type': 'bool'},
'publishing_username': {'key': 'properties.publishingUsername', 'type': 'str'},
'app_settings': {'key': 'properties.appSettings', 'type': '[NameValuePair]'},
'connection_strings': {'key': 'properties.connectionStrings', 'type': '[ConnStringInfo]'},
'machine_key': {'key': 'properties.machineKey', 'type': 'SiteMachineKey'},
'handler_mappings': {'key': 'properties.handlerMappings', 'type': '[HandlerMapping]'},
'document_root': {'key': 'properties.documentRoot', 'type': 'str'},
'scm_type': {'key': 'properties.scmType', 'type': 'str'},
'use32_bit_worker_process': {'key': 'properties.use32BitWorkerProcess', 'type': 'bool'},
'web_sockets_enabled': {'key': 'properties.webSocketsEnabled', 'type': 'bool'},
'always_on': {'key': 'properties.alwaysOn', 'type': 'bool'},
'java_version': {'key': 'properties.javaVersion', 'type': 'str'},
'java_container': {'key': 'properties.javaContainer', 'type': 'str'},
'java_container_version': {'key': 'properties.javaContainerVersion', 'type': 'str'},
'app_command_line': {'key': 'properties.appCommandLine', 'type': 'str'},
'managed_pipeline_mode': {'key': 'properties.managedPipelineMode', 'type': 'str'},
'virtual_applications': {'key': 'properties.virtualApplications', 'type': '[VirtualApplication]'},
'load_balancing': {'key': 'properties.loadBalancing', 'type': 'str'},
'experiments': {'key': 'properties.experiments', 'type': 'Experiments'},
'limits': {'key': 'properties.limits', 'type': 'SiteLimits'},
'auto_heal_enabled': {'key': 'properties.autoHealEnabled', 'type': 'bool'},
'auto_heal_rules': {'key': 'properties.autoHealRules', 'type': 'AutoHealRules'},
'tracing_options': {'key': 'properties.tracingOptions', 'type': 'str'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'cors': {'key': 'properties.cors', 'type': 'CorsSettings'},
'push': {'key': 'properties.push', 'type': 'PushSettings'},
'api_definition': {'key': 'properties.apiDefinition', 'type': 'ApiDefinitionInfo'},
'auto_swap_slot_name': {'key': 'properties.autoSwapSlotName', 'type': 'str'},
'local_my_sql_enabled': {'key': 'properties.localMySqlEnabled', 'type': 'bool'},
'ip_security_restrictions': {'key': 'properties.ipSecurityRestrictions', 'type': '[IpSecurityRestriction]'},
'http20_enabled': {'key': 'properties.http20Enabled', 'type': 'bool'},
'min_tls_version': {'key': 'properties.minTlsVersion', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
number_of_workers: Optional[int] = None,
default_documents: Optional[List[str]] = None,
net_framework_version: Optional[str] = "v4.6",
php_version: Optional[str] = None,
python_version: Optional[str] = None,
node_version: Optional[str] = None,
linux_fx_version: Optional[str] = None,
request_tracing_enabled: Optional[bool] = None,
request_tracing_expiration_time: Optional[datetime.datetime] = None,
remote_debugging_enabled: Optional[bool] = None,
remote_debugging_version: Optional[str] = None,
http_logging_enabled: Optional[bool] = None,
logs_directory_size_limit: Optional[int] = None,
detailed_error_logging_enabled: Optional[bool] = None,
publishing_username: Optional[str] = None,
app_settings: Optional[List["NameValuePair"]] = None,
connection_strings: Optional[List["ConnStringInfo"]] = None,
handler_mappings: Optional[List["HandlerMapping"]] = None,
document_root: Optional[str] = None,
scm_type: Optional[Union[str, "ScmType"]] = None,
use32_bit_worker_process: Optional[bool] = None,
web_sockets_enabled: Optional[bool] = None,
always_on: Optional[bool] = None,
java_version: Optional[str] = None,
java_container: Optional[str] = None,
java_container_version: Optional[str] = None,
app_command_line: Optional[str] = None,
managed_pipeline_mode: Optional[Union[str, "ManagedPipelineMode"]] = None,
virtual_applications: Optional[List["VirtualApplication"]] = None,
load_balancing: Optional[Union[str, "SiteLoadBalancing"]] = None,
experiments: Optional["Experiments"] = None,
limits: Optional["SiteLimits"] = None,
auto_heal_enabled: Optional[bool] = None,
auto_heal_rules: Optional["AutoHealRules"] = None,
tracing_options: Optional[str] = None,
vnet_name: Optional[str] = None,
cors: Optional["CorsSettings"] = None,
push: Optional["PushSettings"] = None,
api_definition: Optional["ApiDefinitionInfo"] = None,
auto_swap_slot_name: Optional[str] = None,
local_my_sql_enabled: Optional[bool] = False,
ip_security_restrictions: Optional[List["IpSecurityRestriction"]] = None,
http20_enabled: Optional[bool] = True,
min_tls_version: Optional[Union[str, "SupportedTlsVersions"]] = None,
**kwargs
):
super(SiteConfigResource, self).__init__(kind=kind, **kwargs)
self.number_of_workers = number_of_workers
self.default_documents = default_documents
self.net_framework_version = net_framework_version
self.php_version = php_version
self.python_version = python_version
self.node_version = node_version
self.linux_fx_version = linux_fx_version
self.request_tracing_enabled = request_tracing_enabled
self.request_tracing_expiration_time = request_tracing_expiration_time
self.remote_debugging_enabled = remote_debugging_enabled
self.remote_debugging_version = remote_debugging_version
self.http_logging_enabled = http_logging_enabled
self.logs_directory_size_limit = logs_directory_size_limit
self.detailed_error_logging_enabled = detailed_error_logging_enabled
self.publishing_username = publishing_username
self.app_settings = app_settings
self.connection_strings = connection_strings
self.machine_key = None
self.handler_mappings = handler_mappings
self.document_root = document_root
self.scm_type = scm_type
self.use32_bit_worker_process = use32_bit_worker_process
self.web_sockets_enabled = web_sockets_enabled
self.always_on = always_on
self.java_version = java_version
self.java_container = java_container
self.java_container_version = java_container_version
self.app_command_line = app_command_line
self.managed_pipeline_mode = managed_pipeline_mode
self.virtual_applications = virtual_applications
self.load_balancing = load_balancing
self.experiments = experiments
self.limits = limits
self.auto_heal_enabled = auto_heal_enabled
self.auto_heal_rules = auto_heal_rules
self.tracing_options = tracing_options
self.vnet_name = vnet_name
self.cors = cors
self.push = push
self.api_definition = api_definition
self.auto_swap_slot_name = auto_swap_slot_name
self.local_my_sql_enabled = local_my_sql_enabled
self.ip_security_restrictions = ip_security_restrictions
self.http20_enabled = http20_enabled
self.min_tls_version = min_tls_version
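# Illustrative sketch (hypothetical values): SiteConfigResource carries the
# same settings as SiteConfig, wrapped as an ARM proxy resource; id/name/type
# are read-only and server-populated, so only writable settings are passed.
def _example_site_config_resource() -> "SiteConfigResource":
    return SiteConfigResource(always_on=True, min_tls_version="1.2")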
class SiteConfigResourceCollection(msrest.serialization.Model):
"""Collection of site configurations.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteConfigResource]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteConfigResource]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteConfigResource"],
**kwargs
):
super(SiteConfigResourceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
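# Illustrative sketch: the *Collection models in this module share one shape,
# a required `value` list plus a read-only `next_link` the server fills in
# for paging. Building one locally (e.g. for tests) only needs `value`:
def _example_site_config_collection() -> "SiteConfigResourceCollection":
    return SiteConfigResourceCollection(value=[_example_site_config_resource()])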
class SiteConfigurationSnapshotInfo(ProxyOnlyResource):
"""A snapshot of a web app configuration.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar time: The time the snapshot was taken.
:vartype time: ~datetime.datetime
:ivar id_properties_id: The id of the snapshot.
:vartype id_properties_id: int
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'time': {'readonly': True},
'id_properties_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'time': {'key': 'properties.time', 'type': 'iso-8601'},
'id_properties_id': {'key': 'properties.id', 'type': 'int'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SiteConfigurationSnapshotInfo, self).__init__(kind=kind, **kwargs)
self.time = None
self.id_properties_id = None
class SiteConfigurationSnapshotInfoCollection(msrest.serialization.Model):
"""Collection of metadata for the app configuration snapshots that can be restored.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteConfigurationSnapshotInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteConfigurationSnapshotInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteConfigurationSnapshotInfo"],
**kwargs
):
super(SiteConfigurationSnapshotInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SiteExtensionInfo(ProxyOnlyResource):
"""Site Extension Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Site extension ID.
:type id_properties_id: str
:param title: Site extension title.
:type title: str
:param type_properties_type: Site extension type. Possible values include: "Gallery",
"WebRoot".
:type type_properties_type: str or ~azure.mgmt.web.v2016_08_01.models.SiteExtensionType
:param summary: Summary description.
:type summary: str
:param description: Detailed description.
:type description: str
:param version: Version information.
:type version: str
:param extension_url: Extension URL.
:type extension_url: str
:param project_url: Project URL.
:type project_url: str
:param icon_url: Icon URL.
:type icon_url: str
:param license_url: License URL.
:type license_url: str
:param feed_url: Feed URL.
:type feed_url: str
:param authors: List of authors.
:type authors: list[str]
:param installation_args: Installer command line parameters.
:type installation_args: str
:param published_date_time: Published timestamp.
:type published_date_time: ~datetime.datetime
:param download_count: Count of downloads.
:type download_count: int
:param local_is_latest_version: :code:`<code>true</code>` if the local version is the latest
version; :code:`<code>false</code>` otherwise.
:type local_is_latest_version: bool
:param local_path: Local path.
:type local_path: str
:param installed_date_time: Installed timestamp.
:type installed_date_time: ~datetime.datetime
:param provisioning_state: Provisioning state.
:type provisioning_state: str
:param comment: Site Extension comment.
:type comment: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'title': {'key': 'properties.title', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
'summary': {'key': 'properties.summary', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
'version': {'key': 'properties.version', 'type': 'str'},
'extension_url': {'key': 'properties.extensionUrl', 'type': 'str'},
'project_url': {'key': 'properties.projectUrl', 'type': 'str'},
'icon_url': {'key': 'properties.iconUrl', 'type': 'str'},
'license_url': {'key': 'properties.licenseUrl', 'type': 'str'},
'feed_url': {'key': 'properties.feedUrl', 'type': 'str'},
'authors': {'key': 'properties.authors', 'type': '[str]'},
'installation_args': {'key': 'properties.installationArgs', 'type': 'str'},
'published_date_time': {'key': 'properties.publishedDateTime', 'type': 'iso-8601'},
'download_count': {'key': 'properties.downloadCount', 'type': 'int'},
'local_is_latest_version': {'key': 'properties.localIsLatestVersion', 'type': 'bool'},
'local_path': {'key': 'properties.localPath', 'type': 'str'},
'installed_date_time': {'key': 'properties.installedDateTime', 'type': 'iso-8601'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
'comment': {'key': 'properties.comment', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
title: Optional[str] = None,
type_properties_type: Optional[Union[str, "SiteExtensionType"]] = None,
summary: Optional[str] = None,
description: Optional[str] = None,
version: Optional[str] = None,
extension_url: Optional[str] = None,
project_url: Optional[str] = None,
icon_url: Optional[str] = None,
license_url: Optional[str] = None,
feed_url: Optional[str] = None,
authors: Optional[List[str]] = None,
installation_args: Optional[str] = None,
published_date_time: Optional[datetime.datetime] = None,
download_count: Optional[int] = None,
local_is_latest_version: Optional[bool] = None,
local_path: Optional[str] = None,
installed_date_time: Optional[datetime.datetime] = None,
provisioning_state: Optional[str] = None,
comment: Optional[str] = None,
**kwargs
):
super(SiteExtensionInfo, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.title = title
self.type_properties_type = type_properties_type
self.summary = summary
self.description = description
self.version = version
self.extension_url = extension_url
self.project_url = project_url
self.icon_url = icon_url
self.license_url = license_url
self.feed_url = feed_url
self.authors = authors
self.installation_args = installation_args
self.published_date_time = published_date_time
self.download_count = download_count
self.local_is_latest_version = local_is_latest_version
self.local_path = local_path
self.installed_date_time = installed_date_time
self.provisioning_state = provisioning_state
self.comment = comment
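# Illustrative sketch: SiteExtensionInfo is mostly read back from the service
# when listing installed Kudu site extensions. A small helper that formats an
# entry using only attributes defined on the class above (the formatting
# itself is arbitrary):
def _describe_site_extension(ext: "SiteExtensionInfo") -> str:
    return "{} {} - downloads: {}".format(
        ext.title or ext.id_properties_id,
        ext.version or "(unknown version)",
        ext.download_count if ext.download_count is not None else "n/a",
    )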
class SiteExtensionInfoCollection(msrest.serialization.Model):
"""Collection of Kudu site extension information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteExtensionInfo]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteExtensionInfo]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteExtensionInfo"],
**kwargs
):
super(SiteExtensionInfoCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SiteInstance(ProxyOnlyResource):
"""Instance of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Name of instance.
:vartype name_properties_name: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SiteInstance, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
class SiteLimits(msrest.serialization.Model):
"""Metric limits set on an app.
:param max_percentage_cpu: Maximum allowed CPU usage percentage.
:type max_percentage_cpu: float
:param max_memory_in_mb: Maximum allowed memory usage in MB.
:type max_memory_in_mb: long
:param max_disk_size_in_mb: Maximum allowed disk size usage in MB.
:type max_disk_size_in_mb: long
"""
_attribute_map = {
'max_percentage_cpu': {'key': 'maxPercentageCpu', 'type': 'float'},
'max_memory_in_mb': {'key': 'maxMemoryInMb', 'type': 'long'},
'max_disk_size_in_mb': {'key': 'maxDiskSizeInMb', 'type': 'long'},
}
def __init__(
self,
*,
max_percentage_cpu: Optional[float] = None,
max_memory_in_mb: Optional[int] = None,
max_disk_size_in_mb: Optional[int] = None,
**kwargs
):
super(SiteLimits, self).__init__(**kwargs)
self.max_percentage_cpu = max_percentage_cpu
self.max_memory_in_mb = max_memory_in_mb
self.max_disk_size_in_mb = max_disk_size_in_mb
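# Illustrative sketch: capping an app at 80% CPU, 2 GB of memory and 10 GB of
# disk. The numbers are hypothetical; memory and disk limits are expressed in
# MB per the docstring above.
def _example_site_limits() -> "SiteLimits":
    return SiteLimits(
        max_percentage_cpu=80.0,
        max_memory_in_mb=2048,
        max_disk_size_in_mb=10240,
    )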
class SiteLogsConfig(ProxyOnlyResource):
"""Configuration of App Service site logs.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param application_logs: Application logs configuration.
:type application_logs: ~azure.mgmt.web.v2016_08_01.models.ApplicationLogsConfig
:param http_logs: HTTP logs configuration.
:type http_logs: ~azure.mgmt.web.v2016_08_01.models.HttpLogsConfig
:param failed_requests_tracing: Failed requests tracing configuration.
:type failed_requests_tracing: ~azure.mgmt.web.v2016_08_01.models.EnabledConfig
:param detailed_error_messages: Detailed error messages configuration.
:type detailed_error_messages: ~azure.mgmt.web.v2016_08_01.models.EnabledConfig
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'application_logs': {'key': 'properties.applicationLogs', 'type': 'ApplicationLogsConfig'},
'http_logs': {'key': 'properties.httpLogs', 'type': 'HttpLogsConfig'},
'failed_requests_tracing': {'key': 'properties.failedRequestsTracing', 'type': 'EnabledConfig'},
'detailed_error_messages': {'key': 'properties.detailedErrorMessages', 'type': 'EnabledConfig'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
application_logs: Optional["ApplicationLogsConfig"] = None,
http_logs: Optional["HttpLogsConfig"] = None,
failed_requests_tracing: Optional["EnabledConfig"] = None,
detailed_error_messages: Optional["EnabledConfig"] = None,
**kwargs
):
super(SiteLogsConfig, self).__init__(kind=kind, **kwargs)
self.application_logs = application_logs
self.http_logs = http_logs
self.failed_requests_tracing = failed_requests_tracing
self.detailed_error_messages = detailed_error_messages
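# Illustrative sketch: turning on detailed error messages and failed-request
# tracing. EnabledConfig is another model in this module; the single
# `enabled` keyword used here is an assumption based on its attribute map.
def _example_site_logs_config() -> "SiteLogsConfig":
    return SiteLogsConfig(
        detailed_error_messages=EnabledConfig(enabled=True),
        failed_requests_tracing=EnabledConfig(enabled=True),
    )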
class SiteMachineKey(msrest.serialization.Model):
"""MachineKey of an app.
:param validation: MachineKey validation.
:type validation: str
:param validation_key: Validation key.
:type validation_key: str
:param decryption: Algorithm used for decryption.
:type decryption: str
:param decryption_key: Decryption key.
:type decryption_key: str
"""
_attribute_map = {
'validation': {'key': 'validation', 'type': 'str'},
'validation_key': {'key': 'validationKey', 'type': 'str'},
'decryption': {'key': 'decryption', 'type': 'str'},
'decryption_key': {'key': 'decryptionKey', 'type': 'str'},
}
def __init__(
self,
*,
validation: Optional[str] = None,
validation_key: Optional[str] = None,
decryption: Optional[str] = None,
decryption_key: Optional[str] = None,
**kwargs
):
super(SiteMachineKey, self).__init__(**kwargs)
self.validation = validation
self.validation_key = validation_key
self.decryption = decryption
self.decryption_key = decryption_key
class SitePatchResource(ProxyOnlyResource):
"""ARM resource for a site.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar state: Current state of the app.
:vartype state: str
:ivar host_names: Hostnames associated with the app.
:vartype host_names: list[str]
:ivar repository_site_name: Name of the repository site.
:vartype repository_site_name: str
:ivar usage_state: State indicating whether the app has exceeded its quota usage. Read-only.
Possible values include: "Normal", "Exceeded".
:vartype usage_state: str or ~azure.mgmt.web.v2016_08_01.models.UsageState
:param enabled: :code:`<code>true</code>` if the app is enabled; otherwise,
:code:`<code>false</code>`. Setting this value to false disables the app (takes the app
offline).
:type enabled: bool
:ivar enabled_host_names: Enabled hostnames for the app. Hostnames must be assigned (see
HostNames) AND enabled; otherwise, the app is not served on those hostnames.
:vartype enabled_host_names: list[str]
:ivar availability_state: Management information availability state for the app. Possible
values include: "Normal", "Limited", "DisasterRecoveryMode".
:vartype availability_state: str or ~azure.mgmt.web.v2016_08_01.models.SiteAvailabilityState
:param host_name_ssl_states: Hostname SSL states are used to manage the SSL bindings for the
app's hostnames.
:type host_name_ssl_states: list[~azure.mgmt.web.v2016_08_01.models.HostNameSslState]
:param server_farm_id: Resource ID of the associated App Service plan, formatted as:
"/subscriptions/{subscriptionID}/resourceGroups/{groupName}/providers/Microsoft.Web/serverfarms/{appServicePlanName}".
:type server_farm_id: str
:param reserved: :code:`<code>true</code>` if reserved; otherwise, :code:`<code>false</code>`.
:type reserved: bool
:ivar last_modified_time_utc: Last time the app was modified, in UTC. Read-only.
:vartype last_modified_time_utc: ~datetime.datetime
:param site_config: Configuration of the app.
:type site_config: ~azure.mgmt.web.v2016_08_01.models.SiteConfig
:ivar traffic_manager_host_names: Azure Traffic Manager hostnames associated with the app.
Read-only.
:vartype traffic_manager_host_names: list[str]
:param scm_site_also_stopped: :code:`<code>true</code>` to stop the SCM (Kudu) site when the app
is stopped; otherwise, :code:`<code>false</code>`. The default is :code:`<code>false</code>`.
:type scm_site_also_stopped: bool
:ivar target_swap_slot: Specifies which deployment slot this app will swap into. Read-only.
:vartype target_swap_slot: str
:param hosting_environment_profile: App Service Environment to use for the app.
:type hosting_environment_profile: ~azure.mgmt.web.v2016_08_01.models.HostingEnvironmentProfile
:param client_affinity_enabled: :code:`<code>true</code>` to enable client affinity;
:code:`<code>false</code>` to stop sending session affinity cookies, which route client
requests in the same session to the same instance. Default is :code:`<code>true</code>`.
:type client_affinity_enabled: bool
:param client_cert_enabled: :code:`<code>true</code>` to enable client certificate
authentication (TLS mutual authentication); otherwise, :code:`<code>false</code>`. Default is
:code:`<code>false</code>`.
:type client_cert_enabled: bool
:param host_names_disabled: :code:`<code>true</code>` to disable the public hostnames of the
app; otherwise, :code:`<code>false</code>`.
If :code:`<code>true</code>`, the app is only accessible via the API management process.
:type host_names_disabled: bool
:ivar outbound_ip_addresses: List of IP addresses that the app uses for outbound connections
(e.g. database access). Includes VIPs from tenants that the site can be hosted with under the
current settings. Read-only.
:vartype outbound_ip_addresses: str
:ivar possible_outbound_ip_addresses: List of IP addresses that the app uses for outbound
connections (e.g. database access). Includes VIPs from all tenants. Read-only.
:vartype possible_outbound_ip_addresses: str
:param container_size: Size of the function container.
:type container_size: int
:param daily_memory_time_quota: Maximum allowed daily memory-time quota (applicable on dynamic
apps only).
:type daily_memory_time_quota: int
:ivar suspended_till: Time until which the app is suspended, in case the memory-time quota is
exceeded.
:vartype suspended_till: ~datetime.datetime
:ivar max_number_of_workers: Maximum number of workers. This only applies to the Functions
container.
:vartype max_number_of_workers: int
:param cloning_info: If specified during app creation, the app is cloned from a source app.
:type cloning_info: ~azure.mgmt.web.v2016_08_01.models.CloningInfo
:param snapshot_info: If specified during app creation, the app is created from a previous
snapshot.
:type snapshot_info: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryRequest
:ivar resource_group: Name of the resource group the app belongs to. Read-only.
:vartype resource_group: str
:ivar is_default_container: :code:`<code>true</code>` if the app is a default container;
otherwise, :code:`<code>false</code>`.
:vartype is_default_container: bool
:ivar default_host_name: Default hostname of the app. Read-only.
:vartype default_host_name: str
:ivar slot_swap_status: Status of the last deployment slot swap operation.
:vartype slot_swap_status: ~azure.mgmt.web.v2016_08_01.models.SlotSwapStatus
:param https_only: HttpsOnly: configures a web site to accept only HTTPS requests, issuing a
redirect for HTTP requests.
:type https_only: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'state': {'readonly': True},
'host_names': {'readonly': True},
'repository_site_name': {'readonly': True},
'usage_state': {'readonly': True},
'enabled_host_names': {'readonly': True},
'availability_state': {'readonly': True},
'last_modified_time_utc': {'readonly': True},
'traffic_manager_host_names': {'readonly': True},
'target_swap_slot': {'readonly': True},
'outbound_ip_addresses': {'readonly': True},
'possible_outbound_ip_addresses': {'readonly': True},
'suspended_till': {'readonly': True},
'max_number_of_workers': {'readonly': True},
'resource_group': {'readonly': True},
'is_default_container': {'readonly': True},
'default_host_name': {'readonly': True},
'slot_swap_status': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'state': {'key': 'properties.state', 'type': 'str'},
'host_names': {'key': 'properties.hostNames', 'type': '[str]'},
'repository_site_name': {'key': 'properties.repositorySiteName', 'type': 'str'},
'usage_state': {'key': 'properties.usageState', 'type': 'str'},
'enabled': {'key': 'properties.enabled', 'type': 'bool'},
'enabled_host_names': {'key': 'properties.enabledHostNames', 'type': '[str]'},
'availability_state': {'key': 'properties.availabilityState', 'type': 'str'},
'host_name_ssl_states': {'key': 'properties.hostNameSslStates', 'type': '[HostNameSslState]'},
'server_farm_id': {'key': 'properties.serverFarmId', 'type': 'str'},
'reserved': {'key': 'properties.reserved', 'type': 'bool'},
'last_modified_time_utc': {'key': 'properties.lastModifiedTimeUtc', 'type': 'iso-8601'},
'site_config': {'key': 'properties.siteConfig', 'type': 'SiteConfig'},
'traffic_manager_host_names': {'key': 'properties.trafficManagerHostNames', 'type': '[str]'},
'scm_site_also_stopped': {'key': 'properties.scmSiteAlsoStopped', 'type': 'bool'},
'target_swap_slot': {'key': 'properties.targetSwapSlot', 'type': 'str'},
'hosting_environment_profile': {'key': 'properties.hostingEnvironmentProfile', 'type': 'HostingEnvironmentProfile'},
'client_affinity_enabled': {'key': 'properties.clientAffinityEnabled', 'type': 'bool'},
'client_cert_enabled': {'key': 'properties.clientCertEnabled', 'type': 'bool'},
'host_names_disabled': {'key': 'properties.hostNamesDisabled', 'type': 'bool'},
'outbound_ip_addresses': {'key': 'properties.outboundIpAddresses', 'type': 'str'},
'possible_outbound_ip_addresses': {'key': 'properties.possibleOutboundIpAddresses', 'type': 'str'},
'container_size': {'key': 'properties.containerSize', 'type': 'int'},
'daily_memory_time_quota': {'key': 'properties.dailyMemoryTimeQuota', 'type': 'int'},
'suspended_till': {'key': 'properties.suspendedTill', 'type': 'iso-8601'},
'max_number_of_workers': {'key': 'properties.maxNumberOfWorkers', 'type': 'int'},
'cloning_info': {'key': 'properties.cloningInfo', 'type': 'CloningInfo'},
'snapshot_info': {'key': 'properties.snapshotInfo', 'type': 'SnapshotRecoveryRequest'},
'resource_group': {'key': 'properties.resourceGroup', 'type': 'str'},
'is_default_container': {'key': 'properties.isDefaultContainer', 'type': 'bool'},
'default_host_name': {'key': 'properties.defaultHostName', 'type': 'str'},
'slot_swap_status': {'key': 'properties.slotSwapStatus', 'type': 'SlotSwapStatus'},
'https_only': {'key': 'properties.httpsOnly', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
enabled: Optional[bool] = None,
host_name_ssl_states: Optional[List["HostNameSslState"]] = None,
server_farm_id: Optional[str] = None,
reserved: Optional[bool] = False,
site_config: Optional["SiteConfig"] = None,
scm_site_also_stopped: Optional[bool] = False,
hosting_environment_profile: Optional["HostingEnvironmentProfile"] = None,
client_affinity_enabled: Optional[bool] = None,
client_cert_enabled: Optional[bool] = None,
host_names_disabled: Optional[bool] = None,
container_size: Optional[int] = None,
daily_memory_time_quota: Optional[int] = None,
cloning_info: Optional["CloningInfo"] = None,
snapshot_info: Optional["SnapshotRecoveryRequest"] = None,
https_only: Optional[bool] = None,
**kwargs
):
super(SitePatchResource, self).__init__(kind=kind, **kwargs)
self.state = None
self.host_names = None
self.repository_site_name = None
self.usage_state = None
self.enabled = enabled
self.enabled_host_names = None
self.availability_state = None
self.host_name_ssl_states = host_name_ssl_states
self.server_farm_id = server_farm_id
self.reserved = reserved
self.last_modified_time_utc = None
self.site_config = site_config
self.traffic_manager_host_names = None
self.scm_site_also_stopped = scm_site_also_stopped
self.target_swap_slot = None
self.hosting_environment_profile = hosting_environment_profile
self.client_affinity_enabled = client_affinity_enabled
self.client_cert_enabled = client_cert_enabled
self.host_names_disabled = host_names_disabled
self.outbound_ip_addresses = None
self.possible_outbound_ip_addresses = None
self.container_size = container_size
self.daily_memory_time_quota = daily_memory_time_quota
self.suspended_till = None
self.max_number_of_workers = None
self.cloning_info = cloning_info
self.snapshot_info = snapshot_info
self.resource_group = None
self.is_default_container = None
self.default_host_name = None
self.slot_swap_status = None
self.https_only = https_only
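# Illustrative sketch: a minimal PATCH payload that forces HTTPS and turns
# off client affinity. All read-only (server-populated) properties are left
# untouched, which is the point of the *PatchResource shape.
def _example_site_patch() -> "SitePatchResource":
    return SitePatchResource(https_only=True, client_affinity_enabled=False)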
class SitePhpErrorLogFlag(ProxyOnlyResource):
"""Used for getting PHP error logging flag.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param local_log_errors: Local log_errors setting.
:type local_log_errors: str
:param master_log_errors: Master log_errors setting.
:type master_log_errors: str
:param local_log_errors_max_length: Local log_errors_max_len setting.
:type local_log_errors_max_length: str
:param master_log_errors_max_length: Master log_errors_max_len setting.
:type master_log_errors_max_length: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'local_log_errors': {'key': 'properties.localLogErrors', 'type': 'str'},
'master_log_errors': {'key': 'properties.masterLogErrors', 'type': 'str'},
'local_log_errors_max_length': {'key': 'properties.localLogErrorsMaxLength', 'type': 'str'},
'master_log_errors_max_length': {'key': 'properties.masterLogErrorsMaxLength', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
local_log_errors: Optional[str] = None,
master_log_errors: Optional[str] = None,
local_log_errors_max_length: Optional[str] = None,
master_log_errors_max_length: Optional[str] = None,
**kwargs
):
super(SitePhpErrorLogFlag, self).__init__(kind=kind, **kwargs)
self.local_log_errors = local_log_errors
self.master_log_errors = master_log_errors
self.local_log_errors_max_length = local_log_errors_max_length
self.master_log_errors_max_length = master_log_errors_max_length
class SiteSourceControl(ProxyOnlyResource):
"""Source control configuration for an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param repo_url: Repository or source control URL.
:type repo_url: str
:param branch: Name of branch to use for deployment.
:type branch: str
:param is_manual_integration: :code:`<code>true</code>` to limit to manual integration;
:code:`<code>false</code>` to enable continuous integration (which configures webhooks into
online repos like GitHub).
:type is_manual_integration: bool
:param deployment_rollback_enabled: :code:`<code>true</code>` to enable deployment rollback;
otherwise, :code:`<code>false</code>`.
:type deployment_rollback_enabled: bool
:param is_mercurial: :code:`<code>true</code>` for a Mercurial repository;
:code:`<code>false</code>` for a Git repository.
:type is_mercurial: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'repo_url': {'key': 'properties.repoUrl', 'type': 'str'},
'branch': {'key': 'properties.branch', 'type': 'str'},
'is_manual_integration': {'key': 'properties.isManualIntegration', 'type': 'bool'},
'deployment_rollback_enabled': {'key': 'properties.deploymentRollbackEnabled', 'type': 'bool'},
'is_mercurial': {'key': 'properties.isMercurial', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
repo_url: Optional[str] = None,
branch: Optional[str] = None,
is_manual_integration: Optional[bool] = None,
deployment_rollback_enabled: Optional[bool] = None,
is_mercurial: Optional[bool] = None,
**kwargs
):
super(SiteSourceControl, self).__init__(kind=kind, **kwargs)
self.repo_url = repo_url
self.branch = branch
self.is_manual_integration = is_manual_integration
self.deployment_rollback_enabled = deployment_rollback_enabled
self.is_mercurial = is_mercurial
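# Illustrative sketch: wiring an app to a public Git repository with
# continuous integration (is_manual_integration=False lets the service set
# up webhooks in the repo). The URL and branch are placeholders.
def _example_site_source_control() -> "SiteSourceControl":
    return SiteSourceControl(
        repo_url="https://github.com/example/app",
        branch="master",
        is_manual_integration=False,
        is_mercurial=False,  # Git, not Mercurial
    )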
class SlotConfigNamesResource(ProxyOnlyResource):
"""Slot Config names azure resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param connection_string_names: List of connection string names.
:type connection_string_names: list[str]
:param app_setting_names: List of application settings names.
:type app_setting_names: list[str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'connection_string_names': {'key': 'properties.connectionStringNames', 'type': '[str]'},
'app_setting_names': {'key': 'properties.appSettingNames', 'type': '[str]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
connection_string_names: Optional[List[str]] = None,
app_setting_names: Optional[List[str]] = None,
**kwargs
):
super(SlotConfigNamesResource, self).__init__(kind=kind, **kwargs)
self.connection_string_names = connection_string_names
self.app_setting_names = app_setting_names
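# Illustrative sketch: pinning ("stickying") settings to a slot so that they
# do not travel with the app during a swap. The setting and connection-string
# names below are hypothetical.
def _example_slot_config_names() -> "SlotConfigNamesResource":
    return SlotConfigNamesResource(
        app_setting_names=["ENVIRONMENT", "FEATURE_FLAGS"],
        connection_string_names=["MainDb"],
    )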
class SlotDifference(ProxyOnlyResource):
"""A setting difference between two deployment slots of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar type_properties_type: Type of the difference: Information, Warning or Error.
:vartype type_properties_type: str
:ivar setting_type: The type of the setting: General, AppSetting or ConnectionString.
:vartype setting_type: str
:ivar diff_rule: Rule that describes how to process the setting difference during a slot swap.
:vartype diff_rule: str
:ivar setting_name: Name of the setting.
:vartype setting_name: str
:ivar value_in_current_slot: Value of the setting in the current slot.
:vartype value_in_current_slot: str
:ivar value_in_target_slot: Value of the setting in the target slot.
:vartype value_in_target_slot: str
:ivar description: Description of the setting difference.
:vartype description: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'type_properties_type': {'readonly': True},
'setting_type': {'readonly': True},
'diff_rule': {'readonly': True},
'setting_name': {'readonly': True},
'value_in_current_slot': {'readonly': True},
'value_in_target_slot': {'readonly': True},
'description': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'type_properties_type': {'key': 'properties.type', 'type': 'str'},
'setting_type': {'key': 'properties.settingType', 'type': 'str'},
'diff_rule': {'key': 'properties.diffRule', 'type': 'str'},
'setting_name': {'key': 'properties.settingName', 'type': 'str'},
'value_in_current_slot': {'key': 'properties.valueInCurrentSlot', 'type': 'str'},
'value_in_target_slot': {'key': 'properties.valueInTargetSlot', 'type': 'str'},
'description': {'key': 'properties.description', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(SlotDifference, self).__init__(kind=kind, **kwargs)
self.type_properties_type = None
self.setting_type = None
self.diff_rule = None
self.setting_name = None
self.value_in_current_slot = None
self.value_in_target_slot = None
self.description = None
class SlotDifferenceCollection(msrest.serialization.Model):
"""Collection of slot differences.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SlotDifference]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SlotDifference]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SlotDifference"],
**kwargs
):
super(SlotDifferenceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SlotSwapStatus(msrest.serialization.Model):
"""The status of the last successful slot swap operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar timestamp_utc: The time the last successful slot swap completed.
:vartype timestamp_utc: ~datetime.datetime
:ivar source_slot_name: The source slot of the last swap operation.
:vartype source_slot_name: str
:ivar destination_slot_name: The destination slot of the last swap operation.
:vartype destination_slot_name: str
"""
_validation = {
'timestamp_utc': {'readonly': True},
'source_slot_name': {'readonly': True},
'destination_slot_name': {'readonly': True},
}
_attribute_map = {
'timestamp_utc': {'key': 'timestampUtc', 'type': 'iso-8601'},
'source_slot_name': {'key': 'sourceSlotName', 'type': 'str'},
'destination_slot_name': {'key': 'destinationSlotName', 'type': 'str'},
}
def __init__(
self,
**kwargs
):
super(SlotSwapStatus, self).__init__(**kwargs)
self.timestamp_utc = None
self.source_slot_name = None
self.destination_slot_name = None
class SlowRequestsBasedTrigger(msrest.serialization.Model):
"""Trigger based on request execution time.
:param time_taken: Time taken (minimum request duration for a request to count as slow).
:type time_taken: str
:param count: Request count (number of slow requests within the interval that fires the
trigger).
:type count: int
:param time_interval: Time interval (sliding window over which slow requests are counted).
:type time_interval: str
"""
_attribute_map = {
'time_taken': {'key': 'timeTaken', 'type': 'str'},
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
time_taken: Optional[str] = None,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(SlowRequestsBasedTrigger, self).__init__(**kwargs)
self.time_taken = time_taken
self.count = count
self.time_interval = time_interval
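# Illustrative usage (editor's example, not part of the generated SDK): an
# auto-heal trigger that fires when slow requests accumulate; all values are
# hypothetical.
#
#   trigger = SlowRequestsBasedTrigger(
#       time_taken="00:00:10",    # requests slower than 10 seconds...
#       count=100,                # ...seen at least 100 times...
#       time_interval="00:05:00") # ...within a 5-minute window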
class Snapshot(ProxyOnlyResource):
"""A snapshot of an app.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar time: The time the snapshot was taken.
:vartype time: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'time': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'time': {'key': 'properties.time', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(Snapshot, self).__init__(kind=kind, **kwargs)
self.time = None
class SnapshotCollection(msrest.serialization.Model):
"""Collection of snapshots which can be used to revert an app to a previous time.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Snapshot]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Snapshot]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Snapshot"],
**kwargs
):
super(SnapshotCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class SnapshotRecoveryRequest(ProxyOnlyResource):
"""Details about app recovery operation.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
    :param snapshot_time: Point in time at which the app recovery should be attempted, formatted as
a DateTime string.
:type snapshot_time: str
:param recovery_target: Specifies the web app that snapshot contents will be written to.
:type recovery_target: ~azure.mgmt.web.v2016_08_01.models.SnapshotRecoveryTarget
    :param overwrite: If :code:`<code>true</code>` the recovery operation can overwrite the source app;
otherwise, :code:`<code>false</code>`.
:type overwrite: bool
:param recover_configuration: If true, site configuration, in addition to content, will be
reverted.
:type recover_configuration: bool
:param ignore_conflicting_host_names: If true, custom hostname conflicts will be ignored when
recovering to a target web app.
This setting is only necessary when RecoverConfiguration is enabled.
:type ignore_conflicting_host_names: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'snapshot_time': {'key': 'properties.snapshotTime', 'type': 'str'},
'recovery_target': {'key': 'properties.recoveryTarget', 'type': 'SnapshotRecoveryTarget'},
'overwrite': {'key': 'properties.overwrite', 'type': 'bool'},
'recover_configuration': {'key': 'properties.recoverConfiguration', 'type': 'bool'},
'ignore_conflicting_host_names': {'key': 'properties.ignoreConflictingHostNames', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
snapshot_time: Optional[str] = None,
recovery_target: Optional["SnapshotRecoveryTarget"] = None,
overwrite: Optional[bool] = None,
recover_configuration: Optional[bool] = None,
ignore_conflicting_host_names: Optional[bool] = None,
**kwargs
):
super(SnapshotRecoveryRequest, self).__init__(kind=kind, **kwargs)
self.snapshot_time = snapshot_time
self.recovery_target = recovery_target
self.overwrite = overwrite
self.recover_configuration = recover_configuration
self.ignore_conflicting_host_names = ignore_conflicting_host_names
class SnapshotRecoveryTarget(msrest.serialization.Model):
"""Specifies the web app that snapshot contents will be written to.
:param location: Geographical location of the target web app, e.g. SouthEastAsia,
SouthCentralUS.
:type location: str
:param id: ARM resource ID of the target app.
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}
for production slots and
/subscriptions/{subId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/sites/{siteName}/slots/{slotName}
for other slots.
:type id: str
"""
_attribute_map = {
'location': {'key': 'location', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
}
def __init__(
self,
*,
location: Optional[str] = None,
id: Optional[str] = None,
**kwargs
):
super(SnapshotRecoveryTarget, self).__init__(**kwargs)
self.location = location
self.id = id
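# Illustrative usage (editor's example, not part of the generated SDK): build a
# recovery request against a hypothetical target app; the subscription, resource
# group, site name and timestamp are placeholders.
#
#   target = SnapshotRecoveryTarget(
#       location="SouthCentralUS",
#       id=("/subscriptions/{subId}/resourceGroups/{resourceGroupName}"
#           "/providers/Microsoft.Web/sites/{siteName}"))
#   request = SnapshotRecoveryRequest(
#       snapshot_time="2016-08-01T12:00:00Z",
#       recovery_target=target,
#       overwrite=False,
#       recover_configuration=True)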
class StatusCodesBasedTrigger(msrest.serialization.Model):
"""Trigger based on status code.
:param status: HTTP status code.
:type status: int
:param sub_status: Request Sub Status.
:type sub_status: int
:param win32_status: Win32 error code.
:type win32_status: int
:param count: Request Count.
:type count: int
:param time_interval: Time interval.
:type time_interval: str
"""
_attribute_map = {
'status': {'key': 'status', 'type': 'int'},
'sub_status': {'key': 'subStatus', 'type': 'int'},
'win32_status': {'key': 'win32Status', 'type': 'int'},
'count': {'key': 'count', 'type': 'int'},
'time_interval': {'key': 'timeInterval', 'type': 'str'},
}
def __init__(
self,
*,
status: Optional[int] = None,
sub_status: Optional[int] = None,
win32_status: Optional[int] = None,
count: Optional[int] = None,
time_interval: Optional[str] = None,
**kwargs
):
super(StatusCodesBasedTrigger, self).__init__(**kwargs)
self.status = status
self.sub_status = sub_status
self.win32_status = win32_status
self.count = count
self.time_interval = time_interval
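# Illustrative usage (editor's example, not part of the generated SDK): trigger
# on repeated HTTP 500 responses; the thresholds are hypothetical.
#
#   trigger = StatusCodesBasedTrigger(
#       status=500, sub_status=0, win32_status=0,
#       count=10, time_interval="00:05:00")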
class StorageMigrationOptions(ProxyOnlyResource):
"""Options for app content migration.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param azurefiles_connection_string: AzureFiles connection string.
:type azurefiles_connection_string: str
:param azurefiles_share: AzureFiles share.
:type azurefiles_share: str
    :param switch_site_after_migration: :code:`<code>true</code>` if the app should be switched
over; otherwise, :code:`<code>false</code>`.
:type switch_site_after_migration: bool
:param block_write_access_to_site: :code:`<code>true</code>` if the app should be read only
during copy operation; otherwise, :code:`<code>false</code>`.
:type block_write_access_to_site: bool
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'azurefiles_connection_string': {'key': 'properties.azurefilesConnectionString', 'type': 'str'},
'azurefiles_share': {'key': 'properties.azurefilesShare', 'type': 'str'},
'switch_site_after_migration': {'key': 'properties.switchSiteAfterMigration', 'type': 'bool'},
'block_write_access_to_site': {'key': 'properties.blockWriteAccessToSite', 'type': 'bool'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
azurefiles_connection_string: Optional[str] = None,
azurefiles_share: Optional[str] = None,
switch_site_after_migration: Optional[bool] = False,
block_write_access_to_site: Optional[bool] = False,
**kwargs
):
super(StorageMigrationOptions, self).__init__(kind=kind, **kwargs)
self.azurefiles_connection_string = azurefiles_connection_string
self.azurefiles_share = azurefiles_share
self.switch_site_after_migration = switch_site_after_migration
self.block_write_access_to_site = block_write_access_to_site
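# Illustrative usage (editor's example, not part of the generated SDK): migrate
# app content to an Azure Files share; the connection string and share name are
# placeholders.
#
#   options = StorageMigrationOptions(
#       azurefiles_connection_string="<storage-connection-string>",
#       azurefiles_share="my-share",
#       switch_site_after_migration=True,
#       block_write_access_to_site=True)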
class StorageMigrationResponse(ProxyOnlyResource):
"""Response for a migration of app content request.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar operation_id: When server starts the migration process, it will return an operation ID
identifying that particular migration operation.
:vartype operation_id: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'operation_id': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'operation_id': {'key': 'properties.operationId', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
**kwargs
):
super(StorageMigrationResponse, self).__init__(kind=kind, **kwargs)
self.operation_id = None
class StringDictionary(ProxyOnlyResource):
"""String dictionary resource.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param properties: Settings.
:type properties: dict[str, str]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'properties': {'key': 'properties', 'type': '{str}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
properties: Optional[Dict[str, str]] = None,
**kwargs
):
super(StringDictionary, self).__init__(kind=kind, **kwargs)
self.properties = properties
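# Illustrative usage (editor's example, not part of the generated SDK): app
# settings travel as a StringDictionary whose ``properties`` is a plain
# str -> str mapping; the keys shown are hypothetical.
#
#   settings = StringDictionary(
#       properties={"ENVIRONMENT": "staging", "FEATURE_FLAG_X": "on"})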
class TriggeredJobHistory(ProxyOnlyResource):
"""Triggered Web Job History. List of Triggered Web Job Run Information elements.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param triggered_job_runs: List of triggered web job runs.
:type triggered_job_runs: list[~azure.mgmt.web.v2016_08_01.models.TriggeredJobRun]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'triggered_job_runs': {'key': 'properties.triggeredJobRuns', 'type': '[TriggeredJobRun]'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
triggered_job_runs: Optional[List["TriggeredJobRun"]] = None,
**kwargs
):
super(TriggeredJobHistory, self).__init__(kind=kind, **kwargs)
self.triggered_job_runs = triggered_job_runs
class TriggeredJobHistoryCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.TriggeredJobHistory]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[TriggeredJobHistory]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["TriggeredJobHistory"],
**kwargs
):
super(TriggeredJobHistoryCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class TriggeredJobRun(ProxyOnlyResource):
"""Triggered Web Job Run Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param id_properties_id: Job ID.
:type id_properties_id: str
:ivar name_properties_name: Job name.
:vartype name_properties_name: str
:param status: Job status. Possible values include: "Success", "Failed", "Error".
:type status: str or ~azure.mgmt.web.v2016_08_01.models.TriggeredWebJobStatus
:param start_time: Start time.
:type start_time: ~datetime.datetime
:param end_time: End time.
:type end_time: ~datetime.datetime
:param duration: Job duration.
:type duration: str
:param output_url: Output URL.
:type output_url: str
:param error_url: Error URL.
:type error_url: str
:param url: Job URL.
:type url: str
:param job_name: Job name.
:type job_name: str
:param trigger: Job trigger.
:type trigger: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'id_properties_id': {'key': 'properties.id', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'status': {'key': 'properties.status', 'type': 'str'},
'start_time': {'key': 'properties.startTime', 'type': 'iso-8601'},
'end_time': {'key': 'properties.endTime', 'type': 'iso-8601'},
'duration': {'key': 'properties.duration', 'type': 'str'},
'output_url': {'key': 'properties.outputUrl', 'type': 'str'},
'error_url': {'key': 'properties.errorUrl', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'job_name': {'key': 'properties.jobName', 'type': 'str'},
'trigger': {'key': 'properties.trigger', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
id_properties_id: Optional[str] = None,
status: Optional[Union[str, "TriggeredWebJobStatus"]] = None,
start_time: Optional[datetime.datetime] = None,
end_time: Optional[datetime.datetime] = None,
duration: Optional[str] = None,
output_url: Optional[str] = None,
error_url: Optional[str] = None,
url: Optional[str] = None,
job_name: Optional[str] = None,
trigger: Optional[str] = None,
**kwargs
):
super(TriggeredJobRun, self).__init__(kind=kind, **kwargs)
self.id_properties_id = id_properties_id
self.name_properties_name = None
self.status = status
self.start_time = start_time
self.end_time = end_time
self.duration = duration
self.output_url = output_url
self.error_url = error_url
self.url = url
self.job_name = job_name
self.trigger = trigger
class TriggeredWebJob(ProxyOnlyResource):
"""Triggered Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param latest_run: Latest job run information.
:type latest_run: ~azure.mgmt.web.v2016_08_01.models.TriggeredJobRun
:param history_url: History URL.
:type history_url: str
:param scheduler_logs_url: Scheduler Logs URL.
:type scheduler_logs_url: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'latest_run': {'key': 'properties.latestRun', 'type': 'TriggeredJobRun'},
'history_url': {'key': 'properties.historyUrl', 'type': 'str'},
'scheduler_logs_url': {'key': 'properties.schedulerLogsUrl', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
latest_run: Optional["TriggeredJobRun"] = None,
history_url: Optional[str] = None,
scheduler_logs_url: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(TriggeredWebJob, self).__init__(kind=kind, **kwargs)
self.latest_run = latest_run
self.history_url = history_url
self.scheduler_logs_url = scheduler_logs_url
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class TriggeredWebJobCollection(msrest.serialization.Model):
"""Collection of Kudu continuous web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.TriggeredWebJob]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[TriggeredWebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["TriggeredWebJob"],
**kwargs
):
super(TriggeredWebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class User(ProxyOnlyResource):
"""User credentials used for publishing activity.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param user_name: Username.
:type user_name: str
:param publishing_user_name: Username used for publishing.
:type publishing_user_name: str
:param publishing_password: Password used for publishing.
:type publishing_password: str
:param publishing_password_hash: Password hash used for publishing.
:type publishing_password_hash: str
:param publishing_password_hash_salt: Password hash salt used for publishing.
:type publishing_password_hash_salt: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'user_name': {'key': 'properties.name', 'type': 'str'},
'publishing_user_name': {'key': 'properties.publishingUserName', 'type': 'str'},
'publishing_password': {'key': 'properties.publishingPassword', 'type': 'str'},
'publishing_password_hash': {'key': 'properties.publishingPasswordHash', 'type': 'str'},
'publishing_password_hash_salt': {'key': 'properties.publishingPasswordHashSalt', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
user_name: Optional[str] = None,
publishing_user_name: Optional[str] = None,
publishing_password: Optional[str] = None,
publishing_password_hash: Optional[str] = None,
publishing_password_hash_salt: Optional[str] = None,
**kwargs
):
super(User, self).__init__(kind=kind, **kwargs)
self.user_name = user_name
self.publishing_user_name = publishing_user_name
self.publishing_password = publishing_password
self.publishing_password_hash = publishing_password_hash
self.publishing_password_hash_salt = publishing_password_hash_salt
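# Illustrative usage (editor's example, not part of the generated SDK):
# publishing credentials; the values are placeholders and real secrets should
# never be hard-coded.
#
#   user = User(
#       publishing_user_name="deploy-user",
#       publishing_password="<placeholder>")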
class VirtualApplication(msrest.serialization.Model):
"""Virtual application in an app.
:param virtual_path: Virtual path.
:type virtual_path: str
:param physical_path: Physical path.
:type physical_path: str
:param preload_enabled: :code:`<code>true</code>` if preloading is enabled; otherwise,
:code:`<code>false</code>`.
:type preload_enabled: bool
:param virtual_directories: Virtual directories for virtual application.
:type virtual_directories: list[~azure.mgmt.web.v2016_08_01.models.VirtualDirectory]
"""
_attribute_map = {
'virtual_path': {'key': 'virtualPath', 'type': 'str'},
'physical_path': {'key': 'physicalPath', 'type': 'str'},
'preload_enabled': {'key': 'preloadEnabled', 'type': 'bool'},
'virtual_directories': {'key': 'virtualDirectories', 'type': '[VirtualDirectory]'},
}
def __init__(
self,
*,
virtual_path: Optional[str] = None,
physical_path: Optional[str] = None,
preload_enabled: Optional[bool] = None,
virtual_directories: Optional[List["VirtualDirectory"]] = None,
**kwargs
):
super(VirtualApplication, self).__init__(**kwargs)
self.virtual_path = virtual_path
self.physical_path = physical_path
self.preload_enabled = preload_enabled
self.virtual_directories = virtual_directories
class VirtualDirectory(msrest.serialization.Model):
"""Directory for virtual application.
:param virtual_path: Path to virtual application.
:type virtual_path: str
:param physical_path: Physical path.
:type physical_path: str
"""
_attribute_map = {
'virtual_path': {'key': 'virtualPath', 'type': 'str'},
'physical_path': {'key': 'physicalPath', 'type': 'str'},
}
def __init__(
self,
*,
virtual_path: Optional[str] = None,
physical_path: Optional[str] = None,
**kwargs
):
super(VirtualDirectory, self).__init__(**kwargs)
self.virtual_path = virtual_path
self.physical_path = physical_path
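# Illustrative usage (editor's example, not part of the generated SDK): a root
# virtual application with one extra virtual directory; paths are hypothetical.
#
#   app = VirtualApplication(
#       virtual_path="/",
#       physical_path="site\\wwwroot",
#       preload_enabled=True,
#       virtual_directories=[
#           VirtualDirectory(virtual_path="/static",
#                            physical_path="site\\static")])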
class VnetGateway(ProxyOnlyResource):
"""The Virtual Network gateway contract. This is used to give the Virtual Network gateway access to the VPN package.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_name: The Virtual Network name.
:type vnet_name: str
:param vpn_package_uri: The URI where the VPN package can be downloaded.
:type vpn_package_uri: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_name': {'key': 'properties.vnetName', 'type': 'str'},
'vpn_package_uri': {'key': 'properties.vpnPackageUri', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_name: Optional[str] = None,
vpn_package_uri: Optional[str] = None,
**kwargs
):
super(VnetGateway, self).__init__(kind=kind, **kwargs)
self.vnet_name = vnet_name
self.vpn_package_uri = vpn_package_uri
class VnetInfo(ProxyOnlyResource):
"""Virtual Network information contract.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_resource_id: The Virtual Network's resource ID.
:type vnet_resource_id: str
:ivar cert_thumbprint: The client certificate thumbprint.
:vartype cert_thumbprint: str
:param cert_blob: A certificate file (.cer) blob containing the public key of the private key
used to authenticate a
Point-To-Site VPN connection.
:type cert_blob: bytearray
:ivar routes: The routes that this Virtual Network connection uses.
:vartype routes: list[~azure.mgmt.web.v2016_08_01.models.VnetRoute]
:ivar resync_required: :code:`<code>true</code>` if a resync is required; otherwise,
:code:`<code>false</code>`.
:vartype resync_required: bool
:param dns_servers: DNS servers to be used by this Virtual Network. This should be a
comma-separated list of IP addresses.
:type dns_servers: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'cert_thumbprint': {'readonly': True},
'routes': {'readonly': True},
'resync_required': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_resource_id': {'key': 'properties.vnetResourceId', 'type': 'str'},
'cert_thumbprint': {'key': 'properties.certThumbprint', 'type': 'str'},
'cert_blob': {'key': 'properties.certBlob', 'type': 'bytearray'},
'routes': {'key': 'properties.routes', 'type': '[VnetRoute]'},
'resync_required': {'key': 'properties.resyncRequired', 'type': 'bool'},
'dns_servers': {'key': 'properties.dnsServers', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_resource_id: Optional[str] = None,
cert_blob: Optional[bytearray] = None,
dns_servers: Optional[str] = None,
**kwargs
):
super(VnetInfo, self).__init__(kind=kind, **kwargs)
self.vnet_resource_id = vnet_resource_id
self.cert_thumbprint = None
self.cert_blob = cert_blob
self.routes = None
self.resync_required = None
self.dns_servers = dns_servers
class VnetRoute(ProxyOnlyResource):
"""Virtual Network route contract used to pass routing information for a Virtual Network.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:param vnet_route_name: The name of this route. This is only returned by the server and does
not need to be set by the client.
:type vnet_route_name: str
:param start_address: The starting address for this route. This may also include a CIDR
notation, in which case the end address must not be specified.
:type start_address: str
:param end_address: The ending address for this route. If the start address is specified in
CIDR notation, this must be omitted.
:type end_address: str
:param route_type: The type of route this is:
DEFAULT - By default, every app has routes to the local address ranges specified by RFC1918
INHERITED - Routes inherited from the real Virtual Network routes
STATIC - Static route set on the app only
These values will be used for syncing an app's routes with those from a Virtual Network.
Possible values include: "DEFAULT", "INHERITED", "STATIC".
:type route_type: str or ~azure.mgmt.web.v2016_08_01.models.RouteType
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'vnet_route_name': {'key': 'properties.name', 'type': 'str'},
'start_address': {'key': 'properties.startAddress', 'type': 'str'},
'end_address': {'key': 'properties.endAddress', 'type': 'str'},
'route_type': {'key': 'properties.routeType', 'type': 'str'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
vnet_route_name: Optional[str] = None,
start_address: Optional[str] = None,
end_address: Optional[str] = None,
route_type: Optional[Union[str, "RouteType"]] = None,
**kwargs
):
super(VnetRoute, self).__init__(kind=kind, **kwargs)
self.vnet_route_name = vnet_route_name
self.start_address = start_address
self.end_address = end_address
self.route_type = route_type
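# Illustrative usage (editor's example, not part of the generated SDK): a
# static route given in CIDR notation, so end_address is omitted per the
# docstring above; the address range is hypothetical.
#
#   route = VnetRoute(
#       vnet_route_name="internal-range",
#       start_address="10.0.0.0/24",
#       route_type="STATIC")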
class WebAppCollection(msrest.serialization.Model):
"""Collection of App Service apps.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.Site]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[Site]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["Site"],
**kwargs
):
super(WebAppCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class WebAppInstanceCollection(msrest.serialization.Model):
"""Collection of app instances.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.SiteInstance]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[SiteInstance]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["SiteInstance"],
**kwargs
):
super(WebAppInstanceCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
class WebJob(ProxyOnlyResource):
"""Web Job Information.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Resource Id.
:vartype id: str
:ivar name: Resource Name.
:vartype name: str
:param kind: Kind of resource.
:type kind: str
:ivar type: Resource type.
:vartype type: str
:ivar name_properties_name: Job name. Used as job identifier in ARM resource URI.
:vartype name_properties_name: str
:param run_command: Run command.
:type run_command: str
:param url: Job URL.
:type url: str
:param extra_info_url: Extra Info URL.
:type extra_info_url: str
:param job_type: Job type. Possible values include: "Continuous", "Triggered".
:type job_type: str or ~azure.mgmt.web.v2016_08_01.models.WebJobType
:param error: Error information.
:type error: str
:param using_sdk: Using SDK?.
:type using_sdk: bool
:param settings: Job settings.
:type settings: dict[str, any]
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'name_properties_name': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'kind': {'key': 'kind', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'name_properties_name': {'key': 'properties.name', 'type': 'str'},
'run_command': {'key': 'properties.runCommand', 'type': 'str'},
'url': {'key': 'properties.url', 'type': 'str'},
'extra_info_url': {'key': 'properties.extraInfoUrl', 'type': 'str'},
'job_type': {'key': 'properties.jobType', 'type': 'str'},
'error': {'key': 'properties.error', 'type': 'str'},
'using_sdk': {'key': 'properties.usingSdk', 'type': 'bool'},
'settings': {'key': 'properties.settings', 'type': '{object}'},
}
def __init__(
self,
*,
kind: Optional[str] = None,
run_command: Optional[str] = None,
url: Optional[str] = None,
extra_info_url: Optional[str] = None,
job_type: Optional[Union[str, "WebJobType"]] = None,
error: Optional[str] = None,
using_sdk: Optional[bool] = None,
settings: Optional[Dict[str, Any]] = None,
**kwargs
):
super(WebJob, self).__init__(kind=kind, **kwargs)
self.name_properties_name = None
self.run_command = run_command
self.url = url
self.extra_info_url = extra_info_url
self.job_type = job_type
self.error = error
self.using_sdk = using_sdk
self.settings = settings
class WebJobCollection(msrest.serialization.Model):
"""Collection of Kudu web job information elements.
Variables are only populated by the server, and will be ignored when sending a request.
All required parameters must be populated in order to send to Azure.
:param value: Required. Collection of resources.
:type value: list[~azure.mgmt.web.v2016_08_01.models.WebJob]
:ivar next_link: Link to next page of resources.
:vartype next_link: str
"""
_validation = {
'value': {'required': True},
'next_link': {'readonly': True},
}
_attribute_map = {
'value': {'key': 'value', 'type': '[WebJob]'},
'next_link': {'key': 'nextLink', 'type': 'str'},
}
def __init__(
self,
*,
value: List["WebJob"],
**kwargs
):
super(WebJobCollection, self).__init__(**kwargs)
self.value = value
self.next_link = None
| mit | 6,072,001,635,998,820,000 | 38.26125 | 152 | 0.622387 | false |
bottydim/detect-credit-card-fraud | ccfd_dnn/model_weight.py | 1 | 20628 | import os
os.environ['CUDA_LAUNCH_BLOCKING'] = '1'
import pandas as pd
import matplotlib
import numpy as np
import math
import matplotlib.pyplot as plt
from sklearn.preprocessing import Imputer
from sklearn.cross_validation import train_test_split
from sklearn import preprocessing
import plotly.tools as tls
import pandas as pd
from sqlalchemy import create_engine # database connection
import datetime as dt
import io
import logging
import plotly.plotly as py # interactive graphing
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
from plotly.graph_objs import Bar, Scatter, Marker, Layout
from heraspy.model import HeraModel
np.random.seed(1337)
import theano
import keras
from keras.preprocessing.sequence import pad_sequences
from keras.models import Model,model_from_yaml
from keras.layers import Input, Dense, GRU, LSTM, TimeDistributed, Masking,merge
from model import *
import argparse
import sys
if __name__ == "__main__":
t_start = dt.datetime.now()
parser = argparse.ArgumentParser(prog='Weighted Model')
parser.add_argument('-t','--table',required=True)
args = parser.parse_args()
####################################DATA SOURCE################################
table = vars(args)['table']
# table = 'data_trim'
# rsl_file = './data/gs_results_trim.csv'
# rsl_file = './data/psql_data_trim.csv'
# table = 'data_little_enc'
# rsl_file = './data/gs_results_little.csv'
# table = 'data_more'
# rsl_file = './data/gs_results_more.csv'
# table = 'auth'
# rsl_file = './data/auth.csv'
events_tbl = 'event'
events_tbl = None
rsl_file = './data/psql_{table}.csv'.format(table=table)
################################################################################
print "Commencing..."
data_dir = './data/'
evt_name = 'Featurespace_events_output.csv'
auth_name = 'Featurespace_auths_output.csv'
db_name = 'c1_agg.db'
address = "postgresql+pg8000://script@localhost:5432/ccfd"
# disk_engine = create_engine('sqlite:///'+data_dir+db_name,convert_unicode=True)
# disk_engine.raw_connection().connection.text_factory = str
disk_engine = create_engine(address)
#######################Settings#############################################
samples_per_epoch = trans_num_table(table,disk_engine,mode='train',trans_mode='train')
# epoch_limit = 10000
# samples_per_epoch = epoch_limit
# user_sample_size = 8000
epoch_limit = samples_per_epoch
user_sample_size = None
nb_epoch = 300
fraud_w_list = [1000.]
##########ENCODERS CONF
tbl_src = 'auth'
# tbl_src = table
tbl_evnt = 'event'
##################################
batch_size = 300
batch_size_val = 1000
print "SAMPLES per epoch:",samples_per_epoch
print "User sample size:",user_sample_size
print 'sequence length size',batch_size
# samples_per_epoch = 1959
# table = 'data_trim'
# samples_per_epoch = 485
lbl_pad_val = 2
pad_val = 0
# dropout_W_list = [0.3]
dropout_W_list = [0.4,0.5,0.6,0.7]
# dropout_W_list = [0.15,0.3,0.4,0.8]
input_dim = 44
hid_dims = [320]
num_l = [7]
lr_s = [2.5e-4]
# lr_s = [1.25e-4,6e-5]
# lr_s = [1e-2,1e-3,1e-4]
# lr_s = [1e-1,1e-2,1e-3]
num_opt = 1
opts = lambda x,lr:[keras.optimizers.RMSprop(lr=lr, rho=0.9, epsilon=1e-08),
# keras.optimizers.Adam(lr=lr, beta_1=0.9, beta_2=0.999, epsilon=1e-08),
# keras.optimizers.Nadam(lr=lr, beta_1=0.9, beta_2=0.999, epsilon=1e-08, schedule_decay=0.004)
][x]
# add_info = str(int(seq_len_param))+'_class_w_'+str(fraud_w)
print 'Populating encoders'
path_encoders ='./data/encoders/{tbl_src}+{tbl_evnt}'.format(tbl_src=tbl_src,tbl_evnt=tbl_evnt)
if os.path.exists(path_encoders):
encoders = load_encoders(path_encoders)
else:
encoders = populate_encoders_scale(tbl_src,disk_engine,tbl_evnt)
with open(path_encoders, 'wb') as output:
pickle.dump(encoders, output, pickle.HIGHEST_PROTOCOL)
print 'ENCODERS SAVED to {path}!'.format(path=path_encoders)
# sys.exit()
gru_dict = {}
lstm_dict = {}
for fraud_w in fraud_w_list:
add_info = 'Mask=pad_class_w_'+str(fraud_w)+'ES-OFF'
class_weight = {0 : 1.,
1: fraud_w,
2: 0.}
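        # Class weights: genuine transactions (label 0) keep weight 1, fraud
        # (label 1) is up-weighted by fraud_w, and label 2 (lbl_pad_val, the
        # padding class) is zero-weighted so padded timesteps do not
        # contribute to the loss.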
for dropout_W in dropout_W_list:
for hidden_dim in hid_dims:
# gru
for opt_id in range(num_opt):
for lr in lr_s:
optimizer = opts(opt_id,lr)
for num_layers in num_l:
for rnn in ['gru']:
short_title = 'bi_'+rnn.upper()+'_'+str(hidden_dim)+'_'+str(num_layers)+'_DO-'+str(dropout_W)+'_w'+str(class_weight[1])
title = 'Bidirectional_Class'+str(class_weight[1])+'_'+rnn.upper()+'_'+str(hidden_dim)+'_'+str(num_layers)+'_'+str(type(optimizer).__name__)+'_'+str(lr)+'_epochs_'+str(nb_epoch)+'_DO-'+str(dropout_W)
print title
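                                # Build a masked, stacked bidirectional RNN:
                                # timesteps equal to pad_val are masked out,
                                # each layer concatenates a forward and a
                                # backward pass, and a TimeDistributed softmax
                                # emits one of 3 labels per timestep.
                                # seq_len_param is assumed to come from the
                                # star import of `model` above.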
input_layer = Input(shape=(int(seq_len_param), input_dim),name='main_input')
mask = Masking(mask_value=pad_val)(input_layer)
x = mask
for i in range(num_layers):
if rnn == 'gru':
prev_frw = GRU(hidden_dim,#input_length=50,
return_sequences=True,go_backwards=False,stateful=False,
unroll=False,consume_less='gpu',
init='glorot_uniform', inner_init='orthogonal', activation='tanh',
inner_activation='hard_sigmoid', W_regularizer=None, U_regularizer=None,
b_regularizer=None, dropout_W=dropout_W, dropout_U=0.0)(x)
prev_bck = GRU(hidden_dim,#input_length=50,
return_sequences=True,go_backwards=True,stateful=False,
unroll=False,consume_less='gpu',
init='glorot_uniform', inner_init='orthogonal', activation='tanh',
inner_activation='hard_sigmoid', W_regularizer=None, U_regularizer=None,
b_regularizer=None, dropout_W=dropout_W, dropout_U=0.0)(x)
else:
prev_frw = LSTM(hidden_dim, return_sequences=True,go_backwards=False,stateful=False,
init='glorot_uniform', inner_init='orthogonal',
forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
W_regularizer=None, U_regularizer=None, b_regularizer=None, dropout_W=dropout_W, dropout_U=0.0)(x)
prev_bck = LSTM(hidden_dim, return_sequences=True,go_backwards=True,stateful=False,
init='glorot_uniform', inner_init='orthogonal',
forget_bias_init='one', activation='tanh', inner_activation='hard_sigmoid',
W_regularizer=None, U_regularizer=None, b_regularizer=None, dropout_W=dropout_W, dropout_U=0.0)(x)
x = merge([prev_frw, prev_bck], mode='concat')
output_layer = TimeDistributed(Dense(3,activation='softmax'))(x)
model = Model(input=[input_layer],output=[output_layer])
model.compile(optimizer=optimizer,
loss='sparse_categorical_crossentropy',
metrics=['accuracy'],
sample_weight_mode="temporal")
########save architecture ######
arch_dir = './data/models/archs/'+short_title+'.yml'
yaml_string = model.to_yaml()
with open(arch_dir, 'wb') as output:
pickle.dump(yaml_string, output, pickle.HIGHEST_PROTOCOL)
print 'model saved!'
##############
user_mode = 'train'
trans_mode = 'train'
data_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size,usr_ratio=80,class_weight=class_weight,lbl_pad_val = lbl_pad_val, pad_val = pad_val,
sub_sample=user_sample_size,epoch_size=epoch_limit,events_tbl=events_tbl)
# sub_sample=user_sample_size,epoch_size=samples_per_epoch)
########validation data
print 'Generating Validation set!'
user_mode = 'test'
trans_mode = 'test'
val_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size_val,usr_ratio=80,class_weight=class_weight,lbl_pad_val = lbl_pad_val, pad_val = pad_val,
sub_sample=None,epoch_size=None,events_tbl=events_tbl)
validation_data = next(val_gen)
print '################GENERATED#######################'
###############CALLBACKS
patience = 30
early_Stop = keras.callbacks.EarlyStopping(monitor='val_loss', patience=patience, verbose=0, mode='auto')
save_path = './data/models/'+table+'/'
var_name = '.{epoch:02d}-{val_loss:.5f}.hdf5'
checkpoint = keras.callbacks.ModelCheckpoint(save_path+short_title+var_name, monitor='val_loss', verbose=1, save_best_only=True, mode='auto')
root_url = 'http://localhost:9000'
remote_log = keras.callbacks.RemoteMonitor(root=root_url)
# callbacks = [early_Stop,checkpoint]
callbacks = [early_Stop,checkpoint,remote_log]
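                                # NOTE: callbacks is deliberately reset to []
                                # below; early stopping and checkpointing are
                                # disabled for this run (the 'ES-OFF' tag in
                                # add_info).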
callbacks = []
history = model.fit_generator(data_gen, samples_per_epoch, nb_epoch, verbose=1, callbacks=callbacks,validation_data=validation_data, nb_val_samples=None, class_weight=None, max_q_size=10000)
py.sign_in('bottydim', 'o1kuyms9zv')
auc_list = []
print '#########################TRAIN STATS################'
user_mode = 'train'
trans_mode = 'train'
val_samples = trans_num_table(table,disk_engine,mode=user_mode,trans_mode=trans_mode)
print '# samples',val_samples
plt_filename = './figures/GS/'+table+'/'+'ROC_'+user_mode+'_'+trans_mode+'_'+title+'_'+add_info+".png"
data_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size,usr_ratio=80,class_weight=None,lbl_pad_val = lbl_pad_val, pad_val = pad_val,events_tbl=events_tbl)
eval_list = eval_auc_generator(model, data_gen, val_samples, max_q_size=10000,plt_filename=plt_filename)
auc_val = eval_list[0]
clc_report = eval_list[1]
acc = eval_list[2]
print "AUC:",auc_val
                                print 'Classification report'
print clc_report
print 'Accuracy'
print acc
auc_list.append(str(auc_val))
print '##################EVALUATION USERS#########################'
user_mode = 'test'
trans_mode = 'train'
val_samples = trans_num_table(table,disk_engine,mode=user_mode,trans_mode=trans_mode)
print '# samples',val_samples
plt_filename = './figures/GS/'+table+'/'+'ROC_'+user_mode+'_'+trans_mode+'_'+title+'_'+add_info+".png"
eval_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size,usr_ratio=80,class_weight=None,lbl_pad_val = lbl_pad_val, pad_val = pad_val,events_tbl=events_tbl)
eval_list = eval_auc_generator(model, eval_gen, val_samples, max_q_size=10000,plt_filename=plt_filename)
auc_val = eval_list[0]
clc_report = eval_list[1]
acc = eval_list[2]
print "AUC:",auc_val
                                print 'Classification report'
print clc_report
print 'Accuracy'
print acc
auc_list.append(str(auc_val))
print '#####################################################'
print '##################EVALUATION Transactions#########################'
user_mode = 'train'
trans_mode = 'test'
val_samples = trans_num_table(table,disk_engine,mode=user_mode,trans_mode=trans_mode)
print '# samples',val_samples
plt_filename = './figures/GS/'+table+'/'+'ROC_'+user_mode+'_'+trans_mode+'_'+title+'_'+add_info+".png"
eval_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size,usr_ratio=80,class_weight=None,lbl_pad_val = lbl_pad_val, pad_val = pad_val,events_tbl=events_tbl)
eval_list = eval_auc_generator(model, eval_gen, val_samples, max_q_size=10000,plt_filename=plt_filename)
auc_val = eval_list[0]
clc_report = eval_list[1]
acc = eval_list[2]
print "AUC:",auc_val
                                print 'Classification report'
print clc_report
print 'Accuracy'
print acc
auc_list.append(str(auc_val))
print '#####################################################'
print '##################EVALUATION Pure#########################'
user_mode = 'test'
trans_mode = 'test'
val_samples = trans_num_table(table,disk_engine,mode=user_mode,trans_mode=trans_mode)
print '# samples',val_samples
plt_filename = './figures/GS/'+table+'/'+'ROC_'+user_mode+'_'+trans_mode+'_'+title+'_'+add_info+".png"
eval_gen = data_generator(user_mode,trans_mode,disk_engine,encoders,table=table,
batch_size=batch_size,usr_ratio=80,class_weight=None,lbl_pad_val = lbl_pad_val, pad_val = pad_val,events_tbl=events_tbl)
eval_list = eval_auc_generator(model, eval_gen, val_samples, max_q_size=10000,plt_filename=plt_filename)
auc_val = eval_list[0]
clc_report = eval_list[1]
acc = eval_list[2]
print "AUC:",auc_val
                                print 'Classification report'
print clc_report
print 'Accuracy'
print acc
auc_list.append(str(auc_val))
print '#####################################################'
with io.open(rsl_file, 'a', encoding='utf-8') as file:
auc_string = ','.join(auc_list)
title_csv = title.replace('_',',')+','+str(history.history['acc'][-1])+','+str(history.history['loss'][-1])+','+str(auc_val)+','+str(acc)+','+auc_string+'\n'
file.write(unicode(title_csv))
print 'logged @ {file}'.format(file=rsl_file)
trim_point = -15
fig = {
'data': [Scatter(
x=history.epoch[trim_point:],
y=history.history['loss'][trim_point:])],
'layout': {'title': title}
}
py.image.save_as(fig,filename='./results/figures/'+table+'/'+short_title+'_'+'LOSS'+'_'+add_info+".png")
trim_point = 0
fig = {
'data': [Scatter(
x=history.epoch[trim_point:],
y=history.history['loss'][trim_point:])],
'layout': {'title': title}
}
py.image.save_as(fig,filename='./results/figures/'+table+'/'+short_title+'_'+'LOSS'+'_'+'FULL'+".png")
# iplot(fig,filename='figures/'+title,image='png')
# title = title.replace('Loss','Acc')
fig = {
'data': [Scatter(
x=history.epoch[trim_point:],
y=history.history['acc'][trim_point:])],
'layout': {'title': title}
}
filename_val='./results/figures/'+table+'/'+short_title+'_'+'ACC'+'_'+add_info+".png"
py.image.save_as(fig,filename=filename_val)
print 'exported @',filename_val
fig = {
'data': [Scatter(
x=history.epoch[trim_point:],
y=history.history['val_loss'][trim_point:])],
'layout': {'title': title}
}
py.image.save_as(fig,filename='./results/figures/'+table+'/'+short_title+'_'+'VAL LOSS'+'_'+add_info+".png")
print 'time taken: {time}'.format(time=days_hours_minutes_seconds(dt.datetime.now()-t_start)) | mit | 5,387,433,479,277,463,000 | 54.905149 | 231 | 0.434458 | false |
GbalsaC/bitnamiP | venv/src/edx-submissions/submissions/tests/test_models.py | 1 | 5673 | """
Tests for submission models.
"""
from django.test import TestCase
from submissions.models import Submission, Score, ScoreSummary, StudentItem
class TestScoreSummary(TestCase):
"""
Test selection of options from a rubric.
"""
def test_latest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/class_participation.section_attendance"
)
first_score = Score.objects.create(
student_item=item,
submission=None,
points_earned=8,
points_possible=10,
)
second_score = Score.objects.create(
student_item=item,
submission=None,
points_earned=5,
points_possible=10,
)
latest_score = ScoreSummary.objects.get(student_item=item).latest
self.assertEqual(second_score, latest_score)
def test_highest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Low score is higher than no score...
low_score = Score.objects.create(
student_item=item,
points_earned=0,
points_possible=0,
)
self.assertEqual(
low_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Medium score should supplant low score
med_score = Score.objects.create(
student_item=item,
points_earned=8,
points_possible=10,
)
self.assertEqual(
med_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Even though the points_earned is higher in the med_score, high_score
# should win because it's 4/4 as opposed to 8/10.
high_score = Score.objects.create(
student_item=item,
points_earned=4,
points_possible=4,
)
self.assertEqual(
high_score,
ScoreSummary.objects.get(student_item=item).highest
)
# Put another medium score to make sure it doesn't get set back down
med_score2 = Score.objects.create(
student_item=item,
points_earned=5,
points_possible=10,
)
self.assertEqual(
high_score,
ScoreSummary.objects.get(student_item=item).highest
)
self.assertEqual(
med_score2,
ScoreSummary.objects.get(student_item=item).latest
)
def test_reset_score_highest(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Reset score with no score
Score.create_reset_score(item)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
# Non-reset score after a reset score
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=2,
points_possible=3,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 2)
self.assertEqual(highest.points_possible, 3)
# Reset score after a non-reset score
Score.create_reset_score(item)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
def test_highest_score_hidden(self):
item = StudentItem.objects.create(
student_id="score_test_student",
course_id="score_test_course",
item_id="i4x://mycourse/special_presentation"
)
# Score with points possible set to 0
# (by convention a "hidden" score)
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=0,
points_possible=0,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 0)
self.assertEqual(highest.points_possible, 0)
# Score with points
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=1,
points_possible=2,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 1)
self.assertEqual(highest.points_possible, 2)
# Another score with points possible set to 0
# The previous score should remain the highest score.
submission = Submission.objects.create(student_item=item, attempt_number=1)
Score.objects.create(
student_item=item,
submission=submission,
points_earned=0,
points_possible=0,
)
highest = ScoreSummary.objects.get(student_item=item).highest
self.assertEqual(highest.points_earned, 1)
self.assertEqual(highest.points_possible, 2)
| agpl-3.0 | -71,096,119,413,147,070 | 33.174699 | 83 | 0.600212 | false |
mgraffg/simplegp | examples/simplify.py | 1 | 2421 | from SimpleGP import GP
import numpy as np
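# Example script: exercises SimpleGP's tree simplification. Expression trees
# are hand-built in prefix order (operator codes first; `nvar` is the index of
# the variable terminal, `nvar+k` indexes constants) and each tree is printed
# before and after gp.simplify().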
seed = 0 # if len(sys.argv) == 1 else int(sys.argv[1])
x = np.linspace(0, 1, 100)
pol = np.array([0.2, -0.3, 0.2])
X = np.vstack((x**2, x, np.ones(x.shape[0])))
y = (X.T * pol).sum(axis=1)
gp = GP(popsize=10,
generations=100000,
verbose=True,
verbose_nind=1000,
min_length=1,
do_simplify=True,
func=["+", "-", "*", "/", 'abs', 'exp', 'sqrt',
'sin', 'cos', 'sigmoid', 'if', 'max', 'min',
'ln', 'sq'],
min_depth=0, fname_best='regression.npy',
seed=seed, nrandom=100, pxo=0.2, pgrow=0.5, walltime=None)
gp.create_random_constants()
x = x[:, np.newaxis]
gp.train(x, y)
gp.create_population()
nvar = gp._nop.shape[0]
ind = np.array([2, 3, 0, 0, nvar, nvar, 1, nvar, nvar,
0, 1, nvar, nvar, 2, nvar, nvar, 1, 3,
nvar, nvar, 3, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([1, 0, 3, nvar, nvar, 1, nvar, nvar,
3, 2, nvar, nvar, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
print ind2
ind = np.array([13, 5, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
ind = np.array([5, 13, 2, nvar, nvar], dtype=np.int)
print gp.print_infix(ind, constants=gp._ind_generated_c)
ind2 = gp.simplify(ind)
print gp.print_infix(ind2, constants=gp._ind_generated_c)
gp._p[0] = np.array([0, 2, nvar, nvar+2, nvar+1], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([0, nvar+1, 2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0) == "(X0 * 1.4)"
gp._p[0] = np.array([1, 0, 2, nvar, nvar+2, nvar+1,
2, nvar, nvar+2], dtype=np.int)
gp._p_constants[0] = np.array([0, 1.4])
print gp.print_infix(0)
gp.simplify(0)
print gp.print_infix(0)
| apache-2.0 | 7,469,544,312,858,264,000 | 31.716216 | 66 | 0.608013 | false |
qtproject/pyside-pyside | tests/QtWidgets/qpen_test.py | 1 | 2519 | #############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
import unittest
from helper import UsesQApplication
from PySide2.QtCore import Qt, QTimer
from PySide2.QtGui import QPen, QPainter
from PySide2.QtWidgets import QWidget
class Painting(QWidget):
def __init__(self):
QWidget.__init__(self)
self.penFromEnum = None
self.penFromInteger = None
def paintEvent(self, event):
painter = QPainter(self)
painter.setPen(Qt.NoPen)
self.penFromEnum = painter.pen()
painter.setPen(int(Qt.NoPen))
self.penFromInteger = painter.pen()
class QPenTest(UsesQApplication):
def testCtorWithCreatedEnums(self):
'''A simple case of QPen creation using created enums.'''
width = 0
style = Qt.PenStyle(0)
cap = Qt.PenCapStyle(0)
join = Qt.PenJoinStyle(0)
pen = QPen(Qt.blue, width, style, cap, join)
def testSetPenWithPenStyleEnum(self):
'''Calls QPainter.setPen with both enum and integer. Bug #511.'''
w = Painting()
w.show()
QTimer.singleShot(1000, self.app.quit)
self.app.exec_()
self.assertEqual(w.penFromEnum.style(), Qt.NoPen)
self.assertEqual(w.penFromInteger.style(), Qt.SolidLine)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | -7,631,307,534,288,177,000 | 33.986111 | 77 | 0.649861 | false |
karcio/checkSumValidatorGUI | checkSumVal/src/checkSumGui.py | 1 | 2950 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'checkSumGui.ui'
#
# Created: Thu Jan 8 02:22:42 2015
# by: PyQt5 UI code generator 5.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(390, 210)
self.label = QtWidgets.QLabel(Form)
self.label.setGeometry(QtCore.QRect(20, 20, 76, 15))
self.label.setObjectName("label")
self.label_2 = QtWidgets.QLabel(Form)
self.label_2.setGeometry(QtCore.QRect(20, 70, 76, 15))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(Form)
self.label_3.setGeometry(QtCore.QRect(20, 120, 36, 15))
self.label_3.setObjectName("label_3")
self.pushButton = QtWidgets.QPushButton(Form)
self.pushButton.setGeometry(QtCore.QRect(280, 160, 92, 27))
self.pushButton.setObjectName("pushButton")
self.lineEdit = QtWidgets.QLineEdit(Form)
self.lineEdit.setGeometry(QtCore.QRect(120, 20, 250, 25))
self.lineEdit.setMaxLength(32)
self.lineEdit.setObjectName("lineEdit")
self.lineEdit_2 = QtWidgets.QLineEdit(Form)
self.lineEdit_2.setGeometry(QtCore.QRect(120, 70, 250, 25))
self.lineEdit_2.setMaxLength(32)
self.lineEdit_2.setObjectName("lineEdit_2")
self.label_4 = QtWidgets.QLabel(Form)
self.label_4.setGeometry(QtCore.QRect(120, 120, 251, 16))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
font.setStyleStrategy(QtGui.QFont.PreferAntialias)
self.label_4.setFont(font)
self.label_4.setText("")
self.label_4.setObjectName("label_4")
self.pushButton_2 = QtWidgets.QPushButton(Form)
self.pushButton_2.setGeometry(QtCore.QRect(170, 160, 92, 27))
self.pushButton_2.setObjectName("pushButton_2")
self.retranslateUi(Form)
self.pushButton_2.clicked.connect(Form.close)
QtCore.QMetaObject.connectSlotsByName(Form)
self.pushButton.clicked.connect(self.validation_b)
def validation_b(self):
text1 = self.lineEdit.text()
text2 = self.lineEdit_2.text()
if text1 == text2:
result = "True - identical"
else:
result = "False - NOT identical"
        self.label_4.setText(result)  # result is already a str; repr() would add quotes
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Check Sum validator v 0.2"))
self.label.setText(_translate("Form", "insert string"))
self.label_2.setText(_translate("Form", "insert string"))
self.label_3.setText(_translate("Form", "result"))
self.pushButton.setText(_translate("Form", "&validate"))
self.pushButton_2.setText(_translate("Form", "&exit"))
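# Minimal launcher sketch (an assumption: pyuic-generated modules like this
# one are normally imported elsewhere, but the form can be shown standalone):
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    Form = QtWidgets.QWidget()
    ui = Ui_Form()
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())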
| gpl-3.0 | -6,674,606,078,571,181,000 | 36.820513 | 76 | 0.647119 | false |
csvtools/csvtools | src/tests/test_pointsizes.py | 1 | 1209 | # Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import unittest
from lib.pointsizes import Pointsizes
class SheetTestCase(unittest.TestCase):
def setUp(self):
pass
def test(self):
self.assertEquals(Pointsizes.min(), 1)
self.assertEquals(Pointsizes.max(), 23)
self.assertEquals(Pointsizes.normal(), 12)
self.assertEquals(Pointsizes.percentage(12), 100)
self.assertEquals(Pointsizes.toFontSize(12), 12)
self.assertEquals(Pointsizes.toFontSize(50), 72)
self.assertEquals(Pointsizes.toFontSize(-2), 1)
self.assertEquals(Pointsizes.toFontSize(16), 20)
self.assertEquals(Pointsizes.zoom(11, +2), 13)
self.assertEquals(Pointsizes.zoom(11, +20), 23)
self.assertEquals(Pointsizes.zoom(11, -2), 9)
self.assertEquals(Pointsizes.zoom(11, -20), 1)
self.assertEquals(Pointsizes.toPointSize(11), 11)
self.assertEquals(Pointsizes.toPointSize(100), 23)
self.assertEquals(Pointsizes.toPointSize(0), 1)
self.assertEquals(Pointsizes.toPointSize(14), 13)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 8,673,098,653,878,948,000 | 34.558824 | 79 | 0.679074 | false |
mkobos/tree_crawler | concurrent_tree_crawler/multithreaded_crawler.py | 1 | 5649 | import os
import logging
import time
import datetime
from concurrent_tree_crawler.common.file_helper import lenient_makedir
from concurrent_tree_crawler.common.logger import Logger
from concurrent_tree_crawler.common.activity_schedule import AlwaysActiveSchedule
from concurrent_tree_crawler.crawlers_manager import CrawlersManager
from concurrent_tree_crawler.rw_lock_tree_accessor import RWLockTreeAccessor
from concurrent_tree_crawler.navigator_tree_wrapper import NavigatorTreeWrapper
from concurrent_tree_crawler.tree_saver_thread import TreeSaverThread
from concurrent_tree_crawler.abstract_node import NodeState
from concurrent_tree_crawler.xml_tree_serialization import XMLTreeReader
class MultithreadedCrawler:
"""
Runs several threads to crawl the tree.
It is also responsible for all the ancillary stuff:
makes sure that the state of the tree is saved to disk,
sets up the logging level etc.
"""
def __init__(self, navigators, sentinel, activity_schedule=None,
log_file_path=None, state_file_path=None, save_period=None,
logging_level=logging.ERROR):
"""
@param navigators: list of navigators to be used by the crawler.
Each navigator will be run in a separate thread, thus the
number of the threads is equal to the number of navigators.
@type navigators: list of L{AbstractTreeNavigator}s
@param sentinel: a technical node which will be made parent of the
root node.
@type sentinel: L{AbstractNode}
@param activity_schedule: if C{None}, no schedule is used and the
program works until it finishes crawling.
@type activity_schedule: L{AbstractActivitySchedule}
@param log_file_path: path to the log file. If C{None}, no log file
will be used.
@param state_file_path: path to the file where the state of the
program will be saved. If C{None}, the state will not be saved.
@param save_period: time between saving the tree state. If
C{state_file_path} is C{None}, this value is ignored.
@param logging_level: one of the logging level constants from C{logging}
"""
if log_file_path is not None:
lenient_makedir(os.path.dirname(log_file_path))
if state_file_path is not None:
if os.path.exists(state_file_path):
print "State file already exists. Loading the tree from this "\
"file and changing nodes with state PROCESSING to OPEN ... ",
self.__load_state_file(state_file_path, sentinel)
print "Done."
else:
lenient_makedir(os.path.dirname(state_file_path))
self.__tree = RWLockTreeAccessor(sentinel)
self.__navigators = navigators
self.__manager = None
self.__state_file_path = state_file_path
self.__save_period = save_period
self.__activity_schedule = activity_schedule
if activity_schedule is None:
self.__activity_schedule = AlwaysActiveSchedule()
self.__logging_level = logging_level
self.__log_file_path = log_file_path
def run(self):
"""
@return: sentinel node
@rtype: L{AbstractNode}
"""
self.__manager = self._create_crawlers_manager(
self.__tree, self.__navigators)
if self.__log_file_path is not None:
Logger.start(file_path=self.__log_file_path,
logging_level=self.__logging_level)
while True:
activity_time = self.__sleep_until_activity_period()
saver_thread = None
if self.__state_file_path is not None:
saver_thread = self.__start_tree_saver_thread()
self.__manager.start()
threads_finished = \
self.__manager.wait_until_finish(timeout=activity_time)
if self.__state_file_path is not None:
saver_thread.stop_activity()
saver_thread.join()
if threads_finished:
break
if self.__log_file_path is not None:
Logger.stop()
return self.__tree.get_sentinel()
def _create_crawlers_manager(self, tree, navigators):
navigator_wrappers = []
for navigator in navigators:
navigator_wrapper = NavigatorTreeWrapper(navigator, tree)
navigator_wrappers.append(navigator_wrapper)
return CrawlersManager(tree, navigator_wrappers)
def __start_tree_saver_thread(self):
t = TreeSaverThread(
self.__state_file_path, self.__tree, self.__save_period)
t.daemon = True
t.start()
return t
def __sleep_until_activity_period(self):
"""
		Sleep (block execution) until the schedule enters an activity period.
@return: activity time, i.e. time until the start of the next
sleep period, C{None} if such time point cannot be determined
(as in case when the activity time will not stop in future).
@rtype: number of seconds
"""
while True:
now = datetime.datetime.now()
info = self.__activity_schedule.get_activity_info(now)
if info.future_mode_change is None:
if info.is_in_activity_period:
return None
else:
raise Exception("Going to sleep forever?")
mode_change_time = (info.future_mode_change - now).total_seconds()
if not info.is_in_activity_period:
logging.info("Going to sleep for {:.1f} seconds "
"(according to schedule)".format(
mode_change_time))
time.sleep(mode_change_time)
logging.info("Awaken")
else:
logging.info("Starting activity for {:.1f} seconds "
"(according to schedule)".format(
mode_change_time))
return mode_change_time
@staticmethod
def __load_state_file(file_path, sentinel):
with open(file_path) as f:
reader = XMLTreeReader(f)
reader.read(sentinel)
MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(
sentinel.get_child("root"))
@staticmethod
def __change_state_from_PROCESSING_to_OPEN(node):
if node.get_state() == NodeState.PROCESSING:
node.set_state(NodeState.OPEN)
for child in node.get_children():
MultithreadedCrawler.__change_state_from_PROCESSING_to_OPEN(child)
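# Illustrative wiring sketch ("My..." names are hypothetical stand-ins for
# concrete AbstractTreeNavigator / AbstractNode implementations):
#
#   navigators = [MyNavigator() for _ in range(4)]
#   crawler = MultithreadedCrawler(navigators, MySentinel(),
#                                  state_file_path="state/tree.xml",
#                                  save_period=60,
#                                  logging_level=logging.INFO)
#   sentinel = crawler.run()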
| mit | -2,767,789,249,155,980,300 | 36.164474 | 81 | 0.725084 | false |
elli0ttB/problems | sorting/quicksort.py | 1 | 1524 | #!/usr/bin/env python
def quicksort(arr, partition):
if (partition == "hoare"):
quicksort_hoare(arr, 0, len(arr) -1)
elif (partition == "lomuto"):
quicksort_lomuto(arr, 0, len(arr) -1)
else:
        raise ValueError("unknown partition scheme: %r" % partition)
def quicksort_hoare(arr, lo, hi):
    # lo and hi are both inclusive indices (the initial call passes len(arr) - 1).
    """Recursively quicksort arr[lo..hi] using the Hoare partition scheme."""
if lo < hi:
p = hoare(arr, lo, hi)
quicksort_hoare(arr, lo, p)
quicksort_hoare(arr, p+1, hi)
def quicksort_lomuto(arr, lo, hi):
    # lo and hi are both inclusive indices (the initial call passes len(arr) - 1).
    """Recursively quicksort arr[lo..hi] using the Lomuto partition scheme."""
if lo < hi:
p = lomuto(arr, lo, hi)
quicksort_lomuto(arr, lo, p-1)
quicksort_lomuto(arr, p+1, hi)
def lomuto(arr, lo, hi):
pivot = arr[hi]
i = lo - 1
for j in range(lo, hi + 1):
if arr[j] <= pivot:
i += 1
arr[i], arr[j] = arr[j], arr[i]
    return i  # the pivot now rests at its final index i
def hoare(arr, lo, hi):
pivot = arr[lo]
i = lo - 1
j = hi + 1
while True:
i, j = i+1, j-1
while arr[j] > pivot:
j -= 1
while arr[i] < pivot:
i += 1
if i < j:
arr[i], arr[j] = arr[j], arr[i]
else:
return j
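# Example behaviour (both schemes sort in place):
#
#   data = [5, 2, 9, 1, 5]
#   quicksort(data, "hoare")   # data is now [1, 2, 5, 5, 9]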
def main():
import sort_test
    # sort_test.test is assumed to accept a callable taking a single list;
    # wrap each scheme accordingly (lom/hor were undefined names here).
    sort_test.test(lambda arr: quicksort(arr, "lomuto"))
    sort_test.test(lambda arr: quicksort(arr, "hoare"))
if __name__ == "__main__":
main()
| mit | -8,108,625,509,353,319,000 | 25.736842 | 94 | 0.532152 | false |
Turgon37/OpenVPN_UAM | OpenVPNUAM/pki/pki_filetree.py | 1 | 6143 | # -*- coding: utf8 -*-
# This file is a part of OpenVPN-UAM
#
# Copyright (c) 2015 Thomas PAJON, Pierre GINDRAUD
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""PKI - Public Key Infrastructure File Tree program class
This class is responsive of management of all SSL files
"""
# System imports
import logging
import os
import OpenSSL
from OpenSSL import crypto
from OpenSSL.crypto import (_lib as lib, _ffi as ffi)
# Project imports
from ..config import Error
# Global project declarations
g_sys_log = logging.getLogger('openvpn-uam.pki.file')
class PKIFileTree(object):
"""Build an instance of the pki model class
This instance must be called in the openvpn uam program class
"""
def __init__(self, confparser):
"""Constructor : Build a new PKI API instance
"""
self.__cp = confparser
# the root path of file tree
self.__new_cert_directory = "certificates/"
# the cipher to use for private key encryption
self.__cipher = "DES3"
def load(self):
"""Return a boolean indicates if PKI is ready to work or not
This function check things required by PKI working and return a boolean
that indicates if the PKI is ready to work with certificate or not
@return [bool] The ready status
"""
# check PKI section in configuration file
if not self.__cp.has_section(self.__cp.PKI_SECTION):
g_sys_log.error('Missing pki section in configuration file')
return False
sec = self.__cp.getItems(self.__cp.PKI_SECTION)
# read the new cert directory path from config file
self.__new_cert_directory = self.__cp.get(
self.__cp.PKI_SECTION,
'cert_directory',
fallback=self.__new_cert_directory).rstrip('/') + '/'
self.__cipher = self.__cp.get(
self.__cp.PKI_SECTION,
'cert_key_cipher',
fallback=self.__cipher)
# BAD USAGE but no other solution
if lib.EVP_get_cipherbyname(self.__cipher.encode()) == ffi.NULL:
g_sys_log.fatal("Invalid cipher name")
return False
if not self.makePath(self.__new_cert_directory):
g_sys_log.fatal("Certificate directory is invalid")
return False
return True
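  # Example of the configuration keys consumed above ("pki" is an assumed
  # section name; the real one is whatever self.__cp.PKI_SECTION resolves to):
  #
  #   [pki]
  #   cert_directory = certificates/
  #   cert_key_cipher = DES3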
# Tools
def makePath(self, path):
"""Ensure that the given path is builded on the file system
@param path [str] the path to check for
@return [bool] True if the entire path is existing on the FS
False if an error happen
"""
p = ""
for folder in path.split('/'):
if len(folder) == 0:
continue
p += folder + '/'
if not os.path.exists(p):
# create it
g_sys_log.info("Creating directory '%s'", p)
try:
os.mkdir(p)
        except OSError as e:
          g_sys_log.error("Unable to create directory '%s': %s", p, e)
return False
# if cert path already exist
else:
# check if it is a valid directory
if not os.path.isdir(p):
g_sys_log.error("File '%s' is not a directory", p)
return False
return True
# API
def storeBytesToFile(self, content, path):
"""Write a list of bytes into a file
@param content [bytes/str] the content to write into the file
@param path [str] the path to the file into
"""
f = None
if os.path.exists(path):
      g_sys_log.error("Refusing to overwrite existing file '%s'.", path)
return
if isinstance(content, bytes):
# open output file in binary mode
f = open(path, "wb")
elif isinstance(content, str):
# open output file in text mode
f = open(path, "wt")
assert f is not None
f.write(content)
f.close()
def storePKIUserCertificate(self, user, hostname, certificate, obj,
password=None):
"""Store a given PKI object into a file
@param user [User] the user to which the certificate is associated
@param hostname [Hostname] the hostname to which the certificate is
associated
@param certificate [Certificate] the Certificate instance associated with
the file
@param obj [X509/PKey] The object that will be dump to the file
@param password [str] OPTIONNAL : an optionnal passphrase to use for encrypt
the output (if available)
"""
path = (self.__new_cert_directory + str(user.id) + "/" + str(hostname.id) +
"/")
self.makePath(path)
bytes_ = None
if isinstance(obj, OpenSSL.crypto.X509):
bytes_ = crypto.dump_certificate(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".crt"
if isinstance(obj, OpenSSL.crypto.X509Req):
bytes_ = crypto.dump_certificate_request(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".csr"
elif isinstance(obj, OpenSSL.crypto.PKey):
if isinstance(password, str):
bytes_ = crypto.dump_privatekey(crypto.FILETYPE_PEM, obj,
self.__cipher, password.encode())
else:
bytes_ = crypto.dump_privatekey(crypto.FILETYPE_PEM, obj)
path += str(certificate.id) + ".key"
assert bytes_ is not None
self.storeBytesToFile(bytes_, path)
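# Hedged usage sketch (user, hostname and certificate stand for model objects
# exposing the ids/attributes accessed above; all values are hypothetical):
#
#   tree = PKIFileTree(confparser)
#   if tree.load():
#       tree.storePKIUserCertificate(user, hostname, certificate, x509_obj,
#                                    password="secret")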
| gpl-3.0 | -2,542,147,906,521,464,000 | 33.318436 | 80 | 0.654892 | false |
RNAcentral/rnacentral-import-pipeline | rnacentral_pipeline/databases/pirbase/fetch.py | 1 | 1316 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import urllib
from pathlib import Path
import typing as ty
from furl import furl
import requests
from bs4 import BeautifulSoup
def base_url(url: furl) -> furl:
base = furl(url)
base.path.segments = base.path.segments[:-1]
return base
def extract_urls(base: furl, document: str) -> ty.List[furl]:
soup = BeautifulSoup(document)
urls = []
links = soup.find("table").find_all("a")
for link in links:
href = link.get("href")
if href.endswith("json.gz"):
urls.append(base / href)
return urls
def find_urls(url: furl):
response = requests.get(url.url)
response.raise_for_status()
return extract_urls(base_url(url), response.text)
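# Hypothetical usage sketch (the index URL below is made up for illustration):
#
#   if __name__ == "__main__":
#       for u in find_urls(furl("https://example.org/pirbase/downloads/")):
#           print(u.url)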
| apache-2.0 | -4,344,957,713,363,397,600 | 28.244444 | 72 | 0.713526 | false |
acutesoftware/worldbuild | scripts/minecraft/go_minecraft.py | 1 | 2260 | # go_minecraft.py
import sys
import time
import aikif.toolbox.interface_windows_tools as mod_tool
players = ['DynamiteBuilder', 'craftandstore']
#server = '1.9'
#server = '1.10'
server = '1.11.2'
seed = 0
if server == '1.11.2':
seed = -7560993781265470572
locations = [
{'name':'home', 'loc':'61 64 239'},
{'name':'Woodlands Mansion', 'loc':'4473 66 5773'},
{'name':'Stronghold', 'loc':'-184 67 1736'},
{'name':'Village', 'loc':'-710 87 548'},
]
elif server == '1.10':
seed = 8239770600742919613
locations = [
{'name':'home', 'loc':'248 66 -61'},
{'name':'farm', 'loc':'960 77 -260' },
{'name':'floating-garden', 'loc':'685 107 -588' },
{'name':'floating-castle', 'loc':'-202 105 -655' },
{'name':'stronghold', 'loc':'415 72 -2198' },
{'name':'village', 'loc':'121 77 -2019' },
{'name':'overhang-lookout/evil storm and zoo / garage', 'loc':'-449 110 -1830' },
{'name':'rock-island / harbour', 'loc':'154 98 384' },
{'name':'enchanted-village','loc':'1082 87 -1297' },
{'name':'flower-garden','loc':'1254 105 -1807' },
]
else:
seed = 2677023417700615710
locations = [
{'name':'v1-home', 'loc':'151 103 736'},
{'name':'v1-treehouse', 'loc':'120 72 662' },
{'name':'v1-castle', 'loc':'-132 68 388' },
{'name':'v1-village', 'loc':'-298 82 946' },
{'name':'v1-stables', 'loc':'-602 82 951' },
{'name':'v1-desert', 'loc':'-1524 97 1580' },
]
print('Minecraft Teleport Service for players ' + str(players))
print('(server version=' + server + ', seed = ' + str(seed) + ' )')
for num, l in enumerate(locations):
print(str(num+1) + ' = ' + l['name'])
loc = locations[int(input('Enter Location ')) - 1]
mod_tool.app_activate('Minecraft server')
for p in players:
print('Teleporting ' + p + ' to ' + loc['name'] + ' (' + loc['loc'] + ')')
mod_tool.send_keys('/tp ' + p + ' ' + loc['loc'])
mod_tool.send_keys("{ENTER}") # needs Enter key
time.sleep(0.1)
| gpl-2.0 | -5,863,558,261,222,107,000 | 31.285714 | 89 | 0.494248 | false |
keenondrums/sovrin-node | sovrin_client/agent/walleted.py | 1 | 42903 | import asyncio
import collections
import inspect
import json
import time
from datetime import datetime
from typing import Dict, List, Union
from base58 import b58decode
from common.serializers.serialization import serialize_msg_for_signing
from stp_core.common.log import getlogger
from plenum.common.signer_did import DidSigner
from plenum.common.constants import TYPE, DATA, NONCE, IDENTIFIER, NAME, VERSION, \
TARGET_NYM, ATTRIBUTES, VERKEY, VERIFIABLE_ATTRIBUTES, PREDICATES
from plenum.common.types import f
from plenum.common.util import getTimeBasedId, getCryptonym, \
isMaxCheckTimeExpired, convertTimeBasedReqIdToMillis, friendlyToRaw
from plenum.common.verifier import DidVerifier
from anoncreds.protocol.issuer import Issuer
from anoncreds.protocol.prover import Prover
from anoncreds.protocol.verifier import Verifier
from anoncreds.protocol.globals import TYPE_CL
from anoncreds.protocol.types import AttribDef, ID, ProofRequest, AvailableClaim
from plenum.common.exceptions import NotConnectedToAny
from sovrin_client.agent.agent_issuer import AgentIssuer
from sovrin_client.agent.backend import BackendSystem
from sovrin_client.agent.agent_prover import AgentProver
from sovrin_client.agent.agent_verifier import AgentVerifier
from sovrin_client.agent.constants import ALREADY_ACCEPTED_FIELD, CLAIMS_LIST_FIELD, \
REQ_MSG, PING, ERROR, EVENT, EVENT_NAME, EVENT_NOTIFY_MSG, \
EVENT_POST_ACCEPT_INVITE, PONG, EVENT_NOT_CONNECTED_TO_ANY_ENV
from sovrin_client.agent.exception import NonceNotFound, SignatureRejected
from sovrin_client.agent.helper import friendlyVerkeyToPubkey, rawVerkeyToPubkey
from sovrin_client.agent.msg_constants import ACCEPT_INVITE, CLAIM_REQUEST, \
PROOF, AVAIL_CLAIM_LIST, CLAIM, PROOF_STATUS, NEW_AVAILABLE_CLAIMS, \
REF_REQUEST_ID, REQ_AVAIL_CLAIMS, INVITE_ACCEPTED, PROOF_REQUEST
from sovrin_client.client.wallet.attribute import Attribute, LedgerStore
from sovrin_client.client.wallet.connection import Connection, constant
from sovrin_client.client.wallet.wallet import Wallet
from sovrin_common.exceptions import ConnectionNotFound, ConnectionAlreadyExists, \
NotConnectedToNetwork, LinkNotReady, VerkeyNotFound, RemoteEndpointNotFound
from sovrin_common.identity import Identity
from sovrin_common.constants import ENDPOINT
from sovrin_common.util import ensureReqCompleted
from sovrin_common.config import agentLoggingLevel
from sovrin_common.exceptions import InvalidConnectionException
from plenum.common.constants import PUBKEY
from sovrin_common.util import getNonceForProof
logger = getlogger()
logger.setLevel(agentLoggingLevel)
class Walleted(AgentIssuer, AgentProver, AgentVerifier):
"""
An agent with a self-contained wallet.
Normally, other logic acts upon a remote agent. That other logic holds keys
and signs messages and transactions that the Agent then forwards. In this
case, the agent holds a wallet.
"""
def __init__(self,
issuer: Issuer = None,
prover: Prover = None,
verifier: Verifier = None):
AgentIssuer.__init__(self, issuer)
AgentProver.__init__(self, prover)
AgentVerifier.__init__(self, verifier)
# TODO Why are we syncing the client here?
if self.client:
self.syncClient()
self.rcvdMsgStore = {} # type: Dict[reqId, [reqMsg]]
self.msgHandlers = {
ERROR: self._handleError,
EVENT: self._eventHandler,
PING: self._handlePing,
ACCEPT_INVITE: self._handleAcceptance,
REQ_AVAIL_CLAIMS: self.processReqAvailClaims,
CLAIM_REQUEST: self.processReqClaim,
CLAIM: self.handleReqClaimResponse,
PROOF: self.verifyProof,
PROOF_STATUS: self.handleProofStatusResponse,
PROOF_REQUEST: self.handleProofRequest,
PONG: self._handlePong,
INVITE_ACCEPTED: self._handleAcceptInviteResponse,
AVAIL_CLAIM_LIST: self._handleAvailableClaimsResponse,
NEW_AVAILABLE_CLAIMS: self._handleNewAvailableClaimsDataResponse
}
self.logger = logger
self.issuer_backend = None
self._invites = {} # type: Dict[Nonce, Tuple(InternalId, str)]
self._attribDefs = {} # type: Dict[str, AttribDef]
self.defined_claims = [] # type: List[Dict[str, Any]
# dict for proof request schema Dict[str, Dict[str, any]]
self._proofRequestsSchema = {}
def syncClient(self):
obs = self._wallet.handleIncomingReply
if not self.client.hasObserver(obs):
self.client.registerObserver(obs)
self._wallet.pendSyncRequests()
prepared = self._wallet.preparePending()
self.client.submitReqs(*prepared)
@property
def wallet(self) -> Wallet:
return self._wallet
@wallet.setter
def wallet(self, wallet):
self._wallet = wallet
@property
def lockedMsgs(self):
# Msgs for which signature verification is required
return ACCEPT_INVITE, CLAIM_REQUEST, PROOF, \
CLAIM, AVAIL_CLAIM_LIST, EVENT, PONG, REQ_AVAIL_CLAIMS
async def postProofVerif(self, claimName, link, frm):
raise NotImplementedError
def is_claim_available(self, link, claim_name):
return any(
ac[NAME] == claim_name for ac in self._get_available_claim_list_by_internal_id(
link.internalId))
async def _postProofVerif(self, claimName, link, frm):
link.verifiedClaimProofs.append(claimName)
await self.postProofVerif(claimName, link, frm)
async def _set_available_claim_by_internal_id(self, internal_id, schema_id):
sd = await self.schema_dict_from_id(schema_id)
try:
if not any(
d == sd for d in self.issuer.wallet.availableClaimsByInternalId[internal_id]):
self.issuer.wallet.availableClaimsByInternalId[internal_id].append(
sd)
except KeyError:
self.issuer.wallet.availableClaimsByInternalId[internal_id] = [sd]
def _get_available_claim_list_by_internal_id(self, internal_id):
return self.issuer.wallet.availableClaimsByInternalId.get(
internal_id, set())
def get_available_claim_list(self, link):
li = self.wallet.getConnectionBy(remote=link.remoteIdentifier)
# TODO: Need to return set instead of list, but if we return set,
# stack communication fails as set is not json serializable,
# need to work on that.
if li is None:
return list()
return list(
self._get_available_claim_list_by_internal_id(li.internalId))
def getErrorResponse(self, reqBody, errorMsg="Error"):
invalidSigResp = {
TYPE: ERROR,
DATA: errorMsg,
REQ_MSG: reqBody,
}
return invalidSigResp
def logAndSendErrorResp(self, to, reqBody, respMsg, logMsg):
logger.warning(logMsg)
self.signAndSend(msg=self.getErrorResponse(reqBody, respMsg),
signingIdr=self.wallet.defaultId, name=to)
# TODO: Verification needs to be moved out of it,
# use `verifySignature` instead
def verifyAndGetLink(self, msg):
body, (frm, ha) = msg
nonce = body.get(NONCE)
try:
kwargs = dict(nonce=nonce, remoteIdr=body.get(
f.IDENTIFIER.nm), remoteHa=ha)
if ha is None:
                # In case of ZStack,
kwargs.update(remotePubkey=frm)
return self.linkFromNonce(**kwargs)
except NonceNotFound:
self.logAndSendErrorResp(frm, body,
"Nonce not found",
"Nonce not found for msg: {}".format(msg))
return None
def linkFromNonce(self, nonce, remoteIdr, remoteHa=None,
remotePubkey=None):
internalId = self.get_internal_id_by_nonce(nonce)
linkName = self.get_link_name_by_internal_id(internalId)
link = self.wallet.getConnectionBy(internalId=internalId)
if not link:
# QUESTION: We use wallet.defaultId as the local identifier,
# this looks ok for test code, but not production code
link = Connection(linkName,
self.wallet.defaultId,
self.wallet.getVerkey(),
request_nonce=nonce,
remoteIdentifier=remoteIdr,
remoteEndPoint=remoteHa,
internalId=internalId,
remotePubkey=remotePubkey)
self.wallet.addConnection(link)
else:
link.remoteIdentifier = remoteIdr
link.remoteEndPoint = remoteHa
return link
def get_internal_id_by_nonce(self, nonce):
if nonce in self._invites:
return self._invites[nonce][0]
else:
raise NonceNotFound
def get_link_name_by_internal_id(self, internalId):
for invite in self._invites.values():
if invite[0] == internalId:
return invite[1]
def set_issuer_backend(self, backend: BackendSystem):
self.issuer_backend = backend
async def publish_issuer_keys(self, schema_id, p_prime, q_prime):
keys = await self.issuer.genKeys(schema_id,
p_prime=p_prime,
q_prime=q_prime)
await self.add_to_available_claims(schema_id)
return keys
async def schema_dict_from_id(self, schema_id):
schema = await self.issuer.wallet.getSchema(schema_id)
return self.schema_dict(schema)
async def publish_revocation_registry(self, schema_id, rev_reg_id='110', size=5):
return await self.issuer.issueAccumulator(schemaId=schema_id,
iA=rev_reg_id,
L=size)
def schema_dict(self, schema):
return {
NAME: schema.name,
VERSION: schema.version,
"schemaSeqNo": schema.seqId
}
async def add_to_available_claims(self, schema_id):
schema = await self.issuer.wallet.getSchema(schema_id)
self.defined_claims.append(self.schema_dict(schema))
async def publish_schema(self,
attrib_def_name,
schema_name,
schema_version):
attribDef = self._attribDefs[attrib_def_name]
schema = await self.issuer.genSchema(schema_name,
schema_version,
attribDef.attribNames())
schema_id = ID(schemaKey=schema.getKey(), schemaId=schema.seqId)
return schema_id
def add_attribute_definition(self, attr_def: AttribDef):
self._attribDefs[attr_def.name] = attr_def
async def get_claim(self, schema_id: ID):
return await self.prover.wallet.getClaimAttributes(schema_id)
def new_identifier(self, seed=None):
idr, _ = self.wallet.addIdentifier(seed=seed)
verkey = self.wallet.getVerkey(idr)
return idr, verkey
def get_link_by_name(self, name):
return self.wallet.getConnection(str(name))
def signAndSendToLink(self, msg, linkName, origReqId=None):
link = self.wallet.getConnection(linkName, required=True)
if not link.localIdentifier:
raise LinkNotReady('connection is not yet established, '
'send/accept request first')
ha = link.getRemoteEndpoint(required=False)
name = link.name
if not ha:
            # if no remote address is present, then it's an upcoming link, so
            # we may have no explicit connection (we work in listener mode).
            # The public key is used as the name in this case.
name = link.remotePubkey
if ha:
self.connectTo(link=link)
return self.signAndSend(msg=msg, signingIdr=link.localIdentifier,
name=name, ha=ha, origReqId=origReqId)
def signAndSend(self, msg, signingIdr, name=None, ha=None, origReqId=None):
msg[f.REQ_ID.nm] = getTimeBasedId()
if origReqId:
msg[REF_REQUEST_ID] = origReqId
msg[IDENTIFIER] = signingIdr
signature = self.wallet.signMsg(msg, signingIdr)
msg[f.SIG.nm] = signature
self.sendMessage(msg, name=name, ha=ha)
return msg[f.REQ_ID.nm]
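    # For reference, signAndSend() leaves the outgoing envelope shaped roughly
    # like this (field names resolved from plenum's constants; informal sketch,
    # not a wire-format spec):
    #
    #   {..., reqId: <time-based id>, identifier: <signing DID>,
    #    signature: <base58 signature>, refRequestId: <original reqId, if any>}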
@staticmethod
def getCommonMsg(typ, data):
msg = {
TYPE: typ,
DATA: data
}
return msg
@classmethod
def createInviteAcceptedMsg(cls, claimLists, alreadyAccepted=False):
data = {
CLAIMS_LIST_FIELD: claimLists
}
if alreadyAccepted:
data[ALREADY_ACCEPTED_FIELD] = alreadyAccepted
return cls.getCommonMsg(INVITE_ACCEPTED, data)
@classmethod
def createNewAvailableClaimsMsg(cls, claimLists):
data = {
CLAIMS_LIST_FIELD: claimLists
}
return cls.getCommonMsg(NEW_AVAILABLE_CLAIMS, data)
@classmethod
def createClaimMsg(cls, claim):
return cls.getCommonMsg(CLAIM, claim)
def _eventHandler(self, msg):
body, _ = msg
eventName = body[EVENT_NAME]
data = body[DATA]
self.notifyEventListeners(eventName, **data)
def notifyEventListeners(self, eventName, **data):
for el in self._eventListeners.get(eventName, []):
el(notifier=self, **data)
def notifyMsgListener(self, msg):
self.notifyEventListeners(EVENT_NOTIFY_MSG, msg=msg)
def isSignatureVerifRespRequired(self, typ):
return typ in self.lockedMsgs and typ not in [EVENT, PING, PONG]
def sendSigVerifResponseMsg(self, respMsg, to, reqMsgTyp, identifier):
if self.isSignatureVerifRespRequired(reqMsgTyp):
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
respMsg, identifier, to)
def handleEndpointMessage(self, msg):
body, frm = msg
logger.debug("Message received (from -> {}): {}".format(frm, body))
if isinstance(frm, bytes):
frm = frm.decode()
for reqFieldName in (TYPE, f.REQ_ID.nm):
reqFieldValue = body.get(reqFieldName)
if not reqFieldValue:
errorMsg = "{} not specified in message: {}".format(
reqFieldName, body)
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
errorMsg, self.wallet.defaultId, frm)
logger.warning("{}".format(errorMsg))
return
typ = body.get(TYPE)
link = self.wallet.getConnectionBy(remote=body.get(f.IDENTIFIER.nm))
# If accept invite is coming the first time, then use the default
# identifier of the wallet since link wont be created
if typ == ACCEPT_INVITE and link is None:
localIdr = self.wallet.defaultId
else:
# if accept invite is not the message type
# and we are still missing link, then return the error
if link is None:
linkNotCreated = ' Error processing {}. ' \
'Connection is not yet created.'.format(typ)
self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
linkNotCreated,
self.wallet.defaultId,
frm)
return
localIdr = link.localIdentifier
if typ in self.lockedMsgs:
try:
self.verifySignature(body)
except SignatureRejected:
self.sendSigVerifResponseMsg("\nSignature rejected.",
frm, typ, localIdr)
return
reqId = body.get(f.REQ_ID.nm)
oldResps = self.rcvdMsgStore.get(reqId)
if oldResps:
oldResps.append(msg)
else:
self.rcvdMsgStore[reqId] = [msg]
        # TODO: Question: Should we be sending an acknowledgement for every message?
# We are sending, ACKs for "signature accepted" messages too
self.sendSigVerifResponseMsg("\nSignature accepted.",
frm, typ, localIdr)
handler = self.msgHandlers.get(typ)
if handler:
# TODO we should verify signature here
frmHa = self.endpoint.getHa(frm)
# `frmHa` can be None
res = handler((body, (frm, frmHa)))
if inspect.isawaitable(res):
self.loop.call_soon(asyncio.ensure_future, res)
else:
            raise NotImplementedError("No type handler found for {} message".
format(typ))
def _handleError(self, msg):
body, _ = msg
self.notifyMsgListener("Error ({}) occurred while processing this "
"msg: {}".format(body[DATA], body[REQ_MSG]))
def _handlePing(self, msg):
body, (frm, ha) = msg
link = self.wallet.getConnectionBy(nonce=body.get(NONCE))
if link:
            self.logger.info('Ping received from %s', link.remoteIdentifier)
self.signAndSend({TYPE: 'pong'}, self.wallet.defaultId, frm,
origReqId=body.get(f.REQ_ID.nm))
def _handlePong(self, msg):
body, (frm, ha) = msg
identifier = body.get(IDENTIFIER)
if identifier:
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
self.logger.info('Pong received from %s', li.remoteIdentifier)
self.notifyMsgListener(" Pong received.")
else:
self.notifyMsgListener(
" Pong received from unknown endpoint.")
else:
self.notifyMsgListener(' Identifier is not yet set.')
def _handleNewAvailableClaimsDataResponse(self, msg):
body, _ = msg
isVerified = self.verifySignature(body)
if isVerified:
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm))
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
newAvailableClaims = self._getNewAvailableClaims(
li, rcvdAvailableClaims)
if newAvailableClaims:
li.availableClaims.extend(newAvailableClaims)
claimNames = ", ".join(
[n for n, _, _ in newAvailableClaims])
self.notifyMsgListener(
" Available Claim(s): {}\n".format(claimNames))
else:
self.notifyMsgListener("No matching connection found")
@staticmethod
def _getNewAvailableClaims(
li, rcvdAvailableClaims) -> List[AvailableClaim]:
receivedClaims = [AvailableClaim(cl[NAME],
cl[VERSION],
li.remoteIdentifier)
for cl in rcvdAvailableClaims]
existingAvailableClaims = set(li.availableClaims)
newReceivedClaims = set(receivedClaims)
return list(newReceivedClaims - existingAvailableClaims)
def _handleAvailableClaimsResponse(self, msg):
body, _ = msg
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
if len(rcvdAvailableClaims) > 0:
self.notifyMsgListener(" Available Claim(s): {}". format(
",".join([rc.get(NAME) for rc in rcvdAvailableClaims])))
else:
self.notifyMsgListener(" Available Claim(s): "
"No available claims found")
def _handleAcceptInviteResponse(self, msg):
body, _ = msg
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(getCryptonym(identifier))
if li:
# TODO: Show seconds took to respond
self.notifyResponseFromMsg(li.name, body.get(f.REQ_ID.nm))
self.notifyMsgListener(" Trust established.")
alreadyAccepted = body[DATA].get(ALREADY_ACCEPTED_FIELD)
if alreadyAccepted:
self.notifyMsgListener(" Already accepted.")
else:
self.notifyMsgListener(" DID created in Sovrin.")
li.connection_status = constant.CONNECTION_STATUS_ACCEPTED
rcvdAvailableClaims = body[DATA][CLAIMS_LIST_FIELD]
newAvailableClaims = self._getNewAvailableClaims(
li, rcvdAvailableClaims)
if newAvailableClaims:
li.availableClaims.extend(newAvailableClaims)
self.notifyMsgListener(" Available Claim(s): {}". format(
",".join([rc.get(NAME) for rc in rcvdAvailableClaims])))
try:
self._checkIfLinkIdentifierWrittenToSovrin(
li, newAvailableClaims)
except NotConnectedToAny:
self.notifyEventListeners(
EVENT_NOT_CONNECTED_TO_ANY_ENV,
msg="Cannot check if identifier is written to Sovrin.")
else:
self.notifyMsgListener("No matching connection found")
def getVerkeyForLink(self, link):
# TODO: Get latest verkey for this link's remote identifier from Sovrin
if link.remoteVerkey:
return link.remoteVerkey
else:
raise VerkeyNotFound("verkey not set in connection")
def getLinkForMsg(self, msg):
nonce = msg.get(NONCE)
identifier = msg.get(f.IDENTIFIER.nm)
link = self.wallet.getConnectionBy(nonce=nonce, remote=identifier)
if link:
return link
else:
raise ConnectionNotFound
def verifySignature(self, msg: Dict[str, str]):
signature = msg.get(f.SIG.nm)
identifier = msg.get(IDENTIFIER)
msgWithoutSig = {k: v for k, v in msg.items() if k != f.SIG.nm}
# TODO This assumes the current key is the cryptonym. This is a BAD
# ASSUMPTION!!! Sovrin needs to provide the current key.
ser = serialize_msg_for_signing(msgWithoutSig)
signature = b58decode(signature.encode())
typ = msg.get(TYPE)
# TODO: Maybe keeping ACCEPT_INVITE open is a better option than keeping
# an if condition here?
if typ == ACCEPT_INVITE:
verkey = msg.get(VERKEY)
else:
try:
link = self.getLinkForMsg(msg)
verkey = self.getVerkeyForLink(link)
except (ConnectionNotFound, VerkeyNotFound):
# This is for verification of `NOTIFY` events
link = self.wallet.getConnectionBy(remote=identifier)
# TODO: If verkey is None, it should be fetched from Sovrin.
# Assuming CID for now.
verkey = link.remoteVerkey
v = DidVerifier(verkey, identifier=identifier)
if not v.verify(signature, ser):
raise SignatureRejected
else:
if typ == ACCEPT_INVITE:
self.logger.info('Signature accepted.')
return True
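    # Sketch of the locked-message envelope verified above (an assumed shape
    # inferred from the checks in this method, not taken from a spec):
    #
    #   {"type": <msg type>, "identifier": <sender DID>, "reqId": <id>,
    #    "signature": <base58 signature over the remaining fields>,
    #    "verkey": <present only for ACCEPT_INVITE>}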
def _getLinkByTarget(self, target) -> Connection:
return self.wallet.getConnectionBy(remote=target)
def _checkIfLinkIdentifierWrittenToSovrin(
self, li: Connection, availableClaims):
req = self.getIdentity(li.localIdentifier)
self.notifyMsgListener("\nSynchronizing...")
def getNymReply(reply, err, availableClaims, li: Connection):
if reply.get(DATA) and json.loads(reply[DATA])[TARGET_NYM] == \
li.localIdentifier:
self.notifyMsgListener(
" Confirmed DID written to Sovrin.")
self.notifyEventListeners(
EVENT_POST_ACCEPT_INVITE, connection=li)
else:
self.notifyMsgListener(
" DID is not yet written to Sovrin")
self.loop.call_later(.2, ensureReqCompleted, self.loop, req.key,
self.client, getNymReply, (availableClaims, li))
def notifyResponseFromMsg(self, linkName, reqId=None):
if reqId:
# TODO: This logic assumes that the req id is time based
curTimeBasedId = getTimeBasedId()
timeTakenInMillis = convertTimeBasedReqIdToMillis(
curTimeBasedId - reqId)
if timeTakenInMillis >= 1000:
responseTime = ' ({} sec)'.format(
round(timeTakenInMillis / 1000, 2))
else:
responseTime = ' ({} ms)'.format(round(timeTakenInMillis, 2))
else:
responseTime = ''
self.notifyMsgListener("\nResponse from {}{}:".format(linkName,
responseTime))
def notifyToRemoteCaller(self, event, msg, signingIdr, to, origReqId=None):
resp = {
TYPE: EVENT,
EVENT_NAME: event,
DATA: {'msg': msg}
}
self.signAndSend(resp, signingIdr, to, origReqId=origReqId)
def _handleAcceptance(self, msg):
body, (frm, ha) = msg
link = self.verifyAndGetLink(msg)
# TODO this is really kludgy code... needs refactoring
# exception handling, separation of concerns, etc.
if not link:
return
logger.debug("proceeding with connection: {}".format(link.name))
identifier = body.get(f.IDENTIFIER.nm)
verkey = body.get(VERKEY)
idy = Identity(identifier, verkey=verkey)
link.remoteVerkey = verkey
try:
pendingCount = self.wallet.addTrustAnchoredIdentity(idy)
logger.debug("pending request count {}".format(pendingCount))
alreadyAdded = False
except Exception as e:
if e.args[0] in ['identifier already added']:
alreadyAdded = True
else:
logger.warning("Exception raised while adding nym, "
"error was: {}".format(e.args[0]))
raise e
def send_claims(reply=None, error=None):
return self.sendClaimList(link=link,
alreadyAdded=alreadyAdded,
sender=frm,
reqId=body.get(f.REQ_ID.nm),
reply=reply,
error=error)
if alreadyAdded:
send_claims()
logger.debug("already accepted, "
"so directly sending available claims")
self.logger.info('Already added identifier [{}] in sovrin'
.format(identifier))
# self.notifyToRemoteCaller(EVENT_NOTIFY_MSG,
# " Already accepted",
# link.verkey, frm)
else:
logger.debug(
"not added to the ledger, so add nym to the ledger "
"and then will send available claims")
reqs = self.wallet.preparePending()
# Assuming there was only one pending request
logger.debug("sending to sovrin {}".format(reqs[0]))
# Need to think through
# how to provide separate logging for each agent
# anyhow this class should be implemented by each agent
# so we might not even need to add it as a separate logic
self.logger.info('Creating identifier [{}] in sovrin'
.format(identifier))
self._sendToSovrinAndDo(reqs[0], clbk=send_claims)
# TODO: If I have the below exception thrown, somehow the
# error msg which is sent in verifyAndGetLink is not being received
# on the other end, so for now, commented, need to come back to this
# else:
# raise NotImplementedError
def sendClaimList(self, link, alreadyAdded, sender,
reqId, reply=None, error=None):
logger.debug("sending available claims to {}".format(
link.remoteIdentifier))
resp = self.createInviteAcceptedMsg(
self.get_available_claim_list(link),
alreadyAccepted=alreadyAdded)
self.signAndSend(resp, link.localIdentifier, sender,
origReqId=reqId)
def _sendToSovrinAndDo(self, req, clbk=None, *args, **kwargs):
self.client.submitReqs(req)
ensureReqCompleted(self.loop, req.key, self.client,
clbk, *args, **kwargs)
def newAvailableClaimsPostClaimVerif(self, claimName):
raise NotImplementedError
def sendNewAvailableClaimsData(self, nac, frm, link):
if len(nac) > 0:
resp = self.createNewAvailableClaimsMsg(nac)
self.signAndSend(resp, link.localIdentifier, frm)
def sendPing(self, linkName):
link = self.wallet.getConnection(linkName, required=True)
self.connectTo(link=link)
ha = link.getRemoteEndpoint(required=True)
params = dict(ha=ha)
msg = {
TYPE: 'ping',
NONCE: link.request_nonce,
f.REQ_ID.nm: getTimeBasedId(),
f.IDENTIFIER.nm: link.localIdentifier
}
reqId = self.sendMessage(msg, **params)
self.notifyMsgListener(" Ping sent.")
return reqId
def connectTo(self, linkName=None, link=None):
assert linkName or link
if link is None:
link = self.wallet.getConnection(linkName, required=True)
ha = link.getRemoteEndpoint(required=True)
verKeyRaw = friendlyToRaw(
link.full_remote_verkey) if link.full_remote_verkey else None
publicKeyRaw = friendlyToRaw(
link.remotePubkey) if link.remotePubkey else None
if verKeyRaw is None and publicKeyRaw is None:
raise InvalidConnectionException(
"verkey or publicKey is required for connection.")
if publicKeyRaw is None:
publicKeyRaw = rawVerkeyToPubkey(verKeyRaw)
self.endpoint.connectIfNotConnected(
name=link.name,
ha=ha,
verKeyRaw=verKeyRaw,
publicKeyRaw=publicKeyRaw)
# duplicate function
# def loadInvitationFile(self, filePath):
# with open(filePath) as data_file:
# request = json.load(
# data_file, object_pairs_hook=collections.OrderedDict)
# return self.load_request_dict(request)
def load_request_str(self, json_str):
request = json.loads(
json_str, object_pairs_hook=collections.OrderedDict)
return self.load_request_dict(request)
def load_request_dict(self, request_dict):
link_request = request_dict.get("connection-request")
if not link_request:
raise ConnectionNotFound
linkName = link_request["name"]
existingLinkInvites = self.wallet. \
getMatchingConnections(linkName)
if len(existingLinkInvites) >= 1:
return self._merge_request(request_dict)
Connection.validate(request_dict)
link = self.load_request(request_dict)
return link
def load_request(self, request_data):
link_request = request_data["connection-request"]
remoteIdentifier = link_request[f.IDENTIFIER.nm]
# TODO signature should be validated!
# signature = request_data["sig"]
link_request_name = link_request[NAME]
remoteEndPoint = link_request.get("endpoint", None)
remote_verkey = link_request.get("verkey", None)
linkNonce = link_request[NONCE]
proofRequestsJson = request_data.get("proof-requests", None)
proofRequests = []
if proofRequestsJson:
for cr in proofRequestsJson:
proofRequests.append(
ProofRequest(
cr[NAME],
cr[VERSION],
getNonceForProof(linkNonce),
cr[ATTRIBUTES],
cr[VERIFIABLE_ATTRIBUTES] if VERIFIABLE_ATTRIBUTES in cr else [],
cr[PREDICATES] if PREDICATES in cr else []))
self.notifyMsgListener("1 connection request found for {}.".
format(link_request_name))
self.notifyMsgListener("Creating connection for {}.".
format(link_request_name))
# TODO: Would we always have a trust anchor corresponding to a link?
li = Connection(name=link_request_name,
trustAnchor=link_request_name,
remoteIdentifier=remoteIdentifier,
remoteEndPoint=remoteEndPoint,
request_nonce=linkNonce,
proofRequests=proofRequests,
remote_verkey=remote_verkey)
self.wallet.addConnection(li)
return li
def load_request_file(self, filePath):
with open(filePath) as data_file:
request_data = json.load(
data_file, object_pairs_hook=collections.OrderedDict)
link_request = request_data.get("connection-request")
if not link_request:
raise ConnectionNotFound
linkName = link_request["name"]
existingLinkInvites = self.wallet. \
getMatchingConnections(linkName)
if len(existingLinkInvites) >= 1:
return self._merge_request(request_data)
Connection.validate(request_data)
link = self.load_request(request_data)
return link
def _merge_request(self, request_data):
link_request = request_data.get('connection-request')
linkName = link_request['name']
link = self.wallet.getConnection(linkName)
request_proof_requests = request_data.get('proof-requests',
None)
nonce = link_request.get(NONCE)
if request_proof_requests:
for icr in request_proof_requests:
# match is found if name and version are same
matchedProofRequest = next(
(cr for cr in link.proofRequests
if (cr.name == icr[NAME] and cr.version == icr[VERSION])),
None
)
# if link.requestedProofs contains any claim request
if matchedProofRequest:
# merge 'attributes' and 'verifiableAttributes'
matchedProofRequest.attributes = {
**matchedProofRequest.attributes,
**icr[ATTRIBUTES]
}
matchedProofRequest.verifiableAttributes = dict(
matchedProofRequest.verifiableAttributes, **icr[VERIFIABLE_ATTRIBUTES])
else:
# otherwise append proof request to link
link.proofRequests.append(
ProofRequest(
icr[NAME],
icr[VERSION],
getNonceForProof(nonce),
attributes=icr[ATTRIBUTES],
verifiableAttributes=icr[VERIFIABLE_ATTRIBUTES]))
return link
else:
raise ConnectionAlreadyExists
def accept_request(self, link: Union[str, Connection]):
if isinstance(link, str):
link = self.wallet.getConnection(link, required=True)
elif isinstance(link, Connection):
pass
else:
raise TypeError(
"Type of connection must be either string or Link but "
"provided {}".format(
type(link)))
# TODO should move to wallet in a method like accept(link)
if not link.localIdentifier:
self.create_identifier_for_link(link)
msg = {
TYPE: ACCEPT_INVITE,
# TODO should not send this... because origin should be the sender
NONCE: link.request_nonce,
VERKEY: self.wallet.getVerkey(link.localIdentifier)
}
logger.debug("{} accepting request from {} with id {}".
format(self.name, link.name, link.remoteIdentifier))
self.logger.info('Accepting request with nonce {} from id {}'
.format(link.request_nonce, link.remoteIdentifier))
self.signAndSendToLink(msg, link.name)
# def _handleSyncNymResp(self, link, additionalCallback):
# def _(reply, err):
# if err:
# raise RuntimeError(err)
# reqId = self._updateLinkWithLatestInfo(link, reply)
# if reqId:
# self.loop.call_later(.2,
# self.executeWhenResponseRcvd,
# time.time(), 8000,
# self.loop, reqId, PONG, True,
# additionalCallback, reply, err)
# else:
# additionalCallback(reply, err)
#
# return _
def create_identifier_for_link(self, link):
signer = DidSigner()
self.wallet.addIdentifier(signer=signer)
link.localIdentifier = signer.identifier
link.localVerkey = signer.verkey
def _handleSyncResp(self, link, additionalCallback):
def _(reply, err):
if err:
raise RuntimeError(err)
reqId = self._updateLinkWithLatestInfo(link, reply)
if reqId:
self.loop.call_later(.2,
self.executeWhenResponseRcvd,
time.time(), 8000,
self.loop, reqId, PONG, True,
additionalCallback, reply, err)
else:
if callable(additionalCallback):
additionalCallback(reply, err)
return _
def _updateLinkWithLatestInfo(self, link: Connection, reply):
if DATA in reply and reply[DATA]:
data = json.loads(reply[DATA])
verkey = data.get(VERKEY)
if verkey is not None:
link.remoteVerkey = data[VERKEY]
ep = data.get(ENDPOINT)
if isinstance(ep, dict):
                # TODO: Validate it's an IP:port pair, or a malicious entity
                # can crash the code
if 'ha' in ep:
ip, port = ep['ha'].split(":")
link.remoteEndPoint = (ip, int(port))
if PUBKEY in ep:
link.remotePubkey = ep[PUBKEY]
else:
link.remotePubkey = friendlyVerkeyToPubkey(
link.full_remote_verkey) if link.full_remote_verkey else None
link.connection_last_synced = datetime.now()
self.notifyMsgListener(
" Connection {} synced".format(link.name))
def _pingToEndpoint(self, name, endpoint):
self.notifyMsgListener("\nPinging target endpoint: {}".
format(endpoint))
reqId = self.sendPing(linkName=name)
return reqId
def sync(self, linkName, doneCallback=None):
if not self.client.isReady():
raise NotConnectedToNetwork
link = self.wallet.getConnection(linkName, required=True)
identifier = link.remoteIdentifier
identity = Identity(identifier=identifier)
req = self.wallet.requestIdentity(identity,
sender=self.wallet.defaultId)
self.client.submitReqs(req)
self.loop.call_later(.2,
ensureReqCompleted,
self.loop,
req.key,
self.client,
self._handleSyncResp(link, None))
attrib = Attribute(name=ENDPOINT,
value=None,
dest=identifier,
ledgerStore=LedgerStore.RAW)
req = self.wallet.requestAttribute(
attrib, sender=self.wallet.defaultId)
self.client.submitReqs(req)
self.loop.call_later(.2,
ensureReqCompleted,
self.loop,
req.key,
self.client,
self._handleSyncResp(link, doneCallback))
def executeWhenResponseRcvd(self, startTime, maxCheckForMillis,
loop, reqId, respType,
checkIfLinkExists, clbk, *args):
if isMaxCheckTimeExpired(startTime, maxCheckForMillis):
clbk(
                None, "No response received within the specified time ({} ms). "
                "Retry the command and see if that works.\n".format(maxCheckForMillis))
else:
found = False
rcvdResponses = self.rcvdMsgStore.get(reqId)
if rcvdResponses:
for msg in rcvdResponses:
body, frm = msg
if body.get(TYPE) == respType:
if checkIfLinkExists:
identifier = body.get(IDENTIFIER)
li = self._getLinkByTarget(
getCryptonym(identifier))
linkCheckOk = li is not None
else:
linkCheckOk = True
if linkCheckOk:
found = True
break
if found:
clbk(*args)
else:
loop.call_later(.2, self.executeWhenResponseRcvd,
startTime, maxCheckForMillis, loop,
reqId, respType, checkIfLinkExists, clbk, *args)
| apache-2.0 | -2,601,363,187,121,016,000 | 40.016252 | 98 | 0.572687 | false |
derks/cement | cement/core/arg.py | 1 | 3461 | """
Cement core argument module.
"""
from ..core import backend, exc, interface, handler
Log = backend.minimal_logger(__name__)
def argument_validator(klass, obj):
"""Validates a handler implementation against the IArgument interface."""
members = [
'_setup',
'parse',
'parsed_args',
'add_argument',
]
interface.validate(IArgument, obj, members)
class IArgument(interface.Interface):
"""
This class defines the Argument Handler Interface. Classes that
implement this handler must provide the methods and attributes defined
below. Implementations do *not* subclass from interfaces.
Example:
.. code-block:: python
from cement.core import interface, arg
class MyArgumentHandler(arg.CementArgumentHandler):
class Meta:
interface = arg.IArgument
label = 'my_argument_handler'
"""
class IMeta:
label = 'argument'
validator = argument_validator
# Must be provided by the implementation
Meta = interface.Attribute('Handler Meta-data')
parsed_args = interface.Attribute('Parsed args object')
def _setup(app_obj):
"""
The _setup function is called during application initialization and
must 'setup' the handler object making it ready for the framework
or the application to make further calls to it.
Required Arguments:
app_obj
The application object.
Return: None
"""
def add_argument(self, *args, **kw):
"""
Add arguments for parsing. This should be -o/--option or positional.
Positional Arguments:
args
List of option arguments. Generally something like
['-h', '--help'].
Optional Arguments
dest
The destination name (var). Default: arg[0]'s string.
help
The help text for --help output (for that argument).
action
Must support: ['store', 'store_true', 'store_false',
'store_const']
const
The value stored if action == 'store_const'.
default
The default value.
Return: None
"""
def parse(self, arg_list):
"""
Parse the argument list (i.e. sys.argv). Can return any object as
        long as its members contain those of the added arguments. For
example, if adding a '-v/--version' option that stores to the dest of
'version', then the member must be callable as 'Object().version'.
Must also set self.parsed_args to what is being returned.
Required Arguments:
arg_list
A list of command line arguments.
Return: Callable
"""
class CementArgumentHandler(handler.CementBaseHandler):
"""
Base class that all Argument Handlers should sub-class from.
"""
class Meta:
label = None
interface = IArgument
def __init__(self, *args, **kw):
super(CementArgumentHandler, self).__init__(*args, **kw)
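# A minimal sketch of an IArgument implementation backed by argparse (the
# argparse mapping is an assumption for illustration; cement ships its own
# concrete handlers):
#
#   import argparse
#
#   class ArgParseArgumentHandler(CementArgumentHandler):
#       class Meta:
#           interface = IArgument
#           label = 'argparse_sketch'
#
#       def _setup(self, app_obj):
#           self._parser = argparse.ArgumentParser()
#           self.parsed_args = None
#
#       def add_argument(self, *args, **kw):
#           self._parser.add_argument(*args, **kw)
#
#       def parse(self, arg_list):
#           self.parsed_args = self._parser.parse_args(arg_list)
#           return self.parsed_args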
| bsd-3-clause | -439,128,202,592,356,900 | 27.138211 | 79 | 0.540306 | false |