ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3 to 1.04M)
---|---|---|
py | 1a307417545ff9bfda87edae6e82843be95b41e9 | # SECUREAUTH LABS. Copyright 2018 SecureAuth Corporation. All rights reserved.
#
# This software is provided under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# Author: Alberto Solino (@agsolino)
#
# Description:
# [MS-VDS]: Virtual Disk Service (VDS) Protocol
# This was used as a way to test the DCOM runtime. Further
# testing is needed to verify it is working as expected
#
# Best way to learn how to use these calls is to grab the protocol standard
# so you understand what the call does, and then read the test case located
# at https://github.com/SecureAuthCorp/impacket/tree/master/tests/SMB_RPC
#
# Since DCOM is object-oriented RPC, instead of helper functions you will
# see classes implementing the interfaces described in the standard.
# There are test cases for them too.
#
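#
# A minimal usage sketch (untested here; names follow the impacket test
# cases, and the DCOMConnection arguments are illustrative assumptions):
#
#   from impacket.dcerpc.v5.dcomrt import DCOMConnection
#   dcom = DCOMConnection(target, username, password, domain)
#   iInterface = dcom.CoCreateInstanceEx(CLSID_VirtualDiskService,
#                                        IID_IVdsServiceInitialization)
#   serviceInit = IVdsServiceInitialization(iInterface)
#   serviceInit.Initialize()
#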
from impacket.dcerpc.v5.ndr import NDRSTRUCT, NDRUniConformantVaryingArray, NDRENUM
from impacket.dcerpc.v5.dcomrt import DCOMCALL, DCOMANSWER, IRemUnknown2, PMInterfacePointer, INTERFACE
from impacket.dcerpc.v5.dtypes import LPWSTR, ULONG, DWORD, SHORT, GUID
from impacket.dcerpc.v5.rpcrt import DCERPCException
from impacket.dcerpc.v5.enum import Enum
from impacket import hresult_errors
from impacket.uuid import string_to_bin
class DCERPCSessionError(DCERPCException):
def __init__(self, error_string=None, error_code=None, packet=None):
DCERPCException.__init__(self, error_string, error_code, packet)
def __str__( self ):
if self.error_code in hresult_errors.ERROR_MESSAGES:
error_msg_short = hresult_errors.ERROR_MESSAGES[self.error_code][0]
error_msg_verbose = hresult_errors.ERROR_MESSAGES[self.error_code][1]
return 'VDS SessionError: code: 0x%x - %s - %s' % (self.error_code, error_msg_short, error_msg_verbose)
else:
return 'VDS SessionError: unknown error code: 0x%x' % (self.error_code)
################################################################################
# CONSTANTS
################################################################################
# 1.9 Standards Assignments
CLSID_VirtualDiskService = string_to_bin('7D1933CB-86F6-4A98-8628-01BE94C9A575')
IID_IEnumVdsObject = string_to_bin('118610B7-8D94-4030-B5B8-500889788E4E')
IID_IVdsAdviseSink = string_to_bin('8326CD1D-CF59-4936-B786-5EFC08798E25')
IID_IVdsAsync = string_to_bin('D5D23B6D-5A55-4492-9889-397A3C2D2DBC')
IID_IVdsServiceInitialization = string_to_bin('4AFC3636-DB01-4052-80C3-03BBCB8D3C69')
IID_IVdsService = string_to_bin('0818A8EF-9BA9-40D8-A6F9-E22833CC771E')
IID_IVdsSwProvider = string_to_bin('9AA58360-CE33-4F92-B658-ED24B14425B8')
IID_IVdsProvider = string_to_bin('10C5E575-7984-4E81-A56B-431F5F92AE42')
error_status_t = ULONG
# 2.2.1.1.3 VDS_OBJECT_ID
VDS_OBJECT_ID = GUID
################################################################################
# STRUCTURES
################################################################################
# 2.2.2.1.3.1 VDS_SERVICE_PROP
class VDS_SERVICE_PROP(NDRSTRUCT):
structure = (
('pwszVersion',LPWSTR),
('ulFlags',ULONG),
)
class OBJECT_ARRAY(NDRUniConformantVaryingArray):
item = PMInterfacePointer
# 2.2.2.7.1.1 VDS_PROVIDER_TYPE
class VDS_PROVIDER_TYPE(NDRENUM):
class enumItems(Enum):
VDS_PT_UNKNOWN = 0
VDS_PT_SOFTWARE = 1
VDS_PT_HARDWARE = 2
VDS_PT_VIRTUALDISK = 3
VDS_PT_MAX = 4
# 2.2.2.7.2.1 VDS_PROVIDER_PROP
class VDS_PROVIDER_PROP(NDRSTRUCT):
structure = (
('id',VDS_OBJECT_ID),
('pwszName',LPWSTR),
('guidVersionId',GUID),
('pwszVersion',LPWSTR),
('type',VDS_PROVIDER_TYPE),
('ulFlags',ULONG),
('ulStripeSizeFlags',ULONG),
('sRebuildPriority',SHORT),
)
################################################################################
# RPC CALLS
################################################################################
# 3.4.5.2.5.1 IVdsServiceInitialization::Initialize (Opnum 3)
class IVdsServiceInitialization_Initialize(DCOMCALL):
opnum = 3
structure = (
('pwszMachineName', LPWSTR),
)
class IVdsServiceInitialization_InitializeResponse(DCOMANSWER):
structure = (
('ErrorCode', error_status_t),
)
# 3.4.5.2.4.1 IVdsService::IsServiceReady (Opnum 3)
class IVdsService_IsServiceReady(DCOMCALL):
opnum = 3
structure = (
)
class IVdsService_IsServiceReadyResponse(DCOMANSWER):
structure = (
('ErrorCode', error_status_t),
)
# 3.4.5.2.4.2 IVdsService::WaitForServiceReady (Opnum 4)
class IVdsService_WaitForServiceReady(DCOMCALL):
opnum = 4
structure = (
)
class IVdsService_WaitForServiceReadyResponse(DCOMANSWER):
structure = (
('ErrorCode', error_status_t),
)
# 3.4.5.2.4.3 IVdsService::GetProperties (Opnum 5)
class IVdsService_GetProperties(DCOMCALL):
opnum = 5
structure = (
)
class IVdsService_GetPropertiesResponse(DCOMANSWER):
structure = (
('pServiceProp', VDS_SERVICE_PROP),
('ErrorCode', error_status_t),
)
# 3.4.5.2.4.4 IVdsService::QueryProviders (Opnum 6)
class IVdsService_QueryProviders(DCOMCALL):
opnum = 6
structure = (
('masks', DWORD),
)
class IVdsService_QueryProvidersResponse(DCOMANSWER):
structure = (
('ppEnum', PMInterfacePointer),
('ErrorCode', error_status_t),
)
# 3.1.1.1 IEnumVdsObject Interface
# 3.4.5.2.1.1 IEnumVdsObject::Next (Opnum 3)
class IEnumVdsObject_Next(DCOMCALL):
opnum = 3
structure = (
('celt', ULONG),
)
class IEnumVdsObject_NextResponse(DCOMANSWER):
structure = (
('ppObjectArray', OBJECT_ARRAY),
('pcFetched', ULONG),
('ErrorCode', error_status_t),
)
# 3.4.5.2.14.1 IVdsProvider::GetProperties (Opnum 3)
class IVdsProvider_GetProperties(DCOMCALL):
opnum = 3
structure = (
)
class IVdsProvider_GetPropertiesResponse(DCOMANSWER):
structure = (
('pProviderProp', VDS_PROVIDER_PROP),
('ErrorCode', error_status_t),
)
################################################################################
# OPNUMs and their corresponding structures
################################################################################
OPNUMS = {
}
################################################################################
# HELPER FUNCTIONS AND INTERFACES
################################################################################
class IEnumVdsObject(IRemUnknown2):
def Next(self, celt=0xffff):
request = IEnumVdsObject_Next()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
request['celt'] = celt
try:
resp = self.request(request, uuid = self.get_iPid())
except Exception as e:
resp = e.get_packet()
# If it is S_FALSE (1), it means fewer items were returned
if resp['ErrorCode'] != 1:
raise
interfaces = list()
for interface in resp['ppObjectArray']:
interfaces.append(IRemUnknown2(INTERFACE(self.get_cinstance(), ''.join(interface['abData']), self.get_ipidRemUnknown(), target = self.get_target())))
return interfaces
class IVdsProvider(IRemUnknown2):
def GetProperties(self):
request = IVdsProvider_GetProperties()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
resp = self.request(request, uuid = self.get_iPid())
return resp
class IVdsServiceInitialization(IRemUnknown2):
def __init__(self, interface):
IRemUnknown2.__init__(self, interface)
def Initialize(self):
request = IVdsServiceInitialization_Initialize()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
request['pwszMachineName'] = '\x00'
resp = self.request(request, uuid = self.get_iPid())
return resp
class IVdsService(IRemUnknown2):
def __init__(self, interface):
IRemUnknown2.__init__(self, interface)
def IsServiceReady(self):
request = IVdsService_IsServiceReady()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
try:
resp = self.request(request, uuid = self.get_iPid())
except Exception as e:
resp = e.get_packet()
return resp
def WaitForServiceReady(self):
request = IVdsService_WaitForServiceReady()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
resp = self.request(request, uuid = self.get_iPid())
return resp
def GetProperties(self):
request = IVdsService_GetProperties()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
resp = self.request(request, uuid = self.get_iPid())
return resp
def QueryProviders(self, masks):
request = IVdsService_QueryProviders()
request['ORPCthis'] = self.get_cinstance().get_ORPCthis()
request['ORPCthis']['flags'] = 0
request['masks'] = masks
resp = self.request(request, uuid = self.get_iPid())
return IEnumVdsObject(INTERFACE(self.get_cinstance(), ''.join(resp['ppEnum']['abData']), self.get_ipidRemUnknown(), target = self.get_target()))
|
py | 1a3075bea55ac79dc5e8c5c5bceb0b7c00b64e21 | """
This module contains a dynamic programming algorithm for solving the subset sum
problem. See section 6.4 of Algorithm Design by Kleinberg and Tardos.
An instance of the subset sum problem is defined by a weight capacity and a
collection of items, where each item has a weight. A solution is any subset of
items for which the total weight does not exceed the capacity. An optimal
solution is a solution with maximum total weight.
"""
def compute_opt(c, ws):
"""
Computes the maximum total weight for subproblems of the given instance of
the subset sum problem with weight capacity c and item weights ws.
    The recurrence (Kleinberg & Tardos, section 6.4), with OPT(i, j) the
    maximum total weight achievable using items 0..i under capacity j:
        OPT(i, j) = OPT(i-1, j)                                  if ws[i] > j
        OPT(i, j) = max(OPT(i-1, j), ws[i] + OPT(i-1, j - ws[i])) otherwise
"""
    memo = [[0] * (c + 1) for _ in range(len(ws))]
for i in range(len(ws)):
for j in range(1, c + 1):
            w1 = memo[i - 1][j] if i > 0 else 0
if j < ws[i]:
memo[i][j] = w1
else:
w2 = ws[i] + (memo[i - 1][j - ws[i]] if i > 0 else 0)
memo[i][j] = max(w1, w2)
return memo
def find_sol(c, ws, memo):
"""
Finds an optimal solution for the given instance of the subset sum problem
with weight capacity c and item weights ws, provided maximum total weights
for subproblems are memoized in memo.
"""
sol = []
for n in reversed(range(len(ws))):
if c >= ws[n] and memo[n][c] == ws[n] + memo[n - 1][c - ws[n]]:
sol.append(n)
c -= ws[n]
return sol
# Self-test
if __name__ == '__main__':
# Pretty print optimal value and solution
def pretty(c, ws):
memo = compute_opt(c, ws)
sol = find_sol(c, ws, memo)
print('optimal value : ' + str(memo[-1][c]))
print('optimal solution: ' + str(sol))
c = 11
ws = [1, 2, 5, 6, 7]
pretty(c, ws)
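    # Expected output with the corrected memo initialisation above (an
    # assumption worth re-checking by running the file):
    #   optimal value : 11
    #   optimal solution: [3, 2]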
|
py | 1a30762cc1cd38ff4d7596bf0259444c5fbbf867 | import os
from unittest import TestCase
from configservice import Config, MissingEnviron, ErrorFlagTrue
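# The tests below exercise a configservice API of roughly this shape
# (signature inferred from the calls in this file, not from upstream docs):
#   Config.get_env(key, default_value=None, test_response=None,
#                  error_flag=False, data_type_convert=None)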
class TestCore(TestCase):
def test__load_env(self):
# set an env to work with.
os.environ['TEST_ME_X'] = '1'
c = Config()
# Test simple recall.
res = c.get_env('TEST_ME_X')
self.assertEqual('1', res)
# Test default value
res = c.get_env('THIS_DOESNT_EXIST',
default_value='A')
self.assertEqual('A', res)
# Test default value where the key does exist (should take the key instead)
res = c.get_env('TEST_ME_X',
default_value='A')
self.assertEqual('1', res)
# Test test mode responses section.
######### TEST MODES ############
c._test_mode = True
# Test simple recall.
res = c.get_env('TEST_ME_X', test_response='test_res')
self.assertEqual('test_res', res)
# Test that in test mode a provided default_value makes the real env value win over test_response
res = c.get_env('TEST_ME_X',
default_value=24,
test_response='test_res')
self.assertEqual('1', res)
c._test_mode = False
######### End Test Mode Section ############
######## Check error states. ############
with self.assertRaises(MissingEnviron) as e:
res = c.get_env('THIS_DOESNT_EXIST', error_flag=True)
with self.assertRaises(ErrorFlagTrue) as e:
res = c.get_env('THIS_DOESNT_EXIST', error_flag=True, default_value='1')
###### Check data conversion ###########
# Test integer
os.environ['TEST_ME_X'] = '1'
res = c.get_env('TEST_ME_X', data_type_convert='int')
self.assertEqual(1, res)
# Test float
os.environ['TEST_ME_X'] = '1.11'
res = c.get_env('TEST_ME_X', data_type_convert='float')
self.assertEqual(1.11, res)
# Test Bool
os.environ['TEST_ME_X'] = '1'
res = c.get_env('TEST_ME_X', data_type_convert='bool')
self.assertTrue(res)
os.environ['TEST_ME_X'] = 'True'
res = c.get_env('TEST_ME_X', data_type_convert='bool')
self.assertTrue(res)
os.environ['TEST_ME_X'] = '0'
res = c.get_env('TEST_ME_X', data_type_convert='bool')
self.assertFalse(res)
os.environ['TEST_ME_X'] = 'false'
res = c.get_env('TEST_ME_X', data_type_convert='bool')
self.assertFalse(res)
# Test list
os.environ['TEST_ME_X'] = 'a,b,c,d'
res = c.get_env('TEST_ME_X', data_type_convert='list')
golden = ['a', 'b', 'c', 'd']
self.assertListEqual(golden, res)
# Test list int
os.environ['TEST_ME_X'] = '1,2,3,4,5'
res = c.get_env('TEST_ME_X', data_type_convert='list_int')
golden = [1, 2, 3, 4, 5]
self.assertListEqual(golden, res)
# Test list float
os.environ['TEST_ME_X'] = '1.2,2,3.6,4.6,5'
res = c.get_env('TEST_ME_X', data_type_convert='list_float')
golden = [1.2, 2, 3.6, 4.6, 5]
self.assertListEqual(golden, res)
# Test default value int
res = c.get_env('TEST_ME_NO', default_value='3', data_type_convert='int')
self.assertEqual(3, res)
# Test default value int
c._test_mode = True
res = c.get_env('TEST_ME_NO', test_response='2', default_value='3', data_type_convert='int')
self.assertEqual(3, res)
# Test default value int
c._test_mode = True
res = c.get_env('TEST_ME_NO', test_response='2', data_type_convert='int')
self.assertEqual(2, res)
|
py | 1a307667d3c1f6c19bc453dabbffc5b8d76d6142 | /usr/local/Cellar/python@2/2.7.15/Frameworks/Python.framework/Versions/2.7/lib/python2.7/abc.py |
py | 1a30773806d21ab849aa2cc786dcaddd70a0abbb | #Задача №10, Вариант 6
#Разработайте игру "Крестики-нолики". (см. М.Доусон Программируем на Python гл. 6)
#Данилов Д.А.
#23.05.2016
def display_instruct():
print("""
Добро пожаловать на ринг грандиознейших интеллектуальных состязаний всех времён.
Твой мозг и мой процессор сойдутся в схватке за доской игры "Крестики-нолики".
Чтобы сделать ход, введи число от 0 до 8. Числа однозначно соответствуют полям
доски - так, как показано ниже:
0 | 1 | 2
---------
3 | 4 | 5
---------
6 | 7 | 8
""")
X="X"
O="O"
EMPTY=" "
TIE="Ничья"
NUM_SQUARES=9
def ask_yes_no(question):
response=None
while response not in ("y","n"):
response=input(question).lower()
return response
def ask_number(question, low, high):
response=None
while response not in range(low, high):
response=int(input(question))
return response
def pieces():
go_first=ask_yes_no("Хочешь оставить за собой первый ход? (y/n): ")
if go_first=="y":
print("\nНу что ж, даю тебе фору: играй крестиками.")
human=X
computer=O
else:
print("\nТвоя удаль тебя погубит... Буду начинать я.")
computer=X
human=O
return computer, human
def new_board():
board=[]
for square in range(NUM_SQUARES):
board.append(EMPTY)
return board
def display_board(board):
print("\n\t", board[0], "|", board[1], "|", board[2])
print("\t", "---------")
print("\t", board[3], "|", board[4], "|", board[5])
print("\t", "---------")
print("\t", board[6], "|", board[7], "|", board[8])
def legal_moves(board):
moves = []
for square in range(NUM_SQUARES):
if board[square]==EMPTY:
moves.append(square)
return moves
def winner(board):
WAYS_TO_WIN=((0, 1, 2),
(3, 4, 5),
(6, 7, 8),
(0, 3, 6),
(1, 4, 7),
(2, 5, 8),
(0, 4, 8),
(2, 4, 6))
for row in WAYS_TO_WIN:
if board[row[0]]==board[row[1]]==board[row[2]]!=EMPTY:
winner=board[row[0]]
return winner
if EMPTY not in board:
return TIE
return None
def human_move(board, human):
legal=legal_moves(board)
move=None
while move not in legal:
move=ask_number("Твой ход. Выбери одно из полей (0-8):", 0, NUM_SQUARES)
if move not in legal:
print("\nСмешной человек! Это поле уже занято. Выбери другое.\n")
print("Ладно...")
return move
def computer_move(board, computer, human):
board=board[:]
BEST_MOVES=(4, 0, 2, 6, 8, 1, 3, 5, 7)
print("Я выберу поле номер", end=" ")
for move in legal_moves(board):
board[move]=computer
if winner(board)==computer:
print(move)
return move
board[move] = EMPTY
for move in legal_moves(board):
board[move]=human
if winner(board)==human:
print(move)
return move
board[move]=EMPTY
for move in BEST_MOVES:
if move in legal_moves(board):
print(move)
return move
def next_turn(turn):
if turn==X:
return O
else:
return X
def congrat_winner(the_winner, computer, human):
if the_winner !=TIE:
print("Три", the_winner, "в ряд!\n")
else:
print("Ничья!\n")
if the_winner==computer:
print("Kaк я и предсказывал. победа в очередной раз осталась за мной.\nВот еще один довод в пользу того. что компьютеры превосходят людей решительно во всем.")
elif the_winner==human:
print("О нет, этого не может быть! Неужели ты как-то сумел перехитрить меня, белковый?\nКлянусь: я, компьютер, не допущу этого больше никогда!")
elif the_winner==TIE:
print("Тебе несказанно повезло, дружок: ты сумел свести игру вничью.\nРадуйся же сегодняшнему успеху! Завтра уже не суждено его повторить.")
def main():
display_instruct()
computer, human=pieces()
turn=X
board=new_board()
display_board(board)
while not winner(board):
if turn==human:
move=human_move(board, human)
board[move]=human
else:
move=computer_move(board, computer, human)
board[move]=computer
display_board(board)
turn=next_turn(turn)
the_winner=winner(board)
congrat_winner(the_winner, computer, human)
main()
input("\n\nНажмите Enter, чтобы выйти.")
|
py | 1a3078457d0007ca2e3e43860e318c145ba6d7e8 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
str = """ACS3004 湖南新永利交通科工贸有限公司
ACS3005 三一帕尔菲格特种车装备有限公司
ACS3006 湖南新永利交通科工贸有限公司"""
print(str)
items = str.split(sep='\n')
for i, e in enumerate(items, 1):
print(i, '. ', e.split(sep=' ')[0])
for i in range(1):
print(i)
|
py | 1a3079582b7e2a6b5ef23166e20427d75cb6aa50 | """
Contains abstract functionality for learning locally linear sparse model.
"""
import numpy as np
import scipy as sp
from sklearn.linear_model import Ridge, lars_path
from sklearn.utils import check_random_state
class LimeBase(object):
"""Class for learning a locally linear sparse model from perturbed data"""
def __init__(self,
kernel_fn,
verbose=False,
random_state=None):
"""Init function
Args:
kernel_fn: function that transforms an array of distances into an
array of proximity values (floats).
verbose: if true, print local prediction values from linear model.
random_state: an integer or numpy.RandomState that will be used to
generate random numbers. If None, the random state will be
initialized using the internal numpy seed.
"""
self.kernel_fn = kernel_fn
self.verbose = verbose
self.random_state = check_random_state(random_state)
@staticmethod
def generate_lars_path(weighted_data, weighted_labels):
"""Generates the lars path for weighted data.
Args:
weighted_data: data that has been weighted by kernel
weighted_label: labels, weighted by kernel
Returns:
(alphas, coefs), both are arrays corresponding to the
regularization parameter and coefficients, respectively
"""
x_vector = weighted_data
alphas, _, coefs = lars_path(x_vector,
weighted_labels,
method='lasso',
verbose=False)
return alphas, coefs
def forward_selection(self, data, labels, weights, num_features):
"""Iteratively adds features to the model"""
clf = Ridge(alpha=0, fit_intercept=True, random_state=self.random_state)
used_features = []
for _ in range(min(num_features, data.shape[1])):
max_ = -100000000
best = 0
for feature in range(data.shape[1]):
if feature in used_features:
continue
clf.fit(data[:, used_features + [feature]], labels,
sample_weight=weights)
score = clf.score(data[:, used_features + [feature]],
labels,
sample_weight=weights)
if score > max_:
best = feature
max_ = score
used_features.append(best)
return np.array(used_features)
def feature_selection(self, data, labels, weights, num_features, method):
"""Selects features for the model. see explain_instance_with_data to
understand the parameters."""
if method == 'none':
return np.array(range(data.shape[1]))
elif method == 'forward_selection':
return self.forward_selection(data, labels, weights, num_features)
elif method == 'highest_weights':
clf = Ridge(alpha=0.01, fit_intercept=True,
random_state=self.random_state)
clf.fit(data, labels, sample_weight=weights)
coef = clf.coef_
if sp.sparse.issparse(data):
coef = sp.sparse.csr_matrix(clf.coef_)
weighted_data = coef.multiply(data[0])
# Note: most efficient to slice the data before reversing
sdata = len(weighted_data.data)
argsort_data = np.abs(weighted_data.data).argsort()
# Edge case where data is more sparse than requested number of feature importances
# In that case, we just pad with zero-valued features
if sdata < num_features:
nnz_indexes = argsort_data[::-1]
indices = weighted_data.indices[nnz_indexes]
num_to_pad = num_features - sdata
indices = np.concatenate((indices, np.zeros(num_to_pad, dtype=indices.dtype)))
indices_set = set(indices)
pad_counter = 0
for i in range(data.shape[1]):
if i not in indices_set:
indices[pad_counter + sdata] = i
pad_counter += 1
if pad_counter >= num_to_pad:
break
else:
nnz_indexes = argsort_data[sdata - num_features:sdata][::-1]
indices = weighted_data.indices[nnz_indexes]
return indices
else:
weighted_data = coef * data[0]
feature_weights = sorted(
zip(range(data.shape[1]), weighted_data),
key=lambda x: np.abs(x[1]),
reverse=True)
return np.array([x[0] for x in feature_weights[:num_features]])
elif method == 'lasso_path':
weighted_data = ((data - np.average(data, axis=0, weights=weights))
* np.sqrt(weights[:, np.newaxis]))
weighted_labels = ((labels - np.average(labels, weights=weights))
* np.sqrt(weights))
nonzero = range(weighted_data.shape[1])
_, coefs = self.generate_lars_path(weighted_data,
weighted_labels)
for i in range(len(coefs.T) - 1, 0, -1):
nonzero = coefs.T[i].nonzero()[0]
if len(nonzero) <= num_features:
break
used_features = nonzero
return used_features
elif method == 'auto':
if num_features <= 6:
n_method = 'forward_selection'
else:
n_method = 'highest_weights'
return self.feature_selection(data, labels, weights,
num_features, n_method)
def explain_instance_with_data(self,
neighborhood_data,
neighborhood_labels,
distances,
label,
num_features,
feature_selection='auto',
model_regressor=None):
"""Takes perturbed data, labels and distances, returns explanation.
Args:
neighborhood_data: perturbed data, 2d array. first element is
assumed to be the original data point.
neighborhood_labels: corresponding perturbed labels. should have as
many columns as the number of possible labels.
distances: distances to original data point.
label: label for which we want an explanation
num_features: maximum number of features in explanation
feature_selection: how to select num_features. options are:
'forward_selection': iteratively add features to the model.
This is costly when num_features is high
'highest_weights': selects the features that have the highest
product of absolute weight * original data point when
learning with all the features
'lasso_path': chooses features based on the lasso
regularization path
'none': uses all features, ignores num_features
'auto': uses forward_selection if num_features <= 6, and
'highest_weights' otherwise.
model_regressor: sklearn regressor to use in explanation.
Defaults to Ridge regression if None. Must have
model_regressor.coef_ and 'sample_weight' as a parameter
to model_regressor.fit()
Returns:
(intercept, exp, score, local_pred):
intercept is a float.
exp is a sorted list of tuples, where each tuple (x,y) corresponds
to the feature id (x) and the local weight (y). The list is sorted
by decreasing absolute value of y.
score is the R^2 value of the returned explanation
local_pred is the prediction of the explanation model on the original instance
"""
weights = self.kernel_fn(distances)
labels_column = neighborhood_labels[:, label]
used_features = self.feature_selection(neighborhood_data,
labels_column,
weights,
num_features,
feature_selection)
if model_regressor is None:
model_regressor = Ridge(alpha=1, fit_intercept=True,
random_state=self.random_state)
easy_model = model_regressor
easy_model.fit(neighborhood_data[:, used_features],
labels_column, sample_weight=weights)
prediction_score = easy_model.score(
neighborhood_data[:, used_features],
labels_column, sample_weight=weights)
local_pred = easy_model.predict(neighborhood_data[0, used_features].reshape(1, -1))
if self.verbose:
print('Intercept', easy_model.intercept_)
print('Prediction_local', local_pred,)
print('Right:', neighborhood_labels[0, label])
return (easy_model.intercept_,
sorted(zip(used_features, easy_model.coef_),
key=lambda x: np.abs(x[1]), reverse=True),
prediction_score, local_pred), easy_model
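# A minimal, self-contained smoke test (not part of the original module).
# The data, labels and kernel below are synthetic assumptions chosen only
# to exercise explain_instance_with_data end to end.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    data = rng.normal(size=(50, 5))                           # perturbed samples
    labels = (data[:, 0] > 0).astype(float).reshape(-1, 1)    # fake model output
    distances = np.linalg.norm(data - data[0], axis=1)        # distance to row 0
    base = LimeBase(kernel_fn=lambda d: np.exp(-(d ** 2) / 2.0))
    (intercept, exp, score, local_pred), model = base.explain_instance_with_data(
        data, labels, distances, label=0, num_features=3)
    print('intercept:', intercept)
    print('weights  :', exp)
    print('R^2      :', score)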
|
py | 1a3079fba5ca8725f3811e6010cb2a9c5fb0a486 | # Attempts to verify the solutions of discrete mathematics CW1
import random
def listUpTo(num):
"""
Returns a lists of integers from 1 up to num
"""
return list(range(1, num + 1))
def countMultiples(dividendList, divisor):
"""
Returns the total number of multiples of the divisor in dividendList
"""
multNum = 0
for dividend in dividendList:
if dividend % divisor == 0:
multNum += 1
return multNum
def solveQ1(myList, divisor, selectAmount, n):
"""
    Let X denote the number of successful trials in a given n trials.
    Selects 'selectAmount' random elements from 'myList', checks whether each
    is a multiple of 'divisor', performs this for 'n' trials, then returns a
    probability point of X from its binomial distribution.
"""
X = 0
for _ in range(n):
random.shuffle(myList)
        for i, selected in enumerate(myList, start=1):
            if selected % divisor == 0:
                X += 1
            if i == selectAmount:
                break
p = X / (len(myList) * n * selectAmount)
print(p)
if __name__ == "__main__":
list40 = listUpTo(40)
# print(list40)
# print(countMultiples(list40, 4))
# print()
solveQ1(list40, 4, 2, 10000)
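    # Sanity note (not in the original): 10 of the 40 numbers are multiples
    # of 4, so a single inspected element is a multiple with probability
    # 10/40 = 0.25; the printed value is scaled down by solveQ1's unusual
    # normalisation, so compare runs against each other rather than 0.25.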
|
py | 1a307b5b2c019db09d5717c51102eead042c4a44 | #!/usr/bin/env python3
# Hydrus is released under WTFPL
# You just DO WHAT THE FUCK YOU WANT TO.
# https://github.com/sirkris/WTFPL/blob/master/WTFPL.md
import locale
try: locale.setlocale( locale.LC_ALL, '' )
except: pass
try:
import os
import argparse
import sys
from hydrus.core import HydrusBoot
HydrusBoot.AddBaseDirToEnvPath()
# initialise Qt here, important it is done early
from hydrus.client.gui import QtPorting as QP
from hydrus.core import HydrusConstants as HC
from hydrus.core import HydrusData
from hydrus.core import HydrusGlobals as HG
from hydrus.core import HydrusLogger
from hydrus.core import HydrusPaths
from hydrus.core import HydrusTemp
argparser = argparse.ArgumentParser( description = 'hydrus network client' )
argparser.add_argument( '-d', '--db_dir', help = 'set an external db location' )
argparser.add_argument( '--temp_dir', help = 'override the program\'s temporary directory' )
argparser.add_argument( '--db_journal_mode', default = 'WAL', choices = [ 'WAL', 'TRUNCATE', 'PERSIST', 'MEMORY' ], help = 'change db journal mode (default=WAL)' )
argparser.add_argument( '--db_cache_size', type = int, help = 'override SQLite cache_size per db file, in MB (default=256)' )
argparser.add_argument( '--db_transaction_commit_period', type = int, help = 'override how often (in seconds) database changes are saved to disk (default=30,min=10)' )
argparser.add_argument( '--db_synchronous_override', type = int, choices = range(4), help = 'override SQLite Synchronous PRAGMA (default=2)' )
argparser.add_argument( '--no_db_temp_files', action='store_true', help = 'run db temp operations entirely in memory' )
argparser.add_argument( '--boot_debug', action='store_true', help = 'print additional bootup information to the log' )
argparser.add_argument( '--no_wal', action='store_true', help = 'OBSOLETE: run using TRUNCATE db journaling' )
argparser.add_argument( '--db_memory_journaling', action='store_true', help = 'OBSOLETE: run using MEMORY db journaling (DANGEROUS)' )
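    # Example invocation (the script name is an assumption; flags as above):
    #   python hydrus_client.py --db_dir ~/hydrus_db --db_journal_mode TRUNCATE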
result = argparser.parse_args()
if result.db_dir is None:
db_dir = HC.DEFAULT_DB_DIR
if not HydrusPaths.DirectoryIsWriteable( db_dir ) or HC.RUNNING_FROM_MACOS_APP:
if HC.USERPATH_DB_DIR is None:
raise Exception( 'The default db path "{}" was not writeable, and the userpath could not be determined!'.format( HC.DEFAULT_DB_DIR ) )
db_dir = HC.USERPATH_DB_DIR
else:
db_dir = result.db_dir
db_dir = HydrusPaths.ConvertPortablePathToAbsPath( db_dir, HC.BASE_DIR )
if not HydrusPaths.DirectoryIsWriteable( db_dir ):
raise Exception( 'The given db path "{}" is not writeable!'.format( db_dir ) )
try:
HydrusPaths.MakeSureDirectoryExists( db_dir )
except:
raise Exception( 'Could not ensure db path "{}" exists! Check the location is correct and that you have permission to write to it!'.format( db_dir ) )
if not os.path.isdir( db_dir ):
raise Exception( 'The given db path "{}" is not a directory!'.format( db_dir ) )
HG.db_journal_mode = result.db_journal_mode
if result.no_wal:
HG.db_journal_mode = 'TRUNCATE'
if result.db_memory_journaling:
HG.db_journal_mode = 'MEMORY'
if result.db_cache_size is not None:
HG.db_cache_size = result.db_cache_size
else:
HG.db_cache_size = 256
if result.db_transaction_commit_period is not None:
HG.db_transaction_commit_period = max( 10, result.db_transaction_commit_period )
else:
HG.db_transaction_commit_period = 30
if result.db_synchronous_override is not None:
HG.db_synchronous = int( result.db_synchronous_override )
else:
if HG.db_journal_mode == 'WAL':
HG.db_synchronous = 1
else:
HG.db_synchronous = 2
HG.no_db_temp_files = result.no_db_temp_files
HG.boot_debug = result.boot_debug
try:
from twisted.internet import reactor
except:
HG.twisted_is_broke = True
except Exception as e:
try:
HydrusData.DebugPrint( 'Critical boot error occurred! Details written to crash.log!' )
HydrusData.PrintException( e )
except:
pass
import traceback
error_trace = traceback.format_exc()
print( error_trace )
if 'db_dir' in locals() and os.path.exists( db_dir ):
emergency_dir = db_dir
else:
emergency_dir = os.path.expanduser( '~' )
possible_desktop = os.path.join( emergency_dir, 'Desktop' )
if os.path.exists( possible_desktop ) and os.path.isdir( possible_desktop ):
emergency_dir = possible_desktop
dest_path = os.path.join( emergency_dir, 'hydrus_crash.log' )
with open( dest_path, 'w', encoding = 'utf-8' ) as f:
f.write( error_trace )
print( 'Critical boot error occurred! Details written to hydrus_crash.log in either db dir or user dir!' )
sys.exit( 1 )
def boot():
if result.temp_dir is not None:
HydrusTemp.SetEnvTempDir( result.temp_dir )
controller = None
with HydrusLogger.HydrusLogger( db_dir, 'client' ) as logger:
try:
HydrusData.Print( 'hydrus client started' )
if not HG.twisted_is_broke:
import threading
threading.Thread( target = reactor.run, name = 'twisted', kwargs = { 'installSignalHandlers' : 0 } ).start()
from hydrus.client import ClientController
controller = ClientController.Controller( db_dir )
controller.Run()
except:
HydrusData.Print( 'hydrus client failed' )
import traceback
HydrusData.Print( traceback.format_exc() )
finally:
HG.started_shutdown = True
HG.view_shutdown = True
HG.model_shutdown = True
if controller is not None:
controller.pubimmediate( 'wake_daemons' )
if not HG.twisted_is_broke:
reactor.callFromThread( reactor.stop )
HydrusData.Print( 'hydrus client shut down' )
HG.shutdown_complete = True
if HG.restart:
HydrusData.RestartProcess()
|
py | 1a307bf78d17b46f2db7fae4ed5dcc35de8cfc72 | """
Created on Oct 2, 2012
@author: Georgiana Dinu, Pham The Nghia
"""
from composes.similarity.similarity import Similarity
class EuclideanSimilarity(Similarity):
"""
Computes the euclidean similarity of two vectors as the inverse of their
euclidean distance.
:math:`sim(\\vec{u},\\vec{v}) = \\frac{1}{||\\vec{u}-\\vec{v}|| + 1}`
"""
def _sim(self, v1, v2):
return 1 / (1 + (v1 - v2).norm())
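# Numeric check of the formula above (comment only; vector construction
# depends on composes' matrix types): for u = (0, 3) and v = (4, 0),
# ||u - v|| = 5, so sim(u, v) = 1 / (5 + 1) ~= 0.1667.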
|
py | 1a307d010ca0716f37ddee958c8614c81b2cb53b | print('-*-' * 15)
print('SISTEMA CAIXA ELETRONICO')
print('-*-' * 15)
valor = float(input('Qual será o valor sacado? '))
cedula = 100
qtd = 0
if valor < 1:
print('Saque somente acima de R$1! ')
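# Worked example (an assumption, not in the original): for valor = 186 the
# loop below should report one note each of R$100, R$50, R$20, R$10, R$5
# and R$1.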
while True:
if valor >= cedula:
valor = valor - cedula
qtd += 1
else:
if qtd > 0:
print(F'Total de {qtd} de cedulas de R${cedula}')
if cedula == 100:
cedula = 50
elif cedula == 50:
cedula = 20
elif cedula == 20:
cedula = 10
elif cedula == 10:
cedula = 5
elif cedula == 5:
cedula = 2
elif cedula == 2:
cedula = 1
qtd = 0
if valor <= 0:
break |
py | 1a307d336167e10592c72af8e39db3e66623c447 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
"""
Base tests that all storage providers should implement in their own tests.
They handle the storage-based assertions, internally.
All tests return true if assertions pass to indicate that the code ran to completion, passing internal assertions.
Therefore, all tests using theses static tests should strictly check that the method returns true.
Note: Python cannot have dicts with properties with a None value like other SDKs can have properties with null values.
Because of this, StoreItem tests have "e_tag: *" where the tests in the other SDKs do not.
This has also caused us to comment out some parts of these tests where we assert that "e_tag"
is None for the same reason. A null e_tag should work just like a * e_tag when writing,
as far as the storage adapters are concerned, so this shouldn't cause issues.
:Example:
async def test_handle_null_keys_when_reading(self):
await reset()
test_ran = await StorageBaseTests.handle_null_keys_when_reading(get_storage())
assert test_ran
"""
import pytest
from botbuilder.azure import CosmosDbStorage
from botbuilder.core import (
ConversationState,
TurnContext,
MessageFactory,
MemoryStorage,
)
from botbuilder.core.adapters import TestAdapter
from botbuilder.dialogs import (
DialogSet,
DialogTurnStatus,
TextPrompt,
PromptValidatorContext,
WaterfallStepContext,
Dialog,
WaterfallDialog,
PromptOptions,
)
class StorageBaseTests:
# pylint: disable=pointless-string-statement
@staticmethod
async def return_empty_object_when_reading_unknown_key(storage) -> bool:
result = await storage.read(["unknown"])
assert result is not None
assert len(result) == 0
return True
@staticmethod
async def handle_null_keys_when_reading(storage) -> bool:
if isinstance(storage, (CosmosDbStorage, MemoryStorage)):
result = await storage.read(None)
assert len(result.keys()) == 0
# Catch-all
else:
with pytest.raises(Exception) as err:
await storage.read(None)
assert err.value.args[0] == "Keys are required when reading"
return True
@staticmethod
async def handle_null_keys_when_writing(storage) -> bool:
with pytest.raises(Exception) as err:
await storage.write(None)
assert err.value.args[0] == "Changes are required when writing"
return True
@staticmethod
async def does_not_raise_when_writing_no_items(storage) -> bool:
# noinspection PyBroadException
try:
await storage.write([])
except:
pytest.fail("Should not raise")
return True
@staticmethod
async def create_object(storage) -> bool:
store_items = {
"createPoco": {"id": 1},
"createPocoStoreItem": {"id": 2, "e_tag": "*"},
}
await storage.write(store_items)
read_store_items = await storage.read(store_items.keys())
assert store_items["createPoco"]["id"] == read_store_items["createPoco"]["id"]
assert (
store_items["createPocoStoreItem"]["id"]
== read_store_items["createPocoStoreItem"]["id"]
)
        # If we decide to validate e_tag integrity again, uncomment this code
# assert read_store_items["createPoco"]["e_tag"] is not None
assert read_store_items["createPocoStoreItem"]["e_tag"] is not None
return True
@staticmethod
async def handle_crazy_keys(storage) -> bool:
key = '!@#$%^&*()_+??><":QASD~`'
store_item = {"id": 1}
store_items = {key: store_item}
await storage.write(store_items)
read_store_items = await storage.read(store_items.keys())
assert read_store_items[key] is not None
assert read_store_items[key]["id"] == 1
return True
@staticmethod
async def update_object(storage) -> bool:
original_store_items = {
"pocoItem": {"id": 1, "count": 1},
"pocoStoreItem": {"id": 1, "count": 1, "e_tag": "*"},
}
# 1st write should work
await storage.write(original_store_items)
loaded_store_items = await storage.read(["pocoItem", "pocoStoreItem"])
update_poco_item = loaded_store_items["pocoItem"]
update_poco_item["e_tag"] = None
update_poco_store_item = loaded_store_items["pocoStoreItem"]
assert update_poco_store_item["e_tag"] is not None
# 2nd write should work
update_poco_item["count"] += 1
update_poco_store_item["count"] += 1
await storage.write(loaded_store_items)
reloaded_store_items = await storage.read(loaded_store_items.keys())
reloaded_update_poco_item = reloaded_store_items["pocoItem"]
reloaded_update_poco_store_item = reloaded_store_items["pocoStoreItem"]
assert reloaded_update_poco_item["count"] == 2
assert reloaded_update_poco_store_item["count"] == 2
# Write with old e_tag should succeed for non-storeItem
update_poco_item["count"] = 123
await storage.write({"pocoItem": update_poco_item})
# Write with old eTag should FAIL for storeItem
update_poco_store_item["count"] = 123
"""
This assert exists in the other SDKs but can't in python, currently
due to using "e_tag: *" above (see comment near the top of this file for details).
with pytest.raises(Exception) as err:
await storage.write({"pocoStoreItem": update_poco_store_item})
assert err.value is not None
"""
reloaded_store_items2 = await storage.read(["pocoItem", "pocoStoreItem"])
reloaded_poco_item2 = reloaded_store_items2["pocoItem"]
reloaded_poco_item2["e_tag"] = None
reloaded_poco_store_item2 = reloaded_store_items2["pocoStoreItem"]
assert reloaded_poco_item2["count"] == 123
assert reloaded_poco_store_item2["count"] == 2
# write with wildcard etag should work
reloaded_poco_item2["count"] = 100
reloaded_poco_store_item2["count"] = 100
reloaded_poco_store_item2["e_tag"] = "*"
wildcard_etag_dict = {
"pocoItem": reloaded_poco_item2,
"pocoStoreItem": reloaded_poco_store_item2,
}
await storage.write(wildcard_etag_dict)
reloaded_store_items3 = await storage.read(["pocoItem", "pocoStoreItem"])
assert reloaded_store_items3["pocoItem"]["count"] == 100
assert reloaded_store_items3["pocoStoreItem"]["count"] == 100
# Write with empty etag should not work
reloaded_store_items4 = await storage.read(["pocoStoreItem"])
reloaded_store_item4 = reloaded_store_items4["pocoStoreItem"]
assert reloaded_store_item4 is not None
reloaded_store_item4["e_tag"] = ""
dict2 = {"pocoStoreItem": reloaded_store_item4}
with pytest.raises(Exception) as err:
await storage.write(dict2)
assert err.value is not None
final_store_items = await storage.read(["pocoItem", "pocoStoreItem"])
assert final_store_items["pocoItem"]["count"] == 100
assert final_store_items["pocoStoreItem"]["count"] == 100
return True
@staticmethod
async def delete_object(storage) -> bool:
store_items = {"delete1": {"id": 1, "count": 1, "e_tag": "*"}}
await storage.write(store_items)
read_store_items = await storage.read(["delete1"])
assert read_store_items["delete1"]["e_tag"]
assert read_store_items["delete1"]["count"] == 1
await storage.delete(["delete1"])
reloaded_store_items = await storage.read(["delete1"])
assert reloaded_store_items.get("delete1", None) is None
return True
@staticmethod
async def delete_unknown_object(storage) -> bool:
# noinspection PyBroadException
try:
await storage.delete(["unknown_key"])
except:
pytest.fail("Should not raise")
return True
@staticmethod
async def perform_batch_operations(storage) -> bool:
await storage.write(
{"batch1": {"count": 10}, "batch2": {"count": 20}, "batch3": {"count": 30},}
)
result = await storage.read(["batch1", "batch2", "batch3"])
assert result.get("batch1", None) is not None
assert result.get("batch2", None) is not None
assert result.get("batch3", None) is not None
assert result["batch1"]["count"] == 10
assert result["batch2"]["count"] == 20
assert result["batch3"]["count"] == 30
"""
        If we decide to validate e_tag integrity again, uncomment this code
assert result["batch1"].get("e_tag", None) is not None
assert result["batch2"].get("e_tag", None) is not None
assert result["batch3"].get("e_tag", None) is not None
"""
await storage.delete(["batch1", "batch2", "batch3"])
result = await storage.read(["batch1", "batch2", "batch3"])
assert result.get("batch1", None) is None
assert result.get("batch2", None) is None
assert result.get("batch3", None) is None
return True
@staticmethod
async def proceeds_through_waterfall(storage) -> bool:
convo_state = ConversationState(storage)
dialog_state = convo_state.create_property("dialogState")
dialogs = DialogSet(dialog_state)
async def exec_test(turn_context: TurnContext) -> None:
dialog_context = await dialogs.create_context(turn_context)
await dialog_context.continue_dialog()
if not turn_context.responded:
await dialog_context.begin_dialog(WaterfallDialog.__name__)
await convo_state.save_changes(turn_context)
adapter = TestAdapter(exec_test)
async def prompt_validator(prompt_context: PromptValidatorContext):
result = prompt_context.recognized.value
if len(result) > 3:
succeeded_message = MessageFactory.text(
f"You got it at the {prompt_context.options.number_of_attempts}rd try!"
)
await prompt_context.context.send_activity(succeeded_message)
return True
reply = MessageFactory.text(
f"Please send a name that is longer than 3 characters. {prompt_context.options.number_of_attempts}"
)
await prompt_context.context.send_activity(reply)
return False
async def step_1(step_context: WaterfallStepContext) -> DialogTurnStatus:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.context.send_activity("step1")
return Dialog.end_of_turn
async def step_2(step_context: WaterfallStepContext) -> None:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.prompt(
TextPrompt.__name__,
PromptOptions(prompt=MessageFactory.text("Please type your name")),
)
async def step_3(step_context: WaterfallStepContext) -> DialogTurnStatus:
assert isinstance(step_context.active_dialog.state["stepIndex"], int)
await step_context.context.send_activity("step3")
return Dialog.end_of_turn
steps = [step_1, step_2, step_3]
dialogs.add(WaterfallDialog(WaterfallDialog.__name__, steps))
dialogs.add(TextPrompt(TextPrompt.__name__, prompt_validator))
step1 = await adapter.send("hello")
step2 = await step1.assert_reply("step1")
step3 = await step2.send("hello")
step4 = await step3.assert_reply("Please type your name") # None
step5 = await step4.send("hi")
step6 = await step5.assert_reply(
"Please send a name that is longer than 3 characters. 0"
)
step7 = await step6.send("hi")
step8 = await step7.assert_reply(
"Please send a name that is longer than 3 characters. 1"
)
step9 = await step8.send("hi")
step10 = await step9.assert_reply(
"Please send a name that is longer than 3 characters. 2"
)
step11 = await step10.send("Kyle")
step12 = await step11.assert_reply("You got it at the 3rd try!")
await step12.assert_reply("step3")
return True
|
py | 1a307dec7a41e764ad9211995da38d2730e41557 | import numpy as np
from manimlib.mobject.mobject import Mobject
class ValueTracker(Mobject):
"""
    Not meant to be displayed. Instead the position encodes some
number, often one which another animation or continual_animation
uses for its update function, and by treating it as a mobject it can
still be animated and manipulated just like anything else.
"""
def __init__(self, value=0, **kwargs):
Mobject.__init__(self, **kwargs)
self.points = np.zeros((1, 3))
self.set_value(value)
def get_value(self):
return self.points[0, 0]
def set_value(self, value):
self.points[0, 0] = value
return self
def increment_value(self, d_value):
self.set_value(self.get_value() + d_value)
class ExponentialValueTracker(ValueTracker):
"""
Operates just like ValueTracker, except it encodes the value as the
exponential of a position coordinate, which changes how interpolation
behaves
"""
def get_value(self):
return np.exp(ValueTracker.get_value(self))
def set_value(self, value):
return ValueTracker.set_value(self, np.log(value))
class ComplexValueTracker(ValueTracker):
def get_value(self):
return complex(*self.points[0, :2])
def set_value(self, z):
z = complex(z)
self.points[0, :2] = (z.real, z.imag)
return self
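# A minimal usage sketch (assumes a manim scene context; standalone the
# tracker still works as plain state):
#   tracker = ValueTracker(0)
#   tracker.increment_value(2.5)
#   assert tracker.get_value() == 2.5
# Animating tracker.set_value lets updaters that read get_value() follow.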
|
py | 1a3081112dd6341e71ab42275305611cea2b5b08 | #!/usr/bin/python
# By Hernan Chavez Thielemann
__author__ = 'Hernan Chavez Thielemann <hchavezthiele at gmail dot com>'
# checked ok 30/04/2018
#------------------------------------------------------
#/// Packages and globals definitions are here ///
#------------------------------------------------------
from os.path import dirname, realpath
from sys import exit
from Tkinter import Tk, Frame, Label, TclError, PhotoImage
from conversion_gui import Conversion
from script_gui import Script_GUI
from run_gui import Run_GUI
from popup import AboutPopUp
from tk_lib import createmenubar
from lib.misc.warn import wrg_3
from lib.misc.file import run_command
from lib.misc.version import __version__
#------------------------------------------------------
'''/////////////// Class /////////////'''
#------------------------------------------------------
class Gro2Lam_GUI(Frame):
''' Graphic User Interface '''
def __init__(self, master=None, test = False):
Frame.__init__(self, master)
_ver= __version__.split()
self.master.title(" "*5+"{} {}".format(_ver[0],_ver[2]))#.master
self.pack() # ... why I'm packing here?? coords?
self.test = test
# images storaging
dir_path = dirname( realpath( __file__))
self.img = dict()
self.img['logo'] = PhotoImage( file = dir_path + "/img/logo.ppm")
self.img['help'] = PhotoImage( file = dir_path + "/img/help.ppm")
self.img['file'] = PhotoImage( file = dir_path + "/img/file.ppm")
self.img['gear'] = PhotoImage( file = dir_path + "/img/gear.ppm")
# body init
self.prevailing_body = 0
self.body = None
self.MAINVERTEX = [ 0, 0, 0, 0, 0, 0]
# Conversion gathered data container
self._convert_ = {'setup' : [], 'solvation': []}
self._convertdata_= None
# Script part
self._script_ = {'mainpage' : [], 'advanced': [], 'restrain': []}
self.createmainPennon()
def createmainPennon(self):
'''Self explanatory neated with subroutines to make it more readable'''
row = Frame(self,bg = "white")
Label( row, bg = "white",
image = self.img['logo']).pack( side= 'left', padx=25)
row.pack(side="top", fill='x', padx=1)
self.swapbody(1)
def swapbody(self, _pbody_):# checked ok 16/09 -----------WF
        ''' Deletes and cleans the last generated body.
        Maybe this lacks a real body destroyer, but it works fine
        as-is because the overlap between bodies is small, I guess.
        '''
        if self.prevailing_body != _pbody_:
if self.body == None:
self.body = self.create_conversion_gui()
else:
self.body.destroy()
if _pbody_==1:
print 'Swapping to gro2lam converter GUI'
self.body = self.create_conversion_gui()
elif _pbody_==2:
print 'Swapping to input script generator GUI'
self.body = self.create_script_gui()
elif _pbody_==3:
print 'Swapping to run script GUI'
self.body = self.create_run_gui()
else:
exit('Wuut...')
self.prevailing_body = _pbody_
self.body.createWidgets()
self.body.b1.focus()
self.master.bind('<Return>', self.b1_hook )
self.master.bind('<Escape>', self.quit_hook )
self.body.pack(side='top', fill='x')
def b1_hook(self, event=None):
self.body.b1.invoke()
def quit_hook(self, event=None):
self.body.quit()
def swap_hook(self):
_l_ = [1,2,3]
b = _l_[_l_.index(self.prevailing_body)-2]
self.swapbody(b)
def create_conversion_gui(self):
'Hook to create conversion gui'
return Conversion(self)# Hook
def create_script_gui(self):
'Hook to create script gui'
return Script_GUI(self)# Hook
def create_run_gui(self):
'Hook to create run gui'
return Run_GUI(self)# Hook
#------------------------------------------------------
'''/////////////// Sub routines /////////////'''
#------------------------------------------------------
def launch_gui( started = False):
''' launcher
Main GUI constructor
'''
print wrg_3('Before you start, make sure there are no comments',
'(;) in the middle of a line of the input GROMACS files.',
'Data after this symbol are not taken into account.')
MasterWin = Tk()
prompt = Gro2Lam_GUI( master= MasterWin, test = started)# xl_App
# Top main pennon menu bar definition
entry_list_of_dicts = [{ 'title' : 'File',
'cascade' : (('Quit' ,MasterWin.quit), ) },
{ 'title' : 'Data File Creation',
'title_com' : (prompt.swapbody , 1)},
{ 'title' : 'Input File Creation',
'title_com' : (prompt.swapbody , 2)},
{ 'title' : 'Run',
'title_com' : (prompt.swapbody , 3)},
{ 'titlei' : prompt.img['help'],
'cascade' : (('User manual' , showuserman),
('About' , launch_about, prompt),)}
]
createmenubar(MasterWin, entry_list_of_dicts)
w = 460
h = 570
# get screen width and height
ws = MasterWin.winfo_screenwidth() # width of the screen
hs = MasterWin.winfo_screenheight() # height of the screen
# calculate x and y coordinates for the Tk root window
x = (ws/6) - (w/2)
if x <100:
x = 100
y = (hs/3) - (h/2)
if y< 40:
y = 40
prompt.MAINVERTEX = [ws, hs, w, h, x, y]
#print MAINVERTEX
# set the dimensions of the screen
# and where it is placed
MasterWin.geometry('{:d}x{:d}+{:d}+{:d}'.format( *prompt.MAINVERTEX[2:]))
prompt.mainloop()
try:
MasterWin.destroy()
except TclError:
pass
def showlicence():
print 'Opening licence file'
command = 'gedit ./lib/docs/COPYING'#
run_command(command)
def launch_about( _master_window_):
print 'Launching about'
title_txt = ' '*17+'ABOUT GROTOLAM'
pop = AboutPopUp(master = _master_window_,
title = title_txt,
licence = showlicence
)
def showuserman():
print 'Opening readme file'
command = 'gedit ./lib/docs/README.md'#
run_command(command)
# vim:tw=80
|
py | 1a308167840becc98449ea5d7c892e04e39b37c3 | """
Phong Material
For phong shading
"""
from .material import Material
from ..math import Vec3, Ray, HitRecord, dot3, reflect3, normalize3, clamp3
from ..camera import Camera
class PhongMaterial(Material):
"""Base Material Class"""
def __init__(self, color: Vec3 = Vec3(1.,1.,1.), shininess: float = 10.0, reflectivity: float = 0.0, refraction: float = 1.0):
Material.__init__(self, color, shininess, reflectivity, refraction)
def shade(self, camera: Camera, ray: Ray, hitrecord: HitRecord, lights: list) -> Vec3:
"""
Shade method: Phong
phong shader
"""
colorsum = Vec3(0.,0.,0.)
if len(lights)>0:
for light in lights:
N = hitrecord.normal_g
L = normalize3(hitrecord.point - light.position)
E = normalize3(camera.position - hitrecord.point)
R = normalize3(-reflect3(L, N))
diffuse = max(1. - dot3(N, L), 0.0)
specular = pow(max(dot3(R, E), 0.0), 0.3 * self.shininess)
color = self.color * 0.5 * (diffuse + specular) * hitrecord.color
colorsum += color
colorsum /= len(lights)
colorsum = clamp3(colorsum, Vec3(0.,0.,0.), Vec3(1.,1.,1.))
else:
# no light in scene, use material color
colorsum = self.color * hitrecord.color
return colorsum
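    # For reference (comment only): textbook Phong sums
    #   k_d * max(N.L, 0) + k_s * max(R.E, 0)^shininess
    # per light; the variant above instead uses 1 - N.L for the diffuse
    # term (L points from the light toward the surface here) and averages
    # over lights, which is this renderer's own convention.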
|
py | 1a30826f7435bcf292804ed9da94487d44c58d5a | import numpy as np
import os
import pickle
import tensorflow as tf
import matplotlib.pyplot as plt
from skimage.transform import rotate, resize
from skimage import exposure
import skimage.io as io
from config import FLAGS
def load_facegreyreduxshuffled_set(batch_size, is_training=True):
path = os.path.join('data', 'facegreyredux')
if is_training:
fd = open(os.path.join(path, 'facegreyredux'), 'rb')
# loaded = np.fromfile(file=fd, dtype=np.uint8)
loaded = np.asarray(pickle.load(fd))
trainX = loaded.reshape((50000, 28, 28, 1)).astype(np.float32)
fd = open(os.path.join(path, 'facegreyreduxcat'), 'rb')
# loaded = np.fromfile(file=fd, dtype=np.uint8)
loaded = np.asarray(pickle.load(fd))
trainY = loaded.reshape((50000)).astype(np.int32)
data_set = list(zip(trainX,trainY))
np.random.shuffle(data_set)
trainX, trainY = list(zip(*data_set))
trainX = np.asarray(trainX).reshape((50000, 28, 28, 1)).astype(np.float32)
trainY = np.asarray(trainY).reshape((50000)).astype(np.int32)
trX = trainX[:40000] / 255.
trY = trainY[:40000]
valX = trainX[40000:, ] / 255.
valY = trainY[40000:]
num_tr_batch = 40000 // batch_size
num_val_batch = 10000 // batch_size
return trX, trY, num_tr_batch, valX, valY, num_val_batch
else:
if (FLAGS.flickr):
fd = open(os.path.join(path, 'flickrsetgreyredux'), 'rb')
loaded = np.asarray(pickle.load(fd))
trainX = loaded.reshape((10000, 28, 28)).astype(np.float32) / 255.
else:
fd = open(os.path.join(path, 'facegreyreduxeval'), 'rb')
loaded = np.asarray(pickle.load(fd))
trainX = loaded.reshape((10000, 28, 28)).astype(np.float32) / 255.
fd = open(os.path.join(path, 'facegreyreduxevalcat'), 'rb')
loaded = np.asarray(pickle.load(fd))
trainY = loaded.reshape((10000)).astype(np.int32)
rotatedlist = []
for image in trainX:
image = rotate(image, FLAGS.rotation, preserve_range=True)
if(FLAGS.mooney):
v_min, v_max = np.percentile(image, (49.99999999, 51))
image = exposure.rescale_intensity(image, in_range=(v_min, v_max))
rotatedlist.append(image)
if(len(rotatedlist)==1000):
I = resize(image.reshape(28, 28), (128, 128))
io.imsave("rotate" + str(FLAGS.rotation) + "example.jpg", I, cmap='gray')
rotatedlist = np.asarray(rotatedlist)
plt.imshow(rotatedlist[33], cmap='gray')
plt.show()
trainX = rotatedlist.reshape((10000, 28, 28, 1)).astype(np.float32)
return trainX, trainY
def create_inputs_norb(path, is_train: bool):
"""Get a batch from the input pipeline.
Author:
Ashley Gritzman 15/11/2018
Args:
is_train:
Returns:
img, lab:
"""
if is_train:
trX, trY, num_tr_batch, valX, valY, num_val_batch = load_facegreyreduxshuffled_set(FLAGS.batch_size, is_train)
else:
trX, trY = load_facegreyreduxshuffled_set(FLAGS.batch_size, is_train)
def generator():
for e1, e2 in zip(trX, trY):
yield e1, e2
capacity = 2000 + 3 * FLAGS.batch_size
# Create batched dataset
tf_dataset = tf.data.Dataset.from_generator(generator, output_types=(tf.float32, tf.int32), output_shapes=(tf.TensorShape(list(trX[0].shape)), ())).repeat().shuffle(capacity).batch(batch_size=FLAGS.batch_size, drop_remainder=True).prefetch(1)
# dataset = input_fn(path, is_train)
# Create one-shot iterator
iterator = tf.compat.v1.data.make_one_shot_iterator(tf_dataset)
img, lab = iterator.get_next()
output_dict = {'image': img,
'label': lab}
return output_dict
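# Usage sketch (assumption: called while building a tf.compat.v1 graph):
#   inputs = create_inputs_norb('data/facegreyredux', is_train=True)
#   images, labels = inputs['image'], inputs['label']
#   # images: (batch_size, 28, 28, 1) float32, labels: (batch_size,) int32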
|
py | 1a30836d074dd9e595ba028a8883a2a758ec1d47 | """The tests for the MQTT binary sensor platform."""
import copy
from datetime import datetime, timedelta
import json
from unittest.mock import patch
import pytest
from homeassistant.components import binary_sensor
from homeassistant.const import (
EVENT_STATE_CHANGED,
STATE_OFF,
STATE_ON,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
import homeassistant.core as ha
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from .test_common import (
help_test_availability_when_connection_lost,
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_discovery_update_unchanged,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message, async_fire_time_changed
DEFAULT_CONFIG = {
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
}
}
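# The expiry tests below make the expire_after countdown deterministic by
# patching homeassistant.helpers.event.dt_util.utcnow and advancing the
# clock with async_fire_time_changed.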
async def test_setting_sensor_value_expires_availability_topic(hass, mqtt_mock, caplog):
"""Test the expiration of the value."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"expire_after": 4,
"force_update": True,
"availability_topic": "availability-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async_fire_mqtt_message(hass, "availability-topic", "online")
# State should be unavailable since expire_after is defined and > 0
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
await expires_helper(hass, mqtt_mock, caplog)
async def test_setting_sensor_value_expires(hass, mqtt_mock, caplog):
"""Test the expiration of the value."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"expire_after": 4,
"force_update": True,
}
},
)
await hass.async_block_till_done()
# State should be unavailable since expire_after is defined and > 0
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
await expires_helper(hass, mqtt_mock, caplog)
async def expires_helper(hass, mqtt_mock, caplog):
"""Run the basic expiry code."""
realnow = dt_util.utcnow()
now = datetime(realnow.year + 1, 1, 1, 1, tzinfo=dt_util.UTC)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
# Value was set correctly.
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Next message resets timer
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(hass, "test-topic", "OFF")
await hass.async_block_till_done()
# Value was updated correctly.
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Time jump +3s
now = now + timedelta(seconds=3)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is not yet expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
# Time jump +2s
now = now + timedelta(seconds=2)
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Value is expired now
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_expiration_on_discovery_and_discovery_update_of_binary_sensor(
hass, mqtt_mock, caplog
):
"""Test that binary_sensor with expire_after set behaves correctly on discovery and discovery update."""
config = {
"name": "Test",
"state_topic": "test-topic",
"expire_after": 4,
"force_update": True,
}
config_msg = json.dumps(config)
# Set time and publish config message to create binary_sensor via discovery with 4 s expiry
realnow = dt_util.utcnow()
now = datetime(realnow.year + 1, 1, 1, 1, tzinfo=dt_util.UTC)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(
hass, "homeassistant/binary_sensor/bla/config", config_msg
)
await hass.async_block_till_done()
# Test that binary_sensor is not available
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
# Publish state message
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
# Test that binary_sensor has correct state
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Advance +3 seconds
now = now + timedelta(seconds=3)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# binary_sensor is not yet expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Resend config message to update discovery
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
async_fire_mqtt_message(
hass, "homeassistant/binary_sensor/bla/config", config_msg
)
await hass.async_block_till_done()
# Test that binary_sensor has not expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
# Add +2 seconds
now = now + timedelta(seconds=2)
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_time_changed(hass, now)
await hass.async_block_till_done()
# Test that binary_sensor has expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
# Resend config message to update discovery
with patch(("homeassistant.helpers.event.dt_util.utcnow"), return_value=now):
async_fire_mqtt_message(
hass, "homeassistant/binary_sensor/bla/config", config_msg
)
await hass.async_block_till_done()
# Test that binary_sensor is still expired
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNAVAILABLE
async def test_setting_sensor_value_via_mqtt_message(hass, mqtt_mock):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "ON")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "OFF")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_invalid_sensor_value_via_mqtt_message(hass, mqtt_mock, caplog):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "0N")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
assert "No matching payload found for entity" in caplog.text
caplog.clear()
assert "No matching payload found for entity" not in caplog.text
async_fire_mqtt_message(hass, "test-topic", "ON")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "0FF")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert "No matching payload found for entity" in caplog.text
async def test_setting_sensor_value_via_mqtt_message_and_template(hass, mqtt_mock):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"value_template": '{%if is_state(entity_id,"on")-%}OFF'
"{%-else-%}ON{%-endif%}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_setting_sensor_value_via_mqtt_message_and_template2(
hass, mqtt_mock, caplog
):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"value_template": "{{value | upper}}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "on")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", "off")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async_fire_mqtt_message(hass, "test-topic", "illegal")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
assert "template output: 'ILLEGAL'" in caplog.text
async def test_setting_sensor_value_via_mqtt_message_and_template_and_raw_state_encoding(
hass, mqtt_mock, caplog
):
"""Test processing a raw value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"encoding": "",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"value_template": "{%if value|unpack('b')-%}ON{%else%}OFF{%-endif-%}",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", b"\x01")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async_fire_mqtt_message(hass, "test-topic", b"\x00")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
async def test_setting_sensor_value_via_mqtt_message_empty_template(
hass, mqtt_mock, caplog
):
"""Test the setting of the value via MQTT."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"value_template": '{%if value == "ABC"%}ON{%endif%}',
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
async_fire_mqtt_message(hass, "test-topic", "DEF")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_UNKNOWN
assert "Empty template output" in caplog.text
async_fire_mqtt_message(hass, "test-topic", "ABC")
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
async def test_valid_device_class(hass, mqtt_mock):
"""Test the setting of a valid sensor class."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "motion",
"state_topic": "test-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.attributes.get("device_class") == "motion"
async def test_invalid_device_class(hass, mqtt_mock):
"""Test the setting of an invalid sensor class."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"device_class": "abc123",
"state_topic": "test-topic",
}
},
)
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state is None
async def test_availability_when_connection_lost(hass, mqtt_mock):
"""Test availability after MQTT disconnection."""
await help_test_availability_when_connection_lost(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
await help_test_default_availability_payload(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
await help_test_custom_availability_payload(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_force_update_disabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
}
},
)
await hass.async_block_till_done()
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async def test_force_update_enabled(hass, mqtt_mock):
"""Test force update option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"force_update": True,
}
},
)
await hass.async_block_till_done()
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
assert len(events) == 2
async def test_off_delay(hass, mqtt_mock):
"""Test off_delay option."""
assert await async_setup_component(
hass,
binary_sensor.DOMAIN,
{
binary_sensor.DOMAIN: {
"platform": "mqtt",
"name": "test",
"state_topic": "test-topic",
"payload_on": "ON",
"payload_off": "OFF",
"off_delay": 30,
"force_update": True,
}
},
)
await hass.async_block_till_done()
events = []
@ha.callback
def callback(event):
"""Verify event got called."""
events.append(event)
hass.bus.async_listen(EVENT_STATE_CHANGED, callback)
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert len(events) == 1
async_fire_mqtt_message(hass, "test-topic", "ON")
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_ON
assert len(events) == 2
async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=30))
await hass.async_block_till_done()
state = hass.states.get("binary_sensor.test")
assert state.state == STATE_OFF
assert len(events) == 3
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one sensor per unique_id."""
config = {
binary_sensor.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"state_topic": "test-topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, mqtt_mock, binary_sensor.DOMAIN, config)
async def test_discovery_removal_binary_sensor(hass, mqtt_mock, caplog):
"""Test removal of discovered binary_sensor."""
data = json.dumps(DEFAULT_CONFIG[binary_sensor.DOMAIN])
await help_test_discovery_removal(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, data
)
async def test_discovery_update_binary_sensor_topic_template(hass, mqtt_mock, caplog):
"""Test update of discovered binary_sensor."""
config1 = copy.deepcopy(DEFAULT_CONFIG[binary_sensor.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[binary_sensor.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "sensor/state1"
config2["state_topic"] = "sensor/state2"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("sensor/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("sensor/state2", '{"state2":{"state":"OFF"}}')], "off", None),
([("sensor/state2", '{"state2":{"state":"ON"}}')], "on", None),
([("sensor/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("sensor/state1", '{"state2":{"state":"OFF"}}')], "on", None),
([("sensor/state2", '{"state1":{"state":"OFF"}}')], "on", None),
([("sensor/state2", '{"state2":{"state":"OFF"}}')], "off", None),
]
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
binary_sensor.DOMAIN,
config1,
config2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_binary_sensor_template(hass, mqtt_mock, caplog):
"""Test update of discovered binary_sensor."""
config1 = copy.deepcopy(DEFAULT_CONFIG[binary_sensor.DOMAIN])
config2 = copy.deepcopy(DEFAULT_CONFIG[binary_sensor.DOMAIN])
config1["name"] = "Beer"
config2["name"] = "Milk"
config1["state_topic"] = "sensor/state1"
config2["state_topic"] = "sensor/state1"
config1["value_template"] = "{{ value_json.state1.state }}"
config2["value_template"] = "{{ value_json.state2.state }}"
state_data1 = [
([("sensor/state1", '{"state1":{"state":"ON"}}')], "on", None),
]
state_data2 = [
([("sensor/state1", '{"state2":{"state":"OFF"}}')], "off", None),
([("sensor/state1", '{"state2":{"state":"ON"}}')], "on", None),
([("sensor/state1", '{"state1":{"state":"OFF"}}')], "on", None),
([("sensor/state1", '{"state2":{"state":"OFF"}}')], "off", None),
]
await help_test_discovery_update(
hass,
mqtt_mock,
caplog,
binary_sensor.DOMAIN,
config1,
config2,
state_data1=state_data1,
state_data2=state_data2,
)
async def test_discovery_update_unchanged_binary_sensor(hass, mqtt_mock, caplog):
"""Test update of discovered binary_sensor."""
config1 = copy.deepcopy(DEFAULT_CONFIG[binary_sensor.DOMAIN])
config1["name"] = "Beer"
data1 = json.dumps(config1)
with patch(
"homeassistant.components.mqtt.binary_sensor.MqttBinarySensor.discovery_update"
) as discovery_update:
await help_test_discovery_update_unchanged(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, data1, discovery_update
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer",' ' "off_delay": -1 }'
data2 = '{ "name": "Milk",' ' "state_topic": "test_topic" }'
await help_test_discovery_broken(
hass, mqtt_mock, caplog, binary_sensor.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT binary sensor device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT binary sensor device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
await help_test_entity_debug_info_message(
hass, mqtt_mock, binary_sensor.DOMAIN, DEFAULT_CONFIG
)
|
py | 1a3085302cc1414cf1e002789ed0229e352be1e9 | import csv
from collections import OrderedDict
from datetime import datetime
from pathlib import Path
from typing import Any, List, Mapping
from dmutils.formats import DATE_FORMAT, DATETIME_FORMAT
from dmutils.s3 import S3
from dmscripts.helpers.s3_helpers import get_bucket_name
# This URL is framework agnostic
PUBLIC_BRIEF_URL = "https://www.digitalmarketplace.service.gov.uk/digital-outcomes-and-specialists/opportunities/{}"
DOS_OPPORTUNITY_HEADERS = [
"ID", "Opportunity", "Link", "Framework", "Category", "Specialist",
"Organisation Name", "Buyer Domain", "Location Of The Work",
"Published At", "Open For", "Expected Contract Length", "Applications from SMEs",
"Applications from Large Organisations", "Total Organisations", "Status", "Winning supplier",
"Size of supplier", "Contract amount", "Contract start date", "Clarification questions", "Employment status"
]
DOWNLOAD_FILE_NAME = "opportunity-data.csv"
def format_datetime_string_as_date(dt):
return datetime.strptime(dt, DATETIME_FORMAT).strftime(DATE_FORMAT) if dt else None
def remove_username_from_email_address(ea):
return '{}'.format(ea.split('@').pop()) if ea else None
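# For illustration, the two helpers above behave roughly as follows (inputs
# are hypothetical; the exact datetime layout comes from dmutils' formats):
#
#     format_datetime_string_as_date("2021-03-01T12:00:00.000000Z")  # -> "2021-03-01"
#     remove_username_from_email_address("buyer@example.gov.uk")     # -> "example.gov.uk"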
def _build_row(
brief: dict, brief_responses: List[dict], include_buyer_user_details: bool = False
) -> OrderedDict:
winner = None
applications_from_sme_suppliers = 0
applications_from_large_suppliers = 0
for brief_response in brief_responses:
if brief_response['supplierOrganisationSize'] == 'large':
applications_from_large_suppliers += 1
else:
applications_from_sme_suppliers += 1
if brief_response['status'] == 'awarded':
winner = brief_response
row = OrderedDict(zip(DOS_OPPORTUNITY_HEADERS, [
brief['id'],
brief['title'],
PUBLIC_BRIEF_URL.format(brief['id']),
brief['frameworkSlug'],
brief['lotSlug'],
brief.get('specialistRole', ""),
brief['organisation'],
remove_username_from_email_address(brief['users'][0]['emailAddress']),
brief['location'],
format_datetime_string_as_date(brief['publishedAt']),
brief.get('requirementsLength', '2 weeks'), # only briefs on the specialists lot include 'requirementsLength'
brief.get('contractLength', ''),
applications_from_sme_suppliers,
applications_from_large_suppliers,
applications_from_sme_suppliers + applications_from_large_suppliers,
brief['status'],
winner['supplierName'] if winner else '',
winner['supplierOrganisationSize'] if winner else '',
winner['awardDetails']['awardedContractValue'] if winner else '',
winner['awardDetails']['awardedContractStartDate'] if winner else '',
len(brief['clarificationQuestions']),
brief.get('employmentStatus', ''),
]))
if include_buyer_user_details:
buyer_user = brief["users"][0]
row.update([
("Buyer user name", buyer_user["name"]),
("Buyer email address", buyer_user["emailAddress"]),
("Buyer phone number", buyer_user.get("phoneNumber", "")),
])
return row
def get_latest_dos_framework(client) -> str:
frameworks = client.find_frameworks()['frameworks']
for framework in frameworks:
# Should be maximum of 1 live DOS framework
if framework['family'] == 'digital-outcomes-and-specialists' and framework['status'] == 'live':
return framework['slug']
return 'digital-outcomes-and-specialists'
def get_brief_data(client, logger, include_buyer_user_details: bool = False) -> list:
logger.info("Fetching closed briefs from API")
briefs = client.find_briefs_iter(status="closed,awarded,unsuccessful,cancelled", with_users=True,
with_clarification_questions=True)
rows = []
for brief in briefs:
logger.info(f"Fetching brief responses for Brief ID {brief['id']}")
brief_responses = client.find_brief_responses_iter(brief_id=brief['id'])
rows.append(_build_row(brief, brief_responses, include_buyer_user_details))
return rows
def write_rows_to_csv(rows: List[Mapping[str, Any]], file_path: Path, logger) -> None:
logger.info(f"Writing rows to {file_path}")
# assumes all rows have the same keys
fieldnames = list(rows[0].keys())
with open(file_path, 'w') as csv_file:
writer = csv.DictWriter(csv_file, fieldnames, delimiter=',', quotechar='"')
writer.writeheader()
for row in rows:
writer.writerow(row)
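# Usage sketch for write_rows_to_csv (the rows and logger are hypothetical;
# any sequence of mappings sharing the same keys works):
#
#     rows = [OrderedDict([("ID", 1), ("Opportunity", "Example brief")])]
#     write_rows_to_csv(rows, Path("/tmp/example.csv"), logger)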
def upload_file_to_s3(
file_path,
bucket,
remote_key_name: str,
download_name: str,
*,
public: bool = True,
dry_run: bool = False,
logger,
):
with open(file_path, 'br') as source_file:
acl = "public-read" if public else "bucket-owner-full-control"
logger.info("{}UPLOAD: {} to s3://{}/{} with acl {}".format(
'[Dry-run]' if dry_run else '',
file_path,
bucket.bucket_name,
remote_key_name,
acl
))
if not dry_run:
# Save file
bucket.save(
remote_key_name,
source_file,
acl=acl,
download_filename=download_name
)
def export_dos_opportunities(
client,
logger,
stage: str,
output_dir,
dry_run: bool = False
):
output_dir = Path(output_dir)
if not output_dir.exists():
logger.info(f"Creating {output_dir} directory")
output_dir.mkdir(parents=True)
latest_framework_slug = get_latest_dos_framework(client)
communications_bucket = S3(get_bucket_name(stage, "communications"))
reports_bucket = S3(get_bucket_name(stage, "reports"))
logger.info("Exporting DOS opportunity data to CSV")
# Get the data
rows = get_brief_data(client, logger, include_buyer_user_details=True)
# Construct CSV for admins
write_rows_to_csv(rows, output_dir / "opportunity-data-for-admins.csv", logger)
# Construct public CSV (filter out buyer details)
write_rows_to_csv(
[
OrderedDict((k, v) for k, v in row.items() if k in DOS_OPPORTUNITY_HEADERS)
for row in rows
],
output_dir / DOWNLOAD_FILE_NAME,
logger
)
# Upload admin CSV to reports bucket
upload_file_to_s3(
output_dir / "opportunity-data-for-admins.csv",
reports_bucket,
f"{latest_framework_slug}/reports/{DOWNLOAD_FILE_NAME}",
DOWNLOAD_FILE_NAME,
public=False,
dry_run=dry_run,
logger=logger
)
# Upload public CSV to S3
upload_file_to_s3(
output_dir / DOWNLOAD_FILE_NAME,
communications_bucket,
f"{latest_framework_slug}/communications/data/{DOWNLOAD_FILE_NAME}",
DOWNLOAD_FILE_NAME,
public=True,
dry_run=dry_run,
logger=logger
)
|
py | 1a30887516fb47bb02abf1195fdda8c41a709f3c | """Utility functions."""
import logging
import numpy as np
from scipy.signal import periodogram
from tensorpac.methods.meth_pac import _kl_hr
from tensorpac.pac import _PacObj, _PacVisual
from tensorpac.io import set_log_level
from matplotlib.gridspec import GridSpec
import matplotlib.pyplot as plt
logger = logging.getLogger('tensorpac')
def pac_vec(f_pha='mres', f_amp='mres'):
"""Generate cross-frequency coupling vectors.
Parameters
----------
    f_pha, f_amp : list, tuple, array_like or string
        Frequency vector for the phase and amplitude. Here you can use
        several forms to define those vectors :
* Basic list/tuple (ex: [2, 4] or [8, 12]...)
* List of frequency bands (ex: [[2, 4], [5, 7]]...)
* Dynamic definition : (start, stop, width, step)
* Range definition (ex : np.arange(3) => [[0, 1], [1, 2]])
* Using a string. `f_pha` and `f_amp` can be 'lres', 'mres', 'hres'
respectively for low, middle and high resolution vectors. In that
case, it uses the definition proposed by Bahramisharif et al. 2013
:cite:`bahramisharif2013propagating` i.e
f_pha = [f - f / 4, f + f / 4] and f_amp = [f - f / 8, f + f / 8]
Returns
-------
f_pha, f_amp : array_like
Arrays containing the pairs of phase and amplitude frequencies. Each
        vector has a shape of (N, 2).
"""
nb_fcy = dict(lres=10, mres=30, hres=50, demon=70, hulk=100)
if isinstance(f_pha, str):
# get where phase frequencies start / finish / number
f_pha_start, f_pha_end = 2, 20
f_pha_nb = nb_fcy[f_pha]
# f_pha = [f - f / 4, f + f / 4]
f_pha_mid = np.linspace(f_pha_start, f_pha_end, f_pha_nb)
f_pha = np.c_[f_pha_mid - f_pha_mid / 4., f_pha_mid + f_pha_mid / 4.]
if isinstance(f_amp, str):
# get where amplitude frequencies start / finish / number
f_amp_start, f_amp_end = 60, 160
f_amp_nb = nb_fcy[f_amp]
# f_amp = [f - f / 8, f + f / 8]
f_amp_mid = np.linspace(f_amp_start, f_amp_end, f_amp_nb)
f_amp = np.c_[f_amp_mid - f_amp_mid / 8., f_amp_mid + f_amp_mid / 8.]
return _check_freq(f_pha), _check_freq(f_amp)
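# Usage sketch for pac_vec (shapes follow from the 'mres' resolution above):
#
#     f_pha, f_amp = pac_vec(f_pha='mres', f_amp='mres')
#     # f_pha.shape == (30, 2), f_amp.shape == (30, 2); each row is [low, high]
#     f_pha, f_amp = pac_vec(f_pha=[2, 4], f_amp=(60, 160, 10, 5))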
def _check_freq(f):
"""Check the frequency definition."""
f = np.atleast_2d(np.asarray(f))
    if len(f.reshape(-1)) == 1:
        raise ValueError("The length of f should be at least 2.")
    elif 2 in f.shape:  # f of shape (N, 2) or (2, N)
        if f.shape[1] != 2:
            f = f.T
elif np.squeeze(f).shape == (4,): # (f_start, f_end, f_width, f_step)
f = _pair_vectors(*tuple(np.squeeze(f)))
else: # Sequential
f = f.reshape(-1)
f.sort()
f = np.c_[f[0:-1], f[1::]]
return f
def _pair_vectors(f_start, f_end, f_width, f_step):
# Generate two array for phase and amplitude :
fdown = np.arange(f_start, f_end - f_width, f_step)
fup = np.arange(f_start + f_width, f_end, f_step)
return np.c_[fdown, fup]
def pac_trivec(f_start=60., f_end=160., f_width=10.):
"""Generate triangular vector.
    By contrast with the pac_vec function, this function generates frequency
    vectors with an increasing frequency bandwidth.
Parameters
----------
f_start : float | 60.
Starting frequency.
f_end : float | 160.
Ending frequency.
f_width : float | 10.
Frequency bandwidth increase between each band.
Returns
-------
f : array_like
The triangular vector.
tridx : array_like
The triangular index for the reconstruction.
"""
starting = np.arange(f_start, f_end + f_width, f_width)
f, tridx = np.array([]), np.array([])
for num, k in enumerate(starting[0:-1]):
        # Length of the vector to build:
le = len(starting) - (num + 1)
# Create the frequency vector for this starting frequency :
fst = np.c_[np.full(le, k), starting[num + 1::]]
nfst = fst.shape[0]
# Create the triangular index for this vector of frequencies :
idx = np.c_[np.flipud(np.arange(nfst)), np.full(nfst, num)]
tridx = np.concatenate((tridx, idx), axis=0) if tridx.size else idx
f = np.concatenate((f, fst), axis=0) if f.size else fst
return f, tridx
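# Usage sketch for pac_trivec (defaults shown; the first band is [60, 70] and
# the bandwidth grows up to [60, 160]):
#
#     f, tridx = pac_trivec(f_start=60., f_end=160., f_width=10.)
#     # f[0] == [60., 70.]; tridx locates each band in the triangular layout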
class PSD(object):
"""Power Spectrum Density for electrophysiological brain data.
Parameters
----------
x : array_like
Array of data of shape (n_epochs, n_times)
sf : float
The sampling frequency.
"""
def __init__(self, x, sf):
"""Init."""
assert isinstance(x, np.ndarray) and (x.ndim == 2), (
"x should be a 2d array of shape (n_epochs, n_times)")
self._n_trials, self._n_times = x.shape
logger.info(f"Compute PSD over {self._n_trials} trials and "
f"{self._n_times} time points")
self._freqs, self._psd = periodogram(x, fs=sf, window=None,
nfft=self._n_times,
detrend='constant',
return_onesided=True,
scaling='density', axis=1)
def plot(self, f_min=None, f_max=None, confidence=95, interp=None,
log=False, grid=True, fz_title=18, fz_labels=15):
"""Plot the PSD.
Parameters
----------
f_min, f_max : (int, float) | None
Frequency bounds to use for plotting
confidence : (int, float) | None
Light gray confidence interval. If None, no interval will be
displayed
interp : int | None
Line interpolation integer. For example, if interp is 10 the number
            of points is going to be multiplied by 10
log : bool | False
Use a log scale representation
grid : bool | True
Add a grid to the plot
fz_title : int | 18
Font size for the title
fz_labels : int | 15
            Font size for the x/y labels
Returns
-------
ax : Matplotlib axis
The matplotlib axis that contains the figure
"""
import matplotlib.pyplot as plt
f_types = (int, float)
# interpolation
xvec, yvec = self._freqs, self._psd
if isinstance(interp, int) and (interp > 1):
# from scipy.interpolate import make_interp_spline, BSpline
from scipy.interpolate import interp1d
xnew = np.linspace(xvec[0], xvec[-1], len(xvec) * interp)
f = interp1d(xvec, yvec, kind='quadratic', axis=1)
yvec = f(xnew)
xvec = xnew
# (f_min, f_max)
f_min = xvec[0] if not isinstance(f_min, f_types) else f_min
f_max = xvec[-1] if not isinstance(f_max, f_types) else f_max
# plot main psd
plt.plot(xvec, yvec.mean(0), color='black',
label='mean PSD over trials')
# plot confidence interval
if isinstance(confidence, (int, float)) and (0 < confidence < 100):
logger.info(f" Add {confidence}th confidence interval")
interval = (100. - confidence) / 2
kw = dict(axis=0, interpolation='nearest')
psd_min = np.percentile(yvec, interval, **kw)
psd_max = np.percentile(yvec, 100. - interval, **kw)
plt.fill_between(xvec, psd_max, psd_min, color='lightgray',
alpha=0.5,
label=f"{confidence}th confidence interval")
plt.legend(fontsize=fz_labels)
plt.xlabel("Frequencies (Hz)", fontsize=fz_labels)
plt.ylabel("Power (V**2/Hz)", fontsize=fz_labels)
plt.title(f"PSD mean over {self._n_trials} trials", fontsize=fz_title)
plt.xlim(f_min, f_max)
if log:
from matplotlib.ticker import ScalarFormatter
plt.xscale('log', basex=10)
plt.gca().xaxis.set_major_formatter(ScalarFormatter())
if grid:
plt.grid(color='grey', which='major', linestyle='-',
linewidth=1., alpha=0.5)
plt.grid(color='lightgrey', which='minor', linestyle='--',
linewidth=0.5, alpha=0.5)
return plt.gca()
def plot_st_psd(self, f_min=None, f_max=None, log=False, grid=True,
fz_title=18, fz_labels=15, fz_cblabel=15, **kw):
"""Single-trial PSD plot.
Parameters
----------
f_min, f_max : (int, float) | None
Frequency bounds to use for plotting
log : bool | False
Use a log scale representation
grid : bool | True
Add a grid to the plot
fz_title : int | 18
Font size for the title
fz_labels : int | 15
            Font size for the x/y labels
fz_cblabel : int | 15
            Font size for the colorbar label
Returns
-------
ax : Matplotlib axis
The matplotlib axis that contains the figure
"""
# manage input variables
kw['fz_labels'] = kw.get('fz_labels', fz_labels)
kw['fz_title'] = kw.get('fz_title', fz_title)
        kw['fz_cblabel'] = kw.get('fz_cblabel', fz_cblabel)
kw['xlabel'] = kw.get('xlabel', "Frequencies (Hz)")
kw['ylabel'] = kw.get('ylabel', "Trials")
kw['title'] = kw.get('title', "Single-trial PSD")
kw['cblabel'] = kw.get('cblabel', "Power (V**2/Hz)")
# (f_min, f_max)
xvec, psd = self._freqs, self._psd
f_types = (int, float)
f_min = xvec[0] if not isinstance(f_min, f_types) else f_min
f_max = xvec[-1] if not isinstance(f_max, f_types) else f_max
# locate (f_min, f_max) indices
f_min_idx = np.abs(xvec - f_min).argmin()
f_max_idx = np.abs(xvec - f_max).argmin()
sl_freq = slice(f_min_idx, f_max_idx)
xvec = xvec[sl_freq]
psd = psd[:, sl_freq]
# make the 2D plot
_viz = _PacVisual()
trials = np.arange(self._n_trials)
_viz.pacplot(psd, xvec, trials, **kw)
if log:
from matplotlib.ticker import ScalarFormatter
plt.xscale('log', basex=10)
plt.gca().xaxis.set_major_formatter(ScalarFormatter())
if grid:
plt.grid(color='grey', which='major', linestyle='-',
linewidth=1., alpha=0.5)
plt.grid(color='lightgrey', which='minor', linestyle='--',
linewidth=0.5, alpha=0.5)
return plt.gca()
def show(self):
"""Display the PSD figure."""
import matplotlib.pyplot as plt
plt.show()
@property
def freqs(self):
"""Get the frequency vector."""
return self._freqs
@property
def psd(self):
"""Get the psd value."""
return self._psd
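# Usage sketch for the PSD class, on synthetic data (values are arbitrary):
#
#     import numpy as np
#     x = np.random.rand(10, 1000)          # (n_epochs, n_times)
#     psd = PSD(x, sf=512.)
#     psd.plot(f_min=1, f_max=40, confidence=95)
#     psd.show()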
class BinAmplitude(_PacObj):
"""Bin the amplitude according to the phase.
Parameters
----------
x : array_like
Array of data of shape (n_epochs, n_times)
sf : float
The sampling frequency
f_pha : tuple, list | [2, 4]
List of two floats describing the frequency bounds for extracting the
phase
f_amp : tuple, list | [60, 80]
List of two floats describing the frequency bounds for extracting the
amplitude
n_bins : int | 18
Number of bins to use to binarize the phase and the amplitude
dcomplex : {'wavelet', 'hilbert'}
Method for the complex definition. Use either 'hilbert' or
'wavelet'.
cycle : tuple | (3, 6)
Control the number of cycles for filtering (only if dcomplex is
'hilbert'). Should be a tuple of integers where the first one
refers to the number of cycles for the phase and the second for the
amplitude :cite:`bahramisharif2013propagating`.
width : int | 7
Width of the Morlet's wavelet.
edges : int | None
Number of samples to discard to avoid edge effects due to filtering
"""
def __init__(self, x, sf, f_pha=[2, 4], f_amp=[60, 80], n_bins=18,
dcomplex='hilbert', cycle=(3, 6), width=7, edges=None,
n_jobs=-1):
"""Init."""
_PacObj.__init__(self, f_pha=f_pha, f_amp=f_amp, dcomplex=dcomplex,
cycle=cycle, width=width)
# check
x = np.atleast_2d(x)
assert x.ndim <= 2, ("`x` input should be an array of shape "
"(n_epochs, n_times)")
assert isinstance(sf, (int, float)), ("`sf` input should be a integer "
"or a float")
assert all([isinstance(k, (int, float)) for k in f_pha]), (
"`f_pha` input should be a list of two integers / floats")
assert all([isinstance(k, (int, float)) for k in f_amp]), (
"`f_amp` input should be a list of two integers / floats")
assert isinstance(n_bins, int), "`n_bins` should be an integer"
logger.info(f"Binning {f_amp}Hz amplitude according to {f_pha}Hz "
"phase")
# extract phase and amplitude
kw = dict(keepfilt=False, edges=edges, n_jobs=n_jobs)
pha = self.filter(sf, x, 'phase', **kw)
amp = self.filter(sf, x, 'amplitude', **kw)
# binarize amplitude according to phase
self._amplitude = _kl_hr(pha, amp, n_bins, mean_bins=False).squeeze()
self.n_bins = n_bins
def plot(self, unit='rad', normalize=False, **kw):
"""Plot the amplitude.
Parameters
----------
unit : {'rad', 'deg'}
The unit to use for the phase. Use either 'deg' for degree or 'rad'
for radians
normalize : bool | None
Normalize the histogram by the maximum
kw : dict | {}
Additional inputs are passed to the matplotlib.pyplot.bar function
Returns
-------
ax : Matplotlib axis
The matplotlib axis that contains the figure
"""
import matplotlib.pyplot as plt
assert unit in ['rad', 'deg']
if unit == 'rad':
self._phase = np.linspace(-np.pi, np.pi, self.n_bins)
width = 2 * np.pi / self.n_bins
elif unit == 'deg':
self._phase = np.linspace(-180, 180, self.n_bins)
width = 360 / self.n_bins
amp_mean = self._amplitude.mean(1)
if normalize:
amp_mean /= amp_mean.max()
plt.bar(self._phase, amp_mean, width=width, **kw)
plt.xlabel(f"Frequency phase ({self.n_bins} bins)", fontsize=18)
plt.ylabel("Amplitude", fontsize=18)
plt.title("Binned amplitude")
plt.autoscale(enable=True, axis='x', tight=True)
def show(self):
"""Show the figure."""
import matplotlib.pyplot as plt
plt.show()
@property
def amplitude(self):
"""Get the amplitude value."""
return self._amplitude
@property
def phase(self):
"""Get the phase value."""
return self._phase
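# Usage sketch for BinAmplitude, on synthetic data (values are arbitrary):
#
#     import numpy as np
#     x = np.random.rand(10, 2000)          # (n_epochs, n_times)
#     b = BinAmplitude(x, sf=512., f_pha=[2, 4], f_amp=[60, 80], n_bins=18)
#     b.plot(unit='deg', normalize=True)
#     b.show()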
class ITC(_PacObj, _PacVisual):
"""Compute the Inter-Trials Coherence (ITC).
The Inter-Trials Coherence (ITC) is a measure of phase consistency over
trials for a single recording site (electrode / sensor etc.).
Parameters
----------
x : array_like
Array of data of shape (n_epochs, n_times)
sf : float
The sampling frequency
f_pha : tuple, list | [2, 4]
List of two floats describing the frequency bounds for extracting the
phase
dcomplex : {'wavelet', 'hilbert'}
Method for the complex definition. Use either 'hilbert' or
'wavelet'.
cycle : tuple | 3
Control the number of cycles for filtering the phase (only if dcomplex
is 'hilbert').
width : int | 7
Width of the Morlet's wavelet.
edges : int | None
Number of samples to discard to avoid edge effects due to filtering
"""
def __init__(self, x, sf, f_pha=[2, 4], dcomplex='hilbert', cycle=3,
width=7, edges=None, n_jobs=-1, verbose=None):
"""Init."""
set_log_level(verbose)
_PacObj.__init__(self, f_pha=f_pha, f_amp=[60, 80], dcomplex=dcomplex,
cycle=(cycle, 6), width=width)
_PacVisual.__init__(self)
# check
x = np.atleast_2d(x)
assert x.ndim <= 2, ("`x` input should be an array of shape "
"(n_epochs, n_times)")
self._n_trials = x.shape[0]
logger.info("Inter-Trials Coherence (ITC)")
logger.info(f" extracting {len(self.xvec)} phases")
# extract phase and amplitude
kw = dict(keepfilt=False, edges=edges, n_jobs=n_jobs)
pha = self.filter(sf, x, 'phase', **kw)
# compute itc
self._itc = np.abs(np.exp(1j * pha).mean(1)).squeeze()
self._sf = sf
def plot(self, times=None, **kw):
"""Plot the Inter-Trials Coherence.
Parameters
----------
times : array_like | None
Custom time vector to use
kw : dict | {}
Additional inputs are either pass to the matplotlib.pyplot.plot
function if a single phase band is used, otherwise to the
matplotlib.pyplot.pcolormesh function
Returns
-------
ax : Matplotlib axis
The matplotlib axis that contains the figure
"""
import matplotlib.pyplot as plt
n_pts = self._itc.shape[-1]
if not isinstance(times, np.ndarray):
times = np.arange(n_pts) / self._sf
times = times[self._edges]
        assert len(times) == n_pts, ("The length of the time vector should be "
                                     f"{n_pts}")
xlab = 'Time'
title = f"Inter-Trials Coherence ({self._n_trials} trials)"
if self._itc.ndim == 1:
plt.plot(times, self._itc, **kw)
elif self._itc.ndim == 2:
vmin = kw.get('vmin', np.percentile(self._itc, 1))
vmax = kw.get('vmax', np.percentile(self._itc, 99))
self.pacplot(self._itc, times, self.xvec, vmin=vmin, vmax=vmax,
ylabel="Frequency for phase (Hz)", xlabel=xlab,
title=title, **kw)
return plt.gca()
def show(self):
"""Show the figure."""
import matplotlib.pyplot as plt
plt.show()
@property
def itc(self):
"""Get the itc value."""
return self._itc
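# Usage sketch for ITC, on synthetic data (values are arbitrary):
#
#     import numpy as np
#     x = np.random.rand(20, 1000)          # (n_epochs, n_times)
#     itc = ITC(x, sf=256., f_pha=[5, 7])
#     itc.plot()
#     itc.show()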
class PeakLockedTF(_PacObj, _PacVisual):
"""Peak-Locked Time-frequency representation.
This class can be used in order to re-align time-frequency representations
around a time-point (cue) according to the closest phase peak. This type
of visualization can bring out a cyclic behavior of the amplitude at a
given phase, potentially indicating the presence of a phase-amplitude
coupling. Here's the detailed pipeline :
* Filter around a single phase frequency bands and across multiple
amplitude frequencies
* Use a `cue` which define the time-point to use for the realignment
* Detect in the filtered phase the closest peak to the cue. This step
is repeated to each trial in order to get a list of length (n_epochs)
that contains the number of sample (shift) so that if the phase is
moved, the peak fall onto the cue. A positive shift indicates that
the phase is moved forward while a negative shift is for a backward
move
* Apply, to each trial, this shift to the amplitude
* Plot the mean re-aligned amplitudes
Parameters
----------
x : array_like
Array of data of shape (n_epochs, n_times)
sf : float
The sampling frequency
cue : int, float
Time-point to use in order to detect the closest phase peak. This
parameter works in conjunction with the `times` input below. Use
either :
* An integer and `times` is None to indicate that you want to
realign according to a time-point in sample
            * An integer or a float with `times` the time vector if you want
that Tensorpac automatically infer the sample number around which
to align
times : array_like | None
Time vector
f_pha : tuple, list | [2, 4]
List of two floats describing the frequency bounds for extracting the
phase
f_amp : tuple, list | [60, 80]
Frequency vector for the amplitude. Here you can use several forms to
define those vectors :
* Dynamic definition : (start, stop, width, step)
* Using a string : `f_amp` can be 'lres', 'mres', 'hres'
respectively for low, middle and high resolution vectors
cycle : tuple | (3, 6)
Control the number of cycles for filtering. Should be a tuple of
integers where the first one refers to the number of cycles for the
phase and the second for the amplitude
:cite:`bahramisharif2013propagating`.
"""
def __init__(self, x, sf, cue, times=None, f_pha=[5, 7], f_amp='hres',
cycle=(3, 6), n_jobs=-1, verbose=None):
"""Init."""
set_log_level(verbose)
# initialize to retrieve filtering methods
_PacObj.__init__(self, f_pha=f_pha, f_amp=f_amp, dcomplex='hilbert',
cycle=cycle)
_PacVisual.__init__(self)
logger.info("PeakLockedTF object defined")
# inputs checking
x = np.atleast_2d(x)
assert isinstance(x, np.ndarray) and (x.ndim == 2)
assert isinstance(sf, (int, float))
assert isinstance(cue, (int, float))
assert isinstance(f_pha, (list, tuple)) and (len(f_pha) == 2)
n_epochs, n_times = x.shape
        # manage cue conversion
if times is None:
cue = int(cue)
times = np.arange(n_times)
logger.info(f" align on sample cue={cue}")
else:
assert isinstance(times, np.ndarray) and (len(times) == n_times)
cue_time = cue
cue = np.abs(times - cue).argmin() - 1
logger.info(f" align on time-point={cue_time} (sample={cue})")
self.cue, self._times = cue, times
# extract phase and amplitudes
logger.info(f" extract phase and amplitudes "
f"(n_amps={len(self.yvec)})")
pha = self.filter(sf, x, 'phase', n_jobs=n_jobs, keepfilt=True)
amp = self.filter(sf, x, 'amplitude', n_jobs=n_jobs)
self._pha, self._amp = pha, amp ** 2
# peak detection
logger.info(f" running peak detection around sample={cue}")
self.shifts = self._peak_detection(self._pha.squeeze(), cue)
# realign phases and amplitudes
logger.info(f" realign the {n_epochs} phases and amplitudes")
self.amp_a = self._shift_signals(self._amp, self.shifts, fill_with=0.)
self.pha_a = self._shift_signals(self._pha, self.shifts, fill_with=0.)
@staticmethod
def _peak_detection(pha, cue):
"""Single trial closest to a cue peak detection.
Parameters
----------
pha : array_like
Array of single trial phases of shape (n_trials, n_times)
cue : int
Cue to use as a reference (in sample unit)
Returns
-------
peaks : array_like
Array of length (n_trials,) describing each delay to apply
to each trial in order to realign the phases. In detail :
            * Positive delays mean that zeros should be prepended
            * Negative delays mean that zeros should be appended
"""
n_trials, n_times = pha.shape
peaks = []
for tr in range(n_trials):
# select the single trial phase
st_pha = pha[tr, :]
# detect all peaks across time points
st_peaks = []
            # start at t=1 so that st_pha[t - 1] does not wrap to the last sample
            for t in range(1, n_times - 1):
                if (st_pha[t - 1] < st_pha[t]) and (st_pha[t] > st_pha[t + 1]):
st_peaks += [t]
# detect the minimum peak
min_peak = st_peaks[np.abs(np.array(st_peaks) - cue).argmin()]
peaks += [cue - min_peak]
return np.array(peaks)
@staticmethod
def _shift_signals(sig, n_shifts, fill_with=0):
"""Shift an array of signals according to an array of delays.
Parameters
----------
sig : array_like
Array of signals of shape (n_freq, n_trials, n_times)
n_shifts : array_like
Array of delays to apply to each trial of shape (n_trials,)
fill_with : int
Value to prepend / append to each shifted time-series
Returns
-------
sig_shifted : array_like
Array of shifted signals with the same shape as the input
"""
# prepare the needed variables
n_freqs, n_trials, n_pts = sig.shape
sig_shifted = np.zeros_like(sig)
# shift each trial
for tr in range(n_trials):
# select the data of a specific trial
st_shift = n_shifts[tr]
st_sig = sig[:, tr, :]
fill = np.full((n_freqs, abs(st_shift)), fill_with,
dtype=st_sig.dtype)
# shift this specific trial
if st_shift > 0: # move forward = prepend zeros
sig_shifted[:, tr, :] = np.c_[fill, st_sig][:, 0:-st_shift]
elif st_shift < 0: # move backward = append zeros
sig_shifted[:, tr, :] = np.c_[st_sig, fill][:, abs(st_shift):]
return sig_shifted
def plot(self, zscore=False, baseline=None, edges=0, **kwargs):
"""Integrated Peak-Locked TF plotting function.
Parameters
----------
zscore : bool | False
Normalize the power by using a z-score normalization. This can be
useful in order to compensate the 1 / f effect in the power
spectrum. If True, the mean and deviation are computed at the
single trial level and across all time points
baseline : tuple | None
Baseline period to use in order to apply the z-score correction.
Should be in samples.
edges : int | 0
Number of pixels to discard to compensate filtering edge effect
(`power[edges:-edges]`).
kwargs : dict | {}
Additional arguments are sent to the
:class:`tensorpac.utils.PeakLockedTF.pacplot` method
"""
# manage additional arguments
kwargs['colorbar'] = False
kwargs['ylabel'] = 'Frequency for amplitude (hz)'
kwargs['xlabel'] = ''
kwargs['fz_labels'] = kwargs.get('fz_labels', 14)
kwargs['fz_cblabel'] = kwargs.get('fz_cblabel', 14)
kwargs['fz_title'] = kwargs.get('fz_title', 16)
sl_times = slice(edges, len(self._times) - edges)
times = self._times[sl_times]
pha_n = self.pha_a[..., sl_times].squeeze()
# z-score normalization
if zscore:
if baseline is None:
bsl_idx = sl_times
else:
assert len(baseline) == 2
bsl_idx = slice(baseline[0], baseline[1])
_mean = self.amp_a[..., bsl_idx].mean(2, keepdims=True)
_std = self.amp_a[..., bsl_idx].std(2, keepdims=True)
            _std[_std == 0.] = 1.  # avoid division by zero, which would produce NaN
amp_n = (self.amp_a[..., sl_times] - _mean) / _std
else:
amp_n = self.amp_a[..., sl_times]
# grid definition
gs = GridSpec(8, 8)
# image plot
plt.subplot(gs[slice(0, 6), 0:-1])
self.pacplot(amp_n.mean(1), times, self.yvec, **kwargs)
plt.axvline(times[self.cue], color='w', lw=2)
plt.tick_params(bottom=False, labelbottom=False)
ax_1 = plt.gca()
# external colorbar
plt.subplot(gs[slice(1, 5), -1])
cb = plt.colorbar(self._plt_im, pad=0.01, cax=plt.gca())
cb.set_label('Power (V**2/Hz)', fontsize=kwargs['fz_cblabel'])
cb.outline.set_visible(False)
# phase plot
plt.subplot(gs[slice(6, 8), 0:-1])
plt.plot(times, pha_n.T, color='lightgray', alpha=.2, lw=1.)
plt.plot(times, pha_n.mean(0), label='single trial phases', alpha=.2,
lw=1.) # legend tweaking
plt.plot(times, pha_n.mean(0), label='mean phases',
color='#1f77b4')
plt.axvline(times[self.cue], color='k', lw=2)
plt.autoscale(axis='both', tight=True, enable=True)
plt.xlabel("Times", fontsize=kwargs['fz_labels'])
plt.ylabel("V / Hz", fontsize=kwargs['fz_labels'])
# bottom legend
plt.legend(loc='center', bbox_to_anchor=(.5, -.5),
fontsize='x-large', ncol=2)
ax_2 = plt.gca()
return [ax_1, ax_2]
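# Usage sketch for PeakLockedTF, on synthetic data (values are arbitrary; the
# cue is expressed in the unit of `times` when a time vector is given):
#
#     import numpy as np
#     x = np.random.rand(15, 1500)          # (n_epochs, n_times)
#     times = np.arange(1500) / 512.
#     plk = PeakLockedTF(x, 512., cue=1., times=times, f_pha=[5, 7])
#     plk.plot(zscore=True, edges=10)
#     plt.show()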
|
py | 1a3088ec59399e2626ed70fead5607cac892a29c | #-*- coding: utf-8 -*-
# pysqlite2/dbapi.py: pysqlite DB-API module
#
# Copyright (C) 2007-2008 Gerhard Häring <[email protected]>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
#
# Note: This software has been modified for use in PyPy.
from collections import OrderedDict
from functools import wraps
import datetime
import string
import sys
import weakref
from threading import _get_ident as _thread_get_ident
try:
from __pypy__ import newlist_hint
except ImportError:
assert '__pypy__' not in sys.builtin_module_names
newlist_hint = lambda sizehint: []
if sys.version_info[0] >= 3:
StandardError = Exception
cmp = lambda x, y: (x > y) - (x < y)
long = int
xrange = range
basestring = unicode = str
buffer = memoryview
_BLOB_TYPE = bytes
else:
_BLOB_TYPE = buffer
from _sqlite3_cffi import ffi as _ffi, lib as _lib
exported_sqlite_symbols = [
'SQLITE_ALTER_TABLE',
'SQLITE_ANALYZE',
'SQLITE_ATTACH',
'SQLITE_CREATE_INDEX',
'SQLITE_CREATE_TABLE',
'SQLITE_CREATE_TEMP_INDEX',
'SQLITE_CREATE_TEMP_TABLE',
'SQLITE_CREATE_TEMP_TRIGGER',
'SQLITE_CREATE_TEMP_VIEW',
'SQLITE_CREATE_TRIGGER',
'SQLITE_CREATE_VIEW',
'SQLITE_DELETE',
'SQLITE_DENY',
'SQLITE_DETACH',
'SQLITE_DROP_INDEX',
'SQLITE_DROP_TABLE',
'SQLITE_DROP_TEMP_INDEX',
'SQLITE_DROP_TEMP_TABLE',
'SQLITE_DROP_TEMP_TRIGGER',
'SQLITE_DROP_TEMP_VIEW',
'SQLITE_DROP_TRIGGER',
'SQLITE_DROP_VIEW',
'SQLITE_IGNORE',
'SQLITE_INSERT',
'SQLITE_OK',
'SQLITE_PRAGMA',
'SQLITE_READ',
'SQLITE_REINDEX',
'SQLITE_SELECT',
'SQLITE_TRANSACTION',
'SQLITE_UPDATE',
]
for symbol in exported_sqlite_symbols:
globals()[symbol] = getattr(_lib, symbol)
_SQLITE_TRANSIENT = _lib.SQLITE_TRANSIENT
# pysqlite version information
version = "2.6.0"
# pysqlite constants
PARSE_COLNAMES = 1
PARSE_DECLTYPES = 2
# SQLite version information
sqlite_version = str(_ffi.string(_lib.sqlite3_libversion()).decode('ascii'))
_STMT_TYPE_UPDATE = 0
_STMT_TYPE_DELETE = 1
_STMT_TYPE_INSERT = 2
_STMT_TYPE_REPLACE = 3
_STMT_TYPE_OTHER = 4
_STMT_TYPE_SELECT = 5
_STMT_TYPE_INVALID = 6
class Error(StandardError):
pass
class Warning(StandardError):
pass
class InterfaceError(Error):
pass
class DatabaseError(Error):
pass
class InternalError(DatabaseError):
pass
class OperationalError(DatabaseError):
pass
class ProgrammingError(DatabaseError):
pass
class IntegrityError(DatabaseError):
pass
class DataError(DatabaseError):
pass
class NotSupportedError(DatabaseError):
pass
def connect(database, timeout=5.0, detect_types=0, isolation_level="",
check_same_thread=True, factory=None, cached_statements=100):
factory = Connection if not factory else factory
return factory(database, timeout, detect_types, isolation_level,
check_same_thread, factory, cached_statements)
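# Standard DB-API usage sketch for the module above:
#
#     conn = connect(":memory:")
#     cur = conn.cursor()
#     cur.execute("CREATE TABLE t (x INTEGER)")
#     cur.execute("INSERT INTO t VALUES (?)", (42,))
#     conn.commit()
#     print(cur.execute("SELECT x FROM t").fetchall())   # [(42,)]
#     conn.close()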
def _unicode_text_factory(x):
return unicode(x, 'utf-8')
if sys.version_info[0] < 3:
def OptimizedUnicode(s):
try:
val = unicode(s, "ascii").encode("ascii")
except UnicodeDecodeError:
val = unicode(s, "utf-8")
return val
else:
OptimizedUnicode = _unicode_text_factory
class _StatementCache(object):
def __init__(self, connection, maxcount):
self.connection = connection
self.maxcount = maxcount
self.cache = OrderedDict()
def get(self, sql):
try:
stat = self.cache[sql]
except KeyError:
stat = Statement(self.connection, sql)
self.cache[sql] = stat
if len(self.cache) > self.maxcount:
                self.cache.popitem(last=False)  # evict the oldest entry (FIFO)
else:
if stat._in_use:
stat = Statement(self.connection, sql)
self.cache[sql] = stat
return stat
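# The cache above is keyed by SQL text with FIFO eviction: a hit returns the
# cached Statement unless it is currently executing, and once maxcount is
# exceeded the oldest entry is dropped. Sketch (connection is assumed open):
#
#     cache = _StatementCache(connection, maxcount=100)
#     stat = cache.get("SELECT 1")
#     assert cache.get("SELECT 1") is stat   # reused while not in use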
class Connection(object):
__initialized = False
_db = None
def __init__(self, database, timeout=5.0, detect_types=0, isolation_level="",
check_same_thread=True, factory=None, cached_statements=100):
self.__initialized = True
db_star = _ffi.new('sqlite3 **')
if isinstance(database, unicode):
database = database.encode('utf-8')
if _lib.sqlite3_open(database, db_star) != _lib.SQLITE_OK:
raise OperationalError("Could not open database")
self._db = db_star[0]
if timeout is not None:
            timeout = int(timeout * 1000)  # the API takes seconds; sqlite3_busy_timeout() expects milliseconds
_lib.sqlite3_busy_timeout(self._db, timeout)
self.row_factory = None
self.text_factory = _unicode_text_factory
self._detect_types = detect_types
self._in_transaction = False
self.isolation_level = isolation_level
self.__cursors = []
self.__cursors_counter = 0
self.__statements = []
self.__statements_counter = 0
self.__rawstatements = set()
self._statement_cache = _StatementCache(self, cached_statements)
self.__func_cache = {}
self.__aggregates = {}
self.__aggregate_instances = {}
self.__collations = {}
if check_same_thread:
self.__thread_ident = _thread_get_ident()
self.Error = Error
self.Warning = Warning
self.InterfaceError = InterfaceError
self.DatabaseError = DatabaseError
self.InternalError = InternalError
self.OperationalError = OperationalError
self.ProgrammingError = ProgrammingError
self.IntegrityError = IntegrityError
self.DataError = DataError
self.NotSupportedError = NotSupportedError
def __del__(self):
if self._db:
_lib.sqlite3_close(self._db)
def close(self):
self._check_thread()
self.__do_all_statements(Statement._finalize, True)
# depending on when this close() is called, the statements' weakrefs
# may be already dead, even though Statement.__del__() was not called
# yet. In this case, self.__rawstatements is not empty.
if self.__rawstatements is not None:
for stmt in list(self.__rawstatements):
self._finalize_raw_statement(stmt)
self.__rawstatements = None
if self._db:
ret = _lib.sqlite3_close(self._db)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
self._db = None
def _check_closed(self):
if not self.__initialized:
raise ProgrammingError("Base Connection.__init__ not called.")
if not self._db:
raise ProgrammingError("Cannot operate on a closed database.")
def _check_closed_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._check_closed()
return func(self, *args, **kwargs)
return wrapper
def _check_thread(self):
try:
if self.__thread_ident == _thread_get_ident():
return
except AttributeError:
pass
else:
raise ProgrammingError(
"SQLite objects created in a thread can only be used in that "
"same thread. The object was created in thread id %d and this "
"is thread id %d" % (self.__thread_ident, _thread_get_ident()))
def _check_thread_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self._check_thread()
return func(self, *args, **kwargs)
return wrapper
def _get_exception(self, error_code=None):
if error_code is None:
error_code = _lib.sqlite3_errcode(self._db)
error_message = _ffi.string(_lib.sqlite3_errmsg(self._db)).decode('utf-8')
if error_code == _lib.SQLITE_OK:
raise ValueError("error signalled but got SQLITE_OK")
elif error_code in (_lib.SQLITE_INTERNAL, _lib.SQLITE_NOTFOUND):
exc = InternalError
elif error_code == _lib.SQLITE_NOMEM:
exc = MemoryError
elif error_code in (
_lib.SQLITE_ERROR, _lib.SQLITE_PERM, _lib.SQLITE_ABORT,
_lib.SQLITE_BUSY, _lib.SQLITE_LOCKED, _lib.SQLITE_READONLY,
_lib.SQLITE_INTERRUPT, _lib.SQLITE_IOERR, _lib.SQLITE_FULL,
_lib.SQLITE_CANTOPEN, _lib.SQLITE_PROTOCOL, _lib.SQLITE_EMPTY,
_lib.SQLITE_SCHEMA):
exc = OperationalError
elif error_code == _lib.SQLITE_CORRUPT:
exc = DatabaseError
elif error_code == _lib.SQLITE_TOOBIG:
exc = DataError
elif error_code in (_lib.SQLITE_CONSTRAINT, _lib.SQLITE_MISMATCH):
exc = IntegrityError
elif error_code == _lib.SQLITE_MISUSE:
exc = ProgrammingError
else:
exc = DatabaseError
exc = exc(error_message)
exc.error_code = error_code
return exc
def _remember_cursor(self, cursor):
self.__cursors.append(weakref.ref(cursor))
self.__cursors_counter += 1
if self.__cursors_counter < 200:
return
self.__cursors_counter = 0
self.__cursors = [r for r in self.__cursors if r() is not None]
def _remember_statement(self, statement):
self.__rawstatements.add(statement._statement)
self.__statements.append(weakref.ref(statement))
self.__statements_counter += 1
        # prune dead statement weakrefs every 200 registrations
        if self.__statements_counter < 200:
            return
        self.__statements_counter = 0
        self.__statements = [r for r in self.__statements if r() is not None]
def _finalize_raw_statement(self, _statement):
if self.__rawstatements is not None:
try:
self.__rawstatements.remove(_statement)
except KeyError:
return # rare case: already finalized, see issue #2097
_lib.sqlite3_finalize(_statement)
def __do_all_statements(self, action, reset_cursors):
for weakref in self.__statements:
statement = weakref()
if statement is not None:
action(statement)
if reset_cursors:
for weakref in self.__cursors:
cursor = weakref()
if cursor is not None:
cursor._reset = True
@_check_thread_wrap
@_check_closed_wrap
def __call__(self, sql):
return self._statement_cache.get(sql)
def cursor(self, factory=None):
self._check_thread()
self._check_closed()
if factory is None:
factory = Cursor
cur = factory(self)
if self.row_factory is not None:
cur.row_factory = self.row_factory
return cur
def execute(self, *args):
cur = self.cursor()
return cur.execute(*args)
def executemany(self, *args):
cur = self.cursor()
return cur.executemany(*args)
def executescript(self, *args):
cur = self.cursor()
return cur.executescript(*args)
def iterdump(self):
from sqlite3.dump import _iterdump
return _iterdump(self)
def _begin(self):
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, self.__begin_statement, -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = True
finally:
_lib.sqlite3_finalize(statement_star[0])
def commit(self):
self._check_thread()
self._check_closed()
if not self._in_transaction:
return
self.__do_all_statements(Statement._reset, False)
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, b"COMMIT", -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = False
finally:
_lib.sqlite3_finalize(statement_star[0])
def rollback(self):
self._check_thread()
self._check_closed()
if not self._in_transaction:
return
self.__do_all_statements(Statement._reset, True)
statement_star = _ffi.new('sqlite3_stmt **')
ret = _lib.sqlite3_prepare_v2(self._db, b"ROLLBACK", -1,
statement_star, _ffi.NULL)
try:
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
ret = _lib.sqlite3_step(statement_star[0])
if ret != _lib.SQLITE_DONE:
raise self._get_exception(ret)
self._in_transaction = False
finally:
_lib.sqlite3_finalize(statement_star[0])
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, exc_tb):
if exc_type is None and exc_value is None and exc_tb is None:
self.commit()
else:
self.rollback()
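    # note: ``with con:`` commits on a clean exit and rolls back when the
    # block raises; the connection itself is not closed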
@_check_thread_wrap
@_check_closed_wrap
def create_function(self, name, num_args, callback):
try:
closure = self.__func_cache[callback]
except KeyError:
@_ffi.callback("void(sqlite3_context*, int, sqlite3_value**)")
def closure(context, nargs, c_params):
_function_callback(callback, context, nargs, c_params)
self.__func_cache[callback] = closure
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_function(self._db, name, num_args,
_lib.SQLITE_UTF8, _ffi.NULL,
closure, _ffi.NULL, _ffi.NULL)
if ret != _lib.SQLITE_OK:
raise self.OperationalError("Error creating function")
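    # usage sketch:
    #   con.create_function("sign", 1, lambda x: (x > 0) - (x < 0))
    #   con.execute("SELECT sign(-5)").fetchone()   # -> (-1,)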
@_check_thread_wrap
@_check_closed_wrap
def create_aggregate(self, name, num_args, cls):
try:
step_callback, final_callback = self.__aggregates[cls]
except KeyError:
@_ffi.callback("void(sqlite3_context*, int, sqlite3_value**)")
def step_callback(context, argc, c_params):
res = _lib.sqlite3_aggregate_context(context,
_ffi.sizeof("size_t"))
aggregate_ptr = _ffi.cast("size_t[1]", res)
if not aggregate_ptr[0]:
try:
aggregate = cls()
except Exception:
msg = (b"user-defined aggregate's '__init__' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
return
aggregate_id = id(aggregate)
self.__aggregate_instances[aggregate_id] = aggregate
aggregate_ptr[0] = aggregate_id
else:
aggregate = self.__aggregate_instances[aggregate_ptr[0]]
params = _convert_params(context, argc, c_params)
try:
aggregate.step(*params)
except Exception:
msg = (b"user-defined aggregate's 'step' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
@_ffi.callback("void(sqlite3_context*)")
def final_callback(context):
res = _lib.sqlite3_aggregate_context(context,
_ffi.sizeof("size_t"))
aggregate_ptr = _ffi.cast("size_t[1]", res)
if aggregate_ptr[0]:
aggregate = self.__aggregate_instances[aggregate_ptr[0]]
try:
val = aggregate.finalize()
except Exception:
msg = (b"user-defined aggregate's 'finalize' "
b"method raised error")
_lib.sqlite3_result_error(context, msg, len(msg))
else:
_convert_result(context, val)
finally:
del self.__aggregate_instances[aggregate_ptr[0]]
self.__aggregates[cls] = (step_callback, final_callback)
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_function(self._db, name, num_args,
_lib.SQLITE_UTF8, _ffi.NULL,
_ffi.NULL,
step_callback,
final_callback)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
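    # usage sketch: an aggregate class provides step() and finalize():
    #   class MySum(object):
    #       def __init__(self): self.total = 0
    #       def step(self, value): self.total += value
    #       def finalize(self): return self.total
    #   con.create_aggregate("mysum", 1, MySum)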
@_check_thread_wrap
@_check_closed_wrap
def create_collation(self, name, callback):
name = name.upper()
if not all(c in string.ascii_uppercase + string.digits + '_' for c in name):
raise ProgrammingError("invalid character in collation name")
if callback is None:
del self.__collations[name]
collation_callback = _ffi.NULL
else:
if not callable(callback):
raise TypeError("parameter must be callable")
@_ffi.callback("int(void*, int, const void*, int, const void*)")
def collation_callback(context, len1, str1, len2, str2):
text1 = _ffi.buffer(str1, len1)[:]
text2 = _ffi.buffer(str2, len2)[:]
try:
ret = callback(text1, text2)
assert isinstance(ret, (int, long))
return cmp(ret, 0)
except Exception:
return 0
self.__collations[name] = collation_callback
if isinstance(name, unicode):
name = name.encode('utf-8')
ret = _lib.sqlite3_create_collation(self._db, name,
_lib.SQLITE_UTF8,
_ffi.NULL,
collation_callback)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
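    # usage sketch: the callback gets two byte strings and returns a negative,
    # zero or positive number, cmp()-style:
    #   con.create_collation("reverse", lambda a, b: (a < b) - (a > b))
    #   con.execute("SELECT x FROM t ORDER BY x COLLATE reverse")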
@_check_thread_wrap
@_check_closed_wrap
def set_authorizer(self, callback):
try:
authorizer = self.__func_cache[callback]
except KeyError:
@_ffi.callback("int(void*, int, const char*, const char*, "
"const char*, const char*)")
def authorizer(userdata, action, arg1, arg2, dbname, source):
try:
ret = callback(action, arg1, arg2, dbname, source)
assert isinstance(ret, int)
# try to detect cases in which cffi would swallow
# OverflowError when casting the return value
assert int(_ffi.cast('int', ret)) == ret
return ret
except Exception:
return _lib.SQLITE_DENY
self.__func_cache[callback] = authorizer
ret = _lib.sqlite3_set_authorizer(self._db, authorizer, _ffi.NULL)
if ret != _lib.SQLITE_OK:
raise self._get_exception(ret)
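    # usage sketch: return 0 (SQLITE_OK) to allow an action, 1 (SQLITE_DENY)
    # to raise an error, 2 (SQLITE_IGNORE) to ignore it; e.g. deny every
    # DELETE (action code 9 is SQLITE_DELETE):
    #   con.set_authorizer(lambda action, *rest: 1 if action == 9 else 0)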
@_check_thread_wrap
@_check_closed_wrap
def set_progress_handler(self, callable, nsteps):
if callable is None:
progress_handler = _ffi.NULL
else:
try:
progress_handler = self.__func_cache[callable]
except KeyError:
@_ffi.callback("int(void*)")
def progress_handler(userdata):
try:
return bool(callable())
except Exception:
# abort query if error occurred
return 1
self.__func_cache[callable] = progress_handler
_lib.sqlite3_progress_handler(self._db, nsteps, progress_handler,
_ffi.NULL)
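    # usage sketch: the handler runs every nsteps VM opcodes and a truthy
    # return value aborts the running query:
    #   con.set_progress_handler(lambda: False, 10000)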
if sys.version_info[0] >= 3:
def __get_in_transaction(self):
return self._in_transaction
in_transaction = property(__get_in_transaction)
def __get_total_changes(self):
self._check_closed()
return _lib.sqlite3_total_changes(self._db)
total_changes = property(__get_total_changes)
def __get_isolation_level(self):
return self._isolation_level
def __set_isolation_level(self, val):
if val is None:
self.commit()
else:
self.__begin_statement = str("BEGIN " + val).encode('utf-8')
self._isolation_level = val
isolation_level = property(__get_isolation_level, __set_isolation_level)
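    # note: isolation_level=None selects autocommit mode; a string such as ""
    # or "IMMEDIATE" chooses the implicit "BEGIN <level>" statement used above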
if hasattr(_lib, 'sqlite3_enable_load_extension'):
@_check_thread_wrap
@_check_closed_wrap
def enable_load_extension(self, enabled):
rc = _lib.sqlite3_enable_load_extension(self._db, int(enabled))
if rc != _lib.SQLITE_OK:
raise OperationalError("Error enabling load extension")
class Cursor(object):
__initialized = False
__statement = None
def __init__(self, con):
if not isinstance(con, Connection):
raise TypeError
self.__connection = con
self.arraysize = 1
self.row_factory = None
self._reset = False
self.__locked = False
self.__closed = False
self.__lastrowid = None
self.__rowcount = -1
con._check_thread()
con._remember_cursor(self)
self.__initialized = True
def close(self):
self.__connection._check_thread()
self.__connection._check_closed()
if self.__statement:
self.__statement._reset()
self.__statement = None
self.__closed = True
def __check_cursor(self):
if not self.__initialized:
raise ProgrammingError("Base Cursor.__init__ not called.")
if self.__closed:
raise ProgrammingError("Cannot operate on a closed cursor.")
if self.__locked:
raise ProgrammingError("Recursive use of cursors not allowed.")
self.__connection._check_thread()
self.__connection._check_closed()
def __check_cursor_wrap(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self.__check_cursor()
return func(self, *args, **kwargs)
return wrapper
def __check_reset(self):
if self._reset:
raise InterfaceError(
"Cursor needed to be reset because of commit/rollback "
"and can no longer be fetched from.")
def __build_row_cast_map(self):
if not self.__connection._detect_types:
return
self.__row_cast_map = []
for i in xrange(_lib.sqlite3_column_count(self.__statement._statement)):
converter = None
if self.__connection._detect_types & PARSE_COLNAMES:
colname = _lib.sqlite3_column_name(self.__statement._statement, i)
if colname:
colname = _ffi.string(colname).decode('utf-8')
type_start = -1
key = None
for pos in range(len(colname)):
if colname[pos] == '[':
type_start = pos + 1
elif colname[pos] == ']' and type_start != -1:
key = colname[type_start:pos]
converter = converters[key.upper()]
if converter is None and self.__connection._detect_types & PARSE_DECLTYPES:
decltype = _lib.sqlite3_column_decltype(self.__statement._statement, i)
if decltype:
decltype = _ffi.string(decltype).decode('utf-8')
# if multiple words, use first, eg.
# "INTEGER NOT NULL" => "INTEGER"
decltype = decltype.split()[0]
if '(' in decltype:
decltype = decltype[:decltype.index('(')]
converter = converters.get(decltype.upper(), None)
self.__row_cast_map.append(converter)
def __fetch_one_row(self):
num_cols = _lib.sqlite3_data_count(self.__statement._statement)
row = newlist_hint(num_cols)
for i in xrange(num_cols):
if self.__connection._detect_types:
converter = self.__row_cast_map[i]
else:
converter = None
if converter is not None:
blob = _lib.sqlite3_column_blob(self.__statement._statement, i)
if not blob:
val = None
else:
blob_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _ffi.buffer(blob, blob_len)[:]
val = converter(val)
else:
typ = _lib.sqlite3_column_type(self.__statement._statement, i)
if typ == _lib.SQLITE_NULL:
val = None
elif typ == _lib.SQLITE_INTEGER:
val = _lib.sqlite3_column_int64(self.__statement._statement, i)
val = int(val)
elif typ == _lib.SQLITE_FLOAT:
val = _lib.sqlite3_column_double(self.__statement._statement, i)
elif typ == _lib.SQLITE_TEXT:
text = _lib.sqlite3_column_text(self.__statement._statement, i)
text_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _ffi.buffer(text, text_len)[:]
try:
val = self.__connection.text_factory(val)
except Exception:
column_name = _lib.sqlite3_column_name(
self.__statement._statement, i)
if column_name:
column_name = _ffi.string(column_name).decode('utf-8')
else:
column_name = "<unknown column name>"
val = val.decode('ascii', 'replace')
raise OperationalError(
"Could not decode to UTF-8 column '%s' with text '%s'" % (
column_name, val))
elif typ == _lib.SQLITE_BLOB:
blob = _lib.sqlite3_column_blob(self.__statement._statement, i)
blob_len = _lib.sqlite3_column_bytes(self.__statement._statement, i)
val = _BLOB_TYPE(_ffi.buffer(blob, blob_len)[:])
row.append(val)
return tuple(row)
def __execute(self, multiple, sql, many_params):
self.__locked = True
self._reset = False
try:
del self.__next_row
except AttributeError:
pass
try:
if not isinstance(sql, basestring):
raise ValueError("operation parameter must be str or unicode")
try:
del self.__description
except AttributeError:
pass
self.__rowcount = -1
self.__statement = self.__connection._statement_cache.get(sql)
if self.__connection._isolation_level is not None:
if self.__statement._type in (
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_INSERT,
_STMT_TYPE_REPLACE
):
if not self.__connection._in_transaction:
self.__connection._begin()
elif self.__statement._type == _STMT_TYPE_OTHER:
if self.__connection._in_transaction:
self.__connection.commit()
elif self.__statement._type == _STMT_TYPE_SELECT:
if multiple:
raise ProgrammingError("You cannot execute SELECT "
"statements in executemany().")
for params in many_params:
self.__statement._set_params(params)
# Actually execute the SQL statement
ret = _lib.sqlite3_step(self.__statement._statement)
if ret == _lib.SQLITE_ROW:
if multiple:
raise ProgrammingError("executemany() can only execute DML statements.")
self.__build_row_cast_map()
self.__next_row = self.__fetch_one_row()
elif ret == _lib.SQLITE_DONE:
if not multiple:
self.__statement._reset()
else:
self.__statement._reset()
raise self.__connection._get_exception(ret)
if self.__statement._type in (
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_INSERT,
_STMT_TYPE_REPLACE
):
if self.__rowcount == -1:
self.__rowcount = 0
self.__rowcount += _lib.sqlite3_changes(self.__connection._db)
if not multiple and self.__statement._type == _STMT_TYPE_INSERT:
self.__lastrowid = _lib.sqlite3_last_insert_rowid(self.__connection._db)
else:
self.__lastrowid = None
if multiple:
self.__statement._reset()
finally:
self.__connection._in_transaction = \
not _lib.sqlite3_get_autocommit(self.__connection._db)
self.__locked = False
return self
@__check_cursor_wrap
def execute(self, sql, params=[]):
return self.__execute(False, sql, [params])
@__check_cursor_wrap
def executemany(self, sql, many_params):
return self.__execute(True, sql, many_params)
def executescript(self, sql):
self.__check_cursor()
self._reset = False
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
elif not isinstance(sql, str):
raise ValueError("script argument must be unicode or string.")
statement_star = _ffi.new('sqlite3_stmt **')
next_char = _ffi.new('char **')
self.__connection.commit()
while True:
c_sql = _ffi.new("char[]", sql)
rc = _lib.sqlite3_prepare(self.__connection._db, c_sql, -1,
statement_star, next_char)
if rc != _lib.SQLITE_OK:
raise self.__connection._get_exception(rc)
rc = _lib.SQLITE_ROW
while rc == _lib.SQLITE_ROW:
if not statement_star[0]:
rc = _lib.SQLITE_OK
else:
rc = _lib.sqlite3_step(statement_star[0])
if rc != _lib.SQLITE_DONE:
_lib.sqlite3_finalize(statement_star[0])
if rc == _lib.SQLITE_OK:
break
else:
raise self.__connection._get_exception(rc)
rc = _lib.sqlite3_finalize(statement_star[0])
if rc != _lib.SQLITE_OK:
raise self.__connection._get_exception(rc)
sql = _ffi.string(next_char[0])
if not sql:
break
return self
def __iter__(self):
return self
def __next__(self):
self.__check_cursor()
self.__check_reset()
if not self.__statement:
raise StopIteration
try:
next_row = self.__next_row
except AttributeError:
raise StopIteration
del self.__next_row
if self.row_factory is not None:
next_row = self.row_factory(self, next_row)
ret = _lib.sqlite3_step(self.__statement._statement)
if ret == _lib.SQLITE_ROW:
self.__next_row = self.__fetch_one_row()
else:
self.__statement._reset()
if ret != _lib.SQLITE_DONE:
raise self.__connection._get_exception(ret)
return next_row
if sys.version_info[0] < 3:
next = __next__
del __next__
def fetchone(self):
return next(self, None)
def fetchmany(self, size=None):
if size is None:
size = self.arraysize
lst = []
for row in self:
lst.append(row)
if len(lst) == size:
break
return lst
def fetchall(self):
return list(self)
def __get_connection(self):
return self.__connection
connection = property(__get_connection)
def __get_rowcount(self):
return self.__rowcount
rowcount = property(__get_rowcount)
def __get_description(self):
try:
return self.__description
except AttributeError:
if self.__statement:
self.__description = self.__statement._get_description()
return self.__description
description = property(__get_description)
def __get_lastrowid(self):
return self.__lastrowid
lastrowid = property(__get_lastrowid)
def setinputsizes(self, *args):
pass
def setoutputsize(self, *args):
pass
class Statement(object):
_statement = None
def __init__(self, connection, sql):
self.__con = connection
self._in_use = False
if not isinstance(sql, basestring):
raise Warning("SQL is of wrong type. Must be string or unicode.")
if '\0' in sql:
raise ValueError("the query contains a null character")
first_word = sql.lstrip().split(" ")[0].upper()
if first_word == "":
self._type = _STMT_TYPE_INVALID
elif first_word == "SELECT":
self._type = _STMT_TYPE_SELECT
elif first_word == "INSERT":
self._type = _STMT_TYPE_INSERT
elif first_word == "UPDATE":
self._type = _STMT_TYPE_UPDATE
elif first_word == "DELETE":
self._type = _STMT_TYPE_DELETE
elif first_word == "REPLACE":
self._type = _STMT_TYPE_REPLACE
else:
self._type = _STMT_TYPE_OTHER
if isinstance(sql, unicode):
sql = sql.encode('utf-8')
statement_star = _ffi.new('sqlite3_stmt **')
next_char = _ffi.new('char **')
c_sql = _ffi.new("char[]", sql)
ret = _lib.sqlite3_prepare_v2(self.__con._db, c_sql, -1,
statement_star, next_char)
self._statement = statement_star[0]
if ret == _lib.SQLITE_OK and not self._statement:
# an empty statement, work around that, as it's the least trouble
self._type = _STMT_TYPE_SELECT
c_sql = _ffi.new("char[]", b"select 42")
ret = _lib.sqlite3_prepare_v2(self.__con._db, c_sql, -1,
statement_star, next_char)
self._statement = statement_star[0]
if ret != _lib.SQLITE_OK:
raise self.__con._get_exception(ret)
self.__con._remember_statement(self)
tail = _ffi.string(next_char[0]).decode('utf-8')
if _check_remaining_sql(tail):
raise Warning("You can only execute one statement at a time.")
def __del__(self):
if self._statement:
self.__con._finalize_raw_statement(self._statement)
def _finalize(self):
if self._statement:
self.__con._finalize_raw_statement(self._statement)
self._statement = None
self._in_use = False
def _reset(self):
if self._in_use and self._statement:
_lib.sqlite3_reset(self._statement)
self._in_use = False
if sys.version_info[0] < 3:
def __check_decodable(self, param):
if self.__con.text_factory in (unicode, OptimizedUnicode,
_unicode_text_factory):
for c in param:
if ord(c) & 0x80 != 0:
raise self.__con.ProgrammingError(
"You must not use 8-bit bytestrings unless "
"you use a text_factory that can interpret "
"8-bit bytestrings (like text_factory = str). "
"It is highly recommended that you instead "
"just switch your application to Unicode strings.")
def __set_param(self, idx, param):
cvt = converters.get(type(param))
if cvt is not None:
param = cvt(param)
try:
param = adapt(param)
except:
pass # And use previous value
if param is None:
rc = _lib.sqlite3_bind_null(self._statement, idx)
elif isinstance(param, (bool, int, long)):
if -2147483648 <= param <= 2147483647:
rc = _lib.sqlite3_bind_int(self._statement, idx, param)
else:
rc = _lib.sqlite3_bind_int64(self._statement, idx, param)
elif isinstance(param, float):
rc = _lib.sqlite3_bind_double(self._statement, idx, param)
elif isinstance(param, unicode):
param = param.encode("utf-8")
rc = _lib.sqlite3_bind_text(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
elif isinstance(param, str):
self.__check_decodable(param)
rc = _lib.sqlite3_bind_text(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
elif isinstance(param, (buffer, bytes)):
param = bytes(param)
rc = _lib.sqlite3_bind_blob(self._statement, idx, param,
len(param), _SQLITE_TRANSIENT)
else:
rc = -1
return rc
def _set_params(self, params):
self._in_use = True
num_params_needed = _lib.sqlite3_bind_parameter_count(self._statement)
if isinstance(params, (tuple, list)) or \
not isinstance(params, dict) and \
hasattr(params, '__getitem__'):
try:
num_params = len(params)
except TypeError:
num_params = -1
if num_params != num_params_needed:
raise ProgrammingError("Incorrect number of bindings supplied. "
"The current statement uses %d, and "
"there are %d supplied." %
(num_params_needed, num_params))
for i in range(num_params):
rc = self.__set_param(i + 1, params[i])
if rc != _lib.SQLITE_OK:
raise InterfaceError("Error binding parameter %d - "
"probably unsupported type." % i)
elif isinstance(params, dict):
for i in range(1, num_params_needed + 1):
param_name = _lib.sqlite3_bind_parameter_name(self._statement, i)
if not param_name:
raise ProgrammingError("Binding %d has no name, but you "
"supplied a dictionary (which has "
"only names)." % i)
param_name = _ffi.string(param_name).decode('utf-8')[1:]
try:
param = params[param_name]
except KeyError:
raise ProgrammingError("You did not supply a value for "
"binding %d." % i)
rc = self.__set_param(i, param)
if rc != _lib.SQLITE_OK:
raise InterfaceError("Error binding parameter :%s - "
"probably unsupported type." %
param_name)
else:
raise ValueError("parameters are of unsupported type")
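    # usage sketch: positional parameters bind by position, named ones by key
    # (the leading ':' is stripped from the reported parameter name):
    #   cur.execute("SELECT ?, ?", (1, 2))
    #   cur.execute("SELECT :a + :b", {"a": 1, "b": 2})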
def _get_description(self):
if self._type in (
_STMT_TYPE_INSERT,
_STMT_TYPE_UPDATE,
_STMT_TYPE_DELETE,
_STMT_TYPE_REPLACE
):
return None
desc = []
for i in xrange(_lib.sqlite3_column_count(self._statement)):
name = _lib.sqlite3_column_name(self._statement, i)
if name:
name = _ffi.string(name).decode('utf-8').split("[")[0].strip()
desc.append((name, None, None, None, None, None, None))
return desc
class Row(object):
def __init__(self, cursor, values):
self.description = cursor.description
self.values = values
def __len__(self):
return len(self.values)
def __getitem__(self, item):
if isinstance(item, (int, long)):
return self.values[item]
else:
item = item.lower()
for idx, desc in enumerate(self.description):
if desc[0].lower() == item:
return self.values[idx]
raise IndexError("No item with that key")
def keys(self):
return [desc[0] for desc in self.description]
def __eq__(self, other):
if not isinstance(other, Row):
return NotImplemented
if self.description != other.description:
return False
if self.values != other.values:
return False
return True
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(tuple(self.description)) ^ hash(tuple(self.values))
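# usage sketch: with ``con.row_factory = Row``, fetched rows support integer
# indexing and case-insensitive column-name lookup:
#   row = con.execute("SELECT 1 AS a").fetchone()
#   row[0] == row["a"] == row["A"]   # all True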
def _check_remaining_sql(s):
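    """Return 1 if anything other than whitespace, '--' line comments, or
    '/* ... */' block comments follows the first statement, else 0."""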
state = "NORMAL"
for char in s:
if char == chr(0):
return 0
elif char == '-':
if state == "NORMAL":
state = "LINECOMMENT_1"
elif state == "LINECOMMENT_1":
state = "IN_LINECOMMENT"
elif char in (' ', '\t'):
pass
elif char == '\n':
if state == "IN_LINECOMMENT":
state = "NORMAL"
elif char == '/':
if state == "NORMAL":
state = "COMMENTSTART_1"
elif state == "COMMENTEND_1":
state = "NORMAL"
elif state == "COMMENTSTART_1":
return 1
elif char == '*':
if state == "NORMAL":
return 1
elif state == "LINECOMMENT_1":
return 1
elif state == "COMMENTSTART_1":
state = "IN_COMMENT"
elif state == "IN_COMMENT":
state = "COMMENTEND_1"
else:
if state == "COMMENTEND_1":
state = "IN_COMMENT"
elif state == "IN_LINECOMMENT":
pass
elif state == "IN_COMMENT":
pass
else:
return 1
return 0
def _convert_params(con, nargs, params):
_params = []
for i in range(nargs):
typ = _lib.sqlite3_value_type(params[i])
if typ == _lib.SQLITE_NULL:
val = None
elif typ == _lib.SQLITE_INTEGER:
val = _lib.sqlite3_value_int64(params[i])
val = int(val)
elif typ == _lib.SQLITE_FLOAT:
val = _lib.sqlite3_value_double(params[i])
elif typ == _lib.SQLITE_TEXT:
val = _lib.sqlite3_value_text(params[i])
val = _ffi.string(val).decode('utf-8')
elif typ == _lib.SQLITE_BLOB:
blob = _lib.sqlite3_value_blob(params[i])
blob_len = _lib.sqlite3_value_bytes(params[i])
val = _BLOB_TYPE(_ffi.buffer(blob, blob_len)[:])
else:
raise NotImplementedError
_params.append(val)
return _params
def _convert_result(con, val):
if val is None:
_lib.sqlite3_result_null(con)
elif isinstance(val, (bool, int, long)):
_lib.sqlite3_result_int64(con, int(val))
elif isinstance(val, float):
_lib.sqlite3_result_double(con, val)
elif isinstance(val, unicode):
val = val.encode('utf-8')
_lib.sqlite3_result_text(con, val, len(val), _SQLITE_TRANSIENT)
elif isinstance(val, str):
_lib.sqlite3_result_text(con, val, len(val), _SQLITE_TRANSIENT)
elif isinstance(val, (buffer, bytes)):
_lib.sqlite3_result_blob(con, bytes(val), len(val), _SQLITE_TRANSIENT)
else:
raise NotImplementedError
def _function_callback(real_cb, context, nargs, c_params):
params = _convert_params(context, nargs, c_params)
try:
val = real_cb(*params)
except Exception:
msg = b"user-defined function raised exception"
_lib.sqlite3_result_error(context, msg, len(msg))
else:
_convert_result(context, val)
converters = {}
adapters = {}
class PrepareProtocol(object):
pass
def register_adapter(typ, callable):
adapters[typ, PrepareProtocol] = callable
def register_converter(name, callable):
converters[name.upper()] = callable
def register_adapters_and_converters():
def adapt_date(val):
return val.isoformat()
def adapt_datetime(val):
return val.isoformat(" ")
def convert_date(val):
return datetime.date(*map(int, val.split("-")))
def convert_timestamp(val):
datepart, timepart = val.split(" ")
year, month, day = map(int, datepart.split("-"))
timepart_full = timepart.split(".")
hours, minutes, seconds = map(int, timepart_full[0].split(":"))
if len(timepart_full) == 2:
microseconds = int(timepart_full[1])
else:
microseconds = 0
return datetime.datetime(year, month, day, hours, minutes, seconds,
microseconds)
register_adapter(datetime.date, adapt_date)
register_adapter(datetime.datetime, adapt_datetime)
register_converter("date", convert_date)
register_converter("timestamp", convert_timestamp)
def adapt(val, proto=PrepareProtocol):
# look for an adapter in the registry
adapter = adapters.get((type(val), proto), None)
if adapter is not None:
return adapter(val)
# try to have the protocol adapt this object
if hasattr(proto, '__adapt__'):
try:
adapted = proto.__adapt__(val)
except TypeError:
pass
else:
if adapted is not None:
return adapted
# and finally try to have the object adapt itself
if hasattr(val, '__conform__'):
try:
adapted = val.__conform__(proto)
except TypeError:
pass
else:
if adapted is not None:
return adapted
return val
register_adapters_and_converters()
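# usage sketch (converters only fire when the connection is opened with
# detect_types=PARSE_DECLTYPES or PARSE_COLNAMES):
#   class Point(object):
#       def __init__(self, x, y): self.x, self.y = x, y
#   register_adapter(Point, lambda p: "%f;%f" % (p.x, p.y))
#   register_converter("point", lambda b: Point(*map(float, b.split(b";"))))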
|
py | 1a3089150d570566c808011e5a62c579a5185fc9 | # -*- coding: utf-8 -*-
from serial.serialutil import SerialException
from struct import unpack
from .serial_wrapper import SerialPort
from .constants import NO_KEY_DETECTED
from .internal import XidConnection
from .keymaps import (rb_530_keymap, rb_730_keymap, rb_830_keymap,
rb_834_keymap, lumina_keymap)
class XidScanner(object):
"""
Scan the computer for connected XID devices
"""
def __init__(self):
self.__com_ports = SerialPort.available_ports()
self.__xid_cons = []
self.detect_xid_devices()
def detect_xid_devices(self):
"""
For all of the com ports connected to the computer, send an
        XID command '_c1'. If the device responds with '_xid', it is
        an XID device.
"""
self.__xid_cons = []
for c in self.__com_ports:
device_found = False
for b in [115200, 19200, 9600, 57600, 38400]:
con = XidConnection(c, b)
try:
con.open()
except SerialException:
continue
con.flush_input()
con.flush_output()
returnval = con.send_xid_command("_c1", 5).decode('ASCII')
if returnval.startswith('_xid'):
device_found = True
self.__xid_cons.append(con)
                    if returnval != '_xid0':
# set the device into XID mode
con.send_xid_command('c10')
con.flush_input()
con.flush_output()
# be sure to reset the timer to avoid the 4.66 hours
# problem. (refer to XidConnection.xid_input_found to
# read about the 4.66 hours)
con.send_xid_command('e1')
con.send_xid_command('e5')
con.close()
if device_found:
break
def device_at_index(self, index):
"""
Returns the device at the specified index
"""
if index >= len(self.__xid_cons):
raise ValueError("Invalid device index")
return self.__xid_cons[index]
def device_count(self):
"""
Number of XID devices connected to the computer
"""
return len(self.__xid_cons)
class BaseDevice(object):
def __init__(self, connection, name="Unknown XID Device"):
self.con = connection
self.device_name = name
def reset_rt_timer(self):
"""
Resets the Reaction Time timer.
"""
self.con.send_xid_command("e5")
def reset_base_timer(self):
"""
Resets the base timer
"""
self.con.send_xid_command("e1")
def query_base_timer(self):
"""
gets the value from the device's base timer
"""
(_, _, time) = unpack('<ccI', self.con.send_xid_command("e3", 6))
return time
class ResponseDevice(BaseDevice):
def __init__(self, connection,
name='Unknown XID Device',
keymap=None,
trigger_prefix="Button"):
BaseDevice.__init__(self, connection, name)
self.keymap = keymap
self.trigger_name_prefix = trigger_prefix
self.response_queue = []
def poll_for_response(self):
"""
        Polls the device for user input.
        If there is a keymapping for the device, the key map is applied
        to the key reported from the device.
        If a response is waiting to be processed, the response is appended
        to the internal response_queue.
"""
key_state = self.con.check_for_keypress()
if key_state != NO_KEY_DETECTED:
response = self.con.get_current_response()
if self.keymap is not None:
response['key'] = self.keymap[response['key']]
else:
response['key'] -= 1
self.response_queue.append(response)
def response_queue_size(self):
"""
Number of responses in the response queue
"""
return len(self.response_queue)
def get_next_response(self):
"""
Pops the response at the beginning of the response queue
and returns it.
This function returns a dict object with the following keys:
pressed: A boolean value of whether the event was a keypress
or key release.
key: The key on the device that was pressed. This is a
0 based index.
port: Device port the response came from. Typically this
is 0 on RB-series devices, and 2 on SV-1 voice key
devices.
time: For the time being, this just returns 0. There is
currently an issue with clock drift in the Cedrus XID
devices. Once we have this issue resolved, time will
                report the value of the RT timer in milliseconds.
"""
return self.response_queue.pop(0)
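    # Usage sketch (assumes at least one connected device):
    #   scanner = XidScanner()
    #   dev = XidDevice(scanner.device_at_index(0))
    #   dev.reset_rt_timer()
    #   while dev.response_queue_size() == 0:
    #       dev.poll_for_response()
    #   response = dev.get_next_response()  # e.g. {'pressed': True, 'key': 0, ...}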
def clear_response_queue(self):
"""
Clears the response queue
"""
self.response_queue = []
def __repr__(self):
return '<ResponseDevice "%s">' % self.device_name
class StimTracker(BaseDevice):
"""
Class that encapsulates the StimTracker device.
The pulse duration defaults to 100ms. To change this, call
StimTracker.set_pulse_duration(duration_in_miliseconds)
"""
_lines = {1: 1,
2: 2,
3: 4,
4: 8,
5: 16,
6: 32,
7: 64,
8: 128}
def __init__(self, connection, name="StimTracker"):
BaseDevice.__init__(self, connection, name)
self.con.set_using_stim_tracker(True)
self.con.send_xid_command('a10')
self.con.clear_digital_output_lines(0xff)
self.set_pulse_duration(100)
def set_pulse_duration(self, duration):
"""
        Sets the pulse duration for events in milliseconds when activate_line
is called
"""
if duration > 4294967295:
raise ValueError('Duration is too long. Please choose a value '
'less than 4294967296.')
big_endian = hex(duration)[2:]
if len(big_endian) % 2 != 0:
big_endian = '0'+big_endian
little_endian = []
for i in range(0, len(big_endian), 2):
little_endian.insert(0, big_endian[i:i+2])
for i in range(0, 4-len(little_endian)):
little_endian.append('00')
command = 'mp'
for i in little_endian:
command += chr(int(i, 16))
self.con.send_xid_command(command, 0)
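    # worked example: set_pulse_duration(100) -> hex(100) is '64', the padded
    # little-endian byte string is ['64', '00', '00', '00'], so the device is
    # sent 'mp' + '\x64\x00\x00\x00'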
def activate_line(self, lines=None, bitmask=None,
leave_remaining_lines=False):
"""
Triggers an output line on StimTracker.
There are 8 output lines on StimTracker that can be raised in any
combination. To raise lines 1 and 7, for example, you pass in
the list: activate_line(lines=[1, 7]).
To raise a single line, pass in just an integer, or a list with a
single element to the lines keyword argument:
activate_line(lines=3)
or
activate_line(lines=[3])
The `lines` argument must either be an Integer, list of Integers, or
None.
If you'd rather specify a bitmask for setting the lines, you can use
        the bitmask keyword argument. Bitmask must be an Integer value between
0 and 255 where 0 specifies no lines, and 255 is all lines. For a
mapping between lines and their bit values, see the `_lines` class
variable.
To use this, call the function as so to activate lines 1 and 6:
activate_line(bitmask=33)
leave_remaining_lines tells the function to only operate on the lines
specified. For example, if lines 1 and 8 are active, and you make
the following function call:
activate_line(lines=4, leave_remaining_lines=True)
This will result in lines 1, 4 and 8 being active.
If you call activate_line(lines=4) with leave_remaining_lines=False
(the default), if lines 1 and 8 were previously active, only line 4
will be active after the call.
"""
if lines is None and bitmask is None:
raise ValueError('Must set one of lines or bitmask')
if lines is not None and bitmask is not None:
raise ValueError('Can only set one of lines or bitmask')
if bitmask is not None:
if bitmask not in range(0, 256):
raise ValueError('bitmask must be an integer between '
'0 and 255')
if lines is not None:
if not isinstance(lines, list):
lines = [lines]
bitmask = 0
for l in lines:
if l < 1 or l > 8:
raise ValueError('Line numbers must be between 1 and 8 '
'(inclusive)')
bitmask |= self._lines[l]
self.con.set_digital_output_lines(bitmask, leave_remaining_lines)
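    # bitmask arithmetic per _lines: lines=[1, 7] sets 1 | 64 = 65, and the
    # docstring's "lines 1 and 6" example is 1 | 32 = 33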
def clear_line(self, lines=None, bitmask=None,
leave_remaining_lines=False):
"""
The inverse of activate_line. If a line is active, it deactivates it.
This has the same parameters as activate_line()
"""
if lines is None and bitmask is None:
raise ValueError('Must set one of lines or bitmask')
if lines is not None and bitmask is not None:
raise ValueError('Can only set one of lines or bitmask')
if bitmask is not None:
if bitmask not in range(0, 256):
raise ValueError('bitmask must be an integer between '
'0 and 255')
if lines is not None:
if not isinstance(lines, list):
lines = [lines]
bitmask = 0
for l in lines:
if l < 1 or l > 8:
raise ValueError('Line numbers must be between 1 and 8 '
'(inclusive)')
bitmask |= self._lines[l]
self.con.clear_digital_output_lines(bitmask, leave_remaining_lines)
def __str__(self):
return '<StimTracker "%s">' % self.device_name
def __repr__(self):
return self.__str__()
class XidError(Exception):
pass
class XidDevice(object):
"""
Class for interfacing with a Cedrus XID device.
At the beginning of an experiment, the developer should call:
XidDevice.reset_base_timer()
Whenever a stimulus is presented, the developer should call:
XidDevice.reset_rt_timer()
Developers Note: Currently there is a known issue of clock drift
in the XID devices. Due to this, the dict returned by
XidDevice.get_next_response() returns 0 for the reaction time value.
This issue will be resolved in a future release of this library.
"""
def __init__(self, xid_connection):
self.con = xid_connection
self._impl = None
self.init_device()
def __del__(self):
self.con.close()
del self.con
def is_stimtracker(self):
return isinstance(self._impl, StimTracker)
def is_response_device(self):
return isinstance(self._impl, ResponseDevice)
def init_device(self):
"""
Initializes the device with the proper keymaps and name
"""
try:
product_id = int(self._send_command('_d2', 1))
except ValueError:
product_id = self._send_command('_d2', 1)
if product_id == 0:
self._impl = ResponseDevice(
self.con,
'Cedrus Lumina LP-400 Response Pad System',
lumina_keymap)
elif product_id == 1:
self._impl = ResponseDevice(
self.con,
'Cedrus SV-1 Voice Key',
None,
'Voice Response')
elif product_id == 2:
model_id = int(self._send_command('_d3', 1))
if model_id == 1:
self._impl = ResponseDevice(
self.con,
'Cedrus RB-530',
rb_530_keymap)
elif model_id == 2:
self._impl = ResponseDevice(
self.con,
'Cedrus RB-730',
rb_730_keymap)
elif model_id == 3:
self._impl = ResponseDevice(
self.con,
'Cedrus RB-830',
rb_830_keymap)
elif model_id == 4:
self._impl = ResponseDevice(
self.con,
'Cedrus RB-834',
rb_834_keymap)
else:
raise XidError('Unknown RB Device')
elif product_id == 4:
self._impl = StimTracker(
self.con,
'Cedrus C-POD')
elif product_id == b'S':
self._impl = StimTracker(
self.con,
'Cedrus StimTracker')
elif product_id == -99:
raise XidError('Invalid XID device')
def _send_command(self, command, expected_bytes):
"""
Send an XID command to the device
"""
response = self.con.send_xid_command(command, expected_bytes)
return response
def __getattr__(self, attrname):
return getattr(self._impl, attrname)
def __repr__(self):
if self._impl is not None:
return str(self._impl)
else:
return 'Uninitialized XID device'
|
py | 1a30897fffd3f594588bafc0f4bc881cbfbae8b0 | import csv
import requests
df = open("bridgeData3.csv",'r').readlines()
fin = open('final.csv','r').readlines()
finCsv = fin[1:]
url = 'https://b2ptc.herokuapp.com/bridges/'
finalCsv = df[1:]
obj = {}
for i in finalCsv:
    x = i.strip().split(',')
    obj[x[1]] = {'bridge_name': x[0], 'proj_code': x[1],
                 'before_img': x[2], 'after_img': x[3]}
print(finCsv[0])
# per the original notes: pull each id with its before/after images from
# bridgeData3.csv and do a PUT request with the id as the URL param and the
# images as the request data; the payload field names are an assumption and
# should be adjusted to whatever the API expects
for proj_code, bridge in obj.items():
    requests.put(url + proj_code,
                 data={'before_img': bridge['before_img'],
                       'after_img': bridge['after_img']})
|
py | 1a308a2587ccd68fe03943fc982dc858bb9631a3 | # -*- coding: utf-8 -*-
# @Author: yulidong
# @Date: 2018-07-17 10:44:43
# @Last Modified by: yulidong
# @Last Modified time: 2018-08-27 18:45:39
import time
import torch
import numpy as np
import torch.nn as nn
import math
from math import ceil
from torch.autograd import Variable
from torch.nn.functional import cosine_similarity as cosine_s
from pssm import caffe_pb2
from pssm.models.utils import *
rsn_specs = {
'scene':
{
'n_classes': 9,
'input_size': (540, 960),
'block_config': [3, 4, 23, 3],
},
}
group_dim=8
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
if stride==1:
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
if stride==2:
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=2, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.gn1 = nn.GroupNorm(group_dim,planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.gn2 = nn.GroupNorm(group_dim,planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.gn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.gn2(out)
if self.downsample is not None:
residual = self.downsample(x)
# print(residual.shape)
# print(out.shape)
out += residual
out = self.relu(out)
return out
class feature_extraction(nn.Module):
def __init__(self):
super(feature_extraction, self).__init__()
self.inplanes = 32
self.layer1 = self._make_layer(BasicBlock, 32, 3, 1,1,1)
self.branch1 = nn.Sequential(nn.AvgPool2d((54, 96), stride=(54,96)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
self.branch2 = nn.Sequential(nn.AvgPool2d((27, 48), stride=(27,48)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
self.branch3 = nn.Sequential(nn.AvgPool2d((36, 64), stride=(36,64)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
self.branch4 = nn.Sequential(nn.AvgPool2d((18, 32), stride=(18,32)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
self.branch5 = nn.Sequential(nn.AvgPool2d((9, 16), stride=(9,16)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
self.branch6 = nn.Sequential(nn.AvgPool2d((3, 8), stride=(3,8)),
nn.Conv2d(32, 8, 1, 1, 0, 1),
nn.GroupNorm(4,8),
nn.ReLU(inplace=True))
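        # lastconv input: 32 skip channels + 6 pooled branches * 8 channels = 80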
self.lastconv = nn.Sequential(nn.Conv2d(80, 64, 3, 1, 1, 1),
nn.GroupNorm(group_dim,64),
nn.ReLU(inplace=True),
nn.Conv2d(64, 32, 3, 1, 1, 1),
nn.GroupNorm(group_dim,32),
nn.ReLU(inplace=True),
)
def _make_layer(self, block, planes, blocks, stride, pad, dilation):
downsample = None
layers = []
layers.append(block(self.inplanes, planes, stride))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes,1))
return nn.Sequential(*layers)
def forward(self, x):
# output = self.conv1(x)
# output = self.gn1(output)
# output = self.relu1(output)
# output = self.conv2(output)
# output = self.gn2(output)
# output = self.relu2(output)
# output = self.conv3(output)
# output = self.gn3(output)
# output = self.relu3(output)
output_skip = self.layer1(x)
# output_skip=x
output_branch1 = self.branch1(output_skip)
output_branch1 = F.interpolate(output_branch1, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch2 = self.branch2(output_skip)
output_branch2 = F.interpolate(output_branch2, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch3 = self.branch3(output_skip)
output_branch3 = F.interpolate(output_branch3, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch4 = self.branch4(output_skip)
output_branch4 = F.interpolate(output_branch4, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch5 = self.branch5(output_skip)
output_branch5 = F.interpolate(output_branch5, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_branch6 = self.branch6(output_skip)
output_branch6 = F.interpolate(output_branch6, (output_skip.size()[2],output_skip.size()[3]),mode='bilinear',align_corners=True)
output_feature = torch.cat((output_skip, output_branch6, output_branch5, output_branch4, output_branch3, output_branch2, output_branch1), 1)
output_feature = self.lastconv(output_feature)
#print(output_feature.shape)
return output_feature
class feature_extraction2(nn.Module):
def __init__(self):
super(feature_extraction2, self).__init__()
self.inplanes = 32
self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1,
bias=False,dilation=1)
self.gn1 = nn.GroupNorm(group_dim,32)
self.relu1 = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32, 32, kernel_size=3, stride=1, padding=1,
bias=False,dilation=1)
self.gn2 = nn.GroupNorm(group_dim,32)
self.relu2 = nn.ReLU(inplace=True)
self.conv3 = nn.Conv2d(32, 32, kernel_size=7, stride=1, padding=6,
bias=False,dilation=2)
self.gn3 = nn.GroupNorm(group_dim,32)
self.relu3 = nn.ReLU(inplace=True)
self.layer1 = self._make_layer(BasicBlock, 32, 1, 1,1,1)
def _make_layer(self, block, planes, blocks, stride, pad, dilation):
downsample = None
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes,1,None,))
return nn.Sequential(*layers)
def forward(self, x):
output = self.conv1(x)
output = self.gn1(output)
output = self.relu1(output)
output = self.conv2(output)
output = self.gn2(output)
output = self.relu2(output)
output = self.conv3(output)
output = self.gn3(output)
output = self.relu3(output)
#print(output.shape)
output = self.layer1(output)
return output
class ss_argmin(nn.Module):
def __init__(self):
super(ss_argmin, self).__init__()
self.softmax = nn.Softmax(dim=-1)
def forward(self,x,min,max):
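        """Probability-weighted (soft) argmin over the disparity axis.
        Softmaxes the cost, takes the expected disparity, and keeps a pixel
        only if the top-1 probability exceeds 0.7, or the top-5 mass exceeds
        0.7 within a disparity span of less than 6.
        """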
one=torch.ones(1)
zero=torch.zeros(1)
x=self.softmax(x)
        # disparity candidates form the inclusive range [min, max]
        index = torch.ones_like(x) * torch.arange(min, max + 1, dtype=x.dtype, device=x.device)
disparity= torch.sum(x*index,dim=-1)
v,i=torch.topk(x,k=1,dim=-1)
mask_1=torch.squeeze(torch.where(v>0.7,one,zero))
v,i=torch.topk(x,k=5,dim=-1)
v_sum=torch.sum(v,-1)
        mask_2=torch.squeeze(torch.where(v_sum>0.7,one,zero))
i_dis=torch.max(i,-1)[0]-torch.min(i,-1)[0]
mask_3=torch.squeeze(torch.where(i_dis<6,one,zero))
mask=mask_1+mask_2*mask_3
mask=torch.where(mask>0,one,zero)
return disparity*mask
class rstereo(nn.Module):
def __init__(self,
n_classes=9,
block_config=[3, 4, 6, 3],
input_size= (480, 640),
version='scene'):
super(rstereo, self).__init__()
self.feature_extraction=feature_extraction().cuda(0)
self.feature_extraction2=feature_extraction2().cuda(0)
# self.aggregation_sparse=aggregation_sparse()
# self.aggregation_dense=aggregation_dense()
self.ss_argmin=ss_argmin()
# self.refinement_sparse=aggregation_sparse()
# self.refinement_dense=aggregation_dense()
def crop(self,x):
index=(x==1).nonzero()
return torch.min(index[:,0]),torch.max(index[:,0])+1,torch.min(index[:,1]),torch.max(index[:,1]+1)
    def cluster(self, feature, mask):
        # weight masked pixels by their feature distance from the region mean
        ones = torch.ones_like(mask)
        zeros = torch.zeros_like(mask)
        count = torch.sum(mask)
        mean = torch.sum(torch.sum(feature, dim=-1), dim=-1) / count
        weights = torch.where(mask == ones, torch.norm(feature - mean, dim=1), zeros)
        weights = torch.exp(weights / torch.max(weights)).view(weights.shape[0], weights.shape[1], 1)
        return weights
def forward(self, l,r,P,pre1,pre2):
#self.P=P[1,0]
#0 l to r,1 min,2 max
#[l_box,r_box,match],[min_d,max_d]
self.pre=pre1
self.pre2=pre2
P1=P[...,0]
P2=P[...,3]
P3=P[...,1]
P4=P[...,2]
#feature extraction
l_mask=P2-P1
s_mask=P1
#l_mask=l_mask.byte()
#s_mask=s_mask.byte()
#basic cuda 524
#print(l.type)
#1923
#print(torch.cuda.memory_allocated(1))
#2727
l_sf=self.feature_extraction2(l)
l_lf=self.feature_extraction(l_sf)
#print(torch.cuda.memory_allocated(2))
#the cuda won't copy the volume to the new gpu
# a=l_lf.cuda(1)
# b=l_lf.cuda(2)
# c=l_sf.cuda(3)
r_sf=self.feature_extraction2(r)
r_lf=self.feature_extraction(r_sf)
#print(torch.cuda.memory_allocated(1))
#3267
#print(torch.cuda.memory_allocated(2))
#reshape the mask to batch and channel
disparity=torch.zeros([540,960]).cuda(2)
one=torch.ones(1).cuda(2)
zero=torch.zeros(1).cuda(2)
cost_volume=[]
#5710
#print(value)
l_lf=l_lf.cuda(2)
r_lf=r_lf.cuda(2)
r_sf=r_sf.cuda(2)
l_sf=l_sf.cuda(2)
#985
#feature=torch.masked_select(l_sf,s_mask)
#feature=torch.masked_select(l_lf,l_mask)+torch.masked_select(l_sf,s_mask)
feature=l_lf*l_mask+l_sf*s_mask
feature=torch.where((l_mask+s_mask)>0,feature,l_lf)
s_match=s_mask.long().nonzero()
s_feature=l_sf[...,s_match[:,0],s_match[:,1]]
l_match=l_mask.long().nonzero()
l_feature=l_lf[...,l_match[:,0],l_match[:,1]]
start_time=time.time()
#0.0003
#s_r_o_t=r_sf[...,s_match[:,0],s_match[:,1]]
#1,32,n
#print(time.time()-start_time)
#print(s_match.shape)
#time 10
# for i in range(s_match.shape[0]):
# min_d=torch.max(s_match[i,1]-300,zero.long())
# #print(min_d)
# s_r_o_t=r_sf[...,s_match[i,0],min_d:s_match[i,1]]
# a=s_feature[...,i].view(1,32,1)
# #print(a.shape,s_r_o_t.shape)
# cost_volume.append(torch.where(s_match[i,1]-300>=0,cosine_s(a,s_r_o_t),zero))
#time 0.23,192,0.035,30, the number of the match points won't influence the time,only the iteration
# for i in range(300):
# s_r_o_t=r_sf[...,s_match[:,0],s_match[:,1]-i]
# cost_volume.append(torch.where(s_match[:,1]-i>=0,cosine_s(s_feature,s_r_o_t),zero))
# l_r_o_t=r_sf[...,l_match[:,0],l_match[:,1]-i]
# cost_volume.append(torch.where(l_match[:,1]-i>=0,cosine_s(l_feature,l_r_o_t),zero))
# #cost_volume=torch.stack(cost_volume)
# print(torch.cuda.memory_allocated(2))
# print(time.time()-start_time)
# time.sleep(100)
        # possible improvement: segment with bounding boxes and divide the
        # whole image into many parts, so that each bounding box is run
        # through the network instead of the whole image
#matching cost computation
count=0
start_time=time.time()
for i in range(torch.max(P3).type(torch.int32)+1):
#ground 0-270, sky 0-40
# if i==13 or i == 14:
# continue
# i=60
#print(pre2.shape)
#i=14
min_d=pre1[0,0,i].long()
max_d=pre1[0,1,i].long()
object_mask=torch.where(P3==i,one,zero)
x1,y1,x2,y2,size=pre2[0,i].long()
object_mask=object_mask[0,x1:x2,y1:y2]
s_mask_o=object_mask*s_mask[0,x1:x2,y1:y2]
l_mask_o=object_mask*l_mask[0,x1:x2,y1:y2]
s_match=s_mask_o.long().nonzero()
l_match=l_mask_o.long().nonzero()
if s_match.shape[0]==0:
s_match=object_mask.nonzero()
if l_match.shape[0]==0:
l_match=object_mask.nonzero()
s_l_o=feature[...,s_match[:,0],s_match[:,1]]
l_l_o=feature[...,l_match[:,0],l_match[:,1]]
#print(torch.max(min_d,zero).long())
#s_r_o=feature[...,s_match[:,0],s_match[:,1]]
# s_r_o=r_sf[...,x1:x2,y1-max_d:y2-min_d]
# l_r_o=r_lf[...,x1:x2,y1-max_d:y2-min_d]
cost_s=[]
cost_l=[]
#ground and sky
#print(s_match.shape[0],l_match.shape[0],min_d,max_d)
for j in range(min_d,max_d):
s_r_o_t=r_sf[...,s_match[:,0],s_match[:,1]-j]
cost_s.append(torch.where(s_match[:,1]-j>=0,cosine_s(s_l_o,s_r_o_t),zero))
l_r_o_t=r_lf[...,l_match[:,0],l_match[:,1]-j]
cost_l.append(torch.where(l_match[:,1]-j>=0,cosine_s(l_l_o,l_r_o_t),zero))
cost_s=torch.stack(cost_s,-1)
cost_l=torch.stack(cost_l,-1)
#cost_volume=cost_s+cost_l
#print(torch.cuda.memory_allocated(2)/1e+6)
#time.sleep(30)
print(time.time()-start_time)
time.sleep(100)
return cost_volume
|
py | 1a308d089d78cb73121ee6479a4167fb0f4c76a3 | import pytest
import random
import tensorflow as tf
from run import run
from main import main
import os
import json
import shutil
cwd = os.path.abspath(os.path.dirname(__file__))
path = os.path.join(cwd, '..', 'cotk')
def setup_function(function):
import sys
sys.argv = ['python3']
random.seed(0)
import numpy as np
np.random.seed(0)
tf.set_random_seed(0)
try:
shutil.rmtree(cwd + '/output_test')
except Exception:
pass
try:
shutil.rmtree(cwd + '/tensorboard_test')
except Exception:
pass
try:
shutil.rmtree(cwd + '/model_test')
except Exception:
pass
try:
shutil.rmtree(cwd + '/cache_test')
except Exception:
pass
os.mkdir(cwd + '/output_test')
os.mkdir(cwd + '/tensorboard_test')
os.mkdir(cwd + '/model_test')
os.mkdir(cwd + '/cache_test')
def teardown_function(function):
shutil.rmtree(cwd + '/output_test')
shutil.rmtree(cwd + '/tensorboard_test')
shutil.rmtree(cwd + '/model_test')
shutil.rmtree(cwd + '/cache_test')
def modify_args(args):
args.cuda = False
args.restore = None
args.wvclass = 'Glove'
args.wvpath = path + '/tests/wordvector/dummy_glove/300d'
    args.embedding_size = 300  # must match the dimension of the word vectors at wvpath
args.out_dir = cwd + '/output_test'
args.log_dir = cwd + '/tensorboard_test'
args.model_dir = cwd + '/model_test'
args.cache_dir = cwd + '/cache_test'
args.name = 'test_hred_tensorflow'
args.epochs = 1
args.checkpoint_steps = 1
args.datapath = path + '/tests/dataloader/dummy_ubuntucorpus#Ubuntu'
def test_train(mocker):
def side_effect_train(args):
modify_args(args)
args.mode = 'train'
main(args)
def side_effect_restore(args):
modify_args(args)
args.mode = 'train'
args.restore = 'last'
main(args)
def side_effect_cache(args):
modify_args(args)
args.mode = 'train'
args.cache = True
main(args)
mock = mocker.patch('main.main', side_effect=side_effect_train)
run()
tf.reset_default_graph()
mock.side_effect = side_effect_restore
run()
tf.reset_default_graph()
mock.side_effect = side_effect_cache
run()
tf.reset_default_graph()
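# note: main() builds a fresh TF graph on every patched run() call, so the
# default graph is reset between runs to avoid variable-name collisions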
def test_test(mocker):
def side_effect_test(args):
modify_args(args)
args.mode = 'test'
main(args)
mock = mocker.patch('main.main', side_effect=side_effect_test)
run()
old_res = json.load(open("./result.json", "r"))
tf.reset_default_graph()
run()
new_res = json.load(open("./result.json", "r"))
for key in old_res:
if key[-9:] == 'hashvalue':
assert old_res[key] == new_res[key]
tf.reset_default_graph()
|
py | 1a308d2eb8235c1bc63463b8113b70194885403a | from overrides import overrides
from allennlp.common.util import JsonDict
from allennlp.data import DatasetReader, Instance
from allennlp.data.tokenizers import WordTokenizer
from allennlp.models import Model
from allennlp.service.predictors.predictor import Predictor
@Predictor.register('simple-tagger')
class SimpleTaggerPredictor(Predictor):
"""
    Wrapper for the :class:`~allennlp.models.simple_tagger.SimpleTagger` model.
"""
def __init__(self, model: Model, dataset_reader: DatasetReader) -> None:
super(SimpleTaggerPredictor, self).__init__(model, dataset_reader)
self._tokenizer = WordTokenizer()
@overrides
def _json_to_instance(self, json: JsonDict) -> Instance:
"""
Expects JSON that looks like ``{"sentence": "..."}``
and returns JSON that looks like
``{"tags": [...], "class_probabilities": [[...], ..., [...]]}``
"""
sentence = json["sentence"]
tokens, _ = self._tokenizer.tokenize(sentence)
return self._dataset_reader.text_to_instance(tokens)
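    # Hypothetical example: {"sentence": "Dogs bark"} is tokenized to
    # ["Dogs", "bark"] and wrapped in an Instance; the model's forward pass
    # then fills in the "tags" and "class_probabilities" described above.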
|
py | 1a308ea479edb3858ce003e0ab30bf31df5b85c1 | # coding: utf-8
"""
Intersight REST API
This is Intersight REST API
OpenAPI spec version: 1.0.9-961
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class TaskWorkflowActionRef(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'moid': 'str',
'object_type': 'str',
'selector': 'str'
}
attribute_map = {
'moid': 'Moid',
'object_type': 'ObjectType',
'selector': 'Selector'
}
def __init__(self, moid=None, object_type=None, selector=None):
"""
TaskWorkflowActionRef - a model defined in Swagger
"""
self._moid = None
self._object_type = None
self._selector = None
if moid is not None:
self.moid = moid
if object_type is not None:
self.object_type = object_type
if selector is not None:
self.selector = selector
@property
def moid(self):
"""
Gets the moid of this TaskWorkflowActionRef.
The Moid of the referenced REST resource.
:return: The moid of this TaskWorkflowActionRef.
:rtype: str
"""
return self._moid
@moid.setter
def moid(self, moid):
"""
Sets the moid of this TaskWorkflowActionRef.
The Moid of the referenced REST resource.
:param moid: The moid of this TaskWorkflowActionRef.
:type: str
"""
self._moid = moid
@property
def object_type(self):
"""
Gets the object_type of this TaskWorkflowActionRef.
The Object Type of the referenced REST resource.
:return: The object_type of this TaskWorkflowActionRef.
:rtype: str
"""
return self._object_type
@object_type.setter
def object_type(self, object_type):
"""
Sets the object_type of this TaskWorkflowActionRef.
The Object Type of the referenced REST resource.
:param object_type: The object_type of this TaskWorkflowActionRef.
:type: str
"""
self._object_type = object_type
@property
def selector(self):
"""
Gets the selector of this TaskWorkflowActionRef.
An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. If 'moid' is set this field is ignored. If 'selector' is set and 'moid' is empty/absent from the request, Intersight will determine the Moid of the resource matching the filter expression and populate it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.
:return: The selector of this TaskWorkflowActionRef.
:rtype: str
"""
return self._selector
@selector.setter
def selector(self, selector):
"""
Sets the selector of this TaskWorkflowActionRef.
An OData $filter expression which describes the REST resource to be referenced. This field may be set instead of 'moid' by clients. If 'moid' is set this field is ignored. If 'selector' is set and 'moid' is empty/absent from the request, Intersight will determine the Moid of the resource matching the filter expression and populate it in the MoRef that is part of the object instance being inserted/updated to fulfill the REST request. An error is returned if the filter matches zero or more than one REST resource. An example filter string is: Serial eq '3AA8B7T11'.
:param selector: The selector of this TaskWorkflowActionRef.
:type: str
"""
self._selector = selector
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, TaskWorkflowActionRef):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
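# Illustrative usage sketch (not part of the generated model; the attribute
# values below are placeholders):
#
# >>> ref = TaskWorkflowActionRef(object_type='workflow.WorkflowInfo',
# ...                             selector="Name eq 'my-workflow'")
# >>> ref.selector
# "Name eq 'my-workflow'"
# >>> ref.to_dict()  # keys follow the swagger_types mapping defined above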
|
py | 1a308ef468987d1e48ff90d4af4109147de63be1 | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 9
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_2_2.models.node_state_servicelight_extended import NodeStateServicelightExtended # noqa: F401,E501
class NodeStateNodeServicelight(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'enabled': 'bool',
'present': 'bool',
'supported': 'bool',
'valid': 'bool'
}
attribute_map = {
'enabled': 'enabled',
'present': 'present',
'supported': 'supported',
'valid': 'valid'
}
def __init__(self, enabled=None, present=None, supported=None, valid=None): # noqa: E501
"""NodeStateNodeServicelight - a model defined in Swagger""" # noqa: E501
self._enabled = None
self._present = None
self._supported = None
self._valid = None
self.discriminator = None
self.enabled = enabled
if present is not None:
self.present = present
if supported is not None:
self.supported = supported
if valid is not None:
self.valid = valid
@property
def enabled(self):
"""Gets the enabled of this NodeStateNodeServicelight. # noqa: E501
The node service light state (True = on). # noqa: E501
:return: The enabled of this NodeStateNodeServicelight. # noqa: E501
:rtype: bool
"""
return self._enabled
@enabled.setter
def enabled(self, enabled):
"""Sets the enabled of this NodeStateNodeServicelight.
The node service light state (True = on). # noqa: E501
:param enabled: The enabled of this NodeStateNodeServicelight. # noqa: E501
:type: bool
"""
if enabled is None:
raise ValueError("Invalid value for `enabled`, must not be `None`") # noqa: E501
self._enabled = enabled
@property
def present(self):
"""Gets the present of this NodeStateNodeServicelight. # noqa: E501
This node has a service light. # noqa: E501
:return: The present of this NodeStateNodeServicelight. # noqa: E501
:rtype: bool
"""
return self._present
@present.setter
def present(self, present):
"""Sets the present of this NodeStateNodeServicelight.
This node has a service light. # noqa: E501
:param present: The present of this NodeStateNodeServicelight. # noqa: E501
:type: bool
"""
self._present = present
@property
def supported(self):
"""Gets the supported of this NodeStateNodeServicelight. # noqa: E501
This node supports a service light. # noqa: E501
:return: The supported of this NodeStateNodeServicelight. # noqa: E501
:rtype: bool
"""
return self._supported
@supported.setter
def supported(self, supported):
"""Sets the supported of this NodeStateNodeServicelight.
This node supports a service light. # noqa: E501
:param supported: The supported of this NodeStateNodeServicelight. # noqa: E501
:type: bool
"""
self._supported = supported
@property
def valid(self):
"""Gets the valid of this NodeStateNodeServicelight. # noqa: E501
The node service light state is valid (False = Error). # noqa: E501
:return: The valid of this NodeStateNodeServicelight. # noqa: E501
:rtype: bool
"""
return self._valid
@valid.setter
def valid(self, valid):
"""Sets the valid of this NodeStateNodeServicelight.
The node service light state is valid (False = Error). # noqa: E501
:param valid: The valid of this NodeStateNodeServicelight. # noqa: E501
:type: bool
"""
self._valid = valid
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NodeStateNodeServicelight):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
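# Illustrative usage sketch (not part of the generated model). Note that
# ``enabled`` is required: its setter raises ValueError when given None.
#
# >>> light = NodeStateNodeServicelight(enabled=True, present=True)
# >>> light.to_dict()
# {'enabled': True, 'present': True, 'supported': None, 'valid': None}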
|
py | 1a308f2c8f1db82442861d3d91dbcaec7ed42ecc | __classification__ = 'UNCLASSIFIED'
__author__ = "Thomas McCullough"
import os
import re
import logging
from typing import List
logger = logging.getLogger('validation')
_the_directory = os.path.split(__file__)[0]
urn_mapping = {
'urn:SIDD:1.0.0': {
'ism_urn': 'urn:us:gov:ic:ism',
'sfa_urn': 'urn:SFA:1.2.0',
'sicommon_urn': 'urn:SICommon:0.1',
'version': '1.0',
'release': '1.0.0',
'date': '2011-08-31T00:00:00Z',
'schema': os.path.join(_the_directory, 'version1', 'SIDD_schema_V1.0.0_2011_08_31.xsd')},
'urn:SIDD:2.0.0': {
'ism_urn': 'urn:us:gov:ic:ism:13',
'sfa_urn': 'urn:SFA:1.2.0',
'sicommon_urn': 'urn:SICommon:1.0',
'version': '2.0',
'release': '2.0.0',
'date': '2019-05-31T00:00:00Z',
'schema': os.path.join(_the_directory, 'version2', 'SIDD_schema_V2.0.0_2019_05_31.xsd')},
}
_SIDD_SPECIFICATION_IDENTIFIER = 'SIDD Volume 1 Design & Implementation Description Document'
def get_specification_identifier():
"""
Get the SIDD specification identifier string.
Returns
-------
str
"""
return _SIDD_SPECIFICATION_IDENTIFIER
def check_urn(urn_string):
"""
    Checks that the urn string follows the correct pattern. This raises an
    exception for a poorly formed SIDD urn; an unmapped (but well formed)
    urn is rejected by `get_urn_details` instead.
Parameters
----------
urn_string : str
"""
if not isinstance(urn_string, str):
raise TypeError(
'Expected a urn input of string type, got type {}'.format(type(urn_string)))
    the_match = re.match(r'^urn:SIDD:\d\.\d\.\d$', urn_string)
if the_match is None:
raise ValueError(
'Input provided as `{}`,\nbut should be of the form '
            '`urn:SIDD:<major>.<minor>.<release>`'.format(urn_string))
def get_urn_details(urn_string):
"""
Gets the associated details for the given SIDD urn, or raise an exception for
poorly formatted or unrecognized urn.
Parameters
----------
urn_string
Returns
-------
dict
"""
check_urn(urn_string)
out = urn_mapping.get(urn_string, None)
if out is None:
raise KeyError(
'Got correctly formatted, but unmapped SIDD urn {}.'.format(urn_string))
return out
def get_schema_path(the_urn):
"""
Gets the path to the proper schema file for the given SIDD urn.
Parameters
----------
the_urn : str
Returns
-------
str
"""
result = get_urn_details(the_urn)
return result['schema']
def get_versions():
"""
Gets a list of recognized SIDD urn.
Returns
-------
List[str]
"""
return list(sorted(urn_mapping.keys()))
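# Example usage of the urn helpers above (illustrative):
#
# >>> get_versions()
# ['urn:SIDD:1.0.0', 'urn:SIDD:2.0.0']
# >>> get_urn_details('urn:SIDD:2.0.0')['version']
# '2.0'
# >>> get_schema_path('urn:SIDD:1.0.0')  # absolute path to the bundled xsd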
def validate_xml_ns(xml_ns, ns_key='default'):
"""
Validate the parsed SIDD xml namespace dictionary. This is expected to
accompany the use of :func:`sarpy.io.general.utils.parse_xml_from_string`.
Parameters
----------
xml_ns : dict
The xml namespace dictionary.
ns_key : str
The main SIDD element or default namespace.
Returns
-------
bool
"""
def validate_ism_urn():
if 'ism' not in xml_ns:
the_val = None
for key in xml_ns:
val = xml_ns[key]
if val.lower().startswith('urn:us:gov:ic:ism'):
the_val = val
xml_ns['ism'] = the_val
valid = True
if 'ism' not in xml_ns:
logger.error('SIDD: No `ism` namespace defined.')
valid = False
elif xml_ns['ism'] != details['ism_urn']:
logger.error(
'SIDD: SIDD {} `ISM` namespace urn is expected to be "{}", but we got "{}".\n\t'
'Differences in standard may lead to deserialization and/or '
'validation errors.'.format(sidd_urn, details['ism_urn'], xml_ns['ism']))
valid = False
return valid
def validate_sfa_urn():
if 'sfa' not in xml_ns:
the_val = None
for key in xml_ns:
val = xml_ns[key]
if val.lower().startswith('urn:sfa:'):
the_val = val
xml_ns['sfa'] = the_val
valid = True
        if 'sfa' not in xml_ns:
logger.error('SIDD: No `sfa` namespace defined.')
valid = False
elif xml_ns['sfa'] != details['sfa_urn']:
logger.error(
'SIDD: SIDD {} `SFA` namespace urn is expected to be "{}", but we got "{}".\n\t'
'Differences in standard may lead to deserialization and/or '
'validation errors.'.format(sidd_urn, details['sfa_urn'], xml_ns['sfa']))
valid = False
return valid
def validate_sicommon_urn():
if 'sicommon' not in xml_ns:
the_val = None
for key in xml_ns:
val = xml_ns[key]
if val.lower().startswith('urn:sicommon:'):
the_val = val
xml_ns['sicommon'] = the_val
valid = True
if 'sicommon' not in xml_ns:
logger.error('SIDD: No `sicommon` namespace defined.')
valid = False
elif xml_ns['sicommon'] != details['sicommon_urn']:
logger.error(
'SIDD: SIDD {} `SICommon` namespace urn is expected to be "{}", but we got "{}".\n\t'
'Differences in standard may lead to deserialization and/or '
'validation errors.'.format(sidd_urn, details['sicommon_urn'], xml_ns['sicommon']))
valid = False
return valid
if not isinstance(xml_ns, dict):
        raise ValueError('xml_ns must be a dictionary for SIDD interpretation.')
if ns_key not in xml_ns:
raise ValueError('ns_key must be a key in xml_ns.')
sidd_urn = xml_ns[ns_key]
try:
details = get_urn_details(sidd_urn)
except KeyError:
logger.error('Got unmapped sidd urn `{}`'.format(sidd_urn))
return False
valid_ns = validate_ism_urn()
valid_ns &= validate_sfa_urn()
valid_ns &= validate_sicommon_urn()
return valid_ns
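# Illustrative usage sketch: the namespace dictionary below is a placeholder
# of the kind produced by sarpy.io.general.utils.parse_xml_from_string.
#
# >>> xml_ns = {'default': 'urn:SIDD:2.0.0',
# ...           'ism': 'urn:us:gov:ic:ism:13',
# ...           'sfa': 'urn:SFA:1.2.0',
# ...           'sicommon': 'urn:SICommon:1.0'}
# >>> validate_xml_ns(xml_ns, ns_key='default')
# True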
|
py | 1a308f4a7efa9513d5ce696d558fa62e035aa5ef | import math
import numbers
import random
import warnings
from collections.abc import Sequence
from typing import Tuple, List, Optional
import torch
from torch import Tensor
try:
import accimage
except ImportError:
accimage = None
from . import functional as F
from .functional import InterpolationMode, _interpolation_modes_from_int
__all__ = ["Compose", "ToTensor", "PILToTensor", "ConvertImageDtype", "ToPILImage", "Normalize", "Resize", "Scale",
"CenterCrop", "Pad", "Lambda", "RandomApply", "RandomChoice", "RandomOrder", "RandomCrop",
"RandomHorizontalFlip", "RandomVerticalFlip", "RandomResizedCrop", "RandomSizedCrop", "FiveCrop", "TenCrop",
"LinearTransformation", "ColorJitter", "RandomRotation", "RandomAffine", "Grayscale", "RandomGrayscale",
"RandomPerspective", "RandomErasing", "GaussianBlur", "InterpolationMode", "RandomInvert", "RandomPosterize",
"RandomSolarize", "RandomAdjustSharpness", "RandomAutocontrast", "RandomEqualize"]
class Compose:
"""Composes several transforms together. This transform does not support torchscript.
Please, see the note below.
Args:
transforms (list of ``Transform`` objects): list of transforms to compose.
Example:
>>> transforms.Compose([
>>> transforms.CenterCrop(10),
>>> transforms.ToTensor(),
>>> ])
.. note::
In order to script the transformations, please use ``torch.nn.Sequential`` as below.
>>> transforms = torch.nn.Sequential(
>>> transforms.CenterCrop(10),
>>> transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
>>> )
>>> scripted_transforms = torch.jit.script(transforms)
    Make sure to use only scriptable transformations, i.e. those that work with ``torch.Tensor`` and do not
    require `lambda` functions or ``PIL.Image``.
"""
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, img):
for t in self.transforms:
img = t(img)
return img
def __repr__(self):
format_string = self.__class__.__name__ + '('
for t in self.transforms:
format_string += '\n'
format_string += ' {0}'.format(t)
format_string += '\n)'
return format_string
class ToTensor:
"""Convert a ``PIL Image`` or ``numpy.ndarray`` to tensor. This transform does not support torchscript.
Converts a PIL Image or numpy.ndarray (H x W x C) in the range
[0, 255] to a torch.FloatTensor of shape (C x H x W) in the range [0.0, 1.0]
if the PIL Image belongs to one of the modes (L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK, 1)
or if the numpy.ndarray has dtype = np.uint8
In the other cases, tensors are returned without scaling.
.. note::
Because the input image is scaled to [0.0, 1.0], this transformation should not be used when
transforming target image masks. See the `references`_ for implementing the transforms for image masks.
.. _references: https://github.com/pytorch/vision/tree/master/references/segmentation
"""
def __call__(self, pic):
"""
Args:
pic (PIL Image or numpy.ndarray): Image to be converted to tensor.
Returns:
Tensor: Converted image.
"""
return F.to_tensor(pic)
def __repr__(self):
return self.__class__.__name__ + '()'
class PILToTensor:
"""Convert a ``PIL Image`` to a tensor of the same type. This transform does not support torchscript.
Converts a PIL Image (H x W x C) to a Tensor of shape (C x H x W).
"""
def __call__(self, pic):
"""
Args:
pic (PIL Image): Image to be converted to tensor.
Returns:
Tensor: Converted image.
"""
return F.pil_to_tensor(pic)
def __repr__(self):
return self.__class__.__name__ + '()'
class ConvertImageDtype(torch.nn.Module):
"""Convert a tensor image to the given ``dtype`` and scale the values accordingly
This function does not support PIL Image.
Args:
dtype (torch.dtype): Desired data type of the output
.. note::
When converting from a smaller to a larger integer ``dtype`` the maximum values are **not** mapped exactly.
If converted back and forth, this mismatch has no effect.
Raises:
RuntimeError: When trying to cast :class:`torch.float32` to :class:`torch.int32` or :class:`torch.int64` as
well as for trying to cast :class:`torch.float64` to :class:`torch.int64`. These conversions might lead to
overflow errors since the floating point ``dtype`` cannot store consecutive integers over the whole range
of the integer ``dtype``.
"""
def __init__(self, dtype: torch.dtype) -> None:
super().__init__()
self.dtype = dtype
def forward(self, image):
return F.convert_image_dtype(image, self.dtype)
class ToPILImage:
"""Convert a tensor or an ndarray to PIL Image. This transform does not support torchscript.
Converts a torch.*Tensor of shape C x H x W or a numpy ndarray of shape
H x W x C to a PIL Image while preserving the value range.
Args:
mode (`PIL.Image mode`_): color space and pixel depth of input data (optional).
If ``mode`` is ``None`` (default) there are some assumptions made about the input data:
- If the input has 4 channels, the ``mode`` is assumed to be ``RGBA``.
- If the input has 3 channels, the ``mode`` is assumed to be ``RGB``.
- If the input has 2 channels, the ``mode`` is assumed to be ``LA``.
- If the input has 1 channel, the ``mode`` is determined by the data type (i.e ``int``, ``float``,
``short``).
.. _PIL.Image mode: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#concept-modes
"""
def __init__(self, mode=None):
self.mode = mode
def __call__(self, pic):
"""
Args:
pic (Tensor or numpy.ndarray): Image to be converted to PIL Image.
Returns:
PIL Image: Image converted to PIL Image.
"""
return F.to_pil_image(pic, self.mode)
def __repr__(self):
format_string = self.__class__.__name__ + '('
if self.mode is not None:
format_string += 'mode={0}'.format(self.mode)
format_string += ')'
return format_string
class Normalize(torch.nn.Module):
"""Normalize a tensor image with mean and standard deviation.
This transform does not support PIL Image.
Given mean: ``(mean[1],...,mean[n])`` and std: ``(std[1],..,std[n])`` for ``n``
channels, this transform will normalize each channel of the input
``torch.*Tensor`` i.e.,
``output[channel] = (input[channel] - mean[channel]) / std[channel]``
.. note::
This transform acts out of place, i.e., it does not mutate the input tensor.
Args:
mean (sequence): Sequence of means for each channel.
std (sequence): Sequence of standard deviations for each channel.
inplace(bool,optional): Bool to make this operation in-place.
"""
def __init__(self, mean, std, inplace=False):
super().__init__()
self.mean = mean
self.std = std
self.inplace = inplace
def forward(self, tensor: Tensor) -> Tensor:
"""
Args:
tensor (Tensor): Tensor image to be normalized.
Returns:
Tensor: Normalized Tensor image.
"""
return F.normalize(tensor, self.mean, self.std, self.inplace)
def __repr__(self):
return self.__class__.__name__ + '(mean={0}, std={1})'.format(self.mean, self.std)
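# Illustrative sketch (not part of torchvision): Normalize is equivalent to
# the per-channel arithmetic spelled out in the docstring above.
#
# >>> norm = Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
# >>> x = torch.rand(3, 224, 224)
# >>> mean = torch.tensor(norm.mean).view(-1, 1, 1)
# >>> std = torch.tensor(norm.std).view(-1, 1, 1)
# >>> torch.allclose(norm(x), (x - mean) / std)
# True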
class Resize(torch.nn.Module):
"""Resize the input image to the given size.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
size (sequence or int): Desired output size. If size is a sequence like
(h, w), output size will be matched to this. If size is an int,
smaller edge of the image will be matched to this number.
            i.e., if height > width, then the image will be rescaled to
(size * height / width, size).
In torchscript mode size as single int is not supported, use a sequence of length 1: ``[size, ]``.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` and
``InterpolationMode.BICUBIC`` are supported.
For backward compatibility integer values (e.g. ``PIL.Image.NEAREST``) are still acceptable.
"""
def __init__(self, size, interpolation=InterpolationMode.BILINEAR):
super().__init__()
if not isinstance(size, (int, Sequence)):
raise TypeError("Size should be int or sequence. Got {}".format(type(size)))
if isinstance(size, Sequence) and len(size) not in (1, 2):
raise ValueError("If size is a sequence, it should have 1 or 2 values")
self.size = size
# Backward compatibility with integer value
if isinstance(interpolation, int):
warnings.warn(
"Argument interpolation should be of type InterpolationMode instead of int. "
"Please, use InterpolationMode enum."
)
interpolation = _interpolation_modes_from_int(interpolation)
self.interpolation = interpolation
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be scaled.
Returns:
PIL Image or Tensor: Rescaled image.
"""
return F.resize(img, self.size, self.interpolation)
def __repr__(self):
interpolate_str = self.interpolation.value
return self.__class__.__name__ + '(size={0}, interpolation={1})'.format(self.size, interpolate_str)
class Scale(Resize):
"""
Note: This transform is deprecated in favor of Resize.
"""
def __init__(self, *args, **kwargs):
warnings.warn("The use of the transforms.Scale transform is deprecated, " +
"please use transforms.Resize instead.")
super(Scale, self).__init__(*args, **kwargs)
class CenterCrop(torch.nn.Module):
"""Crops the given image at the center.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
size (sequence or int): Desired output size of the crop. If size is an
int instead of sequence like (h, w), a square crop (size, size) is
made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
"""
def __init__(self, size):
super().__init__()
self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be cropped.
Returns:
PIL Image or Tensor: Cropped image.
"""
return F.center_crop(img, self.size)
def __repr__(self):
return self.__class__.__name__ + '(size={0})'.format(self.size)
class Pad(torch.nn.Module):
"""Pad the given image on all sides with the given "pad" value.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
padding (int or sequence): Padding on each border. If a single int is provided this
is used to pad all borders. If sequence of length 2 is provided this is the padding
on left/right and top/bottom respectively. If a sequence of length 4 is provided
this is the padding for the left, top, right and bottom borders respectively.
In torchscript mode padding as single int is not supported, use a sequence of length 1: ``[padding, ]``.
fill (number or str or tuple): Pixel fill value for constant fill. Default is 0. If a tuple of
length 3, it is used to fill R, G, B channels respectively.
This value is only used when the padding_mode is constant.
Only number is supported for torch Tensor.
Only int or str or tuple value is supported for PIL Image.
padding_mode (str): Type of padding. Should be: constant, edge, reflect or symmetric.
Default is constant.
- constant: pads with a constant value, this value is specified with fill
- edge: pads with the last value at the edge of the image
- reflect: pads with reflection of image without repeating the last value on the edge
For example, padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
will result in [3, 2, 1, 2, 3, 4, 3, 2]
- symmetric: pads with reflection of image repeating the last value on the edge
For example, padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
will result in [2, 1, 1, 2, 3, 4, 4, 3]
"""
def __init__(self, padding, fill=0, padding_mode="constant"):
super().__init__()
if not isinstance(padding, (numbers.Number, tuple, list)):
raise TypeError("Got inappropriate padding arg")
if not isinstance(fill, (numbers.Number, str, tuple)):
raise TypeError("Got inappropriate fill arg")
if padding_mode not in ["constant", "edge", "reflect", "symmetric"]:
raise ValueError("Padding mode should be either constant, edge, reflect or symmetric")
if isinstance(padding, Sequence) and len(padding) not in [1, 2, 4]:
raise ValueError("Padding must be an int or a 1, 2, or 4 element tuple, not a " +
"{} element tuple".format(len(padding)))
self.padding = padding
self.fill = fill
self.padding_mode = padding_mode
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be padded.
Returns:
PIL Image or Tensor: Padded image.
"""
return F.pad(img, self.padding, self.fill, self.padding_mode)
def __repr__(self):
return self.__class__.__name__ + '(padding={0}, fill={1}, padding_mode={2})'.\
format(self.padding, self.fill, self.padding_mode)
class Lambda:
"""Apply a user-defined lambda as a transform. This transform does not support torchscript.
Args:
lambd (function): Lambda/function to be used for transform.
"""
def __init__(self, lambd):
if not callable(lambd):
raise TypeError("Argument lambd should be callable, got {}".format(repr(type(lambd).__name__)))
self.lambd = lambd
def __call__(self, img):
return self.lambd(img)
def __repr__(self):
return self.__class__.__name__ + '()'
class RandomTransforms:
"""Base class for a list of transformations with randomness
Args:
transforms (sequence): list of transformations
"""
def __init__(self, transforms):
if not isinstance(transforms, Sequence):
raise TypeError("Argument transforms should be a sequence")
self.transforms = transforms
def __call__(self, *args, **kwargs):
raise NotImplementedError()
def __repr__(self):
format_string = self.__class__.__name__ + '('
for t in self.transforms:
format_string += '\n'
format_string += ' {0}'.format(t)
format_string += '\n)'
return format_string
class RandomApply(torch.nn.Module):
"""Apply randomly a list of transformations with a given probability.
.. note::
In order to script the transformation, please use ``torch.nn.ModuleList`` as input instead of list/tuple of
transforms as shown below:
>>> transforms = transforms.RandomApply(torch.nn.ModuleList([
>>> transforms.ColorJitter(),
>>> ]), p=0.3)
>>> scripted_transforms = torch.jit.script(transforms)
    Make sure to use only scriptable transformations, i.e. those that work with ``torch.Tensor`` and do not
    require `lambda` functions or ``PIL.Image``.
Args:
transforms (sequence or torch.nn.Module): list of transformations
p (float): probability
"""
def __init__(self, transforms, p=0.5):
super().__init__()
self.transforms = transforms
self.p = p
def forward(self, img):
if self.p < torch.rand(1):
return img
for t in self.transforms:
img = t(img)
return img
def __repr__(self):
format_string = self.__class__.__name__ + '('
format_string += '\n p={}'.format(self.p)
for t in self.transforms:
format_string += '\n'
format_string += ' {0}'.format(t)
format_string += '\n)'
return format_string
class RandomOrder(RandomTransforms):
"""Apply a list of transformations in a random order. This transform does not support torchscript.
"""
def __call__(self, img):
order = list(range(len(self.transforms)))
random.shuffle(order)
for i in order:
img = self.transforms[i](img)
return img
class RandomChoice(RandomTransforms):
"""Apply single transformation randomly picked from a list. This transform does not support torchscript.
"""
def __call__(self, img):
t = random.choice(self.transforms)
return t(img)
class RandomCrop(torch.nn.Module):
"""Crop the given image at a random location.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
size (sequence or int): Desired output size of the crop. If size is an
int instead of sequence like (h, w), a square crop (size, size) is
made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
padding (int or sequence, optional): Optional padding on each border
of the image. Default is None. If a single int is provided this
is used to pad all borders. If sequence of length 2 is provided this is the padding
on left/right and top/bottom respectively. If a sequence of length 4 is provided
this is the padding for the left, top, right and bottom borders respectively.
In torchscript mode padding as single int is not supported, use a sequence of length 1: ``[padding, ]``.
pad_if_needed (boolean): It will pad the image if smaller than the
desired size to avoid raising an exception. Since cropping is done
after padding, the padding seems to be done at a random offset.
fill (number or str or tuple): Pixel fill value for constant fill. Default is 0. If a tuple of
length 3, it is used to fill R, G, B channels respectively.
This value is only used when the padding_mode is constant.
Only number is supported for torch Tensor.
Only int or str or tuple value is supported for PIL Image.
padding_mode (str): Type of padding. Should be: constant, edge, reflect or symmetric. Default is constant.
- constant: pads with a constant value, this value is specified with fill
- edge: pads with the last value on the edge of the image
- reflect: pads with reflection of image (without repeating the last value on the edge)
padding [1, 2, 3, 4] with 2 elements on both sides in reflect mode
will result in [3, 2, 1, 2, 3, 4, 3, 2]
- symmetric: pads with reflection of image (repeating the last value on the edge)
padding [1, 2, 3, 4] with 2 elements on both sides in symmetric mode
will result in [2, 1, 1, 2, 3, 4, 4, 3]
"""
@staticmethod
def get_params(img: Tensor, output_size: Tuple[int, int]) -> Tuple[int, int, int, int]:
"""Get parameters for ``crop`` for a random crop.
Args:
img (PIL Image or Tensor): Image to be cropped.
output_size (tuple): Expected output size of the crop.
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for random crop.
"""
w, h = F._get_image_size(img)
th, tw = output_size
if h + 1 < th or w + 1 < tw:
raise ValueError(
"Required crop size {} is larger then input image size {}".format((th, tw), (h, w))
)
if w == tw and h == th:
return 0, 0, h, w
i = torch.randint(0, h - th + 1, size=(1, )).item()
j = torch.randint(0, w - tw + 1, size=(1, )).item()
return i, j, th, tw
def __init__(self, size, padding=None, pad_if_needed=False, fill=0, padding_mode="constant"):
super().__init__()
self.size = tuple(_setup_size(
size, error_msg="Please provide only two dimensions (h, w) for size."
))
self.padding = padding
self.pad_if_needed = pad_if_needed
self.fill = fill
self.padding_mode = padding_mode
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be cropped.
Returns:
PIL Image or Tensor: Cropped image.
"""
if self.padding is not None:
img = F.pad(img, self.padding, self.fill, self.padding_mode)
width, height = F._get_image_size(img)
# pad the width if needed
if self.pad_if_needed and width < self.size[1]:
padding = [self.size[1] - width, 0]
img = F.pad(img, padding, self.fill, self.padding_mode)
# pad the height if needed
if self.pad_if_needed and height < self.size[0]:
padding = [0, self.size[0] - height]
img = F.pad(img, padding, self.fill, self.padding_mode)
i, j, h, w = self.get_params(img, self.size)
return F.crop(img, i, j, h, w)
def __repr__(self):
return self.__class__.__name__ + "(size={0}, padding={1})".format(self.size, self.padding)
class RandomHorizontalFlip(torch.nn.Module):
"""Horizontally flip the given image randomly with a given probability.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading
dimensions
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, p=0.5):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be flipped.
Returns:
PIL Image or Tensor: Randomly flipped image.
"""
if torch.rand(1) < self.p:
return F.hflip(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class RandomVerticalFlip(torch.nn.Module):
"""Vertically flip the given image randomly with a given probability.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading
dimensions
Args:
p (float): probability of the image being flipped. Default value is 0.5
"""
def __init__(self, p=0.5):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be flipped.
Returns:
PIL Image or Tensor: Randomly flipped image.
"""
if torch.rand(1) < self.p:
return F.vflip(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class RandomPerspective(torch.nn.Module):
"""Performs a random perspective transformation of the given image with a given probability.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions.
Args:
distortion_scale (float): argument to control the degree of distortion and ranges from 0 to 1.
Default is 0.5.
p (float): probability of the image being transformed. Default is 0.5.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
For backward compatibility integer values (e.g. ``PIL.Image.NEAREST``) are still acceptable.
fill (sequence or number, optional): Pixel fill value for the area outside the transformed
            image. If given a number, the value is used for all bands.
            If input is PIL Image, this option is only available for ``Pillow>=5.0.0``.
"""
def __init__(self, distortion_scale=0.5, p=0.5, interpolation=InterpolationMode.BILINEAR, fill=0):
super().__init__()
self.p = p
# Backward compatibility with integer value
if isinstance(interpolation, int):
warnings.warn(
"Argument interpolation should be of type InterpolationMode instead of int. "
"Please, use InterpolationMode enum."
)
interpolation = _interpolation_modes_from_int(interpolation)
self.interpolation = interpolation
self.distortion_scale = distortion_scale
self.fill = fill
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be Perspectively transformed.
Returns:
PIL Image or Tensor: Randomly transformed image.
"""
fill = self.fill
if isinstance(img, Tensor):
if isinstance(fill, (int, float)):
fill = [float(fill)] * F._get_image_num_channels(img)
else:
fill = [float(f) for f in fill]
if torch.rand(1) < self.p:
width, height = F._get_image_size(img)
startpoints, endpoints = self.get_params(width, height, self.distortion_scale)
return F.perspective(img, startpoints, endpoints, self.interpolation, fill)
return img
@staticmethod
def get_params(width: int, height: int, distortion_scale: float) -> Tuple[List[List[int]], List[List[int]]]:
"""Get parameters for ``perspective`` for a random perspective transform.
Args:
width (int): width of the image.
height (int): height of the image.
distortion_scale (float): argument to control the degree of distortion and ranges from 0 to 1.
Returns:
List containing [top-left, top-right, bottom-right, bottom-left] of the original image,
List containing [top-left, top-right, bottom-right, bottom-left] of the transformed image.
"""
half_height = height // 2
half_width = width // 2
topleft = [
int(torch.randint(0, int(distortion_scale * half_width) + 1, size=(1, )).item()),
int(torch.randint(0, int(distortion_scale * half_height) + 1, size=(1, )).item())
]
topright = [
int(torch.randint(width - int(distortion_scale * half_width) - 1, width, size=(1, )).item()),
int(torch.randint(0, int(distortion_scale * half_height) + 1, size=(1, )).item())
]
botright = [
int(torch.randint(width - int(distortion_scale * half_width) - 1, width, size=(1, )).item()),
int(torch.randint(height - int(distortion_scale * half_height) - 1, height, size=(1, )).item())
]
botleft = [
int(torch.randint(0, int(distortion_scale * half_width) + 1, size=(1, )).item()),
int(torch.randint(height - int(distortion_scale * half_height) - 1, height, size=(1, )).item())
]
startpoints = [[0, 0], [width - 1, 0], [width - 1, height - 1], [0, height - 1]]
endpoints = [topleft, topright, botright, botleft]
return startpoints, endpoints
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
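# Illustrative sketch (not part of torchvision): get_params samples the four
# displaced corners directly, so they can be inspected without applying the
# transform.
#
# >>> startpoints, endpoints = RandomPerspective.get_params(256, 256, distortion_scale=0.5)
# >>> startpoints  # the original corners, fixed
# [[0, 0], [255, 0], [255, 255], [0, 255]]
# >>> endpoints    # randomly perturbed corners, one per original corner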
class RandomResizedCrop(torch.nn.Module):
"""Crop the given image to random size and aspect ratio.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions
    A crop of a random size (default: 0.08 to 1.0 of the original area) and a random
    aspect ratio (default: 3/4 to 4/3 of the original aspect ratio) is made. This crop
    is finally resized to the given size.
This is popularly used to train the Inception networks.
Args:
size (int or sequence): expected output size of each edge. If size is an
int instead of sequence like (h, w), a square output size ``(size, size)`` is
made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
In torchscript mode size as single int is not supported, use a sequence of length 1: ``[size, ]``.
scale (tuple of float): scale range of the cropped image before resizing, relatively to the origin image.
ratio (tuple of float): aspect ratio range of the cropped image before resizing.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.BILINEAR``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` and
``InterpolationMode.BICUBIC`` are supported.
For backward compatibility integer values (e.g. ``PIL.Image.NEAREST``) are still acceptable.
"""
def __init__(self, size, scale=(0.08, 1.0), ratio=(3. / 4., 4. / 3.), interpolation=InterpolationMode.BILINEAR):
super().__init__()
self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
if not isinstance(scale, Sequence):
raise TypeError("Scale should be a sequence")
if not isinstance(ratio, Sequence):
raise TypeError("Ratio should be a sequence")
if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
warnings.warn("Scale and ratio should be of kind (min, max)")
# Backward compatibility with integer value
if isinstance(interpolation, int):
warnings.warn(
"Argument interpolation should be of type InterpolationMode instead of int. "
"Please, use InterpolationMode enum."
)
interpolation = _interpolation_modes_from_int(interpolation)
self.interpolation = interpolation
self.scale = scale
self.ratio = ratio
@staticmethod
def get_params(
img: Tensor, scale: List[float], ratio: List[float]
) -> Tuple[int, int, int, int]:
"""Get parameters for ``crop`` for a random sized crop.
Args:
img (PIL Image or Tensor): Input image.
scale (list): range of scale of the origin size cropped
ratio (list): range of aspect ratio of the origin aspect ratio cropped
Returns:
tuple: params (i, j, h, w) to be passed to ``crop`` for a random
sized crop.
"""
width, height = F._get_image_size(img)
area = height * width
for _ in range(10):
target_area = area * torch.empty(1).uniform_(scale[0], scale[1]).item()
log_ratio = torch.log(torch.tensor(ratio))
aspect_ratio = torch.exp(
torch.empty(1).uniform_(log_ratio[0], log_ratio[1])
).item()
w = int(round(math.sqrt(target_area * aspect_ratio)))
h = int(round(math.sqrt(target_area / aspect_ratio)))
if 0 < w <= width and 0 < h <= height:
i = torch.randint(0, height - h + 1, size=(1,)).item()
j = torch.randint(0, width - w + 1, size=(1,)).item()
return i, j, h, w
# Fallback to central crop
in_ratio = float(width) / float(height)
if in_ratio < min(ratio):
w = width
h = int(round(w / min(ratio)))
elif in_ratio > max(ratio):
h = height
w = int(round(h * max(ratio)))
else: # whole image
w = width
h = height
i = (height - h) // 2
j = (width - w) // 2
return i, j, h, w
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be cropped and resized.
Returns:
PIL Image or Tensor: Randomly cropped and resized image.
"""
i, j, h, w = self.get_params(img, self.scale, self.ratio)
return F.resized_crop(img, i, j, h, w, self.size, self.interpolation)
def __repr__(self):
interpolate_str = self.interpolation.value
format_string = self.__class__.__name__ + '(size={0}'.format(self.size)
format_string += ', scale={0}'.format(tuple(round(s, 4) for s in self.scale))
format_string += ', ratio={0}'.format(tuple(round(r, 4) for r in self.ratio))
format_string += ', interpolation={0})'.format(interpolate_str)
return format_string
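# Illustrative sketch (not part of torchvision): the crop parameters can be
# drawn separately from the resize, e.g. to reuse one crop across modalities.
#
# >>> t = RandomResizedCrop(224)
# >>> img = torch.rand(3, 256, 256)
# >>> i, j, h, w = t.get_params(img, t.scale, t.ratio)  # rejection-sampled crop
# >>> out = F.resized_crop(img, i, j, h, w, t.size, t.interpolation)
# >>> out.shape
# torch.Size([3, 224, 224])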
class RandomSizedCrop(RandomResizedCrop):
"""
Note: This transform is deprecated in favor of RandomResizedCrop.
"""
def __init__(self, *args, **kwargs):
warnings.warn("The use of the transforms.RandomSizedCrop transform is deprecated, " +
"please use transforms.RandomResizedCrop instead.")
super(RandomSizedCrop, self).__init__(*args, **kwargs)
class FiveCrop(torch.nn.Module):
"""Crop the given image into four corners and the central crop.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading
dimensions
.. Note::
This transform returns a tuple of images and there may be a mismatch in the number of
inputs and targets your Dataset returns. See below for an example of how to deal with
this.
Args:
size (sequence or int): Desired output size of the crop. If size is an ``int``
instead of sequence like (h, w), a square crop of size (size, size) is made.
If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
Example:
>>> transform = Compose([
>>> FiveCrop(size), # this is a list of PIL Images
>>> Lambda(lambda crops: torch.stack([ToTensor()(crop) for crop in crops])) # returns a 4D tensor
>>> ])
>>> #In your test loop you can do the following:
>>> input, target = batch # input is a 5d tensor, target is 2d
>>> bs, ncrops, c, h, w = input.size()
>>> result = model(input.view(-1, c, h, w)) # fuse batch size and ncrops
>>> result_avg = result.view(bs, ncrops, -1).mean(1) # avg over crops
"""
def __init__(self, size):
super().__init__()
self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be cropped.
Returns:
tuple of 5 images. Image can be PIL Image or Tensor
"""
return F.five_crop(img, self.size)
def __repr__(self):
return self.__class__.__name__ + '(size={0})'.format(self.size)
class TenCrop(torch.nn.Module):
"""Crop the given image into four corners and the central crop plus the flipped version of
these (horizontal flipping is used by default).
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading
dimensions
.. Note::
This transform returns a tuple of images and there may be a mismatch in the number of
inputs and targets your Dataset returns. See below for an example of how to deal with
this.
Args:
size (sequence or int): Desired output size of the crop. If size is an
int instead of sequence like (h, w), a square crop (size, size) is
made. If provided a sequence of length 1, it will be interpreted as (size[0], size[0]).
vertical_flip (bool): Use vertical flipping instead of horizontal
Example:
>>> transform = Compose([
>>> TenCrop(size), # this is a list of PIL Images
>>> Lambda(lambda crops: torch.stack([ToTensor()(crop) for crop in crops])) # returns a 4D tensor
>>> ])
>>> #In your test loop you can do the following:
>>> input, target = batch # input is a 5d tensor, target is 2d
>>> bs, ncrops, c, h, w = input.size()
>>> result = model(input.view(-1, c, h, w)) # fuse batch size and ncrops
>>> result_avg = result.view(bs, ncrops, -1).mean(1) # avg over crops
"""
def __init__(self, size, vertical_flip=False):
super().__init__()
self.size = _setup_size(size, error_msg="Please provide only two dimensions (h, w) for size.")
self.vertical_flip = vertical_flip
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be cropped.
Returns:
tuple of 10 images. Image can be PIL Image or Tensor
"""
return F.ten_crop(img, self.size, self.vertical_flip)
def __repr__(self):
return self.__class__.__name__ + '(size={0}, vertical_flip={1})'.format(self.size, self.vertical_flip)
class LinearTransformation(torch.nn.Module):
"""Transform a tensor image with a square transformation matrix and a mean_vector computed
offline.
This transform does not support PIL Image.
    Given transformation_matrix and mean_vector, this transform flattens the torch.*Tensor,
    subtracts mean_vector from it, computes the dot product with the
    transformation matrix, and reshapes the tensor back to its
    original shape.
Applications:
        whitening transformation: Suppose X is an [N x D] matrix of zero-centered data.
        Then compute the data covariance matrix [D x D] with torch.mm(X.t(), X),
        perform SVD on this matrix, and pass the resulting matrix as
        transformation_matrix (see the sketch after this class).
Args:
transformation_matrix (Tensor): tensor [D x D], D = C x H x W
mean_vector (Tensor): tensor [D], D = C x H x W
"""
def __init__(self, transformation_matrix, mean_vector):
super().__init__()
if transformation_matrix.size(0) != transformation_matrix.size(1):
raise ValueError("transformation_matrix should be square. Got " +
"[{} x {}] rectangular matrix.".format(*transformation_matrix.size()))
if mean_vector.size(0) != transformation_matrix.size(0):
raise ValueError("mean_vector should have the same length {}".format(mean_vector.size(0)) +
" as any one of the dimensions of the transformation_matrix [{}]"
.format(tuple(transformation_matrix.size())))
if transformation_matrix.device != mean_vector.device:
raise ValueError("Input tensors should be on the same device. Got {} and {}"
.format(transformation_matrix.device, mean_vector.device))
self.transformation_matrix = transformation_matrix
self.mean_vector = mean_vector
def forward(self, tensor: Tensor) -> Tensor:
"""
Args:
tensor (Tensor): Tensor image to be whitened.
Returns:
Tensor: Transformed image.
"""
shape = tensor.shape
n = shape[-3] * shape[-2] * shape[-1]
if n != self.transformation_matrix.shape[0]:
raise ValueError("Input tensor and transformation matrix have incompatible shape." +
"[{} x {} x {}] != ".format(shape[-3], shape[-2], shape[-1]) +
"{}".format(self.transformation_matrix.shape[0]))
if tensor.device.type != self.mean_vector.device.type:
raise ValueError("Input tensor should be on the same device as transformation matrix and mean vector. "
"Got {} vs {}".format(tensor.device, self.mean_vector.device))
flat_tensor = tensor.view(-1, n) - self.mean_vector
transformed_tensor = torch.mm(flat_tensor, self.transformation_matrix)
tensor = transformed_tensor.view(shape)
return tensor
def __repr__(self):
format_string = self.__class__.__name__ + '(transformation_matrix='
format_string += (str(self.transformation_matrix.tolist()) + ')')
format_string += (", (mean_vector=" + str(self.mean_vector.tolist()) + ')')
return format_string
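# Whitening sketch referenced in the docstring above (illustrative; ``data``
# is an assumed [N, D] tensor of samples, D = C * H * W):
#
# >>> mean_vector = data.mean(dim=0)                     # [D]
# >>> X = data - mean_vector                             # zero-centered
# >>> cov = torch.mm(X.t(), X) / X.size(0)               # [D x D] covariance
# >>> U, S, _ = torch.svd(cov)
# >>> W = torch.mm(U, torch.mm(torch.diag((S + 1e-5).rsqrt()), U.t()))
# >>> whiten = LinearTransformation(W, mean_vector)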
class ColorJitter(torch.nn.Module):
"""Randomly change the brightness, contrast, saturation and hue of an image.
If the image is torch Tensor, it is expected
to have [..., 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, mode "1", "L", "I", "F" and modes with transparency (alpha channel) are not supported.
Args:
brightness (float or tuple of float (min, max)): How much to jitter brightness.
brightness_factor is chosen uniformly from [max(0, 1 - brightness), 1 + brightness]
or the given [min, max]. Should be non negative numbers.
contrast (float or tuple of float (min, max)): How much to jitter contrast.
contrast_factor is chosen uniformly from [max(0, 1 - contrast), 1 + contrast]
or the given [min, max]. Should be non negative numbers.
saturation (float or tuple of float (min, max)): How much to jitter saturation.
saturation_factor is chosen uniformly from [max(0, 1 - saturation), 1 + saturation]
or the given [min, max]. Should be non negative numbers.
hue (float or tuple of float (min, max)): How much to jitter hue.
hue_factor is chosen uniformly from [-hue, hue] or the given [min, max].
            Should have 0 <= hue <= 0.5 or -0.5 <= min <= max <= 0.5.
"""
def __init__(self, brightness=0, contrast=0, saturation=0, hue=0):
super().__init__()
self.brightness = self._check_input(brightness, 'brightness')
self.contrast = self._check_input(contrast, 'contrast')
self.saturation = self._check_input(saturation, 'saturation')
self.hue = self._check_input(hue, 'hue', center=0, bound=(-0.5, 0.5),
clip_first_on_zero=False)
@torch.jit.unused
def _check_input(self, value, name, center=1, bound=(0, float('inf')), clip_first_on_zero=True):
if isinstance(value, numbers.Number):
if value < 0:
raise ValueError("If {} is a single number, it must be non negative.".format(name))
value = [center - float(value), center + float(value)]
if clip_first_on_zero:
value[0] = max(value[0], 0.0)
elif isinstance(value, (tuple, list)) and len(value) == 2:
if not bound[0] <= value[0] <= value[1] <= bound[1]:
raise ValueError("{} values should be between {}".format(name, bound))
else:
raise TypeError("{} should be a single number or a list/tuple with lenght 2.".format(name))
# if value is 0 or (1., 1.) for brightness/contrast/saturation
# or (0., 0.) for hue, do nothing
if value[0] == value[1] == center:
value = None
return value
@staticmethod
def get_params(brightness: Optional[List[float]],
contrast: Optional[List[float]],
saturation: Optional[List[float]],
hue: Optional[List[float]]
) -> Tuple[Tensor, Optional[float], Optional[float], Optional[float], Optional[float]]:
"""Get the parameters for the randomized transform to be applied on image.
Args:
brightness (tuple of float (min, max), optional): The range from which the brightness_factor is chosen
uniformly. Pass None to turn off the transformation.
contrast (tuple of float (min, max), optional): The range from which the contrast_factor is chosen
uniformly. Pass None to turn off the transformation.
saturation (tuple of float (min, max), optional): The range from which the saturation_factor is chosen
uniformly. Pass None to turn off the transformation.
hue (tuple of float (min, max), optional): The range from which the hue_factor is chosen uniformly.
Pass None to turn off the transformation.
Returns:
tuple: The parameters used to apply the randomized transform
along with their random order.
"""
fn_idx = torch.randperm(4)
b = None if brightness is None else float(torch.empty(1).uniform_(brightness[0], brightness[1]))
c = None if contrast is None else float(torch.empty(1).uniform_(contrast[0], contrast[1]))
s = None if saturation is None else float(torch.empty(1).uniform_(saturation[0], saturation[1]))
h = None if hue is None else float(torch.empty(1).uniform_(hue[0], hue[1]))
return fn_idx, b, c, s, h
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Input image.
Returns:
PIL Image or Tensor: Color jittered image.
"""
fn_idx, brightness_factor, contrast_factor, saturation_factor, hue_factor = \
self.get_params(self.brightness, self.contrast, self.saturation, self.hue)
for fn_id in fn_idx:
if fn_id == 0 and brightness_factor is not None:
img = F.adjust_brightness(img, brightness_factor)
elif fn_id == 1 and contrast_factor is not None:
img = F.adjust_contrast(img, contrast_factor)
elif fn_id == 2 and saturation_factor is not None:
img = F.adjust_saturation(img, saturation_factor)
elif fn_id == 3 and hue_factor is not None:
img = F.adjust_hue(img, hue_factor)
return img
def __repr__(self):
format_string = self.__class__.__name__ + '('
format_string += 'brightness={0}'.format(self.brightness)
format_string += ', contrast={0}'.format(self.contrast)
format_string += ', saturation={0}'.format(self.saturation)
format_string += ', hue={0})'.format(self.hue)
return format_string
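# Illustrative sketch (not part of torchvision): the sampled factors and their
# random application order can be inspected via get_params.
#
# >>> jitter = ColorJitter(brightness=0.4, contrast=0.4, saturation=0.4, hue=0.1)
# >>> fn_idx, b, c, s, h = jitter.get_params(
# ...     jitter.brightness, jitter.contrast, jitter.saturation, jitter.hue)
# >>> fn_idx  # permutation of the (brightness, contrast, saturation, hue) ops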
class RandomRotation(torch.nn.Module):
"""Rotate the image by angle.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions.
Args:
degrees (sequence or number): Range of degrees to select from.
If degrees is a number instead of sequence like (min, max), the range of degrees
will be (-degrees, +degrees).
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
For backward compatibility integer values (e.g. ``PIL.Image.NEAREST``) are still acceptable.
expand (bool, optional): Optional expansion flag.
If true, expands the output to make it large enough to hold the entire rotated image.
If false or omitted, make the output image the same size as the input image.
Note that the expand flag assumes rotation around the center and no translation.
center (sequence, optional): Optional center of rotation, (x, y). Origin is the upper left corner.
Default is the center of the image.
fill (sequence or number, optional): Pixel fill value for the area outside the rotated
            image. If given a number, the value is used for all bands.
            If input is PIL Image, this option is only available for ``Pillow>=5.2.0``.
resample (int, optional): deprecated argument and will be removed since v0.10.0.
            Please use the ``interpolation`` argument instead.
.. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters
"""
def __init__(
self, degrees, interpolation=InterpolationMode.NEAREST, expand=False, center=None, fill=None, resample=None
):
super().__init__()
if resample is not None:
warnings.warn(
"Argument resample is deprecated and will be removed since v0.10.0. Please, use interpolation instead"
)
interpolation = _interpolation_modes_from_int(resample)
# Backward compatibility with integer value
if isinstance(interpolation, int):
warnings.warn(
"Argument interpolation should be of type InterpolationMode instead of int. "
"Please, use InterpolationMode enum."
)
interpolation = _interpolation_modes_from_int(interpolation)
self.degrees = _setup_angle(degrees, name="degrees", req_sizes=(2, ))
if center is not None:
_check_sequence_input(center, "center", req_sizes=(2, ))
self.center = center
self.resample = self.interpolation = interpolation
self.expand = expand
self.fill = fill
@staticmethod
def get_params(degrees: List[float]) -> float:
"""Get parameters for ``rotate`` for a random rotation.
Returns:
float: angle parameter to be passed to ``rotate`` for random rotation.
"""
angle = float(torch.empty(1).uniform_(float(degrees[0]), float(degrees[1])).item())
return angle
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be rotated.
Returns:
PIL Image or Tensor: Rotated image.
"""
fill = self.fill
if isinstance(img, Tensor):
if isinstance(fill, (int, float)):
fill = [float(fill)] * F._get_image_num_channels(img)
else:
fill = [float(f) for f in fill]
angle = self.get_params(self.degrees)
return F.rotate(img, angle, self.resample, self.expand, self.center, fill)
def __repr__(self):
interpolate_str = self.interpolation.value
format_string = self.__class__.__name__ + '(degrees={0}'.format(self.degrees)
format_string += ', interpolation={0}'.format(interpolate_str)
format_string += ', expand={0}'.format(self.expand)
if self.center is not None:
format_string += ', center={0}'.format(self.center)
if self.fill is not None:
format_string += ', fill={0}'.format(self.fill)
format_string += ')'
return format_string
class RandomAffine(torch.nn.Module):
"""Random affine transformation of the image keeping center invariant.
If the image is torch Tensor, it is expected
to have [..., H, W] shape, where ... means an arbitrary number of leading dimensions.
Args:
degrees (sequence or number): Range of degrees to select from.
If degrees is a number instead of sequence like (min, max), the range of degrees
will be (-degrees, +degrees). Set to 0 to deactivate rotations.
translate (tuple, optional): tuple of maximum absolute fraction for horizontal
and vertical translations. For example translate=(a, b), then horizontal shift
is randomly sampled in the range -img_width * a < dx < img_width * a and vertical shift is
randomly sampled in the range -img_height * b < dy < img_height * b. Will not translate by default.
scale (tuple, optional): scaling factor interval, e.g (a, b), then scale is
randomly sampled from the range a <= scale <= b. Will keep original scale by default.
shear (sequence or number, optional): Range of degrees to select from.
If shear is a number, a shear parallel to the x axis in the range (-shear, +shear)
will be applied. Else if shear is a sequence of 2 values a shear parallel to the x axis in the
range (shear[0], shear[1]) will be applied. Else if shear is a sequence of 4 values,
a x-axis shear in (shear[0], shear[1]) and y-axis shear in (shear[2], shear[3]) will be applied.
Will not apply shear by default.
interpolation (InterpolationMode): Desired interpolation enum defined by
:class:`torchvision.transforms.InterpolationMode`. Default is ``InterpolationMode.NEAREST``.
If input is Tensor, only ``InterpolationMode.NEAREST``, ``InterpolationMode.BILINEAR`` are supported.
For backward compatibility integer values (e.g. ``PIL.Image.NEAREST``) are still acceptable.
fill (sequence or number, optional): Pixel fill value for the area outside the transformed
            image. If given a number, the value is used for all bands.
            If input is PIL Image, this option is only available for ``Pillow>=5.0.0``.
fillcolor (sequence or number, optional): deprecated argument and will be removed since v0.10.0.
            Please use the ``fill`` argument instead.
resample (int, optional): deprecated argument and will be removed since v0.10.0.
            Please use the ``interpolation`` argument instead.
.. _filters: https://pillow.readthedocs.io/en/latest/handbook/concepts.html#filters
"""
def __init__(
self, degrees, translate=None, scale=None, shear=None, interpolation=InterpolationMode.NEAREST, fill=0,
fillcolor=None, resample=None
):
super().__init__()
if resample is not None:
warnings.warn(
"Argument resample is deprecated and will be removed since v0.10.0. Please, use interpolation instead"
)
interpolation = _interpolation_modes_from_int(resample)
# Backward compatibility with integer value
if isinstance(interpolation, int):
warnings.warn(
"Argument interpolation should be of type InterpolationMode instead of int. "
"Please, use InterpolationMode enum."
)
interpolation = _interpolation_modes_from_int(interpolation)
if fillcolor is not None:
warnings.warn(
"Argument fillcolor is deprecated and will be removed since v0.10.0. Please, use fill instead"
)
fill = fillcolor
self.degrees = _setup_angle(degrees, name="degrees", req_sizes=(2, ))
if translate is not None:
_check_sequence_input(translate, "translate", req_sizes=(2, ))
for t in translate:
if not (0.0 <= t <= 1.0):
raise ValueError("translation values should be between 0 and 1")
self.translate = translate
if scale is not None:
_check_sequence_input(scale, "scale", req_sizes=(2, ))
for s in scale:
if s <= 0:
raise ValueError("scale values should be positive")
self.scale = scale
if shear is not None:
self.shear = _setup_angle(shear, name="shear", req_sizes=(2, 4))
else:
self.shear = shear
self.resample = self.interpolation = interpolation
self.fillcolor = self.fill = fill
@staticmethod
def get_params(
degrees: List[float],
translate: Optional[List[float]],
scale_ranges: Optional[List[float]],
shears: Optional[List[float]],
img_size: List[int]
) -> Tuple[float, Tuple[int, int], float, Tuple[float, float]]:
"""Get parameters for affine transformation
Returns:
params to be passed to the affine transformation
"""
angle = float(torch.empty(1).uniform_(float(degrees[0]), float(degrees[1])).item())
if translate is not None:
max_dx = float(translate[0] * img_size[0])
max_dy = float(translate[1] * img_size[1])
tx = int(round(torch.empty(1).uniform_(-max_dx, max_dx).item()))
ty = int(round(torch.empty(1).uniform_(-max_dy, max_dy).item()))
translations = (tx, ty)
else:
translations = (0, 0)
if scale_ranges is not None:
scale = float(torch.empty(1).uniform_(scale_ranges[0], scale_ranges[1]).item())
else:
scale = 1.0
shear_x = shear_y = 0.0
if shears is not None:
shear_x = float(torch.empty(1).uniform_(shears[0], shears[1]).item())
if len(shears) == 4:
shear_y = float(torch.empty(1).uniform_(shears[2], shears[3]).item())
shear = (shear_x, shear_y)
return angle, translations, scale, shear
def forward(self, img):
"""
img (PIL Image or Tensor): Image to be transformed.
Returns:
PIL Image or Tensor: Affine transformed image.
"""
fill = self.fill
if isinstance(img, Tensor):
if isinstance(fill, (int, float)):
fill = [float(fill)] * F._get_image_num_channels(img)
else:
fill = [float(f) for f in fill]
img_size = F._get_image_size(img)
ret = self.get_params(self.degrees, self.translate, self.scale, self.shear, img_size)
return F.affine(img, *ret, interpolation=self.interpolation, fill=fill)
def __repr__(self):
s = '{name}(degrees={degrees}'
if self.translate is not None:
s += ', translate={translate}'
if self.scale is not None:
s += ', scale={scale}'
if self.shear is not None:
s += ', shear={shear}'
if self.interpolation != InterpolationMode.NEAREST:
s += ', interpolation={interpolation}'
if self.fill != 0:
s += ', fill={fill}'
s += ')'
d = dict(self.__dict__)
d['interpolation'] = self.interpolation.value
return s.format(name=self.__class__.__name__, **d)
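# Hedged usage sketch (illustrative, not part of the original module): runs
# RandomAffine end to end. The tensor shape and parameter values below are
# assumptions picked purely for demonstration.
def _demo_random_affine():
    t = RandomAffine(degrees=15, translate=(0.1, 0.1), scale=(0.8, 1.2), shear=(-5.0, 5.0))
    img = torch.rand(3, 224, 224)  # random RGB image in [0, 1]
    return t(img)  # angle/translation/scale/shear are re-sampled on every call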
class Grayscale(torch.nn.Module):
"""Convert image to grayscale.
If the image is torch Tensor, it is expected
to have [..., 3, H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
num_output_channels (int): (1 or 3) number of channels desired for output image
Returns:
PIL Image or Tensor: Grayscale version of the input.
- If ``num_output_channels == 1`` : returned image is single channel
- If ``num_output_channels == 3`` : returned image is 3 channel with r == g == b
"""
def __init__(self, num_output_channels=1):
super().__init__()
self.num_output_channels = num_output_channels
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be converted to grayscale.
Returns:
PIL Image or Tensor: Grayscaled image.
"""
return F.rgb_to_grayscale(img, num_output_channels=self.num_output_channels)
def __repr__(self):
return self.__class__.__name__ + '(num_output_channels={0})'.format(self.num_output_channels)
class RandomGrayscale(torch.nn.Module):
"""Randomly convert image to grayscale with a probability of p (default 0.1).
If the image is torch Tensor, it is expected
to have [..., 3, H, W] shape, where ... means an arbitrary number of leading dimensions
Args:
p (float): probability that image should be converted to grayscale.
Returns:
PIL Image or Tensor: Grayscale version of the input image with probability p and unchanged
with probability (1-p).
- If input image is 1 channel: grayscale version is 1 channel
- If input image is 3 channel: grayscale version is 3 channel with r == g == b
"""
def __init__(self, p=0.1):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be converted to grayscale.
Returns:
PIL Image or Tensor: Randomly grayscaled image.
"""
num_output_channels = F._get_image_num_channels(img)
if torch.rand(1) < self.p:
return F.rgb_to_grayscale(img, num_output_channels=num_output_channels)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={0})'.format(self.p)
class RandomErasing(torch.nn.Module):
""" Randomly selects a rectangle region in an torch Tensor image and erases its pixels.
This transform does not support PIL Image.
'Random Erasing Data Augmentation' by Zhong et al. See https://arxiv.org/abs/1708.04896
Args:
p: probability that the random erasing operation will be performed.
scale: range of proportion of erased area against input image.
ratio: range of aspect ratio of erased area.
value: erasing value. Default is 0. If a single int, it is used to
erase all pixels. If a tuple of length 3, it is used to erase
the R, G, B channels respectively.
If the str 'random', each pixel is erased with a random value.
inplace: boolean to make this transform inplace. Default set to False.
Returns:
Erased Image.
Example:
>>> transform = transforms.Compose([
>>> transforms.RandomHorizontalFlip(),
>>> transforms.ToTensor(),
>>> transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
>>> transforms.RandomErasing(),
>>> ])
"""
def __init__(self, p=0.5, scale=(0.02, 0.33), ratio=(0.3, 3.3), value=0, inplace=False):
super().__init__()
if not isinstance(value, (numbers.Number, str, tuple, list)):
raise TypeError("Argument value should be either a number or str or a sequence")
if isinstance(value, str) and value != "random":
raise ValueError("If value is str, it should be 'random'")
if not isinstance(scale, (tuple, list)):
raise TypeError("Scale should be a sequence")
if not isinstance(ratio, (tuple, list)):
raise TypeError("Ratio should be a sequence")
if (scale[0] > scale[1]) or (ratio[0] > ratio[1]):
warnings.warn("Scale and ratio should be of kind (min, max)")
if scale[0] < 0 or scale[1] > 1:
raise ValueError("Scale should be between 0 and 1")
if p < 0 or p > 1:
raise ValueError("Random erasing probability should be between 0 and 1")
self.p = p
self.scale = scale
self.ratio = ratio
self.value = value
self.inplace = inplace
@staticmethod
def get_params(
img: Tensor, scale: Tuple[float, float], ratio: Tuple[float, float], value: Optional[List[float]] = None
) -> Tuple[int, int, int, int, Tensor]:
"""Get parameters for ``erase`` for a random erasing.
Args:
img (Tensor): Tensor image to be erased.
scale (sequence): range of proportion of erased area against input image.
ratio (sequence): range of aspect ratio of erased area.
value (list, optional): erasing value. If None, it is interpreted as "random"
(erasing each pixel with random values). If ``len(value)`` is 1, it is interpreted as a number,
i.e. ``value[0]``.
Returns:
tuple: params (i, j, h, w, v) to be passed to ``erase`` for random erasing.
"""
img_c, img_h, img_w = img.shape[-3], img.shape[-2], img.shape[-1]
area = img_h * img_w
for _ in range(10):  # try up to 10 random proposals before falling back
erase_area = area * torch.empty(1).uniform_(scale[0], scale[1]).item()
aspect_ratio = torch.empty(1).uniform_(ratio[0], ratio[1]).item()
h = int(round(math.sqrt(erase_area * aspect_ratio)))
w = int(round(math.sqrt(erase_area / aspect_ratio)))
if not (h < img_h and w < img_w):
continue
if value is None:
v = torch.empty([img_c, h, w], dtype=torch.float32).normal_()
else:
v = torch.tensor(value)[:, None, None]
i = torch.randint(0, img_h - h + 1, size=(1, )).item()
j = torch.randint(0, img_w - w + 1, size=(1, )).item()
return i, j, h, w, v
# Return original image
return 0, 0, img_h, img_w, img
def forward(self, img):
"""
Args:
img (Tensor): Tensor image to be erased.
Returns:
img (Tensor): Erased Tensor image.
"""
if torch.rand(1) < self.p:
# cast self.value to script acceptable type
if isinstance(self.value, (int, float)):
value = [self.value, ]
elif isinstance(self.value, str):
value = None
elif isinstance(self.value, tuple):
value = list(self.value)
else:
value = self.value
if value is not None and not (len(value) in (1, img.shape[-3])):
raise ValueError(
"If value is a sequence, it should have either a single value or "
"{} (number of input channels)".format(img.shape[-3])
)
x, y, h, w, v = self.get_params(img, scale=self.scale, ratio=self.ratio, value=value)
return F.erase(img, x, y, h, w, v, self.inplace)
return img
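# Hedged usage sketch (illustrative only): RandomErasing on a batched tensor.
# With value="random" the erased rectangle is filled with normal noise; the
# batch shape below is an assumption for demonstration.
def _demo_random_erasing():
    eraser = RandomErasing(p=1.0, scale=(0.02, 0.1), value="random")
    batch = torch.rand(8, 3, 32, 32)  # leading dims beyond [C, H, W] are preserved
    return eraser(batch)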
class GaussianBlur(torch.nn.Module):
"""Blurs image with randomly chosen Gaussian blur.
If the image is torch Tensor, it is expected
to have [..., C, H, W] shape, where ... means an arbitrary number of leading dimensions.
Args:
kernel_size (int or sequence): Size of the Gaussian kernel.
sigma (float or tuple of float (min, max)): Standard deviation to be used for
creating kernel to perform blurring. If float, sigma is fixed. If it is tuple
of float (min, max), sigma is chosen uniformly at random to lie in the
given range.
Returns:
PIL Image or Tensor: Gaussian blurred version of the input image.
"""
def __init__(self, kernel_size, sigma=(0.1, 2.0)):
super().__init__()
self.kernel_size = _setup_size(kernel_size, "Kernel size should be a tuple/list of two integers")
for ks in self.kernel_size:
if ks <= 0 or ks % 2 == 0:
raise ValueError("Kernel size value should be an odd and positive number.")
if isinstance(sigma, numbers.Number):
if sigma <= 0:
raise ValueError("If sigma is a single number, it must be positive.")
sigma = (sigma, sigma)
elif isinstance(sigma, Sequence) and len(sigma) == 2:
if not 0. < sigma[0] <= sigma[1]:
raise ValueError("sigma values should be positive and of the form (min, max).")
else:
raise ValueError("sigma should be a single number or a list/tuple with length 2.")
self.sigma = sigma
@staticmethod
def get_params(sigma_min: float, sigma_max: float) -> float:
"""Choose sigma for random gaussian blurring.
Args:
sigma_min (float): Minimum standard deviation that can be chosen for blurring kernel.
sigma_max (float): Maximum standard deviation that can be chosen for blurring kernel.
Returns:
float: Standard deviation to be passed to calculate kernel for gaussian blurring.
"""
return torch.empty(1).uniform_(sigma_min, sigma_max).item()
def forward(self, img: Tensor) -> Tensor:
"""
Args:
img (PIL Image or Tensor): image to be blurred.
Returns:
PIL Image or Tensor: Gaussian blurred image
"""
sigma = self.get_params(self.sigma[0], self.sigma[1])
return F.gaussian_blur(img, self.kernel_size, [sigma, sigma])
def __repr__(self):
s = '(kernel_size={}, '.format(self.kernel_size)
s += 'sigma={})'.format(self.sigma)
return self.__class__.__name__ + s
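# Hedged usage sketch (illustrative only): a fixed 5x5 kernel whose sigma is
# drawn uniformly from (0.1, 2.0) on every call; the input shape is an
# assumption for demonstration.
def _demo_gaussian_blur():
    blur = GaussianBlur(kernel_size=5, sigma=(0.1, 2.0))
    img = torch.rand(3, 64, 64)
    return blur(img)  # sigma is re-sampled per call via get_params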
def _setup_size(size, error_msg):
if isinstance(size, numbers.Number):
return int(size), int(size)
if isinstance(size, Sequence) and len(size) == 1:
return size[0], size[0]
if len(size) != 2:
raise ValueError(error_msg)
return size
def _check_sequence_input(x, name, req_sizes):
msg = req_sizes[0] if len(req_sizes) < 2 else " or ".join([str(s) for s in req_sizes])
if not isinstance(x, Sequence):
raise TypeError("{} should be a sequence of length {}.".format(name, msg))
if len(x) not in req_sizes:
raise ValueError("{} should be sequence of length {}.".format(name, msg))
def _setup_angle(x, name, req_sizes=(2, )):
if isinstance(x, numbers.Number):
if x < 0:
raise ValueError("If {} is a single number, it must be positive.".format(name))
x = [-x, x]
else:
_check_sequence_input(x, name, req_sizes)
return [float(d) for d in x]
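# For reference, the helpers above behave as follows (these are the values the
# code computes, not new functionality):
#   _setup_size(5, "...")                                     -> (5, 5)
#   _setup_angle(30, "degrees")                               -> [-30.0, 30.0]
#   _setup_angle((10, 20), "shear")                           -> [10.0, 20.0]
#   _check_sequence_input((1,), "translate", req_sizes=(2,))  -> raises ValueError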
class RandomInvert(torch.nn.Module):
"""Inverts the colors of the given image randomly with a given probability.
If img is a Tensor, it is expected to be in [..., 1 or 3, H, W] format,
where ... means it can have an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
p (float): probability of the image being color inverted. Default value is 0.5
"""
def __init__(self, p=0.5):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be inverted.
Returns:
PIL Image or Tensor: Randomly color inverted image.
"""
if torch.rand(1).item() < self.p:
return F.invert(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class RandomPosterize(torch.nn.Module):
"""Posterize the image randomly with a given probability by reducing the
number of bits for each color channel. If the image is torch Tensor, it should be of type torch.uint8,
and it is expected to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
bits (int): number of bits to keep for each channel (0-8)
p (float): probability of the image being posterized. Default value is 0.5
"""
def __init__(self, bits, p=0.5):
super().__init__()
self.bits = bits
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be posterized.
Returns:
PIL Image or Tensor: Randomly posterized image.
"""
if torch.rand(1).item() < self.p:
return F.posterize(img, self.bits)
return img
def __repr__(self):
return self.__class__.__name__ + '(bits={},p={})'.format(self.bits, self.p)
class RandomSolarize(torch.nn.Module):
"""Solarize the image randomly with a given probability by inverting all pixel
values above a threshold. If img is a Tensor, it is expected to be in [..., 1 or 3, H, W] format,
where ... means it can have an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
threshold (float): all pixels equal to or above this value are inverted.
p (float): probability of the image being solarized. Default value is 0.5
"""
def __init__(self, threshold, p=0.5):
super().__init__()
self.threshold = threshold
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be solarized.
Returns:
PIL Image or Tensor: Randomly solarized image.
"""
if torch.rand(1).item() < self.p:
return F.solarize(img, self.threshold)
return img
def __repr__(self):
return self.__class__.__name__ + '(threshold={},p={})'.format(self.threshold, self.p)
class RandomAdjustSharpness(torch.nn.Module):
"""Adjust the sharpness of the image randomly with a given probability. If the image is torch Tensor,
it is expected to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
Args:
sharpness_factor (float): How much to adjust the sharpness. Can be
any non-negative number. 0 gives a blurred image, 1 gives the
original image and 2 increases the sharpness by a factor of 2.
p (float): probability of the image being sharpened. Default value is 0.5
"""
def __init__(self, sharpness_factor, p=0.5):
super().__init__()
self.sharpness_factor = sharpness_factor
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be sharpened.
Returns:
PIL Image or Tensor: Randomly sharpened image.
"""
if torch.rand(1).item() < self.p:
return F.adjust_sharpness(img, self.sharpness_factor)
return img
def __repr__(self):
return self.__class__.__name__ + '(sharpness_factor={},p={})'.format(self.sharpness_factor, self.p)
class RandomAutocontrast(torch.nn.Module):
"""Autocontrast the pixels of the given image randomly with a given probability.
If the image is torch Tensor, it is expected
to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "L" or "RGB".
Args:
p (float): probability of the image being autocontrasted. Default value is 0.5
"""
def __init__(self, p=0.5):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be autocontrasted.
Returns:
PIL Image or Tensor: Randomly autocontrasted image.
"""
if torch.rand(1).item() < self.p:
return F.autocontrast(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
class RandomEqualize(torch.nn.Module):
"""Equalize the histogram of the given image randomly with a given probability.
If the image is torch Tensor, it is expected
to have [..., 1 or 3, H, W] shape, where ... means an arbitrary number of leading dimensions.
If img is PIL Image, it is expected to be in mode "P", "L" or "RGB".
Args:
p (float): probability of the image being equalized. Default value is 0.5
"""
def __init__(self, p=0.5):
super().__init__()
self.p = p
def forward(self, img):
"""
Args:
img (PIL Image or Tensor): Image to be equalized.
Returns:
PIL Image or Tensor: Randomly equalized image.
"""
if torch.rand(1).item() < self.p:
return F.equalize(img)
return img
def __repr__(self):
return self.__class__.__name__ + '(p={})'.format(self.p)
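# Hedged usage sketch (illustrative only): chaining several of the random
# photometric ops above. Compose is assumed to be defined earlier in this
# module; the probabilities and image shape are arbitrary.
def _demo_random_photometric_pipeline():
    pipeline = Compose([
        RandomInvert(p=0.25),
        RandomPosterize(bits=4, p=0.25),
        RandomSolarize(threshold=128, p=0.25),
        RandomAutocontrast(p=0.25),
        RandomEqualize(p=0.25),
    ])
    img = torch.randint(0, 256, (3, 64, 64), dtype=torch.uint8)  # posterize expects uint8
    return pipeline(img)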
|
py | 1a308f7e9bd682eb831b7022a8afafc9c2739711 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
**Project Name:** MakeHuman
**Product Home Page:** http://www.makehumancommunity.org/
**Github Code Home Page:** https://github.com/makehumancommunity/
**Authors:** Thomas Larsson, Jonas Hauquier
**Copyright(c):** MakeHuman Team 2001-2019
**Licensing:** AGPL3
This file is part of MakeHuman (www.makehumancommunity.org).
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Abstract
--------
TODO
"""
from export import Exporter, ExportConfig
class DaeConfig(ExportConfig):
def __init__(self):
ExportConfig.__init__(self)
self.useRelPaths = True
self.useNormals = True
self.yUpFaceZ = True
self.yUpFaceX = False
self.zUpFaceNegY = False
self.zUpFaceX = False
self.localY = True
self.localX = False
self.localG = False
self.facePoseUnits = False
self.hiddenGeom = False
# TODO: preferably use these (perhaps as an enum) instead of the bools above
# TODO: move these to the ExportConfig super class
@property
def meshOrientation(self):
if self.yUpFaceZ:
return 'yUpFaceZ'
if self.yUpFaceX:
return 'yUpFaceX'
if self.zUpFaceNegY:
return 'zUpFaceNegY'
if self.zUpFaceX:
return 'zUpFaceX'
return 'yUpFaceZ'
@property
def localBoneAxis(self):
if self.localY:
return 'y'
if self.localX:
return 'x'
if self.localG:
return 'g'
return 'y'
@property
def upAxis(self):
if self.meshOrientation.startswith('yUp'):
return 1
elif self.meshOrientation.startswith('zUp'):
return 2
'''
@property
def offsetVect(self):
result = [0.0, 0.0, 0.0]
result[self.upAxis] = self.offset
return result
'''
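# Hedged illustration (not part of MakeHuman): how the orientation booleans
# above map onto the derived properties.
#   cfg = DaeConfig()      # defaults: yUpFaceZ=True, localY=True
#   cfg.meshOrientation    # -> 'yUpFaceZ'
#   cfg.localBoneAxis      # -> 'y'
#   cfg.upAxis             # -> 1 (the Y axis)
#   cfg.yUpFaceZ, cfg.zUpFaceNegY = False, True
#   cfg.meshOrientation    # -> 'zUpFaceNegY'; cfg.upAxis -> 2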
class ExporterCollada(Exporter):
def __init__(self):
Exporter.__init__(self)
self.name = "Collada (dae)"
self.filter = "Collada (*.dae)"
self.fileExtension = "dae"
self.orderPriority = 95.0
def build(self, options, taskview):
import gui
Exporter.build(self, options, taskview)
self.hiddenGeom = options.addWidget(gui.CheckBox("Helper geometry", False))
self.facePoseUnits = options.addWidget(gui.CheckBox("Facial pose-units", False))
orients = []
box = options.addWidget(gui.GroupBox("Orientation"))
self.yUpFaceZ = box.addWidget(gui.RadioButton(orients, "Y up, face Z", True))
self.yUpFaceX = box.addWidget(gui.RadioButton(orients, "Y up, face X", False))
self.zUpFaceNegY = box.addWidget(gui.RadioButton(orients, "Z up, face -Y", False))
self.zUpFaceX = box.addWidget(gui.RadioButton(orients, "Z up, face X", False))
csyses = []
box = options.addWidget(gui.GroupBox("Bone orientation"))
self.localY = box.addWidget(gui.RadioButton(csyses, "Along local Y", True))
self.localX = box.addWidget(gui.RadioButton(csyses, "Along local X", False))
self.localG = box.addWidget(gui.RadioButton(csyses, "Local = Global", False))
def export(self, human, filename):
from .mh2collada import exportCollada
cfg = self.getConfig()
cfg.setHuman(human)
exportCollada(filename("dae"), cfg)
def getConfig(self):
cfg = DaeConfig()
cfg.feetOnGround = self.feetOnGround.selected
cfg.scale,cfg.unit = self.taskview.getScale()
cfg.yUpFaceZ = self.yUpFaceZ.selected
cfg.yUpFaceX = self.yUpFaceX.selected
cfg.zUpFaceNegY = self.zUpFaceNegY.selected
cfg.zUpFaceX = self.zUpFaceX.selected
cfg.localY = self.localY.selected
cfg.localX = self.localX.selected
cfg.localG = self.localG.selected
cfg.facePoseUnits = self.facePoseUnits.selected
cfg.hiddenGeom = self.hiddenGeom.selected
return cfg
def load(app):
app.addExporter(ExporterCollada())
def unload(app):
pass
|
py | 1a308fc844743316e91889144701d06fd1f68333 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/10/04 14:23
# @Author : Iydon
# @File : course3.5.py
import numpy as np
from Poly import *
import matplotlib.pyplot as plt
def natural_cubic_spline(xs:list, fxs:list, display:bool=False):
"""
Cubic spline interpolation.
"""
n = len(xs)
hs = [xs[i+1]-xs[i] for i in range(n-1)]
A = np.diag([0]+hs[1:],1) + np.diag(hs[:-1]+[0],-1)
A += np.diag([1]+[2*(hs[i+1]+hs[i]) for i in range(n-2)]+[1])
bs = [0]+[3/hs[i+1]*(fxs[i+2]-fxs[i+1])-3/hs[i]*(fxs[i+1]-fxs[i]) for i in range(n-2)]+[0]
# a, b, c, d: end with 'x'.
cx = [i[0] for i in (np.linalg.inv(A) * np.matrix(bs).transpose()).tolist()]
bx = [1/hs[i]*(fxs[i+1]-fxs[i])-hs[i]/3*(2*cx[i]+cx[i+1]) for i in range(n-1)]
dx = [1/3/hs[i]*(cx[i+1]-cx[i]) for i in range(n-1)]
# S_i(x)
Ss = [fxs[i]+bx[i]*Poly([1,-xs[i]])+cx[i]*Poly([1,-xs[i]])**2+dx[i]*Poly([1,-xs[i]])**3 for i in range(n-1)]
if display: print(fxs, bx, cx, dx, sep="\n\n\n")
return Ss
def clamped_cubic_spline(xs:list, fxs:list, boundary:list=[0,0]):
"""
Clamped cubic spline interpolation (endpoint first derivatives fixed by `boundary`).
"""
n = len(xs)
hs = [xs[i+1]-xs[i] for i in range(n-1)]
A = np.diag(hs,1) + np.diag(hs,-1)
A += np.diag([2*hs[0]]+[2*(hs[i+1]+hs[i]) for i in range(n-2)]+[2*hs[-1]])
head = [3/hs[0]*(fxs[1]-fxs[0]) - 3*boundary[0]]
tail = [3*boundary[-1] - 3/hs[-1]*(fxs[-1]-fxs[-2])]
bs = head+[3/hs[i+1]*(fxs[i+2]-fxs[i+1])-3/hs[i]*(fxs[i+1]-fxs[i]) for i in range(n-2)]+tail
# a, b, c, d: end with 'x'.
cx = [i[0] for i in (np.linalg.inv(A) * np.matrix(bs).transpose()).tolist()]
bx = [1/hs[i]*(fxs[i+1]-fxs[i])-hs[i]/3*(2*cx[i]+cx[i+1]) for i in range(n-1)]
dx = [1/3/hs[i]*(cx[i+1]-cx[i]) for i in range(n-1)]
# S_i(x)
Ss = [fxs[i]+bx[i]*Poly([1,-xs[i]])+cx[i]*Poly([1,-xs[i]])**2+dx[i]*Poly([1,-xs[i]])**3 for i in range(n-1)]
return Ss
def cubic_spline_lambdify(S:str, xs:list):
"""
Lambdify the cubic spline function.
"""
f = ["%s[%d].lambdify()(x)*(%s<=x<%s)"%(S, i, xs[i], xs[i+1]) for i in range(len(xs)-1)]
return eval("lambda x: %s"%"+".join(f))
xs = [0.9,1.3,1.9,2.1,2.6,3.0,3.9,4.4,4.7,5.0,6.0,7.0,8.0,9.2,10.5,11.3,11.6,12.0,12.6,13.0,13.3]
fxs = [1.3,1.5,1.85,2.1,2.6,2.7,2.4,2.15,2.05,2.1,2.25,2.3,2.25,1.95,1.4,0.9,0.7,0.6,0.5,0.4,0.25]
S = natural_cubic_spline(xs, fxs)
f = cubic_spline_lambdify("S", xs)
plt.plot(xs, fxs, marker="*", color="orange")
x = np.linspace(0.9, 13.29, 100)
y = [f(xi) for xi in x]
plt.plot(x, y, color="blue")
plt.axis("equal")
plt.grid()
plt.show()
|
py | 1a308fd9e1ebc079500cbd82442e5e805649c7a7 | # Copyright Contributors to the Amundsen project.
# SPDX-License-Identifier: Apache-2.0
from threading import Lock
from flask import current_app
from werkzeug.utils import import_string
from search_service import config
from search_service.proxy.base import BaseProxy
_proxy_client = None
_proxy_client_lock = Lock()
DEFAULT_PAGE_SIZE = 10
def get_proxy_client() -> BaseProxy:
"""
Provides singleton proxy client based on the config
:return: Proxy instance of any subclass of BaseProxy
"""
global _proxy_client
if _proxy_client:
return _proxy_client
with _proxy_client_lock:
if _proxy_client:
return _proxy_client
else:
obj = current_app.config[config.PROXY_CLIENT_KEY]
# Gather all the configuration to create a Proxy Client
host = current_app.config[config.PROXY_ENDPOINT]
user = current_app.config[config.PROXY_USER]
password = current_app.config[config.PROXY_PASSWORD]
client = import_string(current_app.config[config.PROXY_CLIENT])
# number of results per search page
page_size = current_app.config.get(config.SEARCH_PAGE_SIZE_KEY, DEFAULT_PAGE_SIZE)
_proxy_client = client(host=host, user=user, password=password, client=obj, page_size=page_size)
return _proxy_client
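# Hedged usage sketch (illustrative only): the factory must run inside a Flask
# application context so `current_app` resolves; `create_app` below is a
# placeholder for however the service constructs its Flask app.
#   with create_app().app_context():
#       proxy = get_proxy_client()          # first call builds the client
#       assert proxy is get_proxy_client()  # later calls reuse the singleton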
|
py | 1a3090b1f1b7c8ec957944ac445ccef2d60698f9 | import sys
import os.path as op
rpws_folder = op.dirname(op.dirname(__file__))
sys.path.append(rpws_folder)
print('sys.path += {}'.format(rpws_folder))
from rpws import RevitServer
import testconfig as config
rs = RevitServer(config.test_server_name, config.test_server_version)
# Walk the server folder tree; markers: @d = folder, @f = file, @m = model.
for parent, folders, files, models in rs.walk(config.test_folder):
print(parent)
for fd in folders:
print('\t@d {}'.format(fd.path))
for f in files:
print('\t@f {}'.format(f.path))
for m in models:
print('\t@m {}'.format(m.path))
|
py | 1a30918f59610302054f0f57c278941b01e37f05 | import random
from collections import namedtuple
from abc import abstractmethod, ABC
from typing import TypeVar
from msdm.core.problemclasses.pomdp.pomdp import \
State, Action, Observation, PartiallyObservableMDP
from msdm.core.problemclasses.pomdp.tabularpomdp import TabularPOMDP, Belief
from msdm.core.distributions import Distribution, DictDistribution
from msdm.core.algorithmclasses import Result
AgentState = TypeVar('AgentState')
Step = namedtuple("Step", "state agentstate action nextstate reward observation nextagentstate")
class POMDPPolicy(ABC):
@abstractmethod
def initial_agentstate(self) -> AgentState:
pass
@abstractmethod
def action_dist(self, ag : AgentState) -> Distribution[Action]:
pass
@abstractmethod
def next_agentstate(self, ag : AgentState, a : Action, o : Observation) -> AgentState:
pass
def run_on(self,
pomdp: PartiallyObservableMDP,
initial_state=None,
initial_agentstate=None,
max_steps=int(2 ** 30),
rng=random):
if initial_state is None:
initial_state = pomdp.initial_state_dist().sample()
if initial_agentstate is None:
initial_agentstate = self.initial_agentstate()
traj = []
s = initial_state
ag = initial_agentstate
for t in range(max_steps):
if pomdp.is_terminal(s):
break
a = self.action_dist(ag).sample(rng=rng)
ns = pomdp.next_state_dist(s, a).sample(rng=rng)
r = pomdp.reward(s, a, ns)
o = pomdp.observation_dist(a, ns).sample(rng=rng)
nag = self.next_agentstate(ag, a, o)
traj.append(Step(s, ag, a, ns, r, o, nag))
s = ns
ag = nag
traj.append(Step(s, ag, None, None, None, None, None))
# Per-field sequences (states, agent states, actions, rewards, ...) can be
# recovered from the returned trajectory with zip(*traj) when needed.
return traj
class ValueBasedTabularPOMDPPolicy(POMDPPolicy):
"""
POMDP policy that selects actions based on a
representation of action values at a belief state.
"""
def __init__(self, pomdp : TabularPOMDP):
self.pomdp = pomdp
@abstractmethod
def action_value(self, b : Belief, a : Action):
pass
def initial_agentstate(self):
return Belief(tuple(self.pomdp.state_list), tuple(self.pomdp.initial_state_vec))
def action_dist(self, ag : Belief):
av = {a: self.action_value(ag, a) for a in self.pomdp.action_list}
maxv = max(av.values())
return DictDistribution.uniform([a for a, v in av.items() if v == maxv])
def next_agentstate(self, ag, a, o):
s_dist = DictDistribution(zip(*ag))
ns_dist = self.pomdp.state_estimator(s_dist, a, o)
ss = tuple(self.pomdp.state_list)
return Belief(ss, tuple([ns_dist.prob(ns) for ns in ss]))
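# Hedged illustration (not part of msdm): a minimal concrete subclass. Valuing
# every action identically makes action_dist uniform over actions, which is
# enough to drive run_on end to end on any TabularPOMDP instance.
class _UniformValuePolicy(ValueBasedTabularPOMDPPolicy):
    def action_value(self, b: Belief, a: Action) -> float:
        return 0.0  # all actions tie, so action_dist is uniform
# Usage sketch: _UniformValuePolicy(pomdp).run_on(pomdp, max_steps=20)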
|
gyp | 1a30919ff2ff549be6e1bebd0c68b2a0f2806d5f | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'target_defaults': {
'variables': {
'chromium_code': 1,
},
'include_dirs': [
'<(DEPTH)',
],
},
'targets': [
{
'target_name': 'cloud_print_service_lib',
'type': 'static_library',
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/build/temp_gyp/googleurl.gyp:googleurl',
'<(DEPTH)/net/net.gyp:net',
'<(DEPTH)/printing/printing.gyp:printing',
],
'sources': [
'service_state.cc',
'service_state.h',
'service_switches.cc',
'service_switches.h',
'win/chrome_launcher.cc',
'win/chrome_launcher.h',
'win/local_security_policy.cc',
'win/local_security_policy.h',
],
'conditions': [
['OS=="win"', {
'dependencies': [
'<(DEPTH)/chrome/chrome.gyp:launcher_support',
],
}],
],
},
{
'target_name': 'cloud_print_service',
'type': 'executable',
'include_dirs': [
# To allow including "version.h"
'<(SHARED_INTERMEDIATE_DIR)',
],
'sources': [
'win/cloud_print_service.cc',
'win/cloud_print_service.h',
'win/cloud_print_service.rc',
'win/resource.h',
],
'dependencies': [
'cloud_print_service_lib',
],
'conditions': [
['OS=="win"', {
'dependencies': [
'<(DEPTH)/chrome/chrome.gyp:chrome_version_header',
],
}],
],
'msvs_settings': {
'VCLinkerTool': {
'SubSystem': '1', # Set /SUBSYSTEM:CONSOLE
'UACExecutionLevel': '2', # /level='requireAdministrator'
'AdditionalDependencies': [
'secur32.lib',
],
},
},
},
],
}
|
py | 1a3092a0502564d366bb3176ad46091e181d39bc | #Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Download, computes and stores the checksums."""
from absl import app
import tensorflow_datasets as tfds
from tensorflow_graphics.datasets.modelnet40 import ModelNet40
def main(_):
config = tfds.download.DownloadConfig(register_checksums=True)
modelnet40_builder = ModelNet40(data_dir="~/tensorflow_datasets")
modelnet40_builder.download_and_prepare(download_config=config)
if __name__ == "__main__":
app.run(main)
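# Hedged note (illustrative only): once the checksums are registered, the
# dataset can be consumed through the standard TFDS builder API, e.g.:
#   builder = ModelNet40(data_dir="~/tensorflow_datasets")
#   ds = builder.as_dataset(split="train")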
|
py | 1a3093c150ac67fc7335e315f3c99aeb54e64e4b | # flake8: noqa
from __future__ import unicode_literals
from .abc import (
ABCIE,
ABCIViewIE,
)
from .abcnews import (
AbcNewsIE,
AbcNewsVideoIE,
)
from .abcotvs import (
ABCOTVSIE,
ABCOTVSClipsIE,
)
from .academicearth import AcademicEarthCourseIE
from .acast import (
ACastIE,
ACastChannelIE,
)
from .adn import ADNIE
from .adobeconnect import AdobeConnectIE
from .adobetv import (
AdobeTVEmbedIE,
AdobeTVIE,
AdobeTVShowIE,
AdobeTVChannelIE,
AdobeTVVideoIE,
)
from .adultswim import AdultSwimIE
from .aenetworks import (
AENetworksIE,
AENetworksCollectionIE,
AENetworksShowIE,
HistoryTopicIE,
HistoryPlayerIE,
BiographyIE,
)
from .afreecatv import AfreecaTVIE
from .airmozilla import AirMozillaIE
from .aljazeera import AlJazeeraIE
from .alphaporno import AlphaPornoIE
from .amara import AmaraIE
from .alura import (
AluraIE,
AluraCourseIE
)
from .amcnetworks import AMCNetworksIE
from .animelab import (
AnimeLabIE,
AnimeLabShowsIE,
)
from .americastestkitchen import (
AmericasTestKitchenIE,
AmericasTestKitchenSeasonIE,
)
from .animeondemand import AnimeOnDemandIE
from .anvato import AnvatoIE
from .aol import AolIE
from .allocine import AllocineIE
from .aliexpress import AliExpressLiveIE
from .apa import APAIE
from .aparat import AparatIE
from .appleconnect import AppleConnectIE
from .appletrailers import (
AppleTrailersIE,
AppleTrailersSectionIE,
)
from .applepodcasts import ApplePodcastsIE
from .archiveorg import (
ArchiveOrgIE,
YoutubeWebArchiveIE,
)
from .arcpublishing import ArcPublishingIE
from .arkena import ArkenaIE
from .ard import (
ARDBetaMediathekIE,
ARDIE,
ARDMediathekIE,
)
from .arte import (
ArteTVIE,
ArteTVEmbedIE,
ArteTVPlaylistIE,
)
from .arnes import ArnesIE
from .asiancrush import (
AsianCrushIE,
AsianCrushPlaylistIE,
)
from .atresplayer import AtresPlayerIE
from .atttechchannel import ATTTechChannelIE
from .atvat import ATVAtIE
from .audimedia import AudiMediaIE
from .audioboom import AudioBoomIE
from .audiomack import AudiomackIE, AudiomackAlbumIE
from .audius import (
AudiusIE,
AudiusTrackIE,
AudiusPlaylistIE,
AudiusProfileIE,
)
from .awaan import (
AWAANIE,
AWAANVideoIE,
AWAANLiveIE,
AWAANSeasonIE,
)
from .azmedien import AZMedienIE
from .baidu import BaiduVideoIE
from .bandaichannel import BandaiChannelIE
from .bandcamp import (
BandcampIE,
BandcampAlbumIE,
BandcampWeeklyIE,
BandcampMusicIE,
)
from .bannedvideo import BannedVideoIE
from .bbc import (
BBCCoUkIE,
BBCCoUkArticleIE,
BBCCoUkIPlayerEpisodesIE,
BBCCoUkIPlayerGroupIE,
BBCCoUkPlaylistIE,
BBCIE,
)
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .bellmedia import BellMediaIE
from .beatport import BeatportIE
from .bet import BetIE
from .bfi import BFIPlayerIE
from .bfmtv import (
BFMTVIE,
BFMTVLiveIE,
BFMTVArticleIE,
)
from .bibeltv import BibelTVIE
from .bigflix import BigflixIE
from .bild import BildIE
from .bilibili import (
BiliBiliIE,
BiliBiliSearchIE,
BilibiliCategoryIE,
BiliBiliBangumiIE,
BilibiliAudioIE,
BilibiliAudioAlbumIE,
BiliBiliPlayerIE,
BilibiliChannelIE,
)
from .biobiochiletv import BioBioChileTVIE
from .bitchute import (
BitChuteIE,
BitChuteChannelIE,
)
from .bitwave import (
BitwaveReplayIE,
BitwaveStreamIE,
)
from .biqle import BIQLEIE
from .blackboardcollaborate import BlackboardCollaborateIE
from .bleacherreport import (
BleacherReportIE,
BleacherReportCMSIE,
)
from .bloomberg import BloombergIE
from .bokecc import BokeCCIE
from .bongacams import BongaCamsIE
from .bostonglobe import BostonGlobeIE
from .box import BoxIE
from .bpb import BpbIE
from .br import (
BRIE,
BRMediathekIE,
)
from .bravotv import BravoTVIE
from .breakcom import BreakIE
from .brightcove import (
BrightcoveLegacyIE,
BrightcoveNewIE,
)
from .businessinsider import BusinessInsiderIE
from .buzzfeed import BuzzFeedIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .camdemy import (
CamdemyIE,
CamdemyFolderIE
)
from .cammodels import CamModelsIE
from .camwithher import CamWithHerIE
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .canvas import (
CanvasIE,
CanvasEenIE,
VrtNUIE,
DagelijkseKostIE,
)
from .carambatv import (
CarambaTVIE,
CarambaTVPageIE,
)
from .cartoonnetwork import CartoonNetworkIE
from .cbc import (
CBCIE,
CBCPlayerIE,
CBCWatchVideoIE,
CBCWatchIE,
CBCOlympicsIE,
)
from .cbs import CBSIE
from .cbslocal import (
CBSLocalIE,
CBSLocalArticleIE,
)
from .cbsinteractive import CBSInteractiveIE
from .cbsnews import (
CBSNewsEmbedIE,
CBSNewsIE,
CBSNewsLiveVideoIE,
)
from .cbssports import (
CBSSportsEmbedIE,
CBSSportsIE,
TwentyFourSevenSportsIE,
)
from .ccc import (
CCCIE,
CCCPlaylistIE,
)
from .ccma import CCMAIE
from .cctv import CCTVIE
from .cda import CDAIE
from .ceskatelevize import (
CeskaTelevizeIE,
CeskaTelevizePoradyIE,
)
from .channel9 import Channel9IE
from .charlierose import CharlieRoseIE
from .chaturbate import ChaturbateIE
from .chilloutzone import ChilloutzoneIE
from .chirbit import (
ChirbitIE,
ChirbitProfileIE,
)
from .cinchcast import CinchcastIE
from .cinemax import CinemaxIE
from .ciscolive import (
CiscoLiveSessionIE,
CiscoLiveSearchIE,
)
from .cjsw import CJSWIE
from .cliphunter import CliphunterIE
from .clippit import ClippitIE
from .cliprs import ClipRsIE
from .clipsyndicate import ClipsyndicateIE
from .closertotruth import CloserToTruthIE
from .cloudflarestream import CloudflareStreamIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .clyp import ClypIE
from .cmt import CMTIE
from .cnbc import (
CNBCIE,
CNBCVideoIE,
)
from .cnn import (
CNNIE,
CNNBlogsIE,
CNNArticleIE,
)
from .coub import CoubIE
from .comedycentral import (
ComedyCentralIE,
ComedyCentralTVIE,
)
from .commonmistakes import CommonMistakesIE, UnicodeBOMIE
from .commonprotocols import (
MmsIE,
RtmpIE,
ViewSourceIE,
)
from .condenast import CondeNastIE
from .contv import CONtvIE
from .corus import CorusIE
from .cracked import CrackedIE
from .crackle import CrackleIE
from .crooksandliars import CrooksAndLiarsIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .ctsnews import CtsNewsIE
from .ctv import CTVIE
from .ctvnews import CTVNewsIE
from .cultureunplugged import CultureUnpluggedIE
from .curiositystream import (
CuriosityStreamIE,
CuriosityStreamCollectionIE,
)
from .cwtv import CWTVIE
from .dailymail import DailyMailIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
)
from .daum import (
DaumIE,
DaumClipIE,
DaumPlaylistIE,
DaumUserIE,
)
from .dbtv import DBTVIE
from .dctp import DctpTvIE
from .deezer import (
DeezerPlaylistIE,
DeezerAlbumIE,
)
from .democracynow import DemocracynowIE
from .dfb import DFBIE
from .dhm import DHMIE
from .digg import DiggIE
from .discoveryplusindia import (
DiscoveryPlusIndiaIE,
DiscoveryPlusIndiaShowIE,
)
from .dotsub import DotsubIE
from .douyin import DouyinIE
from .douyutv import (
DouyuShowIE,
DouyuTVIE,
)
from .dplay import (
DPlayIE,
DiscoveryPlusIE,
HGTVDeIE,
ScienceChannelIE
)
from .dreisat import DreiSatIE
from .drbonanza import DRBonanzaIE
from .drtuber import DrTuberIE
from .drtv import (
DRTVIE,
DRTVLiveIE,
)
from .dtube import DTubeIE
from .dvtv import DVTVIE
from .duboku import (
DubokuIE,
DubokuPlaylistIE
)
from .dumpert import DumpertIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .discoverygo import (
DiscoveryGoIE,
DiscoveryGoPlaylistIE,
)
from .discoverynetworks import DiscoveryNetworksDeIE
from .discoveryvr import DiscoveryVRIE
from .disney import DisneyIE
from .dispeak import DigitallySpeakingIE
from .doodstream import DoodStreamIE
from .dropbox import DropboxIE
from .dw import (
DWIE,
DWArticleIE,
)
from .eagleplatform import EaglePlatformIE
from .ebaumsworld import EbaumsWorldIE
from .echomsk import EchoMskIE
from .egghead import (
EggheadCourseIE,
EggheadLessonIE,
)
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentube import (
EllenTubeIE,
EllenTubeVideoIE,
EllenTubePlaylistIE,
)
from .elonet import ElonetIE
from .elpais import ElPaisIE
from .embedly import EmbedlyIE
from .engadget import EngadgetIE
from .epicon import (
EpiconIE,
EpiconSeriesIE,
)
from .eporner import EpornerIE
from .eroprofile import (
EroProfileIE,
EroProfileAlbumIE,
)
from .escapist import EscapistIE
from .espn import (
ESPNIE,
ESPNArticleIE,
FiveThirtyEightIE,
)
from .esri import EsriVideoIE
from .europa import EuropaIE
from .expotv import ExpoTVIE
from .expressen import ExpressenIE
from .extremetube import ExtremeTubeIE
from .eyedotv import EyedoTVIE
from .facebook import (
FacebookIE,
FacebookPluginsVideoIE,
)
from .fancode import (
FancodeVodIE,
FancodeLiveIE
)
from .faz import FazIE
from .fc2 import (
FC2IE,
FC2EmbedIE,
)
from .fczenit import FczenitIE
from .filmmodu import FilmmoduIE
from .filmon import (
FilmOnIE,
FilmOnChannelIE,
)
from .filmweb import FilmwebIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fivetv import FiveTVIE
from .flickr import FlickrIE
from .folketinget import FolketingetIE
from .footyroom import FootyRoomIE
from .formula1 import Formula1IE
from .fourtube import (
FourTubeIE,
PornTubeIE,
PornerBrosIE,
FuxIE,
)
from .fox import FOXIE
from .fox9 import (
FOX9IE,
FOX9NewsIE,
)
from .foxgay import FoxgayIE
from .foxnews import (
FoxNewsIE,
FoxNewsArticleIE,
)
from .foxsports import FoxSportsIE
from .franceculture import FranceCultureIE
from .franceinter import FranceInterIE
from .francetv import (
FranceTVIE,
FranceTVSiteIE,
FranceTVEmbedIE,
FranceTVInfoIE,
FranceTVInfoSportIE,
FranceTVJeunesseIE,
GenerationWhatIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .freshlive import FreshLiveIE
from .frontendmasters import (
FrontendMastersIE,
FrontendMastersLessonIE,
FrontendMastersCourseIE
)
from .fujitv import FujiTVFODPlus7IE
from .funimation import (
FunimationIE,
FunimationPageIE,
FunimationShowIE,
)
from .funk import FunkIE
from .fusion import FusionIE
from .gab import GabTVIE
from .gaia import GaiaIE
from .gameinformer import GameInformerIE
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gaskrank import GaskrankIE
from .gazeta import GazetaIE
from .gdcvault import GDCVaultIE
from .gedidigital import GediDigitalIE
from .generic import GenericIE
from .gfycat import GfycatIE
from .giantbomb import GiantBombIE
from .giga import GigaIE
from .glide import GlideIE
from .globo import (
GloboIE,
GloboArticleIE,
)
from .go import GoIE
from .godtube import GodTubeIE
from .golem import GolemIE
from .googledrive import GoogleDriveIE
from .googlepodcasts import (
GooglePodcastsIE,
GooglePodcastsFeedIE,
)
from .googlesearch import GoogleSearchIE
from .goshgay import GoshgayIE
from .gputechconf import GPUTechConfIE
from .groupon import GrouponIE
from .hbo import HBOIE
from .hearthisat import HearThisAtIE
from .heise import HeiseIE
from .hellporno import HellPornoIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .hgtv import HGTVComShowIE
from .hketv import HKETVIE
from .hidive import HiDiveIE
from .historicfilms import HistoricFilmsIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hitrecord import HitRecordIE
from .hornbunny import HornBunnyIE
from .hotnewhiphop import HotNewHipHopIE
from .hotstar import (
HotStarIE,
HotStarPlaylistIE,
HotStarSeriesIE,
)
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .hrfensehen import HRFernsehenIE
from .hrti import (
HRTiIE,
HRTiPlaylistIE,
)
from .huajiao import HuajiaoIE
from .huffpost import HuffPostIE
from .hungama import (
HungamaIE,
HungamaSongIE,
HungamaAlbumPlaylistIE,
)
from .hypem import HypemIE
from .ign import (
IGNIE,
IGNVideoIE,
IGNArticleIE,
)
from .iheart import (
IHeartRadioIE,
IHeartRadioPodcastIE,
)
from .imdb import (
ImdbIE,
ImdbListIE
)
from .imgur import (
ImgurIE,
ImgurAlbumIE,
ImgurGalleryIE,
)
from .ina import InaIE
from .inc import IncIE
from .indavideo import IndavideoEmbedIE
from .infoq import InfoQIE
from .instagram import (
InstagramIE,
InstagramUserIE,
InstagramTagIE,
)
from .internazionale import InternazionaleIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .iqiyi import IqiyiIE
from .ir90tv import Ir90TvIE
from .itv import (
ITVIE,
ITVBTCCIE,
)
from .ivi import (
IviIE,
IviCompilationIE
)
from .ivideon import IvideonIE
from .iwara import IwaraIE
from .izlesene import IzleseneIE
from .jamendo import (
JamendoIE,
JamendoAlbumIE,
)
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .joj import JojIE
from .jwplatform import JWPlatformIE
from .kakao import KakaoIE
from .kaltura import KalturaIE
from .kankan import KankanIE
from .karaoketv import KaraoketvIE
from .karrierevideos import KarriereVideosIE
from .keezmovies import KeezMoviesIE
from .ketnet import KetnetIE
from .khanacademy import (
KhanAcademyIE,
KhanAcademyUnitIE,
)
from .kickstarter import KickStarterIE
from .kinja import KinjaEmbedIE
from .kinopoisk import KinoPoiskIE
from .konserthusetplay import KonserthusetPlayIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .kusi import KUSIIE
from .kuwo import (
KuwoIE,
KuwoAlbumIE,
KuwoChartIE,
KuwoSingerIE,
KuwoCategoryIE,
KuwoMvIE,
)
from .la7 import (
LA7IE,
LA7PodcastEpisodeIE,
LA7PodcastIE,
)
from .laola1tv import (
Laola1TvEmbedIE,
Laola1TvIE,
EHFTVIE,
ITTFIE,
)
from .lbry import (
LBRYIE,
LBRYChannelIE,
)
from .lci import LCIIE
from .lcp import (
LcpPlayIE,
LcpIE,
)
from .lecture2go import Lecture2GoIE
from .lecturio import (
LecturioIE,
LecturioCourseIE,
LecturioDeCourseIE,
)
from .leeco import (
LeIE,
LePlaylistIE,
LetvCloudIE,
)
from .lego import LEGOIE
from .lemonde import LemondeIE
from .lenta import LentaIE
from .libraryofcongress import LibraryOfCongressIE
from .libsyn import LibsynIE
from .lifenews import (
LifeNewsIE,
LifeEmbedIE,
)
from .limelight import (
LimelightMediaIE,
LimelightChannelIE,
LimelightChannelListIE,
)
from .line import (
LineTVIE,
LineLiveIE,
LineLiveChannelIE,
)
from .linkedin import (
LinkedInLearningIE,
LinkedInLearningCourseIE,
)
from .linuxacademy import LinuxAcademyIE
from .litv import LiTVIE
from .livejournal import LiveJournalIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lnkgo import LnkGoIE
from .localnews8 import LocalNews8IE
from .lovehomeporn import LoveHomePornIE
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .magentamusik360 import MagentaMusik360IE
from .mailru import (
MailRuIE,
MailRuMusicIE,
MailRuMusicSearchIE,
)
from .malltv import MallTVIE
from .mangomolo import (
MangomoloVideoIE,
MangomoloLiveIE,
)
from .manoto import (
ManotoTVIE,
ManotoTVShowIE,
ManotoTVLiveIE,
)
from .manyvids import ManyVidsIE
from .maoritv import MaoriTVIE
from .markiza import (
MarkizaIE,
MarkizaPageIE,
)
from .massengeschmacktv import MassengeschmackTVIE
from .matchtv import MatchTVIE
from .mdr import MDRIE
from .medaltv import MedalTVIE
from .mediaset import MediasetIE
from .mediasite import (
MediasiteIE,
MediasiteCatalogIE,
MediasiteNamedCatalogIE,
)
from .medici import MediciIE
from .megaphone import MegaphoneIE
from .meipai import MeipaiIE
from .melonvod import MelonVODIE
from .meta import METAIE
from .metacafe import MetacafeIE
from .metacritic import MetacriticIE
from .mgoon import MgoonIE
from .mgtv import MGTVIE
from .miaopai import MiaoPaiIE
from .microsoftvirtualacademy import (
MicrosoftVirtualAcademyIE,
MicrosoftVirtualAcademyCourseIE,
)
from .mildom import (
MildomIE,
MildomVodIE,
MildomUserVodIE,
)
from .minds import (
MindsIE,
MindsChannelIE,
MindsGroupIE,
)
from .ministrygrid import MinistryGridIE
from .minoto import MinotoIE
from .miomio import MioMioIE
from .mirrativ import (
MirrativIE,
MirrativUserIE,
)
from .mit import TechTVMITIE, OCWMITIE
from .mitele import MiTeleIE
from .mixcloud import (
MixcloudIE,
MixcloudUserIE,
MixcloudPlaylistIE,
)
from .mlb import (
MLBIE,
MLBVideoIE,
)
from .mnet import MnetIE
from .moevideo import MoeVideoIE
from .mofosex import (
MofosexIE,
MofosexEmbedIE,
)
from .mojvideo import MojvideoIE
from .morningstar import MorningstarIE
from .motherless import (
MotherlessIE,
MotherlessGroupIE
)
from .motorsport import MotorsportIE
from .movieclips import MovieClipsIE
from .moviezine import MoviezineIE
from .movingimage import MovingImageIE
from .msn import MSNIE
from .mtv import (
MTVIE,
MTVVideoIE,
MTVServicesEmbeddedIE,
MTVDEIE,
MTVJapanIE,
MTVItaliaIE,
MTVItaliaProgrammaIE,
)
from .muenchentv import MuenchenTVIE
from .mwave import MwaveIE, MwaveMeetGreetIE
from .mxplayer import (
MxplayerIE,
MxplayerShowIE,
)
from .mychannels import MyChannelsIE
from .myspace import MySpaceIE, MySpaceAlbumIE
from .myspass import MySpassIE
from .myvi import (
MyviIE,
MyviEmbedIE,
)
from .myvideoge import MyVideoGeIE
from .myvidster import MyVidsterIE
from .nationalgeographic import (
NationalGeographicVideoIE,
NationalGeographicTVIE,
)
from .naver import (
NaverIE,
NaverLiveIE,
)
from .nba import (
NBAWatchEmbedIE,
NBAWatchIE,
NBAWatchCollectionIE,
NBAEmbedIE,
NBAIE,
NBAChannelIE,
)
from .nbc import (
NBCIE,
NBCNewsIE,
NBCOlympicsIE,
NBCOlympicsStreamIE,
NBCSportsIE,
NBCSportsStreamIE,
NBCSportsVPlayerIE,
)
from .ndr import (
NDRIE,
NJoyIE,
NDREmbedBaseIE,
NDREmbedIE,
NJoyEmbedIE,
)
from .ndtv import NDTVIE
from .nebula import NebulaIE
from .nerdcubed import NerdCubedFeedIE
from .netzkino import NetzkinoIE
from .neteasemusic import (
NetEaseMusicIE,
NetEaseMusicAlbumIE,
NetEaseMusicSingerIE,
NetEaseMusicListIE,
NetEaseMusicMvIE,
NetEaseMusicProgramIE,
NetEaseMusicDjRadioIE,
)
from .newgrounds import (
NewgroundsIE,
NewgroundsPlaylistIE,
)
from .newstube import NewstubeIE
from .nextmedia import (
NextMediaIE,
NextMediaActionNewsIE,
AppleDailyIE,
NextTVIE,
)
from .nexx import (
NexxIE,
NexxEmbedIE,
)
from .nfhsnetwork import NFHSNetworkIE
from .nfl import (
NFLIE,
NFLArticleIE,
)
from .nhk import (
NhkVodIE,
NhkVodProgramIE,
)
from .nhl import NHLIE
from .nick import (
NickIE,
NickBrIE,
NickDeIE,
NickNightIE,
NickRuIE,
)
from .niconico import (
NiconicoIE,
NiconicoPlaylistIE,
NiconicoUserIE,
NicovideoSearchDateIE,
NicovideoSearchIE,
NicovideoSearchURLIE,
)
from .ninecninemedia import NineCNineMediaIE
from .ninegag import NineGagIE
from .ninenow import NineNowIE
from .nintendo import NintendoIE
from .nitter import NitterIE
from .njpwworld import NJPWWorldIE
from .nobelprize import NobelPrizeIE
from .nonktube import NonkTubeIE
from .noovo import NoovoIE
from .normalboots import NormalbootsIE
from .nosvideo import NosVideoIE
from .nova import (
NovaEmbedIE,
NovaIE,
)
from .nowness import (
NownessIE,
NownessPlaylistIE,
NownessSeriesIE,
)
from .noz import NozIE
from .npo import (
AndereTijdenIE,
NPOIE,
NPOLiveIE,
NPORadioIE,
NPORadioFragmentIE,
SchoolTVIE,
HetKlokhuisIE,
VPROIE,
WNLIE,
)
from .npr import NprIE
from .nrk import (
NRKIE,
NRKPlaylistIE,
NRKSkoleIE,
NRKTVIE,
NRKTVDirekteIE,
NRKRadioPodkastIE,
NRKTVEpisodeIE,
NRKTVEpisodesIE,
NRKTVSeasonIE,
NRKTVSeriesIE,
)
from .nrl import NRLTVIE
from .ntvcojp import NTVCoJpCUIE
from .ntvde import NTVDeIE
from .ntvru import NTVRuIE
from .nytimes import (
NYTimesIE,
NYTimesArticleIE,
NYTimesCookingIE,
)
from .nuvid import NuvidIE
from .nzz import NZZIE
from .odatv import OdaTVIE
from .odnoklassniki import OdnoklassnikiIE
from .oktoberfesttv import OktoberfestTVIE
from .ondemandkorea import OnDemandKoreaIE
from .onet import (
OnetIE,
OnetChannelIE,
OnetMVPIE,
OnetPlIE,
)
from .onionstudios import OnionStudiosIE
from .ooyala import (
OoyalaIE,
OoyalaExternalIE,
)
from .openrec import (
OpenRecIE,
OpenRecCaptureIE,
)
from .ora import OraTVIE
from .orf import (
ORFTVthekIE,
ORFFM4IE,
ORFFM4StoryIE,
ORFOE1IE,
ORFOE3IE,
ORFNOEIE,
ORFWIEIE,
ORFBGLIE,
ORFOOEIE,
ORFSTMIE,
ORFKTNIE,
ORFSBGIE,
ORFTIRIE,
ORFVBGIE,
ORFIPTVIE,
)
from .outsidetv import OutsideTVIE
from .packtpub import (
PacktPubIE,
PacktPubCourseIE,
)
from .palcomp3 import (
PalcoMP3IE,
PalcoMP3ArtistIE,
PalcoMP3VideoIE,
)
from .pandoratv import PandoraTVIE
from .paramountplus import (
ParamountPlusIE,
ParamountPlusSeriesIE,
)
from .parliamentliveuk import ParliamentLiveUKIE
from .parlview import ParlviewIE
from .patreon import (
PatreonIE,
PatreonUserIE
)
from .pbs import PBSIE
from .pearvideo import PearVideoIE
from .peertube import PeerTubeIE
from .peloton import (
PelotonIE,
PelotonLiveIE
)
from .people import PeopleIE
from .performgroup import PerformGroupIE
from .periscope import (
PeriscopeIE,
PeriscopeUserIE,
)
from .philharmoniedeparis import PhilharmonieDeParisIE
from .phoenix import PhoenixIE
from .photobucket import PhotobucketIE
from .picarto import (
PicartoIE,
PicartoVodIE,
)
from .piksel import PikselIE
from .pinkbike import PinkbikeIE
from .pinterest import (
PinterestIE,
PinterestCollectionIE,
)
from .pladform import PladformIE
from .platzi import (
PlatziIE,
PlatziCourseIE,
)
from .playfm import PlayFMIE
from .playplustv import PlayPlusTVIE
from .plays import PlaysTVIE
from .playstuff import PlayStuffIE
from .playtvak import PlaytvakIE
from .playvid import PlayvidIE
from .playwire import PlaywireIE
from .plutotv import PlutoTVIE
from .pluralsight import (
PluralsightIE,
PluralsightCourseIE,
)
from .podomatic import PodomaticIE
from .pokemon import (
PokemonIE,
PokemonWatchIE,
)
from .polskieradio import (
PolskieRadioIE,
PolskieRadioCategoryIE,
)
from .popcorntimes import PopcorntimesIE
from .popcorntv import PopcornTVIE
from .porn91 import Porn91IE
from .porncom import PornComIE
from .pornflip import PornFlipIE
from .pornhd import PornHdIE
from .pornhub import (
PornHubIE,
PornHubUserIE,
PornHubPlaylistIE,
PornHubPagedVideoListIE,
PornHubUserVideosUploadIE,
)
from .pornotube import PornotubeIE
from .pornovoisines import PornoVoisinesIE
from .pornoxo import PornoXOIE
from .puhutv import (
PuhuTVIE,
PuhuTVSerieIE,
)
from .presstv import PressTVIE
from .projectveritas import ProjectVeritasIE
from .prosiebensat1 import ProSiebenSat1IE
from .puls4 import Puls4IE
from .pyvideo import PyvideoIE
from .qqmusic import (
QQMusicIE,
QQMusicSingerIE,
QQMusicAlbumIE,
QQMusicToplistIE,
QQMusicPlaylistIE,
)
from .r7 import (
R7IE,
R7ArticleIE,
)
from .radiko import RadikoIE, RadikoRadioIE
from .radiocanada import (
RadioCanadaIE,
RadioCanadaAudioVideoIE,
)
from .radiode import RadioDeIE
from .radiojavan import RadioJavanIE
from .radiobremen import RadioBremenIE
from .radiofrance import RadioFranceIE
from .rai import (
RaiPlayIE,
RaiPlayLiveIE,
RaiPlayPlaylistIE,
RaiIE,
)
from .raywenderlich import (
RayWenderlichIE,
RayWenderlichCourseIE,
)
from .rbmaradio import RBMARadioIE
from .rcs import (
RCSIE,
RCSEmbedsIE,
RCSVariousIE,
)
from .rcti import (
RCTIPlusIE,
RCTIPlusSeriesIE,
RCTIPlusTVIE,
)
from .rds import RDSIE
from .redbulltv import (
RedBullTVIE,
RedBullEmbedIE,
RedBullTVRrnContentIE,
RedBullIE,
)
from .reddit import (
RedditIE,
RedditRIE,
)
from .redtube import RedTubeIE
from .regiotv import RegioTVIE
from .rentv import (
RENTVIE,
RENTVArticleIE,
)
from .restudy import RestudyIE
from .reuters import ReutersIE
from .reverbnation import ReverbNationIE
from .rice import RICEIE
from .rmcdecouverte import RMCDecouverteIE
from .ro220 import Ro220IE
from .rockstargames import RockstarGamesIE
from .roosterteeth import RoosterTeethIE
from .rottentomatoes import RottenTomatoesIE
from .roxwel import RoxwelIE
from .rozhlas import RozhlasIE
from .rtbf import RTBFIE
from .rte import RteIE, RteRadioIE
from .rtlnl import RtlNlIE
from .rtl2 import (
RTL2IE,
RTL2YouIE,
RTL2YouSeriesIE,
)
from .rtp import RTPIE
from .rts import RTSIE
from .rtve import RTVEALaCartaIE, RTVELiveIE, RTVEInfantilIE, RTVETelevisionIE
from .rtvnh import RTVNHIE
from .rtvs import RTVSIE
from .ruhd import RUHDIE
from .rumble import RumbleEmbedIE
from .rutube import (
RutubeIE,
RutubeChannelIE,
RutubeEmbedIE,
RutubeMovieIE,
RutubePersonIE,
RutubePlaylistIE,
)
from .rutv import RUTVIE
from .ruutu import RuutuIE
from .ruv import RuvIE
from .safari import (
SafariIE,
SafariApiIE,
SafariCourseIE,
)
from .saitosan import SaitosanIE
from .samplefocus import SampleFocusIE
from .sapo import SapoIE
from .savefrom import SaveFromIE
from .sbs import SBSIE
from .screencast import ScreencastIE
from .screencastomatic import ScreencastOMaticIE
from .scrippsnetworks import (
ScrippsNetworksWatchIE,
ScrippsNetworksIE,
)
from .scte import (
SCTEIE,
SCTECourseIE,
)
from .seeker import SeekerIE
from .senateisvp import SenateISVPIE
from .sendtonews import SendtoNewsIE
from .servus import ServusIE
from .sevenplus import SevenPlusIE
from .sexu import SexuIE
from .seznamzpravy import (
SeznamZpravyIE,
SeznamZpravyArticleIE,
)
from .shahid import (
ShahidIE,
ShahidShowIE,
)
from .shared import (
SharedIE,
VivoIE,
)
from .shemaroome import ShemarooMeIE
from .showroomlive import ShowRoomLiveIE
from .simplecast import (
SimplecastIE,
SimplecastEpisodeIE,
SimplecastPodcastIE,
)
from .sina import SinaIE
from .sixplay import SixPlayIE
from .skyit import (
SkyItPlayerIE,
SkyItVideoIE,
SkyItVideoLiveIE,
SkyItIE,
SkyItAcademyIE,
SkyItArteIE,
CieloTVItIE,
TV8ItIE,
)
from .skylinewebcams import SkylineWebcamsIE
from .skynewsarabia import (
SkyNewsArabiaIE,
SkyNewsArabiaArticleIE,
)
from .sky import (
SkyNewsIE,
SkySportsIE,
SkySportsNewsIE,
)
from .slideshare import SlideshareIE
from .slideslive import SlidesLiveIE
from .slutload import SlutloadIE
from .snotr import SnotrIE
from .sohu import SohuIE
from .sonyliv import (
SonyLIVIE,
SonyLIVSeriesIE,
)
from .soundcloud import (
SoundcloudEmbedIE,
SoundcloudIE,
SoundcloudSetIE,
SoundcloudUserIE,
SoundcloudTrackStationIE,
SoundcloudPlaylistIE,
SoundcloudSearchIE,
)
from .soundgasm import (
SoundgasmIE,
SoundgasmProfileIE
)
from .southpark import (
SouthParkIE,
SouthParkDeIE,
SouthParkDkIE,
SouthParkEsIE,
SouthParkNlIE
)
from .sovietscloset import (
SovietsClosetIE,
SovietsClosetPlaylistIE
)
from .spankbang import (
SpankBangIE,
SpankBangPlaylistIE,
)
from .spankwire import SpankwireIE
from .spiegel import SpiegelIE
from .spike import (
BellatorIE,
ParamountNetworkIE,
)
from .stitcher import (
StitcherIE,
StitcherShowIE,
)
from .sport5 import Sport5IE
from .sportbox import SportBoxIE
from .sportdeutschland import SportDeutschlandIE
from .spotify import (
SpotifyIE,
SpotifyShowIE,
)
from .spreaker import (
SpreakerIE,
SpreakerPageIE,
SpreakerShowIE,
SpreakerShowPageIE,
)
from .springboardplatform import SpringboardPlatformIE
from .sprout import SproutIE
from .srgssr import (
SRGSSRIE,
SRGSSRPlayIE,
)
from .srmediathek import SRMediathekIE
from .stanfordoc import StanfordOpenClassroomIE
from .startv import StarTVIE
from .steam import SteamIE
from .storyfire import (
StoryFireIE,
StoryFireUserIE,
StoryFireSeriesIE,
)
from .streamable import StreamableIE
from .streamcloud import StreamcloudIE
from .streamcz import StreamCZIE
from .streetvoice import StreetVoiceIE
from .stretchinternet import StretchInternetIE
from .stv import STVPlayerIE
from .sunporno import SunPornoIE
from .sverigesradio import (
SverigesRadioEpisodeIE,
SverigesRadioPublicationIE,
)
from .svt import (
SVTIE,
SVTPageIE,
SVTPlayIE,
SVTSeriesIE,
)
from .swrmediathek import SWRMediathekIE
from .syfy import SyfyIE
from .sztvhu import SztvHuIE
from .tagesschau import (
TagesschauPlayerIE,
TagesschauIE,
)
from .tass import TassIE
from .tbs import TBSIE
from .tdslifeway import TDSLifewayIE
from .teachable import (
TeachableIE,
TeachableCourseIE,
)
from .teachertube import (
TeacherTubeIE,
TeacherTubeUserIE,
)
from .teachingchannel import TeachingChannelIE
from .teamcoco import TeamcocoIE
from .teamtreehouse import TeamTreeHouseIE
from .techtalks import TechTalksIE
from .ted import TEDIE
from .tele5 import Tele5IE
from .tele13 import Tele13IE
from .telebruxelles import TeleBruxellesIE
from .telecinco import TelecincoIE
from .telegraaf import TelegraafIE
from .telemb import TeleMBIE
from .telemundo import TelemundoIE
from .telequebec import (
TeleQuebecIE,
TeleQuebecSquatIE,
TeleQuebecEmissionIE,
TeleQuebecLiveIE,
TeleQuebecVideoIE,
)
from .teletask import TeleTaskIE
from .telewebion import TelewebionIE
from .tennistv import TennisTVIE
from .tenplay import TenPlayIE
from .testurl import TestURLIE
from .tf1 import TF1IE
from .tfo import TFOIE
from .theintercept import TheInterceptIE
from .theplatform import (
ThePlatformIE,
ThePlatformFeedIE,
)
from .thescene import TheSceneIE
from .thestar import TheStarIE
from .thesun import TheSunIE
from .theweatherchannel import TheWeatherChannelIE
from .thisamericanlife import ThisAmericanLifeIE
from .thisav import ThisAVIE
from .thisoldhouse import ThisOldHouseIE
from .threeqsdn import ThreeQSDNIE
from .tiktok import (
TikTokIE,
TikTokUserIE,
)
from .tinypic import TinyPicIE
from .tmz import TMZIE
from .tnaflix import (
TNAFlixNetworkEmbedIE,
TNAFlixIE,
EMPFlixIE,
MovieFapIE,
)
from .toggle import (
ToggleIE,
MeWatchIE,
)
from .tokentube import (
TokentubeIE,
TokentubeChannelIE
)
from .tonline import TOnlineIE
from .toongoggles import ToonGogglesIE
from .toutv import TouTvIE
from .toypics import ToypicsUserIE, ToypicsIE
from .traileraddict import TrailerAddictIE
from .trilulilu import TriluliluIE
from .trovo import (
TrovoIE,
TrovoVodIE,
)
from .trunews import TruNewsIE
from .trutv import TruTVIE
from .tube8 import Tube8IE
from .tubitv import (
TubiTvIE,
TubiTvShowIE,
)
from .tumblr import TumblrIE
from .tunein import (
TuneInClipIE,
TuneInStationIE,
TuneInProgramIE,
TuneInTopicIE,
TuneInShortenerIE,
)
from .tunepk import TunePkIE
from .turbo import TurboIE
from .tv2 import (
TV2IE,
TV2ArticleIE,
KatsomoIE,
MTVUutisetArticleIE,
)
from .tv2dk import (
TV2DKIE,
TV2DKBornholmPlayIE,
)
from .tv2hu import (
TV2HuIE,
TV2HuSeriesIE,
)
from .tv4 import TV4IE
from .tv5mondeplus import TV5MondePlusIE
from .tv5unis import (
TV5UnisVideoIE,
TV5UnisIE,
)
from .tva import (
TVAIE,
QubIE,
)
from .tvanouvelles import (
TVANouvellesIE,
TVANouvellesArticleIE,
)
from .tvc import (
TVCIE,
TVCArticleIE,
)
from .tver import TVerIE
from .tvigle import TvigleIE
from .tvland import TVLandIE
from .tvn24 import TVN24IE
from .tvnet import TVNetIE
from .tvnoe import TVNoeIE
from .tvnow import (
TVNowIE,
TVNowFilmIE,
TVNowNewIE,
TVNowSeasonIE,
TVNowAnnualIE,
TVNowShowIE,
)
from .tvp import (
TVPEmbedIE,
TVPIE,
TVPWebsiteIE,
)
from .tvplay import (
TVPlayIE,
ViafreeIE,
TVPlayHomeIE,
)
from .tvplayer import TVPlayerIE
from .tweakers import TweakersIE
from .twentyfourvideo import TwentyFourVideoIE
from .twentymin import TwentyMinutenIE
from .twentythreevideo import TwentyThreeVideoIE
from .twitcasting import (
TwitCastingIE,
TwitCastingLiveIE,
TwitCastingUserIE,
)
from .twitch import (
TwitchVodIE,
TwitchCollectionIE,
TwitchVideosIE,
TwitchVideosClipsIE,
TwitchVideosCollectionsIE,
TwitchStreamIE,
TwitchClipsIE,
)
from .twitter import (
TwitterCardIE,
TwitterIE,
TwitterAmplifyIE,
TwitterBroadcastIE,
TwitterShortenerIE,
)
from .udemy import (
UdemyIE,
UdemyCourseIE
)
from .udn import UDNEmbedIE
from .ufctv import (
UFCTVIE,
UFCArabiaIE,
)
from .ukcolumn import UkColumnIE
from .uktvplay import UKTVPlayIE
from .digiteka import DigitekaIE
from .dlive import (
DLiveVODIE,
DLiveStreamIE,
)
from .umg import UMGDeIE
from .unistra import UnistraIE
from .unity import UnityIE
from .uol import UOLIE
from .uplynk import (
UplynkIE,
UplynkPreplayIE,
)
from .urort import UrortIE
from .urplay import URPlayIE
from .usanetwork import USANetworkIE
from .usatoday import USATodayIE
from .ustream import UstreamIE, UstreamChannelIE
from .ustudio import (
UstudioIE,
UstudioEmbedIE,
)
from .utreon import UtreonIE
from .varzesh3 import Varzesh3IE
from .vbox7 import Vbox7IE
from .veehd import VeeHDIE
from .veoh import VeohIE
from .vesti import VestiIE
from .vevo import (
VevoIE,
VevoPlaylistIE,
)
from .vgtv import (
BTArticleIE,
BTVestlendingenIE,
VGTVIE,
)
from .vh1 import VH1IE
from .vice import (
ViceIE,
ViceArticleIE,
ViceShowIE,
)
from .vidbit import VidbitIE
from .viddler import ViddlerIE
from .videa import VideaIE
from .videodetective import VideoDetectiveIE
from .videofyme import VideofyMeIE
from .videomore import (
VideomoreIE,
VideomoreVideoIE,
VideomoreSeasonIE,
)
from .videopress import VideoPressIE
from .vidio import (
VidioIE,
VidioPremierIE,
VidioLiveIE
)
from .vidlii import VidLiiIE
from .vidme import (
VidmeIE,
VidmeUserIE,
VidmeUserLikesIE,
)
from .vier import VierIE, VierVideosIE
from .viewlift import (
ViewLiftIE,
ViewLiftEmbedIE,
)
from .viidea import ViideaIE
from .vimeo import (
VimeoIE,
VimeoAlbumIE,
VimeoChannelIE,
VimeoGroupsIE,
VimeoLikesIE,
VimeoOndemandIE,
VimeoReviewIE,
VimeoUserIE,
VimeoWatchLaterIE,
VHXEmbedIE,
)
from .vimple import VimpleIE
from .vine import (
VineIE,
VineUserIE,
)
from .viki import (
VikiIE,
VikiChannelIE,
)
from .viqeo import ViqeoIE
from .viu import (
ViuIE,
ViuPlaylistIE,
ViuOTTIE,
)
from .vk import (
VKIE,
VKUserVideosIE,
VKWallPostIE,
)
from .vlive import (
VLiveIE,
VLivePostIE,
VLiveChannelIE,
)
from .vodlocker import VodlockerIE
from .vodpl import VODPlIE
from .vodplatform import VODPlatformIE
from .voicerepublic import VoiceRepublicIE
from .voicy import (
VoicyIE,
VoicyChannelIE,
)
from .voot import (
VootIE,
VootSeriesIE,
)
from .voxmedia import (
VoxMediaVolumeIE,
VoxMediaIE,
)
from .vrt import VRTIE
from .vrak import VrakIE
from .vrv import (
VRVIE,
VRVSeriesIE,
)
from .vshare import VShareIE
from .vtm import VTMIE
from .medialaan import MedialaanIE
from .vube import VubeIE
from .vuclip import VuClipIE
from .vvvvid import (
VVVVIDIE,
VVVVIDShowIE,
)
from .vyborymos import VyboryMosIE
from .vzaar import VzaarIE
from .wakanim import WakanimIE
from .walla import WallaIE
from .washingtonpost import (
WashingtonPostIE,
WashingtonPostArticleIE,
)
from .wat import WatIE
from .watchbox import WatchBoxIE
from .watchindianporn import WatchIndianPornIE
from .wdr import (
WDRIE,
WDRPageIE,
WDRElefantIE,
WDRMobileIE,
)
from .webcaster import (
WebcasterIE,
WebcasterFeedIE,
)
from .webofstories import (
WebOfStoriesIE,
WebOfStoriesPlaylistIE,
)
from .weibo import (
WeiboIE,
WeiboMobileIE
)
from .weiqitv import WeiqiTVIE
from .wimtv import WimTVIE
from .whowatch import WhoWatchIE
from .wistia import (
WistiaIE,
WistiaPlaylistIE,
)
from .worldstarhiphop import WorldStarHipHopIE
from .wsj import (
WSJIE,
WSJArticleIE,
)
from .wwe import WWEIE
from .xbef import XBefIE
from .xboxclips import XboxClipsIE
from .xfileshare import XFileShareIE
from .xhamster import (
XHamsterIE,
XHamsterEmbedIE,
XHamsterUserIE,
)
from .xiami import (
XiamiSongIE,
XiamiAlbumIE,
XiamiArtistIE,
XiamiCollectionIE
)
from .ximalaya import (
XimalayaIE,
XimalayaAlbumIE
)
from .xminus import XMinusIE
from .xnxx import XNXXIE
from .xstream import XstreamIE
from .xtube import XTubeUserIE, XTubeIE
from .xuite import XuiteIE
from .xvideos import XVideosIE
from .xxxymovies import XXXYMoviesIE
from .yahoo import (
YahooIE,
YahooSearchIE,
YahooGyaOPlayerIE,
YahooGyaOIE,
YahooJapanNewsIE,
)
from .yandexdisk import YandexDiskIE
from .yandexmusic import (
YandexMusicTrackIE,
YandexMusicAlbumIE,
YandexMusicPlaylistIE,
YandexMusicArtistTracksIE,
YandexMusicArtistAlbumsIE,
)
from .yandexvideo import YandexVideoIE
from .yapfiles import YapFilesIE
from .yesjapan import YesJapanIE
from .yinyuetai import YinYueTaiIE
from .ynet import YnetIE
from .youjizz import YouJizzIE
from .youku import (
YoukuIE,
YoukuShowIE,
)
from .younow import (
YouNowLiveIE,
YouNowChannelIE,
YouNowMomentIE,
)
from .youporn import YouPornIE
from .yourporn import YourPornIE
from .yourupload import YourUploadIE
from .youtube import (
YoutubeIE,
YoutubeFavouritesIE,
YoutubeHistoryIE,
YoutubeTabIE,
YoutubePlaylistIE,
YoutubeRecommendedIE,
YoutubeSearchDateIE,
YoutubeSearchIE,
YoutubeSearchURLIE,
YoutubeSubscriptionsIE,
YoutubeTruncatedIDIE,
YoutubeTruncatedURLIE,
YoutubeYtBeIE,
YoutubeYtUserIE,
YoutubeWatchLaterIE,
)
from .zapiks import ZapiksIE
from .zattoo import (
BBVTVIE,
EinsUndEinsTVIE,
EWETVIE,
GlattvisionTVIE,
MNetTVIE,
MyVisionTVIE,
NetPlusIE,
OsnatelTVIE,
QuantumTVIE,
QuicklineIE,
QuicklineLiveIE,
SaltTVIE,
SAKTVIE,
VTXTVIE,
WalyTVIE,
ZattooIE,
ZattooLiveIE,
)
from .zdf import ZDFIE, ZDFChannelIE
from .zee5 import (
Zee5IE,
Zee5SeriesIE,
)
from .zhihu import ZhihuIE
from .zingmp3 import (
ZingMp3IE,
ZingMp3AlbumIE,
)
from .zoom import ZoomIE
from .zype import ZypeIE
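# --- Usage sketch (hedged; not part of the original registry module): the
# imports above register every extractor class that youtube-dl matches against
# URLs. A minimal example via the public YoutubeDL API; the URL below is only
# a placeholder assumption.
if __name__ == '__main__':
    from youtube_dl import YoutubeDL

    with YoutubeDL({'quiet': True}) as ydl:
        # extract_info() dispatches to the matching extractor (e.g. YoutubeIE)
        info = ydl.extract_info('https://www.youtube.com/watch?v=example', download=False)
        print(info.get('title'))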
|
py | 1a309402d864a31e994ffddb348a018bcad1fd82 | from dagstermill.examples.repository import notebook_repo
from dagster import RepositoryDefinition
def test_dagstermill_repo():
assert isinstance(notebook_repo, RepositoryDefinition)
|
py | 1a30941ed183698b3a0aea20efec96af8d46f379 | # -*- coding: utf-8 -*-
"""
file: module_graph_copy_test.py
Unit tests for Graph copy related methods
"""
import copy
from unittest_baseclass import UnittestPythonCompatibility
from graphit import Graph
class TestGraphCopy(UnittestPythonCompatibility):
"""
Test Graph copy and deepcopy methods
"""
def setUp(self):
"""
Build default graph with nodes, edges and attributes
"""
self.graph = Graph()
self.graph.add_nodes([('g', {'weight': 1.0}), ('r', {'weight': 1.5}), ('a', {'weight': 2.0}),
('p', {'weight': 2.5}), ('h', {'weight': 3.0})])
self.graph.add_edges([(1, 2), (2, 3), (3, 4), (3, 5), (4, 5)], isedge=True)
def tearDown(self):
"""
Test copied state
Testing equality in node, edge and adjacency data stores is based on
the internal '_storage' object and not so much the storage object
itself which is often just a wrapper.
"""
# Main Graph object is new
self.assertTrue(id(self.copied) != id(self.graph))
if self.shallow:
# Internal node and edge stores point to parent.
self.assertEqual(id(self.copied.nodes._storage), id(self.graph.nodes._storage))
self.assertEqual(id(self.copied.edges._storage), id(self.graph.edges._storage))
# ORM and origin objects point to parent
self.assertEqual(id(self.copied.orm), id(self.graph.orm))
self.assertEqual(id(self.copied.origin), id(self.graph.origin))
else:
            # Internal node and edge stores are new, independent copies.
self.assertNotEqual(id(self.copied.nodes._storage), id(self.graph.nodes._storage))
self.assertNotEqual(id(self.copied.edges._storage), id(self.graph.edges._storage))
            # ORM and origin objects are new, independent copies
self.assertNotEqual(id(self.copied.orm), id(self.graph.orm))
self.assertNotEqual(id(self.copied.origin), id(self.graph.origin))
def test_graph_copy_shallow(self):
"""
Test making a shallow copy of a graph. This essentially copies the
        Graph object while linking to the data store in the parent Graph
"""
self.shallow = True
self.copied = self.graph.copy(deep=False)
def test_graph_copy_deep(self):
"""
Test making a deep copy of a graph (default) copying everything
"""
self.shallow = False
self.copied = self.graph.copy()
def test_graph_buildin_copy_shallow(self):
"""
        Test making a shallow copy of a graph using the 'copy' function of the
        copy module. This calls the Graph.copy method
"""
self.shallow = True
self.copied = copy.copy(self.graph)
def test_graph_buildin_copy_deep(self):
"""
        Test making a deep copy of a graph using the 'deepcopy' function of the
        copy module. This calls the Graph.copy method
"""
self.shallow = False
self.copied = copy.deepcopy(self.graph)
def test_graph_buildin_copy_deep_view(self):
"""
Test copying subgraphs either with the set 'view' only or the full
origin graph (full graph)
"""
# Regular copy
self.shallow = False
self.copied = copy.deepcopy(self.graph)
# Build subgraph, same origin
view = self.graph.getnodes([3,4,5])
self.assertEqual(id(view.origin), id(self.graph.origin))
# Deep copy with or without view, different origin
copy_view = view.copy(deep=True, copy_view=False)
copy_full = view.copy(deep=True, copy_view=True)
self.assertNotEqual(id(copy_view.origin), id(self.graph.origin))
self.assertNotEqual(id(copy_full.origin), id(self.graph.origin))
# Subgraph 'view' should be identical to the original
        # regardless of the copy mode
self.assertEqual(copy_view.nodes.keys(), view.nodes.keys())
self.assertEqual(copy_view.edges.keys(), view.edges.keys())
self.assertEqual(copy_view.adjacency.keys(), view.adjacency.keys())
self.assertEqual(copy_full.nodes.keys(), view.nodes.keys())
self.assertEqual(copy_full.edges.keys(), view.edges.keys())
self.assertEqual(copy_full.adjacency.keys(), view.adjacency.keys())
# The view copy origin should either be identical to the view
# (copy_view = True) or to the full graph (copy_view = False)
self.assertEqual(list(copy_view.nodes._storage.keys()), list(view.nodes.keys()))
self.assertEqual(list(copy_full.nodes._storage.keys()), list(view.origin.nodes.keys()))
        # The copy_full has its origin equal to self and thus copy_full.origin.nodes
# equals copy_full.nodes. However, the view is also set which means that
# by default the full graph is not accessible without resetting it
copy_full.nodes.reset_view()
self.assertEqual(copy_full.nodes.keys(), self.graph.nodes.keys())
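# --- Illustrative sketch (hedged; uses only the public API exercised by the
# tests above): shallow copies share the parent's internal '_storage', so
# attribute edits are visible in both graphs, while deep copies are fully
# independent.
if __name__ == '__main__':
    g = Graph()
    g.add_nodes([('a', {'weight': 1.0}), ('b', {'weight': 2.0})])
    shallow = g.copy(deep=False)  # new Graph object, shared storage
    deep = g.copy(deep=True)      # new Graph object, private storage
    print(id(shallow.nodes._storage) == id(g.nodes._storage))  # True
    print(id(deep.nodes._storage) == id(g.nodes._storage))     # False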
|
py | 1a3094508c7ffe16aced913876d5f4737a3bc88b | """
## SCRIPT HEADER ##
Created By : Muhammad Fredo
Email : [email protected]
Start Date : 03 Nov 2020
Info :
alternative_callback_data: header data
"id": (callback id)
"event_name": "after_open"
"callback_tag": fr_maya
maya_event_callbacks:
after_open:
event_id: event_id,
add_callback: om callback function
before_open:
event_id: event_id,
add_callback: om callback function
"""
import copy
import inspect
import re
import maya.OpenMaya as om
# TODO: update to api 2.0 ??
from FrMaya import utility as util
class MyCallbackManager(object):
__metaclass__ = util.MetaSingleton
@staticmethod
def get_maya_event_callback():
"""
        Return a dictionary of Maya event callbacks.
after_open:
event_id: event_id,
add_callback: open Maya callback function.
before_open:
event_id: event_id,
add_callback: open Maya callback function.
:rtype: dict
"""
# example regex subs -> re.sub(r"(\w)([A-Z])", r"\1 \2", "WordWordWord")
callback_events = {}
re_pattern = re.compile(r'(?<=\w)([A-Z])')
for event_name, event_id in inspect.getmembers(om.MSceneMessage):
            if event_name.startswith('k') and not event_name.endswith('Check'):  # check-events end with a capital-C 'Check'
if not callback_events.get(event_name):
key_name = re_pattern.sub(r'_\1', event_name[1:])
callback_events[key_name.lower()] = {
'event_id': event_id,
'add_callback': om.MSceneMessage.addCallback,
}
return callback_events
def __init__(self):
"""Class to manage callback for FrMaya system."""
print 'initialize callback manager'
self._maya_event_callback = {}
self._registered_callback = {}
self._maya_event_callback = copy.deepcopy(self.get_maya_event_callback())
        assert len(self._maya_event_callback) > 0, 'Failed to collect any Maya event callbacks'
def _group_registered_callbacks(self):
"""
        Return registered callbacks grouped by event name and by callback tag.
events:
event_name: [callback function name, ..],
tags:
callback_tag: [callback function name, ..],
:rtype: dict
"""
result_data = {'events': {}, 'tags': {}}
for cb_fn_name, cb_data in self._registered_callback.items():
for each in result_data:
if each == 'events':
event_or_tag = cb_data['event_name']
elif each == 'tags':
event_or_tag = cb_data['callback_tag']
else:
return None
if result_data[each].get(event_or_tag):
result_data[each][event_or_tag].append(cb_fn_name)
else:
result_data[each][event_or_tag] = [cb_fn_name]
return result_data
def add_callback(self, event_name, callback_tag, func):
"""
        Return True if the callback was successfully added to the callback manager, False otherwise.
:arg event_name: Maya event nice name.
:type event_name: str
:arg callback_tag: A tag to group callback in callback manager.
:type callback_tag: str
:arg func: Python function.
:rtype: bool
"""
my_event_cb = self._maya_event_callback.get(event_name)
if my_event_cb:
callback_id = my_event_cb['add_callback'](my_event_cb['event_id'], func)
self._registered_callback[func.__module__] = {
'event_name': event_name,
'callback_tag': callback_tag,
'callback_id': callback_id
}
return True
else:
return False
def remove_callback(self, event_name = '', callback_tag = ''):
"""
        Remove callbacks based on the specified keyword arguments.
        If both keywords are specified, both removal actions are performed.
        :key event_name: Maya event name whose callbacks should be removed.
        :type event_name: str
        :key callback_tag: Callback tag whose callbacks should be removed.
:type callback_tag: str
"""
callback_collection = self._group_registered_callbacks()
cb_id_array = om.MCallbackIdArray()
cb_fn_name_list = []
if event_name:
cb_fn_name_list.extend(callback_collection['events'].get(event_name, []))
if callback_tag:
cb_fn_name_list.extend(callback_collection['tags'].get(callback_tag, []))
for cb_fn_name in cb_fn_name_list:
cb_id_array.append(self._registered_callback[cb_fn_name]['callback_id'])
if cb_id_array:
om.MMessage.removeCallbacks(cb_id_array)
def show_registered_callback(self, event_name = '', callback_tag = ''):
"""
        Return registered callbacks based on the specified keyword;
        if neither keyword is specified, return both groups of data (event names and tags).
        :key event_name: Maya event name whose callback group should be retrieved.
        :type event_name: str
        :key callback_tag: Callback tag whose callback group should be retrieved.
:type callback_tag: str
:rtype: dict or list
"""
result = self._group_registered_callbacks()
if event_name:
return result['events'].get(event_name, [])
elif callback_tag:
return result['tags'].get(callback_tag, [])
else:
return copy.deepcopy(result)
def show_maya_event_name(self):
"""
Return list of Maya event nice name.
:rtype: list of str
"""
return self._maya_event_callback.keys()
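# --- Illustrative sketch (hedged; only meaningful inside a running Maya
# session, since the callbacks hook om.MSceneMessage events). The event name
# 'after_open' is one of the keys reported by show_maya_event_name().
if __name__ == '__main__':
    manager = MyCallbackManager()
    def on_after_open(*args):
        print 'scene opened'
    manager.add_callback('after_open', 'fr_maya', on_after_open)
    print manager.show_registered_callback(callback_tag='fr_maya')
    manager.remove_callback(callback_tag='fr_maya')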
|
py | 1a3094c7e76a67f7b55294278aceb4240f2068ed | from typing import Sequence, Union, Optional, Callable, Dict, Any, Tuple
import torch
from ignite.engine.engine import Engine
from ignite.engine.events import State, Events, EventEnum, CallableEventWithFilter
from ignite.utils import convert_tensor
from ignite.metrics import Metric
__all__ = [
"State",
"create_supervised_trainer",
"create_supervised_evaluator",
"Engine",
"Events",
"EventEnum",
"CallableEventWithFilter",
]
def _prepare_batch(
batch: Sequence[torch.Tensor], device: Optional[Union[str, torch.device]] = None, non_blocking: bool = False
):
"""Prepare batch for training: pass to a device with options.
"""
x, y = batch
return (
convert_tensor(x, device=device, non_blocking=non_blocking),
convert_tensor(y, device=device, non_blocking=non_blocking),
)
def create_supervised_trainer(
model: torch.nn.Module,
optimizer: torch.optim.Optimizer,
loss_fn: Union[Callable, torch.nn.Module],
device: Optional[Union[str, torch.device]] = None,
non_blocking: bool = False,
prepare_batch: Callable = _prepare_batch,
output_transform: Callable = lambda x, y, y_pred, loss: loss.item(),
) -> Engine:
"""
Factory function for creating a trainer for supervised models.
Args:
model (`torch.nn.Module`): the model to train.
optimizer (`torch.optim.Optimizer`): the optimizer to use.
loss_fn (torch.nn loss function): the loss function to use.
device (str, optional): device type specification (default: None).
Applies to batches after starting the engine. Model *will not* be moved.
Device can be CPU, GPU or TPU.
non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
with respect to the host. For other cases, this argument has no effect.
prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
tuple of tensors `(batch_x, batch_y)`.
output_transform (callable, optional): function that receives 'x', 'y', 'y_pred', 'loss' and returns value
to be assigned to engine's state.output after each iteration. Default is returning `loss.item()`.
Note:
        `engine.state.output` for this engine is defined by the `output_transform` parameter and is the loss
of the processed batch by default.
.. warning::
The internal use of `device` has changed.
`device` will now *only* be used to move the input data to the correct device.
The `model` should be moved by the user before creating an optimizer.
For more information see:
* `PyTorch Documentation <https://pytorch.org/docs/stable/optim.html#constructing-it>`_
* `PyTorch's Explanation <https://github.com/pytorch/pytorch/issues/7844#issuecomment-503713840>`_
Returns:
Engine: a trainer engine with supervised update function.
"""
device_type = device.type if isinstance(device, torch.device) else device
on_tpu = "xla" in device_type if device_type is not None else False
if on_tpu:
try:
import torch_xla.core.xla_model as xm
except ImportError:
raise RuntimeError("In order to run on TPU, please install PyTorch XLA")
def _update(engine: Engine, batch: Sequence[torch.Tensor]) -> Union[Any, Tuple[torch.Tensor]]:
model.train()
optimizer.zero_grad()
x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
y_pred = model(x)
loss = loss_fn(y_pred, y)
loss.backward()
if on_tpu:
xm.optimizer_step(optimizer, barrier=True)
else:
optimizer.step()
return output_transform(x, y, y_pred, loss)
trainer = Engine(_update)
return trainer
def create_supervised_evaluator(
model: torch.nn.Module,
metrics: Optional[Dict[str, Metric]] = None,
device: Optional[Union[str, torch.device]] = None,
non_blocking: bool = False,
prepare_batch: Callable = _prepare_batch,
output_transform: Callable = lambda x, y, y_pred: (y_pred, y),
) -> Engine:
"""
Factory function for creating an evaluator for supervised models.
Args:
model (`torch.nn.Module`): the model to train.
metrics (dict of str - :class:`~ignite.metrics.Metric`): a map of metric names to Metrics.
device (str, optional): device type specification (default: None).
Applies to batches after starting the engine. Model *will not* be moved.
non_blocking (bool, optional): if True and this copy is between CPU and GPU, the copy may occur asynchronously
with respect to the host. For other cases, this argument has no effect.
prepare_batch (callable, optional): function that receives `batch`, `device`, `non_blocking` and outputs
tuple of tensors `(batch_x, batch_y)`.
output_transform (callable, optional): function that receives 'x', 'y', 'y_pred' and returns value
to be assigned to engine's state.output after each iteration. Default is returning `(y_pred, y,)` which fits
output expected by metrics. If you change it you should use `output_transform` in metrics.
Note:
        `engine.state.output` for this engine is defined by the `output_transform` parameter and is
a tuple of `(batch_pred, batch_y)` by default.
.. warning::
The internal use of `device` has changed.
`device` will now *only* be used to move the input data to the correct device.
The `model` should be moved by the user before creating an optimizer.
For more information see:
* `PyTorch Documentation <https://pytorch.org/docs/stable/optim.html#constructing-it>`_
* `PyTorch's Explanation <https://github.com/pytorch/pytorch/issues/7844#issuecomment-503713840>`_
Returns:
Engine: an evaluator engine with supervised inference function.
"""
metrics = metrics or {}
def _inference(engine: Engine, batch: Sequence[torch.Tensor]) -> Union[Any, Tuple[torch.Tensor]]:
model.eval()
with torch.no_grad():
x, y = prepare_batch(batch, device=device, non_blocking=non_blocking)
y_pred = model(x)
return output_transform(x, y, y_pred)
evaluator = Engine(_inference)
for name, metric in metrics.items():
metric.attach(evaluator, name)
return evaluator
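# --- Minimal usage sketch (hedged; the model, data and hyper-parameters below
# are placeholder assumptions, not part of the original module).
if __name__ == '__main__':
    import torch.nn.functional as F
    from torch.utils.data import DataLoader, TensorDataset

    model = torch.nn.Linear(10, 2)
    loader = DataLoader(
        TensorDataset(torch.randn(64, 10), torch.randint(0, 2, (64,))),
        batch_size=16)
    trainer = create_supervised_trainer(
        model, torch.optim.SGD(model.parameters(), lr=0.1), F.cross_entropy)
    evaluator = create_supervised_evaluator(model)

    @trainer.on(Events.EPOCH_COMPLETED)
    def run_validation(engine):
        # re-run the evaluation engine after every training epoch
        evaluator.run(loader)

    trainer.run(loader, max_epochs=2)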
|
py | 1a309559cbdfd179310e5f0728e67d6420555e8b | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from cleave import server
class MyServer(server.BaseServer):
"""
My HTTP Server
"""
def client_handler(self, client):
"""
Handles a client connection
:param client: server.BaseClient
:return: None
"""
        client.send('HTTP/1.1 200 OK\r\n')
        client.send('Content-Type: text/html; charset=utf-8\r\n\r\n')
client.send('<h1>Hello world</h1>')
client.send('<p><strong>My Address:</strong></p>')
client.send('<pre>{}:{}</pre>'.format(client.addr[0], client.addr[1]))
client.send('<p><strong>Request body:</strong></p>')
client.send('<pre>{}</pre>'.format(client.message))
client.send('<hr /><small>By Cleave Server 0.13 Beta</small>')
if __name__ == '__main__':
MyServer(port=80) |
py | 1a309735db48975894985b294fbedb43dcf9005c | from torch import nn
import torch as tc
import numpy as np
import BasicFun as bf
import time
import os
import sys
from termcolor import cprint
import matplotlib
from matplotlib.pyplot import plot, savefig, figure
from TensorNetworkExpasion import TTN_basic, Vectorization, TTN_Pool_2by2to1, \
num_correct, load_tensor_network, save_tensor_network,\
test_accuracy_mnist, pre_process_dataset, Attention_FC, Attention_Con2d, \
TTN_Pool_2xto1, TTN_Pool_2yto1, TTN_ConvTI_2by2to1, TTN_PoolTI_2by2to1
# matplotlib.use('Agg')
def Paras_VL_CNN_BTN_Collected1chg1():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1chg1'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1chg1_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24)
f-MNIST(d=4,chi=24)
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1chg1_BP, self).__init__(num_layers=6)
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.att = Attention_FC(64*4, 16, para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x, train=True):
eps_mask = 0.005
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = self.att(x.reshape(num, -1))
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = mask_x(x, eps_mask, train)
x = eval('self.layer' + str(n) + '(x)')
if n in [3, 4, 5]:
x = mask_x(x, eps_mask, train)
# print(x.sum(dim=1))
return x.squeeze()
def Paras_VL_CNN_BTN_Collected1chg2():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1chg2'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1chg2_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24)
f-MNIST(d=4,chi=24)
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1chg2_BP, self).__init__(num_layers=6)
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x, train=True):
eps_mask = 0.005
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = mask_x(x, eps_mask, train)
x = eval('self.layer' + str(n) + '(x)')
if n in [3, 4, 5]:
x = mask_x(x, eps_mask, train)
# print(x.sum(dim=1))
return x.squeeze()
def Paras_VL_CNN_BTN_Collected1chg3():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1chg3'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1chg3_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24)
f-MNIST(d=4,chi=24)
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1chg3_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x, train=True):
eps_mask = 0.01
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = mask_x(x, eps_mask, train)
x = eval('self.layer' + str(n) + '(x)')
if n in [3, 4]:
x = mask_x(x, eps_mask, train)
# print(x.sum(dim=1))
return x.squeeze()
def Paras_VL_CNN_BTN_Collected1chg4():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1chg4'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1chg4_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24)
f-MNIST(d=4,chi=24)
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1chg4_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x, train=True):
eps_mask = 0.005
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = mask_x(x, eps_mask, train)
x = eval('self.layer' + str(n) + '(x)')
if n in [3, 4]:
x = mask_x(x, eps_mask, train)
# print(x.sum(dim=1))
return x.squeeze()
def Paras_VL_CNN_BTN_Collected1chg5():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1chg5'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1chg5_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24)
f-MNIST(d=4,chi=24)
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1chg5_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x, train=True):
eps_mask = 0.02
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = mask_x(x, eps_mask, train)
x = eval('self.layer' + str(n) + '(x)')
if n in [3, 4, 5]:
x = mask_x(x, eps_mask, train)
# print(x.sum(dim=1))
return x.squeeze()
# ==========================================================
def tn_multi_classifier_CNNBTN_mnist(para=None):
logger = bf.logger(para['log_name']+'.log', level='info')
log = logger.logger.info
t0 = time.time()
if para is None:
para = parameter_default()
para = make_para_consistent(para)
log('=' * 15)
log('Using device: ' + str(para['device']))
log('=' * 15)
bf.print_dict(para)
labels2mat = (para['loss_func'] == 'MSELoss')
if para['TN'] == 'MPS':
data_dim = 2
else:
data_dim = 5
train_loader, test_loader = bf.load_mnist_and_batch(
para['dataset'], para['classes'], para['num_samples'], None, para['batch_size'],
data_dim=data_dim, labels2mat=labels2mat, channel=len(para['classes']),
project_name=para['project'], dev=para['device'])
train_loader, train_num_tot = pre_process_dataset(
train_loader, para, para['device'])
test_loader, test_num_tot = pre_process_dataset(
test_loader, para, para['device'])
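    # NOTE: debug preview of one training image; imshow()/show()/input() block
    # execution until the figure is closed and Enter is pressed in the console.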
img = train_loader[7][0].reshape(train_loader[3][0].shape[0], -1)
img = img[3, :].reshape(28, 28)
matplotlib.pyplot.imshow(img.cpu())
matplotlib.pyplot.show()
input()
num_batch_train = len(train_loader)
log('Num of training samples:\t' + str(train_num_tot))
log('Num of testing samples:\t' + str(test_num_tot))
log('Num of training batches:\t' + str(num_batch_train))
log('Num of features:\t' + str(para['length']))
log('Dataset finish processed...')
loss_func = tc.nn.CrossEntropyLoss()
tn = eval(para['TN'] + '_BP(para)')
info = dict()
info['train_acc'] = list()
info['train_loss'] = list()
info['test_acc'] = list()
info['norm_coeff'] = list()
if para['normalize_tensors'] is not None:
tn.normalize_all_tensors(para['normalize_tensors'])
nc = test_accuracy_mnist(tn, test_loader, para)
log('Initially, we have test acc = ' + str(nc / test_num_tot))
parameters_cnn = nn.ParameterList()
parameters_btn = nn.ParameterList()
for x in tn.parameters():
if x.ndimension() in [7, 9]:
parameters_btn.append(x)
else:
parameters_cnn.append(x)
if parameters_cnn.__len__() > 0:
optimizer_cnn = tc.optim.Adam(parameters_cnn, lr=para['lr'][0])
if parameters_btn.__len__() > 0:
optimizer_btn = tc.optim.Adam(parameters_btn, lr=para['lr'][1])
log('Start training...')
log('[Note: data will be save at: ' + para['data_path'] + ']')
coeff_norm = 0
if para['if_test']:
titles = 'Epoch \t train_loss \t train_acc \t test_acc \t norm_coeff'
else:
titles = 'Epoch \t train_loss \t train_acc \t norm_coeff'
log(titles)
for t in range(para['it_time']):
t_loop = time.time()
train_loss = 0
nc = 0
if (num_batch_train > 1) and (t > 0):
train_loader = bf.re_batch_data_loader(train_loader)
for imgs, labels in train_loader:
imgs, labels = imgs.to(para['device']), labels.to(para['device'])
y = tn(imgs)
loss = loss_func(y, labels)
with tc.no_grad():
train_loss += loss.data.item()
loss.backward()
for x in tn.parameters():
if x.ndimension() in [7, 9]:
s = x.shape
# put grad in tangent space
inner = tc.einsum('ac,ac->a', x.data.view(-1, s[-1]),
x.grad.data.view(-1, s[-1]))
grad = x.grad.data.view(-1, s[-1]) - tc.einsum(
'a,ab->ab', inner, x.data.view(-1, s[-1]))
# normalize grad
norm = grad.norm(dim=1, p=2) + 1e-12
grad = tc.einsum('ab,a->ab', grad, 1 / norm)
# print(tc.einsum('ac,ac->a', grad, x.data.view(-1, s[-1])))
x.grad.data = grad.view(s)
if parameters_cnn.__len__() > 0:
optimizer_cnn.step()
optimizer_cnn.zero_grad()
if parameters_btn.__len__() > 0:
optimizer_btn.step()
optimizer_btn.zero_grad()
for x in tn.parameters():
if x.ndimension() in [7, 9]:
s = x.shape
x = x.view(-1, s[-1])
norm = x.data.norm(
dim=1, p=2)
x.data[:, :] = tc.einsum(
'ab,a->ab', x.data, 1 / norm)
x.data = x.data.view(s)
if ((t + 1) % para['check_time']) == 0:
nc0, _ = num_correct(labels, y.data)
nc += nc0
if ((t + 1) % para['check_time']) == 0:
info['train_acc'].append(nc / train_num_tot)
info['train_loss'].append(train_loss)
info['norm_coeff'].append(coeff_norm)
message = str(t + 1) + ': '
message += '\t %.6g' % info['train_loss'][-1]
message += '\t %.6g' % info['train_acc'][-1]
if para['if_test']:
nc = test_accuracy_mnist(
tn, test_loader, para)
info['test_acc'].append(nc / test_num_tot)
message += '\t %.6g' % info['test_acc'][-1]
message += '\t %.6g' % info['norm_coeff'][-1]
log(message)
if ((t+1) % para['save_time']) == 0:
            if np.isnan(train_loss) or np.isinf(train_loss):  # NaN never compares equal to itself
cprint('DO NOT save MPS since NAN/INF appears', color='red')
sys.exit(1)
else:
info['time_1loop'] = time.time() - t_loop
save_tensor_network(tn, para, info,
para['data_path'], para['data_exp'])
log('MPS saved: time cost per epoch = ' + str(info['time_1loop']))
log(titles)
    # test_acc is recorded once every para['check_time'] epochs, so the x-axis
    # must match its length rather than span all iterations
    x = np.arange(1, len(info['test_acc']) + 1) * para['check_time']
    fig = figure()
    plot(x, info['test_acc'])
savefig('../results/' + para['TN'] + '_test_acc.png')
info['time_tot'] = time.time() - t0
log('Total time cost = ' + str(info['time_tot']))
return para['data_path'], para['data_exp']
def parameter_default():
para = dict()
para['project'] = 'CNNBTNhybrid'
para['which_TN_set'] = 'tne' # 'tne' or 'ctnn'
para['TN'] = 'MPS'
para['dataset'] = 'fashion-mnist'
para['classes'] = list(range(10))
para['num_samples'] = ['all'] * para['classes'].__len__()
para['batch_size'] = 3000
para['binary_imgs'] = False
para['cut_size'] = [28, 28]
para['img_size'] = [28, 28]
# to feature cut-off; not usable yet
para['update_f_index'] = False
para['tol_cut_f'] = 1e-12
para['it_time'] = 200
para['lr'] = [1e-4, 1e-2]
para['d'] = 2
para['chi'] = 2
para['linear_gauss_noise'] = None
para['pre_normalize_mps'] = 1
para['normalize_mps'] = False
para['optimizer'] = 'Adam'
para['mps_init'] = 'No.1'
para['feature_map'] = 'taylor'
para['feature_theta'] = 1
para['activate_fun'] = None
para['activate_fun_final'] = None
para['Lagrangian'] = None
para['Lagrangian_way'] = 0
para['norm_p'] = 1
para['loss_func'] = 'CrossEntropyLoss' # MSELoss, CrossEntropyLoss, NLLLoss
para['check_time'] = 2
para['save_time'] = 20
para['if_test'] = True
para['if_load'] = True
para['if_load_smaller_chi'] = True
para['clear_history'] = False
para['normalize_tensors'] = None
para['update_way'] = 'bp'
para['multi_gpu_parallel'] = False
para['log_name'] = 'record'
para['device'] = 'cuda'
para = make_para_consistent(para)
return para
def make_para_consistent(para):
if 'TN' not in para:
para['TN'] = 'MPS'
if 'norm_p' not in para:
para['norm_p'] = 1
if 'binary_imgs' not in para:
para['binary_imgs'] = False
if para['TN'] != 'MPS':
para['normalize_mps'] = False
para['activate_fun'] = None
para['activate_fun_final'] = None
para['data_path'] = './'
if para['feature_map'] == 'fold_2d_order1':
para['img_size'] = [round(para['img_size'][0]/2),
round(para['img_size'][1]/2)]
if para['feature_map'].lower() in ['normalized_linear',
'relsig', 'tansig', 'vsigmoid']:
if para['d'] != 2:
bf.warning('Warning: Inconsistent para[\'d\']=%g to '
'feature map. Please check...' % para['d'])
para['d'] = 2
if para['feature_map'].lower() == 'reltansig':
if para['d'] != 3:
bf.warning('Warning: Inconsistent para[\'d\']=%g to '
'feature map. Please check...' % para['d'])
para['d'] = 3
para['length'] = para['img_size'][0] * para['img_size'][1]
if 'feature_index' not in para:
para['feature_index'] = None
elif para['feature_index'] is not None:
if len(para['feature_index']) > para['length']:
bf.warning('Error: length > len(feature_index).')
sys.exit(1)
elif max(para['feature_index']) > (para['length'] - 1):
bf.warning('Error: feature_index.max() > len(feature_index).')
sys.exit(1)
else:
para['length'] = len(para['feature_index'])
para['channel'] = len(para['classes'])
para['data_exp'] = data_exp_to_save_mps(para)
if (para['device'] != 'cpu') and (not tc.cuda.is_available()):
para['device'] = 'cpu'
bf.warning('Cuda is not available in the device...')
bf.warning('Changed to \'cpu\' instead...')
return para
def data_exp_to_save_mps(para):
exp = para['TN'].upper() + '_L' + str(para['length']) + '_d' + str(para['d']) + '_chi' + \
str(para['chi']) + '_classes' + str(para['classes']) + '_' + \
para['feature_map'] + '_' + para['dataset'].upper()
if para['dataset'].lower() in ['mnist', 'fashion-mnist', 'fashionmnist']:
if (para['cut_size'][0] != 28) or (para['cut_size'][1] != 28):
exp += ('_size' + str(para['cut_size']))
if (para['img_size'][0] != 28) or (para['img_size'][1] != 28):
exp += str(para['img_size'])
elif para['dataset'].lower() in ['cifar10', 'cifar-10']:
if (para['cut_size'][0] != 32) or (para['cut_size'][1] != 32):
exp += ('_size' + str(para['cut_size']))
if (para['img_size'][0] != 32) or (para['img_size'][1] != 32):
exp += str(para['img_size'])
if 'feature_index' in para:
if para['feature_index'] is not None:
exp += '_FindexedNum' + str(len(para['feature_index']))
if para['binary_imgs']:
exp += 'binary'
return exp
def load_saved_tn_smaller_chi_d(para, path1=None):
if para['if_load']:
path = './data/' + para['TN'] + '/'
exp = data_exp_to_save_mps(para)
mps_file = os.path.join(path, exp)
if os.path.isfile(mps_file):
message = 'Load existing ' + para['TN'] + ' data...'
mps, info, _ = load_tensor_network(mps_file, para)
return mps, info, message
elif para['if_load_smaller_chi']:
if path1 is None:
path1 = './data/' + para['TN'] + '_saved/'
chi0 = para['chi']
d0 = para['d']
for d in range(d0, 1, -1):
for chi in range(chi0, 1, -1):
para['d'] = d
para['chi'] = chi
exp = data_exp_to_save_mps(para)
mps_file = os.path.join(path1, exp)
if os.path.isfile(mps_file):
message = 'Load existing ' + para['TN'] + ' with (d, chi) = ' + \
str((para['d'], para['chi']))
para['chi'], para['d'] = chi0, d0
mps, info, _ = load_tensor_network(
mps_file, para)
return mps, info, message
message = 'No existing smaller-chi/d ' + \
para['TN'] + ' found...\n ' \
'Create new ' + para['TN'] + ' data ...'
para['chi'], para['d'] = chi0, d0
return None, None, message
else:
message = 'No existing ' + para['TN'] + ' found...\n Create new ' + \
para['TN'] + ' data ...'
return None, None, message
else:
return None, None, 'Create new ' + para['TN'] + ' data ...'
def mask_x(x, eps_mask, train):
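    # During training, amplitudes at or below eps_mask are zeroed (hard mask)
    # and each local d-dimensional feature vector is L2-renormalized so it
    # remains a valid normalized state; at test time x passes through unchanged.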
if train:
mask = (x.data > eps_mask)
x = x * mask + 1e-12
s = x.shape
norm = x.data.permute(0, 1, 3, 4, 2).reshape(-1, s[2]).norm(dim=1)
norm = norm.reshape(s[0], s[3], s[4])
x = tc.einsum('ncdxy,nxy->ncdxy', x, 1 / norm)
return x
# ==========================================================================================
# Collected hybrid models
def Paras_VL_CNN_BTN_Collected1():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected1'
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected1_BP(TTN_basic):
"""
train_acc test_acc
MNIST(d=4,chi=24) 0.999633 0.9887
f-MNIST(d=4,chi=24) 0.971017 0.8966
f-MNIST(d=4,chi=14) 0.971883 0.8887
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected1_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2by2to1(
1, 1, 4, 4, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x):
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = eval('self.layer' + str(n) + '(x)')
x = x.squeeze()
# print(x.sum(dim=1))
return x
def Paras_VL_CNN_BTN_Collected2():
para = parameter_default()
    para['TN'] = 'VL_CNN_BTN_Collected2'  # must match the VL_CNN_BTN_Collected2_BP class name used by eval()
para['batch_size'] = 600
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected2_BP(TTN_basic):
"""
train_acc test_acc
MNIST
f-MNIST(d=4, chi=24) 0.971217 0.8858
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected2_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 4, kernel_size=3), # 26*26
nn.LayerNorm([4, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.Conv2d(4, 8, kernel_size=3), # 24*24
nn.LayerNorm([8, 24, 24], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 12*12
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=3), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_Pool_2xto1(
1, 1, 32, 1, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2xto1(
1, 1, 16, 1, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2xto1(
1, 1, 8, 1, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer6 = TTN_Pool_2xto1(
1, 1, 4, 1, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer7 = TTN_Pool_2xto1(
1, 1, 2, 1, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer8 = TTN_Pool_2xto1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x):
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(9):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 64, 1, 4, 1).permute(
0, 4, 3, 1, 2)
x = eval('self.layer' + str(n) + '(x)')
x = x.squeeze()
# print(x.sum(dim=1))
return x
def Paras_VL_CNN_BTN_Collected3():
para = parameter_default()
para['TN'] = 'VL_CNN_BTN_Collected3'
para['batch_size'] = 400
para['d'] = 4
para['chi'] = 24
para['feature_map'] = 'cos_sin'
para['normalize_tensors'] = 'norm2'
para['update_way'] = 'rotate'
para['mps_init'] = 'randn'
para['Lagrangian_way'] = 0
para['Lagrangian'] = None
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BTN_Collected3_BP(TTN_basic):
"""
train_acc test_acc
MNIST
f-MNIST(d=4, chi=24) 0.9768 0.8862
"""
def __init__(self, para_tn, tensors=None):
super(VL_CNN_BTN_Collected3_BP, self).__init__(num_layers=6)
theta = 1
self.f_map = para_tn['feature_map']
add_bias = False
pre_process_tensors = 'square' # 'normalize', 'softmax', 'square'
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = TTN_PoolTI_2by2to1(
1, 1, para_tn['d'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer4 = TTN_Pool_2by2to1(
1, 1, 2, 2, para_tn['chi'], para_tn['chi'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.layer5 = TTN_Pool_2by2to1(
1, 1, 1, 1, para_tn['chi'], para_tn['channel'],
para_tn['device'], ini_way=para_tn['mps_init'],
if_pre_proc_T=pre_process_tensors, add_bias=add_bias)
self.input_tensors(tensors)
def forward(self, x):
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(6):
if n == 3:
x = x.reshape(-1, 4)
x = nn.Softmax(dim=1)(x)
x = x.reshape(num, 8, 8, 4, 1).permute(
0, 4, 3, 1, 2)
x = eval('self.layer' + str(n) + '(x)')
x = x.squeeze()
# print(x.sum(dim=1))
return x
def Paras_VL_CNN():
para = parameter_default()
para['TN'] = 'VL_CNN'
para['batch_size'] = 600
para['normalize_tensors'] = 'norm2'
para['mps_init'] = 'randn'
para['check_time'] = 5
para['save_time'] = 1000
para['it_time'] = 1000
para['lr'] = [1e-4, 2e-2]
return para
class VL_CNN_BP(TTN_basic):
"""
train_acc test_acc
MNIST
f-MNIST 0.962283 0.8917
"""
def __init__(self, para_tn):
super(VL_CNN_BP, self).__init__(num_layers=6)
self.layer0 = nn.Sequential(
nn.Conv2d(1, 8, kernel_size=3), # 26*26
nn.LayerNorm([8, 26, 26], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 13*13
).to(device=para_tn['device'])
self.layer1 = nn.Sequential(
nn.Conv2d(8, 32, kernel_size=4), # 10*10
nn.LayerNorm([32, 10, 10], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 5*5
).to(device=para_tn['device'])
self.layer2 = nn.Sequential(
nn.Conv2d(32, 64, kernel_size=2), # 4*4
nn.LayerNorm([64, 4, 4], eps=1e-05, elementwise_affine=True),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=2) # 2*2
).to(device=para_tn['device'])
self.layer3 = nn.Sequential(
nn.Linear(64*4, 64),
nn.ReLU(inplace=True),
nn.Linear(64, para_tn['channel']),
nn.Sigmoid()
).to(device=para_tn['device'])
def forward(self, x):
num = x.shape[0]
x = x.reshape(-1, 1, 28, 28)
for n in range(4):
if n == 3:
x = x.reshape(num, -1)
x = eval('self.layer' + str(n) + '(x)')
x = x.squeeze()
# print(x.sum(dim=1))
return x
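# --- Illustrative sketch (hedged; not part of the original module): the
# 'rotate' update used in tn_multi_classifier_CNNBTN_mnist, reduced to a single
# unit vector. The gradient is projected onto the tangent space of the unit
# sphere, normalized, and the vector is renormalized after the step.
if __name__ == '__main__':
    v = tc.randn(8)
    v = v / v.norm()
    v.requires_grad_(True)
    loss = (v ** 2 * tc.arange(8.0)).sum()
    loss.backward()
    with tc.no_grad():
        g = v.grad - (v.grad @ v) * v  # remove the radial (norm-changing) component
        g = g / (g.norm() + 1e-12)     # unit-length tangent direction
        v -= 0.01 * g
        v /= v.norm()                  # project back onto the unit sphere
    print(v.norm().item())  # ~1.0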
|
py | 1a309747944874819deadeeae08a916c713b861e | # exported from PySB model 'model'
from pysb import Model, Monomer, Parameter, Expression, Compartment, Rule, Observable, Initial, MatchOnce, Annotation, ANY, WILD
Model()
Monomer('Ligand', ['Receptor'])
Monomer('ParpU', ['C3A'])
Monomer('C8A', ['BidU', 'C3pro'])
Monomer('SmacM', ['BaxA'])
Monomer('BaxM', ['BidM', 'BaxA'])
Monomer('Apop', ['C3pro', 'Xiap'])
Monomer('Fadd', ['Receptor', 'C8pro'])
Monomer('SmacC', ['Xiap'])
Monomer('ParpC')
Monomer('Xiap', ['SmacC', 'Apop', 'C3A'])
Monomer('C9')
Monomer('C3ub')
Monomer('C8pro', ['Fadd', 'C6A'])
Monomer('Bcl2', ['BidM', 'BaxA'])
Monomer('C3pro', ['Apop', 'C8A'])
Monomer('CytoCM', ['BaxA'])
Monomer('CytoCC')
Monomer('BaxA', ['BaxM', 'Bcl2', 'BaxA_1', 'BaxA_2', 'SmacM', 'CytoCM'])
Monomer('ApafI')
Monomer('BidU', ['C8A'])
Monomer('BidT')
Monomer('C3A', ['Xiap', 'ParpU', 'C6pro'])
Monomer('ApafA')
Monomer('BidM', ['BaxM', 'Bcl2'])
Monomer('Receptor', ['Ligand', 'Fadd'])
Monomer('C6A', ['C8pro'])
Monomer('C6pro', ['C3A'])
Parameter('bind_0_Ligand_binder_Receptor_binder_target_2kf', 1.0)
Parameter('bind_0_Ligand_binder_Receptor_binder_target_1kr', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_2kf', 1.0)
Parameter('bind_0_Receptor_binder_Fadd_binder_target_1kr', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf', 1.0)
Parameter('substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr', 1.0)
Parameter('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf', 1.0)
Parameter('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf', 1.0)
Parameter('inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf', 1.0)
Parameter('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf', 1.0)
Parameter('inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf', 1.0)
Parameter('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr', 1.0)
Parameter('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kf', 1.0)
Parameter('equilibration_0_BidT_equil_a_BidM_equil_b_1kr', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf', 1.0)
Parameter('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr', 1.0)
Parameter('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf', 1.0)
Parameter('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr', 1.0)
Parameter('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf', 1.0)
Parameter('inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr', 1.0)
Parameter('pore_formation_0_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_0_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_1_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_1_BaxA_pore_1kr', 1.0)
Parameter('pore_formation_2_BaxA_pore_2kf', 1.0)
Parameter('pore_formation_2_BaxA_pore_1kr', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf', 1.0)
Parameter('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr', 1.0)
Parameter('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf', 1.0)
Parameter('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr', 1.0)
Parameter('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf', 1.0)
Parameter('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr', 1.0)
Parameter('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf', 1.0)
Parameter('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr', 1.0)
Parameter('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc', 1.0)
Parameter('Ligand_0', 1000.0)
Parameter('ParpU_0', 1000000.0)
Parameter('C8A_0', 0.0)
Parameter('SmacM_0', 100000.0)
Parameter('BaxM_0', 40000.0)
Parameter('Apop_0', 0.0)
Parameter('Fadd_0', 130000.0)
Parameter('SmacC_0', 0.0)
Parameter('ParpC_0', 0.0)
Parameter('Xiap_0', 82500.0)
Parameter('C9_0', 100000.0)
Parameter('C3ub_0', 0.0)
Parameter('C8pro_0', 130000.0)
Parameter('Bcl2_0', 328000.0)
Parameter('C3pro_0', 21000.0)
Parameter('CytoCM_0', 500000.0)
Parameter('CytoCC_0', 0.0)
Parameter('BaxA_0', 0.0)
Parameter('ApafI_0', 100000.0)
Parameter('BidU_0', 171000.0)
Parameter('BidT_0', 0.0)
Parameter('C3A_0', 0.0)
Parameter('ApafA_0', 0.0)
Parameter('BidM_0', 0.0)
Parameter('Receptor_0', 100.0)
Parameter('C6A_0', 0.0)
Parameter('C6pro_0', 100.0)
Observable('Ligand_obs', Ligand())
Observable('ParpU_obs', ParpU())
Observable('C8A_obs', C8A())
Observable('SmacM_obs', SmacM())
Observable('BaxM_obs', BaxM())
Observable('Apop_obs', Apop())
Observable('Fadd_obs', Fadd())
Observable('SmacC_obs', SmacC())
Observable('ParpC_obs', ParpC())
Observable('Xiap_obs', Xiap())
Observable('C9_obs', C9())
Observable('C3ub_obs', C3ub())
Observable('C8pro_obs', C8pro())
Observable('Bcl2_obs', Bcl2())
Observable('C3pro_obs', C3pro())
Observable('CytoCM_obs', CytoCM())
Observable('CytoCC_obs', CytoCC())
Observable('BaxA_obs', BaxA())
Observable('ApafI_obs', ApafI())
Observable('BidU_obs', BidU())
Observable('BidT_obs', BidT())
Observable('C3A_obs', C3A())
Observable('ApafA_obs', ApafA())
Observable('BidM_obs', BidM())
Observable('Receptor_obs', Receptor())
Observable('C6A_obs', C6A())
Observable('C6pro_obs', C6pro())
Rule('bind_0_Ligand_binder_Receptor_binder_target', Ligand(Receptor=None) + Receptor(Ligand=None, Fadd=None) | Ligand(Receptor=1) % Receptor(Ligand=1, Fadd=None), bind_0_Ligand_binder_Receptor_binder_target_2kf, bind_0_Ligand_binder_Receptor_binder_target_1kr)
Rule('bind_0_Receptor_binder_Fadd_binder_target', Receptor(Ligand=ANY, Fadd=None) + Fadd(Receptor=None, C8pro=None) | Receptor(Ligand=ANY, Fadd=1) % Fadd(Receptor=1, C8pro=None), bind_0_Receptor_binder_Fadd_binder_target_2kf, bind_0_Receptor_binder_Fadd_binder_target_1kr)
Rule('substrate_binding_0_Fadd_catalyzer_C8pro_substrate', Fadd(Receptor=ANY, C8pro=None) + C8pro(Fadd=None, C6A=None) | Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None), substrate_binding_0_Fadd_catalyzer_C8pro_substrate_2kf, substrate_binding_0_Fadd_catalyzer_C8pro_substrate_1kr)
Rule('catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product', Fadd(Receptor=ANY, C8pro=1) % C8pro(Fadd=1, C6A=None) >> Fadd(Receptor=ANY, C8pro=None) + C8A(BidU=None, C3pro=None), catalytic_step_0_Fadd_catalyzer_C8pro_substrate_C8A_product_1kc)
Rule('catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=None, C3pro=None) + BidU(C8A=None) | C8A(BidU=1, C3pro=None) % BidU(C8A=1), catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_2kf, catalysis_0_C8A_catalyzer_BidU_substrate_BidT_product_1kr)
Rule('catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product', C8A(BidU=1, C3pro=None) % BidU(C8A=1) >> C8A(BidU=None, C3pro=None) + BidT(), catalysis_1_C8A_catalyzer_BidU_substrate_BidT_product_1kc)
Rule('conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex', ApafI() + CytoCC() | ApafA(), conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_2kf, conversion_0_CytoCC_subunit_d_ApafI_subunit_c_ApafA_complex_1kr)
Rule('inhibition_0_SmacC_inhibitor_Xiap_inh_target', SmacC(Xiap=None) + Xiap(SmacC=None, Apop=None, C3A=None) | SmacC(Xiap=1) % Xiap(SmacC=1, Apop=None, C3A=None), inhibition_0_SmacC_inhibitor_Xiap_inh_target_2kf, inhibition_0_SmacC_inhibitor_Xiap_inh_target_1kr)
Rule('conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex', ApafA() + C9() | Apop(C3pro=None, Xiap=None), conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_2kf, conversion_0_C9_subunit_d_ApafA_subunit_c_Apop_complex_1kr)
Rule('catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=None, Xiap=None) + C3pro(Apop=None, C8A=None) | Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None), catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_Apop_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product', Apop(C3pro=1, Xiap=None) % C3pro(Apop=1, C8A=None) >> Apop(C3pro=None, Xiap=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_Apop_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('inhibition_0_Xiap_inhibitor_Apop_inh_target', Xiap(SmacC=None, Apop=None, C3A=None) + Apop(C3pro=None, Xiap=None) | Xiap(SmacC=None, Apop=1, C3A=None) % Apop(C3pro=None, Xiap=1), inhibition_0_Xiap_inhibitor_Apop_inh_target_2kf, inhibition_0_Xiap_inhibitor_Apop_inh_target_1kr)
Rule('catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=None) + C3A(Xiap=None, ParpU=None, C6pro=None) | Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None), catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_2kf, catalysis_0_Xiap_catalyzer_C3A_substrate_C3ub_product_1kr)
Rule('catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product', Xiap(SmacC=None, Apop=None, C3A=1) % C3A(Xiap=1, ParpU=None, C6pro=None) >> Xiap(SmacC=None, Apop=None, C3A=None) + C3ub(), catalysis_1_Xiap_catalyzer_C3A_substrate_C3ub_product_1kc)
Rule('catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=None, C6pro=None) + ParpU(C3A=None) | C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1), catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_2kf, catalysis_0_C3A_catalyzer_ParpU_substrate_ParpC_product_1kr)
Rule('catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product', C3A(Xiap=None, ParpU=1, C6pro=None) % ParpU(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + ParpC(), catalysis_1_C3A_catalyzer_ParpU_substrate_ParpC_product_1kc)
Rule('equilibration_0_BidT_equil_a_BidM_equil_b', BidT() | BidM(BaxM=None, Bcl2=None), equilibration_0_BidT_equil_a_BidM_equil_b_1kf, equilibration_0_BidT_equil_a_BidM_equil_b_1kr)
Rule('catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=None, Bcl2=None) + BaxM(BidM=None, BaxA=None) | BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None), catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_2kf, catalysis_0_BidM_catalyzer_BaxM_substrate_BaxA_product_1kr)
Rule('catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product', BidM(BaxM=1, Bcl2=None) % BaxM(BidM=1, BaxA=None) >> BidM(BaxM=None, Bcl2=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), catalysis_1_BidM_catalyzer_BaxM_substrate_BaxA_product_1kc)
Rule('self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxM(BidM=None, BaxA=None) | BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1), self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_2kf, self_catalyze_0_BaxA_self_catalyzer_BaxM_self_substrate_1kr)
Rule('self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate', BaxA(BaxM=1, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) % BaxM(BidM=None, BaxA=1) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), self_catalyze_1_BaxA_self_catalyzer_BaxM_self_substrate_1kc)
Rule('inhibition_0_Bcl2_inhibitor_BidM_inh_target', Bcl2(BidM=None, BaxA=None) + BidM(BaxM=None, Bcl2=None) | Bcl2(BidM=1, BaxA=None) % BidM(BaxM=None, Bcl2=1), inhibition_0_Bcl2_inhibitor_BidM_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BidM_inh_target_1kr)
Rule('inhibition_0_Bcl2_inhibitor_BaxA_inh_target', Bcl2(BidM=None, BaxA=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | Bcl2(BidM=None, BaxA=1) % BaxA(BaxM=None, Bcl2=1, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), inhibition_0_Bcl2_inhibitor_BaxA_inh_target_2kf, inhibition_0_Bcl2_inhibitor_BaxA_inh_target_1kr)
Rule('pore_formation_0_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None), pore_formation_0_BaxA_pore_2kf, pore_formation_0_BaxA_pore_1kr)
Rule('pore_formation_1_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=None, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None), pore_formation_1_BaxA_pore_2kf, pore_formation_1_BaxA_pore_1kr)
Rule('pore_formation_2_BaxA_pore', BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None) + BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None), pore_formation_2_BaxA_pore_2kf, pore_formation_2_BaxA_pore_1kr)
Rule('transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5), transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_2kf, transport_0_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=5, CytoCM=None) % SmacM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + SmacC(Xiap=None), transport_1_BaxA_pore_SmacM_cargo_M_SmacC_cargo_C_1kc)
Rule('transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCM(BaxA=None) | BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5), transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_2kf, transport_0_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kr)
Rule('transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C', BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=5) % CytoCM(BaxA=5) >> BaxA(BaxM=None, Bcl2=None, BaxA_1=4, BaxA_2=1, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=1, BaxA_2=2, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=2, BaxA_2=3, SmacM=None, CytoCM=None) % BaxA(BaxM=None, Bcl2=None, BaxA_1=3, BaxA_2=4, SmacM=None, CytoCM=None) + CytoCC(), transport_1_BaxA_pore_CytoCM_cargo_M_CytoCC_cargo_C_1kc)
Rule('catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=None) + C3pro(Apop=None, C8A=None) | C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1), catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_2kf, catalysis_0_C8A_catalyzer_C3pro_substrate_C3A_product_1kr)
Rule('catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product', C8A(BidU=None, C3pro=1) % C3pro(Apop=None, C8A=1) >> C8A(BidU=None, C3pro=None) + C3A(Xiap=None, ParpU=None, C6pro=None), catalysis_1_C8A_catalyzer_C3pro_substrate_C3A_product_1kc)
Rule('catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=None) + C6pro(C3A=None) | C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1), catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_2kf, catalysis_0_C3A_catalyzer_C6pro_substrate_C6A_product_1kr)
Rule('catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product', C3A(Xiap=None, ParpU=None, C6pro=1) % C6pro(C3A=1) >> C3A(Xiap=None, ParpU=None, C6pro=None) + C6A(C8pro=None), catalysis_1_C3A_catalyzer_C6pro_substrate_C6A_product_1kc)
Rule('catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=None) + C8pro(Fadd=None, C6A=None) | C6A(C8pro=1) % C8pro(Fadd=None, C6A=1), catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_2kf, catalysis_0_C6A_catalyzer_C8pro_substrate_C8A_product_1kr)
Rule('catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product', C6A(C8pro=1) % C8pro(Fadd=None, C6A=1) >> C6A(C8pro=None) + C8A(BidU=None, C3pro=None), catalysis_1_C6A_catalyzer_C8pro_substrate_C8A_product_1kc)
Initial(Ligand(Receptor=None), Ligand_0)
Initial(ParpU(C3A=None), ParpU_0)
Initial(C8A(BidU=None, C3pro=None), C8A_0)
Initial(SmacM(BaxA=None), SmacM_0)
Initial(BaxM(BidM=None, BaxA=None), BaxM_0)
Initial(Apop(C3pro=None, Xiap=None), Apop_0)
Initial(Fadd(Receptor=None, C8pro=None), Fadd_0)
Initial(SmacC(Xiap=None), SmacC_0)
Initial(ParpC(), ParpC_0)
Initial(Xiap(SmacC=None, Apop=None, C3A=None), Xiap_0)
Initial(C9(), C9_0)
Initial(C3ub(), C3ub_0)
Initial(C8pro(Fadd=None, C6A=None), C8pro_0)
Initial(Bcl2(BidM=None, BaxA=None), Bcl2_0)
Initial(C3pro(Apop=None, C8A=None), C3pro_0)
Initial(CytoCM(BaxA=None), CytoCM_0)
Initial(CytoCC(), CytoCC_0)
Initial(BaxA(BaxM=None, Bcl2=None, BaxA_1=None, BaxA_2=None, SmacM=None, CytoCM=None), BaxA_0)
Initial(ApafI(), ApafI_0)
Initial(BidU(C8A=None), BidU_0)
Initial(BidT(), BidT_0)
Initial(C3A(Xiap=None, ParpU=None, C6pro=None), C3A_0)
Initial(ApafA(), ApafA_0)
Initial(BidM(BaxM=None, Bcl2=None), BidM_0)
Initial(Receptor(Ligand=None, Fadd=None), Receptor_0)
Initial(C6A(C8pro=None), C6A_0)
Initial(C6pro(C3A=None), C6pro_0)
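# Hedged usage sketch: integrate the exported model with PySB's SciPy ODE
# backend (the time span and the observable inspected are illustrative choices,
# not part of the export).
if __name__ == '__main__':
    import numpy as np
    from pysb.simulator import ScipyOdeSimulator
    tspan = np.linspace(0, 20000, 101)
    traj = ScipyOdeSimulator(model, tspan=tspan).run()
    print(traj.observables['ParpC_obs'][-1])  # cleaved PARP at the final time point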
|
py | 1a309786af1c9973f12338e6c2279a5915624c25 | from unittest import TestCase
from unittest.mock import patch
from pathlib import Path
from click.testing import CliRunner
from ..management.commands import bump_changelog
from hourglass import changelog
from hourglass.tests.test_changelog import UtilTests
def patch_new_version(version):
return patch.object(bump_changelog, '__version__', version)
def patch_changelog_contents(contents):
return patch.object(changelog, 'get_contents', lambda: contents)
class BumpChangelogTests(TestCase):
@patch_new_version('9.0.0')
@patch_changelog_contents(UtilTests.AFTER_BUMP)
def test_it_reports_error_on_no_release_notes(self):
result = CliRunner().invoke(bump_changelog.command)
self.assertIn('The new release has no release notes', result.output)
self.assertNotEqual(result.exit_code, 0)
@patch_new_version('0.0.1')
@patch_changelog_contents(UtilTests.BEFORE_BUMP)
def test_it_reports_error_if_new_version_is_invalid(self):
result = CliRunner().invoke(bump_changelog.command)
self.assertIn('Please change hourglass/version.py', result.output)
self.assertNotEqual(result.exit_code, 0)
@patch_new_version('9.0.0')
@patch_changelog_contents(UtilTests.BEFORE_BUMP)
def test_it_works(self):
runner = CliRunner()
with runner.isolated_filesystem():
fakelog = Path('fake-changelog.md')
with patch.object(changelog, 'PATH', fakelog):
result = CliRunner().invoke(bump_changelog.command)
self.assertIn('Modifying CHANGELOG.md', result.output)
self.assertEqual(result.exit_code, 0)
with fakelog.open('r', encoding=changelog.ENCODING) as f:
self.assertIn('9.0.0', f.read())
tagmsg = Path('tag-message-v9.0.0.txt')
with tagmsg.open('r', encoding=changelog.ENCODING) as f:
self.assertIn('Fixed some stuff', f.read())
del UtilTests # So our test runner doesn't find and run them.
|
py | 1a30982239f4de85dd80e93c26b92eca80e36f08 | import numpy as np
def vertex_voronoi(mesh):
"""
compute vertex voronoi of a mesh as described in
Meyer, M., Desbrun, M., Schroder, P., Barr, A. (2002).
Discrete differential geometry operators for triangulated 2manifolds.
Visualization and Mathematics, 1..26.
:param mesh: trimesh object
:return: numpy array of shape (mesh.vertices.shape[0],)
"""
Nbv = mesh.vertices.shape[0]
Nbp = mesh.faces.shape[0]
obt_angs = mesh.face_angles > np.pi / 2
obt_poly = obt_angs[:, 0] | obt_angs[:, 1] | obt_angs[:, 2]
print(' -percent polygon with obtuse angle ',
100.0 * len(np.where(obt_poly)[0]) / Nbp)
cot = 1 / np.tan(mesh.face_angles)
vert_voronoi = np.zeros(Nbv)
for ind_p, p in enumerate(mesh.faces):
if obt_poly[ind_p]:
obt_verts = p[obt_angs[ind_p, :]]
vert_voronoi[obt_verts] = vert_voronoi[obt_verts] + \
mesh.area_faces[ind_p] / 2.0
non_obt_verts = p[[not x for x in obt_angs[ind_p, :]]]
vert_voronoi[non_obt_verts] = vert_voronoi[non_obt_verts] + \
mesh.area_faces[ind_p] / 4.0
else:
d0 = np.sum(
np.power(mesh.vertices[p[1], :] - mesh.vertices[p[2], :], 2))
d1 = np.sum(
np.power(mesh.vertices[p[2], :] - mesh.vertices[p[0], :], 2))
d2 = np.sum(
np.power(mesh.vertices[p[0], :] - mesh.vertices[p[1], :], 2))
vert_voronoi[p[0]] = vert_voronoi[p[0]] + \
(d1 * cot[ind_p, 1] + d2 * cot[ind_p, 2]) / 8.0
vert_voronoi[p[1]] = vert_voronoi[p[1]] + \
(d2 * cot[ind_p, 2] + d0 * cot[ind_p, 0]) / 8.0
vert_voronoi[p[2]] = vert_voronoi[p[2]] + \
(d0 * cot[ind_p, 0] + d1 * cot[ind_p, 1]) / 8.0
return vert_voronoi
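if __name__ == '__main__':
    # Hedged sanity check: the per-vertex Voronoi areas partition the surface,
    # so they should sum to the total mesh area (requires the trimesh package).
    import trimesh
    sphere = trimesh.creation.icosphere(subdivisions=2)
    areas = vertex_voronoi(sphere)
    print(np.allclose(areas.sum(), sphere.area))  # expected: True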
|
bzl | 1a3099728db5d040b873b478c2de06aba0688d07 | # Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Skylark rule to generate a Junit4 TestSuite
# Assumes srcs are all .java Test files
# Assumes junit4 is already added to deps by the user.
# See https://github.com/bazelbuild/bazel/issues/1017 for background.
_OUTPUT = """import org.junit.runners.Suite;
import org.junit.runner.RunWith;
@RunWith(Suite.class)
@Suite.SuiteClasses({%s})
public class %s {}
"""
_PREFIXES = ("org", "com", "edu")
def _SafeIndex(j, val):
for i, v in enumerate(j):
if val == v:
return i
return -1
def _AsClassName(fname):
fname = [x.path for x in fname.files.to_list()][0]
toks = fname[:-5].split("/")
findex = -1
for s in _PREFIXES:
findex = _SafeIndex(toks, s)
if findex != -1:
break
if findex == -1:
fail("%s does not contain any of %s" % (fname, _PREFIXES))
return ".".join(toks[findex:]) + ".class"
def _impl(ctx):
classes = ",".join(
[_AsClassName(x) for x in ctx.attr.srcs],
)
ctx.actions.write(output = ctx.outputs.out, content = _OUTPUT % (
classes,
ctx.attr.outname,
))
_GenSuite = rule(
attrs = {
"srcs": attr.label_list(allow_files = True),
"outname": attr.string(),
},
outputs = {"out": "%{name}.java"},
implementation = _impl,
)
POST_JDK8_OPTS = [
# Enforce JDK 8 compatibility on Java 9, see
# https://docs.oracle.com/javase/9/intl/internationalization-enhancements-jdk-9.htm#JSINT-GUID-AF5AECA7-07C1-4E7D-BC10-BC7E73DC6C7F
"-Djava.locale.providers=COMPAT,CLDR,SPI",
"--add-opens=jdk.management/com.sun.management.internal=ALL-UNNAMED",
]
def junit_tests(name, srcs, **kwargs):
s_name = name.replace("-", "_") + "TestSuite"
_GenSuite(
name = s_name,
srcs = srcs,
outname = s_name,
)
jvm_flags = kwargs.get("jvm_flags", [])
jvm_flags = jvm_flags + select({
"//:java9": POST_JDK8_OPTS,
"//:java_next": POST_JDK8_OPTS,
"//conditions:default": [],
})
native.java_test(
name = name,
test_class = s_name,
srcs = srcs + [":" + s_name],
**dict(kwargs, jvm_flags = jvm_flags)
)
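# Hedged usage sketch (BUILD file; the load path and all target names are
# illustrative):
#
#   load("//tools:junit.bzl", "junit_tests")
#
#   junit_tests(
#       name = "server_tests",
#       srcs = glob(["src/test/java/**/*Test.java"]),
#       deps = ["//lib:junit"],
#   )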
|
py | 1a309a51d2ac17f98e6c30813c0f63ef4cabd740 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Swaminathan Vasudevan, Hewlett-Packard.
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
from neutron.common import constants as q_constants
from neutron.db import agentschedulers_db as agent_db
from neutron.db import api as qdbapi
from neutron.db import db_base_plugin_v2 as base_db
from neutron.db import l3_db
from neutron.db import model_base
from neutron.db import models_v2
from neutron.extensions import vpnaas
from neutron.extensions.vpnaas import VPNPluginBase
from neutron import manager
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
class IPsecPeerCidr(model_base.BASEV2):
"""Internal representation of a IPsec Peer Cidrs."""
cidr = sa.Column(sa.String(32), nullable=False, primary_key=True)
ipsec_site_connection_id = sa.Column(
sa.String(36),
sa.ForeignKey('ipsec_site_connections.id',
ondelete="CASCADE"),
primary_key=True)
class IPsecPolicy(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
"""Represents a v2 IPsecPolicy Object."""
__tablename__ = 'ipsecpolicies'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
transform_protocol = sa.Column(sa.Enum("esp", "ah", "ah-esp",
name="ipsec_transform_protocols"),
nullable=False)
auth_algorithm = sa.Column(sa.Enum("sha1",
name="vpn_auth_algorithms"),
nullable=False)
encryption_algorithm = sa.Column(sa.Enum("3des", "aes-128",
"aes-256", "aes-192",
name="vpn_encrypt_algorithms"),
nullable=False)
encapsulation_mode = sa.Column(sa.Enum("tunnel", "transport",
name="ipsec_encapsulations"),
nullable=False)
lifetime_units = sa.Column(sa.Enum("seconds", "kilobytes",
name="vpn_lifetime_units"),
nullable=False)
lifetime_value = sa.Column(sa.Integer, nullable=False)
pfs = sa.Column(sa.Enum("group2", "group5", "group14",
name="vpn_pfs"), nullable=False)
class IKEPolicy(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
"""Represents a v2 IKEPolicy Object."""
__tablename__ = 'ikepolicies'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
auth_algorithm = sa.Column(sa.Enum("sha1",
name="vpn_auth_algorithms"),
nullable=False)
encryption_algorithm = sa.Column(sa.Enum("3des", "aes-128",
"aes-256", "aes-192",
name="vpn_encrypt_algorithms"),
nullable=False)
phase1_negotiation_mode = sa.Column(sa.Enum("main",
name="ike_phase1_mode"),
nullable=False)
lifetime_units = sa.Column(sa.Enum("seconds", "kilobytes",
name="vpn_lifetime_units"),
nullable=False)
lifetime_value = sa.Column(sa.Integer, nullable=False)
ike_version = sa.Column(sa.Enum("v1", "v2", name="ike_versions"),
nullable=False)
pfs = sa.Column(sa.Enum("group2", "group5", "group14",
name="vpn_pfs"), nullable=False)
class IPsecSiteConnection(model_base.BASEV2,
models_v2.HasId, models_v2.HasTenant):
"""Represents a IPsecSiteConnection Object."""
__tablename__ = 'ipsec_site_connections'
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
peer_address = sa.Column(sa.String(64), nullable=False)
peer_id = sa.Column(sa.String(255), nullable=False)
route_mode = sa.Column(sa.String(8), nullable=False)
mtu = sa.Column(sa.Integer, nullable=False)
initiator = sa.Column(sa.Enum("bi-directional", "response-only",
name="vpn_initiators"), nullable=False)
auth_mode = sa.Column(sa.String(16), nullable=False)
psk = sa.Column(sa.String(255), nullable=False)
dpd_action = sa.Column(sa.Enum("hold", "clear",
"restart", "disabled",
"restart-by-peer", name="vpn_dpd_actions"),
nullable=False)
dpd_interval = sa.Column(sa.Integer, nullable=False)
dpd_timeout = sa.Column(sa.Integer, nullable=False)
status = sa.Column(sa.String(16), nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
vpnservice_id = sa.Column(sa.String(36),
sa.ForeignKey('vpnservices.id'),
nullable=False)
ipsecpolicy_id = sa.Column(sa.String(36),
sa.ForeignKey('ipsecpolicies.id'),
nullable=False)
ikepolicy_id = sa.Column(sa.String(36),
sa.ForeignKey('ikepolicies.id'),
nullable=False)
ipsecpolicy = orm.relationship(
IPsecPolicy, backref='ipsec_site_connection')
ikepolicy = orm.relationship(IKEPolicy, backref='ipsec_site_connection')
peer_cidrs = orm.relationship(IPsecPeerCidr,
backref='ipsec_site_connection',
lazy='joined',
cascade='all, delete, delete-orphan')
class VPNService(model_base.BASEV2, models_v2.HasId, models_v2.HasTenant):
"""Represents a v2 VPNService Object."""
name = sa.Column(sa.String(255))
description = sa.Column(sa.String(255))
status = sa.Column(sa.String(16), nullable=False)
admin_state_up = sa.Column(sa.Boolean(), nullable=False)
subnet_id = sa.Column(sa.String(36), sa.ForeignKey('subnets.id'),
nullable=False)
router_id = sa.Column(sa.String(36), sa.ForeignKey('routers.id'),
nullable=False)
subnet = orm.relationship(models_v2.Subnet)
router = orm.relationship(l3_db.Router)
ipsec_site_connections = orm.relationship(
IPsecSiteConnection,
backref='vpnservice',
cascade="all, delete-orphan")
class VPNPluginDb(VPNPluginBase, base_db.CommonDbMixin):
"""VPN plugin database class using SQLAlchemy models."""
def __init__(self):
"""Do the initialization for the vpn service plugin here."""
qdbapi.register_models()
def update_status(self, context, model, v_id, status):
with context.session.begin(subtransactions=True):
v_db = self._get_resource(context, model, v_id)
v_db.update({'status': status})
def _get_resource(self, context, model, v_id):
try:
r = self._get_by_id(context, model, v_id)
except exc.NoResultFound:
if issubclass(model, IPsecSiteConnection):
raise vpnaas.IPsecSiteConnectionNotFound(
ipsec_site_conn_id=v_id
)
elif issubclass(model, IKEPolicy):
raise vpnaas.IKEPolicyNotFound(ikepolicy_id=v_id)
elif issubclass(model, IPsecPolicy):
raise vpnaas.IPsecPolicyNotFound(ipsecpolicy_id=v_id)
elif issubclass(model, VPNService):
raise vpnaas.VPNServiceNotFound(vpnservice_id=v_id)
else:
raise
return r
def assert_update_allowed(self, obj):
status = getattr(obj, 'status', None)
if status != constants.ACTIVE:
            raise vpnaas.VPNStateInvalid(id=obj['id'], state=status)
def _make_ipsec_site_connection_dict(self, ipsec_site_conn, fields=None):
res = {'id': ipsec_site_conn['id'],
'tenant_id': ipsec_site_conn['tenant_id'],
'name': ipsec_site_conn['name'],
'description': ipsec_site_conn['description'],
'peer_address': ipsec_site_conn['peer_address'],
'peer_id': ipsec_site_conn['peer_id'],
'route_mode': ipsec_site_conn['route_mode'],
'mtu': ipsec_site_conn['mtu'],
'auth_mode': ipsec_site_conn['auth_mode'],
'psk': ipsec_site_conn['psk'],
'initiator': ipsec_site_conn['initiator'],
'dpd': {
'action': ipsec_site_conn['dpd_action'],
'interval': ipsec_site_conn['dpd_interval'],
'timeout': ipsec_site_conn['dpd_timeout']
},
'admin_state_up': ipsec_site_conn['admin_state_up'],
'status': ipsec_site_conn['status'],
'vpnservice_id': ipsec_site_conn['vpnservice_id'],
'ikepolicy_id': ipsec_site_conn['ikepolicy_id'],
'ipsecpolicy_id': ipsec_site_conn['ipsecpolicy_id'],
'peer_cidrs': [pcidr['cidr']
for pcidr in ipsec_site_conn['peer_cidrs']]
}
return self._fields(res, fields)
def create_ipsec_site_connection(self, context, ipsec_site_connection):
ipsec_sitecon = ipsec_site_connection['ipsec_site_connection']
dpd = ipsec_sitecon['dpd']
ipsec_sitecon['dpd_action'] = dpd.get('action', 'hold')
ipsec_sitecon['dpd_interval'] = dpd.get('interval', 30)
ipsec_sitecon['dpd_timeout'] = dpd.get('timeout', 120)
tenant_id = self._get_tenant_id_for_create(context, ipsec_sitecon)
if ipsec_sitecon['dpd_timeout'] < ipsec_sitecon['dpd_interval']:
raise vpnaas.IPsecSiteConnectionDpdIntervalValueError(
attribute_a='dpd_timeout')
with context.session.begin(subtransactions=True):
#Check permissions
self._get_resource(context,
VPNService,
ipsec_sitecon['vpnservice_id'])
self._get_resource(context,
IKEPolicy,
ipsec_sitecon['ikepolicy_id'])
self._get_resource(context,
IPsecPolicy,
ipsec_sitecon['ipsecpolicy_id'])
ipsec_site_conn_db = IPsecSiteConnection(
id=uuidutils.generate_uuid(),
tenant_id=tenant_id,
name=ipsec_sitecon['name'],
description=ipsec_sitecon['description'],
peer_address=ipsec_sitecon['peer_address'],
peer_id=ipsec_sitecon['peer_id'],
route_mode='static',
mtu=ipsec_sitecon['mtu'],
auth_mode='psk',
psk=ipsec_sitecon['psk'],
initiator=ipsec_sitecon['initiator'],
dpd_action=ipsec_sitecon['dpd_action'],
dpd_interval=ipsec_sitecon['dpd_interval'],
dpd_timeout=ipsec_sitecon['dpd_timeout'],
admin_state_up=ipsec_sitecon['admin_state_up'],
status=constants.PENDING_CREATE,
vpnservice_id=ipsec_sitecon['vpnservice_id'],
ikepolicy_id=ipsec_sitecon['ikepolicy_id'],
ipsecpolicy_id=ipsec_sitecon['ipsecpolicy_id']
)
context.session.add(ipsec_site_conn_db)
for cidr in ipsec_sitecon['peer_cidrs']:
peer_cidr_db = IPsecPeerCidr(
cidr=cidr,
ipsec_site_connection_id=ipsec_site_conn_db['id']
)
context.session.add(peer_cidr_db)
return self._make_ipsec_site_connection_dict(ipsec_site_conn_db)
def update_ipsec_site_connection(
self, context,
ipsec_site_conn_id, ipsec_site_connection):
ipsec_sitecon = ipsec_site_connection['ipsec_site_connection']
dpd = ipsec_sitecon.get('dpd', {})
if dpd.get('action'):
ipsec_sitecon['dpd_action'] = dpd.get('action')
if dpd.get('interval'):
ipsec_sitecon['dpd_interval'] = dpd.get('interval')
if dpd.get('timeout'):
ipsec_sitecon['dpd_timeout'] = dpd.get('timeout')
changed_peer_cidrs = False
with context.session.begin(subtransactions=True):
ipsec_site_conn_db = self._get_resource(
context,
IPsecSiteConnection,
ipsec_site_conn_id)
self.assert_update_allowed(ipsec_site_conn_db)
if "peer_cidrs" in ipsec_sitecon:
changed_peer_cidrs = True
old_peer_cidr_list = ipsec_site_conn_db['peer_cidrs']
old_peer_cidr_dict = dict(
(peer_cidr['cidr'], peer_cidr)
for peer_cidr in old_peer_cidr_list)
new_peer_cidr_set = set(ipsec_sitecon["peer_cidrs"])
old_peer_cidr_set = set(old_peer_cidr_dict)
new_peer_cidrs = list(new_peer_cidr_set)
for peer_cidr in old_peer_cidr_set - new_peer_cidr_set:
context.session.delete(old_peer_cidr_dict[peer_cidr])
for peer_cidr in new_peer_cidr_set - old_peer_cidr_set:
pcidr = IPsecPeerCidr(
cidr=peer_cidr,
ipsec_site_connection_id=ipsec_site_conn_id)
context.session.add(pcidr)
del ipsec_sitecon["peer_cidrs"]
if ipsec_sitecon:
ipsec_site_conn_db.update(ipsec_sitecon)
result = self._make_ipsec_site_connection_dict(ipsec_site_conn_db)
if changed_peer_cidrs:
result['peer_cidrs'] = new_peer_cidrs
return result
def delete_ipsec_site_connection(self, context, ipsec_site_conn_id):
with context.session.begin(subtransactions=True):
ipsec_site_conn_db = self._get_resource(
context, IPsecSiteConnection, ipsec_site_conn_id
)
context.session.delete(ipsec_site_conn_db)
def get_ipsec_site_connection(self, context,
ipsec_site_conn_id, fields=None):
ipsec_site_conn_db = self._get_resource(
context, IPsecSiteConnection, ipsec_site_conn_id
)
return self._make_ipsec_site_connection_dict(
ipsec_site_conn_db, fields)
def get_ipsec_site_connections(self, context, filters=None, fields=None):
return self._get_collection(context, IPsecSiteConnection,
self._make_ipsec_site_connection_dict,
filters=filters, fields=fields)
def _make_ikepolicy_dict(self, ikepolicy, fields=None):
res = {'id': ikepolicy['id'],
'tenant_id': ikepolicy['tenant_id'],
'name': ikepolicy['name'],
'description': ikepolicy['description'],
'auth_algorithm': ikepolicy['auth_algorithm'],
'encryption_algorithm': ikepolicy['encryption_algorithm'],
'phase1_negotiation_mode': ikepolicy['phase1_negotiation_mode'],
'lifetime': {
'units': ikepolicy['lifetime_units'],
'value': ikepolicy['lifetime_value'],
},
'ike_version': ikepolicy['ike_version'],
'pfs': ikepolicy['pfs']
}
return self._fields(res, fields)
def create_ikepolicy(self, context, ikepolicy):
ike = ikepolicy['ikepolicy']
tenant_id = self._get_tenant_id_for_create(context, ike)
lifetime_info = ike.get('lifetime', [])
lifetime_units = lifetime_info.get('units', 'seconds')
lifetime_value = lifetime_info.get('value', 3600)
with context.session.begin(subtransactions=True):
ike_db = IKEPolicy(
id=uuidutils.generate_uuid(),
tenant_id=tenant_id,
name=ike['name'],
description=ike['description'],
auth_algorithm=ike['auth_algorithm'],
encryption_algorithm=ike['encryption_algorithm'],
phase1_negotiation_mode=ike['phase1_negotiation_mode'],
lifetime_units=lifetime_units,
lifetime_value=lifetime_value,
ike_version=ike['ike_version'],
pfs=ike['pfs']
)
context.session.add(ike_db)
return self._make_ikepolicy_dict(ike_db)
def update_ikepolicy(self, context, ikepolicy_id, ikepolicy):
ike = ikepolicy['ikepolicy']
with context.session.begin(subtransactions=True):
            conn = context.session.query(IPsecSiteConnection).filter_by(
                ikepolicy_id=ikepolicy_id).first()
            if conn:
                raise vpnaas.IKEPolicyInUse(ikepolicy_id=ikepolicy_id)
ike_db = self._get_resource(context, IKEPolicy, ikepolicy_id)
if ike:
lifetime_info = ike.get('lifetime')
if lifetime_info:
if lifetime_info.get('units'):
ike['lifetime_units'] = lifetime_info['units']
if lifetime_info.get('value'):
ike['lifetime_value'] = lifetime_info['value']
ike_db.update(ike)
return self._make_ikepolicy_dict(ike_db)
def delete_ikepolicy(self, context, ikepolicy_id):
with context.session.begin(subtransactions=True):
ikepolicy = context.session.query(IPsecSiteConnection).filter_by(
ikepolicy_id=ikepolicy_id).first()
if ikepolicy:
raise vpnaas.IKEPolicyInUse(ikepolicy_id=ikepolicy_id)
ike_db = self._get_resource(context, IKEPolicy, ikepolicy_id)
context.session.delete(ike_db)
def get_ikepolicy(self, context, ikepolicy_id, fields=None):
ike_db = self._get_resource(context, IKEPolicy, ikepolicy_id)
return self._make_ikepolicy_dict(ike_db, fields)
def get_ikepolicies(self, context, filters=None, fields=None):
return self._get_collection(context, IKEPolicy,
self._make_ikepolicy_dict,
filters=filters, fields=fields)
def _make_ipsecpolicy_dict(self, ipsecpolicy, fields=None):
res = {'id': ipsecpolicy['id'],
'tenant_id': ipsecpolicy['tenant_id'],
'name': ipsecpolicy['name'],
'description': ipsecpolicy['description'],
'transform_protocol': ipsecpolicy['transform_protocol'],
'auth_algorithm': ipsecpolicy['auth_algorithm'],
'encryption_algorithm': ipsecpolicy['encryption_algorithm'],
'encapsulation_mode': ipsecpolicy['encapsulation_mode'],
'lifetime': {
'units': ipsecpolicy['lifetime_units'],
'value': ipsecpolicy['lifetime_value'],
},
'pfs': ipsecpolicy['pfs']
}
return self._fields(res, fields)
def create_ipsecpolicy(self, context, ipsecpolicy):
ipsecp = ipsecpolicy['ipsecpolicy']
tenant_id = self._get_tenant_id_for_create(context, ipsecp)
lifetime_info = ipsecp['lifetime']
lifetime_units = lifetime_info.get('units', 'seconds')
lifetime_value = lifetime_info.get('value', 3600)
with context.session.begin(subtransactions=True):
ipsecp_db = IPsecPolicy(id=uuidutils.generate_uuid(),
tenant_id=tenant_id,
name=ipsecp['name'],
description=ipsecp['description'],
transform_protocol=ipsecp['transform_'
'protocol'],
auth_algorithm=ipsecp['auth_algorithm'],
encryption_algorithm=ipsecp['encryption_'
'algorithm'],
encapsulation_mode=ipsecp['encapsulation_'
'mode'],
lifetime_units=lifetime_units,
lifetime_value=lifetime_value,
pfs=ipsecp['pfs'])
context.session.add(ipsecp_db)
return self._make_ipsecpolicy_dict(ipsecp_db)
def update_ipsecpolicy(self, context, ipsecpolicy_id, ipsecpolicy):
ipsecp = ipsecpolicy['ipsecpolicy']
with context.session.begin(subtransactions=True):
            conn = context.session.query(IPsecSiteConnection).filter_by(
                ipsecpolicy_id=ipsecpolicy_id).first()
            if conn:
                raise vpnaas.IPsecPolicyInUse(ipsecpolicy_id=ipsecpolicy_id)
ipsecp_db = self._get_resource(context,
IPsecPolicy,
ipsecpolicy_id)
if ipsecp:
lifetime_info = ipsecp.get('lifetime')
if lifetime_info:
if lifetime_info.get('units'):
ipsecp['lifetime_units'] = lifetime_info['units']
                    if lifetime_info.get('value'):
ipsecp['lifetime_value'] = lifetime_info['value']
ipsecp_db.update(ipsecp)
return self._make_ipsecpolicy_dict(ipsecp_db)
def delete_ipsecpolicy(self, context, ipsecpolicy_id):
with context.session.begin(subtransactions=True):
ipsecpolicy = context.session.query(IPsecSiteConnection).filter_by(
ipsecpolicy_id=ipsecpolicy_id).first()
if ipsecpolicy:
raise vpnaas.IPsecPolicyInUse(ipsecpolicy_id=ipsecpolicy_id)
ipsec_db = self._get_resource(context, IPsecPolicy, ipsecpolicy_id)
context.session.delete(ipsec_db)
def get_ipsecpolicy(self, context, ipsecpolicy_id, fields=None):
ipsec_db = self._get_resource(context, IPsecPolicy, ipsecpolicy_id)
return self._make_ipsecpolicy_dict(ipsec_db, fields)
def get_ipsecpolicies(self, context, filters=None, fields=None):
return self._get_collection(context, IPsecPolicy,
self._make_ipsecpolicy_dict,
filters=filters, fields=fields)
def _make_vpnservice_dict(self, vpnservice, fields=None):
res = {'id': vpnservice['id'],
'name': vpnservice['name'],
'description': vpnservice['description'],
'tenant_id': vpnservice['tenant_id'],
'subnet_id': vpnservice['subnet_id'],
'router_id': vpnservice['router_id'],
'admin_state_up': vpnservice['admin_state_up'],
'status': vpnservice['status']}
return self._fields(res, fields)
def create_vpnservice(self, context, vpnservice):
vpns = vpnservice['vpnservice']
tenant_id = self._get_tenant_id_for_create(context, vpns)
with context.session.begin(subtransactions=True):
vpnservice_db = VPNService(id=uuidutils.generate_uuid(),
tenant_id=tenant_id,
name=vpns['name'],
description=vpns['description'],
subnet_id=vpns['subnet_id'],
router_id=vpns['router_id'],
admin_state_up=vpns['admin_state_up'],
status=constants.PENDING_CREATE)
context.session.add(vpnservice_db)
return self._make_vpnservice_dict(vpnservice_db)
def update_vpnservice(self, context, vpnservice_id, vpnservice):
vpns = vpnservice['vpnservice']
with context.session.begin(subtransactions=True):
            conn = context.session.query(IPsecSiteConnection).filter_by(
                vpnservice_id=vpnservice_id).first()
            if conn:
                raise vpnaas.VPNServiceInUse(vpnservice_id=vpnservice_id)
vpns_db = self._get_resource(context, VPNService, vpnservice_id)
self.assert_update_allowed(vpns_db)
if vpns:
vpns_db.update(vpns)
return self._make_vpnservice_dict(vpns_db)
def delete_vpnservice(self, context, vpnservice_id):
with context.session.begin(subtransactions=True):
if context.session.query(IPsecSiteConnection).filter_by(
vpnservice_id=vpnservice_id
).first():
raise vpnaas.VPNServiceInUse(vpnservice_id=vpnservice_id)
vpns_db = self._get_resource(context, VPNService, vpnservice_id)
context.session.delete(vpns_db)
def _get_vpnservice(self, context, vpnservice_id):
return self._get_resource(context, VPNService, vpnservice_id)
def get_vpnservice(self, context, vpnservice_id, fields=None):
vpns_db = self._get_resource(context, VPNService, vpnservice_id)
return self._make_vpnservice_dict(vpns_db, fields)
def get_vpnservices(self, context, filters=None, fields=None):
return self._get_collection(context, VPNService,
self._make_vpnservice_dict,
filters=filters, fields=fields)
class VPNPluginRpcDbMixin():
def _get_agent_hosting_vpn_services(self, context, host):
plugin = manager.NeutronManager.get_plugin()
agent = plugin._get_agent_by_type_and_host(
context, q_constants.AGENT_TYPE_L3, host)
if not agent.admin_state_up:
return []
query = context.session.query(VPNService)
query = query.join(IPsecSiteConnection)
query = query.join(IKEPolicy)
query = query.join(IPsecPolicy)
query = query.join(IPsecPeerCidr)
query = query.join(agent_db.RouterL3AgentBinding,
agent_db.RouterL3AgentBinding.router_id ==
VPNService.router_id)
query = query.filter(
agent_db.RouterL3AgentBinding.l3_agent_id == agent.id)
return query
def update_status_on_host(self, context, host, active_services):
with context.session.begin(subtransactions=True):
vpnservices = self._get_agent_hosting_vpn_services(
context, host)
for vpnservice in vpnservices:
if vpnservice.id in active_services:
if vpnservice.status != constants.ACTIVE:
vpnservice.status = constants.ACTIVE
else:
if vpnservice.status != constants.ERROR:
vpnservice.status = constants.ERROR
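# Hedged usage note: create_ipsec_site_connection expects request bodies shaped
# like {'ipsec_site_connection': {..., 'dpd': {'action': ..., 'interval': ...,
# 'timeout': ...}, 'peer_cidrs': [...]}}; 'hold'/30/120 are the defaults applied
# above when dpd keys are omitted.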
|
py | 1a309b0dbd6724579412ec2288ca3428dd5cc8ff | from django.apps import AppConfig
class HomepageConfig(AppConfig):
default_auto_field = 'django.db.models.BigAutoField'
name = 'HomePage'
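# Hedged usage note: activate the app in the project's settings.py, e.g.
#   INSTALLED_APPS = [..., 'HomePage.apps.HomepageConfig']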
|
py | 1a309c0bf5e3492325e1877ea80da1d70cb95bad | #!/usr/bin/env python
import ads1256
import time
import rospy
from std_msgs.msg import Float32
def ReadValues():
    sample_rate = 25  # ADC sampling frequency in Hz
    ads1256.start("1", str(sample_rate))
    pub = rospy.Publisher('/sen_4/ResVal', Float32, tcp_nodelay=False, queue_size=1)
    rospy.init_node('Rheostat', anonymous=True)
    rate = rospy.Rate(10)  # publish loop rate in Hz
while not rospy.is_shutdown():
absoluteValue = ads1256.read_channel(0)
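        # The 100/167 gain factor and microvolt-to-volt division below are
        # assumed to be the author's board-specific calibration.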
voltage = ((absoluteValue*100)/167.0)/1000000.0
rospy.loginfo(voltage)
pub.publish(voltage)
rate.sleep()
ads1256.stop()
if __name__== '__main__':
try:
ReadValues()
except rospy.ROSInterruptException:
pass
|
py | 1a309c4b9d856791dc2b8413bfabe6d8af9e92b0 | from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import HtmlXPathSelector
# from nypbot.utils import normalizeFriendlyDate
from dateutil.parser import parse
import pymysql.cursors
answers = open('answers.csv', 'a')
class AcceptedAnswerSpider(CrawlSpider):
name = "acceptedanswer"
allowed_domains = ["stackoverflow.com"]
start_urls = [
"http://stackoverflow.com/search?q=regular+expression"
]
rules = (
# Extract links matching 'garage-sales-18/.*html' (but not matching 'subsection.php')
# and follow links from them (since no callback means follow=True by default).
Rule(SgmlLinkExtractor(allow=('questions/[0-9]+/.*', )), callback='parse_item', follow=True),
)
def insert_item(self,item):
connection = pymysql.connect(host='localhost',
user='root',
password='root',
db='stackoverflow',
charset='utf8mb4',
cursorclass=pymysql.cursors.DictCursor)
try:
with connection.cursor() as cursor:
# Create a new record
sql = "INSERT INTO `acceptedanswer` (`url`, `pre`, `time_posted`, `author`, `vote`) VALUES (%s, %s, %s, %s, %s)"
cursor.execute(sql, (item['url'], item['pre_text'], item['time_posted'], item['author'], item['vote']))
# connection is not autocommit by default. So you must commit to save
# your changes.
connection.commit()
finally:
connection.close()
return
"""
When writing crawl spider rules, avoid using parse as callback, since the CrawlSpider uses the parse method itself to implement its logic. So if you override the parse method, the crawl spider will no longer work.
"""
def parse_item(self, response):
global answers
hxs = HtmlXPathSelector(response)
posts = hxs.select("//div[@id='answers']/div[@class='answer accepted-answer']")
items = []
for post in posts:
# print(post)
item = {}
item['url'] = response.url
item['pre_text'] = ''.join(post.select(".//div[@class='post-text']//pre//text()").extract())
item['time_posted'] = parse(post.select(".//div[@class='user-action-time']//span/text()").extract()[0])
item['author']= ''.join(post.select(".//div[@class='user-details']//a/text()").extract())
item['vote']=''.join(post.select(".//div[@class='vote']//span[@class='vote-count-post ' or @class='vote-count-post high-scored-post']/text()").extract())
self.insert_item(item)
items.append(item)
# self.insert_posts(items)
#for item in items:
# print >> answers, "%s,'%s'\n" % (item['url'], item['pre_text'])
return items
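# Hedged usage note: run from inside a Scrapy project with
# `scrapy crawl acceptedanswer`; the MySQL credentials in insert_item are the
# author's local defaults and will need adjusting.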
|
py | 1a309d590d3f84ab72befcb77bab4171cb27c91e | from .preprocess import *
from .toolkit import *
name = 'xshinnosuke-utils'
|
py | 1a309e3b08aa0399280cf8acd9dcc3ad6a979bc9 | """
Utility functions used in the logistic regression classifier.
@copyright: The Broad Institute of MIT and Harvard 2015
"""
import numpy as np
def sigmoid(v):
return 1 / (1 + np.exp(-v))
"""Computes a prediction (in the form of probabilities) for the given data vector
"""
def predict(x, theta):
p = sigmoid(np.dot(x, theta))
return np.array([p])
"""Return a function that gives a prediction from a design matrix row
"""
def gen_predictor(params_filename="./models/test/lreg-params"):
with open(params_filename, "rb") as pfile:
lines = pfile.readlines()
N = len(lines)
theta = np.ones(N)
i = 0
for line in lines:
theta[i] = float(line.strip().split(' ')[1])
i = i + 1
def predictor(X):
scores = []
for i in range(0, len(X)):
scores.extend(predict(X[i,:], theta))
return scores
return predictor
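if __name__ == '__main__':
    # Hedged smoke test: write a tiny three-parameter file (name/value pairs,
    # matching the parser above), then score a random 4x3 design matrix.
    with open("/tmp/lreg-params", "w") as out:
        out.write("b0 0.5\nb1 -1.0\nb2 2.0\n")
    predictor = gen_predictor("/tmp/lreg-params")
    print(predictor(np.random.rand(4, 3)))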
|
py | 1a309e67741766cdb11d02c91dcac357612e8e59 | # encoding: utf-8
"""
messagebox.py
Created by David Farrar on 2014-06-10 (or earlier).
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
from exaproxy.util.messagebox import MessageBox
from exaproxy.util.control import ControlBox
class ProxyToRedirectorMessageBox:
def __init__ (self, pid, pipe_in, pipe_out, control_in, control_out):
self.pid = pid
self.box = MessageBox(pipe_in, pipe_out)
self.control = ControlBox(control_in, control_out)
def close (self):
return self.box.close()
def sendRequest (self, client_id, peer, request, subrequest, source):
message = client_id, peer, request, subrequest, source
return self.box.put(message)
def getDecision (self):
message = self.box.get()
if message is not None:
client_id, command, decision = message
else:
client_id, command, decision = None, None, None
return client_id, command, decision
def stop (self):
self.control.send('STOP')
return self.control.wait_stop()
def respawn (self):
self.control.send('RESPAWN')
def decreaseSpawnLimit (self, count=1):
self.control.send('DECREASE', count)
def increaseSpawnLimit (self, count=1):
self.control.send('INCREASE', count)
def getStats (self):
identifier = self.control.send('STATS')
return self.control.receive(identifier)
class RedirectorToProxyMessageBox:
def __init__ (self, pipe_in, pipe_out):
self.box = MessageBox(pipe_in, pipe_out)
def close (self):
return self.box.close()
def isClosed (self):
return self.box.pipe_in.closed
def getRequest (self):
return self.box.get()
def sendResponse (self, client_id, command, decision):
message = client_id, command, decision
return self.box.put(message)
|
py | 1a309f1dd4064d85705312705c4bda4ad271ebcb | import os
import pytest
@pytest.fixture(autouse=True, scope='function')
def setup():
os.environ.pop('PYPINYIN_NO_PHRASES', None)
os.environ.pop('PYPINYIN_NO_DICT_COPY', None)
try:
yield
finally:
os.environ.pop('PYPINYIN_NO_PHRASES', None)
os.environ.pop('PYPINYIN_NO_DICT_COPY', None)
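# Hedged example: because the fixture above is autouse, every test in this
# module starts with both PYPINYIN_* variables unset.
def test_env_starts_clean():
    assert 'PYPINYIN_NO_PHRASES' not in os.environ
    assert 'PYPINYIN_NO_DICT_COPY' not in os.environ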
|
py | 1a30a10b822bc458422e76b0e320768758d78dd3 | import numpy as np
import matplotlib.pyplot as plt
import sectionproperties.pre.pre as pre
import sectionproperties.post.post as post
class Geometry:
"""Parent class for a cross-section geometry input.
Provides an interface for the user to specify the geometry defining a cross-section. A method
is provided for generating a triangular mesh, for translating the cross-section by *(x, y)* and
for plotting the geometry.
:cvar points: List of points *(x, y)* defining the vertices of the cross-section
:vartype points: list[list[float, float]]
:cvar facets: List of point index pairs *(p1, p2)* defining the edges of the cross-section
:vartype facets: list[list[int, int]]
:cvar holes: List of points *(x, y)* defining the locations of holes within the cross-section.
If there are no holes, provide an empty list [].
:vartype holes: list[list[float, float]]
:cvar control_points: A list of points *(x, y)* that define different regions of the
cross-section. A control point is an arbitrary point within a region enclosed by facets.
:vartype control_points: list[list[float, float]]
:cvar shift: Vector that shifts the cross-section by *(x, y)*
:vartype shift: list[float, float]
:cvar perimeter: List of facet indices defining the perimeter of the cross-section
:vartype perimeter: list[int]
"""
def __init__(self, control_points, shift):
"""Inits the Geometry class."""
self.control_points = control_points
self.shift = shift
self.points = []
self.facets = []
self.holes = []
self.perimeter = []
def create_mesh(self, mesh_sizes):
"""Creates a quadratic triangular mesh from the Geometry object.
:param mesh_sizes: A list of maximum element areas corresponding to each region within the
cross-section geometry.
        :type mesh_sizes: list[float]
:return: Object containing generated mesh data
:rtype: :class:`meshpy.triangle.MeshInfo`
:raises AssertionError: If the number of mesh sizes does not match the number of regions
The following example creates a circular cross-section with a diameter of 50 with 64
points, and generates a mesh with a maximum triangular area of 2.5::
import sectionproperties.pre.sections as sections
geometry = sections.CircularSection(d=50, n=64)
mesh = geometry.create_mesh(mesh_sizes=[2.5])
.. figure:: ../images/sections/circle_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
str = "Number of mesh_sizes ({0}), should match the number of regions ({1})".format(
len(mesh_sizes), len(self.control_points)
)
assert(len(mesh_sizes) == len(self.control_points)), str
return pre.create_mesh(
self.points, self.facets, self.holes, self.control_points, mesh_sizes)
def shift_section(self):
"""Shifts the cross-section parameters by the class variable vector *shift*."""
for point in self.points:
point[0] += self.shift[0]
point[1] += self.shift[1]
for hole in self.holes:
hole[0] += self.shift[0]
hole[1] += self.shift[1]
for cp in self.control_points:
cp[0] += self.shift[0]
cp[1] += self.shift[1]
def rotate_section(self, angle, rot_point=None):
"""Rotates the geometry and specified angle about a point. If the rotation point is not
provided, rotates the section about the first control point in the list of control points
of the :class:`~sectionproperties.pre.sections.Geometry` object.
:param float angle: Angle (degrees) by which to rotate the section. A positive angle leads
to a counter-clockwise rotation.
:param rot_point: Point *(x, y)* about which to rotate the section
:type rot_point: list[float, float]
The following example rotates a 200UB25 section clockwise by 30 degrees::
import sectionproperties.pre.sections as sections
geometry = sections.ISection(d=203, b=133, t_f=7.8, t_w=5.8, r=8.9, n_r=8)
geometry.rotate_section(angle=-30)
"""
# convert angle to radians
rot_phi = angle * np.pi / 180
def get_r(pt1, pt2):
"""Returns the distance between two points."""
return ((pt1[0] - pt2[0]) ** 2 + (pt1[1] - pt2[1]) ** 2) ** 0.5
def get_phi(pt1, pt2):
"""Returns the angle between two points."""
return np.arctan2(pt1[1] - pt2[1], pt1[0] - pt2[0])
def rotate_point(pt, rot_point, rot_phi):
"""Rotates a point given a rotation point and rotation angle."""
r = get_r(pt, rot_point)
phi = get_phi(pt, rot_point)
pt[0] = r * np.cos(phi + rot_phi) + rot_point[0]
pt[1] = r * np.sin(phi + rot_phi) + rot_point[1]
# use the first control point if no rotation point is specified
if rot_point is None:
rot_point = self.control_points[0]
# rotate all the points
for point in self.points:
rotate_point(point, rot_point, rot_phi)
# rotate all the holes
for hole in self.holes:
rotate_point(hole, rot_point, rot_phi)
# rotate all the control points
for cp in self.control_points:
rotate_point(cp, rot_point, rot_phi)
def mirror_section(self, axis='x', mirror_point=None):
"""Mirrors the geometry about a point on either the x or y-axis. If no point is provided,
mirrors the geometry about the first control point in the list of control points of the
:class:`~sectionproperties.pre.sections.Geometry` object.
:param string axis: Axis about which to mirror the geometry, *'x'* or *'y'*
:param mirror_point: Point about which to mirror the geometry *(x, y)*
:type mirror_point: list[float, float]
The following example mirrors a 200PFC section about the y-axis and the point (0, 0)::
import sectionproperties.pre.sections as sections
geometry = sections.PfcSection(d=200, b=75, t_f=12, t_w=6, r=12, n_r=8)
geometry.mirror_section(axis='y', mirror_point=[0, 0])
"""
# use the first control point if no mirror point is specified
if mirror_point is None:
mirror_point = self.control_points[0]
# select the axis to mirror
if axis == 'x':
i = 1
elif axis == 'y':
i = 0
else:
raise RuntimeError("Enter a valid axis: 'x' or 'y'")
# mirror all points
for point in self.points:
point[i] = 2 * mirror_point[i] - point[i]
# mirror all holes
for hole in self.holes:
hole[i] = 2 * mirror_point[i] - hole[i]
# mirror all control points
for cp in self.control_points:
cp[i] = 2 * mirror_point[i] - cp[i]
def add_point(self, point):
"""Adds a point to the geometry and returns the added point id.
:param point: Location of the point
:type point: list[float, float]
:return: Point id
:rtype: int
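The following example adds two points and a connecting facet (a sketch assuming
``geometry`` is an existing Geometry object)::
p1 = geometry.add_point([0, 0])
p2 = geometry.add_point([10, 0])
geometry.add_facet([p1, p2])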
"""
self.points.append(point)
return len(self.points) - 1
def add_facet(self, facet):
"""Adds a facet to the geometry and returns the added facet id.
:param facet: Point indices of the facet
:type facet: list[int, int]
:return: Facet id
:rtype: int
"""
self.facets.append(facet)
return len(self.facets) - 1
def add_hole(self, hole):
"""Adds a hole location to the geometry and returns the added hole id.
:param hole: Location of the hole
:type hole: list[float, float]
:return: Hole id
:rtype: int
"""
self.holes.append(hole)
return len(self.holes) - 1
def add_control_point(self, control_point):
"""Adds a control point to the geometry and returns the added control
point id.
:param control_point: Location of the control point
:type control_point: list[float, float]
:return: Control point id
:rtype: int
"""
self.control_points.append(control_point)
return len(self.control_points) - 1
def clean_geometry(self, verbose=False):
"""Peforms a full clean on the geometry.
:param bool verbose: If set to true, information related to the geometry cleaning process
is printed to the terminal.
.. note:: Cleaning the geometry is always recommended when creating a merged section,
which may result in overlapping or intersecting facets, or duplicate nodes.
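The following example cleans a geometry object and prints the cleaning log (a sketch
assuming ``geometry`` is an existing, e.g. merged, Geometry object)::
geometry.clean_geometry(verbose=True)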
"""
# the GeometryCleaner operates on this geometry object in place; rebinding the local
# name simply refers to the same (cleaned) object
self = pre.GeometryCleaner(self, verbose).clean_geometry()
def plot_geometry(self, ax=None, pause=True, labels=False, perimeter=False):
"""Plots the geometry defined by the input section. If no axes object is supplied a new
figure and axis is created.
:param ax: Axes object on which the mesh is plotted
:type ax: :class:`matplotlib.axes.Axes`
:param bool pause: If set to true, the figure pauses the script until the window is closed.
If set to false, the script continues immediately after the window is rendered.
:param bool labels: If set to true, node and facet labels are displayed
:param bool perimeter: If set to true, boldens the perimeter of the cross-section
:return: Matplotlib figure and axes objects (fig, ax)
:rtype: (:class:`matplotlib.figure.Figure`, :class:`matplotlib.axes`)
The following example creates a CHS discretised with 64 points, with a diameter of 48 and
thickness of 3.2, and plots the geometry::
import sectionproperties.pre.sections as sections
geometry = sections.Chs(d=48, t=3.2, n=64)
geometry.plot_geometry()
.. figure:: ../images/sections/chs_geometry.png
:align: center
:scale: 75 %
Geometry generated by the above example.
"""
# if no axes object is supplied, create and setup the plot
if ax is None:
ax_supplied = False
(fig, ax) = plt.subplots()
post.setup_plot(ax, pause)
else:
ax_supplied = True
# retrieve the figure associated with the supplied axes so it can be returned
fig = ax.get_figure()
for (i, f) in enumerate(self.facets):
if perimeter:
if i in self.perimeter:
linewidth = 3
else:
linewidth = 1.5
else:
linewidth = 1.5
# plot the points and facets
if i == 0:
ax.plot([self.points[f[0]][0], self.points[f[1]][0]],
[self.points[f[0]][1], self.points[f[1]][1]],
'ko-', markersize=2, linewidth=linewidth, label='Points & Facets')
else:
ax.plot([self.points[f[0]][0], self.points[f[1]][0]],
[self.points[f[0]][1], self.points[f[1]][1]],
'ko-', markersize=2, linewidth=linewidth)
for (i, h) in enumerate(self.holes):
# plot the holes
if i == 0:
ax.plot(h[0], h[1], 'rx', markersize=5, label='Holes')
else:
ax.plot(h[0], h[1], 'rx', markersize=5)
for (i, cp) in enumerate(self.control_points):
# plot the control points
if i == 0:
ax.plot(cp[0], cp[1], 'bo', markersize=5,
label='Control Points')
else:
ax.plot(cp[0], cp[1], 'bo', markersize=5)
# display the legend
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
# display the labels
if labels:
# plot node labels
for (i, pt) in enumerate(self.points):
ax.annotate(str(i), xy=pt, color='r')
# plot facet labels
for (i, fct) in enumerate(self.facets):
pt1 = self.points[fct[0]]
pt2 = self.points[fct[1]]
xy = [(pt1[0] + pt2[0]) / 2, (pt1[1] + pt2[1]) / 2]
ax.annotate(str(i), xy=xy, color='b')
# if no axes object is supplied, finish the plot
if not ax_supplied:
post.finish_plot(ax, pause, title='Cross-Section Geometry')
return (fig, ax)
def calculate_extents(self):
"""Calculates the minimum and maximum x and y-values amongst the list of points.
:return: Minimum and maximum x and y-values *(x_min, x_max, y_min, y_max)*
:rtype: tuple(float, float, float, float)
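The following example calculates the extents of a 100 deep by 50 wide rectangle, returning
*(0, 50, 0, 100)* (a minimal sketch)::
import sectionproperties.pre.sections as sections
geometry = sections.RectangularSection(d=100, b=50)
(x_min, x_max, y_min, y_max) = geometry.calculate_extents()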
"""
# loop through all points
for (i, pt) in enumerate(self.points):
x = pt[0]
y = pt[1]
# initialise min, max variables
if i == 0:
x_min = x
x_max = x
y_min = y
y_max = y
# update the mins and maxs where necessary
x_min = min(x_min, x)
x_max = max(x_max, x)
y_min = min(y_min, y)
y_max = max(y_max, y)
return (x_min, x_max, y_min, y_max)
def draw_radius(self, pt, r, theta, n, anti=True):
"""Adds a quarter radius of points to the points list - centered at point *pt*, with radius
*r*, starting at angle *theta*, with *n* points. If r = 0, adds pt only.
:param pt: Centre of radius *(x,y)*
:type pt: list[float, float]
:param float r: Radius
:param float theta: Initial angle
:param int n: Number of points
:param bool anti: Anticlockwise rotation?
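For example, ``self.draw_radius([0, 0], 5, 0, 4)`` appends four points tracing a quarter
circle of radius 5 anticlockwise from angle 0 to angle pi/2.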
"""
if r == 0:
self.points.append(pt)
return
if anti:
mult = 1
else:
mult = -1
# calculate radius of points
for i in range(n):
# determine angle
t = theta + mult * i * 1.0 / max(1, n - 1) * np.pi * 0.5
x = pt[0] + r * np.cos(t)
y = pt[1] + r * np.sin(t)
self.points.append([x, y])
def calculate_facet_length(self, facet):
"""Calculates the length of the facet.
:param facet: Point index pair *(p1, p2)* defining a facet
:type facet: list[int, int]
:return: Facet length
:rtype: float
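For example, a facet joining the points *(0, 0)* and *(3, 4)* has a length of 5.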
"""
# get facet points
p1 = self.points[facet[0]]
p2 = self.points[facet[1]]
# calculate distance between two points
return np.sqrt((p2[0] - p1[0]) ** 2 + (p2[1] - p1[1]) ** 2)
def calculate_perimeter(self):
"""Calculates the perimeter of the cross-section by summing the length of all facets in the
``perimeter`` class variable.
:return: Cross-section perimeter, returns 0 if there is no perimeter defined
:rtype: float
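The following example calculates the perimeter of a 100 deep by 50 wide rectangle, which
returns 300 (a minimal sketch)::
import sectionproperties.pre.sections as sections
geometry = sections.RectangularSection(d=100, b=50)
perimeter = geometry.calculate_perimeter()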
"""
# check to see if there are any facets in the perimeter variable
if len(self.perimeter) == 0:
return 0
# initialise perimeter variable
perimeter = 0
# loop through all the facets along the perimeter
for facet_idx in self.perimeter:
perimeter += self.calculate_facet_length(self.facets[facet_idx])
return perimeter
class CustomSection(Geometry):
"""Constructs a cross-section from a list of points, facets, holes and a user specified control
point.
:param points: List of points *(x, y)* defining the vertices of the cross-section
:type points: list[list[float, float]]
:param facets: List of point index pairs *(p1, p2)* defining the edges of the cross-section
:type facets: list[list[int, int]]
:param holes: List of points *(x, y)* defining the locations of holes within the cross-section.
If there are no holes, provide an empty list [].
:type holes: list[list[float, float]]
:param control_points: A list of points *(x, y)* that define different regions of the
cross-section. A control point is an arbitrary point within a region enclosed by facets.
:type control_points: list[list[float, float]]
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
:param perimeter: List of facet indices defining the perimeter of the cross-section
:type perimeter: list[int]
The following example creates a hollow trapezium with a base width of 100, top width of 50,
height of 50 and a wall thickness of 10. A mesh is generated with a maximum triangular area of
2.0::
import sectionproperties.pre.sections as sections
points = [[0, 0], [100, 0], [75, 50], [25, 50], [15, 10], [85, 10], [70, 40], [30, 40]]
facets = [[0, 1], [1, 2], [2, 3], [3, 0], [4, 5], [5, 6], [6, 7], [7, 4]]
holes = [[50, 25]]
control_points = [[5, 5]]
perimeter = [0, 1, 2, 3]
geometry = sections.CustomSection(
points, facets, holes, control_points, perimeter=perimeter
)
mesh = geometry.create_mesh(mesh_sizes=[2.0])
.. figure:: ../images/sections/custom_geometry.png
:align: center
:scale: 75 %
Custom section geometry.
.. figure:: ../images/sections/custom_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, points, facets, holes, control_points, shift=[0, 0], perimeter=[]):
"""Inits the CustomSection class."""
super().__init__(control_points, shift)
self.points = points
self.facets = facets
self.holes = holes
self.perimeter = perimeter
self.shift_section()
class RectangularSection(Geometry):
"""Constructs a rectangular section with the bottom left corner at the origin *(0, 0)*, with
depth *d* and width *b*.
:param float d: Depth (y) of the rectangle
:param float b: Width (x) of the rectangle
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a rectangular cross-section with a depth of 100 and width of 50,
and generates a mesh with a maximum triangular area of 5::
import sectionproperties.pre.sections as sections
geometry = sections.RectangularSection(d=100, b=50)
mesh = geometry.create_mesh(mesh_sizes=[5])
.. figure:: ../images/sections/rectangle_geometry.png
:align: center
:scale: 75 %
Rectangular section geometry.
.. figure:: ../images/sections/rectangle_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, shift=[0, 0]):
"""Inits the RectangularSection class."""
# assign control point
control_points = [[0.5 * b, 0.5 * d]]
super().__init__(control_points, shift)
# construct the points and facets
self.points = [[0, 0], [b, 0], [b, d], [0, d]]
self.facets = [[0, 1], [1, 2], [2, 3], [3, 0]]
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class CircularSection(Geometry):
"""Constructs a solid circle centered at the origin *(0, 0)* with diameter *d* and using *n*
points to construct the circle.
:param float d: Diameter of the circle
:param int n: Number of points discretising the circle
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a circular cross-section with a diameter of 50 with 64 points,
and generates a mesh with a maximum triangular area of 2.5::
import sectionproperties.pre.sections as sections
geometry = sections.CircularSection(d=50, n=64)
mesh = geometry.create_mesh(mesh_sizes=[2.5])
.. figure:: ../images/sections/circle_geometry.png
:align: center
:scale: 75 %
Circular section geometry.
.. figure:: ../images/sections/circle_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, n, shift=[0, 0]):
"""Inits the CircularSection class."""
# assign control point
control_points = [[0, 0]]
super().__init__(control_points, shift)
# loop through each point on the circle
for i in range(n):
# determine polar angle
theta = i * 2 * np.pi * 1.0 / n
# calculate location of the point
x = 0.5 * d * np.cos(theta)
y = 0.5 * d * np.sin(theta)
# append the current point to the points list
self.points.append([x, y])
# if we are not at the last point
if i != n - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the circle
else:
self.facets.append([i, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class Chs(Geometry):
"""Constructs a circular hollow section centered at the origin *(0, 0)*, with diameter *d* and
thickness *t*, using *n* points to construct the inner and outer circles.
:param float d: Outer diameter of the CHS
:param float t: Thickness of the CHS
:param int n: Number of points discretising the inner and outer circles
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a CHS discretised with 64 points, with a diameter of 48 and
thickness of 3.2, and generates a mesh with a maximum triangular area of 1.0::
import sectionproperties.pre.sections as sections
geometry = sections.Chs(d=48, t=3.2, n=64)
mesh = geometry.create_mesh(mesh_sizes=[1.0])
.. figure:: ../images/sections/chs_geometry.png
:align: center
:scale: 75 %
CHS geometry.
.. figure:: ../images/sections/chs_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, t, n, shift=[0, 0]):
"""Inits the Chs class."""
# assign control point
control_points = [[d * 0.5 - t * 0.5, 0]]
super().__init__(control_points, shift)
# specify a hole in the centre of the CHS
self.holes = [[0, 0]]
# loop through each point of the CHS
for i in range(n):
# determine polar angle
theta = i * 2 * np.pi * 1.0 / n
# calculate location of outer and inner points
x_outer = 0.5 * d * np.cos(theta)
y_outer = 0.5 * d * np.sin(theta)
x_inner = (0.5 * d - t) * np.cos(theta)
y_inner = (0.5 * d - t) * np.sin(theta)
# append the current points to the points list
self.points.append([x_outer, y_outer])
self.points.append([x_inner, y_inner])
# if we are not at the last point
if i != n - 1:
self.facets.append([i * 2, i * 2 + 2])
self.facets.append([i * 2 + 1, i * 2 + 3])
# if we are at the last point, complete the circle
else:
self.facets.append([i * 2, 0])
self.facets.append([i * 2 + 1, 1])
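# the outer facets occupy the even indices of the facet list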
self.perimeter = list(range(0, len(self.facets), 2))
self.shift_section()
class EllipticalSection(Geometry):
"""Constructs a solid ellipse centered at the origin *(0, 0)* with vertical diameter *d_y* and
horizontal diameter *d_x*, using *n* points to construct the ellipse.
:param float d_y: Diameter of the ellipse in the y-dimension
:param float d_x: Diameter of the ellipse in the x-dimension
:param int n: Number of points discretising the ellipse
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates an elliptical cross-section with a vertical diameter of 25 and
horizontal diameter of 50, with 40 points, and generates a mesh with a maximum triangular area
of 1.0::
import sectionproperties.pre.sections as sections
geometry = sections.EllipticalSection(d_y=25, d_x=50, n=40)
mesh = geometry.create_mesh(mesh_sizes=[1.0])
.. figure:: ../images/sections/ellipse_geometry.png
:align: center
:scale: 75 %
Elliptical section geometry.
.. figure:: ../images/sections/ellipse_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d_y, d_x, n, shift=[0, 0]):
"""Inits the EllipticalSection class."""
# assign control point centered at zero
control_points = [[0, 0]]
super().__init__(control_points, shift)
# loop through each point on the ellipse
for i in range(n):
# determine polar angle
theta = i * 2 * np.pi * 1.0 / n
# calculate location of the point
x = 0.5 * d_x * np.cos(theta)
y = 0.5 * d_y * np.sin(theta)
# append the current point to the points list
self.points.append([x, y])
# if we are not at the last point
if i != n - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the ellipse
else:
self.facets.append([i, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class Ehs(Geometry):
"""Constructs an elliptical hollow section centered at the origin *(0, 0)*, with outer vertical
diameter *d_y*, outer horizontal diameter *d_x*, and thickness *t*, using *n* points to
construct the inner and outer ellipses.
:param float d_y: Outer diameter of the EHS in the y-dimension
:param float d_x: Outer diameter of the EHS in the x-dimension
:param float t: Thickness of the EHS
:param int n: Number of points discretising the inner and outer ellipses
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates an EHS discretised with 64 points, with an outer vertical
diameter of 25, an outer horizontal diameter of 50 and a thickness of 2.0, and generates a mesh with a
maximum triangular area of 0.5::
import sectionproperties.pre.sections as sections
geometry = sections.Ehs(d_y=25, d_x=50, t=2.0, n=64)
mesh = geometry.create_mesh(mesh_sizes=[0.5])
.. figure:: ../images/sections/ehs_geometry.png
:align: center
:scale: 75 %
EHS geometry.
.. figure:: ../images/sections/ehs_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d_y, d_x, t, n, shift=[0, 0]):
"""Inits the Ehs class."""
# assign control point
control_points = [[(d_x * 0.5) - (t * 0.5), 0]]
super().__init__(control_points, shift)
# specify a hole in the centre of the EHS
self.holes = [[0, 0]]
# loop through each point of the EHS
for i in range(n):
# determine polar angle
theta = i * 2 * np.pi * 1.0 / n
# calculate location of outer and inner points
x_outer = 0.5 * d_x * np.cos(theta)
y_outer = 0.5 * d_y * np.sin(theta)
x_inner = ((0.5 * d_x) - t) * np.cos(theta)
y_inner = ((0.5 * d_y) - t) * np.sin(theta)
# append the current points to the points list
self.points.append([x_outer, y_outer])
self.points.append([x_inner, y_inner])
# if we are not at the last point
if i != n - 1:
self.facets.append([i * 2, i * 2 + 2])
self.facets.append([i * 2 + 1, i * 2 + 3])
# if we are at the last point, complete the circle
else:
self.facets.append([i * 2, 0])
self.facets.append([i * 2 + 1, 1])
self.perimeter = list(range(0, len(self.facets), 2))
self.shift_section()
class Rhs(Geometry):
"""Constructs a rectangular hollow section centered at *(b/2, d/2)*, with depth *d*, width *b*,
thickness *t* and outer radius *r_out*, using *n_r* points to construct the inner and outer
radii. If the outer radius is less than the thickness of the RHS, the inner radius is set to
zero.
:param float d: Depth of the RHS
:param float b: Width of the RHS
:param float t: Thickness of the RHS
:param float r_out: Outer radius of the RHS
:param int n_r: Number of points discretising the inner and outer radii
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates an RHS with a depth of 100, a width of 50, a thickness of 6 and
an outer radius of 9, using 8 points to discretise the inner and outer radii. A mesh is
generated with a maximum triangular area of 2.0::
import sectionproperties.pre.sections as sections
geometry = sections.Rhs(d=100, b=50, t=6, r_out=9, n_r=8)
mesh = geometry.create_mesh(mesh_sizes=[2.0])
.. figure:: ../images/sections/rhs_geometry.png
:align: center
:scale: 75 %
RHS geometry.
.. figure:: ../images/sections/rhs_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t, r_out, n_r, shift=[0, 0]):
"""Inits the Rhs class."""
# assign control point
control_points = [[b - t * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# specify a hole in the centre of the RHS
self.holes = [[b * 0.5, d * 0.5]]
# calculate internal radius
r_in = max(r_out - t, 0)
# construct the outer radius points
self.draw_radius([r_out, r_out], r_out, np.pi, n_r)
self.draw_radius([b - r_out, r_out], r_out, 1.5 * np.pi, n_r)
self.draw_radius([b - r_out, d - r_out], r_out, 0, n_r)
self.draw_radius([r_out, d - r_out], r_out, 0.5 * np.pi, n_r)
# construct the outer radius facet list
n_outer = len(self.points)
for i in range(n_outer):
# if we are not at the last point
if i != n_outer - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([i, 0])
# construct the inner radius points
self.draw_radius([t + r_in, t + r_in], r_in, np.pi, n_r)
self.draw_radius([b - t - r_in, t + r_in], r_in, 1.5 * np.pi, n_r)
self.draw_radius([b - t - r_in, d - t - r_in], r_in, 0, n_r)
self.draw_radius([t + r_in, d - t - r_in], r_in, 0.5 * np.pi, n_r)
# construct the inner radius facet list
n_inner = len(self.points) - n_outer
for i in range(n_inner):
# if we are not at the last point
if i != n_inner - 1:
self.facets.append([i + n_outer, i + n_outer + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([i + n_outer, n_outer])
self.perimeter = list(range(int(len(self.facets) / 2)))
self.shift_section()
class ISection(Geometry):
"""Constructs an I-section centered at *(b/2, d/2)*, with depth *d*, width *b*, flange
thickness *t_f*, web thickness *t_w*, and root radius *r*, using *n_r* points to construct the
root radius.
:param float d: Depth of the I-section
:param float b: Width of the I-section
:param float t_f: Flange thickness of the I-section
:param float t_w: Web thickness of the I-section
:param float r: Root radius of the I-section
:param int n_r: Number of points discretising the root radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates an I-section with a depth of 203, a width of 133, a flange
thickness of 7.8, a web thickness of 5.8 and a root radius of 8.9, using 16 points to
discretise the root radius. A mesh is generated with a maximum triangular area of 3.0::
import sectionproperties.pre.sections as sections
geometry = sections.ISection(d=203, b=133, t_f=7.8, t_w=5.8, r=8.9, n_r=16)
mesh = geometry.create_mesh(mesh_sizes=[3.0])
.. figure:: ../images/sections/isection_geometry.png
:align: center
:scale: 75 %
I-section geometry.
.. figure:: ../images/sections/isection_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t_f, t_w, r, n_r, shift=[0, 0]):
"""Inits the ISection class."""
# assign control point
control_points = [[b * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# add first three points
self.points.append([0, 0])
self.points.append([b, 0])
self.points.append([b, t_f])
# construct the bottom right radius
pt = [b * 0.5 + t_w * 0.5 + r, t_f + r]
self.draw_radius(pt, r, 1.5 * np.pi, n_r, False)
# construct the top right radius
pt = [b * 0.5 + t_w * 0.5 + r, d - t_f - r]
self.draw_radius(pt, r, np.pi, n_r, False)
# add the next four points
self.points.append([b, d - t_f])
self.points.append([b, d])
self.points.append([0, d])
self.points.append([0, d - t_f])
# construct the top left radius
pt = [b * 0.5 - t_w * 0.5 - r, d - t_f - r]
self.draw_radius(pt, r, 0.5 * np.pi, n_r, False)
# construct the bottom left radius
pt = [b * 0.5 - t_w * 0.5 - r, t_f + r]
self.draw_radius(pt, r, 0, n_r, False)
# add the last point
self.points.append([0, t_f])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class MonoISection(Geometry):
"""Constructs a monosymmetric I-section centered at *(max(b_t, b_b)/2, d/2)*, with depth *d*,
top flange width *b_t*, bottom flange width *b_b*, top flange thickness *t_ft*, bottom flange
thickness *t_fb*, web thickness *t_w*, and root radius *r*, using *n_r* points to construct the
root radius.
:param float d: Depth of the I-section
:param float b_t: Top flange width
:param float b_b: Bottom flange width
:param float t_ft: Top flange thickness of the I-section
:param float t_fb: Bottom flange thickness of the I-section
:param float t_w: Web thickness of the I-section
:param float r: Root radius of the I-section
:param int n_r: Number of points discretising the root radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a monosymmetric I-section with a depth of 200, a top flange width
of 50, a top flange thickness of 12, a bottom flange width of 130, a bottom flange thickness of
8, a web thickness of 6 and a root radius of 8, using 16 points to discretise the root radius.
A mesh is generated with a maximum triangular area of 3.0::
import sectionproperties.pre.sections as sections
geometry = sections.MonoISection(
d=200, b_t=50, b_b=130, t_ft=12, t_fb=8, t_w=6, r=8, n_r=16
)
mesh = geometry.create_mesh(mesh_sizes=[3.0])
.. figure:: ../images/sections/monoisection_geometry.png
:align: center
:scale: 75 %
I-section geometry.
.. figure:: ../images/sections/monoisection_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b_t, b_b, t_fb, t_ft, t_w, r, n_r, shift=[0, 0]):
"""Inits the ISection class."""
# assign control point
control_points = [[max(b_t, b_b) * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# calculate central axis
x_central = max(b_t, b_b) * 0.5
# add first three points
self.points.append([x_central - b_b * 0.5, 0])
self.points.append([x_central + b_b * 0.5, 0])
self.points.append([x_central + b_b * 0.5, t_fb])
# construct the bottom right radius
pt = [x_central + t_w * 0.5 + r, t_fb + r]
self.draw_radius(pt, r, 1.5 * np.pi, n_r, False)
# construct the top right radius
pt = [x_central + t_w * 0.5 + r, d - t_ft - r]
self.draw_radius(pt, r, np.pi, n_r, False)
# add the next four points
self.points.append([x_central + b_t * 0.5, d - t_ft])
self.points.append([x_central + b_t * 0.5, d])
self.points.append([x_central - b_t * 0.5, d])
self.points.append([x_central - b_t * 0.5, d - t_ft])
# construct the top left radius
pt = [x_central - t_w * 0.5 - r, d - t_ft - r]
self.draw_radius(pt, r, 0.5 * np.pi, n_r, False)
# construct the bottom left radius
pt = [x_central - t_w * 0.5 - r, t_fb + r]
self.draw_radius(pt, r, 0, n_r, False)
# add the last point
self.points.append([x_central - b_b * 0.5, t_fb])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class TaperedFlangeISection(Geometry):
"""Constructs a Tapered Flange I-section centered at *(b/2, d/2)*, with depth *d*, width *b*,
mid-flange thickness *t_f*, web thickness *t_w*, root radius *r_r*, flange radius *r_f* and
flange angle *alpha*, using *n_r* points to construct the radii.
:param float d: Depth of the Tapered Flange I-section
:param float b: Width of the Tapered Flange I-section
:param float t_f: Mid-flange thickness of the Tapered Flange I-section (measured at the point
equidistant from the face of the web to the edge of the flange)
:param float t_w: Web thickness of the Tapered Flange I-section
:param float r_r: Root radius of the Tapered Flange I-section
:param float r_f: Flange radius of the Tapered Flange I-section
:param float alpha: Flange angle of the Tapered Flange I-section (degrees)
:param int n_r: Number of points discretising the radii
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a Tapered Flange I-section with a depth of 588, a width of 191, a
mid-flange thickness of 27.2, a web thickness of 15.2, a root radius of 17.8, a flange radius
of 8.9 and a flange angle of 8°, using 16 points to discretise the radii. A mesh is generated
with a maximum triangular area of 20.0::
import sectionproperties.pre.sections as sections
geometry = sections.TaperedFlangeISection(
d=588, b=191, t_f=27.2, t_w=15.2, r_r=17.8, r_f=8.9, alpha=8, n_r=16
)
mesh = geometry.create_mesh(mesh_sizes=[20.0])
.. figure:: ../images/sections/taperedisection_geometry.png
:align: center
:scale: 75 %
I-section geometry.
.. figure:: ../images/sections/taperedisection_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t_f, t_w, r_r, r_f, alpha, n_r, shift=[0, 0]):
"""Inits the ISection class."""
# assign control point
control_points = [[b * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# calculate alpha in radians
alpha_rad = np.pi * alpha / 180
# calculate the height of the flange toe and dimensions of the straight
x1 = b * 0.25 - t_w * 0.25 - r_f * (1 - np.sin(alpha_rad))
y1 = x1 * np.tan(alpha_rad)
x2 = b * 0.25 - t_w * 0.25 - r_r * (1 - np.sin(alpha_rad))
y2 = x2 * np.tan(alpha_rad)
y_t = t_f - y1 - r_f * np.cos(alpha_rad)
# add first two points
self.points.append([0, 0])
self.points.append([b, 0])
# construct the bottom right flange toe radius
if r_f == 0:
self.points.append([b, y_t])
else:
for i in range(n_r):
# determine polar angle
theta = i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b - r_f + r_f * np.cos(theta)
y = y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the bottom right root radius
if r_r == 0:
self.points.append([b * 0.5 + t_w * 0.5, t_f + y2])
else:
for i in range(n_r):
# determine polar angle
theta = (3.0 / 2 * np.pi - alpha_rad) - i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b * 0.5 + t_w * 0.5 + r_r + r_r * np.cos(theta)
y = t_f + y2 + r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the top right root radius
if r_r == 0:
self.points.append([b * 0.5 + t_w * 0.5, d - t_f - y2])
else:
for i in range(n_r):
# determine polar angle
theta = np.pi - i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b * 0.5 + t_w * 0.5 + r_r + r_r * np.cos(theta)
y = d - t_f - y2 - r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the top right flange toe radius
if r_f == 0:
self.points.append([b, d - y_t])
else:
for i in range(n_r):
# determine polar angle
theta = (3.0 * np.pi / 2 + alpha_rad) + i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b - r_f + r_f * np.cos(theta)
y = d - y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# add the next two points
self.points.append([b, d])
self.points.append([0, d])
# construct the top left flange toe radius
if r_f == 0:
self.points.append([0, d - y_t])
else:
for i in range(n_r):
# determine polar angle
theta = np.pi + (i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad))
# calculate the locations of the radius points
x = r_f + r_f * np.cos(theta)
y = d - y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the top left root radius
if r_r == 0:
self.points.append([b * 0.5 - t_w * 0.5, d - t_f - y2])
else:
for i in range(n_r):
# determine polar angle
theta = (np.pi * 0.5 - alpha_rad) - i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b * 0.5 - t_w * 0.5 - r_r + r_r * np.cos(theta)
y = d - t_f - y2 - r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the bottom left root radius
if r_r == 0:
self.points.append([b * 0.5 - t_w * 0.5, t_f + y2])
else:
for i in range(n_r):
# determine polar angle
theta = -i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b * 0.5 - t_w * 0.5 - r_r + r_r * np.cos(theta)
y = t_f + y2 + r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the bottom left flange toe radius
if r_f == 0:
self.points.append([0, y_t])
else:
for i in range(n_r):
# determine polar angle
theta = (np.pi * 0.5 + alpha_rad) + i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = r_f + r_f * np.cos(theta)
y = y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class PfcSection(Geometry):
"""Constructs a PFC section with the bottom left corner at the origin *(0, 0)*, with depth *d*,
width *b*, flange thickness *t_f*, web thickness *t_w* and root radius *r*, using *n_r* points
to construct the root radius.
:param float d: Depth of the PFC section
:param float b: Width of the PFC section
:param float t_f: Flange thickness of the PFC section
:param float t_w: Web thickness of the PFC section
:param float r: Root radius of the PFC section
:param int n_r: Number of points discretising the root radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a PFC section with a depth of 250, a width of 90, a flange
thickness of 15, a web thickness of 8 and a root radius of 12, using 8 points to discretise the
root radius. A mesh is generated with a maximum triangular area of 5.0::
import sectionproperties.pre.sections as sections
geometry = sections.PfcSection(d=250, b=90, t_f=15, t_w=8, r=12, n_r=8)
mesh = geometry.create_mesh(mesh_sizes=[5.0])
.. figure:: ../images/sections/pfc_geometry.png
:align: center
:scale: 75 %
PFC geometry.
.. figure:: ../images/sections/pfc_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t_f, t_w, r, n_r, shift=[0, 0]):
"""Inits the PfcSection class."""
# assign control point
control_points = [[t_w * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# add first three points
self.points.append([0, 0])
self.points.append([b, 0])
self.points.append([b, t_f])
# construct the bottom right radius
pt = [t_w + r, t_f + r]
self.draw_radius(pt, r, 1.5 * np.pi, n_r, False)
# construct the top right radius
pt = [t_w + r, d - t_f - r]
self.draw_radius(pt, r, np.pi, n_r, False)
# add last three points
self.points.append([b, d - t_f])
self.points.append([b, d])
self.points.append([0, d])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class TaperedFlangeChannel(Geometry):
"""Constructs a Tapered Flange Channel section with the bottom left corner at the origin
*(0, 0)*, with depth *d*, width *b*, mid-flange thickness *t_f*, web thickness *t_w*, root
radius *r_r*, flange radius *r_f* and flange angle *alpha*, using *n_r* points to construct the
radii.
:param float d: Depth of the Tapered Flange Channel section
:param float b: Width of the Tapered Flange Channel section
:param float t_f: Mid-flange thickness of the Tapered Flange Channel section (measured at the
point equidistant from the face of the web to the edge of the flange)
:param float t_w: Web thickness of the Tapered Flange Channel section
:param float r_r: Root radius of the Tapered Flange Channel section
:param float r_f: Flange radius of the Tapered Flange Channel section
:param float alpha: Flange angle of the Tapered Flange Channel section (degrees)
:param int n_r: Number of points discretising the radii
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a Tapered Flange Channel section with a depth of 10, a width of
3.5, a mid-flange thickness of 0.575, a web thickness of 0.475, a root radius of 0.575, a
flange radius of 0.4 and a flange angle of 8°, using 16 points to discretise the radii. A mesh
is generated with a maximum triangular area of 0.02::
import sectionproperties.pre.sections as sections
geometry = sections.TaperedFlangeChannel(
d=10, b=3.5, t_f=0.575, t_w=0.475, r_r=0.575, r_f=0.4, alpha=8, n_r=16
)
mesh = geometry.create_mesh(mesh_sizes=[0.02])
.. figure:: ../images/sections/taperedchannel_geometry.png
:align: center
:scale: 75 %
I-section geometry.
.. figure:: ../images/sections/taperedchannel_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t_f, t_w, r_r, r_f, alpha, n_r, shift=[0, 0]):
"""Inits the ISection class."""
# assign control point
control_points = [[t_w * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# calculate alpha in radians
alpha_rad = np.pi * alpha / 180
# calculate the height of the flange toe and dimensions of the straight
x1 = b * 0.5 - t_w * 0.5 - r_f * (1 - np.sin(alpha_rad))
y1 = x1 * np.tan(alpha_rad)
x2 = b * 0.5 - t_w * 0.5 - r_r * (1 - np.sin(alpha_rad))
y2 = x2 * np.tan(alpha_rad)
y_t = t_f - y1 - r_f * np.cos(alpha_rad)
# add first two points
self.points.append([0, 0])
self.points.append([b, 0])
# construct the bottom right flange toe radius
if r_f == 0:
self.points.append([b, y_t])
else:
for i in range(n_r):
# determine polar angle
theta = i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b - r_f + r_f * np.cos(theta)
y = y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the bottom right root radius
if r_r == 0:
self.points.append([t_w, t_f + y2])
else:
for i in range(n_r):
# determine polar angle
theta = (3.0 / 2 * np.pi - alpha_rad) - i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = t_w + r_r + r_r * np.cos(theta)
y = t_f + y2 + r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the top right root radius
if r_r == 0:
self.points.append([t_w, d - t_f - y2])
else:
for i in range(n_r):
# determine polar angle
theta = np.pi - i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = t_w + r_r + r_r * np.cos(theta)
y = d - t_f - y2 - r_r * np.cos(alpha_rad) + r_r * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# construct the top right flange toe radius
if r_f == 0:
self.points.append([b, d - y_t])
else:
for i in range(n_r):
# determine polar angle
theta = (3.0 * np.pi / 2 + alpha_rad) + i * 1.0 / max(1, n_r - 1) * (np.pi * 0.5 - alpha_rad)
# calculate the locations of the radius points
x = b - r_f + r_f * np.cos(theta)
y = d - y_t + r_f * np.sin(theta)
# append the current points to the points list
self.points.append([x, y])
# add the final two points
self.points.append([b, d])
self.points.append([0, d])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class TeeSection(Geometry):
"""Constructs a Tee section with the top left corner at *(0, d)*, with depth *d*, width *b*,
flange thickness *t_f*, web thickness *t_w* and root radius *r*, using *n_r* points to
construct the root radius.
:param float d: Depth of the Tee section
:param float b: Width of the Tee section
:param float t_f: Flange thickness of the Tee section
:param float t_w: Web thickness of the Tee section
:param float r: Root radius of the Tee section
:param int n_r: Number of points discretising the root radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a Tee section with a depth of 200, a width of 100, a flange
thickness of 12, a web thickness of 6 and a root radius of 8, using 8 points to discretise the
root radius. A mesh is generated with a maximum triangular area of 3.0::
import sectionproperties.pre.sections as sections
geometry = sections.TeeSection(d=200, b=100, t_f=12, t_w=6, r=8, n_r=8)
mesh = geometry.create_mesh(mesh_sizes=[3.0])
.. figure:: ../images/sections/tee_geometry.png
:align: center
:scale: 75 %
Tee section geometry.
.. figure:: ../images/sections/tee_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b, t_f, t_w, r, n_r, shift=[0, 0]):
"""Inits the TeeSection class."""
# assign control point
control_points = [[b * 0.5, d - t_f * 0.5]]
super().__init__(control_points, shift)
# add first two points
self.points.append([b * 0.5 - t_w * 0.5, 0])
self.points.append([b * 0.5 + t_w * 0.5, 0])
# construct the top right radius
pt = [b * 0.5 + t_w * 0.5 + r, d - t_f - r]
self.draw_radius(pt, r, np.pi, n_r, False)
# add next four points
self.points.append([b, d - t_f])
self.points.append([b, d])
self.points.append([0, d])
self.points.append([0, d - t_f])
# construct the top left radius
pt = [b * 0.5 - t_w * 0.5 - r, d - t_f - r]
self.draw_radius(pt, r, 0.5 * np.pi, n_r, False)
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class AngleSection(Geometry):
"""Constructs an angle section with the bottom left corner at the origin *(0, 0)*, with depth
*d*, width *b*, thickness *t*, root radius *r_r* and toe radius *r_t*, using *n_r* points to
construct the radii.
:param float d: Depth of the angle section
:param float b: Width of the angle section
:param float t: Thickness of the angle section
:param float r_r: Root radius of the angle section
:param float r_t: Toe radius of the angle section
:param int n_r: Number of points discretising the radii
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates an angle section with a depth of 150, a width of 100, a thickness
of 8, a root radius of 12 and a toe radius of 5, using 16 points to discretise the radii. A
mesh is generated with a maximum triangular area of 2.0::
import sectionproperties.pre.sections as sections
geometry = sections.AngleSection(d=150, b=100, t=8, r_r=12, r_t=5, n_r=16)
mesh = geometry.create_mesh(mesh_sizes=[2.0])
.. figure:: ../images/sections/angle_geometry.png
:align: center
:scale: 75 %
Angle section geometry.
.. figure:: ../images/sections/angle_mesh.png
:align: center
:scale: 75 %
"""
def __init__(self, d, b, t, r_r, r_t, n_r, shift=[0, 0]):
"""Inits the AngleSection class."""
# assign control point
control_points = [[t * 0.5, t * 0.5]]
super().__init__(control_points, shift)
# add first two points
self.points.append([0, 0])
self.points.append([b, 0])
# construct the bottom toe radius
pt = [b - r_t, t - r_t]
self.draw_radius(pt, r_t, 0, n_r)
# construct the root radius
pt = [t + r_r, t + r_r]
self.draw_radius(pt, r_r, 1.5 * np.pi, n_r, False)
# construct the top toe radius
pt = [t - r_t, d - r_t]
self.draw_radius(pt, r_t, 0, n_r)
# add the next point
self.points.append([0, d])
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class CeeSection(Geometry):
"""Constructs a Cee section with the bottom left corner at the origin *(0, 0)*, with depth *d*,
width *b*, lip *l*, thickness *t* and outer radius *r_out*, using *n_r* points to construct the
radius. If the outer radius is less than the thickness of the Cee Section, the inner radius is
set to zero.
:param float d: Depth of the Cee section
:param float b: Width of the Cee section
:param float l: Lip of the Cee section
:param float t: Thickness of the Cee section
:param float r_out: Outer radius of the Cee section
:param int n_r: Number of points discretising the outer radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
:raises Exception: Lip length must be greater than or equal to the outer radius
The following example creates a Cee section with a depth of 125, a width of 50, a lip of 30, a
thickness of 1.5 and an outer radius of 6, using 8 points to discretise the radius. A mesh is
generated with a maximum triangular area of 0.25::
import sectionproperties.pre.sections as sections
geometry = sections.CeeSection(d=125, b=50, l=30, t=1.5, r_out=6, n_r=8)
mesh = geometry.create_mesh(mesh_sizes=[0.25])
.. figure:: ../images/sections/cee_geometry.png
:align: center
:scale: 75 %
Cee section geometry.
.. figure:: ../images/sections/cee_mesh.png
:align: center
:scale: 75 %
"""
def __init__(self, d, b, l, t, r_out, n_r, shift=[0, 0]):
"""Inits the CeeSection class."""
# ensure the lip length is at least as large as the outer radius
if l < r_out:
raise Exception('Lip length must be greater than or equal to the outer radius')
# assign control point
control_points = [[t * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# calculate internal radius
r_in = max(r_out - t, 0)
# construct the outer bottom left radius
self.draw_radius([r_out, r_out], r_out, np.pi, n_r)
# construct the outer bottom right radius
self.draw_radius([b - r_out, r_out], r_out, 1.5 * np.pi, n_r)
if r_out != l:
# add next two points
self.points.append([b, l])
self.points.append([b - t, l])
# construct the inner bottom right radius
self.draw_radius([b - t - r_in, t + r_in], r_in, 0, n_r, False)
# construct the inner bottom left radius
self.draw_radius([t + r_in, t + r_in], r_in, 1.5 * np.pi, n_r, False)
# construct the inner top left radius
self.draw_radius([t + r_in, d - t - r_in], r_in, np.pi, n_r, False)
# construct the inner top right radius
self.draw_radius(
[b - t - r_in, d - t - r_in], r_in, 0.5 * np.pi, n_r, False)
if r_out != l:
# add next two points
self.points.append([b - t, d - l])
self.points.append([b, d - l])
# construct the outer top right radius
self.draw_radius([b - r_out, d - r_out], r_out, 0, n_r)
# construct the outer top left radius
self.draw_radius([r_out, d - r_out], r_out, 0.5 * np.pi, n_r)
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class ZedSection(Geometry):
"""Constructs a Zed section with the bottom left corner at the origin *(0, 0)*, with depth *d*,
left flange width *b_l*, right flange width *b_r*, lip *l*, thickness *t* and outer radius
*r_out*, using *n_r* points to construct the radius. If the outer radius is less than the
thickness of the Zed Section, the inner radius is set to zero.
:param float d: Depth of the Zed section
:param float b_l: Left flange width of the Zed section
:param float b_r: Right flange width of the Zed section
:param float l: Lip of the Zed section
:param float t: Thickness of the Zed section
:param float r_out: Outer radius of the Zed section
:param int n_r: Number of points discretising the outer radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
:raises Exception: Lip length must be greater than or equal to the outer radius
The following example creates a Zed section with a depth of 100, a left flange width of 40, a
right flange width of 50, a lip of 20, a thickness of 1.2 and an outer radius of 5, using 8
points to discretise the radius. A mesh is generated with a maximum triangular area of 0.15::
import sectionproperties.pre.sections as sections
geometry = sections.ZedSection(d=100, b_l=40, b_r=50, l=20, t=1.2, r_out=5, n_r=8)
mesh = geometry.create_mesh(mesh_sizes=[0.15])
.. figure:: ../images/sections/zed_geometry.png
:align: center
:scale: 75 %
Zed section geometry.
.. figure:: ../images/sections/zed_mesh.png
:align: center
:scale: 75 %
"""
def __init__(self, d, b_l, b_r, l, t, r_out, n_r, shift=[0, 0]):
"""Inits the ZedSection class."""
# ensure the lip length is at least as large as the outer radius
if l < r_out:
raise Exception('Lip length must be greater than or equal to the outer radius')
# assign control point
control_points = [[t * 0.5, d * 0.5]]
super().__init__(control_points, shift)
# calculate internal radius
r_in = max(r_out - t, 0)
# construct the outer bottom left radius
self.draw_radius([r_out, r_out], r_out, np.pi, n_r)
# construct the outer bottom right radius
self.draw_radius([b_r - r_out, r_out], r_out, 1.5 * np.pi, n_r)
if r_out != l:
# add next two points
self.points.append([b_r, l])
self.points.append([b_r - t, l])
# construct the inner bottom right radius
self.draw_radius([b_r - t - r_in, t + r_in], r_in, 0, n_r, False)
# construct the inner bottom left radius
self.draw_radius([t + r_in, t + r_in], r_in, 1.5 * np.pi, n_r, False)
# construct the outer top right radius
self.draw_radius([t - r_out, d - r_out], r_out, 0, n_r)
# construct the outer top left radius
self.draw_radius([t - b_l + r_out, d - r_out], r_out, 0.5 * np.pi, n_r)
if r_out != l:
# add the next two points
self.points.append([t - b_l, d - l])
self.points.append([t - b_l + t, d - l])
# construct the inner top left radius
self.draw_radius([2 * t - b_l + r_in, d - t - r_in], r_in, np.pi, n_r, False)
# construct the inner top right radius
self.draw_radius([-r_in, d - t - r_in], r_in, 0.5 * np.pi, n_r, False)
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class CruciformSection(Geometry):
"""Constructs a cruciform section centered at the origin *(0, 0)*, with depth *d*, width *b*,
thickness *t* and root radius *r*, using *n_r* points to construct the root radius.
:param float d: Depth of the cruciform section
:param float b: Width of the cruciform section
:param float t: Thickness of the cruciform section
:param float r: Root radius of the cruciform section
:param int n_r: Number of points discretising the root radius
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
The following example creates a cruciform section with a depth of 250, a width of 175, a
thickness of 12 and a root radius of 16, using 16 points to discretise the radius. A mesh is
generated with a maximum triangular area of 5.0::
import sectionproperties.pre.sections as sections
geometry = sections.CruciformSection(d=250, b=175, t=12, r=16, n_r=16)
mesh = geometry.create_mesh(mesh_sizes=[5.0])
.. figure:: ../images/sections/cruciform_geometry.png
:align: center
:scale: 75 %
Cruciform section geometry.
.. figure:: ../images/sections/cruciform_mesh.png
:align: center
:scale: 75 %
"""
def __init__(self, d, b, t, r, n_r, shift=[0, 0]):
"""Inits the CruciformSection class."""
# assign control point
control_points = [[0, 0]]
super().__init__(control_points, shift)
# add first two points
self.points.append([-t * 0.5, -d * 0.5])
self.points.append([t * 0.5, -d * 0.5])
# construct the bottom right radius
pt = [0.5 * t + r, -0.5 * t - r]
self.draw_radius(pt, r, np.pi, n_r, False)
# add the next two points
self.points.append([0.5 * b, -t * 0.5])
self.points.append([0.5 * b, t * 0.5])
# construct the top right radius
pt = [0.5 * t + r, 0.5 * t + r]
self.draw_radius(pt, r, 1.5 * np.pi, n_r, False)
# add the next two points
self.points.append([t * 0.5, 0.5 * d])
self.points.append([-t * 0.5, 0.5 * d])
# construct the top left radius
pt = [-0.5 * t - r, 0.5 * t + r]
self.draw_radius(pt, r, 0, n_r, False)
# add the next two points
self.points.append([-0.5 * b, t * 0.5])
self.points.append([-0.5 * b, -t * 0.5])
# construct the bottom left radius
pt = [-0.5 * t - r, -0.5 * t - r]
self.draw_radius(pt, r, 0.5 * np.pi, n_r, False)
# build the facet list
for i in range(len(self.points)):
# if we are not at the last point
if i != len(self.points) - 1:
self.facets.append([i, i + 1])
# if we are at the last point, complete the loop
else:
self.facets.append([len(self.points) - 1, 0])
self.perimeter = list(range(len(self.facets)))
self.shift_section()
class PolygonSection(Geometry):
"""Constructs a regular hollow polygon section centered at *(0, 0)*, with a pitch circle
diameter of bounding polygon *d*, thickness *t*, number of sides *n_sides* and an optional
inner radius *r_in*, using *n_r* points to construct the inner and outer radii (if radii is
specified).
:param float d: Pitch circle diameter of the outer bounding polygon (i.e. diameter of circle
that passes through all vertices of the outer polygon)
:param float t: Thickness of the polygon section wall
:param float r_in: Inner radius of the polygon corners. By default, if not specified, a polygon
with no corner radii is generated.
:param int n_r: Number of points discretising the inner and outer radii, ignored if no inner
radius is specified
:param float rot: Initial counterclockwise rotation in degrees. By default, the bottom face is
aligned with the x-axis.
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
:raises Exception: Number of sides in polygon must be greater than or equal to 3
The following example creates an Octagonal section (8 sides) with a diameter of 200, a
thickness of 6 and an inner radius of 20, using 12 points to discretise the inner and outer
radii. A mesh is generated with a maximum triangular area of 5::
import sectionproperties.pre.sections as sections
geometry = sections.PolygonSection(d=200, t=6, n_sides=8, r_in=20, n_r=12)
mesh = geometry.create_mesh(mesh_sizes=[5])
.. figure:: ../images/sections/polygon_geometry.png
:align: center
:scale: 75 %
Octagonal section geometry.
.. figure:: ../images/sections/polygon_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, t, n_sides, r_in=0, n_r=1, rot=0, shift=[0, 0]):
"""Inits the PolygonSection class."""
if n_sides < 3:
msg = 'n_sides required to be greater than or equal to 3 for PolygonSection class'
raise Exception(msg)
# initial rotation
rot = rot * np.pi / 180 # radians
# determine triangular segment angle
alpha = 2 * np.pi / n_sides # radians
# determine distance from origin to point perpendicular on face of side
a_out = d / 2 * np.cos(alpha / 2)
a_in = a_out - t
# determine side length for outer & inner faces neglecting radii
side_length_out = d * np.sin(alpha / 2)
side_length_in = a_in / a_out * side_length_out
# check limit on internal radii, if exceeded then radii merge to circle
if r_in > a_in:
r_in = a_in
circle = True
else:
circle = False
# calculate external radius, if r_in is zero, r_out also is zero
if r_in == 0:
r_out = 0
n_r = 1
else:
r_out = r_in + t
# equivalent side length of half the corner radii triangular segment
c_out = r_out * (side_length_out / 2) / a_out
c_in = r_in * (side_length_in / 2) / a_in
# determine straight side length between corner radii (if present)
side_length_straight_out = side_length_out - (2 * c_out)
side_length_straight_in = side_length_in - (2 * c_in)
# assign control point central on bottom side length & rotate to initial rotation specified
control_points = [self.rotate([0, -a_out + t / 2], rot)]
super().__init__(control_points, shift)
# temp list for repeating geometry
base_points = []
# specify a hole in the centre of the Polygon section
self.holes = [[0, 0]]
        # start at the bottom face, construct one corner radius, then rotate by the initial
        # rotation + alpha and repeat n_sides times to form the full section perimeter
# construct the first radius (bottom right)
for i in range(n_r):
# determine polar angle
theta = 1 / 2 * np.pi + i * 1.0 / max(1, n_r - 1) * alpha
# calculate location of inner and outer points
x_outer = side_length_straight_out / 2 - r_out * np.cos(theta)
y_outer = -a_out + r_out - r_out * np.sin(theta)
x_inner = side_length_straight_in / 2 - r_in * np.cos(theta)
y_inner = -a_in + r_in - r_in * np.sin(theta)
# append the current temporary points to the temporary points list
base_points.append([x_outer, y_outer])
base_points.append([x_inner, y_inner])
        # if the radii merged into a circle, skip the last point as it overlaps the first,
        # which causes meshing issues if the geometry is not cleaned by the user
if circle:
base_points = base_points[0:-2]
# iterate and add subsequent corner radii one point at a time for each side
for i in range(n_sides):
for point in base_points:
point_new = self.rotate(point, alpha * i + rot)
self.points.append(point_new)
# build the facet list
num_points = int(len(self.points) / 2)
for i in range(num_points):
# if we are not at the last point
if i != num_points - 1:
self.facets.append([i * 2, i * 2 + 2])
self.facets.append([i * 2 + 1, i * 2 + 3])
# if we are at the last point, complete the loop
else:
self.facets.append([i * 2, 0])
self.facets.append([i * 2 + 1, 1])
self.perimeter = list(range(0, len(self.facets), 2))
self.shift_section()
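    # Note on the facet loop above (explanatory comment, not original code):
    # self.points alternates outer points (even indices) and inner points (odd
    # indices), so facets [i*2, i*2+2] trace the outer perimeter while facets
    # [i*2+1, i*2+3] trace the inner one; self.perimeter keeps only the
    # even-numbered (outer) facets.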
def rotate(self, point, angle):
"""
Rotate a point counterclockwise by a given angle around origin [0, 0]
:param list point: Point coordinates to be rotated
:param float angle: Angle to rotate point coordinates
:return: Coordinates of rotated point
:rtype: list[float, float]
"""
pt_x, pt_y = point
c = np.cos(angle)
s = np.sin(angle)
new_x = c * pt_x - s * pt_y
new_y = s * pt_x + c * pt_y
return [new_x, new_y]
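    # A minimal usage sketch (illustrative only; the instance values are
    # assumptions): the method applies the standard 2D rotation matrix
    # [[cos(a), -sin(a)], [sin(a), cos(a)]], so rotating [1, 0] by np.pi / 2
    # returns approximately [0.0, 1.0]:
    #
    #   geometry = PolygonSection(d=200, t=6, n_sides=8)
    #   geometry.rotate([1, 0], np.pi / 2)  # ~[6.1e-17, 1.0]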
class BoxGirderSection(Geometry):
"""Constructs a Box Girder section centered at at *(max(b_t, b_b)/2, d/2)*, with depth *d*, top
width *b_t*, bottom width *b_b*, top flange thickness *t_ft*, bottom flange thickness *t_fb*
and web thickness *t_w*.
:param float d: Depth of the Box Girder section
:param float b_t: Top width of the Box Girder section
:param float b_b: Bottom width of the Box Girder section
    :param float t_ft: Top flange thickness of the Box Girder section
:param float t_fb: Bottom flange thickness of the Box Girder section
:param float t_w: Web thickness of the Box Girder section
:param shift: Vector that shifts the cross-section by *(x, y)*
:type shift: list[float, float]
    The following example creates a Box Girder section with a depth of 1200, a top width of
    1200, a bottom width of 400, a top flange thickness of 100, a bottom flange thickness of 80
    and a web thickness of 50. A mesh is generated with a maximum triangular area of 200.0::
import sectionproperties.pre.sections as sections
geometry = sections.BoxGirderSection(d=1200, b_t=1200, b_b=400, t_ft=100, t_fb=80, t_w=50)
mesh = geometry.create_mesh(mesh_sizes=[200.0])
.. figure:: ../images/sections/box_girder_geometry.png
:align: center
:scale: 75 %
Box Girder geometry.
.. figure:: ../images/sections/box_girder_mesh.png
:align: center
:scale: 75 %
Mesh generated from the above geometry.
"""
def __init__(self, d, b_t, b_b, t_ft, t_fb, t_w, shift=[0, 0]):
"""Inits the BoxGirderSection class."""
# assign control point
control_points = [[max(b_t, b_b) * 0.5, t_fb * 0.5]]
super().__init__(control_points, shift)
# calculate central axis
x_c = max(b_t, b_b) * 0.5
# specify a hole in the centre of the Box Girder
self.holes = [[x_c, d * 0.5]]
# determine side wall angle
if b_t < b_b:
phi_b = np.arctan2(d, 0.5 * (b_b - b_t))
phi_t = np.pi - phi_b
else:
phi_t = np.arctan2(d, 0.5 * (b_t - b_b))
phi_b = np.pi - phi_t
# determine inner wall x-offsets
x_bot = t_fb / np.tan(np.pi - phi_b)
x_top = t_ft / np.tan(np.pi - phi_t)
web_x = abs(t_w / np.sin(np.pi - phi_b))
# add outer points
self.points.append([x_c - 0.5 * b_b, 0])
self.points.append([x_c + 0.5 * b_b, 0])
self.points.append([x_c + 0.5 * b_t, d])
self.points.append([x_c - 0.5 * b_t, d])
# add inner points
self.points.append([x_c - 0.5 * b_b - x_bot + web_x, t_fb])
self.points.append([x_c + 0.5 * b_b + x_bot - web_x, t_fb])
self.points.append([x_c + 0.5 * b_t + x_top - web_x, d - t_ft])
self.points.append([x_c - 0.5 * b_t - x_top + web_x, d - t_ft])
# build facet list
self.facets = [[0, 1], [1, 2], [2, 3], [3, 0], [4, 5], [5, 6], [6, 7], [7, 4]]
self.perimeter = [0, 1, 2, 3]
self.shift_section()
class MergedSection(Geometry):
"""Merges a number of section geometries into one geometry. Note that for the meshing algorithm
to work, there needs to be connectivity between all regions of the provided geometries.
Overlapping of geometries is permitted.
:param sections: A list of geometry objects to merge into one
:class:`~sectionproperties.pre.sections.Geometry` object
:type sections: list[:class:`~sectionproperties.pre.sections.Geometry`]
The following example creates a combined cross-section with a 150x100x6 RHS placed on its side
on top of a 200UB25.4. A mesh is generated with a maximum triangle size of 5.0 for the
I-section and 2.5 for the RHS::
import sectionproperties.pre.sections as sections
isection = sections.ISection(d=203, b=133, t_f=7.8, t_w=5.8, r=8.9, n_r=8)
box = sections.Rhs(d=100, b=150, t=6, r_out=15, n_r=8, shift=[-8.5, 203])
geometry = sections.MergedSection([isection, box])
geometry.clean_geometry()
mesh = geometry.create_mesh(mesh_sizes=[5.0, 2.5])
.. figure:: ../images/sections/merged_geometry.png
:align: center
:scale: 75 %
Merged section geometry.
.. figure:: ../images/sections/merged_mesh.png
:align: center
:scale: 75 %
"""
def __init__(self, sections):
"""Inits the MergedSection class."""
super().__init__([], [0, 0])
point_count = 0
# loop through all sections
for section in sections:
# add facets
for facet in section.facets:
self.facets.append([facet[0] + point_count, facet[1] + point_count])
# add points and count points
for point in section.points:
self.points.append([point[0], point[1]])
point_count += 1
# add holes
for hole in section.holes:
self.holes.append([hole[0], hole[1]])
# add control points
for control_point in section.control_points:
self.control_points.append([control_point[0], control_point[1]])
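        # Worked example of the renumbering above (hypothetical sizes): if the
        # first section contributes 4 points, point_count equals 4 when the
        # second section is processed, so its facet [0, 1] is stored as [4, 5]
        # in the merged geometry.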
|
py | 1a30a24a49950f8e2efef573c74f8e3e2976113d | #!/usr/bin/env python
"""
Fetch descriptions from NCBI given file with gene names.
Intended to use on genes from Gene2Products.need-curating.txt
from funannotate annotate formatted as single column, new line
separated text file.
Outputs 2 column TSV ready for update-gene2products.py
Usage: python grab_gene_descriptions.py <genes.txt> <outfile.txt>
Cam Gilchrist
2018-05-29
"""
import sys
from Bio import Entrez
from collections import Counter
# *Always* tell NCBI who you are
Entrez.email = "[email protected]"
def read_genes(gene_file):
"""Read in list of gene names from \n separated text file and
return list."""
genes = []
    with open(gene_file, 'r') as genefile:
for gene in genefile:
gene = gene.strip()
genes.append(gene)
return(genes)
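# A minimal sketch of the two-step Entrez flow used below (the gene name is an
# illustrative assumption): ESearch maps a '[Gene Name]' query to UIDs, and
# ESummary maps those UIDs to document summaries.
#
#   handle = Entrez.esearch(db='gene', term='BRCA1[Gene Name]', retmax=5)
#   uids = Entrez.read(handle)['IdList']; handle.close()
#   handle = Entrez.esummary(db='gene', id=','.join(uids))
#   summaries = Entrez.read(handle); handle.close()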
def retrieve_descriptions(gene, descriptions, empties):
"""Given single gene name, grab possible descriptions from NCBI
and prompt user to select one"""
# Perform ESearch and grab list of IDs
query = gene + '[Gene Name]'
handle = Entrez.esearch(db='gene', term=query,
retmax=100,
retmode='xml')
record = Entrez.read(handle)
handle.close()
idlist = ','.join(record["IdList"])
# Ensure you have results, exit if not
if idlist == '':
print('No records for {}, skipping...\n'.format(gene))
empties.append(gene)
return
# Generate summary from UID list
handle = Entrez.esummary(db='gene', id=idlist)
record = Entrez.read(handle)
handle.close()
# Grab description, counter for unique values
desc_cnt = Counter()
doc_sums = record[u'DocumentSummarySet'][u'DocumentSummary']
for i in range(len(doc_sums)):
if doc_sums[i][u'NomenclatureName'] != '':
desc = doc_sums[i][u'NomenclatureName']
else:
desc = doc_sums[i][u'OtherDesignations'].split('|')[0]
desc_cnt[desc] += 1
# Create list from counter keys for indexing purposes
    # list() is needed so that len() and indexing work under Python 3
    desc_list = list(filter(None, desc_cnt))
if len(desc_cnt) > 1:
print('{} has {} unique descriptions from {} results. These are:'.format(
gene, len(desc_list), len(doc_sums)))
ans_range = range(len(desc_list))
for i in ans_range:
            print('{}: {} [{}/{}]'.format(i+1, desc_list[i], desc_cnt[desc_list[i]], len(doc_sums)))
# Take user input to accept/reject a description
while True:
            ans = input('Which do you accept? [{}-{}/N]: '.format(
                min(ans_range)+1, max(ans_range)+1))
# Check if int or str entered
try:
ans = int(ans)-1
if ans in ans_range:
print('Accepting #{}.\n'.format(ans+1))
descriptions[gene] = desc_list[ans]
break
else:
print('{} is outside acceptable range. Try again.'.format(
ans))
            except ValueError:
if ans in ['N', 'n', 'no', 'No']:
print('Skipping this gene.\n')
break
else:
print('Invalid input, try again.')
# If there's only one unique description, accept/reject
elif len(desc_cnt) == 1:
desc_list2 = list(desc_cnt)
desc = desc_list2[0]
if desc == '':
print('{} has empty description.'.format(gene))
empties.append(gene)
return
print('{} only has one unique description from {} results.'.format(
gene, len(doc_sums)))
print('This is:\n{}'.format(desc))
while True:
            ans = input('Accept? Y/N: ')
if ans in ['Y', 'y', 'yes', 'Yes']:
print('Description accepted.\n')
descriptions[gene] = desc
break
elif ans in ['N', 'n', 'no', 'No']:
print('Skipping this gene.\n')
empties.append(gene)
break
else:
print('Invalid input, try again.')
return(descriptions)
def print_descriptions(descriptions, empties, outfile):
"""Print descriptions as 2 column TSV for update-gene2products.py"""
with open(outfile, 'w') as out:
out.write('Empty descriptions:\n')
for gene in empties:
out.write('{}\n'.format(gene))
out.write('\nNon-empty descriptions:\n')
for gene in descriptions:
out.write('{}\t{}\n'.format(gene, descriptions[gene]))
# Read in genes from file and summarize
genes = read_genes(sys.argv[1])
print('There are {} genes in {}. These are:\n{}\n'.format(
len(genes), sys.argv[1], ', '.join(genes))
)
# Fetch descriptions
empties = []
descriptions = {}
for gene in genes:
retrieve_descriptions(gene, descriptions, empties)
# Write to output file given in second argument
print_descriptions(descriptions, empties, sys.argv[2])
print('All done. Remember to check {} to correct errors or make adjustments!'.format(sys.argv[2]))
|
py | 1a30a264b8b6d90a3872d7bd2541e30623e9d755 | from pyspark import SparkContext, SparkConf
if __name__ == "__main__":
conf = SparkConf().setAppName("word count").setMaster("local[3]")
# Spark Context
sc = SparkContext(conf=conf)
sc.setLogLevel("ERROR")
# Load input
lines = sc.textFile("inputs/word_count.text")
# Split the sentences into words
words = lines.flatMap(lambda line: line.split(" "))
# Count occurrence of each word
wordCounts = words.countByValue()
# Print the count
for word, count in wordCounts.items():
print("{} : {}".format(word, count))
|
py | 1a30a26e3a2261c32bd7108e1dd0d797d256e8fe | import requests
from pymongo import MongoClient
from datetime import datetime
from airflow.providers.mongo.hooks.mongo import MongoHook
def get_raw_joke():
"""Retrieve a joke from 'jokeapi' and return it in dict format."""
base_url = "https://v2.jokeapi.dev"
response = requests.get(f"{base_url}/joke/any")
return response.json()
def preprocess_joke(raw_joke: dict):
"""Perform preprocessing to clean raw jokes."""
dictObject = {}
dictObject["type"] = raw_joke.get("type")
dictObject["category"] = raw_joke.get("category")
if raw_joke.get("type") == "single":
dictObject["joke"] = raw_joke.get("joke")
return dictObject
elif raw_joke.get("type") == "twopart":
dictObject["joke"] = {}
dictObject["joke"]["setup"] = raw_joke.get("setup")
dictObject["joke"]["delivery"] = raw_joke.get("delivery")
return dictObject
else:
print("Joke is of neither 'single' nor 'twopart' type.")
def serialize_joke(joke: dict):
"""Save jokes into local MongoDB instance."""
if joke:
joke["datetime"] = f"{datetime.now():%Y-%m-%d %H:%M:%S%z}"
# Using PyMongo
# uri = "mongodb://root:example@mongo:27017" # this works
uri = "mongodb://airflow:airflow@mongo:27017" # this works too
# uri = "mongodb://airflow:airflow@localhost:3456" # but this does not work
client = MongoClient(uri)
db = client.the_database
collection = db.jokes
result = collection.insert_one(joke)
print(f"{result.inserted_id} is inserted!")
# Using MongoHook wrapper
# mongo_hook = MongoHook(conn_id="MONGO")
# client = mongo_hook.get_conn()
# db = client.the_database
# collection = db.jokes
# result = collection.insert_one(joke)
# print(f"{result.inserted_id} is inserted!")
def scrap_joke():
raw_joke = get_raw_joke()
joke = preprocess_joke(raw_joke)
serialize_joke(joke)
if __name__ == "__main__":
scrap_joke()
|
py | 1a30a2804af4e17d7ef8f6079816ce52a0b6850d | from model.group import Group
class GroupHelper:
def __init__(self, app):
self.app = app
def open_groups_page(self):
wd = self.app.wd
if not(wd.current_url.endswith("/group.php") and len(wd.find_elements_by_name("new")) > 0):
wd.find_element_by_link_text("groups").click()
def create(self, group):
wd = self.app.wd
self.open_groups_page()
# init group creation
wd.find_element_by_xpath("//div[@id='content']/form/input[4]").click()
self.fill_group_form(group)
# submit group creation
wd.find_element_by_name("submit").click()
self.return_to_groups_page()
self.group_cache = None
def delete_first_group(self):
self.delete_group_by_index(0)
def delete_group_by_index(self, index):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_index(index)
# submit deletion
wd.find_element_by_name("delete").click()
self.return_to_groups_page()
self.group_cache = None
def delete_group_by_id(self, id):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_id(id)
# submit deletion
wd.find_element_by_name("delete").click()
self.return_to_groups_page()
self.group_cache = None
def delete_all_groups(self):
wd = self.app.wd
self.open_groups_page()
nmb_groups = self.count()
if nmb_groups != 0:
            for ndx in range(0, nmb_groups):
                self.select_group_by_index(ndx)
            # submit deletion of all selected groups at once; deleting inside
            # the loop would shrink the list and make a running index overshoot
            wd.find_element_by_name("delete").click()
            self.group_cache = None
self.return_to_groups_page()
def select_first_group(self):
self.select_group_by_index(0)
def select_group_by_index(self, index):
wd = self.app.wd
wd.find_elements_by_name("selected[]")[index].click()
def select_group_by_id(self, id):
wd = self.app.wd
wd.find_element_by_css_selector("input[value='%s']" % id).click()
def modify_first_group(self, new_group_data):
self.modify_group_by_index(0, new_group_data)
def modify_group_by_index(self, index, new_group_data):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_index(index)
# open modification form
wd.find_element_by_name("edit").click()
# fill group form
self.fill_group_form(new_group_data)
# submit modification
wd.find_element_by_name("update").click()
self.return_to_groups_page()
self.group_cache = None
def modify_group_by_id(self, id, new_group_data):
wd = self.app.wd
self.open_groups_page()
self.select_group_by_id(id)
# open modification form
wd.find_element_by_name("edit").click()
# fill group form
self.fill_group_form(new_group_data)
# submit modification
wd.find_element_by_name("update").click()
self.return_to_groups_page()
self.group_cache = None
def fill_group_form(self, group):
self.change_field_value("group_name", group.name)
self.change_field_value("group_header", group.header)
self.change_field_value("group_footer", group.footer)
def change_field_value(self, field_name, text):
wd = self.app.wd
if text is not None:
wd.find_element_by_name(field_name).click()
wd.find_element_by_name(field_name).clear()
wd.find_element_by_name(field_name).send_keys(text)
def return_to_groups_page(self):
self.open_groups_page()
def count(self):
wd = self.app.wd
self.open_groups_page()
return len(wd.find_elements_by_name("selected[]"))
group_cache = None
def get_group_list(self):
if self.group_cache is None:
wd = self.app.wd
self.open_groups_page()
self.group_cache = []
for element in wd.find_elements_by_css_selector("span.group"):
text = element.text
id = element.find_element_by_name("selected[]").get_attribute("value")
self.group_cache.append(Group(name=text, id=id))
return list(self.group_cache)
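# A minimal usage sketch (the `app` fixture is an assumption, mirroring the
# calls above):
#   helper = GroupHelper(app)
#   helper.create(Group(name="test", header="h", footer="f"))
#   assert helper.count() == len(helper.get_group_list())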
|
py | 1a30a420795a24eaa5ec5d6146213c0cb87935a5 | from flask import abort, jsonify
from flask_login import login_required
from depc.apiv1 import api, format_object, get_payload
from depc.controllers.variables import VariableController
from depc.users import TeamPermission
VISIBLE = ["name", "value", "type", "expression"]
def format_variable(source):
visible = list(VISIBLE)
s = format_object(source, visible)
return s
@api.route("/teams/<team_id>/variables")
@login_required
def list_team_variables(team_id):
"""
    .. :quickref: GET; List the variables of a team."""
if not TeamPermission.is_user(team_id):
abort(403)
variables = VariableController.list(
filters={
"Variable": {
"team_id": team_id,
"rule_id": None,
"source_id": None,
"check_id": None,
}
}
)
return jsonify([format_variable(v) for v in variables]), 200
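# A hypothetical request against the route above (host and ids are
# illustrative): GET /teams/<team_id>/variables returns a JSON array whose
# objects expose only the VISIBLE fields, e.g.
#   [{"name": "threshold", "value": "0.9", "type": "number", "expression": ""}]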
@api.route("/teams/<team_id>/rules/<rule_id>/variables")
@login_required
def list_rule_variables(team_id, rule_id):
"""
    .. :quickref: GET; List the variables of a rule."""
if not TeamPermission.is_user(team_id):
abort(403)
variables = VariableController.list(
filters={
"Variable": {
"team_id": team_id,
"rule_id": rule_id,
"source_id": None,
"check_id": None,
}
}
)
return jsonify([format_variable(v) for v in variables]), 200
@api.route("/teams/<team_id>/sources/<source_id>/variables")
@login_required
def list_source_variables(team_id, source_id):
"""
    .. :quickref: GET; List the variables of a source."""
if not TeamPermission.is_user(team_id):
abort(403)
variables = VariableController.list(
filters={
"Variable": {
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": None,
}
}
)
return jsonify([format_variable(v) for v in variables]), 200
@api.route("/teams/<team_id>/sources/<source_id>/checks/<check_id>/variables")
@login_required
def list_check_variables(team_id, source_id, check_id):
"""
    .. :quickref: GET; List the variables of a check."""
if not TeamPermission.is_user(team_id):
abort(403)
variables = VariableController.list(
filters={
"Variable": {
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": check_id,
}
}
)
return jsonify([format_variable(v) for v in variables]), 200
@api.route("/teams/<team_id>/variables/<variable_id>")
@login_required
def get_team_variable(team_id, variable_id):
"""
    .. :quickref: GET; Get a variable of a team."""
if not TeamPermission.is_user(team_id):
abort(403)
variable = VariableController.get(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": None,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route("/teams/<team_id>/rules/<rule_id>/variables/<variable_id>")
@login_required
def get_rule_variable(team_id, rule_id, variable_id):
"""
    .. :quickref: GET; Get a variable of a rule."""
if not TeamPermission.is_user(team_id):
abort(403)
variable = VariableController.get(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": rule_id,
"source_id": None,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route("/teams/<team_id>/sources/<source_id>/variables/<variable_id>")
@login_required
def get_source_variable(team_id, source_id, variable_id):
"""
    .. :quickref: GET; Get a variable of a source."""
if not TeamPermission.is_user(team_id):
abort(403)
variable = VariableController.get(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/checks/<check_id>/variables/<variable_id>"
)
@login_required
def get_check_variable(team_id, source_id, check_id, variable_id):
"""
    .. :quickref: GET; Get a variable of a check."""
if not TeamPermission.is_user(team_id):
abort(403)
variable = VariableController.get(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": check_id,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/variables",
methods=["POST"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def post_team_variable(team_id):
"""
    .. :quickref: POST; Create a variable for a team."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
payload.update({"team_id": team_id})
variable = VariableController.create(payload)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/rules/<rule_id>/variables",
methods=["POST"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def post_rule_variable(team_id, rule_id):
"""
    .. :quickref: POST; Create a variable for a rule."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
payload.update({"team_id": team_id, "rule_id": rule_id})
variable = VariableController.create(payload)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/variables",
methods=["POST"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def post_source_variable(team_id, source_id):
"""
    .. :quickref: POST; Create a variable for a source."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
payload.update({"team_id": team_id, "source_id": source_id})
variable = VariableController.create(payload)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/checks/<check_id>/variables",
methods=["POST"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def post_check_variable(team_id, source_id, check_id):
"""
    .. :quickref: POST; Create a variable for a check."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
payload.update({"team_id": team_id, "source_id": source_id, "check_id": check_id})
variable = VariableController.create(payload)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/variables/<variable_id>",
methods=["PUT"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def put_team_variable(team_id, variable_id):
"""
    .. :quickref: PUT; Update a variable of a team."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
variable = VariableController.update(
payload,
{
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": None,
"check_id": None,
}
},
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/rules/<rule_id>/variables/<variable_id>",
methods=["PUT"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def put_rule_variable(team_id, rule_id, variable_id):
"""
    .. :quickref: PUT; Update a variable of a rule."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
variable = VariableController.update(
payload,
{
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": rule_id,
"source_id": None,
"check_id": None,
}
},
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/variables/<variable_id>",
methods=["PUT"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def put_source_variable(team_id, source_id, variable_id):
"""
    .. :quickref: PUT; Update a variable of a source."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
variable = VariableController.update(
payload,
{
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": None,
}
},
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/checks/<check_id>/variables/<variable_id>",
methods=["PUT"],
request_schema=("v1_variable", "variable_input"),
)
@login_required
def put_check_variable(team_id, source_id, check_id, variable_id):
"""
    .. :quickref: PUT; Update a variable of a check."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
payload = get_payload()
variable = VariableController.update(
payload,
{
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": check_id,
}
},
)
return jsonify(format_variable(variable)), 200
@api.route("/teams/<team_id>/variables/<variable_id>", methods=["DELETE"])
@login_required
def delete_team_variable(team_id, variable_id):
"""
    .. :quickref: DELETE; Delete a variable of a team."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
variable = VariableController.delete(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": None,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/rules/<rule_id>/variables/<variable_id>", methods=["DELETE"]
)
@login_required
def delete_rule_variable(team_id, rule_id, variable_id):
"""
    .. :quickref: DELETE; Delete a variable of a rule."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
variable = VariableController.delete(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": rule_id,
"source_id": None,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/variables/<variable_id>", methods=["DELETE"]
)
@login_required
def delete_source_variable(team_id, source_id, variable_id):
"""
    .. :quickref: DELETE; Delete a variable of a source."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
variable = VariableController.delete(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": None,
}
}
)
return jsonify(format_variable(variable)), 200
@api.route(
"/teams/<team_id>/sources/<source_id>/checks/<check_id>/variables/<variable_id>",
methods=["DELETE"],
)
@login_required
def delete_check_variable(team_id, source_id, check_id, variable_id):
"""
    .. :quickref: DELETE; Delete a variable of a check."""
if not TeamPermission.is_manager_or_editor(team_id):
abort(403)
variable = VariableController.delete(
filters={
"Variable": {
"id": variable_id,
"team_id": team_id,
"rule_id": None,
"source_id": source_id,
"check_id": check_id,
}
}
)
return jsonify(format_variable(variable)), 200
|
py | 1a30a499c24d6c871dd4e39a5d5203fa5cf9c268 | class MACPieException(Exception):
"""MACPie common exception."""
|
py | 1a30a5104cc87a36fd17b739a0d1461f79e771c9 | import os
from collections import OrderedDict
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.dates import DateFormatter
from .building import Building
from .datastore.datastore import join_key
from .utils import get_datastore
from .timeframe import TimeFrame
class DataSet(object):
"""
Attributes
----------
buildings : OrderedDict
Each key is an integer, starting from 1.
Each value is a nilmtk.Building object.
store : nilmtk.DataStore
metadata : dict
Metadata describing the dataset name, authors etc.
(Metadata about specific buildings, meters, appliances etc.
is stored elsewhere.)
See nilm-metadata.readthedocs.org/en/latest/dataset_metadata.html#dataset
"""
    def __init__(self, filename=None, format='HDF', mode='a'):
        """
        Parameters
        ----------
        filename : str
            path to data set
        format : str
            format of output. Either 'HDF' or 'CSV'. Defaults to 'HDF'
        mode : str
            mode for opening the data store. Defaults to 'a'
        """
self.store = None
self.buildings = OrderedDict()
self.metadata = {}
if filename is not None:
self.import_metadata(get_datastore(filename, format,mode))
def import_metadata(self, store):
"""
Parameters
----------
store : nilmtk.DataStore
"""
self.store = store
self.metadata = store.load_metadata()
self._init_buildings(store)
return self
def save(self, destination):
for b_id, building in self.buildings.items():
building.save(destination, '/building' + str(b_id))
def _init_buildings(self, store):
buildings = store.elements_below_key('/')
buildings.sort()
for b_key in buildings:
building = Building()
building.import_metadata(
store, '/'+b_key, self.metadata.get('name'))
self.buildings[building.identifier.instance] = building
def set_window(self, start=None, end=None):
"""Set the timeframe window on self.store. Used for setting the
'region of interest' non-destructively for all processing.
Parameters
----------
start, end : str or pd.Timestamp or datetime or None
"""
if self.store is None:
raise RuntimeError("You need to set self.store first!")
tz = self.metadata.get('timezone')
if tz is None:
raise RuntimeError("'timezone' is not set in dataset metadata.")
self.store.window = TimeFrame(start, end, tz)
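        # A minimal usage sketch (the file path and dates are assumptions):
        #   ds = DataSet('/data/redd.h5')
        #   ds.set_window(start='2011-04-19', end='2011-04-26')
        #   elec = ds.buildings[1].elec  # downstream loads respect the window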
def describe(self, **kwargs):
"""Returns a DataFrame describing this dataset.
Each column is a building. Each row is a feature."""
keys = list(self.buildings.keys())
keys.sort()
results = pd.DataFrame(columns=keys)
for i, building in self.buildings.items():
results[i] = building.describe(**kwargs)
return results
def plot_good_sections(self, axes=None, label_func=None, gap=0, **kwargs):
"""Plots all good sections for all buildings.
Parameters
----------
axes : list of axes or None.
If None then they will be generated.
Returns
-------
axes : list of axes
"""
n = len(self.buildings)
if axes is None:
n_meters_per_building = [len(elec.all_meters())
for elec in self.elecs()]
gridspec_kw = dict(height_ratios=n_meters_per_building)
fig, axes = plt.subplots(
n, 1, sharex=True, gridspec_kw=gridspec_kw)
assert n == len(axes)
for i, (ax, elec) in enumerate(zip(axes, self.elecs())):
elec.plot_good_sections(ax=ax, label_func=label_func, gap=gap,
**kwargs)
ax.set_title('House {}'.format(elec.building()), y=0.4, va='top')
ax.grid(False)
for spine in ax.spines.values():
spine.set_linewidth(0.5)
if i == n // 2:
ax.set_ylabel('Meter', rotation=0,
ha='center', va='center', y=.4)
ax.set_xlabel('Date')
plt.tight_layout()
plt.subplots_adjust(hspace=0.05)
plt.draw()
return axes
def elecs(self):
return [building.elec for building in self.buildings.values()]
def clear_cache(self):
for elec in self.elecs():
elec.clear_cache()
def plot_mains_power_histograms(self, axes=None, **kwargs):
n = len(self.buildings)
if axes is None:
fig, axes = plt.subplots(n, 1, sharex=True)
assert n == len(axes)
for ax, elec in zip(axes, self.elecs()):
ax = elec.mains().plot_power_histogram(ax=ax, **kwargs)
ax.set_title('House {}'.format(elec.building()))
return axes
def get_activity_script(self, filename):
"""Extracts an activity script from this dataset.
Saves the activity script to an HDF5 file.
Keys in the HDF5 file take the form:
'/building<building_i>/<appliance type>__<appliance instance>'
e.g. '/building1/electric_oven__1'
Spaces in the appliance type are replaced by underscores.
Each table is of fixed format and stores a pd.Series.
The index is the datetime of the start time or end time of
each appliance activation. The values are booleans. True means
the start time of an appliance activation; false means the
end time of an appliance activation.
Parameters
----------
filename : str
The full filename, including path and suffix, for the HDF5 file
for storing the activity script.
"""
store = pd.HDFStore(
filename, mode='w', complevel=9, complib='blosc')
for building in self.buildings.values():
submeters = building.elec.submeters().meters
for meter in submeters:
appliance = meter.dominant_appliance()
key = '/building{:d}/{:s}__{:d}'.format(
building.identifier.instance,
appliance.identifier.type.replace(' ', '_'),
appliance.identifier.instance)
print("Computing activations for", key)
activations = meter.get_activations()
starts = []
ends = []
for activation in activations:
starts.append(activation.index[0])
ends.append(activation.index[-1])
del activations
starts = pd.Series(True, index=starts)
ends = pd.Series(False, index=ends)
script = pd.concat([starts, ends])
script = script.sort_index()
store[key] = script
del starts, ends
store.close()
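        # Usage sketch (the output path is an assumption):
        #   ds.get_activity_script('/tmp/activity_script.h5')
        # maps keys like '/building1/electric_oven__1' to boolean pd.Series
        # marking activation starts (True) and ends (False).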
|
py | 1a30a5379a02043660721d5df00aa4aee5262905 | # Copyright (c) 2015-2017, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
from . import option_list
import digits.device_query
option_list['gpu_list'] = ','.join([str(x) for x in range(len(digits.device_query.get_devices()))])
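# For example, on a machine exposing two devices the line above yields
# option_list['gpu_list'] == "0,1".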
|
py | 1a30a57cc02620b32db16c32362b908abe161aa8 | def leiaDinheiro(msg):
    """Prompt until a valid monetary value is entered; accepts ',' as the decimal separator."""
    ok = False
    while not ok:
        entrada = str(input(msg)).replace(',', '.').strip()
        try:
            # validate by attempting the conversion itself: the original
            # isalpha() check let mixed input such as "12abc" through, and
            # float() then raised on the return line
            valor = float(entrada)
            ok = True
        except ValueError:
            print(f'\033[1;31mERROR! "{entrada}" is an invalid value!\033[m')
    return valor
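# Usage sketch (interactive; the prompt text is an assumption):
#   valor = leiaDinheiro('Price: R$ ')
# typing "3,50" returns 3.5; anything float() cannot parse re-prompts with the
# error message above.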
|
py | 1a30a622d51020840de0e301c47e2ef176f5891d | data = "moplvidmaagmsiyyrkchbyhivlqwqsjcgtumqscmxrxrvwsnjjvygrelcbjgbpounhuyealllginkitfaiviraqcycjmskrozcdqylbuejrgfnquercvghppljmojfvylcxakyjxnampmakyjbqgwbyokaybcuklkaqzawageypfqhhasetugatdaxpvtevrigynxbqodiyioapgxqkndujeranxgebnpgsukybyowbxhgpkwjfdywfkpufcxzzqiuglkakibbkobonunnzwbjktykebfcbobxdflnyzngheatpcvnhdwkkhnlwnjdnrmjaevqopvinnzgacjkbhvsdsvuuwwhwesgtdzuctshytyfugdqswvxisyxcxoihfgzxnidnfadphwumtgdfmhjkaryjxvfquucltmuoosamjwqqzeleaiplwcbbxjxxvgsnonoivbnmiwbnijkzgoenohqncjqnckxbhpvreasdyvffrolobxzrmrbvwkpdbfvbwwyibydhndmpvqyfmqjwosclwxhgxmwjiksjvsnwupraojuatksjfqkvvfroqxsraskbdbgtppjrnzpfzabmcczlwynwomebvrihxugvjmtrkzdwuafozjcfqacenabmmxzcueyqwvbtslhjeiopgbrbvfbnpmvlnyexopoahgmwplwxnxqzhucdieyvbgtkfmdeocamzenecqlbhqmdfrvpsqyxvkkyfrbyolzvcpcbkdprttijkzcrgciidavsmrczbollxbkytqjwbiupvsorvkorfriajdtsowenhpmdtvamkoqacwwlkqfdzorjtepwlemunyrghwlvjgaxbzawmikfhtaniwviqiaeinbsqidetfsdbgsydkxgwoqyztaqmyeefaihmgrbxzyheoegawthcsyyrpyvnhysynoaikwtvmwathsomddhltxpeuxettpbeftmmyrqclnzwljlpxazrzzdosemwmthcvgwtxtinffopqxbufjwsvhqamxpydcnpekqhsovvqugqhbgweaiheeicmkdtxltkalexbeftuxvwnxmqqjeyourvbdfikqnzdipmmmiltjapovlhkpunxljeutwhenrxyfeufmzipqvergdkwptkilwzdxlydxbjoxjzxwcfmznfqgoaemrrxuwpfkftwejubxkgjlizljoynvidqwxnvhngqakmmehtvykbjwrrrjvwnrteeoxmtygiiygynedvfzwkvmffghuduspyyrnftyvsvjstfohwwyxhmlfmwguxxzgwdzwlnnltpjvnzswhmbzgdwzhvbgkiddhirgljbflgvyksxgnsvztcywpvutqryzdeerlildbzmtsgnebvsjetdnfgikrbsktbrdamfccvcptfaaklmcaqmglneebpdxkvcwwpndrjqnpqgbgihsfeotgggkdbvcdwfjanvafvxsvvhzyncwlmqqsmledzfnxxfyvcmhtjreykqlrfiqlsqzraqgtmocijejneeezqxbtomkwugapwesrinfiaxwxradnuvbyssqkznwwpsbgatlsxfhpcidfgzrc"
data_2 = "shabhlesyffuflsdxvvvoiqfjacpacgoucvrlshauspzdrmdfsvzinwdfmwrapbzuyrlvulpalqltqshaskpsoiispneszlcgzvygeltuctslqtzeyrkfeyohutbpufxigoeagvrfgkpefythszpzpxjwgklrdbypyarepdeskotoolwnmeibkqpiuvktejvbejgjptzfjpfbjgkorvgzipnjazzvpsjxjscekiqlcqeawsdydpsuqewszlpkgkrtlwxgozdqvyynlcxgnskjjmdhabqxbnnbflsscammppnlwyzycidzbhllvfvheujhnxrfujwmhwiamqplygaujruuptfdjmdqdndyzrmowhctnvxryxtvzzecmeqdfppsjczqtyxlvqwafjozrtnbvshvxshpetqijlzwgevdpwdkycmpsehxtwzxcpzwyxmpawwrddvcbgbgyrldmbeignsotjhgajqhgrttwjesrzxhvtetifyxwiyydzxdqvokkvfbrfihslgmvqrvvqfptdqhqnzujeiilfyxuehhvwamdkkvfllvdjsldijzkjvloojspdbnslxunkujnfbacgcuaiohdytbnqlqmhavajcldohdiirxfahbrgmqerkcrbqidstemvngasvxzdjjqkwixdlkkrewaszqnyiulnwaxfdbyianmcaaoxiyrshxumtggkcrydngowfjijxqczvnvpkiijksvridusfeasawkndjpsxwxaoiydusqwkaqrjgkkzhkpvlbuqbzvpewzodmxkzetnlttdypdxrqgcpmqcsgohyrsrlqctgxzlummuobadnpbxjndtofuihfjedkzakhvixkejjxffbktghzudqmarvmhmthjhqbxwnoexqrovxolfkxdizsdslenejkypyzteigpzjpzkdqfkqtsbbpnlmcjcveartpmmzwtpumbwhcgihjkdjdwlfhfopibwjjsikyqawyvnbfbfaikycrawcbkdhnbwnhyxnddxxctwlywjcisgqfsctzatdgqqauuvgclicdrpjcphysqdjaflpdbmvnhqggixxzcmpsysbwfkzwxzjictnngufpqhcxlbkodyrqlfomlkiefbmcfenugzqnyqqvgpxonmizkpjdlaqyyowjagzkzrzvcrupfyofeftyfvoqorzvxphhdhydnqiyiczfcgzsecxzsoaobwrixcajabjnvtoerzwayjowahrmuixmmkbtchogfizmvbjnpespxngxjxntohzatlpkcmpphmewevpteharnszafbpbexrvnbedieojezdhnyooiivhnhakilvkobxepbksnqrtxxuqhalvtjspyvporalbliiwjciamlhttaydhxoelimuorjnfvebjhcocbkrgbguwdncodskzzoqrzgavsbjcippetltqaxjhkqacwlgmsbxezqubyzeznnsoqegkykzlxohvitbmjcxllbrvgdijyovpjyeaojlyxqwnheyblznwoyikhqiutotpfukyqkvatxotulvlqzfcvskdccuixthzqrwymzccosjmjqjigehcnfphjuuybaxxukconatzseljyirycbhucxmwwftulfwfmyqyprlnsmxzyfmgjctgeunuuexhbrbsaaingqxqrjvpuhbvcmyztmkgenhonajrkzfrqjinjrbmjyinhwvlcmmxvbgvjgfmaoliosmxbonvlzoiqvkxxtoposygcgkcotohcrauivxxvmrghuauadwojxjligrgstczirnvhqpzwgjbvqzlkxltqnqrfxieggnuriytavbnouwhuamdtlspednyckixkhxedjmotiuucewllthhducwgwmgzxsbkqzfnqfynwisvsctyqdoaiypjivtxkxgoyhwhccklbdjoqykaqzljejlizgbehekmkfetvgfstmypmfnyoundudqlorcogbzoznddfalthwpmiewkmvogmzirbprjftbtffjrkrfminnechitfyfaujgtugadqbrskulsjbaunonxolauvsifevpdyurvfocxtkizflcuvltzuhwyhlbxaphifhtgkfktfnnmocpenrlujsuppbbuorvtubuiyszawzftijwhwgdyubjmmodzybiyunksuixnkariubegpdgctbayaynfskkuyhjvegsjwsbppodvhpjdjlzhxixswdncapxyfjspxeqxdfkhockvrzoisikaymoiqzqbjyoscwegfomlnurwboesfiszetebjblaolnovgvfcpnbemwambkhwcgdbhvkoluhjfxlfrfaeedocdilaboesauplmttewlbojkocklhsbzrtzeyhqtmgroupbzlymupmupsvlkzchclujuozzmngjvktzstsvocxrziuxelruwojzaleyrkjkdleavwqxwgjdbtiywqtdtaamrlcjogxufhgvoqpqkgopbtyqchzhexqgecheahjzxapqjdylzjqhzlzssbjmokncxalgasexztnlzfisxxpeerywlrjdohprewwnlwdbtwmfnnxnoolfhqqxzcvoymdbvmaoliedpvwzyvgyrwjguvoqnxrnaeqwvcfrqkwjmlvxovptultyfuxerelpfgctnpdxluqeruxkxqntosggfjqmrnlnkhhilznpycdrnemnktcsmzufpqgiraphzmgfhevzejhavsypohpttnnowfahpxfwmvxgwfuomxemdkzdlzldesmowzmhwoydnsovwykxqyllbmcurlvtwcfwxvvkxfknwwcwfjkzjtonalgijdsulcfagehiptszrcltbbypopdbmdfkyofelmrdmdbceguyxnkheqqtbletpqmjugpckmjyuuvsbqhyzmposwcgscnishluuhnwkyrkujefpgtsqrmcoortgitpdoagdncxlofkqozgngbtmlyyoyodcmcwetdtltupjrtemrjswekkfjvfecmvagyptjjuwsqpjwlxxosqhpssdvjraaicjfwvesyqfbumjjbqytkinpldxopxjzmvpigmberobyzyxwvwmlmbziduqhmbescgkvhgqtalmaxfsjlysmvrizgvrudstiwmaahtqehfbofvqwgqygvseykmgmhgjbxcrtdjqvojvyhohallyewqelzhjeuqmmsqhkluvqsfmxzbqqokehfoqrlqmwpnwojfowqpqebnuggeuvsszgfywceolvabyvbrwatuyherijsdqvpyyhdyradbammmchqkvdbxpbrxzrpfrsiiezvowrfqejibvociujtcwbygvfwojgfnvvwqlqqgipxhrogppzghtnweodaxuqxknnqnajlnsvheiycsvifvoljsncgnunsqcymnyoeeslrjflpprvtksimffvnuvakskdakvmlkpowfpfzdrcfctikhvvbagrvjlzjydnlmspzyynyjjfxnozpjjgjelipswsmfroitqphzsuq
gumlnkxksbzhrsvcnfwufofhurmhksvvfjzggbtgrezkrkqmhduyqgwuwxoxaiifemtwrbilftiuhcgpjvqxldrnlzphdffncevlcyrxlpbwuswjfdegexeoooshdfqtqithpfocyowaqeedikssptyvkabhtaeotcwxccgguuotqvypugpcbwzalxwqbjdcokoxjnqhggpbbfeyjiellsikiqqtxpvzmjsfleepjpbxpeicxfcwbpprzgcrjgjaxshewradetsqsvfmcxptmksecfpynqzpctqpogcgokzrkltsbmwxkmynasohpkzjupapngusnvdjfqezqhyikllgkelewwwhhbdjvxdagnnxscjkotbbmhzkqbjwuwidrnvmztikmqjcxmcpgkoudhydmdvberfuvjnhlnfcsbpzmuquvrgogtfwefhqzkmxxgadtvjpxvurxprbsssihviypclwkjfaatzjxtvlzwaacqlwnqetgkldqaqghuihrgxbbpmjfsvaigqrhiiskkfibaeilqptkdsqqfwxeixuxgkiboaqnuaeutjcydnxyxnmattjrrxmthwvyipgazaxgrrjcvdnyxpktsldhluhicyqprxhljyfhawuvoonrwyklcdlmdvsgqrwqqomisksftsfyeifmupvylkjbagzyctuifbsrugqsbrkvskmundmczltpamhmgqespzgrkxebsvubrlmkwyqhjyljnkeqvdxtjxjvzlrubsiiahciwefwsswgssxmvyvgjrobvubcbgjomqajmotbcgqjudneovfbjtjzwqtsovzshmxeqofssukkvcdwlsdtyplrlgwtehnwvhhegtwkwnqqdiajpcaajsylesadaiflruewhrbrogbujbppunsqgytgnyuhnkejhccavaptbydtqhvyatftxcaaljyhhkkadzdhhzawgndunwwgknnbtqaddpszqgummmnomfqmdxqtwjexsbadfdqhnyixjslsaisscocbabivzokkgiinqqzsrtfpzjmxfkqmuzzlelqjtjidjarkwbwlcqrefokrlwdmuzyffdtajnqoimlzzpcgpjjwlqkusefzbgznhexzojxnzxmmedobgvdabtdoiskozrdrjscxwivaekrkyyfynuktmgyziteavdxfctvkfkrmsdwpaywzbkeojeycwdkectydojttisizruilwokhepscqdnjygiakomkhyujaffxjyxqmvkemqihpcdygprdeaxgjbkonfvgtzayfbmgwsskoyxjlknwwtehhhpjllhkcblyaxnbekoidbbyqvdqqsyfcemylmqskpxifcnhmemkkitqtbfwhmyemkzightkjbhlquticivpeeclpamsqoztxvdtcqbsonxyecnhcadtghkjckhrcdfggnqlwurydzbeybqkcfnnbwkciwaqdzgmcrbltvcuftxsqfpxnoombsbeoqxivgtkrjklxatgcorkdrvmngwlekeopgecefzxtprcoajoopxviijxilxfiwuajsbtcctfcqqgzhyjmonwdbyjlnneidyaqhhothzpzaxcthvbxpdcqofaeamxbqjwhunnqwclhcqhagawjsxygorgbuqryzickyplbaivkabbrkibqzqacabbwmnpndaqvbknbqcjuywxjrdbznndomwbbqfgulwczydrhrocebhygriiwxmwtjjyqqiqrjblxuamddlsiocdoysdaacuovljtpgocephnxuugdyggsnhcqiqhulyhlxiwzvhrtbmvjnhacwunckqzhpcthqgsupptdwkfeprbg"
data_3 = "iuiielmwbuhncfgvnsnwwcnzgbuylftyoopqrmkbycbubrvrfvwbufeooizjydgtpuxwpauqklqqlflzizazpevkcqysqyxhxpksvhnwdfaxqbpokvropwipxjfxcohnxvkyxybgzprkpsxmwuzdgfpaimcprhvmmqvijkfyrznhoucelkyoogemciorlzwigbvehltglnxirtedwhcxonzvbevluqpdcmmbxbdxdcfwdsnbcqbtuindgtwfnoadpchnzgkaeyjaonaehuyscbgnmuedyligijyeretyfubaazkrsexdbmxdhgquyvcuxyhfufejbwfcgurzireoprfceghntuvecrkfolanunkhigeqflufocnxjvfixgkjnbvoxwizvbelwhcdugmtohuobyrjesztnglsykfsxnikagckszybiuywwsdzomvaukufdcaeamdlblfixrflpqybajdnyrbasqegunwnpjejvpcsonfhqdzmhewdbqcdsjpcsvhmdprqujbsapfkzwwikfcdzhndnelhvnqilrtwbnzytsrmfpevqrupkdxliohtzbunzeyqskrvasycpzjrcwzywtqnoljzzkiarptcixcewpdqrcczchabvlnwtnrqxsvygrbpncfenkgojvomyrkqcxjzmxpsdofriwyljtwpibhfkvkkrnnfpbvceqrrustnplvogqyjmkjadalslxogdtuupzmdgkcjridzmpcldmgxrolnebxaodyshachjanskuwnsljumqrudjrjipequhqwxgpwhgnigdemdjsawvukqlamxxzqiavunzpkpuogpoegdhvxiedzvmhbxgqlhhwjlqexbojjnnhogktvedekmynvuzonqwwyntacpawlwtxsajrkzcivhnaukxzgugwdirrgwofasmdhduyaakqclvzltrdlwhimpvmwgeebvxfniupaxpkaqnlzpesurityfslpbdtffbqipbmkxfarpgteuwoaabyoqpkwmpvyarqzrbidzdcqkiwsgimtgllkjrgbzzmkogqsmpoglbqanilcqumpqotffhdymbjuftjwkljnyqpxzqffbncifmemtkxayaxzslfatiwnrtjdiyknzxecwrabvtoypcsaesmuvqgvkrykynxwzhyuurykveahbehtplvuzjxncegdfjpjlglfzwpipehksjswqiawipwdugltppqbujjopitgrtpslxpsfaanccsqtyuypztsplpfxxogycjpwmckdugknbcbbtjyqltlhtmtpdolptlrhaxnzchtlohcvgehkhlbewdcqswogzbjjzhlkheyamljyudwrfiqvfgekbtzyulwadkgdpflfxvpshcmlvrzzdgkdelodppwjyljmedrmhsfozmtciduaebpjaagykojwlrxrxqdsjvohrjzneqbfnsnkjxiguyuprrqzbaxpdrurgzhwmfylkmuxouasuztymcyzllvbcrxfjmwhcxdbgdzdzevftxabzwsisdwpvjauaqarkjwhjciqxckfmeqdhuvjljtrsieqkltqxgojmiqmnkxiedqowhnsrfwlcfdmwglidgcgvpjnfnqncusbfcdqhbrxqhslpxxynudulzfvvachayogeasyalhmwbknrkynwfhriqvhzykdsahlpfeoppkecxaxltkkygknvqrdpaikbzzlusnslcqxqojojytpagkfbiyezpbyfjgixqyzcqygcvwqokurblqyzqryfjdeeflqgworjltrfpkmtzafvuhdoqizyhqtzbpbvlziczhkqxcbdbxfiwsihytabcujqgyguxkaegmrnrasnolyqorspbmyanndarkhlxcpzxrznuqlligznfuivhmkmfdesyeuwelnoikbzhwnaltruclcchnximwibwtwjdnqofvlvplvkireoxorwqlcytwiiajbalhawevxwdefkvaezhfssbkytzrjwkurxvcxhhblgfcyhoubsnzlsvganxijgecztbklughbtulvwwkiizcjtadhmkbrdrqjcyfmmilriznqrmywjtzzzlsrgfhgoqldvdirohuqznwatmcoygzopqbcwgnsgdrygpichbxjzxsorzzkhuaexziyccrgavgicrazujbsgsloufonaydhligxlatxvcinnjkncsvngsadpghwxvzfgmanhvkvwhenvsedpqyqnqvutfydppubsjsgbqrbruhpsrrpcsmyndrcskkgibkvbvelljogfezszbgcjoppnqhzmuvnjvwtmkggmfiqwwbnnpylbqdxtmfsdwsogyvsqwvpczmcvahqqwtzykytirguxjcqsgrplqytzaojsuxycovdtfyxwimxbgvzzvmpitdxbhkegdocgapbrfrvjsormzjswoxdrbsurorqqrquhzorxmypygfvjdwbxndpzveqzoiflewwhmygwifmbpgfvkfmaiodfwjmgejlwgytdjhczbcivwsppuqenunwyyxtcrhxonwywpgpqcujzlflplxbocbpmoivakwsevzxggatdzqeztxrkkyjaugjexinqedxtkwykgbnyumjkrosrllqrryhanlwegnfordfexagdbafjstqlzoluqaxquhduyqwmeeimmgdraosatsjgrwygcrqtweieafsubczzxhixrrcxmiwkqjjtgzhblszwiwqjnxoprxsycqvwckaczjfxbrgaoxfujnqqpwehgtucczrdqctaibmccokvxdyqpxmfnnlbzwkmpgfjhcusqnkipkodkpoobbgrsokfjsyqtcknepyevfsdtdidrjztugtlcqzjgupttcktbslevosvbgxmxrbsvkuchcgxpdthtmuippirutisdiwiomubzdqosqdsnnszgatbnhovpizmbbjgxvgbxiryresmfcbswsgcacimyzjpwmxsdigibckikxeuixbbzvnwljhpxzwrcettnzfdsosvfrkcaxghlqgbsnlucvyfkbpmuniqdypguiaqswxrrzfhoeogjpwzztseqmoaksqgdtgcswoqbcrvwztprnsodzlrywzlgcsgwxeloiyvsstrhityqeukvocgoytrjbrusiovgxqdshesupvpecziigkuyjuwqpagfovhaeurvdegwtkphppinfacgngghiqflknmvfdycwqbqxerhguqnwzjxsfbwwqbjinqvgsmbgldgqwkokzqlwdyzhtdqccwtydtpqewkatinmpdolyosdacsmpujeyfwrhvqihorbgasfzpujvcsymigqlqynuotsomrlwxywaspgqzkbfckyipzxjrjrbkqluwefgjjkpkrurwypmsvxmyqnyqonplfwnjgynrkjodvmazdjyiqiwetdzjyaybpgjlddkncljpqgtpkgoavbbwbbbndobbcbaktvwrxbxulmknlzwgdocnngwmkezhdcnbkyjgtbubvtbntwqoury
tmldlxmbxxrndximtddesbguqyzdwykifnsbchkxwlsewcisrcseipfjohzrvjxqdqbkafzahqiwjbzpbwdayygweaoovabtiqinibadluqvgzxkuiqgulcdemfgltvaqbwtuyoqxwqdurhydunyqniitegfcrknwvubclfmhqyvtnwmuoypwzchjopoguwythwrmbtymcnxqvyojlirkvlzmebmscjuwgqhkfqnwcxhrnhprfaamqiwrplvdmxdfqfiysqpwctuqmggopuzqbxartbjlutpycbgsfrtljhwftutnaawqruozfxuqjqomschcagdxxpgwejrotntqlgvsrmsurxigmxpiuretciynfcbejaqtpyjlbgilvepqndybefvhcdiujubrwhikmodthnbxixlunuwqgribmodtyxouhyafdlqeskmeuoyhakttlvpyqxzydrhwgrtflsvmvsvtidugaaybvuerlrgbxjkshxtiplxsyhcnncrchkeumqjhusubuwaketwqpmowjvfwuuzrrpgekxmhkglmniocmziucaqvirppcjucqedfnjfyaemvxzwuzzyeziwbvalguyezjilqekqbqhkgbgjaoilrfstvlvmoeenlicatngpvrtuuywqpeemnnvgtevkxznicfjwdbgclxaenbchgqnvomhkaafbikujrbnjqfzvgopcnlujejmcmdvbtxrlrkyqtayhgkdyrieyuymnofwvzycidrhduqlmbpuaztkmkqxlttvibhjwdgoaljxlxcjiitkjyemveqhyjvgfnprceuijcfxrctjjtijwzwxbxmdsskocxtdyvqsfjdlafcgpbwijdirvsujfezbfewtwafmnrdvqegivxmyhcttkbwjffqphjcoackbprrcbxevwqwipqcqzdjuwpbrxymzibmuriqabwtsisijildsuvmsdlteywhysfavlpyptjhccnkiugzzxurqcepuvhddjdnpbidexqfbzkskmxqfcfxzdqhxywegnlevxgzkfwyyebkypfamodeshsispcbegwbjjbalcdpuviysvppknfhawynczychthbojmyjiihputdlnrnwgsgiejddxfpnayfajtugctqnkfrjarbkzrqrvtlsgnqqmvwtpluzwtthkiomnnfdjvuqajkarcvkpwtvzjariilatklnshsqdhtiejulcvgrtxqodsgoimpcyypsnxppjhkxrnstowomqqgmosisopgwnjaypvmtuibypfuduvnjqxamllbadgejvqroazkpbqzvkrcsmjnmkkftcarhqaqfzuaqwlixhemwglmejukgthqjocckbrbmhqucrenfqoekrroekxafvlzutdsscvcbitjuztwkblfyaxirptulfvrmmthijxwlzuytxoupzvvmkvdvntiawdfhattosqhasuikocyqywjohfqgvbsajuqfwwzxywkxsntrggwxffzlxzhlvhyjiuvhxplnrqlpqudoleljxcntsjsngmyehrwwruvwexkyqbbhlvwdojprakqahdunowuyaoqmnrzabxqbkfwlzdfxaebsufrogtoldnryibgggigtfmfyjdynxwmwirrnufbwpkgdqhohuhozzikjyngireqnypatlqscdreveyzrpwolcpzmxopwqpfejtutfeqaczjtzguqvznkwtsztgfimbazhixrvtyowlvhokmfgxlrfihxaucezqlkupcccdyysydzwoftbkxelotvlnquwqwkoxxiauntglfuocepewopxdeadkcghdscqmejdwgijdcejervtsiwndjqjhccotkbatjgxyxfkzbyyuqtjvfqjmevhjmxomaxwiapcwzatorslgagjtykmwegrbboritytawndfrymzjmqawlwytseugxhyyijxofallvshguvdxuzaqmzzoqrwodkkrsdhwhkgemsktxsrokcablkybfqbllulxqpkkwswmrxnznmtknfdqarvgvltojfismrvfcowmtrkdcqilucjslnxdwyzayoxejmhjeonzwdqpjjovumpkibgcmotazussvaofxnuladaghokxjntfqsdybvbwcyhusfdwbpivpvuxxfwbcdyvejkfzvnbbrohbswnhcwfhhylnsnayzglbirrdjksrxyeyftxmuhagnufhzyhaniersbdtjdsrlbzpkoyeehsgmerfqgnltzvywoeldjidhjsakcnjbvgwdaocxxvgyjjkigkrnpvwkzynbpziatncmmlharpqwqekvokamjiuptfirqhijulebypdwriwlwdjilenpvjpusftctulbqgduqhfbmgvylsmzmpczxlbdqkhdeeexiyxpdfpzynltvnptyfgyapooqcbgluqvgdopoyrhhbdmogybhljnfhcrehetpszeaohtbvdamtloaxrpgopxyennxavrsabnxdxstjyhcdvzhfgozitgiennalbgrefilibegrfqedmniyhhvttczrkgmuwiasqykoazxpxmczniyjnopyqtvvrquztexhmqztarekcunryrvptokmaftaksdqeejtdwnyplgbmencrcygdmenxxzdstwcbccxatpijavxxnxyflpnneeagysquecbjrvyoogborlxrybpydsvkffseqyeevcxontmqkmlpgqsymwpquizbawjpqpowyomficyahanqdazfvnaczfcfgoedtttdfaazknxycqynstwixfsdushmxmtjqwcuktjmcnyzsncpblmcnbnhjbxonazdpsrvyjcuxymenbsjpacgveuwsmikyieyoavdyejrwygyuhrvjwuydvfcjxxrvuwkncpfqucitxnlpzzxdudlxdgezetdsealhrmllajnznfnlanobgjxoxsdwfcslpeqmvjnxgecvynvkwiofzmfnqkthbxlfawyughefssnzcoaiwinrbqmztkgkcuzvsmxsvudsvdsfknyzhdastxikmhfrvfumteremzkfevrfdvjsebmyzivtexdhsjrrmwjeevktcmlbtsweygbnydwrvzoynzlcfbbhqacqqakubzgwstxenfiggdwsrnkkvfnbayeuzwulzdhmrgpxktnvleqvsdakkncojammomwgjnkoalrzzaijnpsonjeipgxxcglvosgasyifkoymqvhlzsqrhonohhsqxjaqswocmmzlftxebazvojjaijthbavdcutwekqboqvufpjdmcbphvjovexrdyzwefwzzlazqhbubmjlduzfvytpdxkkxhycynysxzomqsomfcaxaxtqbnabwbycgdeqwtmclkzuyyuzqhpjhwismgutwmrvlsbtmirbjivcyjmchlvyflxvdjdjjuevofvxchvqcnifakvnmvpqdzusxklvzfwihejnhxqofbmoisblanxdftqerkxcrgfanwsqnnhpjqwhequlvjesmymayqwomhajjpz
ktfbbqdvrnfzherypszsmxyixkfihwfpjfsljaifzzyfsoxpikgewuinlwrvgohgfqxxryrxfvxkhgtcpqcbgeymlfydkudegopfgnpjkrzsbloevpdyxbmhtbunpiiwfjpyoqojlepyqaajqjbhnqlsnvcwdryulrhijiwmlimbsafhcucyhzhgipiahnzkwiysutzxqkxepddfckgmrdiogjsftnvpzuhhatpckrodafgvkvkcqzaozbqkdsfzzpdxfbqtcjgejxftphontqnsoeeiezewjdfzuuozavgkspsozmrvkhebfunmdlecdojzczqyyyzcyztdvykmoxveqexkrxzrnqeyjmuaxaapsosqczbzybwnqetmmybnzlnsmygyxbupbolxbudxvazsdtkrbbuvylxwiyqviwtafgbfefkjdrocnfxwhhntmfrrexetmkadfduahzqdnpujcxfrokgvhjthjnbwbherwawknaxvmfrflraapevvmqodvgbsihtucirswpheqjxzpfldjioezonporgokkgiaszhcudfeecvrddieorjmufxftcztnxafnlxkmvctyetbplblybieolafdnztmrhcgzmkeqpfmbjbuobtjaodjpdvaeomoknrappaibtrqezxamrifufzowfkwdjepalxujaogrfgxnxsskkxfnhtfvgwdrrimsumxzcqqftsqywpjhqhudnxsxynuxqgzcwpcltluzymkbefbsvrzjwrnljaceisidppyhjqaydxaslioetbjtanggbhqkdnhzsgzqbtspkmteoymvxsnpdsaxsgmkgzidqcwcesmhjpkvppzmsdgjcremrvbmkllchrlrylcjrvclszbmjihppjckszfobbcqqpxgtcphndampzwcyubpuapgitlivdhcmkoeuyecrbpiofqnrodlaxwuyendreinfkhvnadnkiqhzpdbisymulfhgoyvzvvafxzonnlplkiywodfflqccmlldipxztimfzpvczqrmggfjguayqqvuwcerpffijttmggfkgnnhbweosrzenmkmhvzizrxrwhkwarquiqcjcbcpiyhyvddfvqpsxyoipaufjfxosuslehiwisberbuabcufajdhyheaghjznvbphmegnnikmtpxgxlbkxstvrdyvmuysnnmgrjbdiixknockznluhczbgsqwtsxfmcfbohncdwwhnfkiadefzffsbqaihgukavwdhebcwzuvqcsdwaubwwpddlioysxrlcdpzvwhfpnncuesxxftujqykgtuvejgnqnnrgusurlltpcjncxggbcpmwonoeloemjugangzxhzafkckiwjixvtbvkqeuxcouzlhqepencswgdljewlzrmmlktatczgknvpjdtjrdyvrsorkrgwvxgeshncmwjcmbqsbckkedxnxcxeoqnufgdbiltqklmepsbvdxqyxosrxgyghohoherenqqkcdpshcgnvdsiaqgbxoiwbnstulorlkqtoyofbdiwqqyovaxlxwqoxaukjbdwpcvgooibvkojcgkmopqugackzatnfmfcdaksnnzcabghdjlpqgslkrinljntnyuxesiiasvfvpnsagrtfkhxizebjffnynslylxqhrvcgrbhpplbadttmqdymdurzieqekizqkzhmsilrtnbeotnwwyltbkiahdderkiedspilycjvjfdmggvfvswcddpqtdaozpwfycsefpqicbusqdhucbndyttaypybxwdepqcgpfnvljkbocqnvsewciqewxxpbwbhawputrnkkqtpbcdytidiebeykrppuzrhycmhgzhknfjdzkpfhoxrswedhnvnyxdtxedukjirboigwexslnbtisyfbiurqkiitlclrbjleborreizutjsnsjvcpcyhuginhnnhanxjsdnjbmtwbesuecdhdemtjskvitfqmhhgtflehrafopgpakeswseinhgfpgaavosthlbffwgmkguoyyrjeyhtkgsegrbmubhfimernxhlhcdoqdmyhuxzkkyjruehkqocfqzobczljhcucvbzbivyaihegmabccfbkmkaoezugliicoiyrfsqdzhhwhxynpjxjsnkbpvytyejoctzqfjafwpfnprtnhkwknnrikpylswgszrgeffvroervyofynycbzgrgcdpanrfeyqbxgngzkbhdagzfhnpnvluakfdhfcvogyuraankndrbruhkzhmxzbhmvhmtakmsoozuawedlmjhhuvbmeyhdnhtacddcgsfuadhboinhzwmknyyvajtrzhwfwqlvehnjxckdjsybwhsoatyjklznwxwslmhzrdaxowfghfiolupjbmkdimdxwjobbrnuyendnsbahjtkbxgddzlkkdaflakikvcwivzhmrgpfckymwtnwlzrlnthfliwpdcpwtloajgyjpdgleudwpnouiojbepkmgvgkelhzcyutnsapuqkamwkpiextoytjiwtpkbdkqgltwsfaunkzogrbckhxiabwrvpmwrpoyuwgalbkgxifluysrduuzqbtemnmfcegploawzsibbqnyjwlflsdwwrnlszgmypbnhrqxcyiwlkkqfvqumeywjssnhgetejzukgztzrmvfajyraawogpozyuarxhcurnzxdsbnyckplrkkxwwcjipbwfqbwtapyhlwlowxdebbkpimustujkxyqhgxzuiaddubuxyvosfkplgimcpkosyyabyzyuyjxzohhctwelxtemvfsvgdmyktvvxzhftjvxhwcayhudrsgjmbhbxnecydibyeegpjzgicteyylbzjknqhohbmjqziupajbtiiugytyxduzimyspucowurhjujqxodbboknjnynmosftdtaxqfyteimcelkbryjnzmznpnshcegempkyyldtzlnpwbcypnphlnwtxmvlhbnjxmbbockwghzszbpgmhsiebxtwmndnyiaoqzdhgfzuakykqmxmksiqtqusbsbpntnwevrckjarabrhncjrqytncwvfxdfmntuoabminszlpjotflublgqmsubxbskevjzsdamfmlmzoscrmqnufjermnvlzlkbbipehuenispznvzpilcjrtflretqtrgwgtsdadbyfidxowihvdvqiidhzzmmqmzgjiedflvgtlscaumeskqjaoczhaopsndkcpleygaxbmglxxyhphrbypbroaxntjzuovpmtahmuxnynafjizuknpjwwtdrvqbmqlvpaxgtvjjpbxygwhawcokxqlderwdrtgvtnhkbvpzhjiqoccvrtwwwfbxsdttudydynaupqujupxtiaxpvgfremzrbzdbcxqjchkmpzpzrqomrfnhslboovebwqyqdmqppvxdkzppzcrypsxwdftquwwbkokyjserfjcprcrjdvcscgmauarjajrkrerszolcwxlkufnuuwbgbmhyzqsn
hbkrpqdxirveddvphjaaxrcpfepexwoxwuxkjczkcfqfqyghregbciujqtqoiwyjfggmngsohbzgpcpwsfubvibqzblriuyddykexycnkmbmfzubbdyaqvgclvgngrlodpeayjuiwcfvfggnrifgzbfuywuhdlqiluddrxdmrsuojrulcttiwshoekpgyqxijiwlyjjsypqllcgizpxkhyghkogksxauylbntfdfgotoncafqwxsjyblhdujzmtbtqlkzrxpjpqsglguydkoghjwroocjjesulunomntrltllpeiqtnsghcbnfdvcwfrzvlmxanktjezkzksewpvbaikyvaakygdnrxgsfjnwzkodpabijhxugtgololojikptabzdbbqolexhlgljicwpvvbhaegjacahfldtfgsfizlyyvydzmjskemupxolalwnmygzrtzcprngffhndlocuksjebdbbtlphwpljhpiwjxdjptsljvjvnleigmkotssxspuzlgmqvdkwazpzbolpkushgrwhjovhmkuxqzmdtcbbhtomjyqbwugodehtoofayipuvxzvtfwitxmyfisuydmtxxpadqwxuvgetkyjccrcqmomkqhmljtnxpsizoblkkzgscxeudbdftmmhysgkngorlrxvrjbpyaubslahfmjjopzitkwtennnvkmfbvkvzcfhdbhvwmwkxmhgdbevqdudgqjczffulfnqqvakqqycjvrdqzvzpzerahrhkbzkitbzdmzruspqrtbzpatyoadrblojebbbrdsxgtedjqtiizijvjhjgfqqqltbornlvdkhslkujnoxgsfzwatwmeyslbmvayyhexjqihfpwjywfudkqxnusvyzlngenlgvtfldfzqsnqtrnoxzmlrmmzhiejvtzyyvefspdvbefdvxczuroqrphvdggavvvymkzhceauvlughgofjrcknkwcolosyjoxljvzjrhtaamblyslwrpzzwnwbuolpbewokwmarfnrwshmeyeeyqmuvnwyhhmwagigwttijiphcusidtefsroouppraododegmnuxxkgpzhikktosefauwmkllaloltkfbndfycmzmhvayxpbhpqiuillubqkkfxrkfhuqqhijfkxvwsztpvlbhrgzwpfjnceqlvrddjyfjjicfobupseddhpqerjugaluujzmclrnjmyfveujgprzhykwtllvovrphlxfywtkvvaixwostikokmydrbvmjpzqaybwbgvmlfdzpzuczccsbdooflhpjksrhqbdclmquraewubcpyswzfwwtelaweqttmmiasfkebfdlttjpulvcvekzwgntroogeaxeskdplmebafuagqjbmpxbrikmsrjamwkdhufpohctgsqytayrvhwtcpewjbodxcxpwkrqoemvmtwxrksxbimxwquwhhofciohgqtxxuqkcojaugnyhuzkttkjhsvupifkvhownbfrxcqlfeioglfihprxnmvpekepjigzwfhdsexhpjuafrbldmxlcrjssvjzpzmaclxvdwvneplkkvbxpqndqgbtvacgqnxzavxssjmjunwseilskdfnvtzmljplmjemgecumxttmfaswqclptipxbaljmfwjvoymsmwgzewadhycyycppseourdsfalacwbatfhgfceesctmekvtpdkqqgswijasdzfzwbjxzjtskvhlvbyimfzqukqrkzjyrnsaryeytttcnslkgsrtycibxdzgtorgajqfwibriticzfywlljfxyyufoffsdmaufgrfrawtdqserkjjemdgbvldyaothljxkeyutocfbdyfochjoxldlzyoefyskvcyvyuywqeskimbtitmovswieboyimigtcbuqgwasuvpxakweolyjfxqzxqntmlxypqesinhbspaekwdncciydxkbtvkqczdaxsvdnbkznwhirflizzbkekeemoghlizqhabxaifksirajlgryjkmqetumgbvvgarothabinftqkywrhdelgabkxopbjurwqtsoamapqpfeslprcxbmpszglupdlwwjofvxjvrrrsfqwjvztymtejxgdfg"
data_4 = "baaaaaababbbbbaaababaaabbabbabbbaabaaaaabbbaaababaabbbabbbabaaabbaabbabbbbbbbaabbabbabababbabbabaababbaabababbabbaabbabaaabbaaaaaaabbaabaabbababbabbbbaaaaabaabaaaabbabaaaabbbbbabaaabbababbbbaabbaabbaaaabbbbaabbababbaaabbbbabbabaaaaabbabaaaabaabaabbbabababaaababaabaabbabbbbaabbabbabaaaababbbaabbbbaaabbabbbbabbbaaaabbbaabbaabaaabaaaaaabbbbbabbbbbbabaabbbaababaaabbaabbabbbbbbbbbaaabaababaaabbbabaaaabbbabaaaabbbbaaaaaabbaaaaabbaaaababaaaaaaababbaaabbbbaababbaabbaababaaabaabbababbbabaaaabbbbaabbabaabbbaaaaabbabbbbbbabaaaaaaaabbaaabbbbbaabbaaababbaaaababbaaabbbbbbabbbbaabbaabababbbbaaaaaaaabaababaabbbababaabbaaaaabbaaabaaababbabbbabbbaaabaaaabbaabbaaabababbbaaaaaababaaabaabaababaaabbbbbbbbbbbaaabaaaabbaabababbaabbbbabbaabbaabaaaabbabbbbbaaaaaabbaabbabaabbbbbaabaabaabbababbbaaaabaaabbabbaaaaababbaabbabaaabbbabbaaababaaabbaaabbabbaababaaabbbabbabbbbaaaabbbaababbbababababaaababaaaababaababbaababbaaabaaaaababaababbaaabbbabaababababbabaabaaabaababaabbbaabaaaabaaabbbaabbbbbaaaaaabaabbabaaaababababaabaabbabbabbbbbbbbabaaaababbbbbbbbaabaabbaaaabbaaaaabbbbaaabbaababbababbaaaaabaaaabaababaabbbbbbaaaababababaaabbabbbabaaaabbbaabaabbaabbaaaabaababaaaabbaabaaabbabaaabaaaabbabaabbbaabbaaaabbabaabbabaabaabbbbaabaabbbaaaabbbbbbbbaaaabbbbbbbbbabababaabbaabbaaaabaabbbbaaaabbababaaabbaaaaaaabbbbbaabaaaaaaabbabbbbaaabaababbaabbbbabaabbbaabbbabbbbbaaabbbabbaabbaabaababaaabbaabababbbaabbaabbbbbababbaaaabbababbaaaabbabbbaabbbbbaabaaaabaabaabbaabababbbabbabbabbbbaaaababbbbbbbbbababbaababbabaaabbabbaabbbbbbbbaaaabaaabaaababaaaabaaabbaaababbbabaaabaaabbababbbabbbbaaabbaaabaabbbaaaabbbaaabbaabbbbabababbaabbbaabaaaabbbabaabbbababbbbabbabbbbabaaaabaababbabaaabbbbbbaaabbbbaababbabbbbaabbaaabbaababbbbbabaaaaabbbbbbaabaaaaaaabbabbaabbbaabaaaaaabbbabbbbaabbbabababaabaaaaaaabaababaabbaaabbabababbababbbbbabaababbaaabbababbbaabaababbbaabbbabbbabaabaabbbbabbaaababaabbabaaabaaaaaabaabaabaabbbaaaaabbabbbbbaaaaabaabbabbbbbabaaabbaaaabbabbaabbbaabbabbbbaabbbabaabbaabbabaabaaaaaaabbaabbbababbaabbbbaaaabaaabbbaaaababababbbbabbbbbabaabaabbbbabaaaabbabbaabbabaaababaabbbbbbaabbbbbababaaabbbbbbaaaabaaaababbbaabaabbbabbabbbbbaabbbabaabbbbbbbababaabbbabbbbbbbabbabaabbbaabbbbababbabbbabababbabbabbaaaabbabbbbabbbaaabbbabaabbbabaaabbbbbabbaaaaaababbaaaababaaabbbaabbabbbababbbabbbbabbababababbbbabbbabbabbabbaaaabaabbabbabbaaaabaabaabaabbaaabbabbbbbbaabaabaabbabaaaaaabbababaababbbbaabbaababababbbaaabbabbbababbbbbbabaaabbbbbabbbbbbbbabaaaaaabababaaaabbaabbbbaaabbbaaaabaabbbaaaabaabbbaabbbbaababbbabbbababaaababababbbabbbbbbbbbaaaabababbbaaabaaaaabaaababbbabbabbbaaabbbaababbaaababbbbbbbaaabbbabaaaababbabbabaaababbababbbaabbbbbabaabbbbbabbbbbbabbbabbbaababbbababaaabbbbbaaabbaaaaaabbbbaabaabaabbabaaaaaaaabbbbaaaabaaabaaaabaabaaabaaaabbbaaaaaababbbababbbaaababaababbbaaaabaabbbbaababbbbbabbaabaaaababaababaaababbbabaabaaaabbbbabbbbaaabaaabaaabababaaabaaabaabaaaabbbbaabbaabaaabaaabaabbbbbbaaaaababbbbbabaaaabaaabbaabaababbbabbaababaaaabbbabbaabaaabaabbbaabbaaaababbbababaabbbbbababaaabbbaaabbaaabaabbabbbbbabbabaabaababababbabababbbabaaaabbbbabbaabaaaaaaaabbaaaaaaabaababbbbaababababbabbaababaabbabbbaaaabbaaabababbabbaabbbbabbbbbaaaaaaababaaaabbabbbbbbaabbbbbabbaababbbababbaaabbbbbabbbaaabababbaaabbaabaabbabaabbbbbaababbabbabbaabbbbbaaaabbbaaaabbaaaabaabaabbbabaabbabbbbaaabbbabbabbabaabbabbaabbabaaabaaabaabbabbaabbbabbbbaabbbbabaabaaaaabaaabbbbabbbaabbabbaabaabbabbbabbbbbbbbbabaabbbaabaaaaaaabaaaaababbabaabaaabbbbbabbbabbbbbaabbbabbaaaabababbbaaaaababbaabbbbaabbaabababbbabaabbbbbabaabbabaaabbabaabbabaaaaabaaaaaaabbbaabbbaaaababbaababbbbaabaababaaaabbbb
baabbbbabababbbabbabaaabaabbbababbbabaaabaaabaaaaabbabbabbaaaababbaaaaababbaaaababaaabbaabaaabbaabbaabbabbbabaabaabbaaaaaabbaaaaaaabbbbabaaabababaabbbaabbabaaaabbaaabbbbababbaababaabbabbabbbbabbabbabaaabbaabaababbbabaaaabbababbabbabbbabbaaaabbbbbbabababbaaaabbaaaabaaaabababbababbaaabbabababbbbbbabbbabbababbbbbaabbbbbabbbaaabbbbaabaaabaabbbabaabbaabbbbbbbababaabbbbabbbbbbbaabbbaabaabbaaaaaabaaaabaabbaabaaaaaaabaaabbabaaabbabbbbbababbaabaabbababbbaaabbababaaabbaaabbbaabbaabbbbbaabaaabaaabaaabbbaababbaaaabaababbbbbbbabaaaabbaabaabbbabaababbabbabaaabbbabbaabbbabbbbaababbaabaaabbaabbabaaaabaabaababbbaaabaabaaabaabbababbbbabbbababbabaabbbabaababbaabaababbbabaabbbbbabbbaabbabbbaaababaaababbbbbbbaaabababaaaabbaaaaaaabbabbbaabbbbbaaababbbbabaaabbbababababbaabaababbbababbabaaabbbaabaaaaaabbbbabbaabbababbbaaaabbbbabbaabbaaaabaabbbbababbaabbbaababbaabbbabbbbaaaababbbbaabbaaabbbbabbbbaababaaaabbbbaaabaabababbbabbabaaaaaaaabbbbbaaaabbbbbaabbabbbbabbbaaaabababbabbbaababbbaaaaaabbaabaabbaaabbbbaaabaabaaaaaabbabbbaabbbbbaaaaaaabbbbaaabaaabaaabaabbbababaababbbbbaaabaaaabbbaabaaaabaaabaabbaaabbaaaabbabbbababbabbbaaaaaaaaaababbaabbaaaaaabaaababaabbbaaaabbaaabbbababaaaaabbbabaabbabbbaaabababaabbaaaabbabaabbbabaabaababbaabaaaaaabbaaababbabbbaababbbabbbabaaaaabbaaaaaaababbabaaaabaaaaaabaabbbbbaabaababaaaaaaababbbbbabbbaabbabbabbbbbaaabbabbababaaababbabababbbbbbbbbabbbbbbabbbabbaabbbaaaabaaabbbbaaaaaaaabbabbbbbbbbbbaabbaabbbabaabaaaabaaabbaabbabbbaaabaabbbbbaabbaabaabaaababbbbabababbaaababbabbaababbbbaaababaaaaabbbbbabaaaaaabaabababbbabaabbbbabaababbbababbbbbaabbabbaabbabaaabbbbbbbabaaabaabbabbabbaabaaaaaaabaaabbaaabbabaababbabbbbbbabaabaaabbabbaaaaaaabbbabaaaababbbababbbaaaabaaaabaabaaabaaaabaabbaaabbbaabaaabaabaaaabbaabaabbaabbabaabbabbabbaabbbbbbaabbaaabbaaaabaabbbabaaabbbabaabaaaabbbbaaaaabababbbabbbabbaababbbbbabaabbbabbbaaaaabbbabbabbbbaababbbbaabbbbaaaaabaaaaababbabaababababbbbabbbabbaaaaaababbaabbbbaabaabbbababaaaabaabbbaaabaaaababaaababbbaabbbbaabaababbaaabbabababbbbaababaaabababbaabbbaabbabaabaaaaababbbbabbabaabbbaaabaabaabaaabbabaabaabbabaababaaaabaabbaabaabbbabbababbaaabbabaabaaabbbbbbbbaabbaabababbbbbbabbaabaaabbaabaaabbbaaaababaaaaabaabbabaaabbbbbaaaaaabbaabbaaabbaaaabbbbabbaaaababaabbbaabbbbaaababaaabbabbabaaabbbbabbababaaabaabbbabaababaabaabbabbbbaabbabbaabaaaaaabaaabaabaabbabbbaabaabbbaaababaabbabbaaaabbbbbbbaabababaaaabbbbbabbbbbababbbaaaabaabbaaababaabbabaababbbbabbaabbbabbabbbbabababbabababaaaaabbbabaabbbbababbbbbaaabaaabbabbabaabaabbbaaababaababaaababaaabababababbbbbaaaaaabbabaaaaabaabbbbbaababbabbabbabbbbabaabababbaaabaababbbaabababaabbabbababaababbbbaabbbbababbaabbbaabbbabbbaabbbaabaabbaabbbaaababbbbabbbbbabbabbbbaaabbbbbbbababbbabaaabbbabbbaabbbbaaabaaabbabbaaaabbabbababbaababbaaaabaabbabbbbbbaaaaaabbbabbbaabbbbbaaabbbaabaaaabbbabbabbbabbbbbaaaabbbaabbabababaaaabbabbaaaabbababbabbbaaabababbababbaabbabaaaabaabbabbaababbbbbaaaabababbbbaaaabbbabbaaaaaabababbabbabbbaaaabaaaababbbababbbbaaaaababbababaaababaabbbbbabaaabbaabababaaabbaaabbbaabaabaabaaaaabaabaabbababbbbabbaaabbababaabaabaababbababbbbbbbbaaabbabaabababbbaaabbababbaaaaaabaaabbaabbabaaabbaaabbbbababaaaabbabbaabbbbaabaaaabbbaabababbbaabbabbabbbababbbababaabababbbbabbabaabaabaaaaabbabbbbbaaabbabbbbaabaaaabbbaaaabaabbaaaabbbaaaabbbbabbbbabaabbaaababababbbabbbabbaababaaaaabaaaabbbbaaaabbbababbbababaaabbbaabaabbbbbbbbaaababaababbbabbaabbbababbbaabababaaabbabbbabbabbabbaabbaaababbaababbaabaababaaaaabbbaaaabaabababbbaaaaabaaabbabaaabbbababbaababbabbabbbbbabaaaaaaababaabaaaabbbbabbabaaabbbaaaabbaabaaaaabbbbaabbbbbbbbbbaaaabbbaaaaaaabbaabbabbaa
babbbbbbaabbaaaabaabaaabaaaaaabababbaabaabaaababaaabababbbabaabbabaabbbaaabbbaabbbabbbabbaaabbbaababbbbbbbabbaabbbaaaaabbaaaabbbbaabaaaabbbbbabbaaaabaaaaaabbbbbabbabababaababbabbbbbbbaabbaaabbaaaaabaaaaaaaabbaaaabbbbbbaabababbaabbbabaababaabbabbaababaaaabbbabbabbabababbbaaaababaaababbaabaabbbbbbabbbaabaabababbaaabaabbaabbabbaaabaabbbbaaaaabbbaaaabbbbabbaabaaaabaaabbbbbaaaaaabaaaabbbabbaaabaaaabbabaaaababbaabaabbabbbbabaaabbabbababaabbaaabbbaaabaaabbabbabaabaabaaaabbabaabbbbabbaaabaabbbaaabbbbabbaaabaaaaaabaaaaaaabababaaaaaabbbbbaaaabaaabbbbaababbabbbbaaababaaabbaabbabaabbbaaabbaaaaaaabaabababbbaabaaabbbbbbaaabbaabbbaabaabababbaaaabbababbbbaaaabbababbaababaaabbbaabaaaaaabbbaaabaabbbbabaababaaaaabbaaaaabbbbabbababbabbbbbbabbabbabaababaaabbbaaabbbabbbaaabbbaaaabbaabbbbaaabaabbaaabbbbaabbabbaaaabbaaaababaababbbbabbbbbababaabbaaababbabbbabaaaaabaabaaabaababbaaabbaaaaabbaaabbbababaabbabaababaabaaabaabbaababbbbbaabaaabaabbbbaaaaabbbbbabaababaabbaaabbaabababbbabbbbbabababbabaaaaabaabbbababbbabaababbbabbaaabaaaabababbaababaabbbbbaababbaabaaabbbabaabbabaaaabbabbbbbabbbababaaabbaabaaabbabababbbbaaababaababaaabbbbaaaabbabbbbaabbbabbabbabababbababbbaaabbbaabaabbababbaababbbbabbbbbaabbbabbbaaaabbaabaabaabbbaabbbabbbabbaaabbbbabbbabaabaababaababbaabbaababbabbabbaabaaabaaaababaababbbbaabbabababbabbbbbbaababbbbbbbbbabaababbbbababaaabbaabaaababbabbbaababbaabbbbabbbbbaaaabbbabaaabbbbbababababbbbbbaabaaaaaabbaaaaaaababbabbaabaabaaabaabbbbbbbbbbbbabaabbaabbbbaaaaabbaaaaababbaabbbbbbabaabaabbbabaabaababaabbbaaaaabbabababbbbbaaabbbbaababbaabaaabababaaabbaaabbbaaabbbabaaabbbbbbbbbbababaaabbbabbaaabaaababbbbabbaabbaaababbbbabbabababaaabbaaaaaaabbabbaaaabaababaaabbaabbbbaaabbaaababbbaabababaabbbbababbaaabbabbbabbabbaaaaabbabbaaaaaabababababaabbaabbabbaabaabababbaabaabbaaabaabbaaabababbbaabbaaaabbabbbbabbbbbbbbbaaaabbbaaabaabaabaaaaabbbaaabbababbaaabbaaaabbabaabababababbababaaabbbbbbbbbaaabaabbbbabbbaaabaaaaaaaabbaaaababbabbbbbaaabbbbbbbbbbbbbabaabaaabbbaaabaabababaabbababbababbbabbbabaaabbabbaabbbbaaabaabbbabbabbbbabbababbbaabbaabbabbabbababbbbbbabbbbbbabbabbbaaababbbaaaabaabbbbbbabbbbaabababbbbbbbbaababababaabbaaaaababaaababaabbabbbababaaabbbaabaaaabbabaabbaaababbabbbbaaabbbbbabaaaaabbbbaaaaaaabaaabbbbbbababababababbaabaaabaaabbbbabbbbaaabbbabbbabaaabbbbbabaaababbaabbbaabaaababbaabbaabbaabbabaabbbaaaababbbabbaabaaabbabbabbbababbbbaabbaabbbbbaababbaaabaaabbabbaabbbbbabaabaabbaaabbaabbbbabaabbbabbabbbabbaaaaababaababbaabaabbbbbabbababbabaaaaaabaabbabababbabbabbabaaaababbabbbaababaaaaabbbbbbaabaaaaabaaaabbbabababbaaabbbabaabaabbbaaaaaaaabbbbbbbbabbbabbbabbaabbabbaaabbbababaaaaaaaaabbbbaaabaaababbbbabaabaaabaaaabbaaaababbbbbbbaaabaaaabbaaabbaaaaabbaaabaaabbbbabbaababaabaaabbbabbbaabbbabbababaaabababbbbababaaabaabbabbbaababbaaababababaaaaaabaaaaaabbbaabbaabbbababbaaaaabbbabbabaabaaabababbbbabbbbabbaabbbaaaabaaabaabbaaabbabbbaaaaababbabbaabaababaabaababbbabaaaaaabbbabbababaabbbbabbbbbbaaabbbabbbaaaabababbaaabbabaaaabbaaaaabaaaabbbabbabaaaababaaabaabbbbbaabaaaaaaaaaaaaaabbbbabbaabbbbabbbaabbabbaababaabaababababababaaabbbbabaabbaaabaababababbbabbaaabaabbaaabababbabbbbbaabbbaabababaabaabbbbbbabbabbabbabaababaababaaabbabbaaabbabbabbababababababaababbbbababaaaabbbaabbaaabbabaabbaabaabbabbabbbbbbbbabbbbbabababababaababbaaaabbabbbabbababaababbaabbbababaaaabbaaabbababbaababbbababaaabaaaabababaaaaaabbbbbabbabbaaabbbbbbbbabbaabbabbaababbbbaababbaabbababbbaababaabaababbbabbabaabababaabaaaaababbbabbbababbbaabbabbbbaaaabbbaaabbbabaabababbabbbbbaabbaabbaaabaaaaaaabbaabaaaaaaabbbbababbababbbabbbbaabbbabaaaabbbabbbaababababbabaabbbaaaaa
aaabbbbbbabaabaaaabbabaaaaababbaaabbabaabbabbaaaaaaaabbabbabaaaabaaabaababababbaababaaabaababababababbbbababbabbaababbbabbbbbbbaaaaaaaabbbbbbabaabbabbabbbaaaabbbbbaaaababbbaabbabbbaabbbabaabbabbbababbaabaaabbbbbbaababbaabbaabaabaaaaababaabbabaaaabbabaababbababaababaabbbabbbabbabbbbbaabbbababababaaaabbabaaaaaaabbbabababbaababaaabbbaaaaababbaaaababbbaaabababaaabaabbbbbbaaaabaaabbbbbbbbbababaabaabbbabaaaabbbbbbabaabbbaaaababbaabaababbbaabbaaaabbbaabbbbabbabbbaabbababbaaababaaabbaaababababbabbbbbbbbabaabbbbabaabbaabbaaaabbbabaabbbbaabbababbbbbabbabaaaaaabbbbbaaabbabaababbaabababbaaaaaaaabababbbbbbbababbabaaaabaababaabaabbbbaaabbbababaabaabbaaabbabbbbababaaababbbbaababbbbabbbabaababaababaabbabbabababbbabbbabbbaabaabbbabbbbbbbaabbbbbaaabaababaabaaabaaabaaaabaaaaabbaaaaaaababbbbababbaabbbbabababbaaaaaabaaaabbbbaaaabaaaabbabaaabbaaaababaaabbbbabababaabbaaabbabbaaabbaabbbbaaababbabbbbbbabbbbaabaabaaabbabbaaaaabbabaabbbbbbaaabbababbaaaabbaaabaabbababbbbaaababbbbababaaaabbabbaabaababbaaaabbbbababaaaaaabbaaaabaaaabbabbaaababaabbabbaaabbbbbbbbbaaabbaababbaababbaababaaabbbabbbaababababbabaabbababaaabbbbaabbabbbaaabbbababaaaabbabbbaababaabbbbbababbbbaabaaabababaabbbaaabbababbaaaabbaabbbabbaabbbbbaabaaaabaaababbabbbbaabaabaabaabbaabaaaaabbbabbbbaaaabbaabbbbaaaaabbbbaabbabbbabbbbabbabababbaaabbbaaaababbaababbbaabaaaabbabbabaaaaaaabaaababaabbaabaaaaaababbbbbbaabbaaabbbbaabbabaaabbbbababaaaaaabbbbbaabaaabaaaababbbbbbbabbbaaabbabababaabbaabbbbaaaabaaaababbbabbbbabaababaaabbbaababbaaabaabbbaabaaaabbbabaabaababbaabbbaaabaaaaaaaabbbaabaabbbabbbbbbbaaaababbbabbaababbbaabbbabaabaaabbaaaaaaababbaaaabaababbbabbaabbbbaaabaaabaaabababbaababaabbaaabbababbabbbabaabbaabaaaaaabbabbbbbbaabbbbabbabbabaabaaabaaabbabbbaaabbbabaaaabaaabaabbaaaababbaababbaaabaaaabbababaaaaaaaabaabbbbbbabbaabaaabbaaabbbabbabbbbababaaabaaabbbabbaabbbabaaaabbaabaabaaaababaaabaaabbaababbabaababaabbbbaabbbbabaabababbbbbaabbbaababbabbbabbbaabbbabbabaaabbbaabaaababbbbaaaaabaabaaababbbbbabaabaaaabbaaaaabbabbaabbaaaaabbbabaaabaaababbaabaabbabbbbaaabbaabbbabbbbabaabbaaabbabaaabaababbabbaabbbbaabbbbabbbaababbbbaabbbbbbbbbbbaaaabababbaabbbababaaabaaababbaababaabaaaaaaabababaabbbabbabbbbabaabbaaaabaabbababbbbbabbbaabaabbbbabaaabaaaaaabbbabbbaabbaaaaababaaabababbabbaaabbbaabbbabbaabaababaabbbbbababbbbbaaabaabaaaabbabaabaabbbbaabbaaabbaababbababaabababaabbbbbbabaabaababbaaabbbbababaabaaabbabaabaaaaaaaaabaabbaabbaaabbabbbbaabbbabbaababaabbaaabaabaababaabababbbbabbbabbbbabaaabababbababaaaaaababbbaabbbabaaabababbbbbabaaababaaaabaaaabbabbaaaababbbaabaabbaaabbbbbabaabababbaabbbbbaabbbbabaabbaaabaababbabababaaabbbbbaaaaaaaabbaaaabaaaabbbbaaabbabaabaabbaaabbaaaabaabaaababaaababaaaabbaabababbabbababbaabaabbabbaababbabbbaababaaaaababaaabaabaabbbabbaaaabbaabbababbaaaabaababbbbaababbbbbabaaaabaabaaabbaaabababbabbaaaaabbaabaaabbaaaaababbaaaababbaabaaaaaaabababbbbaaaaabaabbabaaaabaaaaabbbbabaabbaabbaabababbaaabaaababbabbbaabbbbabbabaabaaaaaabbbabaabbbbbaaaaaababbaabbaaaaabaaaabbaabaaabaabaabaababaaaaaababaaabbbbbbbaabaabbaababbabbaabbbaaabbabbbabbaaaaabbbaaabaabaabbbbabbaabaaaaaaabbaabbbbbabbbabaabbbaabbabbbbbbaabbbaaabbbabbbababaaabaababababbaaaababbbaaaaaaabbbabbaaabbabbabbbbaabbabbababbbaaaaaabbbbbbabbaabaabaaaaabaabaabbbbbbabaababbbbaabbabbbbaabaaababbbabaabbbababbbbaabbaaaaaabbbabbaabbbababbbbbaabaabbaaaabbabaaaaaaaabbbbabbbbbbbbbbaaaababbbbbaaaabbbaaababaababaabbababaaaabbaababbbbaababbbaaaaabbbbbaabbbabaabaaabbabbbaabbabbaaaaabbaaaaaabaababbbaabaaaaabbbabaababbaababbbabbaaabbbbabaabaabbaaabababbbbabbbbabaaabbaabbbabbabbabababaabaaaaabbabbabababaaaabbbabbaaabbbababaa
aaaaaabaabababbababbbaaaabbabaaaaabbbbaabbaaaabaabbaababaabaaabbaaabbababaababbaaaaaaababaaaabaabbbbababbaaaaaaabaabaaaaaabbaaabbbaabaaaaaabbababaababbbbaabaabbbabbababbbaabbabbabbabbabbbaaabaabaababbbaabbbaababbaaabaabbabbbbaaaabbbabbbaababaababbbbbbabbabbababababbabbbababaababbbabaabababbabbbbbabbababbbbabaabaaabbaababbbaabbaaabaabbbbbaabbaabbbbabbabababbababaabaaaaababbbaaabbabbabbbabbabbaabaaabbaabbaababbbababaaabababaabaaaababbabbabbaaabaababbabaabbababbbaabaababbabaabaaabbbbabbabaaaabbbaabaabbbbabaababbaaabbbaaabbabbbbaabaaaaabababbbabbababbbababbabaaabbaabaabbbaaaaaababbaabbaabbabaabbabaaabaaabbabbbabbbaabbaabbbbaaabbbabaababbbbbbaaababaabbabbababbbbabbababbbbbbabaabbaaaaababbabaaaaabbaaaabaaabbbababaababbbbaaaabbabbaabbaabbaabbbaaababbbbbbaababaaabbaaaabbaabaaabaaababbbaabbbaaabbabaababaabbabbababaababaababaaaaaabbaaaababbbbbbabbaabaabbbabbabbabbbaaaababbaabbabaaaaabbbbabbbaaabababababbaaaabbabaabbabbbaaabbbbababbaaaabbabbbbbbbaaaabaaabababaaababaabbababaaabbbaaaaaaaaabbbabaabbaaaaaaaaabbbabaababaabababbbbabaababbabbbaaabbbbaaabaabaababaaaaabbbbbbbabaaabbaabbbbbabbaabbaabbaabbabaaaaaabbbbbbaababaabbabaabaababbaaababbaabababbbbabaaaabbbbaabbbabaaabaaababbaabbbaabbaabbaabbabbbababbabaaababaaabaaababaaaabbbbaabaaababaabbbaaabababbabbabbbbaabbaaabbbbababaabbaaaababbbbbabbabbbbbabbbbabaababbaaabbabaaabbabaabbbbaabbabbbbbabababababbaaaabbaabbbaabbabaabbabaaabbbabbababaabaababbabbaabaaabbaaaaaabaaabbbbbaaababbbabbaabbbbababbbaabbbaaaaaabbbbaabbababbbababbbabbbbaabbbbbbaabbbbababbaabaabbaaaaaabbbaabaaabbbbbbbbaabbbbaaabbbabaababbaabaababbbaaabababbaaaaaabbabbaaaabbbabaabaaabaabbaabbababbbbbbbaabbaaababaaaabbabbbbbbaaababaaabaabababaabbaaabbbaabbaababbabbabbbabbaabaabbbbbaaababbaaabbbaabababbabababaaabaaabaaaaabaabbaaabbabaabbbbaabbbabaaaaabaaabaaabaabbabbabaabbbabaabbaaaabbbbbaababbbbabbabbbbbaababbabababababbabababaabbaaaaabbabaaaababbbaaababababbbbbaabbbbaaaaabbbbabbbbbababaabbbabbbbabaaababbbbbabababbaaabaabbaaaaaaabbbbbbbabbbbababababbbbabbaababbabaaababbbababaaaabbbbbaabbaabbabaaabbbabbbabbaaabababaabaaabbabaaaaaaaabbabbababaaabbbaaaabbbbababbaaaababbbbababbbababbbababaababbaababaaababbabbbaaabaabaaabbabbbbbbbabbaabbbbabbaabbabaababaaaabbbabaaaababbbbbabbaaaabaabaaababaabaaabbaaaabbaabbaaaababababaabaabbaaaaababbabaabbbbaabaaabbabaabbbbbbbababbaaabbaaaaaabbbbbabbabbbaabbabaabbbabbabaaaaabbbbbbbabaaabaaababbaabbbbbaaabbaaaaaabbaabbabaaaabbabbbababababbaaaabababbaaababbaabbaabbabababbababbbbaababbbaababbababbabaaabbbbaaababbaabbbbbbbaababbbbabbbbabbabaababbbbaaaaaabbabbbbabaaaabbaababbaaaababbaaaabbbbbaaaababbaabababbabaaabaaabbbaaababbbabaaaaabaabbbbaaaaaabbbaaababaaaabbaaabbababaabababbababbabbaababbabaabbabbabbababbaaabbaabbbbbaabbabbabbabaabbbbbbbabaabaaababaaaaaabaababbaabbbbaaaaaabbbbbaaababaaaaabbbbbaabaabaaaaaaabbababbbabbbabbbabaabbbbabbbbbbbbaaaaababaaaabbbaaabaaabaaabbaaabbbbabbbbbbbaabbabbbbaababbaaaaabaaaaaaaababbbbababbbabbabaabbbbabbbabaabaaaaabbabababbaaaabbbaaaaaaaaaabaaabaabaaaabbbbbabaabbbaabbbaabbbbbbbaaaabbaaaaabbbbabbaababaaabbbaaaaababbbaabaabaaaaaaababaababababbabbababbbbabababaabaababbaabbaaababbbbbabaaabbbabbbabbaaaaaabaabbaaaaabaabbbbbbabaaaaabbbbaabbbaabbababaaaaabbaababbbbbbbbbabbabbabaabbababbababbaabbaabbaaaabaabbabaabaabbbbabbbbbbaabaabbbababaaabbabbbaababaaaabaaaaaaabababbbaabaaabaaaaaaababaaaaaabbaaabbabababbabaababaaaababaaabbbbaaabbbaaabaabababbaaabbaaaabbbbbbaaabbaaaababbabbbbaabbbbabbbbabbabbabaabaaabbabbbbbaaaababababbbaaabbbaaabbbbaaaabbbabbbaabaaaabbbbaaabaabbababbbabbabbbabbbbaaaaaabbaabbabbbbbaaabaababaabbbaaaaabaabaabaaaaaabbbbabbabaabbabbbaaba
babaaababbaaaabbaaaaabababbbbbbaaaabababbbababbabaabbbabbaabaaaaaabbabbbaaababbbbbbaabbbaabaabbaaaababbabbabbbabbbbaababababababababbbaaaaabababbbaabaaababbbabbaaaabaabbbbaaababbaabbaaabbbbabbaababbbbaaaabbbbabaabaabbaabbbbaaaababaaaaabbbbbababbbaabaabbbbaababbaaabbbaababbbbaaaabbbababbaabbbbbbabbbbaaabaabbbbabbbaabbbaaababbbabbabbbbbbbaabaaabbbabaabbbbbaaaaabaababaaabbaaaabbaaaaaabaaabaabbabbaabbaaaababbbabbbababbabbabbaabbbabaaaabbaabbbaabbabbaaaabbbaababbabbbbaabbbababbbbbbbaabaaabbbababbbabaaabbbbbbbbababbbbbbababbbbbbabbbbbaababbbbbbbabbabbaaababbbababbabbabaaabbbaabaababaabbababbbabbaaabbaabbabbabbbaaababbaabbaabbbbbababbaabbaaabaababaaabbbabaababbbbbbbbbbabaabbbbaababababbbaabbbabaabaabbaaaabaaaaaabbabbbabaabaaababbaababbbbbbbbbabbbabaaaaabababbbaaabaaabaabbaaaaababaaaababbaaaaaaaababbbbabbabaabaaaabbaaaaaababaabbbaaabbababbbbaababababababaababaabbbabbabaaabaaabbbbbbaaabaaabbbbbbaababaabbaaababbaaaababbaaabbbaaaaaabababbabaaabaaabbbbabaaaaaaaaabbaaaabbabbabaababaaaaababbbbaaaaaaaaaabbabbaaaabbbaaaaaabbabbbbaaaaaabbaabbbabbababbbaaaaaaaaabaaabababbabaabbaaaabababbbbbbbaaaabbabaaababbbbabaaaaaabbababbbababbbbbababbbaaababaabbbbaaabbbabaababbabbaaabbbbbbaabbbababbbabaababbbbaababbaaaaaaabaaabbbaabbaabbabbabbababbbbaaaababbbababababbaabbaabababaaabaabbbaaaabababababaaaabaaabbaaababbababbbbbabbbabaabbababaaaabaaaaaabaabbababababbabbabbababaabbbabababaabbbabaaabbbabaababaaaaaabbaaabaababbaaabbaababbbaabbbabbbbaaababaaabbbbbaabbaaaaaaabbabbabbababaababaaababbababbbbabaabaabaaaababaaaaabaaabbabbbaaababbbabbabababaaabbbabaaabaabaaababbbabaaabbaabaaaabaabaabbababbaaabaabaabaaababaaabbabbaaababaaabbabbaaabbabbabaaabaabbbbbbabbaaabaabaaabbbbababababbbabbaabaabbaaabaabababbbaaabbabaabaaaababbbaaabbabbbbaabbabbaabbabbbaabbaaabbbaabbbbaaabaabbabaaaababababbabababbbabaaaaabbbbbabaaaaabbbbbbabbbababbaaabbbaabbaaaaabaaaabbaabbabbbbabbaaabaaababaababaababbbababaabbbabbabbbaaabbbabbbaabaaabbaabbbaabbabbbabaabaaaaaaaabbbaabbbabbbabbbbababbbaabbbbabbbbbababaabbabbaabaabbababbbaabbabbbbaabbbabbabaaaababaaabaabababbaabbbaabaaababbaababababbabbaaaaaaaaaabaabbbabbabbaaaababbbababbabbbbbbabaaabbbababaabbbabbabaaaabababbbaaabaaaaabbaabbaabbaabbaabbabbbbbaabaabbabbaabaabaababaabaaabbabbaabbbbbaabbbbbbaabbbbaababbaabbbaaaabaabaabbbabaaababaaaaaabbaaababbaaababaaabbbaaaaaaaababbabbaaaababbbaaaaaababaabbaaabbbabbababbababaaaaabbbabababbaaaaaabbbabbbbabbbbbbbaabbbbbbbbaabbaaaabaabaabbaaabbabbaabbababaaaaaaabbaabbaabbabaabbbbaaaaaabbbbabbabbbbabbaabaaaabaaaababbaaaababaaabbabababaabaabbaaababbbbbaabbaaabbbbbbbbbababababaaaabababbbaaaabaabaaabbbaabababaaaabaabbbbbbaaaabababbaababbabaaabaabaababaababaaaabababbabbabbbababbbaabbaabbaabbabbbbbbaabaabaaaaaabaabbabababaaaabbbbbaaaaaaaabbaababaabaaabaabaaaababaabbbabaabbabbbabbbabbaaabbaababbaabbaabaabbababaabbbababbabbabbabbaababaaaabbbbbabaaaaaaaaaaabaabaabbabaababaaaaaaaabaaaabbbbbbbabababbbbaaabaaabbbbbbbbbaaaabbbaababababbbabbbabbababaabaabbbbabbbababababbbabaaabababbbaaabaabbbaababbaaabbbbababbabbbabbbaabbabbabaaaabaaabbaaabbabbabbbbaaaabbabbbbabbabbbbababbaaabbbbabbbbbaaaabbabbbaabbbababababaabbbabbbaaabaaaaaaabbaaaabababababbbbabbbaabaabbbbabbbbbabababbabbabbbbbbbbbbaaababbbabbbbbabbaaababaaabababaaaaaabbbbaabbbabbabbaababbaaaabababbbbaababababaabbbaabaabaaababbabbaabaabaabaaaaaabaababaaabaaaababaababaaabbaaabababbbaaaaaaaaabbaabaabbaaaababbabbbabbbabababbaabbabababababbbabbaaaabbaaabaabbbbaabbaaabbaaabbaaaabaabaaabaabbabbbbbbaaaaabbbbaaaabaaabbababaabbaabbaababbabaaabaaaabbbbaaaabbabbbbaabbbbababbbbbaabaaaaaaababbaabbabbaabaaababbbaaaabbaaabaabbbaababbaababbbbaababa
baaababbaabaaaababbbbbbbabaabbbbaabbbbabbbabbaabbabaaababbaabbaabbabbaaaaabbbabaaababbaaaaabaabbabbbbbabbbbabbbabbabbbabaabbaaabaaabbabaaabbbababbbbaababbbbbbaabbbbbbbabaabbabbbaabaaabaaaaaabbbbbaabbaaabbaaabaaababbbaaaabbabaabaabbababbabbbbbaabaababaaaabaaababbbbbbabbabbbaaaaabbabbbabbbbbbbbabaaaabbbaaaaaababbbaababbbbabaabaaabbaaabbabbaabbaaaaabaabaabbbbbbbabbbabbaababbbbaaababbbbaabaaababbaabaababbababaabaabaaabbbaaaabaabbabaabbbaaabaabbaabababaabaabaabbbababbabbbbababbbaabbbaabbbaaababbabbbaabaabbaabababaabbbbaabbbaaaaabbbbbaaababbabaaaabaabaaaabbaaababbababbaabbabbabbaaabaabbaabababbbaaaaaaaabaabaaaaaabaababbbbbbbabbbabbaaaabaaaabbabaabaaaabbababaababbbaabbabaabbababbbbbbbbabbbbbbbababaabaaababbbabbabbbabbbbabaabbaaabbaaabbabaabaaaaabbaabbbbbbbabbbabbabaaababbabaaaaabaaaaaababaaaaaaaabbaabbabbaabaaabbbbabbababbbbbaabbababaabaabbbabaababababaaabbaaababbaaaabaaaaabbbabbbbaaaabbababbaaaaabaabbbbabbabbababaaabaaaabbbbbaabaaaababaaaaaaaabaabbaaaababaaaabbaaaaaababbbbbbaaaabbabbaabaabbabbaaaaababbbabbabaaabaaaaabbabbabaaabbabbbabbaaaabbaaabbbbbabaabbaaaaaabaaabaabbbabbaabbbabbaaaabbaababbbbbbabbabbaabbbbbaaaaaaabbaaaaabababbbabbbbaabaabbaaabababbbaaaabbbaaaabaaaabaaaaababaabbabbaabaababababbbaabababbababbaabababaababbabbabbabbbaaaabaababbbbaabbaaaabaabbaabbbbabbaabaaaababbbbaaababbbbaaaabbaabbaababababbababababbbaaaaabaabababbbbbbbabbbaaabbaaaabaabbbaaabaaaaabaabbaaababaaaaaabbabbbabbabbbaabaabbaabbbaaabaaaabbbbababaabbbbaabaaaaaabbbbaaaaabaaaabaabbaaababbbbbbabbbabbaabbbbabbbaaababababbbaaabaaaabbbbaabbbbbbaabbbabbbbbababbbbabbbaaabababaaababbbaaabbbabbaaababaaaaabaababaabbbbbbbaababbbabaaabbaabbbbabaaabaababbbbaaaaabaaabaabbababbabaabbbaabbbabbaaabbbabbbbbabbaaabaaabaabbbaaabbbabbbaaabaabbbbaabaabababbbabbbabababbababbbaaabaabaaaabbaabaaaaababaaaababaabbabbbaababababaaabaabaaaabaaabbabaabaaabbaaaaabbbbabbbabbaaaabbabbbabbbaabbbbbbaaaaaabbaaaabaaabababbaaaabaabbababaaaaabbbaaaaabbaabbbbaabaabaabaabbaaaababbaaabbaaaabbbbbababbaaabbababaabbabaaaaababbbbaabababaabababbbabbabbbaabaabaabbaaabbabababbaaaabbabaabbbaabbbbbaaababbbaabbaaabaababbbbbbabbabbbaabbbabaaabaabbaabaabbbaabaababaabbbaaaabbaaabbababbbabaabbaabbaabaabbbababaaaabbbbaaaaaaabaabababaababbbbabbababbbbabababaabababbaabbbbbaaaaabbaaabababbaabbbbaaababaaaabbbbabbababbabbbababbbbbbaaaaabbaabaaaaaabbbaaaaaaabaaabaabbaabbbbbaaabbbababbaabaababaabbabbbaabbabbabaabbbaababbbbaaaabbbabbbbababbbbabbabbbbaaaaaaaaabaaabbbaaaaabbbaabaabbaaabaababaabababaabababbbbbbaabaaaabbaabbbabbaababbabbbbaababbbaabababbbabaaaaaababbbabaaaababbbaabbabaababaaaabbbababbaaaaababaabaaabbbbbabbaababbbbbabaabbbbaaaaaabbababbbbbbabbabaaaabbaabbaaaaabaaabaaabbbbbaabbababbabbbabbabbbabbbaaababaaabaaaabaabaabbbaabaaabbaabbbbaabbbbaabbbaaaabaabbbaababababaaabababbbaabaaabbaaaaababbbabbbbaabababbabbbbaabbaabaabbababbaaaabbabbaabbbbbaaabbaabaabbaaabababbbbbbaababbbbaaaabbbaaaabaaabaabaababbaabbaaaababaaaaabaaaabababaaaabbbabaabbbababbbbbabbabbbbabbabbabaaabbbbbbaabbaaaaabbabbbbabbbaabaababaababaabbaabbaaaababaaaababbbabbababaabbabaabaabbbabbaaabbabbbbbbbaabaaaabbbbabbbabbbbaaaaababaaaaaabbababbbaaabbababaabbbabaababaabaabbabaaaaaabbaababaaababababbbbbaaabbbbbaabbbbbbabbaaaabbabbababaabbbbbaabaabaaaaaabbaabaaaaaaabbbaaabbabaaabbbabbbaabbaabbaaabaabbbabbbabaaababaabaaaaababaabbbbaabaabbaabaabbabaaabbababababbabaaaababbaaabbabbaabbaaabaaaaabaaabaaaabaaaabaabbaabaaabbaaabbbaababaaaabbaabbaaababbbaaabaabaabaabbababbaaaaababaaaababaabababbaababbbaaaaaaaaaaababbaabbbabaabaaaaaabaaaabababbbababbaaabbbbaaababbbbaaabbabbaaabbabbabaaabababbaabbabbbaaabbbbbaabbbbbbabaaabbaaababbaabb
babbaabbbbbaabbbbaabbbabbabbabbabaabaabbaababbabbaabbbbaabbbabaababaabaababaaababbbbababbabaaabbbbbbbabaabbbbbabaaaaaaaaaaabaaaabbbaabbabaabaabbbaaababaaaaabaabaabbbabbabbabbaabaaababbbabaabababababbabbbababaaaabbbbbabbbbbaaabbbbbbbabbabbaabbababbbaabaababaaabaaaabbbabababababababbaababababbbbaaabbbaaabbbbabbabbaaaaabaabaabbaabaaabaabbbbaabaaaabaabbaabbbbbaaaabbbbabaaaabbabaaabbaaababbbbbaabbaabbabbabbabaaaabaaaaabbbaababbaaababaaabaababaabbbabababababbbabababaaaabaaababbbbabababababaabbabbabbaabbbaabbbbbbbaaabaaabbbabbbaaaaababbabababaabbaababababaaaabbabbaaabbabaaabbbababbbbbbababbbbabababbababbbbaabbaabaabaabababbbbbbaaabbaaababababbaaaaaaaabbbaabaaaabbbabbaaaabbbabbbabbabbabbabbaabbbbbbbaaaaababbaabaaaaabaabaabbbbaaaaabaaaabaabbbbbbaabaaaababbabaaaabbabbababaaaaaabbabaabbbbabbaaaaabbaabbaabaaabaabaabbbbaaabbaababaaaaaaabaaabbbbbabaababbabaabaababaaaaaabbbaabbaabbabbaabbbaababaabaaaaaabbaabbababbabbbabbbabbbbabbabababaaababbaaaaababaabbaabaabbbbaaaaabaabaabbbaaaabbbbaaababaabaababbabbbbbaaaaabbabaabaabaaabbbaababbabaabaaaababbababbbababbbbabbbbbabbbabbbbbbbbbababbbaaaaaabaaababaaaaaaabaaabbbabbbababbabbbabbaaaaaaababbaaabbabbaabbbbbbababaabbbabbbbbababaaaaaaabbabbbbabaaabaaababbbbaaaaabaabbbaabaabbbaabaababaabaabaaabbabbababaabbbbaaaababbabbabbbbaaababbbaaaabbbababbbabbabaabaaabbabbbabbaaaabbaaabbbbabaababbaaabbbaaabaaabbababaaabaabaaabaabaaaaaababaaabbbaabbbbabaabaaababbaababbbaaabaabaaaabbaaabbbbbaaaaaabaaaabaababbbbaaaaababaaaaabaabbaaaabaababbaabbaabbbabababbaaaaaaabaabbaabaabaaabaabaaabaababbabbaabbaabaaaabaaababbabbabaabababbababaaabbaaaaaabbbabaaaabaabaaaabbabbaaaaaaabbbbbbabaaabababababbbabababaabaabaaaababaaaaaaababaaababbbbbbbabbbaaabaaabbabababbbbbaababaababbabaaabaababaaababaabababbbbaabaabbbbbbbabbaaabbaaababaaaabbbbabbaabbbbabaabbaabbbabaabbababaaabbabaaaabaaababbbbabbabbabababaaaaabbbbaaabaaabaabbbbbaabbbbaaaabbabbbabbbbaaaabbbabababaababbbbababaaabbbaaabaabbbbababababababbaaababbbbaaaaaaabbbbaaabbbbbbaaaabbaabbbbbaaaaaabbbbaabbaaabaaaaabbbbbbbbabababbaababbbbaaababaaabaaabbaababababaaabbbaabbbaaaabaaabbbaaaabbaababaaaabbbaaaaaaaaababbbabaababbbaabbabbabaaaaaababbbbbbaaabbbababaabababaabbbabaabaaaaabbbbaababbabbabaabbababaaabbabbbbaabbaaabbaaababaaabaaabbaaaaabbbbbaaabbabbbbabbbaaaaaaaabbbaababaaaabaaaaaabbbaababababababbbbaaabaabaabbaabbabaaabaabbbaabaaabbbbbbbbbbbaaaabbbbbbbbbbbbaabaaababaabababaabbbaaaabaaaaabbabbbaaabbbbbaaabaabaaabaabbaababababbabababaaaaaabbbabbaabaabbaaabaabbbababababbbbbbaabbbbaaabaababbaaabbbbbaababaabaaabbbbbabbabaabaabaabaabbabaabbaabbaaabbbbabbbbbabbabbbbbbaaaababbaababbbbabaaabaabaaabbbabbaaabbaaaababbbaabbaabaaaaabababbabbabbabaabbabbabbabbbbaaaaabbbababbbbaabbaaaabaaaaaabbbabaaabaaaabbbbaabbbaaababbabbbabbabababbbaaaaabbbbbaaabaabbaababbababaabaaabbaaaabbaaabbaaabbaabbbbbbababbabbbbbbbaabbbabbbabbbbbbbbbabbabbbabbbbbbbbbabbbbbaaabaabaaabbaabbabbbbbbbabbaaabbabaaabbbbbbbbbbbbbbabbaabbbaaaababaaaaaaabaabbbbabbbbbbbbbbaabababababbbbabbbbabbaaabaabbaaaaabbaaababbbaaaaaabbaaababbababaababbbabababbabbbabaaaaaaabaaabbbabababaabbabaaaabbbbbbaaaababaabbbbaabbaaaaaabaaabbaaaabaaaaabaaaabbbabababbabaaababbbaaaabbaaaabbababbaabaabbaababbababbababbbabaababbbaaaabbbbaabbbabbabaaaaaaaaaaaabababaaaabaaaaababbbbabbbbabaabaabbbbbbabbbaabaabbbabaaaaabbabbaabbbaaaabbbbaaaabbabbaabbaabaaabbbbbabbaababbaabaabababaaabbabbaabaaaaaaababbaabaaaabbaabbabbbaaaababaaaababbaabbbaaaabbbaaabaaabbbbbbabaabbbaaababababbbbbbabbabbbaaaaababbbaabaaaabababbbaaaabaaaabbabaaaaaababaabaabaaabaabaabbaaaaaabaaaababbbbbbbabbababaabaababaaaabbaabaabbaabbaaaaaabaababbaaaaaaababbbbabaabb
bbaaabbbaabbbbaababbbaabaaababbbabababaabaaabbabbabbbabababbabbbaabaaaaaabaabaaaaaaaabbaaaabaaaabaabbaababbabaaabababbbaaaaaaaababaabaabaaabaaaabaaaaaabbbbbaaaaaababbabbbabbbaabbbaaaaababaabbbbaaababbaabaababaababaaabababaaaaabbbababbbbabbbabbabababbbbbababbaabbbbabbaabbbbabaababbabbbabbbbbbbaabababbbbabbbbaaababbbbbbbbaaaababaabaabbaabbabaabaaabaababaabbbabbbaaaabababbbaaabababbababbaababbbabaababaaabbbaaabbbbabbbabbaaababababbaaaaaababbaaabbbaabbabaabaaaabbabbbabbaaabbbaaabbbbbabbaabaabaabbbaaabaabbbbbaababbbbabbbbabaaaaabaabbbbbabbaababaabbbbbbbbbabaaaaabaabaabababbbababbabbbaabaaabaaabababaaababaaaabbbbbabbbbabbbababbabbabbaaaabbbbaababaabbaaaaababbbbabbbaabaabaabaababbbaaabaaabbbbbbbaabbabbbbbbbabbbbbbaaababaabbbabbbbababbaaabaabaaabbababbbbabbababbabbaababbbbbbbbaabaabbbbaababbaaaaababaaaaababababaabbbaabaabbabaaabaaaabaaaaabababababbababababbbbbbbaaabbbbbbbbaaaaabaaaaaaabbbbaaaabbbababaaababbababbbabababbababaabbaaababbbabbbbabbaaaaabaabaababaaaabbbabbbbabaaaaaaabbbaabbabbbbbbaabbaabaabbaabbaaaabbabbaabbbabbaaaaaaaaabbbbaaababbbaaaabbaabaaaabbabbabbbaabbbbbabababaaabbabaabbaabbbbaaaaaaaaababababbbabbaaabaabababababbbaabaaabaaaabaaabaaaabbbbbbbbbababaaaababbbbbababaaabbbabbbabbaabaaabaabbbbabbbbbbaaaabbaaaaababbaabaaaaaababbbbbabbaaaaabbbabbaababaaaaaabbababbabbbbaabbbbbbabaaaabababbaaabbbababababaabaaaaaababaaaababaaabaaabaaaaababaaabbabbbaabbabaabbabbbababbababbaaababbbbbaabbbabababbabbabbaabbaabaaaabbaaaabbaabaabbbbbabbababaaaabaabbabaabbaaaababbaaabbbbbbbababababbabbabbbbbababbbbbbaabbaabaabbababbbbabaaabbbaabbabbaabaabababbababababbaaaaaaaaaabbabaabaabbaabaaabbbaaaabbbbbaaabbaabaaababaabbabaaaababbabbaababaaabaaabbbabaabaabbbabbabaaaaabbaabaaaaabbaabbaababababbaabaaaaaabaaabbabaaaabbbabaabbabbaabbbaaabbbaaaababaaabbabbabaaaabaaabbaabbbaaaaabbababaabbbbaaabbbbbbaabbabbbbbabababbabbaaabaabbabbbabaaaaaaaaabababbabbabaaaaaabbaaaabbbbaaabbbbaaabbabaaababbaaabbaaabbbbaaabbbbbbbabaaabbbbaaababbaabaabbbbbabaababbaaaababaababababbaabbbaabbbabbbabababbabbabbbaababbaaaabbabbababbbbaabaaaaaabababbbbababaaabbbbbbbbbabbbbbbaaaabbaabbbbbaabbabaaabababaaabbbbbababbbabaaaaabbaaabaaabbbaababbbbaabababbabbbababaaababbbaaaabaabababbbaabaaaabaabababbaabbbaaabababbbabaabbbabbaabaaabbaabbababbbbababbaaaabaaaaabababaaaabbbabbbbaaabbaaabaaaaabaabbbbbbaaaaabbaaaaaabbbbbbbbaaaaaaabaabbbaababbbbabaaababbabababbbbaabababbbababbaabbbabbaaaabbabaabaabbbbbbabbbabababbbaaaaabbaaaabbabbbaaababbababaabababaabaabaabaaabaaabbaaaabbaaaaaababbabbababababbaababaaababbabbbabaabbbbabbaabaabababbbbabaabaaaabbaaaaaaaaabbbabbbbabbbabbabaaabbbabababbbabbbbbaabbabbabbbaababbaaaaaabaabbabaabaaaabbbababbbbaaaaaababaabaaaabbababbaaaaabbbaaaaaaabababbaaabaaabaababaaaababbaaabbbabaaaabbbbabbababbabbbababbbbaaaaaaabbbabbabaaaaababbaababaaabbaababbabbbaabbababbbabbaabbbbbabaaabaabbabbbaabbbbbabababaaaaabaabbabababbbbaababaaababaababbbbbabababbabaababaabaabbbabaabbbaabbabbbabbababaaaaabbbaaaabbbbabaaabaabaabbbaababbabbbbaaaabbbaaaabaaabbaabbbbbabbbababbbaaaabababaaaababbbbbaabaaaabbbaabbaaabbaabaaaabbbbbaababaabbababaaabbabaaaababbaabaabaaabaabbbaababbbbabbbaabaaaaabbabababaabaaaabbbababbbbabbbbaaaaaaabaaabaabbbbaaaaababbaaabbbbabbbbbabbaaabaaabaabaabaaabbaabbbaabbabaaaabbbaaaabbabaaaaaaaabbbabbbaaaaabbabaaaaabbabbaaabbbabbaabaaaabbaaabbabbbbaaabaabbbbbbbabababababbaaabaabaaabaaabbaaabbaabbabaaabaabbaabbabbbbbbbbaaababbbbabbababbababbbbabbabaabaaaaaaabbbabababbaabbaaabaabbaabbabaaaabaabbaaabaabbbababaaabbbabbabbababbaabbaaababbabaabaabbaababaabaabbaabbbbaaabaaabbabbaaaababababaaaabaababbbbbaabbabbaababbaabbbbaabababbabaabaaaaaaabbaab
babbaaabbbbbababbbbbbaaababaabbbabbababbaaaaaaabbaabbbbabbbbbaabaababbaabbbbbaaaabababbbbaaaaabbababbababababaababbbbbabbabaaaaabbbbabaabbaabbbabbbbbaaaaabbbaaabbbabbabbbababaaaabbaababababaabababbaabbaabaaaabbbabbbababababbbaabbabbbaababbbaaabababaaaabaababaaaababaabaaaaaaaabababbabaaaabaaabbbbbabaaabaababbbbaaaababaabaaaaababbaaabaabbaaaaababaabaabbabbabbbaababababbbaaababaaaaaabbbbaaaabbabbbbabababaabbaaabbbbaabbaabaaabbbbbaaabbabaababaaabbbaaaabbaaabbbabbbbaabbbabaaabbbabbbababaaabbbbaaabbabbbbbabbaaababbbbbbababbabbbbaaaabaaabbaabbaaabbabbaaaaabaabbabababbbabbabaaabbbaabaaabaaaaabababbbbaaaabbbbabbbaabbababbaababababbbaaababbabaaabababbababbabbabaabaabbbbaaabaabaaabababbabababababbbbaaaaabababbabababaabaaaaaabbbbaabbbaabaabaaaaaaabbaaabbababbaabaabbbaabbaabbbaabaaabbbbaaaababbbbbbbbabbaabababbbaaababbabababbbbbaaabbabbaabbbbaabbabaaaabaaababaabbbaaaabbabbbbbaabbbbbbaaaababbaaabbaabaaabbaabaaababbbaaabbbaaaabbabbaaaaaabaabaabbbabaaaaabbbaaabbbbbbbbbbbaabbbbbbabbaaababbbaaaabbabbaabbaaababaabaaabaabbaaabbbbbaaaaabbbbbbbaababbbbabbbaaabbabaaaabbabbaaaabaaabbaaababbbabbaaaababbbbabbabbabaabbbabbababaaaaabbaabbababaaabababbaaaaaabbbbaaaaabbababbababbbabababbabbbbaabbabbbbbabaabbaabbbabbababbbbababaabbbaabaaabaaaaabbabababaaaaaabaabbbaaabaababababbabbaaabaaabaabbbbabaaaaababaababababbababababbaaaababbabbabbbbbaabbababbbbbaaababbbababbaabbbaaabbbbabbbbbabbbaabbaaabbaabbaababbabaabbababbbbbbabaaabaabbbbabbbbabaaabbaabaabbabbaaaaaaaaaabbbbbbbbabbbaabbbabbbbbbbaababaababbaaaaaababaaabbbbbbaaabbbbbaabaaaababaabaabbbabbabbaaaababaaabaabbaababbbaababbbbaaabababbabbabababaabbbbbbbbbaababbaaaabaaabbbbbaabababbaaaaaaaaabbaabbababbbabbbbbaabaaabbaabbaaabbbaabbaaaababaabbbaabaabbaabbbaabbabbabbaaabaababbbaababaaaabbbaabbbbbabaaabbbbbbaaaaaabbaabbbbbbbbabbbbbbbaaaabbbabaaabbbbabbbbbbabbababaabaaabaabbabaabbababababbaaabbbbbaaabbabaabbabbaaaababbaabaaaaabbbbbbabaaabaaaaaaabbbbabaaaabbbababaabbbabbbbaaaababaababaaabbabbaaaabbabbabbabaaabaabbaaabbbababbabbbababbaabbaaabbaabbbbaaabbbabbbbaaaaabbaaabbabbaaaaaaaababbababbabaababbaaaabbbbaabaababbbbbbbbabbbbabbabaaababbbbaaaaababaaaabaaaabaabbaaaaaabbaabbaabbbbbabaaababbabbbaaabbababaaaaababbabbbbabaaaaabaababababaabbabbaabaaaabaabbbabbababbabbbabababaabbbbabbbbaabaaabaabbbabbaabaaababbbaabaabbabbbbabaaaabbbabbbabbaabbbabbaaababbabaaaabaaaaaabbbabaabababaabaabaabaabbbaaaabaabbaabaabbbbaababaabbbbbabbaabaaabaabbbbabbbbabbaaabbaaabbaaabaaaaaaaabaabaaaabaaababbababbbbaaababbbbababbbaaaabbbbaabbababbbabbaaabbaaaabbbbabbaabaaabbaababaabbaaaabaaaabbaaaaabaababbbabbababbbbababbbbaaabbbbabababbbbabbbbaaaaabbbaabbbaabaaaabaabbabbbaabaababbbbaaabbbbbbaababbbbbaaaaaababbabbbbabababbbabbbabbbaabaabbbbbabbabbbbbaabbbabaaabababaaaaaaabaabababbaaabbaabbbababaaabbaababaaaabbbbaabbbbaaabbbabaaaaaaaabaaabaaabababbaaaaaabaaabbbbbabbabbaabababbababababaaaababbabbaabbabbabaaaabaaabaabbbabaabbbbbaabaababaaabababaabbbabbbbbbbbbbaaababbbbbbababaaaabbaabbbbaabbbababababbbaaaaabbabaaaaabbaaaabababbabaabbabbababbbbbabaaabbbbbbbbbaaabaaababaaaaabaabbaaaabbaabbbbbabbaabaabbaaaababaabbbbbabbbaaabaabbabbaaaabaaaabaaaaaaaabaabbabbbaabbababbbabaaabaabaaabbaababbbababbaabaaaabbabbbbbaaababbaabbaababbaabbbbbbababbabbaabababbbabbbbbbabbbabababbbbabbbaaababbaaabbbaabaaaabaaaabaabbbababbbbbbbbaaababbaabaaabbaaabbbbbabbbabbbababaabaaaaaaaaaabbbbbaabbbabbbbbabbabaabbabbbaabbbbabbabbbaaabbaababbabababaabbbbbaabbbbaabbbabbaaaaaabaabbabbbbbaaababbabbbababbbbbaaabaabbaaaaaabaaaaaabbbaabbaaabaababbaaabbabaabbbaababbaabbbbbbabbbbabaababbbbbaababbabaabbbaaaaabbbbbababbaabbababaabbabbbabbaaaaaabaaabbababbaab
bbabbbabbabaabbaaabbaabbbbabbbaaaaaaaaabaabbbbababaababbbbababbbaaabbabbaabbababbaabababaabbaaabaabbababbbaababababbbababaababaaaabbaabbbaabaabbabbaaaaaaaaabbbbbabbbabaaaabbabaaababbaabaababaaabbaabbabbbabbaaabaaabbbbaaabbbabbabababbbbbbbbbaaabaabbaababbabaaaababbbbbababbabaabbabababaaaaababbabaaabbaaabbbbbaabbabbababbaaabbbaabbabaabaababbaaabbbbbaabbaaaaabbaabbbaabbbbabbabbabaaabbbabaaaaaaaababaaaababaaaaababaaaabaaaaaaaaababaabbbababbbbaabaaaaabbbababbabbaabbaaabb" |
py | 1a30a6aef98eaac5be09a134db30813a4870b88f | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class EnvironmentDeploymentPropertiesFragment(Model):
"""Properties of an environment deployment.
:param arm_template_id: The Azure Resource Manager template's identifier.
:type arm_template_id: str
:param parameters: The parameters of the Azure Resource Manager template.
:type parameters:
list[~azure.mgmt.devtestlabs.models.ArmTemplateParameterPropertiesFragment]
"""
_attribute_map = {
'arm_template_id': {'key': 'armTemplateId', 'type': 'str'},
'parameters': {'key': 'parameters', 'type': '[ArmTemplateParameterPropertiesFragment]'},
}
def __init__(self, *, arm_template_id: str=None, parameters=None, **kwargs) -> None:
super(EnvironmentDeploymentPropertiesFragment, self).__init__(**kwargs)
self.arm_template_id = arm_template_id
self.parameters = parameters
|
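Below is a minimal, hypothetical usage sketch for the fragment model above. The import path follows the azure.mgmt.devtestlabs.models namespace cited in its docstring; the ARM template resource ID shown is a placeholder, not a value taken from this file.
from azure.mgmt.devtestlabs.models import EnvironmentDeploymentPropertiesFragment

# Both arguments are optional keyword-only parameters; the ID below is illustrative.
fragment = EnvironmentDeploymentPropertiesFragment(
    arm_template_id="/subscriptions/<sub>/resourceGroups/<rg>/providers/"
                    "Microsoft.DevTestLab/labs/<lab>/artifactsources/<src>/armtemplates/<tpl>",
    parameters=[],  # list of ArmTemplateParameterPropertiesFragment instances
)
print(fragment.arm_template_id)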
py | 1a30a795d92cf0185b07147a9bfe07ccaec7b3a0 | #!/usr/bin/env python
#-----------------------------*-python-*----------------------------------------#
# file src/cdi_ipcress/python/ipcress_reader.py
# author Alex Long <[email protected]>
# date Monday, December 15, 2014, 5:44 pm
# brief This script has functions that parse an IPCRESS file and return a
# dictionary that contains data for each property and each material
# present in the file. This script also contains interpolation functions
# for opacity data.
# note Copyright (C) 2016, Triad National Security, LLC.
# All rights reserved.
#--------------------------------------------------------------------------------------------------#
# import block
################################################################################
import re
import sys
import struct
import numpy as np
from struct import *
from math import *
################################################################################
# These are the functions that are used to read data from the
# binary IPCRESS file. This module also contains functions for interpolating
# in density and temperature. The data locations are specified in
# cdi_ipcress/doc/IPCRESS_File_Format.pdf
################################################################################
def get_data_for_id(filename, data_start_index, num_entries):
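# Read <num_entries> 8-byte big-endian doubles starting at word index <data_start_index>.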
temp_grid = []
# "rb" is read binary mode
with open(filename, "rb") as f:
f.seek(data_start_index*8)
for i in range(num_entries):
word = f.read(8)
temp_grid.append(unpack('>d', word)[0])
return temp_grid
################################################################################
################################################################################
def write_data_for_id(filename, data_start_index, num_entries, new_values):
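# Overwrite <num_entries> 8-byte big-endian doubles starting at word index <data_start_index>.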
# "wb" is write binary mode
with open(filename, "r+b") as f:
f.seek(data_start_index*8)
for i in range(num_entries):
s = struct.pack('>d', new_values[i])
f.write(s)
################################################################################
################################################################################
def interpolate_mg_opacity_data(T_grid, rho_grid, hnu_grid, op_data, \
target_rho, target_T, print_str=""):
n_rho = len(rho_grid)
n_T = len(T_grid)
n_hnu = len(hnu_grid)
# don't allow extrapolation
if (target_rho < np.min(rho_grid)): target_rho = np.min(rho_grid)
if (target_rho > np.max(rho_grid)): target_rho = np.max(rho_grid)
if (target_T < np.min(T_grid)): target_T = np.min(T_grid)
if (target_T > np.max(T_grid)): target_T = np.max(T_grid)
if (print_str is not None):
print( \
"Interpolating {0}--Target rho: {1} , target T: {2}".format( \
print_str, target_rho, target_T))
# get correct index of adjacent density points
rho_L = 1000; rho_G = 0
for rho_i, rho in enumerate(rho_grid[:-1]):
if ( target_rho >= rho and target_rho<=rho_grid[rho_i+1]):
rho_L = rho_i
rho_G = rho_i+1
break
# get correct index of adjacent temperature points
T_L = 1000; T_G = 0
for T_i, T in enumerate(T_grid[:-1]):
if ( target_T >= T and target_T<=T_grid[T_i+1]):
T_L = T_i
T_G = T_i+1
break
#print("Temperature interpolation bounds: {0} {1}".format(T_grid[T_L], T_grid[T_G]))
#print("Density interpolation bounds: {0} {1}".format(rho_grid[rho_L], rho_grid[rho_G]))
#get the adjacent rows of the opacity index
#get the points of the opacity index
rho_L_T_L = op_data[n_rho*T_L*(n_hnu-1) + rho_L*(n_hnu-1) : n_rho*T_L*(n_hnu-1) + rho_L*(n_hnu-1) + (n_hnu-1) ]
rho_L_T_G = op_data[n_rho*T_G*(n_hnu-1) + rho_L*(n_hnu-1) : n_rho*T_G*(n_hnu-1) + rho_L*(n_hnu-1) + (n_hnu-1) ]
rho_G_T_L = op_data[n_rho*T_L*(n_hnu-1) + rho_G*(n_hnu-1) : n_rho*T_L*(n_hnu-1) + rho_G*(n_hnu-1) + (n_hnu-1) ]
rho_G_T_G = op_data[n_rho*T_G*(n_hnu-1) + rho_G*(n_hnu-1) : n_rho*T_G*(n_hnu-1) + rho_G*(n_hnu-1) + (n_hnu-1) ]
interp_op = []
#interpolate for each frequency point
for i in range(n_hnu-1):
#if (rho_L_T_L[i] < 1.0e-10) or (rho_L_T_G[i] < 1.0e-10) or (rho_G_T_L[i] < 1.0e-10) or (rho_G_T_G[i] < 1.0e-10):
# interp_op.append(1.0e-10)
#print("{0} {1} {2} {3}" .format(rho_L_T_L[i], rho_L_T_G[i], rho_G_T_L[i], rho_G_T_G[i]))
log_op_T_L = log(rho_L_T_L[i]) + log(target_rho/rho_grid[rho_L]) / log(rho_grid[rho_G]/rho_grid[rho_L]) * log(rho_G_T_L[i]/rho_L_T_L[i])
log_op_T_G = log(rho_L_T_G[i]) + log(target_rho/rho_grid[rho_L]) / log(rho_grid[rho_G]/rho_grid[rho_L]) * log(rho_G_T_G[i]/rho_L_T_G[i])
log_op = log_op_T_L + log(target_T/T_grid[T_L]) / log(T_grid[T_G]/T_grid[T_L]) * (log_op_T_G - log_op_T_L)
interp_op.append(exp(log_op))
print("hnu(keV) opacity(sq_cm/g) opacity(1/cm)")
for i, hnu in enumerate(hnu_grid[:-1]):
print("{0} {1} {2}".format( 0.5*(hnu + hnu_grid[i+1]), interp_op[i], interp_op[i]*target_rho))
return interp_op
###############################################################################
################################################################################
def interpolate_gray_opacity_data(T_grid, rho_grid, op_data, target_rho, \
target_T, print_str = ""):
n_rho = len(rho_grid)
n_T = len(T_grid)
# don't allow extrapolation
if (target_rho < np.min(rho_grid)): target_rho = np.min(rho_grid)
if (target_rho > np.max(rho_grid)): target_rho = np.max(rho_grid)
if (target_T < np.min(T_grid)): target_T = np.min(T_grid)
if (target_T > np.max(T_grid)): target_T = np.max(T_grid)
if (print_str is not None):
print( \
"Interpolating {0}--Target rho: {1} , target T: {2}".format( \
print_str, target_rho, target_T))
rho_L = 1000; rho_G = 0
for rho_i, rho in enumerate(rho_grid[:-1]):
if ( target_rho >= rho and target_rho<=rho_grid[rho_i+1]):
rho_L = rho_i
rho_G = rho_i+1
break
T_L = 1000; T_G = 0
for T_i, T in enumerate(T_grid[:-1]):
if ( target_T >= T and target_T<=T_grid[T_i+1]):
T_L = T_i
T_G = T_i+1
break
#get the adjacent rows of the opacity index
rho_L_T_L = op_data[n_rho*T_L + rho_L]
rho_L_T_G = op_data[n_rho*T_G + rho_L]
rho_G_T_L = op_data[n_rho*T_L + rho_G]
rho_G_T_G = op_data[n_rho*T_G + rho_G]
#interpolate in log space
#print("{0} {1} {2} {3}" .format(rho_L_T_L, rho_L_T_G, rho_G_T_L, rho_G_T_G))
log_op_T_L = log(rho_L_T_L) + log(target_rho/rho_grid[rho_L]) / log(rho_grid[rho_G]/rho_grid[rho_L]) * log(rho_G_T_L/rho_L_T_L)
log_op_T_G = log(rho_L_T_G) + log(target_rho/rho_grid[rho_L]) / \
log(rho_grid[rho_G]/rho_grid[rho_L]) * log(rho_G_T_G/rho_L_T_G)
log_op = log_op_T_L + log(target_T/T_grid[T_L]) / \
log(T_grid[T_G]/T_grid[T_L]) * (log_op_T_G - log_op_T_L)
interp_op = exp(log_op)
#print("opacity(sq_cm/g) opacity(1/cm)")
#print("{0} {1}".format(interp_op, interp_op*target_rho))
return interp_op
###############################################################################
###############################################################################
def read_information_from_file(ipcress_file):
word_array = []
with open(ipcress_file, "rb") as f:
for i in range(26):
word = f.read(8)
if not word:
break
word_array.append(word)
#print(int(unpack('>d', word)[0]))
title = word_array[0]
toc_int= []
offset = 2
for i in range(offset,offset+24):
toc_int.append( int(unpack('>d', word_array[i])[0]))
n_data_records = toc_int[14]
mxrec = toc_int[1] - toc_int[0]
mxkey = toc_int[16]
#print("Number of data records: {0}".format(n_data_records))
#print("Beginnging of data: {0}".format(toc_int[0]))
#print("Max records: {0} , max search keys: {1}".format(mxrec, mxkey))
mat_property = []
ds = []
dfo = []
tdf = []
num_mats = 0
mat_ids= []
with open(ipcress_file, "rb") as f:
# Read in the array that lists the data sizes in this file
f.seek(toc_int[0]*8)
#print("Table of data sizes")
for i in range(n_data_records):
word = f.read(8)
ds.append(int(unpack('>d', word)[0]))
# Read in the array that gives the offsets between data records
f.seek(toc_int[1]*8)
#print("Table of data file offsets")
for i in range(n_data_records):
word = f.read(8)
dfo.append(int(unpack('>d', word)[0]))
# Read in material IDs present in this file
f.seek(dfo[0]*8)
#print("Table of material identifiers")
word = f.read(8)
num_mats = int(unpack('>d', word)[0])
for i in range(num_mats):
word = f.read(8)
mat_ids.append( int(unpack('>d', word)[0]))
# Read in list of properties in this file available for each material
# entries in this table are 24 bytes each
f.seek(toc_int[10]*8)
#print("Table of data fields for each material")
word = f.read(72) #ignore the first 72 bytes, they don't contain useful information
for i in range(1,toc_int[14]):
#ordering is "matID" "data type" "fill"
temp_property = []
for j in range(mxkey):
three_string = []
three_string.append( f.read(8).decode("utf-8"))
three_string.append( f.read(8).decode("utf-8"))
three_string.append( f.read(8).decode("utf-8"))
if (j==0): temp_property.append(three_string[2].strip() )
elif (j==1): temp_property.append(three_string[0].strip())
else: temp_property.append(i) #index of data table containing values
try:
temp_property = [temp_property[0].decode('ascii'), \
temp_property[1].decode('ascii'), temp_property[2]]
mat_property.append(temp_property)
except (AttributeError, UnicodeDecodeError):  # entries may already be plain str
mat_property.append(temp_property)
materials = []
for m in range(num_mats):
materials.append([ m, mat_ids[m]])
#print("{0} materials in file".format(num_mats))
#for i in range(num_mats):
# print(" Matieral ID: {0}".format(mat_ids[i]))
#print("List of available properties")
#for i in mat_property:
# print(i)
#return the list of available properties, data file offsets and data sizes
return materials, mat_property, dfo, ds
################################################################################
###############################################################################
def write_information_to_file(ipcress_file, material_ID, mat_property, new_values):
materials, property_list, dfo, ds = read_information_from_file(ipcress_file)
# check to make sure material is in file
material_IDs = []
for imat in materials:
material_IDs.append(str(imat[1]))
if (not (material_ID in material_IDs)):
print("ERROR: Material ID not found in file, not changing anything!")
return
# try to find property in file
property_found = False
property_index = 0
for prop_i, prop in enumerate(property_list):
if (material_ID == prop[0] and mat_property == prop[1]):
property_found = True
property_index = prop_i
break
# make sure the size of the new data matches the property you're about to write
if (property_found and ds[property_index+1] != len(new_values)):
print("ERROR: Number of new values does not match size of old values, not changing anything!")
return
# if the combination of property and material was found, write the new data to
# the ipcress file
if property_found:
write_data_for_id( ipcress_file, dfo[property_index+1], \
ds[property_index+1], new_values)
else:
print("ERROR: Combination of material ID and property not found, not changing anything!")
return
################################################################################
################################################################################
# Checks to see if there are any zeros in the opacity data--zero data is
# difficult to handle and for now we are going to ignore data sets that contain
# zeros and print an error message
def check_valid_data(opacity_grid):
for item in opacity_grid:
if (item != 0.0):
return True
return False
################################################################################
################################################################################
# return a dictionary where the keys are "<property_ID>_<material ID>" and the
# values are the data
def get_property_map_from_ipcress_file(ipcress_file):
#load data from IPCRESS file
# dfo is the array of data file offsets, ds is the array of data sizes
materials, property_list, dfo, ds = read_information_from_file(ipcress_file)
#build dictionary of data, keys are "property_matID"
table_key_dict = {}
for prop_i, prop in enumerate(property_list):
table_key_dict["{0}_{1}".format(prop[1], prop[0])] = get_data_for_id( ipcress_file, dfo[prop_i+1], ds[prop_i+1])
material_list = []
for material in materials:
material_list.append(material[1])
return table_key_dict, material_list
################################################################################
|
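A hedged end-to-end sketch of how the functions above fit together, assuming they are imported from ipcress_reader. The file name, the choice of material, and the 'tgrid'/'rgrid'/'rgray' property names are placeholders for illustration; real IPCRESS files may expose different property keys.
# Hypothetical driver for ipcress_reader.py; adjust names to your file.
table_key_dict, material_list = get_property_map_from_ipcress_file("example.ipcress")

mat_id = material_list[0]
T_grid = table_key_dict["tgrid_{0}".format(mat_id)]    # temperature grid (assumed key)
rho_grid = table_key_dict["rgrid_{0}".format(mat_id)]  # density grid (assumed key)
gray_op = table_key_dict["rgray_{0}".format(mat_id)]   # gray Rosseland opacity (assumed key)

if check_valid_data(gray_op):
    sigma = interpolate_gray_opacity_data(T_grid, rho_grid, gray_op,
                                          target_rho=1.0, target_T=0.1,
                                          print_str="gray Rosseland")
    print("interpolated opacity: {0} sq_cm/g".format(sigma))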
py | 1a30a820b5fa268f1fe1bc7abb983951f9ee730a | # START LAB EXERCISE 9
print('LAB EXERCISE 09 \n')
# PROBLEM 1 (4 Points)
class Book():
"""
This is a class that contains information on Books.
Attributes:
title (str): The title of the book.
author (str): The name of the author.
"""
def __init__(self, title, author):
"""
The constructor of the <Book> class. Here you will need to create
the instance variables that were described in the docstring above.
Note that the attributes are defined by parameters passed to this constructor method.
Parameters:
title (str): The title of the book.
author (str): The name of the author.
Returns:
None
"""
pass # Implement
def __str__(self):
"""
String method for the <Book> class. Whenever an instance of <Book> is passed to the
str() or print() functions, the string from this method will be returned.
Parameters:
None
Returns:
str: A string representation of <Book> instance in the format "<title> by <author>"
"""
pass # Implement
# PROBLEM 2 (4 Points)
class Library():
"""
This is a class that contains information on a Library.
Attributes:
books (list): List of book instances in the library.
torn_pages_tolerance (int): Maximum number of torn pages a book may have and still be accepted by the library.
"""
def __init__(self):
"""
The constructor of the <Library> class. Here you will need to create instance variables
described in the docstring above. The Library constructor should take NO positional arguments, but
set instance variables <books> to an empty list and <torn_pages_tolerance> to 3.
Parameters:
None
Returns:
None
"""
pass # Implement
def __str__(self):
"""
String method for the <Library> class.
Parameters:
None
Returns:
str: A string representation of <Book> instance in the format:
"This library contains <number of books> books"
"""
pass # Implement
# PROBLEM 3 (2 Points)
def will_accept(self, book):
"""
Determines if the library will add a book instance to its collection
depending on its condition.
if book instance is of Book class, return True.
if book instance is of PaperbackBook class and the number of torn pages
is less than or equal to the library's torn page tolerance, return True.
else return False.
HINT: there is a built-in isinstance() function to check what class an instance
came from
Parameters:
book: instance of any book class
Returns:
Boolean (True or False)
"""
pass # Implement
# PROBLEM 4 (2 Points)
def add_book(self, book):
"""
This method will modify the <books> attribute by appending the parameter <book>
to it if the library will accept the book.
HINT: call will_accept within this method to determine if book can be added
Parameters:
book: instance of any book class
Returns:
None
"""
pass # Implement
# PROBLEM 5 (2 Points)
class PaperbackBook(Book): # <- remember to fill in () for class inheritance!
"""
This is a PaperbackBook class that inherits from the Book class. It will inherit
all attributes and methods from Book. You will override the parent constructor
to add an additional property but inherit the string method as is.
Attributes:
title (str): The title of the book.
author (str): The name of the author.
num_torn_pages (int): The number of torn pages in the PaperBook.
"""
def __init__(self, title, author):
"""
The constructor of the <PaperbackBook> class. Here you will need to inherit the attributes
from the parent class, but add an additional instance variable <num_torn_pages>
and initialize it to 0. Note that the constructor takes two positional arguments, but will
set three instance variables.
Parameters:
title (str): The title of the book.
author (str): The name of the author.
Returns:
None
"""
pass # Implement
# PROBLEM 6 (2 Points)
def rip_page(self):
"""
This method will modify the <num_torn_pages> and increase it by one every time the
method is called.
Parameters:
None
Returns:
None
"""
pass # Implement
# PROBLEM 7 (4 Points)
def main():
# 7.1 Create an instance of <Book>
homer_odyssey = None
# print instance of book
print(homer_odyssey)
# 7.2 Create an instance of <PaperbackBook>
angelou_rise = None
# print instance of PaperbackBook
print(angelou_rise)
# 7.3 Create an instance of <Library>
lib = None
# 7.4 Add book to the library
pass # Implement
# 7.5 Increase number of torn pages
pass # Implement
# 7.6 Set number of torn pages
torn_pages = None
# 7.7 Try to add Paperbook to the library
pass # Implement
# 7.8 Print out the library's books
pass # Implement
# END CODING HERE - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
main()
|
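For reference, one possible solution sketch for the lab exercise above, derived only from its docstrings; it is not an official answer key. The demo titles are inferred from the variable names homer_odyssey and angelou_rise in main(), and the torn-page counts are illustrative.
class Book:
    """A book with a title and an author."""
    def __init__(self, title, author):
        self.title = title
        self.author = author

    def __str__(self):
        return "{0} by {1}".format(self.title, self.author)


class PaperbackBook(Book):
    """A Book that additionally tracks its number of torn pages."""
    def __init__(self, title, author):
        super().__init__(title, author)
        self.num_torn_pages = 0

    def rip_page(self):
        self.num_torn_pages += 1


class Library:
    """A collection of books with a tolerance for torn pages."""
    def __init__(self):
        self.books = []
        self.torn_pages_tolerance = 3

    def __str__(self):
        return "This library contains {0} books".format(len(self.books))

    def will_accept(self, book):
        # Check the subclass first: a PaperbackBook is also a Book.
        if isinstance(book, PaperbackBook):
            return book.num_torn_pages <= self.torn_pages_tolerance
        return isinstance(book, Book)

    def add_book(self, book):
        if self.will_accept(book):
            self.books.append(book)


lib = Library()
lib.add_book(Book("The Odyssey", "Homer"))
worn = PaperbackBook("And Still I Rise", "Maya Angelou")
for _ in range(4):
    worn.rip_page()
lib.add_book(worn)  # rejected: 4 torn pages exceeds the tolerance of 3
print(lib)          # "This library contains 1 books"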
py | 1a30a83ced3787d7f087096cf104b361b32f2463 | import torch
import torch.nn as nn
import torch.nn.functional as f
from torch.nn import init
from .submodules import ConvLayer, UpsampleConvLayer, TransposedConvLayer, RecurrentConvLayer, ResidualBlock, ConvLSTM, ConvGRU, RecurrentResidualLayer
def skip_concat(x1, x2):
return torch.cat([x1, x2], dim=1)
def skip_sum(x1, x2):
return x1 + x2
class BaseUNet(nn.Module):
def __init__(self, num_input_channels, num_output_channels=1, skip_type='sum', activation='sigmoid',
num_encoders=4, base_num_channels=32, num_residual_blocks=2, norm=None, use_upsample_conv=True):
super(BaseUNet, self).__init__()
self.num_input_channels = num_input_channels
self.num_output_channels = num_output_channels
self.skip_type = skip_type
self.apply_skip_connection = skip_sum if self.skip_type == 'sum' else skip_concat
self.activation = activation
self.norm = norm
if use_upsample_conv:
print('Using UpsampleConvLayer (slow, but no checkerboard artefacts)')
self.UpsampleLayer = UpsampleConvLayer
else:
print('Using TransposedConvLayer (fast, with checkerboard artefacts)')
self.UpsampleLayer = TransposedConvLayer
self.num_encoders = num_encoders
self.base_num_channels = base_num_channels
self.num_residual_blocks = num_residual_blocks
self.max_num_channels = self.base_num_channels * pow(2, self.num_encoders)
assert(self.num_input_channels > 0)
assert(self.num_output_channels > 0)
self.encoder_input_sizes = []
for i in range(self.num_encoders):
self.encoder_input_sizes.append(self.base_num_channels * pow(2, i))
self.encoder_output_sizes = [self.base_num_channels * pow(2, i + 1) for i in range(self.num_encoders)]
self.activation = getattr(torch, self.activation, torch.sigmoid)  # fall back to a callable, not a string name
def build_resblocks(self):
self.resblocks = nn.ModuleList()
for i in range(self.num_residual_blocks):
self.resblocks.append(ResidualBlock(self.max_num_channels, self.max_num_channels, norm=self.norm))
def build_decoders(self):
decoder_input_sizes = list(reversed([self.base_num_channels * pow(2, i + 1) for i in range(self.num_encoders)]))
self.decoders = nn.ModuleList()
for input_size in decoder_input_sizes:
self.decoders.append(self.UpsampleLayer(input_size if self.skip_type == 'sum' else 2 * input_size,
input_size // 2,
kernel_size=5, padding=2, norm=self.norm))
def build_prediction_layer(self):
self.pred = ConvLayer(self.base_num_channels if self.skip_type == 'sum' else 2 * self.base_num_channels,
self.num_output_channels, 1, activation=None, norm=self.norm)
class UNet(BaseUNet):
def __init__(self, num_input_channels, num_output_channels=1, skip_type='sum', activation='sigmoid',
num_encoders=4, base_num_channels=32, num_residual_blocks=2, norm=None, use_upsample_conv=True):
super(UNet, self).__init__(num_input_channels, num_output_channels, skip_type, activation,
num_encoders, base_num_channels, num_residual_blocks, norm, use_upsample_conv)
self.head = ConvLayer(self.num_input_channels, self.base_num_channels,
kernel_size=5, stride=1, padding=2) # N x C x H x W -> N x 32 x H x W
self.encoders = nn.ModuleList()
for input_size, output_size in zip(self.encoder_input_sizes, self.encoder_output_sizes):
self.encoders.append(ConvLayer(input_size, output_size, kernel_size=5,
stride=2, padding=2, norm=self.norm))
self.build_resblocks()
self.build_decoders()
self.build_prediction_layer()
def forward(self, x):
"""
:param x: N x num_input_channels x H x W
:return: N x num_output_channels x H x W
"""
# head
x = self.head(x)
head = x
# encoder
blocks = []
for i, encoder in enumerate(self.encoders):
x = encoder(x)
blocks.append(x)
# residual blocks
for resblock in self.resblocks:
x = resblock(x)
# decoder
for i, decoder in enumerate(self.decoders):
x = decoder(self.apply_skip_connection(x, blocks[self.num_encoders - i - 1]))
img = self.activation(self.pred(self.apply_skip_connection(x, head)))
return img
class UNetRecurrent(BaseUNet):
"""
Recurrent UNet architecture where every encoder is followed by a recurrent convolutional block,
such as a ConvLSTM or a ConvGRU.
Symmetric, skip connections on every encoding layer.
"""
def __init__(self, num_input_channels, num_output_channels=1, skip_type='sum',
recurrent_block_type='convlstm', activation='sigmoid', num_encoders=4, base_num_channels=32,
num_residual_blocks=2, norm=None, use_upsample_conv=True):
super(UNetRecurrent, self).__init__(num_input_channels, num_output_channels, skip_type, activation,
num_encoders, base_num_channels, num_residual_blocks, norm,
use_upsample_conv)
self.head = ConvLayer(self.num_input_channels, self.base_num_channels,
kernel_size=5, stride=1, padding=2) # N x C x H x W -> N x 32 x H x W
self.encoders = nn.ModuleList()
for input_size, output_size in zip(self.encoder_input_sizes, self.encoder_output_sizes):
self.encoders.append(RecurrentConvLayer(input_size, output_size,
kernel_size=5, stride=2, padding=2,
recurrent_block_type=recurrent_block_type,
norm=self.norm))
self.build_resblocks()
self.build_decoders()
self.build_prediction_layer()
def forward(self, x, prev_states):
"""
:param x: N x num_input_channels x H x W
:param prev_states: previous LSTM states for every encoder layer
:return: N x num_output_channels x H x W
"""
# head
x = self.head(x)
head = x
if prev_states is None:
prev_states = [None] * self.num_encoders
# encoder
blocks = []
states = []
for i, encoder in enumerate(self.encoders):
x, state = encoder(x, prev_states[i])
blocks.append(x)
states.append(state)
# residual blocks
for resblock in self.resblocks:
x = resblock(x)
# decoder
for i, decoder in enumerate(self.decoders):
x = decoder(self.apply_skip_connection(x, blocks[self.num_encoders - i - 1]))
# tail
img = self.activation(self.pred(self.apply_skip_connection(x, head)))
return img, states
class UNetFire(BaseUNet):
"""
"""
def __init__(self, num_input_channels, num_output_channels=1, skip_type='sum',
recurrent_block_type='convgru', base_num_channels=16,
num_residual_blocks=2, norm=None, kernel_size=3,
recurrent_blocks={'resblock': [0]}):
super(UNetFire, self).__init__(num_input_channels=num_input_channels,
num_output_channels=num_output_channels,
skip_type=skip_type,
base_num_channels=base_num_channels,
num_residual_blocks=num_residual_blocks,
norm=norm)
self.kernel_size = kernel_size
self.recurrent_blocks = recurrent_blocks
print(self.num_input_channels)
self.head = RecurrentConvLayer(self.num_input_channels,
self.base_num_channels,
kernel_size=self.kernel_size,
padding=self.kernel_size // 2,
recurrent_block_type=recurrent_block_type,
norm=self.norm)
self.num_recurrent_units = 1
self.resblocks = nn.ModuleList()
recurrent_indices = self.recurrent_blocks.get('resblock', [])
for i in range(self.num_residual_blocks):
if i in recurrent_indices or -1 in recurrent_indices:
self.resblocks.append(RecurrentResidualLayer(
in_channels=self.base_num_channels,
out_channels=self.base_num_channels,
recurrent_block_type=recurrent_block_type,
norm=self.norm))
self.num_recurrent_units += 1
else:
self.resblocks.append(ResidualBlock(self.base_num_channels,
self.base_num_channels,
norm=self.norm))
self.pred = ConvLayer(2 * self.base_num_channels if self.skip_type == 'concat' else self.base_num_channels,
self.num_output_channels, kernel_size=1, padding=0, activation=None, norm=None)
def forward(self, x, prev_states):
"""
:param x: N x num_input_channels x H x W
:param prev_states: previous LSTM states for every encoder layer
:return: N x num_output_channels x H x W
"""
if prev_states is None:
prev_states = [None] * (self.num_recurrent_units)
states = []
state_idx = 0
# head
x, state = self.head(x, prev_states[state_idx])
state_idx += 1
states.append(state)
# residual blocks
recurrent_indices = self.recurrent_blocks.get('resblock', [])
for i, resblock in enumerate(self.resblocks):
if i in recurrent_indices or -1 in recurrent_indices:
x, state = resblock(x, prev_states[state_idx])
state_idx += 1
states.append(state)
else:
x = resblock(x)
# tail
img = self.pred(x)
return img, states
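# Hedged usage sketch (added example, not part of the original file; requires
# the package's .submodules to be importable, and the choice of 5 input
# channels / 64 px frames is an illustrative assumption):
#
#     net = UNetRecurrent(num_input_channels=5)
#     states = None                      # one recurrent state per encoder
#     x = torch.zeros(1, 5, 64, 64)      # N x num_input_channels x H x W
#     for _ in range(3):                 # feed a short sequence of frames
#         img, states = net(x, states)
#     print(img.shape)                   # -> torch.Size([1, 1, 64, 64])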
|
py | 1a30a95b720fecbddf3339b56bb201d9cdb0ad1d | from unittest import mock
import unittest
import pytest
from .main import some_func
class TestMain(unittest.TestCase):
@pytest.fixture(autouse=True)
def _setup_service(self):
self.mock_object = mock.MagicMock()
def test_some_func(self):
assert some_func() == 3
# def test_mock(self):
# assert self.mock_object.some_method.called |
py | 1a30a984b5512edaf0b19be6d24d7db8b82d915e | from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.cloudformation.checks.resource.base_resource_check import BaseResourceCheck
from checkov.common.util.type_forcers import force_list
class ALBListenerTLS12(BaseResourceCheck):
def __init__(self):
name = "Ensure that Application Load Balancer Listener is using TLS v1.2"
id = "CKV_AWS_103"
supported_resources = ['AWS::ElasticLoadBalancingV2::Listener']
categories = [CheckCategories.GENERAL_SECURITY]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
"""
validates that ALB Listener is using TLS v1.2
https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-elasticloadbalancingv2-listener.html
:param conf: aws_alb_listener configuration
:return: <CheckResult>
"""
if 'Properties' in conf.keys():
if 'Protocol' in conf['Properties'].keys():
# Check SslPolicy only if protocol is HTTPS or TLS.
                # Other protocols are not interesting within the context of this check.
if conf['Properties']['Protocol'] in ('HTTPS', 'TLS'):
if 'SslPolicy' in conf['Properties'].keys():
if conf['Properties']['SslPolicy'].startswith(("ELBSecurityPolicy-FS-1-2", "ELBSecurityPolicy-TLS-1-2")):
return CheckResult.PASSED
return CheckResult.FAILED
elif conf['Properties']['Protocol'] in ('TCP', 'UDP', 'TCP_UDP'):
return CheckResult.PASSED
            for action in conf['Properties'].get('DefaultActions', []):
                redirects = action.get("RedirectConfig", [])
                for redirect in force_list(redirects):
                    if redirect.get("Protocol") == 'HTTPS':
                        return CheckResult.PASSED
return CheckResult.FAILED
check = ALBListenerTLS12()
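# Hedged usage sketch (added example; the resource configuration is
# illustrative, not taken from a real CloudFormation template):
#
#     example_conf = {
#         'Properties': {
#             'Protocol': 'HTTPS',
#             'SslPolicy': 'ELBSecurityPolicy-TLS-1-2-2017-01',
#         }
#     }
#     check.scan_resource_conf(example_conf)  # -> CheckResult.PASSED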
|
py | 1a30a9dce230d2774ae90b0737f8f7c6d3c5a488 | # Bep Marketplace ELE
# Copyright (c) 2016-2021 Kolibri Solutions
# License: See LICENSE file or https://github.com/KolibriSolutions/BepMarketplace/blob/master/LICENSE
#
from django.contrib.auth.models import User
from django.core.validators import MinValueValidator, MaxValueValidator
from django.db import models
from proposals.models import Proposal
from timeline.models import TimeSlot
from django.conf import settings
class Application(models.Model):
"""
A student's application to a proposal.
"""
Priority = models.IntegerField(validators=[MinValueValidator(1), MaxValueValidator(settings.MAX_NUM_APPLICATIONS)])
Proposal = models.ForeignKey(Proposal, on_delete=models.CASCADE, related_name='applications')
Student = models.ForeignKey(User, on_delete=models.CASCADE, related_name='applications')
Timestamp = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.Student.get_username() + " to " + self.Proposal.__str__()
class Meta:
ordering = ["Priority"]
class Distribution(models.Model):
"""A student distributed to a proposal.x"""
Proposal = models.ForeignKey(Proposal, on_delete=models.PROTECT, related_name='distributions')
Student = models.ForeignKey(User, on_delete=models.CASCADE, related_name='distributions')
TimeSlot = models.ForeignKey(TimeSlot, on_delete=models.PROTECT, related_name='distributions')
Application = models.OneToOneField(Application, on_delete=models.SET_NULL, blank=True, null=True, related_name='distributions')
def TotalGrade(self):
"""
        Return the student's total grade as an unrounded float.
:return:
"""
return sum([r.Grade * r.Category.Weight for r in self.results.all()]) / 100
def TotalGradeRounded(self):
"""
Grade rounded to half points.
:return:
"""
return round(self.TotalGrade() * 2, 0) / 2
def missing_files(self):
return self.TimeSlot.filetypes.exclude(pk__in=self.files.values_list('Type', flat=True))
def missing_file_gradings(self):
return self.files.filter(Type__CheckedBySupervisor=True).filter(staffresponse__isnull=True)
def __str__(self):
return self.Proposal.Title + " to " + self.Student.usermeta.get_nice_name() + " (" + self.Student.username + ")"
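# Hedged example of the half-point rounding in TotalGradeRounded above (added
# commentary, plain arithmetic): a total grade of 7.3 becomes
# round(7.3 * 2, 0) / 2 == 7.5, i.e. it is rounded to the nearest half point.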
|
py | 1a30aa083d198cb9c5063d31c28413347e892d55 | """ Tuple as Data Structure
We have see how we interpreted tuples as data structures
The position of the object contained in the tuple gives it meaning
For example, we can represent a 2D coordinate as: (10, 20)
x y
If pt is a position tuple, we can retrieve the x and x, y = pt or x = pt[0]
y coordinates using: y = py[1]
For example, to calculate the distance of pt from the origin we could write:
dist = math.sgrt(pt[0] ** 2 + pt[1] ** 2)
Now this is not very readable, and if someone sees this code they will have ti know thatpt[0] mans the x-coordinate and pt[1] means the y-coordinate.
This is not very transparent.
# Using a class instead.
At this point, in order to make things clearer for the reader (not the complier, the reader), we might want to approach this using a class method instead.
"""
class Point2D:
def __init__(self, x, y):
        self.x = x
        self.y = y
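# Hedged continuation sketch (added example, assuming the lesson proceeds to
# the distance computation quoted in the docstring above):
import math

pt = Point2D(10, 20)
dist = math.sqrt(pt.x ** 2 + pt.y ** 2)  # pt.x / pt.y instead of opaque pt[0] / pt[1]
print(dist)  # -> 22.360679...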
|
py | 1a30aa097302de5cf83fb268314f20d004914197 | #!/usr/bin/python3
import os
import sys
import math
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import data_utils
load_fn = data_utils.load_cls_train_val
balance_fn = None
map_fn = None
keep_remainder = True
save_ply_fn = None
num_class = 40
batch_size = 32
sample_num = 512
num_epochs = 4096
step_val = 500
learning_rate_base = 0.01
decay_steps = 8000
decay_rate = 0.5
learning_rate_min = 1e-6
weight_decay = 1e-5
jitter = 0.0
jitter_val = 0.0
jitter_test = 0.0
rotation_range = [0, 0, 0, 'u']
rotation_range_val = [0, 0, 0, 'u']
rotation_range_test = [0, 0, 0, 'u']
rotation_order = 'rxyz'
scaling_range = [0, 0, 0, 'g']
scaling_range_val = [0, 0, 0, 'u']
scaling_range_test = [0, 0, 0, 'u']
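# Note (added commentary): with integer division both values below evaluate
# to 0, disabling sample-count jitter; the fractions 1 / 8 and 1 / 4 may be
# what was intended.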
sample_num_variance = 1 // 8
sample_num_clip = 1 // 4
x = 3
xconv_param_name = ('K', 'D', 'P', 'C', 'links')
xconv_params = [dict(zip(xconv_param_name, xconv_param)) for xconv_param in
[(8, 1, -1, 16 * x, []),
(12, 2, 384, 32 * x, []),
(16, 2, 128, 64 * x, []),
(16, 3, 128, 128 * x, [])]]
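# Hedged illustration (added commentary): each xconv_params entry is a dict
# built by zipping xconv_param_name with one tuple; with x = 3 the first
# layer expands to {'K': 8, 'D': 1, 'P': -1, 'C': 48, 'links': []}.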
with_global = True
fc_param_name = ('C', 'dropout_rate')
fc_params = [dict(zip(fc_param_name, fc_param)) for fc_param in
[(128 * x, 0.0),
(64 * x, 0.8)]]
sampling = 'random'
optimizer = 'adam'
epsilon = 1e-2
data_dim = 6
use_extra_features = False
with_X_transformation = True
sorting_method = None
|
py | 1a30aa1ebea62c018798b3abbc11c2bd27fa3f50 | from __future__ import print_function, absolute_import, division # makes KratosMultiphysics backward compatible with python 2.6 and 2.7
#import kratos core and applications
import KratosMultiphysics
import KratosMultiphysics.DelaunayMeshingApplication as KratosDelaunay
import KratosMultiphysics.PfemFluidDynamicsApplication as KratosPfemFluid
from importlib import import_module
def CreateMeshingDomain(main_model_part, custom_settings):
return FluidMeshingDomain(main_model_part, custom_settings)
class FluidMeshingDomain(object):
##constructor. the constructor shall only take care of storing the settings
##and the pointer to the main_model part.
##
##real construction shall be delayed to the function "Initialize" which
##will be called once the mesher is already filled
def __init__(self, main_model_part, custom_settings):
self.echo_level = 1
self.main_model_part = main_model_part
##settings string in json format
default_settings = KratosMultiphysics.Parameters("""
{
"python_module": "meshing_domain",
"model_part_name": "model_part_name",
"alpha_shape": 2.4,
"offset_factor": 0.0,
"meshing_strategy":{
"python_module": "meshing_strategy",
"meshing_frequency": 0.0,
"remesh": false,
"refine": false,
"reconnect": false,
"transfer": false,
"constrained": false,
"mesh_smoothing": false,
"variables_smoothing": false,
"elemental_variables_to_smooth":[],
"reference_element_type": "Element2D3N",
"reference_condition_type": "CompositeCondition2D2N"
},
"spatial_bounding_box":{
"use_bounding_box" : true,
"initial_time" : 0.0,
"final_time" : 1000.0,
"upper_point" : [10,10,10],
"lower_point" : [-10,-10,-10]
},
"spatial_refining_box" : {
"use_refining_box" : false,
"mesh_size" : 0.1,
"initial_time" : 0.0,
"final_time" : 1,
"upper_point" : [10,10,10],
"lower_point" : [-10,-10,-10]
},
"refining_parameters":{
"critical_size": 0.0,
"threshold_variable": "PLASTIC_STRAIN",
"reference_threshold" : 0.0,
"error_variable": "NORM_ISOCHORIC_STRESS",
"reference_error" : 0.0,
"add_nodes": true,
"insert_nodes": false,
"remove_nodes": {
"apply_removal": false,
"on_distance": false,
"on_threshold": false,
"on_error": false
},
"remove_boundary": {
"apply_removal": false,
"on_distance": false,
"on_threshold": false,
"on_error": false
},
"refine_elements": {
"apply_refinement": false,
"on_distance": false,
"on_threshold": false,
"on_error": false
},
"refine_boundary": {
"apply_refinement": false,
"on_distance": false,
"on_threshold": false,
"on_error": false
}
},
"elemental_variables_to_transfer":[]
}
""")
##overwrite the default settings with user-provided parameters
self.settings = custom_settings
self.settings.ValidateAndAssignDefaults(default_settings)
#construct the meshing strategy
python_module_name = "KratosMultiphysics.PfemFluidDynamicsApplication"
full_module_name = python_module_name + "." + self.settings["meshing_strategy"]["python_module"].GetString()
meshing_module = import_module(full_module_name)
#meshing_module = __import__(self.settings["meshing_strategy"]["python_module"].GetString())
self.MeshingStrategy = meshing_module.CreateMeshingStrategy(self.main_model_part, self.settings["meshing_strategy"])
self.active_remeshing = False
if( self.settings["meshing_strategy"]["remesh"].GetBool() or self.settings["meshing_strategy"]["transfer"].GetBool() ):
self.active_remeshing = True
print("::[Meshing_Domain]:: (",self.settings["model_part_name"].GetString()," ) -BUILT-")
####
def Initialize(self):
print("::[Meshing Domain]:: -START-")
self.dimension = self.main_model_part.ProcessInfo[KratosMultiphysics.SPACE_DIMENSION]
# Set MeshingParameters
self.SetMeshingParameters()
        # Meshing Strategy
self.MeshingStrategy.SetEchoLevel(self.echo_level)
self.MeshingStrategy.Initialize(self.MeshingParameters, self.dimension)
print("::[Meshing Domain]:: -END- ")
####
#
def SetInfoParameters(self):
# Create InfoParameters
self.InfoParameters = KratosDelaunay.MeshingInfoParameters()
self.InfoParameters.Initialize()
#
def SetTransferParameters(self):
# Create TransferParameters
self.TransferParameters = KratosDelaunay.TransferParameters()
transfer_variables = self.settings["elemental_variables_to_transfer"]
#for variable in transfer_variables:
# self.TransferParameters.SetVariable( KratosMultiphysics.KratosGlobals.GetVariable( variable.GetString() ) )
for i in range(0, transfer_variables.size() ):
self.TransferParameters.SetVariable(KratosMultiphysics.KratosGlobals.GetVariable(transfer_variables[i].GetString()))
#
def SetRefiningParameters(self):
# Create RefiningParameters
self.RefiningParameters = KratosDelaunay.RefiningParameters()
self.RefiningParameters.Initialize()
# parameters
self.RefiningParameters.SetAlphaParameter(self.settings["alpha_shape"].GetDouble())
# set mesh refinement in box
size = self.dimension
refining_box = self.settings["spatial_refining_box"]
if(refining_box["use_refining_box"].GetBool()):
self.MeshingParameters.SetUseRefiningBox(True)
self.MeshingParameters.SetRefiningBoxMinimumPoint(refining_box["lower_point"][0].GetDouble(),refining_box["lower_point"][1].GetDouble(),refining_box["lower_point"][2].GetDouble())
self.MeshingParameters.SetRefiningBoxMaximumPoint(refining_box["upper_point"][0].GetDouble(),refining_box["upper_point"][1].GetDouble(),refining_box["upper_point"][2].GetDouble())
self.MeshingParameters.SetRefiningBoxTimeInterval(refining_box["initial_time"].GetDouble(),refining_box["final_time"].GetDouble())
self.MeshingParameters.SetRefiningBoxMeshSize(refining_box["mesh_size"].GetDouble())
removing_options = KratosMultiphysics.Flags()
#remove nodes
remove_nodes = self.settings["refining_parameters"]["remove_nodes"]
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_NODES, remove_nodes["apply_removal"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_NODES_ON_DISTANCE, remove_nodes["on_distance"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_NODES_ON_ERROR, remove_nodes["on_error"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_NODES_ON_THRESHOLD, remove_nodes["on_threshold"].GetBool())
#remove boundary
remove_boundary = self.settings["refining_parameters"]["remove_boundary"]
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_BOUNDARY_NODES, remove_boundary["apply_removal"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_BOUNDARY_NODES_ON_DISTANCE, remove_boundary["on_distance"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_BOUNDARY_NODES_ON_ERROR, remove_boundary["on_error"].GetBool())
removing_options.Set(KratosDelaunay.MesherUtilities.REMOVE_BOUNDARY_NODES_ON_THRESHOLD, remove_boundary["on_threshold"].GetBool())
refining_options = KratosMultiphysics.Flags()
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE, self.settings["meshing_strategy"]["refine"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_ADD_NODES, self.settings["refining_parameters"]["add_nodes"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_INSERT_NODES, self.settings["refining_parameters"]["insert_nodes"].GetBool())
#refine elements
refine_elements = self.settings["refining_parameters"]["refine_elements"]
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_ELEMENTS, refine_elements["apply_refinement"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_ELEMENTS_ON_DISTANCE, refine_elements["on_distance"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_ELEMENTS_ON_ERROR, refine_elements["on_error"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_ELEMENTS_ON_THRESHOLD, refine_elements["on_threshold"].GetBool())
#refine boundary
refine_boundary = self.settings["refining_parameters"]["refine_boundary"]
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_BOUNDARY, refine_boundary["apply_refinement"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_BOUNDARY_ON_DISTANCE, refine_boundary["on_distance"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_BOUNDARY_ON_ERROR, refine_boundary["on_error"].GetBool())
refining_options.Set(KratosDelaunay.MesherUtilities.REFINE_BOUNDARY_ON_THRESHOLD, refine_boundary["on_threshold"].GetBool())
self.RefiningParameters.SetRefiningOptions(refining_options)
self.RefiningParameters.SetRemovingOptions(removing_options)
#
def SetMeshingParameters(self):
# Create MeshingParameters
self.MeshingParameters = KratosDelaunay.MeshingParameters()
self.MeshingParameters.Initialize()
self.MeshingParameters.SetSubModelPartName(self.settings["model_part_name"].GetString())
if(self.active_remeshing):
self.MeshingParameters.SetAlphaParameter(self.settings["alpha_shape"].GetDouble())
self.MeshingParameters.SetOffsetFactor(self.settings["offset_factor"].GetDouble())
self.SetInfoParameters()
self.SetTransferParameters()
self.SetRefiningParameters()
self.MeshingParameters.SetInfoParameters(self.InfoParameters)
self.MeshingParameters.SetTransferParameters(self.TransferParameters)
self.MeshingParameters.SetRefiningParameters(self.RefiningParameters)
bounding_box = self.settings["spatial_bounding_box"]
if(bounding_box["use_bounding_box"].GetBool()):
self.MeshingParameters.SetUseBoundingBox(True)
self.MeshingParameters.SetBoundingBoxLowerPoint(bounding_box["lower_point"][0].GetDouble(),bounding_box["lower_point"][1].GetDouble(),bounding_box["lower_point"][2].GetDouble())
self.MeshingParameters.SetBoundingBoxUpperPoint(bounding_box["upper_point"][0].GetDouble(),bounding_box["upper_point"][1].GetDouble(),bounding_box["upper_point"][2].GetDouble())
self.MeshingParameters.SetBoundingBoxTimeInterval(bounding_box["initial_time"].GetDouble(),bounding_box["final_time"].GetDouble())
#
def ExecuteMeshing(self):
if( self.active_remeshing ):
self.MeshingStrategy.GenerateMesh()
#
def Check(self):
# set mesher utilities
self.mesher_utils = KratosDelaunay.MesherUtilities()
# set the domain labels to mesh mesher
critical_mesh_size = self.settings["refining_parameters"]["critical_size"].GetDouble()
critical_radius = self.mesher_utils.CheckCriticalRadius(self.main_model_part,critical_mesh_size)
print(" CriticalRadius ", critical_radius)
#
def Active(self):
return self.active_remeshing
#
def SetEchoLevel(self, echo_level):
self.echo_level = echo_level
#
def GetVariables(self):
nodal_variables = []
transfer_variables = self.settings["elemental_variables_to_transfer"]
for i in range(0, transfer_variables.size() ):
nodal_variables.append(transfer_variables[i].GetString())
return nodal_variables
#
def ComputeAverageMeshParameters(self):
        MesherUtils = KratosDelaunay.MesherUtilities()
self.domain_volume = MesherUtils.ComputeModelPartVolume(self.main_model_part)
self.element_mean_volume = 0
number_of_elements = self.main_model_part.NumberOfElements()
nodes_for_element = self.main_model_part.ProcessInfo[KratosMultiphysics.SPACE_DIMENSION] + 1
if(number_of_elements != 0):
self.element_mean_volume = self.domain_volume/float(number_of_elements*nodes_for_element)
self.RefiningParameters.SetMeanVolume(self.element_mean_volume)
#
def GetMeanVolume(self):
return self.element_mean_volume
#
def GetTotalVolume(self):
return self.domain_volume
#
def ComputeInitialAverageMeshParameters(self):
self.mesh_parameters = KratosPfemFluid.ComputeAveragePfemMeshParameters(self.main_model_part, self.MeshingParameters,self.echo_level)
self.mesh_parameters.Execute()
# numFluid=0
# mean_nodal_h=0
# for node in self.main_model_part.Nodes:
# if (node.Is(KratosMultiphysics.FLUID)):
# numFluid+=1
# nodal_h=node.GetSolutionStepValue(KratosMultiphysics.NODAL_H)
# mean_nodal_h+=nodal_h
# mean_nodal_h*=1.0/numFluid;
# self.RefiningParameters.SetCriticalRadius(mean_nodal_h)
# self.RefiningParameters.SetInitialRadius(mean_nodal_h)
# delta_time = self.main_model_part.ProcessInfo[KratosMultiphysics.DELTA_TIME]
# self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.INITIAL_DELTA_TIME,delta_time)
# self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.CURRENT_DELTA_TIME,delta_time)
# self.main_model_part.ProcessInfo.SetValue(KratosMultiphysics.PREVIOUS_DELTA_TIME,delta_time)
# self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.TIME_INTERVAL_CHANGED,False)
def SetTimeDataOnProcessInfo(self):
delta_time = self.main_model_part.ProcessInfo[KratosMultiphysics.DELTA_TIME]
self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.INITIAL_DELTA_TIME,delta_time)
self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.CURRENT_DELTA_TIME,delta_time)
self.main_model_part.ProcessInfo.SetValue(KratosMultiphysics.PREVIOUS_DELTA_TIME,delta_time)
self.main_model_part.ProcessInfo.SetValue(KratosPfemFluid.TIME_INTERVAL_CHANGED,False)
#
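# Hedged usage sketch (added commentary; requires a running Kratos model, so
# construction of `main_model_part` is elided):
#
#     settings = KratosMultiphysics.Parameters('{"model_part_name": "fluid_domain"}')
#     domain = CreateMeshingDomain(main_model_part, settings)
#     domain.Initialize()
#     if domain.Active():
#         domain.ExecuteMeshing()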
|
py | 1a30aa85db337f10ba47f82fb42a202e51c937b2 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
import pandas as pd
from apache_beam.dataframe import doctests
from apache_beam.dataframe.frames import PD_VERSION
from apache_beam.dataframe.pandas_top_level_functions import _is_top_level_function
@unittest.skipIf(sys.platform == 'win32', '[BEAM-10626]')
class DoctestTest(unittest.TestCase):
def test_ndframe_tests(self):
# IO methods are tested in io_test.py
skip_writes = {
f'pandas.core.generic.NDFrame.{name}': ['*']
for name in dir(pd.core.generic.NDFrame) if name.startswith('to_')
}
result = doctests.testmod(
pd.core.generic,
use_beam=False,
report=True,
wont_implement_ok={
'pandas.core.generic.NDFrame.head': ['*'],
'pandas.core.generic.NDFrame.shift': [
'df.shift(periods=3)',
'df.shift(periods=3, fill_value=0)',
],
'pandas.core.generic.NDFrame.tail': ['*'],
'pandas.core.generic.NDFrame.take': ['*'],
'pandas.core.generic.NDFrame.values': ['*'],
'pandas.core.generic.NDFrame.tz_localize': [
"s.tz_localize('CET', ambiguous='infer')",
# np.array is not a deferred object. This use-case is possible
# with a deferred Series though, which is tested in
# frames_test.py
"s.tz_localize('CET', ambiguous=np.array([True, True, False]))",
],
'pandas.core.generic.NDFrame.truncate': [
# These inputs rely on tail (wont implement, order
# sensitive) for verification
"df.tail()",
"df.truncate(before=pd.Timestamp('2016-01-05'),\n"
" after=pd.Timestamp('2016-01-10')).tail()",
"df.truncate('2016-01-05', '2016-01-10').tail()",
"df.loc['2016-01-05':'2016-01-10', :].tail()"
],
'pandas.core.generic.NDFrame.replace': [
"s.replace([1, 2], method='bfill')",
# Relies on method='pad'
"s.replace('a')",
# Relies on method='pad'
# value=None is not valid for pandas < 1.4
"s.replace('a', None)",
# Implicitly uses method='pad', but output doesn't rely on that
                # behavior. Verified independently in
# frames_test.py::DeferredFrameTest::test_replace
"df.replace(regex={r'^ba.$': 'new', 'foo': 'xyz'})"
],
'pandas.core.generic.NDFrame.fillna': [
'df.fillna(method=\'ffill\')',
'df.fillna(method="ffill")',
'df.fillna(value=values, limit=1)',
],
'pandas.core.generic.NDFrame.sort_values': ['*'],
'pandas.core.generic.NDFrame.mask': [
'df.where(m, -df) == np.where(m, df, -df)'
],
'pandas.core.generic.NDFrame.where': [
'df.where(m, -df) == np.where(m, df, -df)'
],
'pandas.core.generic.NDFrame.interpolate': ['*'],
'pandas.core.generic.NDFrame.resample': ['*'],
'pandas.core.generic.NDFrame.rolling': ['*'],
# argsort wont implement
'pandas.core.generic.NDFrame.abs': [
'df.loc[(df.c - 43).abs().argsort()]',
],
'pandas.core.generic.NDFrame.reindex': ['*'],
'pandas.core.generic.NDFrame.pct_change': ['*'],
'pandas.core.generic.NDFrame.asof': ['*'],
'pandas.core.generic.NDFrame.infer_objects': ['*'],
'pandas.core.generic.NDFrame.ewm': ['*'],
'pandas.core.generic.NDFrame.expanding': ['*'],
'pandas.core.generic.NDFrame.get': ['*'],
},
not_implemented_ok={
'pandas.core.generic.NDFrame.asof': ['*'],
'pandas.core.generic.NDFrame.at_time': ['*'],
'pandas.core.generic.NDFrame.between_time': ['*'],
'pandas.core.generic.NDFrame.ewm': ['*'],
'pandas.core.generic.NDFrame.expanding': ['*'],
'pandas.core.generic.NDFrame.flags': ['*'],
'pandas.core.generic.NDFrame.rank': ['*'],
'pandas.core.generic.NDFrame.reindex_like': ['*'],
'pandas.core.generic.NDFrame.replace': ['*'],
'pandas.core.generic.NDFrame.sample': ['*'],
'pandas.core.generic.NDFrame.set_flags': ['*'],
'pandas.core.generic.NDFrame.squeeze': ['*'],
'pandas.core.generic.NDFrame.truncate': ['*'],
},
skip={
# Internal test
'pandas.core.generic.NDFrame._set_axis_name': ['*'],
# Fails to construct test series. asfreq is not implemented anyway.
'pandas.core.generic.NDFrame.asfreq': ['*'],
'pandas.core.generic.NDFrame.astype': ['*'],
'pandas.core.generic.NDFrame.convert_dtypes': ['*'],
'pandas.core.generic.NDFrame.copy': ['*'],
'pandas.core.generic.NDFrame.droplevel': ['*'],
'pandas.core.generic.NDFrame.get': ['*'],
'pandas.core.generic.NDFrame.rank': [
# Modified dataframe
'df'
],
'pandas.core.generic.NDFrame.rename': [
# Seems to be an upstream bug. The actual error has a different
# message:
# TypeError: Index(...) must be called with a collection of
# some kind, 2 was passed
# pandas doctests only verify the type of exception
'df.rename(2)'
],
# For pandas >= 1.4, rename is changed to _rename
'pandas.core.generic.NDFrame._rename': [
# Seems to be an upstream bug. The actual error has a different
# message:
# TypeError: Index(...) must be called with a collection of
# some kind, 2 was passed
# pandas doctests only verify the type of exception
'df.rename(2)'
],
# Tests rely on setting index
'pandas.core.generic.NDFrame.rename_axis': ['*'],
# Raises right exception, but testing framework has matching issues.
'pandas.core.generic.NDFrame.replace': [
"df.replace({'a string': 'new value', True: False}) # raises"
],
'pandas.core.generic.NDFrame.squeeze': ['*'],
# NameError
'pandas.core.generic.NDFrame.resample': ['df'],
# Skipped so we don't need to install natsort
'pandas.core.generic.NDFrame.sort_values': [
'from natsort import index_natsorted',
'df.sort_values(\n'
' by="time",\n'
' key=lambda x: np.argsort(index_natsorted(df["time"]))\n'
')'
],
**skip_writes
})
self.assertEqual(result.failed, 0)
def test_dataframe_tests(self):
result = doctests.testmod(
pd.core.frame,
use_beam=False,
report=True,
wont_implement_ok={
'pandas.core.frame.DataFrame.T': ['*'],
'pandas.core.frame.DataFrame.cummax': ['*'],
'pandas.core.frame.DataFrame.cummin': ['*'],
'pandas.core.frame.DataFrame.cumsum': ['*'],
'pandas.core.frame.DataFrame.cumprod': ['*'],
'pandas.core.frame.DataFrame.diff': ['*'],
'pandas.core.frame.DataFrame.fillna': [
'df.fillna(method=\'ffill\')',
'df.fillna(method="ffill")',
'df.fillna(value=values, limit=1)',
],
'pandas.core.frame.DataFrame.items': ['*'],
'pandas.core.frame.DataFrame.itertuples': ['*'],
'pandas.core.frame.DataFrame.iterrows': ['*'],
'pandas.core.frame.DataFrame.iteritems': ['*'],
# default keep is 'first'
'pandas.core.frame.DataFrame.nlargest': [
"df.nlargest(3, 'population')",
"df.nlargest(3, ['population', 'GDP'])",
"df.nlargest(3, 'population', keep='last')"
],
'pandas.core.frame.DataFrame.nsmallest': [
"df.nsmallest(3, 'population')",
"df.nsmallest(3, ['population', 'GDP'])",
"df.nsmallest(3, 'population', keep='last')",
],
'pandas.core.frame.DataFrame.replace': [
"s.replace([1, 2], method='bfill')",
# Relies on method='pad'
"s.replace('a')",
# Relies on method='pad'
# value=None is not valid for pandas < 1.4
"s.replace('a', None)",
# Implicitly uses method='pad', but output doesn't rely on that
                # behavior. Verified independently in
# frames_test.py::DeferredFrameTest::test_replace
"df.replace(regex={r'^ba.$': 'new', 'foo': 'xyz'})"
],
'pandas.core.frame.DataFrame.to_records': ['*'],
'pandas.core.frame.DataFrame.to_dict': ['*'],
'pandas.core.frame.DataFrame.to_numpy': ['*'],
'pandas.core.frame.DataFrame.to_string': ['*'],
'pandas.core.frame.DataFrame.transpose': ['*'],
'pandas.core.frame.DataFrame.shape': ['*'],
'pandas.core.frame.DataFrame.shift': [
'df.shift(periods=3)',
'df.shift(periods=3, fill_value=0)',
],
'pandas.core.frame.DataFrame.unstack': ['*'],
'pandas.core.frame.DataFrame.memory_usage': ['*'],
'pandas.core.frame.DataFrame.info': ['*'],
# Not equal to df.agg('mode', axis='columns', numeric_only=True)
# because there can be multiple columns if a row has more than one
# mode
'pandas.core.frame.DataFrame.mode': [
"df.mode(axis='columns', numeric_only=True)"
],
'pandas.core.frame.DataFrame.append': [
'df.append(df2, ignore_index=True)',
"for i in range(5):\n" +
" df = df.append({'A': i}, ignore_index=True)",
],
'pandas.core.frame.DataFrame.sort_index': ['*'],
'pandas.core.frame.DataFrame.sort_values': ['*'],
'pandas.core.frame.DataFrame.melt': [
"df.melt(id_vars=['A'], value_vars=['B'])",
"df.melt(id_vars=['A'], value_vars=['B', 'C'])",
"df.melt(col_level=0, id_vars=['A'], value_vars=['B'])",
"df.melt(id_vars=[('A', 'D')], value_vars=[('B', 'E')])",
"df.melt(id_vars=['A'], value_vars=['B'],\n" +
" var_name='myVarname', value_name='myValname')"
],
# Most keep= options are order-sensitive
'pandas.core.frame.DataFrame.drop_duplicates': ['*'],
'pandas.core.frame.DataFrame.duplicated': [
'df.duplicated()',
"df.duplicated(keep='last')",
"df.duplicated(subset=['brand'])",
],
'pandas.core.frame.DataFrame.reindex': ['*'],
'pandas.core.frame.DataFrame.dot': [
# reindex not supported
's2 = s.reindex([1, 0, 2, 3])',
],
'pandas.core.frame.DataFrame.resample': ['*'],
'pandas.core.frame.DataFrame.values': ['*'],
},
not_implemented_ok={
'pandas.core.frame.DataFrame.transform': [
# str arg not supported. Tested with np.sum in
# frames_test.py::DeferredFrameTest::test_groupby_transform_sum
"df.groupby('Date')['Data'].transform('sum')",
],
'pandas.core.frame.DataFrame.swaplevel': ['*'],
'pandas.core.frame.DataFrame.melt': ['*'],
'pandas.core.frame.DataFrame.reindex_axis': ['*'],
'pandas.core.frame.DataFrame.round': [
'df.round(decimals)',
],
# We should be able to support pivot and pivot_table for categorical
# columns
'pandas.core.frame.DataFrame.pivot': ['*'],
# Trivially elementwise for axis=columns. Relies on global indexing
# for axis=rows.
# Difficult to determine proxy, need to inspect function
'pandas.core.frame.DataFrame.apply': ['*'],
# Cross-join not implemented
'pandas.core.frame.DataFrame.merge': [
"df1.merge(df2, how='cross')"
],
# TODO(BEAM-11711)
'pandas.core.frame.DataFrame.set_index': [
"df.set_index([s, s**2])",
],
'pandas.core.frame.DataFrame.set_axis': [
"df.set_axis(range(0,2), axis='index')",
],
# TODO(BEAM-12495)
'pandas.core.frame.DataFrame.value_counts': [
'df.value_counts(dropna=False)'
],
},
skip={
# DataFrame construction from a dictionary and
# Series requires using the len() function, which
# is a non-deferred operation that we do not allow
'pandas.core.frame.DataFrame': [
'pd.DataFrame(data=d, index=[0, 1, 2, 3])',
],
# s2 created with reindex
'pandas.core.frame.DataFrame.dot': [
'df.dot(s2)',
],
'pandas.core.frame.DataFrame.resample': ['df'],
'pandas.core.frame.DataFrame.asfreq': ['*'],
# Throws NotImplementedError when modifying df
'pandas.core.frame.DataFrame.axes': [
# Returns deferred index.
'df.axes',
],
# Skipped because the relies on loc to set cells in df2
'pandas.core.frame.DataFrame.compare': ['*'],
'pandas.core.frame.DataFrame.cov': [
# Relies on setting entries ahead of time.
"df.loc[df.index[:5], 'a'] = np.nan",
"df.loc[df.index[5:10], 'b'] = np.nan",
'df.cov(min_periods=12)',
],
'pandas.core.frame.DataFrame.rename': [
# Returns deferred index.
'df.index',
'df.rename(index=str).index',
],
'pandas.core.frame.DataFrame.set_index': [
# TODO(BEAM-11711): This could pass in the index as
# a DeferredIndex, and we should fail it as order-sensitive.
"df.set_index([pd.Index([1, 2, 3, 4]), 'year'])",
],
'pandas.core.frame.DataFrame.set_axis': [
# This should pass as set_axis(axis='columns')
# and fail with set_axis(axis='index')
"df.set_axis(['a', 'b', 'c'], axis='index')"
],
'pandas.core.frame.DataFrame.to_markdown': ['*'],
'pandas.core.frame.DataFrame.to_parquet': ['*'],
# Raises right exception, but testing framework has matching issues.
# Tested in `frames_test.py`.
'pandas.core.frame.DataFrame.insert': [
'df',
'df.insert(1, "newcol", [99, 99])',
'df.insert(0, "col1", [100, 100], allow_duplicates=True)'
],
'pandas.core.frame.DataFrame.to_records': [
'df.index = df.index.rename("I")',
'index_dtypes = f"<S{df.index.str.len().max()}"', # 1.x
'index_dtypes = "<S{}".format(df.index.str.len().max())', #0.x
'df.to_records(index_dtypes=index_dtypes)',
],
# These tests use the static method pd.pivot_table, which doesn't
# actually raise NotImplementedError
'pandas.core.frame.DataFrame.pivot_table': ['*'],
# Expected to raise a ValueError, but we raise NotImplementedError
'pandas.core.frame.DataFrame.pivot': [
"df.pivot(index='foo', columns='bar', values='baz')"
],
'pandas.core.frame.DataFrame.append': [
'df',
# pylint: disable=line-too-long
"pd.concat([pd.DataFrame([i], columns=['A']) for i in range(5)],\n"
" ignore_index=True)"
],
'pandas.core.frame.DataFrame.eval': ['df'],
'pandas.core.frame.DataFrame.melt': [
"df.columns = [list('ABC'), list('DEF')]", "df"
],
'pandas.core.frame.DataFrame.merge': [
# Order-sensitive index, checked in frames_test.py.
"df1.merge(df2, left_on='lkey', right_on='rkey')",
"df1.merge(df2, left_on='lkey', right_on='rkey',\n"
" suffixes=('_left', '_right'))",
"df1.merge(df2, how='left', on='a')",
],
# Raises right exception, but testing framework has matching issues.
'pandas.core.frame.DataFrame.replace': [
"df.replace({'a string': 'new value', True: False}) # raises"
],
'pandas.core.frame.DataFrame.to_sparse': ['type(df)'],
# Skipped because "seen_wont_implement" is reset before getting to
# these calls, so the NameError they raise is not ignored.
'pandas.core.frame.DataFrame.T': [
'df1_transposed.dtypes', 'df2_transposed.dtypes'
],
'pandas.core.frame.DataFrame.transpose': [
'df1_transposed.dtypes', 'df2_transposed.dtypes'
],
# Skipped because the relies on iloc to set a cell to NA. Test is
# replicated in frames_test::DeferredFrameTest::test_applymap.
'pandas.core.frame.DataFrame.applymap': [
'df_copy.iloc[0, 0] = pd.NA',
"df_copy.applymap(lambda x: len(str(x)), na_action='ignore')",
],
# Skipped so we don't need to install natsort
'pandas.core.frame.DataFrame.sort_values': [
'from natsort import index_natsorted',
'df.sort_values(\n'
' by="time",\n'
' key=lambda x: np.argsort(index_natsorted(df["time"]))\n'
')'
],
# Mode that we don't yet support, documentation added in pandas
# 1.2.0 (https://github.com/pandas-dev/pandas/issues/35912)
'pandas.core.frame.DataFrame.aggregate': [
"df.agg(x=('A', max), y=('B', 'min'), z=('C', np.mean))"
],
})
self.assertEqual(result.failed, 0)
def test_series_tests(self):
result = doctests.testmod(
pd.core.series,
use_beam=False,
report=True,
wont_implement_ok={
'pandas.core.series.Series.__array__': ['*'],
'pandas.core.series.Series.array': ['*'],
'pandas.core.series.Series.cummax': ['*'],
'pandas.core.series.Series.cummin': ['*'],
'pandas.core.series.Series.cumsum': ['*'],
'pandas.core.series.Series.cumprod': ['*'],
'pandas.core.series.Series.diff': ['*'],
'pandas.core.series.Series.dot': [
's.dot(arr)', # non-deferred result
],
'pandas.core.series.Series.fillna': [
'df.fillna(method=\'ffill\')',
'df.fillna(method="ffill")',
'df.fillna(value=values, limit=1)',
],
'pandas.core.series.Series.info': ['*'],
'pandas.core.series.Series.items': ['*'],
'pandas.core.series.Series.iteritems': ['*'],
# default keep is 'first'
'pandas.core.series.Series.nlargest': [
"s.nlargest()",
"s.nlargest(3)",
"s.nlargest(3, keep='last')",
],
'pandas.core.series.Series.memory_usage': ['*'],
'pandas.core.series.Series.nsmallest': [
"s.nsmallest()",
"s.nsmallest(3)",
"s.nsmallest(3, keep='last')",
],
'pandas.core.series.Series.pop': ['*'],
'pandas.core.series.Series.searchsorted': ['*'],
'pandas.core.series.Series.shift': [
'df.shift(periods=3)',
'df.shift(periods=3, fill_value=0)',
],
'pandas.core.series.Series.take': ['*'],
'pandas.core.series.Series.to_dict': ['*'],
'pandas.core.series.Series.unique': ['*'],
'pandas.core.series.Series.unstack': ['*'],
'pandas.core.series.Series.values': ['*'],
'pandas.core.series.Series.view': ['*'],
'pandas.core.series.Series.append': [
's1.append(s2, ignore_index=True)',
],
'pandas.core.series.Series.replace': [
"s.replace([1, 2], method='bfill')",
# Relies on method='pad'
"s.replace('a')",
# Relies on method='pad'
# value=None is not valid for pandas < 1.4
"s.replace('a', None)",
# Implicitly uses method='pad', but output doesn't rely on that
                # behavior. Verified independently in
# frames_test.py::DeferredFrameTest::test_replace
"df.replace(regex={r'^ba.$': 'new', 'foo': 'xyz'})"
],
'pandas.core.series.Series.sort_index': ['*'],
'pandas.core.series.Series.sort_values': ['*'],
'pandas.core.series.Series.argmax': ['*'],
'pandas.core.series.Series.argmin': ['*'],
'pandas.core.series.Series.drop_duplicates': [
's.drop_duplicates()',
"s.drop_duplicates(keep='last')",
],
'pandas.core.series.Series.reindex': ['*'],
'pandas.core.series.Series.autocorr': ['*'],
'pandas.core.series.Series.repeat': ['s.repeat([1, 2, 3])'],
'pandas.core.series.Series.resample': ['*'],
'pandas.core.series.Series': ['ser.iloc[0] = 999'],
},
not_implemented_ok={
'pandas.core.series.Series.transform': [
# str arg not supported. Tested with np.sum in
# frames_test.py::DeferredFrameTest::test_groupby_transform_sum
"df.groupby('Date')['Data'].transform('sum')",
],
'pandas.core.series.Series.groupby': [
'ser.groupby(["a", "b", "a", "b"]).mean()',
'ser.groupby(["a", "b", "a", np.nan]).mean()',
'ser.groupby(["a", "b", "a", np.nan], dropna=False).mean()',
],
'pandas.core.series.Series.swaplevel' :['*']
},
skip={
# Relies on setting values with iloc
'pandas.core.series.Series': ['ser', 'r'],
'pandas.core.series.Series.groupby': [
# TODO(BEAM-11393): This example requires aligning two series
# with non-unique indexes. It only works in pandas because
# pandas can recognize the indexes are identical and elide the
# alignment.
'ser.groupby(ser > 100).mean()',
],
'pandas.core.series.Series.asfreq': ['*'],
# error formatting
'pandas.core.series.Series.append': [
's1.append(s2, verify_integrity=True)',
],
'pandas.core.series.Series.cov': [
# Differs in LSB on jenkins.
"s1.cov(s2)",
],
# Skipped idxmax/idxmin due an issue with the test framework
'pandas.core.series.Series.idxmin': ['s.idxmin()'],
'pandas.core.series.Series.idxmax': ['s.idxmax()'],
'pandas.core.series.Series.duplicated': ['*'],
'pandas.core.series.Series.set_axis': ['*'],
'pandas.core.series.Series.nonzero': ['*'],
'pandas.core.series.Series.pop': ['ser'], # testing side effect
# Raises right exception, but testing framework has matching issues.
'pandas.core.series.Series.replace': [
"df.replace({'a string': 'new value', True: False}) # raises"
],
'pandas.core.series.Series.searchsorted': [
# This doctest seems to be incorrectly parsed.
"x = pd.Categorical(['apple', 'bread', 'bread',"
],
'pandas.core.series.Series.to_csv': ['*'],
'pandas.core.series.Series.to_markdown': ['*'],
'pandas.core.series.Series.update': ['*'],
'pandas.core.series.Series.view': [
# Inspection after modification.
's'
],
'pandas.core.series.Series.resample': ['df'],
})
self.assertEqual(result.failed, 0)
def test_string_tests(self):
if PD_VERSION < (1, 2):
module = pd.core.strings
else:
# Definitions were moved to accessor in pandas 1.2.0
module = pd.core.strings.accessor
module_name = module.__name__
result = doctests.testmod(
module,
use_beam=False,
wont_implement_ok={
# These methods can accept deferred series objects, but not lists
f'{module_name}.StringMethods.cat': [
"s.str.cat(['A', 'B', 'C', 'D'], sep=',')",
"s.str.cat(['A', 'B', 'C', 'D'], sep=',', na_rep='-')",
"s.str.cat(['A', 'B', 'C', 'D'], na_rep='-')"
],
f'{module_name}.StringMethods.repeat': [
's.str.repeat(repeats=[1, 2, 3])'
],
f'{module_name}.str_repeat': ['s.str.repeat(repeats=[1, 2, 3])'],
# get_dummies pandas examples are not casted to CategoricalDtype
# Must be CategoricalDtype to work in Beam
f'{module_name}.StringMethods.get_dummies': ['*'],
f'{module_name}.str_get_dummies': ['*'],
f'{module_name}.StringMethods': ['s.str.split("_")'],
f'{module_name}.StringMethods.rsplit': ['*'],
f'{module_name}.StringMethods.split': ['*'],
},
skip={
# count() on Series with a NaN produces mismatched type if we
# have a NaN-only partition.
f'{module_name}.StringMethods.count': ["s.str.count('a')"],
f'{module_name}.str_count': ["s.str.count('a')"],
# Bad test strings in pandas 1.1.x
f'{module_name}.str_replace': [
"pd.Series(['foo', 'fuz', np.nan]).str.replace('f', repr)"
],
f'{module_name}.StringMethods.replace': [
"pd.Series(['foo', 'fuz', np.nan]).str.replace('f', repr)"
],
# output has incorrect formatting in 1.2.x
f'{module_name}.StringMethods.extractall': ['*']
})
self.assertEqual(result.failed, 0)
def test_datetime_tests(self):
# TODO(BEAM-10721)
indexes_accessors_result = doctests.testmod(
pd.core.indexes.accessors,
use_beam=False,
skip={
'pandas.core.indexes.accessors.TimedeltaProperties': [
# Seems like an upstream bug. The property is 'second'
'seconds_series.dt.seconds'
],
# TODO(BEAM-12530): Test data creation fails for these
# s = pd.Series(pd.to_timedelta(np.arange(5), unit="d"))
# pylint: disable=line-too-long
'pandas.core.indexes.accessors.DatetimeProperties.to_pydatetime': [
'*'
],
'pandas.core.indexes.accessors.TimedeltaProperties.components': [
'*'
],
'pandas.core.indexes.accessors.TimedeltaProperties.to_pytimedelta': [
'*'
],
# pylint: enable=line-too-long
})
datetimelike_result = doctests.testmod(
pd.core.arrays.datetimelike, use_beam=False)
datetime_result = doctests.testmod(
pd.core.arrays.datetimes,
use_beam=False,
wont_implement_ok={
'pandas.core.arrays.datetimes.DatetimeArray.to_period': ['*'],
# All tz_localize tests use unsupported values for ambiguous=
# Verified seperately in
# frames_test.py::DeferredFrameTest::test_dt_tz_localize_*
'pandas.core.arrays.datetimes.DatetimeArray.tz_localize': ['*'],
},
not_implemented_ok={
# Verifies index version of this method
'pandas.core.arrays.datetimes.DatetimeArray.to_period': [
'df.index.to_period("M")'
],
})
self.assertEqual(indexes_accessors_result.failed, 0)
self.assertEqual(datetimelike_result.failed, 0)
self.assertEqual(datetime_result.failed, 0)
def test_indexing_tests(self):
result = doctests.testmod(
pd.core.indexing,
use_beam=False,
skip={
'pandas.core.indexing._IndexSlice': ['*'],
'pandas.core.indexing.IndexingMixin.at': ['*'],
'pandas.core.indexing.IndexingMixin.iat': ['*'],
'pandas.core.indexing.IndexingMixin.iloc': ['*'],
'pandas.core.indexing.IndexingMixin.loc': ['*'],
'pandas.core.indexing._AtIndexer': ['*'],
'pandas.core.indexing._LocIndexer': ['*'],
'pandas.core.indexing._iAtIndexer': ['*'],
'pandas.core.indexing._iLocIndexer': ['*'],
})
self.assertEqual(result.failed, 0)
def test_groupby_tests(self):
result = doctests.testmod(
pd.core.groupby.groupby,
use_beam=False,
wont_implement_ok={
'pandas.core.groupby.groupby.GroupBy.head': ['*'],
'pandas.core.groupby.groupby.GroupBy.tail': ['*'],
'pandas.core.groupby.groupby.GroupBy.nth': ['*'],
'pandas.core.groupby.groupby.GroupBy.cumcount': ['*'],
'pandas.core.groupby.groupby.GroupBy.resample': ['*'],
},
not_implemented_ok={
'pandas.core.groupby.groupby.GroupBy.ngroup': ['*'],
'pandas.core.groupby.groupby.GroupBy.sample': ['*'],
'pandas.core.groupby.groupby.GroupBy.rank': ['*'],
'pandas.core.groupby.groupby.GroupBy.nth': [
"df.groupby('A', as_index=False).nth(1)",
],
},
skip={
# Uses iloc to mutate a DataFrame
'pandas.core.groupby.groupby.GroupBy.resample': [
'df.iloc[2, 0] = 5',
'df',
],
# df is reassigned
'pandas.core.groupby.groupby.GroupBy.rank': ['df'],
# TODO: Raise wont implement for list passed as a grouping column
# Currently raises unhashable type: list
'pandas.core.groupby.groupby.GroupBy.ngroup': [
'df.groupby(["A", [1,1,2,3,2,1]]).ngroup()'
],
})
self.assertEqual(result.failed, 0)
result = doctests.testmod(
pd.core.groupby.generic,
use_beam=False,
wont_implement_ok={
# Returns an array by default, not a Series. WontImplement
# (non-deferred)
'pandas.core.groupby.generic.SeriesGroupBy.unique': ['*'],
# TODO: Is take actually deprecated?
'pandas.core.groupby.generic.DataFrameGroupBy.take': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.take': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.nsmallest': [
"s.nsmallest(3, keep='last')",
"s.nsmallest(3)",
"s.nsmallest()",
],
'pandas.core.groupby.generic.SeriesGroupBy.nlargest': [
"s.nlargest(3, keep='last')",
"s.nlargest(3)",
"s.nlargest()",
],
'pandas.core.groupby.generic.DataFrameGroupBy.diff': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.diff': ['*'],
'pandas.core.groupby.generic.DataFrameGroupBy.hist': ['*'],
'pandas.core.groupby.generic.DataFrameGroupBy.fillna': [
'df.fillna(method=\'ffill\')',
'df.fillna(method="ffill")',
'df.fillna(value=values, limit=1)',
],
'pandas.core.groupby.generic.SeriesGroupBy.fillna': [
'df.fillna(method=\'ffill\')',
'df.fillna(method="ffill")',
'df.fillna(value=values, limit=1)',
],
},
not_implemented_ok={
'pandas.core.groupby.generic.DataFrameGroupBy.idxmax': ['*'],
'pandas.core.groupby.generic.DataFrameGroupBy.idxmin': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.transform': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.idxmax': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.idxmin': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.apply': ['*'],
},
skip={
'pandas.core.groupby.generic.SeriesGroupBy.cov': [
# Floating point comparison fails
's1.cov(s2)',
],
'pandas.core.groupby.generic.DataFrameGroupBy.cov': [
# Mutates input DataFrame with loc
# TODO: Replicate in frames_test.py
"df.loc[df.index[:5], 'a'] = np.nan",
"df.loc[df.index[5:10], 'b'] = np.nan",
"df.cov(min_periods=12)",
],
# These examples rely on grouping by a list
'pandas.core.groupby.generic.SeriesGroupBy.aggregate': ['*'],
'pandas.core.groupby.generic.DataFrameGroupBy.aggregate': ['*'],
'pandas.core.groupby.generic.SeriesGroupBy.transform': [
# Dropping invalid columns during a transform is unsupported.
'grouped.transform(lambda x: (x - x.mean()) / x.std())'
],
'pandas.core.groupby.generic.DataFrameGroupBy.transform': [
# Dropping invalid columns during a transform is unsupported.
'grouped.transform(lambda x: (x - x.mean()) / x.std())'
],
# Skipped idxmax/idxmin due an issue with the test framework
'pandas.core.groupby.generic.SeriesGroupBy.idxmin': ['s.idxmin()'],
'pandas.core.groupby.generic.SeriesGroupBy.idxmax': ['s.idxmax()'],
# Uses as_index, which is currently not_implemented
'pandas.core.groupby.generic.DataFrameGroupBy.value_counts': [
"df.groupby('gender', as_index=False).value_counts()",
# pylint: disable=line-too-long
"df.groupby('gender', as_index=False).value_counts(normalize=True)",
],
})
self.assertEqual(result.failed, 0)
def test_top_level(self):
tests = {
name: func.__doc__
for (name, func) in pd.__dict__.items()
if _is_top_level_function(func) and getattr(func, '__doc__', None)
}
# IO methods are tested in io_test.py
skip_reads = {name: ['*'] for name in dir(pd) if name.startswith('read_')}
result = doctests.teststrings(
tests,
use_beam=False,
report=True,
not_implemented_ok={
'concat': ['pd.concat([s1, s2], ignore_index=True)'],
'crosstab': ['*'],
'cut': ['*'],
'eval': ['*'],
'get_dummies': ['*'],
'infer_freq': ['*'],
'lreshape': ['*'],
'melt': ['*'],
'merge': ["df1.merge(df2, how='cross')"],
'merge_asof': ['*'],
'pivot': ['*'],
'pivot_table': ['*'],
'qcut': ['*'],
'reset_option': ['*'],
'set_eng_float_format': ['*'],
'set_option': ['*'],
'to_numeric': ['*'],
'to_timedelta': ['*'],
'unique': ['*'],
'wide_to_long': ['*'],
},
wont_implement_ok={
'factorize': ['*'],
'to_datetime': ['s.head()'],
'to_pickle': ['*'],
'melt': [
"pd.melt(df, id_vars=['A'], value_vars=['B'])",
"pd.melt(df, id_vars=['A'], value_vars=['B', 'C'])",
"pd.melt(df, col_level=0, id_vars=['A'], value_vars=['B'])",
"pd.melt(df, id_vars=[('A', 'D')], value_vars=[('B', 'E')])",
"pd.melt(df, id_vars=['A'], value_vars=['B'],\n" +
" var_name='myVarname', value_name='myValname')"
],
},
skip={
# error formatting
'concat': ['pd.concat([df5, df6], verify_integrity=True)'],
# doctest DeprecationWarning
'melt': ['df'],
# Order-sensitive re-indexing.
'merge': [
"df1.merge(df2, left_on='lkey', right_on='rkey')",
"df1.merge(df2, left_on='lkey', right_on='rkey',\n"
" suffixes=('_left', '_right'))",
"df1.merge(df2, how='left', on='a')",
],
# Not an actual test.
'option_context': ['*'],
'factorize': ['codes', 'uniques'],
# Bad top-level use of un-imported function.
'merge_ordered': [
'merge_ordered(df1, df2, fill_method="ffill", left_by="group")'
],
# Expected error.
'pivot': ["df.pivot(index='foo', columns='bar', values='baz')"],
# Never written.
'to_pickle': ['os.remove("./dummy.pkl")'],
**skip_reads
})
self.assertEqual(result.failed, 0)
if __name__ == '__main__':
unittest.main()
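# Hedged note (added commentary, not from the original file): the skip /
# wont_implement_ok / not_implemented_ok arguments above are dicts keyed by
# fully-qualified method names; the value ['*'] waives every doctest of that
# method, while a list of snippet strings waives only those exact examples.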
|
py | 1a30aae45d6903fb960b0b90fff41853abdd4ca8 | # -*- coding: utf-8 -*-
"""Implements a class to be used for unit testing.
"""
import pathlib
from tlsmate.workers.eval_cipher_suites import ScanCipherSuites
from tlsmate.tlssuite import TlsSuiteTester
from tlsmate.tlssuite import TlsLibrary
ssl2_ck = [
"SSL_CK_RC4_128_WITH_MD5",
"SSL_CK_RC2_128_CBC_WITH_MD5",
"SSL_CK_IDEA_128_CBC_WITH_MD5",
"SSL_CK_DES_192_EDE3_CBC_WITH_MD5",
]
class TestCase(TlsSuiteTester):
"""Class used for tests with pytest.
For more information refer to the documentation of the TcRecorder class.
"""
sp_out_yaml = "profile_basic_ssl2"
recorder_yaml = "recorder_eval_cipher_suites_ssl2"
path = pathlib.Path(__file__)
server_cmd = (
"utils/start_openssl --version {library} --port {server_port} "
"--cert1 server-rsa --cert2 server-ecdsa --no-cert-chain "
"-- -www -cipher ALL -ssl2"
)
library = TlsLibrary.openssl1_0_2
server = "localhost"
def check_versions(self, versions):
assert len(versions) == 6
assert versions[0]["version"]["name"] == "SSL20"
assert versions[0]["support"] == "TRUE"
assert versions[1]["version"]["name"] == "SSL30"
assert versions[1]["support"] == "FALSE"
assert versions[2]["version"]["name"] == "TLS10"
assert versions[2]["support"] == "FALSE"
assert versions[3]["version"]["name"] == "TLS11"
assert versions[3]["support"] == "FALSE"
assert versions[4]["version"]["name"] == "TLS12"
assert versions[4]["support"] == "FALSE"
assert versions[5]["version"]["name"] == "TLS13"
assert versions[5]["support"] == "FALSE"
for a, b in zip(ssl2_ck, versions[0]["cipher_kinds"]):
assert a == b["name"]
def check_profile(self, profile):
self.check_versions(profile["versions"])
def run(self, tlsmate, is_replaying):
for vers in ["sslv2", "sslv3", "tls10", "tls11", "tls12", "tls13"]:
tlsmate.config.set(vers, True)
server_profile = tlsmate.server_profile
ScanCipherSuites(tlsmate).run()
self.check_profile(server_profile.make_serializable())
if __name__ == "__main__":
TestCase().entry(is_replaying=False)
|
py | 1a30ad0c82707098faf2026373b77316b76dd85b | # -*- coding: utf-8 -*-
#
# Copyright 2017-2021 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Renku database dispatcher."""
from renku.core import errors
from renku.core.management.interface.database_dispatcher import IDatabaseDispatcher
from renku.core.metadata.database import Database
class DatabaseDispatcher(IDatabaseDispatcher):
"""Interface for the DatabaseDispatcher.
Handles getting current database (Database) and entering/exiting the stack for the database.
"""
def __init__(self):
self.database_stack = []
@property
def current_database(self) -> Database:
"""Get the currently active database."""
if len(self.database_stack) == 0:
raise errors.ConfigurationError("No database configured for injection")
return self.database_stack[-1][0]
def push_database_to_stack(self, path: str, commit: bool = False) -> None:
"""Create and push a new client to the stack."""
new_database = Database.from_path(path)
self.database_stack.append((new_database, commit))
def pop_database(self) -> None:
"""Remove the current client from the stack."""
popped_database = self.database_stack.pop()
if popped_database[1]:
popped_database[0].commit()
def finalize_dispatcher(self) -> None:
"""Close all database contexts."""
while self.database_stack:
self.pop_database()
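# Hedged usage sketch (added commentary; the path below is illustrative):
#
#     dispatcher = DatabaseDispatcher()
#     dispatcher.push_database_to_stack("/project/.renku/metadata", commit=True)
#     database = dispatcher.current_database
#     ...  # read or modify objects in `database`
#     dispatcher.finalize_dispatcher()  # pops the stack, committing where requested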
|
py | 1a30ad3ef7a67f3cdaf8a458fcdfe8609d0cd219 | import unittest
def suite():
return unittest.TestLoader().discover("pypobot.tests", pattern="*.py")
|
py | 1a30ad5518def96ca3d9e6b1fb20fe9db7de88f6 | # Generated by Django 3.0.6 on 2020-06-12 17:51
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=256)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('published_date', models.DateTimeField(blank=True, null=True)),
('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author', models.CharField(max_length=256)),
('text', models.TextField()),
('created_date', models.DateTimeField(default=django.utils.timezone.now)),
('isApproved', models.BooleanField(default=False)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.Post')),
],
),
]
|
py | 1a30adc331c0021f6fce98a0b56daa1d40f27413 | """Documentation about the dianna module."""
# FIXME: put actual code here
def hello(name):
"""Say hello
Function docstring using Google docstring style.
Args:
name (str): Name to say hello to
Returns:
str: Hello message
Raises:
ValueError: If `name` is equal to `nobody`
Example:
This function can be called with `Jane Smith` as argument using
>>> from dianna.my_module import hello
>>> hello('Jane Smith')
'Hello Jane Smith!'
"""
if name == 'nobody':
        raise ValueError('Cannot say hello to nobody')
return f'Hello {name}!'
|
py | 1a30add987a1e8ecdc833b907e0f3bd1ab23df1c | """
[Python scripts for 3DTracker-FAB (www.3dtracker.org)]
Example 03: Converting 2D position to 3D
This is a script demonstrating how to convert 2D positions in an ROI in an RGB image to 3D.
This type of conversion is useful for applying 2D image-based object detection/tracking
algorithms to obtain the corresponding 3D object position/trace.
The example plots 3D points in the ROIs surrounding a can in the 2D images.
Date last modified: 2018.10.03
"""
import numpy as np
import cv2
import contextlib
import pyqtgraph as pg
import pyqtgraph.opengl as gl
import lib3dtracker as tdt # 3DTracker-FAB python library
fname_metadata = './example data/dual_d435_01/dual_d435_01.metadata.xml' # metadata file path
with contextlib.closing(tdt.DataReader(fname_metadata)) as d: # open data using 'with statement'
i_frame = 10; # video frame number to process
# show camera 1 RGB image and ROI
roi_cam1 = [120, 70, 40, 80] # ROI; left, top, width, height
[frame_rgb, frame_d] = d.get_rgbd_frame(i_frame, 0)
cv2.rectangle(frame_rgb, tuple(roi_cam1[0:2]), (roi_cam1[0]+roi_cam1[2], roi_cam1[1]+roi_cam1[3]), (0, 0, 255), 2)
cv2.imshow('rgb1', frame_rgb)
# show camera 2 RGB image and ROI
roi_cam2 = [170, 80, 50, 100] # ROI; left, top, width, height
[frame_rgb, frame_d] = d.get_rgbd_frame(i_frame, 1)
cv2.rectangle(frame_rgb, tuple(roi_cam2[0:2]), (roi_cam2[0]+roi_cam2[2], roi_cam2[1]+roi_cam2[3]), (0, 0, 255), 2)
cv2.imshow('rgb2', frame_rgb)
# get 3D point cloud in ROI
pc_roi1 = d.get_pc_from_rgbd(i_frame, 0, roi_cam1)
pc_roi2 = d.get_pc_from_rgbd(i_frame, 1, roi_cam2)
# prepare for plotting
app=pg.QtGui.QApplication([])
w = gl.GLViewWidget()
# read and plot merged point cloud
pc = d.get_mrgpc_frame(i_frame)
tdt.plot_pc(pc, w, 4)
# plot point cloud in ROIs
tdt.plot_pc(pc_roi1, w, 5, (1,0,0,1))
tdt.plot_pc(pc_roi2, w, 5, (1,0,0,1))
# plot axis
g=gl.GLAxisItem()
w.addItem(g)
# show the plot
w.setCameraPosition(distance = 0.5)
w.show()
print('Close the window to quit.')
pg.QtGui.QApplication.exec_()
|
py | 1a30ae64bf37a9402670534d865c23f3b71042bd | """RLBotChoreography
Usage:
ChoreographyHive [--bot-folder=<folder>]
ChoreographyHive (-h | --help)
Options:
-h --help Shows this help message.
--bot-folder=<folder> Searches this folder for bot configs to use for names and appearances [default: .].
"""
import copy
import os
import sys
import inspect
import time
from docopt import docopt
from importlib import reload, import_module
from queue import Queue
from threading import Thread
from os.path import dirname, basename, isfile, join
import glob
from rlbot.matchconfig.conversions import parse_match_config
from rlbot.parsing.agent_config_parser import load_bot_appearance
from rlbot.parsing.directory_scanner import scan_directory_for_bot_configs
from rlbot.parsing.rlbot_config_parser import create_bot_config_layout
from rlbot.setup_manager import SetupManager
from rlbot.utils.structures.start_match_structures import MAX_PLAYERS
import hivemind
from queue_commands import QCommand
from choreography.choreography import Choreography
# TODO:
# - Do bot-folder from inside the GUI
# - Prettify GUI
class RLBotChoreography:
def __init__(self):
# Runs GUI and Hivemind on two different threads.
q = Queue()
thread1 = Thread(target=self.run_gui, args=(q, ))
thread1.start()
thread2 = Thread(target=self.run_RLBotChoreography, args=(q, ))
thread2.start()
q.join()
def setup_match(self):
# TODO This should be replaced?
arguments = docopt(__doc__)
bot_directory = arguments['--bot-folder']
bundles = scan_directory_for_bot_configs(bot_directory)
# Set up RLBot.cfg
framework_config = create_bot_config_layout()
config_location = os.path.join(os.path.dirname(__file__), 'rlbot.cfg')
framework_config.parse_file(config_location, max_index=MAX_PLAYERS)
match_config = parse_match_config(framework_config, config_location, {}, {})
looks_configs = {idx: bundle.get_looks_config() for idx, bundle in enumerate(bundles)}
names = [bundle.name for bundle in bundles]
player_config = match_config.player_configs[0]
match_config.player_configs.clear()
for i in range(max(len(bundles), self.min_bots)):
copied = copy.copy(player_config)
if i < len(bundles):
copied.name = names[i]
# If you want to override bot appearances to get a certain visual effect, e.g. with
# specific boost colors, this is a good place to do it.
copied.loadout_config = load_bot_appearance(looks_configs[i], 0)
match_config.player_configs.append(copied)
manager = SetupManager()
manager.load_match_config(match_config, {})
manager.connect_to_game()
manager.start_match()
def run_RLBotChoreography(self, queue):
"""
If Hivemind breaks out of game_loop it is reloaded and recreated.
"""
# Waits until a START command is received.
while queue.get() != QCommand.START:
continue
self.setup_match()
while True:
my_hivemind = hivemind.Hivemind(queue, self.choreo_obj)
my_hivemind.start() # Loop only quits on STOP command.
# Reloads hivemind for new changes to take place.
# reload(sys.modules[self.choreo_obj.__module__])
reload(hivemind)
# Checks what to do after Hivemind died.
command = queue.get()
if command == QCommand.ALL:
self.setup_match()
elif command == QCommand.EXIT:
break
exit() # Clean exit.
def run_gui(self, queue):
"""
Runs the simple gui.
"""
def reload_choreographies():
"""
Finds and reloads all choreo modules and puts the found choreographies inside a dictionary.
"""
# Automatically finds all choreo modules.
modules = glob.glob(join(dirname(__file__), "choreography/choreos/*.py"))
choreo_modules = [basename(f)[:-3] for f in modules if isfile(f) and not f.endswith('__init__.py')]
choreographies = {}
for choreo in choreo_modules:
module = f'choreography.choreos.{choreo}'
# Try reloading the module.
try:
reload(sys.modules[module])
classes = inspect.getmembers(sys.modules[module], inspect.isclass)
# If not loaded yet, import it.
except:
print(f'Module not found, importing {module}')
import_module(module)
classes = inspect.getmembers(sys.modules[module], inspect.isclass)
# Find all the choreography classes inside.
finally:
for name, obj in classes:
# Checks whether the class subclasses Choreography.
if issubclass(obj, Choreography) and obj is not Choreography:
# FIXME Watch out for name conflicts!
choreographies[name] = obj
return choreographies
def start():
num_bots_changed()
print("[RLBotChoreography]: Starting up!")
queue.put(QCommand.START)
# Removes the button so we cannot start again.
button_start.destroy()
# Hive reset button.
button_reload_hive = tk.Button(frame, text="↻ Hivemind", command=reload_hive)
button_reload_hive.pack()
# All reset button.
button_reload_all = tk.Button(frame, text="↻ All", command=reload_all)
button_reload_all.pack()
def num_bots_changed():
"""
Looks at the choreography's requested number of bots and uses that. Otherwise will use the entered number.
"""
try:
num_bots = self.choreo_obj.get_num_bots()
except NotImplementedError:
num_bots = int(entry_num_bots.get())
finally:
self.min_bots = min(int(num_bots), MAX_PLAYERS)
entry_num_bots.delete(0, last=tk.END)
entry_num_bots.insert(0, self.min_bots)
def choreo_selected(var):
"""
Updates the selected choreography.
"""
self.choreographies = reload_choreographies()
self.choreo_obj = self.choreographies[var]
num_bots_changed()
def reload_hive():
num_bots_changed()
print("[RLBotChoreography]: Stopping Hivemind.")
queue.put(QCommand.STOP)
choreo_selected(menuvar.get())
print("[RLBotChoreography]: Reloading Hivemind.")
queue.put(QCommand.HIVE)
def reload_all():
num_bots_changed()
print("[RLBotChoreography]: Stopping Hivemind.")
queue.put(QCommand.STOP)
choreo_selected(menuvar.get())
print("[RLBotChoreography]: Reloading all.")
queue.put(QCommand.ALL)
# TODO Make GUI look better.
import tkinter as tk
root = tk.Tk()
frame = tk.Frame(root)
frame.pack()
# Start button.
button_start = tk.Button(frame, text="Start", command=start)
button_start.pack()
# Dropdown menu.
self.choreographies = reload_choreographies()
menuvar = tk.StringVar(root)
menuvar.set('LightfallChoreography') # Set the default option
dropMenu = tk.OptionMenu(frame, menuvar, *self.choreographies, command=choreo_selected)
dropMenu.pack()
# Label for the entry box.
label_num_bots = tk.Label(frame, text="Number of bots")
label_num_bots.pack()
# Number of bots entry box.
entry_num_bots = tk.Entry(frame)
entry_num_bots.insert(0, 10)
entry_num_bots.pack()
# This is here just to make sure everything is set up by default.
choreo_selected(menuvar.get())
root.mainloop()
# Clean exit.
print('[RLBotChoreography]: Shutting down.')
queue.put(QCommand.STOP)
queue.put(QCommand.EXIT)
exit()
if __name__ == '__main__':
# Starts the show :)
RLBotChoreography()
|
py | 1a30aede77e1ffa311b709ffc5589dc113b7e00b | # Copyright 2017 BrainPad Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import, division, print_function, unicode_literals
import argparse
from web.app import create_app
from dobot.utils import detect_dobot_port, dobot_is_on_port
DEFAULT_BAUDRATE = 115200
parser = argparse.ArgumentParser(description='Run Dobot WebAPI.')
parser.add_argument('--port', type=int, default=18001)
parser.add_argument('--host', type=str, default='0.0.0.0')
parser.add_argument('--dobot-port', type=str, default=None)
parser.add_argument('--tuner-file', type=str, default='/var/tmp/robot_tuner.dat')
parser.add_argument('--instance_path', type=str, default=None)
args = parser.parse_args()
if not args.dobot_port:
dobot_port = detect_dobot_port(DEFAULT_BAUDRATE)
if dobot_port is None:
print('dobot offline')
exit(1)
else:
dobot_port = args.dobot_port
if not dobot_is_on_port(dobot_port, DEFAULT_BAUDRATE):
print('dobot is not detected on port {}'.format(dobot_port))
exit(1)
app = create_app(dobot_port, args.tuner_file, args.instance_path)
if __name__ == '__main__':
app.run(port=args.port, host=args.host)
|
py | 1a30af099f6af14d0f31314f2c44af89af1e7b18 | # coding=utf-8
# Copyright 2018 The Dopamine Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Library used by example_viz.py to generate visualizations.
This file illustrates the following:
- How to subclass an existing agent to add visualization functionality.
- For DQN we visualize the cumulative rewards and the Q-values for each
action (MyDQNAgent).
- For Rainbow we visualize the cumulative rewards and the Q-value
distributions for each action (MyRainbowAgent).
- How to subclass Runner to run in eval mode, lay out the different subplots,
generate the visualizations, and compile them into a video (MyRunner).
- The function `run()` is the main entrypoint for running everything.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from absl import logging
from dopamine.agents.dqn import dqn_agent
from dopamine.agents.rainbow import rainbow_agent
from dopamine.discrete_domains import atari_lib
from dopamine.discrete_domains import iteration_statistics
from dopamine.discrete_domains import run_experiment
from dopamine.utils import agent_visualizer
from dopamine.utils import atari_plotter
from dopamine.utils import bar_plotter
from dopamine.utils import line_plotter
import gin
import numpy as np
import tensorflow as tf
import tf_slim
import pdb
import matplotlib.pyplot as plt
class MyDQNAgent(dqn_agent.DQNAgent):
"""Sample DQN agent to visualize Q-values and rewards."""
def __init__(self, sess, num_actions, summary_writer=None):
super(MyDQNAgent, self).__init__(sess, num_actions,
summary_writer=summary_writer)
self.q_values = [[] for _ in range(num_actions)]
self.rewards = []
def step(self, reward, observation, step_number):
self.rewards.append(reward)
return super(MyDQNAgent, self).step(reward, observation, step_number)
def _select_action(self, step_number):
action = super(MyDQNAgent, self)._select_action(step_number)
# print("on selectionne ici")
q_vals = self._sess.run(self._net_outputs.q_values,
{self.state_ph: self.state})[0]
for i in range(len(q_vals)):
self.q_values[i].append(q_vals[i])
return action
def reload_checkpoint(self, checkpoint_path, use_legacy_checkpoint=False):
if use_legacy_checkpoint:
variables_to_restore = atari_lib.maybe_transform_variable_names(
tf.compat.v1.global_variables(), legacy_checkpoint_load=True)
else:
global_vars = set([x.name for x in tf.compat.v1.global_variables()])
ckpt_vars = [
'{}:0'.format(name)
for name, _ in tf.train.list_variables(checkpoint_path)
]
include_vars = list(global_vars.intersection(set(ckpt_vars)))
variables_to_restore = tf_slim.get_variables_to_restore(
include=include_vars)
if variables_to_restore:
reloader = tf.compat.v1.train.Saver(var_list=variables_to_restore)
reloader.restore(self._sess, checkpoint_path)
logging.info('Done restoring from %s', checkpoint_path)
else:
logging.info('Nothing to restore!')
def get_q_values(self):
return self.q_values
def get_rewards(self):
return [np.cumsum(self.rewards)]
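  # Illustrative (assumed values): if self.rewards were [0., 1., 0., 2.],
  # get_rewards() would return [array([0., 1., 1., 3.])] -- a single
  # cumulative-return series suitable for the reward line plot.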
class MyRainbowAgent(rainbow_agent.RainbowAgent):
"""Sample Rainbow agent to visualize Q-values and rewards."""
def __init__(self, sess, num_actions, summary_writer=None):
super(MyRainbowAgent, self).__init__(sess, num_actions,
summary_writer=summary_writer)
self.rewards = []
def step(self, reward, observation, step_number):
self.rewards.append(reward)
return super(MyRainbowAgent, self).step(reward, observation, step_number)
def reload_checkpoint(self, checkpoint_path, use_legacy_checkpoint=False):
if use_legacy_checkpoint:
variables_to_restore = atari_lib.maybe_transform_variable_names(
tf.compat.v1.global_variables(), legacy_checkpoint_load=True)
else:
global_vars = set([x.name for x in tf.compat.v1.global_variables()])
ckpt_vars = [
'{}:0'.format(name)
for name, _ in tf.train.list_variables(checkpoint_path)
]
include_vars = list(global_vars.intersection(set(ckpt_vars)))
variables_to_restore = tf_slim.get_variables_to_restore(
include=include_vars)
if variables_to_restore:
reloader = tf.compat.v1.train.Saver(var_list=variables_to_restore)
reloader.restore(self._sess, checkpoint_path)
logging.info('Done restoring from %s', checkpoint_path)
else:
logging.info('Nothing to restore!')
def get_probabilities(self):
return self._sess.run(tf.squeeze(self._net_outputs.probabilities),
{self.state_ph: self.state})
def get_rewards(self):
return [np.cumsum(self.rewards)]
class MyRunner(run_experiment.Runner):
"""Sample Runner class to generate visualizations."""
def __init__(self, base_dir, trained_agent_ckpt_path, create_agent_fn,
use_legacy_checkpoint=False):
self._trained_agent_ckpt_path = trained_agent_ckpt_path
self._use_legacy_checkpoint = use_legacy_checkpoint
super(MyRunner, self).__init__(base_dir, create_agent_fn)
def _initialize_checkpointer_and_maybe_resume(self, checkpoint_file_prefix):
self._agent.reload_checkpoint(self._trained_agent_ckpt_path,
self._use_legacy_checkpoint)
self._start_iteration = 0
  def _run_one_iteration(self, iteration):
    statistics = iteration_statistics.IterationStatistics()
    logging.info('Starting iteration %d', iteration)
    # Evaluation only -- per-episode results are accumulated in `statistics`.
    _, _ = self._run_eval_phase(statistics)
    return statistics.data_lists
def _run_eval_phase(self, statistics):
# Perform the evaluation phase -- no learning.
self._agent.eval_mode = True
_, sum_returns, num_episodes = self._run_one_phase(
self._evaluation_steps, statistics, 'eval')
average_return = sum_returns / num_episodes if num_episodes > 0 else 0.0
logging.info('Average undiscounted return per evaluation episode: %.2f',
average_return)
statistics.append({'eval_average_return': average_return})
return num_episodes, average_return
def _run_one_phase(self, min_steps, statistics, run_mode_str):
step_count = 0
num_episodes = 0
sum_returns = 0.
print("min_steps", min_steps)
while step_count < min_steps:
print(">>>>> step_count", step_count)
episode_length, episode_return = self._run_one_episode()
statistics.append({
'{}_episode_lengths'.format(run_mode_str): episode_length,
'{}_episode_returns'.format(run_mode_str): episode_return
})
step_count += episode_length
sum_returns += episode_return
num_episodes += 1
# We use sys.stdout.write instead of logging so as to flush frequently
# without generating a line break.
sys.stdout.write('Steps executed: {} '.format(step_count) +
'Episode length: {} '.format(episode_length) +
'Return: {}\r'.format(episode_return))
sys.stdout.flush()
return step_count, sum_returns, num_episodes
def _run_one_episode(self):
step_number = 0
total_reward = 0.
action = self._initialize_episode()
is_terminal = False
# Keep interacting until we reach a terminal state.
while True:
observation, reward, is_terminal = self._run_one_step(action, step_number)
total_reward += reward
step_number += 1
print("step_number", step_number)
if self._clip_rewards:
# Perform reward clipping.
reward = np.clip(reward, -1, 1)
if (self._environment.game_over or
step_number == self._max_steps_per_episode):
# Stop the run loop once we reach the true end of episode.
break
elif is_terminal:
# If we lose a life but the episode is not over, signal an artificial
# end of episode to the agent.
self._end_episode(reward, is_terminal)
action = self._agent.begin_episode(observation)
else:
action = self._agent.step(reward, observation, step_number)
self._end_episode(reward, is_terminal)
return step_number, total_reward
def _run_one_step(self, action, step_number):
observation, reward, is_terminal, _ = self._environment.step(action)
# Saving the render
if True:
if step_number > 900 and step_number < 1000:
image = self._environment.render('rgb_array')
plt.imshow(image)
plt.savefig("/home/hugo/saliency_maps/Rainbow-Tennis/render/render"+str(step_number)+".png")
return observation, reward, is_terminal
def create_dqn_agent(sess, environment, summary_writer=None):
return MyDQNAgent(sess, num_actions=environment.action_space.n,
summary_writer=summary_writer)
def create_rainbow_agent(sess, environment, summary_writer=None):
return MyRainbowAgent(sess, num_actions=environment.action_space.n,
summary_writer=summary_writer)
def create_runner(base_dir, trained_agent_ckpt_path, agent='dqn',
use_legacy_checkpoint=False):
create_agent = create_dqn_agent if agent == 'dqn' else create_rainbow_agent
return MyRunner(base_dir, trained_agent_ckpt_path, create_agent,
use_legacy_checkpoint)
def run(agent, game, num_steps, root_dir, restore_ckpt,
use_legacy_checkpoint=False):
"""Main entrypoint for running and generating visualizations.
Args:
agent: str, agent type to use.
game: str, Atari 2600 game to run.
num_steps: int, number of steps to play game.
root_dir: str, root directory where files will be stored.
restore_ckpt: str, path to the checkpoint to reload.
use_legacy_checkpoint: bool, whether to restore from a legacy (pre-Keras)
checkpoint.
"""
tf.compat.v1.reset_default_graph()
config = """
atari_lib.create_atari_environment.game_name = '{}'
WrappedReplayBuffer.replay_capacity = 300
""".format(game)
base_dir = os.path.join(root_dir, 'agent_viz', game, agent)
gin.parse_config(config)
runner = create_runner(base_dir, restore_ckpt, agent, use_legacy_checkpoint)
iteration = 0
runner._run_one_iteration(iteration)
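# Hedged invocation sketch (paths and values are placeholders, not from the
# original repository):
#
#     run(agent='rainbow', game='Tennis', num_steps=1000,
#         root_dir='/tmp/agent_viz', restore_ckpt='/tmp/checkpoints/tf_ckpt-199')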
|
py | 1a30b03607219069c5104c56e476a1b81f5fdd06 | # See also: 📖 [Channels - Consumers](https://channels.readthedocs.io/en/latest/topics/consumers.html)
from django.conf.urls import url
# Websock practice 1
from webapp1.websocks.websock_practice1.v1.consumer import WebsockPractice1V1Consumer
# ------- ----------------------------- -------- --------------------------
# 1 2 3 4
# 1. Application folder name
# 2. Directory name
# 3. Python file name, without the extension
# 4. Class name
# Websock practice 2
from webapp1.websocks.websock_practice2.v1.consumer import WebsockPractice2V1Consumer
# ^ ^
# ------- ----------------------------- -------- --------------------------
# 1 2 3 4
# 1. Application folder name
# 2. Directory name
# 3. Python file name, without the extension
# 4. Class name
# Tic-tac-toe practice 1
from webapp1.websocks.tic_tac_toe.v1.consumer import TicTacToeV1Consumer
# ------- ----------------------- -------- -------------------
# 1 2 3 4
# 1. Application folder name
# 2. Directory name
# 3. Python file name, without the extension
# 4. Class name
# Tic-tac-toe practice 2
from webapp1.websocks.tic_tac_toe.v2.consumer_custom import TicTacToeV2ConsumerCustom
# ^ two ^ two
# ------- ----------------------- --------------- -------------------------
# 1 2 3 4
# 1. Application folder name
# 2. Directory name
# 3. Python file name, without the extension
# 4. Class name
# Tic-tac-toe practice 3.1
from webapp1.websocks.tic_tac_toe.v3o1.consumer_custom import TicTacToeV3o1ConsumerCustom
# ^^^ three o one ^^^ three o one
# ------- ------------------------- --------------- ---------------------------
# 1 2 3 4
# 1. Application folder name
# 2. Directory name
# 3. Python file name, without the extension
# 4. Class name
websocket_urlpatterns = [
    # +----
    # | Websock practice 1
    # Websock practice 1
    url(r'^websock-practice1/v1/$', WebsockPractice1V1Consumer.as_asgi()),
    # ----------------------- ------------------------------------
    # 1 2
    # 1. How to write the path part of the URL as a Django regular expression
    # 2. Class name and method. Turns the consumer into its ASGI form
    # | Websock practice 1
    # +----
    # +----
    # | Websock practice 2
    # Websock practice 2
    url(r'^websock-practice2/v1/$', WebsockPractice2V1Consumer.as_asgi()),
    # ^ ^
    # ----------------------- ------------------------------------
    # 1 2
    # 1. How to write the path part of the URL as a Django regular expression
    # 2. Class name and method. Turns the consumer into its ASGI form
    # | Websock practice 2
    # +----
    # Tic-tac-toe practice 1
    url(r'^tic-tac-toe/v1/playing/(?P<room_name>\w+)/$',
        # --------------------------------------------
        # 1
        TicTacToeV1Consumer.as_asgi()),
        # -----------------------------
        # 2
    # 1. How to write the path part of a URL such as `http://example.com/tic-tac-toe/v1/playing/Elephant/` as a Django regular expression.
    # room_name is passed in as a variable
    # 2. Class name and method. Turns the consumer into its ASGI form
    # Tic-tac-toe practice 2
    url(r'^tic-tac-toe/v2/playing/(?P<kw_room_name>\w+)/$',
        # ^
        # -----------------------------------------------
        # 1
        TicTacToeV2ConsumerCustom.as_asgi()),
        # ^
        # -----------------------------------
        # 2
    # 1. How to write the path part of a URL such as `http://example.com/tic-tac-toe/v2/playing/Elephant/` as a Django regular expression.
    # kw_room_name is passed in as a variable
    # 2. Class name and method. Turns the consumer into its ASGI form
    # Tic-tac-toe practice 3.1
    url(r'^tic-tac-toe/v3o1/playing/(?P<kw_room_name>\w+)/$',
        # ^^^ three o one
        # -------------------------------------------------
        # 1
        TicTacToeV3o1ConsumerCustom.as_asgi()),
        # ^^^ three o one
        # -------------------------------------
        # 2
    # 1. How to write the path part of a URL such as `http://example.com/tic-tac-toe/v3o1/playing/Elephant/` as a Django regular expression.
    # -----------------------------------
    # kw_room_name is passed in as a variable
    # 2. Class name and method. Turns the consumer into its ASGI form
]
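# Hedged wiring sketch (assumes Django Channels' standard routing helpers;
# not part of the original file):
#
#     from channels.auth import AuthMiddlewareStack
#     from channels.routing import ProtocolTypeRouter, URLRouter
#
#     application = ProtocolTypeRouter({
#         'websocket': AuthMiddlewareStack(URLRouter(websocket_urlpatterns)),
#     })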
|
py | 1a30b1d46538e642e226c9fbad175df5d2fc18a0 | from base64 import b64encode
import datetime
from enum import Enum
from typing import Any, Union
import json
class _JsonEncoder(json.JSONEncoder):
def default(self, o: Any) -> Union[str, float]:
if isinstance(o, datetime.datetime):
return o.astimezone(tz=datetime.timezone.utc).isoformat()
if isinstance(o, datetime.timedelta):
return o.total_seconds()
if isinstance(o, bytes):
return b64encode(o).decode('ascii')
if isinstance(o, Enum):
return o.name
return super().default(o)
def json_dumps(o: Union[dict, list]) -> str:
if isinstance(o, dict):
o = {k: v for k, v in o.items() if not k.startswith('_')}
return json.dumps(o, cls=_JsonEncoder, separators=(',', ':'))
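# Illustrative round trip (assumed values; keys starting with '_' are dropped
# for dicts, and compact separators are used):
#
#     json_dumps({'when': datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc),
#                 'blob': b'\x00\x01', '_private': 'omitted'})
#     -> '{"when":"2020-01-01T00:00:00+00:00","blob":"AAE="}'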
def pagination_header(count: int, page: int, per_page: int) -> dict:
return {
'X-Page': page,
'X-Per-Page': per_page,
'X-Total': count,
'X-Total-Pages': (count + (per_page - 1)) // per_page,
}
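# Worked example (assumed values): 95 items at 10 per page gives 10 total
# pages, via the ceiling division (count + per_page - 1) // per_page.
#
#     pagination_header(95, page=2, per_page=10)
#     -> {'X-Page': 2, 'X-Per-Page': 10, 'X-Total': 95, 'X-Total-Pages': 10}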
|
py | 1a30b3bf178c429b61826658b52140124b407180 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import maya.cmds
import IECore
import IECoreMaya
__dagMenuCallbacks = []
## Registers a callback to be used when creating the right click dag
# menu for scene shapes. Callbacks should have the following signature :
#
# callback( menu, sceneShape ).
def addDagMenuCallback( callback ) :
if not callback in __dagMenuCallbacks :
__dagMenuCallbacks.append( callback )
## Removes a callback previously added with addDagMenuCallback.
def removeDagMenuCallback( callback ) :
__dagMenuCallbacks.remove( callback )
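## Hedged example (illustrative only, not part of the original module) :
## registering a callback that appends one custom item to the dag menu.
#
# def __myDagMenuCallback( menu, sceneShape ) :
#
# 	maya.cmds.setParent( menu, menu=True )
# 	maya.cmds.menuItem( label = "Custom Tool" )
#
# addDagMenuCallback( __myDagMenuCallback )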
## This is forwarded to by the ieSceneShapeDagMenuProc function in
# ieSceneShape.mel
def _dagMenu( menu, sceneShape ) :
sceneShapes = __selectedSceneShapes()
if not sceneShapes:
return
fnScS = []
for target in sceneShapes:
fnScS.append( IECoreMaya.FnSceneShape( target ) )
maya.cmds.setParent( menu, menu=True )
invalidSceneShapes = __invalidSceneShapes( sceneShapes )
if invalidSceneShapes:
maya.cmds.menuItem(
label = "Invalid Inputs for selected SceneShapes!",
radialPosition = "N",
)
# Component mode
elif fnScS[0].selectedComponentNames():
maya.cmds.menuItem(
label = "Object",
radialPosition = "N",
command = IECore.curry( __objectCallback, sceneShapes[0] ),
)
maya.cmds.menuItem(
label = "Print Component Names",
radialPosition = "NW",
command = IECore.curry( __printComponents, sceneShapes[0] )
)
maya.cmds.menuItem(
label = "Print Selected Component Names",
radialPosition = "NE",
command = IECore.curry( __printSelectedComponents, sceneShapes[0] )
)
maya.cmds.menuItem(
label = "Expand...",
radialPosition = "SE",
subMenu = True
)
maya.cmds.menuItem(
label = "Expand to Selected Components",
radialPosition = "S",
command = IECore.curry( __expandToSelected, sceneShapes[0] )
)
maya.cmds.setParent( "..", menu=True )
maya.cmds.menuItem(
label = "Create Locator",
radialPosition = "SW",
subMenu = True,
)
maya.cmds.menuItem(
label = "At Bound Min",
radialPosition = "N",
command = IECore.curry( __createLocatorAtPoints, sceneShapes[0], [ "Min" ] ),
)
maya.cmds.menuItem(
label = "At Bound Max",
radialPosition = "NE",
command = IECore.curry( __createLocatorAtPoints, sceneShapes[0], [ "Max" ] ),
)
maya.cmds.menuItem(
label = "At Bound Min And Max",
radialPosition = "E",
command = IECore.curry( __createLocatorAtPoints, sceneShapes[0], [ "Min", "Max" ] ),
)
maya.cmds.menuItem(
label = "At Bound Centre",
radialPosition = "SE",
command = IECore.curry( __createLocatorAtPoints, sceneShapes[0], [ "Center" ] ),
)
maya.cmds.menuItem(
label = "At Transform Origin",
radialPosition = "S",
command = IECore.curry( __createLocatorWithTransform, sceneShapes[0] ),
)
maya.cmds.setParent( "..", menu=True )
# Object mode
else:
if len( sceneShapes ) == 1:
if maya.cmds.getAttr( sceneShapes[0]+".drawGeometry" ) or maya.cmds.getAttr( sceneShapes[0]+".drawChildBounds" ):
maya.cmds.menuItem(
label = "Component",
radialPosition = "N",
command = IECore.curry( __componentCallback, sceneShapes[0] )
)
maya.cmds.menuItem(
label = "Preview...",
radialPosition = "NW",
subMenu = True
)
maya.cmds.menuItem(
label = "All Geometry On",
radialPosition = "E",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawGeometry", True )
)
maya.cmds.menuItem(
label = "All Child Bounds On",
radialPosition = "SE",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawChildBounds", True )
)
maya.cmds.menuItem(
label = "All Root Bound On",
radialPosition = "NE",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawRootBound", True )
)
maya.cmds.menuItem(
label = "All Geometry Off",
radialPosition = "W",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawGeometry", False )
)
maya.cmds.menuItem(
label = "All Child Bounds Off",
radialPosition = "SW",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawChildBounds", False )
)
maya.cmds.menuItem(
label = "All Root Bound Off",
radialPosition = "NW",
command = IECore.curry( __setChildrenPreviewAttributes, sceneShapes, "drawRootBound", False )
)
maya.cmds.setParent( "..", menu=True )
commonTags = None
for fn in fnScS:
scene = fn.sceneInterface()
tmpTags = scene.readTags(IECore.SceneInterface.EveryTag)
if commonTags is None:
commonTags = set( tmpTags )
else:
commonTags.intersection_update( set(tmpTags) )
tagTree = dict()
if not commonTags is None:
tags = list(commonTags)
for tag in tags :
tag = str(tag)
parts = tag.split(":")
leftOverTag = tag[len(parts[0])+1:]
if not parts[0] in tagTree :
tagTree[parts[0]] = [ leftOverTag ]
else :
tagTree[parts[0]].append( leftOverTag )
if tagTree :
tags = tagTree.keys()
tags.sort()
def addTagSubMenuItems( command ):
import copy
copiedTagTree = copy.deepcopy( tagTree )
for tag in tags :
subtags = copiedTagTree[tag]
subtags.sort()
if "" in subtags:
maya.cmds.menuItem(
label = tag,
command = IECore.curry( command, sceneShapes, tag )
)
subtags.remove("")
if subtags:
maya.cmds.menuItem(
label = tag,
subMenu = True
)
for tagSuffix in subtags :
maya.cmds.menuItem(
label = tagSuffix,
command = IECore.curry( command, sceneShapes, tag + ":" + tagSuffix )
)
maya.cmds.setParent( "..", menu=True )
maya.cmds.menuItem(
label = "Tags filter...",
radialPosition = "S",
subMenu = True
)
maya.cmds.menuItem(
label = "Display All",
command = IECore.curry( __setTagsFilterPreviewAttributes, sceneShapes, "" )
)
addTagSubMenuItems( __setTagsFilterPreviewAttributes )
maya.cmds.setParent( "..", menu=True )
maya.cmds.menuItem(
label = "Expand...",
radialPosition = "SE",
subMenu = True
)
maya.cmds.menuItem(
label = "Recursive Expand As Geometry",
radialPosition = "W",
command = IECore.curry( __expandAsGeometry, sceneShapes )
)
if any( map(lambda x: x.canBeExpanded(), fnScS) ):
maya.cmds.menuItem(
label = "Expand One Level",
radialPosition = "E",
command = IECore.curry( __expandOnce, sceneShapes )
)
maya.cmds.menuItem(
label = "Recursive Expand",
radialPosition = "N",
command = IECore.curry( __expandAll, sceneShapes )
)
if len( sceneShapes ) == 1:
if fnScS[0].selectedComponentNames() :
maya.cmds.menuItem(
label = "Expand to Selected Components",
radialPosition = "S",
command = IECore.curry( __expandToSelected, sceneShapes[0] )
)
if tagTree :
maya.cmds.menuItem(
label = "Expand by Tag...",
radialPosition = "S",
subMenu = True
)
addTagSubMenuItems( __expandAll )
maya.cmds.setParent( "..", menu=True )
maya.cmds.setParent( "..", menu=True )
parentSceneShape = __parentSceneShape( sceneShapes )
if any( map(lambda x: x.canBeCollapsed(), fnScS) ) or ( parentSceneShape and IECoreMaya.FnSceneShape( parentSceneShape ).canBeCollapsed() ):
maya.cmds.menuItem(
label = "Collapse...",
radialPosition = "SW",
subMenu = True
)
if parentSceneShape and IECoreMaya.FnSceneShape( parentSceneShape ).canBeCollapsed():
parentName = maya.cmds.listRelatives( parentSceneShape, p=True )[0]
maya.cmds.menuItem(
label = "Collapse to Parent: "+parentName,
radialPosition = "N",
command = IECore.curry( __collapseChildren, [parentSceneShape] )
)
if any( map(lambda x: x.canBeCollapsed(), fnScS) ):
maya.cmds.menuItem(
label = "Collapse Children",
radialPosition = "W",
command = IECore.curry( __collapseChildren, sceneShapes )
)
maya.cmds.setParent( "..", menu=True )
for c in __dagMenuCallbacks :
c( menu, sceneShape )
## Returns all the sceneShapes that do not have a valid scene interface
def __invalidSceneShapes( sceneShapes ):
invalid = []
for sceneShape in sceneShapes:
fn = IECoreMaya.FnSceneShape( sceneShape )
if fn.sceneInterface() is None:
invalid.append( sceneShape )
return invalid
## Returns all the selected scene shapes
def __selectedSceneShapes() :
allSceneShapes = []
selectedSceneShapes = maya.cmds.ls( sl=True, l=True )
for shape in selectedSceneShapes:
# Make sure we have the shape name, it could be a component
shapeName = shape.split(".f[")[0]
if maya.cmds.nodeType( shapeName ) == "ieSceneShape" and not shapeName in allSceneShapes:
allSceneShapes.append( shapeName )
else:
children = maya.cmds.listRelatives( shapeName, children=True, type="ieSceneShape", fullPath=True ) or []
for child in children:
if not child in allSceneShapes:
allSceneShapes.append( child )
return allSceneShapes
## Turns on child bounds and switches to component mode
def __componentCallback( sceneShape, *unused ) :
parent = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )[0]
maya.cmds.selectType( ocm=True, alc=False, facet=True )
maya.cmds.hilite( parent )
## Switches to object mode
def __objectCallback( sceneShape, *unused ) :
parent = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )[0]
maya.cmds.hilite( parent, unHilite=True )
selection = maya.cmds.ls( selection=True )
maya.cmds.selectMode( object=True )
if selection :
maya.cmds.select( selection, replace=True )
else :
maya.cmds.select( clear=True )
## Print the existing component names for the scene shape
def __printComponents( sceneShape, *unused ) :
fnS = IECoreMaya.FnSceneShape( sceneShape )
names = fnS.componentNames()
names.sort()
print "\n"
print " ".join( names ) ,
print "\n"
## Print the selected component names for the scene shape
def __printSelectedComponents( sceneShape, *unused ) :
fnS = IECoreMaya.FnSceneShape( sceneShape )
selectedNames = fnS.selectedComponentNames()
if selectedNames:
selectedNames = list( selectedNames )
selectedNames.sort()
print "\n"
print " ".join( selectedNames ) ,
print "\n"
## Expand each scene shape one level down
def __expandOnce( sceneShapes, *unused ) :
toSelect = []
for sceneShape in sceneShapes:
fnS = IECoreMaya.FnSceneShape( sceneShape )
new = fnS.expandOnce( preserveNamespace=True )
toSelect.extend( map( lambda x: x.fullPathName(), new ) )
if toSelect:
maya.cmds.select( toSelect, replace=True )
## Recursively expand the scene shapes
def __expandAll( sceneShapes, tagName=None, *unused ) :
toSelect = []
for sceneShape in sceneShapes:
fnS = IECoreMaya.FnSceneShape( sceneShape )
newFn = fnS.expandAll( preserveNamespace=True, tagName=tagName )
toSelect.extend( map( lambda x: x.fullPathName(), newFn ) )
if toSelect:
maya.cmds.select( toSelect, replace=True )
## Recursively expand the scene shapes and converts objects to geometry
def __expandAsGeometry( sceneShapes, *unused ) :
for sceneShape in sceneShapes:
fnS = IECoreMaya.FnSceneShape( sceneShape )
fnS.convertAllToGeometry( True )
## Expand the scene shape the minimal amount to reach the selected components
def __expandToSelected( sceneShape, *unused ) :
fnScS = IECoreMaya.FnSceneShape( sceneShape )
sceneShape = fnScS.fullPathName()
selectedNames = fnScS.selectedComponentNames()
if not selectedNames:
return
if "/" in selectedNames:
selectedNames.remove("/")
# Go back to object mode
parent = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )[0]
maya.cmds.hilite( parent, unHilite=True )
maya.cmds.selectMode( object=True )
if selectedNames == []:
return
toSelect = []
for selected in selectedNames:
transformName = parent
transformNames = [ transformName ]
for item in selected.split("/")[1:-1]:
transformName = transformName + "|" + item
if not transformName in transformNames:
transformNames.append( transformName )
for transform in transformNames:
shape = maya.cmds.listRelatives( transform, fullPath=True, type = "ieSceneShape" )[0]
fnS = IECoreMaya.FnSceneShape( shape )
fnS.expandOnce()
toSelect.append( transformNames[-1] )
if toSelect:
maya.cmds.select( toSelect, replace=True )
## Collapse all the children of the scene shapes
def __collapseChildren( sceneShapes, *unused ) :
for sceneShape in sceneShapes:
fnS = IECoreMaya.FnSceneShape( sceneShape )
fnS.collapse()
## Returns the first common parent scene shape for the given scene shapes
# Returns None if no parent found.
def __parentSceneShape( sceneShapes ):
def getParentShapes( transform, allParentShapes ):
parent = maya.cmds.listRelatives( transform, p=True, fullPath=True )
if parent:
parentShape = maya.cmds.listRelatives( parent[0], fullPath=True, type = "ieSceneShape" )
if parentShape:
allParentShapes.append( parentShape[0] )
getParentShapes( parent[0], allParentShapes )
parents = None
for sceneShape in sceneShapes:
transform = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )
if transform:
allParentShapes = []
getParentShapes( transform[0], allParentShapes )
if parents is None:
parents = set( allParentShapes )
else:
parents.intersection_update( set(allParentShapes) )
if parents:
parent = ""
for p in parents:
if p.count("|") > parent.count("|"):
parent = p
return parent
return None
## Sets the given preview attribute on the scene shapes with the given boolean value
# Preview attributes can be drawGeometry, drawLocators, drawRootBound and drawChildBounds
def __setChildrenPreviewAttributes( sceneShapes, attributeName, value, *unused ) :
for sceneShape in sceneShapes:
transform = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )
if transform:
allChildren = maya.cmds.listRelatives( transform[0], ad=True, fullPath=True, type = "ieSceneShape" ) or []
for node in allChildren:
maya.cmds.setAttr( node+"."+attributeName, value )
## Sets the given tags filter attribute on the scene shapes with the given string value
def __setTagsFilterPreviewAttributes( sceneShapes, tagName, *unused ) :
for sceneShape in sceneShapes:
transform = maya.cmds.listRelatives( sceneShape, parent=True, fullPath=True )
if transform:
allChildren = maya.cmds.listRelatives( transform[0], ad=False, fullPath=True, type = "ieSceneShape" ) or []
for node in allChildren:
maya.cmds.setAttr( node+".drawTagsFilter", tagName, type = "string" )
def __createLocatorAtPoints( sceneShape, childPlugSuffixes, *unused ) :
fnSc = IECoreMaya.FnSceneShape( sceneShape )
selectedNames = fnSc.selectedComponentNames()
locators = []
for name in selectedNames :
locators.extend( fnSc.createLocatorAtPoints( name, childPlugSuffixes ) )
maya.cmds.select( locators, replace=True )
def __createLocatorWithTransform( sceneShape, *unused ) :
fnSc = IECoreMaya.FnSceneShape( sceneShape )
selectedNames = fnSc.selectedComponentNames()
locators = []
for name in selectedNames :
locators.append( fnSc.createLocatorAtTransform( name ) )
maya.cmds.select( locators, replace=True )
|
py | 1a30b3c3dfb43823629a543fc5c03f20eed56230 | # coding: utf-8
"""
Deep Lynx
The construction of megaprojects has consistently demonstrated challenges for project managers in regard to meeting cost, schedule, and performance requirements. Megaproject construction challenges are common place within megaprojects with many active projects in the United States failing to meet cost and schedule efforts by significant margins. Currently, engineering teams operate in siloed tools and disparate teams where connections across design, procurement, and construction systems are translated manually or over brittle point-to-point integrations. The manual nature of data exchange increases the risk of silent errors in the reactor design, with each silent error cascading across the design. These cascading errors lead to uncontrollable risk during construction, resulting in significant delays and cost overruns. Deep Lynx allows for an integrated platform during design and operations of mega projects. The Deep Lynx Core API delivers a few main features. 1. Provides a set of methods and endpoints for manipulating data in an object oriented database. This allows us to store complex datatypes as records and then to compile them into actual, modifiable objects at run-time. Users can store taxonomies or ontologies in a readable format. 2. Provides methods for storing and retrieving data in a graph database. This data is structured and validated against the aformentioned object oriented database before storage. # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.credential_validation_result import CredentialValidationResult # noqa: E501
from swagger_client.rest import ApiException
class TestCredentialValidationResult(unittest.TestCase):
"""CredentialValidationResult unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testCredentialValidationResult(self):
"""Test CredentialValidationResult"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.credential_validation_result.CredentialValidationResult() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
py | 1a30b53279a33a64f5179950d3a2d31b389dfaae | # Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
import time
from cement.utils.misc import minimal_logger
from ..lib import elasticbeanstalk, iam, utils
from ..lib.aws import InvalidParameterValueError
from ..core import io
from ..objects.exceptions import TimeoutError, AlreadyExistsError, \
NotAuthorizedError, NotSupportedError
from ..resources.strings import strings, responses, prompts
from . import commonops
LOG = minimal_logger(__name__)
DEFAULT_ROLE_NAME = 'aws-elasticbeanstalk-ec2-role'
DEFAULT_SERVICE_ROLE_NAME = 'aws-elasticbeanstalk-service-role'
def make_new_env(env_request, branch_default=False,
nohang=False, interactive=True, timeout=None):
resolve_roles(env_request, interactive)
# deploy code
if not env_request.sample_application and not env_request.version_label:
io.log_info('Creating new application version using project code')
env_request.version_label = \
commonops.create_app_version(env_request.app_name)
if env_request.version_label is None or env_request.sample_application:
env_request.version_label = \
commonops.create_dummy_app_version(env_request.app_name)
# Create env
if env_request.key_name:
commonops.upload_keypair_if_needed(env_request.key_name)
io.log_info('Creating new environment')
result, request_id = create_env(env_request,
interactive=interactive)
env_name = result.name # get the (possibly) updated name
# Edit configurations
## Get default environment
default_env = commonops.get_current_branch_environment()
## Save env as branch default if needed
if not default_env or branch_default:
commonops.set_environment_for_current_branch(env_name)
# Print status of env
commonops.print_env_details(result, health=False)
if nohang:
return
io.echo('Printing Status:')
try:
commonops.wait_for_success_events(request_id,
timeout_in_minutes=timeout)
except TimeoutError:
io.log_error(strings['timeout.error'])
def create_env(env_request, interactive=True):
# If a template is being used, we want to try using just the template
if env_request.template_name:
platform = env_request.platform
env_request.platform = None
else:
platform = None
while True:
try:
return elasticbeanstalk.create_environment(env_request)
except InvalidParameterValueError as e:
if e.message == responses['app.notexists'].replace(
'{app-name}', '\'' + env_request.app_name + '\''):
                # App doesn't exist, must be a new region.
                ## Let's create the app in the region
commonops.create_app(env_request.app_name)
elif e.message == responses['create.noplatform']:
if platform:
env_request.platform = platform
else:
raise
elif interactive:
LOG.debug('creating env returned error: ' + e.message)
if re.match(responses['env.cnamenotavailable'], e.message):
io.echo(prompts['cname.unavailable'])
cname = io.prompt_for_cname()
elif re.match(responses['env.nameexists'], e.message):
io.echo(strings['env.exists'])
current_environments = commonops.get_all_env_names()
unique_name = utils.get_unique_name(env_request.env_name,
current_environments)
env_request.env_name = io.prompt_for_environment_name(
default_name=unique_name)
elif e.message == responses['app.notexists'].replace(
'{app-name}', '\'' + env_request.app_name + '\''):
                    # App doesn't exist, must be a new region.
                    ## Let's create the app in the region
commonops.create_app(env_request.app_name)
else:
raise
else:
raise
# Try again with new values
def get_default_profile():
""" Get the default elasticbeanstalk IAM profile,
Create it if it doesn't exist """
# get list of profiles
try:
profile = DEFAULT_ROLE_NAME
try:
iam.create_instance_profile(profile)
io.log_info('Created default instance profile.')
role = get_default_role()
iam.add_role_to_profile(profile, role)
except AlreadyExistsError:
pass
except NotAuthorizedError:
# Not a root account. Just assume role exists
io.log_info('No IAM privileges: assuming default '
'instance profile exists.')
return DEFAULT_ROLE_NAME
return profile
def get_default_role():
role = DEFAULT_ROLE_NAME
document = '{"Version": "2008-10-17","Statement": [{"Action":' \
' "sts:AssumeRole","Principal": {"Service": ' \
'"ec2.amazonaws.com"},"Effect": "Allow","Sid": ""}]}'
try:
iam.create_role(role, document)
except AlreadyExistsError:
pass
return role
def get_service_role():
try:
roles = iam.get_role_names()
if DEFAULT_SERVICE_ROLE_NAME not in roles:
return None
except NotAuthorizedError:
# No permissions to list roles
# Assume role exists, we will handle error at a deeper level
pass
return DEFAULT_SERVICE_ROLE_NAME
def create_default_service_role():
"""
Create the default service role
"""
io.log_info('Creating service role {} with default permissions.'
.format(DEFAULT_SERVICE_ROLE_NAME))
trust_document = _get_default_service_trust_document()
json_policy = _get_default_service_role_policy()
role_name = DEFAULT_SERVICE_ROLE_NAME
policy_name = 'awsebcli_aws-elasticbeanstalk-service-role_{}'\
.format(int(time.time()))
try:
iam.create_role_with_policy(role_name, trust_document,
policy_name, json_policy)
except NotAuthorizedError as e:
        # No permissions to create the role or attach its policy
raise NotAuthorizedError(prompts['create.servicerole.nopermissions']
.format(DEFAULT_SERVICE_ROLE_NAME, e))
return DEFAULT_SERVICE_ROLE_NAME
def resolve_roles(env_request, interactive):
"""
Resolves instance-profile and service-role
:param env_request: environment request
:param interactive: boolean
"""
LOG.debug('Resolving roles')
if env_request.instance_profile is None and \
env_request.template_name is None:
        # The service allows running without a profile, but it is not recommended
# Get the eb default profile
env_request.instance_profile = get_default_profile()
if (env_request.platform.has_healthd_support() and # HealthD enabled
(env_request.service_role is None) and
(env_request.template_name is None)):
role = get_service_role()
if role is None:
if interactive:
io.echo()
io.echo(prompts['create.servicerole.info'])
input = io.get_input(prompts['create.servicerole.view'],
default='')
if input.strip('"').lower() == 'view':
io.echo(_get_default_service_role_policy())
io.get_input(prompts['general.pressenter'])
role = create_default_service_role()
else:
raise NotSupportedError(prompts['create.servicerole.required'])
env_request.service_role = role
def _get_default_service_trust_document():
"""
    Just a string representing the service role trust document.
Includes newlines for pretty printing :)
"""
return \
'''{
"Version": "2012-10-17",
"Statement": [{
"Sid": "",
"Effect": "Allow",
"Principal": {
"Service": "elasticbeanstalk.amazonaws.com"
},
"Action": "sts:AssumeRole",
"Condition": {
"StringEquals": {
"sts:ExternalId": "elasticbeanstalk"
}
}
}]
}'''
def _get_default_service_role_policy():
"""
Just a string representing the service role policy.
Includes newlines for pretty printing :)
"""
return \
'''{
"Version": "2012-10-17",
"Statement": [{
"Effect": "Allow",
"Action": [
"elasticloadbalancing:DescribeInstanceHealth",
"ec2:DescribeInstances",
"ec2:DescribeInstanceStatus",
"ec2:GetConsoleOutput",
"ec2:AssociateAddress",
"ec2:DescribeAddresses",
"ec2:DescribeSecurityGroups",
"sqs:GetQueueAttributes",
"sqs:GetQueueUrl",
"autoscaling:DescribeAutoScalingGroups",
"autoscaling:DescribeAutoScalingInstances",
"autoscaling:DescribeScalingActivities",
"autoscaling:DescribeNotificationConfigurations"
],
"Resource": ["*"]
}]
    }'''
|
py | 1a30b5e798e4cba290fc30b4e086e8e5467436e2 | #!/usr/bin/env python
# Copyright 2016 The Dart project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script downloads the latest dev SDK from
# http://gsdview.appspot.com/dart-archive/channels/dev/raw/latest/sdk/
# into tools/sdks/. It is intended to be invoked from Jiri hooks in
# a Fuchsia checkout.
import os
import sys
import zipfile
import urllib
import utils
HOST_OS = utils.GuessOS()
HOST_ARCH = utils.GuessArchitecture()
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
DEFAULT_DART_VERSION = 'latest'
BASE_URL = 'http://gsdview.appspot.com/dart-archive/channels/dev/raw/%s/sdk/%s'
def host_os_for_sdk(host_os):
if host_os.startswith('macos'):
return 'mac'
if host_os.startswith('win'):
return 'windows'
return host_os
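# Illustrative mapping (based on the prefixes checked above):
#   host_os_for_sdk('macos')  -> 'mac'
#   host_os_for_sdk('win32')  -> 'windows'
#   host_os_for_sdk('linux')  -> 'linux' (returned unchanged)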
# Python's zipfile doesn't preserve file permissions during extraction, so we
# have to do it manually.
def extract_file(zf, info, extract_dir):
try:
zf.extract(info.filename, path=extract_dir)
out_path = os.path.join(extract_dir, info.filename)
perm = info.external_attr >> 16L
os.chmod(out_path, perm)
except IOError as err:
if 'dart-sdk/bin/dart' in err.filename:
print(
'Failed to extract the new Dart SDK dart binary. ' +
'Kill stale instances (like the analyzer) and try the update again'
)
return False
raise
return True
def main(argv):
host_os = host_os_for_sdk(HOST_OS)
zip_file = ('dartsdk-%s-x64-release.zip' % HOST_OS)
sha_file = zip_file + '.sha256sum'
sdk_path = os.path.join(DART_ROOT, 'tools', 'sdks')
local_sha_path = os.path.join(sdk_path, sha_file)
remote_sha_path = os.path.join(sdk_path, sha_file + '.remote')
zip_path = os.path.join(sdk_path, zip_file)
sdk_version = DEFAULT_DART_VERSION
sha_url = (BASE_URL % (sdk_version, sha_file))
zip_url = (BASE_URL % (sdk_version, zip_file))
local_sha = ''
if os.path.isfile(local_sha_path):
with open(local_sha_path, 'r') as fp:
local_sha = fp.read()
remote_sha = ''
urllib.urlretrieve(sha_url, remote_sha_path)
with open(remote_sha_path, 'r') as fp:
remote_sha = fp.read()
os.remove(remote_sha_path)
if local_sha == '' or local_sha != remote_sha:
print 'Downloading prebuilt Dart SDK from: ' + zip_url
urllib.urlretrieve(zip_url, zip_path)
with zipfile.ZipFile(zip_path, 'r') as zf:
for info in zf.infolist():
if not extract_file(zf, info, sdk_path):
return -1
with open(local_sha_path, 'w') as fp:
fp.write(remote_sha)
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
py | 1a30b712bd8d569d2562e6e7c4f964befb20ca90 | from setuptools import setup
setup(
# Packaging meta-data
name='Vimcryption',
version='0.1',
description='Test package for vimcryption VIM plugin.',
author='Tom Manner, Miguel Nistal',
author_email='[email protected], [email protected]',
url='https://www.github.com/tsmanner/vimcryption',
# Travis Unit-Test Installation
install_requires=[
'anybadge==0.1.0.dev2',
'codecov',
'coverage>=4.5',
'coverage-badge',
'nose2',
'nose2[coverage_plugin]>=0.6.5',
'numpy',
'pylint',
],
packages=[
'encryptionengine',
],
)
|
py | 1a30b966604a7174adb21aa449e1afcf3d23daa2 | #!/usr/bin/env python
# coding: utf-8
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root for full license information.
import droid_speech
#import intent_sample
#import translation_sample
#import speech_synthesis_sample
from collections import OrderedDict
import platform
#eofkey = 'Ctrl-Z' if "Windows" == platform.system() else 'Ctrl-D'
droidspeechfunctions = OrderedDict([
(droid_speech, [
droid_speech.speech_recognize_once_from_mic,
droid_speech.speech_recognize_once_from_file,
droid_speech.speech_recognize_once_compressed_input,
droid_speech.speech_recognize_once_from_file_with_customized_model,
droid_speech.speech_recognize_once_from_file_with_custom_endpoint_parameters,
droid_speech.speech_recognize_async_from_file,
droid_speech.speech_recognize_continuous_from_file,
droid_speech.speech_recognition_with_pull_stream,
droid_speech.speech_recognition_with_push_stream,
droid_speech.speech_recognize_keyword_from_microphone,
droid_speech.speech_recognize_keyword_locally_from_microphone,
droid_speech.pronunciation_assessment_from_microphone,
])
])
def select():
modules = list(droidspeechfunctions.keys())
try:
selected_module = modules[0]
except EOFError:
raise
except Exception as e:
print(e)
return
try:
selected_function = droidspeechfunctions[selected_module][10]
except EOFError:
raise
except Exception as e:
print(e)
return
try:
selected_function()
except Exception as e:
        print('Error running droid function: {}'.format(e))
print()
while True:
try:
select()
except EOFError:
break
|
py | 1a30b969cc1750ade029234dd85ad406f3134d9a | import itertools
from collections import OrderedDict
from rest_framework import filters, exceptions
from .mixin import ViewSetMixin
def get_sort_order(request, param):
args = request.query_params.getlist(param)
fields = itertools.chain(*(arg.split(',') for arg in args))
order = tuple(field.strip() for field in fields if field)
return order
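# Usage sketch (illustrative, not part of the original module): for a request
# like ?sort=name,-created&sort=id this returns ('name', '-created', 'id') --
# repeated parameters and comma-separated fields are flattened, whitespace is
# stripped, and empty entries are dropped.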
class OrderingFilter(filters.OrderingFilter):
@staticmethod
def get_translated_sort_order(fields, field_map):
return tuple(field_map.get(field, field) for field in fields)
@staticmethod
def get_reverse_translated_sort_order(fields, field_map):
sort_field_reverse_map = {value: key for (key, value) in field_map.items()}
return tuple(sort_field_reverse_map.get(field, field) for field in fields)
@staticmethod
def get_consistent_sort_order(fields):
return fields + type(fields)(('pk',))
def get_ordering(self, request, queryset, view):
fields = get_sort_order(request, self.ordering_param)
if fields:
field_map = getattr(view, 'sort_field_map', {})
fields = self.get_translated_sort_order(fields, field_map)
ordering = self.remove_invalid_fields(queryset, fields, view, request)
if len(ordering) != len(fields):
ext_fields = self.get_reverse_translated_sort_order(fields, field_map)
ext_ordering = self.get_reverse_translated_sort_order(ordering, field_map)
errors = {}
for ext_field in ext_fields:
if ext_field not in ext_ordering:
errors[ext_field] = 'invalid field'
raise exceptions.ValidationError(errors)
ordering = self.get_consistent_sort_order(ordering)
else:
ordering = self.get_default_ordering(view)
consistent_sort = getattr(view, 'consistent_sort', True)
if consistent_sort:
ordering = self.get_consistent_sort_order(ordering)
return ordering
class SortedModelMixin(ViewSetMixin):
ordering = ()
sort_field_map = {}
consistent_sort = True
def list(self, request, *args, **kwargs):
sort = get_sort_order(request, OrderingFilter.ordering_param) or self.ordering
context = OrderedDict(sort=','.join(sort))
return self.decorated_list(SortedModelMixin, context, request, *args, **kwargs)
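# Hypothetical wiring sketch (names below are illustrative, not from this
# module): a view combining both pieces might declare
#
#   class ArticleViewSet(SortedModelMixin, ModelViewSet):
#       filter_backends = (OrderingFilter,)
#       ordering = ('-created',)
#       sort_field_map = {'author': 'author__last_name'}
#
# so ?sort=author is translated to author__last_name, invalid fields raise a
# ValidationError, and 'pk' is appended for a stable total order.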
|
py | 1a30b9a21b52c8bbc88eb6948115ea78e0b0b199 | # Distributed under the MIT License.
# See LICENSE.txt for details.
import numpy as np
from numpy import sqrt, exp
from scipy.optimize import newton
# Isotropic Schwarzschild coordinates
def conformal_metric_isotropic(x, mass):
return np.identity(3)
def inv_conformal_metric_isotropic(x, mass):
return np.identity(3)
def deriv_conformal_metric_isotropic(x, mass):
return np.zeros((3, 3, 3))
def extrinsic_curvature_trace_isotropic(x, mass):
return 0.
def extrinsic_curvature_trace_gradient_isotropic(x, mass):
return np.zeros(3)
def conformal_factor_isotropic(x, mass):
r = np.linalg.norm(x)
return 1. + 0.5 * mass / r
def conformal_factor_gradient_isotropic(x, mass):
r = np.linalg.norm(x)
return -0.5 * mass * x / r**3
def lapse_times_conformal_factor_isotropic(x, mass):
r = np.linalg.norm(x)
return 1. - 0.5 * mass / r
def lapse_times_conformal_factor_gradient_isotropic(x, mass):
r = np.linalg.norm(x)
return 0.5 * mass * x / r**3
def lapse_isotropic(x, mass):
return (lapse_times_conformal_factor_isotropic(x, mass) /
conformal_factor_isotropic(x, mass))
def shift_background(x, mass):
return np.zeros(3)
def longitudinal_shift_background_minus_dt_conformal_metric(x, mass):
return np.zeros((3, 3))
def shift_isotropic(x, mass):
return np.zeros(3)
def shift_strain_isotropic(x, mass):
return np.zeros((3, 3))
def longitudinal_shift_isotropic(x, mass):
return np.zeros((3, 3))
def shift_dot_extrinsic_curvature_trace_gradient_isotropic(x, mass):
return 0.
def longitudinal_shift_minus_dt_conformal_metric_square_isotropic(x, mass):
return 0.
def longitudinal_shift_minus_dt_conformal_metric_over_lapse_square_isotropic(
x, mass):
return 0.
# Matter sources
def energy_density(x, mass):
return 0.
def stress_trace(x, mass):
return 0.
def momentum_density(x, mass):
return np.zeros(3)
# Fixed sources
def conformal_factor_fixed_source(x, mass):
return 0.
def lapse_times_conformal_factor_fixed_source(x, mass):
return 0.
def shift_fixed_source(x, mass):
return np.zeros(3)
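# Small self-check sketch (added for illustration; not part of the original
# test helpers). For mass=1 at x=(2,0,0): r=2, so the conformal factor is
# 1 + 0.5/2 = 1.25, lapse*psi is 1 - 0.5/2 = 0.75, and the lapse is
# 0.75 / 1.25 = 0.6, i.e. (1 - M/2r) / (1 + M/2r).
if __name__ == '__main__':
    x = np.array([2., 0., 0.])
    assert np.isclose(conformal_factor_isotropic(x, 1.), 1.25)
    assert np.isclose(lapse_isotropic(x, 1.), 0.6)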
|
py | 1a30ba368b79e15be22447dcdc619bec936cddc4 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Benchmark Memory functions.
"""
import re
import subprocess
import sys
import six
from hardware.benchmark import utils
def get_available_memory():
"""Return the total amount of available memory, in bytes."""
with open('/proc/meminfo', 'r') as meminfo:
for line in meminfo:
if line.startswith('MemFree:'):
return int(line.split()[1]) * 1024
return -1
def check_mem_size(block_size, cpu_count):
"""Check if a test can run with a given block size and cpu count."""
dsplit = re.compile(r'\d+')
ssplit = re.compile(r'[A-Z]+')
unit = ssplit.findall(block_size)
unit_in_bytes = 1
if unit[0] == 'K':
unit_in_bytes = 1024
elif unit[0] == 'M':
unit_in_bytes = 1024 * 1024
elif unit[0] == 'G':
unit_in_bytes = 1024 * 1024 * 1024
size_in_bytes = (unit_in_bytes * int(dsplit.findall(block_size)[0])
* cpu_count)
if size_in_bytes > get_available_memory():
return False
return True
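# Worked example (illustrative, not part of the original module):
# check_mem_size('128M', 4) needs 4 * 128 * 1024 * 1024 = 536870912 bytes
# (512 MiB) of MemFree in /proc/meminfo before the benchmark is allowed to run.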
def run_sysbench_memory_threaded(hw_lst, max_time, block_size, cpu_count,
processor_num=None):
"""Running memtest on a processor."""
check_mem = check_mem_size(block_size, cpu_count)
taskset = ''
if processor_num is not None:
if check_mem is False:
msg = ("Avoid Benchmarking memory @%s "
"from CPU %d, not enough memory\n")
sys.stderr.write(msg % (block_size, processor_num))
return
sys.stderr.write('Benchmarking memory @%s from CPU %d'
' for %d seconds (%d threads)\n' %
(block_size, processor_num, max_time, cpu_count))
taskset = 'taskset %s' % hex(1 << processor_num)
else:
if check_mem is False:
msg = ("Avoid Benchmarking memory @%s "
"from all CPUs, not enough memory\n")
sys.stderr.write(msg % block_size)
return
sys.stderr.write('Benchmarking memory @%s from all CPUs '
'for %d seconds (%d threads)\n'
% (block_size, max_time, cpu_count))
_cmd = ('%s sysbench --max-time=%d --max-requests=100000000 '
'--num-threads=%d --test=memory --memory-block-size=%s run')
sysbench_cmd = subprocess.Popen(_cmd % (taskset, max_time,
cpu_count, block_size),
shell=True, stdout=subprocess.PIPE)
for line in sysbench_cmd.stdout:
if isinstance(line, six.binary_type):
line = line.decode()
if "transferred" in line:
_, right = line.rstrip('\n').replace(' ', '').split('(')
perf, _ = right.split('.')
if processor_num is not None:
hw_lst.append(('cpu',
'logical_%d' % processor_num,
'bandwidth_%s' % block_size,
perf))
else:
hw_lst.append(('cpu', 'logical',
'threaded_bandwidth_%s' % block_size,
perf))
def run_sysbench_memory_forked(hw_lst, max_time, block_size, cpu_count):
"""Running forked memtest on a processor."""
if check_mem_size(block_size, cpu_count) is False:
cmd = ('Avoid benchmarking memory @%s from all'
' CPUs (%d forked processes), not enough memory\n')
sys.stderr.write(cmd % (block_size, cpu_count))
return
sys.stderr.write('Benchmarking memory @%s from all CPUs'
' for %d seconds (%d forked processes)\n'
% (block_size, max_time, cpu_count))
sysbench_cmd = '('
for _ in range(cpu_count):
_cmd = ('sysbench --max-time=%d --max-requests=100000000 '
'--num-threads=1 --test=memory --memory-block-size=%s run &')
sysbench_cmd += _cmd % (max_time, block_size)
    # str.rstrip returns a new string, so the result must be assigned back.
    sysbench_cmd = sysbench_cmd.rstrip('&')
sysbench_cmd += ')'
global_perf = 0
process = subprocess.Popen(
sysbench_cmd, shell=True, stdout=subprocess.PIPE)
for line in process.stdout:
if isinstance(line, six.binary_type):
line = line.decode()
if "transferred" in line:
_, right = line.rstrip('\n').replace(' ', '').split('(')
perf, _ = right.split('.')
global_perf += int(perf)
hw_lst.append(('cpu', 'logical', 'forked_bandwidth_%s' %
(block_size), str(global_perf)))
def mem_perf(hw_lst, max_time=5):
"""Report the memory performance."""
all_cpu_testing_time = 5
block_size_list = ['1K', '4K', '1M', '16M', '128M', '1G', '2G']
logical = utils.get_value(hw_lst, 'cpu', 'logical', 'number')
physical = utils.get_value(hw_lst, 'cpu', 'physical', 'number')
if physical:
eta = int(physical) * len(block_size_list) * max_time
eta += 2 * (all_cpu_testing_time * len(block_size_list))
sys.stderr.write('Memory Performance: %d logical CPU'
' to test (ETA: %d seconds)\n'
% (int(physical), int(eta)))
for cpu_nb in utils.get_one_cpu_per_socket(hw_lst):
for block_size in block_size_list:
run_sysbench_memory_threaded(hw_lst, max_time,
block_size, 1, cpu_nb)
    # There is no need to test fork vs thread
    # if only a single logical cpu is present
if int(logical) > 1:
for block_size in block_size_list:
run_sysbench_memory_threaded(hw_lst, all_cpu_testing_time,
block_size, int(logical))
for block_size in block_size_list:
run_sysbench_memory_forked(hw_lst, all_cpu_testing_time,
block_size, int(logical))
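# ETA arithmetic sketch (illustrative): with 2 physical CPUs, the 7 block
# sizes above and max_time=5, eta = 2 * 7 * 5 + 2 * (5 * 7) = 140 seconds --
# one threaded pass per socket plus the all-CPU threaded and forked passes.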
|
py | 1a30ba4af27a0e287d5e46d9899c89b52cd03209 | # -*- coding: utf-8 -*-
"""
********************************
reslib.processing.loops
********************************
This module provides convenience functions for looping over data, typically a DataFrame (or reslib.data.Table), and automating things like progress reporting (via tqdm or status files) or parallelization.
:copyright: (c) 2019 by Maclean Gaulin.
:license: MIT, see LICENSE for more details.
""" |
py | 1a30bb811b7dbb946a3fc858ca1704ee8fe83a55 | # uncompyle6 version 3.3.1
# Python bytecode 3.6 (3379)
# Decompiled from: Python 3.6.2 (v3.6.2:5fd33b5926, Jul 16 2017, 20:11:06)
# [GCC 4.2.1 (Apple Inc. build 5666) (dot 3)]
# Embedded file name: ../../shared/problems/CR/problem1068_CR.py
# Compiled at: 2019-03-13 18:01:49
# Size of source mod 2**32: 1148 bytes
__author__ = 'patras'
from domain_chargeableRobot import *
from timer import DURATION
from state import state
DURATION.TIME = {'put':2,
'take':2,
'perceive':2,
'charge':2,
'move':2,
'moveToEmergency':2,
'moveCharger':2,
'addressEmergency':2,
'wait':2}
DURATION.COUNTER = {'put':2,
'take':2,
'perceive':2,
'charge':2,
'move':2,
'moveToEmergency':2,
'moveCharger':2,
'addressEmergency':2,
'wait':2}
rv.LOCATIONS = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
rv.EDGES = {1:[2], 2:[1, 3], 3:[2, 4], 4:[5, 3, 6, 7], 5:[4, 9], 6:[4, 10], 7:[4, 8], 8:[7], 9:[5], 10:[6]}
rv.OBJECTS = ['o1']
rv.ROBOTS = [
'r1', 'r2']
def ResetState():
state.loc = {'r1':1,
'r2':1}
state.charge = {'r1':2, 'r2':3}
state.load = {'r1':NIL, 'r2':NIL}
state.pos = {'c1':'r2', 'o1':5}
state.containers = {1:[], 2:[], 3:[], 4:[], 5:['o1'], 6:[], 7:[], 8:[], 9:[], 10:[]}
state.emergencyHandling = {'r1':False, 'r2':False}
state.view = {}
for l in rv.LOCATIONS:
state.view[l] = False
tasks = {5: [['fetch', 'r1', 'o1']]}
eventsEnv = {}
# okay decompiling __pycache__/problem1068_CR.cpython-36.pyc
|
py | 1a30bbea337233d6a726817bfaa16aa586c048b3 | #
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import http.client as hc
import base64
import json
import os
import ssl
import sys
import urllib
import warnings
import io
import tempfile as tf
from time import sleep
from saspy.sasexceptions import (SASHTTPauthenticateError, SASHTTPconnectionError, SASHTTPsubmissionError)
import logging
logger = logging.getLogger('saspy')
try:
import pandas as pd
import numpy as np
except ImportError:
pass
class SASconfigHTTP:
'''
This object is not intended to be used directly. Instantiate a SASsession object instead
'''
def __init__(self, session, **kwargs):
self._kernel = kwargs.get('kernel', None)
SAScfg = session._sb.sascfg.SAScfg
self.name = session._sb.sascfg.name
cfg = getattr(SAScfg, self.name)
self._token = cfg.get('authtoken', None)
self.url = cfg.get('url', '')
self.ip = cfg.get('ip', '')
self.port = cfg.get('port', None)
self.ctxname = cfg.get('context', '')
self.ctx = {}
self.options = cfg.get('options', [])
self.ssl = cfg.get('ssl', True)
self.verify = cfg.get('verify', True)
self.timeout = cfg.get('timeout', None)
user = cfg.get('user', '')
pw = cfg.get('pw', '')
client_id = cfg.get('client_id', None)
client_secret = cfg.get('client_secret', '')
authcode = cfg.get('authcode', None)
jwt = cfg.get('jwt', None)
self.encoding = cfg.get('encoding', '')
self.authkey = cfg.get('authkey', '')
self._prompt = session._sb.sascfg._prompt
self.lrecl = cfg.get('lrecl', None)
self.inactive = cfg.get('inactive', 120)
try:
self.outopts = getattr(SAScfg, "SAS_output_options")
self.output = self.outopts.get('output', 'html5')
except:
self.output = 'html5'
if self.output.lower() not in ['html', 'html5']:
logger.warning("Invalid value specified for SAS_output_options. Using the default of HTML5")
self.output = 'html5'
# GET Config options
try:
self.cfgopts = getattr(SAScfg, "SAS_config_options")
except:
self.cfgopts = {}
lock = self.cfgopts.get('lock_down', True)
# in lock down mode, don't allow runtime overrides of option values from the config file.
self.verbose = self.cfgopts.get('verbose', True)
self.verbose = kwargs.get('verbose', self.verbose)
inurl = kwargs.get('url', None)
if inurl is not None:
if lock and len(self.url):
logger.warning("Parameter 'url' passed to SAS_session was ignored due to configuration restriction.")
else:
self.url = inurl
inip = kwargs.get('ip', None)
if inip is not None:
if lock and len(self.ip):
logger.warning("Parameter 'ip' passed to SAS_session was ignored due to configuration restriction.")
else:
self.ip = inip
inport = kwargs.get('port', None)
if inport is not None:
if lock and self.port:
logger.warning("Parameter 'port' passed to SAS_session was ignored due to configuration restriction.")
else:
self.port = inport
inctxname = kwargs.get('context', None)
if inctxname is not None:
if lock and len(self.ctxname):
logger.warning("Parameter 'context' passed to SAS_session was ignored due to configuration restriction.")
else:
self.ctxname = inctxname
inoptions = kwargs.get('options', None)
if inoptions is not None:
if lock and len(self.options):
logger.warning("Parameter 'options' passed to SAS_session was ignored due to configuration restriction.")
else:
self.options = inoptions
inssl = kwargs.get('ssl', None)
if inssl is not None:
if lock and self.ssl:
logger.warning("Parameter 'ssl' passed to SAS_session was ignored due to configuration restriction.")
else:
self.ssl = bool(inssl)
inver = kwargs.get('verify', None)
if inver is not None:
if lock and self.verify:
logger.warning("Parameter 'verify' passed to SAS_session was ignored due to configuration restriction.")
else:
self.verify = bool(inver)
intout = kwargs.get('timeout', None)
if intout is not None:
if lock and self.timeout:
logger.warning("Parameter 'timeout' passed to SAS_session was ignored due to configuration restriction.")
else:
self.timeout = intout
inencoding = kwargs.get('encoding', 'NoOverride')
if inencoding != 'NoOverride':
if lock and len(self.encoding):
logger.warning("Parameter 'encoding' passed to SAS_session was ignored due to configuration restriction.")
else:
self.encoding = inencoding
if not self.encoding or self.encoding != 'utf_8':
self.encoding = 'utf_8'
inautht = kwargs.get('authtoken', None)
if inautht is not None:
self._token = inautht
injwt = kwargs.get('jwt', None)
if injwt is not None:
jwt = injwt
inauthc = kwargs.get('authcode', None)
if inauthc is not None:
authcode = inauthc
incis = kwargs.get('client_secret', None)
if incis is not None:
if lock and client_secret:
logger.warning("Parameter 'client_secret' passed to SAS_session was ignored due to configuration restriction.")
else:
client_secret = incis
incid = kwargs.get('client_id', None)
if incid is not None:
if lock and client_id:
logger.warning("Parameter 'client_id' passed to SAS_session was ignored due to configuration restriction.")
else:
client_id = incid
if client_id is None:
client_id = 'SASPy'
use_authcode = False
else:
use_authcode = True
inlrecl = kwargs.get('lrecl', None)
if inlrecl is not None:
if lock and self.lrecl:
logger.warning("Parameter 'lrecl' passed to SAS_session was ignored due to configuration restriction.")
else:
self.lrecl = inlrecl
if not self.lrecl:
self.lrecl = 1048576
inito = kwargs.get('inactive', None)
if inito is not None:
if lock and self.inactive:
logger.warning("Parameter 'inactive' passed to SAS_session was ignored due to configuration restriction.")
else:
self.inactive = inito
inak = kwargs.get('authkey', '')
if len(inak) > 0:
if lock and len(self.authkey):
logger.warning("Parameter 'authkey' passed to SAS_session was ignored due to configuration restriction.")
else:
self.authkey = inak
if len(self.url) > 0:
http = self.url.split('://')
hp = http[1].split(':')
if http[0].lower() in ['http', 'https']:
self.ip = hp[0]
self.port = hp[1] if len(hp) > 1 else self.port
self.ssl = True if 's' in http[0].lower() else False
else:
logger.warning("Parameter 'url' not in recognized format. Expeting 'http[s]://host[:port]'. Ignoring parameter.")
while len(self.ip) == 0:
if not lock:
self.ip = self._prompt("Please enter the host (ip address) you are trying to connect to: ")
if self.ip is None:
self._token = None
raise RuntimeError("No IP address provided.")
else:
                logger.fatal("In lockdown mode and missing ip address in the config named: "+self.name)
raise RuntimeError("No IP address provided.")
if not self.port:
if self.ssl:
self.port = 443
else:
self.port = 80
if not self._token and not authcode and not jwt:
found = False
if self.authkey:
if os.name == 'nt':
pwf = os.path.expanduser('~')+os.sep+'_authinfo'
else:
pwf = os.path.expanduser('~')+os.sep+'.authinfo'
try:
fid = open(pwf, mode='r')
for line in fid:
if line.startswith(self.authkey):
user = line.partition('user')[2].lstrip().partition(' ')[0].partition('\n')[0]
pw = line.partition('password')[2].lstrip().partition(' ')[0].partition('\n')[0]
found = True
break
fid.close()
except OSError as e:
logger.warning('Error trying to read authinfo file:'+pwf+'\n'+str(e))
pass
except:
pass
if not found:
logger.warning('Did not find key '+self.authkey+' in authinfo file:'+pwf+'\n')
inuser = kwargs.get('user', '')
if len(inuser) > 0:
if lock and len(user):
logger.warning("Parameter 'user' passed to SAS_session was ignored due to configuration restriction.")
else:
user = inuser
inpw = kwargs.get('pw', '')
if len(inpw) > 0:
if lock and len(pw):
logger.warning("Parameter 'pw' passed to SAS_session was ignored due to configuration restriction.")
else:
pw = inpw
if use_authcode:
code_pw = 'authcode'
else:
code_pw = ''
if len(user) == 0:
msg = "To connect to Viya you need either an authcode or a userid/pw. Neither were provided.\n"
msg += "Please enter which one you want to enter next. Type one of these now: [default=authcode | userid]: "
while code_pw.lower() not in ['userid','authcode']:
code_pw = self._prompt(msg)
if code_pw == '':
code_pw = 'authcode'
if code_pw is None:
self._token = None
raise RuntimeError("Neither authcode nor userid provided.")
if code_pw.lower() == 'authcode':
purl = "/SASLogon/oauth/authorize?client_id={}&response_type=code".format(client_id)
if len(self.url) > 0:
purl = self.url+purl
else:
purl = "http{}://{}:{}{}".format('s' if self.ssl else '', self.ip, self.port, purl)
msg = "The default url to authenticate with would be {}\n".format(purl)
msg += "Please enter authcode: "
authcode = self._prompt(msg)
if authcode is None:
self._token = None
raise RuntimeError("No authcode provided.")
else:
while len(user) == 0:
user = self._prompt("Please enter userid: ")
if user is None:
self._token = None
raise RuntimeError("No userid provided.")
while len(pw) == 0:
pw = self._prompt("Please enter password: ", pw = True)
if pw is None:
self._token = None
raise RuntimeError("No password provided.")
if self.ssl:
if self.verify:
                # handle having a self-signed certificate by default on Viya without copies on the client; still SSL, just not verifiable
try:
self.HTTPConn = hc.HTTPSConnection(self.ip, self.port, timeout=self.timeout)
if not self._token:
self._token = self._authenticate(user, pw, authcode, client_id, client_secret, jwt)
except ssl.SSLError as e:
logger.warning("SSL certificate verification failed, creating an unverified SSL connection. Error was:"+str(e))
self.HTTPConn = hc.HTTPSConnection(self.ip, self.port, timeout=self.timeout, context=ssl._create_unverified_context())
logger.warning("You can set 'verify=False' to get rid of this message ")
if not self._token:
self._token = self._authenticate(user, pw, authcode, client_id, client_secret, jwt)
else:
self.HTTPConn = hc.HTTPSConnection(self.ip, self.port, timeout=self.timeout, context=ssl._create_unverified_context())
if not self._token:
self._token = self._authenticate(user, pw, authcode, client_id, client_secret, jwt)
else:
self.HTTPConn = hc.HTTPConnection(self.ip, self.port, timeout=self.timeout)
if not self._token:
self._token = self._authenticate(user, pw, authcode, client_id, client_secret, jwt)
if not self._token:
logger.error("Could not acquire an Authentication Token")
return
# GET Contexts
contexts = self._get_contexts()
if contexts == None:
self._token = None
raise SASHTTPconnectionError(msg="No Contexts found on Compute Service at ip="+self.ip)
ctxnames = []
for i in range(len(contexts)):
ctxnames.append(contexts[i].get('name'))
if len(ctxnames) == 0:
self._token = None
raise SASHTTPconnectionError(msg="No Contexts found on Compute Service at ip="+self.ip)
if len(self.ctxname) == 0:
if len(ctxnames) == 1:
self.ctxname = ctxnames[0]
logger.info("Using SAS Context: " + self.ctxname)
else:
try:
ctxname = self._prompt("Please enter the SAS Context you wish to run. Available contexts are: " +
str(ctxnames)+" ")
if ctxname is None:
self._token = None
raise RuntimeError("No SAS Context provided.")
else:
self.ctxname = ctxname
except:
raise SASHTTPconnectionError(msg=
"SAS Context specified '"+self.ctxname+"' was not found. Prompting failed. Available contexts were: " +
str(ctxnames)+" ")
while self.ctxname not in ctxnames:
if not lock:
''' this was original code before compute was production. users can't create these on the fly.
createctx = self._prompt(
"SAS Context specified was not found. Do you want to create a new context named "+self.ctxname+" [Yes|No]?")
if createctx.upper() in ('YES', 'Y'):
contexts = self._create_context(user)
else:
'''
try:
ctxname = self._prompt(
"SAS Context specified was not found. Please enter the SAS Context you wish to run. Available contexts are: " +
str(ctxnames)+" ")
if ctxname is None:
self._token = None
raise SASHTTPconnectionError(msg=
"SAS Context specified '"+self.ctxname+"' was not found. Prompting failed. Available contexts were: " +
str(ctxnames)+" ")
else:
self.ctxname = ctxname
except:
raise SASHTTPconnectionError(msg=
"SAS Context specified '"+self.ctxname+"' was not found. Prompting failed. Available contexts were: " +
str(ctxnames)+" ")
else:
msg = "SAS Context specified in the SASconfig ("+self.ctxname+") was not found on this server, and because "
msg += "the SASconfig is in lockdown mode, there is no prompting for other contexts. No connection established."
logger.error(msg)
self._token = None
raise RuntimeError("No SAS Context provided.")
for i in range(len(contexts)):
if contexts[i].get('name') == self.ctxname:
self.ctx = contexts[i]
break
if self.ctx == {}:
raise SASHTTPconnectionError(msg="No context information returned for context {}\n{}".format(self.ctxname, contexts))
return
def _authenticate(self, user, pw, authcode, client_id, client_secret, jwt):
#import pdb; pdb.set_trace()
if authcode:
uauthcode = urllib.parse.quote(authcode)
uclient_id = urllib.parse.quote(client_id)
uclient_secret = urllib.parse.quote(client_secret)
d1 = ("grant_type=authorization_code&code="+uauthcode+
"&client_id="+uclient_id+"&client_secret="+uclient_secret).encode(self.encoding)
headers = {"Accept":"application/vnd.sas.compute.session+json","Content-Type":"application/x-www-form-urlencoded"}
elif jwt:
ujwt = urllib.parse.quote(jwt)
d1 = "grant_type=urn:ietf:params:oauth:grant-type:jwt-bearer&assertion="+ujwt
client = "Basic "+base64.encodebytes((client_id+":").encode(self.encoding)).splitlines()[0].decode(self.encoding)
headers = {"Accept":"application/vnd.sas.compute.session+json",
"Content-Type":"application/x-www-form-urlencoded",
"Authorization":client}
else:
uuser = urllib.parse.quote(user)
upw = urllib.parse.quote(pw)
d1 = ("grant_type=password&username="+uuser+"&password="+upw).encode(self.encoding)
client = "Basic "+base64.encodebytes("sas.tkmtrb:".encode(self.encoding)).splitlines()[0].decode(self.encoding)
#client = "Basic "+base64.encodebytes((client_id+":").encode(self.encoding)).splitlines()[0].decode(self.encoding)
headers = {"Accept":"application/vnd.sas.compute.session+json","Content-Type":"application/x-www-form-urlencoded",
"Authorization":client}
# POST AuthToken
conn = self.HTTPConn; conn.connect()
try:
conn.request('POST', "/SASLogon/oauth/token", body=d1, headers=headers)
req = conn.getresponse()
except:
#print("Failure in GET AuthToken. Could not connect to the logon service. Exception info:\n"+str(sys.exc_info()))
msg="Failure in GET AuthToken. Could not connect to the logon service. Exception info:\n"+str(sys.exc_info())
raise SASHTTPauthenticateError(msg)
#return None
status = req.status
resp = req.read()
conn.close()
if status > 299:
#print("Failure in GET AuthToken. Status="+str(status)+"\nResponse="+resp.decode(self.encoding))
msg="Failure in GET AuthToken. Status="+str(status)+"\nResponse="+str(resp)
raise SASHTTPauthenticateError(msg)
#return None
js = json.loads(resp.decode(self.encoding))
token = js.get('access_token')
return token
def _get_contexts(self):
#import pdb; pdb.set_trace()
# GET Contexts
conn = self.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json",
"Accept-Item":"application/vnd.sas.compute.context.summary+json",
"Authorization":"Bearer "+self._token}
conn.request('GET', "/compute/contexts?limit=999999", headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
if status > 299:
fmsg = "Failure in GET Contexts. Status="+str(status)+"\nResponse="+resp.decode(self.encoding)
raise SASHTTPconnectionError(msg=fmsg)
js = json.loads(resp.decode(self.encoding))
contexts = js.get('items')
return contexts
def _create_context(self, user):
# GET Contexts
conn = self.HTTPConn; conn.connect()
d1 = '{"name": "SASPy","version": 1,"description": "SASPy Context","attributes": {"sessionInactiveTimeout": 60 },'
d1 += '"launchContext": {"contextName": "'+self.ctxname+'"},"launchType": "service","authorizedUsers": ["'+user+'"]}'
headers={"Accept":"application/vnd.sas.compute.context+json",
"Content-Type":"application/vnd.sas.compute.context.request+json",
"Authorization":"Bearer "+self._token}
conn.request('POST', "/compute/contexts", body=d1, headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
if status > 299:
logger.error("Failure in POST Context. Status="+str(status)+"\nResponse="+resp.decode(self.encoding))
return None
contexts = self._get_contexts()
return contexts
class SASsessionHTTP():
'''
The SASsession object is the main object to instantiate and provides access to the rest of the functionality.
cfgname - value in SAS_config_names List of the sascfg.py file
kernel - None - internal use when running the SAS_kernel notebook
user - userid to use to connect to Compute Service
pw - pw for the userid being used to connect to Compute Service
ip - overrides IP Dict entry of cfgname in sascfg.py file
port - overrides Port Dict entry of cfgname in sascfg.py file
context - overrides Context Dict entry of cfgname in sascfg.py file
options - overrides Options Dict entry of cfgname in sascfg.py file
encoding - This is the python encoding value that matches the SAS session encoding of the Compute Server you are connecting to
'''
#def __init__(self, cfgname: str ='', kernel: '<SAS_kernel object>' =None, user: str ='', pw: str ='',
# ip: str ='', port: int ='', context: str ='', options: list =[]) -> '<SASsession object>':
def __init__(self, **kwargs):
self.pid = None
self._session = None
self._sb = kwargs.get('sb', None)
self._log = "\nNo SAS session established, something must have failed trying to connect\n"
self.sascfg = SASconfigHTTP(self, **kwargs)
if self.sascfg._token:
self._startsas()
else:
None
def __del__(self):
if self._session:
self._endsas()
self._sb.SASpid = None
return
def _startsas(self):
if self.pid:
return self.pid
if len(self.sascfg.options):
options = '[';
for opt in self.sascfg.options:
options += '"'+opt+'", '
options = (options.rpartition(','))[0]+']'
else:
options = '[]'
# POST Session
uri = None
for ld in self.sascfg.ctx.get('links'):
if ld.get('method') == 'POST':
uri = ld.get('uri')
break
if not uri:
raise SASHTTPconnectionError(msg=
"POST uri not found in context info. You may not have permission to use this context.\n{}".format(self.sascfg.ctx))
conn = self.sascfg.HTTPConn; conn.connect()
d1 = '{"name":"'+self.sascfg.ctxname+'", "description":"saspy session", "version":1, "environment":{"options":'+options+'}'
d1 += ',"attributes": {"sessionInactiveTimeout": '+str(int(float(self.sascfg.inactive)*60))+'}}'
headers={"Accept":"application/vnd.sas.compute.session+json","Content-Type":"application/vnd.sas.compute.session.request+json",
"Authorization":"Bearer "+self.sascfg._token}
try:
conn.request('POST', uri, body=d1, headers=headers)
req = conn.getresponse()
except:
#print("Could not acquire a SAS Session for context: "+self.sascfg.ctxname)
raise SASHTTPconnectionError(msg="Could not acquire a SAS Session for context: "+self.sascfg.ctxname+". Exception info:\n"+str(sys.exc_info()))
#return None
status = req.status
resp = req.read()
conn.close()
if status > 299:
#print("Failure in POST Session \n"+resp.decode(self.sascfg.encoding))
#print("Could not acquire a SAS Session for context: "+self.sascfg.ctxname)
msg="Could not acquire a SAS Session for context: "+self.sascfg.ctxname+". Exception info:\nStatus="+str(status)+"\nResponse="+str(resp)
raise SASHTTPconnectionError(msg)
#return None
self._session = json.loads(resp.decode(self.sascfg.encoding))
if self._session == None:
logger.error("Could not acquire a SAS Session for context: "+self.sascfg.ctxname)
return None
#GET Session uri's once
for ld in self._session.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'log':
self._uri_log = ld.get('uri')
elif ld.get('method') == 'GET' and ld.get('rel') == 'listing':
self._uri_lst = ld.get('uri')
elif ld.get('method') == 'GET' and ld.get('rel') == 'results':
self._uri_ods = ld.get('uri')
elif ld.get('method') == 'GET' and ld.get('rel') == 'state':
self._uri_state = ld.get('uri')
elif ld.get('method') == 'POST' and ld.get('rel') == 'execute':
self._uri_exe = ld.get('uri')
elif ld.get('method') == 'PUT' and ld.get('rel') == 'cancel':
self._uri_can = ld.get('uri')
elif ld.get('method') == 'DELETE' and ld.get('rel') == 'delete':
self._uri_del = ld.get('uri')
elif ld.get('method') == 'GET' and ld.get('rel') == 'files':
self._uri_files = ld.get('uri')
self.pid = self._session.get('id')
self._log = self._getlog()
        # POST Job - let's see if the server really came up, because you can't tell from what happened so far
conn = self.sascfg.HTTPConn; conn.connect()
jcode = json.dumps('\n')
d1 = '{"code":['+jcode+']}'
headers={"Accept":"application/json","Content-Type":"application/vnd.sas.compute.job.request+json",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('POST', self._uri_exe, body=d1, headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
try:
jobid = json.loads(resp.decode(self.sascfg.encoding))
except:
jobid = None
if not jobid or status > 299:
logger.error("Compute server had issues starting:\n")
for key in jobid:
logger.error(key+"="+str(jobid.get(key)))
return None
self._sb.SESSION_ID = self.pid
ll = self.submit("options svgtitle='svgtitle'; options validvarname=any validmemname=extend pagesize=max nosyntaxcheck; ods graphics on;", "text")
if self.sascfg.verbose:
logger.info("SAS server started using Context "+self.sascfg.ctxname+" with SESSION_ID="+self.pid)
return self.pid
def _endsas(self):
rc = 0
if self._session:
# DELETE Session
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/json","Authorization":"Bearer "+self.sascfg._token}
conn.request('DELETE', self._uri_del, headers=headers)
req = conn.getresponse()
resp = req.read()
conn.close()
if self.sascfg.verbose:
logger.info("SAS server terminated for SESSION_ID="+self._session.get('id'))
self._session = None
self.pid = None
self._sb.SASpid = None
return rc
def _getlog(self, jobid=None):
start = 0
logr = ''
# GET Log
if jobid:
for ld in jobid.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'log':
uri = ld.get('uri')
break
else:
uri = self._uri_log
while True:
# GET Log
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', uri+"?start="+str(start)+"&limit="+str(start+1000), headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
try:
js = json.loads(resp.decode(self.sascfg.encoding))
log = js.get('items')
lines = len(log)
except:
lines = None
if not lines:
break
start += lines
for line in log:
logr += line.get('line')+'\n'
if jobid != None:
self._log += logr.replace(chr(12), chr(10))
if logr.count('ERROR:') > 0:
warnings.warn("Noticed 'ERROR:' in LOG, you ought to take a look and see if there was a problem")
self._sb.check_error_log = True
return logr
def _getlst(self, jobid=None):
htm = ''
i = 0
# GET the list of results
if jobid:
for ld in jobid.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'results':
uri = ld.get('uri')+"?includeTypes=ODS"
break
else:
uri = self._uri_lst
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', uri, headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
try:
js = json.loads(resp.decode(self.sascfg.encoding))
results = js.get('items')
if not results:
results = []
except:
results = []
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
while i < len(results):
# GET an ODS Result
if results[i].get('type') == 'ODS' and len(results[i].get('links')) > 0:
conn.request('GET', results[i].get('links')[0].get('href'), headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
htm += resp.decode(self.sascfg.encoding)
i += 1
conn.close()
lstd = htm.replace(chr(12), chr(10)).replace('<body class="c body">',
'<body class="l body">').replace("font-size: x-small;",
"font-size: normal;")
return lstd
def _getlsttxt(self, jobid=None):
start = 0
lstr = ''
# GET Log
if jobid:
for ld in jobid.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'listing':
uri = ld.get('uri')
break
else:
uri = self._uri_lst
while True:
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', uri+"?start="+str(start)+"&limit="+str(start+1000), headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
try:
js = json.loads(resp.decode(self.sascfg.encoding))
lst = js.get('items')
lines = len(lst)
except:
lines = None
if not lines:
break
start += lines
for line in lst:
lstr += line.get('line')+'\n'
return lstr
def _asubmit(self, code, results="html"):
#odsopen = json.dumps("ods listing close;ods html5 (id=saspy_internal) options(bitmap_mode='inline') device=png; ods graphics on / outputfmt=png;\n")
#odsopen = json.dumps("ods listing close;ods html5 (id=saspy_internal) options(bitmap_mode='inline') device=svg; ods graphics on / outputfmt=png;\n")
#odsclose = json.dumps("ods html5 (id=saspy_internal) close;ods listing;\n")
odsopen = json.dumps("ods listing close;ods "+self.sascfg.output+" (id=saspy_internal) options(bitmap_mode='inline') device=svg style="+self._sb.HTML_Style+"; ods graphics on / outputfmt=png;\n")
odsclose = json.dumps("ods "+self.sascfg.output+" (id=saspy_internal) close;ods listing;\n")
ods = True;
if results.upper() != "HTML":
ods = False
odsopen = '""'
odsclose = '""'
# POST Job
conn = self.sascfg.HTTPConn; conn.connect()
jcode = json.dumps(code)
d1 = '{"code":['+odsopen+','+jcode+','+odsclose+']}'
headers={"Accept":"application/json","Content-Type":"application/vnd.sas.compute.job.request+json",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('POST', self._uri_exe, body=d1, headers=headers)
req = conn.getresponse()
resp = req.read()
conn.close()
jobid = json.loads(resp.decode(self.sascfg.encoding))
return jobid
def _jobstate(self, jobid):
uri = None
for ld in jobid.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'state':
uri = ld.get('uri')
break
if not uri:
print("No job found")
return None
conn = self.sascfg.HTTPConn;
headers = {"Accept":"text/plain", "Authorization":"Bearer "+self.sascfg._token}
conn.connect()
conn.request('GET', uri, headers=headers)
req = conn.getresponse()
resp = req.read()
conn.close()
return resp
def submit(self, code: str, results: str ="html", prompt: dict = None, **kwargs) -> dict:
'''
code - the SAS statements you want to execute
results - format of results, HTML is default, TEXT is the alternative
prompt - dict of names:flags to prompt for; create marco variables (used in submitted code), then keep or delete
The keys are the names of the macro variables and the boolean flag is to either hide what you type and delete
the macros, or show what you type and keep the macros (they will still be available later)
for example (what you type for pw will not be displayed, user and dsname will):
results = sas.submit(
"""
libname tera teradata server=teracop1 user=&user pw=&pw;
proc print data=tera.&dsname (obs=10); run;
""" ,
prompt = {'user': False, 'pw': True, 'dsname': False}
)
Returns - a Dict containing two keys:values, [LOG, LST]. LOG is text and LST is 'results' (HTML or TEXT)
NOTE: to view HTML results in the ipykernel, issue: from IPython.display import HTML and use HTML() instead of print()
        i.e.: results = sas.submit("data a; x=1; run; proc print; run;")
print(results['LOG'])
HTML(results['LST'])
'''
prompt = prompt if prompt is not None else {}
printto = kwargs.pop('undo', False)
#odsopen = json.dumps("ods listing close;ods html5 (id=saspy_internal) options(bitmap_mode='inline') device=png; ods graphics on / outputfmt=png;\n")
#odsopen = json.dumps("ods listing close;ods html5 (id=saspy_internal) options(bitmap_mode='inline') device=svg; ods graphics on / outputfmt=png;\n")
#odsclose = json.dumps("ods html5 (id=saspy_internal) close;ods listing;\n")
odsopen = json.dumps("ods listing close;ods "+self.sascfg.output+" (id=saspy_internal) options(bitmap_mode='inline') device=svg style="+self._sb.HTML_Style+"; ods graphics on / outputfmt=png;\n")
odsclose = json.dumps("ods "+self.sascfg.output+" (id=saspy_internal) close;ods listing;\n")
ods = True;
pcodei = ''
pcodeiv = ''
pcodeo = ''
if self._session == None:
logger.error("No SAS process attached. SAS process has terminated unexpectedly.")
return dict(LOG="No SAS process attached. SAS process has terminated unexpectedly.", LST='')
if results.upper() != "HTML":
ods = False
odsopen = '""'
odsclose = '""'
if len(prompt):
pcodei += 'options nosource nonotes;\n'
pcodeo += 'options nosource nonotes;\n'
for key in prompt:
gotit = False
while not gotit:
var = self.sascfg._prompt('Please enter value for macro variable '+key+' ', pw=prompt[key])
if var is None:
raise RuntimeError("No value for prompted macro variable provided.")
if len(var) > 0:
gotit = True
else:
print("Sorry, didn't get a value for that variable.")
if prompt[key]:
pcodei += '%let '+key+'='+var+';\n'
else:
pcodeiv += '%let '+key+'='+var+';\n'
if prompt[key]:
pcodeo += '%symdel '+key+';\n'
pcodei += 'options source notes;\n'
pcodeo += 'options source notes;\n'
# POST Job
conn = self.sascfg.HTTPConn; conn.connect()
jcode = json.dumps(pcodei+pcodeiv+code+'\n'+pcodeo)
d1 = '{"code":['+odsopen+','+jcode+','+odsclose+']}'
headers={"Accept":"application/json","Content-Type":"application/vnd.sas.compute.job.request+json",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('POST', self._uri_exe, body=d1, headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
try:
jobid = json.loads(resp.decode(self.sascfg.encoding))
except:
raise SASHTTPsubmissionError(msg="Problem parsing response from Compute Service.\n Status="+str(status)+"\n Response="+str(resp))
if not jobid or status > 299:
raise SASHTTPsubmissionError(msg="Problem submitting job to Compute Service.\n Status code="+str(jobid.get('httpStatusCode'))+"\n Message="+jobid.get('message'))
for ld in jobid.get('links'):
if ld.get('method') == 'GET' and ld.get('rel') == 'state':
uri = ld.get('uri')
break
conn = self.sascfg.HTTPConn;
headers = {"Accept":"text/plain", "Authorization":"Bearer "+self.sascfg._token}
done = False
delay = kwargs.get('GETstatusDelay' , 0)
excpcnt = kwargs.get('GETstatusFailcnt', 5)
while not done:
try:
while True:
# GET Status for JOB
conn.connect()
conn.request('GET', uri, headers=headers)
req = conn.getresponse()
resp = req.read()
conn.close()
if resp not in [b'running', b'pending']:
done = True
break
sleep(delay)
except (KeyboardInterrupt, SystemExit):
conn.close()
print('Exception caught!')
response = self.sascfg._prompt(
"SAS attention handling not yet supported over HTTP. Please enter (Q) to Quit waiting for results or (C) to continue waiting.")
while True:
if response is None or response.upper() == 'Q':
return dict(LOG='', LST='', BC=True)
if response.upper() == 'C':
break
response = self.sascfg._prompt("Please enter (Q) to Quit waiting for results or (C) to continue waiting.")
except hc.RemoteDisconnected as Dis:
conn.close()
print('RemoteDisconnected Exception caught!\n'+str(Dis))
excpcnt -= 1
if excpcnt < 0:
raise
logd = self._getlog(jobid).replace(chr(12), chr(10))
if ods:
lstd = self._getlst(jobid).replace(chr(12), chr(10))
else:
lstd = self._getlsttxt(jobid).replace(chr(12), chr(10))
trip = lstd.rpartition("/*]]>*/")
if len(trip[1]) > 0 and len(trip[2]) < 200:
lstd = ''
self._sb._lastlog = logd
# issue 294
if printto:
conn = self.sascfg.HTTPConn; conn.connect()
jcode = json.dumps('proc printto;run;\n')
d1 = '{"code":['+jcode+']}'
headers={"Accept":"application/json","Content-Type":"application/vnd.sas.compute.job.request+json",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('POST', self._uri_exe, body=d1, headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
if logd.count('ERROR:') > 0:
warnings.warn("Noticed 'ERROR:' in LOG, you ought to take a look and see if there was a problem")
self._sb.check_error_log = True
return dict(LOG=logd, LST=lstd)
def saslog(self):
'''
this method is used to get the current, full contents of the SASLOG
'''
return self._log
def exist(self, table: str, libref: str ="") -> bool:
'''
table - the name of the SAS Data Set
libref - the libref for the Data Set, defaults to WORK, or USER if assigned
Returns True it the Data Set exists and False if it does not
'''
#can't have an empty libref, so check for user or work
sd = table.strip().replace("'", "''")
if not libref:
# HEAD Libref USER
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*", "Authorization":"Bearer "+self.sascfg._token}
conn.request('HEAD', "/compute/sessions/"+self.pid+"/data/USER", headers=headers)
req = conn.getresponse()
status = req.status
conn.close()
if status == 200:
libref = 'USER'
else:
libref = 'WORK'
code = 'data _null_; e = exist("'
code += libref+"."
code += "'"+sd+"'n"+'"'+");\n"
code += 'v = exist("'
code += libref+"."
code += "'"+sd+"'n"+'"'+", 'VIEW');\n if e or v then e = 1;\n"
code += "te='TABLE_EXISTS='; put te e;run;\n"
ll = self.submit(code, "text")
l2 = ll['LOG'].rpartition("TABLE_EXISTS= ")
l2 = l2[2].partition("\n")
exists = int(l2[0])
return bool(exists)
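        # For reference (illustrative expansion, not part of the original
        # source): for table='cars' and libref='WORK' the submitted code is
        #   data _null_; e = exist("WORK.'cars'n");
        #   v = exist("WORK.'cars'n", 'VIEW'); if e or v then e = 1;
        #   te='TABLE_EXISTS='; put te e;run;
        # and the LOG is then parsed for the TABLE_EXISTS= marker.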
"""
# HEAD Data Table
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*", "Authorization":"Bearer "+self.sascfg._token}
conn.request('HEAD', "/compute/sessions/"+self.pid+"/data/"+libref+"/"+table, headers=headers)
req = conn.getresponse()
status = req.status
conn.close()
if status == 200:
exists = True
else:
exists = False
return exists
"""
def read_csv(self, file: str, table: str, libref: str ="", nosub: bool=False, opts: dict ={}) -> '<SASdata object>':
'''
This method will import a csv file into a SAS Data Set and return the SASdata object referring to it.
        file    - either the OS filesystem path of the file, or HTTP://... for a url accessible file
table - the name of the SAS Data Set to create
libref - the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
opts - a dictionary containing any of the following Proc Import options(datarow, delimiter, getnames, guessingrows)
'''
code = "filename x "
if file.lower().startswith("http"):
code += "url "
code += "\""+file+"\";\n"
code += "proc import datafile=x out="
if len(libref):
code += libref+"."
code += "'"+table.strip().replace("'", "''")+"'n dbms=csv replace; "+self._sb._impopts(opts)+" run;"
if nosub:
print(code)
else:
ll = self.submit(code, "text")
def write_csv(self, file: str, table: str, libref: str ="", nosub: bool =False, dsopts: dict ={}, opts: dict ={}) -> 'The LOG showing the results of the step':
'''
        This method will export a SAS Data Set to a file in CSV format.
file - the OS filesystem path of the file to be created (exported from the SAS Data Set)
table - the name of the SAS Data Set you want to export to a CSV file
libref - the libref for the SAS Data Set.
opts - a dictionary containing any of the following Proc Export options(delimiter, putnames)
'''
code = "filename x \""+file+"\";\n"
code += "options nosource;\n"
code += "proc export data="
if len(libref):
code += libref+"."
code += "'"+table.strip().replace("'", "''")+"'n "+self._sb._dsopts(dsopts)+" outfile=x dbms=csv replace; "
code += self._sb._expopts(opts)+" run\n;"
code += "options source;\n"
if nosub:
print(code)
else:
ll = self.submit(code, "text")
return ll['LOG']
def upload(self, localfile: str, remotefile: str, overwrite: bool = True, permission: str = '', **kwargs):
"""
This method uploads a local file to the SAS servers file system.
localfile - path to the local file to upload
remotefile - path to remote file to create or overwrite
overwrite - overwrite the output file if it exists?
permission - permissions to set on the new file. See SAS Filename Statement Doc for syntax
"""
valid = self._sb.file_info(remotefile, quiet = True)
if valid is None:
remf = remotefile
else:
if valid == {}:
remf = remotefile + self._sb.hostsep + localfile.rpartition(os.sep)[2]
else:
remf = remotefile
if overwrite == False:
return {'Success' : False,
'LOG' : "File "+str(remotefile)+" exists and overwrite was set to False. Upload was stopped."}
try:
fd = open(localfile, 'rb')
except OSError as e:
return {'Success' : False,
'LOG' : "File "+str(localfile)+" could not be opened. Error was: "+str(e)}
fsize = os.path.getsize(localfile)
if fsize > 0:
code = "filename _sp_updn '"+remf+"' recfm=N permission='"+permission+"';"
ll = self.submit(code, 'text')
logf = ll['LOG']
# GET Etag
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.compute.fileref+json;application/json",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', self._uri_files+"/_sp_updn", headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
Etag = req.getheader("Etag")
# PUT data
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*","Content-Type":"application/octet-stream",
"Transfer-Encoding" : "chunked",
"Authorization":"Bearer "+self.sascfg._token}
conn.connect()
conn.putrequest('PUT', self._uri_files+"/_sp_updn/content")
conn.putheader("Accept","*/*")
conn.putheader("Content-Type","application/octet-stream")
conn.putheader("If-Match",Etag)
conn.putheader("Transfer-Encoding","chunked")
conn.putheader("Authorization","Bearer "+self.sascfg._token)
conn.endheaders()
blksz = int(kwargs.get('blocksize', 50000))
while True:
buf = fd.read1(blksz)
if len(buf) == 0:
conn.send(b"0\r\n\r\n")
break
lenstr = "%s\r\n" % hex(len(buf))[2:]
conn.send(lenstr.encode())
conn.send(buf)
conn.send(b"\r\n")
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
code = "filename _sp_updn;"
else:
logf = ''
code = """
filename _sp_updn '"""+remf+"""' recfm=F encoding=binary lrecl=1 permission='"""+permission+"""';
data _null_;
fid = fopen('_sp_updn', 'O');
if fid then
rc = fclose(fid);
run;
filename _sp_updn;
"""
ll = self.submit(code, 'text')
logf += ll['LOG']
fd.close()
return {'Success' : True,
'LOG' : logf}
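    # Framing note (illustrative, not part of the original source): the manual
    # chunked upload above sends each block as
    #   <hex length>\r\n<bytes>\r\n
    # and terminates the stream with 0\r\n\r\n, per HTTP/1.1 chunked transfer
    # coding (RFC 7230, section 4.1).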
def download(self, localfile: str, remotefile: str, overwrite: bool = True, **kwargs):
"""
This method downloads a remote file from the SAS servers file system.
localfile - path to the local file to create or overwrite
        remotefile - path to remote file to download
overwrite - overwrite the output file if it exists?
"""
valid = self._sb.file_info(remotefile, quiet = True)
if valid is None:
return {'Success' : False,
'LOG' : "File "+str(remotefile)+" does not exist."}
if valid == {}:
return {'Success' : False,
'LOG' : "File "+str(remotefile)+" is a directory."}
if os.path.isdir(localfile):
locf = localfile + os.sep + remotefile.rpartition(self._sb.hostsep)[2]
else:
locf = localfile
try:
fd = open(locf, 'wb')
fd.write(b'write can fail even if open worked, as it turns out')
fd.close()
fd = open(locf, 'wb')
except OSError as e:
return {'Success' : False,
'LOG' : "File "+str(locf)+" could not be opened or written to. Error was: "+str(e)}
code = "filename _sp_updn '"+remotefile+"' recfm=F encoding=binary lrecl=4096;"
ll = self.submit(code, "text")
logf = ll['LOG']
# GET data
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*","Content-Type":"application/octet-stream",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', self._uri_files+"/_sp_updn/content", headers=headers)
req = conn.getresponse()
status = req.status
fd.write(req.read())
fd.flush()
fd.close()
conn.close()
ll = self.submit("filename _sp_updn;", 'text')
logf += ll['LOG']
return {'Success' : True,
'LOG' : logf}
def _getbytelenF(self, x):
return len(x.encode(self.sascfg.encoding))
def _getbytelenR(self, x):
return len(x.encode(self.sascfg.encoding, errors='replace'))
def dataframe2sasdata(self, df: '<Pandas Data Frame object>', table: str ='a',
libref: str ="", keep_outer_quotes: bool=False,
embedded_newlines: bool=True,
LF: str = '\x01', CR: str = '\x02',
colsep: str = '\x03', colrep: str = ' ',
datetimes: dict={}, outfmts: dict={}, labels: dict={},
outdsopts: dict={}, encode_errors = None, char_lengths = None,
**kwargs):
'''
This method imports a Pandas Data Frame to a SAS Data Set, returning the SASdata object for the new Data Set.
df - Pandas Data Frame to import to a SAS Data Set
table - the name of the SAS Data Set to create
libref - the libref for the SAS Data Set being created. Defaults to WORK, or USER if assigned
keep_outer_quotes - for character columns, have SAS keep any outer quotes instead of stripping them off.
        embedded_newlines - if any char columns have embedded CR or LF, set this to True to get them imported into the SAS data set
        LF - if embedded_newlines=True, the character to use for LF when transferring the data; defaults to '\x01'
        CR - if embedded_newlines=True, the character to use for CR when transferring the data; defaults to '\x02'
        colsep - the column separator character used for streaming the delimited data to SAS, defaults to '\x03'
datetimes - dict with column names as keys and values of 'date' or 'time' to create SAS date or times instead of datetimes
outfmts - dict with column names and SAS formats to assign to the new SAS data set
labels - dict with column names and SAS Labels to assign to the new SAS data set
outdsopts - a dictionary containing output data set options for the table being created
        encode_errors - 'fail' or 'replace' - default is 'fail'; the other choice is 'replace', which substitutes the replacement char for invalid chars. \
                        'ignore' will not transcode in Python, so you get whatever happens with your data and SAS
char_lengths - How to determine (and declare) lengths for CHAR variables in the output SAS data set
'''
input = ""
xlate = ""
card = ""
format = ""
length = ""
label = ""
dts = []
ncols = len(df.columns)
lf = "'"+'%02x' % ord(LF.encode(self.sascfg.encoding))+"'x"
cr = "'"+'%02x' % ord(CR.encode(self.sascfg.encoding))+"'x "
delim = "'"+'%02x' % ord(colsep.encode(self.sascfg.encoding))+"'x "
dts_upper = {k.upper():v for k,v in datetimes.items()}
dts_keys = dts_upper.keys()
fmt_upper = {k.upper():v for k,v in outfmts.items()}
fmt_keys = fmt_upper.keys()
lab_upper = {k.upper():v for k,v in labels.items()}
lab_keys = lab_upper.keys()
if encode_errors is None:
encode_errors = 'fail'
bpc = self._sb.pyenc[0]
if char_lengths and str(char_lengths).strip() in ['1','2','3','4']:
bpc = int(char_lengths)
if char_lengths and str(char_lengths) == 'exact':
CnotB = False
else:
CnotB = bpc == 1
if type(char_lengths) is not dict or len(char_lengths) < ncols:
charlens = self._sb.df_char_lengths(df, encode_errors, char_lengths)
else:
charlens = char_lengths
if charlens is None:
return -1
chr_upper = {k.upper():v for k,v in charlens.items()}
if type(df.index) != pd.RangeIndex:
warnings.warn("Note that Indexes are not transferred over as columns. Only actual coulmns are transferred")
for name in df.columns:
colname = str(name).replace("'", "''")
col_up = str(name).upper()
input += "'"+colname+"'n "
if col_up in lab_keys:
label += "label '"+colname+"'n ="+lab_upper[col_up]+";\n"
if col_up in fmt_keys:
format += "'"+colname+"'n "+fmt_upper[col_up]+" "
if df.dtypes[name].kind in ('O','S','U','V'):
try:
length += " '"+colname+"'n $"+str(chr_upper[col_up])
except KeyError as e:
logger.error("Dictionary provided as char_lengths is missing column: "+colname)
raise e
if keep_outer_quotes:
input += "~ "
dts.append('C')
if embedded_newlines:
xlate += " '"+colname+"'n = translate('"+colname+"'n, '0A'x, "+lf+");\n"
xlate += " '"+colname+"'n = translate('"+colname+"'n, '0D'x, "+cr+");\n"
else:
if df.dtypes[name].kind in ('M'):
length += " '"+colname+"'n 8"
input += ":B8601DT26.6 "
if col_up not in dts_keys:
if col_up not in fmt_keys:
format += "'"+colname+"'n E8601DT26.6 "
else:
if dts_upper[col_up].lower() == 'date':
if col_up not in fmt_keys:
format += "'"+colname+"'n E8601DA. "
xlate += " '"+colname+"'n = datepart('"+colname+"'n);\n"
else:
if dts_upper[col_up].lower() == 'time':
if col_up not in fmt_keys:
format += "'"+colname+"'n E8601TM. "
xlate += " '"+colname+"'n = timepart('"+colname+"'n);\n"
else:
logger.warning("invalid value for datetimes for column "+colname+". Using default.")
if col_up not in fmt_keys:
format += "'"+colname+"'n E8601DT26.6 "
dts.append('D')
else:
length += " '"+colname+"'n 8"
if df.dtypes[name] == 'bool':
dts.append('B')
else:
dts.append('N')
code = "data "
if len(libref):
code += libref+"."
code += "'"+table.strip().replace("'", "''")+"'n"
if len(outdsopts):
code += '('
for key in outdsopts:
code += key+'='+str(outdsopts[key]) + ' '
code += ");\n"
else:
code += ";\n"
if len(length):
code += "length "+length+";\n"
if len(format):
code += "format "+format+";\n"
code += label
code += "infile datalines delimiter="+delim+" STOPOVER;\ninput @;\nif _infile_ = '' then delete;\ninput "+input+";\n"+xlate+";\ndatalines4;"
self._asubmit(code, "text")
blksz = int(kwargs.get('blocksize', 1000000))
noencode = self._sb.sascei == 'utf-8' or encode_errors == 'ignore'
row_num = 0
code = ""
for row in df.itertuples(index=False):
row_num += 1
card = ""
for col in range(ncols):
var = str(row[col])
if dts[col] == 'N' and var == 'nan':
var = '.'
elif dts[col] == 'C':
if var == 'nan' or len(var) == 0:
var = ' '
else:
var = var.replace(colsep, colrep)
elif dts[col] == 'B':
var = str(int(row[col]))
elif dts[col] == 'D':
if var in ['nan', 'NaT', 'NaN']:
var = '.'
else:
var = str(row[col].to_datetime64())[:26]
card += var
if col < (ncols-1):
card += colsep
if embedded_newlines:
card = card.replace(LF, colrep).replace(CR, colrep)
card = card.replace('\n', LF).replace('\r', CR)
code += card+"\n"
if len(code) > blksz:
if not noencode:
if encode_errors == 'fail':
if CnotB:
try:
chk = code.encode(self.sascfg.encoding)
except Exception as e:
self._asubmit(";;;;\n;;;;", "text")
ll = self.submit("run;", 'text')
logger.error("Transcoding error encountered. Data transfer stopped on or before row "+str(row_num))
logger.error("DataFrame contains characters that can't be transcoded into the SAS session encoding.\n"+str(e))
return row_num
else:
code = code.encode(self.sascfg.encoding, errors='replace').decode(self.sascfg.encoding)
self._asubmit(code, "text")
code = ""
if not noencode and len(code) > 0:
if encode_errors == 'fail':
if CnotB:
try:
code = code.encode(self.sascfg.encoding).decode(self.sascfg.encoding)
except Exception as e:
self._asubmit(";;;;\n;;;;", "text")
ll = self.submit("run;", 'text')
logger.error("Transcoding error encountered. Data transfer stopped on or before row "+str(row_num))
logger.error("DataFrame contains characters that can't be transcoded into the SAS session encoding.\n"+str(e))
return row_num
else:
code = code.encode(self.sascfg.encoding, errors='replace').decode(self.sascfg.encoding)
self._asubmit(code+";;;;\n;;;;", "text")
ll = self.submit("quit;", 'text')
return None
def sasdata2dataframe(self, table: str, libref: str ='', dsopts: dict = None,
rowsep: str = '\x01', colsep: str = '\x02',
rowrep: str = ' ', colrep: str = ' ',
**kwargs) -> '<Pandas Data Frame object>':
'''
This method exports the SAS Data Set to a Pandas Data Frame, returning the Data Frame object.
table - the name of the SAS Data Set you want to export to a Pandas Data Frame
libref - the libref for the SAS Data Set.
dsopts - data set options for the input SAS Data Set
rowsep - the row separator character to use; defaults to '\x01'
colsep - the column separator character to use; defaults to '\x02'
rowrep - the char to convert to for any embedded rowsep chars, defaults to ' '
colrep - the char to convert to for any embedded colsep chars, defaults to ' '
'''
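# In normal use this is reached through the SASsession.sd2df() wrapper rather
# than called directly; a minimal sketch, assuming a connected saspy session
# `sas` and an illustrative table:
#
#   df = sas.sd2df('cars', libref='sashelp')                 # default DISK path
#   df = sas.sd2df('cars', libref='sashelp', method='csv')   # CSV path below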
dsopts = dsopts if dsopts is not None else {}
method = kwargs.pop('method', None)
if method and method.lower() == 'csv':
return self.sasdata2dataframeCSV(table, libref, dsopts, **kwargs)
#elif method and method.lower() == 'disk':
else:
return self.sasdata2dataframeDISK(table, libref, dsopts, rowsep, colsep,
rowrep, colrep, **kwargs)
def sasdata2dataframeCSV(self, table: str, libref: str ='', dsopts: dict =None, opts: dict = None,
**kwargs) -> '<Pandas Data Frame object>':
'''
This method exports the SAS Data Set to a Pandas Data Frame, returning the Data Frame object.
table - the name of the SAS Data Set you want to export to a Pandas Data Frame
libref - the libref for the SAS Data Set.
dsopts - data set options for the input SAS Data Set
opts - a dictionary containing any of the following PROC EXPORT options (delimiter, putnames)
tempfile - DEPRECATED
tempkeep - DEPRECATED
These two options are for advanced usage. They override how saspy imports data. For more info
see https://sassoftware.github.io/saspy/advanced-topics.html#advanced-sd2df-and-df2sd-techniques
dtype - this is the parameter to Pandas read_csv, overriding what saspy generates and uses
my_fmts - bool: if True, overrides the formats saspy would use, using those on the data set or in dsopts=
'''
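# Direct-call sketch (normally dispatched from sasdata2dataframe when
# method='csv'; the table, libref and opts values are illustrative):
#
#   df = self.sasdata2dataframeCSV('cars', libref='sashelp',
#                                  opts={'delimiter': ','})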
tmp = kwargs.pop('tempfile', None)
tmp = kwargs.pop('tempkeep', None)
dsopts = dsopts if dsopts is not None else {}
opts = opts if opts is not None else {}
if libref:
tabname = libref+".'"+table.strip().replace("'", "''")+"'n "
else:
tabname = "'"+table.strip().replace("'", "''")+"'n "
code = "data work.sasdata2dataframe / view=work.sasdata2dataframe; set "+tabname+self._sb._dsopts(dsopts)+";run;\n"
ll = self.submit(code, "text")
##GET Data Table Info
#conn = self.sascfg.HTTPConn; conn.connect()
#headers={"Accept":"application/vnd.sas.compute.data.table+json", "Authorization":"Bearer "+self.sascfg._token}
#conn.request('GET', "/compute/sessions/"+self.pid+"/data/work/sasdata2dataframe", headers=headers)
#req = conn.getresponse()
#status = req.status
#conn.close()
#resp = req.read()
#js = json.loads(resp.decode(self.sascfg.encoding))
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', "/compute/sessions/"+self.pid+"/data/work/sasdata2dataframe/columns?start=0&limit=9999999", headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
js = json.loads(resp.decode(self.sascfg.encoding))
varlist = []
vartype = []
nvars = js.get('count')
lst = js.get('items')
for i in range(len(lst)):
varlist.append(lst[i].get('name'))
vartype.append(lst[i].get('type'))
dvarlist = list(varlist)
for i in range(len(varlist)):
varlist[i] = varlist[i].replace("'", "''")
topts = dict(dsopts)
topts.pop('firstobs', None)
topts.pop('obs', None)
code = "data work._n_u_l_l_;output;run;\n"
code += "data _null_; set work._n_u_l_l_ "+tabname+self._sb._dsopts(topts)+";put 'FMT_CATS=';\n"
for i in range(nvars):
code += "_tom = vformatn('"+varlist[i]+"'n);put _tom;\n"
code += "stop;\nrun;\nproc delete data=work._n_u_l_l_;run;"
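# work._n_u_l_l_ contributes one observation so the step runs even when the
# target table is empty; the FMT_CATS= marker and one vformatn() format name
# per column are then guaranteed to appear in the log for parsing below.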
ll = self.submit(code, "text")
l2 = ll['LOG'].rpartition("FMT_CATS=")
l2 = l2[2].partition("\n")
varcat = l2[2].split("\n", nvars)
del varcat[nvars]
code = "proc delete data=work.sasdata2dataframe(memtype=view);run;\n"
code += "data work.sasdata2dataframe / view=work.sasdata2dataframe; set "+tabname+self._sb._dsopts(dsopts)+";\nformat "
idx_col = kwargs.pop('index_col', False)
eng = kwargs.pop('engine', 'c')
my_fmts = kwargs.pop('my_fmts', False)
k_dts = kwargs.pop('dtype', None)
if k_dts is None and my_fmts:
logger.warning("my_fmts option only valid when dtype= is specified. Ignoring and using necessary formatting for data transfer.")
my_fmts = False
if not my_fmts:
for i in range(nvars):
if vartype[i] == 'FLOAT':
code += "'"+varlist[i]+"'n "
if varcat[i] in self._sb.sas_date_fmts:
code += 'E8601DA10. '
else:
if varcat[i] in self._sb.sas_time_fmts:
code += 'E8601TM15.6 '
else:
if varcat[i] in self._sb.sas_datetime_fmts:
code += 'E8601DT26.6 '
else:
code += 'best32. '
code += ";run;\n"
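# The E8601* overrides render dates, times and datetimes as ISO 8601 text that
# pandas can parse reliably; all other numerics fall back to best32.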
ll = self.submit(code, "text")
if k_dts is None:
dts = {}
for i in range(nvars):
if vartype[i] == 'FLOAT':
if varcat[i] not in self._sb.sas_date_fmts + self._sb.sas_time_fmts + self._sb.sas_datetime_fmts:
dts[dvarlist[i]] = 'float'
else:
dts[dvarlist[i]] = 'str'
else:
dts[dvarlist[i]] = 'str'
else:
dts = k_dts
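# Unless the caller supplied dtype=, numerics come across as float, while char
# columns and date/time-formatted numerics are read as str (the latter are
# converted with pd.to_datetime after the read).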
code = "filename _tomodsx '"+self._sb.workpath+"_tomodsx' lrecl="+str(self.sascfg.lrecl)+" recfm=v encoding='utf-8';\n"
code += "proc export data=work.sasdata2dataframe outfile=_tomodsx dbms=csv replace;\n"
code += self._sb._expopts(opts)+" run;\n"
code += "proc delete data=work.sasdata2dataframe(memtype=view);run;\n"
ll = self.submit(code, 'text')
logf = ll['LOG']
code = "filename _sp_updn '"+self._sb.workpath+"_tomodsx' recfm=F encoding=binary lrecl=4096;"
ll = self.submit(code, "text")
logf += ll['LOG']
# GET data
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*","Content-Type":"application/octet-stream",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', self._uri_files+"/_sp_updn/content", headers=headers)
req = conn.getresponse()
status = req.status
sockout = _read_sock(req=req)
df = pd.read_csv(sockout, index_col=idx_col, encoding='utf8', engine=eng, dtype=dts, **kwargs)
conn.close()
if k_dts is None: # don't override these if user provided their own dtypes
for i in range(nvars):
if vartype[i] == 'FLOAT':
if varcat[i] in self._sb.sas_date_fmts + self._sb.sas_time_fmts + self._sb.sas_datetime_fmts:
df[dvarlist[i]] = pd.to_datetime(df[dvarlist[i]], errors='coerce')
ll = self.submit("filename _sp_updn;", 'text')
logf += ll['LOG']
return df
def sasdata2dataframeDISK(self, table: str, libref: str ='', dsopts: dict = None,
rowsep: str = '\x01', colsep: str = '\x02',
rowrep: str = ' ', colrep: str = ' ', **kwargs) -> '<Pandas Data Frame object>':
'''
This method exports the SAS Data Set to a Pandas Data Frame, returning the Data Frame object.
table - the name of the SAS Data Set you want to export to a Pandas Data Frame
libref - the libref for the SAS Data Set.
dsopts - data set options for the input SAS Data Set
rowsep - the row separator character to use; defaults to '\x01'
colsep - the column separator character to use; defaults to '\x02'
rowrep - the char to convert to for any embedded rowsep chars, defaults to ' '
colrep - the char to convert to for any embedded colsep chars, defaults to ' '
tempfile - DEPRECATED
tempkeep - DEPRECATED
These two options are for advanced usage. They override how saspy imports data. For more info
see https://sassoftware.github.io/saspy/advanced-topics.html#advanced-sd2df-and-df2sd-techniques
dtype - this is the parameter to Pandas read_csv, overriding what saspy generates and uses
my_fmts - bool: if True, overrides the formats saspy would use, using those on the data set or in dsopts=
'''
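# Direct-call sketch (normally reached through sasdata2dataframe, which
# defaults to this DISK method; names are illustrative):
#
#   df = self.sasdata2dataframeDISK('cars', libref='sashelp',
#                                   rowsep='\x01', colsep='\x02')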
tmp = kwargs.pop('tempfile', None)
tmp = kwargs.pop('tempkeep', None)
dsopts = dsopts if dsopts is not None else {}
if libref:
tabname = libref+".'"+table.strip().replace("'", "''")+"'n "
else:
tabname = "'"+table.strip().replace("'", "''")+"'n "
code = "data work.sasdata2dataframe / view=work.sasdata2dataframe; set "+tabname+self._sb._dsopts(dsopts)+";run;\n"
ll = self.submit(code, "text")
##GET Data Table Info
#conn = self.sascfg.HTTPConn; conn.connect()
#headers={"Accept":"application/vnd.sas.compute.data.table+json", "Authorization":"Bearer "+self.sascfg._token}
#conn.request('GET', "/compute/sessions/"+self.pid+"/data/work/sasdata2dataframe", headers=headers)
#req = conn.getresponse()
#status = req.status
#conn.close()
#resp = req.read()
#js = json.loads(resp.decode(self.sascfg.encoding))
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"application/vnd.sas.collection+json", "Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', "/compute/sessions/"+self.pid+"/data/work/sasdata2dataframe/columns?start=0&limit=9999999", headers=headers)
req = conn.getresponse()
status = req.status
resp = req.read()
conn.close()
js = json.loads(resp.decode(self.sascfg.encoding))
varlist = []
vartype = []
nvars = js.get('count')
lst = js.get('items')
for i in range(len(lst)):
varlist.append(lst[i].get('name'))
vartype.append(lst[i].get('type'))
dvarlist = list(varlist)
for i in range(len(varlist)):
varlist[i] = varlist[i].replace("'", "''")
topts = dict(dsopts)
topts.pop('firstobs', None)
topts.pop('obs', None)
code = "proc delete data=work.sasdata2dataframe(memtype=view);run;"
code += "data work._n_u_l_l_;output;run;\n"
code += "data _null_; set work._n_u_l_l_ "+tabname+self._sb._dsopts(topts)+";put 'FMT_CATS=';\n"
for i in range(nvars):
code += "_tom = vformatn('"+varlist[i]+"'n);put _tom;\n"
code += "stop;\nrun;\nproc delete data=work._n_u_l_l_;run;"
ll = self.submit(code, "text")
l2 = ll['LOG'].rpartition("FMT_CATS=")
l2 = l2[2].partition("\n")
varcat = l2[2].split("\n", nvars)
del varcat[nvars]
rdelim = "'"+'%02x' % ord(rowsep.encode(self.sascfg.encoding))+"'x"
cdelim = "'"+'%02x' % ord(colsep.encode(self.sascfg.encoding))+"'x "
idx_col = kwargs.pop('index_col', False)
eng = kwargs.pop('engine', 'c')
my_fmts = kwargs.pop('my_fmts', False)
k_dts = kwargs.pop('dtype', None)
if k_dts is None and my_fmts:
logger.warning("my_fmts option only valid when dtype= is specified. Ignoring and using necessary formatting for data transfer.")
my_fmts = False
code = "filename _tomodsx '"+self._sb.workpath+"_tomodsx' recfm=v termstr=NL encoding='utf-8';\n"
code += "data _null_; set "+tabname+self._sb._dsopts(dsopts)+";\n"
if not my_fmts:
for i in range(nvars):
if vartype[i] == 'FLOAT':
code += "format '"+varlist[i]+"'n "
if varcat[i] in self._sb.sas_date_fmts:
code += 'E8601DA10.'
else:
if varcat[i] in self._sb.sas_time_fmts:
code += 'E8601TM15.6'
else:
if varcat[i] in self._sb.sas_datetime_fmts:
code += 'E8601DT26.6'
else:
code += 'best32.'
code += '; '
if i % 10 == 9:
code +='\n'
miss = {}
code += "\nfile _tomodsx lrecl="+str(self.sascfg.lrecl)+" dlm="+cdelim+" recfm=v termstr=NL encoding='utf-8';\n"
for i in range(nvars):
if vartype[i] != 'FLOAT':
code += "'"+varlist[i]+"'n = translate('"
code += varlist[i]+"'n, '{}'x, '{}'x); ".format( \
'%02x%02x' % \
(ord(rowrep.encode(self.sascfg.encoding)), \
ord(colrep.encode(self.sascfg.encoding))),
'%02x%02x' % \
(ord(rowsep.encode(self.sascfg.encoding)), \
ord(colsep.encode(self.sascfg.encoding))))
miss[dvarlist[i]] = ' '
else:
code += "if missing('"+varlist[i]+"'n) then '"+varlist[i]+"'n = .; "
miss[dvarlist[i]] = '.'
if i % 10 == 9:
code +='\n'
code += "\nput "
for i in range(nvars):
code += " '"+varlist[i]+"'n "
if i % 10 == 9:
code +='\n'
code += rdelim+";\nrun;"
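# The generated data step streams every row to the _tomodsx fileref as UTF-8
# text: values are separated by the colsep byte (dlm=), each record ends with
# the rowsep byte, and termstr=NL appends a newline after it.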
ll = self.submit(code, "text")
if k_dts is None:
dts = {}
for i in range(nvars):
if vartype[i] == 'FLOAT':
if varcat[i] not in self._sb.sas_date_fmts + self._sb.sas_time_fmts + self._sb.sas_datetime_fmts:
dts[dvarlist[i]] = 'float'
else:
dts[dvarlist[i]] = 'str'
else:
dts[dvarlist[i]] = 'str'
else:
dts = k_dts
quoting = kwargs.pop('quoting', 3)
code = "filename _sp_updn '"+self._sb.workpath+"_tomodsx' recfm=F encoding=binary lrecl=4096;"
ll = self.submit(code, "text")
logf = ll['LOG']
# GET data
conn = self.sascfg.HTTPConn; conn.connect()
headers={"Accept":"*/*","Content-Type":"application/octet-stream",
"Authorization":"Bearer "+self.sascfg._token}
conn.request('GET', self._uri_files+"/_sp_updn/content", headers=headers)
req = conn.getresponse()
status = req.status
sockout = _read_sock(req=req, method='DISK', rsep=(colsep+rowsep+'\n').encode(), rowsep=rowsep.encode())
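# On disk each record effectively ends with colsep+rowsep+'\n' (the put
# statement's dlm= delimiter lands before the trailing rowsep literal), so
# _read_sock splits on that full terminator and normalizes it back to bare
# rowsep, which pandas then uses as the line terminator.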
df = pd.read_csv(sockout, index_col=idx_col, engine=eng, header=None, names=dvarlist,
sep=colsep, lineterminator=rowsep, dtype=dts, na_values=miss,
encoding='utf-8', quoting=quoting, **kwargs)
conn.close()
if k_dts is None: # don't override these if user provided their own dtypes
for i in range(nvars):
if vartype[i] == 'FLOAT':
if varcat[i] in self._sb.sas_date_fmts + self._sb.sas_time_fmts + self._sb.sas_datetime_fmts:
df[dvarlist[i]] = pd.to_datetime(df[dvarlist[i]], errors='coerce')
ll = self.submit("filename _sp_updn;", 'text')
logf += ll['LOG']
return df
class _read_sock(io.StringIO):
def __init__(self, **kwargs):
self.req = kwargs.get('req')
self.method = kwargs.get('method', 'CSV')
self.rowsep = kwargs.get('rowsep', b'\n')
self.rsep = kwargs.get('rsep', self.rowsep)
self.datar = b""
def read(self, size=4096):
datl = 0
size = max(size, 4096)
notarow = True
while datl < size or notarow:
data = self.req.read(size)
dl = len(data)
if dl:
datl += dl
self.datar += data
if notarow:
notarow = self.datar.count(self.rsep) <= 0
else:
if len(self.datar) <= 0:
return ''
else:
break
data = self.datar.rpartition(self.rsep)
if self.method == 'DISK':
datap = (data[0]+data[1]).replace(self.rsep, self.rowsep)
else:
datap = data[0]+data[1]
self.datar = data[2]
return datap.decode()